diff --git a/.gitattributes b/.gitattributes index 460c60cde5ea001bdbb4e378afbd7ff25f93580c..a4c2924d8669ac14ddf1c561e38d7d70fea673a3 100644 --- a/.gitattributes +++ b/.gitattributes @@ -37,3 +37,11 @@ comfyui_screenshot.png filter=lfs diff=lfs merge=lfs -text NotoSans-Regular.ttf filter=lfs diff=lfs merge=lfs -text custom_controlnet_aux/mesh_graphormer/hand_landmarker.task filter=lfs diff=lfs merge=lfs -text custom_controlnet_aux/tests/test_image.png filter=lfs diff=lfs merge=lfs -text +ComfyUI-Easy-Use/py/kolors/chatglm/tokenizer/vocab.txt filter=lfs diff=lfs merge=lfs -text +ComfyUI-Easy-Use/resources/OpenSans-Medium.ttf filter=lfs diff=lfs merge=lfs -text +ComfyUI-KJNodes/docs/images/2024-04-03_20_49_29-ComfyUI.png filter=lfs diff=lfs merge=lfs -text +ComfyUI-KJNodes/fonts/FreeMono.ttf filter=lfs diff=lfs merge=lfs -text +ComfyUI-KJNodes/fonts/FreeMonoBoldOblique.otf filter=lfs diff=lfs merge=lfs -text +ComfyUI-KJNodes/fonts/TTNorms-Black.otf filter=lfs diff=lfs merge=lfs -text +ComfyUI-Kolors-MZ/configs/tokenizer/vocab.txt filter=lfs diff=lfs merge=lfs -text +ComfyUI-KwaiKolorsWrapper/configs/tokenizer/vocab.txt filter=lfs diff=lfs merge=lfs -text diff --git a/ComfyUI-Advanced-ControlNet/LICENSE b/ComfyUI-Advanced-ControlNet/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..f288702d2fa16d3cdf0035b15a9fcbc552cd88e7 --- /dev/null +++ b/ComfyUI-Advanced-ControlNet/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. 
+ + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. 
+ + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. 
Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. diff --git a/ComfyUI-Advanced-ControlNet/README.md b/ComfyUI-Advanced-ControlNet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..084ff2b7bc1b1231f0b3855bcda3ea88707b0bb8 --- /dev/null +++ b/ComfyUI-Advanced-ControlNet/README.md @@ -0,0 +1,202 @@ +# ComfyUI-Advanced-ControlNet +Nodes for scheduling ControlNet strength across timesteps and batched latents, as well as applying custom weights and attention masks. The ControlNet nodes here fully support sliding context sampling, like the one used in the [ComfyUI-AnimateDiff-Evolved](https://github.com/Kosinkadink/ComfyUI-AnimateDiff-Evolved) nodes. Currently supports ControlNets, T2IAdapters, ControlLoRAs, ControlLLLite, SparseCtrls, SVD-ControlNets, and Reference. + +Custom weights allow replication of the "My prompt is more important" feature of Auto1111's sd-webui ControlNet extension via Soft Weights, and the "ControlNet is more important" feature can be granularly controlled by changing the uncond_multiplier on the same Soft Weights. 
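+
+For a rough sense of how ***Soft Weights*** shape the weights, the sketch below mirrors the `get_universal_weights` method found in `adv_control/control.py` later in this diff; the `soft_weights` helper name and the example base_multiplier value are illustrative only, not part of the node API:
+
+```python
+# Illustrative sketch (not node API): how base_multiplier produces the 13
+# per-block weights used by Soft Weights, mirroring get_universal_weights
+# in adv_control/control.py further down in this diff.
+def soft_weights(base_multiplier: float) -> list[float]:
+    # Earlier entries are dampened the most; the final entry stays at 1.0.
+    return [base_multiplier ** float(12 - i) for i in range(13)]
+
+print(soft_weights(0.825))  # smallest weight first, ..., 0.825, 1.0
+print(soft_weights(1.0))    # all 1.0 -> no softening applied
+```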
+ +ControlNet preprocessors are available through [comfyui_controlnet_aux](https://github.com/Fannovel16/comfyui_controlnet_aux) nodes. + +## Features +- Timestep and latent strength scheduling +- Attention masks +- Replicate ***"My prompt is more important"*** feature from sd-webui-controlnet extension via ***Soft Weights***, and allow softness to be tweaked via ***base_multiplier*** +- Replicate ***"ControlNet is more important"*** feature from sd-webui-controlnet extension via ***uncond_multiplier*** on ***Soft Weights*** + - uncond_multiplier=0.0 gives identical results to auto1111's feature, but values between 0.0 and 1.0 can be used without issue to granularly control the setting. +- ControlNet, T2IAdapter, and ControlLoRA support for sliding context windows +- ControlLLLite support (requires model_optional to be passed into and out of Apply Advanced ControlNet node) +- SparseCtrl support +- SVD-ControlNet support + - Stable Video Diffusion ControlNets trained by **CiaraRowles**: [Depth](https://huggingface.co/CiaraRowles/temporal-controlnet-depth-svd-v1/tree/main/controlnet), [Lineart](https://huggingface.co/CiaraRowles/temporal-controlnet-lineart-svd-v1/tree/main/controlnet) +- Reference support + - Supports ```reference_attn```, ```reference_adain```, and ```reference_adain+attn``` modes. ```style_fidelity``` and ```ref_weight``` are equivalent to style_fidelity and control_weight in Auto1111, respectively, and the strength of the Apply ControlNet is the balance between the ref-influenced result and the no-ref result. There is also a Reference ControlNet (Finetune) node that allows adjusting the style_fidelity, weight, and strength of attn and adain separately. + +## Table of Contents: +- [Scheduling Explanation](#scheduling-explanation) +- [Nodes](#nodes) +- [Usage](#usage) (will fill this out soon) + + +# Scheduling Explanation + +The two core concepts for scheduling are ***Timestep Keyframes*** and ***Latent Keyframes***. + +***Timestep Keyframes*** hold the values that guide the settings for a controlnet, and begin to take effect based on their start_percent, which corresponds to the percentage of the sampling process. They can contain masks for the strengths of each latent, control_net_weights, and latent_keyframes (specific strengths for each latent), all optional. + +***Latent Keyframes*** determine the strength of the controlnet for specific latents - all they contain is the batch_index of the latent, and the strength the controlnet should apply for that latent. As a concept, latent keyframes achieve the same effect as a uniform mask with the chosen strength value. A conceptual sketch of this scheduling behavior is shown right after the node overview below. + +![advcn_image](https://github.com/Kosinkadink/ComfyUI-Advanced-ControlNet/assets/7365912/e6275264-6c3f-4246-a319-111ee48f4cd9) + +# Nodes + +The ControlNet nodes provided here are the ***Apply Advanced ControlNet*** and ***Load Advanced ControlNet Model*** (or diff) nodes. The vanilla ControlNet nodes are also compatible, and can be used almost interchangeably - the only difference is that **at least one of these nodes must be used** for Advanced versions of ControlNets to be used (important for sliding context sampling, like with AnimateDiff-Evolved).
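+
+As referenced in the Scheduling Explanation above, here is a conceptual sketch of how the keyframe whose start_percent has most recently been reached becomes the active one during sampling (not the extension's actual code; the function name is hypothetical, and real keyframes carry more settings than just a strength):
+
+```python
+# Conceptual sketch only: pick the active keyframe for a given point in sampling.
+# Keyframes are (start_percent, strength) pairs; the schedule is kept sorted by
+# start_percent, and the active keyframe is the last one whose start_percent
+# has already been reached.
+def active_keyframe(keyframes: list[tuple[float, float]], sample_percent: float) -> tuple[float, float]:
+    schedule = sorted(keyframes)  # sorted by start_percent, like the real nodes do
+    current = schedule[0]
+    for kf in schedule:
+        if kf[0] <= sample_percent:
+            current = kf
+        else:
+            break
+    return current
+
+schedule = [(0.0, 1.0), (0.5, 0.6), (0.8, 0.0)]  # (start_percent, strength)
+print(active_keyframe(schedule, 0.25))  # (0.0, 1.0)
+print(active_keyframe(schedule, 0.9))   # (0.8, 0.0) -> controlnet effectively off
+```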
+ +Key: +- 🟩 - required inputs +- 🟨 - optional inputs +- 🟦 - start as widgets, can be converted to inputs +- 🟥 - optional input/output, but not recommended to use unless needed +- 🟪 - output + +## Apply Advanced ControlNet +![image](https://github.com/Kosinkadink/ComfyUI-Advanced-ControlNet/assets/7365912/dc541d41-70df-4a71-b832-efa65af98f06) + +Same functionality as the vanilla Apply ControlNet (Advanced) node, except with Advanced ControlNet features added to it. Automatically converts any ControlNet from ControlNet loaders into Advanced versions. + +### Inputs +- 🟩***positive***: conditioning (positive). +- 🟩***negative***: conditioning (negative). +- 🟩***control_net***: loaded controlnet; will be converted to Advanced version automatically by this node, if it's a supported type. +- 🟩***image***: images to guide controlnets - if the loaded controlnet requires it, they must be preprocessed images. If one image is provided, it will be used for all latents. If more images are provided, each image will be used separately for each latent. If not enough images to meet latent count, will repeat the images from the beginning to match vanilla ControlNet functionality. +- 🟨***mask_optional***: attention masks to apply to controlnets; basically, decides what part of the image the controlnet should apply to (and the relative strength, if the mask is not binary). Same as the image input: if you provide more than one mask, each can apply to a different latent. +- 🟨***timestep_kf***: timestep keyframes to guide controlnet effect throughout sampling steps. +- 🟨***latent_kf_override***: override for latent keyframes, useful if no other features from timestep keyframes are needed. *NOTE: this latent keyframe will be applied to ALL timesteps, regardless of whether there are other latent keyframes attached to connected timestep keyframes.* +- 🟨***weights_override***: override for weights, useful if no other features from timestep keyframes are needed. *NOTE: this weight will be applied to ALL timesteps, regardless of whether there are other weights attached to connected timestep keyframes.* +- 🟦***strength***: strength of controlnet; 1.0 is full strength, 0.0 is no effect at all. +- 🟦***start_percent***: sampling step percentage at which controlnet should start to be applied - no matter what start_percent is set on timestep keyframes, they won't take effect until this start_percent is reached. +- 🟦***stop_percent***: sampling step percentage at which controlnet should stop being applied - no matter what start_percent is set on timestep keyframes, they won't take effect once this stop_percent is reached. + +### Outputs +- 🟪***positive***: conditioning (positive) with applied controlnets +- 🟪***negative***: conditioning (negative) with applied controlnets + +## Load Advanced ControlNet Model +![image](https://github.com/Kosinkadink/ComfyUI-Advanced-ControlNet/assets/7365912/4a7f58a9-783d-4da4-bf82-bc9c167e4722) + +Loads a ControlNet model and converts it into an Advanced version that supports all the features in this repo. When used with the **Apply Advanced ControlNet** node, there is no reason to use the timestep_keyframe input on this node - use timestep_kf on the Apply node instead. + +### Inputs +- 🟥***timestep_keyframe***: optional and likely unnecessary input to have ControlNet use selected timestep_keyframes - should not be used unless you need to. Useful if this node is not attached to the **Apply Advanced ControlNet** node, but you still want to use a Timestep Keyframe, or to use TK_SHORTCUT outputs from ControlWeights in the same scenario.
Will be overridden by the timestep_kf input on the **Apply Advanced ControlNet** node, if one is provided there. +- 🟨***model***: model to plug into the diff version of the node. Some controlnets are designed to receive the model; if you don't know what this does, you probably don't want to use the diff version of the node. + +### Outputs +- 🟪***CONTROL_NET***: loaded Advanced ControlNet + +## Timestep Keyframe +![image](https://github.com/Kosinkadink/ComfyUI-Advanced-ControlNet/assets/7365912/404f3cfe-5852-4eed-935b-37e32493d1b5) + +Scheduling node across timesteps (sampling steps) based on the set start_percent. Chaining Timestep Keyframes allows ControlNet scheduling across sampling steps (percentage-wise), through a timestep keyframe schedule. + +### Inputs +- 🟨***prev_timestep_kf***: used to chain Timestep Keyframes together to create a schedule. The order does not matter - the Timestep Keyframes sort themselves automatically by their start_percent. *Any Timestep Keyframe contained in the prev_timestep_keyframe that contains the same start_percent as this Timestep Keyframe will be overwritten.* +- 🟨***cn_weights***: weights to apply to controlnet while this Timestep Keyframe is in effect. Must be compatible with the loaded controlnet, or will throw an error explaining what weight types are compatible. If inherit_missing is True and no control_net_weight is passed in, will attempt to reuse the last-used weights in the timestep keyframe schedule. *If the Apply Advanced ControlNet node has a weight_override, the weight_override will be used during sampling instead of control_net_weight.* +- 🟨***latent_keyframe***: latent keyframes to apply to controlnet while this Timestep Keyframe is in effect. If inherit_missing is True and no latent_keyframe is passed in, will attempt to reuse the last-used latent keyframes in the timestep keyframe schedule. *If the Apply Advanced ControlNet node has a latent_kf_override, the latent_kf_override will be used during sampling instead of latent_keyframe.* +- 🟨***mask_optional***: attention masks to apply to controlnets; basically, decides what part of the image the controlnet should apply to (and the relative strength, if the mask is not binary). Same as mask_optional on the Apply Advanced ControlNet node: can apply either one mask to all latents, or individual masks for each latent. If inherit_missing is True and no mask_optional is passed in, will attempt to reuse the last-used mask_optional in the timestep keyframe schedule. It is NOT overridden by mask_optional on the Apply Advanced ControlNet node; they will be used together. +- 🟦***start_percent***: sampling step percentage at which this Timestep Keyframe qualifies to be used. Acts as the 'key' for the Timestep Keyframe in the timestep keyframe schedule. +- 🟦***strength***: strength of the controlnet; multiplies the controlnet by this value, basically, applied alongside the strength on the Apply ControlNet node. If set to 0.0, it will not have any effect during the duration of this Timestep Keyframe's effect, and will increase sampling speed by not doing any work. +- 🟦***null_latent_kf_strength***: strength to assign to latents that are unaccounted for in the passed in latent_keyframes. Has no effect if no latent_keyframes are passed in, or if no batch_indices are unaccounted for in the latent_keyframes during sampling. +- 🟦***inherit_missing***: determines whether values from previous Timestep Keyframes should be reused for optional values (control_net_weights, latent_keyframe, and mask_optional) that are not included on this TimestepKeyframe.
To inherit only specific inputs, use default inputs. +- 🟦***guarantee_steps***: when 1 or greater, even if a Timestep Keyframe ahead of this one in the schedule has a start_percent closer to the current sampling percentage, this Timestep Keyframe will still be used for the specified number of steps before moving on to the next selected Timestep Keyframe. Whether the Timestep Keyframe is used or not, its inputs will still be taken into account for inherit_missing purposes. + +### Outputs +- 🟪***TIMESTEP_KF***: the created Timestep Keyframe, which can either be linked to another Timestep Keyframe or plugged into a Timestep Keyframe input. + +## Timestep Keyframe Interpolation +![image](https://github.com/Kosinkadink/ComfyUI-Advanced-ControlNet/assets/7365912/9789617c-202c-4271-92a2-0909bcf9b108) + +Allows creating Timestep Keyframes with interpolated strength values in a given percent range. (The first generated keyframe will have guarantee_steps=1, the rest that follow will have guarantee_steps=0). + +### Inputs +- 🟨***prev_timestep_kf***: used to chain Timestep Keyframes together to create a schedule. The order does not matter - the Timestep Keyframes sort themselves automatically by their start_percent. *Any Timestep Keyframe contained in the prev_timestep_keyframe that contains the same start_percent as this Timestep Keyframe will be overwritten.* +- 🟨***cn_weights***: weights to apply to controlnet while this Timestep Keyframe is in effect. Must be compatible with the loaded controlnet, or will throw an error explaining what weight types are compatible. If inherit_missing is True and no control_net_weight is passed in, will attempt to reuse the last-used weights in the timestep keyframe schedule. *If the Apply Advanced ControlNet node has a weight_override, the weight_override will be used during sampling instead of control_net_weight.* +- 🟨***latent_keyframe***: latent keyframes to apply to controlnet while this Timestep Keyframe is in effect. If inherit_missing is True and no latent_keyframe is passed in, will attempt to reuse the last-used latent keyframes in the timestep keyframe schedule. *If the Apply Advanced ControlNet node has a latent_kf_override, the latent_kf_override will be used during sampling instead of latent_keyframe.* +- 🟨***mask_optional***: attention masks to apply to controlnets; basically, decides what part of the image the controlnet should apply to (and the relative strength, if the mask is not binary). Same as mask_optional on the Apply Advanced ControlNet node: can apply either one mask to all latents, or individual masks for each latent. If inherit_missing is True and no mask_optional is passed in, will attempt to reuse the last-used mask_optional in the timestep keyframe schedule. It is NOT overridden by mask_optional on the Apply Advanced ControlNet node; they will be used together. +- 🟦***start_percent***: sampling step percentage at which the first generated Timestep Keyframe qualifies to be used. +- 🟦***end_percent***: sampling step percentage at which the last generated Timestep Keyframe qualifies to be used. +- 🟦***strength_start***: strength of the Timestep Keyframe at start of range. +- 🟦***strength_end***: strength of the Timestep Keyframe at end of range. +- 🟦***interpolation***: the method of interpolation. +- 🟦***intervals***: the number of keyframes to generate in total - the first will have its start_percent equal to start_percent, the last will have its start_percent equal to end_percent.
+ +- 🟦***null_latent_kf_strength***: strength to assign to latents that are unaccounted for in the passed in latent_keyframes. Has no effect if no latent_keyframes are passed in, or if no batch_indices are unaccounted for in the latent_keyframes during sampling. +- 🟦***inherit_missing***: determines whether values from previous Timestep Keyframes should be reused for optional values (control_net_weights, latent_keyframe, and mask_optional) that are not included on this TimestepKeyframe. To inherit only specific inputs, use default inputs. +- 🟦***print_keyframes***: if True, will print the Timestep Keyframes generated by this node for debugging purposes. + +### Outputs +- 🟪***TIMESTEP_KF***: the created Timestep Keyframe, which can either be linked to another Timestep Keyframe or plugged into a Timestep Keyframe input. + +## Timestep Keyframe From List +![image](https://github.com/Kosinkadink/ComfyUI-Advanced-ControlNet/assets/7365912/9e9c23bf-6f82-4ce7-b4d1-3016fd14707d) + +Allows creating Timestep Keyframes via a list of floats, such as with Batch Value Schedule from [ComfyUI_FizzNodes](https://github.com/FizzleDorf/ComfyUI_FizzNodes) nodes. (The first generated keyframe will have guarantee_steps=1, the rest that follow will have guarantee_steps=0). + +### Inputs +- 🟨***prev_timestep_kf***: used to chain Timestep Keyframes together to create a schedule. The order does not matter - the Timestep Keyframes sort themselves automatically by their start_percent. *Any Timestep Keyframe contained in the prev_timestep_keyframe that contains the same start_percent as this Timestep Keyframe will be overwritten.* +- 🟨***cn_weights***: weights to apply to controlnet while this Timestep Keyframe is in effect. Must be compatible with the loaded controlnet, or will throw an error explaining what weight types are compatible. If inherit_missing is True and no control_net_weight is passed in, will attempt to reuse the last-used weights in the timestep keyframe schedule. *If the Apply Advanced ControlNet node has a weight_override, the weight_override will be used during sampling instead of control_net_weight.* +- 🟨***latent_keyframe***: latent keyframes to apply to controlnet while this Timestep Keyframe is in effect. If inherit_missing is True and no latent_keyframe is passed in, will attempt to reuse the last-used latent keyframes in the timestep keyframe schedule. *If the Apply Advanced ControlNet node has a latent_kf_override, the latent_kf_override will be used during sampling instead of latent_keyframe.* +- 🟨***mask_optional***: attention masks to apply to controlnets; basically, decides what part of the image the controlnet should apply to (and the relative strength, if the mask is not binary). Same as mask_optional on the Apply Advanced ControlNet node: can apply either one mask to all latents, or individual masks for each latent. If inherit_missing is True and no mask_optional is passed in, will attempt to reuse the last-used mask_optional in the timestep keyframe schedule. It is NOT overridden by mask_optional on the Apply Advanced ControlNet node; they will be used together. +- 🟩***float_strengths***: a list of floats that will correspond to the strength of each Timestep Keyframe; the first will be assigned to start_percent, the last will be assigned to end_percent, and the rest spread linearly between. +- 🟦***start_percent***: sampling step percentage at which the first generated Timestep Keyframe qualifies to be used. +- 🟦***end_percent***: sampling step percentage at which the last generated Timestep Keyframe qualifies to be used.
+ +- 🟦***null_latent_kf_strength***: strength to assign to latents that are unaccounted for in the passed in latent_keyframes. Has no effect if no latent_keyframes are passed in, or if no batch_indices are unaccounted for in the latent_keyframes during sampling. +- 🟦***inherit_missing***: determines whether values from previous Timestep Keyframes should be reused for optional values (control_net_weights, latent_keyframe, and mask_optional) that are not included on this TimestepKeyframe. To inherit only specific inputs, use default inputs. +- 🟦***print_keyframes***: if True, will print the Timestep Keyframes generated by this node for debugging purposes. + +### Outputs +- 🟪***TIMESTEP_KF***: the created Timestep Keyframe, which can either be linked to another Timestep Keyframe or plugged into a Timestep Keyframe input. + +## Latent Keyframe +![image](https://github.com/Kosinkadink/ComfyUI-Advanced-ControlNet/assets/7365912/7eb2cc4c-255c-4f32-b09b-699f713fada3) + +A singular Latent Keyframe that selects the strength for a specific batch_index. If the batch_index is not present during sampling, it will simply have no effect. Can be chained with any other Latent Keyframe-type node to create a latent keyframe schedule. + +### Inputs +- 🟨***prev_latent_kf***: used to chain Latent Keyframes together to create a schedule. *If a Latent Keyframe contained in prev_latent_keyframes has the same batch_index as this Latent Keyframe, it will take priority over this node's value.* +- 🟦***batch_index***: index of latent in batch to apply controlnet strength to. Acts as the 'key' for the Latent Keyframe in the latent keyframe schedule. +- 🟦***strength***: strength of controlnet to apply to the corresponding latent. + +### Outputs +- 🟪***LATENT_KF***: the created Latent Keyframe, which can either be linked to another Latent Keyframe or plugged into a Latent Keyframe input. + +## Latent Keyframe Group +![image](https://github.com/Kosinkadink/ComfyUI-Advanced-ControlNet/assets/7365912/5ce3b795-f5fc-4dc3-ae30-a4c7f87e278c) + +Allows creating Latent Keyframes via individual indices or python-style ranges. + +### Inputs +- 🟨***prev_latent_kf***: used to chain Latent Keyframes together to create a schedule. *If any Latent Keyframes contained in prev_latent_keyframes have the same batch_index as this Latent Keyframe, they will take priority over this node's version.* +- 🟨***latent_optional***: the latents expected to be passed in for sampling; only required if you wish to use negative indices (will be automatically converted to real values). +- 🟦***index_strengths***: string list of indices or python-style ranges of indices to assign strengths to. If latent_optional is passed in, can contain negative indices or ranges that contain negative numbers, python-style. The different indices must be comma separated. Individual latents can be specified by ```batch_index=strength```, like ```0=0.9```. Ranges can be specified by ```start_index_inclusive:end_index_exclusive=strength```, like ```0:8=0.7```. Negative indices are possible when latent_optional has an input, with a string such as ```0,-4=0.25```. +- 🟦***print_keyframes***: if True, will print the Latent Keyframes generated by this node for debugging purposes. + +### Outputs +- 🟪***LATENT_KF***: the created Latent Keyframe, which can either be linked to another Latent Keyframe or plugged into a Latent Keyframe input. + +## Latent Keyframe Interpolation +![image](https://github.com/Kosinkadink/ComfyUI-Advanced-ControlNet/assets/7365912/7986c737-83b9-46bc-aab0-ae4c368df446) + +Allows creating Latent Keyframes with interpolated strength values in a range of batch indices, as sketched below.
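+
+Conceptually, the node produces something like the following (a minimal sketch, not the node's implementation; the function name is hypothetical, only linear interpolation is shown, and it assumes strength_to lands on the last included index):
+
+```python
+# Conceptual sketch only: one strength per batch_index over
+# [batch_index_from, batch_index_to), linearly interpolated
+# from strength_from to strength_to.
+def interpolate_latent_keyframes(batch_index_from: int, batch_index_to: int,
+                                 strength_from: float, strength_to: float) -> dict[int, float]:
+    indices = range(batch_index_from, batch_index_to)  # end index excluded, python-style
+    if len(indices) == 1:
+        return {batch_index_from: strength_from}
+    step = (strength_to - strength_from) / (len(indices) - 1)
+    return {idx: strength_from + step * n for n, idx in enumerate(indices)}
+
+# e.g. fade the controlnet out across a batch of 4 latents:
+print(interpolate_latent_keyframes(0, 4, 1.0, 0.25))
+# {0: 1.0, 1: 0.75, 2: 0.5, 3: 0.25}
+```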
+
+## Latent Keyframe Interpolation
+![image](https://github.com/Kosinkadink/ComfyUI-Advanced-ControlNet/assets/7365912/7986c737-83b9-46bc-aab0-ae4c368df446)
+
+Allows creating Latent Keyframes with interpolated values in a range.
+
+### Inputs
+- 🟨***prev_latent_kf***: used to chain Latent Keyframes together to create a schedule. *If any Latent Keyframes contained in prev_latent_kf have the same batch_index as this Latent Keyframe, they will take priority over this node's version.*
+- 🟦***batch_index_from***: starting batch_index of the range, inclusive.
+- 🟦***batch_index_to***: ending batch_index of the range, exclusive (Python-style range).
+- 🟦***strength_from***: starting strength of the interpolation.
+- 🟦***strength_to***: ending strength of the interpolation.
+- 🟦***interpolation***: the method of interpolation.
+- 🟦***print_keyframes***: if True, will print the Latent Keyframes generated by this node for debugging purposes.
+
+### Outputs
+- 🟪***LATENT_KF***: the created Latent Keyframe, which can either be linked to another Latent Keyframe or plugged into a Latent Keyframe input.
+
+## Latent Keyframe From List
+![image](https://github.com/Kosinkadink/ComfyUI-Advanced-ControlNet/assets/7365912/6cec701f-6183-4aeb-af5c-cac76f5591b7)
+
+Allows creating Latent Keyframes via a list of floats, such as with Batch Value Schedule from [ComfyUI_FizzNodes](https://github.com/FizzleDorf/ComfyUI_FizzNodes) nodes.
+
+### Inputs
+- 🟨***prev_latent_kf***: used to chain Latent Keyframes together to create a schedule. *If any Latent Keyframes contained in prev_latent_kf have the same batch_index as this Latent Keyframe, they will take priority over this node's version.*
+- 🟩***float_strengths***: a list of floats that will correspond to the strength of each Latent Keyframe; the batch_index is the index of each float value in the list.
+- 🟦***print_keyframes***: if True, will print the Latent Keyframes generated by this node for debugging purposes.
+
+### Outputs
+- 🟪***LATENT_KF***: the created Latent Keyframe, which can either be linked to another Latent Keyframe or plugged into a Latent Keyframe input.
+
+# There are more nodes to document and show usage - will add this soon!
TODO diff --git a/ComfyUI-Advanced-ControlNet/__init__.py b/ComfyUI-Advanced-ControlNet/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d30f8238dd16c33ca38498c7c1fa4e6ab45b0709 --- /dev/null +++ b/ComfyUI-Advanced-ControlNet/__init__.py @@ -0,0 +1,3 @@ +from .adv_control.nodes import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS + +__all__ = ['NODE_CLASS_MAPPINGS', 'NODE_DISPLAY_NAME_MAPPINGS'] diff --git a/ComfyUI-Advanced-ControlNet/adv_control/control.py b/ComfyUI-Advanced-ControlNet/adv_control/control.py new file mode 100644 index 0000000000000000000000000000000000000000..bea5aa7e8a5bb1753308158e6a0207a90078bde5 --- /dev/null +++ b/ComfyUI-Advanced-ControlNet/adv_control/control.py @@ -0,0 +1,860 @@ +from typing import Callable, Union +from torch import Tensor +import torch +import os + +import comfy.utils +import comfy.model_management +import comfy.model_detection +import comfy.controlnet as comfy_cn +from comfy.controlnet import ControlBase, ControlNet, ControlLora, T2IAdapter, broadcast_image_to +from comfy.model_patcher import ModelPatcher + +from .control_sparsectrl import SparseModelPatcher, SparseControlNet, SparseCtrlMotionWrapper, SparseMethod, SparseSettings, SparseSpreadMethod, PreprocSparseRGBWrapper +from .control_lllite import LLLiteModule, LLLitePatch +from .control_svd import svd_unet_config_from_diffusers_unet, SVDControlNet, svd_unet_to_diffusers +from .utils import (AdvancedControlBase, TimestepKeyframeGroup, LatentKeyframeGroup, ControlWeightType, ControlWeights, WeightTypeException, + manual_cast_clean_groupnorm, disable_weight_init_clean_groupnorm, prepare_mask_batch, get_properly_arranged_t2i_weights, load_torch_file_with_dict_factory) +from .logger import logger + + +class ControlNetAdvanced(ControlNet, AdvancedControlBase): + def __init__(self, control_model, timestep_keyframes: TimestepKeyframeGroup, global_average_pooling=False, device=None, load_device=None, manual_cast_dtype=None): + super().__init__(control_model=control_model, global_average_pooling=global_average_pooling, device=device, load_device=load_device, manual_cast_dtype=manual_cast_dtype) + AdvancedControlBase.__init__(self, super(), timestep_keyframes=timestep_keyframes, weights_default=ControlWeights.controlnet()) + + def get_universal_weights(self) -> ControlWeights: + raw_weights = [(self.weights.base_multiplier ** float(12 - i)) for i in range(13)] + return self.weights.copy_with_new_weights(raw_weights) + + def get_control_advanced(self, x_noisy, t, cond, batched_number): + # perform special version of get_control that supports sliding context and masks + return self.sliding_get_control(x_noisy, t, cond, batched_number) + + def sliding_get_control(self, x_noisy: Tensor, t, cond, batched_number): + control_prev = None + if self.previous_controlnet is not None: + control_prev = self.previous_controlnet.get_control(x_noisy, t, cond, batched_number) + + if self.timestep_range is not None: + if t[0] > self.timestep_range[0] or t[0] < self.timestep_range[1]: + if control_prev is not None: + return control_prev + else: + return None + + dtype = self.control_model.dtype + if self.manual_cast_dtype is not None: + dtype = self.manual_cast_dtype + + output_dtype = x_noisy.dtype + # make cond_hint appropriate dimensions + # TODO: change this to not require cond_hint upscaling every step when self.sub_idxs are present + if self.sub_idxs is not None or self.cond_hint is None or x_noisy.shape[2] * 8 != self.cond_hint.shape[2] or x_noisy.shape[3] * 8 != 
self.cond_hint.shape[3]: + if self.cond_hint is not None: + del self.cond_hint + self.cond_hint = None + # if self.cond_hint_original length greater or equal to real latent count, subdivide it before scaling + if self.sub_idxs is not None and self.cond_hint_original.size(0) >= self.full_latent_length: + self.cond_hint = comfy.utils.common_upscale(self.cond_hint_original[self.sub_idxs], x_noisy.shape[3] * 8, x_noisy.shape[2] * 8, 'nearest-exact', "center").to(dtype).to(self.device) + else: + self.cond_hint = comfy.utils.common_upscale(self.cond_hint_original, x_noisy.shape[3] * 8, x_noisy.shape[2] * 8, 'nearest-exact', "center").to(dtype).to(self.device) + if x_noisy.shape[0] != self.cond_hint.shape[0]: + self.cond_hint = broadcast_image_to(self.cond_hint, x_noisy.shape[0], batched_number) + + # prepare mask_cond_hint + self.prepare_mask_cond_hint(x_noisy=x_noisy, t=t, cond=cond, batched_number=batched_number, dtype=dtype) + + context = cond.get('crossattn_controlnet', cond['c_crossattn']) + # uses 'y' in new ComfyUI update + y = cond.get('y', None) + if y is None: # TODO: remove this in the future since no longer used by newest ComfyUI + y = cond.get('c_adm', None) + if y is not None: + y = y.to(dtype) + timestep = self.model_sampling_current.timestep(t) + x_noisy = self.model_sampling_current.calculate_input(t, x_noisy) + + control = self.control_model(x=x_noisy.to(dtype), hint=self.cond_hint, timesteps=timestep.float(), context=context.to(dtype), y=y) + return self.control_merge(None, control, control_prev, output_dtype) + + def copy(self): + c = ControlNetAdvanced(self.control_model, self.timestep_keyframes, global_average_pooling=self.global_average_pooling, load_device=self.load_device, manual_cast_dtype=self.manual_cast_dtype) + self.copy_to(c) + self.copy_to_advanced(c) + return c + + @staticmethod + def from_vanilla(v: ControlNet, timestep_keyframe: TimestepKeyframeGroup=None) -> 'ControlNetAdvanced': + return ControlNetAdvanced(control_model=v.control_model, timestep_keyframes=timestep_keyframe, + global_average_pooling=v.global_average_pooling, device=v.device, load_device=v.load_device, manual_cast_dtype=v.manual_cast_dtype) + + +class T2IAdapterAdvanced(T2IAdapter, AdvancedControlBase): + def __init__(self, t2i_model, timestep_keyframes: TimestepKeyframeGroup, channels_in, compression_ratio=8, upscale_algorithm="nearest_exact", device=None): + super().__init__(t2i_model=t2i_model, channels_in=channels_in, compression_ratio=compression_ratio, upscale_algorithm=upscale_algorithm, device=device) + AdvancedControlBase.__init__(self, super(), timestep_keyframes=timestep_keyframes, weights_default=ControlWeights.t2iadapter()) + + def control_merge_inject(self, control_input, control_output, control_prev, output_dtype): + # if has uncond multiplier, need to make sure control shapes are the same batch size as expected + if self.weights.has_uncond_multiplier: + if control_input is not None: + for i in range(len(control_input)): + x = control_input[i] + if x is not None: + if x.size(0) < self.batch_size: + control_input[i] = x.repeat(self.batched_number, 1, 1, 1)[:self.batch_size] + if control_output is not None: + for i in range(len(control_output)): + x = control_output[i] + if x is not None: + if x.size(0) < self.batch_size: + control_output[i] = x.repeat(self.batched_number, 1, 1, 1)[:self.batch_size] + return AdvancedControlBase.control_merge_inject(self, control_input, control_output, control_prev, output_dtype) + + def get_universal_weights(self) -> ControlWeights: + raw_weights = 
[(self.weights.base_multiplier ** float(7 - i)) for i in range(8)] + raw_weights = [raw_weights[-8], raw_weights[-3], raw_weights[-2], raw_weights[-1]] + raw_weights = get_properly_arranged_t2i_weights(raw_weights) + return self.weights.copy_with_new_weights(raw_weights) + + def get_calc_pow(self, idx: int, layers: int) -> int: + # match how T2IAdapterAdvanced deals with universal weights + indeces = [7 - i for i in range(8)] + indeces = [indeces[-8], indeces[-3], indeces[-2], indeces[-1]] + indeces = get_properly_arranged_t2i_weights(indeces) + return indeces[idx] + + def get_control_advanced(self, x_noisy, t, cond, batched_number): + try: + # if sub indexes present, replace original hint with subsection + if self.sub_idxs is not None: + # cond hints + full_cond_hint_original = self.cond_hint_original + del self.cond_hint + self.cond_hint = None + self.cond_hint_original = full_cond_hint_original[self.sub_idxs] + # mask hints + self.prepare_mask_cond_hint(x_noisy=x_noisy, t=t, cond=cond, batched_number=batched_number) + return super().get_control(x_noisy, t, cond, batched_number) + finally: + if self.sub_idxs is not None: + # replace original cond hint + self.cond_hint_original = full_cond_hint_original + del full_cond_hint_original + + def copy(self): + c = T2IAdapterAdvanced(self.t2i_model, self.timestep_keyframes, self.channels_in, self.compression_ratio, self.upscale_algorithm) + self.copy_to(c) + self.copy_to_advanced(c) + return c + + def cleanup(self): + super().cleanup() + self.cleanup_advanced() + + @staticmethod + def from_vanilla(v: T2IAdapter, timestep_keyframe: TimestepKeyframeGroup=None) -> 'T2IAdapterAdvanced': + return T2IAdapterAdvanced(t2i_model=v.t2i_model, timestep_keyframes=timestep_keyframe, channels_in=v.channels_in, + compression_ratio=v.compression_ratio, upscale_algorithm=v.upscale_algorithm, device=v.device) + + +class ControlLoraAdvanced(ControlLora, AdvancedControlBase): + def __init__(self, control_weights, timestep_keyframes: TimestepKeyframeGroup, global_average_pooling=False, device=None): + super().__init__(control_weights=control_weights, global_average_pooling=global_average_pooling, device=device) + AdvancedControlBase.__init__(self, super(), timestep_keyframes=timestep_keyframes, weights_default=ControlWeights.controllora()) + # use some functions from ControlNetAdvanced + self.get_control_advanced = ControlNetAdvanced.get_control_advanced.__get__(self, type(self)) + self.sliding_get_control = ControlNetAdvanced.sliding_get_control.__get__(self, type(self)) + + def get_universal_weights(self) -> ControlWeights: + raw_weights = [(self.weights.base_multiplier ** float(9 - i)) for i in range(10)] + return self.weights.copy_with_new_weights(raw_weights) + + def copy(self): + c = ControlLoraAdvanced(self.control_weights, self.timestep_keyframes, global_average_pooling=self.global_average_pooling) + self.copy_to(c) + self.copy_to_advanced(c) + return c + + def cleanup(self): + super().cleanup() + self.cleanup_advanced() + + @staticmethod + def from_vanilla(v: ControlLora, timestep_keyframe: TimestepKeyframeGroup=None) -> 'ControlLoraAdvanced': + return ControlLoraAdvanced(control_weights=v.control_weights, timestep_keyframes=timestep_keyframe, + global_average_pooling=v.global_average_pooling, device=v.device) + + +class SVDControlNetAdvanced(ControlNetAdvanced): + def __init__(self, control_model: SVDControlNet, timestep_keyframes: TimestepKeyframeGroup, global_average_pooling=False, device=None, load_device=None, manual_cast_dtype=None): + 
super().__init__(control_model=control_model, timestep_keyframes=timestep_keyframes, global_average_pooling=global_average_pooling, device=device, load_device=load_device, manual_cast_dtype=manual_cast_dtype) + + def set_cond_hint(self, *args, **kwargs): + to_return = super().set_cond_hint(*args, **kwargs) + # cond hint for SVD-ControlNet needs to be scaled between (-1, 1) instead of (0, 1) + self.cond_hint_original = self.cond_hint_original * 2.0 - 1.0 + return to_return + + def get_control_advanced(self, x_noisy, t, cond, batched_number): + control_prev = None + if self.previous_controlnet is not None: + control_prev = self.previous_controlnet.get_control(x_noisy, t, cond, batched_number) + + if self.timestep_range is not None: + if t[0] > self.timestep_range[0] or t[0] < self.timestep_range[1]: + if control_prev is not None: + return control_prev + else: + return None + + dtype = self.control_model.dtype + if self.manual_cast_dtype is not None: + dtype = self.manual_cast_dtype + + output_dtype = x_noisy.dtype + # make cond_hint appropriate dimensions + # TODO: change this to not require cond_hint upscaling every step when self.sub_idxs are present + if self.sub_idxs is not None or self.cond_hint is None or x_noisy.shape[2] * 8 != self.cond_hint.shape[2] or x_noisy.shape[3] * 8 != self.cond_hint.shape[3]: + if self.cond_hint is not None: + del self.cond_hint + self.cond_hint = None + # if self.cond_hint_original length greater or equal to real latent count, subdivide it before scaling + if self.sub_idxs is not None and self.cond_hint_original.size(0) >= self.full_latent_length: + self.cond_hint = comfy.utils.common_upscale(self.cond_hint_original[self.sub_idxs], x_noisy.shape[3] * 8, x_noisy.shape[2] * 8, 'nearest-exact', "center").to(dtype).to(self.device) + else: + self.cond_hint = comfy.utils.common_upscale(self.cond_hint_original, x_noisy.shape[3] * 8, x_noisy.shape[2] * 8, 'nearest-exact', "center").to(dtype).to(self.device) + if x_noisy.shape[0] != self.cond_hint.shape[0]: + self.cond_hint = broadcast_image_to(self.cond_hint, x_noisy.shape[0], batched_number) + + # prepare mask_cond_hint + self.prepare_mask_cond_hint(x_noisy=x_noisy, t=t, cond=cond, batched_number=batched_number, dtype=dtype) + + context = cond.get('crossattn_controlnet', cond['c_crossattn']) + # uses 'y' in new ComfyUI update + y = cond.get('y', None) + if y is not None: + y = y.to(dtype) + timestep = self.model_sampling_current.timestep(t) + x_noisy = self.model_sampling_current.calculate_input(t, x_noisy) + # concat c_concat if exists (should exist for SVD), doubling channels to 8 + if cond.get('c_concat', None) is not None: + x_noisy = torch.cat([x_noisy] + [cond['c_concat']], dim=1) + + control = self.control_model(x=x_noisy.to(dtype), hint=self.cond_hint, timesteps=timestep.float(), context=context.to(dtype), y=y, cond=cond) + return self.control_merge(None, control, control_prev, output_dtype) + + def copy(self): + c = SVDControlNetAdvanced(self.control_model, self.timestep_keyframes, global_average_pooling=self.global_average_pooling, load_device=self.load_device, manual_cast_dtype=self.manual_cast_dtype) + self.copy_to(c) + self.copy_to_advanced(c) + return c + + +class SparseCtrlAdvanced(ControlNetAdvanced): + def __init__(self, control_model, timestep_keyframes: TimestepKeyframeGroup, sparse_settings: SparseSettings=None, global_average_pooling=False, device=None, load_device=None, manual_cast_dtype=None): + super().__init__(control_model=control_model, timestep_keyframes=timestep_keyframes, 
global_average_pooling=global_average_pooling, device=device, load_device=load_device, manual_cast_dtype=manual_cast_dtype) + self.control_model_wrapped = SparseModelPatcher(self.control_model, load_device=load_device, offload_device=comfy.model_management.unet_offload_device()) + self.add_compatible_weight(ControlWeightType.SPARSECTRL) + self.control_model: SparseControlNet = self.control_model # does nothing except help with IDE hints + self.sparse_settings = sparse_settings if sparse_settings is not None else SparseSettings.default() + self.latent_format = None + self.preprocessed = False + + def get_control_advanced(self, x_noisy: Tensor, t, cond, batched_number: int): + # normal ControlNet stuff + control_prev = None + if self.previous_controlnet is not None: + control_prev = self.previous_controlnet.get_control(x_noisy, t, cond, batched_number) + + if self.timestep_range is not None: + if t[0] > self.timestep_range[0] or t[0] < self.timestep_range[1]: + if control_prev is not None: + return control_prev + else: + return None + + dtype = self.control_model.dtype + if self.manual_cast_dtype is not None: + dtype = self.manual_cast_dtype + output_dtype = x_noisy.dtype + # set actual input length on motion model + actual_length = x_noisy.size(0)//batched_number + full_length = actual_length if self.sub_idxs is None else self.full_latent_length + self.control_model.set_actual_length(actual_length=actual_length, full_length=full_length) + # prepare cond_hint, if needed + dim_mult = 1 if self.control_model.use_simplified_conditioning_embedding else 8 + if self.sub_idxs is not None or self.cond_hint is None or x_noisy.shape[2]*dim_mult != self.cond_hint.shape[2] or x_noisy.shape[3]*dim_mult != self.cond_hint.shape[3]: + # clear out cond_hint and conditioning_mask + if self.cond_hint is not None: + del self.cond_hint + self.cond_hint = None + # first, figure out which cond idxs are relevant, and where they fit in + cond_idxs = self.sparse_settings.sparse_method.get_indexes(hint_length=self.cond_hint_original.size(0), full_length=full_length) + + range_idxs = list(range(full_length)) if self.sub_idxs is None else self.sub_idxs + hint_idxs = [] # idxs in cond_idxs + local_idxs = [] # idx to pun in final cond_hint + for i,cond_idx in enumerate(cond_idxs): + if cond_idx in range_idxs: + hint_idxs.append(i) + local_idxs.append(range_idxs.index(cond_idx)) + # sub_cond_hint now contains the hints relevant to current x_noisy + sub_cond_hint = self.cond_hint_original[hint_idxs].to(dtype).to(self.device) + + # scale cond_hints to match noisy input + if self.control_model.use_simplified_conditioning_embedding: + # RGB SparseCtrl; the inputs are latents - use bilinear to avoid blocky artifacts + sub_cond_hint = self.latent_format.process_in(sub_cond_hint) # multiplies by model scale factor + sub_cond_hint = comfy.utils.common_upscale(sub_cond_hint, x_noisy.shape[3], x_noisy.shape[2], "nearest-exact", "center").to(dtype).to(self.device) + else: + # other SparseCtrl; inputs are typical images + sub_cond_hint = comfy.utils.common_upscale(sub_cond_hint, x_noisy.shape[3] * 8, x_noisy.shape[2] * 8, 'nearest-exact', "center").to(dtype).to(self.device) + # prepare cond_hint (b, c, h ,w) + cond_shape = list(sub_cond_hint.shape) + cond_shape[0] = len(range_idxs) + self.cond_hint = torch.zeros(cond_shape).to(dtype).to(self.device) + self.cond_hint[local_idxs] = sub_cond_hint[:] + # prepare cond_mask (b, 1, h, w) + cond_shape[1] = 1 + cond_mask = torch.zeros(cond_shape).to(dtype).to(self.device) + 
cond_mask[local_idxs] = 1.0 + # combine cond_hint and cond_mask into (b, c+1, h, w) + if not self.sparse_settings.merged: + self.cond_hint = torch.cat([self.cond_hint, cond_mask], dim=1) + del sub_cond_hint + del cond_mask + # make cond_hint match x_noisy batch + if x_noisy.shape[0] != self.cond_hint.shape[0]: + self.cond_hint = broadcast_image_to(self.cond_hint, x_noisy.shape[0], batched_number) + + # prepare mask_cond_hint + self.prepare_mask_cond_hint(x_noisy=x_noisy, t=t, cond=cond, batched_number=batched_number, dtype=dtype) + + context = cond['c_crossattn'] + y = cond.get('y', None) + if y is not None: + y = y.to(dtype) + timestep = self.model_sampling_current.timestep(t) + x_noisy = self.model_sampling_current.calculate_input(t, x_noisy) + + control = self.control_model(x=x_noisy.to(dtype), hint=self.cond_hint, timesteps=timestep.float(), context=context.to(dtype), y=y) + return self.control_merge(None, control, control_prev, output_dtype) + + def pre_run_advanced(self, model, percent_to_timestep_function): + super().pre_run_advanced(model, percent_to_timestep_function) + if type(self.cond_hint_original) == PreprocSparseRGBWrapper: + if not self.control_model.use_simplified_conditioning_embedding: + raise ValueError("Any model besides RGB SparseCtrl should NOT have its images go through the RGB SparseCtrl preprocessor.") + self.cond_hint_original = self.cond_hint_original.condhint + self.latent_format = model.latent_format # LatentFormat object, used to process_in latent cond hint + if self.control_model.motion_wrapper is not None: + self.control_model.motion_wrapper.reset() + self.control_model.motion_wrapper.set_strength(self.sparse_settings.motion_strength) + self.control_model.motion_wrapper.set_scale_multiplier(self.sparse_settings.motion_scale) + + def cleanup_advanced(self): + super().cleanup_advanced() + if self.latent_format is not None: + del self.latent_format + self.latent_format = None + + def copy(self): + c = SparseCtrlAdvanced(self.control_model, self.timestep_keyframes, self.sparse_settings, self.global_average_pooling, self.device, self.load_device, self.manual_cast_dtype) + self.copy_to(c) + self.copy_to_advanced(c) + return c + + +class ControlLLLiteAdvanced(ControlBase, AdvancedControlBase): + # This ControlNet is more of an attention patch than a traditional controlnet + def __init__(self, patch_attn1: LLLitePatch, patch_attn2: LLLitePatch, timestep_keyframes: TimestepKeyframeGroup, device=None): + super().__init__(device) + AdvancedControlBase.__init__(self, super(), timestep_keyframes=timestep_keyframes, weights_default=ControlWeights.controllllite(), require_model=True) + self.patch_attn1 = patch_attn1.set_control(self) + self.patch_attn2 = patch_attn2.set_control(self) + self.latent_dims_div2 = None + self.latent_dims_div4 = None + + def patch_model(self, model: ModelPatcher): + model.set_model_attn1_patch(self.patch_attn1) + model.set_model_attn2_patch(self.patch_attn2) + + def set_cond_hint(self, *args, **kwargs): + to_return = super().set_cond_hint(*args, **kwargs) + # cond hint for LLLite needs to be scaled between (-1, 1) instead of (0, 1) + self.cond_hint_original = self.cond_hint_original * 2.0 - 1.0 + return to_return + + def pre_run_advanced(self, *args, **kwargs): + AdvancedControlBase.pre_run_advanced(self, *args, **kwargs) + #logger.error(f"in cn: {id(self.patch_attn1)},{id(self.patch_attn2)}") + self.patch_attn1.set_control(self) + self.patch_attn2.set_control(self) + #logger.warn(f"in pre_run_advanced: {id(self)}") + + def 
get_control_advanced(self, x_noisy: Tensor, t, cond, batched_number: int): + # normal ControlNet stuff + control_prev = None + if self.previous_controlnet is not None: + control_prev = self.previous_controlnet.get_control(x_noisy, t, cond, batched_number) + + if self.timestep_range is not None: + if t[0] > self.timestep_range[0] or t[0] < self.timestep_range[1]: + return control_prev + + dtype = x_noisy.dtype + # prepare cond_hint + if self.sub_idxs is not None or self.cond_hint is None or x_noisy.shape[2] * 8 != self.cond_hint.shape[2] or x_noisy.shape[3] * 8 != self.cond_hint.shape[3]: + if self.cond_hint is not None: + del self.cond_hint + self.cond_hint = None + # if self.cond_hint_original length greater or equal to real latent count, subdivide it before scaling + if self.sub_idxs is not None and self.cond_hint_original.size(0) >= self.full_latent_length: + self.cond_hint = comfy.utils.common_upscale(self.cond_hint_original[self.sub_idxs], x_noisy.shape[3] * 8, x_noisy.shape[2] * 8, 'nearest-exact', "center").to(dtype).to(self.device) + else: + self.cond_hint = comfy.utils.common_upscale(self.cond_hint_original, x_noisy.shape[3] * 8, x_noisy.shape[2] * 8, 'nearest-exact', "center").to(dtype).to(self.device) + if x_noisy.shape[0] != self.cond_hint.shape[0]: + self.cond_hint = broadcast_image_to(self.cond_hint, x_noisy.shape[0], batched_number) + # some special logic here compared to other controlnets: + # * The cond_emb in attn patches will divide latent dims by 2 or 4, integer + # * Due to this loss, the cond_emb will become smaller than x input if latent dims are not divisble by 2 or 4 + divisible_by_2_h = x_noisy.shape[2]%2==0 + divisible_by_2_w = x_noisy.shape[3]%2==0 + if not (divisible_by_2_h and divisible_by_2_w): + #logger.warn(f"{x_noisy.shape} not divisible by 2!") + new_h = (x_noisy.shape[2]//2)*2 + new_w = (x_noisy.shape[3]//2)*2 + if not divisible_by_2_h: + new_h += 2 + if not divisible_by_2_w: + new_w += 2 + self.latent_dims_div2 = (new_h, new_w) + divisible_by_4_h = x_noisy.shape[2]%4==0 + divisible_by_4_w = x_noisy.shape[3]%4==0 + if not (divisible_by_4_h and divisible_by_4_w): + #logger.warn(f"{x_noisy.shape} not divisible by 4!") + new_h = (x_noisy.shape[2]//4)*4 + new_w = (x_noisy.shape[3]//4)*4 + if not divisible_by_4_h: + new_h += 4 + if not divisible_by_4_w: + new_w += 4 + self.latent_dims_div4 = (new_h, new_w) + # prepare mask + self.prepare_mask_cond_hint(x_noisy=x_noisy, t=t, cond=cond, batched_number=batched_number) + # done preparing; model patches will take care of everything now. + # return normal controlnet stuff + return control_prev + + def cleanup_advanced(self): + super().cleanup_advanced() + self.patch_attn1.cleanup() + self.patch_attn2.cleanup() + self.latent_dims_div2 = None + self.latent_dims_div4 = None + + def copy(self): + c = ControlLLLiteAdvanced(self.patch_attn1, self.patch_attn2, self.timestep_keyframes) + self.copy_to(c) + self.copy_to_advanced(c) + return c + + # deepcopy needs to properly keep track of objects to work between model.clone calls! + # def __deepcopy__(self, *args, **kwargs): + # self.cleanup_advanced() + # return self + + # def get_models(self): + # # get_models is called once at the start of every KSampler run - use to reset already_patched status + # out = super().get_models() + # logger.error(f"in get_models! 
{id(self)}") + # return out + + +def load_controlnet(ckpt_path, timestep_keyframe: TimestepKeyframeGroup=None, model=None): + controlnet_data = comfy.utils.load_torch_file(ckpt_path, safe_load=True) + control = None + # check if a non-vanilla ControlNet + controlnet_type = ControlWeightType.DEFAULT + has_controlnet_key = False + has_motion_modules_key = False + has_temporal_res_block_key = False + for key in controlnet_data: + # LLLite check + if "lllite" in key: + controlnet_type = ControlWeightType.CONTROLLLLITE + break + # SparseCtrl check + elif "motion_modules" in key: + has_motion_modules_key = True + elif "controlnet" in key: + has_controlnet_key = True + # SVD-ControlNet check + elif "temporal_res_block" in key: + has_temporal_res_block_key = True + if has_controlnet_key and has_motion_modules_key: + controlnet_type = ControlWeightType.SPARSECTRL + elif has_controlnet_key and has_temporal_res_block_key: + controlnet_type = ControlWeightType.SVD_CONTROLNET + + if controlnet_type != ControlWeightType.DEFAULT: + if controlnet_type == ControlWeightType.CONTROLLLLITE: + control = load_controllllite(ckpt_path, controlnet_data=controlnet_data, timestep_keyframe=timestep_keyframe) + elif controlnet_type == ControlWeightType.SPARSECTRL: + control = load_sparsectrl(ckpt_path, controlnet_data=controlnet_data, timestep_keyframe=timestep_keyframe, model=model) + elif controlnet_type == ControlWeightType.SVD_CONTROLNET: + control = load_svdcontrolnet(ckpt_path, controlnet_data=controlnet_data, timestep_keyframe=timestep_keyframe) + #raise Exception(f"SVD-ControlNet is not supported yet!") + #control = comfy_cn.load_controlnet(ckpt_path, model=model) + # otherwise, load vanilla ControlNet + else: + try: + # hacky way of getting load_torch_file in load_controlnet to use already-present controlnet_data and not redo loading + orig_load_torch_file = comfy.utils.load_torch_file + comfy.utils.load_torch_file = load_torch_file_with_dict_factory(controlnet_data, orig_load_torch_file) + control = comfy_cn.load_controlnet(ckpt_path, model=model) + finally: + comfy.utils.load_torch_file = orig_load_torch_file + return convert_to_advanced(control, timestep_keyframe=timestep_keyframe) + + +def convert_to_advanced(control, timestep_keyframe: TimestepKeyframeGroup=None): + # if already advanced, leave it be + if is_advanced_controlnet(control): + return control + # if exactly ControlNet returned, transform it into ControlNetAdvanced + if type(control) == ControlNet: + return ControlNetAdvanced.from_vanilla(v=control, timestep_keyframe=timestep_keyframe) + # if exactly ControlLora returned, transform it into ControlLoraAdvanced + elif type(control) == ControlLora: + return ControlLoraAdvanced.from_vanilla(v=control, timestep_keyframe=timestep_keyframe) + # if T2IAdapter returned, transform it into T2IAdapterAdvanced + elif isinstance(control, T2IAdapter): + return T2IAdapterAdvanced.from_vanilla(v=control, timestep_keyframe=timestep_keyframe) + # otherwise, leave it be - might be something I am not supporting yet + return control + + +def is_advanced_controlnet(input_object): + return hasattr(input_object, "sub_idxs") + + +def load_sparsectrl(ckpt_path: str, controlnet_data: dict[str, Tensor]=None, timestep_keyframe: TimestepKeyframeGroup=None, sparse_settings=SparseSettings.default(), model=None) -> SparseCtrlAdvanced: + if controlnet_data is None: + controlnet_data = comfy.utils.load_torch_file(ckpt_path, safe_load=True) + # first, separate out motion part from normal controlnet part and attempt to load that 
portion + motion_data = {} + for key in list(controlnet_data.keys()): + if "temporal" in key: + motion_data[key] = controlnet_data.pop(key) + if len(motion_data) == 0: + raise ValueError(f"No motion-related keys in '{ckpt_path}'; not a valid SparseCtrl model!") + motion_wrapper: SparseCtrlMotionWrapper = SparseCtrlMotionWrapper(motion_data).to(comfy.model_management.unet_dtype()) + missing, unexpected = motion_wrapper.load_state_dict(motion_data) + if len(missing) > 0 or len(unexpected) > 0: + logger.info(f"SparseCtrlMotionWrapper: {missing}, {unexpected}") + + # now, load as if it was a normal controlnet - mostly copied from comfy load_controlnet function + controlnet_config = None + is_diffusers = False + use_simplified_conditioning_embedding = False + if "controlnet_cond_embedding.conv_in.weight" in controlnet_data: + is_diffusers = True + if "controlnet_cond_embedding.weight" in controlnet_data: + is_diffusers = True + use_simplified_conditioning_embedding = True + if is_diffusers: #diffusers format + unet_dtype = comfy.model_management.unet_dtype() + controlnet_config = comfy.model_detection.unet_config_from_diffusers_unet(controlnet_data, unet_dtype) + diffusers_keys = comfy.utils.unet_to_diffusers(controlnet_config) + diffusers_keys["controlnet_mid_block.weight"] = "middle_block_out.0.weight" + diffusers_keys["controlnet_mid_block.bias"] = "middle_block_out.0.bias" + + count = 0 + loop = True + while loop: + suffix = [".weight", ".bias"] + for s in suffix: + k_in = "controlnet_down_blocks.{}{}".format(count, s) + k_out = "zero_convs.{}.0{}".format(count, s) + if k_in not in controlnet_data: + loop = False + break + diffusers_keys[k_in] = k_out + count += 1 + # normal conditioning embedding + if not use_simplified_conditioning_embedding: + count = 0 + loop = True + while loop: + suffix = [".weight", ".bias"] + for s in suffix: + if count == 0: + k_in = "controlnet_cond_embedding.conv_in{}".format(s) + else: + k_in = "controlnet_cond_embedding.blocks.{}{}".format(count - 1, s) + k_out = "input_hint_block.{}{}".format(count * 2, s) + if k_in not in controlnet_data: + k_in = "controlnet_cond_embedding.conv_out{}".format(s) + loop = False + diffusers_keys[k_in] = k_out + count += 1 + # simplified conditioning embedding + else: + count = 0 + suffix = [".weight", ".bias"] + for s in suffix: + k_in = "controlnet_cond_embedding{}".format(s) + k_out = "input_hint_block.{}{}".format(count, s) + diffusers_keys[k_in] = k_out + + new_sd = {} + for k in diffusers_keys: + if k in controlnet_data: + new_sd[diffusers_keys[k]] = controlnet_data.pop(k) + + leftover_keys = controlnet_data.keys() + if len(leftover_keys) > 0: + logger.info("leftover keys:", leftover_keys) + controlnet_data = new_sd + + pth_key = 'control_model.zero_convs.0.0.weight' + pth = False + key = 'zero_convs.0.0.weight' + if pth_key in controlnet_data: + pth = True + key = pth_key + prefix = "control_model." + elif key in controlnet_data: + prefix = "" + else: + raise ValueError("The provided model is not a valid SparseCtrl model! 
[ErrorCode: HORSERADISH]") + + if controlnet_config is None: + unet_dtype = comfy.model_management.unet_dtype() + controlnet_config = comfy.model_detection.model_config_from_unet(controlnet_data, prefix, unet_dtype, True).unet_config + load_device = comfy.model_management.get_torch_device() + manual_cast_dtype = comfy.model_management.unet_manual_cast(unet_dtype, load_device) + if manual_cast_dtype is not None: + controlnet_config["operations"] = manual_cast_clean_groupnorm + else: + controlnet_config["operations"] = disable_weight_init_clean_groupnorm + controlnet_config.pop("out_channels") + # get proper hint channels + if use_simplified_conditioning_embedding: + controlnet_config["hint_channels"] = controlnet_data["{}input_hint_block.0.weight".format(prefix)].shape[1] + controlnet_config["use_simplified_conditioning_embedding"] = use_simplified_conditioning_embedding + else: + controlnet_config["hint_channels"] = controlnet_data["{}input_hint_block.0.weight".format(prefix)].shape[1] + controlnet_config["use_simplified_conditioning_embedding"] = use_simplified_conditioning_embedding + control_model = SparseControlNet(**controlnet_config) + + if pth: + if 'difference' in controlnet_data: + if model is not None: + comfy.model_management.load_models_gpu([model]) + model_sd = model.model_state_dict() + for x in controlnet_data: + c_m = "control_model." + if x.startswith(c_m): + sd_key = "diffusion_model.{}".format(x[len(c_m):]) + if sd_key in model_sd: + cd = controlnet_data[x] + cd += model_sd[sd_key].type(cd.dtype).to(cd.device) + else: + logger.warning("WARNING: Loaded a diff SparseCtrl without a model. It will very likely not work.") + + class WeightsLoader(torch.nn.Module): + pass + w = WeightsLoader() + w.control_model = control_model + missing, unexpected = w.load_state_dict(controlnet_data, strict=False) + else: + missing, unexpected = control_model.load_state_dict(controlnet_data, strict=False) + if len(missing) > 0 or len(unexpected) > 0: + logger.info(f"SparseCtrl ControlNet: {missing}, {unexpected}") + + global_average_pooling = False + filename = os.path.splitext(ckpt_path)[0] + if filename.endswith("_shuffle") or filename.endswith("_shuffle_fp16"): #TODO: smarter way of enabling global_average_pooling + global_average_pooling = True + + # both motion portion and controlnet portions are loaded; bring them together if using motion model + if sparse_settings.use_motion: + motion_wrapper.inject(control_model) + + control = SparseCtrlAdvanced(control_model, timestep_keyframes=timestep_keyframe, sparse_settings=sparse_settings, global_average_pooling=global_average_pooling, load_device=load_device, manual_cast_dtype=manual_cast_dtype) + return control + + +def load_controllllite(ckpt_path: str, controlnet_data: dict[str, Tensor]=None, timestep_keyframe: TimestepKeyframeGroup=None): + if controlnet_data is None: + controlnet_data = comfy.utils.load_torch_file(ckpt_path, safe_load=True) + # adapted from https://github.com/kohya-ss/ControlNet-LLLite-ComfyUI + # first, split weights for each module + module_weights = {} + for key, value in controlnet_data.items(): + fragments = key.split(".") + module_name = fragments[0] + weight_name = ".".join(fragments[1:]) + + if module_name not in module_weights: + module_weights[module_name] = {} + module_weights[module_name][weight_name] = value + + # next, load each module + modules = {} + for module_name, weights in module_weights.items(): + # kohya planned to do something about how these should be chosen, so I'm not touching this + # since I am 
not familiar with the logic for this + if "conditioning1.4.weight" in weights: + depth = 3 + elif weights["conditioning1.2.weight"].shape[-1] == 4: + depth = 2 + else: + depth = 1 + + module = LLLiteModule( + name=module_name, + is_conv2d=weights["down.0.weight"].ndim == 4, + in_dim=weights["down.0.weight"].shape[1], + depth=depth, + cond_emb_dim=weights["conditioning1.0.weight"].shape[0] * 2, + mlp_dim=weights["down.0.weight"].shape[0], + ) + # load weights into module + module.load_state_dict(weights) + modules[module_name] = module + if len(modules) == 1: + module.is_first = True + + #logger.info(f"loaded {ckpt_path} successfully, {len(modules)} modules") + + patch_attn1 = LLLitePatch(modules=modules, patch_type=LLLitePatch.ATTN1) + patch_attn2 = LLLitePatch(modules=modules, patch_type=LLLitePatch.ATTN2) + control = ControlLLLiteAdvanced(patch_attn1=patch_attn1, patch_attn2=patch_attn2, timestep_keyframes=timestep_keyframe) + return control + + +def load_svdcontrolnet(ckpt_path: str, controlnet_data: dict[str, Tensor]=None, timestep_keyframe: TimestepKeyframeGroup=None, model=None): + if controlnet_data is None: + controlnet_data = comfy.utils.load_torch_file(ckpt_path, safe_load=True) + + controlnet_config = None + if "controlnet_cond_embedding.conv_in.weight" in controlnet_data: #diffusers format + unet_dtype = comfy.model_management.unet_dtype() + controlnet_config = svd_unet_config_from_diffusers_unet(controlnet_data, unet_dtype) + diffusers_keys = svd_unet_to_diffusers(controlnet_config) + diffusers_keys["controlnet_mid_block.weight"] = "middle_block_out.0.weight" + diffusers_keys["controlnet_mid_block.bias"] = "middle_block_out.0.bias" + + count = 0 + loop = True + while loop: + suffix = [".weight", ".bias"] + for s in suffix: + k_in = "controlnet_down_blocks.{}{}".format(count, s) + k_out = "zero_convs.{}.0{}".format(count, s) + if k_in not in controlnet_data: + loop = False + break + diffusers_keys[k_in] = k_out + count += 1 + + count = 0 + loop = True + while loop: + suffix = [".weight", ".bias"] + for s in suffix: + if count == 0: + k_in = "controlnet_cond_embedding.conv_in{}".format(s) + else: + k_in = "controlnet_cond_embedding.blocks.{}{}".format(count - 1, s) + k_out = "input_hint_block.{}{}".format(count * 2, s) + if k_in not in controlnet_data: + k_in = "controlnet_cond_embedding.conv_out{}".format(s) + loop = False + diffusers_keys[k_in] = k_out + count += 1 + + new_sd = {} + for k in diffusers_keys: + if k in controlnet_data: + new_sd[diffusers_keys[k]] = controlnet_data.pop(k) + + leftover_keys = controlnet_data.keys() + if len(leftover_keys) > 0: + spatial_leftover_keys = [] + temporal_leftover_keys = [] + other_leftover_keys = [] + for key in leftover_keys: + if "spatial" in key: + spatial_leftover_keys.append(key) + elif "temporal" in key: + temporal_leftover_keys.append(key) + else: + other_leftover_keys.append(key) + logger.warn(f"spatial_leftover_keys ({len(spatial_leftover_keys)}): {spatial_leftover_keys}") + logger.warn(f"temporal_leftover_keys ({len(temporal_leftover_keys)}): {temporal_leftover_keys}") + logger.warn(f"other_leftover_keys ({len(other_leftover_keys)}): {other_leftover_keys}") + #print("leftover keys:", leftover_keys) + controlnet_data = new_sd + + pth_key = 'control_model.zero_convs.0.0.weight' + pth = False + key = 'zero_convs.0.0.weight' + if pth_key in controlnet_data: + pth = True + key = pth_key + prefix = "control_model." 
+ elif key in controlnet_data: + prefix = "" + else: + raise ValueError("The provided model is not a valid SVD-ControlNet model! [ErrorCode: MUSTARD]") + + if controlnet_config is None: + unet_dtype = comfy.model_management.unet_dtype() + controlnet_config = comfy.model_detection.model_config_from_unet(controlnet_data, prefix, unet_dtype, True).unet_config + load_device = comfy.model_management.get_torch_device() + manual_cast_dtype = comfy.model_management.unet_manual_cast(unet_dtype, load_device) + if manual_cast_dtype is not None: + controlnet_config["operations"] = comfy.ops.manual_cast + controlnet_config.pop("out_channels") + controlnet_config["hint_channels"] = controlnet_data["{}input_hint_block.0.weight".format(prefix)].shape[1] + control_model = SVDControlNet(**controlnet_config) + + if pth: + if 'difference' in controlnet_data: + if model is not None: + comfy.model_management.load_models_gpu([model]) + model_sd = model.model_state_dict() + for x in controlnet_data: + c_m = "control_model." + if x.startswith(c_m): + sd_key = "diffusion_model.{}".format(x[len(c_m):]) + if sd_key in model_sd: + cd = controlnet_data[x] + cd += model_sd[sd_key].type(cd.dtype).to(cd.device) + else: + print("WARNING: Loaded a diff controlnet without a model. It will very likely not work.") + + class WeightsLoader(torch.nn.Module): + pass + w = WeightsLoader() + w.control_model = control_model + missing, unexpected = w.load_state_dict(controlnet_data, strict=False) + else: + missing, unexpected = control_model.load_state_dict(controlnet_data, strict=False) + if len(missing) > 0 or len(unexpected) > 0: + logger.info(f"SVD-ControlNet: {missing}, {unexpected}") + + global_average_pooling = False + filename = os.path.splitext(ckpt_path)[0] + if filename.endswith("_shuffle") or filename.endswith("_shuffle_fp16"): #TODO: smarter way of enabling global_average_pooling + global_average_pooling = True + + control = SVDControlNetAdvanced(control_model, timestep_keyframes=timestep_keyframe, global_average_pooling=global_average_pooling, load_device=load_device, manual_cast_dtype=manual_cast_dtype) + return control + diff --git a/ComfyUI-Advanced-ControlNet/adv_control/control_lllite.py b/ComfyUI-Advanced-ControlNet/adv_control/control_lllite.py new file mode 100644 index 0000000000000000000000000000000000000000..96cb471bf7a55fb9c1c0abe658b100d2d4bb417b --- /dev/null +++ b/ComfyUI-Advanced-ControlNet/adv_control/control_lllite.py @@ -0,0 +1,254 @@ +# adapted from https://github.com/kohya-ss/ControlNet-LLLite-ComfyUI +# basically, all the LLLite core code is from there, which I then combined with +# Advanced-ControlNet features and QoL +import math +from typing import Union +from torch import Tensor +import torch +import os + +import comfy.utils +from comfy.controlnet import ControlBase + +from .logger import logger +from .utils import AdvancedControlBase, deepcopy_with_sharing, prepare_mask_batch + + +def extra_options_to_module_prefix(extra_options): + # extra_options = {'transformer_index': 2, 'block_index': 8, 'original_shape': [2, 4, 128, 128], 'block': ('input', 7), 'n_heads': 20, 'dim_head': 64} + + # block is: [('input', 4), ('input', 5), ('input', 7), ('input', 8), ('middle', 0), + # ('output', 0), ('output', 1), ('output', 2), ('output', 3), ('output', 4), ('output', 5)] + # transformer_index is: [0, 1, 2, 3, 4, 5, 6, 7, 8], for each block + # block_index is: 0-1 or 0-9, depends on the block + # input 7 and 8, middle has 10 blocks + + # make module name from extra_options + block = extra_options["block"] + 
block_index = extra_options["block_index"] + if block[0] == "input": + module_pfx = f"lllite_unet_input_blocks_{block[1]}_1_transformer_blocks_{block_index}" + elif block[0] == "middle": + module_pfx = f"lllite_unet_middle_block_1_transformer_blocks_{block_index}" + elif block[0] == "output": + module_pfx = f"lllite_unet_output_blocks_{block[1]}_1_transformer_blocks_{block_index}" + else: + raise Exception(f"ControlLLLite: invalid block name '{block[0]}'. Expected 'input', 'middle', or 'output'.") + return module_pfx + + +class LLLitePatch: + ATTN1 = "attn1" + ATTN2 = "attn2" + def __init__(self, modules: dict[str, 'LLLiteModule'], patch_type: str, control: Union[AdvancedControlBase, ControlBase]=None): + self.modules = modules + self.control = control + self.patch_type = patch_type + #logger.error(f"create LLLitePatch: {id(self)},{control}") + + def __call__(self, q, k, v, extra_options): + #logger.error(f"in __call__: {id(self)}") + # determine if have anything to run + if self.control.timestep_range is not None: + # it turns out comparing single-value tensors to floats is extremely slow + # a: Tensor = extra_options["sigmas"][0] + if self.control.t > self.control.timestep_range[0] or self.control.t < self.control.timestep_range[1]: + return q, k, v + + module_pfx = extra_options_to_module_prefix(extra_options) + + is_attn1 = q.shape[-1] == k.shape[-1] # self attention + if is_attn1: + module_pfx = module_pfx + "_attn1" + else: + module_pfx = module_pfx + "_attn2" + + module_pfx_to_q = module_pfx + "_to_q" + module_pfx_to_k = module_pfx + "_to_k" + module_pfx_to_v = module_pfx + "_to_v" + + if module_pfx_to_q in self.modules: + q = q + self.modules[module_pfx_to_q](q, self.control) + if module_pfx_to_k in self.modules: + k = k + self.modules[module_pfx_to_k](k, self.control) + if module_pfx_to_v in self.modules: + v = v + self.modules[module_pfx_to_v](v, self.control) + + return q, k, v + + def to(self, device): + #logger.info(f"to... has control? 
{self.control}") + for d in self.modules.keys(): + self.modules[d] = self.modules[d].to(device) + return self + + def set_control(self, control: Union[AdvancedControlBase, ControlBase]) -> 'LLLitePatch': + self.control = control + return self + #logger.error(f"set control for LLLitePatch: {id(self)}, cn: {id(control)}") + + def clone_with_control(self, control: AdvancedControlBase): + #logger.error(f"clone-set control for LLLitePatch: {id(self)},{id(control)}") + return LLLitePatch(self.modules, self.patch_type, control) + + def cleanup(self): + #total_cleaned = 0 + for module in self.modules.values(): + module.cleanup() + # total_cleaned += 1 + #logger.info(f"cleaned modules: {total_cleaned}, {id(self)}") + #logger.error(f"cleanup LLLitePatch: {id(self)}") + + # make sure deepcopy does not copy control, and deepcopied LLLitePatch should be assigned to control + def __deepcopy__(self, memo): + self.cleanup() + to_return: LLLitePatch = deepcopy_with_sharing(self, shared_attribute_names = ['control'], memo=memo) + #logger.warn(f"patch {id(self)} turned into {id(to_return)}") + try: + if self.patch_type == self.ATTN1: + to_return.control.patch_attn1 = to_return + elif self.patch_type == self.ATTN2: + to_return.control.patch_attn2 = to_return + except Exception: + pass + return to_return + + +# TODO: use comfy.ops to support fp8 properly +class LLLiteModule(torch.nn.Module): + def __init__( + self, + name: str, + is_conv2d: bool, + in_dim: int, + depth: int, + cond_emb_dim: int, + mlp_dim: int, + ): + super().__init__() + self.name = name + self.is_conv2d = is_conv2d + self.is_first = False + + modules = [] + modules.append(torch.nn.Conv2d(3, cond_emb_dim // 2, kernel_size=4, stride=4, padding=0)) # to latent (from VAE) size*2 + if depth == 1: + modules.append(torch.nn.ReLU(inplace=True)) + modules.append(torch.nn.Conv2d(cond_emb_dim // 2, cond_emb_dim, kernel_size=2, stride=2, padding=0)) + elif depth == 2: + modules.append(torch.nn.ReLU(inplace=True)) + modules.append(torch.nn.Conv2d(cond_emb_dim // 2, cond_emb_dim, kernel_size=4, stride=4, padding=0)) + elif depth == 3: + # kernel size 8 is too large, so set it to 4 + modules.append(torch.nn.ReLU(inplace=True)) + modules.append(torch.nn.Conv2d(cond_emb_dim // 2, cond_emb_dim // 2, kernel_size=4, stride=4, padding=0)) + modules.append(torch.nn.ReLU(inplace=True)) + modules.append(torch.nn.Conv2d(cond_emb_dim // 2, cond_emb_dim, kernel_size=2, stride=2, padding=0)) + + self.conditioning1 = torch.nn.Sequential(*modules) + + if self.is_conv2d: + self.down = torch.nn.Sequential( + torch.nn.Conv2d(in_dim, mlp_dim, kernel_size=1, stride=1, padding=0), + torch.nn.ReLU(inplace=True), + ) + self.mid = torch.nn.Sequential( + torch.nn.Conv2d(mlp_dim + cond_emb_dim, mlp_dim, kernel_size=1, stride=1, padding=0), + torch.nn.ReLU(inplace=True), + ) + self.up = torch.nn.Sequential( + torch.nn.Conv2d(mlp_dim, in_dim, kernel_size=1, stride=1, padding=0), + ) + else: + self.down = torch.nn.Sequential( + torch.nn.Linear(in_dim, mlp_dim), + torch.nn.ReLU(inplace=True), + ) + self.mid = torch.nn.Sequential( + torch.nn.Linear(mlp_dim + cond_emb_dim, mlp_dim), + torch.nn.ReLU(inplace=True), + ) + self.up = torch.nn.Sequential( + torch.nn.Linear(mlp_dim, in_dim), + ) + + self.depth = depth + self.cond_emb = None + self.cx_shape = None + self.prev_batch = 0 + self.prev_sub_idxs = None + + def cleanup(self): + del self.cond_emb + self.cond_emb = None + self.cx_shape = None + self.prev_batch = 0 + self.prev_sub_idxs = None + + def forward(self, x: Tensor, control: 
Union[AdvancedControlBase, ControlBase]): + mask = None + mask_tk = None + #logger.info(x.shape) + if self.cond_emb is None or control.sub_idxs != self.prev_sub_idxs or x.shape[0] != self.prev_batch: + # print(f"cond_emb is None, {self.name}") + cond_hint = control.cond_hint.to(x.device, dtype=x.dtype) + if control.latent_dims_div2 is not None and x.shape[-1] != 1280: + cond_hint = comfy.utils.common_upscale(cond_hint, control.latent_dims_div2[0] * 8, control.latent_dims_div2[1] * 8, 'nearest-exact', "center").to(x.device, dtype=x.dtype) + elif control.latent_dims_div4 is not None and x.shape[-1] == 1280: + cond_hint = comfy.utils.common_upscale(cond_hint, control.latent_dims_div4[0] * 8, control.latent_dims_div4[1] * 8, 'nearest-exact', "center").to(x.device, dtype=x.dtype) + cx = self.conditioning1(cond_hint) + self.cx_shape = cx.shape + if not self.is_conv2d: + # reshape / b,c,h,w -> b,h*w,c + n, c, h, w = cx.shape + cx = cx.view(n, c, h * w).permute(0, 2, 1) + self.cond_emb = cx + # save prev values + self.prev_batch = x.shape[0] + self.prev_sub_idxs = control.sub_idxs + + cx: torch.Tensor = self.cond_emb + # print(f"forward {self.name}, {cx.shape}, {x.shape}") + + # TODO: make masks work for conv2d (could not find any ControlLLLites at this time that use them) + # create masks + if not self.is_conv2d: + n, c, h, w = self.cx_shape + if control.mask_cond_hint is not None: + mask = prepare_mask_batch(control.mask_cond_hint, (1, 1, h, w)).to(cx.dtype) + mask = mask.view(mask.shape[0], 1, h * w).permute(0, 2, 1) + if control.tk_mask_cond_hint is not None: + mask_tk = prepare_mask_batch(control.mask_cond_hint, (1, 1, h, w)).to(cx.dtype) + mask_tk = mask_tk.view(mask_tk.shape[0], 1, h * w).permute(0, 2, 1) + + # x in uncond/cond doubles batch size + if x.shape[0] != cx.shape[0]: + if self.is_conv2d: + cx = cx.repeat(x.shape[0] // cx.shape[0], 1, 1, 1) + else: + # print("x.shape[0] != cx.shape[0]", x.shape[0], cx.shape[0]) + cx = cx.repeat(x.shape[0] // cx.shape[0], 1, 1) + if mask is not None: + mask = mask.repeat(x.shape[0] // mask.shape[0], 1, 1) + if mask_tk is not None: + mask_tk = mask_tk.repeat(x.shape[0] // mask_tk.shape[0], 1, 1) + + if mask is None: + mask = 1.0 + elif mask_tk is not None: + mask = mask * mask_tk + + #logger.info(f"cs: {cx.shape}, x: {x.shape}, is_conv2d: {self.is_conv2d}") + cx = torch.cat([cx, self.down(x)], dim=1 if self.is_conv2d else 2) + cx = self.mid(cx) + cx = self.up(cx) + if control.latent_keyframes is not None: + cx = cx * control.calc_latent_keyframe_mults(x=cx, batched_number=control.batched_number) + if control.weights is not None and control.weights.has_uncond_multiplier: + cond_or_uncond = control.batched_number.cond_or_uncond + actual_length = cx.size(0) // control.batched_number + for idx, cond_type in enumerate(cond_or_uncond): + # if uncond, set to weight's uncond_multiplier + if cond_type == 1: + cx[actual_length*idx:actual_length*(idx+1)] *= control.weights.uncond_multiplier + return cx * mask * control.strength * control._current_timestep_keyframe.strength diff --git a/ComfyUI-Advanced-ControlNet/adv_control/control_reference.py b/ComfyUI-Advanced-ControlNet/adv_control/control_reference.py new file mode 100644 index 0000000000000000000000000000000000000000..815d651fe5d3621a5bb8b65bb9d1fd0621360ba4 --- /dev/null +++ b/ComfyUI-Advanced-ControlNet/adv_control/control_reference.py @@ -0,0 +1,833 @@ +from typing import Callable, Union + +import math +import torch +from torch import Tensor + +import comfy.sample +import comfy.model_patcher +import 
comfy.utils +from comfy.controlnet import ControlBase +from comfy.model_patcher import ModelPatcher +from comfy.ldm.modules.attention import BasicTransformerBlock +from comfy.ldm.modules.diffusionmodules import openaimodel + +from .logger import logger +from .utils import (AdvancedControlBase, ControlWeights, TimestepKeyframeGroup, AbstractPreprocWrapper, + deepcopy_with_sharing, prepare_mask_batch, broadcast_image_to_full) + + +def refcn_sample_factory(orig_comfy_sample: Callable, is_custom=False) -> Callable: + def get_refcn(control: ControlBase, order: int=-1): + ref_set: set[ReferenceAdvanced] = set() + if control is None: + return ref_set + if type(control) == ReferenceAdvanced: + control.order = order + order -= 1 + ref_set.add(control) + ref_set.update(get_refcn(control.previous_controlnet, order=order)) + return ref_set + + def refcn_sample(model: ModelPatcher, *args, **kwargs): + # check if positive or negative conds contain ref cn + positive = args[-3] + negative = args[-2] + ref_set = set() + if positive is not None: + for cond in positive: + if "control" in cond[1]: + ref_set.update(get_refcn(cond[1]["control"])) + if negative is not None: + for cond in negative: + if "control" in cond[1]: + ref_set.update(get_refcn(cond[1]["control"])) + # if no ref cn found, do original function immediately + if len(ref_set) == 0: + return orig_comfy_sample(model, *args, **kwargs) + # otherwise, injection time + try: + # inject + # storage for all Reference-related injections + reference_injections = ReferenceInjections() + + # first, handle attn module injection + all_modules = torch_dfs(model.model) + attn_modules: list[RefBasicTransformerBlock] = [] + for module in all_modules: + if isinstance(module, BasicTransformerBlock): + attn_modules.append(module) + attn_modules = [module for module in all_modules if isinstance(module, BasicTransformerBlock)] + attn_modules = sorted(attn_modules, key=lambda x: -x.norm1.normalized_shape[0]) + for i, module in enumerate(attn_modules): + injection_holder = InjectionBasicTransformerBlockHolder(block=module, idx=i) + injection_holder.attn_weight = float(i) / float(len(attn_modules)) + if hasattr(module, "_forward"): # backward compatibility + module._forward = _forward_inject_BasicTransformerBlock.__get__(module, type(module)) + else: + module.forward = _forward_inject_BasicTransformerBlock.__get__(module, type(module)) + module.injection_holder = injection_holder + reference_injections.attn_modules.append(module) + # figure out which module is middle block + if hasattr(model.model.diffusion_model, "middle_block"): + mid_modules = torch_dfs(model.model.diffusion_model.middle_block) + mid_attn_modules: list[RefBasicTransformerBlock] = [module for module in mid_modules if isinstance(module, BasicTransformerBlock)] + for module in mid_attn_modules: + module.injection_holder.is_middle = True + + # next, handle gn module injection (TimestepEmbedSequential) + # TODO: figure out the logic behind these hardcoded indexes + if type(model.model).__name__ == "SDXL": + input_block_indices = [4, 5, 7, 8] + output_block_indices = [0, 1, 2, 3, 4, 5] + else: + input_block_indices = [4, 5, 7, 8, 10, 11] + output_block_indices = [0, 1, 2, 3, 4, 5, 6, 7] + if hasattr(model.model.diffusion_model, "middle_block"): + module = model.model.diffusion_model.middle_block + injection_holder = InjectionTimestepEmbedSequentialHolder(block=module, idx=0, is_middle=True) + injection_holder.gn_weight = 0.0 + module.injection_holder = injection_holder + 
reference_injections.gn_modules.append(module) + for w, i in enumerate(input_block_indices): + module = model.model.diffusion_model.input_blocks[i] + injection_holder = InjectionTimestepEmbedSequentialHolder(block=module, idx=i, is_input=True) + injection_holder.gn_weight = 1.0 - float(w) / float(len(input_block_indices)) + module.injection_holder = injection_holder + reference_injections.gn_modules.append(module) + for w, i in enumerate(output_block_indices): + module = model.model.diffusion_model.output_blocks[i] + injection_holder = InjectionTimestepEmbedSequentialHolder(block=module, idx=i, is_output=True) + injection_holder.gn_weight = float(w) / float(len(output_block_indices)) + module.injection_holder = injection_holder + reference_injections.gn_modules.append(module) + # hack gn_module forwards and update weights + for i, module in enumerate(reference_injections.gn_modules): + module.injection_holder.gn_weight *= 2 + + # handle diffusion_model forward injection + reference_injections.diffusion_model_orig_forward = model.model.diffusion_model.forward + model.model.diffusion_model.forward = factory_forward_inject_UNetModel(reference_injections).__get__(model.model.diffusion_model, type(model.model.diffusion_model)) + # store ordered ref cns in model's transformer options + orig_model_options = model.model_options + new_model_options = model.model_options.copy() + new_model_options["transformer_options"] = model.model_options["transformer_options"].copy() + ref_list: list[ReferenceAdvanced] = list(ref_set) + new_model_options["transformer_options"][REF_CONTROL_LIST_ALL] = sorted(ref_list, key=lambda x: x.order) + model.model_options = new_model_options + # continue with original function + return orig_comfy_sample(model, *args, **kwargs) + finally: + # cleanup injections + # restore attn modules + attn_modules: list[RefBasicTransformerBlock] = reference_injections.attn_modules + for module in attn_modules: + module.injection_holder.restore(module) + module.injection_holder.clean() + del module.injection_holder + del attn_modules + # restore gn modules + gn_modules: list[RefTimestepEmbedSequential] = reference_injections.gn_modules + for module in gn_modules: + module.injection_holder.restore(module) + module.injection_holder.clean() + del module.injection_holder + del gn_modules + # restore diffusion_model forward function + model.model.diffusion_model.forward = reference_injections.diffusion_model_orig_forward.__get__(model.model.diffusion_model, type(model.model.diffusion_model)) + # restore model_options + model.model_options = orig_model_options + # cleanup + reference_injections.cleanup() + return refcn_sample +# inject sample functions +comfy.sample.sample = refcn_sample_factory(comfy.sample.sample) +comfy.sample.sample_custom = refcn_sample_factory(comfy.sample.sample_custom, is_custom=True) + + +REF_ATTN_CONTROL_LIST = "ref_attn_control_list" +REF_ADAIN_CONTROL_LIST = "ref_adain_control_list" +REF_CONTROL_LIST_ALL = "ref_control_list_all" +REF_CONTROL_INFO = "ref_control_info" +REF_ATTN_MACHINE_STATE = "ref_attn_machine_state" +REF_ADAIN_MACHINE_STATE = "ref_adain_machine_state" +REF_COND_IDXS = "ref_cond_idxs" +REF_UNCOND_IDXS = "ref_uncond_idxs" + + +class MachineState: + WRITE = "write" + READ = "read" + STYLEALIGN = "stylealign" + OFF = "off" + + +class ReferenceType: + ATTN = "reference_attn" + ADAIN = "reference_adain" + ATTN_ADAIN = "reference_attn+adain" + STYLE_ALIGN = "StyleAlign" + + _LIST = [ATTN, ADAIN, ATTN_ADAIN] + _LIST_ATTN = [ATTN, ATTN_ADAIN] + _LIST_ADAIN 
= [ADAIN, ATTN_ADAIN] + + @classmethod + def is_attn(cls, ref_type: str): + return ref_type in cls._LIST_ATTN + + @classmethod + def is_adain(cls, ref_type: str): + return ref_type in cls._LIST_ADAIN + + +class ReferenceOptions: + def __init__(self, reference_type: str, + attn_style_fidelity: float, adain_style_fidelity: float, + attn_ref_weight: float, adain_ref_weight: float, + attn_strength: float=1.0, adain_strength: float=1.0, + ref_with_other_cns: bool=False): + self.reference_type = reference_type + # attn + self.original_attn_style_fidelity = attn_style_fidelity + self.attn_style_fidelity = attn_style_fidelity + self.attn_ref_weight = attn_ref_weight + self.attn_strength = attn_strength + # adain + self.original_adain_style_fidelity = adain_style_fidelity + self.adain_style_fidelity = adain_style_fidelity + self.adain_ref_weight = adain_ref_weight + self.adain_strength = adain_strength + # other + self.ref_with_other_cns = ref_with_other_cns + + def clone(self): + return ReferenceOptions(reference_type=self.reference_type, + attn_style_fidelity=self.original_attn_style_fidelity, adain_style_fidelity=self.original_adain_style_fidelity, + attn_ref_weight=self.attn_ref_weight, adain_ref_weight=self.adain_ref_weight, + attn_strength=self.attn_strength, adain_strength=self.adain_strength, + ref_with_other_cns=self.ref_with_other_cns) + + @staticmethod + def create_combo(reference_type: str, style_fidelity: float, ref_weight: float, ref_with_other_cns: bool=False): + return ReferenceOptions(reference_type=reference_type, + attn_style_fidelity=style_fidelity, adain_style_fidelity=style_fidelity, + attn_ref_weight=ref_weight, adain_ref_weight=ref_weight, + ref_with_other_cns=ref_with_other_cns) + + + +class ReferencePreprocWrapper(AbstractPreprocWrapper): + error_msg = "Invalid use of Reference Preprocess output. The output of the Reference preprocessor is NOT a usual image, but a latent pretending to be an image - you must connect the output directly to an Apply Advanced ControlNet node. It cannot be used for anything else that accepts IMAGE input."
+ def __init__(self, condhint: Tensor): + super().__init__(condhint) + + +class ReferenceAdvanced(ControlBase, AdvancedControlBase): + CHANNEL_TO_MULT = {320: 1, 640: 2, 1280: 4} + + def __init__(self, ref_opts: ReferenceOptions, timestep_keyframes: TimestepKeyframeGroup, device=None): + super().__init__(device) + AdvancedControlBase.__init__(self, super(), timestep_keyframes=timestep_keyframes, weights_default=ControlWeights.controllllite()) + self.ref_opts = ref_opts + self.order = 0 + self.latent_format = None + self.model_sampling_current = None + self.should_apply_attn_effective_strength = False + self.should_apply_adain_effective_strength = False + self.should_apply_effective_masks = False + self.latent_shape = None + + def any_attn_strength_to_apply(self): + return self.should_apply_attn_effective_strength or self.should_apply_effective_masks + + def any_adain_strength_to_apply(self): + return self.should_apply_adain_effective_strength or self.should_apply_effective_masks + + def get_effective_strength(self): + effective_strength = self.strength + if self._current_timestep_keyframe is not None: + effective_strength = effective_strength * self._current_timestep_keyframe.strength + return effective_strength + + def get_effective_attn_mask_or_float(self, x: Tensor, channels: int, is_mid: bool): + if not self.should_apply_effective_masks: + return self.get_effective_strength() * self.ref_opts.attn_strength + if is_mid: + div = 8 + else: + div = self.CHANNEL_TO_MULT[channels] + real_mask = torch.ones([self.latent_shape[0], 1, self.latent_shape[2]//div, self.latent_shape[3]//div]).to(dtype=x.dtype, device=x.device) * self.strength * self.ref_opts.attn_strength + self.apply_advanced_strengths_and_masks(x=real_mask, batched_number=self.batched_number) + # mask is now shape [b, 1, h ,w]; need to turn into [b, h*w, 1] + b, c, h, w = real_mask.shape + real_mask = real_mask.permute(0, 2, 3, 1).reshape(b, h*w, c) + return real_mask + + def get_effective_adain_mask_or_float(self, x: Tensor): + if not self.should_apply_effective_masks: + return self.get_effective_strength() * self.ref_opts.adain_strength + b, c, h, w = x.shape + real_mask = torch.ones([b, 1, h, w]).to(dtype=x.dtype, device=x.device) * self.strength * self.ref_opts.adain_strength + self.apply_advanced_strengths_and_masks(x=real_mask, batched_number=self.batched_number) + return real_mask + + def should_run(self): + running = super().should_run() + if not running: + return running + attn_run = False + adain_run = False + if ReferenceType.is_attn(self.ref_opts.reference_type): + # attn will run as long as neither weight or strength is zero + attn_run = not (math.isclose(self.ref_opts.attn_ref_weight, 0.0) or math.isclose(self.ref_opts.attn_strength, 0.0)) + if ReferenceType.is_adain(self.ref_opts.reference_type): + # adain will run as long as neither weight or strength is zero + adain_run = not (math.isclose(self.ref_opts.adain_ref_weight, 0.0) or math.isclose(self.ref_opts.adain_strength, 0.0)) + return attn_run or adain_run + + def pre_run_advanced(self, model, percent_to_timestep_function): + AdvancedControlBase.pre_run_advanced(self, model, percent_to_timestep_function) + if type(self.cond_hint_original) == ReferencePreprocWrapper: + self.cond_hint_original = self.cond_hint_original.condhint + self.latent_format = model.latent_format # LatentFormat object, used to process_in latent cond_hint + self.model_sampling_current = model.model_sampling + # SDXL is more sensitive to style_fidelity according to sd-webui-controlnet comments 
+ if type(model).__name__ == "SDXL": + self.ref_opts.attn_style_fidelity = self.ref_opts.original_attn_style_fidelity ** 3.0 + self.ref_opts.adain_style_fidelity = self.ref_opts.original_adain_style_fidelity ** 3.0 + else: + self.ref_opts.attn_style_fidelity = self.ref_opts.original_attn_style_fidelity + self.ref_opts.adain_style_fidelity = self.ref_opts.original_adain_style_fidelity + + def get_control_advanced(self, x_noisy: Tensor, t, cond, batched_number: int): + # normal ControlNet stuff + control_prev = None + if self.previous_controlnet is not None: + control_prev = self.previous_controlnet.get_control(x_noisy, t, cond, batched_number) + + if self.timestep_range is not None: + if t[0] > self.timestep_range[0] or t[0] < self.timestep_range[1]: + return control_prev + + dtype = x_noisy.dtype + # prepare cond_hint - it is a latent, NOT an image + #if self.sub_idxs is not None or self.cond_hint is None or x_noisy.shape[2] != self.cond_hint.shape[2] or x_noisy.shape[3] != self.cond_hint.shape[3]: + if self.cond_hint is not None: + del self.cond_hint + self.cond_hint = None + # if self.cond_hint_original length greater or equal to real latent count, subdivide it before scaling + if self.sub_idxs is not None and self.cond_hint_original.size(0) >= self.full_latent_length: + self.cond_hint = comfy.utils.common_upscale( + self.cond_hint_original[self.sub_idxs], + x_noisy.shape[3], x_noisy.shape[2], 'nearest-exact', "center").to(dtype).to(self.device) + else: + self.cond_hint = comfy.utils.common_upscale( + self.cond_hint_original, + x_noisy.shape[3], x_noisy.shape[2], 'nearest-exact', "center").to(dtype).to(self.device) + if x_noisy.shape[0] != self.cond_hint.shape[0]: + self.cond_hint = broadcast_image_to_full(self.cond_hint, x_noisy.shape[0], batched_number, except_one=False) + # noise cond_hint based on sigma (current step) + self.cond_hint = self.latent_format.process_in(self.cond_hint) + self.cond_hint = ref_noise_latents(self.cond_hint, sigma=t, noise=None) + timestep = self.model_sampling_current.timestep(t) + self.should_apply_attn_effective_strength = not (math.isclose(self.strength, 1.0) and math.isclose(self._current_timestep_keyframe.strength, 1.0) and math.isclose(self.ref_opts.attn_strength, 1.0)) + self.should_apply_adain_effective_strength = not (math.isclose(self.strength, 1.0) and math.isclose(self._current_timestep_keyframe.strength, 1.0) and math.isclose(self.ref_opts.adain_strength, 1.0)) + # prepare mask - use direct_attn, so the mask dims will match source latents (and be smaller) + self.prepare_mask_cond_hint(x_noisy=x_noisy, t=t, cond=cond, batched_number=batched_number, direct_attn=True) + self.should_apply_effective_masks = self.latent_keyframes is not None or self.mask_cond_hint is not None or self.tk_mask_cond_hint is not None + self.latent_shape = list(x_noisy.shape) + # done preparing; model patches will take care of everything now. 
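The cond_hint noising above delegates to ref_noise_latents (defined further down): the sampler's sigma for the current step is converted into the equivalent alpha_cumprod of a variance-preserving schedule, and the clean reference latent is mixed with Gaussian noise accordingly. A minimal standalone sketch of that relation, with an illustrative function name and an arbitrary latent shape:

import torch

def noise_latents_for_sigma(latents: torch.Tensor, sigma: float) -> torch.Tensor:
    # same relation as ref_noise_latents: alpha_cumprod = 1 / (sigma^2 + 1)
    alpha_cumprod = 1.0 / (sigma * sigma + 1.0)
    sqrt_alpha = alpha_cumprod ** 0.5
    sqrt_one_minus_alpha = (1.0 - alpha_cumprod) ** 0.5
    return sqrt_alpha * latents + sqrt_one_minus_alpha * torch.randn_like(latents)

# sigma=0.0 leaves the latents untouched; a very large sigma approaches pure noise
noisy = noise_latents_for_sigma(torch.zeros(1, 4, 8, 8), sigma=1.0)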
+ # return normal controlnet stuff + return control_prev + + def cleanup_advanced(self): + super().cleanup_advanced() + del self.latent_format + self.latent_format = None + del self.model_sampling_current + self.model_sampling_current = None + self.should_apply_attn_effective_strength = False + self.should_apply_adain_effective_strength = False + self.should_apply_effective_masks = False + + def copy(self): + c = ReferenceAdvanced(self.ref_opts, self.timestep_keyframes) + c.order = self.order + self.copy_to(c) + self.copy_to_advanced(c) + return c + + # avoid deepcopy shenanigans by making deepcopy not do anything to the reference + # TODO: do the bookkeeping to do this in a proper way for all Adv-ControlNets + def __deepcopy__(self, memo): + return self + + +def ref_noise_latents(latents: Tensor, sigma: Tensor, noise: Tensor=None): + sigma = sigma.unsqueeze(-1).unsqueeze(-1).unsqueeze(-1) + alpha_cumprod = 1 / ((sigma * sigma) + 1) + sqrt_alpha_prod = alpha_cumprod ** 0.5 + sqrt_one_minus_alpha_prod = (1. - alpha_cumprod) ** 0.5 + if noise is None: + # generator = torch.Generator(device="cuda") + # generator.manual_seed(0) + # noise = torch.empty_like(latents).normal_(generator=generator) + # generator = torch.Generator() + # generator.manual_seed(0) + # noise = torch.randn(latents.size(), generator=generator).to(latents.device) + noise = torch.randn_like(latents).to(latents.device) + return sqrt_alpha_prod * latents + sqrt_one_minus_alpha_prod * noise + + +def simple_noise_latents(latents: Tensor, sigma: float, noise: Tensor=None): + if noise is None: + noise = torch.rand_like(latents) + return latents + noise * sigma + + +class BankStylesBasicTransformerBlock: + def __init__(self): + self.bank = [] + self.style_cfgs = [] + self.cn_idx: list[int] = [] + + def get_avg_style_fidelity(self): + return sum(self.style_cfgs) / float(len(self.style_cfgs)) + + def clean(self): + del self.bank + self.bank = [] + del self.style_cfgs + self.style_cfgs = [] + del self.cn_idx + self.cn_idx = [] + + +class BankStylesTimestepEmbedSequential: + def __init__(self): + self.var_bank = [] + self.mean_bank = [] + self.style_cfgs = [] + self.cn_idx: list[int] = [] + + def get_avg_var_bank(self): + return sum(self.var_bank) / float(len(self.var_bank)) + + def get_avg_mean_bank(self): + return sum(self.mean_bank) / float(len(self.mean_bank)) + + def get_avg_style_fidelity(self): + return sum(self.style_cfgs) / float(len(self.style_cfgs)) + + def clean(self): + del self.mean_bank + self.mean_bank = [] + del self.var_bank + self.var_bank = [] + del self.style_cfgs + self.style_cfgs = [] + del self.cn_idx + self.cn_idx = [] + + +class InjectionBasicTransformerBlockHolder: + def __init__(self, block: BasicTransformerBlock, idx=None): + if hasattr(block, "_forward"): # backward compatibility + self.original_forward = block._forward + else: + self.original_forward = block.forward + self.idx = idx + self.attn_weight = 1.0 + self.is_middle = False + self.bank_styles = BankStylesBasicTransformerBlock() + + def restore(self, block: BasicTransformerBlock): + if hasattr(block, "_forward"): # backward compatibility + block._forward = self.original_forward + else: + block.forward = self.original_forward + + def clean(self): + self.bank_styles.clean() + + +class InjectionTimestepEmbedSequentialHolder: + def __init__(self, block: openaimodel.TimestepEmbedSequential, idx=None, is_middle=False, is_input=False, is_output=False): + self.original_forward = block.forward + self.idx = idx + self.gn_weight = 1.0 + self.is_middle = 
is_middle + self.is_input = is_input + self.is_output = is_output + self.bank_styles = BankStylesTimestepEmbedSequential() + + def restore(self, block: openaimodel.TimestepEmbedSequential): + block.forward = self.original_forward + + def clean(self): + self.bank_styles.clean() + + +class ReferenceInjections: + def __init__(self, attn_modules: list['RefBasicTransformerBlock']=None, gn_modules: list['RefTimestepEmbedSequential']=None): + self.attn_modules = attn_modules if attn_modules else [] + self.gn_modules = gn_modules if gn_modules else [] + self.diffusion_model_orig_forward: Callable = None + + def clean_module_mem(self): + for attn_module in self.attn_modules: + try: + attn_module.injection_holder.clean() + except Exception: + pass + for gn_module in self.gn_modules: + try: + gn_module.injection_holder.clean() + except Exception: + pass + + def cleanup(self): + self.clean_module_mem() + del self.attn_modules + self.attn_modules = [] + del self.gn_modules + self.gn_modules = [] + self.diffusion_model_orig_forward = None + + +def factory_forward_inject_UNetModel(reference_injections: ReferenceInjections): + def forward_inject_UNetModel(self, x: Tensor, *args, **kwargs): + # get control and transformer_options from kwargs + real_args = list(args) + real_kwargs = list(kwargs.keys()) + control = kwargs.get("control", None) + transformer_options = kwargs.get("transformer_options", None) + # look for ReferenceAttnPatch objects to get ReferenceAdvanced objects + ref_controlnets: list[ReferenceAdvanced] = transformer_options[REF_CONTROL_LIST_ALL] + # discard any controlnets that should not run + ref_controlnets = [x for x in ref_controlnets if x.should_run()] + # if nothing related to reference controlnets, do nothing special + if len(ref_controlnets) == 0: + return reference_injections.diffusion_model_orig_forward(x, *args, **kwargs) + try: + # assign cond and uncond idxs + batched_number = len(transformer_options["cond_or_uncond"]) + per_batch = x.shape[0] // batched_number + indiv_conds = [] + for cond_type in transformer_options["cond_or_uncond"]: + indiv_conds.extend([cond_type] * per_batch) + transformer_options[REF_UNCOND_IDXS] = [i for i, x in enumerate(indiv_conds) if x == 1] + transformer_options[REF_COND_IDXS] = [i for i, x in enumerate(indiv_conds) if x == 0] + # check which controlnets do which thing + attn_controlnets = [] + adain_controlnets = [] + for control in ref_controlnets: + if ReferenceType.is_attn(control.ref_opts.reference_type): + attn_controlnets.append(control) + if ReferenceType.is_adain(control.ref_opts.reference_type): + adain_controlnets.append(control) + if len(adain_controlnets) > 0: + # ComfyUI uses forward_timestep_embed with the TimestepEmbedSequential passed into it + orig_forward_timestep_embed = openaimodel.forward_timestep_embed + openaimodel.forward_timestep_embed = forward_timestep_embed_ref_inject_factory(orig_forward_timestep_embed) + # handle running diffusion with ref cond hints + for control in ref_controlnets: + if ReferenceType.is_attn(control.ref_opts.reference_type): + transformer_options[REF_ATTN_MACHINE_STATE] = MachineState.WRITE + else: + transformer_options[REF_ATTN_MACHINE_STATE] = MachineState.OFF + if ReferenceType.is_adain(control.ref_opts.reference_type): + transformer_options[REF_ADAIN_MACHINE_STATE] = MachineState.WRITE + else: + transformer_options[REF_ADAIN_MACHINE_STATE] = MachineState.OFF + transformer_options[REF_ATTN_CONTROL_LIST] = [control] + transformer_options[REF_ADAIN_CONTROL_LIST] = [control] + + orig_kwargs = kwargs 
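The REF_COND_IDXS/REF_UNCOND_IDXS lists assigned near the top of this injected forward depend only on ComfyUI's cond_or_uncond ordering (0 for cond, 1 for uncond) and on how many latents belong to each group. A small self-contained illustration of that bookkeeping; the helper name is hypothetical:

def split_cond_uncond_idxs(cond_or_uncond: list, total_batch: int):
    # expand e.g. [1, 0] over a batch of 4 latents -> [1, 1, 0, 0]
    per_batch = total_batch // len(cond_or_uncond)
    flags = []
    for flag in cond_or_uncond:
        flags.extend([flag] * per_batch)
    cond_idxs = [i for i, f in enumerate(flags) if f == 0]
    uncond_idxs = [i for i, f in enumerate(flags) if f == 1]
    return cond_idxs, uncond_idxs

# mirrors the REF_COND_IDXS / REF_UNCOND_IDXS assignment above
assert split_cond_uncond_idxs([1, 0], 4) == ([2, 3], [0, 1])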
+ if not control.ref_opts.ref_with_other_cns: + kwargs = kwargs.copy() + kwargs["control"] = None + reference_injections.diffusion_model_orig_forward(control.cond_hint.to(dtype=x.dtype).to(device=x.device), *args, **kwargs) + kwargs = orig_kwargs + # run diffusion for real now + transformer_options[REF_ATTN_MACHINE_STATE] = MachineState.READ + transformer_options[REF_ADAIN_MACHINE_STATE] = MachineState.READ + transformer_options[REF_ATTN_CONTROL_LIST] = attn_controlnets + transformer_options[REF_ADAIN_CONTROL_LIST] = adain_controlnets + return reference_injections.diffusion_model_orig_forward(x, *args, **kwargs) + finally: + # make sure banks are cleared no matter what happens - otherwise, RIP VRAM + reference_injections.clean_module_mem() + if len(adain_controlnets) > 0: + openaimodel.forward_timestep_embed = orig_forward_timestep_embed + + return forward_inject_UNetModel + + +# dummy class just to help IDE keep track of injected variables +class RefBasicTransformerBlock(BasicTransformerBlock): + injection_holder: InjectionBasicTransformerBlockHolder = None + +def _forward_inject_BasicTransformerBlock(self: RefBasicTransformerBlock, x: Tensor, context: Tensor=None, transformer_options: dict[str]={}): + extra_options = {} + block = transformer_options.get("block", None) + block_index = transformer_options.get("block_index", 0) + transformer_patches = {} + transformer_patches_replace = {} + + for k in transformer_options: + if k == "patches": + transformer_patches = transformer_options[k] + elif k == "patches_replace": + transformer_patches_replace = transformer_options[k] + else: + extra_options[k] = transformer_options[k] + + extra_options["n_heads"] = self.n_heads + extra_options["dim_head"] = self.d_head + + if self.ff_in: + x_skip = x + x = self.ff_in(self.norm_in(x)) + if self.is_res: + x += x_skip + + n: Tensor = self.norm1(x) + if self.disable_self_attn: + context_attn1 = context + else: + context_attn1 = None + value_attn1 = None + + # Reference CN stuff + uc_idx_mask = transformer_options.get(REF_UNCOND_IDXS, []) + c_idx_mask = transformer_options.get(REF_COND_IDXS, []) + # WRITE mode will only have one ReferenceAdvanced, other modes will have all ReferenceAdvanced + ref_controlnets: list[ReferenceAdvanced] = transformer_options.get(REF_ATTN_CONTROL_LIST, None) + ref_machine_state: str = transformer_options.get(REF_ATTN_MACHINE_STATE, None) + # if in WRITE mode, save n and style_fidelity + if ref_controlnets and ref_machine_state == MachineState.WRITE: + if ref_controlnets[0].ref_opts.attn_ref_weight > self.injection_holder.attn_weight: + self.injection_holder.bank_styles.bank.append(n.detach().clone()) + self.injection_holder.bank_styles.style_cfgs.append(ref_controlnets[0].ref_opts.attn_style_fidelity) + self.injection_holder.bank_styles.cn_idx.append(ref_controlnets[0].order) + + if "attn1_patch" in transformer_patches: + patch = transformer_patches["attn1_patch"] + if context_attn1 is None: + context_attn1 = n + value_attn1 = context_attn1 + for p in patch: + n, context_attn1, value_attn1 = p(n, context_attn1, value_attn1, extra_options) + + if block is not None: + transformer_block = (block[0], block[1], block_index) + else: + transformer_block = None + attn1_replace_patch = transformer_patches_replace.get("attn1", {}) + block_attn1 = transformer_block + if block_attn1 not in attn1_replace_patch: + block_attn1 = block + + if block_attn1 in attn1_replace_patch: + if context_attn1 is None: + context_attn1 = n + value_attn1 = n + n = self.attn1.to_q(n) + # Reference CN READ - 
use attn1_replace_patch appropriately + if ref_machine_state == MachineState.READ and len(self.injection_holder.bank_styles.bank) > 0: + bank_styles = self.injection_holder.bank_styles + style_fidelity = bank_styles.get_avg_style_fidelity() + real_bank = bank_styles.bank.copy() + cn_idx = 0 + for idx, order in enumerate(bank_styles.cn_idx): + # make sure matching ref cn is selected + for i in range(cn_idx, len(ref_controlnets)): + if ref_controlnets[i].order == order: + cn_idx = i + break + assert order == ref_controlnets[cn_idx].order + if ref_controlnets[cn_idx].any_attn_strength_to_apply(): + effective_strength = ref_controlnets[cn_idx].get_effective_attn_mask_or_float(x=n, channels=n.shape[2], is_mid=self.injection_holder.is_middle) + real_bank[idx] = real_bank[idx] * effective_strength + context_attn1 * (1-effective_strength) + n_uc = self.attn1.to_out(attn1_replace_patch[block_attn1]( + n, + self.attn1.to_k(torch.cat([context_attn1] + real_bank, dim=1)), + self.attn1.to_v(torch.cat([value_attn1] + real_bank, dim=1)), + extra_options)) + n_c = n_uc.clone() + if len(uc_idx_mask) > 0 and not math.isclose(style_fidelity, 0.0): + n_c[uc_idx_mask] = self.attn1.to_out(attn1_replace_patch[block_attn1]( + n[uc_idx_mask], + self.attn1.to_k(context_attn1[uc_idx_mask]), + self.attn1.to_v(value_attn1[uc_idx_mask]), + extra_options)) + n = style_fidelity * n_c + (1.0-style_fidelity) * n_uc + bank_styles.clean() + else: + context_attn1 = self.attn1.to_k(context_attn1) + value_attn1 = self.attn1.to_v(value_attn1) + n = attn1_replace_patch[block_attn1](n, context_attn1, value_attn1, extra_options) + n = self.attn1.to_out(n) + else: + # Reference CN READ - no attn1_replace_patch + if ref_machine_state == MachineState.READ and len(self.injection_holder.bank_styles.bank) > 0: + if context_attn1 is None: + context_attn1 = n + bank_styles = self.injection_holder.bank_styles + style_fidelity = bank_styles.get_avg_style_fidelity() + real_bank = bank_styles.bank.copy() + cn_idx = 0 + for idx, order in enumerate(bank_styles.cn_idx): + # make sure matching ref cn is selected + for i in range(cn_idx, len(ref_controlnets)): + if ref_controlnets[i].order == order: + cn_idx = i + break + assert order == ref_controlnets[cn_idx].order + if ref_controlnets[cn_idx].any_attn_strength_to_apply(): + effective_strength = ref_controlnets[cn_idx].get_effective_attn_mask_or_float(x=n, channels=n.shape[2], is_mid=self.injection_holder.is_middle) + real_bank[idx] = real_bank[idx] * effective_strength + context_attn1 * (1-effective_strength) + n_uc: Tensor = self.attn1( + n, + context=torch.cat([context_attn1] + real_bank, dim=1), + value=torch.cat([value_attn1] + real_bank, dim=1) if value_attn1 is not None else value_attn1) + n_c = n_uc.clone() + if len(uc_idx_mask) > 0 and not math.isclose(style_fidelity, 0.0): + n_c[uc_idx_mask] = self.attn1( + n[uc_idx_mask], + context=context_attn1[uc_idx_mask], + value=value_attn1[uc_idx_mask] if value_attn1 is not None else value_attn1) + n = style_fidelity * n_c + (1.0-style_fidelity) * n_uc + bank_styles.clean() + else: + n = self.attn1(n, context=context_attn1, value=value_attn1) + + if "attn1_output_patch" in transformer_patches: + patch = transformer_patches["attn1_output_patch"] + for p in patch: + n = p(n, extra_options) + + x += n + if "middle_patch" in transformer_patches: + patch = transformer_patches["middle_patch"] + for p in patch: + x = p(x, extra_options) + + if self.attn2 is not None: + n = self.norm2(x) + if self.switch_temporal_ca_to_sa: + context_attn2 = n + else: + 
context_attn2 = context + value_attn2 = None + if "attn2_patch" in transformer_patches: + patch = transformer_patches["attn2_patch"] + value_attn2 = context_attn2 + for p in patch: + n, context_attn2, value_attn2 = p(n, context_attn2, value_attn2, extra_options) + + attn2_replace_patch = transformer_patches_replace.get("attn2", {}) + block_attn2 = transformer_block + if block_attn2 not in attn2_replace_patch: + block_attn2 = block + + if block_attn2 in attn2_replace_patch: + if value_attn2 is None: + value_attn2 = context_attn2 + n = self.attn2.to_q(n) + context_attn2 = self.attn2.to_k(context_attn2) + value_attn2 = self.attn2.to_v(value_attn2) + n = attn2_replace_patch[block_attn2](n, context_attn2, value_attn2, extra_options) + n = self.attn2.to_out(n) + else: + n = self.attn2(n, context=context_attn2, value=value_attn2) + + if "attn2_output_patch" in transformer_patches: + patch = transformer_patches["attn2_output_patch"] + for p in patch: + n = p(n, extra_options) + + x += n + if self.is_res: + x_skip = x + x = self.ff(self.norm3(x)) + if self.is_res: + x += x_skip + + return x + + +class RefTimestepEmbedSequential(openaimodel.TimestepEmbedSequential): + injection_holder: InjectionTimestepEmbedSequentialHolder = None + +def forward_timestep_embed_ref_inject_factory(orig_timestep_embed_inject_factory: Callable): + def forward_timestep_embed_ref_inject(*args, **kwargs): + ts: RefTimestepEmbedSequential = args[0] + if not hasattr(ts, "injection_holder"): + return orig_timestep_embed_inject_factory(*args, **kwargs) + eps = 1e-6 + x: Tensor = orig_timestep_embed_inject_factory(*args, **kwargs) + y: Tensor = None + transformer_options: dict[str] = args[4] + # Reference CN stuff + uc_idx_mask = transformer_options.get(REF_UNCOND_IDXS, []) + c_idx_mask = transformer_options.get(REF_COND_IDXS, []) + # WRITE mode will only have one ReferenceAdvanced, other modes will have all ReferenceAdvanced + ref_controlnets: list[ReferenceAdvanced] = transformer_options.get(REF_ADAIN_CONTROL_LIST, None) + ref_machine_state: str = transformer_options.get(REF_ADAIN_MACHINE_STATE, None) + + # if in WRITE mode, save var, mean, and style_cfg + if ref_machine_state == MachineState.WRITE: + if ref_controlnets[0].ref_opts.adain_ref_weight > ts.injection_holder.gn_weight: + var, mean = torch.var_mean(x, dim=(2, 3), keepdim=True, correction=0) + ts.injection_holder.bank_styles.var_bank.append(var) + ts.injection_holder.bank_styles.mean_bank.append(mean) + ts.injection_holder.bank_styles.style_cfgs.append(ref_controlnets[0].ref_opts.adain_style_fidelity) + ts.injection_holder.bank_styles.cn_idx.append(ref_controlnets[0].order) + # if in READ mode, do math with saved var, mean, and style_cfg + if ref_machine_state == MachineState.READ: + if len(ts.injection_holder.bank_styles.var_bank) > 0: + bank_styles = ts.injection_holder.bank_styles + var, mean = torch.var_mean(x, dim=(2, 3), keepdim=True, correction=0) + std = torch.maximum(var, torch.zeros_like(var) + eps) ** 0.5 + y_uc = torch.zeros_like(x) + cn_idx = 0 + for idx, order in enumerate(bank_styles.cn_idx): + # make sure matching ref cn is selected + for i in range(cn_idx, len(ref_controlnets)): + if ref_controlnets[i].order == order: + cn_idx = i + break + assert order == ref_controlnets[cn_idx].order + style_fidelity = bank_styles.style_cfgs[idx] + var_acc = bank_styles.var_bank[idx] + mean_acc = bank_styles.mean_bank[idx] + std_acc = torch.maximum(var_acc, torch.zeros_like(var_acc) + eps) ** 0.5 + sub_y_uc = (((x - mean) / std) * std_acc) + mean_acc + if 
ref_controlnets[cn_idx].any_adain_strength_to_apply(): + effective_strength = ref_controlnets[cn_idx].get_effective_adain_mask_or_float(x=x) + sub_y_uc = sub_y_uc * effective_strength + x * (1-effective_strength) + y_uc += sub_y_uc + # get average, if more than one + if len(bank_styles.cn_idx) > 1: + y_uc /= len(bank_styles.cn_idx) + y_c = y_uc.clone() + if len(uc_idx_mask) > 0 and not math.isclose(style_fidelity, 0.0): + y_c[uc_idx_mask] = x.to(y_c.dtype)[uc_idx_mask] + y = style_fidelity * y_c + (1.0 - style_fidelity) * y_uc + ts.injection_holder.bank_styles.clean() + + if y is None: + y = x + return y.to(x.dtype) + + return forward_timestep_embed_ref_inject + +# DFS Search for Torch.nn.Module, Written by Lvmin +def torch_dfs(model: torch.nn.Module): + result = [model] + for child in model.children(): + result += torch_dfs(child) + return result diff --git a/ComfyUI-Advanced-ControlNet/adv_control/control_sparsectrl.py b/ComfyUI-Advanced-ControlNet/adv_control/control_sparsectrl.py new file mode 100644 index 0000000000000000000000000000000000000000..60ffbccad3553f111f3774fe60ce1e8dc657f511 --- /dev/null +++ b/ComfyUI-Advanced-ControlNet/adv_control/control_sparsectrl.py @@ -0,0 +1,949 @@ +#taken from: https://github.com/lllyasviel/ControlNet +#and modified +#and then taken from comfy/cldm/cldm.py and modified again + +from abc import ABC, abstractmethod +import math +import numpy as np +from typing import Iterable, Union +import torch +import torch as th +import torch.nn as nn +from torch import Tensor +from einops import rearrange, repeat + +from comfy.ldm.modules.diffusionmodules.util import ( + zero_module, + timestep_embedding, +) + +from comfy.cli_args import args +from comfy.cldm.cldm import ControlNet as ControlNetCLDM +from comfy.ldm.modules.attention import SpatialTransformer +from comfy.ldm.modules.attention import attention_basic, attention_pytorch, attention_split, attention_sub_quad, default +from comfy.ldm.modules.attention import FeedForward, SpatialTransformer +from comfy.ldm.modules.diffusionmodules.openaimodel import TimestepEmbedSequential, ResBlock, Downsample +from comfy.model_patcher import ModelPatcher +from comfy.controlnet import broadcast_image_to +from comfy.utils import repeat_to_batch_size +import comfy.ops +import comfy.model_management + +from .utils import TimestepKeyframeGroup, disable_weight_init_clean_groupnorm, prepare_mask_batch + + +# until xformers bug is fixed, do not use xformers for VersatileAttention! 
TODO: change this when fix is out +# logic for choosing optimized_attention method taken from comfy/ldm/modules/attention.py +optimized_attention_mm = attention_basic +if comfy.model_management.xformers_enabled(): + pass + #optimized_attention_mm = attention_xformers +if comfy.model_management.pytorch_attention_enabled(): + optimized_attention_mm = attention_pytorch +else: + if args.use_split_cross_attention: + optimized_attention_mm = attention_split + else: + optimized_attention_mm = attention_sub_quad + + +class SparseControlNet(ControlNetCLDM): + def __init__(self, *args,**kwargs): + super().__init__(*args, **kwargs) + hint_channels = kwargs.get("hint_channels") + operations: disable_weight_init_clean_groupnorm = kwargs.get("operations", disable_weight_init_clean_groupnorm) + device = kwargs.get("device", None) + self.use_simplified_conditioning_embedding = kwargs.get("use_simplified_conditioning_embedding", False) + if self.use_simplified_conditioning_embedding: + self.input_hint_block = TimestepEmbedSequential( + zero_module(operations.conv_nd(self.dims, hint_channels, self.model_channels, 3, padding=1, dtype=self.dtype, device=device)), + ) + self.motion_wrapper: SparseCtrlMotionWrapper = None + + def set_actual_length(self, actual_length: int, full_length: int): + if self.motion_wrapper is not None: + self.motion_wrapper.set_video_length(video_length=actual_length, full_length=full_length) + + def forward(self, x: Tensor, hint: Tensor, timesteps, context, y=None, **kwargs): + t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False).to(x.dtype) + emb = self.time_embed(t_emb) + + # SparseCtrl sets noisy input to zeros + x = torch.zeros_like(x) + guided_hint = self.input_hint_block(hint, emb, context) + + outs = [] + + hs = [] + if self.num_classes is not None: + assert y.shape[0] == x.shape[0] + emb = emb + self.label_emb(y) + + h = x + for module, zero_conv in zip(self.input_blocks, self.zero_convs): + if guided_hint is not None: + h = module(h, emb, context) + h += guided_hint + guided_hint = None + else: + h = module(h, emb, context) + outs.append(zero_conv(h, emb, context)) + + h = self.middle_block(h, emb, context) + outs.append(self.middle_block_out(h, emb, context)) + + return outs + + +class SparseModelPatcher(ModelPatcher): + def __init__(self, *args, **kwargs): + self.model: SparseControlNet + super().__init__(*args, **kwargs) + + def patch_model(self, device_to=None, patch_weights=True): + if patch_weights: + patched_model = super().patch_model(device_to) + else: + patched_model = super().patch_model(device_to, patch_weights) + try: + if self.model.motion_wrapper is not None: + self.model.motion_wrapper.to(device=device_to) + except Exception: + pass + return patched_model + + def unpatch_model(self, device_to=None, unpatch_weights=True): + try: + if self.model.motion_wrapper is not None: + self.model.motion_wrapper.to(device=device_to) + except Exception: + pass + if unpatch_weights: + return super().unpatch_model(device_to) + else: + return super().unpatch_model(device_to, unpatch_weights) + + def clone(self): + # normal ModelPatcher clone actions + n = SparseModelPatcher(self.model, self.load_device, self.offload_device, self.size, self.current_device, weight_inplace_update=self.weight_inplace_update) + n.patches = {} + for k in self.patches: + n.patches[k] = self.patches[k][:] + if hasattr(n, "patches_uuid"): + self.patches_uuid = n.patches_uuid + + n.object_patches = self.object_patches.copy() + n.model_options = copy.deepcopy(self.model_options) 
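SparseControlNet.forward above zeroes the noisy input and folds the conditioning hint into the hidden states of the first input block only, clearing guided_hint afterwards so the remaining blocks run untouched. A reduced sketch of that one-shot injection pattern, with placeholder callables standing in for the real TimestepEmbedSequential blocks and zero convolutions:

import torch

def run_input_blocks(h: torch.Tensor, guided_hint, blocks, zero_convs):
    outs = []
    for block, zero_conv in zip(blocks, zero_convs):
        h = block(h)
        if guided_hint is not None:
            h = h + guided_hint  # hint is added exactly once
            guided_hint = None
        outs.append(zero_conv(h))
    return outs

# toy usage: two stand-in blocks and identity "zero convs"
outs = run_input_blocks(torch.zeros(1, 4), torch.ones(1, 4),
                        blocks=[lambda t: t * 2, lambda t: t + 1],
                        zero_convs=[lambda t: t, lambda t: t])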
+ n.model_keys = self.model_keys + if hasattr(n, "backup"): + self.backup = n.backup + if hasattr(n, "object_patches_backup"): + self.object_patches_backup = n.object_patches_backup + return n + + +class PreprocSparseRGBWrapper: + error_msg = "Invalid use of RGB SparseCtrl output. The output of RGB SparseCtrl preprocessor is NOT a usual image, but a latent pretending to be an image - you must connect the output directly to an Apply ControlNet node (advanced or otherwise). It cannot be used for anything else that accepts IMAGE input." + def __init__(self, condhint: Tensor): + self.condhint = condhint + + def movedim(self, *args, **kwargs): + return self + + def __getattr__(self, *args, **kwargs): + raise AttributeError(self.error_msg) + + def __setattr__(self, name, value): + if name != "condhint": + raise AttributeError(self.error_msg) + super().__setattr__(name, value) + + def __iter__(self, *args, **kwargs): + raise AttributeError(self.error_msg) + + def __next__(self, *args, **kwargs): + raise AttributeError(self.error_msg) + + def __len__(self, *args, **kwargs): + raise AttributeError(self.error_msg) + + def __getitem__(self, *args, **kwargs): + raise AttributeError(self.error_msg) + + def __setitem__(self, *args, **kwargs): + raise AttributeError(self.error_msg) + + +class SparseSettings: + def __init__(self, sparse_method: 'SparseMethod', use_motion: bool=True, motion_strength=1.0, motion_scale=1.0, merged=False): + self.sparse_method = sparse_method + self.use_motion = use_motion + self.motion_strength = motion_strength + self.motion_scale = motion_scale + self.merged = merged + + @classmethod + def default(cls): + return SparseSettings(sparse_method=SparseSpreadMethod(), use_motion=True) + + +class SparseMethod(ABC): + SPREAD = "spread" + INDEX = "index" + def __init__(self, method: str): + self.method = method + + @abstractmethod + def get_indexes(self, hint_length: int, full_length: int) -> list[int]: + pass + + +class SparseSpreadMethod(SparseMethod): + UNIFORM = "uniform" + STARTING = "starting" + ENDING = "ending" + CENTER = "center" + + LIST = [UNIFORM, STARTING, ENDING, CENTER] + + def __init__(self, spread=UNIFORM): + super().__init__(self.SPREAD) + self.spread = spread + + def get_indexes(self, hint_length: int, full_length: int) -> list[int]: + # if hint_length >= full_length, limit hints to full_length + if hint_length >= full_length: + return list(range(full_length)) + # handle special case of 1 hint image + if hint_length == 1: + if self.spread in [self.UNIFORM, self.STARTING]: + return [0] + elif self.spread == self.ENDING: + return [full_length-1] + elif self.spread == self.CENTER: + # return second (of three) values as the center + return [np.linspace(0, full_length-1, 3, endpoint=True, dtype=int)[1]] + else: + raise ValueError(f"Unrecognized spread: {self.spread}") + # otherwise, handle other cases + if self.spread == self.UNIFORM: + return list(np.linspace(0, full_length-1, hint_length, endpoint=True, dtype=int)) + elif self.spread == self.STARTING: + # make split 1 larger, remove last element + return list(np.linspace(0, full_length-1, hint_length+1, endpoint=True, dtype=int))[:-1] + elif self.spread == self.ENDING: + # make split 1 larger, remove first element + return list(np.linspace(0, full_length-1, hint_length+1, endpoint=True, dtype=int))[1:] + elif self.spread == self.CENTER: + # if hint length is not 3 greater than full length, do STARTING behavior + if full_length-hint_length < 3: + return list(np.linspace(0, full_length-1, hint_length+1, endpoint=True,
dtype=int))[:-1] + # otherwise, get linspace of 2 greater than needed, then cut off first and last + return list(np.linspace(0, full_length-1, hint_length+2, endpoint=True, dtype=int))[1:-1] + raise ValueError(f"Unrecognized spread: {self.spread}") + + +class SparseIndexMethod(SparseMethod): + def __init__(self, idxs: list[int]): + super().__init__(self.INDEX) + self.idxs = idxs + + def get_indexes(self, hint_length: int, full_length: int) -> list[int]: + orig_hint_length = hint_length + if hint_length > full_length: + hint_length = full_length + # if idxs is less than hint_length, throw error + if len(self.idxs) < hint_length: + err_msg = f"There are not enough indexes ({len(self.idxs)}) provided to fit the usable {hint_length} input images." + if orig_hint_length != hint_length: + err_msg = f"{err_msg} (original input images: {orig_hint_length})" + raise ValueError(err_msg) + # cap idxs to hint_length + idxs = self.idxs[:hint_length] + new_idxs = [] + real_idxs = set() + for idx in idxs: + if idx < 0: + real_idx = full_length+idx + if real_idx in real_idxs: + raise ValueError(f"Index '{idx}' maps to '{real_idx}' and is duplicate - indexes in Sparse Index Method must be unique.") + else: + real_idx = idx + if real_idx in real_idxs: + raise ValueError(f"Index '{idx}' is duplicate (or a negative index is equivalent) - indexes in Sparse Index Method must be unique.") + real_idxs.add(real_idx) + new_idxs.append(real_idx) + return new_idxs + + +######################################### +# motion-related portion of controlnet +class BlockType: + UP = "up" + DOWN = "down" + MID = "mid" + +def get_down_block_max(mm_state_dict: dict[str, Tensor]) -> int: + return get_block_max(mm_state_dict, "down_blocks") + +def get_up_block_max(mm_state_dict: dict[str, Tensor]) -> int: + return get_block_max(mm_state_dict, "up_blocks") + +def get_block_max(mm_state_dict: dict[str, Tensor], block_name: str) -> int: + # keep track of biggest down_block count in module + biggest_block = -1 + for key in mm_state_dict.keys(): + if block_name in key: + try: + block_int = key.split(".")[1] + block_num = int(block_int) + if block_num > biggest_block: + biggest_block = block_num + except ValueError: + pass + return biggest_block + +def has_mid_block(mm_state_dict: dict[str, Tensor]): + # check if keys contain mid_block + for key in mm_state_dict.keys(): + if key.startswith("mid_block."): + return True + return False + +def get_position_encoding_max_len(mm_state_dict: dict[str, Tensor], mm_name: str=None) -> int: + # use pos_encoder.pe entries to determine max length - [1, {max_length}, {320|640|1280}] + for key in mm_state_dict.keys(): + if key.endswith("pos_encoder.pe"): + return mm_state_dict[key].size(1) # get middle dim + raise ValueError(f"No pos_encoder.pe found in SparseCtrl state_dict - {mm_name} is not a valid SparseCtrl model!") + + +class SparseCtrlMotionWrapper(nn.Module): + def __init__(self, mm_state_dict: dict[str, Tensor]): + super().__init__() + self.down_blocks: Iterable[MotionModule] = None + self.up_blocks: Iterable[MotionModule] = None + self.mid_block: MotionModule = None + self.encoding_max_len = get_position_encoding_max_len(mm_state_dict, "") + layer_channels = (320, 640, 1280, 1280) + if get_down_block_max(mm_state_dict) > -1: + self.down_blocks = nn.ModuleList([]) + for c in layer_channels: + self.down_blocks.append(MotionModule(c, temporal_position_encoding_max_len=self.encoding_max_len, block_type=BlockType.DOWN)) + if get_up_block_max(mm_state_dict) > -1: + self.up_blocks =
nn.ModuleList([]) + for c in reversed(layer_channels): + self.up_blocks.append(MotionModule(c, temporal_position_encoding_max_len=self.encoding_max_len, block_type=BlockType.UP)) + if has_mid_block(mm_state_dict): + self.mid_block = MotionModule(1280, temporal_position_encoding_max_len=self.encoding_max_len, block_type=BlockType.MID) + + def inject(self, unet: SparseControlNet): + # inject input (down) blocks + self._inject(unet.input_blocks, self.down_blocks) + # inject mid block, if present + if self.mid_block is not None: + self._inject([unet.middle_block], [self.mid_block]) + unet.motion_wrapper = self + + def _inject(self, unet_blocks: nn.ModuleList, mm_blocks: nn.ModuleList): + # Rules for injection: + # For each component list in a unet block: + # if SpatialTransformer exists in list, place next block after last occurrence + # elif ResBlock exists in list, place next block after first occurrence + # else don't place block + injection_count = 0 + unet_idx = 0 + # details about blocks passed in + per_block = len(mm_blocks[0].motion_modules) + injection_goal = len(mm_blocks) * per_block + # only stop injecting when modules exhausted + while injection_count < injection_goal: + # figure out which VanillaTemporalModule from mm to inject + mm_blk_idx, mm_vtm_idx = injection_count // per_block, injection_count % per_block + # figure out layout of unet block components + st_idx = -1 # SpatialTransformer index + res_idx = -1 # first ResBlock index + # first, figure out indeces of relevant blocks + for idx, component in enumerate(unet_blocks[unet_idx]): + if type(component) == SpatialTransformer: + st_idx = idx + elif type(component).__name__ == "ResBlock" and res_idx < 0: + res_idx = idx + # if SpatialTransformer exists, inject right after + if st_idx >= 0: + unet_blocks[unet_idx].insert(st_idx+1, mm_blocks[mm_blk_idx].motion_modules[mm_vtm_idx]) + injection_count += 1 + # otherwise, if only ResBlock exists, inject right after + elif res_idx >= 0: + unet_blocks[unet_idx].insert(res_idx+1, mm_blocks[mm_blk_idx].motion_modules[mm_vtm_idx]) + injection_count += 1 + # increment unet_idx + unet_idx += 1 + + def eject(self, unet: SparseControlNet): + # remove from input blocks (downblocks) + self._eject(unet.input_blocks) + # remove from middle block (encapsulate in list to make compatible) + self._eject([unet.middle_block]) + del unet.motion_wrapper + unet.motion_wrapper = None + + def _eject(self, unet_blocks: nn.ModuleList): + # eject all VanillaTemporalModule objects from all blocks + for block in unet_blocks: + idx_to_pop = [] + for idx, component in enumerate(block): + if type(component) == VanillaTemporalModule: + idx_to_pop.append(idx) + # pop in backwards order, as to not disturb what the indeces refer to + for idx in sorted(idx_to_pop, reverse=True): + block.pop(idx) + + def set_video_length(self, video_length: int, full_length: int): + self.AD_video_length = video_length + if self.down_blocks is not None: + for block in self.down_blocks: + block.set_video_length(video_length, full_length) + if self.up_blocks is not None: + for block in self.up_blocks: + block.set_video_length(video_length, full_length) + if self.mid_block is not None: + self.mid_block.set_video_length(video_length, full_length) + + def set_scale_multiplier(self, multiplier: Union[float, None]): + if self.down_blocks is not None: + for block in self.down_blocks: + block.set_scale_multiplier(multiplier) + if self.up_blocks is not None: + for block in self.up_blocks: + block.set_scale_multiplier(multiplier) + if 
self.mid_block is not None: + self.mid_block.set_scale_multiplier(multiplier) + + def set_strength(self, strength: float): + if self.down_blocks is not None: + for block in self.down_blocks: + block.set_strength(strength) + if self.up_blocks is not None: + for block in self.up_blocks: + block.set_strength(strength) + if self.mid_block is not None: + self.mid_block.set_strength(strength) + + def reset_temp_vars(self): + if self.down_blocks is not None: + for block in self.down_blocks: + block.reset_temp_vars() + if self.up_blocks is not None: + for block in self.up_blocks: + block.reset_temp_vars() + if self.mid_block is not None: + self.mid_block.reset_temp_vars() + + def reset_scale_multiplier(self): + self.set_scale_multiplier(None) + + def reset(self): + self.reset_scale_multiplier() + self.reset_temp_vars() + + +class MotionModule(nn.Module): + def __init__(self, in_channels, temporal_position_encoding_max_len=24, block_type: str=BlockType.DOWN): + super().__init__() + if block_type == BlockType.MID: + # mid blocks contain only a single VanillaTemporalModule + self.motion_modules: Iterable[VanillaTemporalModule] = nn.ModuleList([get_motion_module(in_channels, temporal_position_encoding_max_len)]) + else: + # down blocks contain two VanillaTemporalModules + self.motion_modules: Iterable[VanillaTemporalModule] = nn.ModuleList( + [ + get_motion_module(in_channels, temporal_position_encoding_max_len), + get_motion_module(in_channels, temporal_position_encoding_max_len) + ] + ) + # up blocks contain one additional VanillaTemporalModule + if block_type == BlockType.UP: + self.motion_modules.append(get_motion_module(in_channels, temporal_position_encoding_max_len)) + + def set_video_length(self, video_length: int, full_length: int): + for motion_module in self.motion_modules: + motion_module.set_video_length(video_length, full_length) + + def set_scale_multiplier(self, multiplier: Union[float, None]): + for motion_module in self.motion_modules: + motion_module.set_scale_multiplier(multiplier) + + def set_masks(self, masks: Tensor, min_val: float, max_val: float): + for motion_module in self.motion_modules: + motion_module.set_masks(masks, min_val, max_val) + + def set_sub_idxs(self, sub_idxs: list[int]): + for motion_module in self.motion_modules: + motion_module.set_sub_idxs(sub_idxs) + + def set_strength(self, strength: float): + for motion_module in self.motion_modules: + motion_module.set_strength(strength) + + def reset_temp_vars(self): + for motion_module in self.motion_modules: + motion_module.reset_temp_vars() + + +def get_motion_module(in_channels, temporal_position_encoding_max_len): + # unlike normal AD, there is only one attention block expected in SparseCtrl models + return VanillaTemporalModule(in_channels=in_channels, attention_block_types=("Temporal_Self",), temporal_position_encoding_max_len=temporal_position_encoding_max_len) + + +class VanillaTemporalModule(nn.Module): + def __init__( + self, + in_channels, + num_attention_heads=8, + num_transformer_block=1, + attention_block_types=("Temporal_Self", "Temporal_Self"), + cross_frame_attention_mode=None, + temporal_position_encoding=True, + temporal_position_encoding_max_len=24, + temporal_attention_dim_div=1, + zero_initialize=True, + ): + super().__init__() + self.strength = 1.0 + self.temporal_transformer = TemporalTransformer3DModel( + in_channels=in_channels, + num_attention_heads=num_attention_heads, + attention_head_dim=in_channels + // num_attention_heads + // temporal_attention_dim_div, + 
num_layers=num_transformer_block, + attention_block_types=attention_block_types, + cross_frame_attention_mode=cross_frame_attention_mode, + temporal_position_encoding=temporal_position_encoding, + temporal_position_encoding_max_len=temporal_position_encoding_max_len, + ) + + if zero_initialize: + self.temporal_transformer.proj_out = zero_module( + self.temporal_transformer.proj_out + ) + + def set_video_length(self, video_length: int, full_length: int): + self.temporal_transformer.set_video_length(video_length, full_length) + + def set_scale_multiplier(self, multiplier: Union[float, None]): + self.temporal_transformer.set_scale_multiplier(multiplier) + + def set_masks(self, masks: Tensor, min_val: float, max_val: float): + self.temporal_transformer.set_masks(masks, min_val, max_val) + + def set_sub_idxs(self, sub_idxs: list[int]): + self.temporal_transformer.set_sub_idxs(sub_idxs) + + def set_strength(self, strength: float): + self.strength = strength + + def reset_temp_vars(self): + self.set_strength(1.0) + self.temporal_transformer.reset_temp_vars() + + def forward(self, input_tensor, encoder_hidden_states=None, attention_mask=None): + if math.isclose(self.strength, 1.0): + return self.temporal_transformer(input_tensor, encoder_hidden_states, attention_mask) + elif math.isclose(self.strength, 0.0): + return input_tensor + elif self.strength > 1.0: + return self.temporal_transformer(input_tensor, encoder_hidden_states, attention_mask)*self.strength + else: + return self.temporal_transformer(input_tensor, encoder_hidden_states, attention_mask)*self.strength + input_tensor*(1.0-self.strength) + + +class TemporalTransformer3DModel(nn.Module): + def __init__( + self, + in_channels, + num_attention_heads, + attention_head_dim, + num_layers, + attention_block_types=( + "Temporal_Self", + "Temporal_Self", + ), + dropout=0.0, + norm_num_groups=32, + cross_attention_dim=768, + activation_fn="geglu", + attention_bias=False, + upcast_attention=False, + cross_frame_attention_mode=None, + temporal_position_encoding=False, + temporal_position_encoding_max_len=24, + ): + super().__init__() + self.video_length = 16 + self.full_length = 16 + self.scale_min = 1.0 + self.scale_max = 1.0 + self.raw_scale_mask: Union[Tensor, None] = None + self.temp_scale_mask: Union[Tensor, None] = None + self.sub_idxs: Union[list[int], None] = None + self.prev_hidden_states_batch = 0 + + + inner_dim = num_attention_heads * attention_head_dim + + self.norm = disable_weight_init_clean_groupnorm.GroupNorm( + num_groups=norm_num_groups, num_channels=in_channels, eps=1e-6, affine=True + ) + self.proj_in = nn.Linear(in_channels, inner_dim) + + self.transformer_blocks: Iterable[TemporalTransformerBlock] = nn.ModuleList( + [ + TemporalTransformerBlock( + dim=inner_dim, + num_attention_heads=num_attention_heads, + attention_head_dim=attention_head_dim, + attention_block_types=attention_block_types, + dropout=dropout, + norm_num_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim, + activation_fn=activation_fn, + attention_bias=attention_bias, + upcast_attention=upcast_attention, + cross_frame_attention_mode=cross_frame_attention_mode, + temporal_position_encoding=temporal_position_encoding, + temporal_position_encoding_max_len=temporal_position_encoding_max_len, + ) + for d in range(num_layers) + ] + ) + self.proj_out = nn.Linear(inner_dim, in_channels) + + def set_video_length(self, video_length: int, full_length: int): + self.video_length = video_length + self.full_length = full_length + + def 
set_scale_multiplier(self, multiplier: Union[float, None]): + for block in self.transformer_blocks: + block.set_scale_multiplier(multiplier) + + def set_masks(self, masks: Tensor, min_val: float, max_val: float): + self.scale_min = min_val + self.scale_max = max_val + self.raw_scale_mask = masks + + def set_sub_idxs(self, sub_idxs: list[int]): + self.sub_idxs = sub_idxs + for block in self.transformer_blocks: + block.set_sub_idxs(sub_idxs) + + def reset_temp_vars(self): + del self.temp_scale_mask + self.temp_scale_mask = None + self.prev_hidden_states_batch = 0 + + def get_scale_mask(self, hidden_states: Tensor) -> Union[Tensor, None]: + # if no raw mask, return None + if self.raw_scale_mask is None: + return None + shape = hidden_states.shape + batch, channel, height, width = shape + # if temp mask already calculated, return it + if self.temp_scale_mask != None: + # check if hidden_states batch matches + if batch == self.prev_hidden_states_batch: + if self.sub_idxs is not None: + return self.temp_scale_mask[:, self.sub_idxs, :] + return self.temp_scale_mask + # if does not match, reset cached temp_scale_mask and recalculate it + del self.temp_scale_mask + self.temp_scale_mask = None + # otherwise, calculate temp mask + self.prev_hidden_states_batch = batch + mask = prepare_mask_batch(self.raw_scale_mask, shape=(self.full_length, 1, height, width)) + mask = repeat_to_batch_size(mask, self.full_length) + # if mask not the same amount length as full length, make it match + if self.full_length != mask.shape[0]: + mask = broadcast_image_to(mask, self.full_length, 1) + # reshape mask to attention K shape (h*w, latent_count, 1) + batch, channel, height, width = mask.shape + # first, perform same operations as on hidden_states, + # turning (b, c, h, w) -> (b, h*w, c) + mask = mask.permute(0, 2, 3, 1).reshape(batch, height*width, channel) + # then, make it the same shape as attention's k, (h*w, b, c) + mask = mask.permute(1, 0, 2) + # make masks match the expected length of h*w + batched_number = shape[0] // self.video_length + if batched_number > 1: + mask = torch.cat([mask] * batched_number, dim=0) + # cache mask and set to proper device + self.temp_scale_mask = mask + # move temp_scale_mask to proper dtype + device + self.temp_scale_mask = self.temp_scale_mask.to(dtype=hidden_states.dtype, device=hidden_states.device) + # return subset of masks, if needed + if self.sub_idxs is not None: + return self.temp_scale_mask[:, self.sub_idxs, :] + return self.temp_scale_mask + + def forward(self, hidden_states, encoder_hidden_states=None, attention_mask=None): + batch, channel, height, width = hidden_states.shape + residual = hidden_states + scale_mask = self.get_scale_mask(hidden_states) + # add some casts for fp8 purposes - does not affect speed otherwise + hidden_states = self.norm(hidden_states).to(hidden_states.dtype) + inner_dim = hidden_states.shape[1] + hidden_states = hidden_states.permute(0, 2, 3, 1).reshape( + batch, height * width, inner_dim + ) + hidden_states = self.proj_in(hidden_states).to(hidden_states.dtype) + + # Transformer Blocks + for block in self.transformer_blocks: + hidden_states = block( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + video_length=self.video_length, + scale_mask=scale_mask + ) + + # output + hidden_states = self.proj_out(hidden_states) + hidden_states = ( + hidden_states.reshape(batch, height, width, inner_dim) + .permute(0, 3, 1, 2) + .contiguous() + ) + + output = hidden_states + residual + + return output + 
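Both get_scale_mask and the forward above move between the (batch, channels, height, width) layout of UNet feature maps and the (batch, tokens, channels) layout the attention blocks consume. A self-contained sketch of that round trip; the shapes are arbitrary and the helper names are illustrative:

import torch

def to_tokens(x: torch.Tensor) -> torch.Tensor:
    # (b, c, h, w) -> (b, h*w, c), matching the permute/reshape used above
    b, c, h, w = x.shape
    return x.permute(0, 2, 3, 1).reshape(b, h * w, c)

def to_spatial(tokens: torch.Tensor, h: int, w: int) -> torch.Tensor:
    # (b, h*w, c) -> (b, c, h, w), the inverse applied after proj_out
    b, _, c = tokens.shape
    return tokens.reshape(b, h, w, c).permute(0, 3, 1, 2).contiguous()

x = torch.randn(2, 320, 8, 8)
assert torch.equal(to_spatial(to_tokens(x), 8, 8), x)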
+ +class TemporalTransformerBlock(nn.Module): + def __init__( + self, + dim, + num_attention_heads, + attention_head_dim, + attention_block_types=( + "Temporal_Self", + "Temporal_Self", + ), + dropout=0.0, + norm_num_groups=32, + cross_attention_dim=768, + activation_fn="geglu", + attention_bias=False, + upcast_attention=False, + cross_frame_attention_mode=None, + temporal_position_encoding=False, + temporal_position_encoding_max_len=24, + ): + super().__init__() + + attention_blocks = [] + norms = [] + + for block_name in attention_block_types: + attention_blocks.append( + VersatileAttention( + attention_mode=block_name.split("_")[0], + context_dim=cross_attention_dim # called context_dim for ComfyUI impl + if block_name.endswith("_Cross") + else None, + query_dim=dim, + heads=num_attention_heads, + dim_head=attention_head_dim, + dropout=dropout, + #bias=attention_bias, # remove for Comfy CrossAttention + #upcast_attention=upcast_attention, # remove for Comfy CrossAttention + cross_frame_attention_mode=cross_frame_attention_mode, + temporal_position_encoding=temporal_position_encoding, + temporal_position_encoding_max_len=temporal_position_encoding_max_len, + ) + ) + norms.append(nn.LayerNorm(dim)) + + self.attention_blocks: Iterable[VersatileAttention] = nn.ModuleList(attention_blocks) + self.norms = nn.ModuleList(norms) + + self.ff = FeedForward(dim, dropout=dropout, glu=(activation_fn == "geglu")) + self.ff_norm = nn.LayerNorm(dim) + + def set_scale_multiplier(self, multiplier: Union[float, None]): + for block in self.attention_blocks: + block.set_scale_multiplier(multiplier) + + def set_sub_idxs(self, sub_idxs: list[int]): + for block in self.attention_blocks: + block.set_sub_idxs(sub_idxs) + + def forward( + self, + hidden_states, + encoder_hidden_states=None, + attention_mask=None, + video_length=None, + scale_mask=None + ): + for attention_block, norm in zip(self.attention_blocks, self.norms): + norm_hidden_states = norm(hidden_states).to(hidden_states.dtype) + hidden_states = ( + attention_block( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states + if attention_block.is_cross_attention + else None, + attention_mask=attention_mask, + video_length=video_length, + scale_mask=scale_mask + ) + + hidden_states + ) + + hidden_states = self.ff(self.ff_norm(hidden_states)) + hidden_states + + output = hidden_states + return output + + +class PositionalEncoding(nn.Module): + def __init__(self, d_model, dropout=0.0, max_len=24): + super().__init__() + self.dropout = nn.Dropout(p=dropout) + position = torch.arange(max_len).unsqueeze(1) + div_term = torch.exp( + torch.arange(0, d_model, 2) * (-math.log(10000.0) / d_model) + ) + pe = torch.zeros(1, max_len, d_model) + pe[0, :, 0::2] = torch.sin(position * div_term) + pe[0, :, 1::2] = torch.cos(position * div_term) + self.register_buffer("pe", pe) + self.sub_idxs = None + + def set_sub_idxs(self, sub_idxs: list[int]): + self.sub_idxs = sub_idxs + + def forward(self, x): + #if self.sub_idxs is not None: + # x = x + self.pe[:, self.sub_idxs] + #else: + x = x + self.pe[:, : x.size(1)] + return self.dropout(x) + + +class CrossAttentionMM(nn.Module): + def __init__(self, query_dim, context_dim=None, heads=8, dim_head=64, dropout=0., dtype=None, device=None, + operations=comfy.ops.disable_weight_init): + super().__init__() + inner_dim = dim_head * heads + context_dim = default(context_dim, query_dim) + + self.heads = heads + self.dim_head = dim_head + self.scale = None + + self.to_q = operations.Linear(query_dim, inner_dim, 
bias=False, dtype=dtype, device=device) + self.to_k = operations.Linear(context_dim, inner_dim, bias=False, dtype=dtype, device=device) + self.to_v = operations.Linear(context_dim, inner_dim, bias=False, dtype=dtype, device=device) + + self.to_out = nn.Sequential(operations.Linear(inner_dim, query_dim, dtype=dtype, device=device), nn.Dropout(dropout)) + + def forward(self, x, context=None, value=None, mask=None, scale_mask=None): + q = self.to_q(x) + context = default(context, x) + k: Tensor = self.to_k(context) + if value is not None: + v = self.to_v(value) + del value + else: + v = self.to_v(context) + + # apply custom scale by multiplying k by scale factor + if self.scale is not None: + k *= self.scale + + # apply scale mask, if present + if scale_mask is not None: + k *= scale_mask + + out = optimized_attention_mm(q, k, v, self.heads, mask) + return self.to_out(out) + + +class VersatileAttention(CrossAttentionMM): + def __init__( + self, + attention_mode=None, + cross_frame_attention_mode=None, + temporal_position_encoding=False, + temporal_position_encoding_max_len=24, + *args, + **kwargs, + ): + super().__init__(*args, **kwargs) + assert attention_mode == "Temporal" + + self.attention_mode = attention_mode + self.is_cross_attention = kwargs["context_dim"] is not None + + self.pos_encoder = ( + PositionalEncoding( + kwargs["query_dim"], + dropout=0.0, + max_len=temporal_position_encoding_max_len, + ) + if (temporal_position_encoding and attention_mode == "Temporal") + else None + ) + + def extra_repr(self): + return f"(Module Info) Attention_Mode: {self.attention_mode}, Is_Cross_Attention: {self.is_cross_attention}" + + def set_scale_multiplier(self, multiplier: Union[float, None]): + if multiplier is None or math.isclose(multiplier, 1.0): + self.scale = None + else: + self.scale = multiplier + + def set_sub_idxs(self, sub_idxs: list[int]): + if self.pos_encoder != None: + self.pos_encoder.set_sub_idxs(sub_idxs) + + def forward( + self, + hidden_states: Tensor, + encoder_hidden_states=None, + attention_mask=None, + video_length=None, + scale_mask=None, + ): + if self.attention_mode != "Temporal": + raise NotImplementedError + + d = hidden_states.shape[1] + hidden_states = rearrange( + hidden_states, "(b f) d c -> (b d) f c", f=video_length + ) + + if self.pos_encoder is not None: + hidden_states = self.pos_encoder(hidden_states).to(hidden_states.dtype) + + encoder_hidden_states = ( + repeat(encoder_hidden_states, "b n c -> (b d) n c", d=d) + if encoder_hidden_states is not None + else encoder_hidden_states + ) + + hidden_states = super().forward( + hidden_states, + encoder_hidden_states, + value=None, + mask=attention_mask, + scale_mask=scale_mask, + ) + + hidden_states = rearrange(hidden_states, "(b d) f c -> (b f) d c", d=d) + + return hidden_states diff --git a/ComfyUI-Advanced-ControlNet/adv_control/control_svd.py b/ComfyUI-Advanced-ControlNet/adv_control/control_svd.py new file mode 100644 index 0000000000000000000000000000000000000000..9458f6aec65661f0301b4ae87eb37e8180eefb05 --- /dev/null +++ b/ComfyUI-Advanced-ControlNet/adv_control/control_svd.py @@ -0,0 +1,517 @@ +import torch +import torch.nn as nn +from torch import Tensor + +import comfy.model_detection +from comfy.utils import UNET_MAP_BASIC, UNET_MAP_RESNET, UNET_MAP_ATTENTIONS, TRANSFORMER_BLOCKS + +import torch + + +from comfy.ldm.modules.diffusionmodules.util import ( + zero_module, + timestep_embedding, +) + +from comfy.ldm.modules.attention import SpatialVideoTransformer +from 
comfy.ldm.modules.diffusionmodules.openaimodel import UNetModel, TimestepEmbedSequential, VideoResBlock, Downsample +from comfy.ldm.util import exists +import comfy.ops + + +class SVDControlNet(nn.Module): + def __init__( + self, + image_size, + in_channels, + model_channels, + hint_channels, + num_res_blocks, + dropout=0, + channel_mult=(1, 2, 4, 8), + conv_resample=True, + dims=2, + num_classes=None, + use_checkpoint=False, + dtype=torch.float32, + num_heads=-1, + num_head_channels=-1, + num_heads_upsample=-1, + use_scale_shift_norm=False, + resblock_updown=False, + use_new_attention_order=False, + use_spatial_transformer=False, # custom transformer support + transformer_depth=1, # custom transformer support + context_dim=None, # custom transformer support + n_embed=None, # custom support for prediction of discrete ids into codebook of first stage vq model + legacy=True, + disable_self_attentions=None, + num_attention_blocks=None, + disable_middle_self_attn=False, + use_linear_in_transformer=False, + adm_in_channels=None, + transformer_depth_middle=None, + transformer_depth_output=None, + use_spatial_context=False, + extra_ff_mix_layer=False, + merge_strategy="fixed", + merge_factor=0.5, + video_kernel_size=3, + device=None, + operations=comfy.ops.disable_weight_init, + **kwargs, + ): + super().__init__() + assert use_spatial_transformer == True, "use_spatial_transformer has to be true" + if use_spatial_transformer: + assert context_dim is not None, 'Fool!! You forgot to include the dimension of your cross-attention conditioning...' + + if context_dim is not None: + assert use_spatial_transformer, 'Fool!! You forgot to use the spatial transformer for your cross-attention conditioning...' + # from omegaconf.listconfig import ListConfig + # if type(context_dim) == ListConfig: + # context_dim = list(context_dim) + + if num_heads_upsample == -1: + num_heads_upsample = num_heads + + if num_heads == -1: + assert num_head_channels != -1, 'Either num_heads or num_head_channels has to be set' + + if num_head_channels == -1: + assert num_heads != -1, 'Either num_heads or num_head_channels has to be set' + + self.dims = dims + self.image_size = image_size + self.in_channels = in_channels + self.model_channels = model_channels + + if isinstance(num_res_blocks, int): + self.num_res_blocks = len(channel_mult) * [num_res_blocks] + else: + if len(num_res_blocks) != len(channel_mult): + raise ValueError("provide num_res_blocks either as an int (globally constant) or " + "as a list/tuple (per-level) with the same length as channel_mult") + self.num_res_blocks = num_res_blocks + + if disable_self_attentions is not None: + # should be a list of booleans, indicating whether to disable self-attention in TransformerBlocks or not + assert len(disable_self_attentions) == len(channel_mult) + if num_attention_blocks is not None: + assert len(num_attention_blocks) == len(self.num_res_blocks) + assert all(map(lambda i: self.num_res_blocks[i] >= num_attention_blocks[i], range(len(num_attention_blocks)))) + + transformer_depth = transformer_depth[:] + + self.dropout = dropout + self.channel_mult = channel_mult + self.conv_resample = conv_resample + self.num_classes = num_classes + self.use_checkpoint = use_checkpoint + self.dtype = dtype + self.num_heads = num_heads + self.num_head_channels = num_head_channels + self.num_heads_upsample = num_heads_upsample + self.predict_codebook_ids = n_embed is not None + + time_embed_dim = model_channels * 4 + self.time_embed = nn.Sequential( + operations.Linear(model_channels, 
time_embed_dim, dtype=self.dtype, device=device), + nn.SiLU(), + operations.Linear(time_embed_dim, time_embed_dim, dtype=self.dtype, device=device), + ) + + if self.num_classes is not None: + if isinstance(self.num_classes, int): + self.label_emb = nn.Embedding(num_classes, time_embed_dim) + elif self.num_classes == "continuous": + print("setting up linear c_adm embedding layer") + self.label_emb = nn.Linear(1, time_embed_dim) + elif self.num_classes == "sequential": + assert adm_in_channels is not None + self.label_emb = nn.Sequential( + nn.Sequential( + operations.Linear(adm_in_channels, time_embed_dim, dtype=self.dtype, device=device), + nn.SiLU(), + operations.Linear(time_embed_dim, time_embed_dim, dtype=self.dtype, device=device), + ) + ) + else: + raise ValueError() + + self.input_blocks = nn.ModuleList( + [ + TimestepEmbedSequential( + operations.conv_nd(dims, in_channels, model_channels, 3, padding=1, dtype=self.dtype, device=device) + ) + ] + ) + self.zero_convs = nn.ModuleList([self.make_zero_conv(model_channels, operations=operations, dtype=self.dtype, device=device)]) + + self.input_hint_block = TimestepEmbedSequential( + operations.conv_nd(dims, hint_channels, 16, 3, padding=1, dtype=self.dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, 16, 16, 3, padding=1, dtype=self.dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, 16, 32, 3, padding=1, stride=2, dtype=self.dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, 32, 32, 3, padding=1, dtype=self.dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, 32, 96, 3, padding=1, stride=2, dtype=self.dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, 96, 96, 3, padding=1, dtype=self.dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, 96, 256, 3, padding=1, stride=2, dtype=self.dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, 256, model_channels, 3, padding=1, dtype=self.dtype, device=device) + ) + + self._feature_size = model_channels + input_block_chans = [model_channels] + ch = model_channels + ds = 1 + for level, mult in enumerate(channel_mult): + for nr in range(self.num_res_blocks[level]): + layers = [ + VideoResBlock( + ch, + time_embed_dim, + dropout, + out_channels=mult * model_channels, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + dtype=self.dtype, + device=device, + operations=operations, + video_kernel_size=video_kernel_size, + merge_strategy=merge_strategy, merge_factor=merge_factor, + ) + ] + ch = mult * model_channels + num_transformers = transformer_depth.pop(0) + if num_transformers > 0: + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + #num_heads = 1 + dim_head = ch // num_heads if use_spatial_transformer else num_head_channels + if exists(disable_self_attentions): + disabled_sa = disable_self_attentions[level] + else: + disabled_sa = False + + if not exists(num_attention_blocks) or nr < num_attention_blocks[level]: + layers.append( + SpatialVideoTransformer( + ch, num_heads, dim_head, depth=num_transformers, context_dim=context_dim, + disable_self_attn=disabled_sa, use_linear=use_linear_in_transformer, + checkpoint=use_checkpoint, dtype=self.dtype, device=device, operations=operations, + use_spatial_context=use_spatial_context, ff_in=extra_ff_mix_layer, + merge_strategy=merge_strategy, merge_factor=merge_factor, + ) + ) + self.input_blocks.append(TimestepEmbedSequential(*layers)) + 
self.zero_convs.append(self.make_zero_conv(ch, operations=operations, dtype=self.dtype, device=device)) + self._feature_size += ch + input_block_chans.append(ch) + if level != len(channel_mult) - 1: + out_ch = ch + self.input_blocks.append( + TimestepEmbedSequential( + VideoResBlock( + ch, + time_embed_dim, + dropout, + out_channels=out_ch, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + down=True, + dtype=self.dtype, + device=device, + operations=operations, + video_kernel_size=video_kernel_size, + merge_strategy=merge_strategy, merge_factor=merge_factor, + ) + if resblock_updown + else Downsample( + ch, conv_resample, dims=dims, out_channels=out_ch, dtype=self.dtype, device=device, operations=operations + ) + ) + ) + ch = out_ch + input_block_chans.append(ch) + self.zero_convs.append(self.make_zero_conv(ch, operations=operations, dtype=self.dtype, device=device)) + ds *= 2 + self._feature_size += ch + + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + #num_heads = 1 + dim_head = ch // num_heads if use_spatial_transformer else num_head_channels + mid_block = [ + VideoResBlock( + ch, + time_embed_dim, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + dtype=self.dtype, + device=device, + operations=operations, + video_kernel_size=video_kernel_size, + merge_strategy=merge_strategy, merge_factor=merge_factor, + )] + if transformer_depth_middle >= 0: + mid_block += [SpatialVideoTransformer( # always uses a self-attn + ch, num_heads, dim_head, depth=transformer_depth_middle, context_dim=context_dim, + disable_self_attn=disable_middle_self_attn, use_linear=use_linear_in_transformer, + checkpoint=use_checkpoint, dtype=self.dtype, device=device, operations=operations, + use_spatial_context=use_spatial_context, ff_in=extra_ff_mix_layer, + merge_strategy=merge_strategy, merge_factor=merge_factor, + ), + VideoResBlock( + ch, + time_embed_dim, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + dtype=self.dtype, + device=device, + operations=operations, + video_kernel_size=video_kernel_size, + merge_strategy=merge_strategy, merge_factor=merge_factor, + )] + self.middle_block = TimestepEmbedSequential(*mid_block) + self.middle_block_out = self.make_zero_conv(ch, operations=operations, dtype=self.dtype, device=device) + self._feature_size += ch + + def make_zero_conv(self, channels, operations=None, dtype=None, device=None): + return TimestepEmbedSequential(operations.conv_nd(self.dims, channels, channels, 1, padding=0, dtype=dtype, device=device)) + + def forward(self, x, hint, timesteps, context, y=None, **kwargs): + t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False).to(x.dtype) + emb = self.time_embed(t_emb) + + cond = kwargs["cond"] + num_video_frames = cond["num_video_frames"] + image_only_indicator = cond.get("image_only_indicator", None) + time_context = cond.get("time_context", None) + del cond + + guided_hint = self.input_hint_block(hint, emb, context, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator) + + outs = [] + + hs = [] + if self.num_classes is not None: + assert y.shape[0] == x.shape[0] + emb = emb + self.label_emb(y) + + h = x + for module, zero_conv in zip(self.input_blocks, self.zero_convs): + if guided_hint is not None: + h = module(h, emb, context, 
time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator) + h += guided_hint + guided_hint = None + else: + h = module(h, emb, context, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator) + outs.append(zero_conv(h, emb, context, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator)) + + h = self.middle_block(h, emb, context, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator) + outs.append(self.middle_block_out(h, emb, context, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator)) + + return outs + + +TEMPORAL_TRANSFORMER_BLOCKS = { + "norm_in.weight", + "norm_in.bias", + "ff_in.net.0.proj.weight", + "ff_in.net.0.proj.bias", + "ff_in.net.2.weight", + "ff_in.net.2.bias", +} +TEMPORAL_TRANSFORMER_BLOCKS.update(TRANSFORMER_BLOCKS) + + +TEMPORAL_UNET_MAP_ATTENTIONS = { + "time_mixer.mix_factor", +} +TEMPORAL_UNET_MAP_ATTENTIONS.update(UNET_MAP_ATTENTIONS) + + +TEMPORAL_TRANSFORMER_MAP = { + "time_pos_embed.0.weight": "time_pos_embed.linear_1.weight", + "time_pos_embed.0.bias": "time_pos_embed.linear_1.bias", + "time_pos_embed.2.weight": "time_pos_embed.linear_2.weight", + "time_pos_embed.2.bias": "time_pos_embed.linear_2.bias", +} + + +TEMPORAL_RESNET = { + "time_mixer.mix_factor", +} + + +def svd_unet_config_from_diffusers_unet(state_dict: dict[str, Tensor], dtype): + match = {} + transformer_depth = [] + + attn_res = 1 + down_blocks = comfy.model_detection.count_blocks(state_dict, "down_blocks.{}") + for i in range(down_blocks): + attn_blocks = comfy.model_detection.count_blocks(state_dict, "down_blocks.{}.attentions.".format(i) + '{}') + for ab in range(attn_blocks): + transformer_count = comfy.model_detection.count_blocks(state_dict, "down_blocks.{}.attentions.{}.transformer_blocks.".format(i, ab) + '{}') + transformer_depth.append(transformer_count) + if transformer_count > 0: + match["context_dim"] = state_dict["down_blocks.{}.attentions.{}.transformer_blocks.0.attn2.to_k.weight".format(i, ab)].shape[1] + + attn_res *= 2 + if attn_blocks == 0: + transformer_depth.append(0) + transformer_depth.append(0) + + match["transformer_depth"] = transformer_depth + + match["model_channels"] = state_dict["conv_in.weight"].shape[0] + match["in_channels"] = state_dict["conv_in.weight"].shape[1] + match["adm_in_channels"] = None + if "class_embedding.linear_1.weight" in state_dict: + match["adm_in_channels"] = state_dict["class_embedding.linear_1.weight"].shape[1] + elif "add_embedding.linear_1.weight" in state_dict: + match["adm_in_channels"] = state_dict["add_embedding.linear_1.weight"].shape[1] + + # based on unet_config of SVD + SVD = { + 'use_checkpoint': False, + 'image_size': 32, + 'use_spatial_transformer': True, + 'legacy': False, + 'num_classes': 'sequential', + 'adm_in_channels': 768, + 'dtype': dtype, + 'in_channels': 8, + 'out_channels': 4, + 'model_channels': 320, + 'num_res_blocks': [2, 2, 2, 2], + 'transformer_depth': [1, 1, 1, 1, 1, 1, 0, 0], + 'transformer_depth_output': [1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0], + 'channel_mult': [1, 2, 4, 4], + 'transformer_depth_middle': 1, + 'use_linear_in_transformer': True, + 'context_dim': 1024, + 'extra_ff_mix_layer': True, + 'use_spatial_context': True, + 'merge_strategy': 'learned_with_images', + 'merge_factor': 0.0, + 'video_kernel_size': [3, 1, 1], + 'use_temporal_attention': True, + 
'use_temporal_resblock': True, + 'num_heads': -1, + 'num_head_channels': 64, + } + + supported_models = [SVD] + + for unet_config in supported_models: + matches = True + for k in match: + if match[k] != unet_config[k]: + matches = False + break + if matches: + return comfy.model_detection.convert_config(unet_config) + return None + + +def svd_unet_to_diffusers(unet_config): + num_res_blocks = unet_config["num_res_blocks"] + channel_mult = unet_config["channel_mult"] + transformer_depth = unet_config["transformer_depth"][:] + transformer_depth_output = unet_config["transformer_depth_output"][:] + num_blocks = len(channel_mult) + + transformers_mid = unet_config.get("transformer_depth_middle", None) + + diffusers_unet_map = {} + for x in range(num_blocks): + n = 1 + (num_res_blocks[x] + 1) * x + for i in range(num_res_blocks[x]): + for b in TEMPORAL_RESNET: + diffusers_unet_map["down_blocks.{}.resnets.{}.{}".format(x, i, b)] = "input_blocks.{}.0.{}".format(n, b) + for b in UNET_MAP_RESNET: + diffusers_unet_map["down_blocks.{}.resnets.{}.spatial_res_block.{}".format(x, i, UNET_MAP_RESNET[b])] = "input_blocks.{}.0.{}".format(n, b) + diffusers_unet_map["down_blocks.{}.resnets.{}.temporal_res_block.{}".format(x, i, UNET_MAP_RESNET[b])] = "input_blocks.{}.0.time_stack.{}".format(n, b) + #diffusers_unet_map["down_blocks.{}.resnets.{}.{}".format(x, i, UNET_MAP_RESNET[b])] = "input_blocks.{}.0.{}".format(n, b) + num_transformers = transformer_depth.pop(0) + if num_transformers > 0: + for b in TEMPORAL_UNET_MAP_ATTENTIONS: + diffusers_unet_map["down_blocks.{}.attentions.{}.{}".format(x, i, b)] = "input_blocks.{}.1.{}".format(n, b) + for b in TEMPORAL_TRANSFORMER_MAP: + diffusers_unet_map["down_blocks.{}.attentions.{}.{}".format(x, i, TEMPORAL_TRANSFORMER_MAP[b])] = "input_blocks.{}.1.{}".format(n, b) + for t in range(num_transformers): + for b in TRANSFORMER_BLOCKS: + diffusers_unet_map["down_blocks.{}.attentions.{}.transformer_blocks.{}.{}".format(x, i, t, b)] = "input_blocks.{}.1.transformer_blocks.{}.{}".format(n, t, b) + for b in TEMPORAL_TRANSFORMER_BLOCKS: + diffusers_unet_map["down_blocks.{}.attentions.{}.temporal_transformer_blocks.{}.{}".format(x, i, t, b)] = "input_blocks.{}.1.time_stack.{}.{}".format(n, t, b) + n += 1 + for k in ["weight", "bias"]: + diffusers_unet_map["down_blocks.{}.downsamplers.0.conv.{}".format(x, k)] = "input_blocks.{}.0.op.{}".format(n, k) + + i = 0 + for b in TEMPORAL_UNET_MAP_ATTENTIONS: + diffusers_unet_map["mid_block.attentions.{}.{}".format(i, b)] = "middle_block.1.{}".format(b) + for b in TEMPORAL_TRANSFORMER_MAP: + diffusers_unet_map["mid_block.attentions.{}.{}".format(i, TEMPORAL_TRANSFORMER_MAP[b])] = "middle_block.1.{}".format(b) + for t in range(transformers_mid): + for b in TRANSFORMER_BLOCKS: + diffusers_unet_map["mid_block.attentions.{}.transformer_blocks.{}.{}".format(i, t, b)] = "middle_block.1.transformer_blocks.{}.{}".format(t, b) + for b in TEMPORAL_TRANSFORMER_BLOCKS: + diffusers_unet_map["mid_block.attentions.{}.temporal_transformer_blocks.{}.{}".format(i, t, b)] = "middle_block.1.time_stack.{}.{}".format(t, b) + + for i, n in enumerate([0, 2]): + for b in TEMPORAL_RESNET: + diffusers_unet_map["mid_block.resnets.{}.{}".format(i, b)] = "middle_block.{}.{}".format(n, b) + for b in UNET_MAP_RESNET: + diffusers_unet_map["mid_block.resnets.{}.spatial_res_block.{}".format(i, UNET_MAP_RESNET[b])] = "middle_block.{}.{}".format(n, b) + diffusers_unet_map["mid_block.resnets.{}.temporal_res_block.{}".format(i, UNET_MAP_RESNET[b])] = 
"middle_block.{}.time_stack.{}".format(n, b) + #diffusers_unet_map["mid_block.resnets.{}.{}".format(i, UNET_MAP_RESNET[b])] = "middle_block.{}.{}".format(n, b) + + num_res_blocks = list(reversed(num_res_blocks)) + for x in range(num_blocks): + n = (num_res_blocks[x] + 1) * x + l = num_res_blocks[x] + 1 + for i in range(l): + c = 0 + for b in UNET_MAP_RESNET: + diffusers_unet_map["up_blocks.{}.resnets.{}.{}".format(x, i, UNET_MAP_RESNET[b])] = "output_blocks.{}.0.{}".format(n, b) + c += 1 + num_transformers = transformer_depth_output.pop() + if num_transformers > 0: + c += 1 + for b in UNET_MAP_ATTENTIONS: + diffusers_unet_map["up_blocks.{}.attentions.{}.{}".format(x, i, b)] = "output_blocks.{}.1.{}".format(n, b) + for t in range(num_transformers): + for b in TRANSFORMER_BLOCKS: + diffusers_unet_map["up_blocks.{}.attentions.{}.transformer_blocks.{}.{}".format(x, i, t, b)] = "output_blocks.{}.1.transformer_blocks.{}.{}".format(n, t, b) + if i == l - 1: + for k in ["weight", "bias"]: + diffusers_unet_map["up_blocks.{}.upsamplers.0.conv.{}".format(x, k)] = "output_blocks.{}.{}.conv.{}".format(n, c, k) + n += 1 + + for k in UNET_MAP_BASIC: + diffusers_unet_map[k[1]] = k[0] + + return diffusers_unet_map diff --git a/ComfyUI-Advanced-ControlNet/adv_control/logger.py b/ComfyUI-Advanced-ControlNet/adv_control/logger.py new file mode 100644 index 0000000000000000000000000000000000000000..b23b82fa5b8456e87b3b86da21a3f07bed6682b6 --- /dev/null +++ b/ComfyUI-Advanced-ControlNet/adv_control/logger.py @@ -0,0 +1,36 @@ +import sys +import copy +import logging + + +class ColoredFormatter(logging.Formatter): + COLORS = { + "DEBUG": "\033[0;36m", # CYAN + "INFO": "\033[0;32m", # GREEN + "WARNING": "\033[0;33m", # YELLOW + "ERROR": "\033[0;31m", # RED + "CRITICAL": "\033[0;37;41m", # WHITE ON RED + "RESET": "\033[0m", # RESET COLOR + } + + def format(self, record): + colored_record = copy.copy(record) + levelname = colored_record.levelname + seq = self.COLORS.get(levelname, self.COLORS["RESET"]) + colored_record.levelname = f"{seq}{levelname}{self.COLORS['RESET']}" + return super().format(colored_record) + + +# Create a new logger +logger = logging.getLogger("Advanced-ControlNet") +logger.propagate = False + +# Add handler if we don't have one. 
+if not logger.handlers: + handler = logging.StreamHandler(sys.stdout) + handler.setFormatter(ColoredFormatter("[%(name)s] - %(levelname)s - %(message)s")) + logger.addHandler(handler) + +# Configure logger +loglevel = logging.INFO +logger.setLevel(loglevel) diff --git a/ComfyUI-Advanced-ControlNet/adv_control/nodes.py b/ComfyUI-Advanced-ControlNet/adv_control/nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..5d5655470879c5c7e1a3b6ad9fd7b62e0760ea3e --- /dev/null +++ b/ComfyUI-Advanced-ControlNet/adv_control/nodes.py @@ -0,0 +1,235 @@ +import numpy as np +from torch import Tensor + +import folder_paths +from comfy.model_patcher import ModelPatcher + +from .control import load_controlnet, convert_to_advanced, is_advanced_controlnet +from .utils import ControlWeights, LatentKeyframeGroup, TimestepKeyframeGroup, BIGMAX +from .nodes_weight import (DefaultWeights, ScaledSoftMaskedUniversalWeights, ScaledSoftUniversalWeights, SoftControlNetWeights, CustomControlNetWeights, + SoftT2IAdapterWeights, CustomT2IAdapterWeights) +from .nodes_keyframes import (LatentKeyframeGroupNode, LatentKeyframeInterpolationNode, LatentKeyframeBatchedGroupNode, LatentKeyframeNode, + TimestepKeyframeNode, TimestepKeyframeInterpolationNode, TimestepKeyframeFromStrengthListNode) +from .nodes_sparsectrl import SparseCtrlMergedLoaderAdvanced, SparseCtrlLoaderAdvanced, SparseIndexMethodNode, SparseSpreadMethodNode, RgbSparseCtrlPreprocessor +from .nodes_reference import ReferenceControlNetNode, ReferenceControlFinetune, ReferencePreprocessorNode +from .nodes_loosecontrol import ControlNetLoaderWithLoraAdvanced +from .nodes_deprecated import LoadImagesFromDirectory +from .logger import logger + + +class ControlNetLoaderAdvanced: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "control_net_name": (folder_paths.get_filename_list("controlnet"), ), + }, + "optional": { + "timestep_keyframe": ("TIMESTEP_KEYFRAME", ), + } + } + + RETURN_TYPES = ("CONTROL_NET", ) + FUNCTION = "load_controlnet" + + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝" + + def load_controlnet(self, control_net_name, + timestep_keyframe: TimestepKeyframeGroup=None + ): + controlnet_path = folder_paths.get_full_path("controlnet", control_net_name) + controlnet = load_controlnet(controlnet_path, timestep_keyframe) + return (controlnet,) + + +class DiffControlNetLoaderAdvanced: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL",), + "control_net_name": (folder_paths.get_filename_list("controlnet"), ) + }, + "optional": { + "timestep_keyframe": ("TIMESTEP_KEYFRAME", ), + } + } + + RETURN_TYPES = ("CONTROL_NET", ) + FUNCTION = "load_controlnet" + + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝" + + def load_controlnet(self, control_net_name, model, + timestep_keyframe: TimestepKeyframeGroup=None + ): + controlnet_path = folder_paths.get_full_path("controlnet", control_net_name) + controlnet = load_controlnet(controlnet_path, timestep_keyframe, model) + if is_advanced_controlnet(controlnet): + controlnet.verify_all_weights() + return (controlnet,) + + +class AdvancedControlNetApply: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "control_net": ("CONTROL_NET", ), + "image": ("IMAGE", ), + "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "start_percent": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "end_percent": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 
0.001}) + }, + "optional": { + "mask_optional": ("MASK", ), + "timestep_kf": ("TIMESTEP_KEYFRAME", ), + "latent_kf_override": ("LATENT_KEYFRAME", ), + "weights_override": ("CONTROL_NET_WEIGHTS", ), + "model_optional": ("MODEL",), + } + } + + RETURN_TYPES = ("CONDITIONING","CONDITIONING","MODEL",) + RETURN_NAMES = ("positive", "negative", "model_opt") + FUNCTION = "apply_controlnet" + + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝" + + def apply_controlnet(self, positive, negative, control_net, image, strength, start_percent, end_percent, + mask_optional: Tensor=None, model_optional: ModelPatcher=None, + timestep_kf: TimestepKeyframeGroup=None, latent_kf_override: LatentKeyframeGroup=None, + weights_override: ControlWeights=None): + if strength == 0: + return (positive, negative, model_optional) + if model_optional: + model_optional = model_optional.clone() + + control_hint = image.movedim(-1,1) + cnets = {} + + out = [] + for conditioning in [positive, negative]: + c = [] + for t in conditioning: + d = t[1].copy() + + prev_cnet = d.get('control', None) + if prev_cnet in cnets: + c_net = cnets[prev_cnet] + else: + # copy, convert to advanced if needed, and set cond + c_net = convert_to_advanced(control_net.copy()).set_cond_hint(control_hint, strength, (start_percent, end_percent)) + if is_advanced_controlnet(c_net): + # disarm node check + c_net.disarm() + # if model required, verify model is passed in, and if so patch it + if c_net.require_model: + if not model_optional: + raise Exception(f"Type '{type(c_net).__name__}' requires model_optional input, but got None.") + c_net.patch_model(model=model_optional) + # apply optional parameters and overrides, if provided + if timestep_kf is not None: + c_net.set_timestep_keyframes(timestep_kf) + if latent_kf_override is not None: + c_net.latent_keyframe_override = latent_kf_override + if weights_override is not None: + c_net.weights_override = weights_override + # verify weights are compatible + c_net.verify_all_weights() + # set cond hint mask + if mask_optional is not None: + mask_optional = mask_optional.clone() + # if not in the form of a batch, make it so + if len(mask_optional.shape) < 3: + mask_optional = mask_optional.unsqueeze(0) + c_net.set_cond_hint_mask(mask_optional) + c_net.set_previous_controlnet(prev_cnet) + cnets[prev_cnet] = c_net + + d['control'] = c_net + d['control_apply_to_uncond'] = False + n = [t[0], d] + c.append(n) + out.append(c) + return (out[0], out[1], model_optional) + + +# NODE MAPPING +NODE_CLASS_MAPPINGS = { + # Keyframes + "TimestepKeyframe": TimestepKeyframeNode, + "ACN_TimestepKeyframeInterpolation": TimestepKeyframeInterpolationNode, + "ACN_TimestepKeyframeFromStrengthList": TimestepKeyframeFromStrengthListNode, + "LatentKeyframe": LatentKeyframeNode, + "LatentKeyframeTiming": LatentKeyframeInterpolationNode, + "LatentKeyframeBatchedGroup": LatentKeyframeBatchedGroupNode, + "LatentKeyframeGroup": LatentKeyframeGroupNode, + # Conditioning + "ACN_AdvancedControlNetApply": AdvancedControlNetApply, + # Loaders + "ControlNetLoaderAdvanced": ControlNetLoaderAdvanced, + "DiffControlNetLoaderAdvanced": DiffControlNetLoaderAdvanced, + # Weights + "ScaledSoftControlNetWeights": ScaledSoftUniversalWeights, + "ScaledSoftMaskedUniversalWeights": ScaledSoftMaskedUniversalWeights, + "SoftControlNetWeights": SoftControlNetWeights, + "CustomControlNetWeights": CustomControlNetWeights, + "SoftT2IAdapterWeights": SoftT2IAdapterWeights, + "CustomT2IAdapterWeights": CustomT2IAdapterWeights, + "ACN_DefaultUniversalWeights": DefaultWeights, + # 
SparseCtrl + "ACN_SparseCtrlRGBPreprocessor": RgbSparseCtrlPreprocessor, + "ACN_SparseCtrlLoaderAdvanced": SparseCtrlLoaderAdvanced, + "ACN_SparseCtrlMergedLoaderAdvanced": SparseCtrlMergedLoaderAdvanced, + "ACN_SparseCtrlIndexMethodNode": SparseIndexMethodNode, + "ACN_SparseCtrlSpreadMethodNode": SparseSpreadMethodNode, + # Reference + "ACN_ReferencePreprocessor": ReferencePreprocessorNode, + "ACN_ReferenceControlNet": ReferenceControlNetNode, + "ACN_ReferenceControlNetFinetune": ReferenceControlFinetune, + # LOOSEControl + #"ACN_ControlNetLoaderWithLoraAdvanced": ControlNetLoaderWithLoraAdvanced, + # Deprecated + "LoadImagesFromDirectory": LoadImagesFromDirectory, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + # Keyframes + "TimestepKeyframe": "Timestep Keyframe 🛂🅐🅒🅝", + "ACN_TimestepKeyframeInterpolation": "Timestep Keyframe Interpolation 🛂🅐🅒🅝", + "ACN_TimestepKeyframeFromStrengthList": "Timestep Keyframe From List 🛂🅐🅒🅝", + "LatentKeyframe": "Latent Keyframe 🛂🅐🅒🅝", + "LatentKeyframeTiming": "Latent Keyframe Interpolation 🛂🅐🅒🅝", + "LatentKeyframeBatchedGroup": "Latent Keyframe From List 🛂🅐🅒🅝", + "LatentKeyframeGroup": "Latent Keyframe Group 🛂🅐🅒🅝", + # Conditioning + "ACN_AdvancedControlNetApply": "Apply Advanced ControlNet 🛂🅐🅒🅝", + # Loaders + "ControlNetLoaderAdvanced": "Load Advanced ControlNet Model 🛂🅐🅒🅝", + "DiffControlNetLoaderAdvanced": "Load Advanced ControlNet Model (diff) 🛂🅐🅒🅝", + # Weights + "ScaledSoftControlNetWeights": "Scaled Soft Weights 🛂🅐🅒🅝", + "ScaledSoftMaskedUniversalWeights": "Scaled Soft Masked Weights 🛂🅐🅒🅝", + "SoftControlNetWeights": "ControlNet Soft Weights 🛂🅐🅒🅝", + "CustomControlNetWeights": "ControlNet Custom Weights 🛂🅐🅒🅝", + "SoftT2IAdapterWeights": "T2IAdapter Soft Weights 🛂🅐🅒🅝", + "CustomT2IAdapterWeights": "T2IAdapter Custom Weights 🛂🅐🅒🅝", + "ACN_DefaultUniversalWeights": "Force Default Weights 🛂🅐🅒🅝", + # SparseCtrl + "ACN_SparseCtrlRGBPreprocessor": "RGB SparseCtrl 🛂🅐🅒🅝", + "ACN_SparseCtrlLoaderAdvanced": "Load SparseCtrl Model 🛂🅐🅒🅝", + "ACN_SparseCtrlMergedLoaderAdvanced": "🧪Load Merged SparseCtrl Model 🛂🅐🅒🅝", + "ACN_SparseCtrlIndexMethodNode": "SparseCtrl Index Method 🛂🅐🅒🅝", + "ACN_SparseCtrlSpreadMethodNode": "SparseCtrl Spread Method 🛂🅐🅒🅝", + # Reference + "ACN_ReferencePreprocessor": "Reference Preproccessor 🛂🅐🅒🅝", + "ACN_ReferenceControlNet": "Reference ControlNet 🛂🅐🅒🅝", + "ACN_ReferenceControlNetFinetune": "Reference ControlNet (Finetune) 🛂🅐🅒🅝", + # LOOSEControl + #"ACN_ControlNetLoaderWithLoraAdvanced": "Load Adv. 
ControlNet Model w/ LoRA 🛂🅐🅒🅝",
+    # Deprecated
+    "LoadImagesFromDirectory": "🚫Load Images [DEPRECATED] 🛂🅐🅒🅝",
+}
diff --git a/ComfyUI-Advanced-ControlNet/adv_control/nodes_deprecated.py b/ComfyUI-Advanced-ControlNet/adv_control/nodes_deprecated.py
new file mode 100644
index 0000000000000000000000000000000000000000..bd98607bdb3cf6757c63bd3a01663bb7bac59417
--- /dev/null
+++ b/ComfyUI-Advanced-ControlNet/adv_control/nodes_deprecated.py
@@ -0,0 +1,71 @@
+import os
+
+import torch
+
+import numpy as np
+from PIL import Image, ImageOps
+from .utils import BIGMAX
+from .logger import logger
+
+
+class LoadImagesFromDirectory:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {
+            "required": {
+                "directory": ("STRING", {"default": ""}),
+            },
+            "optional": {
+                "image_load_cap": ("INT", {"default": 0, "min": 0, "max": BIGMAX, "step": 1}),
+                "start_index": ("INT", {"default": 0, "min": 0, "max": BIGMAX, "step": 1}),
+            }
+        }
+
+    RETURN_TYPES = ("IMAGE", "MASK", "INT")
+    FUNCTION = "load_images"
+
+    CATEGORY = ""
+
+    def load_images(self, directory: str, image_load_cap: int = 0, start_index: int = 0):
+        if not os.path.isdir(directory):
+            raise FileNotFoundError(f"Directory '{directory}' cannot be found.")
+        dir_files = os.listdir(directory)
+        if len(dir_files) == 0:
+            raise FileNotFoundError(f"No files in directory '{directory}'.")
+
+        dir_files = sorted(dir_files)
+        dir_files = [os.path.join(directory, x) for x in dir_files]
+        # start at start_index
+        dir_files = dir_files[start_index:]
+
+        images = []
+        masks = []
+
+        limit_images = False
+        if image_load_cap > 0:
+            limit_images = True
+        image_count = 0
+
+        for image_path in dir_files:
+            if os.path.isdir(image_path):
+                continue
+            if limit_images and image_count >= image_load_cap:
+                break
+            i = Image.open(image_path)
+            i = ImageOps.exif_transpose(i)
+            image = i.convert("RGB")
+            image = np.array(image).astype(np.float32) / 255.0
+            image = torch.from_numpy(image)[None,]
+            if 'A' in i.getbands():
+                mask = np.array(i.getchannel('A')).astype(np.float32) / 255.0
+                mask = 1.
- torch.from_numpy(mask) + else: + mask = torch.zeros((64,64), dtype=torch.float32, device="cpu") + images.append(image) + masks.append(mask) + image_count += 1 + + if len(images) == 0: + raise FileNotFoundError(f"No images could be loaded from directory '{directory}'.") + + return (torch.cat(images, dim=0), torch.stack(masks, dim=0), image_count) diff --git a/ComfyUI-Advanced-ControlNet/adv_control/nodes_keyframes.py b/ComfyUI-Advanced-ControlNet/adv_control/nodes_keyframes.py new file mode 100644 index 0000000000000000000000000000000000000000..ad05aec2d115d17e5384890e015c120ab4a5d8d3 --- /dev/null +++ b/ComfyUI-Advanced-ControlNet/adv_control/nodes_keyframes.py @@ -0,0 +1,461 @@ +from typing import Union +import numpy as np +from collections.abc import Iterable + +from .utils import ControlWeights, TimestepKeyframe, TimestepKeyframeGroup, LatentKeyframe, LatentKeyframeGroup, BIGMIN, BIGMAX +from .utils import StrengthInterpolation as SI +from .logger import logger + + +class TimestepKeyframeNode: + OUTDATED_DUMMY = -39 + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "start_percent": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}, ), + }, + "optional": { + "prev_timestep_kf": ("TIMESTEP_KEYFRAME", ), + "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "cn_weights": ("CONTROL_NET_WEIGHTS", ), + "latent_keyframe": ("LATENT_KEYFRAME", ), + "null_latent_kf_strength": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "inherit_missing": ("BOOLEAN", {"default": True}, ), + "guarantee_steps": ("INT", {"default": 1, "min": 0, "max": BIGMAX}), + "mask_optional": ("MASK", ), + } + } + + RETURN_NAMES = ("TIMESTEP_KF", ) + RETURN_TYPES = ("TIMESTEP_KEYFRAME", ) + FUNCTION = "load_keyframe" + + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/keyframes" + + def load_keyframe(self, + start_percent: float, + strength: float=1.0, + cn_weights: ControlWeights=None, control_net_weights: ControlWeights=None, # old name + latent_keyframe: LatentKeyframeGroup=None, + prev_timestep_kf: TimestepKeyframeGroup=None, prev_timestep_keyframe: TimestepKeyframeGroup=None, # old name + null_latent_kf_strength: float=0.0, + inherit_missing=True, + guarantee_steps=OUTDATED_DUMMY, + guarantee_usage=True, # old input + mask_optional=None,): + # if using outdated dummy value, means node on workflow is outdated and should appropriately convert behavior + if guarantee_steps == self.OUTDATED_DUMMY: + guarantee_steps = int(guarantee_usage) + control_net_weights = control_net_weights if control_net_weights else cn_weights + prev_timestep_keyframe = prev_timestep_keyframe if prev_timestep_keyframe else prev_timestep_kf + if not prev_timestep_keyframe: + prev_timestep_keyframe = TimestepKeyframeGroup() + else: + prev_timestep_keyframe = prev_timestep_keyframe.clone() + keyframe = TimestepKeyframe(start_percent=start_percent, strength=strength, null_latent_kf_strength=null_latent_kf_strength, + control_weights=control_net_weights, latent_keyframes=latent_keyframe, inherit_missing=inherit_missing, + guarantee_steps=guarantee_steps, mask_hint_orig=mask_optional) + prev_timestep_keyframe.add(keyframe) + return (prev_timestep_keyframe,) + + +class TimestepKeyframeInterpolationNode: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "start_percent": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001},), + "end_percent": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "strength_start": ("FLOAT", {"default": 
1.0, "min": 0.0, "max": 10.0, "step": 0.001},), + "strength_end": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001},), + "interpolation": (SI._LIST, ), + "intervals": ("INT", {"default": 50, "min": 2, "max": 100, "step": 1}), + }, + "optional": { + "prev_timestep_kf": ("TIMESTEP_KEYFRAME", ), + "cn_weights": ("CONTROL_NET_WEIGHTS", ), + "latent_keyframe": ("LATENT_KEYFRAME", ), + "null_latent_kf_strength": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 10.0, "step": 0.001},), + "inherit_missing": ("BOOLEAN", {"default": True},), + "mask_optional": ("MASK", ), + "print_keyframes": ("BOOLEAN", {"default": False}), + } + } + + RETURN_NAMES = ("TIMESTEP_KF", ) + RETURN_TYPES = ("TIMESTEP_KEYFRAME", ) + FUNCTION = "load_keyframe" + + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/keyframes" + + def load_keyframe(self, + start_percent: float, end_percent: float, + strength_start: float, strength_end: float, interpolation: str, intervals: int, + cn_weights: ControlWeights=None, + latent_keyframe: LatentKeyframeGroup=None, + prev_timestep_kf: TimestepKeyframeGroup=None, + null_latent_kf_strength: float=0.0, + inherit_missing=True, + guarantee_steps=1, + mask_optional=None, print_keyframes=False): + if not prev_timestep_kf: + prev_timestep_kf = TimestepKeyframeGroup() + else: + prev_timestep_kf = prev_timestep_kf.clone() + + percents = SI.get_weights(num_from=start_percent, num_to=end_percent, length=intervals, method=SI.LINEAR) + strengths = SI.get_weights(num_from=strength_start, num_to=strength_end, length=intervals, method=interpolation) + + is_first = True + for percent, strength in zip(percents, strengths): + guarantee_steps = 0 + if is_first: + guarantee_steps = 1 + is_first = False + prev_timestep_kf.add(TimestepKeyframe(start_percent=percent, strength=strength, null_latent_kf_strength=null_latent_kf_strength, + control_weights=cn_weights, latent_keyframes=latent_keyframe, inherit_missing=inherit_missing, + guarantee_steps=guarantee_steps, mask_hint_orig=mask_optional)) + if print_keyframes: + logger.info(f"TimestepKeyframe - start_percent:{percent} = {strength}") + return (prev_timestep_kf,) + + +class TimestepKeyframeFromStrengthListNode: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "float_strengths": ("FLOAT", {"default": -1, "min": -1, "step": 0.001, "forceInput": True}), + "start_percent": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001},), + "end_percent": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001}), + }, + "optional": { + "prev_timestep_kf": ("TIMESTEP_KEYFRAME", ), + "cn_weights": ("CONTROL_NET_WEIGHTS", ), + "latent_keyframe": ("LATENT_KEYFRAME", ), + "null_latent_kf_strength": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 10.0, "step": 0.001},), + "inherit_missing": ("BOOLEAN", {"default": True},), + "mask_optional": ("MASK", ), + "print_keyframes": ("BOOLEAN", {"default": False}), + } + } + + RETURN_NAMES = ("TIMESTEP_KF", ) + RETURN_TYPES = ("TIMESTEP_KEYFRAME", ) + FUNCTION = "load_keyframe" + + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/keyframes" + + def load_keyframe(self, + start_percent: float, end_percent: float, + float_strengths: float, + cn_weights: ControlWeights=None, + latent_keyframe: LatentKeyframeGroup=None, + prev_timestep_kf: TimestepKeyframeGroup=None, + null_latent_kf_strength: float=0.0, + inherit_missing=True, + guarantee_steps=1, + mask_optional=None, print_keyframes=False): + if not prev_timestep_kf: + prev_timestep_kf = TimestepKeyframeGroup() + else: + prev_timestep_kf = prev_timestep_kf.clone() + + if 
type(float_strengths) in (float, int):
+            float_strengths = [float(float_strengths)]
+        elif isinstance(float_strengths, Iterable):
+            pass
+        else:
+            raise Exception(f"float_strengths must be either an iterable input or a float, but was {type(float_strengths).__name__}.")
+        percents = SI.get_weights(num_from=start_percent, num_to=end_percent, length=len(float_strengths), method=SI.LINEAR)
+
+        is_first = True
+        for percent, strength in zip(percents, float_strengths):
+            guarantee_steps = 0
+            if is_first:
+                guarantee_steps = 1
+                is_first = False
+            prev_timestep_kf.add(TimestepKeyframe(start_percent=percent, strength=strength, null_latent_kf_strength=null_latent_kf_strength,
+                                                  control_weights=cn_weights, latent_keyframes=latent_keyframe, inherit_missing=inherit_missing,
+                                                  guarantee_steps=guarantee_steps, mask_hint_orig=mask_optional))
+            if print_keyframes:
+                logger.info(f"TimestepKeyframe - start_percent:{percent} = {strength}")
+        return (prev_timestep_kf,)
+
+
+class LatentKeyframeNode:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {
+            "required": {
+                "batch_index": ("INT", {"default": 0, "min": BIGMIN, "max": BIGMAX, "step": 1}),
+                "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ),
+            },
+            "optional": {
+                "prev_latent_kf": ("LATENT_KEYFRAME", ),
+            }
+        }
+
+    RETURN_NAMES = ("LATENT_KF", )
+    RETURN_TYPES = ("LATENT_KEYFRAME", )
+    FUNCTION = "load_keyframe"
+
+    CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/keyframes"
+
+    def load_keyframe(self,
+                      batch_index: int,
+                      strength: float,
+                      prev_latent_kf: LatentKeyframeGroup=None,
+                      prev_latent_keyframe: LatentKeyframeGroup=None, # old name
+                      ):
+        prev_latent_keyframe = prev_latent_keyframe if prev_latent_keyframe else prev_latent_kf
+        if not prev_latent_keyframe:
+            prev_latent_keyframe = LatentKeyframeGroup()
+        else:
+            prev_latent_keyframe = prev_latent_keyframe.clone()
+        keyframe = LatentKeyframe(batch_index, strength)
+        prev_latent_keyframe.add(keyframe)
+        return (prev_latent_keyframe,)
+
+
+class LatentKeyframeGroupNode:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {
+            "required": {
+                "index_strengths": ("STRING", {"multiline": True, "default": ""}),
+            },
+            "optional": {
+                "prev_latent_kf": ("LATENT_KEYFRAME", ),
+                "latent_optional": ("LATENT", ),
+                "print_keyframes": ("BOOLEAN", {"default": False})
+            }
+        }
+
+    RETURN_NAMES = ("LATENT_KF", )
+    RETURN_TYPES = ("LATENT_KEYFRAME", )
+    FUNCTION = "load_keyframes"
+
+    CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/keyframes"
+
+    def validate_index(self, index: int, latent_count: int = 0, is_range: bool = False, allow_negative = False) -> int:
+        # if part of range, do nothing
+        if is_range:
+            return index
+        # otherwise, validate index
+        # validate not out of range - only when latent_count is passed in
+        if latent_count > 0 and index > latent_count-1:
+            raise IndexError(f"Index '{index}' out of range for the total {latent_count} latents.")
+        # if negative, validate not out of range
+        if index < 0:
+            if not allow_negative:
+                raise IndexError(f"Negative indices not allowed, but was {index}.")
+            conv_index = latent_count+index
+            if conv_index < 0:
+                raise IndexError(f"Index '{index}', converted to '{conv_index}', is out of range for the total {latent_count} latents.")
+            index = conv_index
+        return index
+
+    def convert_to_index_int(self, raw_index: str, latent_count: int = 0, is_range: bool = False, allow_negative = False) -> int:
+        try:
+            return self.validate_index(int(raw_index), latent_count=latent_count, is_range=is_range, allow_negative=allow_negative)
+        except ValueError as e:
+            raise ValueError(f"index 
'{raw_index}' must be an integer.", e) + + def convert_to_latent_keyframes(self, latent_indeces: str, latent_count: int) -> set[LatentKeyframe]: + if not latent_indeces: + return set() + int_latent_indeces = [i for i in range(0, latent_count)] + allow_negative = latent_count > 0 + chosen_indeces = set() + # parse string - allow positive ints, negative ints, and ranges separated by ':' + groups = latent_indeces.split(",") + groups = [g.strip() for g in groups] + for g in groups: + # parse strengths - default to 1.0 if no strength given + strength = 1.0 + if '=' in g: + g, strength_str = g.split("=", 1) + g = g.strip() + try: + strength = float(strength_str.strip()) + except ValueError as e: + raise ValueError(f"strength '{strength_str}' must be a float.", e) + if strength < 0: + raise ValueError(f"Strength '{strength}' cannot be negative.") + # parse range of indeces (e.g. 2:16) + if ':' in g: + index_range = g.split(":", 1) + index_range = [r.strip() for r in index_range] + start_index = self.convert_to_index_int(index_range[0], latent_count=latent_count, is_range=True, allow_negative=allow_negative) + end_index = self.convert_to_index_int(index_range[1], latent_count=latent_count, is_range=True, allow_negative=allow_negative) + # if latents were passed in, base indeces on known latent count + if len(int_latent_indeces) > 0: + for i in int_latent_indeces[start_index:end_index]: + chosen_indeces.add(LatentKeyframe(i, strength)) + # otherwise, assume indeces are valid + else: + for i in range(start_index, end_index): + chosen_indeces.add(LatentKeyframe(i, strength)) + # parse individual indeces + else: + chosen_indeces.add(LatentKeyframe(self.convert_to_index_int(g, latent_count=latent_count, allow_negative=allow_negative), strength)) + return chosen_indeces + + def load_keyframes(self, + index_strengths: str, + prev_latent_kf: LatentKeyframeGroup=None, + prev_latent_keyframe: LatentKeyframeGroup=None, # old name + latent_image_opt=None, + print_keyframes=False): + prev_latent_keyframe = prev_latent_keyframe if prev_latent_keyframe else prev_latent_kf + if not prev_latent_keyframe: + prev_latent_keyframe = LatentKeyframeGroup() + else: + prev_latent_keyframe = prev_latent_keyframe.clone() + curr_latent_keyframe = LatentKeyframeGroup() + + latent_count = -1 + if latent_image_opt: + latent_count = latent_image_opt['samples'].size()[0] + latent_keyframes = self.convert_to_latent_keyframes(index_strengths, latent_count=latent_count) + + for latent_keyframe in latent_keyframes: + curr_latent_keyframe.add(latent_keyframe) + + if print_keyframes: + for keyframe in curr_latent_keyframe.keyframes: + logger.info(f"LatentKeyframe {keyframe.batch_index}={keyframe.strength}") + + # replace values with prev_latent_keyframes + for latent_keyframe in prev_latent_keyframe.keyframes: + curr_latent_keyframe.add(latent_keyframe) + + return (curr_latent_keyframe,) + + +class LatentKeyframeInterpolationNode: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "batch_index_from": ("INT", {"default": 0, "min": BIGMIN, "max": BIGMAX, "step": 1}), + "batch_index_to_excl": ("INT", {"default": 0, "min": BIGMIN, "max": BIGMAX, "step": 1}), + "strength_from": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "strength_to": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "interpolation": (SI._LIST, ), + }, + "optional": { + "prev_latent_kf": ("LATENT_KEYFRAME", ), + "print_keyframes": ("BOOLEAN", {"default": False}) + } + } + + RETURN_NAMES = ("LATENT_KF", ) + 
RETURN_TYPES = ("LATENT_KEYFRAME", ) + FUNCTION = "load_keyframe" + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/keyframes" + + def load_keyframe(self, + batch_index_from: int, + strength_from: float, + batch_index_to_excl: int, + strength_to: float, + interpolation: str, + prev_latent_kf: LatentKeyframeGroup=None, + prev_latent_keyframe: LatentKeyframeGroup=None, # old name + print_keyframes=False): + + if (batch_index_from > batch_index_to_excl): + raise ValueError("batch_index_from must be less than or equal to batch_index_to.") + + if (batch_index_from < 0 and batch_index_to_excl >= 0): + raise ValueError("batch_index_from and batch_index_to must be either both positive or both negative.") + + prev_latent_keyframe = prev_latent_keyframe if prev_latent_keyframe else prev_latent_kf + if not prev_latent_keyframe: + prev_latent_keyframe = LatentKeyframeGroup() + else: + prev_latent_keyframe = prev_latent_keyframe.clone() + curr_latent_keyframe = LatentKeyframeGroup() + + steps = batch_index_to_excl - batch_index_from + diff = strength_to - strength_from + if interpolation == SI.LINEAR: + weights = np.linspace(strength_from, strength_to, steps) + elif interpolation == SI.EASE_IN: + index = np.linspace(0, 1, steps) + weights = diff * np.power(index, 2) + strength_from + elif interpolation == SI.EASE_OUT: + index = np.linspace(0, 1, steps) + weights = diff * (1 - np.power(1 - index, 2)) + strength_from + elif interpolation == SI.EASE_IN_OUT: + index = np.linspace(0, 1, steps) + weights = diff * ((1 - np.cos(index * np.pi)) / 2) + strength_from + + for i in range(steps): + keyframe = LatentKeyframe(batch_index_from + i, float(weights[i])) + curr_latent_keyframe.add(keyframe) + + if print_keyframes: + for keyframe in curr_latent_keyframe.keyframes: + logger.info(f"LatentKeyframe {keyframe.batch_index}={keyframe.strength}") + + # replace values with prev_latent_keyframes + for latent_keyframe in prev_latent_keyframe.keyframes: + curr_latent_keyframe.add(latent_keyframe) + + return (curr_latent_keyframe,) + + +class LatentKeyframeBatchedGroupNode: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "float_strengths": ("FLOAT", {"default": -1, "min": -1, "step": 0.001, "forceInput": True}), + }, + "optional": { + "prev_latent_kf": ("LATENT_KEYFRAME", ), + "print_keyframes": ("BOOLEAN", {"default": False}) + } + } + + RETURN_NAMES = ("LATENT_KF", ) + RETURN_TYPES = ("LATENT_KEYFRAME", ) + FUNCTION = "load_keyframe" + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/keyframes" + + def load_keyframe(self, float_strengths: Union[float, list[float]], + prev_latent_kf: LatentKeyframeGroup=None, + prev_latent_keyframe: LatentKeyframeGroup=None, # old name + print_keyframes=False): + prev_latent_keyframe = prev_latent_keyframe if prev_latent_keyframe else prev_latent_kf + if not prev_latent_keyframe: + prev_latent_keyframe = LatentKeyframeGroup() + else: + prev_latent_keyframe = prev_latent_keyframe.clone() + curr_latent_keyframe = LatentKeyframeGroup() + + # if received a normal float input, do nothing + if type(float_strengths) in (float, int): + logger.info("No batched float_strengths passed into Latent Keyframe Batch Group node; will not create any new keyframes.") + # if iterable, attempt to create LatentKeyframes with chosen strengths + elif isinstance(float_strengths, Iterable): + for idx, strength in enumerate(float_strengths): + keyframe = LatentKeyframe(idx, strength) + curr_latent_keyframe.add(keyframe) + else: + raise ValueError(f"Expected strengths to be an iterable input, but was 
{type(float_strengths).__name__}.")
+
+        if print_keyframes:
+            for keyframe in curr_latent_keyframe.keyframes:
+                logger.info(f"LatentKeyframe {keyframe.batch_index}={keyframe.strength}")
+
+        # replace values with prev_latent_keyframes
+        for latent_keyframe in prev_latent_keyframe.keyframes:
+            curr_latent_keyframe.add(latent_keyframe)
+
+        return (curr_latent_keyframe,)
diff --git a/ComfyUI-Advanced-ControlNet/adv_control/nodes_loosecontrol.py b/ComfyUI-Advanced-ControlNet/adv_control/nodes_loosecontrol.py
new file mode 100644
index 0000000000000000000000000000000000000000..5c1d43eb6152cd110feb1397f77f7e6adde8a16c
--- /dev/null
+++ b/ComfyUI-Advanced-ControlNet/adv_control/nodes_loosecontrol.py
@@ -0,0 +1,67 @@
+import folder_paths
+import comfy.utils
+import comfy.model_detection
+import comfy.model_management
+import comfy.lora
+from comfy.model_patcher import ModelPatcher
+
+from .utils import TimestepKeyframeGroup
+from .control import ControlNetAdvanced, load_controlnet
+
+
+
+
+def convert_cn_lora_from_diffusers(cn_model: ModelPatcher, lora_path: str):
+    lora_data = comfy.utils.load_torch_file(lora_path, safe_load=True)
+    unet_dtype = comfy.model_management.unet_dtype()
+    for key, value in lora_data.items():
+        lora_data[key] = value.to(unet_dtype)
+    diffusers_keys = comfy.utils.unet_to_diffusers(cn_model.model.state_dict())
+
+    #lora_data = comfy.model_detection.unet_config_from_diffusers_unet(lora_data, dtype=unet_dtype)
+
+
+
+    #key_map = comfy.lora.model_lora_keys_unet(cn_model.model, key_map)
+    lora_data = comfy.lora.load_lora(lora_data, to_load=diffusers_keys)
+
+    # TODO: detect if diffusers for sure? not sure if needed at this time, since cn loras are
+    # only used currently for LOOSEControl, and those are all in diffusers format
+    #unet_dtype = comfy.model_management.unet_dtype()
+    #lora_data = comfy.model_detection.unet_config_from_diffusers_unet(lora_data, unet_dtype)
+    return lora_data
+
+
+class ControlNetLoaderWithLoraAdvanced:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {
+            "required": {
+                "control_net_name": (folder_paths.get_filename_list("controlnet"), ),
+                "cn_lora_name": (folder_paths.get_filename_list("controlnet"), ),
+                "cn_lora_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001}),
+            },
+            "optional": {
+                "timestep_keyframe": ("TIMESTEP_KEYFRAME", ),
+            }
+        }
+
+    RETURN_TYPES = ("CONTROL_NET", )
+    FUNCTION = "load_controlnet"
+
+    CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/LOOSEControl"
+
+    def load_controlnet(self, control_net_name, cn_lora_name, cn_lora_strength: float,
+                        timestep_keyframe: TimestepKeyframeGroup=None
+                        ):
+        controlnet_path = folder_paths.get_full_path("controlnet", control_net_name)
+        controlnet: ControlNetAdvanced = load_controlnet(controlnet_path, timestep_keyframe)
+        if not isinstance(controlnet, ControlNetAdvanced):
+            raise ValueError(f"Type '{type(controlnet).__name__}' is not compatible with CN LoRA features at this time.")
+        # now, try to load CN LoRA
+        lora_path = folder_paths.get_full_path("controlnet", cn_lora_name)
+        lora_data = convert_cn_lora_from_diffusers(cn_model=controlnet.control_model_wrapped, lora_path=lora_path)
+        # apply patches to wrapped control_model
+        controlnet.control_model_wrapped.add_patches(lora_data, strength_patch=cn_lora_strength)
+        # all done
+        return (controlnet,)
diff --git a/ComfyUI-Advanced-ControlNet/adv_control/nodes_reference.py b/ComfyUI-Advanced-ControlNet/adv_control/nodes_reference.py
new file mode 100644
index 0000000000000000000000000000000000000000..fd0e4dc326a240f4dab04eb645db0ba04d10f5ef
--- /dev/null
+++ 
b/ComfyUI-Advanced-ControlNet/adv_control/nodes_reference.py @@ -0,0 +1,90 @@ +from torch import Tensor + +from nodes import VAEEncode +import comfy.utils +from comfy.sd import VAE + +from .control_reference import ReferenceAdvanced, ReferenceOptions, ReferenceType, ReferencePreprocWrapper + + +# node for ReferenceCN +class ReferenceControlNetNode: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "reference_type": (ReferenceType._LIST,), + "style_fidelity": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "ref_weight": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + }, + } + + RETURN_TYPES = ("CONTROL_NET", ) + FUNCTION = "load_controlnet" + + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/Reference" + + def load_controlnet(self, reference_type: str, style_fidelity: float, ref_weight: float): + ref_opts = ReferenceOptions.create_combo(reference_type=reference_type, style_fidelity=style_fidelity, ref_weight=ref_weight) + controlnet = ReferenceAdvanced(ref_opts=ref_opts, timestep_keyframes=None) + return (controlnet,) + + +class ReferenceControlFinetune: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "attn_style_fidelity": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "attn_ref_weight": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "attn_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "adain_style_fidelity": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "adain_ref_weight": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "adain_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + }, + } + + RETURN_TYPES = ("CONTROL_NET", ) + FUNCTION = "load_controlnet" + + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/Reference" + + def load_controlnet(self, + attn_style_fidelity: float, attn_ref_weight: float, attn_strength: float, + adain_style_fidelity: float, adain_ref_weight: float, adain_strength: float): + ref_opts = ReferenceOptions(reference_type=ReferenceType.ATTN_ADAIN, + attn_style_fidelity=attn_style_fidelity, attn_ref_weight=attn_ref_weight, attn_strength=attn_strength, + adain_style_fidelity=adain_style_fidelity, adain_ref_weight=adain_ref_weight, adain_strength=adain_strength) + controlnet = ReferenceAdvanced(ref_opts=ref_opts, timestep_keyframes=None) + return (controlnet,) + + +class ReferencePreprocessorNode: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE", ), + "vae": ("VAE", ), + "latent_size": ("LATENT", ), + } + } + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("proc_IMAGE",) + FUNCTION = "preprocess_images" + + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/Reference/preprocess" + + def preprocess_images(self, vae: VAE, image: Tensor, latent_size: Tensor): + # first, resize image to match latents + image = image.movedim(-1,1) + image = comfy.utils.common_upscale(image, latent_size["samples"].shape[3] * 8, latent_size["samples"].shape[2] * 8, 'nearest-exact', "center") + image = image.movedim(1,-1) + # then, vae encode + try: + image = vae.vae_encode_crop_pixels(image) + except Exception: + image = VAEEncode.vae_encode_crop_pixels(image) + encoded = vae.encode(image[:,:,:,:3]) + return (ReferencePreprocWrapper(condhint=encoded),) diff --git a/ComfyUI-Advanced-ControlNet/adv_control/nodes_sparsectrl.py b/ComfyUI-Advanced-ControlNet/adv_control/nodes_sparsectrl.py new file mode 100644 index 0000000000000000000000000000000000000000..4df32b0883e7256484f299a9ce85420487340cfb 
--- /dev/null +++ b/ComfyUI-Advanced-ControlNet/adv_control/nodes_sparsectrl.py @@ -0,0 +1,163 @@ +from torch import Tensor + +import folder_paths +from nodes import VAEEncode +import comfy.utils +from comfy.sd import VAE + +from .utils import TimestepKeyframeGroup +from .control_sparsectrl import SparseMethod, SparseIndexMethod, SparseSettings, SparseSpreadMethod, PreprocSparseRGBWrapper +from .control import load_sparsectrl, load_controlnet, ControlNetAdvanced, SparseCtrlAdvanced + + +# node for SparseCtrl loading +class SparseCtrlLoaderAdvanced: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "sparsectrl_name": (folder_paths.get_filename_list("controlnet"), ), + "use_motion": ("BOOLEAN", {"default": True}, ), + "motion_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "motion_scale": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + }, + "optional": { + "sparse_method": ("SPARSE_METHOD", ), + "tk_optional": ("TIMESTEP_KEYFRAME", ), + } + } + + RETURN_TYPES = ("CONTROL_NET", ) + FUNCTION = "load_controlnet" + + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/SparseCtrl" + + def load_controlnet(self, sparsectrl_name: str, use_motion: bool, motion_strength: float, motion_scale: float, sparse_method: SparseMethod=SparseSpreadMethod(), tk_optional: TimestepKeyframeGroup=None): + sparsectrl_path = folder_paths.get_full_path("controlnet", sparsectrl_name) + sparse_settings = SparseSettings(sparse_method=sparse_method, use_motion=use_motion, motion_strength=motion_strength, motion_scale=motion_scale) + sparsectrl = load_sparsectrl(sparsectrl_path, timestep_keyframe=tk_optional, sparse_settings=sparse_settings) + return (sparsectrl,) + + +class SparseCtrlMergedLoaderAdvanced: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "sparsectrl_name": (folder_paths.get_filename_list("controlnet"), ), + "control_net_name": (folder_paths.get_filename_list("controlnet"), ), + "use_motion": ("BOOLEAN", {"default": True}, ), + "motion_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "motion_scale": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + }, + "optional": { + "sparse_method": ("SPARSE_METHOD", ), + "tk_optional": ("TIMESTEP_KEYFRAME", ), + } + } + + RETURN_TYPES = ("CONTROL_NET", ) + FUNCTION = "load_controlnet" + + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/SparseCtrl/experimental" + + def load_controlnet(self, sparsectrl_name: str, control_net_name: str, use_motion: bool, motion_strength: float, motion_scale: float, sparse_method: SparseMethod=SparseSpreadMethod(), tk_optional: TimestepKeyframeGroup=None): + sparsectrl_path = folder_paths.get_full_path("controlnet", sparsectrl_name) + controlnet_path = folder_paths.get_full_path("controlnet", control_net_name) + sparse_settings = SparseSettings(sparse_method=sparse_method, use_motion=use_motion, motion_strength=motion_strength, motion_scale=motion_scale, merged=True) + # first, load normal controlnet + controlnet = load_controlnet(controlnet_path, timestep_keyframe=tk_optional) + # confirm that controlnet is ControlNetAdvanced + if controlnet is None or type(controlnet) != ControlNetAdvanced: + raise ValueError(f"controlnet_path must point to a normal ControlNet, but instead: {type(controlnet).__name__}") + # next, load sparsectrl, making sure to load motion portion + sparsectrl = load_sparsectrl(sparsectrl_path, timestep_keyframe=tk_optional, sparse_settings=SparseSettings.default()) + # now, combine state dicts + 
new_state_dict = controlnet.control_model.state_dict() + for key, value in sparsectrl.control_model.motion_holder.motion_wrapper.state_dict().items(): + new_state_dict[key] = value + # now, reload sparsectrl with real settings + sparsectrl = load_sparsectrl(sparsectrl_path, controlnet_data=new_state_dict, timestep_keyframe=tk_optional, sparse_settings=sparse_settings) + return (sparsectrl,) + + +class SparseIndexMethodNode: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "indexes": ("STRING", {"default": "0"}), + } + } + + RETURN_TYPES = ("SPARSE_METHOD",) + FUNCTION = "get_method" + + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/SparseCtrl" + + def get_method(self, indexes: str): + idxs = [] + unique_idxs = set() + # get indeces from string + str_idxs = [x.strip() for x in indexes.strip().split(",")] + for str_idx in str_idxs: + try: + idx = int(str_idx) + if idx in unique_idxs: + raise ValueError(f"'{idx}' is duplicated; indexes must be unique.") + idxs.append(idx) + unique_idxs.add(idx) + except ValueError: + raise ValueError(f"'{str_idx}' is not a valid integer index.") + if len(idxs) == 0: + raise ValueError(f"No indexes were listed in Sparse Index Method.") + return (SparseIndexMethod(idxs),) + + +class SparseSpreadMethodNode: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "spread": (SparseSpreadMethod.LIST,), + } + } + + RETURN_TYPES = ("SPARSE_METHOD",) + FUNCTION = "get_method" + + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/SparseCtrl" + + def get_method(self, spread: str): + return (SparseSpreadMethod(spread=spread),) + + +class RgbSparseCtrlPreprocessor: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE", ), + "vae": ("VAE", ), + "latent_size": ("LATENT", ), + } + } + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("proc_IMAGE",) + FUNCTION = "preprocess_images" + + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/SparseCtrl/preprocess" + + def preprocess_images(self, vae: VAE, image: Tensor, latent_size: Tensor): + # first, resize image to match latents + image = image.movedim(-1,1) + image = comfy.utils.common_upscale(image, latent_size["samples"].shape[3] * 8, latent_size["samples"].shape[2] * 8, 'nearest-exact', "center") + image = image.movedim(1,-1) + # then, vae encode + try: + image = vae.vae_encode_crop_pixels(image) + except Exception: + image = VAEEncode.vae_encode_crop_pixels(image) + encoded = vae.encode(image[:,:,:,:3]) + return (PreprocSparseRGBWrapper(condhint=encoded),) diff --git a/ComfyUI-Advanced-ControlNet/adv_control/nodes_weight.py b/ComfyUI-Advanced-ControlNet/adv_control/nodes_weight.py new file mode 100644 index 0000000000000000000000000000000000000000..7eb58d4ab9267dc416690b1ecd29f6965f302b84 --- /dev/null +++ b/ComfyUI-Advanced-ControlNet/adv_control/nodes_weight.py @@ -0,0 +1,224 @@ +from torch import Tensor +import torch +from .utils import TimestepKeyframe, TimestepKeyframeGroup, ControlWeights, get_properly_arranged_t2i_weights, linear_conversion +from .logger import logger + + +WEIGHTS_RETURN_NAMES = ("CN_WEIGHTS", "TK_SHORTCUT") + + +class DefaultWeights: + @classmethod + def INPUT_TYPES(s): + return { + } + + RETURN_TYPES = ("CONTROL_NET_WEIGHTS", "TIMESTEP_KEYFRAME",) + RETURN_NAMES = WEIGHTS_RETURN_NAMES + FUNCTION = "load_weights" + + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/weights" + + def load_weights(self): + weights = ControlWeights.default() + return (weights, TimestepKeyframeGroup.default(TimestepKeyframe(control_weights=weights))) + + +class ScaledSoftMaskedUniversalWeights: + @classmethod + def INPUT_TYPES(s): + 
return { + "required": { + "mask": ("MASK", ), + "min_base_multiplier": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}, ), + "max_base_multiplier": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001}, ), + #"lock_min": ("BOOLEAN", {"default": False}, ), + #"lock_max": ("BOOLEAN", {"default": False}, ), + }, + "optional": { + "uncond_multiplier": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}, ), + } + } + + RETURN_TYPES = ("CONTROL_NET_WEIGHTS", "TIMESTEP_KEYFRAME",) + RETURN_NAMES = WEIGHTS_RETURN_NAMES + FUNCTION = "load_weights" + + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/weights" + + def load_weights(self, mask: Tensor, min_base_multiplier: float, max_base_multiplier: float, lock_min=False, lock_max=False, + uncond_multiplier: float=1.0): + # normalize mask + mask = mask.clone() + x_min = 0.0 if lock_min else mask.min() + x_max = 1.0 if lock_max else mask.max() + if x_min == x_max: + mask = torch.ones_like(mask) * max_base_multiplier + else: + mask = linear_conversion(mask, x_min, x_max, min_base_multiplier, max_base_multiplier) + weights = ControlWeights.universal_mask(weight_mask=mask, uncond_multiplier=uncond_multiplier) + return (weights, TimestepKeyframeGroup.default(TimestepKeyframe(control_weights=weights))) + + +class ScaledSoftUniversalWeights: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "base_multiplier": ("FLOAT", {"default": 0.825, "min": 0.0, "max": 1.0, "step": 0.001}, ), + "flip_weights": ("BOOLEAN", {"default": False}), + }, + "optional": { + "uncond_multiplier": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}, ), + } + } + + RETURN_TYPES = ("CONTROL_NET_WEIGHTS", "TIMESTEP_KEYFRAME",) + RETURN_NAMES = WEIGHTS_RETURN_NAMES + FUNCTION = "load_weights" + + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/weights" + + def load_weights(self, base_multiplier, flip_weights, uncond_multiplier: float=1.0): + weights = ControlWeights.universal(base_multiplier=base_multiplier, flip_weights=flip_weights, uncond_multiplier=uncond_multiplier) + return (weights, TimestepKeyframeGroup.default(TimestepKeyframe(control_weights=weights))) + + +class SoftControlNetWeights: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "weight_00": ("FLOAT", {"default": 0.09941396206337118, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_01": ("FLOAT", {"default": 0.12050177219802567, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_02": ("FLOAT", {"default": 0.14606275417942507, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_03": ("FLOAT", {"default": 0.17704576264172736, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_04": ("FLOAT", {"default": 0.214600924414215, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_05": ("FLOAT", {"default": 0.26012233262329093, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_06": ("FLOAT", {"default": 0.3152997971191405, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_07": ("FLOAT", {"default": 0.3821815722656249, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_08": ("FLOAT", {"default": 0.4632503906249999, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_09": ("FLOAT", {"default": 0.561515625, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_10": ("FLOAT", {"default": 0.6806249999999999, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_11": ("FLOAT", {"default": 0.825, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_12": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "flip_weights": ("BOOLEAN", 
{"default": False}), + }, + "optional": { + "uncond_multiplier": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}, ), + } + } + + RETURN_TYPES = ("CONTROL_NET_WEIGHTS", "TIMESTEP_KEYFRAME",) + RETURN_NAMES = WEIGHTS_RETURN_NAMES + FUNCTION = "load_weights" + + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/weights/ControlNet" + + def load_weights(self, weight_00, weight_01, weight_02, weight_03, weight_04, weight_05, weight_06, + weight_07, weight_08, weight_09, weight_10, weight_11, weight_12, flip_weights, + uncond_multiplier: float=1.0): + weights = [weight_00, weight_01, weight_02, weight_03, weight_04, weight_05, weight_06, + weight_07, weight_08, weight_09, weight_10, weight_11, weight_12] + weights = ControlWeights.controlnet(weights, flip_weights=flip_weights, uncond_multiplier=uncond_multiplier) + return (weights, TimestepKeyframeGroup.default(TimestepKeyframe(control_weights=weights))) + + +class CustomControlNetWeights: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "weight_00": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_01": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_02": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_03": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_04": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_05": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_06": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_07": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_08": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_09": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_10": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_11": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_12": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "flip_weights": ("BOOLEAN", {"default": False}), + }, + "optional": { + "uncond_multiplier": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}, ), + } + } + + RETURN_TYPES = ("CONTROL_NET_WEIGHTS", "TIMESTEP_KEYFRAME",) + RETURN_NAMES = WEIGHTS_RETURN_NAMES + FUNCTION = "load_weights" + + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/weights/ControlNet" + + def load_weights(self, weight_00, weight_01, weight_02, weight_03, weight_04, weight_05, weight_06, + weight_07, weight_08, weight_09, weight_10, weight_11, weight_12, flip_weights, + uncond_multiplier: float=1.0): + weights = [weight_00, weight_01, weight_02, weight_03, weight_04, weight_05, weight_06, + weight_07, weight_08, weight_09, weight_10, weight_11, weight_12] + weights = ControlWeights.controlnet(weights, flip_weights=flip_weights, uncond_multiplier=uncond_multiplier) + return (weights, TimestepKeyframeGroup.default(TimestepKeyframe(control_weights=weights))) + + +class SoftT2IAdapterWeights: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "weight_00": ("FLOAT", {"default": 0.25, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_01": ("FLOAT", {"default": 0.62, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_02": ("FLOAT", {"default": 0.825, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_03": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "flip_weights": ("BOOLEAN", 
{"default": False}), + }, + "optional": { + "uncond_multiplier": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}, ), + } + } + + RETURN_TYPES = ("CONTROL_NET_WEIGHTS", "TIMESTEP_KEYFRAME",) + RETURN_NAMES = WEIGHTS_RETURN_NAMES + FUNCTION = "load_weights" + + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/weights/T2IAdapter" + + def load_weights(self, weight_00, weight_01, weight_02, weight_03, flip_weights, + uncond_multiplier: float=1.0): + weights = [weight_00, weight_01, weight_02, weight_03] + weights = get_properly_arranged_t2i_weights(weights) + weights = ControlWeights.t2iadapter(weights, flip_weights=flip_weights, uncond_multiplier=uncond_multiplier) + return (weights, TimestepKeyframeGroup.default(TimestepKeyframe(control_weights=weights))) + + +class CustomT2IAdapterWeights: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "weight_00": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_01": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_02": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "weight_03": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}, ), + "flip_weights": ("BOOLEAN", {"default": False}), + }, + "optional": { + "uncond_multiplier": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}, ), + } + } + + RETURN_TYPES = ("CONTROL_NET_WEIGHTS", "TIMESTEP_KEYFRAME",) + RETURN_NAMES = WEIGHTS_RETURN_NAMES + FUNCTION = "load_weights" + + CATEGORY = "Adv-ControlNet 🛂🅐🅒🅝/weights/T2IAdapter" + + def load_weights(self, weight_00, weight_01, weight_02, weight_03, flip_weights, + uncond_multiplier: float=1.0): + weights = [weight_00, weight_01, weight_02, weight_03] + weights = get_properly_arranged_t2i_weights(weights) + weights = ControlWeights.t2iadapter(weights, flip_weights=flip_weights, uncond_multiplier=uncond_multiplier) + return (weights, TimestepKeyframeGroup.default(TimestepKeyframe(control_weights=weights))) diff --git a/ComfyUI-Advanced-ControlNet/adv_control/utils.py b/ComfyUI-Advanced-ControlNet/adv_control/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..9e0beda1d569af04ec8125872fbe78df1fddcec0 --- /dev/null +++ b/ComfyUI-Advanced-ControlNet/adv_control/utils.py @@ -0,0 +1,915 @@ +from copy import deepcopy +from typing import Callable, Union +import torch +from torch import Tensor +import torch.nn.functional +import numpy as np +import math + +import comfy.ops +import comfy.utils +import comfy.sample +import comfy.samplers +import comfy.model_base + +from comfy.controlnet import ControlBase, broadcast_image_to +from comfy.model_patcher import ModelPatcher + +from .logger import logger + +BIGMIN = -(2**53-1) +BIGMAX = (2**53-1) + +def load_torch_file_with_dict_factory(controlnet_data: dict[str, Tensor], orig_load_torch_file: Callable): + def load_torch_file_with_dict(*args, **kwargs): + # immediately restore load_torch_file to original version + comfy.utils.load_torch_file = orig_load_torch_file + return controlnet_data + return load_torch_file_with_dict + +# wrapping len function so that it will save the thing len is trying to get the length of; +# this will be assumed to be the cond_or_uncond variable; +# automatically restores len to original function after running +def wrapper_len_factory(orig_len: Callable) -> Callable: + def wrapper_len(*args, **kwargs): + cond_or_uncond = args[0] + real_length = orig_len(*args, **kwargs) + if real_length > 0 and type(cond_or_uncond) == list and 
(cond_or_uncond[0] in [0, 1]): + try: + to_return = IntWithCondOrUncond(real_length) + setattr(to_return, "cond_or_uncond", cond_or_uncond) + return to_return + finally: + __builtins__["len"] = orig_len + else: + return real_length + return wrapper_len + +# wrapping cond_cat function so that it will wrap around len function to get cond_or_uncond variable value +# from comfy.samplers.calc_conds_batch +def wrapper_cond_cat_factory(orig_cond_cat: Callable): + def wrapper_cond_cat(*args, **kwargs): + __builtins__["len"] = wrapper_len_factory(__builtins__["len"]) + return orig_cond_cat(*args, **kwargs) + return wrapper_cond_cat +orig_cond_cat = comfy.samplers.cond_cat +comfy.samplers.cond_cat = wrapper_cond_cat_factory(orig_cond_cat) + + +# wrapping apply_model so that len function will be cleaned up fairly soon after being injected +def apply_model_uncond_cleanup_factory(orig_apply_model, orig_len): + def apply_model_uncond_cleanup_wrapper(self, *args, **kwargs): + __builtins__["len"] = orig_len + return orig_apply_model(self, *args, **kwargs) + return apply_model_uncond_cleanup_wrapper +global_orig_len = __builtins__["len"] +orig_apply_model = comfy.model_base.BaseModel.apply_model +comfy.model_base.BaseModel.apply_model = apply_model_uncond_cleanup_factory(orig_apply_model, global_orig_len) + + +def uncond_multiplier_check_cn_sample_factory(orig_comfy_sample: Callable, is_custom=False) -> Callable: + def contains_uncond_multiplier(control: Union[ControlBase, 'AdvancedControlBase']): + if control is None: + return False + if not isinstance(control, AdvancedControlBase): + return contains_uncond_multiplier(control.previous_controlnet) + # check if weights_override has an uncond_multiplier + if control.weights_override is not None and control.weights_override.has_uncond_multiplier: + return True + # check if any timestep_keyframes have an uncond_multiplier on their weights + if control.timestep_keyframes is not None: + for tk in control.timestep_keyframes.keyframes: + if tk.has_control_weights() and tk.control_weights.has_uncond_multiplier: + return True + return contains_uncond_multiplier(control.previous_controlnet) + + # check if positive or negative conds contain Adv. 
Cns that use multiply_negative on weights + def uncond_multiplier_check_cn_sample(model: ModelPatcher, *args, **kwargs): + positive = args[-3] + negative = args[-2] + has_uncond_multiplier = False + if positive is not None: + for cond in positive: + if "control" in cond[1]: + has_uncond_multiplier = contains_uncond_multiplier(cond[1]["control"]) + if has_uncond_multiplier: + break + if negative is not None and not has_uncond_multiplier: + for cond in negative: + if "control" in cond[1]: + has_uncond_multiplier = contains_uncond_multiplier(cond[1]["control"]) + if has_uncond_multiplier: + break + try: + # if uncond_multiplier found, continue to use wrapped version of function + if has_uncond_multiplier: + return orig_comfy_sample(model, *args, **kwargs) + # otherwise, use original version of function to prevent even the smallest of slowdowns (0.XX%) + try: + wrapped_cond_cat = comfy.samplers.cond_cat + comfy.samplers.cond_cat = orig_cond_cat + return orig_comfy_sample(model, *args, **kwargs) + finally: + comfy.samplers.cond_cat = wrapped_cond_cat + finally: + # make sure len function is unwrapped by the time sampling is done, just in case + __builtins__["len"] = global_orig_len + return uncond_multiplier_check_cn_sample +# inject sample functions +comfy.sample.sample = uncond_multiplier_check_cn_sample_factory(comfy.sample.sample) +comfy.sample.sample_custom = uncond_multiplier_check_cn_sample_factory(comfy.sample.sample_custom, is_custom=True) + + +class IntWithCondOrUncond(int): + def __new__(cls, *args, **kwargs): + return super(IntWithCondOrUncond, cls).__new__(cls, *args, **kwargs) + + def __init__(self, *args, **kwargs): + super().__init__() + self.cond_or_uncond = None + + + +def get_properly_arranged_t2i_weights(initial_weights: list[float]): + new_weights = [] + new_weights.extend([initial_weights[0]]*3) + new_weights.extend([initial_weights[1]]*3) + new_weights.extend([initial_weights[2]]*3) + new_weights.extend([initial_weights[3]]*3) + return new_weights + + +class ControlWeightType: + DEFAULT = "default" + UNIVERSAL = "universal" + T2IADAPTER = "t2iadapter" + CONTROLNET = "controlnet" + CONTROLLORA = "controllora" + CONTROLLLLITE = "controllllite" + SVD_CONTROLNET = "svd_controlnet" + SPARSECTRL = "sparsectrl" + + +class ControlWeights: + def __init__(self, weight_type: str, base_multiplier: float=1.0, flip_weights: bool=False, weights: list[float]=None, weight_mask: Tensor=None, + uncond_multiplier=1.0): + self.weight_type = weight_type + self.base_multiplier = base_multiplier + self.flip_weights = flip_weights + self.weights = weights + if self.weights is not None and self.flip_weights: + self.weights.reverse() + self.weight_mask = weight_mask + self.uncond_multiplier = float(uncond_multiplier) + self.has_uncond_multiplier = not math.isclose(self.uncond_multiplier, 1.0) + + def get(self, idx: int, default=1.0) -> Union[float, Tensor]: + # if weights is not none, return index + if self.weights is not None: + # this implies weights list is not aligning with expectations - will need to adjust code + if idx >= len(self.weights): + return default + return self.weights[idx] + return 1.0 + + def copy_with_new_weights(self, new_weights: list[float]): + return ControlWeights(weight_type=self.weight_type, base_multiplier=self.base_multiplier, flip_weights=self.flip_weights, + weights=new_weights, weight_mask=self.weight_mask, uncond_multiplier=self.uncond_multiplier) + + @classmethod + def default(cls): + return cls(ControlWeightType.DEFAULT) + + @classmethod + def universal(cls, 
base_multiplier: float, flip_weights: bool=False, uncond_multiplier: float=1.0): + return cls(ControlWeightType.UNIVERSAL, base_multiplier=base_multiplier, flip_weights=flip_weights, uncond_multiplier=uncond_multiplier) + + @classmethod + def universal_mask(cls, weight_mask: Tensor, uncond_multiplier: float=1.0): + return cls(ControlWeightType.UNIVERSAL, weight_mask=weight_mask, uncond_multiplier=uncond_multiplier) + + @classmethod + def t2iadapter(cls, weights: list[float]=None, flip_weights: bool=False, uncond_multiplier: float=1.0): + if weights is None: + weights = [1.0]*12 + return cls(ControlWeightType.T2IADAPTER, weights=weights,flip_weights=flip_weights, uncond_multiplier=uncond_multiplier) + + @classmethod + def controlnet(cls, weights: list[float]=None, flip_weights: bool=False, uncond_multiplier: float=1.0): + if weights is None: + weights = [1.0]*13 + return cls(ControlWeightType.CONTROLNET, weights=weights, flip_weights=flip_weights, uncond_multiplier=uncond_multiplier) + + @classmethod + def controllora(cls, weights: list[float]=None, flip_weights: bool=False, uncond_multiplier: float=1.0): + if weights is None: + weights = [1.0]*10 + return cls(ControlWeightType.CONTROLLORA, weights=weights, flip_weights=flip_weights, uncond_multiplier=uncond_multiplier) + + @classmethod + def controllllite(cls, weights: list[float]=None, flip_weights: bool=False, uncond_multiplier: float=1.0): + if weights is None: + # TODO: make this have a real value + weights = [1.0]*200 + return cls(ControlWeightType.CONTROLLLLITE, weights=weights, flip_weights=flip_weights, uncond_multiplier=uncond_multiplier) + + +class StrengthInterpolation: + LINEAR = "linear" + EASE_IN = "ease-in" + EASE_OUT = "ease-out" + EASE_IN_OUT = "ease-in-out" + NONE = "none" + + _LIST = [LINEAR, EASE_IN, EASE_OUT, EASE_IN_OUT] + _LIST_WITH_NONE = [LINEAR, EASE_IN, EASE_OUT, EASE_IN_OUT, NONE] + + @classmethod + def get_weights(cls, num_from: float, num_to: float, length: int, method: str, reverse=False): + diff = num_to - num_from + if method == cls.LINEAR: + weights = torch.linspace(num_from, num_to, length) + elif method == cls.EASE_IN: + index = torch.linspace(0, 1, length) + weights = diff * np.power(index, 2) + num_from + elif method == cls.EASE_OUT: + index = torch.linspace(0, 1, length) + weights = diff * (1 - np.power(1 - index, 2)) + num_from + elif method == cls.EASE_IN_OUT: + index = torch.linspace(0, 1, length) + weights = diff * ((1 - np.cos(index * np.pi)) / 2) + num_from + else: + raise ValueError(f"Unrecognized interpolation method '{method}'.") + if reverse: + weights = weights.flip(dims=(0,)) + return weights + + +class LatentKeyframe: + def __init__(self, batch_index: int, strength: float) -> None: + self.batch_index = batch_index + self.strength = strength + + +# always maintain sorted state (by batch_index of LatentKeyframe) +class LatentKeyframeGroup: + def __init__(self) -> None: + self.keyframes: list[LatentKeyframe] = [] + + def add(self, keyframe: LatentKeyframe) -> None: + added = False + # replace existing keyframe if same batch_index + for i in range(len(self.keyframes)): + if self.keyframes[i].batch_index == keyframe.batch_index: + self.keyframes[i] = keyframe + added = True + break + if not added: + self.keyframes.append(keyframe) + self.keyframes.sort(key=lambda k: k.batch_index) + + def get_index(self, index: int) -> Union[LatentKeyframe, None]: + try: + return self.keyframes[index] + except IndexError: + return None + + def __getitem__(self, index) -> LatentKeyframe: + return 
self.keyframes[index] + + def is_empty(self) -> bool: + return len(self.keyframes) == 0 + + def clone(self) -> 'LatentKeyframeGroup': + cloned = LatentKeyframeGroup() + for tk in self.keyframes: + cloned.add(tk) + return cloned + + +class TimestepKeyframe: + def __init__(self, + start_percent: float = 0.0, + strength: float = 1.0, + control_weights: ControlWeights = None, + latent_keyframes: LatentKeyframeGroup = None, + null_latent_kf_strength: float = 0.0, + inherit_missing: bool = True, + guarantee_steps: int = 1, + mask_hint_orig: Tensor = None) -> None: + self.start_percent = float(start_percent) + self.start_t = 999999999.9 + self.strength = strength + self.control_weights = control_weights + self.latent_keyframes = latent_keyframes + self.null_latent_kf_strength = null_latent_kf_strength + self.inherit_missing = inherit_missing + self.guarantee_steps = guarantee_steps + self.mask_hint_orig = mask_hint_orig + + def has_control_weights(self): + return self.control_weights is not None + + def has_latent_keyframes(self): + return self.latent_keyframes is not None + + def has_mask_hint(self): + return self.mask_hint_orig is not None + + + @staticmethod + def default() -> 'TimestepKeyframe': + return TimestepKeyframe(start_percent=0.0, guarantee_steps=0) + + +# always maintain sorted state (by start_percent of TimestepKeyFrame) +class TimestepKeyframeGroup: + def __init__(self) -> None: + self.keyframes: list[TimestepKeyframe] = [] + self.keyframes.append(TimestepKeyframe.default()) + + def add(self, keyframe: TimestepKeyframe) -> None: + # add to end of list, then sort + self.keyframes.append(keyframe) + self.keyframes = get_sorted_list_via_attr(self.keyframes, attr="start_percent") + + def get_index(self, index: int) -> Union[TimestepKeyframe, None]: + try: + return self.keyframes[index] + except IndexError: + return None + + def has_index(self, index: int) -> int: + return index >=0 and index < len(self.keyframes) + + def __getitem__(self, index) -> TimestepKeyframe: + return self.keyframes[index] + + def __len__(self) -> int: + return len(self.keyframes) + + def is_empty(self) -> bool: + return len(self.keyframes) == 0 + + def clone(self) -> 'TimestepKeyframeGroup': + cloned = TimestepKeyframeGroup() + # already sorted, so don't use add function to make cloning quicker + for tk in self.keyframes: + cloned.keyframes.append(tk) + return cloned + + @classmethod + def default(cls, keyframe: TimestepKeyframe) -> 'TimestepKeyframeGroup': + group = cls() + group.keyframes[0] = keyframe + return group + + +class AbstractPreprocWrapper: + error_msg = "Invalid use of [InsertHere] output. The output of [InsertHere] preprocessor is NOT a usual image, but a latent pretending to be an image - you must connect the output directly to an Apply ControlNet node (advanced or otherwise). It cannot be used for anything else that accepts IMAGE input." 
+ def __init__(self, condhint: Tensor): + self.condhint = condhint + + def movedim(self, *args, **kwargs): + return self + + def __getattr__(self, *args, **kwargs): + raise AttributeError(self.error_msg) + + def __setattr__(self, name, value): + if name != "condhint": + raise AttributeError(self.error_msg) + super().__setattr__(name, value) + + def __iter__(self, *args, **kwargs): + raise AttributeError(self.error_msg) + + def __next__(self, *args, **kwargs): + raise AttributeError(self.error_msg) + + def __len__(self, *args, **kwargs): + raise AttributeError(self.error_msg) + + def __getitem__(self, *args, **kwargs): + raise AttributeError(self.error_msg) + + def __setitem__(self, *args, **kwargs): + raise AttributeError(self.error_msg) + + +# depending on model, AnimateDiff may inject into GroupNorm, so make sure GroupNorm will be clean +class disable_weight_init_clean_groupnorm(comfy.ops.disable_weight_init): + class GroupNorm(comfy.ops.disable_weight_init.GroupNorm): + def forward_comfy_cast_weights(self, input): + weight, bias = comfy.ops.cast_bias_weight(self, input) + return torch.nn.functional.group_norm(input, self.num_groups, weight, bias, self.eps) + + def forward(self, input): + if self.comfy_cast_weights: + return self.forward_comfy_cast_weights(input) + else: + return torch.nn.functional.group_norm(input, self.num_groups, self.weight, self.bias, self.eps) + +class manual_cast_clean_groupnorm(comfy.ops.manual_cast): + class GroupNorm(disable_weight_init_clean_groupnorm.GroupNorm): + comfy_cast_weights = True + + +# adapted from comfy/sample.py +def prepare_mask_batch(mask: Tensor, shape: Tensor, multiplier: int=1, match_dim1=False): + mask = mask.clone() + mask = torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(shape[2]*multiplier, shape[3]*multiplier), mode="bilinear") + if match_dim1: + mask = torch.cat([mask] * shape[1], dim=1) + return mask + + +# applies min-max normalization, from: +# https://stackoverflow.com/questions/68791508/min-max-normalization-of-a-tensor-in-pytorch +def normalize_min_max(x: Tensor, new_min = 0.0, new_max = 1.0): + x_min, x_max = x.min(), x.max() + return (((x - x_min)/(x_max - x_min)) * (new_max - new_min)) + new_min + +def linear_conversion(x, x_min=0.0, x_max=1.0, new_min=0.0, new_max=1.0): + return (((x - x_min)/(x_max - x_min)) * (new_max - new_min)) + new_min + + +def broadcast_image_to_full(tensor, target_batch_size, batched_number, except_one=True): + current_batch_size = tensor.shape[0] + #print(current_batch_size, target_batch_size) + if except_one and current_batch_size == 1: + return tensor + + per_batch = target_batch_size // batched_number + tensor = tensor[:per_batch] + + if per_batch > tensor.shape[0]: + tensor = torch.cat([tensor] * (per_batch // tensor.shape[0]) + [tensor[:(per_batch % tensor.shape[0])]], dim=0) + + current_batch_size = tensor.shape[0] + if current_batch_size == target_batch_size: + return tensor + else: + return torch.cat([tensor] * batched_number, dim=0) + + +# from https://stackoverflow.com/a/24621200 +def deepcopy_with_sharing(obj, shared_attribute_names, memo=None): + ''' + Deepcopy an object, except for a given list of attributes, which should + be shared between the original object and its copy. + + obj is some object + shared_attribute_names: A list of strings identifying the attributes that + should be shared between the original and its copy. + memo is the dictionary passed into __deepcopy__. Ignore this argument if + not calling from within __deepcopy__. 
+ ''' + assert isinstance(shared_attribute_names, (list, tuple)) + + shared_attributes = {k: getattr(obj, k) for k in shared_attribute_names} + + if hasattr(obj, '__deepcopy__'): + # Do hack to prevent infinite recursion in call to deepcopy + deepcopy_method = obj.__deepcopy__ + obj.__deepcopy__ = None + + for attr in shared_attribute_names: + del obj.__dict__[attr] + + clone = deepcopy(obj) + + for attr, val in shared_attributes.items(): + setattr(obj, attr, val) + setattr(clone, attr, val) + + if hasattr(obj, '__deepcopy__'): + # Undo hack + obj.__deepcopy__ = deepcopy_method + del clone.__deepcopy__ + + return clone + + +def get_sorted_list_via_attr(objects: list, attr: str) -> list: + if not objects: + return objects + elif len(objects) <= 1: + return [x for x in objects] + # now that we know we have to sort, do it following these rules: + # a) if objects have same value of attribute, maintain their relative order + # b) perform sorting of the groups of objects with same attributes + unique_attrs = {} + for o in objects: + val_attr = getattr(o, attr) + attr_list: list = unique_attrs.get(val_attr, list()) + attr_list.append(o) + if val_attr not in unique_attrs: + unique_attrs[val_attr] = attr_list + # now that we have the unique attr values grouped together in relative order, sort them by key + sorted_attrs = dict(sorted(unique_attrs.items())) + # now flatten out the dict into a list to return + sorted_list = [] + for object_list in sorted_attrs.values(): + sorted_list.extend(object_list) + return sorted_list + + +class WeightTypeException(TypeError): + "Raised when weight not compatible with AdvancedControlBase object" + pass + + +class AdvancedControlBase: + def __init__(self, base: ControlBase, timestep_keyframes: TimestepKeyframeGroup, weights_default: ControlWeights, require_model=False): + self.base = base + self.compatible_weights = [ControlWeightType.UNIVERSAL] + self.add_compatible_weight(weights_default.weight_type) + # mask for which parts of controlnet output to keep + self.mask_cond_hint_original = None + self.mask_cond_hint = None + self.tk_mask_cond_hint_original = None + self.tk_mask_cond_hint = None + self.weight_mask_cond_hint = None + # actual index values + self.sub_idxs = None + self.full_latent_length = 0 + self.context_length = 0 + # timesteps + self.t: Tensor = None + self.batched_number: Union[int, IntWithCondOrUncond] = None + self.batch_size: int = 0 + # weights + override + self.weights: ControlWeights = None + self.weights_default: ControlWeights = weights_default + self.weights_override: ControlWeights = None + # latent keyframe + override + self.latent_keyframes: LatentKeyframeGroup = None + self.latent_keyframe_override: LatentKeyframeGroup = None + # initialize timestep_keyframes + self.set_timestep_keyframes(timestep_keyframes) + # override some functions + self.get_control = self.get_control_inject + self.control_merge = self.control_merge_inject + self.pre_run = self.pre_run_inject + self.cleanup = self.cleanup_inject + self.set_previous_controlnet = self.set_previous_controlnet_inject + # require model to be passed into Apply Advanced ControlNet 🛂🅐🅒🅝 node + self.require_model = require_model + # disarm - when set to False, used to force usage of Apply Advanced ControlNet 🛂🅐🅒🅝 node (which will set it to True) + self.disarmed = not require_model + + def patch_model(self, model: ModelPatcher): + pass + + def add_compatible_weight(self, control_weight_type: str): + self.compatible_weights.append(control_weight_type) + + def verify_all_weights(self, 
throw_error=True): + # first, check if override exists - if so, only need to check the override + if self.weights_override is not None: + if self.weights_override.weight_type not in self.compatible_weights: + msg = f"Weight override is type {self.weights_override.weight_type}, but loaded {type(self).__name__}" + \ + f"only supports {self.compatible_weights} weights." + raise WeightTypeException(msg) + # otherwise, check all timestep keyframe weights + else: + for tk in self.timestep_keyframes.keyframes: + if tk.has_control_weights() and tk.control_weights.weight_type not in self.compatible_weights: + msg = f"Weight on Timestep Keyframe with start_percent={tk.start_percent} is type" + \ + f"{tk.control_weights.weight_type}, but loaded {type(self).__name__} only supports {self.compatible_weights} weights." + raise WeightTypeException(msg) + + def set_timestep_keyframes(self, timestep_keyframes: TimestepKeyframeGroup): + self.timestep_keyframes = timestep_keyframes if timestep_keyframes else TimestepKeyframeGroup() + # prepare first timestep_keyframe related stuff + self._current_timestep_keyframe = None + self._current_timestep_index = -1 + self._current_used_steps = 0 + self.weights = None + self.latent_keyframes = None + + def prepare_current_timestep(self, t: Tensor, batched_number: int): + self.t = float(t[0]) + self.batched_number = batched_number + self.batch_size = len(t) + # get current step percent + curr_t: float = self.t + prev_index = self._current_timestep_index + # if met guaranteed steps (or no current keyframe), look for next keyframe in case need to switch + if self._current_timestep_keyframe is None or self._current_used_steps >= self._current_timestep_keyframe.guarantee_steps: + # if has next index, loop through and see if need to switch + if self.timestep_keyframes.has_index(self._current_timestep_index+1): + for i in range(self._current_timestep_index+1, len(self.timestep_keyframes)): + eval_tk = self.timestep_keyframes[i] + # check if start percent is less or equal to curr_t + if eval_tk.start_t >= curr_t: + self._current_timestep_index = i + self._current_timestep_keyframe = eval_tk + self._current_used_steps = 0 + # keep track of control weights, latent keyframes, and masks, + # accounting for inherit_missing + if self._current_timestep_keyframe.has_control_weights(): + self.weights = self._current_timestep_keyframe.control_weights + elif not self._current_timestep_keyframe.inherit_missing: + self.weights = self.weights_default + if self._current_timestep_keyframe.has_latent_keyframes(): + self.latent_keyframes = self._current_timestep_keyframe.latent_keyframes + elif not self._current_timestep_keyframe.inherit_missing: + self.latent_keyframes = None + if self._current_timestep_keyframe.has_mask_hint(): + self.tk_mask_cond_hint_original = self._current_timestep_keyframe.mask_hint_orig + elif not self._current_timestep_keyframe.inherit_missing: + del self.tk_mask_cond_hint_original + self.tk_mask_cond_hint_original = None + # if guarantee_steps greater than zero, stop searching for other keyframes + if self._current_timestep_keyframe.guarantee_steps > 0: + break + # if eval_tk is outside of percent range, stop looking further + else: + break + + # update steps current keyframe is used + self._current_used_steps += 1 + # if index changed, apply overrides + if prev_index != self._current_timestep_index: + if self.weights_override is not None: + self.weights = self.weights_override + if self.latent_keyframe_override is not None: + self.latent_keyframes = 
self.latent_keyframe_override + + # make sure weights and latent_keyframes are in a workable state + # Note: each AdvancedControlBase should create their own get_universal_weights class + self.prepare_weights() + + def prepare_weights(self): + if self.weights is None or self.weights.weight_type == ControlWeightType.DEFAULT: + self.weights = self.weights_default + elif self.weights.weight_type == ControlWeightType.UNIVERSAL: + # if universal and weight_mask present, no need to convert + if self.weights.weight_mask is not None: + return + self.weights = self.get_universal_weights() + + def get_universal_weights(self) -> ControlWeights: + return self.weights + + def set_cond_hint_mask(self, mask_hint): + self.mask_cond_hint_original = mask_hint + return self + + def pre_run_inject(self, model, percent_to_timestep_function): + self.base.pre_run(model, percent_to_timestep_function) + self.pre_run_advanced(model, percent_to_timestep_function) + + def pre_run_advanced(self, model, percent_to_timestep_function): + # for each timestep keyframe, calculate the start_t + for tk in self.timestep_keyframes.keyframes: + tk.start_t = percent_to_timestep_function(tk.start_percent) + # clear variables + self.cleanup_advanced() + + def set_previous_controlnet_inject(self, *args, **kwargs): + to_return = self.base.set_previous_controlnet(*args, **kwargs) + if not self.disarmed: + raise Exception(f"Type '{type(self).__name__}' must be used with Apply Advanced ControlNet 🛂🅐🅒🅝 node (with model_optional passed in); otherwise, it will not work.") + return to_return + + def disarm(self): + self.disarmed = True + + def should_run(self): + if math.isclose(self.strength, 0.0) or math.isclose(self._current_timestep_keyframe.strength, 0.0): + return False + if self.timestep_range is not None: + if self.t > self.timestep_range[0] or self.t < self.timestep_range[1]: + return False + return True + + def get_control_inject(self, x_noisy, t, cond, batched_number): + # prepare timestep and everything related + self.prepare_current_timestep(t=t, batched_number=batched_number) + # if should not perform any actions for the controlnet, exit without doing any work + if self.strength == 0.0 or self._current_timestep_keyframe.strength == 0.0: + return self.default_control_actions(x_noisy, t, cond, batched_number) + # otherwise, perform normal function + return self.get_control_advanced(x_noisy, t, cond, batched_number) + + def get_control_advanced(self, x_noisy, t, cond, batched_number): + return self.default_control_actions(x_noisy, t, cond, batched_number) + + def default_control_actions(self, x_noisy, t, cond, batched_number): + control_prev = None + if self.previous_controlnet is not None: + control_prev = self.previous_controlnet.get_control(x_noisy, t, cond, batched_number) + return control_prev + + def calc_weight(self, idx: int, x: Tensor, layers: int) -> Union[float, Tensor]: + if self.weights.weight_mask is not None: + # prepare weight mask + self.prepare_weight_mask_cond_hint(x, self.batched_number) + # adjust mask for current layer and return + return torch.pow(self.weight_mask_cond_hint, self.get_calc_pow(idx=idx, layers=layers)) + return self.weights.get(idx=idx) + + def get_calc_pow(self, idx: int, layers: int) -> int: + return (layers-1)-idx + + def calc_latent_keyframe_mults(self, x: Tensor, batched_number: int) -> Tensor: + # apply strengths, and get batch indeces to null out + # AKA latents that should not be influenced by ControlNet + final_mults = [1.0] * x.shape[0] + if self.latent_keyframes: + latent_count = 
x.shape[0] // batched_number + indeces_to_null = set(range(latent_count)) + mapped_indeces = None + # if expecting subdivision, will need to translate between subset and actual idx values + if self.sub_idxs: + mapped_indeces = {} + for i, actual in enumerate(self.sub_idxs): + mapped_indeces[actual] = i + for keyframe in self.latent_keyframes: + real_index = keyframe.batch_index + # if negative, count from end + if real_index < 0: + real_index += latent_count if self.sub_idxs is None else self.full_latent_length + + # if not mapping indeces, what you see is what you get + if mapped_indeces is None: + if real_index in indeces_to_null: + indeces_to_null.remove(real_index) + # otherwise, see if batch_index is even included in this set of latents + else: + real_index = mapped_indeces.get(real_index, None) + if real_index is None: + continue + indeces_to_null.remove(real_index) + + # if real_index is outside the bounds of latents, don't apply + if real_index >= latent_count or real_index < 0: + continue + + # apply strength for each batched cond/uncond + for b in range(batched_number): + final_mults[(latent_count*b)+real_index] = keyframe.strength + # null them out by multiplying by null_latent_kf_strength + for batch_index in indeces_to_null: + # apply null for each batched cond/uncond + for b in range(batched_number): + final_mults[(latent_count*b)+batch_index] = self._current_timestep_keyframe.null_latent_kf_strength + # convert final_mults into tensor and match expected dimension count + final_tensor = torch.tensor(final_mults, dtype=x.dtype, device=x.device) + while len(final_tensor.shape) < len(x.shape): + final_tensor = final_tensor.unsqueeze(-1) + return final_tensor + + def apply_advanced_strengths_and_masks(self, x: Tensor, batched_number: int): + # handle weight's uncond_multiplier, if applicable + if self.weights.has_uncond_multiplier: + cond_or_uncond = self.batched_number.cond_or_uncond + actual_length = x.size(0) // batched_number + for idx, cond_type in enumerate(cond_or_uncond): + # if uncond, set to weight's uncond_multiplier + if cond_type == 1: + x[actual_length*idx:actual_length*(idx+1)] *= self.weights.uncond_multiplier + + if self.latent_keyframes is not None: + x[:] = x[:] * self.calc_latent_keyframe_mults(x=x, batched_number=batched_number) + # apply masks, resizing mask to required dims + if self.mask_cond_hint is not None: + masks = prepare_mask_batch(self.mask_cond_hint, x.shape) + x[:] = x[:] * masks + if self.tk_mask_cond_hint is not None: + masks = prepare_mask_batch(self.tk_mask_cond_hint, x.shape) + x[:] = x[:] * masks + # apply timestep keyframe strengths + if self._current_timestep_keyframe.strength != 1.0: + x[:] *= self._current_timestep_keyframe.strength + + def control_merge_inject(self: 'AdvancedControlBase', control_input, control_output, control_prev, output_dtype): + out = {'input':[], 'middle':[], 'output': []} + + if control_input is not None: + for i in range(len(control_input)): + key = 'input' + x = control_input[i] + if x is not None: + self.apply_advanced_strengths_and_masks(x, self.batched_number) + + x *= self.strength * self.calc_weight(i, x, len(control_input)) + if x.dtype != output_dtype: + x = x.to(output_dtype) + out[key].insert(0, x) + + if control_output is not None: + for i in range(len(control_output)): + if i == (len(control_output) - 1): + key = 'middle' + index = 0 + else: + key = 'output' + index = i + x = control_output[i] + if x is not None: + self.apply_advanced_strengths_and_masks(x, self.batched_number) + + if 
self.global_average_pooling: + x = torch.mean(x, dim=(2, 3), keepdim=True).repeat(1, 1, x.shape[2], x.shape[3]) + + x *= self.strength * self.calc_weight(i, x, len(control_output)) + if x.dtype != output_dtype: + x = x.to(output_dtype) + + out[key].append(x) + if control_prev is not None: + for x in ['input', 'middle', 'output']: + o = out[x] + for i in range(len(control_prev[x])): + prev_val = control_prev[x][i] + if i >= len(o): + o.append(prev_val) + elif prev_val is not None: + if o[i] is None: + o[i] = prev_val + else: + if o[i].shape[0] < prev_val.shape[0]: + o[i] = prev_val + o[i] + else: + o[i] += prev_val + return out + + def prepare_mask_cond_hint(self, x_noisy: Tensor, t, cond, batched_number, dtype=None, direct_attn=False): + self._prepare_mask("mask_cond_hint", self.mask_cond_hint_original, x_noisy, t, cond, batched_number, dtype, direct_attn=direct_attn) + self.prepare_tk_mask_cond_hint(x_noisy, t, cond, batched_number, dtype, direct_attn=direct_attn) + + def prepare_tk_mask_cond_hint(self, x_noisy: Tensor, t, cond, batched_number, dtype=None, direct_attn=False): + return self._prepare_mask("tk_mask_cond_hint", self._current_timestep_keyframe.mask_hint_orig, x_noisy, t, cond, batched_number, dtype, direct_attn=direct_attn) + + def prepare_weight_mask_cond_hint(self, x_noisy: Tensor, batched_number, dtype=None): + return self._prepare_mask("weight_mask_cond_hint", self.weights.weight_mask, x_noisy, t=None, cond=None, batched_number=batched_number, dtype=dtype, direct_attn=True) + + def _prepare_mask(self, attr_name, orig_mask: Tensor, x_noisy: Tensor, t, cond, batched_number, dtype=None, direct_attn=False): + # make mask appropriate dimensions, if present + if orig_mask is not None: + out_mask = getattr(self, attr_name) + multiplier = 1 if direct_attn else 8 + if self.sub_idxs is not None or out_mask is None or x_noisy.shape[2] * multiplier != out_mask.shape[1] or x_noisy.shape[3] * multiplier != out_mask.shape[2]: + self._reset_attr(attr_name) + del out_mask + # TODO: perform upscale on only the sub_idxs masks at a time instead of all to conserve RAM + # resize mask and match batch count + out_mask = prepare_mask_batch(orig_mask, x_noisy.shape, multiplier=multiplier) + actual_latent_length = x_noisy.shape[0] // batched_number + out_mask = comfy.utils.repeat_to_batch_size(out_mask, actual_latent_length if self.sub_idxs is None else self.full_latent_length) + if self.sub_idxs is not None: + out_mask = out_mask[self.sub_idxs] + # make cond_hint_mask length match x_noise + if x_noisy.shape[0] != out_mask.shape[0]: + out_mask = broadcast_image_to(out_mask, x_noisy.shape[0], batched_number) + # default dtype to be same as x_noisy + if dtype is None: + dtype = x_noisy.dtype + setattr(self, attr_name, out_mask.to(dtype=dtype).to(self.device)) + del out_mask + + def _reset_attr(self, attr_name, new_value=None): + if hasattr(self, attr_name): + delattr(self, attr_name) + setattr(self, attr_name, new_value) + + def cleanup_inject(self): + self.base.cleanup() + self.cleanup_advanced() + + def cleanup_advanced(self): + self.sub_idxs = None + self.full_latent_length = 0 + self.context_length = 0 + self.t = None + self.batched_number = None + self.batch_size = 0 + self.weights = None + self.latent_keyframes = None + # timestep stuff + self._current_timestep_keyframe = None + self._current_timestep_index = -1 + self._current_used_steps = 0 + # clear mask hints + if self.mask_cond_hint is not None: + del self.mask_cond_hint + self.mask_cond_hint = None + if self.tk_mask_cond_hint_original is 
not None: + del self.tk_mask_cond_hint_original + self.tk_mask_cond_hint_original = None + if self.tk_mask_cond_hint is not None: + del self.tk_mask_cond_hint + self.tk_mask_cond_hint = None + if self.weight_mask_cond_hint is not None: + del self.weight_mask_cond_hint + self.weight_mask_cond_hint = None + + def copy_to_advanced(self, copied: 'AdvancedControlBase'): + copied.mask_cond_hint_original = self.mask_cond_hint_original + copied.weights_override = self.weights_override + copied.latent_keyframe_override = self.latent_keyframe_override + copied.disarmed = self.disarmed diff --git a/ComfyUI-Advanced-ControlNet/pyproject.toml b/ComfyUI-Advanced-ControlNet/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..9df3e690cbb51beedd0b34a54607cb583f322a6c --- /dev/null +++ b/ComfyUI-Advanced-ControlNet/pyproject.toml @@ -0,0 +1,15 @@ +[project] +name = "comfyui-advanced-controlnet" +description = "Nodes for scheduling ControlNet strength across timesteps and batched latents, as well as applying custom weights and attention masks." +version = "1.0.2" +license = "LICENSE" +dependencies = [] + +[project.urls] +Repository = "https://github.com/Kosinkadink/ComfyUI-Advanced-ControlNet" + +# Used by Comfy Registry https://comfyregistry.org +[tool.comfy] +PublisherId = "kosinkadink" +DisplayName = "ComfyUI-Advanced-ControlNet" +Icon = "" diff --git a/ComfyUI-Advanced-ControlNet/requirements.txt b/ComfyUI-Advanced-ControlNet/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI-BrushNet/BIG_IMAGE.md b/ComfyUI-BrushNet/BIG_IMAGE.md new file mode 100644 index 0000000000000000000000000000000000000000..84f735aae9605a80aa852ca8a923f3cf32269160 --- /dev/null +++ b/ComfyUI-BrushNet/BIG_IMAGE.md @@ -0,0 +1,6 @@ +![example workflow](example/BrushNet_cut_for_inpaint.png?raw=true) + +[workflow](example/BrushNet_cut_for_inpaint.json) + +When you work with a big image and your inpaint mask is small, it is better to cut out part of the image, work with it, and then blend it back in. +I created a node for such a workflow; see the example. diff --git a/ComfyUI-BrushNet/CN.md b/ComfyUI-BrushNet/CN.md new file mode 100644 index 0000000000000000000000000000000000000000..88263a29be0ff26b2a3706e2be9f6da44fa92614 --- /dev/null +++ b/ComfyUI-BrushNet/CN.md @@ -0,0 +1,39 @@ +## ControlNet Canny Edge + +Let's take the pestered cake and try to inpaint it again. Now I would like to use a sleeping cat for it: + +![sleeping cat](example/sleeping_cat.png?raw=true) + +I use the Canny Edge node from [comfyui_controlnet_aux](https://github.com/Fannovel16/comfyui_controlnet_aux). Don't forget to resize the canny edge mask to 512 pixels: + +![sleeping cat inpaint](example/sleeping_cat_inpaint1.png?raw=true) + +Let's look at the result: + +![sleeping cat inpaint](example/sleeping_cat_inpaint2.png?raw=true) + +The first problem I see here is some kind of object behind the cat. Such objects appear because the inpainting mask strictly aligns with the removed object, the cake in our case. To remove this artifact we should expand our mask a little (a small dilation sketch is shown below): + +![sleeping cat inpaint](example/sleeping_cat_inpaint3.png?raw=true) + +Now, what's up with the cat's back and tail? Let's see the inpainting mask and the canny edge mask side by side: + +![masks](example/sleeping_cat_inpaint4.png?raw=true) + +The inpainting works (mostly) only in the masked (white) area, so we cut off the cat's back.
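One way to "expand the mask a little", as suggested above, is a simple dilation of the inpaint mask. Below is a minimal sketch of that idea, assuming a ComfyUI-style MASK tensor of shape [B, H, W] with values in 0..1; the function name `expand_mask` and the pixel amount are illustrative, not the exact node used in this workflow:

```python
import torch
import torch.nn.functional as F

def expand_mask(mask: torch.Tensor, pixels: int = 16) -> torch.Tensor:
    # mask: [B, H, W], where 1.0 marks the inpaint region; grow the white area by `pixels`
    kernel = pixels * 2 + 1
    m = mask.unsqueeze(1)  # [B, 1, H, W] so max_pool2d can act as a dilation
    m = F.max_pool2d(m, kernel_size=kernel, stride=1, padding=pixels)
    return m.squeeze(1).clamp(0.0, 1.0)
```

The same trick helps with the next problem: after dilating, check that the canny edge mask is fully covered by the (expanded) inpaint mask before wiring it into the ControlNet.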
**The ControlNet mask should be inside the inpaint mask.** + +To address the issue, I resized the mask to 256 pixels: + +![sleeping cat inpaint](example/sleeping_cat_inpaint5.png?raw=true) + +This is better, but there is still room for improvement. The problem with edge mask downsampling is that edge lines tend to break, and below a certain size we get a mess: + +![sleeping cat inpaint](example/sleeping_cat_inpaint6.png?raw=true) + +Look at the edge mask; at this resolution it is badly broken: + +![masks](example/sleeping_cat_mask.png?raw=true) + + + + diff --git a/ComfyUI-BrushNet/LICENSE b/ComfyUI-BrushNet/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/ComfyUI-BrushNet/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/ComfyUI-BrushNet/PARAMS.md b/ComfyUI-BrushNet/PARAMS.md new file mode 100644 index 0000000000000000000000000000000000000000..1be01bed6ee1bdbfa5a1431ea9b8265cd0c47ccc --- /dev/null +++ b/ComfyUI-BrushNet/PARAMS.md @@ -0,0 +1,47 @@ +## Start At and End At parameters usage + +### start_at + +Let's start with an ELLA outpaint [workflow](example/BrushNet_with_ELLA.json) and switch off the Blend Inpaint node: + +![example workflow](example/params1.png?raw=true) + +For this example I use the "wargaming shop showcase" prompt, the `dpmpp_2m` deterministic sampler and the `karras` scheduler with 15 steps. This is the result: + +![goblin in the shop](example/params2.png?raw=true) + +The `start_at` BrushNet node parameter allows us to delay BrushNet inference for some steps, so the base model does all the work. Let's see what the result will be without BrushNet. For this I set the `start_at` parameter to 20 - it should be more than `steps` in the KSampler node: + +![the shop](example/params3.png?raw=true) + +So, if we apply BrushNet from the beginning (`start_at` equals 0), the resulting scene will be heavily influenced by the BrushNet image. The more we increase this parameter, the more the scene will be based on the prompt. Let's compare: + +| `start_at` = 1 | `start_at` = 2 | `start_at` = 3 | +|:--------------:|:--------------:|:--------------:| +| ![p1](example/params4.png?raw=true) | ![p2](example/params5.png?raw=true) | ![p3](example/params6.png?raw=true) | +| `start_at` = 4 | `start_at` = 5 | `start_at` = 6 | +| ![p1](example/params7.png?raw=true) | ![p2](example/params8.png?raw=true) | ![p3](example/params9.png?raw=true) | +| `start_at` = 7 | `start_at` = 8 | `start_at` = 9 | +| ![p1](example/params10.png?raw=true) | ![p2](example/params11.png?raw=true) | ![p3](example/params12.png?raw=true) | + +Look how the floor is aligned with the toy's base - at some step it loses consistency. The results will depend on the type of sampler and the number of KSampler steps, of course. + +### end_at + +The `end_at` parameter switches off BrushNet at the last steps. If you use a deterministic sampler it will only influence details in the last steps, but stochastic samplers can change the whole scene. For a description of samplers see, for example, Matteo Spinelli's [video on ComfyUI basics](https://youtu.be/_C7kR2TFIX0?t=516).
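To make the mechanics concrete, here is a minimal sketch (not the node's actual code; `step` and `brushnet_residual` are illustrative names) of how a step window like `start_at`/`end_at` gates BrushNet's contribution:

```python
# Minimal sketch, assuming `step` counts sampling steps from 0 and
# `brushnet_residual` is the extra signal BrushNet would add inside the UNet.
def apply_brushnet(unet_hidden, brushnet_residual, step, scale=1.0, start_at=0, end_at=10000):
    # Outside the [start_at, end_at) window the base model runs untouched.
    if step < start_at or step >= end_at:
        return unet_hidden
    return unet_hidden + scale * brushnet_residual
```

For example, with `start_at` set to 20 and only 15 KSampler steps the window is never entered, which is why the "without BrushNet" image earlier shows the base model alone.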
+ +Here I use the basic BrushNet inpaint [example](example/BrushNet_basic.json), with the "intricate teapot" prompt, the `dpmpp_2m` deterministic sampler and the `karras` scheduler with 15 steps: + +![example workflow](example/params13.png?raw=true) + +There are almost no changes when we set the `end_at` parameter to 10, but starting from there: + +| `end_at` = 10 | `end_at` = 9 | `end_at` = 8 | +|:--------------:|:--------------:|:--------------:| +| ![p1](example/params14.png?raw=true) | ![p2](example/params15.png?raw=true) | ![p3](example/params16.png?raw=true) | +| `end_at` = 7 | `end_at` = 6 | `end_at` = 5 | +| ![p1](example/params17.png?raw=true) | ![p2](example/params18.png?raw=true) | ![p3](example/params19.png?raw=true) | +| `end_at` = 4 | `end_at` = 3 | `end_at` = 2 | +| ![p1](example/params20.png?raw=true) | ![p2](example/params21.png?raw=true) | ![p3](example/params22.png?raw=true) | + +You can see how the scene was completely redrawn. diff --git a/ComfyUI-BrushNet/RAUNET.md b/ComfyUI-BrushNet/RAUNET.md new file mode 100644 index 0000000000000000000000000000000000000000..05a75aeb8cbbeae3457e3b690809a569217205f8 --- /dev/null +++ b/ComfyUI-BrushNet/RAUNET.md @@ -0,0 +1,39 @@ +While investigating compatibility issues with [WASasquatch's FreeU_Advanced](https://github.com/WASasquatch/FreeU_Advanced/tree/main) and [blepping's jank HiDiffusion](https://github.com/blepping/comfyui_jankhidiffusion) nodes I stumbled upon some quite hard problems. There are `FreeU` nodes in ComfyUI, but no such nodes for HiDiffusion, so I decided to implement RAUNet on the basis of my BrushNet implementation. **blepping**, I am sorry. :) + +### RAUNet + +What is RAUNet? I am sure many of you have seen and generated images with a lot of limbs, fingers and faces all morphed together. + +The authors of HiDiffusion invented a simple yet effective trick to alleviate this problem. Here is an example: + +![example workflow](example/RAUNet1.png?raw=true) + +[workflow](example/RAUNet_basic.json) + +The left picture is created using the ZavyChromaXL checkpoint on a 2048x2048 canvas. The right one uses RAUNet. + +In my experience the node is helpful but quite sensitive to its parameters. And there is no universal solution - you should adjust them for every new image you generate. It also lowers the model's imagination: you usually get only what you described in the prompt. Look at the example: in the first image you have a forest in the background, but RAUNet deleted everything except the fox, which is described in the prompt. + +From the [paper](https://arxiv.org/abs/2311.17528): Diffusion models denoise from structures to details. RAU-Net introduces additional downsampling and upsampling operations, leading to a certain degree of information loss. In the early stages of denoising, RAU-Net can generate reasonable structures with minimal impact from information loss. However, in the later stages of denoising when generating fine details, the information loss in RAU-Net results in the loss of image details and a degradation in quality. + +### Parameters + +There are two independent parts in this node: DU (Downsample/Upsample) and XA (CrossAttention). The four parameters are the start and end steps for applying these parts. + +The Downsample/Upsample part lowers the model's degrees of freedom. If you apply it a lot (for more steps) the resulting images will have a lot of symmetries. + +The CrossAttention part lowers the number of objects which the model tracks in the image. + +Usually you apply DU and, after several steps, XA; sometimes you will need only XA - you should try it yourself.
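As a rough illustration of what the DU part does (a highly simplified sketch under my own assumptions, not the actual RAUNet patch, which hooks specific UNet blocks), the HiDiffusion idea is to add an extra downsample before a block and an upsample after it, only while the DU step window is active:

```python
import torch.nn.functional as F

# Simplified sketch: wrap one UNet block with an extra downsample/upsample pair,
# active only while du_start <= step < du_end (parameter names are illustrative).
def du_wrapped_forward(block_forward, hidden_states, step, du_start=0, du_end=4):
    if not (du_start <= step < du_end):
        return block_forward(hidden_states)
    h, w = hidden_states.shape[-2:]
    x = F.interpolate(hidden_states, scale_factor=0.5, mode="bilinear")  # extra downsample
    x = block_forward(x)
    return F.interpolate(x, size=(h, w), mode="bilinear")                # upsample back
```

The XA part is gated by `xa_start`/`xa_end` in the same way, but acts on the cross-attention computation instead of the feature maps.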
+ +### Compatibility + +It is compatible with BrushNet and most other nodes. + +This is a ControlNet example. The lower image is the pure model, the upper one is after using RAUNet. You can see the small fox and two tails in the lower image. + +![example workflow](example/RAUNet2.png?raw=true) + +[workflow](example/RAUNet_with_CN.json) + +The node can be implemented for any model. Right now it can be applied to SD15 and SDXL models. \ No newline at end of file diff --git a/ComfyUI-BrushNet/README.md b/ComfyUI-BrushNet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..42227d538db939fa4fe0d82d00b1325df6c91c58 --- /dev/null +++ b/ComfyUI-BrushNet/README.md @@ -0,0 +1,261 @@ +## ComfyUI-BrushNet + +These are custom nodes for a native ComfyUI implementation of + +- BrushNet: ["BrushNet: A Plug-and-Play Image Inpainting Model with Decomposed Dual-Branch Diffusion"](https://arxiv.org/abs/2403.06976) +- PowerPaint: [A Task is Worth One Word: Learning with Task Prompts for High-Quality Versatile Image Inpainting](https://arxiv.org/abs/2312.03594) +- HiDiffusion: [HiDiffusion: Unlocking Higher-Resolution Creativity and Efficiency in Pretrained Diffusion Models](https://arxiv.org/abs/2311.17528) + +My contribution is limited to the ComfyUI adaptation, and all credit goes to the authors of the papers. + +## Updates + +May 16, 2024. Internal rework to improve compatibility with other nodes. [RAUNet](RAUNET.md) is implemented. + +May 12, 2024. CutForInpaint node, see [example](BIG_IMAGE.md). + +May 11, 2024. Image batch is implemented. You can even add BrushNet to an AnimateDiff vid2vid workflow, but they don't work together - they are different models and both try to patch the UNet. Added some more examples. + +May 6, 2024. PowerPaint v2 model is implemented. After the update your workflow will probably not work. Don't panic! Check the `end_at` parameter of the BrushNet node; if it equals 1, change it to some big number. Read about the parameters in the Usage section below. + +May 2, 2024. BrushNet SDXL is live. It needs positive and negative conditioning though, so the workflow changes a little, see the example. + +Apr 28, 2024. Another rework, sorry for the inconvenience. But now BrushNet is native to ComfyUI. The famous cubiq's [IPAdapter Plus](https://github.com/cubiq/ComfyUI_IPAdapter_plus) is now working with BrushNet! I hope... :) Please report any bugs you find. + +Apr 18, 2024. Complete rework, no more custom `diffusers` library. It is possible to use LoRA models. + +Apr 11, 2024. Initial commit. + +## Plans + +- [x] BrushNet SDXL +- [x] PowerPaint v2 +- [x] Image batch + +## Installation + +Clone the repo into the `custom_nodes` directory and install the requirements: + +``` +git clone https://github.com/nullquant/ComfyUI-BrushNet.git +pip install -r requirements.txt +``` + +Checkpoints of BrushNet can be downloaded from [here](https://drive.google.com/drive/folders/1fqmS1CEOvXCxNWFrsSYd_jHYXxrydh1n?usp=drive_link). + +The `segmentation_mask_brushnet_ckpt` checkpoints are trained on BrushData, which has a segmentation prior (masks have the same shape as objects). The `random_mask_brushnet_ckpt` provides a more general checkpoint for random mask shapes. + +`segmentation_mask_brushnet_ckpt` and `random_mask_brushnet_ckpt` contain BrushNet for SD 1.5 models, while +`segmentation_mask_brushnet_ckpt_sdxl_v0` and `random_mask_brushnet_ckpt_sdxl_v0` are for SDXL. + +You should place the `diffusion_pytorch_model.safetensors` files in your `models/inpaint` folder.
You can also specify an `inpaint` folder in your `extra_model_paths.yaml`. + +For PowerPaint you should download three files. Both `diffusion_pytorch_model.safetensors` and `pytorch_model.bin` from [here](https://huggingface.co/JunhaoZhuang/PowerPaint-v2-1/tree/main/PowerPaint_Brushnet) should be placed in your `models/inpaint` folder. + +You also need the SD1.5 text encoder model `model.fp16.safetensors` from [here](https://huggingface.co/runwayml/stable-diffusion-v1-5/tree/main/text_encoder). It should be placed in your `models/clip` folder. + +This is the structure of my `models/inpaint` folder: + +![inpaint folder](example/inpaint_folder.png?raw=true) + +Yours can be different. + +## Usage + +Below is an example of the intended workflow. The [workflow](example/BrushNet_basic.json) for the example can be found inside the 'example' directory. + +![example workflow](example/BrushNet_basic.png?raw=true) + +
+ SDXL + +![example workflow](example/BrushNet_SDXL_basic.png?raw=true) + +[workflow](example/BrushNet_SDXL_basic.json) + +
+ +
+ IPAdapter plus + +![example workflow](example/BrushNet_with_IPA.png?raw=true) + +[workflow](example/BrushNet_with_IPA.json) + +
+ +
+ LoRA + +![example workflow](example/BrushNet_with_LoRA.png?raw=true) + +[workflow](example/BrushNet_with_LoRA.json) + +
+ +
+ Blending inpaint + +![example workflow](example/BrushNet_inpaint.png?raw=true) + +Sometimes inference and the VAE break the image, so you need to blend the inpainted image back into the original: [workflow](example/BrushNet_inpaint.json). You can see blurred and broken text after inpainting in the first image, and how I propose to repair it. + +
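As a rough sketch of what such a blend does (illustrative only, under my own assumptions; the actual Blend Inpaint node may also soften the mask edge and handle batching), the inpainted pixels are composited back over the original using the mask:

```python
import torch

# Minimal sketch: composite the inpainted result over the original using a mask.
# `original`, `inpainted`: float image tensors of shape (B, H, W, C) in [0, 1];
# `mask`: float tensor of shape (B, H, W), 1.0 where the image was inpainted.
def blend_inpaint(original: torch.Tensor, inpainted: torch.Tensor, mask: torch.Tensor) -> torch.Tensor:
    alpha = mask.unsqueeze(-1)  # broadcast the mask over the channel dimension
    return original * (1.0 - alpha) + inpainted * alpha
```

Softening (blurring) the mask before the blend helps hide the seam between the two images.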
+ +
+ ControlNet + +![example workflow](example/BrushNet_with_CN.png?raw=true) + +[workflow](example/BrushNet_with_CN.json) + +[ControlNet canny edge](CN.md) + +
+ +
+ ELLA outpaint + +![example workflow](example/BrushNet_with_ELLA.png?raw=true) + +[workflow](example/BrushNet_with_ELLA.json) + +
+ +
+ Upscale + +![example workflow](example/BrushNet_SDXL_upscale.png?raw=true) + +[workflow](example/BrushNet_SDXL_upscale.json) + +To upscale you should use the base model, not BrushNet. The same is true for the conditioning. Latent upscaling between BrushNet and KSampler will not work or will give you weird results. These limitations are due to the structure of BrushNet and its influence on the UNet calculations. + +
+ +
+ Image batch + +![example workflow](example/BrushNet_image_batch.png?raw=true) + +[workflow](example/BrushNet_image_batch.json) + +If you have OOM problems, you can use Evolved Sampling from [AnimateDiff-Evolved](https://github.com/Kosinkadink/ComfyUI-AnimateDiff-Evolved): + +![example workflow](example/BrushNet_image_big_batch.png?raw=true) + +[workflow](example/BrushNet_image_big_batch.json) + +In Context Options set `context_length` to the number of images which can be loaded into VRAM. Images will be processed in chunks of this size. + +
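Conceptually, the chunking works like this (a simplified sketch with illustrative names, not the Evolved Sampling implementation):

```python
import torch

# Simplified sketch: run a model over a large image batch in chunks of
# `context_length` so that only one chunk has to fit into VRAM at a time.
def process_in_chunks(images: torch.Tensor, context_length: int, run_model):
    outputs = []
    for start in range(0, images.shape[0], context_length):
        chunk = images[start:start + context_length]
        outputs.append(run_model(chunk))
    return torch.cat(outputs, dim=0)
```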
+ + +
+ Big image inpaint + +![example workflow](example/BrushNet_cut_for_inpaint.png?raw=true) + +[workflow](example/BrushNet_cut_for_inpaint.json) + +When you work with a big image and your inpaint mask is small, it is better to cut out the relevant part of the image, work with it and then blend it back. +I created a node for such a workflow, see the example. + +
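The cut itself is just a mask-driven crop; a minimal sketch (illustrative only, not the CutForInpaint node's actual code) could look like this:

```python
import torch

# Minimal sketch: crop a region around the mask (plus a margin) so that a small
# inpaint area of a big image can be processed at a manageable resolution.
# `image`: (B, H, W, C) float tensor; `mask`: (B, H, W) tensor, nonzero where inpainting happens.
def cut_around_mask(image: torch.Tensor, mask: torch.Tensor, margin: int = 64):
    ys, xs = torch.nonzero(mask[0], as_tuple=True)
    y0, y1 = max(int(ys.min()) - margin, 0), min(int(ys.max()) + margin, mask.shape[1])
    x0, x1 = max(int(xs.min()) - margin, 0), min(int(xs.max()) + margin, mask.shape[2])
    return image[:, y0:y1, x0:x1, :], mask[:, y0:y1, x0:x1], (y0, x0)
```

The returned offset `(y0, x0)` is what you need in order to blend the processed crop back into the original image afterwards.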
+ + +
+ PowerPaint outpaint + +![example workflow](example/PowerPaint_outpaint.png?raw=true) + +[workflow](example/PowerPaint_outpaint.json) + +
+ +
+ PowerPaint object removal + +![example workflow](example/PowerPaint_object_removal.png?raw=true) + +[workflow](example/PowerPaint_object_removal.json) + +It is often hard to completely remove the object, especially if it is in the foreground: + +![object removal example](example/object_removal_fail.png?raw=true) + +You should try to add the object description to the negative prompt and describe an empty scene, like here: + +![object removal example](example/object_removal.png?raw=true) + +
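For illustration only (the exact wording is not prescribed by the node; these strings are my own placeholders), the advice above boils down to prompts along these lines:

```python
# Illustrative placeholders: describe the empty scene in the positive prompt
# and name the object you want removed in the negative prompt.
positive_prompt = "empty scene"
negative_prompt = "object you want removed, blur"
```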
+ +### Parameters + +#### Brushnet Loader + +- `dtype`, defaults to `torch.float16`. The torch.dtype of BrushNet. If you have an old GPU or an NVIDIA 16 series card, try switching to `torch.float32`. + +#### Brushnet + +- `scale`, defaults to 1.0: The "strength" of BrushNet. The outputs of BrushNet are multiplied by `scale` before they are added to the residual in the original UNet. +- `start_at`, defaults to 0: the step at which BrushNet starts applying. +- `end_at`, defaults to 10000: the step at which BrushNet stops applying. + +[Here](PARAMS.md) are examples of using these last two parameters. + +#### PowerPaint + +- `CLIP`: PowerPaint CLIP that should be passed from the PowerPaintCLIPLoader node. +- `fitting`: PowerPaint fitting degree. +- `function`: PowerPaint function, see its [page](https://github.com/open-mmlab/PowerPaint) for details. + +When using certain network functions, the authors of PowerPaint recommend adding phrases to the prompt: + +- object removal: `empty scene blur` +- context aware: `empty scene` +- outpainting: `empty scene` + +Many ComfyUI users use custom text generation nodes, CLIP nodes and a lot of other conditioning. I don't want to break all of these nodes, so I didn't add prompt updating and instead rely on users. Also, my own experiments show that these additions to the prompt are not strictly necessary. + +The latent image can come from the BrushNet node or not, but it should be the same size as the original image (divided by 8 in latent space). + +Both the `positive` and `negative` conditioning in the BrushNet and PowerPaint nodes are used for calculations inside the node, but are then simply copied to the output. + +Be advised that not all workflows and nodes will work with BrushNet due to its structure. Also, put model changes before the BrushNet nodes, not after. If you need a model to work with the image after BrushNet inference, use the base one (see the Upscale example above). + +#### RAUNet + +- `du_start`, defaults to 0: the step at which the Downsample/Upsample resize starts applying. +- `du_end`, defaults to 4: the step at which the Downsample/Upsample resize stops applying. +- `xa_start`, defaults to 4: the step at which the CrossAttention resize starts applying. +- `xa_end`, defaults to 10: the step at which the CrossAttention resize stops applying. + +For examples and an explanation, please look [here](RAUNET.md). + +## Limitations + +BrushNet has some limitations (from the [paper](https://arxiv.org/abs/2403.06976)): + +- The quality and content generated by the model are heavily dependent on the chosen base model. +The results can exhibit incoherence if, for example, the given image is a natural image while the base model primarily focuses on anime. +- Even with BrushNet, we still observe poor generation results in cases where the given mask has an unusual shape +or irregular form, or when the given text does not align well with the masked image. + +## Notes + +Unfortunately, due to the nature of the BrushNet code, some nodes are not compatible with the nodes in this repository, since both try to patch the same ComfyUI functions. + +List of known incompatible nodes:
+ +- [WASasquatch's FreeU_Advanced](https://github.com/WASasquatch/FreeU_Advanced/tree/main) +- [blepping's jank HiDiffusion](https://github.com/blepping/comfyui_jankhidiffusion) + +## Credits + +The code is based on + +- [BrushNet](https://github.com/TencentARC/BrushNet) +- [PowerPaint](https://github.com/zhuang2002/PowerPaint) +- [HiDiffusion](https://github.com/megvii-research/HiDiffusion) +- [diffusers](https://github.com/huggingface/diffusers) diff --git a/ComfyUI-BrushNet/__init__.py b/ComfyUI-BrushNet/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1aa35c40a86a57ba4d628855eae37be1cf76715f --- /dev/null +++ b/ComfyUI-BrushNet/__init__.py @@ -0,0 +1,32 @@ +from .brushnet_nodes import BrushNetLoader, BrushNet, BlendInpaint, PowerPaintCLIPLoader, PowerPaint, CutForInpaint +from .raunet_nodes import RAUNet + +""" +@author: nullquant +@title: BrushNet +@nickname: BrushName nodes +@description: These are custom nodes for ComfyUI native implementation of BrushNet, PowerPaint and RAUNet models +""" + +# A dictionary that contains all nodes you want to export with their names +# NOTE: names should be globally unique +NODE_CLASS_MAPPINGS = { + "BrushNetLoader": BrushNetLoader, + "BrushNet": BrushNet, + "BlendInpaint": BlendInpaint, + "PowerPaintCLIPLoader": PowerPaintCLIPLoader, + "PowerPaint": PowerPaint, + "CutForInpaint": CutForInpaint, + "RAUNet": RAUNet, +} + +# A dictionary that contains the friendly/humanly readable titles for the nodes +NODE_DISPLAY_NAME_MAPPINGS = { + "BrushNetLoader": "BrushNet Loader", + "BrushNet": "BrushNet", + "BlendInpaint": "Blend Inpaint", + "PowerPaintCLIPLoader": "PowerPaint CLIP Loader", + "PowerPaint": "PowerPaint", + "CutForInpaint": "Cut For Inpaint", + "RAUNet": "RAUNet", +} diff --git a/ComfyUI-BrushNet/brushnet/brushnet.json b/ComfyUI-BrushNet/brushnet/brushnet.json new file mode 100644 index 0000000000000000000000000000000000000000..65713bfcd0113271496bd06fe6b57299822e0f76 --- /dev/null +++ b/ComfyUI-BrushNet/brushnet/brushnet.json @@ -0,0 +1,58 @@ +{ + "_class_name": "BrushNetModel", + "_diffusers_version": "0.27.0.dev0", + "_name_or_path": "runs/logs/brushnet_randommask/checkpoint-100000", + "act_fn": "silu", + "addition_embed_type": null, + "addition_embed_type_num_heads": 64, + "addition_time_embed_dim": null, + "attention_head_dim": 8, + "block_out_channels": [ + 320, + 640, + 1280, + 1280 + ], + "brushnet_conditioning_channel_order": "rgb", + "class_embed_type": null, + "conditioning_channels": 5, + "conditioning_embedding_out_channels": [ + 16, + 32, + 96, + 256 + ], + "cross_attention_dim": 768, + "down_block_types": [ + "DownBlock2D", + "DownBlock2D", + "DownBlock2D", + "DownBlock2D" + ], + "downsample_padding": 1, + "encoder_hid_dim": null, + "encoder_hid_dim_type": null, + "flip_sin_to_cos": true, + "freq_shift": 0, + "global_pool_conditions": false, + "in_channels": 4, + "layers_per_block": 2, + "mid_block_scale_factor": 1, + "mid_block_type": "MidBlock2D", + "norm_eps": 1e-05, + "norm_num_groups": 32, + "num_attention_heads": null, + "num_class_embeds": null, + "only_cross_attention": false, + "projection_class_embeddings_input_dim": null, + "resnet_time_scale_shift": "default", + "transformer_layers_per_block": 1, + "up_block_types": [ + "UpBlock2D", + "UpBlock2D", + "UpBlock2D", + "UpBlock2D" + ], + "upcast_attention": false, + "use_linear_projection": false +} diff --git a/ComfyUI-BrushNet/brushnet/brushnet.py b/ComfyUI-BrushNet/brushnet/brushnet.py new file mode 100644 index 
0000000000000000000000000000000000000000..8ce9910fdff6dde5744581935f62b35a3be1eb23 --- /dev/null +++ b/ComfyUI-BrushNet/brushnet/brushnet.py @@ -0,0 +1,948 @@ +from dataclasses import dataclass +from typing import Any, Dict, List, Optional, Tuple, Union + +import torch +from torch import nn +from torch.nn import functional as F + +from diffusers.configuration_utils import ConfigMixin, register_to_config +from diffusers.utils import BaseOutput, logging +from diffusers.models.attention_processor import ( + ADDED_KV_ATTENTION_PROCESSORS, + CROSS_ATTENTION_PROCESSORS, + AttentionProcessor, + AttnAddedKVProcessor, + AttnProcessor, +) +from diffusers.models.embeddings import TextImageProjection, TextImageTimeEmbedding, TextTimeEmbedding, TimestepEmbedding, Timesteps +from diffusers.models.modeling_utils import ModelMixin + +from .unet_2d_blocks import ( + CrossAttnDownBlock2D, + DownBlock2D, + UNetMidBlock2D, + UNetMidBlock2DCrossAttn, + get_down_block, + get_mid_block, + get_up_block, + MidBlock2D +) + +from .unet_2d_condition import UNet2DConditionModel + + +logger = logging.get_logger(__name__) # pylint: disable=invalid-name + + +@dataclass +class BrushNetOutput(BaseOutput): + """ + The output of [`BrushNetModel`]. + + Args: + up_block_res_samples (`tuple[torch.Tensor]`): + A tuple of upsample activations at different resolutions for each upsampling block. Each tensor should + be of shape `(batch_size, channel * resolution, height //resolution, width // resolution)`. Output can be + used to condition the original UNet's upsampling activations. + down_block_res_samples (`tuple[torch.Tensor]`): + A tuple of downsample activations at different resolutions for each downsampling block. Each tensor should + be of shape `(batch_size, channel * resolution, height //resolution, width // resolution)`. Output can be + used to condition the original UNet's downsampling activations. + mid_down_block_re_sample (`torch.Tensor`): + The activation of the midde block (the lowest sample resolution). Each tensor should be of shape + `(batch_size, channel * lowest_resolution, height // lowest_resolution, width // lowest_resolution)`. + Output can be used to condition the original UNet's middle block activation. + """ + + up_block_res_samples: Tuple[torch.Tensor] + down_block_res_samples: Tuple[torch.Tensor] + mid_block_res_sample: torch.Tensor + + +class BrushNetModel(ModelMixin, ConfigMixin): + """ + A BrushNet model. + + Args: + in_channels (`int`, defaults to 4): + The number of channels in the input sample. + flip_sin_to_cos (`bool`, defaults to `True`): + Whether to flip the sin to cos in the time embedding. + freq_shift (`int`, defaults to 0): + The frequency shift to apply to the time embedding. + down_block_types (`tuple[str]`, defaults to `("CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "DownBlock2D")`): + The tuple of downsample blocks to use. + mid_block_type (`str`, *optional*, defaults to `"UNetMidBlock2DCrossAttn"`): + Block type for middle of UNet, it can be one of `UNetMidBlock2DCrossAttn`, `UNetMidBlock2D`, or + `UNetMidBlock2DSimpleCrossAttn`. If `None`, the mid block layer is skipped. + up_block_types (`Tuple[str]`, *optional*, defaults to `("UpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D")`): + The tuple of upsample blocks to use. + only_cross_attention (`Union[bool, Tuple[bool]]`, defaults to `False`): + block_out_channels (`tuple[int]`, defaults to `(320, 640, 1280, 1280)`): + The tuple of output channels for each block. 
+ layers_per_block (`int`, defaults to 2): + The number of layers per block. + downsample_padding (`int`, defaults to 1): + The padding to use for the downsampling convolution. + mid_block_scale_factor (`float`, defaults to 1): + The scale factor to use for the mid block. + act_fn (`str`, defaults to "silu"): + The activation function to use. + norm_num_groups (`int`, *optional*, defaults to 32): + The number of groups to use for the normalization. If None, normalization and activation layers is skipped + in post-processing. + norm_eps (`float`, defaults to 1e-5): + The epsilon to use for the normalization. + cross_attention_dim (`int`, defaults to 1280): + The dimension of the cross attention features. + transformer_layers_per_block (`int` or `Tuple[int]`, *optional*, defaults to 1): + The number of transformer blocks of type [`~models.attention.BasicTransformerBlock`]. Only relevant for + [`~models.unet_2d_blocks.CrossAttnDownBlock2D`], [`~models.unet_2d_blocks.CrossAttnUpBlock2D`], + [`~models.unet_2d_blocks.UNetMidBlock2DCrossAttn`]. + encoder_hid_dim (`int`, *optional*, defaults to None): + If `encoder_hid_dim_type` is defined, `encoder_hidden_states` will be projected from `encoder_hid_dim` + dimension to `cross_attention_dim`. + encoder_hid_dim_type (`str`, *optional*, defaults to `None`): + If given, the `encoder_hidden_states` and potentially other embeddings are down-projected to text + embeddings of dimension `cross_attention` according to `encoder_hid_dim_type`. + attention_head_dim (`Union[int, Tuple[int]]`, defaults to 8): + The dimension of the attention heads. + use_linear_projection (`bool`, defaults to `False`): + class_embed_type (`str`, *optional*, defaults to `None`): + The type of class embedding to use which is ultimately summed with the time embeddings. Choose from None, + `"timestep"`, `"identity"`, `"projection"`, or `"simple_projection"`. + addition_embed_type (`str`, *optional*, defaults to `None`): + Configures an optional embedding which will be summed with the time embeddings. Choose from `None` or + "text". "text" will use the `TextTimeEmbedding` layer. + num_class_embeds (`int`, *optional*, defaults to 0): + Input dimension of the learnable embedding matrix to be projected to `time_embed_dim`, when performing + class conditioning with `class_embed_type` equal to `None`. + upcast_attention (`bool`, defaults to `False`): + resnet_time_scale_shift (`str`, defaults to `"default"`): + Time scale shift config for ResNet blocks (see `ResnetBlock2D`). Choose from `default` or `scale_shift`. + projection_class_embeddings_input_dim (`int`, *optional*, defaults to `None`): + The dimension of the `class_labels` input when `class_embed_type="projection"`. Required when + `class_embed_type="projection"`. + brushnet_conditioning_channel_order (`str`, defaults to `"rgb"`): + The channel order of conditional image. Will convert to `rgb` if it's `bgr`. + conditioning_embedding_out_channels (`tuple[int]`, *optional*, defaults to `(16, 32, 96, 256)`): + The tuple of output channel for each block in the `conditioning_embedding` layer. + global_pool_conditions (`bool`, defaults to `False`): + TODO(Patrick) - unused parameter. + addition_embed_type_num_heads (`int`, defaults to 64): + The number of heads to use for the `TextTimeEmbedding` layer. 
+ """ + + _supports_gradient_checkpointing = True + + @register_to_config + def __init__( + self, + in_channels: int = 4, + conditioning_channels: int = 5, + flip_sin_to_cos: bool = True, + freq_shift: int = 0, + down_block_types: Tuple[str, ...] = ( + "DownBlock2D", + "DownBlock2D", + "DownBlock2D", + "DownBlock2D", + ), + mid_block_type: Optional[str] = "UNetMidBlock2D", + up_block_types: Tuple[str, ...] = ( + "UpBlock2D", + "UpBlock2D", + "UpBlock2D", + "UpBlock2D", + ), + only_cross_attention: Union[bool, Tuple[bool]] = False, + block_out_channels: Tuple[int, ...] = (320, 640, 1280, 1280), + layers_per_block: int = 2, + downsample_padding: int = 1, + mid_block_scale_factor: float = 1, + act_fn: str = "silu", + norm_num_groups: Optional[int] = 32, + norm_eps: float = 1e-5, + cross_attention_dim: int = 1280, + transformer_layers_per_block: Union[int, Tuple[int, ...]] = 1, + encoder_hid_dim: Optional[int] = None, + encoder_hid_dim_type: Optional[str] = None, + attention_head_dim: Union[int, Tuple[int, ...]] = 8, + num_attention_heads: Optional[Union[int, Tuple[int, ...]]] = None, + use_linear_projection: bool = False, + class_embed_type: Optional[str] = None, + addition_embed_type: Optional[str] = None, + addition_time_embed_dim: Optional[int] = None, + num_class_embeds: Optional[int] = None, + upcast_attention: bool = False, + resnet_time_scale_shift: str = "default", + projection_class_embeddings_input_dim: Optional[int] = None, + brushnet_conditioning_channel_order: str = "rgb", + conditioning_embedding_out_channels: Optional[Tuple[int, ...]] = (16, 32, 96, 256), + global_pool_conditions: bool = False, + addition_embed_type_num_heads: int = 64, + ): + super().__init__() + + # If `num_attention_heads` is not defined (which is the case for most models) + # it will default to `attention_head_dim`. This looks weird upon first reading it and it is. + # The reason for this behavior is to correct for incorrectly named variables that were introduced + # when this library was created. The incorrect naming was only discovered much later in https://github.com/huggingface/diffusers/issues/2011#issuecomment-1547958131 + # Changing `attention_head_dim` to `num_attention_heads` for 40,000+ configurations is too backwards breaking + # which is why we correct for the naming here. + num_attention_heads = num_attention_heads or attention_head_dim + + # Check inputs + if len(down_block_types) != len(up_block_types): + raise ValueError( + f"Must provide the same number of `down_block_types` as `up_block_types`. `down_block_types`: {down_block_types}. `up_block_types`: {up_block_types}." + ) + + if len(block_out_channels) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `block_out_channels` as `down_block_types`. `block_out_channels`: {block_out_channels}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(only_cross_attention, bool) and len(only_cross_attention) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `only_cross_attention` as `down_block_types`. `only_cross_attention`: {only_cross_attention}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(num_attention_heads, int) and len(num_attention_heads) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `num_attention_heads` as `down_block_types`. `num_attention_heads`: {num_attention_heads}. `down_block_types`: {down_block_types}." 
+ ) + + if isinstance(transformer_layers_per_block, int): + transformer_layers_per_block = [transformer_layers_per_block] * len(down_block_types) + + # input + conv_in_kernel = 3 + conv_in_padding = (conv_in_kernel - 1) // 2 + self.conv_in_condition = nn.Conv2d( + in_channels+conditioning_channels, block_out_channels[0], kernel_size=conv_in_kernel, padding=conv_in_padding + ) + + # time + time_embed_dim = block_out_channels[0] * 4 + self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift) + timestep_input_dim = block_out_channels[0] + self.time_embedding = TimestepEmbedding( + timestep_input_dim, + time_embed_dim, + act_fn=act_fn, + ) + + if encoder_hid_dim_type is None and encoder_hid_dim is not None: + encoder_hid_dim_type = "text_proj" + self.register_to_config(encoder_hid_dim_type=encoder_hid_dim_type) + logger.info("encoder_hid_dim_type defaults to 'text_proj' as `encoder_hid_dim` is defined.") + + if encoder_hid_dim is None and encoder_hid_dim_type is not None: + raise ValueError( + f"`encoder_hid_dim` has to be defined when `encoder_hid_dim_type` is set to {encoder_hid_dim_type}." + ) + + if encoder_hid_dim_type == "text_proj": + self.encoder_hid_proj = nn.Linear(encoder_hid_dim, cross_attention_dim) + elif encoder_hid_dim_type == "text_image_proj": + # image_embed_dim DOESN'T have to be `cross_attention_dim`. To not clutter the __init__ too much + # they are set to `cross_attention_dim` here as this is exactly the required dimension for the currently only use + # case when `addition_embed_type == "text_image_proj"` (Kadinsky 2.1)` + self.encoder_hid_proj = TextImageProjection( + text_embed_dim=encoder_hid_dim, + image_embed_dim=cross_attention_dim, + cross_attention_dim=cross_attention_dim, + ) + + elif encoder_hid_dim_type is not None: + raise ValueError( + f"encoder_hid_dim_type: {encoder_hid_dim_type} must be None, 'text_proj' or 'text_image_proj'." + ) + else: + self.encoder_hid_proj = None + + # class embedding + if class_embed_type is None and num_class_embeds is not None: + self.class_embedding = nn.Embedding(num_class_embeds, time_embed_dim) + elif class_embed_type == "timestep": + self.class_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim) + elif class_embed_type == "identity": + self.class_embedding = nn.Identity(time_embed_dim, time_embed_dim) + elif class_embed_type == "projection": + if projection_class_embeddings_input_dim is None: + raise ValueError( + "`class_embed_type`: 'projection' requires `projection_class_embeddings_input_dim` be set" + ) + # The projection `class_embed_type` is the same as the timestep `class_embed_type` except + # 1. the `class_labels` inputs are not first converted to sinusoidal embeddings + # 2. it projects from an arbitrary input dimension. + # + # Note that `TimestepEmbedding` is quite general, being mainly linear layers and activations. + # When used for embedding actual timesteps, the timesteps are first converted to sinusoidal embeddings. + # As a result, `TimestepEmbedding` can be passed arbitrary vectors. 
+ self.class_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim) + else: + self.class_embedding = None + + if addition_embed_type == "text": + if encoder_hid_dim is not None: + text_time_embedding_from_dim = encoder_hid_dim + else: + text_time_embedding_from_dim = cross_attention_dim + + self.add_embedding = TextTimeEmbedding( + text_time_embedding_from_dim, time_embed_dim, num_heads=addition_embed_type_num_heads + ) + elif addition_embed_type == "text_image": + # text_embed_dim and image_embed_dim DON'T have to be `cross_attention_dim`. To not clutter the __init__ too much + # they are set to `cross_attention_dim` here as this is exactly the required dimension for the currently only use + # case when `addition_embed_type == "text_image"` (Kadinsky 2.1)` + self.add_embedding = TextImageTimeEmbedding( + text_embed_dim=cross_attention_dim, image_embed_dim=cross_attention_dim, time_embed_dim=time_embed_dim + ) + elif addition_embed_type == "text_time": + self.add_time_proj = Timesteps(addition_time_embed_dim, flip_sin_to_cos, freq_shift) + self.add_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim) + + elif addition_embed_type is not None: + raise ValueError(f"addition_embed_type: {addition_embed_type} must be None, 'text' or 'text_image'.") + + self.down_blocks = nn.ModuleList([]) + self.brushnet_down_blocks = nn.ModuleList([]) + + if isinstance(only_cross_attention, bool): + only_cross_attention = [only_cross_attention] * len(down_block_types) + + if isinstance(attention_head_dim, int): + attention_head_dim = (attention_head_dim,) * len(down_block_types) + + if isinstance(num_attention_heads, int): + num_attention_heads = (num_attention_heads,) * len(down_block_types) + + # down + output_channel = block_out_channels[0] + + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_down_blocks.append(brushnet_block) + + for i, down_block_type in enumerate(down_block_types): + input_channel = output_channel + output_channel = block_out_channels[i] + is_final_block = i == len(block_out_channels) - 1 + + down_block = get_down_block( + down_block_type, + num_layers=layers_per_block, + transformer_layers_per_block=transformer_layers_per_block[i], + in_channels=input_channel, + out_channels=output_channel, + temb_channels=time_embed_dim, + add_downsample=not is_final_block, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim, + num_attention_heads=num_attention_heads[i], + attention_head_dim=attention_head_dim[i] if attention_head_dim[i] is not None else output_channel, + downsample_padding=downsample_padding, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention[i], + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + self.down_blocks.append(down_block) + + for _ in range(layers_per_block): + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_down_blocks.append(brushnet_block) + + if not is_final_block: + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_down_blocks.append(brushnet_block) + + # mid + mid_block_channel = block_out_channels[-1] + + brushnet_block = nn.Conv2d(mid_block_channel, mid_block_channel, kernel_size=1) + 
brushnet_block = zero_module(brushnet_block) + self.brushnet_mid_block = brushnet_block + + self.mid_block = get_mid_block( + mid_block_type, + transformer_layers_per_block=transformer_layers_per_block[-1], + in_channels=mid_block_channel, + temb_channels=time_embed_dim, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + output_scale_factor=mid_block_scale_factor, + resnet_time_scale_shift=resnet_time_scale_shift, + cross_attention_dim=cross_attention_dim, + num_attention_heads=num_attention_heads[-1], + resnet_groups=norm_num_groups, + use_linear_projection=use_linear_projection, + upcast_attention=upcast_attention, + ) + + # count how many layers upsample the images + self.num_upsamplers = 0 + + # up + reversed_block_out_channels = list(reversed(block_out_channels)) + reversed_num_attention_heads = list(reversed(num_attention_heads)) + reversed_transformer_layers_per_block = (list(reversed(transformer_layers_per_block))) + only_cross_attention = list(reversed(only_cross_attention)) + + output_channel = reversed_block_out_channels[0] + + self.up_blocks = nn.ModuleList([]) + self.brushnet_up_blocks = nn.ModuleList([]) + + for i, up_block_type in enumerate(up_block_types): + is_final_block = i == len(block_out_channels) - 1 + + prev_output_channel = output_channel + output_channel = reversed_block_out_channels[i] + input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)] + + # add upsample block for all BUT final layer + if not is_final_block: + add_upsample = True + self.num_upsamplers += 1 + else: + add_upsample = False + + up_block = get_up_block( + up_block_type, + num_layers=layers_per_block+1, + transformer_layers_per_block=reversed_transformer_layers_per_block[i], + in_channels=input_channel, + out_channels=output_channel, + prev_output_channel=prev_output_channel, + temb_channels=time_embed_dim, + add_upsample=add_upsample, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resolution_idx=i, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim, + num_attention_heads=reversed_num_attention_heads[i], + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention[i], + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_head_dim=attention_head_dim[i] if attention_head_dim[i] is not None else output_channel, + ) + self.up_blocks.append(up_block) + prev_output_channel = output_channel + + for _ in range(layers_per_block+1): + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_up_blocks.append(brushnet_block) + + if not is_final_block: + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_up_blocks.append(brushnet_block) + + + @classmethod + def from_unet( + cls, + unet: UNet2DConditionModel, + brushnet_conditioning_channel_order: str = "rgb", + conditioning_embedding_out_channels: Optional[Tuple[int, ...]] = (16, 32, 96, 256), + load_weights_from_unet: bool = True, + conditioning_channels: int = 5, + ): + r""" + Instantiate a [`BrushNetModel`] from [`UNet2DConditionModel`]. + + Parameters: + unet (`UNet2DConditionModel`): + The UNet model weights to copy to the [`BrushNetModel`]. All configuration options are also copied + where applicable. 
+ """ + transformer_layers_per_block = ( + unet.config.transformer_layers_per_block if "transformer_layers_per_block" in unet.config else 1 + ) + encoder_hid_dim = unet.config.encoder_hid_dim if "encoder_hid_dim" in unet.config else None + encoder_hid_dim_type = unet.config.encoder_hid_dim_type if "encoder_hid_dim_type" in unet.config else None + addition_embed_type = unet.config.addition_embed_type if "addition_embed_type" in unet.config else None + addition_time_embed_dim = ( + unet.config.addition_time_embed_dim if "addition_time_embed_dim" in unet.config else None + ) + + brushnet = cls( + in_channels=unet.config.in_channels, + conditioning_channels=conditioning_channels, + flip_sin_to_cos=unet.config.flip_sin_to_cos, + freq_shift=unet.config.freq_shift, + down_block_types=["DownBlock2D" for block_name in unet.config.down_block_types], + mid_block_type='MidBlock2D', + up_block_types=["UpBlock2D" for block_name in unet.config.down_block_types], + only_cross_attention=unet.config.only_cross_attention, + block_out_channels=unet.config.block_out_channels, + layers_per_block=unet.config.layers_per_block, + downsample_padding=unet.config.downsample_padding, + mid_block_scale_factor=unet.config.mid_block_scale_factor, + act_fn=unet.config.act_fn, + norm_num_groups=unet.config.norm_num_groups, + norm_eps=unet.config.norm_eps, + cross_attention_dim=unet.config.cross_attention_dim, + transformer_layers_per_block=transformer_layers_per_block, + encoder_hid_dim=encoder_hid_dim, + encoder_hid_dim_type=encoder_hid_dim_type, + attention_head_dim=unet.config.attention_head_dim, + num_attention_heads=unet.config.num_attention_heads, + use_linear_projection=unet.config.use_linear_projection, + class_embed_type=unet.config.class_embed_type, + addition_embed_type=addition_embed_type, + addition_time_embed_dim=addition_time_embed_dim, + num_class_embeds=unet.config.num_class_embeds, + upcast_attention=unet.config.upcast_attention, + resnet_time_scale_shift=unet.config.resnet_time_scale_shift, + projection_class_embeddings_input_dim=unet.config.projection_class_embeddings_input_dim, + brushnet_conditioning_channel_order=brushnet_conditioning_channel_order, + conditioning_embedding_out_channels=conditioning_embedding_out_channels, + ) + + if load_weights_from_unet: + conv_in_condition_weight=torch.zeros_like(brushnet.conv_in_condition.weight) + conv_in_condition_weight[:,:4,...]=unet.conv_in.weight + conv_in_condition_weight[:,4:8,...]=unet.conv_in.weight + brushnet.conv_in_condition.weight=torch.nn.Parameter(conv_in_condition_weight) + brushnet.conv_in_condition.bias=unet.conv_in.bias + + brushnet.time_proj.load_state_dict(unet.time_proj.state_dict()) + brushnet.time_embedding.load_state_dict(unet.time_embedding.state_dict()) + + if brushnet.class_embedding: + brushnet.class_embedding.load_state_dict(unet.class_embedding.state_dict()) + + brushnet.down_blocks.load_state_dict(unet.down_blocks.state_dict(),strict=False) + brushnet.mid_block.load_state_dict(unet.mid_block.state_dict(),strict=False) + brushnet.up_blocks.load_state_dict(unet.up_blocks.state_dict(),strict=False) + + return brushnet + + @property + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.attn_processors + def attn_processors(self) -> Dict[str, AttentionProcessor]: + r""" + Returns: + `dict` of attention processors: A dictionary containing all attention processors used in the model with + indexed by its weight name. 
+ """ + # set recursively + processors = {} + + def fn_recursive_add_processors(name: str, module: torch.nn.Module, processors: Dict[str, AttentionProcessor]): + if hasattr(module, "get_processor"): + processors[f"{name}.processor"] = module.get_processor(return_deprecated_lora=True) + + for sub_name, child in module.named_children(): + fn_recursive_add_processors(f"{name}.{sub_name}", child, processors) + + return processors + + for name, module in self.named_children(): + fn_recursive_add_processors(name, module, processors) + + return processors + + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.set_attn_processor + def set_attn_processor(self, processor: Union[AttentionProcessor, Dict[str, AttentionProcessor]]): + r""" + Sets the attention processor to use to compute attention. + + Parameters: + processor (`dict` of `AttentionProcessor` or only `AttentionProcessor`): + The instantiated processor class or a dictionary of processor classes that will be set as the processor + for **all** `Attention` layers. + + If `processor` is a dict, the key needs to define the path to the corresponding cross attention + processor. This is strongly recommended when setting trainable attention processors. + + """ + count = len(self.attn_processors.keys()) + + if isinstance(processor, dict) and len(processor) != count: + raise ValueError( + f"A dict of processors was passed, but the number of processors {len(processor)} does not match the" + f" number of attention layers: {count}. Please make sure to pass {count} processor classes." + ) + + def fn_recursive_attn_processor(name: str, module: torch.nn.Module, processor): + if hasattr(module, "set_processor"): + if not isinstance(processor, dict): + module.set_processor(processor) + else: + module.set_processor(processor.pop(f"{name}.processor")) + + for sub_name, child in module.named_children(): + fn_recursive_attn_processor(f"{name}.{sub_name}", child, processor) + + for name, module in self.named_children(): + fn_recursive_attn_processor(name, module, processor) + + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.set_default_attn_processor + def set_default_attn_processor(self): + """ + Disables custom attention processors and sets the default attention implementation. + """ + if all(proc.__class__ in ADDED_KV_ATTENTION_PROCESSORS for proc in self.attn_processors.values()): + processor = AttnAddedKVProcessor() + elif all(proc.__class__ in CROSS_ATTENTION_PROCESSORS for proc in self.attn_processors.values()): + processor = AttnProcessor() + else: + raise ValueError( + f"Cannot call `set_default_attn_processor` when attention processors are of type {next(iter(self.attn_processors.values()))}" + ) + + self.set_attn_processor(processor) + + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.set_attention_slice + def set_attention_slice(self, slice_size: Union[str, int, List[int]]) -> None: + r""" + Enable sliced attention computation. + + When this option is enabled, the attention module splits the input tensor in slices to compute attention in + several steps. This is useful for saving some memory in exchange for a small decrease in speed. + + Args: + slice_size (`str` or `int` or `list(int)`, *optional*, defaults to `"auto"`): + When `"auto"`, input to the attention heads is halved, so attention is computed in two steps. If + `"max"`, maximum amount of memory is saved by running only one slice at a time. 
If a number is + provided, uses as many slices as `attention_head_dim // slice_size`. In this case, `attention_head_dim` + must be a multiple of `slice_size`. + """ + sliceable_head_dims = [] + + def fn_recursive_retrieve_sliceable_dims(module: torch.nn.Module): + if hasattr(module, "set_attention_slice"): + sliceable_head_dims.append(module.sliceable_head_dim) + + for child in module.children(): + fn_recursive_retrieve_sliceable_dims(child) + + # retrieve number of attention layers + for module in self.children(): + fn_recursive_retrieve_sliceable_dims(module) + + num_sliceable_layers = len(sliceable_head_dims) + + if slice_size == "auto": + # half the attention head size is usually a good trade-off between + # speed and memory + slice_size = [dim // 2 for dim in sliceable_head_dims] + elif slice_size == "max": + # make smallest slice possible + slice_size = num_sliceable_layers * [1] + + slice_size = num_sliceable_layers * [slice_size] if not isinstance(slice_size, list) else slice_size + + if len(slice_size) != len(sliceable_head_dims): + raise ValueError( + f"You have provided {len(slice_size)}, but {self.config} has {len(sliceable_head_dims)} different" + f" attention layers. Make sure to match `len(slice_size)` to be {len(sliceable_head_dims)}." + ) + + for i in range(len(slice_size)): + size = slice_size[i] + dim = sliceable_head_dims[i] + if size is not None and size > dim: + raise ValueError(f"size {size} has to be smaller or equal to {dim}.") + + # Recursively walk through all the children. + # Any children which exposes the set_attention_slice method + # gets the message + def fn_recursive_set_attention_slice(module: torch.nn.Module, slice_size: List[int]): + if hasattr(module, "set_attention_slice"): + module.set_attention_slice(slice_size.pop()) + + for child in module.children(): + fn_recursive_set_attention_slice(child, slice_size) + + reversed_slice_size = list(reversed(slice_size)) + for module in self.children(): + fn_recursive_set_attention_slice(module, reversed_slice_size) + + def _set_gradient_checkpointing(self, module, value: bool = False) -> None: + if isinstance(module, (CrossAttnDownBlock2D, DownBlock2D)): + module.gradient_checkpointing = value + + def forward( + self, + sample: torch.FloatTensor, + encoder_hidden_states: torch.Tensor, + brushnet_cond: torch.FloatTensor, + timestep = None, + time_emb = None, + conditioning_scale: float = 1.0, + class_labels: Optional[torch.Tensor] = None, + timestep_cond: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + added_cond_kwargs: Optional[Dict[str, torch.Tensor]] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + guess_mode: bool = False, + return_dict: bool = True, + ) -> Union[BrushNetOutput, Tuple[Tuple[torch.FloatTensor, ...], torch.FloatTensor]]: + """ + The [`BrushNetModel`] forward method. + + Args: + sample (`torch.FloatTensor`): + The noisy input tensor. + timestep (`Union[torch.Tensor, float, int]`): + The number of timesteps to denoise an input. + encoder_hidden_states (`torch.Tensor`): + The encoder hidden states. + brushnet_cond (`torch.FloatTensor`): + The conditional input tensor of shape `(batch_size, sequence_length, hidden_size)`. + conditioning_scale (`float`, defaults to `1.0`): + The scale factor for BrushNet outputs. + class_labels (`torch.Tensor`, *optional*, defaults to `None`): + Optional class labels for conditioning. Their embeddings will be summed with the timestep embeddings. 
+ timestep_cond (`torch.Tensor`, *optional*, defaults to `None`): + Additional conditional embeddings for timestep. If provided, the embeddings will be summed with the + timestep_embedding passed through the `self.time_embedding` layer to obtain the final timestep + embeddings. + attention_mask (`torch.Tensor`, *optional*, defaults to `None`): + An attention mask of shape `(batch, key_tokens)` is applied to `encoder_hidden_states`. If `1` the mask + is kept, otherwise if `0` it is discarded. Mask will be converted into a bias, which adds large + negative values to the attention scores corresponding to "discard" tokens. + added_cond_kwargs (`dict`): + Additional conditions for the Stable Diffusion XL UNet. + cross_attention_kwargs (`dict[str]`, *optional*, defaults to `None`): + A kwargs dictionary that if specified is passed along to the `AttnProcessor`. + guess_mode (`bool`, defaults to `False`): + In this mode, the BrushNet encoder tries its best to recognize the input content of the input even if + you remove all prompts. A `guidance_scale` between 3.0 and 5.0 is recommended. + return_dict (`bool`, defaults to `True`): + Whether or not to return a [`~models.brushnet.BrushNetOutput`] instead of a plain tuple. + + Returns: + [`~models.brushnet.BrushNetOutput`] **or** `tuple`: + If `return_dict` is `True`, a [`~models.brushnet.BrushNetOutput`] is returned, otherwise a tuple is + returned where the first element is the sample tensor. + """ + + # check channel order + channel_order = self.config.brushnet_conditioning_channel_order + + if channel_order == "rgb": + # in rgb order by default + ... + elif channel_order == "bgr": + brushnet_cond = torch.flip(brushnet_cond, dims=[1]) + else: + raise ValueError(f"unknown `brushnet_conditioning_channel_order`: {channel_order}") + + # prepare attention_mask + if attention_mask is not None: + attention_mask = (1 - attention_mask.to(sample.dtype)) * -10000.0 + attention_mask = attention_mask.unsqueeze(1) + + if timestep is None and time_emb is None: + raise ValueError(f"`timestep` and `emb` are both None") + + #print("BN: sample.device", sample.device) + #print("BN: TE.device", self.time_embedding.linear_1.weight.device) + + if timestep is not None: + # 1. time + timesteps = timestep + if not torch.is_tensor(timesteps): + # TODO: this requires sync between CPU and GPU. So try to pass timesteps as tensors if you can + # This would be a good case for the `match` statement (Python 3.10+) + is_mps = sample.device.type == "mps" + if isinstance(timestep, float): + dtype = torch.float32 if is_mps else torch.float64 + else: + dtype = torch.int32 if is_mps else torch.int64 + timesteps = torch.tensor([timesteps], dtype=dtype, device=sample.device) + elif len(timesteps.shape) == 0: + timesteps = timesteps[None].to(sample.device) + + # broadcast to batch dimension in a way that's compatible with ONNX/Core ML + timesteps = timesteps.expand(sample.shape[0]) + + t_emb = self.time_proj(timesteps) + + # timesteps does not contain any weights and will always return f32 tensors + # but time_embedding might actually be running in fp16. so we need to cast here. + # there might be better ways to encapsulate this. 
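+ # Illustrative note (hypothetical variable names): the sync mentioned in the TODO
+ # above can be avoided by passing `timestep` as a tensor that already lives on
+ # `sample.device`, e.g.
+ #   t = torch.full((latents.shape[0],), 999, device=latents.device, dtype=torch.long)
+ #   brushnet(latents, encoder_hidden_states=cond, brushnet_cond=bn_cond, timestep=t)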
+ t_emb = t_emb.to(dtype=sample.dtype) + + #print("t_emb.device =",t_emb.device) + + emb = self.time_embedding(t_emb, timestep_cond) + aug_emb = None + + #print('emb.shape', emb.shape) + + if self.class_embedding is not None: + if class_labels is None: + raise ValueError("class_labels should be provided when num_class_embeds > 0") + + if self.config.class_embed_type == "timestep": + class_labels = self.time_proj(class_labels) + + class_emb = self.class_embedding(class_labels).to(dtype=self.dtype) + emb = emb + class_emb + + if self.config.addition_embed_type is not None: + if self.config.addition_embed_type == "text": + aug_emb = self.add_embedding(encoder_hidden_states) + + elif self.config.addition_embed_type == "text_time": + if "text_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_time' which requires the keyword argument `text_embeds` to be passed in `added_cond_kwargs`" + ) + text_embeds = added_cond_kwargs.get("text_embeds") + if "time_ids" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_time' which requires the keyword argument `time_ids` to be passed in `added_cond_kwargs`" + ) + time_ids = added_cond_kwargs.get("time_ids") + time_embeds = self.add_time_proj(time_ids.flatten()) + time_embeds = time_embeds.reshape((text_embeds.shape[0], -1)) + + add_embeds = torch.concat([text_embeds, time_embeds], dim=-1) + add_embeds = add_embeds.to(emb.dtype) + aug_emb = self.add_embedding(add_embeds) + + #print('text_embeds', text_embeds.shape, 'time_ids', time_ids.shape, 'time_embeds', time_embeds.shape, 'add__embeds', add_embeds.shape, 'aug_emb', aug_emb.shape) + + emb = emb + aug_emb if aug_emb is not None else emb + else: + emb = time_emb + + # 2. pre-process + + brushnet_cond=torch.concat([sample,brushnet_cond],1) + sample = self.conv_in_condition(brushnet_cond) + + # 3. down + down_block_res_samples = (sample,) + for downsample_block in self.down_blocks: + if hasattr(downsample_block, "has_cross_attention") and downsample_block.has_cross_attention: + sample, res_samples = downsample_block( + hidden_states=sample, + temb=emb, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + ) + else: + sample, res_samples = downsample_block(hidden_states=sample, temb=emb) + + down_block_res_samples += res_samples + + # 4. PaintingNet down blocks + brushnet_down_block_res_samples = () + for down_block_res_sample, brushnet_down_block in zip(down_block_res_samples, self.brushnet_down_blocks): + down_block_res_sample = brushnet_down_block(down_block_res_sample) + brushnet_down_block_res_samples = brushnet_down_block_res_samples + (down_block_res_sample,) + + + # 5. mid + if self.mid_block is not None: + if hasattr(self.mid_block, "has_cross_attention") and self.mid_block.has_cross_attention: + sample = self.mid_block( + sample, + emb, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + ) + else: + sample = self.mid_block(sample, emb) + + # 6. BrushNet mid blocks + brushnet_mid_block_res_sample = self.brushnet_mid_block(sample) + + # 7. 
up + up_block_res_samples = () + for i, upsample_block in enumerate(self.up_blocks): + is_final_block = i == len(self.up_blocks) - 1 + + res_samples = down_block_res_samples[-len(upsample_block.resnets) :] + down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)] + + # if we have not reached the final block and need to forward the + # upsample size, we do it here + if not is_final_block: + upsample_size = down_block_res_samples[-1].shape[2:] + + if hasattr(upsample_block, "has_cross_attention") and upsample_block.has_cross_attention: + sample, up_res_samples = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + upsample_size=upsample_size, + attention_mask=attention_mask, + return_res_samples=True + ) + else: + sample, up_res_samples = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + upsample_size=upsample_size, + return_res_samples=True + ) + + up_block_res_samples += up_res_samples + + # 8. BrushNet up blocks + brushnet_up_block_res_samples = () + for up_block_res_sample, brushnet_up_block in zip(up_block_res_samples, self.brushnet_up_blocks): + up_block_res_sample = brushnet_up_block(up_block_res_sample) + brushnet_up_block_res_samples = brushnet_up_block_res_samples + (up_block_res_sample,) + + # 6. scaling + if guess_mode and not self.config.global_pool_conditions: + scales = torch.logspace(-1, 0, len(brushnet_down_block_res_samples) + 1 + len(brushnet_up_block_res_samples), device=sample.device) # 0.1 to 1.0 + scales = scales * conditioning_scale + + brushnet_down_block_res_samples = [sample * scale for sample, scale in zip(brushnet_down_block_res_samples, scales[:len(brushnet_down_block_res_samples)])] + brushnet_mid_block_res_sample = brushnet_mid_block_res_sample * scales[len(brushnet_down_block_res_samples)] + brushnet_up_block_res_samples = [sample * scale for sample, scale in zip(brushnet_up_block_res_samples, scales[len(brushnet_down_block_res_samples)+1:])] + else: + brushnet_down_block_res_samples = [sample * conditioning_scale for sample in brushnet_down_block_res_samples] + brushnet_mid_block_res_sample = brushnet_mid_block_res_sample * conditioning_scale + brushnet_up_block_res_samples = [sample * conditioning_scale for sample in brushnet_up_block_res_samples] + + + if self.config.global_pool_conditions: + brushnet_down_block_res_samples = [ + torch.mean(sample, dim=(2, 3), keepdim=True) for sample in brushnet_down_block_res_samples + ] + brushnet_mid_block_res_sample = torch.mean(brushnet_mid_block_res_sample, dim=(2, 3), keepdim=True) + brushnet_up_block_res_samples = [ + torch.mean(sample, dim=(2, 3), keepdim=True) for sample in brushnet_up_block_res_samples + ] + + if not return_dict: + return (brushnet_down_block_res_samples, brushnet_mid_block_res_sample, brushnet_up_block_res_samples) + + return BrushNetOutput( + down_block_res_samples=brushnet_down_block_res_samples, + mid_block_res_sample=brushnet_mid_block_res_sample, + up_block_res_samples=brushnet_up_block_res_samples + ) + + +def zero_module(module): + for p in module.parameters(): + nn.init.zeros_(p) + return module diff --git a/ComfyUI-BrushNet/brushnet/brushnet_ca.py b/ComfyUI-BrushNet/brushnet/brushnet_ca.py new file mode 100644 index 0000000000000000000000000000000000000000..10366de57477ac8beb80ce31001fba8088982c11 --- /dev/null +++ b/ComfyUI-BrushNet/brushnet/brushnet_ca.py @@ -0,0 +1,956 @@ +from 
dataclasses import dataclass +from typing import Any, Dict, List, Optional, Tuple, Union + +import torch +from torch import nn + +from diffusers.configuration_utils import ConfigMixin, register_to_config +from diffusers.utils import BaseOutput, logging +from diffusers.models.attention_processor import ( + ADDED_KV_ATTENTION_PROCESSORS, + CROSS_ATTENTION_PROCESSORS, + AttentionProcessor, + AttnAddedKVProcessor, + AttnProcessor, +) +from diffusers.models.embeddings import TextImageProjection, TextImageTimeEmbedding, TextTimeEmbedding, TimestepEmbedding, Timesteps +from diffusers.models.modeling_utils import ModelMixin + +from .unet_2d_blocks import ( + CrossAttnDownBlock2D, + DownBlock2D, + UNetMidBlock2D, + UNetMidBlock2DCrossAttn, + get_down_block, + get_mid_block, + get_up_block, + MidBlock2D +) + +from .unet_2d_condition import UNet2DConditionModel + + +logger = logging.get_logger(__name__) # pylint: disable=invalid-name + + +@dataclass +class BrushNetOutput(BaseOutput): + """ + The output of [`BrushNetModel`]. + + Args: + up_block_res_samples (`tuple[torch.Tensor]`): + A tuple of upsample activations at different resolutions for each upsampling block. Each tensor should + be of shape `(batch_size, channel * resolution, height //resolution, width // resolution)`. Output can be + used to condition the original UNet's upsampling activations. + down_block_res_samples (`tuple[torch.Tensor]`): + A tuple of downsample activations at different resolutions for each downsampling block. Each tensor should + be of shape `(batch_size, channel * resolution, height //resolution, width // resolution)`. Output can be + used to condition the original UNet's downsampling activations. + mid_down_block_re_sample (`torch.Tensor`): + The activation of the midde block (the lowest sample resolution). Each tensor should be of shape + `(batch_size, channel * lowest_resolution, height // lowest_resolution, width // lowest_resolution)`. + Output can be used to condition the original UNet's middle block activation. + """ + + up_block_res_samples: Tuple[torch.Tensor] + down_block_res_samples: Tuple[torch.Tensor] + mid_block_res_sample: torch.Tensor + + +class BrushNetModel(ModelMixin, ConfigMixin): + """ + A BrushNet model. + + Args: + in_channels (`int`, defaults to 4): + The number of channels in the input sample. + flip_sin_to_cos (`bool`, defaults to `True`): + Whether to flip the sin to cos in the time embedding. + freq_shift (`int`, defaults to 0): + The frequency shift to apply to the time embedding. + down_block_types (`tuple[str]`, defaults to `("CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "DownBlock2D")`): + The tuple of downsample blocks to use. + mid_block_type (`str`, *optional*, defaults to `"UNetMidBlock2DCrossAttn"`): + Block type for middle of UNet, it can be one of `UNetMidBlock2DCrossAttn`, `UNetMidBlock2D`, or + `UNetMidBlock2DSimpleCrossAttn`. If `None`, the mid block layer is skipped. + up_block_types (`Tuple[str]`, *optional*, defaults to `("UpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D")`): + The tuple of upsample blocks to use. + only_cross_attention (`Union[bool, Tuple[bool]]`, defaults to `False`): + block_out_channels (`tuple[int]`, defaults to `(320, 640, 1280, 1280)`): + The tuple of output channels for each block. + layers_per_block (`int`, defaults to 2): + The number of layers per block. + downsample_padding (`int`, defaults to 1): + The padding to use for the downsampling convolution. 
+ mid_block_scale_factor (`float`, defaults to 1): + The scale factor to use for the mid block. + act_fn (`str`, defaults to "silu"): + The activation function to use. + norm_num_groups (`int`, *optional*, defaults to 32): + The number of groups to use for the normalization. If None, normalization and activation layers is skipped + in post-processing. + norm_eps (`float`, defaults to 1e-5): + The epsilon to use for the normalization. + cross_attention_dim (`int`, defaults to 1280): + The dimension of the cross attention features. + transformer_layers_per_block (`int` or `Tuple[int]`, *optional*, defaults to 1): + The number of transformer blocks of type [`~models.attention.BasicTransformerBlock`]. Only relevant for + [`~models.unet_2d_blocks.CrossAttnDownBlock2D`], [`~models.unet_2d_blocks.CrossAttnUpBlock2D`], + [`~models.unet_2d_blocks.UNetMidBlock2DCrossAttn`]. + encoder_hid_dim (`int`, *optional*, defaults to None): + If `encoder_hid_dim_type` is defined, `encoder_hidden_states` will be projected from `encoder_hid_dim` + dimension to `cross_attention_dim`. + encoder_hid_dim_type (`str`, *optional*, defaults to `None`): + If given, the `encoder_hidden_states` and potentially other embeddings are down-projected to text + embeddings of dimension `cross_attention` according to `encoder_hid_dim_type`. + attention_head_dim (`Union[int, Tuple[int]]`, defaults to 8): + The dimension of the attention heads. + use_linear_projection (`bool`, defaults to `False`): + class_embed_type (`str`, *optional*, defaults to `None`): + The type of class embedding to use which is ultimately summed with the time embeddings. Choose from None, + `"timestep"`, `"identity"`, `"projection"`, or `"simple_projection"`. + addition_embed_type (`str`, *optional*, defaults to `None`): + Configures an optional embedding which will be summed with the time embeddings. Choose from `None` or + "text". "text" will use the `TextTimeEmbedding` layer. + num_class_embeds (`int`, *optional*, defaults to 0): + Input dimension of the learnable embedding matrix to be projected to `time_embed_dim`, when performing + class conditioning with `class_embed_type` equal to `None`. + upcast_attention (`bool`, defaults to `False`): + resnet_time_scale_shift (`str`, defaults to `"default"`): + Time scale shift config for ResNet blocks (see `ResnetBlock2D`). Choose from `default` or `scale_shift`. + projection_class_embeddings_input_dim (`int`, *optional*, defaults to `None`): + The dimension of the `class_labels` input when `class_embed_type="projection"`. Required when + `class_embed_type="projection"`. + brushnet_conditioning_channel_order (`str`, defaults to `"rgb"`): + The channel order of conditional image. Will convert to `rgb` if it's `bgr`. + conditioning_embedding_out_channels (`tuple[int]`, *optional*, defaults to `(16, 32, 96, 256)`): + The tuple of output channel for each block in the `conditioning_embedding` layer. + global_pool_conditions (`bool`, defaults to `False`): + TODO(Patrick) - unused parameter. + addition_embed_type_num_heads (`int`, defaults to 64): + The number of heads to use for the `TextTimeEmbedding` layer. + """ + + _supports_gradient_checkpointing = True + + @register_to_config + def __init__( + self, + in_channels: int = 4, + conditioning_channels: int = 5, + flip_sin_to_cos: bool = True, + freq_shift: int = 0, + down_block_types: Tuple[str, ...] 
= ( + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "DownBlock2D", + ), + mid_block_type: Optional[str] = "UNetMidBlock2DCrossAttn", + up_block_types: Tuple[str, ...] = ( + "UpBlock2D", + "CrossAttnUpBlock2D", + "CrossAttnUpBlock2D", + "CrossAttnUpBlock2D", + ), + only_cross_attention: Union[bool, Tuple[bool]] = False, + block_out_channels: Tuple[int, ...] = (320, 640, 1280, 1280), + layers_per_block: int = 2, + downsample_padding: int = 1, + mid_block_scale_factor: float = 1, + act_fn: str = "silu", + norm_num_groups: Optional[int] = 32, + norm_eps: float = 1e-5, + cross_attention_dim: int = 1280, + transformer_layers_per_block: Union[int, Tuple[int, ...]] = 1, + encoder_hid_dim: Optional[int] = None, + encoder_hid_dim_type: Optional[str] = None, + attention_head_dim: Union[int, Tuple[int, ...]] = 8, + num_attention_heads: Optional[Union[int, Tuple[int, ...]]] = None, + use_linear_projection: bool = False, + class_embed_type: Optional[str] = None, + addition_embed_type: Optional[str] = None, + addition_time_embed_dim: Optional[int] = None, + num_class_embeds: Optional[int] = None, + upcast_attention: bool = False, + resnet_time_scale_shift: str = "default", + projection_class_embeddings_input_dim: Optional[int] = None, + brushnet_conditioning_channel_order: str = "rgb", + conditioning_embedding_out_channels: Optional[Tuple[int, ...]] = (16, 32, 96, 256), + global_pool_conditions: bool = False, + addition_embed_type_num_heads: int = 64, + ): + super().__init__() + + # If `num_attention_heads` is not defined (which is the case for most models) + # it will default to `attention_head_dim`. This looks weird upon first reading it and it is. + # The reason for this behavior is to correct for incorrectly named variables that were introduced + # when this library was created. The incorrect naming was only discovered much later in https://github.com/huggingface/diffusers/issues/2011#issuecomment-1547958131 + # Changing `attention_head_dim` to `num_attention_heads` for 40,000+ configurations is too backwards breaking + # which is why we correct for the naming here. + num_attention_heads = num_attention_heads or attention_head_dim + + # Check inputs + if len(down_block_types) != len(up_block_types): + raise ValueError( + f"Must provide the same number of `down_block_types` as `up_block_types`. `down_block_types`: {down_block_types}. `up_block_types`: {up_block_types}." + ) + + if len(block_out_channels) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `block_out_channels` as `down_block_types`. `block_out_channels`: {block_out_channels}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(only_cross_attention, bool) and len(only_cross_attention) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `only_cross_attention` as `down_block_types`. `only_cross_attention`: {only_cross_attention}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(num_attention_heads, int) and len(num_attention_heads) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `num_attention_heads` as `down_block_types`. `num_attention_heads`: {num_attention_heads}. `down_block_types`: {down_block_types}." 
+ ) + + if isinstance(transformer_layers_per_block, int): + transformer_layers_per_block = [transformer_layers_per_block] * len(down_block_types) + + # input + conv_in_kernel = 3 + conv_in_padding = (conv_in_kernel - 1) // 2 + self.conv_in_condition = nn.Conv2d( + in_channels + conditioning_channels, + block_out_channels[0], + kernel_size=conv_in_kernel, + padding=conv_in_padding, + ) + + # time + time_embed_dim = block_out_channels[0] * 4 + self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift) + timestep_input_dim = block_out_channels[0] + self.time_embedding = TimestepEmbedding( + timestep_input_dim, + time_embed_dim, + act_fn=act_fn, + ) + + if encoder_hid_dim_type is None and encoder_hid_dim is not None: + encoder_hid_dim_type = "text_proj" + self.register_to_config(encoder_hid_dim_type=encoder_hid_dim_type) + logger.info("encoder_hid_dim_type defaults to 'text_proj' as `encoder_hid_dim` is defined.") + + if encoder_hid_dim is None and encoder_hid_dim_type is not None: + raise ValueError( + f"`encoder_hid_dim` has to be defined when `encoder_hid_dim_type` is set to {encoder_hid_dim_type}." + ) + + if encoder_hid_dim_type == "text_proj": + self.encoder_hid_proj = nn.Linear(encoder_hid_dim, cross_attention_dim) + elif encoder_hid_dim_type == "text_image_proj": + # image_embed_dim DOESN'T have to be `cross_attention_dim`. To not clutter the __init__ too much + # they are set to `cross_attention_dim` here as this is exactly the required dimension for the currently only use + # case when `addition_embed_type == "text_image_proj"` (Kadinsky 2.1)` + self.encoder_hid_proj = TextImageProjection( + text_embed_dim=encoder_hid_dim, + image_embed_dim=cross_attention_dim, + cross_attention_dim=cross_attention_dim, + ) + + elif encoder_hid_dim_type is not None: + raise ValueError( + f"encoder_hid_dim_type: {encoder_hid_dim_type} must be None, 'text_proj' or 'text_image_proj'." + ) + else: + self.encoder_hid_proj = None + + # class embedding + if class_embed_type is None and num_class_embeds is not None: + self.class_embedding = nn.Embedding(num_class_embeds, time_embed_dim) + elif class_embed_type == "timestep": + self.class_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim) + elif class_embed_type == "identity": + self.class_embedding = nn.Identity(time_embed_dim, time_embed_dim) + elif class_embed_type == "projection": + if projection_class_embeddings_input_dim is None: + raise ValueError( + "`class_embed_type`: 'projection' requires `projection_class_embeddings_input_dim` be set" + ) + # The projection `class_embed_type` is the same as the timestep `class_embed_type` except + # 1. the `class_labels` inputs are not first converted to sinusoidal embeddings + # 2. it projects from an arbitrary input dimension. + # + # Note that `TimestepEmbedding` is quite general, being mainly linear layers and activations. + # When used for embedding actual timesteps, the timesteps are first converted to sinusoidal embeddings. + # As a result, `TimestepEmbedding` can be passed arbitrary vectors. 
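+ # Put differently: with `class_embed_type="projection"`, `class_labels` is expected to
+ # already be a float tensor of shape (batch, projection_class_embeddings_input_dim),
+ # which this TimestepEmbedding projects to `time_embed_dim` with no sinusoidal step.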
+ self.class_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim) + else: + self.class_embedding = None + + if addition_embed_type == "text": + if encoder_hid_dim is not None: + text_time_embedding_from_dim = encoder_hid_dim + else: + text_time_embedding_from_dim = cross_attention_dim + + self.add_embedding = TextTimeEmbedding( + text_time_embedding_from_dim, time_embed_dim, num_heads=addition_embed_type_num_heads + ) + elif addition_embed_type == "text_image": + # text_embed_dim and image_embed_dim DON'T have to be `cross_attention_dim`. To not clutter the __init__ too much + # they are set to `cross_attention_dim` here as this is exactly the required dimension for the currently only use + # case when `addition_embed_type == "text_image"` (Kadinsky 2.1)` + self.add_embedding = TextImageTimeEmbedding( + text_embed_dim=cross_attention_dim, image_embed_dim=cross_attention_dim, time_embed_dim=time_embed_dim + ) + elif addition_embed_type == "text_time": + self.add_time_proj = Timesteps(addition_time_embed_dim, flip_sin_to_cos, freq_shift) + self.add_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim) + + elif addition_embed_type is not None: + raise ValueError(f"addition_embed_type: {addition_embed_type} must be None, 'text' or 'text_image'.") + + self.down_blocks = nn.ModuleList([]) + self.brushnet_down_blocks = nn.ModuleList([]) + + if isinstance(only_cross_attention, bool): + only_cross_attention = [only_cross_attention] * len(down_block_types) + + if isinstance(attention_head_dim, int): + attention_head_dim = (attention_head_dim,) * len(down_block_types) + + if isinstance(num_attention_heads, int): + num_attention_heads = (num_attention_heads,) * len(down_block_types) + + # down + output_channel = block_out_channels[0] + + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_down_blocks.append(brushnet_block) + + for i, down_block_type in enumerate(down_block_types): + input_channel = output_channel + output_channel = block_out_channels[i] + is_final_block = i == len(block_out_channels) - 1 + + down_block = get_down_block( + down_block_type, + num_layers=layers_per_block, + transformer_layers_per_block=transformer_layers_per_block[i], + in_channels=input_channel, + out_channels=output_channel, + temb_channels=time_embed_dim, + add_downsample=not is_final_block, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim, + num_attention_heads=num_attention_heads[i], + attention_head_dim=attention_head_dim[i] if attention_head_dim[i] is not None else output_channel, + downsample_padding=downsample_padding, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention[i], + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + self.down_blocks.append(down_block) + + for _ in range(layers_per_block): + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_down_blocks.append(brushnet_block) + + if not is_final_block: + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_down_blocks.append(brushnet_block) + + # mid + mid_block_channel = block_out_channels[-1] + + brushnet_block = nn.Conv2d(mid_block_channel, mid_block_channel, kernel_size=1) + 
brushnet_block = zero_module(brushnet_block) + self.brushnet_mid_block = brushnet_block + + self.mid_block = get_mid_block( + mid_block_type, + transformer_layers_per_block=transformer_layers_per_block[-1], + in_channels=mid_block_channel, + temb_channels=time_embed_dim, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + output_scale_factor=mid_block_scale_factor, + resnet_time_scale_shift=resnet_time_scale_shift, + cross_attention_dim=cross_attention_dim, + num_attention_heads=num_attention_heads[-1], + resnet_groups=norm_num_groups, + use_linear_projection=use_linear_projection, + upcast_attention=upcast_attention, + ) + + # count how many layers upsample the images + self.num_upsamplers = 0 + + # up + reversed_block_out_channels = list(reversed(block_out_channels)) + reversed_num_attention_heads = list(reversed(num_attention_heads)) + reversed_transformer_layers_per_block = list(reversed(transformer_layers_per_block)) + only_cross_attention = list(reversed(only_cross_attention)) + + output_channel = reversed_block_out_channels[0] + + self.up_blocks = nn.ModuleList([]) + self.brushnet_up_blocks = nn.ModuleList([]) + + for i, up_block_type in enumerate(up_block_types): + is_final_block = i == len(block_out_channels) - 1 + + prev_output_channel = output_channel + output_channel = reversed_block_out_channels[i] + input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)] + + # add upsample block for all BUT final layer + if not is_final_block: + add_upsample = True + self.num_upsamplers += 1 + else: + add_upsample = False + + up_block = get_up_block( + up_block_type, + num_layers=layers_per_block + 1, + transformer_layers_per_block=reversed_transformer_layers_per_block[i], + in_channels=input_channel, + out_channels=output_channel, + prev_output_channel=prev_output_channel, + temb_channels=time_embed_dim, + add_upsample=add_upsample, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resolution_idx=i, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim, + num_attention_heads=reversed_num_attention_heads[i], + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention[i], + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_head_dim=attention_head_dim[i] if attention_head_dim[i] is not None else output_channel, + ) + self.up_blocks.append(up_block) + prev_output_channel = output_channel + + for _ in range(layers_per_block + 1): + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_up_blocks.append(brushnet_block) + + if not is_final_block: + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_up_blocks.append(brushnet_block) + + @classmethod + def from_unet( + cls, + unet: UNet2DConditionModel, + brushnet_conditioning_channel_order: str = "rgb", + conditioning_embedding_out_channels: Optional[Tuple[int, ...]] = (16, 32, 96, 256), + load_weights_from_unet: bool = True, + conditioning_channels: int = 5, + ): + r""" + Instantiate a [`BrushNetModel`] from [`UNet2DConditionModel`]. + + Parameters: + unet (`UNet2DConditionModel`): + The UNet model weights to copy to the [`BrushNetModel`]. All configuration options are also copied + where applicable. 
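+
+ Example (illustrative; assumes `unet` is an already-loaded `UNet2DConditionModel`):
+
+ >>> brushnet = BrushNetModel.from_unet(unet, load_weights_from_unet=True)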
+ """ + transformer_layers_per_block = ( + unet.config.transformer_layers_per_block if "transformer_layers_per_block" in unet.config else 1 + ) + encoder_hid_dim = unet.config.encoder_hid_dim if "encoder_hid_dim" in unet.config else None + encoder_hid_dim_type = unet.config.encoder_hid_dim_type if "encoder_hid_dim_type" in unet.config else None + addition_embed_type = unet.config.addition_embed_type if "addition_embed_type" in unet.config else None + addition_time_embed_dim = ( + unet.config.addition_time_embed_dim if "addition_time_embed_dim" in unet.config else None + ) + + brushnet = cls( + in_channels=unet.config.in_channels, + conditioning_channels=conditioning_channels, + flip_sin_to_cos=unet.config.flip_sin_to_cos, + freq_shift=unet.config.freq_shift, + # down_block_types=['DownBlock2D','DownBlock2D','DownBlock2D','DownBlock2D'], + down_block_types=[ + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "DownBlock2D", + ], + # mid_block_type='MidBlock2D', + mid_block_type="UNetMidBlock2DCrossAttn", + # up_block_types=['UpBlock2D','UpBlock2D','UpBlock2D','UpBlock2D'], + up_block_types=["UpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D"], + only_cross_attention=unet.config.only_cross_attention, + block_out_channels=unet.config.block_out_channels, + layers_per_block=unet.config.layers_per_block, + downsample_padding=unet.config.downsample_padding, + mid_block_scale_factor=unet.config.mid_block_scale_factor, + act_fn=unet.config.act_fn, + norm_num_groups=unet.config.norm_num_groups, + norm_eps=unet.config.norm_eps, + cross_attention_dim=unet.config.cross_attention_dim, + transformer_layers_per_block=transformer_layers_per_block, + encoder_hid_dim=encoder_hid_dim, + encoder_hid_dim_type=encoder_hid_dim_type, + attention_head_dim=unet.config.attention_head_dim, + num_attention_heads=unet.config.num_attention_heads, + use_linear_projection=unet.config.use_linear_projection, + class_embed_type=unet.config.class_embed_type, + addition_embed_type=addition_embed_type, + addition_time_embed_dim=addition_time_embed_dim, + num_class_embeds=unet.config.num_class_embeds, + upcast_attention=unet.config.upcast_attention, + resnet_time_scale_shift=unet.config.resnet_time_scale_shift, + projection_class_embeddings_input_dim=unet.config.projection_class_embeddings_input_dim, + brushnet_conditioning_channel_order=brushnet_conditioning_channel_order, + conditioning_embedding_out_channels=conditioning_embedding_out_channels, + ) + + if load_weights_from_unet: + conv_in_condition_weight = torch.zeros_like(brushnet.conv_in_condition.weight) + conv_in_condition_weight[:, :4, ...] = unet.conv_in.weight + conv_in_condition_weight[:, 4:8, ...] 
= unet.conv_in.weight + brushnet.conv_in_condition.weight = torch.nn.Parameter(conv_in_condition_weight) + brushnet.conv_in_condition.bias = unet.conv_in.bias + + brushnet.time_proj.load_state_dict(unet.time_proj.state_dict()) + brushnet.time_embedding.load_state_dict(unet.time_embedding.state_dict()) + + if brushnet.class_embedding: + brushnet.class_embedding.load_state_dict(unet.class_embedding.state_dict()) + + brushnet.down_blocks.load_state_dict(unet.down_blocks.state_dict(), strict=False) + brushnet.mid_block.load_state_dict(unet.mid_block.state_dict(), strict=False) + brushnet.up_blocks.load_state_dict(unet.up_blocks.state_dict(), strict=False) + + return brushnet.to(unet.dtype) + + @property + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.attn_processors + def attn_processors(self) -> Dict[str, AttentionProcessor]: + r""" + Returns: + `dict` of attention processors: A dictionary containing all attention processors used in the model with + indexed by its weight name. + """ + # set recursively + processors = {} + + def fn_recursive_add_processors(name: str, module: torch.nn.Module, processors: Dict[str, AttentionProcessor]): + if hasattr(module, "get_processor"): + processors[f"{name}.processor"] = module.get_processor(return_deprecated_lora=True) + + for sub_name, child in module.named_children(): + fn_recursive_add_processors(f"{name}.{sub_name}", child, processors) + + return processors + + for name, module in self.named_children(): + fn_recursive_add_processors(name, module, processors) + + return processors + + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.set_attn_processor + def set_attn_processor(self, processor: Union[AttentionProcessor, Dict[str, AttentionProcessor]]): + r""" + Sets the attention processor to use to compute attention. + + Parameters: + processor (`dict` of `AttentionProcessor` or only `AttentionProcessor`): + The instantiated processor class or a dictionary of processor classes that will be set as the processor + for **all** `Attention` layers. + + If `processor` is a dict, the key needs to define the path to the corresponding cross attention + processor. This is strongly recommended when setting trainable attention processors. + + """ + count = len(self.attn_processors.keys()) + + if isinstance(processor, dict) and len(processor) != count: + raise ValueError( + f"A dict of processors was passed, but the number of processors {len(processor)} does not match the" + f" number of attention layers: {count}. Please make sure to pass {count} processor classes." + ) + + def fn_recursive_attn_processor(name: str, module: torch.nn.Module, processor): + if hasattr(module, "set_processor"): + if not isinstance(processor, dict): + module.set_processor(processor) + else: + module.set_processor(processor.pop(f"{name}.processor")) + + for sub_name, child in module.named_children(): + fn_recursive_attn_processor(f"{name}.{sub_name}", child, processor) + + for name, module in self.named_children(): + fn_recursive_attn_processor(name, module, processor) + + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.set_default_attn_processor + def set_default_attn_processor(self): + """ + Disables custom attention processors and sets the default attention implementation. 
+ """ + if all(proc.__class__ in ADDED_KV_ATTENTION_PROCESSORS for proc in self.attn_processors.values()): + processor = AttnAddedKVProcessor() + elif all(proc.__class__ in CROSS_ATTENTION_PROCESSORS for proc in self.attn_processors.values()): + processor = AttnProcessor() + else: + raise ValueError( + f"Cannot call `set_default_attn_processor` when attention processors are of type {next(iter(self.attn_processors.values()))}" + ) + + self.set_attn_processor(processor) + + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.set_attention_slice + def set_attention_slice(self, slice_size: Union[str, int, List[int]]) -> None: + r""" + Enable sliced attention computation. + + When this option is enabled, the attention module splits the input tensor in slices to compute attention in + several steps. This is useful for saving some memory in exchange for a small decrease in speed. + + Args: + slice_size (`str` or `int` or `list(int)`, *optional*, defaults to `"auto"`): + When `"auto"`, input to the attention heads is halved, so attention is computed in two steps. If + `"max"`, maximum amount of memory is saved by running only one slice at a time. If a number is + provided, uses as many slices as `attention_head_dim // slice_size`. In this case, `attention_head_dim` + must be a multiple of `slice_size`. + """ + sliceable_head_dims = [] + + def fn_recursive_retrieve_sliceable_dims(module: torch.nn.Module): + if hasattr(module, "set_attention_slice"): + sliceable_head_dims.append(module.sliceable_head_dim) + + for child in module.children(): + fn_recursive_retrieve_sliceable_dims(child) + + # retrieve number of attention layers + for module in self.children(): + fn_recursive_retrieve_sliceable_dims(module) + + num_sliceable_layers = len(sliceable_head_dims) + + if slice_size == "auto": + # half the attention head size is usually a good trade-off between + # speed and memory + slice_size = [dim // 2 for dim in sliceable_head_dims] + elif slice_size == "max": + # make smallest slice possible + slice_size = num_sliceable_layers * [1] + + slice_size = num_sliceable_layers * [slice_size] if not isinstance(slice_size, list) else slice_size + + if len(slice_size) != len(sliceable_head_dims): + raise ValueError( + f"You have provided {len(slice_size)}, but {self.config} has {len(sliceable_head_dims)} different" + f" attention layers. Make sure to match `len(slice_size)` to be {len(sliceable_head_dims)}." + ) + + for i in range(len(slice_size)): + size = slice_size[i] + dim = sliceable_head_dims[i] + if size is not None and size > dim: + raise ValueError(f"size {size} has to be smaller or equal to {dim}.") + + # Recursively walk through all the children. 
+ # Any children which exposes the set_attention_slice method + # gets the message + def fn_recursive_set_attention_slice(module: torch.nn.Module, slice_size: List[int]): + if hasattr(module, "set_attention_slice"): + module.set_attention_slice(slice_size.pop()) + + for child in module.children(): + fn_recursive_set_attention_slice(child, slice_size) + + reversed_slice_size = list(reversed(slice_size)) + for module in self.children(): + fn_recursive_set_attention_slice(module, reversed_slice_size) + + def _set_gradient_checkpointing(self, module, value: bool = False) -> None: + if isinstance(module, (CrossAttnDownBlock2D, DownBlock2D)): + module.gradient_checkpointing = value + + def forward( + self, + sample: torch.FloatTensor, + timestep: Union[torch.Tensor, float, int], + encoder_hidden_states: torch.Tensor, + brushnet_cond: torch.FloatTensor, + conditioning_scale: float = 1.0, + class_labels: Optional[torch.Tensor] = None, + timestep_cond: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + added_cond_kwargs: Optional[Dict[str, torch.Tensor]] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + guess_mode: bool = False, + return_dict: bool = True, + ) -> Union[BrushNetOutput, Tuple[Tuple[torch.FloatTensor, ...], torch.FloatTensor]]: + """ + The [`BrushNetModel`] forward method. + + Args: + sample (`torch.FloatTensor`): + The noisy input tensor. + timestep (`Union[torch.Tensor, float, int]`): + The number of timesteps to denoise an input. + encoder_hidden_states (`torch.Tensor`): + The encoder hidden states. + brushnet_cond (`torch.FloatTensor`): + The conditional input tensor of shape `(batch_size, sequence_length, hidden_size)`. + conditioning_scale (`float`, defaults to `1.0`): + The scale factor for BrushNet outputs. + class_labels (`torch.Tensor`, *optional*, defaults to `None`): + Optional class labels for conditioning. Their embeddings will be summed with the timestep embeddings. + timestep_cond (`torch.Tensor`, *optional*, defaults to `None`): + Additional conditional embeddings for timestep. If provided, the embeddings will be summed with the + timestep_embedding passed through the `self.time_embedding` layer to obtain the final timestep + embeddings. + attention_mask (`torch.Tensor`, *optional*, defaults to `None`): + An attention mask of shape `(batch, key_tokens)` is applied to `encoder_hidden_states`. If `1` the mask + is kept, otherwise if `0` it is discarded. Mask will be converted into a bias, which adds large + negative values to the attention scores corresponding to "discard" tokens. + added_cond_kwargs (`dict`): + Additional conditions for the Stable Diffusion XL UNet. + cross_attention_kwargs (`dict[str]`, *optional*, defaults to `None`): + A kwargs dictionary that if specified is passed along to the `AttnProcessor`. + guess_mode (`bool`, defaults to `False`): + In this mode, the BrushNet encoder tries its best to recognize the input content of the input even if + you remove all prompts. A `guidance_scale` between 3.0 and 5.0 is recommended. + return_dict (`bool`, defaults to `True`): + Whether or not to return a [`~models.brushnet.BrushNetOutput`] instead of a plain tuple. + + Returns: + [`~models.brushnet.BrushNetOutput`] **or** `tuple`: + If `return_dict` is `True`, a [`~models.brushnet.BrushNetOutput`] is returned, otherwise a tuple is + returned where the first element is the sample tensor. 
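+
+ Example (illustrative; shapes assume the default config with `in_channels=4`
+ and `conditioning_channels=5`, and all names are hypothetical):
+
+ >>> out = brushnet(
+ ...     sample=noisy_latents,               # (B, 4, h, w) latent sample
+ ...     timestep=t,
+ ...     encoder_hidden_states=text_embeds,
+ ...     brushnet_cond=cond_latents,         # (B, 5, h, w) conditioning at latent resolution
+ ...     return_dict=True,
+ ... )
+ >>> feats = (out.down_block_res_samples, out.mid_block_res_sample, out.up_block_res_samples)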
+ """ + # check channel order + channel_order = self.config.brushnet_conditioning_channel_order + + if channel_order == "rgb": + # in rgb order by default + ... + elif channel_order == "bgr": + brushnet_cond = torch.flip(brushnet_cond, dims=[1]) + else: + raise ValueError(f"unknown `brushnet_conditioning_channel_order`: {channel_order}") + + # prepare attention_mask + if attention_mask is not None: + attention_mask = (1 - attention_mask.to(sample.dtype)) * -10000.0 + attention_mask = attention_mask.unsqueeze(1) + + # 1. time + timesteps = timestep + if not torch.is_tensor(timesteps): + # TODO: this requires sync between CPU and GPU. So try to pass timesteps as tensors if you can + # This would be a good case for the `match` statement (Python 3.10+) + is_mps = sample.device.type == "mps" + if isinstance(timestep, float): + dtype = torch.float32 if is_mps else torch.float64 + else: + dtype = torch.int32 if is_mps else torch.int64 + timesteps = torch.tensor([timesteps], dtype=dtype, device=sample.device) + elif len(timesteps.shape) == 0: + timesteps = timesteps[None].to(sample.device) + + # broadcast to batch dimension in a way that's compatible with ONNX/Core ML + timesteps = timesteps.expand(sample.shape[0]) + + t_emb = self.time_proj(timesteps) + + # timesteps does not contain any weights and will always return f32 tensors + # but time_embedding might actually be running in fp16. so we need to cast here. + # there might be better ways to encapsulate this. + t_emb = t_emb.to(dtype=sample.dtype) + + emb = self.time_embedding(t_emb, timestep_cond) + aug_emb = None + + if self.class_embedding is not None: + if class_labels is None: + raise ValueError("class_labels should be provided when num_class_embeds > 0") + + if self.config.class_embed_type == "timestep": + class_labels = self.time_proj(class_labels) + + class_emb = self.class_embedding(class_labels).to(dtype=self.dtype) + emb = emb + class_emb + + if self.config.addition_embed_type is not None: + if self.config.addition_embed_type == "text": + aug_emb = self.add_embedding(encoder_hidden_states) + + elif self.config.addition_embed_type == "text_time": + if "text_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_time' which requires the keyword argument `text_embeds` to be passed in `added_cond_kwargs`" + ) + text_embeds = added_cond_kwargs.get("text_embeds") + if "time_ids" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_time' which requires the keyword argument `time_ids` to be passed in `added_cond_kwargs`" + ) + time_ids = added_cond_kwargs.get("time_ids") + time_embeds = self.add_time_proj(time_ids.flatten()) + time_embeds = time_embeds.reshape((text_embeds.shape[0], -1)) + + add_embeds = torch.concat([text_embeds, time_embeds], dim=-1) + add_embeds = add_embeds.to(emb.dtype) + aug_emb = self.add_embedding(add_embeds) + + emb = emb + aug_emb if aug_emb is not None else emb + + # 2. pre-process + brushnet_cond = torch.concat([sample, brushnet_cond], 1) + sample = self.conv_in_condition(brushnet_cond) + + # 3. 
down + down_block_res_samples = (sample,) + for downsample_block in self.down_blocks: + if hasattr(downsample_block, "has_cross_attention") and downsample_block.has_cross_attention: + sample, res_samples = downsample_block( + hidden_states=sample, + temb=emb, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + ) + else: + sample, res_samples = downsample_block(hidden_states=sample, temb=emb) + + down_block_res_samples += res_samples + + # 4. PaintingNet down blocks + brushnet_down_block_res_samples = () + for down_block_res_sample, brushnet_down_block in zip(down_block_res_samples, self.brushnet_down_blocks): + down_block_res_sample = brushnet_down_block(down_block_res_sample) + brushnet_down_block_res_samples = brushnet_down_block_res_samples + (down_block_res_sample,) + + # 5. mid + if self.mid_block is not None: + if hasattr(self.mid_block, "has_cross_attention") and self.mid_block.has_cross_attention: + sample = self.mid_block( + sample, + emb, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + ) + else: + sample = self.mid_block(sample, emb) + + # 6. BrushNet mid blocks + brushnet_mid_block_res_sample = self.brushnet_mid_block(sample) + + # 7. up + up_block_res_samples = () + for i, upsample_block in enumerate(self.up_blocks): + is_final_block = i == len(self.up_blocks) - 1 + + res_samples = down_block_res_samples[-len(upsample_block.resnets) :] + down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)] + + # if we have not reached the final block and need to forward the + # upsample size, we do it here + if not is_final_block: + upsample_size = down_block_res_samples[-1].shape[2:] + + if hasattr(upsample_block, "has_cross_attention") and upsample_block.has_cross_attention: + sample, up_res_samples = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + upsample_size=upsample_size, + attention_mask=attention_mask, + return_res_samples=True, + ) + else: + sample, up_res_samples = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + upsample_size=upsample_size, + return_res_samples=True, + ) + + up_block_res_samples += up_res_samples + + # 8. BrushNet up blocks + brushnet_up_block_res_samples = () + for up_block_res_sample, brushnet_up_block in zip(up_block_res_samples, self.brushnet_up_blocks): + up_block_res_sample = brushnet_up_block(up_block_res_sample) + brushnet_up_block_res_samples = brushnet_up_block_res_samples + (up_block_res_sample,) + + # 6. 
scaling + if guess_mode and not self.config.global_pool_conditions: + scales = torch.logspace( + -1, + 0, + len(brushnet_down_block_res_samples) + 1 + len(brushnet_up_block_res_samples), + device=sample.device, + ) # 0.1 to 1.0 + scales = scales * conditioning_scale + + brushnet_down_block_res_samples = [ + sample * scale + for sample, scale in zip( + brushnet_down_block_res_samples, scales[: len(brushnet_down_block_res_samples)] + ) + ] + brushnet_mid_block_res_sample = ( + brushnet_mid_block_res_sample * scales[len(brushnet_down_block_res_samples)] + ) + brushnet_up_block_res_samples = [ + sample * scale + for sample, scale in zip( + brushnet_up_block_res_samples, scales[len(brushnet_down_block_res_samples) + 1 :] + ) + ] + else: + brushnet_down_block_res_samples = [ + sample * conditioning_scale for sample in brushnet_down_block_res_samples + ] + brushnet_mid_block_res_sample = brushnet_mid_block_res_sample * conditioning_scale + brushnet_up_block_res_samples = [sample * conditioning_scale for sample in brushnet_up_block_res_samples] + + if self.config.global_pool_conditions: + brushnet_down_block_res_samples = [ + torch.mean(sample, dim=(2, 3), keepdim=True) for sample in brushnet_down_block_res_samples + ] + brushnet_mid_block_res_sample = torch.mean(brushnet_mid_block_res_sample, dim=(2, 3), keepdim=True) + brushnet_up_block_res_samples = [ + torch.mean(sample, dim=(2, 3), keepdim=True) for sample in brushnet_up_block_res_samples + ] + + if not return_dict: + return (brushnet_down_block_res_samples, brushnet_mid_block_res_sample, brushnet_up_block_res_samples) + + return BrushNetOutput( + down_block_res_samples=brushnet_down_block_res_samples, + mid_block_res_sample=brushnet_mid_block_res_sample, + up_block_res_samples=brushnet_up_block_res_samples, + ) + + +def zero_module(module): + for p in module.parameters(): + nn.init.zeros_(p) + return module diff --git a/ComfyUI-BrushNet/brushnet/brushnet_xl.json b/ComfyUI-BrushNet/brushnet/brushnet_xl.json new file mode 100644 index 0000000000000000000000000000000000000000..c1a3c655549879fb2e9d7441ec71eef5167eac12 --- /dev/null +++ b/ComfyUI-BrushNet/brushnet/brushnet_xl.json @@ -0,0 +1,63 @@ +{ + "_class_name": "BrushNetModel", + "_diffusers_version": "0.27.0.dev0", + "_name_or_path": "runs/logs/brushnetsdxl_randommask/checkpoint-80000", + "act_fn": "silu", + "addition_embed_type": "text_time", + "addition_embed_type_num_heads": 64, + "addition_time_embed_dim": 256, + "attention_head_dim": [ + 5, + 10, + 20 + ], + "block_out_channels": [ + 320, + 640, + 1280 + ], + "brushnet_conditioning_channel_order": "rgb", + "class_embed_type": null, + "conditioning_channels": 5, + "conditioning_embedding_out_channels": [ + 16, + 32, + 96, + 256 + ], + "cross_attention_dim": 2048, + "down_block_types": [ + "DownBlock2D", + "DownBlock2D", + "DownBlock2D" + ], + "downsample_padding": 1, + "encoder_hid_dim": null, + "encoder_hid_dim_type": null, + "flip_sin_to_cos": true, + "freq_shift": 0, + "global_pool_conditions": false, + "in_channels": 4, + "layers_per_block": 2, + "mid_block_scale_factor": 1, + "mid_block_type": "MidBlock2D", + "norm_eps": 1e-05, + "norm_num_groups": 32, + "num_attention_heads": null, + "num_class_embeds": null, + "only_cross_attention": false, + "projection_class_embeddings_input_dim": 2816, + "resnet_time_scale_shift": "default", + "transformer_layers_per_block": [ + 1, + 2, + 10 + ], + "up_block_types": [ + "UpBlock2D", + "UpBlock2D", + "UpBlock2D" + ], + "upcast_attention": null, + "use_linear_projection": true +} diff --git 
a/ComfyUI-BrushNet/brushnet/powerpaint.json b/ComfyUI-BrushNet/brushnet/powerpaint.json new file mode 100644 index 0000000000000000000000000000000000000000..4d7c73e9f5654cd775db99a0d77234765f808e6c --- /dev/null +++ b/ComfyUI-BrushNet/brushnet/powerpaint.json @@ -0,0 +1,57 @@ +{ + "_class_name": "BrushNetModel", + "_diffusers_version": "0.27.2", + "act_fn": "silu", + "addition_embed_type": null, + "addition_embed_type_num_heads": 64, + "addition_time_embed_dim": null, + "attention_head_dim": 8, + "block_out_channels": [ + 320, + 640, + 1280, + 1280 + ], + "brushnet_conditioning_channel_order": "rgb", + "class_embed_type": null, + "conditioning_channels": 5, + "conditioning_embedding_out_channels": [ + 16, + 32, + 96, + 256 + ], + "cross_attention_dim": 768, + "down_block_types": [ + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "DownBlock2D" + ], + "downsample_padding": 1, + "encoder_hid_dim": null, + "encoder_hid_dim_type": null, + "flip_sin_to_cos": true, + "freq_shift": 0, + "global_pool_conditions": false, + "in_channels": 4, + "layers_per_block": 2, + "mid_block_scale_factor": 1, + "mid_block_type": "UNetMidBlock2DCrossAttn", + "norm_eps": 1e-05, + "norm_num_groups": 32, + "num_attention_heads": null, + "num_class_embeds": null, + "only_cross_attention": false, + "projection_class_embeddings_input_dim": null, + "resnet_time_scale_shift": "default", + "transformer_layers_per_block": 1, + "up_block_types": [ + "UpBlock2D", + "CrossAttnUpBlock2D", + "CrossAttnUpBlock2D", + "CrossAttnUpBlock2D" + ], + "upcast_attention": false, + "use_linear_projection": false +} diff --git a/ComfyUI-BrushNet/brushnet/powerpaint_utils.py b/ComfyUI-BrushNet/brushnet/powerpaint_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..bcbb1f715bd33ef79064361be41c99309a176424 --- /dev/null +++ b/ComfyUI-BrushNet/brushnet/powerpaint_utils.py @@ -0,0 +1,496 @@ +import copy +import random + +import torch +import torch.nn as nn +from transformers import CLIPTokenizer +from typing import Any, List, Optional, Union + +class TokenizerWrapper: + """Tokenizer wrapper for CLIPTokenizer. Only support CLIPTokenizer + currently. This wrapper is modified from https://github.com/huggingface/dif + fusers/blob/e51f19aee82c8dd874b715a09dbc521d88835d68/src/diffusers/loaders. + py#L358 # noqa. + + Args: + from_pretrained (Union[str, os.PathLike], optional): The *model id* + of a pretrained model or a path to a *directory* containing + model weights and config. Defaults to None. + from_config (Union[str, os.PathLike], optional): The *model id* + of a pretrained model or a path to a *directory* containing + model weights and config. Defaults to None. + + *args, **kwargs: If `from_pretrained` is passed, *args and **kwargs + will be passed to `from_pretrained` function. Otherwise, *args + and **kwargs will be used to initialize the model by + `self._module_cls(*args, **kwargs)`. + """ + + def __init__(self, tokenizer: CLIPTokenizer): + self.wrapped = tokenizer + self.token_map = {} + + def __getattr__(self, name: str) -> Any: + if name in self.__dict__: + return getattr(self, name) + #if name == "wrapped": + # return getattr(self, 'wrapped')#super().__getattr__("wrapped") + + try: + return getattr(self.wrapped, name) + except AttributeError: + raise AttributeError( + "'name' cannot be found in both " + f"'{self.__class__.__name__}' and " + f"'{self.__class__.__name__}.tokenizer'." 
+ ) + + def try_adding_tokens(self, tokens: Union[str, List[str]], *args, **kwargs): + """Attempt to add tokens to the tokenizer. + + Args: + tokens (Union[str, List[str]]): The tokens to be added. + """ + num_added_tokens = self.wrapped.add_tokens(tokens, *args, **kwargs) + assert num_added_tokens != 0, ( + f"The tokenizer already contains the token {tokens}. Please pass " + "a different `placeholder_token` that is not already in the " + "tokenizer." + ) + + def get_token_info(self, token: str) -> dict: + """Get the information of a token, including its start and end index in + the current tokenizer. + + Args: + token (str): The token to be queried. + + Returns: + dict: The information of the token, including its start and end + index in current tokenizer. + """ + token_ids = self.__call__(token).input_ids + start, end = token_ids[1], token_ids[-2] + 1 + return {"name": token, "start": start, "end": end} + + def add_placeholder_token(self, placeholder_token: str, *args, num_vec_per_token: int = 1, **kwargs): + """Add placeholder tokens to the tokenizer. + + Args: + placeholder_token (str): The placeholder token to be added. + num_vec_per_token (int, optional): The number of vectors of + the added placeholder token. + *args, **kwargs: The arguments for `self.wrapped.add_tokens`. + """ + output = [] + if num_vec_per_token == 1: + self.try_adding_tokens(placeholder_token, *args, **kwargs) + output.append(placeholder_token) + else: + output = [] + for i in range(num_vec_per_token): + ith_token = placeholder_token + f"_{i}" + self.try_adding_tokens(ith_token, *args, **kwargs) + output.append(ith_token) + + for token in self.token_map: + if token in placeholder_token: + raise ValueError( + f"The tokenizer already has placeholder token {token} " + f"that can get confused with {placeholder_token} " + "keep placeholder tokens independent" + ) + self.token_map[placeholder_token] = output + + def replace_placeholder_tokens_in_text( + self, text: Union[str, List[str]], vector_shuffle: bool = False, prop_tokens_to_load: float = 1.0 + ) -> Union[str, List[str]]: + """Replace the keywords in text with placeholder tokens. This function + will be called in `self.__call__` and `self.encode`. + + Args: + text (Union[str, List[str]]): The text to be processed. + vector_shuffle (bool, optional): Whether to shuffle the vectors. + Defaults to False. + prop_tokens_to_load (float, optional): The proportion of tokens to + be loaded. If 1.0, all tokens will be loaded. Defaults to 1.0. + + Returns: + Union[str, List[str]]: The processed text. + """ + if isinstance(text, list): + output = [] + for i in range(len(text)): + output.append(self.replace_placeholder_tokens_in_text(text[i], vector_shuffle=vector_shuffle)) + return output + + for placeholder_token in self.token_map: + if placeholder_token in text: + tokens = self.token_map[placeholder_token] + tokens = tokens[: 1 + int(len(tokens) * prop_tokens_to_load)] + if vector_shuffle: + tokens = copy.copy(tokens) + random.shuffle(tokens) + text = text.replace(placeholder_token, " ".join(tokens)) + return text + + def replace_text_with_placeholder_tokens(self, text: Union[str, List[str]]) -> Union[str, List[str]]: + """Replace the placeholder tokens in text with the original keywords. + This function will be called in `self.decode`. + + Args: + text (Union[str, List[str]]): The text to be processed. + + Returns: + Union[str, List[str]]: The processed text. 
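+
+ Example (illustrative; assumes a placeholder previously registered with
+ `add_placeholder_token("myword", num_vec_per_token=2)`):
+
+ >>> wrapper.replace_text_with_placeholder_tokens("a photo of myword_0 myword_1")
+ 'a photo of myword'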
+ """ + if isinstance(text, list): + output = [] + for i in range(len(text)): + output.append(self.replace_text_with_placeholder_tokens(text[i])) + return output + + for placeholder_token, tokens in self.token_map.items(): + merged_tokens = " ".join(tokens) + if merged_tokens in text: + text = text.replace(merged_tokens, placeholder_token) + return text + + def __call__( + self, + text: Union[str, List[str]], + *args, + vector_shuffle: bool = False, + prop_tokens_to_load: float = 1.0, + **kwargs, + ): + """The call function of the wrapper. + + Args: + text (Union[str, List[str]]): The text to be tokenized. + vector_shuffle (bool, optional): Whether to shuffle the vectors. + Defaults to False. + prop_tokens_to_load (float, optional): The proportion of tokens to + be loaded. If 1.0, all tokens will be loaded. Defaults to 1.0 + *args, **kwargs: The arguments for `self.wrapped.__call__`. + """ + replaced_text = self.replace_placeholder_tokens_in_text( + text, vector_shuffle=vector_shuffle, prop_tokens_to_load=prop_tokens_to_load + ) + + return self.wrapped.__call__(replaced_text, *args, **kwargs) + + def encode(self, text: Union[str, List[str]], *args, **kwargs): + """Encode the passed text to token index. + + Args: + text (Union[str, List[str]]): The text to be encode. + *args, **kwargs: The arguments for `self.wrapped.__call__`. + """ + replaced_text = self.replace_placeholder_tokens_in_text(text) + return self.wrapped(replaced_text, *args, **kwargs) + + def decode(self, token_ids, return_raw: bool = False, *args, **kwargs) -> Union[str, List[str]]: + """Decode the token index to text. + + Args: + token_ids: The token index to be decoded. + return_raw: Whether keep the placeholder token in the text. + Defaults to False. + *args, **kwargs: The arguments for `self.wrapped.decode`. + + Returns: + Union[str, List[str]]: The decoded text. + """ + text = self.wrapped.decode(token_ids, *args, **kwargs) + if return_raw: + return text + replaced_text = self.replace_text_with_placeholder_tokens(text) + return replaced_text + + def __repr__(self): + """The representation of the wrapper.""" + s = super().__repr__() + prefix = f"Wrapped Module Class: {self._module_cls}\n" + prefix += f"Wrapped Module Name: {self._module_name}\n" + if self._from_pretrained: + prefix += f"From Pretrained: {self._from_pretrained}\n" + s = prefix + s + return s + + +class EmbeddingLayerWithFixes(nn.Module): + """The revised embedding layer to support external embeddings. This design + of this class is inspired by https://github.com/AUTOMATIC1111/stable- + diffusion-webui/blob/22bcc7be428c94e9408f589966c2040187245d81/modules/sd_hi + jack.py#L224 # noqa. + + Args: + wrapped (nn.Emebdding): The embedding layer to be wrapped. + external_embeddings (Union[dict, List[dict]], optional): The external + embeddings added to this layer. Defaults to None. + """ + + def __init__(self, wrapped: nn.Embedding, external_embeddings: Optional[Union[dict, List[dict]]] = None): + super().__init__() + self.wrapped = wrapped + self.num_embeddings = wrapped.weight.shape[0] + + self.external_embeddings = [] + if external_embeddings: + self.add_embeddings(external_embeddings) + + self.trainable_embeddings = nn.ParameterDict() + + @property + def weight(self): + """Get the weight of wrapped embedding layer.""" + return self.wrapped.weight + + def check_duplicate_names(self, embeddings: List[dict]): + """Check whether duplicate names exist in list of 'external + embeddings'. + + Args: + embeddings (List[dict]): A list of embedding to be check. 
+ """ + names = [emb["name"] for emb in embeddings] + assert len(names) == len(set(names)), ( + "Found duplicated names in 'external_embeddings'. Name list: " f"'{names}'" + ) + + def check_ids_overlap(self, embeddings): + """Check whether overlap exist in token ids of 'external_embeddings'. + + Args: + embeddings (List[dict]): A list of embedding to be check. + """ + ids_range = [[emb["start"], emb["end"], emb["name"]] for emb in embeddings] + ids_range.sort() # sort by 'start' + # check if 'end' has overlapping + for idx in range(len(ids_range) - 1): + name1, name2 = ids_range[idx][-1], ids_range[idx + 1][-1] + assert ids_range[idx][1] <= ids_range[idx + 1][0], ( + f"Found ids overlapping between embeddings '{name1}' " f"and '{name2}'." + ) + + def add_embeddings(self, embeddings: Optional[Union[dict, List[dict]]]): + """Add external embeddings to this layer. + + Use case: + + >>> 1. Add token to tokenizer and get the token id. + >>> tokenizer = TokenizerWrapper('openai/clip-vit-base-patch32') + >>> # 'how much' in kiswahili + >>> tokenizer.add_placeholder_tokens('ngapi', num_vec_per_token=4) + >>> + >>> 2. Add external embeddings to the model. + >>> new_embedding = { + >>> 'name': 'ngapi', # 'how much' in kiswahili + >>> 'embedding': torch.ones(1, 15) * 4, + >>> 'start': tokenizer.get_token_info('kwaheri')['start'], + >>> 'end': tokenizer.get_token_info('kwaheri')['end'], + >>> 'trainable': False # if True, will registry as a parameter + >>> } + >>> embedding_layer = nn.Embedding(10, 15) + >>> embedding_layer_wrapper = EmbeddingLayerWithFixes(embedding_layer) + >>> embedding_layer_wrapper.add_embeddings(new_embedding) + >>> + >>> 3. Forward tokenizer and embedding layer! + >>> input_text = ['hello, ngapi!', 'hello my friend, ngapi?'] + >>> input_ids = tokenizer( + >>> input_text, padding='max_length', truncation=True, + >>> return_tensors='pt')['input_ids'] + >>> out_feat = embedding_layer_wrapper(input_ids) + >>> + >>> 4. Let's validate the result! + >>> assert (out_feat[0, 3: 7] == 2.3).all() + >>> assert (out_feat[2, 5: 9] == 2.3).all() + + Args: + embeddings (Union[dict, list[dict]]): The external embeddings to + be added. Each dict must contain the following 4 fields: 'name' + (the name of this embedding), 'embedding' (the embedding + tensor), 'start' (the start token id of this embedding), 'end' + (the end token id of this embedding). For example: + `{name: NAME, start: START, end: END, embedding: torch.Tensor}` + """ + if isinstance(embeddings, dict): + embeddings = [embeddings] + + self.external_embeddings += embeddings + self.check_duplicate_names(self.external_embeddings) + self.check_ids_overlap(self.external_embeddings) + + # set for trainable + added_trainable_emb_info = [] + for embedding in embeddings: + trainable = embedding.get("trainable", False) + if trainable: + name = embedding["name"] + embedding["embedding"] = torch.nn.Parameter(embedding["embedding"]) + self.trainable_embeddings[name] = embedding["embedding"] + added_trainable_emb_info.append(name) + + added_emb_info = [emb["name"] for emb in embeddings] + added_emb_info = ", ".join(added_emb_info) + print(f"Successfully add external embeddings: {added_emb_info}.", "current") + + if added_trainable_emb_info: + added_trainable_emb_info = ", ".join(added_trainable_emb_info) + print("Successfully add trainable external embeddings: " f"{added_trainable_emb_info}", "current") + + def replace_input_ids(self, input_ids: torch.Tensor) -> torch.Tensor: + """Replace external input ids to 0. 
+ + Args: + input_ids (torch.Tensor): The input ids to be replaced. + + Returns: + torch.Tensor: The replaced input ids. + """ + input_ids_fwd = input_ids.clone() + input_ids_fwd[input_ids_fwd >= self.num_embeddings] = 0 + return input_ids_fwd + + def replace_embeddings( + self, input_ids: torch.Tensor, embedding: torch.Tensor, external_embedding: dict + ) -> torch.Tensor: + """Replace external embedding to the embedding layer. Noted that, in + this function we use `torch.cat` to avoid inplace modification. + + Args: + input_ids (torch.Tensor): The original token ids. Shape like + [LENGTH, ]. + embedding (torch.Tensor): The embedding of token ids after + `replace_input_ids` function. + external_embedding (dict): The external embedding to be replaced. + + Returns: + torch.Tensor: The replaced embedding. + """ + new_embedding = [] + + name = external_embedding["name"] + start = external_embedding["start"] + end = external_embedding["end"] + target_ids_to_replace = [i for i in range(start, end)] + ext_emb = external_embedding["embedding"] + + # do not need to replace + if not (input_ids == start).any(): + return embedding + + # start replace + s_idx, e_idx = 0, 0 + while e_idx < len(input_ids): + if input_ids[e_idx] == start: + if e_idx != 0: + # add embedding do not need to replace + new_embedding.append(embedding[s_idx:e_idx]) + + # check if the next embedding need to replace is valid + actually_ids_to_replace = [int(i) for i in input_ids[e_idx : e_idx + end - start]] + assert actually_ids_to_replace == target_ids_to_replace, ( + f"Invalid 'input_ids' in position: {s_idx} to {e_idx}. " + f"Expect '{target_ids_to_replace}' for embedding " + f"'{name}' but found '{actually_ids_to_replace}'." + ) + + new_embedding.append(ext_emb) + + s_idx = e_idx + end - start + e_idx = s_idx + 1 + else: + e_idx += 1 + + if e_idx == len(input_ids): + new_embedding.append(embedding[s_idx:e_idx]) + + return torch.cat(new_embedding, dim=0) + + def forward(self, input_ids: torch.Tensor, external_embeddings: Optional[List[dict]] = None): + """The forward function. + + Args: + input_ids (torch.Tensor): The token ids shape like [bz, LENGTH] or + [LENGTH, ]. + external_embeddings (Optional[List[dict]]): The external + embeddings. If not passed, only `self.external_embeddings` + will be used. Defaults to None. + + input_ids: shape like [bz, LENGTH] or [LENGTH]. + """ + assert input_ids.ndim in [1, 2] + if input_ids.ndim == 1: + input_ids = input_ids.unsqueeze(0) + + if external_embeddings is None and not self.external_embeddings: + return self.wrapped(input_ids) + + input_ids_fwd = self.replace_input_ids(input_ids) + inputs_embeds = self.wrapped(input_ids_fwd) + + vecs = [] + + if external_embeddings is None: + external_embeddings = [] + elif isinstance(external_embeddings, dict): + external_embeddings = [external_embeddings] + embeddings = self.external_embeddings + external_embeddings + + for input_id, embedding in zip(input_ids, inputs_embeds): + new_embedding = embedding + for external_embedding in embeddings: + new_embedding = self.replace_embeddings(input_id, new_embedding, external_embedding) + vecs.append(new_embedding) + + return torch.stack(vecs) + + + +def add_tokens( + tokenizer, text_encoder, placeholder_tokens: list, initialize_tokens: list = None, num_vectors_per_token: int = 1 +): + """Add token for training. + + # TODO: support add tokens as dict, then we can load pretrained tokens. 
+ """ + if initialize_tokens is not None: + assert len(initialize_tokens) == len( + placeholder_tokens + ), "placeholder_token should be the same length as initialize_token" + for ii in range(len(placeholder_tokens)): + tokenizer.add_placeholder_token(placeholder_tokens[ii], num_vec_per_token=num_vectors_per_token) + + # text_encoder.set_embedding_layer() + embedding_layer = text_encoder.text_model.embeddings.token_embedding + text_encoder.text_model.embeddings.token_embedding = EmbeddingLayerWithFixes(embedding_layer) + embedding_layer = text_encoder.text_model.embeddings.token_embedding + + assert embedding_layer is not None, ( + "Do not support get embedding layer for current text encoder. " "Please check your configuration." + ) + initialize_embedding = [] + if initialize_tokens is not None: + for ii in range(len(placeholder_tokens)): + init_id = tokenizer(initialize_tokens[ii]).input_ids[1] + temp_embedding = embedding_layer.weight[init_id] + initialize_embedding.append(temp_embedding[None, ...].repeat(num_vectors_per_token, 1)) + else: + for ii in range(len(placeholder_tokens)): + init_id = tokenizer("a").input_ids[1] + temp_embedding = embedding_layer.weight[init_id] + len_emb = temp_embedding.shape[0] + init_weight = (torch.rand(num_vectors_per_token, len_emb) - 0.5) / 2.0 + initialize_embedding.append(init_weight) + + # initialize_embedding = torch.cat(initialize_embedding,dim=0) + + token_info_all = [] + for ii in range(len(placeholder_tokens)): + token_info = tokenizer.get_token_info(placeholder_tokens[ii]) + token_info["embedding"] = initialize_embedding[ii] + token_info["trainable"] = True + token_info_all.append(token_info) + embedding_layer.add_embeddings(token_info_all) diff --git a/ComfyUI-BrushNet/brushnet/unet_2d_blocks.py b/ComfyUI-BrushNet/brushnet/unet_2d_blocks.py new file mode 100644 index 0000000000000000000000000000000000000000..55f0d3fe5f56122fb34d37e13c63a73c61ad0bb8 --- /dev/null +++ b/ComfyUI-BrushNet/brushnet/unet_2d_blocks.py @@ -0,0 +1,3877 @@ +# Copyright 2024 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
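Before the UNet block definitions below, here is a minimal usage sketch (not part of the patch itself) of how the PowerPaint helpers added above in ComfyUI-BrushNet/brushnet/powerpaint_utils.py — TokenizerWrapper, EmbeddingLayerWithFixes and add_tokens — are meant to be wired together. It only restates the call pattern already described in the add_embeddings and add_tokens docstrings; the checkpoint name, import path, placeholder names and vector count are illustrative assumptions, not values taken from this diff.

# Illustrative only: the import path and checkpoint name are assumptions.
from transformers import CLIPTextModel, CLIPTokenizer
from powerpaint_utils import TokenizerWrapper, add_tokens  # assumed importable

clip_id = "openai/clip-vit-base-patch32"  # example checkpoint
tokenizer = TokenizerWrapper(CLIPTokenizer.from_pretrained(clip_id))
text_encoder = CLIPTextModel.from_pretrained(clip_id)

# Register two example placeholder tokens, each backed by 10 trainable vectors.
# add_tokens() also swaps the text encoder's token_embedding for an
# EmbeddingLayerWithFixes that knows about the newly added ids.
add_tokens(
    tokenizer,
    text_encoder,
    placeholder_tokens=["P_obj", "P_ctxt"],  # example names
    num_vectors_per_token=10,
)

# The wrapper expands "P_obj" into "P_obj_0 ... P_obj_9" before tokenizing,
# and the patched embedding layer routes those ids to the external embeddings.
ids = tokenizer(
    ["a photo of P_obj"], padding="max_length", truncation=True, return_tensors="pt"
).input_ids
feats = text_encoder.text_model.embeddings.token_embedding(ids)  # [batch, seq_len, hidden]

The design choice to keep the extra vectors in EmbeddingLayerWithFixes rather than resizing the original embedding matrix means the pretrained weights stay untouched and only the external, per-token embeddings are marked trainable.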
+from typing import Any, Dict, Optional, Tuple, Union + +import numpy as np +import torch +import torch.nn.functional as F +from torch import nn + +from diffusers.utils import deprecate, is_torch_version, logging +from diffusers.utils.torch_utils import apply_freeu +from diffusers.models.activations import get_activation +from diffusers.models.attention_processor import Attention, AttnAddedKVProcessor, AttnAddedKVProcessor2_0 +from diffusers.models.normalization import AdaGroupNorm +from diffusers.models.resnet import ( + Downsample2D, + FirDownsample2D, + FirUpsample2D, + KDownsample2D, + KUpsample2D, + ResnetBlock2D, + ResnetBlockCondNorm2D, + Upsample2D, +) +from diffusers.models.transformers.dual_transformer_2d import DualTransformer2DModel +from diffusers.models.transformers.transformer_2d import Transformer2DModel + + +logger = logging.get_logger(__name__) # pylint: disable=invalid-name + + +def get_down_block( + down_block_type: str, + num_layers: int, + in_channels: int, + out_channels: int, + temb_channels: int, + add_downsample: bool, + resnet_eps: float, + resnet_act_fn: str, + transformer_layers_per_block: int = 1, + num_attention_heads: Optional[int] = None, + resnet_groups: Optional[int] = None, + cross_attention_dim: Optional[int] = None, + downsample_padding: Optional[int] = None, + dual_cross_attention: bool = False, + use_linear_projection: bool = False, + only_cross_attention: bool = False, + upcast_attention: bool = False, + resnet_time_scale_shift: str = "default", + attention_type: str = "default", + resnet_skip_time_act: bool = False, + resnet_out_scale_factor: float = 1.0, + cross_attention_norm: Optional[str] = None, + attention_head_dim: Optional[int] = None, + downsample_type: Optional[str] = None, + dropout: float = 0.0, +): + # If attn head dim is not defined, we default it to the number of heads + if attention_head_dim is None: + logger.warning( + f"It is recommended to provide `attention_head_dim` when calling `get_down_block`. Defaulting `attention_head_dim` to {num_attention_heads}." 
+ ) + attention_head_dim = num_attention_heads + + down_block_type = down_block_type[7:] if down_block_type.startswith("UNetRes") else down_block_type + if down_block_type == "DownBlock2D": + return DownBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + downsample_padding=downsample_padding, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + elif down_block_type == "ResnetDownsampleBlock2D": + return ResnetDownsampleBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + resnet_time_scale_shift=resnet_time_scale_shift, + skip_time_act=resnet_skip_time_act, + output_scale_factor=resnet_out_scale_factor, + ) + elif down_block_type == "AttnDownBlock2D": + if add_downsample is False: + downsample_type = None + else: + downsample_type = downsample_type or "conv" # default to 'conv' + return AttnDownBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + downsample_padding=downsample_padding, + attention_head_dim=attention_head_dim, + resnet_time_scale_shift=resnet_time_scale_shift, + downsample_type=downsample_type, + ) + elif down_block_type == "CrossAttnDownBlock2D": + if cross_attention_dim is None: + raise ValueError("cross_attention_dim must be specified for CrossAttnDownBlock2D") + return CrossAttnDownBlock2D( + num_layers=num_layers, + transformer_layers_per_block=transformer_layers_per_block, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + downsample_padding=downsample_padding, + cross_attention_dim=cross_attention_dim, + num_attention_heads=num_attention_heads, + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention, + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_type=attention_type, + ) + elif down_block_type == "SimpleCrossAttnDownBlock2D": + if cross_attention_dim is None: + raise ValueError("cross_attention_dim must be specified for SimpleCrossAttnDownBlock2D") + return SimpleCrossAttnDownBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + cross_attention_dim=cross_attention_dim, + attention_head_dim=attention_head_dim, + resnet_time_scale_shift=resnet_time_scale_shift, + skip_time_act=resnet_skip_time_act, + output_scale_factor=resnet_out_scale_factor, + only_cross_attention=only_cross_attention, + cross_attention_norm=cross_attention_norm, + ) + elif down_block_type == "SkipDownBlock2D": + return SkipDownBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + add_downsample=add_downsample, + 
resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + downsample_padding=downsample_padding, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + elif down_block_type == "AttnSkipDownBlock2D": + return AttnSkipDownBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + attention_head_dim=attention_head_dim, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + elif down_block_type == "DownEncoderBlock2D": + return DownEncoderBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + downsample_padding=downsample_padding, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + elif down_block_type == "AttnDownEncoderBlock2D": + return AttnDownEncoderBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + downsample_padding=downsample_padding, + attention_head_dim=attention_head_dim, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + elif down_block_type == "KDownBlock2D": + return KDownBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + ) + elif down_block_type == "KCrossAttnDownBlock2D": + return KCrossAttnDownBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + cross_attention_dim=cross_attention_dim, + attention_head_dim=attention_head_dim, + add_self_attention=True if not add_downsample else False, + ) + raise ValueError(f"{down_block_type} does not exist.") + + +def get_mid_block( + mid_block_type: str, + temb_channels: int, + in_channels: int, + resnet_eps: float, + resnet_act_fn: str, + resnet_groups: int, + output_scale_factor: float = 1.0, + transformer_layers_per_block: int = 1, + num_attention_heads: Optional[int] = None, + cross_attention_dim: Optional[int] = None, + dual_cross_attention: bool = False, + use_linear_projection: bool = False, + mid_block_only_cross_attention: bool = False, + upcast_attention: bool = False, + resnet_time_scale_shift: str = "default", + attention_type: str = "default", + resnet_skip_time_act: bool = False, + cross_attention_norm: Optional[str] = None, + attention_head_dim: Optional[int] = 1, + dropout: float = 0.0, +): + if mid_block_type == "UNetMidBlock2DCrossAttn": + return UNetMidBlock2DCrossAttn( + transformer_layers_per_block=transformer_layers_per_block, + in_channels=in_channels, + temb_channels=temb_channels, + dropout=dropout, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + output_scale_factor=output_scale_factor, + resnet_time_scale_shift=resnet_time_scale_shift, + cross_attention_dim=cross_attention_dim, + num_attention_heads=num_attention_heads, + resnet_groups=resnet_groups, + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + upcast_attention=upcast_attention, + attention_type=attention_type, + ) + elif 
mid_block_type == "UNetMidBlock2DSimpleCrossAttn": + return UNetMidBlock2DSimpleCrossAttn( + in_channels=in_channels, + temb_channels=temb_channels, + dropout=dropout, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + output_scale_factor=output_scale_factor, + cross_attention_dim=cross_attention_dim, + attention_head_dim=attention_head_dim, + resnet_groups=resnet_groups, + resnet_time_scale_shift=resnet_time_scale_shift, + skip_time_act=resnet_skip_time_act, + only_cross_attention=mid_block_only_cross_attention, + cross_attention_norm=cross_attention_norm, + ) + elif mid_block_type == "UNetMidBlock2D": + return UNetMidBlock2D( + in_channels=in_channels, + temb_channels=temb_channels, + dropout=dropout, + num_layers=0, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + output_scale_factor=output_scale_factor, + resnet_groups=resnet_groups, + resnet_time_scale_shift=resnet_time_scale_shift, + add_attention=False, + ) + elif mid_block_type == "MidBlock2D": + return MidBlock2D( + in_channels=in_channels, + temb_channels=temb_channels, + dropout=dropout, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + output_scale_factor=output_scale_factor, + resnet_time_scale_shift=resnet_time_scale_shift, + resnet_groups=resnet_groups, + use_linear_projection=use_linear_projection, + ) + elif mid_block_type is None: + return None + else: + raise ValueError(f"unknown mid_block_type : {mid_block_type}") + + +def get_up_block( + up_block_type: str, + num_layers: int, + in_channels: int, + out_channels: int, + prev_output_channel: int, + temb_channels: int, + add_upsample: bool, + resnet_eps: float, + resnet_act_fn: str, + resolution_idx: Optional[int] = None, + transformer_layers_per_block: int = 1, + num_attention_heads: Optional[int] = None, + resnet_groups: Optional[int] = None, + cross_attention_dim: Optional[int] = None, + dual_cross_attention: bool = False, + use_linear_projection: bool = False, + only_cross_attention: bool = False, + upcast_attention: bool = False, + resnet_time_scale_shift: str = "default", + attention_type: str = "default", + resnet_skip_time_act: bool = False, + resnet_out_scale_factor: float = 1.0, + cross_attention_norm: Optional[str] = None, + attention_head_dim: Optional[int] = None, + upsample_type: Optional[str] = None, + dropout: float = 0.0, +) -> nn.Module: + # If attn head dim is not defined, we default it to the number of heads + if attention_head_dim is None: + logger.warning( + f"It is recommended to provide `attention_head_dim` when calling `get_up_block`. Defaulting `attention_head_dim` to {num_attention_heads}." 
+ ) + attention_head_dim = num_attention_heads + + up_block_type = up_block_type[7:] if up_block_type.startswith("UNetRes") else up_block_type + if up_block_type == "UpBlock2D": + return UpBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + resolution_idx=resolution_idx, + dropout=dropout, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + elif up_block_type == "ResnetUpsampleBlock2D": + return ResnetUpsampleBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + resolution_idx=resolution_idx, + dropout=dropout, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + resnet_time_scale_shift=resnet_time_scale_shift, + skip_time_act=resnet_skip_time_act, + output_scale_factor=resnet_out_scale_factor, + ) + elif up_block_type == "CrossAttnUpBlock2D": + if cross_attention_dim is None: + raise ValueError("cross_attention_dim must be specified for CrossAttnUpBlock2D") + return CrossAttnUpBlock2D( + num_layers=num_layers, + transformer_layers_per_block=transformer_layers_per_block, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + resolution_idx=resolution_idx, + dropout=dropout, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + cross_attention_dim=cross_attention_dim, + num_attention_heads=num_attention_heads, + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention, + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_type=attention_type, + ) + elif up_block_type == "SimpleCrossAttnUpBlock2D": + if cross_attention_dim is None: + raise ValueError("cross_attention_dim must be specified for SimpleCrossAttnUpBlock2D") + return SimpleCrossAttnUpBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + resolution_idx=resolution_idx, + dropout=dropout, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + cross_attention_dim=cross_attention_dim, + attention_head_dim=attention_head_dim, + resnet_time_scale_shift=resnet_time_scale_shift, + skip_time_act=resnet_skip_time_act, + output_scale_factor=resnet_out_scale_factor, + only_cross_attention=only_cross_attention, + cross_attention_norm=cross_attention_norm, + ) + elif up_block_type == "AttnUpBlock2D": + if add_upsample is False: + upsample_type = None + else: + upsample_type = upsample_type or "conv" # default to 'conv' + + return AttnUpBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + resolution_idx=resolution_idx, + dropout=dropout, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + attention_head_dim=attention_head_dim, + resnet_time_scale_shift=resnet_time_scale_shift, + upsample_type=upsample_type, + ) + elif up_block_type == "SkipUpBlock2D": + return 
SkipUpBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + resolution_idx=resolution_idx, + dropout=dropout, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + elif up_block_type == "AttnSkipUpBlock2D": + return AttnSkipUpBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + resolution_idx=resolution_idx, + dropout=dropout, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + attention_head_dim=attention_head_dim, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + elif up_block_type == "UpDecoderBlock2D": + return UpDecoderBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + resolution_idx=resolution_idx, + dropout=dropout, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + resnet_time_scale_shift=resnet_time_scale_shift, + temb_channels=temb_channels, + ) + elif up_block_type == "AttnUpDecoderBlock2D": + return AttnUpDecoderBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + resolution_idx=resolution_idx, + dropout=dropout, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + attention_head_dim=attention_head_dim, + resnet_time_scale_shift=resnet_time_scale_shift, + temb_channels=temb_channels, + ) + elif up_block_type == "KUpBlock2D": + return KUpBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + resolution_idx=resolution_idx, + dropout=dropout, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + ) + elif up_block_type == "KCrossAttnUpBlock2D": + return KCrossAttnUpBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + resolution_idx=resolution_idx, + dropout=dropout, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + cross_attention_dim=cross_attention_dim, + attention_head_dim=attention_head_dim, + ) + + raise ValueError(f"{up_block_type} does not exist.") + + +class AutoencoderTinyBlock(nn.Module): + """ + Tiny Autoencoder block used in [`AutoencoderTiny`]. It is a mini residual module consisting of plain conv + ReLU + blocks. + + Args: + in_channels (`int`): The number of input channels. + out_channels (`int`): The number of output channels. + act_fn (`str`): + ` The activation function to use. Supported values are `"swish"`, `"mish"`, `"gelu"`, and `"relu"`. + + Returns: + `torch.FloatTensor`: A tensor with the same shape as the input tensor, but with the number of channels equal to + `out_channels`. 
+ """ + + def __init__(self, in_channels: int, out_channels: int, act_fn: str): + super().__init__() + act_fn = get_activation(act_fn) + self.conv = nn.Sequential( + nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1), + act_fn, + nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1), + act_fn, + nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1), + ) + self.skip = ( + nn.Conv2d(in_channels, out_channels, kernel_size=1, bias=False) + if in_channels != out_channels + else nn.Identity() + ) + self.fuse = nn.ReLU() + + def forward(self, x: torch.FloatTensor) -> torch.FloatTensor: + return self.fuse(self.conv(x) + self.skip(x)) + + +class UNetMidBlock2D(nn.Module): + """ + A 2D UNet mid-block [`UNetMidBlock2D`] with multiple residual blocks and optional attention blocks. + + Args: + in_channels (`int`): The number of input channels. + temb_channels (`int`): The number of temporal embedding channels. + dropout (`float`, *optional*, defaults to 0.0): The dropout rate. + num_layers (`int`, *optional*, defaults to 1): The number of residual blocks. + resnet_eps (`float`, *optional*, 1e-6 ): The epsilon value for the resnet blocks. + resnet_time_scale_shift (`str`, *optional*, defaults to `default`): + The type of normalization to apply to the time embeddings. This can help to improve the performance of the + model on tasks with long-range temporal dependencies. + resnet_act_fn (`str`, *optional*, defaults to `swish`): The activation function for the resnet blocks. + resnet_groups (`int`, *optional*, defaults to 32): + The number of groups to use in the group normalization layers of the resnet blocks. + attn_groups (`Optional[int]`, *optional*, defaults to None): The number of groups for the attention blocks. + resnet_pre_norm (`bool`, *optional*, defaults to `True`): + Whether to use pre-normalization for the resnet blocks. + add_attention (`bool`, *optional*, defaults to `True`): Whether to add attention blocks. + attention_head_dim (`int`, *optional*, defaults to 1): + Dimension of a single attention head. The number of attention heads is determined based on this value and + the number of input channels. + output_scale_factor (`float`, *optional*, defaults to 1.0): The output scale factor. + + Returns: + `torch.FloatTensor`: The output of the last residual block, which is a tensor of shape `(batch_size, + in_channels, height, width)`. 
+ + """ + + def __init__( + self, + in_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", # default, spatial + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + attn_groups: Optional[int] = None, + resnet_pre_norm: bool = True, + add_attention: bool = True, + attention_head_dim: int = 1, + output_scale_factor: float = 1.0, + ): + super().__init__() + resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32) + self.add_attention = add_attention + + if attn_groups is None: + attn_groups = resnet_groups if resnet_time_scale_shift == "default" else None + + # there is always at least one resnet + if resnet_time_scale_shift == "spatial": + resnets = [ + ResnetBlockCondNorm2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm="spatial", + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + ) + ] + else: + resnets = [ + ResnetBlock2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ] + attentions = [] + + if attention_head_dim is None: + logger.warning( + f"It is not recommend to pass `attention_head_dim=None`. Defaulting `attention_head_dim` to `in_channels`: {in_channels}." + ) + attention_head_dim = in_channels + + for _ in range(num_layers): + if self.add_attention: + attentions.append( + Attention( + in_channels, + heads=in_channels // attention_head_dim, + dim_head=attention_head_dim, + rescale_output_factor=output_scale_factor, + eps=resnet_eps, + norm_num_groups=attn_groups, + spatial_norm_dim=temb_channels if resnet_time_scale_shift == "spatial" else None, + residual_connection=True, + bias=True, + upcast_softmax=True, + _from_deprecated_attn_block=True, + ) + ) + else: + attentions.append(None) + + if resnet_time_scale_shift == "spatial": + resnets.append( + ResnetBlockCondNorm2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm="spatial", + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + ) + ) + else: + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + def forward(self, hidden_states: torch.FloatTensor, temb: Optional[torch.FloatTensor] = None) -> torch.FloatTensor: + hidden_states = self.resnets[0](hidden_states, temb) + for attn, resnet in zip(self.attentions, self.resnets[1:]): + if attn is not None: + hidden_states = attn(hidden_states, temb=temb) + hidden_states = resnet(hidden_states, temb) + + return hidden_states + + +class UNetMidBlock2DCrossAttn(nn.Module): + def __init__( + self, + in_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + transformer_layers_per_block: Union[int, Tuple[int]] = 1, + 
resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + num_attention_heads: int = 1, + output_scale_factor: float = 1.0, + cross_attention_dim: int = 1280, + dual_cross_attention: bool = False, + use_linear_projection: bool = False, + upcast_attention: bool = False, + attention_type: str = "default", + ): + super().__init__() + + self.has_cross_attention = True + self.num_attention_heads = num_attention_heads + resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32) + + # support for variable transformer layers per block + if isinstance(transformer_layers_per_block, int): + transformer_layers_per_block = [transformer_layers_per_block] * num_layers + + # there is always at least one resnet + resnets = [ + ResnetBlock2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ] + attentions = [] + + for i in range(num_layers): + if not dual_cross_attention: + attentions.append( + Transformer2DModel( + num_attention_heads, + in_channels // num_attention_heads, + in_channels=in_channels, + num_layers=transformer_layers_per_block[i], + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + use_linear_projection=use_linear_projection, + upcast_attention=upcast_attention, + attention_type=attention_type, + ) + ) + else: + attentions.append( + DualTransformer2DModel( + num_attention_heads, + in_channels // num_attention_heads, + in_channels=in_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) + ) + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + self.gradient_checkpointing = False + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: + if cross_attention_kwargs is not None: + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. 
`scale` will be ignored.") + + hidden_states = self.resnets[0](hidden_states, temb) + for attn, resnet in zip(self.attentions, self.resnets[1:]): + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + ckpt_kwargs: Dict[str, Any] = {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {} + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), + hidden_states, + temb, + **ckpt_kwargs, + ) + else: + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + hidden_states = resnet(hidden_states, temb) + + return hidden_states + + +class UNetMidBlock2DSimpleCrossAttn(nn.Module): + def __init__( + self, + in_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + output_scale_factor: float = 1.0, + cross_attention_dim: int = 1280, + skip_time_act: bool = False, + only_cross_attention: bool = False, + cross_attention_norm: Optional[str] = None, + ): + super().__init__() + + self.has_cross_attention = True + + self.attention_head_dim = attention_head_dim + resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32) + + self.num_heads = in_channels // self.attention_head_dim + + # there is always at least one resnet + resnets = [ + ResnetBlock2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + ) + ] + attentions = [] + + for _ in range(num_layers): + processor = ( + AttnAddedKVProcessor2_0() if hasattr(F, "scaled_dot_product_attention") else AttnAddedKVProcessor() + ) + + attentions.append( + Attention( + query_dim=in_channels, + cross_attention_dim=in_channels, + heads=self.num_heads, + dim_head=self.attention_head_dim, + added_kv_proj_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + bias=True, + upcast_softmax=True, + only_cross_attention=only_cross_attention, + cross_attention_norm=cross_attention_norm, + processor=processor, + ) + ) + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + ) + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: 
Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: + cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {} + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. `scale` will be ignored.") + + if attention_mask is None: + # if encoder_hidden_states is defined: we are doing cross-attn, so we should use cross-attn mask. + mask = None if encoder_hidden_states is None else encoder_attention_mask + else: + # when attention_mask is defined: we don't even check for encoder_attention_mask. + # this is to maintain compatibility with UnCLIP, which uses 'attention_mask' param for cross-attn masks. + # TODO: UnCLIP should express cross-attn mask via encoder_attention_mask param instead of via attention_mask. + # then we can simplify this whole if/else block to: + # mask = attention_mask if encoder_hidden_states is None else encoder_attention_mask + mask = attention_mask + + hidden_states = self.resnets[0](hidden_states, temb) + for attn, resnet in zip(self.attentions, self.resnets[1:]): + # attn + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=mask, + **cross_attention_kwargs, + ) + + # resnet + hidden_states = resnet(hidden_states, temb) + + return hidden_states + + +class MidBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor: float = 1.0, + use_linear_projection: bool = False, + ): + super().__init__() + + self.has_cross_attention = False + resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32) + + # there is always at least one resnet + resnets = [ + ResnetBlock2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ] + + for i in range(num_layers): + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + self.gradient_checkpointing = False + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: + lora_scale = 1.0 + hidden_states = self.resnets[0](hidden_states, temb, scale=lora_scale) + for resnet in self.resnets[1:]: + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + ckpt_kwargs: Dict[str, Any] = {"use_reentrant": 
False} if is_torch_version(">=", "1.11.0") else {} + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), + hidden_states, + temb, + **ckpt_kwargs, + ) + else: + hidden_states = resnet(hidden_states, temb, scale=lora_scale) + + return hidden_states + + +class AttnDownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + output_scale_factor: float = 1.0, + downsample_padding: int = 1, + downsample_type: str = "conv", + ): + super().__init__() + resnets = [] + attentions = [] + self.downsample_type = downsample_type + + if attention_head_dim is None: + logger.warning( + f"It is not recommend to pass `attention_head_dim=None`. Defaulting `attention_head_dim` to `in_channels`: {out_channels}." + ) + attention_head_dim = out_channels + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + attentions.append( + Attention( + out_channels, + heads=out_channels // attention_head_dim, + dim_head=attention_head_dim, + rescale_output_factor=output_scale_factor, + eps=resnet_eps, + norm_num_groups=resnet_groups, + residual_connection=True, + bias=True, + upcast_softmax=True, + _from_deprecated_attn_block=True, + ) + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if downsample_type == "conv": + self.downsamplers = nn.ModuleList( + [ + Downsample2D( + out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" + ) + ] + ) + elif downsample_type == "resnet": + self.downsamplers = nn.ModuleList( + [ + ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + down=True, + ) + ] + ) + else: + self.downsamplers = None + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + upsample_size: Optional[int] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...]]: + cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {} + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. 
`scale` will be ignored.") + + output_states = () + + for resnet, attn in zip(self.resnets, self.attentions): + hidden_states = resnet(hidden_states, temb) + hidden_states = attn(hidden_states, **cross_attention_kwargs) + output_states = output_states + (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + if self.downsample_type == "resnet": + hidden_states = downsampler(hidden_states, temb=temb) + else: + hidden_states = downsampler(hidden_states) + + output_states += (hidden_states,) + + return hidden_states, output_states + + +class CrossAttnDownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + transformer_layers_per_block: Union[int, Tuple[int]] = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + num_attention_heads: int = 1, + cross_attention_dim: int = 1280, + output_scale_factor: float = 1.0, + downsample_padding: int = 1, + add_downsample: bool = True, + dual_cross_attention: bool = False, + use_linear_projection: bool = False, + only_cross_attention: bool = False, + upcast_attention: bool = False, + attention_type: str = "default", + ): + super().__init__() + resnets = [] + attentions = [] + + self.has_cross_attention = True + self.num_attention_heads = num_attention_heads + if isinstance(transformer_layers_per_block, int): + transformer_layers_per_block = [transformer_layers_per_block] * num_layers + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + if not dual_cross_attention: + attentions.append( + Transformer2DModel( + num_attention_heads, + out_channels // num_attention_heads, + in_channels=out_channels, + num_layers=transformer_layers_per_block[i], + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention, + upcast_attention=upcast_attention, + attention_type=attention_type, + ) + ) + else: + attentions.append( + DualTransformer2DModel( + num_attention_heads, + out_channels // num_attention_heads, + in_channels=out_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) + ) + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + Downsample2D( + out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" + ) + ] + ) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + additional_residuals: Optional[torch.FloatTensor] = None, + down_block_add_samples: Optional[torch.FloatTensor] = None, + ) -> 
Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...]]: + if cross_attention_kwargs is not None: + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. `scale` will be ignored.") + + output_states = () + + blocks = list(zip(self.resnets, self.attentions)) + + for i, (resnet, attn) in enumerate(blocks): + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + ckpt_kwargs: Dict[str, Any] = {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {} + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), + hidden_states, + temb, + **ckpt_kwargs, + ) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + else: + hidden_states = resnet(hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + + # apply additional residuals to the output of the last pair of resnet and attention blocks + if i == len(blocks) - 1 and additional_residuals is not None: + hidden_states = hidden_states + additional_residuals + + if down_block_add_samples is not None: + hidden_states = hidden_states + down_block_add_samples.pop(0) + + + output_states = output_states + (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + if down_block_add_samples is not None: + hidden_states = hidden_states + down_block_add_samples.pop(0) # todo: add before or after + + output_states = output_states + (hidden_states,) + + return hidden_states, output_states + + +class DownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor: float = 1.0, + add_downsample: bool = True, + downsample_padding: int = 1, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + Downsample2D( + out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" + ) + ] + ) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward( + self, hidden_states: torch.FloatTensor, temb: Optional[torch.FloatTensor] = None, + down_block_add_samples: Optional[torch.FloatTensor] = None, *args, 
**kwargs + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...]]: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." + deprecate("scale", "1.0.0", deprecation_message) + + output_states = () + + for resnet in self.resnets: + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + if is_torch_version(">=", "1.11.0"): + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb, use_reentrant=False + ) + else: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + else: + hidden_states = resnet(hidden_states, temb) + + if down_block_add_samples is not None: + hidden_states = hidden_states + down_block_add_samples.pop(0) + + output_states = output_states + (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + if down_block_add_samples is not None: + hidden_states = hidden_states + down_block_add_samples.pop(0) # todo: add before or after + + output_states = output_states + (hidden_states,) + + return hidden_states, output_states + + +class DownEncoderBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor: float = 1.0, + add_downsample: bool = True, + downsample_padding: int = 1, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + if resnet_time_scale_shift == "spatial": + resnets.append( + ResnetBlockCondNorm2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=None, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm="spatial", + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + ) + ) + else: + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=None, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + Downsample2D( + out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" + ) + ] + ) + else: + self.downsamplers = None + + def forward(self, hidden_states: torch.FloatTensor, *args, **kwargs) -> torch.FloatTensor: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." 
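+            # surface the deprecation notice; the legacy `scale` value itself is not used here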
+ deprecate("scale", "1.0.0", deprecation_message) + + for resnet in self.resnets: + hidden_states = resnet(hidden_states, temb=None) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + return hidden_states + + +class AttnDownEncoderBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + output_scale_factor: float = 1.0, + add_downsample: bool = True, + downsample_padding: int = 1, + ): + super().__init__() + resnets = [] + attentions = [] + + if attention_head_dim is None: + logger.warning( + f"It is not recommend to pass `attention_head_dim=None`. Defaulting `attention_head_dim` to `in_channels`: {out_channels}." + ) + attention_head_dim = out_channels + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + if resnet_time_scale_shift == "spatial": + resnets.append( + ResnetBlockCondNorm2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=None, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm="spatial", + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + ) + ) + else: + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=None, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + attentions.append( + Attention( + out_channels, + heads=out_channels // attention_head_dim, + dim_head=attention_head_dim, + rescale_output_factor=output_scale_factor, + eps=resnet_eps, + norm_num_groups=resnet_groups, + residual_connection=True, + bias=True, + upcast_softmax=True, + _from_deprecated_attn_block=True, + ) + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + Downsample2D( + out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" + ) + ] + ) + else: + self.downsamplers = None + + def forward(self, hidden_states: torch.FloatTensor, *args, **kwargs) -> torch.FloatTensor: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." 
+ deprecate("scale", "1.0.0", deprecation_message) + + for resnet, attn in zip(self.resnets, self.attentions): + hidden_states = resnet(hidden_states, temb=None) + hidden_states = attn(hidden_states) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + return hidden_states + + +class AttnSkipDownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + output_scale_factor: float = np.sqrt(2.0), + add_downsample: bool = True, + ): + super().__init__() + self.attentions = nn.ModuleList([]) + self.resnets = nn.ModuleList([]) + + if attention_head_dim is None: + logger.warning( + f"It is not recommend to pass `attention_head_dim=None`. Defaulting `attention_head_dim` to `in_channels`: {out_channels}." + ) + attention_head_dim = out_channels + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + self.resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min(in_channels // 4, 32), + groups_out=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + self.attentions.append( + Attention( + out_channels, + heads=out_channels // attention_head_dim, + dim_head=attention_head_dim, + rescale_output_factor=output_scale_factor, + eps=resnet_eps, + norm_num_groups=32, + residual_connection=True, + bias=True, + upcast_softmax=True, + _from_deprecated_attn_block=True, + ) + ) + + if add_downsample: + self.resnet_down = ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + use_in_shortcut=True, + down=True, + kernel="fir", + ) + self.downsamplers = nn.ModuleList([FirDownsample2D(out_channels, out_channels=out_channels)]) + self.skip_conv = nn.Conv2d(3, out_channels, kernel_size=(1, 1), stride=(1, 1)) + else: + self.resnet_down = None + self.downsamplers = None + self.skip_conv = None + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + skip_sample: Optional[torch.FloatTensor] = None, + *args, + **kwargs, + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...], torch.FloatTensor]: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." 
+ deprecate("scale", "1.0.0", deprecation_message) + + output_states = () + + for resnet, attn in zip(self.resnets, self.attentions): + hidden_states = resnet(hidden_states, temb) + hidden_states = attn(hidden_states) + output_states += (hidden_states,) + + if self.downsamplers is not None: + hidden_states = self.resnet_down(hidden_states, temb) + for downsampler in self.downsamplers: + skip_sample = downsampler(skip_sample) + + hidden_states = self.skip_conv(skip_sample) + hidden_states + + output_states += (hidden_states,) + + return hidden_states, output_states, skip_sample + + +class SkipDownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_pre_norm: bool = True, + output_scale_factor: float = np.sqrt(2.0), + add_downsample: bool = True, + downsample_padding: int = 1, + ): + super().__init__() + self.resnets = nn.ModuleList([]) + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + self.resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min(in_channels // 4, 32), + groups_out=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + if add_downsample: + self.resnet_down = ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + use_in_shortcut=True, + down=True, + kernel="fir", + ) + self.downsamplers = nn.ModuleList([FirDownsample2D(out_channels, out_channels=out_channels)]) + self.skip_conv = nn.Conv2d(3, out_channels, kernel_size=(1, 1), stride=(1, 1)) + else: + self.resnet_down = None + self.downsamplers = None + self.skip_conv = None + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + skip_sample: Optional[torch.FloatTensor] = None, + *args, + **kwargs, + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...], torch.FloatTensor]: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." 
+ deprecate("scale", "1.0.0", deprecation_message) + + output_states = () + + for resnet in self.resnets: + hidden_states = resnet(hidden_states, temb) + output_states += (hidden_states,) + + if self.downsamplers is not None: + hidden_states = self.resnet_down(hidden_states, temb) + for downsampler in self.downsamplers: + skip_sample = downsampler(skip_sample) + + hidden_states = self.skip_conv(skip_sample) + hidden_states + + output_states += (hidden_states,) + + return hidden_states, output_states, skip_sample + + +class ResnetDownsampleBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor: float = 1.0, + add_downsample: bool = True, + skip_time_act: bool = False, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + down=True, + ) + ] + ) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward( + self, hidden_states: torch.FloatTensor, temb: Optional[torch.FloatTensor] = None, *args, **kwargs + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...]]: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." 
+ deprecate("scale", "1.0.0", deprecation_message) + + output_states = () + + for resnet in self.resnets: + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + if is_torch_version(">=", "1.11.0"): + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb, use_reentrant=False + ) + else: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + else: + hidden_states = resnet(hidden_states, temb) + + output_states = output_states + (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states, temb) + + output_states = output_states + (hidden_states,) + + return hidden_states, output_states + + +class SimpleCrossAttnDownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + cross_attention_dim: int = 1280, + output_scale_factor: float = 1.0, + add_downsample: bool = True, + skip_time_act: bool = False, + only_cross_attention: bool = False, + cross_attention_norm: Optional[str] = None, + ): + super().__init__() + + self.has_cross_attention = True + + resnets = [] + attentions = [] + + self.attention_head_dim = attention_head_dim + self.num_heads = out_channels // self.attention_head_dim + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + ) + ) + + processor = ( + AttnAddedKVProcessor2_0() if hasattr(F, "scaled_dot_product_attention") else AttnAddedKVProcessor() + ) + + attentions.append( + Attention( + query_dim=out_channels, + cross_attention_dim=out_channels, + heads=self.num_heads, + dim_head=attention_head_dim, + added_kv_proj_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + bias=True, + upcast_softmax=True, + only_cross_attention=only_cross_attention, + cross_attention_norm=cross_attention_norm, + processor=processor, + ) + ) + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + down=True, + ) + ] + ) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + 
encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...]]: + cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {} + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. `scale` will be ignored.") + + output_states = () + + if attention_mask is None: + # if encoder_hidden_states is defined: we are doing cross-attn, so we should use cross-attn mask. + mask = None if encoder_hidden_states is None else encoder_attention_mask + else: + # when attention_mask is defined: we don't even check for encoder_attention_mask. + # this is to maintain compatibility with UnCLIP, which uses 'attention_mask' param for cross-attn masks. + # TODO: UnCLIP should express cross-attn mask via encoder_attention_mask param instead of via attention_mask. + # then we can simplify this whole if/else block to: + # mask = attention_mask if encoder_hidden_states is None else encoder_attention_mask + mask = attention_mask + + for resnet, attn in zip(self.resnets, self.attentions): + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=mask, + **cross_attention_kwargs, + ) + else: + hidden_states = resnet(hidden_states, temb) + + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=mask, + **cross_attention_kwargs, + ) + + output_states = output_states + (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states, temb) + + output_states = output_states + (hidden_states,) + + return hidden_states, output_states + + +class KDownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 4, + resnet_eps: float = 1e-5, + resnet_act_fn: str = "gelu", + resnet_group_size: int = 32, + add_downsample: bool = False, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + groups = in_channels // resnet_group_size + groups_out = out_channels // resnet_group_size + + resnets.append( + ResnetBlockCondNorm2D( + in_channels=in_channels, + out_channels=out_channels, + dropout=dropout, + temb_channels=temb_channels, + groups=groups, + groups_out=groups_out, + eps=resnet_eps, + non_linearity=resnet_act_fn, + time_embedding_norm="ada_group", + conv_shortcut_bias=False, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + # YiYi's comments- might be able to use FirDownsample2D, look into details later + self.downsamplers = nn.ModuleList([KDownsample2D()]) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward( + self, hidden_states: torch.FloatTensor, temb: Optional[torch.FloatTensor] = None, *args, **kwargs + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...]]: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is 
deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." + deprecate("scale", "1.0.0", deprecation_message) + + output_states = () + + for resnet in self.resnets: + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + if is_torch_version(">=", "1.11.0"): + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb, use_reentrant=False + ) + else: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + else: + hidden_states = resnet(hidden_states, temb) + + output_states += (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + return hidden_states, output_states + + +class KCrossAttnDownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + cross_attention_dim: int, + dropout: float = 0.0, + num_layers: int = 4, + resnet_group_size: int = 32, + add_downsample: bool = True, + attention_head_dim: int = 64, + add_self_attention: bool = False, + resnet_eps: float = 1e-5, + resnet_act_fn: str = "gelu", + ): + super().__init__() + resnets = [] + attentions = [] + + self.has_cross_attention = True + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + groups = in_channels // resnet_group_size + groups_out = out_channels // resnet_group_size + + resnets.append( + ResnetBlockCondNorm2D( + in_channels=in_channels, + out_channels=out_channels, + dropout=dropout, + temb_channels=temb_channels, + groups=groups, + groups_out=groups_out, + eps=resnet_eps, + non_linearity=resnet_act_fn, + time_embedding_norm="ada_group", + conv_shortcut_bias=False, + ) + ) + attentions.append( + KAttentionBlock( + out_channels, + out_channels // attention_head_dim, + attention_head_dim, + cross_attention_dim=cross_attention_dim, + temb_channels=temb_channels, + attention_bias=True, + add_self_attention=add_self_attention, + cross_attention_norm="layer_norm", + group_size=resnet_group_size, + ) + ) + + self.resnets = nn.ModuleList(resnets) + self.attentions = nn.ModuleList(attentions) + + if add_downsample: + self.downsamplers = nn.ModuleList([KDownsample2D()]) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...]]: + cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {} + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. 
`scale` will be ignored.") + + output_states = () + + for resnet, attn in zip(self.resnets, self.attentions): + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + ckpt_kwargs: Dict[str, Any] = {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {} + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), + hidden_states, + temb, + **ckpt_kwargs, + ) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + emb=temb, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + encoder_attention_mask=encoder_attention_mask, + ) + else: + hidden_states = resnet(hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + emb=temb, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + encoder_attention_mask=encoder_attention_mask, + ) + + if self.downsamplers is None: + output_states += (None,) + else: + output_states += (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + return hidden_states, output_states + + +class AttnUpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + prev_output_channel: int, + out_channels: int, + temb_channels: int, + resolution_idx: int = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + output_scale_factor: float = 1.0, + upsample_type: str = "conv", + ): + super().__init__() + resnets = [] + attentions = [] + + self.upsample_type = upsample_type + + if attention_head_dim is None: + logger.warning( + f"It is not recommend to pass `attention_head_dim=None`. Defaulting `attention_head_dim` to `in_channels`: {out_channels}." 
+ ) + attention_head_dim = out_channels + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + resnets.append( + ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + attentions.append( + Attention( + out_channels, + heads=out_channels // attention_head_dim, + dim_head=attention_head_dim, + rescale_output_factor=output_scale_factor, + eps=resnet_eps, + norm_num_groups=resnet_groups, + residual_connection=True, + bias=True, + upcast_softmax=True, + _from_deprecated_attn_block=True, + ) + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if upsample_type == "conv": + self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)]) + elif upsample_type == "resnet": + self.upsamplers = nn.ModuleList( + [ + ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + up=True, + ) + ] + ) + else: + self.upsamplers = None + + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + upsample_size: Optional[int] = None, + *args, + **kwargs, + ) -> torch.FloatTensor: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." 
+ deprecate("scale", "1.0.0", deprecation_message) + + for resnet, attn in zip(self.resnets, self.attentions): + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + hidden_states = resnet(hidden_states, temb) + hidden_states = attn(hidden_states) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + if self.upsample_type == "resnet": + hidden_states = upsampler(hidden_states, temb=temb) + else: + hidden_states = upsampler(hidden_states) + + return hidden_states + + +class CrossAttnUpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + prev_output_channel: int, + temb_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + transformer_layers_per_block: Union[int, Tuple[int]] = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + num_attention_heads: int = 1, + cross_attention_dim: int = 1280, + output_scale_factor: float = 1.0, + add_upsample: bool = True, + dual_cross_attention: bool = False, + use_linear_projection: bool = False, + only_cross_attention: bool = False, + upcast_attention: bool = False, + attention_type: str = "default", + ): + super().__init__() + resnets = [] + attentions = [] + + self.has_cross_attention = True + self.num_attention_heads = num_attention_heads + + if isinstance(transformer_layers_per_block, int): + transformer_layers_per_block = [transformer_layers_per_block] * num_layers + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + resnets.append( + ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + if not dual_cross_attention: + attentions.append( + Transformer2DModel( + num_attention_heads, + out_channels // num_attention_heads, + in_channels=out_channels, + num_layers=transformer_layers_per_block[i], + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention, + upcast_attention=upcast_attention, + attention_type=attention_type, + ) + ) + else: + attentions.append( + DualTransformer2DModel( + num_attention_heads, + out_channels // num_attention_heads, + in_channels=out_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) + ) + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)]) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, 
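+        # the extension parameters further below (`return_res_samples`, `up_block_add_samples`)
+        # let this block return its intermediate states and add externally supplied residuals,
+        # mirroring the `down_block_add_samples` handling in the down blocks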
+ upsample_size: Optional[int] = None, + attention_mask: Optional[torch.FloatTensor] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + return_res_samples: Optional[bool]=False, + up_block_add_samples: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: + if cross_attention_kwargs is not None: + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. `scale` will be ignored.") + + is_freeu_enabled = ( + getattr(self, "s1", None) + and getattr(self, "s2", None) + and getattr(self, "b1", None) + and getattr(self, "b2", None) + ) + if return_res_samples: + output_states=() + + for resnet, attn in zip(self.resnets, self.attentions): + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + + # FreeU: Only operate on the first two stages + if is_freeu_enabled: + hidden_states, res_hidden_states = apply_freeu( + self.resolution_idx, + hidden_states, + res_hidden_states, + s1=self.s1, + s2=self.s2, + b1=self.b1, + b2=self.b2, + ) + + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + ckpt_kwargs: Dict[str, Any] = {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {} + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), + hidden_states, + temb, + **ckpt_kwargs, + ) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + else: + hidden_states = resnet(hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + if return_res_samples: + output_states = output_states + (hidden_states,) + if up_block_add_samples is not None: + hidden_states = hidden_states + up_block_add_samples.pop(0) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, upsample_size) + if return_res_samples: + output_states = output_states + (hidden_states,) + if up_block_add_samples is not None: + hidden_states = hidden_states + up_block_add_samples.pop(0) + + if return_res_samples: + return hidden_states, output_states + else: + return hidden_states + +class UpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + prev_output_channel: int, + out_channels: int, + temb_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor: float = 1.0, + add_upsample: bool = True, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + resnets.append( + 
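+                # the resnet input width is the current feature width plus the matching skip
+                # connection (`res_hidden_states`) that forward() concatenates along dim=1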
ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)]) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + upsample_size: Optional[int] = None, + return_res_samples: Optional[bool]=False, + up_block_add_samples: Optional[torch.FloatTensor] = None, + *args, + **kwargs, + ) -> torch.FloatTensor: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." + deprecate("scale", "1.0.0", deprecation_message) + + is_freeu_enabled = ( + getattr(self, "s1", None) + and getattr(self, "s2", None) + and getattr(self, "b1", None) + and getattr(self, "b2", None) + ) + if return_res_samples: + output_states = () + + for resnet in self.resnets: + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + + # FreeU: Only operate on the first two stages + if is_freeu_enabled: + hidden_states, res_hidden_states = apply_freeu( + self.resolution_idx, + hidden_states, + res_hidden_states, + s1=self.s1, + s2=self.s2, + b1=self.b1, + b2=self.b2, + ) + + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + if is_torch_version(">=", "1.11.0"): + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb, use_reentrant=False + ) + else: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + else: + hidden_states = resnet(hidden_states, temb) + + if return_res_samples: + output_states = output_states + (hidden_states,) + if up_block_add_samples is not None: + hidden_states = hidden_states + up_block_add_samples.pop(0) # todo: add before or after + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, upsample_size) + + if return_res_samples: + output_states = output_states + (hidden_states,) + if up_block_add_samples is not None: + hidden_states = hidden_states + up_block_add_samples.pop(0) # todo: add before or after + + if return_res_samples: + return hidden_states, output_states + else: + return hidden_states + + +class UpDecoderBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", # default, spatial + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + 
resnet_pre_norm: bool = True, + output_scale_factor: float = 1.0, + add_upsample: bool = True, + temb_channels: Optional[int] = None, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + input_channels = in_channels if i == 0 else out_channels + + if resnet_time_scale_shift == "spatial": + resnets.append( + ResnetBlockCondNorm2D( + in_channels=input_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm="spatial", + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + ) + ) + else: + resnets.append( + ResnetBlock2D( + in_channels=input_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)]) + else: + self.upsamplers = None + + self.resolution_idx = resolution_idx + + def forward(self, hidden_states: torch.FloatTensor, temb: Optional[torch.FloatTensor] = None) -> torch.FloatTensor: + for resnet in self.resnets: + hidden_states = resnet(hidden_states, temb=temb) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states) + + return hidden_states + + +class AttnUpDecoderBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + output_scale_factor: float = 1.0, + add_upsample: bool = True, + temb_channels: Optional[int] = None, + ): + super().__init__() + resnets = [] + attentions = [] + + if attention_head_dim is None: + logger.warning( + f"It is not recommend to pass `attention_head_dim=None`. Defaulting `attention_head_dim` to `out_channels`: {out_channels}." 
+ ) + attention_head_dim = out_channels + + for i in range(num_layers): + input_channels = in_channels if i == 0 else out_channels + + if resnet_time_scale_shift == "spatial": + resnets.append( + ResnetBlockCondNorm2D( + in_channels=input_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm="spatial", + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + ) + ) + else: + resnets.append( + ResnetBlock2D( + in_channels=input_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + attentions.append( + Attention( + out_channels, + heads=out_channels // attention_head_dim, + dim_head=attention_head_dim, + rescale_output_factor=output_scale_factor, + eps=resnet_eps, + norm_num_groups=resnet_groups if resnet_time_scale_shift != "spatial" else None, + spatial_norm_dim=temb_channels if resnet_time_scale_shift == "spatial" else None, + residual_connection=True, + bias=True, + upcast_softmax=True, + _from_deprecated_attn_block=True, + ) + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)]) + else: + self.upsamplers = None + + self.resolution_idx = resolution_idx + + def forward(self, hidden_states: torch.FloatTensor, temb: Optional[torch.FloatTensor] = None) -> torch.FloatTensor: + for resnet, attn in zip(self.resnets, self.attentions): + hidden_states = resnet(hidden_states, temb=temb) + hidden_states = attn(hidden_states, temb=temb) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states) + + return hidden_states + + +class AttnSkipUpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + prev_output_channel: int, + out_channels: int, + temb_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + output_scale_factor: float = np.sqrt(2.0), + add_upsample: bool = True, + ): + super().__init__() + self.attentions = nn.ModuleList([]) + self.resnets = nn.ModuleList([]) + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + self.resnets.append( + ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min(resnet_in_channels + res_skip_channels // 4, 32), + groups_out=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + if attention_head_dim is None: + logger.warning( + f"It is not recommend to pass `attention_head_dim=None`. Defaulting `attention_head_dim` to `out_channels`: {out_channels}." 
+ ) + attention_head_dim = out_channels + + self.attentions.append( + Attention( + out_channels, + heads=out_channels // attention_head_dim, + dim_head=attention_head_dim, + rescale_output_factor=output_scale_factor, + eps=resnet_eps, + norm_num_groups=32, + residual_connection=True, + bias=True, + upcast_softmax=True, + _from_deprecated_attn_block=True, + ) + ) + + self.upsampler = FirUpsample2D(in_channels, out_channels=out_channels) + if add_upsample: + self.resnet_up = ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min(out_channels // 4, 32), + groups_out=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + use_in_shortcut=True, + up=True, + kernel="fir", + ) + self.skip_conv = nn.Conv2d(out_channels, 3, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + self.skip_norm = torch.nn.GroupNorm( + num_groups=min(out_channels // 4, 32), num_channels=out_channels, eps=resnet_eps, affine=True + ) + self.act = nn.SiLU() + else: + self.resnet_up = None + self.skip_conv = None + self.skip_norm = None + self.act = None + + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + skip_sample=None, + *args, + **kwargs, + ) -> Tuple[torch.FloatTensor, torch.FloatTensor]: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." 
+ deprecate("scale", "1.0.0", deprecation_message) + + for resnet in self.resnets: + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + hidden_states = resnet(hidden_states, temb) + + hidden_states = self.attentions[0](hidden_states) + + if skip_sample is not None: + skip_sample = self.upsampler(skip_sample) + else: + skip_sample = 0 + + if self.resnet_up is not None: + skip_sample_states = self.skip_norm(hidden_states) + skip_sample_states = self.act(skip_sample_states) + skip_sample_states = self.skip_conv(skip_sample_states) + + skip_sample = skip_sample + skip_sample_states + + hidden_states = self.resnet_up(hidden_states, temb) + + return hidden_states, skip_sample + + +class SkipUpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + prev_output_channel: int, + out_channels: int, + temb_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_pre_norm: bool = True, + output_scale_factor: float = np.sqrt(2.0), + add_upsample: bool = True, + upsample_padding: int = 1, + ): + super().__init__() + self.resnets = nn.ModuleList([]) + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + self.resnets.append( + ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min((resnet_in_channels + res_skip_channels) // 4, 32), + groups_out=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.upsampler = FirUpsample2D(in_channels, out_channels=out_channels) + if add_upsample: + self.resnet_up = ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min(out_channels // 4, 32), + groups_out=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + use_in_shortcut=True, + up=True, + kernel="fir", + ) + self.skip_conv = nn.Conv2d(out_channels, 3, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + self.skip_norm = torch.nn.GroupNorm( + num_groups=min(out_channels // 4, 32), num_channels=out_channels, eps=resnet_eps, affine=True + ) + self.act = nn.SiLU() + else: + self.resnet_up = None + self.skip_conv = None + self.skip_norm = None + self.act = None + + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + skip_sample=None, + *args, + **kwargs, + ) -> Tuple[torch.FloatTensor, torch.FloatTensor]: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." 
+ deprecate("scale", "1.0.0", deprecation_message) + + for resnet in self.resnets: + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + hidden_states = resnet(hidden_states, temb) + + if skip_sample is not None: + skip_sample = self.upsampler(skip_sample) + else: + skip_sample = 0 + + if self.resnet_up is not None: + skip_sample_states = self.skip_norm(hidden_states) + skip_sample_states = self.act(skip_sample_states) + skip_sample_states = self.skip_conv(skip_sample_states) + + skip_sample = skip_sample + skip_sample_states + + hidden_states = self.resnet_up(hidden_states, temb) + + return hidden_states, skip_sample + + +class ResnetUpsampleBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + prev_output_channel: int, + out_channels: int, + temb_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor: float = 1.0, + add_upsample: bool = True, + skip_time_act: bool = False, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + resnets.append( + ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList( + [ + ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + up=True, + ) + ] + ) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + upsample_size: Optional[int] = None, + *args, + **kwargs, + ) -> torch.FloatTensor: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." 
+ deprecate("scale", "1.0.0", deprecation_message) + + for resnet in self.resnets: + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + if is_torch_version(">=", "1.11.0"): + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb, use_reentrant=False + ) + else: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + else: + hidden_states = resnet(hidden_states, temb) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, temb) + + return hidden_states + + +class SimpleCrossAttnUpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + prev_output_channel: int, + temb_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + cross_attention_dim: int = 1280, + output_scale_factor: float = 1.0, + add_upsample: bool = True, + skip_time_act: bool = False, + only_cross_attention: bool = False, + cross_attention_norm: Optional[str] = None, + ): + super().__init__() + resnets = [] + attentions = [] + + self.has_cross_attention = True + self.attention_head_dim = attention_head_dim + + self.num_heads = out_channels // self.attention_head_dim + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + resnets.append( + ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + ) + ) + + processor = ( + AttnAddedKVProcessor2_0() if hasattr(F, "scaled_dot_product_attention") else AttnAddedKVProcessor() + ) + + attentions.append( + Attention( + query_dim=out_channels, + cross_attention_dim=out_channels, + heads=self.num_heads, + dim_head=self.attention_head_dim, + added_kv_proj_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + bias=True, + upcast_softmax=True, + only_cross_attention=only_cross_attention, + cross_attention_norm=cross_attention_norm, + processor=processor, + ) + ) + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList( + [ + ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + up=True, + ) + ] + ) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + self.resolution_idx = resolution_idx + + def forward( + 
self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + upsample_size: Optional[int] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: + cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {} + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. `scale` will be ignored.") + + if attention_mask is None: + # if encoder_hidden_states is defined: we are doing cross-attn, so we should use cross-attn mask. + mask = None if encoder_hidden_states is None else encoder_attention_mask + else: + # when attention_mask is defined: we don't even check for encoder_attention_mask. + # this is to maintain compatibility with UnCLIP, which uses 'attention_mask' param for cross-attn masks. + # TODO: UnCLIP should express cross-attn mask via encoder_attention_mask param instead of via attention_mask. + # then we can simplify this whole if/else block to: + # mask = attention_mask if encoder_hidden_states is None else encoder_attention_mask + mask = attention_mask + + for resnet, attn in zip(self.resnets, self.attentions): + # resnet + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=mask, + **cross_attention_kwargs, + ) + else: + hidden_states = resnet(hidden_states, temb) + + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=mask, + **cross_attention_kwargs, + ) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, temb) + + return hidden_states + + +class KUpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + resolution_idx: int, + dropout: float = 0.0, + num_layers: int = 5, + resnet_eps: float = 1e-5, + resnet_act_fn: str = "gelu", + resnet_group_size: Optional[int] = 32, + add_upsample: bool = True, + ): + super().__init__() + resnets = [] + k_in_channels = 2 * out_channels + k_out_channels = in_channels + num_layers = num_layers - 1 + + for i in range(num_layers): + in_channels = k_in_channels if i == 0 else out_channels + groups = in_channels // resnet_group_size + groups_out = out_channels // resnet_group_size + + resnets.append( + ResnetBlockCondNorm2D( + in_channels=in_channels, + out_channels=k_out_channels if (i == num_layers - 1) else out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=groups, + groups_out=groups_out, + dropout=dropout, + non_linearity=resnet_act_fn, + time_embedding_norm="ada_group", + conv_shortcut_bias=False, + ) + ) + + self.resnets = 
nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList([KUpsample2D()]) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + upsample_size: Optional[int] = None, + *args, + **kwargs, + ) -> torch.FloatTensor: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." + deprecate("scale", "1.0.0", deprecation_message) + + res_hidden_states_tuple = res_hidden_states_tuple[-1] + if res_hidden_states_tuple is not None: + hidden_states = torch.cat([hidden_states, res_hidden_states_tuple], dim=1) + + for resnet in self.resnets: + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + if is_torch_version(">=", "1.11.0"): + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb, use_reentrant=False + ) + else: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + else: + hidden_states = resnet(hidden_states, temb) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states) + + return hidden_states + + +class KCrossAttnUpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + resolution_idx: int, + dropout: float = 0.0, + num_layers: int = 4, + resnet_eps: float = 1e-5, + resnet_act_fn: str = "gelu", + resnet_group_size: int = 32, + attention_head_dim: int = 1, # attention dim_head + cross_attention_dim: int = 768, + add_upsample: bool = True, + upcast_attention: bool = False, + ): + super().__init__() + resnets = [] + attentions = [] + + is_first_block = in_channels == out_channels == temb_channels + is_middle_block = in_channels != out_channels + add_self_attention = True if is_first_block else False + + self.has_cross_attention = True + self.attention_head_dim = attention_head_dim + + # in_channels, and out_channels for the block (k-unet) + k_in_channels = out_channels if is_first_block else 2 * out_channels + k_out_channels = in_channels + + num_layers = num_layers - 1 + + for i in range(num_layers): + in_channels = k_in_channels if i == 0 else out_channels + groups = in_channels // resnet_group_size + groups_out = out_channels // resnet_group_size + + if is_middle_block and (i == num_layers - 1): + conv_2d_out_channels = k_out_channels + else: + conv_2d_out_channels = None + + resnets.append( + ResnetBlockCondNorm2D( + in_channels=in_channels, + out_channels=out_channels, + conv_2d_out_channels=conv_2d_out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=groups, + groups_out=groups_out, + dropout=dropout, + non_linearity=resnet_act_fn, + time_embedding_norm="ada_group", + conv_shortcut_bias=False, + ) + ) + attentions.append( + KAttentionBlock( + k_out_channels if (i == num_layers - 1) else out_channels, + k_out_channels // attention_head_dim + if (i == num_layers - 1) + else out_channels // attention_head_dim, + attention_head_dim, + 
cross_attention_dim=cross_attention_dim, + temb_channels=temb_channels, + attention_bias=True, + add_self_attention=add_self_attention, + cross_attention_norm="layer_norm", + upcast_attention=upcast_attention, + ) + ) + + self.resnets = nn.ModuleList(resnets) + self.attentions = nn.ModuleList(attentions) + + if add_upsample: + self.upsamplers = nn.ModuleList([KUpsample2D()]) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + upsample_size: Optional[int] = None, + attention_mask: Optional[torch.FloatTensor] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: + res_hidden_states_tuple = res_hidden_states_tuple[-1] + if res_hidden_states_tuple is not None: + hidden_states = torch.cat([hidden_states, res_hidden_states_tuple], dim=1) + + for resnet, attn in zip(self.resnets, self.attentions): + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + ckpt_kwargs: Dict[str, Any] = {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {} + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), + hidden_states, + temb, + **ckpt_kwargs, + ) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + emb=temb, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + encoder_attention_mask=encoder_attention_mask, + ) + else: + hidden_states = resnet(hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + emb=temb, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + encoder_attention_mask=encoder_attention_mask, + ) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states) + + return hidden_states + + +# can potentially later be renamed to `No-feed-forward` attention +class KAttentionBlock(nn.Module): + r""" + A basic Transformer block. + + Parameters: + dim (`int`): The number of channels in the input and output. + num_attention_heads (`int`): The number of heads to use for multi-head attention. + attention_head_dim (`int`): The number of channels in each head. + dropout (`float`, *optional*, defaults to 0.0): The dropout probability to use. + cross_attention_dim (`int`, *optional*): The size of the encoder_hidden_states vector for cross attention. + attention_bias (`bool`, *optional*, defaults to `False`): + Configure if the attention layers should contain a bias parameter. + upcast_attention (`bool`, *optional*, defaults to `False`): + Set to `True` to upcast the attention computation to `float32`. + temb_channels (`int`, *optional*, defaults to 768): + The number of channels in the token embedding. + add_self_attention (`bool`, *optional*, defaults to `False`): + Set to `True` to add self-attention to the block. + cross_attention_norm (`str`, *optional*, defaults to `None`): + The type of normalization to use for the cross attention. 
Can be `None`, `layer_norm`, or `group_norm`. + group_size (`int`, *optional*, defaults to 32): + The number of groups to separate the channels into for group normalization. + """ + + def __init__( + self, + dim: int, + num_attention_heads: int, + attention_head_dim: int, + dropout: float = 0.0, + cross_attention_dim: Optional[int] = None, + attention_bias: bool = False, + upcast_attention: bool = False, + temb_channels: int = 768, # for ada_group_norm + add_self_attention: bool = False, + cross_attention_norm: Optional[str] = None, + group_size: int = 32, + ): + super().__init__() + self.add_self_attention = add_self_attention + + # 1. Self-Attn + if add_self_attention: + self.norm1 = AdaGroupNorm(temb_channels, dim, max(1, dim // group_size)) + self.attn1 = Attention( + query_dim=dim, + heads=num_attention_heads, + dim_head=attention_head_dim, + dropout=dropout, + bias=attention_bias, + cross_attention_dim=None, + cross_attention_norm=None, + ) + + # 2. Cross-Attn + self.norm2 = AdaGroupNorm(temb_channels, dim, max(1, dim // group_size)) + self.attn2 = Attention( + query_dim=dim, + cross_attention_dim=cross_attention_dim, + heads=num_attention_heads, + dim_head=attention_head_dim, + dropout=dropout, + bias=attention_bias, + upcast_attention=upcast_attention, + cross_attention_norm=cross_attention_norm, + ) + + def _to_3d(self, hidden_states: torch.FloatTensor, height: int, weight: int) -> torch.FloatTensor: + return hidden_states.permute(0, 2, 3, 1).reshape(hidden_states.shape[0], height * weight, -1) + + def _to_4d(self, hidden_states: torch.FloatTensor, height: int, weight: int) -> torch.FloatTensor: + return hidden_states.permute(0, 2, 1).reshape(hidden_states.shape[0], -1, height, weight) + + def forward( + self, + hidden_states: torch.FloatTensor, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + # TODO: mark emb as non-optional (self.norm2 requires it). + # requires assessing impact of change to positional param interface. + emb: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: + cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {} + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. `scale` will be ignored.") + + # 1. Self-Attention + if self.add_self_attention: + norm_hidden_states = self.norm1(hidden_states, emb) + + height, weight = norm_hidden_states.shape[2:] + norm_hidden_states = self._to_3d(norm_hidden_states, height, weight) + + attn_output = self.attn1( + norm_hidden_states, + encoder_hidden_states=None, + attention_mask=attention_mask, + **cross_attention_kwargs, + ) + attn_output = self._to_4d(attn_output, height, weight) + + hidden_states = attn_output + hidden_states + + # 2. 
Cross-Attention/None + norm_hidden_states = self.norm2(hidden_states, emb) + + height, weight = norm_hidden_states.shape[2:] + norm_hidden_states = self._to_3d(norm_hidden_states, height, weight) + attn_output = self.attn2( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask if encoder_hidden_states is None else encoder_attention_mask, + **cross_attention_kwargs, + ) + attn_output = self._to_4d(attn_output, height, weight) + + hidden_states = attn_output + hidden_states + + return hidden_states diff --git a/ComfyUI-BrushNet/brushnet/unet_2d_condition.py b/ComfyUI-BrushNet/brushnet/unet_2d_condition.py new file mode 100644 index 0000000000000000000000000000000000000000..088e0efdba9f481c57137e5413e795fcca74c6a5 --- /dev/null +++ b/ComfyUI-BrushNet/brushnet/unet_2d_condition.py @@ -0,0 +1,1355 @@ +# Copyright 2024 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from dataclasses import dataclass +from typing import Any, Dict, List, Optional, Tuple, Union + +import torch +import torch.nn as nn +import torch.utils.checkpoint + +from diffusers.configuration_utils import ConfigMixin, register_to_config +from diffusers.loaders import PeftAdapterMixin, UNet2DConditionLoadersMixin +from diffusers.utils import USE_PEFT_BACKEND, BaseOutput, deprecate, logging, scale_lora_layers, unscale_lora_layers +from diffusers.models.activations import get_activation +from diffusers.models.attention_processor import ( + ADDED_KV_ATTENTION_PROCESSORS, + CROSS_ATTENTION_PROCESSORS, + Attention, + AttentionProcessor, + AttnAddedKVProcessor, + AttnProcessor, +) +from diffusers.models.embeddings import ( + GaussianFourierProjection, + GLIGENTextBoundingboxProjection, + ImageHintTimeEmbedding, + ImageProjection, + ImageTimeEmbedding, + TextImageProjection, + TextImageTimeEmbedding, + TextTimeEmbedding, + TimestepEmbedding, + Timesteps, +) +from diffusers.models.modeling_utils import ModelMixin +from .unet_2d_blocks import ( + get_down_block, + get_mid_block, + get_up_block, +) + + +logger = logging.get_logger(__name__) # pylint: disable=invalid-name + + +@dataclass +class UNet2DConditionOutput(BaseOutput): + """ + The output of [`UNet2DConditionModel`]. + + Args: + sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)`): + The hidden states output conditioned on `encoder_hidden_states` input. Output of last layer of model. + """ + + sample: torch.FloatTensor = None + + +class UNet2DConditionModel(ModelMixin, ConfigMixin, UNet2DConditionLoadersMixin, PeftAdapterMixin): + r""" + A conditional 2D UNet model that takes a noisy sample, conditional state, and a timestep and returns a sample + shaped output. + + This model inherits from [`ModelMixin`]. Check the superclass documentation for it's generic methods implemented + for all models (such as downloading or saving). + + Parameters: + sample_size (`int` or `Tuple[int, int]`, *optional*, defaults to `None`): + Height and width of input/output sample. 
+ in_channels (`int`, *optional*, defaults to 4): Number of channels in the input sample. + out_channels (`int`, *optional*, defaults to 4): Number of channels in the output. + center_input_sample (`bool`, *optional*, defaults to `False`): Whether to center the input sample. + flip_sin_to_cos (`bool`, *optional*, defaults to `True`): + Whether to flip the sin to cos in the time embedding. + freq_shift (`int`, *optional*, defaults to 0): The frequency shift to apply to the time embedding. + down_block_types (`Tuple[str]`, *optional*, defaults to `("CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "DownBlock2D")`): + The tuple of downsample blocks to use. + mid_block_type (`str`, *optional*, defaults to `"UNetMidBlock2DCrossAttn"`): + Block type for middle of UNet, it can be one of `UNetMidBlock2DCrossAttn`, `UNetMidBlock2D`, or + `UNetMidBlock2DSimpleCrossAttn`. If `None`, the mid block layer is skipped. + up_block_types (`Tuple[str]`, *optional*, defaults to `("UpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D")`): + The tuple of upsample blocks to use. + only_cross_attention(`bool` or `Tuple[bool]`, *optional*, default to `False`): + Whether to include self-attention in the basic transformer blocks, see + [`~models.attention.BasicTransformerBlock`]. + block_out_channels (`Tuple[int]`, *optional*, defaults to `(320, 640, 1280, 1280)`): + The tuple of output channels for each block. + layers_per_block (`int`, *optional*, defaults to 2): The number of layers per block. + downsample_padding (`int`, *optional*, defaults to 1): The padding to use for the downsampling convolution. + mid_block_scale_factor (`float`, *optional*, defaults to 1.0): The scale factor to use for the mid block. + dropout (`float`, *optional*, defaults to 0.0): The dropout probability to use. + act_fn (`str`, *optional*, defaults to `"silu"`): The activation function to use. + norm_num_groups (`int`, *optional*, defaults to 32): The number of groups to use for the normalization. + If `None`, normalization and activation layers is skipped in post-processing. + norm_eps (`float`, *optional*, defaults to 1e-5): The epsilon to use for the normalization. + cross_attention_dim (`int` or `Tuple[int]`, *optional*, defaults to 1280): + The dimension of the cross attention features. + transformer_layers_per_block (`int`, `Tuple[int]`, or `Tuple[Tuple]` , *optional*, defaults to 1): + The number of transformer blocks of type [`~models.attention.BasicTransformerBlock`]. Only relevant for + [`~models.unet_2d_blocks.CrossAttnDownBlock2D`], [`~models.unet_2d_blocks.CrossAttnUpBlock2D`], + [`~models.unet_2d_blocks.UNetMidBlock2DCrossAttn`]. + reverse_transformer_layers_per_block : (`Tuple[Tuple]`, *optional*, defaults to None): + The number of transformer blocks of type [`~models.attention.BasicTransformerBlock`], in the upsampling + blocks of the U-Net. Only relevant if `transformer_layers_per_block` is of type `Tuple[Tuple]` and for + [`~models.unet_2d_blocks.CrossAttnDownBlock2D`], [`~models.unet_2d_blocks.CrossAttnUpBlock2D`], + [`~models.unet_2d_blocks.UNetMidBlock2DCrossAttn`]. + encoder_hid_dim (`int`, *optional*, defaults to None): + If `encoder_hid_dim_type` is defined, `encoder_hidden_states` will be projected from `encoder_hid_dim` + dimension to `cross_attention_dim`. 
+ encoder_hid_dim_type (`str`, *optional*, defaults to `None`): + If given, the `encoder_hidden_states` and potentially other embeddings are down-projected to text + embeddings of dimension `cross_attention` according to `encoder_hid_dim_type`. + attention_head_dim (`int`, *optional*, defaults to 8): The dimension of the attention heads. + num_attention_heads (`int`, *optional*): + The number of attention heads. If not defined, defaults to `attention_head_dim` + resnet_time_scale_shift (`str`, *optional*, defaults to `"default"`): Time scale shift config + for ResNet blocks (see [`~models.resnet.ResnetBlock2D`]). Choose from `default` or `scale_shift`. + class_embed_type (`str`, *optional*, defaults to `None`): + The type of class embedding to use which is ultimately summed with the time embeddings. Choose from `None`, + `"timestep"`, `"identity"`, `"projection"`, or `"simple_projection"`. + addition_embed_type (`str`, *optional*, defaults to `None`): + Configures an optional embedding which will be summed with the time embeddings. Choose from `None` or + "text". "text" will use the `TextTimeEmbedding` layer. + addition_time_embed_dim: (`int`, *optional*, defaults to `None`): + Dimension for the timestep embeddings. + num_class_embeds (`int`, *optional*, defaults to `None`): + Input dimension of the learnable embedding matrix to be projected to `time_embed_dim`, when performing + class conditioning with `class_embed_type` equal to `None`. + time_embedding_type (`str`, *optional*, defaults to `positional`): + The type of position embedding to use for timesteps. Choose from `positional` or `fourier`. + time_embedding_dim (`int`, *optional*, defaults to `None`): + An optional override for the dimension of the projected time embedding. + time_embedding_act_fn (`str`, *optional*, defaults to `None`): + Optional activation function to use only once on the time embeddings before they are passed to the rest of + the UNet. Choose from `silu`, `mish`, `gelu`, and `swish`. + timestep_post_act (`str`, *optional*, defaults to `None`): + The second activation function to use in timestep embedding. Choose from `silu`, `mish` and `gelu`. + time_cond_proj_dim (`int`, *optional*, defaults to `None`): + The dimension of `cond_proj` layer in the timestep embedding. + conv_in_kernel (`int`, *optional*, default to `3`): The kernel size of `conv_in` layer. + conv_out_kernel (`int`, *optional*, default to `3`): The kernel size of `conv_out` layer. + projection_class_embeddings_input_dim (`int`, *optional*): The dimension of the `class_labels` input when + `class_embed_type="projection"`. Required when `class_embed_type="projection"`. + class_embeddings_concat (`bool`, *optional*, defaults to `False`): Whether to concatenate the time + embeddings with the class embeddings. + mid_block_only_cross_attention (`bool`, *optional*, defaults to `None`): + Whether to use cross attention with the mid block when using the `UNetMidBlock2DSimpleCrossAttn`. If + `only_cross_attention` is given as a single boolean and `mid_block_only_cross_attention` is `None`, the + `only_cross_attention` value is used as the value for `mid_block_only_cross_attention`. Default to `False` + otherwise. 
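+
+    Example (a minimal sketch assuming the default, SD-style configuration documented above; the shapes
+    are illustrative rather than required):
+
+        ```py
+        >>> import torch
+        >>> model = UNet2DConditionModel()  # defaults: 4 latent channels, cross_attention_dim=1280
+        >>> latents = torch.randn(1, 4, 64, 64)  # spatial dims should be divisible by 2**num_upsamplers
+        >>> timestep = torch.tensor([10])
+        >>> text_states = torch.randn(1, 77, 1280)  # (batch, tokens, cross_attention_dim)
+        >>> out = model(latents, timestep, encoder_hidden_states=text_states).sample  # (1, 4, 64, 64) here
+        ```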
+ """ + + _supports_gradient_checkpointing = True + + @register_to_config + def __init__( + self, + sample_size: Optional[int] = None, + in_channels: int = 4, + out_channels: int = 4, + center_input_sample: bool = False, + flip_sin_to_cos: bool = True, + freq_shift: int = 0, + down_block_types: Tuple[str] = ( + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "DownBlock2D", + ), + mid_block_type: Optional[str] = "UNetMidBlock2DCrossAttn", + up_block_types: Tuple[str] = ("UpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D"), + only_cross_attention: Union[bool, Tuple[bool]] = False, + block_out_channels: Tuple[int] = (320, 640, 1280, 1280), + layers_per_block: Union[int, Tuple[int]] = 2, + downsample_padding: int = 1, + mid_block_scale_factor: float = 1, + dropout: float = 0.0, + act_fn: str = "silu", + norm_num_groups: Optional[int] = 32, + norm_eps: float = 1e-5, + cross_attention_dim: Union[int, Tuple[int]] = 1280, + transformer_layers_per_block: Union[int, Tuple[int], Tuple[Tuple]] = 1, + reverse_transformer_layers_per_block: Optional[Tuple[Tuple[int]]] = None, + encoder_hid_dim: Optional[int] = None, + encoder_hid_dim_type: Optional[str] = None, + attention_head_dim: Union[int, Tuple[int]] = 8, + num_attention_heads: Optional[Union[int, Tuple[int]]] = None, + dual_cross_attention: bool = False, + use_linear_projection: bool = False, + class_embed_type: Optional[str] = None, + addition_embed_type: Optional[str] = None, + addition_time_embed_dim: Optional[int] = None, + num_class_embeds: Optional[int] = None, + upcast_attention: bool = False, + resnet_time_scale_shift: str = "default", + resnet_skip_time_act: bool = False, + resnet_out_scale_factor: float = 1.0, + time_embedding_type: str = "positional", + time_embedding_dim: Optional[int] = None, + time_embedding_act_fn: Optional[str] = None, + timestep_post_act: Optional[str] = None, + time_cond_proj_dim: Optional[int] = None, + conv_in_kernel: int = 3, + conv_out_kernel: int = 3, + projection_class_embeddings_input_dim: Optional[int] = None, + attention_type: str = "default", + class_embeddings_concat: bool = False, + mid_block_only_cross_attention: Optional[bool] = None, + cross_attention_norm: Optional[str] = None, + addition_embed_type_num_heads: int = 64, + ): + super().__init__() + + self.sample_size = sample_size + + if num_attention_heads is not None: + raise ValueError( + "At the moment it is not possible to define the number of attention heads via `num_attention_heads` because of a naming issue as described in https://github.com/huggingface/diffusers/issues/2011#issuecomment-1547958131. Passing `num_attention_heads` will only be supported in diffusers v0.19." + ) + + # If `num_attention_heads` is not defined (which is the case for most models) + # it will default to `attention_head_dim`. This looks weird upon first reading it and it is. + # The reason for this behavior is to correct for incorrectly named variables that were introduced + # when this library was created. The incorrect naming was only discovered much later in https://github.com/huggingface/diffusers/issues/2011#issuecomment-1547958131 + # Changing `attention_head_dim` to `num_attention_heads` for 40,000+ configurations is too backwards breaking + # which is why we correct for the naming here. 
+ num_attention_heads = num_attention_heads or attention_head_dim + + # Check inputs + self._check_config( + down_block_types=down_block_types, + up_block_types=up_block_types, + only_cross_attention=only_cross_attention, + block_out_channels=block_out_channels, + layers_per_block=layers_per_block, + cross_attention_dim=cross_attention_dim, + transformer_layers_per_block=transformer_layers_per_block, + reverse_transformer_layers_per_block=reverse_transformer_layers_per_block, + attention_head_dim=attention_head_dim, + num_attention_heads=num_attention_heads, + ) + + # input + conv_in_padding = (conv_in_kernel - 1) // 2 + self.conv_in = nn.Conv2d( + in_channels, block_out_channels[0], kernel_size=conv_in_kernel, padding=conv_in_padding + ) + + # time + time_embed_dim, timestep_input_dim = self._set_time_proj( + time_embedding_type, + block_out_channels=block_out_channels, + flip_sin_to_cos=flip_sin_to_cos, + freq_shift=freq_shift, + time_embedding_dim=time_embedding_dim, + ) + + self.time_embedding = TimestepEmbedding( + timestep_input_dim, + time_embed_dim, + act_fn=act_fn, + post_act_fn=timestep_post_act, + cond_proj_dim=time_cond_proj_dim, + ) + + self._set_encoder_hid_proj( + encoder_hid_dim_type, + cross_attention_dim=cross_attention_dim, + encoder_hid_dim=encoder_hid_dim, + ) + + # class embedding + self._set_class_embedding( + class_embed_type, + act_fn=act_fn, + num_class_embeds=num_class_embeds, + projection_class_embeddings_input_dim=projection_class_embeddings_input_dim, + time_embed_dim=time_embed_dim, + timestep_input_dim=timestep_input_dim, + ) + + self._set_add_embedding( + addition_embed_type, + addition_embed_type_num_heads=addition_embed_type_num_heads, + addition_time_embed_dim=addition_time_embed_dim, + cross_attention_dim=cross_attention_dim, + encoder_hid_dim=encoder_hid_dim, + flip_sin_to_cos=flip_sin_to_cos, + freq_shift=freq_shift, + projection_class_embeddings_input_dim=projection_class_embeddings_input_dim, + time_embed_dim=time_embed_dim, + ) + + if time_embedding_act_fn is None: + self.time_embed_act = None + else: + self.time_embed_act = get_activation(time_embedding_act_fn) + + self.down_blocks = nn.ModuleList([]) + self.up_blocks = nn.ModuleList([]) + + if isinstance(only_cross_attention, bool): + if mid_block_only_cross_attention is None: + mid_block_only_cross_attention = only_cross_attention + + only_cross_attention = [only_cross_attention] * len(down_block_types) + + if mid_block_only_cross_attention is None: + mid_block_only_cross_attention = False + + if isinstance(num_attention_heads, int): + num_attention_heads = (num_attention_heads,) * len(down_block_types) + + if isinstance(attention_head_dim, int): + attention_head_dim = (attention_head_dim,) * len(down_block_types) + + if isinstance(cross_attention_dim, int): + cross_attention_dim = (cross_attention_dim,) * len(down_block_types) + + if isinstance(layers_per_block, int): + layers_per_block = [layers_per_block] * len(down_block_types) + + if isinstance(transformer_layers_per_block, int): + transformer_layers_per_block = [transformer_layers_per_block] * len(down_block_types) + + if class_embeddings_concat: + # The time embeddings are concatenated with the class embeddings. 
The dimension of the + # time embeddings passed to the down, middle, and up blocks is twice the dimension of the + # regular time embeddings + blocks_time_embed_dim = time_embed_dim * 2 + else: + blocks_time_embed_dim = time_embed_dim + + # down + output_channel = block_out_channels[0] + for i, down_block_type in enumerate(down_block_types): + input_channel = output_channel + output_channel = block_out_channels[i] + is_final_block = i == len(block_out_channels) - 1 + + down_block = get_down_block( + down_block_type, + num_layers=layers_per_block[i], + transformer_layers_per_block=transformer_layers_per_block[i], + in_channels=input_channel, + out_channels=output_channel, + temb_channels=blocks_time_embed_dim, + add_downsample=not is_final_block, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim[i], + num_attention_heads=num_attention_heads[i], + downsample_padding=downsample_padding, + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention[i], + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_type=attention_type, + resnet_skip_time_act=resnet_skip_time_act, + resnet_out_scale_factor=resnet_out_scale_factor, + cross_attention_norm=cross_attention_norm, + attention_head_dim=attention_head_dim[i] if attention_head_dim[i] is not None else output_channel, + dropout=dropout, + ) + self.down_blocks.append(down_block) + + # mid + self.mid_block = get_mid_block( + mid_block_type, + temb_channels=blocks_time_embed_dim, + in_channels=block_out_channels[-1], + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + output_scale_factor=mid_block_scale_factor, + transformer_layers_per_block=transformer_layers_per_block[-1], + num_attention_heads=num_attention_heads[-1], + cross_attention_dim=cross_attention_dim[-1], + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + mid_block_only_cross_attention=mid_block_only_cross_attention, + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_type=attention_type, + resnet_skip_time_act=resnet_skip_time_act, + cross_attention_norm=cross_attention_norm, + attention_head_dim=attention_head_dim[-1], + dropout=dropout, + ) + + # count how many layers upsample the images + self.num_upsamplers = 0 + + # up + reversed_block_out_channels = list(reversed(block_out_channels)) + reversed_num_attention_heads = list(reversed(num_attention_heads)) + reversed_layers_per_block = list(reversed(layers_per_block)) + reversed_cross_attention_dim = list(reversed(cross_attention_dim)) + reversed_transformer_layers_per_block = ( + list(reversed(transformer_layers_per_block)) + if reverse_transformer_layers_per_block is None + else reverse_transformer_layers_per_block + ) + only_cross_attention = list(reversed(only_cross_attention)) + + output_channel = reversed_block_out_channels[0] + for i, up_block_type in enumerate(up_block_types): + is_final_block = i == len(block_out_channels) - 1 + + prev_output_channel = output_channel + output_channel = reversed_block_out_channels[i] + input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)] + + # add upsample block for all BUT final layer + if not is_final_block: + add_upsample = True + self.num_upsamplers += 1 + else: + add_upsample = False + + up_block = get_up_block( + up_block_type, + 
num_layers=reversed_layers_per_block[i] + 1, + transformer_layers_per_block=reversed_transformer_layers_per_block[i], + in_channels=input_channel, + out_channels=output_channel, + prev_output_channel=prev_output_channel, + temb_channels=blocks_time_embed_dim, + add_upsample=add_upsample, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resolution_idx=i, + resnet_groups=norm_num_groups, + cross_attention_dim=reversed_cross_attention_dim[i], + num_attention_heads=reversed_num_attention_heads[i], + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention[i], + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_type=attention_type, + resnet_skip_time_act=resnet_skip_time_act, + resnet_out_scale_factor=resnet_out_scale_factor, + cross_attention_norm=cross_attention_norm, + attention_head_dim=attention_head_dim[i] if attention_head_dim[i] is not None else output_channel, + dropout=dropout, + ) + self.up_blocks.append(up_block) + prev_output_channel = output_channel + + # out + if norm_num_groups is not None: + self.conv_norm_out = nn.GroupNorm( + num_channels=block_out_channels[0], num_groups=norm_num_groups, eps=norm_eps + ) + + self.conv_act = get_activation(act_fn) + + else: + self.conv_norm_out = None + self.conv_act = None + + conv_out_padding = (conv_out_kernel - 1) // 2 + self.conv_out = nn.Conv2d( + block_out_channels[0], out_channels, kernel_size=conv_out_kernel, padding=conv_out_padding + ) + + self._set_pos_net_if_use_gligen(attention_type=attention_type, cross_attention_dim=cross_attention_dim) + + def _check_config( + self, + down_block_types: Tuple[str], + up_block_types: Tuple[str], + only_cross_attention: Union[bool, Tuple[bool]], + block_out_channels: Tuple[int], + layers_per_block: Union[int, Tuple[int]], + cross_attention_dim: Union[int, Tuple[int]], + transformer_layers_per_block: Union[int, Tuple[int], Tuple[Tuple[int]]], + reverse_transformer_layers_per_block: bool, + attention_head_dim: int, + num_attention_heads: Optional[Union[int, Tuple[int]]], + ): + if len(down_block_types) != len(up_block_types): + raise ValueError( + f"Must provide the same number of `down_block_types` as `up_block_types`. `down_block_types`: {down_block_types}. `up_block_types`: {up_block_types}." + ) + + if len(block_out_channels) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `block_out_channels` as `down_block_types`. `block_out_channels`: {block_out_channels}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(only_cross_attention, bool) and len(only_cross_attention) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `only_cross_attention` as `down_block_types`. `only_cross_attention`: {only_cross_attention}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(num_attention_heads, int) and len(num_attention_heads) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `num_attention_heads` as `down_block_types`. `num_attention_heads`: {num_attention_heads}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(attention_head_dim, int) and len(attention_head_dim) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `attention_head_dim` as `down_block_types`. `attention_head_dim`: {attention_head_dim}. `down_block_types`: {down_block_types}." 
+ ) + + if isinstance(cross_attention_dim, list) and len(cross_attention_dim) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `cross_attention_dim` as `down_block_types`. `cross_attention_dim`: {cross_attention_dim}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(layers_per_block, int) and len(layers_per_block) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `layers_per_block` as `down_block_types`. `layers_per_block`: {layers_per_block}. `down_block_types`: {down_block_types}." + ) + if isinstance(transformer_layers_per_block, list) and reverse_transformer_layers_per_block is None: + for layer_number_per_block in transformer_layers_per_block: + if isinstance(layer_number_per_block, list): + raise ValueError("Must provide 'reverse_transformer_layers_per_block` if using asymmetrical UNet.") + + def _set_time_proj( + self, + time_embedding_type: str, + block_out_channels: int, + flip_sin_to_cos: bool, + freq_shift: float, + time_embedding_dim: int, + ) -> Tuple[int, int]: + if time_embedding_type == "fourier": + time_embed_dim = time_embedding_dim or block_out_channels[0] * 2 + if time_embed_dim % 2 != 0: + raise ValueError(f"`time_embed_dim` should be divisible by 2, but is {time_embed_dim}.") + self.time_proj = GaussianFourierProjection( + time_embed_dim // 2, set_W_to_weight=False, log=False, flip_sin_to_cos=flip_sin_to_cos + ) + timestep_input_dim = time_embed_dim + elif time_embedding_type == "positional": + time_embed_dim = time_embedding_dim or block_out_channels[0] * 4 + + self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift) + timestep_input_dim = block_out_channels[0] + else: + raise ValueError( + f"{time_embedding_type} does not exist. Please make sure to use one of `fourier` or `positional`." + ) + + return time_embed_dim, timestep_input_dim + + def _set_encoder_hid_proj( + self, + encoder_hid_dim_type: Optional[str], + cross_attention_dim: Union[int, Tuple[int]], + encoder_hid_dim: Optional[int], + ): + if encoder_hid_dim_type is None and encoder_hid_dim is not None: + encoder_hid_dim_type = "text_proj" + self.register_to_config(encoder_hid_dim_type=encoder_hid_dim_type) + logger.info("encoder_hid_dim_type defaults to 'text_proj' as `encoder_hid_dim` is defined.") + + if encoder_hid_dim is None and encoder_hid_dim_type is not None: + raise ValueError( + f"`encoder_hid_dim` has to be defined when `encoder_hid_dim_type` is set to {encoder_hid_dim_type}." + ) + + if encoder_hid_dim_type == "text_proj": + self.encoder_hid_proj = nn.Linear(encoder_hid_dim, cross_attention_dim) + elif encoder_hid_dim_type == "text_image_proj": + # image_embed_dim DOESN'T have to be `cross_attention_dim`. To not clutter the __init__ too much + # they are set to `cross_attention_dim` here as this is exactly the required dimension for the currently only use + # case when `addition_embed_type == "text_image_proj"` (Kandinsky 2.1)` + self.encoder_hid_proj = TextImageProjection( + text_embed_dim=encoder_hid_dim, + image_embed_dim=cross_attention_dim, + cross_attention_dim=cross_attention_dim, + ) + elif encoder_hid_dim_type == "image_proj": + # Kandinsky 2.2 + self.encoder_hid_proj = ImageProjection( + image_embed_dim=encoder_hid_dim, + cross_attention_dim=cross_attention_dim, + ) + elif encoder_hid_dim_type is not None: + raise ValueError( + f"encoder_hid_dim_type: {encoder_hid_dim_type} must be None, 'text_proj' or 'text_image_proj'." 
+ ) + else: + self.encoder_hid_proj = None + + def _set_class_embedding( + self, + class_embed_type: Optional[str], + act_fn: str, + num_class_embeds: Optional[int], + projection_class_embeddings_input_dim: Optional[int], + time_embed_dim: int, + timestep_input_dim: int, + ): + if class_embed_type is None and num_class_embeds is not None: + self.class_embedding = nn.Embedding(num_class_embeds, time_embed_dim) + elif class_embed_type == "timestep": + self.class_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim, act_fn=act_fn) + elif class_embed_type == "identity": + self.class_embedding = nn.Identity(time_embed_dim, time_embed_dim) + elif class_embed_type == "projection": + if projection_class_embeddings_input_dim is None: + raise ValueError( + "`class_embed_type`: 'projection' requires `projection_class_embeddings_input_dim` be set" + ) + # The projection `class_embed_type` is the same as the timestep `class_embed_type` except + # 1. the `class_labels` inputs are not first converted to sinusoidal embeddings + # 2. it projects from an arbitrary input dimension. + # + # Note that `TimestepEmbedding` is quite general, being mainly linear layers and activations. + # When used for embedding actual timesteps, the timesteps are first converted to sinusoidal embeddings. + # As a result, `TimestepEmbedding` can be passed arbitrary vectors. + self.class_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim) + elif class_embed_type == "simple_projection": + if projection_class_embeddings_input_dim is None: + raise ValueError( + "`class_embed_type`: 'simple_projection' requires `projection_class_embeddings_input_dim` be set" + ) + self.class_embedding = nn.Linear(projection_class_embeddings_input_dim, time_embed_dim) + else: + self.class_embedding = None + + def _set_add_embedding( + self, + addition_embed_type: str, + addition_embed_type_num_heads: int, + addition_time_embed_dim: Optional[int], + flip_sin_to_cos: bool, + freq_shift: float, + cross_attention_dim: Optional[int], + encoder_hid_dim: Optional[int], + projection_class_embeddings_input_dim: Optional[int], + time_embed_dim: int, + ): + if addition_embed_type == "text": + if encoder_hid_dim is not None: + text_time_embedding_from_dim = encoder_hid_dim + else: + text_time_embedding_from_dim = cross_attention_dim + + self.add_embedding = TextTimeEmbedding( + text_time_embedding_from_dim, time_embed_dim, num_heads=addition_embed_type_num_heads + ) + elif addition_embed_type == "text_image": + # text_embed_dim and image_embed_dim DON'T have to be `cross_attention_dim`. 
To not clutter the __init__ too much + # they are set to `cross_attention_dim` here as this is exactly the required dimension for the currently only use + # case when `addition_embed_type == "text_image"` (Kandinsky 2.1)` + self.add_embedding = TextImageTimeEmbedding( + text_embed_dim=cross_attention_dim, image_embed_dim=cross_attention_dim, time_embed_dim=time_embed_dim + ) + elif addition_embed_type == "text_time": + self.add_time_proj = Timesteps(addition_time_embed_dim, flip_sin_to_cos, freq_shift) + self.add_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim) + elif addition_embed_type == "image": + # Kandinsky 2.2 + self.add_embedding = ImageTimeEmbedding(image_embed_dim=encoder_hid_dim, time_embed_dim=time_embed_dim) + elif addition_embed_type == "image_hint": + # Kandinsky 2.2 ControlNet + self.add_embedding = ImageHintTimeEmbedding(image_embed_dim=encoder_hid_dim, time_embed_dim=time_embed_dim) + elif addition_embed_type is not None: + raise ValueError(f"addition_embed_type: {addition_embed_type} must be None, 'text' or 'text_image'.") + + def _set_pos_net_if_use_gligen(self, attention_type: str, cross_attention_dim: int): + if attention_type in ["gated", "gated-text-image"]: + positive_len = 768 + if isinstance(cross_attention_dim, int): + positive_len = cross_attention_dim + elif isinstance(cross_attention_dim, tuple) or isinstance(cross_attention_dim, list): + positive_len = cross_attention_dim[0] + + feature_type = "text-only" if attention_type == "gated" else "text-image" + self.position_net = GLIGENTextBoundingboxProjection( + positive_len=positive_len, out_dim=cross_attention_dim, feature_type=feature_type + ) + + @property + def attn_processors(self) -> Dict[str, AttentionProcessor]: + r""" + Returns: + `dict` of attention processors: A dictionary containing all attention processors used in the model with + indexed by its weight name. + """ + # set recursively + processors = {} + + def fn_recursive_add_processors(name: str, module: torch.nn.Module, processors: Dict[str, AttentionProcessor]): + if hasattr(module, "get_processor"): + processors[f"{name}.processor"] = module.get_processor(return_deprecated_lora=True) + + for sub_name, child in module.named_children(): + fn_recursive_add_processors(f"{name}.{sub_name}", child, processors) + + return processors + + for name, module in self.named_children(): + fn_recursive_add_processors(name, module, processors) + + return processors + + def set_attn_processor(self, processor: Union[AttentionProcessor, Dict[str, AttentionProcessor]]): + r""" + Sets the attention processor to use to compute attention. + + Parameters: + processor (`dict` of `AttentionProcessor` or only `AttentionProcessor`): + The instantiated processor class or a dictionary of processor classes that will be set as the processor + for **all** `Attention` layers. + + If `processor` is a dict, the key needs to define the path to the corresponding cross attention + processor. This is strongly recommended when setting trainable attention processors. + + """ + count = len(self.attn_processors.keys()) + + if isinstance(processor, dict) and len(processor) != count: + raise ValueError( + f"A dict of processors was passed, but the number of processors {len(processor)} does not match the" + f" number of attention layers: {count}. Please make sure to pass {count} processor classes." 
+ ) + + def fn_recursive_attn_processor(name: str, module: torch.nn.Module, processor): + if hasattr(module, "set_processor"): + if not isinstance(processor, dict): + module.set_processor(processor) + else: + module.set_processor(processor.pop(f"{name}.processor")) + + for sub_name, child in module.named_children(): + fn_recursive_attn_processor(f"{name}.{sub_name}", child, processor) + + for name, module in self.named_children(): + fn_recursive_attn_processor(name, module, processor) + + def set_default_attn_processor(self): + """ + Disables custom attention processors and sets the default attention implementation. + """ + if all(proc.__class__ in ADDED_KV_ATTENTION_PROCESSORS for proc in self.attn_processors.values()): + processor = AttnAddedKVProcessor() + elif all(proc.__class__ in CROSS_ATTENTION_PROCESSORS for proc in self.attn_processors.values()): + processor = AttnProcessor() + else: + raise ValueError( + f"Cannot call `set_default_attn_processor` when attention processors are of type {next(iter(self.attn_processors.values()))}" + ) + + self.set_attn_processor(processor) + + def set_attention_slice(self, slice_size: Union[str, int, List[int]] = "auto"): + r""" + Enable sliced attention computation. + + When this option is enabled, the attention module splits the input tensor in slices to compute attention in + several steps. This is useful for saving some memory in exchange for a small decrease in speed. + + Args: + slice_size (`str` or `int` or `list(int)`, *optional*, defaults to `"auto"`): + When `"auto"`, input to the attention heads is halved, so attention is computed in two steps. If + `"max"`, maximum amount of memory is saved by running only one slice at a time. If a number is + provided, uses as many slices as `attention_head_dim // slice_size`. In this case, `attention_head_dim` + must be a multiple of `slice_size`. + """ + sliceable_head_dims = [] + + def fn_recursive_retrieve_sliceable_dims(module: torch.nn.Module): + if hasattr(module, "set_attention_slice"): + sliceable_head_dims.append(module.sliceable_head_dim) + + for child in module.children(): + fn_recursive_retrieve_sliceable_dims(child) + + # retrieve number of attention layers + for module in self.children(): + fn_recursive_retrieve_sliceable_dims(module) + + num_sliceable_layers = len(sliceable_head_dims) + + if slice_size == "auto": + # half the attention head size is usually a good trade-off between + # speed and memory + slice_size = [dim // 2 for dim in sliceable_head_dims] + elif slice_size == "max": + # make smallest slice possible + slice_size = num_sliceable_layers * [1] + + slice_size = num_sliceable_layers * [slice_size] if not isinstance(slice_size, list) else slice_size + + if len(slice_size) != len(sliceable_head_dims): + raise ValueError( + f"You have provided {len(slice_size)}, but {self.config} has {len(sliceable_head_dims)} different" + f" attention layers. Make sure to match `len(slice_size)` to be {len(sliceable_head_dims)}." + ) + + for i in range(len(slice_size)): + size = slice_size[i] + dim = sliceable_head_dims[i] + if size is not None and size > dim: + raise ValueError(f"size {size} has to be smaller or equal to {dim}.") + + # Recursively walk through all the children. 
+ # Any children which exposes the set_attention_slice method + # gets the message + def fn_recursive_set_attention_slice(module: torch.nn.Module, slice_size: List[int]): + if hasattr(module, "set_attention_slice"): + module.set_attention_slice(slice_size.pop()) + + for child in module.children(): + fn_recursive_set_attention_slice(child, slice_size) + + reversed_slice_size = list(reversed(slice_size)) + for module in self.children(): + fn_recursive_set_attention_slice(module, reversed_slice_size) + + def _set_gradient_checkpointing(self, module, value=False): + if hasattr(module, "gradient_checkpointing"): + module.gradient_checkpointing = value + + def enable_freeu(self, s1: float, s2: float, b1: float, b2: float): + r"""Enables the FreeU mechanism from https://arxiv.org/abs/2309.11497. + + The suffixes after the scaling factors represent the stage blocks where they are being applied. + + Please refer to the [official repository](https://github.com/ChenyangSi/FreeU) for combinations of values that + are known to work well for different pipelines such as Stable Diffusion v1, v2, and Stable Diffusion XL. + + Args: + s1 (`float`): + Scaling factor for stage 1 to attenuate the contributions of the skip features. This is done to + mitigate the "oversmoothing effect" in the enhanced denoising process. + s2 (`float`): + Scaling factor for stage 2 to attenuate the contributions of the skip features. This is done to + mitigate the "oversmoothing effect" in the enhanced denoising process. + b1 (`float`): Scaling factor for stage 1 to amplify the contributions of backbone features. + b2 (`float`): Scaling factor for stage 2 to amplify the contributions of backbone features. + """ + for i, upsample_block in enumerate(self.up_blocks): + setattr(upsample_block, "s1", s1) + setattr(upsample_block, "s2", s2) + setattr(upsample_block, "b1", b1) + setattr(upsample_block, "b2", b2) + + def disable_freeu(self): + """Disables the FreeU mechanism.""" + freeu_keys = {"s1", "s2", "b1", "b2"} + for i, upsample_block in enumerate(self.up_blocks): + for k in freeu_keys: + if hasattr(upsample_block, k) or getattr(upsample_block, k, None) is not None: + setattr(upsample_block, k, None) + + def fuse_qkv_projections(self): + """ + Enables fused QKV projections. For self-attention modules, all projection matrices (i.e., query, key, value) + are fused. For cross-attention modules, key and value projection matrices are fused. + + + + This API is 🧪 experimental. + + + """ + self.original_attn_processors = None + + for _, attn_processor in self.attn_processors.items(): + if "Added" in str(attn_processor.__class__.__name__): + raise ValueError("`fuse_qkv_projections()` is not supported for models having added KV projections.") + + self.original_attn_processors = self.attn_processors + + for module in self.modules(): + if isinstance(module, Attention): + module.fuse_projections(fuse=True) + + def unfuse_qkv_projections(self): + """Disables the fused QKV projection if enabled. + + + + This API is 🧪 experimental. + + + + """ + if self.original_attn_processors is not None: + self.set_attn_processor(self.original_attn_processors) + + def unload_lora(self): + """Unloads LoRA weights.""" + deprecate( + "unload_lora", + "0.28.0", + "Calling `unload_lora()` is deprecated and will be removed in a future version. 
Please install `peft` and then call `disable_adapters().", + ) + for module in self.modules(): + if hasattr(module, "set_lora_layer"): + module.set_lora_layer(None) + + def get_time_embed( + self, sample: torch.Tensor, timestep: Union[torch.Tensor, float, int] + ) -> Optional[torch.Tensor]: + timesteps = timestep + if not torch.is_tensor(timesteps): + # TODO: this requires sync between CPU and GPU. So try to pass timesteps as tensors if you can + # This would be a good case for the `match` statement (Python 3.10+) + is_mps = sample.device.type == "mps" + if isinstance(timestep, float): + dtype = torch.float32 if is_mps else torch.float64 + else: + dtype = torch.int32 if is_mps else torch.int64 + timesteps = torch.tensor([timesteps], dtype=dtype, device=sample.device) + elif len(timesteps.shape) == 0: + timesteps = timesteps[None].to(sample.device) + + # broadcast to batch dimension in a way that's compatible with ONNX/Core ML + timesteps = timesteps.expand(sample.shape[0]) + + t_emb = self.time_proj(timesteps) + # `Timesteps` does not contain any weights and will always return f32 tensors + # but time_embedding might actually be running in fp16. so we need to cast here. + # there might be better ways to encapsulate this. + t_emb = t_emb.to(dtype=sample.dtype) + return t_emb + + def get_class_embed(self, sample: torch.Tensor, class_labels: Optional[torch.Tensor]) -> Optional[torch.Tensor]: + class_emb = None + if self.class_embedding is not None: + if class_labels is None: + raise ValueError("class_labels should be provided when num_class_embeds > 0") + + if self.config.class_embed_type == "timestep": + class_labels = self.time_proj(class_labels) + + # `Timesteps` does not contain any weights and will always return f32 tensors + # there might be better ways to encapsulate this. 
+ class_labels = class_labels.to(dtype=sample.dtype) + + class_emb = self.class_embedding(class_labels).to(dtype=sample.dtype) + return class_emb + + def get_aug_embed( + self, emb: torch.Tensor, encoder_hidden_states: torch.Tensor, added_cond_kwargs: Dict[str, Any] + ) -> Optional[torch.Tensor]: + aug_emb = None + if self.config.addition_embed_type == "text": + aug_emb = self.add_embedding(encoder_hidden_states) + elif self.config.addition_embed_type == "text_image": + # Kandinsky 2.1 - style + if "image_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_image' which requires the keyword argument `image_embeds` to be passed in `added_cond_kwargs`" + ) + + image_embs = added_cond_kwargs.get("image_embeds") + text_embs = added_cond_kwargs.get("text_embeds", encoder_hidden_states) + aug_emb = self.add_embedding(text_embs, image_embs) + elif self.config.addition_embed_type == "text_time": + # SDXL - style + if "text_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_time' which requires the keyword argument `text_embeds` to be passed in `added_cond_kwargs`" + ) + text_embeds = added_cond_kwargs.get("text_embeds") + if "time_ids" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_time' which requires the keyword argument `time_ids` to be passed in `added_cond_kwargs`" + ) + time_ids = added_cond_kwargs.get("time_ids") + time_embeds = self.add_time_proj(time_ids.flatten()) + time_embeds = time_embeds.reshape((text_embeds.shape[0], -1)) + add_embeds = torch.concat([text_embeds, time_embeds], dim=-1) + add_embeds = add_embeds.to(emb.dtype) + aug_emb = self.add_embedding(add_embeds) + elif self.config.addition_embed_type == "image": + # Kandinsky 2.2 - style + if "image_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'image' which requires the keyword argument `image_embeds` to be passed in `added_cond_kwargs`" + ) + image_embs = added_cond_kwargs.get("image_embeds") + aug_emb = self.add_embedding(image_embs) + elif self.config.addition_embed_type == "image_hint": + # Kandinsky 2.2 - style + if "image_embeds" not in added_cond_kwargs or "hint" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'image_hint' which requires the keyword arguments `image_embeds` and `hint` to be passed in `added_cond_kwargs`" + ) + image_embs = added_cond_kwargs.get("image_embeds") + hint = added_cond_kwargs.get("hint") + aug_emb = self.add_embedding(image_embs, hint) + return aug_emb + + def process_encoder_hidden_states( + self, encoder_hidden_states: torch.Tensor, added_cond_kwargs: Dict[str, Any] + ) -> torch.Tensor: + if self.encoder_hid_proj is not None and self.config.encoder_hid_dim_type == "text_proj": + encoder_hidden_states = self.encoder_hid_proj(encoder_hidden_states) + elif self.encoder_hid_proj is not None and self.config.encoder_hid_dim_type == "text_image_proj": + # Kandinsky 2.1 - style + if "image_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `encoder_hid_dim_type` set to 'text_image_proj' which requires the keyword argument `image_embeds` to be passed in `added_conditions`" + ) + + image_embeds = added_cond_kwargs.get("image_embeds") + encoder_hidden_states = 
self.encoder_hid_proj(encoder_hidden_states, image_embeds) + elif self.encoder_hid_proj is not None and self.config.encoder_hid_dim_type == "image_proj": + # Kandinsky 2.2 - style + if "image_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `encoder_hid_dim_type` set to 'image_proj' which requires the keyword argument `image_embeds` to be passed in `added_conditions`" + ) + image_embeds = added_cond_kwargs.get("image_embeds") + encoder_hidden_states = self.encoder_hid_proj(image_embeds) + elif self.encoder_hid_proj is not None and self.config.encoder_hid_dim_type == "ip_image_proj": + if "image_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `encoder_hid_dim_type` set to 'ip_image_proj' which requires the keyword argument `image_embeds` to be passed in `added_conditions`" + ) + image_embeds = added_cond_kwargs.get("image_embeds") + image_embeds = self.encoder_hid_proj(image_embeds) + encoder_hidden_states = (encoder_hidden_states, image_embeds) + return encoder_hidden_states + + def forward( + self, + sample: torch.FloatTensor, + timestep: Union[torch.Tensor, float, int], + encoder_hidden_states: torch.Tensor, + class_labels: Optional[torch.Tensor] = None, + timestep_cond: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + added_cond_kwargs: Optional[Dict[str, torch.Tensor]] = None, + down_block_additional_residuals: Optional[Tuple[torch.Tensor]] = None, + mid_block_additional_residual: Optional[torch.Tensor] = None, + down_intrablock_additional_residuals: Optional[Tuple[torch.Tensor]] = None, + encoder_attention_mask: Optional[torch.Tensor] = None, + return_dict: bool = True, + down_block_add_samples: Optional[Tuple[torch.Tensor]] = None, + mid_block_add_sample: Optional[Tuple[torch.Tensor]] = None, + up_block_add_samples: Optional[Tuple[torch.Tensor]] = None, + ) -> Union[UNet2DConditionOutput, Tuple]: + r""" + The [`UNet2DConditionModel`] forward method. + + Args: + sample (`torch.FloatTensor`): + The noisy input tensor with the following shape `(batch, channel, height, width)`. + timestep (`torch.FloatTensor` or `float` or `int`): The number of timesteps to denoise an input. + encoder_hidden_states (`torch.FloatTensor`): + The encoder hidden states with shape `(batch, sequence_length, feature_dim)`. + class_labels (`torch.Tensor`, *optional*, defaults to `None`): + Optional class labels for conditioning. Their embeddings will be summed with the timestep embeddings. + timestep_cond: (`torch.Tensor`, *optional*, defaults to `None`): + Conditional embeddings for timestep. If provided, the embeddings will be summed with the samples passed + through the `self.time_embedding` layer to obtain the timestep embeddings. + attention_mask (`torch.Tensor`, *optional*, defaults to `None`): + An attention mask of shape `(batch, key_tokens)` is applied to `encoder_hidden_states`. If `1` the mask + is kept, otherwise if `0` it is discarded. Mask will be converted into a bias, which adds large + negative values to the attention scores corresponding to "discard" tokens. + cross_attention_kwargs (`dict`, *optional*): + A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under + `self.processor` in + [diffusers.models.attention_processor](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py). 
+ added_cond_kwargs: (`dict`, *optional*): + A kwargs dictionary containing additional embeddings that if specified are added to the embeddings that + are passed along to the UNet blocks. + down_block_additional_residuals: (`tuple` of `torch.Tensor`, *optional*): + A tuple of tensors that if specified are added to the residuals of down unet blocks. + mid_block_additional_residual: (`torch.Tensor`, *optional*): + A tensor that if specified is added to the residual of the middle unet block. + down_intrablock_additional_residuals (`tuple` of `torch.Tensor`, *optional*): + additional residuals to be added within UNet down blocks, for example from T2I-Adapter side model(s) + encoder_attention_mask (`torch.Tensor`): + A cross-attention mask of shape `(batch, sequence_length)` is applied to `encoder_hidden_states`. If + `True` the mask is kept, otherwise if `False` it is discarded. Mask will be converted into a bias, + which adds large negative values to the attention scores corresponding to "discard" tokens. + return_dict (`bool`, *optional*, defaults to `True`): + Whether or not to return a [`~models.unets.unet_2d_condition.UNet2DConditionOutput`] instead of a plain + tuple. + + Returns: + [`~models.unets.unet_2d_condition.UNet2DConditionOutput`] or `tuple`: + If `return_dict` is True, an [`~models.unets.unet_2d_condition.UNet2DConditionOutput`] is returned, + otherwise a `tuple` is returned where the first element is the sample tensor. + """ + # By default samples have to be AT least a multiple of the overall upsampling factor. + # The overall upsampling factor is equal to 2 ** (# num of upsampling layers). + # However, the upsampling interpolation output size can be forced to fit any upsampling size + # on the fly if necessary. + default_overall_up_factor = 2**self.num_upsamplers + + # upsample size should be forwarded when sample is not a multiple of `default_overall_up_factor` + forward_upsample_size = False + upsample_size = None + + for dim in sample.shape[-2:]: + if dim % default_overall_up_factor != 0: + # Forward upsample size to force interpolation output size. + forward_upsample_size = True + break + + # ensure attention_mask is a bias, and give it a singleton query_tokens dimension + # expects mask of shape: + # [batch, key_tokens] + # adds singleton query_tokens dimension: + # [batch, 1, key_tokens] + # this helps to broadcast it as a bias over attention scores, which will be in one of the following shapes: + # [batch, heads, query_tokens, key_tokens] (e.g. torch sdp attn) + # [batch * heads, query_tokens, key_tokens] (e.g. xformers or classic attn) + if attention_mask is not None: + # assume that mask is expressed as: + # (1 = keep, 0 = discard) + # convert mask into a bias that can be added to attention scores: + # (keep = +0, discard = -10000.0) + attention_mask = (1 - attention_mask.to(sample.dtype)) * -10000.0 + attention_mask = attention_mask.unsqueeze(1) + + # convert encoder_attention_mask to a bias the same way we do for attention_mask + if encoder_attention_mask is not None: + encoder_attention_mask = (1 - encoder_attention_mask.to(sample.dtype)) * -10000.0 + encoder_attention_mask = encoder_attention_mask.unsqueeze(1) + + # 0. center input if necessary + if self.config.center_input_sample: + sample = 2 * sample - 1.0 + + # 1. 
time + t_emb = self.get_time_embed(sample=sample, timestep=timestep) + emb = self.time_embedding(t_emb, timestep_cond) + aug_emb = None + + class_emb = self.get_class_embed(sample=sample, class_labels=class_labels) + if class_emb is not None: + if self.config.class_embeddings_concat: + emb = torch.cat([emb, class_emb], dim=-1) + else: + emb = emb + class_emb + + aug_emb = self.get_aug_embed( + emb=emb, encoder_hidden_states=encoder_hidden_states, added_cond_kwargs=added_cond_kwargs + ) + if self.config.addition_embed_type == "image_hint": + aug_emb, hint = aug_emb + sample = torch.cat([sample, hint], dim=1) + + emb = emb + aug_emb if aug_emb is not None else emb + + if self.time_embed_act is not None: + emb = self.time_embed_act(emb) + + encoder_hidden_states = self.process_encoder_hidden_states( + encoder_hidden_states=encoder_hidden_states, added_cond_kwargs=added_cond_kwargs + ) + + # 2. pre-process + sample = self.conv_in(sample) + + # 2.5 GLIGEN position net + if cross_attention_kwargs is not None and cross_attention_kwargs.get("gligen", None) is not None: + cross_attention_kwargs = cross_attention_kwargs.copy() + gligen_args = cross_attention_kwargs.pop("gligen") + cross_attention_kwargs["gligen"] = {"objs": self.position_net(**gligen_args)} + + # 3. down + # we're popping the `scale` instead of getting it because otherwise `scale` will be propagated + # to the internal blocks and will raise deprecation warnings. this will be confusing for our users. + if cross_attention_kwargs is not None: + cross_attention_kwargs = cross_attention_kwargs.copy() + lora_scale = cross_attention_kwargs.pop("scale", 1.0) + else: + lora_scale = 1.0 + + if USE_PEFT_BACKEND: + # weight the lora layers by setting `lora_scale` for each PEFT layer + scale_lora_layers(self, lora_scale) + + is_controlnet = mid_block_additional_residual is not None and down_block_additional_residuals is not None + # using new arg down_intrablock_additional_residuals for T2I-Adapters, to distinguish from controlnets + is_adapter = down_intrablock_additional_residuals is not None + # maintain backward compatibility for legacy usage, where + # T2I-Adapter and ControlNet both use down_block_additional_residuals arg + # but can only use one or the other + is_brushnet = down_block_add_samples is not None and mid_block_add_sample is not None and up_block_add_samples is not None + if not is_adapter and mid_block_additional_residual is None and down_block_additional_residuals is not None: + deprecate( + "T2I should not use down_block_additional_residuals", + "1.3.0", + "Passing intrablock residual connections with `down_block_additional_residuals` is deprecated \ + and will be removed in diffusers 1.3.0. `down_block_additional_residuals` should only be used \ + for ControlNet. Please make sure use `down_intrablock_additional_residuals` instead. 
", + standard_warn=False, + ) + down_intrablock_additional_residuals = down_block_additional_residuals + is_adapter = True + + down_block_res_samples = (sample,) + + if is_brushnet: + sample = sample + down_block_add_samples.pop(0) + + for downsample_block in self.down_blocks: + if hasattr(downsample_block, "has_cross_attention") and downsample_block.has_cross_attention: + # For t2i-adapter CrossAttnDownBlock2D + additional_residuals = {} + if is_adapter and len(down_intrablock_additional_residuals) > 0: + additional_residuals["additional_residuals"] = down_intrablock_additional_residuals.pop(0) + + i = len(down_block_add_samples) + + if is_brushnet and len(down_block_add_samples)>0: + additional_residuals["down_block_add_samples"] = [down_block_add_samples.pop(0) + for _ in range(len(downsample_block.resnets)+(downsample_block.downsamplers !=None))] + + sample, res_samples = downsample_block( + hidden_states=sample, + temb=emb, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + encoder_attention_mask=encoder_attention_mask, + **additional_residuals, + ) + else: + additional_residuals = {} + + i = len(down_block_add_samples) + + if is_brushnet and len(down_block_add_samples)>0: + additional_residuals["down_block_add_samples"] = [down_block_add_samples.pop(0) + for _ in range(len(downsample_block.resnets)+(downsample_block.downsamplers !=None))] + + sample, res_samples = downsample_block(hidden_states=sample, temb=emb, **additional_residuals) + if is_adapter and len(down_intrablock_additional_residuals) > 0: + sample += down_intrablock_additional_residuals.pop(0) + + down_block_res_samples += res_samples + + if is_controlnet: + new_down_block_res_samples = () + + for down_block_res_sample, down_block_additional_residual in zip( + down_block_res_samples, down_block_additional_residuals + ): + down_block_res_sample = down_block_res_sample + down_block_additional_residual + new_down_block_res_samples = new_down_block_res_samples + (down_block_res_sample,) + + down_block_res_samples = new_down_block_res_samples + + # 4. mid + if self.mid_block is not None: + if hasattr(self.mid_block, "has_cross_attention") and self.mid_block.has_cross_attention: + sample = self.mid_block( + sample, + emb, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + encoder_attention_mask=encoder_attention_mask, + ) + else: + sample = self.mid_block(sample, emb) + + # To support T2I-Adapter-XL + if ( + is_adapter + and len(down_intrablock_additional_residuals) > 0 + and sample.shape == down_intrablock_additional_residuals[0].shape + ): + sample += down_intrablock_additional_residuals.pop(0) + + if is_controlnet: + sample = sample + mid_block_additional_residual + + if is_brushnet: + sample = sample + mid_block_add_sample + + # 5. 
up + for i, upsample_block in enumerate(self.up_blocks): + is_final_block = i == len(self.up_blocks) - 1 + + res_samples = down_block_res_samples[-len(upsample_block.resnets) :] + down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)] + + # if we have not reached the final block and need to forward the + # upsample size, we do it here + if not is_final_block and forward_upsample_size: + upsample_size = down_block_res_samples[-1].shape[2:] + + if hasattr(upsample_block, "has_cross_attention") and upsample_block.has_cross_attention: + additional_residuals = {} + + i = len(up_block_add_samples) + + if is_brushnet and len(up_block_add_samples)>0: + additional_residuals["up_block_add_samples"] = [up_block_add_samples.pop(0) + for _ in range(len(upsample_block.resnets)+(upsample_block.upsamplers !=None))] + + sample = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + upsample_size=upsample_size, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + **additional_residuals, + ) + else: + additional_residuals = {} + + i = len(up_block_add_samples) + + if is_brushnet and len(up_block_add_samples)>0: + additional_residuals["up_block_add_samples"] = [up_block_add_samples.pop(0) + for _ in range(len(upsample_block.resnets)+(upsample_block.upsamplers !=None))] + + sample = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + upsample_size=upsample_size, + **additional_residuals, + ) + + # 6. post-process + if self.conv_norm_out: + sample = self.conv_norm_out(sample) + sample = self.conv_act(sample) + sample = self.conv_out(sample) + + if USE_PEFT_BACKEND: + # remove `lora_scale` from each PEFT layer + unscale_lora_layers(self, lora_scale) + + if not return_dict: + return (sample,) + + return UNet2DConditionOutput(sample=sample) diff --git a/ComfyUI-BrushNet/brushnet_nodes.py b/ComfyUI-BrushNet/brushnet_nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..557995d56ec069721a0589d5bfb34c8d1cab6757 --- /dev/null +++ b/ComfyUI-BrushNet/brushnet_nodes.py @@ -0,0 +1,1080 @@ +import os +import types +from typing import Tuple + +import torch +import torchvision.transforms as T +import torch.nn.functional as F +from accelerate import init_empty_weights, load_checkpoint_and_dispatch + +#import sys +#from sys import platform +# Get the parent directory of 'comfy' and add it to the Python path +#comfy_parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../')) +#sys.path.append(comfy_parent_dir) + +import comfy +import folder_paths + +from .model_patch import add_model_patch_option, patch_model_function_wrapper + +from .brushnet.brushnet import BrushNetModel +from .brushnet.brushnet_ca import BrushNetModel as PowerPaintModel + +from .brushnet.powerpaint_utils import TokenizerWrapper, add_tokens + +current_directory = os.path.dirname(os.path.abspath(__file__)) +brushnet_config_file = os.path.join(current_directory, 'brushnet', 'brushnet.json') +brushnet_xl_config_file = os.path.join(current_directory, 'brushnet', 'brushnet_xl.json') +powerpaint_config_file = os.path.join(current_directory,'brushnet', 'powerpaint.json') + +sd15_scaling_factor = 0.18215 +sdxl_scaling_factor = 0.13025 + +ModelsToUnload = [comfy.sd1_clip.SD1ClipModel, + comfy.ldm.models.autoencoder.AutoencoderKL + ] + + +class BrushNetLoader: + + @classmethod + def INPUT_TYPES(s): 
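+ # Scan the ComfyUI 'inpaint' models folder (resolved by get_files_with_extension
+ # below) for BrushNet/PowerPaint .safetensors checkpoints and expose them,
+ # together with the load dtype, as this node's inputs.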
+ files, inpaint_path = get_files_with_extension('inpaint') + s.inpaint_path = inpaint_path + return {"required": + { + "brushnet": (files, ), + "dtype": (['float16', 'bfloat16', 'float32', 'float64'], ), + }, + } + + CATEGORY = "inpaint" + RETURN_TYPES = ("BRMODEL",) + RETURN_NAMES = ("brushnet",) + + FUNCTION = "brushnet_loading" + + def brushnet_loading(self, brushnet, dtype): + brushnet_file = os.path.join(self.inpaint_path, brushnet) + is_SDXL = False + is_PP = False + sd = comfy.utils.load_torch_file(brushnet_file) + brushnet_down_block, brushnet_mid_block, brushnet_up_block, keys = brushnet_blocks(sd) + del sd + if brushnet_down_block == 24 and brushnet_mid_block == 2 and brushnet_up_block == 30: + is_SDXL = False + if keys == 322: + is_PP = False + print('BrushNet model type: SD1.5') + else: + is_PP = True + print('PowerPaint model type: SD1.5') + elif brushnet_down_block == 18 and brushnet_mid_block == 2 and brushnet_up_block == 22: + print('BrushNet model type: Loading SDXL') + is_SDXL = True + is_PP = False + else: + raise Exception("Unknown BrushNet model") + + with init_empty_weights(): + if is_SDXL: + brushnet_config = BrushNetModel.load_config(brushnet_xl_config_file) + brushnet_model = BrushNetModel.from_config(brushnet_config) + elif is_PP: + brushnet_config = PowerPaintModel.load_config(powerpaint_config_file) + brushnet_model = PowerPaintModel.from_config(brushnet_config) + else: + brushnet_config = BrushNetModel.load_config(brushnet_config_file) + brushnet_model = BrushNetModel.from_config(brushnet_config) + + if is_PP: + print("PowerPaint model file:", brushnet_file) + else: + print("BrushNet model file:", brushnet_file) + + if dtype == 'float16': + torch_dtype = torch.float16 + elif dtype == 'bfloat16': + torch_dtype = torch.bfloat16 + elif dtype == 'float32': + torch_dtype = torch.float32 + else: + torch_dtype = torch.float64 + + brushnet_model = load_checkpoint_and_dispatch( + brushnet_model, + brushnet_file, + device_map="sequential", + max_memory=None, + offload_folder=None, + offload_state_dict=False, + dtype=torch_dtype, + force_hooks=False, + ) + + if is_PP: + print("PowerPaint model is loaded") + elif is_SDXL: + print("BrushNet SDXL model is loaded") + else: + print("BrushNet SD1.5 model is loaded") + + return ({"brushnet": brushnet_model, "SDXL": is_SDXL, "PP": is_PP, "dtype": torch_dtype}, ) + + +class PowerPaintCLIPLoader: + + @classmethod + def INPUT_TYPES(s): + inpaint_files, inpaint_path = get_files_with_extension('inpaint', ['bin']) + s.inpaint_path = inpaint_path + clip_files, clip_path = get_files_with_extension('clip') + s.clip_path = clip_path + return {"required": + { + "base": (clip_files, ), + "powerpaint": (inpaint_files, ), + }, + } + + CATEGORY = "inpaint" + RETURN_TYPES = ("CLIP",) + RETURN_NAMES = ("clip",) + + FUNCTION = "ppclip_loading" + + def ppclip_loading(self, base, powerpaint): + base_CLIP_file = os.path.join(self.clip_path, base) + pp_CLIP_file = os.path.join(self.inpaint_path, powerpaint) + + pp_clip = comfy.sd.load_clip(ckpt_paths=[base_CLIP_file]) + + print('PowerPaint base CLIP file: ', base_CLIP_file) + + pp_tokenizer = TokenizerWrapper(pp_clip.tokenizer.clip_l.tokenizer) + pp_text_encoder = pp_clip.patcher.model.clip_l.transformer + + add_tokens( + tokenizer = pp_tokenizer, + text_encoder = pp_text_encoder, + placeholder_tokens = ["P_ctxt", "P_shape", "P_obj"], + initialize_tokens = ["a", "a", "a"], + num_vectors_per_token = 10, + ) + + pp_text_encoder.load_state_dict(comfy.utils.load_torch_file(pp_CLIP_file), strict=False) + 
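+ # At this point the tokenizer carries the three PowerPaint placeholder tokens
+ # (P_ctxt, P_shape, P_obj), each expanded to 10 learned vectors, and the
+ # fine-tuned PowerPaint text-encoder weights have been loaded on top of the
+ # base CLIP with strict=False, so unrelated keys are ignored.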
+ print('PowerPaint CLIP file: ', pp_CLIP_file) + + pp_clip.tokenizer.clip_l.tokenizer = pp_tokenizer + pp_clip.patcher.model.clip_l.transformer = pp_text_encoder + + return (pp_clip,) + + +class PowerPaint: + + @classmethod + def INPUT_TYPES(s): + return {"required": + { + "model": ("MODEL",), + "vae": ("VAE", ), + "image": ("IMAGE",), + "mask": ("MASK",), + "powerpaint": ("BRMODEL", ), + "clip": ("CLIP", ), + "positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "fitting" : ("FLOAT", {"default": 1.0, "min": 0.3, "max": 1.0}), + "function": (['text guided', 'shape guided', 'object removal', 'context aware', 'image outpainting'], ), + "scale": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0}), + "start_at": ("INT", {"default": 0, "min": 0, "max": 10000}), + "end_at": ("INT", {"default": 10000, "min": 0, "max": 10000}), + }, + } + + CATEGORY = "inpaint" + RETURN_TYPES = ("MODEL","CONDITIONING","CONDITIONING","LATENT",) + RETURN_NAMES = ("model","positive","negative","latent",) + + FUNCTION = "model_update" + + def model_update(self, model, vae, image, mask, powerpaint, clip, positive, negative, fitting, function, scale, start_at, end_at): + + is_SDXL, is_PP = check_compatibilty(model, powerpaint) + if not is_PP: + raise Exception("BrushNet model was loaded, please use BrushNet node") + + # Make a copy of the model so that we're not patching it everywhere in the workflow. + model = model.clone() + + # prepare image and mask + # no batches for original image and mask + masked_image, mask = prepare_image(image, mask) + + batch = masked_image.shape[0] + #width = masked_image.shape[2] + #height = masked_image.shape[1] + + if hasattr(model.model.model_config, 'latent_format') and hasattr(model.model.model_config.latent_format, 'scale_factor'): + scaling_factor = model.model.model_config.latent_format.scale_factor + else: + scaling_factor = sd15_scaling_factor + + torch_dtype = powerpaint['dtype'] + + # prepare conditioning latents + conditioning_latents = get_image_latents(masked_image, mask, vae, scaling_factor) + conditioning_latents[0] = conditioning_latents[0].to(dtype=torch_dtype).to(powerpaint['brushnet'].device) + conditioning_latents[1] = conditioning_latents[1].to(dtype=torch_dtype).to(powerpaint['brushnet'].device) + + # prepare embeddings + + if function == "object removal": + promptA = "P_ctxt" + promptB = "P_ctxt" + negative_promptA = "P_obj" + negative_promptB = "P_obj" + print('You should add to positive prompt: "empty scene blur"') + #positive = positive + " empty scene blur" + elif function == "context aware": + promptA = "P_ctxt" + promptB = "P_ctxt" + negative_promptA = "" + negative_promptB = "" + #positive = positive + " empty scene" + print('You should add to positive prompt: "empty scene"') + elif function == "shape guided": + promptA = "P_shape" + promptB = "P_ctxt" + negative_promptA = "P_shape" + negative_promptB = "P_ctxt" + elif function == "image outpainting": + promptA = "P_ctxt" + promptB = "P_ctxt" + negative_promptA = "P_obj" + negative_promptB = "P_obj" + #positive = positive + " empty scene" + print('You should add to positive prompt: "empty scene"') + else: + promptA = "P_obj" + promptB = "P_obj" + negative_promptA = "P_obj" + negative_promptB = "P_obj" + + tokens = clip.tokenize(promptA) + prompt_embedsA = clip.encode_from_tokens(tokens, return_pooled=False) + + tokens = clip.tokenize(negative_promptA) + negative_prompt_embedsA = clip.encode_from_tokens(tokens, return_pooled=False) + + tokens = clip.tokenize(promptB) + prompt_embedsB = 
clip.encode_from_tokens(tokens, return_pooled=False) + + tokens = clip.tokenize(negative_promptB) + negative_prompt_embedsB = clip.encode_from_tokens(tokens, return_pooled=False) + + prompt_embeds_pp = (prompt_embedsA * fitting + (1.0 - fitting) * prompt_embedsB).to(dtype=torch_dtype).to(powerpaint['brushnet'].device) + negative_prompt_embeds_pp = (negative_prompt_embedsA * fitting + (1.0 - fitting) * negative_prompt_embedsB).to(dtype=torch_dtype).to(powerpaint['brushnet'].device) + + # unload vae and CLIPs + del vae + del clip + for loaded_model in comfy.model_management.current_loaded_models: + if type(loaded_model.model.model) in ModelsToUnload: + comfy.model_management.current_loaded_models.remove(loaded_model) + loaded_model.model_unload() + del loaded_model + + # apply patch to model + + brushnet_conditioning_scale = scale + control_guidance_start = start_at + control_guidance_end = end_at + + add_brushnet_patch(model, + powerpaint['brushnet'], + torch_dtype, + conditioning_latents, + (brushnet_conditioning_scale, control_guidance_start, control_guidance_end), + negative_prompt_embeds_pp, prompt_embeds_pp, + None, None, None) + + latent = torch.zeros([batch, 4, conditioning_latents[0].shape[2], conditioning_latents[0].shape[3]], device=powerpaint['brushnet'].device) + + return (model, positive, negative, {"samples":latent},) + + +class BrushNet: + + @classmethod + def INPUT_TYPES(s): + return {"required": + { + "model": ("MODEL",), + "vae": ("VAE", ), + "image": ("IMAGE",), + "mask": ("MASK",), + "brushnet": ("BRMODEL", ), + "positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "scale": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0}), + "start_at": ("INT", {"default": 0, "min": 0, "max": 10000}), + "end_at": ("INT", {"default": 10000, "min": 0, "max": 10000}), + }, + } + + CATEGORY = "inpaint" + RETURN_TYPES = ("MODEL","CONDITIONING","CONDITIONING","LATENT",) + RETURN_NAMES = ("model","positive","negative","latent",) + + FUNCTION = "model_update" + + def model_update(self, model, vae, image, mask, brushnet, positive, negative, scale, start_at, end_at): + + is_SDXL, is_PP = check_compatibilty(model, brushnet) + + if is_PP: + raise Exception("PowerPaint model was loaded, please use PowerPaint node") + + # Make a copy of the model so that we're not patching it everywhere in the workflow. 
+ model = model.clone() + + # prepare image and mask + # no batches for original image and mask + masked_image, mask = prepare_image(image, mask) + + batch = masked_image.shape[0] + width = masked_image.shape[2] + height = masked_image.shape[1] + + if hasattr(model.model.model_config, 'latent_format') and hasattr(model.model.model_config.latent_format, 'scale_factor'): + scaling_factor = model.model.model_config.latent_format.scale_factor + elif is_SDXL: + scaling_factor = sdxl_scaling_factor + else: + scaling_factor = sd15_scaling_factor + + torch_dtype = brushnet['dtype'] + + # prepare conditioning latents + conditioning_latents = get_image_latents(masked_image, mask, vae, scaling_factor) + conditioning_latents[0] = conditioning_latents[0].to(dtype=torch_dtype).to(brushnet['brushnet'].device) + conditioning_latents[1] = conditioning_latents[1].to(dtype=torch_dtype).to(brushnet['brushnet'].device) + + # unload vae + del vae + for loaded_model in comfy.model_management.current_loaded_models: + if type(loaded_model.model.model) in ModelsToUnload: + comfy.model_management.current_loaded_models.remove(loaded_model) + loaded_model.model_unload() + del loaded_model + + # prepare embeddings + + prompt_embeds = positive[0][0].to(dtype=torch_dtype).to(brushnet['brushnet'].device) + negative_prompt_embeds = negative[0][0].to(dtype=torch_dtype).to(brushnet['brushnet'].device) + + max_tokens = max(prompt_embeds.shape[1], negative_prompt_embeds.shape[1]) + if prompt_embeds.shape[1] < max_tokens: + multiplier = max_tokens // 77 - prompt_embeds.shape[1] // 77 + prompt_embeds = torch.concat([prompt_embeds] + [prompt_embeds[:,-77:,:]] * multiplier, dim=1) + print('BrushNet: negative prompt more than 75 tokens:', negative_prompt_embeds.shape, 'multiplying prompt_embeds') + if negative_prompt_embeds.shape[1] < max_tokens: + multiplier = max_tokens // 77 - negative_prompt_embeds.shape[1] // 77 + negative_prompt_embeds = torch.concat([negative_prompt_embeds] + [negative_prompt_embeds[:,-77:,:]] * multiplier, dim=1) + print('BrushNet: positive prompt more than 75 tokens:', prompt_embeds.shape, 'multiplying negative_prompt_embeds') + + if len(positive[0]) > 1 and 'pooled_output' in positive[0][1] and positive[0][1]['pooled_output'] is not None: + pooled_prompt_embeds = positive[0][1]['pooled_output'].to(dtype=torch_dtype).to(brushnet['brushnet'].device) + else: + print('BrushNet: positive conditioning has not pooled_output') + if is_SDXL: + print('BrushNet will not produce correct results') + pooled_prompt_embeds = torch.empty([2, 1280], device=brushnet['brushnet'].device).to(dtype=torch_dtype) + + if len(negative[0]) > 1 and 'pooled_output' in negative[0][1] and negative[0][1]['pooled_output'] is not None: + negative_pooled_prompt_embeds = negative[0][1]['pooled_output'].to(dtype=torch_dtype).to(brushnet['brushnet'].device) + else: + print('BrushNet: negative conditioning has not pooled_output') + if is_SDXL: + print('BrushNet will not produce correct results') + negative_pooled_prompt_embeds = torch.empty([1, pooled_prompt_embeds.shape[1]], device=brushnet['brushnet'].device).to(dtype=torch_dtype) + + time_ids = torch.FloatTensor([[height, width, 0., 0., height, width]]).to(dtype=torch_dtype).to(brushnet['brushnet'].device) + + if not is_SDXL: + pooled_prompt_embeds = None + negative_pooled_prompt_embeds = None + time_ids = None + + # apply patch to model + + brushnet_conditioning_scale = scale + control_guidance_start = start_at + control_guidance_end = end_at + + add_brushnet_patch(model, + 
brushnet['brushnet'], + torch_dtype, + conditioning_latents, + (brushnet_conditioning_scale, control_guidance_start, control_guidance_end), + prompt_embeds, negative_prompt_embeds, + pooled_prompt_embeds, negative_pooled_prompt_embeds, time_ids) + + latent = torch.zeros([batch, 4, conditioning_latents[0].shape[2], conditioning_latents[0].shape[3]], device=brushnet['brushnet'].device) + + return (model, positive, negative, {"samples":latent},) + + +class BlendInpaint: + + @classmethod + def INPUT_TYPES(s): + return {"required": + { + "inpaint": ("IMAGE",), + "original": ("IMAGE",), + "mask": ("MASK",), + "kernel": ("INT", {"default": 10, "min": 1, "max": 1000}), + "sigma": ("FLOAT", {"default": 10.0, "min": 0.01, "max": 1000}), + }, + "optional": + { + "origin": ("VECTOR",), + }, + } + + CATEGORY = "inpaint" + RETURN_TYPES = ("IMAGE","MASK",) + RETURN_NAMES = ("image","MASK",) + + FUNCTION = "blend_inpaint" + + def blend_inpaint(self, inpaint: torch.Tensor, original: torch.Tensor, mask, kernel: int, sigma:int, origin=None) -> Tuple[torch.Tensor]: + + original, mask = check_image_mask(original, mask, 'Blend Inpaint') + + if len(inpaint.shape) < 4: + # image tensor shape should be [B, H, W, C], but batch somehow is missing + inpaint = inpaint[None,:,:,:] + + if inpaint.shape[0] < original.shape[0]: + print("Blend Inpaint gets batch of original images (%d) but only (%d) inpaint images" % (original.shape[0], inpaint.shape[0])) + original= original[:inpaint.shape[0],:,:] + mask = mask[:inpaint.shape[0],:,:] + + if inpaint.shape[0] > original.shape[0]: + # batch over inpaint + count = 0 + original_list = [] + mask_list = [] + origin_list = [] + while (count < inpaint.shape[0]): + for i in range(original.shape[0]): + original_list.append(original[i][None,:,:,:]) + mask_list.append(mask[i][None,:,:]) + if origin is not None: + origin_list.append(origin[i][None,:]) + count += 1 + if count >= inpaint.shape[0]: + break + original = torch.concat(original_list, dim=0) + mask = torch.concat(mask_list, dim=0) + if origin is not None: + origin = torch.concat(origin_list, dim=0) + + if kernel % 2 == 0: + kernel += 1 + transform = T.GaussianBlur(kernel_size=(kernel, kernel), sigma=(sigma, sigma)) + + ret = [] + blurred = [] + for i in range(inpaint.shape[0]): + if origin is None: + blurred_mask = transform(mask[i][None,None,:,:]).to(original.device).to(original.dtype) + blurred.append(blurred_mask[0]) + + result = torch.nn.functional.interpolate( + inpaint[i][None,:,:,:].permute(0, 3, 1, 2), + size=( + original[i].shape[0], + original[i].shape[1], + ) + ).permute(0, 2, 3, 1).to(original.device).to(original.dtype) + else: + # got mask from CutForInpaint + height, width, _ = original[i].shape + x0 = origin[i][0].item() + y0 = origin[i][1].item() + + if mask[i].shape[0] < height or mask[i].shape[1] < width: + padded_mask = F.pad(input=mask[i], pad=(x0, width-x0-mask[i].shape[1], + y0, height-y0-mask[i].shape[0]), mode='constant', value=0) + else: + padded_mask = mask[i] + blurred_mask = transform(padded_mask[None,None,:,:]).to(original.device).to(original.dtype) + blurred.append(blurred_mask[0][0]) + + result = F.pad(input=inpaint[i], pad=(0, 0, x0, width-x0-inpaint[i].shape[1], + y0, height-y0-inpaint[i].shape[0]), mode='constant', value=0) + result = result[None,:,:,:].to(original.device).to(original.dtype) + + ret.append(original[i] * (1.0 - blurred_mask[0][0][:,:,None]) + result[0] * blurred_mask[0][0][:,:,None]) + + return (torch.stack(ret), torch.stack(blurred), ) + + +class CutForInpaint: + + 
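+ # Crops a width x height window centered on the masked region from every image
+ # in the batch and returns the crop, the cropped mask and the crop origin
+ # (x0, y0); BlendInpaint uses that origin to paste the inpainted patch back.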
@classmethod + def INPUT_TYPES(s): + return {"required": + { + "image": ("IMAGE",), + "mask": ("MASK",), + "width": ("INT", {"default": 512, "min": 64, "max": 2048}), + "height": ("INT", {"default": 512, "min": 64, "max": 2048}), + }, + } + + CATEGORY = "inpaint" + RETURN_TYPES = ("IMAGE","MASK","VECTOR",) + RETURN_NAMES = ("image","mask","origin",) + + FUNCTION = "cut_for_inpaint" + + def cut_for_inpaint(self, image: torch.Tensor, mask: torch.Tensor, width: int, height: int): + + image, mask = check_image_mask(image, mask, 'BrushNet') + + ret = [] + msk = [] + org = [] + for i in range(image.shape[0]): + x0, y0, w, h = cut_with_mask(mask[i], width, height) + ret.append((image[i][y0:y0+h,x0:x0+w,:])) + msk.append((mask[i][y0:y0+h,x0:x0+w])) + org.append(torch.IntTensor([x0,y0])) + + return (torch.stack(ret), torch.stack(msk), torch.stack(org), ) + + +#### Utility function + +def get_files_with_extension(folder_name, extension=['safetensors']): + + try: + inpaint_path = folder_paths.get_folder_paths(folder_name)[0] + except: + inpaint_path = os.path.join(folder_paths.models_dir, folder_name) + + if not os.path.isdir(inpaint_path): + inpaint_path = os.path.join(folder_paths.base_path, inpaint_path) + if not os.path.isdir(inpaint_path): + return ([], '') + #raise Exception("Can't find", folder_name, " path") + + while not inpaint_path[-1].isalpha(): + inpaint_path = inpaint_path[:-1] + + abs_list = [] + for x in os.walk(inpaint_path): + for name in x[2]: + for ext in extension: + if ext in name: + abs_list.append(os.path.join(x[0], name)) + + abs_list = sorted(list(set(abs_list))) + + names = [] + for x in abs_list: + remain = x + y = '' + while remain != inpaint_path: + remain, folder = os.path.split(remain) + if len(y) > 0: + y = os.path.join(folder, y) + else: + y = folder + names.append(y) + return names, inpaint_path + + +def brushnet_blocks(sd): + brushnet_down_block = 0 + brushnet_mid_block = 0 + brushnet_up_block = 0 + for key in sd: + if 'brushnet_down_block' in key: + brushnet_down_block += 1 + if 'brushnet_mid_block' in key: + brushnet_mid_block += 1 + if 'brushnet_up_block' in key: + brushnet_up_block += 1 + return (brushnet_down_block, brushnet_mid_block, brushnet_up_block, len(sd)) + + +# Check models compatibility +def check_compatibilty(model, brushnet): + is_SDXL = False + is_PP = False + if isinstance(model.model.model_config, comfy.supported_models.SD15): + print('Base model type: SD1.5') + is_SDXL = False + if brushnet["SDXL"]: + raise Exception("Base model is SD15, but BrushNet is SDXL type") + if brushnet["PP"]: + is_PP = True + elif isinstance(model.model.model_config, comfy.supported_models.SDXL): + print('Base model type: SDXL') + is_SDXL = True + if not brushnet["SDXL"]: + raise Exception("Base model is SDXL, but BrushNet is SD15 type") + else: + print('Base model type: ', type(model.model.model_config)) + raise Exception("Unsupported model type: " + str(type(model.model.model_config))) + + return (is_SDXL, is_PP) + + +def check_image_mask(image, mask, name): + if len(image.shape) < 4: + # image tensor shape should be [B, H, W, C], but batch somehow is missing + image = image[None,:,:,:] + + if len(mask.shape) > 3: + # mask tensor shape should be [B, H, W] but we get [B, H, W, C], image may be? 
+ # take first mask, red channel + mask = (mask[:,:,:,0])[:,:,:] + elif len(mask.shape) < 3: + # mask tensor shape should be [B, H, W] but batch somehow is missing + mask = mask[None,:,:] + + if image.shape[0] > mask.shape[0]: + print(name, "gets batch of images (%d) but only %d masks" % (image.shape[0], mask.shape[0])) + if mask.shape[0] == 1: + print(name, "will copy the mask to fill batch") + mask = torch.cat([mask] * image.shape[0], dim=0) + else: + print(name, "will add empty masks to fill batch") + empty_mask = torch.zeros([image.shape[0] - mask.shape[0], mask.shape[1], mask.shape[2]]) + mask = torch.cat([mask, empty_mask], dim=0) + elif image.shape[0] < mask.shape[0]: + print(name, "gets batch of images (%d) but too many (%d) masks" % (image.shape[0], mask.shape[0])) + mask = mask[:image.shape[0],:,:] + + return (image, mask) + +# Prepare image and mask +def prepare_image(image, mask): + + image, mask = check_image_mask(image, mask, 'BrushNet') + + print("BrushNet image.shape =", image.shape, "mask.shape =", mask.shape) + + if mask.shape[2] != image.shape[2] or mask.shape[1] != image.shape[1]: + raise Exception("Image and mask should be the same size") + + # As a suggestion of inferno46n2 (https://github.com/nullquant/ComfyUI-BrushNet/issues/64) + mask = mask.round() + + masked_image = image * (1.0 - mask[:,:,:,None]) + + return (masked_image, mask) + + +def cut_with_mask(mask, width, height): + iy, ix = (mask == 1).nonzero(as_tuple=True) + + h0, w0 = mask.shape + + if iy.numel() == 0: + x_c = w0 / 2.0 + y_c = h0 / 2.0 + else: + x_min = ix.min().item() + x_max = ix.max().item() + y_min = iy.min().item() + y_max = iy.max().item() + + if x_max - x_min > width or y_max - y_min > height: + raise Exception("Mask is bigger than provided dimensions") + + x_c = (x_min + x_max) / 2.0 + y_c = (y_min + y_max) / 2.0 + + width2 = width / 2.0 + height2 = height / 2.0 + + if w0 <= width: + x0 = 0 + w = w0 + else: + x0 = max(0, x_c - width2) + w = width + if x0 + width > w0: + x0 = w0 - width + + if h0 <= height: + y0 = 0 + h = h0 + else: + y0 = max(0, y_c - height2) + h = height + if y0 + height > h0: + y0 = h0 - height + + return (int(x0), int(y0), int(w), int(h)) + + +# Prepare conditioning_latents +@torch.inference_mode() +def get_image_latents(masked_image, mask, vae, scaling_factor): + processed_image = masked_image.to(vae.device) + image_latents = vae.encode(processed_image[:,:,:,:3]) * scaling_factor + processed_mask = 1. 
- mask[:,None,:,:] + interpolated_mask = torch.nn.functional.interpolate( + processed_mask, + size=( + image_latents.shape[-2], + image_latents.shape[-1] + ) + ) + interpolated_mask = interpolated_mask.to(image_latents.device) + + conditioning_latents = [image_latents, interpolated_mask] + + print('BrushNet CL: image_latents shape =', image_latents.shape, 'interpolated_mask shape =', interpolated_mask.shape) + + return conditioning_latents + + +# Main function where magic happens +@torch.inference_mode() +def brushnet_inference(x, timesteps, transformer_options): + if 'model_patch' not in transformer_options: + print('BrushNet inference: there is no model_patch key in transformer_options') + return ([], 0, []) + mp = transformer_options['model_patch'] + if 'brushnet' not in mp: + print('BrushNet inference: there is no brushnet key in mdel_patch') + return ([], 0, []) + bo = mp['brushnet'] + if 'model' not in bo: + print('BrushNet inference: there is no model key in brushnet') + return ([], 0, []) + brushnet = bo['model'] + if not (isinstance(brushnet, BrushNetModel) or isinstance(brushnet, PowerPaintModel)): + print('BrushNet model is not a BrushNetModel class') + return ([], 0, []) + + torch_dtype = bo['dtype'] + cl_list = bo['latents'] + brushnet_conditioning_scale, control_guidance_start, control_guidance_end = bo['controls'] + pe = bo['prompt_embeds'] + npe = bo['negative_prompt_embeds'] + ppe, nppe, time_ids = bo['add_embeds'] + + #do_classifier_free_guidance = mp['free_guidance'] + do_classifier_free_guidance = len(transformer_options['cond_or_uncond']) > 1 + + x = x.detach().clone() + x = x.to(torch_dtype).to(brushnet.device) + + timesteps = timesteps.detach().clone() + timesteps = timesteps.to(torch_dtype).to(brushnet.device) + + total_steps = mp['total_steps'] + step = mp['step'] + + added_cond_kwargs = {} + + if do_classifier_free_guidance and step == 0: + print('BrushNet inference: do_classifier_free_guidance is True') + + sub_idx = None + if 'ad_params' in transformer_options and 'sub_idxs' in transformer_options['ad_params']: + sub_idx = transformer_options['ad_params']['sub_idxs'] + + # we have batch input images + batch = cl_list[0].shape[0] + # we have incoming latents + latents_incoming = x.shape[0] + # and we already got some + latents_got = bo['latent_id'] + if step == 0 or batch > 1: + print('BrushNet inference, step = %d: image batch = %d, got %d latents, starting from %d' \ + % (step, batch, latents_incoming, latents_got)) + + image_latents = [] + masks = [] + prompt_embeds = [] + negative_prompt_embeds = [] + pooled_prompt_embeds = [] + negative_pooled_prompt_embeds = [] + if sub_idx: + # AnimateDiff indexes detected + if step == 0: + print('BrushNet inference: AnimateDiff indexes detected and applied') + + batch = len(sub_idx) + + if do_classifier_free_guidance: + for i in sub_idx: + image_latents.append(cl_list[0][i][None,:,:,:]) + masks.append(cl_list[1][i][None,:,:,:]) + prompt_embeds.append(pe) + negative_prompt_embeds.append(npe) + pooled_prompt_embeds.append(ppe) + negative_pooled_prompt_embeds.append(nppe) + for i in sub_idx: + image_latents.append(cl_list[0][i][None,:,:,:]) + masks.append(cl_list[1][i][None,:,:,:]) + else: + for i in sub_idx: + image_latents.append(cl_list[0][i][None,:,:,:]) + masks.append(cl_list[1][i][None,:,:,:]) + prompt_embeds.append(pe) + pooled_prompt_embeds.append(ppe) + else: + # do_classifier_free_guidance = 2 passes, 1st pass is cond, 2nd is uncond + continue_batch = True + for i in range(latents_incoming): + number = latents_got 
+ i + if number < batch: + # 1st pass, cond + image_latents.append(cl_list[0][number][None,:,:,:]) + masks.append(cl_list[1][number][None,:,:,:]) + prompt_embeds.append(pe) + pooled_prompt_embeds.append(ppe) + elif do_classifier_free_guidance and number < batch * 2: + # 2nd pass, uncond + image_latents.append(cl_list[0][number-batch][None,:,:,:]) + masks.append(cl_list[1][number-batch][None,:,:,:]) + negative_prompt_embeds.append(npe) + negative_pooled_prompt_embeds.append(nppe) + else: + # latent batch + image_latents.append(cl_list[0][0][None,:,:,:]) + masks.append(cl_list[1][0][None,:,:,:]) + prompt_embeds.append(pe) + pooled_prompt_embeds.append(ppe) + latents_got = -i + continue_batch = False + + if continue_batch: + # we don't have full batch yet + if do_classifier_free_guidance: + if number < batch * 2 - 1: + bo['latent_id'] = number + 1 + else: + bo['latent_id'] = 0 + else: + if number < batch - 1: + bo['latent_id'] = number + 1 + else: + bo['latent_id'] = 0 + else: + bo['latent_id'] = 0 + + cl = [] + for il, m in zip(image_latents, masks): + cl.append(torch.concat([il, m], dim=1)) + cl2apply = torch.concat(cl, dim=0) + + conditioning_latents = cl2apply.to(torch_dtype).to(brushnet.device) + + prompt_embeds.extend(negative_prompt_embeds) + prompt_embeds = torch.concat(prompt_embeds, dim=0).to(torch_dtype).to(brushnet.device) + + if ppe is not None: + added_cond_kwargs = {} + added_cond_kwargs['time_ids'] = torch.concat([time_ids] * latents_incoming, dim = 0).to(torch_dtype).to(brushnet.device) + + pooled_prompt_embeds.extend(negative_pooled_prompt_embeds) + pooled_prompt_embeds = torch.concat(pooled_prompt_embeds, dim=0).to(torch_dtype).to(brushnet.device) + added_cond_kwargs['text_embeds'] = pooled_prompt_embeds + else: + added_cond_kwargs = None + + if x.shape[2] != conditioning_latents.shape[2] or x.shape[3] != conditioning_latents.shape[3]: + if step == 0: + print('BrushNet inference: image', conditioning_latents.shape, 'and latent', x.shape, 'have different size, resizing image') + conditioning_latents = torch.nn.functional.interpolate( + conditioning_latents, size=( + x.shape[2], + x.shape[3], + ), mode='bicubic', + ).to(torch_dtype).to(brushnet.device) + + if step == 0: + print('BrushNet inference: sample', x.shape, ', CL', conditioning_latents.shape) + + if step < control_guidance_start or step > control_guidance_end: + cond_scale = 0.0 + else: + cond_scale = brushnet_conditioning_scale + + return brushnet(x, + encoder_hidden_states=prompt_embeds, + brushnet_cond=conditioning_latents, + timestep = timesteps, + conditioning_scale=cond_scale, + guess_mode=False, + added_cond_kwargs=added_cond_kwargs, + return_dict=False, + ) + + +# This is main patch function +def add_brushnet_patch(model, brushnet, torch_dtype, conditioning_latents, + controls, + prompt_embeds, negative_prompt_embeds, + pooled_prompt_embeds, negative_pooled_prompt_embeds, time_ids): + + is_SDXL = isinstance(model.model.model_config, comfy.supported_models.SDXL) + + if is_SDXL: + input_blocks = [[0, comfy.ops.disable_weight_init.Conv2d], + [1, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [2, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [3, comfy.ldm.modules.diffusionmodules.openaimodel.Downsample], + [4, comfy.ldm.modules.attention.SpatialTransformer], + [5, comfy.ldm.modules.attention.SpatialTransformer], + [6, comfy.ldm.modules.diffusionmodules.openaimodel.Downsample], + [7, comfy.ldm.modules.attention.SpatialTransformer], + [8, comfy.ldm.modules.attention.SpatialTransformer]] + 
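+ # Each [index, layer type] pair names the layer inside the corresponding
+ # ComfyUI UNet block whose output should receive a BrushNet residual;
+ # last_layer_index() below resolves a pair to the actual layer position.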
middle_block = [0, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock] + output_blocks = [[0, comfy.ldm.modules.attention.SpatialTransformer], + [1, comfy.ldm.modules.attention.SpatialTransformer], + [2, comfy.ldm.modules.attention.SpatialTransformer], + [2, comfy.ldm.modules.diffusionmodules.openaimodel.Upsample], + [3, comfy.ldm.modules.attention.SpatialTransformer], + [4, comfy.ldm.modules.attention.SpatialTransformer], + [5, comfy.ldm.modules.attention.SpatialTransformer], + [5, comfy.ldm.modules.diffusionmodules.openaimodel.Upsample], + [6, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [7, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [8, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock]] + else: + input_blocks = [[0, comfy.ops.disable_weight_init.Conv2d], + [1, comfy.ldm.modules.attention.SpatialTransformer], + [2, comfy.ldm.modules.attention.SpatialTransformer], + [3, comfy.ldm.modules.diffusionmodules.openaimodel.Downsample], + [4, comfy.ldm.modules.attention.SpatialTransformer], + [5, comfy.ldm.modules.attention.SpatialTransformer], + [6, comfy.ldm.modules.diffusionmodules.openaimodel.Downsample], + [7, comfy.ldm.modules.attention.SpatialTransformer], + [8, comfy.ldm.modules.attention.SpatialTransformer], + [9, comfy.ldm.modules.diffusionmodules.openaimodel.Downsample], + [10, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [11, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock]] + middle_block = [0, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock] + output_blocks = [[0, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [1, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [2, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [2, comfy.ldm.modules.diffusionmodules.openaimodel.Upsample], + [3, comfy.ldm.modules.attention.SpatialTransformer], + [4, comfy.ldm.modules.attention.SpatialTransformer], + [5, comfy.ldm.modules.attention.SpatialTransformer], + [5, comfy.ldm.modules.diffusionmodules.openaimodel.Upsample], + [6, comfy.ldm.modules.attention.SpatialTransformer], + [7, comfy.ldm.modules.attention.SpatialTransformer], + [8, comfy.ldm.modules.attention.SpatialTransformer], + [8, comfy.ldm.modules.diffusionmodules.openaimodel.Upsample], + [9, comfy.ldm.modules.attention.SpatialTransformer], + [10, comfy.ldm.modules.attention.SpatialTransformer], + [11, comfy.ldm.modules.attention.SpatialTransformer]] + + def last_layer_index(block, tp): + layer_list = [] + for layer in block: + layer_list.append(type(layer)) + layer_list.reverse() + if tp not in layer_list: + return -1, layer_list.reverse() + return len(layer_list) - 1 - layer_list.index(tp), layer_list + + def brushnet_forward(model, x, timesteps, transformer_options, control): + if 'brushnet' not in transformer_options['model_patch']: + input_samples = [] + mid_sample = 0 + output_samples = [] + else: + # brushnet inference + input_samples, mid_sample, output_samples = brushnet_inference(x, timesteps, transformer_options) + + # give additional samples to blocks + for i, tp in input_blocks: + idx, layer_list = last_layer_index(model.input_blocks[i], tp) + if idx < 0: + print("BrushNet can't find", tp, "layer in", i,"input block:", layer_list) + continue + model.input_blocks[i][idx].add_sample_after = input_samples.pop(0) if input_samples else 0 + + idx, layer_list = last_layer_index(model.middle_block, middle_block[1]) + if idx < 0: + print("BrushNet can't find", middle_block[1], "layer in middle block", layer_list) + 
model.middle_block[idx].add_sample_after = mid_sample + + for i, tp in output_blocks: + idx, layer_list = last_layer_index(model.output_blocks[i], tp) + if idx < 0: + print("BrushNet can't find", tp, "layer in", i,"outnput block:", layer_list) + continue + model.output_blocks[i][idx].add_sample_after = output_samples.pop(0) if output_samples else 0 + + patch_model_function_wrapper(model, brushnet_forward) + + to = add_model_patch_option(model) + mp = to['model_patch'] + if 'brushnet' not in mp: + mp['brushnet'] = {} + bo = mp['brushnet'] + + bo['model'] = brushnet + bo['dtype'] = torch_dtype + bo['latents'] = conditioning_latents + bo['controls'] = controls + bo['prompt_embeds'] = prompt_embeds + bo['negative_prompt_embeds'] = negative_prompt_embeds + bo['add_embeds'] = (pooled_prompt_embeds, negative_pooled_prompt_embeds, time_ids) + bo['latent_id'] = 0 + + # patch layers `forward` so we can apply brushnet + def forward_patched_by_brushnet(self, x, *args, **kwargs): + h = self.original_forward(x, *args, **kwargs) + if hasattr(self, 'add_sample_after') and type(self): + to_add = self.add_sample_after + if torch.is_tensor(to_add): + # interpolate due to RAUNet + if h.shape[2] != to_add.shape[2] or h.shape[3] != to_add.shape[3]: + to_add = torch.nn.functional.interpolate(to_add, size=(h.shape[2], h.shape[3]), mode='bicubic') + h += to_add.to(h.dtype).to(h.device) + else: + h += self.add_sample_after + self.add_sample_after = 0 + return h + + for i, block in enumerate(model.model.diffusion_model.input_blocks): + for j, layer in enumerate(block): + if not hasattr(layer, 'original_forward'): + layer.original_forward = layer.forward + layer.forward = types.MethodType(forward_patched_by_brushnet, layer) + layer.add_sample_after = 0 + + for j, layer in enumerate(model.model.diffusion_model.middle_block): + if not hasattr(layer, 'original_forward'): + layer.original_forward = layer.forward + layer.forward = types.MethodType(forward_patched_by_brushnet, layer) + layer.add_sample_after = 0 + + for i, block in enumerate(model.model.diffusion_model.output_blocks): + for j, layer in enumerate(block): + if not hasattr(layer, 'original_forward'): + layer.original_forward = layer.forward + layer.forward = types.MethodType(forward_patched_by_brushnet, layer) + layer.add_sample_after = 0 diff --git a/ComfyUI-BrushNet/model_patch.py b/ComfyUI-BrushNet/model_patch.py new file mode 100644 index 0000000000000000000000000000000000000000..5103bd29ceb2f741ae678151bed25e224f74fd00 --- /dev/null +++ b/ComfyUI-BrushNet/model_patch.py @@ -0,0 +1,134 @@ +import torch +import comfy + + +# Check and add 'model_patch' to model.model_options['transformer_options'] +def add_model_patch_option(model): + if 'transformer_options' not in model.model_options: + model.model_options['transformer_options'] = {} + to = model.model_options['transformer_options'] + if "model_patch" not in to: + to["model_patch"] = {} + return to + + +# Patch model with model_function_wrapper +def patch_model_function_wrapper(model, forward_patch): + + def brushnet_model_function_wrapper(apply_model_method, options_dict): + to = options_dict['c']['transformer_options'] + + control = None + if 'control' in options_dict['c']: + control = options_dict['c']['control'] + + x = options_dict['input'] + timestep = options_dict['timestep'] + + # check if there are patches to execute + if 'model_patch' not in to or 'forward' not in to['model_patch']: + return apply_model_method(x, timestep, **options_dict['c']) + + mp = to['model_patch'] + unet = mp['unet'] + + 
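+ # Recover the current sampling step by matching this call's sigma against the
+ # full sigma schedule stored by modified_sample() in model_patch['all_sigmas'];
+ # the BrushNet/RAUNet patches use the step index to decide when they are active.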
all_sigmas = mp['all_sigmas'] + sigma = to['sigmas'][0].item() + total_steps = all_sigmas.shape[0] - 1 + step = torch.argmin((all_sigmas - sigma).abs()).item() + + mp['step'] = step + mp['total_steps'] = total_steps + + # comfy.model_base.apply_model + xc = model.model.model_sampling.calculate_input(timestep, x) + if 'c_concat' in options_dict['c'] and options_dict['c']['c_concat'] is not None: + xc = torch.cat([xc] + [options_dict['c']['c_concat']], dim=1) + t = model.model.model_sampling.timestep(timestep).float() + # execute all patches + for method in mp['forward']: + method(unet, xc, t, to, control) + + return apply_model_method(x, timestep, **options_dict['c']) + + if "model_function_wrapper" in model.model_options and model.model_options["model_function_wrapper"]: + print('BrushNet is going to replace existing model_function_wrapper:', model.model_options["model_function_wrapper"]) + model.set_model_unet_function_wrapper(brushnet_model_function_wrapper) + + to = add_model_patch_option(model) + mp = to['model_patch'] + + if isinstance(model.model.model_config, comfy.supported_models.SD15): + mp['SDXL'] = False + elif isinstance(model.model.model_config, comfy.supported_models.SDXL): + mp['SDXL'] = True + else: + print('Base model type: ', type(model.model.model_config)) + raise Exception("Unsupported model type: ", type(model.model.model_config)) + + if 'forward' not in mp: + mp['forward'] = [forward_patch] + else: + mp['forward'].append(forward_patch) + + mp['unet'] = model.model.diffusion_model + mp['step'] = 0 + mp['total_steps'] = 1 + + # apply patches to code + if comfy.samplers.sample.__doc__ is None or 'BrushNet' not in comfy.samplers.sample.__doc__: + comfy.samplers.original_sample = comfy.samplers.sample + comfy.samplers.sample = modified_sample + + if comfy.ldm.modules.diffusionmodules.openaimodel.apply_control.__doc__ is None or \ + 'BrushNet' not in comfy.ldm.modules.diffusionmodules.openaimodel.apply_control.__doc__: + comfy.ldm.modules.diffusionmodules.openaimodel.original_apply_control = comfy.ldm.modules.diffusionmodules.openaimodel.apply_control + comfy.ldm.modules.diffusionmodules.openaimodel.apply_control = modified_apply_control + + +# Model needs current step number and cfg at inference step. It is possible to write a custom KSampler but I'd like to use ComfyUI's one. 
+# The first versions had modified_common_ksampler, but it broke custom KSampler nodes +def modified_sample(model, noise, positive, negative, cfg, device, sampler, sigmas, model_options={}, + latent_image=None, denoise_mask=None, callback=None, disable_pbar=False, seed=None): + ''' + Modified by BrushNet nodes + ''' + cfg_guider = comfy.samplers.CFGGuider(model) + cfg_guider.set_conds(positive, negative) + cfg_guider.set_cfg(cfg) + + ### Modified part ###################################################################### + # + to = add_model_patch_option(model) + to['model_patch']['all_sigmas'] = sigmas + # + #sigma_start = model.get_model_object("model_sampling").percent_to_sigma(start_at) + #sigma_end = model.get_model_object("model_sampling").percent_to_sigma(end_at) + # + # + #if math.isclose(cfg, 1.0) and model_options.get("disable_cfg1_optimization", False) == False: + # to['model_patch']['free_guidance'] = False + #else: + # to['model_patch']['free_guidance'] = True + # + ####################################################################################### + + return cfg_guider.sample(noise, latent_image, sampler, sigmas, denoise_mask, callback, disable_pbar, seed) + + +# To use Controlnet with RAUNet it is much easier to modify apply_control a little +def modified_apply_control(h, control, name): + ''' + Modified by BrushNet nodes + ''' + if control is not None and name in control and len(control[name]) > 0: + ctrl = control[name].pop() + if ctrl is not None: + if h.shape[2] != ctrl.shape[2] or h.shape[3] != ctrl.shape[3]: + ctrl = torch.nn.functional.interpolate(ctrl, size=(h.shape[2], h.shape[3]), mode='bicubic').to(h.dtype).to(h.device) + try: + h += ctrl + except: + print.warning("warning control could not be applied {} {}".format(h.shape, ctrl.shape)) + return h + diff --git a/ComfyUI-BrushNet/raunet_nodes.py b/ComfyUI-BrushNet/raunet_nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..8c1b0d66708eded76997bd25a1f693922ad49bad --- /dev/null +++ b/ComfyUI-BrushNet/raunet_nodes.py @@ -0,0 +1,158 @@ +import torch.nn.functional as F +import comfy + +from .model_patch import add_model_patch_option, patch_model_function_wrapper + + + +class RAUNet: + + @classmethod + def INPUT_TYPES(s): + return {"required": + { + "model": ("MODEL",), + "du_start": ("INT", {"default": 0, "min": 0, "max": 10000}), + "du_end": ("INT", {"default": 4, "min": 0, "max": 10000}), + "xa_start": ("INT", {"default": 4, "min": 0, "max": 10000}), + "xa_end": ("INT", {"default": 10, "min": 0, "max": 10000}), + }, + } + + CATEGORY = "inpaint" + RETURN_TYPES = ("MODEL",) + RETURN_NAMES = ("model",) + + FUNCTION = "model_update" + + def model_update(self, model, du_start, du_end, xa_start, xa_end): + + model = model.clone() + + add_raunet_patch(model, + du_start, + du_end, + xa_start, + xa_end) + + return (model,) + + +# This is main patch function +def add_raunet_patch(model, du_start, du_end, xa_start, xa_end): + + def raunet_forward(model, x, timesteps, transformer_options, control): + if 'model_patch' not in transformer_options: + print("RAUNet: 'model_patch' not in transformer_options, skip") + return + + mp = transformer_options['model_patch'] + is_SDXL = mp['SDXL'] + + if is_SDXL and type(model.input_blocks[6][0]) != comfy.ldm.modules.diffusionmodules.openaimodel.Downsample: + print('RAUNet: model is SDXL, but input[6] != Downsample, skip') + return + + if not is_SDXL and type(model.input_blocks[3][0]) != comfy.ldm.modules.diffusionmodules.openaimodel.Downsample: + print('RAUNet: 
+
+def in_xattn_patch(h, transformer_options):
+    # both SDXL and SD15 = (input,4)
+    if transformer_options["block"] != ("input", 4):
+        # wrong block
+        return h
+    if 'model_patch' not in transformer_options:
+        print("RAUNet (i-x-p): 'model_patch' not in transformer_options")
+        return h
+    mp = transformer_options['model_patch']
+    if 'raunet' not in mp:
+        print("RAUNet (i-x-p): 'raunet' not in model_patch options")
+        return h
+
+    step = mp['step']
+    ro = mp['raunet']
+    xa_start = ro['xa_start']
+    xa_end = ro['xa_end']
+
+    if step < xa_start or step >= xa_end:
+        return h
+    h = F.avg_pool2d(h, kernel_size=(2,2))
+    return h
+
+
+def out_xattn_patch(h, hsp, transformer_options):
+    if 'model_patch' not in transformer_options:
+        print("RAUNet (o-x-p): 'model_patch' not in transformer_options")
+        return h, hsp
+    mp = transformer_options['model_patch']
+    if 'raunet' not in mp:
+        print("RAUNet (o-x-p): 'raunet' not in model_patch options")
+        return h, hsp
+
+    step = mp['step']
+    is_SDXL = mp['SDXL']
+    ro = mp['raunet']
+    xa_start = ro['xa_start']
+    xa_end = ro['xa_end']
+
+    if is_SDXL:
+        if transformer_options["block"] != ("output", 5):
+            # wrong block
+            return h, hsp
+    else:
+        if transformer_options["block"] != ("output", 8):
+            # wrong block
+            return h, hsp
+
+    if step < xa_start or step >= xa_end:
+        return h, hsp
+    #error in hidiffusion codebase, size * 2 for particular sizes only
+    #re_size = (int(h.shape[-2] * 2), int(h.shape[-1] * 2))
+    re_size = (hsp.shape[-2], hsp.shape[-1])
+    h = F.interpolate(h, size=re_size, mode='bicubic')
+
+    return h, hsp
+
+
diff --git a/ComfyUI-BrushNet/requirements.txt b/ComfyUI-BrushNet/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..14e60f27e893ee522ab62a0ac1bcf4a822085a1c
--- /dev/null
+++ b/ComfyUI-BrushNet/requirements.txt
@@ -0,0 +1,3 @@
+diffusers>=0.27.0
+accelerate>=0.29.0
+peft>=0.7.0
diff --git a/ComfyUI-Easy-Use/LICENSE b/ComfyUI-Easy-Use/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..f288702d2fa16d3cdf0035b15a9fcbc552cd88e7
--- /dev/null
+++ b/ComfyUI-Easy-Use/LICENSE
@@ -0,0 +1,674 @@
+                    GNU GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc.
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+ + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. 
Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. 
For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. 
+ + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. 
You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. 
+ + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. 
+ + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. 
+ + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. 
+ + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". 
+ + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. diff --git a/ComfyUI-Easy-Use/README.ZH_CN.md b/ComfyUI-Easy-Use/README.ZH_CN.md new file mode 100644 index 0000000000000000000000000000000000000000..6a2ae9735fcb4f08d7f5cdcd837e5f1677540e04 --- /dev/null +++ b/ComfyUI-Easy-Use/README.ZH_CN.md @@ -0,0 +1,459 @@ +![comfyui-easy-use](https://github.com/user-attachments/assets/9b7a5e44-f5e2-4c27-aed2-d0e6b50c46bb) + +
+视频介绍 | +文档 (康明孙) | +工作流合集 | +捐助 +

+ + +
+ +**ComfyUI-Easy-Use** 是一个化繁为简的节点整合包, 在 [tinyterraNodes](https://github.com/TinyTerra/ComfyUI_tinyterraNodes) 的基础上进行延展,并针对了诸多主流的节点包做了整合与优化,以达到更快更方便使用ComfyUI的目的,在保证自由度的同时还原了本属于Stable Diffusion的极致畅快出图体验。 + +## 👨🏻‍🎨 特色介绍 + +- 沿用了 [tinyterraNodes](https://github.com/TinyTerra/ComfyUI_tinyterraNodes) 的思路,大大减少了折腾工作流的时间成本。 +- UI界面美化,首次安装的用户,如需使用UI主题,请在 Settings -> Color Palette 中自行切换主题并**刷新页面**即可 +- 增加了预采样参数配置的节点,可与采样节点分离,更方便预览。 +- 支持通配符与Lora的提示词节点,如需使用Lora Block Weight用法,需先保证自定义节点包中安装了 [ComfyUI-Inspire-Pack](https://github.com/ltdrdata/ComfyUI-Inspire-Pack) +- 可多选的风格化提示词选择器,默认是Fooocus的样式json,可自定义json放在styles底下,samples文件夹里可放预览图(名称和name一致,图片文件名如有空格需转为下划线'_') +- 加载器可开启A1111提示词风格模式,可重现与webui生成近乎相同的图像,需先安装 [ComfyUI_smZNodes](https://github.com/shiimizu/ComfyUI_smZNodes) +- 可使用`easy latentNoisy`或`easy preSamplingNoiseIn`节点实现对潜空间的噪声注入 +- 简化 SD1.x、SD2.x、SDXL、SVD、Zero123等流程 +- 简化 Stable Cascade [示例参考](https://github.com/yolain/ComfyUI-Yolain-Workflows?tab=readme-ov-file#1-13-stable-cascade) +- 简化 Layer Diffuse [示例参考](https://github.com/yolain/ComfyUI-Yolain-Workflows?tab=readme-ov-file#2-3-layerdiffusion) +- 简化 InstantID [示例参考](https://github.com/yolain/ComfyUI-Yolain-Workflows?tab=readme-ov-file#2-2-instantid), 需先保证自定义节点包中安装了 [ComfyUI_InstantID](https://github.com/cubiq/ComfyUI_InstantID) +- 简化 IPAdapter, 需先保证自定义节点包中安装最新版v2的 [ComfyUI_IPAdapter_plus](https://github.com/cubiq/ComfyUI_IPAdapter_plus) +- 扩展 XYplot 的可用性 +- 整合了Fooocus Inpaint功能 +- 整合了常用的逻辑计算、转换类型、展示所有类型等 +- 支持节点上checkpoint、lora模型子目录分类及预览图 (请在设置中开启上下文菜单嵌套子目录) +- 支持BriaAI的RMBG-1.4模型的背景去除节点,[技术参考](https://huggingface.co/briaai/RMBG-1.4) +- 支持 强制清理comfyUI模型显存占用 +- 支持Stable Diffusion 3 多账号API节点 +- 支持IC-Light的应用 [示例参考](https://github.com/yolain/ComfyUI-Yolain-Workflows?tab=readme-ov-file#2-5-ic-light) | [代码整合来源](https://github.com/huchenlei/ComfyUI-IC-Light) | [技术参考](https://github.com/lllyasviel/IC-Light) +- 中文提示词自动识别,使用[opus-mt-zh-en模型](https://huggingface.co/Helsinki-NLP/opus-mt-zh-en) +- 支持 sd3 模型 +- 支持 kolors 模型 +- 支持 flux 模型 + +## 👨🏻‍🔧 安装 + +1. 将存储库克隆到 **custom_nodes** 目录并安装依赖 +```shell +#1. git下载 +git clone https://github.com/yolain/ComfyUI-Easy-Use +#2. 
安装依赖 +双击install.bat安装依赖 +``` + +## 👨🏻‍🚀 计划 + +- [x] 更新便于维护的新前端代码 + - [x] 使用sass维护css样式 + - [x] 对原有扩展进行优化 + - [x] 增加新的组件(如节点时间统计等) +- [ ] 在[ComfyUI-Yolain-Workflows](https://github.com/yolain/ComfyUI-Yolain-Workflows)中上传更多的工作流(如kolors,sd3等),并更新english版本的readme +- [ ] 更详细功能介绍的 gitbook + +## 📜 更新日志 + +**v1.2.2** + +- 增加 v2 版本新前端代码 +- 增加 `easy fluxLoader` +- 增加 `controlnetApply` 相关节点对sd3和hunyuanDiT的支持 + +**v1.2.1** + +- 增加 `easy ipadapterApplyFaceIDKolors` +- `easy ipadapterApply` 和 `easy ipadapterApplyADV` 增加 **PLUS (kolors genernal)** 和 **FACEID PLUS KOLORS** 预置项 +- `easy imageRemBg` 增加 **inspyrenet** 选项 +- 增加 `easy controlnetLoader++` +- 去除 `easy positive` `easy negative` 等prompt节点的自动将中文翻译功能,自动翻译仅在 `easy a1111Loader` 等不支持中文TE的加载器中生效 +- 增加 `easy kolorsLoader` - 可灵加载器,参考了 [MinusZoneAI](https://github.com/MinusZoneAI/ComfyUI-Kolors-MZ) 和 [kijai](https://github.com/kijai/ComfyUI-KwaiKolorsWrapper) 的代码。 + +**v1.2.0** + +- 增加 `easy pulIDApply` 和 `easy pulIDApplyADV` +- 增加 `easy hunyuanDiTLoader` 和 `easy pixArtLoader` +- 当新菜单的位置在上或者下时增加上 crystools 的显示,推荐开两个就好(如果后续crystools有更新UI适配我可能会删除掉) +- 增加 **easy sliderControl** - 滑块控制节点,当前可用于控制ipadapterMS的参数 (双击滑块可重置为默认值) +- 增加 **layer_weights** 属性在 `easy ipadapterApplyADV` 节点 + +**v1.1.9** + +- 增加 新的调度器 **gitsScheduler** +- 增加 `easy imageBatchToImageList` 和 `easy imageListToImageBatch` (修复Impact版的一点小问题) +- 递归模型子目录嵌套 +- 支持 sd3 模型 +- 增加 `easy applyInpaint` - 局部重绘全模式节点 (相比与之前的kSamplerInpating节点逻辑会更合理些) + +**v1.1.8** + +- 增加中文提示词自动翻译,使用[opus-mt-zh-en模型](https://huggingface.co/Helsinki-NLP/opus-mt-zh-en), 默认已对wildcard、lora正则处理, 其他需要保留的中文,可使用`@你的提示词@`包裹 (若依赖安装完成后报错, 请重启),测算大约会占0.3GB显存 +- 增加 `easy controlnetStack` - controlnet堆 +- 增加 `easy applyBrushNet` - [示例参考](https://github.com/yolain/ComfyUI-Yolain-Workflows/blob/main/workflows/2_advanced/2-4inpainting/2-4brushnet_1.1.8.json) +- 增加 `easy applyPowerPaint` - [示例参考](https://github.com/yolain/ComfyUI-Yolain-Workflows/blob/main/workflows/2_advanced/2-4inpainting/2-4powerpaint_outpaint_1.1.8.json) + +**v1.1.7** + +- 修复 一些模型(如controlnet模型等)未成功写入缓存,导致修改前置节点束参数(如提示词)需要二次载入模型的问题 +- 增加 `easy prompt` - 主体和光影预置项,后期可能会调整 +- 增加 `easy icLightApply` - 重绘光影, 从[ComfyUI-IC-Light](https://github.com/huchenlei/ComfyUI-IC-Light)优化 +- 增加 `easy imageSplitGrid` - 图像网格拆分 +- `easy kSamplerInpainting` 的 **additional** 属性增加差异扩散和brushnet等相关选项 +- 增加 brushnet模型加载的支持 - [ComfyUI-BrushNet](https://github.com/nullquant/ComfyUI-BrushNet) +- 增加 `easy applyFooocusInpaint` - Fooocus内补节点 替代原有的 FooocusInpaintLoader +- 移除 `easy fooocusInpaintLoader` - 容易bug,不再使用 +- 修改 easy kSampler等采样器中并联的model 不再替换输出中pipe里的model + +**v1.1.6** + +- 增加步调齐整适配 - 在所有的预采样和全采样器节点中的 调度器(schedulder) 增加了 **alignYourSteps** 选项 +- `easy kSampler` 和 `easy fullkSampler` 的 **image_output** 增加 **Preview&Choose**选项 +- 增加 `easy styleAlignedBatchAlign` - 风格对齐 [style_aligned_comfy](https://github.com/brianfitzgerald/style_aligned_comfy) +- 增加 `easy ckptNames` +- 增加 `easy controlnetNames` +- 增加 `easy imagesSplitimage` - 批次图像拆分单张 +- 增加 `easy imageCount` - 图像数量 +- 增加 `easy textSwitch` - 文字切换 + +**v1.1.5** + +- 重写 `easy cleanGPUUsed` - 可强制清理comfyUI的模型显存占用 +- 增加 `easy humanSegmentation` - 多类分割、人像分割 +- 增加 `easy imageColorMatch` +- 增加 `easy ipadapterApplyRegional` +- 增加 `easy ipadapterApplyFromParams` +- 增加 `easy imageInterrogator` - 图像反推 +- 增加 `easy stableDiffusion3API` - 简易的Stable Diffusion 3 多账号API节点 + +**v1.1.4** + +- 增加 `easy imageChooser` - 从[cg-image-picker](https://github.com/chrisgoringe/cg-image-picker)简化的图片选择器 +- 增加 `easy preSamplingCustom` - 自定义预采样,可支持cosXL-edit +- 增加 `easy 
ipadapterStyleComposition` +- 增加 在Loaders上右键菜单可查看 checkpoints、lora 信息 +- 修复 `easy preSamplingNoiseIn`、`easy latentNoisy`、`east Unsampler` 以兼容ComfyUI Revision>=2098 [0542088e] 以上版本 +- 修复 FooocusInpaint修改ModelPatcher计算权重引发的问题,理应在生成model后重置ModelPatcher为默认值 + +**v1.1.3** + +- `easy ipadapterApply` 增加 **COMPOSITION** 预置项 +- 增加 对[ResAdapter](https://huggingface.co/jiaxiangc/res-adapter) lora模型 的加载支持 +- 增加 `easy promptLine` +- 增加 `easy promptReplace` +- 增加 `easy promptConcat` +- `easy wildcards` 增加 **multiline_mode**属性 +- 增加 当节点需要下载模型时,若huggingface连接超时,会切换至镜像地址下载模型 + +
+v1.1.2 + +- 改写 EasyUse 相关节点的部分插槽推荐节点 +- 增加 **启用上下文菜单自动嵌套子目录** 设置项,默认为启用状态,可分类子目录及checkpoints、loras预览图 +- 增加 `easy sv3dLoader` +- 增加 `easy dynamiCrafterLoader` +- 增加 `easy ipadapterApply` +- 增加 `easy ipadapterApplyADV` +- 增加 `easy ipadapterApplyEncoder` +- 增加 `easy ipadapterApplyEmbeds` +- 增加 `easy preMaskDetailerFix` +- `easy kSamplerInpainting` 增加 **additional** 属性,可设置成 Differential Diffusion 或 Only InpaintModelConditioning +- 修复 `easy stylesSelector` 当未选择样式时,原有提示词发生了变化 +- 修复 `easy pipeEdit` 提示词输入lora时报错 +- 修复 layerDiffuse xyplot相关bug +
+ +
+v1.1.1
+
+- 修复首次添加含seed的节点且当前模式为control_before_generate时,seed为0的问题
+- `easy preSamplingAdvanced` 增加 **return_with_leftover_noise**
+- 修复 `easy stylesSelector` 当选择自定义样式文件时运行队列报错
+- `easy preSamplingLayerDiffusion` 增加 mask 可选传入参数
+- 将所有 **seed_num** 调整回 **seed**
+- 修补官方BUG: 当control_mode为before 在首次加载页面时未修改节点中widget名称为 control_before_generate
+- 去除强制**control_before_generate**设定
+- 增加 `easy imageRemBg` - 默认为BriaAI的RMBG-1.4模型, 移除背景效果更佳,速度更快
+
+ +
+v1.1.0 + +- 增加 `easy imageSplitList` - 拆分每 N 张图像 +- 增加 `easy preSamplingDiffusionADDTL` - 可配置前景、背景、blended的additional_prompt等 +- 增加 `easy preSamplingNoiseIn` 可替代需要前置的`easy latentNoisy`节点 实现效果更好的噪声注入 +- `easy pipeEdit` 增加 条件拼接模式选择,可选择替换、合并、联结、平均、设置条件时间 +- 增加 `easy pipeEdit` - 可编辑Pipe的节点(包含可重新输入提示词) +- 增加 `easy preSamplingLayerDiffusion` 与 `easy kSamplerLayerDiffusion` (连接 `easy kSampler` 也能通) +- 增加 在 加载器、预采样、采样器、Controlnet等节点上右键可快速替换同类型节点的便捷菜单 +- 增加 `easy instantIDApplyADV` 可连入 positive 与 negative +- 修复 `easy wildcards` 读取lora未填写完整路径时未自动检索导致加载lora失败的问题 +- 修复 `easy instantIDApply` mask 未传入正确值 +- 修复 在 非a1111提示词风格下 BREAK 不生效的问题 +
+ +
+v1.0.9 + +- 修复未安装 ComfyUI-Impack-Pack 和 ComfyUI_InstantID 时报错 +- 修复 `easy pipeIn` - pipe设为可不必选 +- 增加 `easy instantIDApply` - 需要先安装 [ComfyUI_InstantID](https://github.com/cubiq/ComfyUI_InstantID), 工作流参考[示例](https://github.com/yolain/ComfyUI-Yolain-Workflows?tab=readme-ov-file#2-2-instantid) +- 修复 `easy detailerFix` 未添加到保存图片格式化扩展名可用节点列表 +- 修复 `easy XYInputs: PromptSR` 在替换负面提示词时报错 +
+ +
+v1.0.8 + +- `easy cascadeLoader` stage_c 与 stage_b 支持checkpoint模型 (需要下载[checkpoints](https://huggingface.co/stabilityai/stable-cascade/tree/main/comfyui_checkpoints)) +- `easy styleSelector` 搜索框修改为不区分大小写匹配 +- `easy fullLoader` 增加 **positive**、**negative**、**latent** 输出项 +- 修复 SDXLClipModel 在 ComfyUI 修订版本号 2016[c2cb8e88] 及以上的报错(判断了版本号可兼容老版本) +- 修复 `easy detailerFix` 批次大小大于1时生成出错 +- 修复`easy preSampling`等 latent传入后无法根据批次索引生成的问题 +- 修复 `easy svdLoader` 报错 +- 优化代码,减少了诸多冗余,提升运行速度 +- 去除中文翻译对照文本 + +(翻译对照已由 [AIGODLIKE-COMFYUI-TRANSLATION](https://github.com/AIGODLIKE/AIGODLIKE-ComfyUI-Translation) 统一维护啦! +首次下载或者版本较早的朋友请更新 AIGODLIKE-COMFYUI-TRANSLATION 和本节点包至最新版本。) +
+ +
+v1.0.7 + +- 增加 `easy cascadeLoader` - stable cascade 加载器 +- 增加 `easy preSamplingCascade` - stabled cascade stage_c 预采样参数 +- 增加 `easy fullCascadeKSampler` - stable cascade stage_c 完整版采样器 +- 增加 `easy cascadeKSampler` - stable cascade stage-c ksampler simple +
+ +
+v1.0.6 + +- 增加 `easy XYInputs: Checkpoint` +- 增加 `easy XYInputs: Lora` +- `easy seed` 增加固定种子值时可手动切换随机种 +- 修复 `easy fullLoader`等加载器切换lora时自动调整节点大小的问题 +- 去除原有ttn的图片保存逻辑并适配ComfyUI默认的图片保存格式化扩展 +
+ +
+v1.0.5 + +- 增加 `easy isSDXL` +- `easy svdLoader` 增加提示词控制, 可配合open_clip模型进行使用 +- `easy wildcards` 增加 **populated_text** 可输出通配填充后文本 +
+ +
+v1.0.4 + +- 增加 `easy showLoaderSettingsNames` 可显示与输出加载器部件中的 模型与VAE名称 +- 增加 `easy promptList` - 提示词列表 +- 增加 `easy fooocusInpaintLoader` - Fooocus内补节点(仅支持XL模型的流程) +- 增加 **Logic** 逻辑类节点 - 包含类型、计算、判断和转换类型等 +- 增加 `easy imageSave` - 带日期转换和宽高格式化的图像保存节点 +- 增加 `easy joinImageBatch` - 合并图像批次 +- `easy showAnything` 增加支持转换其他类型(如:tensor类型的条件、图像等) +- `easy kSamplerInpainting` 增加 **patch** 传入值,配合Fooocus内补节点使用 +- `easy imageSave` 增加 **only_preivew** + +- 修复 xyplot在pillow>9.5中报错 +- 修复 `easy wildcards` 在使用PS扩展插件运行时报错 +- 修复 `easy latentCompositeMaskedWithCond` +- 修复 `easy XYInputs: ControlNet` 报错 +- 修复 `easy loraStack` **toggle** 为 disabled 时报错 + +- 修改首次安装节点包不再自动替换主题,需手动调整并刷新页面 +
+ +
+v1.0.3 + +- 增加 `easy stylesSelector` 风格化提示词选择器 +- 增加队列进度条设置项,默认为未启用状态 +- `easy controlnetLoader` 和 `easy controlnetLoaderADV` 增加参数 **scale_soft_weights** + + +- 修复 `easy XYInputs: Sampler/Scheduler` 报错 +- 修复 右侧菜单 点击按钮时老是跑位的问题 +- 修复 styles 路径在其他环境报错 +- 修复 `easy comfyLoader` 读取错误 +- 修复 xyPlot 在连接 zero123 时报错 +- 修复加载器中提示词为组件时报错 +- 修复 `easy getNode` 和 `easy setNode` 加载时标题未更改 +- 修复所有采样器中存储图片使用子目录前缀不生效的问题 + + +- 调整UI主题 +
+ +
+v1.0.2 + +- 增加 **autocomplete** 文件夹,如果您安装了 [ComfyUI-Custom-Scripts](https://github.com/pythongosssss/ComfyUI-Custom-Scripts), 将在启动时合并该文件夹下的所有txt文件并覆盖到pyssss包里的autocomplete.txt文件。 +- 增加 `easy XYPlotAdvanced` 和 `easy XYInputs` 等相关节点 +- 增加 **Alt+1到9** 快捷键,可快速粘贴 Node templates 的节点预设 (对应 1到9 顺序) + +- 修复 `easy imageInsetCrop` 测量值为百分比时步进为1 +- 修复 开启 `a1111_prompt_style` 时XY图表无法使用的问题 +- 右键菜单中增加了一个 `📜Groups Map(EasyUse)` + +- 修复在Comfy新版本中UI加载失败 +- 修复 `easy pipeToBasicPipe` 报错 +- 修改 `easy fullLoader` 和 `easy a1111Loader` 中的 **a1111_prompt_style** 默认值为 False +- `easy XYInputs ModelMergeBlocks` 支持csv文件导入数值 + +- 替换了XY图生成时的字体文件 + +- 移除 `easy imageRemBg` +- 移除包中的介绍图和工作流文件,减少包体积 + +
+ +
+v1.0.1 + +- 新增 `easy seed` - 简易随机种 +- `easy preDetailerFix` 新增了 `optional_image` 传入图像可选,如未传默认取值为pipe里的图像 +- 新增 `easy kSamplerInpainting` 用于内补潜空间的采样器 +- 新增 `easy pipeToBasicPipe` 用于转换到Impact的某些节点上 + +- 修复 `easy comfyLoader` 报错 +- 修复所有包含输出图片尺寸的节点取值方式无法批处理的问题 +- 修复 `width` 和 `height` 无法在 `easy svdLoader` 自定义的报错问题 +- 修复所有采样器预览图片的地址链接 (解决在 MACOS 系统中图片无法在采样器中预览的问题) +- 修复 `vae_name` 在 `easy fullLoader` 和 `easy a1111Loader` 和 `easy comfyLoader` 中选择但未替换原始vae问题 +- 修复 `easy fullkSampler` 除pipe外其他输出值的报错 +- 修复 `easy hiresFix` 输入连接pipe和image、vae同时存在时报错 +- 修复 `easy fullLoader` 中 `model_override` 连接后未执行 +- 修复 因新增`easy seed` 导致action错误 +- 修复 `easy xyplot` 的字体文件路径读取错误 +- 修复 convert 到 `easy seed` 随机种无法固定的问题 +- 修复 `easy pipeIn` 值传入的报错问题 +- 修复 `easy zero123Loader` 和 `easy svdLoader` 读取模型时将模型加入到缓存中 +- 修复 `easy kSampler` `easy kSamplerTiled` `easy detailerFix` 的 `image_output` 默认值为 Preview +- `easy fullLoader` 和 `easy a1111Loader` 新增了 `a1111_prompt_style` 参数可以重现和webui生成相同的图像,当前您需要安装 [ComfyUI_smZNodes](https://github.com/shiimizu/ComfyUI_smZNodes) 才能使用此功能 +
+ +
+v1.0.0 + +- 新增`easy positive` - 简易正面提示词文本 +- 新增`easy negative` - 简易负面提示词文本 +- 新增`easy wildcards` - 支持通配符和Lora选择的提示词文本 +- 新增`easy portraitMaster` - 肖像大师v2.2 +- 新增`easy loraStack` - Lora堆 +- 新增`easy fullLoader` - 完整版的加载器 +- 新增`easy zero123Loader` - 简易zero123加载器 +- 新增`easy svdLoader` - 简易svd加载器 +- 新增`easy fullkSampler` - 完整版的采样器(无分离) +- 新增`easy hiresFix` - 支持Pipe的高清修复 +- 新增`easy predetailerFix` `easy DetailerFix` - 支持Pipe的细节修复 +- 新增`easy ultralyticsDetectorPipe` `easy samLoaderPipe` - 检测加载器(细节修复的输入项) +- 新增`easy pipein` `easy pipeout` - Pipe的输入与输出 +- 新增`easy xyPlot` - 简易的xyplot (后续会更新更多可控参数) +- 新增`easy imageRemoveBG` - 图像去除背景 +- 新增`easy imagePixelPerfect` - 图像完美像素 +- 新增`easy poseEditor` - 姿势编辑器 +- 新增UI主题(黑曜石)- 默认自动加载UI, 也可在设置中自行更替 + +- 修复 `easy globalSeed` 不生效问题 +- 修复所有的`seed_num` 因 [cg-use-everywhere](https://github.com/chrisgoringe/cg-use-everywhere) 实时更新图表导致值错乱的问题 +- 修复`easy imageSize` `easy imageSizeBySide` `easy imageSizeByLongerSide` 可作为终节点 +- 修复 `seed_num` (随机种子值) 在历史记录中读取无法一致的Bug +
+ + +
+v0.5 + +- 新增 `easy controlnetLoaderADV` 节点 +- 新增 `easy imageSizeBySide` 节点,可选输出为长边或短边 +- 新增 `easy LLLiteLoader` 节点,如果您预先安装过 kohya-ss/ControlNet-LLLite-ComfyUI 包,请将 models 里的模型文件移动至 ComfyUI\models\controlnet\ (即comfy默认的controlnet路径里,请勿修改模型的文件名,不然会读取不到)。 +- 新增 `easy imageSize` 和 `easy imageSizeByLongerSize` 输出的尺寸显示。 +- 新增 `easy showSpentTime` 节点用于展示图片推理花费时间与VAE解码花费时间。 +- `easy controlnetLoaderADV` 和 `easy controlnetLoader` 新增 `control_net` 可选传入参数 +- `easy preSampling` 和 `easy preSamplingAdvanced` 新增 `image_to_latent` 可选传入参数 +- `easy a1111Loader` 和 `easy comfyLoader` 新增 `batch_size` 传入参数 + +- 修改 `easy controlnetLoader` 到 loader 分类底下。 +
+ +## 整合参考到的相关节点包 + +声明: 非常尊重这些原作者们的付出,开源不易,我仅仅只是做了一些整合与优化。 + +| 节点名 (搜索名) | 相关的库 | 库相关的节点 | +|:-------------------------------|:----------------------------------------------------------------------------|:------------------------| +| easy setNode | [ComfyUI-extensions](https://github.com/diffus3/ComfyUI-extensions) | diffus3.SetNode | +| easy getNode | [ComfyUI-extensions](https://github.com/diffus3/ComfyUI-extensions) | diffus3.GetNode | +| easy bookmark | [rgthree-comfy](https://github.com/rgthree/rgthree-comfy) | Bookmark 🔖 | +| easy portraitMarker | [comfyui-portrait-master](https://github.com/florestefano1975/comfyui-portrait-master) | Portrait Master | +| easy LLLiteLoader | [ControlNet-LLLite-ComfyUI](https://github.com/kohya-ss/ControlNet-LLLite-ComfyUI) | LLLiteLoader | +| easy globalSeed | [ComfyUI-Inspire-Pack](https://github.com/ltdrdata/ComfyUI-Inspire-Pack) | Global Seed (Inspire) | +| easy preSamplingDynamicCFG | [sd-dynamic-thresholding](https://github.com/mcmonkeyprojects/sd-dynamic-thresholding) | DynamicThresholdingFull | +| dynamicThresholdingFull | [sd-dynamic-thresholding](https://github.com/mcmonkeyprojects/sd-dynamic-thresholding) | DynamicThresholdingFull | +| easy imageInsetCrop | [rgthree-comfy](https://github.com/rgthree/rgthree-comfy) | ImageInsetCrop | +| easy poseEditor | [ComfyUI_Custom_Nodes_AlekPet](https://github.com/AlekPet/ComfyUI_Custom_Nodes_AlekPet) | poseNode | +| easy if | [ComfyUI-Logic](https://github.com/theUpsider/ComfyUI-Logic) | IfExecute | +| easy preSamplingLayerDiffusion | [ComfyUI-layerdiffusion](https://github.com/huchenlei/ComfyUI-layerdiffusion) | LayeredDiffusionApply等 | +| easy dynamiCrafterLoader | [ComfyUI-layerdiffusion](https://github.com/ExponentialML/ComfyUI_Native_DynamiCrafter) | Apply Dynamicrafter | +| easy imageChooser | [cg-image-picker](https://github.com/chrisgoringe/cg-image-picker) | Preview Chooser | +| easy styleAlignedBatchAlign | [style_aligned_comfy](https://github.com/chrisgoringe/cg-image-picker) | styleAlignedBatchAlign | +| easy icLightApply | [ComfyUI-IC-Light](https://github.com/huchenlei/ComfyUI-IC-Light) | ICLightApply等 | +| easy kolorsLoader | [ComfyUI-Kolors-MZ](https://github.com/MinusZoneAI/ComfyUI-Kolors-MZ) | kolorsLoader | + +## Credits + +[ComfyUI](https://github.com/comfyanonymous/ComfyUI) - 功能强大且模块化的Stable Diffusion GUI + +[ComfyUI-ComfyUI-Manager](https://github.com/ltdrdata/ComfyUI-Manager) - ComfyUI管理器 + +[tinyterraNodes](https://github.com/TinyTerra/ComfyUI_tinyterraNodes) - 管道节点(节点束)让用户减少了不必要的连接 + +[ComfyUI-extensions](https://github.com/diffus3/ComfyUI-extensions) - diffus3的获取与设置点让用户可以分离工作流构成 + +[ComfyUI-Impact-Pack](https://github.com/ltdrdata/ComfyUI-Impact-Pack) - 常规整合包1 + +[ComfyUI-Inspire-Pack](https://github.com/ltdrdata/ComfyUI-Inspire-Pack) - 常规整合包2 + +[ComfyUI-Logic](https://github.com/theUpsider/ComfyUI-Logic) - ComfyUI逻辑运算 + +[ComfyUI-ResAdapter](https://github.com/jiaxiangc/ComfyUI-ResAdapter) - 让模型生成不受训练分辨率限制 + +[ComfyUI_IPAdapter_plus](https://github.com/cubiq/ComfyUI_IPAdapter_plus) - 风格迁移 + +[ComfyUI_InstantID](https://github.com/cubiq/ComfyUI_InstantID) - 人脸迁移 + +[ComfyUI_PuLID](https://github.com/cubiq/PuLID_ComfyUI) - 人脸迁移 + +[ComfyUI-Custom-Scripts](https://github.com/pythongosssss/ComfyUI-Custom-Scripts) - pyssss 小蛇🐍脚本 + +[cg-image-picker](https://github.com/chrisgoringe/cg-image-picker) - 图片选择器 + +[ComfyUI-BrushNet](https://github.com/nullquant/ComfyUI-BrushNet) - BrushNet 内补节点 + +[ComfyUI_ExtraModels](https://github.com/city96/ComfyUI_ExtraModels) - 
DiT架构相关节点(Pixart、混元DiT等) + +## ☕️ Donation + +**Comfyui-Easy-Use** 是一个 GPL 许可的开源项目。为了项目取得更好、可持续的发展,我希望能够获得更多的支持。 如果我的自定义节点为您的一天增添了价值,请考虑喝杯咖啡来进一步补充能量! 💖感谢您的支持,每一杯咖啡都是我创作的动力! + +- [BiliBili充电](https://space.bilibili.com/1840885116) +- [爱发电](https://afdian.com/a/yolain) +- [Wechat/Alipay](https://github.com/user-attachments/assets/803469bd-ed6a-4fab-932d-50e5088a2d03) + +感谢您的捐助,我将用这些费用来租用 GPU 或购买其他 GPT 服务,以便更好地调试和完善 ComfyUI-Easy-Use 功能 + +## 🌟Stargazers + +My gratitude extends to the generous souls who bestow a star. Your support is much appreciated! + +[![Stargazers repo roster for @yolain/ComfyUI-Easy-Use](https://reporoster.com/stars/yolain/ComfyUI-Easy-Use)](https://github.com/yolain/ComfyUI-Easy-Use/stargazers) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/README.en.md b/ComfyUI-Easy-Use/README.en.md new file mode 100644 index 0000000000000000000000000000000000000000..2d492509b0faaf1094e7eee8a7fcb20585dba08d --- /dev/null +++ b/ComfyUI-Easy-Use/README.en.md @@ -0,0 +1,422 @@ +

+ 中文 | English +

+ +
+ +# ComfyUI Easy Use +
+
+**ComfyUI-Easy-Use** is a simplified node integration package built on top of [tinyterraNodes](https://github.com/TinyTerra/ComfyUI_tinyterraNodes). It integrates and optimizes many mainstream node packages so that ComfyUI can be used faster and more conveniently, and while preserving the degree of freedom it restores the smooth image-production experience of Stable Diffusion.
+
+[![ComfyUI-Yolain-Workflows](https://github.com/yolain/ComfyUI-Easy-Use/assets/73304135/9a3f54bc-a677-4bf1-a196-8845dd57c942)](https://github.com/yolain/ComfyUI-Yolain-Workflows)
+
+## 👨🏻‍🎨 Introduction
+
+- Inspired by [tinyterraNodes](https://github.com/TinyTerra/ComfyUI_tinyterraNodes), which greatly reduces the time cost of building workflows.
+- UI beautification: after installing for the first time, if you want to use the UI theme, switch it in Settings -> Color Palette and refresh the page.
+- Added a node for pre-sampling parameter configuration, which can be separated from the sampling node for easier previewing.
+- Wildcards and LoRAs are supported. For Lora Block Weight usage, make sure the [ComfyUI-Inspire-Pack](https://github.com/ltdrdata/ComfyUI-Inspire-Pack) custom node package is installed.
+- Multi-selectable style prompt selector. The default is the Fooocus styles JSON; custom JSON files can be placed under `styles`, and preview images can be placed in the `samples` folder (file names must match the style names, with spaces converted to underscores '_').
+- The loader enables the A1111 prompt mode, which reproduces nearly identical images to those generated by webui; [ComfyUI_smZNodes](https://github.com/shiimizu/ComfyUI_smZNodes) needs to be installed first.
+- Noise injection into the latent space can be achieved using the `easy latentNoisy` or `easy preSamplingNoiseIn` node.
+- Simplified processes for SD1.x, SD2.x, SDXL, SVD, Zero123, etc. [Example](https://github.com/yolain/ComfyUI-Easy-Use?tab=readme-ov-file#StableDiffusion)
+- Simplified Stable Cascade [Example](https://github.com/yolain/ComfyUI-Easy-Use?tab=readme-ov-file#StableCascade)
+- Simplified Layer Diffuse [Example](https://github.com/yolain/ComfyUI-Easy-Use?tab=readme-ov-file#LayerDiffusion). The first time you use it you may need to run `pip install -r requirements.txt` to install the required dependencies.
+- Simplified InstantID [Example](https://github.com/yolain/ComfyUI-Easy-Use?tab=readme-ov-file#InstantID). Make sure the [ComfyUI_InstantID](https://github.com/cubiq/ComfyUI_InstantID) custom node package is installed.
+- Extended usability of XY plot
+- Fooocus Inpaint integration
+- Integration of common logical calculations, type conversions, display of all types, etc.
+- Background removal node for BriaAI's RMBG-1.4 model, [BriaAI Guide](https://huggingface.co/briaai/RMBG-1.4)
+- Forced clearing of ComfyUI model memory usage is supported
+- Stable Diffusion 3 multi-account API nodes are supported
+- Support for the Stable Diffusion 3 model
+- Support for the Kolors model
+
+## 👨🏻‍🔧 Installation
+Clone the repo into the **custom_nodes** directory and install the requirements:
+```shell
+#1. Clone the repo
+git clone https://github.com/yolain/ComfyUI-Easy-Use
+#2.
Install the requirements +Double-click install.bat to install the required dependencies +``` + +## ☕️ Plan + +- [ ] Updated new front-end code for easier maintenance + - [x] Maintain css styles using sass + - [ ] Optimize existing extensions + - [ ] Add new components + - [ ] Add light theme +- [ ] Upload new workflows to [ComfyUI-Yolain-Workflows](https://github.com/yolain/ComfyUI-Yolain-Workflows) and translate readme to english version. +- [ ] Write gitbook with more detailed function introdution + +## 📜 Changelog + +**v1.2.1** + +- Added **inspyrenet** to `easy imageRemBg` +- Added `easy controlnetLoader++` +- Added **PLUS (kolors genernal)** preset to `easy ipadapterApply` and `easy ipadapterApplyADV` (Supported kolors ipadapter) +- Added `easy kolorsLoader` - Code based on [MinusZoneAI](https://github.com/MinusZoneAI/ComfyUI-Kolors-MZ)'s and [kijai](https://github.com/kijai/ComfyUI-KwaiKolorsWrapper)'s repo, thanks for their contribution. + +**v1.2.0** + +- Added `easy pulIDApply` and `easy pulIDApplyADV` +- Added `easy huanyuanDiTLoader` and `easy pixArtLoader` +- Added **easy sliderControl** - Slider control node, which can currently be used to control the parameters of ipadapterMS (double-click the slider to reset to default) +- Added **layer_weights** in `easy ipadapterApplyADV` + +**v1.1.9** + +- Added **gitsScheduler** +- Added `easy imageBatchToImageList` and `easy imageListToImageBatch` +- Recursive subcategories nested for models +- Support for Stable Diffusion 3 model +- Added `easy applyInpaint` - All inpainting mode in this node + +**v1.1.8** + +- Added `easy controlnetStack` +- Added `easy applyBrushNet` - [Workflow Example](https://github.com/yolain/ComfyUI-Yolain-Workflows/blob/main/workflows/2_advanced/2-4inpainting/2-4brushnet_1.1.8.json) +- Added `easy applyPowerPaint` - [Workflow Example](https://github.com/yolain/ComfyUI-Yolain-Workflows/blob/main/workflows/2_advanced/2-4inpainting/2-4powerpaint_outpaint_1.1.8.json) + +**v1.1.7** + +- Added `easy prompt` - Subject and light presets, maybe adjusted later +- Added `easy icLightApply` - Light and shadow migration, Code based on [ComfyUI-IC-Light](https://github.com/huchenlei/ComfyUI-IC-Light) +- Added `easy imageSplitGrid` +- `easy kSamplerInpainting` added options such as different diffusion and brushnet in **additional** widget +- Support for brushnet model loading - [ComfyUI-BrushNet](https://github.com/nullquant/ComfyUI-BrushNet) +- Added `easy applyFooocusInpaint` - Replace FooocusInpaintLoader +- Removed `easy fooocusInpaintLoader` + +**v1.1.6** + +- Added **alignYourSteps** to **schedulder** widget in all `easy preSampling` and `easy fullkSampler` +- Added **Preview&Choose** to **image_output** widget in `easy kSampler` & `easy fullkSampler` +- Added `easy styleAlignedBatchAlign` - Credit of [style_aligned_comfy](https://github.com/brianfitzgerald/style_aligned_comfy) +- Added `easy ckptNames` +- Added `easy controlnetNames` +- Added `easy imagesSplitimage` - Batch images split into single images +- Added `easy imageCount` - Get Image Count +- Added `easy textSwitch` - Text Switch + +**v1.1.5** + +- Rewrite `easy cleanGPUUsed` - the memory usage of the comfyUI can to be cleared +- Added `easy humanSegmentation` - Human Part Segmentation +- Added `easy imageColorMatch` +- Added `easy ipadapterApplyRegional` +- Added `easy ipadapterApplyFromParams` +- Added `easy imageInterrogator` - Image To Prompt +- Added `easy stableDiffusion3API` - Easy Stable Diffusion 3 Multiple accounts API Node + +**v1.1.4** + +- Added 
`easy preSamplingCustom` - Custom-PreSampling, can be supported cosXL-edit +- Added `easy ipadapterStyleComposition` +- Added the right-click menu to view checkpoints and lora information in all Loaders +- Fixed `easy preSamplingNoiseIn`、`easy latentNoisy`、`east Unsampler` compatible with ComfyUI Revision>=2098 [0542088e] or later + + +**v1.1.3** + +- `easy ipadapterApply` Added **COMPOSITION** preset +- Supported [ResAdapter](https://huggingface.co/jiaxiangc/res-adapter) when load ResAdapter lora +- Added `easy promptLine` +- Added `easy promptReplace` +- Added `easy promptConcat` +- `easy wildcards` Added **multiline_mode** + +**v1.1.2** + +- Optimized some of the recommended nodes for slots related to EasyUse +- Added **Enable ContextMenu Auto Nest Subdirectories** The setting item is enabled by default, and it can be classified into subdirectories, checkpoints and loras previews +- Added `easy sv3dLoader` +- Added `easy dynamiCrafterLoader` +- Added `easy ipadapterApply` +- Added `easy ipadapterApplyADV` +- Added `easy ipadapterApplyEncoder` +- Added `easy ipadapterApplyEmbeds` +- Added `easy preMaskDetailerFix` +- Fixed `easy stylesSelector` is change the prompt when not select the style +- Fixed `easy pipeEdit` error when add lora to prompt +- Fixed layerDiffuse xyplot bug +- `easy kSamplerInpainting` add *additional* widget,you can choose 'Differential Diffusion' or 'Only InpaintModelConditioning' + +**v1.1.1** + +- The issue that the seed is 0 when a node with a seed control is added and **control before generate** is fixed for the first time run queue prompt. +- `easy preSamplingAdvanced` Added **return_with_leftover_noise** +- Fixed `easy stylesSelector` error when choose the custom file +- `easy preSamplingLayerDiffusion` Added optional input parameter for mask +- Renamed all nodes widget name named seed_num to seed +- Remove forced **control_before_generate** settings。 If you want to use control_before_generate, change widget_value_control_mode to before in system settings +- Added `easy imageRemBg` - The default is BriaAI's RMBG-1.4 model, which removes the background effect more and faster + +
+v1.1.0 + +- Added `easy imageSplitList` - to split every N images +- Added `easy preSamplingDiffusionADDTL` - It can modify foreground、background or blended additional prompt +- Added `easy preSamplingNoiseIn` It can replace the `easy latentNoisy` node that needs to be fronted to achieve better noise injection +- `easy pipeEdit` Added conditioning splicing mode selection, you can choose to replace, concat, combine, average, and set timestep range +- Added `easy pipeEdit` - nodes that can edit pipes (including re-enterable prompts) +- Added `easy preSamplingLayerDiffusion` and `easy kSamplerLayerDiffusion` +- Added a convenient menu to right-click on nodes such as Loader, Presampler, Sampler, Controlnet, etc. to quickly replace nodes of the same type +- Added `easy instantIDApplyADV` can link positive and negative +- Fixed layerDiffusion error when batch size greater than 1 +- Fixed `easy wildcards` When LoRa is not filled in completely, LoRa is not automatically retrieved, resulting in failure to load LoRa +- Fixed the issue that 'BREAK' non-initiation when didn't use a1111 prompt style +- Fixed `easy instantIDApply` mask not input right +
+ +
+v1.0.9 + +- Fixed the error when ComfyUI-Impack-Pack and ComfyUI_InstantID were not installed +- Fixed `easy pipeIn` +- Added `easy instantIDApply` - you need installed [ComfyUI_InstantID](https://github.com/cubiq/ComfyUI_InstantID) fisrt, Workflow[Example](https://github.com/yolain/ComfyUI-Easy-Use/blob/main/README.en.md#InstantID) +- Fixed `easy detailerFix` not added to the list of nodes available for saving images formatting extensions +- Fixed `easy XYInputs: PromptSR` errors are reported when replacing negative prompts +
+ +
+v1.0.8 + +- `easy cascadeLoader` stage_c and stage_b support the checkpoint model (Download [checkpoints](https://huggingface.co/stabilityai/stable-cascade/tree/main/comfyui_checkpoints) models) +- `easy styleSelector` The search box is modified to be case-insensitive +- `easy fullLoader` **positive**、**negative**、**latent** added to the output items +- Fixed the issue that 'easy preSampling' and other similar node, latent could not be generated based on the batch index after passing in +- Fixed `easy svdLoader` error when the positive or negative is empty +- Fixed the error of SDXLClipModel in ComfyUI revision 2016[c2cb8e88] and above (the revision number was judged to be compatible with the old revision) +- Fixed `easy detailerFix` generation error when batch size is greater than 1 +- Optimize the code, reduce a lot of redundant code and improve the running speed +
+ +
+v1.0.7 + +- Added `easy cascadeLoader` - stable cascade Loader +- Added `easy preSamplingCascade` - stable cascade preSampling Settings +- Added `easy fullCascadeKSampler` - stable cascade stage-c ksampler full +- Added `easy cascadeKSampler` - stable cascade stage-c ksampler simple +- +- Optimize the image to image[Example](https://github.com/yolain/ComfyUI-Easy-Use/blob/main/README.en.md#image-to-image) +
+ +
+v1.0.6 + +- Added `easy XYInputs: Checkpoint` +- Added `easy XYInputs: Lora` +- `easy seed` can manually switch the random seed when increasing the fixed seed value +- Fixed `easy fullLoader` and all loaders to automatically adjust the node size when switching LoRa +- Removed the original ttn image saving logic and adapted to the default image saving format extension of ComfyUI +
+ +
+v1.0.5 + +- Added `easy isSDXL` +- Added prompt word control on `easy svdLoader`, which can be used with open_clip model +- Added **populated_text** on `easy wildcards`, wildcard populated text can be output +
+ +
+v1.0.4 + +- `easy showAnything` added support for converting other types (e.g., tensor conditions, images, etc.) +- Added `easy showLoaderSettingsNames` can display the model and VAE name in the output loader assembly +- Added `easy promptList` +- Added `easy fooocusInpaintLoader` (only the process of SDXLModel is supported) +- Added **Logic** nodes +- Added `easy imageSave` - Image saving node with date conversion and aspect and height formatting +- Added `easy joinImageBatch` +- `easy kSamplerInpainting` Added the **patch** input value to be used with the FooocusInpaintLoader node + +- Fixed xyplot error when with Pillow>9.5 +- Fixed `easy wildcards` An error is reported when running with the PS extension +- Fixed `easy XYInputs: ControlNet` Error +- Fixed `easy loraStack` error when **toggle** is disabled + + +- Changing the first-time install node package no longer automatically replaces the theme, you need to manually adjust and refresh the page +- `easy imageSave` added **only_preivew** +- Adjust the `easy latentCompositeMaskedWithCond` node +
+ +
+v1.0.3 + +- Added `easy stylesSelector` +- Added **scale_soft_weights** in `easy controlnetLoader` and `easy controlnetLoaderADV` +- Added the queue progress bar setting item, which is not enabled by default + + +- Fixed `easy XYInputs: Sampler/Scheduler` Error +- Fixed the right menu has a problem when clicking the button +- Fixed `easy comfyLoader` error +- Fixed xyPlot error when connecting to zero123 +- Fixed the error message in the loader when the prompt word was component +- Fixed `easy getNode` and `easy setNode` the title does not change when loading +- Fixed all samplers using subdirectories to store images + + +- Adjust the UI theme, divided into two sets of styles: the official default background and the dark black background, which can be switched in the color palette in the settings +- Modify the styles path to be compatible with other environments +
+ +
+v1.0.2 + +- Added `easy XYPlotAdvanced` and some nodes about `easy XYInputs` +- Added **Alt+1-Alt+9** Shortcut keys to quickly paste node presets for Node templates (corresponding to 1~9 sequences) +- Added a `📜Groups Map(EasyUse)` to the context menu. +- An `autocomplete` folder has been added, If you have [ComfyUI-Custom-Scripts](https://github.com/pythongosssss/ComfyUI-Custom-Scripts) installed, the txt files in that folder will be merged and overwritten to the autocomplete .txt file of the pyssss package at startup. + + +- Fixed XYPlot is not working when `a1111_prompt_style` is True +- Fixed UI loading failure in the new version of ComfyUI +- `easy XYInputs ModelMergeBlocks` Values can be imported from CSV files +- Fixed `easy pipeToBasicPipe` Bug + + +- Removed `easy imageRemBg` +- Remove the introductory diagram and workflow files from the package to reduce the package size +- Replaced the font file used in the generation of XY diagrams +
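+The `autocomplete` merge described in v1.0.2 above can be pictured with the minimal sketch below. The folder layout and the target file name are assumptions chosen for illustration, not the package's exact implementation:
+
+```python
+# Hypothetical sketch: merge every .txt file from this package's "autocomplete" folder
+# into the autocomplete.txt used by ComfyUI-Custom-Scripts (pyssss), overwriting it at startup.
+from pathlib import Path
+
+source_dir = Path("custom_nodes/ComfyUI-Easy-Use/autocomplete")                  # assumed source folder
+target_txt = Path("custom_nodes/ComfyUI-Custom-Scripts/user/autocomplete.txt")   # assumed target file
+
+if source_dir.is_dir() and target_txt.parent.is_dir():
+    merged = "\n".join(
+        p.read_text(encoding="utf-8").strip() for p in sorted(source_dir.glob("*.txt"))
+    )
+    # "merged and overwritten": the combined entries replace the existing file contents
+    target_txt.write_text(merged + "\n", encoding="utf-8")
+```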
+ +
+v1.0.1 + +- Fixed `easy comfyLoader` error +- Fixed All nodes that contain the value of the image size +- Added `easy kSamplerInpainting` +- Added `easy pipeToBasicPipe` +- Fixed `width` and `height` can not customize in `easy svdLoader` +- Fixed all preview image path (Previously, it was not possible to preview the image on the Mac system) +- Fixed `vae_name` is not working in `easy fullLoader` and `easy a1111Loader` and `easy comfyLoader` +- Fixed `easy fullkSampler` outputs error +- Fixed `model_override` is not working in `easy fullLoader` +- Fixed `easy hiresFix` error +- Fixed `easy xyplot` font file path error +- Fixed seed that cannot be fixed when you convert `seed_num` to `easy seed` +- Fixed `easy pipeIn` inputs bug +- `easy preDetailerFix` have added a new parameter `optional_image` +- Fixed `easy zero123Loader` and `easy svdLoader` model into cache. +- Added `easy seed` +- Fixed `image_output` default value is "Preview" +- `easy fullLoader` and `easy a1111Loader` have added a new parameter `a1111_prompt_style`,that can reproduce the same image generated from stable-diffusion-webui on comfyui, but you need to install [ComfyUI_smZNodes](https://github.com/shiimizu/ComfyUI_smZNodes) to use this feature in the current version +
+ +
+v1.0.0 + +- Added `easy positive` - simple positive prompt text +- Added `easy negative` - simple negative prompt text +- Added `easy wildcards` - support for wildcards and hint text selected by Lora +- Added `easy portraitMaster` - PortraitMaster v2.2 +- Added `easy loraStack` - Lora stack +- Added `easy fullLoader` - full version of the loader +- Added `easy zero123Loader` - simple zero123 loader +- Added `easy svdLoader` - easy svd loader +- Added `easy fullkSampler` - full version of the sampler (no separation) +- Added `easy hiresFix` - support for HD repair of Pipe +- Added `easy predetailerFix` and `easy DetailerFix` - support for Pipe detail fixing +- Added `easy ultralyticsDetectorPipe` and `easy samLoaderPipe` - Detect loader (detail fixed input) +- Added `easy pipein` `easy pipeout` - Pipe input and output +- Added `easy xyPlot` - simple xyplot (more controllable parameters will be updated in the future) +- Added `easy imageRemoveBG` - image to remove background +- Added `easy imagePixelPerfect` - image pixel perfect +- Added `easy poseEditor` - Pose editor +- New UI Theme (Obsidian) - Auto-load UI by default, which can also be changed in the settings + +- Fixed `easy globalSeed` is not working +- Fixed an issue where all `seed_num` values were out of order due to [cg-use-everywhere](https://github.com/chrisgoringe/cg-use-everywhere) updating the chart in real time +- Fixed `easy imageSize`, `easy imageSizeBySide`, `easy imageSizeByLongerSide` as end nodes +- Fixed the bug that `seed_num` (random seed value) could not be read consistently in history +
+ +
+Updated at 12/14/2023 + +- `easy a1111Loader` and `easy comfyLoader` added `batch_size` of required input parameters +- Added the `easy controlnetLoaderADV` node +- `easy controlnetLoaderADV` and `easy controlnetLoader` added `control_net ` of optional input parameters +- `easy preSampling` and `easy preSamplingAdvanced` added `image_to_latent` optional input parameters +- Added the `easy imageSizeBySide` node, which can be output as a long side or a short side +
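+As a rough illustration of what `easy imageSizeBySide` outputs, the sketch below picks either the longer or the shorter side of an image size; the function name, signature, and option labels are assumptions, not the node's actual code:
+
+```python
+# Hypothetical sketch of the long-side / short-side selection performed by easy imageSizeBySide.
+def size_by_side(width: int, height: int, side: str = "Longest") -> int:
+    """Return the longer or shorter side of a width/height pair."""
+    return max(width, height) if side == "Longest" else min(width, height)
+
+print(size_by_side(1216, 832, "Longest"))   # 1216
+print(size_by_side(1216, 832, "Shortest"))  # 832
+```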
+ +
+Updated at 12/13/2023 + +- Added the `easy LLLiteLoader` node, if you have pre-installed the kohya-ss/ControlNet-LLLite-ComfyUI package, please move the model files in the models to `ComfyUI\models\controlnet\` (i.e. in the default controlnet path of comfy, please do not change the file name of the model, otherwise it will not be read). +- Modify `easy controlnetLoader` to the bottom of the loader category. +- Added size display for `easy imageSize` and `easy imageSizeByLongerSize` outputs. +
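+If you prefer to script the model-file move described above instead of doing it by hand, a minimal sketch is shown below. The source folder path and the file pattern are assumptions that depend on where the kohya-ss package was installed; the files must keep their original names:
+
+```python
+# Hypothetical sketch: move ControlNet-LLLite model files into ComfyUI's default controlnet folder.
+import shutil
+from pathlib import Path
+
+src = Path("custom_nodes/ControlNet-LLLite-ComfyUI/models")  # assumed source folder
+dst = Path("models/controlnet")                              # ComfyUI's default controlnet path
+dst.mkdir(parents=True, exist_ok=True)
+
+if src.is_dir():
+    for f in src.glob("*.safetensors"):                      # adjust the pattern to your model files
+        shutil.move(str(f), str(dst / f.name))               # keep the original file name so the loader can read it
+```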
+ +
+Updated at 12/11/2023 +- Added the `showSpentTime` node to display the time spent on image diffusion and the time spent on VAE decoding images +
+ +## The relevant node package involved + +Disclaimer: Opened source was not easy. I have a lot of respect for the contributions of these original authors. I just did some integration and optimization. + +| Nodes Name(Search Name) | Related libraries | Library-related node | +|:-------------------------------|:----------------------------------------------------------------------------|:-------------------------| +| easy setNode | [ComfyUI-extensions](https://github.com/diffus3/ComfyUI-extensions) | diffus3.SetNode | +| easy getNode | [ComfyUI-extensions](https://github.com/diffus3/ComfyUI-extensions) | diffus3.GetNode | +| easy bookmark | [rgthree-comfy](https://github.com/rgthree/rgthree-comfy) | Bookmark 🔖 | +| easy portraitMarker | [comfyui-portrait-master](https://github.com/florestefano1975/comfyui-portrait-master) | Portrait Master | +| easy LLLiteLoader | [ControlNet-LLLite-ComfyUI](https://github.com/kohya-ss/ControlNet-LLLite-ComfyUI) | LLLiteLoader | +| easy globalSeed | [ComfyUI-Inspire-Pack](https://github.com/ltdrdata/ComfyUI-Inspire-Pack) | Global Seed (Inspire) | +| easy preSamplingDynamicCFG | [sd-dynamic-thresholding](https://github.com/mcmonkeyprojects/sd-dynamic-thresholding) | DynamicThresholdingFull | +| dynamicThresholdingFull | [sd-dynamic-thresholding](https://github.com/mcmonkeyprojects/sd-dynamic-thresholding) | DynamicThresholdingFull | +| easy imageInsetCrop | [rgthree-comfy](https://github.com/rgthree/rgthree-comfy) | ImageInsetCrop | +| easy poseEditor | [ComfyUI_Custom_Nodes_AlekPet](https://github.com/AlekPet/ComfyUI_Custom_Nodes_AlekPet) | poseNode | +| easy preSamplingLayerDiffusion | [ComfyUI-layerdiffusion](https://github.com/huchenlei/ComfyUI-layerdiffusion) | LayeredDiffusionApply... | +| easy dynamiCrafterLoader | [ComfyUI-layerdiffusion](https://github.com/ExponentialML/ComfyUI_Native_DynamiCrafter) | Apply Dynamicrafter | +| easy imageChooser | [cg-image-picker](https://github.com/chrisgoringe/cg-image-picker) | Preview Chooser | +| easy styleAlignedBatchAlign | [style_aligned_comfy](https://github.com/chrisgoringe/cg-image-picker) | styleAlignedBatchAlign | +| easy kolorsLoader | [ComfyUI-Kolors-MZ](https://github.com/MinusZoneAI/ComfyUI-Kolors-MZ) | kolorsLoader | + + +## Credits + +[ComfyUI](https://github.com/comfyanonymous/ComfyUI) - Powerful and modular Stable Diffusion GUI + +[ComfyUI-ComfyUI-Manager](https://github.com/ltdrdata/ComfyUI-Manager) - ComfyUI Manager + +[tinyterraNodes](https://github.com/TinyTerra/ComfyUI_tinyterraNodes) - Pipe nodes (node bundles) allow users to reduce unnecessary connections + +[ComfyUI-extensions](https://github.com/diffus3/ComfyUI-extensions) - Diffus3 gets and sets points that allow the user to detach the composition of the workflow + +[ComfyUI-Impact-Pack](https://github.com/ltdrdata/ComfyUI-Impact-Pack) - General modpack 1 + +[ComfyUI-Inspire-Pack](https://github.com/ltdrdata/ComfyUI-Inspire-Pack) - General Modpack 2 + +[ComfyUI-ResAdapter](https://github.com/jiaxiangc/ComfyUI-ResAdapter) - Make model generation independent of training resolution + +[ComfyUI_IPAdapter_plus](https://github.com/cubiq/ComfyUI_IPAdapter_plus) - Style migration + +[ComfyUI_InstantID](https://github.com/cubiq/ComfyUI_InstantID) - Face migration + +[ComfyUI_PuLID](https://github.com/cubiq/PuLID_ComfyUI) - Face migration + +[ComfyUI-Custom-Scripts](https://github.com/pythongosssss/ComfyUI-Custom-Scripts) - pyssss🐍 + +[cg-image-picker](https://github.com/chrisgoringe/cg-image-picker) - Image Preview Chooser + 
+[ComfyUI_ExtraModels](https://github.com/city96/ComfyUI_ExtraModels) - DiT custom nodes + + +## 🌟Stargazers + +My gratitude extends to the generous souls who bestow a star. Your support is much appreciated! + +[![Stargazers repo roster for @yolain/ComfyUI-Easy-Use](https://reporoster.com/stars/yolain/ComfyUI-Easy-Use)](https://github.com/yolain/ComfyUI-Easy-Use/stargazers) diff --git a/ComfyUI-Easy-Use/README.md b/ComfyUI-Easy-Use/README.md new file mode 100644 index 0000000000000000000000000000000000000000..72f9fe0e1c90af7de1f69aa833ea01be2c652f09 --- /dev/null +++ b/ComfyUI-Easy-Use/README.md @@ -0,0 +1,448 @@ +![comfyui-easy-use](https://github.com/user-attachments/assets/9b7a5e44-f5e2-4c27-aed2-d0e6b50c46bb) + +
+Video Tutorial |
+Docs (Coming Soon) |
+Workflow Collection |
+Donation
+

+ + +
+
+**ComfyUI-Easy-Use** is an efficiency-focused custom node integration package built on top of [TinyTerraNodes](https://github.com/TinyTerra/ComfyUI_tinyterraNodes). It integrates and optimizes many popular custom node packages so that ComfyUI can be used faster and more conveniently, and while preserving the degree of freedom it restores the smooth image-production experience of Stable Diffusion.
+
+## 👨🏻‍🎨 Introduction
+
+- Inspired by [tinyterraNodes](https://github.com/TinyTerra/ComfyUI_tinyterraNodes), which greatly reduces the time cost of building workflows.
+- UI beautification: after installing for the first time, if you want to use the UI theme, switch it in Settings -> Color Palette and refresh the page.
+- Added a node for pre-sampling parameter configuration, which can be separated from the sampling node for easier previewing.
+- Wildcards and LoRAs are supported. For Lora Block Weight usage, make sure the [ComfyUI-Inspire-Pack](https://github.com/ltdrdata/ComfyUI-Inspire-Pack) custom node package is installed.
+- Multi-selectable style prompt selector. The default is the Fooocus styles JSON; custom JSON files can be placed under `styles`, and preview images can be placed in the `samples` folder (file names must match the style names, with spaces converted to underscores '_').
+- The loader enables the A1111 prompt mode, which reproduces nearly identical images to those generated by webui; [ComfyUI_smZNodes](https://github.com/shiimizu/ComfyUI_smZNodes) needs to be installed first.
+- Noise injection into the latent space can be achieved using the `easy latentNoisy` or `easy preSamplingNoiseIn` node.
+- Simplified processes for SD1.x, SD2.x, SDXL, SVD, Zero123, etc. [Example](https://github.com/yolain/ComfyUI-Easy-Use?tab=readme-ov-file#StableDiffusion)
+- Simplified Stable Cascade [Example](https://github.com/yolain/ComfyUI-Easy-Use?tab=readme-ov-file#StableCascade)
+- Simplified Layer Diffuse [Example](https://github.com/yolain/ComfyUI-Easy-Use?tab=readme-ov-file#LayerDiffusion). The first time you use it you may need to run `pip install -r requirements.txt` to install the required dependencies.
+- Simplified InstantID [Example](https://github.com/yolain/ComfyUI-Easy-Use?tab=readme-ov-file#InstantID). Make sure the [ComfyUI_InstantID](https://github.com/cubiq/ComfyUI_InstantID) custom node package is installed.
+- Extended usability of XY plot
+- Fooocus Inpaint integration
+- Integration of common logical calculations, type conversions, display of all types, etc.
+- Background removal node for BriaAI's RMBG-1.4 model, [BriaAI Guide](https://huggingface.co/briaai/RMBG-1.4)
+- Forced clearing of ComfyUI model memory usage is supported
+- Stable Diffusion 3 multi-account API nodes are supported
+- Support for the SD3 model
+- Support for the Kolors model
+- Support for the Flux model
+
+## 👨🏻‍🔧 Installation
+Clone the repo into the **custom_nodes** directory and install the requirements:
+```shell
+#1. Clone the repo
+git clone https://github.com/yolain/ComfyUI-Easy-Use
+#2. Install the requirements
+Double-click install.bat to install the required dependencies
+```
+
+## 👨🏻‍🚀 Plan
+
+- [x] Updated new front-end code for easier maintenance
+  - [x] Maintain css styles using sass
+  - [x] Optimize existing extensions
+  - [x] Add new components
+- [ ] Upload new workflows to [ComfyUI-Yolain-Workflows](https://github.com/yolain/ComfyUI-Yolain-Workflows) and translate the readme to English.
+- [ ] Write gitbook with more detailed function introdution + +## 📜 Changelog + +**v1.2.2** + +- Added v2 web frond-end code +- Added `easy fluxLoader` +- Added support for `controlnetApply` Related nodes with SD3 and hunyuanDiT + +**v1.2.1** + +- Added `easy ipadapterApplyFaceIDKolors` +- Added **inspyrenet** to `easy imageRemBg` +- Added `easy controlnetLoader++` +- Added **PLUS (kolors genernal)** and **FACEID PLUS KOLORS** preset to `easy ipadapterApply` and `easy ipadapterApplyADV` (Supported kolors ipadapter) +- Added `easy kolorsLoader` - Code based on [MinusZoneAI](https://github.com/MinusZoneAI/ComfyUI-Kolors-MZ)'s and [kijai](https://github.com/kijai/ComfyUI-KwaiKolorsWrapper)'s repo, thanks for their contribution. + +**v1.2.0** + +- Added `easy pulIDApply` and `easy pulIDApplyADV` +- Added `easy huanyuanDiTLoader` and `easy pixArtLoader` +- Added **easy sliderControl** - Slider control node, which can currently be used to control the parameters of ipadapterMS (double-click the slider to reset to default) +- Added **layer_weights** in `easy ipadapterApplyADV` + +**v1.1.9** + +- Added **gitsScheduler** +- Added `easy imageBatchToImageList` and `easy imageListToImageBatch` +- Recursive subcategories nested for models +- Support for Stable Diffusion 3 model +- Added `easy applyInpaint` - All inpainting mode in this node + +**v1.1.8** + +- Added `easy controlnetStack` +- Added `easy applyBrushNet` - [Workflow Example](https://github.com/yolain/ComfyUI-Yolain-Workflows/blob/main/workflows/2_advanced/2-4inpainting/2-4brushnet_1.1.8.json) +- Added `easy applyPowerPaint` - [Workflow Example](https://github.com/yolain/ComfyUI-Yolain-Workflows/blob/main/workflows/2_advanced/2-4inpainting/2-4powerpaint_outpaint_1.1.8.json) + +**v1.1.7** + +- Added `easy prompt` - Subject and light presets, maybe adjusted later +- Added `easy icLightApply` - Light and shadow migration, Code based on [ComfyUI-IC-Light](https://github.com/huchenlei/ComfyUI-IC-Light) +- Added `easy imageSplitGrid` +- `easy kSamplerInpainting` added options such as different diffusion and brushnet in **additional** widget +- Support for brushnet model loading - [ComfyUI-BrushNet](https://github.com/nullquant/ComfyUI-BrushNet) +- Added `easy applyFooocusInpaint` - Replace FooocusInpaintLoader +- Removed `easy fooocusInpaintLoader` + +**v1.1.6** + +- Added **alignYourSteps** to **schedulder** widget in all `easy preSampling` and `easy fullkSampler` +- Added **Preview&Choose** to **image_output** widget in `easy kSampler` & `easy fullkSampler` +- Added `easy styleAlignedBatchAlign` - Credit of [style_aligned_comfy](https://github.com/brianfitzgerald/style_aligned_comfy) +- Added `easy ckptNames` +- Added `easy controlnetNames` +- Added `easy imagesSplitimage` - Batch images split into single images +- Added `easy imageCount` - Get Image Count +- Added `easy textSwitch` - Text Switch + +**v1.1.5** + +- Rewrite `easy cleanGPUUsed` - the memory usage of the comfyUI can to be cleared +- Added `easy humanSegmentation` - Human Part Segmentation +- Added `easy imageColorMatch` +- Added `easy ipadapterApplyRegional` +- Added `easy ipadapterApplyFromParams` +- Added `easy imageInterrogator` - Image To Prompt +- Added `easy stableDiffusion3API` - Easy Stable Diffusion 3 Multiple accounts API Node + +**v1.1.4** + +- Added `easy preSamplingCustom` - Custom-PreSampling, can be supported cosXL-edit +- Added `easy ipadapterStyleComposition` +- Added the right-click menu to view checkpoints and lora information in all Loaders +- Fixed `easy 
preSamplingNoiseIn`、`easy latentNoisy`、`east Unsampler` compatible with ComfyUI Revision>=2098 [0542088e] or later + + +**v1.1.3** + +- `easy ipadapterApply` Added **COMPOSITION** preset +- Supported [ResAdapter](https://huggingface.co/jiaxiangc/res-adapter) when load ResAdapter lora +- Added `easy promptLine` +- Added `easy promptReplace` +- Added `easy promptConcat` +- `easy wildcards` Added **multiline_mode** + +
+v1.1.2 + +- Optimized some of the recommended nodes for slots related to EasyUse +- Added **Enable ContextMenu Auto Nest Subdirectories** The setting item is enabled by default, and it can be classified into subdirectories, checkpoints and loras previews +- Added `easy sv3dLoader` +- Added `easy dynamiCrafterLoader` +- Added `easy ipadapterApply` +- Added `easy ipadapterApplyADV` +- Added `easy ipadapterApplyEncoder` +- Added `easy ipadapterApplyEmbeds` +- Added `easy preMaskDetailerFix` +- Fixed `easy stylesSelector` is change the prompt when not select the style +- Fixed `easy pipeEdit` error when add lora to prompt +- Fixed layerDiffuse xyplot bug +- `easy kSamplerInpainting` add *additional* widget,you can choose 'Differential Diffusion' or 'Only InpaintModelConditioning' +
+ +
+v1.1.1 + +- The issue that the seed is 0 when a node with a seed control is added and **control before generate** is fixed for the first time run queue prompt. +- `easy preSamplingAdvanced` Added **return_with_leftover_noise** +- Fixed `easy stylesSelector` error when choose the custom file +- `easy preSamplingLayerDiffusion` Added optional input parameter for mask +- Renamed all nodes widget name named seed_num to seed +- Remove forced **control_before_generate** settings。 If you want to use control_before_generate, change widget_value_control_mode to before in system settings +- Added `easy imageRemBg` - The default is BriaAI's RMBG-1.4 model, which removes the background effect more and faster +
+ +
+v1.1.0 + +- Added `easy imageSplitList` - to split every N images +- Added `easy preSamplingDiffusionADDTL` - It can modify foreground、background or blended additional prompt +- Added `easy preSamplingNoiseIn` It can replace the `easy latentNoisy` node that needs to be fronted to achieve better noise injection +- `easy pipeEdit` Added conditioning splicing mode selection, you can choose to replace, concat, combine, average, and set timestep range +- Added `easy pipeEdit` - nodes that can edit pipes (including re-enterable prompts) +- Added `easy preSamplingLayerDiffusion` and `easy kSamplerLayerDiffusion` +- Added a convenient menu to right-click on nodes such as Loader, Presampler, Sampler, Controlnet, etc. to quickly replace nodes of the same type +- Added `easy instantIDApplyADV` can link positive and negative +- Fixed layerDiffusion error when batch size greater than 1 +- Fixed `easy wildcards` When LoRa is not filled in completely, LoRa is not automatically retrieved, resulting in failure to load LoRa +- Fixed the issue that 'BREAK' non-initiation when didn't use a1111 prompt style +- Fixed `easy instantIDApply` mask not input right +
+ +
+v1.0.9 + +- Fixed the error when ComfyUI-Impack-Pack and ComfyUI_InstantID were not installed +- Fixed `easy pipeIn` +- Added `easy instantIDApply` - you need installed [ComfyUI_InstantID](https://github.com/cubiq/ComfyUI_InstantID) fisrt, Workflow[Example](https://github.com/yolain/ComfyUI-Easy-Use/blob/main/README.en.md#InstantID) +- Fixed `easy detailerFix` not added to the list of nodes available for saving images formatting extensions +- Fixed `easy XYInputs: PromptSR` errors are reported when replacing negative prompts +
+ +
+v1.0.8 + +- `easy cascadeLoader` stage_c and stage_b support the checkpoint model (Download [checkpoints](https://huggingface.co/stabilityai/stable-cascade/tree/main/comfyui_checkpoints) models) +- `easy styleSelector` The search box is modified to be case-insensitive +- `easy fullLoader` **positive**、**negative**、**latent** added to the output items +- Fixed the issue that 'easy preSampling' and other similar node, latent could not be generated based on the batch index after passing in +- Fixed `easy svdLoader` error when the positive or negative is empty +- Fixed the error of SDXLClipModel in ComfyUI revision 2016[c2cb8e88] and above (the revision number was judged to be compatible with the old revision) +- Fixed `easy detailerFix` generation error when batch size is greater than 1 +- Optimize the code, reduce a lot of redundant code and improve the running speed +
+ +
+v1.0.7 + +- Added `easy cascadeLoader` - stable cascade Loader +- Added `easy preSamplingCascade` - stable cascade preSampling Settings +- Added `easy fullCascadeKSampler` - stable cascade stage-c ksampler full +- Added `easy cascadeKSampler` - stable cascade stage-c ksampler simple +- +- Optimize the image to image[Example](https://github.com/yolain/ComfyUI-Easy-Use/blob/main/README.en.md#image-to-image) +
+ +
+v1.0.6 + +- Added `easy XYInputs: Checkpoint` +- Added `easy XYInputs: Lora` +- `easy seed` can manually switch the random seed when increasing the fixed seed value +- Fixed `easy fullLoader` and all loaders to automatically adjust the node size when switching LoRa +- Removed the original ttn image saving logic and adapted to the default image saving format extension of ComfyUI +
+ +
+v1.0.5 + +- Added `easy isSDXL` +- Added prompt word control on `easy svdLoader`, which can be used with open_clip model +- Added **populated_text** on `easy wildcards`, wildcard populated text can be output +
+ +
+v1.0.4 + +- `easy showAnything` added support for converting other types (e.g., tensor conditions, images, etc.) +- Added `easy showLoaderSettingsNames` can display the model and VAE name in the output loader assembly +- Added `easy promptList` +- Added `easy fooocusInpaintLoader` (only the process of SDXLModel is supported) +- Added **Logic** nodes +- Added `easy imageSave` - Image saving node with date conversion and aspect and height formatting +- Added `easy joinImageBatch` +- `easy kSamplerInpainting` Added the **patch** input value to be used with the FooocusInpaintLoader node + +- Fixed xyplot error when with Pillow>9.5 +- Fixed `easy wildcards` An error is reported when running with the PS extension +- Fixed `easy XYInputs: ControlNet` Error +- Fixed `easy loraStack` error when **toggle** is disabled + + +- Changing the first-time install node package no longer automatically replaces the theme, you need to manually adjust and refresh the page +- `easy imageSave` added **only_preivew** +- Adjust the `easy latentCompositeMaskedWithCond` node +
+ +
+v1.0.3 + +- Added `easy stylesSelector` +- Added **scale_soft_weights** in `easy controlnetLoader` and `easy controlnetLoaderADV` +- Added the queue progress bar setting item, which is not enabled by default + + +- Fixed `easy XYInputs: Sampler/Scheduler` Error +- Fixed the right menu has a problem when clicking the button +- Fixed `easy comfyLoader` error +- Fixed xyPlot error when connecting to zero123 +- Fixed the error message in the loader when the prompt word was component +- Fixed `easy getNode` and `easy setNode` the title does not change when loading +- Fixed all samplers using subdirectories to store images + + +- Adjust the UI theme, divided into two sets of styles: the official default background and the dark black background, which can be switched in the color palette in the settings +- Modify the styles path to be compatible with other environments +
+ +
+v1.0.2 + +- Added `easy XYPlotAdvanced` and some nodes about `easy XYInputs` +- Added **Alt+1-Alt+9** Shortcut keys to quickly paste node presets for Node templates (corresponding to 1~9 sequences) +- Added a `📜Groups Map(EasyUse)` to the context menu. +- An `autocomplete` folder has been added, If you have [ComfyUI-Custom-Scripts](https://github.com/pythongosssss/ComfyUI-Custom-Scripts) installed, the txt files in that folder will be merged and overwritten to the autocomplete .txt file of the pyssss package at startup. + + +- Fixed XYPlot is not working when `a1111_prompt_style` is True +- Fixed UI loading failure in the new version of ComfyUI +- `easy XYInputs ModelMergeBlocks` Values can be imported from CSV files +- Fixed `easy pipeToBasicPipe` Bug + + +- Removed `easy imageRemBg` +- Remove the introductory diagram and workflow files from the package to reduce the package size +- Replaced the font file used in the generation of XY diagrams +
+ +
+v1.0.1 + +- Fixed `easy comfyLoader` error +- Fixed All nodes that contain the value of the image size +- Added `easy kSamplerInpainting` +- Added `easy pipeToBasicPipe` +- Fixed `width` and `height` can not customize in `easy svdLoader` +- Fixed all preview image path (Previously, it was not possible to preview the image on the Mac system) +- Fixed `vae_name` is not working in `easy fullLoader` and `easy a1111Loader` and `easy comfyLoader` +- Fixed `easy fullkSampler` outputs error +- Fixed `model_override` is not working in `easy fullLoader` +- Fixed `easy hiresFix` error +- Fixed `easy xyplot` font file path error +- Fixed seed that cannot be fixed when you convert `seed_num` to `easy seed` +- Fixed `easy pipeIn` inputs bug +- `easy preDetailerFix` have added a new parameter `optional_image` +- Fixed `easy zero123Loader` and `easy svdLoader` model into cache. +- Added `easy seed` +- Fixed `image_output` default value is "Preview" +- `easy fullLoader` and `easy a1111Loader` have added a new parameter `a1111_prompt_style`,that can reproduce the same image generated from stable-diffusion-webui on comfyui, but you need to install [ComfyUI_smZNodes](https://github.com/shiimizu/ComfyUI_smZNodes) to use this feature in the current version +
+ +
+v1.0.0 + +- Added `easy positive` - simple positive prompt text +- Added `easy negative` - simple negative prompt text +- Added `easy wildcards` - support for wildcards and hint text selected by Lora +- Added `easy portraitMaster` - PortraitMaster v2.2 +- Added `easy loraStack` - Lora stack +- Added `easy fullLoader` - full version of the loader +- Added `easy zero123Loader` - simple zero123 loader +- Added `easy svdLoader` - easy svd loader +- Added `easy fullkSampler` - full version of the sampler (no separation) +- Added `easy hiresFix` - support for HD repair of Pipe +- Added `easy predetailerFix` and `easy DetailerFix` - support for Pipe detail fixing +- Added `easy ultralyticsDetectorPipe` and `easy samLoaderPipe` - Detect loader (detail fixed input) +- Added `easy pipein` `easy pipeout` - Pipe input and output +- Added `easy xyPlot` - simple xyplot (more controllable parameters will be updated in the future) +- Added `easy imageRemoveBG` - image to remove background +- Added `easy imagePixelPerfect` - image pixel perfect +- Added `easy poseEditor` - Pose editor +- New UI Theme (Obsidian) - Auto-load UI by default, which can also be changed in the settings + +- Fixed `easy globalSeed` is not working +- Fixed an issue where all `seed_num` values were out of order due to [cg-use-everywhere](https://github.com/chrisgoringe/cg-use-everywhere) updating the chart in real time +- Fixed `easy imageSize`, `easy imageSizeBySide`, `easy imageSizeByLongerSide` as end nodes +- Fixed the bug that `seed_num` (random seed value) could not be read consistently in history +
+ +
+Updated at 12/14/2023 + +- `easy a1111Loader` and `easy comfyLoader` added `batch_size` of required input parameters +- Added the `easy controlnetLoaderADV` node +- `easy controlnetLoaderADV` and `easy controlnetLoader` added `control_net ` of optional input parameters +- `easy preSampling` and `easy preSamplingAdvanced` added `image_to_latent` optional input parameters +- Added the `easy imageSizeBySide` node, which can be output as a long side or a short side +
+ +
+Updated at 12/13/2023 + +- Added the `easy LLLiteLoader` node, if you have pre-installed the kohya-ss/ControlNet-LLLite-ComfyUI package, please move the model files in the models to `ComfyUI\models\controlnet\` (i.e. in the default controlnet path of comfy, please do not change the file name of the model, otherwise it will not be read). +- Modify `easy controlnetLoader` to the bottom of the loader category. +- Added size display for `easy imageSize` and `easy imageSizeByLongerSize` outputs. +
+ +
+Updated at 12/11/2023 +- Added the `showSpentTime` node to display the time spent on image diffusion and the time spent on VAE decoding images +
+ +## The relevant node package involved + +Disclaimer: Opened source was not easy. I have a lot of respect for the contributions of these original authors. I just did some integration and optimization. + +| Nodes Name(Search Name) | Related libraries | Library-related node | +|:-------------------------------|:----------------------------------------------------------------------------|:-------------------------| +| easy setNode | [ComfyUI-extensions](https://github.com/diffus3/ComfyUI-extensions) | diffus3.SetNode | +| easy getNode | [ComfyUI-extensions](https://github.com/diffus3/ComfyUI-extensions) | diffus3.GetNode | +| easy bookmark | [rgthree-comfy](https://github.com/rgthree/rgthree-comfy) | Bookmark 🔖 | +| easy portraitMarker | [comfyui-portrait-master](https://github.com/florestefano1975/comfyui-portrait-master) | Portrait Master | +| easy LLLiteLoader | [ControlNet-LLLite-ComfyUI](https://github.com/kohya-ss/ControlNet-LLLite-ComfyUI) | LLLiteLoader | +| easy globalSeed | [ComfyUI-Inspire-Pack](https://github.com/ltdrdata/ComfyUI-Inspire-Pack) | Global Seed (Inspire) | +| easy preSamplingDynamicCFG | [sd-dynamic-thresholding](https://github.com/mcmonkeyprojects/sd-dynamic-thresholding) | DynamicThresholdingFull | +| dynamicThresholdingFull | [sd-dynamic-thresholding](https://github.com/mcmonkeyprojects/sd-dynamic-thresholding) | DynamicThresholdingFull | +| easy imageInsetCrop | [rgthree-comfy](https://github.com/rgthree/rgthree-comfy) | ImageInsetCrop | +| easy poseEditor | [ComfyUI_Custom_Nodes_AlekPet](https://github.com/AlekPet/ComfyUI_Custom_Nodes_AlekPet) | poseNode | +| easy preSamplingLayerDiffusion | [ComfyUI-layerdiffusion](https://github.com/huchenlei/ComfyUI-layerdiffusion) | LayeredDiffusionApply... | +| easy dynamiCrafterLoader | [ComfyUI-layerdiffusion](https://github.com/ExponentialML/ComfyUI_Native_DynamiCrafter) | Apply Dynamicrafter | +| easy imageChooser | [cg-image-picker](https://github.com/chrisgoringe/cg-image-picker) | Preview Chooser | +| easy styleAlignedBatchAlign | [style_aligned_comfy](https://github.com/chrisgoringe/cg-image-picker) | styleAlignedBatchAlign | +| easy kolorsLoader | [ComfyUI-Kolors-MZ](https://github.com/MinusZoneAI/ComfyUI-Kolors-MZ) | kolorsLoader | + + +## Credits + +[ComfyUI](https://github.com/comfyanonymous/ComfyUI) - Powerful and modular Stable Diffusion GUI + +[ComfyUI-ComfyUI-Manager](https://github.com/ltdrdata/ComfyUI-Manager) - ComfyUI Manager + +[tinyterraNodes](https://github.com/TinyTerra/ComfyUI_tinyterraNodes) - Pipe nodes (node bundles) allow users to reduce unnecessary connections + +[ComfyUI-extensions](https://github.com/diffus3/ComfyUI-extensions) - Diffus3 gets and sets points that allow the user to detach the composition of the workflow + +[ComfyUI-Impact-Pack](https://github.com/ltdrdata/ComfyUI-Impact-Pack) - General modpack 1 + +[ComfyUI-Inspire-Pack](https://github.com/ltdrdata/ComfyUI-Inspire-Pack) - General Modpack 2 + +[ComfyUI-ResAdapter](https://github.com/jiaxiangc/ComfyUI-ResAdapter) - Make model generation independent of training resolution + +[ComfyUI_IPAdapter_plus](https://github.com/cubiq/ComfyUI_IPAdapter_plus) - Style migration + +[ComfyUI_InstantID](https://github.com/cubiq/ComfyUI_InstantID) - Face migration + +[ComfyUI_PuLID](https://github.com/cubiq/PuLID_ComfyUI) - Face migration + +[ComfyUI-Custom-Scripts](https://github.com/pythongosssss/ComfyUI-Custom-Scripts) - pyssss🐍 + +[cg-image-picker](https://github.com/chrisgoringe/cg-image-picker) - Image Preview Chooser + 
+[ComfyUI_ExtraModels](https://github.com/city96/ComfyUI_ExtraModels) - DiT custom nodes
+
+## ☕️ Donation
+
+**ComfyUI-Easy-Use** is a GPL-licensed open source project. In order to achieve better and sustainable development of the project, I hope to gain more backers.<br>
+If my custom nodes have added value to your day, consider indulging in a coffee to fuel them further!<br>
+💖You can support me in any of the following ways: + +- [BiliBili](https://space.bilibili.com/1840885116) +- [Afdian](https://afdian.com/a/yolain) +- [Wechat / Alipay](https://github.com/user-attachments/assets/803469bd-ed6a-4fab-932d-50e5088a2d03) +- 🪙 Wallet Address: + - ETH: 0x01f7CEd3245CaB3891A0ec8f528178db352EaC74 + - USDT(tron): TP3AnJXkAzfebL2GKmFAvQvXgsxzivweV6 + +(This is a newly created wallet, and if it receives sponsorship, I'll use it to rent GPUs or other GPT services for better debugging and refinement of ComfyUI-Easy-Use features.) + +## 🌟Stargazers + +My gratitude extends to the generous souls who bestow a star. Your support is much appreciated! + +[![Stargazers repo roster for @yolain/ComfyUI-Easy-Use](https://reporoster.com/stars/yolain/ComfyUI-Easy-Use)](https://github.com/yolain/ComfyUI-Easy-Use/stargazers) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/__init__.py b/ComfyUI-Easy-Use/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..99782cda5d831c6a5240ba16820ada74ddf9ef47 --- /dev/null +++ b/ComfyUI-Easy-Use/__init__.py @@ -0,0 +1,92 @@ +__version__ = "1.2.2" + +import yaml +import os +import folder_paths +import importlib +from pathlib import Path + +node_list = [ + "server", + "api", + "easyNodes", + "image", + "logic" +] + +NODE_CLASS_MAPPINGS = {} +NODE_DISPLAY_NAME_MAPPINGS = {} + +for module_name in node_list: + imported_module = importlib.import_module(".py.{}".format(module_name), __name__) + NODE_CLASS_MAPPINGS = {**NODE_CLASS_MAPPINGS, **imported_module.NODE_CLASS_MAPPINGS} + NODE_DISPLAY_NAME_MAPPINGS = {**NODE_DISPLAY_NAME_MAPPINGS, **imported_module.NODE_DISPLAY_NAME_MAPPINGS} + +cwd_path = os.path.dirname(os.path.realpath(__file__)) +comfy_path = folder_paths.base_path + +#Wildcards +from .py.libs.wildcards import read_wildcard_dict +wildcards_path = os.path.join(os.path.dirname(__file__), "wildcards") +if os.path.exists(wildcards_path): + read_wildcard_dict(wildcards_path) +else: + os.mkdir(wildcards_path) + +#Styles +styles_path = os.path.join(os.path.dirname(__file__), "styles") +samples_path = os.path.join(os.path.dirname(__file__), "styles", "samples") +if os.path.exists(styles_path): + if not os.path.exists(samples_path): + os.mkdir(samples_path) +else: + os.mkdir(styles_path) + os.mkdir(samples_path) + +# Model thumbnails +from .py.libs.add_resources import add_static_resource +from .py.libs.model import easyModelManager +model_config = easyModelManager().models_config +for model in model_config: + paths = folder_paths.get_folder_paths(model) + for path in paths: + if not Path(path).exists(): + continue + add_static_resource(path, path, limit=True) + +# get comfyui revision +from .py.libs.utils import compare_revision + +new_frontend_revision = 2546 +web_default_version = 'v2' if compare_revision(new_frontend_revision) else 'v1' +# web directory +config_path = os.path.join(cwd_path, "config.yaml") +if os.path.isfile(config_path): + with open(config_path, 'r') as f: + data = yaml.load(f, Loader=yaml.FullLoader) + if data and "WEB_VERSION" in data: + directory = f"web_version/{data['WEB_VERSION']}" + with open(config_path, 'w') as f: + yaml.dump(data, f) + elif web_default_version != 'v1': + if not data: + data = {'WEB_VERSION': web_default_version} + elif 'WEB_VERSION' not in data: + data = {**data, 'WEB_VERSION': web_default_version} + with open(config_path, 'w') as f: + yaml.dump(data, f) + directory = f"web_version/{web_default_version}" + else: + directory = f"web_version/v1" + if not 
os.path.exists(os.path.join(cwd_path, directory)): + print(f"web root {data['WEB_VERSION']} not found, using default") + directory = f"web_version/{web_default_version}" + WEB_DIRECTORY = directory +else: + directory = f"web_version/{web_default_version}" + WEB_DIRECTORY = directory + +__all__ = ['NODE_CLASS_MAPPINGS', 'NODE_DISPLAY_NAME_MAPPINGS', "WEB_DIRECTORY"] + +print(f'\033[34m[ComfyUI-Easy-Use] server: \033[0mv{__version__} \033[92mLoaded\033[0m') +print(f'\033[34m[ComfyUI-Easy-Use] web root: \033[0m{os.path.join(cwd_path, directory)} \033[92mLoaded\033[0m') diff --git a/ComfyUI-Easy-Use/config.yaml b/ComfyUI-Easy-Use/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..70b06c3224b2490073ce04446d5d518ed2b7f7a1 --- /dev/null +++ b/ComfyUI-Easy-Use/config.yaml @@ -0,0 +1,5 @@ +STABILITY_API_DEFAULT: 0 +STABILITY_API_KEY: +- key: '' + name: Default +WEB_VERSION: v2 diff --git a/ComfyUI-Easy-Use/install.bat b/ComfyUI-Easy-Use/install.bat new file mode 100644 index 0000000000000000000000000000000000000000..5789ff049221ddaa358b61712743c6863f88b313 --- /dev/null +++ b/ComfyUI-Easy-Use/install.bat @@ -0,0 +1,18 @@ +@echo off + +set "requirements_txt=%~dp0\requirements.txt" +set "requirements_repair_txt=%~dp0\repair_dependency_list.txt" +set "python_exec=..\..\..\python_embedded\python.exe" +set "aki_python_exec=..\..\python\python.exe" + +echo Installing EasyUse Requirements... + +if exist "%python_exec%" ( + echo Installing with ComfyUI Portable + "%python_exec%" -s -m pip install -r "%requirements_txt%" +)^ +else ( + echo Installing with Python + pip install -r "%requirements_txt%" +) +pause diff --git a/ComfyUI-Easy-Use/prestartup_script.py b/ComfyUI-Easy-Use/prestartup_script.py new file mode 100644 index 0000000000000000000000000000000000000000..b6f7d50e1c9977809ffe44df4c382f1376987ce9 --- /dev/null +++ b/ComfyUI-Easy-Use/prestartup_script.py @@ -0,0 +1,37 @@ +import folder_paths +import os +def add_folder_path_and_extensions(folder_name, full_folder_paths, extensions): + for full_folder_path in full_folder_paths: + folder_paths.add_model_folder_path(folder_name, full_folder_path) + if folder_name in folder_paths.folder_names_and_paths: + current_paths, current_extensions = folder_paths.folder_names_and_paths[folder_name] + updated_extensions = current_extensions | extensions + folder_paths.folder_names_and_paths[folder_name] = (current_paths, updated_extensions) + else: + folder_paths.folder_names_and_paths[folder_name] = (full_folder_paths, extensions) + +image_suffixs = set([".jpg", ".jpeg", ".png", ".gif", ".webp", ".bmp", ".tiff", ".svg", ".ico", ".apng", ".tif", ".hdr", ".exr"]) + +model_path = folder_paths.models_dir +add_folder_path_and_extensions("ultralytics_bbox", [os.path.join(model_path, "ultralytics", "bbox")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("ultralytics_segm", [os.path.join(model_path, "ultralytics", "segm")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("ultralytics", [os.path.join(model_path, "ultralytics")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("mmdets_bbox", [os.path.join(model_path, "mmdets", "bbox")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("mmdets_segm", [os.path.join(model_path, "mmdets", "segm")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("mmdets", [os.path.join(model_path, "mmdets")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("sams", 
[os.path.join(model_path, "sams")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("onnx", [os.path.join(model_path, "onnx")], {'.onnx'}) +add_folder_path_and_extensions("instantid", [os.path.join(model_path, "instantid")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("pulid", [os.path.join(model_path, "pulid")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("layer_model", [os.path.join(model_path, "layer_model")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("rembg", [os.path.join(model_path, "rembg")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("ipadapter", [os.path.join(model_path, "ipadapter")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("dynamicrafter_models", [os.path.join(model_path, "dynamicrafter_models")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("mediapipe", [os.path.join(model_path, "mediapipe")], set(['.tflite','.pth'])) +add_folder_path_and_extensions("inpaint", [os.path.join(model_path, "inpaint")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("prompt_generator", [os.path.join(model_path, "prompt_generator")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("t5", [os.path.join(model_path, "t5")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("llm", [os.path.join(model_path, "LLM")], folder_paths.supported_pt_extensions) + +add_folder_path_and_extensions("checkpoints_thumb", [os.path.join(model_path, "checkpoints")], image_suffixs) +add_folder_path_and_extensions("loras_thumb", [os.path.join(model_path, "loras")], image_suffixs) diff --git a/ComfyUI-Easy-Use/py/__init__.py b/ComfyUI-Easy-Use/py/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI-Easy-Use/py/api.py b/ComfyUI-Easy-Use/py/api.py new file mode 100644 index 0000000000000000000000000000000000000000..fae60d5025cc6d6d825d330cc9c1c82d3f5f24fb --- /dev/null +++ b/ComfyUI-Easy-Use/py/api.py @@ -0,0 +1,293 @@ +import os +import hashlib +import sys +import json +import shutil +import folder_paths +from folder_paths import get_directory_by_type +from server import PromptServer +from .config import RESOURCES_DIR, FOOOCUS_STYLES_DIR, FOOOCUS_STYLES_SAMPLES +from .libs.model import easyModelManager +from .libs.utils import getMetadata, cleanGPUUsedForce, get_local_filepath +from .libs.cache import remove_cache +from .libs.translate import has_chinese, zh_to_en + +try: + import aiohttp + from aiohttp import web +except ImportError: + print("Module 'aiohttp' not installed. 
Please install it via:") + print("pip install aiohttp") + sys.exit() + +@PromptServer.instance.routes.post("/easyuse/cleangpu") +def cleanGPU(request): + try: + cleanGPUUsedForce() + remove_cache('*') + return web.Response(status=200) + except Exception as e: + return web.Response(status=500) + pass + +@PromptServer.instance.routes.post("/easyuse/translate") +async def translate(request): + post = await request.post() + text = post.get("text") + if has_chinese(text): + return web.json_response({"text": zh_to_en([text])[0]}) + else: + return web.json_response({"text": text}) + +@PromptServer.instance.routes.get("/easyuse/reboot") +def reboot(request): + try: + sys.stdout.close_log() + except Exception as e: + pass + + return os.execv(sys.executable, [sys.executable] + sys.argv) + +# parse csv +@PromptServer.instance.routes.post("/easyuse/upload/csv") +async def parse_csv(request): + post = await request.post() + csv = post.get("csv") + if csv and csv.file: + file = csv.file + text = '' + for line in file.readlines(): + line = str(line.strip()) + line = line.replace("'", "").replace("b",'') + text += line + '; \n' + return web.json_response(text) + +#get style list +@PromptServer.instance.routes.get("/easyuse/prompt/styles") +async def getStylesList(request): + if "name" in request.rel_url.query: + name = request.rel_url.query["name"] + if name == 'fooocus_styles': + file = os.path.join(RESOURCES_DIR, name+'.json') + cn_file = os.path.join(RESOURCES_DIR, name + '_cn.json') + else: + file = os.path.join(FOOOCUS_STYLES_DIR, name+'.json') + cn_file = os.path.join(FOOOCUS_STYLES_DIR, name + '_cn.json') + cn_data = None + if os.path.isfile(cn_file): + f = open(cn_file, 'r', encoding='utf-8') + cn_data = json.load(f) + f.close() + if os.path.isfile(file): + f = open(file, 'r', encoding='utf-8') + data = json.load(f) + f.close() + if data: + ndata = [] + for d in data: + nd = {} + name = d['name'].replace('-', ' ') + words = name.split(' ') + key = ' '.join( + word.upper() if word.lower() in ['mre', 'sai', '3d'] else word.capitalize() for word in + words) + img_name = '_'.join(words).lower() + if "name_cn" in d: + nd['name_cn'] = d['name_cn'] + elif cn_data: + nd['name_cn'] = cn_data[key] if key in cn_data else key + nd["name"] = d['name'] + nd['imgName'] = img_name + if "prompt" in d: + nd['prompt'] = d['prompt'] + if "negative_prompt" in d: + nd['negative_prompt'] = d['negative_prompt'] + ndata.append(nd) + return web.json_response(ndata) + return web.Response(status=400) + +# get style preview image +@PromptServer.instance.routes.get("/easyuse/prompt/styles/image") +async def getStylesImage(request): + styles_name = request.rel_url.query["styles_name"] if "styles_name" in request.rel_url.query else None + if "name" in request.rel_url.query: + name = request.rel_url.query["name"] + if os.path.exists(os.path.join(FOOOCUS_STYLES_DIR, 'samples')): + file = os.path.join(FOOOCUS_STYLES_DIR, 'samples', name + '.jpg') + if os.path.isfile(file): + return web.FileResponse(file) + elif styles_name == 'fooocus_styles': + return web.Response(text=FOOOCUS_STYLES_SAMPLES + name + '.jpg') + elif styles_name == 'fooocus_styles': + return web.Response(text=FOOOCUS_STYLES_SAMPLES + name + '.jpg') + return web.Response(status=400) + +# get models lists +@PromptServer.instance.routes.get("/easyuse/models/list") +async def getModelsList(request): + if "type" in request.rel_url.query: + type = request.rel_url.query["type"] + if type not in ['checkpoints', 'loras']: + return web.Response(status=400) + manager = 
easyModelManager() + return web.json_response(manager.get_model_lists(type)) + else: + return web.Response(status=400) + +# get models thumbnails +@PromptServer.instance.routes.get("/easyuse/models/thumbnail") +async def getModelsThumbnail(request): + limit = 500 + if "limit" in request.rel_url.query: + limit = request.rel_url.query.get("limit") + limit = int(limit) + checkpoints = folder_paths.get_filename_list("checkpoints_thumb") + loras = folder_paths.get_filename_list("loras_thumb") + checkpoints_full = [] + loras_full = [] + if len(checkpoints) + len(loras) >= limit: + return web.Response(status=400) + for index, i in enumerate(checkpoints): + full_path = folder_paths.get_full_path('checkpoints_thumb', str(i)) + if full_path: + checkpoints_full.append(full_path) + for index, i in enumerate(loras): + full_path = folder_paths.get_full_path('loras_thumb', str(i)) + if full_path: + loras_full.append(full_path) + return web.json_response(checkpoints_full + loras_full) + +@PromptServer.instance.routes.post("/easyuse/metadata/notes/{name}") +async def save_notes(request): + name = request.match_info["name"] + pos = name.index("/") + type = name[0:pos] + name = name[pos+1:] + + file_path = None + if type == "embeddings" or type == "loras": + name = name.lower() + files = folder_paths.get_filename_list(type) + for f in files: + lower_f = f.lower() + if lower_f == name: + file_path = folder_paths.get_full_path(type, f) + else: + n = os.path.splitext(f)[0].lower() + if n == name: + file_path = folder_paths.get_full_path(type, f) + + if file_path is not None: + break + else: + file_path = folder_paths.get_full_path( + type, name) + if not file_path: + return web.Response(status=404) + + file_no_ext = os.path.splitext(file_path)[0] + info_file = file_no_ext + ".txt" + with open(info_file, "w") as f: + f.write(await request.text()) + + return web.Response(status=200) + +@PromptServer.instance.routes.get("/easyuse/metadata/{name}") +async def load_metadata(request): + name = request.match_info["name"] + pos = name.index("/") + type = name[0:pos] + name = name[pos+1:] + + file_path = None + if type == "embeddings": + name = name.lower() + files = folder_paths.get_filename_list(type) + for f in files: + lower_f = f.lower() + if lower_f == name: + file_path = folder_paths.get_full_path(type, f) + else: + n = os.path.splitext(f)[0].lower() + if n == name: + file_path = folder_paths.get_full_path(type, f) + + if file_path is not None: + break + else: + file_path = folder_paths.get_full_path(type, name) + if not file_path: + return web.Response(status=404) + + try: + header = getMetadata(file_path) + header_json = json.loads(header) + meta = header_json["__metadata__"] if "__metadata__" in header_json else None + except: + meta = None + + if meta is None: + meta = {} + + file_no_ext = os.path.splitext(file_path)[0] + + info_file = file_no_ext + ".txt" + if os.path.isfile(info_file): + with open(info_file, "r") as f: + meta["easyuse.notes"] = f.read() + + hash_file = file_no_ext + ".sha256" + if os.path.isfile(hash_file): + with open(hash_file, "rt") as f: + meta["easyuse.sha256"] = f.read() + else: + with open(file_path, "rb") as f: + meta["easyuse.sha256"] = hashlib.sha256(f.read()).hexdigest() + with open(hash_file, "wt") as f: + f.write(meta["easyuse.sha256"]) + + return web.json_response(meta) + +@PromptServer.instance.routes.post("/easyuse/save/{name}") +async def save_preview(request): + name = request.match_info["name"] + pos = name.index("/") + type = name[0:pos] + name = name[pos+1:] + + body = 
await request.json() + + dir = get_directory_by_type(body.get("type", "output")) + subfolder = body.get("subfolder", "") + full_output_folder = os.path.join(dir, os.path.normpath(subfolder)) + + if os.path.commonpath((dir, os.path.abspath(full_output_folder))) != dir: + return web.Response(status=400) + + filepath = os.path.join(full_output_folder, body.get("filename", "")) + image_path = folder_paths.get_full_path(type, name) + image_path = os.path.splitext( + image_path)[0] + os.path.splitext(filepath)[1] + + shutil.copyfile(filepath, image_path) + + return web.json_response({ + "image": type + "/" + os.path.basename(image_path) + }) + +@PromptServer.instance.routes.post("/easyuse/model/download") +async def download_model(request): + post = await request.post() + url = post.get("url") + local_dir = post.get("local_dir") + if local_dir not in ['checkpoints', 'loras', 'controlnet', 'onnx', 'instantid', 'ipadapter', 'dynamicrafter_models', 'mediapipe', 'rembg', 'layer_model']: + return web.Response(status=400) + local_path = os.path.join(folder_paths.models_dir, local_dir) + try: + get_local_filepath(url, local_path) + return web.Response(status=200) + except: + return web.Response(status=500) + +NODE_CLASS_MAPPINGS = {} +NODE_DISPLAY_NAME_MAPPINGS = {} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/bitsandbytes_NF4/__init__.py b/ComfyUI-Easy-Use/py/bitsandbytes_NF4/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6a402c2b1de78595d639702300024c7ab8e824be --- /dev/null +++ b/ComfyUI-Easy-Use/py/bitsandbytes_NF4/__init__.py @@ -0,0 +1,167 @@ +#credit to comfyanonymous for this module +#from https://github.com/comfyanonymous/ComfyUI_bitsandbytes_NF4 +import comfy.ops +import torch +import folder_paths +from ..libs.utils import install_package + +try: + from bitsandbytes.nn.modules import Params4bit, QuantState +except ImportError: + Params4bit = torch.nn.Parameter + raise ImportError("Please install bitsandbytes>=0.43.3") + +def functional_linear_4bits(x, weight, bias): + try: + install_package("bitsandbytes", "0.43.3", True, "0.43.3") + import bitsandbytes as bnb + except ImportError: + raise ImportError("Please install bitsandbytes>=0.43.3") + + out = bnb.matmul_4bit(x, weight.t(), bias=bias, quant_state=weight.quant_state) + out = out.to(x) + return out + + +def copy_quant_state(state, device: torch.device = None): + if state is None: + return None + + device = device or state.absmax.device + + state2 = ( + QuantState( + absmax=state.state2.absmax.to(device), + shape=state.state2.shape, + code=state.state2.code.to(device), + blocksize=state.state2.blocksize, + quant_type=state.state2.quant_type, + dtype=state.state2.dtype, + ) + if state.nested + else None + ) + + return QuantState( + absmax=state.absmax.to(device), + shape=state.shape, + code=state.code.to(device), + blocksize=state.blocksize, + quant_type=state.quant_type, + dtype=state.dtype, + offset=state.offset.to(device) if state.nested else None, + state2=state2, + ) + + +class ForgeParams4bit(Params4bit): + + def to(self, *args, **kwargs): + device, dtype, non_blocking, convert_to_format = torch._C._nn._parse_to(*args, **kwargs) + if device is not None and device.type == "cuda" and not self.bnb_quantized: + return self._quantize(device) + else: + n = ForgeParams4bit( + torch.nn.Parameter.to(self, device=device, dtype=dtype, non_blocking=non_blocking), + requires_grad=self.requires_grad, + quant_state=copy_quant_state(self.quant_state, device), + blocksize=self.blocksize, + 
compress_statistics=self.compress_statistics, + quant_type=self.quant_type, + quant_storage=self.quant_storage, + bnb_quantized=self.bnb_quantized, + module=self.module + ) + self.module.quant_state = n.quant_state + self.data = n.data + self.quant_state = n.quant_state + return n + +class ForgeLoader4Bit(torch.nn.Module): + def __init__(self, *, device, dtype, quant_type, **kwargs): + super().__init__() + self.dummy = torch.nn.Parameter(torch.empty(1, device=device, dtype=dtype)) + self.weight = None + self.quant_state = None + self.bias = None + self.quant_type = quant_type + + def _save_to_state_dict(self, destination, prefix, keep_vars): + super()._save_to_state_dict(destination, prefix, keep_vars) + quant_state = getattr(self.weight, "quant_state", None) + if quant_state is not None: + for k, v in quant_state.as_dict(packed=True).items(): + destination[prefix + "weight." + k] = v if keep_vars else v.detach() + return + + def _load_from_state_dict(self, state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs): + quant_state_keys = {k[len(prefix + "weight."):] for k in state_dict.keys() if k.startswith(prefix + "weight.")} + + if any('bitsandbytes' in k for k in quant_state_keys): + quant_state_dict = {k: state_dict[prefix + "weight." + k] for k in quant_state_keys} + + self.weight = ForgeParams4bit().from_prequantized( + data=state_dict[prefix + 'weight'], + quantized_stats=quant_state_dict, + requires_grad=False, + device=self.dummy.device, + module=self + ) + self.quant_state = self.weight.quant_state + + if prefix + 'bias' in state_dict: + self.bias = torch.nn.Parameter(state_dict[prefix + 'bias'].to(self.dummy)) + + del self.dummy + elif hasattr(self, 'dummy'): + if prefix + 'weight' in state_dict: + self.weight = ForgeParams4bit( + state_dict[prefix + 'weight'].to(self.dummy), + requires_grad=False, + compress_statistics=True, + quant_type=self.quant_type, + quant_storage=torch.uint8, + module=self, + ) + self.quant_state = self.weight.quant_state + + if prefix + 'bias' in state_dict: + self.bias = torch.nn.Parameter(state_dict[prefix + 'bias'].to(self.dummy)) + + del self.dummy + else: + super()._load_from_state_dict(state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs) + +current_device = None +current_dtype = None +current_manual_cast_enabled = False +current_bnb_dtype = None + +class OPS(comfy.ops.manual_cast): + class Linear(ForgeLoader4Bit): + def __init__(self, *args, device=None, dtype=None, **kwargs): + super().__init__(device=device, dtype=dtype, quant_type=current_bnb_dtype) + self.parameters_manual_cast = current_manual_cast_enabled + + def forward(self, x): + self.weight.quant_state = self.quant_state + + if self.bias is not None and self.bias.dtype != x.dtype: + # Maybe this can also be set to all non-bnb ops since the cost is very low. + # And it only invokes one time, and most linear does not have bias + self.bias.data = self.bias.data.to(x.dtype) + + if not self.parameters_manual_cast: + return functional_linear_4bits(x, self.weight, self.bias) + elif not self.weight.bnb_quantized: + assert x.device.type == 'cuda', 'BNB Must Use CUDA as Computation Device!' 
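+                # Weight is still unquantized on the manual-cast path: quantize it
+                # in place on the CUDA compute device, run the 4-bit matmul, then
+                # move the (now quantized) weight back to its original device.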
+ layer_original_device = self.weight.device + self.weight = self.weight._quantize(x.device) + bias = self.bias.to(x.device) if self.bias is not None else None + out = functional_linear_4bits(x, self.weight, bias) + self.weight = self.weight.to(layer_original_device) + return out + else: + weight, bias, signal = weights_manual_cast(self, x, skip_weight_dtype=True, skip_bias_dtype=True) + with main_stream_worker(weight, bias, signal): + return functional_linear_4bits(x, weight, bias) diff --git a/ComfyUI-Easy-Use/py/briaai/rembg.py b/ComfyUI-Easy-Use/py/briaai/rembg.py new file mode 100644 index 0000000000000000000000000000000000000000..4689d1494f0be21909a82171ec091462f078738c --- /dev/null +++ b/ComfyUI-Easy-Use/py/briaai/rembg.py @@ -0,0 +1,475 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from torchvision.transforms.functional import normalize +import numpy as np + + +class REBNCONV(nn.Module): + def __init__(self,in_ch=3,out_ch=3,dirate=1,stride=1): + super(REBNCONV,self).__init__() + + self.conv_s1 = nn.Conv2d(in_ch,out_ch,3,padding=1*dirate,dilation=1*dirate,stride=stride) + self.bn_s1 = nn.BatchNorm2d(out_ch) + self.relu_s1 = nn.ReLU(inplace=True) + + def forward(self,x): + + hx = x + xout = self.relu_s1(self.bn_s1(self.conv_s1(hx))) + + return xout + +## upsample tensor 'src' to have the same spatial size with tensor 'tar' +def _upsample_like(src,tar): + + src = F.interpolate(src,size=tar.shape[2:],mode='bilinear') + + return src + + +### RSU-7 ### +class RSU7(nn.Module): + + def __init__(self, in_ch=3, mid_ch=12, out_ch=3, img_size=512): + super(RSU7,self).__init__() + + self.in_ch = in_ch + self.mid_ch = mid_ch + self.out_ch = out_ch + + self.rebnconvin = REBNCONV(in_ch,out_ch,dirate=1) ## 1 -> 1/2 + + self.rebnconv1 = REBNCONV(out_ch,mid_ch,dirate=1) + self.pool1 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv2 = REBNCONV(mid_ch,mid_ch,dirate=1) + self.pool2 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv3 = REBNCONV(mid_ch,mid_ch,dirate=1) + self.pool3 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv4 = REBNCONV(mid_ch,mid_ch,dirate=1) + self.pool4 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv5 = REBNCONV(mid_ch,mid_ch,dirate=1) + self.pool5 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv6 = REBNCONV(mid_ch,mid_ch,dirate=1) + + self.rebnconv7 = REBNCONV(mid_ch,mid_ch,dirate=2) + + self.rebnconv6d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv5d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv4d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv3d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv2d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv1d = REBNCONV(mid_ch*2,out_ch,dirate=1) + + def forward(self,x): + b, c, h, w = x.shape + + hx = x + hxin = self.rebnconvin(hx) + + hx1 = self.rebnconv1(hxin) + hx = self.pool1(hx1) + + hx2 = self.rebnconv2(hx) + hx = self.pool2(hx2) + + hx3 = self.rebnconv3(hx) + hx = self.pool3(hx3) + + hx4 = self.rebnconv4(hx) + hx = self.pool4(hx4) + + hx5 = self.rebnconv5(hx) + hx = self.pool5(hx5) + + hx6 = self.rebnconv6(hx) + + hx7 = self.rebnconv7(hx6) + + hx6d = self.rebnconv6d(torch.cat((hx7,hx6),1)) + hx6dup = _upsample_like(hx6d,hx5) + + hx5d = self.rebnconv5d(torch.cat((hx6dup,hx5),1)) + hx5dup = _upsample_like(hx5d,hx4) + + hx4d = self.rebnconv4d(torch.cat((hx5dup,hx4),1)) + hx4dup = _upsample_like(hx4d,hx3) + + hx3d = self.rebnconv3d(torch.cat((hx4dup,hx3),1)) + hx3dup = _upsample_like(hx3d,hx2) + + hx2d = 
self.rebnconv2d(torch.cat((hx3dup,hx2),1)) + hx2dup = _upsample_like(hx2d,hx1) + + hx1d = self.rebnconv1d(torch.cat((hx2dup,hx1),1)) + + return hx1d + hxin + + +### RSU-6 ### +class RSU6(nn.Module): + + def __init__(self, in_ch=3, mid_ch=12, out_ch=3): + super(RSU6,self).__init__() + + self.rebnconvin = REBNCONV(in_ch,out_ch,dirate=1) + + self.rebnconv1 = REBNCONV(out_ch,mid_ch,dirate=1) + self.pool1 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv2 = REBNCONV(mid_ch,mid_ch,dirate=1) + self.pool2 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv3 = REBNCONV(mid_ch,mid_ch,dirate=1) + self.pool3 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv4 = REBNCONV(mid_ch,mid_ch,dirate=1) + self.pool4 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv5 = REBNCONV(mid_ch,mid_ch,dirate=1) + + self.rebnconv6 = REBNCONV(mid_ch,mid_ch,dirate=2) + + self.rebnconv5d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv4d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv3d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv2d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv1d = REBNCONV(mid_ch*2,out_ch,dirate=1) + + def forward(self,x): + + hx = x + + hxin = self.rebnconvin(hx) + + hx1 = self.rebnconv1(hxin) + hx = self.pool1(hx1) + + hx2 = self.rebnconv2(hx) + hx = self.pool2(hx2) + + hx3 = self.rebnconv3(hx) + hx = self.pool3(hx3) + + hx4 = self.rebnconv4(hx) + hx = self.pool4(hx4) + + hx5 = self.rebnconv5(hx) + + hx6 = self.rebnconv6(hx5) + + + hx5d = self.rebnconv5d(torch.cat((hx6,hx5),1)) + hx5dup = _upsample_like(hx5d,hx4) + + hx4d = self.rebnconv4d(torch.cat((hx5dup,hx4),1)) + hx4dup = _upsample_like(hx4d,hx3) + + hx3d = self.rebnconv3d(torch.cat((hx4dup,hx3),1)) + hx3dup = _upsample_like(hx3d,hx2) + + hx2d = self.rebnconv2d(torch.cat((hx3dup,hx2),1)) + hx2dup = _upsample_like(hx2d,hx1) + + hx1d = self.rebnconv1d(torch.cat((hx2dup,hx1),1)) + + return hx1d + hxin + +### RSU-5 ### +class RSU5(nn.Module): + + def __init__(self, in_ch=3, mid_ch=12, out_ch=3): + super(RSU5,self).__init__() + + self.rebnconvin = REBNCONV(in_ch,out_ch,dirate=1) + + self.rebnconv1 = REBNCONV(out_ch,mid_ch,dirate=1) + self.pool1 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv2 = REBNCONV(mid_ch,mid_ch,dirate=1) + self.pool2 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv3 = REBNCONV(mid_ch,mid_ch,dirate=1) + self.pool3 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv4 = REBNCONV(mid_ch,mid_ch,dirate=1) + + self.rebnconv5 = REBNCONV(mid_ch,mid_ch,dirate=2) + + self.rebnconv4d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv3d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv2d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv1d = REBNCONV(mid_ch*2,out_ch,dirate=1) + + def forward(self,x): + + hx = x + + hxin = self.rebnconvin(hx) + + hx1 = self.rebnconv1(hxin) + hx = self.pool1(hx1) + + hx2 = self.rebnconv2(hx) + hx = self.pool2(hx2) + + hx3 = self.rebnconv3(hx) + hx = self.pool3(hx3) + + hx4 = self.rebnconv4(hx) + + hx5 = self.rebnconv5(hx4) + + hx4d = self.rebnconv4d(torch.cat((hx5,hx4),1)) + hx4dup = _upsample_like(hx4d,hx3) + + hx3d = self.rebnconv3d(torch.cat((hx4dup,hx3),1)) + hx3dup = _upsample_like(hx3d,hx2) + + hx2d = self.rebnconv2d(torch.cat((hx3dup,hx2),1)) + hx2dup = _upsample_like(hx2d,hx1) + + hx1d = self.rebnconv1d(torch.cat((hx2dup,hx1),1)) + + return hx1d + hxin + +### RSU-4 ### +class RSU4(nn.Module): + + def __init__(self, in_ch=3, mid_ch=12, out_ch=3): + super(RSU4,self).__init__() + + self.rebnconvin = 
REBNCONV(in_ch,out_ch,dirate=1) + + self.rebnconv1 = REBNCONV(out_ch,mid_ch,dirate=1) + self.pool1 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv2 = REBNCONV(mid_ch,mid_ch,dirate=1) + self.pool2 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv3 = REBNCONV(mid_ch,mid_ch,dirate=1) + + self.rebnconv4 = REBNCONV(mid_ch,mid_ch,dirate=2) + + self.rebnconv3d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv2d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv1d = REBNCONV(mid_ch*2,out_ch,dirate=1) + + def forward(self,x): + + hx = x + + hxin = self.rebnconvin(hx) + + hx1 = self.rebnconv1(hxin) + hx = self.pool1(hx1) + + hx2 = self.rebnconv2(hx) + hx = self.pool2(hx2) + + hx3 = self.rebnconv3(hx) + + hx4 = self.rebnconv4(hx3) + + hx3d = self.rebnconv3d(torch.cat((hx4,hx3),1)) + hx3dup = _upsample_like(hx3d,hx2) + + hx2d = self.rebnconv2d(torch.cat((hx3dup,hx2),1)) + hx2dup = _upsample_like(hx2d,hx1) + + hx1d = self.rebnconv1d(torch.cat((hx2dup,hx1),1)) + + return hx1d + hxin + +### RSU-4F ### +class RSU4F(nn.Module): + + def __init__(self, in_ch=3, mid_ch=12, out_ch=3): + super(RSU4F,self).__init__() + + self.rebnconvin = REBNCONV(in_ch,out_ch,dirate=1) + + self.rebnconv1 = REBNCONV(out_ch,mid_ch,dirate=1) + self.rebnconv2 = REBNCONV(mid_ch,mid_ch,dirate=2) + self.rebnconv3 = REBNCONV(mid_ch,mid_ch,dirate=4) + + self.rebnconv4 = REBNCONV(mid_ch,mid_ch,dirate=8) + + self.rebnconv3d = REBNCONV(mid_ch*2,mid_ch,dirate=4) + self.rebnconv2d = REBNCONV(mid_ch*2,mid_ch,dirate=2) + self.rebnconv1d = REBNCONV(mid_ch*2,out_ch,dirate=1) + + def forward(self,x): + + hx = x + + hxin = self.rebnconvin(hx) + + hx1 = self.rebnconv1(hxin) + hx2 = self.rebnconv2(hx1) + hx3 = self.rebnconv3(hx2) + + hx4 = self.rebnconv4(hx3) + + hx3d = self.rebnconv3d(torch.cat((hx4,hx3),1)) + hx2d = self.rebnconv2d(torch.cat((hx3d,hx2),1)) + hx1d = self.rebnconv1d(torch.cat((hx2d,hx1),1)) + + return hx1d + hxin + + +class myrebnconv(nn.Module): + def __init__(self, in_ch=3, + out_ch=1, + kernel_size=3, + stride=1, + padding=1, + dilation=1, + groups=1): + super(myrebnconv,self).__init__() + + self.conv = nn.Conv2d(in_ch, + out_ch, + kernel_size=kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + groups=groups) + self.bn = nn.BatchNorm2d(out_ch) + self.rl = nn.ReLU(inplace=True) + + def forward(self,x): + return self.rl(self.bn(self.conv(x))) + +def preprocess_image(im, model_input_size: list) -> torch.Tensor: + # im = im.resize(model_input_size, Image.BILINEAR) + im_np = np.array(im) + im_tensor = torch.tensor(im_np, dtype=torch.float32).permute(2,0,1) + im_tensor = F.interpolate(torch.unsqueeze(im_tensor,0), size=model_input_size, mode='bilinear').type(torch.uint8) + image = torch.divide(im_tensor,255.0) + image = normalize(image,[0.5,0.5,0.5],[1.0,1.0,1.0]) + return image + +def postprocess_image(result: torch.Tensor, im_size: list)-> np.ndarray: + result = torch.squeeze(F.interpolate(result, size=im_size, mode='bilinear') ,0) + ma = torch.max(result) + mi = torch.min(result) + result = (result-mi)/(ma-mi) + im_array = (result*255).permute(1,2,0).cpu().data.numpy().astype(np.uint8) + im_array = np.squeeze(im_array) + return im_array + +class BriaRMBG(nn.Module): + + def __init__(self, config:dict={"in_ch":3,"out_ch":1}): + super(BriaRMBG,self).__init__() + in_ch = config["in_ch"] + out_ch = config["out_ch"] + self.conv_in = nn.Conv2d(in_ch,64,3,stride=2,padding=1) + self.pool_in = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.stage1 = RSU7(64,32,64) + self.pool12 = 
nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.stage2 = RSU6(64,32,128) + self.pool23 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.stage3 = RSU5(128,64,256) + self.pool34 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.stage4 = RSU4(256,128,512) + self.pool45 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.stage5 = RSU4F(512,256,512) + self.pool56 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.stage6 = RSU4F(512,256,512) + + # decoder + self.stage5d = RSU4F(1024,256,512) + self.stage4d = RSU4(1024,128,256) + self.stage3d = RSU5(512,64,128) + self.stage2d = RSU6(256,32,64) + self.stage1d = RSU7(128,16,64) + + self.side1 = nn.Conv2d(64,out_ch,3,padding=1) + self.side2 = nn.Conv2d(64,out_ch,3,padding=1) + self.side3 = nn.Conv2d(128,out_ch,3,padding=1) + self.side4 = nn.Conv2d(256,out_ch,3,padding=1) + self.side5 = nn.Conv2d(512,out_ch,3,padding=1) + self.side6 = nn.Conv2d(512,out_ch,3,padding=1) + + # self.outconv = nn.Conv2d(6*out_ch,out_ch,1) + + def forward(self,x): + + hx = x + + hxin = self.conv_in(hx) + #hx = self.pool_in(hxin) + + #stage 1 + hx1 = self.stage1(hxin) + hx = self.pool12(hx1) + + #stage 2 + hx2 = self.stage2(hx) + hx = self.pool23(hx2) + + #stage 3 + hx3 = self.stage3(hx) + hx = self.pool34(hx3) + + #stage 4 + hx4 = self.stage4(hx) + hx = self.pool45(hx4) + + #stage 5 + hx5 = self.stage5(hx) + hx = self.pool56(hx5) + + #stage 6 + hx6 = self.stage6(hx) + hx6up = _upsample_like(hx6,hx5) + + #-------------------- decoder -------------------- + hx5d = self.stage5d(torch.cat((hx6up,hx5),1)) + hx5dup = _upsample_like(hx5d,hx4) + + hx4d = self.stage4d(torch.cat((hx5dup,hx4),1)) + hx4dup = _upsample_like(hx4d,hx3) + + hx3d = self.stage3d(torch.cat((hx4dup,hx3),1)) + hx3dup = _upsample_like(hx3d,hx2) + + hx2d = self.stage2d(torch.cat((hx3dup,hx2),1)) + hx2dup = _upsample_like(hx2d,hx1) + + hx1d = self.stage1d(torch.cat((hx2dup,hx1),1)) + + + #side output + d1 = self.side1(hx1d) + d1 = _upsample_like(d1,x) + + d2 = self.side2(hx2d) + d2 = _upsample_like(d2,x) + + d3 = self.side3(hx3d) + d3 = _upsample_like(d3,x) + + d4 = self.side4(hx4d) + d4 = _upsample_like(d4,x) + + d5 = self.side5(hx5d) + d5 = _upsample_like(d5,x) + + d6 = self.side6(hx6) + d6 = _upsample_like(d6,x) + + return [F.sigmoid(d1), F.sigmoid(d2), F.sigmoid(d3), F.sigmoid(d4), F.sigmoid(d5), F.sigmoid(d6)],[hx1d,hx2d,hx3d,hx4d,hx5d,hx6] \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/brushnet/__init__.py b/ComfyUI-Easy-Use/py/brushnet/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5955fe9951d51a912c2622e48c70c124ab30ea62 --- /dev/null +++ b/ComfyUI-Easy-Use/py/brushnet/__init__.py @@ -0,0 +1,806 @@ +#credit to nullquant for this module +#from https://github.com/nullquant/ComfyUI-BrushNet + +import os +import types + +import torch +from accelerate import init_empty_weights, load_checkpoint_and_dispatch + +import comfy + +from .model import BrushNetModel, PowerPaintModel +from .model_patch import add_model_patch_option, patch_model_function_wrapper +from .powerpaint_utils import TokenizerWrapper, add_tokens + +cwd_path = os.path.dirname(os.path.realpath(__file__)) +brushnet_config_file = os.path.join(cwd_path, 'config', 'brushnet.json') +brushnet_xl_config_file = os.path.join(cwd_path, 'config', 'brushnet_xl.json') +powerpaint_config_file = os.path.join(cwd_path, 'config', 'powerpaint.json') + +sd15_scaling_factor = 0.18215 +sdxl_scaling_factor = 0.13025 + +ModelsToUnload = [comfy.sd1_clip.SD1ClipModel, comfy.ldm.models.autoencoder.AutoencoderKL] + +class 
BrushNet: + + # Check models compatibility + def check_compatibilty(self, model, brushnet): + is_SDXL = False + is_PP = False + if isinstance(model.model.model_config, comfy.supported_models.SD15): + print('Base model type: SD1.5') + is_SDXL = False + if brushnet["SDXL"]: + raise Exception("Base model is SD15, but BrushNet is SDXL type") + if brushnet["PP"]: + is_PP = True + elif isinstance(model.model.model_config, comfy.supported_models.SDXL): + print('Base model type: SDXL') + is_SDXL = True + if not brushnet["SDXL"]: + raise Exception("Base model is SDXL, but BrushNet is SD15 type") + else: + print('Base model type: ', type(model.model.model_config)) + raise Exception("Unsupported model type: " + str(type(model.model.model_config))) + + return (is_SDXL, is_PP) + + def check_image_mask(self, image, mask, name): + if len(image.shape) < 4: + # image tensor shape should be [B, H, W, C], but batch somehow is missing + image = image[None, :, :, :] + + if len(mask.shape) > 3: + # mask tensor shape should be [B, H, W] but we get [B, H, W, C], image may be? + # take first mask, red channel + mask = (mask[:, :, :, 0])[:, :, :] + elif len(mask.shape) < 3: + # mask tensor shape should be [B, H, W] but batch somehow is missing + mask = mask[None, :, :] + + if image.shape[0] > mask.shape[0]: + print(name, "gets batch of images (%d) but only %d masks" % (image.shape[0], mask.shape[0])) + if mask.shape[0] == 1: + print(name, "will copy the mask to fill batch") + mask = torch.cat([mask] * image.shape[0], dim=0) + else: + print(name, "will add empty masks to fill batch") + empty_mask = torch.zeros([image.shape[0] - mask.shape[0], mask.shape[1], mask.shape[2]]) + mask = torch.cat([mask, empty_mask], dim=0) + elif image.shape[0] < mask.shape[0]: + print(name, "gets batch of images (%d) but too many (%d) masks" % (image.shape[0], mask.shape[0])) + mask = mask[:image.shape[0], :, :] + + return (image, mask) + + # Prepare image and mask + def prepare_image(self, image, mask): + + image, mask = self.check_image_mask(image, mask, 'BrushNet') + + print("BrushNet image.shape =", image.shape, "mask.shape =", mask.shape) + + if mask.shape[2] != image.shape[2] or mask.shape[1] != image.shape[1]: + raise Exception("Image and mask should be the same size") + + # As a suggestion of inferno46n2 (https://github.com/nullquant/ComfyUI-BrushNet/issues/64) + mask = mask.round() + + masked_image = image * (1.0 - mask[:, :, :, None]) + + return (masked_image, mask) + + # Get origin of the mask + def cut_with_mask(self, mask, width, height): + iy, ix = (mask == 1).nonzero(as_tuple=True) + + h0, w0 = mask.shape + + if iy.numel() == 0: + x_c = w0 / 2.0 + y_c = h0 / 2.0 + else: + x_min = ix.min().item() + x_max = ix.max().item() + y_min = iy.min().item() + y_max = iy.max().item() + + if x_max - x_min > width or y_max - y_min > height: + raise Exception("Mask is bigger than provided dimensions") + + x_c = (x_min + x_max) / 2.0 + y_c = (y_min + y_max) / 2.0 + + width2 = width / 2.0 + height2 = height / 2.0 + + if w0 <= width: + x0 = 0 + w = w0 + else: + x0 = max(0, x_c - width2) + w = width + if x0 + width > w0: + x0 = w0 - width + + if h0 <= height: + y0 = 0 + h = h0 + else: + y0 = max(0, y_c - height2) + h = height + if y0 + height > h0: + y0 = h0 - height + + return (int(x0), int(y0), int(w), int(h)) + + # Prepare conditioning_latents + @torch.inference_mode() + def get_image_latents(self, masked_image, mask, vae, scaling_factor): + processed_image = masked_image.to(vae.device) + image_latents = vae.encode(processed_image[:, :, 
:, :3]) * scaling_factor + processed_mask = 1. - mask[:, None, :, :] + interpolated_mask = torch.nn.functional.interpolate( + processed_mask, + size=( + image_latents.shape[-2], + image_latents.shape[-1] + ) + ) + interpolated_mask = interpolated_mask.to(image_latents.device) + + conditioning_latents = [image_latents, interpolated_mask] + + print('BrushNet CL: image_latents shape =', image_latents.shape, 'interpolated_mask shape =', + interpolated_mask.shape) + + return conditioning_latents + + def brushnet_blocks(self, sd): + brushnet_down_block = 0 + brushnet_mid_block = 0 + brushnet_up_block = 0 + for key in sd: + if 'brushnet_down_block' in key: + brushnet_down_block += 1 + if 'brushnet_mid_block' in key: + brushnet_mid_block += 1 + if 'brushnet_up_block' in key: + brushnet_up_block += 1 + return (brushnet_down_block, brushnet_mid_block, brushnet_up_block, len(sd)) + + def get_model_type(self, brushnet_file): + sd = comfy.utils.load_torch_file(brushnet_file) + brushnet_down_block, brushnet_mid_block, brushnet_up_block, keys = self.brushnet_blocks(sd) + del sd + if brushnet_down_block == 24 and brushnet_mid_block == 2 and brushnet_up_block == 30: + is_SDXL = False + if keys == 322: + is_PP = False + print('BrushNet model type: SD1.5') + else: + is_PP = True + print('PowerPaint model type: SD1.5') + elif brushnet_down_block == 18 and brushnet_mid_block == 2 and brushnet_up_block == 22: + print('BrushNet model type: Loading SDXL') + is_SDXL = True + is_PP = False + else: + raise Exception("Unknown BrushNet model") + return is_SDXL, is_PP + + def load_brushnet_model(self, brushnet_file, dtype='float16'): + is_SDXL, is_PP = self.get_model_type(brushnet_file) + with init_empty_weights(): + if is_SDXL: + brushnet_config = BrushNetModel.load_config(brushnet_xl_config_file) + brushnet_model = BrushNetModel.from_config(brushnet_config) + elif is_PP: + brushnet_config = PowerPaintModel.load_config(powerpaint_config_file) + brushnet_model = PowerPaintModel.from_config(brushnet_config) + else: + brushnet_config = BrushNetModel.load_config(brushnet_config_file) + brushnet_model = BrushNetModel.from_config(brushnet_config) + if is_PP: + print("PowerPaint model file:", brushnet_file) + else: + print("BrushNet model file:", brushnet_file) + + if dtype == 'float16': + torch_dtype = torch.float16 + elif dtype == 'bfloat16': + torch_dtype = torch.bfloat16 + elif dtype == 'float32': + torch_dtype = torch.float32 + else: + torch_dtype = torch.float64 + + brushnet_model = load_checkpoint_and_dispatch( + brushnet_model, + brushnet_file, + device_map="sequential", + max_memory=None, + offload_folder=None, + offload_state_dict=False, + dtype=torch_dtype, + force_hooks=False, + ) + + if is_PP: + print("PowerPaint model is loaded") + elif is_SDXL: + print("BrushNet SDXL model is loaded") + else: + print("BrushNet SD1.5 model is loaded") + + return ({"brushnet": brushnet_model, "SDXL": is_SDXL, "PP": is_PP, "dtype": torch_dtype},) + + def brushnet_model_update(self, model, vae, image, mask, brushnet, positive, negative, scale, start_at, end_at): + + is_SDXL, is_PP = self.check_compatibilty(model, brushnet) + + if is_PP: + raise Exception("PowerPaint model was loaded, please use PowerPaint node") + + # Make a copy of the model so that we're not patching it everywhere in the workflow. 
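+        # (In ComfyUI, ModelPatcher.clone() copies the patcher while sharing the
+        # underlying weights, so the patches added below stay local to this node.)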
+ model = model.clone() + + # prepare image and mask + # no batches for original image and mask + masked_image, mask = self.prepare_image(image, mask) + + batch = masked_image.shape[0] + width = masked_image.shape[2] + height = masked_image.shape[1] + + if hasattr(model.model.model_config, 'latent_format') and hasattr(model.model.model_config.latent_format, + 'scale_factor'): + scaling_factor = model.model.model_config.latent_format.scale_factor + elif is_SDXL: + scaling_factor = sdxl_scaling_factor + else: + scaling_factor = sd15_scaling_factor + + torch_dtype = brushnet['dtype'] + + # prepare conditioning latents + conditioning_latents = self.get_image_latents(masked_image, mask, vae, scaling_factor) + conditioning_latents[0] = conditioning_latents[0].to(dtype=torch_dtype).to(brushnet['brushnet'].device) + conditioning_latents[1] = conditioning_latents[1].to(dtype=torch_dtype).to(brushnet['brushnet'].device) + + # unload vae + del vae + for loaded_model in comfy.model_management.current_loaded_models: + if type(loaded_model.model.model) in ModelsToUnload: + comfy.model_management.current_loaded_models.remove(loaded_model) + loaded_model.model_unload() + del loaded_model + + # prepare embeddings + prompt_embeds = positive[0][0].to(dtype=torch_dtype).to(brushnet['brushnet'].device) + negative_prompt_embeds = negative[0][0].to(dtype=torch_dtype).to(brushnet['brushnet'].device) + + max_tokens = max(prompt_embeds.shape[1], negative_prompt_embeds.shape[1]) + if prompt_embeds.shape[1] < max_tokens: + multiplier = max_tokens // 77 - prompt_embeds.shape[1] // 77 + prompt_embeds = torch.concat([prompt_embeds] + [prompt_embeds[:, -77:, :]] * multiplier, dim=1) + print('BrushNet: negative prompt more than 75 tokens:', negative_prompt_embeds.shape, + 'multiplying prompt_embeds') + if negative_prompt_embeds.shape[1] < max_tokens: + multiplier = max_tokens // 77 - negative_prompt_embeds.shape[1] // 77 + negative_prompt_embeds = torch.concat( + [negative_prompt_embeds] + [negative_prompt_embeds[:, -77:, :]] * multiplier, dim=1) + print('BrushNet: positive prompt more than 75 tokens:', prompt_embeds.shape, + 'multiplying negative_prompt_embeds') + + if len(positive[0]) > 1 and 'pooled_output' in positive[0][1] and positive[0][1]['pooled_output'] is not None: + pooled_prompt_embeds = positive[0][1]['pooled_output'].to(dtype=torch_dtype).to(brushnet['brushnet'].device) + else: + print('BrushNet: positive conditioning has not pooled_output') + if is_SDXL: + print('BrushNet will not produce correct results') + pooled_prompt_embeds = torch.empty([2, 1280], device=brushnet['brushnet'].device).to(dtype=torch_dtype) + + if len(negative[0]) > 1 and 'pooled_output' in negative[0][1] and negative[0][1]['pooled_output'] is not None: + negative_pooled_prompt_embeds = negative[0][1]['pooled_output'].to(dtype=torch_dtype).to( + brushnet['brushnet'].device) + else: + print('BrushNet: negative conditioning has not pooled_output') + if is_SDXL: + print('BrushNet will not produce correct results') + negative_pooled_prompt_embeds = torch.empty([1, pooled_prompt_embeds.shape[1]], + device=brushnet['brushnet'].device).to(dtype=torch_dtype) + + time_ids = torch.FloatTensor([[height, width, 0., 0., height, width]]).to(dtype=torch_dtype).to( + brushnet['brushnet'].device) + + if not is_SDXL: + pooled_prompt_embeds = None + negative_pooled_prompt_embeds = None + time_ids = None + + # apply patch to model + brushnet_conditioning_scale = scale + control_guidance_start = start_at + control_guidance_end = end_at + + 
add_brushnet_patch(model, + brushnet['brushnet'], + torch_dtype, + conditioning_latents, + (brushnet_conditioning_scale, control_guidance_start, control_guidance_end), + prompt_embeds, negative_prompt_embeds, + pooled_prompt_embeds, negative_pooled_prompt_embeds, time_ids, + False) + + latent = torch.zeros([batch, 4, conditioning_latents[0].shape[2], conditioning_latents[0].shape[3]], + device=brushnet['brushnet'].device) + + return (model, positive, negative, {"samples": latent},) + + #powperpaint + def load_powerpaint_clip(self, base_clip_file, pp_clip_file): + pp_clip = comfy.sd.load_clip(ckpt_paths=[base_clip_file]) + + print('PowerPaint base CLIP file: ', base_clip_file) + + pp_tokenizer = TokenizerWrapper(pp_clip.tokenizer.clip_l.tokenizer) + pp_text_encoder = pp_clip.patcher.model.clip_l.transformer + + add_tokens( + tokenizer=pp_tokenizer, + text_encoder=pp_text_encoder, + placeholder_tokens=["P_ctxt", "P_shape", "P_obj"], + initialize_tokens=["a", "a", "a"], + num_vectors_per_token=10, + ) + + pp_text_encoder.load_state_dict(comfy.utils.load_torch_file(pp_clip_file), strict=False) + + print('PowerPaint CLIP file: ', pp_clip_file) + + pp_clip.tokenizer.clip_l.tokenizer = pp_tokenizer + pp_clip.patcher.model.clip_l.transformer = pp_text_encoder + + return (pp_clip,) + + def powerpaint_model_update(self, model, vae, image, mask, powerpaint, clip, positive, negative, fitting, function, scale, start_at, end_at, save_memory): + is_SDXL, is_PP = self.check_compatibilty(model, powerpaint) + if not is_PP: + raise Exception("BrushNet model was loaded, please use BrushNet node") + + # Make a copy of the model so that we're not patching it everywhere in the workflow. + model = model.clone() + + # prepare image and mask + # no batches for original image and mask + masked_image, mask = self.prepare_image(image, mask) + + batch = masked_image.shape[0] + # width = masked_image.shape[2] + # height = masked_image.shape[1] + + if hasattr(model.model.model_config, 'latent_format') and hasattr(model.model.model_config.latent_format, + 'scale_factor'): + scaling_factor = model.model.model_config.latent_format.scale_factor + else: + scaling_factor = sd15_scaling_factor + + torch_dtype = powerpaint['dtype'] + + # prepare conditioning latents + conditioning_latents = self.get_image_latents(masked_image, mask, vae, scaling_factor) + conditioning_latents[0] = conditioning_latents[0].to(dtype=torch_dtype).to(powerpaint['brushnet'].device) + conditioning_latents[1] = conditioning_latents[1].to(dtype=torch_dtype).to(powerpaint['brushnet'].device) + + # prepare embeddings + + if function == "object removal": + promptA = "P_ctxt" + promptB = "P_ctxt" + negative_promptA = "P_obj" + negative_promptB = "P_obj" + print('You should add to positive prompt: "empty scene blur"') + # positive = positive + " empty scene blur" + elif function == "context aware": + promptA = "P_ctxt" + promptB = "P_ctxt" + negative_promptA = "" + negative_promptB = "" + # positive = positive + " empty scene" + print('You should add to positive prompt: "empty scene"') + elif function == "shape guided": + promptA = "P_shape" + promptB = "P_ctxt" + negative_promptA = "P_shape" + negative_promptB = "P_ctxt" + elif function == "image outpainting": + promptA = "P_ctxt" + promptB = "P_ctxt" + negative_promptA = "P_obj" + negative_promptB = "P_obj" + # positive = positive + " empty scene" + print('You should add to positive prompt: "empty scene"') + else: + promptA = "P_obj" + promptB = "P_obj" + negative_promptA = "P_obj" + negative_promptB = 
"P_obj" + + tokens = clip.tokenize(promptA) + prompt_embedsA = clip.encode_from_tokens(tokens, return_pooled=False) + + tokens = clip.tokenize(negative_promptA) + negative_prompt_embedsA = clip.encode_from_tokens(tokens, return_pooled=False) + + tokens = clip.tokenize(promptB) + prompt_embedsB = clip.encode_from_tokens(tokens, return_pooled=False) + + tokens = clip.tokenize(negative_promptB) + negative_prompt_embedsB = clip.encode_from_tokens(tokens, return_pooled=False) + + prompt_embeds_pp = (prompt_embedsA * fitting + (1.0 - fitting) * prompt_embedsB).to(dtype=torch_dtype).to( + powerpaint['brushnet'].device) + negative_prompt_embeds_pp = (negative_prompt_embedsA * fitting + (1.0 - fitting) * negative_prompt_embedsB).to( + dtype=torch_dtype).to(powerpaint['brushnet'].device) + + # unload vae and CLIPs + del vae + del clip + for loaded_model in comfy.model_management.current_loaded_models: + if type(loaded_model.model.model) in ModelsToUnload: + comfy.model_management.current_loaded_models.remove(loaded_model) + loaded_model.model_unload() + del loaded_model + + # apply patch to model + + brushnet_conditioning_scale = scale + control_guidance_start = start_at + control_guidance_end = end_at + + if save_memory != 'none': + powerpaint['brushnet'].set_attention_slice(save_memory) + + add_brushnet_patch(model, + powerpaint['brushnet'], + torch_dtype, + conditioning_latents, + (brushnet_conditioning_scale, control_guidance_start, control_guidance_end), + negative_prompt_embeds_pp, prompt_embeds_pp, + None, None, None, + False) + + latent = torch.zeros([batch, 4, conditioning_latents[0].shape[2], conditioning_latents[0].shape[3]], + device=powerpaint['brushnet'].device) + + return (model, positive, negative, {"samples": latent},) +@torch.inference_mode() +def brushnet_inference(x, timesteps, transformer_options, debug): + if 'model_patch' not in transformer_options: + print('BrushNet inference: there is no model_patch key in transformer_options') + return ([], 0, []) + mp = transformer_options['model_patch'] + if 'brushnet' not in mp: + print('BrushNet inference: there is no brushnet key in mdel_patch') + return ([], 0, []) + bo = mp['brushnet'] + if 'model' not in bo: + print('BrushNet inference: there is no model key in brushnet') + return ([], 0, []) + brushnet = bo['model'] + if not (isinstance(brushnet, BrushNetModel) or isinstance(brushnet, PowerPaintModel)): + print('BrushNet model is not a BrushNetModel class') + return ([], 0, []) + + torch_dtype = bo['dtype'] + cl_list = bo['latents'] + brushnet_conditioning_scale, control_guidance_start, control_guidance_end = bo['controls'] + pe = bo['prompt_embeds'] + npe = bo['negative_prompt_embeds'] + ppe, nppe, time_ids = bo['add_embeds'] + + #do_classifier_free_guidance = mp['free_guidance'] + do_classifier_free_guidance = len(transformer_options['cond_or_uncond']) > 1 + + x = x.detach().clone() + x = x.to(torch_dtype).to(brushnet.device) + + timesteps = timesteps.detach().clone() + timesteps = timesteps.to(torch_dtype).to(brushnet.device) + + total_steps = mp['total_steps'] + step = mp['step'] + + added_cond_kwargs = {} + + if do_classifier_free_guidance and step == 0: + print('BrushNet inference: do_classifier_free_guidance is True') + + sub_idx = None + if 'ad_params' in transformer_options and 'sub_idxs' in transformer_options['ad_params']: + sub_idx = transformer_options['ad_params']['sub_idxs'] + + # we have batch input images + batch = cl_list[0].shape[0] + # we have incoming latents + latents_incoming = x.shape[0] + # and we already 
got some + latents_got = bo['latent_id'] + if step == 0 or batch > 1: + print('BrushNet inference, step = %d: image batch = %d, got %d latents, starting from %d' \ + % (step, batch, latents_incoming, latents_got)) + + image_latents = [] + masks = [] + prompt_embeds = [] + negative_prompt_embeds = [] + pooled_prompt_embeds = [] + negative_pooled_prompt_embeds = [] + if sub_idx: + # AnimateDiff indexes detected + if step == 0: + print('BrushNet inference: AnimateDiff indexes detected and applied') + + batch = len(sub_idx) + + if do_classifier_free_guidance: + for i in sub_idx: + image_latents.append(cl_list[0][i][None,:,:,:]) + masks.append(cl_list[1][i][None,:,:,:]) + prompt_embeds.append(pe) + negative_prompt_embeds.append(npe) + pooled_prompt_embeds.append(ppe) + negative_pooled_prompt_embeds.append(nppe) + for i in sub_idx: + image_latents.append(cl_list[0][i][None,:,:,:]) + masks.append(cl_list[1][i][None,:,:,:]) + else: + for i in sub_idx: + image_latents.append(cl_list[0][i][None,:,:,:]) + masks.append(cl_list[1][i][None,:,:,:]) + prompt_embeds.append(pe) + pooled_prompt_embeds.append(ppe) + else: + # do_classifier_free_guidance = 2 passes, 1st pass is cond, 2nd is uncond + continue_batch = True + for i in range(latents_incoming): + number = latents_got + i + if number < batch: + # 1st pass, cond + image_latents.append(cl_list[0][number][None,:,:,:]) + masks.append(cl_list[1][number][None,:,:,:]) + prompt_embeds.append(pe) + pooled_prompt_embeds.append(ppe) + elif do_classifier_free_guidance and number < batch * 2: + # 2nd pass, uncond + image_latents.append(cl_list[0][number-batch][None,:,:,:]) + masks.append(cl_list[1][number-batch][None,:,:,:]) + negative_prompt_embeds.append(npe) + negative_pooled_prompt_embeds.append(nppe) + else: + # latent batch + image_latents.append(cl_list[0][0][None,:,:,:]) + masks.append(cl_list[1][0][None,:,:,:]) + prompt_embeds.append(pe) + pooled_prompt_embeds.append(ppe) + latents_got = -i + continue_batch = False + + if continue_batch: + # we don't have full batch yet + if do_classifier_free_guidance: + if number < batch * 2 - 1: + bo['latent_id'] = number + 1 + else: + bo['latent_id'] = 0 + else: + if number < batch - 1: + bo['latent_id'] = number + 1 + else: + bo['latent_id'] = 0 + else: + bo['latent_id'] = 0 + + cl = [] + for il, m in zip(image_latents, masks): + cl.append(torch.concat([il, m], dim=1)) + cl2apply = torch.concat(cl, dim=0) + + conditioning_latents = cl2apply.to(torch_dtype).to(brushnet.device) + + prompt_embeds.extend(negative_prompt_embeds) + prompt_embeds = torch.concat(prompt_embeds, dim=0).to(torch_dtype).to(brushnet.device) + + if ppe is not None: + added_cond_kwargs = {} + added_cond_kwargs['time_ids'] = torch.concat([time_ids] * latents_incoming, dim = 0).to(torch_dtype).to(brushnet.device) + + pooled_prompt_embeds.extend(negative_pooled_prompt_embeds) + pooled_prompt_embeds = torch.concat(pooled_prompt_embeds, dim=0).to(torch_dtype).to(brushnet.device) + added_cond_kwargs['text_embeds'] = pooled_prompt_embeds + else: + added_cond_kwargs = None + + if x.shape[2] != conditioning_latents.shape[2] or x.shape[3] != conditioning_latents.shape[3]: + if step == 0: + print('BrushNet inference: image', conditioning_latents.shape, 'and latent', x.shape, 'have different size, resizing image') + conditioning_latents = torch.nn.functional.interpolate( + conditioning_latents, size=( + x.shape[2], + x.shape[3], + ), mode='bicubic', + ).to(torch_dtype).to(brushnet.device) + + if step == 0: + print('BrushNet inference: sample', x.shape, ', 
CL', conditioning_latents.shape, 'dtype', torch_dtype) + + if debug: print('BrushNet: step =', step) + + if step < control_guidance_start or step > control_guidance_end: + cond_scale = 0.0 + else: + cond_scale = brushnet_conditioning_scale + + return brushnet(x, + encoder_hidden_states=prompt_embeds, + brushnet_cond=conditioning_latents, + timestep = timesteps, + conditioning_scale=cond_scale, + guess_mode=False, + added_cond_kwargs=added_cond_kwargs, + return_dict=False, + debug=debug, + ) + +def add_brushnet_patch(model, brushnet, torch_dtype, conditioning_latents, + controls, + prompt_embeds, negative_prompt_embeds, + pooled_prompt_embeds, negative_pooled_prompt_embeds, time_ids, + debug): + + is_SDXL = isinstance(model.model.model_config, comfy.supported_models.SDXL) + + if is_SDXL: + input_blocks = [[0, comfy.ops.disable_weight_init.Conv2d], + [1, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [2, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [3, comfy.ldm.modules.diffusionmodules.openaimodel.Downsample], + [4, comfy.ldm.modules.attention.SpatialTransformer], + [5, comfy.ldm.modules.attention.SpatialTransformer], + [6, comfy.ldm.modules.diffusionmodules.openaimodel.Downsample], + [7, comfy.ldm.modules.attention.SpatialTransformer], + [8, comfy.ldm.modules.attention.SpatialTransformer]] + middle_block = [0, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock] + output_blocks = [[0, comfy.ldm.modules.attention.SpatialTransformer], + [1, comfy.ldm.modules.attention.SpatialTransformer], + [2, comfy.ldm.modules.attention.SpatialTransformer], + [2, comfy.ldm.modules.diffusionmodules.openaimodel.Upsample], + [3, comfy.ldm.modules.attention.SpatialTransformer], + [4, comfy.ldm.modules.attention.SpatialTransformer], + [5, comfy.ldm.modules.attention.SpatialTransformer], + [5, comfy.ldm.modules.diffusionmodules.openaimodel.Upsample], + [6, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [7, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [8, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock]] + else: + input_blocks = [[0, comfy.ops.disable_weight_init.Conv2d], + [1, comfy.ldm.modules.attention.SpatialTransformer], + [2, comfy.ldm.modules.attention.SpatialTransformer], + [3, comfy.ldm.modules.diffusionmodules.openaimodel.Downsample], + [4, comfy.ldm.modules.attention.SpatialTransformer], + [5, comfy.ldm.modules.attention.SpatialTransformer], + [6, comfy.ldm.modules.diffusionmodules.openaimodel.Downsample], + [7, comfy.ldm.modules.attention.SpatialTransformer], + [8, comfy.ldm.modules.attention.SpatialTransformer], + [9, comfy.ldm.modules.diffusionmodules.openaimodel.Downsample], + [10, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [11, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock]] + middle_block = [0, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock] + output_blocks = [[0, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [1, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [2, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [2, comfy.ldm.modules.diffusionmodules.openaimodel.Upsample], + [3, comfy.ldm.modules.attention.SpatialTransformer], + [4, comfy.ldm.modules.attention.SpatialTransformer], + [5, comfy.ldm.modules.attention.SpatialTransformer], + [5, comfy.ldm.modules.diffusionmodules.openaimodel.Upsample], + [6, comfy.ldm.modules.attention.SpatialTransformer], + [7, comfy.ldm.modules.attention.SpatialTransformer], + [8, 
comfy.ldm.modules.attention.SpatialTransformer], + [8, comfy.ldm.modules.diffusionmodules.openaimodel.Upsample], + [9, comfy.ldm.modules.attention.SpatialTransformer], + [10, comfy.ldm.modules.attention.SpatialTransformer], + [11, comfy.ldm.modules.attention.SpatialTransformer]] + + def last_layer_index(block, tp): + layer_list = [] + for layer in block: + layer_list.append(type(layer)) + layer_list.reverse() + if tp not in layer_list: + return -1, layer_list.reverse() + return len(layer_list) - 1 - layer_list.index(tp), layer_list + + def brushnet_forward(model, x, timesteps, transformer_options, control): + if 'brushnet' not in transformer_options['model_patch']: + input_samples = [] + mid_sample = 0 + output_samples = [] + else: + # brushnet inference + input_samples, mid_sample, output_samples = brushnet_inference(x, timesteps, transformer_options, debug) + + # give additional samples to blocks + for i, tp in input_blocks: + idx, layer_list = last_layer_index(model.input_blocks[i], tp) + if idx < 0: + print("BrushNet can't find", tp, "layer in", i, "input block:", layer_list) + continue + model.input_blocks[i][idx].add_sample_after = input_samples.pop(0) if input_samples else 0 + + idx, layer_list = last_layer_index(model.middle_block, middle_block[1]) + if idx < 0: + print("BrushNet can't find", middle_block[1], "layer in middle block", layer_list) + model.middle_block[idx].add_sample_after = mid_sample + + for i, tp in output_blocks: + idx, layer_list = last_layer_index(model.output_blocks[i], tp) + if idx < 0: + print("BrushNet can't find", tp, "layer in", i, "outnput block:", layer_list) + continue + model.output_blocks[i][idx].add_sample_after = output_samples.pop(0) if output_samples else 0 + + patch_model_function_wrapper(model, brushnet_forward) + + to = add_model_patch_option(model) + mp = to['model_patch'] + if 'brushnet' not in mp: + mp['brushnet'] = {} + bo = mp['brushnet'] + + bo['model'] = brushnet + bo['dtype'] = torch_dtype + bo['latents'] = conditioning_latents + bo['controls'] = controls + bo['prompt_embeds'] = prompt_embeds + bo['negative_prompt_embeds'] = negative_prompt_embeds + bo['add_embeds'] = (pooled_prompt_embeds, negative_pooled_prompt_embeds, time_ids) + bo['latent_id'] = 0 + + # patch layers `forward` so we can apply brushnet + def forward_patched_by_brushnet(self, x, *args, **kwargs): + h = self.original_forward(x, *args, **kwargs) + if hasattr(self, 'add_sample_after') and type(self): + to_add = self.add_sample_after + if torch.is_tensor(to_add): + # interpolate due to RAUNet + if h.shape[2] != to_add.shape[2] or h.shape[3] != to_add.shape[3]: + to_add = torch.nn.functional.interpolate(to_add, size=(h.shape[2], h.shape[3]), mode='bicubic') + h += to_add.to(h.dtype).to(h.device) + else: + h += self.add_sample_after + self.add_sample_after = 0 + return h + + for i, block in enumerate(model.model.diffusion_model.input_blocks): + for j, layer in enumerate(block): + if not hasattr(layer, 'original_forward'): + layer.original_forward = layer.forward + layer.forward = types.MethodType(forward_patched_by_brushnet, layer) + layer.add_sample_after = 0 + + for j, layer in enumerate(model.model.diffusion_model.middle_block): + if not hasattr(layer, 'original_forward'): + layer.original_forward = layer.forward + layer.forward = types.MethodType(forward_patched_by_brushnet, layer) + layer.add_sample_after = 0 + + for i, block in enumerate(model.model.diffusion_model.output_blocks): + for j, layer in enumerate(block): + if not hasattr(layer, 'original_forward'): + 
layer.original_forward = layer.forward + layer.forward = types.MethodType(forward_patched_by_brushnet, layer) + layer.add_sample_after = 0 \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/brushnet/config/brushnet.json b/ComfyUI-Easy-Use/py/brushnet/config/brushnet.json new file mode 100644 index 0000000000000000000000000000000000000000..c22ae1573b55249f221f0f426cb1bbcada747254 --- /dev/null +++ b/ComfyUI-Easy-Use/py/brushnet/config/brushnet.json @@ -0,0 +1,58 @@ +{ + "_class_name": "BrushNetModel", + "_diffusers_version": "0.27.0.dev0", + "_name_or_path": "runs/logs/brushnet_randommask/checkpoint-100000", + "act_fn": "silu", + "addition_embed_type": null, + "addition_embed_type_num_heads": 64, + "addition_time_embed_dim": null, + "attention_head_dim": 8, + "block_out_channels": [ + 320, + 640, + 1280, + 1280 + ], + "brushnet_conditioning_channel_order": "rgb", + "class_embed_type": null, + "conditioning_channels": 5, + "conditioning_embedding_out_channels": [ + 16, + 32, + 96, + 256 + ], + "cross_attention_dim": 768, + "down_block_types": [ + "DownBlock2D", + "DownBlock2D", + "DownBlock2D", + "DownBlock2D" + ], + "downsample_padding": 1, + "encoder_hid_dim": null, + "encoder_hid_dim_type": null, + "flip_sin_to_cos": true, + "freq_shift": 0, + "global_pool_conditions": false, + "in_channels": 4, + "layers_per_block": 2, + "mid_block_scale_factor": 1, + "mid_block_type": "MidBlock2D", + "norm_eps": 1e-05, + "norm_num_groups": 32, + "num_attention_heads": null, + "num_class_embeds": null, + "only_cross_attention": false, + "projection_class_embeddings_input_dim": null, + "resnet_time_scale_shift": "default", + "transformer_layers_per_block": 1, + "up_block_types": [ + "UpBlock2D", + "UpBlock2D", + "UpBlock2D", + "UpBlock2D" + ], + "upcast_attention": false, + "use_linear_projection": false +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/brushnet/config/brushnet_xl.json b/ComfyUI-Easy-Use/py/brushnet/config/brushnet_xl.json new file mode 100644 index 0000000000000000000000000000000000000000..1e0ebb7a3928c8ae586b330f4e38f2fe0e8d8dd7 --- /dev/null +++ b/ComfyUI-Easy-Use/py/brushnet/config/brushnet_xl.json @@ -0,0 +1,63 @@ +{ + "_class_name": "BrushNetModel", + "_diffusers_version": "0.27.0.dev0", + "_name_or_path": "runs/logs/brushnetsdxl_randommask/checkpoint-80000", + "act_fn": "silu", + "addition_embed_type": "text_time", + "addition_embed_type_num_heads": 64, + "addition_time_embed_dim": 256, + "attention_head_dim": [ + 5, + 10, + 20 + ], + "block_out_channels": [ + 320, + 640, + 1280 + ], + "brushnet_conditioning_channel_order": "rgb", + "class_embed_type": null, + "conditioning_channels": 5, + "conditioning_embedding_out_channels": [ + 16, + 32, + 96, + 256 + ], + "cross_attention_dim": 2048, + "down_block_types": [ + "DownBlock2D", + "DownBlock2D", + "DownBlock2D" + ], + "downsample_padding": 1, + "encoder_hid_dim": null, + "encoder_hid_dim_type": null, + "flip_sin_to_cos": true, + "freq_shift": 0, + "global_pool_conditions": false, + "in_channels": 4, + "layers_per_block": 2, + "mid_block_scale_factor": 1, + "mid_block_type": "MidBlock2D", + "norm_eps": 1e-05, + "norm_num_groups": 32, + "num_attention_heads": null, + "num_class_embeds": null, + "only_cross_attention": false, + "projection_class_embeddings_input_dim": 2816, + "resnet_time_scale_shift": "default", + "transformer_layers_per_block": [ + 1, + 2, + 10 + ], + "up_block_types": [ + "UpBlock2D", + "UpBlock2D", + "UpBlock2D" + ], + "upcast_attention": null, + "use_linear_projection": true +} \ No 
newline at end of file diff --git a/ComfyUI-Easy-Use/py/brushnet/config/powerpaint.json b/ComfyUI-Easy-Use/py/brushnet/config/powerpaint.json new file mode 100644 index 0000000000000000000000000000000000000000..1ea970014ae98e5dda242202bec89ee258e1e554 --- /dev/null +++ b/ComfyUI-Easy-Use/py/brushnet/config/powerpaint.json @@ -0,0 +1,57 @@ +{ + "_class_name": "BrushNetModel", + "_diffusers_version": "0.27.2", + "act_fn": "silu", + "addition_embed_type": null, + "addition_embed_type_num_heads": 64, + "addition_time_embed_dim": null, + "attention_head_dim": 8, + "block_out_channels": [ + 320, + 640, + 1280, + 1280 + ], + "brushnet_conditioning_channel_order": "rgb", + "class_embed_type": null, + "conditioning_channels": 5, + "conditioning_embedding_out_channels": [ + 16, + 32, + 96, + 256 + ], + "cross_attention_dim": 768, + "down_block_types": [ + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "DownBlock2D" + ], + "downsample_padding": 1, + "encoder_hid_dim": null, + "encoder_hid_dim_type": null, + "flip_sin_to_cos": true, + "freq_shift": 0, + "global_pool_conditions": false, + "in_channels": 4, + "layers_per_block": 2, + "mid_block_scale_factor": 1, + "mid_block_type": "UNetMidBlock2DCrossAttn", + "norm_eps": 1e-05, + "norm_num_groups": 32, + "num_attention_heads": null, + "num_class_embeds": null, + "only_cross_attention": false, + "projection_class_embeddings_input_dim": null, + "resnet_time_scale_shift": "default", + "transformer_layers_per_block": 1, + "up_block_types": [ + "UpBlock2D", + "CrossAttnUpBlock2D", + "CrossAttnUpBlock2D", + "CrossAttnUpBlock2D" + ], + "upcast_attention": false, + "use_linear_projection": false +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/brushnet/model.py b/ComfyUI-Easy-Use/py/brushnet/model.py new file mode 100644 index 0000000000000000000000000000000000000000..1ca3ec7a3d279137bb32d5786e2e12111c437c0f --- /dev/null +++ b/ComfyUI-Easy-Use/py/brushnet/model.py @@ -0,0 +1,1688 @@ +from dataclasses import dataclass +from typing import Any, Dict, List, Optional, Tuple, Union + +import torch +from torch import nn + +from ..libs.utils import install_package +try: + install_package("diffusers", "0.27.2", True, "0.25.0") + + from diffusers.configuration_utils import ConfigMixin, register_to_config + from diffusers.utils import BaseOutput, logging + from diffusers.models.attention_processor import ( + ADDED_KV_ATTENTION_PROCESSORS, + CROSS_ATTENTION_PROCESSORS, + AttentionProcessor, + AttnAddedKVProcessor, + AttnProcessor, + ) + from diffusers.models.embeddings import TextImageProjection, TextImageTimeEmbedding, TextTimeEmbedding, TimestepEmbedding, Timesteps + from diffusers.models.modeling_utils import ModelMixin + from diffusers.models.resnet import ResnetBlock2D + from diffusers.models.unets.unet_2d_condition import UNet2DConditionModel + + from diffusers.models.transformers.dual_transformer_2d import DualTransformer2DModel + from diffusers.models.transformers.transformer_2d import Transformer2DModel + + from .unet_2d_blocks import ( + CrossAttnDownBlock2D, + DownBlock2D, + get_down_block, + get_mid_block, + get_up_block, + ) + + from .unet_2d_condition import UNet2DConditionModel + + logger = logging.get_logger(__name__) + + def zero_module(module): + for p in module.parameters(): + nn.init.zeros_(p) + return module + + @dataclass + class BrushNetOutput(BaseOutput): + + up_block_res_samples: Tuple[torch.Tensor] + down_block_res_samples: Tuple[torch.Tensor] + mid_block_res_sample: torch.Tensor + + # BrushNetModel 
+ class BrushNetModel(ModelMixin, ConfigMixin): + """A BrushNet model.""" + _supports_gradient_checkpointing = True + + @register_to_config + def __init__( + self, + in_channels: int = 4, + conditioning_channels: int = 5, + flip_sin_to_cos: bool = True, + freq_shift: int = 0, + down_block_types: Tuple[str, ...] = ( + "DownBlock2D", + "DownBlock2D", + "DownBlock2D", + "DownBlock2D", + ), + mid_block_type: Optional[str] = "UNetMidBlock2D", + up_block_types: Tuple[str, ...] = ( + "UpBlock2D", + "UpBlock2D", + "UpBlock2D", + "UpBlock2D", + ), + only_cross_attention: Union[bool, Tuple[bool]] = False, + block_out_channels: Tuple[int, ...] = (320, 640, 1280, 1280), + layers_per_block: int = 2, + downsample_padding: int = 1, + mid_block_scale_factor: float = 1, + act_fn: str = "silu", + norm_num_groups: Optional[int] = 32, + norm_eps: float = 1e-5, + cross_attention_dim: int = 1280, + transformer_layers_per_block: Union[int, Tuple[int, ...]] = 1, + encoder_hid_dim: Optional[int] = None, + encoder_hid_dim_type: Optional[str] = None, + attention_head_dim: Union[int, Tuple[int, ...]] = 8, + num_attention_heads: Optional[Union[int, Tuple[int, ...]]] = None, + use_linear_projection: bool = False, + class_embed_type: Optional[str] = None, + addition_embed_type: Optional[str] = None, + addition_time_embed_dim: Optional[int] = None, + num_class_embeds: Optional[int] = None, + upcast_attention: bool = False, + resnet_time_scale_shift: str = "default", + projection_class_embeddings_input_dim: Optional[int] = None, + brushnet_conditioning_channel_order: str = "rgb", + conditioning_embedding_out_channels: Optional[Tuple[int, ...]] = (16, 32, 96, 256), + global_pool_conditions: bool = False, + addition_embed_type_num_heads: int = 64, + ): + super().__init__() + + # If `num_attention_heads` is not defined (which is the case for most models) + # it will default to `attention_head_dim`. This looks weird upon first reading it and it is. + # The reason for this behavior is to correct for incorrectly named variables that were introduced + # when this library was created. The incorrect naming was only discovered much later in https://github.com/huggingface/diffusers/issues/2011#issuecomment-1547958131 + # Changing `attention_head_dim` to `num_attention_heads` for 40,000+ configurations is too backwards breaking + # which is why we correct for the naming here. + num_attention_heads = num_attention_heads or attention_head_dim + + # Check inputs + if len(down_block_types) != len(up_block_types): + raise ValueError( + f"Must provide the same number of `down_block_types` as `up_block_types`. `down_block_types`: {down_block_types}. `up_block_types`: {up_block_types}." + ) + + if len(block_out_channels) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `block_out_channels` as `down_block_types`. `block_out_channels`: {block_out_channels}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(only_cross_attention, bool) and len(only_cross_attention) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `only_cross_attention` as `down_block_types`. `only_cross_attention`: {only_cross_attention}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(num_attention_heads, int) and len(num_attention_heads) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `num_attention_heads` as `down_block_types`. `num_attention_heads`: {num_attention_heads}. `down_block_types`: {down_block_types}." 
+ ) + + if isinstance(transformer_layers_per_block, int): + transformer_layers_per_block = [transformer_layers_per_block] * len(down_block_types) + + # input + conv_in_kernel = 3 + conv_in_padding = (conv_in_kernel - 1) // 2 + self.conv_in_condition = nn.Conv2d( + in_channels + conditioning_channels, block_out_channels[0], kernel_size=conv_in_kernel, + padding=conv_in_padding + ) + + # time + time_embed_dim = block_out_channels[0] * 4 + self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift) + timestep_input_dim = block_out_channels[0] + self.time_embedding = TimestepEmbedding( + timestep_input_dim, + time_embed_dim, + act_fn=act_fn, + ) + + if encoder_hid_dim_type is None and encoder_hid_dim is not None: + encoder_hid_dim_type = "text_proj" + self.register_to_config(encoder_hid_dim_type=encoder_hid_dim_type) + print("encoder_hid_dim_type defaults to 'text_proj' as `encoder_hid_dim` is defined.") + + if encoder_hid_dim is None and encoder_hid_dim_type is not None: + raise ValueError( + f"`encoder_hid_dim` has to be defined when `encoder_hid_dim_type` is set to {encoder_hid_dim_type}." + ) + + if encoder_hid_dim_type == "text_proj": + self.encoder_hid_proj = nn.Linear(encoder_hid_dim, cross_attention_dim) + elif encoder_hid_dim_type == "text_image_proj": + # image_embed_dim DOESN'T have to be `cross_attention_dim`. To not clutter the __init__ too much + # they are set to `cross_attention_dim` here as this is exactly the required dimension for the currently only use + # case when `addition_embed_type == "text_image_proj"` (Kadinsky 2.1)` + self.encoder_hid_proj = TextImageProjection( + text_embed_dim=encoder_hid_dim, + image_embed_dim=cross_attention_dim, + cross_attention_dim=cross_attention_dim, + ) + + elif encoder_hid_dim_type is not None: + raise ValueError( + f"encoder_hid_dim_type: {encoder_hid_dim_type} must be None, 'text_proj' or 'text_image_proj'." + ) + else: + self.encoder_hid_proj = None + + # class embedding + if class_embed_type is None and num_class_embeds is not None: + self.class_embedding = nn.Embedding(num_class_embeds, time_embed_dim) + elif class_embed_type == "timestep": + self.class_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim) + elif class_embed_type == "identity": + self.class_embedding = nn.Identity(time_embed_dim, time_embed_dim) + elif class_embed_type == "projection": + if projection_class_embeddings_input_dim is None: + raise ValueError( + "`class_embed_type`: 'projection' requires `projection_class_embeddings_input_dim` be set" + ) + # The projection `class_embed_type` is the same as the timestep `class_embed_type` except + # 1. the `class_labels` inputs are not first converted to sinusoidal embeddings + # 2. it projects from an arbitrary input dimension. + # + # Note that `TimestepEmbedding` is quite general, being mainly linear layers and activations. + # When used for embedding actual timesteps, the timesteps are first converted to sinusoidal embeddings. + # As a result, `TimestepEmbedding` can be passed arbitrary vectors. 
+ self.class_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim) + else: + self.class_embedding = None + + if addition_embed_type == "text": + if encoder_hid_dim is not None: + text_time_embedding_from_dim = encoder_hid_dim + else: + text_time_embedding_from_dim = cross_attention_dim + + self.add_embedding = TextTimeEmbedding( + text_time_embedding_from_dim, time_embed_dim, num_heads=addition_embed_type_num_heads + ) + elif addition_embed_type == "text_image": + # text_embed_dim and image_embed_dim DON'T have to be `cross_attention_dim`. To not clutter the __init__ too much + # they are set to `cross_attention_dim` here as this is exactly the required dimension for the currently only use + # case when `addition_embed_type == "text_image"` (Kadinsky 2.1)` + self.add_embedding = TextImageTimeEmbedding( + text_embed_dim=cross_attention_dim, image_embed_dim=cross_attention_dim, + time_embed_dim=time_embed_dim + ) + elif addition_embed_type == "text_time": + self.add_time_proj = Timesteps(addition_time_embed_dim, flip_sin_to_cos, freq_shift) + self.add_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim) + + elif addition_embed_type is not None: + raise ValueError(f"addition_embed_type: {addition_embed_type} must be None, 'text' or 'text_image'.") + + self.down_blocks = nn.ModuleList([]) + self.brushnet_down_blocks = nn.ModuleList([]) + + if isinstance(only_cross_attention, bool): + only_cross_attention = [only_cross_attention] * len(down_block_types) + + if isinstance(attention_head_dim, int): + attention_head_dim = (attention_head_dim,) * len(down_block_types) + + if isinstance(num_attention_heads, int): + num_attention_heads = (num_attention_heads,) * len(down_block_types) + + # down + output_channel = block_out_channels[0] + + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_down_blocks.append(brushnet_block) + + for i, down_block_type in enumerate(down_block_types): + input_channel = output_channel + output_channel = block_out_channels[i] + is_final_block = i == len(block_out_channels) - 1 + + down_block = get_down_block( + down_block_type, + num_layers=layers_per_block, + transformer_layers_per_block=transformer_layers_per_block[i], + in_channels=input_channel, + out_channels=output_channel, + temb_channels=time_embed_dim, + add_downsample=not is_final_block, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim, + num_attention_heads=num_attention_heads[i], + attention_head_dim=attention_head_dim[i] if attention_head_dim[i] is not None else output_channel, + downsample_padding=downsample_padding, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention[i], + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + self.down_blocks.append(down_block) + + for _ in range(layers_per_block): + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_down_blocks.append(brushnet_block) + + if not is_final_block: + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_down_blocks.append(brushnet_block) + + # mid + mid_block_channel = block_out_channels[-1] + + brushnet_block = nn.Conv2d(mid_block_channel, mid_block_channel, kernel_size=1) + 
brushnet_block = zero_module(brushnet_block) + self.brushnet_mid_block = brushnet_block + + self.mid_block = get_mid_block( + mid_block_type, + transformer_layers_per_block=transformer_layers_per_block[-1], + in_channels=mid_block_channel, + temb_channels=time_embed_dim, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + output_scale_factor=mid_block_scale_factor, + resnet_time_scale_shift=resnet_time_scale_shift, + cross_attention_dim=cross_attention_dim, + num_attention_heads=num_attention_heads[-1], + resnet_groups=norm_num_groups, + use_linear_projection=use_linear_projection, + upcast_attention=upcast_attention, + ) + + # count how many layers upsample the images + self.num_upsamplers = 0 + + # up + reversed_block_out_channels = list(reversed(block_out_channels)) + reversed_num_attention_heads = list(reversed(num_attention_heads)) + reversed_transformer_layers_per_block = (list(reversed(transformer_layers_per_block))) + only_cross_attention = list(reversed(only_cross_attention)) + + output_channel = reversed_block_out_channels[0] + + self.up_blocks = nn.ModuleList([]) + self.brushnet_up_blocks = nn.ModuleList([]) + + for i, up_block_type in enumerate(up_block_types): + is_final_block = i == len(block_out_channels) - 1 + + prev_output_channel = output_channel + output_channel = reversed_block_out_channels[i] + input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)] + + # add upsample block for all BUT final layer + if not is_final_block: + add_upsample = True + self.num_upsamplers += 1 + else: + add_upsample = False + + up_block = get_up_block( + up_block_type, + num_layers=layers_per_block + 1, + transformer_layers_per_block=reversed_transformer_layers_per_block[i], + in_channels=input_channel, + out_channels=output_channel, + prev_output_channel=prev_output_channel, + temb_channels=time_embed_dim, + add_upsample=add_upsample, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resolution_idx=i, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim, + num_attention_heads=reversed_num_attention_heads[i], + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention[i], + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_head_dim=attention_head_dim[i] if attention_head_dim[i] is not None else output_channel, + ) + self.up_blocks.append(up_block) + prev_output_channel = output_channel + + for _ in range(layers_per_block + 1): + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_up_blocks.append(brushnet_block) + + if not is_final_block: + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_up_blocks.append(brushnet_block) + + @classmethod + def from_unet( + cls, + unet: UNet2DConditionModel, + brushnet_conditioning_channel_order: str = "rgb", + conditioning_embedding_out_channels: Optional[Tuple[int, ...]] = (16, 32, 96, 256), + load_weights_from_unet: bool = True, + conditioning_channels: int = 5, + ): + r""" + Instantiate a [`BrushNetModel`] from [`UNet2DConditionModel`]. + + Parameters: + unet (`UNet2DConditionModel`): + The UNet model weights to copy to the [`BrushNetModel`]. All configuration options are also copied + where applicable. 
+ """ + transformer_layers_per_block = ( + unet.config.transformer_layers_per_block if "transformer_layers_per_block" in unet.config else 1 + ) + encoder_hid_dim = unet.config.encoder_hid_dim if "encoder_hid_dim" in unet.config else None + encoder_hid_dim_type = unet.config.encoder_hid_dim_type if "encoder_hid_dim_type" in unet.config else None + addition_embed_type = unet.config.addition_embed_type if "addition_embed_type" in unet.config else None + addition_time_embed_dim = ( + unet.config.addition_time_embed_dim if "addition_time_embed_dim" in unet.config else None + ) + + down_block_types = ["DownBlock2D" for block_name in unet.config.down_block_types] + mid_block_type = "MidBlock2D" + up_block_types = ["UpBlock2D" for block_name in unet.config.down_block_types] + + brushnet = cls( + in_channels=unet.config.in_channels, + conditioning_channels=conditioning_channels, + flip_sin_to_cos=unet.config.flip_sin_to_cos, + freq_shift=unet.config.freq_shift, + down_block_types=down_block_types, + mid_block_type=mid_block_type, + up_block_types=up_block_types, + only_cross_attention=unet.config.only_cross_attention, + block_out_channels=unet.config.block_out_channels, + layers_per_block=unet.config.layers_per_block, + downsample_padding=unet.config.downsample_padding, + mid_block_scale_factor=unet.config.mid_block_scale_factor, + act_fn=unet.config.act_fn, + norm_num_groups=unet.config.norm_num_groups, + norm_eps=unet.config.norm_eps, + cross_attention_dim=unet.config.cross_attention_dim, + transformer_layers_per_block=transformer_layers_per_block, + encoder_hid_dim=encoder_hid_dim, + encoder_hid_dim_type=encoder_hid_dim_type, + attention_head_dim=unet.config.attention_head_dim, + num_attention_heads=unet.config.num_attention_heads, + use_linear_projection=unet.config.use_linear_projection, + class_embed_type=unet.config.class_embed_type, + addition_embed_type=addition_embed_type, + addition_time_embed_dim=addition_time_embed_dim, + num_class_embeds=unet.config.num_class_embeds, + upcast_attention=unet.config.upcast_attention, + resnet_time_scale_shift=unet.config.resnet_time_scale_shift, + projection_class_embeddings_input_dim=unet.config.projection_class_embeddings_input_dim, + brushnet_conditioning_channel_order=brushnet_conditioning_channel_order, + conditioning_embedding_out_channels=conditioning_embedding_out_channels, + ) + + if load_weights_from_unet: + conv_in_condition_weight = torch.zeros_like(brushnet.conv_in_condition.weight) + conv_in_condition_weight[:, :4, ...] = unet.conv_in.weight + conv_in_condition_weight[:, 4:8, ...] 
= unet.conv_in.weight + brushnet.conv_in_condition.weight = torch.nn.Parameter(conv_in_condition_weight) + brushnet.conv_in_condition.bias = unet.conv_in.bias + + brushnet.time_proj.load_state_dict(unet.time_proj.state_dict()) + brushnet.time_embedding.load_state_dict(unet.time_embedding.state_dict()) + + if brushnet.class_embedding: + brushnet.class_embedding.load_state_dict(unet.class_embedding.state_dict()) + + brushnet.down_blocks.load_state_dict(unet.down_blocks.state_dict(), strict=False) + brushnet.mid_block.load_state_dict(unet.mid_block.state_dict(), strict=False) + brushnet.up_blocks.load_state_dict(unet.up_blocks.state_dict(), strict=False) + + return brushnet + + @property + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.attn_processors + def attn_processors(self) -> Dict[str, AttentionProcessor]: + r""" + Returns: + `dict` of attention processors: A dictionary containing all attention processors used in the model with + indexed by its weight name. + """ + # set recursively + processors = {} + + def fn_recursive_add_processors(name: str, module: torch.nn.Module, + processors: Dict[str, AttentionProcessor]): + if hasattr(module, "get_processor"): + processors[f"{name}.processor"] = module.get_processor(return_deprecated_lora=True) + + for sub_name, child in module.named_children(): + fn_recursive_add_processors(f"{name}.{sub_name}", child, processors) + + return processors + + for name, module in self.named_children(): + fn_recursive_add_processors(name, module, processors) + + return processors + + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.set_attn_processor + def set_attn_processor(self, processor: Union[AttentionProcessor, Dict[str, AttentionProcessor]]): + r""" + Sets the attention processor to use to compute attention. + + Parameters: + processor (`dict` of `AttentionProcessor` or only `AttentionProcessor`): + The instantiated processor class or a dictionary of processor classes that will be set as the processor + for **all** `Attention` layers. + + If `processor` is a dict, the key needs to define the path to the corresponding cross attention + processor. This is strongly recommended when setting trainable attention processors. + + """ + count = len(self.attn_processors.keys()) + + if isinstance(processor, dict) and len(processor) != count: + raise ValueError( + f"A dict of processors was passed, but the number of processors {len(processor)} does not match the" + f" number of attention layers: {count}. Please make sure to pass {count} processor classes." + ) + + def fn_recursive_attn_processor(name: str, module: torch.nn.Module, processor): + if hasattr(module, "set_processor"): + if not isinstance(processor, dict): + module.set_processor(processor) + else: + module.set_processor(processor.pop(f"{name}.processor")) + + for sub_name, child in module.named_children(): + fn_recursive_attn_processor(f"{name}.{sub_name}", child, processor) + + for name, module in self.named_children(): + fn_recursive_attn_processor(name, module, processor) + + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.set_default_attn_processor + def set_default_attn_processor(self): + """ + Disables custom attention processors and sets the default attention implementation. 
+ """ + if all(proc.__class__ in ADDED_KV_ATTENTION_PROCESSORS for proc in self.attn_processors.values()): + processor = AttnAddedKVProcessor() + elif all(proc.__class__ in CROSS_ATTENTION_PROCESSORS for proc in self.attn_processors.values()): + processor = AttnProcessor() + else: + raise ValueError( + f"Cannot call `set_default_attn_processor` when attention processors are of type {next(iter(self.attn_processors.values()))}" + ) + + self.set_attn_processor(processor) + + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.set_attention_slice + def set_attention_slice(self, slice_size: Union[str, int, List[int]]) -> None: + r""" + Enable sliced attention computation. + + When this option is enabled, the attention module splits the input tensor in slices to compute attention in + several steps. This is useful for saving some memory in exchange for a small decrease in speed. + + Args: + slice_size (`str` or `int` or `list(int)`, *optional*, defaults to `"auto"`): + When `"auto"`, input to the attention heads is halved, so attention is computed in two steps. If + `"max"`, maximum amount of memory is saved by running only one slice at a time. If a number is + provided, uses as many slices as `attention_head_dim // slice_size`. In this case, `attention_head_dim` + must be a multiple of `slice_size`. + """ + sliceable_head_dims = [] + + def fn_recursive_retrieve_sliceable_dims(module: torch.nn.Module): + if hasattr(module, "set_attention_slice"): + sliceable_head_dims.append(module.sliceable_head_dim) + + for child in module.children(): + fn_recursive_retrieve_sliceable_dims(child) + + # retrieve number of attention layers + for module in self.children(): + fn_recursive_retrieve_sliceable_dims(module) + + num_sliceable_layers = len(sliceable_head_dims) + + if slice_size == "auto": + # half the attention head size is usually a good trade-off between + # speed and memory + slice_size = [dim // 2 for dim in sliceable_head_dims] + elif slice_size == "max": + # make smallest slice possible + slice_size = num_sliceable_layers * [1] + + slice_size = num_sliceable_layers * [slice_size] if not isinstance(slice_size, list) else slice_size + + if len(slice_size) != len(sliceable_head_dims): + raise ValueError( + f"You have provided {len(slice_size)}, but {self.config} has {len(sliceable_head_dims)} different" + f" attention layers. Make sure to match `len(slice_size)` to be {len(sliceable_head_dims)}." + ) + + for i in range(len(slice_size)): + size = slice_size[i] + dim = sliceable_head_dims[i] + if size is not None and size > dim: + raise ValueError(f"size {size} has to be smaller or equal to {dim}.") + + # Recursively walk through all the children. 
+ # Any children which exposes the set_attention_slice method + # gets the message + def fn_recursive_set_attention_slice(module: torch.nn.Module, slice_size: List[int]): + if hasattr(module, "set_attention_slice"): + module.set_attention_slice(slice_size.pop()) + + for child in module.children(): + fn_recursive_set_attention_slice(child, slice_size) + + reversed_slice_size = list(reversed(slice_size)) + for module in self.children(): + fn_recursive_set_attention_slice(module, reversed_slice_size) + + def _set_gradient_checkpointing(self, module, value: bool = False) -> None: + if isinstance(module, (CrossAttnDownBlock2D, DownBlock2D)): + module.gradient_checkpointing = value + + def forward( + self, + sample: torch.FloatTensor, + encoder_hidden_states: torch.Tensor, + brushnet_cond: torch.FloatTensor, + timestep=None, + time_emb=None, + conditioning_scale: float = 1.0, + class_labels: Optional[torch.Tensor] = None, + timestep_cond: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + added_cond_kwargs: Optional[Dict[str, torch.Tensor]] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + guess_mode: bool = False, + return_dict: bool = True, + debug=False, + ) -> Union[BrushNetOutput, Tuple[Tuple[torch.FloatTensor, ...], torch.FloatTensor]]: + + # check channel order + channel_order = self.config.brushnet_conditioning_channel_order + + if channel_order == "rgb": + # in rgb order by default + ... + elif channel_order == "bgr": + brushnet_cond = torch.flip(brushnet_cond, dims=[1]) + else: + raise ValueError(f"unknown `brushnet_conditioning_channel_order`: {channel_order}") + + # prepare attention_mask + if attention_mask is not None: + attention_mask = (1 - attention_mask.to(sample.dtype)) * -10000.0 + attention_mask = attention_mask.unsqueeze(1) + + if timestep is None and time_emb is None: + raise ValueError(f"`timestep` and `emb` are both None") + + # print("BN: sample.device", sample.device) + # print("BN: TE.device", self.time_embedding.linear_1.weight.device) + + if timestep is not None: + # 1. time + timesteps = timestep + if not torch.is_tensor(timesteps): + # TODO: this requires sync between CPU and GPU. So try to pass timesteps as tensors if you can + # This would be a good case for the `match` statement (Python 3.10+) + is_mps = sample.device.type == "mps" + if isinstance(timestep, float): + dtype = torch.float32 if is_mps else torch.float64 + else: + dtype = torch.int32 if is_mps else torch.int64 + timesteps = torch.tensor([timesteps], dtype=dtype, device=sample.device) + elif len(timesteps.shape) == 0: + timesteps = timesteps[None].to(sample.device) + + # broadcast to batch dimension in a way that's compatible with ONNX/Core ML + timesteps = timesteps.expand(sample.shape[0]) + + t_emb = self.time_proj(timesteps) + + # timesteps does not contain any weights and will always return f32 tensors + # but time_embedding might actually be running in fp16. so we need to cast here. + # there might be better ways to encapsulate this. 
+ t_emb = t_emb.to(dtype=sample.dtype) + + # print("t_emb.device =",t_emb.device) + + emb = self.time_embedding(t_emb, timestep_cond) + aug_emb = None + + # print('emb.shape', emb.shape) + + if self.class_embedding is not None: + if class_labels is None: + raise ValueError("class_labels should be provided when num_class_embeds > 0") + + if self.config.class_embed_type == "timestep": + class_labels = self.time_proj(class_labels) + + class_emb = self.class_embedding(class_labels).to(dtype=self.dtype) + emb = emb + class_emb + + if self.config.addition_embed_type is not None: + if self.config.addition_embed_type == "text": + aug_emb = self.add_embedding(encoder_hidden_states) + + elif self.config.addition_embed_type == "text_time": + if "text_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_time' which requires the keyword argument `text_embeds` to be passed in `added_cond_kwargs`" + ) + text_embeds = added_cond_kwargs.get("text_embeds") + if "time_ids" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_time' which requires the keyword argument `time_ids` to be passed in `added_cond_kwargs`" + ) + time_ids = added_cond_kwargs.get("time_ids") + time_embeds = self.add_time_proj(time_ids.flatten()) + time_embeds = time_embeds.reshape((text_embeds.shape[0], -1)) + + add_embeds = torch.concat([text_embeds, time_embeds], dim=-1) + add_embeds = add_embeds.to(emb.dtype) + aug_emb = self.add_embedding(add_embeds) + + # print('text_embeds', text_embeds.shape, 'time_ids', time_ids.shape, 'time_embeds', time_embeds.shape, 'add__embeds', add_embeds.shape, 'aug_emb', aug_emb.shape) + + emb = emb + aug_emb if aug_emb is not None else emb + else: + emb = time_emb + + # 2. pre-process + + brushnet_cond = torch.concat([sample, brushnet_cond], 1) + sample = self.conv_in_condition(brushnet_cond) + + # 3. down + down_block_res_samples = (sample,) + for downsample_block in self.down_blocks: + if hasattr(downsample_block, "has_cross_attention") and downsample_block.has_cross_attention: + sample, res_samples = downsample_block( + hidden_states=sample, + temb=emb, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + ) + else: + sample, res_samples = downsample_block(hidden_states=sample, temb=emb) + + down_block_res_samples += res_samples + + # 4. PaintingNet down blocks + brushnet_down_block_res_samples = () + for down_block_res_sample, brushnet_down_block in zip(down_block_res_samples, self.brushnet_down_blocks): + down_block_res_sample = brushnet_down_block(down_block_res_sample) + brushnet_down_block_res_samples = brushnet_down_block_res_samples + (down_block_res_sample,) + + # 5. mid + if self.mid_block is not None: + if hasattr(self.mid_block, "has_cross_attention") and self.mid_block.has_cross_attention: + sample = self.mid_block( + sample, + emb, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + ) + else: + sample = self.mid_block(sample, emb) + + # 6. BrushNet mid blocks + brushnet_mid_block_res_sample = self.brushnet_mid_block(sample) + + # 7. 
up + up_block_res_samples = () + for i, upsample_block in enumerate(self.up_blocks): + is_final_block = i == len(self.up_blocks) - 1 + + res_samples = down_block_res_samples[-len(upsample_block.resnets):] + down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)] + + # if we have not reached the final block and need to forward the + # upsample size, we do it here + if not is_final_block: + upsample_size = down_block_res_samples[-1].shape[2:] + + if hasattr(upsample_block, "has_cross_attention") and upsample_block.has_cross_attention: + sample, up_res_samples = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + upsample_size=upsample_size, + attention_mask=attention_mask, + return_res_samples=True + ) + else: + sample, up_res_samples = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + upsample_size=upsample_size, + return_res_samples=True + ) + + up_block_res_samples += up_res_samples + + # 8. BrushNet up blocks + brushnet_up_block_res_samples = () + for up_block_res_sample, brushnet_up_block in zip(up_block_res_samples, self.brushnet_up_blocks): + up_block_res_sample = brushnet_up_block(up_block_res_sample) + brushnet_up_block_res_samples = brushnet_up_block_res_samples + (up_block_res_sample,) + + # 6. scaling + if guess_mode and not self.config.global_pool_conditions: + scales = torch.logspace(-1, 0, + len(brushnet_down_block_res_samples) + 1 + len(brushnet_up_block_res_samples), + device=sample.device) # 0.1 to 1.0 + scales = scales * conditioning_scale + + brushnet_down_block_res_samples = [sample * scale for sample, scale in + zip(brushnet_down_block_res_samples, + scales[:len(brushnet_down_block_res_samples)])] + brushnet_mid_block_res_sample = brushnet_mid_block_res_sample * scales[ + len(brushnet_down_block_res_samples)] + brushnet_up_block_res_samples = [sample * scale for sample, scale in zip(brushnet_up_block_res_samples, + scales[ + len(brushnet_down_block_res_samples) + 1:])] + else: + brushnet_down_block_res_samples = [sample * conditioning_scale for sample in + brushnet_down_block_res_samples] + brushnet_mid_block_res_sample = brushnet_mid_block_res_sample * conditioning_scale + brushnet_up_block_res_samples = [sample * conditioning_scale for sample in + brushnet_up_block_res_samples] + + if self.config.global_pool_conditions: + brushnet_down_block_res_samples = [ + torch.mean(sample, dim=(2, 3), keepdim=True) for sample in brushnet_down_block_res_samples + ] + brushnet_mid_block_res_sample = torch.mean(brushnet_mid_block_res_sample, dim=(2, 3), keepdim=True) + brushnet_up_block_res_samples = [ + torch.mean(sample, dim=(2, 3), keepdim=True) for sample in brushnet_up_block_res_samples + ] + + if not return_dict: + return (brushnet_down_block_res_samples, brushnet_mid_block_res_sample, brushnet_up_block_res_samples) + + return BrushNetOutput( + down_block_res_samples=brushnet_down_block_res_samples, + mid_block_res_sample=brushnet_mid_block_res_sample, + up_block_res_samples=brushnet_up_block_res_samples + ) + + # PowerPaintModel + class PowerPaintModel(ModelMixin, ConfigMixin): + _supports_gradient_checkpointing = True + + @register_to_config + def __init__( + self, + in_channels: int = 4, + conditioning_channels: int = 5, + flip_sin_to_cos: bool = True, + freq_shift: int = 0, + down_block_types: Tuple[str, ...] 
= ( + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "DownBlock2D", + ), + mid_block_type: Optional[str] = "UNetMidBlock2DCrossAttn", + up_block_types: Tuple[str, ...] = ( + "UpBlock2D", + "CrossAttnUpBlock2D", + "CrossAttnUpBlock2D", + "CrossAttnUpBlock2D", + ), + only_cross_attention: Union[bool, Tuple[bool]] = False, + block_out_channels: Tuple[int, ...] = (320, 640, 1280, 1280), + layers_per_block: int = 2, + downsample_padding: int = 1, + mid_block_scale_factor: float = 1, + act_fn: str = "silu", + norm_num_groups: Optional[int] = 32, + norm_eps: float = 1e-5, + cross_attention_dim: int = 1280, + transformer_layers_per_block: Union[int, Tuple[int, ...]] = 1, + encoder_hid_dim: Optional[int] = None, + encoder_hid_dim_type: Optional[str] = None, + attention_head_dim: Union[int, Tuple[int, ...]] = 8, + num_attention_heads: Optional[Union[int, Tuple[int, ...]]] = None, + use_linear_projection: bool = False, + class_embed_type: Optional[str] = None, + addition_embed_type: Optional[str] = None, + addition_time_embed_dim: Optional[int] = None, + num_class_embeds: Optional[int] = None, + upcast_attention: bool = False, + resnet_time_scale_shift: str = "default", + projection_class_embeddings_input_dim: Optional[int] = None, + brushnet_conditioning_channel_order: str = "rgb", + conditioning_embedding_out_channels: Optional[Tuple[int, ...]] = (16, 32, 96, 256), + global_pool_conditions: bool = False, + addition_embed_type_num_heads: int = 64, + ): + super().__init__() + + # If `num_attention_heads` is not defined (which is the case for most models) + # it will default to `attention_head_dim`. This looks weird upon first reading it and it is. + # The reason for this behavior is to correct for incorrectly named variables that were introduced + # when this library was created. The incorrect naming was only discovered much later in https://github.com/huggingface/diffusers/issues/2011#issuecomment-1547958131 + # Changing `attention_head_dim` to `num_attention_heads` for 40,000+ configurations is too backwards breaking + # which is why we correct for the naming here. + num_attention_heads = num_attention_heads or attention_head_dim + + # Check inputs + if len(down_block_types) != len(up_block_types): + raise ValueError( + f"Must provide the same number of `down_block_types` as `up_block_types`. `down_block_types`: {down_block_types}. `up_block_types`: {up_block_types}." + ) + + if len(block_out_channels) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `block_out_channels` as `down_block_types`. `block_out_channels`: {block_out_channels}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(only_cross_attention, bool) and len(only_cross_attention) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `only_cross_attention` as `down_block_types`. `only_cross_attention`: {only_cross_attention}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(num_attention_heads, int) and len(num_attention_heads) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `num_attention_heads` as `down_block_types`. `num_attention_heads`: {num_attention_heads}. `down_block_types`: {down_block_types}." 
+ ) + + if isinstance(transformer_layers_per_block, int): + transformer_layers_per_block = [transformer_layers_per_block] * len(down_block_types) + + # input + conv_in_kernel = 3 + conv_in_padding = (conv_in_kernel - 1) // 2 + self.conv_in_condition = nn.Conv2d( + in_channels + conditioning_channels, + block_out_channels[0], + kernel_size=conv_in_kernel, + padding=conv_in_padding, + ) + + # time + time_embed_dim = block_out_channels[0] * 4 + self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift) + timestep_input_dim = block_out_channels[0] + self.time_embedding = TimestepEmbedding( + timestep_input_dim, + time_embed_dim, + act_fn=act_fn, + ) + + if encoder_hid_dim_type is None and encoder_hid_dim is not None: + encoder_hid_dim_type = "text_proj" + self.register_to_config(encoder_hid_dim_type=encoder_hid_dim_type) + logger.info("encoder_hid_dim_type defaults to 'text_proj' as `encoder_hid_dim` is defined.") + + if encoder_hid_dim is None and encoder_hid_dim_type is not None: + raise ValueError( + f"`encoder_hid_dim` has to be defined when `encoder_hid_dim_type` is set to {encoder_hid_dim_type}." + ) + + if encoder_hid_dim_type == "text_proj": + self.encoder_hid_proj = nn.Linear(encoder_hid_dim, cross_attention_dim) + elif encoder_hid_dim_type == "text_image_proj": + # image_embed_dim DOESN'T have to be `cross_attention_dim`. To not clutter the __init__ too much + # they are set to `cross_attention_dim` here as this is exactly the required dimension for the currently only use + # case when `addition_embed_type == "text_image_proj"` (Kadinsky 2.1)` + self.encoder_hid_proj = TextImageProjection( + text_embed_dim=encoder_hid_dim, + image_embed_dim=cross_attention_dim, + cross_attention_dim=cross_attention_dim, + ) + + elif encoder_hid_dim_type is not None: + raise ValueError( + f"encoder_hid_dim_type: {encoder_hid_dim_type} must be None, 'text_proj' or 'text_image_proj'." + ) + else: + self.encoder_hid_proj = None + + # class embedding + if class_embed_type is None and num_class_embeds is not None: + self.class_embedding = nn.Embedding(num_class_embeds, time_embed_dim) + elif class_embed_type == "timestep": + self.class_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim) + elif class_embed_type == "identity": + self.class_embedding = nn.Identity(time_embed_dim, time_embed_dim) + elif class_embed_type == "projection": + if projection_class_embeddings_input_dim is None: + raise ValueError( + "`class_embed_type`: 'projection' requires `projection_class_embeddings_input_dim` be set" + ) + # The projection `class_embed_type` is the same as the timestep `class_embed_type` except + # 1. the `class_labels` inputs are not first converted to sinusoidal embeddings + # 2. it projects from an arbitrary input dimension. + # + # Note that `TimestepEmbedding` is quite general, being mainly linear layers and activations. + # When used for embedding actual timesteps, the timesteps are first converted to sinusoidal embeddings. + # As a result, `TimestepEmbedding` can be passed arbitrary vectors. 
+ self.class_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim) + else: + self.class_embedding = None + + if addition_embed_type == "text": + if encoder_hid_dim is not None: + text_time_embedding_from_dim = encoder_hid_dim + else: + text_time_embedding_from_dim = cross_attention_dim + + self.add_embedding = TextTimeEmbedding( + text_time_embedding_from_dim, time_embed_dim, num_heads=addition_embed_type_num_heads + ) + elif addition_embed_type == "text_image": + # text_embed_dim and image_embed_dim DON'T have to be `cross_attention_dim`. To not clutter the __init__ too much + # they are set to `cross_attention_dim` here as this is exactly the required dimension for the currently only use + # case when `addition_embed_type == "text_image"` (Kadinsky 2.1)` + self.add_embedding = TextImageTimeEmbedding( + text_embed_dim=cross_attention_dim, image_embed_dim=cross_attention_dim, time_embed_dim=time_embed_dim + ) + elif addition_embed_type == "text_time": + self.add_time_proj = Timesteps(addition_time_embed_dim, flip_sin_to_cos, freq_shift) + self.add_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim) + + elif addition_embed_type is not None: + raise ValueError(f"addition_embed_type: {addition_embed_type} must be None, 'text' or 'text_image'.") + + self.down_blocks = nn.ModuleList([]) + self.brushnet_down_blocks = nn.ModuleList([]) + + if isinstance(only_cross_attention, bool): + only_cross_attention = [only_cross_attention] * len(down_block_types) + + if isinstance(attention_head_dim, int): + attention_head_dim = (attention_head_dim,) * len(down_block_types) + + if isinstance(num_attention_heads, int): + num_attention_heads = (num_attention_heads,) * len(down_block_types) + + # down + output_channel = block_out_channels[0] + + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_down_blocks.append(brushnet_block) + + for i, down_block_type in enumerate(down_block_types): + input_channel = output_channel + output_channel = block_out_channels[i] + is_final_block = i == len(block_out_channels) - 1 + + down_block = get_down_block( + down_block_type, + num_layers=layers_per_block, + transformer_layers_per_block=transformer_layers_per_block[i], + in_channels=input_channel, + out_channels=output_channel, + temb_channels=time_embed_dim, + add_downsample=not is_final_block, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim, + num_attention_heads=num_attention_heads[i], + attention_head_dim=attention_head_dim[i] if attention_head_dim[i] is not None else output_channel, + downsample_padding=downsample_padding, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention[i], + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + self.down_blocks.append(down_block) + + for _ in range(layers_per_block): + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_down_blocks.append(brushnet_block) + + if not is_final_block: + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_down_blocks.append(brushnet_block) + + # mid + mid_block_channel = block_out_channels[-1] + + brushnet_block = nn.Conv2d(mid_block_channel, mid_block_channel, kernel_size=1) + 
brushnet_block = zero_module(brushnet_block) + self.brushnet_mid_block = brushnet_block + + self.mid_block = get_mid_block( + mid_block_type, + transformer_layers_per_block=transformer_layers_per_block[-1], + in_channels=mid_block_channel, + temb_channels=time_embed_dim, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + output_scale_factor=mid_block_scale_factor, + resnet_time_scale_shift=resnet_time_scale_shift, + cross_attention_dim=cross_attention_dim, + num_attention_heads=num_attention_heads[-1], + resnet_groups=norm_num_groups, + use_linear_projection=use_linear_projection, + upcast_attention=upcast_attention, + ) + + # count how many layers upsample the images + self.num_upsamplers = 0 + + # up + reversed_block_out_channels = list(reversed(block_out_channels)) + reversed_num_attention_heads = list(reversed(num_attention_heads)) + reversed_transformer_layers_per_block = list(reversed(transformer_layers_per_block)) + only_cross_attention = list(reversed(only_cross_attention)) + + output_channel = reversed_block_out_channels[0] + + self.up_blocks = nn.ModuleList([]) + self.brushnet_up_blocks = nn.ModuleList([]) + + for i, up_block_type in enumerate(up_block_types): + is_final_block = i == len(block_out_channels) - 1 + + prev_output_channel = output_channel + output_channel = reversed_block_out_channels[i] + input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)] + + # add upsample block for all BUT final layer + if not is_final_block: + add_upsample = True + self.num_upsamplers += 1 + else: + add_upsample = False + + up_block = get_up_block( + up_block_type, + num_layers=layers_per_block + 1, + transformer_layers_per_block=reversed_transformer_layers_per_block[i], + in_channels=input_channel, + out_channels=output_channel, + prev_output_channel=prev_output_channel, + temb_channels=time_embed_dim, + add_upsample=add_upsample, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resolution_idx=i, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim, + num_attention_heads=reversed_num_attention_heads[i], + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention[i], + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_head_dim=attention_head_dim[i] if attention_head_dim[i] is not None else output_channel, + ) + self.up_blocks.append(up_block) + prev_output_channel = output_channel + + for _ in range(layers_per_block + 1): + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_up_blocks.append(brushnet_block) + + if not is_final_block: + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_up_blocks.append(brushnet_block) + + @classmethod + def from_unet( + cls, + unet: UNet2DConditionModel, + brushnet_conditioning_channel_order: str = "rgb", + conditioning_embedding_out_channels: Optional[Tuple[int, ...]] = (16, 32, 96, 256), + load_weights_from_unet: bool = True, + conditioning_channels: int = 5, + ): + r""" + Instantiate a [`BrushNetModel`] from [`UNet2DConditionModel`]. + + Parameters: + unet (`UNet2DConditionModel`): + The UNet model weights to copy to the [`BrushNetModel`]. All configuration options are also copied + where applicable. 
+ """ + transformer_layers_per_block = ( + unet.config.transformer_layers_per_block if "transformer_layers_per_block" in unet.config else 1 + ) + encoder_hid_dim = unet.config.encoder_hid_dim if "encoder_hid_dim" in unet.config else None + encoder_hid_dim_type = unet.config.encoder_hid_dim_type if "encoder_hid_dim_type" in unet.config else None + addition_embed_type = unet.config.addition_embed_type if "addition_embed_type" in unet.config else None + addition_time_embed_dim = ( + unet.config.addition_time_embed_dim if "addition_time_embed_dim" in unet.config else None + ) + + brushnet = cls( + in_channels=unet.config.in_channels, + conditioning_channels=conditioning_channels, + flip_sin_to_cos=unet.config.flip_sin_to_cos, + freq_shift=unet.config.freq_shift, + # down_block_types=['DownBlock2D','DownBlock2D','DownBlock2D','DownBlock2D'], + down_block_types=[ + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "DownBlock2D", + ], + # mid_block_type='MidBlock2D', + mid_block_type="UNetMidBlock2DCrossAttn", + # up_block_types=['UpBlock2D','UpBlock2D','UpBlock2D','UpBlock2D'], + up_block_types=["UpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D"], + only_cross_attention=unet.config.only_cross_attention, + block_out_channels=unet.config.block_out_channels, + layers_per_block=unet.config.layers_per_block, + downsample_padding=unet.config.downsample_padding, + mid_block_scale_factor=unet.config.mid_block_scale_factor, + act_fn=unet.config.act_fn, + norm_num_groups=unet.config.norm_num_groups, + norm_eps=unet.config.norm_eps, + cross_attention_dim=unet.config.cross_attention_dim, + transformer_layers_per_block=transformer_layers_per_block, + encoder_hid_dim=encoder_hid_dim, + encoder_hid_dim_type=encoder_hid_dim_type, + attention_head_dim=unet.config.attention_head_dim, + num_attention_heads=unet.config.num_attention_heads, + use_linear_projection=unet.config.use_linear_projection, + class_embed_type=unet.config.class_embed_type, + addition_embed_type=addition_embed_type, + addition_time_embed_dim=addition_time_embed_dim, + num_class_embeds=unet.config.num_class_embeds, + upcast_attention=unet.config.upcast_attention, + resnet_time_scale_shift=unet.config.resnet_time_scale_shift, + projection_class_embeddings_input_dim=unet.config.projection_class_embeddings_input_dim, + brushnet_conditioning_channel_order=brushnet_conditioning_channel_order, + conditioning_embedding_out_channels=conditioning_embedding_out_channels, + ) + + if load_weights_from_unet: + conv_in_condition_weight = torch.zeros_like(brushnet.conv_in_condition.weight) + conv_in_condition_weight[:, :4, ...] = unet.conv_in.weight + conv_in_condition_weight[:, 4:8, ...] 
= unet.conv_in.weight + brushnet.conv_in_condition.weight = torch.nn.Parameter(conv_in_condition_weight) + brushnet.conv_in_condition.bias = unet.conv_in.bias + + brushnet.time_proj.load_state_dict(unet.time_proj.state_dict()) + brushnet.time_embedding.load_state_dict(unet.time_embedding.state_dict()) + + if brushnet.class_embedding: + brushnet.class_embedding.load_state_dict(unet.class_embedding.state_dict()) + + brushnet.down_blocks.load_state_dict(unet.down_blocks.state_dict(), strict=False) + brushnet.mid_block.load_state_dict(unet.mid_block.state_dict(), strict=False) + brushnet.up_blocks.load_state_dict(unet.up_blocks.state_dict(), strict=False) + + return brushnet.to(unet.dtype) + + @property + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.attn_processors + def attn_processors(self) -> Dict[str, AttentionProcessor]: + r""" + Returns: + `dict` of attention processors: A dictionary containing all attention processors used in the model with + indexed by its weight name. + """ + # set recursively + processors = {} + + def fn_recursive_add_processors(name: str, module: torch.nn.Module, processors: Dict[str, AttentionProcessor]): + if hasattr(module, "get_processor"): + processors[f"{name}.processor"] = module.get_processor(return_deprecated_lora=True) + + for sub_name, child in module.named_children(): + fn_recursive_add_processors(f"{name}.{sub_name}", child, processors) + + return processors + + for name, module in self.named_children(): + fn_recursive_add_processors(name, module, processors) + + return processors + + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.set_attn_processor + def set_attn_processor(self, processor: Union[AttentionProcessor, Dict[str, AttentionProcessor]]): + r""" + Sets the attention processor to use to compute attention. + + Parameters: + processor (`dict` of `AttentionProcessor` or only `AttentionProcessor`): + The instantiated processor class or a dictionary of processor classes that will be set as the processor + for **all** `Attention` layers. + + If `processor` is a dict, the key needs to define the path to the corresponding cross attention + processor. This is strongly recommended when setting trainable attention processors. + + """ + count = len(self.attn_processors.keys()) + + if isinstance(processor, dict) and len(processor) != count: + raise ValueError( + f"A dict of processors was passed, but the number of processors {len(processor)} does not match the" + f" number of attention layers: {count}. Please make sure to pass {count} processor classes." + ) + + def fn_recursive_attn_processor(name: str, module: torch.nn.Module, processor): + if hasattr(module, "set_processor"): + if not isinstance(processor, dict): + module.set_processor(processor) + else: + module.set_processor(processor.pop(f"{name}.processor")) + + for sub_name, child in module.named_children(): + fn_recursive_attn_processor(f"{name}.{sub_name}", child, processor) + + for name, module in self.named_children(): + fn_recursive_attn_processor(name, module, processor) + + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.set_default_attn_processor + def set_default_attn_processor(self): + """ + Disables custom attention processors and sets the default attention implementation. 
+ """ + if all(proc.__class__ in ADDED_KV_ATTENTION_PROCESSORS for proc in self.attn_processors.values()): + processor = AttnAddedKVProcessor() + elif all(proc.__class__ in CROSS_ATTENTION_PROCESSORS for proc in self.attn_processors.values()): + processor = AttnProcessor() + else: + raise ValueError( + f"Cannot call `set_default_attn_processor` when attention processors are of type {next(iter(self.attn_processors.values()))}" + ) + + self.set_attn_processor(processor) + + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.set_attention_slice + def set_attention_slice(self, slice_size: Union[str, int, List[int]]) -> None: + r""" + Enable sliced attention computation. + + When this option is enabled, the attention module splits the input tensor in slices to compute attention in + several steps. This is useful for saving some memory in exchange for a small decrease in speed. + + Args: + slice_size (`str` or `int` or `list(int)`, *optional*, defaults to `"auto"`): + When `"auto"`, input to the attention heads is halved, so attention is computed in two steps. If + `"max"`, maximum amount of memory is saved by running only one slice at a time. If a number is + provided, uses as many slices as `attention_head_dim // slice_size`. In this case, `attention_head_dim` + must be a multiple of `slice_size`. + """ + sliceable_head_dims = [] + + def fn_recursive_retrieve_sliceable_dims(module: torch.nn.Module): + if hasattr(module, "set_attention_slice"): + sliceable_head_dims.append(module.sliceable_head_dim) + + for child in module.children(): + fn_recursive_retrieve_sliceable_dims(child) + + # retrieve number of attention layers + for module in self.children(): + fn_recursive_retrieve_sliceable_dims(module) + + num_sliceable_layers = len(sliceable_head_dims) + + if slice_size == "auto": + # half the attention head size is usually a good trade-off between + # speed and memory + slice_size = [dim // 2 for dim in sliceable_head_dims] + elif slice_size == "max": + # make smallest slice possible + slice_size = num_sliceable_layers * [1] + + slice_size = num_sliceable_layers * [slice_size] if not isinstance(slice_size, list) else slice_size + + if len(slice_size) != len(sliceable_head_dims): + raise ValueError( + f"You have provided {len(slice_size)}, but {self.config} has {len(sliceable_head_dims)} different" + f" attention layers. Make sure to match `len(slice_size)` to be {len(sliceable_head_dims)}." + ) + + for i in range(len(slice_size)): + size = slice_size[i] + dim = sliceable_head_dims[i] + if size is not None and size > dim: + raise ValueError(f"size {size} has to be smaller or equal to {dim}.") + + # Recursively walk through all the children. 
+ # Any children which exposes the set_attention_slice method + # gets the message + def fn_recursive_set_attention_slice(module: torch.nn.Module, slice_size: List[int]): + if hasattr(module, "set_attention_slice"): + module.set_attention_slice(slice_size.pop()) + + for child in module.children(): + fn_recursive_set_attention_slice(child, slice_size) + + reversed_slice_size = list(reversed(slice_size)) + for module in self.children(): + fn_recursive_set_attention_slice(module, reversed_slice_size) + + def _set_gradient_checkpointing(self, module, value: bool = False) -> None: + if isinstance(module, (CrossAttnDownBlock2D, DownBlock2D)): + module.gradient_checkpointing = value + + def forward( + self, + sample: torch.FloatTensor, + timestep: Union[torch.Tensor, float, int], + encoder_hidden_states: torch.Tensor, + brushnet_cond: torch.FloatTensor, + conditioning_scale: float = 1.0, + class_labels: Optional[torch.Tensor] = None, + timestep_cond: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + added_cond_kwargs: Optional[Dict[str, torch.Tensor]] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + guess_mode: bool = False, + return_dict: bool = True, + debug=False, + ) -> Union[BrushNetOutput, Tuple[Tuple[torch.FloatTensor, ...], torch.FloatTensor]]: + """ + The [`BrushNetModel`] forward method. + + Args: + sample (`torch.FloatTensor`): + The noisy input tensor. + timestep (`Union[torch.Tensor, float, int]`): + The number of timesteps to denoise an input. + encoder_hidden_states (`torch.Tensor`): + The encoder hidden states. + brushnet_cond (`torch.FloatTensor`): + The conditional input tensor of shape `(batch_size, sequence_length, hidden_size)`. + conditioning_scale (`float`, defaults to `1.0`): + The scale factor for BrushNet outputs. + class_labels (`torch.Tensor`, *optional*, defaults to `None`): + Optional class labels for conditioning. Their embeddings will be summed with the timestep embeddings. + timestep_cond (`torch.Tensor`, *optional*, defaults to `None`): + Additional conditional embeddings for timestep. If provided, the embeddings will be summed with the + timestep_embedding passed through the `self.time_embedding` layer to obtain the final timestep + embeddings. + attention_mask (`torch.Tensor`, *optional*, defaults to `None`): + An attention mask of shape `(batch, key_tokens)` is applied to `encoder_hidden_states`. If `1` the mask + is kept, otherwise if `0` it is discarded. Mask will be converted into a bias, which adds large + negative values to the attention scores corresponding to "discard" tokens. + added_cond_kwargs (`dict`): + Additional conditions for the Stable Diffusion XL UNet. + cross_attention_kwargs (`dict[str]`, *optional*, defaults to `None`): + A kwargs dictionary that if specified is passed along to the `AttnProcessor`. + guess_mode (`bool`, defaults to `False`): + In this mode, the BrushNet encoder tries its best to recognize the input content of the input even if + you remove all prompts. A `guidance_scale` between 3.0 and 5.0 is recommended. + return_dict (`bool`, defaults to `True`): + Whether or not to return a [`~models.brushnet.BrushNetOutput`] instead of a plain tuple. + + Returns: + [`~models.brushnet.BrushNetOutput`] **or** `tuple`: + If `return_dict` is `True`, a [`~models.brushnet.BrushNetOutput`] is returned, otherwise a tuple is + returned where the first element is the sample tensor. 
+ """ + # check channel order + channel_order = self.config.brushnet_conditioning_channel_order + + if channel_order == "rgb": + # in rgb order by default + ... + elif channel_order == "bgr": + brushnet_cond = torch.flip(brushnet_cond, dims=[1]) + else: + raise ValueError(f"unknown `brushnet_conditioning_channel_order`: {channel_order}") + + if debug: print('BrushNet CA: attn mask') + + # prepare attention_mask + if attention_mask is not None: + attention_mask = (1 - attention_mask.to(sample.dtype)) * -10000.0 + attention_mask = attention_mask.unsqueeze(1) + + if debug: print('BrushNet CA: time') + + # 1. time + timesteps = timestep + if not torch.is_tensor(timesteps): + # TODO: this requires sync between CPU and GPU. So try to pass timesteps as tensors if you can + # This would be a good case for the `match` statement (Python 3.10+) + is_mps = sample.device.type == "mps" + if isinstance(timestep, float): + dtype = torch.float32 if is_mps else torch.float64 + else: + dtype = torch.int32 if is_mps else torch.int64 + timesteps = torch.tensor([timesteps], dtype=dtype, device=sample.device) + elif len(timesteps.shape) == 0: + timesteps = timesteps[None].to(sample.device) + + # broadcast to batch dimension in a way that's compatible with ONNX/Core ML + timesteps = timesteps.expand(sample.shape[0]) + + t_emb = self.time_proj(timesteps) + + # timesteps does not contain any weights and will always return f32 tensors + # but time_embedding might actually be running in fp16. so we need to cast here. + # there might be better ways to encapsulate this. + t_emb = t_emb.to(dtype=sample.dtype) + + emb = self.time_embedding(t_emb, timestep_cond) + aug_emb = None + + if self.class_embedding is not None: + if class_labels is None: + raise ValueError("class_labels should be provided when num_class_embeds > 0") + + if self.config.class_embed_type == "timestep": + class_labels = self.time_proj(class_labels) + + class_emb = self.class_embedding(class_labels).to(dtype=self.dtype) + emb = emb + class_emb + + if self.config.addition_embed_type is not None: + if self.config.addition_embed_type == "text": + aug_emb = self.add_embedding(encoder_hidden_states) + + elif self.config.addition_embed_type == "text_time": + if "text_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_time' which requires the keyword argument `text_embeds` to be passed in `added_cond_kwargs`" + ) + text_embeds = added_cond_kwargs.get("text_embeds") + if "time_ids" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_time' which requires the keyword argument `time_ids` to be passed in `added_cond_kwargs`" + ) + time_ids = added_cond_kwargs.get("time_ids") + time_embeds = self.add_time_proj(time_ids.flatten()) + time_embeds = time_embeds.reshape((text_embeds.shape[0], -1)) + + add_embeds = torch.concat([text_embeds, time_embeds], dim=-1) + add_embeds = add_embeds.to(emb.dtype) + aug_emb = self.add_embedding(add_embeds) + + emb = emb + aug_emb if aug_emb is not None else emb + + if debug: print('BrushNet CA: pre-process') + + + # 2. pre-process + brushnet_cond = torch.concat([sample, brushnet_cond], 1) + sample = self.conv_in_condition(brushnet_cond) + + if debug: print('BrushNet CA: down') + + # 3. 
down + down_block_res_samples = (sample,) + for downsample_block in self.down_blocks: + if hasattr(downsample_block, "has_cross_attention") and downsample_block.has_cross_attention: + if debug: print('BrushNet CA (down block with XA): ', type(downsample_block)) + sample, res_samples = downsample_block( + hidden_states=sample, + temb=emb, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + debug=debug, + ) + else: + if debug: print('BrushNet CA (down block): ', type(downsample_block)) + sample, res_samples = downsample_block(hidden_states=sample, temb=emb, debug=debug) + + down_block_res_samples += res_samples + + if debug: print('BrushNet CA: PP down') + + # 4. PaintingNet down blocks + brushnet_down_block_res_samples = () + for down_block_res_sample, brushnet_down_block in zip(down_block_res_samples, self.brushnet_down_blocks): + down_block_res_sample = brushnet_down_block(down_block_res_sample) + brushnet_down_block_res_samples = brushnet_down_block_res_samples + (down_block_res_sample,) + + if debug: print('BrushNet CA: PP mid') + + # 5. mid + if self.mid_block is not None: + if hasattr(self.mid_block, "has_cross_attention") and self.mid_block.has_cross_attention: + sample = self.mid_block( + sample, + emb, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + ) + else: + sample = self.mid_block(sample, emb) + + if debug: print('BrushNet CA: mid') + + # 6. BrushNet mid blocks + brushnet_mid_block_res_sample = self.brushnet_mid_block(sample) + + if debug: print('BrushNet CA: PP up') + + # 7. up + up_block_res_samples = () + for i, upsample_block in enumerate(self.up_blocks): + is_final_block = i == len(self.up_blocks) - 1 + + res_samples = down_block_res_samples[-len(upsample_block.resnets) :] + down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)] + + # if we have not reached the final block and need to forward the + # upsample size, we do it here + if not is_final_block: + upsample_size = down_block_res_samples[-1].shape[2:] + + if hasattr(upsample_block, "has_cross_attention") and upsample_block.has_cross_attention: + sample, up_res_samples = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + upsample_size=upsample_size, + attention_mask=attention_mask, + return_res_samples=True, + ) + else: + sample, up_res_samples = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + upsample_size=upsample_size, + return_res_samples=True, + ) + + up_block_res_samples += up_res_samples + + if debug: print('BrushNet CA: up') + + # 8. BrushNet up blocks + brushnet_up_block_res_samples = () + for up_block_res_sample, brushnet_up_block in zip(up_block_res_samples, self.brushnet_up_blocks): + up_block_res_sample = brushnet_up_block(up_block_res_sample) + brushnet_up_block_res_samples = brushnet_up_block_res_samples + (up_block_res_sample,) + + if debug: print('BrushNet CA: scaling') + + # 6. 
scaling + if guess_mode and not self.config.global_pool_conditions: + scales = torch.logspace( + -1, + 0, + len(brushnet_down_block_res_samples) + 1 + len(brushnet_up_block_res_samples), + device=sample.device, + ) # 0.1 to 1.0 + scales = scales * conditioning_scale + + brushnet_down_block_res_samples = [ + sample * scale + for sample, scale in zip( + brushnet_down_block_res_samples, scales[: len(brushnet_down_block_res_samples)] + ) + ] + brushnet_mid_block_res_sample = ( + brushnet_mid_block_res_sample * scales[len(brushnet_down_block_res_samples)] + ) + brushnet_up_block_res_samples = [ + sample * scale + for sample, scale in zip( + brushnet_up_block_res_samples, scales[len(brushnet_down_block_res_samples) + 1 :] + ) + ] + else: + brushnet_down_block_res_samples = [ + sample * conditioning_scale for sample in brushnet_down_block_res_samples + ] + brushnet_mid_block_res_sample = brushnet_mid_block_res_sample * conditioning_scale + brushnet_up_block_res_samples = [sample * conditioning_scale for sample in brushnet_up_block_res_samples] + + if self.config.global_pool_conditions: + brushnet_down_block_res_samples = [ + torch.mean(sample, dim=(2, 3), keepdim=True) for sample in brushnet_down_block_res_samples + ] + brushnet_mid_block_res_sample = torch.mean(brushnet_mid_block_res_sample, dim=(2, 3), keepdim=True) + brushnet_up_block_res_samples = [ + torch.mean(sample, dim=(2, 3), keepdim=True) for sample in brushnet_up_block_res_samples + ] + + if debug: print('BrushNet CA: finish') + + if not return_dict: + return (brushnet_down_block_res_samples, brushnet_mid_block_res_sample, brushnet_up_block_res_samples) + + return BrushNetOutput( + down_block_res_samples=brushnet_down_block_res_samples, + mid_block_res_sample=brushnet_mid_block_res_sample, + up_block_res_samples=brushnet_up_block_res_samples, + ) + +except ImportError: + BrushNetModel = None + PowerPaintModel = None + # print("\33[33mModule 'diffusers' load failed. 
If you don't have it installed, do it:\033[0m") + # print("\33[33mpip install diffusers\033[0m") \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/brushnet/model_patch.py b/ComfyUI-Easy-Use/py/brushnet/model_patch.py new file mode 100644 index 0000000000000000000000000000000000000000..fe8d3f271a3b315b16a16aab8a6814b5b51148f4 --- /dev/null +++ b/ComfyUI-Easy-Use/py/brushnet/model_patch.py @@ -0,0 +1,137 @@ +import torch +import comfy + +# Check and add 'model_patch' to model.model_options['transformer_options'] +def add_model_patch_option(model): + if 'transformer_options' not in model.model_options: + model.model_options['transformer_options'] = {} + to = model.model_options['transformer_options'] + if "model_patch" not in to: + to["model_patch"] = {} + return to + + +# Patch model with model_function_wrapper +def patch_model_function_wrapper(model, forward_patch, remove=False): + def brushnet_model_function_wrapper(apply_model_method, options_dict): + to = options_dict['c']['transformer_options'] + + control = None + if 'control' in options_dict['c']: + control = options_dict['c']['control'] + + x = options_dict['input'] + timestep = options_dict['timestep'] + + # check if there are patches to execute + if 'model_patch' not in to or 'forward' not in to['model_patch']: + return apply_model_method(x, timestep, **options_dict['c']) + + mp = to['model_patch'] + unet = mp['unet'] + + all_sigmas = mp['all_sigmas'] + sigma = to['sigmas'][0].item() + total_steps = all_sigmas.shape[0] - 1 + step = torch.argmin((all_sigmas - sigma).abs()).item() + + mp['step'] = step + mp['total_steps'] = total_steps + + # comfy.model_base.apply_model + xc = model.model.model_sampling.calculate_input(timestep, x) + if 'c_concat' in options_dict['c'] and options_dict['c']['c_concat'] is not None: + xc = torch.cat([xc] + [options_dict['c']['c_concat']], dim=1) + t = model.model.model_sampling.timestep(timestep).float() + # execute all patches + for method in mp['forward']: + method(unet, xc, t, to, control) + + return apply_model_method(x, timestep, **options_dict['c']) + + if "model_function_wrapper" in model.model_options and model.model_options["model_function_wrapper"]: + print('BrushNet is going to replace existing model_function_wrapper:', + model.model_options["model_function_wrapper"]) + model.set_model_unet_function_wrapper(brushnet_model_function_wrapper) + + to = add_model_patch_option(model) + mp = to['model_patch'] + + if isinstance(model.model.model_config, comfy.supported_models.SD15): + mp['SDXL'] = False + elif isinstance(model.model.model_config, comfy.supported_models.SDXL): + mp['SDXL'] = True + else: + print('Base model type: ', type(model.model.model_config)) + raise Exception("Unsupported model type: ", type(model.model.model_config)) + + if 'forward' not in mp: + mp['forward'] = [] + + if remove: + if forward_patch in mp['forward']: + mp['forward'].remove(forward_patch) + else: + mp['forward'].append(forward_patch) + + mp['unet'] = model.model.diffusion_model + mp['step'] = 0 + mp['total_steps'] = 1 + + # apply patches to code + if comfy.samplers.sample.__doc__ is None or 'BrushNet' not in comfy.samplers.sample.__doc__: + comfy.samplers.original_sample = comfy.samplers.sample + comfy.samplers.sample = modified_sample + + if comfy.ldm.modules.diffusionmodules.openaimodel.apply_control.__doc__ is None or \ + 'BrushNet' not in comfy.ldm.modules.diffusionmodules.openaimodel.apply_control.__doc__: + comfy.ldm.modules.diffusionmodules.openaimodel.original_apply_control = 
comfy.ldm.modules.diffusionmodules.openaimodel.apply_control + comfy.ldm.modules.diffusionmodules.openaimodel.apply_control = modified_apply_control + + +# Model needs current step number and cfg at inference step. It is possible to write a custom KSampler but I'd like to use ComfyUI's one. +# The first versions had modified_common_ksampler, but it broke custom KSampler nodes +def modified_sample(model, noise, positive, negative, cfg, device, sampler, sigmas, model_options={}, + latent_image=None, denoise_mask=None, callback=None, disable_pbar=False, seed=None): + ''' Modified by BrushNet nodes''' + cfg_guider = comfy.samplers.CFGGuider(model) + cfg_guider.set_conds(positive, negative) + cfg_guider.set_cfg(cfg) + + ### Modified part ###################################################################### + to = add_model_patch_option(model) + to['model_patch']['all_sigmas'] = sigmas + ####################################################################################### + + return cfg_guider.sample(noise, latent_image, sampler, sigmas, denoise_mask, callback, disable_pbar, seed) + +# To use Controlnet with RAUNet it is much easier to modify apply_control a little +def modified_apply_control(h, control, name): + '''Modified by BrushNet nodes''' + if control is not None and name in control and len(control[name]) > 0: + ctrl = control[name].pop() + if ctrl is not None: + if h.shape[2] != ctrl.shape[2] or h.shape[3] != ctrl.shape[3]: + ctrl = torch.nn.functional.interpolate(ctrl, size=(h.shape[2], h.shape[3]), mode='bicubic').to( + h.dtype).to(h.device) + try: + h += ctrl + except: + print.warning("warning control could not be applied {} {}".format(h.shape, ctrl.shape)) + return h + +def add_model_patch(model): + to = add_model_patch_option(model) + mp = to['model_patch'] + if "brushnet" in mp: + if isinstance(model.model.model_config, comfy.supported_models.SD15): + mp['SDXL'] = False + elif isinstance(model.model.model_config, comfy.supported_models.SDXL): + mp['SDXL'] = True + else: + print('Base model type: ', type(model.model.model_config)) + raise Exception("Unsupported model type: ", type(model.model.model_config)) + + mp['unet'] = model.model.diffusion_model + mp['step'] = 0 + mp['total_steps'] = 1 \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/brushnet/powerpaint_utils.py b/ComfyUI-Easy-Use/py/brushnet/powerpaint_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..1cae2c1334c56e390121dc050e690f5cefdd5d2a --- /dev/null +++ b/ComfyUI-Easy-Use/py/brushnet/powerpaint_utils.py @@ -0,0 +1,467 @@ +import copy +import random + +import torch +import torch.nn as nn +from transformers import CLIPTokenizer +from typing import Any, List, Optional, Union + + +class TokenizerWrapper: + """Tokenizer wrapper for CLIPTokenizer. Only support CLIPTokenizer + currently. This wrapper is modified from https://github.com/huggingface/dif + fusers/blob/e51f19aee82c8dd874b715a09dbc521d88835d68/src/diffusers/loaders. + py#L358 # noqa. + + Args: + from_pretrained (Union[str, os.PathLike], optional): The *model id* + of a pretrained model or a path to a *directory* containing + model weights and config. Defaults to None. + from_config (Union[str, os.PathLike], optional): The *model id* + of a pretrained model or a path to a *directory* containing + model weights and config. Defaults to None. + + *args, **kwargs: If `from_pretrained` is passed, *args and **kwargs + will be passed to `from_pretrained` function. 
Otherwise, *args + and **kwargs will be used to initialize the model by + `self._module_cls(*args, **kwargs)`. + """ + + def __init__(self, tokenizer: CLIPTokenizer): + self.wrapped = tokenizer + self.token_map = {} + + def __getattr__(self, name: str) -> Any: + if name in self.__dict__: + return getattr(self, name) + # if name == "wrapped": + # return getattr(self, 'wrapped')#super().__getattr__("wrapped") + + try: + return getattr(self.wrapped, name) + except AttributeError: + raise AttributeError( + "'name' cannot be found in both " + f"'{self.__class__.__name__}' and " + f"'{self.__class__.__name__}.tokenizer'." + ) + + def try_adding_tokens(self, tokens: Union[str, List[str]], *args, **kwargs): + """Attempt to add tokens to the tokenizer. + + Args: + tokens (Union[str, List[str]]): The tokens to be added. + """ + num_added_tokens = self.wrapped.add_tokens(tokens, *args, **kwargs) + assert num_added_tokens != 0, ( + f"The tokenizer already contains the token {tokens}. Please pass " + "a different `placeholder_token` that is not already in the " + "tokenizer." + ) + + def get_token_info(self, token: str) -> dict: + """Get the information of a token, including its start and end index in + the current tokenizer. + + Args: + token (str): The token to be queried. + + Returns: + dict: The information of the token, including its start and end + index in current tokenizer. + """ + token_ids = self.__call__(token).input_ids + start, end = token_ids[1], token_ids[-2] + 1 + return {"name": token, "start": start, "end": end} + + def add_placeholder_token(self, placeholder_token: str, *args, num_vec_per_token: int = 1, **kwargs): + """Add placeholder tokens to the tokenizer. + + Args: + placeholder_token (str): The placeholder token to be added. + num_vec_per_token (int, optional): The number of vectors of + the added placeholder token. + *args, **kwargs: The arguments for `self.wrapped.add_tokens`. + """ + output = [] + if num_vec_per_token == 1: + self.try_adding_tokens(placeholder_token, *args, **kwargs) + output.append(placeholder_token) + else: + output = [] + for i in range(num_vec_per_token): + ith_token = placeholder_token + f"_{i}" + self.try_adding_tokens(ith_token, *args, **kwargs) + output.append(ith_token) + + for token in self.token_map: + if token in placeholder_token: + raise ValueError( + f"The tokenizer already has placeholder token {token} " + f"that can get confused with {placeholder_token} " + "keep placeholder tokens independent" + ) + self.token_map[placeholder_token] = output + + def replace_placeholder_tokens_in_text( + self, text: Union[str, List[str]], vector_shuffle: bool = False, prop_tokens_to_load: float = 1.0 + ) -> Union[str, List[str]]: + """Replace the keywords in text with placeholder tokens. This function + will be called in `self.__call__` and `self.encode`. + + Args: + text (Union[str, List[str]]): The text to be processed. + vector_shuffle (bool, optional): Whether to shuffle the vectors. + Defaults to False. + prop_tokens_to_load (float, optional): The proportion of tokens to + be loaded. If 1.0, all tokens will be loaded. Defaults to 1.0. + + Returns: + Union[str, List[str]]: The processed text. 
+ """ + if isinstance(text, list): + output = [] + for i in range(len(text)): + output.append(self.replace_placeholder_tokens_in_text(text[i], vector_shuffle=vector_shuffle)) + return output + + for placeholder_token in self.token_map: + if placeholder_token in text: + tokens = self.token_map[placeholder_token] + tokens = tokens[: 1 + int(len(tokens) * prop_tokens_to_load)] + if vector_shuffle: + tokens = copy.copy(tokens) + random.shuffle(tokens) + text = text.replace(placeholder_token, " ".join(tokens)) + return text + + def replace_text_with_placeholder_tokens(self, text: Union[str, List[str]]) -> Union[str, List[str]]: + """Replace the placeholder tokens in text with the original keywords. + This function will be called in `self.decode`. + + Args: + text (Union[str, List[str]]): The text to be processed. + + Returns: + Union[str, List[str]]: The processed text. + """ + if isinstance(text, list): + output = [] + for i in range(len(text)): + output.append(self.replace_text_with_placeholder_tokens(text[i])) + return output + + for placeholder_token, tokens in self.token_map.items(): + merged_tokens = " ".join(tokens) + if merged_tokens in text: + text = text.replace(merged_tokens, placeholder_token) + return text + + def __call__( + self, + text: Union[str, List[str]], + *args, + vector_shuffle: bool = False, + prop_tokens_to_load: float = 1.0, + **kwargs, + ): + """The call function of the wrapper. + + Args: + text (Union[str, List[str]]): The text to be tokenized. + vector_shuffle (bool, optional): Whether to shuffle the vectors. + Defaults to False. + prop_tokens_to_load (float, optional): The proportion of tokens to + be loaded. If 1.0, all tokens will be loaded. Defaults to 1.0 + *args, **kwargs: The arguments for `self.wrapped.__call__`. + """ + replaced_text = self.replace_placeholder_tokens_in_text( + text, vector_shuffle=vector_shuffle, prop_tokens_to_load=prop_tokens_to_load + ) + + return self.wrapped.__call__(replaced_text, *args, **kwargs) + + def encode(self, text: Union[str, List[str]], *args, **kwargs): + """Encode the passed text to token index. + + Args: + text (Union[str, List[str]]): The text to be encode. + *args, **kwargs: The arguments for `self.wrapped.__call__`. + """ + replaced_text = self.replace_placeholder_tokens_in_text(text) + return self.wrapped(replaced_text, *args, **kwargs) + + def decode(self, token_ids, return_raw: bool = False, *args, **kwargs) -> Union[str, List[str]]: + """Decode the token index to text. + + Args: + token_ids: The token index to be decoded. + return_raw: Whether keep the placeholder token in the text. + Defaults to False. + *args, **kwargs: The arguments for `self.wrapped.decode`. + + Returns: + Union[str, List[str]]: The decoded text. + """ + text = self.wrapped.decode(token_ids, *args, **kwargs) + if return_raw: + return text + replaced_text = self.replace_text_with_placeholder_tokens(text) + return replaced_text + + def __repr__(self): + """The representation of the wrapper.""" + s = super().__repr__() + prefix = f"Wrapped Module Class: {self._module_cls}\n" + prefix += f"Wrapped Module Name: {self._module_name}\n" + if self._from_pretrained: + prefix += f"From Pretrained: {self._from_pretrained}\n" + s = prefix + s + return s + + +class EmbeddingLayerWithFixes(nn.Module): + """The revised embedding layer to support external embeddings. This design + of this class is inspired by https://github.com/AUTOMATIC1111/stable- + diffusion-webui/blob/22bcc7be428c94e9408f589966c2040187245d81/modules/sd_hi + jack.py#L224 # noqa. 
+ + Args: + wrapped (nn.Emebdding): The embedding layer to be wrapped. + external_embeddings (Union[dict, List[dict]], optional): The external + embeddings added to this layer. Defaults to None. + """ + + def __init__(self, wrapped: nn.Embedding, external_embeddings: Optional[Union[dict, List[dict]]] = None): + super().__init__() + self.wrapped = wrapped + self.num_embeddings = wrapped.weight.shape[0] + + self.external_embeddings = [] + if external_embeddings: + self.add_embeddings(external_embeddings) + + self.trainable_embeddings = nn.ParameterDict() + + @property + def weight(self): + """Get the weight of wrapped embedding layer.""" + return self.wrapped.weight + + def check_duplicate_names(self, embeddings: List[dict]): + """Check whether duplicate names exist in list of 'external + embeddings'. + + Args: + embeddings (List[dict]): A list of embedding to be check. + """ + names = [emb["name"] for emb in embeddings] + assert len(names) == len(set(names)), ( + "Found duplicated names in 'external_embeddings'. Name list: " f"'{names}'" + ) + + def check_ids_overlap(self, embeddings): + """Check whether overlap exist in token ids of 'external_embeddings'. + + Args: + embeddings (List[dict]): A list of embedding to be check. + """ + ids_range = [[emb["start"], emb["end"], emb["name"]] for emb in embeddings] + ids_range.sort() # sort by 'start' + # check if 'end' has overlapping + for idx in range(len(ids_range) - 1): + name1, name2 = ids_range[idx][-1], ids_range[idx + 1][-1] + assert ids_range[idx][1] <= ids_range[idx + 1][0], ( + f"Found ids overlapping between embeddings '{name1}' " f"and '{name2}'." + ) + + def add_embeddings(self, embeddings: Optional[Union[dict, List[dict]]]): + """Add external embeddings to this layer. + Use case: + Args: + embeddings (Union[dict, list[dict]]): The external embeddings to + be added. Each dict must contain the following 4 fields: 'name' + (the name of this embedding), 'embedding' (the embedding + tensor), 'start' (the start token id of this embedding), 'end' + (the end token id of this embedding). For example: + `{name: NAME, start: START, end: END, embedding: torch.Tensor}` + """ + if isinstance(embeddings, dict): + embeddings = [embeddings] + + self.external_embeddings += embeddings + self.check_duplicate_names(self.external_embeddings) + self.check_ids_overlap(self.external_embeddings) + + # set for trainable + added_trainable_emb_info = [] + for embedding in embeddings: + trainable = embedding.get("trainable", False) + if trainable: + name = embedding["name"] + embedding["embedding"] = torch.nn.Parameter(embedding["embedding"]) + self.trainable_embeddings[name] = embedding["embedding"] + added_trainable_emb_info.append(name) + + added_emb_info = [emb["name"] for emb in embeddings] + added_emb_info = ", ".join(added_emb_info) + print(f"Successfully add external embeddings: {added_emb_info}.", "current") + + if added_trainable_emb_info: + added_trainable_emb_info = ", ".join(added_trainable_emb_info) + print("Successfully add trainable external embeddings: " f"{added_trainable_emb_info}", "current") + + def replace_input_ids(self, input_ids: torch.Tensor) -> torch.Tensor: + """Replace external input ids to 0. + + Args: + input_ids (torch.Tensor): The input ids to be replaced. + + Returns: + torch.Tensor: The replaced input ids. 
+ """ + input_ids_fwd = input_ids.clone() + input_ids_fwd[input_ids_fwd >= self.num_embeddings] = 0 + return input_ids_fwd + + def replace_embeddings( + self, input_ids: torch.Tensor, embedding: torch.Tensor, external_embedding: dict + ) -> torch.Tensor: + """Replace external embedding to the embedding layer. Noted that, in + this function we use `torch.cat` to avoid inplace modification. + + Args: + input_ids (torch.Tensor): The original token ids. Shape like + [LENGTH, ]. + embedding (torch.Tensor): The embedding of token ids after + `replace_input_ids` function. + external_embedding (dict): The external embedding to be replaced. + + Returns: + torch.Tensor: The replaced embedding. + """ + new_embedding = [] + + name = external_embedding["name"] + start = external_embedding["start"] + end = external_embedding["end"] + target_ids_to_replace = [i for i in range(start, end)] + ext_emb = external_embedding["embedding"].to(embedding.device) + + # do not need to replace + if not (input_ids == start).any(): + return embedding + + # start replace + s_idx, e_idx = 0, 0 + while e_idx < len(input_ids): + if input_ids[e_idx] == start: + if e_idx != 0: + # add embedding do not need to replace + new_embedding.append(embedding[s_idx:e_idx]) + + # check if the next embedding need to replace is valid + actually_ids_to_replace = [int(i) for i in input_ids[e_idx: e_idx + end - start]] + assert actually_ids_to_replace == target_ids_to_replace, ( + f"Invalid 'input_ids' in position: {s_idx} to {e_idx}. " + f"Expect '{target_ids_to_replace}' for embedding " + f"'{name}' but found '{actually_ids_to_replace}'." + ) + + new_embedding.append(ext_emb) + + s_idx = e_idx + end - start + e_idx = s_idx + 1 + else: + e_idx += 1 + + if e_idx == len(input_ids): + new_embedding.append(embedding[s_idx:e_idx]) + + return torch.cat(new_embedding, dim=0) + + def forward(self, input_ids: torch.Tensor, external_embeddings: Optional[List[dict]] = None, out_dtype = None): + """The forward function. + + Args: + input_ids (torch.Tensor): The token ids shape like [bz, LENGTH] or + [LENGTH, ]. + external_embeddings (Optional[List[dict]]): The external + embeddings. If not passed, only `self.external_embeddings` + will be used. Defaults to None. + + input_ids: shape like [bz, LENGTH] or [LENGTH]. + """ + assert input_ids.ndim in [1, 2] + if input_ids.ndim == 1: + input_ids = input_ids.unsqueeze(0) + + if external_embeddings is None and not self.external_embeddings: + return self.wrapped(input_ids, out_dtype=out_dtype) + + input_ids_fwd = self.replace_input_ids(input_ids) + inputs_embeds = self.wrapped(input_ids_fwd) + + vecs = [] + + if external_embeddings is None: + external_embeddings = [] + elif isinstance(external_embeddings, dict): + external_embeddings = [external_embeddings] + embeddings = self.external_embeddings + external_embeddings + + for input_id, embedding in zip(input_ids, inputs_embeds): + new_embedding = embedding + for external_embedding in embeddings: + new_embedding = self.replace_embeddings(input_id, new_embedding, external_embedding) + vecs.append(new_embedding) + + return torch.stack(vecs).to(out_dtype) + + +def add_tokens( + tokenizer, text_encoder, placeholder_tokens: list, initialize_tokens: list = None, + num_vectors_per_token: int = 1 +): + """Add token for training. + + # TODO: support add tokens as dict, then we can load pretrained tokens. 
+ """ + if initialize_tokens is not None: + assert len(initialize_tokens) == len( + placeholder_tokens + ), "placeholder_token should be the same length as initialize_token" + for ii in range(len(placeholder_tokens)): + tokenizer.add_placeholder_token(placeholder_tokens[ii], num_vec_per_token=num_vectors_per_token) + + # text_encoder.set_embedding_layer() + embedding_layer = text_encoder.text_model.embeddings.token_embedding + text_encoder.text_model.embeddings.token_embedding = EmbeddingLayerWithFixes(embedding_layer) + embedding_layer = text_encoder.text_model.embeddings.token_embedding + + assert embedding_layer is not None, ( + "Do not support get embedding layer for current text encoder. " "Please check your configuration." + ) + initialize_embedding = [] + if initialize_tokens is not None: + for ii in range(len(placeholder_tokens)): + init_id = tokenizer(initialize_tokens[ii]).input_ids[1] + temp_embedding = embedding_layer.weight[init_id] + initialize_embedding.append(temp_embedding[None, ...].repeat(num_vectors_per_token, 1)) + else: + for ii in range(len(placeholder_tokens)): + init_id = tokenizer("a").input_ids[1] + temp_embedding = embedding_layer.weight[init_id] + len_emb = temp_embedding.shape[0] + init_weight = (torch.rand(num_vectors_per_token, len_emb) - 0.5) / 2.0 + initialize_embedding.append(init_weight) + + # initialize_embedding = torch.cat(initialize_embedding,dim=0) + + token_info_all = [] + for ii in range(len(placeholder_tokens)): + token_info = tokenizer.get_token_info(placeholder_tokens[ii]) + token_info["embedding"] = initialize_embedding[ii] + token_info["trainable"] = True + token_info_all.append(token_info) + embedding_layer.add_embeddings(token_info_all) diff --git a/ComfyUI-Easy-Use/py/brushnet/unet_2d_blocks.py b/ComfyUI-Easy-Use/py/brushnet/unet_2d_blocks.py new file mode 100644 index 0000000000000000000000000000000000000000..e332bb9c2b1e4ae3c0fab577cab14c111a806c19 --- /dev/null +++ b/ComfyUI-Easy-Use/py/brushnet/unet_2d_blocks.py @@ -0,0 +1,3908 @@ +# Copyright 2024 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from typing import Any, Dict, Optional, Tuple, Union + +import numpy as np +import torch +import torch.nn.functional as F +from torch import nn + +from diffusers.utils import deprecate, is_torch_version, logging +from diffusers.utils.torch_utils import apply_freeu +from diffusers.models.activations import get_activation +from diffusers.models.attention_processor import Attention, AttnAddedKVProcessor, AttnAddedKVProcessor2_0 +from diffusers.models.normalization import AdaGroupNorm +from diffusers.models.resnet import ( + Downsample2D, + FirDownsample2D, + FirUpsample2D, + KDownsample2D, + KUpsample2D, + ResnetBlock2D, + ResnetBlockCondNorm2D, + Upsample2D, +) +from diffusers.models.transformers.dual_transformer_2d import DualTransformer2DModel +from diffusers.models.transformers.transformer_2d import Transformer2DModel + +logger = logging.get_logger(__name__) # pylint: disable=invalid-name + + +def get_down_block( + down_block_type: str, + num_layers: int, + in_channels: int, + out_channels: int, + temb_channels: int, + add_downsample: bool, + resnet_eps: float, + resnet_act_fn: str, + transformer_layers_per_block: int = 1, + num_attention_heads: Optional[int] = None, + resnet_groups: Optional[int] = None, + cross_attention_dim: Optional[int] = None, + downsample_padding: Optional[int] = None, + dual_cross_attention: bool = False, + use_linear_projection: bool = False, + only_cross_attention: bool = False, + upcast_attention: bool = False, + resnet_time_scale_shift: str = "default", + attention_type: str = "default", + resnet_skip_time_act: bool = False, + resnet_out_scale_factor: float = 1.0, + cross_attention_norm: Optional[str] = None, + attention_head_dim: Optional[int] = None, + downsample_type: Optional[str] = None, + dropout: float = 0.0, +): + # If attn head dim is not defined, we default it to the number of heads + if attention_head_dim is None: + logger.warning( + f"It is recommended to provide `attention_head_dim` when calling `get_down_block`. Defaulting `attention_head_dim` to {num_attention_heads}." 
+ ) + attention_head_dim = num_attention_heads + + down_block_type = down_block_type[7:] if down_block_type.startswith("UNetRes") else down_block_type + if down_block_type == "DownBlock2D": + return DownBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + downsample_padding=downsample_padding, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + elif down_block_type == "ResnetDownsampleBlock2D": + return ResnetDownsampleBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + resnet_time_scale_shift=resnet_time_scale_shift, + skip_time_act=resnet_skip_time_act, + output_scale_factor=resnet_out_scale_factor, + ) + elif down_block_type == "AttnDownBlock2D": + if add_downsample is False: + downsample_type = None + else: + downsample_type = downsample_type or "conv" # default to 'conv' + return AttnDownBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + downsample_padding=downsample_padding, + attention_head_dim=attention_head_dim, + resnet_time_scale_shift=resnet_time_scale_shift, + downsample_type=downsample_type, + ) + elif down_block_type == "CrossAttnDownBlock2D": + if cross_attention_dim is None: + raise ValueError("cross_attention_dim must be specified for CrossAttnDownBlock2D") + return CrossAttnDownBlock2D( + num_layers=num_layers, + transformer_layers_per_block=transformer_layers_per_block, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + downsample_padding=downsample_padding, + cross_attention_dim=cross_attention_dim, + num_attention_heads=num_attention_heads, + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention, + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_type=attention_type, + ) + elif down_block_type == "SimpleCrossAttnDownBlock2D": + if cross_attention_dim is None: + raise ValueError("cross_attention_dim must be specified for SimpleCrossAttnDownBlock2D") + return SimpleCrossAttnDownBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + cross_attention_dim=cross_attention_dim, + attention_head_dim=attention_head_dim, + resnet_time_scale_shift=resnet_time_scale_shift, + skip_time_act=resnet_skip_time_act, + output_scale_factor=resnet_out_scale_factor, + only_cross_attention=only_cross_attention, + cross_attention_norm=cross_attention_norm, + ) + elif down_block_type == "SkipDownBlock2D": + return SkipDownBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + add_downsample=add_downsample, + 
resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + downsample_padding=downsample_padding, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + elif down_block_type == "AttnSkipDownBlock2D": + return AttnSkipDownBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + attention_head_dim=attention_head_dim, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + elif down_block_type == "DownEncoderBlock2D": + return DownEncoderBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + downsample_padding=downsample_padding, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + elif down_block_type == "AttnDownEncoderBlock2D": + return AttnDownEncoderBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + downsample_padding=downsample_padding, + attention_head_dim=attention_head_dim, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + elif down_block_type == "KDownBlock2D": + return KDownBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + ) + elif down_block_type == "KCrossAttnDownBlock2D": + return KCrossAttnDownBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + cross_attention_dim=cross_attention_dim, + attention_head_dim=attention_head_dim, + add_self_attention=True if not add_downsample else False, + ) + raise ValueError(f"{down_block_type} does not exist.") + + +def get_mid_block( + mid_block_type: str, + temb_channels: int, + in_channels: int, + resnet_eps: float, + resnet_act_fn: str, + resnet_groups: int, + output_scale_factor: float = 1.0, + transformer_layers_per_block: int = 1, + num_attention_heads: Optional[int] = None, + cross_attention_dim: Optional[int] = None, + dual_cross_attention: bool = False, + use_linear_projection: bool = False, + mid_block_only_cross_attention: bool = False, + upcast_attention: bool = False, + resnet_time_scale_shift: str = "default", + attention_type: str = "default", + resnet_skip_time_act: bool = False, + cross_attention_norm: Optional[str] = None, + attention_head_dim: Optional[int] = 1, + dropout: float = 0.0, +): + if mid_block_type == "UNetMidBlock2DCrossAttn": + return UNetMidBlock2DCrossAttn( + transformer_layers_per_block=transformer_layers_per_block, + in_channels=in_channels, + temb_channels=temb_channels, + dropout=dropout, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + output_scale_factor=output_scale_factor, + resnet_time_scale_shift=resnet_time_scale_shift, + cross_attention_dim=cross_attention_dim, + num_attention_heads=num_attention_heads, + resnet_groups=resnet_groups, + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + upcast_attention=upcast_attention, + attention_type=attention_type, + ) + elif 
mid_block_type == "UNetMidBlock2DSimpleCrossAttn": + return UNetMidBlock2DSimpleCrossAttn( + in_channels=in_channels, + temb_channels=temb_channels, + dropout=dropout, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + output_scale_factor=output_scale_factor, + cross_attention_dim=cross_attention_dim, + attention_head_dim=attention_head_dim, + resnet_groups=resnet_groups, + resnet_time_scale_shift=resnet_time_scale_shift, + skip_time_act=resnet_skip_time_act, + only_cross_attention=mid_block_only_cross_attention, + cross_attention_norm=cross_attention_norm, + ) + elif mid_block_type == "UNetMidBlock2D": + return UNetMidBlock2D( + in_channels=in_channels, + temb_channels=temb_channels, + dropout=dropout, + num_layers=0, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + output_scale_factor=output_scale_factor, + resnet_groups=resnet_groups, + resnet_time_scale_shift=resnet_time_scale_shift, + add_attention=False, + ) + elif mid_block_type == "MidBlock2D": + return MidBlock2D( + in_channels=in_channels, + temb_channels=temb_channels, + dropout=dropout, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + output_scale_factor=output_scale_factor, + resnet_time_scale_shift=resnet_time_scale_shift, + resnet_groups=resnet_groups, + use_linear_projection=use_linear_projection, + ) + elif mid_block_type is None: + return None + else: + raise ValueError(f"unknown mid_block_type : {mid_block_type}") + + +def get_up_block( + up_block_type: str, + num_layers: int, + in_channels: int, + out_channels: int, + prev_output_channel: int, + temb_channels: int, + add_upsample: bool, + resnet_eps: float, + resnet_act_fn: str, + resolution_idx: Optional[int] = None, + transformer_layers_per_block: int = 1, + num_attention_heads: Optional[int] = None, + resnet_groups: Optional[int] = None, + cross_attention_dim: Optional[int] = None, + dual_cross_attention: bool = False, + use_linear_projection: bool = False, + only_cross_attention: bool = False, + upcast_attention: bool = False, + resnet_time_scale_shift: str = "default", + attention_type: str = "default", + resnet_skip_time_act: bool = False, + resnet_out_scale_factor: float = 1.0, + cross_attention_norm: Optional[str] = None, + attention_head_dim: Optional[int] = None, + upsample_type: Optional[str] = None, + dropout: float = 0.0, +) -> nn.Module: + # If attn head dim is not defined, we default it to the number of heads + if attention_head_dim is None: + logger.warning( + f"It is recommended to provide `attention_head_dim` when calling `get_up_block`. Defaulting `attention_head_dim` to {num_attention_heads}." 
+ ) + attention_head_dim = num_attention_heads + + up_block_type = up_block_type[7:] if up_block_type.startswith("UNetRes") else up_block_type + if up_block_type == "UpBlock2D": + return UpBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + resolution_idx=resolution_idx, + dropout=dropout, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + elif up_block_type == "ResnetUpsampleBlock2D": + return ResnetUpsampleBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + resolution_idx=resolution_idx, + dropout=dropout, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + resnet_time_scale_shift=resnet_time_scale_shift, + skip_time_act=resnet_skip_time_act, + output_scale_factor=resnet_out_scale_factor, + ) + elif up_block_type == "CrossAttnUpBlock2D": + if cross_attention_dim is None: + raise ValueError("cross_attention_dim must be specified for CrossAttnUpBlock2D") + return CrossAttnUpBlock2D( + num_layers=num_layers, + transformer_layers_per_block=transformer_layers_per_block, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + resolution_idx=resolution_idx, + dropout=dropout, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + cross_attention_dim=cross_attention_dim, + num_attention_heads=num_attention_heads, + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention, + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_type=attention_type, + ) + elif up_block_type == "SimpleCrossAttnUpBlock2D": + if cross_attention_dim is None: + raise ValueError("cross_attention_dim must be specified for SimpleCrossAttnUpBlock2D") + return SimpleCrossAttnUpBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + resolution_idx=resolution_idx, + dropout=dropout, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + cross_attention_dim=cross_attention_dim, + attention_head_dim=attention_head_dim, + resnet_time_scale_shift=resnet_time_scale_shift, + skip_time_act=resnet_skip_time_act, + output_scale_factor=resnet_out_scale_factor, + only_cross_attention=only_cross_attention, + cross_attention_norm=cross_attention_norm, + ) + elif up_block_type == "AttnUpBlock2D": + if add_upsample is False: + upsample_type = None + else: + upsample_type = upsample_type or "conv" # default to 'conv' + + return AttnUpBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + resolution_idx=resolution_idx, + dropout=dropout, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + attention_head_dim=attention_head_dim, + resnet_time_scale_shift=resnet_time_scale_shift, + upsample_type=upsample_type, + ) + elif up_block_type == "SkipUpBlock2D": + return 
SkipUpBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + resolution_idx=resolution_idx, + dropout=dropout, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + elif up_block_type == "AttnSkipUpBlock2D": + return AttnSkipUpBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + resolution_idx=resolution_idx, + dropout=dropout, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + attention_head_dim=attention_head_dim, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + elif up_block_type == "UpDecoderBlock2D": + return UpDecoderBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + resolution_idx=resolution_idx, + dropout=dropout, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + resnet_time_scale_shift=resnet_time_scale_shift, + temb_channels=temb_channels, + ) + elif up_block_type == "AttnUpDecoderBlock2D": + return AttnUpDecoderBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + resolution_idx=resolution_idx, + dropout=dropout, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + attention_head_dim=attention_head_dim, + resnet_time_scale_shift=resnet_time_scale_shift, + temb_channels=temb_channels, + ) + elif up_block_type == "KUpBlock2D": + return KUpBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + resolution_idx=resolution_idx, + dropout=dropout, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + ) + elif up_block_type == "KCrossAttnUpBlock2D": + return KCrossAttnUpBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + resolution_idx=resolution_idx, + dropout=dropout, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + cross_attention_dim=cross_attention_dim, + attention_head_dim=attention_head_dim, + ) + + raise ValueError(f"{up_block_type} does not exist.") + + +class AutoencoderTinyBlock(nn.Module): + """ + Tiny Autoencoder block used in [`AutoencoderTiny`]. It is a mini residual module consisting of plain conv + ReLU + blocks. + + Args: + in_channels (`int`): The number of input channels. + out_channels (`int`): The number of output channels. + act_fn (`str`): + ` The activation function to use. Supported values are `"swish"`, `"mish"`, `"gelu"`, and `"relu"`. + + Returns: + `torch.FloatTensor`: A tensor with the same shape as the input tensor, but with the number of channels equal to + `out_channels`. 
+ """ + + def __init__(self, in_channels: int, out_channels: int, act_fn: str): + super().__init__() + act_fn = get_activation(act_fn) + self.conv = nn.Sequential( + nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1), + act_fn, + nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1), + act_fn, + nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1), + ) + self.skip = ( + nn.Conv2d(in_channels, out_channels, kernel_size=1, bias=False) + if in_channels != out_channels + else nn.Identity() + ) + self.fuse = nn.ReLU() + + def forward(self, x: torch.FloatTensor) -> torch.FloatTensor: + return self.fuse(self.conv(x) + self.skip(x)) + + +class UNetMidBlock2D(nn.Module): + """ + A 2D UNet mid-block [`UNetMidBlock2D`] with multiple residual blocks and optional attention blocks. + + Args: + in_channels (`int`): The number of input channels. + temb_channels (`int`): The number of temporal embedding channels. + dropout (`float`, *optional*, defaults to 0.0): The dropout rate. + num_layers (`int`, *optional*, defaults to 1): The number of residual blocks. + resnet_eps (`float`, *optional*, 1e-6 ): The epsilon value for the resnet blocks. + resnet_time_scale_shift (`str`, *optional*, defaults to `default`): + The type of normalization to apply to the time embeddings. This can help to improve the performance of the + model on tasks with long-range temporal dependencies. + resnet_act_fn (`str`, *optional*, defaults to `swish`): The activation function for the resnet blocks. + resnet_groups (`int`, *optional*, defaults to 32): + The number of groups to use in the group normalization layers of the resnet blocks. + attn_groups (`Optional[int]`, *optional*, defaults to None): The number of groups for the attention blocks. + resnet_pre_norm (`bool`, *optional*, defaults to `True`): + Whether to use pre-normalization for the resnet blocks. + add_attention (`bool`, *optional*, defaults to `True`): Whether to add attention blocks. + attention_head_dim (`int`, *optional*, defaults to 1): + Dimension of a single attention head. The number of attention heads is determined based on this value and + the number of input channels. + output_scale_factor (`float`, *optional*, defaults to 1.0): The output scale factor. + + Returns: + `torch.FloatTensor`: The output of the last residual block, which is a tensor of shape `(batch_size, + in_channels, height, width)`. 
+ + """ + + def __init__( + self, + in_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", # default, spatial + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + attn_groups: Optional[int] = None, + resnet_pre_norm: bool = True, + add_attention: bool = True, + attention_head_dim: int = 1, + output_scale_factor: float = 1.0, + ): + super().__init__() + resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32) + self.add_attention = add_attention + + if attn_groups is None: + attn_groups = resnet_groups if resnet_time_scale_shift == "default" else None + + # there is always at least one resnet + if resnet_time_scale_shift == "spatial": + resnets = [ + ResnetBlockCondNorm2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm="spatial", + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + ) + ] + else: + resnets = [ + ResnetBlock2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ] + attentions = [] + + if attention_head_dim is None: + logger.warning( + f"It is not recommend to pass `attention_head_dim=None`. Defaulting `attention_head_dim` to `in_channels`: {in_channels}." + ) + attention_head_dim = in_channels + + for _ in range(num_layers): + if self.add_attention: + attentions.append( + Attention( + in_channels, + heads=in_channels // attention_head_dim, + dim_head=attention_head_dim, + rescale_output_factor=output_scale_factor, + eps=resnet_eps, + norm_num_groups=attn_groups, + spatial_norm_dim=temb_channels if resnet_time_scale_shift == "spatial" else None, + residual_connection=True, + bias=True, + upcast_softmax=True, + _from_deprecated_attn_block=True, + ) + ) + else: + attentions.append(None) + + if resnet_time_scale_shift == "spatial": + resnets.append( + ResnetBlockCondNorm2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm="spatial", + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + ) + ) + else: + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + def forward(self, hidden_states: torch.FloatTensor, temb: Optional[torch.FloatTensor] = None) -> torch.FloatTensor: + hidden_states = self.resnets[0](hidden_states, temb) + for attn, resnet in zip(self.attentions, self.resnets[1:]): + if attn is not None: + hidden_states = attn(hidden_states, temb=temb) + hidden_states = resnet(hidden_states, temb) + + return hidden_states + + +class UNetMidBlock2DCrossAttn(nn.Module): + def __init__( + self, + in_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + transformer_layers_per_block: Union[int, Tuple[int]] = 1, + 
resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + num_attention_heads: int = 1, + output_scale_factor: float = 1.0, + cross_attention_dim: int = 1280, + dual_cross_attention: bool = False, + use_linear_projection: bool = False, + upcast_attention: bool = False, + attention_type: str = "default", + ): + super().__init__() + + self.has_cross_attention = True + self.num_attention_heads = num_attention_heads + resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32) + + # support for variable transformer layers per block + if isinstance(transformer_layers_per_block, int): + transformer_layers_per_block = [transformer_layers_per_block] * num_layers + + # there is always at least one resnet + resnets = [ + ResnetBlock2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ] + attentions = [] + + for i in range(num_layers): + if not dual_cross_attention: + attentions.append( + Transformer2DModel( + num_attention_heads, + in_channels // num_attention_heads, + in_channels=in_channels, + num_layers=transformer_layers_per_block[i], + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + use_linear_projection=use_linear_projection, + upcast_attention=upcast_attention, + attention_type=attention_type, + ) + ) + else: + attentions.append( + DualTransformer2DModel( + num_attention_heads, + in_channels // num_attention_heads, + in_channels=in_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) + ) + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + self.gradient_checkpointing = False + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: + if cross_attention_kwargs is not None: + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. 
`scale` will be ignored.") + + hidden_states = self.resnets[0](hidden_states, temb) + for attn, resnet in zip(self.attentions, self.resnets[1:]): + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + ckpt_kwargs: Dict[str, Any] = {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {} + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), + hidden_states, + temb, + **ckpt_kwargs, + ) + else: + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + hidden_states = resnet(hidden_states, temb) + + return hidden_states + + +class UNetMidBlock2DSimpleCrossAttn(nn.Module): + def __init__( + self, + in_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + output_scale_factor: float = 1.0, + cross_attention_dim: int = 1280, + skip_time_act: bool = False, + only_cross_attention: bool = False, + cross_attention_norm: Optional[str] = None, + ): + super().__init__() + + self.has_cross_attention = True + + self.attention_head_dim = attention_head_dim + resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32) + + self.num_heads = in_channels // self.attention_head_dim + + # there is always at least one resnet + resnets = [ + ResnetBlock2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + ) + ] + attentions = [] + + for _ in range(num_layers): + processor = ( + AttnAddedKVProcessor2_0() if hasattr(F, "scaled_dot_product_attention") else AttnAddedKVProcessor() + ) + + attentions.append( + Attention( + query_dim=in_channels, + cross_attention_dim=in_channels, + heads=self.num_heads, + dim_head=self.attention_head_dim, + added_kv_proj_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + bias=True, + upcast_softmax=True, + only_cross_attention=only_cross_attention, + cross_attention_norm=cross_attention_norm, + processor=processor, + ) + ) + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + ) + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: 
Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: + cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {} + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. `scale` will be ignored.") + + if attention_mask is None: + # if encoder_hidden_states is defined: we are doing cross-attn, so we should use cross-attn mask. + mask = None if encoder_hidden_states is None else encoder_attention_mask + else: + # when attention_mask is defined: we don't even check for encoder_attention_mask. + # this is to maintain compatibility with UnCLIP, which uses 'attention_mask' param for cross-attn masks. + # TODO: UnCLIP should express cross-attn mask via encoder_attention_mask param instead of via attention_mask. + # then we can simplify this whole if/else block to: + # mask = attention_mask if encoder_hidden_states is None else encoder_attention_mask + mask = attention_mask + + hidden_states = self.resnets[0](hidden_states, temb) + for attn, resnet in zip(self.attentions, self.resnets[1:]): + # attn + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=mask, + **cross_attention_kwargs, + ) + + # resnet + hidden_states = resnet(hidden_states, temb) + + return hidden_states + + +class MidBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor: float = 1.0, + use_linear_projection: bool = False, + ): + super().__init__() + + self.has_cross_attention = False + resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32) + + # there is always at least one resnet + resnets = [ + ResnetBlock2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ] + + for i in range(num_layers): + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + self.gradient_checkpointing = False + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: + lora_scale = 1.0 + hidden_states = self.resnets[0](hidden_states, temb, scale=lora_scale) + for resnet in self.resnets[1:]: + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + ckpt_kwargs: Dict[str, Any] = {"use_reentrant": 
False} if is_torch_version(">=", "1.11.0") else {} + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), + hidden_states, + temb, + **ckpt_kwargs, + ) + else: + hidden_states = resnet(hidden_states, temb, scale=lora_scale) + + return hidden_states + + +class AttnDownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + output_scale_factor: float = 1.0, + downsample_padding: int = 1, + downsample_type: str = "conv", + ): + super().__init__() + resnets = [] + attentions = [] + self.downsample_type = downsample_type + + if attention_head_dim is None: + logger.warning( + f"It is not recommend to pass `attention_head_dim=None`. Defaulting `attention_head_dim` to `in_channels`: {out_channels}." + ) + attention_head_dim = out_channels + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + attentions.append( + Attention( + out_channels, + heads=out_channels // attention_head_dim, + dim_head=attention_head_dim, + rescale_output_factor=output_scale_factor, + eps=resnet_eps, + norm_num_groups=resnet_groups, + residual_connection=True, + bias=True, + upcast_softmax=True, + _from_deprecated_attn_block=True, + ) + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if downsample_type == "conv": + self.downsamplers = nn.ModuleList( + [ + Downsample2D( + out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" + ) + ] + ) + elif downsample_type == "resnet": + self.downsamplers = nn.ModuleList( + [ + ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + down=True, + ) + ] + ) + else: + self.downsamplers = None + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + upsample_size: Optional[int] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...]]: + cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {} + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. 
`scale` will be ignored.") + + output_states = () + + for resnet, attn in zip(self.resnets, self.attentions): + hidden_states = resnet(hidden_states, temb) + hidden_states = attn(hidden_states, **cross_attention_kwargs) + output_states = output_states + (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + if self.downsample_type == "resnet": + hidden_states = downsampler(hidden_states, temb=temb) + else: + hidden_states = downsampler(hidden_states) + + output_states += (hidden_states,) + + return hidden_states, output_states + + +class CrossAttnDownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + transformer_layers_per_block: Union[int, Tuple[int]] = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + num_attention_heads: int = 1, + cross_attention_dim: int = 1280, + output_scale_factor: float = 1.0, + downsample_padding: int = 1, + add_downsample: bool = True, + dual_cross_attention: bool = False, + use_linear_projection: bool = False, + only_cross_attention: bool = False, + upcast_attention: bool = False, + attention_type: str = "default", + ): + super().__init__() + resnets = [] + attentions = [] + + self.has_cross_attention = True + self.num_attention_heads = num_attention_heads + if isinstance(transformer_layers_per_block, int): + transformer_layers_per_block = [transformer_layers_per_block] * num_layers + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + if not dual_cross_attention: + attentions.append( + Transformer2DModel( + num_attention_heads, + out_channels // num_attention_heads, + in_channels=out_channels, + num_layers=transformer_layers_per_block[i], + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention, + upcast_attention=upcast_attention, + attention_type=attention_type, + ) + ) + else: + attentions.append( + DualTransformer2DModel( + num_attention_heads, + out_channels // num_attention_heads, + in_channels=out_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) + ) + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + Downsample2D( + out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" + ) + ] + ) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + additional_residuals: Optional[torch.FloatTensor] = None, + down_block_add_samples: Optional[torch.FloatTensor] = None, + debug=False, 
+ ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...]]: + + if debug: print(' XAD2: forward') + + if cross_attention_kwargs is not None: + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. `scale` will be ignored.") + + output_states = () + + blocks = list(zip(self.resnets, self.attentions)) + + for i, (resnet, attn) in enumerate(blocks): + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + ckpt_kwargs: Dict[str, Any] = {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {} + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), + hidden_states, + temb, + **ckpt_kwargs, + ) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + else: + if debug: print(' XAD2: resnet hs #', i, hidden_states.shape) + if debug and temb is not None: print(' XAD2: resnet temb #', i, temb.shape) + + hidden_states = resnet(hidden_states, temb) + + if debug: print(' XAD2: attn hs #', i, hidden_states.shape) + if debug and encoder_hidden_states is not None: print(' XAD2: attn ehs #', i, + encoder_hidden_states.shape) + + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + + # apply additional residuals to the output of the last pair of resnet and attention blocks + if i == len(blocks) - 1 and additional_residuals is not None: + + if debug: print(' XAD2: add res', additional_residuals.shape) + + hidden_states = hidden_states + additional_residuals + + if down_block_add_samples is not None: + + if debug: print(' XAD2: add samples', down_block_add_samples.shape) + + hidden_states = hidden_states + down_block_add_samples.pop(0) + + if debug: print(' XAD2: output', hidden_states.shape) + + output_states = output_states + (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + if down_block_add_samples is not None: + hidden_states = hidden_states + down_block_add_samples.pop(0) # todo: add before or after + + output_states = output_states + (hidden_states,) + + if debug: + print(' XAD2: finish') + for st in output_states: + print(' XAD2: ', st.shape) + + return hidden_states, output_states + + +class DownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor: float = 1.0, + add_downsample: bool = True, + downsample_padding: int = 1, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + 
groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + Downsample2D( + out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" + ) + ] + ) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward( + self, hidden_states: torch.FloatTensor, temb: Optional[torch.FloatTensor] = None, + down_block_add_samples: Optional[torch.FloatTensor] = None, *args, **kwargs + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...]]: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." + deprecate("scale", "1.0.0", deprecation_message) + + output_states = () + + if kwargs.get("debug", False): print(' D2: forward', hidden_states.shape) + + for resnet in self.resnets: + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + if is_torch_version(">=", "1.11.0"): + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb, use_reentrant=False + ) + else: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + else: + + if kwargs.get("debug", False): print(' D2: resnet', hidden_states.shape) + + hidden_states = resnet(hidden_states, temb) + + if down_block_add_samples is not None: + hidden_states = hidden_states + down_block_add_samples.pop(0) + + output_states = output_states + (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + if down_block_add_samples is not None: + hidden_states = hidden_states + down_block_add_samples.pop(0) # todo: add before or after + + output_states = output_states + (hidden_states,) + + if kwargs.get("debug", False): print(' D2: finish', hidden_states.shape) + + return hidden_states, output_states + + +class DownEncoderBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor: float = 1.0, + add_downsample: bool = True, + downsample_padding: int = 1, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + if resnet_time_scale_shift == "spatial": + resnets.append( + ResnetBlockCondNorm2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=None, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm="spatial", + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + ) + ) + else: + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=None, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, 
+ non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + Downsample2D( + out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" + ) + ] + ) + else: + self.downsamplers = None + + def forward(self, hidden_states: torch.FloatTensor, *args, **kwargs) -> torch.FloatTensor: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." + deprecate("scale", "1.0.0", deprecation_message) + + for resnet in self.resnets: + hidden_states = resnet(hidden_states, temb=None) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + return hidden_states + + +class AttnDownEncoderBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + output_scale_factor: float = 1.0, + add_downsample: bool = True, + downsample_padding: int = 1, + ): + super().__init__() + resnets = [] + attentions = [] + + if attention_head_dim is None: + logger.warning( + f"It is not recommend to pass `attention_head_dim=None`. Defaulting `attention_head_dim` to `in_channels`: {out_channels}." + ) + attention_head_dim = out_channels + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + if resnet_time_scale_shift == "spatial": + resnets.append( + ResnetBlockCondNorm2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=None, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm="spatial", + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + ) + ) + else: + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=None, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + attentions.append( + Attention( + out_channels, + heads=out_channels // attention_head_dim, + dim_head=attention_head_dim, + rescale_output_factor=output_scale_factor, + eps=resnet_eps, + norm_num_groups=resnet_groups, + residual_connection=True, + bias=True, + upcast_softmax=True, + _from_deprecated_attn_block=True, + ) + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + Downsample2D( + out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" + ) + ] + ) + else: + self.downsamplers = None + + def forward(self, hidden_states: torch.FloatTensor, *args, **kwargs) -> torch.FloatTensor: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. 
`scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." + deprecate("scale", "1.0.0", deprecation_message) + + for resnet, attn in zip(self.resnets, self.attentions): + hidden_states = resnet(hidden_states, temb=None) + hidden_states = attn(hidden_states) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + return hidden_states + + +class AttnSkipDownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + output_scale_factor: float = np.sqrt(2.0), + add_downsample: bool = True, + ): + super().__init__() + self.attentions = nn.ModuleList([]) + self.resnets = nn.ModuleList([]) + + if attention_head_dim is None: + logger.warning( + f"It is not recommend to pass `attention_head_dim=None`. Defaulting `attention_head_dim` to `in_channels`: {out_channels}." + ) + attention_head_dim = out_channels + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + self.resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min(in_channels // 4, 32), + groups_out=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + self.attentions.append( + Attention( + out_channels, + heads=out_channels // attention_head_dim, + dim_head=attention_head_dim, + rescale_output_factor=output_scale_factor, + eps=resnet_eps, + norm_num_groups=32, + residual_connection=True, + bias=True, + upcast_softmax=True, + _from_deprecated_attn_block=True, + ) + ) + + if add_downsample: + self.resnet_down = ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + use_in_shortcut=True, + down=True, + kernel="fir", + ) + self.downsamplers = nn.ModuleList([FirDownsample2D(out_channels, out_channels=out_channels)]) + self.skip_conv = nn.Conv2d(3, out_channels, kernel_size=(1, 1), stride=(1, 1)) + else: + self.resnet_down = None + self.downsamplers = None + self.skip_conv = None + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + skip_sample: Optional[torch.FloatTensor] = None, + *args, + **kwargs, + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...], torch.FloatTensor]: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." 
+ deprecate("scale", "1.0.0", deprecation_message) + + output_states = () + + for resnet, attn in zip(self.resnets, self.attentions): + hidden_states = resnet(hidden_states, temb) + hidden_states = attn(hidden_states) + output_states += (hidden_states,) + + if self.downsamplers is not None: + hidden_states = self.resnet_down(hidden_states, temb) + for downsampler in self.downsamplers: + skip_sample = downsampler(skip_sample) + + hidden_states = self.skip_conv(skip_sample) + hidden_states + + output_states += (hidden_states,) + + return hidden_states, output_states, skip_sample + + +class SkipDownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_pre_norm: bool = True, + output_scale_factor: float = np.sqrt(2.0), + add_downsample: bool = True, + downsample_padding: int = 1, + ): + super().__init__() + self.resnets = nn.ModuleList([]) + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + self.resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min(in_channels // 4, 32), + groups_out=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + if add_downsample: + self.resnet_down = ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + use_in_shortcut=True, + down=True, + kernel="fir", + ) + self.downsamplers = nn.ModuleList([FirDownsample2D(out_channels, out_channels=out_channels)]) + self.skip_conv = nn.Conv2d(3, out_channels, kernel_size=(1, 1), stride=(1, 1)) + else: + self.resnet_down = None + self.downsamplers = None + self.skip_conv = None + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + skip_sample: Optional[torch.FloatTensor] = None, + *args, + **kwargs, + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...], torch.FloatTensor]: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." 
+ deprecate("scale", "1.0.0", deprecation_message) + + output_states = () + + for resnet in self.resnets: + hidden_states = resnet(hidden_states, temb) + output_states += (hidden_states,) + + if self.downsamplers is not None: + hidden_states = self.resnet_down(hidden_states, temb) + for downsampler in self.downsamplers: + skip_sample = downsampler(skip_sample) + + hidden_states = self.skip_conv(skip_sample) + hidden_states + + output_states += (hidden_states,) + + return hidden_states, output_states, skip_sample + + +class ResnetDownsampleBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor: float = 1.0, + add_downsample: bool = True, + skip_time_act: bool = False, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + down=True, + ) + ] + ) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward( + self, hidden_states: torch.FloatTensor, temb: Optional[torch.FloatTensor] = None, *args, **kwargs + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...]]: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." 
+ deprecate("scale", "1.0.0", deprecation_message) + + output_states = () + + for resnet in self.resnets: + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + if is_torch_version(">=", "1.11.0"): + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb, use_reentrant=False + ) + else: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + else: + hidden_states = resnet(hidden_states, temb) + + output_states = output_states + (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states, temb) + + output_states = output_states + (hidden_states,) + + return hidden_states, output_states + + +class SimpleCrossAttnDownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + cross_attention_dim: int = 1280, + output_scale_factor: float = 1.0, + add_downsample: bool = True, + skip_time_act: bool = False, + only_cross_attention: bool = False, + cross_attention_norm: Optional[str] = None, + ): + super().__init__() + + self.has_cross_attention = True + + resnets = [] + attentions = [] + + self.attention_head_dim = attention_head_dim + self.num_heads = out_channels // self.attention_head_dim + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + ) + ) + + processor = ( + AttnAddedKVProcessor2_0() if hasattr(F, "scaled_dot_product_attention") else AttnAddedKVProcessor() + ) + + attentions.append( + Attention( + query_dim=out_channels, + cross_attention_dim=out_channels, + heads=self.num_heads, + dim_head=attention_head_dim, + added_kv_proj_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + bias=True, + upcast_softmax=True, + only_cross_attention=only_cross_attention, + cross_attention_norm=cross_attention_norm, + processor=processor, + ) + ) + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + down=True, + ) + ] + ) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + 
encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...]]: + cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {} + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. `scale` will be ignored.") + + output_states = () + + if attention_mask is None: + # if encoder_hidden_states is defined: we are doing cross-attn, so we should use cross-attn mask. + mask = None if encoder_hidden_states is None else encoder_attention_mask + else: + # when attention_mask is defined: we don't even check for encoder_attention_mask. + # this is to maintain compatibility with UnCLIP, which uses 'attention_mask' param for cross-attn masks. + # TODO: UnCLIP should express cross-attn mask via encoder_attention_mask param instead of via attention_mask. + # then we can simplify this whole if/else block to: + # mask = attention_mask if encoder_hidden_states is None else encoder_attention_mask + mask = attention_mask + + for resnet, attn in zip(self.resnets, self.attentions): + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=mask, + **cross_attention_kwargs, + ) + else: + hidden_states = resnet(hidden_states, temb) + + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=mask, + **cross_attention_kwargs, + ) + + output_states = output_states + (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states, temb) + + output_states = output_states + (hidden_states,) + + return hidden_states, output_states + + +class KDownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 4, + resnet_eps: float = 1e-5, + resnet_act_fn: str = "gelu", + resnet_group_size: int = 32, + add_downsample: bool = False, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + groups = in_channels // resnet_group_size + groups_out = out_channels // resnet_group_size + + resnets.append( + ResnetBlockCondNorm2D( + in_channels=in_channels, + out_channels=out_channels, + dropout=dropout, + temb_channels=temb_channels, + groups=groups, + groups_out=groups_out, + eps=resnet_eps, + non_linearity=resnet_act_fn, + time_embedding_norm="ada_group", + conv_shortcut_bias=False, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + # YiYi's comments- might be able to use FirDownsample2D, look into details later + self.downsamplers = nn.ModuleList([KDownsample2D()]) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward( + self, hidden_states: torch.FloatTensor, temb: Optional[torch.FloatTensor] = None, *args, **kwargs + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...]]: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is 
deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." + deprecate("scale", "1.0.0", deprecation_message) + + output_states = () + + for resnet in self.resnets: + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + if is_torch_version(">=", "1.11.0"): + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb, use_reentrant=False + ) + else: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + else: + hidden_states = resnet(hidden_states, temb) + + output_states += (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + return hidden_states, output_states + + +class KCrossAttnDownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + cross_attention_dim: int, + dropout: float = 0.0, + num_layers: int = 4, + resnet_group_size: int = 32, + add_downsample: bool = True, + attention_head_dim: int = 64, + add_self_attention: bool = False, + resnet_eps: float = 1e-5, + resnet_act_fn: str = "gelu", + ): + super().__init__() + resnets = [] + attentions = [] + + self.has_cross_attention = True + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + groups = in_channels // resnet_group_size + groups_out = out_channels // resnet_group_size + + resnets.append( + ResnetBlockCondNorm2D( + in_channels=in_channels, + out_channels=out_channels, + dropout=dropout, + temb_channels=temb_channels, + groups=groups, + groups_out=groups_out, + eps=resnet_eps, + non_linearity=resnet_act_fn, + time_embedding_norm="ada_group", + conv_shortcut_bias=False, + ) + ) + attentions.append( + KAttentionBlock( + out_channels, + out_channels // attention_head_dim, + attention_head_dim, + cross_attention_dim=cross_attention_dim, + temb_channels=temb_channels, + attention_bias=True, + add_self_attention=add_self_attention, + cross_attention_norm="layer_norm", + group_size=resnet_group_size, + ) + ) + + self.resnets = nn.ModuleList(resnets) + self.attentions = nn.ModuleList(attentions) + + if add_downsample: + self.downsamplers = nn.ModuleList([KDownsample2D()]) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...]]: + cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {} + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. 
`scale` will be ignored.") + + output_states = () + + for resnet, attn in zip(self.resnets, self.attentions): + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + ckpt_kwargs: Dict[str, Any] = {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {} + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), + hidden_states, + temb, + **ckpt_kwargs, + ) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + emb=temb, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + encoder_attention_mask=encoder_attention_mask, + ) + else: + hidden_states = resnet(hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + emb=temb, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + encoder_attention_mask=encoder_attention_mask, + ) + + if self.downsamplers is None: + output_states += (None,) + else: + output_states += (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + return hidden_states, output_states + + +class AttnUpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + prev_output_channel: int, + out_channels: int, + temb_channels: int, + resolution_idx: int = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + output_scale_factor: float = 1.0, + upsample_type: str = "conv", + ): + super().__init__() + resnets = [] + attentions = [] + + self.upsample_type = upsample_type + + if attention_head_dim is None: + logger.warning( + f"It is not recommend to pass `attention_head_dim=None`. Defaulting `attention_head_dim` to `in_channels`: {out_channels}." 
+ ) + attention_head_dim = out_channels + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + resnets.append( + ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + attentions.append( + Attention( + out_channels, + heads=out_channels // attention_head_dim, + dim_head=attention_head_dim, + rescale_output_factor=output_scale_factor, + eps=resnet_eps, + norm_num_groups=resnet_groups, + residual_connection=True, + bias=True, + upcast_softmax=True, + _from_deprecated_attn_block=True, + ) + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if upsample_type == "conv": + self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)]) + elif upsample_type == "resnet": + self.upsamplers = nn.ModuleList( + [ + ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + up=True, + ) + ] + ) + else: + self.upsamplers = None + + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + upsample_size: Optional[int] = None, + *args, + **kwargs, + ) -> torch.FloatTensor: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." 
+ deprecate("scale", "1.0.0", deprecation_message) + + for resnet, attn in zip(self.resnets, self.attentions): + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + hidden_states = resnet(hidden_states, temb) + hidden_states = attn(hidden_states) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + if self.upsample_type == "resnet": + hidden_states = upsampler(hidden_states, temb=temb) + else: + hidden_states = upsampler(hidden_states) + + return hidden_states + + +class CrossAttnUpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + prev_output_channel: int, + temb_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + transformer_layers_per_block: Union[int, Tuple[int]] = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + num_attention_heads: int = 1, + cross_attention_dim: int = 1280, + output_scale_factor: float = 1.0, + add_upsample: bool = True, + dual_cross_attention: bool = False, + use_linear_projection: bool = False, + only_cross_attention: bool = False, + upcast_attention: bool = False, + attention_type: str = "default", + ): + super().__init__() + resnets = [] + attentions = [] + + self.has_cross_attention = True + self.num_attention_heads = num_attention_heads + + if isinstance(transformer_layers_per_block, int): + transformer_layers_per_block = [transformer_layers_per_block] * num_layers + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + resnets.append( + ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + if not dual_cross_attention: + attentions.append( + Transformer2DModel( + num_attention_heads, + out_channels // num_attention_heads, + in_channels=out_channels, + num_layers=transformer_layers_per_block[i], + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention, + upcast_attention=upcast_attention, + attention_type=attention_type, + ) + ) + else: + attentions.append( + DualTransformer2DModel( + num_attention_heads, + out_channels // num_attention_heads, + in_channels=out_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) + ) + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)]) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, 
+ upsample_size: Optional[int] = None, + attention_mask: Optional[torch.FloatTensor] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + return_res_samples: Optional[bool] = False, + up_block_add_samples: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: + if cross_attention_kwargs is not None: + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. `scale` will be ignored.") + + is_freeu_enabled = ( + getattr(self, "s1", None) + and getattr(self, "s2", None) + and getattr(self, "b1", None) + and getattr(self, "b2", None) + ) + if return_res_samples: + output_states = () + + for resnet, attn in zip(self.resnets, self.attentions): + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + + # FreeU: Only operate on the first two stages + if is_freeu_enabled: + hidden_states, res_hidden_states = apply_freeu( + self.resolution_idx, + hidden_states, + res_hidden_states, + s1=self.s1, + s2=self.s2, + b1=self.b1, + b2=self.b2, + ) + + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + ckpt_kwargs: Dict[str, Any] = {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {} + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), + hidden_states, + temb, + **ckpt_kwargs, + ) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + else: + hidden_states = resnet(hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + if return_res_samples: + output_states = output_states + (hidden_states,) + if up_block_add_samples is not None: + hidden_states = hidden_states + up_block_add_samples.pop(0) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, upsample_size) + if return_res_samples: + output_states = output_states + (hidden_states,) + if up_block_add_samples is not None: + hidden_states = hidden_states + up_block_add_samples.pop(0) + + if return_res_samples: + return hidden_states, output_states + else: + return hidden_states + + +class UpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + prev_output_channel: int, + out_channels: int, + temb_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor: float = 1.0, + add_upsample: bool = True, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + resnets.append( + 
ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)]) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + upsample_size: Optional[int] = None, + return_res_samples: Optional[bool] = False, + up_block_add_samples: Optional[torch.FloatTensor] = None, + *args, + **kwargs, + ) -> torch.FloatTensor: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." + deprecate("scale", "1.0.0", deprecation_message) + + is_freeu_enabled = ( + getattr(self, "s1", None) + and getattr(self, "s2", None) + and getattr(self, "b1", None) + and getattr(self, "b2", None) + ) + if return_res_samples: + output_states = () + + for resnet in self.resnets: + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + + # FreeU: Only operate on the first two stages + if is_freeu_enabled: + hidden_states, res_hidden_states = apply_freeu( + self.resolution_idx, + hidden_states, + res_hidden_states, + s1=self.s1, + s2=self.s2, + b1=self.b1, + b2=self.b2, + ) + + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + if is_torch_version(">=", "1.11.0"): + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb, use_reentrant=False + ) + else: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + else: + hidden_states = resnet(hidden_states, temb) + + if return_res_samples: + output_states = output_states + (hidden_states,) + if up_block_add_samples is not None: + hidden_states = hidden_states + up_block_add_samples.pop(0) # todo: add before or after + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, upsample_size) + + if return_res_samples: + output_states = output_states + (hidden_states,) + if up_block_add_samples is not None: + hidden_states = hidden_states + up_block_add_samples.pop(0) # todo: add before or after + + if return_res_samples: + return hidden_states, output_states + else: + return hidden_states + + +class UpDecoderBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", # default, spatial + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + 
resnet_pre_norm: bool = True, + output_scale_factor: float = 1.0, + add_upsample: bool = True, + temb_channels: Optional[int] = None, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + input_channels = in_channels if i == 0 else out_channels + + if resnet_time_scale_shift == "spatial": + resnets.append( + ResnetBlockCondNorm2D( + in_channels=input_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm="spatial", + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + ) + ) + else: + resnets.append( + ResnetBlock2D( + in_channels=input_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)]) + else: + self.upsamplers = None + + self.resolution_idx = resolution_idx + + def forward(self, hidden_states: torch.FloatTensor, temb: Optional[torch.FloatTensor] = None) -> torch.FloatTensor: + for resnet in self.resnets: + hidden_states = resnet(hidden_states, temb=temb) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states) + + return hidden_states + + +class AttnUpDecoderBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + output_scale_factor: float = 1.0, + add_upsample: bool = True, + temb_channels: Optional[int] = None, + ): + super().__init__() + resnets = [] + attentions = [] + + if attention_head_dim is None: + logger.warning( + f"It is not recommend to pass `attention_head_dim=None`. Defaulting `attention_head_dim` to `out_channels`: {out_channels}." 
+ ) + attention_head_dim = out_channels + + for i in range(num_layers): + input_channels = in_channels if i == 0 else out_channels + + if resnet_time_scale_shift == "spatial": + resnets.append( + ResnetBlockCondNorm2D( + in_channels=input_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm="spatial", + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + ) + ) + else: + resnets.append( + ResnetBlock2D( + in_channels=input_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + attentions.append( + Attention( + out_channels, + heads=out_channels // attention_head_dim, + dim_head=attention_head_dim, + rescale_output_factor=output_scale_factor, + eps=resnet_eps, + norm_num_groups=resnet_groups if resnet_time_scale_shift != "spatial" else None, + spatial_norm_dim=temb_channels if resnet_time_scale_shift == "spatial" else None, + residual_connection=True, + bias=True, + upcast_softmax=True, + _from_deprecated_attn_block=True, + ) + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)]) + else: + self.upsamplers = None + + self.resolution_idx = resolution_idx + + def forward(self, hidden_states: torch.FloatTensor, temb: Optional[torch.FloatTensor] = None) -> torch.FloatTensor: + for resnet, attn in zip(self.resnets, self.attentions): + hidden_states = resnet(hidden_states, temb=temb) + hidden_states = attn(hidden_states, temb=temb) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states) + + return hidden_states + + +class AttnSkipUpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + prev_output_channel: int, + out_channels: int, + temb_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + output_scale_factor: float = np.sqrt(2.0), + add_upsample: bool = True, + ): + super().__init__() + self.attentions = nn.ModuleList([]) + self.resnets = nn.ModuleList([]) + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + self.resnets.append( + ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min(resnet_in_channels + res_skip_channels // 4, 32), + groups_out=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + if attention_head_dim is None: + logger.warning( + f"It is not recommend to pass `attention_head_dim=None`. Defaulting `attention_head_dim` to `out_channels`: {out_channels}." 
+ ) + attention_head_dim = out_channels + + self.attentions.append( + Attention( + out_channels, + heads=out_channels // attention_head_dim, + dim_head=attention_head_dim, + rescale_output_factor=output_scale_factor, + eps=resnet_eps, + norm_num_groups=32, + residual_connection=True, + bias=True, + upcast_softmax=True, + _from_deprecated_attn_block=True, + ) + ) + + self.upsampler = FirUpsample2D(in_channels, out_channels=out_channels) + if add_upsample: + self.resnet_up = ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min(out_channels // 4, 32), + groups_out=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + use_in_shortcut=True, + up=True, + kernel="fir", + ) + self.skip_conv = nn.Conv2d(out_channels, 3, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + self.skip_norm = torch.nn.GroupNorm( + num_groups=min(out_channels // 4, 32), num_channels=out_channels, eps=resnet_eps, affine=True + ) + self.act = nn.SiLU() + else: + self.resnet_up = None + self.skip_conv = None + self.skip_norm = None + self.act = None + + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + skip_sample=None, + *args, + **kwargs, + ) -> Tuple[torch.FloatTensor, torch.FloatTensor]: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." 
+ deprecate("scale", "1.0.0", deprecation_message) + + for resnet in self.resnets: + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + hidden_states = resnet(hidden_states, temb) + + hidden_states = self.attentions[0](hidden_states) + + if skip_sample is not None: + skip_sample = self.upsampler(skip_sample) + else: + skip_sample = 0 + + if self.resnet_up is not None: + skip_sample_states = self.skip_norm(hidden_states) + skip_sample_states = self.act(skip_sample_states) + skip_sample_states = self.skip_conv(skip_sample_states) + + skip_sample = skip_sample + skip_sample_states + + hidden_states = self.resnet_up(hidden_states, temb) + + return hidden_states, skip_sample + + +class SkipUpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + prev_output_channel: int, + out_channels: int, + temb_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_pre_norm: bool = True, + output_scale_factor: float = np.sqrt(2.0), + add_upsample: bool = True, + upsample_padding: int = 1, + ): + super().__init__() + self.resnets = nn.ModuleList([]) + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + self.resnets.append( + ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min((resnet_in_channels + res_skip_channels) // 4, 32), + groups_out=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.upsampler = FirUpsample2D(in_channels, out_channels=out_channels) + if add_upsample: + self.resnet_up = ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min(out_channels // 4, 32), + groups_out=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + use_in_shortcut=True, + up=True, + kernel="fir", + ) + self.skip_conv = nn.Conv2d(out_channels, 3, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + self.skip_norm = torch.nn.GroupNorm( + num_groups=min(out_channels // 4, 32), num_channels=out_channels, eps=resnet_eps, affine=True + ) + self.act = nn.SiLU() + else: + self.resnet_up = None + self.skip_conv = None + self.skip_norm = None + self.act = None + + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + skip_sample=None, + *args, + **kwargs, + ) -> Tuple[torch.FloatTensor, torch.FloatTensor]: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." 
+ deprecate("scale", "1.0.0", deprecation_message) + + for resnet in self.resnets: + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + hidden_states = resnet(hidden_states, temb) + + if skip_sample is not None: + skip_sample = self.upsampler(skip_sample) + else: + skip_sample = 0 + + if self.resnet_up is not None: + skip_sample_states = self.skip_norm(hidden_states) + skip_sample_states = self.act(skip_sample_states) + skip_sample_states = self.skip_conv(skip_sample_states) + + skip_sample = skip_sample + skip_sample_states + + hidden_states = self.resnet_up(hidden_states, temb) + + return hidden_states, skip_sample + + +class ResnetUpsampleBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + prev_output_channel: int, + out_channels: int, + temb_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor: float = 1.0, + add_upsample: bool = True, + skip_time_act: bool = False, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + resnets.append( + ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList( + [ + ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + up=True, + ) + ] + ) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + upsample_size: Optional[int] = None, + *args, + **kwargs, + ) -> torch.FloatTensor: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." 
+ deprecate("scale", "1.0.0", deprecation_message) + + for resnet in self.resnets: + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + if is_torch_version(">=", "1.11.0"): + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb, use_reentrant=False + ) + else: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + else: + hidden_states = resnet(hidden_states, temb) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, temb) + + return hidden_states + + +class SimpleCrossAttnUpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + prev_output_channel: int, + temb_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + cross_attention_dim: int = 1280, + output_scale_factor: float = 1.0, + add_upsample: bool = True, + skip_time_act: bool = False, + only_cross_attention: bool = False, + cross_attention_norm: Optional[str] = None, + ): + super().__init__() + resnets = [] + attentions = [] + + self.has_cross_attention = True + self.attention_head_dim = attention_head_dim + + self.num_heads = out_channels // self.attention_head_dim + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + resnets.append( + ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + ) + ) + + processor = ( + AttnAddedKVProcessor2_0() if hasattr(F, "scaled_dot_product_attention") else AttnAddedKVProcessor() + ) + + attentions.append( + Attention( + query_dim=out_channels, + cross_attention_dim=out_channels, + heads=self.num_heads, + dim_head=self.attention_head_dim, + added_kv_proj_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + bias=True, + upcast_softmax=True, + only_cross_attention=only_cross_attention, + cross_attention_norm=cross_attention_norm, + processor=processor, + ) + ) + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList( + [ + ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + up=True, + ) + ] + ) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + self.resolution_idx = resolution_idx + + def forward( + 
self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + upsample_size: Optional[int] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: + cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {} + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. `scale` will be ignored.") + + if attention_mask is None: + # if encoder_hidden_states is defined: we are doing cross-attn, so we should use cross-attn mask. + mask = None if encoder_hidden_states is None else encoder_attention_mask + else: + # when attention_mask is defined: we don't even check for encoder_attention_mask. + # this is to maintain compatibility with UnCLIP, which uses 'attention_mask' param for cross-attn masks. + # TODO: UnCLIP should express cross-attn mask via encoder_attention_mask param instead of via attention_mask. + # then we can simplify this whole if/else block to: + # mask = attention_mask if encoder_hidden_states is None else encoder_attention_mask + mask = attention_mask + + for resnet, attn in zip(self.resnets, self.attentions): + # resnet + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=mask, + **cross_attention_kwargs, + ) + else: + hidden_states = resnet(hidden_states, temb) + + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=mask, + **cross_attention_kwargs, + ) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, temb) + + return hidden_states + + +class KUpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + resolution_idx: int, + dropout: float = 0.0, + num_layers: int = 5, + resnet_eps: float = 1e-5, + resnet_act_fn: str = "gelu", + resnet_group_size: Optional[int] = 32, + add_upsample: bool = True, + ): + super().__init__() + resnets = [] + k_in_channels = 2 * out_channels + k_out_channels = in_channels + num_layers = num_layers - 1 + + for i in range(num_layers): + in_channels = k_in_channels if i == 0 else out_channels + groups = in_channels // resnet_group_size + groups_out = out_channels // resnet_group_size + + resnets.append( + ResnetBlockCondNorm2D( + in_channels=in_channels, + out_channels=k_out_channels if (i == num_layers - 1) else out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=groups, + groups_out=groups_out, + dropout=dropout, + non_linearity=resnet_act_fn, + time_embedding_norm="ada_group", + conv_shortcut_bias=False, + ) + ) + + self.resnets = 
nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList([KUpsample2D()]) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + upsample_size: Optional[int] = None, + *args, + **kwargs, + ) -> torch.FloatTensor: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." + deprecate("scale", "1.0.0", deprecation_message) + + res_hidden_states_tuple = res_hidden_states_tuple[-1] + if res_hidden_states_tuple is not None: + hidden_states = torch.cat([hidden_states, res_hidden_states_tuple], dim=1) + + for resnet in self.resnets: + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + if is_torch_version(">=", "1.11.0"): + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb, use_reentrant=False + ) + else: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + else: + hidden_states = resnet(hidden_states, temb) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states) + + return hidden_states + + +class KCrossAttnUpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + resolution_idx: int, + dropout: float = 0.0, + num_layers: int = 4, + resnet_eps: float = 1e-5, + resnet_act_fn: str = "gelu", + resnet_group_size: int = 32, + attention_head_dim: int = 1, # attention dim_head + cross_attention_dim: int = 768, + add_upsample: bool = True, + upcast_attention: bool = False, + ): + super().__init__() + resnets = [] + attentions = [] + + is_first_block = in_channels == out_channels == temb_channels + is_middle_block = in_channels != out_channels + add_self_attention = True if is_first_block else False + + self.has_cross_attention = True + self.attention_head_dim = attention_head_dim + + # in_channels, and out_channels for the block (k-unet) + k_in_channels = out_channels if is_first_block else 2 * out_channels + k_out_channels = in_channels + + num_layers = num_layers - 1 + + for i in range(num_layers): + in_channels = k_in_channels if i == 0 else out_channels + groups = in_channels // resnet_group_size + groups_out = out_channels // resnet_group_size + + if is_middle_block and (i == num_layers - 1): + conv_2d_out_channels = k_out_channels + else: + conv_2d_out_channels = None + + resnets.append( + ResnetBlockCondNorm2D( + in_channels=in_channels, + out_channels=out_channels, + conv_2d_out_channels=conv_2d_out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=groups, + groups_out=groups_out, + dropout=dropout, + non_linearity=resnet_act_fn, + time_embedding_norm="ada_group", + conv_shortcut_bias=False, + ) + ) + attentions.append( + KAttentionBlock( + k_out_channels if (i == num_layers - 1) else out_channels, + k_out_channels // attention_head_dim + if (i == num_layers - 1) + else out_channels // attention_head_dim, + attention_head_dim, + 
cross_attention_dim=cross_attention_dim, + temb_channels=temb_channels, + attention_bias=True, + add_self_attention=add_self_attention, + cross_attention_norm="layer_norm", + upcast_attention=upcast_attention, + ) + ) + + self.resnets = nn.ModuleList(resnets) + self.attentions = nn.ModuleList(attentions) + + if add_upsample: + self.upsamplers = nn.ModuleList([KUpsample2D()]) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + upsample_size: Optional[int] = None, + attention_mask: Optional[torch.FloatTensor] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: + res_hidden_states_tuple = res_hidden_states_tuple[-1] + if res_hidden_states_tuple is not None: + hidden_states = torch.cat([hidden_states, res_hidden_states_tuple], dim=1) + + for resnet, attn in zip(self.resnets, self.attentions): + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + ckpt_kwargs: Dict[str, Any] = {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {} + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), + hidden_states, + temb, + **ckpt_kwargs, + ) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + emb=temb, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + encoder_attention_mask=encoder_attention_mask, + ) + else: + hidden_states = resnet(hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + emb=temb, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + encoder_attention_mask=encoder_attention_mask, + ) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states) + + return hidden_states + + +# can potentially later be renamed to `No-feed-forward` attention +class KAttentionBlock(nn.Module): + r""" + A basic Transformer block. + + Parameters: + dim (`int`): The number of channels in the input and output. + num_attention_heads (`int`): The number of heads to use for multi-head attention. + attention_head_dim (`int`): The number of channels in each head. + dropout (`float`, *optional*, defaults to 0.0): The dropout probability to use. + cross_attention_dim (`int`, *optional*): The size of the encoder_hidden_states vector for cross attention. + attention_bias (`bool`, *optional*, defaults to `False`): + Configure if the attention layers should contain a bias parameter. + upcast_attention (`bool`, *optional*, defaults to `False`): + Set to `True` to upcast the attention computation to `float32`. + temb_channels (`int`, *optional*, defaults to 768): + The number of channels in the token embedding. + add_self_attention (`bool`, *optional*, defaults to `False`): + Set to `True` to add self-attention to the block. + cross_attention_norm (`str`, *optional*, defaults to `None`): + The type of normalization to use for the cross attention. 
Can be `None`, `layer_norm`, or `group_norm`. + group_size (`int`, *optional*, defaults to 32): + The number of groups to separate the channels into for group normalization. + """ + + def __init__( + self, + dim: int, + num_attention_heads: int, + attention_head_dim: int, + dropout: float = 0.0, + cross_attention_dim: Optional[int] = None, + attention_bias: bool = False, + upcast_attention: bool = False, + temb_channels: int = 768, # for ada_group_norm + add_self_attention: bool = False, + cross_attention_norm: Optional[str] = None, + group_size: int = 32, + ): + super().__init__() + self.add_self_attention = add_self_attention + + # 1. Self-Attn + if add_self_attention: + self.norm1 = AdaGroupNorm(temb_channels, dim, max(1, dim // group_size)) + self.attn1 = Attention( + query_dim=dim, + heads=num_attention_heads, + dim_head=attention_head_dim, + dropout=dropout, + bias=attention_bias, + cross_attention_dim=None, + cross_attention_norm=None, + ) + + # 2. Cross-Attn + self.norm2 = AdaGroupNorm(temb_channels, dim, max(1, dim // group_size)) + self.attn2 = Attention( + query_dim=dim, + cross_attention_dim=cross_attention_dim, + heads=num_attention_heads, + dim_head=attention_head_dim, + dropout=dropout, + bias=attention_bias, + upcast_attention=upcast_attention, + cross_attention_norm=cross_attention_norm, + ) + + def _to_3d(self, hidden_states: torch.FloatTensor, height: int, weight: int) -> torch.FloatTensor: + return hidden_states.permute(0, 2, 3, 1).reshape(hidden_states.shape[0], height * weight, -1) + + def _to_4d(self, hidden_states: torch.FloatTensor, height: int, weight: int) -> torch.FloatTensor: + return hidden_states.permute(0, 2, 1).reshape(hidden_states.shape[0], -1, height, weight) + + def forward( + self, + hidden_states: torch.FloatTensor, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + # TODO: mark emb as non-optional (self.norm2 requires it). + # requires assessing impact of change to positional param interface. + emb: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: + cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {} + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. `scale` will be ignored.") + + # 1. Self-Attention + if self.add_self_attention: + norm_hidden_states = self.norm1(hidden_states, emb) + + height, weight = norm_hidden_states.shape[2:] + norm_hidden_states = self._to_3d(norm_hidden_states, height, weight) + + attn_output = self.attn1( + norm_hidden_states, + encoder_hidden_states=None, + attention_mask=attention_mask, + **cross_attention_kwargs, + ) + attn_output = self._to_4d(attn_output, height, weight) + + hidden_states = attn_output + hidden_states + + # 2. 
Cross-Attention/None + norm_hidden_states = self.norm2(hidden_states, emb) + + height, weight = norm_hidden_states.shape[2:] + norm_hidden_states = self._to_3d(norm_hidden_states, height, weight) + attn_output = self.attn2( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask if encoder_hidden_states is None else encoder_attention_mask, + **cross_attention_kwargs, + ) + attn_output = self._to_4d(attn_output, height, weight) + + hidden_states = attn_output + hidden_states + + return hidden_states diff --git a/ComfyUI-Easy-Use/py/brushnet/unet_2d_condition.py b/ComfyUI-Easy-Use/py/brushnet/unet_2d_condition.py new file mode 100644 index 0000000000000000000000000000000000000000..103cd08d02e92753bce82af557dfc460f7406924 --- /dev/null +++ b/ComfyUI-Easy-Use/py/brushnet/unet_2d_condition.py @@ -0,0 +1,1359 @@ +# Copyright 2024 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from dataclasses import dataclass +from typing import Any, Dict, List, Optional, Tuple, Union + +import torch +import torch.nn as nn +import torch.utils.checkpoint + +from diffusers.configuration_utils import ConfigMixin, register_to_config +from diffusers.loaders import PeftAdapterMixin, UNet2DConditionLoadersMixin +from diffusers.utils import USE_PEFT_BACKEND, BaseOutput, deprecate, logging, scale_lora_layers, unscale_lora_layers +from diffusers.models.activations import get_activation +from diffusers.models.attention_processor import ( + ADDED_KV_ATTENTION_PROCESSORS, + CROSS_ATTENTION_PROCESSORS, + Attention, + AttentionProcessor, + AttnAddedKVProcessor, + AttnProcessor, +) +from diffusers.models.embeddings import ( + GaussianFourierProjection, + GLIGENTextBoundingboxProjection, + ImageHintTimeEmbedding, + ImageProjection, + ImageTimeEmbedding, + TextImageProjection, + TextImageTimeEmbedding, + TextTimeEmbedding, + TimestepEmbedding, + Timesteps, +) +from diffusers.models.modeling_utils import ModelMixin +from .unet_2d_blocks import ( + get_down_block, + get_mid_block, + get_up_block, +) + +logger = logging.get_logger(__name__) # pylint: disable=invalid-name + + +@dataclass +class UNet2DConditionOutput(BaseOutput): + """ + The output of [`UNet2DConditionModel`]. + + Args: + sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)`): + The hidden states output conditioned on `encoder_hidden_states` input. Output of last layer of model. + """ + + sample: torch.FloatTensor = None + + +class UNet2DConditionModel(ModelMixin, ConfigMixin, UNet2DConditionLoadersMixin, PeftAdapterMixin): + r""" + A conditional 2D UNet model that takes a noisy sample, conditional state, and a timestep and returns a sample + shaped output. + + This model inherits from [`ModelMixin`]. Check the superclass documentation for it's generic methods implemented + for all models (such as downloading or saving). + + Parameters: + sample_size (`int` or `Tuple[int, int]`, *optional*, defaults to `None`): + Height and width of input/output sample. 
+ in_channels (`int`, *optional*, defaults to 4): Number of channels in the input sample. + out_channels (`int`, *optional*, defaults to 4): Number of channels in the output. + center_input_sample (`bool`, *optional*, defaults to `False`): Whether to center the input sample. + flip_sin_to_cos (`bool`, *optional*, defaults to `True`): + Whether to flip the sin to cos in the time embedding. + freq_shift (`int`, *optional*, defaults to 0): The frequency shift to apply to the time embedding. + down_block_types (`Tuple[str]`, *optional*, defaults to `("CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "DownBlock2D")`): + The tuple of downsample blocks to use. + mid_block_type (`str`, *optional*, defaults to `"UNetMidBlock2DCrossAttn"`): + Block type for middle of UNet, it can be one of `UNetMidBlock2DCrossAttn`, `UNetMidBlock2D`, or + `UNetMidBlock2DSimpleCrossAttn`. If `None`, the mid block layer is skipped. + up_block_types (`Tuple[str]`, *optional*, defaults to `("UpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D")`): + The tuple of upsample blocks to use. + only_cross_attention(`bool` or `Tuple[bool]`, *optional*, default to `False`): + Whether to include self-attention in the basic transformer blocks, see + [`~models.attention.BasicTransformerBlock`]. + block_out_channels (`Tuple[int]`, *optional*, defaults to `(320, 640, 1280, 1280)`): + The tuple of output channels for each block. + layers_per_block (`int`, *optional*, defaults to 2): The number of layers per block. + downsample_padding (`int`, *optional*, defaults to 1): The padding to use for the downsampling convolution. + mid_block_scale_factor (`float`, *optional*, defaults to 1.0): The scale factor to use for the mid block. + dropout (`float`, *optional*, defaults to 0.0): The dropout probability to use. + act_fn (`str`, *optional*, defaults to `"silu"`): The activation function to use. + norm_num_groups (`int`, *optional*, defaults to 32): The number of groups to use for the normalization. + If `None`, normalization and activation layers is skipped in post-processing. + norm_eps (`float`, *optional*, defaults to 1e-5): The epsilon to use for the normalization. + cross_attention_dim (`int` or `Tuple[int]`, *optional*, defaults to 1280): + The dimension of the cross attention features. + transformer_layers_per_block (`int`, `Tuple[int]`, or `Tuple[Tuple]` , *optional*, defaults to 1): + The number of transformer blocks of type [`~models.attention.BasicTransformerBlock`]. Only relevant for + [`~models.unet_2d_blocks.CrossAttnDownBlock2D`], [`~models.unet_2d_blocks.CrossAttnUpBlock2D`], + [`~models.unet_2d_blocks.UNetMidBlock2DCrossAttn`]. + reverse_transformer_layers_per_block : (`Tuple[Tuple]`, *optional*, defaults to None): + The number of transformer blocks of type [`~models.attention.BasicTransformerBlock`], in the upsampling + blocks of the U-Net. Only relevant if `transformer_layers_per_block` is of type `Tuple[Tuple]` and for + [`~models.unet_2d_blocks.CrossAttnDownBlock2D`], [`~models.unet_2d_blocks.CrossAttnUpBlock2D`], + [`~models.unet_2d_blocks.UNetMidBlock2DCrossAttn`]. + encoder_hid_dim (`int`, *optional*, defaults to None): + If `encoder_hid_dim_type` is defined, `encoder_hidden_states` will be projected from `encoder_hid_dim` + dimension to `cross_attention_dim`. 
+ encoder_hid_dim_type (`str`, *optional*, defaults to `None`): + If given, the `encoder_hidden_states` and potentially other embeddings are down-projected to text + embeddings of dimension `cross_attention` according to `encoder_hid_dim_type`. + attention_head_dim (`int`, *optional*, defaults to 8): The dimension of the attention heads. + num_attention_heads (`int`, *optional*): + The number of attention heads. If not defined, defaults to `attention_head_dim` + resnet_time_scale_shift (`str`, *optional*, defaults to `"default"`): Time scale shift config + for ResNet blocks (see [`~models.resnet.ResnetBlock2D`]). Choose from `default` or `scale_shift`. + class_embed_type (`str`, *optional*, defaults to `None`): + The type of class embedding to use which is ultimately summed with the time embeddings. Choose from `None`, + `"timestep"`, `"identity"`, `"projection"`, or `"simple_projection"`. + addition_embed_type (`str`, *optional*, defaults to `None`): + Configures an optional embedding which will be summed with the time embeddings. Choose from `None` or + "text". "text" will use the `TextTimeEmbedding` layer. + addition_time_embed_dim: (`int`, *optional*, defaults to `None`): + Dimension for the timestep embeddings. + num_class_embeds (`int`, *optional*, defaults to `None`): + Input dimension of the learnable embedding matrix to be projected to `time_embed_dim`, when performing + class conditioning with `class_embed_type` equal to `None`. + time_embedding_type (`str`, *optional*, defaults to `positional`): + The type of position embedding to use for timesteps. Choose from `positional` or `fourier`. + time_embedding_dim (`int`, *optional*, defaults to `None`): + An optional override for the dimension of the projected time embedding. + time_embedding_act_fn (`str`, *optional*, defaults to `None`): + Optional activation function to use only once on the time embeddings before they are passed to the rest of + the UNet. Choose from `silu`, `mish`, `gelu`, and `swish`. + timestep_post_act (`str`, *optional*, defaults to `None`): + The second activation function to use in timestep embedding. Choose from `silu`, `mish` and `gelu`. + time_cond_proj_dim (`int`, *optional*, defaults to `None`): + The dimension of `cond_proj` layer in the timestep embedding. + conv_in_kernel (`int`, *optional*, default to `3`): The kernel size of `conv_in` layer. + conv_out_kernel (`int`, *optional*, default to `3`): The kernel size of `conv_out` layer. + projection_class_embeddings_input_dim (`int`, *optional*): The dimension of the `class_labels` input when + `class_embed_type="projection"`. Required when `class_embed_type="projection"`. + class_embeddings_concat (`bool`, *optional*, defaults to `False`): Whether to concatenate the time + embeddings with the class embeddings. + mid_block_only_cross_attention (`bool`, *optional*, defaults to `None`): + Whether to use cross attention with the mid block when using the `UNetMidBlock2DSimpleCrossAttn`. If + `only_cross_attention` is given as a single boolean and `mid_block_only_cross_attention` is `None`, the + `only_cross_attention` value is used as the value for `mid_block_only_cross_attention`. Default to `False` + otherwise. 
+ """ + + _supports_gradient_checkpointing = True + + @register_to_config + def __init__( + self, + sample_size: Optional[int] = None, + in_channels: int = 4, + out_channels: int = 4, + center_input_sample: bool = False, + flip_sin_to_cos: bool = True, + freq_shift: int = 0, + down_block_types: Tuple[str] = ( + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "DownBlock2D", + ), + mid_block_type: Optional[str] = "UNetMidBlock2DCrossAttn", + up_block_types: Tuple[str] = ( + "UpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D"), + only_cross_attention: Union[bool, Tuple[bool]] = False, + block_out_channels: Tuple[int] = (320, 640, 1280, 1280), + layers_per_block: Union[int, Tuple[int]] = 2, + downsample_padding: int = 1, + mid_block_scale_factor: float = 1, + dropout: float = 0.0, + act_fn: str = "silu", + norm_num_groups: Optional[int] = 32, + norm_eps: float = 1e-5, + cross_attention_dim: Union[int, Tuple[int]] = 1280, + transformer_layers_per_block: Union[int, Tuple[int], Tuple[Tuple]] = 1, + reverse_transformer_layers_per_block: Optional[Tuple[Tuple[int]]] = None, + encoder_hid_dim: Optional[int] = None, + encoder_hid_dim_type: Optional[str] = None, + attention_head_dim: Union[int, Tuple[int]] = 8, + num_attention_heads: Optional[Union[int, Tuple[int]]] = None, + dual_cross_attention: bool = False, + use_linear_projection: bool = False, + class_embed_type: Optional[str] = None, + addition_embed_type: Optional[str] = None, + addition_time_embed_dim: Optional[int] = None, + num_class_embeds: Optional[int] = None, + upcast_attention: bool = False, + resnet_time_scale_shift: str = "default", + resnet_skip_time_act: bool = False, + resnet_out_scale_factor: float = 1.0, + time_embedding_type: str = "positional", + time_embedding_dim: Optional[int] = None, + time_embedding_act_fn: Optional[str] = None, + timestep_post_act: Optional[str] = None, + time_cond_proj_dim: Optional[int] = None, + conv_in_kernel: int = 3, + conv_out_kernel: int = 3, + projection_class_embeddings_input_dim: Optional[int] = None, + attention_type: str = "default", + class_embeddings_concat: bool = False, + mid_block_only_cross_attention: Optional[bool] = None, + cross_attention_norm: Optional[str] = None, + addition_embed_type_num_heads: int = 64, + ): + super().__init__() + + self.sample_size = sample_size + + if num_attention_heads is not None: + raise ValueError( + "At the moment it is not possible to define the number of attention heads via `num_attention_heads` because of a naming issue as described in https://github.com/huggingface/diffusers/issues/2011#issuecomment-1547958131. Passing `num_attention_heads` will only be supported in diffusers v0.19." + ) + + # If `num_attention_heads` is not defined (which is the case for most models) + # it will default to `attention_head_dim`. This looks weird upon first reading it and it is. + # The reason for this behavior is to correct for incorrectly named variables that were introduced + # when this library was created. The incorrect naming was only discovered much later in https://github.com/huggingface/diffusers/issues/2011#issuecomment-1547958131 + # Changing `attention_head_dim` to `num_attention_heads` for 40,000+ configurations is too backwards breaking + # which is why we correct for the naming here. 
+ num_attention_heads = num_attention_heads or attention_head_dim + + # Check inputs + self._check_config( + down_block_types=down_block_types, + up_block_types=up_block_types, + only_cross_attention=only_cross_attention, + block_out_channels=block_out_channels, + layers_per_block=layers_per_block, + cross_attention_dim=cross_attention_dim, + transformer_layers_per_block=transformer_layers_per_block, + reverse_transformer_layers_per_block=reverse_transformer_layers_per_block, + attention_head_dim=attention_head_dim, + num_attention_heads=num_attention_heads, + ) + + # input + conv_in_padding = (conv_in_kernel - 1) // 2 + self.conv_in = nn.Conv2d( + in_channels, block_out_channels[0], kernel_size=conv_in_kernel, padding=conv_in_padding + ) + + # time + time_embed_dim, timestep_input_dim = self._set_time_proj( + time_embedding_type, + block_out_channels=block_out_channels, + flip_sin_to_cos=flip_sin_to_cos, + freq_shift=freq_shift, + time_embedding_dim=time_embedding_dim, + ) + + self.time_embedding = TimestepEmbedding( + timestep_input_dim, + time_embed_dim, + act_fn=act_fn, + post_act_fn=timestep_post_act, + cond_proj_dim=time_cond_proj_dim, + ) + + self._set_encoder_hid_proj( + encoder_hid_dim_type, + cross_attention_dim=cross_attention_dim, + encoder_hid_dim=encoder_hid_dim, + ) + + # class embedding + self._set_class_embedding( + class_embed_type, + act_fn=act_fn, + num_class_embeds=num_class_embeds, + projection_class_embeddings_input_dim=projection_class_embeddings_input_dim, + time_embed_dim=time_embed_dim, + timestep_input_dim=timestep_input_dim, + ) + + self._set_add_embedding( + addition_embed_type, + addition_embed_type_num_heads=addition_embed_type_num_heads, + addition_time_embed_dim=addition_time_embed_dim, + cross_attention_dim=cross_attention_dim, + encoder_hid_dim=encoder_hid_dim, + flip_sin_to_cos=flip_sin_to_cos, + freq_shift=freq_shift, + projection_class_embeddings_input_dim=projection_class_embeddings_input_dim, + time_embed_dim=time_embed_dim, + ) + + if time_embedding_act_fn is None: + self.time_embed_act = None + else: + self.time_embed_act = get_activation(time_embedding_act_fn) + + self.down_blocks = nn.ModuleList([]) + self.up_blocks = nn.ModuleList([]) + + if isinstance(only_cross_attention, bool): + if mid_block_only_cross_attention is None: + mid_block_only_cross_attention = only_cross_attention + + only_cross_attention = [only_cross_attention] * len(down_block_types) + + if mid_block_only_cross_attention is None: + mid_block_only_cross_attention = False + + if isinstance(num_attention_heads, int): + num_attention_heads = (num_attention_heads,) * len(down_block_types) + + if isinstance(attention_head_dim, int): + attention_head_dim = (attention_head_dim,) * len(down_block_types) + + if isinstance(cross_attention_dim, int): + cross_attention_dim = (cross_attention_dim,) * len(down_block_types) + + if isinstance(layers_per_block, int): + layers_per_block = [layers_per_block] * len(down_block_types) + + if isinstance(transformer_layers_per_block, int): + transformer_layers_per_block = [transformer_layers_per_block] * len(down_block_types) + + if class_embeddings_concat: + # The time embeddings are concatenated with the class embeddings. 
The dimension of the + # time embeddings passed to the down, middle, and up blocks is twice the dimension of the + # regular time embeddings + blocks_time_embed_dim = time_embed_dim * 2 + else: + blocks_time_embed_dim = time_embed_dim + + # down + output_channel = block_out_channels[0] + for i, down_block_type in enumerate(down_block_types): + input_channel = output_channel + output_channel = block_out_channels[i] + is_final_block = i == len(block_out_channels) - 1 + + down_block = get_down_block( + down_block_type, + num_layers=layers_per_block[i], + transformer_layers_per_block=transformer_layers_per_block[i], + in_channels=input_channel, + out_channels=output_channel, + temb_channels=blocks_time_embed_dim, + add_downsample=not is_final_block, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim[i], + num_attention_heads=num_attention_heads[i], + downsample_padding=downsample_padding, + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention[i], + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_type=attention_type, + resnet_skip_time_act=resnet_skip_time_act, + resnet_out_scale_factor=resnet_out_scale_factor, + cross_attention_norm=cross_attention_norm, + attention_head_dim=attention_head_dim[i] if attention_head_dim[i] is not None else output_channel, + dropout=dropout, + ) + self.down_blocks.append(down_block) + + # mid + self.mid_block = get_mid_block( + mid_block_type, + temb_channels=blocks_time_embed_dim, + in_channels=block_out_channels[-1], + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + output_scale_factor=mid_block_scale_factor, + transformer_layers_per_block=transformer_layers_per_block[-1], + num_attention_heads=num_attention_heads[-1], + cross_attention_dim=cross_attention_dim[-1], + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + mid_block_only_cross_attention=mid_block_only_cross_attention, + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_type=attention_type, + resnet_skip_time_act=resnet_skip_time_act, + cross_attention_norm=cross_attention_norm, + attention_head_dim=attention_head_dim[-1], + dropout=dropout, + ) + + # count how many layers upsample the images + self.num_upsamplers = 0 + + # up + reversed_block_out_channels = list(reversed(block_out_channels)) + reversed_num_attention_heads = list(reversed(num_attention_heads)) + reversed_layers_per_block = list(reversed(layers_per_block)) + reversed_cross_attention_dim = list(reversed(cross_attention_dim)) + reversed_transformer_layers_per_block = ( + list(reversed(transformer_layers_per_block)) + if reverse_transformer_layers_per_block is None + else reverse_transformer_layers_per_block + ) + only_cross_attention = list(reversed(only_cross_attention)) + + output_channel = reversed_block_out_channels[0] + for i, up_block_type in enumerate(up_block_types): + is_final_block = i == len(block_out_channels) - 1 + + prev_output_channel = output_channel + output_channel = reversed_block_out_channels[i] + input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)] + + # add upsample block for all BUT final layer + if not is_final_block: + add_upsample = True + self.num_upsamplers += 1 + else: + add_upsample = False + + up_block = get_up_block( + up_block_type, + 
num_layers=reversed_layers_per_block[i] + 1, + transformer_layers_per_block=reversed_transformer_layers_per_block[i], + in_channels=input_channel, + out_channels=output_channel, + prev_output_channel=prev_output_channel, + temb_channels=blocks_time_embed_dim, + add_upsample=add_upsample, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resolution_idx=i, + resnet_groups=norm_num_groups, + cross_attention_dim=reversed_cross_attention_dim[i], + num_attention_heads=reversed_num_attention_heads[i], + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention[i], + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_type=attention_type, + resnet_skip_time_act=resnet_skip_time_act, + resnet_out_scale_factor=resnet_out_scale_factor, + cross_attention_norm=cross_attention_norm, + attention_head_dim=attention_head_dim[i] if attention_head_dim[i] is not None else output_channel, + dropout=dropout, + ) + self.up_blocks.append(up_block) + prev_output_channel = output_channel + + # out + if norm_num_groups is not None: + self.conv_norm_out = nn.GroupNorm( + num_channels=block_out_channels[0], num_groups=norm_num_groups, eps=norm_eps + ) + + self.conv_act = get_activation(act_fn) + + else: + self.conv_norm_out = None + self.conv_act = None + + conv_out_padding = (conv_out_kernel - 1) // 2 + self.conv_out = nn.Conv2d( + block_out_channels[0], out_channels, kernel_size=conv_out_kernel, padding=conv_out_padding + ) + + self._set_pos_net_if_use_gligen(attention_type=attention_type, cross_attention_dim=cross_attention_dim) + + def _check_config( + self, + down_block_types: Tuple[str], + up_block_types: Tuple[str], + only_cross_attention: Union[bool, Tuple[bool]], + block_out_channels: Tuple[int], + layers_per_block: Union[int, Tuple[int]], + cross_attention_dim: Union[int, Tuple[int]], + transformer_layers_per_block: Union[int, Tuple[int], Tuple[Tuple[int]]], + reverse_transformer_layers_per_block: bool, + attention_head_dim: int, + num_attention_heads: Optional[Union[int, Tuple[int]]], + ): + if len(down_block_types) != len(up_block_types): + raise ValueError( + f"Must provide the same number of `down_block_types` as `up_block_types`. `down_block_types`: {down_block_types}. `up_block_types`: {up_block_types}." + ) + + if len(block_out_channels) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `block_out_channels` as `down_block_types`. `block_out_channels`: {block_out_channels}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(only_cross_attention, bool) and len(only_cross_attention) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `only_cross_attention` as `down_block_types`. `only_cross_attention`: {only_cross_attention}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(num_attention_heads, int) and len(num_attention_heads) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `num_attention_heads` as `down_block_types`. `num_attention_heads`: {num_attention_heads}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(attention_head_dim, int) and len(attention_head_dim) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `attention_head_dim` as `down_block_types`. `attention_head_dim`: {attention_head_dim}. `down_block_types`: {down_block_types}." 
+ ) + + if isinstance(cross_attention_dim, list) and len(cross_attention_dim) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `cross_attention_dim` as `down_block_types`. `cross_attention_dim`: {cross_attention_dim}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(layers_per_block, int) and len(layers_per_block) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `layers_per_block` as `down_block_types`. `layers_per_block`: {layers_per_block}. `down_block_types`: {down_block_types}." + ) + if isinstance(transformer_layers_per_block, list) and reverse_transformer_layers_per_block is None: + for layer_number_per_block in transformer_layers_per_block: + if isinstance(layer_number_per_block, list): + raise ValueError("Must provide 'reverse_transformer_layers_per_block` if using asymmetrical UNet.") + + def _set_time_proj( + self, + time_embedding_type: str, + block_out_channels: int, + flip_sin_to_cos: bool, + freq_shift: float, + time_embedding_dim: int, + ) -> Tuple[int, int]: + if time_embedding_type == "fourier": + time_embed_dim = time_embedding_dim or block_out_channels[0] * 2 + if time_embed_dim % 2 != 0: + raise ValueError(f"`time_embed_dim` should be divisible by 2, but is {time_embed_dim}.") + self.time_proj = GaussianFourierProjection( + time_embed_dim // 2, set_W_to_weight=False, log=False, flip_sin_to_cos=flip_sin_to_cos + ) + timestep_input_dim = time_embed_dim + elif time_embedding_type == "positional": + time_embed_dim = time_embedding_dim or block_out_channels[0] * 4 + + self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift) + timestep_input_dim = block_out_channels[0] + else: + raise ValueError( + f"{time_embedding_type} does not exist. Please make sure to use one of `fourier` or `positional`." + ) + + return time_embed_dim, timestep_input_dim + + def _set_encoder_hid_proj( + self, + encoder_hid_dim_type: Optional[str], + cross_attention_dim: Union[int, Tuple[int]], + encoder_hid_dim: Optional[int], + ): + if encoder_hid_dim_type is None and encoder_hid_dim is not None: + encoder_hid_dim_type = "text_proj" + self.register_to_config(encoder_hid_dim_type=encoder_hid_dim_type) + logger.info("encoder_hid_dim_type defaults to 'text_proj' as `encoder_hid_dim` is defined.") + + if encoder_hid_dim is None and encoder_hid_dim_type is not None: + raise ValueError( + f"`encoder_hid_dim` has to be defined when `encoder_hid_dim_type` is set to {encoder_hid_dim_type}." + ) + + if encoder_hid_dim_type == "text_proj": + self.encoder_hid_proj = nn.Linear(encoder_hid_dim, cross_attention_dim) + elif encoder_hid_dim_type == "text_image_proj": + # image_embed_dim DOESN'T have to be `cross_attention_dim`. To not clutter the __init__ too much + # they are set to `cross_attention_dim` here as this is exactly the required dimension for the currently only use + # case when `addition_embed_type == "text_image_proj"` (Kandinsky 2.1)` + self.encoder_hid_proj = TextImageProjection( + text_embed_dim=encoder_hid_dim, + image_embed_dim=cross_attention_dim, + cross_attention_dim=cross_attention_dim, + ) + elif encoder_hid_dim_type == "image_proj": + # Kandinsky 2.2 + self.encoder_hid_proj = ImageProjection( + image_embed_dim=encoder_hid_dim, + cross_attention_dim=cross_attention_dim, + ) + elif encoder_hid_dim_type is not None: + raise ValueError( + f"encoder_hid_dim_type: {encoder_hid_dim_type} must be None, 'text_proj' or 'text_image_proj'." 
+ ) + else: + self.encoder_hid_proj = None + + def _set_class_embedding( + self, + class_embed_type: Optional[str], + act_fn: str, + num_class_embeds: Optional[int], + projection_class_embeddings_input_dim: Optional[int], + time_embed_dim: int, + timestep_input_dim: int, + ): + if class_embed_type is None and num_class_embeds is not None: + self.class_embedding = nn.Embedding(num_class_embeds, time_embed_dim) + elif class_embed_type == "timestep": + self.class_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim, act_fn=act_fn) + elif class_embed_type == "identity": + self.class_embedding = nn.Identity(time_embed_dim, time_embed_dim) + elif class_embed_type == "projection": + if projection_class_embeddings_input_dim is None: + raise ValueError( + "`class_embed_type`: 'projection' requires `projection_class_embeddings_input_dim` be set" + ) + # The projection `class_embed_type` is the same as the timestep `class_embed_type` except + # 1. the `class_labels` inputs are not first converted to sinusoidal embeddings + # 2. it projects from an arbitrary input dimension. + # + # Note that `TimestepEmbedding` is quite general, being mainly linear layers and activations. + # When used for embedding actual timesteps, the timesteps are first converted to sinusoidal embeddings. + # As a result, `TimestepEmbedding` can be passed arbitrary vectors. + self.class_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim) + elif class_embed_type == "simple_projection": + if projection_class_embeddings_input_dim is None: + raise ValueError( + "`class_embed_type`: 'simple_projection' requires `projection_class_embeddings_input_dim` be set" + ) + self.class_embedding = nn.Linear(projection_class_embeddings_input_dim, time_embed_dim) + else: + self.class_embedding = None + + def _set_add_embedding( + self, + addition_embed_type: str, + addition_embed_type_num_heads: int, + addition_time_embed_dim: Optional[int], + flip_sin_to_cos: bool, + freq_shift: float, + cross_attention_dim: Optional[int], + encoder_hid_dim: Optional[int], + projection_class_embeddings_input_dim: Optional[int], + time_embed_dim: int, + ): + if addition_embed_type == "text": + if encoder_hid_dim is not None: + text_time_embedding_from_dim = encoder_hid_dim + else: + text_time_embedding_from_dim = cross_attention_dim + + self.add_embedding = TextTimeEmbedding( + text_time_embedding_from_dim, time_embed_dim, num_heads=addition_embed_type_num_heads + ) + elif addition_embed_type == "text_image": + # text_embed_dim and image_embed_dim DON'T have to be `cross_attention_dim`. 
To not clutter the __init__ too much + # they are set to `cross_attention_dim` here as this is exactly the required dimension for the currently only use + # case when `addition_embed_type == "text_image"` (Kandinsky 2.1)` + self.add_embedding = TextImageTimeEmbedding( + text_embed_dim=cross_attention_dim, image_embed_dim=cross_attention_dim, time_embed_dim=time_embed_dim + ) + elif addition_embed_type == "text_time": + self.add_time_proj = Timesteps(addition_time_embed_dim, flip_sin_to_cos, freq_shift) + self.add_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim) + elif addition_embed_type == "image": + # Kandinsky 2.2 + self.add_embedding = ImageTimeEmbedding(image_embed_dim=encoder_hid_dim, time_embed_dim=time_embed_dim) + elif addition_embed_type == "image_hint": + # Kandinsky 2.2 ControlNet + self.add_embedding = ImageHintTimeEmbedding(image_embed_dim=encoder_hid_dim, time_embed_dim=time_embed_dim) + elif addition_embed_type is not None: + raise ValueError(f"addition_embed_type: {addition_embed_type} must be None, 'text' or 'text_image'.") + + def _set_pos_net_if_use_gligen(self, attention_type: str, cross_attention_dim: int): + if attention_type in ["gated", "gated-text-image"]: + positive_len = 768 + if isinstance(cross_attention_dim, int): + positive_len = cross_attention_dim + elif isinstance(cross_attention_dim, tuple) or isinstance(cross_attention_dim, list): + positive_len = cross_attention_dim[0] + + feature_type = "text-only" if attention_type == "gated" else "text-image" + self.position_net = GLIGENTextBoundingboxProjection( + positive_len=positive_len, out_dim=cross_attention_dim, feature_type=feature_type + ) + + @property + def attn_processors(self) -> Dict[str, AttentionProcessor]: + r""" + Returns: + `dict` of attention processors: A dictionary containing all attention processors used in the model with + indexed by its weight name. + """ + # set recursively + processors = {} + + def fn_recursive_add_processors(name: str, module: torch.nn.Module, processors: Dict[str, AttentionProcessor]): + if hasattr(module, "get_processor"): + processors[f"{name}.processor"] = module.get_processor(return_deprecated_lora=True) + + for sub_name, child in module.named_children(): + fn_recursive_add_processors(f"{name}.{sub_name}", child, processors) + + return processors + + for name, module in self.named_children(): + fn_recursive_add_processors(name, module, processors) + + return processors + + def set_attn_processor(self, processor: Union[AttentionProcessor, Dict[str, AttentionProcessor]]): + r""" + Sets the attention processor to use to compute attention. + + Parameters: + processor (`dict` of `AttentionProcessor` or only `AttentionProcessor`): + The instantiated processor class or a dictionary of processor classes that will be set as the processor + for **all** `Attention` layers. + + If `processor` is a dict, the key needs to define the path to the corresponding cross attention + processor. This is strongly recommended when setting trainable attention processors. + + """ + count = len(self.attn_processors.keys()) + + if isinstance(processor, dict) and len(processor) != count: + raise ValueError( + f"A dict of processors was passed, but the number of processors {len(processor)} does not match the" + f" number of attention layers: {count}. Please make sure to pass {count} processor classes." 
+ ) + + def fn_recursive_attn_processor(name: str, module: torch.nn.Module, processor): + if hasattr(module, "set_processor"): + if not isinstance(processor, dict): + module.set_processor(processor) + else: + module.set_processor(processor.pop(f"{name}.processor")) + + for sub_name, child in module.named_children(): + fn_recursive_attn_processor(f"{name}.{sub_name}", child, processor) + + for name, module in self.named_children(): + fn_recursive_attn_processor(name, module, processor) + + def set_default_attn_processor(self): + """ + Disables custom attention processors and sets the default attention implementation. + """ + if all(proc.__class__ in ADDED_KV_ATTENTION_PROCESSORS for proc in self.attn_processors.values()): + processor = AttnAddedKVProcessor() + elif all(proc.__class__ in CROSS_ATTENTION_PROCESSORS for proc in self.attn_processors.values()): + processor = AttnProcessor() + else: + raise ValueError( + f"Cannot call `set_default_attn_processor` when attention processors are of type {next(iter(self.attn_processors.values()))}" + ) + + self.set_attn_processor(processor) + + def set_attention_slice(self, slice_size: Union[str, int, List[int]] = "auto"): + r""" + Enable sliced attention computation. + + When this option is enabled, the attention module splits the input tensor in slices to compute attention in + several steps. This is useful for saving some memory in exchange for a small decrease in speed. + + Args: + slice_size (`str` or `int` or `list(int)`, *optional*, defaults to `"auto"`): + When `"auto"`, input to the attention heads is halved, so attention is computed in two steps. If + `"max"`, maximum amount of memory is saved by running only one slice at a time. If a number is + provided, uses as many slices as `attention_head_dim // slice_size`. In this case, `attention_head_dim` + must be a multiple of `slice_size`. + """ + sliceable_head_dims = [] + + def fn_recursive_retrieve_sliceable_dims(module: torch.nn.Module): + if hasattr(module, "set_attention_slice"): + sliceable_head_dims.append(module.sliceable_head_dim) + + for child in module.children(): + fn_recursive_retrieve_sliceable_dims(child) + + # retrieve number of attention layers + for module in self.children(): + fn_recursive_retrieve_sliceable_dims(module) + + num_sliceable_layers = len(sliceable_head_dims) + + if slice_size == "auto": + # half the attention head size is usually a good trade-off between + # speed and memory + slice_size = [dim // 2 for dim in sliceable_head_dims] + elif slice_size == "max": + # make smallest slice possible + slice_size = num_sliceable_layers * [1] + + slice_size = num_sliceable_layers * [slice_size] if not isinstance(slice_size, list) else slice_size + + if len(slice_size) != len(sliceable_head_dims): + raise ValueError( + f"You have provided {len(slice_size)}, but {self.config} has {len(sliceable_head_dims)} different" + f" attention layers. Make sure to match `len(slice_size)` to be {len(sliceable_head_dims)}." + ) + + for i in range(len(slice_size)): + size = slice_size[i] + dim = sliceable_head_dims[i] + if size is not None and size > dim: + raise ValueError(f"size {size} has to be smaller or equal to {dim}.") + + # Recursively walk through all the children. 
+ # Any children which exposes the set_attention_slice method + # gets the message + def fn_recursive_set_attention_slice(module: torch.nn.Module, slice_size: List[int]): + if hasattr(module, "set_attention_slice"): + module.set_attention_slice(slice_size.pop()) + + for child in module.children(): + fn_recursive_set_attention_slice(child, slice_size) + + reversed_slice_size = list(reversed(slice_size)) + for module in self.children(): + fn_recursive_set_attention_slice(module, reversed_slice_size) + + def _set_gradient_checkpointing(self, module, value=False): + if hasattr(module, "gradient_checkpointing"): + module.gradient_checkpointing = value + + def enable_freeu(self, s1: float, s2: float, b1: float, b2: float): + r"""Enables the FreeU mechanism from https://arxiv.org/abs/2309.11497. + + The suffixes after the scaling factors represent the stage blocks where they are being applied. + + Please refer to the [official repository](https://github.com/ChenyangSi/FreeU) for combinations of values that + are known to work well for different pipelines such as Stable Diffusion v1, v2, and Stable Diffusion XL. + + Args: + s1 (`float`): + Scaling factor for stage 1 to attenuate the contributions of the skip features. This is done to + mitigate the "oversmoothing effect" in the enhanced denoising process. + s2 (`float`): + Scaling factor for stage 2 to attenuate the contributions of the skip features. This is done to + mitigate the "oversmoothing effect" in the enhanced denoising process. + b1 (`float`): Scaling factor for stage 1 to amplify the contributions of backbone features. + b2 (`float`): Scaling factor for stage 2 to amplify the contributions of backbone features. + """ + for i, upsample_block in enumerate(self.up_blocks): + setattr(upsample_block, "s1", s1) + setattr(upsample_block, "s2", s2) + setattr(upsample_block, "b1", b1) + setattr(upsample_block, "b2", b2) + + def disable_freeu(self): + """Disables the FreeU mechanism.""" + freeu_keys = {"s1", "s2", "b1", "b2"} + for i, upsample_block in enumerate(self.up_blocks): + for k in freeu_keys: + if hasattr(upsample_block, k) or getattr(upsample_block, k, None) is not None: + setattr(upsample_block, k, None) + + def fuse_qkv_projections(self): + """ + Enables fused QKV projections. For self-attention modules, all projection matrices (i.e., query, key, value) + are fused. For cross-attention modules, key and value projection matrices are fused. + + + + This API is 🧪 experimental. + + + """ + self.original_attn_processors = None + + for _, attn_processor in self.attn_processors.items(): + if "Added" in str(attn_processor.__class__.__name__): + raise ValueError("`fuse_qkv_projections()` is not supported for models having added KV projections.") + + self.original_attn_processors = self.attn_processors + + for module in self.modules(): + if isinstance(module, Attention): + module.fuse_projections(fuse=True) + + def unfuse_qkv_projections(self): + """Disables the fused QKV projection if enabled. + + + + This API is 🧪 experimental. + + + + """ + if self.original_attn_processors is not None: + self.set_attn_processor(self.original_attn_processors) + + def unload_lora(self): + """Unloads LoRA weights.""" + deprecate( + "unload_lora", + "0.28.0", + "Calling `unload_lora()` is deprecated and will be removed in a future version. 
Please install `peft` and then call `disable_adapters().", + ) + for module in self.modules(): + if hasattr(module, "set_lora_layer"): + module.set_lora_layer(None) + + def get_time_embed( + self, sample: torch.Tensor, timestep: Union[torch.Tensor, float, int] + ) -> Optional[torch.Tensor]: + timesteps = timestep + if not torch.is_tensor(timesteps): + # TODO: this requires sync between CPU and GPU. So try to pass timesteps as tensors if you can + # This would be a good case for the `match` statement (Python 3.10+) + is_mps = sample.device.type == "mps" + if isinstance(timestep, float): + dtype = torch.float32 if is_mps else torch.float64 + else: + dtype = torch.int32 if is_mps else torch.int64 + timesteps = torch.tensor([timesteps], dtype=dtype, device=sample.device) + elif len(timesteps.shape) == 0: + timesteps = timesteps[None].to(sample.device) + + # broadcast to batch dimension in a way that's compatible with ONNX/Core ML + timesteps = timesteps.expand(sample.shape[0]) + + t_emb = self.time_proj(timesteps) + # `Timesteps` does not contain any weights and will always return f32 tensors + # but time_embedding might actually be running in fp16. so we need to cast here. + # there might be better ways to encapsulate this. + t_emb = t_emb.to(dtype=sample.dtype) + return t_emb + + def get_class_embed(self, sample: torch.Tensor, class_labels: Optional[torch.Tensor]) -> Optional[torch.Tensor]: + class_emb = None + if self.class_embedding is not None: + if class_labels is None: + raise ValueError("class_labels should be provided when num_class_embeds > 0") + + if self.config.class_embed_type == "timestep": + class_labels = self.time_proj(class_labels) + + # `Timesteps` does not contain any weights and will always return f32 tensors + # there might be better ways to encapsulate this. 
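+ # cast back to the sample dtype so the class embedding runs in the model's precision (e.g. fp16)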
+ class_labels = class_labels.to(dtype=sample.dtype) + + class_emb = self.class_embedding(class_labels).to(dtype=sample.dtype) + return class_emb + + def get_aug_embed( + self, emb: torch.Tensor, encoder_hidden_states: torch.Tensor, added_cond_kwargs: Dict[str, Any] + ) -> Optional[torch.Tensor]: + aug_emb = None + if self.config.addition_embed_type == "text": + aug_emb = self.add_embedding(encoder_hidden_states) + elif self.config.addition_embed_type == "text_image": + # Kandinsky 2.1 - style + if "image_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_image' which requires the keyword argument `image_embeds` to be passed in `added_cond_kwargs`" + ) + + image_embs = added_cond_kwargs.get("image_embeds") + text_embs = added_cond_kwargs.get("text_embeds", encoder_hidden_states) + aug_emb = self.add_embedding(text_embs, image_embs) + elif self.config.addition_embed_type == "text_time": + # SDXL - style + if "text_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_time' which requires the keyword argument `text_embeds` to be passed in `added_cond_kwargs`" + ) + text_embeds = added_cond_kwargs.get("text_embeds") + if "time_ids" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_time' which requires the keyword argument `time_ids` to be passed in `added_cond_kwargs`" + ) + time_ids = added_cond_kwargs.get("time_ids") + time_embeds = self.add_time_proj(time_ids.flatten()) + time_embeds = time_embeds.reshape((text_embeds.shape[0], -1)) + add_embeds = torch.concat([text_embeds, time_embeds], dim=-1) + add_embeds = add_embeds.to(emb.dtype) + aug_emb = self.add_embedding(add_embeds) + elif self.config.addition_embed_type == "image": + # Kandinsky 2.2 - style + if "image_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'image' which requires the keyword argument `image_embeds` to be passed in `added_cond_kwargs`" + ) + image_embs = added_cond_kwargs.get("image_embeds") + aug_emb = self.add_embedding(image_embs) + elif self.config.addition_embed_type == "image_hint": + # Kandinsky 2.2 - style + if "image_embeds" not in added_cond_kwargs or "hint" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'image_hint' which requires the keyword arguments `image_embeds` and `hint` to be passed in `added_cond_kwargs`" + ) + image_embs = added_cond_kwargs.get("image_embeds") + hint = added_cond_kwargs.get("hint") + aug_emb = self.add_embedding(image_embs, hint) + return aug_emb + + def process_encoder_hidden_states( + self, encoder_hidden_states: torch.Tensor, added_cond_kwargs: Dict[str, Any] + ) -> torch.Tensor: + if self.encoder_hid_proj is not None and self.config.encoder_hid_dim_type == "text_proj": + encoder_hidden_states = self.encoder_hid_proj(encoder_hidden_states) + elif self.encoder_hid_proj is not None and self.config.encoder_hid_dim_type == "text_image_proj": + # Kandinsky 2.1 - style + if "image_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `encoder_hid_dim_type` set to 'text_image_proj' which requires the keyword argument `image_embeds` to be passed in `added_conditions`" + ) + + image_embeds = added_cond_kwargs.get("image_embeds") + encoder_hidden_states = 
self.encoder_hid_proj(encoder_hidden_states, image_embeds) + elif self.encoder_hid_proj is not None and self.config.encoder_hid_dim_type == "image_proj": + # Kandinsky 2.2 - style + if "image_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `encoder_hid_dim_type` set to 'image_proj' which requires the keyword argument `image_embeds` to be passed in `added_conditions`" + ) + image_embeds = added_cond_kwargs.get("image_embeds") + encoder_hidden_states = self.encoder_hid_proj(image_embeds) + elif self.encoder_hid_proj is not None and self.config.encoder_hid_dim_type == "ip_image_proj": + if "image_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `encoder_hid_dim_type` set to 'ip_image_proj' which requires the keyword argument `image_embeds` to be passed in `added_conditions`" + ) + image_embeds = added_cond_kwargs.get("image_embeds") + image_embeds = self.encoder_hid_proj(image_embeds) + encoder_hidden_states = (encoder_hidden_states, image_embeds) + return encoder_hidden_states + + def forward( + self, + sample: torch.FloatTensor, + timestep: Union[torch.Tensor, float, int], + encoder_hidden_states: torch.Tensor, + class_labels: Optional[torch.Tensor] = None, + timestep_cond: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + added_cond_kwargs: Optional[Dict[str, torch.Tensor]] = None, + down_block_additional_residuals: Optional[Tuple[torch.Tensor]] = None, + mid_block_additional_residual: Optional[torch.Tensor] = None, + down_intrablock_additional_residuals: Optional[Tuple[torch.Tensor]] = None, + encoder_attention_mask: Optional[torch.Tensor] = None, + return_dict: bool = True, + down_block_add_samples: Optional[Tuple[torch.Tensor]] = None, + mid_block_add_sample: Optional[Tuple[torch.Tensor]] = None, + up_block_add_samples: Optional[Tuple[torch.Tensor]] = None, + ) -> Union[UNet2DConditionOutput, Tuple]: + r""" + The [`UNet2DConditionModel`] forward method. + + Args: + sample (`torch.FloatTensor`): + The noisy input tensor with the following shape `(batch, channel, height, width)`. + timestep (`torch.FloatTensor` or `float` or `int`): The number of timesteps to denoise an input. + encoder_hidden_states (`torch.FloatTensor`): + The encoder hidden states with shape `(batch, sequence_length, feature_dim)`. + class_labels (`torch.Tensor`, *optional*, defaults to `None`): + Optional class labels for conditioning. Their embeddings will be summed with the timestep embeddings. + timestep_cond: (`torch.Tensor`, *optional*, defaults to `None`): + Conditional embeddings for timestep. If provided, the embeddings will be summed with the samples passed + through the `self.time_embedding` layer to obtain the timestep embeddings. + attention_mask (`torch.Tensor`, *optional*, defaults to `None`): + An attention mask of shape `(batch, key_tokens)` is applied to `encoder_hidden_states`. If `1` the mask + is kept, otherwise if `0` it is discarded. Mask will be converted into a bias, which adds large + negative values to the attention scores corresponding to "discard" tokens. + cross_attention_kwargs (`dict`, *optional*): + A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under + `self.processor` in + [diffusers.models.attention_processor](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py). 
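+ down_block_add_samples (`tuple` of `torch.Tensor`, *optional*):
+ Extra per-block residual features (as produced by a BrushNet-style side model) that are added to the down block inputs when provided; not part of the stock diffusers UNet.
+ mid_block_add_sample (`torch.Tensor`, *optional*):
+ Extra residual feature added to the mid block output when provided.
+ up_block_add_samples (`tuple` of `torch.Tensor`, *optional*):
+ Extra per-block residual features added to the up block inputs when provided.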
+ added_cond_kwargs: (`dict`, *optional*): + A kwargs dictionary containing additional embeddings that if specified are added to the embeddings that + are passed along to the UNet blocks. + down_block_additional_residuals: (`tuple` of `torch.Tensor`, *optional*): + A tuple of tensors that if specified are added to the residuals of down unet blocks. + mid_block_additional_residual: (`torch.Tensor`, *optional*): + A tensor that if specified is added to the residual of the middle unet block. + down_intrablock_additional_residuals (`tuple` of `torch.Tensor`, *optional*): + additional residuals to be added within UNet down blocks, for example from T2I-Adapter side model(s) + encoder_attention_mask (`torch.Tensor`): + A cross-attention mask of shape `(batch, sequence_length)` is applied to `encoder_hidden_states`. If + `True` the mask is kept, otherwise if `False` it is discarded. Mask will be converted into a bias, + which adds large negative values to the attention scores corresponding to "discard" tokens. + return_dict (`bool`, *optional*, defaults to `True`): + Whether or not to return a [`~models.unets.unet_2d_condition.UNet2DConditionOutput`] instead of a plain + tuple. + + Returns: + [`~models.unets.unet_2d_condition.UNet2DConditionOutput`] or `tuple`: + If `return_dict` is True, an [`~models.unets.unet_2d_condition.UNet2DConditionOutput`] is returned, + otherwise a `tuple` is returned where the first element is the sample tensor. + """ + # By default samples have to be AT least a multiple of the overall upsampling factor. + # The overall upsampling factor is equal to 2 ** (# num of upsampling layers). + # However, the upsampling interpolation output size can be forced to fit any upsampling size + # on the fly if necessary. + default_overall_up_factor = 2 ** self.num_upsamplers + + # upsample size should be forwarded when sample is not a multiple of `default_overall_up_factor` + forward_upsample_size = False + upsample_size = None + + for dim in sample.shape[-2:]: + if dim % default_overall_up_factor != 0: + # Forward upsample size to force interpolation output size. + forward_upsample_size = True + break + + # ensure attention_mask is a bias, and give it a singleton query_tokens dimension + # expects mask of shape: + # [batch, key_tokens] + # adds singleton query_tokens dimension: + # [batch, 1, key_tokens] + # this helps to broadcast it as a bias over attention scores, which will be in one of the following shapes: + # [batch, heads, query_tokens, key_tokens] (e.g. torch sdp attn) + # [batch * heads, query_tokens, key_tokens] (e.g. xformers or classic attn) + if attention_mask is not None: + # assume that mask is expressed as: + # (1 = keep, 0 = discard) + # convert mask into a bias that can be added to attention scores: + # (keep = +0, discard = -10000.0) + attention_mask = (1 - attention_mask.to(sample.dtype)) * -10000.0 + attention_mask = attention_mask.unsqueeze(1) + + # convert encoder_attention_mask to a bias the same way we do for attention_mask + if encoder_attention_mask is not None: + encoder_attention_mask = (1 - encoder_attention_mask.to(sample.dtype)) * -10000.0 + encoder_attention_mask = encoder_attention_mask.unsqueeze(1) + + # 0. center input if necessary + if self.config.center_input_sample: + sample = 2 * sample - 1.0 + + # 1. 
time + t_emb = self.get_time_embed(sample=sample, timestep=timestep) + emb = self.time_embedding(t_emb, timestep_cond) + aug_emb = None + + class_emb = self.get_class_embed(sample=sample, class_labels=class_labels) + if class_emb is not None: + if self.config.class_embeddings_concat: + emb = torch.cat([emb, class_emb], dim=-1) + else: + emb = emb + class_emb + + aug_emb = self.get_aug_embed( + emb=emb, encoder_hidden_states=encoder_hidden_states, added_cond_kwargs=added_cond_kwargs + ) + if self.config.addition_embed_type == "image_hint": + aug_emb, hint = aug_emb + sample = torch.cat([sample, hint], dim=1) + + emb = emb + aug_emb if aug_emb is not None else emb + + if self.time_embed_act is not None: + emb = self.time_embed_act(emb) + + encoder_hidden_states = self.process_encoder_hidden_states( + encoder_hidden_states=encoder_hidden_states, added_cond_kwargs=added_cond_kwargs + ) + + # 2. pre-process + sample = self.conv_in(sample) + + # 2.5 GLIGEN position net + if cross_attention_kwargs is not None and cross_attention_kwargs.get("gligen", None) is not None: + cross_attention_kwargs = cross_attention_kwargs.copy() + gligen_args = cross_attention_kwargs.pop("gligen") + cross_attention_kwargs["gligen"] = {"objs": self.position_net(**gligen_args)} + + # 3. down + # we're popping the `scale` instead of getting it because otherwise `scale` will be propagated + # to the internal blocks and will raise deprecation warnings. this will be confusing for our users. + if cross_attention_kwargs is not None: + cross_attention_kwargs = cross_attention_kwargs.copy() + lora_scale = cross_attention_kwargs.pop("scale", 1.0) + else: + lora_scale = 1.0 + + if USE_PEFT_BACKEND: + # weight the lora layers by setting `lora_scale` for each PEFT layer + scale_lora_layers(self, lora_scale) + + is_controlnet = mid_block_additional_residual is not None and down_block_additional_residuals is not None + # using new arg down_intrablock_additional_residuals for T2I-Adapters, to distinguish from controlnets + is_adapter = down_intrablock_additional_residuals is not None + # maintain backward compatibility for legacy usage, where + # T2I-Adapter and ControlNet both use down_block_additional_residuals arg + # but can only use one or the other + is_brushnet = down_block_add_samples is not None and mid_block_add_sample is not None and up_block_add_samples is not None + if not is_adapter and mid_block_additional_residual is None and down_block_additional_residuals is not None: + deprecate( + "T2I should not use down_block_additional_residuals", + "1.3.0", + "Passing intrablock residual connections with `down_block_additional_residuals` is deprecated \ + and will be removed in diffusers 1.3.0. `down_block_additional_residuals` should only be used \ + for ControlNet. Please make sure use `down_intrablock_additional_residuals` instead. 
", + standard_warn=False, + ) + down_intrablock_additional_residuals = down_block_additional_residuals + is_adapter = True + + down_block_res_samples = (sample,) + + if is_brushnet: + sample = sample + down_block_add_samples.pop(0) + + for downsample_block in self.down_blocks: + if hasattr(downsample_block, "has_cross_attention") and downsample_block.has_cross_attention: + # For t2i-adapter CrossAttnDownBlock2D + additional_residuals = {} + if is_adapter and len(down_intrablock_additional_residuals) > 0: + additional_residuals["additional_residuals"] = down_intrablock_additional_residuals.pop(0) + + i = len(down_block_add_samples) + + if is_brushnet and len(down_block_add_samples) > 0: + additional_residuals["down_block_add_samples"] = [down_block_add_samples.pop(0) + for _ in range( + len(downsample_block.resnets) + (downsample_block.downsamplers != None))] + + sample, res_samples = downsample_block( + hidden_states=sample, + temb=emb, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + encoder_attention_mask=encoder_attention_mask, + **additional_residuals, + ) + else: + additional_residuals = {} + + i = len(down_block_add_samples) + + if is_brushnet and len(down_block_add_samples) > 0: + additional_residuals["down_block_add_samples"] = [down_block_add_samples.pop(0) + for _ in range( + len(downsample_block.resnets) + (downsample_block.downsamplers != None))] + + sample, res_samples = downsample_block(hidden_states=sample, temb=emb, **additional_residuals) + if is_adapter and len(down_intrablock_additional_residuals) > 0: + sample += down_intrablock_additional_residuals.pop(0) + + down_block_res_samples += res_samples + + if is_controlnet: + new_down_block_res_samples = () + + for down_block_res_sample, down_block_additional_residual in zip( + down_block_res_samples, down_block_additional_residuals + ): + down_block_res_sample = down_block_res_sample + down_block_additional_residual + new_down_block_res_samples = new_down_block_res_samples + (down_block_res_sample,) + + down_block_res_samples = new_down_block_res_samples + + # 4. mid + if self.mid_block is not None: + if hasattr(self.mid_block, "has_cross_attention") and self.mid_block.has_cross_attention: + sample = self.mid_block( + sample, + emb, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + encoder_attention_mask=encoder_attention_mask, + ) + else: + sample = self.mid_block(sample, emb) + + # To support T2I-Adapter-XL + if ( + is_adapter + and len(down_intrablock_additional_residuals) > 0 + and sample.shape == down_intrablock_additional_residuals[0].shape + ): + sample += down_intrablock_additional_residuals.pop(0) + + if is_controlnet: + sample = sample + mid_block_additional_residual + + if is_brushnet: + sample = sample + mid_block_add_sample + + # 5. 
up + for i, upsample_block in enumerate(self.up_blocks): + is_final_block = i == len(self.up_blocks) - 1 + + res_samples = down_block_res_samples[-len(upsample_block.resnets):] + down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)] + + # if we have not reached the final block and need to forward the + # upsample size, we do it here + if not is_final_block and forward_upsample_size: + upsample_size = down_block_res_samples[-1].shape[2:] + + if hasattr(upsample_block, "has_cross_attention") and upsample_block.has_cross_attention: + additional_residuals = {} + + i = len(up_block_add_samples) + + if is_brushnet and len(up_block_add_samples) > 0: + additional_residuals["up_block_add_samples"] = [up_block_add_samples.pop(0) + for _ in range( + len(upsample_block.resnets) + (upsample_block.upsamplers != None))] + + sample = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + upsample_size=upsample_size, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + **additional_residuals, + ) + else: + additional_residuals = {} + + i = len(up_block_add_samples) + + if is_brushnet and len(up_block_add_samples) > 0: + additional_residuals["up_block_add_samples"] = [up_block_add_samples.pop(0) + for _ in range( + len(upsample_block.resnets) + (upsample_block.upsamplers != None))] + + sample = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + upsample_size=upsample_size, + **additional_residuals, + ) + + # 6. post-process + if self.conv_norm_out: + sample = self.conv_norm_out(sample) + sample = self.conv_act(sample) + sample = self.conv_out(sample) + + if USE_PEFT_BACKEND: + # remove `lora_scale` from each PEFT layer + unscale_lora_layers(self, lora_scale) + + if not return_dict: + return (sample,) + + return UNet2DConditionOutput(sample=sample) diff --git a/ComfyUI-Easy-Use/py/config.py b/ComfyUI-Easy-Use/py/config.py new file mode 100644 index 0000000000000000000000000000000000000000..5e2f981174601ceef30dc12d298eab6143d4c1fc --- /dev/null +++ b/ComfyUI-Easy-Use/py/config.py @@ -0,0 +1,360 @@ +import os +import folder_paths +from pathlib import Path + +BASE_RESOLUTIONS = [ + ("width", "height"), + (512, 512), + (512, 768), + (576, 1024), + (768, 512), + (768, 768), + (768, 1024), + (768, 1280), + (768, 1344), + (768, 1536), + (816, 1920), + (832, 1152), + (832, 1216), + (896, 1152), + (896, 1088), + (1024, 1024), + (1024, 576), + (1024, 768), + (1080, 1920), + (1440, 2560), + (1088, 896), + (1216, 832), + (1152, 832), + (1152, 896), + (1280, 768), + (1344, 768), + (1536, 640), + (1536, 768), + (1920, 816), + (1920, 1080), + (2560, 1440), +] +MAX_SEED_NUM = 1125899906842624 + + +RESOURCES_DIR = os.path.join(Path(__file__).parent.parent, "resources") + +# inpaint +INPAINT_DIR = os.path.join(folder_paths.models_dir, "inpaint") +FOOOCUS_STYLES_DIR = os.path.join(Path(__file__).parent.parent, "styles") +FOOOCUS_STYLES_SAMPLES = 'https://raw.githubusercontent.com/lllyasviel/Fooocus/main/sdxl_styles/samples/' +FOOOCUS_INPAINT_HEAD = { + "fooocus_inpaint_head": { + "model_url": "https://huggingface.co/lllyasviel/fooocus_inpaint/resolve/main/fooocus_inpaint_head.pth" + } +} +FOOOCUS_INPAINT_PATCH = { + "inpaint_v26 (1.32GB)": { + "model_url": "https://huggingface.co/lllyasviel/fooocus_inpaint/resolve/main/inpaint_v26.fooocus.patch" + }, + "inpaint_v25 (2.58GB)": { + "model_url": 
"https://huggingface.co/lllyasviel/fooocus_inpaint/resolve/main/inpaint_v25.fooocus.patch" + }, + "inpaint (1.32GB)": { + "model_url": "https://huggingface.co/lllyasviel/fooocus_inpaint/resolve/main/inpaint.fooocus.patch" + }, +} +BRUSHNET_MODELS = { + "random_mask": { + "sd1": { + "model_url": "https://huggingface.co/Kijai/BrushNet-fp16/resolve/main/brushnet_random_mask_fp16.safetensors" + }, + "sdxl": { + "model_url": "https://huggingface.co/yolain/brushnet/resolve/main/brushnet_random_mask_sdxl.safetensors" + } + }, + "segmentation_mask": { + "sd1": { + "model_url": "https://huggingface.co/Kijai/BrushNet-fp16/resolve/main/brushnet_segmentation_mask_fp16.safetensors" + }, + "sdxl": { + "model_url": "https://huggingface.co/yolain/brushnet/resolve/main/brushnet_segmentation_mask_sdxl.safetensors" + } + } +} +POWERPAINT_MODELS = { + "base_fp16": { + "model_url": "https://huggingface.co/runwayml/stable-diffusion-v1-5/resolve/main/text_encoder/model.fp16.safetensors" + }, + "v2.1": { + "model_url": "https://huggingface.co/JunhaoZhuang/PowerPaint-v2-1/resolve/main/PowerPaint_Brushnet/diffusion_pytorch_model.safetensors", + "clip_url": "https://huggingface.co/JunhaoZhuang/PowerPaint-v2-1/resolve/main/PowerPaint_Brushnet/pytorch_model.bin", + } +} + +# layerDiffuse +LAYER_DIFFUSION_DIR = os.path.join(folder_paths.models_dir, "layer_model") +LAYER_DIFFUSION_VAE = { + "encode": { + "sdxl": { + "model_url": "https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/vae_transparent_encoder.safetensors" + } + }, + "decode": { + "sd1": { + "model_url": "https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_sd15_vae_transparent_decoder.safetensors" + }, + "sdxl": { + "model_url": "https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/vae_transparent_decoder.safetensors" + } + } +} +LAYER_DIFFUSION = { + "Attention Injection": { + "sd1": { + "model_url": "https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_sd15_transparent_attn.safetensors" + }, + "sdxl": { + "model_url": "https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_xl_transparent_attn.safetensors" + }, + }, + "Conv Injection": { + "sdxl": { + "model_url": "https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_xl_transparent_conv.safetensors" + }, + "sd1": { + "model_url": None + } + }, + "Everything": { + "sd1": { + "model_url": "https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_sd15_joint.safetensors" + }, + "sdxl": { + "model_url": None + } + }, + "Foreground": { + "sd1": { + "model_url": "https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_sd15_fg2bg.safetensors" + }, + "sdxl": { + "model_url": "https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_xl_fg2ble.safetensors" + } + }, + "Foreground to Background": { + "sd1": { + "model_url": "https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_sd15_fg2bg.safetensors" + }, + "sdxl": { + "model_url": "https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_xl_fgble2bg.safetensors" + } + }, + "Background": { + "sd1": { + "model_url": "https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_sd15_bg2fg.safetensors" + }, + "sdxl": { + "model_url": "https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_xl_bg2ble.safetensors" + } + }, + "Background to Foreground": { + "sd1": { + "model_url": 
"https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_sd15_bg2fg.safetensors" + }, + "sdxl": { + "model_url": "https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_xl_bgble2fg.safetensors" + } + }, +} + +# IC Light +IC_LIGHT_MODELS = { + "Foreground": { + "sd1": { + "model_url": "https://huggingface.co/huchenlei/IC-Light-ldm/resolve/main/iclight_sd15_fc_unet_ldm.safetensors" + }, + "sdxl": { + "model_url": None + } + }, + "Foreground&Background": { + "sd1": { + "model_url": "https://huggingface.co/huchenlei/IC-Light-ldm/resolve/main/iclight_sd15_fbc_unet_ldm.safetensors" + }, + "sdxl": { + "model_url": None + } + } +} + + +# REMBG +REMBG_DIR = os.path.join(folder_paths.models_dir, "rembg") +REMBG_MODELS = { + "RMBG-1.4": { + "model_url": "https://huggingface.co/briaai/RMBG-1.4/resolve/main/model.pth" + } +} + +#ipadapter +IPADAPTER_DIR = os.path.join(folder_paths.models_dir, "ipadapter") +IPADAPTER_MODELS = { + "LIGHT - SD1.5 only (low strength)": { + "sd15": { + "model_url": "https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter_sd15_light_v11.bin" + }, + "sdxl": { + "model_url": "" + } + }, + "STANDARD (medium strength)": { + "sd15": { + "model_url": "https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter_sd15.safetensors" + }, + "sdxl": { + "model_url": "https://huggingface.co/h94/IP-Adapter/resolve/main/sdxl_models/ip-adapter_sdxl_vit-h.safetensors" + } + }, + "VIT-G (medium strength)": { + "sd15": { + "model_url": "https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter_sd15_vit-G.safetensors" + }, + "sdxl": { + "model_url": "https://huggingface.co/h94/IP-Adapter/resolve/main/sdxl_models/ip-adapter_sdxl.safetensors" + } + }, + "PLUS (high strength)": { + "sd15": { + "model_url": "https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter-plus_sd15.safetensors" + }, + "sdxl": { + "model_url": "https://huggingface.co/h94/IP-Adapter/resolve/main/sdxl_models/ip-adapter-plus_sdxl_vit-h.safetensors" + } + }, + "PLUS (kolors genernal)":{ + "sd15":{ + "model_url":"" + }, + "sdxl":{ + "model_url":"https://huggingface.co/Kwai-Kolors/Kolors-IP-Adapter-Plus/resolve/main/ip_adapter_plus_general.bin" + } + }, + "PLUS FACE (portraits)": { + "sd15": { + "model_url": "https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter-plus-face_sd15.safetensors" + }, + "sdxl": { + "model_url": "https://huggingface.co/h94/IP-Adapter/resolve/main/sdxl_models/ip-adapter-plus-face_sdxl_vit-h.safetensors" + } + }, + "FULL FACE - SD1.5 only (portraits stronger)": { + "sd15": { + "model_url": "https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter-full-face_sd15.safetensors" + }, + "sdxl": { + "model_url": "" + } + }, + "FACEID": { + "sd15": { + "model_url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid_sd15.bin", + "lora_url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid_sd15_lora.safetensors" + }, + "sdxl": { + "model_url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid_sdxl.bin", + "lora_url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid_sdxl_lora.safetensors" + } + }, + "FACEID PLUS - SD1.5 only": { + "sd15": { + "model_url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plus_sd15.bin", + "lora_url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plus_sd15_lora.safetensors" + }, + "sdxl": { + "model_url": "", + 
"lora_url": "" + } + }, + "FACEID PLUS V2": { + "sd15": { + "model_url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plusv2_sd15.bin", + "lora_url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plusv2_sd15_lora.safetensors" + }, + "sdxl": { + "model_url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plusv2_sdxl.bin", + "lora_url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plusv2_sdxl_lora.safetensors" + } + }, + "FACEID PLUS KOLORS":{ + "sd15":{ + + }, + "sdxl":{ + "model_url":"https://huggingface.co/Kwai-Kolors/Kolors-IP-Adapter-FaceID-Plus/resolve/main/ipa-faceid-plus.bin" + } + }, + "FACEID PORTRAIT (style transfer)": { + "sd15": { + "model_url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-portrait-v11_sd15.bin", + }, + "sdxl": { + "model_url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-portrait_sdxl.bin", + } + }, + "FACEID PORTRAIT UNNORM - SDXL only (strong)": { + "sd15": { + "model_url":"" + }, + "sdxl": { + "model_url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-portrait_sdxl_unnorm.bin", + } + }, + "COMPOSITION": { + "sd15": { + "model_url": "https://huggingface.co/ostris/ip-composition-adapter/resolve/main/ip_plus_composition_sd15.safetensors" + }, + "sdxl": { + "model_url": "https://huggingface.co/ostris/ip-composition-adapter/resolve/main/ip_plus_composition_sdxl.safetensors" + } + } +} +IPADAPTER_CLIPVISION_MODELS = { + "clip-vit-large-patch14-336":{ + "model_url": "https://huggingface.co/openai/clip-vit-large-patch14-336/resolve/main/pytorch_model.bin" + }, + "clip-vit-h-14-laion2B-s32B-b79K":{ + "model_url": "https://huggingface.co/laion/CLIP-ViT-H-14-laion2B-s32B-b79K/resolve/main/open_clip_pytorch_model.safetensors" + } +} + +# dynamiCrafter +DYNAMICRAFTER_DIR = os.path.join(folder_paths.models_dir, "dynamicrafter_models") +DYNAMICRAFTER_MODELS = { + "dynamicrafter_unet_512 (2.98GB)": { + "model_url": "https://huggingface.co/ExponentialML/DynamiCrafterUNet/resolve/main/dynamicrafter_unet_512.safetensors", + "vae_url": "https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors", + "clip_url": "https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/text_encoder/model.safetensors", + "clip_vision_url": "https://huggingface.co/laion/CLIP-ViT-H-14-laion2B-s32B-b79K/resolve/main/open_clip_pytorch_model.safetensors", + }, + "dynamicrafter_unet_512_interp (2.98GB)": { + "model_url": "https://huggingface.co/ExponentialML/DynamiCrafterUNet/resolve/main/dynamicrafter_unet_512_interp.safetensors" + }, + "dynamicrafter_unet_1024 (2.98GB)": { + "model_url": "https://huggingface.co/ExponentialML/DynamiCrafterUNet/resolve/main/dynamicrafter_unet_1024.safetensors" + }, + "dynamicrafter_unet_256 (2.98GB)": { + "model_url": "https://huggingface.co/ExponentialML/DynamiCrafterUNet/resolve/main/dynamicrafter_unet_256.safetensors" + }, +} + +#humanParsing +HUMANPARSING_MODELS = { + "parsing_lip": { + "model_url": "https://huggingface.co/levihsu/OOTDiffusion/resolve/main/checkpoints/humanparsing/parsing_lip.onnx", + }, +} + +#mediapipe +MEDIAPIPE_DIR = os.path.join(folder_paths.models_dir, "mediapipe") +MEDIAPIPE_MODELS = { + "selfie_multiclass_256x256": { + "model_url": "https://huggingface.co/yolain/selfie_multiclass_256x256/resolve/main/selfie_multiclass_256x256.tflite" + } +} \ No newline at end 
of file diff --git a/ComfyUI-Easy-Use/py/dit/__init__.py b/ComfyUI-Easy-Use/py/dit/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..329a7347f7cf662fbab366e11bd8f42a2cb14990 --- /dev/null +++ b/ComfyUI-Easy-Use/py/dit/__init__.py @@ -0,0 +1,2 @@ +#credit to city96 for this module +#from https://github.com/city96/ComfyUI_ExtraModels/ \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/dit/config.py b/ComfyUI-Easy-Use/py/dit/config.py new file mode 100644 index 0000000000000000000000000000000000000000..07d94105cccbeeb7ccef21864ea5523509e86b20 --- /dev/null +++ b/ComfyUI-Easy-Use/py/dit/config.py @@ -0,0 +1,120 @@ +""" +List of all DiT model types / settings +""" +sampling_settings = { + "beta_schedule" : "sqrt_linear", + "linear_start" : 0.0001, + "linear_end" : 0.02, + "timesteps" : 1000, +} + +dit_conf = { + "XL/2": { # DiT_XL_2 + "unet_config": { + "depth" : 28, + "num_heads" : 16, + "patch_size" : 2, + "hidden_size" : 1152, + }, + "sampling_settings" : sampling_settings, + }, + "XL/4": { # DiT_XL_4 + "unet_config": { + "depth" : 28, + "num_heads" : 16, + "patch_size" : 4, + "hidden_size" : 1152, + }, + "sampling_settings" : sampling_settings, + }, + "XL/8": { # DiT_XL_8 + "unet_config": { + "depth" : 28, + "num_heads" : 16, + "patch_size" : 8, + "hidden_size" : 1152, + }, + "sampling_settings" : sampling_settings, + }, + "L/2": { # DiT_L_2 + "unet_config": { + "depth" : 24, + "num_heads" : 16, + "patch_size" : 2, + "hidden_size" : 1024, + }, + "sampling_settings" : sampling_settings, + }, + "L/4": { # DiT_L_4 + "unet_config": { + "depth" : 24, + "num_heads" : 16, + "patch_size" : 4, + "hidden_size" : 1024, + }, + "sampling_settings" : sampling_settings, + }, + "L/8": { # DiT_L_8 + "unet_config": { + "depth" : 24, + "num_heads" : 16, + "patch_size" : 8, + "hidden_size" : 1024, + }, + "sampling_settings" : sampling_settings, + }, + "B/2": { # DiT_B_2 + "unet_config": { + "depth" : 12, + "num_heads" : 12, + "patch_size" : 2, + "hidden_size" : 768, + }, + "sampling_settings" : sampling_settings, + }, + "B/4": { # DiT_B_4 + "unet_config": { + "depth" : 12, + "num_heads" : 12, + "patch_size" : 4, + "hidden_size" : 768, + }, + "sampling_settings" : sampling_settings, + }, + "B/8": { # DiT_B_8 + "unet_config": { + "depth" : 12, + "num_heads" : 12, + "patch_size" : 8, + "hidden_size" : 768, + }, + "sampling_settings" : sampling_settings, + }, + "S/2": { # DiT_S_2 + "unet_config": { + "depth" : 12, + "num_heads" : 6, + "patch_size" : 2, + "hidden_size" : 384, + }, + "sampling_settings" : sampling_settings, + }, + "S/4": { # DiT_S_4 + "unet_config": { + "depth" : 12, + "num_heads" : 6, + "patch_size" : 4, + "hidden_size" : 384, + }, + "sampling_settings" : sampling_settings, + }, + "S/8": { # DiT_S_8 + "unet_config": { + "depth" : 12, + "num_heads" : 6, + "patch_size" : 8, + "hidden_size" : 384, + }, + "sampling_settings" : sampling_settings, + }, +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/dit/pixArt/LICENSE-Pixart b/ComfyUI-Easy-Use/py/dit/pixArt/LICENSE-Pixart new file mode 100644 index 0000000000000000000000000000000000000000..29ebfa545f5580919a4e884d7014d7a3eb2df762 --- /dev/null +++ b/ComfyUI-Easy-Use/py/dit/pixArt/LICENSE-Pixart @@ -0,0 +1,661 @@ + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. 
+ + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. 
+ + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. 
This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. 
This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. 
+ + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. 
+ + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. 
+ + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. 
If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. + + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 
+ + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published + by the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +<https://www.gnu.org/licenses/>.
\ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/dit/pixArt/config.py b/ComfyUI-Easy-Use/py/dit/pixArt/config.py new file mode 100644 index 0000000000000000000000000000000000000000..47f521a7162941286852ecd7f2537116fc07210a --- /dev/null +++ b/ComfyUI-Easy-Use/py/dit/pixArt/config.py @@ -0,0 +1,139 @@ +""" +List of all PixArt model types / settings +""" +sampling_settings = { + "beta_schedule" : "sqrt_linear", + "linear_start" : 0.0001, + "linear_end" : 0.02, + "timesteps" : 1000, +} + +pixart_conf = { + "PixArtMS_XL_2": { # models/PixArtMS + "target": "PixArtMS", + "unet_config": { + "input_size" : 1024//8, + "depth" : 28, + "num_heads" : 16, + "patch_size" : 2, + "hidden_size" : 1152, + "pe_interpolation": 2, + }, + "sampling_settings" : sampling_settings, + }, + "PixArtMS_Sigma_XL_2": { + "target": "PixArtMSSigma", + "unet_config": { + "input_size" : 1024//8, + "token_num" : 300, + "depth" : 28, + "num_heads" : 16, + "patch_size" : 2, + "hidden_size" : 1152, + "micro_condition": False, + "pe_interpolation": 2, + "model_max_length": 300, + }, + "sampling_settings" : sampling_settings, + }, + "PixArtMS_Sigma_XL_2_900M": { + "target": "PixArtMSSigma", + "unet_config": { + "input_size": 1024 // 8, + "token_num": 300, + "depth": 42, + "num_heads": 16, + "patch_size": 2, + "hidden_size": 1152, + "micro_condition": False, + "pe_interpolation": 2, + "model_max_length": 300, + }, + "sampling_settings": sampling_settings, + }, + "PixArtMS_Sigma_XL_2_2K": { + "target": "PixArtMSSigma", + "unet_config": { + "input_size" : 2048//8, + "token_num" : 300, + "depth" : 28, + "num_heads" : 16, + "patch_size" : 2, + "hidden_size" : 1152, + "micro_condition": False, + "pe_interpolation": 4, + "model_max_length": 300, + }, + "sampling_settings" : sampling_settings, + }, + "PixArt_XL_2": { # models/PixArt + "target": "PixArt", + "unet_config": { + "input_size" : 512//8, + "token_num" : 120, + "depth" : 28, + "num_heads" : 16, + "patch_size" : 2, + "hidden_size" : 1152, + "pe_interpolation": 1, + }, + "sampling_settings" : sampling_settings, + }, +} + +pixart_conf.update({ # controlnet models + "ControlPixArtHalf": { + "target": "ControlPixArtHalf", + "unet_config": pixart_conf["PixArt_XL_2"]["unet_config"], + "sampling_settings": pixart_conf["PixArt_XL_2"]["sampling_settings"], + }, + "ControlPixArtMSHalf": { + "target": "ControlPixArtMSHalf", + "unet_config": pixart_conf["PixArtMS_XL_2"]["unet_config"], + "sampling_settings": pixart_conf["PixArtMS_XL_2"]["sampling_settings"], + } +}) + +pixart_res = { + "PixArtMS_XL_2": { # models/PixArtMS 1024x1024 + '0.25': [512, 2048], '0.26': [512, 1984], '0.27': [512, 1920], '0.28': [512, 1856], + '0.32': [576, 1792], '0.33': [576, 1728], '0.35': [576, 1664], '0.40': [640, 1600], + '0.42': [640, 1536], '0.48': [704, 1472], '0.50': [704, 1408], '0.52': [704, 1344], + '0.57': [768, 1344], '0.60': [768, 1280], '0.68': [832, 1216], '0.72': [832, 1152], + '0.78': [896, 1152], '0.82': [896, 1088], '0.88': [960, 1088], '0.94': [960, 1024], + '1.00': [1024,1024], '1.07': [1024, 960], '1.13': [1088, 960], '1.21': [1088, 896], + '1.29': [1152, 896], '1.38': [1152, 832], '1.46': [1216, 832], '1.67': [1280, 768], + '1.75': [1344, 768], '2.00': [1408, 704], '2.09': [1472, 704], '2.40': [1536, 640], + '2.50': [1600, 640], '2.89': [1664, 576], '3.00': [1728, 576], '3.11': [1792, 576], + '3.62': [1856, 512], '3.75': [1920, 512], '3.88': [1984, 512], '4.00': [2048, 512], + }, + "PixArt_XL_2": { # models/PixArt 512x512 + '0.25': [256,1024], '0.26': [256, 992], '0.27': [256, 
960], '0.28': [256, 928], + '0.32': [288, 896], '0.33': [288, 864], '0.35': [288, 832], '0.40': [320, 800], + '0.42': [320, 768], '0.48': [352, 736], '0.50': [352, 704], '0.52': [352, 672], + '0.57': [384, 672], '0.60': [384, 640], '0.68': [416, 608], '0.72': [416, 576], + '0.78': [448, 576], '0.82': [448, 544], '0.88': [480, 544], '0.94': [480, 512], + '1.00': [512, 512], '1.07': [512, 480], '1.13': [544, 480], '1.21': [544, 448], + '1.29': [576, 448], '1.38': [576, 416], '1.46': [608, 416], '1.67': [640, 384], + '1.75': [672, 384], '2.00': [704, 352], '2.09': [736, 352], '2.40': [768, 320], + '2.50': [800, 320], '2.89': [832, 288], '3.00': [864, 288], '3.11': [896, 288], + '3.62': [928, 256], '3.75': [960, 256], '3.88': [992, 256], '4.00': [1024,256] + }, + "PixArtMS_Sigma_XL_2_2K": { + '0.25': [1024, 4096], '0.26': [1024, 3968], '0.27': [1024, 3840], '0.28': [1024, 3712], + '0.32': [1152, 3584], '0.33': [1152, 3456], '0.35': [1152, 3328], '0.40': [1280, 3200], + '0.42': [1280, 3072], '0.48': [1408, 2944], '0.50': [1408, 2816], '0.52': [1408, 2688], + '0.57': [1536, 2688], '0.60': [1536, 2560], '0.68': [1664, 2432], '0.72': [1664, 2304], + '0.78': [1792, 2304], '0.82': [1792, 2176], '0.88': [1920, 2176], '0.94': [1920, 2048], + '1.00': [2048, 2048], '1.07': [2048, 1920], '1.13': [2176, 1920], '1.21': [2176, 1792], + '1.29': [2304, 1792], '1.38': [2304, 1664], '1.46': [2432, 1664], '1.67': [2560, 1536], + '1.75': [2688, 1536], '2.00': [2816, 1408], '2.09': [2944, 1408], '2.40': [3072, 1280], + '2.50': [3200, 1280], '2.89': [3328, 1152], '3.00': [3456, 1152], '3.11': [3584, 1152], + '3.62': [3712, 1024], '3.75': [3840, 1024], '3.88': [3968, 1024], '4.00': [4096, 1024] + } +} +# These should be the same +pixart_res.update({ + "PixArtMS_Sigma_XL_2": pixart_res["PixArtMS_XL_2"], + "PixArtMS_Sigma_XL_2_512": pixart_res["PixArt_XL_2"], +}) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/dit/pixArt/diffusers_convert.py b/ComfyUI-Easy-Use/py/dit/pixArt/diffusers_convert.py new file mode 100644 index 0000000000000000000000000000000000000000..b5ca0ca07f0226b5c8d06500560c88588dff3fb7 --- /dev/null +++ b/ComfyUI-Easy-Use/py/dit/pixArt/diffusers_convert.py @@ -0,0 +1,216 @@ +# For using the diffusers format weights +# Based on the original ComfyUI function + +# https://github.com/PixArt-alpha/PixArt-alpha/blob/master/tools/convert_pixart_alpha_to_diffusers.py +import torch + +conversion_map_ms = [ # for multi_scale_train (MS) + # Resolution + ("csize_embedder.mlp.0.weight", "adaln_single.emb.resolution_embedder.linear_1.weight"), + ("csize_embedder.mlp.0.bias", "adaln_single.emb.resolution_embedder.linear_1.bias"), + ("csize_embedder.mlp.2.weight", "adaln_single.emb.resolution_embedder.linear_2.weight"), + ("csize_embedder.mlp.2.bias", "adaln_single.emb.resolution_embedder.linear_2.bias"), + # Aspect ratio + ("ar_embedder.mlp.0.weight", "adaln_single.emb.aspect_ratio_embedder.linear_1.weight"), + ("ar_embedder.mlp.0.bias", "adaln_single.emb.aspect_ratio_embedder.linear_1.bias"), + ("ar_embedder.mlp.2.weight", "adaln_single.emb.aspect_ratio_embedder.linear_2.weight"), + ("ar_embedder.mlp.2.bias", "adaln_single.emb.aspect_ratio_embedder.linear_2.bias"), +] + + +def get_depth(state_dict): + return sum(key.endswith('.attn1.to_k.bias') for key in state_dict.keys()) + + +def get_lora_depth(state_dict): + return sum(key.endswith('.attn1.to_k.lora_A.weight') for key in state_dict.keys()) + + +def get_conversion_map(state_dict): + conversion_map = [ # main SD conversion map (PixArt reference, HF 
Diffusers) + # Patch embeddings + ("x_embedder.proj.weight", "pos_embed.proj.weight"), + ("x_embedder.proj.bias", "pos_embed.proj.bias"), + # Caption projection + ("y_embedder.y_embedding", "caption_projection.y_embedding"), + ("y_embedder.y_proj.fc1.weight", "caption_projection.linear_1.weight"), + ("y_embedder.y_proj.fc1.bias", "caption_projection.linear_1.bias"), + ("y_embedder.y_proj.fc2.weight", "caption_projection.linear_2.weight"), + ("y_embedder.y_proj.fc2.bias", "caption_projection.linear_2.bias"), + # AdaLN-single LN + ("t_embedder.mlp.0.weight", "adaln_single.emb.timestep_embedder.linear_1.weight"), + ("t_embedder.mlp.0.bias", "adaln_single.emb.timestep_embedder.linear_1.bias"), + ("t_embedder.mlp.2.weight", "adaln_single.emb.timestep_embedder.linear_2.weight"), + ("t_embedder.mlp.2.bias", "adaln_single.emb.timestep_embedder.linear_2.bias"), + # Shared norm + ("t_block.1.weight", "adaln_single.linear.weight"), + ("t_block.1.bias", "adaln_single.linear.bias"), + # Final block + ("final_layer.linear.weight", "proj_out.weight"), + ("final_layer.linear.bias", "proj_out.bias"), + ("final_layer.scale_shift_table", "scale_shift_table"), + ] + + # Add actual transformer blocks + for depth in range(get_depth(state_dict)): + # Transformer blocks + conversion_map += [ + (f"blocks.{depth}.scale_shift_table", f"transformer_blocks.{depth}.scale_shift_table"), + # Projection + (f"blocks.{depth}.attn.proj.weight", f"transformer_blocks.{depth}.attn1.to_out.0.weight"), + (f"blocks.{depth}.attn.proj.bias", f"transformer_blocks.{depth}.attn1.to_out.0.bias"), + # Feed-forward + (f"blocks.{depth}.mlp.fc1.weight", f"transformer_blocks.{depth}.ff.net.0.proj.weight"), + (f"blocks.{depth}.mlp.fc1.bias", f"transformer_blocks.{depth}.ff.net.0.proj.bias"), + (f"blocks.{depth}.mlp.fc2.weight", f"transformer_blocks.{depth}.ff.net.2.weight"), + (f"blocks.{depth}.mlp.fc2.bias", f"transformer_blocks.{depth}.ff.net.2.bias"), + # Cross-attention (proj) + (f"blocks.{depth}.cross_attn.proj.weight", f"transformer_blocks.{depth}.attn2.to_out.0.weight"), + (f"blocks.{depth}.cross_attn.proj.bias", f"transformer_blocks.{depth}.attn2.to_out.0.bias"), + ] + return conversion_map + + +def find_prefix(state_dict, target_key): + prefix = "" + for k in state_dict.keys(): + if k.endswith(target_key): + prefix = k.split(target_key)[0] + break + return prefix + + +def convert_state_dict(state_dict): + if "adaln_single.emb.resolution_embedder.linear_1.weight" in state_dict.keys(): + cmap = get_conversion_map(state_dict) + conversion_map_ms + else: + cmap = get_conversion_map(state_dict) + + missing = [k for k, v in cmap if v not in state_dict] + new_state_dict = {k: state_dict[v] for k, v in cmap if k not in missing} + matched = list(v for k, v in cmap if v in state_dict.keys()) + + for depth in range(get_depth(state_dict)): + for wb in ["weight", "bias"]: + # Self Attention + key = lambda a: f"transformer_blocks.{depth}.attn1.to_{a}.{wb}" + new_state_dict[f"blocks.{depth}.attn.qkv.{wb}"] = torch.cat(( + state_dict[key('q')], state_dict[key('k')], state_dict[key('v')] + ), dim=0) + matched += [key('q'), key('k'), key('v')] + + # Cross-attention (linear) + key = lambda a: f"transformer_blocks.{depth}.attn2.to_{a}.{wb}" + new_state_dict[f"blocks.{depth}.cross_attn.q_linear.{wb}"] = state_dict[key('q')] + new_state_dict[f"blocks.{depth}.cross_attn.kv_linear.{wb}"] = torch.cat(( + state_dict[key('k')], state_dict[key('v')] + ), dim=0) + matched += [key('q'), key('k'), key('v')] + + if len(matched) < len(state_dict): + print(f"PixArt: 
UNET conversion has leftover keys! ({len(matched)} vs {len(state_dict)})") + print(list(set(state_dict.keys()) - set(matched))) + + if len(missing) > 0: + print(f"PixArt: UNET conversion has missing keys!") + print(missing) + + return new_state_dict + + +# Same as above but for LoRA weights: +def convert_lora_state_dict(state_dict, peft=True): + # koyha + rep_ak = lambda x: x.replace(".weight", ".lora_down.weight") + rep_bk = lambda x: x.replace(".weight", ".lora_up.weight") + rep_pk = lambda x: x.replace(".weight", ".alpha") + if peft: # peft + rep_ap = lambda x: x.replace(".weight", ".lora_A.weight") + rep_bp = lambda x: x.replace(".weight", ".lora_B.weight") + rep_pp = lambda x: x.replace(".weight", ".alpha") + + prefix = find_prefix(state_dict, "adaln_single.linear.lora_A.weight") + state_dict = {k[len(prefix):]: v for k, v in state_dict.items()} + else: # OneTrainer + rep_ap = lambda x: x.replace(".", "_")[:-7] + ".lora_down.weight" + rep_bp = lambda x: x.replace(".", "_")[:-7] + ".lora_up.weight" + rep_pp = lambda x: x.replace(".", "_")[:-7] + ".alpha" + + prefix = "lora_transformer_" + t5_marker = "lora_te_encoder" + t5_keys = [] + for key in list(state_dict.keys()): + if key.startswith(prefix): + state_dict[key[len(prefix):]] = state_dict.pop(key) + elif t5_marker in key: + t5_keys.append(state_dict.pop(key)) + if len(t5_keys) > 0: + print(f"Text Encoder not supported for PixArt LoRA, ignoring {len(t5_keys)} keys") + + cmap = [] + cmap_unet = get_conversion_map(state_dict) + conversion_map_ms # todo: 512 model + for k, v in cmap_unet: + if v.endswith(".weight"): + cmap.append((rep_ak(k), rep_ap(v))) + cmap.append((rep_bk(k), rep_bp(v))) + if not peft: + cmap.append((rep_pk(k), rep_pp(v))) + + missing = [k for k, v in cmap if v not in state_dict] + new_state_dict = {k: state_dict[v] for k, v in cmap if k not in missing} + matched = list(v for k, v in cmap if v in state_dict.keys()) + + lora_depth = get_lora_depth(state_dict) + for fp, fk in ((rep_ap, rep_ak), (rep_bp, rep_bk)): + for depth in range(lora_depth): + # Self Attention + key = lambda a: fp(f"transformer_blocks.{depth}.attn1.to_{a}.weight") + new_state_dict[fk(f"blocks.{depth}.attn.qkv.weight")] = torch.cat(( + state_dict[key('q')], state_dict[key('k')], state_dict[key('v')] + ), dim=0) + + matched += [key('q'), key('k'), key('v')] + if not peft: + akey = lambda a: rep_pp(f"transformer_blocks.{depth}.attn1.to_{a}.weight") + new_state_dict[rep_pk((f"blocks.{depth}.attn.qkv.weight"))] = state_dict[akey("q")] + matched += [akey('q'), akey('k'), akey('v')] + + # Self Attention projection? + key = lambda a: fp(f"transformer_blocks.{depth}.attn1.to_{a}.weight") + new_state_dict[fk(f"blocks.{depth}.attn.proj.weight")] = state_dict[key('out.0')] + matched += [key('out.0')] + + # Cross-attention (linear) + key = lambda a: fp(f"transformer_blocks.{depth}.attn2.to_{a}.weight") + new_state_dict[fk(f"blocks.{depth}.cross_attn.q_linear.weight")] = state_dict[key('q')] + new_state_dict[fk(f"blocks.{depth}.cross_attn.kv_linear.weight")] = torch.cat(( + state_dict[key('k')], state_dict[key('v')] + ), dim=0) + matched += [key('q'), key('k'), key('v')] + if not peft: + akey = lambda a: rep_pp(f"transformer_blocks.{depth}.attn2.to_{a}.weight") + new_state_dict[rep_pk((f"blocks.{depth}.cross_attn.q_linear.weight"))] = state_dict[akey("q")] + new_state_dict[rep_pk((f"blocks.{depth}.cross_attn.kv_linear.weight"))] = state_dict[akey("k")] + matched += [akey('q'), akey('k'), akey('v')] + + # Cross Attention projection? 
+ key = lambda a: fp(f"transformer_blocks.{depth}.attn2.to_{a}.weight") + new_state_dict[fk(f"blocks.{depth}.cross_attn.proj.weight")] = state_dict[key('out.0')] + matched += [key('out.0')] + + key = fp(f"transformer_blocks.{depth}.ff.net.0.proj.weight") + new_state_dict[fk(f"blocks.{depth}.mlp.fc1.weight")] = state_dict[key] + matched += [key] + + key = fp(f"transformer_blocks.{depth}.ff.net.2.weight") + new_state_dict[fk(f"blocks.{depth}.mlp.fc2.weight")] = state_dict[key] + matched += [key] + + if len(matched) < len(state_dict): + print(f"PixArt: LoRA conversion has leftover keys! ({len(matched)} vs {len(state_dict)})") + print(list(set(state_dict.keys()) - set(matched))) + + if len(missing) > 0: + print(f"PixArt: LoRA conversion has missing keys! (probably)") + print(missing) + + return new_state_dict \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/dit/pixArt/loader.py b/ComfyUI-Easy-Use/py/dit/pixArt/loader.py new file mode 100644 index 0000000000000000000000000000000000000000..c12ce80aae20c7c35935c8e009b9e79220184562 --- /dev/null +++ b/ComfyUI-Easy-Use/py/dit/pixArt/loader.py @@ -0,0 +1,328 @@ +import torch +import math +import comfy.supported_models_base +import comfy.latent_formats +import comfy.model_patcher +import comfy.model_base +import comfy.utils +import comfy.conds +from comfy import model_management +from .diffusers_convert import convert_state_dict + +# checkpointbf +class EXM_PixArt(comfy.supported_models_base.BASE): + unet_config = {} + unet_extra_config = {} + latent_format = comfy.latent_formats.SD15 + + def __init__(self, model_conf): + self.model_target = model_conf.get("target") + self.unet_config = model_conf.get("unet_config", {}) + self.sampling_settings = model_conf.get("sampling_settings", {}) + self.latent_format = self.latent_format() + # UNET is handled by extension + self.unet_config["disable_unet_model_creation"] = True + + def model_type(self, state_dict, prefix=""): + return comfy.model_base.ModelType.EPS + + +class EXM_PixArt_Model(comfy.model_base.BaseModel): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def extra_conds(self, **kwargs): + out = super().extra_conds(**kwargs) + + img_hw = kwargs.get("img_hw", None) + if img_hw is not None: + out["img_hw"] = comfy.conds.CONDRegular(torch.tensor(img_hw)) + + aspect_ratio = kwargs.get("aspect_ratio", None) + if aspect_ratio is not None: + out["aspect_ratio"] = comfy.conds.CONDRegular(torch.tensor(aspect_ratio)) + + cn_hint = kwargs.get("cn_hint", None) + if cn_hint is not None: + out["cn_hint"] = comfy.conds.CONDRegular(cn_hint) + + return out + + +def load_pixart(model_path, model_conf=None): + state_dict = comfy.utils.load_torch_file(model_path) + state_dict = state_dict.get("model", state_dict) + + # prefix + for prefix in ["model.diffusion_model.", ]: + if any(True for x in state_dict if x.startswith(prefix)): + state_dict = {k[len(prefix):]: v for k, v in state_dict.items()} + + # diffusers + if "adaln_single.linear.weight" in state_dict: + state_dict = convert_state_dict(state_dict) # Diffusers + + # guess auto config + if model_conf is None: + model_conf = guess_pixart_config(state_dict) + + parameters = comfy.utils.calculate_parameters(state_dict) + unet_dtype = model_management.unet_dtype(model_params=parameters) + load_device = comfy.model_management.get_torch_device() + offload_device = comfy.model_management.unet_offload_device() + + # ignore fp8/etc and use directly for now + manual_cast_dtype = model_management.unet_manual_cast(unet_dtype, 
load_device) + if manual_cast_dtype: + print(f"PixArt: falling back to {manual_cast_dtype}") + unet_dtype = manual_cast_dtype + + model_conf = EXM_PixArt(model_conf) # convert to object + model = EXM_PixArt_Model( # same as comfy.model_base.BaseModel + model_conf, + model_type=comfy.model_base.ModelType.EPS, + device=model_management.get_torch_device() + ) + + if model_conf.model_target == "PixArtMS": + from .models.PixArtMS import PixArtMS + model.diffusion_model = PixArtMS(**model_conf.unet_config) + elif model_conf.model_target == "PixArt": + from .models.PixArt import PixArt + model.diffusion_model = PixArt(**model_conf.unet_config) + elif model_conf.model_target == "PixArtMSSigma": + from .models.PixArtMS import PixArtMS + model.diffusion_model = PixArtMS(**model_conf.unet_config) + model.latent_format = comfy.latent_formats.SDXL() + elif model_conf.model_target == "ControlPixArtMSHalf": + from .models.PixArtMS import PixArtMS + from .models.pixart_controlnet import ControlPixArtMSHalf + model.diffusion_model = PixArtMS(**model_conf.unet_config) + model.diffusion_model = ControlPixArtMSHalf(model.diffusion_model) + elif model_conf.model_target == "ControlPixArtHalf": + from .models.PixArt import PixArt + from .models.pixart_controlnet import ControlPixArtHalf + model.diffusion_model = PixArt(**model_conf.unet_config) + model.diffusion_model = ControlPixArtHalf(model.diffusion_model) + else: + raise NotImplementedError(f"Unknown model target '{model_conf.model_target}'") + + m, u = model.diffusion_model.load_state_dict(state_dict, strict=False) + if len(m) > 0: print("Missing UNET keys", m) + if len(u) > 0: print("Leftover UNET keys", u) + model.diffusion_model.dtype = unet_dtype + model.diffusion_model.eval() + model.diffusion_model.to(unet_dtype) + + model_patcher = comfy.model_patcher.ModelPatcher( + model, + load_device=load_device, + offload_device=offload_device, + ) + return model_patcher + + +def guess_pixart_config(sd): + """ + Guess config based on converted state dict. + """ + # Shared settings based on DiT_XL_2 - could be enumerated + config = { + "num_heads": 16, # get from attention + "patch_size": 2, # final layer I guess? + "hidden_size": 1152, # pos_embed.shape[2] + } + config["depth"] = sum([key.endswith(".attn.proj.weight") for key in sd.keys()]) or 28 + + try: + # this is not present in the diffusers version for sigma? + config["model_max_length"] = sd["y_embedder.y_embedding"].shape[0] + except KeyError: + # need better logic to guess this + config["model_max_length"] = 300 + + if "pos_embed" in sd: + config["input_size"] = int(math.sqrt(sd["pos_embed"].shape[1])) * config["patch_size"] + config["pe_interpolation"] = config["input_size"] // (512 // 8) # dumb guess + + target_arch = "PixArtMS" + if config["model_max_length"] == 300: + # Sigma + target_arch = "PixArtMSSigma" + config["micro_condition"] = False + if "input_size" not in config: + # The diffusers weights for 1K/2K are exactly the same...? + # replace patch embed logic with HyDiT? 
+ print(f"PixArt: diffusers weights - 2K model will be broken, use manual loading!") + config["input_size"] = 1024 // 8 + else: + # Alpha + if "csize_embedder.mlp.0.weight" in sd: + # MS (microconds) + target_arch = "PixArtMS" + config["micro_condition"] = True + if "input_size" not in config: + config["input_size"] = 1024 // 8 + config["pe_interpolation"] = 2 + else: + # PixArt + target_arch = "PixArt" + if "input_size" not in config: + config["input_size"] = 512 // 8 + config["pe_interpolation"] = 1 + + print("PixArt guessed config:", target_arch, config) + return { + "target": target_arch, + "unet_config": config, + "sampling_settings": { + "beta_schedule": "sqrt_linear", + "linear_start": 0.0001, + "linear_end": 0.02, + "timesteps": 1000, + } + } + +# lora +class EXM_PixArt_ModelPatcher(comfy.model_patcher.ModelPatcher): + def calculate_weight(self, patches, weight, key): + """ + This is almost the same as the comfy function, but stripped down to just the LoRA patch code. + The problem with the original code is the q/k/v keys being combined into one for the attention. + In the diffusers code, they're treated as separate keys, but in the reference code they're recombined (q+kv|qkv). + This means, for example, that the [1152,1152] weights become [3456,1152] in the state dict. + The issue with this is that the LoRA weights are [128,1152],[1152,128] and become [384,1162],[3456,128] instead. + + This is the best thing I could think of that would fix that, but it's very fragile. + - Check key shape to determine if it needs the fallback logic + - Cut the input into parts based on the shape (undoing the torch.cat) + - Do the matrix multiplication logic + - Recombine them to match the expected shape + """ + for p in patches: + alpha = p[0] + v = p[1] + strength_model = p[2] + if strength_model != 1.0: + weight *= strength_model + + if isinstance(v, list): + v = (self.calculate_weight(v[1:], v[0].clone(), key),) + + if len(v) == 2: + patch_type = v[0] + v = v[1] + + if patch_type == "lora": + mat1 = comfy.model_management.cast_to_device(v[0], weight.device, torch.float32) + mat2 = comfy.model_management.cast_to_device(v[1], weight.device, torch.float32) + if v[2] is not None: + alpha *= v[2] / mat2.shape[0] + try: + mat1 = mat1.flatten(start_dim=1) + mat2 = mat2.flatten(start_dim=1) + + ch1 = mat1.shape[0] // mat2.shape[1] + ch2 = mat2.shape[0] // mat1.shape[1] + ### Fallback logic for shape mismatch ### + if mat1.shape[0] != mat2.shape[1] and ch1 == ch2 and (mat1.shape[0] / mat2.shape[1]) % 1 == 0: + mat1 = mat1.chunk(ch1, dim=0) + mat2 = mat2.chunk(ch1, dim=0) + weight += torch.cat( + [alpha * torch.mm(mat1[x], mat2[x]) for x in range(ch1)], + dim=0, + ).reshape(weight.shape).type(weight.dtype) + else: + weight += (alpha * torch.mm(mat1, mat2)).reshape(weight.shape).type(weight.dtype) + except Exception as e: + print("ERROR", key, e) + return weight + + def clone(self): + n = EXM_PixArt_ModelPatcher(self.model, self.load_device, self.offload_device, self.size, self.current_device, + weight_inplace_update=self.weight_inplace_update) + n.patches = {} + for k in self.patches: + n.patches[k] = self.patches[k][:] + + n.object_patches = self.object_patches.copy() + n.model_options = copy.deepcopy(self.model_options) + n.model_keys = self.model_keys + return n + + +def replace_model_patcher(model): + n = EXM_PixArt_ModelPatcher( + model=model.model, + size=model.size, + load_device=model.load_device, + offload_device=model.offload_device, + current_device=model.current_device, + 
weight_inplace_update=model.weight_inplace_update, + ) + n.patches = {} + for k in model.patches: + n.patches[k] = model.patches[k][:] + + n.object_patches = model.object_patches.copy() + n.model_options = copy.deepcopy(model.model_options) + return n + + +def find_peft_alpha(path): + def load_json(json_path): + with open(json_path) as f: + data = json.load(f) + alpha = data.get("lora_alpha") + alpha = alpha or data.get("alpha") + if not alpha: + print(" Found config but `lora_alpha` is missing!") + else: + print(f" Found config at {json_path} [alpha:{alpha}]") + return alpha + + # For some weird reason peft doesn't include the alpha in the actual model + print("PixArt: Warning! This is a PEFT LoRA. Trying to find config...") + files = [ + f"{os.path.splitext(path)[0]}.json", + f"{os.path.splitext(path)[0]}.config.json", + os.path.join(os.path.dirname(path), "adapter_config.json"), + ] + for file in files: + if os.path.isfile(file): + return load_json(file) + + print(" Missing config/alpha! assuming alpha of 8. Consider converting it/adding a config json to it.") + return 8.0 + + +def load_pixart_lora(model, lora, lora_path, strength): + k_back = lambda x: x.replace(".lora_up.weight", "") + # need to convert the actual weights for this to work. + if any(True for x in lora.keys() if x.endswith("adaln_single.linear.lora_A.weight")): + lora = convert_lora_state_dict(lora, peft=True) + alpha = find_peft_alpha(lora_path) + lora.update({f"{k_back(x)}.alpha": torch.tensor(alpha) for x in lora.keys() if "lora_up" in x}) + else: # OneTrainer + lora = convert_lora_state_dict(lora, peft=False) + + key_map = {k_back(x): f"diffusion_model.{k_back(x)}.weight" for x in lora.keys() if "lora_up" in x} # fake + + loaded = comfy.lora.load_lora(lora, key_map) + if model is not None: + # switch to custom model patcher when using LoRAs + if isinstance(model, EXM_PixArt_ModelPatcher): + new_modelpatcher = model.clone() + else: + new_modelpatcher = replace_model_patcher(model) + k = new_modelpatcher.add_patches(loaded, strength) + else: + k = () + new_modelpatcher = None + + k = set(k) + for x in loaded: + if (x not in k): + print("NOT LOADED", x) + + return new_modelpatcher \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/dit/pixArt/models/PixArt.py b/ComfyUI-Easy-Use/py/dit/pixArt/models/PixArt.py new file mode 100644 index 0000000000000000000000000000000000000000..4d6cf93be017348dcafc42754552ca132112bd9d --- /dev/null +++ b/ComfyUI-Easy-Use/py/dit/pixArt/models/PixArt.py @@ -0,0 +1,250 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. +# -------------------------------------------------------- +# References: +# GLIDE: https://github.com/openai/glide-text2im +# MAE: https://github.com/facebookresearch/mae/blob/main/models_mae.py +# -------------------------------------------------------- +import math +import torch +import torch.nn as nn +import os +import numpy as np +from timm.models.layers import DropPath +from timm.models.vision_transformer import PatchEmbed, Mlp + + +from .utils import auto_grad_checkpoint, to_2tuple +from .PixArt_blocks import t2i_modulate, CaptionEmbedder, AttentionKVCompress, MultiHeadCrossAttention, T2IFinalLayer, TimestepEmbedder, LabelEmbedder, FinalLayer + + +class PixArtBlock(nn.Module): + """ + A PixArt block with adaptive layer norm (adaLN-single) conditioning. 
+ """ + def __init__(self, hidden_size, num_heads, mlp_ratio=4.0, drop_path=0, input_size=None, sampling=None, sr_ratio=1, qk_norm=False, **block_kwargs): + super().__init__() + self.norm1 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.attn = AttentionKVCompress( + hidden_size, num_heads=num_heads, qkv_bias=True, sampling=sampling, sr_ratio=sr_ratio, + qk_norm=qk_norm, **block_kwargs + ) + self.cross_attn = MultiHeadCrossAttention(hidden_size, num_heads, **block_kwargs) + self.norm2 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + # to be compatible with lower version pytorch + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.mlp = Mlp(in_features=hidden_size, hidden_features=int(hidden_size * mlp_ratio), act_layer=approx_gelu, drop=0) + self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + self.scale_shift_table = nn.Parameter(torch.randn(6, hidden_size) / hidden_size ** 0.5) + self.sampling = sampling + self.sr_ratio = sr_ratio + + def forward(self, x, y, t, mask=None, **kwargs): + B, N, C = x.shape + + shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = (self.scale_shift_table[None] + t.reshape(B, 6, -1)).chunk(6, dim=1) + x = x + self.drop_path(gate_msa * self.attn(t2i_modulate(self.norm1(x), shift_msa, scale_msa)).reshape(B, N, C)) + x = x + self.cross_attn(x, y, mask) + x = x + self.drop_path(gate_mlp * self.mlp(t2i_modulate(self.norm2(x), shift_mlp, scale_mlp))) + + return x + + +### Core PixArt Model ### +class PixArt(nn.Module): + """ + Diffusion model with a Transformer backbone. + """ + def __init__( + self, + input_size=32, + patch_size=2, + in_channels=4, + hidden_size=1152, + depth=28, + num_heads=16, + mlp_ratio=4.0, + class_dropout_prob=0.1, + pred_sigma=True, + drop_path: float = 0., + caption_channels=4096, + pe_interpolation=1.0, + pe_precision=None, + config=None, + model_max_length=120, + qk_norm=False, + kv_compress_config=None, + **kwargs, + ): + super().__init__() + self.pred_sigma = pred_sigma + self.in_channels = in_channels + self.out_channels = in_channels * 2 if pred_sigma else in_channels + self.patch_size = patch_size + self.num_heads = num_heads + self.pe_interpolation = pe_interpolation + self.pe_precision = pe_precision + self.depth = depth + + self.x_embedder = PatchEmbed(input_size, patch_size, in_channels, hidden_size, bias=True) + self.t_embedder = TimestepEmbedder(hidden_size) + num_patches = self.x_embedder.num_patches + self.base_size = input_size // self.patch_size + # Will use fixed sin-cos embedding: + self.register_buffer("pos_embed", torch.zeros(1, num_patches, hidden_size)) + + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.t_block = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 6 * hidden_size, bias=True) + ) + self.y_embedder = CaptionEmbedder( + in_channels=caption_channels, hidden_size=hidden_size, uncond_prob=class_dropout_prob, + act_layer=approx_gelu, token_num=model_max_length + ) + drop_path = [x.item() for x in torch.linspace(0, drop_path, depth)] # stochastic depth decay rule + self.kv_compress_config = kv_compress_config + if kv_compress_config is None: + self.kv_compress_config = { + 'sampling': None, + 'scale_factor': 1, + 'kv_compress_layer': [], + } + self.blocks = nn.ModuleList([ + PixArtBlock( + hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path[i], + input_size=(input_size // patch_size, input_size // patch_size), + sampling=self.kv_compress_config['sampling'], + sr_ratio=int( + 
self.kv_compress_config['scale_factor'] + ) if i in self.kv_compress_config['kv_compress_layer'] else 1, + qk_norm=qk_norm, + ) + for i in range(depth) + ]) + self.final_layer = T2IFinalLayer(hidden_size, patch_size, self.out_channels) + + def forward_raw(self, x, t, y, mask=None, data_info=None): + """ + Original forward pass of PixArt. + x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + t: (N,) tensor of diffusion timesteps + y: (N, 1, 120, C) tensor of class labels + """ + x = x.to(self.dtype) + timestep = t.to(self.dtype) + y = y.to(self.dtype) + pos_embed = self.pos_embed.to(self.dtype) + self.h, self.w = x.shape[-2]//self.patch_size, x.shape[-1]//self.patch_size + x = self.x_embedder(x) + pos_embed # (N, T, D), where T = H * W / patch_size ** 2 + t = self.t_embedder(timestep.to(x.dtype)) # (N, D) + t0 = self.t_block(t) + y = self.y_embedder(y, self.training) # (N, 1, L, D) + if mask is not None: + if mask.shape[0] != y.shape[0]: + mask = mask.repeat(y.shape[0] // mask.shape[0], 1) + mask = mask.squeeze(1).squeeze(1) + y = y.squeeze(1).masked_select(mask.unsqueeze(-1) != 0).view(1, -1, x.shape[-1]) + y_lens = mask.sum(dim=1).tolist() + else: + y_lens = [y.shape[2]] * y.shape[0] + y = y.squeeze(1).view(1, -1, x.shape[-1]) + for block in self.blocks: + x = auto_grad_checkpoint(block, x, y, t0, y_lens) # (N, T, D) #support grad checkpoint + x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels) + x = self.unpatchify(x) # (N, out_channels, H, W) + return x + + def forward(self, x, timesteps, context, y=None, **kwargs): + """ + Forward pass that adapts comfy input to original forward function + x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + timesteps: (N,) tensor of diffusion timesteps + context: (N, 1, 120, C) conditioning + y: extra conditioning. 
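+ Returns only the epsilon prediction; when pred_sigma is set, the extra predicted-sigma channels are split off and discarded.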
+ """ + ## Still accepts the input w/o that dim but returns garbage + if len(context.shape) == 3: + context = context.unsqueeze(1) + + ## run original forward pass + out = self.forward_raw( + x = x.to(self.dtype), + t = timesteps.to(self.dtype), + y = context.to(self.dtype), + ) + + ## only return EPS + out = out.to(torch.float) + eps, rest = out[:, :self.in_channels], out[:, self.in_channels:] + return eps + + def unpatchify(self, x): + """ + x: (N, T, patch_size**2 * C) + imgs: (N, H, W, C) + """ + c = self.out_channels + p = self.x_embedder.patch_size[0] + h = w = int(x.shape[1] ** 0.5) + assert h * w == x.shape[1] + + x = x.reshape(shape=(x.shape[0], h, w, p, p, c)) + x = torch.einsum('nhwpqc->nchpwq', x) + imgs = x.reshape(shape=(x.shape[0], c, h * p, h * p)) + return imgs + + +def get_2d_sincos_pos_embed(embed_dim, grid_size, cls_token=False, extra_tokens=0, pe_interpolation=1.0, base_size=16): + """ + grid_size: int of the grid height and width + return: + pos_embed: [grid_size*grid_size, embed_dim] or [1+grid_size*grid_size, embed_dim] (w/ or w/o cls_token) + """ + if isinstance(grid_size, int): + grid_size = to_2tuple(grid_size) + grid_h = np.arange(grid_size[0], dtype=np.float32) / (grid_size[0]/base_size) / pe_interpolation + grid_w = np.arange(grid_size[1], dtype=np.float32) / (grid_size[1]/base_size) / pe_interpolation + grid = np.meshgrid(grid_w, grid_h) # here w goes first + grid = np.stack(grid, axis=0) + grid = grid.reshape([2, 1, grid_size[1], grid_size[0]]) + + pos_embed = get_2d_sincos_pos_embed_from_grid(embed_dim, grid) + if cls_token and extra_tokens > 0: + pos_embed = np.concatenate([np.zeros([extra_tokens, embed_dim]), pos_embed], axis=0) + return pos_embed.astype(np.float32) + + +def get_2d_sincos_pos_embed_from_grid(embed_dim, grid): + assert embed_dim % 2 == 0 + + # use half of dimensions to encode grid_h + emb_h = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[0]) # (H*W, D/2) + emb_w = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[1]) # (H*W, D/2) + + emb = np.concatenate([emb_h, emb_w], axis=1) # (H*W, D) + return emb + + +def get_1d_sincos_pos_embed_from_grid(embed_dim, pos): + """ + embed_dim: output dimension for each position + pos: a list of positions to be encoded: size (M,) + out: (M, D) + """ + assert embed_dim % 2 == 0 + omega = np.arange(embed_dim // 2, dtype=np.float64) + omega /= embed_dim / 2. + omega = 1. / 10000 ** omega # (D/2,) + + pos = pos.reshape(-1) # (M,) + out = np.einsum('m,d->md', pos, omega) # (M, D/2), outer product + + emb_sin = np.sin(out) # (M, D/2) + emb_cos = np.cos(out) # (M, D/2) + + emb = np.concatenate([emb_sin, emb_cos], axis=1) # (M, D) + return emb diff --git a/ComfyUI-Easy-Use/py/dit/pixArt/models/PixArtMS.py b/ComfyUI-Easy-Use/py/dit/pixArt/models/PixArtMS.py new file mode 100644 index 0000000000000000000000000000000000000000..34ada90ed128a22ad1050a7fe14a286e7bae4051 --- /dev/null +++ b/ComfyUI-Easy-Use/py/dit/pixArt/models/PixArtMS.py @@ -0,0 +1,273 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. 
+# -------------------------------------------------------- +# References: +# GLIDE: https://github.com/openai/glide-text2im +# MAE: https://github.com/facebookresearch/mae/blob/main/models_mae.py +# -------------------------------------------------------- +import torch +import torch.nn as nn +from tqdm import tqdm +from timm.models.layers import DropPath +from timm.models.vision_transformer import Mlp + +from .utils import auto_grad_checkpoint, to_2tuple +from .PixArt_blocks import t2i_modulate, CaptionEmbedder, AttentionKVCompress, MultiHeadCrossAttention, T2IFinalLayer, TimestepEmbedder, SizeEmbedder +from .PixArt import PixArt, get_2d_sincos_pos_embed + + +class PatchEmbed(nn.Module): + """ + 2D Image to Patch Embedding + """ + def __init__( + self, + patch_size=16, + in_chans=3, + embed_dim=768, + norm_layer=None, + flatten=True, + bias=True, + ): + super().__init__() + patch_size = to_2tuple(patch_size) + self.patch_size = patch_size + self.flatten = flatten + self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size, bias=bias) + self.norm = norm_layer(embed_dim) if norm_layer else nn.Identity() + + def forward(self, x): + x = self.proj(x) + if self.flatten: + x = x.flatten(2).transpose(1, 2) # BCHW -> BNC + x = self.norm(x) + return x + + +class PixArtMSBlock(nn.Module): + """ + A PixArt block with adaptive layer norm zero (adaLN-Zero) conditioning. + """ + def __init__(self, hidden_size, num_heads, mlp_ratio=4.0, drop_path=0., input_size=None, + sampling=None, sr_ratio=1, qk_norm=False, **block_kwargs): + super().__init__() + self.hidden_size = hidden_size + self.norm1 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.attn = AttentionKVCompress( + hidden_size, num_heads=num_heads, qkv_bias=True, sampling=sampling, sr_ratio=sr_ratio, + qk_norm=qk_norm, **block_kwargs + ) + self.cross_attn = MultiHeadCrossAttention(hidden_size, num_heads, **block_kwargs) + self.norm2 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + # to be compatible with lower version pytorch + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.mlp = Mlp(in_features=hidden_size, hidden_features=int(hidden_size * mlp_ratio), act_layer=approx_gelu, drop=0) + self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + self.scale_shift_table = nn.Parameter(torch.randn(6, hidden_size) / hidden_size ** 0.5) + + def forward(self, x, y, t, mask=None, HW=None, **kwargs): + B, N, C = x.shape + + shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = (self.scale_shift_table[None] + t.reshape(B, 6, -1)).chunk(6, dim=1) + x = x + self.drop_path(gate_msa * self.attn(t2i_modulate(self.norm1(x), shift_msa, scale_msa), HW=HW)) + x = x + self.cross_attn(x, y, mask) + x = x + self.drop_path(gate_mlp * self.mlp(t2i_modulate(self.norm2(x), shift_mlp, scale_mlp))) + + return x + + +### Core PixArt Model ### +class PixArtMS(PixArt): + """ + Diffusion model with a Transformer backbone. 
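+ Multi-scale variant of PixArt: positional embeddings are computed on the fly for the input resolution, and (when micro_condition is enabled) image-size and aspect-ratio embeddings are added to the timestep embedding.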
+ """ + def __init__( + self, + input_size=32, + patch_size=2, + in_channels=4, + hidden_size=1152, + depth=28, + num_heads=16, + mlp_ratio=4.0, + class_dropout_prob=0.1, + learn_sigma=True, + pred_sigma=True, + drop_path: float = 0., + caption_channels=4096, + pe_interpolation=None, + pe_precision=None, + config=None, + model_max_length=120, + micro_condition=True, + qk_norm=False, + kv_compress_config=None, + **kwargs, + ): + super().__init__( + input_size=input_size, + patch_size=patch_size, + in_channels=in_channels, + hidden_size=hidden_size, + depth=depth, + num_heads=num_heads, + mlp_ratio=mlp_ratio, + class_dropout_prob=class_dropout_prob, + learn_sigma=learn_sigma, + pred_sigma=pred_sigma, + drop_path=drop_path, + pe_interpolation=pe_interpolation, + config=config, + model_max_length=model_max_length, + qk_norm=qk_norm, + kv_compress_config=kv_compress_config, + **kwargs, + ) + self.dtype = torch.get_default_dtype() + self.h = self.w = 0 + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.t_block = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 6 * hidden_size, bias=True) + ) + self.x_embedder = PatchEmbed(patch_size, in_channels, hidden_size, bias=True) + self.y_embedder = CaptionEmbedder(in_channels=caption_channels, hidden_size=hidden_size, uncond_prob=class_dropout_prob, act_layer=approx_gelu, token_num=model_max_length) + self.micro_conditioning = micro_condition + if self.micro_conditioning: + self.csize_embedder = SizeEmbedder(hidden_size//3) # c_size embed + self.ar_embedder = SizeEmbedder(hidden_size//3) # aspect ratio embed + drop_path = [x.item() for x in torch.linspace(0, drop_path, depth)] # stochastic depth decay rule + if kv_compress_config is None: + kv_compress_config = { + 'sampling': None, + 'scale_factor': 1, + 'kv_compress_layer': [], + } + self.blocks = nn.ModuleList([ + PixArtMSBlock( + hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path[i], + input_size=(input_size // patch_size, input_size // patch_size), + sampling=kv_compress_config['sampling'], + sr_ratio=int(kv_compress_config['scale_factor']) if i in kv_compress_config['kv_compress_layer'] else 1, + qk_norm=qk_norm, + ) + for i in range(depth) + ]) + self.final_layer = T2IFinalLayer(hidden_size, patch_size, self.out_channels) + + def forward_raw(self, x, t, y, mask=None, data_info=None, **kwargs): + """ + Original forward pass of PixArt. 
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + t: (N,) tensor of diffusion timesteps + y: (N, 1, 120, C) tensor of class labels + """ + bs = x.shape[0] + x = x.to(self.dtype) + timestep = t.to(self.dtype) + y = y.to(self.dtype) + + pe_interpolation = self.pe_interpolation + if pe_interpolation is None or self.pe_precision is not None: + # calculate pe_interpolation on-the-fly + pe_interpolation = round((x.shape[-1]+x.shape[-2])/2.0 / (512/8.0), self.pe_precision or 0) + + self.h, self.w = x.shape[-2]//self.patch_size, x.shape[-1]//self.patch_size + pos_embed = torch.from_numpy( + get_2d_sincos_pos_embed( + self.pos_embed.shape[-1], (self.h, self.w), pe_interpolation=pe_interpolation, + base_size=self.base_size + ) + ).unsqueeze(0).to(device=x.device, dtype=self.dtype) + + x = self.x_embedder(x) + pos_embed # (N, T, D), where T = H * W / patch_size ** 2 + t = self.t_embedder(timestep) # (N, D) + + if self.micro_conditioning: + c_size, ar = data_info['img_hw'].to(self.dtype), data_info['aspect_ratio'].to(self.dtype) + csize = self.csize_embedder(c_size, bs) # (N, D) + ar = self.ar_embedder(ar, bs) # (N, D) + t = t + torch.cat([csize, ar], dim=1) + + t0 = self.t_block(t) + y = self.y_embedder(y, self.training) # (N, D) + + if mask is not None: + if mask.shape[0] != y.shape[0]: + mask = mask.repeat(y.shape[0] // mask.shape[0], 1) + mask = mask.squeeze(1).squeeze(1) + y = y.squeeze(1).masked_select(mask.unsqueeze(-1) != 0).view(1, -1, x.shape[-1]) + y_lens = mask.sum(dim=1).tolist() + else: + y_lens = [y.shape[2]] * y.shape[0] + y = y.squeeze(1).view(1, -1, x.shape[-1]) + for block in self.blocks: + x = auto_grad_checkpoint(block, x, y, t0, y_lens, (self.h, self.w), **kwargs) # (N, T, D) #support grad checkpoint + + x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels) + x = self.unpatchify(x) # (N, out_channels, H, W) + + return x + + def forward(self, x, timesteps, context, img_hw=None, aspect_ratio=None, **kwargs): + """ + Forward pass that adapts comfy input to original forward function + x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + timesteps: (N,) tensor of diffusion timesteps + context: (N, 1, 120, C) conditioning + img_hw: height|width conditioning + aspect_ratio: aspect ratio conditioning + """ + ## size/ar from cond with fallback based on the latent image shape. 
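+ ## NOTE: in the branch below, `aspect_ratio is None or True` is always true, so the aspect ratio is always derived from the latent shape and any aspect_ratio conditioning input is ignored.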
+ bs = x.shape[0] + data_info = {} + if img_hw is None: + data_info["img_hw"] = torch.tensor( + [[x.shape[2]*8, x.shape[3]*8]], + dtype=self.dtype, + device=x.device + ).repeat(bs, 1) + else: + data_info["img_hw"] = img_hw.to(dtype=x.dtype, device=x.device) + if aspect_ratio is None or True: + data_info["aspect_ratio"] = torch.tensor( + [[x.shape[2]/x.shape[3]]], + dtype=self.dtype, + device=x.device + ).repeat(bs, 1) + else: + data_info["aspect_ratio"] = aspect_ratio.to(dtype=x.dtype, device=x.device) + + ## Still accepts the input w/o that dim but returns garbage + if len(context.shape) == 3: + context = context.unsqueeze(1) + + ## run original forward pass + out = self.forward_raw( + x = x.to(self.dtype), + t = timesteps.to(self.dtype), + y = context.to(self.dtype), + data_info=data_info, + ) + + ## only return EPS + out = out.to(torch.float) + eps, rest = out[:, :self.in_channels], out[:, self.in_channels:] + return eps + + def unpatchify(self, x): + """ + x: (N, T, patch_size**2 * C) + imgs: (N, H, W, C) + """ + c = self.out_channels + p = self.x_embedder.patch_size[0] + assert self.h * self.w == x.shape[1] + + x = x.reshape(shape=(x.shape[0], self.h, self.w, p, p, c)) + x = torch.einsum('nhwpqc->nchpwq', x) + imgs = x.reshape(shape=(x.shape[0], c, self.h * p, self.w * p)) + return imgs diff --git a/ComfyUI-Easy-Use/py/dit/pixArt/models/PixArt_blocks.py b/ComfyUI-Easy-Use/py/dit/pixArt/models/PixArt_blocks.py new file mode 100644 index 0000000000000000000000000000000000000000..52cf847c9f3672f6d10c6a3efe08ae67a881f898 --- /dev/null +++ b/ComfyUI-Easy-Use/py/dit/pixArt/models/PixArt_blocks.py @@ -0,0 +1,477 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. +# -------------------------------------------------------- +# References: +# GLIDE: https://github.com/openai/glide-text2im +# MAE: https://github.com/facebookresearch/mae/blob/main/models_mae.py +# -------------------------------------------------------- +import math +import torch +import torch.nn as nn +import torch.nn.functional as F +from timm.models.vision_transformer import Mlp, Attention as Attention_ +from einops import rearrange + +from comfy import model_management +if model_management.xformers_enabled(): + import xformers + import xformers.ops +else: + print(""" +######################################## + PixArt: Not using xformers! + Expect images to be non-deterministic! 
+ Batch sizes > 1 are most likely broken +######################################## +""") + +def modulate(x, shift, scale): + return x * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1) + +def t2i_modulate(x, shift, scale): + return x * (1 + scale) + shift + +class MultiHeadCrossAttention(nn.Module): + def __init__(self, d_model, num_heads, attn_drop=0., proj_drop=0., **block_kwargs): + super(MultiHeadCrossAttention, self).__init__() + assert d_model % num_heads == 0, "d_model must be divisible by num_heads" + + self.d_model = d_model + self.num_heads = num_heads + self.head_dim = d_model // num_heads + + self.q_linear = nn.Linear(d_model, d_model) + self.kv_linear = nn.Linear(d_model, d_model*2) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(d_model, d_model) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x, cond, mask=None): + # query/value: img tokens; key: condition; mask: if padding tokens + B, N, C = x.shape + + q = self.q_linear(x).view(1, -1, self.num_heads, self.head_dim) + kv = self.kv_linear(cond).view(1, -1, 2, self.num_heads, self.head_dim) + k, v = kv.unbind(2) + + if model_management.xformers_enabled(): + attn_bias = None + if mask is not None: + attn_bias = xformers.ops.fmha.BlockDiagonalMask.from_seqlens([N] * B, mask) + x = xformers.ops.memory_efficient_attention( + q, k, v, + p=self.attn_drop.p, + attn_bias=attn_bias + ) + else: + q, k, v = map(lambda t: t.permute(0, 2, 1, 3),(q, k, v),) + attn_mask = None + if mask is not None and len(mask) > 1: + + # Create equivalent of xformer diagonal block mask, still only correct for square masks + # But depth doesn't matter as tensors can expand in that dimension + attn_mask_template = torch.ones( + [q.shape[2] // B, mask[0]], + dtype=torch.bool, + device=q.device + ) + attn_mask = torch.block_diag(attn_mask_template) + + # create a mask on the diagonal for each mask in the batch + for n in range(B - 1): + attn_mask = torch.block_diag(attn_mask, attn_mask_template) + + x = torch.nn.functional.scaled_dot_product_attention( + q, k, v, + attn_mask=attn_mask, + dropout_p=self.attn_drop.p + ).permute(0, 2, 1, 3).contiguous() + x = x.view(B, -1, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +class AttentionKVCompress(Attention_): + """Multi-head Attention block with KV token compression and qk norm.""" + + def __init__( + self, + dim, + num_heads=8, + qkv_bias=True, + sampling='conv', + sr_ratio=1, + qk_norm=False, + **block_kwargs, + ): + """ + Args: + dim (int): Number of input channels. + num_heads (int): Number of attention heads. + qkv_bias (bool: If True, add a learnable bias to query, key, value. + """ + super().__init__(dim, num_heads=num_heads, qkv_bias=qkv_bias, **block_kwargs) + + self.sampling=sampling # ['conv', 'ave', 'uniform', 'uniform_every'] + self.sr_ratio = sr_ratio + if sr_ratio > 1 and sampling == 'conv': + # Avg Conv Init. 
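+ # Depthwise conv with constant 1/sr_ratio**2 weights and zero bias, i.e. average pooling over each sr_ratio x sr_ratio patch.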
+ self.sr = nn.Conv2d(dim, dim, groups=dim, kernel_size=sr_ratio, stride=sr_ratio) + self.sr.weight.data.fill_(1/sr_ratio**2) + self.sr.bias.data.zero_() + self.norm = nn.LayerNorm(dim) + if qk_norm: + self.q_norm = nn.LayerNorm(dim) + self.k_norm = nn.LayerNorm(dim) + else: + self.q_norm = nn.Identity() + self.k_norm = nn.Identity() + + def downsample_2d(self, tensor, H, W, scale_factor, sampling=None): + if sampling is None or scale_factor == 1: + return tensor + B, N, C = tensor.shape + + if sampling == 'uniform_every': + return tensor[:, ::scale_factor], int(N // scale_factor) + + tensor = tensor.reshape(B, H, W, C).permute(0, 3, 1, 2) + new_H, new_W = int(H / scale_factor), int(W / scale_factor) + new_N = new_H * new_W + + if sampling == 'ave': + tensor = F.interpolate( + tensor, scale_factor=1 / scale_factor, mode='nearest' + ).permute(0, 2, 3, 1) + elif sampling == 'uniform': + tensor = tensor[:, :, ::scale_factor, ::scale_factor].permute(0, 2, 3, 1) + elif sampling == 'conv': + tensor = self.sr(tensor).reshape(B, C, -1).permute(0, 2, 1) + tensor = self.norm(tensor) + else: + raise ValueError + + return tensor.reshape(B, new_N, C).contiguous(), new_N + + def forward(self, x, mask=None, HW=None, block_id=None): + B, N, C = x.shape # 2 4096 1152 + new_N = N + if HW is None: + H = W = int(N ** 0.5) + else: + H, W = HW + qkv = self.qkv(x).reshape(B, N, 3, C) + + q, k, v = qkv.unbind(2) + dtype = q.dtype + q = self.q_norm(q) + k = self.k_norm(k) + + # KV compression + if self.sr_ratio > 1: + k, new_N = self.downsample_2d(k, H, W, self.sr_ratio, sampling=self.sampling) + v, new_N = self.downsample_2d(v, H, W, self.sr_ratio, sampling=self.sampling) + + q = q.reshape(B, N, self.num_heads, C // self.num_heads).to(dtype) + k = k.reshape(B, new_N, self.num_heads, C // self.num_heads).to(dtype) + v = v.reshape(B, new_N, self.num_heads, C // self.num_heads).to(dtype) + + attn_bias = None + if mask is not None: + attn_bias = torch.zeros([B * self.num_heads, q.shape[1], k.shape[1]], dtype=q.dtype, device=q.device) + attn_bias.masked_fill_(mask.squeeze(1).repeat(self.num_heads, 1, 1) == 0, float('-inf')) + # Switch between torch / xformers attention + if model_management.xformers_enabled(): + x = xformers.ops.memory_efficient_attention( + q, k, v, + p=self.attn_drop.p, + attn_bias=attn_bias + ) + else: + q, k, v = map(lambda t: t.transpose(1, 2),(q, k, v),) + x = torch.nn.functional.scaled_dot_product_attention( + q, k, v, + dropout_p=self.attn_drop.p, + attn_mask=attn_bias + ).transpose(1, 2).contiguous() + x = x.view(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +################################################################################# +# AMP attention with fp32 softmax to fix loss NaN problem during training # +################################################################################# +class Attention(Attention_): + def forward(self, x): + B, N, C = x.shape + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) + q, k, v = qkv.unbind(0) # make torchscript happy (cannot use tensor as tuple) + use_fp32_attention = getattr(self, 'fp32_attention', False) + if use_fp32_attention: + q, k = q.float(), k.float() + with torch.cuda.amp.autocast(enabled=not use_fp32_attention): + attn = (q @ k.transpose(-2, -1)) * self.scale + attn = attn.softmax(dim=-1) + + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +class FinalLayer(nn.Module): + """ 
+ The final layer of PixArt. + """ + + def __init__(self, hidden_size, patch_size, out_channels): + super().__init__() + self.norm_final = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.linear = nn.Linear(hidden_size, patch_size * patch_size * out_channels, bias=True) + self.adaLN_modulation = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 2 * hidden_size, bias=True) + ) + + def forward(self, x, c): + shift, scale = self.adaLN_modulation(c).chunk(2, dim=1) + x = modulate(self.norm_final(x), shift, scale) + x = self.linear(x) + return x + + +class T2IFinalLayer(nn.Module): + """ + The final layer of PixArt. + """ + + def __init__(self, hidden_size, patch_size, out_channels): + super().__init__() + self.norm_final = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.linear = nn.Linear(hidden_size, patch_size * patch_size * out_channels, bias=True) + self.scale_shift_table = nn.Parameter(torch.randn(2, hidden_size) / hidden_size ** 0.5) + self.out_channels = out_channels + + def forward(self, x, t): + shift, scale = (self.scale_shift_table[None] + t[:, None]).chunk(2, dim=1) + x = t2i_modulate(self.norm_final(x), shift, scale) + x = self.linear(x) + return x + + +class MaskFinalLayer(nn.Module): + """ + The final layer of PixArt. + """ + + def __init__(self, final_hidden_size, c_emb_size, patch_size, out_channels): + super().__init__() + self.norm_final = nn.LayerNorm(final_hidden_size, elementwise_affine=False, eps=1e-6) + self.linear = nn.Linear(final_hidden_size, patch_size * patch_size * out_channels, bias=True) + self.adaLN_modulation = nn.Sequential( + nn.SiLU(), + nn.Linear(c_emb_size, 2 * final_hidden_size, bias=True) + ) + def forward(self, x, t): + shift, scale = self.adaLN_modulation(t).chunk(2, dim=1) + x = modulate(self.norm_final(x), shift, scale) + x = self.linear(x) + return x + + +class DecoderLayer(nn.Module): + """ + The final layer of PixArt. + """ + + def __init__(self, hidden_size, decoder_hidden_size): + super().__init__() + self.norm_decoder = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.linear = nn.Linear(hidden_size, decoder_hidden_size, bias=True) + self.adaLN_modulation = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 2 * hidden_size, bias=True) + ) + def forward(self, x, t): + shift, scale = self.adaLN_modulation(t).chunk(2, dim=1) + x = modulate(self.norm_decoder(x), shift, scale) + x = self.linear(x) + return x + + +################################################################################# +# Embedding Layers for Timesteps and Class Labels # +################################################################################# +class TimestepEmbedder(nn.Module): + """ + Embeds scalar timesteps into vector representations. + """ + + def __init__(self, hidden_size, frequency_embedding_size=256): + super().__init__() + self.mlp = nn.Sequential( + nn.Linear(frequency_embedding_size, hidden_size, bias=True), + nn.SiLU(), + nn.Linear(hidden_size, hidden_size, bias=True), + ) + self.frequency_embedding_size = frequency_embedding_size + + @staticmethod + def timestep_embedding(t, dim, max_period=10000): + """ + Create sinusoidal timestep embeddings. + :param t: a 1-D Tensor of N indices, one per batch element. + These may be fractional. + :param dim: the dimension of the output. + :param max_period: controls the minimum frequency of the embeddings. + :return: an (N, D) Tensor of positional embeddings. 
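+ Concretely, with half = dim // 2 and omega_i = max_period ** (-i / half), the output is [cos(t * omega_0), ..., cos(t * omega_{half-1}), sin(t * omega_0), ..., sin(t * omega_{half-1})].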
+ """ + # https://github.com/openai/glide-text2im/blob/main/glide_text2im/nn.py + half = dim // 2 + freqs = torch.exp( + -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32, device=t.device) / half) + args = t[:, None].float() * freqs[None] + embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) + if dim % 2: + embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1) + return embedding + + def forward(self, t): + t_freq = self.timestep_embedding(t, self.frequency_embedding_size) + t_emb = self.mlp(t_freq.to(t.dtype)) + return t_emb + + +class SizeEmbedder(TimestepEmbedder): + """ + Embeds scalar timesteps into vector representations. + """ + + def __init__(self, hidden_size, frequency_embedding_size=256): + super().__init__(hidden_size=hidden_size, frequency_embedding_size=frequency_embedding_size) + self.mlp = nn.Sequential( + nn.Linear(frequency_embedding_size, hidden_size, bias=True), + nn.SiLU(), + nn.Linear(hidden_size, hidden_size, bias=True), + ) + self.frequency_embedding_size = frequency_embedding_size + self.outdim = hidden_size + + def forward(self, s, bs): + if s.ndim == 1: + s = s[:, None] + assert s.ndim == 2 + if s.shape[0] != bs: + s = s.repeat(bs//s.shape[0], 1) + assert s.shape[0] == bs + b, dims = s.shape[0], s.shape[1] + s = rearrange(s, "b d -> (b d)") + s_freq = self.timestep_embedding(s, self.frequency_embedding_size) + s_emb = self.mlp(s_freq.to(s.dtype)) + s_emb = rearrange(s_emb, "(b d) d2 -> b (d d2)", b=b, d=dims, d2=self.outdim) + return s_emb + + +class LabelEmbedder(nn.Module): + """ + Embeds class labels into vector representations. Also handles label dropout for classifier-free guidance. + """ + + def __init__(self, num_classes, hidden_size, dropout_prob): + super().__init__() + use_cfg_embedding = dropout_prob > 0 + self.embedding_table = nn.Embedding(num_classes + use_cfg_embedding, hidden_size) + self.num_classes = num_classes + self.dropout_prob = dropout_prob + + def token_drop(self, labels, force_drop_ids=None): + """ + Drops labels to enable classifier-free guidance. + """ + if force_drop_ids is None: + drop_ids = torch.rand(labels.shape[0]).cuda() < self.dropout_prob + else: + drop_ids = force_drop_ids == 1 + labels = torch.where(drop_ids, self.num_classes, labels) + return labels + + def forward(self, labels, train, force_drop_ids=None): + use_dropout = self.dropout_prob > 0 + if (train and use_dropout) or (force_drop_ids is not None): + labels = self.token_drop(labels, force_drop_ids) + embeddings = self.embedding_table(labels) + return embeddings + + +class CaptionEmbedder(nn.Module): + """ + Embeds class labels into vector representations. Also handles label dropout for classifier-free guidance. + """ + + def __init__(self, in_channels, hidden_size, uncond_prob, act_layer=nn.GELU(approximate='tanh'), token_num=120): + super().__init__() + self.y_proj = Mlp(in_features=in_channels, hidden_features=hidden_size, out_features=hidden_size, act_layer=act_layer, drop=0) + self.register_buffer("y_embedding", nn.Parameter(torch.randn(token_num, in_channels) / in_channels ** 0.5)) + self.uncond_prob = uncond_prob + + def token_drop(self, caption, force_drop_ids=None): + """ + Drops labels to enable classifier-free guidance. 
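+ Each caption in the batch is replaced by the learned null embedding y_embedding with probability uncond_prob (or wherever force_drop_ids == 1).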
+        """
+        if force_drop_ids is None:
+            drop_ids = torch.rand(caption.shape[0]).cuda() < self.uncond_prob
+        else:
+            drop_ids = force_drop_ids == 1
+        caption = torch.where(drop_ids[:, None, None, None], self.y_embedding, caption)
+        return caption
+
+    def forward(self, caption, train, force_drop_ids=None):
+        if train:
+            assert caption.shape[2:] == self.y_embedding.shape
+        use_dropout = self.uncond_prob > 0
+        if (train and use_dropout) or (force_drop_ids is not None):
+            caption = self.token_drop(caption, force_drop_ids)
+        caption = self.y_proj(caption)
+        return caption
+
+
+class CaptionEmbedderDoubleBr(nn.Module):
+    """
+    Embeds class labels into vector representations. Also handles label dropout for classifier-free guidance.
+    """
+
+    def __init__(self, in_channels, hidden_size, uncond_prob, act_layer=nn.GELU(approximate='tanh'), token_num=120):
+        super().__init__()
+        self.proj = Mlp(in_features=in_channels, hidden_features=hidden_size, out_features=hidden_size, act_layer=act_layer, drop=0)
+        self.embedding = nn.Parameter(torch.randn(1, in_channels) / 10 ** 0.5)
+        self.y_embedding = nn.Parameter(torch.randn(token_num, in_channels) / 10 ** 0.5)
+        self.uncond_prob = uncond_prob
+
+    def token_drop(self, global_caption, caption, force_drop_ids=None):
+        """
+        Drops labels to enable classifier-free guidance.
+        """
+        if force_drop_ids is None:
+            drop_ids = torch.rand(global_caption.shape[0]).cuda() < self.uncond_prob
+        else:
+            drop_ids = force_drop_ids == 1
+        global_caption = torch.where(drop_ids[:, None], self.embedding, global_caption)
+        caption = torch.where(drop_ids[:, None, None, None], self.y_embedding, caption)
+        return global_caption, caption
+
+    def forward(self, caption, train, force_drop_ids=None):
+        assert caption.shape[2:] == self.y_embedding.shape
+        global_caption = caption.mean(dim=2).squeeze()
+        use_dropout = self.uncond_prob > 0
+        if (train and use_dropout) or (force_drop_ids is not None):
+            global_caption, caption = self.token_drop(global_caption, caption, force_drop_ids)
+        y_embed = self.proj(global_caption)
+        return y_embed, caption
diff --git a/ComfyUI-Easy-Use/py/dit/pixArt/models/pixart_controlnet.py b/ComfyUI-Easy-Use/py/dit/pixArt/models/pixart_controlnet.py
new file mode 100644
index 0000000000000000000000000000000000000000..37fa4c1dcc0e6de44719c7fe897e2ecb0653eedb
--- /dev/null
+++ b/ComfyUI-Easy-Use/py/dit/pixArt/models/pixart_controlnet.py
@@ -0,0 +1,312 @@
+import re
+import torch
+import torch.nn as nn
+
+from copy import deepcopy
+from torch import Tensor
+from torch.nn import Module, Linear, init
+from typing import Any, Mapping
+
+from .PixArt import PixArt, get_2d_sincos_pos_embed
+from .PixArtMS import PixArtMSBlock, PixArtMS
+from .utils import auto_grad_checkpoint
+
+# The implementation of the ControlNet-Half architecture
+# https://github.com/lllyasviel/ControlNet/discussions/188
+class ControlT2IDitBlockHalf(Module):
+    def __init__(self, base_block: PixArtMSBlock, block_index: int = 0) -> None:
+        super().__init__()
+        self.copied_block = deepcopy(base_block)
+        self.block_index = block_index
+
+        for p in self.copied_block.parameters():
+            p.requires_grad_(True)
+
+        self.copied_block.load_state_dict(base_block.state_dict())
+        self.copied_block.train()
+
+        self.hidden_size = hidden_size = base_block.hidden_size
+        if self.block_index == 0:
+            self.before_proj = Linear(hidden_size, hidden_size)
+            init.zeros_(self.before_proj.weight)
+            init.zeros_(self.before_proj.bias)
+        self.after_proj = Linear(hidden_size, hidden_size)
+        init.zeros_(self.after_proj.weight)
+
init.zeros_(self.after_proj.bias) + + def forward(self, x, y, t, mask=None, c=None): + + if self.block_index == 0: + # the first block + c = self.before_proj(c) + c = self.copied_block(x + c, y, t, mask) + c_skip = self.after_proj(c) + else: + # load from previous c and produce the c for skip connection + c = self.copied_block(c, y, t, mask) + c_skip = self.after_proj(c) + + return c, c_skip + + +# The implementation of ControlPixArtHalf net +class ControlPixArtHalf(Module): + # only support single res model + def __init__(self, base_model: PixArt, copy_blocks_num: int = 13) -> None: + super().__init__() + self.dtype = torch.get_default_dtype() + self.base_model = base_model.eval() + self.controlnet = [] + self.copy_blocks_num = copy_blocks_num + self.total_blocks_num = len(base_model.blocks) + for p in self.base_model.parameters(): + p.requires_grad_(False) + + # Copy first copy_blocks_num block + for i in range(copy_blocks_num): + self.controlnet.append(ControlT2IDitBlockHalf(base_model.blocks[i], i)) + self.controlnet = nn.ModuleList(self.controlnet) + + def __getattr__(self, name: str) -> Tensor or Module: + if name in ['forward', 'forward_with_dpmsolver', 'forward_with_cfg', 'forward_c', 'load_state_dict']: + return self.__dict__[name] + elif name in ['base_model', 'controlnet']: + return super().__getattr__(name) + else: + return getattr(self.base_model, name) + + def forward_c(self, c): + self.h, self.w = c.shape[-2]//self.patch_size, c.shape[-1]//self.patch_size + pos_embed = torch.from_numpy(get_2d_sincos_pos_embed(self.pos_embed.shape[-1], (self.h, self.w), lewei_scale=self.lewei_scale, base_size=self.base_size)).unsqueeze(0).to(c.device).to(self.dtype) + return self.x_embedder(c) + pos_embed if c is not None else c + + # def forward(self, x, t, c, **kwargs): + # return self.base_model(x, t, c=self.forward_c(c), **kwargs) + def forward_raw(self, x, timestep, y, mask=None, data_info=None, c=None, **kwargs): + # modify the original PixArtMS forward function + if c is not None: + c = c.to(self.dtype) + c = self.forward_c(c) + """ + Forward pass of PixArt. 
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + t: (N,) tensor of diffusion timesteps + y: (N, 1, 120, C) tensor of class labels + """ + x = x.to(self.dtype) + timestep = timestep.to(self.dtype) + y = y.to(self.dtype) + pos_embed = self.pos_embed.to(self.dtype) + self.h, self.w = x.shape[-2]//self.patch_size, x.shape[-1]//self.patch_size + x = self.x_embedder(x) + pos_embed # (N, T, D), where T = H * W / patch_size ** 2 + t = self.t_embedder(timestep.to(x.dtype)) # (N, D) + t0 = self.t_block(t) + y = self.y_embedder(y, self.training) # (N, 1, L, D) + if mask is not None: + if mask.shape[0] != y.shape[0]: + mask = mask.repeat(y.shape[0] // mask.shape[0], 1) + mask = mask.squeeze(1).squeeze(1) + y = y.squeeze(1).masked_select(mask.unsqueeze(-1) != 0).view(1, -1, x.shape[-1]) + y_lens = mask.sum(dim=1).tolist() + else: + y_lens = [y.shape[2]] * y.shape[0] + y = y.squeeze(1).view(1, -1, x.shape[-1]) + + # define the first layer + x = auto_grad_checkpoint(self.base_model.blocks[0], x, y, t0, y_lens, **kwargs) # (N, T, D) #support grad checkpoint + + if c is not None: + # update c + for index in range(1, self.copy_blocks_num + 1): + c, c_skip = auto_grad_checkpoint(self.controlnet[index - 1], x, y, t0, y_lens, c, **kwargs) + x = auto_grad_checkpoint(self.base_model.blocks[index], x + c_skip, y, t0, y_lens, **kwargs) + + # update x + for index in range(self.copy_blocks_num + 1, self.total_blocks_num): + x = auto_grad_checkpoint(self.base_model.blocks[index], x, y, t0, y_lens, **kwargs) + else: + for index in range(1, self.total_blocks_num): + x = auto_grad_checkpoint(self.base_model.blocks[index], x, y, t0, y_lens, **kwargs) + + x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels) + x = self.unpatchify(x) # (N, out_channels, H, W) + return x + + def forward(self, x, timesteps, context, cn_hint=None, **kwargs): + """ + Forward pass that adapts comfy input to original forward function + x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + timesteps: (N,) tensor of diffusion timesteps + context: (N, 1, 120, C) conditioning + cn_hint: controlnet hint + """ + ## Still accepts the input w/o that dim but returns garbage + if len(context.shape) == 3: + context = context.unsqueeze(1) + + ## run original forward pass + out = self.forward_raw( + x = x.to(self.dtype), + timestep = timesteps.to(self.dtype), + y = context.to(self.dtype), + c = cn_hint, + ) + + ## only return EPS + out = out.to(torch.float) + eps, rest = out[:, :self.in_channels], out[:, self.in_channels:] + return eps + + def forward_with_dpmsolver(self, x, t, y, data_info, c, **kwargs): + model_out = self.forward_raw(x, t, y, data_info=data_info, c=c, **kwargs) + return model_out.chunk(2, dim=1)[0] + + # def forward_with_dpmsolver(self, x, t, y, data_info, c, **kwargs): + # return self.base_model.forward_with_dpmsolver(x, t, y, data_info=data_info, c=self.forward_c(c), **kwargs) + + def forward_with_cfg(self, x, t, y, cfg_scale, data_info, c, **kwargs): + return self.base_model.forward_with_cfg(x, t, y, cfg_scale, data_info, c=self.forward_c(c), **kwargs) + + def load_state_dict(self, state_dict: Mapping[str, Any], strict: bool = True): + if all((k.startswith('base_model') or k.startswith('controlnet')) for k in state_dict.keys()): + return super().load_state_dict(state_dict, strict) + else: + new_key = {} + for k in state_dict.keys(): + new_key[k] = re.sub(r"(blocks\.\d+)(.*)", r"\1.base_block\2", k) + for k, v in new_key.items(): + if k != v: + 
print(f"replace {k} to {v}") + state_dict[v] = state_dict.pop(k) + + return self.base_model.load_state_dict(state_dict, strict) + + def unpatchify(self, x): + """ + x: (N, T, patch_size**2 * C) + imgs: (N, H, W, C) + """ + c = self.out_channels + p = self.x_embedder.patch_size[0] + assert self.h * self.w == x.shape[1] + + x = x.reshape(shape=(x.shape[0], self.h, self.w, p, p, c)) + x = torch.einsum('nhwpqc->nchpwq', x) + imgs = x.reshape(shape=(x.shape[0], c, self.h * p, self.w * p)) + return imgs + + # @property + # def dtype(self): + ## 返回模型参数的数据类型 + # return next(self.parameters()).dtype + + +# The implementation for PixArtMS_Half + 1024 resolution +class ControlPixArtMSHalf(ControlPixArtHalf): + # support multi-scale res model (multi-scale model can also be applied to single reso training & inference) + def __init__(self, base_model: PixArtMS, copy_blocks_num: int = 13) -> None: + super().__init__(base_model=base_model, copy_blocks_num=copy_blocks_num) + + def forward_raw(self, x, timestep, y, mask=None, data_info=None, c=None, **kwargs): + # modify the original PixArtMS forward function + """ + Forward pass of PixArt. + x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + t: (N,) tensor of diffusion timesteps + y: (N, 1, 120, C) tensor of class labels + """ + if c is not None: + c = c.to(self.dtype) + c = self.forward_c(c) + bs = x.shape[0] + x = x.to(self.dtype) + timestep = timestep.to(self.dtype) + y = y.to(self.dtype) + c_size, ar = data_info['img_hw'].to(self.dtype), data_info['aspect_ratio'].to(self.dtype) + self.h, self.w = x.shape[-2]//self.patch_size, x.shape[-1]//self.patch_size + + pos_embed = torch.from_numpy(get_2d_sincos_pos_embed(self.pos_embed.shape[-1], (self.h, self.w), lewei_scale=self.lewei_scale, base_size=self.base_size)).unsqueeze(0).to(x.device).to(self.dtype) + x = self.x_embedder(x) + pos_embed # (N, T, D), where T = H * W / patch_size ** 2 + t = self.t_embedder(timestep) # (N, D) + csize = self.csize_embedder(c_size, bs) # (N, D) + ar = self.ar_embedder(ar, bs) # (N, D) + t = t + torch.cat([csize, ar], dim=1) + t0 = self.t_block(t) + y = self.y_embedder(y, self.training) # (N, D) + if mask is not None: + if mask.shape[0] != y.shape[0]: + mask = mask.repeat(y.shape[0] // mask.shape[0], 1) + mask = mask.squeeze(1).squeeze(1) + y = y.squeeze(1).masked_select(mask.unsqueeze(-1) != 0).view(1, -1, x.shape[-1]) + y_lens = mask.sum(dim=1).tolist() + else: + y_lens = [y.shape[2]] * y.shape[0] + y = y.squeeze(1).view(1, -1, x.shape[-1]) + + # define the first layer + x = auto_grad_checkpoint(self.base_model.blocks[0], x, y, t0, y_lens, **kwargs) # (N, T, D) #support grad checkpoint + + if c is not None: + # update c + for index in range(1, self.copy_blocks_num + 1): + c, c_skip = auto_grad_checkpoint(self.controlnet[index - 1], x, y, t0, y_lens, c, **kwargs) + x = auto_grad_checkpoint(self.base_model.blocks[index], x + c_skip, y, t0, y_lens, **kwargs) + + # update x + for index in range(self.copy_blocks_num + 1, self.total_blocks_num): + x = auto_grad_checkpoint(self.base_model.blocks[index], x, y, t0, y_lens, **kwargs) + else: + for index in range(1, self.total_blocks_num): + x = auto_grad_checkpoint(self.base_model.blocks[index], x, y, t0, y_lens, **kwargs) + + x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels) + x = self.unpatchify(x) # (N, out_channels, H, W) + return x + + def forward(self, x, timesteps, context, img_hw=None, aspect_ratio=None, cn_hint=None, **kwargs): + """ + Forward pass that adapts comfy 
input to original forward function + x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + timesteps: (N,) tensor of diffusion timesteps + context: (N, 1, 120, C) conditioning + img_hw: height|width conditioning + aspect_ratio: aspect ratio conditioning + cn_hint: controlnet hint + """ + ## size/ar from cond with fallback based on the latent image shape. + bs = x.shape[0] + data_info = {} + if img_hw is None: + data_info["img_hw"] = torch.tensor( + [[x.shape[2]*8, x.shape[3]*8]], + dtype=self.dtype, + device=x.device + ).repeat(bs, 1) + else: + data_info["img_hw"] = img_hw.to(x.dtype) + if aspect_ratio is None or True: + data_info["aspect_ratio"] = torch.tensor( + [[x.shape[2]/x.shape[3]]], + dtype=self.dtype, + device=x.device + ).repeat(bs, 1) + else: + data_info["aspect_ratio"] = aspect_ratio.to(x.dtype) + + ## Still accepts the input w/o that dim but returns garbage + if len(context.shape) == 3: + context = context.unsqueeze(1) + + ## run original forward pass + out = self.forward_raw( + x = x.to(self.dtype), + timestep = timesteps.to(self.dtype), + y = context.to(self.dtype), + c = cn_hint, + data_info=data_info, + ) + + ## only return EPS + out = out.to(torch.float) + eps, rest = out[:, :self.in_channels], out[:, self.in_channels:] + return eps diff --git a/ComfyUI-Easy-Use/py/dit/pixArt/models/utils.py b/ComfyUI-Easy-Use/py/dit/pixArt/models/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..9f7762148226a32b7c20d8527fa144a14acc275c --- /dev/null +++ b/ComfyUI-Easy-Use/py/dit/pixArt/models/utils.py @@ -0,0 +1,122 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from torch.utils.checkpoint import checkpoint, checkpoint_sequential +from collections.abc import Iterable +from itertools import repeat + +def _ntuple(n): + def parse(x): + if isinstance(x, Iterable) and not isinstance(x, str): + return x + return tuple(repeat(x, n)) + return parse + +to_1tuple = _ntuple(1) +to_2tuple = _ntuple(2) + +def set_grad_checkpoint(model, use_fp32_attention=False, gc_step=1): + assert isinstance(model, nn.Module) + + def set_attr(module): + module.grad_checkpointing = True + module.fp32_attention = use_fp32_attention + module.grad_checkpointing_step = gc_step + model.apply(set_attr) + +def auto_grad_checkpoint(module, *args, **kwargs): + if getattr(module, 'grad_checkpointing', False): + if isinstance(module, Iterable): + gc_step = module[0].grad_checkpointing_step + return checkpoint_sequential(module, gc_step, *args, **kwargs) + else: + return checkpoint(module, *args, **kwargs) + return module(*args, **kwargs) + +def checkpoint_sequential(functions, step, input, *args, **kwargs): + + # Hack for keyword-only parameter in a python 2.7-compliant way + preserve = kwargs.pop('preserve_rng_state', True) + if kwargs: + raise ValueError("Unexpected keyword arguments: " + ",".join(arg for arg in kwargs)) + + def run_function(start, end, functions): + def forward(input): + for j in range(start, end + 1): + input = functions[j](input, *args) + return input + return forward + + if isinstance(functions, torch.nn.Sequential): + functions = list(functions.children()) + + # the last chunk has to be non-volatile + end = -1 + segment = len(functions) // step + for start in range(0, step * (segment - 1), step): + end = start + step - 1 + input = checkpoint(run_function(start, end, functions), input, preserve_rng_state=preserve) + return run_function(end + 1, len(functions) - 1, functions)(input) + +def get_rel_pos(q_size, k_size, 
rel_pos): + """ + Get relative positional embeddings according to the relative positions of + query and key sizes. + Args: + q_size (int): size of query q. + k_size (int): size of key k. + rel_pos (Tensor): relative position embeddings (L, C). + + Returns: + Extracted positional embeddings according to relative positions. + """ + max_rel_dist = int(2 * max(q_size, k_size) - 1) + # Interpolate rel pos if needed. + if rel_pos.shape[0] != max_rel_dist: + # Interpolate rel pos. + rel_pos_resized = F.interpolate( + rel_pos.reshape(1, rel_pos.shape[0], -1).permute(0, 2, 1), + size=max_rel_dist, + mode="linear", + ) + rel_pos_resized = rel_pos_resized.reshape(-1, max_rel_dist).permute(1, 0) + else: + rel_pos_resized = rel_pos + + # Scale the coords with short length if shapes for q and k are different. + q_coords = torch.arange(q_size)[:, None] * max(k_size / q_size, 1.0) + k_coords = torch.arange(k_size)[None, :] * max(q_size / k_size, 1.0) + relative_coords = (q_coords - k_coords) + (k_size - 1) * max(q_size / k_size, 1.0) + + return rel_pos_resized[relative_coords.long()] + +def add_decomposed_rel_pos(attn, q, rel_pos_h, rel_pos_w, q_size, k_size): + """ + Calculate decomposed Relative Positional Embeddings from :paper:`mvitv2`. + https://github.com/facebookresearch/mvit/blob/19786631e330df9f3622e5402b4a419a263a2c80/mvit/models/attention.py # noqa B950 + Args: + attn (Tensor): attention map. + q (Tensor): query q in the attention layer with shape (B, q_h * q_w, C). + rel_pos_h (Tensor): relative position embeddings (Lh, C) for height axis. + rel_pos_w (Tensor): relative position embeddings (Lw, C) for width axis. + q_size (Tuple): spatial sequence size of query q with (q_h, q_w). + k_size (Tuple): spatial sequence size of key k with (k_h, k_w). + + Returns: + attn (Tensor): attention map with added relative positional embeddings. 
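+ Instead of a full (q_h*q_w, k_h*k_w) relative table, the height term einsum('bhwc,hkc->bhwk', q, Rh) and the width term einsum('bhwc,wkc->bhwk', q, Rw) are added separately to the attention logits.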
+ """ + q_h, q_w = q_size + k_h, k_w = k_size + Rh = get_rel_pos(q_h, k_h, rel_pos_h) + Rw = get_rel_pos(q_w, k_w, rel_pos_w) + + B, _, dim = q.shape + r_q = q.reshape(B, q_h, q_w, dim) + rel_h = torch.einsum("bhwc,hkc->bhwk", r_q, Rh) + rel_w = torch.einsum("bhwc,wkc->bhwk", r_q, Rw) + + attn = ( + attn.view(B, q_h, q_w, k_h, k_w) + rel_h[:, :, :, :, None] + rel_w[:, :, :, None, :] + ).view(B, q_h * q_w, k_h * k_w) + + return attn diff --git a/ComfyUI-Easy-Use/py/dit/utils.py b/ComfyUI-Easy-Use/py/dit/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..22bf8e9e4f54f538167f841980ebbdea11407b3f --- /dev/null +++ b/ComfyUI-Easy-Use/py/dit/utils.py @@ -0,0 +1,38 @@ +import torch +from comfy import model_management + +def string_to_dtype(s="none", mode=None): + s = s.lower().strip() + if s in ["default", "as-is"]: + return None + elif s in ["auto", "auto (comfy)"]: + if mode == "vae": + return model_management.vae_device() + elif mode == "text_encoder": + return model_management.text_encoder_dtype() + elif mode == "unet": + return model_management.unet_dtype() + else: + raise NotImplementedError(f"Unknown dtype mode '{mode}'") + elif s in ["none", "auto (hf)", "auto (hf/bnb)"]: + return None + elif s in ["fp32", "float32", "float"]: + return torch.float32 + elif s in ["bf16", "bfloat16"]: + return torch.bfloat16 + elif s in ["fp16", "float16", "half"]: + return torch.float16 + elif "fp8" in s or "float8" in s: + if "e5m2" in s: + return torch.float8_e5m2 + elif "e4m3" in s: + return torch.float8_e4m3fn + else: + raise NotImplementedError(f"Unknown 8bit dtype '{s}'") + elif "bnb" in s: + assert s in ["bnb8bit", "bnb4bit"], f"Unknown bnb mode '{s}'" + return s + elif s is None: + return None + else: + raise NotImplementedError(f"Unknown dtype '{s}'") \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/dynamiCrafter/__init__.py b/ComfyUI-Easy-Use/py/dynamiCrafter/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..84023baf0cba013061db70acecb4a6bc4ae821c5 --- /dev/null +++ b/ComfyUI-Easy-Use/py/dynamiCrafter/__init__.py @@ -0,0 +1,334 @@ +#credit to ExponentialML for this module +#from https://github.com/ExponentialML/ComfyUI_Native_DynamiCrafter +import os +import torch +import comfy + +from einops import rearrange +from comfy import model_base, model_management +from .lvdm.modules.networks.openaimodel3d import UNetModel as DynamiCrafterUNetModel + +from .utils.model_utils import DynamiCrafterBase, DYNAMICRAFTER_CONFIG, load_image_proj_dict, load_dynamicrafter_dict, get_image_proj_model + +class DynamiCrafter: + + def __init__(self): + self.model_patcher = None + + # There is probably a better way to do this, but with the apply_model callback, this seems necessary. + # The model gets wrapped around a CFG Denoiser class, and handles the conditioning parts there. + # We cannot access it, so we must find the conditioning according to how ComfyUI handles it. + def get_conditioning_pair(self, c_crossattn, use_cfg: bool): + if not use_cfg: + return c_crossattn + + conditioning_group = [] + + for i in range(c_crossattn.shape[0]): + # Get the positive and negative conditioning. 
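+ # Adjacent entries of the cross-attention batch are scanned; the first pair that differs is treated as (negative at i, positive at i+1) and returned concatenated as [positive, negative].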
+ positive_idx = i + 1 + negative_idx = i + + if positive_idx >= c_crossattn.shape[0]: + break + + if not torch.equal(c_crossattn[[positive_idx]], c_crossattn[[negative_idx]]): + conditioning_group = [ + c_crossattn[[positive_idx]], + c_crossattn[[negative_idx]] + ] + break + + if len(conditioning_group) == 0: + raise ValueError("Could not get the appropriate conditioning group.") + + return torch.cat(conditioning_group) + + # apply_model, {"input": input_x, "timestep": timestep_, "c": c, "cond_or_uncond": cond_or_uncond} + def _forward(self, *args): + transformer_options = self.model_patcher.model_options['transformer_options'] + conditioning = transformer_options['conditioning'] + + apply_model = args[0] + + # forward_dict + fd = args[1] + + x, t, model_in_kwargs, _ = fd['input'], fd['timestep'], fd['c'], fd['cond_or_uncond'] + + c_crossattn = model_in_kwargs.pop("c_crossattn") + c_concat = conditioning['c_concat'] + num_video_frames = conditioning['num_video_frames'] + fs = conditioning['fs'] + + original_num_frames = num_video_frames + + # Better way to determine if we're using CFG + # The cond batch will always be num_frames >= 2 since we're doing video, + # so we need get this condition differently here. + if x.shape[0] > num_video_frames: + num_video_frames *= 2 + batch_size = 2 + use_cfg = True + else: + use_cfg = False + batch_size = 1 + + if use_cfg: + c_concat = torch.cat([c_concat] * 2) + + self.validate_forwardable_latent(x, c_concat, num_video_frames, use_cfg) + + x_in, c_concat = map(lambda xc: rearrange(xc, '(b t) c h w -> b c t h w', b=batch_size), (x, c_concat)) + + # We always assume video, so there will always be batched conditionings. + c_crossattn = self.get_conditioning_pair(c_crossattn, use_cfg) + c_crossattn = c_crossattn[:2] if use_cfg else c_crossattn[:1] + context_in = c_crossattn + + img_embs = conditioning['image_emb'] + + if use_cfg: + img_emb_uncond = conditioning['image_emb_uncond'] + img_embs = torch.cat([img_embs, img_emb_uncond]) + + fs = torch.cat([fs] * x_in.shape[0]) + + outs = [] + for i in range(batch_size): + model_in_kwargs['transformer_options']['cond_idx'] = i + x_out = apply_model( + x_in[[i]], + t=torch.cat([t[:1]]), + context_in=context_in[[i]], + c_crossattn=c_crossattn, + cc_concat=c_concat[[i]], # "cc" is to handle naming conflict with apply_model wrapper. + # We want to handle this in the UNet forward. + num_video_frames=num_video_frames // 2 if batch_size > 1 else num_video_frames, + img_emb=img_embs[[i]], + fs=fs[[i]], + **model_in_kwargs + ) + outs.append(x_out) + + x_out = torch.cat(list(reversed(outs))) + x_out = rearrange(x_out, 'b c t h w -> (b t) c h w') + + return x_out + + def assign_forward_args( + self, + model, + c_concat, + image_emb, + image_emb_uncond, + fs, + frames, + ): + model.model_options['transformer_options']['conditioning'] = { + "c_concat": c_concat, + "image_emb": image_emb, + 'image_emb_uncond': image_emb_uncond, + "fs": fs, + "num_video_frames": frames, + } + + def validate_forwardable_latent(self, latent, c_concat, num_video_frames, use_cfg): + check_no_cfg = latent.shape[0] != num_video_frames + check_with_cfg = latent.shape[0] != (num_video_frames * 2) + + latent_batch_size = latent.shape[0] if not use_cfg else latent.shape[0] // 2 + num_frames = num_video_frames if not use_cfg else num_video_frames // 2 + + if all([check_no_cfg, check_with_cfg]): + raise ValueError( + "Please make sure your latent inputs match the number of frames in the DynamiCrafter Processor." 
+ f"Got a latent batch size of ({latent_batch_size}) with number of frames being ({num_frames})." + ) + + latent_h, latent_w = latent.shape[-2:] + c_concat_h, c_concat_w = c_concat.shape[-2:] + + if not all([latent_h == c_concat_h, latent_w == c_concat_w]): + raise ValueError( + "Please make sure that your input latent and image frames are the same height and width.", + f"Image Size: {c_concat_w * 8}, {c_concat_h * 8}, Latent Size: {latent_h * 8}, {latent_w * 8}" + ) + + def process_image_conditioning( + self, + model, + clip_vision, + vae, + image_proj_model, + images, + use_interpolate, + fps: int, + frames: int, + scale_latents: bool + ): + self.model_patcher = model + encoded_latent = vae.encode(images[:, :, :, :3]) + + encoded_image = clip_vision.encode_image(images[:1])['last_hidden_state'] + image_emb = image_proj_model(encoded_image) + + encoded_image_uncond = clip_vision.encode_image(torch.zeros_like(images)[:1])['last_hidden_state'] + image_emb_uncond = image_proj_model(encoded_image_uncond) + + c_concat = encoded_latent + + if scale_latents: + vae_process_input = vae.process_input + vae.process_input = lambda image: (image - .5) * 2 + c_concat = vae.encode(images[:, :, :, :3]) + vae.process_input = vae_process_input + c_concat = model.model.process_latent_in(c_concat) * 1.3 + else: + c_concat = model.model.process_latent_in(c_concat) + + fs = torch.tensor([fps], dtype=torch.long, device=model_management.intermediate_device()) + + model.set_model_unet_function_wrapper(self._forward) + + used_interpolate_processing = False + + if use_interpolate and frames > 16: + raise ValueError( + "When using interpolation mode, the maximum amount of frames are 16." + "If you're doing long video generation, consider using the last frame\ + from the first generation for the next one (autoregressive)." + ) + if encoded_latent.shape[0] == 1: + c_concat = torch.cat([c_concat] * frames, dim=0)[:frames] + + if use_interpolate: + mask = torch.zeros_like(c_concat) + mask[:1] = c_concat[:1] + c_concat = mask + + used_interpolate_processing = True + else: + if use_interpolate and c_concat.shape[0] in [2, 3]: + input_frame_count = c_concat.shape[0] + + # We're just padding to the same type an size of the concat + masked_frames = torch.zeros_like(torch.cat([c_concat[:1]] * frames))[:frames] + + # Start frame + masked_frames[:1] = c_concat[:1] + + end_frame_idx = -1 + + # TODO + speed = 1.0 + if speed < 1.0: + possible_speeds = list(torch.linspace(0, 1.0, c_concat.shape[0])) + speed_from_frames = enumerate(possible_speeds) + speed_idx = min(speed_from_frames, key=lambda n: n[1] - speed)[0] + end_frame_idx = speed_idx + + # End frame + masked_frames[-1:] = c_concat[[end_frame_idx]] + + # Possible middle frame, but not working at the moment. + if input_frame_count == 3: + middle_idx = masked_frames.shape[0] // 2 + middle_idx_frame = c_concat.shape[0] // 2 + masked_frames[[middle_idx]] = c_concat[[middle_idx_frame]] + + c_concat = masked_frames + used_interpolate_processing = True + + print(f"Using interpolation mode with {input_frame_count} frames.") + + if c_concat.shape[0] < frames and not used_interpolate_processing: + print( + "Multiple images found, but interpolation mode is unset. 
Using the first frame as condition.", + ) + c_concat = torch.cat([c_concat[:1]] * frames) + + c_concat = c_concat[:frames] + + if encoded_latent.shape[0] == 1: + encoded_latent = torch.cat([encoded_latent] * frames)[:frames] + + if encoded_latent.shape[0] < frames and encoded_latent.shape[0] != 1: + encoded_latent = torch.cat( + [encoded_latent] + [encoded_latent[-1:]] * abs(encoded_latent.shape[0] - frames) + )[:frames] + + # We could store this as a state in this Node Class Instance, but to prevent any weird edge cases, + # this should always be passed through the 'stateless' way, and let ComfyUI handle the transformer_options state. + self.assign_forward_args(model, c_concat, image_emb, image_emb_uncond, fs, frames) + + return (model, {"samples": torch.zeros_like(c_concat)}, {"samples": encoded_latent},) + + + # Loader for the DynamiCrafter model. + def load_model_sicts(self, model_path: str): + model_state_dict = comfy.utils.load_torch_file(model_path) + dynamicrafter_dict = load_dynamicrafter_dict(model_state_dict) + image_proj_dict = load_image_proj_dict(model_state_dict) + + return dynamicrafter_dict, image_proj_dict + + def get_prediction_type(self, is_eps: bool, model_config): + if not is_eps and "image_cross_attention_scale_learnable" in model_config.unet_config.keys(): + model_config.unet_config["image_cross_attention_scale_learnable"] = False + + return model_base.ModelType.EPS if is_eps else model_base.ModelType.V_PREDICTION + + def handle_model_management(self, dynamicrafter_dict: dict, model_config): + parameters = comfy.utils.calculate_parameters(dynamicrafter_dict, "model.diffusion_model.") + load_device = model_management.get_torch_device() + unet_dtype = model_management.unet_dtype( + model_params=parameters, + supported_dtypes=model_config.supported_inference_dtypes + ) + manual_cast_dtype = model_management.unet_manual_cast( + unet_dtype, + load_device, + model_config.supported_inference_dtypes + ) + model_config.set_inference_dtype(unet_dtype, manual_cast_dtype) + inital_load_device = model_management.unet_inital_load_device(parameters, unet_dtype) + offload_device = model_management.unet_offload_device() + + return load_device, inital_load_device + + def check_leftover_keys(self, state_dict: dict): + left_over = state_dict.keys() + if len(left_over) > 0: + print("left over keys:", left_over) + + def load_dynamicrafter(self, model_path): + + if os.path.exists(model_path): + dynamicrafter_dict, image_proj_dict = self.load_model_sicts(model_path) + model_config = DynamiCrafterBase(DYNAMICRAFTER_CONFIG) + + dynamicrafter_dict, is_eps = model_config.process_dict_version(state_dict=dynamicrafter_dict) + + MODEL_TYPE = self.get_prediction_type(is_eps, model_config) + load_device, inital_load_device = self.handle_model_management(dynamicrafter_dict, model_config) + + model = model_base.BaseModel( + model_config, + model_type=MODEL_TYPE, + device=inital_load_device, + unet_model=DynamiCrafterUNetModel + ) + + image_proj_model = get_image_proj_model(image_proj_dict) + model.load_model_weights(dynamicrafter_dict, "model.diffusion_model.") + self.check_leftover_keys(dynamicrafter_dict) + + model_patcher = comfy.model_patcher.ModelPatcher( + model, + load_device=load_device, + offload_device=model_management.unet_offload_device(), + current_device=inital_load_device + ) + + return (model_patcher, image_proj_model,) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/__init__.py b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/__init__.py new file mode 100644 
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/basics.py b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/basics.py new file mode 100644 index 0000000000000000000000000000000000000000..3fd14ad87e99175f94b1dcb350d8da0b30d6603a --- /dev/null +++ b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/basics.py @@ -0,0 +1,102 @@ +# adopted from +# https://github.com/openai/improved-diffusion/blob/main/improved_diffusion/gaussian_diffusion.py +# and +# https://github.com/lucidrains/denoising-diffusion-pytorch/blob/7706bdfc6f527f58d33f84b7b522e61e6e3164b3/denoising_diffusion_pytorch/denoising_diffusion_pytorch.py +# and +# https://github.com/openai/guided-diffusion/blob/0ba878e517b276c45d1195eb29f6f5f72659a05b/guided_diffusion/nn.py +# +# thanks! + +import torch.nn as nn +import comfy.ops +ops = comfy.ops.disable_weight_init + +from ..utils.utils import instantiate_from_config + +def disabled_train(self, mode=True): + """Overwrite model.train with this function to make sure train/eval mode + does not change anymore.""" + return self + +def zero_module(module): + """ + Zero out the parameters of a module and return it. + """ + for p in module.parameters(): + p.detach().zero_() + return module + +def scale_module(module, scale): + """ + Scale the parameters of a module and return it. + """ + for p in module.parameters(): + p.detach().mul_(scale) + return module + + +def conv_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D convolution module. + """ + if dims == 1: + return nn.Conv1d(*args, **kwargs) + elif dims == 2: + return ops.Conv2d(*args, **kwargs) + elif dims == 3: + return ops.Conv3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +def linear(*args, **kwargs): + """ + Create a linear module. + """ + return ops.Linear(*args, **kwargs) + + +def avg_pool_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D average pooling module. + """ + if dims == 1: + return nn.AvgPool1d(*args, **kwargs) + elif dims == 2: + return nn.AvgPool2d(*args, **kwargs) + elif dims == 3: + return nn.AvgPool3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +def nonlinearity(type='silu'): + if type == 'silu': + return nn.SiLU() + elif type == 'leaky_relu': + return nn.LeakyReLU() + + +class GroupNormSpecific(ops.GroupNorm): + def forward(self, x): + return super().forward(x.float()).type(x.dtype) + + +def normalization(channels, num_groups=32, dtype=None, device=None): + """ + Make a standard normalization layer. + :param channels: number of input channels. + :return: an nn.Module for normalization. 
+ """ + return GroupNormSpecific(num_groups, channels, dtype=dtype, device=device) + + +class HybridConditioner(nn.Module): + + def __init__(self, c_concat_config, c_crossattn_config): + super().__init__() + self.concat_conditioner = instantiate_from_config(c_concat_config) + self.crossattn_conditioner = instantiate_from_config(c_crossattn_config) + + def forward(self, c_concat, c_crossattn): + c_concat = self.concat_conditioner(c_concat) + c_crossattn = self.crossattn_conditioner(c_crossattn) + return {'c_concat': [c_concat], 'c_crossattn': [c_crossattn]} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/common.py b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/common.py new file mode 100644 index 0000000000000000000000000000000000000000..55a150b618e275f01d3a59ad9c7579176c4ea1b8 --- /dev/null +++ b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/common.py @@ -0,0 +1,94 @@ +import math +from inspect import isfunction +import torch +from torch import nn +import torch.distributed as dist + + +def gather_data(data, return_np=True): + ''' gather data from multiple processes to one list ''' + data_list = [torch.zeros_like(data) for _ in range(dist.get_world_size())] + dist.all_gather(data_list, data) # gather not supported with NCCL + if return_np: + data_list = [data.cpu().numpy() for data in data_list] + return data_list + +def autocast(f): + def do_autocast(*args, **kwargs): + with torch.cuda.amp.autocast(enabled=True, + dtype=torch.get_autocast_gpu_dtype(), + cache_enabled=torch.is_autocast_cache_enabled()): + return f(*args, **kwargs) + return do_autocast + + +def extract_into_tensor(a, t, x_shape): + b, *_ = t.shape + out = a.gather(-1, t) + return out.reshape(b, *((1,) * (len(x_shape) - 1))) + + +def noise_like(shape, device, repeat=False): + repeat_noise = lambda: torch.randn((1, *shape[1:]), device=device).repeat(shape[0], *((1,) * (len(shape) - 1))) + noise = lambda: torch.randn(shape, device=device) + return repeat_noise() if repeat else noise() + + +def default(val, d): + if exists(val): + return val + return d() if isfunction(d) else d + +def exists(val): + return val is not None + +def identity(*args, **kwargs): + return nn.Identity() + +def uniq(arr): + return{el: True for el in arr}.keys() + +def mean_flat(tensor): + """ + Take the mean over all non-batch dimensions. + """ + return tensor.mean(dim=list(range(1, len(tensor.shape)))) + +def ismap(x): + if not isinstance(x, torch.Tensor): + return False + return (len(x.shape) == 4) and (x.shape[1] > 3) + +def isimage(x): + if not isinstance(x,torch.Tensor): + return False + return (len(x.shape) == 4) and (x.shape[1] == 3 or x.shape[1] == 1) + +def max_neg_value(t): + return -torch.finfo(t.dtype).max + +def shape_to_str(x): + shape_str = "x".join([str(x) for x in x.shape]) + return shape_str + +def init_(tensor): + dim = tensor.shape[-1] + std = 1 / math.sqrt(dim) + tensor.uniform_(-std, std) + return tensor + +ckpt = torch.utils.checkpoint.checkpoint +def checkpoint(func, inputs, params, flag): + """ + Evaluate a function without caching intermediate activations, allowing for + reduced memory at the expense of extra compute in the backward pass. + :param func: the function to evaluate. + :param inputs: the argument sequence to pass to `func`. + :param params: a sequence of parameters `func` depends on but does not + explicitly take as arguments. + :param flag: if False, disable gradient checkpointing. 
+ """ + if flag: + return ckpt(func, *inputs, use_reentrant=False) + else: + return func(*inputs) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/distributions.py b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/distributions.py new file mode 100644 index 0000000000000000000000000000000000000000..9a2a82ecace3ce27fb7816ddaf088e179c2d5ffd --- /dev/null +++ b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/distributions.py @@ -0,0 +1,95 @@ +import torch +import numpy as np + + +class AbstractDistribution: + def sample(self): + raise NotImplementedError() + + def mode(self): + raise NotImplementedError() + + +class DiracDistribution(AbstractDistribution): + def __init__(self, value): + self.value = value + + def sample(self): + return self.value + + def mode(self): + return self.value + + +class DiagonalGaussianDistribution(object): + def __init__(self, parameters, deterministic=False): + self.parameters = parameters + self.mean, self.logvar = torch.chunk(parameters, 2, dim=1) + self.logvar = torch.clamp(self.logvar, -30.0, 20.0) + self.deterministic = deterministic + self.std = torch.exp(0.5 * self.logvar) + self.var = torch.exp(self.logvar) + if self.deterministic: + self.var = self.std = torch.zeros_like(self.mean).to(device=self.parameters.device) + + def sample(self, noise=None): + if noise is None: + noise = torch.randn(self.mean.shape) + + x = self.mean + self.std * noise.to(device=self.parameters.device) + return x + + def kl(self, other=None): + if self.deterministic: + return torch.Tensor([0.]) + else: + if other is None: + return 0.5 * torch.sum(torch.pow(self.mean, 2) + + self.var - 1.0 - self.logvar, + dim=[1, 2, 3]) + else: + return 0.5 * torch.sum( + torch.pow(self.mean - other.mean, 2) / other.var + + self.var / other.var - 1.0 - self.logvar + other.logvar, + dim=[1, 2, 3]) + + def nll(self, sample, dims=[1,2,3]): + if self.deterministic: + return torch.Tensor([0.]) + logtwopi = np.log(2.0 * np.pi) + return 0.5 * torch.sum( + logtwopi + self.logvar + torch.pow(sample - self.mean, 2) / self.var, + dim=dims) + + def mode(self): + return self.mean + + +def normal_kl(mean1, logvar1, mean2, logvar2): + """ + source: https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/losses.py#L12 + Compute the KL divergence between two gaussians. + Shapes are automatically broadcasted, so batches can be compared to + scalars, among other use cases. + """ + tensor = None + for obj in (mean1, logvar1, mean2, logvar2): + if isinstance(obj, torch.Tensor): + tensor = obj + break + assert tensor is not None, "at least one argument must be a Tensor" + + # Force variances to be Tensors. Broadcasting helps convert scalars to + # Tensors, but it does not work for torch.exp(). 
+ logvar1, logvar2 = [ + x if isinstance(x, torch.Tensor) else torch.tensor(x).to(tensor) + for x in (logvar1, logvar2) + ] + + return 0.5 * ( + -1.0 + + logvar2 + - logvar1 + + torch.exp(logvar1 - logvar2) + + ((mean1 - mean2) ** 2) * torch.exp(-logvar2) + ) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/ema.py b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/ema.py new file mode 100644 index 0000000000000000000000000000000000000000..cd2f8e3115f816b4cac674397238cd8c22de9bc2 --- /dev/null +++ b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/ema.py @@ -0,0 +1,76 @@ +import torch +from torch import nn + + +class LitEma(nn.Module): + def __init__(self, model, decay=0.9999, use_num_upates=True): + super().__init__() + if decay < 0.0 or decay > 1.0: + raise ValueError('Decay must be between 0 and 1') + + self.m_name2s_name = {} + self.register_buffer('decay', torch.tensor(decay, dtype=torch.float32)) + self.register_buffer('num_updates', torch.tensor(0,dtype=torch.int) if use_num_upates + else torch.tensor(-1,dtype=torch.int)) + + for name, p in model.named_parameters(): + if p.requires_grad: + #remove as '.'-character is not allowed in buffers + s_name = name.replace('.','') + self.m_name2s_name.update({name:s_name}) + self.register_buffer(s_name,p.clone().detach().data) + + self.collected_params = [] + + def forward(self,model): + decay = self.decay + + if self.num_updates >= 0: + self.num_updates += 1 + decay = min(self.decay,(1 + self.num_updates) / (10 + self.num_updates)) + + one_minus_decay = 1.0 - decay + + with torch.no_grad(): + m_param = dict(model.named_parameters()) + shadow_params = dict(self.named_buffers()) + + for key in m_param: + if m_param[key].requires_grad: + sname = self.m_name2s_name[key] + shadow_params[sname] = shadow_params[sname].type_as(m_param[key]) + shadow_params[sname].sub_(one_minus_decay * (shadow_params[sname] - m_param[key])) + else: + assert not key in self.m_name2s_name + + def copy_to(self, model): + m_param = dict(model.named_parameters()) + shadow_params = dict(self.named_buffers()) + for key in m_param: + if m_param[key].requires_grad: + m_param[key].data.copy_(shadow_params[self.m_name2s_name[key]].data) + else: + assert not key in self.m_name2s_name + + def store(self, parameters): + """ + Save the current parameters for restoring later. + Args: + parameters: Iterable of `torch.nn.Parameter`; the parameters to be + temporarily stored. + """ + self.collected_params = [param.clone() for param in parameters] + + def restore(self, parameters): + """ + Restore the parameters stored with the `store` method. + Useful to validate the model with EMA parameters without affecting the + original optimization process. Store the parameters before the + `copy_to` method. After validation (or model saving), use this to + restore the former parameters. + Args: + parameters: Iterable of `torch.nn.Parameter`; the parameters to be + updated with the stored parameters. 
+ """ + for c_param, param in zip(self.collected_params, parameters): + param.data.copy_(c_param.data) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/autoencoder.py b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/autoencoder.py new file mode 100644 index 0000000000000000000000000000000000000000..cfa86e9035f58f025cefe9b4632129ce388c6581 --- /dev/null +++ b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/autoencoder.py @@ -0,0 +1,219 @@ +import os +from contextlib import contextmanager +import torch +import numpy as np +from einops import rearrange +import torch.nn.functional as F +import pytorch_lightning as pl +from ...modules.networks.ae_modules import Encoder, Decoder +from ...distributions import DiagonalGaussianDistribution +from utils.utils import instantiate_from_config + + +class AutoencoderKL(pl.LightningModule): + def __init__(self, + ddconfig, + lossconfig, + embed_dim, + ckpt_path=None, + ignore_keys=[], + image_key="image", + colorize_nlabels=None, + monitor=None, + test=False, + logdir=None, + input_dim=4, + test_args=None, + ): + super().__init__() + self.image_key = image_key + self.encoder = Encoder(**ddconfig) + self.decoder = Decoder(**ddconfig) + self.loss = instantiate_from_config(lossconfig) + assert ddconfig["double_z"] + self.quant_conv = torch.nn.Conv2d(2*ddconfig["z_channels"], 2*embed_dim, 1) + self.post_quant_conv = torch.nn.Conv2d(embed_dim, ddconfig["z_channels"], 1) + self.embed_dim = embed_dim + self.input_dim = input_dim + self.test = test + self.test_args = test_args + self.logdir = logdir + if colorize_nlabels is not None: + assert type(colorize_nlabels)==int + self.register_buffer("colorize", torch.randn(3, colorize_nlabels, 1, 1)) + if monitor is not None: + self.monitor = monitor + if ckpt_path is not None: + self.init_from_ckpt(ckpt_path, ignore_keys=ignore_keys) + if self.test: + self.init_test() + + def init_test(self,): + self.test = True + save_dir = os.path.join(self.logdir, "test") + if 'ckpt' in self.test_args: + ckpt_name = os.path.basename(self.test_args.ckpt).split('.ckpt')[0] + f'_epoch{self._cur_epoch}' + self.root = os.path.join(save_dir, ckpt_name) + else: + self.root = save_dir + if 'test_subdir' in self.test_args: + self.root = os.path.join(save_dir, self.test_args.test_subdir) + + self.root_zs = os.path.join(self.root, "zs") + self.root_dec = os.path.join(self.root, "reconstructions") + self.root_inputs = os.path.join(self.root, "inputs") + os.makedirs(self.root, exist_ok=True) + + if self.test_args.save_z: + os.makedirs(self.root_zs, exist_ok=True) + if self.test_args.save_reconstruction: + os.makedirs(self.root_dec, exist_ok=True) + if self.test_args.save_input: + os.makedirs(self.root_inputs, exist_ok=True) + assert(self.test_args is not None) + self.test_maximum = getattr(self.test_args, 'test_maximum', None) + self.count = 0 + self.eval_metrics = {} + self.decodes = [] + self.save_decode_samples = 2048 + + def init_from_ckpt(self, path, ignore_keys=list()): + sd = torch.load(path, map_location="cpu") + try: + self._cur_epoch = sd['epoch'] + sd = sd["state_dict"] + except: + self._cur_epoch = 'null' + keys = list(sd.keys()) + for k in keys: + for ik in ignore_keys: + if k.startswith(ik): + print("Deleting key {} from state_dict.".format(k)) + del sd[k] + self.load_state_dict(sd, strict=False) + # self.load_state_dict(sd, strict=True) + print(f"Restored from {path}") + + def encode(self, x, **kwargs): + + h = self.encoder(x) + moments = self.quant_conv(h) + posterior = 
DiagonalGaussianDistribution(moments) + return posterior + + def decode(self, z, **kwargs): + z = self.post_quant_conv(z) + dec = self.decoder(z) + return dec + + def forward(self, input, sample_posterior=True): + posterior = self.encode(input) + if sample_posterior: + z = posterior.sample() + else: + z = posterior.mode() + dec = self.decode(z) + return dec, posterior + + def get_input(self, batch, k): + x = batch[k] + if x.dim() == 5 and self.input_dim == 4: + b,c,t,h,w = x.shape + self.b = b + self.t = t + x = rearrange(x, 'b c t h w -> (b t) c h w') + + return x + + def training_step(self, batch, batch_idx, optimizer_idx): + inputs = self.get_input(batch, self.image_key) + reconstructions, posterior = self(inputs) + + if optimizer_idx == 0: + # train encoder+decoder+logvar + aeloss, log_dict_ae = self.loss(inputs, reconstructions, posterior, optimizer_idx, self.global_step, + last_layer=self.get_last_layer(), split="train") + self.log("aeloss", aeloss, prog_bar=True, logger=True, on_step=True, on_epoch=True) + self.log_dict(log_dict_ae, prog_bar=False, logger=True, on_step=True, on_epoch=False) + return aeloss + + if optimizer_idx == 1: + # train the discriminator + discloss, log_dict_disc = self.loss(inputs, reconstructions, posterior, optimizer_idx, self.global_step, + last_layer=self.get_last_layer(), split="train") + + self.log("discloss", discloss, prog_bar=True, logger=True, on_step=True, on_epoch=True) + self.log_dict(log_dict_disc, prog_bar=False, logger=True, on_step=True, on_epoch=False) + return discloss + + def validation_step(self, batch, batch_idx): + inputs = self.get_input(batch, self.image_key) + reconstructions, posterior = self(inputs) + aeloss, log_dict_ae = self.loss(inputs, reconstructions, posterior, 0, self.global_step, + last_layer=self.get_last_layer(), split="val") + + discloss, log_dict_disc = self.loss(inputs, reconstructions, posterior, 1, self.global_step, + last_layer=self.get_last_layer(), split="val") + + self.log("val/rec_loss", log_dict_ae["val/rec_loss"]) + self.log_dict(log_dict_ae) + self.log_dict(log_dict_disc) + return self.log_dict + + def configure_optimizers(self): + lr = self.learning_rate + opt_ae = torch.optim.Adam(list(self.encoder.parameters())+ + list(self.decoder.parameters())+ + list(self.quant_conv.parameters())+ + list(self.post_quant_conv.parameters()), + lr=lr, betas=(0.5, 0.9)) + opt_disc = torch.optim.Adam(self.loss.discriminator.parameters(), + lr=lr, betas=(0.5, 0.9)) + return [opt_ae, opt_disc], [] + + def get_last_layer(self): + return self.decoder.conv_out.weight + + @torch.no_grad() + def log_images(self, batch, only_inputs=False, **kwargs): + log = dict() + x = self.get_input(batch, self.image_key) + x = x.to(self.device) + if not only_inputs: + xrec, posterior = self(x) + if x.shape[1] > 3: + # colorize with random projection + assert xrec.shape[1] > 3 + x = self.to_rgb(x) + xrec = self.to_rgb(xrec) + log["samples"] = self.decode(torch.randn_like(posterior.sample())) + log["reconstructions"] = xrec + log["inputs"] = x + return log + + def to_rgb(self, x): + assert self.image_key == "segmentation" + if not hasattr(self, "colorize"): + self.register_buffer("colorize", torch.randn(3, x.shape[1], 1, 1).to(x)) + x = F.conv2d(x, weight=self.colorize) + x = 2.*(x-x.min())/(x.max()-x.min()) - 1. 
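# (editor's note) The line above maps the randomly-projected "colorized" tensor into the
# [-1, 1] range with a per-tensor min/max normalization before it is returned for logging.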
+ return x + +class IdentityFirstStage(torch.nn.Module): + def __init__(self, *args, vq_interface=False, **kwargs): + self.vq_interface = vq_interface # TODO: Should be true by default but check to not break older stuff + super().__init__() + + def encode(self, x, *args, **kwargs): + return x + + def decode(self, x, *args, **kwargs): + return x + + def quantize(self, x, *args, **kwargs): + if self.vq_interface: + return x, None, [None, None, None] + return x + + def forward(self, x, *args, **kwargs): + return x \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/ddpm3d.py b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/ddpm3d.py new file mode 100644 index 0000000000000000000000000000000000000000..a126ed94cbb783727a3e428556728c0b1ec724be --- /dev/null +++ b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/ddpm3d.py @@ -0,0 +1,762 @@ +""" +wild mixture of +https://github.com/openai/improved-diffusion/blob/e94489283bb876ac1477d5dd7709bbbd2d9902ce/improved_diffusion/gaussian_diffusion.py +https://github.com/lucidrains/denoising-diffusion-pytorch/blob/7706bdfc6f527f58d33f84b7b522e61e6e3164b3/denoising_diffusion_pytorch/denoising_diffusion_pytorch.py +https://github.com/CompVis/taming-transformers +-- merci +""" + +from functools import partial +from contextlib import contextmanager +import numpy as np +from tqdm import tqdm +from einops import rearrange, repeat +import logging +mainlogger = logging.getLogger('mainlogger') +import torch +import torch.nn as nn +from torchvision.utils import make_grid + +from ...utils.utils import instantiate_from_config +from ..ema import LitEma +from ..distributions import DiagonalGaussianDistribution +from ..models.utils_diffusion import make_beta_schedule, rescale_zero_terminal_snr +from ..basics import disabled_train +from ..common import ( + extract_into_tensor, + noise_like, + exists, + default +) + +__conditioning_keys__ = {'concat': 'c_concat', + 'crossattn': 'c_crossattn', + 'adm': 'y'} + +class DDPM(nn.Module): + # classic DDPM with Gaussian diffusion, in image space + def __init__(self, + unet_config, + timesteps=1000, + beta_schedule="linear", + loss_type="l2", + ckpt_path=None, + ignore_keys=[], + load_only_unet=False, + monitor=None, + use_ema=True, + first_stage_key="image", + image_size=256, + channels=3, + log_every_t=100, + clip_denoised=True, + linear_start=1e-4, + linear_end=2e-2, + cosine_s=8e-3, + given_betas=None, + original_elbo_weight=0., + v_posterior=0., # weight for choosing posterior variance as sigma = (1-v) * beta_tilde + v * beta + l_simple_weight=1., + conditioning_key=None, + parameterization="eps", # all assuming fixed variance schedules + scheduler_config=None, + use_positional_encodings=False, + learn_logvar=False, + logvar_init=0., + rescale_betas_zero_snr=False, + ): + super().__init__() + assert parameterization in ["eps", "x0", "v"], 'currently only supporting "eps" and "x0" and "v"' + self.parameterization = parameterization + mainlogger.info(f"{self.__class__.__name__}: Running in {self.parameterization}-prediction mode") + self.cond_stage_model = None + self.clip_denoised = clip_denoised + self.log_every_t = log_every_t + self.first_stage_key = first_stage_key + self.channels = channels + self.temporal_length = unet_config.params.temporal_length + self.image_size = image_size # try conv? 
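# (editor's note) A toy sketch of the schedule quantities that register_schedule() (defined
# further below) precomputes and registers as buffers. The "linear" schedule in the
# CompVis-lineage make_beta_schedule is linear in sqrt(beta); that detail is assumed here
# for illustration, and the numbers are arbitrary.
import numpy as np

def toy_schedule(timesteps=1000, linear_start=1e-4, linear_end=2e-2):
    betas = np.linspace(linear_start ** 0.5, linear_end ** 0.5, timesteps) ** 2
    alphas_cumprod = np.cumprod(1.0 - betas)
    return {
        "betas": betas,
        "sqrt_alphas_cumprod": np.sqrt(alphas_cumprod),                   # scales x_0 in q(x_t | x_0)
        "sqrt_one_minus_alphas_cumprod": np.sqrt(1.0 - alphas_cumprod),   # scales the injected noise
    }

sched = toy_schedule()
print(sched["sqrt_alphas_cumprod"][[0, 499, 999]])  # decays from ~1 toward ~0 over the chain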
+ if isinstance(self.image_size, int): + self.image_size = [self.image_size, self.image_size] + self.use_positional_encodings = use_positional_encodings + self.model = DiffusionWrapper(unet_config, conditioning_key) + #count_params(self.model, verbose=True) + self.use_ema = use_ema + self.rescale_betas_zero_snr = rescale_betas_zero_snr + if self.use_ema: + self.model_ema = LitEma(self.model) + mainlogger.info(f"Keeping EMAs of {len(list(self.model_ema.buffers()))}.") + + self.use_scheduler = scheduler_config is not None + if self.use_scheduler: + self.scheduler_config = scheduler_config + + self.v_posterior = v_posterior + self.original_elbo_weight = original_elbo_weight + self.l_simple_weight = l_simple_weight + + if monitor is not None: + self.monitor = monitor + if ckpt_path is not None: + self.init_from_ckpt(ckpt_path, ignore_keys=ignore_keys, only_model=load_only_unet) + + self.register_schedule(given_betas=given_betas, beta_schedule=beta_schedule, timesteps=timesteps, + linear_start=linear_start, linear_end=linear_end, cosine_s=cosine_s) + + self.loss_type = loss_type + + self.learn_logvar = learn_logvar + self.logvar = torch.full(fill_value=logvar_init, size=(self.num_timesteps,)) + if self.learn_logvar: + self.logvar = nn.Parameter(self.logvar, requires_grad=True) + + def register_schedule(self, given_betas=None, beta_schedule="linear", timesteps=1000, + linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3): + if exists(given_betas): + betas = given_betas + else: + betas = make_beta_schedule(beta_schedule, timesteps, linear_start=linear_start, linear_end=linear_end, + cosine_s=cosine_s) + if self.rescale_betas_zero_snr: + betas = rescale_zero_terminal_snr(betas) + + alphas = 1. - betas + alphas_cumprod = np.cumprod(alphas, axis=0) + alphas_cumprod_prev = np.append(1., alphas_cumprod[:-1]) + + timesteps, = betas.shape + self.num_timesteps = int(timesteps) + self.linear_start = linear_start + self.linear_end = linear_end + assert alphas_cumprod.shape[0] == self.num_timesteps, 'alphas have to be defined for each timestep' + + to_torch = partial(torch.tensor, dtype=torch.float32) + + self.register_buffer('betas', to_torch(betas)) + self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod)) + self.register_buffer('alphas_cumprod_prev', to_torch(alphas_cumprod_prev)) + + # calculations for diffusion q(x_t | x_{t-1}) and others + self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod))) + self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod))) + self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod))) + + if self.parameterization != 'v': + self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod))) + self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod - 1))) + else: + self.register_buffer('sqrt_recip_alphas_cumprod', torch.zeros_like(to_torch(alphas_cumprod))) + self.register_buffer('sqrt_recipm1_alphas_cumprod', torch.zeros_like(to_torch(alphas_cumprod))) + + # calculations for posterior q(x_{t-1} | x_t, x_0) + posterior_variance = (1 - self.v_posterior) * betas * (1. - alphas_cumprod_prev) / ( + 1. - alphas_cumprod) + self.v_posterior * betas + # above: equal to 1. / (1. / (1. 
- alpha_cumprod_tm1) + alpha_t / beta_t) + self.register_buffer('posterior_variance', to_torch(posterior_variance)) + # below: log calculation clipped because the posterior variance is 0 at the beginning of the diffusion chain + self.register_buffer('posterior_log_variance_clipped', to_torch(np.log(np.maximum(posterior_variance, 1e-20)))) + self.register_buffer('posterior_mean_coef1', to_torch( + betas * np.sqrt(alphas_cumprod_prev) / (1. - alphas_cumprod))) + self.register_buffer('posterior_mean_coef2', to_torch( + (1. - alphas_cumprod_prev) * np.sqrt(alphas) / (1. - alphas_cumprod))) + + if self.parameterization == "eps": + lvlb_weights = self.betas ** 2 / ( + 2 * self.posterior_variance * to_torch(alphas) * (1 - self.alphas_cumprod)) + elif self.parameterization == "x0": + lvlb_weights = 0.5 * np.sqrt(torch.Tensor(alphas_cumprod)) / (2. * 1 - torch.Tensor(alphas_cumprod)) + elif self.parameterization == "v": + lvlb_weights = torch.ones_like(self.betas ** 2 / ( + 2 * self.posterior_variance * to_torch(alphas) * (1 - self.alphas_cumprod))) + else: + raise NotImplementedError("mu not supported") + # TODO how to choose this term + lvlb_weights[0] = lvlb_weights[1] + self.register_buffer('lvlb_weights', lvlb_weights, persistent=False) + assert not torch.isnan(self.lvlb_weights).all() + + @contextmanager + def ema_scope(self, context=None): + if self.use_ema: + self.model_ema.store(self.model.parameters()) + self.model_ema.copy_to(self.model) + if context is not None: + mainlogger.info(f"{context}: Switched to EMA weights") + try: + yield None + finally: + if self.use_ema: + self.model_ema.restore(self.model.parameters()) + if context is not None: + mainlogger.info(f"{context}: Restored training weights") + + def init_from_ckpt(self, path, ignore_keys=list(), only_model=False): + sd = torch.load(path, map_location="cpu") + if "state_dict" in list(sd.keys()): + sd = sd["state_dict"] + keys = list(sd.keys()) + for k in keys: + for ik in ignore_keys: + if k.startswith(ik): + mainlogger.info("Deleting key {} from state_dict.".format(k)) + del sd[k] + missing, unexpected = self.load_state_dict(sd, strict=False) if not only_model else self.model.load_state_dict( + sd, strict=False) + mainlogger.info(f"Restored from {path} with {len(missing)} missing and {len(unexpected)} unexpected keys") + if len(missing) > 0: + mainlogger.info(f"Missing Keys: {missing}") + if len(unexpected) > 0: + mainlogger.info(f"Unexpected Keys: {unexpected}") + + def q_mean_variance(self, x_start, t): + """ + Get the distribution q(x_t | x_0). + :param x_start: the [N x C x ...] tensor of noiseless inputs. + :param t: the number of diffusion steps (minus 1). Here, 0 means one step. + :return: A tuple (mean, variance, log_variance), all of x_start's shape. + """ + mean = (extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start) + variance = extract_into_tensor(1.0 - self.alphas_cumprod, t, x_start.shape) + log_variance = extract_into_tensor(self.log_one_minus_alphas_cumprod, t, x_start.shape) + return mean, variance, log_variance + + def predict_start_from_noise(self, x_t, t, noise): + return ( + extract_into_tensor(self.sqrt_recip_alphas_cumprod, t, x_t.shape) * x_t - + extract_into_tensor(self.sqrt_recipm1_alphas_cumprod, t, x_t.shape) * noise + ) + + def predict_start_from_z_and_v(self, x_t, t, v): + # self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod))) + # self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. 
- alphas_cumprod))) + return ( + extract_into_tensor(self.sqrt_alphas_cumprod, t, x_t.shape) * x_t - + extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_t.shape) * v + ) + + def predict_eps_from_z_and_v(self, x_t, t, v): + return ( + extract_into_tensor(self.sqrt_alphas_cumprod, t, x_t.shape) * v + + extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_t.shape) * x_t + ) + + def q_posterior(self, x_start, x_t, t): + posterior_mean = ( + extract_into_tensor(self.posterior_mean_coef1, t, x_t.shape) * x_start + + extract_into_tensor(self.posterior_mean_coef2, t, x_t.shape) * x_t + ) + posterior_variance = extract_into_tensor(self.posterior_variance, t, x_t.shape) + posterior_log_variance_clipped = extract_into_tensor(self.posterior_log_variance_clipped, t, x_t.shape) + return posterior_mean, posterior_variance, posterior_log_variance_clipped + + def p_mean_variance(self, x, t, clip_denoised: bool): + model_out = self.model(x, t) + if self.parameterization == "eps": + x_recon = self.predict_start_from_noise(x, t=t, noise=model_out) + elif self.parameterization == "x0": + x_recon = model_out + if clip_denoised: + x_recon.clamp_(-1., 1.) + + model_mean, posterior_variance, posterior_log_variance = self.q_posterior(x_start=x_recon, x_t=x, t=t) + return model_mean, posterior_variance, posterior_log_variance + + @torch.no_grad() + def p_sample(self, x, t, clip_denoised=True, repeat_noise=False): + b, *_, device = *x.shape, x.device + model_mean, _, model_log_variance = self.p_mean_variance(x=x, t=t, clip_denoised=clip_denoised) + noise = noise_like(x.shape, device, repeat_noise) + # no noise when t == 0 + nonzero_mask = (1 - (t == 0).float()).reshape(b, *((1,) * (len(x.shape) - 1))) + return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise + + @torch.no_grad() + def p_sample_loop(self, shape, return_intermediates=False): + device = self.betas.device + b = shape[0] + img = torch.randn(shape, device=device) + intermediates = [img] + for i in tqdm(reversed(range(0, self.num_timesteps)), desc='Sampling t', total=self.num_timesteps): + img = self.p_sample(img, torch.full((b,), i, device=device, dtype=torch.long), + clip_denoised=self.clip_denoised) + if i % self.log_every_t == 0 or i == self.num_timesteps - 1: + intermediates.append(img) + if return_intermediates: + return img, intermediates + return img + + @torch.no_grad() + def sample(self, batch_size=16, return_intermediates=False): + image_size = self.image_size + channels = self.channels + return self.p_sample_loop((batch_size, channels, image_size, image_size), + return_intermediates=return_intermediates) + + def q_sample(self, x_start, t, noise=None): + noise = default(noise, lambda: torch.randn_like(x_start)) + return (extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start + + extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape) * noise) + + def get_v(self, x, noise, t): + return ( + extract_into_tensor(self.sqrt_alphas_cumprod, t, x.shape) * noise - + extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x.shape) * x + ) + + def get_input(self, batch, k): + x = batch[k] + x = x.to(memory_format=torch.contiguous_format).float() + return x + + def _get_rows_from_list(self, samples): + n_imgs_per_row = len(samples) + denoise_grid = rearrange(samples, 'n b c h w -> b n c h w') + denoise_grid = rearrange(denoise_grid, 'b n c h w -> (b n) c h w') + denoise_grid = make_grid(denoise_grid, nrow=n_imgs_per_row) + return denoise_grid + + @torch.no_grad() + def 
log_images(self, batch, N=8, n_row=2, sample=True, return_keys=None, **kwargs): + log = dict() + x = self.get_input(batch, self.first_stage_key) + N = min(x.shape[0], N) + n_row = min(x.shape[0], n_row) + x = x.to(self.device)[:N] + log["inputs"] = x + + # get diffusion row + diffusion_row = list() + x_start = x[:n_row] + + for t in range(self.num_timesteps): + if t % self.log_every_t == 0 or t == self.num_timesteps - 1: + t = repeat(torch.tensor([t]), '1 -> b', b=n_row) + t = t.to(self.device).long() + noise = torch.randn_like(x_start) + x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise) + diffusion_row.append(x_noisy) + + log["diffusion_row"] = self._get_rows_from_list(diffusion_row) + + if sample: + # get denoise row + with self.ema_scope("Plotting"): + samples, denoise_row = self.sample(batch_size=N, return_intermediates=True) + + log["samples"] = samples + log["denoise_row"] = self._get_rows_from_list(denoise_row) + + if return_keys: + if np.intersect1d(list(log.keys()), return_keys).shape[0] == 0: + return log + else: + return {key: log[key] for key in return_keys} + return log + + +class LatentDiffusion(DDPM): + """main class""" + def __init__(self, + first_stage_config, + cond_stage_config, + num_timesteps_cond=None, + cond_stage_key="caption", + cond_stage_trainable=False, + cond_stage_forward=None, + conditioning_key=None, + uncond_prob=0.2, + uncond_type="empty_seq", + scale_factor=1.0, + scale_by_std=False, + encoder_type="2d", + only_model=False, + noise_strength=0, + use_dynamic_rescale=False, + base_scale=0.7, + turning_step=400, + loop_video=False, + fps_condition_type='fs', + perframe_ae=False, + *args, **kwargs): + self.num_timesteps_cond = default(num_timesteps_cond, 1) + self.scale_by_std = scale_by_std + assert self.num_timesteps_cond <= kwargs['timesteps'] + # for backwards compatibility after implementation of DiffusionWrapper + ckpt_path = kwargs.pop("ckpt_path", None) + ignore_keys = kwargs.pop("ignore_keys", []) + conditioning_key = default(conditioning_key, 'crossattn') + super().__init__(conditioning_key=conditioning_key, *args, **kwargs) + + self.cond_stage_trainable = cond_stage_trainable + self.cond_stage_key = cond_stage_key + self.noise_strength = noise_strength + self.use_dynamic_rescale = use_dynamic_rescale + self.loop_video = loop_video + self.fps_condition_type = fps_condition_type + self.perframe_ae = perframe_ae + try: + self.num_downs = len(first_stage_config.params.ddconfig.ch_mult) - 1 + except: + self.num_downs = 0 + if not scale_by_std: + self.scale_factor = scale_factor + else: + self.register_buffer('scale_factor', torch.tensor(scale_factor)) + + if use_dynamic_rescale: + scale_arr1 = np.linspace(1.0, base_scale, turning_step) + scale_arr2 = np.full(self.num_timesteps, base_scale) + scale_arr = np.concatenate((scale_arr1, scale_arr2)) + to_torch = partial(torch.tensor, dtype=torch.float32) + self.register_buffer('scale_arr', to_torch(scale_arr)) + + self.instantiate_first_stage(first_stage_config) + self.instantiate_cond_stage(cond_stage_config) + self.first_stage_config = first_stage_config + self.cond_stage_config = cond_stage_config + self.clip_denoised = False + + self.cond_stage_forward = cond_stage_forward + self.encoder_type = encoder_type + assert(encoder_type in ["2d", "3d"]) + self.uncond_prob = uncond_prob + self.classifier_free_guidance = True if uncond_prob > 0 else False + assert(uncond_type in ["zero_embed", "empty_seq"]) + self.uncond_type = uncond_type + + self.restarted_from_ckpt = False + if ckpt_path is not None: + 
self.init_from_ckpt(ckpt_path, ignore_keys, only_model=only_model) + self.restarted_from_ckpt = True + + + def make_cond_schedule(self, ): + self.cond_ids = torch.full(size=(self.num_timesteps,), fill_value=self.num_timesteps - 1, dtype=torch.long) + ids = torch.round(torch.linspace(0, self.num_timesteps - 1, self.num_timesteps_cond)).long() + self.cond_ids[:self.num_timesteps_cond] = ids + + def instantiate_first_stage(self, config): + model = instantiate_from_config(config) + self.first_stage_model = model.eval() + self.first_stage_model.train = disabled_train + for param in self.first_stage_model.parameters(): + param.requires_grad = False + + def instantiate_cond_stage(self, config): + if not self.cond_stage_trainable: + model = instantiate_from_config(config) + self.cond_stage_model = model.eval() + self.cond_stage_model.train = disabled_train + for param in self.cond_stage_model.parameters(): + param.requires_grad = False + else: + model = instantiate_from_config(config) + self.cond_stage_model = model + + def get_learned_conditioning(self, c): + if self.cond_stage_forward is None: + if hasattr(self.cond_stage_model, 'encode') and callable(self.cond_stage_model.encode): + c = self.cond_stage_model.encode(c) + if isinstance(c, DiagonalGaussianDistribution): + c = c.mode() + else: + c = self.cond_stage_model(c) + else: + assert hasattr(self.cond_stage_model, self.cond_stage_forward) + c = getattr(self.cond_stage_model, self.cond_stage_forward)(c) + return c + + def get_first_stage_encoding(self, encoder_posterior, noise=None): + if isinstance(encoder_posterior, DiagonalGaussianDistribution): + z = encoder_posterior.sample(noise=noise) + elif isinstance(encoder_posterior, torch.Tensor): + z = encoder_posterior + else: + raise NotImplementedError(f"encoder_posterior of type '{type(encoder_posterior)}' not yet implemented") + return self.scale_factor * z + + @torch.no_grad() + def encode_first_stage(self, x): + if self.encoder_type == "2d" and x.dim() == 5: + b, _, t, _, _ = x.shape + x = rearrange(x, 'b c t h w -> (b t) c h w') + reshape_back = True + else: + reshape_back = False + + ## consume more GPU memory but faster + if not self.perframe_ae: + encoder_posterior = self.first_stage_model.encode(x) + results = self.get_first_stage_encoding(encoder_posterior).detach() + else: ## consume less GPU memory but slower + results = [] + for index in range(x.shape[0]): + frame_batch = self.first_stage_model.encode(x[index:index+1,:,:,:]) + frame_result = self.get_first_stage_encoding(frame_batch).detach() + results.append(frame_result) + results = torch.cat(results, dim=0) + + if reshape_back: + results = rearrange(results, '(b t) c h w -> b c t h w', b=b,t=t) + + return results + + def decode_core(self, z, **kwargs): + if self.encoder_type == "2d" and z.dim() == 5: + b, _, t, _, _ = z.shape + z = rearrange(z, 'b c t h w -> (b t) c h w') + reshape_back = True + else: + reshape_back = False + + if not self.perframe_ae: + z = 1. / self.scale_factor * z + results = self.first_stage_model.decode(z, **kwargs) + else: + results = [] + for index in range(z.shape[0]): + frame_z = 1. 
/ self.scale_factor * z[index:index+1,:,:,:] + frame_result = self.first_stage_model.decode(frame_z, **kwargs) + results.append(frame_result) + results = torch.cat(results, dim=0) + + if reshape_back: + results = rearrange(results, '(b t) c h w -> b c t h w', b=b,t=t) + return results + + @torch.no_grad() + def decode_first_stage(self, z, **kwargs): + return self.decode_core(z, **kwargs) + + # same as above but without decorator + def differentiable_decode_first_stage(self, z, **kwargs): + return self.decode_core(z, **kwargs) + + def forward(self, x, c, **kwargs): + t = torch.randint(0, self.num_timesteps, (x.shape[0],), device=self.device).long() + if self.use_dynamic_rescale: + x = x * extract_into_tensor(self.scale_arr, t, x.shape) + return self.p_losses(x, c, t, **kwargs) + + def apply_model(self, x_noisy, t, cond, **kwargs): + if isinstance(cond, dict): + # hybrid case, cond is exptected to be a dict + pass + else: + if not isinstance(cond, list): + cond = [cond] + key = 'c_concat' if self.model.conditioning_key == 'concat' else 'c_crossattn' + cond = {key: cond} + + x_recon = self.model(x_noisy, t, **cond, **kwargs) + + if isinstance(x_recon, tuple): + return x_recon[0] + else: + return x_recon + + def _get_denoise_row_from_list(self, samples, desc=''): + denoise_row = [] + for zd in tqdm(samples, desc=desc): + denoise_row.append(self.decode_first_stage(zd.to(self.device))) + n_log_timesteps = len(denoise_row) + + denoise_row = torch.stack(denoise_row) # n_log_timesteps, b, C, H, W + + if denoise_row.dim() == 5: + denoise_grid = rearrange(denoise_row, 'n b c h w -> b n c h w') + denoise_grid = rearrange(denoise_grid, 'b n c h w -> (b n) c h w') + denoise_grid = make_grid(denoise_grid, nrow=n_log_timesteps) + elif denoise_row.dim() == 6: + # video, grid_size=[n_log_timesteps*bs, t] + video_length = denoise_row.shape[3] + denoise_grid = rearrange(denoise_row, 'n b c t h w -> b n c t h w') + denoise_grid = rearrange(denoise_grid, 'b n c t h w -> (b n) c t h w') + denoise_grid = rearrange(denoise_grid, 'n c t h w -> (n t) c h w') + denoise_grid = make_grid(denoise_grid, nrow=video_length) + else: + raise ValueError + + return denoise_grid + + + def p_mean_variance(self, x, c, t, clip_denoised: bool, return_x0=False, score_corrector=None, corrector_kwargs=None, **kwargs): + t_in = t + model_out = self.apply_model(x, t_in, c, **kwargs) + + if score_corrector is not None: + assert self.parameterization == "eps" + model_out = score_corrector.modify_score(self, model_out, x, t, c, **corrector_kwargs) + + if self.parameterization == "eps": + x_recon = self.predict_start_from_noise(x, t=t, noise=model_out) + elif self.parameterization == "x0": + x_recon = model_out + else: + raise NotImplementedError() + + if clip_denoised: + x_recon.clamp_(-1., 1.) 
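# (editor's note) q_posterior (called just below) returns the Gaussian posterior
# q(x_{t-1} | x_t, x_0):
#   mean = posterior_mean_coef1[t] * x_0 + posterior_mean_coef2[t] * x_t
#   var  = posterior_variance[t]  (i.e. beta_tilde_t when v_posterior == 0, its default)
# using the buffers registered in DDPM.register_schedule above.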
+ + model_mean, posterior_variance, posterior_log_variance = self.q_posterior(x_start=x_recon, x_t=x, t=t) + + if return_x0: + return model_mean, posterior_variance, posterior_log_variance, x_recon + else: + return model_mean, posterior_variance, posterior_log_variance + + @torch.no_grad() + def p_sample(self, x, c, t, clip_denoised=False, repeat_noise=False, return_x0=False, \ + temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None, **kwargs): + b, *_, device = *x.shape, x.device + outputs = self.p_mean_variance(x=x, c=c, t=t, clip_denoised=clip_denoised, return_x0=return_x0, \ + score_corrector=score_corrector, corrector_kwargs=corrector_kwargs, **kwargs) + if return_x0: + model_mean, _, model_log_variance, x0 = outputs + else: + model_mean, _, model_log_variance = outputs + + noise = noise_like(x.shape, device, repeat_noise) * temperature + if noise_dropout > 0.: + noise = torch.nn.functional.dropout(noise, p=noise_dropout) + # no noise when t == 0 + nonzero_mask = (1 - (t == 0).float()).reshape(b, *((1,) * (len(x.shape) - 1))) + + if return_x0: + return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise, x0 + else: + return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise + + @torch.no_grad() + def p_sample_loop(self, cond, shape, return_intermediates=False, x_T=None, verbose=True, callback=None, \ + timesteps=None, mask=None, x0=None, img_callback=None, start_T=None, log_every_t=None, **kwargs): + + if not log_every_t: + log_every_t = self.log_every_t + device = self.betas.device + b = shape[0] + # sample an initial noise + if x_T is None: + img = torch.randn(shape, device=device) + else: + img = x_T + + intermediates = [img] + if timesteps is None: + timesteps = self.num_timesteps + if start_T is not None: + timesteps = min(timesteps, start_T) + + iterator = tqdm(reversed(range(0, timesteps)), desc='Sampling t', total=timesteps) if verbose else reversed(range(0, timesteps)) + + if mask is not None: + assert x0 is not None + assert x0.shape[2:3] == mask.shape[2:3] # spatial size has to match + + for i in iterator: + ts = torch.full((b,), i, device=device, dtype=torch.long) + if self.shorten_cond_schedule: + assert self.model.conditioning_key != 'hybrid' + tc = self.cond_ids[ts].to(cond.device) + cond = self.q_sample(x_start=cond, t=tc, noise=torch.randn_like(cond)) + + img = self.p_sample(img, cond, ts, clip_denoised=self.clip_denoised, **kwargs) + if mask is not None: + img_orig = self.q_sample(x0, ts) + img = img_orig * mask + (1. 
- mask) * img + + if i % log_every_t == 0 or i == timesteps - 1: + intermediates.append(img) + if callback: callback(i) + if img_callback: img_callback(img, i) + + if return_intermediates: + return img, intermediates + return img + + +class LatentVisualDiffusion(LatentDiffusion): + def __init__(self, img_cond_stage_config, image_proj_stage_config, freeze_embedder=True, *args, **kwargs): + super().__init__(*args, **kwargs) + self._init_embedder(img_cond_stage_config, freeze_embedder) + self.image_proj_model = instantiate_from_config(image_proj_stage_config) + + def _init_embedder(self, config, freeze=True): + embedder = instantiate_from_config(config) + if freeze: + self.embedder = embedder.eval() + self.embedder.train = disabled_train + for param in self.embedder.parameters(): + param.requires_grad = False + + +class DiffusionWrapper(nn.Module): + def __init__(self, diff_model_config, conditioning_key): + super().__init__() + self.diffusion_model = instantiate_from_config(diff_model_config) + self.conditioning_key = conditioning_key + + def forward(self, x, t, c_concat: list = None, c_crossattn: list = None, + c_adm=None, s=None, mask=None, **kwargs): + # temporal_context = fps is foNone + if self.conditioning_key is None: + out = self.diffusion_model(x, t) + elif self.conditioning_key == 'concat': + xc = torch.cat([x] + c_concat, dim=1) + out = self.diffusion_model(xc, t, **kwargs) + elif self.conditioning_key == 'crossattn': + cc = torch.cat(c_crossattn, 1) + out = self.diffusion_model(x, t, context=cc, **kwargs) + elif self.conditioning_key == 'hybrid': + ## it is just right [b,c,t,h,w]: concatenate in channel dim + xc = torch.cat([x] + c_concat, dim=1) + cc = torch.cat(c_crossattn, 1) + out = self.diffusion_model(xc, t, context=cc, **kwargs) + elif self.conditioning_key == 'resblockcond': + cc = c_crossattn[0] + out = self.diffusion_model(x, t, context=cc) + elif self.conditioning_key == 'adm': + cc = c_crossattn[0] + out = self.diffusion_model(x, t, y=cc) + elif self.conditioning_key == 'hybrid-adm': + assert c_adm is not None + xc = torch.cat([x] + c_concat, dim=1) + cc = torch.cat(c_crossattn, 1) + out = self.diffusion_model(xc, t, context=cc, y=c_adm, **kwargs) + elif self.conditioning_key == 'hybrid-time': + assert s is not None + xc = torch.cat([x] + c_concat, dim=1) + cc = torch.cat(c_crossattn, 1) + out = self.diffusion_model(xc, t, context=cc, s=s) + elif self.conditioning_key == 'concat-time-mask': + # assert s is not None + xc = torch.cat([x] + c_concat, dim=1) + out = self.diffusion_model(xc, t, context=None, s=s, mask=mask) + elif self.conditioning_key == 'concat-adm-mask': + # assert s is not None + if c_concat is not None: + xc = torch.cat([x] + c_concat, dim=1) + else: + xc = x + out = self.diffusion_model(xc, t, context=None, y=s, mask=mask) + elif self.conditioning_key == 'hybrid-adm-mask': + cc = torch.cat(c_crossattn, 1) + if c_concat is not None: + xc = torch.cat([x] + c_concat, dim=1) + else: + xc = x + out = self.diffusion_model(xc, t, context=cc, y=s, mask=mask) + elif self.conditioning_key == 'hybrid-time-adm': # adm means y, e.g., class index + # assert s is not None + assert c_adm is not None + xc = torch.cat([x] + c_concat, dim=1) + cc = torch.cat(c_crossattn, 1) + out = self.diffusion_model(xc, t, context=cc, s=s, y=c_adm) + elif self.conditioning_key == 'crossattn-adm': + assert c_adm is not None + cc = torch.cat(c_crossattn, 1) + out = self.diffusion_model(x, t, context=cc, y=c_adm) + else: + raise NotImplementedError() + + return out \ No newline at end 
of file diff --git a/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/samplers/ddim.py b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/samplers/ddim.py new file mode 100644 index 0000000000000000000000000000000000000000..a3270a0a6afeda5829c2d60a62c5653938d7019b --- /dev/null +++ b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/samplers/ddim.py @@ -0,0 +1,317 @@ +import numpy as np +from tqdm import tqdm +import torch +from ..models.utils_diffusion import make_ddim_sampling_parameters, make_ddim_timesteps, rescale_noise_cfg +from ..common import noise_like +from ..common import extract_into_tensor +import copy + + +class DDIMSampler(object): + def __init__(self, model, schedule="linear", **kwargs): + super().__init__() + self.model = model + self.ddpm_num_timesteps = model.num_timesteps + self.schedule = schedule + self.counter = 0 + + def register_buffer(self, name, attr): + if type(attr) == torch.Tensor: + if attr.device != torch.device("cuda"): + attr = attr.to(torch.device("cuda")) + setattr(self, name, attr) + + def make_schedule(self, ddim_num_steps, ddim_discretize="uniform", ddim_eta=0., verbose=True): + self.ddim_timesteps = make_ddim_timesteps(ddim_discr_method=ddim_discretize, num_ddim_timesteps=ddim_num_steps, + num_ddpm_timesteps=self.ddpm_num_timesteps,verbose=verbose) + alphas_cumprod = self.model.alphas_cumprod + assert alphas_cumprod.shape[0] == self.ddpm_num_timesteps, 'alphas have to be defined for each timestep' + to_torch = lambda x: x.clone().detach().to(torch.float32).to(self.model.device) + + if self.model.use_dynamic_rescale: + self.ddim_scale_arr = self.model.scale_arr[self.ddim_timesteps] + self.ddim_scale_arr_prev = torch.cat([self.ddim_scale_arr[0:1], self.ddim_scale_arr[:-1]]) + + self.register_buffer('betas', to_torch(self.model.betas)) + self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod)) + self.register_buffer('alphas_cumprod_prev', to_torch(self.model.alphas_cumprod_prev)) + + # calculations for diffusion q(x_t | x_{t-1}) and others + self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod.cpu()))) + self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod.cpu()))) + self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod.cpu()))) + self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod.cpu()))) + self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod.cpu() - 1))) + + # ddim sampling parameters + ddim_sigmas, ddim_alphas, ddim_alphas_prev = make_ddim_sampling_parameters(alphacums=alphas_cumprod.cpu(), + ddim_timesteps=self.ddim_timesteps, + eta=ddim_eta,verbose=verbose) + self.register_buffer('ddim_sigmas', ddim_sigmas) + self.register_buffer('ddim_alphas', ddim_alphas) + self.register_buffer('ddim_alphas_prev', ddim_alphas_prev) + self.register_buffer('ddim_sqrt_one_minus_alphas', np.sqrt(1. 
- ddim_alphas)) + sigmas_for_original_sampling_steps = ddim_eta * torch.sqrt( + (1 - self.alphas_cumprod_prev) / (1 - self.alphas_cumprod) * ( + 1 - self.alphas_cumprod / self.alphas_cumprod_prev)) + self.register_buffer('ddim_sigmas_for_original_num_steps', sigmas_for_original_sampling_steps) + + @torch.no_grad() + def sample(self, + S, + batch_size, + shape, + conditioning=None, + callback=None, + normals_sequence=None, + img_callback=None, + quantize_x0=False, + eta=0., + mask=None, + x0=None, + temperature=1., + noise_dropout=0., + score_corrector=None, + corrector_kwargs=None, + verbose=True, + schedule_verbose=False, + x_T=None, + log_every_t=100, + unconditional_guidance_scale=1., + unconditional_conditioning=None, + precision=None, + fs=None, + timestep_spacing='uniform', #uniform_trailing for starting from last timestep + guidance_rescale=0.0, + **kwargs + ): + + # check condition bs + if conditioning is not None: + if isinstance(conditioning, dict): + try: + cbs = conditioning[list(conditioning.keys())[0]].shape[0] + except: + cbs = conditioning[list(conditioning.keys())[0]][0].shape[0] + + if cbs != batch_size: + print(f"Warning: Got {cbs} conditionings but batch-size is {batch_size}") + else: + if conditioning.shape[0] != batch_size: + print(f"Warning: Got {conditioning.shape[0]} conditionings but batch-size is {batch_size}") + + self.make_schedule(ddim_num_steps=S, ddim_discretize=timestep_spacing, ddim_eta=eta, verbose=schedule_verbose) + + # make shape + if len(shape) == 3: + C, H, W = shape + size = (batch_size, C, H, W) + elif len(shape) == 4: + C, T, H, W = shape + size = (batch_size, C, T, H, W) + + samples, intermediates = self.ddim_sampling(conditioning, size, + callback=callback, + img_callback=img_callback, + quantize_denoised=quantize_x0, + mask=mask, x0=x0, + ddim_use_original_steps=False, + noise_dropout=noise_dropout, + temperature=temperature, + score_corrector=score_corrector, + corrector_kwargs=corrector_kwargs, + x_T=x_T, + log_every_t=log_every_t, + unconditional_guidance_scale=unconditional_guidance_scale, + unconditional_conditioning=unconditional_conditioning, + verbose=verbose, + precision=precision, + fs=fs, + guidance_rescale=guidance_rescale, + **kwargs) + return samples, intermediates + + @torch.no_grad() + def ddim_sampling(self, cond, shape, + x_T=None, ddim_use_original_steps=False, + callback=None, timesteps=None, quantize_denoised=False, + mask=None, x0=None, img_callback=None, log_every_t=100, + temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None, + unconditional_guidance_scale=1., unconditional_conditioning=None, verbose=True,precision=None,fs=None,guidance_rescale=0.0, + **kwargs): + device = self.model.betas.device + b = shape[0] + if x_T is None: + img = torch.randn(shape, device=device) + else: + img = x_T + if precision is not None: + if precision == 16: + img = img.to(dtype=torch.float16) + + if timesteps is None: + timesteps = self.ddpm_num_timesteps if ddim_use_original_steps else self.ddim_timesteps + elif timesteps is not None and not ddim_use_original_steps: + subset_end = int(min(timesteps / self.ddim_timesteps.shape[0], 1) * self.ddim_timesteps.shape[0]) - 1 + timesteps = self.ddim_timesteps[:subset_end] + + intermediates = {'x_inter': [img], 'pred_x0': [img]} + time_range = reversed(range(0,timesteps)) if ddim_use_original_steps else np.flip(timesteps) + total_steps = timesteps if ddim_use_original_steps else timesteps.shape[0] + if verbose: + iterator = tqdm(time_range, desc='DDIM Sampler', 
total=total_steps) + else: + iterator = time_range + + clean_cond = kwargs.pop("clean_cond", False) + + # cond_copy, unconditional_conditioning_copy = copy.deepcopy(cond), copy.deepcopy(unconditional_conditioning) + for i, step in enumerate(iterator): + index = total_steps - i - 1 + ts = torch.full((b,), step, device=device, dtype=torch.long) + + ## use mask to blend noised original latent (img_orig) & new sampled latent (img) + if mask is not None: + assert x0 is not None + if clean_cond: + img_orig = x0 + else: + img_orig = self.model.q_sample(x0, ts) # TODO: deterministic forward pass? + img = img_orig * mask + (1. - mask) * img # keep original & modify use img + + + + + outs = self.p_sample_ddim(img, cond, ts, index=index, use_original_steps=ddim_use_original_steps, + quantize_denoised=quantize_denoised, temperature=temperature, + noise_dropout=noise_dropout, score_corrector=score_corrector, + corrector_kwargs=corrector_kwargs, + unconditional_guidance_scale=unconditional_guidance_scale, + unconditional_conditioning=unconditional_conditioning, + mask=mask,x0=x0,fs=fs,guidance_rescale=guidance_rescale, + **kwargs) + + + img, pred_x0 = outs + if callback: callback(i) + if img_callback: img_callback(pred_x0, i) + + if index % log_every_t == 0 or index == total_steps - 1: + intermediates['x_inter'].append(img) + intermediates['pred_x0'].append(pred_x0) + + return img, intermediates + + @torch.no_grad() + def p_sample_ddim(self, x, c, t, index, repeat_noise=False, use_original_steps=False, quantize_denoised=False, + temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None, + unconditional_guidance_scale=1., unconditional_conditioning=None, + uc_type=None, conditional_guidance_scale_temporal=None,mask=None,x0=None,guidance_rescale=0.0,**kwargs): + b, *_, device = *x.shape, x.device + if x.dim() == 5: + is_video = True + else: + is_video = False + + if unconditional_conditioning is None or unconditional_guidance_scale == 1.: + model_output = self.model.apply_model(x, t, c, **kwargs) # unet denoiser + else: + ### do_classifier_free_guidance + if isinstance(c, torch.Tensor) or isinstance(c, dict): + e_t_cond = self.model.apply_model(x, t, c, **kwargs) + e_t_uncond = self.model.apply_model(x, t, unconditional_conditioning, **kwargs) + else: + raise NotImplementedError + + model_output = e_t_uncond + unconditional_guidance_scale * (e_t_cond - e_t_uncond) + + if guidance_rescale > 0.0: + model_output = rescale_noise_cfg(model_output, e_t_cond, guidance_rescale=guidance_rescale) + + if self.model.parameterization == "v": + e_t = self.model.predict_eps_from_z_and_v(x, t, model_output) + else: + e_t = model_output + + if score_corrector is not None: + assert self.model.parameterization == "eps", 'not implemented' + e_t = score_corrector.modify_score(self.model, e_t, x, t, c, **corrector_kwargs) + + alphas = self.model.alphas_cumprod if use_original_steps else self.ddim_alphas + alphas_prev = self.model.alphas_cumprod_prev if use_original_steps else self.ddim_alphas_prev + sqrt_one_minus_alphas = self.model.sqrt_one_minus_alphas_cumprod if use_original_steps else self.ddim_sqrt_one_minus_alphas + # sigmas = self.model.ddim_sigmas_for_original_num_steps if use_original_steps else self.ddim_sigmas + sigmas = self.ddim_sigmas_for_original_num_steps if use_original_steps else self.ddim_sigmas + # select parameters corresponding to the currently considered timestep + + if is_video: + size = (b, 1, 1, 1, 1) + else: + size = (b, 1, 1, 1) + a_t = torch.full(size, alphas[index], 
device=device) + a_prev = torch.full(size, alphas_prev[index], device=device) + sigma_t = torch.full(size, sigmas[index], device=device) + sqrt_one_minus_at = torch.full(size, sqrt_one_minus_alphas[index],device=device) + + # current prediction for x_0 + if self.model.parameterization != "v": + pred_x0 = (x - sqrt_one_minus_at * e_t) / a_t.sqrt() + else: + pred_x0 = self.model.predict_start_from_z_and_v(x, t, model_output) + + if self.model.use_dynamic_rescale: + scale_t = torch.full(size, self.ddim_scale_arr[index], device=device) + prev_scale_t = torch.full(size, self.ddim_scale_arr_prev[index], device=device) + rescale = (prev_scale_t / scale_t) + pred_x0 *= rescale + + if quantize_denoised: + pred_x0, _, *_ = self.model.first_stage_model.quantize(pred_x0) + # direction pointing to x_t + dir_xt = (1. - a_prev - sigma_t**2).sqrt() * e_t + + noise = sigma_t * noise_like(x.shape, device, repeat_noise) * temperature + if noise_dropout > 0.: + noise = torch.nn.functional.dropout(noise, p=noise_dropout) + + x_prev = a_prev.sqrt() * pred_x0 + dir_xt + noise + + return x_prev, pred_x0 + + @torch.no_grad() + def decode(self, x_latent, cond, t_start, unconditional_guidance_scale=1.0, unconditional_conditioning=None, + use_original_steps=False, callback=None): + + timesteps = np.arange(self.ddpm_num_timesteps) if use_original_steps else self.ddim_timesteps + timesteps = timesteps[:t_start] + + time_range = np.flip(timesteps) + total_steps = timesteps.shape[0] + print(f"Running DDIM Sampling with {total_steps} timesteps") + + iterator = tqdm(time_range, desc='Decoding image', total=total_steps) + x_dec = x_latent + for i, step in enumerate(iterator): + index = total_steps - i - 1 + ts = torch.full((x_latent.shape[0],), step, device=x_latent.device, dtype=torch.long) + x_dec, _ = self.p_sample_ddim(x_dec, cond, ts, index=index, use_original_steps=use_original_steps, + unconditional_guidance_scale=unconditional_guidance_scale, + unconditional_conditioning=unconditional_conditioning) + if callback: callback(i) + return x_dec + + @torch.no_grad() + def stochastic_encode(self, x0, t, use_original_steps=False, noise=None): + # fast, but does not allow for exact reconstruction + # t serves as an index to gather the correct alphas + if use_original_steps: + sqrt_alphas_cumprod = self.sqrt_alphas_cumprod + sqrt_one_minus_alphas_cumprod = self.sqrt_one_minus_alphas_cumprod + else: + sqrt_alphas_cumprod = torch.sqrt(self.ddim_alphas) + sqrt_one_minus_alphas_cumprod = self.ddim_sqrt_one_minus_alphas + + if noise is None: + noise = torch.randn_like(x0) + return (extract_into_tensor(sqrt_alphas_cumprod, t, x0.shape) * x0 + + extract_into_tensor(sqrt_one_minus_alphas_cumprod, t, x0.shape) * noise) diff --git a/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/samplers/ddim_multiplecond.py b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/samplers/ddim_multiplecond.py new file mode 100644 index 0000000000000000000000000000000000000000..1548a233c4476b5c032be274e6753b077ec56e87 --- /dev/null +++ b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/samplers/ddim_multiplecond.py @@ -0,0 +1,323 @@ +import numpy as np +from tqdm import tqdm +import torch +from ...models.utils_diffusion import make_ddim_sampling_parameters, make_ddim_timesteps, rescale_noise_cfg +from ..common import noise_like +from ..common import extract_into_tensor +import copy + + +class DDIMSampler(object): + def __init__(self, model, schedule="linear", **kwargs): + super().__init__() + self.model = model + self.ddpm_num_timesteps = model.num_timesteps + 
self.schedule = schedule + self.counter = 0 + + def register_buffer(self, name, attr): + if type(attr) == torch.Tensor: + if attr.device != torch.device("cuda"): + attr = attr.to(torch.device("cuda")) + setattr(self, name, attr) + + def make_schedule(self, ddim_num_steps, ddim_discretize="uniform", ddim_eta=0., verbose=True): + self.ddim_timesteps = make_ddim_timesteps(ddim_discr_method=ddim_discretize, num_ddim_timesteps=ddim_num_steps, + num_ddpm_timesteps=self.ddpm_num_timesteps,verbose=verbose) + alphas_cumprod = self.model.alphas_cumprod + assert alphas_cumprod.shape[0] == self.ddpm_num_timesteps, 'alphas have to be defined for each timestep' + to_torch = lambda x: x.clone().detach().to(torch.float32).to(self.model.device) + + if self.model.use_dynamic_rescale: + self.ddim_scale_arr = self.model.scale_arr[self.ddim_timesteps] + self.ddim_scale_arr_prev = torch.cat([self.ddim_scale_arr[0:1], self.ddim_scale_arr[:-1]]) + + self.register_buffer('betas', to_torch(self.model.betas)) + self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod)) + self.register_buffer('alphas_cumprod_prev', to_torch(self.model.alphas_cumprod_prev)) + + # calculations for diffusion q(x_t | x_{t-1}) and others + self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod.cpu()))) + self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod.cpu()))) + self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod.cpu()))) + self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod.cpu()))) + self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod.cpu() - 1))) + + # ddim sampling parameters + ddim_sigmas, ddim_alphas, ddim_alphas_prev = make_ddim_sampling_parameters(alphacums=alphas_cumprod.cpu(), + ddim_timesteps=self.ddim_timesteps, + eta=ddim_eta,verbose=verbose) + self.register_buffer('ddim_sigmas', ddim_sigmas) + self.register_buffer('ddim_alphas', ddim_alphas) + self.register_buffer('ddim_alphas_prev', ddim_alphas_prev) + self.register_buffer('ddim_sqrt_one_minus_alphas', np.sqrt(1. - ddim_alphas)) + sigmas_for_original_sampling_steps = ddim_eta * torch.sqrt( + (1 - self.alphas_cumprod_prev) / (1 - self.alphas_cumprod) * ( + 1 - self.alphas_cumprod / self.alphas_cumprod_prev)) + self.register_buffer('ddim_sigmas_for_original_num_steps', sigmas_for_original_sampling_steps) + + @torch.no_grad() + def sample(self, + S, + batch_size, + shape, + conditioning=None, + callback=None, + normals_sequence=None, + img_callback=None, + quantize_x0=False, + eta=0., + mask=None, + x0=None, + temperature=1., + noise_dropout=0., + score_corrector=None, + corrector_kwargs=None, + verbose=True, + schedule_verbose=False, + x_T=None, + log_every_t=100, + unconditional_guidance_scale=1., + unconditional_conditioning=None, + precision=None, + fs=None, + timestep_spacing='uniform', #uniform_trailing for starting from last timestep + guidance_rescale=0.0, + # this has to come in the same format as the conditioning, # e.g. as encoded tokens, ... 
+ **kwargs + ): + + # check condition bs + if conditioning is not None: + if isinstance(conditioning, dict): + try: + cbs = conditioning[list(conditioning.keys())[0]].shape[0] + except: + cbs = conditioning[list(conditioning.keys())[0]][0].shape[0] + + if cbs != batch_size: + print(f"Warning: Got {cbs} conditionings but batch-size is {batch_size}") + else: + if conditioning.shape[0] != batch_size: + print(f"Warning: Got {conditioning.shape[0]} conditionings but batch-size is {batch_size}") + + # print('==> timestep_spacing: ', timestep_spacing, guidance_rescale) + self.make_schedule(ddim_num_steps=S, ddim_discretize=timestep_spacing, ddim_eta=eta, verbose=schedule_verbose) + + # make shape + if len(shape) == 3: + C, H, W = shape + size = (batch_size, C, H, W) + elif len(shape) == 4: + C, T, H, W = shape + size = (batch_size, C, T, H, W) + # print(f'Data shape for DDIM sampling is {size}, eta {eta}') + + samples, intermediates = self.ddim_sampling(conditioning, size, + callback=callback, + img_callback=img_callback, + quantize_denoised=quantize_x0, + mask=mask, x0=x0, + ddim_use_original_steps=False, + noise_dropout=noise_dropout, + temperature=temperature, + score_corrector=score_corrector, + corrector_kwargs=corrector_kwargs, + x_T=x_T, + log_every_t=log_every_t, + unconditional_guidance_scale=unconditional_guidance_scale, + unconditional_conditioning=unconditional_conditioning, + verbose=verbose, + precision=precision, + fs=fs, + guidance_rescale=guidance_rescale, + **kwargs) + return samples, intermediates + + @torch.no_grad() + def ddim_sampling(self, cond, shape, + x_T=None, ddim_use_original_steps=False, + callback=None, timesteps=None, quantize_denoised=False, + mask=None, x0=None, img_callback=None, log_every_t=100, + temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None, + unconditional_guidance_scale=1., unconditional_conditioning=None, verbose=True,precision=None,fs=None,guidance_rescale=0.0, + **kwargs): + device = self.model.betas.device + b = shape[0] + if x_T is None: + img = torch.randn(shape, device=device) + else: + img = x_T + if precision is not None: + if precision == 16: + img = img.to(dtype=torch.float16) + + + if timesteps is None: + timesteps = self.ddpm_num_timesteps if ddim_use_original_steps else self.ddim_timesteps + elif timesteps is not None and not ddim_use_original_steps: + subset_end = int(min(timesteps / self.ddim_timesteps.shape[0], 1) * self.ddim_timesteps.shape[0]) - 1 + timesteps = self.ddim_timesteps[:subset_end] + + intermediates = {'x_inter': [img], 'pred_x0': [img]} + time_range = reversed(range(0,timesteps)) if ddim_use_original_steps else np.flip(timesteps) + total_steps = timesteps if ddim_use_original_steps else timesteps.shape[0] + if verbose: + iterator = tqdm(time_range, desc='DDIM Sampler', total=total_steps) + else: + iterator = time_range + + clean_cond = kwargs.pop("clean_cond", False) + + # cond_copy, unconditional_conditioning_copy = copy.deepcopy(cond), copy.deepcopy(unconditional_conditioning) + for i, step in enumerate(iterator): + index = total_steps - i - 1 + ts = torch.full((b,), step, device=device, dtype=torch.long) + + ## use mask to blend noised original latent (img_orig) & new sampled latent (img) + if mask is not None: + assert x0 is not None + if clean_cond: + img_orig = x0 + else: + img_orig = self.model.q_sample(x0, ts) # TODO: deterministic forward pass? + img = img_orig * mask + (1. 
- mask) * img # keep original & modify use img + + + + + outs = self.p_sample_ddim(img, cond, ts, index=index, use_original_steps=ddim_use_original_steps, + quantize_denoised=quantize_denoised, temperature=temperature, + noise_dropout=noise_dropout, score_corrector=score_corrector, + corrector_kwargs=corrector_kwargs, + unconditional_guidance_scale=unconditional_guidance_scale, + unconditional_conditioning=unconditional_conditioning, + mask=mask,x0=x0,fs=fs,guidance_rescale=guidance_rescale, + **kwargs) + + + + img, pred_x0 = outs + if callback: callback(i) + if img_callback: img_callback(pred_x0, i) + + if index % log_every_t == 0 or index == total_steps - 1: + intermediates['x_inter'].append(img) + intermediates['pred_x0'].append(pred_x0) + + return img, intermediates + + @torch.no_grad() + def p_sample_ddim(self, x, c, t, index, repeat_noise=False, use_original_steps=False, quantize_denoised=False, + temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None, + unconditional_guidance_scale=1., unconditional_conditioning=None, + uc_type=None, cfg_img=None,mask=None,x0=None,guidance_rescale=0.0, **kwargs): + b, *_, device = *x.shape, x.device + if x.dim() == 5: + is_video = True + else: + is_video = False + if cfg_img is None: + cfg_img = unconditional_guidance_scale + + unconditional_conditioning_img_nonetext = kwargs['unconditional_conditioning_img_nonetext'] + + + if unconditional_conditioning is None or unconditional_guidance_scale == 1.: + model_output = self.model.apply_model(x, t, c, **kwargs) # unet denoiser + else: + ### with unconditional condition + e_t_cond = self.model.apply_model(x, t, c, **kwargs) + e_t_uncond = self.model.apply_model(x, t, unconditional_conditioning, **kwargs) + e_t_uncond_img = self.model.apply_model(x, t, unconditional_conditioning_img_nonetext, **kwargs) + # text cfg + model_output = e_t_uncond + cfg_img * (e_t_uncond_img - e_t_uncond) + unconditional_guidance_scale * (e_t_cond - e_t_uncond_img) + if guidance_rescale > 0.0: + model_output = rescale_noise_cfg(model_output, e_t_cond, guidance_rescale=guidance_rescale) + + if self.model.parameterization == "v": + e_t = self.model.predict_eps_from_z_and_v(x, t, model_output) + else: + e_t = model_output + + if score_corrector is not None: + assert self.model.parameterization == "eps", 'not implemented' + e_t = score_corrector.modify_score(self.model, e_t, x, t, c, **corrector_kwargs) + + alphas = self.model.alphas_cumprod if use_original_steps else self.ddim_alphas + alphas_prev = self.model.alphas_cumprod_prev if use_original_steps else self.ddim_alphas_prev + sqrt_one_minus_alphas = self.model.sqrt_one_minus_alphas_cumprod if use_original_steps else self.ddim_sqrt_one_minus_alphas + sigmas = self.ddim_sigmas_for_original_num_steps if use_original_steps else self.ddim_sigmas + # select parameters corresponding to the currently considered timestep + + if is_video: + size = (b, 1, 1, 1, 1) + else: + size = (b, 1, 1, 1) + a_t = torch.full(size, alphas[index], device=device) + a_prev = torch.full(size, alphas_prev[index], device=device) + sigma_t = torch.full(size, sigmas[index], device=device) + sqrt_one_minus_at = torch.full(size, sqrt_one_minus_alphas[index],device=device) + + # current prediction for x_0 + if self.model.parameterization != "v": + pred_x0 = (x - sqrt_one_minus_at * e_t) / a_t.sqrt() + else: + pred_x0 = self.model.predict_start_from_z_and_v(x, t, model_output) + + if self.model.use_dynamic_rescale: + scale_t = torch.full(size, self.ddim_scale_arr[index], device=device) + 
prev_scale_t = torch.full(size, self.ddim_scale_arr_prev[index], device=device) + rescale = (prev_scale_t / scale_t) + pred_x0 *= rescale + + if quantize_denoised: + pred_x0, _, *_ = self.model.first_stage_model.quantize(pred_x0) + # direction pointing to x_t + dir_xt = (1. - a_prev - sigma_t**2).sqrt() * e_t + + noise = sigma_t * noise_like(x.shape, device, repeat_noise) * temperature + if noise_dropout > 0.: + noise = torch.nn.functional.dropout(noise, p=noise_dropout) + + x_prev = a_prev.sqrt() * pred_x0 + dir_xt + noise + + return x_prev, pred_x0 + + @torch.no_grad() + def decode(self, x_latent, cond, t_start, unconditional_guidance_scale=1.0, unconditional_conditioning=None, + use_original_steps=False, callback=None): + + timesteps = np.arange(self.ddpm_num_timesteps) if use_original_steps else self.ddim_timesteps + timesteps = timesteps[:t_start] + + time_range = np.flip(timesteps) + total_steps = timesteps.shape[0] + print(f"Running DDIM Sampling with {total_steps} timesteps") + + iterator = tqdm(time_range, desc='Decoding image', total=total_steps) + x_dec = x_latent + for i, step in enumerate(iterator): + index = total_steps - i - 1 + ts = torch.full((x_latent.shape[0],), step, device=x_latent.device, dtype=torch.long) + x_dec, _ = self.p_sample_ddim(x_dec, cond, ts, index=index, use_original_steps=use_original_steps, + unconditional_guidance_scale=unconditional_guidance_scale, + unconditional_conditioning=unconditional_conditioning) + if callback: callback(i) + return x_dec + + @torch.no_grad() + def stochastic_encode(self, x0, t, use_original_steps=False, noise=None): + # fast, but does not allow for exact reconstruction + # t serves as an index to gather the correct alphas + if use_original_steps: + sqrt_alphas_cumprod = self.sqrt_alphas_cumprod + sqrt_one_minus_alphas_cumprod = self.sqrt_one_minus_alphas_cumprod + else: + sqrt_alphas_cumprod = torch.sqrt(self.ddim_alphas) + sqrt_one_minus_alphas_cumprod = self.ddim_sqrt_one_minus_alphas + + if noise is None: + noise = torch.randn_like(x0) + return (extract_into_tensor(sqrt_alphas_cumprod, t, x0.shape) * x0 + + extract_into_tensor(sqrt_one_minus_alphas_cumprod, t, x0.shape) * noise) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/samplers/unipc/__init__.py b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/samplers/unipc/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..cdf30d0c61846c5117ffd0b308354189a8431f44 --- /dev/null +++ b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/samplers/unipc/__init__.py @@ -0,0 +1 @@ +from .sampler import UniPCSampler \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/samplers/unipc/sampler.py b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/samplers/unipc/sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..b68ffffa135aad4d0ca5db4a51572adc217fb0e5 --- /dev/null +++ b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/samplers/unipc/sampler.py @@ -0,0 +1,79 @@ +"""SAMPLING ONLY.""" + +import torch + +from .uni_pc import NoiseScheduleVP, model_wrapper, UniPC + +class UniPCSampler(object): + def __init__(self, model, **kwargs): + super().__init__() + self.model = model + to_torch = lambda x: x.clone().detach().to(torch.float32).to(model.device) + self.register_buffer('alphas_cumprod', to_torch(model.alphas_cumprod)) + + def register_buffer(self, name, attr): + if type(attr) == torch.Tensor: + if attr.device != torch.device("cuda"): + attr = attr.to(torch.device("cuda")) + 
setattr(self, name, attr) + + @torch.no_grad() + def sample(self, + S, + batch_size, + shape, + conditioning=None, + callback=None, + normals_sequence=None, + img_callback=None, + quantize_x0=False, + eta=0., + mask=None, + x0=None, + temperature=1., + noise_dropout=0., + score_corrector=None, + corrector_kwargs=None, + verbose=True, + x_T=None, + log_every_t=100, + unconditional_guidance_scale=1., + unconditional_conditioning=None, + # this has to come in the same format as the conditioning, # e.g. as encoded tokens, ... + **kwargs + ): + if conditioning is not None: + if isinstance(conditioning, dict): + cbs = conditioning[list(conditioning.keys())[0]].shape[0] + if cbs != batch_size: + print(f"Warning: Got {cbs} conditionings but batch-size is {batch_size}") + else: + if conditioning.shape[0] != batch_size: + print(f"Warning: Got {conditioning.shape[0]} conditionings but batch-size is {batch_size}") + + # sampling + C, F, H, W = shape + size = (batch_size, C, H, W) + + device = self.model.betas.device + if x_T is None: + img = torch.randn(size, device=device) + else: + img = x_T + + ns = NoiseScheduleVP('discrete', alphas_cumprod=self.alphas_cumprod) + + model_fn = model_wrapper( + lambda x, t, c: self.model.apply_model(x, t, c), + ns, + model_type="noise", + guidance_type="classifier-free", + condition=conditioning, + unconditional_condition=unconditional_conditioning, + guidance_scale=unconditional_guidance_scale, + ) + + uni_pc = UniPC(model_fn, ns, predict_x0=True, thresholding=False) + x = uni_pc.sample(img, steps=S, skip_type="time_uniform", method="multistep", order=3, lower_order_final=True) + + return x.to(device), None \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/samplers/unipc/uni_pc.py b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/samplers/unipc/uni_pc.py new file mode 100644 index 0000000000000000000000000000000000000000..9a420691a548621f9aba708e71883b4628f6312f --- /dev/null +++ b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/samplers/unipc/uni_pc.py @@ -0,0 +1,808 @@ +import torch +import torch.nn.functional as F +import math + + +class NoiseScheduleVP: + def __init__( + self, + schedule='discrete', + betas=None, + alphas_cumprod=None, + continuous_beta_0=0.1, + continuous_beta_1=20., + ): + """Create a wrapper class for the forward SDE (VP type). + + *** + Update: We support discrete-time diffusion models by implementing a picewise linear interpolation for log_alpha_t. + We recommend to use schedule='discrete' for the discrete-time diffusion models, especially for high-resolution images. + *** + + The forward SDE ensures that the condition distribution q_{t|0}(x_t | x_0) = N ( alpha_t * x_0, sigma_t^2 * I ). + We further define lambda_t = log(alpha_t) - log(sigma_t), which is the half-logSNR (described in the DPM-Solver paper). + Therefore, we implement the functions for computing alpha_t, sigma_t and lambda_t. For t in [0, T], we have: + + log_alpha_t = self.marginal_log_mean_coeff(t) + sigma_t = self.marginal_std(t) + lambda_t = self.marginal_lambda(t) + + Moreover, as lambda(t) is an invertible function, we also support its inverse function: + + t = self.inverse_lambda(lambda_t) + + =============================================================== + + We support both discrete-time DPMs (trained on n = 0, 1, ..., N-1) and continuous-time DPMs (trained on t in [t_0, T]). + + 1. 
For discrete-time DPMs: + + For discrete-time DPMs trained on n = 0, 1, ..., N-1, we convert the discrete steps to continuous time steps by: + t_i = (i + 1) / N + e.g. for N = 1000, we have t_0 = 1e-3 and T = t_{N-1} = 1. + We solve the corresponding diffusion ODE from time T = 1 to time t_0 = 1e-3. + + Args: + betas: A `torch.Tensor`. The beta array for the discrete-time DPM. (See the original DDPM paper for details) + alphas_cumprod: A `torch.Tensor`. The cumprod alphas for the discrete-time DPM. (See the original DDPM paper for details) + + Note that we always have alphas_cumprod = cumprod(betas). Therefore, we only need to set one of `betas` and `alphas_cumprod`. + + **Important**: Please pay special attention for the args for `alphas_cumprod`: + The `alphas_cumprod` is the \hat{alpha_n} arrays in the notations of DDPM. Specifically, DDPMs assume that + q_{t_n | 0}(x_{t_n} | x_0) = N ( \sqrt{\hat{alpha_n}} * x_0, (1 - \hat{alpha_n}) * I ). + Therefore, the notation \hat{alpha_n} is different from the notation alpha_t in DPM-Solver. In fact, we have + alpha_{t_n} = \sqrt{\hat{alpha_n}}, + and + log(alpha_{t_n}) = 0.5 * log(\hat{alpha_n}). + + + 2. For continuous-time DPMs: + + We support two types of VPSDEs: linear (DDPM) and cosine (improved-DDPM). The hyperparameters for the noise + schedule are the default settings in DDPM and improved-DDPM: + + Args: + beta_min: A `float` number. The smallest beta for the linear schedule. + beta_max: A `float` number. The largest beta for the linear schedule. + cosine_s: A `float` number. The hyperparameter in the cosine schedule. + cosine_beta_max: A `float` number. The hyperparameter in the cosine schedule. + T: A `float` number. The ending time of the forward process. + + =============================================================== + + Args: + schedule: A `str`. The noise schedule of the forward SDE. 'discrete' for discrete-time DPMs, + 'linear' or 'cosine' for continuous-time DPMs. + Returns: + A wrapper object of the forward SDE (VP type). + + =============================================================== + + Example: + + # For discrete-time DPMs, given betas (the beta array for n = 0, 1, ..., N - 1): + >>> ns = NoiseScheduleVP('discrete', betas=betas) + + # For discrete-time DPMs, given alphas_cumprod (the \hat{alpha_n} array for n = 0, 1, ..., N - 1): + >>> ns = NoiseScheduleVP('discrete', alphas_cumprod=alphas_cumprod) + + # For continuous-time DPMs (VPSDE), linear schedule: + >>> ns = NoiseScheduleVP('linear', continuous_beta_0=0.1, continuous_beta_1=20.) + + """ + + if schedule not in ['discrete', 'linear', 'cosine']: + raise ValueError("Unsupported noise schedule {}. The schedule needs to be 'discrete' or 'linear' or 'cosine'".format(schedule)) + + self.schedule = schedule + if schedule == 'discrete': + if betas is not None: + log_alphas = 0.5 * torch.log(1 - betas).cumsum(dim=0) + else: + assert alphas_cumprod is not None + log_alphas = 0.5 * torch.log(alphas_cumprod) + self.total_N = len(log_alphas) + self.T = 1. + self.t_array = torch.linspace(0., 1., self.total_N + 1)[1:].reshape((1, -1)) + self.log_alpha_array = log_alphas.reshape((1, -1,)) + else: + self.total_N = 1000 + self.beta_0 = continuous_beta_0 + self.beta_1 = continuous_beta_1 + self.cosine_s = 0.008 + self.cosine_beta_max = 999. + self.cosine_t_max = math.atan(self.cosine_beta_max * (1. + self.cosine_s) / math.pi) * 2. * (1. + self.cosine_s) / math.pi - self.cosine_s + self.cosine_log_alpha_0 = math.log(math.cos(self.cosine_s / (1. 
+ self.cosine_s) * math.pi / 2.)) + self.schedule = schedule + if schedule == 'cosine': + # For the cosine schedule, T = 1 will have numerical issues. So we manually set the ending time T. + # Note that T = 0.9946 may be not the optimal setting. However, we find it works well. + self.T = 0.9946 + else: + self.T = 1. + + def marginal_log_mean_coeff(self, t): + """ + Compute log(alpha_t) of a given continuous-time label t in [0, T]. + """ + if self.schedule == 'discrete': + return interpolate_fn(t.reshape((-1, 1)), self.t_array.to(t.device), self.log_alpha_array.to(t.device)).reshape((-1)) + elif self.schedule == 'linear': + return -0.25 * t ** 2 * (self.beta_1 - self.beta_0) - 0.5 * t * self.beta_0 + elif self.schedule == 'cosine': + log_alpha_fn = lambda s: torch.log(torch.cos((s + self.cosine_s) / (1. + self.cosine_s) * math.pi / 2.)) + log_alpha_t = log_alpha_fn(t) - self.cosine_log_alpha_0 + return log_alpha_t + + def marginal_alpha(self, t): + """ + Compute alpha_t of a given continuous-time label t in [0, T]. + """ + return torch.exp(self.marginal_log_mean_coeff(t)) + + def marginal_std(self, t): + """ + Compute sigma_t of a given continuous-time label t in [0, T]. + """ + return torch.sqrt(1. - torch.exp(2. * self.marginal_log_mean_coeff(t))) + + def marginal_lambda(self, t): + """ + Compute lambda_t = log(alpha_t) - log(sigma_t) of a given continuous-time label t in [0, T]. + """ + log_mean_coeff = self.marginal_log_mean_coeff(t) + log_std = 0.5 * torch.log(1. - torch.exp(2. * log_mean_coeff)) + return log_mean_coeff - log_std + + def inverse_lambda(self, lamb): + """ + Compute the continuous-time label t in [0, T] of a given half-logSNR lambda_t. + """ + if self.schedule == 'linear': + tmp = 2. * (self.beta_1 - self.beta_0) * torch.logaddexp(-2. * lamb, torch.zeros((1,)).to(lamb)) + Delta = self.beta_0**2 + tmp + return tmp / (torch.sqrt(Delta) + self.beta_0) / (self.beta_1 - self.beta_0) + elif self.schedule == 'discrete': + log_alpha = -0.5 * torch.logaddexp(torch.zeros((1,)).to(lamb.device), -2. * lamb) + t = interpolate_fn(log_alpha.reshape((-1, 1)), torch.flip(self.log_alpha_array.to(lamb.device), [1]), torch.flip(self.t_array.to(lamb.device), [1])) + return t.reshape((-1,)) + else: + log_alpha = -0.5 * torch.logaddexp(-2. * lamb, torch.zeros((1,)).to(lamb)) + t_fn = lambda log_alpha_t: torch.arccos(torch.exp(log_alpha_t + self.cosine_log_alpha_0)) * 2. * (1. + self.cosine_s) / math.pi - self.cosine_s + t = t_fn(log_alpha) + return t + + +def model_wrapper( + model, + noise_schedule, + model_type="noise", + model_kwargs={}, + guidance_type="uncond", + condition=None, + unconditional_condition=None, + guidance_scale=1., + classifier_fn=None, + classifier_kwargs={}, +): + """Create a wrapper function for the noise prediction model. + + DPM-Solver needs to solve the continuous-time diffusion ODEs. For DPMs trained on discrete-time labels, we need to + firstly wrap the model function to a noise prediction model that accepts the continuous time as the input. + + We support four types of the diffusion model by setting `model_type`: + + 1. "noise": noise prediction model. (Trained by predicting noise). + + 2. "x_start": data prediction model. (Trained by predicting the data x_0 at time 0). + + 3. "v": velocity prediction model. (Trained by predicting the velocity). + The "v" prediction is derivation detailed in Appendix D of [1], and is used in Imagen-Video [2]. + + [1] Salimans, Tim, and Jonathan Ho. "Progressive distillation for fast sampling of diffusion models." 
+ arXiv preprint arXiv:2202.00512 (2022). + [2] Ho, Jonathan, et al. "Imagen Video: High Definition Video Generation with Diffusion Models." + arXiv preprint arXiv:2210.02303 (2022). + + 4. "score": marginal score function. (Trained by denoising score matching). + Note that the score function and the noise prediction model follows a simple relationship: + ``` + noise(x_t, t) = -sigma_t * score(x_t, t) + ``` + + We support three types of guided sampling by DPMs by setting `guidance_type`: + 1. "uncond": unconditional sampling by DPMs. + The input `model` has the following format: + `` + model(x, t_input, **model_kwargs) -> noise | x_start | v | score + `` + + 2. "classifier": classifier guidance sampling [3] by DPMs and another classifier. + The input `model` has the following format: + `` + model(x, t_input, **model_kwargs) -> noise | x_start | v | score + `` + + The input `classifier_fn` has the following format: + `` + classifier_fn(x, t_input, cond, **classifier_kwargs) -> logits(x, t_input, cond) + `` + + [3] P. Dhariwal and A. Q. Nichol, "Diffusion models beat GANs on image synthesis," + in Advances in Neural Information Processing Systems, vol. 34, 2021, pp. 8780-8794. + + 3. "classifier-free": classifier-free guidance sampling by conditional DPMs. + The input `model` has the following format: + `` + model(x, t_input, cond, **model_kwargs) -> noise | x_start | v | score + `` + And if cond == `unconditional_condition`, the model output is the unconditional DPM output. + + [4] Ho, Jonathan, and Tim Salimans. "Classifier-free diffusion guidance." + arXiv preprint arXiv:2207.12598 (2022). + + + The `t_input` is the time label of the model, which may be discrete-time labels (i.e. 0 to 999) + or continuous-time labels (i.e. epsilon to T). + + We wrap the model function to accept only `x` and `t_continuous` as inputs, and outputs the predicted noise: + `` + def model_fn(x, t_continuous) -> noise: + t_input = get_model_input_time(t_continuous) + return noise_pred(model, x, t_input, **model_kwargs) + `` + where `t_continuous` is the continuous time labels (i.e. epsilon to T). And we use `model_fn` for DPM-Solver. + + =============================================================== + + Args: + model: A diffusion model with the corresponding format described above. + noise_schedule: A noise schedule object, such as NoiseScheduleVP. + model_type: A `str`. The parameterization type of the diffusion model. + "noise" or "x_start" or "v" or "score". + model_kwargs: A `dict`. A dict for the other inputs of the model function. + guidance_type: A `str`. The type of the guidance for sampling. + "uncond" or "classifier" or "classifier-free". + condition: A pytorch tensor. The condition for the guided sampling. + Only used for "classifier" or "classifier-free" guidance type. + unconditional_condition: A pytorch tensor. The condition for the unconditional sampling. + Only used for "classifier-free" guidance type. + guidance_scale: A `float`. The scale for the guided sampling. + classifier_fn: A classifier function. Only used for the classifier guidance. + classifier_kwargs: A `dict`. A dict for the other inputs of the classifier function. + Returns: + A noise prediction model that accepts the noised data and the continuous time as the inputs. + """ + + def get_model_input_time(t_continuous): + """ + Convert the continuous-time `t_continuous` (in [epsilon, T]) to the model input time. + For discrete-time DPMs, we convert `t_continuous` in [1 / N, 1] to `t_input` in [0, 1000 * (N - 1) / N]. 
+ For continuous-time DPMs, we just use `t_continuous`. + """ + if noise_schedule.schedule == 'discrete': + return (t_continuous - 1. / noise_schedule.total_N) * 1000. + else: + return t_continuous + + def noise_pred_fn(x, t_continuous, cond=None): + if t_continuous.reshape((-1,)).shape[0] == 1: + t_continuous = t_continuous.expand((x.shape[0])) + t_input = get_model_input_time(t_continuous) + if cond is None: + output = model(x, t_input, None, **model_kwargs) + else: + output = model(x, t_input, cond, **model_kwargs) + if model_type == "noise": + return output + elif model_type == "x_start": + alpha_t, sigma_t = noise_schedule.marginal_alpha(t_continuous), noise_schedule.marginal_std(t_continuous) + dims = x.dim() + return (x - expand_dims(alpha_t, dims) * output) / expand_dims(sigma_t, dims) + elif model_type == "v": + alpha_t, sigma_t = noise_schedule.marginal_alpha(t_continuous), noise_schedule.marginal_std(t_continuous) + dims = x.dim() + return expand_dims(alpha_t, dims) * output + expand_dims(sigma_t, dims) * x + elif model_type == "score": + sigma_t = noise_schedule.marginal_std(t_continuous) + dims = x.dim() + return -expand_dims(sigma_t, dims) * output + + def cond_grad_fn(x, t_input): + """ + Compute the gradient of the classifier, i.e. nabla_{x} log p_t(cond | x_t). + """ + with torch.enable_grad(): + x_in = x.detach().requires_grad_(True) + log_prob = classifier_fn(x_in, t_input, condition, **classifier_kwargs) + return torch.autograd.grad(log_prob.sum(), x_in)[0] + + def model_fn(x, t_continuous): + """ + The noise predicition model function that is used for DPM-Solver. + """ + if t_continuous.reshape((-1,)).shape[0] == 1: + t_continuous = t_continuous.expand((x.shape[0])) + if guidance_type == "uncond": + return noise_pred_fn(x, t_continuous) + elif guidance_type == "classifier": + assert classifier_fn is not None + t_input = get_model_input_time(t_continuous) + cond_grad = cond_grad_fn(x, t_input) + sigma_t = noise_schedule.marginal_std(t_continuous) + noise = noise_pred_fn(x, t_continuous) + return noise - guidance_scale * expand_dims(sigma_t, dims=cond_grad.dim()) * cond_grad + elif guidance_type == "classifier-free": + if guidance_scale == 1. or unconditional_condition is None: + return noise_pred_fn(x, t_continuous, cond=condition) + else: + x_in = torch.cat([x] * 2) + t_in = torch.cat([t_continuous] * 2) + c_in = torch.cat([unconditional_condition, condition]) + noise_uncond, noise = noise_pred_fn(x_in, t_in, cond=c_in).chunk(2) + return noise_uncond + guidance_scale * (noise - noise_uncond) + + assert model_type in ["noise", "x_start", "v"] + assert guidance_type in ["uncond", "classifier", "classifier-free"] + return model_fn + + +class UniPC: + def __init__( + self, + model_fn, + noise_schedule, + predict_x0=True, + thresholding=False, + max_val=1., + variant='bh1' + ): + """Construct a UniPC. + + We support both data_prediction and noise_prediction. + """ + self.model = model_fn + self.noise_schedule = noise_schedule + self.variant = variant + self.predict_x0 = predict_x0 + self.thresholding = thresholding + self.max_val = max_val + + def dynamic_thresholding_fn(self, x0, t=None): + """ + The dynamic thresholding method. 
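+        Note: this method reads `self.dynamic_thresholding_ratio` and `self.thresholding_max_val`,
+        which are not set in `__init__` above (only `thresholding` and `max_val` are stored there);
+        the clamping actually applied during sampling happens in `data_prediction_fn` below,
+        which uses a hard-coded p = 0.995 together with `self.max_val`.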
+ """ + dims = x0.dim() + p = self.dynamic_thresholding_ratio + s = torch.quantile(torch.abs(x0).reshape((x0.shape[0], -1)), p, dim=1) + s = expand_dims(torch.maximum(s, self.thresholding_max_val * torch.ones_like(s).to(s.device)), dims) + x0 = torch.clamp(x0, -s, s) / s + return x0 + + def noise_prediction_fn(self, x, t): + """ + Return the noise prediction model. + """ + return self.model(x, t) + + def data_prediction_fn(self, x, t): + """ + Return the data prediction model (with thresholding). + """ + noise = self.noise_prediction_fn(x, t) + dims = x.dim() + alpha_t, sigma_t = self.noise_schedule.marginal_alpha(t), self.noise_schedule.marginal_std(t) + x0 = (x - expand_dims(sigma_t, dims) * noise) / expand_dims(alpha_t, dims) + if self.thresholding: + p = 0.995 # A hyperparameter in the paper of "Imagen" [1]. + s = torch.quantile(torch.abs(x0).reshape((x0.shape[0], -1)), p, dim=1) + s = expand_dims(torch.maximum(s, self.max_val * torch.ones_like(s).to(s.device)), dims) + x0 = torch.clamp(x0, -s, s) / s + return x0 + + def model_fn(self, x, t): + """ + Convert the model to the noise prediction model or the data prediction model. + """ + if self.predict_x0: + return self.data_prediction_fn(x, t) + else: + return self.noise_prediction_fn(x, t) + + def get_time_steps(self, skip_type, t_T, t_0, N, device): + """Compute the intermediate time steps for sampling. + """ + if skip_type == 'logSNR': + lambda_T = self.noise_schedule.marginal_lambda(torch.tensor(t_T).to(device)) + lambda_0 = self.noise_schedule.marginal_lambda(torch.tensor(t_0).to(device)) + logSNR_steps = torch.linspace(lambda_T.cpu().item(), lambda_0.cpu().item(), N + 1).to(device) + return self.noise_schedule.inverse_lambda(logSNR_steps) + elif skip_type == 'time_uniform': + return torch.linspace(t_T, t_0, N + 1).to(device) + elif skip_type == 'time_quadratic': + t_order = 2 + t = torch.linspace(t_T**(1. / t_order), t_0**(1. / t_order), N + 1).pow(t_order).to(device) + return t + else: + raise ValueError("Unsupported skip_type {}, need to be 'logSNR' or 'time_uniform' or 'time_quadratic'".format(skip_type)) + + def get_orders_and_timesteps_for_singlestep_solver(self, steps, order, skip_type, t_T, t_0, device): + """ + Get the order of each step for sampling by the singlestep DPM-Solver. + """ + if order == 3: + K = steps // 3 + 1 + if steps % 3 == 0: + orders = [3,] * (K - 2) + [2, 1] + elif steps % 3 == 1: + orders = [3,] * (K - 1) + [1] + else: + orders = [3,] * (K - 1) + [2] + elif order == 2: + if steps % 2 == 0: + K = steps // 2 + orders = [2,] * K + else: + K = steps // 2 + 1 + orders = [2,] * (K - 1) + [1] + elif order == 1: + K = steps + orders = [1,] * steps + else: + raise ValueError("'order' must be '1' or '2' or '3'.") + if skip_type == 'logSNR': + # To reproduce the results in DPM-Solver paper + timesteps_outer = self.get_time_steps(skip_type, t_T, t_0, K, device) + else: + timesteps_outer = self.get_time_steps(skip_type, t_T, t_0, steps, device)[torch.cumsum(torch.tensor([0,] + orders), 0).to(device)] + return timesteps_outer, orders + + def denoise_to_zero_fn(self, x, s): + """ + Denoise at the final step, which is equivalent to solve the ODE from lambda_s to infty by first-order discretization. 
+ """ + return self.data_prediction_fn(x, s) + + def multistep_uni_pc_update(self, x, model_prev_list, t_prev_list, t, order, **kwargs): + if len(t.shape) == 0: + t = t.view(-1) + if 'bh' in self.variant: + return self.multistep_uni_pc_bh_update(x, model_prev_list, t_prev_list, t, order, **kwargs) + else: + assert self.variant == 'vary_coeff' + return self.multistep_uni_pc_vary_update(x, model_prev_list, t_prev_list, t, order, **kwargs) + + def multistep_uni_pc_vary_update(self, x, model_prev_list, t_prev_list, t, order, use_corrector=True): + print(f'using unified predictor-corrector with order {order} (solver type: vary coeff)') + ns = self.noise_schedule + assert order <= len(model_prev_list) + + # first compute rks + t_prev_0 = t_prev_list[-1] + lambda_prev_0 = ns.marginal_lambda(t_prev_0) + lambda_t = ns.marginal_lambda(t) + model_prev_0 = model_prev_list[-1] + sigma_prev_0, sigma_t = ns.marginal_std(t_prev_0), ns.marginal_std(t) + log_alpha_t = ns.marginal_log_mean_coeff(t) + alpha_t = torch.exp(log_alpha_t) + + h = lambda_t - lambda_prev_0 + + rks = [] + D1s = [] + for i in range(1, order): + t_prev_i = t_prev_list[-(i + 1)] + model_prev_i = model_prev_list[-(i + 1)] + lambda_prev_i = ns.marginal_lambda(t_prev_i) + rk = (lambda_prev_i - lambda_prev_0) / h + rks.append(rk) + D1s.append((model_prev_i - model_prev_0) / rk) + + rks.append(1.) + rks = torch.tensor(rks, device=x.device) + + K = len(rks) + # build C matrix + C = [] + + col = torch.ones_like(rks) + for k in range(1, K + 1): + C.append(col) + col = col * rks / (k + 1) + C = torch.stack(C, dim=1) + + if len(D1s) > 0: + D1s = torch.stack(D1s, dim=1) # (B, K) + C_inv_p = torch.linalg.inv(C[:-1, :-1]) + A_p = C_inv_p + + if use_corrector: + print('using corrector') + C_inv = torch.linalg.inv(C) + A_c = C_inv + + hh = -h if self.predict_x0 else h + h_phi_1 = torch.expm1(hh) + h_phi_ks = [] + factorial_k = 1 + h_phi_k = h_phi_1 + for k in range(1, K + 2): + h_phi_ks.append(h_phi_k) + h_phi_k = h_phi_k / hh - 1 / factorial_k + factorial_k *= (k + 1) + + model_t = None + if self.predict_x0: + x_t_ = ( + sigma_t / sigma_prev_0 * x + - alpha_t * h_phi_1 * model_prev_0 + ) + # now predictor + x_t = x_t_ + if len(D1s) > 0: + # compute the residuals for predictor + for k in range(K - 1): + x_t = x_t - alpha_t * h_phi_ks[k + 1] * torch.einsum('bkchw,k->bchw', D1s, A_p[k]) + # now corrector + if use_corrector: + model_t = self.model_fn(x_t, t) + D1_t = (model_t - model_prev_0) + x_t = x_t_ + k = 0 + for k in range(K - 1): + x_t = x_t - alpha_t * h_phi_ks[k + 1] * torch.einsum('bkchw,k->bchw', D1s, A_c[k][:-1]) + x_t = x_t - alpha_t * h_phi_ks[K] * (D1_t * A_c[k][-1]) + else: + log_alpha_prev_0, log_alpha_t = ns.marginal_log_mean_coeff(t_prev_0), ns.marginal_log_mean_coeff(t) + x_t_ = ( + (torch.exp(log_alpha_t - log_alpha_prev_0)) * x + - (sigma_t * h_phi_1) * model_prev_0 + ) + # now predictor + x_t = x_t_ + if len(D1s) > 0: + # compute the residuals for predictor + for k in range(K - 1): + x_t = x_t - sigma_t * h_phi_ks[k + 1] * torch.einsum('bkchw,k->bchw', D1s, A_p[k]) + # now corrector + if use_corrector: + model_t = self.model_fn(x_t, t) + D1_t = (model_t - model_prev_0) + x_t = x_t_ + k = 0 + for k in range(K - 1): + x_t = x_t - sigma_t * h_phi_ks[k + 1] * torch.einsum('bkchw,k->bchw', D1s, A_c[k][:-1]) + x_t = x_t - sigma_t * h_phi_ks[K] * (D1_t * A_c[k][-1]) + return x_t, model_t + + def multistep_uni_pc_bh_update(self, x, model_prev_list, t_prev_list, t, order, x_t=None, use_corrector=True): + print(f'using unified 
predictor-corrector with order {order} (solver type: B(h))') + ns = self.noise_schedule + assert order <= len(model_prev_list) + dims = x.dim() + + # first compute rks + t_prev_0 = t_prev_list[-1] + lambda_prev_0 = ns.marginal_lambda(t_prev_0) + lambda_t = ns.marginal_lambda(t) + model_prev_0 = model_prev_list[-1] + sigma_prev_0, sigma_t = ns.marginal_std(t_prev_0), ns.marginal_std(t) + log_alpha_prev_0, log_alpha_t = ns.marginal_log_mean_coeff(t_prev_0), ns.marginal_log_mean_coeff(t) + alpha_t = torch.exp(log_alpha_t) + + h = lambda_t - lambda_prev_0 + + rks = [] + D1s = [] + for i in range(1, order): + t_prev_i = t_prev_list[-(i + 1)] + model_prev_i = model_prev_list[-(i + 1)] + lambda_prev_i = ns.marginal_lambda(t_prev_i) + rk = ((lambda_prev_i - lambda_prev_0) / h)[0] + rks.append(rk) + D1s.append((model_prev_i - model_prev_0) / rk) + + rks.append(1.) + rks = torch.tensor(rks, device=x.device) + + R = [] + b = [] + + hh = -h[0] if self.predict_x0 else h[0] + h_phi_1 = torch.expm1(hh) # h\phi_1(h) = e^h - 1 + h_phi_k = h_phi_1 / hh - 1 + + factorial_i = 1 + + if self.variant == 'bh1': + B_h = hh + elif self.variant == 'bh2': + B_h = torch.expm1(hh) + else: + raise NotImplementedError() + + for i in range(1, order + 1): + R.append(torch.pow(rks, i - 1)) + b.append(h_phi_k * factorial_i / B_h) + factorial_i *= (i + 1) + h_phi_k = h_phi_k / hh - 1 / factorial_i + + R = torch.stack(R) + b = torch.tensor(b, device=x.device) + + # now predictor + use_predictor = len(D1s) > 0 and x_t is None + if len(D1s) > 0: + D1s = torch.stack(D1s, dim=1) # (B, K) + if x_t is None: + # for order 2, we use a simplified version + if order == 2: + rhos_p = torch.tensor([0.5], device=b.device) + else: + rhos_p = torch.linalg.solve(R[:-1, :-1], b[:-1]) + else: + D1s = None + + if use_corrector: + print('using corrector') + # for order 1, we use a simplified version + if order == 1: + rhos_c = torch.tensor([0.5], device=b.device) + else: + rhos_c = torch.linalg.solve(R, b) + + model_t = None + if self.predict_x0: + x_t_ = ( + expand_dims(sigma_t / sigma_prev_0, dims) * x + - expand_dims(alpha_t * h_phi_1, dims)* model_prev_0 + ) + + if x_t is None: + if use_predictor: + pred_res = torch.einsum('k,bkchw->bchw', rhos_p, D1s) + else: + pred_res = 0 + x_t = x_t_ - expand_dims(alpha_t * B_h, dims) * pred_res + + if use_corrector: + model_t = self.model_fn(x_t, t) + if D1s is not None: + corr_res = torch.einsum('k,bkchw->bchw', rhos_c[:-1], D1s) + else: + corr_res = 0 + D1_t = (model_t - model_prev_0) + x_t = x_t_ - expand_dims(alpha_t * B_h, dims) * (corr_res + rhos_c[-1] * D1_t) + else: + x_t_ = ( + expand_dims(torch.exp(log_alpha_t - log_alpha_prev_0), dims) * x + - expand_dims(sigma_t * h_phi_1, dims) * model_prev_0 + ) + if x_t is None: + if use_predictor: + pred_res = torch.einsum('k,bkchw->bchw', rhos_p, D1s) + else: + pred_res = 0 + x_t = x_t_ - expand_dims(sigma_t * B_h, dims) * pred_res + + if use_corrector: + model_t = self.model_fn(x_t, t) + if D1s is not None: + corr_res = torch.einsum('k,bkchw->bchw', rhos_c[:-1], D1s) + else: + corr_res = 0 + D1_t = (model_t - model_prev_0) + x_t = x_t_ - expand_dims(sigma_t * B_h, dims) * (corr_res + rhos_c[-1] * D1_t) + return x_t, model_t + + + def sample(self, x, steps=20, t_start=None, t_end=None, order=3, skip_type='time_uniform', + method='singlestep', lower_order_final=True, denoise_to_zero=False, solver_type='dpm_solver', + atol=0.0078, rtol=0.05, corrector=False, + ): + t_0 = 1. 
/ self.noise_schedule.total_N if t_end is None else t_end + t_T = self.noise_schedule.T if t_start is None else t_start + device = x.device + if method == 'multistep': + assert steps >= order + timesteps = self.get_time_steps(skip_type=skip_type, t_T=t_T, t_0=t_0, N=steps, device=device) + assert timesteps.shape[0] - 1 == steps + with torch.no_grad(): + vec_t = timesteps[0].expand((x.shape[0])) + model_prev_list = [self.model_fn(x, vec_t)] + t_prev_list = [vec_t] + # Init the first `order` values by lower order multistep DPM-Solver. + for init_order in range(1, order): + vec_t = timesteps[init_order].expand(x.shape[0]) + x, model_x = self.multistep_uni_pc_update(x, model_prev_list, t_prev_list, vec_t, init_order, use_corrector=True) + if model_x is None: + model_x = self.model_fn(x, vec_t) + model_prev_list.append(model_x) + t_prev_list.append(vec_t) + for step in range(order, steps + 1): + vec_t = timesteps[step].expand(x.shape[0]) + if lower_order_final: + step_order = min(order, steps + 1 - step) + else: + step_order = order + print('this step order:', step_order) + if step == steps: + print('do not run corrector at the last step') + use_corrector = False + else: + use_corrector = True + x, model_x = self.multistep_uni_pc_update(x, model_prev_list, t_prev_list, vec_t, step_order, use_corrector=use_corrector) + for i in range(order - 1): + t_prev_list[i] = t_prev_list[i + 1] + model_prev_list[i] = model_prev_list[i + 1] + t_prev_list[-1] = vec_t + # We do not need to evaluate the final model value. + if step < steps: + if model_x is None: + model_x = self.model_fn(x, vec_t) + model_prev_list[-1] = model_x + else: + raise NotImplementedError() + if denoise_to_zero: + x = self.denoise_to_zero_fn(x, torch.ones((x.shape[0],)).to(device) * t_0) + return x + + +############################################################# +# other utility functions +############################################################# + +def interpolate_fn(x, xp, yp): + """ + A piecewise linear function y = f(x), using xp and yp as keypoints. + We implement f(x) in a differentiable way (i.e. applicable for autograd). + The function f(x) is well-defined for all x-axis. (For x beyond the bounds of xp, we use the outmost points of xp to define the linear function.) + + Args: + x: PyTorch tensor with shape [N, C], where N is the batch size, C is the number of channels (we use C = 1 for DPM-Solver). + xp: PyTorch tensor with shape [C, K], where K is the number of keypoints. + yp: PyTorch tensor with shape [C, K]. + Returns: + The function values f(x), with shape [N, C]. 
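+
+    Example (illustrative): with x = [[0.5]], xp = [[0., 1.]] and yp = [[0., 2.]], the keypoints
+    (0, 0) and (1, 2) are interpolated linearly, so the returned value is [[1.0]].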
+ """ + N, K = x.shape[0], xp.shape[1] + all_x = torch.cat([x.unsqueeze(2), xp.unsqueeze(0).repeat((N, 1, 1))], dim=2) + sorted_all_x, x_indices = torch.sort(all_x, dim=2) + x_idx = torch.argmin(x_indices, dim=2) + cand_start_idx = x_idx - 1 + start_idx = torch.where( + torch.eq(x_idx, 0), + torch.tensor(1, device=x.device), + torch.where( + torch.eq(x_idx, K), torch.tensor(K - 2, device=x.device), cand_start_idx, + ), + ) + end_idx = torch.where(torch.eq(start_idx, cand_start_idx), start_idx + 2, start_idx + 1) + start_x = torch.gather(sorted_all_x, dim=2, index=start_idx.unsqueeze(2)).squeeze(2) + end_x = torch.gather(sorted_all_x, dim=2, index=end_idx.unsqueeze(2)).squeeze(2) + start_idx2 = torch.where( + torch.eq(x_idx, 0), + torch.tensor(0, device=x.device), + torch.where( + torch.eq(x_idx, K), torch.tensor(K - 2, device=x.device), cand_start_idx, + ), + ) + y_positions_expanded = yp.unsqueeze(0).expand(N, -1, -1) + start_y = torch.gather(y_positions_expanded, dim=2, index=start_idx2.unsqueeze(2)).squeeze(2) + end_y = torch.gather(y_positions_expanded, dim=2, index=(start_idx2 + 1).unsqueeze(2)).squeeze(2) + cand = start_y + (x - start_x) * (end_y - start_y) / (end_x - start_x) + return cand + + +def expand_dims(v, dims): + """ + Expand the tensor `v` to the dim `dims`. + + Args: + `v`: a PyTorch tensor with shape [N]. + `dim`: a `int`. + Returns: + a PyTorch tensor with shape [N, 1, 1, ..., 1] and the total dimension is `dims`. + """ + return v[(...,) + (None,)*(dims - 1)] \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/utils_diffusion.py b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/utils_diffusion.py new file mode 100644 index 0000000000000000000000000000000000000000..403b7b3aa4dfc8f1ce7abac978b6ac843040e294 --- /dev/null +++ b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/models/utils_diffusion.py @@ -0,0 +1,158 @@ +import math +import numpy as np +import torch +import torch.nn.functional as F +from einops import repeat + + +def timestep_embedding(timesteps, dim, max_period=10000, repeat_only=False, dtype=None): + """ + Create sinusoidal timestep embeddings. + :param timesteps: a 1-D Tensor of N indices, one per batch element. + These may be fractional. + :param dim: the dimension of the output. + :param max_period: controls the minimum frequency of the embeddings. + :return: an [N x dim] Tensor of positional embeddings. 
+ """ + if not repeat_only: + half = dim // 2 + freqs = torch.exp( + -math.log(max_period) * torch.arange(start=0, end=half, dtype=dtype) / half + ).to(device=timesteps.device) + args = timesteps[:, None].float() * freqs[None] + embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) + if dim % 2: + embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1) + else: + embedding = repeat(timesteps, 'b -> b d', d=dim) + return embedding.to(dtype) + + +def make_beta_schedule(schedule, n_timestep, linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3): + if schedule == "linear": + betas = ( + torch.linspace(linear_start ** 0.5, linear_end ** 0.5, n_timestep, dtype=torch.float64) ** 2 + ) + + elif schedule == "cosine": + timesteps = ( + torch.arange(n_timestep + 1, dtype=torch.float64) / n_timestep + cosine_s + ) + alphas = timesteps / (1 + cosine_s) * np.pi / 2 + alphas = torch.cos(alphas).pow(2) + alphas = alphas / alphas[0] + betas = 1 - alphas[1:] / alphas[:-1] + betas = np.clip(betas, a_min=0, a_max=0.999) + + elif schedule == "sqrt_linear": + betas = torch.linspace(linear_start, linear_end, n_timestep, dtype=torch.float64) + elif schedule == "sqrt": + betas = torch.linspace(linear_start, linear_end, n_timestep, dtype=torch.float64) ** 0.5 + else: + raise ValueError(f"schedule '{schedule}' unknown.") + return betas.numpy() + + +def make_ddim_timesteps(ddim_discr_method, num_ddim_timesteps, num_ddpm_timesteps, verbose=True): + if ddim_discr_method == 'uniform': + c = num_ddpm_timesteps // num_ddim_timesteps + ddim_timesteps = np.asarray(list(range(0, num_ddpm_timesteps, c))) + steps_out = ddim_timesteps + 1 + elif ddim_discr_method == 'uniform_trailing': + c = num_ddpm_timesteps / num_ddim_timesteps + ddim_timesteps = np.flip(np.round(np.arange(num_ddpm_timesteps, 0, -c))).astype(np.int64) + steps_out = ddim_timesteps - 1 + elif ddim_discr_method == 'quad': + ddim_timesteps = ((np.linspace(0, np.sqrt(num_ddpm_timesteps * .8), num_ddim_timesteps)) ** 2).astype(int) + steps_out = ddim_timesteps + 1 + else: + raise NotImplementedError(f'There is no ddim discretization method called "{ddim_discr_method}"') + + # assert ddim_timesteps.shape[0] == num_ddim_timesteps + # add one to get the final alpha values right (the ones from first scale to data during sampling) + # steps_out = ddim_timesteps + 1 + if verbose: + print(f'Selected timesteps for ddim sampler: {steps_out}') + return steps_out + + +def make_ddim_sampling_parameters(alphacums, ddim_timesteps, eta, verbose=True): + # select alphas for computing the variance schedule + # print(f'ddim_timesteps={ddim_timesteps}, len_alphacums={len(alphacums)}') + alphas = alphacums[ddim_timesteps] + alphas_prev = np.asarray([alphacums[0]] + alphacums[ddim_timesteps[:-1]].tolist()) + + # according the the formula provided in https://arxiv.org/abs/2010.02502 + sigmas = eta * np.sqrt((1 - alphas_prev) / (1 - alphas) * (1 - alphas / alphas_prev)) + if verbose: + print(f'Selected alphas for ddim sampler: a_t: {alphas}; a_(t-1): {alphas_prev}') + print(f'For the chosen value of eta, which is {eta}, ' + f'this results in the following sigma_t schedule for ddim sampler {sigmas}') + return sigmas, alphas, alphas_prev + + +def betas_for_alpha_bar(num_diffusion_timesteps, alpha_bar, max_beta=0.999): + """ + Create a beta schedule that discretizes the given alpha_t_bar function, + which defines the cumulative product of (1-beta) over time from t = [0,1]. + :param num_diffusion_timesteps: the number of betas to produce. 
+ :param alpha_bar: a lambda that takes an argument t from 0 to 1 and + produces the cumulative product of (1-beta) up to that + part of the diffusion process. + :param max_beta: the maximum beta to use; use values lower than 1 to + prevent singularities. + """ + betas = [] + for i in range(num_diffusion_timesteps): + t1 = i / num_diffusion_timesteps + t2 = (i + 1) / num_diffusion_timesteps + betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta)) + return np.array(betas) + +def rescale_zero_terminal_snr(betas): + """ + Rescales betas to have zero terminal SNR Based on https://arxiv.org/pdf/2305.08891.pdf (Algorithm 1) + + Args: + betas (`numpy.ndarray`): + the betas that the scheduler is being initialized with. + + Returns: + `numpy.ndarray`: rescaled betas with zero terminal SNR + """ + # Convert betas to alphas_bar_sqrt + alphas = 1.0 - betas + alphas_cumprod = np.cumprod(alphas, axis=0) + alphas_bar_sqrt = np.sqrt(alphas_cumprod) + + # Store old values. + alphas_bar_sqrt_0 = alphas_bar_sqrt[0].copy() + alphas_bar_sqrt_T = alphas_bar_sqrt[-1].copy() + + # Shift so the last timestep is zero. + alphas_bar_sqrt -= alphas_bar_sqrt_T + + # Scale so the first timestep is back to the old value. + alphas_bar_sqrt *= alphas_bar_sqrt_0 / (alphas_bar_sqrt_0 - alphas_bar_sqrt_T) + + # Convert alphas_bar_sqrt to betas + alphas_bar = alphas_bar_sqrt**2 # Revert sqrt + alphas = alphas_bar[1:] / alphas_bar[:-1] # Revert cumprod + alphas = np.concatenate([alphas_bar[0:1], alphas]) + betas = 1 - alphas + + return betas + + +def rescale_noise_cfg(noise_cfg, noise_pred_text, guidance_rescale=0.0): + """ + Rescale `noise_cfg` according to `guidance_rescale`. Based on findings of [Common Diffusion Noise Schedules and + Sample Steps are Flawed](https://arxiv.org/pdf/2305.08891.pdf). 
See Section 3.4 + """ + std_text = noise_pred_text.std(dim=list(range(1, noise_pred_text.ndim)), keepdim=True) + std_cfg = noise_cfg.std(dim=list(range(1, noise_cfg.ndim)), keepdim=True) + # rescale the results from guidance (fixes overexposure) + noise_pred_rescaled = noise_cfg * (std_text / std_cfg) + # mix with the original results from guidance by factor guidance_rescale to avoid "plain looking" images + noise_cfg = guidance_rescale * noise_pred_rescaled + (1 - guidance_rescale) * noise_cfg + return noise_cfg \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/modules/attention.py b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/modules/attention.py new file mode 100644 index 0000000000000000000000000000000000000000..a1360291017dde674132c2f4ae7c21424b9e2815 --- /dev/null +++ b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/modules/attention.py @@ -0,0 +1,809 @@ +import torch +from torch import nn, einsum +import torch.nn.functional as F +from einops import rearrange, repeat +from functools import partial +from ..common import ( + checkpoint, + exists, + default, +) +from ..basics import zero_module +import comfy.ops +ops = comfy.ops.disable_weight_init +from comfy import model_management +from comfy.ldm.modules.attention import optimized_attention, optimized_attention_masked + +if model_management.xformers_enabled(): + import xformers + import xformers.ops + XFORMERS_IS_AVAILBLE = True +else: + XFORMERS_IS_AVAILBLE = False + +class RelativePosition(nn.Module): + """ https://github.com/evelinehong/Transformer_Relative_Position_PyTorch/blob/master/relative_position.py """ + + def __init__(self, num_units, max_relative_position): + super().__init__() + self.num_units = num_units + self.max_relative_position = max_relative_position + self.embeddings_table = nn.Parameter(torch.Tensor(max_relative_position * 2 + 1, num_units)) + nn.init.xavier_uniform_(self.embeddings_table) + + def forward(self, length_q, length_k): + device = self.embeddings_table.device + range_vec_q = torch.arange(length_q, device=device) + range_vec_k = torch.arange(length_k, device=device) + distance_mat = range_vec_k[None, :] - range_vec_q[:, None] + distance_mat_clipped = torch.clamp(distance_mat, -self.max_relative_position, self.max_relative_position) + final_mat = distance_mat_clipped + self.max_relative_position + final_mat = final_mat.long() + embeddings = self.embeddings_table[final_mat] + return embeddings + + +# TODO Add native Comfy optimized attention. 
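+# Note: CrossAttention below binds its forward path at construction time. With relative position
+# bias (which requires `temporal_length`) the plain einsum-based `forward` is kept; otherwise the
+# xformers `efficient_forward` is used when xformers is available and no `temporal_length` is given,
+# and `comfy_efficient_forward`, built on ComfyUI's `optimized_attention` helpers, is the fallback.
+# When `image_cross_attention` is enabled, the context is split into the first `text_context_len`
+# tokens (text) and the remaining tokens (image); the image tokens go through separate
+# `to_k_ip`/`to_v_ip` projections and their attention output is added back scaled by
+# `image_cross_attention_scale` (optionally modulated by the learnable `alpha` via tanh).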
+class CrossAttention(nn.Module): + + def __init__( + self, + query_dim, + context_dim=None, + heads=8, + dim_head=64, + dropout=0., + relative_position=False, + temporal_length=None, + video_length=None, + image_cross_attention=False, + image_cross_attention_scale=1.0, + image_cross_attention_scale_learnable=False, + text_context_len=77, + device=None, + dtype=None, + operations=ops + ): + super().__init__() + inner_dim = dim_head * heads + context_dim = default(context_dim, query_dim) + self.scale = dim_head**-0.5 + self.heads = heads + self.dim_head = dim_head + self.to_q = operations.Linear(query_dim, inner_dim, bias=False, device=device, dtype=dtype) + self.to_k = operations.Linear(context_dim, inner_dim, bias=False, device=device, dtype=dtype) + self.to_v = operations.Linear(context_dim, inner_dim, bias=False, device=device, dtype=dtype) + + self.to_out = nn.Sequential( + operations.Linear(inner_dim, query_dim, device=device, dtype=dtype), + nn.Dropout(dropout) + ) + + self.relative_position = relative_position + if self.relative_position: + assert(temporal_length is not None) + self.relative_position_k = RelativePosition(num_units=dim_head, max_relative_position=temporal_length) + self.relative_position_v = RelativePosition(num_units=dim_head, max_relative_position=temporal_length) + else: + ## only used for spatial attention, while NOT for temporal attention + if XFORMERS_IS_AVAILBLE and temporal_length is None: + self.forward = self.efficient_forward + else: + self.forward = self.comfy_efficient_forward + + self.video_length = video_length + self.image_cross_attention = image_cross_attention + self.image_cross_attention_scale = image_cross_attention_scale + self.text_context_len = text_context_len + self.image_cross_attention_scale_learnable = image_cross_attention_scale_learnable + if self.image_cross_attention: + self.to_k_ip = operations.Linear(context_dim, inner_dim, bias=False, device=device, dtype=dtype) + self.to_v_ip = operations.Linear(context_dim, inner_dim, bias=False, device=device, dtype=dtype) + if image_cross_attention_scale_learnable: + self.register_parameter('alpha', nn.Parameter(torch.tensor(0.)) ) + + def comfy_efficient_forward(self, x, context=None, mask=None, *args, **kwargs): + spatial_self_attn = (context is None) + k_ip, v_ip, out_ip = None, None, None + + h = self.heads + q = self.to_q(x) + context = default(context, x) + + if self.image_cross_attention and not spatial_self_attn: + context, context_image = context[:,:self.text_context_len,:], context[:,self.text_context_len:,:] + k = self.to_k(context) + v = self.to_v(context) + k_ip = self.to_k_ip(context_image) + v_ip = self.to_v_ip(context_image) + else: + if not spatial_self_attn: + context = context[:,:self.text_context_len,:] + k = self.to_k(context) + v = self.to_v(context) + + out = optimized_attention(q, k, v, h) + + if exists(mask): + ## feasible for causal attention mask only + out = optimized_attention_masked(q, k, v, h) + + ## for image cross-attention + if k_ip is not None: + q = rearrange(q, 'b n (h d) -> (b h) n d', h=h) + k_ip, v_ip = map(lambda t: rearrange(t, 'b n (h d) -> (b h) n d', h=h), (k_ip, v_ip)) + sim_ip = torch.einsum('b i d, b j d -> b i j', q, k_ip) * self.scale + del k_ip + sim_ip = sim_ip.softmax(dim=-1) + out_ip = torch.einsum('b i j, b j d -> b i d', sim_ip, v_ip) + out_ip = rearrange(out_ip, '(b h) n d -> b n (h d)', h=h) + + if out_ip is not None: + if self.image_cross_attention_scale_learnable: + out = out + self.image_cross_attention_scale * out_ip * 
(torch.tanh(self.alpha)+1) + else: + out = out + self.image_cross_attention_scale * out_ip + + return self.to_out(out) + + def forward(self, x, context=None, mask=None): + spatial_self_attn = (context is None) + k_ip, v_ip, out_ip = None, None, None + + h = self.heads + q = self.to_q(x) + context = default(context, x) + + if self.image_cross_attention and not spatial_self_attn: + context, context_image = context[:,:self.text_context_len,:], context[:,self.text_context_len:,:] + k = self.to_k(context) + v = self.to_v(context) + k_ip = self.to_k_ip(context_image) + v_ip = self.to_v_ip(context_image) + else: + + # Assumed Spatial Attention (b c h w) + if not spatial_self_attn: + context = context[:,:self.text_context_len,:] + k = self.to_k(context) + v = self.to_v(context) + + + q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> (b h) n d', h=h), (q, k, v)) + + sim = torch.einsum('b i d, b j d -> b i j', q, k) * self.scale + if self.relative_position: + len_q, len_k, len_v = q.shape[1], k.shape[1], v.shape[1] + k2 = self.relative_position_k(len_q, len_k) + sim2 = einsum('b t d, t s d -> b t s', q, k2) * self.scale # TODO check + sim += sim2 + del k + + if exists(mask): + ## feasible for causal attention mask only + max_neg_value = -torch.finfo(sim.dtype).max + mask = repeat(mask, 'b i j -> (b h) i j', h=h) + sim.masked_fill_(~(mask>0.5), max_neg_value) + + # attention, what we cannot get enough of + sim = sim.softmax(dim=-1) + + out = torch.einsum('b i j, b j d -> b i d', sim, v) + if self.relative_position: + v2 = self.relative_position_v(len_q, len_v) + out2 = einsum('b t s, t s d -> b t d', sim, v2) # TODO check + out += out2 + out = rearrange(out, '(b h) n d -> b n (h d)', h=h) + + + ## for image cross-attention + if k_ip is not None: + k_ip, v_ip = map(lambda t: rearrange(t, 'b n (h d) -> (b h) n d', h=h), (k_ip, v_ip)) + sim_ip = torch.einsum('b i d, b j d -> b i j', q, k_ip) * self.scale + del k_ip + sim_ip = sim_ip.softmax(dim=-1) + out_ip = torch.einsum('b i j, b j d -> b i d', sim_ip, v_ip) + out_ip = rearrange(out_ip, '(b h) n d -> b n (h d)', h=h) + + + if out_ip is not None: + if self.image_cross_attention_scale_learnable: + out = out + self.image_cross_attention_scale * out_ip * (torch.tanh(self.alpha)+1) + else: + out = out + self.image_cross_attention_scale * out_ip + + return self.to_out(out) + + def efficient_forward(self, x, context=None, mask=None): + spatial_self_attn = (context is None) + k_ip, v_ip, out_ip = None, None, None + + q = self.to_q(x) + context = default(context, x) + + if self.image_cross_attention and not spatial_self_attn: + context, context_image = context[:,:self.text_context_len,:], context[:,self.text_context_len:,:] + k = self.to_k(context) + v = self.to_v(context) + k_ip = self.to_k_ip(context_image) + v_ip = self.to_v_ip(context_image) + else: + if not spatial_self_attn: + context = context[:,:self.text_context_len,:] + k = self.to_k(context) + v = self.to_v(context) + + b, _, _ = q.shape + q, k, v = map( + lambda t: t.unsqueeze(3) + .reshape(b, t.shape[1], self.heads, self.dim_head) + .permute(0, 2, 1, 3) + .reshape(b * self.heads, t.shape[1], self.dim_head) + .contiguous(), + (q, k, v), + ) + # actually compute the attention, what we cannot get enough of + out = xformers.ops.memory_efficient_attention(q, k, v, attn_bias=None, op=None) + + ## for image cross-attention + if k_ip is not None: + k_ip, v_ip = map( + lambda t: t.unsqueeze(3) + .reshape(b, t.shape[1], self.heads, self.dim_head) + .permute(0, 2, 1, 3) + .reshape(b * self.heads, 
t.shape[1], self.dim_head) + .contiguous(), + (k_ip, v_ip), + ) + out_ip = xformers.ops.memory_efficient_attention(q, k_ip, v_ip, attn_bias=None, op=None) + out_ip = ( + out_ip.unsqueeze(0) + .reshape(b, self.heads, out.shape[1], self.dim_head) + .permute(0, 2, 1, 3) + .reshape(b, out.shape[1], self.heads * self.dim_head) + ) + + if exists(mask): + raise NotImplementedError + out = ( + out.unsqueeze(0) + .reshape(b, self.heads, out.shape[1], self.dim_head) + .permute(0, 2, 1, 3) + .reshape(b, out.shape[1], self.heads * self.dim_head) + ) + if out_ip is not None: + if self.image_cross_attention_scale_learnable: + out = out + self.image_cross_attention_scale * out_ip * (torch.tanh(self.alpha)+1) + else: + out = out + self.image_cross_attention_scale * out_ip + + return self.to_out(out) + + +class BasicTransformerBlock(nn.Module): + + def __init__( + self, + dim, + n_heads, + d_head, + dropout=0., + context_dim=None, + gated_ff=True, + checkpoint=True, + disable_self_attn=False, + attention_cls=None, + video_length=None, + inner_dim=None, + image_cross_attention=False, + image_cross_attention_scale=1.0, + image_cross_attention_scale_learnable=False, + switch_temporal_ca_to_sa=False, + text_context_len=77, + ff_in=None, + device=None, + dtype=None, + operations=ops + ): + super().__init__() + attn_cls = CrossAttention if attention_cls is None else attention_cls + + self.ff_in = ff_in or inner_dim is not None + if self.ff_in: + self.norm_in = operations.LayerNorm(dim, dtype=dtype, device=device) + self.ff_in = FeedForward( + dim, + dim_out=inner_dim, + dropout=dropout, + glu=gated_ff, + dtype=dtype, + device=device, + operations=operations + ) + if inner_dim is None: + inner_dim = dim + + self.is_res = inner_dim == dim + self.disable_self_attn = disable_self_attn + self.attn1 = attn_cls(query_dim=dim, heads=n_heads, dim_head=d_head, dropout=dropout, + context_dim=None, device=device, dtype=dtype if self.disable_self_attn else None) + self.ff = FeedForward(dim, dropout=dropout, glu=gated_ff, device=device, dtype=dtype) + self.attn2 = attn_cls( + query_dim=dim, + context_dim=context_dim, + heads=n_heads, + dim_head=d_head, + dropout=dropout, + video_length=video_length, + image_cross_attention=image_cross_attention, + image_cross_attention_scale=image_cross_attention_scale, + image_cross_attention_scale_learnable=image_cross_attention_scale_learnable, + text_context_len=text_context_len, + device=device, + dtype=dtype + ) + self.image_cross_attention = image_cross_attention + + self.norm1 = operations.LayerNorm(dim, device=device, dtype=dtype) + self.norm2 = operations.LayerNorm(dim, device=device, dtype=dtype) + self.norm3 = operations.LayerNorm(dim, device=device, dtype=dtype) + + self.n_heads = n_heads + self.d_head = d_head + self.checkpoint = checkpoint + self.switch_temporal_ca_to_sa = switch_temporal_ca_to_sa + + def forward(self, x, context=None, mask=None, **kwargs): + ## implementation tricks: because checkpointing doesn't support non-tensor (e.g. 
None or scalar) arguments + input_tuple = (x,) ## should not be (x), otherwise *input_tuple will decouple x into multiple arguments + if context is not None: + input_tuple = (x, context) + if mask is not None: + forward_mask = partial(self._forward, mask=mask) + return checkpoint(forward_mask, (x,), self.parameters(), self.checkpoint) + return checkpoint(self._forward, input_tuple, self.parameters(), self.checkpoint) + + + def _forward(self, x, context=None, mask=None, transformer_options={}): + extra_options = {} + block = transformer_options.get("block", None) + block_index = transformer_options.get("block_index", 0) + transformer_patches = {} + transformer_patches_replace = {} + + for k in transformer_options: + if k == "patches": + transformer_patches = transformer_options[k] + elif k == "patches_replace": + transformer_patches_replace = transformer_options[k] + else: + extra_options[k] = transformer_options[k] + + extra_options["n_heads"] = self.n_heads + extra_options["dim_head"] = self.d_head + + if self.ff_in: + x_skip = x + x = self.ff_in(self.norm_in(x)) + if self.is_res: + x += x_skip + + n = self.norm1(x) + if self.disable_self_attn: + context_attn1 = context + else: + context_attn1 = None + value_attn1 = None + + if "attn1_patch" in transformer_patches: + patch = transformer_patches["attn1_patch"] + if context_attn1 is None: + context_attn1 = n + value_attn1 = context_attn1 + for p in patch: + n, context_attn1, value_attn1 = p(n, context_attn1, value_attn1, extra_options) + + if block is not None: + transformer_block = (block[0], block[1], block_index) + else: + transformer_block = None + attn1_replace_patch = transformer_patches_replace.get("attn1", {}) + block_attn1 = transformer_block + if block_attn1 not in attn1_replace_patch: + block_attn1 = block + + if block_attn1 in attn1_replace_patch: + if context_attn1 is None: + context_attn1 = n + value_attn1 = n + n = self.attn1.to_q(n) + context_attn1 = self.attn1.to_k(context_attn1) + value_attn1 = self.attn1.to_v(value_attn1) + n = attn1_replace_patch[block_attn1](n, context_attn1, value_attn1, extra_options) + n = self.attn1.to_out(n) + else: + n = self.attn1(n, context=context_attn1, value=value_attn1) + + if "attn1_output_patch" in transformer_patches: + patch = transformer_patches["attn1_output_patch"] + for p in patch: + n = p(n, extra_options) + + x += n + if "middle_patch" in transformer_patches: + patch = transformer_patches["middle_patch"] + for p in patch: + x = p(x, extra_options) + + if self.attn2 is not None: + n = self.norm2(x) + if self.switch_temporal_ca_to_sa: + context_attn2 = n + else: + context_attn2 = context + value_attn2 = None + if "attn2_patch" in transformer_patches: + patch = transformer_patches["attn2_patch"] + value_attn2 = context_attn2 + for p in patch: + n, context_attn2, value_attn2 = p(n, context_attn2, value_attn2, extra_options) + + attn2_replace_patch = transformer_patches_replace.get("attn2", {}) + block_attn2 = transformer_block + if block_attn2 not in attn2_replace_patch: + block_attn2 = block + + if block_attn2 in attn2_replace_patch: + if value_attn2 is None: + value_attn2 = context_attn2 + n = self.attn2.to_q(n) + context_attn2 = self.attn2.to_k(context_attn2) + value_attn2 = self.attn2.to_v(value_attn2) + n = attn2_replace_patch[block_attn2](n, context_attn2, value_attn2, extra_options) + n = self.attn2.to_out(n) + else: + n = self.attn2(n, context=context_attn2, value=value_attn2) + + if "attn2_output_patch" in transformer_patches: + patch = 
transformer_patches["attn2_output_patch"] + for p in patch: + n = p(n, extra_options) + + x += n + if self.is_res: + x_skip = x + x = self.ff(self.norm3(x)) + if self.is_res: + x += x_skip + + return x + + +class SpatialTransformer(nn.Module): + """ + Transformer block for image-like data in spatial axis. + First, project the input (aka embedding) + and reshape to b, t, d. + Then apply standard transformer action. + Finally, reshape to image + NEW: use_linear for more efficiency instead of the 1x1 convs + """ + + def __init__( + self, + in_channels, + n_heads, + d_head, + depth=1, + dropout=0., + context_dim=None, + use_checkpoint=True, + disable_self_attn=False, + use_linear=False, + video_length=None, + image_cross_attention=False, + image_cross_attention_scale_learnable=False, + device=None, + dtype=None, + operations=ops + ): + super().__init__() + self.in_channels = in_channels + inner_dim = n_heads * d_head + self.norm = operations.GroupNorm(num_groups=32, num_channels=in_channels, eps=1e-6, affine=True, device=device, dtype=dtype) + if not use_linear: + self.proj_in = opeations.Conv2d(in_channels, inner_dim, kernel_size=1, stride=1, padding=0, device=device, dtype=dtype) + else: + self.proj_in = operations.Linear(in_channels, inner_dim, device=device, dtype=dtype) + + attention_cls = None + self.transformer_blocks = nn.ModuleList([ + BasicTransformerBlock( + inner_dim, + n_heads, + d_head, + dropout=dropout, + context_dim=context_dim, + disable_self_attn=disable_self_attn, + checkpoint=use_checkpoint, + attention_cls=attention_cls, + video_length=video_length, + image_cross_attention=image_cross_attention, + image_cross_attention_scale_learnable=image_cross_attention_scale_learnable, + device=device, + dtype=dtype + ) for d in range(depth) + ]) + if not use_linear: + self.proj_out = zero_module(operations.Conv2d(inner_dim, in_channels, kernel_size=1, stride=1, padding=0, device=device, dtype=dtype)) + else: + self.proj_out = zero_module(operations.Linear(inner_dim, in_channels, device=device, dtype=dtype)) + self.use_linear = use_linear + + def forward(self, x, context=None, transformer_options={}, **kwargs): + b, c, h, w = x.shape + x_in = x + x = self.norm(x) + if not self.use_linear: + x = self.proj_in(x) + x = rearrange(x, 'b c h w -> b (h w) c').contiguous() + if self.use_linear: + x = self.proj_in(x) + for i, block in enumerate(self.transformer_blocks): + transformer_options['block_index'] = i + x = block(x, context=context, **kwargs) + if self.use_linear: + x = self.proj_out(x) + x = rearrange(x, 'b (h w) c -> b c h w', h=h, w=w).contiguous() + if not self.use_linear: + x = self.proj_out(x) + return x + x_in + + +class TemporalTransformer(nn.Module): + """ + Transformer block for image-like data in temporal axis. + First, reshape to b, t, d. + Then apply standard transformer action. 
+ Finally, reshape to image + """ + def __init__( + self, + in_channels, + n_heads, + d_head, + depth=1, + dropout=0., + context_dim=None, + use_checkpoint=True, + use_linear=False, + only_self_att=True, + causal_attention=False, + causal_block_size=1, + relative_position=False, + temporal_length=None, + device=None, + dtype=None, + operations=ops + ): + super().__init__() + self.only_self_att = only_self_att + self.relative_position = relative_position + self.causal_attention = causal_attention + self.causal_block_size = causal_block_size + + if only_self_att: + context_dim = None + + self.in_channels = in_channels + inner_dim = n_heads * d_head + self.norm = operations.GroupNorm(num_groups=32, num_channels=in_channels, eps=1e-6, affine=True, device=device, dtype=dtype) + self.proj_in = nn.Conv1d(in_channels, inner_dim, kernel_size=1, stride=1, padding=0).to(device, dtype) + if not use_linear: + self.proj_in = nn.Conv1d(in_channels, inner_dim, kernel_size=1, stride=1, padding=0).to(device, dtype) + else: + self.proj_in = operations.Linear(in_channels, inner_dim, device=device, dtype=dtype) + + if relative_position: + assert(temporal_length is not None) + attention_cls = partial(CrossAttention, relative_position=True, temporal_length=temporal_length, device=device, dtype=dtype) + else: + attention_cls = partial(CrossAttention, temporal_length=temporal_length, device=device, dtype=dtype) + if self.causal_attention: + assert(temporal_length is not None) + self.mask = torch.tril(torch.ones([1, temporal_length, temporal_length])) + + if self.only_self_att: + context_dim = None + self.transformer_blocks = nn.ModuleList([ + BasicTransformerBlock( + inner_dim, + n_heads, + d_head, + dropout=dropout, + context_dim=context_dim, + attention_cls=attention_cls, + checkpoint=use_checkpoint, + device=device, + dtype=dtype + ) for d in range(depth) + ]) + if not use_linear: + self.proj_out = zero_module(nn.Conv1d(inner_dim, in_channels, kernel_size=1, stride=1, padding=0).to(device, dtype)) + else: + self.proj_out = zero_module(operations.Linear(inner_dim, in_channels, device=device, dtype=dtype)) + self.use_linear = use_linear + + def forward(self, x, context=None): + b, c, t, h, w = x.shape + x_in = x + x = self.norm(x) + x = rearrange(x, 'b c t h w -> (b h w) c t').contiguous() + if not self.use_linear: + x = self.proj_in(x) + x = rearrange(x, 'bhw c t -> bhw t c').contiguous() + if self.use_linear: + x = self.proj_in(x) + + temp_mask = None + if self.causal_attention: + # slice the from mask map + temp_mask = self.mask[:,:t,:t].to(x.device) + + if temp_mask is not None: + mask = temp_mask.to(x.device) + mask = repeat(mask, 'l i j -> (l bhw) i j', bhw=b*h*w) + else: + mask = None + + if self.only_self_att: + ## note: if no context is given, cross-attention defaults to self-attention + for i, block in enumerate(self.transformer_blocks): + x = block(x, mask=mask) + x = rearrange(x, '(b hw) t c -> b hw t c', b=b).contiguous() + else: + x = rearrange(x, '(b hw) t c -> b hw t c', b=b).contiguous() + context = rearrange(context, '(b t) l con -> b t l con', t=t).contiguous() + for i, block in enumerate(self.transformer_blocks): + # calculate each batch one by one (since number in shape could not greater then 65,535 for some package) + for j in range(b): + context_j = repeat( + context[j], + 't l con -> (t r) l con', r=(h * w) // t, t=t).contiguous() + ## note: causal mask will not applied in cross-attention case + x[j] = block(x[j], context=context_j) + + if self.use_linear: + x = self.proj_out(x) + x = 
rearrange(x, 'b (h w) t c -> b c t h w', h=h, w=w).contiguous() + if not self.use_linear: + x = rearrange(x, 'b hw t c -> (b hw) c t').contiguous() + x = self.proj_out(x) + x = rearrange(x, '(b h w) c t -> b c t h w', b=b, h=h, w=w).contiguous() + + return x + x_in + + +class GEGLU(nn.Module): + def __init__(self, dim_in, dim_out, device=None, dtype=None, operations=ops): + super().__init__() + self.proj = operations.Linear(dim_in, dim_out * 2, device=device, dtype=dtype) + + def forward(self, x): + x, gate = self.proj(x).chunk(2, dim=-1) + return x * F.gelu(gate) + + +class FeedForward(nn.Module): + def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0., device=None, dtype=None, operations=ops): + super().__init__() + inner_dim = int(dim * mult) + dim_out = default(dim_out, dim) + project_in = nn.Sequential( + operations.Linear(dim, inner_dim, device=device, dtype=dtype), + nn.GELU() + ) if not glu else GEGLU(dim, inner_dim) + + self.net = nn.Sequential( + project_in, + nn.Dropout(dropout), + operations.Linear(inner_dim, dim_out, device=device, dtype=dtype) + ) + + def forward(self, x): + return self.net(x) + + +class LinearAttention(nn.Module): + def __init__(self, dim, heads=4, dim_head=32, device=None, dtype=None, operations=ops): + super().__init__() + self.heads = heads + hidden_dim = dim_head * heads + self.to_qkv = operations.Conv2d(dim, hidden_dim * 3, 1, bias = False, device=device, dtype=dtype) + self.to_out = operations.Conv2d(hidden_dim, dim, 1, device=device, dtype=dtype) + + def forward(self, x): + b, c, h, w = x.shape + qkv = self.to_qkv(x) + q, k, v = rearrange(qkv, 'b (qkv heads c) h w -> qkv b heads c (h w)', heads = self.heads, qkv=3) + k = k.softmax(dim=-1) + context = torch.einsum('bhdn,bhen->bhde', k, v) + out = torch.einsum('bhde,bhdn->bhen', context, q) + out = rearrange(out, 'b heads c (h w) -> b (heads c) h w', heads=self.heads, h=h, w=w) + return self.to_out(out) + + +class SpatialSelfAttention(nn.Module): + def __init__(self, in_channels, device=None, dtype=None, operations=ops): + super().__init__() + self.in_channels = in_channels + + self.norm = operations.GroupNorm( + num_groups=32, + num_channels=in_channels, + eps=1e-6, + affine=True, + device=device, + dtype=dtype + ) + self.q = operations.Conv2d( + in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0, + device=device, + dtype=dtype + ) + self.k = operations.Conv2d( + in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0, + device=device, + dtype=dtype + ) + self.v = operations.Conv2d( + in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0, + device=device, + dtype=dtype + ) + self.proj_out = operations.Conv2d( + in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0, + device=device, + dtype=dtype + ) + + def forward(self, x): + h_ = x + h_ = self.norm(h_) + q = self.q(h_) + k = self.k(h_) + v = self.v(h_) + + # compute attention + b,c,h,w = q.shape + q = rearrange(q, 'b c h w -> b (h w) c') + k = rearrange(k, 'b c h w -> b c (h w)') + w_ = torch.einsum('bij,bjk->bik', q, k) + + w_ = w_ * (int(c)**(-0.5)) + w_ = torch.nn.functional.softmax(w_, dim=2) + + # attend to values + v = rearrange(v, 'b c h w -> b c (h w)') + w_ = rearrange(w_, 'b i j -> b j i') + h_ = torch.einsum('bij,bjk->bik', v, w_) + h_ = rearrange(h_, 'b c (h w) -> b c h w', h=h) + h_ = self.proj_out(h_) + + return x+h_ diff --git a/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/modules/encoders/condition.py 
b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/modules/encoders/condition.py new file mode 100644 index 0000000000000000000000000000000000000000..610322bff04b5d0cf93674622cb9ead121fa1e98 --- /dev/null +++ b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/modules/encoders/condition.py @@ -0,0 +1,389 @@ +import torch +import torch.nn as nn +import kornia +import open_clip +from torch.utils.checkpoint import checkpoint +from transformers import T5Tokenizer, T5EncoderModel, CLIPTokenizer, CLIPTextModel +from ..common import autocast +from utils.utils import count_params + + +class AbstractEncoder(nn.Module): + def __init__(self): + super().__init__() + + def encode(self, *args, **kwargs): + raise NotImplementedError + + +class IdentityEncoder(AbstractEncoder): + def encode(self, x): + return x + + +class ClassEmbedder(nn.Module): + def __init__(self, embed_dim, n_classes=1000, key='class', ucg_rate=0.1): + super().__init__() + self.key = key + self.embedding = nn.Embedding(n_classes, embed_dim) + self.n_classes = n_classes + self.ucg_rate = ucg_rate + + def forward(self, batch, key=None, disable_dropout=False): + if key is None: + key = self.key + # this is for use in crossattn + c = batch[key][:, None] + if self.ucg_rate > 0. and not disable_dropout: + mask = 1. - torch.bernoulli(torch.ones_like(c) * self.ucg_rate) + c = mask * c + (1 - mask) * torch.ones_like(c) * (self.n_classes - 1) + c = c.long() + c = self.embedding(c) + return c + + def get_unconditional_conditioning(self, bs, device="cuda"): + uc_class = self.n_classes - 1 # 1000 classes --> 0 ... 999, one extra class for ucg (class 1000) + uc = torch.ones((bs,), device=device) * uc_class + uc = {self.key: uc} + return uc + + +def disabled_train(self, mode=True): + """Overwrite model.train with this function to make sure train/eval mode + does not change anymore.""" + return self + + +class FrozenT5Embedder(AbstractEncoder): + """Uses the T5 transformer encoder for text""" + + def __init__(self, version="google/t5-v1_1-large", device="cuda", max_length=77, + freeze=True): # others are google/t5-v1_1-xl and google/t5-v1_1-xxl + super().__init__() + self.tokenizer = T5Tokenizer.from_pretrained(version) + self.transformer = T5EncoderModel.from_pretrained(version) + self.device = device + self.max_length = max_length # TODO: typical value? 
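+        # editor's note: 77 mirrors CLIP's context length; T5 itself imposes no fixed 77-token limit, so this is a convention rather than a model constraint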
+ if freeze: + self.freeze() + + def freeze(self): + self.transformer = self.transformer.eval() + # self.train = disabled_train + for param in self.parameters(): + param.requires_grad = False + + def forward(self, text): + batch_encoding = self.tokenizer(text, truncation=True, max_length=self.max_length, return_length=True, + return_overflowing_tokens=False, padding="max_length", return_tensors="pt") + tokens = batch_encoding["input_ids"].to(self.device) + outputs = self.transformer(input_ids=tokens) + + z = outputs.last_hidden_state + return z + + def encode(self, text): + return self(text) + + +class FrozenCLIPEmbedder(AbstractEncoder): + """Uses the CLIP transformer encoder for text (from huggingface)""" + LAYERS = [ + "last", + "pooled", + "hidden" + ] + + def __init__(self, version="openai/clip-vit-large-patch14", device="cuda", max_length=77, + freeze=True, layer="last", layer_idx=None): # clip-vit-base-patch32 + super().__init__() + assert layer in self.LAYERS + self.tokenizer = CLIPTokenizer.from_pretrained(version) + self.transformer = CLIPTextModel.from_pretrained(version) + self.device = device + self.max_length = max_length + if freeze: + self.freeze() + self.layer = layer + self.layer_idx = layer_idx + if layer == "hidden": + assert layer_idx is not None + assert 0 <= abs(layer_idx) <= 12 + + def freeze(self): + self.transformer = self.transformer.eval() + # self.train = disabled_train + for param in self.parameters(): + param.requires_grad = False + + def forward(self, text): + batch_encoding = self.tokenizer(text, truncation=True, max_length=self.max_length, return_length=True, + return_overflowing_tokens=False, padding="max_length", return_tensors="pt") + tokens = batch_encoding["input_ids"].to(self.device) + outputs = self.transformer(input_ids=tokens, output_hidden_states=self.layer == "hidden") + if self.layer == "last": + z = outputs.last_hidden_state + elif self.layer == "pooled": + z = outputs.pooler_output[:, None, :] + else: + z = outputs.hidden_states[self.layer_idx] + return z + + def encode(self, text): + return self(text) + + +class ClipImageEmbedder(nn.Module): + def __init__( + self, + model, + jit=False, + device='cuda' if torch.cuda.is_available() else 'cpu', + antialias=True, + ucg_rate=0. + ): + super().__init__() + from clip import load as load_clip + self.model, _ = load_clip(name=model, device=device, jit=jit) + + self.antialias = antialias + + self.register_buffer('mean', torch.Tensor([0.48145466, 0.4578275, 0.40821073]), persistent=False) + self.register_buffer('std', torch.Tensor([0.26862954, 0.26130258, 0.27577711]), persistent=False) + self.ucg_rate = ucg_rate + + def preprocess(self, x): + # normalize to [0,1] + x = kornia.geometry.resize(x, (224, 224), + interpolation='bicubic', align_corners=True, + antialias=self.antialias) + x = (x + 1.) / 2. + # re-normalize according to clip + x = kornia.enhance.normalize(x, self.mean, self.std) + return x + + def forward(self, x, no_dropout=False): + # x is assumed to be in range [-1,1] + out = self.model.encode_image(self.preprocess(x)) + out = out.to(x.dtype) + if self.ucg_rate > 0. and not no_dropout: + out = torch.bernoulli((1. 
- self.ucg_rate) * torch.ones(out.shape[0], device=out.device))[:, None] * out + return out + + +class FrozenOpenCLIPEmbedder(AbstractEncoder): + """ + Uses the OpenCLIP transformer encoder for text + """ + LAYERS = [ + # "pooled", + "last", + "penultimate" + ] + + def __init__(self, arch="ViT-H-14", version="laion2b_s32b_b79k", device="cuda", max_length=77, + freeze=True, layer="last"): + super().__init__() + assert layer in self.LAYERS + model, _, _ = open_clip.create_model_and_transforms(arch, device=torch.device('cpu'), pretrained=version) + del model.visual + self.model = model + + self.device = device + self.max_length = max_length + if freeze: + self.freeze() + self.layer = layer + if self.layer == "last": + self.layer_idx = 0 + elif self.layer == "penultimate": + self.layer_idx = 1 + else: + raise NotImplementedError() + + def freeze(self): + self.model = self.model.eval() + for param in self.parameters(): + param.requires_grad = False + + def forward(self, text): + tokens = open_clip.tokenize(text) ## all clip models use 77 as context length + z = self.encode_with_transformer(tokens.to(self.device)) + return z + + def encode_with_transformer(self, text): + x = self.model.token_embedding(text) # [batch_size, n_ctx, d_model] + x = x + self.model.positional_embedding + x = x.permute(1, 0, 2) # NLD -> LND + x = self.text_transformer_forward(x, attn_mask=self.model.attn_mask) + x = x.permute(1, 0, 2) # LND -> NLD + x = self.model.ln_final(x) + return x + + def text_transformer_forward(self, x: torch.Tensor, attn_mask=None): + for i, r in enumerate(self.model.transformer.resblocks): + if i == len(self.model.transformer.resblocks) - self.layer_idx: + break + if self.model.transformer.grad_checkpointing and not torch.jit.is_scripting(): + x = checkpoint(r, x, attn_mask) + else: + x = r(x, attn_mask=attn_mask) + return x + + def encode(self, text): + return self(text) + + +class FrozenOpenCLIPImageEmbedder(AbstractEncoder): + """ + Uses the OpenCLIP vision transformer encoder for images + """ + + def __init__(self, arch="ViT-H-14", version="laion2b_s32b_b79k", device="cuda", max_length=77, + freeze=True, layer="pooled", antialias=True, ucg_rate=0.): + super().__init__() + model, _, _ = open_clip.create_model_and_transforms(arch, device=torch.device('cpu'), + pretrained=version, ) + del model.transformer + self.model = model + # self.mapper = torch.nn.Linear(1280, 1024) + self.device = device + self.max_length = max_length + if freeze: + self.freeze() + self.layer = layer + if self.layer == "penultimate": + raise NotImplementedError() + self.layer_idx = 1 + + self.antialias = antialias + + self.register_buffer('mean', torch.Tensor([0.48145466, 0.4578275, 0.40821073]), persistent=False) + self.register_buffer('std', torch.Tensor([0.26862954, 0.26130258, 0.27577711]), persistent=False) + self.ucg_rate = ucg_rate + + def preprocess(self, x): + # normalize to [0,1] + x = kornia.geometry.resize(x, (224, 224), + interpolation='bicubic', align_corners=True, + antialias=self.antialias) + x = (x + 1.) / 2. + # renormalize according to clip + x = kornia.enhance.normalize(x, self.mean, self.std) + return x + + def freeze(self): + self.model = self.model.eval() + for param in self.model.parameters(): + param.requires_grad = False + + @autocast + def forward(self, image, no_dropout=False): + z = self.encode_with_vision_transformer(image) + if self.ucg_rate > 0. and not no_dropout: + z = torch.bernoulli((1. 
- self.ucg_rate) * torch.ones(z.shape[0], device=z.device))[:, None] * z + return z + + def encode_with_vision_transformer(self, img): + img = self.preprocess(img) + x = self.model.visual(img) + return x + + def encode(self, text): + return self(text) + +class FrozenOpenCLIPImageEmbedderV2(AbstractEncoder): + """ + Uses the OpenCLIP vision transformer encoder for images + """ + + def __init__(self, arch="ViT-H-14", version="laion2b_s32b_b79k", device="cuda", + freeze=True, layer="pooled", antialias=True): + super().__init__() + model, _, _ = open_clip.create_model_and_transforms(arch, device=torch.device('cpu'), + pretrained=version, ) + del model.transformer + self.model = model + self.device = device + + if freeze: + self.freeze() + self.layer = layer + if self.layer == "penultimate": + raise NotImplementedError() + self.layer_idx = 1 + + self.antialias = antialias + + self.register_buffer('mean', torch.Tensor([0.48145466, 0.4578275, 0.40821073]), persistent=False) + self.register_buffer('std', torch.Tensor([0.26862954, 0.26130258, 0.27577711]), persistent=False) + + + def preprocess(self, x): + # normalize to [0,1] + x = kornia.geometry.resize(x, (224, 224), + interpolation='bicubic', align_corners=True, + antialias=self.antialias) + x = (x + 1.) / 2. + # renormalize according to clip + x = kornia.enhance.normalize(x, self.mean, self.std) + return x + + def freeze(self): + self.model = self.model.eval() + for param in self.model.parameters(): + param.requires_grad = False + + def forward(self, image, no_dropout=False): + ## image: b c h w + z = self.encode_with_vision_transformer(image) + return z + + def encode_with_vision_transformer(self, x): + x = self.preprocess(x) + + # to patches - whether to use dual patchnorm - https://arxiv.org/abs/2302.01327v1 + if self.model.visual.input_patchnorm: + # einops - rearrange(x, 'b c (h p1) (w p2) -> b (h w) (c p1 p2)') + x = x.reshape(x.shape[0], x.shape[1], self.model.visual.grid_size[0], self.model.visual.patch_size[0], self.model.visual.grid_size[1], self.model.visual.patch_size[1]) + x = x.permute(0, 2, 4, 1, 3, 5) + x = x.reshape(x.shape[0], self.model.visual.grid_size[0] * self.model.visual.grid_size[1], -1) + x = self.model.visual.patchnorm_pre_ln(x) + x = self.model.visual.conv1(x) + else: + x = self.model.visual.conv1(x) # shape = [*, width, grid, grid] + x = x.reshape(x.shape[0], x.shape[1], -1) # shape = [*, width, grid ** 2] + x = x.permute(0, 2, 1) # shape = [*, grid ** 2, width] + + # class embeddings and positional embeddings + x = torch.cat( + [self.model.visual.class_embedding.to(x.dtype) + torch.zeros(x.shape[0], 1, x.shape[-1], dtype=x.dtype, device=x.device), + x], dim=1) # shape = [*, grid ** 2 + 1, width] + x = x + self.model.visual.positional_embedding.to(x.dtype) + + # a patch_dropout of 0. 
would mean it is disabled and this function would do nothing but return what was passed in + x = self.model.visual.patch_dropout(x) + x = self.model.visual.ln_pre(x) + + x = x.permute(1, 0, 2) # NLD -> LND + x = self.model.visual.transformer(x) + x = x.permute(1, 0, 2) # LND -> NLD + + return x + +class FrozenCLIPT5Encoder(AbstractEncoder): + def __init__(self, clip_version="openai/clip-vit-large-patch14", t5_version="google/t5-v1_1-xl", device="cuda", + clip_max_length=77, t5_max_length=77): + super().__init__() + self.clip_encoder = FrozenCLIPEmbedder(clip_version, device, max_length=clip_max_length) + self.t5_encoder = FrozenT5Embedder(t5_version, device, max_length=t5_max_length) + print(f"{self.clip_encoder.__class__.__name__} has {count_params(self.clip_encoder) * 1.e-6:.2f} M parameters, " + f"{self.t5_encoder.__class__.__name__} comes with {count_params(self.t5_encoder) * 1.e-6:.2f} M params.") + + def encode(self, text): + return self(text) + + def forward(self, text): + clip_z = self.clip_encoder.encode(text) + t5_z = self.t5_encoder.encode(text) + return [clip_z, t5_z] diff --git a/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/modules/encoders/resampler.py b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/modules/encoders/resampler.py new file mode 100644 index 0000000000000000000000000000000000000000..0c30c58a9a4530f82bf245355fde564553cc7893 --- /dev/null +++ b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/modules/encoders/resampler.py @@ -0,0 +1,145 @@ +# modified from https://github.com/mlfoundations/open_flamingo/blob/main/open_flamingo/src/helpers.py +# and https://github.com/lucidrains/imagen-pytorch/blob/main/imagen_pytorch/imagen_pytorch.py +# and https://github.com/tencent-ailab/IP-Adapter/blob/main/ip_adapter/resampler.py +import math +import torch +import torch.nn as nn + + +class ImageProjModel(nn.Module): + """Projection Model""" + def __init__(self, cross_attention_dim=1024, clip_embeddings_dim=1024, clip_extra_context_tokens=4): + super().__init__() + self.cross_attention_dim = cross_attention_dim + self.clip_extra_context_tokens = clip_extra_context_tokens + self.proj = nn.Linear(clip_embeddings_dim, self.clip_extra_context_tokens * cross_attention_dim) + self.norm = nn.LayerNorm(cross_attention_dim) + + def forward(self, image_embeds): + #embeds = image_embeds + embeds = image_embeds.type(list(self.proj.parameters())[0].dtype) + clip_extra_context_tokens = self.proj(embeds).reshape(-1, self.clip_extra_context_tokens, self.cross_attention_dim) + clip_extra_context_tokens = self.norm(clip_extra_context_tokens) + return clip_extra_context_tokens + + +# FFN +def FeedForward(dim, mult=4): + inner_dim = int(dim * mult) + return nn.Sequential( + nn.LayerNorm(dim), + nn.Linear(dim, inner_dim, bias=False), + nn.GELU(), + nn.Linear(inner_dim, dim, bias=False), + ) + + +def reshape_tensor(x, heads): + bs, length, width = x.shape + #(bs, length, width) --> (bs, length, n_heads, dim_per_head) + x = x.view(bs, length, heads, -1) + # (bs, length, n_heads, dim_per_head) --> (bs, n_heads, length, dim_per_head) + x = x.transpose(1, 2) + # (bs, n_heads, length, dim_per_head) --> (bs*n_heads, length, dim_per_head) + x = x.reshape(bs, heads, length, -1) + return x + + +class PerceiverAttention(nn.Module): + def __init__(self, *, dim, dim_head=64, heads=8): + super().__init__() + self.scale = dim_head**-0.5 + self.dim_head = dim_head + self.heads = heads + inner_dim = dim_head * heads + + self.norm1 = nn.LayerNorm(dim) + self.norm2 = nn.LayerNorm(dim) + + self.to_q = nn.Linear(dim, inner_dim, bias=False) + 
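+        # keys and values are projected jointly (hence the 2x width) from the concatenation of image features and latents; see forward() below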
self.to_kv = nn.Linear(dim, inner_dim * 2, bias=False) + self.to_out = nn.Linear(inner_dim, dim, bias=False) + + + def forward(self, x, latents): + """ + Args: + x (torch.Tensor): image features + shape (b, n1, D) + latent (torch.Tensor): latent features + shape (b, n2, D) + """ + x = self.norm1(x) + latents = self.norm2(latents) + + b, l, _ = latents.shape + + q = self.to_q(latents) + kv_input = torch.cat((x, latents), dim=-2) + k, v = self.to_kv(kv_input).chunk(2, dim=-1) + + q = reshape_tensor(q, self.heads) + k = reshape_tensor(k, self.heads) + v = reshape_tensor(v, self.heads) + + # attention + scale = 1 / math.sqrt(math.sqrt(self.dim_head)) + weight = (q * scale) @ (k * scale).transpose(-2, -1) # More stable with f16 than dividing afterwards + weight = torch.softmax(weight.float(), dim=-1).type(weight.dtype) + out = weight @ v + + out = out.permute(0, 2, 1, 3).reshape(b, l, -1) + + return self.to_out(out) + + +class Resampler(nn.Module): + def __init__( + self, + dim=1024, + depth=8, + dim_head=64, + heads=16, + num_queries=8, + embedding_dim=768, + output_dim=1024, + ff_mult=4, + video_length=None, # using frame-wise version or not + ): + super().__init__() + ## queries for a single frame / image + self.num_queries = num_queries + self.video_length = video_length + + ## queries for each frame + if video_length is not None: + num_queries = num_queries * video_length + + self.latents = nn.Parameter(torch.randn(1, num_queries, dim) / dim**0.5) + self.proj_in = nn.Linear(embedding_dim, dim) + self.proj_out = nn.Linear(dim, output_dim) + self.norm_out = nn.LayerNorm(output_dim) + + self.layers = nn.ModuleList([]) + for _ in range(depth): + self.layers.append( + nn.ModuleList( + [ + PerceiverAttention(dim=dim, dim_head=dim_head, heads=heads), + FeedForward(dim=dim, mult=ff_mult), + ] + ) + ) + + def forward(self, x): + latents = self.latents.repeat(x.size(0), 1, 1) ## B (T L) C + x = self.proj_in(x) + + for attn, ff in self.layers: + latents = attn(x, latents) + latents + latents = ff(latents) + latents + + latents = self.proj_out(latents) + latents = self.norm_out(latents) # B L C or B (T L) C + + return latents \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/modules/networks/ae_modules.py b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/modules/networks/ae_modules.py new file mode 100644 index 0000000000000000000000000000000000000000..35b681704362866ae431ccfc759b2cff55f866fe --- /dev/null +++ b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/modules/networks/ae_modules.py @@ -0,0 +1,1026 @@ +# pytorch_diffusion + derived encoder decoder +import math +import torch +import numpy as np +import torch.nn as nn +from einops import rearrange +from utils.utils import instantiate_from_config +from ...modules.attention import LinearAttention + +import comfy.ops +ops = comfy.ops.disable_weight_init + +def nonlinearity(x): + # swish + return x*torch.sigmoid(x) + + +def Normalize(in_channels, num_groups=32, device=None, dtype=None, operations=ops): + return operations.GroupNorm( + num_groups=num_groups, + num_channels=in_channels, + eps=1e-6, + affine=True, + device=device, + dtype=dtype + ) + +class LinAttnBlock(LinearAttention): + """to match AttnBlock usage""" + def __init__(self, in_channels, device=None, dtype=None): + super().__init__(dim=in_channels, heads=1, dim_head=in_channels, device=device, dtype=dtype) + + +class AttnBlock(nn.Module): + def __init__(self, in_channels, device=None, dtype=None, operations=ops): + super().__init__() + self.in_channels = in_channels + + 
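+        # single-head self-attention over the h*w spatial positions; q, k, v and the output projection are all 1x1 convolutions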
self.norm = Normalize(in_channels, device=device, dtype=dtype) + self.q = operations.Conv2d( + in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0, + device=device, + dtype=dtype + ) + self.k = operations.Conv2d( + in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0, + device=device, + dtype=dtype + ) + self.v = operations.Conv2d( + in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0, + device=device, + dtype=dtype + ) + self.proj_out = operations.Conv2d( + in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0, + device=device, + dtype=dtype + ) + + def forward(self, x): + h_ = x + h_ = self.norm(h_) + q = self.q(h_) + k = self.k(h_) + v = self.v(h_) + + # compute attention + b,c,h,w = q.shape + q = q.reshape(b,c,h*w) # bcl + q = q.permute(0,2,1) # bcl -> blc l=hw + k = k.reshape(b,c,h*w) # bcl + + w_ = torch.bmm(q,k) # b,hw,hw w[b,i,j]=sum_c q[b,i,c]k[b,c,j] + w_ = w_ * (int(c)**(-0.5)) + w_ = torch.nn.functional.softmax(w_, dim=2) + + # attend to values + v = v.reshape(b,c,h*w) + w_ = w_.permute(0,2,1) # b,hw,hw (first hw of k, second of q) + h_ = torch.bmm(v,w_) # b, c,hw (hw of q) h_[b,c,j] = sum_i v[b,c,i] w_[b,i,j] + h_ = h_.reshape(b,c,h,w) + + h_ = self.proj_out(h_) + + return x+h_ + +def make_attn(in_channels, attn_type="vanilla", device=None, dtype=None): + assert attn_type in ["vanilla", "linear", "none"], f'attn_type {attn_type} unknown' + #print(f"making attention of type '{attn_type}' with {in_channels} in_channels") + if attn_type == "vanilla": + return AttnBlock(in_channels, device=device, dtype=dtype) + elif attn_type == "none": + return nn.Identity(in_channels) + else: + return LinAttnBlock(in_channels, device=device, dtype=dtype) + +class Downsample(nn.Module): + def __init__(self, in_channels, with_conv, device=None, dtype=None, operations=ops): + super().__init__() + self.with_conv = with_conv + self.in_channels = in_channels + if self.with_conv: + # no asymmetric padding in torch conv, must do it ourselves + self.conv = operations.Conv2d( + in_channels, + in_channels, + kernel_size=3, + stride=2, + padding=0, + device=device, + dtype=dtype + ) + def forward(self, x): + if self.with_conv: + pad = (0,1,0,1) + x = torch.nn.functional.pad(x, pad, mode="constant", value=0) + x = self.conv(x) + else: + x = torch.nn.functional.avg_pool2d(x, kernel_size=2, stride=2) + return x + +class Upsample(nn.Module): + def __init__(self, in_channels, with_conv, device=None, dtype=None, operations=ops): + super().__init__() + self.with_conv = with_conv + self.in_channels = in_channels + if self.with_conv: + self.conv = operations.Conv2d( + in_channels, + in_channels, + kernel_size=3, + stride=1, + padding=1, + device=device, + dtype=dtype + ) + + def forward(self, x): + x = torch.nn.functional.interpolate(x, scale_factor=2.0, mode="nearest") + if self.with_conv: + x = self.conv(x) + return x + +def get_timestep_embedding(timesteps, embedding_dim): + """ + This matches the implementation in Denoising Diffusion Probabilistic Models: + From Fairseq. + Build sinusoidal embeddings. + This matches the implementation in tensor2tensor, but differs slightly + from the description in Section 3.5 of "Attention Is All You Need". 
+ """ + assert len(timesteps.shape) == 1 + + half_dim = embedding_dim // 2 + emb = math.log(10000) / (half_dim - 1) + emb = torch.exp(torch.arange(half_dim, dtype=torch.float32) * -emb) + emb = emb.to(device=timesteps.device) + emb = timesteps.float()[:, None] * emb[None, :] + emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1) + if embedding_dim % 2 == 1: # zero pad + emb = torch.nn.functional.pad(emb, (0,1,0,0)) + return emb + + + +class ResnetBlock(nn.Module): + def __init__( + self, + *, + in_channels, + out_channels=None, + conv_shortcut=False, + dropout, + temb_channels=512, + device=None, + dtype=None, + operations=ops + ): + super().__init__() + self.in_channels = in_channels + out_channels = in_channels if out_channels is None else out_channels + self.out_channels = out_channels + self.use_conv_shortcut = conv_shortcut + + self.norm1 = Normalize(in_channels, device=device, dtype=dtype) + self.conv1 = operations.Conv2d( + in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1, + device=device, + dtype=dtype + ) + if temb_channels > 0: + self.temb_proj = operations.Linear( + temb_channels, + out_channels, + device=device, + dtype=dtype + ) + self.norm2 = Normalize(out_channels, device=device, dtype=dtype) + self.dropout = torch.nn.Dropout(dropout) + self.conv2 = operations.Conv2d( + out_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1, + device=device, + dtype=dtype + ) + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + self.conv_shortcut = operations.Conv2d( + in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1, + device=device, + dtype=device + ) + else: + self.nin_shortcut = operations.Conv2d( + in_channels, + out_channels, + kernel_size=1, + stride=1, + padding=0, + device=device, + dtype=dtype + ) + + def forward(self, x, temb): + h = x + h = self.norm1(h) + h = nonlinearity(h) + h = self.conv1(h) + + if temb is not None: + h = h + self.temb_proj(nonlinearity(temb))[:,:,None,None] + + h = self.norm2(h) + h = nonlinearity(h) + h = self.dropout(h) + h = self.conv2(h) + + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + x = self.conv_shortcut(x) + else: + x = self.nin_shortcut(x) + + return x+h + +class Model(nn.Module): + def __init__( + self, + *, + ch, + out_ch, + ch_mult=(1,2,4,8), + num_res_blocks, + attn_resolutions, + dropout=0.0, + resamp_with_conv=True, + in_channels, + resolution, + use_timestep=True, + use_linear_attn=False, + attn_type="vanilla", + device=None, + dtype=None, + operations=ops + ): + super().__init__() + if use_linear_attn: attn_type = "linear" + self.ch = ch + self.temb_ch = self.ch*4 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + + self.use_timestep = use_timestep + if self.use_timestep: + # timestep embedding + self.temb = nn.Module() + self.temb.dense = nn.ModuleList([ + operations.Linear( + self.ch, + self.temb_ch, + device=device, + dtype=dtype + ), + operations.Linear( + self.temb_ch, + self.temb_ch, + device=device, + dtype=dtype + ), + ]) + + # downsampling + self.conv_in = operations.Conv2d( + in_channels, + self.ch, + kernel_size=3, + stride=1, + padding=1, + device=device, + dtype=dtype + ) + + curr_res = resolution + in_ch_mult = (1,)+tuple(ch_mult) + self.down = nn.ModuleList() + for i_level in range(self.num_resolutions): + block = nn.ModuleList() + attn = nn.ModuleList() + block_in = ch*in_ch_mult[i_level] + block_out = ch*ch_mult[i_level] + 
for i_block in range(self.num_res_blocks): + block.append(ResnetBlock(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + device=device, + dtype=dtype)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type, device=device, dtype=dtype)) + down = nn.Module() + down.block = block + down.attn = attn + if i_level != self.num_resolutions-1: + down.downsample = Downsample(block_in, resamp_with_conv, device=device, dtype=dtype) + curr_res = curr_res // 2 + self.down.append(down) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + device=device, + dtype=dtype) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type, device=device, dtype=dtype) + self.mid.block_2 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + device=device, + dtype=dtype) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch*ch_mult[i_level] + skip_in = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks+1): + if i_block == self.num_res_blocks: + skip_in = ch*in_ch_mult[i_level] + block.append(ResnetBlock(in_channels=block_in+skip_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + device=device, + dtype=dtype)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type, device=device, dtype=dtype)) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + up.upsample = Upsample(block_in, resamp_with_conv, device=device, dtype=dtype) + curr_res = curr_res * 2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in, device=device, dtype=device) + self.conv_out = torch.nn.Conv2d(block_in, + out_ch, + kernel_size=3, + stride=1, + padding=1, + device=device, + dtype=dtype) + + def forward(self, x, t=None, context=None): + #assert x.shape[2] == x.shape[3] == self.resolution + if context is not None: + # assume aligned context, cat along channel axis + x = torch.cat((x, context), dim=1) + if self.use_timestep: + # timestep embedding + assert t is not None + temb = get_timestep_embedding(t, self.ch) + temb = self.temb.dense[0](temb) + temb = nonlinearity(temb) + temb = self.temb.dense[1](temb) + else: + temb = None + + # downsampling + hs = [self.conv_in(x)] + for i_level in range(self.num_resolutions): + for i_block in range(self.num_res_blocks): + h = self.down[i_level].block[i_block](hs[-1], temb) + if len(self.down[i_level].attn) > 0: + h = self.down[i_level].attn[i_block](h) + hs.append(h) + if i_level != self.num_resolutions-1: + hs.append(self.down[i_level].downsample(hs[-1])) + + # middle + h = hs[-1] + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks+1): + h = self.up[i_level].block[i_block]( + torch.cat([h, hs.pop()], dim=1), temb) + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block](h) + if i_level != 0: + h = self.up[i_level].upsample(h) + + # end + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + def get_last_layer(self): + return self.conv_out.weight + + +class 
Encoder(nn.Module): + def __init__( + self, + *, + ch, + out_ch, + ch_mult=(1,2,4,8), + num_res_blocks, + attn_resolutions, + dropout=0.0, + resamp_with_conv=True, + in_channels, + resolution, + z_channels, + double_z=True, + use_linear_attn=False, + attn_type="vanilla", + device=None, + dtype=None, + operations=ops, + **ignore_kwargs + ): + super().__init__() + if use_linear_attn: attn_type = "linear" + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + + # downsampling + self.conv_in = operations.Conv2d(in_channels, + self.ch, + kernel_size=3, + stride=1, + padding=1, + device=device, + dtype=dtype) + + curr_res = resolution + in_ch_mult = (1,)+tuple(ch_mult) + self.in_ch_mult = in_ch_mult + self.down = nn.ModuleList() + for i_level in range(self.num_resolutions): + block = nn.ModuleList() + attn = nn.ModuleList() + block_in = ch*in_ch_mult[i_level] + block_out = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks): + block.append(ResnetBlock(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + device=device, + dtype=dtype)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type, device=device, dtype=dtype)) + down = nn.Module() + down.block = block + down.attn = attn + if i_level != self.num_resolutions-1: + down.downsample = Downsample(block_in, resamp_with_conv, device=device, dtype=dtype) + curr_res = curr_res // 2 + self.down.append(down) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + device=device, + dtype=dtype) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type, device=device, dtype=dtype) + self.mid.block_2 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + device=device, + dtype=dtype) + + # end + self.norm_out = Normalize(block_in, device=device, dtype=dtype) + self.conv_out = operations.Conv2d(block_in, + 2*z_channels if double_z else z_channels, + kernel_size=3, + stride=1, + padding=1, + device=device, + dtype=dtype) + + def forward(self, x): + # timestep embedding + temb = None + + # print(f'encoder-input={x.shape}') + # downsampling + hs = [self.conv_in(x)] + # print(f'encoder-conv in feat={hs[0].shape}') + for i_level in range(self.num_resolutions): + for i_block in range(self.num_res_blocks): + h = self.down[i_level].block[i_block](hs[-1], temb) + # print(f'encoder-down feat={h.shape}') + if len(self.down[i_level].attn) > 0: + h = self.down[i_level].attn[i_block](h) + hs.append(h) + if i_level != self.num_resolutions-1: + # print(f'encoder-downsample (input)={hs[-1].shape}') + hs.append(self.down[i_level].downsample(hs[-1])) + # print(f'encoder-downsample (output)={hs[-1].shape}') + + # middle + h = hs[-1] + h = self.mid.block_1(h, temb) + # print(f'encoder-mid1 feat={h.shape}') + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + # print(f'encoder-mid2 feat={h.shape}') + + # end + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + # print(f'end feat={h.shape}') + return h + + +class Decoder(nn.Module): + def __init__( + self, + *, + ch, + out_ch, + ch_mult=(1,2,4,8), + num_res_blocks, + attn_resolutions, + dropout=0.0, + resamp_with_conv=True, + in_channels, + resolution, + z_channels, + give_pre_end=False, + tanh_out=False, 
+ use_linear_attn=False, + attn_type="vanilla", + device=None, + dtype=None, + operations=ops, + **ignorekwargs + ): + super().__init__() + if use_linear_attn: attn_type = "linear" + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + self.give_pre_end = give_pre_end + self.tanh_out = tanh_out + + # compute in_ch_mult, block_in and curr_res at lowest res + in_ch_mult = (1,)+tuple(ch_mult) + block_in = ch*ch_mult[self.num_resolutions-1] + curr_res = resolution // 2**(self.num_resolutions-1) + self.z_shape = (1,z_channels,curr_res,curr_res) + print("AE working on z of shape {} = {} dimensions.".format( + self.z_shape, np.prod(self.z_shape))) + + # z to block_in + self.conv_in = torch.nn.Conv2d(z_channels, + block_in, + kernel_size=3, + stride=1, + padding=1, + device=device, + dtype=dtype) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + device=device, + dtype=dtype) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type, device=device, dtype=dtype) + self.mid.block_2 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + device=device, + dtype=dtype) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks+1): + block.append(ResnetBlock(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + device=device, + dtype=dtype)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type, device=device, dtype=dtype)) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + up.upsample = Upsample(block_in, resamp_with_conv, device=device, dtype=dtype) + curr_res = curr_res * 2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in, device=device, dtype=dtype) + self.conv_out = operations.Conv2d(block_in, + out_ch, + kernel_size=3, + stride=1, + padding=1, + device=device, + dtype=dtype) + + def forward(self, z): + #assert z.shape[1:] == self.z_shape[1:] + self.last_z_shape = z.shape + + # print(f'decoder-input={z.shape}') + # timestep embedding + temb = None + + # z to block_in + h = self.conv_in(z) + # print(f'decoder-conv in feat={h.shape}') + + # middle + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + # print(f'decoder-mid feat={h.shape}') + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks+1): + h = self.up[i_level].block[i_block](h, temb) + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block](h) + # print(f'decoder-up feat={h.shape}') + if i_level != 0: + h = self.up[i_level].upsample(h) + # print(f'decoder-upsample feat={h.shape}') + + # end + if self.give_pre_end: + return h + + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + # print(f'decoder-conv_out feat={h.shape}') + if self.tanh_out: + h = torch.tanh(h) + return h + + +class SimpleDecoder(nn.Module): + def __init__(self, in_channels, out_channels, device=None, dtype=None, operations=ops, *args, **kwargs): + super().__init__() + self.model = 
nn.ModuleList([nn.Conv2d(in_channels, in_channels, 1), + ResnetBlock(in_channels=in_channels, + out_channels=2 * in_channels, + temb_channels=0, dropout=0.0, + device=device, + dtype=dtype), + ResnetBlock(in_channels=2 * in_channels, + out_channels=4 * in_channels, + temb_channels=0, dropout=0.0, + device=device, + dtype=dtype), + ResnetBlock(in_channels=4 * in_channels, + out_channels=2 * in_channels, + temb_channels=0, dropout=0.0, + device=device, + dtype=dtype), + operations.Conv2d(2*in_channels, in_channels, 1), + Upsample(in_channels, with_conv=True, device=device, dtype=dtype)]) + # end + self.norm_out = Normalize(in_channels, device=device, dtype=dtype) + self.conv_out = torch.nn.Conv2d(in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1, + device=device, + dtype=dtype) + + def forward(self, x): + for i, layer in enumerate(self.model): + if i in [1,2,3]: + x = layer(x, None) + else: + x = layer(x) + + h = self.norm_out(x) + h = nonlinearity(h) + x = self.conv_out(h) + return x + + +class UpsampleDecoder(nn.Module): + def __init__(self, in_channels, out_channels, ch, num_res_blocks, resolution, + ch_mult=(2,2), dropout=0.0, device=None, dtype=None, operations=ops): + super().__init__() + # upsampling + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + block_in = in_channels + curr_res = resolution // 2 ** (self.num_resolutions - 1) + self.res_blocks = nn.ModuleList() + self.upsample_blocks = nn.ModuleList() + for i_level in range(self.num_resolutions): + res_block = [] + block_out = ch * ch_mult[i_level] + for i_block in range(self.num_res_blocks + 1): + res_block.append(ResnetBlock(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + device=device, + dtype=dtype)) + block_in = block_out + self.res_blocks.append(nn.ModuleList(res_block)) + if i_level != self.num_resolutions - 1: + self.upsample_blocks.append(Upsample(block_in, True, device=device, dtype=dtype)) + curr_res = curr_res * 2 + + # end + self.norm_out = Normalize(block_in, device=device, dtype=dtype) + self.conv_out = torch.nn.Conv2d(block_in, + out_channels, + kernel_size=3, + stride=1, + padding=1, + device=device, + dtype=dtype) + + def forward(self, x): + # upsampling + h = x + for k, i_level in enumerate(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks + 1): + h = self.res_blocks[i_level][i_block](h, None) + if i_level != self.num_resolutions - 1: + h = self.upsample_blocks[k](h) + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + +class LatentRescaler(nn.Module): + def __init__(self, factor, in_channels, mid_channels, out_channels, depth=2, device=None, dtype=None, operations=ops): + super().__init__() + # residual block, interpolate, residual block + self.factor = factor + self.conv_in = operations.Conv2d(in_channels, + mid_channels, + kernel_size=3, + stride=1, + padding=1, + device=device, + dtype=dtype) + self.res_block1 = nn.ModuleList([ResnetBlock(in_channels=mid_channels, + out_channels=mid_channels, + temb_channels=0, + dropout=0.0, + device=device, + dtype=dtype) for _ in range(depth)]) + self.attn = AttnBlock(mid_channels, device=device, dtype=dtype) + self.res_block2 = nn.ModuleList([ResnetBlock(in_channels=mid_channels, + out_channels=mid_channels, + temb_channels=0, + dropout=0.0, + device=device, + dtype=dtype) for _ in range(depth)]) + + self.conv_out = operations.Conv2d(mid_channels, + out_channels, + kernel_size=1, + device=device, + 
dtype=dtype + ) + + def forward(self, x): + x = self.conv_in(x) + for block in self.res_block1: + x = block(x, None) + x = torch.nn.functional.interpolate(x, size=(int(round(x.shape[2]*self.factor)), int(round(x.shape[3]*self.factor)))) + x = self.attn(x) + for block in self.res_block2: + x = block(x, None) + x = self.conv_out(x) + return x + + +class MergedRescaleEncoder(nn.Module): + def __init__(self, in_channels, ch, resolution, out_ch, num_res_blocks, + attn_resolutions, dropout=0.0, resamp_with_conv=True, + ch_mult=(1,2,4,8), rescale_factor=1.0, rescale_module_depth=1, device=None, dtype=None, operations=ops): + super().__init__() + intermediate_chn = ch * ch_mult[-1] + self.encoder = Encoder(in_channels=in_channels, num_res_blocks=num_res_blocks, ch=ch, ch_mult=ch_mult, + z_channels=intermediate_chn, double_z=False, resolution=resolution, + attn_resolutions=attn_resolutions, dropout=dropout, resamp_with_conv=resamp_with_conv, + out_ch=None, device=device, dtype=dtype) + self.rescaler = LatentRescaler(factor=rescale_factor, in_channels=intermediate_chn, + mid_channels=intermediate_chn, out_channels=out_ch, depth=rescale_module_depth, + device=device, dtype=dtype) + + def forward(self, x): + x = self.encoder(x) + x = self.rescaler(x) + return x + + +class MergedRescaleDecoder(nn.Module): + def __init__(self, z_channels, out_ch, resolution, num_res_blocks, attn_resolutions, ch, ch_mult=(1,2,4,8), + dropout=0.0, resamp_with_conv=True, rescale_factor=1.0, rescale_module_depth=1, + device=None, dtype=None, operations=ops): + super().__init__() + tmp_chn = z_channels*ch_mult[-1] + self.decoder = Decoder(out_ch=out_ch, z_channels=tmp_chn, attn_resolutions=attn_resolutions, dropout=dropout, + resamp_with_conv=resamp_with_conv, in_channels=None, num_res_blocks=num_res_blocks, + ch_mult=ch_mult, resolution=resolution, ch=ch, device=device, operations=ops) + self.rescaler = LatentRescaler(factor=rescale_factor, in_channels=z_channels, mid_channels=tmp_chn, + out_channels=tmp_chn, depth=rescale_module_depth, device=device, operations=ops) + + def forward(self, x): + x = self.rescaler(x) + x = self.decoder(x) + return x + + +class Upsampler(nn.Module): + def __init__(self, in_size, out_size, in_channels, out_channels, ch_mult=2, device=None, dtype=None, operations=ops): + super().__init__() + assert out_size >= in_size + num_blocks = int(np.log2(out_size//in_size))+1 + factor_up = 1.+ (out_size % in_size) + print(f"Building {self.__class__.__name__} with in_size: {in_size} --> out_size {out_size} and factor {factor_up}") + self.rescaler = LatentRescaler(factor=factor_up, in_channels=in_channels, mid_channels=2*in_channels, + out_channels=in_channels, device=device, dtype=dtype) + self.decoder = Decoder(out_ch=out_channels, resolution=out_size, z_channels=in_channels, num_res_blocks=2, + attn_resolutions=[], in_channels=None, ch=in_channels, device=device, dtype=dtype, + ch_mult=[ch_mult for _ in range(num_blocks)]) + + def forward(self, x): + x = self.rescaler(x) + x = self.decoder(x) + return x + + +class Resize(nn.Module): + def __init__(self, in_channels=None, learned=False, mode="bilinear", device=None, dtype=None, operations=ops): + super().__init__() + self.with_conv = learned + self.mode = mode + if self.with_conv: + print(f"Note: {self.__class__.__name} uses learned downsampling and will ignore the fixed {mode} mode") + raise NotImplementedError() + assert in_channels is not None + # no asymmetric padding in torch conv, must do it ourselves + self.conv = operations.Conv2d(in_channels, + 
in_channels, + kernel_size=4, + stride=2, + padding=1, + device=device, + dtype=dtype) + + def forward(self, x, scale_factor=1.0): + if scale_factor==1.0: + return x + else: + x = torch.nn.functional.interpolate(x, mode=self.mode, align_corners=False, scale_factor=scale_factor) + return x + +class FirstStagePostProcessor(nn.Module): + + def __init__(self, ch_mult:list, in_channels, + pretrained_model:nn.Module=None, + reshape=False, + n_channels=None, + dropout=0., + pretrained_config=None, + device=None, + dtype=None, + operations=ops): + super().__init__() + if pretrained_config is None: + assert pretrained_model is not None, 'Either "pretrained_model" or "pretrained_config" must not be None' + self.pretrained_model = pretrained_model + else: + assert pretrained_config is not None, 'Either "pretrained_model" or "pretrained_config" must not be None' + self.instantiate_pretrained(pretrained_config) + + self.do_reshape = reshape + + if n_channels is None: + n_channels = self.pretrained_model.encoder.ch + + self.proj_norm = Normalize(in_channels,num_groups=in_channels//2, device=device, dtype=dtype) + self.proj = nn.Conv2d(in_channels,n_channels,kernel_size=3, + stride=1,padding=1, device=device, dtype=dtype) + + blocks = [] + downs = [] + ch_in = n_channels + for m in ch_mult: + blocks.append(ResnetBlock(in_channels=ch_in,out_channels=m*n_channels,dropout=dropout, device=device, dtype=dtype)) + ch_in = m * n_channels + downs.append(Downsample(ch_in, with_conv=False, device=device, dtype=dtype)) + + self.model = nn.ModuleList(blocks) + self.downsampler = nn.ModuleList(downs) + + + def instantiate_pretrained(self, config): + model = instantiate_from_config(config) + self.pretrained_model = model.eval() + # self.pretrained_model.train = False + for param in self.pretrained_model.parameters(): + param.requires_grad = False + + + @torch.no_grad() + def encode_with_pretrained(self,x): + c = self.pretrained_model.encode(x) + if isinstance(c, DiagonalGaussianDistribution): + c = c.mode() + return c + + def forward(self,x): + z_fs = self.encode_with_pretrained(x) + z = self.proj_norm(z_fs) + z = self.proj(z) + z = nonlinearity(z) + + for submodel, downmodel in zip(self.model,self.downsampler): + z = submodel(z,temb=None) + z = downmodel(z) + + if self.do_reshape: + z = rearrange(z,'b c h w -> b (h w) c') + return z \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/modules/networks/openaimodel3d.py b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/modules/networks/openaimodel3d.py new file mode 100644 index 0000000000000000000000000000000000000000..cd4e1c6dc1af8d3028a8f6948086a8d5db3e63d3 --- /dev/null +++ b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/modules/networks/openaimodel3d.py @@ -0,0 +1,822 @@ +from functools import partial +from abc import abstractmethod +import torch +import torch.nn as nn +from einops import rearrange +import torch.nn.functional as F +from ...models.utils_diffusion import timestep_embedding +from ...common import checkpoint +from ...basics import ( + zero_module, + conv_nd, + linear, + avg_pool_nd, + normalization +) +from ...modules.attention import SpatialTransformer, TemporalTransformer +import comfy.ops +import logging + +ops = comfy.ops.disable_weight_init + +class TimestepBlock(nn.Module): + """ + Any module where forward() takes timestep embeddings as a second argument. + """ + @abstractmethod + def forward(self, x, emb): + """ + Apply the module to `x` given `emb` timestep embeddings. 
+ """ + +#This is needed because accelerate makes a copy of transformer_options which breaks "transformer_index" +def forward_timestep_embed(ts, x, emb, context=None, batch_size=None, transformer_options={}): + for layer in ts: + if isinstance(layer, TimestepBlock): + x = layer(x, emb, batch_size=batch_size) + elif isinstance(layer, SpatialTransformer): + x = layer(x, context) + if "transformer_index" in transformer_options: + transformer_options["transformer_index"] += 1 + elif isinstance(layer, TemporalTransformer): + x = rearrange(x, '(b f) c h w -> b c f h w', b=batch_size) + x = layer(x, context) + if "transformer_index" in transformer_options: + transformer_options["transformer_index"] += 1 + x = rearrange(x, 'b c f h w -> (b f) c h w') + else: + x = layer(x) + return x + +class TimestepEmbedSequential(nn.Sequential, TimestepBlock): + """ + A sequential module that passes timestep embeddings to the children that + support it as an extra input. + """ + + def forward(self, *args, **kwargs): + return forward_timestep_embed(self, *args, **kwargs) + +class Downsample(nn.Module): + """ + A downsampling layer with an optional convolution. + :param channels: channels in the inputs and outputs. + :param use_conv: a bool determining if a convolution is applied. + :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then + downsampling occurs in the inner-two dimensions. + """ + + def __init__(self, channels, use_conv, dims=2, out_channels=None, padding=1, dtype=None, device=None, operations=ops): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.dims = dims + stride = 2 if dims != 3 else (1, 2, 2) + if use_conv: + self.op = operations.conv_nd( + dims, self.channels, self.out_channels, 3, stride=stride, padding=padding + ) + else: + assert self.channels == self.out_channels + self.op = avg_pool_nd(dims, kernel_size=stride, stride=stride) + + def forward(self, x): + assert x.shape[1] == self.channels + return self.op(x) + +class Upsample(nn.Module): + """ + An upsampling layer with an optional convolution. + :param channels: channels in the inputs and outputs. + :param use_conv: a bool determining if a convolution is applied. + :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then + upsampling occurs in the inner-two dimensions. + """ + + def __init__(self, channels, use_conv, dims=2, out_channels=None, padding=1, dtype=None, device=None, operations=ops): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.dims = dims + if use_conv: + self.conv = operations.conv_nd(dims, self.channels, self.out_channels, 3, padding=padding, dtype=dtype, device=device) + + def forward(self, x): + assert x.shape[1] == self.channels + if self.dims == 3: + x = F.interpolate(x, (x.shape[2], x.shape[3] * 2, x.shape[4] * 2), mode='nearest') + else: + x = F.interpolate(x, scale_factor=2, mode='nearest') + if self.use_conv: + x = self.conv(x) + return x + +class ResBlock(TimestepBlock): + """ + A residual block that can optionally change the number of channels. + :param channels: the number of input channels. + :param emb_channels: the number of timestep embedding channels. + :param dropout: the rate of dropout. + :param out_channels: if specified, the number of out channels. + :param use_conv: if True and out_channels is specified, use a spatial + convolution instead of a smaller 1x1 convolution to change the + channels in the skip connection. 
+ :param dims: determines if the signal is 1D, 2D, or 3D. + :param up: if True, use this block for upsampling. + :param down: if True, use this block for downsampling. + :param use_temporal_conv: if True, use the temporal convolution. + :param use_image_dataset: if True, the temporal parameters will not be optimized. + """ + + def __init__( + self, + channels, + emb_channels, + dropout, + out_channels=None, + use_scale_shift_norm=False, + dims=2, + use_checkpoint=False, + use_conv=False, + up=False, + down=False, + kernel_size=3, + use_temporal_conv=False, + tempspatial_aware=False, + dtype=None, + device=None, + operations=ops + ): + super().__init__() + self.channels = channels + self.emb_channels = emb_channels + self.dropout = dropout + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.use_checkpoint = use_checkpoint + self.use_scale_shift_norm = use_scale_shift_norm + self.use_temporal_conv = use_temporal_conv + + if isinstance(kernel_size, list): + padding =[k // 2 for k in kernel_size] + else: + padding = kernel_size // 2 + + # operations used in normalization function + self.in_layers = nn.Sequential( + normalization(channels, dtype=dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, channels, self.out_channels, 3, padding=1, dtype=dtype, device=device), + ) + + self.updown = up or down + + if up: + self.h_upd = Upsample(channels, False, dims, dtype=dtype, device=device) + self.x_upd = Upsample(channels, False, dims, dtype=dtype, device=device) + elif down: + self.h_upd = Downsample(channels, False, dims, dtype=dtype, device=device) + self.x_upd = Downsample(channels, False, dims, dtype=dtype, device=device) + else: + self.h_upd = self.x_upd = nn.Identity() + + self.emb_layers = nn.Sequential( + nn.SiLU(), + operations.Linear( + emb_channels, + 2 * self.out_channels if use_scale_shift_norm else self.out_channels, + dtype=dtype, + device=device + ), + ) + self.out_layers = nn.Sequential( + normalization(self.out_channels, dtype=dtype, device=device), + nn.SiLU(), + nn.Dropout(p=dropout), + zero_module(operations.Conv2d(self.out_channels, self.out_channels, 3, padding=1, dtype=dtype, device=device)), + ) + + if self.out_channels == channels: + self.skip_connection = nn.Identity() + elif use_conv: + self.skip_connection = operations.conv_nd(dims, channels, self.out_channels, 3, padding=1, dtype=dtype, device=device) + else: + self.skip_connection = operations.conv_nd(dims, channels, self.out_channels, 1, dtype=dtype, device=device) + + if self.use_temporal_conv: + self.temopral_conv = TemporalConvBlock( + self.out_channels, + self.out_channels, + dropout=0.1, + spatial_aware=tempspatial_aware, + dtype=dtype, + device=device + ) + + def forward(self, x, emb, batch_size=None): + """ + Apply the block to a Tensor, conditioned on a timestep embedding. + :param x: an [N x C x ...] Tensor of features. + :param emb: an [N x emb_channels] Tensor of timestep embeddings. + :return: an [N x C x ...] Tensor of outputs. 
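+        Note: `batch_size` (when given) is also used to fold the frame dimension back
+        out of the batch so the optional temporal convolution can run across frames.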
+ """ + input_tuple = (x, emb) + if batch_size: + forward_batchsize = partial(self._forward, batch_size=batch_size) + return checkpoint(forward_batchsize, input_tuple, self.parameters(), self.use_checkpoint) + return checkpoint(self._forward, input_tuple, self.parameters(), self.use_checkpoint) + + def _forward(self, x, emb, batch_size=None): + if self.updown: + in_rest, in_conv = self.in_layers[:-1], self.in_layers[-1] + h = in_rest(x) + h = self.h_upd(h) + x = self.x_upd(x) + h = in_conv(h) + else: + h = self.in_layers(x) + emb_out = self.emb_layers(emb).type(h.dtype) + while len(emb_out.shape) < len(h.shape): + emb_out = emb_out[..., None] + if self.use_scale_shift_norm: + out_norm, out_rest = self.out_layers[0], self.out_layers[1:] + scale, shift = torch.chunk(emb_out, 2, dim=1) + h = out_norm(h) * (1 + scale) + shift + h = out_rest(h) + else: + h = h + emb_out + h = self.out_layers(h) + h = self.skip_connection(x) + h + + if self.use_temporal_conv and batch_size: + h = rearrange(h, '(b t) c h w -> b c t h w', b=batch_size) + h = self.temopral_conv(h) + h = rearrange(h, 'b c t h w -> (b t) c h w') + return h + +class TemporalConvBlock(nn.Module): + """ + Adapted from modelscope: https://github.com/modelscope/modelscope/blob/master/modelscope/models/multi_modal/video_synthesis/unet_sd.py + """ + def __init__( + self, + in_channels, + out_channels=None, + dropout=0.0, + spatial_aware=False, + dtype=None, + device=None, + operations=ops + ): + super(TemporalConvBlock, self).__init__() + if out_channels is None: + out_channels = in_channels + self.in_channels = in_channels + self.out_channels = out_channels + th_kernel_shape = (3, 1, 1) if not spatial_aware else (3, 3, 1) + th_padding_shape = (1, 0, 0) if not spatial_aware else (1, 1, 0) + tw_kernel_shape = (3, 1, 1) if not spatial_aware else (3, 1, 3) + tw_padding_shape = (1, 0, 0) if not spatial_aware else (1, 0, 1) + + # conv layers + self.conv1 = nn.Sequential( + operations.GroupNorm(32, in_channels, device=device, dtype=dtype), nn.SiLU(), + operations.Conv3d(in_channels, out_channels, th_kernel_shape, padding=th_padding_shape, device=device, dtype=dtype)) + self.conv2 = nn.Sequential( + operations.GroupNorm(32, out_channels, device=device, dtype=dtype), nn.SiLU(), nn.Dropout(dropout), + operations.Conv3d(out_channels, in_channels, tw_kernel_shape, padding=tw_padding_shape, device=device, dtype=dtype)) + self.conv3 = nn.Sequential( + operations.GroupNorm(32, out_channels, device=device, dtype=dtype), nn.SiLU(), nn.Dropout(dropout), + operations.Conv3d(out_channels, in_channels, th_kernel_shape, padding=th_padding_shape, device=device, dtype=dtype)) + self.conv4 = nn.Sequential( + operations.GroupNorm(32, out_channels, device=device, dtype=dtype), nn.SiLU(), nn.Dropout(dropout), + operations.Conv3d(out_channels, in_channels, tw_kernel_shape, padding=tw_padding_shape, device=device, dtype=dtype)) + + # zero out the last layer params,so the conv block is identity + nn.init.zeros_(self.conv4[-1].weight) + nn.init.zeros_(self.conv4[-1].bias) + + def forward(self, x): + identity = x + x = self.conv1(x) + x = self.conv2(x) + x = self.conv3(x) + x = self.conv4(x) + + return identity + x + +def context_processor(context, t, img_emb=None, temporal_size=16, concat_only=False, disable_concat=False): + if disable_concat: + return context + + ## repeat t times for context [(b t) 77 768] & time embedding + ## check if we use per-frame image conditioning + + if img_emb is not None: + context = torch.cat([context, img_emb.to(context.device, 
context.dtype)], dim=1) + + if concat_only: + return context + + b, l_context, _ = context.shape + if l_context == 77 + t * temporal_size: + context_text, context_img = context[:,:77,:], context[:,77:,:] + context_text = context_text.repeat_interleave(repeats=t, dim=0) + context_img = rearrange(context_img, 'b (t l) c -> (b t) l c', t=t) + context = torch.cat([context_text, context_img], dim=1) + else: + context = context.repeat_interleave(repeats=t, dim=0) + + return context + +def apply_control(h, control, name, cond_idx=None): + if control is not None and name in control and len(control[name]) > 0: + frames = h.shape[0] + ctrl = control[name].pop() + if ctrl is not None: + try: + if cond_idx is not None and ctrl.shape[0] > frames: + ctrl_frames_list = list(range(ctrl.shape[0])) + ctrl_frames = len(ctrl_frames_list) + + idxs = ( + ctrl_frames_list[ctrl_frames // 2:] if cond_idx == 0 else \ + ctrl_frames_list[:ctrl_frames // 2] + ) + + ctrl = ctrl[idxs] + + h += ctrl + except Exception as e: + if h.shape != ctrl.shape: + logging.warning( + "warning control could not be applied {} {}".format(h.shape, ctrl.shape) + ) + logging.warning(e) + return h + +class UNetModel(nn.Module): + """ + The full UNet model with attention and timestep embedding. + :param in_channels: in_channels in the input Tensor. + :param model_channels: base channel count for the model. + :param out_channels: channels in the output Tensor. + :param num_res_blocks: number of residual blocks per downsample. + :param attention_resolutions: a collection of downsample rates at which + attention will take place. May be a set, list, or tuple. + For example, if this contains 4, then at 4x downsampling, attention + will be used. + :param dropout: the dropout probability. + :param channel_mult: channel multiplier for each level of the UNet. + :param conv_resample: if True, use learned convolutions for upsampling and + downsampling. + :param dims: determines if the signal is 1D, 2D, or 3D. + :param num_classes: if specified (as an int), then this model will be + class-conditional with `num_classes` classes. + :param use_checkpoint: use gradient checkpointing to reduce memory usage. + :param num_heads: the number of attention heads in each attention layer. + :param num_heads_channels: if specified, ignore num_heads and instead use + a fixed channel width per attention head. + :param num_heads_upsample: works with num_heads to set a different number + of heads for upsampling. Deprecated. + :param use_scale_shift_norm: use a FiLM-like conditioning mechanism. + :param resblock_updown: use residual blocks for up/downsampling. + :param use_new_attention_order: use a different attention pattern for potentially + increased efficiency. 
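+    :param temporal_attention: if True, interleave TemporalTransformer blocks with the
+        spatial transformers so attention is also applied across frames.
+    :param fs_condition: if True, add a frame-stride (fps) embedding on top of the
+        timestep embedding.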
+ """ + + def __init__(self, + in_channels, + model_channels, + out_channels, + num_res_blocks, + attention_resolutions, + dropout=0.0, + channel_mult=(1, 2, 4, 8), + conv_resample=True, + dims=2, + context_dim=None, + use_scale_shift_norm=False, + resblock_updown=False, + num_heads=-1, + num_head_channels=-1, + transformer_depth=1, + use_linear=False, + use_checkpoint=False, + temporal_conv=False, + tempspatial_aware=False, + temporal_attention=True, + use_relative_position=True, + use_causal_attention=False, + temporal_length=None, + use_fp16=False, + addition_attention=False, + temporal_selfatt_only=True, + image_cross_attention=False, + image_cross_attention_scale_learnable=False, + default_fs=4, + fs_condition=False, + device=None, + dtype=torch.float16, + operations=ops + ): + super(UNetModel, self).__init__() + if num_heads == -1: + assert num_head_channels != -1, 'Either num_heads or num_head_channels has to be set' + if num_head_channels == -1: + assert num_heads != -1, 'Either num_heads or num_head_channels has to be set' + + self.in_channels = in_channels + self.model_channels = model_channels + self.out_channels = out_channels + self.num_res_blocks = num_res_blocks + self.attention_resolutions = attention_resolutions + self.dropout = dropout + self.channel_mult = channel_mult + self.conv_resample = conv_resample + self.temporal_attention = temporal_attention + time_embed_dim = model_channels * 4 + self.use_checkpoint = use_checkpoint + temporal_self_att_only = True + self.addition_attention = addition_attention + self.temporal_length = temporal_length + self.image_cross_attention = image_cross_attention + self.image_cross_attention_scale_learnable = image_cross_attention_scale_learnable + self.default_fs = default_fs + self.fs_condition = fs_condition + self.device = device + #self.dtype = dtype + self.dtype = torch.float32 + + ## Time embedding blocks + self.time_embed = nn.Sequential( + linear(model_channels, time_embed_dim, device=device, dtype=self.dtype), + nn.SiLU(), + linear(time_embed_dim, time_embed_dim, device=device, dtype=self.dtype), + ) + if fs_condition: + self.fps_embedding = nn.Sequential( + linear(model_channels, time_embed_dim, device=device, dtype=self.dtype), + nn.SiLU(), + linear(time_embed_dim, time_embed_dim, device=device, dtype=self.dtype), + ) + nn.init.zeros_(self.fps_embedding[-1].weight) + nn.init.zeros_(self.fps_embedding[-1].bias) + ## Input Block + self.input_blocks = nn.ModuleList( + [ + TimestepEmbedSequential( + operations.conv_nd( + dims, + in_channels, + model_channels, + 3, + padding=1, + device=device, + dtype=self.dtype + )) + ] + ) + if self.addition_attention: + self.init_attn=TimestepEmbedSequential( + TemporalTransformer( + model_channels, + n_heads=8, + d_head=num_head_channels, + depth=transformer_depth, + context_dim=context_dim, + use_checkpoint=use_checkpoint, only_self_att=temporal_selfatt_only, + causal_attention=False, relative_position=use_relative_position, + temporal_length=temporal_length, + device=device, + dtype=self.dtype + )) + + input_block_chans = [model_channels] + ch = model_channels + ds = 1 + for level, mult in enumerate(channel_mult): + for _ in range(num_res_blocks): + layers = [ + ResBlock(ch, time_embed_dim, dropout, + out_channels=mult * model_channels, dims=dims, use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, tempspatial_aware=tempspatial_aware, + use_temporal_conv=temporal_conv, + device=device, + dtype=self.dtype + ) + ] + ch = mult * model_channels + if ds in 
attention_resolutions: + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + layers.append( + SpatialTransformer(ch, num_heads, dim_head, + depth=transformer_depth, context_dim=context_dim, use_linear=use_linear, + use_checkpoint=use_checkpoint, disable_self_attn=False, + video_length=temporal_length, image_cross_attention=self.image_cross_attention, + image_cross_attention_scale_learnable=self.image_cross_attention_scale_learnable, + device=device, + dtype=self.dtype + ) + ) + if self.temporal_attention: + layers.append( + TemporalTransformer(ch, num_heads, dim_head, + depth=transformer_depth, context_dim=context_dim, use_linear=use_linear, + use_checkpoint=use_checkpoint, only_self_att=temporal_self_att_only, + causal_attention=use_causal_attention, relative_position=use_relative_position, + temporal_length=temporal_length, + device=device, + dtype=self.dtype + ) + ) + self.input_blocks.append(TimestepEmbedSequential(*layers)) + input_block_chans.append(ch) + if level != len(channel_mult) - 1: + out_ch = ch + self.input_blocks.append( + TimestepEmbedSequential( + ResBlock(ch, time_embed_dim, dropout, + out_channels=out_ch, dims=dims, use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + down=True, + device=device, + dtype=self.dtype + ) + if resblock_updown + else Downsample( + ch, + conv_resample, + dims=dims, + out_channels=out_ch, + device=device, + dtype=self.dtype + ) + ) + ) + ch = out_ch + input_block_chans.append(ch) + ds *= 2 + + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + layers = [ + ResBlock(ch, time_embed_dim, dropout, + dims=dims, use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, tempspatial_aware=tempspatial_aware, + use_temporal_conv=temporal_conv, + device=device, + dtype=self.dtype + ), + SpatialTransformer(ch, num_heads, dim_head, + depth=transformer_depth, context_dim=context_dim, use_linear=use_linear, + use_checkpoint=use_checkpoint, disable_self_attn=False, video_length=temporal_length, + image_cross_attention=self.image_cross_attention,image_cross_attention_scale_learnable=self.image_cross_attention_scale_learnable, + device=device, + dtype=self.dtype + ) + ] + if self.temporal_attention: + layers.append( + TemporalTransformer(ch, num_heads, dim_head, + depth=transformer_depth, context_dim=context_dim, use_linear=use_linear, + use_checkpoint=use_checkpoint, only_self_att=temporal_self_att_only, + causal_attention=use_causal_attention, relative_position=use_relative_position, + temporal_length=temporal_length, + device=device, + dtype=self.dtype + ) + ) + layers.append( + ResBlock(ch, time_embed_dim, dropout, + dims=dims, use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, tempspatial_aware=tempspatial_aware, + use_temporal_conv=temporal_conv, + device=device, + dtype=self.dtype + ) + ) + + ## Middle Block + self.middle_block = TimestepEmbedSequential(*layers) + + ## Output Block + self.output_blocks = nn.ModuleList([]) + for level, mult in list(enumerate(channel_mult))[::-1]: + for i in range(num_res_blocks + 1): + ich = input_block_chans.pop() + layers = [ + ResBlock(ch + ich, time_embed_dim, dropout, + out_channels=mult * model_channels, dims=dims, use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, tempspatial_aware=tempspatial_aware, + use_temporal_conv=temporal_conv, + device=device, + 
dtype=self.dtype + ) + ] + ch = model_channels * mult + if ds in attention_resolutions: + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + layers.append( + SpatialTransformer(ch, num_heads, dim_head, + depth=transformer_depth, context_dim=context_dim, use_linear=use_linear, + use_checkpoint=use_checkpoint, disable_self_attn=False, video_length=temporal_length, + image_cross_attention=self.image_cross_attention,image_cross_attention_scale_learnable=self.image_cross_attention_scale_learnable, + device=device, + dtype=self.dtype + ) + ) + if self.temporal_attention: + layers.append( + TemporalTransformer(ch, num_heads, dim_head, + depth=transformer_depth, context_dim=context_dim, use_linear=use_linear, + use_checkpoint=use_checkpoint, only_self_att=temporal_self_att_only, + causal_attention=use_causal_attention, relative_position=use_relative_position, + temporal_length=temporal_length, + device=device, + dtype=self.dtype + ) + ) + if level and i == num_res_blocks: + out_ch = ch + layers.append( + ResBlock(ch, time_embed_dim, dropout, + out_channels=out_ch, dims=dims, use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + up=True, + device=device, + dtype=self.dtype + ) + if resblock_updown + else Upsample(ch, conv_resample, dims=dims, out_channels=out_ch) + ) + ds //= 2 + self.output_blocks.append(TimestepEmbedSequential(*layers)) + + self.out = nn.Sequential( + normalization(ch, device=device, dtype=self.dtype), + nn.SiLU(), + zero_module( + operations.conv_nd( + dims, + model_channels, + out_channels, + 3, + padding=1, + device=device, + dtype=self.dtype + ) + ), + ) + + # TODO Add Transformer options to leverage the usage of patches. + def forward( + self, + x, + timesteps, + context=None, + context_in=None, + cc_concat=None, + num_video_frames=16, + features_adapter=None, + fs=None, + img_emb=None, + control=None, + transformer_options={}, + cond_idx=None, + **kwargs + ): + + if any([fs is None, img_emb is None, cc_concat is None]): + raise ValueError("One or more of the required inputs for UNet Forward is None.") + + cond_idx = transformer_options.get("cond_idx", None) + transformer_options['original_shape'] = list(x.shape) + transformer_options['transformer_index'] = 0 + transformer_patches = transformer_options.get("patches", {}) + + # In ComfyUI, the frames are always with the batch, so we deconstruct it here. + # This is mandatory as this is a video based model. + # We usually denote "f" as frames, but will use "t" (time) to be consistent with DynamiCrafter. 
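+        # For reference: ComfyUI passes the latent video as a (b, c, t, h, w) tensor;
+        # e.g. a single 16-frame clip at 256x256 resolution would arrive here as
+        # (1, 4, 16, 32, 32), so b == 1 and t == 16 in the unpacking below.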
+ b,_,t,_,_ = x.shape + + context = context_in + cc_concat = cc_concat.to(x.device, x.dtype) + x = torch.cat([x, cc_concat], dim=1) + + fs = fs.to(x.device, x.dtype) + + timestep = timesteps + context = context_processor(context, num_video_frames, img_emb=img_emb) + + t_emb = timestep_embedding(timestep, self.model_channels, repeat_only=False, dtype=self.dtype) + emb = self.time_embed(t_emb) + emb = emb.repeat_interleave(repeats=t, dim=0) + + ## always in shape (b t) c h w, except for temporal layer + x = rearrange(x, 'b c t h w -> (b t) c h w') + + ## combine emb + if self.fs_condition: + if fs is None: + fs = torch.tensor( + [self.default_fs] * b, dtype=torch.long, device=x.device) + fs_emb = timestep_embedding(fs, self.model_channels, repeat_only=False, dtype=self.dtype).type(x.dtype) + + fs_embed = self.fps_embedding(fs_emb) + fs_embed = fs_embed.repeat_interleave(repeats=t, dim=0) + + emb = emb + fs_embed + + h = x.type(self.dtype) + adapter_idx = 0 + hs = [] + + for id, module in enumerate(self.input_blocks): + transformer_options["block"] = ("input", id) + #h = module(h, emb, context=context, batch_size=b) + h = forward_timestep_embed( + module, + h, + emb, + context=context, + batch_size=b, + transformer_options=transformer_options + ) + h = apply_control(h, control, 'input', cond_idx) + + if "input_block_patch" in transformer_patches: + patch = transformer_patches["input_block_patch"] + for p in patch: + h = p(h, transformer_options) + + if id ==0 and self.addition_attention: + h = forward_timestep_embed( + self.init_attn, + h, + emb, + context=context, + batch_size=b, + transformer_options=transformer_options + ) + ## plug-in adapter features + if ((id+1)%3 == 0) and features_adapter is not None: + h = h + features_adapter[adapter_idx] + adapter_idx += 1 + hs.append(h) + if "input_block_patch_after_skip" in transformer_patches: + patch = transformer_patches["input_block_patch_after_skip"] + for p in patch: + h = p(h, transformer_options) + if features_adapter is not None: + assert len(features_adapter)==adapter_idx, 'Wrong features_adapter' + transformer_options["block"] = ("middle", 0) + h = forward_timestep_embed( + self.middle_block, + h, + emb, + context=context, + batch_size=b, + transformer_options=transformer_options + ) + h = apply_control(h, control, 'middle', cond_idx) + for id, module in enumerate(self.output_blocks): + transformer_options["block"] = ("output", id) + hsp = hs.pop() + hsp = apply_control(hsp, control, 'output', cond_idx) + + if "output_block_patch" in transformer_patches: + patch = transformer_patches["output_block_patch"] + for p in patch: + h, hsp = p(h, hsp, transformer_options) + + h = torch.cat([h, hsp], dim=1) + del hsp + h = forward_timestep_embed( + module, + h, + emb, + context=context, + batch_size=b, + transformer_options=transformer_options + ) + h = h.type(x.dtype) + h = self.out(h) + + # We output with the tensor unfolded framewise, then reshape them to batched using ComfyUI nodes. 
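+        # e.g. with num_video_frames == 16 this folds (16, 4, 32, 32) back into
+        # (1, 4, 16, 32, 32), giving downstream nodes a batched video latent again.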
+ h = rearrange(h, '(b t) c h w -> b c t h w', t=num_video_frames) + + return h \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/modules/x_transformer.py b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/modules/x_transformer.py new file mode 100644 index 0000000000000000000000000000000000000000..5321012f860a8fb06850c1ddf495db934addecae --- /dev/null +++ b/ComfyUI-Easy-Use/py/dynamiCrafter/lvdm/modules/x_transformer.py @@ -0,0 +1,639 @@ +"""shout-out to https://github.com/lucidrains/x-transformers/tree/main/x_transformers""" +from functools import partial +from inspect import isfunction +from collections import namedtuple +from einops import rearrange, repeat +import torch +from torch import nn, einsum +import torch.nn.functional as F + +# constants +DEFAULT_DIM_HEAD = 64 + +Intermediates = namedtuple('Intermediates', [ + 'pre_softmax_attn', + 'post_softmax_attn' +]) + +LayerIntermediates = namedtuple('Intermediates', [ + 'hiddens', + 'attn_intermediates' +]) + + +class AbsolutePositionalEmbedding(nn.Module): + def __init__(self, dim, max_seq_len): + super().__init__() + self.emb = nn.Embedding(max_seq_len, dim) + self.init_() + + def init_(self): + nn.init.normal_(self.emb.weight, std=0.02) + + def forward(self, x): + n = torch.arange(x.shape[1], device=x.device) + return self.emb(n)[None, :, :] + + +class FixedPositionalEmbedding(nn.Module): + def __init__(self, dim): + super().__init__() + inv_freq = 1. / (10000 ** (torch.arange(0, dim, 2).float() / dim)) + self.register_buffer('inv_freq', inv_freq) + + def forward(self, x, seq_dim=1, offset=0): + t = torch.arange(x.shape[seq_dim], device=x.device).type_as(self.inv_freq) + offset + sinusoid_inp = torch.einsum('i , j -> i j', t, self.inv_freq) + emb = torch.cat((sinusoid_inp.sin(), sinusoid_inp.cos()), dim=-1) + return emb[None, :, :] + + +# helpers + +def exists(val): + return val is not None + + +def default(val, d): + if exists(val): + return val + return d() if isfunction(d) else d + + +def always(val): + def inner(*args, **kwargs): + return val + return inner + + +def not_equals(val): + def inner(x): + return x != val + return inner + + +def equals(val): + def inner(x): + return x == val + return inner + + +def max_neg_value(tensor): + return -torch.finfo(tensor.dtype).max + + +# keyword argument helpers + +def pick_and_pop(keys, d): + values = list(map(lambda key: d.pop(key), keys)) + return dict(zip(keys, values)) + + +def group_dict_by_key(cond, d): + return_val = [dict(), dict()] + for key in d.keys(): + match = bool(cond(key)) + ind = int(not match) + return_val[ind][key] = d[key] + return (*return_val,) + + +def string_begins_with(prefix, str): + return str.startswith(prefix) + + +def group_by_key_prefix(prefix, d): + return group_dict_by_key(partial(string_begins_with, prefix), d) + + +def groupby_prefix_and_trim(prefix, d): + kwargs_with_prefix, kwargs = group_dict_by_key(partial(string_begins_with, prefix), d) + kwargs_without_prefix = dict(map(lambda x: (x[0][len(prefix):], x[1]), tuple(kwargs_with_prefix.items()))) + return kwargs_without_prefix, kwargs + + +# classes +class Scale(nn.Module): + def __init__(self, value, fn): + super().__init__() + self.value = value + self.fn = fn + + def forward(self, x, **kwargs): + x, *rest = self.fn(x, **kwargs) + return (x * self.value, *rest) + + +class Rezero(nn.Module): + def __init__(self, fn): + super().__init__() + self.fn = fn + self.g = nn.Parameter(torch.zeros(1)) + + def forward(self, x, **kwargs): + x, *rest = self.fn(x, **kwargs) + return (x * 
self.g, *rest) + + +class ScaleNorm(nn.Module): + def __init__(self, dim, eps=1e-5): + super().__init__() + self.scale = dim ** -0.5 + self.eps = eps + self.g = nn.Parameter(torch.ones(1)) + + def forward(self, x): + norm = torch.norm(x, dim=-1, keepdim=True) * self.scale + return x / norm.clamp(min=self.eps) * self.g + + +class RMSNorm(nn.Module): + def __init__(self, dim, eps=1e-8): + super().__init__() + self.scale = dim ** -0.5 + self.eps = eps + self.g = nn.Parameter(torch.ones(dim)) + + def forward(self, x): + norm = torch.norm(x, dim=-1, keepdim=True) * self.scale + return x / norm.clamp(min=self.eps) * self.g + + +class Residual(nn.Module): + def forward(self, x, residual): + return x + residual + + +class GRUGating(nn.Module): + def __init__(self, dim): + super().__init__() + self.gru = nn.GRUCell(dim, dim) + + def forward(self, x, residual): + gated_output = self.gru( + rearrange(x, 'b n d -> (b n) d'), + rearrange(residual, 'b n d -> (b n) d') + ) + + return gated_output.reshape_as(x) + + +# feedforward + +class GEGLU(nn.Module): + def __init__(self, dim_in, dim_out): + super().__init__() + self.proj = nn.Linear(dim_in, dim_out * 2) + + def forward(self, x): + x, gate = self.proj(x).chunk(2, dim=-1) + return x * F.gelu(gate) + + +class FeedForward(nn.Module): + def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0.): + super().__init__() + inner_dim = int(dim * mult) + dim_out = default(dim_out, dim) + project_in = nn.Sequential( + nn.Linear(dim, inner_dim), + nn.GELU() + ) if not glu else GEGLU(dim, inner_dim) + + self.net = nn.Sequential( + project_in, + nn.Dropout(dropout), + nn.Linear(inner_dim, dim_out) + ) + + def forward(self, x): + return self.net(x) + + +# attention. +class Attention(nn.Module): + def __init__( + self, + dim, + dim_head=DEFAULT_DIM_HEAD, + heads=8, + causal=False, + mask=None, + talking_heads=False, + sparse_topk=None, + use_entmax15=False, + num_mem_kv=0, + dropout=0., + on_attn=False + ): + super().__init__() + if use_entmax15: + raise NotImplementedError("Check out entmax activation instead of softmax activation!") + self.scale = dim_head ** -0.5 + self.heads = heads + self.causal = causal + self.mask = mask + + inner_dim = dim_head * heads + + self.to_q = nn.Linear(dim, inner_dim, bias=False) + self.to_k = nn.Linear(dim, inner_dim, bias=False) + self.to_v = nn.Linear(dim, inner_dim, bias=False) + self.dropout = nn.Dropout(dropout) + + # talking heads + self.talking_heads = talking_heads + if talking_heads: + self.pre_softmax_proj = nn.Parameter(torch.randn(heads, heads)) + self.post_softmax_proj = nn.Parameter(torch.randn(heads, heads)) + + # explicit topk sparse attention + self.sparse_topk = sparse_topk + + # entmax + #self.attn_fn = entmax15 if use_entmax15 else F.softmax + self.attn_fn = F.softmax + + # add memory key / values + self.num_mem_kv = num_mem_kv + if num_mem_kv > 0: + self.mem_k = nn.Parameter(torch.randn(heads, num_mem_kv, dim_head)) + self.mem_v = nn.Parameter(torch.randn(heads, num_mem_kv, dim_head)) + + # attention on attention + self.attn_on_attn = on_attn + self.to_out = nn.Sequential(nn.Linear(inner_dim, dim * 2), nn.GLU()) if on_attn else nn.Linear(inner_dim, dim) + + def forward( + self, + x, + context=None, + mask=None, + context_mask=None, + rel_pos=None, + sinusoidal_emb=None, + prev_attn=None, + mem=None + ): + b, n, _, h, talking_heads, device = *x.shape, self.heads, self.talking_heads, x.device + kv_input = default(context, x) + + q_input = x + k_input = kv_input + v_input = kv_input + + if exists(mem): + 
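+            # prepend any cached memory states to the key/value inputs along the
+            # sequence dimension before the q/k/v projections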
k_input = torch.cat((mem, k_input), dim=-2) + v_input = torch.cat((mem, v_input), dim=-2) + + if exists(sinusoidal_emb): + # in shortformer, the query would start at a position offset depending on the past cached memory + offset = k_input.shape[-2] - q_input.shape[-2] + q_input = q_input + sinusoidal_emb(q_input, offset=offset) + k_input = k_input + sinusoidal_emb(k_input) + + q = self.to_q(q_input) + k = self.to_k(k_input) + v = self.to_v(v_input) + + q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> b h n d', h=h), (q, k, v)) + + input_mask = None + if any(map(exists, (mask, context_mask))): + q_mask = default(mask, lambda: torch.ones((b, n), device=device).bool()) + k_mask = q_mask if not exists(context) else context_mask + k_mask = default(k_mask, lambda: torch.ones((b, k.shape[-2]), device=device).bool()) + q_mask = rearrange(q_mask, 'b i -> b () i ()') + k_mask = rearrange(k_mask, 'b j -> b () () j') + input_mask = q_mask * k_mask + + if self.num_mem_kv > 0: + mem_k, mem_v = map(lambda t: repeat(t, 'h n d -> b h n d', b=b), (self.mem_k, self.mem_v)) + k = torch.cat((mem_k, k), dim=-2) + v = torch.cat((mem_v, v), dim=-2) + if exists(input_mask): + input_mask = F.pad(input_mask, (self.num_mem_kv, 0), value=True) + + dots = einsum('b h i d, b h j d -> b h i j', q, k) * self.scale + mask_value = max_neg_value(dots) + + if exists(prev_attn): + dots = dots + prev_attn + + pre_softmax_attn = dots + + if talking_heads: + dots = einsum('b h i j, h k -> b k i j', dots, self.pre_softmax_proj).contiguous() + + if exists(rel_pos): + dots = rel_pos(dots) + + if exists(input_mask): + dots.masked_fill_(~input_mask, mask_value) + del input_mask + + if self.causal: + i, j = dots.shape[-2:] + r = torch.arange(i, device=device) + mask = rearrange(r, 'i -> () () i ()') < rearrange(r, 'j -> () () () j') + mask = F.pad(mask, (j - i, 0), value=False) + dots.masked_fill_(mask, mask_value) + del mask + + if exists(self.sparse_topk) and self.sparse_topk < dots.shape[-1]: + top, _ = dots.topk(self.sparse_topk, dim=-1) + vk = top[..., -1].unsqueeze(-1).expand_as(dots) + mask = dots < vk + dots.masked_fill_(mask, mask_value) + del mask + + attn = self.attn_fn(dots, dim=-1) + post_softmax_attn = attn + + attn = self.dropout(attn) + + if talking_heads: + attn = einsum('b h i j, h k -> b k i j', attn, self.post_softmax_proj).contiguous() + + out = einsum('b h i j, b h j d -> b h i d', attn, v) + out = rearrange(out, 'b h n d -> b n (h d)') + + intermediates = Intermediates( + pre_softmax_attn=pre_softmax_attn, + post_softmax_attn=post_softmax_attn + ) + + return self.to_out(out), intermediates + + +class AttentionLayers(nn.Module): + def __init__( + self, + dim, + depth, + heads=8, + causal=False, + cross_attend=False, + only_cross=False, + use_scalenorm=False, + use_rmsnorm=False, + use_rezero=False, + rel_pos_num_buckets=32, + rel_pos_max_distance=128, + position_infused_attn=False, + custom_layers=None, + sandwich_coef=None, + par_ratio=None, + residual_attn=False, + cross_residual_attn=False, + macaron=False, + pre_norm=True, + gate_residual=False, + **kwargs + ): + super().__init__() + ff_kwargs, kwargs = groupby_prefix_and_trim('ff_', kwargs) + attn_kwargs, _ = groupby_prefix_and_trim('attn_', kwargs) + + dim_head = attn_kwargs.get('dim_head', DEFAULT_DIM_HEAD) + + self.dim = dim + self.depth = depth + self.layers = nn.ModuleList([]) + + self.has_pos_emb = position_infused_attn + self.pia_pos_emb = FixedPositionalEmbedding(dim) if position_infused_attn else None + self.rotary_pos_emb = always(None) + + assert 
rel_pos_num_buckets <= rel_pos_max_distance, 'number of relative position buckets must be less than the relative position max distance' + self.rel_pos = None + + self.pre_norm = pre_norm + + self.residual_attn = residual_attn + self.cross_residual_attn = cross_residual_attn + + norm_class = ScaleNorm if use_scalenorm else nn.LayerNorm + norm_class = RMSNorm if use_rmsnorm else norm_class + norm_fn = partial(norm_class, dim) + + norm_fn = nn.Identity if use_rezero else norm_fn + branch_fn = Rezero if use_rezero else None + + if cross_attend and not only_cross: + default_block = ('a', 'c', 'f') + elif cross_attend and only_cross: + default_block = ('c', 'f') + else: + default_block = ('a', 'f') + + if macaron: + default_block = ('f',) + default_block + + if exists(custom_layers): + layer_types = custom_layers + elif exists(par_ratio): + par_depth = depth * len(default_block) + assert 1 < par_ratio <= par_depth, 'par ratio out of range' + default_block = tuple(filter(not_equals('f'), default_block)) + par_attn = par_depth // par_ratio + depth_cut = par_depth * 2 // 3 # 2 / 3 attention layer cutoff suggested by PAR paper + par_width = (depth_cut + depth_cut // par_attn) // par_attn + assert len(default_block) <= par_width, 'default block is too large for par_ratio' + par_block = default_block + ('f',) * (par_width - len(default_block)) + par_head = par_block * par_attn + layer_types = par_head + ('f',) * (par_depth - len(par_head)) + elif exists(sandwich_coef): + assert sandwich_coef > 0 and sandwich_coef <= depth, 'sandwich coefficient should be less than the depth' + layer_types = ('a',) * sandwich_coef + default_block * (depth - sandwich_coef) + ('f',) * sandwich_coef + else: + layer_types = default_block * depth + + self.layer_types = layer_types + self.num_attn_layers = len(list(filter(equals('a'), layer_types))) + + for layer_type in self.layer_types: + if layer_type == 'a': + layer = Attention(dim, heads=heads, causal=causal, **attn_kwargs) + elif layer_type == 'c': + layer = Attention(dim, heads=heads, **attn_kwargs) + elif layer_type == 'f': + layer = FeedForward(dim, **ff_kwargs) + layer = layer if not macaron else Scale(0.5, layer) + else: + raise Exception(f'invalid layer type {layer_type}') + + if isinstance(layer, Attention) and exists(branch_fn): + layer = branch_fn(layer) + + if gate_residual: + residual_fn = GRUGating(dim) + else: + residual_fn = Residual() + + self.layers.append(nn.ModuleList([ + norm_fn(), + layer, + residual_fn + ])) + + def forward( + self, + x, + context=None, + mask=None, + context_mask=None, + mems=None, + return_hiddens=False + ): + hiddens = [] + intermediates = [] + prev_attn = None + prev_cross_attn = None + + mems = mems.copy() if exists(mems) else [None] * self.num_attn_layers + + for ind, (layer_type, (norm, block, residual_fn)) in enumerate(zip(self.layer_types, self.layers)): + is_last = ind == (len(self.layers) - 1) + + if layer_type == 'a': + hiddens.append(x) + layer_mem = mems.pop(0) + + residual = x + + if self.pre_norm: + x = norm(x) + + if layer_type == 'a': + out, inter = block(x, mask=mask, sinusoidal_emb=self.pia_pos_emb, rel_pos=self.rel_pos, + prev_attn=prev_attn, mem=layer_mem) + elif layer_type == 'c': + out, inter = block(x, context=context, mask=mask, context_mask=context_mask, prev_attn=prev_cross_attn) + elif layer_type == 'f': + out = block(x) + + x = residual_fn(out, residual) + + if layer_type in ('a', 'c'): + intermediates.append(inter) + + if layer_type == 'a' and self.residual_attn: + prev_attn = inter.pre_softmax_attn + 
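+                # prev_attn is added to the next self-attention block's logits
+                # (residual attention, see the `dots = dots + prev_attn` path above)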
elif layer_type == 'c' and self.cross_residual_attn: + prev_cross_attn = inter.pre_softmax_attn + + if not self.pre_norm and not is_last: + x = norm(x) + + if return_hiddens: + intermediates = LayerIntermediates( + hiddens=hiddens, + attn_intermediates=intermediates + ) + + return x, intermediates + + return x + + +class Encoder(AttentionLayers): + def __init__(self, **kwargs): + assert 'causal' not in kwargs, 'cannot set causality on encoder' + super().__init__(causal=False, **kwargs) + + + +class TransformerWrapper(nn.Module): + def __init__( + self, + *, + num_tokens, + max_seq_len, + attn_layers, + emb_dim=None, + max_mem_len=0., + emb_dropout=0., + num_memory_tokens=None, + tie_embedding=False, + use_pos_emb=True + ): + super().__init__() + assert isinstance(attn_layers, AttentionLayers), 'attention layers must be one of Encoder or Decoder' + + dim = attn_layers.dim + emb_dim = default(emb_dim, dim) + + self.max_seq_len = max_seq_len + self.max_mem_len = max_mem_len + self.num_tokens = num_tokens + + self.token_emb = nn.Embedding(num_tokens, emb_dim) + self.pos_emb = AbsolutePositionalEmbedding(emb_dim, max_seq_len) if ( + use_pos_emb and not attn_layers.has_pos_emb) else always(0) + self.emb_dropout = nn.Dropout(emb_dropout) + + self.project_emb = nn.Linear(emb_dim, dim) if emb_dim != dim else nn.Identity() + self.attn_layers = attn_layers + self.norm = nn.LayerNorm(dim) + + self.init_() + + self.to_logits = nn.Linear(dim, num_tokens) if not tie_embedding else lambda t: t @ self.token_emb.weight.t() + + # memory tokens (like [cls]) from Memory Transformers paper + num_memory_tokens = default(num_memory_tokens, 0) + self.num_memory_tokens = num_memory_tokens + if num_memory_tokens > 0: + self.memory_tokens = nn.Parameter(torch.randn(num_memory_tokens, dim)) + + # let funnel encoder know number of memory tokens, if specified + if hasattr(attn_layers, 'num_memory_tokens'): + attn_layers.num_memory_tokens = num_memory_tokens + + def init_(self): + nn.init.normal_(self.token_emb.weight, std=0.02) + + def forward( + self, + x, + return_embeddings=False, + mask=None, + return_mems=False, + return_attn=False, + mems=None, + **kwargs + ): + b, n, device, num_mem = *x.shape, x.device, self.num_memory_tokens + x = self.token_emb(x) + x += self.pos_emb(x) + x = self.emb_dropout(x) + + x = self.project_emb(x) + + if num_mem > 0: + mem = repeat(self.memory_tokens, 'n d -> b n d', b=b) + x = torch.cat((mem, x), dim=1) + + # auto-handle masking after appending memory tokens + if exists(mask): + mask = F.pad(mask, (num_mem, 0), value=True) + + x, intermediates = self.attn_layers(x, mask=mask, mems=mems, return_hiddens=True, **kwargs) + x = self.norm(x) + + mem, x = x[:, :num_mem], x[:, num_mem:] + + out = self.to_logits(x) if not return_embeddings else x + + if return_mems: + hiddens = intermediates.hiddens + new_mems = list(map(lambda pair: torch.cat(pair, dim=-2), zip(mems, hiddens))) if exists(mems) else hiddens + new_mems = list(map(lambda t: t[..., -self.max_mem_len:, :].detach(), new_mems)) + return out, new_mems + + if return_attn: + attn_maps = list(map(lambda t: t.post_softmax_attn, intermediates.attn_intermediates)) + return out, attn_maps + + return out \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/dynamiCrafter/utils/model_utils.py b/ComfyUI-Easy-Use/py/dynamiCrafter/utils/model_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..72d2c40dbec360c7796aeb6fbfa91ffd2a3d2447 --- /dev/null +++ b/ComfyUI-Easy-Use/py/dynamiCrafter/utils/model_utils.py @@ 
-0,0 +1,146 @@ + +import torch + +from collections import OrderedDict + +from comfy import model_base +from comfy import utils +from comfy import diffusers_convert + +try: + import comfy.text_encoders.sd2_clip +except ImportError: + from comfy import sd2_clip + +from comfy import supported_models_base +from comfy import latent_formats + +from ..lvdm.modules.encoders.resampler import Resampler + +DYNAMICRAFTER_CONFIG = { + 'in_channels': 8, + 'out_channels': 4, + 'model_channels': 320, + 'attention_resolutions': [4, 2, 1], + 'num_res_blocks': 2, + 'channel_mult': [1, 2, 4, 4], + 'num_head_channels': 64, + 'transformer_depth': 1, + 'context_dim': 1024, + 'use_linear': True, + 'use_checkpoint': False, + 'temporal_conv': True, + 'temporal_attention': True, + 'temporal_selfatt_only': True, + 'use_relative_position': False, + 'use_causal_attention': False, + 'temporal_length': 16, + 'addition_attention': True, + 'image_cross_attention': True, + 'image_cross_attention_scale_learnable': True, + 'default_fs': 3, + 'fs_condition': True +} + +IMAGE_PROJ_CONFIG = { + "dim": 1024, + "depth": 4, + "dim_head": 64, + "heads": 12, + "num_queries": 16, + "embedding_dim": 1280, + "output_dim": 1024, + "ff_mult": 4, + "video_length": 16 +} + +def process_list_or_str(target_key_or_keys, k): + if isinstance(target_key_or_keys, list): + return any([list_k in k for list_k in target_key_or_keys]) + else: + return target_key_or_keys in k + +def simple_state_dict_loader(state_dict: dict, target_key: str, target_dict: dict = None): + out_dict = {} + + if target_dict is None: + for k, v in state_dict.items(): + if process_list_or_str(target_key, k): + out_dict[k] = v + else: + for k, v in target_dict.items(): + out_dict[k] = state_dict[k] + + return out_dict + +def load_image_proj_dict(state_dict: dict): + return simple_state_dict_loader(state_dict, 'image_proj') + +def load_dynamicrafter_dict(state_dict: dict): + return simple_state_dict_loader(state_dict, 'model.diffusion_model') + +def load_vae_dict(state_dict: dict): + return simple_state_dict_loader(state_dict, 'first_stage_model') + +def get_base_model(state_dict: dict, version_checker=False): + + is_256_model = False + + for k in state_dict.keys(): + if "framestride_embed" in k: + is_256_model = True + break + +def get_image_proj_model(state_dict: dict): + + state_dict = {k.replace('image_proj_model.', ''): v for k, v in state_dict.items()} + #target_dict = Resampler().state_dict() + + ImageProjModel = Resampler(**IMAGE_PROJ_CONFIG) + ImageProjModel.load_state_dict(state_dict) + + print("Image Projection Model loaded successfully") + #del target_dict + return ImageProjModel + +class DynamiCrafterBase(supported_models_base.BASE): + unet_config = {} + unet_extra_config = {} + + latent_format = latent_formats.SD15 + + def process_clip_state_dict(self, state_dict): + replace_prefix = {} + replace_prefix["conditioner.embedders.0.model."] = "clip_h." #SD2 in sgm format + replace_prefix["cond_stage_model.model."] = "clip_h." 
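+        # collapse both the sgm-style and the legacy cond_stage_model prefixes into the
+        # clip_h. namespace before converting the keys to the transformers layout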
+ state_dict = utils.state_dict_prefix_replace(state_dict, replace_prefix, filter_keys=True) + state_dict = utils.clip_text_transformers_convert(state_dict, "clip_h.", "clip_h.transformer.") + return state_dict + + def process_clip_state_dict_for_saving(self, state_dict): + replace_prefix = {} + replace_prefix["clip_h"] = "cond_stage_model.model" + state_dict = utils.state_dict_prefix_replace(state_dict, replace_prefix) + state_dict = diffusers_convert.convert_text_enc_state_dict_v20(state_dict) + return state_dict + + def clip_target(self): + return supported_models_base.ClipTarget(sd2_clip.SD2Tokenizer, sd2_clip.SD2ClipModel) + + def process_dict_version(self, state_dict: dict): + processed_dict = OrderedDict() + is_eps = False + + for k in list(state_dict.keys()): + if "framestride_embed" in k: + new_key = k.replace("framestride_embed", "fps_embedding") + processed_dict[new_key] = state_dict[k] + is_eps = True + continue + + processed_dict[k] = state_dict[k] + + return processed_dict, is_eps + + + diff --git a/ComfyUI-Easy-Use/py/dynamiCrafter/utils/utils.py b/ComfyUI-Easy-Use/py/dynamiCrafter/utils/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..0e23b259b5d14378e38affb7873a4e231c4b3651 --- /dev/null +++ b/ComfyUI-Easy-Use/py/dynamiCrafter/utils/utils.py @@ -0,0 +1,82 @@ +import importlib +import numpy as np +import cv2 +import torch +import torch.distributed as dist + +MODEL_EXTS = ['ckpt', 'safetensors', 'bin'] + +def get_models_directory(directory: list): + files_list = list(filter(lambda f: f.split(".")[-1] in MODEL_EXTS, directory)) + return files_list + +def count_params(model, verbose=False): + total_params = sum(p.numel() for p in model.parameters()) + if verbose: + print(f"{model.__class__.__name__} has {total_params*1.e-6:.2f} M params.") + return total_params + + +def check_istarget(name, para_list): + """ + name: full name of source para + para_list: partial name of target para + """ + istarget=False + for para in para_list: + if para in name: + return True + return istarget + + +def instantiate_from_config(config): + if not "target" in config: + if config == '__is_first_stage__': + return None + elif config == "__is_unconditional__": + return None + raise KeyError("Expected key `target` to instantiate.") + return get_obj_from_str(config["target"])(**config.get("params", dict())) + + +def get_obj_from_str(string, reload=False): + module, cls = string.rsplit(".", 1) + if reload: + module_imp = importlib.import_module(module) + importlib.reload(module_imp) + return getattr(importlib.import_module(module, package=None), cls) + + +def load_npz_from_dir(data_dir): + data = [np.load(os.path.join(data_dir, data_name))['arr_0'] for data_name in os.listdir(data_dir)] + data = np.concatenate(data, axis=0) + return data + + +def load_npz_from_paths(data_paths): + data = [np.load(data_path)['arr_0'] for data_path in data_paths] + data = np.concatenate(data, axis=0) + return data + + +def resize_numpy_image(image, max_resolution=512 * 512, resize_short_edge=None): + h, w = image.shape[:2] + if resize_short_edge is not None: + k = resize_short_edge / min(h, w) + else: + k = max_resolution / (h * w) + k = k**0.5 + h = int(np.round(h * k / 64)) * 64 + w = int(np.round(w * k / 64)) * 64 + image = cv2.resize(image, (w, h), interpolation=cv2.INTER_LANCZOS4) + return image + + +def setup_dist(args): + if dist.is_initialized(): + return + torch.cuda.set_device(args.local_rank) + torch.distributed.init_process_group( + 'nccl', + init_method='env://' + ) \ No newline 
at end of file diff --git a/ComfyUI-Easy-Use/py/easyNodes.py b/ComfyUI-Easy-Use/py/easyNodes.py new file mode 100644 index 0000000000000000000000000000000000000000..8e6cb5e519f91874066e43e6b01682b387692dff --- /dev/null +++ b/ComfyUI-Easy-Use/py/easyNodes.py @@ -0,0 +1,7803 @@ +import sys, os, re, json, time +import torch +import folder_paths +import numpy as np +import comfy.utils, comfy.sample, comfy.samplers, comfy.controlnet, comfy.model_base, comfy.model_management, comfy.sampler_helpers, comfy.supported_models +from comfy.sd import CLIP, VAE +from comfy.model_patcher import ModelPatcher +from comfy_extras.chainner_models import model_loading +from comfy_extras.nodes_mask import LatentCompositeMasked, GrowMask +from comfy_extras.nodes_compositing import JoinImageWithAlpha +from comfy.clip_vision import load as load_clip_vision +from urllib.request import urlopen +from PIL import Image + +from server import PromptServer +from nodes import MAX_RESOLUTION, LatentFromBatch, RepeatLatentBatch, NODE_CLASS_MAPPINGS as ALL_NODE_CLASS_MAPPINGS, ConditioningSetMask, ConditioningConcat, CLIPTextEncode, VAEEncodeForInpaint, InpaintModelConditioning +from .config import MAX_SEED_NUM, BASE_RESOLUTIONS, RESOURCES_DIR, INPAINT_DIR, FOOOCUS_STYLES_DIR, FOOOCUS_INPAINT_HEAD, FOOOCUS_INPAINT_PATCH, BRUSHNET_MODELS, POWERPAINT_MODELS, IPADAPTER_DIR, IPADAPTER_CLIPVISION_MODELS, IPADAPTER_MODELS, DYNAMICRAFTER_DIR, DYNAMICRAFTER_MODELS, IC_LIGHT_MODELS +from .layer_diffuse import LayerDiffuse, LayerMethod +from .xyplot import XYplot_ModelMergeBlocks, XYplot_CFG, XYplot_Lora, XYplot_Checkpoint, XYplot_Denoise, XYplot_Steps, XYplot_PromptSR, XYplot_Positive_Cond, XYplot_Negative_Cond, XYplot_Positive_Cond_List, XYplot_Negative_Cond_List, XYplot_SeedsBatch, XYplot_Control_Net, XYplot_Sampler_Scheduler + +from .libs.log import log_node_info, log_node_error, log_node_warn +from .libs.adv_encode import advanced_encode +from .libs.wildcards import process_with_loras, get_wildcard_list, process +from .libs.utils import find_wildcards_seed, is_linked_styles_selector, easySave, get_local_filepath, AlwaysEqualProxy, get_sd_version +from .libs.loader import easyLoader +from .libs.sampler import easySampler, alignYourStepsScheduler, gitsScheduler +from .libs.xyplot import easyXYPlot +from .libs.controlnet import easyControlnet, union_controlnet_types +from .libs.conditioning import prompt_to_cond, set_cond +from .libs.easing import EasingBase +from .libs.translate import has_chinese, zh_to_en +from .libs import cache as backend_cache + +sampler = easySampler() +easyCache = easyLoader() + +new_schedulers = ['align_your_steps', 'gits'] +# ---------------------------------------------------------------提示词 开始----------------------------------------------------------------------# + +# 正面提示词 +class positivePrompt: + + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "positive": ("STRING", {"default": "", "multiline": True, "placeholder": "Positive"}),} + } + + RETURN_TYPES = ("STRING",) + RETURN_NAMES = ("positive",) + FUNCTION = "main" + + CATEGORY = "EasyUse/Prompt" + + @staticmethod + def main(positive): + return positive, + +# 通配符提示词 +class wildcardsPrompt: + + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(s): + wildcard_list = get_wildcard_list() + return {"required": { + "text": ("STRING", {"default": "", "multiline": True, "dynamicPrompts": False, "placeholder": "(Support Lora Block Weight and wildcard)"}), + "Select to add LoRA": (["Select the LoRA to add to 
the text"] + folder_paths.get_filename_list("loras"),), + "Select to add Wildcard": (["Select the Wildcard to add to the text"] + wildcard_list,), + "seed": ("INT", {"default": 0, "min": 0, "max": MAX_SEED_NUM}), + "multiline_mode": ("BOOLEAN", {"default": False}), + }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID"}, + } + + RETURN_TYPES = ("STRING", "STRING") + RETURN_NAMES = ("text", "populated_text") + OUTPUT_IS_LIST = (True, True) + FUNCTION = "main" + + CATEGORY = "EasyUse/Prompt" + + def translate(self, text): + return text + + def main(self, *args, **kwargs): + prompt = kwargs["prompt"] if "prompt" in kwargs else None + seed = kwargs["seed"] + + # Clean loaded_objects + if prompt: + easyCache.update_loaded_objects(prompt) + + text = kwargs['text'] + if "multiline_mode" in kwargs and kwargs["multiline_mode"]: + populated_text = [] + _text = [] + text = text.split("\n") + for t in text: + t = self.translate(t) + _text.append(t) + populated_text.append(process(t, seed)) + text = _text + else: + text = self.translate(text) + populated_text = [process(text, seed)] + text = [text] + return {"ui": {"value": [seed]}, "result": (text, populated_text)} + +# 负面提示词 +class negativePrompt: + + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "negative": ("STRING", {"default": "", "multiline": True, "placeholder": "Negative"}),} + } + + RETURN_TYPES = ("STRING",) + RETURN_NAMES = ("negative",) + FUNCTION = "main" + + CATEGORY = "EasyUse/Prompt" + + @staticmethod + def main(negative): + return negative, + +# 风格提示词选择器 +class stylesPromptSelector: + + @classmethod + def INPUT_TYPES(s): + styles = ["fooocus_styles"] + styles_dir = FOOOCUS_STYLES_DIR + for file_name in os.listdir(styles_dir): + file = os.path.join(styles_dir, file_name) + if os.path.isfile(file) and file_name.endswith(".json") and "styles" in file_name.split(".")[0]: + styles.append(file_name.split(".")[0]) + return { + "required": { + "styles": (styles, {"default": "fooocus_styles"}), + }, + "optional": { + "positive": ("STRING", {"forceInput": True}), + "negative": ("STRING", {"forceInput": True}), + }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID"}, + } + + RETURN_TYPES = ("STRING", "STRING",) + RETURN_NAMES = ("positive", "negative",) + + CATEGORY = 'EasyUse/Prompt' + FUNCTION = 'run' + + def run(self, styles, positive='', negative='', prompt=None, extra_pnginfo=None, my_unique_id=None): + values = [] + all_styles = {} + positive_prompt, negative_prompt = '', negative + if styles == "fooocus_styles": + file = os.path.join(RESOURCES_DIR, styles + '.json') + else: + file = os.path.join(FOOOCUS_STYLES_DIR, styles + '.json') + f = open(file, 'r', encoding='utf-8') + data = json.load(f) + f.close() + for d in data: + all_styles[d['name']] = d + if my_unique_id in prompt: + if prompt[my_unique_id]["inputs"]['select_styles']: + values = prompt[my_unique_id]["inputs"]['select_styles'].split(',') + + has_prompt = False + if len(values) == 0: + return (positive, negative) + + for index, val in enumerate(values): + if 'prompt' in all_styles[val]: + if "{prompt}" in all_styles[val]['prompt'] and has_prompt == False: + positive_prompt = all_styles[val]['prompt'].format(prompt=positive) + has_prompt = True + else: + positive_prompt += ', ' + all_styles[val]['prompt'].replace(', {prompt}', '').replace('{prompt}', '') + if 'negative_prompt' in all_styles[val]: + negative_prompt += ', ' + 
all_styles[val]['negative_prompt'] if negative_prompt else all_styles[val]['negative_prompt'] + + if has_prompt == False and positive: + positive_prompt = positive + ', ' + + return (positive_prompt, negative_prompt) + +#prompt +class prompt: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "prompt": ("STRING", {"default": "", "multiline": True, "placeholder": "Prompt"}), + "main": ([ + 'none', + 'beautiful woman, detailed face', + 'handsome man, detailed face', + 'pretty girl', + 'handsome boy', + 'dog', + 'cat', + 'Buddha', + 'toy' + ], {"default": "none"}), + "lighting": ([ + 'none', + 'sunshine from window', + 'neon light, city', + 'sunset over sea', + 'golden time', + 'sci-fi RGB glowing, cyberpunk', + 'natural lighting', + 'warm atmosphere, at home, bedroom', + 'magic lit', + 'evil, gothic, Yharnam', + 'light and shadow', + 'shadow from window', + 'soft studio lighting', + 'home atmosphere, cozy bedroom illumination', + 'neon, Wong Kar-wai, warm', + 'cinemative lighting', + 'neo punk lighting, cyberpunk', + ],{"default":'none'}) + }} + + RETURN_TYPES = ("STRING",) + RETURN_NAMES = ("prompt",) + FUNCTION = "doit" + + CATEGORY = "EasyUse/Prompt" + + def doit(self, prompt, main, lighting): + if lighting != 'none' and main != 'none': + prompt = main + ',' + lighting + ',' + prompt + elif lighting != 'none' and main == 'none': + prompt = prompt + ',' + lighting + elif main != 'none': + prompt = main + ',' + prompt + + return prompt, +#promptList +class promptList: + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "prompt_1": ("STRING", {"multiline": True, "default": ""}), + "prompt_2": ("STRING", {"multiline": True, "default": ""}), + "prompt_3": ("STRING", {"multiline": True, "default": ""}), + "prompt_4": ("STRING", {"multiline": True, "default": ""}), + "prompt_5": ("STRING", {"multiline": True, "default": ""}), + }, + "optional": { + "optional_prompt_list": ("LIST",) + } + } + + RETURN_TYPES = ("LIST", "STRING") + RETURN_NAMES = ("prompt_list", "prompt_strings") + OUTPUT_IS_LIST = (False, True) + FUNCTION = "run" + CATEGORY = "EasyUse/Prompt" + + def run(self, **kwargs): + prompts = [] + + if "optional_prompt_list" in kwargs: + for l in kwargs["optional_prompt_list"]: + prompts.append(l) + + # Iterate over the received inputs in sorted order. + for k in sorted(kwargs.keys()): + v = kwargs[k] + + # Only process string input ports. 
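+            # kwargs holds the five prompt_N widgets (plus optional_prompt_list, already handled
+            # above); sorting the keys keeps prompt_1..prompt_5 in widget order, empty strings are
+            # skipped, and non-string values fail the isinstance check below and are ignored.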
+ if isinstance(v, str) and v != '': + prompts.append(v) + + return (prompts, prompts) + +#promptLine +class promptLine: + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "prompt": ("STRING", {"multiline": True, "default": "text"}), + "start_index": ("INT", {"default": 0, "min": 0, "max": 9999}), + "max_rows": ("INT", {"default": 1000, "min": 1, "max": 9999}), + }, + "hidden":{ + "workflow_prompt": "PROMPT", "my_unique_id": "UNIQUE_ID" + } + } + + RETURN_TYPES = ("STRING", AlwaysEqualProxy('*')) + RETURN_NAMES = ("STRING", "COMBO") + OUTPUT_IS_LIST = (True, True) + FUNCTION = "generate_strings" + CATEGORY = "EasyUse/Prompt" + + def generate_strings(self, prompt, start_index, max_rows, workflow_prompt=None, my_unique_id=None): + lines = prompt.split('\n') + # lines = [zh_to_en([v])[0] if has_chinese(v) else v for v in lines if v] + + start_index = max(0, min(start_index, len(lines) - 1)) + + end_index = min(start_index + max_rows, len(lines)) + + rows = lines[start_index:end_index] + + return (rows, rows) + +class promptConcat: + @classmethod + def INPUT_TYPES(cls): + return {"required": { + }, + "optional": { + "prompt1": ("STRING", {"multiline": False, "default": "", "forceInput": True}), + "prompt2": ("STRING", {"multiline": False, "default": "", "forceInput": True}), + "separator": ("STRING", {"multiline": False, "default": ""}), + }, + } + RETURN_TYPES = ("STRING", ) + RETURN_NAMES = ("prompt", ) + FUNCTION = "concat_text" + CATEGORY = "EasyUse/Prompt" + + def concat_text(self, prompt1="", prompt2="", separator=""): + + return (prompt1 + separator + prompt2,) + +class promptReplace: + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "prompt": ("STRING", {"multiline": True, "default": "", "forceInput": True}), + }, + "optional": { + "find1": ("STRING", {"multiline": False, "default": ""}), + "replace1": ("STRING", {"multiline": False, "default": ""}), + "find2": ("STRING", {"multiline": False, "default": ""}), + "replace2": ("STRING", {"multiline": False, "default": ""}), + "find3": ("STRING", {"multiline": False, "default": ""}), + "replace3": ("STRING", {"multiline": False, "default": ""}), + }, + } + + RETURN_TYPES = ("STRING",) + RETURN_NAMES = ("prompt",) + FUNCTION = "replace_text" + CATEGORY = "EasyUse/Prompt" + + def replace_text(self, prompt, find1="", replace1="", find2="", replace2="", find3="", replace3=""): + + prompt = prompt.replace(find1, replace1) + prompt = prompt.replace(find2, replace2) + prompt = prompt.replace(find3, replace3) + + return (prompt,) + + +# 肖像大师 +# Created by AI Wiz Art (Stefano Flore) +# Version: 2.2 +# https://stefanoflore.it +# https://ai-wiz.art +class portraitMaster: + + @classmethod + def INPUT_TYPES(s): + max_float_value = 1.95 + prompt_path = os.path.join(RESOURCES_DIR, 'portrait_prompt.json') + if not os.path.exists(prompt_path): + response = urlopen('https://raw.githubusercontent.com/yolain/ComfyUI-Easy-Use/main/resources/portrait_prompt.json') + temp_prompt = json.loads(response.read()) + prompt_serialized = json.dumps(temp_prompt, indent=4) + with open(prompt_path, "w") as f: + f.write(prompt_serialized) + del response, temp_prompt + # Load local + with open(prompt_path, 'r') as f: + list = json.load(f) + keys = [ + ['shot', 'COMBO', {"key": "shot_list"}], ['shot_weight', 'FLOAT'], + ['gender', 'COMBO', {"default": "Woman", "key": "gender_list"}], ['age', 'INT', {"default": 30, "min": 18, "max": 90, "step": 1, "display": "slider"}], + ['nationality_1', 'COMBO', {"default": "Chinese", "key": 
"nationality_list"}], ['nationality_2', 'COMBO', {"key": "nationality_list"}], ['nationality_mix', 'FLOAT'], + ['body_type', 'COMBO', {"key": "body_type_list"}], ['body_type_weight', 'FLOAT'], ['model_pose', 'COMBO', {"key": "model_pose_list"}], ['eyes_color', 'COMBO', {"key": "eyes_color_list"}], + ['facial_expression', 'COMBO', {"key": "face_expression_list"}], ['facial_expression_weight', 'FLOAT'], ['face_shape', 'COMBO', {"key": "face_shape_list"}], ['face_shape_weight', 'FLOAT'], ['facial_asymmetry', 'FLOAT'], + ['hair_style', 'COMBO', {"key": "hair_style_list"}], ['hair_color', 'COMBO', {"key": "hair_color_list"}], ['disheveled', 'FLOAT'], ['beard', 'COMBO', {"key": "beard_list"}], + ['skin_details', 'FLOAT'], ['skin_pores', 'FLOAT'], ['dimples', 'FLOAT'], ['freckles', 'FLOAT'], + ['moles', 'FLOAT'], ['skin_imperfections', 'FLOAT'], ['skin_acne', 'FLOAT'], ['tanned_skin', 'FLOAT'], + ['eyes_details', 'FLOAT'], ['iris_details', 'FLOAT'], ['circular_iris', 'FLOAT'], ['circular_pupil', 'FLOAT'], + ['light_type', 'COMBO', {"key": "light_type_list"}], ['light_direction', 'COMBO', {"key": "light_direction_list"}], ['light_weight', 'FLOAT'] + ] + widgets = {} + for i, obj in enumerate(keys): + if obj[1] == 'COMBO': + key = obj[2]['key'] if obj[2] and 'key' in obj[2] else obj[0] + _list = list[key].copy() + _list.insert(0, '-') + widgets[obj[0]] = (_list, {**obj[2]}) + elif obj[1] == 'FLOAT': + widgets[obj[0]] = ("FLOAT", {"default": 0, "step": 0.05, "min": 0, "max": max_float_value, "display": "slider",}) + elif obj[1] == 'INT': + widgets[obj[0]] = (obj[1], obj[2]) + del list + return { + "required": { + **widgets, + "photorealism_improvement": (["enable", "disable"],), + "prompt_start": ("STRING", {"multiline": True, "default": "raw photo, (realistic:1.5)"}), + "prompt_additional": ("STRING", {"multiline": True, "default": ""}), + "prompt_end": ("STRING", {"multiline": True, "default": ""}), + "negative_prompt": ("STRING", {"multiline": True, "default": ""}), + } + } + + RETURN_TYPES = ("STRING", "STRING",) + RETURN_NAMES = ("positive", "negative",) + + FUNCTION = "pm" + + CATEGORY = "EasyUse/Prompt" + + def pm(self, shot="-", shot_weight=1, gender="-", body_type="-", body_type_weight=0, eyes_color="-", + facial_expression="-", facial_expression_weight=0, face_shape="-", face_shape_weight=0, + nationality_1="-", nationality_2="-", nationality_mix=0.5, age=30, hair_style="-", hair_color="-", + disheveled=0, dimples=0, freckles=0, skin_pores=0, skin_details=0, moles=0, skin_imperfections=0, + wrinkles=0, tanned_skin=0, eyes_details=1, iris_details=1, circular_iris=1, circular_pupil=1, + facial_asymmetry=0, prompt_additional="", prompt_start="", prompt_end="", light_type="-", + light_direction="-", light_weight=0, negative_prompt="", photorealism_improvement="disable", beard="-", + model_pose="-", skin_acne=0): + + prompt = [] + + if gender == "-": + gender = "" + else: + if age <= 25 and gender == 'Woman': + gender = 'girl' + if age <= 25 and gender == 'Man': + gender = 'boy' + gender = " " + gender + " " + + if nationality_1 != '-' and nationality_2 != '-': + nationality = f"[{nationality_1}:{nationality_2}:{round(nationality_mix, 2)}]" + elif nationality_1 != '-': + nationality = nationality_1 + " " + elif nationality_2 != '-': + nationality = nationality_2 + " " + else: + nationality = "" + + if prompt_start != "": + prompt.append(f"{prompt_start}") + + if shot != "-" and shot_weight > 0: + prompt.append(f"({shot}:{round(shot_weight, 2)})") + + 
prompt.append(f"({nationality}{gender}{round(age)}-years-old:1.5)") + + if body_type != "-" and body_type_weight > 0: + prompt.append(f"({body_type}, {body_type} body:{round(body_type_weight, 2)})") + + if model_pose != "-": + prompt.append(f"({model_pose}:1.5)") + + if eyes_color != "-": + prompt.append(f"({eyes_color} eyes:1.25)") + + if facial_expression != "-" and facial_expression_weight > 0: + prompt.append( + f"({facial_expression}, {facial_expression} expression:{round(facial_expression_weight, 2)})") + + if face_shape != "-" and face_shape_weight > 0: + prompt.append(f"({face_shape} shape face:{round(face_shape_weight, 2)})") + + if hair_style != "-": + prompt.append(f"({hair_style} hairstyle:1.25)") + + if hair_color != "-": + prompt.append(f"({hair_color} hair:1.25)") + + if beard != "-": + prompt.append(f"({beard}:1.15)") + + if disheveled != "-" and disheveled > 0: + prompt.append(f"(disheveled:{round(disheveled, 2)})") + + if prompt_additional != "": + prompt.append(f"{prompt_additional}") + + if skin_details > 0: + prompt.append(f"(skin details, skin texture:{round(skin_details, 2)})") + + if skin_pores > 0: + prompt.append(f"(skin pores:{round(skin_pores, 2)})") + + if skin_imperfections > 0: + prompt.append(f"(skin imperfections:{round(skin_imperfections, 2)})") + + if skin_acne > 0: + prompt.append(f"(acne, skin with acne:{round(skin_acne, 2)})") + + if wrinkles > 0: + prompt.append(f"(skin imperfections:{round(wrinkles, 2)})") + + if tanned_skin > 0: + prompt.append(f"(tanned skin:{round(tanned_skin, 2)})") + + if dimples > 0: + prompt.append(f"(dimples:{round(dimples, 2)})") + + if freckles > 0: + prompt.append(f"(freckles:{round(freckles, 2)})") + + if moles > 0: + prompt.append(f"(skin pores:{round(moles, 2)})") + + if eyes_details > 0: + prompt.append(f"(eyes details:{round(eyes_details, 2)})") + + if iris_details > 0: + prompt.append(f"(iris details:{round(iris_details, 2)})") + + if circular_iris > 0: + prompt.append(f"(circular iris:{round(circular_iris, 2)})") + + if circular_pupil > 0: + prompt.append(f"(circular pupil:{round(circular_pupil, 2)})") + + if facial_asymmetry > 0: + prompt.append(f"(facial asymmetry, face asymmetry:{round(facial_asymmetry, 2)})") + + if light_type != '-' and light_weight > 0: + if light_direction != '-': + prompt.append(f"({light_type} {light_direction}:{round(light_weight, 2)})") + else: + prompt.append(f"({light_type}:{round(light_weight, 2)})") + + if prompt_end != "": + prompt.append(f"{prompt_end}") + + prompt = ", ".join(prompt) + prompt = prompt.lower() + + if photorealism_improvement == "enable": + prompt = prompt + ", (professional photo, balanced photo, balanced exposure:1.2), (film grain:1.15)" + + if photorealism_improvement == "enable": + negative_prompt = negative_prompt + ", (shinny skin, reflections on the skin, skin reflections:1.25)" + + log_node_info("Portrait Master as generate the prompt:", prompt) + + return (prompt, negative_prompt,) + +# ---------------------------------------------------------------提示词 结束----------------------------------------------------------------------# + +# ---------------------------------------------------------------潜空间 开始----------------------------------------------------------------------# +# 潜空间sigma相乘 +class latentNoisy: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS,), + "steps": ("INT", {"default": 10000, "min": 0, "max": 10000}), + "start_at_step": ("INT", 
{"default": 0, "min": 0, "max": 10000}), + "end_at_step": ("INT", {"default": 10000, "min": 1, "max": 10000}), + "source": (["CPU", "GPU"],), + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + }, + "optional": { + "pipe": ("PIPE_LINE",), + "optional_model": ("MODEL",), + "optional_latent": ("LATENT",) + }} + + RETURN_TYPES = ("PIPE_LINE", "LATENT", "FLOAT",) + RETURN_NAMES = ("pipe", "latent", "sigma",) + FUNCTION = "run" + + CATEGORY = "EasyUse/Latent" + + def run(self, sampler_name, scheduler, steps, start_at_step, end_at_step, source, seed, pipe=None, optional_model=None, optional_latent=None): + model = optional_model if optional_model is not None else pipe["model"] + batch_size = pipe["loader_settings"]["batch_size"] + empty_latent_height = pipe["loader_settings"]["empty_latent_height"] + empty_latent_width = pipe["loader_settings"]["empty_latent_width"] + + if optional_latent is not None: + samples = optional_latent + else: + torch.manual_seed(seed) + if source == "CPU": + device = "cpu" + else: + device = comfy.model_management.get_torch_device() + noise = torch.randn((batch_size, 4, empty_latent_height // 8, empty_latent_width // 8), dtype=torch.float32, + device=device).cpu() + + samples = {"samples": noise} + + device = comfy.model_management.get_torch_device() + end_at_step = min(steps, end_at_step) + start_at_step = min(start_at_step, end_at_step) + comfy.model_management.load_model_gpu(model) + model_patcher = comfy.model_patcher.ModelPatcher(model.model, load_device=device, offload_device=comfy.model_management.unet_offload_device()) + sampler = comfy.samplers.KSampler(model_patcher, steps=steps, device=device, sampler=sampler_name, + scheduler=scheduler, denoise=1.0, model_options=model.model_options) + sigmas = sampler.sigmas + sigma = sigmas[start_at_step] - sigmas[end_at_step] + sigma /= model.model.latent_format.scale_factor + sigma = sigma.cpu().numpy() + + samples_out = samples.copy() + + s1 = samples["samples"] + samples_out["samples"] = s1 * sigma + + if pipe is None: + pipe = {} + new_pipe = { + **pipe, + "samples": samples_out + } + del pipe + + return (new_pipe, samples_out, sigma) + +# Latent遮罩复合 +class latentCompositeMaskedWithCond: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "pipe": ("PIPE_LINE",), + "text_combine": ("LIST",), + "source_latent": ("LATENT",), + "source_mask": ("MASK",), + "destination_mask": ("MASK",), + "text_combine_mode": (["add", "replace", "cover"], {"default": "add"}), + "replace_text": ("STRING", {"default": ""}) + }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID"}, + } + + OUTPUT_IS_LIST = (False, False, True) + RETURN_TYPES = ("PIPE_LINE", "LATENT", "CONDITIONING") + RETURN_NAMES = ("pipe", "latent", "conditioning",) + FUNCTION = "run" + + CATEGORY = "EasyUse/Latent" + + def run(self, pipe, text_combine, source_latent, source_mask, destination_mask, text_combine_mode, replace_text, prompt=None, extra_pnginfo=None, my_unique_id=None): + positive = None + clip = pipe["clip"] + destination_latent = pipe["samples"] + + conds = [] + + for text in text_combine: + if text_combine_mode == 'cover': + positive = text + elif text_combine_mode == 'replace' and replace_text != '': + positive = pipe["loader_settings"]["positive"].replace(replace_text, text) + else: + positive = pipe["loader_settings"]["positive"] + ',' + text + positive_token_normalization = pipe["loader_settings"]["positive_token_normalization"] + positive_weight_interpretation = 
pipe["loader_settings"]["positive_weight_interpretation"] + a1111_prompt_style = pipe["loader_settings"]["a1111_prompt_style"] + positive_cond = pipe["positive"] + + log_node_warn("正在处理提示词编码...") + steps = pipe["loader_settings"]["steps"] if "steps" in pipe["loader_settings"] else 1 + positive_embeddings_final = advanced_encode(clip, positive, + positive_token_normalization, + positive_weight_interpretation, w_max=1.0, + apply_to_pooled='enable', a1111_prompt_style=a1111_prompt_style, steps=steps) + + # source cond + (cond_1,) = ConditioningSetMask().append(positive_cond, source_mask, "default", 1) + (cond_2,) = ConditioningSetMask().append(positive_embeddings_final, destination_mask, "default", 1) + positive_cond = cond_1 + cond_2 + + conds.append(positive_cond) + # latent composite masked + (samples,) = LatentCompositeMasked().composite(destination_latent, source_latent, 0, 0, False) + + new_pipe = { + **pipe, + "samples": samples, + "loader_settings": { + **pipe["loader_settings"], + "positive": positive, + } + } + + del pipe + + return (new_pipe, samples, conds) + +# 噪声注入到潜空间 +class injectNoiseToLatent: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "strength": ("FLOAT", {"default": 0.1, "min": 0.0, "max": 200.0, "step": 0.0001}), + "normalize": ("BOOLEAN", {"default": False}), + "average": ("BOOLEAN", {"default": False}), + }, + "optional": { + "pipe_to_noise": ("PIPE_LINE",), + "image_to_latent": ("IMAGE",), + "latent": ("LATENT",), + "noise": ("LATENT",), + "mask": ("MASK",), + "mix_randn_amount": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1000.0, "step": 0.001}), + # "seed": ("INT", {"default": 123, "min": 0, "max": 0xffffffffffffffff, "step": 1}), + } + } + + RETURN_TYPES = ("LATENT",) + FUNCTION = "inject" + CATEGORY = "EasyUse/Latent" + + def inject(self,strength, normalize, average, pipe_to_noise=None, noise=None, image_to_latent=None, latent=None, mix_randn_amount=0, mask=None): + + vae = pipe_to_noise["vae"] if pipe_to_noise is not None else pipe_to_noise["vae"] + batch_size = pipe_to_noise["loader_settings"]["batch_size"] if pipe_to_noise is not None and "batch_size" in pipe_to_noise["loader_settings"] else 1 + if noise is None and pipe_to_noise is not None: + noise = pipe_to_noise["samples"] + elif noise is None: + raise Exception("InjectNoiseToLatent: No noise provided") + + if image_to_latent is not None and vae is not None: + samples = {"samples": vae.encode(image_to_latent[:, :, :, :3])} + latents = RepeatLatentBatch().repeat(samples, batch_size)[0] + elif latent is not None: + latents = latent + else: + raise Exception("InjectNoiseToLatent: No input latent provided") + + samples = latents.copy() + if latents["samples"].shape != noise["samples"].shape: + raise ValueError("InjectNoiseToLatent: Latent and noise must have the same shape") + if average: + noised = (samples["samples"].clone() + noise["samples"].clone()) / 2 + else: + noised = samples["samples"].clone() + noise["samples"].clone() * strength + if normalize: + noised = noised / noised.std() + if mask is not None: + mask = torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), + size=(noised.shape[2], noised.shape[3]), mode="bilinear") + mask = mask.expand((-1, noised.shape[1], -1, -1)) + if mask.shape[0] < noised.shape[0]: + mask = mask.repeat((noised.shape[0] - 1) // mask.shape[0] + 1, 1, 1, 1)[:noised.shape[0]] + noised = mask * noised + (1 - mask) * latents["samples"] + if mix_randn_amount > 0: + # if seed is not None: + # torch.manual_seed(seed) + 
rand_noise = torch.randn_like(noised) + noised = ((1 - mix_randn_amount) * noised + mix_randn_amount * + rand_noise) / ((mix_randn_amount ** 2 + (1 - mix_randn_amount) ** 2) ** 0.5) + samples["samples"] = noised + return (samples,) + +# ---------------------------------------------------------------潜空间 结束----------------------------------------------------------------------# + +# ---------------------------------------------------------------随机种 开始----------------------------------------------------------------------# +# 随机种 +class easySeed: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "seed": ("INT", {"default": 0, "min": 0, "max": MAX_SEED_NUM}), + }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID"}, + } + + RETURN_TYPES = ("INT",) + RETURN_NAMES = ("seed",) + FUNCTION = "doit" + + CATEGORY = "EasyUse/Seed" + + def doit(self, seed=0, prompt=None, extra_pnginfo=None, my_unique_id=None): + return seed, + +# 全局随机种 +class globalSeed: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "value": ("INT", {"default": 0, "min": 0, "max": MAX_SEED_NUM}), + "mode": ("BOOLEAN", {"default": True, "label_on": "control_before_generate", "label_off": "control_after_generate"}), + "action": (["fixed", "increment", "decrement", "randomize", + "increment for each node", "decrement for each node", "randomize for each node"], ), + "last_seed": ("STRING", {"default": ""}), + } + } + + RETURN_TYPES = () + FUNCTION = "doit" + + CATEGORY = "EasyUse/Seed" + + OUTPUT_NODE = True + + def doit(self, **kwargs): + return {} + +# ---------------------------------------------------------------随机种 结束----------------------------------------------------------------------# + +# ---------------------------------------------------------------加载器 开始----------------------------------------------------------------------# +class setCkptName: + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "ckpt_name": (folder_paths.get_filename_list("checkpoints"),), + } + } + + RETURN_TYPES = (AlwaysEqualProxy('*'),) + RETURN_NAMES = ("ckpt_name",) + FUNCTION = "set_name" + CATEGORY = "EasyUse/Util" + + def set_name(self, ckpt_name): + return (ckpt_name,) + +class setControlName: + + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "controlnet_name": (folder_paths.get_filename_list("controlnet"),), + } + } + + RETURN_TYPES = (AlwaysEqualProxy('*'),) + RETURN_NAMES = ("controlnet_name",) + FUNCTION = "set_name" + CATEGORY = "EasyUse/Util" + + def set_name(self, controlnet_name): + return (controlnet_name,) + +# 简易加载器完整 +resolution_strings = [f"{width} x {height} (custom)" if width == 'width' and height == 'height' else f"{width} x {height}" for width, height in BASE_RESOLUTIONS] +class fullLoader: + + @classmethod + def INPUT_TYPES(cls): + a1111_prompt_style_default = False + + return {"required": { + "ckpt_name": (folder_paths.get_filename_list("checkpoints"),), + "config_name": (["Default", ] + folder_paths.get_filename_list("configs"), {"default": "Default"}), + "vae_name": (["Baked VAE"] + folder_paths.get_filename_list("vae"),), + "clip_skip": ("INT", {"default": -1, "min": -24, "max": 0, "step": 1}), + + "lora_name": (["None"] + folder_paths.get_filename_list("loras"),), + "lora_model_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + "lora_clip_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + + "resolution": (resolution_strings,), + "empty_latent_width": ("INT", 
{"default": 512, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + "empty_latent_height": ("INT", {"default": 512, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + + "positive": ("STRING", {"default": "", "placeholder": "Positive", "multiline": True}), + "positive_token_normalization": (["none", "mean", "length", "length+mean"],), + "positive_weight_interpretation": (["comfy", "A1111", "comfy++", "compel", "fixed attention"],), + + "negative": ("STRING", {"default": "", "placeholder": "Negative", "multiline": True}), + "negative_token_normalization": (["none", "mean", "length", "length+mean"],), + "negative_weight_interpretation": (["comfy", "A1111", "comfy++", "compel", "fixed attention"],), + + "batch_size": ("INT", {"default": 1, "min": 1, "max": 64}), + }, + "optional": {"model_override": ("MODEL",), "clip_override": ("CLIP",), "vae_override": ("VAE",), "optional_lora_stack": ("LORA_STACK",), "optional_controlnet_stack": ("CONTROL_NET_STACK",), "a1111_prompt_style": ("BOOLEAN", {"default": a1111_prompt_style_default}),}, + "hidden": {"prompt": "PROMPT", "my_unique_id": "UNIQUE_ID"} + } + + RETURN_TYPES = ("PIPE_LINE", "MODEL", "VAE", "CLIP", "CONDITIONING", "CONDITIONING", "LATENT") + RETURN_NAMES = ("pipe", "model", "vae", "clip", "positive", "negative", "latent") + + FUNCTION = "adv_pipeloader" + CATEGORY = "EasyUse/Loaders" + + def adv_pipeloader(self, ckpt_name, config_name, vae_name, clip_skip, + lora_name, lora_model_strength, lora_clip_strength, + resolution, empty_latent_width, empty_latent_height, + positive, positive_token_normalization, positive_weight_interpretation, + negative, negative_token_normalization, negative_weight_interpretation, + batch_size, model_override=None, clip_override=None, vae_override=None, optional_lora_stack=None, optional_controlnet_stack=None, a1111_prompt_style=False, prompt=None, + my_unique_id=None + ): + + # Clean models from loaded_objects + easyCache.update_loaded_objects(prompt) + + # Load models + log_node_warn("正在加载模型...") + model, clip, vae, clip_vision, lora_stack = easyCache.load_main(ckpt_name, config_name, vae_name, lora_name, lora_model_strength, lora_clip_strength, optional_lora_stack, model_override, clip_override, vae_override, prompt) + + # Create Empty Latent + model_type = get_sd_version(model) + sd3 = True if model_type == "sd3" else False + samples = sampler.emptyLatent(resolution, empty_latent_width, empty_latent_height, batch_size, sd3=sd3) + + # Prompt to Conditioning + positive_embeddings_final, positive_wildcard_prompt, model, clip = prompt_to_cond('positive', model, clip, clip_skip, lora_stack, positive, positive_token_normalization, positive_weight_interpretation, a1111_prompt_style, my_unique_id, prompt, easyCache, model_type=model_type) + negative_embeddings_final, negative_wildcard_prompt, model, clip = prompt_to_cond('negative', model, clip, clip_skip, lora_stack, negative, negative_token_normalization, negative_weight_interpretation, a1111_prompt_style, my_unique_id, prompt, easyCache, model_type=model_type) + + # Conditioning add controlnet + if optional_controlnet_stack is not None and len(optional_controlnet_stack) > 0: + for controlnet in optional_controlnet_stack: + positive_embeddings_final, negative_embeddings_final = easyControlnet().apply(controlnet[0], controlnet[5], positive_embeddings_final, negative_embeddings_final, controlnet[1], start_percent=controlnet[2], end_percent=controlnet[3], control_net=None, scale_soft_weights=controlnet[4], mask=None, easyCache=easyCache, use_cache=True, model=model, 
vae=vae) + + log_node_warn("加载完毕...") + pipe = { + "model": model, + "positive": positive_embeddings_final, + "negative": negative_embeddings_final, + "vae": vae, + "clip": clip, + + "samples": samples, + "images": None, + + "loader_settings": { + "ckpt_name": ckpt_name, + "vae_name": vae_name, + "lora_name": lora_name, + "lora_model_strength": lora_model_strength, + "lora_clip_strength": lora_clip_strength, + "lora_stack": lora_stack, + + "clip_skip": clip_skip, + "a1111_prompt_style": a1111_prompt_style, + "positive": positive, + "positive_token_normalization": positive_token_normalization, + "positive_weight_interpretation": positive_weight_interpretation, + "negative": negative, + "negative_token_normalization": negative_token_normalization, + "negative_weight_interpretation": negative_weight_interpretation, + "resolution": resolution, + "empty_latent_width": empty_latent_width, + "empty_latent_height": empty_latent_height, + "batch_size": batch_size, + } + } + + return {"ui": {"positive": positive_wildcard_prompt, "negative": negative_wildcard_prompt}, "result": (pipe, model, vae, clip, positive_embeddings_final, negative_embeddings_final, samples)} + +# A1111简易加载器 +class a1111Loader(fullLoader): + @classmethod + def INPUT_TYPES(cls): + a1111_prompt_style_default = False + checkpoints = folder_paths.get_filename_list("checkpoints") + loras = ["None"] + folder_paths.get_filename_list("loras") + return { + "required": { + "ckpt_name": (checkpoints,), + "vae_name": (["Baked VAE"] + folder_paths.get_filename_list("vae"),), + "clip_skip": ("INT", {"default": -1, "min": -24, "max": 0, "step": 1}), + + "lora_name": (loras,), + "lora_model_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + "lora_clip_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + + "resolution": (resolution_strings, {"default": "512 x 512"}), + "empty_latent_width": ("INT", {"default": 512, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + "empty_latent_height": ("INT", {"default": 512, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + + "positive": ("STRING", {"default":"", "placeholder": "Positive", "multiline": True}), + "negative": ("STRING", {"default":"", "placeholder": "Negative", "multiline": True}), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 64}), + }, + "optional": { + "optional_lora_stack": ("LORA_STACK",), + "optional_controlnet_stack": ("CONTROL_NET_STACK",), + "a1111_prompt_style": ("BOOLEAN", {"default": a1111_prompt_style_default}), + }, + "hidden": {"prompt": "PROMPT", "my_unique_id": "UNIQUE_ID"} + } + + RETURN_TYPES = ("PIPE_LINE", "MODEL", "VAE") + RETURN_NAMES = ("pipe", "model", "vae") + + FUNCTION = "a1111loader" + CATEGORY = "EasyUse/Loaders" + + def a1111loader(self, ckpt_name, vae_name, clip_skip, + lora_name, lora_model_strength, lora_clip_strength, + resolution, empty_latent_width, empty_latent_height, + positive, negative, batch_size, optional_lora_stack=None, optional_controlnet_stack=None, a1111_prompt_style=False, prompt=None, + my_unique_id=None): + + return super().adv_pipeloader(ckpt_name, 'Default', vae_name, clip_skip, + lora_name, lora_model_strength, lora_clip_strength, + resolution, empty_latent_width, empty_latent_height, + positive, 'mean', 'A1111', + negative,'mean','A1111', + batch_size, None, None, None, optional_lora_stack=optional_lora_stack, optional_controlnet_stack=optional_controlnet_stack,a1111_prompt_style=a1111_prompt_style, prompt=prompt, + my_unique_id=my_unique_id + ) + +# Comfy简易加载器 +class 
comfyLoader(fullLoader): + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "ckpt_name": (folder_paths.get_filename_list("checkpoints"),), + "vae_name": (["Baked VAE"] + folder_paths.get_filename_list("vae"),), + "clip_skip": ("INT", {"default": -1, "min": -24, "max": 0, "step": 1}), + + "lora_name": (["None"] + folder_paths.get_filename_list("loras"),), + "lora_model_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + "lora_clip_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + + "resolution": (resolution_strings, {"default": "512 x 512"}), + "empty_latent_width": ("INT", {"default": 512, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + "empty_latent_height": ("INT", {"default": 512, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + + "positive": ("STRING", {"default": "", "placeholder": "Positive", "multiline": True}), + "negative": ("STRING", {"default": "", "placeholder": "Negative", "multiline": True}), + + "batch_size": ("INT", {"default": 1, "min": 1, "max": 64}), + }, + "optional": {"optional_lora_stack": ("LORA_STACK",), "optional_controlnet_stack": ("CONTROL_NET_STACK",),}, + "hidden": {"prompt": "PROMPT", "my_unique_id": "UNIQUE_ID"} + } + + RETURN_TYPES = ("PIPE_LINE", "MODEL", "VAE") + RETURN_NAMES = ("pipe", "model", "vae") + + FUNCTION = "comfyloader" + CATEGORY = "EasyUse/Loaders" + + def comfyloader(self, ckpt_name, vae_name, clip_skip, + lora_name, lora_model_strength, lora_clip_strength, + resolution, empty_latent_width, empty_latent_height, + positive, negative, batch_size, optional_lora_stack=None, optional_controlnet_stack=None, prompt=None, + my_unique_id=None): + return super().adv_pipeloader(ckpt_name, 'Default', vae_name, clip_skip, + lora_name, lora_model_strength, lora_clip_strength, + resolution, empty_latent_width, empty_latent_height, + positive, 'none', 'comfy', + negative, 'none', 'comfy', + batch_size, None, None, None, optional_lora_stack=optional_lora_stack, optional_controlnet_stack=optional_controlnet_stack, a1111_prompt_style=False, prompt=prompt, + my_unique_id=my_unique_id + ) + +# hydit简易加载器 +class hunyuanDiTLoader(fullLoader): + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "ckpt_name": (folder_paths.get_filename_list("checkpoints"),), + "vae_name": (["Baked VAE"] + folder_paths.get_filename_list("vae"),), + + "lora_name": (["None"] + folder_paths.get_filename_list("loras"),), + "lora_model_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + "lora_clip_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + + "resolution": (resolution_strings, {"default": "1024 x 1024"}), + "empty_latent_width": ("INT", {"default": 1024, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + "empty_latent_height": ("INT", {"default": 1024, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + + "positive": ("STRING", {"default": "", "placeholder": "Positive", "multiline": True}), + "negative": ("STRING", {"default": "", "placeholder": "Negative", "multiline": True}), + + "batch_size": ("INT", {"default": 1, "min": 1, "max": 64}), + }, + "optional": {"optional_lora_stack": ("LORA_STACK",), "optional_controlnet_stack": ("CONTROL_NET_STACK",),}, + "hidden": {"prompt": "PROMPT", "my_unique_id": "UNIQUE_ID"} + } + + RETURN_TYPES = ("PIPE_LINE", "MODEL", "VAE") + RETURN_NAMES = ("pipe", "model", "vae") + + FUNCTION = "hyditloader" + CATEGORY = "EasyUse/Loaders" + + def hyditloader(self, ckpt_name, vae_name, + lora_name, 
lora_model_strength, lora_clip_strength, + resolution, empty_latent_width, empty_latent_height, + positive, negative, batch_size, optional_lora_stack=None, optional_controlnet_stack=None, prompt=None, + my_unique_id=None): + + return super().adv_pipeloader(ckpt_name, 'Default', vae_name, 0, + lora_name, lora_model_strength, lora_clip_strength, + resolution, empty_latent_width, empty_latent_height, + positive, 'none', 'comfy', + negative, 'none', 'comfy', + batch_size, None, None, None, optional_lora_stack=optional_lora_stack, optional_controlnet_stack=optional_controlnet_stack, a1111_prompt_style=False, prompt=prompt, + my_unique_id=my_unique_id + ) + +# stable Cascade +class cascadeLoader: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(s): + + return {"required": { + "stage_c": (folder_paths.get_filename_list("unet") + folder_paths.get_filename_list("checkpoints"),), + "stage_b": (folder_paths.get_filename_list("unet") + folder_paths.get_filename_list("checkpoints"),), + "stage_a": (["Baked VAE"]+folder_paths.get_filename_list("vae"),), + "clip_name": (["None"] + folder_paths.get_filename_list("clip"),), + + "lora_name": (["None"] + folder_paths.get_filename_list("loras"),), + "lora_model_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + "lora_clip_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + + "resolution": (resolution_strings, {"default": "1024 x 1024"}), + "empty_latent_width": ("INT", {"default": 1024, "min": 16, "max": MAX_RESOLUTION, "step": 8}), + "empty_latent_height": ("INT", {"default": 1024, "min": 16, "max": MAX_RESOLUTION, "step": 8}), + "compression": ("INT", {"default": 42, "min": 32, "max": 64, "step": 1}), + + "positive": ("STRING", {"default":"", "placeholder": "Positive", "multiline": True}), + "negative": ("STRING", {"default":"", "placeholder": "Negative", "multiline": True}), + + "batch_size": ("INT", {"default": 1, "min": 1, "max": 64}), + }, + "optional": {"optional_lora_stack": ("LORA_STACK",), }, + "hidden": {"prompt": "PROMPT", "my_unique_id": "UNIQUE_ID"} + } + + RETURN_TYPES = ("PIPE_LINE", "MODEL", "LATENT", "VAE") + RETURN_NAMES = ("pipe", "model_c", "latent_c", "vae") + + FUNCTION = "adv_pipeloader" + CATEGORY = "EasyUse/Loaders" + + def is_ckpt(self, name): + is_ckpt = False + path = folder_paths.get_full_path("checkpoints", name) + if path is not None: + is_ckpt = True + return is_ckpt + + def adv_pipeloader(self, stage_c, stage_b, stage_a, clip_name, lora_name, lora_model_strength, lora_clip_strength, + resolution, empty_latent_width, empty_latent_height, compression, + positive, negative, batch_size, optional_lora_stack=None,prompt=None, + my_unique_id=None): + + vae: VAE | None = None + model_c: ModelPatcher | None = None + model_b: ModelPatcher | None = None + clip: CLIP | None = None + can_load_lora = True + pipe_lora_stack = [] + + # Clean models from loaded_objects + easyCache.update_loaded_objects(prompt) + + # Create Empty Latent + samples = sampler.emptyLatent(resolution, empty_latent_width, empty_latent_height, batch_size, compression) + + if self.is_ckpt(stage_c): + model_c, clip, vae_c, clip_vision = easyCache.load_checkpoint(stage_c) + else: + model_c = easyCache.load_unet(stage_c) + vae_c = None + if self.is_ckpt(stage_b): + model_b, clip, vae_b, clip_vision = easyCache.load_checkpoint(stage_b) + else: + model_b = easyCache.load_unet(stage_b) + vae_b = None + + if optional_lora_stack is not None and can_load_lora: + for lora in optional_lora_stack: + 
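+                # Each LORA_STACK entry is expected to be (lora_name, model_strength, clip_strength);
+                # it is expanded into the dict form easyCache.load_lora takes, applied to the stage_c
+                # model and clip, and recorded in pipe_lora_stack so later nodes can see what was loaded.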
lora = {"lora_name": lora[0], "model": model_c, "clip": clip, "model_strength": lora[1], "clip_strength": lora[2]} + model_c, clip = easyCache.load_lora(lora) + lora['model'] = model_c + lora['clip'] = clip + pipe_lora_stack.append(lora) + + if lora_name != "None" and can_load_lora: + lora = {"lora_name": lora_name, "model": model_c, "clip": clip, "model_strength": lora_model_strength, + "clip_strength": lora_clip_strength} + model_c, clip = easyCache.load_lora(lora) + pipe_lora_stack.append(lora) + + model = (model_c, model_b) + # Load clip + if clip_name != 'None': + clip = easyCache.load_clip(clip_name, "stable_cascade") + # Load vae + if stage_a not in ["Baked VAE", "Baked-VAE"]: + vae_b = easyCache.load_vae(stage_a) + + vae = (vae_c, vae_b) + # 判断是否连接 styles selector + is_positive_linked_styles_selector = is_linked_styles_selector(prompt, my_unique_id, 'positive') + is_negative_linked_styles_selector = is_linked_styles_selector(prompt, my_unique_id, 'negative') + + log_node_warn("正在处理提示词...") + positive_seed = find_wildcards_seed(my_unique_id, positive, prompt) + # Translate cn to en + if has_chinese(positive): + positive = zh_to_en([positive])[0] + model_c, clip, positive, positive_decode, show_positive_prompt, pipe_lora_stack = process_with_loras(positive, + model_c, clip, + "positive", + positive_seed, + can_load_lora, + pipe_lora_stack, + easyCache) + positive_wildcard_prompt = positive_decode if show_positive_prompt or is_positive_linked_styles_selector else "" + negative_seed = find_wildcards_seed(my_unique_id, negative, prompt) + # Translate cn to en + if has_chinese(negative): + negative = zh_to_en([negative])[0] + model_c, clip, negative, negative_decode, show_negative_prompt, pipe_lora_stack = process_with_loras(negative, + model_c, clip, + "negative", + negative_seed, + can_load_lora, + pipe_lora_stack, + easyCache) + negative_wildcard_prompt = negative_decode if show_negative_prompt or is_negative_linked_styles_selector else "" + + tokens = clip.tokenize(positive) + cond, pooled = clip.encode_from_tokens(tokens, return_pooled=True) + positive_embeddings_final = [[cond, {"pooled_output": pooled}]] + + tokens = clip.tokenize(negative) + cond, pooled = clip.encode_from_tokens(tokens, return_pooled=True) + negative_embeddings_final = [[cond, {"pooled_output": pooled}]] + + image = easySampler.pil2tensor(Image.new('RGB', (1, 1), (0, 0, 0))) + + log_node_warn("处理结束...") + pipe = { + "model": model, + "positive": positive_embeddings_final, + "negative": negative_embeddings_final, + "vae": vae, + "clip": clip, + + "samples": samples, + "images": image, + "seed": 0, + + "loader_settings": { + "vae_name": stage_a, + "lora_name": lora_name, + "lora_model_strength": lora_model_strength, + "lora_clip_strength": lora_clip_strength, + "lora_stack": pipe_lora_stack, + + "positive": positive, + "positive_token_normalization": 'none', + "positive_weight_interpretation": 'comfy', + "negative": negative, + "negative_token_normalization": 'none', + "negative_weight_interpretation": 'comfy', + "resolution": resolution, + "empty_latent_width": empty_latent_width, + "empty_latent_height": empty_latent_height, + "batch_size": batch_size, + "compression": compression + } + } + + return {"ui": {"positive": positive_wildcard_prompt, "negative": negative_wildcard_prompt}, + "result": (pipe, model_c, model_b, vae)} + +# Zero123简易加载器 (3D) +try: + from comfy_extras.nodes_stable3d import camera_embeddings +except FileNotFoundError: + log_node_error("EasyUse[zero123Loader]", "请更新ComfyUI到最新版本") + +class 
zero123Loader: + + @classmethod + def INPUT_TYPES(cls): + def get_file_list(filenames): + return [file for file in filenames if file != "put_models_here.txt" and "zero123" in file.lower()] + + return {"required": { + "ckpt_name": (get_file_list(folder_paths.get_filename_list("checkpoints")),), + "vae_name": (["Baked VAE"] + folder_paths.get_filename_list("vae"),), + + "init_image": ("IMAGE",), + "empty_latent_width": ("INT", {"default": 256, "min": 16, "max": MAX_RESOLUTION, "step": 8}), + "empty_latent_height": ("INT", {"default": 256, "min": 16, "max": MAX_RESOLUTION, "step": 8}), + + "batch_size": ("INT", {"default": 1, "min": 1, "max": 64}), + + "elevation": ("FLOAT", {"default": 0.0, "min": -180.0, "max": 180.0}), + "azimuth": ("FLOAT", {"default": 0.0, "min": -180.0, "max": 180.0}), + }, + "hidden": {"prompt": "PROMPT", "my_unique_id": "UNIQUE_ID"} + } + + RETURN_TYPES = ("PIPE_LINE", "MODEL", "VAE") + RETURN_NAMES = ("pipe", "model", "vae") + + FUNCTION = "adv_pipeloader" + CATEGORY = "EasyUse/Loaders" + + def adv_pipeloader(self, ckpt_name, vae_name, init_image, empty_latent_width, empty_latent_height, batch_size, elevation, azimuth, prompt=None, my_unique_id=None): + model: ModelPatcher | None = None + vae: VAE | None = None + clip: CLIP | None = None + clip_vision = None + + # Clean models from loaded_objects + easyCache.update_loaded_objects(prompt) + + model, clip, vae, clip_vision = easyCache.load_checkpoint(ckpt_name, "Default", True) + + output = clip_vision.encode_image(init_image) + pooled = output.image_embeds.unsqueeze(0) + pixels = comfy.utils.common_upscale(init_image.movedim(-1, 1), empty_latent_width, empty_latent_height, "bilinear", "center").movedim(1, -1) + encode_pixels = pixels[:, :, :, :3] + t = vae.encode(encode_pixels) + cam_embeds = camera_embeddings(elevation, azimuth) + cond = torch.cat([pooled, cam_embeds.repeat((pooled.shape[0], 1, 1))], dim=-1) + + positive = [[cond, {"concat_latent_image": t}]] + negative = [[torch.zeros_like(pooled), {"concat_latent_image": torch.zeros_like(t)}]] + latent = torch.zeros([batch_size, 4, empty_latent_height // 8, empty_latent_width // 8]) + samples = {"samples": latent} + + image = easySampler.pil2tensor(Image.new('RGB', (1, 1), (0, 0, 0))) + + pipe = {"model": model, + "positive": positive, + "negative": negative, + "vae": vae, + "clip": clip, + + "samples": samples, + "images": image, + "seed": 0, + + "loader_settings": {"ckpt_name": ckpt_name, + "vae_name": vae_name, + + "positive": positive, + "negative": negative, + "empty_latent_width": empty_latent_width, + "empty_latent_height": empty_latent_height, + "batch_size": batch_size, + "seed": 0, + } + } + + return (pipe, model, vae) + +# SV3D加载器 +class sv3DLoader(EasingBase): + + def __init__(self): + super().__init__() + + @classmethod + def INPUT_TYPES(cls): + def get_file_list(filenames): + return [file for file in filenames if file != "put_models_here.txt" and "sv3d" in file] + + return {"required": { + "ckpt_name": (get_file_list(folder_paths.get_filename_list("checkpoints")),), + "vae_name": (["Baked VAE"] + folder_paths.get_filename_list("vae"),), + + "init_image": ("IMAGE",), + "empty_latent_width": ("INT", {"default": 576, "min": 16, "max": MAX_RESOLUTION, "step": 8}), + "empty_latent_height": ("INT", {"default": 576, "min": 16, "max": MAX_RESOLUTION, "step": 8}), + + "batch_size": ("INT", {"default": 21, "min": 1, "max": 4096}), + "interp_easing": (["linear", "ease_in", "ease_out", "ease_in_out"], {"default": "linear"}), + "easing_mode": (["azimuth", 
"elevation", "custom"], {"default": "azimuth"}), + }, + "optional": {"scheduler": ("STRING", {"default": "", "multiline": True})}, + "hidden": {"prompt": "PROMPT", "my_unique_id": "UNIQUE_ID"} + } + + RETURN_TYPES = ("PIPE_LINE", "MODEL", "STRING") + RETURN_NAMES = ("pipe", "model", "interp_log") + + FUNCTION = "adv_pipeloader" + CATEGORY = "EasyUse/Loaders" + + def adv_pipeloader(self, ckpt_name, vae_name, init_image, empty_latent_width, empty_latent_height, batch_size, interp_easing, easing_mode, scheduler='',prompt=None, my_unique_id=None): + model: ModelPatcher | None = None + vae: VAE | None = None + clip: CLIP | None = None + + # Clean models from loaded_objects + easyCache.update_loaded_objects(prompt) + + model, clip, vae, clip_vision = easyCache.load_checkpoint(ckpt_name, "Default", True) + + output = clip_vision.encode_image(init_image) + pooled = output.image_embeds.unsqueeze(0) + pixels = comfy.utils.common_upscale(init_image.movedim(-1, 1), empty_latent_width, empty_latent_height, "bilinear", "center").movedim(1, + -1) + encode_pixels = pixels[:, :, :, :3] + t = vae.encode(encode_pixels) + + azimuth_points = [] + elevation_points = [] + if easing_mode == 'azimuth': + azimuth_points = [(0, 0), (batch_size-1, 360)] + elevation_points = [(0, 0)] * batch_size + elif easing_mode == 'elevation': + azimuth_points = [(0, 0)] * batch_size + elevation_points = [(0, -90), (batch_size-1, 90)] + else: + schedulers = scheduler.rstrip('\n') + for line in schedulers.split('\n'): + frame_str, point_str = line.split(':') + point_str = point_str.strip()[1:-1] + point = point_str.split(',') + azimuth_point = point[0] + elevation_point = point[1] if point[1] else 0.0 + frame = int(frame_str.strip()) + azimuth = float(azimuth_point) + azimuth_points.append((frame, azimuth)) + elevation_val = float(elevation_point) + elevation_points.append((frame, elevation_val)) + azimuth_points.sort(key=lambda x: x[0]) + elevation_points.sort(key=lambda x: x[0]) + + #interpolation + next_point = 1 + next_elevation_point = 1 + elevations = [] + azimuths = [] + # For azimuth interpolation + for i in range(batch_size): + # Find the interpolated azimuth for the current frame + while next_point < len(azimuth_points) and i >= azimuth_points[next_point][0]: + next_point += 1 + if next_point == len(azimuth_points): + next_point -= 1 + prev_point = max(next_point - 1, 0) + + if azimuth_points[next_point][0] != azimuth_points[prev_point][0]: + timing = (i - azimuth_points[prev_point][0]) / ( + azimuth_points[next_point][0] - azimuth_points[prev_point][0]) + interpolated_azimuth = self.ease(azimuth_points[prev_point][1], azimuth_points[next_point][1], self.easing(timing, interp_easing)) + else: + interpolated_azimuth = azimuth_points[prev_point][1] + + # Interpolate the elevation + next_elevation_point = 1 + while next_elevation_point < len(elevation_points) and i >= elevation_points[next_elevation_point][0]: + next_elevation_point += 1 + if next_elevation_point == len(elevation_points): + next_elevation_point -= 1 + prev_elevation_point = max(next_elevation_point - 1, 0) + + if elevation_points[next_elevation_point][0] != elevation_points[prev_elevation_point][0]: + timing = (i - elevation_points[prev_elevation_point][0]) / ( + elevation_points[next_elevation_point][0] - elevation_points[prev_elevation_point][0]) + interpolated_elevation = self.ease(elevation_points[prev_point][1], elevation_points[next_point][1], self.easing(timing, interp_easing)) + else: + interpolated_elevation = 
elevation_points[prev_elevation_point][1] + + azimuths.append(interpolated_azimuth) + elevations.append(interpolated_elevation) + + log_node_info("easy sv3dLoader", "azimuths:" + str(azimuths)) + log_node_info("easy sv3dLoader", "elevations:" + str(elevations)) + + log = 'azimuths:' + str(azimuths) + '\n\n' + "elevations:" + str(elevations) + # Structure the final output + positive = [[pooled, {"concat_latent_image": t, "elevation": elevations, "azimuth": azimuths}]] + negative = [[torch.zeros_like(pooled), + {"concat_latent_image": torch.zeros_like(t), "elevation": elevations, "azimuth": azimuths}]] + + latent = torch.zeros([batch_size, 4, empty_latent_height // 8, empty_latent_width // 8]) + samples = {"samples": latent} + + image = easySampler.pil2tensor(Image.new('RGB', (1, 1), (0, 0, 0))) + + + pipe = {"model": model, + "positive": positive, + "negative": negative, + "vae": vae, + "clip": clip, + + "samples": samples, + "images": image, + "seed": 0, + + "loader_settings": {"ckpt_name": ckpt_name, + "vae_name": vae_name, + + "positive": positive, + "negative": negative, + "empty_latent_width": empty_latent_width, + "empty_latent_height": empty_latent_height, + "batch_size": batch_size, + "seed": 0, + } + } + + return (pipe, model, log) + +#svd加载器 +class svdLoader: + + @classmethod + def INPUT_TYPES(cls): + def get_file_list(filenames): + return [file for file in filenames if file != "put_models_here.txt" and "svd" in file.lower()] + + return {"required": { + "ckpt_name": (get_file_list(folder_paths.get_filename_list("checkpoints")),), + "vae_name": (["Baked VAE"] + folder_paths.get_filename_list("vae"),), + "clip_name": (["None"] + folder_paths.get_filename_list("clip"),), + + "init_image": ("IMAGE",), + "resolution": (resolution_strings, {"default": "1024 x 576"}), + "empty_latent_width": ("INT", {"default": 256, "min": 16, "max": MAX_RESOLUTION, "step": 8}), + "empty_latent_height": ("INT", {"default": 256, "min": 16, "max": MAX_RESOLUTION, "step": 8}), + + "video_frames": ("INT", {"default": 14, "min": 1, "max": 4096}), + "motion_bucket_id": ("INT", {"default": 127, "min": 1, "max": 1023}), + "fps": ("INT", {"default": 6, "min": 1, "max": 1024}), + "augmentation_level": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 10.0, "step": 0.01}) + }, + "optional": { + "optional_positive": ("STRING", {"default": "", "multiline": True}), + "optional_negative": ("STRING", {"default": "", "multiline": True}), + }, + "hidden": {"prompt": "PROMPT", "my_unique_id": "UNIQUE_ID"} + } + + RETURN_TYPES = ("PIPE_LINE", "MODEL", "VAE") + RETURN_NAMES = ("pipe", "model", "vae") + + FUNCTION = "adv_pipeloader" + CATEGORY = "EasyUse/Loaders" + + def adv_pipeloader(self, ckpt_name, vae_name, clip_name, init_image, resolution, empty_latent_width, empty_latent_height, video_frames, motion_bucket_id, fps, augmentation_level, optional_positive=None, optional_negative=None, prompt=None, my_unique_id=None): + model: ModelPatcher | None = None + vae: VAE | None = None + clip: CLIP | None = None + clip_vision = None + + # resolution + if resolution != "自定义 x 自定义": + try: + width, height = map(int, resolution.split(' x ')) + empty_latent_width = width + empty_latent_height = height + except ValueError: + raise ValueError("Invalid base_resolution format.") + + # Clean models from loaded_objects + easyCache.update_loaded_objects(prompt) + + model, clip, vae, clip_vision = easyCache.load_checkpoint(ckpt_name, "Default", True) + + output = clip_vision.encode_image(init_image) + pooled = output.image_embeds.unsqueeze(0) + 
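# SVD conditioning: the pooled CLIP-vision image embeds above become the conditioning tensor, and the resized init image is VAE-encoded and attached as "concat_latent_image" below. +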
pixels = comfy.utils.common_upscale(init_image.movedim(-1, 1), empty_latent_width, empty_latent_height, "bilinear", "center").movedim(1, -1)
+        encode_pixels = pixels[:, :, :, :3]
+        if augmentation_level > 0:
+            encode_pixels += torch.randn_like(encode_pixels) * augmentation_level
+        t = vae.encode(encode_pixels)
+        positive = [[pooled,
+                     {"motion_bucket_id": motion_bucket_id, "fps": fps, "augmentation_level": augmentation_level,
+                      "concat_latent_image": t}]]
+        negative = [[torch.zeros_like(pooled),
+                     {"motion_bucket_id": motion_bucket_id, "fps": fps, "augmentation_level": augmentation_level,
+                      "concat_latent_image": torch.zeros_like(t)}]]
+        if optional_positive is not None and optional_positive != '':
+            if clip_name == 'None':
+                raise Exception("You need to choose an open_clip model when the positive prompt is not empty")
+            clip = easyCache.load_clip(clip_name)
+            if has_chinese(optional_positive):
+                optional_positive = zh_to_en([optional_positive])[0]
+            positive_embeddings_final, = CLIPTextEncode().encode(clip, optional_positive)
+            positive, = ConditioningConcat().concat(positive, positive_embeddings_final)
+        if optional_negative is not None and optional_negative != '':
+            if clip_name == 'None':
+                raise Exception("You need to choose an open_clip model when the negative prompt is not empty")
+            if has_chinese(optional_negative):
+                optional_negative = zh_to_en([optional_negative])[0]
+            negative_embeddings_final, = CLIPTextEncode().encode(clip, optional_negative)
+            negative, = ConditioningConcat().concat(negative, negative_embeddings_final)
+
+        latent = torch.zeros([video_frames, 4, empty_latent_height // 8, empty_latent_width // 8])
+        samples = {"samples": latent}
+
+        image = easySampler.pil2tensor(Image.new('RGB', (1, 1), (0, 0, 0)))
+
+        pipe = {"model": model,
+                "positive": positive,
+                "negative": negative,
+                "vae": vae,
+                "clip": clip,
+
+                "samples": samples,
+                "images": image,
+                "seed": 0,
+
+                "loader_settings": {"ckpt_name": ckpt_name,
+                                    "vae_name": vae_name,
+
+                                    "positive": positive,
+                                    "negative": negative,
+                                    "resolution": resolution,
+                                    "empty_latent_width": empty_latent_width,
+                                    "empty_latent_height": empty_latent_height,
+                                    "batch_size": 1,
+                                    "seed": 0,
+                                    }
+                }
+
+        return (pipe, model, vae)
+
+# DynamiCrafter loader
+from .dynamiCrafter import DynamiCrafter
+class dynamiCrafterLoader(DynamiCrafter):
+
+    def __init__(self):
+        super().__init__()
+
+    @classmethod
+    def INPUT_TYPES(cls):
+
+        return {"required": {
+                "model_name": (list(DYNAMICRAFTER_MODELS.keys()),),
+                "clip_skip": ("INT", {"default": -2, "min": -24, "max": 0, "step": 1}),
+
+                "init_image": ("IMAGE",),
+                "resolution": (resolution_strings, {"default": "512 x 512"}),
+                "empty_latent_width": ("INT", {"default": 256, "min": 16, "max": MAX_RESOLUTION, "step": 8}),
+                "empty_latent_height": ("INT", {"default": 256, "min": 16, "max": MAX_RESOLUTION, "step": 8}),
+
+                "positive": ("STRING", {"default": "", "multiline": True}),
+                "negative": ("STRING", {"default": "", "multiline": True}),
+
+                "use_interpolate": ("BOOLEAN", {"default": False}),
+                "fps": ("INT", {"default": 15, "min": 1, "max": 30, "step": 1},),
+                "frames": ("INT", {"default": 16}),
+                "scale_latents": ("BOOLEAN", {"default": False})
+            },
+            "optional": {
+                "optional_vae": ("VAE",),
+            },
+            "hidden": {"prompt": "PROMPT", "my_unique_id": "UNIQUE_ID"}
+        }
+
+    RETURN_TYPES = ("PIPE_LINE", "MODEL", "VAE")
+    RETURN_NAMES = ("pipe", "model", "vae")
+
+    FUNCTION = "adv_pipeloader"
+    CATEGORY = "EasyUse/Loaders"
+
+    def get_clip_file(self, node_name):
+        clip_list = folder_paths.get_filename_list("clip")
+        pattern = 
'sd2-1-open-clip|model\.(safetensors|bin)$' + clip_files = [e for e in clip_list if re.search(pattern, e, re.IGNORECASE)] + + clip_name = clip_files[0] if len(clip_files)>0 else None + clip_file = folder_paths.get_full_path("clip", clip_name) if clip_name else None + if clip_name is not None: + log_node_info(node_name, f"Using {clip_name}") + + return clip_file, clip_name + + def get_clipvision_file(self, node_name): + clipvision_list = folder_paths.get_filename_list("clip_vision") + pattern = '(ViT.H.14.*s32B.b79K|ipadapter.*sd15|sd1.?5.*model|open_clip_pytorch_model\.(bin|safetensors))' + clipvision_files = [e for e in clipvision_list if re.search(pattern, e, re.IGNORECASE)] + + clipvision_name = clipvision_files[0] if len(clipvision_files)>0 else None + clipvision_file = folder_paths.get_full_path("clip_vision", clipvision_name) if clipvision_name else None + if clipvision_name is not None: + log_node_info(node_name, f"Using {clipvision_name}") + + return clipvision_file, clipvision_name + + def get_vae_file(self, node_name): + vae_list = folder_paths.get_filename_list("vae") + pattern = 'vae-ft-mse-840000-ema-pruned\.(pt|bin|safetensors)$' + vae_files = [e for e in vae_list if re.search(pattern, e, re.IGNORECASE)] + + vae_name = vae_files[0] if len(vae_files)>0 else None + vae_file = folder_paths.get_full_path("vae", vae_name) if vae_name else None + if vae_name is not None: + log_node_info(node_name, f"Using {vae_name}") + + return vae_file, vae_name + + def adv_pipeloader(self, model_name, clip_skip, init_image, resolution, empty_latent_width, empty_latent_height, positive, negative, use_interpolate, fps, frames, scale_latents, optional_vae=None, prompt=None, my_unique_id=None): + positive_embeddings_final, negative_embeddings_final = None, None + # resolution + if resolution != "自定义 x 自定义": + try: + width, height = map(int, resolution.split(' x ')) + empty_latent_width = width + empty_latent_height = height + except ValueError: + raise ValueError("Invalid base_resolution format.") + + # Clean models from loaded_objects + easyCache.update_loaded_objects(prompt) + + models_0 = list(DYNAMICRAFTER_MODELS.keys())[0] + + if optional_vae: + vae = optional_vae + vae_name = None + else: + vae_file, vae_name = self.get_vae_file("easy dynamiCrafterLoader") + if vae_file is None: + vae_name = "vae-ft-mse-840000-ema-pruned.safetensors" + get_local_filepath(DYNAMICRAFTER_MODELS[models_0]['vae_url'], os.path.join(folder_paths.models_dir, "vae"), + vae_name) + vae = easyCache.load_vae(vae_name) + + clip_file, clip_name = self.get_clip_file("easy dynamiCrafterLoader") + if clip_file is None: + clip_name = 'sd2-1-open-clip.safetensors' + get_local_filepath(DYNAMICRAFTER_MODELS[models_0]['clip_url'], os.path.join(folder_paths.models_dir, "clip"), + clip_name) + + clip = easyCache.load_clip(clip_name) + # load clip vision + clip_vision_file, clip_vision_name = self.get_clipvision_file("easy dynamiCrafterLoader") + if clip_vision_file is None: + clip_vision_name = 'CLIP-ViT-H-14-laion2B-s32B-b79K.safetensors' + clip_vision_file = get_local_filepath(DYNAMICRAFTER_MODELS[models_0]['clip_vision_url'], os.path.join(folder_paths.models_dir, "clip_vision"), + clip_vision_name) + clip_vision = load_clip_vision(clip_vision_file) + # load unet model + model_path = get_local_filepath(DYNAMICRAFTER_MODELS[model_name]['model_url'], DYNAMICRAFTER_DIR) + model_patcher, image_proj_model = self.load_dynamicrafter(model_path) + + # apply + model, empty_latent, image_latent = 
self.process_image_conditioning(model_patcher, clip_vision, vae, image_proj_model, init_image, use_interpolate, fps, frames, scale_latents) + + clipped = clip.clone() + if clip_skip != 0: + clipped.clip_layer(clip_skip) + + if positive is not None and positive != '': + if has_chinese(positive): + positive = zh_to_en([positive])[0] + positive_embeddings_final, = CLIPTextEncode().encode(clipped, positive) + if negative is not None and negative != '': + if has_chinese(negative): + negative = zh_to_en([negative])[0] + negative_embeddings_final, = CLIPTextEncode().encode(clipped, negative) + + image = easySampler.pil2tensor(Image.new('RGB', (1, 1), (0, 0, 0))) + + pipe = {"model": model, + "positive": positive_embeddings_final, + "negative": negative_embeddings_final, + "vae": vae, + "clip": clip, + "clip_vision": clip_vision, + + "samples": empty_latent, + "images": image, + "seed": 0, + + "loader_settings": {"ckpt_name": model_name, + "vae_name": vae_name, + + "positive": positive, + "negative": negative, + "resolution": resolution, + "empty_latent_width": empty_latent_width, + "empty_latent_height": empty_latent_height, + "batch_size": 1, + "seed": 0, + } + } + + return (pipe, model, vae) + +# kolors Loader +from .kolors.text_encode import chatglm3_adv_text_encode +class kolorsLoader: + + @classmethod + def INPUT_TYPES(cls): + return { + "required":{ + "unet_name": (folder_paths.get_filename_list("unet"),), + "vae_name": (folder_paths.get_filename_list("vae"),), + "chatglm3_name": (folder_paths.get_filename_list("llm"),), + "lora_name": (["None"] + folder_paths.get_filename_list("loras"),), + "lora_model_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + "lora_clip_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + "resolution": (resolution_strings, {"default": "1024 x 576"}), + "empty_latent_width": ("INT", {"default": 1024, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + "empty_latent_height": ("INT", {"default": 1024, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + + "positive": ("STRING", {"default": "", "placeholder": "Positive", "multiline": True}), + "negative": ("STRING", {"default": "", "placeholder": "Negative", "multiline": True}), + + "batch_size": ("INT", {"default": 1, "min": 1, "max": 64}), + }, + "optional": { + "model_override": ("MODEL",), + "vae_override": ("VAE",), + "optional_lora_stack": ("LORA_STACK",), + "auto_clean_gpu": ("BOOLEAN", {"default": False}), + }, + "hidden": {"prompt": "PROMPT", "my_unique_id": "UNIQUE_ID"} + } + + RETURN_TYPES = ("PIPE_LINE", "MODEL", "VAE") + RETURN_NAMES = ("pipe", "model", "vae") + + FUNCTION = "adv_pipeloader" + CATEGORY = "EasyUse/Loaders" + + def adv_pipeloader(self, unet_name, vae_name, chatglm3_name, lora_name, lora_model_strength, lora_clip_strength, resolution, empty_latent_width, empty_latent_height, positive, negative, batch_size, model_override=None, optional_lora_stack=None, vae_override=None, auto_clean_gpu=False, prompt=None, my_unique_id=None): + # load unet + if model_override: + model = model_override + else: + model = easyCache.load_kolors_unet(unet_name) + # load vae + if vae_override: + vae = vae_override + else: + vae = easyCache.load_vae(vae_name) + # load chatglm3 + chatglm3_model = easyCache.load_chatglm3(chatglm3_name) + # load lora + lora_stack = [] + if optional_lora_stack is not None: + for lora in optional_lora_stack: + lora = {"lora_name": lora[0], "model": model, "clip": None, "model_strength": lora[1], + "clip_strength": lora[2]} + model, 
_ = easyCache.load_lora(lora) + lora['model'] = model + lora['clip'] = None + lora_stack.append(lora) + + if lora_name != "None": + lora = {"lora_name": lora_name, "model": model, "clip": None, "model_strength": lora_model_strength, + "clip_strength": lora_clip_strength} + model, _ = easyCache.load_lora(lora) + lora_stack.append(lora) + + + # text encode + log_node_warn("正在进行正向提示词编码...") + positive_embeddings_final = chatglm3_adv_text_encode(chatglm3_model, positive, auto_clean_gpu) + log_node_warn("正在进行负面提示词编码...") + negative_embeddings_final = chatglm3_adv_text_encode(chatglm3_model, negative, auto_clean_gpu) + + # empty latent + samples = sampler.emptyLatent(resolution, empty_latent_width, empty_latent_height, batch_size) + + log_node_warn("处理完毕...") + pipe = { + "model": model, + "chatglm3_model": chatglm3_model, + "positive": positive_embeddings_final, + "negative": negative_embeddings_final, + "vae": vae, + "clip": None, + + "samples": samples, + "images": None, + + "loader_settings": { + "unet_name": unet_name, + "vae_name": vae_name, + "chatglm3_name": chatglm3_name, + + "lora_name": lora_name, + "lora_model_strength": lora_model_strength, + "lora_clip_strength": lora_clip_strength, + + "positive": positive, + "negative": negative, + "resolution": resolution, + "empty_latent_width": empty_latent_width, + "empty_latent_height": empty_latent_height, + "batch_size": batch_size, + "auto_clean_gpu": auto_clean_gpu, + } + } + + return {"ui": {}, + "result": (pipe, model, vae, chatglm3_model, positive_embeddings_final, negative_embeddings_final, samples)} + + + return (chatglm3_model, None, None) + +# Flux Loader +class fluxLoader(fullLoader): + @classmethod + def INPUT_TYPES(cls): + checkpoints = folder_paths.get_filename_list("checkpoints") + loras = ["None"] + folder_paths.get_filename_list("loras") + return { + "required": { + "ckpt_name": (checkpoints,), + "vae_name": (["Baked VAE"] + folder_paths.get_filename_list("vae"),), + "lora_name": (loras,), + "lora_model_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + "lora_clip_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + "resolution": (resolution_strings, {"default": "1024 x 1024"}), + "empty_latent_width": ("INT", {"default": 1024, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + "empty_latent_height": ("INT", {"default": 1024, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + + "positive": ("STRING", {"default": "", "placeholder": "Positive", "multiline": True}), + + "batch_size": ("INT", {"default": 1, "min": 1, "max": 64}), + }, + "optional": { + "model_override": ("MODEL",), + "clip_override": ("CLIP",), + "vae_override": ("VAE",), + "optional_lora_stack": ("LORA_STACK",), + "optional_controlnet_stack": ("CONTROL_NET_STACK",), + }, + "hidden": {"prompt": "PROMPT", "my_unique_id": "UNIQUE_ID"} + } + + RETURN_TYPES = ("PIPE_LINE", "MODEL", "VAE") + RETURN_NAMES = ("pipe", "model", "vae") + + FUNCTION = "fluxloader" + CATEGORY = "EasyUse/Loaders" + + def fluxloader(self, ckpt_name, vae_name, + lora_name, lora_model_strength, lora_clip_strength, + resolution, empty_latent_width, empty_latent_height, + positive, batch_size, model_override=None, clip_override=None, vae_override=None, optional_lora_stack=None, optional_controlnet_stack=None, + a1111_prompt_style=False, prompt=None, + my_unique_id=None): + + return super().adv_pipeloader(ckpt_name, 'Default', vae_name, 0, + lora_name, lora_model_strength, lora_clip_strength, + resolution, empty_latent_width, 
empty_latent_height,
+                                      positive, 'none', 'comfy',
+                                      '', 'none', 'comfy',
+                                      batch_size, model_override, clip_override, vae_override, optional_lora_stack=optional_lora_stack,
+                                      optional_controlnet_stack=optional_controlnet_stack,
+                                      a1111_prompt_style=a1111_prompt_style, prompt=prompt,
+                                      my_unique_id=my_unique_id)
+
+
+# DiT Loader
+from .dit.pixArt.config import pixart_conf, pixart_res
+
+class pixArtLoader:
+    @classmethod
+    def INPUT_TYPES(cls):
+        return {
+            "required": {
+                "ckpt_name": (folder_paths.get_filename_list("checkpoints"),),
+                "model_name": (list(pixart_conf.keys()),),
+                "vae_name": (folder_paths.get_filename_list("vae"),),
+                "t5_type": (['sd3'],),
+                "clip_name": (folder_paths.get_filename_list("clip"),),
+                "padding": ("INT", {"default": 1, "min": 1, "max": 300}),
+                "t5_name": (folder_paths.get_filename_list("t5"),),
+                "device": (["auto", "cpu", "gpu"], {"default": "cpu"}),
+                "dtype": (["default", "auto (comfy)", "FP32", "FP16", "BF16"],),
+
+                "lora_name": (["None"] + folder_paths.get_filename_list("loras"),),
+                "lora_model_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}),
+
+                "ratio": (["custom"] + list(pixart_res["PixArtMS_XL_2"].keys()), {"default": "1.00"}),
+                "empty_latent_width": ("INT", {"default": 1024, "min": 64, "max": MAX_RESOLUTION, "step": 8}),
+                "empty_latent_height": ("INT", {"default": 1024, "min": 64, "max": MAX_RESOLUTION, "step": 8}),
+
+                "positive": ("STRING", {"default": "", "placeholder": "Positive", "multiline": True}),
+                "negative": ("STRING", {"default": "", "placeholder": "Negative", "multiline": True}),
+
+                "batch_size": ("INT", {"default": 1, "min": 1, "max": 64}),
+            },
+            "optional": {
+                "optional_lora_stack": ("LORA_STACK",),
+            },
+            "hidden": {"prompt": "PROMPT", "my_unique_id": "UNIQUE_ID"}
+        }
+
+    RETURN_TYPES = ("PIPE_LINE", "MODEL", "VAE")
+    RETURN_NAMES = ("pipe", "model", "vae")
+    FUNCTION = "pixart_pipeloader"
+    CATEGORY = "EasyUse/Loaders"
+
+    def pixart_pipeloader(self, ckpt_name, model_name, vae_name, t5_type, clip_name, padding, t5_name, device, dtype, lora_name, lora_model_strength, ratio, empty_latent_width, empty_latent_height, positive, negative, batch_size, optional_lora_stack=None, prompt=None, my_unique_id=None):
+        # Clean models from loaded_objects
+        easyCache.update_loaded_objects(prompt)
+
+        # load checkpoint
+        model = easyCache.load_dit_ckpt(ckpt_name=ckpt_name, model_name=model_name, pixart_conf=pixart_conf,
+                                        model_type='PixArt')
+        # load vae
+        vae = easyCache.load_vae(vae_name)
+
+        # load t5
+        if t5_type == 'sd3':
+            clip = easyCache.load_clip(clip_name=clip_name, type='sd3')
+            clip = easyCache.load_t5_from_sd3_clip(sd3_clip=clip, padding=padding)
+            lora_stack = []
+            if optional_lora_stack is not None:
+                for lora in optional_lora_stack:
+                    lora = {"lora_name": lora[0], "model": model, "clip": clip, "model_strength": lora[1],
+                            "clip_strength": lora[2]}
+                    model, _ = easyCache.load_lora(lora, type='PixArt')
+                    lora['model'] = model
+                    lora['clip'] = clip
+                    lora_stack.append(lora)
+
+            if lora_name != "None":
+                lora = {"lora_name": lora_name, "model": model, "clip": clip, "model_strength": lora_model_strength,
+                        "clip_strength": 1}
+                model, _ = easyCache.load_lora(lora, type='PixArt')
+                lora_stack.append(lora)
+
+            positive_embeddings_final, = CLIPTextEncode().encode(clip, positive)
+            negative_embeddings_final, = CLIPTextEncode().encode(clip, negative)
+        else:
+            # todo t5v11
+            positive_embeddings_final, negative_embeddings_final = None, None
+            clip = None
+            pass
+
+        # Create Empty Latent
+        if ratio != 
'custom': + if model_name in ['ControlPixArtMSHalf','PixArtMS_Sigma_XL_2_900M']: + res_name = 'PixArtMS_XL_2' + elif model_name in ['ControlPixArtHalf']: + res_name = 'PixArt_XL_2' + else: + res_name = model_name + width, height = pixart_res[res_name][ratio] + empty_latent_width = width + empty_latent_height = height + + latent = torch.zeros([batch_size, 4, empty_latent_height // 8, empty_latent_width // 8], device=sampler.device) + samples = {"samples": latent} + + log_node_warn("加载完毕...") + pipe = { + "model": model, + "positive": positive_embeddings_final, + "negative": negative_embeddings_final, + "vae": vae, + "clip": clip, + + "samples": samples, + "images": None, + + "loader_settings": { + "ckpt_name": ckpt_name, + "clip_name": clip_name, + "vae_name": vae_name, + "t5_name": t5_name, + + "positive": positive, + "negative": negative, + "ratio": ratio, + "empty_latent_width": empty_latent_width, + "empty_latent_height": empty_latent_height, + "batch_size": batch_size, + } + } + + return {"ui": {}, + "result": (pipe, model, vae, clip, positive_embeddings_final, negative_embeddings_final, samples)} + +# lora +class loraStack: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(s): + max_lora_num = 10 + inputs = { + "required": { + "toggle": ("BOOLEAN", {"label_on": "enabled", "label_off": "disabled"}), + "mode": (["simple", "advanced"],), + "num_loras": ("INT", {"default": 1, "min": 1, "max": max_lora_num}), + }, + "optional": { + "optional_lora_stack": ("LORA_STACK",), + }, + } + + for i in range(1, max_lora_num+1): + inputs["optional"][f"lora_{i}_name"] = ( + ["None"] + folder_paths.get_filename_list("loras"), {"default": "None"}) + inputs["optional"][f"lora_{i}_strength"] = ( + "FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}) + inputs["optional"][f"lora_{i}_model_strength"] = ( + "FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}) + inputs["optional"][f"lora_{i}_clip_strength"] = ( + "FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}) + + return inputs + + RETURN_TYPES = ("LORA_STACK",) + RETURN_NAMES = ("lora_stack",) + FUNCTION = "stack" + + CATEGORY = "EasyUse/Loaders" + + def stack(self, toggle, mode, num_loras, optional_lora_stack=None, **kwargs): + if (toggle in [False, None, "False"]) or not kwargs: + return (None,) + + loras = [] + + # Import Stack values + if optional_lora_stack is not None: + loras.extend([l for l in optional_lora_stack if l[0] != "None"]) + + # Import Lora values + for i in range(1, num_loras + 1): + lora_name = kwargs.get(f"lora_{i}_name") + + if not lora_name or lora_name == "None": + continue + + if mode == "simple": + lora_strength = float(kwargs.get(f"lora_{i}_strength")) + loras.append((lora_name, lora_strength, lora_strength)) + elif mode == "advanced": + model_strength = float(kwargs.get(f"lora_{i}_model_strength")) + clip_strength = float(kwargs.get(f"lora_{i}_clip_strength")) + loras.append((lora_name, model_strength, clip_strength)) + return (loras,) + +class controlnetStack: + + + @classmethod + def INPUT_TYPES(s): + max_cn_num = 3 + inputs = { + "required": { + "toggle": ("BOOLEAN", {"label_on": "enabled", "label_off": "disabled"}), + "mode": (["simple", "advanced"],), + "num_controlnet": ("INT", {"default": 1, "min": 1, "max": max_cn_num}), + }, + "optional": { + "optional_controlnet_stack": ("CONTROL_NET_STACK",), + } + } + + for i in range(1, max_cn_num+1): + inputs["optional"][f"controlnet_{i}"] = (["None"] + folder_paths.get_filename_list("controlnet"), {"default": "None"}) + 
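# each ControlNet slot i exposes its own strength, start/end percent, soft weight and image inputs; start/end are only honored in "advanced" mode +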
inputs["optional"][f"controlnet_{i}_strength"] = ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01},) + inputs["optional"][f"start_percent_{i}"] = ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001},) + inputs["optional"][f"end_percent_{i}"] = ("FLOAT",{"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001},) + inputs["optional"][f"scale_soft_weight_{i}"] = ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001},) + inputs["optional"][f"image_{i}"] = ("IMAGE",) + return inputs + + RETURN_TYPES = ("CONTROL_NET_STACK",) + RETURN_NAMES = ("controlnet_stack",) + FUNCTION = "stack" + CATEGORY = "EasyUse/Loaders" + + def stack(self, toggle, mode, num_controlnet, optional_controlnet_stack=None, **kwargs): + if (toggle in [False, None, "False"]) or not kwargs: + return (None,) + + controlnets = [] + + # Import Stack values + if optional_controlnet_stack is not None: + controlnets.extend([l for l in optional_controlnet_stack if l[0] != "None"]) + + # Import Controlnet values + for i in range(1, num_controlnet+1): + controlnet_name = kwargs.get(f"controlnet_{i}") + + if not controlnet_name or controlnet_name == "None": + continue + + controlnet_strength = float(kwargs.get(f"controlnet_{i}_strength")) + start_percent = float(kwargs.get(f"start_percent_{i}")) if mode == "advanced" else 0 + end_percent = float(kwargs.get(f"end_percent_{i}")) if mode == "advanced" else 1.0 + scale_soft_weights = float(kwargs.get(f"scale_soft_weight_{i}")) + image = kwargs.get(f"image_{i}") + + controlnets.append((controlnet_name, controlnet_strength, start_percent, end_percent, scale_soft_weights, image, True)) + + return (controlnets,) +# controlnet +class controlnetSimple: + @classmethod + def INPUT_TYPES(s): + + return { + "required": { + "pipe": ("PIPE_LINE",), + "image": ("IMAGE",), + "control_net_name": (folder_paths.get_filename_list("controlnet"),), + }, + "optional": { + "control_net": ("CONTROL_NET",), + "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "scale_soft_weights": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001},), + } + } + + RETURN_TYPES = ("PIPE_LINE", "CONDITIONING", "CONDITIONING") + RETURN_NAMES = ("pipe", "positive", "negative") + + FUNCTION = "controlnetApply" + CATEGORY = "EasyUse/Loaders" + + def controlnetApply(self, pipe, image, control_net_name, control_net=None, strength=1, scale_soft_weights=1, union_type=None): + + positive, negative = easyControlnet().apply(control_net_name, image, pipe["positive"], pipe["negative"], strength, 0, 1, control_net, scale_soft_weights, mask=None, easyCache=easyCache, model=pipe['model'], vae=pipe['vae']) + + new_pipe = { + "model": pipe['model'], + "positive": positive, + "negative": negative, + "vae": pipe['vae'], + "clip": pipe['clip'], + + "samples": pipe["samples"], + "images": pipe["images"], + "seed": 0, + + "loader_settings": pipe["loader_settings"] + } + + del pipe + return (new_pipe, positive, negative) + +# controlnetADV +class controlnetAdvanced: + + @classmethod + def INPUT_TYPES(s): + + return { + "required": { + "pipe": ("PIPE_LINE",), + "image": ("IMAGE",), + "control_net_name": (folder_paths.get_filename_list("controlnet"),), + }, + "optional": { + "control_net": ("CONTROL_NET",), + "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "start_percent": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "end_percent": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001}), + 
"scale_soft_weights": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001},), + } + } + + RETURN_TYPES = ("PIPE_LINE", "CONDITIONING", "CONDITIONING") + RETURN_NAMES = ("pipe", "positive", "negative") + + FUNCTION = "controlnetApply" + CATEGORY = "EasyUse/Loaders" + + + def controlnetApply(self, pipe, image, control_net_name, control_net=None, strength=1, start_percent=0, end_percent=1, scale_soft_weights=1): + positive, negative = easyControlnet().apply(control_net_name, image, pipe["positive"], pipe["negative"], + strength, start_percent, end_percent, control_net, scale_soft_weights, union_type=None, mask=None, easyCache=easyCache, model=pipe['model'], vae=pipe['vae']) + + new_pipe = { + "model": pipe['model'], + "positive": positive, + "negative": negative, + "vae": pipe['vae'], + "clip": pipe['clip'], + + "samples": pipe["samples"], + "images": image, + "seed": 0, + + "loader_settings": pipe["loader_settings"] + } + + del pipe + + return (new_pipe, positive, negative) + +# controlnetPlusPlus +class controlnetPlusPlus: + + @classmethod + def INPUT_TYPES(s): + + return { + "required": { + "pipe": ("PIPE_LINE",), + "image": ("IMAGE",), + "control_net_name": (folder_paths.get_filename_list("controlnet"),), + }, + "optional": { + "control_net": ("CONTROL_NET",), + "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "start_percent": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "end_percent": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "scale_soft_weights": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001},), + "union_type": (list(union_controlnet_types.keys()),) + } + } + + RETURN_TYPES = ("PIPE_LINE", "CONDITIONING", "CONDITIONING") + RETURN_NAMES = ("pipe", "positive", "negative") + + FUNCTION = "controlnetApply" + CATEGORY = "EasyUse/Loaders" + + + def controlnetApply(self, pipe, image, control_net_name, control_net=None, strength=1, start_percent=0, end_percent=1, scale_soft_weights=1, union_type=None): + if scale_soft_weights < 1: + if "ScaledSoftControlNetWeights" in ALL_NODE_CLASS_MAPPINGS: + soft_weight_cls = ALL_NODE_CLASS_MAPPINGS['ScaledSoftControlNetWeights'] + (weights, timestep_keyframe) = soft_weight_cls().load_weights(scale_soft_weights, False) + cn_adv_cls = ALL_NODE_CLASS_MAPPINGS['ACN_ControlNet++LoaderSingle'] + if union_type == 'auto': + union_type = 'none' + elif union_type == 'canny/lineart/anime_lineart/mlsd': + union_type = 'canny/lineart/mlsd' + elif union_type == 'repaint': + union_type = 'inpaint/outpaint' + control_net, = cn_adv_cls().load_controlnet_plusplus(control_net_name, union_type) + apply_adv_cls = ALL_NODE_CLASS_MAPPINGS['ACN_AdvancedControlNetApply'] + positive, negative, _ = apply_adv_cls().apply_controlnet(pipe["positive"], pipe["negative"], control_net, image, strength, start_percent, end_percent, timestep_kf=timestep_keyframe,) + else: + raise Exception( + f"[Advanced-ControlNet Not Found] you need to install 'COMFYUI-Advanced-ControlNet'") + else: + positive, negative = easyControlnet().apply(control_net_name, image, pipe["positive"], pipe["negative"], + strength, start_percent, end_percent, control_net, scale_soft_weights, union_type=union_type, mask=None, easyCache=easyCache, model=pipe['model']) + + new_pipe = { + "model": pipe['model'], + "positive": positive, + "negative": negative, + "vae": pipe['vae'], + "clip": pipe['clip'], + + "samples": pipe["samples"], + "images": pipe["images"], + "seed": 0, + + "loader_settings": 
pipe["loader_settings"] + } + + del pipe + + return (new_pipe, positive, negative) + +# LLLiteLoader +from .libs.lllite import load_control_net_lllite_patch +class LLLiteLoader: + def __init__(self): + pass + @classmethod + def INPUT_TYPES(s): + def get_file_list(filenames): + return [file for file in filenames if file != "put_models_here.txt" and "lllite" in file] + + return { + "required": { + "model": ("MODEL",), + "model_name": (get_file_list(folder_paths.get_filename_list("controlnet")),), + "cond_image": ("IMAGE",), + "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "steps": ("INT", {"default": 0, "min": 0, "max": 200, "step": 1}), + "start_percent": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 100.0, "step": 0.1}), + "end_percent": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 100.0, "step": 0.1}), + } + } + + RETURN_TYPES = ("MODEL",) + FUNCTION = "load_lllite" + CATEGORY = "EasyUse/Loaders" + + def load_lllite(self, model, model_name, cond_image, strength, steps, start_percent, end_percent): + # cond_image is b,h,w,3, 0-1 + + model_path = os.path.join(folder_paths.get_full_path("controlnet", model_name)) + + model_lllite = model.clone() + patch = load_control_net_lllite_patch(model_path, cond_image, strength, steps, start_percent, end_percent) + if patch is not None: + model_lllite.set_model_attn1_patch(patch) + model_lllite.set_model_attn2_patch(patch) + + return (model_lllite,) + +# ---------------------------------------------------------------加载器 结束----------------------------------------------------------------------# + +#---------------------------------------------------------------Inpaint 开始----------------------------------------------------------------------# + +# FooocusInpaint +from .libs.fooocus import InpaintHead, InpaintWorker + +class applyFooocusInpaint: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL",), + "latent": ("LATENT",), + "head": (list(FOOOCUS_INPAINT_HEAD.keys()),), + "patch": (list(FOOOCUS_INPAINT_PATCH.keys()),), + }, + } + + RETURN_TYPES = ("MODEL",) + RETURN_NAMES = ("model",) + CATEGORY = "EasyUse/Inpaint" + FUNCTION = "apply" + + def apply(self, model, latent, head, patch): + + head_file = get_local_filepath(FOOOCUS_INPAINT_HEAD[head]["model_url"], INPAINT_DIR) + inpaint_head_model = InpaintHead() + sd = torch.load(head_file, map_location='cpu') + inpaint_head_model.load_state_dict(sd) + + patch_file = get_local_filepath(FOOOCUS_INPAINT_PATCH[patch]["model_url"], INPAINT_DIR) + inpaint_lora = comfy.utils.load_torch_file(patch_file, safe_load=True) + + patch = (inpaint_head_model, inpaint_lora) + worker = InpaintWorker(node_name="easy kSamplerInpainting") + cloned = model.clone() + + m, = worker.patch(cloned, latent, patch) + return (m,) + +# brushnet +from .brushnet import BrushNet +class applyBrushNet: + + def get_files_with_extension(folder='inpaint', extensions='.safetensors'): + return [file for file in folder_paths.get_filename_list(folder) if file.endswith(extensions)] + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "pipe": ("PIPE_LINE",), + "image": ("IMAGE",), + "mask": ("MASK",), + "brushnet": (s.get_files_with_extension(),), + "dtype": (['float16', 'bfloat16', 'float32', 'float64'], ), + "scale": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0}), + "start_at": ("INT", {"default": 0, "min": 0, "max": 10000}), + "end_at": ("INT", {"default": 10000, "min": 0, "max": 10000}), + }, + } + + RETURN_TYPES = ("PIPE_LINE",) + RETURN_NAMES = ("pipe",) + 
CATEGORY = "EasyUse/Inpaint" + FUNCTION = "apply" + + def apply(self, pipe, image, mask, brushnet, dtype, scale, start_at, end_at): + + model = pipe['model'] + vae = pipe['vae'] + positive = pipe['positive'] + negative = pipe['negative'] + cls = BrushNet() + if brushnet in backend_cache.cache: + log_node_info("easy brushnetApply", f"Using {brushnet} Cached") + _, brushnet_model = backend_cache.cache[brushnet][1] + else: + brushnet_file = os.path.join(folder_paths.get_full_path("inpaint", brushnet)) + brushnet_model, = cls.load_brushnet_model(brushnet_file, dtype) + backend_cache.update_cache(brushnet, 'brushnet', (False, brushnet_model)) + m, positive, negative, latent = cls.brushnet_model_update(model=model, vae=vae, image=image, mask=mask, + brushnet=brushnet_model, positive=positive, + negative=negative, scale=scale, start_at=start_at, + end_at=end_at) + new_pipe = { + **pipe, + "model": m, + "positive": positive, + "negative": negative, + "samples": latent, + } + del pipe + return (new_pipe,) + +# #powerpaint +class applyPowerPaint: + def get_files_with_extension(folder='inpaint', extensions='.safetensors'): + return [file for file in folder_paths.get_filename_list(folder) if file.endswith(extensions)] + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "pipe": ("PIPE_LINE",), + "image": ("IMAGE",), + "mask": ("MASK",), + "powerpaint_model": (s.get_files_with_extension(),), + "powerpaint_clip": (s.get_files_with_extension(extensions='.bin'),), + "dtype": (['float16', 'bfloat16', 'float32', 'float64'],), + "fitting": ("FLOAT", {"default": 1.0, "min": 0.3, "max": 1.0}), + "function": (['text guided', 'shape guided', 'object removal', 'context aware', 'image outpainting'],), + "scale": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0}), + "start_at": ("INT", {"default": 0, "min": 0, "max": 10000}), + "end_at": ("INT", {"default": 10000, "min": 0, "max": 10000}), + "save_memory": (['none', 'auto', 'max'],), + }, + } + + RETURN_TYPES = ("PIPE_LINE",) + RETURN_NAMES = ("pipe",) + CATEGORY = "EasyUse/Inpaint" + FUNCTION = "apply" + + def apply(self, pipe, image, mask, powerpaint_model, powerpaint_clip, dtype, fitting, function, scale, start_at, end_at, save_memory='none'): + model = pipe['model'] + vae = pipe['vae'] + positive = pipe['positive'] + negative = pipe['negative'] + + cls = BrushNet() + # load powerpaint clip + if powerpaint_clip in backend_cache.cache: + log_node_info("easy powerpaintApply", f"Using {powerpaint_clip} Cached") + _, ppclip = backend_cache.cache[powerpaint_clip][1] + else: + model_url = POWERPAINT_MODELS['base_fp16']['model_url'] + base_clip = get_local_filepath(model_url, os.path.join(folder_paths.models_dir, 'clip')) + ppclip, = cls.load_powerpaint_clip(base_clip, os.path.join(folder_paths.get_full_path("inpaint", powerpaint_clip))) + backend_cache.update_cache(powerpaint_clip, 'ppclip', (False, ppclip)) + # load powerpaint model + if powerpaint_model in backend_cache.cache: + log_node_info("easy powerpaintApply", f"Using {powerpaint_model} Cached") + _, powerpaint = backend_cache.cache[powerpaint_model][1] + else: + powerpaint_file = os.path.join(folder_paths.get_full_path("inpaint", powerpaint_model)) + powerpaint, = cls.load_brushnet_model(powerpaint_file, dtype) + backend_cache.update_cache(powerpaint_model, 'powerpaint', (False, powerpaint)) + m, positive, negative, latent = cls.powerpaint_model_update(model=model, vae=vae, image=image, mask=mask, powerpaint=powerpaint, + clip=ppclip, positive=positive, + negative=negative, fitting=fitting, 
function=function, + scale=scale, start_at=start_at, end_at=end_at, save_memory=save_memory) + new_pipe = { + **pipe, + "model": m, + "positive": positive, + "negative": negative, + "samples": latent, + } + del pipe + return (new_pipe,) + +class applyInpaint: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "pipe": ("PIPE_LINE",), + "image": ("IMAGE",), + "mask": ("MASK",), + "inpaint_mode": (('normal', 'fooocus_inpaint', 'brushnet_random', 'brushnet_segmentation', 'powerpaint'),), + "encode": (('none', 'vae_encode_inpaint', 'inpaint_model_conditioning', 'different_diffusion'), {"default": "none"}), + "grow_mask_by": ("INT", {"default": 6, "min": 0, "max": 64, "step": 1}), + "dtype": (['float16', 'bfloat16', 'float32', 'float64'],), + "fitting": ("FLOAT", {"default": 1.0, "min": 0.3, "max": 1.0}), + "function": (['text guided', 'shape guided', 'object removal', 'context aware', 'image outpainting'],), + "scale": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0}), + "start_at": ("INT", {"default": 0, "min": 0, "max": 10000}), + "end_at": ("INT", {"default": 10000, "min": 0, "max": 10000}), + }, + } + + RETURN_TYPES = ("PIPE_LINE",) + RETURN_NAMES = ("pipe",) + CATEGORY = "EasyUse/Inpaint" + FUNCTION = "apply" + + def inpaint_model_conditioning(self, pipe, image, vae, mask, grow_mask_by): + if grow_mask_by >0: + mask, = GrowMask().expand_mask(mask, grow_mask_by, False) + positive, negative, latent = InpaintModelConditioning().encode(pipe['positive'], pipe['negative'], image, + vae, mask) + pipe['positive'] = positive + pipe['negative'] = negative + pipe['samples'] = latent + + return pipe + + def get_brushnet_model(self, type, model): + model_type = 'sdxl' if isinstance(model.model.model_config, comfy.supported_models.SDXL) else 'sd1' + if type == 'brushnet_random': + brush_model = BRUSHNET_MODELS['random_mask'][model_type]['model_url'] + if model_type == 'sdxl': + pattern = 'brushnet.random.mask.sdxl.*\.(safetensors|bin)$' + else: + pattern = 'brushnet.random.mask.*\.(safetensors|bin)$' + elif type == 'brushnet_segmentation': + brush_model = BRUSHNET_MODELS['segmentation_mask'][model_type]['model_url'] + if model_type == 'sdxl': + pattern = 'brushnet.segmentation.mask.sdxl.*\.(safetensors|bin)$' + else: + pattern = 'brushnet.segmentation.mask.*\.(safetensors|bin)$' + + + brushfile = [e for e in folder_paths.get_filename_list('inpaint') if re.search(pattern, e, re.IGNORECASE)] + brushname = brushfile[0] if brushfile else None + if not brushname: + from urllib.parse import urlparse + get_local_filepath(brush_model, INPAINT_DIR) + parsed_url = urlparse(brush_model) + brushname = os.path.basename(parsed_url.path) + return brushname + + def get_powerpaint_model(self, model): + model_type = 'sdxl' if isinstance(model.model.model_config, comfy.supported_models.SDXL) else 'sd1' + if model_type == 'sdxl': + raise Exception("Powerpaint not supported for SDXL models") + + powerpaint_model = POWERPAINT_MODELS['v2.1']['model_url'] + powerpaint_clip = POWERPAINT_MODELS['v2.1']['clip_url'] + + from urllib.parse import urlparse + get_local_filepath(powerpaint_model, os.path.join(INPAINT_DIR, 'powerpaint')) + model_parsed_url = urlparse(powerpaint_model) + clip_parsed_url = urlparse(powerpaint_clip) + model_name = os.path.join("powerpaint",os.path.basename(model_parsed_url.path)) + clip_name = os.path.join("powerpaint",os.path.basename(clip_parsed_url.path)) + return model_name, clip_name + + def apply(self, pipe, image, mask, inpaint_mode, encode, grow_mask_by, dtype, fitting, function, 
scale, start_at, end_at): + new_pipe = { + **pipe, + } + del pipe + if inpaint_mode in ['brushnet_random', 'brushnet_segmentation']: + brushnet = self.get_brushnet_model(inpaint_mode, new_pipe['model']) + new_pipe, = applyBrushNet().apply(new_pipe, image, mask, brushnet, dtype, scale, start_at, end_at) + elif inpaint_mode == 'powerpaint': + powerpaint_model, powerpaint_clip = self.get_powerpaint_model(new_pipe['model']) + new_pipe, = applyPowerPaint().apply(new_pipe, image, mask, powerpaint_model, powerpaint_clip, dtype, fitting, function, scale, start_at, end_at) + + vae = new_pipe['vae'] + if encode == 'none': + if inpaint_mode == 'fooocus_inpaint': + model, = applyFooocusInpaint().apply(new_pipe['model'], new_pipe['samples'], + list(FOOOCUS_INPAINT_HEAD.keys())[0], + list(FOOOCUS_INPAINT_PATCH.keys())[0]) + new_pipe['model'] = model + elif encode == 'vae_encode_inpaint': + latent, = VAEEncodeForInpaint().encode(vae, image, mask, grow_mask_by) + new_pipe['samples'] = latent + if inpaint_mode == 'fooocus_inpaint': + model, = applyFooocusInpaint().apply(new_pipe['model'], new_pipe['samples'], + list(FOOOCUS_INPAINT_HEAD.keys())[0], + list(FOOOCUS_INPAINT_PATCH.keys())[0]) + new_pipe['model'] = model + elif encode == 'inpaint_model_conditioning': + if inpaint_mode == 'fooocus_inpaint': + latent, = VAEEncodeForInpaint().encode(vae, image, mask, grow_mask_by) + new_pipe['samples'] = latent + model, = applyFooocusInpaint().apply(new_pipe['model'], new_pipe['samples'], + list(FOOOCUS_INPAINT_HEAD.keys())[0], + list(FOOOCUS_INPAINT_PATCH.keys())[0]) + new_pipe['model'] = model + new_pipe = self.inpaint_model_conditioning(new_pipe, image, vae, mask, 0) + else: + new_pipe = self.inpaint_model_conditioning(new_pipe, image, vae, mask, grow_mask_by) + elif encode == 'different_diffusion': + if inpaint_mode == 'fooocus_inpaint': + latent, = VAEEncodeForInpaint().encode(vae, image, mask, grow_mask_by) + new_pipe['samples'] = latent + model, = applyFooocusInpaint().apply(new_pipe['model'], new_pipe['samples'], + list(FOOOCUS_INPAINT_HEAD.keys())[0], + list(FOOOCUS_INPAINT_PATCH.keys())[0]) + new_pipe['model'] = model + new_pipe = self.inpaint_model_conditioning(new_pipe, image, vae, mask, 0) + else: + new_pipe = self.inpaint_model_conditioning(new_pipe, image, vae, mask, grow_mask_by) + cls = ALL_NODE_CLASS_MAPPINGS['DifferentialDiffusion'] + if cls is not None: + model, = cls().apply(new_pipe['model']) + new_pipe['model'] = model + else: + raise Exception("Differential Diffusion not found,please update comfyui") + + return (new_pipe,) +# ---------------------------------------------------------------Inpaint 结束----------------------------------------------------------------------# + +#---------------------------------------------------------------适配器 开始----------------------------------------------------------------------# + +# 风格对齐 +from .libs.styleAlign import styleAlignBatch, SHARE_NORM_OPTIONS, SHARE_ATTN_OPTIONS +class styleAlignedBatchAlign: + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "model": ("MODEL",), + "share_norm": (SHARE_NORM_OPTIONS,), + "share_attn": (SHARE_ATTN_OPTIONS,), + "scale": ("FLOAT", {"default": 1, "min": 0, "max": 1.0, "step": 0.1}), + } + } + + RETURN_TYPES = ("MODEL",) + FUNCTION = "align" + CATEGORY = "EasyUse/Adapter" + + def align(self, model, share_norm, share_attn, scale): + return (styleAlignBatch(model, share_norm, share_attn, scale),) + +# 光照对齐 +from .ic_light.__init__ import ICLight, VAEEncodeArgMax +class icLightApply: + + @classmethod + 
def INPUT_TYPES(cls): + return { + "required": { + "mode": (list(IC_LIGHT_MODELS.keys()),), + "model": ("MODEL",), + "image": ("IMAGE",), + "vae": ("VAE",), + "lighting": (['None', 'Left Light', 'Right Light', 'Top Light', 'Bottom Light', 'Circle Light'],{"default": "None"}), + "source": (['Use Background Image', 'Use Flipped Background Image', 'Left Light', 'Right Light', 'Top Light', 'Bottom Light', 'Ambient'],{"default": "Use Background Image"}), + "remove_bg": ("BOOLEAN", {"default": True}), + }, + } + + RETURN_TYPES = ("MODEL", "IMAGE") + RETURN_NAMES = ("model", "lighting_image") + FUNCTION = "apply" + CATEGORY = "EasyUse/Adapter" + + def batch(self, image1, image2): + if image1.shape[1:] != image2.shape[1:]: + image2 = comfy.utils.common_upscale(image2.movedim(-1, 1), image1.shape[2], image1.shape[1], "bilinear", + "center").movedim(1, -1) + s = torch.cat((image1, image2), dim=0) + return s + + def removebg(self, image): + if "easy imageRemBg" not in ALL_NODE_CLASS_MAPPINGS: + raise Exception("Please re-install ComfyUI-Easy-Use") + cls = ALL_NODE_CLASS_MAPPINGS['easy imageRemBg'] + results = cls().remove('RMBG-1.4', image, 'Hide', 'ComfyUI') + if "result" in results: + image, _ = results['result'] + return image + + def apply(self, mode, model, image, vae, lighting, source, remove_bg): + model_type = get_sd_version(model) + if model_type == 'sdxl': + raise Exception("IC Light model is not supported for SDXL now") + + batch_size, height, width, channel = image.shape + if channel == 3: + # remove bg + if mode == 'Foreground' or batch_size == 1: + if remove_bg: + image = self.removebg(image) + else: + mask = torch.full((1, height, width), 1.0, dtype=torch.float32, device="cpu") + image, = JoinImageWithAlpha().join_image_with_alpha(image, mask) + + iclight = ICLight() + if mode == 'Foreground': + lighting_image = iclight.generate_lighting_image(image, lighting) + else: + lighting_image = iclight.generate_source_image(image, source) + if source not in ['Use Background Image', 'Use Flipped Background Image']: + _, height, width, _ = lighting_image.shape + mask = torch.full((1, height, width), 1.0, dtype=torch.float32, device="cpu") + lighting_image, = JoinImageWithAlpha().join_image_with_alpha(lighting_image, mask) + if batch_size < 2: + image = self.batch(image, lighting_image) + else: + original_image = [img.unsqueeze(0) for img in image] + original_image = self.removebg(original_image[0]) + image = self.batch(original_image, lighting_image) + + latent, = VAEEncodeArgMax().encode(vae, image) + key = 'iclight_' + mode + '_' + model_type + model_path = get_local_filepath(IC_LIGHT_MODELS[mode]['sd1']["model_url"], + os.path.join(folder_paths.models_dir, "unet")) + ic_model = None + if key in backend_cache.cache: + log_node_info("easy icLightApply", f"Using icLightModel {mode+'_'+model_type} Cached") + _, ic_model = backend_cache.cache[key][1] + m, _ = iclight.apply(model_path, model, latent, ic_model) + else: + m, ic_model = iclight.apply(model_path, model, latent, ic_model) + backend_cache.update_cache(key, 'iclight', (False, ic_model)) + return (m, lighting_image) + + +def insightface_loader(provider, name='buffalo_l'): + try: + from insightface.app import FaceAnalysis + except ImportError as e: + raise Exception(e) + path = os.path.join(folder_paths.models_dir, "insightface") + model = FaceAnalysis(name=name, root=path, providers=[provider + 'ExecutionProvider', ]) + model.prepare(ctx_id=0, det_size=(640, 640)) + return model + +# Apply Ipadapter +class ipadapter: + + def 
__init__(self): + self.normal_presets = [ + 'LIGHT - SD1.5 only (low strength)', + 'STANDARD (medium strength)', + 'VIT-G (medium strength)', + 'PLUS (high strength)', + 'PLUS (kolors genernal)', + 'PLUS FACE (portraits)', + 'FULL FACE - SD1.5 only (portraits stronger)', + 'COMPOSITION' + ] + self.faceid_presets = [ + 'FACEID', + 'FACEID PLUS - SD1.5 only', + "FACEID PLUS KOLORS", + 'FACEID PLUS V2', + 'FACEID PORTRAIT (style transfer)', + 'FACEID PORTRAIT UNNORM - SDXL only (strong)' + ] + self.weight_types = ["linear", "ease in", "ease out", 'ease in-out', 'reverse in-out', 'weak input', 'weak output', 'weak middle', 'strong middle', 'style transfer', 'composition', 'strong style transfer', 'style and composition', 'style transfer precise'] + self.presets = self.normal_presets + self.faceid_presets + + + def error(self): + raise Exception(f"[ERROR] To use ipadapterApply, you need to install 'ComfyUI_IPAdapter_plus'") + + def get_clipvision_file(self, preset, node_name): + preset = preset.lower() + clipvision_list = folder_paths.get_filename_list("clip_vision") + + if preset.startswith("plus (kolors") or preset.startswith("faceid plus kolors"): + pattern = 'Vit.Large.patch14.336\.(bin|safetensors)$' + elif preset.startswith("vit-g"): + pattern = '(ViT.bigG.14.*39B.b160k|ipadapter.*sdxl|sdxl.*model\.(bin|safetensors))' + else: + pattern = '(ViT.H.14.*s32B.b79K|ipadapter.*sd15|sd1.?5.*model\.(bin|safetensors))' + clipvision_files = [e for e in clipvision_list if re.search(pattern, e, re.IGNORECASE)] + + clipvision_name = clipvision_files[0] if len(clipvision_files)>0 else None + clipvision_file = folder_paths.get_full_path("clip_vision", clipvision_name) if clipvision_name else None + # if clipvision_name is not None: + # log_node_info(node_name, f"Using {clipvision_name}") + + return clipvision_file, clipvision_name + + def get_ipadapter_file(self, preset, is_sdxl, node_name): + preset = preset.lower() + ipadapter_list = folder_paths.get_filename_list("ipadapter") + is_insightface = False + lora_pattern = None + + if preset.startswith("light"): + if is_sdxl: + raise Exception("light model is not supported for SDXL") + pattern = 'sd15.light.v11\.(safetensors|bin)$' + # if light model v11 is not found, try with the old version + if not [e for e in ipadapter_list if re.search(pattern, e, re.IGNORECASE)]: + pattern = 'sd15.light\.(safetensors|bin)$' + elif preset.startswith("standard"): + if is_sdxl: + pattern = 'ip.adapter.sdxl.vit.h\.(safetensors|bin)$' + else: + pattern = 'ip.adapter.sd15\.(safetensors|bin)$' + elif preset.startswith("vit-g"): + if is_sdxl: + pattern = 'ip.adapter.sdxl\.(safetensors|bin)$' + else: + pattern = 'sd15.vit.g\.(safetensors|bin)$' + elif preset.startswith("plus (high"): + if is_sdxl: + pattern = 'plus.sdxl.vit.h\.(safetensors|bin)$' + else: + pattern = 'ip.adapter.plus.sd15\.(safetensors|bin)$' + elif preset.startswith("plus (kolors"): + if is_sdxl: + pattern = 'plus.gener(nal|al)\.(safetensors|bin)$' + else: + raise Exception("kolors model is not supported for SD15") + elif preset.startswith("plus face"): + if is_sdxl: + pattern = 'plus.face.sdxl.vit.h\.(safetensors|bin)$' + else: + pattern = 'plus.face.sd15\.(safetensors|bin)$' + elif preset.startswith("full"): + if is_sdxl: + raise Exception("full face model is not supported for SDXL") + pattern = 'full.face.sd15\.(safetensors|bin)$' + elif preset.startswith("composition"): + if is_sdxl: + pattern = 'plus.composition.sdxl\.(safetensors|bin)$' + else: + pattern = 'plus.composition.sd15\.(safetensors|bin)$' + 
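# the FaceID presets below additionally require an InsightFace model, and some of them expect a companion LoRA (see lora_pattern) +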
elif preset.startswith("faceid portrait ("): + if is_sdxl: + pattern = 'portrait.sdxl\.(safetensors|bin)$' + else: + pattern = 'portrait.v11.sd15\.(safetensors|bin)$' + # if v11 is not found, try with the old version + if not [e for e in ipadapter_list if re.search(pattern, e, re.IGNORECASE)]: + pattern = 'portrait.sd15\.(safetensors|bin)$' + is_insightface = True + elif preset.startswith("faceid portrait unnorm"): + if is_sdxl: + pattern = r'portrait.sdxl.unnorm\.(safetensors|bin)$' + else: + raise Exception("portrait unnorm model is not supported for SD1.5") + is_insightface = True + elif preset == "faceid": + if is_sdxl: + pattern = 'faceid.sdxl\.(safetensors|bin)$' + lora_pattern = 'faceid.sdxl.lora\.safetensors$' + else: + pattern = 'faceid.sd15\.(safetensors|bin)$' + lora_pattern = 'faceid.sd15.lora\.safetensors$' + is_insightface = True + elif preset.startswith("faceid plus kolors"): + if is_sdxl: + pattern = '(kolors.ip.adapter.faceid.plus|ipa.faceid.plus)\.(safetensors|bin)$' + else: + raise Exception("faceid plus kolors model is not supported for SD1.5") + is_insightface = True + elif preset.startswith("faceid plus -"): + if is_sdxl: + raise Exception("faceid plus model is not supported for SDXL") + pattern = 'faceid.plus.sd15\.(safetensors|bin)$' + lora_pattern = 'faceid.plus.sd15.lora\.safetensors$' + is_insightface = True + elif preset.startswith("faceid plus v2"): + if is_sdxl: + pattern = 'faceid.plusv2.sdxl\.(safetensors|bin)$' + lora_pattern = 'faceid.plusv2.sdxl.lora\.safetensors$' + else: + pattern = 'faceid.plusv2.sd15\.(safetensors|bin)$' + lora_pattern = 'faceid.plusv2.sd15.lora\.safetensors$' + is_insightface = True + else: + raise Exception(f"invalid type '{preset}'") + + ipadapter_files = [e for e in ipadapter_list if re.search(pattern, e, re.IGNORECASE)] + ipadapter_name = ipadapter_files[0] if len(ipadapter_files)>0 else None + ipadapter_file = folder_paths.get_full_path("ipadapter", ipadapter_name) if ipadapter_name else None + # if ipadapter_name is not None: + # log_node_info(node_name, f"Using {ipadapter_name}") + + return ipadapter_file, ipadapter_name, is_insightface, lora_pattern + + def get_lora_pattern(self, file): + basename = os.path.basename(file) + lora_pattern = None + if re.search(r'faceid.sdxl\.(safetensors|bin)$', basename, re.IGNORECASE): + lora_pattern = 'faceid.sdxl.lora\.safetensors$' + elif re.search(r'faceid.sd15\.(safetensors|bin)$', basename, re.IGNORECASE): + lora_pattern = 'faceid.sd15.lora\.safetensors$' + elif re.search(r'faceid.plus.sd15\.(safetensors|bin)$', basename, re.IGNORECASE): + lora_pattern = 'faceid.plus.sd15.lora\.safetensors$' + elif re.search(r'faceid.plusv2.sdxl\.(safetensors|bin)$', basename, re.IGNORECASE): + lora_pattern = 'faceid.plusv2.sdxl.lora\.safetensors$' + elif re.search(r'faceid.plusv2.sd15\.(safetensors|bin)$', basename, re.IGNORECASE): + lora_pattern = 'faceid.plusv2.sd15.lora\.safetensors$' + + return lora_pattern + + def get_lora_file(self, preset, pattern, model_type, model, model_strength, clip_strength, clip=None): + lora_list = folder_paths.get_filename_list("loras") + lora_files = [e for e in lora_list if re.search(pattern, e, re.IGNORECASE)] + lora_name = lora_files[0] if lora_files else None + if lora_name: + return easyCache.load_lora({"model": model, "clip": clip, "lora_name": lora_name, "model_strength":model_strength, "clip_strength":clip_strength},) + else: + if "lora_url" in IPADAPTER_MODELS[preset][model_type]: + lora_name = 
get_local_filepath(IPADAPTER_MODELS[preset][model_type]["lora_url"], os.path.join(folder_paths.models_dir, "loras")) + return easyCache.load_lora({"model": model, "clip": clip, "lora_name": lora_name, "model_strength":model_strength, "clip_strength":clip_strength},) + return (model, clip) + + def ipadapter_model_loader(self, file): + model = comfy.utils.load_torch_file(file, safe_load=True) + + if file.lower().endswith(".safetensors"): + st_model = {"image_proj": {}, "ip_adapter": {}} + for key in model.keys(): + if key.startswith("image_proj."): + st_model["image_proj"][key.replace("image_proj.", "")] = model[key] + elif key.startswith("ip_adapter."): + st_model["ip_adapter"][key.replace("ip_adapter.", "")] = model[key] + model = st_model + del st_model + + model_keys = model.keys() + if "adapter_modules" in model_keys: + model["ip_adapter"] = model["adapter_modules"] + model["faceidplusv2"] = True + del model['adapter_modules'] + + if not "ip_adapter" in model_keys or not model["ip_adapter"]: + raise Exception("invalid IPAdapter model {}".format(file)) + + if 'plusv2' in file.lower(): + model["faceidplusv2"] = True + + if 'unnorm' in file.lower(): + model["portraitunnorm"] = True + + return model + + def load_model(self, model, preset, lora_model_strength, provider="CPU", clip_vision=None, optional_ipadapter=None, cache_mode='none', node_name='easy ipadapterApply'): + pipeline = {"clipvision": {'file': None, 'model': None}, "ipadapter": {'file': None, 'model': None}, + "insightface": {'provider': None, 'model': None}} + ipadapter, insightface, is_insightface, lora_pattern = None, None, None, None + if optional_ipadapter is not None: + pipeline = optional_ipadapter + if not clip_vision: + clip_vision = pipeline['clipvision']['model'] + ipadapter = pipeline['ipadapter']['model'] + if 'insightface' in pipeline: + insightface = pipeline['insightface']['model'] + lora_pattern = self.get_lora_pattern(pipeline['ipadapter']['file']) + + # 1. Load the clipvision model + if not clip_vision: + clipvision_file, clipvision_name = self.get_clipvision_file(preset, node_name) + if clipvision_file is None: + if preset.lower().startswith("plus (kolors"): + model_url = IPADAPTER_CLIPVISION_MODELS["clip-vit-large-patch14-336"]["model_url"] + clipvision_file = get_local_filepath(model_url, IPADAPTER_DIR, "clip-vit-large-patch14-336.bin") + else: + model_url = IPADAPTER_CLIPVISION_MODELS["clip-vit-h-14-laion2B-s32B-b79K"]["model_url"] + clipvision_file = get_local_filepath(model_url, IPADAPTER_DIR, "clip-vit-h-14-laion2B-s32B-b79K.safetensors") + clipvision_name = os.path.basename(model_url) + if clipvision_file == pipeline['clipvision']['file']: + clip_vision = pipeline['clipvision']['model'] + elif cache_mode in ["all", "clip_vision only"] and clipvision_name in backend_cache.cache: + log_node_info("easy ipadapterApply", f"Using ClipVisonModel {clipvision_name} Cached") + _, clip_vision = backend_cache.cache[clipvision_name][1] + else: + clip_vision = load_clip_vision(clipvision_file) + log_node_info("easy ipadapterApply", f"Using ClipVisonModel {clipvision_name}") + if cache_mode in ["all", "clip_vision only"]: + backend_cache.update_cache(clipvision_name, 'clip_vision', (False, clip_vision)) + pipeline['clipvision']['file'] = clipvision_file + pipeline['clipvision']['model'] = clip_vision + + # 2. 
Load the ipadapter model + is_sdxl = isinstance(model.model, comfy.model_base.SDXL) + if not ipadapter: + ipadapter_file, ipadapter_name, is_insightface, lora_pattern = self.get_ipadapter_file(preset, is_sdxl, node_name) + model_type = 'sdxl' if is_sdxl else 'sd15' + if ipadapter_file is None: + model_url = IPADAPTER_MODELS[preset][model_type]["model_url"] + ipadapter_file = get_local_filepath(model_url, IPADAPTER_DIR) + ipadapter_name = os.path.basename(model_url) + if ipadapter_file == pipeline['ipadapter']['file']: + ipadapter = pipeline['ipadapter']['model'] + elif cache_mode in ["all", "ipadapter only"] and ipadapter_name in backend_cache.cache: + log_node_info("easy ipadapterApply", f"Using IpAdapterModel {ipadapter_name} Cached") + _, ipadapter = backend_cache.cache[ipadapter_name][1] + else: + ipadapter = self.ipadapter_model_loader(ipadapter_file) + pipeline['ipadapter']['file'] = ipadapter_file + log_node_info("easy ipadapterApply", f"Using IpAdapterModel {ipadapter_name}") + if cache_mode in ["all", "ipadapter only"]: + backend_cache.update_cache(ipadapter_name, 'ipadapter', (False, ipadapter)) + + pipeline['ipadapter']['model'] = ipadapter + + # 3. Load the lora model if needed + if lora_pattern is not None: + if lora_model_strength > 0: + model, _ = self.get_lora_file(preset, lora_pattern, model_type, model, lora_model_strength, 1) + + # 4. Load the insightface model if needed + if is_insightface: + if not insightface: + icache_key = 'insightface-' + provider + if provider == pipeline['insightface']['provider']: + insightface = pipeline['insightface']['model'] + elif cache_mode in ["all", "insightface only"] and icache_key in backend_cache.cache: + log_node_info("easy ipadapterApply", f"Using InsightFaceModel {icache_key} Cached") + _, insightface = backend_cache.cache[icache_key][1] + else: + insightface = insightface_loader(provider, 'antelopev2' if preset == 'FACEID PLUS KOLORS' else 'buffalo_l') + if cache_mode in ["all", "insightface only"]: + backend_cache.update_cache(icache_key, 'insightface',(False, insightface)) + pipeline['insightface']['provider'] = provider + pipeline['insightface']['model'] = insightface + + return (model, pipeline,) + +class ipadapterApply(ipadapter): + def __init__(self): + super().__init__() + pass + + @classmethod + def INPUT_TYPES(cls): + presets = cls().presets + return { + "required": { + "model": ("MODEL",), + "image": ("IMAGE",), + "preset": (presets,), + "lora_strength": ("FLOAT", {"default": 0.6, "min": 0, "max": 1, "step": 0.01}), + "provider": (["CPU", "CUDA", "ROCM", "DirectML", "OpenVINO", "CoreML"],), + "weight": ("FLOAT", {"default": 1.0, "min": -1, "max": 3, "step": 0.05}), + "weight_faceidv2": ("FLOAT", { "default": 1.0, "min": -1, "max": 5.0, "step": 0.05 }), + "start_at": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "end_at": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "cache_mode": (["insightface only", "clip_vision only", "ipadapter only", "all", "none"], {"default": "all"},), + "use_tiled": ("BOOLEAN", {"default": False},), + }, + + "optional": { + "attn_mask": ("MASK",), + "optional_ipadapter": ("IPADAPTER",), + } + } + + RETURN_TYPES = ("MODEL", "IMAGE", "MASK", "IPADAPTER",) + RETURN_NAMES = ("model", "images", "masks", "ipadapter", ) + CATEGORY = "EasyUse/Adapter" + FUNCTION = "apply" + + def apply(self, model, image, preset, lora_strength, provider, weight, weight_faceidv2, start_at, end_at, cache_mode, use_tiled, attn_mask=None, optional_ipadapter=None, 
weight_kolors=None): + images, masks = image, [None] + model, ipadapter = self.load_model(model, preset, lora_strength, provider, clip_vision=None, optional_ipadapter=optional_ipadapter, cache_mode=cache_mode) + if use_tiled and preset not in self.faceid_presets: + if "IPAdapterTiled" not in ALL_NODE_CLASS_MAPPINGS: + self.error() + cls = ALL_NODE_CLASS_MAPPINGS["IPAdapterTiled"] + model, images, masks = cls().apply_tiled(model, ipadapter, image, weight, "linear", start_at, end_at, sharpening=0.0, combine_embeds="concat", image_negative=None, attn_mask=attn_mask, clip_vision=None, embeds_scaling='V only') + else: + if preset in ['FACEID PLUS KOLORS', 'FACEID PLUS V2', 'FACEID PORTRAIT (style transfer)']: + if "IPAdapterAdvanced" not in ALL_NODE_CLASS_MAPPINGS: + self.error() + cls = ALL_NODE_CLASS_MAPPINGS["IPAdapterAdvanced"] + if weight_kolors is None: + weight_kolors = weight + model, images = cls().apply_ipadapter(model, ipadapter, start_at=start_at, end_at=end_at, weight=weight, weight_type="linear", combine_embeds="concat", weight_faceidv2=weight_faceidv2, image=image, image_negative=None, clip_vision=None, attn_mask=attn_mask, insightface=None, embeds_scaling='V only', weight_kolors=weight_kolors) + else: + if "IPAdapter" not in ALL_NODE_CLASS_MAPPINGS: + self.error() + cls = ALL_NODE_CLASS_MAPPINGS["IPAdapter"] + model, images = cls().apply_ipadapter(model, ipadapter, image, weight, start_at, end_at, weight_type='standard', attn_mask=attn_mask) + if images is None: + images = image + return (model, images, masks, ipadapter,) + +class ipadapterApplyAdvanced(ipadapter): + def __init__(self): + super().__init__() + pass + + @classmethod + def INPUT_TYPES(cls): + ipa_cls = cls() + presets = ipa_cls.presets + weight_types = ipa_cls.weight_types + return { + "required": { + "model": ("MODEL",), + "image": ("IMAGE",), + "preset": (presets,), + "lora_strength": ("FLOAT", {"default": 0.6, "min": 0, "max": 1, "step": 0.01}), + "provider": (["CPU", "CUDA", "ROCM", "DirectML", "OpenVINO", "CoreML"],), + "weight": ("FLOAT", {"default": 1.0, "min": -1, "max": 3, "step": 0.05}), + "weight_faceidv2": ("FLOAT", {"default": 1.0, "min": -1, "max": 5.0, "step": 0.05 }), + "weight_type": (weight_types,), + "combine_embeds": (["concat", "add", "subtract", "average", "norm average"],), + "start_at": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "end_at": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'],), + "cache_mode": (["insightface only", "clip_vision only","ipadapter only", "all", "none"], {"default": "all"},), + "use_tiled": ("BOOLEAN", {"default": False},), + "use_batch": ("BOOLEAN", {"default": False},), + "sharpening": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.05}), + }, + + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + "optional_ipadapter": ("IPADAPTER",), + "layer_weights": ("STRING", {"default": "", "multiline": True, "placeholder": "Mad Scientist Layer Weights"}), + } + } + + RETURN_TYPES = ("MODEL", "IMAGE", "MASK", "IPADAPTER",) + RETURN_NAMES = ("model", "images", "masks", "ipadapter", ) + CATEGORY = "EasyUse/Adapter" + FUNCTION = "apply" + + def apply(self, model, image, preset, lora_strength, provider, weight, weight_faceidv2, weight_type, combine_embeds, start_at, end_at, embeds_scaling, cache_mode, use_tiled, use_batch, sharpening, weight_style=1.0, weight_composition=1.0, 
image_style=None, image_composition=None, expand_style=False, image_negative=None, clip_vision=None, attn_mask=None, optional_ipadapter=None, layer_weights=None, weight_kolors=None): + images, masks = image, [None] + model, ipadapter = self.load_model(model, preset, lora_strength, provider, clip_vision=clip_vision, optional_ipadapter=optional_ipadapter, cache_mode=cache_mode) + + if weight_kolors is None: + weight_kolors = weight + + if layer_weights: + if "IPAdapterMS" not in ALL_NODE_CLASS_MAPPINGS: + self.error() + cls = ALL_NODE_CLASS_MAPPINGS["IPAdapterAdvanced"] + model, images = cls().apply_ipadapter(model, ipadapter, weight=weight, weight_type=weight_type, start_at=start_at, end_at=end_at, combine_embeds=combine_embeds, weight_faceidv2=weight_faceidv2, image=image, image_negative=image_negative, weight_style=weight_style, weight_composition=weight_composition, image_style=image_style, image_composition=image_composition, expand_style=expand_style, clip_vision=clip_vision, attn_mask=attn_mask, insightface=None, embeds_scaling=embeds_scaling, layer_weights=layer_weights, weight_kolors=weight_kolors) + elif use_tiled: + if use_batch: + if "IPAdapterTiledBatch" not in ALL_NODE_CLASS_MAPPINGS: + self.error() + cls = ALL_NODE_CLASS_MAPPINGS["IPAdapterTiledBatch"] + else: + if "IPAdapterTiled" not in ALL_NODE_CLASS_MAPPINGS: + self.error() + cls = ALL_NODE_CLASS_MAPPINGS["IPAdapterTiled"] + model, images, masks = cls().apply_tiled(model, ipadapter, image=image, weight=weight, weight_type=weight_type, start_at=start_at, end_at=end_at, sharpening=sharpening, combine_embeds=combine_embeds, image_negative=image_negative, attn_mask=attn_mask, clip_vision=clip_vision, embeds_scaling=embeds_scaling) + else: + if use_batch: + if "IPAdapterBatch" not in ALL_NODE_CLASS_MAPPINGS: + self.error() + cls = ALL_NODE_CLASS_MAPPINGS["IPAdapterBatch"] + else: + if "IPAdapterAdvanced" not in ALL_NODE_CLASS_MAPPINGS: + self.error() + cls = ALL_NODE_CLASS_MAPPINGS["IPAdapterAdvanced"] + model, images = cls().apply_ipadapter(model, ipadapter, weight=weight, weight_type=weight_type, start_at=start_at, end_at=end_at, combine_embeds=combine_embeds, weight_faceidv2=weight_faceidv2, image=image, image_negative=image_negative, weight_style=1.0, weight_composition=1.0, image_style=image_style, image_composition=image_composition, expand_style=expand_style, clip_vision=clip_vision, attn_mask=attn_mask, insightface=None, embeds_scaling=embeds_scaling, weight_kolors=weight_kolors) + if images is None: + images = image + return (model, images, masks, ipadapter) + +class ipadapterApplyFaceIDKolors(ipadapterApplyAdvanced): + + @classmethod + def INPUT_TYPES(cls): + ipa_cls = cls() + presets = ipa_cls.presets + weight_types = ipa_cls.weight_types + return { + "required": { + "model": ("MODEL",), + "image": ("IMAGE",), + "preset": (['FACEID PLUS KOLORS'], {"default":"FACEID PLUS KOLORS"}), + "lora_strength": ("FLOAT", {"default": 0.6, "min": 0, "max": 1, "step": 0.01}), + "provider": (["CPU", "CUDA", "ROCM", "DirectML", "OpenVINO", "CoreML"],), + "weight": ("FLOAT", {"default": 0.8, "min": -1, "max": 3, "step": 0.05}), + "weight_faceidv2": ("FLOAT", {"default": 1.0, "min": -1, "max": 5.0, "step": 0.05}), + "weight_kolors": ("FLOAT", {"default": 0.8, "min": -1, "max": 5.0, "step": 0.05}), + "weight_type": (weight_types,), + "combine_embeds": (["concat", "add", "subtract", "average", "norm average"],), + "start_at": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "end_at": ("FLOAT", {"default": 1.0, "min": 
0.0, "max": 1.0, "step": 0.001}), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'],), + "cache_mode": (["insightface only", "clip_vision only", "ipadapter only", "all", "none"], {"default": "all"},), + "use_tiled": ("BOOLEAN", {"default": False},), + "use_batch": ("BOOLEAN", {"default": False},), + "sharpening": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.05}), + }, + + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + "optional_ipadapter": ("IPADAPTER",), + } + } + + +class ipadapterStyleComposition(ipadapter): + def __init__(self): + super().__init__() + pass + + @classmethod + def INPUT_TYPES(cls): + ipa_cls = cls() + normal_presets = ipa_cls.normal_presets + weight_types = ipa_cls.weight_types + return { + "required": { + "model": ("MODEL",), + "image_style": ("IMAGE",), + "preset": (normal_presets,), + "weight_style": ("FLOAT", {"default": 1.0, "min": -1, "max": 5, "step": 0.05}), + "weight_composition": ("FLOAT", {"default": 1.0, "min": -1, "max": 5, "step": 0.05}), + "expand_style": ("BOOLEAN", {"default": False}), + "combine_embeds": (["concat", "add", "subtract", "average", "norm average"], {"default": "average"}), + "start_at": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "end_at": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'],), + "cache_mode": (["insightface only", "clip_vision only", "ipadapter only", "all", "none"], + {"default": "all"},), + }, + "optional": { + "image_composition": ("IMAGE",), + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + "optional_ipadapter": ("IPADAPTER",), + } + } + + CATEGORY = "EasyUse/Adapter" + + RETURN_TYPES = ("MODEL", "IPADAPTER",) + RETURN_NAMES = ("model", "ipadapter",) + CATEGORY = "EasyUse/Adapter" + FUNCTION = "apply" + + def apply(self, model, preset, weight_style, weight_composition, expand_style, combine_embeds, start_at, end_at, embeds_scaling, cache_mode, image_style=None , image_composition=None, image_negative=None, clip_vision=None, attn_mask=None, optional_ipadapter=None): + model, ipadapter = self.load_model(model, preset, 0, 'CPU', clip_vision=None, optional_ipadapter=optional_ipadapter, cache_mode=cache_mode) + + if "IPAdapterAdvanced" not in ALL_NODE_CLASS_MAPPINGS: + self.error() + cls = ALL_NODE_CLASS_MAPPINGS["IPAdapterAdvanced"] + + model, image = cls().apply_ipadapter(model, ipadapter, start_at=start_at, end_at=end_at, weight_style=weight_style, weight_composition=weight_composition, weight_type='linear', combine_embeds=combine_embeds, weight_faceidv2=weight_composition, image_style=image_style, image_composition=image_composition, image_negative=image_negative, expand_style=expand_style, clip_vision=clip_vision, attn_mask=attn_mask, insightface=None, embeds_scaling=embeds_scaling) + return (model, ipadapter) + +class ipadapterApplyEncoder(ipadapter): + def __init__(self): + super().__init__() + pass + + @classmethod + def INPUT_TYPES(cls): + ipa_cls = cls() + normal_presets = ipa_cls.normal_presets + max_embeds_num = 4 + inputs = { + "required": { + "model": ("MODEL",), + "clip_vision": ("CLIP_VISION",), + "image1": ("IMAGE",), + "preset": (normal_presets,), + "num_embeds": ("INT", {"default": 2, "min": 1, "max": max_embeds_num}), + }, + "optional": {} + } + + for i in range(1, max_embeds_num + 1): + if i > 1: + 
inputs["optional"][f"image{i}"] = ("IMAGE",) + for i in range(1, max_embeds_num + 1): + inputs["optional"][f"mask{i}"] = ("MASK",) + inputs["optional"][f"weight{i}"] = ("FLOAT", {"default": 1.0, "min": -1, "max": 3, "step": 0.05}) + inputs["optional"]["combine_method"] = (["concat", "add", "subtract", "average", "norm average", "max", "min"],) + inputs["optional"]["optional_ipadapter"] = ("IPADAPTER",) + inputs["optional"]["pos_embeds"] = ("EMBEDS",) + inputs["optional"]["neg_embeds"] = ("EMBEDS",) + return inputs + + RETURN_TYPES = ("MODEL", "CLIP_VISION","IPADAPTER", "EMBEDS", "EMBEDS", ) + RETURN_NAMES = ("model", "clip_vision","ipadapter", "pos_embed", "neg_embed",) + CATEGORY = "EasyUse/Adapter" + FUNCTION = "apply" + + def batch(self, embeds, method): + if method == 'concat' and len(embeds) == 1: + return (embeds[0],) + + embeds = [embed for embed in embeds if embed is not None] + embeds = torch.cat(embeds, dim=0) + + match method: + case "add": + embeds = torch.sum(embeds, dim=0).unsqueeze(0) + case "subtract": + embeds = embeds[0] - torch.mean(embeds[1:], dim=0) + embeds = embeds.unsqueeze(0) + case "average": + embeds = torch.mean(embeds, dim=0).unsqueeze(0) + case "norm average": + embeds = torch.mean(embeds / torch.norm(embeds, dim=0, keepdim=True), dim=0).unsqueeze(0) + case "max": + embeds = torch.max(embeds, dim=0).values.unsqueeze(0) + case "min": + embeds = torch.min(embeds, dim=0).values.unsqueeze(0) + + return embeds + + def apply(self, **kwargs): + model = kwargs['model'] + clip_vision = kwargs['clip_vision'] + preset = kwargs['preset'] + if 'optional_ipadapter' in kwargs: + ipadapter = kwargs['optional_ipadapter'] + else: + model, ipadapter = self.load_model(model, preset, 0, 'CPU', clip_vision=clip_vision, optional_ipadapter=None, cache_mode='none') + + if "IPAdapterEncoder" not in ALL_NODE_CLASS_MAPPINGS: + self.error() + encoder_cls = ALL_NODE_CLASS_MAPPINGS["IPAdapterEncoder"] + pos_embeds = kwargs["pos_embeds"] if "pos_embeds" in kwargs else [] + neg_embeds = kwargs["neg_embeds"] if "neg_embeds" in kwargs else [] + for i in range(1, kwargs['num_embeds'] + 1): + if f"image{i}" not in kwargs: + raise Exception(f"image{i} is required") + kwargs[f"mask{i}"] = kwargs[f"mask{i}"] if f"mask{i}" in kwargs else None + kwargs[f"weight{i}"] = kwargs[f"weight{i}"] if f"weight{i}" in kwargs else 1.0 + + pos, neg = encoder_cls().encode(ipadapter, kwargs[f"image{i}"], kwargs[f"weight{i}"], kwargs[f"mask{i}"], clip_vision=clip_vision) + pos_embeds.append(pos) + neg_embeds.append(neg) + + pos_embeds = self.batch(pos_embeds, kwargs['combine_method']) + neg_embeds = self.batch(neg_embeds, kwargs['combine_method']) + + return (model,clip_vision, ipadapter, pos_embeds, neg_embeds) + +class ipadapterApplyEmbeds(ipadapter): + def __init__(self): + super().__init__() + pass + + @classmethod + def INPUT_TYPES(cls): + ipa_cls = cls() + weight_types = ipa_cls.weight_types + return { + "required": { + "model": ("MODEL",), + "clip_vision": ("CLIP_VISION",), + "ipadapter": ("IPADAPTER",), + "pos_embed": ("EMBEDS",), + "weight": ("FLOAT", {"default": 1.0, "min": -1, "max": 3, "step": 0.05}), + "weight_type": (weight_types,), + "start_at": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "end_at": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'],), + }, + + "optional": { + "neg_embed": ("EMBEDS",), + "attn_mask": ("MASK",), + } + } + + RETURN_TYPES = ("MODEL", "IPADAPTER",) 
+ RETURN_NAMES = ("model", "ipadapter", ) + CATEGORY = "EasyUse/Adapter" + FUNCTION = "apply" + + def apply(self, model, ipadapter, clip_vision, pos_embed, weight, weight_type, start_at, end_at, embeds_scaling, attn_mask=None, neg_embed=None,): + if "IPAdapterEmbeds" not in ALL_NODE_CLASS_MAPPINGS: + self.error() + + cls = ALL_NODE_CLASS_MAPPINGS["IPAdapterEmbeds"] + model, image = cls().apply_ipadapter(model, ipadapter, pos_embed, weight, weight_type, start_at, end_at, neg_embed=neg_embed, attn_mask=attn_mask, clip_vision=clip_vision, embeds_scaling=embeds_scaling) + + return (model, ipadapter) + +class ipadapterApplyRegional(ipadapter): + def __init__(self): + super().__init__() + pass + + @classmethod + def INPUT_TYPES(cls): + ipa_cls = cls() + weight_types = ipa_cls.weight_types + return { + "required": { + "pipe": ("PIPE_LINE",), + "image": ("IMAGE",), + "positive": ("STRING", {"default": "", "placeholder": "positive", "multiline": True}), + "negative": ("STRING", {"default": "", "placeholder": "negative", "multiline": True}), + "image_weight": ("FLOAT", {"default": 1.0, "min": -1.0, "max": 3.0, "step": 0.05}), + "prompt_weight": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.05}), + "weight_type": (weight_types,), + "start_at": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "end_at": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001}), + }, + + "optional": { + "mask": ("MASK",), + "optional_ipadapter_params": ("IPADAPTER_PARAMS",), + }, + "hidden": {"prompt": "PROMPT", "my_unique_id": "UNIQUE_ID"} + } + + RETURN_TYPES = ("PIPE_LINE", "IPADAPTER_PARAMS", "CONDITIONING", "CONDITIONING") + RETURN_NAMES = ("pipe", "ipadapter_params", "positive", "negative") + CATEGORY = "EasyUse/Adapter" + FUNCTION = "apply" + + def apply(self, pipe, image, positive, negative, image_weight, prompt_weight, weight_type, start_at, end_at, mask=None, optional_ipadapter_params=None, prompt=None, my_unique_id=None): + model = pipe['model'] + clip = pipe['clip'] + clip_skip = pipe['loader_settings']['clip_skip'] + a1111_prompt_style = pipe['loader_settings']['a1111_prompt_style'] + pipe_lora_stack = pipe['loader_settings']['lora_stack'] + positive_token_normalization = pipe['loader_settings']['positive_token_normalization'] + positive_weight_interpretation = pipe['loader_settings']['positive_weight_interpretation'] + negative_token_normalization = pipe['loader_settings']['negative_token_normalization'] + negative_weight_interpretation = pipe['loader_settings']['negative_weight_interpretation'] + if positive == '': + positive = pipe['loader_settings']['positive'] + if negative == '': + negative = pipe['loader_settings']['negative'] + + if not clip: + raise Exception("No CLIP found") + + positive_embeddings_final, positive_wildcard_prompt, model, clip = prompt_to_cond('positive', model, clip, clip_skip, pipe_lora_stack, positive, positive_token_normalization, positive_weight_interpretation, a1111_prompt_style, my_unique_id, prompt, easyCache) + negative_embeddings_final, negative_wildcard_prompt, model, clip = prompt_to_cond('negative', model, clip, clip_skip, pipe_lora_stack, negative, negative_token_normalization, negative_weight_interpretation, a1111_prompt_style, my_unique_id, prompt, easyCache) + + #ipadapter regional + if "IPAdapterRegionalConditioning" not in ALL_NODE_CLASS_MAPPINGS: + self.error() + + cls = ALL_NODE_CLASS_MAPPINGS["IPAdapterRegionalConditioning"] + ipadapter_params, new_positive_embeds, new_negative_embeds = cls().conditioning(image, 
image_weight, prompt_weight, weight_type, start_at, end_at, mask=mask, positive=positive_embeddings_final, negative=negative_embeddings_final) + + if optional_ipadapter_params is not None: + positive_embeds = pipe['positive'] + new_positive_embeds + negative_embeds = pipe['negative'] + new_negative_embeds + _ipadapter_params = { + "image": optional_ipadapter_params["image"] + ipadapter_params["image"], + "attn_mask": optional_ipadapter_params["attn_mask"] + ipadapter_params["attn_mask"], + "weight": optional_ipadapter_params["weight"] + ipadapter_params["weight"], + "weight_type": optional_ipadapter_params["weight_type"] + ipadapter_params["weight_type"], + "start_at": optional_ipadapter_params["start_at"] + ipadapter_params["start_at"], + "end_at": optional_ipadapter_params["end_at"] + ipadapter_params["end_at"], + } + ipadapter_params = _ipadapter_params + del _ipadapter_params + else: + positive_embeds = new_positive_embeds + negative_embeds = new_negative_embeds + + new_pipe = { + **pipe, + "positive": positive_embeds, + "negative": negative_embeds, + } + + del pipe + + return (new_pipe, ipadapter_params, positive_embeds, negative_embeds) + +class ipadapterApplyFromParams(ipadapter): + def __init__(self): + super().__init__() + pass + + @classmethod + def INPUT_TYPES(cls): + ipa_cls = cls() + normal_presets = ipa_cls.normal_presets + return { + "required": { + "model": ("MODEL",), + "preset": (normal_presets,), + "ipadapter_params": ("IPADAPTER_PARAMS",), + "combine_embeds": (["concat", "add", "subtract", "average", "norm average", "max", "min"],), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'],), + "cache_mode": (["insightface only", "clip_vision only", "ipadapter only", "all", "none"], + {"default": "insightface only"}), + }, + + "optional": { + "optional_ipadapter": ("IPADAPTER",), + "image_negative": ("IMAGE",), + } + } + + RETURN_TYPES = ("MODEL", "IPADAPTER",) + RETURN_NAMES = ("model", "ipadapter", ) + CATEGORY = "EasyUse/Adapter" + FUNCTION = "apply" + + def apply(self, model, preset, ipadapter_params, combine_embeds, embeds_scaling, cache_mode, optional_ipadapter=None, image_negative=None,): + model, ipadapter = self.load_model(model, preset, 0, 'CPU', clip_vision=None, optional_ipadapter=optional_ipadapter, cache_mode=cache_mode) + if "IPAdapterFromParams" not in ALL_NODE_CLASS_MAPPINGS: + self.error() + cls = ALL_NODE_CLASS_MAPPINGS["IPAdapterFromParams"] + model, image = cls().apply_ipadapter(model, ipadapter, clip_vision=None, combine_embeds=combine_embeds, embeds_scaling=embeds_scaling, image_negative=image_negative, ipadapter_params=ipadapter_params) + + return (model, ipadapter) + +#Apply InstantID +class instantID: + + def error(self): + raise Exception(f"[ERROR] To use instantIDApply, you need to install 'ComfyUI_InstantID'") + + def run(self, pipe, image, instantid_file, insightface, control_net_name, cn_strength, cn_soft_weights, weight, start_at, end_at, noise, image_kps=None, mask=None, control_net=None, positive=None, negative=None, prompt=None, extra_pnginfo=None, my_unique_id=None): + instantid_model, insightface_model, face_embeds = None, None, None + model = pipe['model'] + # Load InstantID + cache_key = 'instantID' + if cache_key in backend_cache.cache: + log_node_info("easy instantIDApply","Using InstantIDModel Cached") + _, instantid_model = backend_cache.cache[cache_key][1] + if "InstantIDModelLoader" in ALL_NODE_CLASS_MAPPINGS: + load_instant_cls = ALL_NODE_CLASS_MAPPINGS["InstantIDModelLoader"] + instantid_model, = 
load_instant_cls().load_model(instantid_file) + backend_cache.update_cache(cache_key, 'instantid', (False, instantid_model)) + else: + self.error() + icache_key = 'insightface-' + insightface + if icache_key in backend_cache.cache: + log_node_info("easy instantIDApply", f"Using InsightFaceModel {insightface} Cached") + _, insightface_model = backend_cache.cache[icache_key][1] + elif "InstantIDFaceAnalysis" in ALL_NODE_CLASS_MAPPINGS: + load_insightface_cls = ALL_NODE_CLASS_MAPPINGS["InstantIDFaceAnalysis"] + insightface_model, = load_insightface_cls().load_insight_face(insightface) + backend_cache.update_cache(icache_key, 'insightface', (False, insightface_model)) + else: + self.error() + + # Apply InstantID + if "ApplyInstantID" in ALL_NODE_CLASS_MAPPINGS: + instantid_apply = ALL_NODE_CLASS_MAPPINGS['ApplyInstantID'] + if control_net is None: + control_net = easyCache.load_controlnet(control_net_name, cn_soft_weights) + model, positive, negative = instantid_apply().apply_instantid(instantid_model, insightface_model, control_net, image, model, positive, negative, start_at, end_at, weight=weight, ip_weight=None, cn_strength=cn_strength, noise=noise, image_kps=image_kps, mask=mask) + else: + self.error() + + new_pipe = { + "model": model, + "positive": positive, + "negative": negative, + "vae": pipe['vae'], + "clip": pipe['clip'], + + "samples": pipe["samples"], + "images": pipe["images"], + "seed": 0, + + "loader_settings": pipe["loader_settings"] + } + + del pipe + + return (new_pipe, model, positive, negative) + +class instantIDApply(instantID): + + def __init__(self): + super().__init__() + pass + + @classmethod + def INPUT_TYPES(cls): + return { + "required":{ + "pipe": ("PIPE_LINE",), + "image": ("IMAGE",), + "instantid_file": (folder_paths.get_filename_list("instantid"),), + "insightface": (["CPU", "CUDA", "ROCM"],), + "control_net_name": (folder_paths.get_filename_list("controlnet"),), + "cn_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "cn_soft_weights": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001},), + "weight": ("FLOAT", {"default": .8, "min": 0.0, "max": 5.0, "step": 0.01, }), + "start_at": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001, }), + "end_at": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001, }), + "noise": ("FLOAT", {"default": 0.35, "min": 0.0, "max": 1.0, "step": 0.05, }), + }, + "optional": { + "image_kps": ("IMAGE",), + "mask": ("MASK",), + "control_net": ("CONTROL_NET",), + }, + "hidden": { + "prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID" + }, + } + + RETURN_TYPES = ("PIPE_LINE", "MODEL", "CONDITIONING", "CONDITIONING") + RETURN_NAMES = ("pipe", "model", "positive", "negative") + + FUNCTION = "apply" + CATEGORY = "EasyUse/Adapter" + + + def apply(self, pipe, image, instantid_file, insightface, control_net_name, cn_strength, cn_soft_weights, weight, start_at, end_at, noise, image_kps=None, mask=None, control_net=None, prompt=None, extra_pnginfo=None, my_unique_id=None): + positive = pipe['positive'] + negative = pipe['negative'] + return self.run(pipe, image, instantid_file, insightface, control_net_name, cn_strength, cn_soft_weights, weight, start_at, end_at, noise, image_kps, mask, control_net, positive, negative, prompt, extra_pnginfo, my_unique_id) + +#Apply InstantID Advanced +class instantIDApplyAdvanced(instantID): + + def __init__(self): + super().__init__() + pass + + @classmethod + def INPUT_TYPES(cls): + return { + "required":{ + 
"pipe": ("PIPE_LINE",), + "image": ("IMAGE",), + "instantid_file": (folder_paths.get_filename_list("instantid"),), + "insightface": (["CPU", "CUDA", "ROCM"],), + "control_net_name": (folder_paths.get_filename_list("controlnet"),), + "cn_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "cn_soft_weights": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001},), + "weight": ("FLOAT", {"default": .8, "min": 0.0, "max": 5.0, "step": 0.01, }), + "start_at": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001, }), + "end_at": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001, }), + "noise": ("FLOAT", {"default": 0.35, "min": 0.0, "max": 1.0, "step": 0.05, }), + }, + "optional": { + "image_kps": ("IMAGE",), + "mask": ("MASK",), + "control_net": ("CONTROL_NET",), + "positive": ("CONDITIONING",), + "negative": ("CONDITIONING",), + }, + "hidden": { + "prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID" + }, + } + + RETURN_TYPES = ("PIPE_LINE", "MODEL", "CONDITIONING", "CONDITIONING") + RETURN_NAMES = ("pipe", "model", "positive", "negative") + + FUNCTION = "apply_advanced" + CATEGORY = "EasyUse/Adapter" + + def apply_advanced(self, pipe, image, instantid_file, insightface, control_net_name, cn_strength, cn_soft_weights, weight, start_at, end_at, noise, image_kps=None, mask=None, control_net=None, positive=None, negative=None, prompt=None, extra_pnginfo=None, my_unique_id=None): + + positive = positive if positive is not None else pipe['positive'] + negative = negative if negative is not None else pipe['negative'] + + return self.run(pipe, image, instantid_file, insightface, control_net_name, cn_strength, cn_soft_weights, weight, start_at, end_at, noise, image_kps, mask, control_net, positive, negative, prompt, extra_pnginfo, my_unique_id) + +class applyPulID: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL",), + "pulid_file": (folder_paths.get_filename_list("pulid"),), + "insightface": (["CPU", "CUDA", "ROCM"],), + "image": ("IMAGE",), + "method": (["fidelity", "style", "neutral"],), + "weight": ("FLOAT", {"default": 1.0, "min": -1.0, "max": 5.0, "step": 0.05}), + "start_at": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "end_at": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001}), + }, + "optional": { + "attn_mask": ("MASK",), + }, + } + + RETURN_TYPES = ("MODEL",) + RETURN_NAMES = ("model",) + + FUNCTION = "run" + CATEGORY = "EasyUse/Adapter" + + def error(self): + raise Exception(f"[ERROR] To use pulIDApply, you need to install 'ComfyUI_PulID'") + + def run(self, model, image, pulid_file, insightface, weight, start_at, end_at, method=None, noise=0.0, fidelity=None, projection=None, attn_mask=None, prompt=None, extra_pnginfo=None, my_unique_id=None): + pulid_model, insightface_model, eva_clip = None, None, None + # Load PulID + cache_key = 'pulID' + if cache_key in backend_cache.cache: + log_node_info("easy pulIDApply","Using InstantIDModel Cached") + _, pulid_model = backend_cache.cache[cache_key][1] + if "PulidModelLoader" in ALL_NODE_CLASS_MAPPINGS: + load_pulid_cls = ALL_NODE_CLASS_MAPPINGS["PulidModelLoader"] + pulid_model, = load_pulid_cls().load_model(pulid_file) + backend_cache.update_cache(cache_key, 'pulid', (False, pulid_model)) + else: + self.error() + # Load Insightface + icache_key = 'insightface-' + insightface + if icache_key in backend_cache.cache: + log_node_info("easy pulIDApply", f"Using InsightFaceModel 
{insightface} Cached") + _, insightface_model = backend_cache.cache[icache_key][1] + elif "PulidInsightFaceLoader" in ALL_NODE_CLASS_MAPPINGS: + load_insightface_cls = ALL_NODE_CLASS_MAPPINGS["PulidInsightFaceLoader"] + insightface_model, = load_insightface_cls().load_insightface(insightface) + backend_cache.update_cache(icache_key, 'insightface', (False, insightface_model)) + else: + self.error() + # Load Eva clip + ecache_key = 'eva_clip' + if ecache_key in backend_cache.cache: + log_node_info("easy pulIDApply", f"Using EVAClipModel Cached") + _, eva_clip = backend_cache.cache[ecache_key][1] + elif "PulidEvaClipLoader" in ALL_NODE_CLASS_MAPPINGS: + load_evaclip_cls = ALL_NODE_CLASS_MAPPINGS["PulidEvaClipLoader"] + eva_clip, = load_evaclip_cls().load_eva_clip() + backend_cache.update_cache(ecache_key, 'eva_clip', (False, eva_clip)) + else: + self.error() + + # Apply PulID + if method is not None: + if "ApplyPulid" in ALL_NODE_CLASS_MAPPINGS: + cls = ALL_NODE_CLASS_MAPPINGS['ApplyPulid'] + model, = cls().apply_pulid(model, pulid=pulid_model, eva_clip=eva_clip, face_analysis=insightface_model, image=image, weight=weight, method=method, start_at=start_at, end_at=end_at, attn_mask=attn_mask) + else: + self.error() + else: + if "ApplyPulidAdvanced" in ALL_NODE_CLASS_MAPPINGS: + cls = ALL_NODE_CLASS_MAPPINGS['ApplyPulidAdvanced'] + model, = cls().apply_pulid(model, pulid=pulid_model, eva_clip=eva_clip, face_analysis=insightface_model, image=image, weight=weight, projection=projection, fidelity=fidelity, noise=noise, start_at=start_at, end_at=end_at, attn_mask=attn_mask) + else: + self.error() + + return (model,) + +class applyPulIDADV(applyPulID): + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL",), + "pulid_file": (folder_paths.get_filename_list("pulid"),), + "insightface": (["CPU", "CUDA", "ROCM"],), + "image": ("IMAGE",), + "weight": ("FLOAT", {"default": 1.0, "min": -1.0, "max": 5.0, "step": 0.05}), + "projection": (["ortho_v2", "ortho", "none"], {"default":"ortho_v2"}), + "fidelity": ("INT", {"default": 8, "min": 0, "max": 32, "step": 1}), + "noise": ("FLOAT", {"default": 0.0, "min": -1.0, "max": 1.0, "step": 0.1}), + "start_at": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "end_at": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001}), + }, + "optional": { + "attn_mask": ("MASK",), + }, + } + +# ---------------------------------------------------------------适配器 结束----------------------------------------------------------------------# + +#---------------------------------------------------------------预采样 开始----------------------------------------------------------------------# + +# 预采样设置(基础) +class samplerSettings: + + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return {"required": + {"pipe": ("PIPE_LINE",), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS + new_schedulers,), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "seed": ("INT", {"default": 0, "min": 0, "max": MAX_SEED_NUM}), + }, + "optional": { + "image_to_latent": ("IMAGE",), + "latent": ("LATENT",), + }, + "hidden": + {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID"}, + } + + RETURN_TYPES = ("PIPE_LINE", ) + RETURN_NAMES = ("pipe",) + + FUNCTION = "settings" + CATEGORY = 
"EasyUse/PreSampling" + + def settings(self, pipe, steps, cfg, sampler_name, scheduler, denoise, seed, image_to_latent=None, latent=None, prompt=None, extra_pnginfo=None, my_unique_id=None): + # 图生图转换 + vae = pipe["vae"] + batch_size = pipe["loader_settings"]["batch_size"] if "batch_size" in pipe["loader_settings"] else 1 + if image_to_latent is not None: + _, height, width, _ = image_to_latent.shape + if height == 1 and width == 1: + samples = pipe["samples"] + images = pipe["images"] + else: + samples = {"samples": vae.encode(image_to_latent[:, :, :, :3])} + samples = RepeatLatentBatch().repeat(samples, batch_size)[0] + images = image_to_latent + elif latent is not None: + samples = latent + images = pipe["images"] + else: + samples = pipe["samples"] + images = pipe["images"] + + new_pipe = { + "model": pipe['model'], + "positive": pipe['positive'], + "negative": pipe['negative'], + "vae": pipe['vae'], + "clip": pipe['clip'], + + "samples": samples, + "images": images, + "seed": seed, + + "loader_settings": { + **pipe["loader_settings"], + "steps": steps, + "cfg": cfg, + "sampler_name": sampler_name, + "scheduler": scheduler, + "denoise": denoise, + "add_noise": "enabled" + } + } + + del pipe + + return {"ui": {"value": [seed]}, "result": (new_pipe,)} + +# 预采样设置(高级) +class samplerSettingsAdvanced: + + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return {"required": + {"pipe": ("PIPE_LINE",), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS + new_schedulers,), + "start_at_step": ("INT", {"default": 0, "min": 0, "max": 10000}), + "end_at_step": ("INT", {"default": 10000, "min": 0, "max": 10000}), + "add_noise": (["enable", "disable"],), + "seed": ("INT", {"default": 0, "min": 0, "max": MAX_SEED_NUM}), + "return_with_leftover_noise": (["disable", "enable"], ), + }, + "optional": { + "image_to_latent": ("IMAGE",), + "latent": ("LATENT",) + }, + "hidden": + {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID"}, + } + + RETURN_TYPES = ("PIPE_LINE", ) + RETURN_NAMES = ("pipe",) + + FUNCTION = "settings" + CATEGORY = "EasyUse/PreSampling" + + def settings(self, pipe, steps, cfg, sampler_name, scheduler, start_at_step, end_at_step, add_noise, seed, return_with_leftover_noise, image_to_latent=None, latent=None, prompt=None, extra_pnginfo=None, my_unique_id=None): + # 图生图转换 + vae = pipe["vae"] + batch_size = pipe["loader_settings"]["batch_size"] if "batch_size" in pipe["loader_settings"] else 1 + if image_to_latent is not None: + _, height, width, _ = image_to_latent.shape + if height == 1 and width == 1: + samples = pipe["samples"] + images = pipe["images"] + else: + samples = {"samples": vae.encode(image_to_latent[:, :, :, :3])} + samples = RepeatLatentBatch().repeat(samples, batch_size)[0] + images = image_to_latent + elif latent is not None: + samples = latent + images = pipe["images"] + else: + samples = pipe["samples"] + images = pipe["images"] + + force_full_denoise = True + if return_with_leftover_noise == "enable": + force_full_denoise = False + + new_pipe = { + "model": pipe['model'], + "positive": pipe['positive'], + "negative": pipe['negative'], + "vae": pipe['vae'], + "clip": pipe['clip'], + + "samples": samples, + "images": images, + "seed": seed, + + "loader_settings": { + **pipe["loader_settings"], + "steps": steps, + "cfg": cfg, + "sampler_name": 
sampler_name, + "scheduler": scheduler, + "start_step": start_at_step, + "last_step": end_at_step, + "denoise": 1.0, + "add_noise": add_noise, + "force_full_denoise": force_full_denoise + } + } + + del pipe + + return {"ui": {"value": [seed]}, "result": (new_pipe,)} + +# 预采样设置(噪声注入) +class samplerSettingsNoiseIn: + + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return {"required": + {"pipe": ("PIPE_LINE",), + "factor": ("FLOAT", {"default": 0.1, "min": 0.0, "max": 1.0, "step":0.01, "round": 0.01}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS+new_schedulers,), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "seed": ("INT", {"default": 0, "min": 0, "max": MAX_SEED_NUM}), + }, + "optional": { + "optional_noise_seed": ("INT",{"forceInput": True}), + "optional_latent": ("LATENT",), + }, + "hidden": + {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID"}, + } + + RETURN_TYPES = ("PIPE_LINE", ) + RETURN_NAMES = ("pipe",) + + FUNCTION = "settings" + CATEGORY = "EasyUse/PreSampling" + + def slerp(self, val, low, high): + dims = low.shape + + low = low.reshape(dims[0], -1) + high = high.reshape(dims[0], -1) + + low_norm = low / torch.norm(low, dim=1, keepdim=True) + high_norm = high / torch.norm(high, dim=1, keepdim=True) + + low_norm[low_norm != low_norm] = 0.0 + high_norm[high_norm != high_norm] = 0.0 + + omega = torch.acos((low_norm * high_norm).sum(1)) + so = torch.sin(omega) + res = (torch.sin((1.0 - val) * omega) / so).unsqueeze(1) * low + (torch.sin(val * omega) / so).unsqueeze( + 1) * high + + return res.reshape(dims) + + def prepare_mask(self, mask, shape): + mask = torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), + size=(shape[2], shape[3]), mode="bilinear") + mask = mask.expand((-1, shape[1], -1, -1)) + if mask.shape[0] < shape[0]: + mask = mask.repeat((shape[0] - 1) // mask.shape[0] + 1, 1, 1, 1)[:shape[0]] + return mask + + def expand_mask(self, mask, expand, tapered_corners): + try: + import scipy + + c = 0 if tapered_corners else 1 + kernel = np.array([[c, 1, c], + [1, 1, 1], + [c, 1, c]]) + mask = mask.reshape((-1, mask.shape[-2], mask.shape[-1])) + out = [] + for m in mask: + output = m.numpy() + for _ in range(abs(expand)): + if expand < 0: + output = scipy.ndimage.grey_erosion(output, footprint=kernel) + else: + output = scipy.ndimage.grey_dilation(output, footprint=kernel) + output = torch.from_numpy(output) + out.append(output) + + return torch.stack(out, dim=0) + except: + return None + + def settings(self, pipe, factor, steps, cfg, sampler_name, scheduler, denoise, seed, optional_noise_seed=None, optional_latent=None, prompt=None, extra_pnginfo=None, my_unique_id=None): + latent = optional_latent if optional_latent is not None else pipe["samples"] + model = pipe["model"] + + # generate base noise + batch_size, _, height, width = latent["samples"].shape + generator = torch.manual_seed(seed) + base_noise = torch.randn((1, 4, height, width), dtype=torch.float32, device="cpu", generator=generator).repeat(batch_size, 1, 1, 1).cpu() + + # generate variation noise + if optional_noise_seed is None or optional_noise_seed == seed: + optional_noise_seed = seed+1 + generator = torch.manual_seed(optional_noise_seed) + variation_noise = torch.randn((batch_size, 4, height, width), 
dtype=torch.float32, device="cpu", + generator=generator).cpu() + + slerp_noise = self.slerp(factor, base_noise, variation_noise) + + end_at_step = steps # min(steps, end_at_step) + start_at_step = round(end_at_step - end_at_step * denoise) + + device = comfy.model_management.get_torch_device() + comfy.model_management.load_model_gpu(model) + model_patcher = comfy.model_patcher.ModelPatcher(model.model, load_device=device, offload_device=comfy.model_management.unet_offload_device()) + sampler = comfy.samplers.KSampler(model_patcher, steps=steps, device=device, sampler=sampler_name, + scheduler=scheduler, denoise=1.0, model_options=model.model_options) + sigmas = sampler.sigmas + sigma = sigmas[start_at_step] - sigmas[end_at_step] + sigma /= model.model.latent_format.scale_factor + sigma = sigma.cpu().numpy() + + work_latent = latent.copy() + work_latent["samples"] = latent["samples"].clone() + slerp_noise * sigma + + if "noise_mask" in latent: + noise_mask = self.prepare_mask(latent["noise_mask"], latent['samples'].shape) + work_latent["samples"] = noise_mask * work_latent["samples"] + (1-noise_mask) * latent["samples"] + work_latent['noise_mask'] = self.expand_mask(latent["noise_mask"].clone(), 5, True) + + if pipe is None: + pipe = {} + + new_pipe = { + "model": pipe['model'], + "positive": pipe['positive'], + "negative": pipe['negative'], + "vae": pipe['vae'], + "clip": pipe['clip'], + + "samples": work_latent, + "images": pipe['images'], + "seed": seed, + + "loader_settings": { + **pipe["loader_settings"], + "steps": steps, + "cfg": cfg, + "sampler_name": sampler_name, + "scheduler": scheduler, + "denoise": denoise, + "add_noise": "disable" + } + } + + return (new_pipe,) + +# 预采样设置(自定义) +import comfy_extras.nodes_custom_sampler as custom_samplers +from tqdm import trange +class samplerCustomSettings: + + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "pipe": ("PIPE_LINE",), + "guider": (['CFG','DualCFG','IP2P+DualCFG','Basic'],{"default":"Basic"}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "cfg_negative": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS + ['inversed_euler'],), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS + ['karrasADV','exponentialADV','polyExponential', 'sdturbo', 'vp', 'alignYourSteps', 'gits'],), + "coeff": ("FLOAT", {"default": 1.20, "min": 0.80, "max": 1.50, "step": 0.05}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "sigma_max": ("FLOAT", {"default": 14.614642, "min": 0.0, "max": 1000.0, "step": 0.01, "round": False}), + "sigma_min": ("FLOAT", {"default": 0.0291675, "min": 0.0, "max": 1000.0, "step": 0.01, "round": False}), + "rho": ("FLOAT", {"default": 7.0, "min": 0.0, "max": 100.0, "step": 0.01, "round": False}), + "beta_d": ("FLOAT", {"default": 19.9, "min": 0.0, "max": 1000.0, "step": 0.01, "round": False}), + "beta_min": ("FLOAT", {"default": 0.1, "min": 0.0, "max": 1000.0, "step": 0.01, "round": False}), + "eps_s": ("FLOAT", {"default": 0.001, "min": 0.0, "max": 1.0, "step": 0.0001, "round": False}), + "flip_sigmas": ("BOOLEAN", {"default": False}), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "add_noise": (["enable", "disable"], {"default": "enable"}), + "seed": ("INT", {"default": 0, "min": 0, "max": MAX_SEED_NUM}), + }, + "optional": { + "image_to_latent": ("IMAGE",), + "latent": ("LATENT",), + "optional_sampler":("SAMPLER",), + "optional_sigmas":("SIGMAS",), + }, + 
"hidden": + {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID"}, + } + + RETURN_TYPES = ("PIPE_LINE", ) + RETURN_NAMES = ("pipe",) + + FUNCTION = "settings" + CATEGORY = "EasyUse/PreSampling" + + def settings(self, pipe, guider, cfg, cfg_negative, sampler_name, scheduler, coeff, steps, sigma_max, sigma_min, rho, beta_d, beta_min, eps_s, flip_sigmas, denoise, add_noise, seed, image_to_latent=None, latent=None, optional_sampler=None, optional_sigmas=None, prompt=None, extra_pnginfo=None, my_unique_id=None): + + # 图生图转换 + vae = pipe["vae"] + model = pipe["model"] + positive = pipe['positive'] + negative = pipe['negative'] + batch_size = pipe["loader_settings"]["batch_size"] if "batch_size" in pipe["loader_settings"] else 1 + + if image_to_latent is not None: + _, height, width, _ = image_to_latent.shape + if height == 1 and width == 1: + samples = pipe["samples"] + images = pipe["images"] + else: + if guider == "IP2P+DualCFG": + positive, negative, latent = self.ip2p(pipe['positive'], pipe['negative'], vae, image_to_latent) + samples = latent + else: + samples = {"samples": vae.encode(image_to_latent[:, :, :, :3])} + samples = RepeatLatentBatch().repeat(samples, batch_size)[0] + images = image_to_latent + elif latent is not None: + if guider == "IP2P+DualCFG": + positive, negative, latent = self.ip2p(pipe['positive'], pipe['negative'], latent=latent) + samples = latent + else: + samples = latent + images = pipe["images"] + else: + samples = pipe["samples"] + images = pipe["images"] + + + new_pipe = { + "model": model, + "positive": positive, + "negative": negative, + "vae": pipe['vae'], + "clip": pipe['clip'], + + "samples": samples, + "images": images, + "seed": seed, + + "loader_settings": { + **pipe["loader_settings"], + "middle": pipe['negative'], + "steps": steps, + "cfg": cfg, + "cfg_negative": cfg_negative, + "sampler_name": sampler_name, + "scheduler": scheduler, + "denoise": denoise, + "add_noise": add_noise, + "custom": { + "guider": guider, + "coeff": coeff, + "sigma_max": sigma_max, + "sigma_min": sigma_min, + "rho": rho, + "beta_d": beta_d, + "beta_min": beta_min, + "eps_s": beta_min, + "flip_sigmas": flip_sigmas + }, + "optional_sampler": optional_sampler, + "optional_sigmas": optional_sigmas + } + } + + del pipe + + return {"ui": {"value": [seed]}, "result": (new_pipe,)} + +# 预采样设置(SDTurbo) +from .libs.gradual_latent_hires_fix import sample_dpmpp_2s_ancestral, sample_dpmpp_2m_sde, sample_lcm, sample_euler_ancestral +class sdTurboSettings: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "pipe": ("PIPE_LINE",), + "steps": ("INT", {"default": 1, "min": 1, "max": 10}), + "cfg": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.SAMPLER_NAMES,), + "eta": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01, "round": False}), + "s_noise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01, "round": False}), + "upscale_ratio": ("FLOAT", {"default": 2.0, "min": 0.0, "max": 16.0, "step": 0.01, "round": False}), + "start_step": ("INT", {"default": 5, "min": 0, "max": 1000, "step": 1}), + "end_step": ("INT", {"default": 15, "min": 0, "max": 1000, "step": 1}), + "upscale_n_step": ("INT", {"default": 3, "min": 0, "max": 1000, "step": 1}), + "unsharp_kernel_size": ("INT", {"default": 3, "min": 1, "max": 21, "step": 1}), + "unsharp_sigma": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 10.0, "step": 0.01, "round": False}), + "unsharp_strength": 
("FLOAT", {"default": 0.0, "min": 0.0, "max": 10.0, "step": 0.01, "round": False}), + "seed": ("INT", {"default": 0, "min": 0, "max": MAX_SEED_NUM}), + }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID"}, + } + + RETURN_TYPES = ("PIPE_LINE",) + RETURN_NAMES = ("pipe",) + + FUNCTION = "settings" + CATEGORY = "EasyUse/PreSampling" + + def settings(self, pipe, steps, cfg, sampler_name, eta, s_noise, upscale_ratio, start_step, end_step, upscale_n_step, unsharp_kernel_size, unsharp_sigma, unsharp_strength, seed, prompt=None, extra_pnginfo=None, my_unique_id=None): + model = pipe['model'] + # sigma + timesteps = torch.flip(torch.arange(1, 11) * 100 - 1, (0,))[:steps] + sigmas = model.model.model_sampling.sigma(timesteps) + sigmas = torch.cat([sigmas, sigmas.new_zeros([1])]) + + #sampler + sample_function = None + extra_options = { + "eta": eta, + "s_noise": s_noise, + "upscale_ratio": upscale_ratio, + "start_step": start_step, + "end_step": end_step, + "upscale_n_step": upscale_n_step, + "unsharp_kernel_size": unsharp_kernel_size, + "unsharp_sigma": unsharp_sigma, + "unsharp_strength": unsharp_strength, + } + match sampler_name: + case "euler_ancestral": + sample_function = sample_euler_ancestral + case "dpmpp_2s_ancestral": + sample_function = sample_dpmpp_2s_ancestral + case "dpmpp_2m_sde": + sample_function = sample_dpmpp_2m_sde + case "lcm": + sample_function = sample_lcm + + if sample_function is not None: + unsharp_kernel_size = unsharp_kernel_size if unsharp_kernel_size % 2 == 1 else unsharp_kernel_size + 1 + extra_options["unsharp_kernel_size"] = unsharp_kernel_size + _sampler = comfy.samplers.KSAMPLER(sample_function, extra_options) + else: + _sampler = comfy.samplers.sampler_object(sampler_name) + extra_options = None + + new_pipe = { + "model": pipe['model'], + "positive": pipe['positive'], + "negative": pipe['negative'], + "vae": pipe['vae'], + "clip": pipe['clip'], + + "samples": pipe["samples"], + "images": pipe["images"], + "seed": seed, + + "loader_settings": { + **pipe["loader_settings"], + "extra_options": extra_options, + "sampler": _sampler, + "sigmas": sigmas, + "steps": steps, + "cfg": cfg, + "add_noise": "enabled" + } + } + + del pipe + + return {"ui": {"value": [seed]}, "result": (new_pipe,)} + + +# cascade预采样参数 +class cascadeSettings: + + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return {"required": + {"pipe": ("PIPE_LINE",), + "encode_vae_name": (["None"] + folder_paths.get_filename_list("vae"),), + "decode_vae_name": (["None"] + folder_paths.get_filename_list("vae"),), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 4.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS, {"default":"euler_ancestral"}), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS, {"default":"simple"}), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "seed": ("INT", {"default": 0, "min": 0, "max": MAX_SEED_NUM}), + }, + "optional": { + "image_to_latent_c": ("IMAGE",), + "latent_c": ("LATENT",), + }, + "hidden":{"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID"}, + } + + RETURN_TYPES = ("PIPE_LINE",) + RETURN_NAMES = ("pipe",) + + FUNCTION = "settings" + CATEGORY = "EasyUse/PreSampling" + + def settings(self, pipe, encode_vae_name, decode_vae_name, steps, cfg, sampler_name, scheduler, denoise, seed, model=None, image_to_latent_c=None, latent_c=None, prompt=None, extra_pnginfo=None, 
my_unique_id=None): + images, samples_c = None, None + samples = pipe['samples'] + batch_size = pipe["loader_settings"]["batch_size"] if "batch_size" in pipe["loader_settings"] else 1 + + encode_vae_name = encode_vae_name if encode_vae_name is not None else pipe['loader_settings']['encode_vae_name'] + decode_vae_name = decode_vae_name if decode_vae_name is not None else pipe['loader_settings']['decode_vae_name'] + + if image_to_latent_c is not None: + if encode_vae_name != 'None': + encode_vae = easyCache.load_vae(encode_vae_name) + else: + encode_vae = pipe['vae'][0] + if "compression" not in pipe["loader_settings"]: + raise Exception("compression is not found") + compression = pipe["loader_settings"]['compression'] + width = image_to_latent_c.shape[-2] + height = image_to_latent_c.shape[-3] + out_width = (width // compression) * encode_vae.downscale_ratio + out_height = (height // compression) * encode_vae.downscale_ratio + + s = comfy.utils.common_upscale(image_to_latent_c.movedim(-1, 1), out_width, out_height, "bicubic", + "center").movedim(1, + -1) + c_latent = encode_vae.encode(s[:, :, :, :3]) + b_latent = torch.zeros([c_latent.shape[0], 4, height // 4, width // 4]) + + samples_c = {"samples": c_latent} + samples_c = RepeatLatentBatch().repeat(samples_c, batch_size)[0] + + samples_b = {"samples": b_latent} + samples_b = RepeatLatentBatch().repeat(samples_b, batch_size)[0] + samples = (samples_c, samples_b) + images = image_to_latent_c + elif latent_c is not None: + samples_c = latent_c + samples = (samples_c, samples[1]) + images = pipe["images"] + if samples_c is not None: + samples = (samples_c, samples[1]) + + new_pipe = { + "model": pipe['model'], + "positive": pipe['positive'], + "negative": pipe['negative'], + "vae": pipe['vae'], + "clip": pipe['clip'], + + "samples": samples, + "images": images, + "seed": seed, + + "loader_settings": { + **pipe["loader_settings"], + "encode_vae_name": encode_vae_name, + "decode_vae_name": decode_vae_name, + "steps": steps, + "cfg": cfg, + "sampler_name": sampler_name, + "scheduler": scheduler, + "denoise": denoise, + "add_noise": "enabled" + } + } + + sampler.update_value_by_id("pipe_line", my_unique_id, new_pipe) + + del pipe + + return {"ui": {"value": [seed]}, "result": (new_pipe,)} + +# layerDiffusion预采样参数 +class layerDiffusionSettings: + + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return {"required": + { + "pipe": ("PIPE_LINE",), + "method": ([LayerMethod.FG_ONLY_ATTN.value, LayerMethod.FG_ONLY_CONV.value, LayerMethod.EVERYTHING.value, LayerMethod.FG_TO_BLEND.value, LayerMethod.BG_TO_BLEND.value],), + "weight": ("FLOAT",{"default": 1.0, "min": -1, "max": 3, "step": 0.05},), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS, {"default": "euler"}), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS+ new_schedulers, {"default": "normal"}), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "seed": ("INT", {"default": 0, "min": 0, "max": MAX_SEED_NUM}), + }, + "optional": { + "image": ("IMAGE",), + "blended_image": ("IMAGE",), + "mask": ("MASK",), + # "latent": ("LATENT",), + # "blended_latent": ("LATENT",), + }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID"}, + } + + RETURN_TYPES = ("PIPE_LINE",) + RETURN_NAMES = ("pipe",) + + FUNCTION = "settings" + CATEGORY = "EasyUse/PreSampling" + + def 
get_layer_diffusion_method(self, method, has_blend_latent): + method = LayerMethod(method) + if has_blend_latent: + if method == LayerMethod.BG_TO_BLEND: + method = LayerMethod.BG_BLEND_TO_FG + elif method == LayerMethod.FG_TO_BLEND: + method = LayerMethod.FG_BLEND_TO_BG + return method + + def settings(self, pipe, method, weight, steps, cfg, sampler_name, scheduler, denoise, seed, image=None, blended_image=None, mask=None, prompt=None, extra_pnginfo=None, my_unique_id=None): + blend_samples = pipe['blend_samples'] if "blend_samples" in pipe else None + vae = pipe["vae"] + batch_size = pipe["loader_settings"]["batch_size"] if "batch_size" in pipe["loader_settings"] else 1 + + method = self.get_layer_diffusion_method(method, blend_samples is not None or blended_image is not None) + + if image is not None or "image" in pipe: + image = image if image is not None else pipe['image'] + if mask is not None: + print('inpaint') + samples, = VAEEncodeForInpaint().encode(vae, image, mask) + else: + samples = {"samples": vae.encode(image[:,:,:,:3])} + samples = RepeatLatentBatch().repeat(samples, batch_size)[0] + images = image + elif "samp_images" in pipe: + samples = {"samples": vae.encode(pipe["samp_images"][:,:,:,:3])} + samples = RepeatLatentBatch().repeat(samples, batch_size)[0] + images = pipe["samp_images"] + else: + if method not in [LayerMethod.FG_ONLY_ATTN, LayerMethod.FG_ONLY_CONV, LayerMethod.EVERYTHING]: + raise Exception("image is missing") + + samples = pipe["samples"] + images = pipe["images"] + + if method in [LayerMethod.BG_BLEND_TO_FG, LayerMethod.FG_BLEND_TO_BG]: + if blended_image is None and blend_samples is None: + raise Exception("blended_image is missing") + elif blended_image is not None: + blend_samples = {"samples": vae.encode(blended_image[:,:,:,:3])} + blend_samples = RepeatLatentBatch().repeat(blend_samples, batch_size)[0] + + new_pipe = { + "model": pipe['model'], + "positive": pipe['positive'], + "negative": pipe['negative'], + "vae": pipe['vae'], + "clip": pipe['clip'], + + "samples": samples, + "blend_samples": blend_samples, + "images": images, + "seed": seed, + + "loader_settings": { + **pipe["loader_settings"], + "steps": steps, + "cfg": cfg, + "sampler_name": sampler_name, + "scheduler": scheduler, + "denoise": denoise, + "add_noise": "enabled", + "layer_diffusion_method": method, + "layer_diffusion_weight": weight, + } + } + + del pipe + + return {"ui": {"value": [seed]}, "result": (new_pipe,)} + +# 预采样设置(layerDiffuse附加) +class layerDiffusionSettingsADDTL: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return {"required": + { + "pipe": ("PIPE_LINE",), + "foreground_prompt": ("STRING", {"default": "", "placeholder": "Foreground Additional Prompt", "multiline": True}), + "background_prompt": ("STRING", {"default": "", "placeholder": "Background Additional Prompt", "multiline": True}), + "blended_prompt": ("STRING", {"default": "", "placeholder": "Blended Additional Prompt", "multiline": True}), + }, + "optional": { + "optional_fg_cond": ("CONDITIONING",), + "optional_bg_cond": ("CONDITIONING",), + "optional_blended_cond": ("CONDITIONING",), + }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID"}, + } + + RETURN_TYPES = ("PIPE_LINE",) + RETURN_NAMES = ("pipe",) + + FUNCTION = "settings" + CATEGORY = "EasyUse/PreSampling" + + def settings(self, pipe, foreground_prompt, background_prompt, blended_prompt, optional_fg_cond=None, optional_bg_cond=None, optional_blended_cond=None, prompt=None, 
extra_pnginfo=None, my_unique_id=None): + fg_cond, bg_cond, blended_cond = None, None, None + clip = pipe['clip'] + if optional_fg_cond is not None: + fg_cond = optional_fg_cond + elif foreground_prompt != "": + fg_cond, = CLIPTextEncode().encode(clip, foreground_prompt) + if optional_bg_cond is not None: + bg_cond = optional_bg_cond + elif background_prompt != "": + bg_cond, = CLIPTextEncode().encode(clip, background_prompt) + if optional_blended_cond is not None: + blended_cond = optional_blended_cond + elif blended_prompt != "": + blended_cond, = CLIPTextEncode().encode(clip, blended_prompt) + + new_pipe = { + **pipe, + "loader_settings": { + **pipe["loader_settings"], + "layer_diffusion_cond": (fg_cond, bg_cond, blended_cond) + } + } + + del pipe + + return (new_pipe,) + +# 预采样设置(动态CFG) +from .libs.dynthres_core import DynThresh +class dynamicCFGSettings: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return {"required": + {"pipe": ("PIPE_LINE",), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "cfg_mode": (DynThresh.Modes,), + "cfg_scale_min": ("FLOAT", {"default": 3.5, "min": 0.0, "max": 100.0, "step": 0.5}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS+new_schedulers,), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "seed": ("INT", {"default": 0, "min": 0, "max": MAX_SEED_NUM}), + }, + "optional":{ + "image_to_latent": ("IMAGE",), + "latent": ("LATENT",) + }, + "hidden": + {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID"}, + } + + RETURN_TYPES = ("PIPE_LINE",) + RETURN_NAMES = ("pipe",) + + FUNCTION = "settings" + CATEGORY = "EasyUse/PreSampling" + + def settings(self, pipe, steps, cfg, cfg_mode, cfg_scale_min,sampler_name, scheduler, denoise, seed, image_to_latent=None, latent=None, prompt=None, extra_pnginfo=None, my_unique_id=None): + + + dynamic_thresh = DynThresh(7.0, 1.0,"CONSTANT", 0, cfg_mode, cfg_scale_min, 0, 0, 999, False, + "MEAN", "AD", 1) + + def sampler_dyn_thresh(args): + input = args["input"] + cond = input - args["cond"] + uncond = input - args["uncond"] + cond_scale = args["cond_scale"] + time_step = args["timestep"] + dynamic_thresh.step = 999 - time_step[0] + + return input - dynamic_thresh.dynthresh(cond, uncond, cond_scale, None) + + model = pipe['model'] + + m = model.clone() + m.set_model_sampler_cfg_function(sampler_dyn_thresh) + + # 图生图转换 + vae = pipe["vae"] + batch_size = pipe["loader_settings"]["batch_size"] if "batch_size" in pipe["loader_settings"] else 1 + if image_to_latent is not None: + samples = {"samples": vae.encode(image_to_latent[:, :, :, :3])} + samples = RepeatLatentBatch().repeat(samples, batch_size)[0] + images = image_to_latent + elif latent is not None: + samples = RepeatLatentBatch().repeat(latent, batch_size)[0] + images = pipe["images"] + else: + samples = pipe["samples"] + images = pipe["images"] + + new_pipe = { + "model": m, + "positive": pipe['positive'], + "negative": pipe['negative'], + "vae": pipe['vae'], + "clip": pipe['clip'], + + "samples": samples, + "images": images, + "seed": seed, + + "loader_settings": { + **pipe["loader_settings"], + "steps": steps, + "cfg": cfg, + "sampler_name": sampler_name, + "scheduler": scheduler, + "denoise": denoise + }, + } + + del pipe + + return {"ui": {"value": [seed]}, "result": (new_pipe,)} + +# 动态CFG +class dynamicThresholdingFull: + @classmethod + def 
INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL",), + "mimic_scale": ("FLOAT", {"default": 7.0, "min": 0.0, "max": 100.0, "step": 0.5}), + "threshold_percentile": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "mimic_mode": (DynThresh.Modes,), + "mimic_scale_min": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 100.0, "step": 0.5}), + "cfg_mode": (DynThresh.Modes,), + "cfg_scale_min": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 100.0, "step": 0.5}), + "sched_val": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step": 0.01}), + "separate_feature_channels": (["enable", "disable"],), + "scaling_startpoint": (DynThresh.Startpoints,), + "variability_measure": (DynThresh.Variabilities,), + "interpolate_phi": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + } + } + + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + CATEGORY = "EasyUse/PreSampling" + + def patch(self, model, mimic_scale, threshold_percentile, mimic_mode, mimic_scale_min, cfg_mode, cfg_scale_min, + sched_val, separate_feature_channels, scaling_startpoint, variability_measure, interpolate_phi): + dynamic_thresh = DynThresh(mimic_scale, threshold_percentile, mimic_mode, mimic_scale_min, cfg_mode, + cfg_scale_min, sched_val, 0, 999, separate_feature_channels == "enable", + scaling_startpoint, variability_measure, interpolate_phi) + + def sampler_dyn_thresh(args): + input = args["input"] + cond = input - args["cond"] + uncond = input - args["uncond"] + cond_scale = args["cond_scale"] + time_step = args["timestep"] + dynamic_thresh.step = 999 - time_step[0] + + return input - dynamic_thresh.dynthresh(cond, uncond, cond_scale, None) + + m = model.clone() + m.set_model_sampler_cfg_function(sampler_dyn_thresh) + return (m,) + +#---------------------------------------------------------------预采样参数 结束---------------------------------------------------------------------- + +#---------------------------------------------------------------采样器 开始---------------------------------------------------------------------- + +# 完整采样器 +from .libs.chooser import ChooserMessage, ChooserCancelled +class samplerFull: + + @classmethod + def INPUT_TYPES(cls): + return {"required": + {"pipe": ("PIPE_LINE",), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS+new_schedulers,), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "image_output": (["Hide", "Preview", "Preview&Choose", "Save", "Hide&Save", "Sender", "Sender&Save"],), + "link_id": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + "save_prefix": ("STRING", {"default": "ComfyUI"}), + }, + "optional": { + "seed": ("INT", {"default": 0, "min": 0, "max": MAX_SEED_NUM}), + "model": ("MODEL",), + "positive": ("CONDITIONING",), + "negative": ("CONDITIONING",), + "latent": ("LATENT",), + "vae": ("VAE",), + "clip": ("CLIP",), + "xyPlot": ("XYPLOT",), + "image": ("IMAGE",), + }, + "hidden": + {"tile_size": "INT", "prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID", + "embeddingsList": (folder_paths.get_filename_list("embeddings"),) + } + } + + RETURN_TYPES = ("PIPE_LINE", "IMAGE", "MODEL", "CONDITIONING", "CONDITIONING", "LATENT", "VAE", "CLIP", "INT",) + RETURN_NAMES = ("pipe", "image", "model", "positive", "negative", "latent", "vae", "clip", "seed",) + OUTPUT_NODE = True + FUNCTION = "run" + 
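# Full sampler: run() falls back to the pipe's loader_settings for any unset inputs, then dispatches to process_xyPlot when an XY plot is attached, otherwise to process_sample_state. +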
CATEGORY = "EasyUse/Sampler" + + def ip2p(self, positive, negative, vae=None, pixels=None, latent=None): + if latent is not None: + concat_latent = latent + else: + x = (pixels.shape[1] // 8) * 8 + y = (pixels.shape[2] // 8) * 8 + + if pixels.shape[1] != x or pixels.shape[2] != y: + x_offset = (pixels.shape[1] % 8) // 2 + y_offset = (pixels.shape[2] % 8) // 2 + pixels = pixels[:, x_offset:x + x_offset, y_offset:y + y_offset, :] + + concat_latent = vae.encode(pixels) + + out_latent = {} + out_latent["samples"] = torch.zeros_like(concat_latent) + + out = [] + for conditioning in [positive, negative]: + c = [] + for t in conditioning: + d = t[1].copy() + d["concat_latent_image"] = concat_latent + n = [t[0], d] + c.append(n) + out.append(c) + return (out[0], out[1], out_latent) + + def get_inversed_euler_sampler(self): + @torch.no_grad() + def sample_inversed_euler(model, x, sigmas, extra_args=None, callback=None, disable=None, s_churn=0., s_tmin=0.,s_tmax=float('inf'), s_noise=1.): + """Implements Algorithm 2 (Euler steps) from Karras et al. (2022).""" + extra_args = {} if extra_args is None else extra_args + s_in = x.new_ones([x.shape[0]]) + for i in trange(1, len(sigmas), disable=disable): + sigma_in = sigmas[i - 1] + + if i == 1: + sigma_t = sigmas[i] + else: + sigma_t = sigma_in + + denoised = model(x, sigma_t * s_in, **extra_args) + + if i == 1: + d = (x - denoised) / (2 * sigmas[i]) + else: + d = (x - denoised) / sigmas[i - 1] + + dt = sigmas[i] - sigmas[i - 1] + x = x + d * dt + if callback is not None: + callback( + {'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) + return x / sigmas[-1] + + ksampler = comfy.samplers.KSAMPLER(sample_inversed_euler) + return (ksampler,) + + def get_custom_cls(self, sampler_name): + try: + cls = custom_samplers.__dict__[sampler_name] + return cls() + except: + raise Exception(f"Custom sampler {sampler_name} not found, Please updated your ComfyUI") + + def add_model_patch_option(self, model): + if 'transformer_options' not in model.model_options: + model.model_options['transformer_options'] = {} + to = model.model_options['transformer_options'] + if "model_patch" not in to: + to["model_patch"] = {} + return to + + def get_sampler_custom(self, model, positive, negative, seed, loader_settings): + _guider = None + middle = loader_settings['middle'] if "middle" in loader_settings else negative + steps = loader_settings['steps'] if "steps" in loader_settings else 20 + cfg = loader_settings['cfg'] if "cfg" in loader_settings else 8.0 + cfg_negative = loader_settings['cfg_negative'] if "cfg_negative" in loader_settings else 8.0 + sampler_name = loader_settings['sampler_name'] if "sampler_name" in loader_settings else "euler" + scheduler = loader_settings['scheduler'] if "scheduler" in loader_settings else "normal" + guider = loader_settings['custom']['guider'] if "guider" in loader_settings['custom'] else "CFG" + beta_d = loader_settings['custom']['beta_d'] if "beta_d" in loader_settings['custom'] else 0.1 + beta_min = loader_settings['custom']['beta_min'] if "beta_min" in loader_settings['custom'] else 0.1 + eps_s = loader_settings['custom']['eps_s'] if "eps_s" in loader_settings['custom'] else 0.1 + sigma_max = loader_settings['custom']['sigma_max'] if "sigma_max" in loader_settings['custom'] else 14.61 + sigma_min = loader_settings['custom']['sigma_min'] if "sigma_min" in loader_settings['custom'] else 0.03 + rho = loader_settings['custom']['rho'] if "rho" in loader_settings['custom'] else 7.0 + coeff = 
loader_settings['custom']['coeff'] if "coeff" in loader_settings['custom'] else 1.2 + flip_sigmas = loader_settings['custom']['flip_sigmas'] if "flip_sigmas" in loader_settings['custom'] else False + denoise = loader_settings['denoise'] if "denoise" in loader_settings else 1.0 + add_noise = loader_settings['add_noise'] if "add_noise" in loader_settings else "enable" + optional_sigmas = loader_settings['optional_sigmas'] if "optional_sigmas" in loader_settings else None + optional_sampler = loader_settings['optional_sampler'] if "optional_sampler" in loader_settings else None + + # sigmas + if optional_sigmas is not None: + sigmas = optional_sigmas + else: + if scheduler == 'vp': + sigmas, = self.get_custom_cls('VPScheduler').get_sigmas(steps, beta_d, beta_min, eps_s) + elif scheduler == 'karrasADV': + sigmas, = self.get_custom_cls('KarrasScheduler').get_sigmas(steps, sigma_max, sigma_min, rho) + elif scheduler == 'exponentialADV': + sigmas, = self.get_custom_cls('ExponentialScheduler').get_sigmas(steps, sigma_max, sigma_min) + elif scheduler == 'polyExponential': + sigmas, = self.get_custom_cls('PolyexponentialScheduler').get_sigmas(steps, sigma_max, sigma_min, rho) + elif scheduler == 'sdturbo': + sigmas, = self.get_custom_cls('SDTurboScheduler').get_sigmas(model, steps, denoise) + elif scheduler == 'alignYourSteps': + model_type = get_sd_version(model) + if model_type == 'unknown': + model_type = 'sdxl' + sigmas, = alignYourStepsScheduler().get_sigmas(model_type.upper(), steps, denoise) + elif scheduler == 'gits': + sigmas, = gitsScheduler().get_sigmas(coeff, steps, denoise) + else: + sigmas, = self.get_custom_cls('BasicScheduler').get_sigmas(model, scheduler, steps, denoise) + + # filp_sigmas + if flip_sigmas: + sigmas, = self.get_custom_cls('FlipSigmas').get_sigmas(sigmas) + + ####################################################################################### + # brushnet + to = None + transformer_options = model.model_options['transformer_options'] if "transformer_options" in model.model_options else {} + if 'model_patch' in transformer_options and 'brushnet' in transformer_options['model_patch']: + to = self.add_model_patch_option(model) + mp = to['model_patch'] + if isinstance(model.model.model_config, comfy.supported_models.SD15): + mp['SDXL'] = False + elif isinstance(model.model.model_config, comfy.supported_models.SDXL): + mp['SDXL'] = True + else: + print('Base model type: ', type(model.model.model_config)) + raise Exception("Unsupported model type: ", type(model.model.model_config)) + + mp['all_sigmas'] = sigmas + mp['unet'] = model.model.diffusion_model + mp['step'] = 0 + mp['total_steps'] = 1 + ####################################################################################### + # guider + if guider == 'CFG': + _guider, = self.get_custom_cls('CFGGuider').get_guider(model, positive, negative, cfg) + elif guider in ['DualCFG', 'IP2P+DualCFG']: + _guider, = self.get_custom_cls('DualCFGGuider').get_guider(model, positive, middle, + negative, cfg, cfg_negative) + else: + _guider, = self.get_custom_cls('BasicGuider').get_guider(model, positive) + + # sampler + if optional_sampler: + _sampler = optional_sampler + else: + if sampler_name == 'inversed_euler': + _sampler, = self.get_inversed_euler_sampler() + else: + _sampler, = self.get_custom_cls('KSamplerSelect').get_sampler(sampler_name) + + # noise + if add_noise == 'disable': + noise, = self.get_custom_cls('DisableNoise').get_noise() + else: + noise, = self.get_custom_cls('RandomNoise').get_noise(seed) + + return (noise, 
_guider, _sampler, sigmas) + + def run(self, pipe, steps, cfg, sampler_name, scheduler, denoise, image_output, link_id, save_prefix, seed=None, model=None, positive=None, negative=None, latent=None, vae=None, clip=None, xyPlot=None, tile_size=None, prompt=None, extra_pnginfo=None, my_unique_id=None, force_full_denoise=False, disable_noise=False, downscale_options=None, image=None): + + samp_model = model if model is not None else pipe["model"] + samp_positive = positive if positive is not None else pipe["positive"] + samp_negative = negative if negative is not None else pipe["negative"] + samp_samples = latent if latent is not None else pipe["samples"] + samp_vae = vae if vae is not None else pipe["vae"] + samp_clip = clip if clip is not None else pipe["clip"] + + samp_seed = seed if seed is not None else pipe['seed'] + + samp_custom = pipe["loader_settings"] if "custom" in pipe["loader_settings"] else None + + steps = steps if steps is not None else pipe['loader_settings']['steps'] + start_step = pipe['loader_settings']['start_step'] if 'start_step' in pipe['loader_settings'] else 0 + last_step = pipe['loader_settings']['last_step'] if 'last_step' in pipe['loader_settings'] else 10000 + cfg = cfg if cfg is not None else pipe['loader_settings']['cfg'] + sampler_name = sampler_name if sampler_name is not None else pipe['loader_settings']['sampler_name'] + scheduler = scheduler if scheduler is not None else pipe['loader_settings']['scheduler'] + denoise = denoise if denoise is not None else pipe['loader_settings']['denoise'] + add_noise = pipe['loader_settings']['add_noise'] if 'add_noise' in pipe['loader_settings'] else 'enabled' + force_full_denoise = pipe['loader_settings']['force_full_denoise'] if 'force_full_denoise' in pipe['loader_settings'] else True + + if image is not None and latent is None: + samp_samples = {"samples": samp_vae.encode(image[:, :, :, :3])} + + disable_noise = False + if add_noise == "disable": + disable_noise = True + + def downscale_model_unet(samp_model): + # 获取Unet参数 + if "PatchModelAddDownscale" in ALL_NODE_CLASS_MAPPINGS: + cls = ALL_NODE_CLASS_MAPPINGS['PatchModelAddDownscale'] + # 自动收缩Unet + if downscale_options['downscale_factor'] is None: + unet_config = samp_model.model.model_config.unet_config + if unet_config is not None and "samples" in samp_samples: + height = samp_samples['samples'].shape[2] * 8 + width = samp_samples['samples'].shape[3] * 8 + context_dim = unet_config.get('context_dim') + longer_side = width if width > height else height + if context_dim is not None and longer_side > context_dim: + width_downscale_factor = float(width / context_dim) + height_downscale_factor = float(height / context_dim) + if width_downscale_factor > 1.75: + log_node_warn("正在收缩模型Unet...") + log_node_warn("收缩系数:" + str(width_downscale_factor)) + (samp_model,) = cls().patch(samp_model, downscale_options['block_number'], width_downscale_factor, 0, 0.35, True, "bicubic", + "bicubic") + elif height_downscale_factor > 1.25: + log_node_warn("正在收缩模型Unet...") + log_node_warn("收缩系数:" + str(height_downscale_factor)) + (samp_model,) = cls().patch(samp_model, downscale_options['block_number'], height_downscale_factor, 0, 0.35, True, "bicubic", + "bicubic") + else: + cls = ALL_NODE_CLASS_MAPPINGS['PatchModelAddDownscale'] + log_node_warn("正在收缩模型Unet...") + log_node_warn("收缩系数:" + str(downscale_options['downscale_factor'])) + (samp_model,) = cls().patch(samp_model, downscale_options['block_number'], downscale_options['downscale_factor'], downscale_options['start_percent'], 
downscale_options['end_percent'], downscale_options['downscale_after_skip'], downscale_options['downscale_method'], downscale_options['upscale_method']) + return samp_model + + def process_sample_state(pipe, samp_model, samp_clip, samp_samples, samp_vae, samp_seed, samp_positive, + samp_negative, + steps, start_step, last_step, cfg, sampler_name, scheduler, denoise, + image_output, link_id, save_prefix, tile_size, prompt, extra_pnginfo, my_unique_id, + preview_latent, force_full_denoise=force_full_denoise, disable_noise=disable_noise, samp_custom=None): + + # LayerDiffusion + layerDiffuse = None + samp_blend_samples = None + layer_diffusion_method = pipe['loader_settings']['layer_diffusion_method'] if 'layer_diffusion_method' in pipe['loader_settings'] else None + if layer_diffusion_method is not None: + layerDiffuse = LayerDiffuse() + samp_blend_samples = pipe["blend_samples"] if "blend_samples" in pipe else None + additional_cond = pipe["loader_settings"]['layer_diffusion_cond'] if "layer_diffusion_cond" in pipe[ + 'loader_settings'] else (None, None, None) + method = layerDiffuse.get_layer_diffusion_method(pipe['loader_settings']['layer_diffusion_method'], + samp_blend_samples is not None) + + images = pipe["images"] if "images" in pipe else None + weight = pipe['loader_settings']['layer_diffusion_weight'] if 'layer_diffusion_weight' in pipe[ + 'loader_settings'] else 1.0 + samp_model, samp_positive, samp_negative = layerDiffuse.apply_layer_diffusion(samp_model, method, weight, + samp_samples, samp_blend_samples, + samp_positive, samp_negative, + images, additional_cond) + resolution = pipe['loader_settings']['resolution'] if 'resolution' in pipe['loader_settings'] else "自定义 X 自定义" + empty_latent_width = pipe['loader_settings']['empty_latent_width'] if 'empty_latent_width' in pipe['loader_settings'] else 512 + empty_latent_height = pipe['loader_settings']['empty_latent_height'] if 'empty_latent_height' in pipe['loader_settings'] else 512 + batch_size = pipe["loader_settings"]["batch_size"] if "batch_size" in pipe["loader_settings"] else 1 + samp_samples = sampler.emptyLatent(resolution, empty_latent_width, empty_latent_height, batch_size) + + # Downscale Model Unet + if samp_model is not None and downscale_options is not None: + samp_model = downscale_model_unet(samp_model) + # 推理初始时间 + start_time = int(time.time() * 1000) + # 开始推理 + if samp_custom is not None: + noise, _guider, _sampler, sigmas = self.get_sampler_custom(samp_model, samp_positive, samp_negative, samp_seed, samp_custom) + samp_samples, _ = sampler.custom_advanced_ksampler(noise, _guider, _sampler, sigmas, samp_samples) + elif scheduler == 'align_your_steps': + model_type = get_sd_version(samp_model) + if model_type == 'unknown': + model_type = 'sdxl' + sigmas, = alignYourStepsScheduler().get_sigmas(model_type.upper(), steps, denoise) + _sampler = comfy.samplers.sampler_object(sampler_name) + samp_samples = sampler.custom_ksampler(samp_model, samp_seed, steps, cfg, _sampler, sigmas, samp_positive, samp_negative, samp_samples, disable_noise=disable_noise, preview_latent=preview_latent) + elif scheduler == 'gits': + sigmas, = gitsScheduler().get_sigmas(coeff=1.2, steps=steps, denoise=denoise) + _sampler = comfy.samplers.sampler_object(sampler_name) + samp_samples = sampler.custom_ksampler(samp_model, samp_seed, steps, cfg, _sampler, sigmas, samp_positive, samp_negative, samp_samples, disable_noise=disable_noise, preview_latent=preview_latent) + else: + samp_samples = sampler.common_ksampler(samp_model, samp_seed, steps, 
cfg, sampler_name, scheduler, samp_positive, samp_negative, samp_samples, denoise=denoise, preview_latent=preview_latent, start_step=start_step, last_step=last_step, force_full_denoise=force_full_denoise, disable_noise=disable_noise) + # 推理结束时间 + end_time = int(time.time() * 1000) + latent = samp_samples["samples"] + + # 解码图片 + if tile_size is not None: + samp_images = samp_vae.decode_tiled(latent, tile_x=tile_size // 8, tile_y=tile_size // 8, ) + else: + samp_images = samp_vae.decode(latent).cpu() + + # LayerDiffusion Decode + if layerDiffuse is not None: + new_images, samp_images, alpha = layerDiffuse.layer_diffusion_decode(layer_diffusion_method, latent, samp_blend_samples, samp_images, samp_model) + else: + new_images = samp_images + alpha = None + + # 推理总耗时(包含解码) + end_decode_time = int(time.time() * 1000) + spent_time = 'Diffusion:' + str((end_time-start_time)/1000)+'″, VAEDecode:' + str((end_decode_time-end_time)/1000)+'″ ' + + results = easySave(new_images, save_prefix, image_output, prompt, extra_pnginfo) + + new_pipe = { + **pipe, + "positive": samp_positive, + "negative": samp_negative, + "vae": samp_vae, + "clip": samp_clip, + + "samples": samp_samples, + "blend_samples": samp_blend_samples, + "images": new_images, + "samp_images": samp_images, + "alpha": alpha, + "seed": samp_seed, + + "loader_settings": { + **pipe["loader_settings"], + "spent_time": spent_time + } + } + + del pipe + + if image_output == 'Preview&Choose': + if my_unique_id not in ChooserMessage.stash: + ChooserMessage.stash[my_unique_id] = {} + my_stash = ChooserMessage.stash[my_unique_id] + + PromptServer.instance.send_sync("easyuse-image-choose", {"id": my_unique_id, "urls": results}) + # wait for selection + try: + selections = ChooserMessage.waitForMessage(my_unique_id, asList=True) + samples = samp_samples['samples'] + samples = [samples[x] for x in selections if x >= 0] if len(selections) > 1 else [samples[0]] + new_images = [new_images[x] for x in selections if x >= 0] if len(selections) > 1 else [new_images[0]] + samp_images = [samp_images[x] for x in selections if x >= 0] if len(selections) > 1 else [samp_images[0]] + new_images = torch.stack(new_images, dim=0) + samp_images = torch.stack(samp_images, dim=0) + samples = torch.stack(samples, dim=0) + samp_samples = {"samples": samples} + new_pipe['samples'] = samp_samples + new_pipe['loader_settings']['batch_size'] = len(new_images) + except ChooserCancelled: + raise comfy.model_management.InterruptProcessingException() + + new_pipe['images'] = new_images + new_pipe['samp_images'] = samp_images + + return {"ui": {"images": results}, + "result": sampler.get_output(new_pipe,)} + + if image_output in ("Hide", "Hide&Save"): + return {"ui": {}, + "result": sampler.get_output(new_pipe,)} + + if image_output in ("Sender", "Sender&Save"): + PromptServer.instance.send_sync("img-send", {"link_id": link_id, "images": results}) + + return {"ui": {"images": results}, + "result": sampler.get_output(new_pipe,)} + + def process_xyPlot(pipe, samp_model, samp_clip, samp_samples, samp_vae, samp_seed, samp_positive, samp_negative, + steps, cfg, sampler_name, scheduler, denoise, + image_output, link_id, save_prefix, tile_size, prompt, extra_pnginfo, my_unique_id, preview_latent, xyPlot, force_full_denoise, disable_noise, samp_custom): + + sampleXYplot = easyXYPlot(xyPlot, save_prefix, image_output, prompt, extra_pnginfo, my_unique_id, sampler, easyCache) + + if not sampleXYplot.validate_xy_plot(): + return process_sample_state(pipe, samp_model, samp_clip, samp_samples, 
samp_vae, samp_seed, samp_positive, + samp_negative, steps, 0, 10000, cfg, + sampler_name, scheduler, denoise, image_output, link_id, save_prefix, tile_size, prompt, + extra_pnginfo, my_unique_id, preview_latent, samp_custom=samp_custom) + + # Downscale Model Unet + if samp_model is not None and downscale_options is not None: + samp_model = downscale_model_unet(samp_model) + + blend_samples = pipe['blend_samples'] if "blend_samples" in pipe else None + layer_diffusion_method = pipe['loader_settings']['layer_diffusion_method'] if 'layer_diffusion_method' in pipe['loader_settings'] else None + + plot_image_vars = { + "x_node_type": sampleXYplot.x_node_type, "y_node_type": sampleXYplot.y_node_type, + "lora_name": pipe["loader_settings"]["lora_name"] if "lora_name" in pipe["loader_settings"] else None, + "lora_model_strength": pipe["loader_settings"]["lora_model_strength"] if "model_strength" in pipe["loader_settings"] else None, + "lora_clip_strength": pipe["loader_settings"]["lora_clip_strength"] if "clip_strength" in pipe["loader_settings"] else None, + "lora_stack": pipe["loader_settings"]["lora_stack"] if "lora_stack" in pipe["loader_settings"] else None, + "steps": steps, + "cfg": cfg, + "sampler_name": sampler_name, + "scheduler": scheduler, + "denoise": denoise, + "seed": samp_seed, + "images": pipe['images'], + + "model": samp_model, "vae": samp_vae, "clip": samp_clip, "positive_cond": samp_positive, + "negative_cond": samp_negative, + + "ckpt_name": pipe['loader_settings']['ckpt_name'] if "ckpt_name" in pipe["loader_settings"] else None, + "vae_name": pipe['loader_settings']['vae_name'] if "vae_name" in pipe["loader_settings"] else None, + "clip_skip": pipe['loader_settings']['clip_skip'] if "clip_skip" in pipe["loader_settings"] else None, + "positive": pipe['loader_settings']['positive'] if "positive" in pipe["loader_settings"] else None, + "positive_token_normalization": pipe['loader_settings']['positive_token_normalization'] if "positive_token_normalization" in pipe["loader_settings"] else None, + "positive_weight_interpretation": pipe['loader_settings']['positive_weight_interpretation'] if "positive_weight_interpretation" in pipe["loader_settings"] else None, + "negative": pipe['loader_settings']['negative'] if "negative" in pipe["loader_settings"] else None, + "negative_token_normalization": pipe['loader_settings']['negative_token_normalization'] if "negative_token_normalization" in pipe["loader_settings"] else None, + "negative_weight_interpretation": pipe['loader_settings']['negative_weight_interpretation'] if "negative_weight_interpretation" in pipe["loader_settings"] else None, + } + + if "models" in pipe["loader_settings"]: + plot_image_vars["models"] = pipe["loader_settings"]["models"] + if "vae_use" in pipe["loader_settings"]: + plot_image_vars["vae_use"] = pipe["loader_settings"]["vae_use"] + if "a1111_prompt_style" in pipe["loader_settings"]: + plot_image_vars["a1111_prompt_style"] = pipe["loader_settings"]["a1111_prompt_style"] + if "cnet_stack" in pipe["loader_settings"]: + plot_image_vars["cnet"] = pipe["loader_settings"]["cnet_stack"] + if "positive_cond_stack" in pipe["loader_settings"]: + plot_image_vars["positive_cond_stack"] = pipe["loader_settings"]["positive_cond_stack"] + if "negative_cond_stack" in pipe["loader_settings"]: + plot_image_vars["negative_cond_stack"] = pipe["loader_settings"]["negative_cond_stack"] + if layer_diffusion_method: + plot_image_vars["layer_diffusion_method"] = layer_diffusion_method + if "layer_diffusion_weight" in 
pipe["loader_settings"]: + plot_image_vars["layer_diffusion_weight"] = pipe['loader_settings']['layer_diffusion_weight'] + if "layer_diffusion_cond" in pipe["loader_settings"]: + plot_image_vars["layer_diffusion_cond"] = pipe['loader_settings']['layer_diffusion_cond'] + if "empty_samples" in pipe["loader_settings"]: + plot_image_vars["empty_samples"] = pipe["loader_settings"]['empty_samples'] + + latent_image = sampleXYplot.get_latent(pipe["samples"]) + latents_plot = sampleXYplot.get_labels_and_sample(plot_image_vars, latent_image, preview_latent, start_step, + last_step, force_full_denoise, disable_noise) + + samp_samples = {"samples": latents_plot} + + images, image_list = sampleXYplot.plot_images_and_labels() + + # Generate output_images + output_images = torch.stack([tensor.squeeze() for tensor in image_list]) + + if layer_diffusion_method is not None: + layerDiffuse = LayerDiffuse() + new_images, samp_images, alpha = layerDiffuse.layer_diffusion_decode(layer_diffusion_method, latents_plot, blend_samples, + output_images, samp_model) + else: + new_images = output_images + samp_images = output_images + alpha = None + + results = easySave(images, save_prefix, image_output, prompt, extra_pnginfo) + + new_pipe = { + **pipe, + "positive": samp_positive, + "negative": samp_negative, + "vae": samp_vae, + "clip": samp_clip, + + "samples": samp_samples, + "blend_samples": blend_samples, + "samp_images": samp_images, + "images": new_images, + "seed": samp_seed, + "alpha": alpha, + + "loader_settings": pipe["loader_settings"], + } + + del pipe + + if image_output in ("Hide", "Hide&Save"): + return sampler.get_output(new_pipe) + + return {"ui": {"images": results}, "result": (sampler.get_output(new_pipe))} + + preview_latent = True + if image_output in ("Hide", "Hide&Save"): + preview_latent = False + + xyplot_id = next((x for x in prompt if "XYPlot" in str(prompt[x]["class_type"])), None) + if xyplot_id is None: + xyPlot = None + else: + xyPlot = pipe["loader_settings"]["xyplot"] if "xyplot" in pipe["loader_settings"] else xyPlot + if xyPlot is not None: + return process_xyPlot(pipe, samp_model, samp_clip, samp_samples, samp_vae, samp_seed, samp_positive, samp_negative, steps, cfg, sampler_name, scheduler, denoise, image_output, link_id, save_prefix, tile_size, prompt, extra_pnginfo, my_unique_id, preview_latent, xyPlot, force_full_denoise, disable_noise, samp_custom) + else: + return process_sample_state(pipe, samp_model, samp_clip, samp_samples, samp_vae, samp_seed, samp_positive, samp_negative, steps, start_step, last_step, cfg, sampler_name, scheduler, denoise, image_output, link_id, save_prefix, tile_size, prompt, extra_pnginfo, my_unique_id, preview_latent, force_full_denoise, disable_noise, samp_custom) + +# 简易采样器 +class samplerSimple(samplerFull): + + @classmethod + def INPUT_TYPES(cls): + return {"required": + {"pipe": ("PIPE_LINE",), + "image_output": (["Hide", "Preview", "Preview&Choose", "Save", "Hide&Save", "Sender", "Sender&Save"],{"default": "Preview"}), + "link_id": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + "save_prefix": ("STRING", {"default": "ComfyUI"}), + }, + "optional": { + "model": ("MODEL",), + }, + "hidden": + {"tile_size": "INT", "prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID", + "embeddingsList": (folder_paths.get_filename_list("embeddings"),) + } + } + + + RETURN_TYPES = ("PIPE_LINE", "IMAGE",) + RETURN_NAMES = ("pipe", "image",) + OUTPUT_NODE = True + FUNCTION = "simple" + CATEGORY = "EasyUse/Sampler" + + 
def simple(self, pipe, image_output, link_id, save_prefix, model=None, tile_size=None, prompt=None, extra_pnginfo=None, my_unique_id=None, force_full_denoise=False, disable_noise=False): + + return super().run(pipe, None, None, None, None, None, image_output, link_id, save_prefix, + None, model, None, None, None, None, None, None, + None, prompt, extra_pnginfo, my_unique_id, force_full_denoise, disable_noise) + +# 简易采样器 (Tiled) +class samplerSimpleTiled(samplerFull): + + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return {"required": + {"pipe": ("PIPE_LINE",), + "tile_size": ("INT", {"default": 512, "min": 320, "max": 4096, "step": 64}), + "image_output": (["Hide", "Preview", "Save", "Hide&Save", "Sender", "Sender&Save"],{"default": "Preview"}), + "link_id": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + "save_prefix": ("STRING", {"default": "ComfyUI"}) + }, + "optional": { + "model": ("MODEL",), + }, + "hidden": { + "prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID", + "embeddingsList": (folder_paths.get_filename_list("embeddings"),) + } + } + + RETURN_TYPES = ("PIPE_LINE", "IMAGE",) + RETURN_NAMES = ("pipe", "image",) + OUTPUT_NODE = True + FUNCTION = "tiled" + CATEGORY = "EasyUse/Sampler" + + def tiled(self, pipe, tile_size=512, image_output='preview', link_id=0, save_prefix='ComfyUI', model=None, prompt=None, extra_pnginfo=None, my_unique_id=None, force_full_denoise=False, disable_noise=False): + + return super().run(pipe, None, None,None,None,None, image_output, link_id, save_prefix, + None, model, None, None, None, None, None, None, + tile_size, prompt, extra_pnginfo, my_unique_id, force_full_denoise, disable_noise) + +# 简易采样器 (LayerDiffusion) +class samplerSimpleLayerDiffusion(samplerFull): + + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return {"required": + {"pipe": ("PIPE_LINE",), + "image_output": (["Hide", "Preview", "Save", "Hide&Save", "Sender", "Sender&Save"],{"default": "Preview"}), + "link_id": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + "save_prefix": ("STRING", {"default": "ComfyUI"}) + }, + "optional": { + "model": ("MODEL",), + }, + "hidden": { + "prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID", + "embeddingsList": (folder_paths.get_filename_list("embeddings"),) + } + } + + RETURN_TYPES = ("PIPE_LINE", "IMAGE", "IMAGE", "MASK") + RETURN_NAMES = ("pipe", "final_image", "original_image", "alpha") + OUTPUT_NODE = True + OUTPUT_IS_LIST = (False, False, False, True) + FUNCTION = "layerDiffusion" + CATEGORY = "EasyUse/Sampler" + + def layerDiffusion(self, pipe, image_output='preview', link_id=0, save_prefix='ComfyUI', model=None, prompt=None, extra_pnginfo=None, my_unique_id=None, force_full_denoise=False, disable_noise=False): + + result = super().run(pipe, None, None,None,None,None, image_output, link_id, save_prefix, + None, model, None, None, None, None, None, None, + None, prompt, extra_pnginfo, my_unique_id, force_full_denoise, disable_noise) + pipe = result["result"][0] if "result" in result else None + return ({"ui":result['ui'], "result":(pipe, pipe["images"], pipe["samp_images"], pipe["alpha"])}) + +# 简易采样器(收缩Unet) +class samplerSimpleDownscaleUnet(samplerFull): + + upscale_methods = ["bicubic", "nearest-exact", "bilinear", "area", "bislerp"] + + @classmethod + def INPUT_TYPES(s): + return {"required": + {"pipe": ("PIPE_LINE",), + "downscale_mode": (["None", "Auto", "Custom"],{"default": "Auto"}), + 
"block_number": ("INT", {"default": 3, "min": 1, "max": 32, "step": 1}), + "downscale_factor": ("FLOAT", {"default": 2.0, "min": 0.1, "max": 9.0, "step": 0.001}), + "start_percent": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "end_percent": ("FLOAT", {"default": 0.35, "min": 0.0, "max": 1.0, "step": 0.001}), + "downscale_after_skip": ("BOOLEAN", {"default": True}), + "downscale_method": (s.upscale_methods,), + "upscale_method": (s.upscale_methods,), + "image_output": (["Hide", "Preview", "Save", "Hide&Save", "Sender", "Sender&Save"],{"default": "Preview"}), + "link_id": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + "save_prefix": ("STRING", {"default": "ComfyUI"}), + }, + "optional": { + "model": ("MODEL",), + }, + "hidden": + {"tile_size": "INT", "prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID", + "embeddingsList": (folder_paths.get_filename_list("embeddings"),) + } + } + + + RETURN_TYPES = ("PIPE_LINE", "IMAGE",) + RETURN_NAMES = ("pipe", "image",) + OUTPUT_NODE = True + FUNCTION = "downscale_unet" + CATEGORY = "EasyUse/Sampler" + + def downscale_unet(self, pipe, downscale_mode, block_number, downscale_factor, start_percent, end_percent, downscale_after_skip, downscale_method, upscale_method, image_output, link_id, save_prefix, model=None, tile_size=None, prompt=None, extra_pnginfo=None, my_unique_id=None, force_full_denoise=False, disable_noise=False): + downscale_options = None + if downscale_mode == 'Auto': + downscale_options = { + "block_number": block_number, + "downscale_factor": None, + "start_percent": 0, + "end_percent":0.35, + "downscale_after_skip": True, + "downscale_method": "bicubic", + "upscale_method": "bicubic" + } + elif downscale_mode == 'Custom': + downscale_options = { + "block_number": block_number, + "downscale_factor": downscale_factor, + "start_percent": start_percent, + "end_percent": end_percent, + "downscale_after_skip": downscale_after_skip, + "downscale_method": downscale_method, + "upscale_method": upscale_method + } + + return super().run(pipe, None, None,None,None,None, image_output, link_id, save_prefix, + None, model, None, None, None, None, None, None, + tile_size, prompt, extra_pnginfo, my_unique_id, force_full_denoise, disable_noise, downscale_options) +# 简易采样器 (内补) +class samplerSimpleInpainting(samplerFull): + @classmethod + def INPUT_TYPES(cls): + return {"required": + {"pipe": ("PIPE_LINE",), + "grow_mask_by": ("INT", {"default": 6, "min": 0, "max": 64, "step": 1}), + "image_output": (["Hide", "Preview", "Save", "Hide&Save", "Sender", "Sender&Save"],{"default": "Preview"}), + "link_id": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + "save_prefix": ("STRING", {"default": "ComfyUI"}), + "additional": (["None", "InpaintModelCond", "Differential Diffusion", "Fooocus Inpaint", "Fooocus Inpaint + DD", "Brushnet Random", "Brushnet Random + DD", "Brushnet Segmentation", "Brushnet Segmentation + DD"],{"default": "None"}) + }, + "optional": { + "model": ("MODEL",), + "mask": ("MASK",), + }, + "hidden": + {"tile_size": "INT", "prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID", + "embeddingsList": (folder_paths.get_filename_list("embeddings"),) + } + } + + RETURN_TYPES = ("PIPE_LINE", "IMAGE", "VAE") + RETURN_NAMES = ("pipe", "image", "vae") + OUTPUT_NODE = True + FUNCTION = "inpainting" + CATEGORY = "EasyUse/Sampler" + + def dd(self, model, positive, negative, pixels, vae, mask): + positive, negative, latent = 
InpaintModelConditioning().encode(positive, negative, pixels, vae, mask) + cls = ALL_NODE_CLASS_MAPPINGS['DifferentialDiffusion'] + if cls is not None: + model, = cls().apply(model) + else: + raise Exception("Differential Diffusion not found,please update comfyui") + return positive, negative, latent, model + + def get_brushnet_model(self, type, model): + model_type = 'sdxl' if isinstance(model.model.model_config, comfy.supported_models.SDXL) else 'sd1' + if type == 'random': + brush_model = BRUSHNET_MODELS['random_mask'][model_type]['model_url'] + if model_type == 'sdxl': + pattern = 'brushnet.random.mask.sdxl.*\.(safetensors|bin)$' + else: + pattern = 'brushnet.random.mask.*\.(safetensors|bin)$' + elif type == 'segmentation': + brush_model = BRUSHNET_MODELS['segmentation_mask'][model_type]['model_url'] + if model_type == 'sdxl': + pattern = 'brushnet.segmentation.mask.sdxl.*\.(safetensors|bin)$' + else: + pattern = 'brushnet.segmentation.mask.*\.(safetensors|bin)$' + + + brushfile = [e for e in folder_paths.get_filename_list('inpaint') if re.search(pattern, e, re.IGNORECASE)] + brushname = brushfile[0] if brushfile else None + if not brushname: + from urllib.parse import urlparse + get_local_filepath(brush_model, INPAINT_DIR) + parsed_url = urlparse(brush_model) + brushname = os.path.basename(parsed_url.path) + return brushname + + def apply_brushnet(self, brushname, model, vae, image, mask, positive, negative, scale=1.0, start_at=0, end_at=10000): + if "BrushNetLoader" not in ALL_NODE_CLASS_MAPPINGS: + raise Exception("BrushNetLoader not found,please install ComfyUI-BrushNet") + cls = ALL_NODE_CLASS_MAPPINGS['BrushNetLoader'] + brushnet, = cls().brushnet_loading(brushname, 'float16') + cls = ALL_NODE_CLASS_MAPPINGS['BrushNet'] + m, positive, negative, latent = cls().model_update(model=model, vae=vae, image=image, mask=mask, brushnet=brushnet, positive=positive, negative=negative, scale=scale, start_at=start_at, end_at=end_at) + return m, positive, negative, latent + + def inpainting(self, pipe, grow_mask_by, image_output, link_id, save_prefix, additional, model=None, mask=None, tile_size=None, prompt=None, extra_pnginfo=None, my_unique_id=None, force_full_denoise=False, disable_noise=False): + _model = model if model is not None else pipe['model'] + latent = pipe['samples'] if 'samples' in pipe else None + positive = pipe['positive'] + negative = pipe['negative'] + images = pipe["images"] if pipe and "images" in pipe else None + vae = pipe["vae"] if pipe and "vae" in pipe else None + if 'noise_mask' in latent and mask is None: + mask = latent['noise_mask'] + elif mask is not None: + if images is None: + raise Exception("No Images found") + if vae is None: + raise Exception("No VAE found") + + match additional: + case 'Differential Diffusion': + positive, negative, latent, _model = self.dd(_model, positive, negative, images, vae, mask) + case 'InpaintModelCond': + if mask is not None: + mask, = GrowMask().expand_mask(mask, grow_mask_by, False) + positive, negative, latent = InpaintModelConditioning().encode(positive, negative, images, vae, mask) + case 'Fooocus Inpaint': + head = list(FOOOCUS_INPAINT_HEAD.keys())[0] + patch = list(FOOOCUS_INPAINT_PATCH.keys())[0] + if mask is not None: + latent, = VAEEncodeForInpaint().encode(vae, images, mask, grow_mask_by) + _model, = applyFooocusInpaint().apply(_model, latent, head, patch) + case 'Fooocus Inpaint + DD': + head = list(FOOOCUS_INPAINT_HEAD.keys())[0] + patch = list(FOOOCUS_INPAINT_PATCH.keys())[0] + if mask is not None: + latent, = 
VAEEncodeForInpaint().encode(vae, images, mask, grow_mask_by) + _model, = applyFooocusInpaint().apply(_model, latent, head, patch) + positive, negative, latent, _model = self.dd(_model, positive, negative, images, vae, mask) + case 'Brushnet Random': + mask, = GrowMask().expand_mask(mask, grow_mask_by, False) + brush_name = self.get_brushnet_model('random', _model) + _model, positive, negative, latent = self.apply_brushnet(brush_name, _model, vae, images, mask, positive, negative) + case 'Brushnet Random + DD': + mask, = GrowMask().expand_mask(mask, grow_mask_by, False) + brush_name = self.get_brushnet_model('random', _model) + _model, positive, negative, latent = self.apply_brushnet(brush_name, _model, vae, images, mask, positive, negative) + positive, negative, latent, _model = self.dd(_model, positive, negative, images, vae, mask) + case 'Brushnet Segmentation': + mask, = GrowMask().expand_mask(mask, grow_mask_by, False) + brush_name = self.get_brushnet_model('segmentation', _model) + _model, positive, negative, latent = self.apply_brushnet(brush_name, _model, vae, images, mask, positive, negative) + case 'Brushnet Segmentation + DD': + mask, = GrowMask().expand_mask(mask, grow_mask_by, False) + brush_name = self.get_brushnet_model('segmentation', _model) + _model, positive, negative, latent = self.apply_brushnet(brush_name, _model, vae, images, mask, positive, negative) + positive, negative, latent, _model = self.dd(_model, positive, negative, images, vae, mask) + case _: + latent, = VAEEncodeForInpaint().encode(vae, images, mask, grow_mask_by) + + results = super().run(pipe, None, None,None,None,None, image_output, link_id, save_prefix, + None, _model, positive, negative, latent, vae, None, None, + tile_size, prompt, extra_pnginfo, my_unique_id, force_full_denoise, disable_noise) + + result = results['result'] + + return {"ui":results['ui'],"result":(result[0], result[1], result[0]['vae'],)} + +# SDTurbo采样器 +class samplerSDTurbo: + + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return {"required": + {"pipe": ("PIPE_LINE",), + "image_output": (["Hide", "Preview", "Save", "Hide&Save", "Sender", "Sender&Save"],{"default": "Preview"}), + "link_id": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + "save_prefix": ("STRING", {"default": "ComfyUI"}), + }, + "optional": { + "model": ("MODEL",), + }, + "hidden": + {"tile_size": "INT", "prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", + "my_unique_id": "UNIQUE_ID", + "embeddingsList": (folder_paths.get_filename_list("embeddings"),) + } + } + + RETURN_TYPES = ("PIPE_LINE", "IMAGE",) + RETURN_NAMES = ("pipe", "image",) + OUTPUT_NODE = True + FUNCTION = "run" + + CATEGORY = "EasyUse/Sampler" + + def run(self, pipe, image_output, link_id, save_prefix, model=None, tile_size=None, prompt=None, extra_pnginfo=None, my_unique_id=None,): + # Clean loaded_objects + easyCache.update_loaded_objects(prompt) + + my_unique_id = int(my_unique_id) + + samp_model = pipe["model"] if model is None else model + samp_positive = pipe["positive"] + samp_negative = pipe["negative"] + samp_samples = pipe["samples"] + samp_vae = pipe["vae"] + samp_clip = pipe["clip"] + + samp_seed = pipe['seed'] + + samp_sampler = pipe['loader_settings']['sampler'] + + sigmas = pipe['loader_settings']['sigmas'] + cfg = pipe['loader_settings']['cfg'] + steps = pipe['loader_settings']['steps'] + + disable_noise = False + + preview_latent = True + if image_output in ("Hide", "Hide&Save"): + preview_latent = False + + # 推理初始时间 + start_time = 
int(time.time() * 1000) + # 开始推理 + samp_samples = sampler.custom_ksampler(samp_model, samp_seed, steps, cfg, samp_sampler, sigmas, samp_positive, samp_negative, samp_samples, + disable_noise, preview_latent) + # 推理结束时间 + end_time = int(time.time() * 1000) + + latent = samp_samples['samples'] + + # 解码图片 + if tile_size is not None: + samp_images = samp_vae.decode_tiled(latent, tile_x=tile_size // 8, tile_y=tile_size // 8, ) + else: + samp_images = samp_vae.decode(latent).cpu() + + # 推理总耗时(包含解码) + end_decode_time = int(time.time() * 1000) + spent_time = 'Diffusion:' + str((end_time - start_time) / 1000) + '″, VAEDecode:' + str( + (end_decode_time - end_time) / 1000) + '″ ' + + # Clean loaded_objects + easyCache.update_loaded_objects(prompt) + + results = easySave(samp_images, save_prefix, image_output, prompt, extra_pnginfo) + sampler.update_value_by_id("results", my_unique_id, results) + + new_pipe = { + "model": samp_model, + "positive": samp_positive, + "negative": samp_negative, + "vae": samp_vae, + "clip": samp_clip, + + "samples": samp_samples, + "images": samp_images, + "seed": samp_seed, + + "loader_settings": { + **pipe["loader_settings"], + "spent_time": spent_time + } + } + + sampler.update_value_by_id("pipe_line", my_unique_id, new_pipe) + + del pipe + + if image_output in ("Hide", "Hide&Save"): + return {"ui": {}, + "result": sampler.get_output(new_pipe, )} + + if image_output in ("Sender", "Sender&Save"): + PromptServer.instance.send_sync("img-send", {"link_id": link_id, "images": results}) + + return {"ui": {"images": results}, + "result": sampler.get_output(new_pipe, )} + + +# Cascade完整采样器 +class samplerCascadeFull: + + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return {"required": + {"pipe": ("PIPE_LINE",), + "encode_vae_name": (["None"] + folder_paths.get_filename_list("vae"),), + "decode_vae_name": (["None"] + folder_paths.get_filename_list("vae"),), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 4.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS, {"default":"euler_ancestral"}), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS, {"default":"simple"}), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "image_output": (["Hide", "Preview", "Save", "Hide&Save", "Sender", "Sender&Save"],), + "link_id": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + "save_prefix": ("STRING", {"default": "ComfyUI"}), + "seed": ("INT", {"default": 0, "min": 0, "max": MAX_SEED_NUM}), + }, + + "optional": { + "image_to_latent_c": ("IMAGE",), + "latent_c": ("LATENT",), + "model_c": ("MODEL",), + }, + "hidden":{"tile_size": "INT", "prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID", + "embeddingsList": (folder_paths.get_filename_list("embeddings"),) + } + } + + RETURN_TYPES = ("PIPE_LINE", "MODEL", "LATENT") + RETURN_NAMES = ("pipe", "model_b", "latent_b") + OUTPUT_NODE = True + + FUNCTION = "run" + CATEGORY = "EasyUse/Sampler" + + def run(self, pipe, encode_vae_name, decode_vae_name, steps, cfg, sampler_name, scheduler, denoise, image_output, link_id, save_prefix, seed, image_to_latent_c=None, latent_c=None, model_c=None, tile_size=None, prompt=None, extra_pnginfo=None, my_unique_id=None, force_full_denoise=False, disable_noise=False): + + encode_vae_name = encode_vae_name if encode_vae_name is not None else pipe['loader_settings']['encode_vae_name'] + decode_vae_name = decode_vae_name if decode_vae_name is not 
None else pipe['loader_settings']['decode_vae_name'] + + batch_size = pipe["loader_settings"]["batch_size"] if "batch_size" in pipe["loader_settings"] else 1 + if image_to_latent_c is not None: + if encode_vae_name != 'None': + encode_vae = easyCache.load_vae(encode_vae_name) + else: + encode_vae = pipe['vae'][0] + if "compression" not in pipe["loader_settings"]: + raise Exception("compression is not found") + + compression = pipe["loader_settings"]['compression'] + width = image_to_latent_c.shape[-2] + height = image_to_latent_c.shape[-3] + out_width = (width // compression) * encode_vae.downscale_ratio + out_height = (height // compression) * encode_vae.downscale_ratio + + s = comfy.utils.common_upscale(image_to_latent_c.movedim(-1, 1), out_width, out_height, "bicubic", + "center").movedim(1, -1) + latent_c = encode_vae.encode(s[:, :, :, :3]) + latent_b = torch.zeros([latent_c.shape[0], 4, height // 4, width // 4]) + + samples_c = {"samples": latent_c} + samples_c = RepeatLatentBatch().repeat(samples_c, batch_size)[0] + + samples_b = {"samples": latent_b} + samples_b = RepeatLatentBatch().repeat(samples_b, batch_size)[0] + images = image_to_latent_c + elif latent_c is not None: + samples_c = latent_c + samples_b = pipe["samples"][1] + images = pipe["images"] + else: + samples_c = pipe["samples"][0] + samples_b = pipe["samples"][1] + images = pipe["images"] + + # Clean loaded_objects + easyCache.update_loaded_objects(prompt) + samp_model = model_c if model_c else pipe["model"][0] + samp_positive = pipe["positive"] + samp_negative = pipe["negative"] + samp_samples = samples_c + + samp_seed = seed if seed is not None else pipe['seed'] + + steps = steps if steps is not None else pipe['loader_settings']['steps'] + start_step = pipe['loader_settings']['start_step'] if 'start_step' in pipe['loader_settings'] else 0 + last_step = pipe['loader_settings']['last_step'] if 'last_step' in pipe['loader_settings'] else 10000 + cfg = cfg if cfg is not None else pipe['loader_settings']['cfg'] + sampler_name = sampler_name if sampler_name is not None else pipe['loader_settings']['sampler_name'] + scheduler = scheduler if scheduler is not None else pipe['loader_settings']['scheduler'] + denoise = denoise if denoise is not None else pipe['loader_settings']['denoise'] + # 推理初始时间 + start_time = int(time.time() * 1000) + # 开始推理 + samp_samples = sampler.common_ksampler(samp_model, samp_seed, steps, cfg, sampler_name, scheduler, + samp_positive, samp_negative, samp_samples, denoise=denoise, + preview_latent=False, start_step=start_step, + last_step=last_step, force_full_denoise=False, + disable_noise=False) + # 推理结束时间 + end_time = int(time.time() * 1000) + stage_c = samp_samples["samples"] + results = None + + if image_output not in ['Hide', 'Hide&Save']: + if decode_vae_name != 'None': + decode_vae = easyCache.load_vae(decode_vae_name) + else: + decode_vae = pipe['vae'][0] + samp_images = decode_vae.decode(stage_c).cpu() + + results = easySave(samp_images, save_prefix, image_output, prompt, extra_pnginfo) + sampler.update_value_by_id("results", my_unique_id, results) + + # 推理总耗时(包含解码) + end_decode_time = int(time.time() * 1000) + spent_time = 'Diffusion:' + str((end_time - start_time) / 1000) + '″, VAEDecode:' + str( + (end_decode_time - end_time) / 1000) + '″ ' + + # Clean loaded_objects + easyCache.update_loaded_objects(prompt) + # zero_out + c1 = [] + for t in samp_positive: + d = t[1].copy() + if "pooled_output" in d: + d["pooled_output"] = torch.zeros_like(d["pooled_output"]) + n = [torch.zeros_like(t[0]), 
d] + c1.append(n) + # stage_b_conditioning + c2 = [] + for t in c1: + d = t[1].copy() + d['stable_cascade_prior'] = stage_c + n = [t[0], d] + c2.append(n) + + + new_pipe = { + "model": pipe['model'][1], + "positive": c2, + "negative": c1, + "vae": pipe['vae'][1], + "clip": pipe['clip'], + + "samples": samples_b, + "images": images, + "seed": seed, + + "loader_settings": { + **pipe["loader_settings"], + "spent_time": spent_time + } + } + sampler.update_value_by_id("pipe_line", my_unique_id, new_pipe) + + del pipe + + if image_output in ("Hide", "Hide&Save"): + return {"ui": {}, + "result": sampler.get_output(new_pipe, )} + + if image_output in ("Sender", "Sender&Save") and results is not None: + PromptServer.instance.send_sync("img-send", {"link_id": link_id, "images": results}) + + return {"ui": {"images": results}, "result": (new_pipe, new_pipe['model'], new_pipe['samples'])} + +# 简易采样器Cascade +class samplerCascadeSimple(samplerCascadeFull): + + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return {"required": + {"pipe": ("PIPE_LINE",), + "image_output": (["Hide", "Preview", "Save", "Hide&Save", "Sender", "Sender&Save"], {"default": "Preview"}), + "link_id": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + "save_prefix": ("STRING", {"default": "ComfyUI"}), + }, + "optional": { + "model_c": ("MODEL",), + }, + "hidden": + {"tile_size": "INT", "prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID", + "embeddingsList": (folder_paths.get_filename_list("embeddings"),) + } + } + + + RETURN_TYPES = ("PIPE_LINE", "IMAGE",) + RETURN_NAMES = ("pipe", "image",) + OUTPUT_NODE = True + FUNCTION = "simple" + CATEGORY = "EasyUse/Sampler" + + def simple(self, pipe, image_output, link_id, save_prefix, model_c=None, tile_size=None, prompt=None, extra_pnginfo=None, my_unique_id=None, force_full_denoise=False, disable_noise=False): + + return super().run(pipe, None, None,None, None,None,None,None, image_output, link_id, save_prefix, + None, None, None, model_c, tile_size, prompt, extra_pnginfo, my_unique_id, force_full_denoise, disable_noise) + +class unsampler: + @classmethod + def INPUT_TYPES(s): + return {"required":{ + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "end_at_step": ("INT", {"default": 0, "min": 0, "max": 10000}), + "cfg": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS,), + "normalize": (["disable", "enable"],), + + }, + "optional": { + "pipe": ("PIPE_LINE",), + "optional_model": ("MODEL",), + "optional_positive": ("CONDITIONING",), + "optional_negative": ("CONDITIONING",), + "optional_latent": ("LATENT",), + } + } + + RETURN_TYPES = ("PIPE_LINE", "LATENT",) + RETURN_NAMES = ("pipe", "latent",) + FUNCTION = "unsampler" + + CATEGORY = "EasyUse/Sampler" + + def unsampler(self, cfg, sampler_name, steps, end_at_step, scheduler, normalize, pipe=None, optional_model=None, optional_positive=None, optional_negative=None, + optional_latent=None): + + model = optional_model if optional_model is not None else pipe["model"] + positive = optional_positive if optional_positive is not None else pipe["positive"] + negative = optional_negative if optional_negative is not None else pipe["negative"] + latent_image = optional_latent if optional_latent is not None else pipe["samples"] + + normalize = normalize == "enable" + device = comfy.model_management.get_torch_device() + latent = latent_image + latent_image = 
latent["samples"] + + end_at_step = min(end_at_step, steps - 1) + end_at_step = steps - end_at_step + + noise = torch.zeros(latent_image.size(), dtype=latent_image.dtype, layout=latent_image.layout, device="cpu") + noise_mask = None + if "noise_mask" in latent: + noise_mask = comfy.sample.prepare_mask(latent["noise_mask"], noise.shape, device) + + + noise = noise.to(device) + latent_image = latent_image.to(device) + + _positive = comfy.sampler_helpers.convert_cond(positive) + _negative = comfy.sampler_helpers.convert_cond(negative) + models, inference_memory = comfy.sampler_helpers.get_additional_models({"positive": _positive, "negative": _negative}, model.model_dtype()) + + + comfy.model_management.load_models_gpu([model] + models, model.memory_required(noise.shape) + inference_memory) + + model_patcher = comfy.model_patcher.ModelPatcher(model.model, load_device=device, offload_device=comfy.model_management.unet_offload_device()) + + sampler = comfy.samplers.KSampler(model_patcher, steps=steps, device=device, sampler=sampler_name, + scheduler=scheduler, denoise=1.0, model_options=model.model_options) + + sigmas = sampler.sigmas.flip(0) + 0.0001 + + pbar = comfy.utils.ProgressBar(steps) + + def callback(step, x0, x, total_steps): + pbar.update_absolute(step + 1, total_steps) + + samples = sampler.sample(noise, positive, negative, cfg=cfg, latent_image=latent_image, + force_full_denoise=False, denoise_mask=noise_mask, sigmas=sigmas, start_step=0, + last_step=end_at_step, callback=callback) + if normalize: + # technically doesn't normalize because unsampling is not guaranteed to end at a std given by the schedule + samples -= samples.mean() + samples /= samples.std() + samples = samples.cpu() + + comfy.sample.cleanup_additional_models(models) + + out = latent.copy() + out["samples"] = samples + + if pipe is None: + pipe = {} + + new_pipe = { + **pipe, + "samples": out + } + + return (new_pipe, out,) + +#---------------------------------------------------------------采样器 结束---------------------------------------------------------------------- + +#---------------------------------------------------------------修复 开始----------------------------------------------------------------------# + +# 高清修复 +class hiresFix: + upscale_methods = ["nearest-exact", "bilinear", "area", "bicubic", "lanczos", "bislerp"] + crop_methods = ["disabled", "center"] + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "model_name": (folder_paths.get_filename_list("upscale_models"),), + "rescale_after_model": ([False, True], {"default": True}), + "rescale_method": (s.upscale_methods,), + "rescale": (["by percentage", "to Width/Height", 'to longer side - maintain aspect'],), + "percent": ("INT", {"default": 50, "min": 0, "max": 1000, "step": 1}), + "width": ("INT", {"default": 1024, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + "height": ("INT", {"default": 1024, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + "longer_side": ("INT", {"default": 1024, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + "crop": (s.crop_methods,), + "image_output": (["Hide", "Preview", "Save", "Hide&Save", "Sender", "Sender&Save"],{"default": "Preview"}), + "link_id": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + "save_prefix": ("STRING", {"default": "ComfyUI"}), + }, + "optional": { + "pipe": ("PIPE_LINE",), + "image": ("IMAGE",), + "vae": ("VAE",), + }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID", + }, + } + + RETURN_TYPES = ("PIPE_LINE", "IMAGE", 
"LATENT", ) + RETURN_NAMES = ('pipe', 'image', "latent", ) + + FUNCTION = "upscale" + CATEGORY = "EasyUse/Fix" + OUTPUT_NODE = True + + def vae_encode_crop_pixels(self, pixels): + x = (pixels.shape[1] // 8) * 8 + y = (pixels.shape[2] // 8) * 8 + if pixels.shape[1] != x or pixels.shape[2] != y: + x_offset = (pixels.shape[1] % 8) // 2 + y_offset = (pixels.shape[2] % 8) // 2 + pixels = pixels[:, x_offset:x + x_offset, y_offset:y + y_offset, :] + return pixels + + def upscale(self, model_name, rescale_after_model, rescale_method, rescale, percent, width, height, + longer_side, crop, image_output, link_id, save_prefix, pipe=None, image=None, vae=None, prompt=None, + extra_pnginfo=None, my_unique_id=None): + + new_pipe = {} + if pipe is not None: + image = image if image is not None else pipe["images"] + vae = vae if vae is not None else pipe.get("vae") + elif image is None or vae is None: + raise ValueError("pipe or image or vae missing.") + # Load Model + model_path = folder_paths.get_full_path("upscale_models", model_name) + sd = comfy.utils.load_torch_file(model_path, safe_load=True) + upscale_model = model_loading.load_state_dict(sd).eval() + + # Model upscale + device = comfy.model_management.get_torch_device() + upscale_model.to(device) + in_img = image.movedim(-1, -3).to(device) + + tile = 128 + 64 + overlap = 8 + steps = in_img.shape[0] * comfy.utils.get_tiled_scale_steps(in_img.shape[3], in_img.shape[2], tile_x=tile, + tile_y=tile, overlap=overlap) + pbar = comfy.utils.ProgressBar(steps) + s = comfy.utils.tiled_scale(in_img, lambda a: upscale_model(a), tile_x=tile, tile_y=tile, overlap=overlap, + upscale_amount=upscale_model.scale, pbar=pbar) + upscale_model.cpu() + s = torch.clamp(s.movedim(-3, -1), min=0, max=1.0) + + # Post Model Rescale + if rescale_after_model == True: + samples = s.movedim(-1, 1) + orig_height = samples.shape[2] + orig_width = samples.shape[3] + if rescale == "by percentage" and percent != 0: + height = percent / 100 * orig_height + width = percent / 100 * orig_width + if (width > MAX_RESOLUTION): + width = MAX_RESOLUTION + if (height > MAX_RESOLUTION): + height = MAX_RESOLUTION + + width = easySampler.enforce_mul_of_64(width) + height = easySampler.enforce_mul_of_64(height) + elif rescale == "to longer side - maintain aspect": + longer_side = easySampler.enforce_mul_of_64(longer_side) + if orig_width > orig_height: + width, height = longer_side, easySampler.enforce_mul_of_64(longer_side * orig_height / orig_width) + else: + width, height = easySampler.enforce_mul_of_64(longer_side * orig_width / orig_height), longer_side + + s = comfy.utils.common_upscale(samples, width, height, rescale_method, crop) + s = s.movedim(1, -1) + + # vae encode + pixels = self.vae_encode_crop_pixels(s) + t = vae.encode(pixels[:, :, :, :3]) + + if pipe is not None: + new_pipe = { + "model": pipe['model'], + "positive": pipe['positive'], + "negative": pipe['negative'], + "vae": vae, + "clip": pipe['clip'], + + "samples": {"samples": t}, + "images": s, + "seed": pipe['seed'], + + "loader_settings": { + **pipe["loader_settings"], + } + } + del pipe + else: + new_pipe = {} + + results = easySave(s, save_prefix, image_output, prompt, extra_pnginfo) + + if image_output in ("Sender", "Sender&Save"): + PromptServer.instance.send_sync("img-send", {"link_id": link_id, "images": results}) + + if image_output in ("Hide", "Hide&Save"): + return (new_pipe, s, {"samples": t},) + + return {"ui": {"images": results}, + "result": (new_pipe, s, {"samples": t},)} + +# 预细节修复 +class preDetailerFix: + 
@classmethod + def INPUT_TYPES(s): + return {"required": { + "pipe": ("PIPE_LINE",), + "guide_size": ("FLOAT", {"default": 256, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + "guide_size_for": ("BOOLEAN", {"default": True, "label_on": "bbox", "label_off": "crop_region"}), + "max_size": ("FLOAT", {"default": 768, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS + ['align_your_steps'],), + "denoise": ("FLOAT", {"default": 0.5, "min": 0.0001, "max": 1.0, "step": 0.01}), + "feather": ("INT", {"default": 5, "min": 0, "max": 100, "step": 1}), + "noise_mask": ("BOOLEAN", {"default": True, "label_on": "enabled", "label_off": "disabled"}), + "force_inpaint": ("BOOLEAN", {"default": True, "label_on": "enabled", "label_off": "disabled"}), + "drop_size": ("INT", {"min": 1, "max": MAX_RESOLUTION, "step": 1, "default": 10}), + "wildcard": ("STRING", {"multiline": True, "dynamicPrompts": False}), + "cycle": ("INT", {"default": 1, "min": 1, "max": 10, "step": 1}), + }, + "optional": { + "bbox_segm_pipe": ("PIPE_LINE",), + "sam_pipe": ("PIPE_LINE",), + "optional_image": ("IMAGE",), + }, + } + + RETURN_TYPES = ("PIPE_LINE",) + RETURN_NAMES = ("pipe",) + OUTPUT_IS_LIST = (False,) + FUNCTION = "doit" + + CATEGORY = "EasyUse/Fix" + + def doit(self, pipe, guide_size, guide_size_for, max_size, seed, steps, cfg, sampler_name, scheduler, denoise, feather, noise_mask, force_inpaint, drop_size, wildcard, cycle, bbox_segm_pipe=None, sam_pipe=None, optional_image=None): + + model = pipe["model"] if "model" in pipe else None + if model is None: + raise Exception(f"[ERROR] pipe['model'] is missing") + clip = pipe["clip"] if"clip" in pipe else None + if clip is None: + raise Exception(f"[ERROR] pipe['clip'] is missing") + vae = pipe["vae"] if "vae" in pipe else None + if vae is None: + raise Exception(f"[ERROR] pipe['vae'] is missing") + if optional_image is not None: + images = optional_image + else: + images = pipe["images"] if "images" in pipe else None + if images is None: + raise Exception(f"[ERROR] pipe['image'] is missing") + positive = pipe["positive"] if "positive" in pipe else None + if positive is None: + raise Exception(f"[ERROR] pipe['positive'] is missing") + negative = pipe["negative"] if "negative" in pipe else None + if negative is None: + raise Exception(f"[ERROR] pipe['negative'] is missing") + bbox_segm_pipe = bbox_segm_pipe or (pipe["bbox_segm_pipe"] if pipe and "bbox_segm_pipe" in pipe else None) + if bbox_segm_pipe is None: + raise Exception(f"[ERROR] bbox_segm_pipe or pipe['bbox_segm_pipe'] is missing") + sam_pipe = sam_pipe or (pipe["sam_pipe"] if pipe and "sam_pipe" in pipe else None) + if sam_pipe is None: + raise Exception(f"[ERROR] sam_pipe or pipe['sam_pipe'] is missing") + + loader_settings = pipe["loader_settings"] if "loader_settings" in pipe else {} + + if(scheduler == 'align_your_steps'): + model_version = get_sd_version(model) + if model_version == 'sdxl': + scheduler = 'AYS SDXL' + elif model_version == 'svd': + scheduler = 'AYS SVD' + else: + scheduler = 'AYS SD1' + + new_pipe = { + "images": images, + "model": model, + "clip": clip, + "vae": vae, + "positive": positive, + "negative": negative, + "seed": seed, + + "bbox_segm_pipe": bbox_segm_pipe, + "sam_pipe": sam_pipe, + + 
"loader_settings": loader_settings, + + "detail_fix_settings": { + "guide_size": guide_size, + "guide_size_for": guide_size_for, + "max_size": max_size, + "seed": seed, + "steps": steps, + "cfg": cfg, + "sampler_name": sampler_name, + "scheduler": scheduler, + "denoise": denoise, + "feather": feather, + "noise_mask": noise_mask, + "force_inpaint": force_inpaint, + "drop_size": drop_size, + "wildcard": wildcard, + "cycle": cycle + } + } + + + del bbox_segm_pipe + del sam_pipe + + return (new_pipe,) + +# 预遮罩细节修复 +class preMaskDetailerFix: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "pipe": ("PIPE_LINE",), + "mask": ("MASK",), + + "guide_size": ("FLOAT", {"default": 384, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + "guide_size_for": ("BOOLEAN", {"default": True, "label_on": "bbox", "label_off": "crop_region"}), + "max_size": ("FLOAT", {"default": 1024, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + "mask_mode": ("BOOLEAN", {"default": True, "label_on": "masked only", "label_off": "whole"}), + + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS,), + "denoise": ("FLOAT", {"default": 0.5, "min": 0.0001, "max": 1.0, "step": 0.01}), + + "feather": ("INT", {"default": 5, "min": 0, "max": 100, "step": 1}), + "crop_factor": ("FLOAT", {"default": 3.0, "min": 1.0, "max": 10, "step": 0.1}), + "drop_size": ("INT", {"min": 1, "max": MAX_RESOLUTION, "step": 1, "default": 10}), + "refiner_ratio": ("FLOAT", {"default": 0.2, "min": 0.0, "max": 1.0}), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 100}), + "cycle": ("INT", {"default": 1, "min": 1, "max": 10, "step": 1}), + }, + "optional": { + # "patch": ("INPAINT_PATCH",), + "optional_image": ("IMAGE",), + "inpaint_model": ("BOOLEAN", {"default": False, "label_on": "enabled", "label_off": "disabled"}), + "noise_mask_feather": ("INT", {"default": 20, "min": 0, "max": 100, "step": 1}), + }, + } + + RETURN_TYPES = ("PIPE_LINE",) + RETURN_NAMES = ("pipe",) + OUTPUT_IS_LIST = (False,) + FUNCTION = "doit" + + CATEGORY = "EasyUse/Fix" + + def doit(self, pipe, mask, guide_size, guide_size_for, max_size, mask_mode, seed, steps, cfg, sampler_name, scheduler, denoise, feather, crop_factor, drop_size,refiner_ratio, batch_size, cycle, optional_image=None, inpaint_model=False, noise_mask_feather=20): + + model = pipe["model"] if "model" in pipe else None + if model is None: + raise Exception(f"[ERROR] pipe['model'] is missing") + clip = pipe["clip"] if"clip" in pipe else None + if clip is None: + raise Exception(f"[ERROR] pipe['clip'] is missing") + vae = pipe["vae"] if "vae" in pipe else None + if vae is None: + raise Exception(f"[ERROR] pipe['vae'] is missing") + if optional_image is not None: + images = optional_image + else: + images = pipe["images"] if "images" in pipe else None + if images is None: + raise Exception(f"[ERROR] pipe['image'] is missing") + positive = pipe["positive"] if "positive" in pipe else None + if positive is None: + raise Exception(f"[ERROR] pipe['positive'] is missing") + negative = pipe["negative"] if "negative" in pipe else None + if negative is None: + raise Exception(f"[ERROR] pipe['negative'] is missing") + latent = pipe["samples"] if "samples" in pipe else None + if latent is None: + raise Exception(f"[ERROR] pipe['samples'] is missing") + + if 'noise_mask' 
not in latent: + if images is None: + raise Exception("No Images found") + if vae is None: + raise Exception("No VAE found") + x = (images.shape[1] // 8) * 8 + y = (images.shape[2] // 8) * 8 + mask = torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), + size=(images.shape[1], images.shape[2]), mode="bilinear") + + pixels = images.clone() + if pixels.shape[1] != x or pixels.shape[2] != y: + x_offset = (pixels.shape[1] % 8) // 2 + y_offset = (pixels.shape[2] % 8) // 2 + pixels = pixels[:, x_offset:x + x_offset, y_offset:y + y_offset, :] + mask = mask[:, :, x_offset:x + x_offset, y_offset:y + y_offset] + + mask_erosion = mask + + m = (1.0 - mask.round()).squeeze(1) + for i in range(3): + pixels[:, :, :, i] -= 0.5 + pixels[:, :, :, i] *= m + pixels[:, :, :, i] += 0.5 + t = vae.encode(pixels) + + latent = {"samples": t, "noise_mask": (mask_erosion[:, :, :x, :y].round())} + # when patch was linked + # if patch is not None: + # worker = InpaintWorker(node_name="easy kSamplerInpainting") + # model, = worker.patch(model, latent, patch) + + loader_settings = pipe["loader_settings"] if "loader_settings" in pipe else {} + + new_pipe = { + "images": images, + "model": model, + "clip": clip, + "vae": vae, + "positive": positive, + "negative": negative, + "seed": seed, + "mask": mask, + + "loader_settings": loader_settings, + + "detail_fix_settings": { + "guide_size": guide_size, + "guide_size_for": guide_size_for, + "max_size": max_size, + "seed": seed, + "steps": steps, + "cfg": cfg, + "sampler_name": sampler_name, + "scheduler": scheduler, + "denoise": denoise, + "feather": feather, + "crop_factor": crop_factor, + "drop_size": drop_size, + "refiner_ratio": refiner_ratio, + "batch_size": batch_size, + "cycle": cycle + }, + + "mask_settings": { + "mask_mode": mask_mode, + "inpaint_model": inpaint_model, + "noise_mask_feather": noise_mask_feather + } + } + + del pipe + + return (new_pipe,) + +# 细节修复 +class detailerFix: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "pipe": ("PIPE_LINE",), + "image_output": (["Hide", "Preview", "Save", "Hide&Save", "Sender", "Sender&Save"],{"default": "Preview"}), + "link_id": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + "save_prefix": ("STRING", {"default": "ComfyUI"}), + }, + "optional": { + "model": ("MODEL",), + }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID", } + } + + RETURN_TYPES = ("PIPE_LINE", "IMAGE", "IMAGE", "IMAGE") + RETURN_NAMES = ("pipe", "image", "cropped_refined", "cropped_enhanced_alpha") + OUTPUT_NODE = True + OUTPUT_IS_LIST = (False, False, True, True) + FUNCTION = "doit" + + CATEGORY = "EasyUse/Fix" + + + def doit(self, pipe, image_output, link_id, save_prefix, model=None, prompt=None, extra_pnginfo=None, my_unique_id=None): + + # Clean loaded_objects + easyCache.update_loaded_objects(prompt) + + my_unique_id = int(my_unique_id) + + model = model or (pipe["model"] if "model" in pipe else None) + if model is None: + raise Exception(f"[ERROR] model or pipe['model'] is missing") + + detail_fix_settings = pipe["detail_fix_settings"] if "detail_fix_settings" in pipe else None + if detail_fix_settings is None: + raise Exception(f"[ERROR] detail_fix_settings or pipe['detail_fix_settings'] is missing") + + mask = pipe["mask"] if "mask" in pipe else None + + image = pipe["images"] + clip = pipe["clip"] + vae = pipe["vae"] + seed = pipe["seed"] + positive = pipe["positive"] + negative = pipe["negative"] + loader_settings = 
pipe["loader_settings"] if "loader_settings" in pipe else {} + guide_size = pipe["detail_fix_settings"]["guide_size"] if "guide_size" in pipe["detail_fix_settings"] else 256 + guide_size_for = pipe["detail_fix_settings"]["guide_size_for"] if "guide_size_for" in pipe[ + "detail_fix_settings"] else True + max_size = pipe["detail_fix_settings"]["max_size"] if "max_size" in pipe["detail_fix_settings"] else 768 + steps = pipe["detail_fix_settings"]["steps"] if "steps" in pipe["detail_fix_settings"] else 20 + cfg = pipe["detail_fix_settings"]["cfg"] if "cfg" in pipe["detail_fix_settings"] else 1.0 + sampler_name = pipe["detail_fix_settings"]["sampler_name"] if "sampler_name" in pipe[ + "detail_fix_settings"] else None + scheduler = pipe["detail_fix_settings"]["scheduler"] if "scheduler" in pipe["detail_fix_settings"] else None + denoise = pipe["detail_fix_settings"]["denoise"] if "denoise" in pipe["detail_fix_settings"] else 0.5 + feather = pipe["detail_fix_settings"]["feather"] if "feather" in pipe["detail_fix_settings"] else 5 + crop_factor = pipe["detail_fix_settings"]["crop_factor"] if "crop_factor" in pipe["detail_fix_settings"] else 3.0 + drop_size = pipe["detail_fix_settings"]["drop_size"] if "drop_size" in pipe["detail_fix_settings"] else 10 + refiner_ratio = pipe["detail_fix_settings"]["refiner_ratio"] if "refiner_ratio" in pipe else 0.2 + batch_size = pipe["detail_fix_settings"]["batch_size"] if "batch_size" in pipe["detail_fix_settings"] else 1 + noise_mask = pipe["detail_fix_settings"]["noise_mask"] if "noise_mask" in pipe["detail_fix_settings"] else None + force_inpaint = pipe["detail_fix_settings"]["force_inpaint"] if "force_inpaint" in pipe["detail_fix_settings"] else False + wildcard = pipe["detail_fix_settings"]["wildcard"] if "wildcard" in pipe["detail_fix_settings"] else "" + cycle = pipe["detail_fix_settings"]["cycle"] if "cycle" in pipe["detail_fix_settings"] else 1 + + bbox_segm_pipe = pipe["bbox_segm_pipe"] if pipe and "bbox_segm_pipe" in pipe else None + sam_pipe = pipe["sam_pipe"] if "sam_pipe" in pipe else None + + # 细节修复初始时间 + start_time = int(time.time() * 1000) + if "mask_settings" in pipe: + mask_mode = pipe['mask_settings']["mask_mode"] if "inpaint_model" in pipe['mask_settings'] else True + inpaint_model = pipe['mask_settings']["inpaint_model"] if "inpaint_model" in pipe['mask_settings'] else False + noise_mask_feather = pipe['mask_settings']["noise_mask_feather"] if "noise_mask_feather" in pipe['mask_settings'] else 20 + cls = ALL_NODE_CLASS_MAPPINGS["MaskDetailerPipe"] + if "MaskDetailerPipe" not in ALL_NODE_CLASS_MAPPINGS: + raise Exception(f"[ERROR] To use MaskDetailerPipe, you need to install 'Impact Pack'") + basic_pipe = (model, clip, vae, positive, negative) + result_img, result_cropped_enhanced, result_cropped_enhanced_alpha, basic_pipe, refiner_basic_pipe_opt = cls().doit(image, mask, basic_pipe, guide_size, guide_size_for, max_size, mask_mode, + seed, steps, cfg, sampler_name, scheduler, denoise, + feather, crop_factor, drop_size, refiner_ratio, batch_size, cycle=1, + refiner_basic_pipe_opt=None, detailer_hook=None, inpaint_model=inpaint_model, noise_mask_feather=noise_mask_feather) + result_mask = mask + result_cnet_images = () + else: + if bbox_segm_pipe is None: + raise Exception(f"[ERROR] bbox_segm_pipe or pipe['bbox_segm_pipe'] is missing") + if sam_pipe is None: + raise Exception(f"[ERROR] sam_pipe or pipe['sam_pipe'] is missing") + bbox_detector_opt, bbox_threshold, bbox_dilation, bbox_crop_factor, segm_detector_opt = bbox_segm_pipe + 
sam_model_opt, sam_detection_hint, sam_dilation, sam_threshold, sam_bbox_expansion, sam_mask_hint_threshold, sam_mask_hint_use_negative = sam_pipe + if "FaceDetailer" not in ALL_NODE_CLASS_MAPPINGS: + raise Exception(f"[ERROR] To use FaceDetailer, you need to install 'Impact Pack'") + cls = ALL_NODE_CLASS_MAPPINGS["FaceDetailer"] + + result_img, result_cropped_enhanced, result_cropped_enhanced_alpha, result_mask, pipe, result_cnet_images = cls().doit( + image, model, clip, vae, guide_size, guide_size_for, max_size, seed, steps, cfg, sampler_name, + scheduler, + positive, negative, denoise, feather, noise_mask, force_inpaint, + bbox_threshold, bbox_dilation, bbox_crop_factor, + sam_detection_hint, sam_dilation, sam_threshold, sam_bbox_expansion, sam_mask_hint_threshold, + sam_mask_hint_use_negative, drop_size, bbox_detector_opt, wildcard, cycle, sam_model_opt, + segm_detector_opt, + detailer_hook=None) + + # 细节修复结束时间 + end_time = int(time.time() * 1000) + + spent_time = 'Fix:' + str((end_time - start_time) / 1000) + '"' + + results = easySave(result_img, save_prefix, image_output, prompt, extra_pnginfo) + sampler.update_value_by_id("results", my_unique_id, results) + + # Clean loaded_objects + easyCache.update_loaded_objects(prompt) + + new_pipe = { + "samples": None, + "images": result_img, + "model": model, + "clip": clip, + "vae": vae, + "seed": seed, + "positive": positive, + "negative": negative, + "wildcard": wildcard, + "bbox_segm_pipe": bbox_segm_pipe, + "sam_pipe": sam_pipe, + + "loader_settings": { + **loader_settings, + "spent_time": spent_time + }, + "detail_fix_settings": detail_fix_settings + } + if "mask_settings" in pipe: + new_pipe["mask_settings"] = pipe["mask_settings"] + + sampler.update_value_by_id("pipe_line", my_unique_id, new_pipe) + + del bbox_segm_pipe + del sam_pipe + del pipe + + if image_output in ("Hide", "Hide&Save"): + return (new_pipe, result_img, result_cropped_enhanced, result_cropped_enhanced_alpha, result_mask, result_cnet_images) + + if image_output in ("Sender", "Sender&Save"): + PromptServer.instance.send_sync("img-send", {"link_id": link_id, "images": results}) + + return {"ui": {"images": results}, "result": (new_pipe, result_img, result_cropped_enhanced, result_cropped_enhanced_alpha, result_mask, result_cnet_images )} + +class ultralyticsDetectorForDetailerFix: + @classmethod + def INPUT_TYPES(s): + bboxs = ["bbox/" + x for x in folder_paths.get_filename_list("ultralytics_bbox")] + segms = ["segm/" + x for x in folder_paths.get_filename_list("ultralytics_segm")] + return {"required": + {"model_name": (bboxs + segms,), + "bbox_threshold": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "bbox_dilation": ("INT", {"default": 10, "min": -512, "max": 512, "step": 1}), + "bbox_crop_factor": ("FLOAT", {"default": 3.0, "min": 1.0, "max": 10, "step": 0.1}), + } + } + + RETURN_TYPES = ("PIPE_LINE",) + RETURN_NAMES = ("bbox_segm_pipe",) + FUNCTION = "doit" + + CATEGORY = "EasyUse/Fix" + + def doit(self, model_name, bbox_threshold, bbox_dilation, bbox_crop_factor): + if 'UltralyticsDetectorProvider' not in ALL_NODE_CLASS_MAPPINGS: + raise Exception(f"[ERROR] To use UltralyticsDetectorProvider, you need to install 'Impact Pack'") + cls = ALL_NODE_CLASS_MAPPINGS['UltralyticsDetectorProvider'] + bbox_detector, segm_detector = cls().doit(model_name) + pipe = (bbox_detector, bbox_threshold, bbox_dilation, bbox_crop_factor, segm_detector) + return (pipe,) + +class samLoaderForDetailerFix: + @classmethod + def INPUT_TYPES(cls): + return { + 
"required": { + "model_name": (folder_paths.get_filename_list("sams"),), + "device_mode": (["AUTO", "Prefer GPU", "CPU"],{"default": "AUTO"}), + "sam_detection_hint": ( + ["center-1", "horizontal-2", "vertical-2", "rect-4", "diamond-4", "mask-area", "mask-points", + "mask-point-bbox", "none"],), + "sam_dilation": ("INT", {"default": 0, "min": -512, "max": 512, "step": 1}), + "sam_threshold": ("FLOAT", {"default": 0.93, "min": 0.0, "max": 1.0, "step": 0.01}), + "sam_bbox_expansion": ("INT", {"default": 0, "min": 0, "max": 1000, "step": 1}), + "sam_mask_hint_threshold": ("FLOAT", {"default": 0.7, "min": 0.0, "max": 1.0, "step": 0.01}), + "sam_mask_hint_use_negative": (["False", "Small", "Outter"],), + } + } + + RETURN_TYPES = ("PIPE_LINE",) + RETURN_NAMES = ("sam_pipe",) + FUNCTION = "doit" + + CATEGORY = "EasyUse/Fix" + + def doit(self, model_name, device_mode, sam_detection_hint, sam_dilation, sam_threshold, sam_bbox_expansion, sam_mask_hint_threshold, sam_mask_hint_use_negative): + if 'SAMLoader' not in ALL_NODE_CLASS_MAPPINGS: + raise Exception(f"[ERROR] To use SAMLoader, you need to install 'Impact Pack'") + cls = ALL_NODE_CLASS_MAPPINGS['SAMLoader'] + (sam_model,) = cls().load_model(model_name, device_mode) + pipe = (sam_model, sam_detection_hint, sam_dilation, sam_threshold, sam_bbox_expansion, sam_mask_hint_threshold, sam_mask_hint_use_negative) + return (pipe,) + +#---------------------------------------------------------------修复 结束---------------------------------------------------------------------- + +#---------------------------------------------------------------节点束 开始----------------------------------------------------------------------# +# 节点束输入 +class pipeIn: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(s): + return { + "required": {}, + "optional": { + "pipe": ("PIPE_LINE",), + "model": ("MODEL",), + "pos": ("CONDITIONING",), + "neg": ("CONDITIONING",), + "latent": ("LATENT",), + "vae": ("VAE",), + "clip": ("CLIP",), + "image": ("IMAGE",), + "xyPlot": ("XYPLOT",), + }, + "hidden": {"my_unique_id": "UNIQUE_ID"}, + } + + RETURN_TYPES = ("PIPE_LINE",) + RETURN_NAMES = ("pipe",) + FUNCTION = "flush" + + CATEGORY = "EasyUse/Pipe" + + def flush(self, pipe=None, model=None, pos=None, neg=None, latent=None, vae=None, clip=None, image=None, xyplot=None, my_unique_id=None): + + model = model if model is not None else pipe.get("model") + if model is None: + log_node_warn(f'pipeIn[{my_unique_id}]', "Model missing from pipeLine") + pos = pos if pos is not None else pipe.get("positive") + if pos is None: + log_node_warn(f'pipeIn[{my_unique_id}]', "Pos Conditioning missing from pipeLine") + neg = neg if neg is not None else pipe.get("negative") + if neg is None: + log_node_warn(f'pipeIn[{my_unique_id}]', "Neg Conditioning missing from pipeLine") + vae = vae if vae is not None else pipe.get("vae") + if vae is None: + log_node_warn(f'pipeIn[{my_unique_id}]', "VAE missing from pipeLine") + clip = clip if clip is not None else pipe.get("clip") + if clip is None: + log_node_warn(f'pipeIn[{my_unique_id}]', "Clip missing from pipeLine") + if latent is not None: + samples = latent + elif image is None: + samples = pipe.get("samples") if pipe is not None else None + image = pipe.get("images") if pipe is not None else None + elif image is not None: + if pipe is None: + batch_size = 1 + else: + batch_size = pipe["loader_settings"]["batch_size"] if "batch_size" in pipe["loader_settings"] else 1 + samples = {"samples": vae.encode(image[:, :, :, :3])} + samples = 
RepeatLatentBatch().repeat(samples, batch_size)[0] + + if pipe is None: + pipe = {"loader_settings": {"positive": "", "negative": "", "xyplot": None}} + + xyplot = xyplot if xyplot is not None else pipe['loader_settings']['xyplot'] if xyplot in pipe['loader_settings'] else None + + new_pipe = { + **pipe, + "model": model, + "positive": pos, + "negative": neg, + "vae": vae, + "clip": clip, + + "samples": samples, + "images": image, + "seed": pipe.get('seed') if pipe is not None and "seed" in pipe else None, + + "loader_settings": { + **pipe["loader_settings"], + "xyplot": xyplot + } + } + del pipe + + return (new_pipe,) + +# 节点束输出 +class pipeOut: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "pipe": ("PIPE_LINE",), + }, + "hidden": {"my_unique_id": "UNIQUE_ID"}, + } + + RETURN_TYPES = ("PIPE_LINE", "MODEL", "CONDITIONING", "CONDITIONING", "LATENT", "VAE", "CLIP", "IMAGE", "INT",) + RETURN_NAMES = ("pipe", "model", "pos", "neg", "latent", "vae", "clip", "image", "seed",) + FUNCTION = "flush" + + CATEGORY = "EasyUse/Pipe" + + def flush(self, pipe, my_unique_id=None): + model = pipe.get("model") + pos = pipe.get("positive") + neg = pipe.get("negative") + latent = pipe.get("samples") + vae = pipe.get("vae") + clip = pipe.get("clip") + image = pipe.get("images") + seed = pipe.get("seed") + + return pipe, model, pos, neg, latent, vae, clip, image, seed + +# 编辑节点束 +class pipeEdit: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "clip_skip": ("INT", {"default": -1, "min": -24, "max": 0, "step": 1}), + + "optional_positive": ("STRING", {"default": "", "multiline": True}), + "positive_token_normalization": (["none", "mean", "length", "length+mean"],), + "positive_weight_interpretation": (["comfy", "A1111", "comfy++", "compel", "fixed attention"],), + + "optional_negative": ("STRING", {"default": "", "multiline": True}), + "negative_token_normalization": (["none", "mean", "length", "length+mean"],), + "negative_weight_interpretation": (["comfy", "A1111", "comfy++", "compel", "fixed attention"],), + + "a1111_prompt_style": ("BOOLEAN", {"default": False}), + "conditioning_mode": (['replace', 'concat', 'combine', 'average', 'timestep'], {"default": "replace"}), + "average_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "old_cond_start": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "old_cond_end": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "new_cond_start": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "new_cond_end": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001}), + }, + "optional": { + "pipe": ("PIPE_LINE",), + "model": ("MODEL",), + "pos": ("CONDITIONING",), + "neg": ("CONDITIONING",), + "latent": ("LATENT",), + "vae": ("VAE",), + "clip": ("CLIP",), + "image": ("IMAGE",), + }, + "hidden": {"my_unique_id": "UNIQUE_ID", "prompt":"PROMPT"}, + } + + RETURN_TYPES = ("PIPE_LINE", "MODEL", "CONDITIONING", "CONDITIONING", "LATENT", "VAE", "CLIP", "IMAGE") + RETURN_NAMES = ("pipe", "model", "pos", "neg", "latent", "vae", "clip", "image") + FUNCTION = "edit" + + CATEGORY = "EasyUse/Pipe" + + def edit(self, clip_skip, optional_positive, positive_token_normalization, positive_weight_interpretation, optional_negative, negative_token_normalization, negative_weight_interpretation, a1111_prompt_style, conditioning_mode, average_strength, old_cond_start, old_cond_end, new_cond_start, new_cond_end, pipe=None, model=None, 
pos=None, neg=None, latent=None, vae=None, clip=None, image=None, my_unique_id=None, prompt=None): + + model = model if model is not None else pipe.get("model") + if model is None: + log_node_warn(f'pipeIn[{my_unique_id}]', "Model missing from pipeLine") + vae = vae if vae is not None else pipe.get("vae") + if vae is None: + log_node_warn(f'pipeIn[{my_unique_id}]', "VAE missing from pipeLine") + clip = clip if clip is not None else pipe.get("clip") + if clip is None: + log_node_warn(f'pipeIn[{my_unique_id}]', "Clip missing from pipeLine") + if image is None: + image = pipe.get("images") if pipe is not None else None + samples = latent if latent is not None else pipe.get("samples") + if samples is None: + log_node_warn(f'pipeIn[{my_unique_id}]', "Latent missing from pipeLine") + else: + batch_size = pipe["loader_settings"]["batch_size"] if "batch_size" in pipe["loader_settings"] else 1 + samples = {"samples": vae.encode(image[:, :, :, :3])} + samples = RepeatLatentBatch().repeat(samples, batch_size)[0] + + pipe_lora_stack = pipe.get("lora_stack") if pipe is not None and "lora_stack" in pipe else [] + + steps = pipe["loader_settings"]["steps"] if "steps" in pipe["loader_settings"] else 1 + if pos is None and optional_positive != '': + pos, positive_wildcard_prompt, model, clip = prompt_to_cond('positive', model, clip, clip_skip, + pipe_lora_stack, optional_positive, positive_token_normalization,positive_weight_interpretation, + a1111_prompt_style, my_unique_id, prompt, easyCache, True, steps) + pos = set_cond(pipe['positive'], pos, conditioning_mode, average_strength, old_cond_start, old_cond_end, new_cond_start, new_cond_end) + pipe['loader_settings']['positive'] = positive_wildcard_prompt + pipe['loader_settings']['positive_token_normalization'] = positive_token_normalization + pipe['loader_settings']['positive_weight_interpretation'] = positive_weight_interpretation + if a1111_prompt_style: + pipe['loader_settings']['a1111_prompt_style'] = True + else: + pos = pipe.get("positive") + if pos is None: + log_node_warn(f'pipeIn[{my_unique_id}]', "Pos Conditioning missing from pipeLine") + + if neg is None and optional_negative != '': + neg, negative_wildcard_prompt, model, clip = prompt_to_cond("negative", model, clip, clip_skip, pipe_lora_stack, optional_negative, + negative_token_normalization, negative_weight_interpretation, + a1111_prompt_style, my_unique_id, prompt, easyCache, True, steps) + neg = set_cond(pipe['negative'], neg, conditioning_mode, average_strength, old_cond_start, old_cond_end, new_cond_start, new_cond_end) + pipe['loader_settings']['negative'] = negative_wildcard_prompt + pipe['loader_settings']['negative_token_normalization'] = negative_token_normalization + pipe['loader_settings']['negative_weight_interpretation'] = negative_weight_interpretation + if a1111_prompt_style: + pipe['loader_settings']['a1111_prompt_style'] = True + else: + neg = pipe.get("negative") + if neg is None: + log_node_warn(f'pipeIn[{my_unique_id}]', "Neg Conditioning missing from pipeLine") + if pipe is None: + pipe = {"loader_settings": {"positive": "", "negative": "", "xyplot": None}} + + new_pipe = { + **pipe, + "model": model, + "positive": pos, + "negative": neg, + "vae": vae, + "clip": clip, + + "samples": samples, + "images": image, + "seed": pipe.get('seed') if pipe is not None and "seed" in pipe else None, + "loader_settings":{ + **pipe["loader_settings"] + } + } + del pipe + + return (new_pipe, model,pos, neg, latent, vae, clip, image) + +# 编辑节点束提示词 +class pipeEditPrompt: + @classmethod 
+ def INPUT_TYPES(s): + return { + "required": { + "pipe": ("PIPE_LINE",), + "positive": ("STRING", {"default": "", "multiline": True}), + "negative": ("STRING", {"default": "", "multiline": True}), + }, + "hidden": {"my_unique_id": "UNIQUE_ID", "prompt": "PROMPT"}, + } + + RETURN_TYPES = ("PIPE_LINE",) + RETURN_NAMES = ("pipe",) + FUNCTION = "edit" + + CATEGORY = "EasyUse/Pipe" + + def edit(self, pipe, positive, negative, my_unique_id=None, prompt=None): + model = pipe.get("model") + if model is None: + log_node_warn(f'pipeEdit[{my_unique_id}]', "Model missing from pipeLine") + + from .kolors.loader import is_kolors_model + model_type = get_sd_version(model) + if model_type == 'sdxl' and is_kolors_model(model): + auto_clean_gpu = pipe["loader_settings"]["auto_clean_gpu"] if "auto_clean_gpu" in pipe["loader_settings"] else False + chatglm3_model = pipe["chatglm3_model"] if "chatglm3_model" in pipe else None + # text encode + log_node_warn("正在进行正向提示词编码...") + positive_embeddings_final = chatglm3_adv_text_encode(chatglm3_model, positive, auto_clean_gpu) + log_node_warn("正在进行负面提示词编码...") + negative_embeddings_final = chatglm3_adv_text_encode(chatglm3_model, negative, auto_clean_gpu) + else: + clip_skip = pipe["loader_settings"]["clip_skip"] if "clip_skip" in pipe["loader_settings"] else -1 + lora_stack = pipe.get("lora_stack") if pipe is not None and "lora_stack" in pipe else [] + clip = pipe.get("clip") if pipe is not None and "clip" in pipe else None + positive_token_normalization = pipe["loader_settings"]["positive_token_normalization"] if "positive_token_normalization" in pipe["loader_settings"] else "none" + positive_weight_interpretation = pipe["loader_settings"]["positive_weight_interpretation"] if "positive_weight_interpretation" in pipe["loader_settings"] else "comfy" + negative_token_normalization = pipe["loader_settings"]["negative_token_normalization"] if "negative_token_normalization" in pipe["loader_settings"] else "none" + negative_weight_interpretation = pipe["loader_settings"]["negative_weight_interpretation"] if "negative_weight_interpretation" in pipe["loader_settings"] else "comfy" + a1111_prompt_style = pipe["loader_settings"]["a1111_prompt_style"] if "a1111_prompt_style" in pipe["loader_settings"] else False + # Prompt to Conditioning + positive_embeddings_final, positive_wildcard_prompt, model, clip = prompt_to_cond('positive', model, clip, + clip_skip, lora_stack, + positive, + positive_token_normalization, + positive_weight_interpretation, + a1111_prompt_style, + my_unique_id, prompt, + easyCache, + model_type=model_type) + negative_embeddings_final, negative_wildcard_prompt, model, clip = prompt_to_cond('negative', model, clip, + clip_skip, lora_stack, + negative, + negative_token_normalization, + negative_weight_interpretation, + a1111_prompt_style, + my_unique_id, prompt, + easyCache, + model_type=model_type) + new_pipe = { + **pipe, + "model": model, + "positive": positive_embeddings_final, + "negative": negative_embeddings_final, + } + del pipe + + return (new_pipe,) + + +# 节点束到基础节点束(pipe to ComfyUI-Impack-pack's basic_pipe) +class pipeToBasicPipe: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "pipe": ("PIPE_LINE",), + }, + "hidden": {"my_unique_id": "UNIQUE_ID"}, + } + + RETURN_TYPES = ("BASIC_PIPE",) + RETURN_NAMES = ("basic_pipe",) + FUNCTION = "doit" + + CATEGORY = "EasyUse/Pipe" + + def doit(self, pipe, my_unique_id=None): + new_pipe = (pipe.get('model'), pipe.get('clip'), pipe.get('vae'), pipe.get('positive'), pipe.get('negative')) + del 
pipe + return (new_pipe,) + +# 批次索引 +class pipeBatchIndex: + @classmethod + def INPUT_TYPES(s): + return {"required": {"pipe": ("PIPE_LINE",), + "batch_index": ("INT", {"default": 0, "min": 0, "max": 63}), + "length": ("INT", {"default": 1, "min": 1, "max": 64}), + }, + "hidden": {"my_unique_id": "UNIQUE_ID"},} + + RETURN_TYPES = ("PIPE_LINE",) + RETURN_NAMES = ("pipe",) + FUNCTION = "doit" + + CATEGORY = "EasyUse/Pipe" + + def doit(self, pipe, batch_index, length, my_unique_id=None): + samples = pipe["samples"] + new_samples, = LatentFromBatch().frombatch(samples, batch_index, length) + new_pipe = { + **pipe, + "samples": new_samples + } + del pipe + return (new_pipe,) + +# pipeXYPlot +class pipeXYPlot: + lora_list = ["None"] + folder_paths.get_filename_list("loras") + lora_strengths = {"min": -4.0, "max": 4.0, "step": 0.01} + token_normalization = ["none", "mean", "length", "length+mean"] + weight_interpretation = ["comfy", "A1111", "compel", "comfy++"] + + loader_dict = { + "ckpt_name": folder_paths.get_filename_list("checkpoints"), + "vae_name": ["Baked-VAE"] + folder_paths.get_filename_list("vae"), + "clip_skip": {"min": -24, "max": -1, "step": 1}, + "lora_name": lora_list, + "lora_model_strength": lora_strengths, + "lora_clip_strength": lora_strengths, + "positive": [], + "negative": [], + } + + sampler_dict = { + "steps": {"min": 1, "max": 100, "step": 1}, + "cfg": {"min": 0.0, "max": 100.0, "step": 1.0}, + "sampler_name": comfy.samplers.KSampler.SAMPLERS, + "scheduler": comfy.samplers.KSampler.SCHEDULERS, + "denoise": {"min": 0.0, "max": 1.0, "step": 0.01}, + "seed": {"min": 0, "max": MAX_SEED_NUM}, + } + + plot_dict = {**sampler_dict, **loader_dict} + + plot_values = ["None", ] + plot_values.append("---------------------") + for k in sampler_dict: + plot_values.append(f'preSampling: {k}') + plot_values.append("---------------------") + for k in loader_dict: + plot_values.append(f'loader: {k}') + + def __init__(self): + pass + + rejected = ["None", "---------------------", "Nothing"] + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "grid_spacing": ("INT", {"min": 0, "max": 500, "step": 5, "default": 0, }), + "output_individuals": (["False", "True"], {"default": "False"}), + "flip_xy": (["False", "True"], {"default": "False"}), + "x_axis": (pipeXYPlot.plot_values, {"default": 'None'}), + "x_values": ( + "STRING", {"default": '', "multiline": True, "placeholder": 'insert values seperated by "; "'}), + "y_axis": (pipeXYPlot.plot_values, {"default": 'None'}), + "y_values": ( + "STRING", {"default": '', "multiline": True, "placeholder": 'insert values seperated by "; "'}), + }, + "optional": { + "pipe": ("PIPE_LINE",) + }, + "hidden": { + "plot_dict": (pipeXYPlot.plot_dict,), + }, + } + + RETURN_TYPES = ("PIPE_LINE",) + RETURN_NAMES = ("pipe",) + FUNCTION = "plot" + + CATEGORY = "EasyUse/Pipe" + + def plot(self, grid_spacing, output_individuals, flip_xy, x_axis, x_values, y_axis, y_values, pipe=None): + def clean_values(values): + original_values = values.split("; ") + cleaned_values = [] + + for value in original_values: + # Strip the semi-colon + cleaned_value = value.strip(';').strip() + + if cleaned_value == "": + continue + + # Try to convert the cleaned_value back to int or float if possible + try: + cleaned_value = int(cleaned_value) + except ValueError: + try: + cleaned_value = float(cleaned_value) + except ValueError: + pass + + # Append the cleaned_value to the list + cleaned_values.append(cleaned_value) + + return cleaned_values + + if x_axis in 
self.rejected: + x_axis = "None" + x_values = [] + else: + x_values = clean_values(x_values) + + if y_axis in self.rejected: + y_axis = "None" + y_values = [] + else: + y_values = clean_values(y_values) + + if flip_xy == "True": + x_axis, y_axis = y_axis, x_axis + x_values, y_values = y_values, x_values + + + xy_plot = {"x_axis": x_axis, + "x_vals": x_values, + "y_axis": y_axis, + "y_vals": y_values, + "grid_spacing": grid_spacing, + "output_individuals": output_individuals} + + if pipe is not None: + new_pipe = pipe + new_pipe['loader_settings'] = { + **pipe['loader_settings'], + "xyplot": xy_plot + } + del pipe + return (new_pipe, xy_plot,) + +# pipeXYPlotAdvanced +class pipeXYPlotAdvanced: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "pipe": ("PIPE_LINE",), + "grid_spacing": ("INT", {"min": 0, "max": 500, "step": 5, "default": 0, }), + "output_individuals": (["False", "True"], {"default": "False"}), + "flip_xy": (["False", "True"], {"default": "False"}), + }, + "optional": { + "X": ("X_Y",), + "Y": ("X_Y",), + }, + "hidden": {"my_unique_id": "UNIQUE_ID"} + } + + RETURN_TYPES = ("PIPE_LINE",) + RETURN_NAMES = ("pipe",) + FUNCTION = "plot" + + CATEGORY = "EasyUse/Pipe" + + def plot(self, pipe, grid_spacing, output_individuals, flip_xy, X=None, Y=None, my_unique_id=None): + if X != None: + x_axis = X.get('axis') + x_values = X.get('values') + else: + x_axis = "Nothing" + x_values = [""] + if Y != None: + y_axis = Y.get('axis') + y_values = Y.get('values') + else: + y_axis = "Nothing" + y_values = [""] + + if pipe is not None: + new_pipe = pipe + positive = pipe["loader_settings"]["positive"] if "positive" in pipe["loader_settings"] else "" + negative = pipe["loader_settings"]["negative"] if "negative" in pipe["loader_settings"] else "" + + if x_axis == 'advanced: ModelMergeBlocks': + models = X.get('models') + vae_use = X.get('vae_use') + if models is None: + raise Exception("models is not found") + new_pipe['loader_settings'] = { + **pipe['loader_settings'], + "models": models, + "vae_use": vae_use + } + if y_axis == 'advanced: ModelMergeBlocks': + models = Y.get('models') + vae_use = Y.get('vae_use') + if models is None: + raise Exception("models is not found") + new_pipe['loader_settings'] = { + **pipe['loader_settings'], + "models": models, + "vae_use": vae_use + } + + if x_axis in ['advanced: Lora', 'advanced: Checkpoint']: + lora_stack = X.get('lora_stack') + _lora_stack = [] + if lora_stack is not None: + for lora in lora_stack: + _lora_stack.append( + {"lora_name": lora[0], "model": pipe['model'], "clip": pipe['clip'], "model_strength": lora[1], + "clip_strength": lora[2]}) + del lora_stack + x_values = "; ".join(x_values) + lora_stack = pipe['lora_stack'] + _lora_stack if 'lora_stack' in pipe else _lora_stack + new_pipe['loader_settings'] = { + **pipe['loader_settings'], + "lora_stack": lora_stack, + } + + if y_axis in ['advanced: Lora', 'advanced: Checkpoint']: + lora_stack = Y.get('lora_stack') + _lora_stack = [] + if lora_stack is not None: + for lora in lora_stack: + _lora_stack.append( + {"lora_name": lora[0], "model": pipe['model'], "clip": pipe['clip'], "model_strength": lora[1], + "clip_strength": lora[2]}) + del lora_stack + y_values = "; ".join(y_values) + lora_stack = pipe['lora_stack'] + _lora_stack if 'lora_stack' in pipe else _lora_stack + new_pipe['loader_settings'] = { + **pipe['loader_settings'], + "lora_stack": lora_stack, + } + + if x_axis == 'advanced: Seeds++ Batch': + if new_pipe['seed']: + value = x_values + x_values = [] + for index in 
range(value): + x_values.append(str(new_pipe['seed'] + index)) + x_values = "; ".join(x_values) + if y_axis == 'advanced: Seeds++ Batch': + if new_pipe['seed']: + value = y_values + y_values = [] + for index in range(value): + y_values.append(str(new_pipe['seed'] + index)) + y_values = "; ".join(y_values) + + if x_axis == 'advanced: Positive Prompt S/R': + if positive: + x_value = x_values + x_values = [] + for index, value in enumerate(x_value): + search_txt, replace_txt, replace_all = value + if replace_all: + txt = replace_txt if replace_txt is not None else positive + x_values.append(txt) + else: + txt = positive.replace(search_txt, replace_txt, 1) if replace_txt is not None else positive + x_values.append(txt) + x_values = "; ".join(x_values) + if y_axis == 'advanced: Positive Prompt S/R': + if positive: + y_value = y_values + y_values = [] + for index, value in enumerate(y_value): + search_txt, replace_txt, replace_all = value + if replace_all: + txt = replace_txt if replace_txt is not None else positive + y_values.append(txt) + else: + txt = positive.replace(search_txt, replace_txt, 1) if replace_txt is not None else positive + y_values.append(txt) + y_values = "; ".join(y_values) + + if x_axis == 'advanced: Negative Prompt S/R': + if negative: + x_value = x_values + x_values = [] + for index, value in enumerate(x_value): + search_txt, replace_txt, replace_all = value + if replace_all: + txt = replace_txt if replace_txt is not None else negative + x_values.append(txt) + else: + txt = negative.replace(search_txt, replace_txt, 1) if replace_txt is not None else negative + x_values.append(txt) + x_values = "; ".join(x_values) + if y_axis == 'advanced: Negative Prompt S/R': + if negative: + y_value = y_values + y_values = [] + for index, value in enumerate(y_value): + search_txt, replace_txt, replace_all = value + if replace_all: + txt = replace_txt if replace_txt is not None else negative + y_values.append(txt) + else: + txt = negative.replace(search_txt, replace_txt, 1) if replace_txt is not None else negative + y_values.append(txt) + y_values = "; ".join(y_values) + + if "advanced: ControlNet" in x_axis: + x_value = x_values + x_values = [] + cnet = [] + for index, value in enumerate(x_value): + cnet.append(value) + x_values.append(str(index)) + x_values = "; ".join(x_values) + new_pipe['loader_settings'] = { + **pipe['loader_settings'], + "cnet_stack": cnet, + } + + if "advanced: ControlNet" in y_axis: + y_value = y_values + y_values = [] + cnet = [] + for index, value in enumerate(y_value): + cnet.append(value) + y_values.append(str(index)) + y_values = "; ".join(y_values) + new_pipe['loader_settings'] = { + **pipe['loader_settings'], + "cnet_stack": cnet, + } + + if "advanced: Pos Condition" in x_axis: + x_values = "; ".join(x_values) + cond = X.get('cond') + new_pipe['loader_settings'] = { + **pipe['loader_settings'], + "positive_cond_stack": cond, + } + if "advanced: Pos Condition" in y_axis: + y_values = "; ".join(y_values) + cond = Y.get('cond') + new_pipe['loader_settings'] = { + **pipe['loader_settings'], + "positive_cond_stack": cond, + } + + if "advanced: Neg Condition" in x_axis: + x_values = "; ".join(x_values) + cond = X.get('cond') + new_pipe['loader_settings'] = { + **pipe['loader_settings'], + "negative_cond_stack": cond, + } + if "advanced: Neg Condition" in y_axis: + y_values = "; ".join(y_values) + cond = Y.get('cond') + new_pipe['loader_settings'] = { + **pipe['loader_settings'], + "negative_cond_stack": cond, + } + + del pipe + + return 
pipeXYPlot().plot(grid_spacing, output_individuals, flip_xy, x_axis, x_values, y_axis, y_values, new_pipe) + +#---------------------------------------------------------------节点束 结束---------------------------------------------------------------------- + +# 显示推理时间 +class showSpentTime: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "pipe": ("PIPE_LINE",), + "spent_time": ("INFO", {"default": 'Time will be displayed when reasoning is complete', "forceInput": False}), + }, + "hidden": { + "unique_id": "UNIQUE_ID", + "extra_pnginfo": "EXTRA_PNGINFO", + }, + } + + FUNCTION = "notify" + OUTPUT_NODE = True + RETURN_TYPES = () + RETURN_NAMES = () + + CATEGORY = "EasyUse/Util" + + def notify(self, pipe, spent_time=None, unique_id=None, extra_pnginfo=None): + if unique_id and extra_pnginfo and "workflow" in extra_pnginfo: + workflow = extra_pnginfo["workflow"] + node = next((x for x in workflow["nodes"] if str(x["id"]) == unique_id), None) + if node: + spent_time = pipe['loader_settings']['spent_time'] if 'spent_time' in pipe['loader_settings'] else '' + node["widgets_values"] = [spent_time] + + return {"ui": {"text": spent_time}, "result": {}} + +# 显示加载器参数中的各种名称 +class showLoaderSettingsNames: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "pipe": ("PIPE_LINE",), + "names": ("INFO", {"default": '', "forceInput": False}), + }, + "hidden": { + "unique_id": "UNIQUE_ID", + "extra_pnginfo": "EXTRA_PNGINFO", + }, + } + + RETURN_TYPES = ("STRING", "STRING", "STRING",) + RETURN_NAMES = ("ckpt_name", "vae_name", "lora_name") + + FUNCTION = "notify" + OUTPUT_NODE = True + + CATEGORY = "EasyUse/Util" + + def notify(self, pipe, names=None, unique_id=None, extra_pnginfo=None): + if unique_id and extra_pnginfo and "workflow" in extra_pnginfo: + workflow = extra_pnginfo["workflow"] + node = next((x for x in workflow["nodes"] if str(x["id"]) == unique_id), None) + if node: + ckpt_name = pipe['loader_settings']['ckpt_name'] if 'ckpt_name' in pipe['loader_settings'] else '' + vae_name = pipe['loader_settings']['vae_name'] if 'vae_name' in pipe['loader_settings'] else '' + lora_name = pipe['loader_settings']['lora_name'] if 'lora_name' in pipe['loader_settings'] else '' + + if ckpt_name: + ckpt_name = os.path.basename(os.path.splitext(ckpt_name)[0]) + if vae_name: + vae_name = os.path.basename(os.path.splitext(vae_name)[0]) + if lora_name: + lora_name = os.path.basename(os.path.splitext(lora_name)[0]) + + names = "ckpt_name: " + ckpt_name + '\n' + "vae_name: " + vae_name + '\n' + "lora_name: " + lora_name + node["widgets_values"] = names + + return {"ui": {"text": names}, "result": (ckpt_name, vae_name, lora_name)} + + +class sliderControl: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mode": (['ipadapter layer weights'],), + "model_type": (['sdxl', 'sd1'],), + }, + "hidden": { + "prompt": "PROMPT", + "my_unique_id": "UNIQUE_ID", + "extra_pnginfo": "EXTRA_PNGINFO", + }, + } + + RETURN_TYPES = ("STRING",) + RETURN_NAMES = ("layer_weights",) + + FUNCTION = "control" + + CATEGORY = "EasyUse/Util" + + def control(self, mode, model_type, prompt=None, my_unique_id=None, extra_pnginfo=None): + values = '' + if my_unique_id in prompt: + if 'values' in prompt[my_unique_id]["inputs"]: + values = prompt[my_unique_id]["inputs"]['values'] + + return (values,) + +#---------------------------------------------------------------API 开始----------------------------------------------------------------------# +from .libs.stability import stableAPI +class stableDiffusion3API: + + 
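# Calls Stability AI's SD3 API through stableAPI.generate_sd3_image; switches to image-to-image when optional_image is provided + 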
@classmethod + def INPUT_TYPES(s): + return { + "required": { + "positive": ("STRING", {"default": "", "placeholder": "Positive", "multiline": True}), + "negative": ("STRING", {"default": "", "placeholder": "Negative", "multiline": True}), + "model": (["sd3", "sd3-turbo"],), + "aspect_ratio": (['16:9', '1:1', '21:9', '2:3', '3:2', '4:5', '5:4', '9:16', '9:21'],), + "seed": ("INT", {"default": 0, "min": 0, "max": 4294967294}), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0}), + }, + "optional": { + "optional_image": ("IMAGE",), + }, + "hidden": { + "unique_id": "UNIQUE_ID", + "extra_pnginfo": "EXTRA_PNGINFO", + }, + } + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("image",) + + FUNCTION = "generate" + OUTPUT_NODE = False + + CATEGORY = "EasyUse/API" + + def generate(self, positive, negative, model, aspect_ratio, seed, denoise, optional_image=None, unique_id=None, extra_pnginfo=None): + mode = 'text-to-image' + if optional_image is not None: + mode = 'image-to-image' + output_image = stableAPI.generate_sd3_image(positive, negative, aspect_ratio, seed=seed, mode=mode, model=model, strength=denoise, image=optional_image) + return (output_image,) + +#---------------------------------------------------------------API 结束---------------------------------------------------------------------- + +NODE_CLASS_MAPPINGS = { + # seed 随机种 + "easy seed": easySeed, + "easy globalSeed": globalSeed, + # prompt 提示词 + "easy positive": positivePrompt, + "easy negative": negativePrompt, + "easy wildcards": wildcardsPrompt, + "easy prompt": prompt, + "easy promptList": promptList, + "easy promptLine": promptLine, + "easy promptConcat": promptConcat, + "easy promptReplace": promptReplace, + "easy stylesSelector": stylesPromptSelector, + "easy portraitMaster": portraitMaster, + # loaders 加载器 + "easy fullLoader": fullLoader, + "easy a1111Loader": a1111Loader, + "easy comfyLoader": comfyLoader, + "easy hunyuanDiTLoader": hunyuanDiTLoader, + "easy svdLoader": svdLoader, + "easy sv3dLoader": sv3DLoader, + "easy zero123Loader": zero123Loader, + "easy dynamiCrafterLoader": dynamiCrafterLoader, + "easy cascadeLoader": cascadeLoader, + "easy kolorsLoader": kolorsLoader, + "easy fluxLoader": fluxLoader, + "easy pixArtLoader": pixArtLoader, + "easy loraStack": loraStack, + "easy controlnetStack": controlnetStack, + "easy controlnetLoader": controlnetSimple, + "easy controlnetLoaderADV": controlnetAdvanced, + "easy controlnetLoader++": controlnetPlusPlus, + "easy LLLiteLoader": LLLiteLoader, + # Adapter 适配器 + "easy ipadapterApply": ipadapterApply, + "easy ipadapterApplyADV": ipadapterApplyAdvanced, + "easy ipadapterApplyFaceIDKolors": ipadapterApplyFaceIDKolors, + "easy ipadapterApplyEncoder": ipadapterApplyEncoder, + "easy ipadapterApplyEmbeds": ipadapterApplyEmbeds, + "easy ipadapterApplyRegional": ipadapterApplyRegional, + "easy ipadapterApplyFromParams": ipadapterApplyFromParams, + "easy ipadapterStyleComposition": ipadapterStyleComposition, + "easy instantIDApply": instantIDApply, + "easy instantIDApplyADV": instantIDApplyAdvanced, + "easy pulIDApply": applyPulID, + "easy pulIDApplyADV": applyPulIDADV, + "easy styleAlignedBatchAlign": styleAlignedBatchAlign, + "easy icLightApply": icLightApply, + # Inpaint 内补 + "easy applyFooocusInpaint": applyFooocusInpaint, + "easy applyBrushNet": applyBrushNet, + "easy applyPowerPaint": applyPowerPaint, + "easy applyInpaint": applyInpaint, + # latent 潜空间 + "easy latentNoisy": latentNoisy, + "easy latentCompositeMaskedWithCond": latentCompositeMaskedWithCond, + "easy 
injectNoiseToLatent": injectNoiseToLatent, + # preSampling 预采样处理 + "easy preSampling": samplerSettings, + "easy preSamplingAdvanced": samplerSettingsAdvanced, + "easy preSamplingNoiseIn": samplerSettingsNoiseIn, + "easy preSamplingCustom": samplerCustomSettings, + "easy preSamplingSdTurbo": sdTurboSettings, + "easy preSamplingDynamicCFG": dynamicCFGSettings, + "easy preSamplingCascade": cascadeSettings, + "easy preSamplingLayerDiffusion": layerDiffusionSettings, + "easy preSamplingLayerDiffusionADDTL": layerDiffusionSettingsADDTL, + # kSampler k采样器 + "easy fullkSampler": samplerFull, + "easy kSampler": samplerSimple, + "easy kSamplerTiled": samplerSimpleTiled, + "easy kSamplerLayerDiffusion": samplerSimpleLayerDiffusion, + "easy kSamplerInpainting": samplerSimpleInpainting, + "easy kSamplerDownscaleUnet": samplerSimpleDownscaleUnet, + "easy kSamplerSDTurbo": samplerSDTurbo, + "easy fullCascadeKSampler": samplerCascadeFull, + "easy cascadeKSampler": samplerCascadeSimple, + "easy unSampler": unsampler, + # fix 修复相关 + "easy hiresFix": hiresFix, + "easy preDetailerFix": preDetailerFix, + "easy preMaskDetailerFix": preMaskDetailerFix, + "easy ultralyticsDetectorPipe": ultralyticsDetectorForDetailerFix, + "easy samLoaderPipe": samLoaderForDetailerFix, + "easy detailerFix": detailerFix, + # pipe 管道(节点束) + "easy pipeIn": pipeIn, + "easy pipeOut": pipeOut, + "easy pipeEdit": pipeEdit, + "easy pipeEditPrompt": pipeEditPrompt, + "easy pipeToBasicPipe": pipeToBasicPipe, + "easy pipeBatchIndex": pipeBatchIndex, + "easy XYPlot": pipeXYPlot, + "easy XYPlotAdvanced": pipeXYPlotAdvanced, + # XY Inputs + "easy XYInputs: Seeds++ Batch": XYplot_SeedsBatch, + "easy XYInputs: Steps": XYplot_Steps, + "easy XYInputs: CFG Scale": XYplot_CFG, + "easy XYInputs: Sampler/Scheduler": XYplot_Sampler_Scheduler, + "easy XYInputs: Denoise": XYplot_Denoise, + "easy XYInputs: Checkpoint": XYplot_Checkpoint, + "easy XYInputs: Lora": XYplot_Lora, + "easy XYInputs: ModelMergeBlocks": XYplot_ModelMergeBlocks, + "easy XYInputs: PromptSR": XYplot_PromptSR, + "easy XYInputs: ControlNet": XYplot_Control_Net, + "easy XYInputs: PositiveCond": XYplot_Positive_Cond, + "easy XYInputs: PositiveCondList": XYplot_Positive_Cond_List, + "easy XYInputs: NegativeCond": XYplot_Negative_Cond, + "easy XYInputs: NegativeCondList": XYplot_Negative_Cond_List, + # others 其他 + "easy showSpentTime": showSpentTime, + "easy showLoaderSettingsNames": showLoaderSettingsNames, + "easy sliderControl": sliderControl, + "dynamicThresholdingFull": dynamicThresholdingFull, + # api 相关 + "easy stableDiffusion3API": stableDiffusion3API, + # utils + "easy ckptNames": setCkptName, + "easy controlnetNames": setControlName, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + # seed 随机种 + "easy seed": "EasySeed", + "easy globalSeed": "EasyGlobalSeed", + # prompt 提示词 + "easy positive": "Positive", + "easy negative": "Negative", + "easy wildcards": "Wildcards", + "easy prompt": "Prompt", + "easy promptList": "PromptList", + "easy promptLine": "PromptLine", + "easy promptConcat": "PromptConcat", + "easy promptReplace": "PromptReplace", + "easy stylesSelector": "Styles Selector", + "easy portraitMaster": "Portrait Master", + # loaders 加载器 + "easy fullLoader": "EasyLoader (Full)", + "easy a1111Loader": "EasyLoader (A1111)", + "easy comfyLoader": "EasyLoader (Comfy)", + "easy svdLoader": "EasyLoader (SVD)", + "easy sv3dLoader": "EasyLoader (SV3D)", + "easy zero123Loader": "EasyLoader (Zero123)", + "easy dynamiCrafterLoader": "EasyLoader (DynamiCrafter)", + "easy cascadeLoader": 
"EasyCascadeLoader", + "easy kolorsLoader": "EasyLoader (Kolors)", + "easy fluxLoader": "EasyLoader (Flux)", + "easy hunyuanDiTLoader": "EasyLoader (HunyuanDiT)", + "easy pixArtLoader": "EasyLoader (PixArt)", + "easy loraStack": "EasyLoraStack", + "easy controlnetStack": "EasyControlnetStack", + "easy controlnetLoader": "EasyControlnet", + "easy controlnetLoaderADV": "EasyControlnet (Advanced)", + "easy controlnetLoader++": "EasyControlnet++", + "easy LLLiteLoader": "EasyLLLite", + # Adapter 适配器 + "easy ipadapterApply": "Easy Apply IPAdapter", + "easy ipadapterApplyADV": "Easy Apply IPAdapter (Advanced)", + "easy ipadapterApplyFaceIDKolors": "Easy Apply IPAdapter (FaceID Kolors)", + "easy ipadapterStyleComposition": "Easy Apply IPAdapter (StyleComposition)", + "easy ipadapterApplyEncoder": "Easy Apply IPAdapter (Encoder)", + "easy ipadapterApplyRegional": "Easy Apply IPAdapter (Regional)", + "easy ipadapterApplyEmbeds": "Easy Apply IPAdapter (Embeds)", + "easy ipadapterApplyFromParams": "Easy Apply IPAdapter (From Params)", + "easy instantIDApply": "Easy Apply InstantID", + "easy instantIDApplyADV": "Easy Apply InstantID (Advanced)", + "easy pulIDApply": "Easy Apply PuLID", + "easy pulIDApplyADV": "Easy Apply PuLID (Advanced)", + "easy styleAlignedBatchAlign": "Easy Apply StyleAlign", + "easy icLightApply": "Easy Apply ICLight", + # Inpaint 内补 + "easy applyFooocusInpaint": "Easy Apply Fooocus Inpaint", + "easy applyBrushNet": "Easy Apply BrushNet", + "easy applyPowerPaint": "Easy Apply PowerPaint", + "easy applyInpaint": "Easy Apply Inpaint", + # latent 潜空间 + "easy latentNoisy": "LatentNoisy", + "easy latentCompositeMaskedWithCond": "LatentCompositeMaskedWithCond", + "easy injectNoiseToLatent": "InjectNoiseToLatent", + # preSampling 预采样处理 + "easy preSampling": "PreSampling", + "easy preSamplingAdvanced": "PreSampling (Advanced)", + "easy preSamplingNoiseIn": "PreSampling (NoiseIn)", + "easy preSamplingCustom": "PreSampling (Custom)", + "easy preSamplingSdTurbo": "PreSampling (SDTurbo)", + "easy preSamplingDynamicCFG": "PreSampling (DynamicCFG)", + "easy preSamplingCascade": "PreSampling (Cascade)", + "easy preSamplingLayerDiffusion": "PreSampling (LayerDiffuse)", + "easy preSamplingLayerDiffusionADDTL": "PreSampling (LayerDiffuse ADDTL)", + # kSampler k采样器 + "easy kSampler": "EasyKSampler", + "easy fullkSampler": "EasyKSampler (Full)", + "easy kSamplerTiled": "EasyKSampler (Tiled Decode)", + "easy kSamplerLayerDiffusion": "EasyKSampler (LayerDiffuse)", + "easy kSamplerInpainting": "EasyKSampler (Inpainting)", + "easy kSamplerDownscaleUnet": "EasyKsampler (Downscale Unet)", + "easy kSamplerSDTurbo": "EasyKSampler (SDTurbo)", + "easy cascadeKSampler": "EasyCascadeKsampler", + "easy fullCascadeKSampler": "EasyCascadeKsampler (Full)", + "easy unSampler": "EasyUnSampler", + # fix 修复相关 + "easy hiresFix": "HiresFix", + "easy preDetailerFix": "PreDetailerFix", + "easy preMaskDetailerFix": "preMaskDetailerFix", + "easy ultralyticsDetectorPipe": "UltralyticsDetector (Pipe)", + "easy samLoaderPipe": "SAMLoader (Pipe)", + "easy detailerFix": "DetailerFix", + # pipe 管道(节点束) + "easy pipeIn": "Pipe In", + "easy pipeOut": "Pipe Out", + "easy pipeEdit": "Pipe Edit", + "easy pipeEditPrompt": "Pipe Edit Prompt", + "easy pipeBatchIndex": "Pipe Batch Index", + "easy pipeToBasicPipe": "Pipe -> BasicPipe", + "easy XYPlot": "XY Plot", + "easy XYPlotAdvanced": "XY Plot Advanced", + # XY Inputs + "easy XYInputs: Seeds++ Batch": "XY Inputs: Seeds++ Batch //EasyUse", + "easy XYInputs: Steps": "XY Inputs: Steps 
//EasyUse", + "easy XYInputs: CFG Scale": "XY Inputs: CFG Scale //EasyUse", + "easy XYInputs: Sampler/Scheduler": "XY Inputs: Sampler/Scheduler //EasyUse", + "easy XYInputs: Denoise": "XY Inputs: Denoise //EasyUse", + "easy XYInputs: Checkpoint": "XY Inputs: Checkpoint //EasyUse", + "easy XYInputs: Lora": "XY Inputs: Lora //EasyUse", + "easy XYInputs: ModelMergeBlocks": "XY Inputs: ModelMergeBlocks //EasyUse", + "easy XYInputs: PromptSR": "XY Inputs: PromptSR //EasyUse", + "easy XYInputs: ControlNet": "XY Inputs: Controlnet //EasyUse", + "easy XYInputs: PositiveCond": "XY Inputs: PosCond //EasyUse", + "easy XYInputs: PositiveCondList": "XY Inputs: PosCondList //EasyUse", + "easy XYInputs: NegativeCond": "XY Inputs: NegCond //EasyUse", + "easy XYInputs: NegativeCondList": "XY Inputs: NegCondList //EasyUse", + # others 其他 + "easy showSpentTime": "Show Spent Time", + "easy showLoaderSettingsNames": "Show Loader Settings Names", + "easy sliderControl": "Easy Slider Control", + "dynamicThresholdingFull": "DynamicThresholdingFull", + # api 相关 + "easy stableDiffusion3API": "Stable Diffusion 3 (API)", + # utils + "easy ckptNames": "Ckpt Names", + "easy controlnetNames": "ControlNet Names", +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/human_parsing/__init__.py b/ComfyUI-Easy-Use/py/human_parsing/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI-Easy-Use/py/human_parsing/parsing_api.py b/ComfyUI-Easy-Use/py/human_parsing/parsing_api.py new file mode 100644 index 0000000000000000000000000000000000000000..610d1c3cd045f2d75f40cd03d30b962595345add --- /dev/null +++ b/ComfyUI-Easy-Use/py/human_parsing/parsing_api.py @@ -0,0 +1,156 @@ +import torch +import numpy as np +import cv2 +import torchvision.transforms as transforms +from torch.utils.data import DataLoader +from .simple_extractor_dataset import SimpleFolderDataset +from .transforms import transform_logits +from tqdm import tqdm +from PIL import Image + +def get_palette(num_cls): + """ Returns the color map for visualizing the segmentation mask. 
+ Args: + num_cls: Number of classes + Returns: + The color map + """ + n = num_cls + palette = [0] * (n * 3) + for j in range(0, n): + lab = j + palette[j * 3 + 0] = 0 + palette[j * 3 + 1] = 0 + palette[j * 3 + 2] = 0 + i = 0 + while lab: + palette[j * 3 + 0] |= (((lab >> 0) & 1) << (7 - i)) + palette[j * 3 + 1] |= (((lab >> 1) & 1) << (7 - i)) + palette[j * 3 + 2] |= (((lab >> 2) & 1) << (7 - i)) + i += 1 + lab >>= 3 + return palette + + +def delete_irregular(logits_result): + parsing_result = np.argmax(logits_result, axis=2) + upper_cloth = np.where(parsing_result == 4, 255, 0) + contours, hierarchy = cv2.findContours(upper_cloth.astype(np.uint8), + cv2.RETR_CCOMP, cv2.CHAIN_APPROX_TC89_L1) + area = [] + for i in range(len(contours)): + a = cv2.contourArea(contours[i], True) + area.append(abs(a)) + if len(area) != 0: + top = area.index(max(area)) + M = cv2.moments(contours[top]) + cY = int(M["m01"] / M["m00"]) + + dresses = np.where(parsing_result == 7, 255, 0) + contours_dress, hierarchy_dress = cv2.findContours(dresses.astype(np.uint8), + cv2.RETR_CCOMP, cv2.CHAIN_APPROX_TC89_L1) + area_dress = [] + for j in range(len(contours_dress)): + a_d = cv2.contourArea(contours_dress[j], True) + area_dress.append(abs(a_d)) + if len(area_dress) != 0: + top_dress = area_dress.index(max(area_dress)) + M_dress = cv2.moments(contours_dress[top_dress]) + cY_dress = int(M_dress["m01"] / M_dress["m00"]) + wear_type = "dresses" + if len(area) != 0: + if len(area_dress) != 0 and cY_dress > cY: + irregular_list = np.array([4, 5, 6]) + logits_result[:, :, irregular_list] = -1 + else: + irregular_list = np.array([5, 6, 7, 8, 9, 10, 12, 13]) + logits_result[:cY, :, irregular_list] = -1 + wear_type = "cloth_pant" + parsing_result = np.argmax(logits_result, axis=2) + # pad border + parsing_result = np.pad(parsing_result, pad_width=1, mode='constant', constant_values=0) + return parsing_result, wear_type + + + +def hole_fill(img): + img_copy = img.copy() + mask = np.zeros((img.shape[0] + 2, img.shape[1] + 2), dtype=np.uint8) + cv2.floodFill(img, mask, (0, 0), 255) + img_inverse = cv2.bitwise_not(img) + dst = cv2.bitwise_or(img_copy, img_inverse) + return dst + +def refine_mask(mask): + contours, hierarchy = cv2.findContours(mask.astype(np.uint8), + cv2.RETR_CCOMP, cv2.CHAIN_APPROX_TC89_L1) + area = [] + for j in range(len(contours)): + a_d = cv2.contourArea(contours[j], True) + area.append(abs(a_d)) + refine_mask = np.zeros_like(mask).astype(np.uint8) + if len(area) != 0: + i = area.index(max(area)) + cv2.drawContours(refine_mask, contours, i, color=255, thickness=-1) + # keep large area in skin case + for j in range(len(area)): + if j != i and area[i] > 2000: + cv2.drawContours(refine_mask, contours, j, color=255, thickness=-1) + return refine_mask + +def refine_hole(parsing_result_filled, parsing_result, arm_mask): + filled_hole = cv2.bitwise_and(np.where(parsing_result_filled == 4, 255, 0), + np.where(parsing_result != 4, 255, 0)) - arm_mask * 255 + contours, hierarchy = cv2.findContours(filled_hole, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_TC89_L1) + refine_hole_mask = np.zeros_like(parsing_result).astype(np.uint8) + for i in range(len(contours)): + a = cv2.contourArea(contours[i], True) + # keep hole > 2000 pixels + if abs(a) > 2000: + cv2.drawContours(refine_hole_mask, contours, i, color=255, thickness=-1) + return refine_hole_mask + arm_mask + +def onnx_inference(lip_session, input_dir, mask_components=[0]): + + transform = transforms.Compose([ + transforms.ToTensor(), + transforms.Normalize(mean=[0.406, 0.456, 
0.485], std=[0.225, 0.224, 0.229]) + ]) + input_size = [473, 473] + + dataset_lip = SimpleFolderDataset(root=input_dir, input_size=input_size, transform=transform) + dataloader_lip = DataLoader(dataset_lip) + palette = get_palette(20) + with torch.no_grad(): + for _, batch in enumerate(tqdm(dataloader_lip)): + image, meta = batch + c = meta['center'].numpy()[0] + s = meta['scale'].numpy()[0] + w = meta['width'].numpy()[0] + h = meta['height'].numpy()[0] + + output = lip_session.run(None, {"input.1": image.numpy().astype(np.float32)}) + upsample = torch.nn.Upsample(size=input_size, mode='bilinear', align_corners=True) + upsample_output = upsample(torch.from_numpy(output[1][0]).unsqueeze(0)) + upsample_output = upsample_output.squeeze() + upsample_output = upsample_output.permute(1, 2, 0) # CHW -> HWC + logits_result_lip = transform_logits(upsample_output.data.cpu().numpy(), c, s, w, h, + input_size=input_size) + parsing_result = np.argmax(logits_result_lip, axis=2) + + output_img = Image.fromarray(np.asarray(parsing_result, dtype=np.uint8)) + output_img.putpalette(palette) + + mask = np.isin(output_img, mask_components).astype(np.uint8) + mask_image = Image.fromarray(mask * 255) + mask_image = mask_image.convert("RGB") + mask_image = torch.from_numpy(np.array(mask_image).astype(np.float32) / 255.0).unsqueeze(0) + + output_img = output_img.convert('RGB') + output_img = torch.from_numpy(np.array(output_img).astype(np.float32) / 255.0).unsqueeze(0) + + return output_img, mask_image + + + + diff --git a/ComfyUI-Easy-Use/py/human_parsing/run_parsing.py b/ComfyUI-Easy-Use/py/human_parsing/run_parsing.py new file mode 100644 index 0000000000000000000000000000000000000000..03fc1940e3f5aa08e577349b08749637ea210e51 --- /dev/null +++ b/ComfyUI-Easy-Use/py/human_parsing/run_parsing.py @@ -0,0 +1,23 @@ +from .parsing_api import onnx_inference +from ..libs.utils import install_package + +class HumanParsing: + def __init__(self, model_path): + self.model_path = model_path + self.session = None + + def __call__(self, input_image, mask_components): + if self.session is None: + install_package('onnxruntime') + import onnxruntime as ort + + session_options = ort.SessionOptions() + session_options.graph_optimization_level = ort.GraphOptimizationLevel.ORT_ENABLE_ALL + session_options.execution_mode = ort.ExecutionMode.ORT_SEQUENTIAL + # session_options.add_session_config_entry('gpu_id', str(gpu_id)) + self.session = ort.InferenceSession(self.model_path, sess_options=session_options, + providers=['CPUExecutionProvider']) + + parsed_image, mask = onnx_inference(self.session, input_image, mask_components) + return parsed_image, mask + diff --git a/ComfyUI-Easy-Use/py/human_parsing/simple_extractor_dataset.py b/ComfyUI-Easy-Use/py/human_parsing/simple_extractor_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..5604b1eda242f3214a123684b727f2fe0768ce1f --- /dev/null +++ b/ComfyUI-Easy-Use/py/human_parsing/simple_extractor_dataset.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" +@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : dataset.py +@Time : 8/30/19 9:12 PM +@Desc : Dataset Definition +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. 
+""" + +import os + +import cv2 +import numpy as np +from PIL import Image +from torch.utils import data +from .transforms import get_affine_transform + + +class SimpleFolderDataset(data.Dataset): + def __init__(self, root, input_size=[512, 512], transform=None): + self.root = root + self.input_size = input_size + self.transform = transform + self.aspect_ratio = input_size[1] * 1.0 / input_size[0] + self.input_size = np.asarray(input_size) + self.is_pil_image = False + if isinstance(root, Image.Image): + self.file_list = [root] + self.is_pil_image = True + elif os.path.isfile(root): + self.file_list = [os.path.basename(root)] + self.root = os.path.dirname(root) + else: + self.file_list = os.listdir(self.root) + + def __len__(self): + return len(self.file_list) + + def _box2cs(self, box): + x, y, w, h = box[:4] + return self._xywh2cs(x, y, w, h) + + def _xywh2cs(self, x, y, w, h): + center = np.zeros((2), dtype=np.float32) + center[0] = x + w * 0.5 + center[1] = y + h * 0.5 + if w > self.aspect_ratio * h: + h = w * 1.0 / self.aspect_ratio + elif w < self.aspect_ratio * h: + w = h * self.aspect_ratio + scale = np.array([w, h], dtype=np.float32) + return center, scale + + def __getitem__(self, index): + if self.is_pil_image: + img = np.asarray(self.file_list[index])[:, :, [2, 1, 0]] + else: + img_name = self.file_list[index] + img_path = os.path.join(self.root, img_name) + img = cv2.imread(img_path, cv2.IMREAD_COLOR) + h, w, _ = img.shape + + # Get person center and scale + person_center, s = self._box2cs([0, 0, w - 1, h - 1]) + r = 0 + trans = get_affine_transform(person_center, s, r, self.input_size) + input = cv2.warpAffine( + img, + trans, + (int(self.input_size[1]), int(self.input_size[0])), + flags=cv2.INTER_LINEAR, + borderMode=cv2.BORDER_CONSTANT, + borderValue=(0, 0, 0)) + + input = self.transform(input) + meta = { + 'center': person_center, + 'height': h, + 'width': w, + 'scale': s, + 'rotation': r + } + + return input, meta diff --git a/ComfyUI-Easy-Use/py/human_parsing/transforms.py b/ComfyUI-Easy-Use/py/human_parsing/transforms.py new file mode 100644 index 0000000000000000000000000000000000000000..1442a728938ca19fcb4ac21ae6588266df45631c --- /dev/null +++ b/ComfyUI-Easy-Use/py/human_parsing/transforms.py @@ -0,0 +1,167 @@ +# ------------------------------------------------------------------------------ +# Copyright (c) Microsoft +# Licensed under the MIT License. 
+# Written by Bin Xiao (Bin.Xiao@microsoft.com) +# ------------------------------------------------------------------------------ + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import numpy as np +import cv2 +import torch + +class BRG2Tensor_transform(object): + def __call__(self, pic): + img = torch.from_numpy(pic.transpose((2, 0, 1))) + if isinstance(img, torch.ByteTensor): + return img.float() + else: + return img + +class BGR2RGB_transform(object): + def __call__(self, tensor): + return tensor[[2,1,0],:,:] + +def flip_back(output_flipped, matched_parts): + ''' + ouput_flipped: numpy.ndarray(batch_size, num_joints, height, width) + ''' + assert output_flipped.ndim == 4,\ + 'output_flipped should be [batch_size, num_joints, height, width]' + + output_flipped = output_flipped[:, :, :, ::-1] + + for pair in matched_parts: + tmp = output_flipped[:, pair[0], :, :].copy() + output_flipped[:, pair[0], :, :] = output_flipped[:, pair[1], :, :] + output_flipped[:, pair[1], :, :] = tmp + + return output_flipped + + +def fliplr_joints(joints, joints_vis, width, matched_parts): + """ + flip coords + """ + # Flip horizontal + joints[:, 0] = width - joints[:, 0] - 1 + + # Change left-right parts + for pair in matched_parts: + joints[pair[0], :], joints[pair[1], :] = \ + joints[pair[1], :], joints[pair[0], :].copy() + joints_vis[pair[0], :], joints_vis[pair[1], :] = \ + joints_vis[pair[1], :], joints_vis[pair[0], :].copy() + + return joints*joints_vis, joints_vis + + +def transform_preds(coords, center, scale, input_size): + target_coords = np.zeros(coords.shape) + trans = get_affine_transform(center, scale, 0, input_size, inv=1) + for p in range(coords.shape[0]): + target_coords[p, 0:2] = affine_transform(coords[p, 0:2], trans) + return target_coords + +def transform_parsing(pred, center, scale, width, height, input_size): + + trans = get_affine_transform(center, scale, 0, input_size, inv=1) + target_pred = cv2.warpAffine( + pred, + trans, + (int(width), int(height)), #(int(width), int(height)), + flags=cv2.INTER_NEAREST, + borderMode=cv2.BORDER_CONSTANT, + borderValue=(0)) + + return target_pred + +def transform_logits(logits, center, scale, width, height, input_size): + + trans = get_affine_transform(center, scale, 0, input_size, inv=1) + channel = logits.shape[2] + target_logits = [] + for i in range(channel): + target_logit = cv2.warpAffine( + logits[:,:,i], + trans, + (int(width), int(height)), #(int(width), int(height)), + flags=cv2.INTER_LINEAR, + borderMode=cv2.BORDER_CONSTANT, + borderValue=(0)) + target_logits.append(target_logit) + target_logits = np.stack(target_logits,axis=2) + + return target_logits + + +def get_affine_transform(center, + scale, + rot, + output_size, + shift=np.array([0, 0], dtype=np.float32), + inv=0): + if not isinstance(scale, np.ndarray) and not isinstance(scale, list): + print(scale) + scale = np.array([scale, scale]) + + scale_tmp = scale + + src_w = scale_tmp[0] + dst_w = output_size[1] + dst_h = output_size[0] + + rot_rad = np.pi * rot / 180 + src_dir = get_dir([0, src_w * -0.5], rot_rad) + dst_dir = np.array([0, (dst_w-1) * -0.5], np.float32) + + src = np.zeros((3, 2), dtype=np.float32) + dst = np.zeros((3, 2), dtype=np.float32) + src[0, :] = center + scale_tmp * shift + src[1, :] = center + src_dir + scale_tmp * shift + dst[0, :] = [(dst_w-1) * 0.5, (dst_h-1) * 0.5] + dst[1, :] = np.array([(dst_w-1) * 0.5, (dst_h-1) * 0.5]) + dst_dir + + src[2:, :] = get_3rd_point(src[0, :], src[1, :]) + 
dst[2:, :] = get_3rd_point(dst[0, :], dst[1, :]) + + if inv: + trans = cv2.getAffineTransform(np.float32(dst), np.float32(src)) + else: + trans = cv2.getAffineTransform(np.float32(src), np.float32(dst)) + + return trans + + +def affine_transform(pt, t): + new_pt = np.array([pt[0], pt[1], 1.]).T + new_pt = np.dot(t, new_pt) + return new_pt[:2] + + +def get_3rd_point(a, b): + direct = a - b + return b + np.array([-direct[1], direct[0]], dtype=np.float32) + + +def get_dir(src_point, rot_rad): + sn, cs = np.sin(rot_rad), np.cos(rot_rad) + + src_result = [0, 0] + src_result[0] = src_point[0] * cs - src_point[1] * sn + src_result[1] = src_point[0] * sn + src_point[1] * cs + + return src_result + + +def crop(img, center, scale, output_size, rot=0): + trans = get_affine_transform(center, scale, rot, output_size) + + dst_img = cv2.warpAffine(img, + trans, + (int(output_size[1]), int(output_size[0])), + flags=cv2.INTER_LINEAR) + + return dst_img diff --git a/ComfyUI-Easy-Use/py/ic_light/__init__.py b/ComfyUI-Easy-Use/py/ic_light/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..33faea898a12213002402a2d17cea0466253e192 --- /dev/null +++ b/ComfyUI-Easy-Use/py/ic_light/__init__.py @@ -0,0 +1,186 @@ +#credit to huchenlei for this module +#from https://github.com/huchenlei/ComfyUI-IC-Light-Native +import torch +import numpy as np +from typing import Tuple, TypedDict, Callable + +import comfy.model_management +from comfy.sd import load_unet +from comfy.ldm.models.autoencoder import AutoencoderKL +from comfy.model_base import BaseModel +from comfy.model_patcher import ModelPatcher +from PIL import Image +from nodes import VAEEncode +from ..libs.image import np2tensor, pil2tensor + +class UnetParams(TypedDict): + input: torch.Tensor + timestep: torch.Tensor + c: dict + cond_or_uncond: torch.Tensor + +class VAEEncodeArgMax(VAEEncode): + def encode(self, vae, pixels): + assert isinstance( + vae.first_stage_model, AutoencoderKL + ), "ArgMax only supported for AutoencoderKL" + original_sample_mode = vae.first_stage_model.regularization.sample + vae.first_stage_model.regularization.sample = False + ret = super().encode(vae, pixels) + vae.first_stage_model.regularization.sample = original_sample_mode + return ret + +class ICLight: + + @staticmethod + def apply_c_concat(params: UnetParams, concat_conds) -> UnetParams: + """Apply c_concat on unet call.""" + sample = params["input"] + params["c"]["c_concat"] = torch.cat( + ( + [concat_conds.to(sample.device)] + * (sample.shape[0] // concat_conds.shape[0]) + ), + dim=0, + ) + return params + + @staticmethod + def create_custom_conv( + original_conv: torch.nn.Module, + dtype: torch.dtype, + device=torch.device, + ) -> torch.nn.Module: + with torch.no_grad(): + new_conv_in = torch.nn.Conv2d( + 8, + original_conv.out_channels, + original_conv.kernel_size, + original_conv.stride, + original_conv.padding, + ) + new_conv_in.weight.zero_() + new_conv_in.weight[:, :4, :, :].copy_(original_conv.weight) + new_conv_in.bias = original_conv.bias + return new_conv_in.to(dtype=dtype, device=device) + + def generate_lighting_image(self, original_image, direction): + _, image_height, image_width, _ = original_image.shape + match direction: + case 'Left Light': + gradient = np.linspace(255, 0, image_width) + image = np.tile(gradient, (image_height, 1)) + input_bg = np.stack((image,) * 3, axis=-1).astype(np.uint8) + return np2tensor(input_bg) + case 'Right Light': + gradient = np.linspace(0, 255, image_width) + image = np.tile(gradient, (image_height, 1)) 
+ input_bg = np.stack((image,) * 3, axis=-1).astype(np.uint8) + return np2tensor(input_bg) + case 'Top Light': + gradient = np.linspace(255, 0, image_height)[:, None] + image = np.tile(gradient, (1, image_width)) + input_bg = np.stack((image,) * 3, axis=-1).astype(np.uint8) + return np2tensor(input_bg) + case 'Bottom Light': + gradient = np.linspace(0, 255, image_height)[:, None] + image = np.tile(gradient, (1, image_width)) + input_bg = np.stack((image,) * 3, axis=-1).astype(np.uint8) + return np2tensor(input_bg) + case 'Circle Light': + x = np.linspace(-1, 1, image_width) + y = np.linspace(-1, 1, image_height) + x, y = np.meshgrid(x, y) + r = np.sqrt(x ** 2 + y ** 2) + r = r / r.max() + color1 = np.array([0, 0, 0])[np.newaxis, np.newaxis, :] + color2 = np.array([255, 255, 255])[np.newaxis, np.newaxis, :] + gradient = (color1 * r[..., np.newaxis] + color2 * (1 - r)[..., np.newaxis]).astype(np.uint8) + image = pil2tensor(Image.fromarray(gradient)) + return image + case _: + image = pil2tensor(Image.new('RGB', (1, 1), (0, 0, 0))) + return image + + def generate_source_image(self, original_image, source): + batch_size, image_height, image_width, _ = original_image.shape + match source: + case 'Use Flipped Background Image': + if batch_size < 2: + raise ValueError('Must be at least 2 image to use flipped background image.') + original_image = [img.unsqueeze(0) for img in original_image] + image = torch.flip(original_image[1], [2]) + return image + case 'Ambient': + input_bg = np.zeros(shape=(image_height, image_width, 3), dtype=np.uint8) + 64 + return np2tensor(input_bg) + case 'Left Light': + gradient = np.linspace(224, 32, image_width) + image = np.tile(gradient, (image_height, 1)) + input_bg = np.stack((image,) * 3, axis=-1).astype(np.uint8) + return np2tensor(input_bg) + case 'Right Light': + gradient = np.linspace(32, 224, image_width) + image = np.tile(gradient, (image_height, 1)) + input_bg = np.stack((image,) * 3, axis=-1).astype(np.uint8) + return np2tensor(input_bg) + case 'Top Light': + gradient = np.linspace(224, 32, image_height)[:, None] + image = np.tile(gradient, (1, image_width)) + input_bg = np.stack((image,) * 3, axis=-1).astype(np.uint8) + return np2tensor(input_bg) + case 'Bottom Light': + gradient = np.linspace(32, 224, image_height)[:, None] + image = np.tile(gradient, (1, image_width)) + input_bg = np.stack((image,) * 3, axis=-1).astype(np.uint8) + return np2tensor(input_bg) + case _: + image = pil2tensor(Image.new('RGB', (1, 1), (0, 0, 0))) + return image + + + def apply(self, ic_model_path, model, c_concat: dict, ic_model=None) -> Tuple[ModelPatcher]: + device = comfy.model_management.get_torch_device() + dtype = comfy.model_management.unet_dtype() + work_model = model.clone() + + # Apply scale factor. + base_model: BaseModel = work_model.model + scale_factor = base_model.model_config.latent_format.scale_factor + + # [B, 4, H, W] + concat_conds: torch.Tensor = c_concat["samples"] * scale_factor + # [1, 4 * B, H, W] + concat_conds = torch.cat([c[None, ...] 
for c in concat_conds], dim=1) + + def unet_dummy_apply(unet_apply: Callable, params: UnetParams): + """A dummy unet apply wrapper serving as the endpoint of wrapper + chain.""" + return unet_apply(x=params["input"], t=params["timestep"], **params["c"]) + + existing_wrapper = work_model.model_options.get( + "model_function_wrapper", unet_dummy_apply + ) + + def wrapper_func(unet_apply: Callable, params: UnetParams): + return existing_wrapper(unet_apply, params=self.apply_c_concat(params, concat_conds)) + + work_model.set_model_unet_function_wrapper(wrapper_func) + if not ic_model: + ic_model = load_unet(ic_model_path) + ic_model_state_dict = ic_model.model.diffusion_model.state_dict() + + work_model.add_patches( + patches={ + ("diffusion_model." + key): ( + 'diff', + [ + value.to(dtype=dtype, device=device), + {"pad_weight": key == 'input_blocks.0.0.weight'} + ] + ) + for key, value in ic_model_state_dict.items() + } + ) + + return (work_model, ic_model) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/image.py b/ComfyUI-Easy-Use/py/image.py new file mode 100644 index 0000000000000000000000000000000000000000..1f72e01101206de16b369a3ab18b0f93b7d07be7 --- /dev/null +++ b/ComfyUI-Easy-Use/py/image.py @@ -0,0 +1,1590 @@ +import os +import hashlib +import folder_paths +import torch +import numpy as np +import comfy.utils +import comfy.model_management +from comfy_extras.nodes_compositing import JoinImageWithAlpha +from server import PromptServer +from nodes import MAX_RESOLUTION +from PIL import Image, ImageDraw, ImageFilter +from torchvision.transforms import Resize, CenterCrop, GaussianBlur +from torchvision.transforms.functional import to_pil_image +from .libs.log import log_node_info +from .libs.utils import AlwaysEqualProxy +from .libs.image import pil2tensor, tensor2pil, ResizeMode, get_new_bounds, RGB2RGBA, image2mask +from .libs.colorfix import adain_color_fix, wavelet_color_fix +from .libs.chooser import ChooserMessage, ChooserCancelled +from .config import REMBG_DIR, REMBG_MODELS, HUMANPARSING_MODELS, MEDIAPIPE_MODELS, MEDIAPIPE_DIR + + +# 图像数量 +class imageCount: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "images": ("IMAGE",), + } + } + + CATEGORY = "EasyUse/Image" + + RETURN_TYPES = ("INT",) + RETURN_NAMES = ("count",) + FUNCTION = "get_count" + + def get_count(self, images): + return (images.size(0),) + +# 图像裁切 +class imageInsetCrop: + + @classmethod + def INPUT_TYPES(cls): # pylint: disable = invalid-name, missing-function-docstring + return { + "required": { + "image": ("IMAGE",), + "measurement": (['Pixels', 'Percentage'],), + "left": ("INT", { + "default": 0, + "min": 0, + "max": MAX_RESOLUTION, + "step": 8 + }), + "right": ("INT", { + "default": 0, + "min": 0, + "max": MAX_RESOLUTION, + "step": 8 + }), + "top": ("INT", { + "default": 0, + "min": 0, + "max": MAX_RESOLUTION, + "step": 8 + }), + "bottom": ("INT", { + "default": 0, + "min": 0, + "max": MAX_RESOLUTION, + "step": 8 + }), + }, + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "crop" + + CATEGORY = "EasyUse/Image" + + # pylint: disable = too-many-arguments + def crop(self, measurement, left, right, top, bottom, image=None): + """Does the crop.""" + + _, height, width, _ = image.shape + + if measurement == 'Percentage': + left = int(width - (width * (100 - left) / 100)) + right = int(width - (width * (100 - right) / 100)) + top = int(height - (height * (100 - top) / 100)) + bottom = int(height - (height * (100 - bottom) / 100)) + + # Snap to 8 pixels + left = left // 8 * 8 + right = right 
// 8 * 8 + top = top // 8 * 8 + bottom = bottom // 8 * 8 + + if left == 0 and right == 0 and bottom == 0 and top == 0: + return (image,) + + inset_left, inset_right, inset_top, inset_bottom = get_new_bounds(width, height, left, right, + top, bottom) + if inset_top > inset_bottom: + raise ValueError( + f"Invalid cropping dimensions top ({inset_top}) exceeds bottom ({inset_bottom})") + if inset_left > inset_right: + raise ValueError( + f"Invalid cropping dimensions left ({inset_left}) exceeds right ({inset_right})") + + log_node_info("Image Inset Crop", f'Cropping image {width}x{height} width inset by {inset_left},{inset_right}, ' + + f'and height inset by {inset_top}, {inset_bottom}') + image = image[:, inset_top:inset_bottom, inset_left:inset_right, :] + + return (image,) + +# 图像尺寸 +class imageSize: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "image": ("IMAGE",), + } + } + + RETURN_TYPES = ("INT", "INT") + RETURN_NAMES = ("width_int", "height_int") + OUTPUT_NODE = True + FUNCTION = "image_width_height" + + CATEGORY = "EasyUse/Image" + + def image_width_height(self, image): + _, raw_H, raw_W, _ = image.shape + + width = raw_W + height = raw_H + + if width is not None and height is not None: + result = (width, height) + else: + result = (0, 0) + return {"ui": {"text": "Width: "+str(width)+" , Height: "+str(height)}, "result": result} + +# 图像尺寸(最长边) +class imageSizeBySide: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "image": ("IMAGE",), + "side": (["Longest", "Shortest"],) + } + } + + RETURN_TYPES = ("INT",) + RETURN_NAMES = ("resolution",) + OUTPUT_NODE = True + FUNCTION = "image_side" + + CATEGORY = "EasyUse/Image" + + def image_side(self, image, side): + _, raw_H, raw_W, _ = image.shape + + width = raw_W + height = raw_H + if width is not None and height is not None: + if side == "Longest": + result = (width,) if width > height else (height,) + elif side == 'Shortest': + result = (width,) if width < height else (height,) + else: + result = (0,) + return {"ui": {"text": str(result[0])}, "result": result} + +# 图像尺寸(最长边) +class imageSizeByLongerSide: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "image": ("IMAGE",), + } + } + + RETURN_TYPES = ("INT",) + RETURN_NAMES = ("resolution",) + OUTPUT_NODE = True + FUNCTION = "image_longer_side" + + CATEGORY = "EasyUse/Image" + + def image_longer_side(self, image): + _, raw_H, raw_W, _ = image.shape + + width = raw_W + height = raw_H + if width is not None and height is not None: + if width > height: + result = (width,) + else: + result = (height,) + else: + result = (0,) + return {"ui": {"text": str(result[0])}, "result": result} + +# 图像缩放 +class imageScaleDown: + crop_methods = ["disabled", "center"] + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "images": ("IMAGE",), + "width": ( + "INT", + {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}, + ), + "height": ( + "INT", + {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}, + ), + "crop": (s.crop_methods,), + } + } + + RETURN_TYPES = ("IMAGE",) + CATEGORY = "EasyUse/Image" + FUNCTION = "image_scale_down" + + def image_scale_down(self, images, width, height, crop): + if crop == "center": + old_width = images.shape[2] + old_height = images.shape[1] + old_aspect = old_width / old_height + new_aspect = width / height + x = 0 + y = 0 + if old_aspect > new_aspect: + x = round((old_width - old_width * 
(new_aspect / old_aspect)) / 2) + elif old_aspect < new_aspect: + y = round((old_height - old_height * (old_aspect / new_aspect)) / 2) + s = images[:, y: old_height - y, x: old_width - x, :] + else: + s = images + + results = [] + for image in s: + img = tensor2pil(image).convert("RGB") + img = img.resize((width, height), Image.LANCZOS) + results.append(pil2tensor(img)) + + return (torch.cat(results, dim=0),) + +# 图像缩放比例 +class imageScaleDownBy(imageScaleDown): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "images": ("IMAGE",), + "scale_by": ( + "FLOAT", + {"default": 0.5, "min": 0.01, "max": 1.0, "step": 0.01}, + ), + } + } + + RETURN_TYPES = ("IMAGE",) + CATEGORY = "EasyUse/Image" + FUNCTION = "image_scale_down_by" + + def image_scale_down_by(self, images, scale_by): + width = images.shape[2] + height = images.shape[1] + new_width = int(width * scale_by) + new_height = int(height * scale_by) + return self.image_scale_down(images, new_width, new_height, "center") + +# 图像缩放尺寸 +class imageScaleDownToSize(imageScaleDownBy): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "images": ("IMAGE",), + "size": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), + "mode": ("BOOLEAN", {"default": True, "label_on": "max", "label_off": "min"}), + } + } + + RETURN_TYPES = ("IMAGE",) + CATEGORY = "EasyUse/Image" + FUNCTION = "image_scale_down_to_size" + + def image_scale_down_to_size(self, images, size, mode): + width = images.shape[2] + height = images.shape[1] + + if mode: + scale_by = size / max(width, height) + else: + scale_by = size / min(width, height) + + scale_by = min(scale_by, 1.0) + return self.image_scale_down_by(images, scale_by) + + +# 图像比率 +class imageRatio: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "image": ("IMAGE",), + } + } + + RETURN_TYPES = ("INT", "INT", "FLOAT", "FLOAT") + RETURN_NAMES = ("width_ratio_int", "height_ratio_int", "width_ratio_float", "height_ratio_float") + OUTPUT_NODE = True + FUNCTION = "image_ratio" + + CATEGORY = "EasyUse/Image" + + def gcf(self, a, b): + while b: + a, b = b, a % b + return a + + def image_ratio(self, image): + _, raw_H, raw_W, _ = image.shape + + width = raw_W + height = raw_H + + ratio = self.gcf(width, height) + + if width is not None and height is not None: + width_ratio = width // ratio + height_ratio = height // ratio + result = (width_ratio, height_ratio, width_ratio, height_ratio) + else: + width_ratio = 0 + height_ratio = 0 + result = (0, 0, 0.0, 0.0) + text = f"Image Ratio is {str(width_ratio)}:{str(height_ratio)}" + + return {"ui": {"text": text}, "result": result} + + +# 图像完美像素 +class imagePixelPerfect: + @classmethod + def INPUT_TYPES(s): + RESIZE_MODES = [ResizeMode.RESIZE.value, ResizeMode.INNER_FIT.value, ResizeMode.OUTER_FIT.value] + return { + "required": { + "image": ("IMAGE",), + "resize_mode": (RESIZE_MODES, {"default": ResizeMode.RESIZE.value}) + } + } + + RETURN_TYPES = ("INT",) + RETURN_NAMES = ("resolution",) + OUTPUT_NODE = True + FUNCTION = "execute" + + CATEGORY = "EasyUse/Image" + + def execute(self, image, resize_mode): + + _, raw_H, raw_W, _ = image.shape + + width = raw_W + height = raw_H + + k0 = float(height) / float(raw_H) + k1 = float(width) / float(raw_W) + + if resize_mode == ResizeMode.OUTER_FIT.value: + estimation = min(k0, k1) * float(min(raw_H, raw_W)) + else: + estimation = max(k0, k1) * float(min(raw_H, raw_W)) + + result = int(np.round(estimation)) + text = 
f"Width:{str(width)}\nHeight:{str(height)}\nPixelPerfect:{str(result)}" + + return {"ui": {"text": text}, "result": (result,)} + +# 图像保存 (简易) +from nodes import PreviewImage, SaveImage +class imageSaveSimple: + + def __init__(self): + self.output_dir = folder_paths.get_output_directory() + self.type = "output" + self.prefix_append = "" + self.compress_level = 4 + + @classmethod + def INPUT_TYPES(s): + return {"required": + { + "images": ("IMAGE",), + "filename_prefix": ("STRING", {"default": "ComfyUI"}), + "only_preview": ("BOOLEAN", {"default": False}), + }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, + } + + RETURN_TYPES = () + FUNCTION = "save" + OUTPUT_NODE = True + CATEGORY = "EasyUse/Image" + + def save(self, images, filename_prefix="ComfyUI", only_preview=False, prompt=None, extra_pnginfo=None): + if only_preview: + PreviewImage().save_images(images, filename_prefix, prompt, extra_pnginfo) + return () + else: + return SaveImage().save_images(images, filename_prefix, prompt, extra_pnginfo) + + +# 图像批次合并 +class JoinImageBatch: + """Turns an image batch into one big image.""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "images": ("IMAGE",), + "mode": (("horizontal", "vertical"), {"default": "horizontal"}), + }, + } + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("image",) + FUNCTION = "join" + CATEGORY = "EasyUse/Image" + + def join(self, images, mode): + n, h, w, c = images.shape + image = None + if mode == "vertical": + # for vertical we can just reshape + image = images.reshape(1, n * h, w, c) + elif mode == "horizontal": + # for horizontal we have to swap axes + image = torch.transpose(torch.transpose(images, 1, 2).reshape(1, n * w, h, c), 1, 2) + return (image,) + +class imageListToImageBatch: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "images": ("IMAGE",), + }} + + INPUT_IS_LIST = True + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "doit" + + CATEGORY = "EasyUse/Image" + + def doit(self, images): + if len(images) <= 1: + return (images[0],) + else: + image1 = images[0] + for image2 in images[1:]: + if image1.shape[1:] != image2.shape[1:]: + image2 = comfy.utils.common_upscale(image2.movedim(-1, 1), image1.shape[2], image1.shape[1], "lanczos", + "center").movedim(1, -1) + image1 = torch.cat((image1, image2), dim=0) + return (image1,) + + +class imageBatchToImageList: + @classmethod + def INPUT_TYPES(s): + return {"required": {"image": ("IMAGE",), }} + + RETURN_TYPES = ("IMAGE",) + OUTPUT_IS_LIST = (True,) + FUNCTION = "doit" + + CATEGORY = "EasyUse/Image" + + def doit(self, image): + images = [image[i:i + 1, ...] 
for i in range(image.shape[0])] + return (images,) + +# 图像拆分 +class imageSplitList: + @classmethod + + def INPUT_TYPES(s): + return { + "required": { + "images": ("IMAGE",), + }, + } + + RETURN_TYPES = ("IMAGE", "IMAGE", "IMAGE",) + RETURN_NAMES = ("images", "images", "images",) + FUNCTION = "doit" + CATEGORY = "EasyUse/Image" + + def doit(self, images): + length = len(images) + new_images = ([], [], []) + if length % 3 == 0: + for index, img in enumerate(images): + if index % 3 == 0: + new_images[0].append(img) + elif (index+1) % 3 == 0: + new_images[2].append(img) + else: + new_images[1].append(img) + elif length % 2 == 0: + for index, img in enumerate(images): + if index % 2 == 0: + new_images[0].append(img) + else: + new_images[1].append(img) + return new_images + +class imageSplitGrid: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "images": ("IMAGE",), + "row": ("INT", {"default": 1,"min": 1,"max": 10,"step": 1,}), + "column": ("INT", {"default": 1,"min": 1,"max": 10,"step": 1,}), + } + } + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("images",) + FUNCTION = "doit" + CATEGORY = "EasyUse/Image" + + def crop(self, image, width, height, x, y): + x = min(x, image.shape[2] - 1) + y = min(y, image.shape[1] - 1) + to_x = width + x + to_y = height + y + img = image[:, y:to_y, x:to_x, :] + return img + + def doit(self, images, row, column): + _, height, width, _ = images.shape + sub_width = width // column + sub_height = height // row + new_images = [] + for i in range(row): + for j in range(column): + new_images.append(self.crop(images, sub_width, sub_height, j * sub_width, i * sub_height)) + + return (torch.cat(new_images, dim=0),) + +class imagesSplitImage: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "images": ("IMAGE",), + } + } + + RETURN_TYPES = ("IMAGE", "IMAGE", "IMAGE", "IMAGE", "IMAGE") + RETURN_NAMES = ("image1", "image2", "image3", "image4", "image5") + FUNCTION = "split" + CATEGORY = "EasyUse/Image" + + def split(self, images,): + new_images = torch.chunk(images, len(images), dim=0) + return new_images + + +class imageConcat: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image1": ("IMAGE",), + "image2": ("IMAGE",), + "direction": (['right','down','left','up',],{"default": 'right'}), + "match_image_size": ("BOOLEAN", {"default": False}), + }} + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "concat" + CATEGORY = "EasyUse/Image" + + def concat(self, image1, image2, direction, match_image_size): + if match_image_size: + image2 = torch.nn.functional.interpolate(image2, size=(image1.shape[2], image1.shape[3]), mode="bilinear") + if direction == 'right': + row = torch.cat((image1, image2), dim=2) + elif direction == 'down': + row = torch.cat((image1, image2), dim=1) + elif direction == 'left': + row = torch.cat((image2, image1), dim=2) + elif direction == 'up': + row = torch.cat((image2, image1), dim=1) + return (row,) + +# 图片背景移除 +from .briaai.rembg import BriaRMBG, preprocess_image, postprocess_image +from .libs.utils import get_local_filepath, easySave, install_package +class imageRemBg: + @classmethod + def INPUT_TYPES(self): + return { + "required": { + "images": ("IMAGE",), + "rem_mode": (("RMBG-1.4","Inspyrenet"),), + "image_output": (["Hide", "Preview", "Save", "Hide/Save"], {"default": "Preview"}), + "save_prefix": ("STRING", {"default": "ComfyUI"}), + + }, + "optional":{ + "torchscript_jit": ("BOOLEAN", {"default": False}), + }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, + } + + RETURN_TYPES = 
("IMAGE", "MASK") + RETURN_NAMES = ("image", "mask") + FUNCTION = "remove" + OUTPUT_NODE = True + + CATEGORY = "EasyUse/Image" + + def remove(self, rem_mode, images, image_output, save_prefix, torchscript_jit=False, prompt=None, extra_pnginfo=None): + new_images = list() + masks = list() + if rem_mode == "RMBG-1.4": + # load model + model_url = REMBG_MODELS[rem_mode]['model_url'] + suffix = model_url.split(".")[-1] + model_path = get_local_filepath(model_url, REMBG_DIR, rem_mode+'.'+suffix) + + net = BriaRMBG() + device = torch.device("cuda" if torch.cuda.is_available() else "cpu") + net.load_state_dict(torch.load(model_path, map_location=device)) + net.to(device) + net.eval() + # prepare input + model_input_size = [1024, 1024] + for image in images: + orig_im = tensor2pil(image) + w, h = orig_im.size + image = preprocess_image(orig_im, model_input_size).to(device) + # inference + result = net(image) + result_image = postprocess_image(result[0][0], (h, w)) + mask_im = Image.fromarray(result_image) + new_im = Image.new("RGBA", mask_im.size, (0,0,0,0)) + new_im.paste(orig_im, mask=mask_im) + + new_images.append(pil2tensor(new_im)) + masks.append(pil2tensor(mask_im)) + + new_images = torch.cat(new_images, dim=0) + masks = torch.cat(masks, dim=0) + + elif rem_mode == "Inspyrenet": + from tqdm import tqdm + try: + from transparent_background import Remover + except: + install_package("transparent_background") + from transparent_background import Remover + + remover = Remover(jit=torchscript_jit) + + for img in tqdm(images, "Inspyrenet Rembg"): + mid = remover.process(tensor2pil(img), type='rgba') + out = pil2tensor(mid) + new_images.append(out) + mask = out[:, :, :, 3] + masks.append(mask) + new_images = torch.cat(new_images, dim=0) + masks = torch.cat(masks, dim=0) + + results = easySave(new_images, save_prefix, image_output, prompt, extra_pnginfo) + + if image_output in ("Hide", "Hide/Save"): + return {"ui": {}, + "result": (new_images, masks)} + + return {"ui": {"images": results}, + "result": (new_images, masks)} + +# 图像选择器 +class imageChooser(PreviewImage): + @classmethod + def INPUT_TYPES(self): + return { + "required":{ + "mode": (['Always Pause', 'Keep Last Selection'], {"default": "Always Pause"}), + }, + "optional": { + "images": ("IMAGE",), + }, + "hidden": {"prompt": "PROMPT", "my_unique_id": "UNIQUE_ID", "extra_pnginfo": "EXTRA_PNGINFO"}, + } + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("image",) + FUNCTION = "chooser" + OUTPUT_NODE = True + INPUT_IS_LIST = True + CATEGORY = "EasyUse/Image" + + last_ic = {} + @classmethod + def IS_CHANGED(cls, my_unique_id, **kwargs): + return cls.last_ic[my_unique_id[0]] + + def tensor_bundle(self, tensor_in: torch.Tensor, picks): + if tensor_in is not None and len(picks): + batch = tensor_in.shape[0] + return torch.cat(tuple([tensor_in[(x) % batch].unsqueeze_(0) for x in picks])).reshape( + [-1] + list(tensor_in.shape[1:])) + else: + return None + + def chooser(self, prompt=None, my_unique_id=None, extra_pnginfo=None, **kwargs): + id = my_unique_id[0] + if id not in ChooserMessage.stash: + ChooserMessage.stash[id] = {} + my_stash = ChooserMessage.stash[id] + + # enable stashing. 
If images is None, we are operating in read-from-stash mode + if 'images' in kwargs: + my_stash['images'] = kwargs['images'] + else: + kwargs['images'] = my_stash.get('images', None) + + if (kwargs['images'] is None): + return (None, None, None, "") + + images_in = torch.cat(kwargs.pop('images')) + self.batch = images_in.shape[0] + for x in kwargs: kwargs[x] = kwargs[x][0] + result = self.save_images(images=images_in, prompt=prompt) + + images = result['ui']['images'] + PromptServer.instance.send_sync("easyuse-image-choose", {"id": id, "urls": images}) + + # 获取上次选择 + mode = kwargs.pop('mode', 'Always Pause') + last_choosen = None + if mode == 'Keep Last Selection': + if not extra_pnginfo: + print("Error: extra_pnginfo is empty") + elif (not isinstance(extra_pnginfo[0], dict) or "workflow" not in extra_pnginfo[0]): + print("Error: extra_pnginfo[0] is not a dict or missing 'workflow' key") + else: + workflow = extra_pnginfo[0]["workflow"] + node = next((x for x in workflow["nodes"] if str(x["id"]) == id), None) + if node: + last_choosen = node['properties']['values'] + + # wait for selection + try: + selections = ChooserMessage.waitForMessage(id, asList=True) if last_choosen is None or len(last_choosen)<1 else last_choosen + choosen = [x for x in selections if x >= 0] if len(selections)>1 else [0] + except ChooserCancelled: + raise comfy.model_management.InterruptProcessingException() + + return {"ui": {"images": images}, + "result": (self.tensor_bundle(images_in, choosen),)} + +class imageColorMatch(PreviewImage): + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "image_ref": ("IMAGE",), + "image_target": ("IMAGE",), + "method": (['wavelet', 'adain', 'mkl', 'hm', 'reinhard', 'mvgd', 'hm-mvgd-hm', 'hm-mkl-hm'],), + "image_output": (["Hide", "Preview", "Save", "Hide/Save"], {"default": "Preview"}), + "save_prefix": ("STRING", {"default": "ComfyUI"}), + }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, + } + + CATEGORY = "EasyUse/Image" + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("image",) + OUTPUT_NODE = True + FUNCTION = "color_match" + + def color_match(self, image_ref, image_target, method, image_output, save_prefix, prompt=None, extra_pnginfo=None): + if method in ["wavelet", "adain"]: + result_images = wavelet_color_fix(tensor2pil(image_target), tensor2pil(image_ref)) if method == 'wavelet' else adain_color_fix(tensor2pil(image_target), tensor2pil(image_ref)) + new_images = pil2tensor(result_images) + else: + try: + from color_matcher import ColorMatcher + except: + install_package("color-matcher") + from color_matcher import ColorMatcher + image_ref = image_ref.cpu() + image_target = image_target.cpu() + batch_size = image_target.size(0) + out = [] + images_target = image_target.squeeze() + images_ref = image_ref.squeeze() + + image_ref_np = images_ref.numpy() + images_target_np = images_target.numpy() + if image_ref.size(0) > 1 and image_ref.size(0) != batch_size: + raise ValueError("ColorMatch: Use either single reference image or a matching batch of reference images.") + cm = ColorMatcher() + for i in range(batch_size): + image_target_np = images_target_np if batch_size == 1 else images_target[i].numpy() + image_ref_np_i = image_ref_np if image_ref.size(0) == 1 else images_ref[i].numpy() + try: + image_result = cm.transfer(src=image_target_np, ref=image_ref_np_i, method=method) + except BaseException as e: + print(f"Error occurred during transfer: {e}") + break + out.append(torch.from_numpy(image_result)) + + new_images = torch.stack(out, 
dim=0).to(torch.float32) + + results = easySave(new_images, save_prefix, image_output, prompt, extra_pnginfo) + + if image_output in ("Hide", "Hide/Save"): + return {"ui": {}, + "result": (new_images,)} + + return {"ui": {"images": results}, + "result": (new_images,)} + +class imageDetailTransfer: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "target": ("IMAGE",), + "source": ("IMAGE",), + "mode": (["add", "multiply", "screen", "overlay", "soft_light", "hard_light", "color_dodge", "color_burn", "difference", "exclusion", "divide",],{"default": "add"}), + "blur_sigma": ("FLOAT", {"default": 1.0, "min": 0.1, "max": 100.0, "step": 0.01}), + "blend_factor": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.001, "round": 0.001}), + "image_output": (["Hide", "Preview", "Save", "Hide/Save"], {"default": "Preview"}), + "save_prefix": ("STRING", {"default": "ComfyUI"}), + }, + "optional": { + "mask": ("MASK",), + }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, + } + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("image",) + OUTPUT_NODE = True + FUNCTION = "transfer" + CATEGORY = "EasyUse/Image" + + + + def transfer(self, target, source, mode, blur_sigma, blend_factor, image_output, save_prefix, mask=None, prompt=None, extra_pnginfo=None): + batch_size, height, width, _ = target.shape + device = comfy.model_management.get_torch_device() + target_tensor = target.permute(0, 3, 1, 2).clone().to(device) + source_tensor = source.permute(0, 3, 1, 2).clone().to(device) + + if target.shape[1:] != source.shape[1:]: + source_tensor = comfy.utils.common_upscale(source_tensor, width, height, "bilinear", "disabled") + + if source.shape[0] < batch_size: + source = source[0].unsqueeze(0).repeat(batch_size, 1, 1, 1) + + kernel_size = int(6 * int(blur_sigma) + 1) + + gaussian_blur = GaussianBlur(kernel_size=(kernel_size, kernel_size), sigma=(blur_sigma, blur_sigma)) + + blurred_target = gaussian_blur(target_tensor) + blurred_source = gaussian_blur(source_tensor) + + if mode == "add": + new_image = (source_tensor - blurred_source) + blurred_target + elif mode == "multiply": + new_image = source_tensor * blurred_target + elif mode == "screen": + new_image = 1 - (1 - source_tensor) * (1 - blurred_target) + elif mode == "overlay": + new_image = torch.where(blurred_target < 0.5, 2 * source_tensor * blurred_target, + 1 - 2 * (1 - source_tensor) * (1 - blurred_target)) + elif mode == "soft_light": + new_image = (1 - 2 * blurred_target) * source_tensor ** 2 + 2 * blurred_target * source_tensor + elif mode == "hard_light": + new_image = torch.where(source_tensor < 0.5, 2 * source_tensor * blurred_target, + 1 - 2 * (1 - source_tensor) * (1 - blurred_target)) + elif mode == "difference": + new_image = torch.abs(blurred_target - source_tensor) + elif mode == "exclusion": + new_image = 0.5 - 2 * (blurred_target - 0.5) * (source_tensor - 0.5) + elif mode == "color_dodge": + new_image = blurred_target / (1 - source_tensor) + elif mode == "color_burn": + new_image = 1 - (1 - blurred_target) / source_tensor + elif mode == "divide": + new_image = (source_tensor / blurred_source) * blurred_target + else: + new_image = source_tensor + + new_image = torch.lerp(target_tensor, new_image, blend_factor) + if mask is not None: + mask = mask.to(device) + new_image = torch.lerp(target_tensor, new_image, mask) + new_image = torch.clamp(new_image, 0, 1) + new_image = new_image.permute(0, 2, 3, 1).cpu().float() + + results = easySave(new_image, save_prefix, image_output, prompt, 
extra_pnginfo) + + if image_output in ("Hide", "Hide/Save"): + return {"ui": {}, + "result": (new_image,)} + + return {"ui": {"images": results}, + "result": (new_image,)} + +# 图像反推 +from .libs.image import ci +class imageInterrogator: + @classmethod + def INPUT_TYPES(self): + return { + "required": { + "image": ("IMAGE",), + "mode": (['fast','classic','best','negative'],), + "use_lowvram": ("BOOLEAN", {"default": True}), + } + } + + RETURN_TYPES = ("STRING",) + RETURN_NAMES = ("prompt",) + FUNCTION = "interrogate" + CATEGORY = "EasyUse/Image" + OUTPUT_NODE = True + OUTPUT_IS_LIST = (True,) + + def interrogate(self, image, mode, use_lowvram=False): + prompt = ci.image_to_prompt(image, mode, low_vram=use_lowvram) + return {"ui":{"text":prompt},"result":(prompt,)} + +# 人类分割器 +class humanSegmentation: + + @classmethod + def INPUT_TYPES(cls): + + return { + "required":{ + "image": ("IMAGE",), + "method": (["selfie_multiclass_256x256", "human_parsing_lip"],), + "confidence": ("FLOAT", {"default": 0.4, "min": 0.05, "max": 0.95, "step": 0.01},), + "crop_multi": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 10.0, "step": 0.001},), + }, + "hidden": { + "prompt": "PROMPT", + "my_unique_id": "UNIQUE_ID", + } + } + + RETURN_TYPES = ("IMAGE", "MASK", "BBOX") + RETURN_NAMES = ("image", "mask", "bbox") + FUNCTION = "parsing" + CATEGORY = "EasyUse/Segmentation" + + def get_mediapipe_image(self, image: Image): + import mediapipe as mp + # Convert image to NumPy array + numpy_image = np.asarray(image) + image_format = mp.ImageFormat.SRGB + # Convert BGR to RGB (if necessary) + if numpy_image.shape[-1] == 4: + image_format = mp.ImageFormat.SRGBA + elif numpy_image.shape[-1] == 3: + image_format = mp.ImageFormat.SRGB + numpy_image = cv2.cvtColor(numpy_image, cv2.COLOR_BGR2RGB) + return mp.Image(image_format=image_format, data=numpy_image) + + def parsing(self, image, confidence, method, crop_multi, prompt=None, my_unique_id=None): + mask_components = [] + if my_unique_id in prompt: + if prompt[my_unique_id]["inputs"]['mask_components']: + mask_components = prompt[my_unique_id]["inputs"]['mask_components'].split(',') + mask_components = list(map(int, mask_components)) + if method == 'selfie_multiclass_256x256': + try: + import mediapipe as mp + except: + install_package("mediapipe") + import mediapipe as mp + + from functools import reduce + + model_path = get_local_filepath(MEDIAPIPE_MODELS['selfie_multiclass_256x256']['model_url'], MEDIAPIPE_DIR) + model_asset_buffer = None + with open(model_path, "rb") as f: + model_asset_buffer = f.read() + image_segmenter_base_options = mp.tasks.BaseOptions(model_asset_buffer=model_asset_buffer) + options = mp.tasks.vision.ImageSegmenterOptions( + base_options=image_segmenter_base_options, + running_mode=mp.tasks.vision.RunningMode.IMAGE, + output_category_mask=True) + # Create the image segmenter + ret_images = [] + ret_masks = [] + + with mp.tasks.vision.ImageSegmenter.create_from_options(options) as segmenter: + for img in image: + _image = torch.unsqueeze(img, 0) + orig_image = tensor2pil(_image).convert('RGB') + # Convert the Tensor to a PIL image + i = 255. 
* img.cpu().numpy() + image_pil = Image.fromarray(np.clip(i, 0, 255).astype(np.uint8)) + # create our foreground and background arrays for storing the mask results + mask_background_array = np.zeros((image_pil.size[0], image_pil.size[1], 4), dtype=np.uint8) + mask_background_array[:] = (0, 0, 0, 255) + mask_foreground_array = np.zeros((image_pil.size[0], image_pil.size[1], 4), dtype=np.uint8) + mask_foreground_array[:] = (255, 255, 255, 255) + # Retrieve the masks for the segmented image + media_pipe_image = self.get_mediapipe_image(image=image_pil) + segmented_masks = segmenter.segment(media_pipe_image) + masks = [] + for i, com in enumerate(mask_components): + masks.append(segmented_masks.confidence_masks[com]) + + image_data = media_pipe_image.numpy_view() + image_shape = image_data.shape + # convert the image shape from "rgb" to "rgba" aka add the alpha channel + if image_shape[-1] == 3: + image_shape = (image_shape[0], image_shape[1], 4) + mask_background_array = np.zeros(image_shape, dtype=np.uint8) + mask_background_array[:] = (0, 0, 0, 255) + mask_foreground_array = np.zeros(image_shape, dtype=np.uint8) + mask_foreground_array[:] = (255, 255, 255, 255) + mask_arrays = [] + if len(masks) == 0: + mask_arrays.append(mask_background_array) + else: + for i, mask in enumerate(masks): + condition = np.stack((mask.numpy_view(),) * image_shape[-1], axis=-1) > confidence + mask_array = np.where(condition, mask_foreground_array, mask_background_array) + mask_arrays.append(mask_array) + # Merge our masks taking the maximum from each + merged_mask_arrays = reduce(np.maximum, mask_arrays) + # Create the image + mask_image = Image.fromarray(merged_mask_arrays) + # convert PIL image to tensor image + tensor_mask = mask_image.convert("RGB") + tensor_mask = np.array(tensor_mask).astype(np.float32) / 255.0 + tensor_mask = torch.from_numpy(tensor_mask)[None,] + _mask = tensor_mask.squeeze(3)[..., 0] + + _mask = tensor2pil(tensor_mask).convert('L') + + ret_image = RGB2RGBA(orig_image, _mask) + ret_images.append(pil2tensor(ret_image)) + ret_masks.append(image2mask(_mask)) + + output_image = torch.cat(ret_images, dim=0) + mask = torch.cat(ret_masks, dim=0) + + elif method == "human_parsing_lip": + from .human_parsing.run_parsing import HumanParsing + onnx_path = os.path.join(folder_paths.models_dir, 'onnx') + model_path = get_local_filepath(HUMANPARSING_MODELS['parsing_lip']['model_url'], onnx_path) + parsing = HumanParsing(model_path=model_path) + model_image = image.squeeze(0) + model_image = model_image.permute((2, 0, 1)) + model_image = to_pil_image(model_image) + + map_image, mask = parsing(model_image, mask_components) + + mask = mask[:, :, :, 0] + + alpha = 1.0 - mask + + output_image, = JoinImageWithAlpha().join_image_with_alpha(image, alpha) + + # use crop + bbox = [[0, 0, 0, 0]] + if crop_multi > 0.0: + output_image, mask, bbox = imageCropFromMask().crop(output_image, mask, crop_multi, crop_multi, 1.0) + + return (output_image, mask, bbox) + + +class imageCropFromMask: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "mask": ("MASK",), + "image_crop_multi": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}), + "mask_crop_multi": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}), + "bbox_smooth_alpha": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + }, + } + + RETURN_TYPES = ("IMAGE", "MASK", "BBOX",) + RETURN_NAMES = ("crop_image", "crop_mask", "bbox",) + FUNCTION = "crop" + CATEGORY = 
"EasyUse/Image" + + def smooth_bbox_size(self, prev_bbox_size, curr_bbox_size, alpha): + if alpha == 0: + return prev_bbox_size + return round(alpha * curr_bbox_size + (1 - alpha) * prev_bbox_size) + + def smooth_center(self, prev_center, curr_center, alpha=0.5): + if alpha == 0: + return prev_center + return ( + round(alpha * curr_center[0] + (1 - alpha) * prev_center[0]), + round(alpha * curr_center[1] + (1 - alpha) * prev_center[1]) + ) + + def image2mask(self, image): + return image[:, :, :, 0] + + def mask2image(self, mask): + return mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])).movedim(1, -1).expand(-1, -1, -1, 3) + + def cropimage(self, original_images, masks, crop_size_mult, bbox_smooth_alpha): + + bounding_boxes = [] + cropped_images = [] + + self.max_bbox_width = 0 + self.max_bbox_height = 0 + + # First, calculate the maximum bounding box size across all masks + curr_max_bbox_width = 0 + curr_max_bbox_height = 0 + for mask in masks: + _mask = tensor2pil(mask) + non_zero_indices = np.nonzero(np.array(_mask)) + min_x, max_x = np.min(non_zero_indices[1]), np.max(non_zero_indices[1]) + min_y, max_y = np.min(non_zero_indices[0]), np.max(non_zero_indices[0]) + width = max_x - min_x + height = max_y - min_y + curr_max_bbox_width = max(curr_max_bbox_width, width) + curr_max_bbox_height = max(curr_max_bbox_height, height) + + # Smooth the changes in the bounding box size + self.max_bbox_width = self.smooth_bbox_size(self.max_bbox_width, curr_max_bbox_width, bbox_smooth_alpha) + self.max_bbox_height = self.smooth_bbox_size(self.max_bbox_height, curr_max_bbox_height, bbox_smooth_alpha) + + # Apply the crop size multiplier + self.max_bbox_width = round(self.max_bbox_width * crop_size_mult) + self.max_bbox_height = round(self.max_bbox_height * crop_size_mult) + bbox_aspect_ratio = self.max_bbox_width / self.max_bbox_height + + # Then, for each mask and corresponding image... 
+ for i, (mask, img) in enumerate(zip(masks, original_images)): + _mask = tensor2pil(mask) + non_zero_indices = np.nonzero(np.array(_mask)) + min_x, max_x = np.min(non_zero_indices[1]), np.max(non_zero_indices[1]) + min_y, max_y = np.min(non_zero_indices[0]), np.max(non_zero_indices[0]) + + # Calculate center of bounding box + center_x = np.mean(non_zero_indices[1]) + center_y = np.mean(non_zero_indices[0]) + curr_center = (round(center_x), round(center_y)) + + # If this is the first frame, initialize prev_center with curr_center + if not hasattr(self, 'prev_center'): + self.prev_center = curr_center + + # Smooth the changes in the center coordinates from the second frame onwards + if i > 0: + center = self.smooth_center(self.prev_center, curr_center, bbox_smooth_alpha) + else: + center = curr_center + + # Update prev_center for the next frame + self.prev_center = center + + # Create bounding box using max_bbox_width and max_bbox_height + half_box_width = round(self.max_bbox_width / 2) + half_box_height = round(self.max_bbox_height / 2) + min_x = max(0, center[0] - half_box_width) + max_x = min(img.shape[1], center[0] + half_box_width) + min_y = max(0, center[1] - half_box_height) + max_y = min(img.shape[0], center[1] + half_box_height) + + # Append bounding box coordinates + bounding_boxes.append((min_x, min_y, max_x - min_x, max_y - min_y)) + + # Crop the image from the bounding box + cropped_img = img[min_y:max_y, min_x:max_x, :] + + # Calculate the new dimensions while maintaining the aspect ratio + new_height = min(cropped_img.shape[0], self.max_bbox_height) + new_width = round(new_height * bbox_aspect_ratio) + + # Resize the image + resize_transform = Resize((new_height, new_width)) + resized_img = resize_transform(cropped_img.permute(2, 0, 1)) + + # Perform the center crop to the desired size + crop_transform = CenterCrop((self.max_bbox_height, self.max_bbox_width)) # swap the order here if necessary + cropped_resized_img = crop_transform(resized_img) + + cropped_images.append(cropped_resized_img.permute(1, 2, 0)) + + return cropped_images, bounding_boxes + + def crop(self, image, mask, image_crop_multi, mask_crop_multi, bbox_smooth_alpha): + cropped_images, bounding_boxes = self.cropimage(image, mask, image_crop_multi, bbox_smooth_alpha) + cropped_mask_image, _ = self.cropimage(self.mask2image(mask), mask, mask_crop_multi, bbox_smooth_alpha) + + cropped_image_out = torch.stack(cropped_images, dim=0) + cropped_mask_out = torch.stack(cropped_mask_image, dim=0) + + return (cropped_image_out, cropped_mask_out[:, :, :, 0], bounding_boxes) + + +class imageUncropFromBBOX: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "original_image": ("IMAGE",), + "crop_image": ("IMAGE",), + "bbox": ("BBOX",), + "border_blending": ("FLOAT", {"default": 0.25, "min": 0.0, "max": 1.0, "step": 0.01},), + "use_square_mask": ("BOOLEAN", {"default": True}), + }, + "optional":{ + "optional_mask": ("MASK",) + } + } + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("image",) + FUNCTION = "uncrop" + CATEGORY = "EasyUse/Image" + + def bbox_check(self, bbox, target_size=None): + if not target_size: + return bbox + + new_bbox = ( + bbox[0], + bbox[1], + min(target_size[0] - bbox[0], bbox[2]), + min(target_size[1] - bbox[1], bbox[3]), + ) + return new_bbox + + def bbox_to_region(self, bbox, target_size=None): + bbox = self.bbox_check(bbox, target_size) + return (bbox[0], bbox[1], bbox[0] + bbox[2], bbox[1] + bbox[3]) + + def uncrop(self, original_image, crop_image, bbox, border_blending, 
use_square_mask, optional_mask=None): + def inset_border(image, border_width=20, border_color=(0)): + width, height = image.size + bordered_image = Image.new(image.mode, (width, height), border_color) + bordered_image.paste(image, (0, 0)) + draw = ImageDraw.Draw(bordered_image) + draw.rectangle((0, 0, width - 1, height - 1), outline=border_color, width=border_width) + return bordered_image + + if len(original_image) != len(crop_image): + raise ValueError( + f"The number of original_images ({len(original_image)}) and cropped_images ({len(crop_image)}) should be the same") + + # Ensure there are enough bboxes, but drop the excess if there are more bboxes than images + if len(bbox) > len(original_image): + print(f"Warning: Dropping excess bounding boxes. Expected {len(original_image)}, but got {len(bbox)}") + bbox = bbox[:len(original_image)] + elif len(bbox) < len(original_image): + raise ValueError("There should be at least as many bboxes as there are original and cropped images") + + + out_images = [] + + for i in range(len(original_image)): + img = tensor2pil(original_image[i]) + crop = tensor2pil(crop_image[i]) + _bbox = bbox[i] + + bb_x, bb_y, bb_width, bb_height = _bbox + paste_region = self.bbox_to_region((bb_x, bb_y, bb_width, bb_height), img.size) + + # rescale the crop image to fit the paste_region + crop = crop.resize((round(paste_region[2] - paste_region[0]), round(paste_region[3] - paste_region[1]))) + crop_img = crop.convert("RGB") + + # border blending + if border_blending > 1.0: + border_blending = 1.0 + elif border_blending < 0.0: + border_blending = 0.0 + + blend_ratio = (max(crop_img.size) / 2) * float(border_blending) + blend = img.convert("RGBA") + + if use_square_mask: + mask = Image.new("L", img.size, 0) + mask_block = Image.new("L", (paste_region[2] - paste_region[0], paste_region[3] - paste_region[1]), 255) + mask_block = inset_border(mask_block, round(blend_ratio / 2), (0)) + mask.paste(mask_block, paste_region) + else: + if optional_mask is None: + raise ValueError("optional_mask is required when use_square_mask is False") + original_mask = tensor2pil(optional_mask) + original_mask = original_mask.resize((paste_region[2] - paste_region[0], paste_region[3] - paste_region[1])) + mask = Image.new("L", img.size, 0) + mask.paste(original_mask, paste_region) + + mask = mask.filter(ImageFilter.BoxBlur(radius=blend_ratio / 4)) + mask = mask.filter(ImageFilter.GaussianBlur(radius=blend_ratio / 4)) + + blend.paste(crop_img, paste_region) + blend.putalpha(mask) + + img = Image.alpha_composite(img.convert("RGBA"), blend) + out_images.append(img.convert("RGB")) + + output_images = torch.cat([pil2tensor(img) for img in out_images], dim=0) + return (output_images,) + + + +import cv2 +import base64 +class loadImageBase64: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "base64_data": ("STRING", {"default": ""}), + "image_output": (["Hide", "Preview", "Save", "Hide/Save"], {"default": "Preview"}), + "save_prefix": ("STRING", {"default": "ComfyUI"}), + }, + "optional": { + + }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, + } + + RETURN_TYPES = ("IMAGE", "MASK") + OUTPUT_NODE = True + FUNCTION = "load_image" + CATEGORY = "EasyUse/Image/LoadImage" + + def convert_color(self, image,): + if len(image.shape) > 2 and image.shape[2] >= 4: + return cv2.cvtColor(image, cv2.COLOR_BGRA2RGB) + return cv2.cvtColor(image, cv2.COLOR_BGR2RGB) + + def load_image(self, base64_data, image_output, save_prefix, prompt=None, extra_pnginfo=None): + nparr = 
np.frombuffer(base64.b64decode(base64_data), np.uint8) + + result = cv2.imdecode(nparr, cv2.IMREAD_UNCHANGED) + channels = cv2.split(result) + if len(channels) > 3: + mask = channels[3].astype(np.float32) / 255.0 + mask = torch.from_numpy(mask) + else: + mask = torch.ones(channels[0].shape, dtype=torch.float32, device="cpu") + + result = self.convert_color(result) + result = result.astype(np.float32) / 255.0 + new_images = torch.from_numpy(result)[None,] + + results = easySave(new_images, save_prefix, image_output, None, None) + mask = mask.unsqueeze(0) + + if image_output in ("Hide", "Hide/Save"): + return {"ui": {}, + "result": (new_images, mask)} + + return {"ui": {"images": results}, + "result": (new_images, mask)} + +class imageToBase64: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + }, + } + + RETURN_TYPES = ("STRING",) + FUNCTION = "to_base64" + CATEGORY = "EasyUse/Image" + OUTPUT_NODE = True + + def to_base64(self, image, ): + import base64 + from io import BytesIO + + # 将张量图像转换为PIL图像 + pil_image = tensor2pil(image) + + buffered = BytesIO() + pil_image.save(buffered, format="JPEG") + image_bytes = buffered.getvalue() + + base64_str = base64.b64encode(image_bytes).decode("utf-8") + return {"result": (base64_str,)} + +class removeLocalImage: + + def __init__(self): + self.hasFile = False + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "any": (AlwaysEqualProxy("*"),), + "file_name": ("STRING",{"default":""}), + }, + } + + RETURN_TYPES = () + OUTPUT_NODE = True + FUNCTION = "remove" + CATEGORY = "EasyUse/Image" + + + + def remove(self, any, file_name): + self.hasFile = False + def listdir(path, dir_name=''): + for file in os.listdir(path): + file_path = os.path.join(path, file) + if os.path.isdir(file_path): + dir_name = os.path.basename(file_path) + listdir(file_path, dir_name) + else: + file = os.path.join(dir_name, file) + name_without_extension, file_extension = os.path.splitext(file) + if name_without_extension == file_name or file == file_name: + os.remove(os.path.join(folder_paths.input_directory, file)) + self.hasFile = True + break + + listdir(folder_paths.input_directory, '') + + if self.hasFile: + PromptServer.instance.send_sync("easyuse-toast", {"content": "Removed SuccessFully", "type":'success'}) + else: + PromptServer.instance.send_sync("easyuse-toast", {"content": "Removed Failed", "type": 'error'}) + return () + + +# 姿势编辑器 +# class poseEditor: +# @classmethod +# def INPUT_TYPES(self): +# temp_dir = folder_paths.get_temp_directory() +# +# if not os.path.isdir(temp_dir): +# os.makedirs(temp_dir) +# +# temp_dir = folder_paths.get_temp_directory() +# +# return {"required": +# {"image": (sorted(os.listdir(temp_dir)),)}, +# } +# +# RETURN_TYPES = ("IMAGE",) +# FUNCTION = "output_pose" +# +# CATEGORY = "EasyUse/🚫 Deprecated" +# +# def output_pose(self, image): +# image_path = os.path.join(folder_paths.get_temp_directory(), image) +# # print(f"Create: {image_path}") +# +# i = Image.open(image_path) +# image = i.convert("RGB") +# image = np.array(image).astype(np.float32) / 255.0 +# image = torch.from_numpy(image)[None,] +# +# return (image,) +# +# @classmethod +# def IS_CHANGED(self, image): +# image_path = os.path.join( +# folder_paths.get_temp_directory(), image) +# # print(f'Change: {image_path}') +# +# m = hashlib.sha256() +# with open(image_path, 'rb') as f: +# m.update(f.read()) +# return m.digest().hex() + +NODE_CLASS_MAPPINGS = { + "easy imageInsetCrop": imageInsetCrop, + "easy imageCount": imageCount, + 
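+    # (the keys of this mapping are the node type identifiers that get serialized into
+    #  workflow JSON; the display-name mapping below only changes what the UI shows)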
"easy imageSize": imageSize, + "easy imageSizeBySide": imageSizeBySide, + "easy imageSizeByLongerSide": imageSizeByLongerSide, + "easy imagePixelPerfect": imagePixelPerfect, + "easy imageScaleDown": imageScaleDown, + "easy imageScaleDownBy": imageScaleDownBy, + "easy imageScaleDownToSize": imageScaleDownToSize, + "easy imageRatio": imageRatio, + "easy imageConcat": imageConcat, + "easy imageListToImageBatch": imageListToImageBatch, + "easy imageBatchToImageList": imageBatchToImageList, + "easy imageSplitList": imageSplitList, + "easy imageSplitGrid": imageSplitGrid, + "easy imagesSplitImage": imagesSplitImage, + "easy imageCropFromMask": imageCropFromMask, + "easy imageUncropFromBBOX": imageUncropFromBBOX, + "easy imageSave": imageSaveSimple, + "easy imageRemBg": imageRemBg, + "easy imageChooser": imageChooser, + "easy imageColorMatch": imageColorMatch, + "easy imageDetailTransfer": imageDetailTransfer, + "easy imageInterrogator": imageInterrogator, + "easy loadImageBase64": loadImageBase64, + "easy imageToBase64": imageToBase64, + "easy joinImageBatch": JoinImageBatch, + "easy humanSegmentation": humanSegmentation, + "easy removeLocalImage": removeLocalImage, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "easy imageInsetCrop": "ImageInsetCrop", + "easy imageCount": "ImageCount", + "easy imageSize": "ImageSize", + "easy imageSizeBySide": "ImageSize (Side)", + "easy imageSizeByLongerSide": "ImageSize (LongerSide)", + "easy imagePixelPerfect": "ImagePixelPerfect", + "easy imageScaleDown": "Image Scale Down", + "easy imageScaleDownBy": "Image Scale Down By", + "easy imageScaleDownToSize": "Image Scale Down To Size", + "easy imageRatio": "ImageRatio", + "easy imageHSVMask": "ImageHSVMask", + "easy imageConcat": "imageConcat", + "easy imageListToImageBatch": "Image List To Image Batch", + "easy imageBatchToImageList": "Image Batch To Image List", + "easy imageSplitList": "imageSplitList", + "easy imageSplitGrid": "imageSplitGrid", + "easy imagesSplitImage": "imagesSplitImage", + "easy imageCropFromMask": "imageCropFromMask", + "easy imageUncropFromBBOX": "imageUncropFromBBOX", + "easy imageSave": "SaveImage (Simple)", + "easy imageRemBg": "Image Remove Bg", + "easy imageChooser": "Image Chooser", + "easy imageColorMatch": "Image Color Match", + "easy imageDetailTransfer": "Image Detail Transfer", + "easy imageInterrogator": "Image To Prompt", + "easy joinImageBatch": "JoinImageBatch", + "easy loadImageBase64": "Load Image (Base64)", + "easy imageToBase64": "Image To Base64", + "easy humanSegmentation": "Human Segmentation", + "easy removeLocalImage": "Remove Local Image", +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/kolors/__init__.py b/ComfyUI-Easy-Use/py/kolors/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI-Easy-Use/py/kolors/chatglm/__init__.py b/ComfyUI-Easy-Use/py/kolors/chatglm/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI-Easy-Use/py/kolors/chatglm/config_chatglm.json b/ComfyUI-Easy-Use/py/kolors/chatglm/config_chatglm.json new file mode 100644 index 0000000000000000000000000000000000000000..c6e19300822b25ae0a07125bbc171c6581dbeda4 --- /dev/null +++ b/ComfyUI-Easy-Use/py/kolors/chatglm/config_chatglm.json @@ -0,0 +1,42 @@ +{ + "_name_or_path": "THUDM/chatglm3-6b-base", + "model_type": "chatglm", + "architectures": [ + "ChatGLMModel" + ], + "auto_map": { + "AutoConfig": 
"configuration_chatglm.ChatGLMConfig", + "AutoModel": "modeling_chatglm.ChatGLMForConditionalGeneration", + "AutoModelForCausalLM": "modeling_chatglm.ChatGLMForConditionalGeneration", + "AutoModelForSeq2SeqLM": "modeling_chatglm.ChatGLMForConditionalGeneration", + "AutoModelForSequenceClassification": "modeling_chatglm.ChatGLMForSequenceClassification" + }, + "add_bias_linear": false, + "add_qkv_bias": true, + "apply_query_key_layer_scaling": true, + "apply_residual_connection_post_layernorm": false, + "attention_dropout": 0.0, + "attention_softmax_in_fp32": true, + "bias_dropout_fusion": true, + "ffn_hidden_size": 13696, + "fp32_residual_connection": false, + "hidden_dropout": 0.0, + "hidden_size": 4096, + "kv_channels": 128, + "layernorm_epsilon": 1e-05, + "multi_query_attention": true, + "multi_query_group_num": 2, + "num_attention_heads": 32, + "num_layers": 28, + "original_rope": true, + "padded_vocab_size": 65024, + "post_layer_norm": true, + "rmsnorm": true, + "seq_length": 32768, + "use_cache": true, + "torch_dtype": "float16", + "transformers_version": "4.30.2", + "tie_word_embeddings": false, + "eos_token_id": 2, + "pad_token_id": 0 +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/kolors/chatglm/configuration_chatglm.py b/ComfyUI-Easy-Use/py/kolors/chatglm/configuration_chatglm.py new file mode 100644 index 0000000000000000000000000000000000000000..7a0e01bceb1982e9bff680823f97c93e191ddaab --- /dev/null +++ b/ComfyUI-Easy-Use/py/kolors/chatglm/configuration_chatglm.py @@ -0,0 +1,60 @@ +from transformers import PretrainedConfig + +class ChatGLMConfig(PretrainedConfig): + model_type = "chatglm" + def __init__( + self, + num_layers=28, + padded_vocab_size=65024, + hidden_size=4096, + ffn_hidden_size=13696, + kv_channels=128, + num_attention_heads=32, + seq_length=2048, + hidden_dropout=0.0, + classifier_dropout=None, + attention_dropout=0.0, + layernorm_epsilon=1e-5, + rmsnorm=True, + apply_residual_connection_post_layernorm=False, + post_layer_norm=True, + add_bias_linear=False, + add_qkv_bias=False, + bias_dropout_fusion=True, + multi_query_attention=False, + multi_query_group_num=1, + apply_query_key_layer_scaling=True, + attention_softmax_in_fp32=True, + fp32_residual_connection=False, + quantization_bit=0, + pre_seq_len=None, + prefix_projection=False, + **kwargs + ): + self.num_layers = num_layers + self.vocab_size = padded_vocab_size + self.padded_vocab_size = padded_vocab_size + self.hidden_size = hidden_size + self.ffn_hidden_size = ffn_hidden_size + self.kv_channels = kv_channels + self.num_attention_heads = num_attention_heads + self.seq_length = seq_length + self.hidden_dropout = hidden_dropout + self.classifier_dropout = classifier_dropout + self.attention_dropout = attention_dropout + self.layernorm_epsilon = layernorm_epsilon + self.rmsnorm = rmsnorm + self.apply_residual_connection_post_layernorm = apply_residual_connection_post_layernorm + self.post_layer_norm = post_layer_norm + self.add_bias_linear = add_bias_linear + self.add_qkv_bias = add_qkv_bias + self.bias_dropout_fusion = bias_dropout_fusion + self.multi_query_attention = multi_query_attention + self.multi_query_group_num = multi_query_group_num + self.apply_query_key_layer_scaling = apply_query_key_layer_scaling + self.attention_softmax_in_fp32 = attention_softmax_in_fp32 + self.fp32_residual_connection = fp32_residual_connection + self.quantization_bit = quantization_bit + self.pre_seq_len = pre_seq_len + self.prefix_projection = prefix_projection + super().__init__(**kwargs) \ No newline at 
end of file diff --git a/ComfyUI-Easy-Use/py/kolors/chatglm/modeling_chatglm.py b/ComfyUI-Easy-Use/py/kolors/chatglm/modeling_chatglm.py new file mode 100644 index 0000000000000000000000000000000000000000..ecfea1d8a1bb9c79ca23a8336fa119bc9c3ac2d5 --- /dev/null +++ b/ComfyUI-Easy-Use/py/kolors/chatglm/modeling_chatglm.py @@ -0,0 +1,1298 @@ +""" PyTorch ChatGLM model. """ + +import math +import copy +import warnings +import re +import sys + +import torch +import torch.utils.checkpoint +import torch.nn.functional as F +from torch import nn +from torch.nn import CrossEntropyLoss, LayerNorm +from torch.nn import CrossEntropyLoss, LayerNorm, MSELoss, BCEWithLogitsLoss +from torch.nn.utils import skip_init +from typing import Optional, Tuple, Union, List, Callable, Dict, Any +from copy import deepcopy + +from transformers.modeling_outputs import ( + BaseModelOutputWithPast, + CausalLMOutputWithPast, + SequenceClassifierOutputWithPast, +) +from transformers.modeling_utils import PreTrainedModel +from transformers.utils import logging +from transformers.generation.logits_process import LogitsProcessor +from transformers.generation.utils import LogitsProcessorList, StoppingCriteriaList, GenerationConfig, ModelOutput + +try: + from .configuration_chatglm import ChatGLMConfig +except: + from configuration_chatglm import ChatGLMConfig + + +# flags required to enable jit fusion kernels + +if sys.platform != 'darwin': + torch._C._jit_set_profiling_mode(False) + torch._C._jit_set_profiling_executor(False) + torch._C._jit_override_can_fuse_on_cpu(True) + torch._C._jit_override_can_fuse_on_gpu(True) + +logger = logging.get_logger(__name__) + +_CHECKPOINT_FOR_DOC = "THUDM/ChatGLM" +_CONFIG_FOR_DOC = "ChatGLM6BConfig" + +CHATGLM_6B_PRETRAINED_MODEL_ARCHIVE_LIST = [ + "THUDM/chatglm3-6b-base", + # See all ChatGLM models at https://huggingface.co/models?filter=chatglm +] + + +def default_init(cls, *args, **kwargs): + return cls(*args, **kwargs) + + +class InvalidScoreLogitsProcessor(LogitsProcessor): + def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor: + if torch.isnan(scores).any() or torch.isinf(scores).any(): + scores.zero_() + scores[..., 5] = 5e4 + return scores + + +class PrefixEncoder(torch.nn.Module): + """ + The torch.nn model to encode the prefix + Input shape: (batch-size, prefix-length) + Output shape: (batch-size, prefix-length, 2*layers*hidden) + """ + + def __init__(self, config: ChatGLMConfig): + super().__init__() + self.prefix_projection = config.prefix_projection + if self.prefix_projection: + # Use a two-layer MLP to encode the prefix + kv_size = config.num_layers * config.kv_channels * config.multi_query_group_num * 2 + self.embedding = torch.nn.Embedding(config.pre_seq_len, kv_size) + self.trans = torch.nn.Sequential( + torch.nn.Linear(kv_size, config.hidden_size), + torch.nn.Tanh(), + torch.nn.Linear(config.hidden_size, kv_size) + ) + else: + self.embedding = torch.nn.Embedding(config.pre_seq_len, + config.num_layers * config.kv_channels * config.multi_query_group_num * 2) + + def forward(self, prefix: torch.Tensor): + if self.prefix_projection: + prefix_tokens = self.embedding(prefix) + past_key_values = self.trans(prefix_tokens) + else: + past_key_values = self.embedding(prefix) + return past_key_values + + +def split_tensor_along_last_dim( + tensor: torch.Tensor, + num_partitions: int, + contiguous_split_chunks: bool = False, +) -> List[torch.Tensor]: + """Split a tensor along its last dimension. + + Arguments: + tensor: input tensor. 
+ num_partitions: number of partitions to split the tensor + contiguous_split_chunks: If True, make each chunk contiguous + in memory. + + Returns: + A list of Tensors + """ + # Get the size and dimension. + last_dim = tensor.dim() - 1 + last_dim_size = tensor.size()[last_dim] // num_partitions + # Split. + tensor_list = torch.split(tensor, last_dim_size, dim=last_dim) + # Note: torch.split does not create contiguous tensors by default. + if contiguous_split_chunks: + return tuple(chunk.contiguous() for chunk in tensor_list) + + return tensor_list + + +class RotaryEmbedding(nn.Module): + def __init__(self, dim, original_impl=False, device=None, dtype=None): + super().__init__() + inv_freq = 1.0 / (10000 ** (torch.arange(0, dim, 2, device=device).to(dtype=dtype) / dim)) + self.register_buffer("inv_freq", inv_freq) + self.dim = dim + self.original_impl = original_impl + + def forward_impl( + self, seq_len: int, n_elem: int, dtype: torch.dtype, device: torch.device, base: int = 10000 + ): + """Enhanced Transformer with Rotary Position Embedding. + + Derived from: https://github.com/labmlai/annotated_deep_learning_paper_implementations/blob/master/labml_nn/ + transformers/rope/__init__.py. MIT License: + https://github.com/labmlai/annotated_deep_learning_paper_implementations/blob/master/license. + """ + # $\Theta = {\theta_i = 10000^{\frac{2(i-1)}{d}}, i \in [1, 2, ..., \frac{d}{2}]}$ + theta = 1.0 / (base ** (torch.arange(0, n_elem, 2, dtype=torch.float, device=device) / n_elem)) + + # Create position indexes `[0, 1, ..., seq_len - 1]` + seq_idx = torch.arange(seq_len, dtype=torch.float, device=device) + + # Calculate the product of position index and $\theta_i$ + idx_theta = torch.outer(seq_idx, theta).float() + + cache = torch.stack([torch.cos(idx_theta), torch.sin(idx_theta)], dim=-1) + + # this is to mimic the behaviour of complex32, else we will get different results + if dtype in (torch.float16, torch.bfloat16, torch.int8): + cache = cache.bfloat16() if dtype == torch.bfloat16 else cache.half() + return cache + + def forward(self, max_seq_len, offset=0): + return self.forward_impl( + max_seq_len, self.dim, dtype=self.inv_freq.dtype, device=self.inv_freq.device + ) + + +@torch.jit.script +def apply_rotary_pos_emb(x: torch.Tensor, rope_cache: torch.Tensor) -> torch.Tensor: + # x: [sq, b, np, hn] + sq, b, np, hn = x.size(0), x.size(1), x.size(2), x.size(3) + rot_dim = rope_cache.shape[-2] * 2 + x, x_pass = x[..., :rot_dim], x[..., rot_dim:] + # truncate to support variable sizes + rope_cache = rope_cache[:sq] + xshaped = x.reshape(sq, -1, np, rot_dim // 2, 2) + rope_cache = rope_cache.view(sq, -1, 1, xshaped.size(3), 2) + x_out2 = torch.stack( + [ + xshaped[..., 0] * rope_cache[..., 0] - xshaped[..., 1] * rope_cache[..., 1], + xshaped[..., 1] * rope_cache[..., 0] + xshaped[..., 0] * rope_cache[..., 1], + ], + -1, + ) + x_out2 = x_out2.flatten(3) + return torch.cat((x_out2, x_pass), dim=-1) + + +class RMSNorm(torch.nn.Module): + def __init__(self, normalized_shape, eps=1e-5, device=None, dtype=None, **kwargs): + super().__init__() + self.weight = torch.nn.Parameter(torch.empty(normalized_shape, device=device, dtype=dtype)) + self.eps = eps + + def forward(self, hidden_states: torch.Tensor): + input_dtype = hidden_states.dtype + variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True) + hidden_states = hidden_states * torch.rsqrt(variance + self.eps) + + return (self.weight * hidden_states).to(input_dtype) + + +class CoreAttention(torch.nn.Module): + def __init__(self, 
config: ChatGLMConfig, layer_number): + super(CoreAttention, self).__init__() + + self.apply_query_key_layer_scaling = config.apply_query_key_layer_scaling + self.attention_softmax_in_fp32 = config.attention_softmax_in_fp32 + if self.apply_query_key_layer_scaling: + self.attention_softmax_in_fp32 = True + self.layer_number = max(1, layer_number) + + projection_size = config.kv_channels * config.num_attention_heads + + # Per attention head and per partition values. + self.hidden_size_per_partition = projection_size + self.hidden_size_per_attention_head = projection_size // config.num_attention_heads + self.num_attention_heads_per_partition = config.num_attention_heads + + coeff = None + self.norm_factor = math.sqrt(self.hidden_size_per_attention_head) + if self.apply_query_key_layer_scaling: + coeff = self.layer_number + self.norm_factor *= coeff + self.coeff = coeff + + self.attention_dropout = torch.nn.Dropout(config.attention_dropout) + + def forward(self, query_layer, key_layer, value_layer, attention_mask): + pytorch_major_version = int(torch.__version__.split('.')[0]) + if pytorch_major_version >= 2: + query_layer, key_layer, value_layer = [k.permute(1, 2, 0, 3) for k in [query_layer, key_layer, value_layer]] + if attention_mask is None and query_layer.shape[2] == key_layer.shape[2]: + context_layer = torch.nn.functional.scaled_dot_product_attention(query_layer, key_layer, value_layer, + is_causal=True) + else: + if attention_mask is not None: + attention_mask = ~attention_mask + context_layer = torch.nn.functional.scaled_dot_product_attention(query_layer, key_layer, value_layer, + attention_mask) + context_layer = context_layer.permute(2, 0, 1, 3) + new_context_layer_shape = context_layer.size()[:-2] + (self.hidden_size_per_partition,) + context_layer = context_layer.reshape(*new_context_layer_shape) + else: + # Raw attention scores + + # [b, np, sq, sk] + output_size = (query_layer.size(1), query_layer.size(2), query_layer.size(0), key_layer.size(0)) + + # [sq, b, np, hn] -> [sq, b * np, hn] + query_layer = query_layer.view(output_size[2], output_size[0] * output_size[1], -1) + # [sk, b, np, hn] -> [sk, b * np, hn] + key_layer = key_layer.view(output_size[3], output_size[0] * output_size[1], -1) + + # preallocting input tensor: [b * np, sq, sk] + matmul_input_buffer = torch.empty( + output_size[0] * output_size[1], output_size[2], output_size[3], dtype=query_layer.dtype, + device=query_layer.device + ) + + # Raw attention scores. 
[b * np, sq, sk] + matmul_result = torch.baddbmm( + matmul_input_buffer, + query_layer.transpose(0, 1), # [b * np, sq, hn] + key_layer.transpose(0, 1).transpose(1, 2), # [b * np, hn, sk] + beta=0.0, + alpha=(1.0 / self.norm_factor), + ) + + # change view to [b, np, sq, sk] + attention_scores = matmul_result.view(*output_size) + + # =========================== + # Attention probs and dropout + # =========================== + + # attention scores and attention mask [b, np, sq, sk] + if self.attention_softmax_in_fp32: + attention_scores = attention_scores.float() + if self.coeff is not None: + attention_scores = attention_scores * self.coeff + if attention_mask is None and attention_scores.shape[2] == attention_scores.shape[3]: + attention_mask = torch.ones(output_size[0], 1, output_size[2], output_size[3], + device=attention_scores.device, dtype=torch.bool) + attention_mask.tril_() + attention_mask = ~attention_mask + if attention_mask is not None: + attention_scores = attention_scores.masked_fill(attention_mask, float("-inf")) + attention_probs = F.softmax(attention_scores, dim=-1) + attention_probs = attention_probs.type_as(value_layer) + + # This is actually dropping out entire tokens to attend to, which might + # seem a bit unusual, but is taken from the original Transformer paper. + attention_probs = self.attention_dropout(attention_probs) + # ========================= + # Context layer. [sq, b, hp] + # ========================= + + # value_layer -> context layer. + # [sk, b, np, hn] --> [b, np, sq, hn] + + # context layer shape: [b, np, sq, hn] + output_size = (value_layer.size(1), value_layer.size(2), query_layer.size(0), value_layer.size(3)) + # change view [sk, b * np, hn] + value_layer = value_layer.view(value_layer.size(0), output_size[0] * output_size[1], -1) + # change view [b * np, sq, sk] + attention_probs = attention_probs.view(output_size[0] * output_size[1], output_size[2], -1) + # matmul: [b * np, sq, hn] + context_layer = torch.bmm(attention_probs, value_layer.transpose(0, 1)) + # change view [b, np, sq, hn] + context_layer = context_layer.view(*output_size) + # [b, np, sq, hn] --> [sq, b, np, hn] + context_layer = context_layer.permute(2, 0, 1, 3).contiguous() + # [sq, b, np, hn] --> [sq, b, hp] + new_context_layer_shape = context_layer.size()[:-2] + (self.hidden_size_per_partition,) + context_layer = context_layer.view(*new_context_layer_shape) + + return context_layer + + +class SelfAttention(torch.nn.Module): + """Parallel self-attention layer abstract class. + + Self-attention layer takes input with size [s, b, h] + and returns output of the same size. + """ + + def __init__(self, config: ChatGLMConfig, layer_number, device=None): + super(SelfAttention, self).__init__() + self.layer_number = max(1, layer_number) + + self.projection_size = config.kv_channels * config.num_attention_heads + + # Per attention head and per partition values. 
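+        # With multi_query_attention enabled (bundled config_chatglm.json: 32 query heads,
+        # 2 KV groups, kv_channels=128) the combined QKV projection below is
+        # 4096 + 2 * 128 * 2 = 4608 wide, instead of 3 * 4096 = 12288 for full multi-head KV.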
+ self.hidden_size_per_attention_head = self.projection_size // config.num_attention_heads + self.num_attention_heads_per_partition = config.num_attention_heads + + self.multi_query_attention = config.multi_query_attention + self.qkv_hidden_size = 3 * self.projection_size + if self.multi_query_attention: + self.num_multi_query_groups_per_partition = config.multi_query_group_num + self.qkv_hidden_size = ( + self.projection_size + 2 * self.hidden_size_per_attention_head * config.multi_query_group_num + ) + self.query_key_value = nn.Linear(config.hidden_size, self.qkv_hidden_size, + bias=config.add_bias_linear or config.add_qkv_bias, + device=device, **_config_to_kwargs(config) + ) + + self.core_attention = CoreAttention(config, self.layer_number) + + # Output. + self.dense = nn.Linear(self.projection_size, config.hidden_size, bias=config.add_bias_linear, + device=device, **_config_to_kwargs(config) + ) + + def _allocate_memory(self, inference_max_sequence_len, batch_size, device=None, dtype=None): + if self.multi_query_attention: + num_attention_heads = self.num_multi_query_groups_per_partition + else: + num_attention_heads = self.num_attention_heads_per_partition + return torch.empty( + inference_max_sequence_len, + batch_size, + num_attention_heads, + self.hidden_size_per_attention_head, + dtype=dtype, + device=device, + ) + + def forward( + self, hidden_states, attention_mask, rotary_pos_emb, kv_cache=None, use_cache=True + ): + # hidden_states: [sq, b, h] + + # ================================================= + # Pre-allocate memory for key-values for inference. + # ================================================= + # ===================== + # Query, Key, and Value + # ===================== + + # Attention heads [sq, b, h] --> [sq, b, (np * 3 * hn)] + mixed_x_layer = self.query_key_value(hidden_states) + + if self.multi_query_attention: + (query_layer, key_layer, value_layer) = mixed_x_layer.split( + [ + self.num_attention_heads_per_partition * self.hidden_size_per_attention_head, + self.num_multi_query_groups_per_partition * self.hidden_size_per_attention_head, + self.num_multi_query_groups_per_partition * self.hidden_size_per_attention_head, + ], + dim=-1, + ) + query_layer = query_layer.view( + query_layer.size()[:-1] + (self.num_attention_heads_per_partition, self.hidden_size_per_attention_head) + ) + key_layer = key_layer.view( + key_layer.size()[:-1] + (self.num_multi_query_groups_per_partition, self.hidden_size_per_attention_head) + ) + value_layer = value_layer.view( + value_layer.size()[:-1] + + (self.num_multi_query_groups_per_partition, self.hidden_size_per_attention_head) + ) + else: + new_tensor_shape = mixed_x_layer.size()[:-1] + \ + (self.num_attention_heads_per_partition, + 3 * self.hidden_size_per_attention_head) + mixed_x_layer = mixed_x_layer.view(*new_tensor_shape) + + # [sq, b, np, 3 * hn] --> 3 [sq, b, np, hn] + (query_layer, key_layer, value_layer) = split_tensor_along_last_dim(mixed_x_layer, 3) + + # apply relative positional encoding (rotary embedding) + if rotary_pos_emb is not None: + query_layer = apply_rotary_pos_emb(query_layer, rotary_pos_emb) + key_layer = apply_rotary_pos_emb(key_layer, rotary_pos_emb) + + # adjust key and value for inference + if kv_cache is not None: + cache_k, cache_v = kv_cache + key_layer = torch.cat((cache_k, key_layer), dim=0) + value_layer = torch.cat((cache_v, value_layer), dim=0) + if use_cache: + kv_cache = (key_layer, value_layer) + else: + kv_cache = None + + if self.multi_query_attention: + key_layer = 
key_layer.unsqueeze(-2) + key_layer = key_layer.expand( + -1, -1, -1, self.num_attention_heads_per_partition // self.num_multi_query_groups_per_partition, -1 + ) + key_layer = key_layer.contiguous().view( + key_layer.size()[:2] + (self.num_attention_heads_per_partition, self.hidden_size_per_attention_head) + ) + value_layer = value_layer.unsqueeze(-2) + value_layer = value_layer.expand( + -1, -1, -1, self.num_attention_heads_per_partition // self.num_multi_query_groups_per_partition, -1 + ) + value_layer = value_layer.contiguous().view( + value_layer.size()[:2] + (self.num_attention_heads_per_partition, self.hidden_size_per_attention_head) + ) + + # ================================== + # core attention computation + # ================================== + + context_layer = self.core_attention(query_layer, key_layer, value_layer, attention_mask) + + # ================= + # Output. [sq, b, h] + # ================= + + output = self.dense(context_layer) + + return output, kv_cache + + +def _config_to_kwargs(args): + common_kwargs = { + "dtype": args.torch_dtype, + } + return common_kwargs + + +class MLP(torch.nn.Module): + """MLP. + + MLP will take the input with h hidden state, project it to 4*h + hidden dimension, perform nonlinear transformation, and project the + state back into h hidden dimension. + """ + + def __init__(self, config: ChatGLMConfig, device=None): + super(MLP, self).__init__() + + self.add_bias = config.add_bias_linear + + # Project to 4h. If using swiglu double the output width, see https://arxiv.org/pdf/2002.05202.pdf + self.dense_h_to_4h = nn.Linear( + config.hidden_size, + config.ffn_hidden_size * 2, + bias=self.add_bias, + device=device, + **_config_to_kwargs(config) + ) + + def swiglu(x): + x = torch.chunk(x, 2, dim=-1) + return F.silu(x[0]) * x[1] + + self.activation_func = swiglu + + # Project back to h. + self.dense_4h_to_h = nn.Linear( + config.ffn_hidden_size, + config.hidden_size, + bias=self.add_bias, + device=device, + **_config_to_kwargs(config) + ) + + def forward(self, hidden_states): + # [s, b, 4hp] + intermediate_parallel = self.dense_h_to_4h(hidden_states) + intermediate_parallel = self.activation_func(intermediate_parallel) + # [s, b, h] + output = self.dense_4h_to_h(intermediate_parallel) + return output + + +class GLMBlock(torch.nn.Module): + """A single transformer layer. + + Transformer layer takes input with size [s, b, h] and returns an + output of the same size. + """ + + def __init__(self, config: ChatGLMConfig, layer_number, device=None): + super(GLMBlock, self).__init__() + self.layer_number = layer_number + + self.apply_residual_connection_post_layernorm = config.apply_residual_connection_post_layernorm + + self.fp32_residual_connection = config.fp32_residual_connection + + LayerNormFunc = RMSNorm if config.rmsnorm else LayerNorm + # Layernorm on the input data. + self.input_layernorm = LayerNormFunc(config.hidden_size, eps=config.layernorm_epsilon, device=device, + dtype=config.torch_dtype) + + # Self attention. 
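+        # (pre-norm block: the attention consumes the output of input_layernorm; forward()
+        #  then takes the residual from either the normalized or the raw input, depending on
+        #  apply_residual_connection_post_layernorm)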
+ self.self_attention = SelfAttention(config, layer_number, device=device) + self.hidden_dropout = config.hidden_dropout + + # Layernorm on the attention output + self.post_attention_layernorm = LayerNormFunc(config.hidden_size, eps=config.layernorm_epsilon, device=device, + dtype=config.torch_dtype) + + # MLP + self.mlp = MLP(config, device=device) + + def forward( + self, hidden_states, attention_mask, rotary_pos_emb, kv_cache=None, use_cache=True, + ): + # hidden_states: [s, b, h] + + # Layer norm at the beginning of the transformer layer. + layernorm_output = self.input_layernorm(hidden_states) + # Self attention. + attention_output, kv_cache = self.self_attention( + layernorm_output, + attention_mask, + rotary_pos_emb, + kv_cache=kv_cache, + use_cache=use_cache + ) + + # Residual connection. + if self.apply_residual_connection_post_layernorm: + residual = layernorm_output + else: + residual = hidden_states + + layernorm_input = torch.nn.functional.dropout(attention_output, p=self.hidden_dropout, training=self.training) + layernorm_input = residual + layernorm_input + + # Layer norm post the self attention. + layernorm_output = self.post_attention_layernorm(layernorm_input) + + # MLP. + mlp_output = self.mlp(layernorm_output) + + # Second residual connection. + if self.apply_residual_connection_post_layernorm: + residual = layernorm_output + else: + residual = layernorm_input + + output = torch.nn.functional.dropout(mlp_output, p=self.hidden_dropout, training=self.training) + output = residual + output + + return output, kv_cache + + +class GLMTransformer(torch.nn.Module): + """Transformer class.""" + + def __init__(self, config: ChatGLMConfig, device=None): + super(GLMTransformer, self).__init__() + + self.fp32_residual_connection = config.fp32_residual_connection + self.post_layer_norm = config.post_layer_norm + + # Number of layers. + self.num_layers = config.num_layers + + # Transformer layers. + def build_layer(layer_number): + return GLMBlock(config, layer_number, device=device) + + self.layers = torch.nn.ModuleList([build_layer(i + 1) for i in range(self.num_layers)]) + + if self.post_layer_norm: + LayerNormFunc = RMSNorm if config.rmsnorm else LayerNorm + # Final layer norm before output. + self.final_layernorm = LayerNormFunc(config.hidden_size, eps=config.layernorm_epsilon, device=device, + dtype=config.torch_dtype) + + self.gradient_checkpointing = False + + def _get_layer(self, layer_number): + return self.layers[layer_number] + + def forward( + self, hidden_states, attention_mask, rotary_pos_emb, kv_caches=None, + use_cache: Optional[bool] = True, + output_hidden_states: Optional[bool] = False, + ): + if not kv_caches: + kv_caches = [None for _ in range(self.num_layers)] + presents = () if use_cache else None + if self.gradient_checkpointing and self.training: + if use_cache: + logger.warning_once( + "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..." 
+ ) + use_cache = False + + all_self_attentions = None + all_hidden_states = () if output_hidden_states else None + for index in range(self.num_layers): + if output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states,) + + layer = self._get_layer(index) + if self.gradient_checkpointing and self.training: + layer_ret = torch.utils.checkpoint.checkpoint( + layer, + hidden_states, + attention_mask, + rotary_pos_emb, + kv_caches[index], + use_cache + ) + else: + layer_ret = layer( + hidden_states, + attention_mask, + rotary_pos_emb, + kv_cache=kv_caches[index], + use_cache=use_cache + ) + hidden_states, kv_cache = layer_ret + if use_cache: + presents = presents + (kv_cache,) + + if output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states,) + + # Final layer norm. + if self.post_layer_norm: + hidden_states = self.final_layernorm(hidden_states) + + return hidden_states, presents, all_hidden_states, all_self_attentions + + +class ChatGLMPreTrainedModel(PreTrainedModel): + """ + An abstract class to handle weights initialization and + a simple interface for downloading and loading pretrained models. + """ + + is_parallelizable = False + supports_gradient_checkpointing = True + config_class = ChatGLMConfig + base_model_prefix = "transformer" + _no_split_modules = ["GLMBlock"] + + def _init_weights(self, module: nn.Module): + """Initialize the weights.""" + return + + def get_masks(self, input_ids, past_key_values, padding_mask=None): + batch_size, seq_length = input_ids.shape + full_attention_mask = torch.ones(batch_size, seq_length, seq_length, device=input_ids.device) + full_attention_mask.tril_() + past_length = 0 + if past_key_values: + past_length = past_key_values[0][0].shape[0] + if past_length: + full_attention_mask = torch.cat((torch.ones(batch_size, seq_length, past_length, + device=input_ids.device), full_attention_mask), dim=-1) + if padding_mask is not None: + full_attention_mask = full_attention_mask * padding_mask.unsqueeze(1) + if not past_length and padding_mask is not None: + full_attention_mask -= padding_mask.unsqueeze(-1) - 1 + full_attention_mask = (full_attention_mask < 0.5).bool() + full_attention_mask.unsqueeze_(1) + return full_attention_mask + + def get_position_ids(self, input_ids, device): + batch_size, seq_length = input_ids.shape + position_ids = torch.arange(seq_length, dtype=torch.long, device=device).unsqueeze(0).repeat(batch_size, 1) + return position_ids + + def _set_gradient_checkpointing(self, module, value=False): + if isinstance(module, GLMTransformer): + module.gradient_checkpointing = value + + +class Embedding(torch.nn.Module): + """Language model embeddings.""" + + def __init__(self, config: ChatGLMConfig, device=None): + super(Embedding, self).__init__() + + self.hidden_size = config.hidden_size + # Word embeddings (parallel). + self.word_embeddings = nn.Embedding( + config.padded_vocab_size, + self.hidden_size, + dtype=config.torch_dtype, + device=device + ) + self.fp32_residual_connection = config.fp32_residual_connection + + def forward(self, input_ids): + # Embeddings. + words_embeddings = self.word_embeddings(input_ids) + embeddings = words_embeddings + # Data format change to avoid explicit tranposes : [b s h] --> [s b h]. + embeddings = embeddings.transpose(0, 1).contiguous() + # If the input flag for fp32 residual connection is set, convert for float. 
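+        # (converting here lets the residual stream stay in fp32 even when the rest of the
+        #  model weights/activations run in half precision)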
+ if self.fp32_residual_connection: + embeddings = embeddings.float() + return embeddings + + +class ChatGLMModel(ChatGLMPreTrainedModel): + def __init__(self, config: ChatGLMConfig, device=None, empty_init=True): + super().__init__(config) + if empty_init: + init_method = skip_init + else: + init_method = default_init + init_kwargs = {} + if device is not None: + init_kwargs["device"] = device + self.embedding = init_method(Embedding, config, **init_kwargs) + self.num_layers = config.num_layers + self.multi_query_group_num = config.multi_query_group_num + self.kv_channels = config.kv_channels + + # Rotary positional embeddings + self.seq_length = config.seq_length + rotary_dim = ( + config.hidden_size // config.num_attention_heads if config.kv_channels is None else config.kv_channels + ) + + self.rotary_pos_emb = RotaryEmbedding(rotary_dim // 2, original_impl=config.original_rope, device=device, + dtype=config.torch_dtype) + self.encoder = init_method(GLMTransformer, config, **init_kwargs) + self.output_layer = init_method(nn.Linear, config.hidden_size, config.padded_vocab_size, bias=False, + dtype=config.torch_dtype, **init_kwargs) + self.pre_seq_len = config.pre_seq_len + self.prefix_projection = config.prefix_projection + if self.pre_seq_len is not None: + for param in self.parameters(): + param.requires_grad = False + self.prefix_tokens = torch.arange(self.pre_seq_len).long() + self.prefix_encoder = PrefixEncoder(config) + self.dropout = torch.nn.Dropout(0.1) + + def get_input_embeddings(self): + return self.embedding.word_embeddings + + def get_prompt(self, batch_size, device, dtype=torch.half): + prefix_tokens = self.prefix_tokens.unsqueeze(0).expand(batch_size, -1).to(device) + past_key_values = self.prefix_encoder(prefix_tokens).type(dtype) + past_key_values = past_key_values.view( + batch_size, + self.pre_seq_len, + self.num_layers * 2, + self.multi_query_group_num, + self.kv_channels + ) + # seq_len, b, nh, hidden_size + past_key_values = self.dropout(past_key_values) + past_key_values = past_key_values.permute([2, 1, 0, 3, 4]).split(2) + return past_key_values + + def forward( + self, + input_ids, + position_ids: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.BoolTensor] = None, + full_attention_mask: Optional[torch.BoolTensor] = None, + past_key_values: Optional[Tuple[Tuple[torch.Tensor, torch.Tensor], ...]] = None, + inputs_embeds: Optional[torch.Tensor] = None, + use_cache: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ): + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + use_cache = use_cache if use_cache is not None else self.config.use_cache + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + batch_size, seq_length = input_ids.shape + + if inputs_embeds is None: + inputs_embeds = self.embedding(input_ids) + + if self.pre_seq_len is not None: + if past_key_values is None: + past_key_values = self.get_prompt(batch_size=batch_size, device=input_ids.device, + dtype=inputs_embeds.dtype) + if attention_mask is not None: + attention_mask = torch.cat([attention_mask.new_ones((batch_size, self.pre_seq_len)), + attention_mask], dim=-1) + + if full_attention_mask is None: + if (attention_mask is not None and not attention_mask.all()) or (past_key_values and seq_length != 1): + full_attention_mask = self.get_masks(input_ids, past_key_values, padding_mask=attention_mask) + + # Rotary 
positional embeddings + rotary_pos_emb = self.rotary_pos_emb(self.seq_length) + if position_ids is not None: + rotary_pos_emb = rotary_pos_emb[position_ids] + else: + rotary_pos_emb = rotary_pos_emb[None, :seq_length] + rotary_pos_emb = rotary_pos_emb.transpose(0, 1).contiguous() + + # Run encoder. + hidden_states, presents, all_hidden_states, all_self_attentions = self.encoder( + inputs_embeds, full_attention_mask, rotary_pos_emb=rotary_pos_emb, + kv_caches=past_key_values, use_cache=use_cache, output_hidden_states=output_hidden_states + ) + + if not return_dict: + return tuple(v for v in [hidden_states, presents, all_hidden_states, all_self_attentions] if v is not None) + + return BaseModelOutputWithPast( + last_hidden_state=hidden_states, + past_key_values=presents, + hidden_states=all_hidden_states, + attentions=all_self_attentions, + ) + + def quantize(self, weight_bit_width: int): + from .quantization import quantize + quantize(self.encoder, weight_bit_width) + return self + + +class ChatGLMForConditionalGeneration(ChatGLMPreTrainedModel): + def __init__(self, config: ChatGLMConfig, empty_init=True, device=None): + super().__init__(config) + + self.max_sequence_length = config.max_length + self.transformer = ChatGLMModel(config, empty_init=empty_init, device=device) + self.config = config + self.quantized = False + + if self.config.quantization_bit: + self.quantize(self.config.quantization_bit, empty_init=True) + + def _update_model_kwargs_for_generation( + self, + outputs: ModelOutput, + model_kwargs: Dict[str, Any], + is_encoder_decoder: bool = False, + standardize_cache_format: bool = False, + ) -> Dict[str, Any]: + # update past_key_values + model_kwargs["past_key_values"] = self._extract_past_from_model_output( + outputs, standardize_cache_format=standardize_cache_format + ) + + # update attention mask + if "attention_mask" in model_kwargs: + attention_mask = model_kwargs["attention_mask"] + model_kwargs["attention_mask"] = torch.cat( + [attention_mask, attention_mask.new_ones((attention_mask.shape[0], 1))], dim=-1 + ) + + # update position ids + if "position_ids" in model_kwargs: + position_ids = model_kwargs["position_ids"] + new_position_id = position_ids[..., -1:].clone() + new_position_id += 1 + model_kwargs["position_ids"] = torch.cat( + [position_ids, new_position_id], dim=-1 + ) + + model_kwargs["is_first_forward"] = False + return model_kwargs + + def prepare_inputs_for_generation( + self, + input_ids: torch.LongTensor, + past_key_values: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.Tensor] = None, + use_cache: Optional[bool] = None, + is_first_forward: bool = True, + **kwargs + ) -> dict: + # only last token for input_ids if past is not None + if position_ids is None: + position_ids = self.get_position_ids(input_ids, device=input_ids.device) + if not is_first_forward: + if past_key_values is not None: + position_ids = position_ids[..., -1:] + input_ids = input_ids[:, -1:] + return { + "input_ids": input_ids, + "past_key_values": past_key_values, + "position_ids": position_ids, + "attention_mask": attention_mask, + "return_last_logit": True, + "use_cache": use_cache + } + + def forward( + self, + input_ids: Optional[torch.Tensor] = None, + position_ids: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + past_key_values: Optional[Tuple[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.Tensor] = None, + labels: Optional[torch.Tensor] = None, + use_cache: 
Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + return_last_logit: Optional[bool] = False, + ): + use_cache = use_cache if use_cache is not None else self.config.use_cache + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + transformer_outputs = self.transformer( + input_ids=input_ids, + position_ids=position_ids, + attention_mask=attention_mask, + past_key_values=past_key_values, + inputs_embeds=inputs_embeds, + use_cache=use_cache, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + hidden_states = transformer_outputs[0] + if return_last_logit: + hidden_states = hidden_states[-1:] + lm_logits = self.transformer.output_layer(hidden_states) + lm_logits = lm_logits.transpose(0, 1).contiguous() + + loss = None + if labels is not None: + lm_logits = lm_logits.to(torch.float32) + + # Shift so that tokens < n predict n + shift_logits = lm_logits[..., :-1, :].contiguous() + shift_labels = labels[..., 1:].contiguous() + # Flatten the tokens + loss_fct = CrossEntropyLoss(ignore_index=-100) + loss = loss_fct(shift_logits.view(-1, shift_logits.size(-1)), shift_labels.view(-1)) + + lm_logits = lm_logits.to(hidden_states.dtype) + loss = loss.to(hidden_states.dtype) + + if not return_dict: + output = (lm_logits,) + transformer_outputs[1:] + return ((loss,) + output) if loss is not None else output + + return CausalLMOutputWithPast( + loss=loss, + logits=lm_logits, + past_key_values=transformer_outputs.past_key_values, + hidden_states=transformer_outputs.hidden_states, + attentions=transformer_outputs.attentions, + ) + + @staticmethod + def _reorder_cache( + past: Tuple[Tuple[torch.Tensor, torch.Tensor], ...], beam_idx: torch.LongTensor + ) -> Tuple[Tuple[torch.Tensor, torch.Tensor], ...]: + """ + This function is used to re-order the `past_key_values` cache if [`~PreTrainedModel.beam_search`] or + [`~PreTrainedModel.beam_sample`] is called. This is required to match `past_key_values` with the correct + beam_idx at every generation step. + + Output shares the same memory storage as `past`. 
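+        beam_idx is applied along dimension 1 because the key/value caches here are laid
+        out sequence-first, i.e. [seq_len, batch, num_heads, head_dim], not batch-first.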
+ """ + return tuple( + ( + layer_past[0].index_select(1, beam_idx.to(layer_past[0].device)), + layer_past[1].index_select(1, beam_idx.to(layer_past[1].device)), + ) + for layer_past in past + ) + + def process_response(self, output, history): + content = "" + history = deepcopy(history) + for response in output.split("<|assistant|>"): + metadata, content = response.split("\n", maxsplit=1) + if not metadata.strip(): + content = content.strip() + history.append({"role": "assistant", "metadata": metadata, "content": content}) + content = content.replace("[[训练时间]]", "2023年") + else: + history.append({"role": "assistant", "metadata": metadata, "content": content}) + if history[0]["role"] == "system" and "tools" in history[0]: + content = "\n".join(content.split("\n")[1:-1]) + def tool_call(**kwargs): + return kwargs + parameters = eval(content) + content = {"name": metadata.strip(), "parameters": parameters} + else: + content = {"name": metadata.strip(), "content": content} + return content, history + + @torch.inference_mode() + def chat(self, tokenizer, query: str, history: List[Tuple[str, str]] = None, role: str = "user", + max_length: int = 8192, num_beams=1, do_sample=True, top_p=0.8, temperature=0.8, logits_processor=None, + **kwargs): + if history is None: + history = [] + if logits_processor is None: + logits_processor = LogitsProcessorList() + logits_processor.append(InvalidScoreLogitsProcessor()) + gen_kwargs = {"max_length": max_length, "num_beams": num_beams, "do_sample": do_sample, "top_p": top_p, + "temperature": temperature, "logits_processor": logits_processor, **kwargs} + inputs = tokenizer.build_chat_input(query, history=history, role=role) + inputs = inputs.to(self.device) + eos_token_id = [tokenizer.eos_token_id, tokenizer.get_command("<|user|>"), + tokenizer.get_command("<|observation|>")] + outputs = self.generate(**inputs, **gen_kwargs, eos_token_id=eos_token_id) + outputs = outputs.tolist()[0][len(inputs["input_ids"][0]):-1] + response = tokenizer.decode(outputs) + history.append({"role": role, "content": query}) + response, history = self.process_response(response, history) + return response, history + + @torch.inference_mode() + def stream_chat(self, tokenizer, query: str, history: List[Tuple[str, str]] = None, role: str = "user", + past_key_values=None,max_length: int = 8192, do_sample=True, top_p=0.8, temperature=0.8, + logits_processor=None, return_past_key_values=False, **kwargs): + if history is None: + history = [] + if logits_processor is None: + logits_processor = LogitsProcessorList() + logits_processor.append(InvalidScoreLogitsProcessor()) + eos_token_id = [tokenizer.eos_token_id, tokenizer.get_command("<|user|>"), + tokenizer.get_command("<|observation|>")] + gen_kwargs = {"max_length": max_length, "do_sample": do_sample, "top_p": top_p, + "temperature": temperature, "logits_processor": logits_processor, **kwargs} + if past_key_values is None: + inputs = tokenizer.build_chat_input(query, history=history, role=role) + else: + inputs = tokenizer.build_chat_input(query, role=role) + inputs = inputs.to(self.device) + if past_key_values is not None: + past_length = past_key_values[0][0].shape[0] + if self.transformer.pre_seq_len is not None: + past_length -= self.transformer.pre_seq_len + inputs.position_ids += past_length + attention_mask = inputs.attention_mask + attention_mask = torch.cat((attention_mask.new_ones(1, past_length), attention_mask), dim=1) + inputs['attention_mask'] = attention_mask + history.append({"role": role, "content": query}) + for outputs 
in self.stream_generate(**inputs, past_key_values=past_key_values, + eos_token_id=eos_token_id, return_past_key_values=return_past_key_values, + **gen_kwargs): + if return_past_key_values: + outputs, past_key_values = outputs + outputs = outputs.tolist()[0][len(inputs["input_ids"][0]):-1] + response = tokenizer.decode(outputs) + if response and response[-1] != "�": + response, new_history = self.process_response(response, history) + if return_past_key_values: + yield response, new_history, past_key_values + else: + yield response, new_history + + @torch.inference_mode() + def stream_generate( + self, + input_ids, + generation_config: Optional[GenerationConfig] = None, + logits_processor: Optional[LogitsProcessorList] = None, + stopping_criteria: Optional[StoppingCriteriaList] = None, + prefix_allowed_tokens_fn: Optional[Callable[[int, torch.Tensor], List[int]]] = None, + return_past_key_values=False, + **kwargs, + ): + batch_size, input_ids_seq_length = input_ids.shape[0], input_ids.shape[-1] + + if generation_config is None: + generation_config = self.generation_config + generation_config = copy.deepcopy(generation_config) + model_kwargs = generation_config.update(**kwargs) + model_kwargs["use_cache"] = generation_config.use_cache + bos_token_id, eos_token_id = generation_config.bos_token_id, generation_config.eos_token_id + + if isinstance(eos_token_id, int): + eos_token_id = [eos_token_id] + eos_token_id_tensor = torch.tensor(eos_token_id).to(input_ids.device) if eos_token_id is not None else None + + has_default_max_length = kwargs.get("max_length") is None and generation_config.max_length is not None + if has_default_max_length and generation_config.max_new_tokens is None: + warnings.warn( + f"Using `max_length`'s default ({generation_config.max_length}) to control the generation length. " + "This behaviour is deprecated and will be removed from the config in v5 of Transformers -- we" + " recommend using `max_new_tokens` to control the maximum length of the generation.", + UserWarning, + ) + elif generation_config.max_new_tokens is not None: + generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length + if not has_default_max_length: + logger.warn( + f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(=" + f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. " + "Please refer to the documentation for more information. " + "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)", + UserWarning, + ) + + if input_ids_seq_length >= generation_config.max_length: + input_ids_string = "decoder_input_ids" if self.config.is_encoder_decoder else "input_ids" + logger.warning( + f"Input length of {input_ids_string} is {input_ids_seq_length}, but `max_length` is set to" + f" {generation_config.max_length}. This can lead to unexpected behavior. You should consider" + " increasing `max_new_tokens`." + ) + + # 2. 
Set generation parameters if not already defined + logits_processor = logits_processor if logits_processor is not None else LogitsProcessorList() + stopping_criteria = stopping_criteria if stopping_criteria is not None else StoppingCriteriaList() + + logits_processor = self._get_logits_processor( + generation_config=generation_config, + input_ids_seq_length=input_ids_seq_length, + encoder_input_ids=input_ids, + prefix_allowed_tokens_fn=prefix_allowed_tokens_fn, + logits_processor=logits_processor, + ) + + stopping_criteria = self._get_stopping_criteria( + generation_config=generation_config, stopping_criteria=stopping_criteria + ) + logits_warper = self._get_logits_warper(generation_config) + + unfinished_sequences = input_ids.new(input_ids.shape[0]).fill_(1) + scores = None + while True: + model_inputs = self.prepare_inputs_for_generation(input_ids, **model_kwargs) + # forward pass to get next token + outputs = self( + **model_inputs, + return_dict=True, + output_attentions=False, + output_hidden_states=False, + ) + + next_token_logits = outputs.logits[:, -1, :] + + # pre-process distribution + next_token_scores = logits_processor(input_ids, next_token_logits) + next_token_scores = logits_warper(input_ids, next_token_scores) + + # sample + probs = nn.functional.softmax(next_token_scores, dim=-1) + if generation_config.do_sample: + next_tokens = torch.multinomial(probs, num_samples=1).squeeze(1) + else: + next_tokens = torch.argmax(probs, dim=-1) + # update generated ids, model inputs, and length for next step + input_ids = torch.cat([input_ids, next_tokens[:, None]], dim=-1) + model_kwargs = self._update_model_kwargs_for_generation( + outputs, model_kwargs, is_encoder_decoder=self.config.is_encoder_decoder + ) + unfinished_sequences = unfinished_sequences.mul( + next_tokens.tile(eos_token_id_tensor.shape[0], 1).ne(eos_token_id_tensor.unsqueeze(1)).prod(dim=0) + ) + if return_past_key_values: + yield input_ids, outputs.past_key_values + else: + yield input_ids + # stop when each sentence is finished, or if we exceed the maximum length + if unfinished_sequences.max() == 0 or stopping_criteria(input_ids, scores): + break + + def quantize(self, bits: int, empty_init=False, device=None, **kwargs): + if bits == 0: + return + + from .quantization import quantize + + if self.quantized: + logger.info("Already quantized.") + return self + + self.quantized = True + + self.config.quantization_bit = bits + + self.transformer.encoder = quantize(self.transformer.encoder, bits, empty_init=empty_init, device=device, + **kwargs) + return self + + +class ChatGLMForSequenceClassification(ChatGLMPreTrainedModel): + def __init__(self, config: ChatGLMConfig, empty_init=True, device=None): + super().__init__(config) + + self.num_labels = config.num_labels + self.transformer = ChatGLMModel(config, empty_init=empty_init, device=device) + + self.classifier_head = nn.Linear(config.hidden_size, config.num_labels, bias=True, dtype=torch.half) + if config.classifier_dropout is not None: + self.dropout = nn.Dropout(config.classifier_dropout) + else: + self.dropout = None + self.config = config + + if self.config.quantization_bit: + self.quantize(self.config.quantization_bit, empty_init=True) + + def forward( + self, + input_ids: Optional[torch.LongTensor] = None, + position_ids: Optional[torch.LongTensor] = None, + attention_mask: Optional[torch.Tensor] = None, + full_attention_mask: Optional[torch.Tensor] = None, + past_key_values: Optional[Tuple[Tuple[torch.Tensor, torch.Tensor], ...]] = None, + inputs_embeds: 
Optional[torch.LongTensor] = None, + labels: Optional[torch.LongTensor] = None, + use_cache: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ) -> Union[Tuple[torch.Tensor, ...], SequenceClassifierOutputWithPast]: + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + transformer_outputs = self.transformer( + input_ids=input_ids, + position_ids=position_ids, + attention_mask=attention_mask, + full_attention_mask=full_attention_mask, + past_key_values=past_key_values, + inputs_embeds=inputs_embeds, + use_cache=use_cache, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + hidden_states = transformer_outputs[0] + pooled_hidden_states = hidden_states[-1] + if self.dropout is not None: + pooled_hidden_states = self.dropout(pooled_hidden_states) + logits = self.classifier_head(pooled_hidden_states) + + loss = None + if labels is not None: + if self.config.problem_type is None: + if self.num_labels == 1: + self.config.problem_type = "regression" + elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int): + self.config.problem_type = "single_label_classification" + else: + self.config.problem_type = "multi_label_classification" + + if self.config.problem_type == "regression": + loss_fct = MSELoss() + if self.num_labels == 1: + loss = loss_fct(logits.squeeze().float(), labels.squeeze()) + else: + loss = loss_fct(logits.float(), labels) + elif self.config.problem_type == "single_label_classification": + loss_fct = CrossEntropyLoss() + loss = loss_fct(logits.view(-1, self.num_labels).float(), labels.view(-1)) + elif self.config.problem_type == "multi_label_classification": + loss_fct = BCEWithLogitsLoss() + loss = loss_fct(logits.float(), labels.view(-1, self.num_labels)) + + if not return_dict: + output = (logits,) + transformer_outputs[1:] + return ((loss,) + output) if loss is not None else output + + return SequenceClassifierOutputWithPast( + loss=loss, + logits=logits, + past_key_values=transformer_outputs.past_key_values, + hidden_states=transformer_outputs.hidden_states, + attentions=transformer_outputs.attentions, + ) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/kolors/chatglm/quantization.py b/ComfyUI-Easy-Use/py/kolors/chatglm/quantization.py new file mode 100644 index 0000000000000000000000000000000000000000..c3bb3926ffd6d4f18804097997eddda8d84bfb1d --- /dev/null +++ b/ComfyUI-Easy-Use/py/kolors/chatglm/quantization.py @@ -0,0 +1,190 @@ +from torch.nn import Linear +from torch.nn.parameter import Parameter + +import bz2 +import torch +import base64 +import ctypes +from transformers.utils import logging + +from typing import List +from functools import partial + +logger = logging.get_logger(__name__) + +try: + from cpm_kernels.kernels.base import LazyKernelCModule, KernelFunction, round_up + + class Kernel: + def __init__(self, code: bytes, function_names: List[str]): + self.code = code + self._function_names = function_names + self._cmodule = LazyKernelCModule(self.code) + + for name in self._function_names: + setattr(self, name, KernelFunction(self._cmodule, name)) + + quantization_code = 
"$QlpoOTFBWSZTWU9yuJUAQHN//////////f/n/8/n///n//bt4dTidcVx8X3V9FV/92/v4B7/AD5FBQFAAAChSgKpFCFAFVSigUAAAEKhSgUUqgFBKigqVREQAABQBQIANDTTIGI00BkZBkNGE0A0BkBkGQGRkaNAaAGQNBoGgDIAAYIGTI0DQAQAaGmmQMRpoDIyDIaMJoBoDIDIMgMjI0aA0AMgaDQNAGQAAwQMmRoGgAgA0NNMgYjTQGRkGQ0YTQDQGQGQZAZGRo0BoAZA0GgaAMgABggZMjQNABABoaaZAxGmgMjIMhowmgGgMgMgyAyMjRoDQAyBoNA0AZAADBAyZGgaAAmqU1NEgJqnptU/Sn4jRR6J6epk2pqb1Q/SgAPUGgyNNGjQ2SBpoAZAAGg0NB6mgDIAAAAA2oaApSREBNAARhGiYEaEwU8pvImlP0k2aam1GaGqbFNM1MHpTwmkepmyU9R6nqPKekHqNNPUxNGhp6n6p6QaZ6o9TG1GMqcoV9ly6nRanHlq6zPNbnGZNi6HSug+2nPiZ13XcnFYZW+45W11CumhzYhchOJ2GLLV1OBjBjGf4TptOddTSOcVxhqYZMYwZXZZY00zI1paX5X9J+b+f4e+x43RXSxXPOdquiGpduatGyXneN696M9t4HU2eR5XX/kPhP261NTx3JO1Ow7LyuDmeo9a7d351T1ZxnvnrvYnrXv/hXxPCeuYx2XsNmO003eg9J3Z6U7b23meJ4ri01OdzTk9BNO96brz+qT5nuvvH3ds/G+m/JcG/F2XYuhXlvO+jP7U3XgrzPN/lr8Sf1n6j4j7jZs+s/T0tNaNNYzTs12rxjwztHlnire3Nzc3N1wuBwOBwXBvZfoHpD7rFmR99V5vj3aXza3xdBbXMalubTg/jIv5dfAi54Pdc75j4z412n3Npj3Ld/ENm7a3b/Cod6h/ret1/5vn/C+l+gdslMvgPSLJ8d8q+U66fevYn/tW1chleEtNTGlcHCbLRlq0tHzF5tsbbZZfHjjLgZu42XCuC3NrdjTasZGNzgxPIrGqp7r3p7L2p5XjnpPSmTd5XtzqnB6U87zzg1Ol0zd0zsLszxR6lkxp35u6/teL0L0W922cR7Lu1lpL9CsHirzuM2T+BgsyViT6LHcm0/Vr6U/7LGGyJeqTEjt0PHWhF5mCT7R9mtlDwriYv0Tyr/OxYt6qp5r0mPVT0608TqnqMZaarU2nFwrTzzlrs1ed7z1ux60wyr4ydCaTi3enW8x68x0zU7tXSlcmPSW1mGpWJMg4zmPC2lK96tp0OE80y4MfEvnZj8zGluR6b22ki1Ou9V2nCd9xovcPvcYMZYy0lvN60ScZ45vN6yeCeeXFb1lVjnnCar5fwXwE2bzJ4HI1XVPXfXZMm44GUsMpYsmLB65TuVdm0cl0b+i/wGNN66XjeV7zuPpHcnK/juhhjdfId5jMdE5nN0dGmmm2zZs2cexD5n9p/dY352XsvXHaZNWWsmmS1atjR452nYudzvqv2HMRyvNNnlMcDl3R2+yx2uVrBubTW9icHDVtbNXlZm7jma1rM4VurZZd2y6nUau7ZXZ7bVU+mnoOVxZGMrVmvX60605JwmzGZhhhjTWtaaaMaaGTGmNMZasY0iX8VMUl8eepaIrzGSpemWOQyZORk2bNpjUybMmxqYmknCGCFynutfksaZpjTNMaaatM0xsxcGR0sociNqxNSmhhR1ZJPbsn8qyF0t2qH6iYBclclalbtTTcHTDsPaX6rlnElph2Jyumumtynv2Kk8GI7rsvXbIcJgHJOSaSXnnGaI3m87RtVXJOZ/YtgdTE6Wpha6ZlE8ayXkef1fh602r2WwvfMXtMdLlkfnLFdYYwYso+bWqm7yJqHXZGw2nrS5ZanSYnWlxBxMF1V940K2wdrI7R6OYf7DGGamMmTSbRhlS45xmVOumF1EyPCmHrrN8wwZOOrdNtLeMtzFzDlWnfTBxMk2NaXIZHBYxYLD4w8yju0ao65Vz1OIXoS9dLanwCe1PWrYuWMqf1if1z2k2yYfKJ741PDgno1ZQ8DRqvUny3mNoWTzGO6m1DkrJI8JiR5cSd+vZdGOO8nrMoc5+NDUFsMSXaZJeNlMmGLtJsovOsUp7I9S5VojKxF6bTVEelXqlfJobQr3LozSh2Jk7VcrVMfhXqszGWMzNqGhqZY0OadxkyyMssKugZR0KNFXBHlqwmJgTE/BNVMk6ItJXZMR0H47GpXv/DMOvNkmVuaV1PRfEdxuqc7Hcd+ZV/zTLaRxWk0nl9CdCeM6mn5rstHIBcpiuwmUZXeq81DacHI2rmrZ5SuE5mOZd6LQrZg9mx32TprA8BMo5jKN6yLTCi3WzQaZSuhzTtM1fUTGVpG8Tw+KXI0tjEpiWxtLYynOlktSbVlaI5kxP8TDH8kx50xoxi5KcA4pcja8KWLRlO/Ks6q06ergnvm1ca3Tq8Uw7LTUsmWyctXPWmpitl/uvGcWTGXGuAXDfhqazGmjkxcJW5hMMMMpYsXl2TZYtVOddG3XCarUt6Ptq9CZXSNzyuRzqRZOjsxdBbFVz6OA5HI43r1jityVlVpVkxmOsyaYWE1NTGq1sOVh36mHMcxtSvcy70edG0ZGR3I1Go1GRlV7mWWo1G0ZGRqlvH40l7o4m5xMWLLLYyNjnqc8556mdPqLJ31n/1nWOncxzG1tizrHs/Z+d2vP/B/l8wdJ6rHUn2nbbDq4p6htFtYzMMMTaZis1K5GKzGNmxhmUx2DDlZ/qNnIx41xnaMfCZWYaZWtNLTNW8ND4Fw1MyZOCdM428suKG1ehW8TesOydg7J+YYcD4cYR+8dFK6M4E3HM9ZfRNNL+Sn6rsl4DsrDl2HpPCnfxjGXtbZtYys1ttlyJ4T+BvexjGWRjMszK4Jpc77D3GyuVD7q0+G8m9G+2+rGm7cOR2y7FdtY2XUYx/oNlfRYxhMYyYZkyyg55enna9Kt/FFi6GMMwYwdwxWgxGMLKYmUyGExTKMZkMFhkymKuh0NOBNnBu+23LdwDoZYYzGGMxtORaTU1pjTGWTTGGtMrNWUsyyTTLLG1qy2ZjbK2DBllWqxMtBMaYZQmcE7zvvRcTkclUwdkxTaSdyySt/7fpL+T1v516Ji97fwr5JbLu305zMn5+GMTTZ9F+y7ExwmGVfG44yxn3dLv6l5i+Wth1jCrDq21nW9LqvvDzz3Vf3LLH/O/32TJ/erx3bXftO4eF+G956D952K/An4NfvOpjFjExjevP/UmE0fIoZXx6/w6lX/no3D0bLt+ixjieBM6ksRd0yB4Lt2SwYNE+gd1detlZWUnpiZfGfFaK+4PyCa/v18V8X75pe9fLXzp7l3VjF76vWZmHwGz1IZNWT7b8yddJ4q5kyrVdfru6atWc7bVYztL9Jf4GXvT+Y8m9/YsXP6H018a8D4XVOqvfzqeR+6yZOD8dPv0+U7/q5Pl+2dNb0MjzGVH5p6MNQ7cOWvw62U9aHE8DprDek+McLyvDz+te+9Zhq5+YTruufMcWMabqy
sTmZVWjKPfnK0wyVcrsuhjZRdLkHNvD72b9abriOSGIxiLixMOoalNPXzy+wT/tf+U6HHONfsz+xe8ufHBdQWWGWLA9if0rsnmrxK5LvRZQeWsTCsrmOYy8VteVfuRfcVTtDLItLIsMYxZLdU/DbtSemxF6Z6Zo5WBXE4tFdCyVMMXMTEMZXVlS6Xec2T4e0tHsRcEuWshcJ2YsNF5rUx1E8ifCq6Z+ZP7qdCeu/aTwFd53l16/o0NOw6O3dLavP4Hbi4RdmuDk6DoYaninC0+o4uZjbJ7Rxeu0/FbuFg+q7DVS6fQe0rZ6NDGUNNU6DEqOaLTicKnYZMnBWruljQxoaS3dZhocDge0bSTyOvdAbG5hxe2xji7E/L55xX13wWNDi6HCekcFxfCPGxY0MXC+s7afWaMdDyjyr+o8Rudm/NabOZvdl274zH4f5XK9z6On1Pe/K5TdPAslg77BjuO6Y3eO7GqvOPG/stknp1leyvLL0Z7bl9I4noMvLkzytLhWYzrOZzLXCORe028rORzOg4N/L0HlMOQ3Pgmnbb6KczlabORpu980q37TBqRu0/p3PO6234Bl03Ynuz+9W7gnsEcmvYaYY3aMYY0wx3pYd+ujsXauWdaY5Xkbtl23fPzFHiDB/QMo0yFjBllYxTQYYyxkrwn7JufwJ/PfgJ+C83X69ni6zvXcnyXabv0ncbLwsceS+RNlyN2mnneJtX0ngYO0+e+0+UnA+Wch3ji8hj5an4h+i6XBySU4n+R0roVcbw5yvHrmr4Yw8Y7x6c+9POPYHI5HI5HI5HI5HGXGww4nE4nrVyOR8XeqPEO7PLOiukYa3Novk5hV4cdtYZLI93e+uxff2jRo0aNGjRo0aNG1bVtW1dy3m83m8+tQ5ZzHw3nObwOu8La9Rc1dtkdS8A3eTk823tnktXWlxN6Oixe06zrN70Isd9jiOgZFq9yfkPqP/SLhN2Myl8jDM43bl1nbcb4cO57jlh8Jow6pzXZdL4dyODTuuhu77FyO27DdwdRxmvO+O+3N2+BdqyTwLHVczDVY4UPE4O66/ZO2cx1LFzVdSXtF7G4HMbrauOHRw6c8FdZ5m9fHZHYZXfTlZquyynSyTTKke6vcffSD9pzPA/G7n7jxPmuhc1DHMynPMrGL6AdewYmwu5ko+UUyTwrMv27rPH1v1nGqd87+p6N6LU8k3NEng53xXyHS97+44OSg/sy/hn+Se6yfYNjW0/uTgP+PvWYzLMmjhcLB/gGpri6H83/84eUXWT6T9Hsv7785z/7z4icpW+zfXypuR7rx/gMdZb1/wC678pcs8/2a3mDitGHxl9mfPlll5MafWWqxk/eYuTDgcNMzDGWLWvsuglNxs53GtN6uWpktlW1tZZYcuinMMWmnNnJydze3b2Y1McBxrBkXw799izLMZZYyy0TkbsGM4p03S2uVu5s/XXUdSdec6smVxZYYGpVmT8A+8ajuEyV5FatkvVru2x6uxGXXbH4A+jvgP4GMYy3iPLXzq/6z65+E005ey+cwMZD3fZcqc6xpjTFjQ0P3U+e++cPYmTIwj0nrK5NPTfl3WvpfLtXDcb2HQMudYOxFXQBor4L4T6vrOauFctYXJQ++NUWmJe5bmx1jDiZS1dTqWxo4GR8jm3fttpmPHppk9PEyv4/y8/sO07XacOmcqc0x2Vi9BvNJvN5oW8x4mOsydpidRxMYJPx06m1bqPzq9KtK8sxXNXFodD/+MYYaJTLwOhc9brCsV18oOR1i4tXChyTkq4lf4y1Ke+9axjDHqs1mfBbMXuP4Hzi+X7t8vzv7bHerrUPgPCxhjre4fXdfLNtNM+Jd+Zdh8xd8wP87uNPoPgv4W7/5P2BuxfsMabNnMnza+54Pdi5U671GPZY8CehX8Voeoo7FHpkeEc6715FwHZrIrUrHaviPUbPZHND+IhczrP6FcYvhOZ0Di/ETt0OI+YwNWR9r7tpf6WDeZKZDB1+z2IthOl1mPyb5FluvEx9h9d0NnM0Y1XPFkWIsk1WotJ0PBMmkvjvQTd0e71tfeV+8r8lQ/tpzpsmxJ+InrI/dj2UajUajVTUajatRqNRtGo1Go1Go4wjeMpZFMVV9CHbofPraLsJ3JpWV2XOoanCuFky4y3PPNxucK2uKC1Lbdb1eo+m5XomN6HfeZsabHLHRX/K+offtNGGmHWctcVcG44MdSqsOLY9VzX+Zxfxn2HPdWTpzWvkrtJ8M5zorrKcquRytJ5N5DZmcaW02l76nWO+BqPXm1A2Ry/0q71dH/mqrqeFjkYxjEXtsX8qubTk67rGycyqsdm4tZx5D6D5hhi0waaWmiaMP81Yjii5qxPlPuU/GfTL1Y5E6Jyfiq63qTa39A4J0sOGDgO9WF9bOXl0XfPRbsY2bPNKPy1YrFYrFYmRhhlTIyMjJWJYZHXuCXI8OoXsvfljGLFicNifpp2XunoPiG1wtx3p1Tah+/DD66OnVtVXP9rKbVxOnL0tR/rHtqB5UDErUVcl11D4qqvjpOcxX7armUNJB3LpW6bxVvD08e8h3odKKvyCFZBdSh2FVcST9xV3n3T8t1j7Kr9qgrqXg+13Pt5U7JCvFXVIV1YG5lRhkVYZJYYDDD4KOIMoHCp26WS8GB7uBh2zIdgq/PKyInjV2STShuoapUdCpX1yTwqq/z1VvET7Kh5nVPkO8YyxjLt2MaaMmWTLQvx3qnzltnXW0p2jxgbEtSny/Osv8Y9pLMXYoHVPAhkVdWVeODhR6q9/Sxe2liwwZWMVvFXfRkeIDxAePUPIrdJ4ey6yquzH+PD/bUOWAu05qVHtFd8rrKHSoeNIOUqrYr3FXyToqfYJgwmJdKpXXOwYYegNNGMzfZPp/t3t/DVs4zjNTN61rRqaWaa4NYbRjTa0tWwy2Y2tGN8ZO8ofNKq4j9SL7I+cSm4/6ovLV5HNXLI0jJidwrtk6ynCaP6Z++GjRlWS3tLeW129Mi9evxU9mtz6s5J3Z7M2ngTgnKvmpomxpaLCzPfmx0JWE+m3NLDDGOX47RctdYYNK5jakdqLkRlI39n590T5zctGSwwZZDJj6kW8XSi6ot2MmWWJ0DUT3nuvebBudScjZ79g8cWJ8av0k+/bE5WKd5MdbFpbDVMxu1DVMmtNZGJvq1mtRbn6M+g/kP0FwDwr7quZs7xosNGpbscyxhhd9TyJyFwbLcxlTasg75vW7TsV5K7ji44XPMMrdoj+Y3rT0Hie62nlYV/pwczzOmdLqLhYkzGMzCZWGMQzGMSsZYY6Di1t4nlJ+Em63mJxrVLxPbYxNEdgc1dU2iOKyoYYWjNrEeHTYybVk0atSa7ehuwsWMWTqn1TrnS6hYsi71d1+s+k+ic70e20fzE/VaTdxT9ZtU4GIXdeNx3X77guYYfpHeTQjaMX6brOu4OY4K7Y2d9mbHarI5ox3p4GpJ2Vd/Tst60f7j999pppjR+Q/Qf8J/VaORs3cji7FfFuN61+ui9s8hix1OCh5KGVV23BPXvZfz3CLyHpix+exi8z/KnCnosY2euno
r+cxyPO/xJ0vKey9OvE9VjqaYu0x3Z3jd6o2b1T12D+F8l232lwaaacD5LE8LBxu7WTlbWraWpew8Xexjel3E+wWD4APITdNqR8F3R3T0lunCQ4GaE9R37DxeCYfcHi4xci5ovKfxVs55y2hf+65E/Xdp6jR5nrebTmi5incpkyOjs50JvrZwstbbW6kfuuQw+2mykf/EXNFzxfKTrxew929TR6bWnGL//F3JFOFCQT3K4lQ" + + kernels = Kernel( + bz2.decompress(base64.b64decode(quantization_code)), + [ + "int4WeightCompression", + "int4WeightExtractionFloat", + "int4WeightExtractionHalf", + "int8WeightExtractionFloat", + "int8WeightExtractionHalf", + ], + ) +except Exception as exception: + kernels = None + logger.warning("Failed to load cpm_kernels:" + str(exception)) + + +class W8A16Linear(torch.autograd.Function): + @staticmethod + def forward(ctx, inp: torch.Tensor, quant_w: torch.Tensor, scale_w: torch.Tensor, weight_bit_width): + ctx.inp_shape = inp.size() + ctx.weight_bit_width = weight_bit_width + out_features = quant_w.size(0) + inp = inp.contiguous().view(-1, inp.size(-1)) + weight = extract_weight_to_half(quant_w, scale_w, weight_bit_width) + ctx.weight_shape = weight.size() + output = inp.mm(weight.t()) + ctx.save_for_backward(inp, quant_w, scale_w) + return output.view(*(ctx.inp_shape[:-1] + (out_features,))) + + @staticmethod + def backward(ctx, grad_output: torch.Tensor): + inp, quant_w, scale_w = ctx.saved_tensors + weight = extract_weight_to_half(quant_w, scale_w, ctx.weight_bit_width) + grad_output = grad_output.contiguous().view(-1, weight.size(0)) + grad_input = grad_output.mm(weight) + grad_weight = grad_output.t().mm(inp) + return grad_input.view(ctx.inp_shape), grad_weight.view(ctx.weight_shape), None, None + + +def compress_int4_weight(weight: torch.Tensor): # (n, m) + with torch.cuda.device(weight.device): + n, m = weight.size(0), weight.size(1) + assert m % 2 == 0 + m = m // 2 + out = torch.empty(n, m, dtype=torch.int8, device="cuda") + stream = torch.cuda.current_stream() + + gridDim = (n, 1, 1) + blockDim = (min(round_up(m, 32), 1024), 1, 1) + + kernels.int4WeightCompression( + gridDim, + blockDim, + 0, + stream, + [ctypes.c_void_p(weight.data_ptr()), ctypes.c_void_p(out.data_ptr()), ctypes.c_int32(n), ctypes.c_int32(m)], + ) + return out + + +def extract_weight_to_half(weight: torch.Tensor, scale_list: torch.Tensor, source_bit_width: int): + assert scale_list.dtype in [torch.half, torch.bfloat16] + assert weight.dtype in [torch.int8] + if source_bit_width == 8: + return weight.to(scale_list.dtype) * scale_list[:, None] + elif source_bit_width == 4: + func = ( + kernels.int4WeightExtractionHalf if scale_list.dtype == torch.half else kernels.int4WeightExtractionBFloat16 + ) + else: + assert False, "Unsupported bit-width" + + with torch.cuda.device(weight.device): + n, m = weight.size(0), weight.size(1) + out = torch.empty(n, m * (8 // source_bit_width), dtype=scale_list.dtype, device="cuda") + stream = torch.cuda.current_stream() + + gridDim = (n, 1, 1) + blockDim = (min(round_up(m, 32), 1024), 1, 1) + + func( + gridDim, + blockDim, + 0, + stream, + [ + ctypes.c_void_p(weight.data_ptr()), + ctypes.c_void_p(scale_list.data_ptr()), + ctypes.c_void_p(out.data_ptr()), + ctypes.c_int32(n), + ctypes.c_int32(m), + ], + ) + return out + + +class QuantizedLinear(torch.nn.Module): + def __init__(self, weight_bit_width: int, weight, bias=None, device="cpu", dtype=None, empty_init=False, *args, + **kwargs): + super().__init__() + self.weight_bit_width = weight_bit_width + + shape = weight.shape + + if weight is None or empty_init: + self.weight = torch.empty(shape[0], shape[1] * weight_bit_width // 8, dtype=torch.int8, device=device) + self.weight_scale = 
torch.empty(shape[0], dtype=dtype, device=device) + else: + self.weight_scale = weight.abs().max(dim=-1).values / ((2 ** (weight_bit_width - 1)) - 1) + self.weight = torch.round(weight / self.weight_scale[:, None]).to(torch.int8) + if weight_bit_width == 4: + self.weight = compress_int4_weight(self.weight) + try: + self.weight = Parameter(self.weight.to(device), requires_grad=False) + except: + self.weight.to(device, dtype=self.weight.dtype) + self.weight_scale = Parameter(self.weight_scale.to(device), requires_grad=False) + self.bias = Parameter(bias.to(device), requires_grad=False) if bias is not None else None + + def forward(self, input): + output = W8A16Linear.apply(input, self.weight, self.weight_scale, self.weight_bit_width) + if self.bias is not None: + output = output + self.bias + return output + + +def quantize(model, weight_bit_width, empty_init=False, device=None): + """Replace fp16 linear with quantized linear""" + for layer in model.layers: + layer.self_attention.query_key_value = QuantizedLinear( + weight_bit_width=weight_bit_width, + weight=layer.self_attention.query_key_value.weight.to(torch.cuda.current_device()), + bias=layer.self_attention.query_key_value.bias, + dtype=layer.self_attention.query_key_value.weight.dtype, + device=layer.self_attention.query_key_value.weight.device if device is None else device, + empty_init=empty_init + ) + layer.self_attention.dense = QuantizedLinear( + weight_bit_width=weight_bit_width, + weight=layer.self_attention.dense.weight.to(torch.cuda.current_device()), + bias=layer.self_attention.dense.bias, + dtype=layer.self_attention.dense.weight.dtype, + device=layer.self_attention.dense.weight.device if device is None else device, + empty_init=empty_init + ) + layer.mlp.dense_h_to_4h = QuantizedLinear( + weight_bit_width=weight_bit_width, + weight=layer.mlp.dense_h_to_4h.weight.to(torch.cuda.current_device()), + bias=layer.mlp.dense_h_to_4h.bias, + dtype=layer.mlp.dense_h_to_4h.weight.dtype, + device=layer.mlp.dense_h_to_4h.weight.device if device is None else device, + empty_init=empty_init + ) + layer.mlp.dense_4h_to_h = QuantizedLinear( + weight_bit_width=weight_bit_width, + weight=layer.mlp.dense_4h_to_h.weight.to(torch.cuda.current_device()), + bias=layer.mlp.dense_4h_to_h.bias, + dtype=layer.mlp.dense_4h_to_h.weight.dtype, + device=layer.mlp.dense_4h_to_h.weight.device if device is None else device, + empty_init=empty_init + ) + + return model \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/kolors/chatglm/tokenization_chatglm.py b/ComfyUI-Easy-Use/py/kolors/chatglm/tokenization_chatglm.py new file mode 100644 index 0000000000000000000000000000000000000000..74694f9e2186306b8948825ce4f8c95fa7945daf --- /dev/null +++ b/ComfyUI-Easy-Use/py/kolors/chatglm/tokenization_chatglm.py @@ -0,0 +1,299 @@ +import json +import os +import re +from typing import List, Optional, Union, Dict +from sentencepiece import SentencePieceProcessor +from transformers import PreTrainedTokenizer +from transformers.utils import logging, PaddingStrategy +from transformers.tokenization_utils_base import EncodedInput, BatchEncoding + +class SPTokenizer: + def __init__(self, model_path: str): + # reload tokenizer + assert os.path.isfile(model_path), model_path + self.sp_model = SentencePieceProcessor(model_file=model_path) + + # BOS / EOS token IDs + self.n_words: int = self.sp_model.vocab_size() + self.bos_id: int = self.sp_model.bos_id() + self.eos_id: int = self.sp_model.eos_id() + self.pad_id: int = self.sp_model.unk_id() + assert 
self.sp_model.vocab_size() == self.sp_model.get_piece_size() + + role_special_tokens = ["<|system|>", "<|user|>", "<|assistant|>", "<|observation|>"] + special_tokens = ["[MASK]", "[gMASK]", "[sMASK]", "sop", "eop"] + role_special_tokens + self.special_tokens = {} + self.index_special_tokens = {} + for token in special_tokens: + self.special_tokens[token] = self.n_words + self.index_special_tokens[self.n_words] = token + self.n_words += 1 + self.role_special_token_expression = "|".join([re.escape(token) for token in role_special_tokens]) + + def tokenize(self, s: str, encode_special_tokens=False): + if encode_special_tokens: + last_index = 0 + t = [] + for match in re.finditer(self.role_special_token_expression, s): + if last_index < match.start(): + t.extend(self.sp_model.EncodeAsPieces(s[last_index:match.start()])) + t.append(s[match.start():match.end()]) + last_index = match.end() + if last_index < len(s): + t.extend(self.sp_model.EncodeAsPieces(s[last_index:])) + return t + else: + return self.sp_model.EncodeAsPieces(s) + + def encode(self, s: str, bos: bool = False, eos: bool = False) -> List[int]: + assert type(s) is str + t = self.sp_model.encode(s) + if bos: + t = [self.bos_id] + t + if eos: + t = t + [self.eos_id] + return t + + def decode(self, t: List[int]) -> str: + text, buffer = "", [] + for token in t: + if token in self.index_special_tokens: + if buffer: + text += self.sp_model.decode(buffer) + buffer = [] + text += self.index_special_tokens[token] + else: + buffer.append(token) + if buffer: + text += self.sp_model.decode(buffer) + return text + + def decode_tokens(self, tokens: List[str]) -> str: + text = self.sp_model.DecodePieces(tokens) + return text + + def convert_token_to_id(self, token): + """ Converts a token (str) in an id using the vocab. 
""" + if token in self.special_tokens: + return self.special_tokens[token] + return self.sp_model.PieceToId(token) + + def convert_id_to_token(self, index): + """Converts an index (integer) in a token (str) using the vocab.""" + if index in self.index_special_tokens: + return self.index_special_tokens[index] + if index in [self.eos_id, self.bos_id, self.pad_id] or index < 0: + return "" + return self.sp_model.IdToPiece(index) + + +class ChatGLMTokenizer(PreTrainedTokenizer): + vocab_files_names = {"vocab_file": "tokenizer.model"} + + model_input_names = ["input_ids", "attention_mask", "position_ids"] + + def __init__(self, vocab_file, padding_side="left", clean_up_tokenization_spaces=False, encode_special_tokens=False, + **kwargs): + self.name = "GLMTokenizer" + + self.vocab_file = vocab_file + self.tokenizer = SPTokenizer(vocab_file) + self.special_tokens = { + "": self.tokenizer.bos_id, + "": self.tokenizer.eos_id, + "": self.tokenizer.pad_id + } + self.encode_special_tokens = encode_special_tokens + super().__init__(padding_side=padding_side, clean_up_tokenization_spaces=clean_up_tokenization_spaces, + encode_special_tokens=encode_special_tokens, + **kwargs) + + def get_command(self, token): + if token in self.special_tokens: + return self.special_tokens[token] + assert token in self.tokenizer.special_tokens, f"{token} is not a special token for {self.name}" + return self.tokenizer.special_tokens[token] + + @property + def unk_token(self) -> str: + return "" + + @property + def pad_token(self) -> str: + return "" + + @property + def pad_token_id(self): + return self.get_command("") + + @property + def eos_token(self) -> str: + return "" + + @property + def eos_token_id(self): + return self.get_command("") + + @property + def vocab_size(self): + return self.tokenizer.n_words + + def get_vocab(self): + """ Returns vocab as a dict """ + vocab = {self._convert_id_to_token(i): i for i in range(self.vocab_size)} + vocab.update(self.added_tokens_encoder) + return vocab + + def _tokenize(self, text, **kwargs): + return self.tokenizer.tokenize(text, encode_special_tokens=self.encode_special_tokens) + + def _convert_token_to_id(self, token): + """ Converts a token (str) in an id using the vocab. """ + return self.tokenizer.convert_token_to_id(token) + + def _convert_id_to_token(self, index): + """Converts an index (integer) in a token (str) using the vocab.""" + return self.tokenizer.convert_id_to_token(index) + + def convert_tokens_to_string(self, tokens: List[str]) -> str: + return self.tokenizer.decode_tokens(tokens) + + def save_vocabulary(self, save_directory, filename_prefix=None): + """ + Save the vocabulary and special tokens file to a directory. + + Args: + save_directory (`str`): + The directory in which to save the vocabulary. + filename_prefix (`str`, *optional*): + An optional prefix to add to the named of the saved files. + + Returns: + `Tuple(str)`: Paths to the files saved. 
+ """ + if os.path.isdir(save_directory): + vocab_file = os.path.join( + save_directory, self.vocab_files_names["vocab_file"] + ) + else: + vocab_file = save_directory + + with open(self.vocab_file, 'rb') as fin: + proto_str = fin.read() + + with open(vocab_file, "wb") as writer: + writer.write(proto_str) + + return (vocab_file,) + + def get_prefix_tokens(self): + prefix_tokens = [self.get_command("[gMASK]"), self.get_command("sop")] + return prefix_tokens + + def build_single_message(self, role, metadata, message): + assert role in ["system", "user", "assistant", "observation"], role + role_tokens = [self.get_command(f"<|{role}|>")] + self.tokenizer.encode(f"{metadata}\n") + message_tokens = self.tokenizer.encode(message) + tokens = role_tokens + message_tokens + return tokens + + def build_chat_input(self, query, history=None, role="user"): + if history is None: + history = [] + input_ids = [] + for item in history: + content = item["content"] + if item["role"] == "system" and "tools" in item: + content = content + "\n" + json.dumps(item["tools"], indent=4, ensure_ascii=False) + input_ids.extend(self.build_single_message(item["role"], item.get("metadata", ""), content)) + input_ids.extend(self.build_single_message(role, "", query)) + input_ids.extend([self.get_command("<|assistant|>")]) + return self.batch_encode_plus([input_ids], return_tensors="pt", is_split_into_words=True) + + def build_inputs_with_special_tokens( + self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None + ) -> List[int]: + """ + Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and + adding special tokens. A BERT sequence has the following format: + + - single sequence: `[CLS] X [SEP]` + - pair of sequences: `[CLS] A [SEP] B [SEP]` + + Args: + token_ids_0 (`List[int]`): + List of IDs to which the special tokens will be added. + token_ids_1 (`List[int]`, *optional*): + Optional second list of IDs for sequence pairs. + + Returns: + `List[int]`: List of [input IDs](../glossary#input-ids) with the appropriate special tokens. + """ + prefix_tokens = self.get_prefix_tokens() + token_ids_0 = prefix_tokens + token_ids_0 + if token_ids_1 is not None: + token_ids_0 = token_ids_0 + token_ids_1 + [self.get_command("")] + return token_ids_0 + + def _pad( + self, + encoded_inputs: Union[Dict[str, EncodedInput], BatchEncoding], + max_length: Optional[int] = None, + padding_strategy: PaddingStrategy = PaddingStrategy.DO_NOT_PAD, + pad_to_multiple_of: Optional[int] = None, + return_attention_mask: Optional[bool] = None, + ) -> dict: + """ + Pad encoded inputs (on left/right and up to predefined length or max length in the batch) + + Args: + encoded_inputs: + Dictionary of tokenized inputs (`List[int]`) or batch of tokenized inputs (`List[List[int]]`). + max_length: maximum length of the returned list and optionally padding length (see below). + Will truncate by taking into account the special tokens. + padding_strategy: PaddingStrategy to use for padding. + + - PaddingStrategy.LONGEST Pad to the longest sequence in the batch + - PaddingStrategy.MAX_LENGTH: Pad to the max length (default) + - PaddingStrategy.DO_NOT_PAD: Do not pad + The tokenizer padding sides are defined in self.padding_side: + + - 'left': pads on the left of the sequences + - 'right': pads on the right of the sequences + pad_to_multiple_of: (optional) Integer if set will pad the sequence to a multiple of the provided value. 
+ This is especially useful to enable the use of Tensor Core on NVIDIA hardware with compute capability + `>= 7.5` (Volta). + return_attention_mask: + (optional) Set to False to avoid returning attention mask (default: set to model specifics) + """ + # Load from model defaults + assert self.padding_side == "left" + + required_input = encoded_inputs[self.model_input_names[0]] + seq_length = len(required_input) + + if padding_strategy == PaddingStrategy.LONGEST: + max_length = len(required_input) + + if max_length is not None and pad_to_multiple_of is not None and (max_length % pad_to_multiple_of != 0): + max_length = ((max_length // pad_to_multiple_of) + 1) * pad_to_multiple_of + + needs_to_be_padded = padding_strategy != PaddingStrategy.DO_NOT_PAD and len(required_input) != max_length + + # Initialize attention mask if not present. + if "attention_mask" not in encoded_inputs: + encoded_inputs["attention_mask"] = [1] * seq_length + + if "position_ids" not in encoded_inputs: + encoded_inputs["position_ids"] = list(range(seq_length)) + + if needs_to_be_padded: + difference = max_length - len(required_input) + + if "attention_mask" in encoded_inputs: + encoded_inputs["attention_mask"] = [0] * difference + encoded_inputs["attention_mask"] + if "position_ids" in encoded_inputs: + encoded_inputs["position_ids"] = [0] * difference + encoded_inputs["position_ids"] + encoded_inputs[self.model_input_names[0]] = [self.pad_token_id] * difference + required_input + + return encoded_inputs \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/kolors/chatglm/tokenizer/tokenizer.model b/ComfyUI-Easy-Use/py/kolors/chatglm/tokenizer/tokenizer.model new file mode 100644 index 0000000000000000000000000000000000000000..8a8007697b7cc3d3868dcffbbebf8c1f2bd690ba --- /dev/null +++ b/ComfyUI-Easy-Use/py/kolors/chatglm/tokenizer/tokenizer.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e7dc4c393423b76e4373e5157ddc34803a0189ba96b21ddbb40269d31468a6f2 +size 1018370 diff --git a/ComfyUI-Easy-Use/py/kolors/chatglm/tokenizer/tokenizer_config.json b/ComfyUI-Easy-Use/py/kolors/chatglm/tokenizer/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..f6f13c88707490cebd8023da86e8bf7a56fa21e3 --- /dev/null +++ b/ComfyUI-Easy-Use/py/kolors/chatglm/tokenizer/tokenizer_config.json @@ -0,0 +1,12 @@ +{ + "name_or_path": "THUDM/chatglm3-6b-base", + "remove_space": false, + "do_lower_case": false, + "tokenizer_class": "ChatGLMTokenizer", + "auto_map": { + "AutoTokenizer": [ + "tokenization_chatglm.ChatGLMTokenizer", + null + ] + } +} diff --git a/ComfyUI-Easy-Use/py/kolors/chatglm/tokenizer/vocab.txt b/ComfyUI-Easy-Use/py/kolors/chatglm/tokenizer/vocab.txt new file mode 100644 index 0000000000000000000000000000000000000000..8a8007697b7cc3d3868dcffbbebf8c1f2bd690ba --- /dev/null +++ b/ComfyUI-Easy-Use/py/kolors/chatglm/tokenizer/vocab.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e7dc4c393423b76e4373e5157ddc34803a0189ba96b21ddbb40269d31468a6f2 +size 1018370 diff --git a/ComfyUI-Easy-Use/py/kolors/clip_vision_config_vitl_336.json b/ComfyUI-Easy-Use/py/kolors/clip_vision_config_vitl_336.json new file mode 100644 index 0000000000000000000000000000000000000000..20a019142e8d20db3cd149b4609a460c5295fd0f --- /dev/null +++ b/ComfyUI-Easy-Use/py/kolors/clip_vision_config_vitl_336.json @@ -0,0 +1,18 @@ +{ + "attention_dropout": 0.0, + "dropout": 0.0, + "hidden_act": "quick_gelu", + "hidden_size": 1024, + "image_size": 336, + 
"initializer_factor": 1.0, + "initializer_range": 0.02, + "intermediate_size": 4096, + "layer_norm_eps": 1e-05, + "model_type": "clip_vision_model", + "num_attention_heads": 16, + "num_channels": 3, + "num_hidden_layers": 24, + "patch_size": 14, + "projection_dim": 768, + "torch_dtype": "float32" +} diff --git a/ComfyUI-Easy-Use/py/kolors/loader.py b/ComfyUI-Easy-Use/py/kolors/loader.py new file mode 100644 index 0000000000000000000000000000000000000000..908455faaa2982da3d05786da5b183c994d99d5c --- /dev/null +++ b/ComfyUI-Easy-Use/py/kolors/loader.py @@ -0,0 +1,303 @@ +import json +import os +import torch +import subprocess +import sys +import comfy.supported_models +import comfy.model_patcher +import comfy.model_management +import comfy.model_detection as model_detection +import comfy.model_base as model_base +from comfy.model_base import sdxl_pooled, CLIPEmbeddingNoiseAugmentation, Timestep, ModelType +from comfy.ldm.modules.diffusionmodules.openaimodel import UNetModel +from comfy.clip_vision import ClipVisionModel, Output +from comfy.utils import load_torch_file +from .chatglm.modeling_chatglm import ChatGLMModel, ChatGLMConfig +from .chatglm.tokenization_chatglm import ChatGLMTokenizer + +class KolorsUNetModel(UNetModel): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.encoder_hid_proj = torch.nn.Linear(4096, 2048, bias=True) + + def forward(self, *args, **kwargs): + with torch.cuda.amp.autocast(enabled=True): + if "context" in kwargs: + kwargs["context"] = self.encoder_hid_proj(kwargs["context"]) + result = super().forward(*args, **kwargs) + return result + +class KolorsSDXL(model_base.SDXL): + def __init__(self, model_config, model_type=ModelType.EPS, device=None): + model_base.BaseModel.__init__(self, model_config, model_type, device=device, unet_model=KolorsUNetModel) + self.embedder = Timestep(256) + self.noise_augmentor = CLIPEmbeddingNoiseAugmentation(**{"noise_schedule_config": {"timesteps": 1000, "beta_schedule": "squaredcos_cap_v2"}, "timestep_dim": 1280}) + + def encode_adm(self, **kwargs): + clip_pooled = sdxl_pooled(kwargs, self.noise_augmentor) + width = kwargs.get("width", 768) + height = kwargs.get("height", 768) + crop_w = kwargs.get("crop_w", 0) + crop_h = kwargs.get("crop_h", 0) + target_width = kwargs.get("target_width", width) + target_height = kwargs.get("target_height", height) + + out = [] + out.append(self.embedder(torch.Tensor([height]))) + out.append(self.embedder(torch.Tensor([width]))) + out.append(self.embedder(torch.Tensor([crop_h]))) + out.append(self.embedder(torch.Tensor([crop_w]))) + out.append(self.embedder(torch.Tensor([target_height]))) + out.append(self.embedder(torch.Tensor([target_width]))) + flat = torch.flatten(torch.cat(out)).unsqueeze( + dim=0).repeat(clip_pooled.shape[0], 1) + return torch.cat((clip_pooled.to(flat.device), flat), dim=1) + +class Kolors(comfy.supported_models.SDXL): + unet_config = { + "model_channels": 320, + "use_linear_in_transformer": True, + "transformer_depth": [0, 0, 2, 2, 10, 10], + "context_dim": 2048, + "adm_in_channels": 5632, + "use_temporal_attention": False, + } + + def get_model(self, state_dict, prefix="", device=None): + out = KolorsSDXL(self, model_type=self.model_type(state_dict, prefix), device=device, ) + out.__class__ = model_base.SDXL + if self.inpaint_model(): + out.set_inpaint() + return out + +def kolors_unet_config_from_diffusers_unet(state_dict, dtype=None): + match = {} + transformer_depth = [] + + attn_res = 1 + count_blocks = model_detection.count_blocks + 
down_blocks = count_blocks(state_dict, "down_blocks.{}") + for i in range(down_blocks): + attn_blocks = count_blocks( + state_dict, "down_blocks.{}.attentions.".format(i) + '{}') + res_blocks = count_blocks( + state_dict, "down_blocks.{}.resnets.".format(i) + '{}') + for ab in range(attn_blocks): + transformer_count = count_blocks( + state_dict, "down_blocks.{}.attentions.{}.transformer_blocks.".format(i, ab) + '{}') + transformer_depth.append(transformer_count) + if transformer_count > 0: + match["context_dim"] = state_dict["down_blocks.{}.attentions.{}.transformer_blocks.0.attn2.to_k.weight".format( + i, ab)].shape[1] + + attn_res *= 2 + if attn_blocks == 0: + for i in range(res_blocks): + transformer_depth.append(0) + + match["transformer_depth"] = transformer_depth + + match["model_channels"] = state_dict["conv_in.weight"].shape[0] + match["in_channels"] = state_dict["conv_in.weight"].shape[1] + match["adm_in_channels"] = None + if "class_embedding.linear_1.weight" in state_dict: + match["adm_in_channels"] = state_dict["class_embedding.linear_1.weight"].shape[1] + elif "add_embedding.linear_1.weight" in state_dict: + match["adm_in_channels"] = state_dict["add_embedding.linear_1.weight"].shape[1] + + Kolors = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 5632, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 2, 2, 10, 10], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 10, + 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 0, 2, 2, 2, 10, 10, 10], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + Kolors_inpaint = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, + 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 5632, 'dtype': dtype, 'in_channels': 9, + 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 2, 2, 10, 10], 'channel_mult': [1, 2, 4], + 'transformer_depth_middle': 10, + 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, + 'transformer_depth_output': [0, 0, 0, 2, 2, 2, 10, 10, 10], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + Kolors_ip2p = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, + 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 5632, 'dtype': dtype, 'in_channels': 8, + 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 2, 2, 10, 10], 'channel_mult': [1, 2, 4], + 'transformer_depth_middle': 10, + 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, + 'transformer_depth_output': [0, 0, 0, 2, 2, 2, 10, 10, 10], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SDXL = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, + 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, + 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 2, 2, 10, 10], 'channel_mult': [1, 2, 4], + 'transformer_depth_middle': 10, + 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, + 'transformer_depth_output': [0, 0, 0, 2, 2, 2, 10, 10, 10], + 'use_temporal_attention': False, 
'use_temporal_resblock': False} + + SDXL_mid_cnet = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, + 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, + 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 0, 0, 1, 1], 'channel_mult': [1, 2, 4], + 'transformer_depth_middle': 1, + 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, + 'transformer_depth_output': [0, 0, 0, 0, 0, 0, 1, 1, 1], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SDXL_small_cnet = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, + 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, + 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 0, 0, 0, 0], 'channel_mult': [1, 2, 4], + 'transformer_depth_middle': 0, + 'use_linear_in_transformer': True, 'num_head_channels': 64, 'context_dim': 1, + 'transformer_depth_output': [0, 0, 0, 0, 0, 0, 0, 0, 0], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + supported_models = [Kolors, Kolors_inpaint, + Kolors_ip2p, SDXL, SDXL_mid_cnet, SDXL_small_cnet] + + + for unet_config in supported_models: + matches = True + for k in match: + if match[k] != unet_config[k]: + # print("key {} does not match".format(k), match[k], "||", unet_config[k]) + matches = False + break + if matches: + return model_detection.convert_config(unet_config) + return None + +# chatglm3 model +class chatGLM3Model(torch.nn.Module): + def __init__(self, textmodel_json_config=None, device='cpu', offload_device='cpu', model_path=None): + super().__init__() + if model_path is None: + raise ValueError("model_path is required") + self.device = device + if textmodel_json_config is None: + textmodel_json_config = os.path.join( + os.path.dirname(os.path.realpath(__file__)), + "chatglm", + "config_chatglm.json" + ) + with open(textmodel_json_config, 'r') as file: + config = json.load(file) + textmodel_json_config = ChatGLMConfig(**config) + is_accelerate_available = False + try: + from accelerate import init_empty_weights + from accelerate.utils import set_module_tensor_to_device + is_accelerate_available = True + except: + pass + + from contextlib import nullcontext + with (init_empty_weights() if is_accelerate_available else nullcontext()): + with torch.no_grad(): + print('torch version:', torch.__version__) + self.text_encoder = ChatGLMModel(textmodel_json_config).eval() + if '4bit' in model_path: + try: + import cpm_kernels + except ImportError: + print("Installing cpm_kernels...") + subprocess.run([sys.executable, "-m", "pip", "install", "cpm_kernels"], check=True) + pass + self.text_encoder.quantize(4) + elif '8bit' in model_path: + self.text_encoder.quantize(8) + + sd = load_torch_file(model_path) + if is_accelerate_available: + for key in sd: + set_module_tensor_to_device(self.text_encoder, key, device=offload_device, value=sd[key]) + else: + print("WARNING: Accelerate not available, use load_state_dict load model") + self.text_encoder.load_state_dict() + +def load_chatglm3(model_path=None): + if model_path is None: + return + + load_device = comfy.model_management.text_encoder_device() + offload_device = comfy.model_management.text_encoder_offload_device() + + glm3model = chatGLM3Model( + device=load_device, + offload_device=offload_device, + model_path=model_path + ) + 
tokenizer_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'chatglm', "tokenizer") + tokenizer = ChatGLMTokenizer.from_pretrained(tokenizer_path) + text_encoder = glm3model.text_encoder + return {"text_encoder":text_encoder, "tokenizer":tokenizer} + + +# clipvision model +def load_clipvision_vitl_336(path): + sd = load_torch_file(path) + if "vision_model.encoder.layers.22.layer_norm1.weight" in sd: + json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "clip_vision_config_vitl_336.json") + else: + raise Exception("Unsupported clip vision model") + clip = ClipVisionModel(json_config) + m, u = clip.load_sd(sd) + if len(m) > 0: + print("missing clip vision: {}".format(m)) + u = set(u) + keys = list(sd.keys()) + for k in keys: + if k not in u: + t = sd.pop(k) + del t + return clip + +class applyKolorsUnet: + def __enter__(self): + import comfy.ldm.modules.diffusionmodules.openaimodel + import comfy.utils + import comfy.clip_vision + + self.original_UNET_MAP_BASIC = comfy.utils.UNET_MAP_BASIC.copy() + comfy.utils.UNET_MAP_BASIC.add(("encoder_hid_proj.weight", "encoder_hid_proj.weight"),) + comfy.utils.UNET_MAP_BASIC.add(("encoder_hid_proj.bias", "encoder_hid_proj.bias"),) + + self.original_unet_config_from_diffusers_unet = model_detection.unet_config_from_diffusers_unet + model_detection.unet_config_from_diffusers_unet = kolors_unet_config_from_diffusers_unet + + import comfy.supported_models + self.original_supported_models = comfy.supported_models.models + comfy.supported_models.models = [Kolors] + + self.original_load_clipvision_from_sd = comfy.clip_vision.load_clipvision_from_sd + comfy.clip_vision.load_clipvision_from_sd = load_clipvision_vitl_336 + + def __exit__(self, type, value, traceback): + import comfy.ldm.modules.diffusionmodules.openaimodel + import comfy.utils + import comfy.supported_models + import comfy.clip_vision + + comfy.utils.UNET_MAP_BASIC = self.original_UNET_MAP_BASIC + + model_detection.unet_config_from_diffusers_unet = self.original_unet_config_from_diffusers_unet + comfy.supported_models.models = self.original_supported_models + + comfy.clip_vision.load_clipvision_from_sd = self.original_load_clipvision_from_sd + + +def is_kolors_model(model): + unet_config = model.model.model_config.unet_config + if unet_config and "adm_in_channels" in unet_config and unet_config["adm_in_channels"] == 5632: + return True + else: + return False \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/kolors/model_patch.py b/ComfyUI-Easy-Use/py/kolors/model_patch.py new file mode 100644 index 0000000000000000000000000000000000000000..7900a0a13d8e1639d8bcb3302d02c360d81c44fb --- /dev/null +++ b/ComfyUI-Easy-Use/py/kolors/model_patch.py @@ -0,0 +1,66 @@ +import torch +from torch.nn import Linear +from types import MethodType +import comfy.model_management +import comfy.samplers +from comfy.cldm.cldm import ControlNet +from comfy.controlnet import ControlLora + +def patch_controlnet(model, control_net): + import comfy.controlnet + if isinstance(control_net, ControlLora): + del_keys = [] + for k in control_net.control_weights: + if k.startswith("label_emb.0.0."): + del_keys.append(k) + + for k in del_keys: + control_net.control_weights.pop(k) + + super_pre_run = ControlLora.pre_run + super_copy = ControlLora.copy + + super_forward = ControlNet.forward + + def KolorsControlNet_forward(self, x, hint, timesteps, context, **kwargs): + with torch.cuda.amp.autocast(enabled=True): + context = model.model.diffusion_model.encoder_hid_proj(context) + return 
super_forward(self, x, hint, timesteps, context, **kwargs) + + def KolorsControlLora_pre_run(self, *args, **kwargs): + result = super_pre_run(self, *args, **kwargs) + + if hasattr(self, "control_model"): + self.control_model.forward = MethodType( + KolorsControlNet_forward, self.control_model) + return result + + control_net.pre_run = MethodType( + KolorsControlLora_pre_run, control_net) + + def KolorsControlLora_copy(self, *args, **kwargs): + c = super_copy(self, *args, **kwargs) + c.pre_run = MethodType( + KolorsControlLora_pre_run, c) + return c + + control_net.copy = MethodType(KolorsControlLora_copy, control_net) + + elif isinstance(control_net, comfy.controlnet.ControlNet): + model_label_emb = model.model.diffusion_model.label_emb + control_net.control_model.label_emb = model_label_emb + control_net.control_model_wrapped.model.label_emb = model_label_emb + super_forward = ControlNet.forward + + def KolorsControlNet_forward(self, x, hint, timesteps, context, **kwargs): + with torch.cuda.amp.autocast(enabled=True): + context = model.model.diffusion_model.encoder_hid_proj(context) + return super_forward(self, x, hint, timesteps, context, **kwargs) + + control_net.control_model.forward = MethodType( + KolorsControlNet_forward, control_net.control_model) + + else: + raise NotImplementedError(f"Type {control_net} not supported for KolorsControlNetPatch") + + return control_net diff --git a/ComfyUI-Easy-Use/py/kolors/text_encode.py b/ComfyUI-Easy-Use/py/kolors/text_encode.py new file mode 100644 index 0000000000000000000000000000000000000000..b52e1bb250a73cc8b42b0fcb10b1246944699993 --- /dev/null +++ b/ComfyUI-Easy-Use/py/kolors/text_encode.py @@ -0,0 +1,105 @@ +import re +import random +import gc +import comfy.model_management as mm +from nodes import ConditioningConcat, ConditioningZeroOut, ConditioningSetTimestepRange, ConditioningCombine + +def chatglm3_text_encode(chatglm3_model, prompt, clean_gpu=False): + device = mm.get_torch_device() + offload_device = mm.unet_offload_device() + if clean_gpu: + mm.unload_all_models() + mm.soft_empty_cache() + # Function to randomly select an option from the brackets + + def choose_random_option(match): + options = match.group(1).split('|') + return random.choice(options) + + prompt = re.sub(r'\{([^{}]*)\}', choose_random_option, prompt) + + if "|" in prompt: + prompt = prompt.split("|") + + if prompt is not None and isinstance(prompt, str): + batch_size = 1 + elif prompt is not None and isinstance(prompt, list): + batch_size = len(prompt) + + # Define tokenizers and text encoders + tokenizer = chatglm3_model['tokenizer'] + text_encoder = chatglm3_model['text_encoder'] + text_encoder.to(device) + text_inputs = tokenizer( + prompt, + padding="max_length", + max_length=256, + truncation=True, + return_tensors="pt", + ).to(device) + + output = text_encoder( + input_ids=text_inputs['input_ids'], + attention_mask=text_inputs['attention_mask'], + position_ids=text_inputs['position_ids'], + output_hidden_states=True) + + # [batch_size, 77, 4096] + prompt_embeds = output.hidden_states[-2].permute(1, 0, 2).clone() + text_proj = output.hidden_states[-1][-1, :, :].clone() # [batch_size, 4096] + bs_embed, seq_len, _ = prompt_embeds.shape + prompt_embeds = prompt_embeds.repeat(1, 1, 1) + prompt_embeds = prompt_embeds.view(bs_embed, seq_len, -1) + + bs_embed = text_proj.shape[0] + text_proj = text_proj.repeat(1, 1).view(bs_embed, -1) + text_encoder.to(offload_device) + if clean_gpu: + mm.soft_empty_cache() + gc.collect() + return [[prompt_embeds, 
{"pooled_output": text_proj},]] + +def chatglm3_adv_text_encode(chatglm3_model, text, clean_gpu=False): + time_start = 0 + time_end = 1 + match = re.search(r'TIMESTEP.*$', text) + if match: + timestep = match.group() + timestep = timestep.split(' ') + timestep = timestep[0] + text = text.replace(timestep, '') + value = timestep.split(':') + if len(value) >= 3: + time_start = float(value[1]) + time_end = float(value[2]) + elif len(value) == 2: + time_start = float(value[1]) + time_end = 1 + elif len(value) == 1: + time_start = 0.1 + time_end = 1 + + + pass3 = [x.strip() for x in text.split("BREAK")] + pass3 = [x for x in pass3 if x != ''] + + if len(pass3) == 0: + pass3 = [''] + + conditioning = None + + for text in pass3: + cond = chatglm3_text_encode(chatglm3_model, text, clean_gpu) + if conditioning is not None: + conditioning = ConditioningConcat().concat(conditioning, cond)[0] + else: + conditioning = cond + + # setTimeStepRange + if time_start > 0 or time_end < 1: + conditioning_2, = ConditioningSetTimestepRange().set_range(conditioning, 0, time_start) + conditioning_1, = ConditioningZeroOut().zero_out(conditioning) + conditioning_1, = ConditioningSetTimestepRange().set_range(conditioning_1, time_start, time_end) + conditioning, = ConditioningCombine().combine(conditioning_1, conditioning_2) + + return conditioning \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/layer_diffuse/__init__.py b/ComfyUI-Easy-Use/py/layer_diffuse/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b038da199a689b73e35e6fa34bad592d4c3b858c --- /dev/null +++ b/ComfyUI-Easy-Use/py/layer_diffuse/__init__.py @@ -0,0 +1,213 @@ +#credit to huchenlei for this module +#from https://github.com/huchenlei/ComfyUI-layerdiffuse +import torch +import comfy.model_management +import comfy.lora +import copy +from typing import Optional +from enum import Enum +from comfy.utils import load_torch_file +from comfy.conds import CONDRegular +from comfy_extras.nodes_compositing import JoinImageWithAlpha +from .model import ModelPatcher, TransparentVAEDecoder, calculate_weight_adjust_channel +from .attension_sharing import AttentionSharingPatcher +from ..config import LAYER_DIFFUSION, LAYER_DIFFUSION_DIR, LAYER_DIFFUSION_VAE +from ..libs.utils import to_lora_patch_dict, get_local_filepath, get_sd_version + +load_layer_model_state_dict = load_torch_file +class LayerMethod(Enum): + FG_ONLY_ATTN = "Attention Injection" + FG_ONLY_CONV = "Conv Injection" + FG_TO_BLEND = "Foreground" + FG_BLEND_TO_BG = "Foreground to Background" + BG_TO_BLEND = "Background" + BG_BLEND_TO_FG = "Background to Foreground" + EVERYTHING = "Everything" + +class LayerDiffuse: + + def __init__(self) -> None: + self.vae_transparent_decoder = None + self.frames = 1 + + def get_layer_diffusion_method(self, method, has_blend_latent): + method = LayerMethod(method) + if method == LayerMethod.BG_TO_BLEND and has_blend_latent: + method = LayerMethod.BG_BLEND_TO_FG + elif method == LayerMethod.FG_TO_BLEND and has_blend_latent: + method = LayerMethod.FG_BLEND_TO_BG + return method + + def apply_layer_c_concat(self, cond, uncond, c_concat): + def write_c_concat(cond): + new_cond = [] + for t in cond: + n = [t[0], t[1].copy()] + if "model_conds" not in n[1]: + n[1]["model_conds"] = {} + n[1]["model_conds"]["c_concat"] = CONDRegular(c_concat) + new_cond.append(n) + return new_cond + + return (write_c_concat(cond), write_c_concat(uncond)) + + def apply_layer_diffusion(self, model: ModelPatcher, method, weight, samples, blend_samples, 
positive, negative, image=None, additional_cond=(None, None, None)): + control_img: Optional[torch.TensorType] = None + sd_version = get_sd_version(model) + model_url = LAYER_DIFFUSION[method.value][sd_version]["model_url"] + + if image is not None: + image = image.movedim(-1, 1) + + try: + if hasattr(comfy.lora, "calculate_weight"): + comfy.lora.calculate_weight = calculate_weight_adjust_channel(comfy.lora.calculate_weight) + else: + ModelPatcher.calculate_weight = calculate_weight_adjust_channel(ModelPatcher.calculate_weight) + except: + pass + + if method in [LayerMethod.FG_ONLY_CONV, LayerMethod.FG_ONLY_ATTN] and sd_version == 'sd1': + self.frames = 1 + elif method in [LayerMethod.BG_TO_BLEND, LayerMethod.FG_TO_BLEND, LayerMethod.BG_BLEND_TO_FG, LayerMethod.FG_BLEND_TO_BG] and sd_version == 'sd1': + self.frames = 2 + batch_size, _, height, width = samples['samples'].shape + if batch_size % 2 != 0: + raise Exception(f"The batch size should be a multiple of 2. 批次大小需为2的倍数") + control_img = image + elif method == LayerMethod.EVERYTHING and sd_version == 'sd1': + batch_size, _, height, width = samples['samples'].shape + self.frames = 3 + if batch_size % 3 != 0: + raise Exception(f"The batch size should be a multiple of 3. 批次大小需为3的倍数") + if model_url is None: + raise Exception(f"{method.value} is not supported for {sd_version} model") + + model_path = get_local_filepath(model_url, LAYER_DIFFUSION_DIR) + layer_lora_state_dict = load_layer_model_state_dict(model_path) + work_model = model.clone() + if sd_version == 'sd1': + patcher = AttentionSharingPatcher( + work_model, self.frames, use_control=control_img is not None + ) + patcher.load_state_dict(layer_lora_state_dict, strict=True) + if control_img is not None: + patcher.set_control(control_img) + else: + layer_lora_patch_dict = to_lora_patch_dict(layer_lora_state_dict) + work_model.add_patches(layer_lora_patch_dict, weight) + + # cond_contact + if method in [LayerMethod.FG_ONLY_ATTN, LayerMethod.FG_ONLY_CONV]: + samp_model = work_model + elif sd_version == 'sdxl': + if method in [LayerMethod.BG_TO_BLEND, LayerMethod.FG_TO_BLEND]: + c_concat = model.model.latent_format.process_in(samples["samples"]) + else: + c_concat = model.model.latent_format.process_in(torch.cat([samples["samples"], blend_samples["samples"]], dim=1)) + samp_model, positive, negative = (work_model,) + self.apply_layer_c_concat(positive, negative, c_concat) + elif sd_version == 'sd1': + if method in [LayerMethod.BG_TO_BLEND, LayerMethod.BG_BLEND_TO_FG]: + additional_cond = (additional_cond[0], None) + elif method in [LayerMethod.FG_TO_BLEND, LayerMethod.FG_BLEND_TO_BG]: + additional_cond = (additional_cond[1], None) + + work_model.model_options.setdefault("transformer_options", {}) + work_model.model_options["transformer_options"]["cond_overwrite"] = [ + cond[0][0] if cond is not None else None + for cond in additional_cond + ] + samp_model = work_model + + return samp_model, positive, negative + + def join_image_with_alpha(self, image, alpha): + out = image.movedim(-1, 1) + if out.shape[1] == 3: # RGB + out = torch.cat([out, torch.ones_like(out[:, :1, :, :])], dim=1) + for i in range(out.shape[0]): + out[i, 3, :, :] = alpha + return out.movedim(1, -1) + + def image_to_alpha(self, image, latent): + pixel = image.movedim(-1, 1) # [B, H, W, C] => [B, C, H, W] + decoded = [] + sub_batch_size = 16 + for start_idx in range(0, latent.shape[0], sub_batch_size): + decoded.append( + self.vae_transparent_decoder.decode_pixel( + pixel[start_idx: start_idx + sub_batch_size], + 
latent[start_idx: start_idx + sub_batch_size], + ) + ) + pixel_with_alpha = torch.cat(decoded, dim=0) + # [B, C, H, W] => [B, H, W, C] + pixel_with_alpha = pixel_with_alpha.movedim(1, -1) + image = pixel_with_alpha[..., 1:] + alpha = pixel_with_alpha[..., 0] + + alpha = 1.0 - alpha + new_images, = JoinImageWithAlpha().join_image_with_alpha(image, alpha) + return new_images, alpha + + def make_3d_mask(self, mask): + if len(mask.shape) == 4: + return mask.squeeze(0) + + elif len(mask.shape) == 2: + return mask.unsqueeze(0) + + return mask + + def masks_to_list(self, masks): + if masks is None: + empty_mask = torch.zeros((64, 64), dtype=torch.float32, device="cpu") + return ([empty_mask],) + + res = [] + + for mask in masks: + res.append(mask) + + return [self.make_3d_mask(x) for x in res] + + def layer_diffusion_decode(self, layer_diffusion_method, latent, blend_samples, samp_images, model): + alpha = [] + if layer_diffusion_method is not None: + sd_version = get_sd_version(model) + if sd_version not in ['sdxl', 'sd1']: + raise Exception(f"Only SDXL and SD1.5 model supported for Layer Diffusion") + method = self.get_layer_diffusion_method(layer_diffusion_method, blend_samples is not None) + sd15_allow = True if sd_version == 'sd1' and method in [LayerMethod.FG_ONLY_ATTN, LayerMethod.EVERYTHING, LayerMethod.BG_TO_BLEND, LayerMethod.BG_BLEND_TO_FG] else False + sdxl_allow = True if sd_version == 'sdxl' and method in [LayerMethod.FG_ONLY_CONV, LayerMethod.FG_ONLY_ATTN, LayerMethod.BG_BLEND_TO_FG] else False + if sdxl_allow or sd15_allow: + if self.vae_transparent_decoder is None: + model_url = LAYER_DIFFUSION_VAE['decode'][sd_version]["model_url"] + if model_url is None: + raise Exception(f"{method.value} is not supported for {sd_version} model") + decoder_file = get_local_filepath(model_url, LAYER_DIFFUSION_DIR) + self.vae_transparent_decoder = TransparentVAEDecoder( + load_torch_file(decoder_file), + device=comfy.model_management.get_torch_device(), + dtype=(torch.float16 if comfy.model_management.should_use_fp16() else torch.float32), + ) + if method in [LayerMethod.EVERYTHING, LayerMethod.BG_BLEND_TO_FG, LayerMethod.BG_TO_BLEND]: + new_images = [] + sliced_samples = copy.copy({"samples": latent}) + for index in range(len(samp_images)): + if index % self.frames == 0: + img = samp_images[index::self.frames] + alpha_images, _alpha = self.image_to_alpha(img, sliced_samples["samples"][index::self.frames]) + alpha.append(self.make_3d_mask(_alpha[0])) + new_images.append(alpha_images[0]) + else: + new_images.append(samp_images[index]) + else: + new_images, alpha = self.image_to_alpha(samp_images, latent) + else: + new_images = samp_images + else: + new_images = samp_images + + + return (new_images, samp_images, alpha) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/layer_diffuse/attension_sharing.py b/ComfyUI-Easy-Use/py/layer_diffuse/attension_sharing.py new file mode 100644 index 0000000000000000000000000000000000000000..89904eecfc2354823428eac5782747997b260bc7 --- /dev/null +++ b/ComfyUI-Easy-Use/py/layer_diffuse/attension_sharing.py @@ -0,0 +1,359 @@ +# Currently only sd15 + +import functools +import torch +import einops + +from comfy import model_management, utils +from comfy.ldm.modules.attention import optimized_attention + +module_mapping_sd15 = { + 0: "input_blocks.1.1.transformer_blocks.0.attn1", + 1: "input_blocks.1.1.transformer_blocks.0.attn2", + 2: "input_blocks.2.1.transformer_blocks.0.attn1", + 3: "input_blocks.2.1.transformer_blocks.0.attn2", + 4: 
"input_blocks.4.1.transformer_blocks.0.attn1", + 5: "input_blocks.4.1.transformer_blocks.0.attn2", + 6: "input_blocks.5.1.transformer_blocks.0.attn1", + 7: "input_blocks.5.1.transformer_blocks.0.attn2", + 8: "input_blocks.7.1.transformer_blocks.0.attn1", + 9: "input_blocks.7.1.transformer_blocks.0.attn2", + 10: "input_blocks.8.1.transformer_blocks.0.attn1", + 11: "input_blocks.8.1.transformer_blocks.0.attn2", + 12: "output_blocks.3.1.transformer_blocks.0.attn1", + 13: "output_blocks.3.1.transformer_blocks.0.attn2", + 14: "output_blocks.4.1.transformer_blocks.0.attn1", + 15: "output_blocks.4.1.transformer_blocks.0.attn2", + 16: "output_blocks.5.1.transformer_blocks.0.attn1", + 17: "output_blocks.5.1.transformer_blocks.0.attn2", + 18: "output_blocks.6.1.transformer_blocks.0.attn1", + 19: "output_blocks.6.1.transformer_blocks.0.attn2", + 20: "output_blocks.7.1.transformer_blocks.0.attn1", + 21: "output_blocks.7.1.transformer_blocks.0.attn2", + 22: "output_blocks.8.1.transformer_blocks.0.attn1", + 23: "output_blocks.8.1.transformer_blocks.0.attn2", + 24: "output_blocks.9.1.transformer_blocks.0.attn1", + 25: "output_blocks.9.1.transformer_blocks.0.attn2", + 26: "output_blocks.10.1.transformer_blocks.0.attn1", + 27: "output_blocks.10.1.transformer_blocks.0.attn2", + 28: "output_blocks.11.1.transformer_blocks.0.attn1", + 29: "output_blocks.11.1.transformer_blocks.0.attn2", + 30: "middle_block.1.transformer_blocks.0.attn1", + 31: "middle_block.1.transformer_blocks.0.attn2", +} + + +def compute_cond_mark(cond_or_uncond, sigmas): + cond_or_uncond_size = int(sigmas.shape[0]) + + cond_mark = [] + for cx in cond_or_uncond: + cond_mark += [cx] * cond_or_uncond_size + + cond_mark = torch.Tensor(cond_mark).to(sigmas) + return cond_mark + + +class LoRALinearLayer(torch.nn.Module): + def __init__(self, in_features: int, out_features: int, rank: int = 256, org=None): + super().__init__() + self.down = torch.nn.Linear(in_features, rank, bias=False) + self.up = torch.nn.Linear(rank, out_features, bias=False) + self.org = [org] + + def forward(self, h): + org_weight = self.org[0].weight.to(h) + org_bias = self.org[0].bias.to(h) if self.org[0].bias is not None else None + down_weight = self.down.weight + up_weight = self.up.weight + final_weight = org_weight + torch.mm(up_weight, down_weight) + return torch.nn.functional.linear(h, final_weight, org_bias) + + +class AttentionSharingUnit(torch.nn.Module): + # `transformer_options` passed to the most recent BasicTransformerBlock.forward + # call. 
+ transformer_options: dict = {} + + def __init__(self, module, frames=2, use_control=True, rank=256): + super().__init__() + + self.heads = module.heads + self.frames = frames + self.original_module = [module] + q_in_channels, q_out_channels = ( + module.to_q.in_features, + module.to_q.out_features, + ) + k_in_channels, k_out_channels = ( + module.to_k.in_features, + module.to_k.out_features, + ) + v_in_channels, v_out_channels = ( + module.to_v.in_features, + module.to_v.out_features, + ) + o_in_channels, o_out_channels = ( + module.to_out[0].in_features, + module.to_out[0].out_features, + ) + + hidden_size = k_out_channels + + self.to_q_lora = [ + LoRALinearLayer(q_in_channels, q_out_channels, rank, module.to_q) + for _ in range(self.frames) + ] + self.to_k_lora = [ + LoRALinearLayer(k_in_channels, k_out_channels, rank, module.to_k) + for _ in range(self.frames) + ] + self.to_v_lora = [ + LoRALinearLayer(v_in_channels, v_out_channels, rank, module.to_v) + for _ in range(self.frames) + ] + self.to_out_lora = [ + LoRALinearLayer(o_in_channels, o_out_channels, rank, module.to_out[0]) + for _ in range(self.frames) + ] + + self.to_q_lora = torch.nn.ModuleList(self.to_q_lora) + self.to_k_lora = torch.nn.ModuleList(self.to_k_lora) + self.to_v_lora = torch.nn.ModuleList(self.to_v_lora) + self.to_out_lora = torch.nn.ModuleList(self.to_out_lora) + + self.temporal_i = torch.nn.Linear( + in_features=hidden_size, out_features=hidden_size + ) + self.temporal_n = torch.nn.LayerNorm( + hidden_size, elementwise_affine=True, eps=1e-6 + ) + self.temporal_q = torch.nn.Linear( + in_features=hidden_size, out_features=hidden_size + ) + self.temporal_k = torch.nn.Linear( + in_features=hidden_size, out_features=hidden_size + ) + self.temporal_v = torch.nn.Linear( + in_features=hidden_size, out_features=hidden_size + ) + self.temporal_o = torch.nn.Linear( + in_features=hidden_size, out_features=hidden_size + ) + + self.control_convs = None + + if use_control: + self.control_convs = [ + torch.nn.Sequential( + torch.nn.Conv2d(256, 256, kernel_size=3, padding=1, stride=1), + torch.nn.SiLU(), + torch.nn.Conv2d(256, hidden_size, kernel_size=1), + ) + for _ in range(self.frames) + ] + self.control_convs = torch.nn.ModuleList(self.control_convs) + + self.control_signals = None + + def forward(self, h, context=None, value=None): + transformer_options = self.transformer_options + + modified_hidden_states = einops.rearrange( + h, "(b f) d c -> f b d c", f=self.frames + ) + + if self.control_convs is not None: + context_dim = int(modified_hidden_states.shape[2]) + control_outs = [] + for f in range(self.frames): + control_signal = self.control_signals[context_dim].to( + modified_hidden_states + ) + control = self.control_convs[f](control_signal) + control = einops.rearrange(control, "b c h w -> b (h w) c") + control_outs.append(control) + control_outs = torch.stack(control_outs, dim=0) + modified_hidden_states = modified_hidden_states + control_outs.to( + modified_hidden_states + ) + + if context is None: + framed_context = modified_hidden_states + else: + framed_context = einops.rearrange( + context, "(b f) d c -> f b d c", f=self.frames + ) + + framed_cond_mark = einops.rearrange( + compute_cond_mark( + transformer_options["cond_or_uncond"], + transformer_options["sigmas"], + ), + "(b f) -> f b", + f=self.frames, + ).to(modified_hidden_states) + + attn_outs = [] + for f in range(self.frames): + fcf = framed_context[f] + + if context is not None: + cond_overwrite = transformer_options.get("cond_overwrite", []) + if 
len(cond_overwrite) > f: + cond_overwrite = cond_overwrite[f] + else: + cond_overwrite = None + if cond_overwrite is not None: + cond_mark = framed_cond_mark[f][:, None, None] + fcf = cond_overwrite.to(fcf) * (1.0 - cond_mark) + fcf * cond_mark + + q = self.to_q_lora[f](modified_hidden_states[f]) + k = self.to_k_lora[f](fcf) + v = self.to_v_lora[f](fcf) + o = optimized_attention(q, k, v, self.heads) + o = self.to_out_lora[f](o) + o = self.original_module[0].to_out[1](o) + attn_outs.append(o) + + attn_outs = torch.stack(attn_outs, dim=0) + modified_hidden_states = modified_hidden_states + attn_outs.to( + modified_hidden_states + ) + modified_hidden_states = einops.rearrange( + modified_hidden_states, "f b d c -> (b f) d c", f=self.frames + ) + + x = modified_hidden_states + x = self.temporal_n(x) + x = self.temporal_i(x) + d = x.shape[1] + + x = einops.rearrange(x, "(b f) d c -> (b d) f c", f=self.frames) + + q = self.temporal_q(x) + k = self.temporal_k(x) + v = self.temporal_v(x) + + x = optimized_attention(q, k, v, self.heads) + x = self.temporal_o(x) + x = einops.rearrange(x, "(b d) f c -> (b f) d c", d=d) + + modified_hidden_states = modified_hidden_states + x + + return modified_hidden_states - h + + @classmethod + def hijack_transformer_block(cls): + def register_get_transformer_options(func): + @functools.wraps(func) + def forward(self, x, context=None, transformer_options={}): + cls.transformer_options = transformer_options + return func(self, x, context, transformer_options) + + return forward + + from comfy.ldm.modules.attention import BasicTransformerBlock + + BasicTransformerBlock.forward = register_get_transformer_options( + BasicTransformerBlock.forward + ) + + +AttentionSharingUnit.hijack_transformer_block() + + +class AdditionalAttentionCondsEncoder(torch.nn.Module): + def __init__(self): + super().__init__() + + self.blocks_0 = torch.nn.Sequential( + torch.nn.Conv2d(3, 32, kernel_size=3, padding=1, stride=1), + torch.nn.SiLU(), + torch.nn.Conv2d(32, 32, kernel_size=3, padding=1, stride=1), + torch.nn.SiLU(), + torch.nn.Conv2d(32, 64, kernel_size=3, padding=1, stride=2), + torch.nn.SiLU(), + torch.nn.Conv2d(64, 64, kernel_size=3, padding=1, stride=1), + torch.nn.SiLU(), + torch.nn.Conv2d(64, 128, kernel_size=3, padding=1, stride=2), + torch.nn.SiLU(), + torch.nn.Conv2d(128, 128, kernel_size=3, padding=1, stride=1), + torch.nn.SiLU(), + torch.nn.Conv2d(128, 256, kernel_size=3, padding=1, stride=2), + torch.nn.SiLU(), + torch.nn.Conv2d(256, 256, kernel_size=3, padding=1, stride=1), + torch.nn.SiLU(), + ) # 64*64*256 + + self.blocks_1 = torch.nn.Sequential( + torch.nn.Conv2d(256, 256, kernel_size=3, padding=1, stride=2), + torch.nn.SiLU(), + torch.nn.Conv2d(256, 256, kernel_size=3, padding=1, stride=1), + torch.nn.SiLU(), + ) # 32*32*256 + + self.blocks_2 = torch.nn.Sequential( + torch.nn.Conv2d(256, 256, kernel_size=3, padding=1, stride=2), + torch.nn.SiLU(), + torch.nn.Conv2d(256, 256, kernel_size=3, padding=1, stride=1), + torch.nn.SiLU(), + ) # 16*16*256 + + self.blocks_3 = torch.nn.Sequential( + torch.nn.Conv2d(256, 256, kernel_size=3, padding=1, stride=2), + torch.nn.SiLU(), + torch.nn.Conv2d(256, 256, kernel_size=3, padding=1, stride=1), + torch.nn.SiLU(), + ) # 8*8*256 + + self.blks = [self.blocks_0, self.blocks_1, self.blocks_2, self.blocks_3] + + def __call__(self, h): + results = {} + for b in self.blks: + h = b(h) + results[int(h.shape[2]) * int(h.shape[3])] = h + return results + + +class HookerLayers(torch.nn.Module): + def __init__(self, layer_list): + 
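+        # Wrap the attention-sharing units in an nn.ModuleList so they are registered as
+        # submodules and follow device/dtype casts (e.g. the .half() call in AttentionSharingPatcher).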
super().__init__() + self.layers = torch.nn.ModuleList(layer_list) + + +class AttentionSharingPatcher(torch.nn.Module): + def __init__(self, unet, frames=2, use_control=True, rank=256): + super().__init__() + model_management.unload_model_clones(unet) + + units = [] + for i in range(32): + real_key = module_mapping_sd15[i] + attn_module = utils.get_attr(unet.model.diffusion_model, real_key) + u = AttentionSharingUnit( + attn_module, frames=frames, use_control=use_control, rank=rank + ) + units.append(u) + unet.add_object_patch("diffusion_model." + real_key, u) + + self.hookers = HookerLayers(units) + + if use_control: + self.kwargs_encoder = AdditionalAttentionCondsEncoder() + else: + self.kwargs_encoder = None + + self.dtype = torch.float32 + if model_management.should_use_fp16(model_management.get_torch_device()): + self.dtype = torch.float16 + self.hookers.half() + return + + def set_control(self, img): + img = img.cpu().float() * 2.0 - 1.0 + signals = self.kwargs_encoder(img) + for m in self.hookers.layers: + m.control_signals = signals + return diff --git a/ComfyUI-Easy-Use/py/layer_diffuse/model.py b/ComfyUI-Easy-Use/py/layer_diffuse/model.py new file mode 100644 index 0000000000000000000000000000000000000000..d1ba1bdfc71a5c35a45c8d70d001bd39c562796e --- /dev/null +++ b/ComfyUI-Easy-Use/py/layer_diffuse/model.py @@ -0,0 +1,390 @@ +import torch.nn as nn +import torch +import cv2 +import numpy as np +import comfy.model_management + +from comfy.model_patcher import ModelPatcher +from tqdm import tqdm +from typing import Optional, Tuple +from ..libs.utils import install_package +from packaging import version + +try: + install_package("diffusers", "0.27.2", True, "0.25.0") + + from diffusers.configuration_utils import ConfigMixin, register_to_config + from diffusers.models.modeling_utils import ModelMixin + from diffusers import __version__ + if __version__: + if version.parse(__version__) < version.parse("0.26.0"): + from diffusers.models.unet_2d_blocks import UNetMidBlock2D, get_down_block, get_up_block + else: + from diffusers.models.unets.unet_2d_blocks import UNetMidBlock2D, get_down_block, get_up_block + + import functools + + def zero_module(module): + """ + Zero out the parameters of a module and return it. 
+ """ + for p in module.parameters(): + p.detach().zero_() + return module + + + class LatentTransparencyOffsetEncoder(torch.nn.Module): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.blocks = torch.nn.Sequential( + torch.nn.Conv2d(4, 32, kernel_size=3, padding=1, stride=1), + nn.SiLU(), + torch.nn.Conv2d(32, 32, kernel_size=3, padding=1, stride=1), + nn.SiLU(), + torch.nn.Conv2d(32, 64, kernel_size=3, padding=1, stride=2), + nn.SiLU(), + torch.nn.Conv2d(64, 64, kernel_size=3, padding=1, stride=1), + nn.SiLU(), + torch.nn.Conv2d(64, 128, kernel_size=3, padding=1, stride=2), + nn.SiLU(), + torch.nn.Conv2d(128, 128, kernel_size=3, padding=1, stride=1), + nn.SiLU(), + torch.nn.Conv2d(128, 256, kernel_size=3, padding=1, stride=2), + nn.SiLU(), + torch.nn.Conv2d(256, 256, kernel_size=3, padding=1, stride=1), + nn.SiLU(), + zero_module(torch.nn.Conv2d(256, 4, kernel_size=3, padding=1, stride=1)), + ) + + def __call__(self, x): + return self.blocks(x) + + + # 1024 * 1024 * 3 -> 16 * 16 * 512 -> 1024 * 1024 * 3 + class UNet1024(ModelMixin, ConfigMixin): + @register_to_config + def __init__( + self, + in_channels: int = 3, + out_channels: int = 3, + down_block_types: Tuple[str] = ( + "DownBlock2D", + "DownBlock2D", + "DownBlock2D", + "DownBlock2D", + "AttnDownBlock2D", + "AttnDownBlock2D", + "AttnDownBlock2D", + ), + up_block_types: Tuple[str] = ( + "AttnUpBlock2D", + "AttnUpBlock2D", + "AttnUpBlock2D", + "UpBlock2D", + "UpBlock2D", + "UpBlock2D", + "UpBlock2D", + ), + block_out_channels: Tuple[int] = (32, 32, 64, 128, 256, 512, 512), + layers_per_block: int = 2, + mid_block_scale_factor: float = 1, + downsample_padding: int = 1, + downsample_type: str = "conv", + upsample_type: str = "conv", + dropout: float = 0.0, + act_fn: str = "silu", + attention_head_dim: Optional[int] = 8, + norm_num_groups: int = 4, + norm_eps: float = 1e-5, + ): + super().__init__() + + # input + self.conv_in = nn.Conv2d( + in_channels, block_out_channels[0], kernel_size=3, padding=(1, 1) + ) + self.latent_conv_in = zero_module( + nn.Conv2d(4, block_out_channels[2], kernel_size=1) + ) + + self.down_blocks = nn.ModuleList([]) + self.mid_block = None + self.up_blocks = nn.ModuleList([]) + + # down + output_channel = block_out_channels[0] + for i, down_block_type in enumerate(down_block_types): + input_channel = output_channel + output_channel = block_out_channels[i] + is_final_block = i == len(block_out_channels) - 1 + + down_block = get_down_block( + down_block_type, + num_layers=layers_per_block, + in_channels=input_channel, + out_channels=output_channel, + temb_channels=None, + add_downsample=not is_final_block, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + attention_head_dim=( + attention_head_dim + if attention_head_dim is not None + else output_channel + ), + downsample_padding=downsample_padding, + resnet_time_scale_shift="default", + downsample_type=downsample_type, + dropout=dropout, + ) + self.down_blocks.append(down_block) + + # mid + self.mid_block = UNetMidBlock2D( + in_channels=block_out_channels[-1], + temb_channels=None, + dropout=dropout, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + output_scale_factor=mid_block_scale_factor, + resnet_time_scale_shift="default", + attention_head_dim=( + attention_head_dim + if attention_head_dim is not None + else block_out_channels[-1] + ), + resnet_groups=norm_num_groups, + attn_groups=None, + add_attention=True, + ) + + # up + reversed_block_out_channels = list(reversed(block_out_channels)) + 
output_channel = reversed_block_out_channels[0] + for i, up_block_type in enumerate(up_block_types): + prev_output_channel = output_channel + output_channel = reversed_block_out_channels[i] + input_channel = reversed_block_out_channels[ + min(i + 1, len(block_out_channels) - 1) + ] + + is_final_block = i == len(block_out_channels) - 1 + + up_block = get_up_block( + up_block_type, + num_layers=layers_per_block + 1, + in_channels=input_channel, + out_channels=output_channel, + prev_output_channel=prev_output_channel, + temb_channels=None, + add_upsample=not is_final_block, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + attention_head_dim=( + attention_head_dim + if attention_head_dim is not None + else output_channel + ), + resnet_time_scale_shift="default", + upsample_type=upsample_type, + dropout=dropout, + ) + self.up_blocks.append(up_block) + prev_output_channel = output_channel + + # out + self.conv_norm_out = nn.GroupNorm( + num_channels=block_out_channels[0], num_groups=norm_num_groups, eps=norm_eps + ) + self.conv_act = nn.SiLU() + self.conv_out = nn.Conv2d( + block_out_channels[0], out_channels, kernel_size=3, padding=1 + ) + + def forward(self, x, latent): + sample_latent = self.latent_conv_in(latent) + sample = self.conv_in(x) + emb = None + + down_block_res_samples = (sample,) + for i, downsample_block in enumerate(self.down_blocks): + if i == 3: + sample = sample + sample_latent + + sample, res_samples = downsample_block(hidden_states=sample, temb=emb) + down_block_res_samples += res_samples + + sample = self.mid_block(sample, emb) + + for upsample_block in self.up_blocks: + res_samples = down_block_res_samples[-len(upsample_block.resnets):] + down_block_res_samples = down_block_res_samples[ + : -len(upsample_block.resnets) + ] + sample = upsample_block(sample, res_samples, emb) + + sample = self.conv_norm_out(sample) + sample = self.conv_act(sample) + sample = self.conv_out(sample) + return sample + + + def checkerboard(shape): + return np.indices(shape).sum(axis=0) % 2 + + + def fill_checkerboard_bg(y: torch.Tensor) -> torch.Tensor: + alpha = y[..., :1] + fg = y[..., 1:] + B, H, W, C = fg.shape + cb = checkerboard(shape=(H // 64, W // 64)) + cb = cv2.resize(cb, (W, H), interpolation=cv2.INTER_NEAREST) + cb = (0.5 + (cb - 0.5) * 0.1)[None, ..., None] + cb = torch.from_numpy(cb).to(fg) + vis = fg * alpha + cb * (1 - alpha) + return vis + + + class TransparentVAEDecoder: + def __init__(self, sd, device, dtype): + self.load_device = device + self.dtype = dtype + + model = UNet1024(in_channels=3, out_channels=4) + model.load_state_dict(sd, strict=True) + model.to(self.load_device, dtype=self.dtype) + model.eval() + self.model = model + + @torch.no_grad() + def estimate_single_pass(self, pixel, latent): + y = self.model(pixel, latent) + return y + + @torch.no_grad() + def estimate_augmented(self, pixel, latent): + args = [ + [False, 0], + [False, 1], + [False, 2], + [False, 3], + [True, 0], + [True, 1], + [True, 2], + [True, 3], + ] + + result = [] + + for flip, rok in tqdm(args): + feed_pixel = pixel.clone() + feed_latent = latent.clone() + + if flip: + feed_pixel = torch.flip(feed_pixel, dims=(3,)) + feed_latent = torch.flip(feed_latent, dims=(3,)) + + feed_pixel = torch.rot90(feed_pixel, k=rok, dims=(2, 3)) + feed_latent = torch.rot90(feed_latent, k=rok, dims=(2, 3)) + + eps = self.estimate_single_pass(feed_pixel, feed_latent).clip(0, 1) + eps = torch.rot90(eps, k=-rok, dims=(2, 3)) + + if flip: + eps = torch.flip(eps, dims=(3,)) + + result += 
[eps] + + result = torch.stack(result, dim=0) + median = torch.median(result, dim=0).values + return median + + @torch.no_grad() + def decode_pixel( + self, pixel: torch.TensorType, latent: torch.TensorType + ) -> torch.TensorType: + # pixel.shape = [B, C=3, H, W] + assert pixel.shape[1] == 3 + pixel_device = pixel.device + pixel_dtype = pixel.dtype + + pixel = pixel.to(device=self.load_device, dtype=self.dtype) + latent = latent.to(device=self.load_device, dtype=self.dtype) + # y.shape = [B, C=4, H, W] + y = self.estimate_augmented(pixel, latent) + y = y.clip(0, 1) + assert y.shape[1] == 4 + # Restore image to original device of input image. + return y.to(pixel_device, dtype=pixel_dtype) + + + def calculate_weight_adjust_channel(func): + """Patches ComfyUI's LoRA weight application to accept multi-channel inputs.""" + @functools.wraps(func) + def calculate_weight( + patches, weight: torch.Tensor, key: str, intermediate_type=torch.float32 + ) -> torch.Tensor: + weight = func(patches, weight, key, intermediate_type) + + for p in patches: + alpha = p[0] + v = p[1] + + # The recursion call should be handled in the main func call. + if isinstance(v, list): + continue + + if len(v) == 1: + patch_type = "diff" + elif len(v) == 2: + patch_type = v[0] + v = v[1] + + if patch_type == "diff": + w1 = v[0] + if all( + ( + alpha != 0.0, + w1.shape != weight.shape, + w1.ndim == weight.ndim == 4, + ) + ): + new_shape = [max(n, m) for n, m in zip(weight.shape, w1.shape)] + print( + f"Merged with {key} channel changed from {weight.shape} to {new_shape}" + ) + new_diff = alpha * comfy.model_management.cast_to_device( + w1, weight.device, weight.dtype + ) + new_weight = torch.zeros(size=new_shape).to(weight) + new_weight[ + : weight.shape[0], + : weight.shape[1], + : weight.shape[2], + : weight.shape[3], + ] = weight + new_weight[ + : new_diff.shape[0], + : new_diff.shape[1], + : new_diff.shape[2], + : new_diff.shape[3], + ] += new_diff + new_weight = new_weight.contiguous().clone() + weight = new_weight + return weight + + return calculate_weight + + +except ImportError: + ModelMixin = None + ConfigMixin = None + TransparentVAEDecoder = None + calculate_weight_adjust_channel = None + print("\33[33mModule 'diffusers' load failed. 
If you don't have it installed, do it:\033[0m") + print("\33[33mpip install diffusers\033[0m") + + + diff --git a/ComfyUI-Easy-Use/py/libs/add_resources.py b/ComfyUI-Easy-Use/py/libs/add_resources.py new file mode 100644 index 0000000000000000000000000000000000000000..8d04ca3a7ea7783958a7dac566c136d69dd901e0 --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/add_resources.py @@ -0,0 +1,113 @@ +import urllib.parse +from os import PathLike +from aiohttp import web +from aiohttp.web_urldispatcher import AbstractRoute, UrlDispatcher +from server import PromptServer +from pathlib import Path + +# 文件限制大小(MB) +max_size = 50 +def suffix_limiter(self: web.StaticResource, request: web.Request): + suffixes = {".jpg", ".jpeg", ".png", ".gif", ".webp", ".bmp", ".tiff", ".svg", ".ico", ".apng", ".tif", ".hdr", ".exr"} + rel_url = request.match_info["filename"] + try: + filename = Path(rel_url) + if filename.anchor: + raise web.HTTPForbidden() + filepath = self._directory.joinpath(filename).resolve() + if filepath.exists() and filepath.suffix.lower() not in suffixes: + raise web.HTTPForbidden(reason="File type is not allowed") + finally: + pass + +def filesize_limiter(self: web.StaticResource, request: web.Request): + rel_url = request.match_info["filename"] + try: + filename = Path(rel_url) + filepath = self._directory.joinpath(filename).resolve() + if filepath.exists() and filepath.stat().st_size > max_size * 1024 * 1024: + raise web.HTTPForbidden(reason="File size is too large") + finally: + pass +class LimitResource(web.StaticResource): + limiters = [] + + def push_limiter(self, limiter): + self.limiters.append(limiter) + + async def _handle(self, request: web.Request) -> web.StreamResponse: + try: + for limiter in self.limiters: + limiter(self, request) + except (ValueError, FileNotFoundError) as error: + raise web.HTTPNotFound() from error + + return await super()._handle(request) + + def __repr__(self) -> str: + name = "'" + self.name + "'" if self.name is not None else "" + return f' {self._directory!r}>' + +class LimitRouter(web.StaticDef): + def __repr__(self) -> str: + info = [] + for name, value in sorted(self.kwargs.items()): + info.append(f", {name}={value!r}") + return f' {self.path}{"".join(info)}>' + + def register(self, router: UrlDispatcher) -> list[AbstractRoute]: + # resource = router.add_static(self.prefix, self.path, **self.kwargs) + def add_static( + self: UrlDispatcher, + prefix: str, + path: PathLike, + *, + name=None, + expect_handler=None, + chunk_size: int = 256 * 1024, + show_index: bool = False, + follow_symlinks: bool = False, + append_version: bool = False, + ) -> web.AbstractResource: + assert prefix.startswith("/") + if prefix.endswith("/"): + prefix = prefix[:-1] + resource = LimitResource( + prefix, + path, + name=name, + expect_handler=expect_handler, + chunk_size=chunk_size, + show_index=show_index, + follow_symlinks=follow_symlinks, + append_version=append_version, + ) + resource.push_limiter(suffix_limiter) + resource.push_limiter(filesize_limiter) + self.register_resource(resource) + return resource + resource = add_static(router, self.prefix, self.path, **self.kwargs) + routes = resource.get_info().get("routes", {}) + return list(routes.values()) + +def path_to_url(path): + if not path: + return path + path = path.replace("\\", "/") + if not path.startswith("/"): + path = "/" + path + while path.startswith("//"): + path = path[1:] + path = path.replace("//", "/") + return path + +def add_static_resource(prefix, path,limit=False): + app = PromptServer.instance.app + 
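+    # Normalize the prefix into a URL path (backslashes, leading slash), percent-encode it,
+    # then normalize once more as a safety pass before registering the route.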
prefix = path_to_url(prefix) + prefix = urllib.parse.quote(prefix) + prefix = path_to_url(prefix) + if limit: + route = LimitRouter(prefix, path, {"follow_symlinks": True}) + else: + route = web.static(prefix, path, follow_symlinks=True) + app.add_routes([route]) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/libs/adv_encode.py b/ComfyUI-Easy-Use/py/libs/adv_encode.py new file mode 100644 index 0000000000000000000000000000000000000000..c947dbb4cdbf6b6684b275fdf6b2d889ee646bda --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/adv_encode.py @@ -0,0 +1,427 @@ +import torch +import numpy as np +import re +import itertools + +from comfy import model_management +from comfy.sdxl_clip import SDXLClipModel, SDXLRefinerClipModel, SDXLClipG +try: + from comfy.text_encoders.sd3_clip import SD3ClipModel, T5XXLModel +except ImportError: + from comfy.sd3_clip import SD3ClipModel, T5XXLModel + +from nodes import NODE_CLASS_MAPPINGS, ConditioningConcat, ConditioningZeroOut, ConditioningSetTimestepRange, ConditioningCombine + +def _grouper(n, iterable): + it = iter(iterable) + while True: + chunk = list(itertools.islice(it, n)) + if not chunk: + return + yield chunk + + +def _norm_mag(w, n): + d = w - 1 + return 1 + np.sign(d) * np.sqrt(np.abs(d) ** 2 / n) + # return np.sign(w) * np.sqrt(np.abs(w)**2 / n) + + +def divide_length(word_ids, weights): + sums = dict(zip(*np.unique(word_ids, return_counts=True))) + sums[0] = 1 + weights = [[_norm_mag(w, sums[id]) if id != 0 else 1.0 + for w, id in zip(x, y)] for x, y in zip(weights, word_ids)] + return weights + + +def shift_mean_weight(word_ids, weights): + delta = 1 - np.mean([w for x, y in zip(weights, word_ids) for w, id in zip(x, y) if id != 0]) + weights = [[w if id == 0 else w + delta + for w, id in zip(x, y)] for x, y in zip(weights, word_ids)] + return weights + + +def scale_to_norm(weights, word_ids, w_max): + top = np.max(weights) + w_max = min(top, w_max) + weights = [[w_max if id == 0 else (w / top) * w_max + for w, id in zip(x, y)] for x, y in zip(weights, word_ids)] + return weights + + +def from_zero(weights, base_emb): + weight_tensor = torch.tensor(weights, dtype=base_emb.dtype, device=base_emb.device) + weight_tensor = weight_tensor.reshape(1, -1, 1).expand(base_emb.shape) + return base_emb * weight_tensor + + +def mask_word_id(tokens, word_ids, target_id, mask_token): + new_tokens = [[mask_token if wid == target_id else t + for t, wid in zip(x, y)] for x, y in zip(tokens, word_ids)] + mask = np.array(word_ids) == target_id + return (new_tokens, mask) + + +def batched_clip_encode(tokens, length, encode_func, num_chunks): + embs = [] + for e in _grouper(32, tokens): + enc, pooled = encode_func(e) + enc = enc.reshape((len(e), length, -1)) + embs.append(enc) + embs = torch.cat(embs) + embs = embs.reshape((len(tokens) // num_chunks, length * num_chunks, -1)) + return embs + + +def from_masked(tokens, weights, word_ids, base_emb, length, encode_func, m_token=266): + pooled_base = base_emb[0, length - 1:length, :] + wids, inds = np.unique(np.array(word_ids).reshape(-1), return_index=True) + weight_dict = dict((id, w) + for id, w in zip(wids, np.array(weights).reshape(-1)[inds]) + if w != 1.0) + + if len(weight_dict) == 0: + return torch.zeros_like(base_emb), base_emb[0, length - 1:length, :] + + weight_tensor = torch.tensor(weights, dtype=base_emb.dtype, device=base_emb.device) + weight_tensor = weight_tensor.reshape(1, -1, 1).expand(base_emb.shape) + + # m_token = (clip.tokenizer.end_token, 1.0) if clip.tokenizer.pad_with_end else (0,1.0) + 
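+    # m_token is the token id substituted for the words whose weight is being isolated; the
+    # note in down_weight() below suggests the comma token masks better than alternatives for SD 1.x.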
# TODO: find most suitable masking token here + m_token = (m_token, 1.0) + + ws = [] + masked_tokens = [] + masks = [] + + # create prompts + for id, w in weight_dict.items(): + masked, m = mask_word_id(tokens, word_ids, id, m_token) + masked_tokens.extend(masked) + + m = torch.tensor(m, dtype=base_emb.dtype, device=base_emb.device) + m = m.reshape(1, -1, 1).expand(base_emb.shape) + masks.append(m) + + ws.append(w) + + # batch process prompts + embs = batched_clip_encode(masked_tokens, length, encode_func, len(tokens)) + masks = torch.cat(masks) + + embs = (base_emb.expand(embs.shape) - embs) + pooled = embs[0, length - 1:length, :] + + embs *= masks + embs = embs.sum(axis=0, keepdim=True) + + pooled_start = pooled_base.expand(len(ws), -1) + ws = torch.tensor(ws).reshape(-1, 1).expand(pooled_start.shape) + pooled = (pooled - pooled_start) * (ws - 1) + pooled = pooled.mean(axis=0, keepdim=True) + + return ((weight_tensor - 1) * embs), pooled_base + pooled + + +def mask_inds(tokens, inds, mask_token): + clip_len = len(tokens[0]) + inds_set = set(inds) + new_tokens = [[mask_token if i * clip_len + j in inds_set else t + for j, t in enumerate(x)] for i, x in enumerate(tokens)] + return new_tokens + + +def down_weight(tokens, weights, word_ids, base_emb, length, encode_func, m_token=266): + w, w_inv = np.unique(weights, return_inverse=True) + + if np.sum(w < 1) == 0: + return base_emb, tokens, base_emb[0, length - 1:length, :] + # m_token = (clip.tokenizer.end_token, 1.0) if clip.tokenizer.pad_with_end else (0,1.0) + # using the comma token as a masking token seems to work better than aos tokens for SD 1.x + m_token = (m_token, 1.0) + + masked_tokens = [] + + masked_current = tokens + for i in range(len(w)): + if w[i] >= 1: + continue + masked_current = mask_inds(masked_current, np.where(w_inv == i)[0], m_token) + masked_tokens.extend(masked_current) + + embs = batched_clip_encode(masked_tokens, length, encode_func, len(tokens)) + embs = torch.cat([base_emb, embs]) + w = w[w <= 1.0] + w_mix = np.diff([0] + w.tolist()) + w_mix = torch.tensor(w_mix, dtype=embs.dtype, device=embs.device).reshape((-1, 1, 1)) + + weighted_emb = (w_mix * embs).sum(axis=0, keepdim=True) + return weighted_emb, masked_current, weighted_emb[0, length - 1:length, :] + + +def scale_emb_to_mag(base_emb, weighted_emb): + norm_base = torch.linalg.norm(base_emb) + norm_weighted = torch.linalg.norm(weighted_emb) + embeddings_final = (norm_base / norm_weighted) * weighted_emb + return embeddings_final + + +def recover_dist(base_emb, weighted_emb): + fixed_std = (base_emb.std() / weighted_emb.std()) * (weighted_emb - weighted_emb.mean()) + embeddings_final = fixed_std + (base_emb.mean() - fixed_std.mean()) + return embeddings_final + + +def A1111_renorm(base_emb, weighted_emb): + embeddings_final = (base_emb.mean() / weighted_emb.mean()) * weighted_emb + return embeddings_final + + +def advanced_encode_from_tokens(tokenized, token_normalization, weight_interpretation, encode_func, m_token=266, + length=77, w_max=1.0, return_pooled=False, apply_to_pooled=False): + tokens = [[t for t, _, _ in x] for x in tokenized] + weights = [[w for _, w, _ in x] for x in tokenized] + word_ids = [[wid for _, _, wid in x] for x in tokenized] + + # weight normalization + # ==================== + + # distribute down/up weights over word lengths + if token_normalization.startswith("length"): + weights = divide_length(word_ids, weights) + + # make mean of word tokens 1 + if token_normalization.endswith("mean"): + weights = shift_mean_weight(word_ids, 
weights) + + # weight interpretation + # ===================== + pooled = None + + if weight_interpretation == "comfy": + weighted_tokens = [[(t, w) for t, w in zip(x, y)] for x, y in zip(tokens, weights)] + weighted_emb, pooled_base = encode_func(weighted_tokens) + pooled = pooled_base + else: + unweighted_tokens = [[(t, 1.0) for t, _, _ in x] for x in tokenized] + base_emb, pooled_base = encode_func(unweighted_tokens) + + if weight_interpretation == "A1111": + weighted_emb = from_zero(weights, base_emb) + weighted_emb = A1111_renorm(base_emb, weighted_emb) + pooled = pooled_base + + if weight_interpretation == "compel": + pos_tokens = [[(t, w) if w >= 1.0 else (t, 1.0) for t, w in zip(x, y)] for x, y in zip(tokens, weights)] + weighted_emb, _ = encode_func(pos_tokens) + weighted_emb, _, pooled = down_weight(pos_tokens, weights, word_ids, weighted_emb, length, encode_func) + + if weight_interpretation == "comfy++": + weighted_emb, tokens_down, _ = down_weight(unweighted_tokens, weights, word_ids, base_emb, length, encode_func) + weights = [[w if w > 1.0 else 1.0 for w in x] for x in weights] + # unweighted_tokens = [[(t,1.0) for t, _,_ in x] for x in tokens_down] + embs, pooled = from_masked(unweighted_tokens, weights, word_ids, base_emb, length, encode_func) + weighted_emb += embs + + if weight_interpretation == "down_weight": + weights = scale_to_norm(weights, word_ids, w_max) + weighted_emb, _, pooled = down_weight(unweighted_tokens, weights, word_ids, base_emb, length, encode_func) + + if return_pooled: + if apply_to_pooled: + return weighted_emb, pooled + else: + return weighted_emb, pooled_base + return weighted_emb, None + + +def encode_token_weights_g(model, token_weight_pairs): + return model.clip_g.encode_token_weights(token_weight_pairs) + + +def encode_token_weights_l(model, token_weight_pairs): + l_out, pooled = model.clip_l.encode_token_weights(token_weight_pairs) + return l_out, pooled + +def encode_token_weights_t5(model, token_weight_pairs): + return model.t5xxl.encode_token_weights(token_weight_pairs) + + +def encode_token_weights(model, token_weight_pairs, encode_func): + if model.layer_idx is not None: + # 2016 [c2cb8e88] 及以上版本去除了sdxl clip的clip_layer方法 + # if compare_revision(2016): + model.cond_stage_model.set_clip_options({'layer': model.layer_idx}) + # else: + # model.cond_stage_model.clip_layer(model.layer_idx) + + model_management.load_model_gpu(model.patcher) + return encode_func(model.cond_stage_model, token_weight_pairs) + +def prepareXL(embs_l, embs_g, pooled, clip_balance): + l_w = 1 - max(0, clip_balance - .5) * 2 + g_w = 1 - max(0, .5 - clip_balance) * 2 + if embs_l is not None: + return torch.cat([embs_l * l_w, embs_g * g_w], dim=-1), pooled + else: + return embs_g, pooled + +def prepareSD3(out, pooled, clip_balance): + lg_w = 1 - max(0, clip_balance - .5) * 2 + t5_w = 1 - max(0, .5 - clip_balance) * 2 + if out.shape[0] > 1: + return torch.cat([out[0] * lg_w, out[1] * t5_w], dim=-1), pooled + else: + return out, pooled + +def advanced_encode(clip, text, token_normalization, weight_interpretation, w_max=1.0, clip_balance=.5, + apply_to_pooled=True, width=1024, height=1024, crop_w=0, crop_h=0, target_width=1024, target_height=1024, a1111_prompt_style=False, steps=1): + + # Use clip text encode by smzNodes like same as a1111, when if you need installed the smzNodes + if a1111_prompt_style: + if "smZ CLIPTextEncode" in NODE_CLASS_MAPPINGS: + cls = NODE_CLASS_MAPPINGS['smZ CLIPTextEncode'] + embeddings_final, = cls().encode(clip, text, weight_interpretation, 
True, True, False, False, 6, 1024, 1024, 0, 0, 1024, 1024, '', '', steps) + return embeddings_final + else: + raise Exception(f"[smzNodes Not Found] you need to install 'ComfyUI-smzNodes'") + + time_start = 0 + time_end = 1 + match = re.search(r'TIMESTEP.*$', text) + if match: + timestep = match.group() + timestep = timestep.split(' ') + timestep = timestep[0] + text = text.replace(timestep, '') + value = timestep.split(':') + if len(value) >= 3: + time_start = float(value[1]) + time_end = float(value[2]) + elif len(value) == 2: + time_start = float(value[1]) + time_end = 1 + elif len(value) == 1: + time_start = 0.1 + time_end = 1 + + pass3 = [x.strip() for x in text.split("BREAK")] + pass3 = [x for x in pass3 if x != ''] + + if len(pass3) == 0: + pass3 = [''] + + # pass3_str = [f'[{x}]' for x in pass3] + # print(f"CLIP: {str.join(' + ', pass3_str)}") + + conditioning = None + + for text in pass3: + tokenized = clip.tokenize(text, return_word_ids=True) + if SD3ClipModel and isinstance(clip.cond_stage_model, SD3ClipModel): + lg_out = None + pooled = None + out = None + + if len(tokenized['l']) > 0 or len(tokenized['g']) > 0: + if 'l' in tokenized: + lg_out, l_pooled = advanced_encode_from_tokens(tokenized['l'], + token_normalization, + weight_interpretation, + lambda x: encode_token_weights(clip, x, encode_token_weights_l), + w_max=w_max, return_pooled=True,) + else: + l_pooled = torch.zeros((1, 768), device=model_management.intermediate_device()) + + if 'g' in tokenized: + g_out, g_pooled = advanced_encode_from_tokens(tokenized['g'], + token_normalization, + weight_interpretation, + lambda x: encode_token_weights(clip, x, encode_token_weights_g), + w_max=w_max, return_pooled=True) + if lg_out is not None: + lg_out = torch.cat([lg_out, g_out], dim=-1) + else: + lg_out = torch.nn.functional.pad(g_out, (768, 0)) + else: + g_out = None + g_pooled = torch.zeros((1, 1280), device=model_management.intermediate_device()) + + if lg_out is not None: + lg_out = torch.nn.functional.pad(lg_out, (0, 4096 - lg_out.shape[-1])) + out = lg_out + pooled = torch.cat((l_pooled, g_pooled), dim=-1) + + # t5xxl + if 't5xxl' in tokenized and clip.cond_stage_model.t5xxl is not None: + t5_out, t5_pooled = advanced_encode_from_tokens(tokenized['t5xxl'], + token_normalization, + weight_interpretation, + lambda x: encode_token_weights(clip, x, encode_token_weights_t5), + w_max=w_max, return_pooled=True) + if lg_out is not None: + out = torch.cat([lg_out, t5_out], dim=-2) + else: + out = t5_out + + if out is None: + out = torch.zeros((1, 77, 4096), device=model_management.intermediate_device()) + + if pooled is None: + pooled = torch.zeros((1, 768 + 1280), device=model_management.intermediate_device()) + + embeddings_final, pooled = prepareSD3(out, pooled, clip_balance) + cond = [[embeddings_final, {"pooled_output": pooled}]] + + elif isinstance(clip.cond_stage_model, (SDXLClipModel, SDXLRefinerClipModel, SDXLClipG)): + embs_l = None + embs_g = None + pooled = None + if 'l' in tokenized and isinstance(clip.cond_stage_model, SDXLClipModel): + embs_l, _ = advanced_encode_from_tokens(tokenized['l'], + token_normalization, + weight_interpretation, + lambda x: encode_token_weights(clip, x, encode_token_weights_l), + w_max=w_max, + return_pooled=False) + if 'g' in tokenized: + embs_g, pooled = advanced_encode_from_tokens(tokenized['g'], + token_normalization, + weight_interpretation, + lambda x: encode_token_weights(clip, x, + encode_token_weights_g), + w_max=w_max, + return_pooled=True, + apply_to_pooled=apply_to_pooled) + + 
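+            # prepareXL blends the CLIP-L and CLIP-G streams according to clip_balance
+            # (0.5 keeps both at full weight) and concatenates them on the last dimension
+            # when both are present.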
embeddings_final, pooled = prepareXL(embs_l, embs_g, pooled, clip_balance) + + cond = [[embeddings_final, {"pooled_output": pooled}]] + # cond = [[embeddings_final, + # {"pooled_output": pooled, "width": width, "height": height, "crop_w": crop_w, + # "crop_h": crop_h, "target_width": target_width, "target_height": target_height}]] + else: + embeddings_final, pooled = advanced_encode_from_tokens(tokenized['l'], + token_normalization, + weight_interpretation, + lambda x: encode_token_weights(clip, x, encode_token_weights_l), + w_max=w_max,return_pooled=True,) + cond = [[embeddings_final, {"pooled_output": pooled}]] + + if conditioning is not None: + conditioning = ConditioningConcat().concat(conditioning, cond)[0] + else: + conditioning = cond + + # setTimeStepRange + if time_start > 0 or time_end < 1: + conditioning_2, = ConditioningSetTimestepRange().set_range(conditioning, 0, time_start) + conditioning_1, = ConditioningZeroOut().zero_out(conditioning) + conditioning_1, = ConditioningSetTimestepRange().set_range(conditioning_1, time_start, time_end) + conditioning, = ConditioningCombine().combine(conditioning_1, conditioning_2) + + return conditioning + + + diff --git a/ComfyUI-Easy-Use/py/libs/cache.py b/ComfyUI-Easy-Use/py/libs/cache.py new file mode 100644 index 0000000000000000000000000000000000000000..271ec3e65fc8e43d0c2573e746419c7d03f3bfe3 --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/cache.py @@ -0,0 +1,86 @@ +import itertools +from typing import Optional + +class TaggedCache: + def __init__(self, tag_settings: Optional[dict]=None): + self._tag_settings = tag_settings or {} # tag cache size + self._data = {} + + def __getitem__(self, key): + for tag_data in self._data.values(): + if key in tag_data: + return tag_data[key] + raise KeyError(f'Key `{key}` does not exist') + + def __setitem__(self, key, value: tuple): + # value: (tag: str, (islist: bool, data: *)) + + # if key already exists, pop old value + for tag_data in self._data.values(): + if key in tag_data: + tag_data.pop(key, None) + break + + tag = value[0] + if tag not in self._data: + + try: + from cachetools import LRUCache + + default_size = 20 + if 'ckpt' in tag: + default_size = 5 + elif tag in ['latent', 'image']: + default_size = 100 + + self._data[tag] = LRUCache(maxsize=self._tag_settings.get(tag, default_size)) + + except (ImportError, ModuleNotFoundError): + # TODO: implement a simple lru dict + self._data[tag] = {} + self._data[tag][key] = value + + def __delitem__(self, key): + for tag_data in self._data.values(): + if key in tag_data: + del tag_data[key] + return + raise KeyError(f'Key `{key}` does not exist') + + def __contains__(self, key): + return any(key in tag_data for tag_data in self._data.values()) + + def items(self): + yield from itertools.chain(*map(lambda x :x.items(), self._data.values())) + + def get(self, key, default=None): + """D.get(k[,d]) -> D[k] if k in D, else d. 
d defaults to None.""" + for tag_data in self._data.values(): + if key in tag_data: + return tag_data[key] + return default + + def clear(self): + # clear all cache + self._data = {} + +cache_settings = {} +cache = TaggedCache(cache_settings) +cache_count = {} + +def update_cache(k, tag, v): + cache[k] = (tag, v) + cnt = cache_count.get(k) + if cnt is None: + cnt = 0 + cache_count[k] = cnt + else: + cache_count[k] += 1 +def remove_cache(key): + global cache + if key == '*': + cache = TaggedCache(cache_settings) + elif key in cache: + del cache[key] + else: + print(f"invalid {key}") \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/libs/chooser.py b/ComfyUI-Easy-Use/py/libs/chooser.py new file mode 100644 index 0000000000000000000000000000000000000000..559eaff6b440c994b51973e58c32053e0fd3d13c --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/chooser.py @@ -0,0 +1,52 @@ +from server import PromptServer +from aiohttp import web +import time + +class ChooserCancelled(Exception): + pass + +class ChooserMessage: + stash = {} + messages = {} + cancelled = False + + @classmethod + def addMessage(cls, id, message): + if message == '__cancel__': + cls.messages = {} + cls.cancelled = True + elif message == '__start__': + cls.messages = {} + cls.stash = {} + cls.cancelled = False + else: + cls.messages[str(id)] = message + + @classmethod + def waitForMessage(cls, id, period=0.1, asList=False): + sid = str(id) + while not (sid in cls.messages) and not ("-1" in cls.messages): + if cls.cancelled: + cls.cancelled = False + raise ChooserCancelled() + time.sleep(period) + if cls.cancelled: + cls.cancelled = False + raise ChooserCancelled() + message = cls.messages.pop(str(id), None) or cls.messages.pop("-1") + try: + if asList: + return [int(x.strip()) for x in message.split(",")] + else: + return int(message.strip()) + except ValueError: + print( + f"ERROR IN IMAGE_CHOOSER - failed to parse '${message}' as ${'comma separated list of ints' if asList else 'int'}") + return [1] if asList else 1 + + +@PromptServer.instance.routes.post('/easyuse/image_chooser_message') +async def make_image_selection(request): + post = await request.post() + ChooserMessage.addMessage(post.get("id"), post.get("message")) + return web.json_response({}) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/libs/colorfix.py b/ComfyUI-Easy-Use/py/libs/colorfix.py new file mode 100644 index 0000000000000000000000000000000000000000..7792e7c1410083a4ff42175ddbd01638ca47675b --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/colorfix.py @@ -0,0 +1,115 @@ +import torch +from PIL import Image +from torch import Tensor +from torch.nn import functional as F + +from torchvision.transforms import ToTensor, ToPILImage + +def adain_color_fix(target: Image, source: Image): + # Convert images to tensors + to_tensor = ToTensor() + target_tensor = to_tensor(target).unsqueeze(0) + source_tensor = to_tensor(source).unsqueeze(0) + + # Apply adaptive instance normalization + result_tensor = adaptive_instance_normalization(target_tensor, source_tensor) + + # Convert tensor back to image + to_image = ToPILImage() + result_image = to_image(result_tensor.squeeze(0).clamp_(0.0, 1.0)) + + return result_image + +def wavelet_color_fix(target: Image, source: Image): + source = source.resize(target.size, resample=Image.Resampling.LANCZOS) + + # Convert images to tensors + to_tensor = ToTensor() + target_tensor = to_tensor(target).unsqueeze(0) + source_tensor = to_tensor(source).unsqueeze(0) + + # Apply wavelet reconstruction + result_tensor = 
wavelet_reconstruction(target_tensor, source_tensor) + + # Convert tensor back to image + to_image = ToPILImage() + result_image = to_image(result_tensor.squeeze(0).clamp_(0.0, 1.0)) + + return result_image + +def calc_mean_std(feat: Tensor, eps=1e-5): + """Calculate mean and std for adaptive_instance_normalization. + Args: + feat (Tensor): 4D tensor. + eps (float): A small value added to the variance to avoid + divide-by-zero. Default: 1e-5. + """ + size = feat.size() + assert len(size) == 4, 'The input feature should be 4D tensor.' + b, c = size[:2] + feat_var = feat.view(b, c, -1).var(dim=2) + eps + feat_std = feat_var.sqrt().view(b, c, 1, 1) + feat_mean = feat.view(b, c, -1).mean(dim=2).view(b, c, 1, 1) + return feat_mean, feat_std + +def adaptive_instance_normalization(content_feat:Tensor, style_feat:Tensor): + """Adaptive instance normalization. + Adjust the reference features to have the similar color and illuminations + as those in the degradate features. + Args: + content_feat (Tensor): The reference feature. + style_feat (Tensor): The degradate features. + """ + size = content_feat.size() + style_mean, style_std = calc_mean_std(style_feat) + content_mean, content_std = calc_mean_std(content_feat) + normalized_feat = (content_feat - content_mean.expand(size)) / content_std.expand(size) + return normalized_feat * style_std.expand(size) + style_mean.expand(size) + +def wavelet_blur(image: Tensor, radius: int): + """ + Apply wavelet blur to the input tensor. + """ + # input shape: (1, 3, H, W) + # convolution kernel + kernel_vals = [ + [0.0625, 0.125, 0.0625], + [0.125, 0.25, 0.125], + [0.0625, 0.125, 0.0625], + ] + kernel = torch.tensor(kernel_vals, dtype=image.dtype, device=image.device) + # add channel dimensions to the kernel to make it a 4D tensor + kernel = kernel[None, None] + # repeat the kernel across all input channels + kernel = kernel.repeat(3, 1, 1, 1) + image = F.pad(image, (radius, radius, radius, radius), mode='replicate') + # apply convolution + output = F.conv2d(image, kernel, groups=3, dilation=radius) + return output + +def wavelet_decomposition(image: Tensor, levels=5): + """ + Apply wavelet decomposition to the input tensor. + This function only returns the low frequency & the high frequency. + """ + high_freq = torch.zeros_like(image) + for i in range(levels): + radius = 2 ** i + low_freq = wavelet_blur(image, radius) + high_freq += (image - low_freq) + image = low_freq + + return high_freq, low_freq + +def wavelet_reconstruction(content_feat:Tensor, style_feat:Tensor): + """ + Apply wavelet decomposition, so that the content will have the same color as the style. 
+ """ + # calculate the wavelet decomposition of the content feature + content_high_freq, content_low_freq = wavelet_decomposition(content_feat) + del content_low_freq + # calculate the wavelet decomposition of the style feature + style_high_freq, style_low_freq = wavelet_decomposition(style_feat) + del style_high_freq + # reconstruct the content feature with the style's high frequency + return content_high_freq + style_low_freq \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/libs/conditioning.py b/ComfyUI-Easy-Use/py/libs/conditioning.py new file mode 100644 index 0000000000000000000000000000000000000000..bf88ce162009ff2a97222b242755a2670ef08cfa --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/conditioning.py @@ -0,0 +1,55 @@ +from .utils import find_wildcards_seed, find_nearest_steps, is_linked_styles_selector +from .log import log_node_warn +from .translate import zh_to_en, has_chinese +from .wildcards import process_with_loras +from .adv_encode import advanced_encode + +from nodes import ConditioningConcat, ConditioningCombine, ConditioningAverage, ConditioningSetTimestepRange, CLIPTextEncode + +def prompt_to_cond(type, model, clip, clip_skip, lora_stack, text, prompt_token_normalization, prompt_weight_interpretation, a1111_prompt_style ,my_unique_id, prompt, easyCache, can_load_lora=True, steps=None, model_type=None): + styles_selector = is_linked_styles_selector(prompt, my_unique_id, type) + title = "正面提示词" if type == 'positive' else "负面提示词" + log_node_warn("正在进行" + title + "...") + + if model_type in ['hydit', 'flux']: + if model_type == 'flux': + text = zh_to_en([text])[0] if has_chinese(text) else text + embeddings_final, = CLIPTextEncode().encode(clip, text) + return (embeddings_final, "", model, clip) + + # Translate cn to en + if has_chinese(text): + text = zh_to_en([text])[0] + + positive_seed = find_wildcards_seed(my_unique_id, text, prompt) + model, clip, text, cond_decode, show_prompt, pipe_lora_stack = process_with_loras( + text, model, clip, type, positive_seed, can_load_lora, lora_stack, easyCache) + wildcard_prompt = cond_decode if show_prompt or styles_selector else "" + + clipped = clip.clone() + if clip_skip != 0: + clipped.clip_layer(clip_skip) + + log_node_warn("正在进行" + title + "编码...") + steps = steps if steps is not None else find_nearest_steps(my_unique_id, prompt) + return (advanced_encode(clipped, text, prompt_token_normalization, + prompt_weight_interpretation, w_max=1.0, + apply_to_pooled='enable', + a1111_prompt_style=a1111_prompt_style, steps=steps), wildcard_prompt, model, clipped) + +def set_cond(old_cond, new_cond, mode, average_strength, old_cond_start, old_cond_end, new_cond_start, new_cond_end): + if not old_cond: + return new_cond + else: + if mode == "replace": + return new_cond + elif mode == "concat": + return ConditioningConcat().concat(new_cond, old_cond)[0] + elif mode == "combine": + return ConditioningCombine().combine(old_cond, new_cond)[0] + elif mode == 'average': + return ConditioningAverage().addWeighted(new_cond, old_cond, average_strength)[0] + elif mode == 'timestep': + cond_1 = ConditioningSetTimestepRange().set_range(old_cond, old_cond_start, old_cond_end)[0] + cond_2 = ConditioningSetTimestepRange().set_range(new_cond, new_cond_start, new_cond_end)[0] + return ConditioningCombine().combine(cond_1, cond_2)[0] \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/libs/controlnet.py b/ComfyUI-Easy-Use/py/libs/controlnet.py new file mode 100644 index 
0000000000000000000000000000000000000000..d4bacd8fe9ccb0dd615a34d2b5caaa84bec8acf3 --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/controlnet.py @@ -0,0 +1,89 @@ +import folder_paths +import comfy.controlnet +import comfy.model_management +from nodes import NODE_CLASS_MAPPINGS + +union_controlnet_types = {"auto": -1, "openpose": 0, "depth": 1, "hed/pidi/scribble/ted": 2, "canny/lineart/anime_lineart/mlsd": 3, "normal": 4, "segment": 5, "tile": 6, "repaint": 7} + +class easyControlnet: + def __init__(self): + pass + + def apply(self, control_net_name, image, positive, negative, strength, start_percent=0, end_percent=1, control_net=None, scale_soft_weights=1, mask=None, union_type=None, easyCache=None, use_cache=True, model=None, vae=None): + if strength == 0: + return (positive, negative) + + # kolors controlnet patch + from ..kolors.loader import is_kolors_model, applyKolorsUnet + if is_kolors_model(model): + from ..kolors.model_patch import patch_controlnet + if control_net is None: + with applyKolorsUnet(): + control_net = easyCache.load_controlnet(control_net_name, scale_soft_weights, use_cache) + control_net = patch_controlnet(model, control_net) + else: + if control_net is None: + control_net = easyCache.load_controlnet(control_net_name, scale_soft_weights, use_cache) + + # union controlnet + if union_type is not None: + control_net = control_net.copy() + type_number = union_controlnet_types[union_type] + if type_number >= 0: + control_net.set_extra_arg("control_type", [type_number]) + else: + control_net.set_extra_arg("control_type", []) + + if mask is not None: + mask = mask.to(self.device) + + if mask is not None and len(mask.shape) < 3: + mask = mask.unsqueeze(0) + + control_hint = image.movedim(-1, 1) + + is_cond = True + if negative is None: + p = [] + for t in positive: + n = [t[0], t[1].copy()] + c_net = control_net.copy().set_cond_hint(control_hint, strength, (start_percent, end_percent)) + if 'control' in t[1]: + c_net.set_previous_controlnet(t[1]['control']) + n[1]['control'] = c_net + n[1]['control_apply_to_uncond'] = True + if mask is not None: + n[1]['mask'] = mask + n[1]['set_area_to_bounds'] = False + p.append(n) + positive = p + else: + cnets = {} + out = [] + for conditioning in [positive, negative]: + c = [] + for t in conditioning: + d = t[1].copy() + + prev_cnet = d.get('control', None) + if prev_cnet in cnets: + c_net = cnets[prev_cnet] + else: + c_net = control_net.copy().set_cond_hint(control_hint, strength, (start_percent, end_percent), vae) + c_net.set_previous_controlnet(prev_cnet) + cnets[prev_cnet] = c_net + + d['control'] = c_net + d['control_apply_to_uncond'] = False + + if mask is not None: + d['mask'] = mask + d['set_area_to_bounds'] = False + + n = [t[0], d] + c.append(n) + out.append(c) + positive = out[0] + negative = out[1] + + return (positive, negative) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/libs/dynthres_core.py b/ComfyUI-Easy-Use/py/libs/dynthres_core.py new file mode 100644 index 0000000000000000000000000000000000000000..0c6945031f29bbb3a1486b48db34506cfaedfd54 --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/dynthres_core.py @@ -0,0 +1,167 @@ +import torch, math + +######################### DynThresh Core ######################### + +class DynThresh: + + Modes = ["Constant", "Linear Down", "Cosine Down", "Half Cosine Down", "Linear Up", "Cosine Up", "Half Cosine Up", "Power Up", "Power Down", "Linear Repeating", "Cosine Repeating", "Sawtooth"] + Startpoints = ["MEAN", "ZERO"] + Variabilities = ["AD", "STD"] + + def 
__init__(self, mimic_scale, threshold_percentile, mimic_mode, mimic_scale_min, cfg_mode, cfg_scale_min, sched_val, experiment_mode, max_steps, separate_feature_channels, scaling_startpoint, variability_measure, interpolate_phi): + self.mimic_scale = mimic_scale + self.threshold_percentile = threshold_percentile + self.mimic_mode = mimic_mode + self.cfg_mode = cfg_mode + self.max_steps = max_steps + self.cfg_scale_min = cfg_scale_min + self.mimic_scale_min = mimic_scale_min + self.experiment_mode = experiment_mode + self.sched_val = sched_val + self.sep_feat_channels = separate_feature_channels + self.scaling_startpoint = scaling_startpoint + self.variability_measure = variability_measure + self.interpolate_phi = interpolate_phi + + def interpret_scale(self, scale, mode, min): + scale -= min + max = self.max_steps - 1 + frac = self.step / max + if mode == "Constant": + pass + elif mode == "Linear Down": + scale *= 1.0 - frac + elif mode == "Half Cosine Down": + scale *= math.cos(frac) + elif mode == "Cosine Down": + scale *= math.cos(frac * 1.5707) + elif mode == "Linear Up": + scale *= frac + elif mode == "Half Cosine Up": + scale *= 1.0 - math.cos(frac) + elif mode == "Cosine Up": + scale *= 1.0 - math.cos(frac * 1.5707) + elif mode == "Power Up": + scale *= math.pow(frac, self.sched_val) + elif mode == "Power Down": + scale *= 1.0 - math.pow(frac, self.sched_val) + elif mode == "Linear Repeating": + portion = (frac * self.sched_val) % 1.0 + scale *= (0.5 - portion) * 2 if portion < 0.5 else (portion - 0.5) * 2 + elif mode == "Cosine Repeating": + scale *= math.cos(frac * 6.28318 * self.sched_val) * 0.5 + 0.5 + elif mode == "Sawtooth": + scale *= (frac * self.sched_val) % 1.0 + scale += min + return scale + + def dynthresh(self, cond, uncond, cfg_scale, weights): + mimic_scale = self.interpret_scale(self.mimic_scale, self.mimic_mode, self.mimic_scale_min) + cfg_scale = self.interpret_scale(cfg_scale, self.cfg_mode, self.cfg_scale_min) + # uncond shape is (batch, 4, height, width) + conds_per_batch = cond.shape[0] / uncond.shape[0] + assert conds_per_batch == int(conds_per_batch), "Expected # of conds per batch to be constant across batches" + cond_stacked = cond.reshape((-1, int(conds_per_batch)) + uncond.shape[1:]) + + ### Normal first part of the CFG Scale logic, basically + diff = cond_stacked - uncond.unsqueeze(1) + if weights is not None: + diff = diff * weights + relative = diff.sum(1) + + ### Get the normal result for both mimic and normal scale + mim_target = uncond + relative * mimic_scale + cfg_target = uncond + relative * cfg_scale + ### If we weren't doing mimic scale, we'd just return cfg_target here + + ### Now recenter the values relative to their average rather than absolute, to allow scaling from average + mim_flattened = mim_target.flatten(2) + cfg_flattened = cfg_target.flatten(2) + mim_means = mim_flattened.mean(dim=2).unsqueeze(2) + cfg_means = cfg_flattened.mean(dim=2).unsqueeze(2) + mim_centered = mim_flattened - mim_means + cfg_centered = cfg_flattened - cfg_means + + if self.sep_feat_channels: + if self.variability_measure == 'STD': + mim_scaleref = mim_centered.std(dim=2).unsqueeze(2) + cfg_scaleref = cfg_centered.std(dim=2).unsqueeze(2) + else: # 'AD' + mim_scaleref = mim_centered.abs().max(dim=2).values.unsqueeze(2) + cfg_scaleref = torch.quantile(cfg_centered.abs(), self.threshold_percentile, dim=2).unsqueeze(2) + + else: + if self.variability_measure == 'STD': + mim_scaleref = mim_centered.std() + cfg_scaleref = cfg_centered.std() + else: # 'AD' + 
mim_scaleref = mim_centered.abs().max() + cfg_scaleref = torch.quantile(cfg_centered.abs(), self.threshold_percentile) + + if self.scaling_startpoint == 'ZERO': + scaling_factor = mim_scaleref / cfg_scaleref + result = cfg_flattened * scaling_factor + + else: # 'MEAN' + if self.variability_measure == 'STD': + cfg_renormalized = (cfg_centered / cfg_scaleref) * mim_scaleref + else: # 'AD' + ### Get the maximum value of all datapoints (with an optional threshold percentile on the uncond) + max_scaleref = torch.maximum(mim_scaleref, cfg_scaleref) + ### Clamp to the max + cfg_clamped = cfg_centered.clamp(-max_scaleref, max_scaleref) + ### Now shrink from the max to normalize and grow to the mimic scale (instead of the CFG scale) + cfg_renormalized = (cfg_clamped / max_scaleref) * mim_scaleref + + ### Now add it back onto the averages to get into real scale again and return + result = cfg_renormalized + cfg_means + + actual_res = result.unflatten(2, mim_target.shape[2:]) + + if self.interpolate_phi != 1.0: + actual_res = actual_res * self.interpolate_phi + cfg_target * (1.0 - self.interpolate_phi) + + if self.experiment_mode == 1: + num = actual_res.cpu().numpy() + for y in range(0, 64): + for x in range (0, 64): + if num[0][0][y][x] > 1.0: + num[0][1][y][x] *= 0.5 + if num[0][1][y][x] > 1.0: + num[0][1][y][x] *= 0.5 + if num[0][2][y][x] > 1.5: + num[0][2][y][x] *= 0.5 + actual_res = torch.from_numpy(num).to(device=uncond.device) + elif self.experiment_mode == 2: + num = actual_res.cpu().numpy() + for y in range(0, 64): + for x in range (0, 64): + over_scale = False + for z in range(0, 4): + if abs(num[0][z][y][x]) > 1.5: + over_scale = True + if over_scale: + for z in range(0, 4): + num[0][z][y][x] *= 0.7 + actual_res = torch.from_numpy(num).to(device=uncond.device) + elif self.experiment_mode == 3: + coefs = torch.tensor([ + # R G B W + [0.298, 0.207, 0.208, 0.0], # L1 + [0.187, 0.286, 0.173, 0.0], # L2 + [-0.158, 0.189, 0.264, 0.0], # L3 + [-0.184, -0.271, -0.473, 1.0], # L4 + ], device=uncond.device) + res_rgb = torch.einsum("laxy,ab -> lbxy", actual_res, coefs) + max_r, max_g, max_b, max_w = res_rgb[0][0].max(), res_rgb[0][1].max(), res_rgb[0][2].max(), res_rgb[0][3].max() + max_rgb = max(max_r, max_g, max_b) + print(f"test max = r={max_r}, g={max_g}, b={max_b}, w={max_w}, rgb={max_rgb}") + if self.step / (self.max_steps - 1) > 0.2: + if max_rgb < 2.0 and max_w < 3.0: + res_rgb /= max_rgb / 2.4 + else: + if max_rgb > 2.4 and max_w > 3.0: + res_rgb /= max_rgb / 2.4 + actual_res = torch.einsum("laxy,ab -> lbxy", res_rgb, coefs.inverse()) + + return actual_res \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/libs/easing.py b/ComfyUI-Easy-Use/py/libs/easing.py new file mode 100644 index 0000000000000000000000000000000000000000..db300c93e3764f2ce30b590c9f559d6936628265 --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/easing.py @@ -0,0 +1,27 @@ +@staticmethod +def easyIn(t: float)-> float: + return t*t +@staticmethod +def easyOut(t: float)-> float: + return -(t * (t - 2)) +@staticmethod +def easyInOut(t: float)-> float: + if t < 0.5: + return 2*t*t + else: + return (-2*t*t) + (4*t) - 1 + +class EasingBase: + + def easing(self, t: float, function='linear') -> float: + if function == 'easyIn': + return easyIn(t) + elif function == 'easyOut': + return easyOut(t) + elif function == 'easyInOut': + return easyInOut(t) + else: + return t + + def ease(self, start, end, t) -> float: + return end * t + start * (1 - t) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/libs/fooocus.py 
b/ComfyUI-Easy-Use/py/libs/fooocus.py new file mode 100644 index 0000000000000000000000000000000000000000..61f699b47d65441fdab8dc6e32fcd578f5160795 --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/fooocus.py @@ -0,0 +1,119 @@ +#credit to Acly for this module +#from https://github.com/Acly/comfyui-inpaint-nodes +import torch +import torch.nn.functional as F +import comfy +from comfy.model_base import BaseModel +from comfy.model_patcher import ModelPatcher +from comfy.model_management import cast_to_device + +from .log import log_node_warn, log_node_error, log_node_info + +# Inpaint +if hasattr(comfy.lora, "calculate_weight"): + original_calculate_weight = comfy.lora.calculate_weight +else: + original_calculate_weight = ModelPatcher.calculate_weight +injected_model_patcher_calculate_weight = False + +class InpaintHead(torch.nn.Module): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.head = torch.nn.Parameter(torch.empty(size=(320, 5, 3, 3), device="cpu")) + + def __call__(self, x): + x = F.pad(x, (1, 1, 1, 1), "replicate") + return F.conv2d(x, weight=self.head) + +def calculate_weight_patched(patches, weight, key, intermediate_type=torch.float32): + remaining = [] + + for p in patches: + alpha = p[0] + v = p[1] + + is_fooocus_patch = isinstance(v, tuple) and len(v) == 2 and v[0] == "fooocus" + if not is_fooocus_patch: + remaining.append(p) + continue + + if alpha != 0.0: + v = v[1] + w1 = cast_to_device(v[0], weight.device, torch.float32) + if w1.shape == weight.shape: + w_min = cast_to_device(v[1], weight.device, torch.float32) + w_max = cast_to_device(v[2], weight.device, torch.float32) + w1 = (w1 / 255.0) * (w_max - w_min) + w_min + weight += alpha * cast_to_device(w1, weight.device, weight.dtype) + else: + pass + # log_node_warn(self.node_name, + # f"Shape mismatch {key}, weight not merged ({w1.shape} != {weight.shape})" + # ) + + if len(remaining) > 0: + return original_calculate_weight(remaining, weight, key, intermediate_type) + return weight + +def inject_patched_calculate_weight(): + global injected_model_patcher_calculate_weight + if not injected_model_patcher_calculate_weight: + print( + "[comfyui-inpaint-nodes] Injecting patched comfy.model_patcher.ModelPatcher.calculate_weight" + ) + if hasattr(comfy.lora, "calculate_weight"): + comfy.lora.calculate_weight = calculate_weight_patched + else: + ModelPatcher.calculate_weight = calculate_weight_patched + injected_model_patcher_calculate_weight = True + +class InpaintWorker: + def __init__(self, node_name): + self.node_name = node_name if node_name is not None else "" + + def load_fooocus_patch(self, lora: dict, to_load: dict): + patch_dict = {} + loaded_keys = set() + for key in to_load.values(): + if value := lora.get(key, None): + patch_dict[key] = ("fooocus", value) + loaded_keys.add(key) + + not_loaded = sum(1 for x in lora if x not in loaded_keys) + if not_loaded > 0: + log_node_info(self.node_name, + f"{len(loaded_keys)} Lora keys loaded, {not_loaded} remaining keys not found in model." 
+ ) + return patch_dict + + + def patch(self, model, latent, patch): + base_model: BaseModel = model.model + latent_pixels = base_model.process_latent_in(latent["samples"]) + noise_mask = latent["noise_mask"].round() + latent_mask = F.max_pool2d(noise_mask, (8, 8)).round().to(latent_pixels) + + inpaint_head_model, inpaint_lora = patch + feed = torch.cat([latent_mask, latent_pixels], dim=1) + inpaint_head_model.to(device=feed.device, dtype=feed.dtype) + inpaint_head_feature = inpaint_head_model(feed) + + def input_block_patch(h, transformer_options): + if transformer_options["block"][1] == 0: + h = h + inpaint_head_feature.to(h) + return h + + lora_keys = comfy.lora.model_lora_keys_unet(model.model, {}) + lora_keys.update({x: x for x in base_model.state_dict().keys()}) + loaded_lora = self.load_fooocus_patch(inpaint_lora, lora_keys) + + m = model.clone() + m.set_model_input_block_patch(input_block_patch) + patched = m.add_patches(loaded_lora, 1.0) + + not_patched_count = sum(1 for x in loaded_lora if x not in patched) + if not_patched_count > 0: + log_node_error(self.node_name, f"Failed to patch {not_patched_count} keys") + + inject_patched_calculate_weight() + return (m,) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/libs/gradual_latent_hires_fix.py b/ComfyUI-Easy-Use/py/libs/gradual_latent_hires_fix.py new file mode 100644 index 0000000000000000000000000000000000000000..82d94704f24d5ed36f733cf78ebdc60bd8eecd51 --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/gradual_latent_hires_fix.py @@ -0,0 +1,273 @@ +import torch +from torchvision.transforms.functional import gaussian_blur +from comfy.k_diffusion.sampling import default_noise_sampler, get_ancestral_step, to_d, BrownianTreeNoiseSampler +from tqdm.auto import trange + +@torch.no_grad() +def sample_euler_ancestral( + model, + x, + sigmas, + extra_args=None, + callback=None, + disable=None, + eta=1.0, + s_noise=1.0, + noise_sampler=None, + upscale_ratio=2.0, + start_step=5, + end_step=15, + upscale_n_step=3, + unsharp_kernel_size=3, + unsharp_sigma=0.5, + unsharp_strength=0.0, +): + """Ancestral sampling with Euler method steps.""" + extra_args = {} if extra_args is None else extra_args + noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler + s_in = x.new_ones([x.shape[0]]) + + # make upscale info + upscale_steps = [] + step = start_step - 1 + while step < end_step - 1: + upscale_steps.append(step) + step += upscale_n_step + height, width = x.shape[2:] + upscale_shapes = [ + (int(height * (((upscale_ratio - 1) / i) + 1)), int(width * (((upscale_ratio - 1) / i) + 1))) + for i in reversed(range(1, len(upscale_steps) + 1)) + ] + upscale_info = {k: v for k, v in zip(upscale_steps, upscale_shapes)} + + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + sigma_down, sigma_up = get_ancestral_step(sigmas[i], sigmas[i + 1], eta=eta) + if callback is not None: + callback({"x": x, "i": i, "sigma": sigmas[i], "sigma_hat": sigmas[i], "denoised": denoised}) + d = to_d(x, sigmas[i], denoised) + # Euler method + dt = sigma_down - sigmas[i] + x = x + d * dt + if sigmas[i + 1] > 0: + # Resize + if i in upscale_info: + x = torch.nn.functional.interpolate(x, size=upscale_info[i], mode="bicubic", align_corners=False) + if unsharp_strength > 0: + blurred = gaussian_blur(x, kernel_size=unsharp_kernel_size, sigma=unsharp_sigma) + x = x + unsharp_strength * (x - blurred) + + noise_sampler = default_noise_sampler(x) + noise = noise_sampler(sigmas[i], sigmas[i + 1]) + 
x = x + noise * sigma_up * s_noise + return x + + +@torch.no_grad() +def sample_dpmpp_2s_ancestral( + model, + x, + sigmas, + extra_args=None, + callback=None, + disable=None, + eta=1.0, + s_noise=1.0, + noise_sampler=None, + upscale_ratio=2.0, + start_step=5, + end_step=15, + upscale_n_step=3, + unsharp_kernel_size=3, + unsharp_sigma=0.5, + unsharp_strength=0.0, +): + """Ancestral sampling with DPM-Solver++(2S) second-order steps.""" + extra_args = {} if extra_args is None else extra_args + s_in = x.new_ones([x.shape[0]]) + sigma_fn = lambda t: t.neg().exp() + t_fn = lambda sigma: sigma.log().neg() + + # make upscale info + upscale_steps = [] + step = start_step - 1 + while step < end_step - 1: + upscale_steps.append(step) + step += upscale_n_step + height, width = x.shape[2:] + upscale_shapes = [ + (int(height * (((upscale_ratio - 1) / i) + 1)), int(width * (((upscale_ratio - 1) / i) + 1))) + for i in reversed(range(1, len(upscale_steps) + 1)) + ] + upscale_info = {k: v for k, v in zip(upscale_steps, upscale_shapes)} + + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + sigma_down, sigma_up = get_ancestral_step(sigmas[i], sigmas[i + 1], eta=eta) + if callback is not None: + callback({"x": x, "i": i, "sigma": sigmas[i], "sigma_hat": sigmas[i], "denoised": denoised}) + if sigma_down == 0: + # Euler method + d = to_d(x, sigmas[i], denoised) + dt = sigma_down - sigmas[i] + x = x + d * dt + else: + # DPM-Solver++(2S) + t, t_next = t_fn(sigmas[i]), t_fn(sigma_down) + r = 1 / 2 + h = t_next - t + s = t + r * h + x_2 = (sigma_fn(s) / sigma_fn(t)) * x - (-h * r).expm1() * denoised + denoised_2 = model(x_2, sigma_fn(s) * s_in, **extra_args) + x = (sigma_fn(t_next) / sigma_fn(t)) * x - (-h).expm1() * denoised_2 + # Noise addition + if sigmas[i + 1] > 0: + # Resize + if i in upscale_info: + x = torch.nn.functional.interpolate(x, size=upscale_info[i], mode="bicubic", align_corners=False) + if unsharp_strength > 0: + blurred = gaussian_blur(x, kernel_size=unsharp_kernel_size, sigma=unsharp_sigma) + x = x + unsharp_strength * (x - blurred) + noise_sampler = default_noise_sampler(x) + noise = noise_sampler(sigmas[i], sigmas[i + 1]) + x = x + noise * sigma_up * s_noise + return x + + +@torch.no_grad() +def sample_dpmpp_2m_sde( + model, + x, + sigmas, + extra_args=None, + callback=None, + disable=None, + eta=1.0, + s_noise=1.0, + noise_sampler=None, + solver_type="midpoint", + upscale_ratio=2.0, + start_step=5, + end_step=15, + upscale_n_step=3, + unsharp_kernel_size=3, + unsharp_sigma=0.5, + unsharp_strength=0.0, +): + """DPM-Solver++(2M) SDE.""" + + if solver_type not in {"heun", "midpoint"}: + raise ValueError("solver_type must be 'heun' or 'midpoint'") + + seed = extra_args.get("seed", None) + sigma_min, sigma_max = sigmas[sigmas > 0].min(), sigmas.max() + extra_args = {} if extra_args is None else extra_args + s_in = x.new_ones([x.shape[0]]) + + old_denoised = None + h_last = None + h = None + + # make upscale info + upscale_steps = [] + step = start_step - 1 + while step < end_step - 1: + upscale_steps.append(step) + step += upscale_n_step + height, width = x.shape[2:] + upscale_shapes = [ + (int(height * (((upscale_ratio - 1) / i) + 1)), int(width * (((upscale_ratio - 1) / i) + 1))) + for i in reversed(range(1, len(upscale_steps) + 1)) + ] + upscale_info = {k: v for k, v in zip(upscale_steps, upscale_shapes)} + + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + if callback is not None: 
+ callback({"x": x, "i": i, "sigma": sigmas[i], "sigma_hat": sigmas[i], "denoised": denoised}) + if sigmas[i + 1] == 0: + # Denoising step + x = denoised + else: + # DPM-Solver++(2M) SDE + t, s = -sigmas[i].log(), -sigmas[i + 1].log() + h = s - t + eta_h = eta * h + + x = sigmas[i + 1] / sigmas[i] * (-eta_h).exp() * x + (-h - eta_h).expm1().neg() * denoised + + if old_denoised is not None: + r = h_last / h + if solver_type == "heun": + x = x + ((-h - eta_h).expm1().neg() / (-h - eta_h) + 1) * (1 / r) * (denoised - old_denoised) + elif solver_type == "midpoint": + x = x + 0.5 * (-h - eta_h).expm1().neg() * (1 / r) * (denoised - old_denoised) + + if eta: + # Resize + if i in upscale_info: + x = torch.nn.functional.interpolate(x, size=upscale_info[i], mode="bicubic", align_corners=False) + if unsharp_strength > 0: + blurred = gaussian_blur(x, kernel_size=unsharp_kernel_size, sigma=unsharp_sigma) + x = x + unsharp_strength * (x - blurred) + denoised = None # 次ステップとサイズがあわないのでとりあえずNoneにしておく。 + noise_sampler = BrownianTreeNoiseSampler(x, sigma_min, sigma_max, seed=seed, cpu=True) + x = x + noise_sampler(sigmas[i], sigmas[i + 1]) * sigmas[i + 1] * (-2 * eta_h).expm1().neg().sqrt() * s_noise + + old_denoised = denoised + h_last = h + return x + + +@torch.no_grad() +def sample_lcm( + model, + x, + sigmas, + extra_args=None, + callback=None, + disable=None, + noise_sampler=None, + eta=None, + s_noise=None, + upscale_ratio=2.0, + start_step=5, + end_step=15, + upscale_n_step=3, + unsharp_kernel_size=3, + unsharp_sigma=0.5, + unsharp_strength=0.0, +): + extra_args = {} if extra_args is None else extra_args + s_in = x.new_ones([x.shape[0]]) + + # make upscale info + upscale_steps = [] + step = start_step - 1 + while step < end_step - 1: + upscale_steps.append(step) + step += upscale_n_step + height, width = x.shape[2:] + upscale_shapes = [ + (int(height * (((upscale_ratio - 1) / i) + 1)), int(width * (((upscale_ratio - 1) / i) + 1))) + for i in reversed(range(1, len(upscale_steps) + 1)) + ] + upscale_info = {k: v for k, v in zip(upscale_steps, upscale_shapes)} + + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + if callback is not None: + callback({"x": x, "i": i, "sigma": sigmas[i], "sigma_hat": sigmas[i], "denoised": denoised}) + + x = denoised + if sigmas[i + 1] > 0: + # Resize + if i in upscale_info: + x = torch.nn.functional.interpolate(x, size=upscale_info[i], mode="bicubic", align_corners=False) + if unsharp_strength > 0: + blurred = gaussian_blur(x, kernel_size=unsharp_kernel_size, sigma=unsharp_sigma) + x = x + unsharp_strength * (x - blurred) + noise_sampler = default_noise_sampler(x) + x += sigmas[i + 1] * noise_sampler(sigmas[i], sigmas[i + 1]) + + return x diff --git a/ComfyUI-Easy-Use/py/libs/image.py b/ComfyUI-Easy-Use/py/libs/image.py new file mode 100644 index 0000000000000000000000000000000000000000..c4ec1893f9e3087c9cf6385fec1df022e615becf --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/image.py @@ -0,0 +1,194 @@ +import os +import base64 +import torch +import numpy as np +from enum import Enum +from PIL import Image +from io import BytesIO +from typing import List, Union + +import folder_paths +from .utils import install_package + +# PIL to Tensor +def pil2tensor(image): + return torch.from_numpy(np.array(image).astype(np.float32) / 255.0).unsqueeze(0) +# Tensor to PIL +def tensor2pil(image): + return Image.fromarray(np.clip(255. 
* image.cpu().numpy().squeeze(), 0, 255).astype(np.uint8)) +# np to Tensor +def np2tensor(img_np: Union[np.ndarray, List[np.ndarray]]) -> torch.Tensor: + if isinstance(img_np, list): + return torch.cat([np2tensor(img) for img in img_np], dim=0) + return torch.from_numpy(img_np.astype(np.float32) / 255.0).unsqueeze(0) +# Tensor to np +def tensor2np(tensor: torch.Tensor) -> List[np.ndarray]: + if len(tensor.shape) == 3: # Single image + return np.clip(255.0 * tensor.cpu().numpy(), 0, 255).astype(np.uint8) + else: # Batch of images + return [np.clip(255.0 * t.cpu().numpy(), 0, 255).astype(np.uint8) for t in tensor] + +def pil2byte(pil_image, format='PNG'): + byte_arr = BytesIO() + pil_image.save(byte_arr, format=format) + byte_arr.seek(0) + return byte_arr + +def image2base64(image_base64): + image_bytes = base64.b64decode(image_base64) + image_data = Image.open(BytesIO(image_bytes)) + return image_data + +# Get new bounds +def get_new_bounds(width, height, left, right, top, bottom): + """Returns the new bounds for an image with inset crop data.""" + left = 0 + left + right = width - right + top = 0 + top + bottom = height - bottom + return (left, right, top, bottom) + +def RGB2RGBA(image: Image, mask: Image) -> Image: + (R, G, B) = image.convert('RGB').split() + return Image.merge('RGBA', (R, G, B, mask.convert('L'))) + +def image2mask(image: Image) -> torch.Tensor: + _image = image.convert('RGBA') + alpha = _image.split()[0] + bg = Image.new("L", _image.size) + _image = Image.merge('RGBA', (bg, bg, bg, alpha)) + ret_mask = torch.tensor([pil2tensor(_image)[0, :, :, 3].tolist()]) + return ret_mask + +def mask2image(mask: torch.Tensor) -> Image: + masks = tensor2np(mask) + for m in masks: + _mask = Image.fromarray(m).convert("L") + _image = Image.new("RGBA", _mask.size, color='white') + _image = Image.composite( + _image, Image.new("RGBA", _mask.size, color='black'), _mask) + return _image + +# 图像融合 +class blendImage: + def g(self, x): + return torch.where(x <= 0.25, ((16 * x - 12) * x + 4) * x, torch.sqrt(x)) + + def blend_mode(self, img1, img2, mode): + if mode == "normal": + return img2 + elif mode == "multiply": + return img1 * img2 + elif mode == "screen": + return 1 - (1 - img1) * (1 - img2) + elif mode == "overlay": + return torch.where(img1 <= 0.5, 2 * img1 * img2, 1 - 2 * (1 - img1) * (1 - img2)) + elif mode == "soft_light": + return torch.where(img2 <= 0.5, img1 - (1 - 2 * img2) * img1 * (1 - img1), + img1 + (2 * img2 - 1) * (self.g(img1) - img1)) + elif mode == "difference": + return img1 - img2 + else: + raise ValueError(f"Unsupported blend mode: {mode}") + + def blend_images(self, image1: torch.Tensor, image2: torch.Tensor, blend_factor: float, blend_mode: str = 'normal'): + image2 = image2.to(image1.device) + if image1.shape != image2.shape: + image2 = image2.permute(0, 3, 1, 2) + image2 = comfy.utils.common_upscale(image2, image1.shape[2], image1.shape[1], upscale_method='bicubic', + crop='center') + image2 = image2.permute(0, 2, 3, 1) + + blended_image = self.blend_mode(image1, image2, blend_mode) + blended_image = image1 * (1 - blend_factor) + blended_image * blend_factor + blended_image = torch.clamp(blended_image, 0, 1) + return blended_image + + + + +class ResizeMode(Enum): + RESIZE = "Just Resize" + INNER_FIT = "Crop and Resize" + OUTER_FIT = "Resize and Fill" + def int_value(self): + if self == ResizeMode.RESIZE: + return 0 + elif self == ResizeMode.INNER_FIT: + return 1 + elif self == ResizeMode.OUTER_FIT: + return 2 + assert False, "NOTREACHED" + + + +# CLIP反推 +import 
comfy.utils +from torchvision import transforms +Config, Interrogator = None, None +class CI_Inference: + ci_model = None + cache_path: str + + def __init__(self): + self.ci_model = None + self.low_vram = False + self.cache_path = os.path.join(folder_paths.models_dir, "clip_interrogator") + + def _load_model(self, model_name, low_vram=False): + if not (self.ci_model and model_name == self.ci_model.config.clip_model_name and self.low_vram == low_vram): + self.low_vram = low_vram + print(f"Load model: {model_name}") + + config = Config( + device="cuda" if torch.cuda.is_available() else "cpu", + download_cache=True, + clip_model_name=model_name, + clip_model_path=self.cache_path, + cache_path=self.cache_path, + caption_model_name='blip-large' + ) + + if low_vram: + config.apply_low_vram_defaults() + + self.ci_model = Interrogator(config) + + def _interrogate(self, image, mode, caption=None): + if mode == 'best': + prompt = self.ci_model.interrogate(image, caption=caption) + elif mode == 'classic': + prompt = self.ci_model.interrogate_classic(image, caption=caption) + elif mode == 'fast': + prompt = self.ci_model.interrogate_fast(image, caption=caption) + elif mode == 'negative': + prompt = self.ci_model.interrogate_negative(image) + else: + raise Exception(f"Unknown mode {mode}") + return prompt + + def image_to_prompt(self, image, mode, model_name='ViT-L-14/openai', low_vram=False): + try: + from clip_interrogator import Config, Interrogator + global Config, Interrogator + except: + install_package("clip_interrogator", "0.6.0") + from clip_interrogator import Config, Interrogator + + pbar = comfy.utils.ProgressBar(len(image)) + + self._load_model(model_name, low_vram) + prompt = [] + for i in range(len(image)): + im = image[i] + + im = tensor2pil(im) + im = im.convert('RGB') + + _prompt = self._interrogate(im, mode) + pbar.update(1) + prompt.append(_prompt) + + return prompt + +ci = CI_Inference() diff --git a/ComfyUI-Easy-Use/py/libs/lllite.py b/ComfyUI-Easy-Use/py/libs/lllite.py new file mode 100644 index 0000000000000000000000000000000000000000..5fd686ebb3356da497bb78fdd7287e46cabc176a --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/lllite.py @@ -0,0 +1,237 @@ +import math +import torch +import comfy + + +def extra_options_to_module_prefix(extra_options): + # extra_options = {'transformer_index': 2, 'block_index': 8, 'original_shape': [2, 4, 128, 128], 'block': ('input', 7), 'n_heads': 20, 'dim_head': 64} + + # block is: [('input', 4), ('input', 5), ('input', 7), ('input', 8), ('middle', 0), + # ('output', 0), ('output', 1), ('output', 2), ('output', 3), ('output', 4), ('output', 5)] + # transformer_index is: [0, 1, 2, 3, 4, 5, 6, 7, 8], for each block + # block_index is: 0-1 or 0-9, depends on the block + # input 7 and 8, middle has 10 blocks + + # make module name from extra_options + block = extra_options["block"] + block_index = extra_options["block_index"] + if block[0] == "input": + module_pfx = f"lllite_unet_input_blocks_{block[1]}_1_transformer_blocks_{block_index}" + elif block[0] == "middle": + module_pfx = f"lllite_unet_middle_block_1_transformer_blocks_{block_index}" + elif block[0] == "output": + module_pfx = f"lllite_unet_output_blocks_{block[1]}_1_transformer_blocks_{block_index}" + else: + raise Exception("invalid block name") + return module_pfx + + +def load_control_net_lllite_patch(path, cond_image, multiplier, num_steps, start_percent, end_percent): + # calculate start and end step + start_step = math.floor(num_steps * start_percent * 0.01) if start_percent > 0 else 0 + 
end_step = math.floor(num_steps * end_percent * 0.01) if end_percent > 0 else num_steps + + # load weights + ctrl_sd = comfy.utils.load_torch_file(path, safe_load=True) + + # split each weights for each module + module_weights = {} + for key, value in ctrl_sd.items(): + fragments = key.split(".") + module_name = fragments[0] + weight_name = ".".join(fragments[1:]) + + if module_name not in module_weights: + module_weights[module_name] = {} + module_weights[module_name][weight_name] = value + + # load each module + modules = {} + for module_name, weights in module_weights.items(): + # ここの自動判定を何とかしたい + if "conditioning1.4.weight" in weights: + depth = 3 + elif weights["conditioning1.2.weight"].shape[-1] == 4: + depth = 2 + else: + depth = 1 + + module = LLLiteModule( + name=module_name, + is_conv2d=weights["down.0.weight"].ndim == 4, + in_dim=weights["down.0.weight"].shape[1], + depth=depth, + cond_emb_dim=weights["conditioning1.0.weight"].shape[0] * 2, + mlp_dim=weights["down.0.weight"].shape[0], + multiplier=multiplier, + num_steps=num_steps, + start_step=start_step, + end_step=end_step, + ) + info = module.load_state_dict(weights) + modules[module_name] = module + if len(modules) == 1: + module.is_first = True + + print(f"loaded {path} successfully, {len(modules)} modules") + + # cond imageをセットする + cond_image = cond_image.permute(0, 3, 1, 2) # b,h,w,3 -> b,3,h,w + cond_image = cond_image * 2.0 - 1.0 # 0-1 -> -1-+1 + + for module in modules.values(): + module.set_cond_image(cond_image) + + class control_net_lllite_patch: + def __init__(self, modules): + self.modules = modules + + def __call__(self, q, k, v, extra_options): + module_pfx = extra_options_to_module_prefix(extra_options) + + is_attn1 = q.shape[-1] == k.shape[-1] # self attention + if is_attn1: + module_pfx = module_pfx + "_attn1" + else: + module_pfx = module_pfx + "_attn2" + + module_pfx_to_q = module_pfx + "_to_q" + module_pfx_to_k = module_pfx + "_to_k" + module_pfx_to_v = module_pfx + "_to_v" + + if module_pfx_to_q in self.modules: + q = q + self.modules[module_pfx_to_q](q) + if module_pfx_to_k in self.modules: + k = k + self.modules[module_pfx_to_k](k) + if module_pfx_to_v in self.modules: + v = v + self.modules[module_pfx_to_v](v) + + return q, k, v + + def to(self, device): + for d in self.modules.keys(): + self.modules[d] = self.modules[d].to(device) + return self + + return control_net_lllite_patch(modules) + +class LLLiteModule(torch.nn.Module): + def __init__( + self, + name: str, + is_conv2d: bool, + in_dim: int, + depth: int, + cond_emb_dim: int, + mlp_dim: int, + multiplier: int, + num_steps: int, + start_step: int, + end_step: int, + ): + super().__init__() + self.name = name + self.is_conv2d = is_conv2d + self.multiplier = multiplier + self.num_steps = num_steps + self.start_step = start_step + self.end_step = end_step + self.is_first = False + + modules = [] + modules.append(torch.nn.Conv2d(3, cond_emb_dim // 2, kernel_size=4, stride=4, padding=0)) # to latent (from VAE) size*2 + if depth == 1: + modules.append(torch.nn.ReLU(inplace=True)) + modules.append(torch.nn.Conv2d(cond_emb_dim // 2, cond_emb_dim, kernel_size=2, stride=2, padding=0)) + elif depth == 2: + modules.append(torch.nn.ReLU(inplace=True)) + modules.append(torch.nn.Conv2d(cond_emb_dim // 2, cond_emb_dim, kernel_size=4, stride=4, padding=0)) + elif depth == 3: + # kernel size 8は大きすぎるので、4にする / kernel size 8 is too large, so set it to 4 + modules.append(torch.nn.ReLU(inplace=True)) + modules.append(torch.nn.Conv2d(cond_emb_dim // 2, cond_emb_dim // 
2, kernel_size=4, stride=4, padding=0)) + modules.append(torch.nn.ReLU(inplace=True)) + modules.append(torch.nn.Conv2d(cond_emb_dim // 2, cond_emb_dim, kernel_size=2, stride=2, padding=0)) + + self.conditioning1 = torch.nn.Sequential(*modules) + + if self.is_conv2d: + self.down = torch.nn.Sequential( + torch.nn.Conv2d(in_dim, mlp_dim, kernel_size=1, stride=1, padding=0), + torch.nn.ReLU(inplace=True), + ) + self.mid = torch.nn.Sequential( + torch.nn.Conv2d(mlp_dim + cond_emb_dim, mlp_dim, kernel_size=1, stride=1, padding=0), + torch.nn.ReLU(inplace=True), + ) + self.up = torch.nn.Sequential( + torch.nn.Conv2d(mlp_dim, in_dim, kernel_size=1, stride=1, padding=0), + ) + else: + self.down = torch.nn.Sequential( + torch.nn.Linear(in_dim, mlp_dim), + torch.nn.ReLU(inplace=True), + ) + self.mid = torch.nn.Sequential( + torch.nn.Linear(mlp_dim + cond_emb_dim, mlp_dim), + torch.nn.ReLU(inplace=True), + ) + self.up = torch.nn.Sequential( + torch.nn.Linear(mlp_dim, in_dim), + ) + + self.depth = depth + self.cond_image = None + self.cond_emb = None + self.current_step = 0 + + # @torch.inference_mode() + def set_cond_image(self, cond_image): + # print("set_cond_image", self.name) + self.cond_image = cond_image + self.cond_emb = None + self.current_step = 0 + + def forward(self, x): + if self.num_steps > 0: + if self.current_step < self.start_step: + self.current_step += 1 + return torch.zeros_like(x) + elif self.current_step >= self.end_step: + if self.is_first and self.current_step == self.end_step: + print(f"end LLLite: step {self.current_step}") + self.current_step += 1 + if self.current_step >= self.num_steps: + self.current_step = 0 # reset + return torch.zeros_like(x) + else: + if self.is_first and self.current_step == self.start_step: + print(f"start LLLite: step {self.current_step}") + self.current_step += 1 + if self.current_step >= self.num_steps: + self.current_step = 0 # reset + + if self.cond_emb is None: + # print(f"cond_emb is None, {self.name}") + cx = self.conditioning1(self.cond_image.to(x.device, dtype=x.dtype)) + if not self.is_conv2d: + # reshape / b,c,h,w -> b,h*w,c + n, c, h, w = cx.shape + cx = cx.view(n, c, h * w).permute(0, 2, 1) + self.cond_emb = cx + + cx = self.cond_emb + # print(f"forward {self.name}, {cx.shape}, {x.shape}") + + # uncond/condでxはバッチサイズが2倍 + if x.shape[0] != cx.shape[0]: + if self.is_conv2d: + cx = cx.repeat(x.shape[0] // cx.shape[0], 1, 1, 1) + else: + # print("x.shape[0] != cx.shape[0]", x.shape[0], cx.shape[0]) + cx = cx.repeat(x.shape[0] // cx.shape[0], 1, 1) + + cx = torch.cat([cx, self.down(x)], dim=1 if self.is_conv2d else 2) + cx = self.mid(cx) + cx = self.up(cx) + return cx * self.multiplier \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/libs/loader.py b/ComfyUI-Easy-Use/py/libs/loader.py new file mode 100644 index 0000000000000000000000000000000000000000..7fc4d768eb779ee8e71d8ac297892a48dc5b1b4c --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/loader.py @@ -0,0 +1,609 @@ +import re, time, os, psutil +import folder_paths +import comfy.utils +import comfy.sd +import comfy.controlnet + +from comfy.model_patcher import ModelPatcher +from nodes import NODE_CLASS_MAPPINGS +from collections import defaultdict +from .log import log_node_info, log_node_error +from ..dit.pixArt.loader import load_pixart + +stable_diffusion_loaders = ["easy fullLoader", "easy a1111Loader", "easy comfyLoader", "easy hunyuanDiTLoader","easy zero123Loader", "easy svdLoader"] +stable_cascade_loaders = ["easy cascadeLoader"] +dit_loaders = ['easy pixArtLoader'] 
+controlnet_loaders = ["easy controlnetLoader", "easy controlnetLoaderADV"] +instant_loaders = ["easy instantIDApply", "easy instantIDApplyADV"] +cascade_vae_node = ["easy preSamplingCascade", "easy fullCascadeKSampler"] +model_merge_node = ["easy XYInputs: ModelMergeBlocks"] +lora_widget = ["easy fullLoader", "easy a1111Loader", "easy comfyLoader"] + +class easyLoader: + def __init__(self): + self.loaded_objects = { + "ckpt": defaultdict(tuple), # {ckpt_name: (model, ...)} + "unet": defaultdict(tuple), + "clip": defaultdict(tuple), + "clip_vision": defaultdict(tuple), + "bvae": defaultdict(tuple), + "vae": defaultdict(object), + "lora": defaultdict(dict), # {lora_name: {UID: (model_lora, clip_lora)}} + "controlnet": defaultdict(dict), + "t5": defaultdict(tuple), + "chatglm3": defaultdict(tuple), + } + self.memory_threshold = self.determine_memory_threshold(0.7) + self.lora_name_cache = [] + + def clean_values(self, values: str): + original_values = values.split("; ") + cleaned_values = [] + + for value in original_values: + cleaned_value = value.strip(';').strip() + if cleaned_value == "": + continue + try: + cleaned_value = int(cleaned_value) + except ValueError: + try: + cleaned_value = float(cleaned_value) + except ValueError: + pass + cleaned_values.append(cleaned_value) + + return cleaned_values + + def clear_unused_objects(self, desired_names: set, object_type: str): + keys = set(self.loaded_objects[object_type].keys()) + for key in keys - desired_names: + del self.loaded_objects[object_type][key] + + def get_input_value(self, entry, key, prompt=None): + val = entry["inputs"][key] + if isinstance(val, str): + return val + elif isinstance(val, list): + if prompt is not None and val[0]: + return prompt[val[0]]['inputs'][key] + else: + return val[0] + else: + return str(val) + + def process_pipe_loader(self, entry, desired_ckpt_names, desired_vae_names, desired_lora_names, desired_lora_settings, num_loras=3, suffix=""): + for idx in range(1, num_loras + 1): + lora_name_key = f"{suffix}lora{idx}_name" + desired_lora_names.add(self.get_input_value(entry, lora_name_key)) + setting = f'{self.get_input_value(entry, lora_name_key)};{entry["inputs"][f"{suffix}lora{idx}_model_strength"]};{entry["inputs"][f"{suffix}lora{idx}_clip_strength"]}' + desired_lora_settings.add(setting) + + desired_ckpt_names.add(self.get_input_value(entry, f"{suffix}ckpt_name")) + desired_vae_names.add(self.get_input_value(entry, f"{suffix}vae_name")) + + def update_loaded_objects(self, prompt): + desired_ckpt_names = set() + desired_unet_names = set() + desired_clip_names = set() + desired_vae_names = set() + desired_lora_names = set() + desired_lora_settings = set() + desired_controlnet_names = set() + desired_t5_names = set() + desired_glm3_names = set() + + for entry in prompt.values(): + class_type = entry["class_type"] + if class_type in lora_widget: + lora_name = self.get_input_value(entry, "lora_name") + desired_lora_names.add(lora_name) + setting = f'{lora_name};{entry["inputs"]["lora_model_strength"]};{entry["inputs"]["lora_clip_strength"]}' + desired_lora_settings.add(setting) + + if class_type in stable_diffusion_loaders: + desired_ckpt_names.add(self.get_input_value(entry, "ckpt_name", prompt)) + desired_vae_names.add(self.get_input_value(entry, "vae_name")) + + elif class_type in ['easy kolorsLoader']: + desired_unet_names.add(self.get_input_value(entry, "unet_name")) + desired_vae_names.add(self.get_input_value(entry, "vae_name")) + desired_glm3_names.add(self.get_input_value(entry, "chatglm3_name")) + 
+ elif class_type in dit_loaders: + t5_name = self.get_input_value(entry, "mt5_name") if "mt5_name" in entry["inputs"] else None + clip_name = self.get_input_value(entry, "clip_name") if "clip_name" in entry["inputs"] else None + model_name = self.get_input_value(entry, "model_name") + ckpt_name = self.get_input_value(entry, "ckpt_name", prompt) + if t5_name: + desired_t5_names.add(t5_name) + if clip_name: + desired_clip_names.add(clip_name) + desired_ckpt_names.add(ckpt_name+'_'+model_name) + + elif class_type in stable_cascade_loaders: + desired_unet_names.add(self.get_input_value(entry, "stage_c")) + desired_unet_names.add(self.get_input_value(entry, "stage_b")) + desired_clip_names.add(self.get_input_value(entry, "clip_name")) + desired_vae_names.add(self.get_input_value(entry, "stage_a")) + + elif class_type in cascade_vae_node: + encode_vae_name = self.get_input_value(entry, "encode_vae_name") + decode_vae_name = self.get_input_value(entry, "decode_vae_name") + if encode_vae_name and encode_vae_name != 'None': + desired_vae_names.add(encode_vae_name) + if decode_vae_name and decode_vae_name != 'None': + desired_vae_names.add(decode_vae_name) + + elif class_type in controlnet_loaders: + control_net_name = self.get_input_value(entry, "control_net_name", prompt) + scale_soft_weights = self.get_input_value(entry, "scale_soft_weights") + desired_controlnet_names.add(f'{control_net_name};{scale_soft_weights}') + + elif class_type in instant_loaders: + control_net_name = self.get_input_value(entry, "control_net_name", prompt) + scale_soft_weights = self.get_input_value(entry, "cn_soft_weights") + desired_controlnet_names.add(f'{control_net_name};{scale_soft_weights}') + + elif class_type in model_merge_node: + desired_ckpt_names.add(self.get_input_value(entry, "ckpt_name_1")) + desired_ckpt_names.add(self.get_input_value(entry, "ckpt_name_2")) + vae_use = self.get_input_value(entry, "vae_use") + if vae_use != 'Use Model 1' and vae_use != 'Use Model 2': + desired_vae_names.add(vae_use) + + object_types = ["ckpt", "unet", "clip", "bvae", "vae", "lora", "controlnet", "t5"] + for object_type in object_types: + if object_type == 'unet': + desired_names = desired_unet_names + elif object_type in ["ckpt", "clip", "bvae"]: + if object_type == 'clip': + desired_names = desired_ckpt_names.union(desired_clip_names) + else: + desired_names = desired_ckpt_names + elif object_type == "vae": + desired_names = desired_vae_names + elif object_type == "controlnet": + desired_names = desired_controlnet_names + elif object_type == "t5": + desired_names = desired_t5_names + elif object_type == "chatglm3": + desired_names = desired_glm3_names + else: + desired_names = desired_lora_names + self.clear_unused_objects(desired_names, object_type) + + def add_to_cache(self, obj_type, key, value): + """ + Add an item to the cache with the current timestamp. + """ + timestamped_value = (value, time.time()) + self.loaded_objects[obj_type][key] = timestamped_value + + def determine_memory_threshold(self, percentage=0.8): + """ + Determines the memory threshold as a percentage of the total available memory. + Args: + - percentage (float): The fraction of total memory to use as the threshold. + Should be a value between 0 and 1. Default is 0.8 (80%). + Returns: + - memory_threshold (int): Memory threshold in bytes. 
+ """ + total_memory = psutil.virtual_memory().total + memory_threshold = total_memory * percentage + return memory_threshold + + def get_memory_usage(self): + """ + Returns the memory usage of the current process in bytes. + """ + process = psutil.Process(os.getpid()) + return process.memory_info().rss + + def eviction_based_on_memory(self): + """ + Evicts objects from cache based on memory usage and priority. + """ + current_memory = self.get_memory_usage() + if current_memory < self.memory_threshold: + return + eviction_order = ["vae", "lora", "bvae", "clip", "ckpt", "controlnet", "unet", "t5", "chatglm3"] + for obj_type in eviction_order: + if current_memory < self.memory_threshold: + break + # Sort items based on age (using the timestamp) + items = list(self.loaded_objects[obj_type].items()) + items.sort(key=lambda x: x[1][1]) # Sorting by timestamp + + for item in items: + if current_memory < self.memory_threshold: + break + del self.loaded_objects[obj_type][item[0]] + current_memory = self.get_memory_usage() + + def load_checkpoint(self, ckpt_name, config_name=None, load_vision=False): + cache_name = ckpt_name + if config_name not in [None, "Default"]: + cache_name = ckpt_name + "_" + config_name + if cache_name in self.loaded_objects["ckpt"]: + clip_vision = self.loaded_objects["clip_vision"][cache_name][0] if load_vision else None + clip = self.loaded_objects["clip"][cache_name][0] if not load_vision else None + return self.loaded_objects["ckpt"][cache_name][0], clip, self.loaded_objects["bvae"][cache_name][0], clip_vision + + ckpt_path = folder_paths.get_full_path("checkpoints", ckpt_name) + + output_clip = False if load_vision else True + output_clipvision = True if load_vision else False + if config_name not in [None, "Default"]: + config_path = folder_paths.get_full_path("configs", config_name) + loaded_ckpt = comfy.sd.load_checkpoint(config_path, ckpt_path, output_vae=True, output_clip=output_clip, embedding_directory=folder_paths.get_folder_paths("embeddings")) + else: + model_options = {} + if re.search("nf4", ckpt_name): + from ..bitsandbytes_NF4 import OPS + model_options = {"custom_operations": OPS} + loaded_ckpt = comfy.sd.load_checkpoint_guess_config(ckpt_path, output_vae=True, output_clip=output_clip, output_clipvision=output_clipvision, embedding_directory=folder_paths.get_folder_paths("embeddings"), model_options=model_options) + + self.add_to_cache("ckpt", cache_name, loaded_ckpt[0]) + self.add_to_cache("bvae", cache_name, loaded_ckpt[2]) + + clip = loaded_ckpt[1] + clip_vision = loaded_ckpt[3] + if clip: + self.add_to_cache("clip", cache_name, clip) + if clip_vision: + self.add_to_cache("clip_vision", cache_name, clip_vision) + + self.eviction_based_on_memory() + + return loaded_ckpt[0], clip, loaded_ckpt[2], clip_vision + + def load_vae(self, vae_name): + if vae_name in self.loaded_objects["vae"]: + return self.loaded_objects["vae"][vae_name][0] + + vae_path = folder_paths.get_full_path("vae", vae_name) + sd = comfy.utils.load_torch_file(vae_path) + loaded_vae = comfy.sd.VAE(sd=sd) + self.add_to_cache("vae", vae_name, loaded_vae) + self.eviction_based_on_memory() + + return loaded_vae + + def load_unet(self, unet_name): + if unet_name in self.loaded_objects["unet"]: + log_node_info("Load UNet", f"{unet_name} cached") + return self.loaded_objects["unet"][unet_name][0] + + unet_path = folder_paths.get_full_path("unet", unet_name) + model = comfy.sd.load_unet(unet_path) + self.add_to_cache("unet", unet_name, model) + self.eviction_based_on_memory() + + return model 
+ + def load_controlnet(self, control_net_name, scale_soft_weights=1, use_cache=True): + unique_id = f'{control_net_name};{str(scale_soft_weights)}' + if use_cache and unique_id in self.loaded_objects["controlnet"]: + return self.loaded_objects["controlnet"][unique_id][0] + if scale_soft_weights < 1: + if "ScaledSoftControlNetWeights" in NODE_CLASS_MAPPINGS: + soft_weight_cls = NODE_CLASS_MAPPINGS['ScaledSoftControlNetWeights'] + (weights, timestep_keyframe) = soft_weight_cls().load_weights(scale_soft_weights, False) + cn_adv_cls = NODE_CLASS_MAPPINGS['ControlNetLoaderAdvanced'] + control_net, = cn_adv_cls().load_controlnet(control_net_name, timestep_keyframe) + else: + raise Exception(f"[Advanced-ControlNet Not Found] you need to install 'COMFYUI-Advanced-ControlNet'") + else: + controlnet_path = folder_paths.get_full_path("controlnet", control_net_name) + control_net = comfy.controlnet.load_controlnet(controlnet_path) + if use_cache: + self.add_to_cache("controlnet", unique_id, control_net) + self.eviction_based_on_memory() + + return control_net + def load_clip(self, clip_name, type='stable_diffusion', load_clip=None): + if clip_name in self.loaded_objects["clip"]: + return self.loaded_objects["clip"][clip_name][0] + + if type == 'stable_diffusion': + clip_type = comfy.sd.CLIPType.STABLE_DIFFUSION + elif type == 'stable_cascade': + clip_type = comfy.sd.CLIPType.STABLE_CASCADE + elif type == 'sd3': + clip_type = comfy.sd.CLIPType.SD3 + elif type == 'flux': + clip_type = comfy.sd.CLIPType.FLUX + elif type == 'stable_audio': + clip_type = comfy.sd.CLIPType.STABLE_AUDIO + clip_path = folder_paths.get_full_path("clip", clip_name) + load_clip = comfy.sd.load_clip(ckpt_paths=[clip_path], embedding_directory=folder_paths.get_folder_paths("embeddings"), clip_type=clip_type) + self.add_to_cache("clip", clip_name, load_clip) + self.eviction_based_on_memory() + + return load_clip + + def load_lora(self, lora, model=None, clip=None, type=None): + lora_name = lora["lora_name"] + model = model if model is not None else lora["model"] + clip = clip if clip is not None else lora["clip"] + model_strength = lora["model_strength"] + clip_strength = lora["clip_strength"] + lbw = lora["lbw"] if "lbw" in lora else None + lbw_a = lora["lbw_a"] if "lbw_a" in lora else None + lbw_b = lora["lbw_b"] if "lbw_b" in lora else None + + model_hash = str(model)[44:-1] + clip_hash = str(clip)[25:-1] if clip else '' + + unique_id = f'{model_hash};{clip_hash};{lora_name};{model_strength};{clip_strength}' + + if unique_id in self.loaded_objects["lora"]: + log_node_info("Load LORA",f"{lora_name} cached") + return self.loaded_objects["lora"][unique_id][0] + + orig_lora_name = lora_name + lora_name = self.resolve_lora_name(lora_name) + + if lora_name is not None: + lora_path = folder_paths.get_full_path("loras", lora_name) + else: + lora_path = None + + if lora_path is not None: + log_node_info("Load LORA",f"{lora_name}: {model_strength}, {clip_strength}, LBW={lbw}, A={lbw_a}, B={lbw_b}") + if lbw: + lbw = lora["lbw"] + lbw_a = lora["lbw_a"] + lbw_b = lora["lbw_b"] + if 'LoraLoaderBlockWeight //Inspire' not in NODE_CLASS_MAPPINGS: + raise Exception('[InspirePack Not Found] you need to install ComfyUI-Inspire-Pack') + cls = NODE_CLASS_MAPPINGS['LoraLoaderBlockWeight //Inspire'] + model, clip, _ = cls().doit(model, clip, lora_name, model_strength, clip_strength, False, 0, + lbw_a, lbw_b, "", lbw) + else: + _lora = comfy.utils.load_torch_file(lora_path, safe_load=True) + keys = _lora.keys() + if 
"down_blocks.0.resnets.0.norm1.bias" in keys: + print('Using LORA for Resadapter') + key_map = {} + key_map = comfy.lora.model_lora_keys_unet(model.model, key_map) + mapping_norm = {} + + for key in keys: + if ".weight" in key: + key_name_in_ori_sd = key_map[key.replace(".weight", "")] + mapping_norm[key_name_in_ori_sd] = _lora[key] + elif ".bias" in key: + key_name_in_ori_sd = key_map[key.replace(".bias", "")] + mapping_norm[key_name_in_ori_sd.replace(".weight", ".bias")] = _lora[ + key + ] + else: + print("===>Unexpected key", key) + mapping_norm[key] = _lora[key] + + for k in mapping_norm.keys(): + if k not in model.model.state_dict(): + print("===>Missing key:", k) + model.model.load_state_dict(mapping_norm, strict=False) + return (model, clip) + + # PixArt + if type is not None and type == 'PixArt': + from ..dit.pixArt.loader import load_pixart_lora + model = load_pixart_lora(model, _lora, lora_path, model_strength) + else: + model, clip = comfy.sd.load_lora_for_models(model, clip, _lora, model_strength, clip_strength) + + self.add_to_cache("lora", unique_id, (model, clip)) + self.eviction_based_on_memory() + else: + log_node_error(f"LORA NOT FOUND", orig_lora_name) + + return model, clip + + def resolve_lora_name(self, name): + if os.path.exists(name): + return name + else: + if len(self.lora_name_cache) == 0: + loras = folder_paths.get_filename_list("loras") + self.lora_name_cache.extend(loras) + for x in self.lora_name_cache: + if x.endswith(name): + return x + + # 如果刷新网页后新添加的lora走这个逻辑 + log_node_info("LORA NOT IN CACHE", f"{name}") + loras = folder_paths.get_filename_list("loras") + for x in loras: + if x.endswith(name): + self.lora_name_cache.append(x) + return x + + return None + + def load_main(self, ckpt_name, config_name, vae_name, lora_name, lora_model_strength, lora_clip_strength, optional_lora_stack, model_override, clip_override, vae_override, prompt, nf4=False): + model: ModelPatcher | None = None + clip: comfy.sd.CLIP | None = None + vae: comfy.sd.VAE | None = None + clip_vision = None + lora_stack = [] + + can_load_lora = True + # 判断是否存在 模型或Lora叠加xyplot, 若存在优先缓存第一个模型 + xy_model_id = next((x for x in prompt if str(prompt[x]["class_type"]) in ["easy XYInputs: ModelMergeBlocks", + "easy XYInputs: Checkpoint"]), None) + xy_lora_id = next((x for x in prompt if str(prompt[x]["class_type"]) == "easy XYInputs: Lora"), None) + if xy_lora_id is not None: + can_load_lora = False + if xy_model_id is not None: + node = prompt[xy_model_id] + if "ckpt_name_1" in node["inputs"]: + ckpt_name_1 = node["inputs"]["ckpt_name_1"] + model, clip, vae, clip_vision = self.load_checkpoint(ckpt_name_1) + can_load_lora = False + # Load models + elif model_override is not None and clip_override is not None and vae_override is not None: + model = model_override + clip = clip_override + vae = vae_override + elif model_override is not None: + raise Exception(f"[ERROR] clip or vae is missing") + elif vae_override is not None: + raise Exception(f"[ERROR] model or clip is missing") + elif clip_override is not None: + raise Exception(f"[ERROR] model or vae is missing") + else: + model, clip, vae, clip_vision = self.load_checkpoint(ckpt_name, config_name) + + if optional_lora_stack is not None and can_load_lora: + for lora in optional_lora_stack: + lora = {"lora_name": lora[0], "model": model, "clip": clip, "model_strength": lora[1], + "clip_strength": lora[2]} + model, clip = self.load_lora(lora) + lora['model'] = model + lora['clip'] = clip + lora_stack.append(lora) + + if lora_name != "None" and 
can_load_lora: + lora = {"lora_name": lora_name, "model": model, "clip": clip, "model_strength": lora_model_strength, + "clip_strength": lora_clip_strength} + model, clip = self.load_lora(lora) + lora_stack.append(lora) + + # Check for custom VAE + if vae_name not in ["Baked VAE", "Baked-VAE"]: + vae = self.load_vae(vae_name) + # CLIP skip + if not clip: + raise Exception("No CLIP found") + + return model, clip, vae, clip_vision, lora_stack + + # Kolors + def load_kolors_unet(self, unet_name): + if unet_name in self.loaded_objects["unet"]: + log_node_info("Load Kolors UNet", f"{unet_name} cached") + return self.loaded_objects["unet"][unet_name][0] + else: + from ..kolors.loader import applyKolorsUnet + with applyKolorsUnet(): + unet_path = folder_paths.get_full_path("unet", unet_name) + sd = comfy.utils.load_torch_file(unet_path) + model = comfy.sd.load_unet_state_dict(sd) + if model is None: + raise RuntimeError("ERROR: Could not detect model type of: {}".format(unet_path)) + + self.add_to_cache("unet", unet_name, model) + self.eviction_based_on_memory() + + return model + + def load_chatglm3(self, chatglm3_name): + from ..kolors.loader import load_chatglm3 + if chatglm3_name in self.loaded_objects["chatglm3"]: + log_node_info("Load ChatGLM3", f"{chatglm3_name} cached") + return self.loaded_objects["chatglm3"][chatglm3_name][0] + + chatglm_model = load_chatglm3(model_path=folder_paths.get_full_path("llm", chatglm3_name)) + self.add_to_cache("chatglm3", chatglm3_name, chatglm_model) + self.eviction_based_on_memory() + + return chatglm_model + + + # DiT + def load_dit_ckpt(self, ckpt_name, model_name, **kwargs): + if (ckpt_name+'_'+model_name) in self.loaded_objects["ckpt"]: + return self.loaded_objects["ckpt"][ckpt_name+'_'+model_name][0] + model = None + ckpt_path = folder_paths.get_full_path("checkpoints", ckpt_name) + model_type = kwargs['model_type'] if "model_type" in kwargs else 'PixArt' + if model_type == 'PixArt': + pixart_conf = kwargs['pixart_conf'] + model_conf = pixart_conf[model_name] + model = load_pixart(ckpt_path, model_conf) + if model: + self.add_to_cache("ckpt", ckpt_name + '_' + model_name, model) + self.eviction_based_on_memory() + return model + + + def load_dit_clip(self, clip_name, **kwargs): + if clip_name in self.loaded_objects["clip"]: + return self.loaded_objects["clip"][clip_name][0] + + clip_path = folder_paths.get_full_path("clip", clip_name) + sd = comfy.utils.load_torch_file(clip_path) + + prefix = "bert." 
+ state_dict = {} + for key in sd: + nkey = key + if key.startswith(prefix): + nkey = key[len(prefix):] + state_dict[nkey] = sd[key] + + m, e = model.load_sd(state_dict) + if len(m) > 0 or len(e) > 0: + print(f"{clip_name}: clip missing {len(m)} keys ({len(e)} extra)") + + self.add_to_cache("clip", clip_name, model) + self.eviction_based_on_memory() + + return model + + def load_dit_t5(self, t5_name, **kwargs): + if t5_name in self.loaded_objects["t5"]: + return self.loaded_objects["t5"][t5_name][0] + + model_type = kwargs['model_type'] if "model_type" in kwargs else 'HyDiT' + if model_type == 'HyDiT': + del kwargs['model_type'] + model = EXM_HyDiT_Tenc_Temp(model_class="mT5", **kwargs) + t5_path = folder_paths.get_full_path("t5", t5_name) + sd = comfy.utils.load_torch_file(t5_path) + m, e = model.load_sd(sd) + if len(m) > 0 or len(e) > 0: + print(f"{t5_name}: mT5 missing {len(m)} keys ({len(e)} extra)") + + self.add_to_cache("t5", t5_name, model) + self.eviction_based_on_memory() + + return model + + def load_t5_from_sd3_clip(self, sd3_clip, padding): + try: + from comfy.text_encoders.sd3_clip import SD3Tokenizer, SD3ClipModel + except: + from comfy.sd3_clip import SD3Tokenizer, SD3ClipModel + import copy + + clip = sd3_clip.clone() + assert clip.cond_stage_model.t5xxl is not None, "CLIP must have T5 loaded!" + + # remove transformer + transformer = clip.cond_stage_model.t5xxl.transformer + clip.cond_stage_model.t5xxl.transformer = None + + # clone object + tmp = SD3ClipModel(clip_l=False, clip_g=False, t5=False) + tmp.t5xxl = copy.deepcopy(clip.cond_stage_model.t5xxl) + # put transformer back + clip.cond_stage_model.t5xxl.transformer = transformer + tmp.t5xxl.transformer = transformer + + # override special tokens + tmp.t5xxl.special_tokens = copy.deepcopy(clip.cond_stage_model.t5xxl.special_tokens) + tmp.t5xxl.special_tokens.pop("end") # make sure empty tokens match + + # tokenizer + tok = SD3Tokenizer() + tok.t5xxl.min_length = padding + + clip.cond_stage_model = tmp + clip.tokenizer = tok + + return clip diff --git a/ComfyUI-Easy-Use/py/libs/log.py b/ComfyUI-Easy-Use/py/libs/log.py new file mode 100644 index 0000000000000000000000000000000000000000..8eea58831473f20b694711891fa234da31e9138d --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/log.py @@ -0,0 +1,77 @@ +COLORS_FG = { + 'BLACK': '\33[30m', + 'RED': '\33[31m', + 'GREEN': '\33[32m', + 'YELLOW': '\33[33m', + 'BLUE': '\33[34m', + 'MAGENTA': '\33[35m', + 'CYAN': '\33[36m', + 'WHITE': '\33[37m', + 'GREY': '\33[90m', + 'BRIGHT_RED': '\33[91m', + 'BRIGHT_GREEN': '\33[92m', + 'BRIGHT_YELLOW': '\33[93m', + 'BRIGHT_BLUE': '\33[94m', + 'BRIGHT_MAGENTA': '\33[95m', + 'BRIGHT_CYAN': '\33[96m', + 'BRIGHT_WHITE': '\33[97m', +} +COLORS_STYLE = { + 'RESET': '\33[0m', + 'BOLD': '\33[1m', + 'NORMAL': '\33[22m', + 'ITALIC': '\33[3m', + 'UNDERLINE': '\33[4m', + 'BLINK': '\33[5m', + 'BLINK2': '\33[6m', + 'SELECTED': '\33[7m', +} +COLORS_BG = { + 'BLACK': '\33[40m', + 'RED': '\33[41m', + 'GREEN': '\33[42m', + 'YELLOW': '\33[43m', + 'BLUE': '\33[44m', + 'MAGENTA': '\33[45m', + 'CYAN': '\33[46m', + 'WHITE': '\33[47m', + 'GREY': '\33[100m', + 'BRIGHT_RED': '\33[101m', + 'BRIGHT_GREEN': '\33[102m', + 'BRIGHT_YELLOW': '\33[103m', + 'BRIGHT_BLUE': '\33[104m', + 'BRIGHT_MAGENTA': '\33[105m', + 'BRIGHT_CYAN': '\33[106m', + 'BRIGHT_WHITE': '\33[107m', +} + +def log_node_success(node_name, message=None): + """Logs a success message.""" + _log_node(COLORS_FG["GREEN"], node_name, message) + +def log_node_info(node_name, message=None): + """Logs an info message.""" + 
_log_node(COLORS_FG["CYAN"], node_name, message) + + +def log_node_warn(node_name, message=None): + """Logs an warn message.""" + _log_node(COLORS_FG["YELLOW"], node_name, message) + +def log_node_error(node_name, message=None): + """Logs an warn message.""" + _log_node(COLORS_FG["RED"], node_name, message) + +def log_node(node_name, message=None): + """Logs a message.""" + _log_node(COLORS_FG["CYAN"], node_name, message) + + +def _log_node(color, node_name, message=None, prefix=''): + print(_get_log_msg(color, node_name, message, prefix=prefix)) + +def _get_log_msg(color, node_name, message=None, prefix=''): + msg = f'{COLORS_STYLE["BOLD"]}{color}{prefix}[EasyUse] {node_name.replace(" (EasyUse)", "")}' + msg += f':{COLORS_STYLE["RESET"]} {message}' if message is not None else f'{COLORS_STYLE["RESET"]}' + return msg + diff --git a/ComfyUI-Easy-Use/py/libs/model.py b/ComfyUI-Easy-Use/py/libs/model.py new file mode 100644 index 0000000000000000000000000000000000000000..41ccf51aa185cb86385ddf544ba632664ba21753 --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/model.py @@ -0,0 +1,58 @@ +import json +import os +import folder_paths +import server +from .utils import find_tags + +class easyModelManager: + + def __init__(self): + self.img_suffixes = [".png", ".jpg", ".jpeg", ".gif", ".webp", ".bmp", ".tiff", ".svg", ".tif", ".tiff"] + self.default_suffixes = [".ckpt", ".pt", ".bin", ".pth", ".safetensors"] + self.models_config = { + "checkpoints": {"suffix": self.default_suffixes}, + "loras": {"suffix": self.default_suffixes}, + "unet": {"suffix": self.default_suffixes}, + } + self.model_lists = {} + + def find_thumbnail(self, model_type, name): + file_no_ext = os.path.splitext(name)[0] + for ext in self.img_suffixes: + full_path = folder_paths.get_full_path(model_type, file_no_ext + ext) + if os.path.isfile(str(full_path)): + return full_path + return None + + def get_model_lists(self, model_type): + if model_type not in self.models_config: + return [] + filenames = folder_paths.get_filename_list(model_type) + model_lists = [] + for name in filenames: + model_suffix = os.path.splitext(name)[-1] + if model_suffix not in self.models_config[model_type]["suffix"]: + continue + else: + cfg = { + "name": os.path.basename(os.path.splitext(name)[0]), + "full_name": name, + "remark": '', + "file_path": folder_paths.get_full_path(model_type, name), + "type": model_type, + "suffix": model_suffix, + "dir_tags": find_tags(name), + "cover": self.find_thumbnail(model_type, name), + "metadata": None, + "sha256": None + } + model_lists.append(cfg) + + return model_lists + + def get_model_info(self, model_type, model_name): + pass + +# if __name__ == "__main__": +# manager = easyModelManager() +# print(manager.get_model_lists("checkpoints")) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/libs/sampler.py b/ComfyUI-Easy-Use/py/libs/sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..b330fe946ae1c6e3f86dec98ada03c6b539967bb --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/sampler.py @@ -0,0 +1,909 @@ +import comfy +import comfy.model_management +import comfy.samplers +import torch +import numpy as np +import latent_preview +from nodes import MAX_RESOLUTION +from PIL import Image +from typing import Dict, List, Optional, Tuple, Union, Any +from ..brushnet.model_patch import add_model_patch + +class easySampler: + def __init__(self): + self.last_helds: dict[str, list] = { + "results": [], + "pipe_line": [], + } + self.device = comfy.model_management.intermediate_device() + + 
@staticmethod + def tensor2pil(image: torch.Tensor) -> Image.Image: + """Convert a torch tensor to a PIL image.""" + return Image.fromarray(np.clip(255. * image.cpu().numpy().squeeze(), 0, 255).astype(np.uint8)) + + @staticmethod + def pil2tensor(image: Image.Image) -> torch.Tensor: + """Convert a PIL image to a torch tensor.""" + return torch.from_numpy(np.array(image).astype(np.float32) / 255.0).unsqueeze(0) + + @staticmethod + def enforce_mul_of_64(d): + d = int(d) + if d <= 7: + d = 8 + leftover = d % 8 # 8 is the number of pixels per byte + if leftover != 0: # if the number of pixels is not a multiple of 8 + if (leftover < 4): # if the number of pixels is less than 4 + d -= leftover # remove the leftover pixels + else: # if the number of pixels is more than 4 + d += 8 - leftover # add the leftover pixels + + return int(d) + + @staticmethod + def safe_split(to_split: str, delimiter: str) -> List[str]: + """Split the input string and return a list of non-empty parts.""" + parts = to_split.split(delimiter) + parts = [part for part in parts if part not in ('', ' ', ' ')] + + while len(parts) < 2: + parts.append('None') + return parts + + def emptyLatent(self, resolution, empty_latent_width, empty_latent_height, batch_size=1, compression=0, sd3=False): + if resolution not in ["自定义 x 自定义", 'width x height (custom)']: + try: + width, height = map(int, resolution.split(' x ')) + empty_latent_width = width + empty_latent_height = height + except ValueError: + raise ValueError("Invalid base_resolution format.") + if sd3: + latent = torch.ones([batch_size, 16, empty_latent_height // 8, empty_latent_width // 8], device=self.device) * 0.0609 + samples = {"samples": latent} + elif compression == 0: + latent = torch.zeros([batch_size, 4, empty_latent_height // 8, empty_latent_width // 8], device=self.device) + samples = {"samples": latent} + else: + latent_c = torch.zeros( + [batch_size, 16, empty_latent_height // compression, empty_latent_width // compression]) + latent_b = torch.zeros([batch_size, 4, empty_latent_height // 4, empty_latent_width // 4]) + + samples = ({"samples": latent_c}, {"samples": latent_b}) + return samples + + def common_ksampler(self, model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent, denoise=1.0, + disable_noise=False, start_step=None, last_step=None, force_full_denoise=False, + preview_latent=True, disable_pbar=False): + device = comfy.model_management.get_torch_device() + latent_image = latent["samples"] + + noise_mask = None + if "noise_mask" in latent: + noise_mask = latent["noise_mask"] + + preview_format = "JPEG" + if preview_format not in ["JPEG", "PNG"]: + preview_format = "JPEG" + + previewer = False + + if preview_latent: + previewer = latent_preview.get_previewer(device, model.model.latent_format) + + pbar = comfy.utils.ProgressBar(steps) + + def callback(step, x0, x, total_steps): + preview_bytes = None + if previewer: + preview_bytes = previewer.decode_latent_to_preview_image(preview_format, x0) + pbar.update_absolute(step + 1, total_steps, preview_bytes) + + if disable_noise: + noise = torch.zeros(latent_image.size(), dtype=latent_image.dtype, layout=latent_image.layout, + device="cpu") + else: + batch_inds = latent["batch_index"] if "batch_index" in latent else None + noise = comfy.sample.prepare_noise(latent_image, seed, batch_inds) + + ####################################################################################### + # add model patch + # brushnet + add_model_patch(model) + 
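# (Annotation, not part of the original file:) after the BrushNet patch is applied,
# comfy.sample.sample below runs the full denoising loop -- it consumes the noise
# prepared above, streams per-step latent previews through `callback`, and honours
# an optional inpainting mask taken from latent["noise_mask"].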
####################################################################################### + samples = comfy.sample.sample(model, noise, steps, cfg, sampler_name, scheduler, positive, negative, + latent_image, + denoise=denoise, disable_noise=disable_noise, start_step=start_step, + last_step=last_step, + force_full_denoise=force_full_denoise, noise_mask=noise_mask, + callback=callback, + disable_pbar=disable_pbar, seed=seed) + out = latent.copy() + out["samples"] = samples + return out + + def custom_ksampler(self, model, seed, steps, cfg, _sampler, sigmas, positive, negative, latent, + disable_noise=False, preview_latent=True, disable_pbar=False): + + device = comfy.model_management.get_torch_device() + latent_image = latent["samples"] + + if disable_noise: + noise = torch.zeros(latent_image.size(), dtype=latent_image.dtype, layout=latent_image.layout, device="cpu") + else: + batch_inds = latent["batch_index"] if "batch_index" in latent else None + noise = comfy.sample.prepare_noise(latent_image, seed, batch_inds) + + noise_mask = None + if "noise_mask" in latent: + noise_mask = latent["noise_mask"] + + preview_format = "JPEG" + if preview_format not in ["JPEG", "PNG"]: + preview_format = "JPEG" + + previewer = False + + if preview_latent: + previewer = latent_preview.get_previewer(device, model.model.latent_format) + + pbar = comfy.utils.ProgressBar(steps) + + def callback(step, x0, x, total_steps): + preview_bytes = None + if previewer: + preview_bytes = previewer.decode_latent_to_preview_image(preview_format, x0) + pbar.update_absolute(step + 1, total_steps, preview_bytes) + + samples = comfy.samplers.sample(model, noise, positive, negative, cfg, device, _sampler, sigmas, latent_image=latent_image, model_options=model.model_options, + denoise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, seed=seed) + + out = latent.copy() + out["samples"] = samples + return out + + def custom_advanced_ksampler(self, noise, guider, sampler, sigmas, latent_image): + latent = latent_image + latent_image = latent["samples"] + latent = latent.copy() + latent_image = comfy.sample.fix_empty_latent_channels(guider.model_patcher, latent_image) + latent["samples"] = latent_image + + noise_mask = None + if "noise_mask" in latent: + noise_mask = latent["noise_mask"] + + x0_output = {} + callback = latent_preview.prepare_callback(guider.model_patcher, sigmas.shape[-1] - 1, x0_output) + + disable_pbar = not comfy.utils.PROGRESS_BAR_ENABLED + samples = guider.sample(noise.generate_noise(latent), latent_image, sampler, sigmas, denoise_mask=noise_mask, + callback=callback, disable_pbar=disable_pbar, seed=noise.seed) + samples = samples.to(comfy.model_management.intermediate_device()) + + out = latent.copy() + out["samples"] = samples + if "x0" in x0_output: + out_denoised = latent.copy() + out_denoised["samples"] = guider.model_patcher.model.process_latent_out(x0_output["x0"].cpu()) + else: + out_denoised = out + + return (out, out_denoised) + + def get_value_by_id(self, key: str, my_unique_id: Any) -> Optional[Any]: + """Retrieve value by its associated ID.""" + try: + for value, id_ in self.last_helds[key]: + if id_ == my_unique_id: + return value + except KeyError: + return None + + def update_value_by_id(self, key: str, my_unique_id: Any, new_value: Any) -> Union[bool, None]: + """Update the value associated with a given ID. 
Return True if updated, False if appended, None if key doesn't exist.""" + try: + for i, (value, id_) in enumerate(self.last_helds[key]): + if id_ == my_unique_id: + self.last_helds[key][i] = (new_value, id_) + return True + self.last_helds[key].append((new_value, my_unique_id)) + return False + except KeyError: + return False + + def upscale(self, samples, upscale_method, scale_by, crop): + s = samples.copy() + width = self.enforce_mul_of_64(round(samples["samples"].shape[3] * scale_by)) + height = self.enforce_mul_of_64(round(samples["samples"].shape[2] * scale_by)) + + if (width > MAX_RESOLUTION): + width = MAX_RESOLUTION + if (height > MAX_RESOLUTION): + height = MAX_RESOLUTION + + s["samples"] = comfy.utils.common_upscale(samples["samples"], width, height, upscale_method, crop) + return (s,) + + def handle_upscale(self, samples: dict, upscale_method: str, factor: float, crop: bool) -> dict: + """Upscale the samples if the upscale_method is not set to 'None'.""" + if upscale_method != "None": + samples = self.upscale(samples, upscale_method, factor, crop)[0] + return samples + + def init_state(self, my_unique_id: Any, key: str, default: Any) -> Any: + """Initialize the state by either fetching the stored value or setting a default.""" + value = self.get_value_by_id(key, my_unique_id) + if value is not None: + return value + return default + + def get_output(self, pipe: dict,) -> Tuple: + """Return a tuple of various elements fetched from the input pipe dictionary.""" + return ( + pipe, + pipe.get("images"), + pipe.get("model"), + pipe.get("positive"), + pipe.get("negative"), + pipe.get("samples"), + pipe.get("vae"), + pipe.get("clip"), + pipe.get("seed"), + ) + + def get_output_sdxl(self, sdxl_pipe: dict) -> Tuple: + """Return a tuple of various elements fetched from the input sdxl_pipe dictionary.""" + return ( + sdxl_pipe, + sdxl_pipe.get("model"), + sdxl_pipe.get("positive"), + sdxl_pipe.get("negative"), + sdxl_pipe.get("vae"), + sdxl_pipe.get("refiner_model"), + sdxl_pipe.get("refiner_positive"), + sdxl_pipe.get("refiner_negative"), + sdxl_pipe.get("refiner_vae"), + sdxl_pipe.get("samples"), + sdxl_pipe.get("clip"), + sdxl_pipe.get("images"), + sdxl_pipe.get("seed") + ) + +def loglinear_interp(t_steps, num_steps): + """ + Performs log-linear interpolation of a given array of decreasing numbers. 
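    (Illustrative note:) the fixed sigma tables below (AYS and GITS) are resampled this
    way when the requested step count does not match the stored table -- the decreasing
    sigmas are reversed, interpolated linearly in log-space over [0, 1], then
    exponentiated and reversed back, so both endpoints of the schedule are preserved.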
+ """ + xs = np.linspace(0, 1, len(t_steps)) + ys = np.log(t_steps[::-1]) + + new_xs = np.linspace(0, 1, num_steps) + new_ys = np.interp(new_xs, xs, ys) + + interped_ys = np.exp(new_ys)[::-1].copy() + return interped_ys + +class alignYourStepsScheduler: + + NOISE_LEVELS = { + "SD1": [14.6146412293, 6.4745760956, 3.8636745985, 2.6946151520, 1.8841921177, 1.3943805092, 0.9642583904, + 0.6523686016, 0.3977456272, 0.1515232662, 0.0291671582], + "SDXL": [14.6146412293, 6.3184485287, 3.7681790315, 2.1811480769, 1.3405244945, 0.8620721141, 0.5550693289, + 0.3798540708, 0.2332364134, 0.1114188177, 0.0291671582], + "SVD": [700.00, 54.5, 15.886, 7.977, 4.248, 1.789, 0.981, 0.403, 0.173, 0.034, 0.002]} + + def get_sigmas(self, model_type, steps, denoise): + + total_steps = steps + if denoise < 1.0: + if denoise <= 0.0: + return (torch.FloatTensor([]),) + total_steps = round(steps * denoise) + + sigmas = self.NOISE_LEVELS[model_type][:] + if (steps + 1) != len(sigmas): + sigmas = loglinear_interp(sigmas, steps + 1) + + sigmas = sigmas[-(total_steps + 1):] + sigmas[-1] = 0 + return (torch.FloatTensor(sigmas),) + + +class gitsScheduler: + + NOISE_LEVELS = { + 0.80: [ + [14.61464119, 7.49001646, 0.02916753], + [14.61464119, 11.54541874, 6.77309084, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 3.07277966, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 2.05039096, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 2.05039096, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 8.75849152, 7.49001646, 5.85520077, 3.07277966, 1.56271636, + 0.02916753], + [14.61464119, 13.76078796, 12.2308979, 10.90732002, 8.75849152, 7.49001646, 5.85520077, 3.07277966, + 1.56271636, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 8.75849152, 7.49001646, 5.85520077, + 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, + 5.85520077, 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, + 6.14220476, 4.86714602, 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.30717278, + 7.49001646, 6.14220476, 4.86714602, 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.24142551, + 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.24142551, + 8.75849152, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.24142551, + 8.75849152, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.1956799, 1.98035145, 0.86115354, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, + 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.1956799, 1.98035145, 0.86115354, + 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, + 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.07277966, 1.84880662, + 
0.83188516, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, + 9.24142551, 8.75849152, 8.30717278, 7.88507891, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.07277966, + 1.84880662, 0.83188516, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, + 9.24142551, 8.75849152, 8.30717278, 7.88507891, 7.49001646, 6.77309084, 5.85520077, 4.86714602, 3.75677586, + 2.84484982, 1.78698075, 0.803307, 0.02916753], + ], + 0.85: [ + [14.61464119, 7.49001646, 0.02916753], + [14.61464119, 7.49001646, 1.84880662, 0.02916753], + [14.61464119, 11.54541874, 6.77309084, 1.56271636, 0.02916753], + [14.61464119, 11.54541874, 7.11996698, 3.07277966, 1.24153244, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.09240818, 2.84484982, 0.95350921, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.09240818, 2.84484982, 0.95350921, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.58536053, 3.1956799, 1.84880662, 0.803307, 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 8.75849152, 7.49001646, 5.58536053, 3.1956799, 1.84880662, 0.803307, + 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 8.75849152, 7.49001646, 6.14220476, 4.65472794, 3.07277966, + 1.84880662, 0.803307, 0.02916753], + [14.61464119, 13.76078796, 12.2308979, 10.90732002, 8.75849152, 7.49001646, 6.14220476, 4.65472794, + 3.07277966, 1.84880662, 0.803307, 0.02916753], + [14.61464119, 13.76078796, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.14220476, + 4.65472794, 3.07277966, 1.84880662, 0.803307, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, + 6.14220476, 4.65472794, 3.07277966, 1.84880662, 0.803307, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.30717278, + 7.49001646, 6.14220476, 4.65472794, 3.07277966, 1.84880662, 0.803307, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.30717278, + 7.49001646, 6.14220476, 4.86714602, 3.60512662, 2.6383388, 1.56271636, 0.72133851, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.30717278, + 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.46139455, 2.45070267, 1.56271636, 0.72133851, + 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.75849152, + 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.46139455, 2.45070267, 1.56271636, 0.72133851, + 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.24142551, + 8.75849152, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.46139455, 2.45070267, 1.56271636, + 0.72133851, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, + 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.46139455, 2.45070267, + 1.56271636, 0.72133851, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, + 9.24142551, 8.75849152, 8.30717278, 7.88507891, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.46139455, + 2.45070267, 1.56271636, 0.72133851, 0.02916753], + ], + 0.90: [ + [14.61464119, 6.77309084, 0.02916753], + [14.61464119, 7.49001646, 1.56271636, 
0.02916753], + [14.61464119, 7.49001646, 3.07277966, 0.95350921, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.54230714, 0.89115214, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 4.86714602, 2.54230714, 0.89115214, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.09240818, 3.07277966, 1.61558151, 0.69515091, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.11996698, 4.86714602, 3.07277966, 1.61558151, 0.69515091, + 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 2.95596409, 1.61558151, + 0.69515091, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.19988537, 1.24153244, + 0.57119018, 0.02916753], + [14.61464119, 12.96784878, 10.90732002, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 3.1956799, + 2.19988537, 1.24153244, 0.57119018, 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 9.24142551, 8.30717278, 7.49001646, 5.85520077, 4.45427561, + 3.1956799, 2.19988537, 1.24153244, 0.57119018, 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 4.86714602, + 3.75677586, 2.84484982, 1.84880662, 1.08895338, 0.52423614, 0.02916753], + [14.61464119, 13.76078796, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.14220476, + 4.86714602, 3.75677586, 2.84484982, 1.84880662, 1.08895338, 0.52423614, 0.02916753], + [14.61464119, 13.76078796, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.44769001, + 5.58536053, 4.45427561, 3.32507086, 2.45070267, 1.61558151, 0.95350921, 0.45573691, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, + 6.44769001, 5.58536053, 4.45427561, 3.32507086, 2.45070267, 1.61558151, 0.95350921, 0.45573691, + 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, + 6.77309084, 5.85520077, 4.86714602, 3.91689563, 3.07277966, 2.27973175, 1.56271636, 0.95350921, 0.45573691, + 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.30717278, + 7.49001646, 6.77309084, 5.85520077, 4.86714602, 3.91689563, 3.07277966, 2.27973175, 1.56271636, 0.95350921, + 0.45573691, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.75849152, + 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.86714602, 3.91689563, 3.07277966, 2.27973175, 1.56271636, + 0.95350921, 0.45573691, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.75849152, + 8.30717278, 7.49001646, 6.77309084, 5.85520077, 5.09240818, 4.45427561, 3.60512662, 2.95596409, 2.19988537, + 1.51179266, 0.89115214, 0.43325692, 0.02916753], + ], + 0.95: [ + [14.61464119, 6.77309084, 0.02916753], + [14.61464119, 6.77309084, 1.56271636, 0.02916753], + [14.61464119, 7.49001646, 2.84484982, 0.89115214, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.36326075, 0.803307, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.95596409, 1.56271636, 0.64427125, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 4.86714602, 2.95596409, 1.56271636, 0.64427125, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 4.86714602, 3.07277966, 1.91321158, 1.08895338, 0.50118381, + 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.07277966, 1.91321158, 1.08895338, + 0.50118381, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 
5.85520077, 4.45427561, 3.07277966, 1.91321158, + 1.08895338, 0.50118381, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.19988537, 1.41535246, + 0.803307, 0.38853383, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 4.65472794, 3.46139455, 2.6383388, 1.84880662, + 1.24153244, 0.72133851, 0.34370604, 0.02916753], + [14.61464119, 12.96784878, 10.90732002, 8.75849152, 7.49001646, 5.85520077, 4.65472794, 3.46139455, + 2.6383388, 1.84880662, 1.24153244, 0.72133851, 0.34370604, 0.02916753], + [14.61464119, 12.96784878, 10.90732002, 8.75849152, 7.49001646, 6.14220476, 4.86714602, 3.75677586, + 2.95596409, 2.19988537, 1.56271636, 1.05362725, 0.64427125, 0.32104823, 0.02916753], + [14.61464119, 12.96784878, 10.90732002, 8.75849152, 7.49001646, 6.44769001, 5.58536053, 4.65472794, + 3.60512662, 2.95596409, 2.19988537, 1.56271636, 1.05362725, 0.64427125, 0.32104823, 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 9.24142551, 8.30717278, 7.49001646, 6.44769001, 5.58536053, + 4.65472794, 3.60512662, 2.95596409, 2.19988537, 1.56271636, 1.05362725, 0.64427125, 0.32104823, + 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 9.24142551, 8.30717278, 7.49001646, 6.44769001, 5.58536053, + 4.65472794, 3.75677586, 3.07277966, 2.45070267, 1.78698075, 1.24153244, 0.83188516, 0.50118381, 0.22545385, + 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 9.24142551, 8.30717278, 7.49001646, 6.77309084, 5.85520077, + 5.09240818, 4.45427561, 3.60512662, 2.95596409, 2.36326075, 1.72759056, 1.24153244, 0.83188516, 0.50118381, + 0.22545385, 0.02916753], + [14.61464119, 13.76078796, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.77309084, + 5.85520077, 5.09240818, 4.45427561, 3.60512662, 2.95596409, 2.36326075, 1.72759056, 1.24153244, 0.83188516, + 0.50118381, 0.22545385, 0.02916753], + [14.61464119, 13.76078796, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.77309084, + 5.85520077, 5.09240818, 4.45427561, 3.75677586, 3.07277966, 2.45070267, 1.91321158, 1.46270394, 1.05362725, + 0.72133851, 0.43325692, 0.19894916, 0.02916753], + ], + 1.00: [ + [14.61464119, 1.56271636, 0.02916753], + [14.61464119, 6.77309084, 0.95350921, 0.02916753], + [14.61464119, 6.77309084, 2.36326075, 0.803307, 0.02916753], + [14.61464119, 7.11996698, 3.07277966, 1.56271636, 0.59516323, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.84484982, 1.41535246, 0.57119018, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.84484982, 1.61558151, 0.86115354, 0.38853383, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 4.86714602, 2.84484982, 1.61558151, 0.86115354, 0.38853383, + 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 4.86714602, 3.07277966, 1.98035145, 1.24153244, 0.72133851, + 0.34370604, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.07277966, 1.98035145, 1.24153244, + 0.72133851, 0.34370604, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.27973175, 1.51179266, + 0.95350921, 0.54755926, 0.25053367, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.36326075, 1.61558151, + 1.08895338, 0.72133851, 0.41087446, 0.17026083, 0.02916753], + [14.61464119, 11.54541874, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.36326075, + 1.61558151, 1.08895338, 0.72133851, 0.41087446, 0.17026083, 0.02916753], + [14.61464119, 11.54541874, 8.75849152, 7.49001646, 5.85520077, 4.65472794, 
3.60512662, 2.84484982, + 2.12350607, 1.56271636, 1.08895338, 0.72133851, 0.41087446, 0.17026083, 0.02916753], + [14.61464119, 11.54541874, 8.75849152, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.84484982, + 2.19988537, 1.61558151, 1.162866, 0.803307, 0.50118381, 0.27464288, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 8.75849152, 7.49001646, 5.85520077, 4.65472794, 3.75677586, 3.07277966, + 2.45070267, 1.84880662, 1.36964464, 1.01931262, 0.72133851, 0.45573691, 0.25053367, 0.09824532, + 0.02916753], + [14.61464119, 11.54541874, 8.75849152, 7.49001646, 6.14220476, 5.09240818, 4.26497746, 3.46139455, + 2.84484982, 2.19988537, 1.67050016, 1.24153244, 0.92192322, 0.64427125, 0.43325692, 0.25053367, 0.09824532, + 0.02916753], + [14.61464119, 11.54541874, 8.75849152, 7.49001646, 6.14220476, 5.09240818, 4.26497746, 3.60512662, + 2.95596409, 2.45070267, 1.91321158, 1.51179266, 1.12534678, 0.83188516, 0.59516323, 0.38853383, 0.22545385, + 0.09824532, 0.02916753], + [14.61464119, 12.2308979, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 5.09240818, 4.26497746, + 3.60512662, 2.95596409, 2.45070267, 1.91321158, 1.51179266, 1.12534678, 0.83188516, 0.59516323, 0.38853383, + 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 12.2308979, 9.24142551, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 5.09240818, + 4.26497746, 3.60512662, 2.95596409, 2.45070267, 1.91321158, 1.51179266, 1.12534678, 0.83188516, 0.59516323, + 0.38853383, 0.22545385, 0.09824532, 0.02916753], + ], + 1.05: [ + [14.61464119, 0.95350921, 0.02916753], + [14.61464119, 6.77309084, 0.89115214, 0.02916753], + [14.61464119, 6.77309084, 2.05039096, 0.72133851, 0.02916753], + [14.61464119, 6.77309084, 2.84484982, 1.28281462, 0.52423614, 0.02916753], + [14.61464119, 6.77309084, 3.07277966, 1.61558151, 0.803307, 0.34370604, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.84484982, 1.56271636, 0.803307, 0.34370604, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.84484982, 1.61558151, 0.95350921, 0.52423614, 0.22545385, + 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.07277966, 1.98035145, 1.24153244, 0.74807048, 0.41087446, + 0.17026083, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.27973175, 1.51179266, 0.95350921, 0.59516323, 0.34370604, + 0.13792117, 0.02916753], + [14.61464119, 7.49001646, 5.09240818, 3.46139455, 2.45070267, 1.61558151, 1.08895338, 0.72133851, + 0.45573691, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.09240818, 3.46139455, 2.45070267, 1.61558151, 1.08895338, + 0.72133851, 0.45573691, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.36326075, 1.61558151, + 1.08895338, 0.72133851, 0.45573691, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.45070267, 1.72759056, + 1.24153244, 0.86115354, 0.59516323, 0.38853383, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.84484982, 2.19988537, + 1.61558151, 1.162866, 0.83188516, 0.59516323, 0.38853383, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.84484982, 2.19988537, + 1.67050016, 1.28281462, 0.95350921, 0.72133851, 0.52423614, 0.34370604, 0.19894916, 0.09824532, + 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.95596409, 2.36326075, + 1.84880662, 1.41535246, 1.08895338, 0.83188516, 
0.61951244, 0.45573691, 0.32104823, 0.19894916, 0.09824532, + 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.95596409, 2.45070267, + 1.91321158, 1.51179266, 1.20157266, 0.95350921, 0.74807048, 0.57119018, 0.43325692, 0.29807833, 0.19894916, + 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 8.30717278, 7.11996698, 5.85520077, 4.65472794, 3.60512662, 2.95596409, + 2.45070267, 1.91321158, 1.51179266, 1.20157266, 0.95350921, 0.74807048, 0.57119018, 0.43325692, 0.29807833, + 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 8.30717278, 7.11996698, 5.85520077, 4.65472794, 3.60512662, 2.95596409, + 2.45070267, 1.98035145, 1.61558151, 1.32549286, 1.08895338, 0.86115354, 0.69515091, 0.54755926, 0.41087446, + 0.29807833, 0.19894916, 0.09824532, 0.02916753], + ], + 1.10: [ + [14.61464119, 0.89115214, 0.02916753], + [14.61464119, 2.36326075, 0.72133851, 0.02916753], + [14.61464119, 5.85520077, 1.61558151, 0.57119018, 0.02916753], + [14.61464119, 6.77309084, 2.45070267, 1.08895338, 0.45573691, 0.02916753], + [14.61464119, 6.77309084, 2.95596409, 1.56271636, 0.803307, 0.34370604, 0.02916753], + [14.61464119, 6.77309084, 3.07277966, 1.61558151, 0.89115214, 0.4783645, 0.19894916, 0.02916753], + [14.61464119, 6.77309084, 3.07277966, 1.84880662, 1.08895338, 0.64427125, 0.34370604, 0.13792117, + 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.84484982, 1.61558151, 0.95350921, 0.54755926, 0.27464288, + 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.95596409, 1.91321158, 1.24153244, 0.803307, 0.4783645, 0.25053367, + 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.07277966, 2.05039096, 1.41535246, 0.95350921, 0.64427125, + 0.41087446, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.27973175, 1.61558151, 1.12534678, 0.803307, 0.54755926, + 0.36617002, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.32507086, 2.45070267, 1.72759056, 1.24153244, 0.89115214, + 0.64427125, 0.45573691, 0.32104823, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 5.09240818, 3.60512662, 2.84484982, 2.05039096, 1.51179266, 1.08895338, 0.803307, + 0.59516323, 0.43325692, 0.29807833, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 5.09240818, 3.60512662, 2.84484982, 2.12350607, 1.61558151, 1.24153244, + 0.95350921, 0.72133851, 0.54755926, 0.41087446, 0.29807833, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.45070267, 1.84880662, 1.41535246, 1.08895338, + 0.83188516, 0.64427125, 0.50118381, 0.36617002, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.45070267, 1.91321158, 1.51179266, 1.20157266, + 0.95350921, 0.74807048, 0.59516323, 0.45573691, 0.34370604, 0.25053367, 0.17026083, 0.09824532, + 0.02916753], + [14.61464119, 7.49001646, 5.85520077, 4.45427561, 3.46139455, 2.84484982, 2.19988537, 1.72759056, + 1.36964464, 1.08895338, 0.86115354, 0.69515091, 0.54755926, 0.43325692, 0.34370604, 0.25053367, 0.17026083, + 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.46139455, 2.84484982, 2.19988537, + 1.72759056, 1.36964464, 1.08895338, 0.86115354, 0.69515091, 0.54755926, 0.43325692, 0.34370604, 0.25053367, + 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.46139455, 2.84484982, 2.19988537, + 1.72759056, 
1.36964464, 1.08895338, 0.89115214, 0.72133851, 0.59516323, 0.4783645, 0.38853383, 0.29807833, + 0.22545385, 0.17026083, 0.09824532, 0.02916753], + ], + 1.15: [ + [14.61464119, 0.83188516, 0.02916753], + [14.61464119, 1.84880662, 0.59516323, 0.02916753], + [14.61464119, 5.85520077, 1.56271636, 0.52423614, 0.02916753], + [14.61464119, 5.85520077, 1.91321158, 0.83188516, 0.34370604, 0.02916753], + [14.61464119, 5.85520077, 2.45070267, 1.24153244, 0.59516323, 0.25053367, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.51179266, 0.803307, 0.41087446, 0.17026083, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.56271636, 0.89115214, 0.50118381, 0.25053367, 0.09824532, + 0.02916753], + [14.61464119, 6.77309084, 3.07277966, 1.84880662, 1.12534678, 0.72133851, 0.43325692, 0.22545385, + 0.09824532, 0.02916753], + [14.61464119, 6.77309084, 3.07277966, 1.91321158, 1.24153244, 0.803307, 0.52423614, 0.34370604, 0.19894916, + 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.95596409, 1.91321158, 1.24153244, 0.803307, 0.52423614, 0.34370604, + 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.07277966, 2.05039096, 1.36964464, 0.95350921, 0.69515091, 0.4783645, + 0.32104823, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.07277966, 2.12350607, 1.51179266, 1.08895338, 0.803307, 0.59516323, + 0.43325692, 0.29807833, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.07277966, 2.12350607, 1.51179266, 1.08895338, 0.803307, 0.59516323, + 0.45573691, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.07277966, 2.19988537, 1.61558151, 1.24153244, 0.95350921, + 0.74807048, 0.59516323, 0.45573691, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.45070267, 1.78698075, 1.32549286, 1.01931262, 0.803307, + 0.64427125, 0.50118381, 0.38853383, 0.29807833, 0.22545385, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.45070267, 1.78698075, 1.32549286, 1.01931262, 0.803307, + 0.64427125, 0.52423614, 0.41087446, 0.32104823, 0.25053367, 0.19894916, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.45070267, 1.84880662, 1.41535246, 1.12534678, 0.89115214, + 0.72133851, 0.59516323, 0.4783645, 0.38853383, 0.32104823, 0.25053367, 0.19894916, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.45070267, 1.84880662, 1.41535246, 1.12534678, 0.89115214, + 0.72133851, 0.59516323, 0.50118381, 0.41087446, 0.34370604, 0.27464288, 0.22545385, 0.17026083, 0.13792117, + 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.45070267, 1.84880662, 1.41535246, 1.12534678, 0.89115214, + 0.72133851, 0.59516323, 0.50118381, 0.41087446, 0.34370604, 0.29807833, 0.25053367, 0.19894916, 0.17026083, + 0.13792117, 0.09824532, 0.02916753], + ], + 1.20: [ + [14.61464119, 0.803307, 0.02916753], + [14.61464119, 1.56271636, 0.52423614, 0.02916753], + [14.61464119, 2.36326075, 0.92192322, 0.36617002, 0.02916753], + [14.61464119, 2.84484982, 1.24153244, 0.59516323, 0.25053367, 0.02916753], + [14.61464119, 5.85520077, 2.05039096, 0.95350921, 0.45573691, 0.17026083, 0.02916753], + [14.61464119, 5.85520077, 2.45070267, 1.24153244, 0.64427125, 0.29807833, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.45070267, 1.36964464, 0.803307, 0.45573691, 0.25053367, 0.09824532, 0.02916753], 
+ [14.61464119, 5.85520077, 2.84484982, 1.61558151, 0.95350921, 0.59516323, 0.36617002, 0.19894916, + 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.67050016, 1.08895338, 0.74807048, 0.50118381, 0.32104823, + 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.95596409, 1.84880662, 1.24153244, 0.83188516, 0.59516323, 0.41087446, + 0.27464288, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 3.07277966, 1.98035145, 1.36964464, 0.95350921, 0.69515091, 0.50118381, + 0.36617002, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 6.77309084, 3.46139455, 2.36326075, 1.56271636, 1.08895338, 0.803307, 0.59516323, 0.45573691, + 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 6.77309084, 3.46139455, 2.45070267, 1.61558151, 1.162866, 0.86115354, 0.64427125, 0.50118381, + 0.38853383, 0.29807833, 0.22545385, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.12350607, 1.51179266, 1.08895338, 0.83188516, + 0.64427125, 0.50118381, 0.38853383, 0.29807833, 0.22545385, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.12350607, 1.51179266, 1.08895338, 0.83188516, + 0.64427125, 0.50118381, 0.41087446, 0.32104823, 0.25053367, 0.19894916, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.12350607, 1.51179266, 1.08895338, 0.83188516, + 0.64427125, 0.50118381, 0.41087446, 0.34370604, 0.27464288, 0.22545385, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.19988537, 1.61558151, 1.20157266, 0.92192322, + 0.72133851, 0.57119018, 0.45573691, 0.36617002, 0.29807833, 0.25053367, 0.19894916, 0.17026083, 0.13792117, + 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.19988537, 1.61558151, 1.24153244, 0.95350921, + 0.74807048, 0.59516323, 0.4783645, 0.38853383, 0.32104823, 0.27464288, 0.22545385, 0.19894916, 0.17026083, + 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.19988537, 1.61558151, 1.24153244, 0.95350921, + 0.74807048, 0.59516323, 0.50118381, 0.41087446, 0.34370604, 0.29807833, 0.25053367, 0.22545385, 0.19894916, + 0.17026083, 0.13792117, 0.09824532, 0.02916753], + ], + 1.25: [ + [14.61464119, 0.72133851, 0.02916753], + [14.61464119, 1.56271636, 0.50118381, 0.02916753], + [14.61464119, 2.05039096, 0.803307, 0.32104823, 0.02916753], + [14.61464119, 2.36326075, 0.95350921, 0.43325692, 0.17026083, 0.02916753], + [14.61464119, 2.84484982, 1.24153244, 0.59516323, 0.27464288, 0.09824532, 0.02916753], + [14.61464119, 3.07277966, 1.51179266, 0.803307, 0.43325692, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.36326075, 1.24153244, 0.72133851, 0.41087446, 0.22545385, 0.09824532, + 0.02916753], + [14.61464119, 5.85520077, 2.45070267, 1.36964464, 0.83188516, 0.52423614, 0.34370604, 0.19894916, + 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.61558151, 0.98595673, 0.64427125, 0.43325692, 0.27464288, + 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.67050016, 1.08895338, 0.74807048, 0.52423614, 0.36617002, + 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.72759056, 1.162866, 0.803307, 0.59516323, 0.45573691, 0.34370604, + 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.95596409, 1.84880662, 1.24153244, 0.86115354, 0.64427125, 0.4783645, 
0.36617002, + 0.27464288, 0.19894916, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.95596409, 1.84880662, 1.28281462, 0.92192322, 0.69515091, 0.52423614, + 0.41087446, 0.32104823, 0.25053367, 0.19894916, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.95596409, 1.91321158, 1.32549286, 0.95350921, 0.72133851, 0.54755926, + 0.43325692, 0.34370604, 0.27464288, 0.22545385, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.95596409, 1.91321158, 1.32549286, 0.95350921, 0.72133851, 0.57119018, + 0.45573691, 0.36617002, 0.29807833, 0.25053367, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 5.85520077, 2.95596409, 1.91321158, 1.32549286, 0.95350921, 0.74807048, 0.59516323, 0.4783645, + 0.38853383, 0.32104823, 0.27464288, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 5.85520077, 3.07277966, 2.05039096, 1.41535246, 1.05362725, 0.803307, 0.61951244, 0.50118381, + 0.41087446, 0.34370604, 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 5.85520077, 3.07277966, 2.05039096, 1.41535246, 1.05362725, 0.803307, 0.64427125, 0.52423614, + 0.43325692, 0.36617002, 0.32104823, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, + 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 3.07277966, 2.05039096, 1.46270394, 1.08895338, 0.83188516, 0.66947293, + 0.54755926, 0.45573691, 0.38853383, 0.34370604, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, + 0.17026083, 0.13792117, 0.09824532, 0.02916753], + ], + 1.30: [ + [14.61464119, 0.72133851, 0.02916753], + [14.61464119, 1.24153244, 0.43325692, 0.02916753], + [14.61464119, 1.56271636, 0.59516323, 0.22545385, 0.02916753], + [14.61464119, 1.84880662, 0.803307, 0.36617002, 0.13792117, 0.02916753], + [14.61464119, 2.36326075, 1.01931262, 0.52423614, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.36964464, 0.74807048, 0.41087446, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 3.07277966, 1.56271636, 0.89115214, 0.54755926, 0.34370604, 0.19894916, 0.09824532, + 0.02916753], + [14.61464119, 3.07277966, 1.61558151, 0.95350921, 0.61951244, 0.41087446, 0.27464288, 0.17026083, + 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.45070267, 1.36964464, 0.83188516, 0.54755926, 0.36617002, 0.25053367, + 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.45070267, 1.41535246, 0.92192322, 0.64427125, 0.45573691, 0.34370604, + 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.6383388, 1.56271636, 1.01931262, 0.72133851, 0.50118381, 0.36617002, 0.27464288, + 0.19894916, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.61558151, 1.05362725, 0.74807048, 0.54755926, 0.41087446, + 0.32104823, 0.25053367, 0.19894916, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.61558151, 1.08895338, 0.77538133, 0.57119018, 0.43325692, + 0.34370604, 0.27464288, 0.22545385, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.61558151, 1.08895338, 0.803307, 0.59516323, 0.45573691, 0.36617002, + 0.29807833, 0.25053367, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.61558151, 1.08895338, 0.803307, 0.59516323, 0.4783645, 0.38853383, + 0.32104823, 0.27464288, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + 
[14.61464119, 5.85520077, 2.84484982, 1.72759056, 1.162866, 0.83188516, 0.64427125, 0.50118381, 0.41087446, + 0.34370604, 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.72759056, 1.162866, 0.83188516, 0.64427125, 0.52423614, 0.43325692, + 0.36617002, 0.32104823, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.78698075, 1.24153244, 0.92192322, 0.72133851, 0.57119018, + 0.45573691, 0.38853383, 0.34370604, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, + 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.78698075, 1.24153244, 0.92192322, 0.72133851, 0.57119018, 0.4783645, + 0.41087446, 0.36617002, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, + 0.13792117, 0.09824532, 0.02916753], + ], + 1.35: [ + [14.61464119, 0.69515091, 0.02916753], + [14.61464119, 0.95350921, 0.34370604, 0.02916753], + [14.61464119, 1.56271636, 0.57119018, 0.19894916, 0.02916753], + [14.61464119, 1.61558151, 0.69515091, 0.29807833, 0.09824532, 0.02916753], + [14.61464119, 1.84880662, 0.83188516, 0.43325692, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.162866, 0.64427125, 0.36617002, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.36964464, 0.803307, 0.50118381, 0.32104823, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.41535246, 0.83188516, 0.54755926, 0.36617002, 0.25053367, 0.17026083, + 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.56271636, 0.95350921, 0.64427125, 0.45573691, 0.32104823, 0.22545385, + 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.56271636, 0.95350921, 0.64427125, 0.45573691, 0.34370604, 0.25053367, + 0.19894916, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 3.07277966, 1.61558151, 1.01931262, 0.72133851, 0.52423614, 0.38853383, 0.29807833, + 0.22545385, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 3.07277966, 1.61558151, 1.01931262, 0.72133851, 0.52423614, 0.41087446, 0.32104823, + 0.25053367, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 3.07277966, 1.61558151, 1.05362725, 0.74807048, 0.54755926, 0.43325692, 0.34370604, + 0.27464288, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 3.07277966, 1.72759056, 1.12534678, 0.803307, 0.59516323, 0.45573691, 0.36617002, 0.29807833, + 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 3.07277966, 1.72759056, 1.12534678, 0.803307, 0.59516323, 0.4783645, 0.38853383, 0.32104823, + 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.45070267, 1.51179266, 1.01931262, 0.74807048, 0.57119018, 0.45573691, + 0.36617002, 0.32104823, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 5.85520077, 2.6383388, 1.61558151, 1.08895338, 0.803307, 0.61951244, 0.50118381, 0.41087446, + 0.34370604, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 5.85520077, 2.6383388, 1.61558151, 1.08895338, 0.803307, 0.64427125, 0.52423614, 0.43325692, + 0.36617002, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, + 0.09824532, 
0.02916753], + [14.61464119, 5.85520077, 2.6383388, 1.61558151, 1.08895338, 0.803307, 0.64427125, 0.52423614, 0.45573691, + 0.38853383, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, + 0.13792117, 0.09824532, 0.02916753], + ], + 1.40: [ + [14.61464119, 0.59516323, 0.02916753], + [14.61464119, 0.95350921, 0.34370604, 0.02916753], + [14.61464119, 1.08895338, 0.43325692, 0.13792117, 0.02916753], + [14.61464119, 1.56271636, 0.64427125, 0.27464288, 0.09824532, 0.02916753], + [14.61464119, 1.61558151, 0.803307, 0.43325692, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 2.05039096, 0.95350921, 0.54755926, 0.34370604, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.24153244, 0.72133851, 0.43325692, 0.27464288, 0.17026083, 0.09824532, + 0.02916753], + [14.61464119, 2.45070267, 1.24153244, 0.74807048, 0.50118381, 0.34370604, 0.25053367, 0.17026083, + 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.28281462, 0.803307, 0.52423614, 0.36617002, 0.27464288, 0.19894916, 0.13792117, + 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.28281462, 0.803307, 0.54755926, 0.38853383, 0.29807833, 0.22545385, 0.17026083, + 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.41535246, 0.86115354, 0.59516323, 0.43325692, 0.32104823, 0.25053367, + 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.51179266, 0.95350921, 0.64427125, 0.45573691, 0.34370604, 0.27464288, + 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.51179266, 0.95350921, 0.64427125, 0.4783645, 0.36617002, 0.29807833, 0.25053367, + 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.56271636, 0.98595673, 0.69515091, 0.52423614, 0.41087446, 0.34370604, + 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.56271636, 1.01931262, 0.72133851, 0.54755926, 0.43325692, 0.36617002, + 0.32104823, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 2.84484982, 1.61558151, 1.05362725, 0.74807048, 0.57119018, 0.45573691, 0.38853383, + 0.34370604, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 2.84484982, 1.61558151, 1.08895338, 0.803307, 0.61951244, 0.50118381, 0.41087446, 0.36617002, + 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 2.84484982, 1.61558151, 1.08895338, 0.803307, 0.61951244, 0.50118381, 0.43325692, 0.38853383, + 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, + 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.61558151, 1.08895338, 0.803307, 0.64427125, 0.52423614, 0.45573691, 0.41087446, + 0.36617002, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, + 0.13792117, 0.09824532, 0.02916753], + ], + 1.45: [ + [14.61464119, 0.59516323, 0.02916753], + [14.61464119, 0.803307, 0.25053367, 0.02916753], + [14.61464119, 0.95350921, 0.34370604, 0.09824532, 0.02916753], + [14.61464119, 1.24153244, 0.54755926, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 1.56271636, 0.72133851, 0.36617002, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 1.61558151, 0.803307, 0.45573691, 0.27464288, 0.17026083, 
0.09824532, 0.02916753], + [14.61464119, 1.91321158, 0.95350921, 0.57119018, 0.36617002, 0.25053367, 0.17026083, 0.09824532, + 0.02916753], + [14.61464119, 2.19988537, 1.08895338, 0.64427125, 0.41087446, 0.27464288, 0.19894916, 0.13792117, + 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.24153244, 0.74807048, 0.50118381, 0.34370604, 0.25053367, 0.19894916, + 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.24153244, 0.74807048, 0.50118381, 0.36617002, 0.27464288, 0.22545385, + 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.28281462, 0.803307, 0.54755926, 0.41087446, 0.32104823, 0.25053367, 0.19894916, + 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.28281462, 0.803307, 0.57119018, 0.43325692, 0.34370604, 0.27464288, 0.22545385, + 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.28281462, 0.83188516, 0.59516323, 0.45573691, 0.36617002, 0.29807833, + 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.28281462, 0.83188516, 0.59516323, 0.45573691, 0.36617002, 0.32104823, + 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.51179266, 0.95350921, 0.69515091, 0.52423614, 0.41087446, 0.34370604, + 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 2.84484982, 1.51179266, 0.95350921, 0.69515091, 0.52423614, 0.43325692, 0.36617002, + 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 2.84484982, 1.56271636, 0.98595673, 0.72133851, 0.54755926, 0.45573691, 0.38853383, + 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, + 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.56271636, 1.01931262, 0.74807048, 0.57119018, 0.4783645, 0.41087446, 0.36617002, + 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, + 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.56271636, 1.01931262, 0.74807048, 0.59516323, 0.50118381, 0.43325692, + 0.38853383, 0.36617002, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, + 0.17026083, 0.13792117, 0.09824532, 0.02916753], + ], + 1.50: [ + [14.61464119, 0.54755926, 0.02916753], + [14.61464119, 0.803307, 0.25053367, 0.02916753], + [14.61464119, 0.86115354, 0.32104823, 0.09824532, 0.02916753], + [14.61464119, 1.24153244, 0.54755926, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 1.56271636, 0.72133851, 0.36617002, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 1.61558151, 0.803307, 0.45573691, 0.27464288, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 1.61558151, 0.83188516, 0.52423614, 0.34370604, 0.25053367, 0.17026083, 0.09824532, + 0.02916753], + [14.61464119, 1.84880662, 0.95350921, 0.59516323, 0.38853383, 0.27464288, 0.19894916, 0.13792117, + 0.09824532, 0.02916753], + [14.61464119, 1.84880662, 0.95350921, 0.59516323, 0.41087446, 0.29807833, 0.22545385, 0.17026083, + 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 1.84880662, 0.95350921, 0.61951244, 0.43325692, 0.32104823, 0.25053367, 0.19894916, + 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.19988537, 1.12534678, 0.72133851, 0.50118381, 0.36617002, 0.27464288, 0.22545385, + 0.19894916, 
0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.19988537, 1.12534678, 0.72133851, 0.50118381, 0.36617002, 0.29807833, 0.25053367, + 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.36326075, 1.24153244, 0.803307, 0.57119018, 0.43325692, 0.34370604, 0.29807833, 0.25053367, + 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.36326075, 1.24153244, 0.803307, 0.57119018, 0.43325692, 0.34370604, 0.29807833, 0.27464288, + 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.36326075, 1.24153244, 0.803307, 0.59516323, 0.45573691, 0.36617002, 0.32104823, 0.29807833, + 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.36326075, 1.24153244, 0.803307, 0.59516323, 0.45573691, 0.38853383, 0.34370604, 0.32104823, + 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 2.45070267, 1.32549286, 0.86115354, 0.64427125, 0.50118381, 0.41087446, 0.36617002, + 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, + 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.36964464, 0.92192322, 0.69515091, 0.54755926, 0.45573691, 0.41087446, + 0.36617002, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, + 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.41535246, 0.95350921, 0.72133851, 0.57119018, 0.4783645, 0.43325692, 0.38853383, + 0.36617002, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, + 0.13792117, 0.09824532, 0.02916753], + ], + } + + def get_sigmas(self, coeff, steps, denoise): + total_steps = steps + if denoise < 1.0: + if denoise <= 0.0: + return (torch.FloatTensor([]),) + total_steps = round(steps * denoise) + + if steps <= 20: + sigmas = self.NOISE_LEVELS[round(coeff, 2)][steps-2][:] + else: + sigmas = self.NOISE_LEVELS[round(coeff, 2)][-1][:] + sigmas = loglinear_interp(sigmas, steps + 1) + + sigmas = sigmas[-(total_steps + 1):] + sigmas[-1] = 0 + return (torch.FloatTensor(sigmas), ) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/libs/stability.py b/ComfyUI-Easy-Use/py/libs/stability.py new file mode 100644 index 0000000000000000000000000000000000000000..26688fa595d36683e65dc9bbebfd5e7685aba5f4 --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/stability.py @@ -0,0 +1,201 @@ +import json +import os +import yaml +import requests +import pathlib +from aiohttp import web +from server import PromptServer +from .image import tensor2pil, pil2tensor, image2base64, pil2byte +from .log import log_node_error + + +root_path = pathlib.Path(__file__).parent.parent.parent +config_path = os.path.join(root_path,'config.yaml') +default_key = [{'name':'Default', 'key':''}] + +class StabilityAPI: + def __init__(self): + self.api_url = "https://api.stability.ai" + self.api_keys = None + self.api_current = 0 + self.user_info = {} + self.getAPIKeys() + + def getErrors(self, code): + errors = { + 400: "Bad Request", + 403: "ApiKey Forbidden", + 413: "Your request was larger than 10MiB.", + 429: "You have made more than 150 requests in 10 seconds.", + 500: "Internal Server Error", + } + return errors.get(code, "Unknown Error") + + def getAPIKeys(self): + if os.path.isfile(config_path): + with open(config_path, 'r') as f: + data = yaml.load(f, Loader=yaml.FullLoader) + 
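# (Annotation, not part of the original file:) the reads and writes around this point
# imply a config.yaml of roughly the following shape; this layout is inferred from the
# code itself, not from upstream documentation:
#   STABILITY_API_KEY:
#     - name: Default
#       key: ''
#   STABILITY_API_DEFAULT: 0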
if not data: + data = {'STABILITY_API_KEY': default_key, 'STABILITY_API_DEFAULT':0} + with open(config_path, 'w') as f: + yaml.dump(data, f) + if 'STABILITY_API_KEY' not in data: + data['STABILITY_API_KEY'] = default_key + data['STABILITY_API_DEFAULT'] = 0 + with open(config_path, 'w') as f: + yaml.dump(data, f) + api_keys = data['STABILITY_API_KEY'] + self.api_current = data['STABILITY_API_DEFAULT'] + self.api_keys = api_keys + return api_keys + else: + # create a yaml file + with open(config_path, 'w') as f: + data = {'STABILITY_API_KEY': default_key, 'STABILITY_API_DEFAULT':0} + yaml.dump(data, f) + return data['STABILITY_API_KEY'] + pass + + def setAPIKeys(self, api_keys): + if len(api_keys) > 0: + self.api_keys = api_keys + # load and save the yaml file + with open(config_path, 'r') as f: + data = yaml.load(f, Loader=yaml.FullLoader) + data['STABILITY_API_KEY'] = api_keys + with open(config_path, 'w') as f: + yaml.dump(data, f) + return True + + def setAPIDefault(self, current): + if current is not None: + self.api_current = current + # load and save the yaml file + with open(config_path, 'r') as f: + data = yaml.load(f, Loader=yaml.FullLoader) + data['STABILITY_API_DEFAULT'] = current + with open(config_path, 'w') as f: + yaml.dump(data, f) + return True + + def generate_sd3_image(self, prompt, negative_prompt, aspect_ratio, model, seed, mode='text-to-image', image=None, strength=1, output_format='png', node_name='easy stableDiffusion3API'): + url = f"{self.api_url}/v2beta/stable-image/generate/sd3" + api_key = self.api_keys[self.api_current]['key'] + files = None + data = { + "prompt": prompt, + "mode": mode, + "model": model, + "seed": seed, + "output_format": output_format, + } + if model == 'sd3': + data['negative_prompt'] = negative_prompt + + if mode == 'text-to-image': + files = {"none": ''} + data['aspect_ratio'] = aspect_ratio + elif mode == 'image-to-image': + pil_image = tensor2pil(image) + image_byte = pil2byte(pil_image) + files = {"image": ("output.png", image_byte, 'image/png')} + data['strength'] = strength + + response = requests.post(url, + headers={"authorization": f"{api_key}", "accept": "application/json"}, + files=files, + data=data, + ) + if response.status_code == 200: + PromptServer.instance.send_sync('stable-diffusion-api-generate-succeed',{"model":model}) + json_data = response.json() + image_base64 = json_data['image'] + image_data = image2base64(image_base64) + output_t = pil2tensor(image_data) + return output_t + else: + if 'application/json' in response.headers['Content-Type']: + error_info = response.json() + log_node_error(node_name, error_info.get('name', 'No name provided')) + log_node_error(node_name, error_info.get('errors', ['No details provided'])) + error_status_text = self.getErrors(response.status_code) + PromptServer.instance.send_sync('easyuse-toast',{"type": "error", "content": error_status_text}) + raise Exception(f"Failed to generate image: {error_status_text}") + + # get user account + async def getUserAccount(self, cache=True): + url = f"{self.api_url}/v1/user/account" + api_key = self.api_keys[self.api_current]['key'] + name = self.api_keys[self.api_current]['name'] + if cache and name in self.user_info: + return self.user_info[name] + else: + response = requests.get(url, headers={"Authorization": f"Bearer {api_key}"}) + if response.status_code == 200: + user_info = response.json() + self.user_info[name] = user_info + return user_info + else: + PromptServer.instance.send_sync('easyuse-toast',{'type': 'error', 'content': 
self.getErrors(response.status_code)}) + return None + + # get user balance + async def getUserBalance(self): + url = f"{self.api_url}/v1/user/balance" + api_key = self.api_keys[self.api_current]['key'] + response = requests.get(url, headers={ + "Authorization": f"Bearer {api_key}" + }) + if response.status_code == 200: + return response.json() + else: + PromptServer.instance.send_sync('easyuse-toast', {'type': 'error', 'content': self.getErrors(response.status_code)}) + return None + +stableAPI = StabilityAPI() + + +@PromptServer.instance.routes.get("/easyuse/stability/api_keys") +async def get_stability_api_keys(request): + stableAPI.getAPIKeys() + return web.json_response({"keys": stableAPI.api_keys, "current": stableAPI.api_current}) + +@PromptServer.instance.routes.post("/easyuse/stability/set_api_keys") +async def set_stability_api_keys(request): + post = await request.post() + api_keys = post.get("api_keys") + current = post.get('current') + if api_keys is not None: + api_keys = json.loads(api_keys) + stableAPI.setAPIKeys(api_keys) + if current is not None: + print(current) + stableAPI.setAPIDefault(int(current)) + account = await stableAPI.getUserAccount() + balance = await stableAPI.getUserBalance() + return web.json_response({'account': account, 'balance': balance}) + else: + return web.json_response({'status': 'ok'}) + else: + return web.Response(status=400) + +@PromptServer.instance.routes.post("/easyuse/stability/set_apikey_default") +async def set_stability_api_default(request): + post = await request.post() + current = post.get("current") + if current is not None and current < len(stableAPI.api_keys): + stableAPI.api_current = current + return web.json_response({'status': 'ok'}) + else: + return web.Response(status=400) + +@PromptServer.instance.routes.get("/easyuse/stability/user_info") +async def get_account_info(request): + account = await stableAPI.getUserAccount() + balance = await stableAPI.getUserBalance() + return web.json_response({'account': account, 'balance': balance}) + +@PromptServer.instance.routes.get("/easyuse/stability/balance") +async def get_balance_info(request): + balance = await stableAPI.getUserBalance() + return web.json_response({'balance': balance}) diff --git a/ComfyUI-Easy-Use/py/libs/styleAlign.py b/ComfyUI-Easy-Use/py/libs/styleAlign.py new file mode 100644 index 0000000000000000000000000000000000000000..fefae6022fa021531907df8e2d4ceab28012b880 --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/styleAlign.py @@ -0,0 +1,148 @@ +import torch +import torch.nn as nn +from comfy.model_patcher import ModelPatcher +from typing import Union + +T = torch.Tensor + + +def exists(val): + return val is not None + + +def default(val, d): + if exists(val): + return val + return d + + +class StyleAlignedArgs: + def __init__(self, share_attn: str) -> None: + self.adain_keys = "k" in share_attn + self.adain_values = "v" in share_attn + self.adain_queries = "q" in share_attn + + share_attention: bool = True + adain_queries: bool = True + adain_keys: bool = True + adain_values: bool = True + + +def expand_first( + feat: T, + scale=1.0, +) -> T: + """ + Expand the first element so it has the same shape as the rest of the batch. 
+ """ + b = feat.shape[0] + feat_style = torch.stack((feat[0], feat[b // 2])).unsqueeze(1) + if scale == 1: + feat_style = feat_style.expand(2, b // 2, *feat.shape[1:]) + else: + feat_style = feat_style.repeat(1, b // 2, 1, 1, 1) + feat_style = torch.cat([feat_style[:, :1], scale * feat_style[:, 1:]], dim=1) + return feat_style.reshape(*feat.shape) + + +def concat_first(feat: T, dim=2, scale=1.0) -> T: + """ + concat the the feature and the style feature expanded above + """ + feat_style = expand_first(feat, scale=scale) + return torch.cat((feat, feat_style), dim=dim) + + +def calc_mean_std(feat, eps: float = 1e-5) -> "tuple[T, T]": + feat_std = (feat.var(dim=-2, keepdims=True) + eps).sqrt() + feat_mean = feat.mean(dim=-2, keepdims=True) + return feat_mean, feat_std + +def adain(feat: T) -> T: + feat_mean, feat_std = calc_mean_std(feat) + feat_style_mean = expand_first(feat_mean) + feat_style_std = expand_first(feat_std) + feat = (feat - feat_mean) / feat_std + feat = feat * feat_style_std + feat_style_mean + return feat + +class SharedAttentionProcessor: + def __init__(self, args: StyleAlignedArgs, scale: float): + self.args = args + self.scale = scale + + def __call__(self, q, k, v, extra_options): + if self.args.adain_queries: + q = adain(q) + if self.args.adain_keys: + k = adain(k) + if self.args.adain_values: + v = adain(v) + if self.args.share_attention: + k = concat_first(k, -2, scale=self.scale) + v = concat_first(v, -2) + + return q, k, v + + +def get_norm_layers( + layer: nn.Module, + norm_layers_: "dict[str, list[Union[nn.GroupNorm, nn.LayerNorm]]]", + share_layer_norm: bool, + share_group_norm: bool, +): + if isinstance(layer, nn.LayerNorm) and share_layer_norm: + norm_layers_["layer"].append(layer) + if isinstance(layer, nn.GroupNorm) and share_group_norm: + norm_layers_["group"].append(layer) + else: + for child_layer in layer.children(): + get_norm_layers( + child_layer, norm_layers_, share_layer_norm, share_group_norm + ) + + +def register_norm_forward( + norm_layer: Union[nn.GroupNorm, nn.LayerNorm], +) -> Union[nn.GroupNorm, nn.LayerNorm]: + if not hasattr(norm_layer, "orig_forward"): + setattr(norm_layer, "orig_forward", norm_layer.forward) + orig_forward = norm_layer.orig_forward + + def forward_(hidden_states: T) -> T: + n = hidden_states.shape[-2] + hidden_states = concat_first(hidden_states, dim=-2) + hidden_states = orig_forward(hidden_states) # type: ignore + return hidden_states[..., :n, :] + + norm_layer.forward = forward_ # type: ignore + return norm_layer + + +def register_shared_norm( + model: ModelPatcher, + share_group_norm: bool = True, + share_layer_norm: bool = True, +): + norm_layers = {"group": [], "layer": []} + get_norm_layers(model.model, norm_layers, share_layer_norm, share_group_norm) + print( + f"Patching {len(norm_layers['group'])} group norms, {len(norm_layers['layer'])} layer norms." 
+ ) + return [register_norm_forward(layer) for layer in norm_layers["group"]] + [ + register_norm_forward(layer) for layer in norm_layers["layer"] + ] + + +SHARE_NORM_OPTIONS = ["both", "group", "layer", "disabled"] +SHARE_ATTN_OPTIONS = ["q+k", "q+k+v", "disabled"] + + +def styleAlignBatch(model, share_norm, share_attn, scale=1.0): + m = model.clone() + share_group_norm = share_norm in ["group", "both"] + share_layer_norm = share_norm in ["layer", "both"] + register_shared_norm(model, share_group_norm, share_layer_norm) + args = StyleAlignedArgs(share_attn) + m.set_model_attn1_patch(SharedAttentionProcessor(args, scale)) + return m \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/libs/translate.py b/ComfyUI-Easy-Use/py/libs/translate.py new file mode 100644 index 0000000000000000000000000000000000000000..ac63dc14e495a18c8f7cacb4a25468b872b98bfb --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/translate.py @@ -0,0 +1,247 @@ +#credit to shadowcz007 for this module +#from https://github.com/shadowcz007/comfyui-mixlab-nodes/blob/main/nodes/TextGenerateNode.py +import re +import os +import folder_paths + +import comfy.utils +import torch +from transformers import AutoModelForSeq2SeqLM, AutoTokenizer + +from .utils import install_package +try: + from lark import Lark, Transformer, v_args +except: + print('install lark-parser...') + install_package('lark-parser') + from lark import Lark, Transformer, v_args + +model_path = os.path.join(folder_paths.models_dir, 'prompt_generator') +zh_en_model_path = os.path.join(model_path, 'opus-mt-zh-en') +zh_en_model, zh_en_tokenizer = None, None + +def correct_prompt_syntax(prompt=""): + # print("input prompt",prompt) + corrected_elements = [] + # 处理成统一的英文标点 + prompt = prompt.replace('(', '(').replace(')', ')').replace(',', ',').replace(';', ',').replace('。', '.').replace(':',':').replace('\\',',') + # 删除多余的空格 + prompt = re.sub(r'\s+', ' ', prompt).strip() + prompt = prompt.replace("< ","<").replace(" >",">").replace("( ","(").replace(" )",")").replace("[ ","[").replace(' ]',']') + + # 分词 + prompt_elements = prompt.split(',') + + def balance_brackets(element, open_bracket, close_bracket): + open_brackets_count = element.count(open_bracket) + close_brackets_count = element.count(close_bracket) + return element + close_bracket * (open_brackets_count - close_brackets_count) + + for element in prompt_elements: + element = element.strip() + + # 处理空元素 + if not element: + continue + + # 检查并处理圆括号、方括号、尖括号 + if element[0] in '([': + corrected_element = balance_brackets(element, '(', ')') if element[0] == '(' else balance_brackets(element, '[', ']') + elif element[0] == '<': + corrected_element = balance_brackets(element, '<', '>') + else: + # 删除开头的右括号或右方括号 + corrected_element = element.lstrip(')]') + + corrected_elements.append(corrected_element) + + # 重组修正后的prompt + return ','.join(corrected_elements) + +def detect_language(input_str): + # 统计中文和英文字符的数量 + count_cn = count_en = 0 + for char in input_str: + if '\u4e00' <= char <= '\u9fff': + count_cn += 1 + elif char.isalpha(): + count_en += 1 + + # 根据统计的字符数量判断主要语言 + if count_cn > count_en: + return "cn" + elif count_en > count_cn: + return "en" + else: + return "unknow" + +def has_chinese(text): + has_cn = False + _text = text + _text = re.sub(r'<.*?>', '', _text) + _text = re.sub(r'__.*?__', '', _text) + _text = re.sub(r'embedding:.*?$', '', _text) + for char in _text: + if '\u4e00' <= char <= '\u9fff': + has_cn = True + break + elif char.isalpha(): + continue + return has_cn + +def translate(text): + global 
zh_en_model_path, zh_en_model, zh_en_tokenizer + + if not os.path.exists(zh_en_model_path): + zh_en_model_path = 'Helsinki-NLP/opus-mt-zh-en' + + if zh_en_model is None: + + zh_en_model = AutoModelForSeq2SeqLM.from_pretrained(zh_en_model_path).eval() + zh_en_tokenizer = AutoTokenizer.from_pretrained(zh_en_model_path, padding=True, truncation=True) + + zh_en_model.to("cuda" if torch.cuda.is_available() else "cpu") + with torch.no_grad(): + encoded = zh_en_tokenizer([text], return_tensors="pt") + encoded.to(zh_en_model.device) + sequences = zh_en_model.generate(**encoded) + return zh_en_tokenizer.batch_decode(sequences, skip_special_tokens=True)[0] + +@v_args(inline=True) # Decorator to flatten the tree directly into the function arguments +class ChinesePromptTranslate(Transformer): + + def sentence(self, *args): + return ", ".join(args) + + def phrase(self, *args): + return "".join(args) + + def emphasis(self, *args): + # Reconstruct the emphasis with translated content + return "(" + "".join(args) + ")" + + def weak_emphasis(self, *args): + print('weak_emphasis:', args) + return "[" + "".join(args) + "]" + + def embedding(self, *args): + print('prompt embedding', args[0]) + if len(args) == 1: + embedding_name = str(args[0]) + return f"embedding:{embedding_name}" + elif len(args) > 1: + embedding_name, *numbers = args + + if len(numbers) == 2: + return f"embedding:{embedding_name}:{numbers[0]}:{numbers[1]}" + elif len(numbers) == 1: + return f"embedding:{embedding_name}:{numbers[0]}" + else: + return f"embedding:{embedding_name}" + + def lora(self, *args): + if len(args) == 1: + return f"" + elif len(args) > 1: + # print('lora', args) + _, loar_name, *numbers = args + loar_name = str(loar_name).strip() + if len(numbers) == 2: + return f"" + elif len(numbers) == 1: + return f"" + else: + return f"" + + def weight(self, word, number): + translated_word = translate(str(word)).rstrip('.') + return f"({translated_word}:{str(number).strip()})" + + def schedule(self, *args): + print('prompt schedule', args) + data = [str(arg).strip() for arg in args] + + return f"[{':'.join(data)}]" + + def word(self, word): + # Translate each word using the dictionary + word = str(word) + match_cn = re.search(r'@.*?@', word) + if re.search(r'__.*?__', word): + return word.rstrip('.') + elif match_cn: + chinese = match_cn.group() + before = word.split('@', 1) + before = before[0] if len(before) > 0 else '' + before = translate(str(before)).rstrip('.') if before else '' + after = word.rsplit('@', 1) + after = after[len(after)-1] if len(after) > 1 else '' + after = translate(after).rstrip('.') if after else '' + return before + chinese.replace('@', '').rstrip('.') + after + elif detect_language(word) == "cn": + return translate(word).rstrip('.') + else: + return word.rstrip('.') + + +#定义Prompt文法 +grammar = """ +start: sentence +sentence: phrase ("," phrase)* +phrase: emphasis | weight | word | lora | embedding | schedule +emphasis: "(" sentence ")" -> emphasis + | "[" sentence "]" -> weak_emphasis +weight: "(" word ":" NUMBER ")" +schedule: "[" word ":" word ":" NUMBER "]" +lora: "<" WORD ":" WORD (":" NUMBER)? (":" NUMBER)? ">" +embedding: "embedding" ":" WORD (":" NUMBER)? (":" NUMBER)? 
+word: WORD + +NUMBER: /\s*-?\d+(\.\d+)?\s*/ +WORD: /[^,:\(\)\[\]<>]+/ +""" +def zh_to_en(text): + global zh_en_model_path, zh_en_model, zh_en_tokenizer + # 进度条 + pbar = comfy.utils.ProgressBar(len(text) + 1) + texts = [correct_prompt_syntax(t) for t in text] + + install_package('sentencepiece', '0.2.0') + + if not os.path.exists(zh_en_model_path): + zh_en_model_path = 'Helsinki-NLP/opus-mt-zh-en' + + if zh_en_model is None: + zh_en_model = AutoModelForSeq2SeqLM.from_pretrained(zh_en_model_path).eval() + zh_en_tokenizer = AutoTokenizer.from_pretrained(zh_en_model_path, padding=True, truncation=True) + + zh_en_model.to("cuda" if torch.cuda.is_available() else "cpu") + + prompt_result = [] + + en_texts = [] + + for t in texts: + if t: + # translated_text = translated_word = translate(zh_en_tokenizer,zh_en_model,str(t)) + parser = Lark(grammar, start="start", parser="lalr", transformer=ChinesePromptTranslate()) + # print('t',t) + result = parser.parse(t).children + # print('en_result',result) + # en_text=translate(zh_en_tokenizer,zh_en_model,text_without_syntax) + en_texts.append(result[0]) + + zh_en_model.to('cpu') + # print("test en_text", en_texts) + # en_text.to("cuda" if torch.cuda.is_available() else "cpu") + + pbar.update(1) + for t in en_texts: + prompt_result.append(t) + pbar.update(1) + + # print('prompt_result', prompt_result, ) + if len(prompt_result) == 0: + prompt_result = [""] + + return prompt_result \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/libs/utils.py b/ComfyUI-Easy-Use/py/libs/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..73888a295754b7bf2a50b35ebeacfd623e24f5b5 --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/utils.py @@ -0,0 +1,277 @@ +class AlwaysEqualProxy(str): + def __eq__(self, _): + return True + + def __ne__(self, _): + return False + +class TautologyStr(str): + def __ne__(self, other): + return False + +class ByPassTypeTuple(tuple): + def __getitem__(self, index): + if index>0: + index=0 + item = super().__getitem__(index) + if isinstance(item, str): + return TautologyStr(item) + return item + +comfy_ui_revision = None +def get_comfyui_revision(): + try: + import git + import os + import folder_paths + repo = git.Repo(os.path.dirname(folder_paths.__file__)) + comfy_ui_revision = len(list(repo.iter_commits('HEAD'))) + except: + comfy_ui_revision = "Unknown" + return comfy_ui_revision + + +import sys +import importlib.util +import importlib.metadata +import comfy.model_management as mm +import gc +from packaging import version +from server import PromptServer +def is_package_installed(package): + try: + module = importlib.util.find_spec(package) + return module is not None + except ImportError as e: + print(e) + return False + +def install_package(package, v=None, compare=True, compare_version=None): + run_install = True + if is_package_installed(package): + try: + installed_version = importlib.metadata.version(package) + if v is not None: + if compare_version is None: + compare_version = v + if not compare or version.parse(installed_version) >= version.parse(compare_version): + run_install = False + else: + run_install = False + except: + run_install = False + + if run_install: + import subprocess + package_command = package + '==' + v if v is not None else package + PromptServer.instance.send_sync("easyuse-toast", {'content': f"Installing {package_command}...", 'duration': 5000}) + result = subprocess.run([sys.executable, '-s', '-m', 'pip', 'install', package_command], capture_output=True, text=True) + if 
result.returncode == 0: + PromptServer.instance.send_sync("easyuse-toast", {'content': f"{package} installed successfully", 'type': 'success', 'duration': 5000}) + print(f"Package {package} installed successfully") + return True + else: + PromptServer.instance.send_sync("easyuse-toast", {'content': f"{package} installed failed", 'type': 'error', 'duration': 5000}) + print(f"Package {package} installed failed") + return False + else: + return False + +def compare_revision(num): + global comfy_ui_revision + if not comfy_ui_revision: + comfy_ui_revision = get_comfyui_revision() + return True if comfy_ui_revision == 'Unknown' or int(comfy_ui_revision) >= num else False +def find_tags(string: str, sep="/") -> list[str]: + """ + find tags from string use the sep for split + Note: string may contain the \\ or / for path separator + """ + if not string: + return [] + string = string.replace("\\", "/") + while "//" in string: + string = string.replace("//", "/") + if string and sep in string: + return string.split(sep)[:-1] + return [] + + +from comfy.model_base import BaseModel +import comfy.supported_models +import comfy.supported_models_base +def get_sd_version(model): + base: BaseModel = model.model + model_config: comfy.supported_models.supported_models_base.BASE = base.model_config + if isinstance(model_config, comfy.supported_models.SDXL): + return 'sdxl' + elif isinstance(model_config, comfy.supported_models.SDXLRefiner): + return 'sdxl_refiner' + elif isinstance( + model_config, (comfy.supported_models.SD15, comfy.supported_models.SD20) + ): + return 'sd1' + elif isinstance( + model_config, (comfy.supported_models.SVD_img2vid) + ): + return 'svd' + elif isinstance(model_config, comfy.supported_models.SD3): + return 'sd3' + elif isinstance(model_config, comfy.supported_models.HunyuanDiT): + return 'hydit' + elif isinstance(model_config, comfy.supported_models.Flux): + return 'flux' + else: + return 'unknown' + +def find_nearest_steps(clip_id, prompt): + """Find the nearest KSampler or preSampling node that references the given id.""" + def check_link_to_clip(node_id, clip_id, visited=None, node=None): + """Check if a given node links directly or indirectly to a loader node.""" + if visited is None: + visited = set() + + if node_id in visited: + return False + visited.add(node_id) + if "pipe" in node["inputs"]: + link_ids = node["inputs"]["pipe"] + for id in link_ids: + if id != 0 and id == str(clip_id): + return True + return False + + for id in prompt: + node = prompt[id] + if "Sampler" in node["class_type"] or "sampler" in node["class_type"] or "Sampling" in node["class_type"]: + # Check if this KSampler node directly or indirectly references the given CLIPTextEncode node + if check_link_to_clip(id, clip_id, None, node): + steps = node["inputs"]["steps"] if "steps" in node["inputs"] else 1 + return steps + return 1 + +def find_wildcards_seed(clip_id, text, prompt): + """ Find easy wildcards seed value""" + def find_link_clip_id(id, seed, wildcard_id): + node = prompt[id] + if "positive" in node['inputs']: + link_ids = node["inputs"]["positive"] + if type(link_ids) == list: + for id in link_ids: + if id != 0: + if id == wildcard_id: + wildcard_node = prompt[wildcard_id] + seed = wildcard_node["inputs"]["seed"] if "seed" in wildcard_node["inputs"] else None + if seed is None: + seed = wildcard_node["inputs"]["seed_num"] if "seed_num" in wildcard_node["inputs"] else None + return seed + else: + return find_link_clip_id(id, seed, wildcard_id) + else: + return None + else: + return None + if 
"__" in text: + seed = None + for id in prompt: + node = prompt[id] + if "wildcards" in node["class_type"]: + wildcard_id = id + return find_link_clip_id(str(clip_id), seed, wildcard_id) + return seed + else: + return None + +def is_linked_styles_selector(prompt, my_unique_id, prompt_type='positive'): + inputs_values = prompt[my_unique_id]['inputs'][prompt_type] if prompt_type in prompt[my_unique_id][ + 'inputs'] else None + if type(inputs_values) == list and inputs_values != 'undefined' and inputs_values[0]: + return True if prompt[inputs_values[0]] and prompt[inputs_values[0]]['class_type'] == 'easy stylesSelector' else False + else: + return False + +use_mirror = False +def get_local_filepath(url, dirname, local_file_name=None): + """Get local file path when is already downloaded or download it""" + import os + from server import PromptServer + from urllib.parse import urlparse + from torch.hub import download_url_to_file + global use_mirror + if not os.path.exists(dirname): + os.makedirs(dirname) + if not local_file_name: + parsed_url = urlparse(url) + local_file_name = os.path.basename(parsed_url.path) + destination = os.path.join(dirname, local_file_name) + if not os.path.exists(destination): + try: + if use_mirror: + url = url.replace('huggingface.co', 'hf-mirror.com') + print(f'downloading {url} to {destination}') + PromptServer.instance.send_sync("easyuse-toast", {'content': f'Downloading model to {destination}, please wait...', 'duration': 10000}) + download_url_to_file(url, destination) + except Exception as e: + use_mirror = True + url = url.replace('huggingface.co', 'hf-mirror.com') + print(f'无法从huggingface下载,正在尝试从 {url} 下载...') + PromptServer.instance.send_sync("easyuse-toast", {'content': f'无法连接huggingface,正在尝试从 {url} 下载...', 'duration': 10000}) + try: + download_url_to_file(url, destination) + except Exception as err: + PromptServer.instance.send_sync("easyuse-toast", + {'content': f'无法从 {url} 下载模型', 'type':'error'}) + raise Exception(f'无法从 {url} 下载,错误信息:{str(err.args[0])}') + return destination + +def to_lora_patch_dict(state_dict: dict) -> dict: + """ Convert raw lora state_dict to patch_dict that can be applied on + modelpatcher.""" + patch_dict = {} + for k, w in state_dict.items(): + model_key, patch_type, weight_index = k.split('::') + if model_key not in patch_dict: + patch_dict[model_key] = {} + if patch_type not in patch_dict[model_key]: + patch_dict[model_key][patch_type] = [None] * 16 + patch_dict[model_key][patch_type][int(weight_index)] = w + + patch_flat = {} + for model_key, v in patch_dict.items(): + for patch_type, weight_list in v.items(): + patch_flat[model_key] = (patch_type, weight_list) + + return patch_flat + +def easySave(images, filename_prefix, output_type, prompt=None, extra_pnginfo=None): + """Save or Preview Image""" + from nodes import PreviewImage, SaveImage + if output_type == "Hide": + return list() + if output_type in ["Preview", "Preview&Choose"]: + filename_prefix = 'easyPreview' + results = PreviewImage().save_images(images, filename_prefix, prompt, extra_pnginfo) + return results['ui']['images'] + else: + results = SaveImage().save_images(images, filename_prefix, prompt, extra_pnginfo) + return results['ui']['images'] + +def getMetadata(filepath): + with open(filepath, "rb") as file: + # https://github.com/huggingface/safetensors#format + # 8 bytes: N, an unsigned little-endian 64-bit integer, containing the size of the header + header_size = int.from_bytes(file.read(8), "little", signed=False) + + if header_size <= 0: + raise 
BufferError("Invalid header size") + + header = file.read(header_size) + if header_size <= 0: + raise BufferError("Invalid header") + + return header + +def cleanGPUUsedForce(): + gc.collect() + mm.unload_all_models() + mm.soft_empty_cache() \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/libs/wildcards.py b/ComfyUI-Easy-Use/py/libs/wildcards.py new file mode 100644 index 0000000000000000000000000000000000000000..28e5ed65d88c20815689531b8004ed32fd6500bf --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/wildcards.py @@ -0,0 +1,304 @@ +import re +import random +import os +import folder_paths +import yaml +import json +from .log import log_node_info + +easy_wildcard_dict = {} + +def get_wildcard_list(): + return [f"__{x}__" for x in easy_wildcard_dict.keys()] + +def wildcard_normalize(x): + return x.replace("\\", "/").lower() + +def read_wildcard(k, v): + if isinstance(v, list): + k = wildcard_normalize(k) + easy_wildcard_dict[k] = v + elif isinstance(v, dict): + for k2, v2 in v.items(): + new_key = f"{k}/{k2}" + new_key = wildcard_normalize(new_key) + read_wildcard(new_key, v2) + +def read_wildcard_dict(wildcard_path): + global easy_wildcard_dict + for root, directories, files in os.walk(wildcard_path, followlinks=True): + for file in files: + if file.endswith('.txt'): + file_path = os.path.join(root, file) + rel_path = os.path.relpath(file_path, wildcard_path) + key = os.path.splitext(rel_path)[0].replace('\\', '/').lower() + + try: + with open(file_path, 'r', encoding="ISO-8859-1") as f: + lines = f.read().splitlines() + easy_wildcard_dict[key] = lines + except UnicodeDecodeError: + with open(file_path, 'r', encoding="UTF-8", errors="ignore") as f: + lines = f.read().splitlines() + easy_wildcard_dict[key] = lines + elif file.endswith('.yaml'): + file_path = os.path.join(root, file) + with open(file_path, 'r') as f: + yaml_data = yaml.load(f, Loader=yaml.FullLoader) + + for k, v in yaml_data.items(): + read_wildcard(k, v) + elif file.endswith('.json'): + file_path = os.path.join(root, file) + try: + with open(file_path, 'r') as f: + json_data = json.load(f) + for key, value in json_data.items(): + key = wildcard_normalize(key) + easy_wildcard_dict[key] = value + except ValueError: + print('json files load error') + return easy_wildcard_dict + + +def process(text, seed=None): + + if seed is not None: + random.seed(seed) + + def replace_options(string): + replacements_found = False + + def replace_option(match): + nonlocal replacements_found + options = match.group(1).split('|') + + multi_select_pattern = options[0].split('$$') + select_range = None + select_sep = ' ' + range_pattern = r'(\d+)(-(\d+))?' 
+ range_pattern2 = r'-(\d+)' + + if len(multi_select_pattern) > 1: + r = re.match(range_pattern, options[0]) + + if r is None: + r = re.match(range_pattern2, options[0]) + a = '1' + b = r.group(1).strip() + else: + a = r.group(1).strip() + b = r.group(3).strip() + + if r is not None: + if b is not None and is_numeric_string(a) and is_numeric_string(b): + # PATTERN: num1-num2 + select_range = int(a), int(b) + elif is_numeric_string(a): + # PATTERN: num + x = int(a) + select_range = (x, x) + + if select_range is not None and len(multi_select_pattern) == 2: + # PATTERN: count$$ + options[0] = multi_select_pattern[1] + elif select_range is not None and len(multi_select_pattern) == 3: + # PATTERN: count$$ sep $$ + select_sep = multi_select_pattern[1] + options[0] = multi_select_pattern[2] + + adjusted_probabilities = [] + + total_prob = 0 + + for option in options: + parts = option.split('::', 1) + if len(parts) == 2 and is_numeric_string(parts[0].strip()): + config_value = float(parts[0].strip()) + else: + config_value = 1 # Default value if no configuration is provided + + adjusted_probabilities.append(config_value) + total_prob += config_value + + normalized_probabilities = [prob / total_prob for prob in adjusted_probabilities] + + if select_range is None: + select_count = 1 + else: + select_count = random.randint(select_range[0], select_range[1]) + + if select_count > len(options): + selected_items = options + else: + selected_items = random.choices(options, weights=normalized_probabilities, k=select_count) + selected_items = set(selected_items) + + try_count = 0 + while len(selected_items) < select_count and try_count < 10: + remaining_count = select_count - len(selected_items) + additional_items = random.choices(options, weights=normalized_probabilities, k=remaining_count) + selected_items |= set(additional_items) + try_count += 1 + + selected_items2 = [re.sub(r'^\s*[0-9.]+::', '', x, 1) for x in selected_items] + replacement = select_sep.join(selected_items2) + if '::' in replacement: + pass + + replacements_found = True + return replacement + + pattern = r'{([^{}]*?)}' + replaced_string = re.sub(pattern, replace_option, string) + + return replaced_string, replacements_found + + def replace_wildcard(string): + global easy_wildcard_dict + pattern = r"__([\w\s.\-+/*\\]+?)__" + matches = re.findall(pattern, string) + replacements_found = False + + for match in matches: + keyword = match.lower() + keyword = wildcard_normalize(keyword) + if keyword in easy_wildcard_dict: + replacement = random.choice(easy_wildcard_dict[keyword]) + replacements_found = True + string = string.replace(f"__{match}__", replacement, 1) + elif '*' in keyword: + subpattern = keyword.replace('*', '.*').replace('+','\+') + total_patterns = [] + found = False + for k, v in easy_wildcard_dict.items(): + if re.match(subpattern, k) is not None: + total_patterns += v + found = True + + if found: + replacement = random.choice(total_patterns) + replacements_found = True + string = string.replace(f"__{match}__", replacement, 1) + elif '/' not in keyword: + string_fallback = string.replace(f"__{match}__", f"__*/{match}__", 1) + string, replacements_found = replace_wildcard(string_fallback) + + return string, replacements_found + + replace_depth = 100 + stop_unwrap = False + while not stop_unwrap and replace_depth > 1: + replace_depth -= 1 # prevent infinite loop + + # pass1: replace options + pass1, is_replaced1 = replace_options(text) + + while is_replaced1: + pass1, is_replaced1 = replace_options(pass1) + + # pass2: replace 
wildcards + text, is_replaced2 = replace_wildcard(pass1) + stop_unwrap = not is_replaced1 and not is_replaced2 + + return text + + +def is_numeric_string(input_str): + return re.match(r'^-?\d+(\.\d+)?$', input_str) is not None + + +def safe_float(x): + if is_numeric_string(x): + return float(x) + else: + return 1.0 + + +def extract_lora_values(string): + pattern = r']+)>' + matches = re.findall(pattern, string) + + def touch_lbw(text): + return re.sub(r'LBW=[A-Za-z][A-Za-z0-9_-]*:', r'LBW=', text) + + items = [touch_lbw(match.strip(':')) for match in matches] + + added = set() + result = [] + for item in items: + item = item.split(':') + + lora = None + a = None + b = None + lbw = None + lbw_a = None + lbw_b = None + + if len(item) > 0: + lora = item[0] + + for sub_item in item[1:]: + if is_numeric_string(sub_item): + if a is None: + a = float(sub_item) + elif b is None: + b = float(sub_item) + elif sub_item.startswith("LBW="): + for lbw_item in sub_item[4:].split(';'): + if lbw_item.startswith("A="): + lbw_a = safe_float(lbw_item[2:].strip()) + elif lbw_item.startswith("B="): + lbw_b = safe_float(lbw_item[2:].strip()) + elif lbw_item.strip() != '': + lbw = lbw_item + + if a is None: + a = 1.0 + if b is None: + b = 1.0 + + if lora is not None and lora not in added: + result.append((lora, a, b, lbw, lbw_a, lbw_b)) + added.add(lora) + + return result + + +def remove_lora_tags(string): + pattern = r']+>' + result = re.sub(pattern, '', string) + + return result + +def process_with_loras(wildcard_opt, model, clip, title="Positive", seed=None, can_load_lora=True, pipe_lora_stack=[], easyCache=None): + pass1 = process(wildcard_opt, seed) + loras = extract_lora_values(pass1) + pass2 = remove_lora_tags(pass1) + + has_noodle_key = True if "__" in wildcard_opt else False + has_loras = True if loras != [] else False + show_wildcard_prompt = True if has_noodle_key or has_loras else False + + if can_load_lora and has_loras: + for lora_name, model_weight, clip_weight, lbw, lbw_a, lbw_b in loras: + if (lora_name.split('.')[-1]) not in folder_paths.supported_pt_extensions: + lora_name = lora_name+".safetensors" + lora = { + "lora_name": lora_name, "model": model, "clip": clip, "model_strength": model_weight, + "clip_strength": clip_weight, + "lbw_a": lbw_a, + "lbw_b": lbw_b, + "lbw": lbw + } + model, clip = easyCache.load_lora(lora) + lora["model"] = model + lora["clip"] = clip + pipe_lora_stack.append(lora) + + log_node_info("easy wildcards",f"{title}: {pass2}") + if pass1 != pass2: + log_node_info("easy wildcards",f'{title}_decode: {pass1}') + + return model, clip, pass2, pass1, show_wildcard_prompt, pipe_lora_stack diff --git a/ComfyUI-Easy-Use/py/libs/xyplot.py b/ComfyUI-Easy-Use/py/libs/xyplot.py new file mode 100644 index 0000000000000000000000000000000000000000..e302b7d3f692f5f581eee5360124e2fe341f7e9e --- /dev/null +++ b/ComfyUI-Easy-Use/py/libs/xyplot.py @@ -0,0 +1,572 @@ +import os, torch +from pathlib import Path +from PIL import Image, ImageDraw, ImageFont +from .utils import easySave +from .adv_encode import advanced_encode +from .controlnet import easyControlnet +from .log import log_node_warn +from ..layer_diffuse import LayerDiffuse +from ..config import RESOURCES_DIR + +class easyXYPlot(): + + def __init__(self, xyPlotData, save_prefix, image_output, prompt, extra_pnginfo, my_unique_id, sampler, easyCache): + self.x_node_type, self.x_type = sampler.safe_split(xyPlotData.get("x_axis"), ': ') + self.y_node_type, self.y_type = sampler.safe_split(xyPlotData.get("y_axis"), ': ') + self.x_values 
= xyPlotData.get("x_vals") if self.x_type != "None" else [] + self.y_values = xyPlotData.get("y_vals") if self.y_type != "None" else [] + + self.grid_spacing = xyPlotData.get("grid_spacing") + self.latent_id = 0 + self.output_individuals = xyPlotData.get("output_individuals") + + self.x_label, self.y_label = [], [] + self.max_width, self.max_height = 0, 0 + self.latents_plot = [] + self.image_list = [] + + self.num_cols = len(self.x_values) if len(self.x_values) > 0 else 1 + self.num_rows = len(self.y_values) if len(self.y_values) > 0 else 1 + + self.total = self.num_cols * self.num_rows + self.num = 0 + + self.save_prefix = save_prefix + self.image_output = image_output + self.prompt = prompt + self.extra_pnginfo = extra_pnginfo + self.my_unique_id = my_unique_id + + self.sampler = sampler + self.easyCache = easyCache + + # Helper Functions + @staticmethod + def define_variable(plot_image_vars, value_type, value, index): + + plot_image_vars[value_type] = value + if value_type in ["seed", "Seeds++ Batch"]: + value_label = f"{value}" + else: + value_label = f"{value_type}: {value}" + + if "ControlNet" in value_type: + value_label = f"ControlNet {index + 1}" + + if value_type in ['Lora', 'Checkpoint']: + value_label = f"{os.path.basename(os.path.splitext(value.split(',')[0])[0])}" + + if value_type in ["ModelMergeBlocks"]: + if ":" in value: + line = value.split(':') + value_label = f"{line[0]}" + elif len(value) > 16: + value_label = f"ModelMergeBlocks {index + 1}" + else: + value_label = f"MMB: {value}" + + if value_type in ["Pos Condition"]: + value_label = f"pos cond {index + 1}" if index>0 else f"pos cond" + if value_type in ["Neg Condition"]: + value_label = f"neg cond {index + 1}" if index>0 else f"neg cond" + + if value_type in ["Positive Prompt S/R"]: + value_label = f"pos prompt {index + 1}" if index>0 else f"pos prompt" + if value_type in ["Negative Prompt S/R"]: + value_label = f"neg prompt {index + 1}" if index>0 else f"neg prompt" + + if value_type in ["steps", "cfg", "denoise", "clip_skip", + "lora_model_strength", "lora_clip_strength"]: + value_label = f"{value_type}: {value}" + + if value_type == "positive": + value_label = f"pos prompt {index + 1}" + elif value_type == "negative": + value_label = f"neg prompt {index + 1}" + + return plot_image_vars, value_label + + @staticmethod + def get_font(font_size): + return ImageFont.truetype(str(Path(os.path.join(RESOURCES_DIR, 'OpenSans-Medium.ttf'))), font_size) + + @staticmethod + def update_label(label, value, num_items): + if len(label) < num_items: + return [*label, value] + return label + + @staticmethod + def rearrange_tensors(latent, num_cols, num_rows): + new_latent = [] + for i in range(num_rows): + for j in range(num_cols): + index = j * num_rows + i + new_latent.append(latent[index]) + return new_latent + + def calculate_background_dimensions(self): + border_size = int((self.max_width // 8) * 1.5) if self.y_type != "None" or self.x_type != "None" else 0 + bg_width = self.num_cols * (self.max_width + self.grid_spacing) - self.grid_spacing + border_size * ( + self.y_type != "None") + bg_height = self.num_rows * (self.max_height + self.grid_spacing) - self.grid_spacing + border_size * ( + self.x_type != "None") + + x_offset_initial = border_size if self.y_type != "None" else 0 + y_offset = border_size if self.x_type != "None" else 0 + + return bg_width, bg_height, x_offset_initial, y_offset + + def adjust_font_size(self, text, initial_font_size, label_width): + font = self.get_font(initial_font_size) + text_width = 
font.getbbox(text) + if text_width and text_width[2]: + text_width = text_width[2] + + scaling_factor = 0.9 + if text_width > (label_width * scaling_factor): + return int(initial_font_size * (label_width / text_width) * scaling_factor) + else: + return initial_font_size + + def textsize(self, d, text, font): + _, _, width, height = d.textbbox((0, 0), text=text, font=font) + return width, height + + def create_label(self, img, text, initial_font_size, is_x_label=True, max_font_size=70, min_font_size=10): + label_width = img.width if is_x_label else img.height + + # Adjust font size + font_size = self.adjust_font_size(text, initial_font_size, label_width) + font_size = min(max_font_size, font_size) # Ensure font isn't too large + font_size = max(min_font_size, font_size) # Ensure font isn't too small + + label_height = int(font_size * 1.5) if is_x_label else font_size + + label_bg = Image.new('RGBA', (label_width, label_height), color=(255, 255, 255, 0)) + d = ImageDraw.Draw(label_bg) + + font = self.get_font(font_size) + + # Check if text will fit, if not insert ellipsis and reduce text + if self.textsize(d, text, font=font)[0] > label_width: + while self.textsize(d, text + '...', font=font)[0] > label_width and len(text) > 0: + text = text[:-1] + text = text + '...' + + # Compute text width and height for multi-line text + text_lines = text.split('\n') + text_widths, text_heights = zip(*[self.textsize(d, line, font=font) for line in text_lines]) + max_text_width = max(text_widths) + total_text_height = sum(text_heights) + + # Compute position for each line of text + lines_positions = [] + current_y = 0 + for line, line_width, line_height in zip(text_lines, text_widths, text_heights): + text_x = (label_width - line_width) // 2 + text_y = current_y + (label_height - total_text_height) // 2 + current_y += line_height + lines_positions.append((line, (text_x, text_y))) + + # Draw each line of text + for line, (text_x, text_y) in lines_positions: + d.text((text_x, text_y), line, fill='black', font=font) + + return label_bg + + def sample_plot_image(self, plot_image_vars, samples, preview_latent, latents_plot, image_list, disable_noise, + start_step, last_step, force_full_denoise, x_value=None, y_value=None): + model, clip, vae, positive, negative, seed, steps, cfg = None, None, None, None, None, None, None, None + sampler_name, scheduler, denoise = None, None, None + + a1111_prompt_style = plot_image_vars['a1111_prompt_style'] if "a1111_prompt_style" in plot_image_vars else False + clip = clip if clip is not None else plot_image_vars["clip"] + steps = plot_image_vars['steps'] if "steps" in plot_image_vars else 1 + + # 高级用法 + if plot_image_vars["x_node_type"] == "advanced" or plot_image_vars["y_node_type"] == "advanced": + if self.x_type == "Seeds++ Batch" or self.y_type == "Seeds++ Batch": + seed = int(x_value) if self.x_type == "Seeds++ Batch" else int(y_value) + if self.x_type == "Steps" or self.y_type == "Steps": + steps = int(x_value) if self.x_type == "Steps" else int(y_value) + if self.x_type == "StartStep" or self.y_type == "StartStep": + start_step = int(x_value) if self.x_type == "StartStep" else int(y_value) + if self.x_type == "EndStep" or self.y_type == "EndStep": + last_step = int(x_value) if self.x_type == "EndStep" else int(y_value) + if self.x_type == "CFG Scale" or self.y_type == "CFG Scale": + cfg = float(x_value) if self.x_type == "CFG Scale" else float(y_value) + if self.x_type == "Sampler" or self.y_type == "Sampler": + sampler_name = x_value if self.x_type == "Sampler" else 
y_value + if self.x_type == "Scheduler" or self.y_type == "Scheduler": + scheduler = x_value if self.x_type == "Scheduler" else y_value + if self.x_type == "Sampler&Scheduler" or self.y_type == "Sampler&Scheduler": + arr = x_value.split(',') if self.x_type == "Sampler&Scheduler" else y_value.split(',') + if arr[0] and arr[0]!= 'None': + sampler_name = arr[0] + if arr[1] and arr[1]!= 'None': + scheduler = arr[1] + if self.x_type == "Denoise" or self.y_type == "Denoise": + denoise = float(x_value) if self.x_type == "Denoise" else float(y_value) + if self.x_type == "Pos Condition" or self.y_type == "Pos Condition": + positive = plot_image_vars['positive_cond_stack'][int(x_value)] if self.x_type == "Pos Condition" else plot_image_vars['positive_cond_stack'][int(y_value)] + if self.x_type == "Neg Condition" or self.y_type == "Neg Condition": + negative = plot_image_vars['negative_cond_stack'][int(x_value)] if self.x_type == "Neg Condition" else plot_image_vars['negative_cond_stack'][int(y_value)] + # 模型叠加 + if self.x_type == "ModelMergeBlocks" or self.y_type == "ModelMergeBlocks": + ckpt_name_1, ckpt_name_2 = plot_image_vars['models'] + model1, clip1, vae1, clip_vision = self.easyCache.load_checkpoint(ckpt_name_1) + model2, clip2, vae2, clip_vision = self.easyCache.load_checkpoint(ckpt_name_2) + xy_values = x_value if self.x_type == "ModelMergeBlocks" else y_value + if ":" in xy_values: + xy_line = xy_values.split(':') + xy_values = xy_line[1] + + xy_arrs = xy_values.split(',') + # ModelMergeBlocks + if len(xy_arrs) == 3: + input, middle, out = xy_arrs + kwargs = { + "input": input, + "middle": middle, + "out": out + } + elif len(xy_arrs) == 30: + kwargs = {} + kwargs["time_embed."] = xy_arrs[0] + kwargs["label_emb."] = xy_arrs[1] + + for i in range(12): + kwargs["input_blocks.{}.".format(i)] = xy_arrs[2+i] + + for i in range(3): + kwargs["middle_block.{}.".format(i)] = xy_arrs[14+i] + + for i in range(12): + kwargs["output_blocks.{}.".format(i)] = xy_arrs[17+i] + + kwargs["out."] = xy_arrs[29] + else: + raise Exception("ModelMergeBlocks weight length error") + default_ratio = next(iter(kwargs.values())) + + m = model1.clone() + kp = model2.get_key_patches("diffusion_model.") + + for k in kp: + ratio = float(default_ratio) + k_unet = k[len("diffusion_model."):] + + last_arg_size = 0 + for arg in kwargs: + if k_unet.startswith(arg) and last_arg_size < len(arg): + ratio = float(kwargs[arg]) + last_arg_size = len(arg) + + m.add_patches({k: kp[k]}, 1.0 - ratio, ratio) + + vae_use = plot_image_vars['vae_use'] + + clip = clip2 if vae_use == 'Use Model 2' else clip1 + if vae_use == 'Use Model 2': + vae = vae2 + elif vae_use == 'Use Model 1': + vae = vae1 + else: + vae = self.easyCache.load_vae(vae_use) + model = m + + # 如果存在lora_stack叠加lora + optional_lora_stack = plot_image_vars['lora_stack'] + if optional_lora_stack is not None and optional_lora_stack != []: + for lora in optional_lora_stack: + model, clip = self.easyCache.load_lora(lora) + + # 处理clip + clip = clip.clone() + if plot_image_vars['clip_skip'] != 0: + clip.clip_layer(plot_image_vars['clip_skip']) + + # CheckPoint + if self.x_type == "Checkpoint" or self.y_type == "Checkpoint": + xy_values = x_value if self.x_type == "Checkpoint" else y_value + ckpt_name, clip_skip, vae_name = xy_values.split(",") + ckpt_name = ckpt_name.replace('*', ',') + vae_name = vae_name.replace('*', ',') + model, clip, vae, clip_vision = self.easyCache.load_checkpoint(ckpt_name) + if vae_name != 'None': + vae = self.easyCache.load_vae(vae_name) + + # 
如果存在lora_stack叠加lora + optional_lora_stack = plot_image_vars['lora_stack'] + if optional_lora_stack is not None and optional_lora_stack != []: + for lora in optional_lora_stack: + lora['model'] = model + lora['clip'] = clip + model, clip = self.easyCache.load_lora(lora) + + # 处理clip + clip = clip.clone() + if clip_skip != 'None': + clip.clip_layer(int(clip_skip)) + positive = plot_image_vars['positive'] + negative = plot_image_vars['negative'] + a1111_prompt_style = plot_image_vars['a1111_prompt_style'] + steps = plot_image_vars['steps'] + clip = clip if clip is not None else plot_image_vars["clip"] + positive = advanced_encode(clip, positive, + plot_image_vars['positive_token_normalization'], + plot_image_vars['positive_weight_interpretation'], + w_max=1.0, + apply_to_pooled="enable", + a1111_prompt_style=a1111_prompt_style, steps=steps) + + negative = advanced_encode(clip, negative, + plot_image_vars['negative_token_normalization'], + plot_image_vars['negative_weight_interpretation'], + w_max=1.0, + apply_to_pooled="enable", + a1111_prompt_style=a1111_prompt_style, steps=steps) + if "positive_cond" in plot_image_vars: + positive = positive + plot_image_vars["positive_cond"] + if "negative_cond" in plot_image_vars: + negative = negative + plot_image_vars["negative_cond"] + + # Lora + if self.x_type == "Lora" or self.y_type == "Lora": + model = model if model is not None else plot_image_vars["model"] + clip = clip if clip is not None else plot_image_vars["clip"] + + xy_values = x_value if self.x_type == "Lora" else y_value + lora_name, lora_model_strength, lora_clip_strength = xy_values.split(",") + lora_stack = [{"lora_name": lora_name, "model": model, "clip" :clip, "model_strength": float(lora_model_strength), "clip_strength": float(lora_clip_strength)}] + if 'lora_stack' in plot_image_vars: + lora_stack = lora_stack + plot_image_vars['lora_stack'] + + if lora_stack is not None and lora_stack != []: + for lora in lora_stack: + model, clip = self.easyCache.load_lora(lora) + + # 提示词 + if "Positive" in self.x_type or "Positive" in self.y_type: + if self.x_type == 'Positive Prompt S/R' or self.y_type == 'Positive Prompt S/R': + positive = x_value if self.x_type == "Positive Prompt S/R" else y_value + + positive = advanced_encode(clip, positive, + plot_image_vars['positive_token_normalization'], + plot_image_vars['positive_weight_interpretation'], + w_max=1.0, + apply_to_pooled="enable", a1111_prompt_style=a1111_prompt_style, steps=steps) + + # if "positive_cond" in plot_image_vars: + # positive = positive + plot_image_vars["positive_cond"] + + if "Negative" in self.x_type or "Negative" in self.y_type: + if self.x_type == 'Negative Prompt S/R' or self.y_type == 'Negative Prompt S/R': + negative = x_value if self.x_type == "Negative Prompt S/R" else y_value + + negative = advanced_encode(clip, negative, + plot_image_vars['negative_token_normalization'], + plot_image_vars['negative_weight_interpretation'], + w_max=1.0, + apply_to_pooled="enable", a1111_prompt_style=a1111_prompt_style, steps=steps) + # if "negative_cond" in plot_image_vars: + # negative = negative + plot_image_vars["negative_cond"] + + # ControlNet + if "ControlNet" in self.x_type or "ControlNet" in self.y_type: + cnet = plot_image_vars["cnet"] if "cnet" in plot_image_vars else None + positive = plot_image_vars["positive_cond"] if "positive" in plot_image_vars else None + negative = plot_image_vars["negative_cond"] if "negative" in plot_image_vars else None + if cnet: + index = x_value if "ControlNet" in self.x_type else y_value 
+ controlnet = cnet[index] + for index, item in enumerate(controlnet): + control_net_name = item[0] + image = item[1] + strength = item[2] + start_percent = item[3] + end_percent = item[4] + positive, negative = easyControlnet().apply(control_net_name, image, positive, negative, strength, start_percent, end_percent, None, 1) + + # 简单用法 + if plot_image_vars["x_node_type"] == "loader" or plot_image_vars["y_node_type"] == "loader": + model, clip, vae, clip_vision = self.easyCache.load_checkpoint(plot_image_vars['ckpt_name']) + + if plot_image_vars['lora_name'] != "None": + lora = {"lora_name": plot_image_vars['lora_name'], "model": model, "clip": clip, "model_strength": plot_image_vars['lora_model_strength'], "clip_strength": plot_image_vars['lora_clip_strength']} + model, clip = self.easyCache.load_lora(lora) + + # Check for custom VAE + if plot_image_vars['vae_name'] not in ["Baked-VAE", "Baked VAE"]: + vae = self.easyCache.load_vae(plot_image_vars['vae_name']) + + # CLIP skip + if not clip: + raise Exception("No CLIP found") + clip = clip.clone() + clip.clip_layer(plot_image_vars['clip_skip']) + + positive = advanced_encode(clip, plot_image_vars['positive'], + plot_image_vars['positive_token_normalization'], + plot_image_vars['positive_weight_interpretation'], w_max=1.0, + apply_to_pooled="enable",a1111_prompt_style=a1111_prompt_style, steps=steps) + + negative = advanced_encode(clip, plot_image_vars['negative'], + plot_image_vars['negative_token_normalization'], + plot_image_vars['negative_weight_interpretation'], w_max=1.0, + apply_to_pooled="enable", a1111_prompt_style=a1111_prompt_style, steps=steps) + + model = model if model is not None else plot_image_vars["model"] + vae = vae if vae is not None else plot_image_vars["vae"] + positive = positive if positive is not None else plot_image_vars["positive_cond"] + negative = negative if negative is not None else plot_image_vars["negative_cond"] + + seed = seed if seed is not None else plot_image_vars["seed"] + steps = steps if steps is not None else plot_image_vars["steps"] + cfg = cfg if cfg is not None else plot_image_vars["cfg"] + sampler_name = sampler_name if sampler_name is not None else plot_image_vars["sampler_name"] + scheduler = scheduler if scheduler is not None else plot_image_vars["scheduler"] + denoise = denoise if denoise is not None else plot_image_vars["denoise"] + + # LayerDiffuse + layer_diffusion_method = plot_image_vars["layer_diffusion_method"] if "layer_diffusion_method" in plot_image_vars else None + empty_samples = plot_image_vars["empty_samples"] if "empty_samples" in plot_image_vars else None + + if layer_diffusion_method: + samp_blend_samples = plot_image_vars["blend_samples"] if "blend_samples" in plot_image_vars else None + additional_cond = plot_image_vars["layer_diffusion_cond"] if "layer_diffusion_cond" in plot_image_vars else None + + images = plot_image_vars["images"].movedim(-1, 1) if "images" in plot_image_vars else None + weight = plot_image_vars['layer_diffusion_weight'] if 'layer_diffusion_weight' in plot_image_vars else 1.0 + model, positive, negative = LayerDiffuse().apply_layer_diffusion(model, layer_diffusion_method, weight, samples, + samp_blend_samples, positive, + negative, images, additional_cond) + + samples = empty_samples if layer_diffusion_method is not None and empty_samples is not None else samples + # Sample + samples = self.sampler.common_ksampler(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, samples, + denoise=denoise, disable_noise=disable_noise, 
preview_latent=preview_latent, + start_step=start_step, last_step=last_step, + force_full_denoise=force_full_denoise) + + # Decode images and store + latent = samples["samples"] + + # Add the latent tensor to the tensors list + latents_plot.append(latent) + + # Decode the image + image = vae.decode(latent).cpu() + + if self.output_individuals in [True, "True"]: + easySave(image, self.save_prefix, self.image_output) + + # Convert the image from tensor to PIL Image and add it to the list + pil_image = self.sampler.tensor2pil(image) + image_list.append(pil_image) + + # Update max dimensions + self.max_width = max(self.max_width, pil_image.width) + self.max_height = max(self.max_height, pil_image.height) + + # Return the touched variables + return image_list, self.max_width, self.max_height, latents_plot + + # Process Functions + def validate_xy_plot(self): + if self.x_type == 'None' and self.y_type == 'None': + log_node_warn(f'#{self.my_unique_id}','No Valid Plot Types - Reverting to default sampling...') + return False + else: + return True + + def get_latent(self, samples): + # Extract the 'samples' tensor from the dictionary + latent_image_tensor = samples["samples"] + + # Split the tensor into individual image tensors + image_tensors = torch.split(latent_image_tensor, 1, dim=0) + + # Create a list of dictionaries containing the individual image tensors + latent_list = [{'samples': image} for image in image_tensors] + + # Set latent only to the first latent of batch + if self.latent_id >= len(latent_list): + log_node_warn(f'#{self.my_unique_id}',f'The selected latent_id ({self.latent_id}) is out of range.') + log_node_warn(f'#{self.my_unique_id}', f'Automatically setting the latent_id to the last image in the list (index: {len(latent_list) - 1}).') + + self.latent_id = len(latent_list) - 1 + + return latent_list[self.latent_id] + + def get_labels_and_sample(self, plot_image_vars, latent_image, preview_latent, start_step, last_step, + force_full_denoise, disable_noise): + for x_index, x_value in enumerate(self.x_values): + plot_image_vars, x_value_label = self.define_variable(plot_image_vars, self.x_type, x_value, + x_index) + self.x_label = self.update_label(self.x_label, x_value_label, len(self.x_values)) + if self.y_type != 'None': + for y_index, y_value in enumerate(self.y_values): + plot_image_vars, y_value_label = self.define_variable(plot_image_vars, self.y_type, y_value, + y_index) + self.y_label = self.update_label(self.y_label, y_value_label, len(self.y_values)) + # ttNl(f'{CC.GREY}X: {x_value_label}, Y: {y_value_label}').t( + # f'Plot Values {self.num}/{self.total} ->').p() + + self.image_list, self.max_width, self.max_height, self.latents_plot = self.sample_plot_image( + plot_image_vars, latent_image, preview_latent, self.latents_plot, self.image_list, + disable_noise, start_step, last_step, force_full_denoise, x_value, y_value) + self.num += 1 + else: + # ttNl(f'{CC.GREY}X: {x_value_label}').t(f'Plot Values {self.num}/{self.total} ->').p() + self.image_list, self.max_width, self.max_height, self.latents_plot = self.sample_plot_image( + plot_image_vars, latent_image, preview_latent, self.latents_plot, self.image_list, disable_noise, + start_step, last_step, force_full_denoise, x_value) + self.num += 1 + + # Rearrange latent array to match preview image grid + self.latents_plot = self.rearrange_tensors(self.latents_plot, self.num_cols, self.num_rows) + + # Concatenate the tensors along the first dimension (dim=0) + self.latents_plot = torch.cat(self.latents_plot, dim=0) + + 
return self.latents_plot + + def plot_images_and_labels(self): + # Calculate the background dimensions + bg_width, bg_height, x_offset_initial, y_offset = self.calculate_background_dimensions() + + # Create the white background image + background = Image.new('RGBA', (int(bg_width), int(bg_height)), color=(255, 255, 255, 255)) + + output_image = [] + for row_index in range(self.num_rows): + x_offset = x_offset_initial + + for col_index in range(self.num_cols): + index = col_index * self.num_rows + row_index + img = self.image_list[index] + output_image.append(self.sampler.pil2tensor(img)) + background.paste(img, (x_offset, y_offset)) + + # Handle X label + if row_index == 0 and self.x_type != "None": + label_bg = self.create_label(img, self.x_label[col_index], int(48 * img.width / 512)) + label_y = (y_offset - label_bg.height) // 2 + background.alpha_composite(label_bg, (x_offset, label_y)) + + # Handle Y label + if col_index == 0 and self.y_type != "None": + label_bg = self.create_label(img, self.y_label[row_index], int(48 * img.height / 512), False) + label_bg = label_bg.rotate(90, expand=True) + + label_x = (x_offset - label_bg.width) // 2 + label_y = y_offset + (img.height - label_bg.height) // 2 + background.alpha_composite(label_bg, (label_x, label_y)) + + x_offset += img.width + self.grid_spacing + + y_offset += img.height + self.grid_spacing + + return (self.sampler.pil2tensor(background), output_image) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/logic.py b/ComfyUI-Easy-Use/py/logic.py new file mode 100644 index 0000000000000000000000000000000000000000..133e7463aa50de71c50d231e300458ff5d398c08 --- /dev/null +++ b/ComfyUI-Easy-Use/py/logic.py @@ -0,0 +1,1282 @@ +from typing import Iterator, List, Tuple, Dict, Any, Union, Optional +from _decimal import Context, getcontext +from decimal import Decimal +from .libs.utils import AlwaysEqualProxy, ByPassTypeTuple, cleanGPUUsedForce, compare_revision +from .libs.cache import remove_cache +import numpy as np +import re +import json +import torch +import comfy.utils + +DEFAULT_FLOW_NUM = 2 +MAX_FLOW_NUM = 10 +lazy_options = {"lazy": True} if compare_revision(2543) else {} + +def validate_list_args(args: Dict[str, List[Any]]) -> Tuple[bool, Optional[str], Optional[str]]: + """ + Checks that if there are multiple arguments, they are all the same length or 1 + :param args: + :return: Tuple (Status, mismatched_key_1, mismatched_key_2) + """ + # Only have 1 arg + if len(args) == 1: + return True, None, None + + len_to_match = None + matched_arg_name = None + for arg_name, arg in args.items(): + if arg_name == 'self': + # self is in locals() + continue + + if len(arg) != 1: + if len_to_match is None: + len_to_match = len(arg) + matched_arg_name = arg_name + elif len(arg) != len_to_match: + return False, arg_name, matched_arg_name + + return True, None, None +def error_if_mismatched_list_args(args: Dict[str, List[Any]]) -> None: + is_valid, failed_key1, failed_key2 = validate_list_args(args) + if not is_valid: + assert failed_key1 is not None + assert failed_key2 is not None + raise ValueError( + f"Mismatched list inputs received. {failed_key1}({len(args[failed_key1])}) !== {failed_key2}({len(args[failed_key2])})" + ) + +def zip_with_fill(*lists: Union[List[Any], None]) -> Iterator[Tuple[Any, ...]]: + """ + Zips lists together, but if a list has 1 element, it will be repeated for each element in the other lists. + If a list is None, None will be used for that element. 
+ (Not intended for use with lists of different lengths) + :param lists: + :return: Iterator of tuples of length len(lists) + """ + max_len = max(len(lst) if lst is not None else 0 for lst in lists) + for i in range(max_len): + yield tuple(None if lst is None else (lst[0] if len(lst) == 1 else lst[i]) for lst in lists) + +# ---------------------------------------------------------------类型 开始----------------------------------------------------------------------# + +# 字符串 +class String: + @classmethod + def INPUT_TYPES(s): + return { + "required": {"value": ("STRING", {"default": ""})}, + } + + RETURN_TYPES = ("STRING",) + RETURN_NAMES = ("string",) + FUNCTION = "execute" + CATEGORY = "EasyUse/Logic/Type" + + def execute(self, value): + return (value,) + +# 整数 +class Int: + @classmethod + def INPUT_TYPES(s): + return { + "required": {"value": ("INT", {"default": 0, "min": -999999, "max": 999999,})}, + } + + RETURN_TYPES = ("INT",) + RETURN_NAMES = ("int",) + FUNCTION = "execute" + CATEGORY = "EasyUse/Logic/Type" + + def execute(self, value): + return (value,) + +# 整数范围 +class RangeInt: + def __init__(self) -> None: + pass + + @classmethod + def INPUT_TYPES(s) -> Dict[str, Dict[str, Any]]: + return { + "required": { + "range_mode": (["step", "num_steps"], {"default": "step"}), + "start": ("INT", {"default": 0, "min": -4096, "max": 4096, "step": 1}), + "stop": ("INT", {"default": 0, "min": -4096, "max": 4096, "step": 1}), + "step": ("INT", {"default": 0, "min": -4096, "max": 4096, "step": 1}), + "num_steps": ("INT", {"default": 0, "min": -4096, "max": 4096, "step": 1}), + "end_mode": (["Inclusive", "Exclusive"], {"default": "Inclusive"}), + }, + } + + RETURN_TYPES = ("INT", "INT") + RETURN_NAMES = ("range", "range_sizes") + INPUT_IS_LIST = True + OUTPUT_IS_LIST = (True, True) + FUNCTION = "build_range" + + CATEGORY = "EasyUse/Logic/Type" + + def build_range( + self, range_mode, start, stop, step, num_steps, end_mode + ) -> Tuple[List[int], List[int]]: + error_if_mismatched_list_args(locals()) + + ranges = [] + range_sizes = [] + for range_mode, e_start, e_stop, e_num_steps, e_step, e_end_mode in zip_with_fill( + range_mode, start, stop, num_steps, step, end_mode + ): + if range_mode == 'step': + if e_end_mode == "Inclusive": + e_stop += 1 + vals = list(range(e_start, e_stop, e_step)) + ranges.extend(vals) + range_sizes.append(len(vals)) + elif range_mode == 'num_steps': + direction = 1 if e_stop > e_start else -1 + if e_end_mode == "Exclusive": + e_stop -= direction + vals = (np.rint(np.linspace(e_start, e_stop, e_num_steps)).astype(int).tolist()) + ranges.extend(vals) + range_sizes.append(len(vals)) + return ranges, range_sizes + + + +# 浮点数 +class Float: + @classmethod + def INPUT_TYPES(s): + return { + "required": {"value": ("FLOAT", {"default": 0, "step": 0.01, "min": -999999, "max": 999999,})}, + } + + RETURN_TYPES = ("FLOAT",) + RETURN_NAMES = ("float",) + FUNCTION = "execute" + CATEGORY = "EasyUse/Logic/Type" + + def execute(self, value): + return (value,) + + +# 浮点数范围 +class RangeFloat: + def __init__(self) -> None: + pass + + @classmethod + def INPUT_TYPES(s) -> Dict[str, Dict[str, Any]]: + return { + "required": { + "range_mode": (["step", "num_steps"], {"default": "step"}), + "start": ("FLOAT", {"default": 0, "min": -4096, "max": 4096, "step": 0.1}), + "stop": ("FLOAT", {"default": 0, "min": -4096, "max": 4096, "step": 0.1}), + "step": ("FLOAT", {"default": 0, "min": -4096, "max": 4096, "step": 0.1}), + "num_steps": ("INT", {"default": 0, "min": -4096, "max": 4096, "step": 1}), + 
"end_mode": (["Inclusive", "Exclusive"], {"default": "Inclusive"}), + }, + } + + RETURN_TYPES = ("FLOAT", "INT") + RETURN_NAMES = ("range", "range_sizes") + INPUT_IS_LIST = True + OUTPUT_IS_LIST = (True, True) + FUNCTION = "build_range" + + CATEGORY = "EasyUse/Logic/Type" + + @staticmethod + def _decimal_range( + range_mode: String, start: Decimal, stop: Decimal, step: Decimal, num_steps: Int, inclusive: bool + ) -> Iterator[float]: + if range_mode == 'step': + ret_val = start + if inclusive: + stop = stop + step + direction = 1 if step > 0 else -1 + while (ret_val - stop) * direction < 0: + yield float(ret_val) + ret_val += step + elif range_mode == 'num_steps': + step = (stop - start) / (num_steps - 1) + direction = 1 if step > 0 else -1 + + ret_val = start + for _ in range(num_steps): + if (ret_val - stop) * direction > 0: # Ensure we don't exceed the 'stop' value + break + yield float(ret_val) + ret_val += step + + def build_range( + self, + range_mode, + start, + stop, + step, + num_steps, + end_mode, + ) -> Tuple[List[float], List[int]]: + error_if_mismatched_list_args(locals()) + getcontext().prec = 12 + + start = [Decimal(s) for s in start] + stop = [Decimal(s) for s in stop] + step = [Decimal(s) for s in step] + + ranges = [] + range_sizes = [] + for range_mode, e_start, e_stop, e_step, e_num_steps, e_end_mode in zip_with_fill( + range_mode, start, stop, step, num_steps, end_mode + ): + vals = list( + self._decimal_range(range_mode, e_start, e_stop, e_step, e_num_steps, e_end_mode == 'Inclusive') + ) + ranges.extend(vals) + range_sizes.append(len(vals)) + + return ranges, range_sizes + + +# 布尔 +class Boolean: + @classmethod + def INPUT_TYPES(s): + return { + "required": {"value": ("BOOLEAN", {"default": False})}, + } + + RETURN_TYPES = ("BOOLEAN",) + RETURN_NAMES = ("boolean",) + FUNCTION = "execute" + CATEGORY = "EasyUse/Logic/Type" + + def execute(self, value): + return (value,) + +# ---------------------------------------------------------------开关 开始----------------------------------------------------------------------# +class imageSwitch: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "image_a": ("IMAGE",), + "image_b": ("IMAGE",), + "boolean": ("BOOLEAN", {"default": False}), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "image_switch" + + CATEGORY = "EasyUse/Logic/Switch" + + def image_switch(self, image_a, image_b, boolean): + + if boolean: + return (image_a, ) + else: + return (image_b, ) + +class textSwitch: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "input": ("INT", {"default": 1, "min": 1, "max": 2}), + }, + "optional": { + "text1": ("STRING", {"forceInput": True}), + "text2": ("STRING", {"forceInput": True}), + } + } + + RETURN_TYPES = ("STRING",) + RETURN_NAMES = ("STRING",) + CATEGORY = "EasyUse/Logic/Switch" + FUNCTION = "switch" + + def switch(self, input, text1=None, text2=None,): + if input == 1: + return (text1,) + else: + return (text2,) + +# ---------------------------------------------------------------Index Switch----------------------------------------------------------------------# + +class anythingIndexSwitch: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + inputs = { + "required": { + "index": ("INT", {"default": 0, "min": 0, "max": 9, "step": 1}), + }, + "optional": { + } + } + for i in range(DEFAULT_FLOW_NUM): + inputs["optional"]["value%d" % i] = (AlwaysEqualProxy("*"),lazy_options) + return inputs + + RETURN_TYPES = (AlwaysEqualProxy("*"),) + 
RETURN_NAMES = ("value",) + FUNCTION = "index_switch" + + CATEGORY = "EasyUse/Logic/Index Switch" + + def check_lazy_status(self, index, **kwargs): + key = "value%d" % index + if kwargs.get(key, None) is None: + return [key] + + def index_switch(self, index, **kwargs): + key = "value%d" % index + return (kwargs[key],) + +class imageIndexSwitch: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + inputs = { + "required": { + "index": ("INT", {"default": 0, "min": 0, "max": 9, "step": 1}), + }, + "optional": { + } + } + for i in range(DEFAULT_FLOW_NUM): + inputs["optional"]["image%d" % i] = ("IMAGE",lazy_options) + return inputs + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("image",) + FUNCTION = "index_switch" + + CATEGORY = "EasyUse/Logic/Index Switch" + + def check_lazy_status(self, index, **kwargs): + key = "image%d" % index + if kwargs.get(key, None) is None: + return [key] + + def index_switch(self, index, **kwargs): + key = "image%d" % index + return (kwargs[key],) + +class textIndexSwitch: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + inputs = { + "required": { + "index": ("INT", {"default": 0, "min": 0, "max": 9, "step": 1}), + }, + "optional": { + } + } + for i in range(DEFAULT_FLOW_NUM): + inputs["optional"]["text%d" % i] = ("STRING",{**lazy_options,"forceInput":True}) + return inputs + + RETURN_TYPES = ("STRING",) + RETURN_NAMES = ("text",) + FUNCTION = "index_switch" + + CATEGORY = "EasyUse/Logic/Index Switch" + + def check_lazy_status(self, index, **kwargs): + key = "text%d" % index + if kwargs.get(key, None) is None: + return [key] + + def index_switch(self, index, **kwargs): + key = "text%d" % index + return (kwargs[key],) + +class conditioningIndexSwitch: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + inputs = { + "required": { + "index": ("INT", {"default": 0, "min": 0, "max": 9, "step": 1}), + }, + "optional": { + } + } + for i in range(DEFAULT_FLOW_NUM): + inputs["optional"]["cond%d" % i] = ("CONDITIONING",lazy_options) + return inputs + + RETURN_TYPES = ("CONDITIONING",) + RETURN_NAMES = ("conditioning",) + FUNCTION = "index_switch" + + CATEGORY = "EasyUse/Logic/Index Switch" + + def check_lazy_status(self, index, **kwargs): + key = "cond%d" % index + if kwargs.get(key, None) is None: + return [key] + + def index_switch(self, index, **kwargs): + key = "cond%d" % index + return (kwargs[key],) + +# ---------------------------------------------------------------Math----------------------------------------------------------------------# +class mathIntOperation: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "a": ("INT", {"default": 0, "min": -0xffffffffffffffff, "max": 0xffffffffffffffff, "step": 1}), + "b": ("INT", {"default": 0, "min": -0xffffffffffffffff, "max": 0xffffffffffffffff, "step": 1}), + "operation": (["add", "subtract", "multiply", "divide", "modulo", "power"],), + }, + } + + RETURN_TYPES = ("INT",) + FUNCTION = "int_math_operation" + + CATEGORY = "EasyUse/Logic/Math" + + def int_math_operation(self, a, b, operation): + if operation == "add": + return (a + b,) + elif operation == "subtract": + return (a - b,) + elif operation == "multiply": + return (a * b,) + elif operation == "divide": + return (a // b,) + elif operation == "modulo": + return (a % b,) + elif operation == "power": + return (a ** b,) + +class mathFloatOperation: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "a": 
("FLOAT", {"default": 0, "min": -999999999999.0, "max": 999999999999.0, "step": 1}), + "b": ("FLOAT", {"default": 0, "min": -999999999999.0, "max": 999999999999.0, "step": 1}), + "operation": (["==", "!=", "<", ">", "<=", ">="],), + }, + } + + RETURN_TYPES = ("BOOLEAN",) + FUNCTION = "float_math_operation" + + CATEGORY = "EasyUse/Logic/Math" + + def float_math_operation(self, a, b, operation): + if operation == "==": + return (a == b,) + elif operation == "!=": + return (a != b,) + elif operation == "<": + return (a < b,) + elif operation == ">": + return (a > b,) + elif operation == "<=": + return (a <= b,) + elif operation == ">=": + return (a >= b,) + +class mathStringOperation: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "a": ("STRING", {"multiline": False}), + "b": ("STRING", {"multiline": False}), + "operation": (["a == b", "a != b", "a IN b", "a MATCH REGEX(b)", "a BEGINSWITH b", "a ENDSWITH b"],), + "case_sensitive": ("BOOLEAN", {"default": True}), + }, + } + + RETURN_TYPES = ("BOOLEAN",) + FUNCTION = "string_math_operation" + + CATEGORY = "EasyUse/Logic/Math" + + def string_math_operation(self, a, b, operation, case_sensitive): + if not case_sensitive: + a = a.lower() + b = b.lower() + + if operation == "a == b": + return (a == b,) + elif operation == "a != b": + return (a != b,) + elif operation == "a IN b": + return (a in b,) + elif operation == "a MATCH REGEX(b)": + try: + return (re.match(b, a) is not None,) + except: + return (False,) + elif operation == "a BEGINSWITH b": + return (a.startswith(b),) + elif operation == "a ENDSWITH b": + return (a.endswith(b),) + +# ---------------------------------------------------------------Flow----------------------------------------------------------------------# +try: + from comfy_execution.graph_utils import GraphBuilder, is_link +except: + GraphBuilder = None + +class whileLoopStart: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + inputs = { + "required": { + "condition": ("BOOLEAN", {"default": True}), + }, + "optional": { + }, + } + for i in range(MAX_FLOW_NUM): + inputs["optional"]["initial_value%d" % i] = ("*",) + return inputs + + RETURN_TYPES = ByPassTypeTuple(tuple(["FLOW_CONTROL"] + ["*"] * MAX_FLOW_NUM)) + RETURN_NAMES = ByPassTypeTuple(tuple(["flow"] + ["value%d" % i for i in range(MAX_FLOW_NUM)])) + FUNCTION = "while_loop_open" + + CATEGORY = "EasyUse/Logic/While Loop" + + def while_loop_open(self, condition, **kwargs): + values = [] + for i in range(MAX_FLOW_NUM): + values.append(kwargs.get("initial_value%d" % i, None)) + return tuple(["stub"] + values) + +class whileLoopEnd: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + inputs = { + "required": { + "flow": ("FLOW_CONTROL", {"rawLink": True}), + "condition": ("BOOLEAN", {}), + }, + "optional": { + }, + "hidden": { + "dynprompt": "DYNPROMPT", + "unique_id": "UNIQUE_ID", + } + } + for i in range(MAX_FLOW_NUM): + inputs["optional"]["initial_value%d" % i] = (AlwaysEqualProxy('*'),) + return inputs + + RETURN_TYPES = ByPassTypeTuple(tuple([AlwaysEqualProxy('*')] * MAX_FLOW_NUM)) + RETURN_NAMES = ByPassTypeTuple(tuple(["value%d" % i for i in range(MAX_FLOW_NUM)])) + FUNCTION = "while_loop_close" + + CATEGORY = "EasyUse/Logic/While Loop" + + def explore_dependencies(self, node_id, dynprompt, upstream): + node_info = dynprompt.get_node(node_id) + if "inputs" not in node_info: + return + for k, v in node_info["inputs"].items(): + if is_link(v): + parent_id = v[0] + if parent_id 
not in upstream: + upstream[parent_id] = [] + self.explore_dependencies(parent_id, dynprompt, upstream) + upstream[parent_id].append(node_id) + + def collect_contained(self, node_id, upstream, contained): + if node_id not in upstream: + return + for child_id in upstream[node_id]: + if child_id not in contained: + contained[child_id] = True + self.collect_contained(child_id, upstream, contained) + + + def while_loop_close(self, flow, condition, dynprompt=None, unique_id=None, **kwargs): + if not condition: + # We're done with the loop + values = [] + for i in range(MAX_FLOW_NUM): + values.append(kwargs.get("initial_value%d" % i, None)) + return tuple(values) + + # We want to loop + this_node = dynprompt.get_node(unique_id) + upstream = {} + # Get the list of all nodes between the open and close nodes + self.explore_dependencies(unique_id, dynprompt, upstream) + + contained = {} + open_node = flow[0] + self.collect_contained(open_node, upstream, contained) + contained[unique_id] = True + contained[open_node] = True + + graph = GraphBuilder() + for node_id in contained: + original_node = dynprompt.get_node(node_id) + node = graph.node(original_node["class_type"], "Recurse" if node_id == unique_id else node_id) + node.set_override_display_id(node_id) + for node_id in contained: + original_node = dynprompt.get_node(node_id) + node = graph.lookup_node("Recurse" if node_id == unique_id else node_id) + for k, v in original_node["inputs"].items(): + if is_link(v) and v[0] in contained: + parent = graph.lookup_node(v[0]) + node.set_input(k, parent.out(v[1])) + else: + node.set_input(k, v) + + new_open = graph.lookup_node(open_node) + for i in range(MAX_FLOW_NUM): + key = "initial_value%d" % i + new_open.set_input(key, kwargs.get(key, None)) + my_clone = graph.lookup_node("Recurse") + result = map(lambda x: my_clone.out(x), range(MAX_FLOW_NUM)) + return { + "result": tuple(result), + "expand": graph.finalize(), + } + +class forLoopStart: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "total": ("INT", {"default": 1, "min": 1, "max": 100000, "step": 1}), + }, + "optional": { + "initial_value%d" % i: (AlwaysEqualProxy("*"),) for i in range(1, MAX_FLOW_NUM) + }, + "hidden": { + "initial_value0": (AlwaysEqualProxy("*"),), + "prompt": "PROMPT", + "extra_pnginfo": "EXTRA_PNGINFO", + "unique_id": "UNIQUE_ID" + } + } + + RETURN_TYPES = ByPassTypeTuple(tuple(["FLOW_CONTROL", "INT"] + [AlwaysEqualProxy("*")] * (MAX_FLOW_NUM - 1))) + RETURN_NAMES = ByPassTypeTuple(tuple(["flow", "index"] + ["value%d" % i for i in range(1, MAX_FLOW_NUM)])) + FUNCTION = "for_loop_start" + + CATEGORY = "EasyUse/Logic/For Loop" + + def for_loop_start(self, total, prompt=None, extra_pnginfo=None, unique_id=None, **kwargs): + graph = GraphBuilder() + i = 0 + unique_id = unique_id.split('.')[len(unique_id.split('.'))-1] if "." 
in unique_id else unique_id + node = next((x for x in extra_pnginfo['workflow']['nodes'] if x['id'] == int(unique_id)), None) + if node: + node['properties']['total'] = total + if "initial_value0" in kwargs: + i = kwargs["initial_value0"] + initial_values = {("initial_value%d" % num): kwargs.get("initial_value%d" % num, None) for num in range(1, MAX_FLOW_NUM)} + while_open = graph.node("easy whileLoopStart", condition=total, initial_value0=i, **initial_values) + outputs = [kwargs.get("initial_value%d" % num, None) for num in range(1, MAX_FLOW_NUM)] + return { + "result": tuple(["stub", i] + outputs), + "expand": graph.finalize(), + } + +class forLoopEnd: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "flow": ("FLOW_CONTROL", {"rawLink": True}), + }, + "optional": { + "initial_value%d" % i: (AlwaysEqualProxy("*"), {"rawLink": True}) for i in range(1, MAX_FLOW_NUM) + }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID"}, + } + + RETURN_TYPES = ByPassTypeTuple(tuple([AlwaysEqualProxy("*")] * (MAX_FLOW_NUM - 1))) + RETURN_NAMES = ByPassTypeTuple(tuple(["value%d" % i for i in range(1, MAX_FLOW_NUM)])) + FUNCTION = "for_loop_end" + + CATEGORY = "EasyUse/Logic/For Loop" + + def for_loop_end(self, flow, prompt=None, extra_pnginfo=None, my_unique_id=None, **kwargs): + graph = GraphBuilder() + while_open = flow[0] + total = None + if extra_pnginfo: + node = next((x for x in extra_pnginfo['workflow']['nodes'] if x['id'] == int(while_open)), None) + if node: + if 'properties' in node and 'total' in node['properties']: + total = node['properties']['total'] + else: + total = node['widgets_values'][0] if "widgets_values" in node else None + if total is None: + raise Exception("Unable to get parameters for the start of the loop") + sub = graph.node("easy mathInt", operation="add", a=[while_open, 1], b=1) + cond = graph.node("easy compare", a=sub.out(0), b=total, comparison='a < b') + input_values = {("initial_value%d" % i): kwargs.get("initial_value%d" % i, None) for i in + range(1, MAX_FLOW_NUM)} + while_close = graph.node("easy whileLoopEnd", + flow=flow, + condition=cond.out(0), + initial_value0=sub.out(0), + **input_values) + return { + "result": tuple([while_close.out(i) for i in range(1, MAX_FLOW_NUM)]), + "expand": graph.finalize(), + } + +COMPARE_FUNCTIONS = { + "a == b": lambda a, b: a == b, + "a != b": lambda a, b: a != b, + "a < b": lambda a, b: a < b, + "a > b": lambda a, b: a > b, + "a <= b": lambda a, b: a <= b, + "a >= b": lambda a, b: a >= b, +} + +# 比较 +class Compare: + @classmethod + def INPUT_TYPES(s): + compare_functions = list(COMPARE_FUNCTIONS.keys()) + return { + "required": { + "a": (AlwaysEqualProxy("*"), {"default": 0}), + "b": (AlwaysEqualProxy("*"), {"default": 0}), + "comparison": (compare_functions, {"default": "a == b"}), + }, + } + + RETURN_TYPES = ("BOOLEAN",) + RETURN_NAMES = ("boolean",) + FUNCTION = "compare" + CATEGORY = "EasyUse/Logic" + + def compare(self, a, b, comparison): + return (COMPARE_FUNCTIONS[comparison](a, b),) + +# 判断 +class IfElse: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "boolean": ("BOOLEAN",), + "on_true": (AlwaysEqualProxy("*"), lazy_options), + "on_false": (AlwaysEqualProxy("*"), lazy_options), + }, + } + + RETURN_TYPES = (AlwaysEqualProxy("*"),) + RETURN_NAMES = ("*",) + FUNCTION = "execute" + CATEGORY = "EasyUse/Logic" + + def check_lazy_status(self, boolean, on_true=None, on_false=None): + if boolean and on_true is None: + 
return ["on_true"] + if not boolean and on_false is None: + return ["on_false"] + + def execute(self, *args, **kwargs): + return (kwargs['on_true'] if kwargs['boolean'] else kwargs['on_false'],) + +#是否为SDXL +from comfy.sdxl_clip import SDXLClipModel, SDXLRefinerClipModel, SDXLClipG +class isNone: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "any": (AlwaysEqualProxy("*"),) + }, + "optional": { + } + } + + RETURN_TYPES = ("BOOLEAN",) + RETURN_NAMES = ("boolean",) + FUNCTION = "execute" + CATEGORY = "EasyUse/Logic" + + def execute(self, any): + return (True if any is None else False,) + +class isSDXL: + @classmethod + def INPUT_TYPES(s): + return { + "required": {}, + "optional": { + "optional_pipe": ("PIPE_LINE",), + "optional_clip": ("CLIP",), + } + } + + RETURN_TYPES = ("BOOLEAN",) + RETURN_NAMES = ("boolean",) + FUNCTION = "execute" + CATEGORY = "EasyUse/Logic" + + def execute(self, optional_pipe=None, optional_clip=None): + if optional_pipe is None and optional_clip is None: + raise Exception(f"[ERROR] optional_pipe or optional_clip is missing") + clip = optional_clip if optional_clip is not None else optional_pipe['clip'] + if isinstance(clip.cond_stage_model, (SDXLClipModel, SDXLRefinerClipModel, SDXLClipG)): + return (True,) + else: + return (False,) + +#xy矩阵 +class xyAny: + + @classmethod + def INPUT_TYPES(s): + + return { + "required": { + "X": (AlwaysEqualProxy("*"), {}), + "Y": (AlwaysEqualProxy("*"), {}), + "direction": (["horizontal", "vertical"], {"default": "horizontal"}) + } + } + + RETURN_TYPES = (AlwaysEqualProxy("*"), AlwaysEqualProxy("*")) + RETURN_NAMES = ("X", "Y") + INPUT_IS_LIST = True + OUTPUT_IS_LIST = (True, True) + CATEGORY = "EasyUse/Logic" + FUNCTION = "to_xy" + + def to_xy(self, X, Y, direction): + new_x = list() + new_y = list() + if direction[0] == "horizontal": + for y in Y: + for x in X: + new_x.append(x) + new_y.append(y) + else: + for x in X: + for y in Y: + new_x.append(x) + new_y.append(y) + + return (new_x, new_y) + +class batchAnything: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "any_1": (AlwaysEqualProxy("*"),{}), + "any_2": (AlwaysEqualProxy("*"),{}) + } + } + + RETURN_TYPES = (AlwaysEqualProxy("*"),) + RETURN_NAMES = ("batch",) + + FUNCTION = "batch" + CATEGORY = "EasyUse/Logic" + + def batch(self, any_1, any_2): + if isinstance(any_1, torch.Tensor) or isinstance(any_2, torch.Tensor): + if any_1 is None: + return (any_2,) + elif any_2 is None: + return (any_1,) + if any_1.shape[1:] != any_2.shape[1:]: + any_2 = comfy.utils.common_upscale(any_2.movedim(-1, 1), any_1.shape[2], any_1.shape[1], "bilinear", "center").movedim(1, -1) + return (torch.cat((any_1, any_2), 0),) + elif isinstance(any_1, (str, float, int)): + if any_2 is None: + return (any_1,) + elif isinstance(any_2, tuple): + return (any_2 + (any_1,),) + return ((any_1, any_2),) + elif isinstance(any_2, (str, float, int)): + if any_1 is None: + return (any_2,) + elif isinstance(any_1, tuple): + return (any_1 + (any_2,),) + return ((any_2, any_1),) + else: + if any_1 is None: + return (any_2,) + elif any_2 is None: + return (any_1,) + return (any_1 + any_2,) + +# 转换所有类型 +class convertAnything: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "*": (AlwaysEqualProxy("*"),), + "output_type": (["string", "int", "float", "boolean"], {"default": "string"}), + }} + + RETURN_TYPES = ByPassTypeTuple((AlwaysEqualProxy("*"),)) + OUTPUT_NODE = True + FUNCTION = "convert" + CATEGORY = "EasyUse/Logic" + + def convert(self, *args, **kwargs): + 
anything = kwargs['*']
+        output_type = kwargs['output_type']
+        params = None
+        if output_type == 'string':
+            params = str(anything)
+        elif output_type == 'int':
+            params = int(anything)
+        elif output_type == 'float':
+            params = float(anything)
+        elif output_type == 'boolean':
+            params = bool(anything)
+        return (params,)
+
+# Convert content of any type to a string and display it
+class showAnything:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {}, "optional": {"anything": (AlwaysEqualProxy("*"), {}), },
+                "hidden": {"unique_id": "UNIQUE_ID", "extra_pnginfo": "EXTRA_PNGINFO",
+                           }}
+
+    RETURN_TYPES = ()
+    INPUT_IS_LIST = True
+    OUTPUT_NODE = True
+    FUNCTION = "log_input"
+    CATEGORY = "EasyUse/Logic"
+
+    def log_input(self, unique_id=None, extra_pnginfo=None, **kwargs):
+
+        values = []
+        if "anything" in kwargs:
+            for val in kwargs['anything']:
+                try:
+                    if type(val) is str:
+                        values.append(val)
+                    else:
+                        val = json.dumps(val)
+                        values.append(str(val))
+                except Exception:
+                    values.append(str(val))
+                    pass
+
+        if not extra_pnginfo:
+            print("Error: extra_pnginfo is empty")
+        elif (not isinstance(extra_pnginfo[0], dict) or "workflow" not in extra_pnginfo[0]):
+            print("Error: extra_pnginfo[0] is not a dict or missing 'workflow' key")
+        else:
+            workflow = extra_pnginfo[0]["workflow"]
+            node = next((x for x in workflow["nodes"] if str(x["id"]) == unique_id[0]), None)
+            if node:
+                node["widgets_values"] = [values]
+
+        return {"ui": {"text": values}}
+
+class showTensorShape:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {"tensor": (AlwaysEqualProxy("*"),)}, "optional": {},
+                "hidden": {"unique_id": "UNIQUE_ID", "extra_pnginfo": "EXTRA_PNGINFO"
+                           }}
+
+    RETURN_TYPES = ()
+    RETURN_NAMES = ()
+    OUTPUT_NODE = True
+    FUNCTION = "log_input"
+    CATEGORY = "EasyUse/Logic"
+
+    def log_input(self, tensor, unique_id=None, extra_pnginfo=None):
+        shapes = []
+
+        def tensorShape(tensor):
+            if isinstance(tensor, dict):
+                for k in tensor:
+                    tensorShape(tensor[k])
+            elif isinstance(tensor, list):
+                for i in range(len(tensor)):
+                    tensorShape(tensor[i])
+            elif hasattr(tensor, 'shape'):
+                shapes.append(list(tensor.shape))
+
+        tensorShape(tensor)
+
+        return {"ui": {"text": shapes}}
+
+class outputToList:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {
+            "required": {
+                "tuple": (AlwaysEqualProxy("*"), {}),
+            }, "optional": {},
+        }
+
+    RETURN_TYPES = (AlwaysEqualProxy("*"),)
+    RETURN_NAMES = ("list",)
+    OUTPUT_IS_LIST = (True,)
+    FUNCTION = "output_to_List"
+    CATEGORY = "EasyUse/Logic"
+
+    def output_to_List(self, tuple):
+        return (tuple,)
+
+# cleanGpuUsed
+class cleanGPUUsed:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {"anything": (AlwaysEqualProxy("*"), {})}, "optional": {},
+                "hidden": {"unique_id": "UNIQUE_ID", "extra_pnginfo": "EXTRA_PNGINFO",
+                           }}
+
+    RETURN_TYPES = ()
+    RETURN_NAMES = ()
+    OUTPUT_NODE = True
+    FUNCTION = "empty_cache"
+    CATEGORY = "EasyUse/Logic"
+
+    def empty_cache(self, anything, unique_id=None, extra_pnginfo=None):
+        cleanGPUUsedForce()
+        remove_cache('*')
+        return ()
+
+class clearCacheKey:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {
+            "anything": (AlwaysEqualProxy("*"), {}),
+            "cache_key": ("STRING", {"default": "*"}),
+        }, "optional": {},
+            "hidden": {"unique_id": "UNIQUE_ID", "extra_pnginfo": "EXTRA_PNGINFO",}
+        }
+
+    RETURN_TYPES = ()
+    RETURN_NAMES = ()
+    OUTPUT_NODE = True
+    FUNCTION = "empty_cache"
+    CATEGORY = "EasyUse/Logic"
+
+    # ComfyUI passes the "cache_key" input as a keyword argument, so the parameter name must match it.
+    def empty_cache(self, anything, cache_key, unique_id=None, extra_pnginfo=None):
+        remove_cache(cache_key)
+        return ()
+
+class 
clearCacheAll: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "anything": (AlwaysEqualProxy("*"), {}), + }, "optional": {}, + "hidden": {"unique_id": "UNIQUE_ID", "extra_pnginfo": "EXTRA_PNGINFO",} + } + + RETURN_TYPES = () + RETURN_NAMES = () + OUTPUT_NODE = True + FUNCTION = "empty_cache" + CATEGORY = "EasyUse/Logic" + + def empty_cache(self, anything, unique_id=None, extra_pnginfo=None): + remove_cache('*') + return () + +# Deprecated +class If: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "any": (AlwaysEqualProxy("*"),), + "if": (AlwaysEqualProxy("*"),), + "else": (AlwaysEqualProxy("*"),), + }, + } + + RETURN_TYPES = (AlwaysEqualProxy("*"),) + RETURN_NAMES = ("?",) + FUNCTION = "execute" + CATEGORY = "EasyUse/🚫 Deprecated" + DEPRECATED = True + + def execute(self, *args, **kwargs): + return (kwargs['if'] if kwargs['any'] else kwargs['else'],) + +class poseEditor: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image": ("STRING", {"default":""}) + }} + + FUNCTION = "output_pose" + CATEGORY = "EasyUse/🚫 Deprecated" + DEPRECATED = True + RETURN_TYPES = () + RETURN_NAMES = () + def output_pose(self, image): + return () + +class imageToMask: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image": ("IMAGE",), + "channel": (['red', 'green', 'blue'],), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "convert" + CATEGORY = "EasyUse/🚫 Deprecated" + DEPRECATED = True + + def convert_to_single_channel(self, image, channel='red'): + from PIL import Image + # Convert to RGB mode to access individual channels + image = image.convert('RGB') + + # Extract the desired channel and convert to greyscale + if channel == 'red': + channel_img = image.split()[0].convert('L') + elif channel == 'green': + channel_img = image.split()[1].convert('L') + elif channel == 'blue': + channel_img = image.split()[2].convert('L') + else: + raise ValueError( + "Invalid channel option. 
Please choose 'red', 'green', or 'blue'.") + + # Convert the greyscale channel back to RGB mode + channel_img = Image.merge( + 'RGB', (channel_img, channel_img, channel_img)) + + return channel_img + + def convert(self, image, channel='red'): + from .libs.image import pil2tensor, tensor2pil + image = self.convert_to_single_channel(tensor2pil(image), channel) + image = pil2tensor(image) + return (image.squeeze().mean(2),) + +NODE_CLASS_MAPPINGS = { + "easy string": String, + "easy int": Int, + "easy rangeInt": RangeInt, + "easy float": Float, + "easy rangeFloat": RangeFloat, + "easy boolean": Boolean, + "easy mathString": mathStringOperation, + "easy mathInt": mathIntOperation, + "easy mathFloat": mathFloatOperation, + "easy compare": Compare, + "easy imageSwitch": imageSwitch, + "easy textSwitch": textSwitch, + "easy anythingIndexSwitch": anythingIndexSwitch, + "easy imageIndexSwitch": imageIndexSwitch, + "easy textIndexSwitch": textIndexSwitch, + "easy conditioningIndexSwitch": conditioningIndexSwitch, + "easy whileLoopStart": whileLoopStart, + "easy whileLoopEnd": whileLoopEnd, + "easy forLoopStart": forLoopStart, + "easy forLoopEnd": forLoopEnd, + "easy ifElse": IfElse, + "easy isNone": isNone, + "easy isSDXL": isSDXL, + "easy outputToList": outputToList, + "easy xyAny": xyAny, + "easy batchAnything": batchAnything, + "easy convertAnything": convertAnything, + "easy showAnything": showAnything, + "easy showTensorShape": showTensorShape, + "easy clearCacheKey": clearCacheKey, + "easy clearCacheAll": clearCacheAll, + "easy cleanGpuUsed": cleanGPUUsed, + "easy if": If, + "easy poseEditor": poseEditor, + "easy imageToMask": imageToMask +} +NODE_DISPLAY_NAME_MAPPINGS = { + "easy string": "String", + "easy int": "Int", + "easy rangeInt": "Range(Int)", + "easy float": "Float", + "easy rangeFloat": "Range(Float)", + "easy boolean": "Boolean", + "easy compare": "Compare", + "easy mathString": "Math String", + "easy mathInt": "Math Int", + "easy mathFloat": "Math Float", + "easy imageSwitch": "Image Switch", + "easy textSwitch": "Text Switch", + "easy anythingIndexSwitch": "Any Index Switch", + "easy imageIndexSwitch": "Image Index Switch", + "easy textIndexSwitch": "Text Index Switch", + "easy conditioningIndexSwitch": "Conditioning Index Switch", + "easy whileLoopStart": "While Loop Start", + "easy whileLoopEnd": "While Loop End", + "easy forLoopStart": "For Loop Start", + "easy forLoopEnd": "For Loop End", + "easy ifElse": "If else", + "easy isNone": "Is None", + "easy isSDXL": "Is SDXL", + "easy outputToList": "Output to List", + "easy xyAny": "XYAny", + "easy batchAnything": "Batch Any", + "easy convertAnything": "Convert Any", + "easy showAnything": "Show Any", + "easy showTensorShape": "Show Tensor Shape", + "easy clearCacheKey": "Clear Cache Key", + "easy clearCacheAll": "Clear Cache All", + "easy cleanGpuUsed": "Clean GPU Used", + "easy if": "If (🚫Deprecated)", + "easy poseEditor": "PoseEditor (🚫Deprecated)", + "easy imageToMask": "ImageToMask (🚫Deprecated)" +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/server.py b/ComfyUI-Easy-Use/py/server.py new file mode 100644 index 0000000000000000000000000000000000000000..0719b48f24127265f4571b559db081b33f339ce0 --- /dev/null +++ b/ComfyUI-Easy-Use/py/server.py @@ -0,0 +1,170 @@ +import random +import server +from enum import Enum + +class SGmode(Enum): + FIX = 1 + INCR = 2 + DECR = 3 + RAND = 4 + + +class SeedGenerator: + def __init__(self, base_value, action): + self.base_value = base_value + + if action == "fixed" or action == 
"increment" or action == "decrement" or action == "randomize": + self.action = SGmode.FIX + elif action == 'increment for each node': + self.action = SGmode.INCR + elif action == 'decrement for each node': + self.action = SGmode.DECR + elif action == 'randomize for each node': + self.action = SGmode.RAND + + def next(self): + seed = self.base_value + + if self.action == SGmode.INCR: + self.base_value += 1 + if self.base_value > 1125899906842624: + self.base_value = 0 + elif self.action == SGmode.DECR: + self.base_value -= 1 + if self.base_value < 0: + self.base_value = 1125899906842624 + elif self.action == SGmode.RAND: + self.base_value = random.randint(0, 1125899906842624) + + return seed + + +def control_seed(v, action, seed_is_global): + action = v['inputs']['action'] if seed_is_global else action + value = v['inputs']['value'] if seed_is_global else v['inputs']['seed_num'] + + if action == 'increment' or action == 'increment for each node': + value = value + 1 + if value > 1125899906842624: + value = 0 + elif action == 'decrement' or action == 'decrement for each node': + value = value - 1 + if value < 0: + value = 1125899906842624 + elif action == 'randomize' or action == 'randomize for each node': + value = random.randint(0, 1125899906842624) + if seed_is_global: + v['inputs']['value'] = value + + return value + + +def prompt_seed_update(json_data): + try: + seed_widget_map = json_data['extra_data']['extra_pnginfo']['workflow']['seed_widgets'] + except: + return None + + workflow = json_data['extra_data']['extra_pnginfo']['workflow'] + seed_widget_map = workflow['seed_widgets'] + value = None + mode = None + node = None + action = None + seed_is_global = False + + for k, v in json_data['prompt'].items(): + if 'class_type' not in v: + continue + + cls = v['class_type'] + + if cls == 'easy globalSeed': + mode = v['inputs']['mode'] + action = v['inputs']['action'] + value = v['inputs']['value'] + node = k, v + seed_is_global = True + + # control before generated + if mode is not None and mode and seed_is_global: + value = control_seed(node[1], action, seed_is_global) + + if seed_is_global: + if value is not None: + seed_generator = SeedGenerator(value, action) + + for k, v in json_data['prompt'].items(): + for k2, v2 in v['inputs'].items(): + if isinstance(v2, str) and '$GlobalSeed.value$' in v2: + v['inputs'][k2] = v2.replace('$GlobalSeed.value$', str(value)) + + if k not in seed_widget_map: + continue + + if 'seed_num' in v['inputs']: + if isinstance(v['inputs']['seed_num'], int): + v['inputs']['seed_num'] = seed_generator.next() + + if 'seed' in v['inputs']: + if isinstance(v['inputs']['seed'], int): + v['inputs']['seed'] = seed_generator.next() + + if 'noise_seed' in v['inputs']: + if isinstance(v['inputs']['noise_seed'], int): + v['inputs']['noise_seed'] = seed_generator.next() + + for k2, v2 in v['inputs'].items(): + if isinstance(v2, str) and '$GlobalSeed.value$' in v2: + v['inputs'][k2] = v2.replace('$GlobalSeed.value$', str(value)) + # control after generated + if mode is not None and not mode: + control_seed(node[1], action, seed_is_global) + + return value is not None + + +def workflow_seed_update(json_data): + nodes = json_data['extra_data']['extra_pnginfo']['workflow']['nodes'] + seed_widget_map = json_data['extra_data']['extra_pnginfo']['workflow']['seed_widgets'] + prompt = json_data['prompt'] + + updated_seed_map = {} + value = None + + for node in nodes: + node_id = str(node['id']) + if node_id in prompt: + if node['type'] == 'easy globalSeed': + value = 
prompt[node_id]['inputs']['value'] + length = len(node['widgets_values']) + node['widgets_values'][length-1] = node['widgets_values'][0] + node['widgets_values'][0] = value + elif node_id in seed_widget_map: + widget_idx = seed_widget_map[node_id] + + if 'seed_num' in prompt[node_id]['inputs']: + seed = prompt[node_id]['inputs']['seed_num'] + elif 'noise_seed' in prompt[node_id]['inputs']: + seed = prompt[node_id]['inputs']['noise_seed'] + else: + seed = prompt[node_id]['inputs']['seed'] + + node['widgets_values'][widget_idx] = seed + updated_seed_map[node_id] = seed + + server.PromptServer.instance.send_sync("easyuse-global-seed", {"id": node_id, "value": value, "seed_map": updated_seed_map}) + + +def onprompt(json_data): + is_changed = prompt_seed_update(json_data) + if is_changed: + workflow_seed_update(json_data) + + return json_data + +server.PromptServer.instance.add_on_prompt_handler(onprompt) + + +NODE_CLASS_MAPPINGS = {} +NODE_DISPLAY_NAME_MAPPINGS = {} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/py/xyplot.py b/ComfyUI-Easy-Use/py/xyplot.py new file mode 100644 index 0000000000000000000000000000000000000000..2793f4e8603d6eaf01d64c4cf8be42f4372ef1f9 --- /dev/null +++ b/ComfyUI-Easy-Use/py/xyplot.py @@ -0,0 +1,602 @@ +import os +import comfy +import folder_paths +from .config import RESOURCES_DIR +def load_preset(filename): + path = os.path.join(RESOURCES_DIR, filename) + path = os.path.abspath(path) + preset_list = [] + + if os.path.exists(path): + with open(path, 'r') as file: + for line in file: + preset_list.append(line.strip()) + + return preset_list + else: + return [] +def generate_floats(batch_count, first_float, last_float): + if batch_count > 1: + interval = (last_float - first_float) / (batch_count - 1) + values = [str(round(first_float + i * interval, 3)) for i in range(batch_count)] + else: + values = [str(first_float)] if batch_count == 1 else [] + return "; ".join(values) + +def generate_ints(batch_count, first_int, last_int): + if batch_count > 1: + interval = (last_int - first_int) / (batch_count - 1) + values = [str(int(first_int + i * interval)) for i in range(batch_count)] + else: + values = [str(first_int)] if batch_count == 1 else [] + # values = list(set(values)) # Remove duplicates + # values.sort() # Sort in ascending order + return "; ".join(values) + +# Seed++ Batch +class XYplot_SeedsBatch: + + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "batch_count": ("INT", {"default": 3, "min": 1, "max": 50}), }, + } + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, batch_count): + + axis = "advanced: Seeds++ Batch" + xy_values = {"axis": axis, "values": batch_count} + return (xy_values,) + +# Step Values +class XYplot_Steps: + parameters = ["steps", "start_at_step", "end_at_step",] + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "target_parameter": (cls.parameters,), + "batch_count": ("INT", {"default": 3, "min": 0, "max": 50}), + "first_step": ("INT", {"default": 10, "min": 1, "max": 10000}), + "last_step": ("INT", {"default": 20, "min": 1, "max": 10000}), + "first_start_step": ("INT", {"default": 0, "min": 0, "max": 10000}), + "last_start_step": ("INT", {"default": 10, "min": 0, "max": 10000}), + "first_end_step": ("INT", {"default": 10, "min": 0, "max": 10000}), + "last_end_step": ("INT", {"default": 20, "min": 0, "max": 10000}), + } + } + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + 
CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, target_parameter, batch_count, first_step, last_step, first_start_step, last_start_step, + first_end_step, last_end_step,): + + axis, xy_first, xy_last = None, None, None + + if target_parameter == "steps": + axis = "advanced: Steps" + xy_first = first_step + xy_last = last_step + elif target_parameter == "start_at_step": + axis = "advanced: StartStep" + xy_first = first_start_step + xy_last = last_start_step + elif target_parameter == "end_at_step": + axis = "advanced: EndStep" + xy_first = first_end_step + xy_last = last_end_step + + values = generate_ints(batch_count, xy_first, xy_last) + return ({"axis": axis, "values": values},) if values is not None else (None,) + +class XYplot_CFG: + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "batch_count": ("INT", {"default": 3, "min": 0, "max": 50}), + "first_cfg": ("FLOAT", {"default": 7.0, "min": 0.0, "max": 100.0}), + "last_cfg": ("FLOAT", {"default": 9.0, "min": 0.0, "max": 100.0}), + } + } + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, batch_count, first_cfg, last_cfg): + axis = "advanced: CFG Scale" + values = generate_floats(batch_count, first_cfg, last_cfg) + return ({"axis": axis, "values": values},) if values else (None,) + +# Step Values +class XYplot_Sampler_Scheduler: + parameters = ["sampler", "scheduler", "sampler & scheduler"] + + @classmethod + def INPUT_TYPES(cls): + samplers = ["None"] + comfy.samplers.KSampler.SAMPLERS + schedulers = ["None"] + comfy.samplers.KSampler.SCHEDULERS + inputs = { + "required": { + "target_parameter": (cls.parameters,), + "input_count": ("INT", {"default": 1, "min": 1, "max": 30, "step": 1}) + } + } + for i in range(1, 30 + 1): + inputs["required"][f"sampler_{i}"] = (samplers,) + inputs["required"][f"scheduler_{i}"] = (schedulers,) + + return inputs + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, target_parameter, input_count, **kwargs): + axis, values, = None, None, + if target_parameter == "scheduler": + axis = "advanced: Scheduler" + schedulers = [kwargs.get(f"scheduler_{i}") for i in range(1, input_count + 1)] + values = [scheduler for scheduler in schedulers if scheduler != "None"] + elif target_parameter == "sampler": + axis = "advanced: Sampler" + samplers = [kwargs.get(f"sampler_{i}") for i in range(1, input_count + 1)] + values = [sampler for sampler in samplers if sampler != "None"] + else: + axis = "advanced: Sampler&Scheduler" + samplers = [kwargs.get(f"sampler_{i}") for i in range(1, input_count + 1)] + schedulers = [kwargs.get(f"scheduler_{i}") for i in range(1, input_count + 1)] + values = [] + for sampler, scheduler in zip(samplers, schedulers): + sampler = sampler if sampler else 'None' + scheduler = scheduler if scheduler else 'None' + values.append(sampler +','+ scheduler) + values = "; ".join(values) + return ({"axis": axis, "values": values},) if values else (None,) + +class XYplot_Denoise: + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "batch_count": ("INT", {"default": 3, "min": 0, "max": 50}), + "first_denoise": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.1}), + "last_denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.1}), + } + } + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, 
batch_count, first_denoise, last_denoise): + axis = "advanced: Denoise" + values = generate_floats(batch_count, first_denoise, last_denoise) + return ({"axis": axis, "values": values},) if values else (None,) + +# PromptSR +class XYplot_PromptSR: + + @classmethod + def INPUT_TYPES(cls): + inputs = { + "required": { + "target_prompt": (["positive", "negative"],), + "search_txt": ("STRING", {"default": "", "multiline": False}), + "replace_all_text": ("BOOLEAN", {"default": False}), + "replace_count": ("INT", {"default": 3, "min": 1, "max": 30 - 1}), + } + } + + # Dynamically add replace_X inputs + for i in range(1, 30): + replace_key = f"replace_{i}" + inputs["required"][replace_key] = ("STRING", {"default": "", "multiline": False, "placeholder": replace_key}) + + return inputs + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, target_prompt, search_txt, replace_all_text, replace_count, **kwargs): + axis = None + + if target_prompt == "positive": + axis = "advanced: Positive Prompt S/R" + elif target_prompt == "negative": + axis = "advanced: Negative Prompt S/R" + + # Create base entry + values = [(search_txt, None, replace_all_text)] + + if replace_count > 0: + # Append additional entries based on replace_count + values.extend([(search_txt, kwargs.get(f"replace_{i+1}"), replace_all_text) for i in range(replace_count)]) + return ({"axis": axis, "values": values},) if values is not None else (None,) + +# XYPlot Pos Condition +class XYplot_Positive_Cond: + + @classmethod + def INPUT_TYPES(cls): + inputs = { + "optional": { + "positive_1": ("CONDITIONING",), + "positive_2": ("CONDITIONING",), + "positive_3": ("CONDITIONING",), + "positive_4": ("CONDITIONING",), + } + } + + return inputs + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, positive_1=None, positive_2=None, positive_3=None, positive_4=None): + axis = "advanced: Pos Condition" + values = [] + cond = [] + # Create base entry + if positive_1 is not None: + values.append("0") + cond.append(positive_1) + if positive_2 is not None: + values.append("1") + cond.append(positive_2) + if positive_3 is not None: + values.append("2") + cond.append(positive_3) + if positive_4 is not None: + values.append("3") + cond.append(positive_4) + + return ({"axis": axis, "values": values, "cond": cond},) if values is not None else (None,) + +# XYPlot Neg Condition +class XYplot_Negative_Cond: + + @classmethod + def INPUT_TYPES(cls): + inputs = { + "optional": { + "negative_1": ("CONDITIONING",), + "negative_2": ("CONDITIONING",), + "negative_3": ("CONDITIONING",), + "negative_4": ("CONDITIONING",), + } + } + + return inputs + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, negative_1=None, negative_2=None, negative_3=None, negative_4=None): + axis = "advanced: Neg Condition" + values = [] + cond = [] + # Create base entry + if negative_1 is not None: + values.append(0) + cond.append(negative_1) + if negative_2 is not None: + values.append(1) + cond.append(negative_2) + if negative_3 is not None: + values.append(2) + cond.append(negative_3) + if negative_4 is not None: + values.append(3) + cond.append(negative_4) + + return ({"axis": axis, "values": values, "cond": cond},) if values is not None else (None,) + +# XYPlot Pos Condition List +class XYplot_Positive_Cond_List: + @classmethod + def 
INPUT_TYPES(cls): + return { + "required": { + "positive": ("CONDITIONING",), + } + } + + INPUT_IS_LIST = True + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, positive): + axis = "advanced: Pos Condition" + values = [] + cond = [] + for index, c in enumerate(positive): + values.append(str(index)) + cond.append(c) + + return ({"axis": axis, "values": values, "cond": cond},) if values is not None else (None,) + +# XYPlot Neg Condition List +class XYplot_Negative_Cond_List: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "negative": ("CONDITIONING",), + } + } + + INPUT_IS_LIST = True + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, negative): + axis = "advanced: Neg Condition" + values = [] + cond = [] + for index, c in enumerate(negative): + values.append(index) + cond.append(c) + + return ({"axis": axis, "values": values, "cond": cond},) if values is not None else (None,) + +# XY Plot: ControlNet +class XYplot_Control_Net: + parameters = ["strength", "start_percent", "end_percent"] + @classmethod + def INPUT_TYPES(cls): + def get_file_list(filenames): + return [file for file in filenames if file != "put_models_here.txt" and "lllite" not in file] + + return { + "required": { + "control_net_name": (get_file_list(folder_paths.get_filename_list("controlnet")),), + "image": ("IMAGE",), + "target_parameter": (cls.parameters,), + "batch_count": ("INT", {"default": 3, "min": 1, "max": 30}), + "first_strength": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 10.0, "step": 0.01}), + "last_strength": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 10.0, "step": 0.01}), + "first_start_percent": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 1.0, "step": 0.01}), + "last_start_percent": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 1.0, "step": 0.01}), + "first_end_percent": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 1.0, "step": 0.01}), + "last_end_percent": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 1.0, "step": 0.01}), + "strength": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 10.0, "step": 0.01}), + "start_percent": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 1.0, "step": 0.01}), + "end_percent": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 1.0, "step": 0.01}), + }, + } + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, control_net_name, image, target_parameter, batch_count, first_strength, last_strength, first_start_percent, + last_start_percent, first_end_percent, last_end_percent, strength, start_percent, end_percent): + + axis, = None, + + values = [] + + if target_parameter == "strength": + axis = "advanced: ControlNetStrength" + + values.append([(control_net_name, image, first_strength, start_percent, end_percent)]) + strength_increment = (last_strength - first_strength) / (batch_count - 1) if batch_count > 1 else 0 + for i in range(1, batch_count - 1): + values.append([(control_net_name, image, first_strength + i * strength_increment, start_percent, + end_percent)]) + if batch_count > 1: + values.append([(control_net_name, image, last_strength, start_percent, end_percent)]) + + elif target_parameter == "start_percent": + axis = "advanced: ControlNetStart%" + + percent_increment = (last_start_percent - first_start_percent) / (batch_count - 1) if batch_count > 1 else 0 + values.append([(control_net_name, 
image, strength, first_start_percent, end_percent)])
+            for i in range(1, batch_count - 1):
+                values.append([(control_net_name, image, strength, first_start_percent + i * percent_increment,
+                                end_percent)])
+
+            # Always add the last start_percent if batch_count is more than 1.
+            if batch_count > 1:
+                values.append([(control_net_name, image, strength, last_start_percent, end_percent)])
+
+        elif target_parameter == "end_percent":
+            axis = "advanced: ControlNetEnd%"
+
+            percent_increment = (last_end_percent - first_end_percent) / (batch_count - 1) if batch_count > 1 else 0
+            # Each values entry is a one-element list holding a (name, image, strength, start, end) tuple.
+            values.append([(control_net_name, image, strength, start_percent, first_end_percent)])
+            for i in range(1, batch_count - 1):
+                values.append([(control_net_name, image, strength, start_percent,
+                                first_end_percent + i * percent_increment)])
+
+            if batch_count > 1:
+                values.append([(control_net_name, image, strength, start_percent, last_end_percent)])
+
+
+        return ({"axis": axis, "values": values},)
+
+
+#Checkpoints
+class XYplot_Checkpoint:
+
+    modes = ["Ckpt Names", "Ckpt Names+ClipSkip", "Ckpt Names+ClipSkip+VAE"]
+
+    @classmethod
+    def INPUT_TYPES(cls):
+
+        checkpoints = ["None"] + folder_paths.get_filename_list("checkpoints")
+        vaes = ["Baked VAE"] + folder_paths.get_filename_list("vae")
+
+        inputs = {
+            "required": {
+                "input_mode": (cls.modes,),
+                "ckpt_count": ("INT", {"default": 3, "min": 0, "max": 10, "step": 1}),
+            }
+        }
+
+        for i in range(1, 10 + 1):
+            inputs["required"][f"ckpt_name_{i}"] = (checkpoints,)
+            inputs["required"][f"clip_skip_{i}"] = ("INT", {"default": -1, "min": -24, "max": -1, "step": 1})
+            inputs["required"][f"vae_name_{i}"] = (vaes,)
+
+        inputs["optional"] = {
+            "optional_lora_stack": ("LORA_STACK",)
+        }
+        return inputs
+
+    RETURN_TYPES = ("X_Y",)
+    RETURN_NAMES = ("X or Y",)
+    FUNCTION = "xy_value"
+
+    CATEGORY = "EasyUse/XY Inputs"
+
+    def xy_value(self, input_mode, ckpt_count, **kwargs):
+
+        axis = "advanced: Checkpoint"
+
+        checkpoints = [kwargs.get(f"ckpt_name_{i}") for i in range(1, ckpt_count + 1)]
+        clip_skips = [kwargs.get(f"clip_skip_{i}") for i in range(1, ckpt_count + 1)]
+        vaes = [kwargs.get(f"vae_name_{i}") for i in range(1, ckpt_count + 1)]
+
+        # Set None for Clip Skip and/or VAE if not correct modes
+        for i in range(ckpt_count):
+            if "ClipSkip" not in input_mode:
+                clip_skips[i] = 'None'
+            if "VAE" not in input_mode:
+                vaes[i] = 'None'
+
+        # Extend each sub-array with lora_stack if it's not None
+        values = [checkpoint.replace(',', '*')+','+str(clip_skip)+','+vae.replace(',', '*') for checkpoint, clip_skip, vae in zip(checkpoints, clip_skips, vaes) if
+                  checkpoint != "None"]
+
+        optional_lora_stack = kwargs.get("optional_lora_stack") if "optional_lora_stack" in kwargs else []
+
+        xy_values = {"axis": axis, "values": values, "lora_stack": optional_lora_stack}
+        return (xy_values,)
+
+#Loras
+class XYplot_Lora:
+
+    modes = ["Lora Names", "Lora Names+Weights"]
+
+    @classmethod
+    def INPUT_TYPES(cls):
+        loras = ["None"] + folder_paths.get_filename_list("loras")
+
+        inputs = {
+            "required": {
+                "input_mode": (cls.modes,),
+                "lora_count": ("INT", {"default": 3, "min": 0, "max": 10, "step": 1}),
+                "model_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}),
+                "clip_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}),
+            }
+        }
+
+        for i in range(1, 10 + 1):
+            inputs["required"][f"lora_name_{i}"] = (loras,)
+            inputs["required"][f"model_str_{i}"] = ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01})
+            
inputs["required"][f"clip_str_{i}"] = ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}) + + inputs["optional"] = { + "optional_lora_stack": ("LORA_STACK",) + } + return inputs + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, input_mode, lora_count, model_strength, clip_strength, **kwargs): + + axis = "advanced: Lora" + # Extract values from kwargs + loras = [kwargs.get(f"lora_name_{i}") for i in range(1, lora_count + 1)] + model_strs = [kwargs.get(f"model_str_{i}", model_strength) for i in range(1, lora_count + 1)] + clip_strs = [kwargs.get(f"clip_str_{i}", clip_strength) for i in range(1, lora_count + 1)] + + # Use model_strength and clip_strength for the loras where values are not provided + if "Weights" not in input_mode: + for i in range(lora_count): + model_strs[i] = model_strength + clip_strs[i] = clip_strength + + # Extend each sub-array with lora_stack if it's not None + values = [lora.replace(',', '*')+','+str(model_str)+','+str(clip_str) for lora, model_str, clip_str + in zip(loras, model_strs, clip_strs) if lora != "None"] + + optional_lora_stack = kwargs.get("optional_lora_stack") if "optional_lora_stack" in kwargs else [] + + xy_values = {"axis": axis, "values": values, "lora_stack": optional_lora_stack} + return (xy_values,) + +# 模型叠加 +class XYplot_ModelMergeBlocks: + + @classmethod + def INPUT_TYPES(s): + checkpoints = folder_paths.get_filename_list("checkpoints") + vae = ["Use Model 1", "Use Model 2"] + folder_paths.get_filename_list("vae") + + preset = ["Preset"] # 20 + preset += load_preset("mmb-preset.txt") + preset += load_preset("mmb-preset.custom.txt") + + default_vectors = "1,0,0; \n0,1,0; \n0,0,1; \n1,1,0; \n1,0,1; \n0,1,1; " + return { + "required": { + "ckpt_name_1": (checkpoints,), + "ckpt_name_2": (checkpoints,), + "vae_use": (vae, {"default": "Use Model 1"}), + "preset": (preset, {"default": "preset"}), + "values": ("STRING", {"default": default_vectors, "multiline": True, "placeholder": 'Support 2 methods:\n\n1.input, middle, out in same line and insert values seperated by "; "\n\n2.model merge block number seperated by ", " in same line and insert values seperated by "; "'}), + }, + "hidden": {"my_unique_id": "UNIQUE_ID"} + } + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, ckpt_name_1, ckpt_name_2, vae_use, preset, values, my_unique_id=None): + + axis = "advanced: ModelMergeBlocks" + if ckpt_name_1 is None: + raise Exception("ckpt_name_1 is not found") + if ckpt_name_2 is None: + raise Exception("ckpt_name_2 is not found") + + models = (ckpt_name_1, ckpt_name_2) + + xy_values = {"axis":axis, "values":values, "models":models, "vae_use": vae_use} + return (xy_values,) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/pyproject.toml b/ComfyUI-Easy-Use/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..8f90b97d8db2686c8a61d8860192680671195b16 --- /dev/null +++ b/ComfyUI-Easy-Use/pyproject.toml @@ -0,0 +1,15 @@ +[project] +name = "comfyui-easy-use" +description = "To enhance the usability of ComfyUI, optimizations and integrations have been implemented for several commonly used nodes." 
+version = "1.2.1" +license = { file = "LICENSE" } +dependencies = ["diffusers>=0.25.0", "accelerate>=0.25.0", "clip_interrogator>=0.6.0", "sentencepiece", "lark-parser", "onnxruntime", "spandrel", "opencv-python"] + +[project.urls] +Repository = "https://github.com/yolain/ComfyUI-Easy-Use" +# Used by Comfy Registry https://comfyregistry.org + +[tool.comfy] +PublisherId = "yolain" +DisplayName = "ComfyUI-Easy-Use" +Icon = "" diff --git a/ComfyUI-Easy-Use/repair_dependency_list.txt b/ComfyUI-Easy-Use/repair_dependency_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..a66377e9ab5d324cf7bc19fa0c4cfc1798ffe4e4 --- /dev/null +++ b/ComfyUI-Easy-Use/repair_dependency_list.txt @@ -0,0 +1,4 @@ +numpy>=1.19.0 +huggingface_hub>=0.23.3 +transformers>=4.38.2 +protobuf>=4.25.3 \ No newline at end of file diff --git a/ComfyUI-Easy-Use/requirements.txt b/ComfyUI-Easy-Use/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..cc1920c0de96366c2c4fbedf3035def19b97c68d --- /dev/null +++ b/ComfyUI-Easy-Use/requirements.txt @@ -0,0 +1,8 @@ +diffusers>=0.25.0 +accelerate>=0.25.0 +clip_interrogator>=0.6.0 +lark-parser +onnxruntime +opencv-python +sentencepiece +spandrel \ No newline at end of file diff --git a/ComfyUI-Easy-Use/resources/OpenSans-Medium.ttf b/ComfyUI-Easy-Use/resources/OpenSans-Medium.ttf new file mode 100644 index 0000000000000000000000000000000000000000..8faa49e71119eaf344af62587eefb1a65def7ded --- /dev/null +++ b/ComfyUI-Easy-Use/resources/OpenSans-Medium.ttf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5ed28e4bb6c0fa26885f2d3e9d27e18049defe9594cf54a62acfe730b5f67397 +size 129948 diff --git a/ComfyUI-Easy-Use/resources/fooocus_styles.json b/ComfyUI-Easy-Use/resources/fooocus_styles.json new file mode 100644 index 0000000000000000000000000000000000000000..8e6fdcd46e03c862bb7e82e0ed1d4ba4fdd72642 --- /dev/null +++ b/ComfyUI-Easy-Use/resources/fooocus_styles.json @@ -0,0 +1,1375 @@ +[ + { + "name": "Fooocus Enhance", + "negative_prompt": "(worst quality, low quality, normal quality, lowres, low details, oversaturated, undersaturated, overexposed, underexposed, grayscale, bw, bad photo, bad photography, bad art:1.4), (watermark, signature, text font, username, error, logo, words, letters, digits, autograph, trademark, name:1.2), (blur, blurry, grainy), morbid, ugly, asymmetrical, mutated malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, (airbrushed, cartoon, anime, semi-realistic, cgi, render, blender, digital art, manga, amateur:1.3), (3D ,3D Game, 3D Game Scene, 3D Character:1.1), (bad hands, bad anatomy, bad body, bad face, bad teeth, bad arms, bad legs, deformities:1.3)" + }, + { + "name": "Fooocus Sharp", + "prompt": "cinematic still {prompt} . 
emotional, harmonious, vignette, 4k epic detailed, shot on kodak, 35mm photo, sharp focus, high budget, cinemascope, moody, epic, gorgeous, film grain, grainy", + "negative_prompt": "anime, cartoon, graphic, (blur, blurry, bokeh), text, painting, crayon, graphite, abstract, glitch, deformed, mutated, ugly, disfigured" + }, + { + "name": "Fooocus Masterpiece", + "prompt": "(masterpiece), (best quality), (ultra-detailed), {prompt}, illustration, disheveled hair, detailed eyes, perfect composition, moist skin, intricate details, earrings, by wlop", + "negative_prompt": "longbody, lowres, bad anatomy, bad hands, missing fingers, pubic hair,extra digit, fewer digits, cropped, worst quality, low quality" + }, + { + "name": "Fooocus Photograph", + "prompt": "photograph {prompt}, 50mm . cinematic 4k epic detailed 4k epic detailed photograph shot on kodak detailed cinematic hbo dark moody, 35mm photo, grainy, vignette, vintage, Kodachrome, Lomography, stained, highly detailed, found footage", + "negative_prompt": "Brad Pitt, bokeh, depth of field, blurry, cropped, regular face, saturated, contrast, deformed iris, deformed pupils, semi-realistic, cgi, 3d, render, sketch, cartoon, drawing, anime, text, cropped, out of frame, worst quality, low quality, jpeg artifacts, ugly, duplicate, morbid, mutilated, extra fingers, mutated hands, poorly drawn hands, poorly drawn face, mutation, deformed, dehydrated, bad anatomy, bad proportions, extra limbs, cloned face, disfigured, gross proportions, malformed limbs, missing arms, missing legs, extra arms, extra legs, fused fingers, too many fingers, long neck" + }, + { + "name": "Fooocus Negative", + "negative_prompt": "deformed, bad anatomy, disfigured, poorly drawn face, mutated, extra limb, ugly, poorly drawn hands, missing limb, floating limbs, disconnected limbs, disconnected head, malformed hands, long neck, mutated hands and fingers, bad hands, missing fingers, cropped, worst quality, low quality, mutation, poorly drawn, huge calf, bad hands, fused hand, missing hand, disappearing arms, disappearing thigh, disappearing calf, disappearing legs, missing fingers, fused fingers, abnormal eye proportion, Abnormal hands, abnormal legs, abnormal feet, abnormal fingers, drawing, painting, crayon, sketch, graphite, impressionist, noisy, blurry, soft, deformed, ugly, anime, cartoon, graphic, text, painting, crayon, graphite, abstract, glitch" + }, + { + "name": "Fooocus Cinematic", + "prompt": "cinematic still {prompt} . emotional, harmonious, vignette, highly detailed, high budget, bokeh, cinemascope, moody, epic, gorgeous, film grain, grainy", + "negative_prompt": "anime, cartoon, graphic, text, painting, crayon, graphite, abstract, glitch, deformed, mutated, ugly, disfigured" + }, + { + "name": "sai-3d-model", + "prompt": "professional 3d model {prompt} . octane render, highly detailed, volumetric, dramatic lighting", + "negative_prompt": "ugly, deformed, noisy, low poly, blurry, painting" + }, + { + "name": "sai-analog film", + "prompt": "analog film photo {prompt} . faded film, desaturated, 35mm photo, grainy, vignette, vintage, Kodachrome, Lomography, stained, highly detailed, found footage", + "negative_prompt": "painting, drawing, illustration, glitch, deformed, mutated, cross-eyed, ugly, disfigured" + }, + { + "name": "sai-anime", + "prompt": "anime artwork {prompt} . 
anime style, key visual, vibrant, studio anime, highly detailed", + "negative_prompt": "photo, deformed, black and white, realism, disfigured, low contrast" + }, + { + "name": "sai-cinematic", + "prompt": "cinematic film still {prompt} . shallow depth of field, vignette, highly detailed, high budget, bokeh, cinemascope, moody, epic, gorgeous, film grain, grainy", + "negative_prompt": "anime, cartoon, graphic, text, painting, crayon, graphite, abstract, glitch, deformed, mutated, ugly, disfigured" + }, + { + "name": "sai-comic book", + "prompt": "comic {prompt} . graphic illustration, comic art, graphic novel art, vibrant, highly detailed", + "negative_prompt": "photograph, deformed, glitch, noisy, realistic, stock photo" + }, + { + "name": "sai-craft clay", + "prompt": "play-doh style {prompt} . sculpture, clay art, centered composition, Claymation", + "negative_prompt": "sloppy, messy, grainy, highly detailed, ultra textured, photo" + }, + { + "name": "sai-digital art", + "prompt": "concept art {prompt} . digital artwork, illustrative, painterly, matte painting, highly detailed", + "negative_prompt": "photo, photorealistic, realism, ugly" + }, + { + "name": "sai-enhance", + "prompt": "breathtaking {prompt} . award-winning, professional, highly detailed", + "negative_prompt": "ugly, deformed, noisy, blurry, distorted, grainy" + }, + { + "name": "sai-fantasy art", + "prompt": "ethereal fantasy concept art of {prompt} . magnificent, celestial, ethereal, painterly, epic, majestic, magical, fantasy art, cover art, dreamy", + "negative_prompt": "photographic, realistic, realism, 35mm film, dslr, cropped, frame, text, deformed, glitch, noise, noisy, off-center, deformed, cross-eyed, closed eyes, bad anatomy, ugly, disfigured, sloppy, duplicate, mutated, black and white" + }, + { + "name": "sai-isometric", + "prompt": "isometric style {prompt} . vibrant, beautiful, crisp, detailed, ultra detailed, intricate", + "negative_prompt": "deformed, mutated, ugly, disfigured, blur, blurry, noise, noisy, realistic, photographic" + }, + { + "name": "sai-line art", + "prompt": "line art drawing {prompt} . professional, sleek, modern, minimalist, graphic, line art, vector graphics", + "negative_prompt": "anime, photorealistic, 35mm film, deformed, glitch, blurry, noisy, off-center, deformed, cross-eyed, closed eyes, bad anatomy, ugly, disfigured, mutated, realism, realistic, impressionism, expressionism, oil, acrylic" + }, + { + "name": "sai-lowpoly", + "prompt": "low-poly style {prompt} . low-poly game art, polygon mesh, jagged, blocky, wireframe edges, centered composition", + "negative_prompt": "noisy, sloppy, messy, grainy, highly detailed, ultra textured, photo" + }, + { + "name": "sai-neonpunk", + "prompt": "neonpunk style {prompt} . cyberpunk, vaporwave, neon, vibes, vibrant, stunningly beautiful, crisp, detailed, sleek, ultramodern, magenta highlights, dark purple shadows, high contrast, cinematic, ultra detailed, intricate, professional", + "negative_prompt": "painting, drawing, illustration, glitch, deformed, mutated, cross-eyed, ugly, disfigured" + }, + { + "name": "sai-origami", + "prompt": "origami style {prompt} . paper art, pleated paper, folded, origami art, pleats, cut and fold, centered composition", + "negative_prompt": "noisy, sloppy, messy, grainy, highly detailed, ultra textured, photo" + }, + { + "name": "sai-photographic", + "prompt": "cinematic photo {prompt} . 
35mm photograph, film, bokeh, professional, 4k, highly detailed", + "negative_prompt": "drawing, painting, crayon, sketch, graphite, impressionist, noisy, blurry, soft, deformed, ugly" + }, + { + "name": "sai-pixel art", + "prompt": "pixel-art {prompt} . low-res, blocky, pixel art style, 8-bit graphics", + "negative_prompt": "sloppy, messy, blurry, noisy, highly detailed, ultra textured, photo, realistic" + }, + { + "name": "sai-texture", + "prompt": "texture {prompt} top down close-up", + "negative_prompt": "ugly, deformed, noisy, blurry" + }, + { + "name": "mre-cinematic-dynamic", + "prompt": "epic cinematic shot of dynamic {prompt} in motion. main subject of high budget action movie. raw photo, motion blur. best quality, high resolution", + "negative_prompt": "static, still, motionless, sluggish. drawing, painting, illustration, rendered. low budget. low quality, low resolution" + }, + { + "name": "mre-spontaneous-picture", + "prompt": "spontaneous picture of {prompt}, taken by talented amateur. best quality, high resolution. magical moment, natural look. simple but good looking", + "negative_prompt": "overthinked. low quality, low resolution" + }, + { + "name": "mre-artistic-vision", + "prompt": "powerful artistic vision of {prompt}. breathtaking masterpiece made by great artist. best quality, high resolution", + "negative_prompt": "insignificant, flawed, made by bad artist. low quality, low resolution" + }, + { + "name": "mre-dark-dream", + "prompt": "dark and unsettling dream showing {prompt}. best quality, high resolution. created by genius but depressed mad artist. grim beauty", + "negative_prompt": "naive, cheerful. comfortable, casual, boring, cliche. low quality, low resolution" + }, + { + "name": "mre-gloomy-art", + "prompt": "astonishing gloomy art made mainly of shadows and lighting, forming {prompt}. masterful usage of lighting, shadows and chiaroscuro. made by black-hearted artist, drawing from darkness. best quality, high resolution", + "negative_prompt": "low quality, low resolution" + }, + { + "name": "mre-bad-dream", + "prompt": "picture from really bad dream about terrifying {prompt}, true horror. bone-chilling vision. mad world that shouldn't exist. best quality, high resolution", + "negative_prompt": "nice dream, pleasant experience. low quality, low resolution" + }, + { + "name": "mre-underground", + "prompt": "uncanny caliginous vision of {prompt}, created by remarkable underground artist. best quality, high resolution. raw and brutal art, careless but impressive style. inspired by darkness and chaos", + "negative_prompt": "photography, mainstream, civilized. low quality, low resolution" + }, + { + "name": "mre-surreal-painting", + "prompt": "surreal painting representing strange vision of {prompt}. harmonious madness, synergy with chance. unique artstyle, mindbending art, magical surrealism. best quality, high resolution", + "negative_prompt": "photography, illustration, drawing. realistic, possible. logical, sane. low quality, low resolution" + }, + { + "name": "mre-dynamic-illustration", + "prompt": "insanely dynamic illustration of {prompt}. best quality, high resolution. crazy artstyle, careless brushstrokes, emotional and fun", + "negative_prompt": "photography, realistic. static, still, slow, boring. low quality, low resolution" + }, + { + "name": "mre-undead-art", + "prompt": "long forgotten art created by undead artist illustrating {prompt}, tribute to the death and decay. miserable art of the damned. wretched and decaying world. 
best quality, high resolution", + "negative_prompt": "alive, playful, living. low quality, low resolution" + }, + { + "name": "mre-elemental-art", + "prompt": "art illustrating insane amounts of raging elemental energy turning into {prompt}, avatar of elements. magical surrealism, wizardry. best quality, high resolution", + "negative_prompt": "photography, realistic, real. low quality, low resolution" + }, + { + "name": "mre-space-art", + "prompt": "winner of inter-galactic art contest illustrating {prompt}, symbol of the interstellar singularity. best quality, high resolution. artstyle previously unseen in the whole galaxy", + "negative_prompt": "created by human race, low quality, low resolution" + }, + { + "name": "mre-ancient-illustration", + "prompt": "sublime ancient illustration of {prompt}, predating human civilization. crude and simple, but also surprisingly beautiful artwork, made by genius primeval artist. best quality, high resolution", + "negative_prompt": "low quality, low resolution" + }, + { + "name": "mre-brave-art", + "prompt": "brave, shocking, and brutally true art showing {prompt}. inspired by courage and unlimited creativity. truth found in chaos. best quality, high resolution", + "negative_prompt": "low quality, low resolution" + }, + { + "name": "mre-heroic-fantasy", + "prompt": "heroic fantasy painting of {prompt}, in the dangerous fantasy world. airbrush over oil on canvas. best quality, high resolution", + "negative_prompt": "low quality, low resolution" + }, + { + "name": "mre-dark-cyberpunk", + "prompt": "dark cyberpunk illustration of brutal {prompt} in a world without hope, ruled by ruthless criminal corporations. best quality, high resolution", + "negative_prompt": "low quality, low resolution" + }, + { + "name": "mre-lyrical-geometry", + "prompt": "geometric and lyrical abstraction painting presenting {prompt}. oil on metal. best quality, high resolution", + "negative_prompt": "photography, realistic, drawing, rendered. low quality, low resolution" + }, + { + "name": "mre-sumi-e-symbolic", + "prompt": "big long brushstrokes of deep black sumi-e turning into symbolic painting of {prompt}. master level raw art. best quality, high resolution", + "negative_prompt": "photography, rendered. low quality, low resolution" + }, + { + "name": "mre-sumi-e-detailed", + "prompt": "highly detailed black sumi-e painting of {prompt}. in-depth study of perfection, created by a master. best quality, high resolution", + "negative_prompt": "low quality, low resolution" + }, + { + "name": "mre-manga", + "prompt": "manga artwork presenting {prompt}. created by japanese manga artist. highly emotional. best quality, high resolution", + "negative_prompt": "low quality, low resolution" + }, + { + "name": "mre-anime", + "prompt": "anime artwork illustrating {prompt}. created by japanese anime studio. highly emotional. best quality, high resolution", + "negative_prompt": "low quality, low resolution" + }, + { + "name": "mre-comic", + "prompt": "breathtaking illustration from adult comic book presenting {prompt}. fabulous artwork. best quality, high resolution", + "negative_prompt": "deformed, ugly, low quality, low resolution" + }, + { + "name": "ads-advertising", + "prompt": "advertising poster style {prompt} . Professional, modern, product-focused, commercial, eye-catching, highly detailed", + "negative_prompt": "noisy, blurry, amateurish, sloppy, unattractive" + }, + { + "name": "ads-automotive", + "prompt": "automotive advertisement style {prompt} . 
sleek, dynamic, professional, commercial, vehicle-focused, high-resolution, highly detailed", + "negative_prompt": "noisy, blurry, unattractive, sloppy, unprofessional" + }, + { + "name": "ads-corporate", + "prompt": "corporate branding style {prompt} . professional, clean, modern, sleek, minimalist, business-oriented, highly detailed", + "negative_prompt": "noisy, blurry, grungy, sloppy, cluttered, disorganized" + }, + { + "name": "ads-fashion editorial", + "prompt": "fashion editorial style {prompt} . high fashion, trendy, stylish, editorial, magazine style, professional, highly detailed", + "negative_prompt": "outdated, blurry, noisy, unattractive, sloppy" + }, + { + "name": "ads-food photography", + "prompt": "food photography style {prompt} . appetizing, professional, culinary, high-resolution, commercial, highly detailed", + "negative_prompt": "unappetizing, sloppy, unprofessional, noisy, blurry" + }, + { + "name": "ads-gourmet food photography", + "prompt": "gourmet food photo of {prompt} . soft natural lighting, macro details, vibrant colors, fresh ingredients, glistening textures, bokeh background, styled plating, wooden tabletop, garnished, tantalizing, editorial quality", + "negative_prompt": "cartoon, anime, sketch, grayscale, dull, overexposed, cluttered, messy plate, deformed" + }, + { + "name": "ads-luxury", + "prompt": "luxury product style {prompt} . elegant, sophisticated, high-end, luxurious, professional, highly detailed", + "negative_prompt": "cheap, noisy, blurry, unattractive, amateurish" + }, + { + "name": "ads-real estate", + "prompt": "real estate photography style {prompt} . professional, inviting, well-lit, high-resolution, property-focused, commercial, highly detailed", + "negative_prompt": "dark, blurry, unappealing, noisy, unprofessional" + }, + { + "name": "ads-retail", + "prompt": "retail packaging style {prompt} . vibrant, enticing, commercial, product-focused, eye-catching, professional, highly detailed", + "negative_prompt": "noisy, blurry, amateurish, sloppy, unattractive" + }, + { + "name": "artstyle-abstract", + "prompt": "abstract style {prompt} . non-representational, colors and shapes, expression of feelings, imaginative, highly detailed", + "negative_prompt": "realistic, photographic, figurative, concrete" + }, + { + "name": "artstyle-abstract expressionism", + "prompt": "abstract expressionist painting {prompt} . energetic brushwork, bold colors, abstract forms, expressive, emotional", + "negative_prompt": "realistic, photorealistic, low contrast, plain, simple, monochrome" + }, + { + "name": "artstyle-art deco", + "prompt": "art deco style {prompt} . geometric shapes, bold colors, luxurious, elegant, decorative, symmetrical, ornate, detailed", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic, modernist, minimalist" + }, + { + "name": "artstyle-art nouveau", + "prompt": "art nouveau style {prompt} . elegant, decorative, curvilinear forms, nature-inspired, ornate, detailed", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic, modernist, minimalist" + }, + { + "name": "artstyle-constructivist", + "prompt": "constructivist style {prompt} . geometric shapes, bold colors, dynamic composition, propaganda art style", + "negative_prompt": "realistic, photorealistic, low contrast, plain, simple, abstract expressionism" + }, + { + "name": "artstyle-cubist", + "prompt": "cubist artwork {prompt} . 
geometric shapes, abstract, innovative, revolutionary", + "negative_prompt": "anime, photorealistic, 35mm film, deformed, glitch, low contrast, noisy" + }, + { + "name": "artstyle-expressionist", + "prompt": "expressionist {prompt} . raw, emotional, dynamic, distortion for emotional effect, vibrant, use of unusual colors, detailed", + "negative_prompt": "realism, symmetry, quiet, calm, photo" + }, + { + "name": "artstyle-graffiti", + "prompt": "graffiti style {prompt} . street art, vibrant, urban, detailed, tag, mural", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic" + }, + { + "name": "artstyle-hyperrealism", + "prompt": "hyperrealistic art {prompt} . extremely high-resolution details, photographic, realism pushed to extreme, fine texture, incredibly lifelike", + "negative_prompt": "simplified, abstract, unrealistic, impressionistic, low resolution" + }, + { + "name": "artstyle-impressionist", + "prompt": "impressionist painting {prompt} . loose brushwork, vibrant color, light and shadow play, captures feeling over form", + "negative_prompt": "anime, photorealistic, 35mm film, deformed, glitch, low contrast, noisy" + }, + { + "name": "artstyle-pointillism", + "prompt": "pointillism style {prompt} . composed entirely of small, distinct dots of color, vibrant, highly detailed", + "negative_prompt": "line drawing, smooth shading, large color fields, simplistic" + }, + { + "name": "artstyle-pop art", + "prompt": "pop Art style {prompt} . bright colors, bold outlines, popular culture themes, ironic or kitsch", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic, minimalist" + }, + { + "name": "artstyle-psychedelic", + "prompt": "psychedelic style {prompt} . vibrant colors, swirling patterns, abstract forms, surreal, trippy", + "negative_prompt": "monochrome, black and white, low contrast, realistic, photorealistic, plain, simple" + }, + { + "name": "artstyle-renaissance", + "prompt": "renaissance style {prompt} . realistic, perspective, light and shadow, religious or mythological themes, highly detailed", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, modernist, minimalist, abstract" + }, + { + "name": "artstyle-steampunk", + "prompt": "steampunk style {prompt} . antique, mechanical, brass and copper tones, gears, intricate, detailed", + "negative_prompt": "deformed, glitch, noisy, low contrast, anime, photorealistic" + }, + { + "name": "artstyle-surrealist", + "prompt": "surrealist art {prompt} . dreamlike, mysterious, provocative, symbolic, intricate, detailed", + "negative_prompt": "anime, photorealistic, realistic, deformed, glitch, noisy, low contrast" + }, + { + "name": "artstyle-typography", + "prompt": "typographic art {prompt} . stylized, intricate, detailed, artistic, text-based", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic" + }, + { + "name": "artstyle-watercolor", + "prompt": "watercolor painting {prompt} . vibrant, beautiful, painterly, detailed, textural, artistic", + "negative_prompt": "anime, photorealistic, 35mm film, deformed, glitch, low contrast, noisy" + }, + { + "name": "futuristic-biomechanical", + "prompt": "biomechanical style {prompt} . blend of organic and mechanical elements, futuristic, cybernetic, detailed, intricate", + "negative_prompt": "natural, rustic, primitive, organic, simplistic" + }, + { + "name": "futuristic-biomechanical cyberpunk", + "prompt": "biomechanical cyberpunk {prompt} . 
cybernetics, human-machine fusion, dystopian, organic meets artificial, dark, intricate, highly detailed", + "negative_prompt": "natural, colorful, deformed, sketch, low contrast, watercolor" + }, + { + "name": "futuristic-cybernetic", + "prompt": "cybernetic style {prompt} . futuristic, technological, cybernetic enhancements, robotics, artificial intelligence themes", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic, historical, medieval" + }, + { + "name": "futuristic-cybernetic robot", + "prompt": "cybernetic robot {prompt} . android, AI, machine, metal, wires, tech, futuristic, highly detailed", + "negative_prompt": "organic, natural, human, sketch, watercolor, low contrast" + }, + { + "name": "futuristic-cyberpunk cityscape", + "prompt": "cyberpunk cityscape {prompt} . neon lights, dark alleys, skyscrapers, futuristic, vibrant colors, high contrast, highly detailed", + "negative_prompt": "natural, rural, deformed, low contrast, black and white, sketch, watercolor" + }, + { + "name": "futuristic-futuristic", + "prompt": "futuristic style {prompt} . sleek, modern, ultramodern, high tech, detailed", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic, vintage, antique" + }, + { + "name": "futuristic-retro cyberpunk", + "prompt": "retro cyberpunk {prompt} . 80's inspired, synthwave, neon, vibrant, detailed, retro futurism", + "negative_prompt": "modern, desaturated, black and white, realism, low contrast" + }, + { + "name": "futuristic-retro futurism", + "prompt": "retro-futuristic {prompt} . vintage sci-fi, 50s and 60s style, atomic age, vibrant, highly detailed", + "negative_prompt": "contemporary, realistic, rustic, primitive" + }, + { + "name": "futuristic-sci-fi", + "prompt": "sci-fi style {prompt} . futuristic, technological, alien worlds, space themes, advanced civilizations", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic, historical, medieval" + }, + { + "name": "futuristic-vaporwave", + "prompt": "vaporwave style {prompt} . retro aesthetic, cyberpunk, vibrant, neon colors, vintage 80s and 90s style, highly detailed", + "negative_prompt": "monochrome, muted colors, realism, rustic, minimalist, dark" + }, + { + "name": "game-bubble bobble", + "prompt": "Bubble Bobble style {prompt} . 8-bit, cute, pixelated, fantasy, vibrant, reminiscent of Bubble Bobble game", + "negative_prompt": "realistic, modern, photorealistic, violent, horror" + }, + { + "name": "game-cyberpunk game", + "prompt": "cyberpunk game style {prompt} . neon, dystopian, futuristic, digital, vibrant, detailed, high contrast, reminiscent of cyberpunk genre video games", + "negative_prompt": "historical, natural, rustic, low detailed" + }, + { + "name": "game-fighting game", + "prompt": "fighting game style {prompt} . dynamic, vibrant, action-packed, detailed character design, reminiscent of fighting video games", + "negative_prompt": "peaceful, calm, minimalist, photorealistic" + }, + { + "name": "game-gta", + "prompt": "GTA-style artwork {prompt} . satirical, exaggerated, pop art style, vibrant colors, iconic characters, action-packed", + "negative_prompt": "realistic, black and white, low contrast, impressionist, cubist, noisy, blurry, deformed" + }, + { + "name": "game-mario", + "prompt": "Super Mario style {prompt} . 
vibrant, cute, cartoony, fantasy, playful, reminiscent of Super Mario series", + "negative_prompt": "realistic, modern, horror, dystopian, violent" + }, + { + "name": "game-minecraft", + "prompt": "Minecraft style {prompt} . blocky, pixelated, vibrant colors, recognizable characters and objects, game assets", + "negative_prompt": "smooth, realistic, detailed, photorealistic, noise, blurry, deformed" + }, + { + "name": "game-pokemon", + "prompt": "Pok\u00e9mon style {prompt} . vibrant, cute, anime, fantasy, reminiscent of Pok\u00e9mon series", + "negative_prompt": "realistic, modern, horror, dystopian, violent" + }, + { + "name": "game-retro arcade", + "prompt": "retro arcade style {prompt} . 8-bit, pixelated, vibrant, classic video game, old school gaming, reminiscent of 80s and 90s arcade games", + "negative_prompt": "modern, ultra-high resolution, photorealistic, 3D" + }, + { + "name": "game-retro game", + "prompt": "retro game art {prompt} . 16-bit, vibrant colors, pixelated, nostalgic, charming, fun", + "negative_prompt": "realistic, photorealistic, 35mm film, deformed, glitch, low contrast, noisy" + }, + { + "name": "game-rpg fantasy game", + "prompt": "role-playing game (RPG) style fantasy {prompt} . detailed, vibrant, immersive, reminiscent of high fantasy RPG games", + "negative_prompt": "sci-fi, modern, urban, futuristic, low detailed" + }, + { + "name": "game-strategy game", + "prompt": "strategy game style {prompt} . overhead view, detailed map, units, reminiscent of real-time strategy video games", + "negative_prompt": "first-person view, modern, photorealistic" + }, + { + "name": "game-streetfighter", + "prompt": "Street Fighter style {prompt} . vibrant, dynamic, arcade, 2D fighting game, highly detailed, reminiscent of Street Fighter series", + "negative_prompt": "3D, realistic, modern, photorealistic, turn-based strategy" + }, + { + "name": "game-zelda", + "prompt": "Legend of Zelda style {prompt} . vibrant, fantasy, detailed, epic, heroic, reminiscent of The Legend of Zelda series", + "negative_prompt": "sci-fi, modern, realistic, horror" + }, + { + "name": "misc-architectural", + "prompt": "architectural style {prompt} . clean lines, geometric shapes, minimalist, modern, architectural drawing, highly detailed", + "negative_prompt": "curved lines, ornate, baroque, abstract, grunge" + }, + { + "name": "misc-disco", + "prompt": "disco-themed {prompt} . vibrant, groovy, retro 70s style, shiny disco balls, neon lights, dance floor, highly detailed", + "negative_prompt": "minimalist, rustic, monochrome, contemporary, simplistic" + }, + { + "name": "misc-dreamscape", + "prompt": "dreamscape {prompt} . surreal, ethereal, dreamy, mysterious, fantasy, highly detailed", + "negative_prompt": "realistic, concrete, ordinary, mundane" + }, + { + "name": "misc-dystopian", + "prompt": "dystopian style {prompt} . bleak, post-apocalyptic, somber, dramatic, highly detailed", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, cheerful, optimistic, vibrant, colorful" + }, + { + "name": "misc-fairy tale", + "prompt": "fairy tale {prompt} . magical, fantastical, enchanting, storybook style, highly detailed", + "negative_prompt": "realistic, modern, ordinary, mundane" + }, + { + "name": "misc-gothic", + "prompt": "gothic style {prompt} . dark, mysterious, haunting, dramatic, ornate, detailed", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic, cheerful, optimistic" + }, + { + "name": "misc-grunge", + "prompt": "grunge style {prompt} . 
textured, distressed, vintage, edgy, punk rock vibe, dirty, noisy", + "negative_prompt": "smooth, clean, minimalist, sleek, modern, photorealistic" + }, + { + "name": "misc-horror", + "prompt": "horror-themed {prompt} . eerie, unsettling, dark, spooky, suspenseful, grim, highly detailed", + "negative_prompt": "cheerful, bright, vibrant, light-hearted, cute" + }, + { + "name": "misc-kawaii", + "prompt": "kawaii style {prompt} . cute, adorable, brightly colored, cheerful, anime influence, highly detailed", + "negative_prompt": "dark, scary, realistic, monochrome, abstract" + }, + { + "name": "misc-lovecraftian", + "prompt": "lovecraftian horror {prompt} . eldritch, cosmic horror, unknown, mysterious, surreal, highly detailed", + "negative_prompt": "light-hearted, mundane, familiar, simplistic, realistic" + }, + { + "name": "misc-macabre", + "prompt": "macabre style {prompt} . dark, gothic, grim, haunting, highly detailed", + "negative_prompt": "bright, cheerful, light-hearted, cartoonish, cute" + }, + { + "name": "misc-manga", + "prompt": "manga style {prompt} . vibrant, high-energy, detailed, iconic, Japanese comic style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic, Western comic style" + }, + { + "name": "misc-metropolis", + "prompt": "metropolis-themed {prompt} . urban, cityscape, skyscrapers, modern, futuristic, highly detailed", + "negative_prompt": "rural, natural, rustic, historical, simple" + }, + { + "name": "misc-minimalist", + "prompt": "minimalist style {prompt} . simple, clean, uncluttered, modern, elegant", + "negative_prompt": "ornate, complicated, highly detailed, cluttered, disordered, messy, noisy" + }, + { + "name": "misc-monochrome", + "prompt": "monochrome {prompt} . black and white, contrast, tone, texture, detailed", + "negative_prompt": "colorful, vibrant, noisy, blurry, deformed" + }, + { + "name": "misc-nautical", + "prompt": "nautical-themed {prompt} . sea, ocean, ships, maritime, beach, marine life, highly detailed", + "negative_prompt": "landlocked, desert, mountains, urban, rustic" + }, + { + "name": "misc-space", + "prompt": "space-themed {prompt} . cosmic, celestial, stars, galaxies, nebulas, planets, science fiction, highly detailed", + "negative_prompt": "earthly, mundane, ground-based, realism" + }, + { + "name": "misc-stained glass", + "prompt": "stained glass style {prompt} . vibrant, beautiful, translucent, intricate, detailed", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic" + }, + { + "name": "misc-techwear fashion", + "prompt": "techwear fashion {prompt} . futuristic, cyberpunk, urban, tactical, sleek, dark, highly detailed", + "negative_prompt": "vintage, rural, colorful, low contrast, realism, sketch, watercolor" + }, + { + "name": "misc-tribal", + "prompt": "tribal style {prompt} . indigenous, ethnic, traditional patterns, bold, natural colors, highly detailed", + "negative_prompt": "modern, futuristic, minimalist, pastel" + }, + { + "name": "misc-zentangle", + "prompt": "zentangle {prompt} . intricate, abstract, monochrome, patterns, meditative, highly detailed", + "negative_prompt": "colorful, representative, simplistic, large fields of color" + }, + { + "name": "papercraft-collage", + "prompt": "collage style {prompt} . mixed media, layered, textural, detailed, artistic", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic" + }, + { + "name": "papercraft-flat papercut", + "prompt": "flat papercut style {prompt} . 
silhouette, clean cuts, paper, sharp edges, minimalist, color block", + "negative_prompt": "3D, high detail, noise, grainy, blurry, painting, drawing, photo, disfigured" + }, + { + "name": "papercraft-kirigami", + "prompt": "kirigami representation of {prompt} . 3D, paper folding, paper cutting, Japanese, intricate, symmetrical, precision, clean lines", + "negative_prompt": "painting, drawing, 2D, noisy, blurry, deformed" + }, + { + "name": "papercraft-paper mache", + "prompt": "paper mache representation of {prompt} . 3D, sculptural, textured, handmade, vibrant, fun", + "negative_prompt": "2D, flat, photo, sketch, digital art, deformed, noisy, blurry" + }, + { + "name": "papercraft-paper quilling", + "prompt": "paper quilling art of {prompt} . intricate, delicate, curling, rolling, shaping, coiling, loops, 3D, dimensional, ornamental", + "negative_prompt": "photo, painting, drawing, 2D, flat, deformed, noisy, blurry" + }, + { + "name": "papercraft-papercut collage", + "prompt": "papercut collage of {prompt} . mixed media, textured paper, overlapping, asymmetrical, abstract, vibrant", + "negative_prompt": "photo, 3D, realistic, drawing, painting, high detail, disfigured" + }, + { + "name": "papercraft-papercut shadow box", + "prompt": "3D papercut shadow box of {prompt} . layered, dimensional, depth, silhouette, shadow, papercut, handmade, high contrast", + "negative_prompt": "painting, drawing, photo, 2D, flat, high detail, blurry, noisy, disfigured" + }, + { + "name": "papercraft-stacked papercut", + "prompt": "stacked papercut art of {prompt} . 3D, layered, dimensional, depth, precision cut, stacked layers, papercut, high contrast", + "negative_prompt": "2D, flat, noisy, blurry, painting, drawing, photo, deformed" + }, + { + "name": "papercraft-thick layered papercut", + "prompt": "thick layered papercut art of {prompt} . deep 3D, volumetric, dimensional, depth, thick paper, high stack, heavy texture, tangible layers", + "negative_prompt": "2D, flat, thin paper, low stack, smooth texture, painting, drawing, photo, deformed" + }, + { + "name": "photo-alien", + "prompt": "alien-themed {prompt} . extraterrestrial, cosmic, otherworldly, mysterious, sci-fi, highly detailed", + "negative_prompt": "earthly, mundane, common, realistic, simple" + }, + { + "name": "photo-film noir", + "prompt": "film noir style {prompt} . monochrome, high contrast, dramatic shadows, 1940s style, mysterious, cinematic", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic, vibrant, colorful" + }, + { + "name": "photo-glamour", + "prompt": "glamorous photo {prompt} . high fashion, luxurious, extravagant, stylish, sensual, opulent, elegance, stunning beauty, professional, high contrast, detailed", + "negative_prompt": "ugly, deformed, noisy, blurry, distorted, grainy, sketch, low contrast, dull, plain, modest" + }, + { + "name": "photo-hdr", + "prompt": "HDR photo of {prompt} . High dynamic range, vivid, rich details, clear shadows and highlights, realistic, intense, enhanced contrast, highly detailed", + "negative_prompt": "flat, low contrast, oversaturated, underexposed, overexposed, blurred, noisy" + }, + { + "name": "photo-iphone photographic", + "prompt": "iphone photo {prompt} . large depth of field, deep depth of field, highly detailed", + "negative_prompt": "drawing, painting, crayon, sketch, graphite, impressionist, noisy, blurry, soft, deformed, ugly, shallow depth of field, bokeh" + }, + { + "name": "photo-long exposure", + "prompt": "long exposure photo of {prompt} . 
Blurred motion, streaks of light, surreal, dreamy, ghosting effect, highly detailed", + "negative_prompt": "static, noisy, deformed, shaky, abrupt, flat, low contrast" + }, + { + "name": "photo-neon noir", + "prompt": "neon noir {prompt} . cyberpunk, dark, rainy streets, neon signs, high contrast, low light, vibrant, highly detailed", + "negative_prompt": "bright, sunny, daytime, low contrast, black and white, sketch, watercolor" + }, + { + "name": "photo-silhouette", + "prompt": "silhouette style {prompt} . high contrast, minimalistic, black and white, stark, dramatic", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, color, realism, photorealistic" + }, + { + "name": "photo-tilt-shift", + "prompt": "tilt-shift photo of {prompt} . selective focus, miniature effect, blurred background, highly detailed, vibrant, perspective control", + "negative_prompt": "blurry, noisy, deformed, flat, low contrast, unrealistic, oversaturated, underexposed" + }, + { + "name": "cinematic-diva", + "prompt": "UHD, 8K, ultra detailed, a cinematic photograph of {prompt}, beautiful lighting, great composition", + "negative_prompt": "ugly, deformed, noisy, blurry, NSFW" + }, + { + "name": "Abstract Expressionism", + "prompt": "Abstract Expressionism Art, {prompt}, High contrast, minimalistic, colorful, stark, dramatic, expressionism", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic" + }, + { + "name": "Academia", + "prompt": "Academia, {prompt}, preppy Ivy League style, stark, dramatic, chic boarding school, academia", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, grunge, sloppy, unkempt" + }, + { + "name": "Action Figure", + "prompt": "Action Figure, {prompt}, plastic collectable action figure, collectable toy action figure", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Adorable 3D Character", + "prompt": "Adorable 3D Character, {prompt}, 3D render, adorable character, 3D art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, grunge, sloppy, unkempt, photograph, photo, realistic" + }, + { + "name": "Adorable Kawaii", + "prompt": "Adorable Kawaii, {prompt}, pretty, cute, adorable, kawaii", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, gothic, dark, moody, monochromatic" + }, + { + "name": "Art Deco", + "prompt": "Art Deco, {prompt}, sleek, geometric forms, art deco style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Art Nouveau", + "prompt": "Art Nouveau, beautiful art, {prompt}, sleek, organic forms, long, sinuous, art nouveau style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, industrial, mechanical" + }, + { + "name": "Astral Aura", + "prompt": "Astral Aura, {prompt}, astral, colorful aura, vibrant energy", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Avant-garde", + "prompt": "Avant-garde, {prompt}, unusual, experimental, avant-garde art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Baroque", + "prompt": "Baroque, {prompt}, dramatic, exuberant, grandeur, baroque art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Bauhaus-Style Poster", + "prompt": "Bauhaus-Style Poster, {prompt}, simple geometric shapes, clean lines, primary colors, Bauhaus-Style Poster", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Blueprint Schematic 
Drawing", + "prompt": "Blueprint Schematic Drawing, {prompt}, technical drawing, blueprint, schematic", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Caricature", + "prompt": "Caricature, {prompt}, exaggerated, comical, caricature", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realistic" + }, + { + "name": "Cel Shaded Art", + "prompt": "Cel Shaded Art, {prompt}, 2D, flat color, toon shading, cel shaded style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Character Design Sheet", + "prompt": "Character Design Sheet, {prompt}, character reference sheet, character turn around", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Classicism Art", + "prompt": "Classicism Art, {prompt}, inspired by Roman and Greek culture, clarity, harmonious, classicism art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Color Field Painting", + "prompt": "Color Field Painting, {prompt}, abstract, simple, geometic, color field painting style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Colored Pencil Art", + "prompt": "Colored Pencil Art, {prompt}, colored pencil strokes, light color, visible paper texture, colored pencil art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Conceptual Art", + "prompt": "Conceptual Art, {prompt}, concept art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Constructivism", + "prompt": "Constructivism Art, {prompt}, minimalistic, geometric forms, constructivism art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Cubism", + "prompt": "Cubism Art, {prompt}, flat geometric forms, cubism art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Dadaism", + "prompt": "Dadaism Art, {prompt}, satirical, nonsensical, dadaism art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Dark Fantasy", + "prompt": "Dark Fantasy Art, {prompt}, dark, moody, dark fantasy style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, bright, sunny" + }, + { + "name": "Dark Moody Atmosphere", + "prompt": "Dark Moody Atmosphere, {prompt}, dramatic, mysterious, dark moody atmosphere", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, vibrant, colorful, bright" + }, + { + "name": "DMT Art Style", + "prompt": "DMT Art Style, {prompt}, bright colors, surreal visuals, swirling patterns, DMT art style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Doodle Art", + "prompt": "Doodle Art Style, {prompt}, drawing, freeform, swirling patterns, doodle art style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Double Exposure", + "prompt": "Double Exposure Style, {prompt}, double image ghost effect, image combination, double exposure style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Dripping Paint Splatter Art", + "prompt": "Dripping Paint Splatter Art, {prompt}, dramatic, paint drips, splatters, dripping paint", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Expressionism", + "prompt": "Expressionism Art Style, {prompt}, movement, contrast, emotional, exaggerated forms, expressionism art style", + 
"negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Faded Polaroid Photo", + "prompt": "Faded Polaroid Photo, {prompt}, analog, old faded photo, old polaroid", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, vibrant, colorful" + }, + { + "name": "Fauvism", + "prompt": "Fauvism Art, {prompt}, painterly, bold colors, textured brushwork, fauvism art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Flat 2D Art", + "prompt": "Flat 2D Art, {prompt}, simple flat color, 2-dimensional, Flat 2D Art Style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, 3D, photo, realistic" + }, + { + "name": "Fortnite Art Style", + "prompt": "Fortnite Art Style, {prompt}, 3D cartoon, colorful, Fortnite Art Style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, photo, realistic" + }, + { + "name": "Futurism", + "prompt": "Futurism Art Style, {prompt}, dynamic, dramatic, Futurism Art Style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Glitchcore", + "prompt": "Glitchcore Art Style, {prompt}, dynamic, dramatic, distorted, vibrant colors, glitchcore art style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Glo-fi", + "prompt": "Glo-fi Art Style, {prompt}, dynamic, dramatic, vibrant colors, glo-fi art style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Googie Art Style", + "prompt": "Googie Art Style, {prompt}, dynamic, dramatic, 1950's futurism, bold boomerang angles, Googie art style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Graffiti Art", + "prompt": "Graffiti Art Style, {prompt}, dynamic, dramatic, vibrant colors, graffiti art style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Harlem Renaissance Art", + "prompt": "Harlem Renaissance Art Style, {prompt}, dynamic, dramatic, 1920s African American culture, Harlem Renaissance art style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "High Fashion", + "prompt": "High Fashion, {prompt}, dynamic, dramatic, haute couture, elegant, ornate clothing, High Fashion", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Idyllic", + "prompt": "Idyllic, {prompt}, peaceful, happy, pleasant, happy, harmonious, picturesque, charming", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Impressionism", + "prompt": "Impressionism, {prompt}, painterly, small brushstrokes, visible brushstrokes, impressionistic style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Infographic Drawing", + "prompt": "Infographic Drawing, {prompt}, diagram, infographic", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Ink Dripping Drawing", + "prompt": "Ink Dripping Drawing, {prompt}, ink drawing, dripping ink", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, colorful, vibrant" + }, + { + "name": "Japanese Ink Drawing", + "prompt": "Japanese Ink Drawing, {prompt}, ink drawing, inkwash, Japanese Ink Drawing", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, colorful, vibrant" + }, + { + "name": "Knolling Photography", + "prompt": "Knolling Photography, {prompt}, flat lay photography, object arrangment, knolling photography", + "negative_prompt": 
"ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Light Cheery Atmosphere", + "prompt": "Light Cheery Atmosphere, {prompt}, happy, joyful, cheerful, carefree, gleeful, lighthearted, pleasant atmosphere", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, monochromatic, dark, moody" + }, + { + "name": "Logo Design", + "prompt": "Logo Design, {prompt}, dynamic graphic art, vector art, minimalist, professional logo design", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Luxurious Elegance", + "prompt": "Luxurious Elegance, {prompt}, extravagant, ornate, designer, opulent, picturesque, lavish", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Macro Photography", + "prompt": "Macro Photography, {prompt}, close-up, macro 100mm, macro photography", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Mandola Art", + "prompt": "Mandola art style, {prompt}, complex, circular design, mandola", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Marker Drawing", + "prompt": "Marker Drawing, {prompt}, bold marker lines, visibile paper texture, marker drawing", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, photograph, realistic" + }, + { + "name": "Medievalism", + "prompt": "Medievalism, {prompt}, inspired by The Middle Ages, medieval art, elaborate patterns and decoration, Medievalism", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Minimalism", + "prompt": "Minimalism, {prompt}, abstract, simple geometic shapes, hard edges, sleek contours, Minimalism", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Neo-Baroque", + "prompt": "Neo-Baroque, {prompt}, ornate and elaborate, dynaimc, Neo-Baroque", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Neo-Byzantine", + "prompt": "Neo-Byzantine, {prompt}, grand decorative religious style, Orthodox Christian inspired, Neo-Byzantine", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Neo-Futurism", + "prompt": "Neo-Futurism, {prompt}, high-tech, curves, spirals, flowing lines, idealistic future, Neo-Futurism", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Neo-Impressionism", + "prompt": "Neo-Impressionism, {prompt}, tiny dabs of color, Pointillism, painterly, Neo-Impressionism", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, photograph, realistic" + }, + { + "name": "Neo-Rococo", + "prompt": "Neo-Rococo, {prompt}, curved forms, naturalistic ornamentation, elaborate, decorative, gaudy, Neo-Rococo", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Neoclassicism", + "prompt": "Neoclassicism, {prompt}, ancient Rome and Greece inspired, idealic, sober colors, Neoclassicism", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Op Art", + "prompt": "Op Art, {prompt}, optical illusion, abstract, geometric pattern, impression of movement, Op Art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Ornate and Intricate", + "prompt": "Ornate and Intricate, {prompt}, decorative, highly detailed, elaborate, ornate, intricate", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Pencil Sketch Drawing", + "prompt": "Pencil Sketch 
Drawing, {prompt}, black and white drawing, graphite drawing", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Pop Art 2", + "prompt": "Pop Art, {prompt}, vivid colors, flat color, 2D, strong lines, Pop Art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, photo, realistic" + }, + { + "name": "Rococo", + "prompt": "Rococo, {prompt}, flamboyant, pastel colors, curved lines, elaborate detail, Rococo", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Silhouette Art", + "prompt": "Silhouette Art, {prompt}, high contrast, well defined, Silhouette Art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Simple Vector Art", + "prompt": "Simple Vector Art, {prompt}, 2D flat, simple shapes, minimalistic, professional graphic, flat color, high contrast, Simple Vector Art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, 3D, photo, realistic" + }, + { + "name": "Sketchup", + "prompt": "Sketchup, {prompt}, CAD, professional design, Sketchup", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, photo, photograph" + }, + { + "name": "Steampunk 2", + "prompt": "Steampunk, {prompt}, retrofuturistic science fantasy, steam-powered tech, vintage industry, gears, neo-victorian, steampunk", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Surrealism", + "prompt": "Surrealism, {prompt}, expressive, dramatic, organic lines and forms, dreamlike and mysterious, Surrealism", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realistic" + }, + { + "name": "Suprematism", + "prompt": "Suprematism, {prompt}, abstract, limited color palette, geometric forms, Suprematism", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realistic" + }, + { + "name": "Terragen", + "prompt": "Terragen, {prompt}, beautiful massive landscape, epic scenery, Terragen", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Tranquil Relaxing Atmosphere", + "prompt": "Tranquil Relaxing Atmosphere, {prompt}, calming style, soothing colors, peaceful, idealic, Tranquil Relaxing Atmosphere", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, oversaturated" + }, + { + "name": "Sticker Designs", + "prompt": "Vector Art Stickers, {prompt}, professional vector design, sticker designs, Sticker Sheet", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Vibrant Rim Light", + "prompt": "Vibrant Rim Light, {prompt}, bright rim light, high contrast, bold edge light", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Volumetric Lighting", + "prompt": "Volumetric Lighting, {prompt}, light depth, dramatic atmospheric lighting, Volumetric Lighting", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Watercolor 2", + "prompt": "Watercolor style painting, {prompt}, visible paper texture, colorwash, watercolor", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, photo, realistic" + }, + { + "name": "Whimsical and Playful", + "prompt": "Whimsical and Playful, {prompt}, imaginative, fantastical, bight colors, stylized, happy, Whimsical and Playful", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, drab, boring, moody" + }, + { + "name": "MK Chromolithography", + "prompt": "Chromolithograph {prompt}. 
Vibrant colors, intricate details, rich color saturation, meticulous registration, multi-layered printing, decorative elements, historical charm, artistic reproductions, commercial posters, nostalgic, ornate compositions.", + "negative_prompt": "monochromatic, simple designs, limited color palette, imprecise registration, minimalistic, modern aesthetic, digital appearance." + }, + { + "name": "MK Cross Processing Print", + "prompt": "Cross processing print {prompt}. Experimental color shifts, unconventional tonalities, vibrant and surreal hues, heightened contrasts, unpredictable results, artistic unpredictability, retro and vintage feel, dynamic color interplay, abstract and dreamlike.", + "negative_prompt": "predictable color tones, traditional processing, realistic color representation, subdued contrasts, standard photographic aesthetics." + }, + { + "name": "MK Dufaycolor Photograph", + "prompt": "Dufaycolor photograph {prompt}. Vintage color palette, distinctive color rendering, soft and dreamy atmosphere, historical charm, unique color process, grainy texture, evocative mood, nostalgic aesthetic, hand-tinted appearance, artistic patina.", + "negative_prompt": "modern color reproduction, hyperrealistic tones, sharp and clear details, digital precision, contemporary aesthetic." + }, + { + "name": "MK Herbarium", + "prompt": "Herbarium drawing{prompt}. Botanical accuracy, old botanical book illustration, detailed illustrations, pressed plants, delicate and precise linework, scientific documentation, meticulous presentation, educational purpose, organic compositions, timeless aesthetic, naturalistic beauty.", + "negative_prompt": "abstract representation, vibrant colors, artistic interpretation, chaotic compositions, fantastical elements, digital appearance." + }, + { + "name": "MK Punk Collage", + "prompt": "punk collage style {prompt} . mixed media, papercut,textured paper, overlapping, ripped posters, safety pins, chaotic layers, graffiti-style elements, anarchy symbols, vintage photos, cut-and-paste aesthetic, bold typography, distorted images, political messages, urban decay, distressed textures, newspaper clippings, spray paint, rebellious icons, DIY spirit, vivid colors, punk band logos, edgy and raw compositions, ", + "negative_prompt": "conventional,blurry, noisy, low contrast" + }, + { + "name": "MK mosaic", + "prompt": "mosaic style {prompt} . fragmented, assembled, colorful, highly detailed", + "negative_prompt": "whole, unbroken, monochrome" + }, + { + "name": "MK Van Gogh", + "prompt": "Oil painting by Van Gogh {prompt} . Expressive, impasto, swirling brushwork, vibrant, brush strokes, Brushstroke-heavy, Textured, Impasto, Colorful, Dynamic, Bold, Distinctive, Vibrant, Whirling, Expressive, Dramatic, Swirling, Layered, Intense, Contrastive, Atmospheric, Luminous, Textural, Evocative, SpiraledVan Gogh style", + "negative_prompt": "realistic, photorealistic, calm, straight lines, signature, frame, text, watermark" + }, + { + "name": "MK Coloring Book", + "prompt": "centered black and white high contrast line drawing, coloring book style,{prompt} . monochrome, blank white background", + "negative_prompt": "greyscale, gradients,shadows,shadow, colored, Red, Blue, Yellow, Green, Orange, Purple, Pink, Brown, Gray, Beige, Turquoise, Lavender, Cyan, Magenta, Olive, Indigo, black background" + }, + { + "name": "MK Singer Sargent", + "prompt": "Oil painting by John Singer Sargent {prompt}. 
Elegant, refined, masterful technique,realistic portrayal, subtle play of light, captivating expression, rich details, harmonious colors, skillful composition, brush strokes, chiaroscuro.", + "negative_prompt": "realistic, photorealistic, abstract, overly stylized, excessive contrasts, distorted,bright colors,disorder." + }, + { + "name": "MK Pollock", + "prompt": "Oil painting by Jackson Pollock {prompt}. Abstract expressionism, drip painting, chaotic composition, energetic, spontaneous, unconventional technique, dynamic, bold, distinctive, vibrant, intense, expressive, energetic, layered, non-representational, gestural.", + "negative_prompt": "(realistic:1.5), (photorealistic:1.5), representational, calm, ordered composition, precise lines, detailed forms, subdued colors, quiet, static, traditional, figurative." + }, + { + "name": "MK Basquiat", + "prompt": "Artwork by Jean-Michel Basquiat {prompt}. Neo-expressionism, street art influence, graffiti-inspired, raw, energetic, bold colors, dynamic composition, chaotic, layered, textural, expressive, spontaneous, distinctive, symbolic,energetic brushstrokes.", + "negative_prompt": "(realistic:1.5), (photorealistic:1.5), calm, precise lines, conventional composition, subdued" + }, + { + "name": "MK Andy Warhol", + "prompt": "Artwork in the style of Andy Warhol {prompt}. Pop art, vibrant colors, bold compositions, repetition of iconic imagery, celebrity culture, commercial aesthetics, mass production influence, stylized simplicity, cultural commentary, graphical elements, distinctive portraits.", + "negative_prompt": "subdued colors, realistic, lack of repetition, minimalistic." + }, + { + "name": "MK Halftone print", + "prompt": "Halftone print of {prompt}. Dot matrix pattern, grayscale tones, vintage aesthetic, newspaper print vibe, stylized dots, visual texture, black and white contrasts, retro appearance, artistic pointillism,pop culture, (Roy Lichtenstein style:1.5).", + "negative_prompt": "smooth gradients, continuous tones, vibrant colors." + }, + { + "name": "MK Gond Painting", + "prompt": "Gond painting {prompt}. Intricate patterns, vibrant colors, detailed motifs, nature-inspired themes, tribal folklore, fine lines, intricate detailing, storytelling compositions, mystical and folkloric, cultural richness.", + "negative_prompt": "monochromatic, abstract shapes, minimalistic." + }, + { + "name": "MK Albumen Print", + "prompt": "Albumen print {prompt}. Sepia tones, fine details, subtle tonal gradations, delicate highlights, vintage aesthetic, soft and muted atmosphere, historical charm, rich textures, meticulous craftsmanship, classic photographic technique, vignetting.", + "negative_prompt": "vibrant colors, high contrast, modern, digital appearance, sharp details, contemporary style." + }, + { + "name": "MK Aquatint Print", + "prompt": "Aquatint print {prompt}. Soft tonal gradations, atmospheric effects, velvety textures, rich contrasts, fine details, etching process, delicate lines, nuanced shading, expressive and moody atmosphere, artistic depth.", + "negative_prompt": "sharp contrasts, bold lines, minimalistic." + }, + { + "name": "MK Anthotype Print", + "prompt": "Anthotype print {prompt}. Monochrome dye, soft and muted colors, organic textures, ephemeral and delicate appearance, low details, watercolor canvas, low contrast, overexposed, silhouette, textured paper.", + "negative_prompt": "vibrant synthetic dyes, bold and saturated colors." 
+ }, + { + "name": "MK Inuit Carving", + "prompt": "A sculpture made of ivory, {prompt} made of . Sculptures, Inuit art style, intricate carvings, natural materials, storytelling motifs, arctic wildlife themes, symbolic representations, cultural traditions, earthy tones, harmonious compositions, spiritual and mythological elements.", + "negative_prompt": "abstract, vibrant colors." + }, + { + "name": "MK Bromoil Print", + "prompt": "Bromoil print {prompt}. Painterly effects, sepia tones, textured surfaces, rich contrasts, expressive brushwork, tonal variations, vintage aesthetic, atmospheric mood, handmade quality, artistic experimentation, darkroom craftsmanship, vignetting.", + "negative_prompt": "smooth surfaces, minimal brushwork, contemporary digital appearance." + }, + { + "name": "MK Calotype Print", + "prompt": "Calotype print {prompt}. Soft focus, subtle tonal range, paper negative process, fine details, vintage aesthetic, artistic experimentation, atmospheric mood, early photographic charm, handmade quality, vignetting.", + "negative_prompt": "sharp focus, bold contrasts, modern aesthetic, digital photography." + }, + { + "name": "MK Color Sketchnote", + "prompt": "Color sketchnote {prompt}. Hand-drawn elements, vibrant colors, visual hierarchy, playful illustrations, varied typography, graphic icons, organic and dynamic layout, personalized touches, creative expression, engaging storytelling.", + "negative_prompt": "monochromatic, geometric layout." + }, + { + "name": "MK Cibulak Porcelain", + "prompt": "A sculpture made of blue pattern porcelain of {prompt}. Classic design, blue and white color scheme, intricate detailing, floral motifs, onion-shaped elements, historical charm, rococo, white ware, cobalt blue, underglaze pattern, fine craftsmanship, traditional elegance, delicate patterns, vintage aesthetic, Meissen, Blue Onion pattern, Cibulak.", + "negative_prompt": "tea, teapot, cup, teacup,bright colors, bold and modern design, absence of intricate detailing, lack of floral motifs, non-traditional shapes." + }, + { + "name": "MK Alcohol Ink Art", + "prompt": "Alcohol ink art {prompt}. Fluid and vibrant colors, unpredictable patterns, organic textures, translucent layers, abstract compositions, ethereal and dreamy effects, free-flowing movement, expressive brushstrokes, contemporary aesthetic, wet textured paper.", + "negative_prompt": "monochromatic, controlled patterns." + }, + { + "name": "MK One Line Art", + "prompt": "One line art {prompt}. Continuous and unbroken black line, minimalistic, simplicity, economical use of space, flowing and dynamic, symbolic representations, contemporary aesthetic, evocative and abstract, white background.", + "negative_prompt": "disjointed lines, complexity, complex detailing." + }, + { + "name": "MK Blacklight Paint", + "prompt": "Blacklight paint {prompt}. Fluorescent pigments, vibrant and surreal colors, ethereal glow, otherworldly effects, dynamic and psychedelic compositions, neon aesthetics, transformative in ultraviolet light, contemporary and experimental.", + "negative_prompt": "muted colors, traditional and realistic compositions." + }, + { + "name": "MK Carnival Glass", + "prompt": "A sculpture made of Carnival glass, {prompt}. 
Iridescent surfaces, vibrant colors, intricate patterns, opalescent hues, reflective and prismatic effects, Art Nouveau and Art Deco influences, vintage charm, intricate detailing, lustrous and luminous appearance, Carnival Glass style.", + "negative_prompt": "non-iridescent surfaces, muted colors, absence of intricate patterns, lack of opalescent hues, modern and minimalist aesthetic." + }, + { + "name": "MK Cyanotype Print", + "prompt": "Cyanotype print {prompt}. Prussian blue tones, distinctive coloration, high contrast, blueprint aesthetics, atmospheric mood, sun-exposed paper, silhouette effects, delicate details, historical charm, handmade and experimental quality.", + "negative_prompt": "vibrant colors, low contrast, modern and polished appearance." + }, + { + "name": "MK Cross-Stitching", + "prompt": "Cross-stitching {prompt}. Intricate patterns, embroidery thread, sewing, fine details, precise stitches, textile artistry, symmetrical designs, varied color palette, traditional and contemporary motifs, handmade and crafted,canvas, nostalgic charm.", + "negative_prompt": "paper, paint, ink, photography." + }, + { + "name": "MK Encaustic Paint", + "prompt": "Encaustic paint {prompt}. Textured surfaces, translucent layers, luminous quality, wax medium, rich color saturation, fluid and organic shapes, contemporary and historical influences, mixed media elements, atmospheric depth.", + "negative_prompt": "flat surfaces, opaque layers, lack of wax medium, muted color palette, absence of textured surfaces, non-mixed media." + }, + { + "name": "MK Embroidery", + "prompt": "Embroidery {prompt}. Intricate stitching, embroidery thread, fine details, varied thread textures, textile artistry, embellished surfaces, diverse color palette, traditional and contemporary motifs, handmade and crafted, tactile and ornate.", + "negative_prompt": "minimalist, monochromatic." + }, + { + "name": "MK Gyotaku", + "prompt": "Gyotaku {prompt}. Fish impressions, realistic details, ink rubbings, textured surfaces, traditional Japanese art form, nature-inspired compositions, artistic representation of marine life, black and white contrasts, cultural significance.", + "negative_prompt": "photography." + }, + { + "name": "MK Luminogram", + "prompt": "Luminogram {prompt}. Photogram technique, ethereal and abstract effects, light and shadow interplay, luminous quality, experimental process, direct light exposure, unique and unpredictable results, artistic experimentation.", + "negative_prompt": "" + }, + { + "name": "MK Lite Brite Art", + "prompt": "Lite Brite art {prompt}. Luminous and colorful designs, pixelated compositions, retro aesthetic, glowing effects, creative patterns, interactive and playful, nostalgic charm, vibrant and dynamic arrangements.", + "negative_prompt": "monochromatic." + }, + { + "name": "MK Mokume-gane", + "prompt": "Mokume-gane {prompt}. Wood-grain patterns, mixed metal layers, intricate and organic designs, traditional Japanese metalwork, harmonious color combinations, artisanal craftsmanship, unique and layered textures, cultural and historical significance.", + "negative_prompt": "uniform metal surfaces." + }, + { + "name": "Pebble Art", + "prompt": "a sculpture made of peebles, {prompt}. 
Pebble art style,natural materials, textured surfaces, balanced compositions, organic forms, harmonious arrangements, tactile and 3D effects, beach-inspired aesthetic, creative storytelling, artisanal craftsmanship.", + "negative_prompt": "non-natural materials, lack of textured surfaces, imbalanced compositions, absence of organic forms, non-tactile appearance." + }, + { + "name": "MK Palekh", + "prompt": "Palekh art {prompt}. Miniature paintings, intricate details, vivid colors, folkloric themes, lacquer finish, storytelling compositions, symbolic elements, Russian folklore influence, cultural and historical significance.", + "negative_prompt": "large-scale paintings." + }, + { + "name": "MK Suminagashi", + "prompt": "Suminagashi {prompt}. Floating ink patterns, marbled effects, delicate and ethereal designs, water-based ink, fluid and unpredictable compositions, meditative process, monochromatic or subtle color palette, Japanese artistic tradition.", + "negative_prompt": "vibrant and bold color palette." + }, + { + "name": "MK Scrimshaw", + "prompt": "A Scrimshaw engraving of {prompt}. Intricate engravings on a spermwhale's teeth, marine motifs, detailed scenes, nautical themes, black and white contrasts, historical craftsmanship, artisanal carving, storytelling compositions, maritime heritage.", + "negative_prompt": "colorful, modern." + }, + { + "name": "MK Shibori", + "prompt": "Shibori {prompt}. Textured fabric, intricate patterns, resist-dyeing technique, indigo or vibrant colors, organic and flowing designs, Japanese textile art, cultural tradition, tactile and visual interest.", + "negative_prompt": "monochromatic." + }, + { + "name": "MK Vitreous Enamel", + "prompt": "A sculpture made of Vitreous enamel {prompt}. Smooth and glossy surfaces, vibrant colors, glass-like finish, durable and resilient, intricate detailing, traditional and contemporary applications, artistic craftsmanship, jewelry and decorative objects, , Vitreous enamel, colored glass.", + "negative_prompt": "rough surfaces, muted colors." + }, + { + "name": "MK Ukiyo-e", + "prompt": "Ukiyo-e {prompt}. Woodblock prints, vibrant colors, intricate details, depictions of landscapes, kabuki actors, beautiful women, cultural scenes, traditional Japanese art, artistic craftsmanship, historical significance.", + "negative_prompt": "absence of woodblock prints, muted colors, lack of intricate details, non-traditional Japanese themes, absence of cultural scenes." + }, + { + "name": "MK vintage-airline-poster", + "prompt": "vintage airline poster {prompt} . classic aviation fonts, pastel colors, elegant aircraft illustrations, scenic destinations, distressed textures, retro travel allure", + "negative_prompt": "modern fonts, bold colors, hyper-realistic, sleek design" + }, + { + "name": "MK vintage-travel-poster", + "prompt": "vintage travel poster {prompt} . retro fonts, muted colors, scenic illustrations, iconic landmarks, distressed textures, nostalgic vibes", + "negative_prompt": "modern fonts, vibrant colors, hyper-realistic, sleek design" + }, + { + "name": "MK bauhaus-style", + "prompt": "Bauhaus-inspired {prompt} . minimalism, geometric precision, primary colors, sans-serif typography, asymmetry, functional design", + "negative_prompt": "ornate, intricate, excessive detail, complex patterns, serif typography" + }, + { + "name": "MK afrofuturism", + "prompt": "Afrofuturism illustration {prompt} . 
vibrant colors, futuristic elements, cultural symbolism, cosmic imagery, dynamic patterns, empowering narratives", + "negative_prompt": "monochromatic" + }, + { + "name": "MK atompunk", + "prompt": "Atompunk illustration, {prompt} . retro-futuristic, atomic age aesthetics, sleek lines, metallic textures, futuristic technology, optimism, energy", + "negative_prompt": "organic, natural textures, rustic, dystopian" + }, + { + "name": "MK constructivism", + "prompt": "Constructivism {prompt} . geometric abstraction, bold colors, industrial aesthetics, dynamic compositions, utilitarian design, revolutionary spirit", + "negative_prompt": "organic shapes, muted colors, ornate elements, traditional" + }, + { + "name": "MK chicano-art", + "prompt": "Chicano art {prompt} . bold colors, cultural symbolism, muralism, lowrider aesthetics, barrio life, political messages, social activism, Mexico", + "negative_prompt": "monochromatic, minimalist, mainstream aesthetics" + }, + { + "name": "MK de-stijl", + "prompt": "De Stijl Art {prompt} . neoplasticism, primary colors, geometric abstraction, horizontal and vertical lines, simplicity, harmony, utopian ideals", + "negative_prompt": "complex patterns, muted colors, ornate elements, asymmetry" + }, + { + "name": "MK dayak-art", + "prompt": "Dayak art sculpture of {prompt} . intricate patterns, nature-inspired motifs, vibrant colors, traditional craftsmanship, cultural symbolism, storytelling", + "negative_prompt": "minimalist, monochromatic, modern" + }, + { + "name": "MK fayum-portrait", + "prompt": "Fayum portrait {prompt} . encaustic painting, realistic facial features, warm earth tones, serene expressions, ancient Egyptian influences", + "negative_prompt": "abstract, vibrant colors, exaggerated features, modern" + }, + { + "name": "MK illuminated-manuscript", + "prompt": "Illuminated manuscript {prompt} . intricate calligraphy, rich colors, detailed illustrations, gold leaf accents, ornate borders, religious, historical, medieval", + "negative_prompt": "modern typography, minimalist design, monochromatic, abstract themes" + }, + { + "name": "MK kalighat-painting", + "prompt": "Kalighat painting {prompt} . bold lines, vibrant colors, narrative storytelling, cultural motifs, flat compositions, expressive characters", + "negative_prompt": "subdued colors, intricate details, realistic portrayal, modern aesthetics" + }, + { + "name": "MK madhubani-painting", + "prompt": "Madhubani painting {prompt} . intricate patterns, vibrant colors, nature-inspired motifs, cultural storytelling, symmetry, folk art aesthetics", + "negative_prompt": "abstract, muted colors, minimalistic design, modern aesthetics" + }, + { + "name": "MK pictorialism", + "prompt": "Pictorialism illustration {prompt} . soft focus, atmospheric effects, artistic interpretation, tonality, muted colors, evocative storytelling", + "negative_prompt": "sharp focus, high contrast, realistic depiction, vivid colors" + }, + { + "name": "MK pichwai-painting", + "prompt": "Pichwai painting {prompt} . intricate detailing, vibrant colors, religious themes, nature motifs, devotional storytelling, gold leaf accents", + "negative_prompt": "minimalist, subdued colors, abstract design" + }, + { + "name": "MK patachitra-painting", + "prompt": "Patachitra painting {prompt} . 
bold outlines, vibrant colors, intricate detailing, mythological themes, storytelling, traditional craftsmanship", + "negative_prompt": "subdued colors, minimalistic, abstract, modern aesthetics" + }, + { + "name": "MK samoan-art-inspired", + "prompt": "Samoan art-inspired wooden sculpture {prompt} . traditional motifs, natural elements, bold colors, cultural symbolism, storytelling, craftsmanship", + "negative_prompt": "modern aesthetics, minimalist, abstract" + }, + { + "name": "MK tlingit-art", + "prompt": "Tlingit art {prompt} . formline design, natural elements, animal motifs, bold colors, cultural storytelling, traditional craftsmanship, Alaska traditional art, (totem:1.5)", + "negative_prompt": "" + }, + { + "name": "MK adnate-style", + "prompt": "Painting by Adnate {prompt} . realistic portraits, street art, large-scale murals, subdued color palette, social narratives", + "negative_prompt": "abstract, vibrant colors, small-scale art" + }, + { + "name": "MK ron-english-style", + "prompt": "Painting by Ron English {prompt} . pop-surrealism, cultural subversion, iconic mash-ups, vibrant and bold colors, satirical commentary", + "negative_prompt": "traditional, monochromatic" + }, + { + "name": "MK shepard-fairey-style", + "prompt": "Painting by Shepard Fairey {prompt} . street art, political activism, iconic stencils, bold typography, high contrast, red, black, and white color palette", + "negative_prompt": "traditional, muted colors" + } +] \ No newline at end of file diff --git a/ComfyUI-Easy-Use/resources/fooocus_styles_cn.json b/ComfyUI-Easy-Use/resources/fooocus_styles_cn.json new file mode 100644 index 0000000000000000000000000000000000000000..20554273408622512c6b569438ca0d4beecc9614 --- /dev/null +++ b/ComfyUI-Easy-Use/resources/fooocus_styles_cn.json @@ -0,0 +1,279 @@ +{ + "Fooocus V2": "Fooocus V2扩展词", + "Default (Slightly Cinematic)": "默认(轻微的电影感)", + "Fooocus Enhance": "Fooocus-优化增强", + "Fooocus Cinematic": "Fooocus-电影感", + "Fooocus Sharp": "Fooocus-锐化", + "Fooocus Masterpiece": "Fooocus-杰作", + "Fooocus Photograph": "Fooocus-照片", + "Fooocus Negative": "Fooocus-反向提示词", + "SAI 3D Model": "SAI-3D模型", + "SAI Analog Film": "SAI-模拟电影", + "SAI Anime": "SAI-动漫", + "SAI Cinematic": "SAI-电影片段", + "SAI Comic Book": "SAI-漫画", + "SAI Craft Clay": "SAI-工艺粘土", + "SAI Digital Art": "SAI-数字艺术", + "SAI Enhance": "SAI-增强", + "SAI Fantasy Art": "SAI-奇幻艺术", + "SAI Isometric": "SAI-等距风格", + "SAI Line Art": "SAI-线条艺术", + "SAI Lowpoly": "SAI-低多边形", + "SAI Neonpunk": "SAI-霓虹朋克", + "SAI Origami": "SAI-折纸", + "SAI Photographic": "SAI-摄影", + "SAI Pixel Art": "SAI-像素艺术", + "SAI Texture": "SAI-纹理", + "MRE Cinematic Dynamic": "MRE-史诗电影", + "MRE Spontaneous Picture": "MRE-自然的抓拍照片", + "MRE Artistic Vision": "MRE-艺术视觉", + "MRE Dark Dream": "MRE-黑暗梦境", + "MRE Gloomy Art": "MRE-阴郁艺术", + "MRE Bad Dream": "MRE-噩梦", + "MRE Underground": "MRE-阴森地下", + "MRE Surreal Painting": "MRE-超现实主义绘画", + "MRE Dynamic Illustration": "MRE-动态插画", + "MRE Undead Art": "MRE-遗忘艺术家作品", + "MRE Elemental Art": "MRE-元素艺术", + "MRE Space Art": "MRE-空间艺术", + "MRE Ancient Illustration": "MRE-古代插图", + "MRE Brave Art": "MRE-勇敢艺术", + "MRE Heroic Fantasy": "MRE-英雄幻想", + "MRE Dark Cyberpunk": "MRE-黑暗赛博朋克", + "MRE Lyrical Geometry": "MRE-抒情几何抽象画", + "MRE Sumi E Symbolic": "MRE-墨绘长笔画", + "MRE Sumi E Detailed": "MRE-精细墨绘画", + "MRE Manga": "MRE-日本漫画", + "MRE Anime": "MRE-日本动画片", + "MRE Comic": "MRE-成人漫画书插画", + "Ads Advertising": "广告-广告", + "Ads Automotive": "广告-汽车", + "Ads Corporate": "广告-企业品牌", + "Ads Fashion Editorial": "广告-时尚编辑", + "Ads Food 
Photography": "广告-食品摄影", + "Ads Gourmet Food Photography": "广告-顶级美食摄影", + "Ads Luxury": "广告-奢侈品", + "Ads Real Estate": "广告-房地产", + "Ads Retail": "广告-零售", + "Artstyle Abstract": "艺术风格-抽象", + "Artstyle Abstract Expressionism": "艺术风格-抽象表现主义", + "Artstyle Art Deco": "艺术风格-装饰艺术", + "Artstyle Art Nouveau": "艺术风格-新艺术", + "Artstyle Constructivist": "艺术风格-构造主义", + "Artstyle Cubist": "艺术风格-立体主义", + "Artstyle Expressionist": "艺术风格-表现主义", + "Artstyle Graffiti": "艺术风格-涂鸦", + "Artstyle Hyperrealism": "艺术风格-超写实主义", + "Artstyle Impressionist": "艺术风格-印象派", + "Artstyle Pointillism": "艺术风格-点彩派", + "Artstyle Pop Art": "艺术风格-波普艺术", + "Artstyle Psychedelic": "艺术风格-迷幻", + "Artstyle Renaissance": "艺术风格-文艺复兴", + "Artstyle Steampunk": "艺术风格-蒸汽朋克", + "Artstyle Surrealist": "艺术风格-超现实主义", + "Artstyle Typography": "艺术风格-字体设计", + "Artstyle Watercolor": "艺术风格-水彩", + "Futuristic Biomechanical": "未来主义-生物机械", + "Futuristic Biomechanical Cyberpunk": "未来主义-生物机械-赛博朋克", + "Futuristic Cybernetic": "未来主义-人机融合", + "Futuristic Cybernetic Robot": "未来主义-人机融合-机器人", + "Futuristic Cyberpunk Cityscape": "未来主义-赛博朋克城市", + "Futuristic Futuristic": "未来主义-未来主义", + "Futuristic Retro Cyberpunk": "未来主义-复古赛博朋克", + "Futuristic Retro Futurism": "未来主义-复古未来主义", + "Futuristic Sci Fi": "未来主义-科幻", + "Futuristic Vaporwave": "未来主义-蒸汽波", + "Game Bubble Bobble": "游戏-泡泡龙", + "Game Cyberpunk Game": "游戏-赛博朋克游戏", + "Game Fighting Game": "游戏-格斗游戏", + "Game Gta": "游戏-侠盗猎车手", + "Game Mario": "游戏-马里奥", + "Game Minecraft": "游戏-我的世界", + "Game Pokemon": "游戏-宝可梦", + "Game Retro Arcade": "游戏-复古街机", + "Game Retro Game": "游戏-复古游戏", + "Game Rpg Fantasy Game": "游戏-角色扮演幻想游戏", + "Game Strategy Game": "游戏-策略游戏", + "Game Streetfighter": "游戏-街头霸王", + "Game Zelda": "游戏-塞尔达传说", + "Misc Architectural": "其他-建筑", + "Misc Disco": "其他-迪斯科", + "Misc Dreamscape": "其他-梦境", + "Misc Dystopian": "其他-反乌托邦", + "Misc Fairy Tale": "其他-童话故事", + "Misc Gothic": "其他-哥特风", + "Misc Grunge": "其他-垮掉的", + "Misc Horror": "其他-恐怖", + "Misc Kawaii": "其他-可爱", + "Misc Lovecraftian": "其他-洛夫克拉夫特", + "Misc Macabre": "其他-恐怖", + "Misc Manga": "其他-漫画", + "Misc Metropolis": "其他-大都市", + "Misc Minimalist": "其他-极简主义", + "Misc Monochrome": "其他-单色", + "Misc Nautical": "其他-航海", + "Misc Space": "其他-太空", + "Misc Stained Glass": "其他-彩色玻璃", + "Misc Techwear Fashion": "其他-科技时尚", + "Misc Tribal": "其他-部落", + "Misc Zentangle": "其他-禅绕画", + "Papercraft Collage": "手工艺-拼贴", + "Papercraft Flat Papercut": "手工艺-平面剪纸", + "Papercraft Kirigami": "手工艺-切纸", + "Papercraft Paper Mache": "手工艺-纸浆塑造", + "Papercraft Paper Quilling": "手工艺-纸艺卷轴", + "Papercraft Papercut Collage": "手工艺-剪纸拼贴", + "Papercraft Papercut Shadow Box": "手工艺-剪纸影箱", + "Papercraft Stacked Papercut": "手工艺-层叠剪纸", + "Papercraft Thick Layered Papercut": "手工艺-厚层剪纸", + "Photo Alien": "摄影-外星人", + "Photo Film Noir": "摄影-黑色电影", + "Photo Glamour": "摄影-魅力", + "Photo Hdr": "摄影-高动态范围", + "Photo Iphone Photographic": "摄影-苹果手机摄影", + "Photo Long Exposure": "摄影-长曝光", + "Photo Neon Noir": "摄影-霓虹黑色", + "Photo Silhouette": "摄影-轮廓", + "Photo Tilt Shift": "摄影-移轴", + "Cinematic Diva": "电影女主角", + "Abstract Expressionism": "抽象表现主义", + "Academia": "学术", + "Action Figure": "动作人偶", + "Adorable 3D Character": "可爱的3D角色", + "Adorable Kawaii": "可爱的卡哇伊", + "Art Deco": "装饰艺术", + "Art Nouveau": "新艺术,美丽艺术", + "Astral Aura": "星体光环", + "Avant Garde": "前卫", + "Baroque": "巴洛克", + "Bauhaus Style Poster": "包豪斯风格海报", + "Blueprint Schematic Drawing": "蓝图示意图", + "Caricature": "漫画", + "Cel Shaded Art": "卡通渲染", + "Character Design Sheet": "角色设计表", + "Classicism Art": "古典主义艺术", + "Color Field Painting": "色彩领域绘画", + "Colored 
Pencil Art": "彩色铅笔艺术", + "Conceptual Art": "概念艺术", + "Constructivism": "建构主义", + "Cubism": "立体主义", + "Dadaism": "达达主义", + "Dark Fantasy": "黑暗奇幻", + "Dark Moody Atmosphere": "黑暗忧郁气氛", + "Dmt Art Style": "迷幻艺术风格", + "Doodle Art": "涂鸦艺术", + "Double Exposure": "双重曝光", + "Dripping Paint Splatter Art": "滴漆飞溅艺术", + "Expressionism": "表现主义", + "Faded Polaroid Photo": "褪色的宝丽来照片", + "Fauvism": "野兽派", + "Flat 2d Art": "平面 2D 艺术", + "Fortnite Art Style": "堡垒之夜艺术风格", + "Futurism": "未来派", + "Glitchcore": "故障核心", + "Glo Fi": "光明高保真", + "Googie Art Style": "古吉艺术风格", + "Graffiti Art": "涂鸦艺术", + "Harlem Renaissance Art": "哈莱姆文艺复兴艺术", + "High Fashion": "高级时装", + "Idyllic": "田园诗般", + "Impressionism": "印象派", + "Infographic Drawing": "信息图表绘图", + "Ink Dripping Drawing": "滴墨绘画", + "Japanese Ink Drawing": "日式水墨画", + "Knolling Photography": "规律摆放摄影", + "Light Cheery Atmosphere": "轻松愉快的气氛", + "Logo Design": "标志设计", + "Luxurious Elegance": "奢华优雅", + "Macro Photography": "微距摄影", + "Mandola Art": "曼陀罗艺术", + "Marker Drawing": "马克笔绘图", + "Medievalism": "中世纪主义", + "Minimalism": "极简主义", + "Neo Baroque": "新巴洛克", + "Neo Byzantine": "新拜占庭", + "Neo Futurism": "新未来派", + "Neo Impressionism": "新印象派", + "Neo Rococo": "新洛可可", + "Neoclassicism": "新古典主义", + "Op Art": "欧普艺术", + "Ornate And Intricate": "华丽而复杂", + "Pencil Sketch Drawing": "铅笔素描", + "Pop Art 2": "流行艺术2", + "Rococo": "洛可可", + "Silhouette Art": "剪影艺术", + "Simple Vector Art": "简单矢量艺术", + "Sketchup": "草图", + "Steampunk 2": "赛博朋克2", + "Surrealism": "超现实主义", + "Suprematism": "至上主义", + "Terragen": "地表风景", + "Tranquil Relaxing Atmosphere": "宁静轻松的氛围", + "Sticker Designs": "贴纸设计", + "Vibrant Rim Light": "生动的边缘光", + "Volumetric Lighting": "体积照明", + "Watercolor 2": "水彩2", + "Whimsical And Playful": "异想天开、俏皮", + "Mk Chromolithography": "MK 色彩版画", + "Mk Cross Processing Print": "MK 交叉过程打印", + "Mk Dufaycolor Photograph": "MK 杜法色彩照片", + "Mk Herbarium": "MK 植物标本馆", + "Mk Punk Collage": "MK 朋克拼贴画", + "Mk Mosaic": "MK 镶嵌图", + "Mk Van Gogh": "MK 梵高", + "Mk Coloring Book": "MK 色彩书", + "Mk Singer Sargent": "MK 辛格 · 萨尔生特", + "Mk Pollock": "MK 波洛克", + "Mk Basquiat": "MK 巴斯奎特", + "Mk Andy Warhol": "MK 安迪 · 沃霍尔", + "Mk Halftone Print": "MK 半色版画", + "Mk Gond Painting": "MK 贡德绘画", + "Mk Albumen Print": "MK 白蛋清印刷", + "Mk Aquatint Print": "MK 水蚀刻印刷", + "Mk Anthotype Print": "MK 花纹版画", + "Mk Inuit Carving": "MK 因纽特雕塑", + "Mk Bromoil Print": "MK 溴油印刷", + "Mk Calotype Print": "MK 卡洛雅图印刷", + "Mk Color Sketchnote": "MK色彩素描笔记", + "Mk Cibulak Porcelain": "MK 西布拉瓷器", + "Mk Alcohol Ink Art": "MK 酒精水彩艺术", + "Mk One Line Art": "MK 一线画", + "Mk Blacklight Paint": "MK 黑光油漆", + "Mk Carnival Glass": "MK 嘉年华玻璃", + "Mk Cyanotype Print": "MK 青色版画", + "Mk Cross Stitching": "MK 交叉针织", + "Mk Encaustic Paint": "MK 蜡漆", + "Mk Embroidery": "MK 刺绣", + "Mk Gyotaku": "MK 鱼拓版画", + "Mk Luminogram": "MK 光感影像", + "Mk Lite Brite Art": "MK 彩色灯泡艺术", + "Mk Mokume Gane": "MK 木金工艺", + "Pebble Art": "MK 鹅卵石艺术", + "Mk Palekh": "MK 帕列赫", + "Mk Suminagashi": "MK 澄洗画", + "Mk Scrimshaw": "MK 丝线绣", + "Mk Shibori": "MK 湿布雕版印刷", + "Mk Vitreous Enamel": "MK 玻璃珐琅", + "Mk Ukiyo E": "MK 浮世绘", + "Mk Vintage Airline Poster": "MK 古董航空公司海报", + "Mk Vintage Travel Poster": "MK 古董旅行海报", + "Mk Bauhaus Style": "Mk 包豪斯风格", + "Mk Afrofuturism": "Mk 非洲未来主义", + "Mk Atompunk": "Mk 原子朋克", + "Mk Constructivism": "Mk 构成派", + "Mk Chicano Art": "Mk 西班牙裔美国艺术", + "Mk De Stijl": "Mk 去风格派", + "Mk Dayak Art": "Mk 达雅克艺术", + "Mk Fayum Portrait": "Mk 法尤姆肖像画", + "Mk Illuminated Manuscript": "Mk 彩绘手稿", + "Mk Kalighat Painting": "Mk 卡利加特绘画", + "Mk Madhubani Painting": "Mk 马杜班尼绘画", 
+ "Mk Pictorialism": "Mk 描绘主义", + "Mk Pichwai Painting": "Mk 皮奇瓦伊绘画", + "Mk Patachitra Painting": "Mk 帕塔基特拉绘画", + "Mk Samoan Art Inspired": "Mk 萨莫亚艺术启发的", + "Mk Tlingit Art": "Mk 特林吉特艺术", + "Mk Adnate Style": "Mk 阿达内特风格", + "Mk Ron English Style": "Mk 罗恩英国风格", + "Mk Shepard Fairey Style": "Mk 舒帕德 · 费尔利风格" +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/resources/mmb-preset.custom.txt.example b/ComfyUI-Easy-Use/resources/mmb-preset.custom.txt.example new file mode 100644 index 0000000000000000000000000000000000000000..eb113a50dd7a1cac37be92e6af5dc1b9e78fceee --- /dev/null +++ b/ComfyUI-Easy-Use/resources/mmb-preset.custom.txt.example @@ -0,0 +1,8 @@ +MMB-ALL:1,1,1 +MMB-IN:1,0,0 +MMB-MID:0,1,0 +MMB-OUT:0,0,1 +MMB-INMID:1,1,0 +MMB-INOUT:1,0,1 +MMB-MIDOUT:0,1,1 +MMB-NONE:0,0,0 \ No newline at end of file diff --git a/ComfyUI-Easy-Use/resources/mmb-preset.txt b/ComfyUI-Easy-Use/resources/mmb-preset.txt new file mode 100644 index 0000000000000000000000000000000000000000..7403808c1102962a01c9cfe7231c22fe57eafe74 --- /dev/null +++ b/ComfyUI-Easy-Use/resources/mmb-preset.txt @@ -0,0 +1,9 @@ +MMB-ALL:1,1,1 +MMB-IN:1,0,0 +MMB-MID:0,1,0 +MMB-OUT:0,0,1 +MMB-INMID:1,1,0 +MMB-INOUT:1,0,1 +MMB-MIDOUT:0,1,1 +MMB-NONE:0,0,0 +@MMBN-FULL-TEST:27 diff --git a/ComfyUI-Easy-Use/resources/portrait_prompt.json b/ComfyUI-Easy-Use/resources/portrait_prompt.json new file mode 100644 index 0000000000000000000000000000000000000000..7c77a392bd7169c85f712ac1b9fd9479bcf4591d --- /dev/null +++ b/ComfyUI-Easy-Use/resources/portrait_prompt.json @@ -0,0 +1,407 @@ +{ + "beard_list": [ + "Stubble Beard", + "Goatee", + "Full Beard", + "Van Dyke Beard", + "Soul Patch", + "Garibaldi Beard", + "Mutton Chops", + "Circle Beard", + "Corporate Beard", + "Balbo Beard", + "Ducktail Beard", + "Chinstrap Beard", + "Anchor Beard", + "Chevron Mustache", + "Horseshoe Mustache", + "Handlebar Mustache", + "Imperial Mustache", + "Pencil Mustache", + "Friendly Mutton Chops", + "Zappa Mustache" + ], + "body_type_list": [ + "Underweight", + "Normal weight", + "Overweight", + "Obese" + ], + "eyes_color_list": [ + "Brown", + "Blue", + "Green", + "Hazel", + "Gray", + "Amber", + "Red", + "Violet" + ], + "face_expression_list": [ + "Happy", + "Sad", + "Angry", + "Surprised", + "Fearful", + "Disgusted", + "Contemptuous", + "Excited", + "Nervous", + "Confused", + "Amused", + "Content", + "Disappointed", + "Bored", + "Relieved", + "In love", + "Shy", + "Envious", + "Proud", + "Cautious", + "Serious", + "Serene", + "Peaceful", + "Calm" + ], + "face_shape_list": [ + "Oval", + "Round", + "Square", + "Heart-shaped", + "Long", + "Rectangle", + "Triangle", + "Inverted Triangle", + "Pear-shaped", + "Oblong", + "Square Round", + "Square Oval" + ], + "gender_list": [ + "Man", + "Woman" + ], + "hair_color_list": [ + "Black", + "Brown", + "Blonde", + "Red", + "Auburn", + "Chestnut", + "Gray", + "White", + "Salt and pepper" + ], + "hair_style_list": [ + "Asymmetrical cut", + "Blunt cut", + "Bob cut", + "Braided bob", + "Buzz cut", + "Choppy cut", + "Curly bob", + "Curtain bangs", + "Faux hawk", + "Feathered cut", + "French bob", + "Layered cut", + "Long bob", + "Mohawk", + "Pixie cut", + "Shag cut", + "Side-swept bangs", + "Textured cut", + "Undercut", + "Wavy bob", + "Faux hawk short pixie", + "Brave short haircut with shaved sides", + "Tapered haricut wuth shaved side", + "Stacked bob", + "Lemonade braids", + "Middle part ponytails", + "Stitch braids", + "Deep side part", + "French braids", + "Box braids", + "Two dutch braids", + "Wavy cut with curtains 
bangs", + "Right side shaved", + "Sweeping pixie", + "Smooth lob", + "Long pixie", + "Sideswept pixie", + "Italian bob", + "Shullet" + ], + "light_direction_list": [ + "top", + "bottom", + "right", + "left", + "front", + "rear", + "top-right", + "top-left", + "bottom-right", + "bottom-left" + ], + "light_type_list": [ + "Natural sunlight", + "Soft ambient light", + "Harsh sunlight", + "Overcast sky", + "Sunset glow", + "Sunrise warmth", + "Twilight hues", + "Candlelight", + "Incandescent lighting", + "Fluorescent lighting", + "Moonlight", + "Dappled sunlight", + "Backlit silhouette", + "Spotlight", + "Rim lighting", + "Firelight", + "City streetlights", + "Studio lighting", + "Lantern light", + "Tungsten lighting", + "Cloudy day diffused light", + "Skylight", + "Golden hour light", + "Blue hour light", + "Flash photography", + "Stage lighting", + "Neon lights", + "Torchlight", + "Softbox lighting", + "Rim light", + "Lightning", + "Abstract light patterns" + ], + "model_pose_list": [ + "Power Pose", + "Walking Pose", + "The Over-the-Shoulder Look", + "S-curve Pose", + "Sitting Pose", + "Close-Up Beauty Shot Pose", + "Leaning Pose", + "Arms Up Pose", + "Casual Stroll Pose", + "Headshot Pose", + "Sitting Cross-Legged Pose", + "Back Arch Pose", + "Hand-on-Hip Pose", + "Gazing into the Distance Pose", + "Candid Laugh Pose", + "Dynamic Action Pose", + "Contrapposto Pose", + "High Fashion Pose" + ], + "nationality_list": [ + "Afghan", + "Albanian", + "Algerian", + "Andorran", + "Angolan", + "Antiguans Barbudans", + "Argentine", + "Armenian", + "Australian", + "Austrian", + "Azerbaijani", + "Bahamian", + "Bahraini", + "Bangladeshi", + "Barbadian", + "Belarusian", + "Belgian", + "Belizean", + "Beninese", + "Bhutanese", + "Bolivian", + "Bosnian Herzegovinian", + "Brazilian", + "British", + "Bruneian", + "Bulgarian", + "Burkinabe", + "Burundian", + "Cambodian", + "Cameroonian", + "Canadian", + "Cape Verdian", + "Central African", + "Chadian", + "Chilean", + "Chinese", + "Colombian", + "Comoran", + "Congolese", + "Costa Rican", + "Croatian", + "Cuban", + "Cypriot", + "Czech", + "Danish", + "Djibouti", + "Dominican", + "Dutch", + "East Timorese", + "Ecuadorean", + "Egyptian", + "Emirian", + "Equatorial Guinean", + "Eritrean", + "Estonian", + "Ethiopian", + "Fijian", + "Filipino", + "Finnish", + "French", + "Gabonese", + "Gambian", + "Georgian", + "German", + "Ghanaian", + "Greek", + "Grenadian", + "Guatemalan", + "Guinean", + "Guyanese", + "Haitian", + "Herzegovinian", + "Honduran", + "Hungarian", + "Icelander", + "Indian", + "Indonesian", + "Iranian", + "Iraqi", + "Irish", + "Israeli", + "Italian", + "Ivorian", + "Jamaican", + "Japanese", + "Jordanian", + "Kazakhstani", + "Kenyan", + "Kiribati", + "North Korean", + "South Korean", + "Kuwaiti", + "Kyrgyz", + "Laotian", + "Latvian", + "Lebanese", + "Liberian", + "Libyan", + "Liechtensteiner", + "Lithuanian", + "Luxembourgish", + "Macedonian", + "Malagasy", + "Malawian", + "Malaysian", + "Maldivan", + "Malian", + "Maltese", + "Marshallese", + "Mauritanian", + "Mauritian", + "Mexican", + "Micronesian", + "Moldovan", + "Monegasque", + "Mongolian", + "Montenegrin", + "Moroccan", + "Mosotho", + "Motswana", + "Mozambican", + "Namibian", + "Nauruan", + "Nepalese", + "New Zealander", + "Ni-Vanuatu", + "Nicaraguan", + "Nigerian", + "Nigerien", + "North Korean", + "Northern Irish", + "Norwegian", + "Omani", + "Pakistani", + "Palauan", + "Palestinian", + "Panamanian", + "Papua New Guinean", + "Paraguayan", + "Peruvian", + "Polish", + "Portuguese", + "Qatari", + 
"Romanian", + "Russian", + "Rwandan", + "Saint Lucian", + "Salvadoran", + "Samoan", + "San Marinese", + "Sao Tomean", + "Saudi", + "Scottish", + "Senegalese", + "Serbian", + "Seychellois", + "Sierra Leonean", + "Singaporean", + "Slovakian", + "Slovenian", + "Solomon Islander", + "Somali", + "South African", + "South Korean", + "South Sudanese", + "Spanish", + "Sri Lankan", + "Sudanese", + "Surinamer", + "Swazi", + "Swedish", + "Swiss", + "Syrian", + "Tajikistani", + "Tanzanian", + "Thai", + "Togolese", + "Tongan", + "Trinidadian Tobagonian", + "Tunisian", + "Turkish", + "Turkmen", + "Tuvaluan", + "Ugandan", + "Ukrainian", + "Uruguayan", + "Uzbekistani", + "Venezuelan", + "Vietnamese", + "Welsh", + "Yemeni", + "Zambian", + "Zimbabwean" + ], + "shot_list": [ + "Head portrait", + "Head and shoulders portrait", + "Half-length portrait", + "Full-length portrait", + "Face", + "Portrait", + "Full body", + "Close-up" + ] +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/css/account.css b/ComfyUI-Easy-Use/web/css/account.css new file mode 100644 index 0000000000000000000000000000000000000000..626aa07c42783ce5377421f3207c4db105957b4e --- /dev/null +++ b/ComfyUI-Easy-Use/web/css/account.css @@ -0,0 +1,93 @@ +.easyuse-account{ + +} +.easyuse-account-user{ + font-size: 10px; + color:var(--descrip-text); + text-align: center; +} +.easyuse-account-user-info{ + display: flex; + justify-content: space-between; + align-items: center; + padding-bottom:10px; + cursor: pointer; +} +.easyuse-account-user-info .user{ + display: flex; + align-items: center; +} +.easyuse-account-user-info .edit{ + padding:5px 10px; + background: var(--comfy-menu-bg); + border-radius:4px; +} +.easyuse-account-user-info:hover{ + filter:brightness(110%); +} +.easyuse-account-user-info h5{ + margin:0; + font-size: 10px; + text-align: left; +} +.easyuse-account-user-info h6{ + margin:0; + font-size: 8px; + text-align: left; + font-weight: 300; +} +.easyuse-account-user-info .remark{ + margin-top: 4px; +} +.easyuse-account-user-info .avatar{ + width: 36px; + height: 36px; + background: var(--comfy-input-bg); + border-radius: 50%; + margin-right: 5px; + display: flex; + justify-content: center; + align-items: center; + font-size: 16px; + overflow: hidden; +} +.easyuse-account-user-info .avatar img{ + width: 100%; + height: 100%; +} +.easyuse-account-dialog{ + width: 600px; +} +.easyuse-account-dialog-main a, .easyuse-account-dialog-main a:visited{ + font-weight: 400; + color: var(--theme-color-light); +} +.easyuse-account-dialog-item{ + display: flex; + justify-content: flex-start; + align-items: center; + padding: 10px 0; + border-bottom: 1px solid var(--border-color); +} +.easyuse-account-dialog-item input{ + padding:5px; + margin-right:5px; +} +.easyuse-account-dialog-item input.key{ + flex:1; +} +.easyuse-account-dialog-item button{ + cursor: pointer; + margin-left:5px!important; + padding:5px!important; + font-size: 16px!important; +} +.easyuse-account-dialog-item button:hover{ + filter:brightness(120%); +} +.easyuse-account-dialog-item button.choose { + background: var(--theme-color); +} +.easyuse-account-dialog-item button.delete{ + background: var(--error-color); +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/css/chooser.css b/ComfyUI-Easy-Use/web/css/chooser.css new file mode 100644 index 0000000000000000000000000000000000000000..0c1696bf36fcbe2a5b1b0e83ea00897be97bf391 --- /dev/null +++ b/ComfyUI-Easy-Use/web/css/chooser.css @@ -0,0 +1,35 @@ +.easyuse-chooser-dialog{ + max-width: 600px; +} 
+.easyuse-chooser-dialog-title{ + font-size: 18px; + font-weight: 700; + text-align: center; + color:var(--input-text); + margin:0; +} +.easyuse-chooser-dialog-images{ + margin-top:10px; + display: flex; + flex-wrap: wrap; + width: 100%; + box-sizing: border-box; +} +.easyuse-chooser-dialog-images img{ + width: 50%; + height: auto; + cursor: pointer; + box-sizing: border-box; + filter:brightness(80%); +} +.easyuse-chooser-dialog-images img:hover{ + filter:brightness(100%); +} +.easyuse-chooser-dialog-images img.selected{ + border: 4px solid var(--success-color); +} + +.easyuse-chooser-hidden{ + display: none; + height:0; +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/css/contextmenu.css b/ComfyUI-Easy-Use/web/css/contextmenu.css new file mode 100644 index 0000000000000000000000000000000000000000..9fc0e9583b19394cf73259ead2cba816eac3e351 --- /dev/null +++ b/ComfyUI-Easy-Use/web/css/contextmenu.css @@ -0,0 +1,20 @@ +.easyuse-model{ + position:relative; +} +.easyuse-model:hover img{ + display: block; + opacity: 1; +} +.easyuse-model img{ + position: absolute; + z-index:1; + right:-155px; + top:0; + width:150px; + height:auto; + display: none; + filter:brightness(70%); + -webkit-filter: brightness(70%); + opacity: 0; + transition:all 0.5s ease-in-out; +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/css/dropdown.css b/ComfyUI-Easy-Use/web/css/dropdown.css new file mode 100644 index 0000000000000000000000000000000000000000..176ff618ff050358844309bb344511758dcae6a5 --- /dev/null +++ b/ComfyUI-Easy-Use/web/css/dropdown.css @@ -0,0 +1,68 @@ +.easy-dropdown, .easy-nested-dropdown { + position: relative; + box-sizing: border-box; + background-color: #171717; + box-shadow: 0 4px 4px rgba(255, 255, 255, .25); + padding: 0; + margin: 0; + list-style: none; + z-index: 1000; + overflow: visible; + max-height: fit-content; + max-width: fit-content; +} + +.easy-dropdown { + position: absolute; + border-radius: 0; +} + +/* Style for final items */ +.easy-dropdown li.item, .easy-nested-dropdown li.item { + font-weight: normal; + min-width: max-content; +} + +/* Style for folders (parent items) */ +.easy-dropdown li.folder, .easy-nested-dropdown li.folder { + cursor: default; + position: relative; + border-right: 3px solid cyan; +} + +.easy-dropdown li.folder::after, .easy-nested-dropdown li.folder::after { + content: ">"; + position: absolute; + right: 2px; + font-weight: normal; +} + +.easy-dropdown li, .easy-nested-dropdown li { + padding: 4px 10px; + cursor: pointer; + font-family: system-ui; + font-size: 0.7rem; + position: relative; +} + +/* Style for nested dropdowns */ +.easy-nested-dropdown { + position: absolute; + top: 0; + left: 100%; + margin: 0; + border: none; + display: none; +} + +.easy-dropdown li.selected > .easy-nested-dropdown, +.easy-nested-dropdown li.selected > .easy-nested-dropdown { + display: block; + border: none; +} + +.easy-dropdown li.selected, +.easy-nested-dropdown li.selected { + background-color: #e5e5e5; + border: none; +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/css/easy.css b/ComfyUI-Easy-Use/web/css/easy.css new file mode 100644 index 0000000000000000000000000000000000000000..60dece20688a5ca5731b3fdf82b27ceb3854a10b --- /dev/null +++ b/ComfyUI-Easy-Use/web/css/easy.css @@ -0,0 +1,127 @@ + +.pysssss-workflow-popup{ + min-width:220px!important; + /*right:0px!important;*/ + /*left:auto!important;*/ +} +body{ + font-family: var(--font-family)!important; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: 
grayscale; +} +textarea{ + font-family: var(--font-family)!important; +} + +.comfy-multiline-input{ + background-color: transparent; + border:1px solid var(--border-color); + border-radius:8px; + padding: 8px; + font-size: 12px; +} +.comfy-modal { + border:1px solid var(--border-color); + box-shadow:none; + backdrop-filter: blur(8px) brightness(120%); +} +.comfy-menu{ + border-radius:16px; + box-shadow:0 0 1px var(--descrip-text); + backdrop-filter: blur(8px) brightness(120%); +} +.comfy-menu button,.comfy-modal button { + font-size: 14px; + padding:4px 0; + margin-bottom:4px; +} +.comfy-menu button.comfy-settings-btn{ + font-size: 12px; +} +.comfy-menu-btns { + margin-bottom: 4px; +} +.comfy-menu-btns button,.comfy-list-actions button{ + font-size: 10px; +} +.comfy-menu > button, +.comfy-menu-btns button, +.comfy-menu .comfy-list button, +.comfy-modal button { + border-width:1px; +} + + +dialog{ + border:1px solid var(--border-color); + background:transparent; + backdrop-filter: blur(8px) brightness(120%); + box-shadow:none; +} +.cm-title{ + background-color:transparent!important; +} +.cm-notice-board{ + border-radius:10px!important; + border:1px solid var(--border-color)!important; +} +.cm-menu-container{ + margin-bottom:50px!important; +} +hr{ + border:1px solid var(--border-color); +} +#comfy-dev-save-api-button{ + justify-content: center; +} +#shareButton{ + background:linear-gradient(to left,var(--theme-color),var(--theme-color-light))!important; + color:white!important; +} +#queue-button{ + position:relative; + overflow:hidden; + min-height:30px; + z-index:1; +} + +#queue-button:after{ + clear: both; + content:attr(data-attr); + background:green; + color:#FFF; + width:var(--process-bar-width); + height:100%; + position:absolute; + top:0; + left:0; + z-index:0; + text-align:center; + display:flex; + justify-content:center; + align-items:center; +} + +.litegraph .litemenu-entry.has_submenu { + border-right: 2px solid var(--theme-color); +} +::-webkit-scrollbar { + width: 0em; +} +::-webkit-scrollbar-track { + background-color: transparent; +} +::-webkit-scrollbar-thumb { + background-color: transparent; + border-radius: 2px; +} +::-webkit-scrollbar-thumb:hover { + background-color: transparent; +} + +[data-theme="dark"] .workspace_manager .chakra-card{ + background-color:var(--comfy-menu-bg)!important; +} +.workspace_manager .chakra-card{ + width: 400px; +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/css/groupmap.css b/ComfyUI-Easy-Use/web/css/groupmap.css new file mode 100644 index 0000000000000000000000000000000000000000..75aa8679c80bbc80e06ee174d4d570fb9f1b6844 --- /dev/null +++ b/ComfyUI-Easy-Use/web/css/groupmap.css @@ -0,0 +1,34 @@ +#easyuse_groups_map{ + flex-direction: column; + align-items: end; + display:flex;position: absolute; + top: 50px; left: 10px; width: 180px; + border-radius:12px; + min-height:100px; + max-height:400px; + color: var(--descrip-text); + background-color: var(--comfy-menu-bg); + padding: 10px 4px; + border: 1px solid var(--border-color); + z-index: 399; + padding-top: 0; +} +#easyuse_groups_map .icon{ + width: 12px; + height:12px; +} +#easyuse_groups_map .closeBtn{ + float: right; + color: var(--input-text); + border-radius:30px; + background-color: var(--comfy-input-bg); + border: 1px solid var(--border-color); + cursor: pointer; + aspect-ratio: 1 / 1; + display: flex; + justify-content: center; + align-items: center; +} +#easyuse_groups_map .closeBtn:hover{ + filter:brightness(120%); +} \ No newline at end of file diff --git 
a/ComfyUI-Easy-Use/web/css/index.css b/ComfyUI-Easy-Use/web/css/index.css new file mode 100644 index 0000000000000000000000000000000000000000..5c148a2153dc91441e6b9581b1d6794ea2159161 --- /dev/null +++ b/ComfyUI-Easy-Use/web/css/index.css @@ -0,0 +1,11 @@ +@import "theme.css"; +@import "dropdown.css"; +@import "selector.css"; +@import "groupmap.css"; +@import "contextmenu.css"; +@import "modelinfo.css"; +@import "toast.css"; +@import "account.css"; +@import "chooser.css"; +@import "toolbar.css"; +@import "sliderControl.css"; \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/css/modelinfo.css b/ComfyUI-Easy-Use/web/css/modelinfo.css new file mode 100644 index 0000000000000000000000000000000000000000..370c21f9a1c0625ce4d88780ceb1abc9e9c1e25f --- /dev/null +++ b/ComfyUI-Easy-Use/web/css/modelinfo.css @@ -0,0 +1,265 @@ +.easyuse-model-info { + color: white; + max-width: 90vw; + font-family: var(--font-family); +} +.easyuse-model-content { + display: flex; + flex-direction: column; + overflow: hidden; +} +.easyuse-model-header{ + margin:0 0 15px 0; +} +.easyuse-model-header-remark{ + display: flex; + align-items: center; + margin-top:5px; +} +.easyuse-model-info h2 { + text-align: left; + margin:0; +} +.easyuse-model-info h5 { + text-align: left; + margin:0 15px 0 0px; + font-weight: 400; + color:var(--descrip-text); +} +.easyuse-model-info p { + margin: 5px 0; +} +.easyuse-model-info a { + color: var(--theme-color-light); +} +.easyuse-model-info a:hover { + text-decoration: underline; +} +.easyuse-model-tags-list { + display: flex; + flex-wrap: wrap; + list-style: none; + gap: 10px; + max-height: 200px; + overflow: auto; + margin: 10px 0; + padding: 0; +} +.easyuse-model-tag { + background-color: var(--comfy-input-bg); + border: 2px solid var(--border-color); + color: var(--input-text); + display: flex; + align-items: center; + gap: 5px; + border-radius: 5px; + padding: 2px 5px; + cursor: pointer; +} +.easyuse-model-tag--selected span::before { + content: "✅"; + position: absolute; + background-color: var(--theme-color-light); + left: 0; + top: 0; + right: 0; + bottom: 0; + text-align: center; +} +.easyuse-model-tag:hover { + border: 2px solid var(--theme-color-light); +} +.easyuse-model-tag p { + margin: 0; +} +.easyuse-model-tag span { + text-align: center; + border-radius: 5px; + background-color: var(--theme-color-light); + padding: 2px; + position: relative; + min-width: 20px; + overflow: hidden; + color: #fff; +} + +.easyuse-model-metadata .comfy-modal-content { + max-width: 100%; +} +.easyuse-model-metadata label { + margin-right: 1ch; + color: #ccc; +} + +.easyuse-model-metadata span { + color: var(--theme-color-light); +} + +.easyuse-preview { + max-width:660px; + margin-right: 15px; + position: relative; +} +.easyuse-preview-group{ + position: relative; + overflow: hidden; + border-radius:.5rem; + width: 660px; +} +.easyuse-preview-list{ + display: flex; + flex-wrap: nowrap; + width: 100%; + transition: all .5s ease-in-out; +} +.easyuse-preview-list.no-transition{ + transition: none; +} +.easyuse-preview-slide{ + display: flex; + flex-basis: calc(50% - 5px); + flex-grow: 0; + flex-shrink: 0; + position: relative; + justify-content: center; + align-items: center; + padding-right:5px; + padding-left:0; +} +.easyuse-preview-slide:nth-child(even){ + padding-left:5px; + padding-right:0; +} +.easyuse-preview-slide-content{ + position: relative; + min-height:150px; + width: 100%; +} +.easyuse-preview-slide-content .save{ + position: absolute; + right: 6px; + z-index: 12; + 
bottom: 6px; + display: flex; + align-items: center; + height: 26px; + padding: 0 9px; + color: var(--input-text); + font-size: 12px; + line-height: 26px; + background: rgba(0, 0, 0, .5); + border-radius: 13px; + cursor: pointer; + min-width:80px; + text-align: center; +} +.easyuse-preview-slide-content .save:hover{ + filter: brightness(120%); + will-change: auto; +} + +.easyuse-preview-slide-content img { + border-radius: 14px; + object-position: center center; + max-width: 100%; + max-height:700px; + border-style: none; + vertical-align: middle; +} +.easyuse-preview button { + position: absolute; + z-index:10; + top: 50%; + display: flex; + align-items: center; + justify-content: center; + width:30px; + height:30px; + border-radius:15px; + border:1px solid rgba(66, 63, 78, .15); + background-color: rgba(66, 63, 78, .5); + color:hsla(0, 0%, 100%, .8); + transition-property: color, background-color, border-color, text-decoration-color, fill, stroke; + transition-timing-function: cubic-bezier(.4,0,.2,1); + transition-duration: .15s; + transform: translateY(-50%); +} +.easyuse-preview button.left{ + left:10px; +} +.easyuse-preview button.right{ + right:10px; +} + +.easyuse-model-detail{ + margin-top: 16px; + overflow: hidden; + border: 1px solid var(--border-color); + border-radius: 8px; + width:300px; +} +.easyuse-model-detail-head{ + height: 40px; + padding: 0 10px; + font-weight: 500; + font-size: 14px; + font-style: normal; + line-height: 40px; +} +.easyuse-model-detail-body{ + box-sizing: border-box; + font-size: 12px; +} +.easyuse-model-detail-item{ + display: flex; + justify-content: flex-start; + border-top: 1px solid var(--border-color); +} +.easyuse-model-detail-item-label{ + flex-shrink: 0; + width: 88px; + padding-top: 5px; + padding-bottom: 5px; + padding-left: 10px; + border-right: 1px solid var(--border-color); + color: var(--input-text); + font-weight: 400; +} +.easyuse-model-detail-item-value{ + display: flex; + flex-wrap: wrap; + padding: 5px 10px 5px 10px; + color: var(--input-text); +} +.easyuse-model-detail-textarea{ + border-top:1px solid var(--border-color); + padding:10px; + height:100px; + overflow-y: auto; + font-size: 12px; +} +.easyuse-model-detail-textarea textarea{ + width:100%; + height:100%; + border:0; + background-color:transparent; + color: var(--input-text); +} +.easyuse-model-detail-textarea textarea::placeholder{ + color:var(--descrip-text); +} +.easyuse-model-detail-textarea.empty{ + display: flex; + justify-content: center; + align-items: center; + color: var(--descrip-text); +} + +.easyuse-model-notes { + background-color: rgba(0, 0, 0, 0.25); + padding: 5px; + margin-top: 5px; +} +.easyuse-model-notes:empty { + display: none; +} diff --git a/ComfyUI-Easy-Use/web/css/selector.css b/ComfyUI-Easy-Use/web/css/selector.css new file mode 100644 index 0000000000000000000000000000000000000000..4ecce1e4f7ebca4c207bd55b87c150d2061bd9bd --- /dev/null +++ b/ComfyUI-Easy-Use/web/css/selector.css @@ -0,0 +1,108 @@ +.easyuse-prompt-styles{ + overflow: auto; +} +.easyuse-prompt-styles .tools{ + display:flex; + justify-content:space-between; + height:30px; + padding-bottom:10px; + border-bottom:2px solid var(--border-color); +} +.easyuse-prompt-styles .tools button.delete{ + height:30px; + border-radius: 8px; + border: 2px solid var(--border-color); + font-size:11px; + background:var(--comfy-input-bg); + color:var(--error-text); + box-shadow:none; + cursor:pointer; +} +.easyuse-prompt-styles .tools button.delete:hover{ + filter: brightness(1.2); +} 
+.easyuse-prompt-styles .tools textarea.search{ + flex:1; + margin-left:10px; + height:20px; + line-height:20px; + border-radius: 8px; + border: 2px solid var(--border-color); + font-size:11px; + background:var(--comfy-input-bg); + color:var(--input-text); + box-shadow:none; + padding:4px 10px; + outline: none; + resize: none; + appearance:none; +} +.easyuse-prompt-styles-list{ + list-style: none; + padding: 0; + margin: 0; + min-height: 150px; + height: calc(100% - 40px); + overflow: auto; + /*display: flex;*/ + /*flex-wrap: wrap;*/ +} +.easyuse-prompt-styles-list.no-top{ + height: auto; +} + +.easyuse-prompt-styles-tag{ + display: inline-block; + vertical-align: middle; + margin-top: 8px; + margin-right: 8px; + padding:4px; + color: var(--input-text); + background-color: var(--comfy-input-bg); + border-radius: 8px; + border: 2px solid var(--border-color); + font-size:11px; + cursor:pointer; +} +.easyuse-prompt-styles-tag.hide{ + display:none; +} +.easyuse-prompt-styles-tag:hover{ + filter: brightness(1.2); +} +.easyuse-prompt-styles-tag input{ + --ring-color: transparent; + position: relative; + box-shadow: none; + border: 2px solid var(--border-color); + border-radius: 2px; + background: linear-gradient(135deg, var(--comfy-menu-bg) 0%, var(--comfy-input-bg) 60%); +} +.easyuse-prompt-styles-tag input[type=checkbox]:checked{ + border: 1px solid var(--theme-color-light); + background-color: var(--theme-color-light); + background-image: url("data:image/svg+xml,%3csvg viewBox='0 0 16 16' fill='white' xmlns='http://www.w3.org/2000/svg'%3e%3cpath d='M12.207 4.793a1 1 0 010 1.414l-5 5a1 1 0 01-1.414 0l-2-2a1 1 0 011.414-1.414L6.5 9.086l4.293-4.293a1 1 0 011.414 0z'/%3e%3c/svg%3e"); +} +.easyuse-prompt-styles-tag input[type=checkbox]{ + color-adjust: exact; + display: inline-block; + flex-shrink: 0; + vertical-align: middle; + appearance: none; + border: 2px solid var(--border-color); + background-origin: border-box; + padding: 0; + width: 1rem; + height: 1rem; + border-radius:4px; + color:var(--theme-color-light); + user-select: none; +} +.easyuse-prompt-styles-tag span{ + margin:0 4px; + vertical-align: middle; +} +#show_image_id{ + width:128px; + height:128px; +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/css/sliderControl.css b/ComfyUI-Easy-Use/web/css/sliderControl.css new file mode 100644 index 0000000000000000000000000000000000000000..42e749b45422289c02be96ab3f17ceb33ca9a9cd --- /dev/null +++ b/ComfyUI-Easy-Use/web/css/sliderControl.css @@ -0,0 +1,66 @@ +.easyuse-slider{ + width:100%; + height:100%; + display: flex; + flex-direction: row; + justify-content: space-between; + position: relative; +} +.easyuse-slider-item{ + height: inherit; + min-width: 25px; + justify-content: center; + display: flex; + flex-direction: column; + align-items: center; +} +.easyuse-slider-item.positive .easyuse-slider-item-label{ + color: var(--success-color); +} +.easyuse-slider-item.negative .easyuse-slider-item-label{ + color: var(--error-color); +} +.easyuse-slider-item-input{ + height:15px; + font-size: 10px; + color: var(--input-text); +} +.easyuse-slider-item-label{ + height:15px; + border: none; + color: var(--descrip-text); + font-size: 8px; +} +.easyuse-slider-item-scroll { + width: 5px; + height: calc(100% - 30px); + background: var(--comfy-input-bg); + border-radius: 10px; + position: relative; +} +.easyuse-slider-item-bar{ + width: 10px; + height: 10px; + background: linear-gradient(to bottom, var(--input-text), var(--descrip-text)); + border-radius:100%; + box-shadow: 0 2px 
10px var(--bg-color); + position: absolute; + top: 0; + left:-2.5px; + cursor: pointer; + z-index:1; +} +.easyuse-slider-item-area{ + width: 100%; + border-radius:20px; + position: absolute; + bottom: 0; + background: var(--input-text); + z-index:0; +} +.easyuse-slider-item.positive .easyuse-slider-item-area{ + background: var(--success-color); +} +.easyuse-slider-item.negative .easyuse-slider-item-area{ + background: var(--error-color); +} diff --git a/ComfyUI-Easy-Use/web/css/theme.css b/ComfyUI-Easy-Use/web/css/theme.css new file mode 100644 index 0000000000000000000000000000000000000000..d2754c8f1ba54e020d971ffb46cbcab3ec537f36 --- /dev/null +++ b/ComfyUI-Easy-Use/web/css/theme.css @@ -0,0 +1,10 @@ +:root { + /*--theme-color:#3f3eed;*/ + /*--theme-color-light: #008ecb;*/ + --theme-color:#236692; + --theme-color-light: #3485bb; + --success-color: #52c41a; + --error-color: #ff4d4f; + --warning-color: #faad14; + --font-family: Inter, -apple-system, BlinkMacSystemFont, Helvetica Neue, sans-serif; +} diff --git a/ComfyUI-Easy-Use/web/css/toast.css b/ComfyUI-Easy-Use/web/css/toast.css new file mode 100644 index 0000000000000000000000000000000000000000..b0166fb4d2c7e5c54773128be7d200260b1cb89a --- /dev/null +++ b/ComfyUI-Easy-Use/web/css/toast.css @@ -0,0 +1,110 @@ +.easyuse-toast-container{ + position: fixed; + z-index: 99999; + top: 0; + left: 0; + width: 100%; + height: 0; + display: flex; + flex-direction: column; + align-items: center; + justify-content: start; + padding:10px 0; +} +.easyuse-toast-container > div { + position: relative; + height: fit-content; + padding: 4px; + margin-top: -100px; /* re-set by JS */ + opacity: 0; + transition: all 0.33s ease-in-out; + z-index: 3; +} + +.easyuse-toast-container > div:last-child { + z-index: 2; +} + +.easyuse-toast-container > div:not(.-show) { + z-index: 1; +} + +.easyuse-toast-container > div.-show { + opacity: 1; + margin-top: 0px !important; +} + +.easyuse-toast-container > div.-show { + opacity: 1; + transform: translateY(0%); +} + +.easyuse-toast-container > div > div { + position: relative; + background: var(--comfy-menu-bg); + color: var(--input-text); + display: flex; + flex-direction: row; + align-items: center; + justify-content: center; + height: fit-content; + box-shadow: 0 0 10px rgba(0, 0, 0, 0.88); + padding: 9px 12px; + border-radius: 8px; + font-family: Arial, sans-serif; + font-size: 14px; + pointer-events: all; +} + +.easyuse-toast-container > div > div > span { + display: flex; + flex-direction: row; + align-items: center; + justify-content: center; +} + +.easyuse-toast-container > div > div > span svg { + width: 16px; + height: auto; + margin-right: 8px; +} + +.easyuse-toast-container > div > div > span svg[data-icon=info-circle]{ + fill: var(--theme-color-light); +} +.easyuse-toast-container > div > div > span svg[data-icon=check-circle]{ + fill: var(--success-color); +} +.easyuse-toast-container > div > div > span svg[data-icon=close-circle]{ + fill: var(--error-color); +} +.easyuse-toast-container > div > div > span svg[data-icon=exclamation-circle]{ + fill: var(--warning-color); +} +/*rotate animation*/ +@keyframes rotate { + 0% { + transform: rotate(0deg); + } + 100% { + transform: rotate(360deg); + } +} +.easyuse-toast-container > div > div > span svg[data-icon=loading]{ + fill: var(--theme-color); + animation: rotate 1s linear infinite; +} + +.easyuse-toast-container a { + cursor: pointer; + text-decoration: underline; + color: var(--theme-color-light); + margin-left: 4px; + display: inline-block; + line-height: 
1; +} + +.easyuse-toast-container a:hover { + color: var(--theme-color-light); + text-decoration: none; +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/css/toolbar.css b/ComfyUI-Easy-Use/web/css/toolbar.css new file mode 100644 index 0000000000000000000000000000000000000000..89a72439701e8a01149ab87895a0605858ef851d --- /dev/null +++ b/ComfyUI-Easy-Use/web/css/toolbar.css @@ -0,0 +1,230 @@ +.easyuse-toolbar{ + background: rgba(15,15,15,.5); + backdrop-filter: blur(4px) brightness(120%); + border-radius:0 12px 12px 0; + min-width:50px; + height:24px; + position: fixed; + bottom:85px; + left:0px; + display: flex; + align-items: center; + z-index:10000; +} +.easyuse-toolbar.disable-render-info{ + bottom: 55px; +} +.easyuse-toolbar-item{ + border-radius:20px; + height: 20px; + width:20px; + cursor: pointer; + display: flex; + justify-content: center; + align-items: center; + transition: all 0.3s ease-in-out; + margin-left:2.5px; +} +.easyuse-toolbar-icon{ + width: 14px; + height: 14px; + display: flex; + justify-content: center; + align-items: center; + font-size: 12px; + color:white; + transition: all 0.3s ease-in-out; +} +.easyuse-toolbar-icon svg{ + width: 14px; + height: 14px; +} +.easyuse-toolbar-tips{ + visibility: hidden; + opacity: 0; + position: absolute; + top: -25px; + left: 0; + color: var(--descrip-text); + padding: 2px 5px; + border-radius: 5px; + font-size: 11px; + min-width:100px; + transition: all 0.3s ease-in-out; +} +.easyuse-toolbar-item:hover{ + background:rgba(12,12,12,1); +} +.easyuse-toolbar-item:hover .easyuse-toolbar-tips{ + opacity: 1; + visibility: visible; +} +.easyuse-toolbar-item:hover .easyuse-toolbar-icon.group{ + color:var(--warning-color); +} +.easyuse-toolbar-item:hover .easyuse-toolbar-icon.rocket{ + color:var(--theme-color-light); +} +.easyuse-toolbar-item:hover .easyuse-toolbar-icon.question{ + color:var(--success-color); +} + + +.easyuse-guide-dialog{ + max-width: 300px; + font-family: var(--font-family); + position: absolute; + z-index:100; + left:0; + bottom:140px; + background: rgba(25,25,25,.85); + backdrop-filter: blur(8px) brightness(120%); + border-radius:0 12px 12px 0; + padding:10px; + transition: .5s all ease-in-out; + visibility: visible; + opacity: 1; + transform: translateX(0%); +} +.easyuse-guide-dialog.disable-render-info{ + bottom:110px; +} +.easyuse-guide-dialog-top{ + display: flex; + justify-content: space-between; + align-items: center; +} +.easyuse-guide-dialog-top .icon{ + width: 12px; + height:12px; +} +.easyuse-guide-dialog.hidden{ + opacity: 0; + transform: translateX(-50%); + visibility: hidden; +} +.easyuse-guide-dialog .closeBtn{ + float: right; + color: var(--input-text); + border-radius:30px; + background-color: var(--comfy-input-bg); + border: 1px solid var(--border-color); + cursor: pointer; + aspect-ratio: 1 / 1; + display: flex; + justify-content: center; + align-items: center; +} +.easyuse-guide-dialog .closeBtn:hover{ + filter:brightness(120%); +} +.easyuse-guide-dialog-title{ + color:var(--input-text); + font-size: 16px; + font-weight: bold; + margin-bottom: 5px; +} +.easyuse-guide-dialog-remark{ + color: var(--input-text); + font-size: 12px; + margin-top: 5px; +} +.easyuse-guide-dialog-content{ + max-height: 600px; + overflow: auto; +} +.easyuse-guide-dialog a, .easyuse-guide-dialog a:visited{ + color: var(--theme-color-light); + cursor: pointer; +} +.easyuse-guide-dialog-note{ + margin-top: 20px; + color:white; +} +.easyuse-guide-dialog p{ + margin:4px 0; + font-size: 12px; + font-weight: 300; +} 
+.markdown-body h1, .markdown-body h2, .markdown-body h3, .markdown-body h4, .markdown-body h5, .markdown-body h6 { + margin-top: 12px; + margin-bottom: 8px; + font-weight: 600; + line-height: 1.25; + padding-bottom: 5px; + border-bottom: 1px solid var(--border-color); + color: var(--input-text); +} +.markdown-body h1{ + font-size: 18px; +} +.markdown-body h2{ + font-size: 16px; +} +.markdown-body h3{ + font-size: 14px; +} +.markdown-body h4{ + font-size: 13px; +} +.markdown-body table { + display: block; + /*width: 100%;*/ + /*width: max-content;*/ + max-width: 300px; + overflow: auto; + color:var(--input-text); + box-sizing: border-box; + border: 1px solid var(--border-color); + text-align: left; + width: 100%; +} +.markdown-body table th, .markdown-body table td { + padding: 6px 13px; + font-size: 12px; + margin:0; + border-right: 1px solid var(--border-color); + border-bottom: 1px solid var(--border-color); +} +.markdown-body table td { + font-size: 12px; +} +.markdown-body table th:last-child, .markdown-body table td:last-child{ + border-right: none; +} +.markdown-body table tr:last-child td{ + border-bottom: none; +} +.markdown-body table th{ + font-weight: bold; + width: auto; + min-width: 70px; +} +.markdown-body table th:last-child{ + width:100%; +} +.markdown-body .warning{ + color:var(--warning-color) +} +.markdown-body .error{ + color:var(--error-color) +} +.markdown-body .success{ + color:var(--success-color) +} +.markdown-body .link{ + color:var(--theme-color-light) +} + +#comfyui-menu-monitor{ + width:120px; +} +#comfyui-menu-monitor #crystools-monitor-container{ + margin:0 auto!important; +} +#comfyui-menu-monitor #crystools-monitor-container > div{ + margin:2px 0!important; +} +#comfyui-menu-monitor #crystools-monitor-container > div > div > div{ + padding:0 4px!important; +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/bookmark.js b/ComfyUI-Easy-Use/web/js/bookmark.js new file mode 100644 index 0000000000000000000000000000000000000000..554b484fe5d98712e3a7135ea2db52c2e0fd71be --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/bookmark.js @@ -0,0 +1,101 @@ +import { app } from "../../../scripts/app.js"; + + +app.registerExtension({ + name: "easy bookmark", + registerCustomNodes() { + class Bookmark { + type = 'easy bookmark' + title = "🔖"; + + slot_start_y = -20; + + ___collapsed_width = 0; + + get _collapsed_width() { + return this.___collapsed_width; + } + + set _collapsed_width(width){ + const canvas = app.canvas ; + const ctx = canvas.canvas.getContext('2d'); + if(ctx){ + const oldFont = ctx.font; + ctx.font = canvas.title_text_font; + this.___collapsed_width = 40 + ctx.measureText(this.title).width; + ctx.font = oldFont; + } + } + + isVirtualNode = true; + serialize_widgets = true; + keypressBound = null; + + constructor() { + + this.addWidget('text', 'shortcut_key', '1', (value) => { + value = value.trim()[0] || '1'; + if(value !== ''){ + this.title = "🔖 " + value; + } + },{ + y: 8, + }); + this.addWidget('number', 'zoom', 1, (value) => {}, { + y: 8 + LiteGraph.NODE_WIDGET_HEIGHT + 4, + max: 2, + min: 0.5, + precision: 2, + }); + this.keypressBound = this.onKeypress.bind(this); + } + + onAdded(){ + setTimeout(_=>{ + const value = this.widgets[0].value + if(value){ + this.title = "🔖 " + value; + } + },1) + window.addEventListener("keydown", this.keypressBound); + } + + onRemoved() { + window.removeEventListener("keydown", this.keypressBound); + } + + onKeypress(event){ + const target = event.target; + if (['input','textarea'].includes(target.localName)) { 
+ return; + } + if (this.widgets[0] && event.key.toLocaleLowerCase() === this.widgets[0].value.toLocaleLowerCase()) { + this.canvasToBookmark(); + } + } + + canvasToBookmark() { + const canvas = app.canvas; + // ComfyUI seemed to break us again, but couldn't repro. No reason to not check, I guess. + // https://github.com/rgthree/rgthree-comfy/issues/71 + if (canvas?.ds?.offset) { + canvas.ds.offset[0] = -this.pos[0] + 16; + canvas.ds.offset[1] = -this.pos[1] + 40; + } + if (canvas?.ds?.scale != null) { + canvas.ds.scale = Number(this.widgets[1].value || 1); + } + canvas.setDirty(true, true); + } + } + + LiteGraph.registerNodeType( + "easy bookmark", + Object.assign(Bookmark,{ + title: "Bookmark 🔖", + }) + ); + + Bookmark.category = "EasyUse/Util" + } +}) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/common/dropdown.js b/ComfyUI-Easy-Use/web/js/common/dropdown.js new file mode 100644 index 0000000000000000000000000000000000000000..f1fd485f842be4c9beb6dbad823f0741cfb2725d --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/common/dropdown.js @@ -0,0 +1,218 @@ +let activeDropdown = null; + +export function removeDropdown() { + if (activeDropdown) { + activeDropdown.removeEventListeners(); + activeDropdown.dropdown.remove(); + activeDropdown = null; + } +} +export function createDropdown(inputEl, suggestions, onSelect, isDict = false) { + removeDropdown(); + new Dropdown(inputEl, suggestions, onSelect, isDict); +} + +class Dropdown { + constructor(inputEl, suggestions, onSelect, isDict = false) { + this.dropdown = document.createElement('ul'); + this.dropdown.setAttribute('role', 'listbox'); + this.dropdown.classList.add('easy-dropdown'); + this.selectedIndex = -1; + this.inputEl = inputEl; + this.suggestions = suggestions; + this.onSelect = onSelect; + this.isDict = isDict; + + this.focusedDropdown = this.dropdown; + + this.buildDropdown(); + + this.onKeyDownBound = this.onKeyDown.bind(this); + this.onWheelBound = this.onWheel.bind(this); + this.onClickBound = this.onClick.bind(this); + + this.addEventListeners(); + } + + buildDropdown() { + if (this.isDict) { + this.buildNestedDropdown(this.suggestions, this.dropdown); + } else { + this.suggestions.forEach((suggestion, index) => { + this.addListItem(suggestion, index, this.dropdown); + }); + } + + const inputRect = this.inputEl.getBoundingClientRect(); + this.dropdown.style.top = (inputRect.top + inputRect.height - 10) + 'px'; + this.dropdown.style.left = inputRect.left + 'px'; + + document.body.appendChild(this.dropdown); + activeDropdown = this; + } + + buildNestedDropdown(dictionary, parentElement) { + let index = 0; + Object.keys(dictionary).forEach((key) => { + const item = dictionary[key]; + if (typeof item === "object" && item !== null) { + const nestedDropdown = document.createElement('ul'); + nestedDropdown.setAttribute('role', 'listbox'); + nestedDropdown.classList.add('easy-nested-dropdown'); + const parentListItem = document.createElement('li'); + parentListItem.classList.add('folder'); + parentListItem.textContent = key; + parentListItem.appendChild(nestedDropdown); + parentListItem.addEventListener('mouseover', this.onMouseOver.bind(this, index, parentElement)); + parentElement.appendChild(parentListItem); + this.buildNestedDropdown(item, nestedDropdown); + index = index + 1; + } else { + const listItem = document.createElement('li'); + listItem.classList.add('item'); + listItem.setAttribute('role', 'option'); + listItem.textContent = key; + listItem.addEventListener('mouseover', this.onMouseOver.bind(this, index, 
parentElement)); + listItem.addEventListener('mousedown', this.onMouseDown.bind(this, key)); + parentElement.appendChild(listItem); + index = index + 1; + } + }); + } + + addListItem(item, index, parentElement) { + const listItem = document.createElement('li'); + listItem.setAttribute('role', 'option'); + listItem.textContent = item; + listItem.addEventListener('mouseover', this.onMouseOver.bind(this, index)); + listItem.addEventListener('mousedown', this.onMouseDown.bind(this, item)); + parentElement.appendChild(listItem); + } + + addEventListeners() { + document.addEventListener('keydown', this.onKeyDownBound); + this.dropdown.addEventListener('wheel', this.onWheelBound); + document.addEventListener('click', this.onClickBound); + } + + removeEventListeners() { + document.removeEventListener('keydown', this.onKeyDownBound); + this.dropdown.removeEventListener('wheel', this.onWheelBound); + document.removeEventListener('click', this.onClickBound); + } + + onMouseOver(index, parentElement) { + if (parentElement) { + this.focusedDropdown = parentElement; + } + this.selectedIndex = index; + this.updateSelection(); + } + + onMouseOut() { + this.selectedIndex = -1; + this.updateSelection(); + } + + onMouseDown(suggestion, event) { + event.preventDefault(); + this.onSelect(suggestion); + this.dropdown.remove(); + this.removeEventListeners(); + } + + onKeyDown(event) { + const enterKeyCode = 13; + const escKeyCode = 27; + const arrowUpKeyCode = 38; + const arrowDownKeyCode = 40; + const arrowRightKeyCode = 39; + const arrowLeftKeyCode = 37; + const tabKeyCode = 9; + + const items = Array.from(this.focusedDropdown.children); + const selectedItem = items[this.selectedIndex]; + + if (activeDropdown) { + if (event.keyCode === arrowUpKeyCode) { + event.preventDefault(); + this.selectedIndex = Math.max(0, this.selectedIndex - 1); + this.updateSelection(); + } + + else if (event.keyCode === arrowDownKeyCode) { + event.preventDefault(); + this.selectedIndex = Math.min(items.length - 1, this.selectedIndex + 1); + this.updateSelection(); + } + + else if (event.keyCode === arrowRightKeyCode) { + event.preventDefault(); + if (selectedItem && selectedItem.classList.contains('folder')) { + const nestedDropdown = selectedItem.querySelector('.easy-nested-dropdown'); + if (nestedDropdown) { + this.focusedDropdown = nestedDropdown; + this.selectedIndex = 0; + this.updateSelection(); + } + } + } + + else if (event.keyCode === arrowLeftKeyCode && this.focusedDropdown !== this.dropdown) { + const parentDropdown = this.focusedDropdown.closest('.easy-dropdown, .easy-nested-dropdown').parentNode.closest('.easy-dropdown, .easy-nested-dropdown'); + if (parentDropdown) { + this.focusedDropdown = parentDropdown; + this.selectedIndex = Array.from(parentDropdown.children).indexOf(this.focusedDropdown.parentNode); + this.updateSelection(); + } + } + + else if ((event.keyCode === enterKeyCode || event.keyCode === tabKeyCode) && this.selectedIndex >= 0) { + event.preventDefault(); + if (selectedItem.classList.contains('item')) { + this.onSelect(items[this.selectedIndex].textContent); + this.dropdown.remove(); + this.removeEventListeners(); + } + + const nestedDropdown = selectedItem.querySelector('.easy-nested-dropdown'); + if (nestedDropdown) { + this.focusedDropdown = nestedDropdown; + this.selectedIndex = 0; + this.updateSelection(); + } + } + + else if (event.keyCode === escKeyCode) { + this.dropdown.remove(); + this.removeEventListeners(); + } + } + } + + onWheel(event) { + const top = parseInt(this.dropdown.style.top); + 
if (localStorage.getItem("Comfy.Settings.Comfy.InvertMenuScrolling")) { + this.dropdown.style.top = (top + (event.deltaY < 0 ? 10 : -10)) + "px"; + } else { + this.dropdown.style.top = (top + (event.deltaY < 0 ? -10 : 10)) + "px"; + } + } + + onClick(event) { + if (!this.dropdown.contains(event.target) && event.target !== this.inputEl) { + this.dropdown.remove(); + this.removeEventListeners(); + } + } + + updateSelection() { + Array.from(this.focusedDropdown.children).forEach((li, index) => { + if (index === this.selectedIndex) { + li.classList.add('selected'); + } else { + li.classList.remove('selected'); + } + }); + } +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/common/i18n.js b/ComfyUI-Easy-Use/web/js/common/i18n.js new file mode 100644 index 0000000000000000000000000000000000000000..c31ac359f262d652f22308328b71e52ceffd4ef1 --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/common/i18n.js @@ -0,0 +1,101 @@ +import {getLocale} from './utils.js' +const locale = getLocale() + +const zhCN = { + "Workflow created by": "工作流创建者", + "Watch more video content": "观看更多视频内容", + "Workflow Guide":"工作流指南", + // ExtraMenu + "💎 View Checkpoint Info...": "💎 查看 Checkpoint 信息...", + "💎 View Lora Info...": "💎 查看 Lora 信息...", + "🔃 Reload Node": "🔃 刷新节点", + // ModelInfo + "Updated At:": "最近更新:", + "Created At:": "首次发布:", + "✏️ Edit": "✏️ 编辑", + "💾 Save": "💾 保存", + "No notes": "当前还没有备注内容", + "Saving Notes...": "正在保存备注...", + "Type your notes here":"在这里输入备注内容", + "ModelName":"模型名称", + "Models Required":"所需模型", + "Download Model": "下载模型", + "Source Url": "模型源地址", + "Notes": "备注", + "Type": "类型", + "Trained Words": "训练词", + "BaseModel": "基础算法", + "Details": "详情", + "Description": "描述", + "Download": "下载量", + "Source": "来源", + "Saving Preview...": "正在保存预览图...", + "Saving Succeed":"保存成功", + "Clean SuccessFully":"清理成功", + "Clean Failed": "清理失败", + "Saving Failed":"保存失败", + "No COMBO link": "沒有找到COMBO连接", + "Reboot ComfyUI":"重启ComfyUI", + "Are you sure you'd like to reboot the server?": "是否要重启ComfyUI?", + // GroupMap + "Groups Map": "管理组", + "Cleanup Of GPU Usage": "清理GPU占用", + "Please stop all running tasks before cleaning GPU": "请在清理GPU之前停止所有运行中的任务", + "Always": "启用中", + "Bypass": "已忽略", + "Never": "已停用", + "Auto Sorting": "自动排序", + "Toggle `Show/Hide` can set mode of group, LongPress can set group nodes to never": "点击`启用中/已忽略`可设置组模式, 长按可停用该组节点", + // Quick + "Enable ALT+1~9 to paste nodes from nodes template (ComfyUI-Easy-Use)": "启用ALT1~9从节点模板粘贴到工作流 (ComfyUI-Easy-Use)", + "Enable process bar in queue button (ComfyUI-Easy-Use)": "启用提示词队列进度显示条 (ComfyUI-Easy-Use)", + "Enable ContextMenu Auto Nest Subdirectories (ComfyUI-Easy-Use)": "启用上下文菜单自动嵌套子目录 (ComfyUI-Easy-Use)", + "Enable tool bar fixed on the left-bottom (ComfyUI-Easy-Use)": "启用工具栏固定在左下角 (ComfyUI-Easy-Use)", + "Too many thumbnails, have closed the display": "模型缩略图太多啦,为您关闭了显示", + // selector + "Empty All": "清空所有", + "🔎 Type here to search styles...": "🔎 在此处输入以搜索样式...", + // account + "Loading UserInfo...": "正在获取用户信息...", + "Please set the APIKEY first": "请先设置APIKEY", + "Setting APIKEY": "设置APIKEY", + "Save Account Info": "保存账号信息", + "Choose": "选择", + "Delete": "删除", + "Edit": "编辑", + "At least one account is required": "删除失败: 至少需要一个账户", + "APIKEY is not Empty": "APIKEY 不能为空", + "Add Account": "添加账号", + "Getting Your APIKEY": "获取您的APIKEY", + // choosers + "Choose Selected Images": "选择选中的图片", + "Choose images to continue": "选择图片以继续", + // seg + "Background": "背景", + "Hat": "帽子", + "Hair": "头发", + "Body": "身体", + "Face": "脸部", + "Clothes": "衣服", + 
"Others": "其他", + "Glove": "手套", + "Sunglasses": "太阳镜", + "Upper-clothes": "上衣", + "Dress": "连衣裙", + "Coat": "外套", + "Socks": "袜子", + "Pants": "裤子", + "Jumpsuits": "连体衣", + "Scarf": "围巾", + "Skirt": "裙子", + "Left-arm": "左臂", + "Right-arm": "右臂", + "Left-leg": "左腿", + "Right-leg": "右腿", + "Left-shoe": "左鞋", + "Right-shoe": "右鞋", +} +export const $t = (key) => { + const cn = zhCN[key] + return locale === 'zh-CN' && cn ? cn : key +} diff --git a/ComfyUI-Easy-Use/web/js/common/icon.js b/ComfyUI-Easy-Use/web/js/common/icon.js new file mode 100644 index 0000000000000000000000000000000000000000..e71a0a34ff16dcb668d164aeff2ede0abe87b9a0 --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/common/icon.js @@ -0,0 +1,25 @@ +export const logoIcon = ` + + + + + + + + + + + + + + + + + +` + +export const quesitonIcon = `` +export const rocketIcon = `` +export const groupIcon = `` +export const rebootIcon = `` +export const closeIcon = `` \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/common/model.js b/ComfyUI-Easy-Use/web/js/common/model.js new file mode 100644 index 0000000000000000000000000000000000000000..b94928835526d17cf076f1612d4a8148ecec02af --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/common/model.js @@ -0,0 +1,683 @@ +import { $el, ComfyDialog } from "../../../../scripts/ui.js"; +import { api } from "../../../../scripts/api.js"; +import {formatTime} from './utils.js'; +import {$t} from "./i18n.js"; +import {toast} from "./toast.js"; + +class MetadataDialog extends ComfyDialog { + constructor() { + super(); + this.element.classList.add("easyuse-model-metadata"); + } + show(metadata) { + super.show( + $el( + "div", + Object.keys(metadata).map((k) => + $el("div", [$el("label", { textContent: k }), $el("span", { textContent: metadata[k] })]) + ) + ) + ); + } +} + +export class ModelInfoDialog extends ComfyDialog { + constructor(name) { + super(); + this.name = name; + this.element.classList.add("easyuse-model-info"); + } + + get customNotes() { + return this.metadata["easyuse.notes"]; + } + + set customNotes(v) { + this.metadata["easyuse.notes"] = v; + } + + get hash() { + return this.metadata["easyuse.sha256"]; + } + + async show(type, value) { + this.type = type; + + const req = api.fetchApi("/easyuse/metadata/" + encodeURIComponent(`${type}/${value}`)); + this.info = $el("div", { style: { flex: "auto" } }); + // this.img = $el("img", { style: { display: "none" } }); + this.imgCurrent = 0 + this.imgList = $el("div.easyuse-preview-list",{ + style: { display: "none" } + }) + this.imgWrapper = $el("div.easyuse-preview", [ + $el("div.easyuse-preview-group",[ + this.imgList + ]), + ]); + this.main = $el("main", { style: { display: "flex" } }, [this.imgWrapper, this.info]); + this.content = $el("div.easyuse-model-content", [ + $el("div.easyuse-model-header",[$el("h2", { textContent: this.name })]) + , this.main]); + + const loading = $el("div", { textContent: "ℹ️ Loading...", parent: this.content }); + + super.show(this.content); + + this.metadata = await (await req).json(); + this.viewMetadata.style.cursor = this.viewMetadata.style.opacity = ""; + this.viewMetadata.removeAttribute("disabled"); + + loading.remove(); + this.addInfo(); + } + + createButtons() { + const btns = super.createButtons(); + this.viewMetadata = $el("button", { + type: "button", + textContent: "View raw metadata", + disabled: "disabled", + style: { + opacity: 0.5, + cursor: "not-allowed", + }, + onclick: (e) => { + if (this.metadata) { + new MetadataDialog().show(this.metadata); + } + }, + }); + + 
btns.unshift(this.viewMetadata); + return btns; + } + + parseNote() { + if (!this.customNotes) return []; + + let notes = []; + // Extract links from notes + const r = new RegExp("(\\bhttps?:\\/\\/[^\\s]+)", "g"); + let end = 0; + let m; + do { + m = r.exec(this.customNotes); + let pos; + let fin = 0; + if (m) { + pos = m.index; + fin = m.index + m[0].length; + } else { + pos = this.customNotes.length; + } + + let pre = this.customNotes.substring(end, pos); + if (pre) { + pre = pre.replaceAll("\n", "
"); + notes.push( + $el("span", { + innerHTML: pre, + }) + ); + } + if (m) { + notes.push( + $el("a", { + href: m[0], + textContent: m[0], + target: "_blank", + }) + ); + } + + end = fin; + } while (m); + return notes; + } + + addInfoEntry(name, value) { + return $el( + "p", + { + parent: this.info, + }, + [ + typeof name === "string" ? $el("label", { textContent: name + ": " }) : name, + typeof value === "string" ? $el("span", { textContent: value }) : value, + ] + ); + } + + async getCivitaiDetails() { + const req = await fetch("https://civitai.com/api/v1/model-versions/by-hash/" + this.hash); + if (req.status === 200) { + return await req.json(); + } else if (req.status === 404) { + throw new Error("Model not found"); + } else { + throw new Error(`Error loading info (${req.status}) ${req.statusText}`); + } + } + + addCivitaiInfo() { + const promise = this.getCivitaiDetails(); + const content = $el("span", { textContent: "ℹ️ Loading..." }); + + this.addInfoEntry( + $el("label", [ + $el("img", { + style: { + width: "18px", + position: "relative", + top: "3px", + margin: "0 5px 0 0", + }, + src: "https://civitai.com/favicon.ico", + }), + $el("span", { textContent: "Civitai: " }), + ]), + content + ); + + return promise + .then((info) => { + this.imgWrapper.style.display = 'block' + // 变更标题信息 + let header = this.element.querySelector('.easyuse-model-header') + if(header){ + header.replaceChildren( + $el("h2", { textContent: this.name }), + $el("div.easyuse-model-header-remark",[ + $el("h5", { textContent: $t("Updated At:") + formatTime(new Date(info.updatedAt),'yyyy/MM/dd')}), + $el("h5", { textContent: $t("Created At:") + formatTime(new Date(info.updatedAt),'yyyy/MM/dd')}), + ]) + ) + } + // 替换内容 + let textarea = null + let notes = this.parseNote.call(this) + let editText = $t("✏️ Edit") + console.log(notes) + let textarea_div = $el("div.easyuse-model-detail-textarea",[ + $el("p",notes?.length>0 ? notes : {textContent:$t('No notes')}), + ]) + if(!notes || notes.length == 0) textarea_div.classList.add('empty') + else textarea_div.classList.remove('empty') + this.info.replaceChildren( + $el("div.easyuse-model-detail",[ + $el("div.easyuse-model-detail-head.flex-b",[ + $el('span',$t("Notes")), + $el("a", { + textContent: editText, + href: "#", + style: { + fontSize: "12px", + float: "right", + color: "var(--warning-color)", + textDecoration: "none", + }, + onclick: async (e) => { + e.preventDefault(); + + if (textarea) { + if(textarea.value != this.customNotes){ + toast.showLoading($t('Saving Notes...')) + this.customNotes = textarea.value; + const resp = await api.fetchApi( + "/easyuse/metadata/notes/" + encodeURIComponent(`${this.type}/${this.name}`), + { + method: "POST", + body: this.customNotes, + } + ); + toast.hideLoading() + if (resp.status !== 200) { + toast.error($t('Saving Failed')) + console.error(resp); + alert(`Error saving notes (${resp.status}) ${resp.statusText}`); + return; + } + toast.success($t('Saving Succeed')) + notes = this.parseNote.call(this) + console.log(notes) + textarea_div.replaceChildren($el("p",notes?.length>0 ? 
notes : {textContent:$t('No notes')})); + if(textarea.value) textarea_div.classList.remove('empty') + else textarea_div.classList.add('empty') + }else { + textarea_div.replaceChildren($el("p",{textContent:$t('No notes')})); + textarea_div.classList.add('empty') + } + e.target.textContent = editText; + textarea.remove(); + textarea = null; + + } else { + e.target.textContent = "💾 Save"; + textarea = $el("textarea", { + placeholder: $t("Type your notes here"), + style: { + width: "100%", + minWidth: "200px", + minHeight: "50px", + height:"100px" + }, + textContent: this.customNotes, + }); + textarea_div.replaceChildren(textarea); + textarea.focus() + } + } + }) + ]), + textarea_div + ]), + $el("div.easyuse-model-detail",[ + $el("div.easyuse-model-detail-head",{textContent:$t("Details")}), + $el("div.easyuse-model-detail-body",[ + $el("div.easyuse-model-detail-item",[ + $el("div.easyuse-model-detail-item-label",{textContent:$t("Type")}), + $el("div.easyuse-model-detail-item-value",{textContent:info.model.type}), + ]), + $el("div.easyuse-model-detail-item",[ + $el("div.easyuse-model-detail-item-label",{textContent:$t("BaseModel")}), + $el("div.easyuse-model-detail-item-value",{textContent:info.baseModel}), + ]), + $el("div.easyuse-model-detail-item",[ + $el("div.easyuse-model-detail-item-label",{textContent:$t("Download")}), + $el("div.easyuse-model-detail-item-value",{textContent:info.stats?.downloadCount || 0}), + ]), + $el("div.easyuse-model-detail-item",[ + $el("div.easyuse-model-detail-item-label",{textContent:$t("Trained Words")}), + $el("div.easyuse-model-detail-item-value",{textContent:info?.trainedWords.join(',') || '-'}), + ]), + $el("div.easyuse-model-detail-item",[ + $el("div.easyuse-model-detail-item-label",{textContent:$t("Source")}), + $el("div.easyuse-model-detail-item-value",[ + $el("label", [ + $el("img", { + style: { + width: "14px", + position: "relative", + top: "3px", + margin: "0 5px 0 0", + }, + src: "https://civitai.com/favicon.ico", + }), + $el("a", { + href: "https://civitai.com/models/" + info.modelId, + textContent: "View " + info.model.name, + target: "_blank", + }) + ]) + ]), + ]) + ]), + ]) + ); + + if (info.images?.length) { + this.imgCurrent = 0 + this.isSaving = false + info.images.map(cate=> + cate.url && + this.imgList.appendChild( + $el('div.easyuse-preview-slide',[ + $el('div.easyuse-preview-slide-content',[ + $el('img',{src:(cate.url)}), + $el("div.save", { + textContent: "Save as preview", + onclick: async () => { + if(this.isSaving) return + this.isSaving = true + toast.showLoading($t('Saving Preview...')) + // Convert the preview to a blob + const blob = await (await fetch(cate.url)).blob(); + + // Store it in temp + const name = "temp_preview." 
+ new URL(cate.url).pathname.split(".")[1];
+ const body = new FormData();
+ body.append("image", new File([blob], name));
+ body.append("overwrite", "true");
+ body.append("type", "temp");
+
+ const resp = await api.fetchApi("/upload/image", {
+ method: "POST",
+ body,
+ });
+
+ if (resp.status !== 200) {
+ this.isSaving = false
+ toast.error($t('Saving Failed'))
+ toast.hideLoading()
+ console.error(resp);
+ alert(`Error saving preview (${resp.status}) ${resp.statusText}`);
+ return;
+ }
+
+ // Use as preview
+ await api.fetchApi("/easyuse/save/" + encodeURIComponent(`${this.type}/${this.name}`), {
+ method: "POST",
+ body: JSON.stringify({
+ filename: name,
+ type: "temp",
+ }),
+ headers: {
+ "content-type": "application/json",
+ },
+ }).then(_=>{
+ toast.success($t('Saving Succeed'))
+ toast.hideLoading()
+ });
+ this.isSaving = false
+ app.refreshComboInNodes();
+ },
+ })
+ ])
+ ])
+ )
+ )
+ let _this = this
+ this.imgDistance = (-660 * this.imgCurrent).toString()
+ this.imgList.style.display = ''
+ this.imgList.style.transform = 'translate3d(' + this.imgDistance +'px, 0px, 0px)'
+ this.slides = this.imgList.querySelectorAll('.easyuse-preview-slide')
+ // add slide navigation buttons
+ this.slideLeftButton = $el("button.left",{
+ parent: this.imgWrapper,
+ style:{
+ display:info.images.length <= 2 ? 'none' : 'block'
+ },
+ innerHTML:``,
+ onclick: ()=>{
+ if(info.images.length <= 2) return
+ _this.imgList.classList.remove("no-transition")
+ if(_this.imgCurrent == 0){
+ _this.imgCurrent = (info.images.length/2)-1
+ this.slides[this.slides.length-1].style.transform = 'translate3d(' + (-660 * (this.imgCurrent+1)).toString()+'px, 0px, 0px)'
+ this.slides[this.slides.length-2].style.transform = 'translate3d(' + (-660 * (this.imgCurrent+1)).toString()+'px, 0px, 0px)'
+ _this.imgList.style.transform = 'translate3d(660px, 0px, 0px)'
+ setTimeout(_=>{
+ this.slides[this.slides.length-1].style.transform = 'translate3d(0px, 0px, 0px)'
+ this.slides[this.slides.length-2].style.transform = 'translate3d(0px, 0px, 0px)'
+ _this.imgDistance = (-660 * this.imgCurrent).toString()
+ _this.imgList.style.transform = 'translate3d(' + _this.imgDistance +'px, 0px, 0px)'
+ _this.imgList.classList.add("no-transition")
+ },500)
+ }
+ else {
+ _this.imgCurrent = _this.imgCurrent-1
+ _this.imgDistance = (-660 * this.imgCurrent).toString()
+ _this.imgList.style.transform = 'translate3d(' + _this.imgDistance +'px, 0px, 0px)'
+ }
+ }
+ })
+ this.slideRightButton = $el("button.right",{
+ parent: this.imgWrapper,
+ style:{
+ display:info.images.length <= 2 ?
'none' : 'block' + }, + innerHTML:``, + onclick: ()=>{ + if(info.images.length <= 2) return + _this.imgList.classList.remove("no-transition") + + if( _this.imgCurrent >= (info.images.length/2)-1){ + _this.imgCurrent = 0 + const max = info.images.length/2 + this.slides[0].style.transform = 'translate3d(' + (660 * max).toString()+'px, 0px, 0px)' + this.slides[1].style.transform = 'translate3d(' + (660 * max).toString()+'px, 0px, 0px)' + _this.imgList.style.transform = 'translate3d(' + (-660 * max).toString()+'px, 0px, 0px)' + setTimeout(_=>{ + this.slides[0].style.transform = 'translate3d(0px, 0px, 0px)' + this.slides[1].style.transform = 'translate3d(0px, 0px, 0px)' + _this.imgDistance = (-660 * this.imgCurrent).toString() + _this.imgList.style.transform = 'translate3d(' + _this.imgDistance +'px, 0px, 0px)' + _this.imgList.classList.add("no-transition") + },500) + } + else { + _this.imgCurrent = _this.imgCurrent+1 + _this.imgDistance = (-660 * this.imgCurrent).toString() + _this.imgList.style.transform = 'translate3d(' + _this.imgDistance +'px, 0px, 0px)' + } + + } + }) + + } + + if(info.description){ + $el("div", { + parent: this.content, + innerHTML: info.description, + style: { + marginTop: "10px", + }, + }); + } + + return info; + }) + .catch((err) => { + this.imgWrapper.style.display = 'none' + content.textContent = "⚠️ " + err.message; + }) + .finally(_=>{ + }) + } +} + + +export class CheckpointInfoDialog extends ModelInfoDialog { + async addInfo() { + // super.addInfo(); + await this.addCivitaiInfo(); + } +} + +const MAX_TAGS = 500 +export class LoraInfoDialog extends ModelInfoDialog { + getTagFrequency() { + if (!this.metadata.ss_tag_frequency) return []; + + const datasets = JSON.parse(this.metadata.ss_tag_frequency); + const tags = {}; + for (const setName in datasets) { + const set = datasets[setName]; + for (const t in set) { + if (t in tags) { + tags[t] += set[t]; + } else { + tags[t] = set[t]; + } + } + } + + return Object.entries(tags).sort((a, b) => b[1] - a[1]); + } + + getResolutions() { + let res = []; + if (this.metadata.ss_bucket_info) { + const parsed = JSON.parse(this.metadata.ss_bucket_info); + if (parsed?.buckets) { + for (const { resolution, count } of Object.values(parsed.buckets)) { + res.push([count, `${resolution.join("x")} * ${count}`]); + } + } + } + res = res.sort((a, b) => b[0] - a[0]).map((a) => a[1]); + let r = this.metadata.ss_resolution; + if (r) { + const s = r.split(","); + const w = s[0].replace("(", ""); + const h = s[1].replace(")", ""); + res.push(`${w.trim()}x${h.trim()} (Base res)`); + } else if ((r = this.metadata["modelspec.resolution"])) { + res.push(r + " (Base res"); + } + if (!res.length) { + res.push("⚠️ Unknown"); + } + return res; + } + + getTagList(tags) { + return tags.map((t) => + $el( + "li.easyuse-model-tag", + { + dataset: { + tag: t[0], + }, + $: (el) => { + el.onclick = () => { + el.classList.toggle("easyuse-model-tag--selected"); + }; + }, + }, + [ + $el("p", { + textContent: t[0], + }), + $el("span", { + textContent: t[1], + }), + ] + ) + ); + } + + addTags() { + let tags = this.getTagFrequency(); + let hasMore; + if (tags?.length) { + const c = tags.length; + let list; + if (c > MAX_TAGS) { + tags = tags.slice(0, MAX_TAGS); + hasMore = $el("p", [ + $el("span", { textContent: `⚠️ Only showing first ${MAX_TAGS} tags ` }), + $el("a", { + href: "#", + textContent: `Show all ${c}`, + onclick: () => { + list.replaceChildren(...this.getTagList(this.getTagFrequency())); + hasMore.remove(); + }, + }), + ]); + } + list = 
$el("ol.easyuse-model-tags-list", this.getTagList(tags)); + this.tags = $el("div", [list]); + } else { + this.tags = $el("p", { textContent: "⚠️ No tag frequency metadata found" }); + } + + this.content.append(this.tags); + + if (hasMore) { + this.content.append(hasMore); + } + } + + async addInfo() { + // this.addInfoEntry("Name", this.metadata.ss_output_name || "⚠️ Unknown"); + // this.addInfoEntry("Base Model", this.metadata.ss_sd_model_name || "⚠️ Unknown"); + // this.addInfoEntry("Clip Skip", this.metadata.ss_clip_skip || "⚠️ Unknown"); + // + // this.addInfoEntry( + // "Resolution", + // $el( + // "select", + // this.getResolutions().map((r) => $el("option", { textContent: r })) + // ) + // ); + + // super.addInfo(); + const p = this.addCivitaiInfo(); + this.addTags(); + + const info = await p; + if (info) { + // $el( + // "p", + // { + // parent: this.content, + // textContent: "Trained Words: ", + // }, + // [ + // $el("pre", { + // textContent: info.trainedWords.join(", "), + // style: { + // whiteSpace: "pre-wrap", + // margin: "10px 0", + // background: "#222", + // padding: "5px", + // borderRadius: "5px", + // maxHeight: "250px", + // overflow: "auto", + // }, + // }), + // ] + // ); + $el("div", { + parent: this.content, + innerHTML: info.description, + style: { + maxHeight: "250px", + overflow: "auto", + }, + }); + } + } + + createButtons() { + const btns = super.createButtons(); + + function copyTags(e, tags) { + const textarea = $el("textarea", { + parent: document.body, + style: { + position: "fixed", + }, + textContent: tags.map((el) => el.dataset.tag).join(", "), + }); + textarea.select(); + try { + document.execCommand("copy"); + if (!e.target.dataset.text) { + e.target.dataset.text = e.target.textContent; + } + e.target.textContent = "Copied " + tags.length + " tags"; + setTimeout(() => { + e.target.textContent = e.target.dataset.text; + }, 1000); + } catch (ex) { + prompt("Copy to clipboard: Ctrl+C, Enter", text); + } finally { + document.body.removeChild(textarea); + } + } + + btns.unshift( + $el("button", { + type: "button", + textContent: "Copy Selected", + onclick: (e) => { + copyTags(e, [...this.tags.querySelectorAll(".easyuse-model-tag--selected")]); + }, + }), + $el("button", { + type: "button", + textContent: "Copy All", + onclick: (e) => { + copyTags(e, [...this.tags.querySelectorAll(".easyuse-model-tag")]); + }, + }) + ); + + return btns; + } +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/common/toast.js b/ComfyUI-Easy-Use/web/js/common/toast.js new file mode 100644 index 0000000000000000000000000000000000000000..f32ab5e41a4a4212ca25894f94e234514249c6c5 --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/common/toast.js @@ -0,0 +1,127 @@ +import {sleep} from "./utils.js"; +import {$t} from "./i18n.js"; + +class Toast{ + + constructor() { + this.info_icon = `` + this.success_icon = `` + this.error_icon = `` + this.warn_icon = `` + this.loading_icon = `` + } + + async showToast(data){ + let container = document.querySelector(".easyuse-toast-container"); + if (!container) { + container = document.createElement("div"); + container.classList.add("easyuse-toast-container"); + document.body.appendChild(container); + } + await this.hideToast(data.id); + const toastContainer = document.createElement("div"); + const content = document.createElement("span"); + content.innerHTML = data.content; + toastContainer.appendChild(content); + for (let a = 0; a < (data.actions || []).length; a++) { + const action = data.actions[a]; + if (a > 0) { + const sep = 
document.createElement("span"); + sep.innerHTML = " | "; + toastContainer.appendChild(sep); + } + const actionEl = document.createElement("a"); + actionEl.innerText = action.label; + if (action.href) { + actionEl.target = "_blank"; + actionEl.href = action.href; + } + if (action.callback) { + actionEl.onclick = (e) => { + return action.callback(e); + }; + } + toastContainer.appendChild(actionEl); + } + const animContainer = document.createElement("div"); + animContainer.setAttribute("toast-id", data.id); + animContainer.appendChild(toastContainer); + container.appendChild(animContainer); + await sleep(64); + animContainer.style.marginTop = `-${animContainer.offsetHeight}px`; + await sleep(64); + animContainer.classList.add("-show"); + if (data.duration) { + await sleep(data.duration); + this.hideToast(data.id); + } + } + async hideToast(id) { + const msg = document.querySelector(`.easyuse-toast-container > [toast-id="${id}"]`); + if (msg === null || msg === void 0 ? void 0 : msg.classList.contains("-show")) { + msg.classList.remove("-show"); + await sleep(750); + } + msg && msg.remove(); + } + async clearAllMessages() { + let container = document.querySelector(".easyuse-toast-container"); + container && (container.innerHTML = ""); + } + + async copyright(duration = 5000, actions = []) { + this.showToast({ + id: `toast-info`, + content: `${this.info_icon} ${$t('Workflow created by')} Yolain , ${$t('Watch more video content')} B站乱乱呀`, + duration, + actions + }); + } + async info(content, duration = 3000, actions = []) { + this.showToast({ + id: `toast-info`, + content: `${this.info_icon} ${content}`, + duration, + actions + }); + } + async success(content, duration = 3000, actions = []) { + this.showToast({ + id: `toast-success`, + content: `${this.success_icon} ${content}`, + duration, + actions + }); + } + async error(content, duration = 3000, actions = []) { + this.showToast({ + id: `toast-error`, + content: `${this.error_icon} ${content}`, + duration, + actions + }); + } + async warn(content, duration = 3000, actions = []) { + this.showToast({ + id: `toast-warn`, + content: `${this.warn_icon} ${content}`, + duration, + actions + }); + } + async showLoading(content, duration = 0, actions = []) { + this.showToast({ + id: `toast-loading`, + content: `${this.loading_icon} ${content}`, + duration, + actions + }); + } + + async hideLoading() { + this.hideToast("toast-loading"); + } + +} + +export const toast = new Toast(); diff --git a/ComfyUI-Easy-Use/web/js/common/utils.js b/ComfyUI-Easy-Use/web/js/common/utils.js new file mode 100644 index 0000000000000000000000000000000000000000..9a988a7a85da6fc5fe35e7bd43bcca4495c62adc --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/common/utils.js @@ -0,0 +1,187 @@ +export function sleep(ms = 100, value) { + return new Promise((resolve) => { + setTimeout(() => { + resolve(value); + }, ms); + }); +} +export function addPreconnect(href, crossorigin=false){ + const preconnect = document.createElement("link"); + preconnect.rel = 'preconnect' + preconnect.href = href + if(crossorigin) preconnect.crossorigin = '' + document.head.appendChild(preconnect); +} +export function addCss(href, base=true) { + const link = document.createElement("link"); + link.rel = "stylesheet"; + link.type = "text/css"; + link.href = base ? 
"extensions/ComfyUI-Easy-Use/"+href : href; + document.head.appendChild(link); +} + +export function addMeta(name, content) { + const meta = document.createElement("meta"); + meta.setAttribute("name", name); + meta.setAttribute('content', content); + document.head.appendChild(meta); +} + +export function deepEqual(obj1, obj2) { + if (typeof obj1 !== typeof obj2) { + return false + } + if (typeof obj1 !== 'object' || obj1 === null || obj2 === null) { + return obj1 === obj2 + } + const keys1 = Object.keys(obj1) + const keys2 = Object.keys(obj2) + if (keys1.length !== keys2.length) { + return false + } + for (let key of keys1) { + if (!deepEqual(obj1[key], obj2[key])) { + return false + } + } + return true +} + + +export function getLocale(){ + const locale = localStorage['AGL.Locale'] || localStorage['Comfy.Settings.AGL.Locale'] || 'en-US' + return locale +} + +export function spliceExtension(fileName){ + return fileName.substring(0,fileName.lastIndexOf('.')) +} +export function getExtension(fileName){ + return fileName.substring(fileName.lastIndexOf('.') + 1) +} + +export function formatTime(time, format) { + time = typeof (time) === "number" ? time : (time instanceof Date ? time.getTime() : parseInt(time)); + if (isNaN(time)) return null; + if (typeof (format) !== 'string' || !format) format = 'yyyy-MM-dd hh:mm:ss'; + let _time = new Date(time); + time = _time.toString().split(/[\s\:]/g).slice(0, -2); + time[1] = ['01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12'][_time.getMonth()]; + let _mapping = { + MM: 1, + dd: 2, + yyyy: 3, + hh: 4, + mm: 5, + ss: 6 + }; + return format.replace(/([Mmdhs]|y{2})\1/g, (key) => time[_mapping[key]]); +} + + +let origProps = {}; +export const findWidgetByName = (node, name) => node.widgets.find((w) => w.name === name); + +export const doesInputWithNameExist = (node, name) => node.inputs ? node.inputs.some((input) => input.name === name) : false; + +export function updateNodeHeight(node) {node.setSize([node.size[0], node.computeSize()[1]]);} + +export function toggleWidget(node, widget, show = false, suffix = "") { + if (!widget || doesInputWithNameExist(node, widget.name)) return; + if (!origProps[widget.name]) { + origProps[widget.name] = { origType: widget.type, origComputeSize: widget.computeSize }; + } + const origSize = node.size; + + widget.type = show ? origProps[widget.name].origType : "easyHidden" + suffix; + widget.computeSize = show ? origProps[widget.name].origComputeSize : () => [0, -4]; + + widget.linkedWidgets?.forEach(w => toggleWidget(node, w, ":" + widget.name, show)); + + const height = show ? Math.max(node.computeSize()[1], origSize[1]) : node.size[1]; + node.setSize([node.size[0], height]); +} + +export function isLocalNetwork(ip) { + const localNetworkRanges = [ + '192.168.', + '10.', + '127.', + /^172\.((1[6-9]|2[0-9]|3[0-1])\.)/ + ]; + + return localNetworkRanges.some(range => { + if (typeof range === 'string') { + return ip.startsWith(range); + } else { + return range.test(ip); + } + }); +} + + +/** +* accAdd 高精度加法 +* @since 1.0.10 +* @param {Number} arg1 +* @param {Number} arg2 +* @return {Number} +*/ +export function accAdd(arg1, arg2) { + let r1, r2, s1, s2,max; + s1 = typeof arg1 == 'string' ? arg1 : arg1.toString() + s2 = typeof arg2 == 'string' ? 
arg2 : arg2.toString() + try { r1 = s1.split(".")[1].length } catch (e) { r1 = 0 } + try { r2 = s2.split(".")[1].length } catch (e) { r2 = 0 } + max = Math.pow(10, Math.max(r1, r2)) + return (arg1 * max + arg2 * max) / max +} +/** + * accSub 高精度减法 + * @since 1.0.10 + * @param {Number} arg1 + * @param {Number} arg2 + * @return {Number} + */ +export function accSub(arg1, arg2) { + let r1, r2, max, min,s1,s2; + s1 = typeof arg1 == 'string' ? arg1 : arg1.toString() + s2 = typeof arg2 == 'string' ? arg2 : arg2.toString() + try { r1 = s1.split(".")[1].length } catch (e) { r1 = 0 } + try { r2 = s2.split(".")[1].length } catch (e) { r2 = 0 } + max = Math.pow(10, Math.max(r1, r2)); + //动态控制精度长度 + min = (r1 >= r2) ? r1 : r2; + return ((arg1 * max - arg2 * max) / max).toFixed(min) +} +/** + * accMul 高精度乘法 + * @since 1.0.10 + * @param {Number} arg1 + * @param {Number} arg2 + * @return {Number} + */ +export function accMul(arg1, arg2) { + let max = 0, s1 = typeof arg1 == 'string' ? arg1 : arg1.toString(), s2 = typeof arg2 == 'string' ? arg2 : arg2.toString(); + try { max += s1.split(".")[1].length } catch (e) { } + try { max += s2.split(".")[1].length } catch (e) { } + return Number(s1.replace(".", "")) * Number(s2.replace(".", "")) / Math.pow(10, max) +} +/** + * accDiv 高精度除法 + * @since 1.0.10 + * @param {Number} arg1 + * @param {Number} arg2 + * @return {Number} + */ +export function accDiv(arg1, arg2) { + let t1 = 0, t2 = 0, r1, r2,s1 = typeof arg1 == 'string' ? arg1 : arg1.toString(), s2 = typeof arg2 == 'string' ? arg2 : arg2.toString(); + try { t1 = s1.toString().split(".")[1].length } catch (e) { } + try { t2 = s2.toString().split(".")[1].length } catch (e) { } + r1 = Number(s1.toString().replace(".", "")) + r2 = Number(s2.toString().replace(".", "")) + return (r1 / r2) * Math.pow(10, t2 - t1) +} +Number.prototype.div = function (arg) { + return accDiv(this, arg); +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/easy/easy.js b/ComfyUI-Easy-Use/web/js/easy/easy.js new file mode 100644 index 0000000000000000000000000000000000000000..b22d2fb2c4830ac0a8be8b29a0b0b64b9e70969e --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/easy/easy.js @@ -0,0 +1,610 @@ +import { api } from "../../../../scripts/api.js"; +import { app } from "../../../../scripts/app.js"; +import {deepEqual, addCss, addMeta, isLocalNetwork} from "../common/utils.js"; +import {logoIcon, quesitonIcon, rocketIcon, groupIcon, rebootIcon, closeIcon} from "../common/icon.js"; +import {$t} from '../common/i18n.js'; +import {toast} from "../common/toast.js"; +import {$el, ComfyDialog} from "../../../../scripts/ui.js"; + + +addCss('css/index.css') + +api.addEventListener("easyuse-toast",event=>{ + const content = event.detail.content + const type = event.detail.type + const duration = event.detail.duration + if(!type){ + toast.info(content, duration) + } + else{ + toast.showToast({ + id: `toast-${type}`, + content: `${toast[type+"_icon"]} ${content}`, + duration: duration || 3000, + }) + } +}) + + +let draggerEl = null +let isGroupMapcanMove = true +function createGroupMap(){ + let div = document.querySelector('#easyuse_groups_map') + if(div){ + div.style.display = div.style.display == 'none' ? 
'flex' : 'none' + return + } + let groups = app.canvas.graph._groups + let nodes = app.canvas.graph._nodes + let old_nodes = groups.length + div = document.createElement('div') + div.id = 'easyuse_groups_map' + div.innerHTML = '' + let btn = document.createElement('div') + btn.style = `display: flex; + width: calc(100% - 8px); + justify-content: space-between; + align-items: center; + padding: 0 6px; + height: 44px;` + let hideBtn = $el('button.closeBtn',{ + innerHTML:closeIcon, + onclick:_=>div.style.display = 'none' + }) + let textB = document.createElement('p') + btn.appendChild(textB) + btn.appendChild(hideBtn) + textB.style.fontSize = '11px' + textB.innerHTML = `${$t('Groups Map')} (EasyUse)` + div.appendChild(btn) + + div.addEventListener('mousedown', function (e) { + var startX = e.clientX + var startY = e.clientY + var offsetX = div.offsetLeft + var offsetY = div.offsetTop + + function moveBox (e) { + var newX = e.clientX + var newY = e.clientY + var deltaX = newX - startX + var deltaY = newY - startY + div.style.left = offsetX + deltaX + 'px' + div.style.top = offsetY + deltaY + 'px' + } + + function stopMoving () { + document.removeEventListener('mousemove', moveBox) + document.removeEventListener('mouseup', stopMoving) + } + + if(isGroupMapcanMove){ + document.addEventListener('mousemove', moveBox) + document.addEventListener('mouseup', stopMoving) + } + }) + + function updateGroups(groups, groupsDiv, autoSortDiv){ + if(groups.length>0){ + autoSortDiv.style.display = 'block' + }else autoSortDiv.style.display = 'none' + for (let index in groups) { + const group = groups[index] + const title = group.title + const show_text = $t('Always') + const hide_text = $t('Bypass') + const mute_text = $t('Never') + let group_item = document.createElement('div') + let group_item_style = `justify-content: space-between;display:flex;background-color: var(--comfy-input-bg);border-radius: 5px;border:1px solid var(--border-color);margin-top:5px;` + group_item.addEventListener("mouseover",event=>{ + event.preventDefault() + group_item.style = group_item_style + "filter:brightness(1.2);" + }) + group_item.addEventListener("mouseleave",event=>{ + event.preventDefault() + group_item.style = group_item_style + "filter:brightness(1);" + }) + group_item.addEventListener("dragstart",e=>{ + draggerEl = e.currentTarget; + e.currentTarget.style.opacity = "0.6"; + e.currentTarget.style.border = "1px dashed yellow"; + e.dataTransfer.effectAllowed = 'move'; + e.dataTransfer.setDragImage(emptyImg, 0, 0); + }) + group_item.addEventListener("dragend",e=>{ + e.target.style.opacity = "1"; + e.currentTarget.style.border = "1px dashed transparent"; + e.currentTarget.removeAttribute("draggable"); + document.querySelectorAll('.easyuse-group-item').forEach((el,i) => { + var prev_i = el.dataset.id; + if (el == draggerEl && prev_i != i ) { + groups.splice(i, 0, groups.splice(prev_i, 1)[0]); + } + el.dataset.id = i; + }); + isGroupMapcanMove = true + }) + group_item.addEventListener("dragover",e=>{ + e.preventDefault(); + if (e.currentTarget == draggerEl) return; + let rect = e.currentTarget.getBoundingClientRect(); + if (e.clientY > rect.top + rect.height / 2) { + e.currentTarget.parentNode.insertBefore(draggerEl, e.currentTarget.nextSibling); + } else { + e.currentTarget.parentNode.insertBefore(draggerEl, e.currentTarget); + } + isGroupMapcanMove = true + }) + + + group_item.setAttribute('data-id',index) + group_item.className = 'easyuse-group-item' + group_item.style = group_item_style + // 标题 + let text_group_title = 
document.createElement('div') + text_group_title.style = `flex:1;font-size:12px;color:var(--input-text);padding:4px;white-space: nowrap;overflow: hidden;text-overflow: ellipsis;cursor:pointer` + text_group_title.innerHTML = `${title}` + text_group_title.addEventListener('mousedown',e=>{ + isGroupMapcanMove = false + e.currentTarget.parentNode.draggable = 'true'; + }) + text_group_title.addEventListener('mouseleave',e=>{ + setTimeout(_=>{ + isGroupMapcanMove = true + },150) + }) + group_item.append(text_group_title) + // 按钮组 + let buttons = document.createElement('div') + group.recomputeInsideNodes(); + const nodesInGroup = group._nodes; + let isGroupShow = nodesInGroup && nodesInGroup.length>0 && nodesInGroup[0].mode == 0 + let isGroupMute = nodesInGroup && nodesInGroup.length>0 && nodesInGroup[0].mode == 2 + let go_btn = document.createElement('button') + go_btn.style = "margin-right:6px;cursor:pointer;font-size:10px;padding:2px 4px;color:var(--input-text);background-color: var(--comfy-input-bg);border: 1px solid var(--border-color);border-radius:4px;" + go_btn.innerText = "Go" + go_btn.addEventListener('click', () => { + app.canvas.ds.offset[0] = -group.pos[0] - group.size[0] * 0.5 + (app.canvas.canvas.width * 0.5) / app.canvas.ds.scale; + app.canvas.ds.offset[1] = -group.pos[1] - group.size[1] * 0.5 + (app.canvas.canvas.height * 0.5) / app.canvas.ds.scale; + app.canvas.setDirty(true, true); + app.canvas.setZoom(1) + }) + buttons.append(go_btn) + let see_btn = document.createElement('button') + let defaultStyle = `cursor:pointer;font-size:10px;;padding:2px;border: 1px solid var(--border-color);border-radius:4px;width:36px;` + see_btn.style = isGroupMute ? `background-color:var(--error-text);color:var(--input-text);` + defaultStyle : (isGroupShow ? `background-color:var(--theme-color);color:var(--input-text);` + defaultStyle : `background-color: var(--comfy-input-bg);color:var(--descrip-text);` + defaultStyle) + see_btn.innerText = isGroupMute ? mute_text : (isGroupShow ? show_text : hide_text) + let pressTimer + let firstTime =0, lastTime =0 + let isHolding = false + see_btn.addEventListener('click', () => { + if(isHolding){ + isHolding = false + return + } + for (const node of nodesInGroup) { + node.mode = isGroupShow ? 4 : 0; + node.graph.change(); + } + isGroupShow = nodesInGroup[0].mode == 0 ? true : false + isGroupMute = nodesInGroup[0].mode == 2 ? true : false + see_btn.style = isGroupMute ? `background-color:var(--error-text);color:var(--input-text);` + defaultStyle : (isGroupShow ? `background-color:#006691;color:var(--input-text);` + defaultStyle : `background-color: var(--comfy-input-bg);color:var(--descrip-text);` + defaultStyle) + see_btn.innerText = isGroupMute ? mute_text : (isGroupShow ? show_text : hide_text) + }) + see_btn.addEventListener('mousedown', () => { + firstTime = new Date().getTime(); + clearTimeout(pressTimer); + pressTimer = setTimeout(_=>{ + for (const node of nodesInGroup) { + node.mode = isGroupMute ? 0 : 2; + node.graph.change(); + } + isGroupShow = nodesInGroup[0].mode == 0 ? true : false + isGroupMute = nodesInGroup[0].mode == 2 ? true : false + see_btn.style = isGroupMute ? `background-color:var(--error-text);color:var(--input-text);` + defaultStyle : (isGroupShow ? `background-color:#006691;color:var(--input-text);` + defaultStyle : `background-color: var(--comfy-input-bg);color:var(--descrip-text);` + defaultStyle) + see_btn.innerText = isGroupMute ? mute_text : (isGroupShow ? 
show_text : hide_text) + },500) + }) + see_btn.addEventListener('mouseup', () => { + lastTime = new Date().getTime(); + if(lastTime - firstTime > 500) isHolding = true + clearTimeout(pressTimer); + }) + buttons.append(see_btn) + group_item.append(buttons) + + groupsDiv.append(group_item) + } + + } + + let groupsDiv = document.createElement('div') + groupsDiv.id = 'easyuse-groups-items' + groupsDiv.style = `overflow-y: auto;max-height: 400px;height:100%;width: 100%;` + + let autoSortDiv = document.createElement('button') + autoSortDiv.style = `cursor:pointer;font-size:10px;padding:2px 4px;color:var(--input-text);background-color: var(--comfy-input-bg);border: 1px solid var(--border-color);border-radius:4px;` + autoSortDiv.innerText = $t('Auto Sorting') + autoSortDiv.addEventListener('click',e=>{ + e.preventDefault() + groupsDiv.innerHTML = `` + let new_groups = groups.sort((a,b)=> a['pos'][0] - b['pos'][0]).sort((a,b)=> a['pos'][1] - b['pos'][1]) + updateGroups(new_groups, groupsDiv, autoSortDiv) + }) + + updateGroups(groups, groupsDiv, autoSortDiv) + + div.appendChild(groupsDiv) + + let remarkDiv = document.createElement('p') + remarkDiv.style = `text-align:center; font-size:10px; padding:0 10px;color:var(--descrip-text)` + remarkDiv.innerText = $t('Toggle `Show/Hide` can set mode of group, LongPress can set group nodes to never') + div.appendChild(groupsDiv) + div.appendChild(remarkDiv) + div.appendChild(autoSortDiv) + + let graphDiv = document.getElementById("graph-canvas") + graphDiv.addEventListener('mouseover', async () => { + groupsDiv.innerHTML = `` + let new_groups = app.canvas.graph._groups + updateGroups(new_groups, groupsDiv, autoSortDiv) + old_nodes = nodes + }) + + if (!document.querySelector('#easyuse_groups_map')){ + document.body.appendChild(div) + }else{ + div.style.display = 'flex' + } + +} + +async function cleanup(){ + try { + const {Running, Pending} = await api.getQueue() + if(Running.length>0 || Pending.length>0){ + toast.error($t("Clean Failed")+ ":"+ $t("Please stop all running tasks before cleaning GPU")) + return + } + api.fetchApi("/easyuse/cleangpu",{ + method:"POST" + }).then(res=>{ + if(res.status == 200){ + toast.success($t("Clean SuccessFully")) + }else{ + toast.error($t("Clean Failed")) + } + }) + + } catch (exception) {} +} + + +let guideDialog = null +let isDownloading = false +function download_model(url,local_dir){ + if(isDownloading || !url || !local_dir) return + isDownloading = true + let body = new FormData(); + body.append('url', url); + body.append('local_dir', local_dir); + api.fetchApi("/easyuse/model/download",{ + method:"POST", + body + }).then(res=>{ + if(res.status == 200){ + toast.success($t("Download SuccessFully")) + }else{ + toast.error($t("Download Failed")) + } + isDownloading = false + }) + +} +class GuideDialog { + + constructor(note, need_models){ + this.dialogDiv = null + this.modelsDiv = null + + if(need_models?.length>0){ + let tbody = [] + + for(let i=0;idownload_model(need_models[i]['download_url'],need_models[i]['local_dir']), target:"_blank", textContent:$t('Download Model')}) : '', + need_models[i]['source_url'] ? $el('a',{href:need_models[i]['source_url'], target:"_blank", textContent:$t('Source Url')}) : '', + need_models[i]['desciption'] ? 
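+    // --- notes on cleanup() / download_model() / GuideDialog above (descriptive only) ---
+    // cleanup() first asks the backend for the queue via api.getQueue() and refuses to run while
+    // anything is Running or Pending; otherwise it POSTs to the extension's /easyuse/cleangpu route
+    // and reports the result with a toast. download_model() is guarded by the isDownloading flag and
+    // POSTs a FormData body (url, local_dir) to /easyuse/model/download. The GuideDialog being built
+    // here renders the workflow note plus a "Models Required" table whose rows can call
+    // download_model() with each entry's download_url / local_dir.
+    // Calling the clean route directly, as a rough sketch (assumes the Python side of
+    // ComfyUI-Easy-Use has registered the route):
+    // const res = await api.fetchApi('/easyuse/cleangpu', { method: 'POST' });
+    // if (res.status !== 200) console.warn('clean gpu failed', res.status);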
$el('span',{textContent:need_models[i]['desciption']}) : '', + ]), + ])) + } + this.modelsDiv = $el('div.easyuse-guide-dialog-models.markdown-body',[ + $el('h3',{textContent:$t('Models Required')}), + $el('table',{cellpadding:0,cellspacing:0},[ + $el('thead',[ + $el('tr',[ + $el('th',{innerHTML:$t('ModelName')}), + $el('th',{innerHTML:$t('Description')}), + ]) + ]), + $el('tbody',tbody) + ]) + ]) + } + + this.dialogDiv = $el('div.easyuse-guide-dialog.hidden',[ + $el('div.easyuse-guide-dialog-header',[ + $el('div.easyuse-guide-dialog-top',[ + $el('div.easyuse-guide-dialog-title',{ + innerHTML:$t('Workflow Guide') + }), + $el('button.closeBtn',{innerHTML:closeIcon,onclick:_=>this.close()}) + ]), + + $el('div.easyuse-guide-dialog-remark',{ + innerHTML:`${$t('Workflow created by')} Yolain , ${$t('Watch more video content')} B站乱乱呀` + }) + ]), + $el('div.easyuse-guide-dialog-content.markdown-body',[ + $el('div.easyuse-guide-dialog-note',{ + innerHTML:note + }), + ...this.modelsDiv ? [this.modelsDiv] : [] + ]) + ]) + + if(disableRenderInfo){ + this.dialogDiv.classList.add('disable-render-info') + } + document.body.appendChild(this.dialogDiv) + } + show(){ + if(this.dialogDiv) this.dialogDiv.classList.remove('hidden') + } + + close(){ + if(this.dialogDiv){ + this.dialogDiv.classList.add('hidden') + } + } + toggle(){ + if(this.dialogDiv){ + if(this.dialogDiv.classList.contains('hidden')){ + this.show() + }else{ + this.close() + } + } + } + + remove(){ + if(this.dialogDiv) document.body.removeChild(this.dialogDiv) + } +} + +// toolbar +const toolBarId = "Comfy.EasyUse.toolBar" +const getEnableToolBar = _ => app.ui.settings.getSettingValue(toolBarId, true) +const getNewMenuPosition = _ => { + try{ + return app.ui.settings.getSettingValue('Comfy.UseNewMenu', 'Disabled') + }catch (e){ + return 'Disabled' + } +} + +let note = null +let toolbar = null +let enableToolBar = getEnableToolBar() && getNewMenuPosition() == 'Disabled' +let disableRenderInfo = localStorage['Comfy.Settings.Comfy.EasyUse.disableRenderInfo'] ? true : false +export function addToolBar(app) { + app.ui.settings.addSetting({ + id: toolBarId, + name: $t("Enable tool bar fixed on the left-bottom (ComfyUI-Easy-Use)"), + type: "boolean", + defaultValue: enableToolBar, + onChange(value) { + enableToolBar = !!value; + if(enableToolBar){ + showToolBar() + }else hideToolBar() + }, + }); +} +function showToolBar(){ + if(toolbar) toolbar.style.display = 'flex' +} +function hideToolBar(){ + if(toolbar) toolbar.style.display = 'none' +} +let monitor = null +function setCrystoolsUI(position){ + const crystools = document.getElementById('crystools-root')?.children || null + if(crystools?.length>0){ + if(!monitor){ + for (let i = 0; i < crystools.length; i++) { + if (crystools[i].id === 'crystools-monitor-container') { + monitor = crystools[i]; + break; + } + } + } + if(monitor){ + if(position == 'Disabled'){ + let replace = true + for (let i = 0; i < crystools.length; i++) { + if (crystools[i].id === 'crystools-monitor-container') { + replace = false + break; + } + } + document.getElementById('crystools-root').appendChild(monitor) + } + else { + let monitor_div = document.getElementById('comfyui-menu-monitor') + if(!monitor_div) app.menu.settingsGroup.element.before($el('div',{id:'comfyui-menu-monitor'},monitor)) + else monitor_div.appendChild(monitor) + } + } + } +} +const changeNewMenuPosition = app.ui.settings.settingsLookup?.['Comfy.UseNewMenu'] +if(changeNewMenuPosition) changeNewMenuPosition.onChange = v => { + v == 'Disabled' ? 
showToolBar() : hideToolBar() + setCrystoolsUI(v) +} + + + +app.registerExtension({ + name: "comfy.easyUse", + init() { + // Canvas Menu + const getCanvasMenuOptions = LGraphCanvas.prototype.getCanvasMenuOptions; + LGraphCanvas.prototype.getCanvasMenuOptions = function () { + const options = getCanvasMenuOptions.apply(this, arguments); + let emptyImg = new Image() + emptyImg.src = "data:image/gif;base64,R0lGODlhAQABAIAAAAUEBAAAACwAAAAAAQABAAACAkQBADs="; + + options.push(null, + // Groups Map + { + content: groupIcon.replace('currentColor','var(--warning-color)') + ' '+ $t('Groups Map') + ' (EasyUse)', + callback: async() => { + createGroupMap() + } + }, + // Force clean ComfyUI GPU Used 强制卸载模型GPU占用 + { + content: rocketIcon.replace('currentColor','var(--theme-color-light)') + ' '+ $t('Cleanup Of GPU Usage') + ' (EasyUse)', + callback: async() =>{ + await cleanup() + } + }, + // Only show the reboot option if the server is running on a local network 仅在本地或局域网环境可重启服务 + isLocalNetwork(window.location.host) ? { + content: rebootIcon.replace('currentColor','var(--error-color)') + ' '+ $t('Reboot ComfyUI') + ' (EasyUse)', + callback: _ =>{ + if (confirm($t("Are you sure you'd like to reboot the server?"))){ + try { + api.fetchApi("/easyuse/reboot"); + } catch (exception) {} + } + } + } : null, + ); + return options; + }; + + let renderInfoEvent = LGraphCanvas.prototype.renderInfo + if(disableRenderInfo){ + LGraphCanvas.prototype.renderInfo = function (ctx, x, y) {} + } + + if(!toolbar){ + toolbar = $el('div.easyuse-toolbar',[ + $el('div.easyuse-toolbar-item',{ + onclick:_=>{ + createGroupMap() + } + },[ + $el('div.easyuse-toolbar-icon.group', {innerHTML:groupIcon}), + $el('div.easyuse-toolbar-tips',$t('Groups Map')) + ]), + $el('div.easyuse-toolbar-item',{ + onclick:async()=>{ + await cleanup() + } + },[ + $el('div.easyuse-toolbar-icon.rocket',{innerHTML:rocketIcon}), + $el('div.easyuse-toolbar-tips',$t('Cleanup Of GPU Usage')) + ]), + ]) + if(disableRenderInfo){ + toolbar.classList.add('disable-render-info') + }else{ + toolbar.classList.remove('disable-render-info') + } + document.body.appendChild(toolbar) + } + + // rewrite handleFile + let loadGraphDataEvent = app.loadGraphData + app.loadGraphData = async function (data, clean=true) { + // if(data?.extra?.cpr){ + // toast.copyright() + // } + if(data?.extra?.note){ + if(guideDialog) { + guideDialog.remove() + guideDialog = null + } + if(note && toolbar) toolbar.removeChild(note) + const need_models = data.extra?.need_models || null + guideDialog = new GuideDialog(data.extra.note, need_models) + note = $el('div.easyuse-toolbar-item',{ + onclick:async()=>{ + guideDialog.toggle() + } + },[ + $el('div.easyuse-toolbar-icon.question',{innerHTML:quesitonIcon}), + $el('div.easyuse-toolbar-tips',$t('Workflow Guide')) + ]) + if(toolbar) toolbar.insertBefore(note, toolbar.firstChild) + } + else{ + if(note) { + toolbar.removeChild(note) + note = null + } + } + return await loadGraphDataEvent.apply(this, [...arguments]) + } + + addToolBar(app) + }, + async setup() { + // New style menu button + if(app.menu?.actionsGroup){ + const groupMap = new (await import('../../../../scripts/ui/components/button.js')).ComfyButton({ + icon:'list-box', + action:()=> createGroupMap(), + tooltip: "EasyUse Group Map", + // content: "EasyUse Group Map", + classList: "comfyui-button comfyui-menu-mobile-collapse" + }); + app.menu.actionsGroup.element.after(groupMap.element); + const position = getNewMenuPosition() + setCrystoolsUI(position) + if(position == 'Disabled') 
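+            // --- notes on the init() hook above (descriptive only) ---
+            // getCanvasMenuOptions is wrapped so the right-click canvas menu gains "Groups Map",
+            // "Cleanup Of GPU Usage" and, only when isLocalNetwork() matches the current host, a
+            // confirm-guarded "Reboot ComfyUI" entry that calls /easyuse/reboot.
+            // app.loadGraphData is wrapped the same way: the original is kept in loadGraphDataEvent,
+            // the wrapper reads workflow extra.note / extra.need_models to (re)build the GuideDialog
+            // and a "Workflow Guide" toolbar button, then delegates to the original.
+            // The wrap-and-delegate pattern in isolation (sketch; names are illustrative):
+            // const original = app.loadGraphData;
+            // app.loadGraphData = async function (...args) {
+            //     // inspect args[0]?.extra here, then fall through to the stock loader
+            //     return await original.apply(this, args);
+            // };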
showToolBar() + else hideToolBar() + // const easyNewMenu = $el('div.easyuse-new-menu',[ + // $el('div.easyuse-new-menu-intro',[ + // $el('div.easyuse-new-menu-logo',{innerHTML:logoIcon}), + // $el('div.easyuse-new-menu-title',[ + // $el('div.title',{textContent:'ComfyUI-Easy-Use'}), + // $el('div.desc',{textContent:'Version:'}) + // ]) + // ]) + // ]) + // app.menu?.actionsGroup.element.after(new (await import('../../../../scripts/ui/components/splitButton.js')).ComfySplitButton({ + // primary: groupMap, + // mode:'click', + // position:'absolute', + // horizontal: 'right' + // },easyNewMenu).element); + } + + }, + beforeRegisterNodeDef(nodeType, nodeData, app) { + if (nodeData.name.startsWith("easy")) { + const origOnConfigure = nodeType.prototype.onConfigure; + nodeType.prototype.onConfigure = function () { + const r = origOnConfigure ? origOnConfigure.apply(this, arguments) : undefined; + return r; + }; + } + }, +}); \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/easy/easyAccount.js b/ComfyUI-Easy-Use/web/js/easy/easyAccount.js new file mode 100644 index 0000000000000000000000000000000000000000..421432fa190616a84dff5a5b8cc482c5c14e4580 --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/easy/easyAccount.js @@ -0,0 +1,283 @@ +import { app } from "../../../../scripts/app.js"; +import { api } from "../../../../scripts/api.js"; +import { $el, ComfyDialog } from "../../../../scripts/ui.js"; +import { $t } from '../common/i18n.js' +import { toast } from "../common/toast.js"; +import {sleep, accSub} from "../common/utils.js"; + +let api_keys = [] +let api_current = 0 +let user_info = {} + +const api_cost = { + 'sd3': 6.5, + 'sd3-turbo': 4, +} + +class AccountDialog extends ComfyDialog { + constructor() { + super(); + this.lists = [] + this.dialog_div = null + this.user_div = null + } + + addItem(index, user_div){ + return $el('div.easyuse-account-dialog-item',[ + $el('input',{type:'text',placeholder:'Enter name',oninput: e=>{ + const dataIndex = Array.prototype.indexOf.call(this.dialog_div.querySelectorAll('.easyuse-account-dialog-item'), e.target.parentNode) + api_keys[dataIndex]['name'] = e.target.value + },value:api_keys[index]['name']}), + $el('input.key',{type:'text',oninput: e=>{ + const dataIndex = Array.prototype.indexOf.call(this.dialog_div.querySelectorAll('.easyuse-account-dialog-item'), e.target.parentNode) + api_keys[dataIndex]['key'] = e.target.value + },placeholder:'Enter APIKEY', value:api_keys[index]['key']}), + $el('button.choose',{textContent:$t('Choose'),onclick:async(e)=>{ + const dataIndex = Array.prototype.indexOf.call(this.dialog_div.querySelectorAll('.easyuse-account-dialog-item'), e.target.parentNode) + let name = api_keys[dataIndex]['name'] + let key = api_keys[dataIndex]['key'] + if(!name){ + toast.error($t('Please enter the account name')) + return + } + else if(!key){ + toast.error($t('Please enter the APIKEY')) + return + } + let missing = true + for(let i=0;i{ + new AccountDialog().show(user_div); + } + },[ + $el('div.user',[ + $el('div.avatar', avatar ? 
[$el('img',{src:avatar})] : '😀'), + $el('div.info', [ + $el('h5.name', email), + $el('h6.remark','Credits: '+ credits) + ]) + ]), + $el('div.edit', {textContent:$t('Edit')}) + ]) + ) + toast.success($t('Save Succeed')) + } + else toast.success($t('Save Succeed')) + this.close() + } else { + toast.error($t('Save Failed')) + } + }}), + $el('button.delete',{textContent:$t('Delete'),onclick:e=>{ + const dataIndex = Array.prototype.indexOf.call(this.dialog_div.querySelectorAll('.easyuse-account-dialog-item'), e.target.parentNode) + if(api_keys.length<=1){ + toast.error($t('At least one account is required')) + return + } + api_keys.splice(dataIndex,1) + this.dialog_div.removeChild(e.target.parentNode) + }}), + ]) + } + + show(userdiv) { + api_keys.forEach((item,index)=>{ + this.lists.push(this.addItem(index,userdiv)) + }) + this.dialog_div = $el("div.easyuse-account-dialog", this.lists) + super.show( + $el('div.easyuse-account-dialog-main',[ + $el('div',[ + $el('a',{href:'https://platform.stability.ai/account/keys',target:'_blank',textContent:$t('Getting Your APIKEY')}), + ]), + this.dialog_div, + ]) + ); + } + + createButtons() { + const btns = super.createButtons(); + btns.unshift($el('button',{ + type:'button', + textContent:$t('Save Account Info'), + onclick:_=>{ + let missing = true + for(let i=0;i { + if (res.status == 200) { + toast.success($t('Save Succeed')) + + } else { + toast.error($t('Save Failed')) + } + }) + } + } + })) + btns.unshift($el('button',{ + type:'button', + textContent:$t('Add Account'), + onclick:_=>{ + const name = 'Account '+(api_keys.length).toString() + api_keys.push({name,key:''}) + const item = this.addItem(api_keys.length - 1) + this.lists.push(item) + this.dialog_div.appendChild(item) + } + })) + return btns + } +} + + +app.registerExtension({ + name: 'comfy.easyUse.account', + async beforeRegisterNodeDef(nodeType, nodeData, app) { + if(nodeData.name == 'easy stableDiffusion3API'){ + + const onNodeCreated = nodeType.prototype.onNodeCreated; + nodeType.prototype.onNodeCreated = async function() { + onNodeCreated ? onNodeCreated?.apply(this, arguments) : undefined; + const seed_widget = this.widgets.find(w => ['seed_num','seed'].includes(w.name)) + const seed_control = this.widgets.find(w=> ['control_before_generate','control_after_generate'].includes(w.name)) + let model_widget = this.widgets.find(w => w.name == 'model') + model_widget.callback = value =>{ + cost_widget.value = '-'+api_cost[value] + } + const cost_widget = this.addWidget('text', 'cost_credit', '0', _=>{ + },{ + serialize:false, + }) + cost_widget.disabled = true + setTimeout(_=>{ + if(seed_control.name == 'control_before_generate' && seed_widget.value === 0){ + seed_widget.value = Math.floor(Math.random() * 4294967294) + } + cost_widget.value = '-'+api_cost[model_widget.value] + },100) + let user_div = $el('div.easyuse-account-user', [$t('Loading UserInfo...')]) + let account = this.addDOMWidget('account',"btn",$el('div.easyuse-account',user_div)); + // 更新balance信息 + api.addEventListener('stable-diffusion-api-generate-succeed', async ({detail}) => { + let remarkDiv = user_div.querySelectorAll('.remark') + if(remarkDiv && remarkDiv[0]){ + const credits = detail?.model ? 
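+                    // --- notes on the credits display above (descriptive only) ---
+                    // The node listens for the 'stable-diffusion-api-generate-succeed' event dispatched
+                    // through api, optimistically subtracts the generating model's cost (api_cost) from
+                    // the credits shown in the .remark element using accSub (imported from
+                    // common/utils.js, presumably a float-safe subtraction helper), then after
+                    // sleep(10000) re-reads the authoritative balance from /easyuse/stability/balance.
+                    // The optimistic update in isolation (sketch; `remark` stands for the .remark
+                    // element queried above):
+                    // const shown = parseFloat(remark.innerText.replace(/Credits: /g, ''));
+                    // remark.innerText = 'Credits: ' + accSub(shown, api_cost[detail.model]);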
api_cost[detail.model] : 0 + if(credits) { + let balance = accSub(parseFloat(remarkDiv[0].innerText.replace(/Credits: /g,'')),credits) + if(balance>0){ + remarkDiv[0].innerText = 'Credits: '+ balance.toString() + } + } + } + await sleep(10000) + const res = await api.fetchApi('/easyuse/stability/balance') + if(res.status == 200){ + const data = await res.json() + if(data?.balance){ + const credits = data.balance?.credits || 0 + if(remarkDiv && remarkDiv[0]){ + remarkDiv[0].innerText = 'Credits: ' + credits + } + } + } + }) + // 获取api_keys + const res = await api.fetchApi('/easyuse/stability/api_keys') + if (res.status == 200){ + let data = await res.json() + api_keys = data.keys + api_current = data.current + if (api_keys.length > 0 && api_current!==undefined){ + const api_key = api_keys[api_current]['key'] + const api_name = api_keys[api_current]['name'] + if(!api_key){ + user_div.replaceChildren( + $el('div.easyuse-account-user-info', { + onclick:_=>{ + new AccountDialog().show(user_div); + } + },[ + $el('div.user',[ + $el('div.avatar', '😀'), + $el('div.info', [ + $el('h5.name', api_name), + $el('h6.remark',$t('Click to set the APIKEY first')) + ]) + ]), + $el('div.edit', {textContent:$t('Edit')}) + ]) + ) + }else{ + // 获取账号信息 + const res = await api.fetchApi('/easyuse/stability/user_info') + if(res.status == 200){ + const data = await res.json() + if(data?.account && data?.balance){ + const avatar = data.account?.profile_picture || null + const email = data.account?.email || null + const credits = data.balance?.credits || 0 + user_div.replaceChildren( + $el('div.easyuse-account-user-info', { + onclick:_=>{ + new AccountDialog().show(user_div); + } + },[ + $el('div.user',[ + $el('div.avatar', avatar ? [$el('img',{src:avatar})] : '😀'), + $el('div.info', [ + $el('h5.name', email), + $el('h6.remark','Credits: '+ credits) + ]) + ]), + $el('div.edit', {textContent:$t('Edit')}) + ]) + ) + } + } + } + } + } + } + } + } +}) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/easy/easyContextMenu.js b/ComfyUI-Easy-Use/web/js/easy/easyContextMenu.js new file mode 100644 index 0000000000000000000000000000000000000000..0abbdd684b3d0352d06161a4e4b98b3f68a0ea00 --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/easy/easyContextMenu.js @@ -0,0 +1,174 @@ +import {app} from "../../../../scripts/app.js"; +import {api} from "../../../../scripts/api.js"; +import {$el} from "../../../../scripts/ui.js"; +import {$t} from "../common/i18n.js"; +import {getExtension, spliceExtension} from '../common/utils.js' +import {toast} from "../common/toast.js"; + +const setting_id = "Comfy.EasyUse.MenuNestSub" +let enableMenuNestSub = false +let thumbnails = [] + +export function addMenuNestSubSetting(app) { + app.ui.settings.addSetting({ + id: setting_id, + name: $t("Enable ContextMenu Auto Nest Subdirectories (ComfyUI-Easy-Use)"), + type: "boolean", + defaultValue: enableMenuNestSub, + onChange(value) { + enableMenuNestSub = !!value; + }, + }); +} + +const getEnableMenuNestSub = _ => app.ui.settings.getSettingValue(setting_id, enableMenuNestSub) + + +const Loaders = ['easy fullLoader','easy a1111Loader','easy comfyLoader'] +app.registerExtension({ + name:"comfy.easyUse.contextMenu", + async setup(app){ + addMenuNestSubSetting(app) + // 获取所有模型图像 + const imgRes = await api.fetchApi(`/easyuse/models/thumbnail`) + if (imgRes.status === 200) { + let data = await imgRes.json(); + thumbnails = data + } + else if(getEnableMenuNestSub()){ + toast.error($t("Too many thumbnails, have closed the display")) + } + const 
existingContextMenu = LiteGraph.ContextMenu; + LiteGraph.ContextMenu = function(values,options){ + const threshold = 10; + const enabled = getEnableMenuNestSub(); + if(!enabled || (values?.length || 0) <= threshold || !(options?.callback) || values.some(i => typeof i !== 'string')){ + if(enabled){ + // console.log('Skipping context menu auto nesting for incompatible menu.'); + } + return existingContextMenu.apply(this,[...arguments]); + } + const compatValues = values; + const originalValues = [...compatValues]; + const folders = {}; + const specialOps = []; + const folderless = []; + for(const value of compatValues){ + const splitBy = value.indexOf('/') > -1 ? '/' : '\\'; + const valueSplit = value.split(splitBy); + if(valueSplit.length > 1){ + const key = valueSplit.shift(); + folders[key] = folders[key] || []; + folders[key].push(valueSplit.join(splitBy)); + }else if(value === 'CHOOSE' || value.startsWith('DISABLE ')){ + specialOps.push(value); + }else{ + folderless.push(value); + } + } + const foldersCount = Object.values(folders).length; + if(foldersCount > 0){ + const oldcallback = options.callback; + options.callback = null; + const newCallback = (item,options) => { + if(['None','无','無','なし'].includes(item.content)) oldcallback('None',options) + else oldcallback(originalValues.find(i => i.endsWith(item.content),options)); + }; + const addContent = (content, folderName='') => { + const name = folderName ? folderName + '\\' + spliceExtension(content) : spliceExtension(content); + const ext = getExtension(content) + const time = new Date().getTime() + let thumbnail = '' + if(['ckpt', 'pt', 'bin', 'pth', 'safetensors'].includes(ext)){ + for(let i=0;i { + let subs = [] + let less = [] + const b = folder.map(name=> { + const _folders = {}; + const splitBy = name.indexOf('/') > -1 ? 
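+            // --- notes on the ContextMenu override above (descriptive only) ---
+            // LiteGraph.ContextMenu is only replaced when the setting is enabled, the menu has more
+            // than 10 entries, options.callback exists and every entry is a string. Each value is
+            // split on '/' or '\\': the first path segment becomes a submenu, 'CHOOSE' /
+            // 'DISABLE ...' entries and path-less entries stay at the top level, and the patched
+            // callback maps the clicked leaf back to the original full value with endsWith().
+            // addContent() also attaches a thumbnail (from the /easyuse/models/thumbnail list fetched
+            // in setup) for model-like extensions such as .ckpt / .safetensors.
+            // Grouping by first path segment, in isolation (sketch):
+            // const folders = {};
+            // for (const v of values) {
+            //     const sep = v.includes('/') ? '/' : '\\';
+            //     const [head, ...rest] = v.split(sep);
+            //     if (rest.length) (folders[head] ||= []).push(rest.join(sep));
+            // }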
'/' : '\\'; + const valueSplit = name.split(splitBy); + if(valueSplit.length > 1){ + const key = valueSplit.shift(); + _folders[key] = _folders[key] || []; + _folders[key].push(valueSplit.join(splitBy)); + } + const foldersCount = Object.values(folders).length; + if(foldersCount > 0){ + let key = Object.keys(_folders)[0] + if(key && _folders[key]) subs.push({key, value:_folders[key][0]}) + else{ + less.push(addContent(name,key)) + } + } + return addContent(name,folderName) + }) + if(subs.length>0){ + let subs_obj = {} + subs.forEach(item => { + subs_obj[item.key] = subs_obj[item.key] || [] + subs_obj[item.key].push(item.value) + }) + return [...Object.entries(subs_obj).map(f => { + return { + content: f[0], + has_submenu: true, + callback: () => {}, + submenu: { + options: add_sub_folder(f[1], f[0]), + } + } + }),...less] + } + else return b + } + + for(const [folderName,folder] of Object.entries(folders)){ + newValues.push({ + content:folderName, + has_submenu:true, + callback:() => {}, + submenu:{ + options:add_sub_folder(folder,folderName), + } + }); + } + newValues.push(...folderless.map(f => addContent(f, ''))); + if(specialOps.length > 0) newValues.push(...specialOps.map(f => addContent(f, ''))); + return existingContextMenu.call(this,newValues,options); + } + return existingContextMenu.apply(this,[...arguments]); + } + LiteGraph.ContextMenu.prototype = existingContextMenu.prototype; + }, + +}) + diff --git a/ComfyUI-Easy-Use/web/js/easy/easyDynamicWidgets.js b/ComfyUI-Easy-Use/web/js/easy/easyDynamicWidgets.js new file mode 100644 index 0000000000000000000000000000000000000000..ec5fa9fd2d6b916e6da7c9e6e2ffde6418fc50bf --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/easy/easyDynamicWidgets.js @@ -0,0 +1,1273 @@ +import { app } from "../../../../scripts/app.js"; +import { api } from "../../../../scripts/api.js"; +import { ComfyWidgets } from "../../../../scripts/widgets.js"; +import { toast} from "../common/toast.js"; +import { $t } from '../common/i18n.js'; + +import { findWidgetByName, toggleWidget, updateNodeHeight} from "../common/utils.js"; + +const seedNodes = ["easy seed", "easy latentNoisy", "easy wildcards", "easy preSampling", "easy preSamplingAdvanced", "easy preSamplingNoiseIn", "easy preSamplingSdTurbo", "easy preSamplingCascade", "easy preSamplingDynamicCFG", "easy preSamplingLayerDiffusion", "easy fullkSampler", "easy fullCascadeKSampler"] +const loaderNodes = ["easy fullLoader", "easy a1111Loader", "easy comfyLoader", "easy hunyuanDiTLoader", "easy pixArtLoader"] + +function widgetLogic(node, widget) { + if (widget.name === 'lora_name') { + if (widget.value === "None") { + toggleWidget(node, findWidgetByName(node, 'lora_model_strength')) + toggleWidget(node, findWidgetByName(node, 'lora_clip_strength')) + } else { + toggleWidget(node, findWidgetByName(node, 'lora_model_strength'), true) + toggleWidget(node, findWidgetByName(node, 'lora_clip_strength'), true) + } + } + if (widget.name === 'rescale') { + let rescale_after_model = findWidgetByName(node, 'rescale_after_model').value + if (widget.value === 'by percentage' && rescale_after_model) { + toggleWidget(node, findWidgetByName(node, 'width')) + toggleWidget(node, findWidgetByName(node, 'height')) + toggleWidget(node, findWidgetByName(node, 'longer_side')) + toggleWidget(node, findWidgetByName(node, 'percent'), true) + } else if (widget.value === 'to Width/Height' && rescale_after_model) { + toggleWidget(node, findWidgetByName(node, 'width'), true) + toggleWidget(node, findWidgetByName(node, 'height'), true) + 
toggleWidget(node, findWidgetByName(node, 'percent')) + toggleWidget(node, findWidgetByName(node, 'longer_side')) + } else if (rescale_after_model) { + toggleWidget(node, findWidgetByName(node, 'longer_side'), true) + toggleWidget(node, findWidgetByName(node, 'width')) + toggleWidget(node, findWidgetByName(node, 'height')) + toggleWidget(node, findWidgetByName(node, 'percent')) + } + updateNodeHeight(node) + } + if (widget.name === 'upscale_method') { + if (widget.value === "None") { + toggleWidget(node, findWidgetByName(node, 'factor')) + toggleWidget(node, findWidgetByName(node, 'crop')) + } else { + toggleWidget(node, findWidgetByName(node, 'factor'), true) + toggleWidget(node, findWidgetByName(node, 'crop'), true) + } + updateNodeHeight(node) + } + if (widget.name === 'image_output') { + if (widget.value === 'Sender' || widget.value === 'Sender&Save'){ + toggleWidget(node, findWidgetByName(node, 'link_id'), true) + }else { + toggleWidget(node, findWidgetByName(node, 'link_id')) + } + if (widget.value === 'Hide' || widget.value === 'Preview' || widget.value == 'Preview&Choose' || widget.value === 'Sender') { + toggleWidget(node, findWidgetByName(node, 'save_prefix')) + toggleWidget(node, findWidgetByName(node, 'output_path')) + toggleWidget(node, findWidgetByName(node, 'embed_workflow')) + toggleWidget(node, findWidgetByName(node, 'number_padding')) + toggleWidget(node, findWidgetByName(node, 'overwrite_existing')) + } else if (widget.value === 'Save' || widget.value === 'Hide&Save' || widget.value === 'Sender&Save') { + toggleWidget(node, findWidgetByName(node, 'save_prefix'), true) + toggleWidget(node, findWidgetByName(node, 'output_path'), true) + toggleWidget(node, findWidgetByName(node, 'embed_workflow'), true) + toggleWidget(node, findWidgetByName(node, 'number_padding'), true) + toggleWidget(node, findWidgetByName(node, 'overwrite_existing'), true) + } + + if(widget.value === 'Hide' || widget.value === 'Hide&Save'){ + toggleWidget(node, findWidgetByName(node, 'decode_vae_name')) + }else{ + toggleWidget(node, findWidgetByName(node, 'decode_vae_name'), true) + } + } + if (widget.name === 'add_noise') { + let control_before_widget = findWidgetByName(node, 'control_before_generate') + let control_after_widget = findWidgetByName(node, 'control_after_generate') + if (widget.value === "disable") { + toggleWidget(node, findWidgetByName(node, 'seed')) + if(control_before_widget){ + control_before_widget.last_value = control_before_widget.value + control_before_widget.value = 'fixed' + toggleWidget(node, control_before_widget) + } + if(control_after_widget){ + control_after_widget.last_value = control_after_widget.value + control_after_widget.value = 'fixed' + toggleWidget(node, control_after_widget) + } + } else { + toggleWidget(node, findWidgetByName(node, 'seed'), true) + if(control_before_widget){ + if(control_before_widget?.last_value) control_before_widget.value = control_before_widget.last_value + toggleWidget(node, control_before_widget, true) + } + if(control_after_widget) { + if(control_after_widget?.last_value) control_after_widget.value = control_after_widget.last_value + toggleWidget(node, findWidgetByName(node, control_after_widget, true)) + } + } + updateNodeHeight(node) + } + if (widget.name === 'num_loras') { + let number_to_show = widget.value + 1 + for (let i = 0; i < number_to_show; i++) { + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_name'), true) + if (findWidgetByName(node, 'mode').value === "simple") { + toggleWidget(node, findWidgetByName(node, 
'lora_'+i+'_strength'), true) + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_model_strength')) + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_clip_strength')) + } else { + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_strength')) + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_model_strength'), true) + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_clip_strength'), true) + } + } + for (let i = number_to_show; i < 21; i++) { + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_name')) + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_strength')) + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_model_strength')) + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_clip_strength')) + } + updateNodeHeight(node) + } + if (widget.name === 'num_controlnet') { + let number_to_show = widget.value + 1 + for (let i = 0; i < number_to_show; i++) { + toggleWidget(node, findWidgetByName(node, 'controlnet_'+i), true) + toggleWidget(node, findWidgetByName(node, 'controlnet_'+i+'_strength'), true) + toggleWidget(node, findWidgetByName(node, 'scale_soft_weight_'+i),true) + if (findWidgetByName(node, 'mode').value === "simple") { + toggleWidget(node, findWidgetByName(node, 'start_percent_'+i)) + toggleWidget(node, findWidgetByName(node, 'end_percent_'+i)) + } else { + toggleWidget(node, findWidgetByName(node, 'start_percent_'+i),true) + toggleWidget(node, findWidgetByName(node, 'end_percent_'+i), true) + } + } + for (let i = number_to_show; i < 10; i++) { + toggleWidget(node, findWidgetByName(node, 'controlnet_'+i)) + toggleWidget(node, findWidgetByName(node, 'controlnet_'+i+'_strength')) + toggleWidget(node, findWidgetByName(node, 'start_percent_'+i)) + toggleWidget(node, findWidgetByName(node, 'end_percent_'+i)) + toggleWidget(node, findWidgetByName(node, 'scale_soft_weight_'+i)) + } + updateNodeHeight(node) + } + + if (widget.name === 'mode') { + switch (node.comfyClass) { + case 'easy loraStack': + for (let i = 0; i < (findWidgetByName(node, 'num_loras').value + 1); i++) { + if (widget.value === "simple") { + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_strength'), true) + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_model_strength')) + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_clip_strength')) + } else { + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_strength')) + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_model_strength'), true) + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_clip_strength'), true)} + } + updateNodeHeight(node) + break + case 'easy controlnetStack': + for (let i = 0; i < (findWidgetByName(node, 'num_controlnet').value + 1); i++) { + if (widget.value === "simple") { + toggleWidget(node, findWidgetByName(node, 'start_percent_'+i)) + toggleWidget(node, findWidgetByName(node, 'end_percent_'+i)) + } else { + toggleWidget(node, findWidgetByName(node, 'start_percent_' + i), true) + toggleWidget(node, findWidgetByName(node, 'end_percent_' + i), true) + } + } + updateNodeHeight(node) + break + case 'easy icLightApply': + if (widget.value === "Foreground") { + toggleWidget(node, findWidgetByName(node, 'lighting'), true) + toggleWidget(node, findWidgetByName(node, 'remove_bg'), true) + toggleWidget(node, findWidgetByName(node, 'source')) + } else { + toggleWidget(node, findWidgetByName(node, 'lighting')) + toggleWidget(node, findWidgetByName(node, 'source'), true) + toggleWidget(node, findWidgetByName(node, 'remove_bg')) + } + updateNodeHeight(node) + break + } + } + + if 
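+    // --- notes on widgetLogic() above (descriptive only) ---
+    // Every branch follows the same pattern: look up dependent widgets by name with
+    // findWidgetByName(node, name), show or hide them with toggleWidget(node, widget, show), then
+    // call updateNodeHeight(node) so the node resizes to fit. Count-style widgets (num_loras,
+    // num_controlnet, ...) show the first N rows and hide the remainder up to a fixed maximum, and
+    // the "simple" mode collapses separate model/clip strengths into a single strength field.
+    // The generic loop in isolation (sketch; prefix/max are illustrative parameters):
+    // const showRows = (node, prefix, count, max) => {
+    //     for (let i = 0; i < max; i++)
+    //         toggleWidget(node, findWidgetByName(node, prefix + i), i < count);
+    //     updateNodeHeight(node);
+    // };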
(widget.name === 'resolution') { + if(widget.value === "自定义 x 自定义"){ + widget.value = 'width x height (custom)' + } + if (widget.value === "自定义 x 自定义" || widget.value === 'width x height (custom)') { + toggleWidget(node, findWidgetByName(node, 'empty_latent_width'), true) + toggleWidget(node, findWidgetByName(node, 'empty_latent_height'), true) + } else { + toggleWidget(node, findWidgetByName(node, 'empty_latent_width'), false) + toggleWidget(node, findWidgetByName(node, 'empty_latent_height'), false) + } + } + if (widget.name === 'ratio') { + if (widget.value === "custom") { + toggleWidget(node, findWidgetByName(node, 'empty_latent_width'), true) + toggleWidget(node, findWidgetByName(node, 'empty_latent_height'), true) + } else { + toggleWidget(node, findWidgetByName(node, 'empty_latent_width'), false) + toggleWidget(node, findWidgetByName(node, 'empty_latent_height'), false) + } + } + if (widget.name === 'downscale_mode') { + const widget_names = ['block_number', 'downscale_factor', 'start_percent', 'end_percent', 'downscale_after_skip', 'downscale_method', 'upscale_method'] + if (widget.value === "None") widget_names.map(name=> toggleWidget(node, findWidgetByName(node, name))) + else if(widget.value == 'Auto') widget_names.map(name =>toggleWidget(node, findWidgetByName(node, name),name == 'block_number' ? true : false)) + else widget_names.map(name=> toggleWidget(node, findWidgetByName(node, name), true)) + updateNodeHeight(node) + } + + if (widget.name == 'range_mode'){ + if(widget.value == 'step'){ + toggleWidget(node, findWidgetByName(node, 'step'), true) + toggleWidget(node, findWidgetByName(node, 'num_steps')) + }else if(widget.value == 'num_steps'){ + toggleWidget(node, findWidgetByName(node, 'step')) + toggleWidget(node, findWidgetByName(node, 'num_steps'), true) + } + updateNodeHeight(node) + } + + if (widget.name === 'toggle') { + widget.type = 'toggle' + widget.options = {on: 'Enabled', off: 'Disabled'} + } + + if(widget.name == 'text_combine_mode'){ + if(widget.value == 'replace'){ + toggleWidget(node, findWidgetByName(node, 'replace_text'), true) + }else{ + toggleWidget(node, findWidgetByName(node, 'replace_text')) + } + updateNodeHeight(node) + } + + if (widget.name === 'conditioning_mode') { + if (["replace", "concat", "combine"].includes(widget.value)) { + toggleWidget(node, findWidgetByName(node, 'average_strength')) + toggleWidget(node, findWidgetByName(node, 'old_cond_start')) + toggleWidget(node, findWidgetByName(node, 'old_cond_end')) + toggleWidget(node, findWidgetByName(node, 'new_cond_start')) + toggleWidget(node, findWidgetByName(node, 'new_cond_end')) + } else if(widget.value == 'average'){ + toggleWidget(node, findWidgetByName(node, 'average_strength'), true) + toggleWidget(node, findWidgetByName(node, 'old_cond_start')) + toggleWidget(node, findWidgetByName(node, 'old_cond_end')) + toggleWidget(node, findWidgetByName(node, 'new_cond_start')) + toggleWidget(node, findWidgetByName(node, 'new_cond_end')) + }else if(widget.value == 'timestep'){ + toggleWidget(node, findWidgetByName(node, 'average_strength')) + toggleWidget(node, findWidgetByName(node, 'old_cond_start'), true) + toggleWidget(node, findWidgetByName(node, 'old_cond_end'), true) + toggleWidget(node, findWidgetByName(node, 'new_cond_start'), true) + toggleWidget(node, findWidgetByName(node, 'new_cond_end'), true) + } + } + + if (widget.name === 'preset') { + const normol_presets = [ + 'LIGHT - SD1.5 only (low strength)', + 'STANDARD (medium strength)', + 'VIT-G (medium strength)', + 'PLUS (high 
strength)', 'PLUS FACE (portraits)', + 'FULL FACE - SD1.5 only (portraits stronger)', + ] + const faceid_presets = [ + 'FACEID', + 'FACEID PLUS - SD1.5 only', + 'FACEID PLUS V2', + 'FACEID PORTRAIT (style transfer)', + 'FACEID PORTRAIT UNNORM - SDXL only (strong)' + ] + if(normol_presets.includes(widget.value)){ + toggleWidget(node, findWidgetByName(node, 'lora_strength')) + toggleWidget(node, findWidgetByName(node, 'provider')) + toggleWidget(node, findWidgetByName(node, 'weight_faceidv2')) + toggleWidget(node, findWidgetByName(node, 'use_tiled'), true) + let use_tiled = findWidgetByName(node, 'use_tiled') + if(use_tiled && use_tiled.value){ + toggleWidget(node, findWidgetByName(node, 'sharpening'), true) + }else { + toggleWidget(node, findWidgetByName(node, 'sharpening')) + } + + } + else if(faceid_presets.includes(widget.value)){ + if(widget.value == 'FACEID PLUS V2'){ + toggleWidget(node, findWidgetByName(node, 'weight_faceidv2'), true) + }else{ + toggleWidget(node, findWidgetByName(node, 'weight_faceidv2')) + } + if(['FACEID PORTRAIT (style transfer)','FACEID PORTRAIT UNNORM - SDXL only (strong)'].includes(widget.value)){ + toggleWidget(node, findWidgetByName(node, 'lora_strength'), false) + } + else{ + toggleWidget(node, findWidgetByName(node, 'lora_strength'), true) + } + toggleWidget(node, findWidgetByName(node, 'provider'), true) + toggleWidget(node, findWidgetByName(node, 'use_tiled')) + toggleWidget(node, findWidgetByName(node, 'sharpening')) + } + updateNodeHeight(node) + } + + if (widget.name === 'use_tiled') { + if(widget.value) + toggleWidget(node, findWidgetByName(node, 'sharpening'), true) + else + toggleWidget(node, findWidgetByName(node, 'sharpening')) + updateNodeHeight(node) + } + + if (widget.name === 'num_embeds') { + let number_to_show = widget.value + 1 + for (let i = 0; i < number_to_show; i++) { + toggleWidget(node, findWidgetByName(node, 'weight'+i), true) + } + for (let i = number_to_show; i < 6; i++) { + toggleWidget(node, findWidgetByName(node, 'weight'+i)) + } + updateNodeHeight(node) + } + + if (widget.name === 'guider'){ + switch (widget.value){ + case 'Basic': + toggleWidget(node, findWidgetByName(node, 'cfg')) + toggleWidget(node, findWidgetByName(node, 'cfg_negative')) + break + case 'CFG': + toggleWidget(node, findWidgetByName(node, 'cfg'),true) + toggleWidget(node, findWidgetByName(node, 'cfg_negative')) + break + case 'IP2P+DualCFG': + case 'DualCFG': + toggleWidget(node, findWidgetByName(node, 'cfg'),true) + toggleWidget(node, findWidgetByName(node, 'cfg_negative'), true) + break + + } + updateNodeHeight(node) + } + + if (widget.name === 'scheduler'){ + if (['karrasADV','exponentialADV','polyExponential'].includes(widget.value)){ + toggleWidget(node, findWidgetByName(node, 'sigma_max'), true) + toggleWidget(node, findWidgetByName(node, 'sigma_min'), true) + toggleWidget(node, findWidgetByName(node, 'denoise')) + toggleWidget(node, findWidgetByName(node, 'beta_d')) + toggleWidget(node, findWidgetByName(node, 'beta_min')) + toggleWidget(node, findWidgetByName(node, 'eps_s')) + toggleWidget(node, findWidgetByName(node, 'coeff')) + if(widget.value != 'exponentialADV'){ + toggleWidget(node, findWidgetByName(node, 'rho'), true) + }else{ + toggleWidget(node, findWidgetByName(node, 'rho')) + } + }else if(widget.value == 'vp'){ + toggleWidget(node, findWidgetByName(node, 'sigma_max')) + toggleWidget(node, findWidgetByName(node, 'sigma_min')) + toggleWidget(node, findWidgetByName(node, 'denoise')) + toggleWidget(node, findWidgetByName(node, 'rho')) + 
toggleWidget(node, findWidgetByName(node, 'beta_d'),true) + toggleWidget(node, findWidgetByName(node, 'beta_min'),true) + toggleWidget(node, findWidgetByName(node, 'eps_s'),true) + toggleWidget(node, findWidgetByName(node, 'coeff')) + } + else{ + toggleWidget(node, findWidgetByName(node, 'denoise'),true) + toggleWidget(node, findWidgetByName(node, 'sigma_max')) + toggleWidget(node, findWidgetByName(node, 'sigma_min')) + toggleWidget(node, findWidgetByName(node, 'beta_d')) + toggleWidget(node, findWidgetByName(node, 'beta_min')) + toggleWidget(node, findWidgetByName(node, 'eps_s')) + toggleWidget(node, findWidgetByName(node, 'rho')) + if(widget.value == 'gits') toggleWidget(node, findWidgetByName(node, 'coeff'), true) + else toggleWidget(node, findWidgetByName(node, 'coeff')) + } + updateNodeHeight(node) + } + + if(widget.name === 'inpaint_mode'){ + switch (widget.value){ + case 'normal': + case 'fooocus_inpaint': + toggleWidget(node, findWidgetByName(node, 'dtype')) + toggleWidget(node, findWidgetByName(node, 'fitting')) + toggleWidget(node, findWidgetByName(node, 'function')) + toggleWidget(node, findWidgetByName(node, 'scale')) + toggleWidget(node, findWidgetByName(node, 'start_at')) + toggleWidget(node, findWidgetByName(node, 'end_at')) + break + case 'brushnet_random': + case 'brushnet_segmentation': + toggleWidget(node, findWidgetByName(node, 'dtype'), true) + toggleWidget(node, findWidgetByName(node, 'fitting')) + toggleWidget(node, findWidgetByName(node, 'function')) + toggleWidget(node, findWidgetByName(node, 'scale'), true) + toggleWidget(node, findWidgetByName(node, 'start_at'), true) + toggleWidget(node, findWidgetByName(node, 'end_at'), true) + break + case 'powerpaint': + toggleWidget(node, findWidgetByName(node, 'dtype'), true) + toggleWidget(node, findWidgetByName(node, 'fitting'),true) + toggleWidget(node, findWidgetByName(node, 'function'),true) + toggleWidget(node, findWidgetByName(node, 'scale'), true) + toggleWidget(node, findWidgetByName(node, 'start_at'), true) + toggleWidget(node, findWidgetByName(node, 'end_at'), true) + break + } + updateNodeHeight(node) + } + + if(widget.name == 't5_type'){ + switch (widget.value){ + case 'sd3': + toggleWidget(node, findWidgetByName(node, 'clip_name'), true) + toggleWidget(node, findWidgetByName(node, 'padding'), true) + toggleWidget(node, findWidgetByName(node, 't5_name')) + toggleWidget(node, findWidgetByName(node, 'device')) + toggleWidget(node, findWidgetByName(node, 'dtype')) + break + case 't5v11': + toggleWidget(node, findWidgetByName(node, 'clip_name')) + toggleWidget(node, findWidgetByName(node, 'padding')) + toggleWidget(node, findWidgetByName(node, 't5_name'),true) + toggleWidget(node, findWidgetByName(node, 'device'),true) + toggleWidget(node, findWidgetByName(node, 'dtype'),true) + } + updateNodeHeight(node) + } + + if(widget.name == 'rem_mode'){ + switch (widget.value){ + case 'Inspyrenet': + toggleWidget(node, findWidgetByName(node, 'torchscript_jit'), true) + break + default: + toggleWidget(node, findWidgetByName(node, 'torchscript_jit'), false) + break + } + } +} + +function widgetLogic2(node, widget) { + if (widget.name === 'sampler_name') { + const widget_names = ['eta','s_noise','upscale_ratio','start_step','end_step','upscale_n_step','unsharp_kernel_size','unsharp_sigma','unsharp_strength'] + if (["euler_ancestral", "dpmpp_2s_ancestral", "dpmpp_2m_sde", "lcm"].includes(widget.value)) { + widget_names.map(name=> toggleWidget(node, findWidgetByName(node, name)), true) + } else { + widget_names.map(name=> 
toggleWidget(node, findWidgetByName(node, name))) + } + updateNodeHeight(node) + } +} + +function widgetLogic3(node, widget){ + if (widget.name === 'target_parameter') { + if (node.comfyClass == 'easy XYInputs: Steps'){ + switch (widget.value){ + case "steps": + toggleWidget(node, findWidgetByName(node, 'first_step'), true) + toggleWidget(node, findWidgetByName(node, 'last_step'), true) + toggleWidget(node, findWidgetByName(node, 'first_start_step')) + toggleWidget(node, findWidgetByName(node, 'last_start_step')) + toggleWidget(node, findWidgetByName(node, 'first_end_step')) + toggleWidget(node, findWidgetByName(node, 'last_end_step')) + break + case "start_at_step": + toggleWidget(node, findWidgetByName(node, 'first_step')) + toggleWidget(node, findWidgetByName(node, 'last_step')) + toggleWidget(node, findWidgetByName(node, 'first_start_step'), true) + toggleWidget(node, findWidgetByName(node, 'last_start_step'), true) + toggleWidget(node, findWidgetByName(node, 'first_end_step')) + toggleWidget(node, findWidgetByName(node, 'last_end_step')) + break + case "end_at_step": + toggleWidget(node, findWidgetByName(node, 'first_step')) + toggleWidget(node, findWidgetByName(node, 'last_step')) + toggleWidget(node, findWidgetByName(node, 'first_start_step')) + toggleWidget(node, findWidgetByName(node, 'last_start_step')) + toggleWidget(node, findWidgetByName(node, 'first_end_step'),true) + toggleWidget(node, findWidgetByName(node, 'last_end_step'),true) + break + } + } + if (node.comfyClass == 'easy XYInputs: Sampler/Scheduler'){ + let number_to_show = findWidgetByName(node, 'input_count').value + 1 + for (let i = 0; i < number_to_show; i++) { + switch (widget.value) { + case "sampler": + toggleWidget(node, findWidgetByName(node, 'sampler_'+i), true) + toggleWidget(node, findWidgetByName(node, 'scheduler_'+i)) + break + case "scheduler": + toggleWidget(node, findWidgetByName(node, 'scheduler_'+i), true) + toggleWidget(node, findWidgetByName(node, 'sampler_'+i)) + break + default: + toggleWidget(node, findWidgetByName(node, 'sampler_'+i), true) + toggleWidget(node, findWidgetByName(node, 'scheduler_'+i), true) + break + } + } + updateNodeHeight(node) + } + if (node.comfyClass == 'easy XYInputs: ControlNet'){ + switch (widget.value){ + case "strength": + toggleWidget(node, findWidgetByName(node, 'first_strength'), true) + toggleWidget(node, findWidgetByName(node, 'last_strength'), true) + toggleWidget(node, findWidgetByName(node, 'strength')) + toggleWidget(node, findWidgetByName(node, 'start_percent'), true) + toggleWidget(node, findWidgetByName(node, 'end_percent'), true) + toggleWidget(node, findWidgetByName(node, 'first_start_percent')) + toggleWidget(node, findWidgetByName(node, 'last_start_percent')) + toggleWidget(node, findWidgetByName(node, 'first_end_percent')) + toggleWidget(node, findWidgetByName(node, 'last_end_percent')) + break + case "start_percent": + toggleWidget(node, findWidgetByName(node, 'first_strength')) + toggleWidget(node, findWidgetByName(node, 'last_strength')) + toggleWidget(node, findWidgetByName(node, 'strength'), true) + toggleWidget(node, findWidgetByName(node, 'start_percent')) + toggleWidget(node, findWidgetByName(node, 'end_percent'), true) + toggleWidget(node, findWidgetByName(node, 'first_start_percent'), true) + toggleWidget(node, findWidgetByName(node, 'last_start_percent'), true) + toggleWidget(node, findWidgetByName(node, 'first_end_percent')) + toggleWidget(node, findWidgetByName(node, 'last_end_percent')) + break + case "end_percent": + toggleWidget(node, 
findWidgetByName(node, 'first_strength')) + toggleWidget(node, findWidgetByName(node, 'last_strength')) + toggleWidget(node, findWidgetByName(node, 'strength'), true) + toggleWidget(node, findWidgetByName(node, 'start_percent'), true) + toggleWidget(node, findWidgetByName(node, 'end_percent')) + toggleWidget(node, findWidgetByName(node, 'first_start_percent')) + toggleWidget(node, findWidgetByName(node, 'last_start_percent')) + toggleWidget(node, findWidgetByName(node, 'first_end_percent'), true) + toggleWidget(node, findWidgetByName(node, 'last_end_percent'), true) + break + } + updateNodeHeight(node) + } + + } + if (node.comfyClass == 'easy XYInputs: PromptSR'){ + let number_to_show = findWidgetByName(node, 'replace_count').value + 1 + for (let i = 0; i < number_to_show; i++) { + toggleWidget(node, findWidgetByName(node, 'replace_'+i), true) + } + for (let i = number_to_show; i < 31; i++) { + toggleWidget(node, findWidgetByName(node, 'replace_'+i)) + } + updateNodeHeight(node) + } + + if(widget.name == 'input_count'){ + let number_to_show = widget.value + 1 + for (let i = 0; i < number_to_show; i++) { + if (findWidgetByName(node, 'target_parameter').value === "sampler") { + toggleWidget(node, findWidgetByName(node, 'sampler_'+i), true) + toggleWidget(node, findWidgetByName(node, 'scheduler_'+i)) + } + else if (findWidgetByName(node, 'target_parameter').value === "scheduler") { + toggleWidget(node, findWidgetByName(node, 'scheduler_'+i), true) + toggleWidget(node, findWidgetByName(node, 'sampler_'+i)) + } else { + toggleWidget(node, findWidgetByName(node, 'sampler_'+i), true) + toggleWidget(node, findWidgetByName(node, 'scheduler_'+i), true) + } + } + for (let i = number_to_show; i < 31; i++) { + toggleWidget(node, findWidgetByName(node, 'sampler_'+i)) + toggleWidget(node, findWidgetByName(node, 'scheduler_'+i)) + } + updateNodeHeight(node) + } + if (widget.name === 'lora_count') { + let number_to_show = widget.value + 1 + const isWeight = findWidgetByName(node, 'input_mode').value.indexOf("Weights") == -1 + for (let i = 0; i < number_to_show; i++) { + toggleWidget(node, findWidgetByName(node, 'lora_name_'+i), true) + if (isWeight) { + toggleWidget(node, findWidgetByName(node, 'lora_name_'+i), true) + toggleWidget(node, findWidgetByName(node, 'model_str_'+i)) + toggleWidget(node, findWidgetByName(node, 'clip_str_'+i)) + } else { + toggleWidget(node, findWidgetByName(node, 'lora_name_'+i), true) + toggleWidget(node, findWidgetByName(node, 'model_str_'+i),true) + toggleWidget(node, findWidgetByName(node, 'clip_str_'+i), true) + } + } + for (let i = number_to_show; i < 11; i++) { + toggleWidget(node, findWidgetByName(node, 'lora_name_'+i)) + toggleWidget(node, findWidgetByName(node, 'model_str_'+i)) + toggleWidget(node, findWidgetByName(node, 'clip_str_'+i)) + } + updateNodeHeight(node) + } + if (widget.name === 'ckpt_count') { + let number_to_show = widget.value + 1 + const hasClipSkip = findWidgetByName(node, 'input_mode').value.indexOf("ClipSkip") != -1 + const hasVae = findWidgetByName(node, 'input_mode').value.indexOf("VAE") != -1 + for (let i = 0; i < number_to_show; i++) { + toggleWidget(node, findWidgetByName(node, 'ckpt_name_'+i), true) + if (hasClipSkip && hasVae) { + toggleWidget(node, findWidgetByName(node, 'clip_skip_'+i), true) + toggleWidget(node, findWidgetByName(node, 'vae_name_'+i), true) + } else if (hasVae){ + toggleWidget(node, findWidgetByName(node, 'clip_skip_' + i)) + toggleWidget(node, findWidgetByName(node, 'vae_name_' + i), true) + }else{ + toggleWidget(node, 
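+                    // --- notes on widgetLogic3() above (descriptive only) ---
+                    // The XY-input helper nodes reuse the same toggle pattern: target_parameter picks
+                    // which range widgets are visible (first_step/last_step vs first_start_step/
+                    // last_start_step on "easy XYInputs: Steps", strength vs start/end percent ranges
+                    // on the ControlNet variant), while input_count / lora_count / ckpt_count /
+                    // replace_count show the first N per-index widgets (sampler_i, scheduler_i,
+                    // lora_name_i, model_str_i, clip_str_i, ckpt_name_i, replace_i, ...) and hide the
+                    // rest, with input_mode deciding whether the Weights / ClipSkip / VAE columns are
+                    // shown at all.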
findWidgetByName(node, 'clip_skip_' + i)) + toggleWidget(node, findWidgetByName(node, 'vae_name_' + i)) + } + } + for (let i = number_to_show; i < 11; i++) { + toggleWidget(node, findWidgetByName(node, 'ckpt_name_'+i)) + toggleWidget(node, findWidgetByName(node, 'clip_skip_'+i)) + toggleWidget(node, findWidgetByName(node, 'vae_name_'+i)) + } + updateNodeHeight(node) + } + + if (widget.name === 'input_mode') { + if(node.comfyClass == 'easy XYInputs: Lora'){ + let number_to_show = findWidgetByName(node, 'lora_count').value + 1 + const hasWeight = widget.value.indexOf("Weights") != -1 + for (let i = 0; i < number_to_show; i++) { + toggleWidget(node, findWidgetByName(node, 'lora_name_'+i), true) + if (hasWeight) { + toggleWidget(node, findWidgetByName(node, 'model_str_'+i), true) + toggleWidget(node, findWidgetByName(node, 'clip_str_'+i), true) + } else { + toggleWidget(node, findWidgetByName(node, 'model_str_' + i)) + toggleWidget(node, findWidgetByName(node, 'clip_str_' + i)) + } + } + if(hasWeight){ + toggleWidget(node, findWidgetByName(node, 'model_strength')) + toggleWidget(node, findWidgetByName(node, 'clip_strength')) + }else{ + toggleWidget(node, findWidgetByName(node, 'model_strength'), true) + toggleWidget(node, findWidgetByName(node, 'clip_strength'),true) + } + } + else if(node.comfyClass == 'easy XYInputs: Checkpoint'){ + let number_to_show = findWidgetByName(node, 'ckpt_count').value + 1 + const hasClipSkip = widget.value.indexOf("ClipSkip") != -1 + const hasVae = widget.value.indexOf("VAE") != -1 + for (let i = 0; i < number_to_show; i++) { + toggleWidget(node, findWidgetByName(node, 'ckpt_name_'+i), true) + if (hasClipSkip && hasVae) { + toggleWidget(node, findWidgetByName(node, 'clip_skip_'+i), true) + toggleWidget(node, findWidgetByName(node, 'vae_name_'+i), true) + } else if (hasClipSkip){ + toggleWidget(node, findWidgetByName(node, 'clip_skip_' + i), true) + toggleWidget(node, findWidgetByName(node, 'vae_name_' + i)) + }else{ + toggleWidget(node, findWidgetByName(node, 'clip_skip_' + i)) + toggleWidget(node, findWidgetByName(node, 'vae_name_' + i)) + } + } + } + + updateNodeHeight(node) + } + + // if(widget.name == 'replace_count'){ + // let number_to_show = widget.value + 1 + // for (let i = 0; i < number_to_show; i++) { + // toggleWidget(node, findWidgetByName(node, 'replace_'+i), true) + // } + // for (let i = number_to_show; i < 31; i++) { + // toggleWidget(node, findWidgetByName(node, 'replace_'+i)) + // } + // updateNodeHeight(node) + // } +} + +app.registerExtension({ + name: "comfy.easyUse.dynamicWidgets", + + nodeCreated(node) { + switch (node.comfyClass){ + case "easy fullLoader": + case "easy a1111Loader": + case "easy comfyLoader": + case "easy cascadeLoader": + case "easy svdLoader": + case "easy dynamiCrafterLoader": + case "easy hunyuanDiTLoader": + case "easy pixArtLoader": + case "easy kolorsLoader": + case "easy loraStack": + case "easy controlnetStack": + case "easy latentNoisy": + case "easy preSampling": + case "easy preSamplingAdvanced": + case "easy preSamplingNoiseIn": + case "easy preSamplingCustom": + case "easy preSamplingSdTurbo": + case "easy preSamplingCascade": + case "easy preSamplingLayerDiffusion": + case "easy fullkSampler": + case "easy kSampler": + case "easy kSamplerSDTurbo": + case "easy kSamplerTiled": + case "easy kSamplerLayerDiffusion": + case "easy kSamplerInpainting": + case "easy kSamplerDownscaleUnet": + case "easy fullCascadeKSampler": + case "easy cascadeKSampler": + case "easy hiresFix": + case "easy detailerFix": + case 
"easy imageRemBg": + case "easy imageColorMatch": + case "easy imageDetailTransfer": + case "easy loadImageBase64": + case "easy XYInputs: Steps": + case "easy XYInputs: Sampler/Scheduler": + case 'easy XYInputs: Checkpoint': + case "easy XYInputs: Lora": + case "easy XYInputs: PromptSR": + case "easy XYInputs: ControlNet": + case "easy rangeInt": + case "easy rangeFloat": + case 'easy latentCompositeMaskedWithCond': + case 'easy pipeEdit': + case 'easy icLightApply': + case 'easy ipadapterApply': + case 'easy ipadapterApplyADV': + case 'easy ipadapterApplyEncoder': + case 'easy applyInpaint': + getSetters(node) + break + case "easy wildcards": + const wildcard_text_widget_index = node.widgets.findIndex((w) => w.name == 'text'); + const wildcard_text_widget = node.widgets[wildcard_text_widget_index]; + + // lora selector, wildcard selector + let combo_id = 1; + + Object.defineProperty(node.widgets[combo_id], "value", { + set: (value) => { + const stackTrace = new Error().stack; + if(stackTrace.includes('inner_value_change')) { + if(value != "Select the LoRA to add to the text") { + let lora_name = value; + if (lora_name.endsWith('.safetensors')) { + lora_name = lora_name.slice(0, -12); + } + + wildcard_text_widget.value += ``; + } + } + }, + get: () => { return "Select the LoRA to add to the text"; } + }); + + Object.defineProperty(node.widgets[combo_id+1], "value", { + set: (value) => { + const stackTrace = new Error().stack; + if(stackTrace.includes('inner_value_change')) { + if(value != "Select the Wildcard to add to the text") { + if(wildcard_text_widget.value != '') + wildcard_text_widget.value += ', ' + + wildcard_text_widget.value += value; + } + } + }, + get: () => { return "Select the Wildcard to add to the text"; } + }); + + // Preventing validation errors from occurring in any situation. + node.widgets[combo_id].serializeValue = () => { return "Select the LoRA to add to the text"; } + node.widgets[combo_id+1].serializeValue = () => { return "Select the Wildcard to add to the text"; } + break + case "easy detailerFix": + const textarea_widget_index = node.widgets.findIndex((w) => w.type === "customtext"); + if(textarea_widget_index == -1) return + node.widgets[textarea_widget_index].dynamicPrompts = false + node.widgets[textarea_widget_index].inputEl.placeholder = "wildcard spec: if kept empty, this option will be ignored"; + node.widgets[textarea_widget_index].serializeValue = () => {return node.widgets[textarea_widget_index].value}; + break + case "easy XYInputs: ModelMergeBlocks": + let preset_i = 3; + let vector_i = 4; + let file_i = 5; + node._value = "Preset"; + + let valuesWidget = node.widgets[vector_i] + Object.defineProperty(node.widgets[preset_i], "value", { + set: (value) => { + const stackTrace = new Error().stack; + if(stackTrace.includes('inner_value_change')) { + if(value != "Preset") { + if(!value.startsWith('@') && valuesWidget.value) + valuesWidget.value += "\n"; + if(value.startsWith('@')) { + let spec = value.split(':')[1]; + var n; + var sub_n = null; + var block = null; + + if(isNaN(spec)) { + let sub_spec = spec.split(','); + + if(sub_spec.length != 3) { + valuesWidget = '!! 
SPEC ERROR !!'; + node._value = ''; + return; + } + + n = parseInt(sub_spec[0].trim()); + sub_n = parseInt(sub_spec[1].trim()); + block = parseInt(sub_spec[2].trim()); + } + else { + n = parseInt(spec.trim()); + } + + valuesWidget.value = ""; + if(sub_n == null) { + for(let i=1; i<=n; i++) { + var temp = "1,1"; + for(let j=1; j<=n; j++) { + if(temp!='') + temp += ','; + if(j==i) + temp += '1'; + else + temp += '0'; + } + temp += ',1; '; + + valuesWidget.value += `B${i}:${temp}\n`; + } + } + else { + for(let i=1; i<=sub_n; i++) { + var temp = ""; + for(let j=1; j<=n; j++) { + if(temp!='') + temp += ','; + + if(block!=j) + temp += '0'; + else { + temp += ' '; + for(let k=1; k<=sub_n; k++) { + if(k==i) + temp += '1 '; + else + temp += '0 '; + } + } + } + + valuesWidget.value += `B${block}.SUB${i}:${temp}\n`; + } + } + } + else { + valuesWidget.value += `${value}; `; + } + // if(node.widgets_values) { + // valuesWidget.value = node.widgets[preset_i].value+ `; `; + // } + } + } + + node._value = value; + }, + get: () => { + return node._value; + } + }); + + const cb = node.callback; + valuesWidget.callback = function () { + if (cb) { + return cb.apply(this, arguments); + } + }; + + // upload .csv + async function uploadFile(file) { + try { + const body = new FormData(); + body.append("csv", file); + const resp = await api.fetchApi("/easyuse/upload/csv", { + method: "POST", + body, + }); + + if (resp.status === 200) { + const data = await resp.json(); + node.widgets[vector_i].value = data + } else { + alert(resp.status + " - " + resp.statusText); + } + } catch (error) { + alert(error); + } + } + + const fileInput = document.createElement("input"); + Object.assign(fileInput, { + type: "file", + accept: "text/csv", + style: "show: none", + onchange: async (event) => { + if (fileInput.files.length) { + await uploadFile(fileInput.files[0], true); + event.target.value = '' + } + }, + }); + document.body.append(fileInput); + + const name = "choose .csv file into values" + let uploadWidget = node.addWidget("button", name, "csv", () => { + fileInput.click(); + }); + uploadWidget.label = name; + uploadWidget.serialize = false; + + break + } + + }, + async beforeRegisterNodeDef(nodeType, nodeData, app) { + function addText(arr_text) { + var text = ''; + for (let i = 0; i < arr_text.length; i++) { + text += arr_text[i]; + } + return text + } + + if (["easy showSpentTime"].includes(nodeData.name)) { + function populate(text) { + if (this.widgets) { + const pos = this.widgets.findIndex((w) => w.name === "spent_time"); + if (pos !== -1 && this.widgets[pos]) { + const w = this.widgets[pos] + console.log(text) + w.value = text; + } + } + } + + // When the node is executed we will be sent the input text, show this in the widget + const onExecuted = nodeType.prototype.onExecuted; + nodeType.prototype.onExecuted = function (message) { + onExecuted?.apply(this, arguments); + const text = addText(message.text) + populate.call(this, text); + }; + } + + if (["easy showLoaderSettingsNames"].includes(nodeData.name)) { + function populate(text) { + if (this.widgets) { + const pos = this.widgets.findIndex((w) => w.name === "names"); + if (pos !== -1 && this.widgets[pos]) { + const w = this.widgets[pos] + w.value = text; + } + } + } + + // When the node is executed we will be sent the input text, show this in the widget + const onExecuted = nodeType.prototype.onExecuted; + nodeType.prototype.onExecuted = function (message) { + onExecuted?.apply(this, arguments); + const text = addText(message.text) + populate.call(this, 
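// A minimal sketch of the CSV upload round-trip used by the ModelMergeBlocks button above, assuming
// the /easyuse/upload/csv route accepts a multipart form with a "csv" field and answers with JSON,
// as the uploadFile helper above expects; uploadCsv is a hypothetical name.
async function uploadCsv(file) {
    const body = new FormData()
    body.append("csv", file)                                    // field name the server reads
    const resp = await api.fetchApi("/easyuse/upload/csv", { method: "POST", body })
    if (resp.status !== 200) throw new Error(`${resp.status} - ${resp.statusText}`)
    return await resp.json()                                    // becomes the values widget content
}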
text); + }; + } + + if (loaderNodes.includes(nodeData.name)) { + function populate(text, type = 'positive') { + if (this.widgets) { + const pos = this.widgets.findIndex((w) => w.name === type + "_prompt"); + const className = "comfy-multiline-input wildcard_" + type + '_' + this.id.toString() + if (pos == -1 && text) { + const inputEl = document.createElement("textarea"); + inputEl.className = className; + inputEl.placeholder = "Wildcard Prompt (" + type + ")" + const widget = this.addDOMWidget(type + "_prompt", "customtext", inputEl, { + getValue() { + return inputEl.value; + }, + setValue(v) { + inputEl.value = v; + }, + serialize: false, + }); + widget.inputEl = inputEl; + widget.inputEl.readOnly = true + inputEl.addEventListener("input", () => { + widget.callback?.(widget.value); + }); + widget.value = text; + } else if (this.widgets[pos]) { + if (text) { + const w = this.widgets[pos] + w.value = text; + } else { + this.widgets.splice(pos, 1); + const element = document.getElementsByClassName(className) + if (element && element[0]) element[0].remove() + } + } + } + } + + const onExecuted = nodeType.prototype.onExecuted; + nodeType.prototype.onExecuted = function (message) { + onExecuted?.apply(this, arguments); + const positive = addText(message.positive) + const negative = addText(message.negative) + populate.call(this, positive, "positive"); + populate.call(this, negative, "negative"); + }; + } + + if(["easy sv3dLoader"].includes(nodeData.name)){ + function changeSchedulerText(mode, batch_size, inputEl) { + console.log(mode) + switch (mode){ + case 'azimuth': + inputEl.readOnly = true + inputEl.style.opacity = 0.6 + return `0:(0.0,0.0)` + (batch_size > 1 ? `\n${batch_size-1}:(360.0,0.0)` : '') + case 'elevation': + inputEl.readOnly = true + inputEl.style.opacity = 0.6 + return `0:(-90.0,0.0)` + (batch_size > 1 ? `\n${batch_size-1}:(90.0,0.0)` : '') + case 'custom': + inputEl.readOnly = false + inputEl.style.opacity = 1 + return `0:(0.0,0.0)\n9:(180.0,0.0)\n20:(360.0,0.0)` + } + } + + + const onNodeCreated = nodeType.prototype.onNodeCreated; + nodeType.prototype.onNodeCreated = async function () { + onNodeCreated ? onNodeCreated.apply(this, []) : undefined; + const easing_mode_widget = this.widgets.find(w => w.name == 'easing_mode') + const batch_size = this.widgets.find(w => w.name == 'batch_size') + const scheduler = this.widgets.find(w => w.name == 'scheduler') + setTimeout(_=>{ + if(!scheduler.value) scheduler.value = changeSchedulerText(easing_mode_widget.value, batch_size.value, scheduler.inputEl) + },1) + easing_mode_widget.callback = value=>{ + scheduler.value = changeSchedulerText(value, batch_size.value, scheduler.inputEl) + } + batch_size.callback = value =>{ + scheduler.value = changeSchedulerText(easing_mode_widget.value, value, scheduler.inputEl) + } + } + } + + if (seedNodes.includes(nodeData.name)) { + const onNodeCreated = nodeType.prototype.onNodeCreated; + nodeType.prototype.onNodeCreated = async function () { + onNodeCreated ? 
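// A minimal sketch of the DOM-widget pattern populate uses above: a plain <textarea> is wrapped with
// addDOMWidget so the decoded wildcard prompt can be shown read-only on the loader node after
// execution; addReadonlyTextWidget is a hypothetical name and the class name is simplified here.
function addReadonlyTextWidget(node, name, text) {
    const inputEl = document.createElement("textarea")
    inputEl.className = "comfy-multiline-input"
    inputEl.readOnly = true
    const widget = node.addDOMWidget(name, "customtext", inputEl, {
        getValue() { return inputEl.value },
        setValue(v) { inputEl.value = v },
        serialize: false,           // display only, never written back into the workflow
    })
    widget.inputEl = inputEl
    widget.value = text
    return widget
}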
onNodeCreated.apply(this, []) : undefined; + const seed_widget = this.widgets.find(w => ['seed_num','seed'].includes(w.name)) + const seed_control = this.widgets.find(w=> ['control_before_generate','control_after_generate'].includes(w.name)) + if(nodeData.name == 'easy seed'){ + const randomSeedButton = this.addWidget("button", "🎲 Manual Random Seed", null, _=>{ + if(seed_control.value != 'fixed') seed_control.value = 'fixed' + seed_widget.value = Math.floor(Math.random() * 1125899906842624) + app.queuePrompt(0, 1) + },{ serialize:false}) + seed_widget.linkedWidgets = [randomSeedButton, seed_control]; + } + } + const onAdded = nodeType.prototype.onAdded; + nodeType.prototype.onAdded = async function () { + onAdded ? onAdded.apply(this, []) : undefined; + const seed_widget = this.widgets.find(w => ['seed_num','seed'].includes(w.name)) + const seed_control = this.widgets.find(w=> ['control_before_generate','control_after_generate'].includes(w.name)) + setTimeout(_=>{ + if(seed_control.name == 'control_before_generate' && seed_widget.value === 0) { + seed_widget.value = Math.floor(Math.random() * 1125899906842624) + } + },1) + } + } + + if (nodeData.name == 'easy imageInsetCrop') { + function setWidgetStep(a) { + const measurementWidget = a.widgets[0] + for (let i = 1; i <= 4; i++) { + if (measurementWidget.value === 'Pixels') { + a.widgets[i].options.step = 80; + a.widgets[i].options.max = 8192; + } else { + a.widgets[i].options.step = 10; + a.widgets[i].options.max = 99; + } + } + } + + nodeType.prototype.onAdded = async function (graph) { + const measurementWidget = this.widgets[0]; + let callback = measurementWidget.callback; + measurementWidget.callback = (...args) => { + setWidgetStep(this); + callback && callback.apply(measurementWidget, [...args]); + }; + setTimeout(_=>{ + setWidgetStep(this); + },1) + } + } + + if(['easy showAnything', 'easy showTensorShape', 'easy imageInterrogator'].includes(nodeData.name)){ + function populate(text) { + if (this.widgets) { + const pos = this.widgets.findIndex((w) => w.name === "text"); + if (pos !== -1) { + for (let i = pos; i < this.widgets.length; i++) { + this.widgets[i].onRemove?.(); + } + this.widgets.length = pos; + } + } + + for (const list of text) { + const w = ComfyWidgets["STRING"](this, "text", ["STRING", { multiline: true }], app).widget; + w.inputEl.readOnly = true; + w.inputEl.style.opacity = 0.6; + w.value = list; + } + + requestAnimationFrame(() => { + const sz = this.computeSize(); + if (sz[0] < this.size[0]) { + sz[0] = this.size[0]; + } + if (sz[1] < this.size[1]) { + sz[1] = this.size[1]; + } + this.onResize?.(sz); + app.graph.setDirtyCanvas(true, false); + }); + } + + // When the node is executed we will be sent the input text, display this in the widget + const onExecuted = nodeType.prototype.onExecuted; + nodeType.prototype.onExecuted = function (message) { + onExecuted?.apply(this, arguments); + populate.call(this, message.text); + }; + + if(!['easy imageInterrogator'].includes(nodeData.name)) { + const onConfigure = nodeType.prototype.onConfigure; + nodeType.prototype.onConfigure = function () { + onConfigure?.apply(this, arguments); + if (this.widgets_values?.length) { + populate.call(this, this.widgets_values); + } + }; + } + } + + if(nodeData.name == 'easy convertAnything'){ + const onNodeCreated = nodeType.prototype.onNodeCreated; + nodeType.prototype.onNodeCreated = async function () { + onNodeCreated ? 
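// A minimal sketch of the manual-random-seed button wired up for the seed nodes above, assuming a
// node whose seed widget is named "seed" or "seed_num" and whose control widget should stay on
// "fixed" so the drawn value is actually used; addRandomSeedButton is a hypothetical name.
function addRandomSeedButton(node) {
    const seed = node.widgets.find(w => ['seed_num', 'seed'].includes(w.name))
    const control = node.widgets.find(w => ['control_before_generate', 'control_after_generate'].includes(w.name))
    node.addWidget("button", "🎲 Manual Random Seed", null, _ => {
        if (control.value != 'fixed') control.value = 'fixed'       // keep the drawn seed from being re-randomized
        seed.value = Math.floor(Math.random() * 1125899906842624)   // 2^50, same range as above
        app.queuePrompt(0, 1)                                       // queue one run with the new seed
    }, { serialize: false })
}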
onNodeCreated.apply(this, []) : undefined; + setTimeout(_=>{ + const type_control = this.widgets[this.widgets.findIndex((w) => w.name === "output_type")] + let _this = this + type_control.callback = async() => { + _this.outputs[0].type = (type_control.value).toUpperCase() + _this.outputs[0].name = type_control.value + _this.outputs[0].label = type_control.value + } + },300) + + } + } + + if (nodeData.name == 'easy promptLine') { + const onAdded = nodeType.prototype.onAdded; + nodeType.prototype.onAdded = async function () { + onAdded ? onAdded.apply(this, []) : undefined; + let prompt_widget = this.widgets.find(w => w.name == "prompt") + const button = this.addWidget("button", "get values from COMBO link", '', () => { + const output_link = this.outputs[1]?.links?.length>0 ? this.outputs[1]['links'][0] : null + const all_nodes = app.graph._nodes + const node = all_nodes.find(cate=> cate.inputs?.find(input=> input.link == output_link)) + if(!output_link || !node){ + toast.error($t('No COMBO link'), 3000) + return + } + else{ + const input = node.inputs.find(input=> input.link == output_link) + const widget_name = input.widget.name + const widgets = node.widgets + const widget = widgets.find(cate=> cate.name == widget_name) + let values = widget?.options.values || null + if(values){ + values = values.join('\n') + prompt_widget.value = values + } + } + }, { + serialize: false + }) + } + } + } +}); + + +const getSetWidgets = ['rescale_after_model', 'rescale', + 'lora_name', 'lora1_name', 'lora2_name', 'lora3_name', + 'refiner_lora1_name', 'refiner_lora2_name', 'upscale_method', + 'image_output', 'add_noise', 'info', 'sampler_name', + 'ckpt_B_name', 'ckpt_C_name', 'save_model', 'refiner_ckpt_name', + 'num_loras', 'num_controlnet', 'mode', 'toggle', 'resolution', 'ratio', 'target_parameter', + 'input_count', 'replace_count', 'downscale_mode', 'range_mode','text_combine_mode', 'input_mode', + 'lora_count','ckpt_count', 'conditioning_mode', 'preset', 'use_tiled', 'use_batch', 'num_embeds', + "easing_mode", "guider", "scheduler", "inpaint_mode", 't5_type', 'rem_mode' +] + +function getSetters(node) { + if (node.widgets) + for (const w of node.widgets) { + if (getSetWidgets.includes(w.name)) { + if(node.comfyClass.indexOf("easy XYInputs:") != -1) widgetLogic3(node, w) + else if(w.name == 'sampler_name' && node.comfyClass == 'easy preSamplingSdTurbo') widgetLogic2(node, w); + else widgetLogic(node, w); + let widgetValue = w.value; + + // Define getters and setters for widget values + Object.defineProperty(w, 'value', { + get() { + return widgetValue; + }, + set(newVal) { + if (newVal !== widgetValue) { + widgetValue = newVal; + if(node.comfyClass.indexOf("easy XYInputs:") != -1) widgetLogic3(node, w) + else if(w.name == 'sampler_name' && node.comfyClass == 'easy preSamplingSdTurbo') widgetLogic2(node, w); + else widgetLogic(node, w); + } + } + }); + } + } +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/easy/easyExtraMenu.js b/ComfyUI-Easy-Use/web/js/easy/easyExtraMenu.js new file mode 100644 index 0000000000000000000000000000000000000000..d28e6bfbdcad4524a9b80a24c3a50a34db937a01 --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/easy/easyExtraMenu.js @@ -0,0 +1,593 @@ +import {app} from "../../../../scripts/app.js"; +import {$t} from '../common/i18n.js' +import {CheckpointInfoDialog, LoraInfoDialog} from "../common/model.js"; + +const loaders = ['easy fullLoader', 'easy a1111Loader', 'easy comfyLoader', 'easy kolorsLoader', 'easy hunyuanDiTLoader', 'easy pixArtLoader'] +const preSampling 
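// A minimal sketch of the interception pattern getSetters builds above: the widget keeps its value
// in a closure and every assignment re-runs the visibility logic for its node, so widgets hide and
// show without needing change events; watchWidget and onChange are hypothetical names.
function watchWidget(node, w, onChange) {
    let current = w.value
    onChange(node, w)                       // apply once for the initial value
    Object.defineProperty(w, 'value', {
        get() { return current },
        set(newVal) {
            if (newVal !== current) {
                current = newVal
                onChange(node, w)           // e.g. widgetLogic / widgetLogic3 above
            }
        }
    })
}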
= ['easy preSampling', 'easy preSamplingAdvanced', 'easy preSamplingDynamicCFG', 'easy preSamplingNoiseIn', 'easy preSamplingCustom', 'easy preSamplingLayerDiffusion', 'easy fullkSampler'] +const kSampler = ['easy kSampler', 'easy kSamplerTiled', 'easy kSamplerInpainting', 'easy kSamplerDownscaleUnet', 'easy kSamplerLayerDiffusion'] +const controlnet = ['easy controlnetLoader', 'easy controlnetLoaderADV', 'easy controlnetLoader++', 'easy instantIDApply', 'easy instantIDApplyADV'] +const ipadapter = ['easy ipadapterApply', 'easy ipadapterApplyADV', 'easy ipadapterStyleComposition', 'easy ipadapterApplyFromParams', 'easy pulIDApply', 'easy pulIDApplyADV'] +const positive_prompt = ['easy positive', 'easy wildcards'] +const imageNode = ['easy loadImageBase64', 'LoadImage', 'LoadImageMask'] +const inpaint = ['easy applyBrushNet', 'easy applyPowerPaint', 'easy applyInpaint'] +const widgetMapping = { + "positive_prompt":{ + "text": "positive", + "positive": "text" + }, + "loaders":{ + "ckpt_name": "ckpt_name", + "vae_name": "vae_name", + "clip_skip": "clip_skip", + "lora_name": "lora_name", + "resolution": "resolution", + "empty_latent_width": "empty_latent_width", + "empty_latent_height": "empty_latent_height", + "positive": "positive", + "negative": "negative", + "batch_size": "batch_size", + "a1111_prompt_style": "a1111_prompt_style" + }, + "preSampling":{ + "steps": "steps", + "cfg": "cfg", + "cfg_scale_min": "cfg", + "sampler_name": "sampler_name", + "scheduler": "scheduler", + "denoise": "denoise", + "seed_num": "seed_num", + "seed": "seed" + }, + "kSampler":{ + "image_output": "image_output", + "save_prefix": "save_prefix", + "link_id": "link_id" + }, + "controlnet":{ + "control_net_name":"control_net_name", + "strength": ["strength", "cn_strength"], + "scale_soft_weights": ["scale_soft_weights","cn_soft_weights"], + "cn_strength": ["strength", "cn_strength"], + "cn_soft_weights": ["scale_soft_weights","cn_soft_weights"], + }, + "ipadapter":{ + "preset":"preset", + "lora_strength": "lora_strength", + "provider": "provider", + "weight":"weight", + "weight_faceidv2": "weight_faceidv2", + "start_at": "start_at", + "end_at": "end_at", + "cache_mode": "cache_mode", + "use_tiled": "use_tiled", + "insightface": "insightface", + "pulid_file": "pulid_file" + }, + "load_image":{ + "image":"image", + "base64_data":"base64_data", + "channel": "channel" + }, + "inpaint":{ + "dtype": "dtype", + "fitting": "fitting", + "function": "function", + "scale": "scale", + "start_at": "start_at", + "end_at": "end_at" + } +} +const inputMapping = { + "loaders":{ + "optional_lora_stack": "optional_lora_stack", + "positive": "positive", + "negative": "negative" + }, + "preSampling":{ + "pipe": "pipe", + "image_to_latent": "image_to_latent", + "latent": "latent" + }, + "kSampler":{ + "pipe": "pipe", + "model": "model" + }, + "controlnet":{ + "pipe": "pipe", + "image": "image", + "image_kps": "image_kps", + "control_net": "control_net", + "positive": "positive", + "negative": "negative", + "mask": "mask" + }, + "positive_prompt":{ + + }, + "ipadapter":{ + "model":"model", + "image":"image", + "image_style": "image", + "attn_mask":"attn_mask", + "optional_ipadapter":"optional_ipadapter" + }, + "inpaint":{ + "pipe": "pipe", + "image": "image", + "mask": "mask" + } +}; + +const outputMapping = { + "loaders":{ + "pipe": "pipe", + "model": "model", + "vae": "vae", + "clip": null, + "positive": null, + "negative": null, + "latent": null, + }, + "preSampling":{ + "pipe":"pipe" + }, + "kSampler":{ + "pipe": "pipe", + "image": 
"image" + }, + "controlnet":{ + "pipe": "pipe", + "positive": "positive", + "negative": "negative" + }, + "positive_prompt":{ + "text": "positive", + "positive": "text" + }, + "load_image":{ + "IMAGE":"IMAGE", + "MASK": "MASK" + }, + "ipadapter":{ + "model":"model", + "tiles":"tiles", + "masks":"masks", + "ipadapter":"ipadapter" + }, + "inpaint":{ + "pipe": "pipe", + } +}; + +// 替换节点 +function replaceNode(oldNode, newNodeName, type) { + const newNode = LiteGraph.createNode(newNodeName); + if (!newNode) { + return; + } + app.graph.add(newNode); + + newNode.pos = oldNode.pos.slice(); + newNode.size = oldNode.size.slice(); + + oldNode.widgets.forEach(widget => { + if(widgetMapping[type][widget.name]){ + const newName = widgetMapping[type][widget.name]; + if (newName) { + const newWidget = findWidgetByName(newNode, newName); + if (newWidget) { + newWidget.value = widget.value; + if(widget.name == 'seed_num'){ + newWidget.linkedWidgets[0].value = widget.linkedWidgets[0].value + } + if(widget.type == 'converted-widget'){ + convertToInput(newNode, newWidget, widget); + } + } + } + } + + }); + + if(oldNode.inputs){ + oldNode.inputs.forEach((input, index) => { + if (input && input.link && inputMapping[type][input.name]) { + const newInputName = inputMapping[type][input.name]; + // If the new node does not have this output, skip + if (newInputName === null) { + return; + } + const newInputIndex = newNode.findInputSlot(newInputName); + if (newInputIndex !== -1) { + const originLinkInfo = oldNode.graph.links[input.link]; + if (originLinkInfo) { + const originNode = oldNode.graph.getNodeById(originLinkInfo.origin_id); + if (originNode) { + originNode.connect(originLinkInfo.origin_slot, newNode, newInputIndex); + } + } + } + } + }); + } + + if(oldNode.outputs){ + oldNode.outputs.forEach((output, index) => { + if (output && output.links && outputMapping[type] && outputMapping[type][output.name]) { + const newOutputName = outputMapping[type][output.name]; + // If the new node does not have this output, skip + if (newOutputName === null) { + return; + } + const newOutputIndex = newNode.findOutputSlot(newOutputName); + if (newOutputIndex !== -1) { + output.links.forEach(link => { + const targetLinkInfo = oldNode.graph.links[link]; + if (targetLinkInfo) { + const targetNode = oldNode.graph.getNodeById(targetLinkInfo.target_id); + if (targetNode) { + newNode.connect(newOutputIndex, targetNode, targetLinkInfo.target_slot); + } + } + }); + } + } + }); + } + + + // Remove old node + app.graph.remove(oldNode); + + // Remove others + if(newNode.type == 'easy fullkSampler'){ + const link_output_id = newNode.outputs[0].links + if(link_output_id && link_output_id[0]){ + const nodes = app.graph._nodes + const node = nodes.find(cate=> cate.inputs && cate.inputs[0] && cate.inputs[0]['link'] == link_output_id[0]) + if(node){ + app.graph.remove(node); + } + } + }else if(preSampling.includes(newNode.type)){ + const link_output_id = newNode.outputs[0].links + if(!link_output_id || !link_output_id[0]){ + const ksampler = LiteGraph.createNode('easy kSampler'); + app.graph.add(ksampler); + ksampler.pos = newNode.pos.slice(); + ksampler.pos[0] = ksampler.pos[0] + newNode.size[0] + 20; + const newInputIndex = newNode.findInputSlot('pipe'); + if (newInputIndex !== -1) { + if (newNode) { + newNode.connect(0, ksampler, newInputIndex); + } + } + } + } + + // autoHeight + newNode.setSize([newNode.size[0], newNode.computeSize()[1]]); +} + +export function findWidgetByName(node, widgetName) { + return node.widgets.find(widget => typeof 
widgetName == 'object' ? widgetName.includes(widget.name) : widget.name === widgetName); +} +function replaceNodeMenuCallback(currentNode, targetNodeName, type) { + return function() { + replaceNode(currentNode, targetNodeName, type); + }; +} +const addMenuHandler = (nodeType, cb)=> { + const getOpts = nodeType.prototype.getExtraMenuOptions; + nodeType.prototype.getExtraMenuOptions = function () { + const r = getOpts.apply(this, arguments); + cb.apply(this, arguments); + return r; + }; +} +const addMenu = (content, type, nodes_include, nodeType, has_submenu=true) => { + addMenuHandler(nodeType, function (_, options) { + options.unshift({ + content: content, + has_submenu: has_submenu, + callback: (value, options, e, menu, node) => showSwapMenu(value, options, e, menu, node, type, nodes_include) + }) + if(type == 'loaders') { + options.unshift({ + content: $t("💎 View Lora Info..."), + callback: (value, options, e, menu, node) => { + const widget = node.widgets.find(cate => cate.name == 'lora_name') + let name = widget.value; + if (!name || name == 'None') return + new LoraInfoDialog(name).show('loras', name); + } + }) + options.unshift({ + content: $t("💎 View Checkpoint Info..."), + callback: (value, options, e, menu, node) => { + let name = node.widgets[0].value; + if (!name || name == 'None') return + new CheckpointInfoDialog(name).show('checkpoints', name); + } + }) + } + }) +} +const showSwapMenu = (value, options, e, menu, node, type, nodes_include) => { + const swapOptions = []; + nodes_include.map(cate=>{ + if (node.type !== cate) { + swapOptions.push({ + content: `${cate}`, + callback: replaceNodeMenuCallback(node, cate, type) + }); + } + }) + new LiteGraph.ContextMenu(swapOptions, { + event: e, + callback: null, + parentMenu: menu, + node: node + }); + return false; +} + +// 重载节点 +const CONVERTED_TYPE = "converted-widget"; +const GET_CONFIG = Symbol(); + +function hideWidget(node, widget, suffix = "") { + widget.origType = widget.type; + widget.origComputeSize = widget.computeSize; + widget.origSerializeValue = widget.serializeValue; + widget.computeSize = () => [0, -4]; // -4 is due to the gap litegraph adds between widgets automatically + widget.type = CONVERTED_TYPE + suffix; + widget.serializeValue = () => { + // Prevent serializing the widget if we have no input linked + if (!node.inputs) { + return undefined; + } + let node_input = node.inputs.find((i) => i.widget?.name === widget.name); + + if (!node_input || !node_input.link) { + return undefined; + } + return widget.origSerializeValue ? widget.origSerializeValue() : widget.value; + }; + + // Hide any linked widgets, e.g. 
seed+seedControl + if (widget.linkedWidgets) { + for (const w of widget.linkedWidgets) { + hideWidget(node, w, ":" + widget.name); + } + } +} +function convertToInput(node, widget, config) { + console.log('config:', config) + hideWidget(node, widget); + + const { type } = getWidgetType(config); + + // Add input and store widget config for creating on primitive node + const sz = node.size; + if(!widget.options || !widget.options.forceInput){ + node.addInput(widget.name, type, { + widget: { name: widget.name, [GET_CONFIG]: () => config }, + }); + } + + for (const widget of node.widgets) { + widget.last_y += LiteGraph.NODE_SLOT_HEIGHT; + } + + // Restore original size but grow if needed + node.setSize([Math.max(sz[0], node.size[0]), Math.max(sz[1], node.size[1])]); +} + +function getWidgetType(config) { + // Special handling for COMBO so we restrict links based on the entries + let type = config[0]; + if (type instanceof Array) { + type = "COMBO"; + } + return { type }; +} + +const reloadNode = function (node) { + const nodeType = node.constructor.type; + const origVals = node.properties.origVals || {}; + + const nodeTitle = origVals.title || node.title; + const nodeColor = origVals.color || node.color; + const bgColor = origVals.bgcolor || node.bgcolor; + const oldNode = node + const options = { + 'size': [...node.size], + 'color': nodeColor, + 'bgcolor': bgColor, + 'pos': [...node.pos] + } + + let inputLinks = [] + let outputLinks = [] + if(node.inputs){ + for (const input of node.inputs) { + if (input.link) { + const input_name = input.name + const input_slot = node.findInputSlot(input_name) + const input_node = node.getInputNode(input_slot) + const input_link = node.getInputLink(input_slot) + inputLinks.push([input_link.origin_slot, input_node, input_name]) + } + } + } + if(node.outputs) { + for (const output of node.outputs) { + if (output.links) { + const output_name = output.name + + for (const linkID of output.links) { + const output_link = graph.links[linkID] + const output_node = graph._nodes_by_id[output_link.target_id] + outputLinks.push([output_name, output_node, output_link.target_slot]) + } + } + } + } + + app.graph.remove(node) + const newNode = app.graph.add(LiteGraph.createNode(nodeType, nodeTitle, options)); + + function handleLinks() { + // re-convert inputs + if(oldNode.widgets) { + for (let w of oldNode.widgets) { + if (w.type === 'converted-widget') { + const WidgetToConvert = newNode.widgets.find((nw) => nw.name === w.name); + for (let i of oldNode.inputs) { + if (i.name === w.name) { + convertToInput(newNode, WidgetToConvert, i.widget); + } + } + } + } + } + // replace input and output links + for (let input of inputLinks) { + const [output_slot, output_node, input_name] = input; + output_node.connect(output_slot, newNode.id, input_name) + } + for (let output of outputLinks) { + const [output_name, input_node, input_slot] = output; + newNode.connect(output_name, input_node, input_slot) + } + } + + // fix widget values + let values = oldNode.widgets_values; + if (!values && newNode.widgets?.length>0) { + newNode.widgets.forEach((newWidget, index) => { + const oldWidget = oldNode.widgets[index]; + if (newWidget.name === oldWidget.name && newWidget.type === oldWidget.type) { + newWidget.value = oldWidget.value; + } + }); + handleLinks(); + return; + } + let pass = false + const isIterateForwards = values?.length <= newNode.widgets?.length; + let vi = isIterateForwards ? 
0 : values.length - 1; + function evalWidgetValues(testValue, newWidg) { + if (testValue === true || testValue === false) { + if (newWidg.options?.on && newWidg.options?.off) { + return { value: testValue, pass: true }; + } + } else if (typeof testValue === "number") { + if (newWidg.options?.min <= testValue && testValue <= newWidg.options?.max) { + return { value: testValue, pass: true }; + } + } else if (newWidg.options?.values?.includes(testValue)) { + return { value: testValue, pass: true }; + } else if (newWidg.inputEl && typeof testValue === "string") { + return { value: testValue, pass: true }; + } + return { value: newWidg.value, pass: false }; + } + const updateValue = (wi) => { + const oldWidget = oldNode.widgets[wi]; + let newWidget = newNode.widgets[wi]; + if (newWidget.name === oldWidget.name && newWidget.type === oldWidget.type) { + while ((isIterateForwards ? vi < values.length : vi >= 0) && !pass) { + let { value, pass } = evalWidgetValues(values[vi], newWidget); + if (pass && value !== null) { + newWidget.value = value; + break; + } + vi += isIterateForwards ? 1 : -1; + } + vi++ + if (!isIterateForwards) { + vi = values.length - (newNode.widgets?.length - 1 - wi); + } + } + }; + if (isIterateForwards && newNode.widgets?.length>0) { + for (let wi = 0; wi < newNode.widgets.length; wi++) { + updateValue(wi); + } + } else if(newNode.widgets?.length>0){ + for (let wi = newNode.widgets.length - 1; wi >= 0; wi--) { + updateValue(wi); + } + } + handleLinks(); +}; + + +app.registerExtension({ + name: "comfy.easyUse.extraMenu", + async beforeRegisterNodeDef(nodeType, nodeData, app) { + // 刷新节点 + addMenuHandler(nodeType, function (_, options) { + options.unshift({ + content: $t("🔃 Reload Node"), + callback: (value, options, e, menu, node) => { + let graphcanvas = LGraphCanvas.active_canvas; + if (!graphcanvas.selected_nodes || Object.keys(graphcanvas.selected_nodes).length <= 1) { + reloadNode(node); + } else { + for (let i in graphcanvas.selected_nodes) { + reloadNode(graphcanvas.selected_nodes[i]); + } + } + } + }) + // ckptNames + if(nodeData.name == 'easy ckptNames'){ + options.unshift({ + content: $t("💎 View Checkpoint Info..."), + callback: (value, options, e, menu, node) => { + let name = node.widgets[0].value; + if (!name || name == 'None') return + new CheckpointInfoDialog(name).show('checkpoints', name); + } + }) + } + }) + + // Swap提示词 + if (positive_prompt.includes(nodeData.name)) { + addMenu("↪️ Swap EasyPrompt", 'positive_prompt', positive_prompt, nodeType) + } + // Swap加载器 + if (loaders.includes(nodeData.name)) { + addMenu("↪️ Swap EasyLoader", 'loaders', loaders, nodeType) + } + // Swap预采样器 + if (preSampling.includes(nodeData.name)) { + addMenu("↪️ Swap EasyPreSampling", 'preSampling', preSampling, nodeType) + } + // Swap kSampler + if (kSampler.includes(nodeData.name)) { + addMenu("↪️ Swap EasyKSampler", 'preSampling', kSampler, nodeType) + } + // Swap ControlNet + if (controlnet.includes(nodeData.name)) { + addMenu("↪️ Swap EasyControlnet", 'controlnet', controlnet, nodeType) + } + // Swap IPAdapater + if (ipadapter.includes(nodeData.name)) { + addMenu("↪️ Swap EasyAdapater", 'ipadapter', ipadapter, nodeType) + } + // Swap Image + if (imageNode.includes(nodeData.name)) { + addMenu("↪️ Swap LoadImage", 'load_image', imageNode, nodeType) + } + // Swap inpaint + if (inpaint.includes(nodeData.name)) { + addMenu("↪️ Swap InpaintNode", 'inpaint', inpaint, nodeType) + } + } +}); + diff --git a/ComfyUI-Easy-Use/web/js/easy/easyInterface.js 
b/ComfyUI-Easy-Use/web/js/easy/easyInterface.js new file mode 100644 index 0000000000000000000000000000000000000000..5f73e3390ddb6a9ef30ae75d8336d41502f44f06 --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/easy/easyInterface.js @@ -0,0 +1,788 @@ +import { app } from "../../../../scripts/app.js"; +import { api } from "../../../../scripts/api.js"; +import { $el } from "../../../../scripts/ui.js"; +import {addPreconnect, addCss} from "../common/utils.js"; + +const locale = localStorage['AGL.Locale'] || localStorage['Comfy.Settings.AGL.Locale'] || 'en-US' + +const customThemeColor = "#236692" +const customThemeColorLight = "#3485bb" +// 增加Slot颜色 +const customPipeLineLink = "#7737AA" +const customPipeLineSDXLLink = "#7737AA" +const customIntLink = "#29699C" +const customXYPlotLink = "#74DA5D" +const customLoraStackLink = "#94dccd" +const customXYLink = "#38291f" + +var customLinkColors = JSON.parse(localStorage.getItem('Comfy.Settings.ttN.customLinkColors')) || {}; +if (!customLinkColors["PIPE_LINE"] || !LGraphCanvas.link_type_colors["PIPE_LINE"]) {customLinkColors["PIPE_LINE"] = customPipeLineLink;} +if (!customLinkColors["PIPE_LINE_SDXL"] || !LGraphCanvas.link_type_colors["PIPE_LINE_SDXL"]) {customLinkColors["PIPE_LINE_SDXL"] = customPipeLineSDXLLink;} +if (!customLinkColors["INT"] || !LGraphCanvas.link_type_colors["INT"]) {customLinkColors["INT"] = customIntLink;} +if (!customLinkColors["XYPLOT"] || !LGraphCanvas.link_type_colors["XYPLOT"]) {customLinkColors["XYPLOT"] = customXYPlotLink;} +if (!customLinkColors["X_Y"] || !LGraphCanvas.link_type_colors["X_Y"]) {customLinkColors["X_Y"] = customXYLink;} +if (!customLinkColors["LORA_STACK"] || !LGraphCanvas.link_type_colors["LORA_STACK"]) {customLinkColors["LORA_STACK"] = customLoraStackLink;} +if (!customLinkColors["CONTROL_NET_STACK"] || !LGraphCanvas.link_type_colors["CONTROL_NET_STACK"]) {customLinkColors["CONTROL_NET_STACK"] = customLoraStackLink;} + +localStorage.setItem('Comfy.Settings.easyUse.customLinkColors', JSON.stringify(customLinkColors)); + +// 增加自定义主题 +const ui = { + "version": 102, + "id": "obsidian", + "name": "Obsidian", + "colors": { + "node_slot": { + "CLIP": "#FFD500", + "CLIP_VISION": "#A8DADC", + "CLIP_VISION_OUTPUT": "#ad7452", + "CONDITIONING": "#FFA931", + "CONTROL_NET": "#6EE7B7", + "IMAGE": "#64B5F6", + "LATENT": "#FF9CF9", + "MASK": "#81C784", + "MODEL": "#B39DDB", + "STYLE_MODEL": "#C2FFAE", + "VAE": "#FF6E6E", + "TAESD": "#DCC274", + "PIPE_LINE": customPipeLineLink, + "PIPE_LINE_SDXL": customPipeLineSDXLLink, + "INT": customIntLink, + "XYPLOT": customXYPlotLink, + "X_Y": customXYLink + }, + "litegraph_base": { + "BACKGROUND_IMAGE": "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABkCAIAAAD/gAIDAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAQBJREFUeNrs1rEKwjAUhlETUkj3vP9rdmr1Ysammk2w5wdxuLgcMHyptfawuZX4pJSWZTnfnu/lnIe/jNNxHHGNn//HNbbv+4dr6V+11uF527arU7+u63qfa/bnmh8sWLBgwYJlqRf8MEptXPBXJXa37BSl3ixYsGDBMliwFLyCV/DeLIMFCxYsWLBMwSt4Be/NggXLYMGCBUvBK3iNruC9WbBgwYJlsGApeAWv4L1ZBgsWLFiwYJmCV/AK3psFC5bBggULloJX8BpdwXuzYMGCBctgwVLwCl7Be7MMFixYsGDBsu8FH1FaSmExVfAxBa/gvVmwYMGCZbBg/W4vAQYA5tRF9QYlv/QAAAAASUVORK5CYII=", + "CLEAR_BACKGROUND_COLOR": "#222222", + "NODE_TITLE_COLOR": "rgba(255,255,255,.75)", + "NODE_SELECTED_TITLE_COLOR": "#FFF", + "NODE_TEXT_SIZE": 14, + "NODE_TEXT_COLOR": "#b8b8b8", + "NODE_SUBTEXT_SIZE": 12, + "NODE_DEFAULT_COLOR": "rgba(0,0,0,.8)", + "NODE_DEFAULT_BGCOLOR": "rgba(22,22,22,.8)", + "NODE_DEFAULT_BOXCOLOR": "rgba(255,255,255,.75)", + "NODE_DEFAULT_SHAPE": "box", + "NODE_BOX_OUTLINE_COLOR": 
customThemeColor, + "DEFAULT_SHADOW_COLOR": "rgba(0,0,0,0)", + "DEFAULT_GROUP_FONT": 24, + + "WIDGET_BGCOLOR": "#242424", + "WIDGET_OUTLINE_COLOR": "#333", + "WIDGET_TEXT_COLOR": "#a3a3a8", + "WIDGET_SECONDARY_TEXT_COLOR": "#97979c", + + "LINK_COLOR": "#9A9", + "EVENT_LINK_COLOR": "#A86", + "CONNECTING_LINK_COLOR": "#AFA" + }, + "comfy_base": { + "fg-color": "#fff", + "bg-color": "#242424", + "comfy-menu-bg": "rgba(24,24,24,.9)", + "comfy-input-bg": "#262626", + "input-text": "#ddd", + "descrip-text": "#999", + "drag-text": "#ccc", + "error-text": "#ff4444", + "border-color": "#29292c", + "tr-even-bg-color": "rgba(28,28,28,.9)", + "tr-odd-bg-color": "rgba(19,19,19,.9)" + } + } +} + +let custom_theme = null +let control_mode = null +try{ + custom_theme = localStorage.getItem('Comfy.Settings.Comfy.CustomColorPalettes') ? JSON.parse(localStorage.getItem('Comfy.Settings.Comfy.CustomColorPalettes')) : {}; +} +catch (e) {custom_theme = {}} +try{ + const dark_bg = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABkCAIAAAD/gAIDAAAACXBIWXMAAAsTAAALEwEAmpwYAAAGlmlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPD94cGFja2V0IGJlZ2luPSLvu78iIGlkPSJXNU0wTXBDZWhpSHpyZVN6TlRjemtjOWQiPz4gPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iQWRvYmUgWE1QIENvcmUgOS4xLWMwMDEgNzkuMTQ2Mjg5OSwgMjAyMy8wNi8yNS0yMDowMTo1NSAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczpkYz0iaHR0cDovL3B1cmwub3JnL2RjL2VsZW1lbnRzLzEuMS8iIHhtbG5zOnBob3Rvc2hvcD0iaHR0cDovL25zLmFkb2JlLmNvbS9waG90b3Nob3AvMS4wLyIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0RXZ0PSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VFdmVudCMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIDI1LjEgKFdpbmRvd3MpIiB4bXA6Q3JlYXRlRGF0ZT0iMjAyMy0xMS0xM1QwMDoxODowMiswMTowMCIgeG1wOk1vZGlmeURhdGU9IjIwMjMtMTEtMTVUMDI6MDQ6NTkrMDE6MDAiIHhtcDpNZXRhZGF0YURhdGU9IjIwMjMtMTEtMTVUMDI6MDQ6NTkrMDE6MDAiIGRjOmZvcm1hdD0iaW1hZ2UvcG5nIiBwaG90b3Nob3A6Q29sb3JNb2RlPSIzIiB4bXBNTTpJbnN0YW5jZUlEPSJ4bXAuaWlkOmIyYzRhNjA5LWJmYTctYTg0MC1iOGFlLTk3MzE2ZjM1ZGIyNyIgeG1wTU06RG9jdW1lbnRJRD0iYWRvYmU6ZG9jaWQ6cGhvdG9zaG9wOjk0ZmNlZGU4LTE1MTctZmQ0MC04ZGU3LWYzOTgxM2E3ODk5ZiIgeG1wTU06T3JpZ2luYWxEb2N1bWVudElEPSJ4bXAuZGlkOjIzMWIxMGIwLWI0ZmItMDI0ZS1iMTJlLTMwNTMwM2NkMDdjOCI+IDx4bXBNTTpIaXN0b3J5PiA8cmRmOlNlcT4gPHJkZjpsaSBzdEV2dDphY3Rpb249ImNyZWF0ZWQiIHN0RXZ0Omluc3RhbmNlSUQ9InhtcC5paWQ6MjMxYjEwYjAtYjRmYi0wMjRlLWIxMmUtMzA1MzAzY2QwN2M4IiBzdEV2dDp3aGVuPSIyMDIzLTExLTEzVDAwOjE4OjAyKzAxOjAwIiBzdEV2dDpzb2Z0d2FyZUFnZW50PSJBZG9iZSBQaG90b3Nob3AgMjUuMSAoV2luZG93cykiLz4gPHJkZjpsaSBzdEV2dDphY3Rpb249InNhdmVkIiBzdEV2dDppbnN0YW5jZUlEPSJ4bXAuaWlkOjQ4OWY1NzlmLTJkNjUtZWQ0Zi04OTg0LTA4NGE2MGE1ZTMzNSIgc3RFdnQ6d2hlbj0iMjAyMy0xMS0xNVQwMjowNDo1OSswMTowMCIgc3RFdnQ6c29mdHdhcmVBZ2VudD0iQWRvYmUgUGhvdG9zaG9wIDI1LjEgKFdpbmRvd3MpIiBzdEV2dDpjaGFuZ2VkPSIvIi8+IDxyZGY6bGkgc3RFdnQ6YWN0aW9uPSJzYXZlZCIgc3RFdnQ6aW5zdGFuY2VJRD0ieG1wLmlpZDpiMmM0YTYwOS1iZmE3LWE4NDAtYjhhZS05NzMxNmYzNWRiMjciIHN0RXZ0OndoZW49IjIwMjMtMTEtMTVUMDI6MDQ6NTkrMDE6MDAiIHN0RXZ0OnNvZnR3YXJlQWdlbnQ9IkFkb2JlIFBob3Rvc2hvcCAyNS4xIChXaW5kb3dzKSIgc3RFdnQ6Y2hhbmdlZD0iLyIvPiA8L3JkZjpTZXE+IDwveG1wTU06SGlzdG9yeT4gPC9yZGY6RGVzY3JpcHRpb24+IDwvcmRmOlJERj4gPC94OnhtcG1ldGE+IDw/eHBhY2tldCBlbmQ9InIiPz4OTe6GAAAAx0lEQVR42u3WMQoAIQxFwRzJys77X8vSLiRgITif7bYbgrwYc/mKXyBoY4VVBgsWLFiwYFmOlTv+9jfDOjHmr8u6eVkGCxYsWLBgmc5S8ApewXvgYRksWLBgKXidpeBdloL3wMOCBctgwVLwCl7BuyyDBQsWLFiwTGcpeAWv4D3wsAwWLFiwFLzOUvAuS8F74GHBgmWwYCl4Ba/gXZbBggULFixY
prMUvIJX8B54WAYLFixYCl5nKXiXpeA98LBgwTJYsGC9tg1o8f4TTtqzNQAAAABJRU5ErkJggg==" + // 修改自定义主题 + if(!custom_theme || !custom_theme.obsidian || !custom_theme.obsidian.version || custom_theme.obsidian.version0) node.execute_triggered--; + if (node.action_triggered>0) node.action_triggered--; + }; + LGraphCanvas.prototype.drawNodeWidgets = function( + node, + posY, + ctx, + active_widget + ) { + if (!node.widgets || !node.widgets.length) { + return 0; + } + var width = node.size[0]; + var widgets = node.widgets; + posY += 2; + var H = LiteGraph.NODE_WIDGET_HEIGHT; + var show_text = this.ds.scale > 0.5; + ctx.save(); + ctx.globalAlpha = this.editor_alpha; + var outline_color = LiteGraph.WIDGET_OUTLINE_COLOR; + var background_color = LiteGraph.WIDGET_BGCOLOR; + var text_color = LiteGraph.WIDGET_TEXT_COLOR; + var secondary_text_color = LiteGraph.WIDGET_SECONDARY_TEXT_COLOR; + var margin = 12; + + for (var i = 0; i < widgets.length; ++i) { + var w = widgets[i]; + var y = posY; + if (w.y) { + y = w.y; + } + w.last_y = y; + ctx.strokeStyle = outline_color; + ctx.fillStyle = "#222"; + ctx.textAlign = "left"; + ctx.lineWidth = 1; + if(w.disabled) + ctx.globalAlpha *= 0.5; + var widget_width = w.width || width; + + switch (w.type) { + case "button": + ctx.font = "10px Inter" + ctx.fillStyle = background_color; + if (w.clicked) { + ctx.fillStyle = "#AAA"; + w.clicked = false; + this.dirty_canvas = true; + } + ctx.beginPath(); + ctx.roundRect(margin, y, widget_width - margin * 2, H, [H * 0.25]); + ctx.fill(); + if(show_text && !w.disabled) + ctx.stroke(); + if (show_text) { + ctx.textAlign = "center"; + ctx.fillStyle = text_color; + ctx.fillText(w.label || w.name, widget_width * 0.5, y + H * 0.7); + } + break; + case "toggle": + ctx.font = "10px Inter" + ctx.textAlign = "left"; + ctx.strokeStyle = outline_color; + ctx.fillStyle = background_color; + ctx.beginPath(); + if (show_text) + ctx.roundRect(margin, y, widget_width - margin * 2, H, [H * 0.25]); + else + ctx.rect(margin, y, widget_width - margin * 2, H ); + ctx.fill(); + if(show_text && !w.disabled) + ctx.stroke(); + ctx.fillStyle = w.value ? customThemeColor : "#333"; + ctx.beginPath(); + ctx.arc( widget_width - margin * 2, y + H * 0.5, H * 0.25, 0, Math.PI * 2 ); + ctx.fill(); + if (show_text) { + ctx.fillStyle = secondary_text_color; + const label = w.label || w.name; + if (label != null) { + ctx.fillText(label, margin * 1.6, y + H * 0.7); + } + ctx.font = "10px Inter" + ctx.fillStyle = w.value ? text_color : secondary_text_color; + ctx.textAlign = "right"; + ctx.fillText( + w.value + ? w.options.on || "true" + : w.options.off || "false", + widget_width - 35, + y + H * 0.7 + ); + } + break; + case "slider": + ctx.font = "10px Inter" + ctx.fillStyle = background_color; + ctx.strokeStyle = outline_color; + ctx.beginPath(); + ctx.roundRect(margin, y, widget_width - margin * 2, H, [H*0.25]); + ctx.fill(); + ctx.stroke() + var range = w.options.max - w.options.min; + var nvalue = (w.value - w.options.min) / range; + if(nvalue < 0.0) nvalue = 0.0; + if(nvalue > 1.0) nvalue = 1.0; + ctx.fillStyle = w.options.hasOwnProperty("slider_color") ? w.options.slider_color : (active_widget == w ? "#333" : customThemeColor); + ctx.beginPath(); + ctx.roundRect(margin, y, nvalue * (widget_width - margin * 2), H, [H*0.25]); + ctx.fill(); + if (w.marker) { + var marker_nvalue = (w.marker - w.options.min) / range; + if(marker_nvalue < 0.0) marker_nvalue = 0.0; + if(marker_nvalue > 1.0) marker_nvalue = 1.0; + ctx.fillStyle = w.options.hasOwnProperty("marker_color") ? 
w.options.marker_color : "#AA9"; + ctx.roundRect( margin + marker_nvalue * (widget_width - margin * 2), y, 2, H , [H * 0.25] ); + } + if (show_text) { + ctx.textAlign = "center"; + ctx.fillStyle = text_color; + var text = (w.label || w.name) + ": " + (Number(w.value).toFixed(w.options.precision != null ? w.options.precision : 3)).toString() + ctx.fillText( + text, + widget_width * 0.5, + y + H * 0.7 + ); + + } + break; + case "number": + case "combo": + ctx.textAlign = "left"; + ctx.strokeStyle = outline_color; + ctx.fillStyle = background_color; + ctx.beginPath(); + if(show_text) + ctx.roundRect(margin, y, widget_width - margin * 2, H, [H * 0.25] ); + else + ctx.rect(margin, y, widget_width - margin * 2, H ); + ctx.fill(); + if (show_text) { + if(!w.disabled) + ctx.stroke(); + ctx.fillStyle = text_color; + if(!w.disabled) + { + ctx.beginPath(); + ctx.moveTo(margin + 12, y + 6.5); + ctx.lineTo(margin + 6, y + H * 0.5); + ctx.lineTo(margin + 12, y + H - 6.5); + ctx.fill(); + ctx.beginPath(); + ctx.moveTo(widget_width - margin - 12, y + 6.5); + ctx.lineTo(widget_width - margin - 6, y + H * 0.5); + ctx.lineTo(widget_width - margin - 12, y + H - 6.5); + ctx.fill(); + } + ctx.fillStyle = secondary_text_color; + ctx.font = "10px Inter" + ctx.fillText(w.label || w.name, margin * 2 + 5, y + H * 0.7); + ctx.fillStyle = text_color; + ctx.textAlign = "right"; + var rightDistance = 6 + if (w.type == "number") { + ctx.font = "10px Inter,JetBrains Mono,monospace" + ctx.fillText( + Number(w.value).toFixed( + w.options.precision !== undefined + ? w.options.precision + : 3 + ), + widget_width - margin * 2 - rightDistance, + y + H * 0.7 + ); + } else { + var v = w.value; + if( w.options.values ) + { + var values = w.options.values; + if( values.constructor === Function ) + values = values(); + if(values && values.constructor !== Array) + v = values[ w.value ]; + } + ctx.fillText( + v, + widget_width - margin * 2 - rightDistance, + y + H * 0.7 + ); + } + } + break; + case "string": + case "text": + ctx.textAlign = "left"; + ctx.strokeStyle = outline_color; + ctx.fillStyle = background_color; + ctx.beginPath(); + if (show_text) + ctx.roundRect(margin, y, widget_width - margin * 2, H, [H * 0.25]); + else + ctx.rect( margin, y, widget_width - margin * 2, H ); + ctx.fill(); + if (show_text) { + if(!w.disabled) + ctx.stroke(); + ctx.save(); + ctx.beginPath(); + ctx.rect(margin, y, widget_width - margin * 2, H); + ctx.clip(); + + //ctx.stroke(); + ctx.fillStyle = secondary_text_color; + const label = w.label || w.name; + ctx.font = "10px Inter" + if (label != null) { + ctx.fillText(label, margin * 2, y + H * 0.7); + } + ctx.fillStyle = text_color; + ctx.textAlign = "right"; + ctx.fillText(String(w.value).substr(0,30), widget_width - margin * 2, y + H * 0.7); //30 chars max + ctx.restore(); + } + break; + default: + if (w.draw) { + w.draw(ctx, node, widget_width, y, H); + } + break; + } + posY += (w.computeSize ? w.computeSize(widget_width)[1] : H) + 4; + ctx.globalAlpha = this.editor_alpha; + + } + ctx.restore(); + ctx.textAlign = "left"; + }; + } +}catch(e){ + console.error(e) +} + +function updateControlWidgetLabel(widget, controlValueRunBefore=false) { + let replacement = "after"; + let find = "before"; + if (controlValueRunBefore) { + [find, replacement] = [replacement, find] + } + widget.label = (widget.label ?? 
widget.name).replace(find, replacement); + widget.name = widget.label; +} + +// 节点颜色 +const COLOR_THEMES = LGraphCanvas.node_colors +const NODE_COLORS = { + "easy positive":"green", + "easy negative":"red", + "easy promptList":"cyan", + "easy promptLine":"cyan", + "easy promptConcat":"cyan", + "easy promptReplace":"cyan", + "easy XYInputs: Seeds++ Batch": customXYLink, + "easy XYInputs: ModelMergeBlocks": customXYLink, + 'easy textSwitch': "pale_blue" +} + +function setNodeColors(node, theme) { + if (!theme) {return;} + if(theme.color) node.color = theme.color; + if(theme.bgcolor) node.bgcolor = theme.bgcolor; +} + + +app.registerExtension({ + name: "comfy.easyUse.interface", + setup() { + Object.assign(app.canvas.default_connection_color_byType, customLinkColors); + Object.assign(LGraphCanvas.link_type_colors, customLinkColors); + }, + + async nodeCreated(node) { + if (NODE_COLORS.hasOwnProperty(node.comfyClass)) { + const colorKey = NODE_COLORS[node.comfyClass] + const theme = COLOR_THEMES[colorKey]; + setNodeColors(node, theme); + } + // 修复官方bug: 应该初始化修改节点的control_mode name + if(control_mode && control_mode == 'before'){ + const controlValueRunBefore = control_mode == 'before' + if(node.widgets && node.widgets.length>0) { + for (const w of node.widgets) { + if (['control_before_generate', 'control_after_generate'].includes(w.name)) { + await updateControlWidgetLabel(w, controlValueRunBefore); + if (w.linkedWidgets) { + for (const l of w.linkedWidgets) { + await updateControlWidgetLabel(l, controlValueRunBefore); + } + } + } + } + } + } + }, +}) diff --git a/ComfyUI-Easy-Use/web/js/easy/easyQuick.js b/ComfyUI-Easy-Use/web/js/easy/easyQuick.js new file mode 100644 index 0000000000000000000000000000000000000000..394bd8a31508a9c1f2f669f2030ea5847d36699e --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/easy/easyQuick.js @@ -0,0 +1,250 @@ +// 1.0.2 +import { app } from "../../../../scripts/app.js"; +import { GroupNodeConfig } from "../../../../extensions/core/groupNode.js"; +import { api } from "../../../../scripts/api.js"; +import { $t } from "../common/i18n.js" + +const nodeTemplateShortcutId = "Comfy.EasyUse.NodeTemplateShortcut" +const processBarId = "Comfy.EasyUse.queueProcessBar" + +let enableNodeTemplateShortcut = true +let enableQueueProcess = false + +export function addNodeTemplateShortcutSetting(app) { + app.ui.settings.addSetting({ + id: nodeTemplateShortcutId, + name: $t("Enable ALT+1~9 to paste nodes from nodes template (ComfyUI-Easy-Use)"), + type: "boolean", + defaultValue: enableNodeTemplateShortcut, + onChange(value) { + enableNodeTemplateShortcut = !!value; + }, + }); +} +export function addQueueProcessSetting(app) { + app.ui.settings.addSetting({ + id: processBarId, + name: $t("Enable process bar in queue button (ComfyUI-Easy-Use)"), + type: "boolean", + defaultValue: enableQueueProcess, + onChange(value) { + enableQueueProcess = !!value; + }, + }); +} +const getEnableNodeTemplateShortcut = _ => app.ui.settings.getSettingValue(nodeTemplateShortcutId, true) +const getQueueProcessSetting = _ => app.ui.settings.getSettingValue(processBarId, false) + +function loadTemplate(){ + return localStorage['Comfy.NodeTemplates'] ? 
JSON.parse(localStorage['Comfy.NodeTemplates']) : null +} +const clipboardAction = async (cb) => { + const old = localStorage.getItem("litegrapheditor_clipboard"); + await cb(); + localStorage.setItem("litegrapheditor_clipboard", old); +}; +async function addTemplateToCanvas(t){ + const data = JSON.parse(t.data); + await GroupNodeConfig.registerFromWorkflow(data.groupNodes, {}); + localStorage.setItem("litegrapheditor_clipboard", t.data); + app.canvas.pasteFromClipboard(); +} + +app.registerExtension({ + name: 'comfy.easyUse.quick', + init() { + const keybindListener = async function (event) { + let modifierPressed = event.altKey; + const isEnabled = getEnableNodeTemplateShortcut() + if(isEnabled){ + const mac_alt_nums = ['¡','™','£','¢','∞','§','¶','•','ª'] + const nums = ['1','2','3','4','5','6','7','8','9'] + let key = event.key + if(mac_alt_nums.includes(key)){ + const idx = mac_alt_nums.findIndex(cate=> cate == key) + key = nums[idx] + modifierPressed = true + } + if(['1','2','3','4','5','6','7','8','9'].includes(key) && modifierPressed) { + const template = loadTemplate() + const idx = parseInt(key) - 1 + if (template && template[idx]) { + let t = template[idx] + try{ + let data = JSON.parse(t.data) + data.title = t.name + t.data = JSON.stringify(data) + clipboardAction(_ => { + addTemplateToCanvas(t) + }) + }catch (e){ + console.error(e) + } + + } + if (event.ctrlKey || event.altKey || event.metaKey) { + return; + } + } + } + + } + window.addEventListener("keydown", keybindListener, true); + }, + + setup(app) { + addNodeTemplateShortcutSetting(app) + addQueueProcessSetting(app) + registerListeners() + } +}); + +const registerListeners = () => { + const queue_button = document.getElementById("queue-button") + const old_queue_button_text = queue_button.innerText + api.addEventListener('progress', ({ + detail, + }) => { + const isEnabled = getQueueProcessSetting() + if(isEnabled){ + const { + value, max, node, + } = detail; + const progress = Math.floor((value / max) * 100); + // console.log(progress) + if (!isNaN(progress) && progress >= 0 && progress <= 100) { + queue_button.innerText = progress ==0 || progress == 100 ? old_queue_button_text : "ㅤ " + const width = progress ==0 || progress == 100 ? '0%' : progress.toString() + '%' + let bar = document.createElement("div") + queue_button.setAttribute('data-attr', progress ==0 || progress == 100 ? 
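// A minimal sketch of the ALT+number paste above: the saved node template is pushed through the
// litegraph clipboard so the stock paste code does the node creation, and the previous clipboard
// content is restored afterwards; pasteTemplateAt is a hypothetical name and `idx` is the 0-based
// template slot (ALT+1 maps to 0).
async function pasteTemplateAt(idx) {
    const templates = loadTemplate()                       // Comfy.NodeTemplates from localStorage
    const t = templates && templates[idx]
    if (!t) return
    const old = localStorage.getItem("litegrapheditor_clipboard")
    const data = JSON.parse(t.data)
    await GroupNodeConfig.registerFromWorkflow(data.groupNodes, {})  // make embedded group nodes known
    localStorage.setItem("litegrapheditor_clipboard", t.data)
    app.canvas.pasteFromClipboard()
    localStorage.setItem("litegrapheditor_clipboard", old)           // put the real clipboard back
}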
"" : progress.toString() + '%') + document.documentElement.style.setProperty('--process-bar-width', width) + } + } + + }, false); + + api.addEventListener('status', ({ + detail, + }) => { + const queueRemaining = detail?.exec_info.queue_remaining; + if(queueRemaining === 0){ + let queue_button = document.getElementById("queue-button") + queue_button.innerText = old_queue_button_text + queue_button.setAttribute('data-attr', "") + document.documentElement.style.setProperty('--process-bar-width', '0%') + } + }, false); +}; + + +// 修改粘贴指令 +LGraphCanvas.prototype.pasteFromClipboard = function(isConnectUnselected = false) { + // if ctrl + shift + v is off, return when isConnectUnselected is true (shift is pressed) to maintain old behavior + if (!LiteGraph.ctrl_shift_v_paste_connect_unselected_outputs && isConnectUnselected) { + return; + } + var data = localStorage.getItem("litegrapheditor_clipboard"); + if (!data) { + return; + } + + this.graph.beforeChange(); + + //create nodes + var clipboard_info = JSON.parse(data); + // calculate top-left node, could work without this processing but using diff with last node pos :: clipboard_info.nodes[clipboard_info.nodes.length-1].pos + var posMin = false; + var posMinIndexes = false; + for (var i = 0; i < clipboard_info.nodes.length; ++i) { + if (posMin){ + if(posMin[0]>clipboard_info.nodes[i].pos[0]){ + posMin[0] = clipboard_info.nodes[i].pos[0]; + posMinIndexes[0] = i; + } + if(posMin[1]>clipboard_info.nodes[i].pos[1]){ + posMin[1] = clipboard_info.nodes[i].pos[1]; + posMinIndexes[1] = i; + } + } + else{ + posMin = [clipboard_info.nodes[i].pos[0], clipboard_info.nodes[i].pos[1]]; + posMinIndexes = [i, i]; + } + } + var nodes = []; + var left_arr = [], right_arr = [], top_arr =[], bottom_arr =[]; + + for (var i = 0; i < clipboard_info.nodes.length; ++i) { + var node_data = clipboard_info.nodes[i]; + var node = LiteGraph.createNode(node_data.type); + if (node) { + + node.configure(node_data); + //paste in last known mouse position + node.pos[0] += this.graph_mouse[0] - posMin[0]; //+= 5; + node.pos[1] += this.graph_mouse[1] - posMin[1]; //+= 5; + + left_arr.push(node.pos[0]) + right_arr.push(node.pos[0] + node.size[0]) + top_arr.push(node.pos[1]) + bottom_arr.push(node.pos[1] + node.size[1]) + + this.graph.add(node,{doProcessChange:false}); + + nodes.push(node); + + } + } + + if(clipboard_info.title){ + var l = Math.min(...left_arr) - 15; + var r = Math.max(...right_arr) - this.graph_mouse[0] + 30; + var t = Math.min(...top_arr) - 100; + var b = Math.max(...bottom_arr) - this.graph_mouse[1] + 130; + + // create group + const groups = [ + { + "title": clipboard_info.title, + "bounding": [ + l, + t, + r, + b + ], + "color": "#3f789e", + "font_size": 24, + "locked": false + } + ] + + for (var i = 0; i < groups.length; ++i) { + var group = new LiteGraph.LGraphGroup(); + group.configure(groups[i]); + this.graph.add(group); + } + } + + //create links + for (var i = 0; i < clipboard_info.links.length; ++i) { + var link_info = clipboard_info.links[i]; + var origin_node; + var origin_node_relative_id = link_info[0]; + if (origin_node_relative_id != null) { + origin_node = nodes[origin_node_relative_id]; + } else if (LiteGraph.ctrl_shift_v_paste_connect_unselected_outputs && isConnectUnselected) { + var origin_node_id = link_info[4]; + if (origin_node_id) { + origin_node = this.graph.getNodeById(origin_node_id); + } + } + var target_node = nodes[link_info[2]]; + if( origin_node && target_node ) + origin_node.connect(link_info[1], target_node, link_info[3]); + else 
+ console.warn("Warning, nodes missing on pasting"); + } + + this.selectNodes(nodes); + this.graph.afterChange(); +}; \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/easy/easySaveImage.js b/ComfyUI-Easy-Use/web/js/easy/easySaveImage.js new file mode 100644 index 0000000000000000000000000000000000000000..35adb752ed1e167fa47083fafa149310dffedd0f --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/easy/easySaveImage.js @@ -0,0 +1,36 @@ +import { app } from "../../../../scripts/app.js"; +import { applyTextReplacements } from "../../../../scripts/utils.js"; + +const extraNodes = ["easy imageSave", "easy fullkSampler", "easy kSampler", "easy kSamplerTiled","easy kSamplerInpainting", "easy kSamplerDownscaleUnet", "easy kSamplerSDTurbo","easy detailerFix"] + +app.registerExtension({ + name: "Comfy.Easy.SaveImageExtraOutput", + async beforeRegisterNodeDef(nodeType, nodeData, app) { + if (extraNodes.includes(nodeData.name)) { + const onNodeCreated = nodeType.prototype.onNodeCreated; + // When the SaveImage node is created we want to override the serialization of the output name widget to run our S&R + nodeType.prototype.onNodeCreated = function () { + const r = onNodeCreated ? onNodeCreated.apply(this, arguments) : undefined; + + const widget = this.widgets.find((w) => w.name === "filename_prefix" || w.name === 'save_prefix'); + widget.serializeValue = () => { + return applyTextReplacements(app, widget.value); + }; + + return r; + }; + } else { + // When any other node is created add a property to alias the node + const onNodeCreated = nodeType.prototype.onNodeCreated; + nodeType.prototype.onNodeCreated = function () { + const r = onNodeCreated ? onNodeCreated.apply(this, arguments) : undefined; + + if (!this.properties || !("Node name for S&R" in this.properties)) { + this.addProperty("Node name for S&R", this.constructor.type, "string"); + } + + return r; + }; + } + }, +}); diff --git a/ComfyUI-Easy-Use/web/js/easy/easySeg.js b/ComfyUI-Easy-Use/web/js/easy/easySeg.js new file mode 100644 index 0000000000000000000000000000000000000000..11e8feb61f3e0b3f8358c5a38c9a6df9730c965e --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/easy/easySeg.js @@ -0,0 +1,136 @@ +import {app} from "../../../../scripts/app.js"; +import {$el} from "../../../../scripts/ui.js"; +import {$t} from "../common/i18n.js"; +import {findWidgetByName, toggleWidget} from "../common/utils.js"; + + +const tags = { + "selfie_multiclass_256x256": ["Background", "Hair", "Body", "Face", "Clothes", "Others",], + "human_parsing_lip":["Background","Hat","Hair","Glove","Sunglasses","Upper-clothes","Dress","Coat","Socks","Pants","Jumpsuits","Scarf","Skirt","Face","Left-arm","Right-arm","Left-leg","Right-leg","Left-shoe","Right-shoe"], +} +function getTagList(tags) { + let rlist=[] + tags.forEach((k,i) => { + rlist.push($el( + "label.easyuse-prompt-styles-tag", + { + dataset: { + tag: i, + name: $t(k), + index: i + }, + $: (el) => { + el.children[0].onclick = () => { + el.classList.toggle("easyuse-prompt-styles-tag-selected"); + }; + }, + }, + [ + $el("input",{ + type: 'checkbox', + name: i + }), + $el("span",{ + textContent: $t(k), + }) + ] + )) + }); + return rlist +} + + +app.registerExtension({ + name: 'comfy.easyUse.seg', + async beforeRegisterNodeDef(nodeType, nodeData, app) { + + if (nodeData.name == 'easy humanSegmentation') { + // 创建时 + const onNodeCreated = nodeType.prototype.onNodeCreated; + nodeType.prototype.onNodeCreated = function () { + onNodeCreated ? 
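// A minimal sketch of the prefix S&R hook in easySaveImage.js above: instead of serializing the raw
// widget value, the filename/save prefix widget serializes with %Node name% style replacements
// already applied, so the backend receives the resolved prefix; hookPrefixWidget is a hypothetical
// name.
function hookPrefixWidget(node) {
    const widget = node.widgets.find((w) => w.name === "filename_prefix" || w.name === 'save_prefix')
    if (!widget) return
    widget.serializeValue = () => applyTextReplacements(app, widget.value)
}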
onNodeCreated?.apply(this, arguments) : undefined; + const method = this.widgets.findIndex((w) => w.name == 'method'); + const list = $el("ul.easyuse-prompt-styles-list.no-top", []); + let method_values = '' + this.setProperty("values", []) + + let selector = this.addDOMWidget('mask_components',"btn",$el('div.easyuse-prompt-styles',[list])) + + Object.defineProperty(this.widgets[method],'value',{ + set:(value)=>{ + method_values = value + if(method_values){ + selector.element.children[0].innerHTML = '' + if(method_values == 'selfie_multiclass_256x256'){ + toggleWidget(this, findWidgetByName(this, 'confidence'), true) + this.setSize([300, 260]); + }else{ + toggleWidget(this, findWidgetByName(this, 'confidence')) + this.setSize([300, 500]); + } + let list = getTagList(tags[method_values]); + selector.element.children[0].append(...list) + } + }, + get: () => { + return method_values + } + }) + + let mask_select_values = '' + + Object.defineProperty(selector, "value", { + set: (value) => { + setTimeout(_=>{ + selector.element.children[0].querySelectorAll(".easyuse-prompt-styles-tag").forEach(el => { + let arr = value.split(',') + if (arr.includes(el.dataset.tag)) { + el.classList.add("easyuse-prompt-styles-tag-selected"); + el.children[0].checked = true + } + }) + },100) + }, + get: () => { + selector.element.children[0].querySelectorAll(".easyuse-prompt-styles-tag").forEach(el => { + if(el.classList.value.indexOf("easyuse-prompt-styles-tag-selected")>=0){ + if(!this.properties["values"].includes(el.dataset.tag)){ + this.properties["values"].push(el.dataset.tag); + } + }else{ + if(this.properties["values"].includes(el.dataset.tag)){ + this.properties["values"]= this.properties["values"].filter(v=>v!=el.dataset.tag); + } + } + }); + mask_select_values = this.properties["values"].join(','); + return mask_select_values; + } + }); + + let old_values = '' + let mask_lists_dom = selector.element.children[0] + + // 初始化 + setTimeout(_=>{ + if(!method_values) { + method_values = 'selfie_multiclass_256x256' + selector.element.children[0].innerHTML = '' + // 重新排序 + let list = getTagList(tags[method_values]); + selector.element.children[0].append(...list) + } + if(method_values == 'selfie_multiclass_256x256'){ + toggleWidget(this, findWidgetByName(this, 'confidence'), true) + this.setSize([300, 260]); + }else{ + toggleWidget(this, findWidgetByName(this, 'confidence')) + this.setSize([300, 500]); + } + },1) + + return onNodeCreated; + } + } + } +}) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/easy/easySelector.js b/ComfyUI-Easy-Use/web/js/easy/easySelector.js new file mode 100644 index 0000000000000000000000000000000000000000..c9dde34da828d7bed2d0361c9aaf489e4ed2d88a --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/easy/easySelector.js @@ -0,0 +1,296 @@ +// 1.0.3 +import { app } from "../../../../scripts/app.js"; +import { api } from "../../../../scripts/api.js"; +import { $el } from "../../../../scripts/ui.js"; +import { $t } from "../common/i18n.js"; + +// 获取风格列表 +let styles_list_cache = {} +let styles_image_cache = {} +async function getStylesList(name){ + if(styles_list_cache[name]) return styles_list_cache[name] + else{ + const resp = await api.fetchApi(`/easyuse/prompt/styles?name=${name}`); + if (resp.status === 200) { + let data = await resp.json(); + styles_list_cache[name] = data; + return data; + } + return undefined; + } +} +async function getStylesImage(name, styles_name){ + if(!styles_image_cache[styles_name]) styles_image_cache[styles_name] = {} + 
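+    // reuse the cached URL when this style's preview image has already been fetched once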
if(styles_image_cache[styles_name][name]) return styles_image_cache[styles_name][name] + else{ + const resp = await api.fetchApi(`/easyuse/prompt/styles/image?name=${name}&styles_name=${styles_name}`); + if (resp.status === 200) { + const text = await resp.text() + if(text.startsWith('http')){ + styles_image_cache[styles_name][name] = text + return text + } + const url = `/easyuse/prompt/styles/image?name=${name}&styles_name=${styles_name}` + styles_image_cache[styles_name][name] = url + return url + } + return undefined; + } +} + +function getTagList(tags, styleName, language='en-US') { + let rlist=[] + tags.forEach((k,i) => { + rlist.push($el( + "label.easyuse-prompt-styles-tag", + { + dataset: { + tag: k['name'], + name: language == 'zh-CN' && k['name_cn'] ? k['name_cn'] : k['name'], + imgName: k['imgName'], + index: i + }, + $: (el) => { + el.children[0].onclick = () => { + el.classList.toggle("easyuse-prompt-styles-tag-selected"); + }; + el.onmousemove = (e) => { + displayImage(el.dataset.imgName, styleName, e) + }; + el.onmouseout = () => { + hiddenImage() + }; + el.onmouseover = (e) => { + displayImage(el.dataset.imgName, styleName) + }; + }, + }, + [ + $el("input",{ + type: 'checkbox', + name: k['name'] + }), + $el("span",{ + textContent: language == 'zh-CN' && k['name_cn'] ? k['name_cn'] : k['name'], + }) + ] + )) + }); + return rlist +} + +const foocus_base_path = "https://raw.githubusercontent.com/lllyasviel/Fooocus/main/sdxl_styles/samples/" +const empty_img = "data:image/jpeg;base64,/9j/4QAYRXhpZgAASUkqAAgAAAAAAAAAAAAAAP/sABFEdWNreQABAAQAAAA8AAD/4QNLaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wLwA8P3hwYWNrZXQgYmVnaW49Iu+7vyIgaWQ9Ilc1TTBNcENlaGlIenJlU3pOVGN6a2M5ZCI/PiA8eDp4bXBtZXRhIHhtbG5zOng9ImFkb2JlOm5zOm1ldGEvIiB4OnhtcHRrPSJBZG9iZSBYTVAgQ29yZSA5LjEtYzAwMSA3OS4xNDYyODk5Nzc3LCAyMDIzLzA2LzI1LTIzOjU3OjE0ICAgICAgICAiPiA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPiA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtbG5zOnhtcE1NPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvbW0vIiB4bWxuczpzdFJlZj0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL3NUeXBlL1Jlc291cmNlUmVmIyIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgMjUuMSAoMjAyMzA5MDUubS4yMzE2IDk3OWM4NmQpICAoV2luZG93cykiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6RjA3NEU1QzNCNUJBMTFFRUExMUVDNkZDRjI0NzlBN0QiIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6RjA3NEU1QzRCNUJBMTFFRUExMUVDNkZDRjI0NzlBN0QiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDpGMDc0RTVDMUI1QkExMUVFQTExRUM2RkNGMjQ3OUE3RCIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDpGMDc0RTVDMkI1QkExMUVFQTExRUM2RkNGMjQ3OUE3RCIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/Pv/uAA5BZG9iZQBkwAAAAAH/2wCEAAYEBAQFBAYFBQYJBgUGCQsIBgYICwwKCgsKCgwQDAwMDAwMEAwODxAPDgwTExQUExMcGxsbHB8fHx8fHx8fHx8BBwcHDQwNGBAQGBoVERUaHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fH//AABEIAIAAgAMBEQACEQEDEQH/xACLAAEAAgMBAQEAAAAAAAAAAAAABAUCAwYBBwgBAQADAQEBAAAAAAAAAAAAAAABAgMEBQYQAAEEAgECAwUHAwUAAAAAAAEAAgMEEQUhEgYxEwdBYSIyFFFxgVJyIxWRoTOxwdFiJBEBAAICAQQBBAIDAAAAAAAAAAECEQMxIUESBBOB0SIyUXGCIwX/2gAMAwEAAhEDEQA/AP1SgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICDXJYgj+d4afsVopM8KWvEcy8it1pXdMcjXO/Lnn+im2u0cwV2VniW1UXEBAQEBAQEBAQEBAQRNlc+mgyDh7zhv+5WunX5Sw37fCHM2dh48r06ank7N6rn2Ja7qa4hw5BBwQV010uK+/DsO29v/J68SOI86Jxjl95HIP4gryPc0fHfHaXu+j7Py68zzHSVquV2iAgICAgICAgICDyTr6HdHz4PTnwypjnqic46OauNbY6mGX99p+L8w9xaeV6OufHt0eXtr59M9VFb194E9LmuH3kf6rv17avO2ets7YVcuuuk/uOa3PgBlxP4BdMbq9nLPqbJ5xDbSM9azFXpyujuSO+Bo5kcf0NPyj25We
2YtEzaPxdfr6519Kz+UvqEIlELBKQZQ0eYRwC7HOPxXzVsZ6cPpK5x15ZKEiAgICAgICAgICCNc1tG40CzA2XHg4j4h9zhyFpr22p+s4Z7NNL/ALRlTX+1dVFBJOJrcTI2lxZHYcBx+sldWv3bzOMVn6fZy39OkRnNo+v3aoOx9JOxks8tqwHDPS+1IW8+IzGWZVrf9DZHSMR/j9yvo656zMz9V1rdLqdYwsoVIqwd87mNAc79Tvmd+JXJt332ftMy6temlP1jCasmggICAgICAgICAgwlmiib1SPDB7zhWrWZ4VtaI5QXb2l5ojYHvLjjIGB/dbR61sZlhPtVziFb3PYdd0luCvAZbXludVZ1huZQPgyTx4/atvWj4rxaZ6d/6Ye1/t1zSI6zx/bzti5YqaOpBeg8u41n/oa14cA4ccH7lPs1jZebVn8eyPUtOrXFbR+XdYx9xa90pjeXROaSCXDj+oysZ9S+Mx1bR7uvOJ6LGOWKVgfG8PafAtOQueazHLqraJjMMlCRAQEBAQEBAQRLNp4HTFx/2/4WtKR3Y32T2Udl8j3knk/aeSu6kREPPvaZlpY3DmyY8DyrzPZWv8tkvmFv7bg12RyR1DGeeMj2KnjE9JaeUx1hi1sgaet/U7JIOMcE8Dj7FMREcK2zPKMasr5XO6fmOVt5xEOadVplYU45IAOhxa72kLm2TFuXXqrNeF1WtlwDZeHfmHguO+vHDupszylLJsICAgICAg8cMjCQiYR5IVpFmc1Q5qLXHPgfbhbV2MLaYlqNQAYA4V/kV+PDA1fcp81fjYurtYMu4CmLZRNYhtZWBAI8CqzdaKN8df3LObtIokxwe5ZzZrFUloIGFnLWHqhIgICAgICAgxMbSpyjDAwAq3kr4MTWCnzR4MX02PGHDISNmETqieWba7QABwB4KJumKNgjaFXK0VZYChYQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEHzvuv1G7k1W9s6/Xamtaq15oaonmnsCR008HntaI4K8/s4HOeEGXZXqTud7uqtG7r6kNa5HdMU9aaw9zZde+FkrHsnr1+M2MZBPIKDRe9cO2K2mjs/V0m7X61lWzq32W+ZFEbfkSSO4B+GL9zw4QWm99TqFVmjsaSu7fUtxeNM2aTmSMBbHI9zWHqHVJlnDTxjPKCJL6sea502t1D7Ouhr0rNqxNM2CSNuwnkgjAi6ZOotdEc/Egibf1j/j+7JNL9DWdWg84TWn2ywtdFKyMZb5Tg0nLyG55x48IJ3bXqe/ea/a26dFtyTXtldDUqyOdNL5VqaDHS5gwXRxMe3xz1Y9iDKP1Sa7uefUnR7TyYqUVoEU5jY6pJZIz1RY4ZiMYd7TkexBA749Wr2gtCKlrIpGs17NjK29LLWmPmMsyiFkbIZsPEdKQu6y0eAQWdD1E2L93W1tzRyCDY3paev2NaxVlhIjidMfMb5vmse1kbi9pZ7MeKDt0BAQEBAQfEPU+lFY2++q2K1uSSezTnrReVsTTmiZVYHOd9LVuQyubIwANkbxz4FA7FsQ0NrrLNXX7N0eo1+3darGDYPjb5j6prxVRajjDetsRAjj4yM4CDre2uxO7q2hqtm7nua6w9rp5tfXgoSxwyTOMr42PlrPe4Nc8jJJQRDb3Oz1fYFrcV7As0mu3u7nbWkBZ9LSfG5nlxs/yySWRiNozwcBBx9EXadGTXz62+LG41+jZS6adhzS6vfnlkEjgzEZax7T8ePFBu3nbPdUXqJZsw6S5cqbCW1YdIY2lxhhfEGMjfHtoG9HxucwPEZy4/A7kMC87aq2Kmv7mdvxuqGmklFjUU4G2Yp21rdyW00t+kJkFl88pY9vDgwNDvEoK9np73FBcHdkrt2+rZd5FjQx7O0b8WvbzDKZhN1SSse573QdeAHkN+Ichj3p2rBvZq9vUnY2tcNQPqpZYZpJ44GxXqzHdVlzZZpib73mLHViI85c1BZ6OpsIe/6/XSuntevdsz6+8+pI0/yM1dtWVr2Z644P8rmyuj6S53jxkh9aQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBB/9k=" +async function displayImage(imgName, styleName) { + var e = event || window.event; + var img = document.getElementById("show_image_id"); + var pxy= img.parentElement.getBoundingClientRect(); + if(imgName) { + const url = await getStylesImage(imgName, styleName) + img.src = url + img.onerror = _ =>{ + img.src = empty_img + } + } + var scale = app?.canvas?.ds?.scale || 1; + var x = (e.pageX-pxy.x-100)/scale; + var y = (e.pageY-pxy.y+25)/scale; + img.style.left = x+"px"; + img.style.top = y+"px"; + img.style.display = "block"; + img.style.borderRadius = "10px"; + img.style.borderColor = "var(--fg-color)" + img.style.borderWidth = "1px"; + img.style.borderStyle = "solid"; +} +function hiddenImage(){ //theEvent用来传入事件,Firefox的方式 + var img = document.getElementById('show_image_id'); + img.style.display = "none"; +} + +// StylePromptSelector +app.registerExtension({ + name: 'comfy.easyUse.styleSelector', + async beforeRegisterNodeDef(nodeType, nodeData, app) { + + if(nodeData.name == 'easy stylesSelector'){ + // 创建时 + const onNodeCreated = nodeType.prototype.onNodeCreated; + nodeType.prototype.onNodeCreated = function() { + onNodeCreated ? 
onNodeCreated?.apply(this, arguments) : undefined; + const styles_id = this.widgets.findIndex((w) => w.name == 'styles'); + const language = localStorage['AGL.Locale'] || localStorage['Comfy.Settings.AGL.Locale'] || 'en-US' + const list = $el("ul.easyuse-prompt-styles-list",[]); + let styles_values = '' + this.setProperty("values", []) + + let selector = this.addDOMWidget('select_styles',"btn",$el('div.easyuse-prompt-styles',[$el('div.tools', [ + $el('button.delete',{ + textContent: $t('Empty All'), + style:{}, + onclick:()=>{ + selector.element.children[0].querySelectorAll(".search").forEach(el=>{ + el.value = '' + }) + selector.element.children[1].querySelectorAll(".easyuse-prompt-styles-tag-selected").forEach(el => { + el.classList.remove("easyuse-prompt-styles-tag-selected"); + el.children[0].checked = false + }) + selector.element.children[1].querySelectorAll(".easyuse-prompt-styles-tag").forEach(el => { + el.classList.remove('hide') + }) + this.setProperty("values", []) + }} + ), + $el('textarea.search',{ + dir:"ltr", + style:{"overflow-y": "scroll"}, + rows:1, + placeholder:$t("🔎 Type here to search styles ..."), + oninput:(e)=>{ + let value = e.target.value + selector.element.children[1].querySelectorAll(".easyuse-prompt-styles-tag").forEach(el => { + const name = el.dataset.name.toLowerCase() + const tag = el.dataset.tag.toLowerCase() + const lower_value = value.toLowerCase() + if(name.indexOf(lower_value) != -1 || tag.indexOf(lower_value) != -1 || el.classList.value.indexOf("easyuse-prompt-styles-tag-selected")!=-1){ + el.classList.remove('hide') + } + else{ + el.classList.add('hide') + } + }) + } + }) + ]),list, + $el('img',{id:'show_image_id', + style:{display:'none',position:'absolute'}, + src:``, + onerror:()=>{ + this.src = empty_img + } + }) + ])); + + Object.defineProperty(this.widgets[styles_id],'value',{ + set:(value)=>{ + styles_values = value + if(styles_values){ + getStylesList(styles_values).then(_=>{ + selector.element.children[1].innerHTML='' + if(styles_list_cache[styles_values]){ + let tags = styles_list_cache[styles_values] + // 重新排序 + if(selector.value) tags = tags.sort((a,b)=> selector.value.includes(b.name) - selector.value.includes(a.name)) + this.properties["values"] = [] + let list = getTagList(tags, value, language); + selector.element.children[1].append(...list) + selector.element.children[1].querySelectorAll(".easyuse-prompt-styles-tag").forEach(el => { + if (this.properties["values"].includes(el.dataset.tag)) { + el.classList.add("easyuse-prompt-styles-tag-selected"); + } + if(this.size?.[0]<150 || this.size?.[1]<150) this.setSize([425, 500]); + }) + } + }) + } + }, + get: () => { + return styles_values + } + }) + + + let style_select_values = '' + Object.defineProperty(selector, "value", { + set: (value) => { + setTimeout(_=>{ + selector.element.children[1].querySelectorAll(".easyuse-prompt-styles-tag").forEach(el => { + let arr = value.split(',') + if (arr.includes(el.dataset.tag)) { + el.classList.add("easyuse-prompt-styles-tag-selected"); + el.children[0].checked = true + } + }) + },300) + }, + get: () => { + selector.element.children[1].querySelectorAll(".easyuse-prompt-styles-tag").forEach(el => { + if(el.classList.value.indexOf("easyuse-prompt-styles-tag-selected")>=0){ + if(!this.properties["values"].includes(el.dataset.tag)){ + this.properties["values"].push(el.dataset.tag); + } + }else{ + if(this.properties["values"].includes(el.dataset.tag)){ + this.properties["values"]= this.properties["values"].filter(v=>v!=el.dataset.tag); + } + } + }); + 
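+                    // the serialized widget value is the comma-joined list of currently selected style names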
style_select_values = this.properties["values"].join(','); + return style_select_values; + } + }); + + let old_values = '' + let style_lists_dom = selector.element.children[1] + style_lists_dom.addEventListener('mouseenter', function (e) { + let value = '' + style_lists_dom.querySelectorAll(".easyuse-prompt-styles-tag-selected").forEach(el=> value+=el.dataset.tag) + old_values = value + }) + style_lists_dom.addEventListener('mouseleave', function (e) { + let value = '' + style_lists_dom.querySelectorAll(".easyuse-prompt-styles-tag-selected").forEach(el=> value+=el.dataset.tag) + let new_values = value + if(old_values != new_values){ + // console.log("选项发生了变化") + // 获取搜索值 + const search_value = document.getElementsByClassName('search')[0]['value'] + // 重新排序 + const tags = styles_list_cache[styles_values].sort((a,b)=> new_values.includes(b.name) - new_values.includes(a.name)) + style_lists_dom.innerHTML = '' + let list = getTagList(tags, styles_values, language); + style_lists_dom.append(...list) + style_lists_dom.querySelectorAll(".easyuse-prompt-styles-tag").forEach(el => { + if (new_values.includes(el.dataset.tag)) { + el.classList.add("easyuse-prompt-styles-tag-selected"); + el.children[0].checked = true; + } + if(search_value){ + if(el.dataset.name.indexOf(search_value) != -1 || el.dataset.tag.indexOf(search_value) != -1 || el.classList.value.indexOf("easyuse-prompt-styles-tag-selected")!=-1){ + el.classList.remove('hide') + } + else{ + el.classList.add('hide') + } + } + + }) + } + }) + + + // 初始化 + setTimeout(_=>{ + if(!styles_values) { + styles_values = 'fooocus_styles' + getStylesList(styles_values).then(_=>{ + selector.element.children[1].innerHTML='' + if(styles_list_cache[styles_values]){ + let tags = styles_list_cache[styles_values] + // 重新排序 + if(selector.value) tags = tags.sort((a,b)=> selector.value.includes(b.name) - selector.value.includes(a.name)) + let list = getTagList(tags, styles_values, language); + selector.element.children[1].append(...list) + } + }) + } + if(this.size?.[0]<150 || this.size?.[1]<150) this.setSize([425, 500]); + // + },100) + + return onNodeCreated; + } + } + } +}) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/easy/easySliderControl.js b/ComfyUI-Easy-Use/web/js/easy/easySliderControl.js new file mode 100644 index 0000000000000000000000000000000000000000..17fc5094c5e773dc88698c9e6beb4758f52819fb --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/easy/easySliderControl.js @@ -0,0 +1,173 @@ +import { app } from "../../../../scripts/app.js"; +import { api } from "../../../../scripts/api.js"; +import { $el } from "../../../../scripts/ui.js"; +import { $t } from "../common/i18n.js"; +import { sleep } from "../common/utils.js"; + + +const calculatePercent = (value, min, max) => ((value-min)/(max-min)*100) + +const getLayerDefaultValue = (index) => { + switch (index){ + case 3: + return 2.5 + case 6: + return 1 + default: + return 0 + } +} + +const addLayer = (_this, layer_total, arrays, sliders, i) => { + let scroll = $el('div.easyuse-slider-item-scroll') + let value = $el('div.easyuse-slider-item-input', {textContent: arrays[i]['value']}) + let label = $el('div.easyuse-slider-item-label', {textContent: 'L'+i}) + let girdTotal = (arrays[i]['max'] - arrays[i]['min']) / arrays[i]['step'] + let area = $el('div.easyuse-slider-item-area', {style:{ height: calculatePercent(arrays[i]['default'],arrays[i]['min'],arrays[i]['max']) + '%'}}) + let bar = $el('div.easyuse-slider-item-bar', { + style:{ top: 
(100-calculatePercent(arrays[i]['default'],arrays[i]['min'],arrays[i]['max'])) + '%'}, + onmousedown: (e) => { + let event = e || window.event; + var y = event.clientY - bar.offsetTop; + document.onmousemove = (e) => { + let event = e || window.event; + let top = event.clientY - y; + if(top < 0){ + top = 0; + } + else if(top > scroll.offsetHeight - bar.offsetHeight){ + top = scroll.offsetHeight - bar.offsetHeight; + } + // top到最近的girdHeight值 + let girlHeight = (scroll.offsetHeight - bar.offsetHeight)/ girdTotal + top = Math.round(top / girlHeight) * girlHeight; + bar.style.top = Math.floor(top/(scroll.offsetHeight - bar.offsetHeight)* 100) + '%'; + area.style.height = Math.floor((scroll.offsetHeight - bar.offsetHeight - top)/(scroll.offsetHeight - bar.offsetHeight)* 100) + '%'; + value.innerText = parseFloat(parseFloat(arrays[i]['max'] - (arrays[i]['max']-arrays[i]['min']) * (top/(scroll.offsetHeight - bar.offsetHeight))).toFixed(2)) + arrays[i]['value'] = value.innerText + _this.properties['values'][i] = i+':'+value.innerText + window.getSelection ? window.getSelection().removeAllRanges() : document.selection.empty(); + } + }, + ondblclick:_=>{ + bar.style.top = (100-calculatePercent(arrays[i]['default'],arrays[i]['min'],arrays[i]['max'])) + '%' + area.style.height = calculatePercent(arrays[i]['default'],arrays[i]['min'],arrays[i]['max']) + '%' + value.innerText = arrays[i]['default'] + arrays[i]['value'] = arrays[i]['default'] + _this.properties['values'][i] = i+':'+value.innerText + } + }) + document.onmouseup = _=> document.onmousemove = null; + + scroll.replaceChildren(bar,area) + let item_div = $el('div.easyuse-slider-item',[ + value, + scroll, + label + ]) + if(i == 3 ) layer_total == 12 ? item_div.classList.add('negative') : item_div.classList.remove('negative') + else if(i == 6) layer_total == 12 ? item_div.classList.add('positive') : item_div.classList.remove('positive') + sliders.push(item_div) + return item_div +} + +const setSliderValue = (_this, type, refresh=false, values_div, sliders_value) => { + let layer_total = type == 'sdxl' ? 12 : 16 + let sliders = [] + let arrays = Array.from({length: layer_total}, (v, i) => ({default: layer_total == 12 ? getLayerDefaultValue(i) : 0, min: -1, max: 3, step: 0.05, value:layer_total == 12 ? getLayerDefaultValue(i) : 0})) + _this.setProperty("values", Array.from({length: layer_total}, (v, i) => i+':'+arrays[i]['value'])) + for (let i = 0; i < layer_total; i++) { + addLayer(_this, layer_total, arrays, sliders, i) + } + if(refresh) values_div.replaceChildren(...sliders) + else{ + values_div = $el('div.easyuse-slider', sliders) + sliders_value = _this.addDOMWidget('values',"btn",values_div) + } + + Object.defineProperty(sliders_value, 'value', { + set: function() {}, + get: function() { + return _this.properties.values.join(','); + } + }); + return {sliders, arrays, values_div, sliders_value} +} + + +app.registerExtension({ + name: 'comfy.easyUse.sliderControl', + async beforeRegisterNodeDef(nodeType, nodeData, app) { + if(nodeData.name == 'easy sliderControl'){ + // 创建时 + const onNodeCreated = nodeType.prototype.onNodeCreated; + nodeType.prototype.onNodeCreated = function() { + onNodeCreated && onNodeCreated.call(this); + const mode = this.widgets[0]; + const model_type = this.widgets[1]; + let layer_total = model_type.value == 'sdxl' ? 
12 : 16
+                let _this = this
+                let values_div = null
+                let sliders_value = null
+                mode.callback = async()=>{
+                    switch (mode.value) {
+                        case 'ipadapter layer weights':
+                            nodeData.output_name = ['layer_weights']
+                            _this.outputs[0]['name'] = 'layer_weights'
+                            _this.outputs[0]['label'] = 'layer_weights'
+                            break
+                    }
+                }
+
+                model_type.callback = async()=>{
+                    if(values_div) {
+                        let r2 = setSliderValue(_this, model_type.value, true, values_div, sliders_value)
+                        values_div = r2.values_div
+                        sliders_value = r2.sliders_value
+                    }
+                    _this.setSize(model_type.value == 'sdxl' ? [375,320] : [455,320])
+                }
+
+                let r1 = setSliderValue(_this, model_type.value, false, values_div, sliders_value)
+                let sliders = r1.sliders
+                let arrays = r1.arrays
+                values_div = r1.values_div
+                sliders_value = r1.sliders_value
+                setTimeout(_=>{
+                    let values_widgets_index = this.widgets.findIndex((w) => w.name == 'values');
+                    if(values_widgets_index != -1){
+                        let old_values_widget = this.widgets[values_widgets_index];
+                        let old_value = old_values_widget.value.split(',')
+                        let layer_total = _this.widgets[1].value == 'sdxl' ? 12 : 16
+                        for (let i = 0; i < layer_total; i++) {
+                            let value = parseFloat(parseFloat(old_value[i].split(':')[1]).toFixed(2))
+                            let item_div = sliders[i] || null
+                            // if the layer already exists, just update its value
+                            if(arrays[i]){
+                                arrays[i]['value'] = value
+                                _this.properties['values'][i] = old_value[i]
+                            }else{
+                                arrays.push({default: layer_total == 12 ? getLayerDefaultValue(i) : 0, min: -1, max: 3, step: 0.05, value:layer_total == 12 ? getLayerDefaultValue(i) : 0})
+                                _this.properties['values'].push(i+':'+arrays[i]['value'])
+                                // add the missing layer
+                                item_div = addLayer(_this, layer_total, arrays, sliders, i)
+                                values_div.appendChild(item_div)
+                            }
+                            // todo: update the bar position etc.
+                            let input = item_div.getElementsByClassName('easyuse-slider-item-input')[0]
+                            let bar = item_div.getElementsByClassName('easyuse-slider-item-bar')[0]
+                            let area = item_div.getElementsByClassName('easyuse-slider-item-area')[0]
+                            if(i == 3 ) layer_total == 12 ? item_div.classList.add('negative') : item_div.classList.remove('negative')
+                            else if(i == 6) layer_total == 12 ? item_div.classList.add('positive') : item_div.classList.remove('positive')
+                            input.textContent = value
+                            bar.style.top = (100-calculatePercent(value,arrays[i]['min'],arrays[i]['max'])) + '%'
+                            area.style.height = calculatePercent(value,arrays[i]['min'],arrays[i]['max']) + '%'
+                        }
+                    }
+                    _this.setSize(model_type.value == 'sdxl' ? 
[375,320] : [455,320]) + },1) + return onNodeCreated; + } + } + } +}) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/easy/easySuggestion.js b/ComfyUI-Easy-Use/web/js/easy/easySuggestion.js new file mode 100644 index 0000000000000000000000000000000000000000..ca1b8d9366c8abc05caa66bc25447743527fac03 --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/easy/easySuggestion.js @@ -0,0 +1,524 @@ +import {app} from "../../../../scripts/app.js"; +import {api} from "../../../../scripts/api.js"; +import {$el} from "../../../../scripts/ui.js"; + +const propmts = ["easy wildcards", "easy positive", "easy negative", "easy stylesSelector", "easy promptConcat", "easy promptReplace"] +const loaders = ["easy a1111Loader", "easy comfyLoader", "easy fullLoader", "easy svdLoader", "easy cascadeLoader", "easy sv3dLoader"] +const preSamplingNodes = ["easy preSampling", "easy preSamplingAdvanced", "easy preSamplingNoiseIn", "easy preSamplingCustom", "easy preSamplingDynamicCFG","easy preSamplingSdTurbo", "easy preSamplingLayerDiffusion"] +const kSampler = ["easy kSampler", "easy kSamplerTiled","easy kSamplerInpainting", "easy kSamplerDownscaleUnet", "easy kSamplerSDTurbo"] +const controlNetNodes = ["easy controlnetLoader", "easy controlnetLoaderADV"] +const instantIDNodes = ["easy instantIDApply", "easy instantIDApplyADV"] +const ipadapterNodes = ["easy ipadapterApply", "easy ipadapterApplyADV" , "easy ipadapterStyleComposition"] +const pipeNodes = ['easy pipeIn','easy pipeOut', 'easy pipeEdit'] +const xyNodes = ['easy XYPlot', 'easy XYPlotAdvanced'] +const extraNodes = ['easy setNode'] +const modelNormalNodes = [...["Reroute"],...['RescaleCFG','LoraLoaderModelOnly','LoraLoader','FreeU','FreeU_v2'],...ipadapterNodes,...extraNodes] +const suggestions = { + // prompt + "easy seed":{ + "from":{ + "INT": [...["Reroute"],...preSamplingNodes,...['easy fullkSampler']] + } + }, + "easy positive":{ + "from":{ + "STRING": [...["Reroute"],...propmts] + } + }, + "easy negative":{ + "from":{ + "STRING": [...["Reroute"],...propmts] + } + }, + "easy wildcards":{ + "from":{ + "STRING": [...["Reroute","easy showAnything"],...propmts,] + } + }, + "easy stylesSelector":{ + "from":{ + "STRING": [...["Reroute","easy showAnything"],...propmts,] + } + }, + "easy promptConcat":{ + "from":{ + "STRING": [...["Reroute","easy showAnything"],...propmts,] + } + }, + "easy promptReplace":{ + "from":{ + "STRING": [...["Reroute","easy showAnything"],...propmts,] + } + }, + // sd相关 + "easy fullLoader": { + "from":{ + "PIPE_LINE": [...["Reroute"],...preSamplingNodes,...['easy fullkSampler'],...pipeNodes,...extraNodes], + "MODEL":modelNormalNodes + }, + "to":{ + "STRING": [...["Reroute"],...propmts] + } + }, + "easy a1111Loader": { + "from": { + "PIPE_LINE": [...["Reroute"], ...preSamplingNodes, ...controlNetNodes, ...instantIDNodes, ...pipeNodes, ...extraNodes], + "MODEL": modelNormalNodes + }, + "to":{ + "STRING": [...["Reroute"],...propmts] + } + }, + "easy comfyLoader": { + "from": { + "PIPE_LINE": [...["Reroute"], ...preSamplingNodes, ...controlNetNodes, ...instantIDNodes, ...pipeNodes, ...extraNodes], + "MODEL": modelNormalNodes + }, + "to":{ + "STRING": [...["Reroute"],...propmts] + } + }, + "easy svdLoader":{ + "from": { + "PIPE_LINE": [...["Reroute"], ...["easy preSampling", "easy preSamplingAdvanced", "easy preSamplingDynamicCFG"], ...pipeNodes, ...extraNodes], + "MODEL": modelNormalNodes + }, + "to":{ + "STRING": [...["Reroute"],...propmts] + } + }, + "easy zero123Loader":{ + "from": { + "PIPE_LINE": [...["Reroute"], ...["easy 
preSampling", "easy preSamplingAdvanced", "easy preSamplingDynamicCFG"], ...pipeNodes, ...extraNodes], + "MODEL": modelNormalNodes + }, + "to":{ + "STRING": [...["Reroute"],...propmts] + } + }, + "easy sv3dLoader":{ + "from": { + "PIPE_LINE": [...["Reroute"], ...["easy preSampling", "easy preSamplingAdvanced", "easy preSamplingDynamicCFG"], ...pipeNodes, ...extraNodes], + "MODEL": modelNormalNodes + }, + "to":{ + "STRING": [...["Reroute"],...propmts] + } + }, + "easy preSampling": { + "from": { + "PIPE_LINE": [...["Reroute"], ...kSampler, ...pipeNodes, ...controlNetNodes, ...xyNodes, ...extraNodes] + }, + }, + "easy preSamplingAdvanced": { + "from": { + "PIPE_LINE": [...["Reroute"], ...kSampler, ...pipeNodes, ...controlNetNodes, ...xyNodes, ...extraNodes] + } + }, + "easy preSamplingDynamicCFG": { + "from": { + "PIPE_LINE": [...["Reroute"], ...kSampler, ...pipeNodes, ...controlNetNodes, ...xyNodes, ...extraNodes] + } + }, + "easy preSamplingCustom": { + "from": { + "PIPE_LINE": [...["Reroute"], ...kSampler, ...pipeNodes, ...controlNetNodes, ...xyNodes, ...extraNodes] + } + }, + "easy preSamplingLayerDiffusion": { + "from": { + "PIPE_LINE": [...["Reroute", "easy kSamplerLayerDiffusion"], ...kSampler, ...pipeNodes, ...controlNetNodes, ...xyNodes, ...extraNodes] + } + }, + "easy preSamplingNoiseIn": { + "from": { + "PIPE_LINE": [...["Reroute"], ...kSampler, ...pipeNodes, ...controlNetNodes, ...xyNodes, ...extraNodes] + } + }, + // ksampler + "easy fullkSampler": { + "from": { + "PIPE_LINE": [...["Reroute"], ...pipeNodes.reverse(), ...['easy preDetailerFix', 'easy preMaskDetailerFix'], ...preSamplingNodes, ...extraNodes] + } + }, + "easy kSampler": { + "from": { + "PIPE_LINE": [...["Reroute"], ...pipeNodes.reverse(), ...['easy preDetailerFix', 'easy preMaskDetailerFix', 'easy hiresFix'], ...preSamplingNodes, ...extraNodes], + } + }, + // cn + "easy controlnetLoader": { + "from": { + "PIPE_LINE": [...["Reroute"], ...preSamplingNodes, ...controlNetNodes, ...instantIDNodes, ...pipeNodes, ...extraNodes] + } + }, + "easy controlnetLoaderADV":{ + "from": { + "PIPE_LINE": [...["Reroute"], ...preSamplingNodes, ...controlNetNodes, ...instantIDNodes, ...pipeNodes, ...extraNodes] + } + }, + // instant + "easy instantIDApply": { + "from": { + "PIPE_LINE": [...["Reroute"], ...preSamplingNodes, ...controlNetNodes, ...instantIDNodes, ...pipeNodes, ...extraNodes], + "MODEL": modelNormalNodes + }, + "to":{ + "COMBO": [...["Reroute", "easy promptLine"]] + } + }, + "easy instantIDApplyADV":{ + "from": { + "PIPE_LINE": [...["Reroute"], ...preSamplingNodes, ...controlNetNodes, ...instantIDNodes, ...pipeNodes, ...extraNodes], + "MODEL": modelNormalNodes + }, + "to":{ + "COMBO": [...["Reroute", "easy promptLine"]] + } + }, + "easy ipadapterApply":{ + "to":{ + "COMBO": [...["Reroute", "easy promptLine"]] + } + }, + "easy ipadapterApplyADV":{ + "to":{ + "STRING": [...["Reroute", "easy sliderControl"], ...propmts], + "COMBO": [...["Reroute", "easy promptLine"]] + } + }, + "easy ipadapterStyleComposition":{ + "to":{ + "COMBO": [...["Reroute", "easy promptLine"]] + } + }, + // fix + "easy preDetailerFix":{ + "from": { + "PIPE_LINE": [...["Reroute", "easy detailerFix"], ...pipeNodes, ...extraNodes] + }, + "to":{ + "PIPE_LINE": [...["Reroute", "easy ultralyticsDetectorPipe", "easy samLoaderPipe", "easy kSampler", "easy fullkSampler"]] + } + }, + "easy preMaskDetailerFix":{ + "from": { + "PIPE_LINE": [...["Reroute", "easy detailerFix"], ...pipeNodes, ...extraNodes] + } + }, + "easy samLoaderPipe": { + "from":{ + 
"PIPE_LINE": [...["Reroute", "easy preDetailerFix"], ...pipeNodes, ...extraNodes] + } + }, + "easy ultralyticsDetectorPipe": { + "from":{ + "PIPE_LINE": [...["Reroute", "easy preDetailerFix"], ...pipeNodes, ...extraNodes] + } + }, + // cascade相关 + "easy cascadeLoader":{ + "from": { + "PIPE_LINE": [...["Reroute"], ...["easy fullCascadeKSampler", 'easy preSamplingCascade'], ...controlNetNodes, ...pipeNodes, ...extraNodes], + "MODEL": modelNormalNodes.filter(cate => !ipadapterNodes.includes(cate)) + } + }, + "easy fullCascadeKSampler":{ + "from": { + "PIPE_LINE": [...["Reroute"], ...["easy preSampling", "easy preSamplingAdvanced"], ...pipeNodes, ...extraNodes] + } + }, + "easy preSamplingCascade":{ + "from": { + "PIPE_LINE": [...["Reroute"], ...["easy cascadeKSampler",], ...pipeNodes, ...extraNodes] + } + }, + "easy cascadeKSampler": { + "from": { + "PIPE_LINE": [...["Reroute"], ...["easy preSampling", "easy preSamplingAdvanced"], ...pipeNodes, ...extraNodes] + } + }, +} + + +app.registerExtension({ + name: "comfy.easyuse.suggestions", + async setup(app) { + LGraphCanvas.prototype.createDefaultNodeForSlot = function(optPass) { // addNodeMenu for connection + var optPass = optPass || {}; + var opts = Object.assign({ nodeFrom: null // input + ,slotFrom: null // input + ,nodeTo: null // output + ,slotTo: null // output + ,position: [] // pass the event coords + ,nodeType: null // choose a nodetype to add, AUTO to set at first good + ,posAdd:[0,0] // adjust x,y + ,posSizeFix:[0,0] // alpha, adjust the position x,y based on the new node size w,h + } + ,optPass + ); + var that = this; + + var isFrom = opts.nodeFrom && opts.slotFrom!==null; + var isTo = !isFrom && opts.nodeTo && opts.slotTo!==null; + + if (!isFrom && !isTo){ + console.warn("No data passed to createDefaultNodeForSlot "+opts.nodeFrom+" "+opts.slotFrom+" "+opts.nodeTo+" "+opts.slotTo); + return false; + } + if (!opts.nodeType){ + console.warn("No type to createDefaultNodeForSlot"); + return false; + } + + var nodeX = isFrom ? opts.nodeFrom : opts.nodeTo; + var slotX = isFrom ? opts.slotFrom : opts.slotTo; + var nodeType = nodeX.type + + var iSlotConn = false; + switch (typeof slotX){ + case "string": + iSlotConn = isFrom ? nodeX.findOutputSlot(slotX,false) : nodeX.findInputSlot(slotX,false); + slotX = isFrom ? nodeX.outputs[slotX] : nodeX.inputs[slotX]; + break; + case "object": + // ok slotX + iSlotConn = isFrom ? nodeX.findOutputSlot(slotX.name) : nodeX.findInputSlot(slotX.name); + break; + case "number": + iSlotConn = slotX; + slotX = isFrom ? nodeX.outputs[slotX] : nodeX.inputs[slotX]; + break; + case "undefined": + default: + // bad ? + //iSlotConn = 0; + console.warn("Cant get slot information "+slotX); + return false; + } + + if (slotX===false || iSlotConn===false){ + console.warn("createDefaultNodeForSlot bad slotX "+slotX+" "+iSlotConn); + } + + // check for defaults nodes for this slottype + var fromSlotType = slotX.type==LiteGraph.EVENT?"_event_":slotX.type; + var slotTypesDefault = isFrom ? LiteGraph.slot_types_default_out : LiteGraph.slot_types_default_in; + if(slotTypesDefault && slotTypesDefault[fromSlotType]){ + if (slotX.link !== null) { + // is connected + }else{ + // is not not connected + } + let nodeNewType = false; + const fromOrTo = isFrom ? 
'from' : 'to' + if(suggestions[nodeType] && suggestions[nodeType][fromOrTo] && suggestions[nodeType][fromOrTo][fromSlotType]?.length>0){ + for(var typeX in suggestions[nodeType][fromOrTo][fromSlotType]){ + if (opts.nodeType == suggestions[nodeType][fromOrTo][fromSlotType][typeX] || opts.nodeType == "AUTO") { + nodeNewType = suggestions[nodeType][fromOrTo][fromSlotType][typeX]; + break + } + } + } + else if(typeof slotTypesDefault[fromSlotType] == "object" || typeof slotTypesDefault[fromSlotType] == "array"){ + for(var typeX in slotTypesDefault[fromSlotType]){ + if (opts.nodeType == slotTypesDefault[fromSlotType][typeX] || opts.nodeType == "AUTO"){ + nodeNewType = slotTypesDefault[fromSlotType][typeX]; + // console.log("opts.nodeType == slotTypesDefault[fromSlotType][typeX] :: "+opts.nodeType); + break; // -------- + } + } + }else{ + if (opts.nodeType == slotTypesDefault[fromSlotType] || opts.nodeType == "AUTO") nodeNewType = slotTypesDefault[fromSlotType]; + } + if (nodeNewType) { + var nodeNewOpts = false; + if (typeof nodeNewType == "object" && nodeNewType.node){ + nodeNewOpts = nodeNewType; + nodeNewType = nodeNewType.node; + } + + //that.graph.beforeChange(); + + var newNode = LiteGraph.createNode(nodeNewType); + if(newNode){ + // if is object pass options + if (nodeNewOpts){ + if (nodeNewOpts.properties) { + for (var i in nodeNewOpts.properties) { + newNode.addProperty( i, nodeNewOpts.properties[i] ); + } + } + if (nodeNewOpts.inputs) { + newNode.inputs = []; + for (var i in nodeNewOpts.inputs) { + newNode.addOutput( + nodeNewOpts.inputs[i][0], + nodeNewOpts.inputs[i][1] + ); + } + } + if (nodeNewOpts.outputs) { + newNode.outputs = []; + for (var i in nodeNewOpts.outputs) { + newNode.addOutput( + nodeNewOpts.outputs[i][0], + nodeNewOpts.outputs[i][1] + ); + } + } + if (nodeNewOpts.title) { + newNode.title = nodeNewOpts.title; + } + if (nodeNewOpts.json) { + newNode.configure(nodeNewOpts.json); + } + + } + + // add the node + that.graph.add(newNode); + newNode.pos = [ opts.position[0]+opts.posAdd[0]+(opts.posSizeFix[0]?opts.posSizeFix[0]*newNode.size[0]:0) + ,opts.position[1]+opts.posAdd[1]+(opts.posSizeFix[1]?opts.posSizeFix[1]*newNode.size[1]:0)]; //that.last_click_position; //[e.canvasX+30, e.canvasX+5];*/ + + //that.graph.afterChange(); + + // connect the two! + if (isFrom){ + opts.nodeFrom.connectByType( iSlotConn, newNode, fromSlotType ); + }else{ + opts.nodeTo.connectByTypeOutput( iSlotConn, newNode, fromSlotType ); + } + + // if connecting in between + if (isFrom && isTo){ + // TODO + } + + return true; + + }else{ + console.log("failed creating "+nodeNewType); + } + } + } + return false; + } + + LGraphCanvas.prototype.showConnectionMenu = function(optPass) { // addNodeMenu for connection + var optPass = optPass || {}; + var opts = Object.assign({ nodeFrom: null // input + ,slotFrom: null // input + ,nodeTo: null // output + ,slotTo: null // output + ,e: null + } + ,optPass + ); + var that = this; + + var isFrom = opts.nodeFrom && opts.slotFrom; + var isTo = !isFrom && opts.nodeTo && opts.slotTo; + + if (!isFrom && !isTo){ + console.warn("No data passed to showConnectionMenu"); + return false; + } + + var nodeX = isFrom ? opts.nodeFrom : opts.nodeTo; + var slotX = isFrom ? opts.slotFrom : opts.slotTo; + + var iSlotConn = false; + switch (typeof slotX){ + case "string": + iSlotConn = isFrom ? nodeX.findOutputSlot(slotX,false) : nodeX.findInputSlot(slotX,false); + slotX = isFrom ? 
nodeX.outputs[slotX] : nodeX.inputs[slotX]; + break; + case "object": + // ok slotX + iSlotConn = isFrom ? nodeX.findOutputSlot(slotX.name) : nodeX.findInputSlot(slotX.name); + break; + case "number": + iSlotConn = slotX; + slotX = isFrom ? nodeX.outputs[slotX] : nodeX.inputs[slotX]; + break; + default: + // bad ? + //iSlotConn = 0; + console.warn("Cant get slot information "+slotX); + return false; + } + + var options = ["Add Node",null]; + + if (that.allow_searchbox){ + options.push("Search"); + options.push(null); + } + + // get defaults nodes for this slottype + var fromSlotType = slotX.type==LiteGraph.EVENT?"_event_":slotX.type; + var slotTypesDefault = isFrom ? LiteGraph.slot_types_default_out : LiteGraph.slot_types_default_in; + var nodeType = nodeX.type + if(slotTypesDefault && slotTypesDefault[fromSlotType]){ + const fromOrTo = isFrom ? 'from' : 'to' + if(suggestions[nodeType] && suggestions[nodeType][fromOrTo] && suggestions[nodeType][fromOrTo][fromSlotType]?.length>0){ + for(var typeX in suggestions[nodeType][fromOrTo][fromSlotType]){ + options.push(suggestions[nodeType][fromOrTo][fromSlotType][typeX]); + } + } + else if(typeof slotTypesDefault[fromSlotType] == "object" || typeof slotTypesDefault[fromSlotType] == "array"){ + for(var typeX in slotTypesDefault[fromSlotType]){ + options.push(slotTypesDefault[fromSlotType][typeX]); + } + }else{ + options.push(slotTypesDefault[fromSlotType]); + } + } + + // build menu + var menu = new LiteGraph.ContextMenu(options, { + event: opts.e, + title: (slotX && slotX.name!="" ? (slotX.name + (fromSlotType?" | ":"")) : "")+(slotX && fromSlotType ? fromSlotType : ""), + callback: inner_clicked + }); + + // callback + function inner_clicked(v,options,e) { + //console.log("Process showConnectionMenu selection"); + switch (v) { + case "Add Node": + LGraphCanvas.onMenuAdd(null, null, e, menu, function(node){ + if (isFrom){ + opts.nodeFrom.connectByType( iSlotConn, node, fromSlotType ); + }else{ + opts.nodeTo.connectByTypeOutput( iSlotConn, node, fromSlotType ); + } + }); + break; + case "Search": + if(isFrom){ + that.showSearchBox(e,{node_from: opts.nodeFrom, slot_from: slotX, type_filter_in: fromSlotType}); + }else{ + that.showSearchBox(e,{node_to: opts.nodeTo, slot_from: slotX, type_filter_out: fromSlotType}); + } + break; + default: + // check for defaults nodes for this slottype + var nodeCreated = that.createDefaultNodeForSlot(Object.assign(opts,{ position: [opts.e.canvasX, opts.e.canvasY] + ,nodeType: v + })); + if (nodeCreated){ + // new node created + //console.log("node "+v+" created") + }else{ + // failed or v is not in defaults + } + break; + } + } + + return false; + }; + } +}) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/easy/easyWidgets.js b/ComfyUI-Easy-Use/web/js/easy/easyWidgets.js new file mode 100644 index 0000000000000000000000000000000000000000..899befed6a35139d51e23ed99b427bc0e05224ea --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/easy/easyWidgets.js @@ -0,0 +1,400 @@ +import { app } from "../../../../scripts/app.js"; +import { ComfyWidgets } from "../../../../scripts/widgets.js"; + +const KEY_CODES = { ENTER: 13, ESC: 27, ARROW_DOWN: 40, ARROW_UP: 38 }; +const WIDGET_GAP = -4; + +function hideInfoWidget(e, node, widget) { + let dropdownShouldBeRemoved = false; + let selectionIndex = -1; + + if (e) { + e.preventDefault(); + e.stopPropagation(); + displayDropdown(widget); + } else { + hideWidget(widget, node); + } + + function createDropdownElement() { + const dropdown = document.createElement('ul'); + 
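+        // the list is appended to document.body and positioned beneath the widget's input element by displayDropdown()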
dropdown.id = 'hideinfo-dropdown'; + dropdown.setAttribute('role', 'listbox'); + dropdown.classList.add('hideInfo-dropdown'); + return dropdown; + } + + function createDropdownItem(textContent, action) { + const listItem = document.createElement('li'); + listItem.id = `hideInfo-item-${textContent.replace(/ /g, '')}`; + listItem.classList.add('hideInfo-item'); + listItem.setAttribute('role', 'option'); + listItem.textContent = textContent; + listItem.addEventListener('mousedown', (event) => { + event.preventDefault(); + action(widget, node); // perform the action when dropdown item is clicked + removeDropdown(); + dropdownShouldBeRemoved = false; + }); + listItem.dataset.action = textContent.replace(/ /g, ''); // store the action in a data attribute + return listItem; + } + + function displayDropdown(widget) { + removeDropdown(); + + const dropdown = createDropdownElement(); + const listItemHide = createDropdownItem('Hide info Widget', hideWidget); + const listItemHideAll = createDropdownItem('Hide for all of this node-type', hideWidgetForNodetype); + + dropdown.appendChild(listItemHide); + dropdown.appendChild(listItemHideAll); + + const inputRect = widget.inputEl.getBoundingClientRect(); + dropdown.style.top = `${inputRect.top + inputRect.height}px`; + dropdown.style.left = `${inputRect.left}px`; + dropdown.style.width = `${inputRect.width}px`; + + document.body.appendChild(dropdown); + dropdownShouldBeRemoved = true; + + widget.inputEl.removeEventListener('keydown', handleKeyDown); + widget.inputEl.addEventListener('keydown', handleKeyDown); + document.addEventListener('click', handleDocumentClick); + } + + function removeDropdown() { + const dropdown = document.getElementById('hideinfo-dropdown'); + if (dropdown) { + dropdown.remove(); + widget.inputEl.removeEventListener('keydown', handleKeyDown); + } + document.removeEventListener('click', handleDocumentClick); + + } + + function handleKeyDown(event) { + const dropdownItems = document.querySelectorAll('.hideInfo-item'); + + if (event.keyCode === KEY_CODES.ENTER && dropdownShouldBeRemoved) { + event.preventDefault(); + if (selectionIndex !== -1) { + const selectedAction = dropdownItems[selectionIndex].dataset.action; + if (selectedAction === 'HideinfoWidget') { + hideWidget(widget, node); + } else if (selectedAction === 'Hideforall') { + hideWidgetForNodetype(widget, node); + } + removeDropdown(); + dropdownShouldBeRemoved = false; + } + } else if (event.keyCode === KEY_CODES.ARROW_DOWN && dropdownShouldBeRemoved) { + event.preventDefault(); + if (selectionIndex !== -1) { + dropdownItems[selectionIndex].classList.remove('selected'); + } + selectionIndex = (selectionIndex + 1) % dropdownItems.length; + dropdownItems[selectionIndex].classList.add('selected'); + } else if (event.keyCode === KEY_CODES.ARROW_UP && dropdownShouldBeRemoved) { + event.preventDefault(); + if (selectionIndex !== -1) { + dropdownItems[selectionIndex].classList.remove('selected'); + } + selectionIndex = (selectionIndex - 1 + dropdownItems.length) % dropdownItems.length; + dropdownItems[selectionIndex].classList.add('selected'); + } else if (event.keyCode === KEY_CODES.ESC && dropdownShouldBeRemoved) { + event.preventDefault(); + removeDropdown(); + } + } + + function hideWidget(widget, node) { + node.properties['infoWidgetHidden'] = true; + widget.type = "esayHidden"; + widget.computeSize = () => [0, WIDGET_GAP]; + node.setSize([node.size[0], node.size[1]]); + } + + function hideWidgetForNodetype(widget, node) { + hideWidget(widget, node) + const hiddenNodeTypes = 
JSON.parse(localStorage.getItem('hiddenWidgetNodeTypes') || "[]"); + if (!hiddenNodeTypes.includes(node.constructor.type)) { + hiddenNodeTypes.push(node.constructor.type); + } + localStorage.setItem('hiddenWidgetNodeTypes', JSON.stringify(hiddenNodeTypes)); + } + + function handleDocumentClick(event) { + const dropdown = document.getElementById('hideinfo-dropdown'); + + // If the click was outside the dropdown and the dropdown should be removed, remove it + if (dropdown && !dropdown.contains(event.target) && dropdownShouldBeRemoved) { + removeDropdown(); + dropdownShouldBeRemoved = false; + } + } +} + + +var styleElement = document.createElement("style"); +const cssCode = ` +.easy-info_widget { + background-color: var(--comfy-input-bg); + color: var(--input-text); + overflow: hidden; + padding: 2px; + resize: none; + border: none; + box-sizing: border-box; + font-size: 10px; + border-radius: 7px; + text-align: center; + text-wrap: balance; +} +.hideInfo-dropdown { + position: absolute; + box-sizing: border-box; + background-color: #121212; + border-radius: 7px; + box-shadow: 0 2px 4px rgba(255, 255, 255, .25); + padding: 0; + margin: 0; + list-style: none; + z-index: 1000; + overflow: auto; + max-height: 200px; +} + +.hideInfo-dropdown li { + padding: 4px 10px; + cursor: pointer; + font-family: system-ui; + font-size: 0.7rem; +} + +.hideInfo-dropdown li:hover, +.hideInfo-dropdown li.selected { + background-color: #e5e5e5; + border-radius: 7px; +} +` +styleElement.innerHTML = cssCode +document.head.appendChild(styleElement); + +const InfoSymbol = Symbol(); +const InfoResizeSymbol = Symbol(); + + + + +// WIDGET FUNCTIONS +function addInfoWidget(node, name, opts, app) { + const INFO_W_SIZE = 50; + + node.addProperty('infoWidgetHidden', false) + + function computeSize(size) { + if (node.widgets[0].last_y == null) return; + + let y = node.widgets[0].last_y; + + // Compute the height of all non easyInfo widgets + let widgetHeight = 0; + const infoWidges = []; + for (let i = 0; i < node.widgets.length; i++) { + const w = node.widgets[i]; + if (w.type === "easyInfo") { + infoWidges.push(w); + } else { + if (w.computeSize) { + widgetHeight += w.computeSize()[1] + 4; + } else { + widgetHeight += LiteGraph.NODE_WIDGET_HEIGHT + 4; + } + } + } + + let infoWidgetSpace = infoWidges.length * INFO_W_SIZE; // Height for all info widgets + + // Check if there's enough space for all widgets + if (size[1] < y + widgetHeight + infoWidgetSpace) { + // There isn't enough space for all the widgets, increase the size of the node + node.size[1] = y + widgetHeight + infoWidgetSpace; + node.graph.setDirtyCanvas(true); + } + + // Position each of the widgets + for (const w of node.widgets) { + w.y = y; + if (w.type === "easyInfo") { + y += INFO_W_SIZE; + } else if (w.computeSize) { + y += w.computeSize()[1] + 4; + } else { + y += LiteGraph.NODE_WIDGET_HEIGHT + 4; + } + } + } + + const widget = { + type: "easyInfo", + name, + get value() { + return this.inputEl.value; + }, + set value(x) { + this.inputEl.value = x; + }, + draw: function (ctx, _, widgetWidth, y, widgetHeight) { + if (!this.parent.inputHeight) { + // If we are initially offscreen when created we wont have received a resize event + // Calculate it here instead + computeSize(node.size); + } + const visible = app.canvas.ds.scale > 0.5 && this.type === "easyInfo"; + const margin = 10; + const elRect = ctx.canvas.getBoundingClientRect(); + const transform = new DOMMatrix() + .scaleSelf(elRect.width / ctx.canvas.width, elRect.height / ctx.canvas.height) + 
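+                    // scale canvas pixels to CSS pixels, then apply the graph's current pan/zoom transform below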
.multiplySelf(ctx.getTransform()) + .translateSelf(margin, margin + y); + + Object.assign(this.inputEl.style, { + transformOrigin: "0 0", + transform: transform, + left: "0px", + top: "0px", + width: `${widgetWidth - (margin * 2)}px`, + height: `${this.parent.inputHeight - (margin * 2)}px`, + position: "absolute", + background: (!node.color)?'':node.color, + color: (!node.color)?'':'white', + zIndex: app.graph._nodes.indexOf(node), + }); + this.inputEl.hidden = !visible; + }, + }; + widget.inputEl = document.createElement("textarea"); + widget.inputEl.className = "easy-info_widget"; + widget.inputEl.value = opts.defaultVal; + widget.inputEl.placeholder = opts.placeholder || ""; + widget.inputEl.readOnly = true; + widget.parent = node; + + document.body.appendChild(widget.inputEl); + + node.addCustomWidget(widget); + + app.canvas.onDrawBackground = function () { + // Draw node isnt fired once the node is off the screen + // if it goes off screen quickly, the input may not be removed + // this shifts it off screen so it can be moved back if the node is visible. + for (let n in app.graph._nodes) { + n = app.graph._nodes[n]; + for (let w in n.widgets) { + let wid = n.widgets[w]; + if (Object.hasOwn(wid, "inputEl")) { + wid.inputEl.style.left = -8000 + "px"; + wid.inputEl.style.position = "absolute"; + } + } + } + }; + + node.onRemoved = function () { + // When removing this node we need to remove the input from the DOM + for (let y in this.widgets) { + if (this.widgets[y].inputEl) { + this.widgets[y].inputEl.remove(); + } + } + }; + + widget.onRemove = () => { + widget.inputEl?.remove(); + + // Restore original size handler if we are the last + if (!--node[InfoSymbol]) { + node.onResize = node[InfoResizeSymbol]; + delete node[InfoSymbol]; + delete node[InfoResizeSymbol]; + } + }; + + if (node[InfoSymbol]) { + node[InfoSymbol]++; + } else { + node[InfoSymbol] = 1; + const onResize = (node[InfoResizeSymbol] = node.onResize); + + node.onResize = function (size) { + computeSize(size); + + // Call original resizer handler + if (onResize) { + console.log(this, arguments) + onResize.apply(this, arguments); + } + }; + } + + return { widget }; +} + +// WIDGETS +const easyCustomWidgets = { + INFO(node, inputName, inputData, app) { + const defaultVal = inputData[1].default || ""; + return addInfoWidget(node, inputName, { defaultVal, ...inputData[1] }, app); + }, +} + + + +app.registerExtension({ + name: "comfy.easy.widgets", + getCustomWidgets(app) { + return easyCustomWidgets; + }, + nodeCreated(node) { + if (node.widgets) { + // Locate info widgets + const widgets = node.widgets.filter((n) => (n.type === "easyInfo")); + for (const widget of widgets) { + widget.inputEl.addEventListener('contextmenu', function(e) { + hideInfoWidget(e, node, widget); + }); + widget.inputEl.addEventListener('click', function(e) { + hideInfoWidget(e, node, widget); + }); + } + } + }, + async beforeRegisterNodeDef(nodeType, nodeData, app) { + const hiddenNodeTypes = JSON.parse(localStorage.getItem('hiddenWidgetNodeTypes') || "[]"); + const origOnConfigure = nodeType.prototype.onConfigure; + nodeType.prototype.onConfigure = function () { + const r = origOnConfigure ? 
origOnConfigure.apply(this, arguments) : undefined; + if (this.properties['infoWidgetHidden']) { + for (let i in this.widgets) { + if (this.widgets[i].type == "easyInfo") { + hideInfoWidget(null, this, this.widgets[i]); + } + } + } + return r; + }; + const origOnAdded = nodeType.prototype.onAdded; + nodeType.prototype.onAdded = function () { + const r = origOnAdded ? origOnAdded.apply(this, arguments) : undefined; + if (hiddenNodeTypes.includes(this.type)) { + for (let i in this.widgets) { + if (this.widgets[i].type == "easyInfo") { + this.properties['infoWidgetHidden'] = true; + } + } + } + return r; + } + } +}); \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/easy/easyXYPlot.js b/ComfyUI-Easy-Use/web/js/easy/easyXYPlot.js new file mode 100644 index 0000000000000000000000000000000000000000..6f27007b926511d99cdbbc97fc3747f557c4f3f7 --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/easy/easyXYPlot.js @@ -0,0 +1,212 @@ +import { app } from "../../../../scripts/app.js"; +import {removeDropdown, createDropdown} from "../common/dropdown.js"; + +function generateNumList(dictionary) { + const minimum = dictionary["min"] || 0; + const maximum = dictionary["max"] || 0; + const step = dictionary["step"] || 1; + + if (step === 0) { + return []; + } + + const result = []; + let currentValue = minimum; + + while (currentValue <= maximum) { + if (Number.isInteger(step)) { + result.push(Math.round(currentValue) + '; '); + } else { + let formattedValue = currentValue.toFixed(3); + if(formattedValue == -0.000){ + formattedValue = '0.000'; + } + if (!/\.\d{3}$/.test(formattedValue)) { + formattedValue += "0"; + } + result.push(formattedValue + "; "); + } + currentValue += step; + } + + if (maximum >= 0 && minimum >= 0) { + //low to high + return result; + } + else { + //high to low + return result.reverse(); + } +} + +let plotDict = {}; +let currentOptionsDict = {}; + +function getCurrentOptionLists(node, widget) { + const nodeId = String(node.id); + const widgetName = widget.name; + const widgetValue = widget.value.replace(/^(loader|preSampling):\s/, ''); + + if (!currentOptionsDict[widgetName]) { + currentOptionsDict = {...currentOptionsDict, [widgetName]: plotDict[widgetValue]}; + } else if (currentOptionsDict[widgetName] != plotDict[widgetValue]) { + currentOptionsDict[widgetName] = plotDict[widgetValue]; + } +} + +function addGetSetters(node) { + if (node.widgets) + for (const w of node.widgets) { + if (w.name === "x_axis" || + w.name === "y_axis") { + let widgetValue = w.value; + + // Define getters and setters for widget values + Object.defineProperty(w, 'value', { + + get() { + return widgetValue; + }, + set(newVal) { + if (newVal !== widgetValue) { + widgetValue = newVal; + getCurrentOptionLists(node, w); + } + } + }); + } + } +} + +function dropdownCreator(node) { + if (node.widgets) { + const widgets = node.widgets.filter( + (n) => (n.type === "customtext" && n.dynamicPrompts !== false) || n.dynamicPrompts + ); + + for (const w of widgets) { + function replaceOptionSegments(selectedOption, inputSegments, cursorSegmentIndex, optionsList) { + if (selectedOption) { + inputSegments[cursorSegmentIndex] = selectedOption; + } + + return inputSegments.map(segment => verifySegment(segment, optionsList)) + .filter(item => item !== '') + .join(''); + } + + function verifySegment(segment, optionsList) { + segment = cleanSegment(segment); + + if (isInOptionsList(segment, optionsList)) { + return segment + '; '; + } + + let matchedOptions = findMatchedOptions(segment, optionsList); + + if 
(matchedOptions.length === 1 || matchedOptions.length === 2) { + return matchedOptions[0]; + } + + if (isInOptionsList(formatNumberSegment(segment), optionsList)) { + return formatNumberSegment(segment) + '; '; + } + + return ''; + } + + function cleanSegment(segment) { + return segment.replace(/(\n|;| )/g, ''); + } + + function isInOptionsList(segment, optionsList) { + return optionsList.includes(segment + '; '); + } + + function findMatchedOptions(segment, optionsList) { + return optionsList.filter(option => option.toLowerCase().includes(segment.toLowerCase())); + } + + function formatNumberSegment(segment) { + if (Number(segment)) { + return Number(segment).toFixed(3); + } + + if (['0', '0.', '0.0', '0.00', '00'].includes(segment)) { + return '0.000'; + } + return segment; + } + + + const onInput = function () { + const axisWidgetName = w.name[0] + '_axis'; + let optionsList = currentOptionsDict?.[axisWidgetName] || []; + if (optionsList.length === 0) {return} + + const inputText = w.inputEl.value; + const cursorPosition = w.inputEl.selectionStart; + let inputSegments = inputText.split('; '); + + const cursorSegmentIndex = inputText.substring(0, cursorPosition).split('; ').length - 1; + const currentSegment = inputSegments[cursorSegmentIndex]; + const currentSegmentLower = currentSegment.replace(/\n/g, '').toLowerCase(); + const filteredOptionsList = optionsList.filter(option => option.toLowerCase().includes(currentSegmentLower)).map(option => option.replace(/; /g, '')); + + if (filteredOptionsList.length > 0) { + createDropdown(w.inputEl, filteredOptionsList, (selectedOption) => { + const verifiedText = replaceOptionSegments(selectedOption, inputSegments, cursorSegmentIndex, optionsList); + w.inputEl.value = verifiedText; + }); + } + else { + removeDropdown(); + const verifiedText = replaceOptionSegments(null, inputSegments, cursorSegmentIndex, optionsList); + w.inputEl.value = verifiedText; + } + }; + + w.inputEl.removeEventListener('input', onInput); + w.inputEl.addEventListener('input', onInput); + w.inputEl.removeEventListener('mouseup', onInput); + w.inputEl.addEventListener('mouseup', onInput); + } + } +} + +app.registerExtension({ + name: "comfy.easy.xyPlot", + async beforeRegisterNodeDef(nodeType, nodeData, app) { + if (nodeData.name === "easy XYPlot") { + plotDict = nodeData.input.hidden.plot_dict[0]; + + for (const key in plotDict) { + const value = plotDict[key]; + if (Array.isArray(value)) { + let updatedValues = []; + for (const v of value) { + updatedValues.push(v + '; '); + } + plotDict[key] = updatedValues; + } else if (typeof(value) === 'object') { + if(key == 'seed'){ + plotDict[key] = value + '; '; + } + else { + plotDict[key] = generateNumList(value); + } + } else { + plotDict[key] = value + '; '; + } + } + plotDict["None"] = []; + plotDict["---------------------"] = []; + } + }, + nodeCreated(node) { + if (node.comfyClass === "easy XYPlot") { + addGetSetters(node); + dropdownCreator(node); + } + } +}); \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/getset.js b/ComfyUI-Easy-Use/web/js/getset.js new file mode 100644 index 0000000000000000000000000000000000000000..78957b283f57fd3a0eaefb89b3c99e00c1b7f202 --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/getset.js @@ -0,0 +1,311 @@ +import { app } from "../../../scripts/app.js"; + +// Node that allows you to tunnel connections for cleaner graphs + +app.registerExtension({ + name: "easy setNode", + registerCustomNodes() { + class SetNode { + defaultVisibility = true; + serialize_widgets = true; + 
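+            // keep the constant-name widget value stored in the saved workflow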
constructor() { + if (!this.properties) { + this.properties = { + "previousName": "" + }; + } + this.properties.showOutputText = SetNode.defaultVisibility; + + const node = this; + node.color = LGraphCanvas.node_colors.blue.color; + + this.addWidget( + "text", + "Constant", + '', + (s, t, u, v, x) => { + node.validateName(node.graph); + if(this.widgets[0].value !== ''){ + this.title = "Set_" + this.widgets[0].value; + } + this.update(); + this.properties.previousName = this.widgets[0].value; + }, + {} + ) + + this.addInput("*", "*"); + + + this.onConnectionsChange = function( + slotType, //1 = input, 2 = output + slot, + isChangeConnect, + link_info, + output + ) { + // console.log("onConnectionsChange"); + //On Disconnect + if (slotType == 1 && !isChangeConnect) { + this.inputs[slot].type = '*'; + this.inputs[slot].name = '*'; + } + + //On Connect + if (link_info && node.graph && slotType == 1 && isChangeConnect) { + const fromNode = node.graph._nodes.find((otherNode) => otherNode.id == link_info.origin_id); + const type = fromNode.outputs[link_info.origin_slot].type; + + if (this.title === "Set"){ + this.title = "Set_" + type; + } + if (this.widgets[0].value === '*'){ + this.widgets[0].value = type + } + + this.validateName(node.graph); + this.inputs[0].type = type; + this.inputs[0].name = type; + + setTimeout(_=>{ + if(type != this.widgets[0].value){ + this.title = "Set_" + this.widgets[0].value; + } + },1) + } + + //Update either way + this.update(); + } + + this.validateName = function(graph) { + let widgetValue = node.widgets[0].value; + if (widgetValue != '') { + let tries = 0; + let collisions = []; + + do { + collisions = graph._nodes.filter((otherNode) => { + if (otherNode == this) { + return false; + } + if (otherNode.type == 'easy setNode' && otherNode.widgets[0].value === widgetValue) { + return true; + } + return false; + }) + if (collisions.length > 0) { + widgetValue = node.widgets[0].value + "_" + tries; + } + tries++; + } while (collisions.length > 0) + node.widgets[0].value = widgetValue; + this.update(); + } + } + + this.clone = function () { + const cloned = SetNode.prototype.clone.apply(this); + cloned.inputs[0].name = '*'; + cloned.inputs[0].type = '*'; + cloned.properties.previousName = ''; + cloned.size = cloned.computeSize(); + return cloned; + }; + + this.onAdded = function(graph) { + this.validateName(graph); + } + + + this.update = function() { + if (node.graph) { + this.findGetters(node.graph).forEach((getter) => { + getter.setType(this.inputs[0].type); + }); + if (this.widgets[0].value) { + this.findGetters(node.graph, true).forEach((getter) => { + getter.setName(this.widgets[0].value) + }); + } + + const allGetters = node.graph._nodes.filter((otherNode) => otherNode.type == "easy getNode"); + allGetters.forEach((otherNode) => { + if (otherNode.setComboValues) { + otherNode.setComboValues(); + } + }) + } + } + + + this.findGetters = function(graph, checkForPreviousName) { + const name = checkForPreviousName ? 
this.properties.previousName : this.widgets[0].value; + return graph._nodes.filter((otherNode) => { + if (otherNode.type == 'easy getNode' && otherNode.widgets[0].value === name && name != '') { + return true; + } + return false; + }) + } + // This node is purely frontend and does not impact the resulting prompt so should not be serialized + this.isVirtualNode = true; + } + + onRemoved() { + const allGetters = this.graph._nodes.filter((otherNode) => otherNode.type == "easy getNode"); + allGetters.forEach((otherNode) => { + if (otherNode.setComboValues) { + otherNode.setComboValues([this]); + } + }) + } + } + + + LiteGraph.registerNodeType( + "easy setNode", + Object.assign(SetNode, { + title: "Set", + }) + ); + + SetNode.category = "EasyUse/Util"; + }, +}); + + +app.registerExtension({ + name: "easy getNode", + registerCustomNodes() { + class GetNode { + + defaultVisibility = true; + serialize_widgets = true; + + constructor() { + if (!this.properties) { + this.properties = {}; + } + this.properties.showOutputText = GetNode.defaultVisibility; + + const node = this; + node.color = LGraphCanvas.node_colors.blue.color; + this.addWidget( + "combo", + "Constant", + "", + (e) => { + this.onRename(); + }, + { + values: () => { + const setterNodes = node.graph._nodes.filter((otherNode) => otherNode.type == 'easy setNode'); + return setterNodes.map((otherNode) => otherNode.widgets[0].value).sort(); + } + } + ) + + + this.addOutput("*", '*'); + + + this.onConnectionsChange = function( + slotType, //0 = output, 1 = input + slot, //self-explanatory + isChangeConnect, + link_info, + output + ) { + this.validateLinks(); + setTimeout(_=>{ + this.title = 'Get_' + this.widgets[0].value + },1) + } + + + this.setName = function(name) { + node.widgets[0].value = name; + node.onRename(); + node.serialize(); + } + + + this.onRename = function() { + const setter = this.findSetter(node.graph); + if (setter) { + this.setType(setter.inputs[0].type); + this.title = "Get_" + setter.widgets[0].value; + } else { + this.setType('*'); + } + } + + this.clone = function () { + const cloned = GetNode.prototype.clone.apply(this); + cloned.size = cloned.computeSize(); + return cloned; + }; + + this.validateLinks = function() { + if (this.outputs[0].type != '*' && this.outputs[0].links) { + this.outputs[0].links.forEach((linkId) => { + const link = node.graph.links[linkId]; + if (link && link.type != this.outputs[0].type && link.type != '*') { + node.graph.removeLink(linkId) + } + }) + } + } + + this.setType = function(type) { + this.outputs[0].name = type; + this.outputs[0].type = type; + this.validateLinks(); + } + + this.findSetter = function(graph) { + const name = this.widgets[0].value; + return graph._nodes.find((otherNode) => { + if (otherNode.type == 'easy setNode' && otherNode.widgets[0].value === name && name != '') { + return true; + } + return false; + }) + } + + // This node is purely frontend and does not impact the resulting prompt so should not be serialized + this.isVirtualNode = true; + } + + + getInputLink(slot) { + const setter = this.findSetter(this.graph); + + if (setter) { + const slot_info = setter.inputs[slot]; + const link = this.graph.links[ slot_info.link ]; + return link; + } else { + throw new Error("No setter found for " + this.widgets[0].value + "(" + this.type + ")"); + } + + } + onAdded(graph) { + //this.setComboValues(); + //this.validateName(graph); + } + + } + + + LiteGraph.registerNodeType( + "easy getNode", + Object.assign(GetNode, { + title: "Get", + }) + ); + + GetNode.category = 
"EasyUse/Util"; + }, +}); \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/image.js b/ComfyUI-Easy-Use/web/js/image.js new file mode 100644 index 0000000000000000000000000000000000000000..7d8b45ca893a4a6dcc9ecd8aa8763fe60970ce18 --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/image.js @@ -0,0 +1,66 @@ +import { app } from "../../../scripts/app.js"; + + +app.registerExtension({ + name: "comfy.easyUse.imageWidgets", + + nodeCreated(node) { + if (["easy imageSize","easy imageSizeBySide","easy imageSizeByLongerSide","easy imageSizeShow", "easy imageRatio", "easy imagePixelPerfect"].includes(node.comfyClass)) { + + const inputEl = document.createElement("textarea"); + inputEl.className = "comfy-multiline-input"; + inputEl.readOnly = true + + const widget = node.addDOMWidget("info", "customtext", inputEl, { + getValue() { + return inputEl.value; + }, + setValue(v) { + inputEl.value = v; + }, + serialize: false + }); + widget.inputEl = inputEl; + + inputEl.addEventListener("input", () => { + widget.callback?.(widget.value); + }); + } + }, + + beforeRegisterNodeDef(nodeType, nodeData, app) { + if (["easy imageSize","easy imageSizeBySide","easy imageSizeByLongerSide", "easy imageSizeShow", "easy imageRatio", "easy imagePixelPerfect"].includes(nodeData.name)) { + function populate(arr_text) { + var text = ''; + for (let i = 0; i < arr_text.length; i++){ + text += arr_text[i]; + } + if (this.widgets) { + const pos = this.widgets.findIndex((w) => w.name === "info"); + if (pos !== -1 && this.widgets[pos]) { + const w = this.widgets[pos] + w.value = text; + } + } + requestAnimationFrame(() => { + const sz = this.computeSize(); + if (sz[0] < this.size[0]) { + sz[0] = this.size[0]; + } + if (sz[1] < this.size[1]) { + sz[1] = this.size[1]; + } + this.onResize?.(sz); + app.graph.setDirtyCanvas(true, false); + }); + } + + // When the node is executed we will be sent the input text, display this in the widget + const onExecuted = nodeType.prototype.onExecuted; + nodeType.prototype.onExecuted = function (message) { + onExecuted?.apply(this, arguments); + populate.call(this, message.text); + }; + } + } +}) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/image_chooser/chooser.js b/ComfyUI-Easy-Use/web/js/image_chooser/chooser.js new file mode 100644 index 0000000000000000000000000000000000000000..e1c153dc6adc0432af90c653c05c6fde0ec1de59 --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/image_chooser/chooser.js @@ -0,0 +1,237 @@ +import { app } from "../../../../scripts/app.js"; +import { api } from "../../../../scripts/api.js"; +import { ComfyDialog, $el } from "../../../../scripts/ui.js"; + +import { restart_from_here } from "./prompt.js"; +import { hud, FlowState } from "./state.js"; +import { send_cancel, send_message, send_onstart, skip_next_restart_message } from "./messaging.js"; +import { display_preview_images, additionalDrawBackground, click_is_in_image } from "./preview.js"; +import {$t} from "../common/i18n.js"; + + +class chooserImageDialog extends ComfyDialog { + + constructor() { + super(); + this.node = null + this.select_index = [] + this.dialog_div = null + } + + show(image,node){ + this.select_index = [] + this.node = node + + const images_div = image.map((img, index) => { + const imgEl = $el('img', { + src: img.src, + onclick: _ => { + if(this.select_index.includes(index)){ + this.select_index = this.select_index.filter(i => i !== index) + imgEl.classList.remove('selected') + } else { + this.select_index.push(index) + imgEl.classList.add('selected') + } + if 
(node.selected.has(index)) node.selected.delete(index); + else node.selected.add(index); + } + }) + return imgEl + }) + super.show($el('div.easyuse-chooser-dialog',[ + $el('h5.easyuse-chooser-dialog-title', $t('Choose images to continue')), + $el('div.easyuse-chooser-dialog-images',images_div) + ])) + } + createButtons() { + const btns = super.createButtons(); + btns[0].onclick = _ => { + if (FlowState.running()) { send_cancel();} + super.close() + } + btns.unshift($el('button', { + type: 'button', + textContent: $t('Choose Selected Images'), + onclick: _ => { + if (FlowState.paused()) { + send_message(this.node.id, [...this.node.selected, -1, ...this.node.anti_selected]); + } + if (FlowState.idle()) { + skip_next_restart_message(); + restart_from_here(this.node.id).then(() => { send_message(this.node.id, [...this.node.selected, -1, ...this.node.anti_selected]); }); + } + super.close() + } + })) + return btns + } + +} + +function progressButtonPressed() { + const node = app.graph._nodes_by_id[this.node_id]; + if (node) { + const selected = [...node.selected] + if(selected?.length>0){ + node.setProperty('values',selected) + } + if (FlowState.paused()) { + send_message(node.id, [...node.selected, -1, ...node.anti_selected]); + } + if (FlowState.idle()) { + skip_next_restart_message(); + restart_from_here(node.id).then(() => { send_message(node.id, [...node.selected, -1, ...node.anti_selected]); }); + } + } +} + +function cancelButtonPressed() { + + if (FlowState.running()) { send_cancel();} +} + +function enable_disabling(button) { + Object.defineProperty(button, 'clicked', { + get : function() { return this._clicked; }, + set : function(v) { this._clicked = (v && this.name!=''); } + }) +} + +function disable_serialize(widget) { + if (!widget.options) widget.options = { }; + widget.options.serialize = false; +} + +app.registerExtension({ + name:'comfy.easyuse.imageChooser', + init() { + window.addEventListener("beforeunload", send_cancel, true); + }, + setup(app) { + + const draw = LGraphCanvas.prototype.draw; + LGraphCanvas.prototype.draw = function() { + if (hud.update()) { + app.graph._nodes.forEach((node)=> { if (node.update) { node.update(); } }) + } + draw.apply(this,arguments); + } + + + function easyuseImageChooser(event) { + const {node,image,isKSampler} = display_preview_images(event); + if(isKSampler) { + const dialog = new chooserImageDialog(); + dialog.show(image,node) + } + } + api.addEventListener("easyuse-image-choose", easyuseImageChooser); + + /* + If a run is interrupted, send a cancel message (unless we're doing the cancelling, to avoid infinite loop) + */ + const original_api_interrupt = api.interrupt; + api.interrupt = function () { + if (FlowState.paused() && !FlowState.cancelling) send_cancel(); + original_api_interrupt.apply(this, arguments); + } + + /* + At the start of execution + */ + function on_execution_start() { + if (send_onstart()) { + app.graph._nodes.forEach((node)=> { + if (node.selected || node.anti_selected) { + node.selected.clear(); + node.anti_selected.clear(); + node.update(); + } + }) + } + } + api.addEventListener("execution_start", on_execution_start); + }, + + async nodeCreated(node, app) { + + if(node.comfyClass == 'easy imageChooser'){ + node.setProperty('values',[]) + + /* A property defining the top of the image when there is just one */ + if(node?.imageIndex === undefined){ + Object.defineProperty(node, 'imageIndex', { + get : function() { return null; }, + set: function (v) {node.overIndex= v}, + }) + } + if(node?.imagey === undefined){ + 
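+                // 'imagey' is the vertical offset where the preview image begins; preview.js reads it in
+                // click_is_in_image() and drawRect() for hit-testing and highlighting when the node shows a single image.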
Object.defineProperty(node, 'imagey', { + get : function() { return null; }, + set: function (v) {return node.widgets[node.widgets.length-1].last_y+LiteGraph.NODE_WIDGET_HEIGHT;}, + }) + } + + /* Capture clicks */ + const org_onMouseDown = node.onMouseDown; + node.onMouseDown = function( e, pos, canvas ) { + if (e.isPrimary) { + const i = click_is_in_image(node, pos); + if (i>=0) { this.imageClicked(i); } + } + return (org_onMouseDown && org_onMouseDown.apply(this, arguments)); + } + + node.send_button_widget = node.addWidget("button", "", "", progressButtonPressed); + node.cancel_button_widget = node.addWidget("button", "", "", cancelButtonPressed); + enable_disabling(node.cancel_button_widget); + enable_disabling(node.send_button_widget); + disable_serialize(node.cancel_button_widget); + disable_serialize(node.send_button_widget); + + } + }, + + beforeRegisterNodeDef(nodeType, nodeData, app) { + if(nodeData?.name == 'easy imageChooser'){ + + const onDrawBackground = nodeType.prototype.onDrawBackground; + nodeType.prototype.onDrawBackground = function(ctx) { + onDrawBackground.apply(this, arguments); + additionalDrawBackground(this, ctx); + } + + nodeType.prototype.imageClicked = function (imageIndex) { + if (nodeType?.comfyClass==="easy imageChooser") { + if (this.selected.has(imageIndex)) this.selected.delete(imageIndex); + else this.selected.add(imageIndex); + this.update(); + } + } + + const update = nodeType.prototype.update; + nodeType.prototype.update = function() { + if (update) update.apply(this,arguments); + if (this.send_button_widget) { + this.send_button_widget.node_id = this.id; + const selection = ( this.selected ? this.selected.size : 0 ) + ( this.anti_selected ? this.anti_selected.size : 0 ) + const maxlength = this.imgs?.length || 0; + if (FlowState.paused_here(this.id) && selection>0) { + this.send_button_widget.name = (selection>1) ? "Progress selected (" + selection + '/' + maxlength +")" : "Progress selected image"; + } else if (selection>0) { + this.send_button_widget.name = (selection>1) ? "Progress selected (" + selection + '/' + maxlength +")" : "Progress selected image as restart"; + } + else { + this.send_button_widget.name = ""; + } + } + if (this.cancel_button_widget) { + const isRunning = FlowState.running() + this.cancel_button_widget.name = isRunning ? 
"Cancel current run" : ""; + } + this.setDirtyCanvas(true,true); + } + } + } +}) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/image_chooser/messaging.js b/ComfyUI-Easy-Use/web/js/image_chooser/messaging.js new file mode 100644 index 0000000000000000000000000000000000000000..b5864462dc1ab3daaa34afa885bb7bfe2d71b74e --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/image_chooser/messaging.js @@ -0,0 +1,34 @@ +import { api } from "../../../../scripts/api.js"; +import { FlowState } from "./state.js"; + +function send_message_from_pausing_node(message) { + const id = app.runningNodeId; + send_message(id, message); +} + +function send_message(id, message) { + const body = new FormData(); + body.append('message',message); + body.append('id', id); + api.fetchApi("/easyuse/image_chooser_message", { method: "POST", body, }); +} + +function send_cancel() { + send_message(-1,'__cancel__'); + FlowState.cancelling = true; + api.interrupt(); + FlowState.cancelling = false; +} + +var skip_next = 0; +function skip_next_restart_message() { skip_next += 1; } +function send_onstart() { + if (skip_next>0) { + skip_next -= 1; + return false; + } + send_message(-1,'__start__'); + return true; +} + +export { send_message_from_pausing_node, send_cancel, send_message, send_onstart, skip_next_restart_message } \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/image_chooser/preview.js b/ComfyUI-Easy-Use/web/js/image_chooser/preview.js new file mode 100644 index 0000000000000000000000000000000000000000..6f8fe4407812fa809f2b4e6a118baba5ccdb084c --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/image_chooser/preview.js @@ -0,0 +1,90 @@ +import { app } from "../../../../scripts/app.js"; + +const kSampler = ['easy kSampler', 'easy kSamplerTiled', 'easy fullkSampler'] + +function display_preview_images(event) { + const node = app.graph._nodes_by_id[event.detail.id]; + if (node) { + node.selected = new Set(); + node.anti_selected = new Set(); + const image = showImages(node, event.detail.urls); + return {node,image,isKSampler:kSampler.includes(node.type)} + } else { + console.log(`Image Chooser Preview - failed to find ${event.detail.id}`) + } +} + +function showImages(node, urls) { + node.imgs = []; + urls.forEach((u)=> { + const img = new Image(); + node.imgs.push(img); + img.onload = () => { app.graph.setDirtyCanvas(true); }; + img.src = `/view?filename=${encodeURIComponent(u.filename)}&type=temp&subfolder=${app.getPreviewFormatParam()}` + }) + node.setSizeForImage?.(); + return node.imgs +} + +function drawRect(node, s, ctx) { + const padding = 1; + var rect; + if (node.imageRects) { + rect = node.imageRects[s]; + } else { + const y = node.imagey; + rect = [padding,y+padding,node.size[0]-2*padding,node.size[1]-y-2*padding]; + } + ctx.strokeRect(rect[0]+padding, rect[1]+padding, rect[2]-padding*2, rect[3]-padding*2); +} + +function additionalDrawBackground(node, ctx) { + if (!node.imgs) return; + if (node.imageRects) { + for (let i = 0; i < node.imgs.length; i++) { + // delete underlying image + ctx.fillStyle = "#000"; + ctx.fillRect(...node.imageRects[i]) + // draw the new one + const img = node.imgs[i]; + const cellWidth = node.imageRects[i][2]; + const cellHeight = node.imageRects[i][3]; + + let wratio = cellWidth/img.width; + let hratio = cellHeight/img.height; + var ratio = Math.min(wratio, hratio); + + let imgHeight = ratio * img.height; + let imgWidth = ratio * img.width; + + const imgX = node.imageRects[i][0] + (cellWidth - imgWidth)/2; + const imgY = node.imageRects[i][1] + (cellHeight - 
imgHeight)/2; + const cell_padding = 2; + ctx.drawImage(img, imgX+cell_padding, imgY+cell_padding, imgWidth-cell_padding*2, imgHeight-cell_padding*2); + + } + } + ctx.lineWidth = 2; + ctx.strokeStyle = "green"; + node?.selected?.forEach((s) => { drawRect(node,s, ctx) }) + ctx.strokeStyle = "#F88"; + node?.anti_selected?.forEach((s) => { drawRect(node,s, ctx) }) +} + +function click_is_in_image(node, pos) { + if (node.imgs?.length>1) { + for (var i = 0; i < node.imageRects.length; i++) { + const dx = pos[0] - node.imageRects[i][0]; + const dy = pos[1] - node.imageRects[i][1]; + if ( dx > 0 && dx < node.imageRects[i][2] && + dy > 0 && dy < node.imageRects[i][3] ) { + return i; + } + } + } else if (node.imgs?.length==1) { + if (pos[1]>node.imagey) return 0; + } + return -1; +} + +export { display_preview_images, additionalDrawBackground, click_is_in_image } \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/image_chooser/prompt.js b/ComfyUI-Easy-Use/web/js/image_chooser/prompt.js new file mode 100644 index 0000000000000000000000000000000000000000..c9c89b6289ce05d522a68823c60aa3bce35f60ad --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/image_chooser/prompt.js @@ -0,0 +1,114 @@ +import { app } from "../../../../scripts/app.js"; + +function links_with(p, node_id, down, up) { + const links_with = []; + p.workflow.links.forEach((l) => { + if (down && l[1]===node_id && !links_with.includes(l[3])) links_with.push(l[3]) + if (up && l[3]===node_id && !links_with.includes(l[1])) links_with.push(l[1]) + }); + return links_with; +} + +function _all_v_nodes(p, here_id) { + /* + Make a list of all downstream nodes. + */ + const downstream = []; + const to_process = [here_id] + while(to_process.length>0) { + const id = to_process.pop(); + downstream.push(id); + to_process.push( + ...links_with(p,id,true,false).filter((nid)=>{ + return !(downstream.includes(nid) || to_process.includes(nid)) + }) + ) + } + + /* + Now all upstream nodes from any of the downstream nodes (except us).
+ Put us on the result list so we don't flow up through us + */ + to_process.push(...downstream.filter((n)=>{ return n!=here_id})); + const back_upstream = [here_id]; + while(to_process.length>0) { + const id = to_process.pop(); + back_upstream.push(id); + to_process.push( + ...links_with(p,id,false,true).filter((nid)=>{ + return !(back_upstream.includes(nid) || to_process.includes(nid)) + }) + ) + } + + const keep = []; + keep.push(...downstream); + keep.push(...back_upstream.filter((n)=>{return !keep.includes(n)})); + + console.log(`Nodes to keep: ${keep}`); + return keep; +} + +async function all_v_nodes(here_id) { + const p = structuredClone(await app.graphToPrompt()); + const all_nodes = []; + p.workflow.nodes.forEach((node)=>{all_nodes.push(node.id)}) + p.workflow.links = p.workflow.links.filter((l)=>{ return (all_nodes.includes(l[1]) && all_nodes.includes(l[3]))} ) + return _all_v_nodes(p,here_id); +} + +async function restart_from_here(here_id, go_down_to_chooser=false) { + const p = structuredClone(await app.graphToPrompt()); + /* + Make a list of all nodes, and filter out links that are no longer valid + */ + const all_nodes = []; + p.workflow.nodes.forEach((node)=>{all_nodes.push(node.id)}) + p.workflow.links = p.workflow.links.filter((l)=>{ return (all_nodes.includes(l[1]) && all_nodes.includes(l[3]))} ) + + /* Move downstream to a chooser */ + if (go_down_to_chooser) { + while (!app.graph._nodes_by_id[here_id].isChooser) { + here_id = links_with(p, here_id, true, false)[0]; + } + } + + const keep = _all_v_nodes(p, here_id); + + /* + Filter p.workflow.nodes and p.workflow.links + */ + p.workflow.nodes = p.workflow.nodes.filter((node) => { + if (node.id===here_id) node.inputs.forEach((i)=>{i.link=null}) // remove our upstream links + return (keep.includes(node.id)) // only keep keepers + }) + p.workflow.links = p.workflow.links.filter((l) => {return (keep.includes(l[1]) && keep.includes(l[3]))}) + + /* + Filter the p.output object to only include nodes we're keeping + */ + const new_output = {} + for (const [key, value] of Object.entries(p.output)) { + if (keep.includes(parseInt(key))) new_output[key] = value; + } + /* + Filter the p.output entry for the start node to remove any list (ie link) inputs + */ + const new_inputs = {}; + for (const [key, value] of Object.entries(new_output[here_id.toString()].inputs)) { + if (!Array.isArray(value)) new_inputs[key] = value; + } + new_output[here_id.toString()].inputs = new_inputs; + + p.output = new_output; + + // temporarily hijack graph_to_prompt with a version that restores the old one but returns this prompt + const gtp_was = app.graphToPrompt; + app.graphToPrompt = () => { + app.graphToPrompt = gtp_was; + return p; + } + app.queuePrompt(0); +} + +export { restart_from_here, all_v_nodes } \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/image_chooser/state.js b/ComfyUI-Easy-Use/web/js/image_chooser/state.js new file mode 100644 index 0000000000000000000000000000000000000000..69437d72a217a76afd86c5489130e521bcbd41e9 --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/image_chooser/state.js @@ -0,0 +1,55 @@ +import { app } from "../../../../scripts/app.js"; + + +class HUD { + constructor() { + this.current_node_id = undefined; + this.class_of_current_node = null; + this.current_node_is_chooser = false; + } + + update() { + if (app.runningNodeId==this.current_node_id) return false; + + this.current_node_id = app.runningNodeId; + + if (this.current_node_id) { + this.class_of_current_node = 
app.graph?._nodes_by_id[app.runningNodeId.toString()]?.comfyClass; + this.current_node_is_chooser = this.class_of_current_node === "easy imageChooser" + } else { + this.class_of_current_node = undefined; + this.current_node_is_chooser = false; + } + return true; + } +} + +const hud = new HUD(); + + +class FlowState { + constructor(){} + static idle() { + return (!app.runningNodeId); + } + static paused() { + return true; + } + static paused_here(node_id) { + return (FlowState.paused() && FlowState.here(node_id)) + } + static running() { + return (!FlowState.idle()); + } + static here(node_id) { + return (app.runningNodeId==node_id); + } + static state() { + if (FlowState.paused()) return "Paused"; + if (FlowState.running()) return "Running"; + return "Idle"; + } + static cancelling = false; +} + +export { hud, FlowState} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web/js/poseEditor.js b/ComfyUI-Easy-Use/web/js/poseEditor.js new file mode 100644 index 0000000000000000000000000000000000000000..bfd6813efd86ca7483db7f00b8677837ac93043e --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/poseEditor.js @@ -0,0 +1,666 @@ +import { app } from "../../../scripts/app.js"; +import { fabric } from "../lib/fabric.js"; + +fabric.Object.prototype.transparentCorners = false; +fabric.Object.prototype.cornerColor = "#108ce6"; +fabric.Object.prototype.borderColor = "#108ce6"; +fabric.Object.prototype.cornerSize = 10; + +let connect_keypoints = [ + [0, 1], + [1, 2], + [2, 3], + [3, 4], + [1, 5], + [5, 6], + [6, 7], + [1, 8], + [8, 9], + [9, 10], + [1, 11], + [11, 12], + [12, 13], + [0, 14], + [14, 16], + [0, 15], + [15, 17], +]; + +let connect_color = [ + [0, 0, 255], + [255, 0, 0], + [255, 170, 0], + [255, 255, 0], + [255, 85, 0], + [170, 255, 0], + [85, 255, 0], + [0, 255, 0], + [0, 255, 85], + [0, 255, 170], + [0, 255, 255], + [0, 170, 255], + [0, 85, 255], + [85, 0, 255], + [170, 0, 255], + [255, 0, 255], + [255, 0, 170], + [255, 0, 85], +]; + +const default_keypoints = [ + [241, 77], + [241, 120], + [191, 118], + [177, 183], + [163, 252], + [298, 118], + [317, 182], + [332, 245], + [225, 241], + [213, 359], + [215, 454], + [270, 240], + [282, 360], + [286, 456], + [232, 59], + [253, 60], + [225, 70], + [260, 72], +]; + +class OpenPose { + constructor(node, canvasElement) { + this.lockMode = false; + this.visibleEyes = true; + this.flipped = false; + this.node = node; + this.undo_history = LS_Poses[node.name].undo_history || []; + this.redo_history = LS_Poses[node.name].redo_history || []; + this.history_change = false; + this.canvas = this.initCanvas(canvasElement); + this.image = node.widgets.find((w) => w.name === "image"); + } + + setPose(keypoints) { + this.canvas.clear(); + + this.canvas.backgroundColor = "#000"; + + const res = []; + for (let i = 0; i < keypoints.length; i += 18) { + const chunk = keypoints.slice(i, i + 18); + res.push(chunk); + } + + for (let item of res) { + this.addPose(item); + this.canvas.discardActiveObject(); + } + } + + addPose(keypoints = undefined) { + if (keypoints === undefined) { + keypoints = default_keypoints; + } + + const group = new fabric.Group(); + + const makeCircle = ( + color, + left, + top, + line1, + line2, + line3, + line4, + line5 + ) => { + let c = new fabric.Circle({ + left: left, + top: top, + strokeWidth: 1, + radius: 5, + fill: color, + stroke: color, + }); + + c.hasControls = c.hasBorders = false; + c.line1 = line1; + c.line2 = line2; + c.line3 = line3; + c.line4 = line4; + c.line5 = line5; + + return c; + }; + + const makeLine = (coords, 
color) => { + return new fabric.Line(coords, { + fill: color, + stroke: color, + strokeWidth: 10, + selectable: false, + evented: false, + }); + }; + + const lines = []; + const circles = []; + + for (let i = 0; i < connect_keypoints.length; i++) { + // 接続されるidxを指定 [0, 1]なら0と1つなぐ + const item = connect_keypoints[i]; + const line = makeLine( + keypoints[item[0]].concat(keypoints[item[1]]), + `rgba(${connect_color[i].join(", ")}, 0.7)` + ); + lines.push(line); + this.canvas.add(line); + } + + for (let i = 0; i < keypoints.length; i++) { + let list = []; + + connect_keypoints.filter((item, idx) => { + if (item.includes(i)) { + list.push(lines[idx]); + return idx; + } + }); + const circle = makeCircle( + `rgb(${connect_color[i].join(", ")})`, + keypoints[i][0], + keypoints[i][1], + ...list + ); + circle["id"] = i; + circles.push(circle); + group.addWithUpdate(circle); + } + + this.canvas.discardActiveObject(); + this.canvas.setActiveObject(group); + this.canvas.add(group); + group.toActiveSelection(); + this.canvas.requestRenderAll(); + } + + initCanvas() { + this.canvas = new fabric.Canvas(this.canvas, { + backgroundColor: "#000", + preserveObjectStacking: true, + }); + + const updateLines = (target) => { + if ("_objects" in target) { + const flipX = target.flipX ? -1 : 1; + const flipY = target.flipY ? -1 : 1; + this.flipped = flipX * flipY === -1; + const showEyes = this.flipped ? !this.visibleEyes : this.visibleEyes; + + if (target.angle === 0) { + const rtop = target.top; + const rleft = target.left; + for (const item of target._objects) { + let p = item; + p.scaleX = 1; + p.scaleY = 1; + const top = + rtop + + p.top * target.scaleY * flipY + + (target.height * target.scaleY) / 2; + const left = + rleft + + p.left * target.scaleX * flipX + + (target.width * target.scaleX) / 2; + p["_top"] = top; + p["_left"] = left; + if (p["id"] === 0) { + p.line1 && p.line1.set({ x1: left, y1: top }); + } else { + p.line1 && p.line1.set({ x2: left, y2: top }); + } + if (p["id"] === 14 || p["id"] === 15) { + p.radius = showEyes ? 5 : 0; + if (p.line1) p.line1.strokeWidth = showEyes ? 10 : 0; + if (p.line2) p.line2.strokeWidth = showEyes ? 10 : 0; + } + p.line2 && p.line2.set({ x1: left, y1: top }); + p.line3 && p.line3.set({ x1: left, y1: top }); + p.line4 && p.line4.set({ x1: left, y1: top }); + p.line5 && p.line5.set({ x1: left, y1: top }); + } + } else { + const aCoords = target.aCoords; + const center = { + x: (aCoords.tl.x + aCoords.br.x) / 2, + y: (aCoords.tl.y + aCoords.br.y) / 2, + }; + const rad = (target.angle * Math.PI) / 180; + const sin = Math.sin(rad); + const cos = Math.cos(rad); + + for (const item of target._objects) { + let p = item; + const p_top = p.top * target.scaleY * flipY; + const p_left = p.left * target.scaleX * flipX; + const left = center.x + p_left * cos - p_top * sin; + const top = center.y + p_left * sin + p_top * cos; + p["_top"] = top; + p["_left"] = left; + if (p["id"] === 0) { + p.line1 && p.line1.set({ x1: left, y1: top }); + } else { + p.line1 && p.line1.set({ x2: left, y2: top }); + } + if (p["id"] === 14 || p["id"] === 15) { + p.radius = showEyes ? 5 : 0.3; + if (p.line1) p.line1.strokeWidth = showEyes ? 10 : 0; + if (p.line2) p.line2.strokeWidth = showEyes ? 
10 : 0; + } + p.line2 && p.line2.set({ x1: left, y1: top }); + p.line3 && p.line3.set({ x1: left, y1: top }); + p.line4 && p.line4.set({ x1: left, y1: top }); + p.line5 && p.line5.set({ x1: left, y1: top }); + } + } + } else { + var p = target; + if (p["id"] === 0) { + p.line1 && p.line1.set({ x1: p.left, y1: p.top }); + } else { + p.line1 && p.line1.set({ x2: p.left, y2: p.top }); + } + p.line2 && p.line2.set({ x1: p.left, y1: p.top }); + p.line3 && p.line3.set({ x1: p.left, y1: p.top }); + p.line4 && p.line4.set({ x1: p.left, y1: p.top }); + p.line5 && p.line5.set({ x1: p.left, y1: p.top }); + } + this.canvas.renderAll(); + }; + + this.canvas.on("object:moving", (e) => { + updateLines(e.target); + }); + + this.canvas.on("object:scaling", (e) => { + updateLines(e.target); + this.canvas.renderAll(); + }); + + this.canvas.on("object:rotating", (e) => { + updateLines(e.target); + this.canvas.renderAll(); + }); + + this.canvas.on("object:modified", () => { + if ( + this.lockMode || + this.canvas.getActiveObject().type == "activeSelection" + ) + return; + this.undo_history.push(this.getJSON()); + this.redo_history.length = 0; + this.history_change = true; + this.uploadPoseFile(this.node.name); + }); + + if (!LS_Poses[this.node.name].undo_history.length) { + this.setPose(default_keypoints); + this.undo_history.push(this.getJSON()); + } + return this.canvas; + } + + undo() { + if (this.undo_history.length > 0) { + this.lockMode = true; + if (this.undo_history.length > 1) + this.redo_history.push(this.undo_history.pop()); + + const content = this.undo_history[this.undo_history.length - 1]; + this.loadPreset(content); + this.canvas.renderAll(); + this.lockMode = false; + this.history_change = true; + this.uploadPoseFile(this.node.name); + } + } + + redo() { + if (this.redo_history.length > 0) { + this.lockMode = true; + const content = this.redo_history.pop(); + this.undo_history.push(content); + this.loadPreset(content); + this.canvas.renderAll(); + this.lockMode = false; + this.history_change = true; + this.uploadPoseFile(this.node.name); + } + } + + resetCanvas() { + this.canvas.clear(); + this.canvas.backgroundColor = "#000"; + this.addPose(); + } + + updateHistoryData() { + if (this.history_change) { + LS_Poses[this.node.name].undo_history = this.undo_history; + LS_Poses[this.node.name].redo_history = this.redo_history; + LS_Save(); + this.history_change = false; + } + } + + uploadPoseFile(fileName) { + // Upload pose to temp folder ComfyUI + + const uploadFile = async (blobFile) => { + try { + const resp = await fetch("/upload/image", { + method: "POST", + body: blobFile, + }); + + if (resp.status === 200) { + const data = await resp.json(); + + if (!this.image.options.values.includes(data.name)) { + this.image.options.values.push(data.name); + } + + this.image.value = data.name; + this.updateHistoryData(); + } else { + alert(resp.status + " - " + resp.statusText); + } + } catch (error) { + console.error(error); + } + }; + + this.canvas.lowerCanvasEl.toBlob(function (blob) { + let formData = new FormData(); + formData.append("image", blob, fileName); + formData.append("overwrite", "true"); + formData.append("type", "temp"); + uploadFile(formData); + }, "image/png"); + // - end + + const callb = this.node.callback, + self = this; + this.image.callback = function () { + this.image.value = self.node.name; + if (callb) { + return callb.apply(this, arguments); + } + }; + } + + getJSON() { + const json = { + keypoints: this.canvas + .getObjects() + .filter((item) => { + if (item.type === "circle") 
return item; + }) + .map((item) => { + return [Math.round(item.left), Math.round(item.top)]; + }), + }; + + return json; + } + + loadPreset(json) { + try { + if (json["keypoints"].length % 18 === 0) { + this.setPose(json["keypoints"]); + } else { + throw new Error("keypoints is invalid"); + } + } catch (e) { + console.error(e); + } + } +} + +// Create OpenPose widget +function createOpenPose(node, inputName, inputData, app) { + node.name = inputName; + const widget = { + type: "openpose", + name: `w${inputName}`, + + draw: function (ctx, _, widgetWidth, y, widgetHeight) { + const margin = 10, + visible = app.canvas.ds.scale > 0.5 && this.type === "openpose", + clientRectBound = ctx.canvas.getBoundingClientRect(), + transform = new DOMMatrix() + .scaleSelf( + clientRectBound.width / ctx.canvas.width, + clientRectBound.height / ctx.canvas.height + ) + .multiplySelf(ctx.getTransform()) + .translateSelf(margin, margin + y), + w = (widgetWidth - margin * 2 - 3) * transform.a; + + Object.assign(this.openpose.style, { + left: `${transform.a * margin + transform.e}px`, + top: `${transform.d + transform.f}px`, + width: w + "px", + height: w + "px", + position: "absolute", + zIndex: app.graph._nodes.indexOf(node), + }); + + Object.assign(this.openpose.children[0].style, { + width: w + "px", + height: w + "px", + }); + + Object.assign(this.openpose.children[1].style, { + width: w + "px", + height: w + "px", + }); + + Array.from(this.openpose.children[2].children).forEach((element) => { + Object.assign(element.style, { + width: `${28.0 * transform.a}px`, + height: `${22.0 * transform.d}px`, + fontSize: `${transform.d * 10.0}px`, + }); + element.hidden = !visible; + }); + }, + }; + + // Fabric canvas + let canvasOpenPose = document.createElement("canvas"); + node.openPose = new OpenPose(node, canvasOpenPose); + + node.openPose.canvas.setWidth(512); + node.openPose.canvas.setHeight(512); + + let widgetCombo = node.widgets.filter((w) => w.type === "combo"); + widgetCombo[0].value = node.name; + + widget.openpose = node.openPose.canvas.wrapperEl; + widget.parent = node; + + // Create elements undo, redo, clear history + let panelButtons = document.createElement("div"), + undoButton = document.createElement("button"), + redoButton = document.createElement("button"), + historyClearButton = document.createElement("button"); + + panelButtons.className = "panelButtons comfy-menu-btns"; + undoButton.textContent = "⟲"; + redoButton.textContent = "⟳"; + historyClearButton.textContent = "✖"; + undoButton.title = "Undo"; + redoButton.title = "Redo"; + historyClearButton.title = "Clear History"; + + undoButton.addEventListener("click", () => node.openPose.undo()); + redoButton.addEventListener("click", () => node.openPose.redo()); + historyClearButton.addEventListener("click", () => { + if (confirm(`Delete all pose history of a node "${node.name}"?`)) { + node.openPose.undo_history = []; + node.openPose.redo_history = []; + node.openPose.setPose(default_keypoints); + node.openPose.undo_history.push(node.openPose.getJSON()); + node.openPose.history_change = true; + node.openPose.updateHistoryData(); + } + }); + + panelButtons.appendChild(undoButton); + panelButtons.appendChild(redoButton); + panelButtons.appendChild(historyClearButton); + node.openPose.canvas.wrapperEl.appendChild(panelButtons); + + document.body.appendChild(widget.openpose); + + // Add buttons add, reset, undo, redo poses + node.addWidget("button", "Add pose", "add_pose", () => { + node.openPose.addPose(); + }); + + node.addWidget("button", "Reset 
pose", "reset_pose", () => { + node.openPose.resetCanvas(); + }); + + // Add customWidget to node + node.addCustomWidget(widget); + + node.onRemoved = () => { + if (Object.hasOwn(LS_Poses, node.name)) { + delete LS_Poses[node.name]; + LS_Save(); + } + + // When removing this node we need to remove the input from the DOM + for (let y in node.widgets) { + if (node.widgets[y].openpose) { + node.widgets[y].openpose.remove(); + } + } + }; + + widget.onRemove = () => { + widget.openpose?.remove(); + }; + + app.canvas.onDrawBackground = function () { + // Draw node isnt fired once the node is off the screen + // if it goes off screen quickly, the input may not be removed + // this shifts it off screen so it can be moved back if the node is visible. + for (let n in app.graph._nodes) { + n = graph._nodes[n]; + for (let w in n.widgets) { + let wid = n.widgets[w]; + if (Object.hasOwn(wid, "openpose")) { + wid.openpose.style.left = -8000 + "px"; + wid.openpose.style.position = "absolute"; + } + } + } + }; + return { widget: widget }; +} + +window.LS_Poses = {}; +function LS_Save() { + ///console.log("Save:", LS_Poses); + localStorage.setItem("ComfyUI_Poses", JSON.stringify(LS_Poses)); +} + +app.registerExtension({ + name: "comfy.easyuse.poseEditor", + async init(app) { + // Any initial setup to run as soon as the page loads + let style = document.createElement("style"); + style.innerText = `.panelButtons{ + position: absolute; + padding: 4px; + display: flex; + gap: 4px; + flex-direction: column; + width: fit-content; + } + .panelButtons button:last-child{ + border-color: var(--error-text); + color: var(--error-text) !important; + } + + `; + document.head.appendChild(style); + }, + async setup(app) { + let openPoseNode = app.graph._nodes.filter((wi) => wi.type == "easy poseEditor"); + + if (openPoseNode.length) { + openPoseNode.map((n) => { + console.log(`Setup PoseNode: ${n.name}`); + let widgetImage = n.widgets.find((w) => w.name == "image"); + if (widgetImage && Object.hasOwn(LS_Poses, n.name)) { + let pose_ls = LS_Poses[n.name].undo_history; + n.openPose.loadPreset( + pose_ls.length > 0 + ? pose_ls[pose_ls.length - 1] + : { keypoints: default_keypoints } + ); + } + }); + } + }, + async beforeRegisterNodeDef(nodeType, nodeData, app) { + if (nodeData.name === "easy poseEditor") { + const onNodeCreated = nodeType.prototype.onNodeCreated; + + nodeType.prototype.onNodeCreated = function () { + const r = onNodeCreated + ? 
onNodeCreated.apply(this, arguments) + : undefined; + + let openPoseNode = app.graph._nodes.filter( + (wi) => {wi.type == "easy poseEditor"} + ), + nodeName = `Pose_${openPoseNode.length}`, + nodeNamePNG = `${nodeName}.png`; + + console.log(`Create PoseNode: ${nodeName}`); + + LS_Poses = + localStorage.getItem("ComfyUI_Poses") && + JSON.parse(localStorage.getItem("ComfyUI_Poses")); + if (!LS_Poses) { + localStorage.setItem("ComfyUI_Poses", JSON.stringify({})); + LS_Poses = JSON.parse(localStorage.getItem("ComfyUI_Poses")); + } + + if (!Object.hasOwn(LS_Poses, nodeNamePNG)) { + LS_Poses[nodeNamePNG] = { + undo_history: [], + redo_history: [], + }; + LS_Save(); + } + + createOpenPose.apply(this, [this, nodeNamePNG, {}, app]); + setTimeout(() => { + this.openPose.uploadPoseFile(nodeNamePNG); + }, 1); + + this.setSize([530, 620]); + + return r; + }; + } + }, +}); diff --git a/ComfyUI-Easy-Use/web/js/seed.js b/ComfyUI-Easy-Use/web/js/seed.js new file mode 100644 index 0000000000000000000000000000000000000000..dc6c17e050c15c2cc04c0c47af16b02ad4e97b8a --- /dev/null +++ b/ComfyUI-Easy-Use/web/js/seed.js @@ -0,0 +1,47 @@ +import { api } from "../../../scripts/api.js"; + +// 全局Seed +function globalSeedHandler(event) { + let nodes = app.graph._nodes_by_id; + for(let i in nodes) { + let node = nodes[i]; + if(node.type == 'easy globalSeed') { + if(node.widgets) { + const w = node.widgets.find((w) => w.name == 'value'); + const last_w = node.widgets.find((w) => w.name == 'last_seed'); + last_w.value = w.value; + w.value = event.detail.value; + } + } + else{ + if(node.widgets) { + const w = node.widgets.find((w) => w.name == 'seed_num' || w.name == 'seed' || w.name == 'noise_seed'); + if(w && event.detail.seed_map[node.id] != undefined) { + w.value = event.detail.seed_map[node.id]; + } + } + } + + } +} + +api.addEventListener("easyuse-global-seed", globalSeedHandler); + +const original_queuePrompt = api.queuePrompt; +async function queuePrompt_with_seed(number, { output, workflow }) { + workflow.seed_widgets = {}; + + for(let i in app.graph._nodes_by_id) { + let widgets = app.graph._nodes_by_id[i].widgets; + if(widgets) { + for(let j in widgets) { + if((widgets[j].name == 'seed_num' || widgets[j].name == 'seed' || widgets[j].name == 'noise_seed') && widgets[j].type != 'converted-widget') + workflow.seed_widgets[i] = parseInt(j); + } + } + } + + return await original_queuePrompt.call(api, number, { output, workflow }); +} + +api.queuePrompt = queuePrompt_with_seed; diff --git a/ComfyUI-Easy-Use/web/lib/fabric.js b/ComfyUI-Easy-Use/web/lib/fabric.js new file mode 100644 index 0000000000000000000000000000000000000000..fa7bd3ce09e2b1c4168a380f625cb8a96c05ab0a --- /dev/null +++ b/ComfyUI-Easy-Use/web/lib/fabric.js @@ -0,0 +1,10 @@ +/** + * Bundled by jsDelivr using Rollup v2.79.1 and Terser v5.17.1. + * Original file: /npm/fabric-with-all-modules@4.6.1/dist/fabric.js + * + * Do NOT use SRI with dynamically generated files! 
More information: https://www.jsdelivr.com/using-sri-with-dynamic-files + */ +function t(t,e){return e.forEach((function(e){e&&"string"!=typeof e&&!Array.isArray(e)&&Object.keys(e).forEach((function(i){if("default"!==i&&!(i in t)){var r=Object.getOwnPropertyDescriptor(e,i);Object.defineProperty(t,i,r.get?r:{enumerable:!0,get:function(){return e[i]}})}}))})),Object.freeze(t)}var e="undefined"!=typeof global?global:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{},i=[],r=[],n="undefined"!=typeof Uint8Array?Uint8Array:Array,o=!1;function s(){o=!0;for(var t="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",e=0;e<64;++e)i[e]=t[e],r[t.charCodeAt(e)]=e;r["-".charCodeAt(0)]=62,r["_".charCodeAt(0)]=63}function a(t,e,r){for(var n,o,s=[],a=e;a>18&63]+i[o>>12&63]+i[o>>6&63]+i[63&o]);return s.join("")}function h(t){var e;o||s();for(var r=t.length,n=r%3,h="",c=[],l=16383,u=0,f=r-n;uf?f:u+l));return 1===n?(e=t[r-1],h+=i[e>>2],h+=i[e<<4&63],h+="=="):2===n&&(e=(t[r-2]<<8)+t[r-1],h+=i[e>>10],h+=i[e>>4&63],h+=i[e<<2&63],h+="="),c.push(h),c.join("")}function c(t,e,i,r,n){var o,s,a=8*n-r-1,h=(1<>1,l=-7,u=i?n-1:0,f=i?-1:1,d=t[e+u];for(u+=f,o=d&(1<<-l)-1,d>>=-l,l+=a;l>0;o=256*o+t[e+u],u+=f,l-=8);for(s=o&(1<<-l)-1,o>>=-l,l+=r;l>0;s=256*s+t[e+u],u+=f,l-=8);if(0===o)o=1-c;else{if(o===h)return s?NaN:1/0*(d?-1:1);s+=Math.pow(2,r),o-=c}return(d?-1:1)*s*Math.pow(2,o-r)}function l(t,e,i,r,n,o){var s,a,h,c=8*o-n-1,l=(1<>1,f=23===n?Math.pow(2,-24)-Math.pow(2,-77):0,d=r?0:o-1,g=r?1:-1,p=e<0||0===e&&1/e<0?1:0;for(e=Math.abs(e),isNaN(e)||e===1/0?(a=isNaN(e)?1:0,s=l):(s=Math.floor(Math.log(e)/Math.LN2),e*(h=Math.pow(2,-s))<1&&(s--,h*=2),(e+=s+u>=1?f/h:f*Math.pow(2,1-u))*h>=2&&(s++,h/=2),s+u>=l?(a=0,s=l):s+u>=1?(a=(e*h-1)*Math.pow(2,n),s+=u):(a=e*Math.pow(2,u-1)*Math.pow(2,n),s=0));n>=8;t[i+d]=255&a,d+=g,a/=256,n-=8);for(s=s<0;t[i+d]=255&s,d+=g,s/=256,c-=8);t[i+d-g]|=128*p}var u={}.toString,f=Array.isArray||function(t){return"[object Array]"==u.call(t)};function d(){return p.TYPED_ARRAY_SUPPORT?2147483647:1073741823}function g(t,e){if(d()=d())throw new RangeError("Attempt to allocate Buffer larger than maximum size: 0x"+d().toString(16)+" bytes");return 0|t}function x(t){return!(null==t||!t._isBuffer)}function C(t,e){if(x(t))return t.length;if("undefined"!=typeof ArrayBuffer&&"function"==typeof ArrayBuffer.isView&&(ArrayBuffer.isView(t)||t instanceof ArrayBuffer))return t.byteLength;"string"!=typeof t&&(t=""+t);var i=t.length;if(0===i)return 0;for(var r=!1;;)switch(e){case"ascii":case"latin1":case"binary":return i;case"utf8":case"utf-8":case void 0:return K(t).length;case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return 2*i;case"hex":return i>>>1;case"base64":return J(t).length;default:if(r)return K(t).length;e=(""+e).toLowerCase(),r=!0}}function w(t,e,i){var r=!1;if((void 0===e||e<0)&&(e=0),e>this.length)return"";if((void 0===i||i>this.length)&&(i=this.length),i<=0)return"";if((i>>>=0)<=(e>>>=0))return"";for(t||(t="utf8");;)switch(t){case"hex":return B(this,e,i);case"utf8":case"utf-8":return F(this,e,i);case"ascii":return I(this,e,i);case"latin1":case"binary":return R(this,e,i);case"base64":return j(this,e,i);case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return Y(this,e,i);default:if(r)throw new TypeError("Unknown encoding: "+t);t=(t+"").toLowerCase(),r=!0}}function S(t,e,i){var r=t[e];t[e]=t[i],t[i]=r}function T(t,e,i,r,n){if(0===t.length)return-1;if("string"==typeof 
i?(r=i,i=0):i>2147483647?i=2147483647:i<-2147483648&&(i=-2147483648),i=+i,isNaN(i)&&(i=n?0:t.length-1),i<0&&(i=t.length+i),i>=t.length){if(n)return-1;i=t.length-1}else if(i<0){if(!n)return-1;i=0}if("string"==typeof e&&(e=p.from(e,r)),x(e))return 0===e.length?-1:O(t,e,i,r,n);if("number"==typeof e)return e&=255,p.TYPED_ARRAY_SUPPORT&&"function"==typeof Uint8Array.prototype.indexOf?n?Uint8Array.prototype.indexOf.call(t,e,i):Uint8Array.prototype.lastIndexOf.call(t,e,i):O(t,[e],i,r,n);throw new TypeError("val must be string, number or Buffer")}function O(t,e,i,r,n){var o,s=1,a=t.length,h=e.length;if(void 0!==r&&("ucs2"===(r=String(r).toLowerCase())||"ucs-2"===r||"utf16le"===r||"utf-16le"===r)){if(t.length<2||e.length<2)return-1;s=2,a/=2,h/=2,i/=2}function c(t,e){return 1===s?t[e]:t.readUInt16BE(e*s)}if(n){var l=-1;for(o=i;oa&&(i=a-h),o=i;o>=0;o--){for(var u=!0,f=0;fn&&(r=n):r=n;var o=e.length;if(o%2!=0)throw new TypeError("Invalid hex string");r>o/2&&(r=o/2);for(var s=0;s>8,n=i%256,o.push(n),o.push(r);return o}(e,t.length-i),t,i,r)}function j(t,e,i){return 0===e&&i===t.length?h(t):h(t.slice(e,i))}function F(t,e,i){i=Math.min(t.length,i);for(var r=[],n=e;n239?4:c>223?3:c>191?2:1;if(n+u<=i)switch(u){case 1:c<128&&(l=c);break;case 2:128==(192&(o=t[n+1]))&&(h=(31&c)<<6|63&o)>127&&(l=h);break;case 3:o=t[n+1],s=t[n+2],128==(192&o)&&128==(192&s)&&(h=(15&c)<<12|(63&o)<<6|63&s)>2047&&(h<55296||h>57343)&&(l=h);break;case 4:o=t[n+1],s=t[n+2],a=t[n+3],128==(192&o)&&128==(192&s)&&128==(192&a)&&(h=(15&c)<<18|(63&o)<<12|(63&s)<<6|63&a)>65535&&h<1114112&&(l=h)}null===l?(l=65533,u=1):l>65535&&(l-=65536,r.push(l>>>10&1023|55296),l=56320|1023&l),r.push(l),n+=u}return function(t){var e=t.length;if(e<=L)return String.fromCharCode.apply(String,t);var i="",r=0;for(;r0&&(t=this.toString("hex",0,50).match(/.{2}/g).join(" "),this.length>50&&(t+=" ... 
")),""},p.prototype.compare=function(t,e,i,r,n){if(!x(t))throw new TypeError("Argument must be a Buffer");if(void 0===e&&(e=0),void 0===i&&(i=t?t.length:0),void 0===r&&(r=0),void 0===n&&(n=this.length),e<0||i>t.length||r<0||n>this.length)throw new RangeError("out of range index");if(r>=n&&e>=i)return 0;if(r>=n)return-1;if(e>=i)return 1;if(this===t)return 0;for(var o=(n>>>=0)-(r>>>=0),s=(i>>>=0)-(e>>>=0),a=Math.min(o,s),h=this.slice(r,n),c=t.slice(e,i),l=0;ln)&&(i=n),t.length>0&&(i<0||e<0)||e>this.length)throw new RangeError("Attempt to write outside buffer bounds");r||(r="utf8");for(var o=!1;;)switch(r){case"hex":return P(this,t,e,i);case"utf8":case"utf-8":return E(this,t,e,i);case"ascii":return k(this,t,e,i);case"latin1":case"binary":return A(this,t,e,i);case"base64":return D(this,t,e,i);case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return M(this,t,e,i);default:if(o)throw new TypeError("Unknown encoding: "+r);r=(""+r).toLowerCase(),o=!0}},p.prototype.toJSON=function(){return{type:"Buffer",data:Array.prototype.slice.call(this._arr||this,0)}};var L=4096;function I(t,e,i){var r="";i=Math.min(t.length,i);for(var n=e;nr)&&(i=r);for(var n="",o=e;oi)throw new RangeError("Trying to access beyond buffer length")}function U(t,e,i,r,n,o){if(!x(t))throw new TypeError('"buffer" argument must be a Buffer instance');if(e>n||et.length)throw new RangeError("Index out of range")}function G(t,e,i,r){e<0&&(e=65535+e+1);for(var n=0,o=Math.min(t.length-i,2);n>>8*(r?n:1-n)}function H(t,e,i,r){e<0&&(e=4294967295+e+1);for(var n=0,o=Math.min(t.length-i,4);n>>8*(r?n:3-n)&255}function W(t,e,i,r,n,o){if(i+r>t.length)throw new RangeError("Index out of range");if(i<0)throw new RangeError("Index out of range")}function z(t,e,i,r,n){return n||W(t,0,i,4),l(t,e,i,r,23,4),i+4}function N(t,e,i,r,n){return n||W(t,0,i,8),l(t,e,i,r,52,8),i+8}p.prototype.slice=function(t,e){var i,r=this.length;if((t=~~t)<0?(t+=r)<0&&(t=0):t>r&&(t=r),(e=void 0===e?r:~~e)<0?(e+=r)<0&&(e=0):e>r&&(e=r),e0&&(n*=256);)r+=this[t+--e]*n;return r},p.prototype.readUInt8=function(t,e){return e||X(t,1,this.length),this[t]},p.prototype.readUInt16LE=function(t,e){return e||X(t,2,this.length),this[t]|this[t+1]<<8},p.prototype.readUInt16BE=function(t,e){return e||X(t,2,this.length),this[t]<<8|this[t+1]},p.prototype.readUInt32LE=function(t,e){return e||X(t,4,this.length),(this[t]|this[t+1]<<8|this[t+2]<<16)+16777216*this[t+3]},p.prototype.readUInt32BE=function(t,e){return e||X(t,4,this.length),16777216*this[t]+(this[t+1]<<16|this[t+2]<<8|this[t+3])},p.prototype.readIntLE=function(t,e,i){t|=0,e|=0,i||X(t,e,this.length);for(var r=this[t],n=1,o=0;++o=(n*=128)&&(r-=Math.pow(2,8*e)),r},p.prototype.readIntBE=function(t,e,i){t|=0,e|=0,i||X(t,e,this.length);for(var r=e,n=1,o=this[t+--r];r>0&&(n*=256);)o+=this[t+--r]*n;return o>=(n*=128)&&(o-=Math.pow(2,8*e)),o},p.prototype.readInt8=function(t,e){return e||X(t,1,this.length),128&this[t]?-1*(255-this[t]+1):this[t]},p.prototype.readInt16LE=function(t,e){e||X(t,2,this.length);var i=this[t]|this[t+1]<<8;return 32768&i?4294901760|i:i},p.prototype.readInt16BE=function(t,e){e||X(t,2,this.length);var i=this[t+1]|this[t]<<8;return 32768&i?4294901760|i:i},p.prototype.readInt32LE=function(t,e){return e||X(t,4,this.length),this[t]|this[t+1]<<8|this[t+2]<<16|this[t+3]<<24},p.prototype.readInt32BE=function(t,e){return e||X(t,4,this.length),this[t]<<24|this[t+1]<<16|this[t+2]<<8|this[t+3]},p.prototype.readFloatLE=function(t,e){return e||X(t,4,this.length),c(this,t,!0,23,4)},p.prototype.readFloatBE=function(t,e){return 
e||X(t,4,this.length),c(this,t,!1,23,4)},p.prototype.readDoubleLE=function(t,e){return e||X(t,8,this.length),c(this,t,!0,52,8)},p.prototype.readDoubleBE=function(t,e){return e||X(t,8,this.length),c(this,t,!1,52,8)},p.prototype.writeUIntLE=function(t,e,i,r){(t=+t,e|=0,i|=0,r)||U(this,t,e,i,Math.pow(2,8*i)-1,0);var n=1,o=0;for(this[e]=255&t;++o=0&&(o*=256);)this[e+n]=t/o&255;return e+i},p.prototype.writeUInt8=function(t,e,i){return t=+t,e|=0,i||U(this,t,e,1,255,0),p.TYPED_ARRAY_SUPPORT||(t=Math.floor(t)),this[e]=255&t,e+1},p.prototype.writeUInt16LE=function(t,e,i){return t=+t,e|=0,i||U(this,t,e,2,65535,0),p.TYPED_ARRAY_SUPPORT?(this[e]=255&t,this[e+1]=t>>>8):G(this,t,e,!0),e+2},p.prototype.writeUInt16BE=function(t,e,i){return t=+t,e|=0,i||U(this,t,e,2,65535,0),p.TYPED_ARRAY_SUPPORT?(this[e]=t>>>8,this[e+1]=255&t):G(this,t,e,!1),e+2},p.prototype.writeUInt32LE=function(t,e,i){return t=+t,e|=0,i||U(this,t,e,4,4294967295,0),p.TYPED_ARRAY_SUPPORT?(this[e+3]=t>>>24,this[e+2]=t>>>16,this[e+1]=t>>>8,this[e]=255&t):H(this,t,e,!0),e+4},p.prototype.writeUInt32BE=function(t,e,i){return t=+t,e|=0,i||U(this,t,e,4,4294967295,0),p.TYPED_ARRAY_SUPPORT?(this[e]=t>>>24,this[e+1]=t>>>16,this[e+2]=t>>>8,this[e+3]=255&t):H(this,t,e,!1),e+4},p.prototype.writeIntLE=function(t,e,i,r){if(t=+t,e|=0,!r){var n=Math.pow(2,8*i-1);U(this,t,e,i,n-1,-n)}var o=0,s=1,a=0;for(this[e]=255&t;++o>0)-a&255;return e+i},p.prototype.writeIntBE=function(t,e,i,r){if(t=+t,e|=0,!r){var n=Math.pow(2,8*i-1);U(this,t,e,i,n-1,-n)}var o=i-1,s=1,a=0;for(this[e+o]=255&t;--o>=0&&(s*=256);)t<0&&0===a&&0!==this[e+o+1]&&(a=1),this[e+o]=(t/s>>0)-a&255;return e+i},p.prototype.writeInt8=function(t,e,i){return t=+t,e|=0,i||U(this,t,e,1,127,-128),p.TYPED_ARRAY_SUPPORT||(t=Math.floor(t)),t<0&&(t=255+t+1),this[e]=255&t,e+1},p.prototype.writeInt16LE=function(t,e,i){return t=+t,e|=0,i||U(this,t,e,2,32767,-32768),p.TYPED_ARRAY_SUPPORT?(this[e]=255&t,this[e+1]=t>>>8):G(this,t,e,!0),e+2},p.prototype.writeInt16BE=function(t,e,i){return t=+t,e|=0,i||U(this,t,e,2,32767,-32768),p.TYPED_ARRAY_SUPPORT?(this[e]=t>>>8,this[e+1]=255&t):G(this,t,e,!1),e+2},p.prototype.writeInt32LE=function(t,e,i){return t=+t,e|=0,i||U(this,t,e,4,2147483647,-2147483648),p.TYPED_ARRAY_SUPPORT?(this[e]=255&t,this[e+1]=t>>>8,this[e+2]=t>>>16,this[e+3]=t>>>24):H(this,t,e,!0),e+4},p.prototype.writeInt32BE=function(t,e,i){return t=+t,e|=0,i||U(this,t,e,4,2147483647,-2147483648),t<0&&(t=4294967295+t+1),p.TYPED_ARRAY_SUPPORT?(this[e]=t>>>24,this[e+1]=t>>>16,this[e+2]=t>>>8,this[e+3]=255&t):H(this,t,e,!1),e+4},p.prototype.writeFloatLE=function(t,e,i){return z(this,t,e,!0,i)},p.prototype.writeFloatBE=function(t,e,i){return z(this,t,e,!1,i)},p.prototype.writeDoubleLE=function(t,e,i){return N(this,t,e,!0,i)},p.prototype.writeDoubleBE=function(t,e,i){return N(this,t,e,!1,i)},p.prototype.copy=function(t,e,i,r){if(i||(i=0),r||0===r||(r=this.length),e>=t.length&&(e=t.length),e||(e=0),r>0&&r=this.length)throw new RangeError("sourceStart out of bounds");if(r<0)throw new RangeError("sourceEnd out of bounds");r>this.length&&(r=this.length),t.length-e=0;--n)t[n+e]=this[n+i];else if(o<1e3||!p.TYPED_ARRAY_SUPPORT)for(n=0;n>>=0,i=void 0===i?this.length:i>>>0,t||(t=0),"number"==typeof t)for(o=e;o55295&&i<57344){if(!n){if(i>56319){(e-=3)>-1&&o.push(239,191,189);continue}if(s+1===r){(e-=3)>-1&&o.push(239,191,189);continue}n=i;continue}if(i<56320){(e-=3)>-1&&o.push(239,191,189),n=i;continue}i=65536+(n-55296<<10|i-56320)}else n&&(e-=3)>-1&&o.push(239,191,189);if(n=null,i<128){if((e-=1)<0)break;o.push(i)}else 
if(i<2048){if((e-=2)<0)break;o.push(i>>6|192,63&i|128)}else if(i<65536){if((e-=3)<0)break;o.push(i>>12|224,i>>6&63|128,63&i|128)}else{if(!(i<1114112))throw new Error("Invalid code point");if((e-=4)<0)break;o.push(i>>18|240,i>>12&63|128,i>>6&63|128,63&i|128)}}return o}function J(t){return function(t){var e,i,a,h,c,l;o||s();var u=t.length;if(u%4>0)throw new Error("Invalid string. Length must be a multiple of 4");c="="===t[u-2]?2:"="===t[u-1]?1:0,l=new n(3*u/4-c),a=c>0?u-4:u;var f=0;for(e=0,i=0;e>16&255,l[f++]=h>>8&255,l[f++]=255&h;return 2===c?(h=r[t.charCodeAt(e)]<<2|r[t.charCodeAt(e+1)]>>4,l[f++]=255&h):1===c&&(h=r[t.charCodeAt(e)]<<10|r[t.charCodeAt(e+1)]<<4|r[t.charCodeAt(e+2)]>>2,l[f++]=h>>8&255,l[f++]=255&h),l}(function(t){if((t=function(t){return t.trim?t.trim():t.replace(/^\s+|\s+$/g,"")}(t).replace(V,"")).length<2)return"";for(;t.length%4!=0;)t+="=";return t}(t))}function Z(t,e,i,r){for(var n=0;n=e.length||n>=t.length);++n)e[n+i]=t[n];return n}function $(t){return!!t.constructor&&"function"==typeof t.constructor.isBuffer&&t.constructor.isBuffer(t)}function Q(t){if(t.__esModule)return t;var e=Object.defineProperty({},"__esModule",{value:!0});return Object.keys(t).forEach((function(i){var r=Object.getOwnPropertyDescriptor(t,i);Object.defineProperty(e,i,r.get?r:{enumerable:!0,get:function(){return t[i]}})})),e}var tt={},et={},it=Q(t({__proto__:null,default:et},[et]));!function(t){ +/*! Fabric.js Copyright 2008-2015, Printio (Juriy Zaytsev, Maxim Chernyak) */ +var e,i,r,n,o,s,a,h,c,l,u,f,d,g,v,m,y,_,b,x,C,w,S,T=T||{version:"4.6.0"};if(t.fabric=T,"undefined"!=typeof document&&"undefined"!=typeof window)document instanceof("undefined"!=typeof HTMLDocument?HTMLDocument:Document)?T.document=document:T.document=document.implementation.createHTMLDocument(""),T.window=window;else{var O=new it.JSDOM(decodeURIComponent("%3C!DOCTYPE%20html%3E%3Chtml%3E%3Chead%3E%3C%2Fhead%3E%3Cbody%3E%3C%2Fbody%3E%3C%2Fhtml%3E"),{features:{FetchExternalResources:["img"]},resources:"usable"}).window;T.document=O.document,T.jsdomImplForWrapper=it.implForWrapper,T.nodeCanvas=it.Canvas,T.window=O,DOMParser=T.window.DOMParser}if(T.isTouchSupported="ontouchstart"in T.window||"ontouchstart"in T.document||T.window&&T.window.navigator&&T.window.navigator.maxTouchPoints>0,T.isLikelyNode=void 0!==p&&"undefined"==typeof window,T.SHARED_ATTRIBUTES=["display","transform","fill","fill-opacity","fill-rule","opacity","stroke","stroke-dasharray","stroke-linecap","stroke-dashoffset","stroke-linejoin","stroke-miterlimit","stroke-opacity","stroke-width","id","paint-order","vector-effect","instantiated_by_use","clip-path"],T.DPI=96,T.reNum="(?:[-+]?(?:\\d+|\\d*\\.\\d+)(?:[eE][-+]?\\d+)?)",T.commaWsp="(?:\\s+,?\\s*|,\\s*)",T.rePathCommand=/([-+]?((\d+\.\d+)|((\d+)|(\.\d+)))(?:[eE][-+]?\d+)?)/gi,T.reNonWord=/[ \n\.,;!\?\-]/,T.fontPaths={},T.iMatrix=[1,0,0,1,0,0],T.svgNS="http://www.w3.org/2000/svg",T.perfLimitSizeTotal=2097152,T.maxCacheSideLimit=4096,T.minCacheSideLimit=256,T.charWidthsCache={},T.textureSize=2048,T.disableStyleCopyPaste=!1,T.enableGLFiltering=!0,T.devicePixelRatio=T.window.devicePixelRatio||T.window.webkitDevicePixelRatio||T.window.mozDevicePixelRatio||1,T.browserShadowBlurConstant=1,T.arcToSegmentsCache={},T.boundsOfCurveCache={},T.cachesBoundsOfCurve=!0,T.forceGLPutImageData=!1,T.initFilterBackend=function(){return T.enableGLFiltering&&T.isWebglSupported&&T.isWebglSupported(T.textureSize)?(console.log("max texture size: "+T.maxTextureSize),new 
T.WebglFilterBackend({tileSize:T.textureSize})):T.Canvas2dFilterBackend?new T.Canvas2dFilterBackend:void 0},"undefined"!=typeof document&&"undefined"!=typeof window&&(window.fabric=T),void 0===P)var P={};if(function(t){t.modifyEventListener=!1,t.modifySelectors=!1,t.configure=function(e){isFinite(e.modifyEventListener)&&(t.modifyEventListener=e.modifyEventListener),isFinite(e.modifySelectors)&&(t.modifySelectors=e.modifySelectors),!1===d&&t.modifyEventListener&&g(),!1===p&&t.modifySelectors&&v()},t.add=function(t,e,r,n){return i(t,e,r,n,"add")},t.remove=function(t,e,r,n){return i(t,e,r,n,"remove")},t.returnFalse=function(t){return!1},t.stop=function(t){t&&(t.stopPropagation&&t.stopPropagation(),t.cancelBubble=!0,t.cancelBubbleCount=0)},t.prevent=function(t){t&&(t.preventDefault?t.preventDefault():t.preventManipulation?t.preventManipulation():t.returnValue=!1)},t.cancel=function(e){t.stop(e),t.prevent(e)},t.blur=function(){var t=document.activeElement;if(t){var e=document.activeElement.nodeName;"INPUT"!==e&&"TEXTAREA"!==e&&"true"!==t.contentEditable||t.blur&&t.blur()}},t.getEventSupport=function(t,e){if("string"==typeof t&&(e=t,t=window),(e="on"+e)in t)return!0;if(t.setAttribute||(t=document.createElement("div")),t.setAttribute&&t.removeAttribute){t.setAttribute(e,"");var i="function"==typeof t[e];return void 0!==t[e]&&(t[e]=null),t.removeAttribute(e),i}};var e=function(t){if(!t||"object"!=typeof t)return t;var i=new t.constructor;for(var r in t)t[r]&&"object"==typeof t[r]?i[r]=e(t[r]):i[r]=t[r];return i},i=function(o,s,c,d,g,p){if(d=d||{},"[object Object]"===String(o)){var v=o;if(o=v.target,delete v.target,!v.type||!v.listener){for(var m in v){var y=v[m];"function"!=typeof y&&(d[m]=y)}var _={};for(var b in v){m=b.split(",");var x=v[b],C={};for(var w in d)C[w]=d[w];if("function"==typeof x)c=x;else{if("function"!=typeof x.listener)continue;c=x.listener;for(var w in x)"function"!=typeof x[w]&&(C[w]=x[w])}for(var S=0;SO&&window.clearInterval(k),document.querySelector(o)&&(window.clearInterval(k),setTimeout(c,1))}),E);return}s="load",o=window}if("string"==typeof o){if(0===(o=document.querySelectorAll(o)).length)return n("Missing target on listener!",arguments);1===o.length&&(o=o[0])}var A,D={};if(o.length>0&&o!==window){for(var M=0,j=o.length;M=r.maxFingers){var d=[];for(var f in r.tracker)d.push(f);return i.identifier=d.join(","),s}for(var g in a)if(a[g].up){delete a[g],o(u,f),r.cancel=!0;break}if(a[f])continue;o(u,f)}else a=r.tracker={},i.bbox=r.bbox=t.getBoundingBox(r.target),r.fingers=0,r.cancel=!1,o(u,f)}d=[];for(var f in r.tracker)d.push(f);return i.identifier=d.join(","),s},t.pointerEnd=function(t,e,i,r){for(var n=t.touches||[],o=n.length,s={},a=0;ao.x1&&a>o.y1&&h0&&l0&&u0?x:-x,void 0!==v.DEG2&&(x>0?v.rotation+=v.DEG1-v.DEG2:v.rotation-=v.DEG1-v.DEG2,g+=v.rotation),f.push(v.move)}}n.touches=f,n.fingers=r.fingers,n.scale=d/r.fingers,n.rotation=g/r.fingers,n.state="change",r.listener(o,n)}},r.onPointerUp=function(e){var i=r.fingers;t.pointerEnd(e,n,r)&&(P.remove(r.doc,"mousemove",r.onPointerMove),P.remove(r.doc,"mouseup",r.onPointerUp)),i===r.minFingers&&r.fingers4){var g=h*u/d,p=Math.abs(g+f.value);f.value&&p<200?(f.value=g,f.count++,3===f.count&&(t.listener(o,e),i=h,f.value=0,f.count=0)):(f.value=g,f.count=1)}}else t.listener(o,e)};if(window.addEventListener)return window.addEventListener("devicemotion",o,!1),e},P.Gesture=P.Gesture||{},P.Gesture._gestureHandlers=P.Gesture._gestureHandlers||{},P.Gesture._gestureHandlers.shake=t.shake,t}(P.proxy),void 0===P)P={};if(void 
0===P.proxy&&(P.proxy={}),P.proxy=function(t){var e=Math.PI/180;return t.swipe=function(i){i.snap=i.snap||90,i.threshold=i.threshold||1,i.gesture=i.gesture||"swipe",i.onPointerDown=function(e){t.pointerStart(e,r,i)&&(P.add(i.doc,"mousemove",i.onPointerMove).listener(e),P.add(i.doc,"mouseup",i.onPointerUp))},i.onPointerMove=function(e){for(var r=e.changedTouches||t.getCoords(e),n=r.length,o=0;o=_&&o>i.threshold&&(a.x/=l,a.y/=l,r.start=a,r.x=h/l,r.y=c/l,r.angle=-(((s/i.snap+.5>>0)*i.snap||360)-360),r.velocity=o,r.fingers=_,r.state="swipe",i.listener(n,r))}};var r=t.pointerSetup(i);return P.add(i.target,"mousedown",i.onPointerDown),r},P.Gesture=P.Gesture||{},P.Gesture._gestureHandlers=P.Gesture._gestureHandlers||{},P.Gesture._gestureHandlers.swipe=t.swipe,t}(P.proxy),void 0===P)P={};if(void 0===P.proxy&&(P.proxy={}),P.proxy=function(t){return t.longpress=function(e){return e.gesture="longpress",t.tap(e)},t.tap=function(e){var i,r;e.delay=e.delay||500,e.timeout=e.timeout||250,e.driftDeviance=e.driftDeviance||10,e.gesture=e.gesture||"tap",e.onPointerDown=function(o){if(t.pointerStart(o,n,e)){if(i=(new Date).getTime(),P.add(e.doc,"mousemove",e.onPointerMove).listener(o),P.add(e.doc,"mouseup",e.onPointerUp),"longpress"!==e.gesture)return;r=setTimeout((function(){if(!(o.cancelBubble&&++o.cancelBubbleCount>1)){var t=0;for(var i in e.tracker){var r=e.tracker[i];if(!0===r.end)return;if(e.cancel)return;t++}e.minFingers<=t&&e.maxFingers>=t&&(n.state="start",n.fingers=t,n.x=r.start.x,n.y=r.start.y,e.listener(o,n))}}),e.delay)}},e.onPointerMove=function(i){for(var r=e.bbox,n=i.changedTouches||t.getCoords(i),o=n.length,s=0;s0&&l0&&u1)return;if("longpress"===e.gesture)return void("start"===n.state&&(n.state="end",e.listener(o,n)));if(e.cancel)return;if((new Date).getTime()-i>e.timeout)return;var s=e.gestureFingers;e.minFingers<=s&&e.maxFingers>=s&&(n.state="tap",n.fingers=e.gestureFingers,e.listener(o,n))}};var n=t.pointerSetup(e);return P.add(e.target,"mousedown",e.onPointerDown),n},P.Gesture=P.Gesture||{},P.Gesture._gestureHandlers=P.Gesture._gestureHandlers||{},P.Gesture._gestureHandlers.tap=t.tap,P.Gesture._gestureHandlers.longpress=t.longpress,t}(P.proxy),void 0===P)P={};if(void 0===P.proxy&&(P.proxy={}),P.proxy=function(t){return t.wheelPreventElasticBounce=function(t){t&&("string"==typeof t&&(t=document.querySelector(t)),P.add(t,"wheel",(function(t,e){e.preventElasticBounce(),P.stop(t)})))},t.wheel=function(t){var e,i=t.timeout||150,r=0,n={gesture:"wheel",state:"start",wheelDelta:0,target:t.target,listener:t.listener,preventElasticBounce:function(t){var e=this.target,i=e.scrollTop;(i+e.offsetHeight===e.scrollHeight&&this.wheelDelta<=0||0===i&&this.wheelDelta>=0)&&P.cancel(t),P.stop(t)},add:function(){t.target[s](h,o,!1)},remove:function(){t.target[a](h,o,!1)}},o=function(o){o=o||window.event,n.state=r++?"change":"start",n.wheelDelta=o.detail?-20*o.detail:o.wheelDelta,t.listener(o,n),clearTimeout(e),e=setTimeout((function(){r=0,n.state="end",n.wheelDelta=0,t.listener(o,n)}),i)},s=document.addEventListener?"addEventListener":"attachEvent",a=document.removeEventListener?"removeEventListener":"detachEvent",h=P.getEventSupport("mousewheel")?"mousewheel":"DOMMouseScroll";return t.target[s](h,o,!1),n},P.Gesture=P.Gesture||{},P.Gesture._gestureHandlers=P.Gesture._gestureHandlers||{},P.Gesture._gestureHandlers.wheel=t.wheel,t}(P.proxy),void 0===E)var E={};function k(t,e){var i=t.canvas,r=e.targetCanvas,n=r.getContext("2d");n.translate(0,r.height),n.scale(1,-1);var 
o=i.height-r.height;n.drawImage(i,0,o,r.width,r.height,0,0,r.width,r.height)}function A(t,e){var i=e.targetCanvas.getContext("2d"),r=e.destinationWidth,n=e.destinationHeight,o=r*n*4,s=new Uint8Array(this.imageBuffer,0,o),a=new Uint8ClampedArray(this.imageBuffer,0,o);t.readPixels(0,0,r,n,t.RGBA,t.UNSIGNED_BYTE,s);var h=new ImageData(a,r,n);i.putImageData(h,0,0)}void 0===E.proxy&&(E.proxy={}),E.proxy=function(t){return t.orientation=function(t){var e={gesture:"orientationchange",previous:null,current:window.orientation,target:t.target,listener:t.listener,remove:function(){window.removeEventListener("orientationchange",i,!1)}},i=function(i){e.previous=e.current,e.current=window.orientation,null===e.previous||e.previous==e.current||t.listener(i,e)};return window.DeviceOrientationEvent&&window.addEventListener("orientationchange",i,!1),e},E.Gesture=E.Gesture||{},E.Gesture._gestureHandlers=E.Gesture._gestureHandlers||{},E.Gesture._gestureHandlers.orientation=t.orientation,t}(E.proxy),function(){function t(t,e){if(this.__eventListeners[t]){var i=this.__eventListeners[t];e?i[i.indexOf(e)]=!1:T.util.array.fill(i,!1)}}function e(t,e){var i=function(){e.apply(this,arguments),this.off(t,i)}.bind(this);this.on(t,i)}T.Observable={fire:function(t,e){if(!this.__eventListeners)return this;var i=this.__eventListeners[t];if(!i)return this;for(var r=0,n=i.length;r-1||!!e&&this._objects.some((function(e){return"function"==typeof e.contains&&e.contains(t,!0)}))},complexity:function(){return this._objects.reduce((function(t,e){return t+=e.complexity?e.complexity():0}),0)}},T.CommonMethods={_setOptions:function(t){for(var e in t)this.set(e,t[e])},_initGradient:function(t,e){!t||!t.colorStops||t instanceof T.Gradient||this.set(e,new T.Gradient(t))},_initPattern:function(t,e,i){!t||!t.source||t instanceof T.Pattern?i&&i():this.set(e,new T.Pattern(t,i))},_setObject:function(t){for(var e in t)this._set(e,t[e])},set:function(t,e){return"object"==typeof t?this._setObject(t):this._set(t,e),this},_set:function(t,e){this[t]=e},toggle:function(t){var e=this.get(t);return"boolean"==typeof e&&this.set(t,!e),this},get:function(t){return this[t]}},n=t,o=Math.sqrt,s=Math.atan2,a=Math.pow,h=Math.PI/180,c=Math.PI/2,T.util={cos:function(t){if(0===t)return 1;switch(t<0&&(t=-t),t/c){case 1:case 3:return 0;case 2:return-1}return Math.cos(t)},sin:function(t){if(0===t)return 0;var e=1;switch(t<0&&(e=-1),t/c){case 1:return e;case 2:return 0;case 3:return-e}return Math.sin(t)},removeFromArray:function(t,e){var i=t.indexOf(e);return-1!==i&&t.splice(i,1),t},getRandomInt:function(t,e){return Math.floor(Math.random()*(e-t+1))+t},degreesToRadians:function(t){return t*h},radiansToDegrees:function(t){return t/h},rotatePoint:function(t,e,i){var r=new T.Point(t.x-e.x,t.y-e.y),n=T.util.rotateVector(r,i);return new T.Point(n.x,n.y).addEquals(e)},rotateVector:function(t,e){var i=T.util.sin(e),r=T.util.cos(e);return{x:t.x*r-t.y*i,y:t.x*i+t.y*r}},transformPoint:function(t,e,i){return i?new T.Point(e[0]*t.x+e[2]*t.y,e[1]*t.x+e[3]*t.y):new T.Point(e[0]*t.x+e[2]*t.y+e[4],e[1]*t.x+e[3]*t.y+e[5])},makeBoundingBoxFromPoints:function(t,e){if(e)for(var i=0;ie;)(e+=a[d++%f])>l&&(e=l),t[g?"lineTo":"moveTo"](e,0),g=!g;t.restore()},createCanvasElement:function(){return T.document.createElement("canvas")},copyCanvasElement:function(t){var e=T.util.createCanvasElement();return e.width=t.width,e.height=t.height,e.getContext("2d").drawImage(t,0,0),e},toDataURL:function(t,e,i){return t.toDataURL("image/"+e,i)},createImage:function(){return 
T.document.createElement("img")},multiplyTransformMatrices:function(t,e,i){return[t[0]*e[0]+t[2]*e[1],t[1]*e[0]+t[3]*e[1],t[0]*e[2]+t[2]*e[3],t[1]*e[2]+t[3]*e[3],i?0:t[0]*e[4]+t[2]*e[5]+t[4],i?0:t[1]*e[4]+t[3]*e[5]+t[5]]},qrDecompose:function(t){var e=s(t[1],t[0]),i=a(t[0],2)+a(t[1],2),r=o(i),n=(t[0]*t[3]-t[2]*t[1])/r,c=s(t[0]*t[2]+t[1]*t[3],i);return{angle:e/h,scaleX:r,scaleY:n,skewX:c/h,skewY:0,translateX:t[4],translateY:t[5]}},calcRotateMatrix:function(t){if(!t.angle)return T.iMatrix.concat();var e=T.util.degreesToRadians(t.angle),i=T.util.cos(e),r=T.util.sin(e);return[i,r,-r,i,0,0]},calcDimensionsMatrix:function(t){var e=void 0===t.scaleX?1:t.scaleX,i=void 0===t.scaleY?1:t.scaleY,r=[t.flipX?-e:e,0,0,t.flipY?-i:i,0,0],n=T.util.multiplyTransformMatrices,o=T.util.degreesToRadians;return t.skewX&&(r=n(r,[1,0,Math.tan(o(t.skewX)),1],!0)),t.skewY&&(r=n(r,[1,Math.tan(o(t.skewY)),0,1],!0)),r},composeMatrix:function(t){var e=[1,0,0,1,t.translateX||0,t.translateY||0],i=T.util.multiplyTransformMatrices;return t.angle&&(e=i(e,T.util.calcRotateMatrix(t))),(1!==t.scaleX||1!==t.scaleY||t.skewX||t.skewY||t.flipX||t.flipY)&&(e=i(e,T.util.calcDimensionsMatrix(t))),e},resetObjectTransform:function(t){t.scaleX=1,t.scaleY=1,t.skewX=0,t.skewY=0,t.flipX=!1,t.flipY=!1,t.rotate(0)},saveObjectTransform:function(t){return{scaleX:t.scaleX,scaleY:t.scaleY,skewX:t.skewX,skewY:t.skewY,angle:t.angle,left:t.left,flipX:t.flipX,flipY:t.flipY,top:t.top}},isTransparent:function(t,e,i,r){r>0&&(e>r?e-=r:e=0,i>r?i-=r:i=0);var n,o=!0,s=t.getImageData(e,i,2*r||1,2*r||1),a=s.data.length;for(n=3;n0?A-=2*c:1===a&&A<0&&(A+=2*c);for(var D=Math.ceil(Math.abs(A/c*2)),M=[],j=A/D,F=8/3*Math.sin(j/4)*Math.sin(j/4)/Math.sin(j/2),L=k+j,I=0;I=n?o-n:2*Math.PI-(n-o)}function s(e,i,r,n,o,s,a,h){var c;if(T.cachesBoundsOfCurve&&(c=t.call(arguments),T.boundsOfCurveCache[c]))return T.boundsOfCurveCache[c];var l,u,f,d,g,p,v,m,y=Math.sqrt,_=Math.min,b=Math.max,x=Math.abs,C=[],w=[[],[]];u=6*e-12*r+6*o,l=-3*e+9*r-9*o+3*a,f=3*r-3*e;for(var S=0;S<2;++S)if(S>0&&(u=6*i-12*n+6*s,l=-3*i+9*n-9*s+3*h,f=3*n-3*i),x(l)<1e-12){if(x(u)<1e-12)continue;0<(d=-f/u)&&d<1&&C.push(d)}else(v=u*u-4*f*l)<0||(0<(g=(-u+(m=y(v)))/(2*l))&&g<1&&C.push(g),0<(p=(-u-m)/(2*l))&&p<1&&C.push(p));for(var O,P,E,k=C.length,A=k;k--;)O=(E=1-(d=C[k]))*E*E*e+3*E*E*d*r+3*E*d*d*o+d*d*d*a,w[0][k]=O,P=E*E*E*i+3*E*E*d*n+3*E*d*d*s+d*d*d*h,w[1][k]=P;w[0][A]=e,w[1][A]=i,w[0][A+1]=a,w[1][A+1]=h;var D=[{x:_.apply(null,w[0]),y:_.apply(null,w[1])},{x:b.apply(null,w[0]),y:b.apply(null,w[1])}];return T.cachesBoundsOfCurve&&(T.boundsOfCurveCache[c]=D),D}function a(t,e,i){for(var r=i[1],o=i[2],s=i[3],a=i[4],h=i[5],c=n(i[6]-t,i[7]-e,r,o,a,h,s),l=0,u=c.length;l1e-4;)i=a(o),n=o,(r=h(c.x,c.y,i.x,i.y))+s>e?o-=l/=2:(c=i,o+=l,s+=r);return i.angle=u(n),i}function p(t){for(var e,i,r,n,o=0,s=t.length,a=0,g=0,p=0,v=0,m=[],y=0;yC)for(var S=1,O=p.length;S2;for(e=e||0,c&&(a=t[2].xt[i-2].x?1:n.x===t[i-2].x?0:-1,h=n.y>t[i-2].y?1:n.y===t[i-2].y?0:-1),r.push(["L",n.x+a*e,n.y+h*e]),r},T.util.getPathSegmentsInfo=p,T.util.getBoundsOfCurve=s,T.util.getPointOnPath=function(t,e,i){i||(i=p(t));for(var r=0;e-i[r].length>0&&r=e}))}}}(),function(){function t(e,i,r){if(r)if(!T.isLikelyNode&&i instanceof Element)e=i;else if(i instanceof Array){e=[];for(var n=0,o=i.length;n57343)return t.charAt(e);if(55296<=i&&i<=56319){if(t.length<=e+1)throw"High surrogate without following low surrogate";var r=t.charCodeAt(e+1);if(56320>r||r>57343)throw"High surrogate without following low surrogate";return t.charAt(e)+t.charAt(e+1)}if(0===e)throw"Low 
surrogate without preceding high surrogate";var n=t.charCodeAt(e-1);if(55296>n||n>56319)throw"Low surrogate without preceding high surrogate";return!1}T.util.string={camelize:function(t){return t.replace(/-+(.)?/g,(function(t,e){return e?e.toUpperCase():""}))},capitalize:function(t,e){return t.charAt(0).toUpperCase()+(e?t.slice(1):t.slice(1).toLowerCase())},escapeXml:function(t){return t.replace(/&/g,"&").replace(/"/g,""").replace(/'/g,"'").replace(//g,">")},graphemeSplit:function(e){var i,r=0,n=[];for(r=0;r-1?t.prototype[n]=function(t){return function(){var i=this.constructor.superclass;this.constructor.superclass=r;var n=e[t].apply(this,arguments);if(this.constructor.superclass=i,"initialize"!==t)return n}}(n):t.prototype[n]=e[n],i&&(e.toString!==Object.prototype.toString&&(t.prototype.toString=e.toString),e.valueOf!==Object.prototype.valueOf&&(t.prototype.valueOf=e.valueOf))};function n(){}function o(e){for(var i=null,r=this;r.constructor.superclass;){var n=r.constructor.superclass.prototype[e];if(r[e]!==n){i=n;break}r=r.constructor.superclass.prototype}return i?arguments.length>1?i.apply(this,t.call(arguments,1)):i.call(this):console.log("tried to callSuper "+e+", method not found in prototype chain",this)}T.util.createClass=function(){var i=null,s=t.call(arguments,0);function a(){this.initialize.apply(this,arguments)}"function"==typeof s[0]&&(i=s.shift()),a.superclass=i,a.subclasses=[],i&&(n.prototype=i.prototype,a.prototype=new n,i.subclasses.push(a));for(var h=0,c=s.length;h-1||"touch"===t.pointerType},f=T.document.createElement("div"),d="string"==typeof f.style.opacity,g="string"==typeof f.style.filter,v=/alpha\s*\(\s*opacity\s*=\s*([^\)]+)\)/,m=function(t){return t},d?m=function(t,e){return t.style.opacity=e,t}:g&&(m=function(t,e){var i=t.style;return t.currentStyle&&!t.currentStyle.hasLayout&&(i.zoom=1),v.test(i.filter)?(e=e>=.9999?"":"alpha(opacity="+100*e+")",i.filter=i.filter.replace(v,e)):i.filter+=" alpha(opacity="+100*e+")",t}),T.util.setStyle=function(t,e){var i=t.style;if(!i)return t;if("string"==typeof e)return t.style.cssText+=";"+e,e.indexOf("opacity")>-1?m(t,e.match(/opacity:\s*(\d?\.?\d*)/)[1]):t;for(var r in e)"opacity"===r?m(t,e[r]):i["float"===r||"cssFloat"===r?void 0===i.styleFloat?"cssFloat":"styleFloat":r]=e[r];return t},function(){var t=Array.prototype.slice;var e,i,r,n,o=function(e){return t.call(e,0)};try{e=o(T.document.childNodes)instanceof Array}catch(t){}function s(t,e){var i=T.document.createElement(t);for(var r in e)"class"===r?i.className=e[r]:"for"===r?i.htmlFor=e[r]:i.setAttribute(r,e[r]);return i}function a(t){for(var e=0,i=0,r=T.document.documentElement,n=T.document.body||{scrollLeft:0,scrollTop:0};t&&(t.parentNode||t.host)&&((t=t.parentNode||t.host)===T.document?(e=n.scrollLeft||r.scrollLeft||0,i=n.scrollTop||r.scrollTop||0):(e+=t.scrollLeft||0,i+=t.scrollTop||0),1!==t.nodeType||"fixed"!==t.style.position););return{left:e,top:i}}e||(o=function(t){for(var e=new Array(t.length),i=t.length;i--;)e[i]=t[i];return e}),i=T.document.defaultView&&T.document.defaultView.getComputedStyle?function(t,e){var i=T.document.defaultView.getComputedStyle(t,null);return i?i[e]:void 0}:function(t,e){var i=t.style[e];return!i&&t.currentStyle&&(i=t.currentStyle[e]),i},r=T.document.documentElement.style,n="userSelect"in r?"userSelect":"MozUserSelect"in r?"MozUserSelect":"WebkitUserSelect"in r?"WebkitUserSelect":"KhtmlUserSelect"in r?"KhtmlUserSelect":"",T.util.makeElementUnselectable=function(t){return void 
0!==t.onselectstart&&(t.onselectstart=T.util.falseFunction),n?t.style[n]="none":"string"==typeof t.unselectable&&(t.unselectable="on"),t},T.util.makeElementSelectable=function(t){return void 0!==t.onselectstart&&(t.onselectstart=null),n?t.style[n]="":"string"==typeof t.unselectable&&(t.unselectable=""),t},T.util.setImageSmoothing=function(t,e){t.imageSmoothingEnabled=t.imageSmoothingEnabled||t.webkitImageSmoothingEnabled||t.mozImageSmoothingEnabled||t.msImageSmoothingEnabled||t.oImageSmoothingEnabled,t.imageSmoothingEnabled=e},T.util.getById=function(t){return"string"==typeof t?T.document.getElementById(t):t},T.util.toArray=o,T.util.addClass=function(t,e){t&&-1===(" "+t.className+" ").indexOf(" "+e+" ")&&(t.className+=(t.className?" ":"")+e)},T.util.makeElement=s,T.util.wrapElement=function(t,e,i){return"string"==typeof e&&(e=s(e,i)),t.parentNode&&t.parentNode.replaceChild(e,t),e.appendChild(t),e},T.util.getScrollLeftTop=a,T.util.getElementOffset=function(t){var e,r,n=t&&t.ownerDocument,o={left:0,top:0},s={left:0,top:0},h={borderLeftWidth:"left",borderTopWidth:"top",paddingLeft:"left",paddingTop:"top"};if(!n)return s;for(var c in h)s[h[c]]+=parseInt(i(t,c),10)||0;return e=n.documentElement,void 0!==t.getBoundingClientRect&&(o=t.getBoundingClientRect()),r=a(t),{left:o.left+r.left-(e.clientLeft||0)+s.left,top:o.top+r.top-(e.clientTop||0)+s.top}},T.util.getNodeCanvas=function(t){var e=T.jsdomImplForWrapper(t);return e._canvas||e._image},T.util.cleanUpJsdomNode=function(t){if(T.isLikelyNode){var e=T.jsdomImplForWrapper(t);e&&(e._image=null,e._canvas=null,e._currentSrc=null,e._attributes=null,e._classList=null)}}}(),function(){function t(){}T.util.request=function(e,i){i||(i={});var r=i.method?i.method.toUpperCase():"GET",n=i.onComplete||function(){},o=new T.window.XMLHttpRequest,s=i.body||i.parameters;return o.onreadystatechange=function(){4===o.readyState&&(n(o),o.onreadystatechange=t)},"GET"===r&&(s=null,"string"==typeof i.parameters&&(e=function(t,e){return t+(/\?/.test(t)?"&":"?")+e}(e,i.parameters))),o.open(r,e,!0),"POST"!==r&&"PUT"!==r||o.setRequestHeader("Content-Type","application/x-www-form-urlencoded"),o.send(s),o}}(),T.log=console.log,T.warn=console.warn,function(){function t(){return!1}function e(t,e,i,r){return-i*Math.cos(t/r*(Math.PI/2))+i+e}var i=T.window.requestAnimationFrame||T.window.webkitRequestAnimationFrame||T.window.mozRequestAnimationFrame||T.window.oRequestAnimationFrame||T.window.msRequestAnimationFrame||function(t){return T.window.setTimeout(t,1e3/60)},r=T.window.cancelAnimationFrame||T.window.clearTimeout;function n(){return i.apply(T.window,arguments)}T.util.animate=function(i){var r=!1;return n((function(o){i||(i={});var s,a=o||+new Date,h=i.duration||500,c=a+h,l=i.onChange||t,u=i.abort||t,f=i.onComplete||t,d=i.easing||e,g="startValue"in i?i.startValue:0,p="endValue"in i?i.endValue:100,v=i.byValue||p-g;i.onStart&&i.onStart(),function t(e){var i=(s=e||+new Date)>c?h:s-a,o=i/h,m=d(i,g,v,h),y=Math.abs((m-g)/v);if(!r){if(!u(m,y,o))return s>c?(l(p,1,1),void f(p,1,1)):(l(m,y,o),void n(t));f(p,1,1)}}(a)})),function(){r=!0}},T.util.requestAnimFrame=n,T.util.cancelAnimFrame=function(){return r.apply(T.window,arguments)}}(),function(){function t(t,e,i){var r="rgba("+parseInt(t[0]+i*(e[0]-t[0]),10)+","+parseInt(t[1]+i*(e[1]-t[1]),10)+","+parseInt(t[2]+i*(e[2]-t[2]),10);return r+=","+(t&&e?parseFloat(t[3]+i*(e[3]-t[3])):1),r+=")"}T.util.animateColor=function(e,i,r,n){var o=new T.Color(e).getSource(),s=new T.Color(i).getSource(),a=n.onComplete,h=n.onChange;return 
n=n||{},T.util.animate(T.util.object.extend(n,{duration:r||500,startValue:o,endValue:s,byValue:s,easing:function(e,i,r,o){return t(i,r,n.colorEasing?n.colorEasing(e,o):1-Math.cos(e/o*(Math.PI/2)))},onComplete:function(e,i,r){if(a)return a(t(s,s,0),i,r)},onChange:function(e,i,r){if(h){if(Array.isArray(e))return h(t(e,e,0),i,r);h(e,i,r)}}}))}}(),function(){function t(t,e,i,r){return t-1&&l>-1&&l-1)&&(i="stroke")}else{if("href"===t||"xlink:href"===t||"font"===t)return i;if("imageSmoothing"===t)return"optimizeQuality"===i;a=h?i.map(o):o(i,n)}}else i="";return!h&&isNaN(a)?i:a}function d(t){return new RegExp("^("+t.join("|")+")\\b","i")}function g(t,e){var i,r,n,o,s=[];for(n=0,o=e.length;n1;)h.shift(),c=e.util.multiplyTransformMatrices(c,h[0]);return c}}();var y=new RegExp("^\\s*("+e.reNum+"+)\\s*,?\\s*("+e.reNum+"+)\\s*,?\\s*("+e.reNum+"+)\\s*,?\\s*("+e.reNum+"+)\\s*$");function _(t){if(!e.svgViewBoxElementsRegEx.test(t.nodeName))return{};var i,r,n,s,a,h,c=t.getAttribute("viewBox"),l=1,u=1,f=t.getAttribute("width"),d=t.getAttribute("height"),g=t.getAttribute("x")||0,p=t.getAttribute("y")||0,v=t.getAttribute("preserveAspectRatio")||"",m=!c||!(c=c.match(y)),_=!f||!d||"100%"===f||"100%"===d,b=m&&_,x={},C="",w=0,S=0;if(x.width=0,x.height=0,x.toBeParsed=b,m&&(g||p)&&t.parentNode&&"#document"!==t.parentNode.nodeName&&(C=" translate("+o(g)+" "+o(p)+") ",a=(t.getAttribute("transform")||"")+C,t.setAttribute("transform",a),t.removeAttribute("x"),t.removeAttribute("y")),b)return x;if(m)return x.width=o(f),x.height=o(d),x;if(i=-parseFloat(c[1]),r=-parseFloat(c[2]),n=parseFloat(c[3]),s=parseFloat(c[4]),x.minX=i,x.minY=r,x.viewBoxWidth=n,x.viewBoxHeight=s,_?(x.width=n,x.height=s):(x.width=o(f),x.height=o(d),l=x.width/n,u=x.height/s),"none"!==(v=e.util.parsePreserveAspectRatioAttribute(v)).alignX&&("meet"===v.meetOrSlice&&(u=l=l>u?u:l),"slice"===v.meetOrSlice&&(u=l=l>u?l:u),w=x.width-n*l,S=x.height-s*l,"Mid"===v.alignX&&(w/=2),"Mid"===v.alignY&&(S/=2),"Min"===v.alignX&&(w=0),"Min"===v.alignY&&(S=0)),1===l&&1===u&&0===i&&0===r&&0===g&&0===p)return x;if((g||p)&&"#document"!==t.parentNode.nodeName&&(C=" translate("+o(g)+" "+o(p)+") "),a=C+" matrix("+l+" 0 0 "+u+" "+(i*l+w)+" "+(r*u+S)+") ","svg"===t.nodeName){for(h=t.ownerDocument.createElementNS(e.svgNS,"g");t.firstChild;)h.appendChild(t.firstChild);t.appendChild(h)}else(h=t).removeAttribute("x"),h.removeAttribute("y"),a=h.getAttribute("transform")+a;return h.setAttribute("transform",a),x}function b(t,e){var i="xlink:href",r=m(t,e.getAttribute(i).substr(1));if(r&&r.getAttribute(i)&&b(t,r),["gradientTransform","x1","x2","y1","y2","gradientUnits","cx","cy","r","fx","fy"].forEach((function(t){r&&!e.hasAttribute(t)&&r.hasAttribute(t)&&e.setAttribute(t,r.getAttribute(t))})),!e.children.length)for(var n=r.cloneNode(!0);n.firstChild;)e.appendChild(n.firstChild);e.removeAttribute(i)}e.parseSVGDocument=function(t,i,n,o){if(t){!function(t){for(var i=g(t,["use","svg:use"]),r=0;i.length&&rt.x&&this.y>t.y},gte:function(t){return this.x>=t.x&&this.y>=t.y},lerp:function(t,e){return void 0===e&&(e=.5),e=Math.max(Math.min(1,e),0),new i(this.x+(t.x-this.x)*e,this.y+(t.y-this.y)*e)},distanceFrom:function(t){var e=this.x-t.x,i=this.y-t.y;return Math.sqrt(e*e+i*i)},midPointFrom:function(t){return this.lerp(t)},min:function(t){return new i(Math.min(this.x,t.x),Math.min(this.y,t.y))},max:function(t){return new i(Math.max(this.x,t.x),Math.max(this.y,t.y))},toString:function(){return this.x+","+this.y},setXY:function(t,e){return this.x=t,this.y=e,this},setX:function(t){return 
this.x=t,this},setY:function(t){return this.y=t,this},setFromPoint:function(t){return this.x=t.x,this.y=t.y,this},swap:function(t){var e=this.x,i=this.y;this.x=t.x,this.y=t.y,t.x=e,t.y=i},clone:function(){return new i(this.x,this.y)}})}(t),function(t){var e=t.fabric||(t.fabric={});function i(t){this.status=t,this.points=[]}e.Intersection?e.warn("fabric.Intersection is already defined"):(e.Intersection=i,e.Intersection.prototype={constructor:i,appendPoint:function(t){return this.points.push(t),this},appendPoints:function(t){return this.points=this.points.concat(t),this}},e.Intersection.intersectLineLine=function(t,r,n,o){var s,a=(o.x-n.x)*(t.y-n.y)-(o.y-n.y)*(t.x-n.x),h=(r.x-t.x)*(t.y-n.y)-(r.y-t.y)*(t.x-n.x),c=(o.y-n.y)*(r.x-t.x)-(o.x-n.x)*(r.y-t.y);if(0!==c){var l=a/c,u=h/c;0<=l&&l<=1&&0<=u&&u<=1?(s=new i("Intersection")).appendPoint(new e.Point(t.x+l*(r.x-t.x),t.y+l*(r.y-t.y))):s=new i}else s=new i(0===a||0===h?"Coincident":"Parallel");return s},e.Intersection.intersectLinePolygon=function(t,e,r){var n,o,s,a,h=new i,c=r.length;for(a=0;a0&&(h.status="Intersection"),h},e.Intersection.intersectPolygonPolygon=function(t,e){var r,n=new i,o=t.length;for(r=0;r0&&(n.status="Intersection"),n},e.Intersection.intersectPolygonRectangle=function(t,r,n){var o=r.min(n),s=r.max(n),a=new e.Point(s.x,o.y),h=new e.Point(o.x,s.y),c=i.intersectLinePolygon(o,a,t),l=i.intersectLinePolygon(a,s,t),u=i.intersectLinePolygon(s,h,t),f=i.intersectLinePolygon(h,o,t),d=new i;return d.appendPoints(c.points),d.appendPoints(l.points),d.appendPoints(u.points),d.appendPoints(f.points),d.points.length>0&&(d.status="Intersection"),d})}(t),function(t){var e=t.fabric||(t.fabric={});function i(t){t?this._tryParsingColor(t):this.setSource([0,0,0,1])}function r(t,e,i){return i<0&&(i+=1),i>1&&(i-=1),i<1/6?t+6*(e-t)*i:i<.5?e:i<2/3?t+(e-t)*(2/3-i)*6:t}e.Color?e.warn("fabric.Color is already defined."):(e.Color=i,e.Color.prototype={_tryParsingColor:function(t){var e;t in i.colorNameMap&&(t=i.colorNameMap[t]),"transparent"===t&&(e=[255,255,255,0]),e||(e=i.sourceFromHex(t)),e||(e=i.sourceFromRgb(t)),e||(e=i.sourceFromHsl(t)),e||(e=[0,0,0,1]),e&&this.setSource(e)},_rgbToHsl:function(t,i,r){t/=255,i/=255,r/=255;var n,o,s,a=e.util.array.max([t,i,r]),h=e.util.array.min([t,i,r]);if(s=(a+h)/2,a===h)n=o=0;else{var c=a-h;switch(o=s>.5?c/(2-a-h):c/(a+h),a){case t:n=(i-r)/c+(i0)-(t<0)||+t};function d(t,e){var i=t.angle+u(Math.atan2(e.y,e.x))+360;return Math.round(i%360/45)}function g(t,i){var r=i.transform.target,n=r.canvas,o=e.util.object.clone(i);o.target=r,n&&n.fire("object:"+t,o),r.fire(t,i)}function p(t,e){var i=e.canvas,r=t[i.uniScaleKey];return i.uniformScaling&&!r||!i.uniformScaling&&r}function v(t){return t.originX===c&&t.originY===c}function m(t,e,i){var r=t.lockScalingX,n=t.lockScalingY;return!(!r||!n)||(!(e||!r&&!n||!i)||(!(!r||"x"!==e)||!(!n||"y"!==e)))}function y(t,e,i,r){return{e:t,transform:e,pointer:{x:i,y:r}}}function _(t){return function(e,i,r,n){var o=i.target,s=o.getCenterPoint(),a=o.translateToOriginPoint(s,i.originX,i.originY),h=t(e,i,r,n);return o.setPositionByOrigin(a,i.originX,i.originY),h}}function b(t,e){return function(i,r,n,o){var s=e(i,r,n,o);return s&&g(t,y(i,r,n,o)),s}}function x(t,i,r,n,o){var s=t.target,a=s.controls[t.corner],h=s.canvas.getZoom(),c=s.padding/h,l=s.toLocalPoint(new e.Point(n,o),i,r);return l.x>=c&&(l.x-=c),l.x<=-c&&(l.x+=c),l.y>=c&&(l.y-=c),l.y<=c&&(l.y+=c),l.x-=a.offsetX,l.y-=a.offsetY,l}function C(t){return t.flipX!==t.flipY}function w(t,e,i,r,n){if(0!==t[e]){var 
o=n/t._getTransformedDimensions()[r]*t[i];t.set(i,o)}}function S(t,e,i,r){var n,c=e.target,l=c._getTransformedDimensions(0,c.skewY),f=x(e,e.originX,e.originY,i,r),d=Math.abs(2*f.x)-l.x,g=c.skewX;d<2?n=0:(n=u(Math.atan2(d/c.scaleX,l.y/c.scaleY)),e.originX===o&&e.originY===h&&(n=-n),e.originX===a&&e.originY===s&&(n=-n),C(c)&&(n=-n));var p=g!==n;if(p){var v=c._getTransformedDimensions().y;c.set("skewX",n),w(c,"skewY","scaleY","y",v)}return p}function T(t,e,i,r){var n,c=e.target,l=c._getTransformedDimensions(c.skewX,0),f=x(e,e.originX,e.originY,i,r),d=Math.abs(2*f.y)-l.y,g=c.skewY;d<2?n=0:(n=u(Math.atan2(d/c.scaleY,l.x/c.scaleX)),e.originX===o&&e.originY===h&&(n=-n),e.originX===a&&e.originY===s&&(n=-n),C(c)&&(n=-n));var p=g!==n;if(p){var v=c._getTransformedDimensions().x;c.set("skewY",n),w(c,"skewX","scaleX","x",v)}return p}function O(t,e,i,r,n){n=n||{};var o,s,a,h,c,u,d=e.target,g=d.lockScalingX,y=d.lockScalingY,_=n.by,b=p(t,d),C=m(d,_,b),w=e.gestureScale;if(C)return!1;if(w)s=e.scaleX*w,a=e.scaleY*w;else{if(o=x(e,e.originX,e.originY,i,r),c="y"!==_?f(o.x):1,u="x"!==_?f(o.y):1,e.signX||(e.signX=c),e.signY||(e.signY=u),d.lockScalingFlip&&(e.signX!==c||e.signY!==u))return!1;if(h=d._getTransformedDimensions(),b&&!_){var S=Math.abs(o.x)+Math.abs(o.y),T=e.original,O=S/(Math.abs(h.x*T.scaleX/d.scaleX)+Math.abs(h.y*T.scaleY/d.scaleY));s=T.scaleX*O,a=T.scaleY*O}else s=Math.abs(o.x*d.scaleX/h.x),a=Math.abs(o.y*d.scaleY/h.y);v(e)&&(s*=2,a*=2),e.signX!==c&&"y"!==_&&(e.originX=l[e.originX],s*=-1,e.signX=c),e.signY!==u&&"x"!==_&&(e.originY=l[e.originY],a*=-1,e.signY=u)}var P=d.scaleX,E=d.scaleY;return _?("x"===_&&d.set("scaleX",s),"y"===_&&d.set("scaleY",a)):(!g&&d.set("scaleX",s),!y&&d.set("scaleY",a)),P!==d.scaleX||E!==d.scaleY}n.scaleCursorStyleHandler=function(t,e,r){var n=p(t,r),o="";if(0!==e.x&&0===e.y?o="x":0===e.x&&0!==e.y&&(o="y"),m(r,o,n))return"not-allowed";var s=d(r,e);return i[s]+"-resize"},n.skewCursorStyleHandler=function(t,e,i){var n="not-allowed";if(0!==e.x&&i.lockSkewingY)return n;if(0!==e.y&&i.lockSkewingX)return n;var o=d(i,e)%4;return r[o]+"-resize"},n.scaleSkewCursorStyleHandler=function(t,e,i){return t[i.canvas.altActionKey]?n.skewCursorStyleHandler(t,e,i):n.scaleCursorStyleHandler(t,e,i)},n.rotationWithSnapping=b("rotating",_((function(t,e,i,r){var n=e,o=n.target,s=o.translateToOriginPoint(o.getCenterPoint(),n.originX,n.originY);if(o.lockRotation)return!1;var a,h=Math.atan2(n.ey-s.y,n.ex-s.x),c=Math.atan2(r-s.y,i-s.x),l=u(c-h+n.theta);if(o.snapAngle>0){var f=o.snapAngle,d=o.snapThreshold||f,g=Math.ceil(l/f)*f,p=Math.floor(l/f)*f;Math.abs(l-p)0?o:a:(l>0&&(n=u===s?o:a),l<0&&(n=u===s?a:o),C(h)&&(n=n===o?a:o)),e.originX=n,b("skewing",_(S))(t,e,i,r))},n.skewHandlerY=function(t,e,i,r){var n,a=e.target,l=a.skewY,u=e.originX;return!a.lockSkewingY&&(0===l?n=x(e,c,c,i,r).y>0?s:h:(l>0&&(n=u===o?s:h),l<0&&(n=u===o?h:s),C(a)&&(n=n===s?h:s)),e.originY=n,b("skewing",_(T))(t,e,i,r))},n.dragHandler=function(t,e,i,r){var n=e.target,o=i-e.offsetX,s=r-e.offsetY,a=!n.get("lockMovementX")&&n.left!==o,h=!n.get("lockMovementY")&&n.top!==s;return a&&n.set("left",o),h&&n.set("top",s),(a||h)&&g("moving",y(t,e,i,r)),a||h},n.scaleOrSkewActionName=function(t,e,i){var r=t[i.canvas.altActionKey];return 0===e.x?r?"skewX":"scaleY":0===e.y?r?"skewY":"scaleX":void 0},n.rotationStyleHandler=function(t,e,i){return i.lockRotation?"not-allowed":e.cursorStyle},n.fireEvent=g,n.wrapWithFixedAnchor=_,n.wrapWithFireEvent=b,n.getLocalPoint=x,e.controlsUtils=n}(t),function(t){var 
e=t.fabric||(t.fabric={}),i=e.util.degreesToRadians,r=e.controlsUtils;r.renderCircleControl=function(t,e,i,r,n){r=r||{};var o,s=this.sizeX||r.cornerSize||n.cornerSize,a=this.sizeY||r.cornerSize||n.cornerSize,h=void 0!==r.transparentCorners?r.transparentCorners:n.transparentCorners,c=h?"stroke":"fill",l=!h&&(r.cornerStrokeColor||n.cornerStrokeColor),u=e,f=i;t.save(),t.fillStyle=r.cornerColor||n.cornerColor,t.strokeStyle=r.cornerStrokeColor||n.cornerStrokeColor,s>a?(o=s,t.scale(1,a/s),f=i*s/a):a>s?(o=a,t.scale(s/a,1),u=e*a/s):o=s,t.lineWidth=1,t.beginPath(),t.arc(u,f,o/2,0,2*Math.PI,!1),t[c](),l&&t.stroke(),t.restore()},r.renderSquareControl=function(t,e,r,n,o){n=n||{};var s=this.sizeX||n.cornerSize||o.cornerSize,a=this.sizeY||n.cornerSize||o.cornerSize,h=void 0!==n.transparentCorners?n.transparentCorners:o.transparentCorners,c=h?"stroke":"fill",l=!h&&(n.cornerStrokeColor||o.cornerStrokeColor),u=s/2,f=a/2;t.save(),t.fillStyle=n.cornerColor||o.cornerColor,t.strokeStyle=n.cornerStrokeColor||o.cornerStrokeColor,t.lineWidth=1,t.translate(e,r),t.rotate(i(o.angle)),t[c+"Rect"](-u,-f,s,a),l&&t.strokeRect(-u,-f,s,a),t.restore()}}(t),function(t){var e=t.fabric||(t.fabric={});e.Control=function(t){for(var e in t)this[e]=t[e]},e.Control.prototype={visible:!0,actionName:"scale",angle:0,x:0,y:0,offsetX:0,offsetY:0,sizeX:null,sizeY:null,touchSizeX:null,touchSizeY:null,cursorStyle:"crosshair",withConnection:!1,actionHandler:function(){},mouseDownHandler:function(){},mouseUpHandler:function(){},getActionHandler:function(){return this.actionHandler},getMouseDownHandler:function(){return this.mouseDownHandler},getMouseUpHandler:function(){return this.mouseUpHandler},cursorStyleHandler:function(t,e){return e.cursorStyle},getActionName:function(t,e){return e.actionName},getVisibility:function(t,e){var i=t._controlsVisibility;return i&&void 0!==i[e]?i[e]:this.visible},setVisibility:function(t){this.visible=t},positionHandler:function(t,i){return e.util.transformPoint({x:this.x*t.x+this.offsetX,y:this.y*t.y+this.offsetY},i)},calcCornerCoords:function(t,i,r,n,o){var s,a,h,c,l=o?this.touchSizeX:this.sizeX,u=o?this.touchSizeY:this.sizeY;if(l&&u&&l!==u){var f=Math.atan2(u,l),d=Math.sqrt(l*l+u*u)/2,g=f-e.util.degreesToRadians(t),p=Math.PI/2-f-e.util.degreesToRadians(t);s=d*e.util.cos(g),a=d*e.util.sin(g),h=d*e.util.cos(p),c=d*e.util.sin(p)}else{d=.7071067812*(l&&u?l:i);g=e.util.degreesToRadians(45-t);s=h=d*e.util.cos(g),a=c=d*e.util.sin(g)}return{tl:{x:r-c,y:n-h},tr:{x:r+s,y:n-a},bl:{x:r-s,y:n+a},br:{x:r+c,y:n+h}}},render:function(t,i,r,n,o){if("circle"===((n=n||{}).cornerStyle||o.cornerStyle))e.controlsUtils.renderCircleControl.call(this,t,i,r,n,o);else e.controlsUtils.renderSquareControl.call(this,t,i,r,n,o)}}}(t),function(){function t(t,e){var i,r,n,o,s=t.getAttribute("style"),a=t.getAttribute("offset")||0;if(a=(a=parseFloat(a)/(/%$/.test(a)?100:1))<0?0:a>1?1:a,s){var h=s.split(/\s*;\s*/);for(""===h[h.length-1]&&h.pop(),o=h.length;o--;){var c=h[o].split(/\s*:\s*/),l=c[0].trim(),u=c[1].trim();"stop-color"===l?i=u:"stop-opacity"===l&&(n=u)}}return i||(i=t.getAttribute("stop-color")||"rgb(0,0,0)"),n||(n=t.getAttribute("stop-opacity")),r=(i=new T.Color(i)).getAlpha(),n=isNaN(parseFloat(n))?1:parseFloat(n),n*=r*e,{offset:a,color:i.toRgb(),opacity:n}}var e=T.util.object.clone;T.Gradient=T.util.createClass({offsetX:0,offsetY:0,gradientTransform:null,gradientUnits:"pixels",type:"linear",initialize:function(t){t||(t={}),t.coords||(t.coords={});var 
e,i=this;Object.keys(t).forEach((function(e){i[e]=t[e]})),this.id?this.id+="_"+T.Object.__uid++:this.id=T.Object.__uid++,e={x1:t.coords.x1||0,y1:t.coords.y1||0,x2:t.coords.x2||0,y2:t.coords.y2||0},"radial"===this.type&&(e.r1=t.coords.r1||0,e.r2=t.coords.r2||0),this.coords=e,this.colorStops=t.colorStops.slice()},addColorStop:function(t){for(var e in t){var i=new T.Color(t[e]);this.colorStops.push({offset:parseFloat(e),color:i.toRgb(),opacity:i.getAlpha()})}return this},toObject:function(t){var e={type:this.type,coords:this.coords,colorStops:this.colorStops,offsetX:this.offsetX,offsetY:this.offsetY,gradientUnits:this.gradientUnits,gradientTransform:this.gradientTransform?this.gradientTransform.concat():this.gradientTransform};return T.util.populateWithProperties(this,e,t),e},toSVG:function(t,i){var r,n,o,s,a=e(this.coords,!0),h=(i=i||{},e(this.colorStops,!0)),c=a.r1>a.r2,l=this.gradientTransform?this.gradientTransform.concat():T.iMatrix.concat(),u=-this.offsetX,f=-this.offsetY,d=!!i.additionalTransform,g="pixels"===this.gradientUnits?"userSpaceOnUse":"objectBoundingBox";if(h.sort((function(t,e){return t.offset-e.offset})),"objectBoundingBox"===g?(u/=t.width,f/=t.height):(u+=t.width/2,f+=t.height/2),"path"===t.type&&"percentage"!==this.gradientUnits&&(u-=t.pathOffset.x,f-=t.pathOffset.y),l[4]-=u,l[5]-=f,s='id="SVGID_'+this.id+'" gradientUnits="'+g+'"',s+=' gradientTransform="'+(d?i.additionalTransform+" ":"")+T.util.matrixToSVG(l)+'" ',"linear"===this.type?o=["\n']:"radial"===this.type&&(o=["\n']),"radial"===this.type){if(c)for((h=h.concat()).reverse(),r=0,n=h.length;r0){var v=p/Math.max(a.r1,a.r2);for(r=0,n=h.length;r\n')}return o.push("linear"===this.type?"\n":"\n"),o.join("")},toLive:function(t){var e,i,r,n=T.util.object.clone(this.coords);if(this.type){for("linear"===this.type?e=t.createLinearGradient(n.x1,n.y1,n.x2,n.y2):"radial"===this.type&&(e=t.createRadialGradient(n.x1,n.y1,n.r1,n.x2,n.y2,n.r2)),i=0,r=this.colorStops.length;i1?1:o,isNaN(o)&&(o=1);var s,a,h,c,l=e.getElementsByTagName("stop"),u="userSpaceOnUse"===e.getAttribute("gradientUnits")?"pixels":"percentage",f=e.getAttribute("gradientTransform")||"",d=[],g=0,p=0;for("linearGradient"===e.nodeName||"LINEARGRADIENT"===e.nodeName?(s="linear",a=function(t){return{x1:t.getAttribute("x1")||0,y1:t.getAttribute("y1")||0,x2:t.getAttribute("x2")||"100%",y2:t.getAttribute("y2")||0}}(e)):(s="radial",a=function(t){return{x1:t.getAttribute("fx")||t.getAttribute("cx")||"50%",y1:t.getAttribute("fy")||t.getAttribute("cy")||"50%",r1:0,x2:t.getAttribute("cx")||"50%",y2:t.getAttribute("cy")||"50%",r2:t.getAttribute("r")||"50%"}}(e)),h=l.length;h--;)d.push(t(l[h],o));return c=T.parseTransformAttribute(f),function(t,e,i,r){var n,o;Object.keys(e).forEach((function(t){"Infinity"===(n=e[t])?o=1:"-Infinity"===n?o=0:(o=parseFloat(e[t],10),"string"==typeof n&&/^(\d+\.\d+)%|(\d+)%$/.test(n)&&(o*=.01,"pixels"===r&&("x1"!==t&&"x2"!==t&&"r2"!==t||(o*=i.viewBoxWidth||i.width),"y1"!==t&&"y2"!==t||(o*=i.viewBoxHeight||i.height)))),e[t]=o}))}(0,a,n,u),"pixels"===u&&(g=-i.left,p=-i.top),new T.Gradient({id:e.getAttribute("id"),type:s,coords:a,colorStops:d,gradientUnits:u,gradientTransform:c,offsetX:g,offsetY:p})}})}(),_=T.util.toFixed,T.Pattern=T.util.createClass({repeat:"repeat",offsetX:0,offsetY:0,crossOrigin:"",patternTransform:null,initialize:function(t,e){if(t||(t={}),this.id=T.Object.__uid++,this.setOptions(t),!t.source||t.source&&"string"!=typeof t.source)e&&e(this);else{var 
i=this;this.source=T.util.createImage(),T.util.loadImage(t.source,(function(t,r){i.source=t,e&&e(i,r)}),null,this.crossOrigin)}},toObject:function(t){var e,i,r=T.Object.NUM_FRACTION_DIGITS;return"string"==typeof this.source.src?e=this.source.src:"object"==typeof this.source&&this.source.toDataURL&&(e=this.source.toDataURL()),i={type:"pattern",source:e,repeat:this.repeat,crossOrigin:this.crossOrigin,offsetX:_(this.offsetX,r),offsetY:_(this.offsetY,r),patternTransform:this.patternTransform?this.patternTransform.concat():null},T.util.populateWithProperties(this,i,t),i},toSVG:function(t){var e="function"==typeof this.source?this.source():this.source,i=e.width/t.width,r=e.height/t.height,n=this.offsetX/t.width,o=this.offsetY/t.height,s="";return"repeat-x"!==this.repeat&&"no-repeat"!==this.repeat||(r=1,o&&(r+=Math.abs(o))),"repeat-y"!==this.repeat&&"no-repeat"!==this.repeat||(i=1,n&&(i+=Math.abs(n))),e.src?s=e.src:e.toDataURL&&(s=e.toDataURL()),'\n\n\n'},setOptions:function(t){for(var e in t)this[e]=t[e]},toLive:function(t){var e=this.source;if(!e)return"";if(void 0!==e.src){if(!e.complete)return"";if(0===e.naturalWidth||0===e.naturalHeight)return""}return t.createPattern(e,this.repeat)}}),function(t){var e=t.fabric||(t.fabric={}),i=e.util.toFixed;e.Shadow?e.warn("fabric.Shadow is already defined."):(e.Shadow=e.util.createClass({color:"rgb(0,0,0)",blur:0,offsetX:0,offsetY:0,affectStroke:!1,includeDefaultValues:!0,nonScaling:!1,initialize:function(t){for(var i in"string"==typeof t&&(t=this._parseShadow(t)),t)this[i]=t[i];this.id=e.Object.__uid++},_parseShadow:function(t){var i=t.trim(),r=e.Shadow.reOffsetsAndBlur.exec(i)||[];return{color:(i.replace(e.Shadow.reOffsetsAndBlur,"")||"rgb(0,0,0)").trim(),offsetX:parseFloat(r[1],10)||0,offsetY:parseFloat(r[2],10)||0,blur:parseFloat(r[3],10)||0}},toString:function(){return[this.offsetX,this.offsetY,this.blur,this.color].join("px ")},toSVG:function(t){var r=40,n=40,o=e.Object.NUM_FRACTION_DIGITS,s=e.util.rotateVector({x:this.offsetX,y:this.offsetY},e.util.degreesToRadians(-t.angle)),a=new e.Color(this.color);return t.width&&t.height&&(r=100*i((Math.abs(s.x)+this.blur)/t.width,o)+20,n=100*i((Math.abs(s.y)+this.blur)/t.height,o)+20),t.flipX&&(s.x*=-1),t.flipY&&(s.y*=-1),'\n\t\n\t\n\t\n\t\n\t\n\t\t\n\t\t\n\t\n\n'},toObject:function(){if(this.includeDefaultValues)return{color:this.color,blur:this.blur,offsetX:this.offsetX,offsetY:this.offsetY,affectStroke:this.affectStroke,nonScaling:this.nonScaling};var t={},i=e.Shadow.prototype;return["color","blur","offsetX","offsetY","affectStroke","nonScaling"].forEach((function(e){this[e]!==i[e]&&(t[e]=this[e])}),this),t}}),e.Shadow.reOffsetsAndBlur=/(?:\s|^)(-?\d+(?:\.\d*)?(?:px)?(?:\s?|$))?(-?\d+(?:\.\d*)?(?:px)?(?:\s?|$))?(\d+(?:\.\d*)?(?:px)?)?(?:\s?|$)(?:$|\s)/)}(t),function(){if(T.StaticCanvas)T.warn("fabric.StaticCanvas is already defined.");else{var t=T.util.object.extend,e=T.util.getElementOffset,i=T.util.removeFromArray,r=T.util.toFixed,n=T.util.transformPoint,o=T.util.invertTransform,s=T.util.getNodeCanvas,a=T.util.createCanvasElement,h=new Error("Could not initialize `canvas` 
element");T.StaticCanvas=T.util.createClass(T.CommonMethods,{initialize:function(t,e){e||(e={}),this.renderAndResetBound=this.renderAndReset.bind(this),this.requestRenderAllBound=this.requestRenderAll.bind(this),this._initStatic(t,e)},backgroundColor:"",backgroundImage:null,overlayColor:"",overlayImage:null,includeDefaultValues:!0,stateful:!1,renderOnAddRemove:!0,controlsAboveOverlay:!1,allowTouchScrolling:!1,imageSmoothingEnabled:!0,viewportTransform:T.iMatrix.concat(),backgroundVpt:!0,overlayVpt:!0,enableRetinaScaling:!0,vptCoords:{},skipOffscreen:!0,clipPath:void 0,_initStatic:function(t,e){var i=this.requestRenderAllBound;this._objects=[],this._createLowerCanvas(t),this._initOptions(e),this.interactive||this._initRetinaScaling(),e.overlayImage&&this.setOverlayImage(e.overlayImage,i),e.backgroundImage&&this.setBackgroundImage(e.backgroundImage,i),e.backgroundColor&&this.setBackgroundColor(e.backgroundColor,i),e.overlayColor&&this.setOverlayColor(e.overlayColor,i),this.calcOffset()},_isRetinaScaling:function(){return 1!==T.devicePixelRatio&&this.enableRetinaScaling},getRetinaScaling:function(){return this._isRetinaScaling()?T.devicePixelRatio:1},_initRetinaScaling:function(){if(this._isRetinaScaling()){var t=T.devicePixelRatio;this.__initRetinaScaling(t,this.lowerCanvasEl,this.contextContainer),this.upperCanvasEl&&this.__initRetinaScaling(t,this.upperCanvasEl,this.contextTop)}},__initRetinaScaling:function(t,e,i){e.setAttribute("width",this.width*t),e.setAttribute("height",this.height*t),i.scale(t,t)},calcOffset:function(){return this._offset=e(this.lowerCanvasEl),this},setOverlayImage:function(t,e,i){return this.__setBgOverlayImage("overlayImage",t,e,i)},setBackgroundImage:function(t,e,i){return this.__setBgOverlayImage("backgroundImage",t,e,i)},setOverlayColor:function(t,e){return this.__setBgOverlayColor("overlayColor",t,e)},setBackgroundColor:function(t,e){return this.__setBgOverlayColor("backgroundColor",t,e)},__setBgOverlayImage:function(t,e,i,r){return"string"==typeof e?T.util.loadImage(e,(function(e,n){if(e){var o=new T.Image(e,r);this[t]=o,o.canvas=this}i&&i(e,n)}),this,r&&r.crossOrigin):(r&&e.setOptions(r),this[t]=e,e&&(e.canvas=this),i&&i(e,!1)),this},__setBgOverlayColor:function(t,e,i){return this[t]=e,this._initGradient(e,t),this._initPattern(e,t,i),this},_createCanvasElement:function(){var t=a();if(!t)throw h;if(t.style||(t.style={}),void 0===t.getContext)throw h;return t},_initOptions:function(t){var e=this.lowerCanvasEl;this._setOptions(t),this.width=this.width||parseInt(e.width,10)||0,this.height=this.height||parseInt(e.height,10)||0,this.lowerCanvasEl.style&&(e.width=this.width,e.height=this.height,e.style.width=this.width+"px",e.style.height=this.height+"px",this.viewportTransform=this.viewportTransform.slice())},_createLowerCanvas:function(t){t&&t.getContext?this.lowerCanvasEl=t:this.lowerCanvasEl=T.util.getById(t)||this._createCanvasElement(),T.util.addClass(this.lowerCanvasEl,"lower-canvas"),this._originalCanvasStyle=this.lowerCanvasEl.style,this.interactive&&this._applyCanvasStyle(this.lowerCanvasEl),this.contextContainer=this.lowerCanvasEl.getContext("2d")},getWidth:function(){return this.width},getHeight:function(){return this.height},setWidth:function(t,e){return this.setDimensions({width:t},e)},setHeight:function(t,e){return this.setDimensions({height:t},e)},setDimensions:function(t,e){var i;for(var r in e=e||{},t)i=t[r],e.cssOnly||(this._setBackstoreDimension(r,t[r]),i+="px",this.hasLostContext=!0),e.backstoreOnly||this._setCssDimension(r,i);return 
this._isCurrentlyDrawing&&this.freeDrawingBrush&&this.freeDrawingBrush._setBrushStyles(),this._initRetinaScaling(),this.calcOffset(),e.cssOnly||this.requestRenderAll(),this},_setBackstoreDimension:function(t,e){return this.lowerCanvasEl[t]=e,this.upperCanvasEl&&(this.upperCanvasEl[t]=e),this.cacheCanvasEl&&(this.cacheCanvasEl[t]=e),this[t]=e,this},_setCssDimension:function(t,e){return this.lowerCanvasEl.style[t]=e,this.upperCanvasEl&&(this.upperCanvasEl.style[t]=e),this.wrapperEl&&(this.wrapperEl.style[t]=e),this},getZoom:function(){return this.viewportTransform[0]},setViewportTransform:function(t){var e,i,r,n=this._activeObject,o=this.backgroundImage,s=this.overlayImage;for(this.viewportTransform=t,i=0,r=this._objects.length;i\n'),this._setSVGBgOverlayColor(i,"background"),this._setSVGBgOverlayImage(i,"backgroundImage",e),this._setSVGObjects(i,e),this.clipPath&&i.push("\n"),this._setSVGBgOverlayColor(i,"overlay"),this._setSVGBgOverlayImage(i,"overlayImage",e),i.push(""),i.join("")},_setSVGPreamble:function(t,e){e.suppressPreamble||t.push('\n','\n')},_setSVGHeader:function(t,e){var i,n=e.width||this.width,o=e.height||this.height,s='viewBox="0 0 '+this.width+" "+this.height+'" ',a=T.Object.NUM_FRACTION_DIGITS;e.viewBox?s='viewBox="'+e.viewBox.x+" "+e.viewBox.y+" "+e.viewBox.width+" "+e.viewBox.height+'" ':this.svgViewportTransformation&&(i=this.viewportTransform,s='viewBox="'+r(-i[4]/i[0],a)+" "+r(-i[5]/i[3],a)+" "+r(this.width/i[0],a)+" "+r(this.height/i[3],a)+'" '),t.push("\n',"Created with Fabric.js ",T.version,"\n","\n",this.createSVGFontFacesMarkup(),this.createSVGRefElementsMarkup(),this.createSVGClipPathMarkup(e),"\n")},createSVGClipPathMarkup:function(t){var e=this.clipPath;return e?(e.clipPathId="CLIPPATH_"+T.Object.__uid++,'\n'+this.clipPath.toClipPathSVG(t.reviver)+"\n"):""},createSVGRefElementsMarkup:function(){var t=this;return["background","overlay"].map((function(e){var i=t[e+"Color"];if(i&&i.toLive){var r=t[e+"Vpt"],n=t.viewportTransform,o={width:t.width/(r?n[0]:1),height:t.height/(r?n[3]:1)};return i.toSVG(o,{additionalTransform:r?T.util.matrixToSVG(n):""})}})).join("")},createSVGFontFacesMarkup:function(){var t,e,i,r,n,o,s,a,h="",c={},l=T.fontPaths,u=[];for(this._objects.forEach((function t(e){u.push(e),e._objects&&e._objects.forEach(t)})),s=0,a=u.length;s',"\n",h,"","\n"].join("")),h},_setSVGObjects:function(t,e){var i,r,n,o=this._objects;for(r=0,n=o.length;r\n")}else t.push('\n")},sendToBack:function(t){if(!t)return this;var e,r,n,o=this._activeObject;if(t===o&&"activeSelection"===t.type)for(e=(n=o._objects).length;e--;)r=n[e],i(this._objects,r),this._objects.unshift(r);else i(this._objects,t),this._objects.unshift(t);return this.renderOnAddRemove&&this.requestRenderAll(),this},bringToFront:function(t){if(!t)return this;var e,r,n,o=this._activeObject;if(t===o&&"activeSelection"===t.type)for(n=o._objects,e=0;e0+c&&(s=o-1,i(this._objects,n),this._objects.splice(s,0,n)),c++;else 0!==(o=this._objects.indexOf(t))&&(s=this._findNewLowerIndex(t,o,e),i(this._objects,t),this._objects.splice(s,0,t));return this.renderOnAddRemove&&this.requestRenderAll(),this},_findNewLowerIndex:function(t,e,i){var r,n;if(i)for(r=e,n=e-1;n>=0;--n){if(t.intersectsWithObject(this._objects[n])||t.isContainedWithinObject(this._objects[n])||this._objects[n].isContainedWithinObject(t)){r=n;break}}else r=e-1;return r},bringForward:function(t,e){if(!t)return this;var 
r,n,o,s,a,h=this._activeObject,c=0;if(t===h&&"activeSelection"===t.type)for(r=(a=h._objects).length;r--;)n=a[r],(o=this._objects.indexOf(n))"}}),t(T.StaticCanvas.prototype,T.Observable),t(T.StaticCanvas.prototype,T.Collection),t(T.StaticCanvas.prototype,T.DataURLExporter),t(T.StaticCanvas,{EMPTY_JSON:'{"objects": [], "background": "white"}',supports:function(t){var e=a();if(!e||!e.getContext)return null;var i=e.getContext("2d");return i&&"setLineDash"===t?void 0!==i.setLineDash:null}}),T.StaticCanvas.prototype.toJSON=T.StaticCanvas.prototype.toObject,T.isLikelyNode&&(T.StaticCanvas.prototype.createPNGStream=function(){var t=s(this.lowerCanvasEl);return t&&t.createPNGStream()},T.StaticCanvas.prototype.createJPEGStream=function(t){var e=s(this.lowerCanvasEl);return e&&e.createJPEGStream(t)})}}(),T.BaseBrush=T.util.createClass({color:"rgb(0, 0, 0)",width:1,shadow:null,strokeLineCap:"round",strokeLineJoin:"round",strokeMiterLimit:10,strokeDashArray:null,limitedToCanvasSize:!1,_setBrushStyles:function(){var t=this.canvas.contextTop;t.strokeStyle=this.color,t.lineWidth=this.width,t.lineCap=this.strokeLineCap,t.miterLimit=this.strokeMiterLimit,t.lineJoin=this.strokeLineJoin,t.setLineDash(this.strokeDashArray||[])},_saveAndTransform:function(t){var e=this.canvas.viewportTransform;t.save(),t.transform(e[0],e[1],e[2],e[3],e[4],e[5])},_setShadow:function(){if(this.shadow){var t=this.canvas,e=this.shadow,i=t.contextTop,r=t.getZoom();t&&t._isRetinaScaling()&&(r*=T.devicePixelRatio),i.shadowColor=e.color,i.shadowBlur=e.blur*r,i.shadowOffsetX=e.offsetX*r,i.shadowOffsetY=e.offsetY*r}},needsFullRender:function(){return new T.Color(this.color).getAlpha()<1||!!this.shadow},_resetShadow:function(){var t=this.canvas.contextTop;t.shadowColor="",t.shadowBlur=t.shadowOffsetX=t.shadowOffsetY=0},_isOutSideCanvas:function(t){return t.x<0||t.x>this.canvas.getWidth()||t.y<0||t.y>this.canvas.getHeight()}}),T.PencilBrush=T.util.createClass(T.BaseBrush,{decimate:.4,initialize:function(t){this.canvas=t,this._points=[]},_drawSegment:function(t,e,i){var r=e.midPointFrom(i);return t.quadraticCurveTo(e.x,e.y,r.x,r.y),r},onMouseDown:function(t,e){this.canvas._isMainEvent(e.e)&&(this._prepareForDrawing(t),this._captureDrawingPath(t),this._render())},onMouseMove:function(t,e){if(this.canvas._isMainEvent(e.e)&&(!0!==this.limitedToCanvasSize||!this._isOutSideCanvas(t))&&this._captureDrawingPath(t)&&this._points.length>1)if(this.needsFullRender())this.canvas.clearContext(this.canvas.contextTop),this._render();else{var i=this._points,r=i.length,n=this.canvas.contextTop;this._saveAndTransform(n),this.oldEnd&&(n.beginPath(),n.moveTo(this.oldEnd.x,this.oldEnd.y)),this.oldEnd=this._drawSegment(n,i[r-2],i[r-1],!0),n.stroke(),n.restore()}},onMouseUp:function(t){return!this.canvas._isMainEvent(t.e)||(this.oldEnd=void 0,this._finalizeAndAddPath(),!1)},_prepareForDrawing:function(t){var e=new T.Point(t.x,t.y);this._reset(),this._addPoint(e),this.canvas.contextTop.moveTo(e.x,e.y)},_addPoint:function(t){return!(this._points.length>1&&t.eq(this._points[this._points.length-1])||(this._points.push(t),0))},_reset:function(){this._points=[],this._setBrushStyles(),this._setShadow()},_captureDrawingPath:function(t){var e=new T.Point(t.x,t.y);return this._addPoint(e)},_render:function(){var t,e,i=this.canvas.contextTop,r=this._points[0],n=this._points[1];if(this._saveAndTransform(i),i.beginPath(),2===this._points.length&&r.x===n.x&&r.y===n.y){var o=this.width/1e3;r=new T.Point(r.x,r.y),n=new 
T.Point(n.x,n.y),r.x-=o,n.x+=o}for(i.moveTo(r.x,r.y),t=1,e=this._points.length;t=n&&(s=t[i],a.push(s));return a.push(t[o]),a},_finalizeAndAddPath:function(){this.canvas.contextTop.closePath(),this.decimate&&(this._points=this.decimatePoints(this._points,this.decimate));var t=this.convertPointsToSVGPath(this._points);if(this._isEmptySVGPath(t))this.canvas.requestRenderAll();else{var e=this.createPath(t);this.canvas.clearContext(this.canvas.contextTop),this.canvas.fire("before:path:created",{path:e}),this.canvas.add(e),this.canvas.requestRenderAll(),e.setCoords(),this._resetShadow(),this.canvas.fire("path:created",{path:e})}}}),T.CircleBrush=T.util.createClass(T.BaseBrush,{width:10,initialize:function(t){this.canvas=t,this.points=[]},drawDot:function(t){var e=this.addPoint(t),i=this.canvas.contextTop;this._saveAndTransform(i),this.dot(i,e),i.restore()},dot:function(t,e){t.fillStyle=e.fill,t.beginPath(),t.arc(e.x,e.y,e.radius,0,2*Math.PI,!1),t.closePath(),t.fill()},onMouseDown:function(t){this.points.length=0,this.canvas.clearContext(this.canvas.contextTop),this._setShadow(),this.drawDot(t)},_render:function(){var t,e,i=this.canvas.contextTop,r=this.points;for(this._saveAndTransform(i),t=0,e=r.length;t0&&!this.preserveObjectStacking){e=[],i=[];for(var n=0,o=this._objects.length;n1&&(this._activeObject._objects=i),e.push.apply(e,i)}else e=this._objects;return e},renderAll:function(){!this.contextTopDirty||this._groupSelector||this.isDrawingMode||(this.clearContext(this.contextTop),this.contextTopDirty=!1),this.hasLostContext&&this.renderTopLayer(this.contextTop);var t=this.contextContainer;return this.renderCanvas(t,this._chooseObjectsToRender()),this},renderTopLayer:function(t){t.save(),this.isDrawingMode&&this._isCurrentlyDrawing&&(this.freeDrawingBrush&&this.freeDrawingBrush._render(),this.contextTopDirty=!0),this.selection&&this._groupSelector&&(this._drawSelection(t),this.contextTopDirty=!0),t.restore()},renderTop:function(){var t=this.contextTop;return this.clearContext(t),this.renderTopLayer(t),this.fire("after:render"),this},_normalizePointer:function(t,e){var i=t.calcTransformMatrix(),r=T.util.invertTransform(i),n=this.restorePointerVpt(e);return T.util.transformPoint(n,r)},isTargetTransparent:function(t,e,i){if(t.shouldCache()&&t._cacheCanvas&&t!==this._activeObject){var r=this._normalizePointer(t,{x:e,y:i}),n=Math.max(t.cacheTranslationX+r.x*t.zoomX,0),o=Math.max(t.cacheTranslationY+r.y*t.zoomY,0);return T.util.isTransparent(t._cacheContext,Math.round(n),Math.round(o),this.targetFindTolerance)}var s=this.contextCache,a=t.selectionBackgroundColor,h=this.viewportTransform;return t.selectionBackgroundColor="",this.clearContext(s),s.save(),s.transform(h[0],h[1],h[2],h[3],h[4],h[5]),t.render(s),s.restore(),t.selectionBackgroundColor=a,T.util.isTransparent(s,e,i,this.targetFindTolerance)},_isSelectionKeyPressed:function(t){return"[object Array]"===Object.prototype.toString.call(this.selectionKey)?!!this.selectionKey.find((function(e){return!0===t[e]})):t[this.selectionKey]},_shouldClearSelection:function(t,e){var i=this.getActiveObjects(),r=this._activeObject;return!e||e&&r&&i.length>1&&-1===i.indexOf(e)&&r!==e&&!this._isSelectionKeyPressed(t)||e&&!e.evented||e&&!e.selectable&&r&&r!==e},_shouldCenterTransform:function(t,e,i){var r;if(t)return"scale"===e||"scaleX"===e||"scaleY"===e||"resizing"===e?r=this.centeredScaling||t.centeredScaling:"rotate"===e&&(r=this.centeredRotation||t.centeredRotation),r?!i:i},_getOriginFromCorner:function(t,e){var 
i={x:t.originX,y:t.originY};return"ml"===e||"tl"===e||"bl"===e?i.x="right":"mr"!==e&&"tr"!==e&&"br"!==e||(i.x="left"),"tl"===e||"mt"===e||"tr"===e?i.y="bottom":"bl"!==e&&"mb"!==e&&"br"!==e||(i.y="top"),i},_getActionFromCorner:function(t,e,i,r){if(!e||!t)return"drag";var n=r.controls[e];return n.getActionName(i,n,r)},_setupCurrentTransform:function(t,i,r){if(i){var n=this.getPointer(t),o=i.__corner,s=i.controls[o],a=r&&o?s.getActionHandler(t,i,s):T.controlsUtils.dragHandler,h=this._getActionFromCorner(r,o,t,i),c=this._getOriginFromCorner(i,o),l=t[this.centeredKey],u={target:i,action:h,actionHandler:a,corner:o,scaleX:i.scaleX,scaleY:i.scaleY,skewX:i.skewX,skewY:i.skewY,offsetX:n.x-i.left,offsetY:n.y-i.top,originX:c.x,originY:c.y,ex:n.x,ey:n.y,lastX:n.x,lastY:n.y,theta:e(i.angle),width:i.width*i.scaleX,shiftKey:t.shiftKey,altKey:l,original:T.util.saveObjectTransform(i)};this._shouldCenterTransform(i,h,l)&&(u.originX="center",u.originY="center"),u.original.originX=c.x,u.original.originY=c.y,this._currentTransform=u,this._beforeTransform(t)}},setCursor:function(t){this.upperCanvasEl.style.cursor=t},_drawSelection:function(t){var e=this._groupSelector,i=new T.Point(e.ex,e.ey),r=T.util.transformPoint(i,this.viewportTransform),n=new T.Point(e.ex+e.left,e.ey+e.top),o=T.util.transformPoint(n,this.viewportTransform),s=Math.min(r.x,o.x),a=Math.min(r.y,o.y),h=Math.max(r.x,o.x),c=Math.max(r.y,o.y),l=this.selectionLineWidth/2;this.selectionColor&&(t.fillStyle=this.selectionColor,t.fillRect(s,a,h-s,c-a)),this.selectionLineWidth&&this.selectionBorderColor&&(t.lineWidth=this.selectionLineWidth,t.strokeStyle=this.selectionBorderColor,s+=l,a+=l,h-=l,c-=l,T.Object.prototype._setLineDash.call(this,t,this.selectionDashArray),t.strokeRect(s,a,h-s,c-a))},findTarget:function(t,e){if(!this.skipTargetFind){var r,n,o=this.getPointer(t,!0),s=this._activeObject,a=this.getActiveObjects(),h=i(t),c=a.length>1&&!e||1===a.length;if(this.targets=[],c&&s._findTargetCorner(o,h))return s;if(a.length>1&&!e&&s===this._searchPossibleTargets([s],o))return s;if(1===a.length&&s===this._searchPossibleTargets([s],o)){if(!this.preserveObjectStacking)return s;r=s,n=this.targets,this.targets=[]}var l=this._searchPossibleTargets(this._objects,o);return t[this.altSelectionKey]&&l&&r&&l!==r&&(l=r,this.targets=n),l}},_checkTarget:function(t,e,i){if(e&&e.visible&&e.evented&&e.containsPoint(t)){if(!this.perPixelTargetFind&&!e.perPixelTargetFind||e.isEditing)return!0;if(!this.isTargetTransparent(e,i.x,i.y))return!0}},_searchPossibleTargets:function(t,e){for(var i,r,n=t.length;n--;){var o=t[n],s=o.group?this._normalizePointer(o.group,e):e;if(this._checkTarget(s,o,e)){(i=t[n]).subTargetCheck&&i instanceof T.Group&&(r=this._searchPossibleTargets(i._objects,e))&&this.targets.push(r);break}}return i},restorePointerVpt:function(t){return T.util.transformPoint(t,T.util.invertTransform(this.viewportTransform))},getPointer:function(e,i){if(this._absolutePointer&&!i)return this._absolutePointer;if(this._pointer&&i)return this._pointer;var r,n=t(e),o=this.upperCanvasEl,s=o.getBoundingClientRect(),a=s.width||0,h=s.height||0;a&&h||("top"in s&&"bottom"in s&&(h=Math.abs(s.top-s.bottom)),"right"in s&&"left"in s&&(a=Math.abs(s.right-s.left))),this.calcOffset(),n.x=n.x-this._offset.left,n.y=n.y-this._offset.top,i||(n=this.restorePointerVpt(n));var c=this.getRetinaScaling();return 1!==c&&(n.x/=c,n.y/=c),r=0===a||0===h?{width:1,height:1}:{width:o.width/a,height:o.height/h},{x:n.x*r.width,y:n.y*r.height}},_createUpperCanvas:function(){var 
t=this.lowerCanvasEl.className.replace(/\s*lower-canvas\s*/,""),e=this.lowerCanvasEl,i=this.upperCanvasEl;i?i.className="":(i=this._createCanvasElement(),this.upperCanvasEl=i),T.util.addClass(i,"upper-canvas "+t),this.wrapperEl.appendChild(i),this._copyCanvasStyle(e,i),this._applyCanvasStyle(i),this.contextTop=i.getContext("2d")},_createCacheCanvas:function(){this.cacheCanvasEl=this._createCanvasElement(),this.cacheCanvasEl.setAttribute("width",this.width),this.cacheCanvasEl.setAttribute("height",this.height),this.contextCache=this.cacheCanvasEl.getContext("2d")},_initWrapperElement:function(){this.wrapperEl=T.util.wrapElement(this.lowerCanvasEl,"div",{class:this.containerClass}),T.util.setStyle(this.wrapperEl,{width:this.width+"px",height:this.height+"px",position:"relative"}),T.util.makeElementUnselectable(this.wrapperEl)},_applyCanvasStyle:function(t){var e=this.width||t.width,i=this.height||t.height;T.util.setStyle(t,{position:"absolute",width:e+"px",height:i+"px",left:0,top:0,"touch-action":this.allowTouchScrolling?"manipulation":"none","-ms-touch-action":this.allowTouchScrolling?"manipulation":"none"}),t.width=e,t.height=i,T.util.makeElementUnselectable(t)},_copyCanvasStyle:function(t,e){e.style.cssText=t.style.cssText},getSelectionContext:function(){return this.contextTop},getSelectionElement:function(){return this.upperCanvasEl},getActiveObject:function(){return this._activeObject},getActiveObjects:function(){var t=this._activeObject;return t?"activeSelection"===t.type&&t._objects?t._objects.slice(0):[t]:[]},_onObjectRemoved:function(t){t===this._activeObject&&(this.fire("before:selection:cleared",{target:t}),this._discardActiveObject(),this.fire("selection:cleared",{target:t}),t.fire("deselected")),t===this._hoveredTarget&&(this._hoveredTarget=null,this._hoveredTargets=[]),this.callSuper("_onObjectRemoved",t)},_fireSelectionEvents:function(t,e){var i=!1,r=this.getActiveObjects(),n=[],o=[];t.forEach((function(t){-1===r.indexOf(t)&&(i=!0,t.fire("deselected",{e:e,target:t}),o.push(t))})),r.forEach((function(r){-1===t.indexOf(r)&&(i=!0,r.fire("selected",{e:e,target:r}),n.push(r))})),t.length>0&&r.length>0?i&&this.fire("selection:updated",{e:e,selected:n,deselected:o,updated:n[0]||o[0],target:this._activeObject}):r.length>0?this.fire("selection:created",{e:e,selected:n,target:this._activeObject}):t.length>0&&this.fire("selection:cleared",{e:e,deselected:o})},setActiveObject:function(t,e){var i=this.getActiveObjects();return this._setActiveObject(t,e),this._fireSelectionEvents(i,e),this},_setActiveObject:function(t,e){return this._activeObject!==t&&(!!this._discardActiveObject(e,t)&&(!t.onSelect({e:e})&&(this._activeObject=t,!0)))},_discardActiveObject:function(t,e){var i=this._activeObject;if(i){if(i.onDeselect({e:t,object:e}))return!1;this._activeObject=null}return!0},discardActiveObject:function(t){var e=this.getActiveObjects(),i=this.getActiveObject();return e.length&&this.fire("before:selection:cleared",{target:i,e:t}),this._discardActiveObject(t),this._fireSelectionEvents(e,t),this},dispose:function(){var t=this.wrapperEl;return this.removeListeners(),t.removeChild(this.upperCanvasEl),t.removeChild(this.lowerCanvasEl),this.contextCache=null,this.contextTop=null,["upperCanvasEl","cacheCanvasEl"].forEach(function(t){T.util.cleanUpJsdomNode(this[t]),this[t]=void 0}.bind(this)),t.parentNode&&t.parentNode.replaceChild(this.lowerCanvasEl,this.wrapperEl),delete this.wrapperEl,T.StaticCanvas.prototype.dispose.call(this),this},clear:function(){return 
this.discardActiveObject(),this.clearContext(this.contextTop),this.callSuper("clear")},drawControls:function(t){var e=this._activeObject;e&&e._renderControls(t)},_toObject:function(t,e,i){var r=this._realizeGroupTransformOnObject(t),n=this.callSuper("_toObject",t,e,i);return this._unwindGroupTransformOnObject(t,r),n},_realizeGroupTransformOnObject:function(t){if(t.group&&"activeSelection"===t.group.type&&this._activeObject===t.group){var e={};return["angle","flipX","flipY","left","scaleX","scaleY","skewX","skewY","top"].forEach((function(i){e[i]=t[i]})),T.util.addTransformToObject(t,this._activeObject.calcOwnMatrix()),e}return null},_unwindGroupTransformOnObject:function(t,e){e&&t.set(e)},_setSVGObject:function(t,e,i){var r=this._realizeGroupTransformOnObject(e);this.callSuper("_setSVGObject",t,e,i),this._unwindGroupTransformOnObject(e,r)},setViewportTransform:function(t){this.renderOnAddRemove&&this._activeObject&&this._activeObject.isEditing&&this._activeObject.clearContextTop(),T.StaticCanvas.prototype.setViewportTransform.call(this,t)}}),T.StaticCanvas)"prototype"!==r&&(T.Canvas[r]=T.StaticCanvas[r])}(),function(){var t=T.util.addListener,e=T.util.removeListener,i={passive:!1};function r(t,e){return t.button&&t.button===e-1}T.util.object.extend(T.Canvas.prototype,{mainTouchId:null,_initEventListeners:function(){this.removeListeners(),this._bindEvents(),this.addOrRemove(t,"add")},_getEventPrefix:function(){return this.enablePointerEvents?"pointer":"mouse"},addOrRemove:function(t,e){var r=this.upperCanvasEl,n=this._getEventPrefix();t(T.window,"resize",this._onResize),t(r,n+"down",this._onMouseDown),t(r,n+"move",this._onMouseMove,i),t(r,n+"out",this._onMouseOut),t(r,n+"enter",this._onMouseEnter),t(r,"wheel",this._onMouseWheel),t(r,"contextmenu",this._onContextMenu),t(r,"dblclick",this._onDoubleClick),t(r,"dragover",this._onDragOver),t(r,"dragenter",this._onDragEnter),t(r,"dragleave",this._onDragLeave),t(r,"drop",this._onDrop),this.enablePointerEvents||t(r,"touchstart",this._onTouchStart,i),void 0!==P&&e in P&&(P[e](r,"gesture",this._onGesture),P[e](r,"drag",this._onDrag),P[e](r,"orientation",this._onOrientationChange),P[e](r,"shake",this._onShake),P[e](r,"longpress",this._onLongPress))},removeListeners:function(){this.addOrRemove(e,"remove");var 
t=this._getEventPrefix();e(T.document,t+"up",this._onMouseUp),e(T.document,"touchend",this._onTouchEnd,i),e(T.document,t+"move",this._onMouseMove,i),e(T.document,"touchmove",this._onMouseMove,i)},_bindEvents:function(){this.eventsBound||(this._onMouseDown=this._onMouseDown.bind(this),this._onTouchStart=this._onTouchStart.bind(this),this._onMouseMove=this._onMouseMove.bind(this),this._onMouseUp=this._onMouseUp.bind(this),this._onTouchEnd=this._onTouchEnd.bind(this),this._onResize=this._onResize.bind(this),this._onGesture=this._onGesture.bind(this),this._onDrag=this._onDrag.bind(this),this._onShake=this._onShake.bind(this),this._onLongPress=this._onLongPress.bind(this),this._onOrientationChange=this._onOrientationChange.bind(this),this._onMouseWheel=this._onMouseWheel.bind(this),this._onMouseOut=this._onMouseOut.bind(this),this._onMouseEnter=this._onMouseEnter.bind(this),this._onContextMenu=this._onContextMenu.bind(this),this._onDoubleClick=this._onDoubleClick.bind(this),this._onDragOver=this._onDragOver.bind(this),this._onDragEnter=this._simpleEventHandler.bind(this,"dragenter"),this._onDragLeave=this._simpleEventHandler.bind(this,"dragleave"),this._onDrop=this._simpleEventHandler.bind(this,"drop"),this.eventsBound=!0)},_onGesture:function(t,e){this.__onTransformGesture&&this.__onTransformGesture(t,e)},_onDrag:function(t,e){this.__onDrag&&this.__onDrag(t,e)},_onMouseWheel:function(t){this.__onMouseWheel(t)},_onMouseOut:function(t){var e=this._hoveredTarget;this.fire("mouse:out",{target:e,e:t}),this._hoveredTarget=null,e&&e.fire("mouseout",{e:t});var i=this;this._hoveredTargets.forEach((function(r){i.fire("mouse:out",{target:e,e:t}),r&&e.fire("mouseout",{e:t})})),this._hoveredTargets=[],this._iTextInstances&&this._iTextInstances.forEach((function(t){t.isEditing&&t.hiddenTextarea.focus()}))},_onMouseEnter:function(t){this._currentTransform||this.findTarget(t)||(this.fire("mouse:over",{target:null,e:t}),this._hoveredTarget=null,this._hoveredTargets=[])},_onOrientationChange:function(t,e){this.__onOrientationChange&&this.__onOrientationChange(t,e)},_onShake:function(t,e){this.__onShake&&this.__onShake(t,e)},_onLongPress:function(t,e){this.__onLongPress&&this.__onLongPress(t,e)},_onDragOver:function(t){t.preventDefault();var e=this._simpleEventHandler("dragover",t);this._fireEnterLeaveEvents(e,t)},_onContextMenu:function(t){return this.stopContextMenu&&(t.stopPropagation(),t.preventDefault()),!1},_onDoubleClick:function(t){this._cacheTransformEventData(t),this._handleEvent(t,"dblclick"),this._resetTransformEventData(t)},getPointerId:function(t){var e=t.changedTouches;return e?e[0]&&e[0].identifier:this.enablePointerEvents?t.pointerId:-1},_isMainEvent:function(t){return!0===t.isPrimary||!1!==t.isPrimary&&("touchend"===t.type&&0===t.touches.length||(!t.changedTouches||t.changedTouches[0].identifier===this.mainTouchId))},_onTouchStart:function(r){r.preventDefault(),null===this.mainTouchId&&(this.mainTouchId=this.getPointerId(r)),this.__onMouseDown(r),this._resetTransformEventData();var n=this.upperCanvasEl,o=this._getEventPrefix();t(T.document,"touchend",this._onTouchEnd,i),t(T.document,"touchmove",this._onMouseMove,i),e(n,o+"down",this._onMouseDown)},_onMouseDown:function(r){this.__onMouseDown(r),this._resetTransformEventData();var 
n=this.upperCanvasEl,o=this._getEventPrefix();e(n,o+"move",this._onMouseMove,i),t(T.document,o+"up",this._onMouseUp),t(T.document,o+"move",this._onMouseMove,i)},_onTouchEnd:function(r){if(!(r.touches.length>0)){this.__onMouseUp(r),this._resetTransformEventData(),this.mainTouchId=null;var n=this._getEventPrefix();e(T.document,"touchend",this._onTouchEnd,i),e(T.document,"touchmove",this._onMouseMove,i);var o=this;this._willAddMouseDown&&clearTimeout(this._willAddMouseDown),this._willAddMouseDown=setTimeout((function(){t(o.upperCanvasEl,n+"down",o._onMouseDown),o._willAddMouseDown=0}),400)}},_onMouseUp:function(r){this.__onMouseUp(r),this._resetTransformEventData();var n=this.upperCanvasEl,o=this._getEventPrefix();this._isMainEvent(r)&&(e(T.document,o+"up",this._onMouseUp),e(T.document,o+"move",this._onMouseMove,i),t(n,o+"move",this._onMouseMove,i))},_onMouseMove:function(t){!this.allowTouchScrolling&&t.preventDefault&&t.preventDefault(),this.__onMouseMove(t)},_onResize:function(){this.calcOffset()},_shouldRender:function(t){var e=this._activeObject;return!!(!!e!=!!t||e&&t&&e!==t)||(e&&e.isEditing,!1)},__onMouseUp:function(t){var e,i=this._currentTransform,n=this._groupSelector,o=!1,s=!n||0===n.left&&0===n.top;if(this._cacheTransformEventData(t),e=this._target,this._handleEvent(t,"up:before"),r(t,3))this.fireRightClick&&this._handleEvent(t,"up",3,s);else{if(r(t,2))return this.fireMiddleClick&&this._handleEvent(t,"up",2,s),void this._resetTransformEventData();if(this.isDrawingMode&&this._isCurrentlyDrawing)this._onMouseUpInDrawingMode(t);else if(this._isMainEvent(t)){if(i&&(this._finalizeCurrentTransform(t),o=i.actionPerformed),!s){var a=e===this._activeObject;this._maybeGroupObjects(t),o||(o=this._shouldRender(e)||!a&&e===this._activeObject)}if(e){if(e.selectable&&e!==this._activeObject&&"up"===e.activeOn)this.setActiveObject(e,t),o=!0;else{var h=e._findTargetCorner(this.getPointer(t,!0),T.util.isTouchEvent(t)),c=e.controls[h],l=c&&c.getMouseUpHandler(t,e,c);if(l){var u=this.getPointer(t);l(t,i,u.x,u.y)}}e.isMoving=!1}this._setCursorFromEvent(t,e),this._handleEvent(t,"up",1,s),this._groupSelector=null,this._currentTransform=null,e&&(e.__corner=0),o?this.requestRenderAll():s||this.renderTop()}}},_simpleEventHandler:function(t,e){var i=this.findTarget(e),r=this.targets,n={e:e,target:i,subTargets:r};if(this.fire(t,n),i&&i.fire(t,n),!r)return i;for(var o=0;o1&&(e=new T.ActiveSelection(i.reverse(),{canvas:this}),this.setActiveObject(e,t))},_collectObjects:function(t){for(var e,i=[],r=this._groupSelector.ex,n=this._groupSelector.ey,o=r+this._groupSelector.left,s=n+this._groupSelector.top,a=new T.Point(b(r,o),b(n,s)),h=new T.Point(x(r,o),x(n,s)),c=!this.selectionFullyContained,l=r===o&&n===s,u=this._objects.length;u--&&!((e=this._objects[u])&&e.selectable&&e.visible&&(c&&e.intersectsWithRect(a,h,!0)||e.isContainedWithinRect(a,h,!0)||c&&e.containsPoint(a,null,!0)||c&&e.containsPoint(h,null,!0))&&(i.push(e),l)););return i.length>1&&(i=i.filter((function(e){return!e.onSelect({e:t})}))),i},_maybeGroupObjects:function(t){this.selection&&this._groupSelector&&this._groupSelectedObjects(t),this.setCursor(this.defaultCursor),this._groupSelector=null}}),T.util.object.extend(T.StaticCanvas.prototype,{toDataURL:function(t){t||(t={});var e=t.format||"png",i=t.quality||1,r=(t.multiplier||1)*(t.enableRetinaScaling?this.getRetinaScaling():1),n=this.toCanvasElement(r,t);return T.util.toDataURL(n,e,i)},toCanvasElement:function(t,e){t=t||1;var 
i=((e=e||{}).width||this.width)*t,r=(e.height||this.height)*t,n=this.getZoom(),o=this.width,s=this.height,a=n*t,h=this.viewportTransform,c=(h[4]-(e.left||0))*t,l=(h[5]-(e.top||0))*t,u=this.interactive,f=[a,0,0,a,c,l],d=this.enableRetinaScaling,g=T.util.createCanvasElement(),p=this.contextTop;return g.width=i,g.height=r,this.contextTop=null,this.enableRetinaScaling=!1,this.interactive=!1,this.viewportTransform=f,this.width=i,this.height=r,this.calcViewportBoundaries(),this.renderCanvas(g.getContext("2d"),this._objects),this.viewportTransform=h,this.width=o,this.height=s,this.calcViewportBoundaries(),this.interactive=u,this.enableRetinaScaling=d,this.contextTop=p,g}}),T.util.object.extend(T.StaticCanvas.prototype,{loadFromJSON:function(t,e,i){if(t){var r="string"==typeof t?JSON.parse(t):T.util.object.clone(t),n=this,o=r.clipPath,s=this.renderOnAddRemove;return this.renderOnAddRemove=!1,delete r.clipPath,this._enlivenObjects(r.objects,(function(t){n.clear(),n._setBgOverlay(r,(function(){o?n._enlivenObjects([o],(function(i){n.clipPath=i[0],n.__setupCanvas.call(n,r,t,s,e)})):n.__setupCanvas.call(n,r,t,s,e)}))}),i),this}},__setupCanvas:function(t,e,i,r){var n=this;e.forEach((function(t,e){n.insertAt(t,e)})),this.renderOnAddRemove=i,delete t.objects,delete t.backgroundImage,delete t.overlayImage,delete t.background,delete t.overlay,this._setOptions(t),this.renderAll(),r&&r()},_setBgOverlay:function(t,e){var i={backgroundColor:!1,overlayColor:!1,backgroundImage:!1,overlayImage:!1};if(t.backgroundImage||t.overlayImage||t.background||t.overlay){var r=function(){i.backgroundImage&&i.overlayImage&&i.backgroundColor&&i.overlayColor&&e&&e()};this.__setBgOverlay("backgroundImage",t.backgroundImage,i,r),this.__setBgOverlay("overlayImage",t.overlayImage,i,r),this.__setBgOverlay("backgroundColor",t.background,i,r),this.__setBgOverlay("overlayColor",t.overlay,i,r)}else e&&e()},__setBgOverlay:function(t,e,i,r){var n=this;if(!e)return i[t]=!0,void(r&&r());"backgroundImage"===t||"overlayImage"===t?T.util.enlivenObjects([e],(function(e){n[t]=e[0],i[t]=!0,r&&r()})):this["set"+T.util.string.capitalize(t,!0)](e,(function(){i[t]=!0,r&&r()}))},_enlivenObjects:function(t,e,i){t&&0!==t.length?T.util.enlivenObjects(t,(function(t){e&&e(t)}),null,i):e&&e([])},_toDataURL:function(t,e){this.clone((function(i){e(i.toDataURL(t))}))},_toDataURLWithMultiplier:function(t,e,i){this.clone((function(r){i(r.toDataURLWithMultiplier(t,e))}))},clone:function(t,e){var i=JSON.stringify(this.toJSON(e));this.cloneWithoutData((function(e){e.loadFromJSON(i,(function(){t&&t(e)}))}))},cloneWithoutData:function(t){var e=T.util.createCanvasElement();e.width=this.width,e.height=this.height;var i=new T.Canvas(e);this.backgroundImage?(i.setBackgroundImage(this.backgroundImage.src,(function(){i.renderAll(),t&&t(i)})),i.backgroundImageOpacity=this.backgroundImageOpacity,i.backgroundImageStretch=this.backgroundImageStretch):t&&t(i)}}),C=T.util.degreesToRadians,w=T.util.radiansToDegrees,T.util.object.extend(T.Canvas.prototype,{__onTransformGesture:function(t,e){if(!this.isDrawingMode&&t.touches&&2===t.touches.length&&"gesture"===e.gesture){var i=this.findTarget(t);void 0!==i&&(this.__gesturesParams={e:t,self:e,target:i},this.__gesturesRenderer()),this.fire("touch:gesture",{target:i,e:t,self:e})}},__gesturesParams:null,__gesturesRenderer:function(){if(null!==this.__gesturesParams&&null!==this._currentTransform){var 
t=this.__gesturesParams.self,e=this._currentTransform,i=this.__gesturesParams.e;e.action="scale",e.originX=e.originY="center",this._scaleObjectBy(t.scale,i),0!==t.rotation&&(e.action="rotate",this._rotateObjectByAngle(t.rotation,i)),this.requestRenderAll(),e.action="drag"}},__onDrag:function(t,e){this.fire("touch:drag",{e:t,self:e})},__onOrientationChange:function(t,e){this.fire("touch:orientation",{e:t,self:e})},__onShake:function(t,e){this.fire("touch:shake",{e:t,self:e})},__onLongPress:function(t,e){this.fire("touch:longpress",{e:t,self:e})},_scaleObjectBy:function(t,e){var i=this._currentTransform,r=i.target;return i.gestureScale=t,r._scaling=!0,T.controlsUtils.scalingEqually(e,i,0,0)},_rotateObjectByAngle:function(t,e){var i=this._currentTransform;i.target.get("lockRotation")||(i.target.rotate(w(C(t)+i.theta)),this._fire("rotating",{target:i.target,e:e,transform:i}))}}),function(t){var e=t.fabric||(t.fabric={}),i=e.util.object.extend,r=e.util.object.clone,n=e.util.toFixed,o=e.util.string.capitalize,s=e.util.degreesToRadians,a=!e.isLikelyNode;e.Object||(e.Object=e.util.createClass(e.CommonMethods,{type:"object",originX:"left",originY:"top",top:0,left:0,width:0,height:0,scaleX:1,scaleY:1,flipX:!1,flipY:!1,opacity:1,angle:0,skewX:0,skewY:0,cornerSize:13,touchCornerSize:24,transparentCorners:!0,hoverCursor:null,moveCursor:null,padding:0,borderColor:"rgb(178,204,255)",borderDashArray:null,cornerColor:"rgb(178,204,255)",cornerStrokeColor:null,cornerStyle:"rect",cornerDashArray:null,centeredScaling:!1,centeredRotation:!0,fill:"rgb(0,0,0)",fillRule:"nonzero",globalCompositeOperation:"source-over",backgroundColor:"",selectionBackgroundColor:"",stroke:null,strokeWidth:1,strokeDashArray:null,strokeDashOffset:0,strokeLineCap:"butt",strokeLineJoin:"miter",strokeMiterLimit:4,shadow:null,borderOpacityWhenMoving:.4,borderScaleFactor:1,minScaleLimit:0,selectable:!0,evented:!0,visible:!0,hasControls:!0,hasBorders:!0,perPixelTargetFind:!1,includeDefaultValues:!0,lockMovementX:!1,lockMovementY:!1,lockRotation:!1,lockScalingX:!1,lockScalingY:!1,lockSkewingX:!1,lockSkewingY:!1,lockScalingFlip:!1,excludeFromExport:!1,objectCaching:a,statefullCache:!1,noScaleCache:!0,strokeUniform:!1,dirty:!0,__corner:0,paintFirst:"fill",activeOn:"down",stateProperties:"top left width height scaleX scaleY flipX flipY originX originY transformMatrix stroke strokeWidth strokeDashArray strokeLineCap strokeDashOffset strokeLineJoin strokeMiterLimit angle opacity fill globalCompositeOperation shadow visible backgroundColor skewX skewY fillRule paintFirst clipPath strokeUniform".split(" "),cacheProperties:"fill stroke strokeWidth strokeDashArray width height paintFirst strokeUniform strokeLineCap strokeDashOffset strokeLineJoin strokeMiterLimit backgroundColor clipPath".split(" "),colorProperties:"fill stroke backgroundColor".split(" "),clipPath:void 0,inverted:!1,absolutePositioned:!1,initialize:function(t){t&&this.setOptions(t)},_createCacheCanvas:function(){this._cacheProperties={},this._cacheCanvas=e.util.createCanvasElement(),this._cacheContext=this._cacheCanvas.getContext("2d"),this._updateCacheCanvas(),this.dirty=!0},_limitCacheSize:function(t){var i=e.perfLimitSizeTotal,r=t.width,n=t.height,o=e.maxCacheSideLimit,s=e.minCacheSideLimit;if(r<=o&&n<=o&&r*n<=i)return rl&&(t.zoomX/=r/l,t.width=l,t.capped=!0),n>u&&(t.zoomY/=n/u,t.height=u,t.capped=!0),t},_getCacheCanvasDimensions:function(){var 
t=this.getTotalObjectScaling(),e=this._getTransformedDimensions(0,0),i=e.x*t.scaleX/this.scaleX,r=e.y*t.scaleY/this.scaleY;return{width:i+2,height:r+2,zoomX:t.scaleX,zoomY:t.scaleY,x:i,y:r}},_updateCacheCanvas:function(){var t=this.canvas;if(this.noScaleCache&&t&&t._currentTransform){var i=t._currentTransform.target,r=t._currentTransform.action;if(this===i&&r.slice&&"scale"===r.slice(0,5))return!1}var n,o,s=this._cacheCanvas,a=this._limitCacheSize(this._getCacheCanvasDimensions()),h=e.minCacheSideLimit,c=a.width,l=a.height,u=a.zoomX,f=a.zoomY,d=c!==this.cacheWidth||l!==this.cacheHeight,g=this.zoomX!==u||this.zoomY!==f,p=d||g,v=0,m=0,y=!1;if(d){var _=this._cacheCanvas.width,b=this._cacheCanvas.height,x=c>_||l>b;y=x||(c<.9*_||l<.9*b)&&_>h&&b>h,x&&!a.capped&&(c>h||l>h)&&(v=.1*c,m=.1*l)}return this instanceof e.Text&&this.path&&(p=!0,y=!0,v+=this.getHeightOfLine(0)*this.zoomX,m+=this.getHeightOfLine(0)*this.zoomY),!!p&&(y?(s.width=Math.ceil(c+v),s.height=Math.ceil(l+m)):(this._cacheContext.setTransform(1,0,0,1,0,0),this._cacheContext.clearRect(0,0,s.width,s.height)),n=a.x/2,o=a.y/2,this.cacheTranslationX=Math.round(s.width/2-n)+n,this.cacheTranslationY=Math.round(s.height/2-o)+o,this.cacheWidth=c,this.cacheHeight=l,this._cacheContext.translate(this.cacheTranslationX,this.cacheTranslationY),this._cacheContext.scale(u,f),this.zoomX=u,this.zoomY=f,!0)},setOptions:function(t){this._setOptions(t),this._initGradient(t.fill,"fill"),this._initGradient(t.stroke,"stroke"),this._initPattern(t.fill,"fill"),this._initPattern(t.stroke,"stroke")},transform:function(t){var e=this.group&&!this.group._transformDone||this.group&&this.canvas&&t===this.canvas.contextTop,i=this.calcTransformMatrix(!e);t.transform(i[0],i[1],i[2],i[3],i[4],i[5])},toObject:function(t){var i=e.Object.NUM_FRACTION_DIGITS,r={type:this.type,version:e.version,originX:this.originX,originY:this.originY,left:n(this.left,i),top:n(this.top,i),width:n(this.width,i),height:n(this.height,i),fill:this.fill&&this.fill.toObject?this.fill.toObject():this.fill,stroke:this.stroke&&this.stroke.toObject?this.stroke.toObject():this.stroke,strokeWidth:n(this.strokeWidth,i),strokeDashArray:this.strokeDashArray?this.strokeDashArray.concat():this.strokeDashArray,strokeLineCap:this.strokeLineCap,strokeDashOffset:this.strokeDashOffset,strokeLineJoin:this.strokeLineJoin,strokeUniform:this.strokeUniform,strokeMiterLimit:n(this.strokeMiterLimit,i),scaleX:n(this.scaleX,i),scaleY:n(this.scaleY,i),angle:n(this.angle,i),flipX:this.flipX,flipY:this.flipY,opacity:n(this.opacity,i),shadow:this.shadow&&this.shadow.toObject?this.shadow.toObject():this.shadow,visible:this.visible,backgroundColor:this.backgroundColor,fillRule:this.fillRule,paintFirst:this.paintFirst,globalCompositeOperation:this.globalCompositeOperation,skewX:n(this.skewX,i),skewY:n(this.skewY,i)};return this.clipPath&&!this.clipPath.excludeFromExport&&(r.clipPath=this.clipPath.toObject(t),r.clipPath.inverted=this.clipPath.inverted,r.clipPath.absolutePositioned=this.clipPath.absolutePositioned),e.util.populateWithProperties(this,r,t),this.includeDefaultValues||(r=this._removeDefaultValues(r)),r},toDatalessObject:function(t){return this.toObject(t)},_removeDefaultValues:function(t){var i=e.util.getKlass(t.type).prototype;return i.stateProperties.forEach((function(e){"left"!==e&&"top"!==e&&(t[e]===i[e]&&delete t[e],"[object Array]"===Object.prototype.toString.call(t[e])&&"[object Array]"===Object.prototype.toString.call(i[e])&&0===t[e].length&&0===i[e].length&&delete 
t[e])})),t},toString:function(){return"#"},getObjectScaling:function(){if(!this.group)return{scaleX:this.scaleX,scaleY:this.scaleY};var t=e.util.qrDecompose(this.calcTransformMatrix());return{scaleX:Math.abs(t.scaleX),scaleY:Math.abs(t.scaleY)}},getTotalObjectScaling:function(){var t=this.getObjectScaling(),e=t.scaleX,i=t.scaleY;if(this.canvas){var r=this.canvas.getZoom(),n=this.canvas.getRetinaScaling();e*=r*n,i*=r*n}return{scaleX:e,scaleY:i}},getObjectOpacity:function(){var t=this.opacity;return this.group&&(t*=this.group.getObjectOpacity()),t},_set:function(t,i){var r="scaleX"===t||"scaleY"===t,n=this[t]!==i,o=!1;return r&&(i=this._constrainScale(i)),"scaleX"===t&&i<0?(this.flipX=!this.flipX,i*=-1):"scaleY"===t&&i<0?(this.flipY=!this.flipY,i*=-1):"shadow"!==t||!i||i instanceof e.Shadow?"dirty"===t&&this.group&&this.group.set("dirty",i):i=new e.Shadow(i),this[t]=i,n&&(o=this.group&&this.group.isOnACache(),this.cacheProperties.indexOf(t)>-1?(this.dirty=!0,o&&this.group.set("dirty",!0)):o&&this.stateProperties.indexOf(t)>-1&&this.group.set("dirty",!0)),this},setOnGroup:function(){},getViewportTransform:function(){return this.canvas&&this.canvas.viewportTransform?this.canvas.viewportTransform:e.iMatrix.concat()},isNotVisible:function(){return 0===this.opacity||!this.width&&!this.height&&0===this.strokeWidth||!this.visible},render:function(t){this.isNotVisible()||this.canvas&&this.canvas.skipOffscreen&&!this.group&&!this.isOnScreen()||(t.save(),this._setupCompositeOperation(t),this.drawSelectionBackground(t),this.transform(t),this._setOpacity(t),this._setShadow(t,this),this.shouldCache()?(this.renderCache(),this.drawCacheOnCanvas(t)):(this._removeCacheCanvas(),this.dirty=!1,this.drawObject(t),this.objectCaching&&this.statefullCache&&this.saveState({propertySet:"cacheProperties"})),t.restore())},renderCache:function(t){t=t||{},this._cacheCanvas||this._createCacheCanvas(),this.isCacheDirty()&&(this.statefullCache&&this.saveState({propertySet:"cacheProperties"}),this.drawObject(this._cacheContext,t.forClipping),this.dirty=!1)},_removeCacheCanvas:function(){this._cacheCanvas=null,this.cacheWidth=0,this.cacheHeight=0},hasStroke:function(){return this.stroke&&"transparent"!==this.stroke&&0!==this.strokeWidth},hasFill:function(){return this.fill&&"transparent"!==this.fill},needsItsOwnCache:function(){return!("stroke"!==this.paintFirst||!this.hasFill()||!this.hasStroke()||"object"!=typeof this.shadow)||!!this.clipPath},shouldCache:function(){return this.ownCaching=this.needsItsOwnCache()||this.objectCaching&&(!this.group||!this.group.isOnACache()),this.ownCaching},willDrawShadow:function(){return!!this.shadow&&(0!==this.shadow.offsetX||0!==this.shadow.offsetY)},drawClipPathOnCache:function(t){var i=this.clipPath;if(t.save(),i.inverted?t.globalCompositeOperation="destination-out":t.globalCompositeOperation="destination-in",i.absolutePositioned){var r=e.util.invertTransform(this.calcTransformMatrix());t.transform(r[0],r[1],r[2],r[3],r[4],r[5])}i.transform(t),t.scale(1/i.zoomX,1/i.zoomY),t.drawImage(i._cacheCanvas,-i.cacheTranslationX,-i.cacheTranslationY),t.restore()},drawObject:function(t,e){var i=this.fill,r=this.stroke;e?(this.fill="black",this.stroke="",this._setClippingProperties(t)):this._renderBackground(t),this._render(t),this._drawClipPath(t),this.fill=i,this.stroke=r},_drawClipPath:function(t){var 
e=this.clipPath;e&&(e.canvas=this.canvas,e.shouldCache(),e._transformDone=!0,e.renderCache({forClipping:!0}),this.drawClipPathOnCache(t))},drawCacheOnCanvas:function(t){t.scale(1/this.zoomX,1/this.zoomY),t.drawImage(this._cacheCanvas,-this.cacheTranslationX,-this.cacheTranslationY)},isCacheDirty:function(t){if(this.isNotVisible())return!1;if(this._cacheCanvas&&!t&&this._updateCacheCanvas())return!0;if(this.dirty||this.clipPath&&this.clipPath.absolutePositioned||this.statefullCache&&this.hasStateChanged("cacheProperties")){if(this._cacheCanvas&&!t){var e=this.cacheWidth/this.zoomX,i=this.cacheHeight/this.zoomY;this._cacheContext.clearRect(-e/2,-i/2,e,i)}return!0}return!1},_renderBackground:function(t){if(this.backgroundColor){var e=this._getNonTransformedDimensions();t.fillStyle=this.backgroundColor,t.fillRect(-e.x/2,-e.y/2,e.x,e.y),this._removeShadow(t)}},_setOpacity:function(t){this.group&&!this.group._transformDone?t.globalAlpha=this.getObjectOpacity():t.globalAlpha*=this.opacity},_setStrokeStyles:function(t,e){var i=e.stroke;i&&(t.lineWidth=e.strokeWidth,t.lineCap=e.strokeLineCap,t.lineDashOffset=e.strokeDashOffset,t.lineJoin=e.strokeLineJoin,t.miterLimit=e.strokeMiterLimit,i.toLive?"percentage"===i.gradientUnits||i.gradientTransform||i.patternTransform?this._applyPatternForTransformedGradient(t,i):(t.strokeStyle=i.toLive(t,this),this._applyPatternGradientTransform(t,i)):t.strokeStyle=e.stroke)},_setFillStyles:function(t,e){var i=e.fill;i&&(i.toLive?(t.fillStyle=i.toLive(t,this),this._applyPatternGradientTransform(t,e.fill)):t.fillStyle=i)},_setClippingProperties:function(t){t.globalAlpha=1,t.strokeStyle="transparent",t.fillStyle="#000000"},_setLineDash:function(t,e){e&&0!==e.length&&(1&e.length&&e.push.apply(e,e),t.setLineDash(e))},_renderControls:function(t,i){var r,n,o,a=this.getViewportTransform(),h=this.calcTransformMatrix();n=void 0!==(i=i||{}).hasBorders?i.hasBorders:this.hasBorders,o=void 0!==i.hasControls?i.hasControls:this.hasControls,h=e.util.multiplyTransformMatrices(a,h),r=e.util.qrDecompose(h),t.save(),t.translate(r.translateX,r.translateY),t.lineWidth=1*this.borderScaleFactor,this.group||(t.globalAlpha=this.isMoving?this.borderOpacityWhenMoving:1),t.rotate(s(r.angle)),i.forActiveSelection||this.group?n&&this.drawBordersInGroup(t,r,i):n&&this.drawBorders(t,i),o&&this.drawControls(t,i),t.restore()},_setShadow:function(t){if(this.shadow){var i,r=this.shadow,n=this.canvas,o=n&&n.viewportTransform[0]||1,s=n&&n.viewportTransform[3]||1;i=r.nonScaling?{scaleX:1,scaleY:1}:this.getObjectScaling(),n&&n._isRetinaScaling()&&(o*=e.devicePixelRatio,s*=e.devicePixelRatio),t.shadowColor=r.color,t.shadowBlur=r.blur*e.browserShadowBlurConstant*(o+s)*(i.scaleX+i.scaleY)/4,t.shadowOffsetX=r.offsetX*o*i.scaleX,t.shadowOffsetY=r.offsetY*s*i.scaleY}},_removeShadow:function(t){this.shadow&&(t.shadowColor="",t.shadowBlur=t.shadowOffsetX=t.shadowOffsetY=0)},_applyPatternGradientTransform:function(t,e){if(!e||!e.toLive)return{offsetX:0,offsetY:0};var 
i=e.gradientTransform||e.patternTransform,r=-this.width/2+e.offsetX||0,n=-this.height/2+e.offsetY||0;return"percentage"===e.gradientUnits?t.transform(this.width,0,0,this.height,r,n):t.transform(1,0,0,1,r,n),i&&t.transform(i[0],i[1],i[2],i[3],i[4],i[5]),{offsetX:r,offsetY:n}},_renderPaintInOrder:function(t){"stroke"===this.paintFirst?(this._renderStroke(t),this._renderFill(t)):(this._renderFill(t),this._renderStroke(t))},_render:function(){},_renderFill:function(t){this.fill&&(t.save(),this._setFillStyles(t,this),"evenodd"===this.fillRule?t.fill("evenodd"):t.fill(),t.restore())},_renderStroke:function(t){if(this.stroke&&0!==this.strokeWidth){if(this.shadow&&!this.shadow.affectStroke&&this._removeShadow(t),t.save(),this.strokeUniform&&this.group){var e=this.getObjectScaling();t.scale(1/e.scaleX,1/e.scaleY)}else this.strokeUniform&&t.scale(1/this.scaleX,1/this.scaleY);this._setLineDash(t,this.strokeDashArray),this._setStrokeStyles(t,this),t.stroke(),t.restore()}},_applyPatternForTransformedGradient:function(t,i){var r,n=this._limitCacheSize(this._getCacheCanvasDimensions()),o=e.util.createCanvasElement(),s=this.canvas.getRetinaScaling(),a=n.x/this.scaleX/s,h=n.y/this.scaleY/s;o.width=a,o.height=h,(r=o.getContext("2d")).beginPath(),r.moveTo(0,0),r.lineTo(a,0),r.lineTo(a,h),r.lineTo(0,h),r.closePath(),r.translate(a/2,h/2),r.scale(n.zoomX/this.scaleX/s,n.zoomY/this.scaleY/s),this._applyPatternGradientTransform(r,i),r.fillStyle=i.toLive(t),r.fill(),t.translate(-this.width/2-this.strokeWidth/2,-this.height/2-this.strokeWidth/2),t.scale(s*this.scaleX/n.zoomX,s*this.scaleY/n.zoomY),t.strokeStyle=r.createPattern(o,"no-repeat")},_findCenterFromElement:function(){return{x:this.left+this.width/2,y:this.top+this.height/2}},_assignTransformMatrixProps:function(){if(this.transformMatrix){var t=e.util.qrDecompose(this.transformMatrix);this.flipX=!1,this.flipY=!1,this.set("scaleX",t.scaleX),this.set("scaleY",t.scaleY),this.angle=t.angle,this.skewX=t.skewX,this.skewY=0}},_removeTransformMatrix:function(t){var i=this._findCenterFromElement();this.transformMatrix&&(this._assignTransformMatrixProps(),i=e.util.transformPoint(i,this.transformMatrix)),this.transformMatrix=null,t&&(this.scaleX*=t.scaleX,this.scaleY*=t.scaleY,this.cropX=t.cropX,this.cropY=t.cropY,i.x+=t.offsetLeft,i.y+=t.offsetTop,this.width=t.width,this.height=t.height),this.setPositionByOrigin(i,"center","center")},clone:function(t,i){var r=this.toObject(i);this.constructor.fromObject?this.constructor.fromObject(r,t):e.Object._fromObject("Object",r,t)},cloneAsImage:function(t,i){var r=this.toCanvasElement(i);return t&&t(new e.Image(r)),this},toCanvasElement:function(t){t||(t={});var i=e.util,r=i.saveObjectTransform(this),n=this.group,o=this.shadow,s=Math.abs,a=(t.multiplier||1)*(t.enableRetinaScaling?e.devicePixelRatio:1);delete this.group,t.withoutTransform&&i.resetObjectTransform(this),t.withoutShadow&&(this.shadow=null);var h,c,l,u,f=e.util.createCanvasElement(),d=this.getBoundingRect(!0,!0),g=this.shadow,p={x:0,y:0};g&&(c=g.blur,h=g.nonScaling?{scaleX:1,scaleY:1}:this.getObjectScaling(),p.x=2*Math.round(s(g.offsetX)+c)*s(h.scaleX),p.y=2*Math.round(s(g.offsetY)+c)*s(h.scaleY)),l=d.width+p.x,u=d.height+p.y,f.width=Math.ceil(l),f.height=Math.ceil(u);var v=new e.StaticCanvas(f,{enableRetinaScaling:!1,renderOnAddRemove:!1,skipOffscreen:!1});"jpeg"===t.format&&(v.backgroundColor="#fff"),this.setPositionByOrigin(new e.Point(v.width/2,v.height/2),"center","center");var m=this.canvas;v.add(this);var y=v.toCanvasElement(a||1,t);return 
this.shadow=o,this.set("canvas",m),n&&(this.group=n),this.set(r).setCoords(),v._objects=[],v.dispose(),v=null,y},toDataURL:function(t){return t||(t={}),e.util.toDataURL(this.toCanvasElement(t),t.format||"png",t.quality||1)},isType:function(t){return this.type===t},complexity:function(){return 1},toJSON:function(t){return this.toObject(t)},rotate:function(t){var e=("center"!==this.originX||"center"!==this.originY)&&this.centeredRotation;return e&&this._setOriginToCenter(),this.set("angle",t),e&&this._resetOrigin(),this},centerH:function(){return this.canvas&&this.canvas.centerObjectH(this),this},viewportCenterH:function(){return this.canvas&&this.canvas.viewportCenterObjectH(this),this},centerV:function(){return this.canvas&&this.canvas.centerObjectV(this),this},viewportCenterV:function(){return this.canvas&&this.canvas.viewportCenterObjectV(this),this},center:function(){return this.canvas&&this.canvas.centerObject(this),this},viewportCenter:function(){return this.canvas&&this.canvas.viewportCenterObject(this),this},getLocalPointer:function(t,i){i=i||this.canvas.getPointer(t);var r=new e.Point(i.x,i.y),n=this._getLeftTopCoords();return this.angle&&(r=e.util.rotatePoint(r,n,s(-this.angle))),{x:r.x-n.x,y:r.y-n.y}},_setupCompositeOperation:function(t){this.globalCompositeOperation&&(t.globalCompositeOperation=this.globalCompositeOperation)}}),e.util.createAccessors&&e.util.createAccessors(e.Object),i(e.Object.prototype,e.Observable),e.Object.NUM_FRACTION_DIGITS=2,e.Object._fromObject=function(t,i,n,o){var s=e[t];i=r(i,!0),e.util.enlivenPatterns([i.fill,i.stroke],(function(t){void 0!==t[0]&&(i.fill=t[0]),void 0!==t[1]&&(i.stroke=t[1]),e.util.enlivenObjects([i.clipPath],(function(t){i.clipPath=t[0];var e=o?new s(i[o],i):new s(i);n&&n(e)}))}))},e.Object.__uid=0)}(t),function(){var t=T.util.degreesToRadians,e={left:-.5,center:0,right:.5},i={top:-.5,center:0,bottom:.5};T.util.object.extend(T.Object.prototype,{translateToGivenOrigin:function(t,r,n,o,s){var a,h,c,l=t.x,u=t.y;return"string"==typeof r?r=e[r]:r-=.5,"string"==typeof o?o=e[o]:o-=.5,"string"==typeof n?n=i[n]:n-=.5,"string"==typeof s?s=i[s]:s-=.5,h=s-n,((a=o-r)||h)&&(c=this._getTransformedDimensions(),l=t.x+a*c.x,u=t.y+h*c.y),new T.Point(l,u)},translateToCenterPoint:function(e,i,r){var n=this.translateToGivenOrigin(e,i,r,"center","center");return this.angle?T.util.rotatePoint(n,e,t(this.angle)):n},translateToOriginPoint:function(e,i,r){var n=this.translateToGivenOrigin(e,"center","center",i,r);return this.angle?T.util.rotatePoint(n,e,t(this.angle)):n},getCenterPoint:function(){var t=new T.Point(this.left,this.top);return this.translateToCenterPoint(t,this.originX,this.originY)},getPointByOrigin:function(t,e){var i=this.getCenterPoint();return this.translateToOriginPoint(i,t,e)},toLocalPoint:function(e,i,r){var n,o,s=this.getCenterPoint();return n=void 0!==i&&void 0!==r?this.translateToGivenOrigin(s,"center","center",i,r):new T.Point(this.left,this.top),o=new T.Point(e.x,e.y),this.angle&&(o=T.util.rotatePoint(o,s,-t(this.angle))),o.subtractEquals(n)},setPositionByOrigin:function(t,e,i){var r=this.translateToCenterPoint(t,e,i),n=this.translateToOriginPoint(r,this.originX,this.originY);this.set("left",n.x),this.set("top",n.y)},adjustPosition:function(i){var r,n,o=t(this.angle),s=this.getScaledWidth(),a=T.util.cos(o)*s,h=T.util.sin(o)*s;r="string"==typeof this.originX?e[this.originX]:this.originX-.5,n="string"==typeof 
i?e[i]:i-.5,this.left+=a*(n-r),this.top+=h*(n-r),this.setCoords(),this.originX=i},_setOriginToCenter:function(){this._originalOriginX=this.originX,this._originalOriginY=this.originY;var t=this.getCenterPoint();this.originX="center",this.originY="center",this.left=t.x,this.top=t.y},_resetOrigin:function(){var t=this.translateToOriginPoint(this.getCenterPoint(),this._originalOriginX,this._originalOriginY);this.originX=this._originalOriginX,this.originY=this._originalOriginY,this.left=t.x,this.top=t.y,this._originalOriginX=null,this._originalOriginY=null},_getLeftTopCoords:function(){return this.translateToOriginPoint(this.getCenterPoint(),"left","top")}})}(),function(){var t=T.util,e=t.degreesToRadians,i=t.multiplyTransformMatrices,r=t.transformPoint;t.object.extend(T.Object.prototype,{oCoords:null,aCoords:null,lineCoords:null,ownMatrixCache:null,matrixCache:null,controls:{},_getCoords:function(t,e){return e?t?this.calcACoords():this.calcLineCoords():(this.aCoords&&this.lineCoords||this.setCoords(!0),t?this.aCoords:this.lineCoords)},getCoords:function(t,e){return i=this._getCoords(t,e),[new T.Point(i.tl.x,i.tl.y),new T.Point(i.tr.x,i.tr.y),new T.Point(i.br.x,i.br.y),new T.Point(i.bl.x,i.bl.y)];var i},intersectsWithRect:function(t,e,i,r){var n=this.getCoords(i,r);return"Intersection"===T.Intersection.intersectPolygonRectangle(n,t,e).status},intersectsWithObject:function(t,e,i){return"Intersection"===T.Intersection.intersectPolygonPolygon(this.getCoords(e,i),t.getCoords(e,i)).status||t.isContainedWithinObject(this,e,i)||this.isContainedWithinObject(t,e,i)},isContainedWithinObject:function(t,e,i){for(var r=this.getCoords(e,i),n=e?t.aCoords:t.lineCoords,o=0,s=t._getImageLines(n);o<4;o++)if(!t.containsPoint(r[o],s))return!1;return!0},isContainedWithinRect:function(t,e,i,r){var n=this.getBoundingRect(i,r);return n.left>=t.x&&n.left+n.width<=e.x&&n.top>=t.y&&n.top+n.height<=e.y},containsPoint:function(t,e,i,r){var n=this._getCoords(i,r),o=(e=e||this._getImageLines(n),this._findCrossPoints(t,e));return 0!==o&&o%2==1},isOnScreen:function(t){if(!this.canvas)return!1;var e=this.canvas.vptCoords.tl,i=this.canvas.vptCoords.br;return!!this.getCoords(!0,t).some((function(t){return t.x<=i.x&&t.x>=e.x&&t.y<=i.y&&t.y>=e.y}))||(!!this.intersectsWithRect(e,i,!0,t)||this._containsCenterOfCanvas(e,i,t))},_containsCenterOfCanvas:function(t,e,i){var r={x:(t.x+e.x)/2,y:(t.y+e.y)/2};return!!this.containsPoint(r,null,!0,i)},isPartiallyOnScreen:function(t){if(!this.canvas)return!1;var e=this.canvas.vptCoords.tl,i=this.canvas.vptCoords.br;return!!this.intersectsWithRect(e,i,!0,t)||this.getCoords(!0,t).every((function(t){return(t.x>=i.x||t.x<=e.x)&&(t.y>=i.y||t.y<=e.y)}))&&this._containsCenterOfCanvas(e,i,t)},_getImageLines:function(t){return{topline:{o:t.tl,d:t.tr},rightline:{o:t.tr,d:t.br},bottomline:{o:t.br,d:t.bl},leftline:{o:t.bl,d:t.tl}}},_findCrossPoints:function(t,e){var i,r,n,o=0;for(var s in e)if(!((n=e[s]).o.y=t.y&&n.d.y>=t.y||(n.o.x===n.d.x&&n.o.x>=t.x?r=n.o.x:(0,i=(n.d.y-n.o.y)/(n.d.x-n.o.x),r=-(t.y-0*t.x-(n.o.y-i*n.o.x))/(0-i)),r>=t.x&&(o+=1),2!==o)))break;return o},getBoundingRect:function(e,i){var r=this.getCoords(e,i);return t.makeBoundingBoxFromPoints(r)},getScaledWidth:function(){return this._getTransformedDimensions().x},getScaledHeight:function(){return this._getTransformedDimensions().y},_constrainScale:function(t){return Math.abs(t)\n')}},toSVG:function(t){return 
this._createBaseSVGMarkup(this._toSVG(t),{reviver:t})},toClipPathSVG:function(t){return"\t"+this._createBaseClipPathSVGMarkup(this._toSVG(t),{reviver:t})},_createBaseClipPathSVGMarkup:function(t,e){var i=(e=e||{}).reviver,r=e.additionalTransform||"",n=[this.getSvgTransform(!0,r),this.getSvgCommons()].join(""),o=t.indexOf("COMMON_PARTS");return t[o]=n,i?i(t.join("")):t.join("")},_createBaseSVGMarkup:function(t,e){var i,r,n=(e=e||{}).noStyle,o=e.reviver,s=n?"":'style="'+this.getSvgStyles()+'" ',a=e.withShadow?'style="'+this.getSvgFilter()+'" ':"",h=this.clipPath,c=this.strokeUniform?'vector-effect="non-scaling-stroke" ':"",l=h&&h.absolutePositioned,u=this.stroke,f=this.fill,d=this.shadow,g=[],p=t.indexOf("COMMON_PARTS"),v=e.additionalTransform;return h&&(h.clipPathId="CLIPPATH_"+T.Object.__uid++,r='\n'+h.toClipPathSVG(o)+"\n"),l&&g.push("\n"),g.push("\n"),i=[s,c,n?"":this.addPaintOrder()," ",v?'transform="'+v+'" ':""].join(""),t[p]=i,f&&f.toLive&&g.push(f.toSVG(this)),u&&u.toLive&&g.push(u.toSVG(this)),d&&g.push(d.toSVG(this)),h&&g.push(r),g.push(t.join("")),g.push("\n"),l&&g.push("\n"),o?o(g.join("")):g.join("")},addPaintOrder:function(){return"fill"!==this.paintFirst?' paint-order="'+this.paintFirst+'" ':""}})}(),function(){var t=T.util.object.extend,e="stateProperties";function i(e,i,r){var n={};r.forEach((function(t){n[t]=e[t]})),t(e[i],n,!0)}function r(t,e,i){if(t===e)return!0;if(Array.isArray(t)){if(!Array.isArray(e)||t.length!==e.length)return!1;for(var n=0,o=t.length;n=0;h--)if(n=a[h],this.isControlVisible(n)&&(r=this._getImageLines(e?this.oCoords[n].touchCorner:this.oCoords[n].corner),0!==(i=this._findCrossPoints({x:o,y:s},r))&&i%2==1))return this.__corner=n,n;return!1},forEachControl:function(t){for(var e in this.controls)t(this.controls[e],e,this)},_setCornerCoords:function(){var t=this.oCoords;for(var e in t){var i=this.controls[e];t[e].corner=i.calcCornerCoords(this.angle,this.cornerSize,t[e].x,t[e].y,!1),t[e].touchCorner=i.calcCornerCoords(this.angle,this.touchCornerSize,t[e].x,t[e].y,!0)}},drawSelectionBackground:function(e){if(!this.selectionBackgroundColor||this.canvas&&!this.canvas.interactive||this.canvas&&this.canvas._activeObject!==this)return this;e.save();var i=this.getCenterPoint(),r=this._calculateCurrentDimensions(),n=this.canvas.viewportTransform;return e.translate(i.x,i.y),e.scale(1/n[0],1/n[3]),e.rotate(t(this.angle)),e.fillStyle=this.selectionBackgroundColor,e.fillRect(-r.x/2,-r.y/2,r.x,r.y),e.restore(),this},drawBorders:function(t,e){e=e||{};var i=this._calculateCurrentDimensions(),r=this.borderScaleFactor,n=i.x+r,o=i.y+r,s=void 0!==e.hasControls?e.hasControls:this.hasControls,a=!1;return t.save(),t.strokeStyle=e.borderColor||this.borderColor,this._setLineDash(t,e.borderDashArray||this.borderDashArray),t.strokeRect(-n/2,-o/2,n,o),s&&(t.beginPath(),this.forEachControl((function(e,i,r){e.withConnection&&e.getVisibility(r,i)&&(a=!0,t.moveTo(e.x*n,e.y*o),t.lineTo(e.x*n+e.offsetX,e.y*o+e.offsetY))})),a&&t.stroke()),t.restore(),this},drawBordersInGroup:function(t,e,i){i=i||{};var r=T.util.sizeAfterTransform(this.width,this.height,e),n=this.strokeWidth,o=this.strokeUniform,s=this.borderScaleFactor,a=r.x+n*(o?this.canvas.getZoom():e.scaleX)+s,h=r.y+n*(o?this.canvas.getZoom():e.scaleY)+s;return t.save(),this._setLineDash(t,i.borderDashArray||this.borderDashArray),t.strokeStyle=i.borderColor||this.borderColor,t.strokeRect(-a/2,-h/2,a,h),t.restore(),this},drawControls:function(t,e){e=e||{},t.save();var i,r,n=this.canvas.getRetinaScaling();return 
t.setTransform(n,0,0,n,0,0),t.strokeStyle=t.fillStyle=e.cornerColor||this.cornerColor,this.transparentCorners||(t.strokeStyle=e.cornerStrokeColor||this.cornerStrokeColor),this._setLineDash(t,e.cornerDashArray||this.cornerDashArray),this.setCoords(),this.group&&(i=this.group.calcTransformMatrix()),this.forEachControl((function(n,o,s){r=s.oCoords[o],n.getVisibility(s,o)&&(i&&(r=T.util.transformPoint(r,i)),n.render(t,r.x,r.y,e,s))})),t.restore(),this},isControlVisible:function(t){return this.controls[t]&&this.controls[t].getVisibility(this,t)},setControlVisible:function(t,e){return this._controlsVisibility||(this._controlsVisibility={}),this._controlsVisibility[t]=e,this},setControlsVisibility:function(t){for(var e in t||(t={}),t)this.setControlVisible(e,t[e]);return this},onDeselect:function(){},onSelect:function(){}})}(),T.util.object.extend(T.StaticCanvas.prototype,{FX_DURATION:500,fxCenterObjectH:function(t,e){var i=function(){},r=(e=e||{}).onComplete||i,n=e.onChange||i,o=this;return T.util.animate({startValue:t.left,endValue:this.getCenter().left,duration:this.FX_DURATION,onChange:function(e){t.set("left",e),o.requestRenderAll(),n()},onComplete:function(){t.setCoords(),r()}}),this},fxCenterObjectV:function(t,e){var i=function(){},r=(e=e||{}).onComplete||i,n=e.onChange||i,o=this;return T.util.animate({startValue:t.top,endValue:this.getCenter().top,duration:this.FX_DURATION,onChange:function(e){t.set("top",e),o.requestRenderAll(),n()},onComplete:function(){t.setCoords(),r()}}),this},fxRemove:function(t,e){var i=function(){},r=(e=e||{}).onComplete||i,n=e.onChange||i,o=this;return T.util.animate({startValue:t.opacity,endValue:0,duration:this.FX_DURATION,onChange:function(e){t.set("opacity",e),o.requestRenderAll(),n()},onComplete:function(){o.remove(t),r()}}),this}}),T.util.object.extend(T.Object.prototype,{animate:function(){if(arguments[0]&&"object"==typeof arguments[0]){var t,e,i=[];for(t in arguments[0])i.push(t);for(var r=0,n=i.length;r-1||n&&o.colorProperties.indexOf(n[1])>-1,a=n?this.get(n[0])[n[1]]:this.get(t);"from"in i||(i.from=a),s||(e=~e.indexOf("=")?a+parseFloat(e.replace("=","")):parseFloat(e));var h={startValue:i.from,endValue:e,byValue:i.by,easing:i.easing,duration:i.duration,abort:i.abort&&function(t,e,r){return i.abort.call(o,t,e,r)},onChange:function(e,s,a){n?o[n[0]][n[1]]=e:o.set(t,e),r||i.onChange&&i.onChange(e,s,a)},onComplete:function(t,e,n){r||(o.setCoords(),i.onComplete&&i.onComplete(t,e,n))}};return s?T.util.animateColor(h.startValue,h.endValue,h.duration,h):T.util.animate(h)}}),function(t){var e=t.fabric||(t.fabric={}),i=e.util.object.extend,r=e.util.object.clone,n={x1:1,x2:1,y1:1,y2:1};function o(t,e){var i=t.origin,r=t.axis1,n=t.axis2,o=t.dimension,s=e.nearest,a=e.center,h=e.farthest;return function(){switch(this.get(i)){case s:return Math.min(this.get(r),this.get(n));case a:return Math.min(this.get(r),this.get(n))+.5*this.get(o);case h:return Math.max(this.get(r),this.get(n))}}}e.Line?e.warn("fabric.Line is already defined"):(e.Line=e.util.createClass(e.Object,{type:"line",x1:0,y1:0,x2:0,y2:0,cacheProperties:e.Object.prototype.cacheProperties.concat("x1","x2","y1","y2"),initialize:function(t,e){t||(t=[0,0,0,0]),this.callSuper("initialize",e),this.set("x1",t[0]),this.set("y1",t[1]),this.set("x2",t[2]),this.set("y2",t[3]),this._setWidthHeight(e)},_setWidthHeight:function(t){t||(t={}),this.width=Math.abs(this.x2-this.x1),this.height=Math.abs(this.y2-this.y1),this.left="left"in t?t.left:this._getLeftToOriginX(),this.top="top"in 
t?t.top:this._getTopToOriginY()},_set:function(t,e){return this.callSuper("_set",t,e),void 0!==n[t]&&this._setWidthHeight(),this},_getLeftToOriginX:o({origin:"originX",axis1:"x1",axis2:"x2",dimension:"width"},{nearest:"left",center:"center",farthest:"right"}),_getTopToOriginY:o({origin:"originY",axis1:"y1",axis2:"y2",dimension:"height"},{nearest:"top",center:"center",farthest:"bottom"}),_render:function(t){t.beginPath();var e=this.calcLinePoints();t.moveTo(e.x1,e.y1),t.lineTo(e.x2,e.y2),t.lineWidth=this.strokeWidth;var i=t.strokeStyle;t.strokeStyle=this.stroke||t.fillStyle,this.stroke&&this._renderStroke(t),t.strokeStyle=i},_findCenterFromElement:function(){return{x:(this.x1+this.x2)/2,y:(this.y1+this.y2)/2}},toObject:function(t){return i(this.callSuper("toObject",t),this.calcLinePoints())},_getNonTransformedDimensions:function(){var t=this.callSuper("_getNonTransformedDimensions");return"butt"===this.strokeLineCap&&(0===this.width&&(t.y-=this.strokeWidth),0===this.height&&(t.x-=this.strokeWidth)),t},calcLinePoints:function(){var t=this.x1<=this.x2?-1:1,e=this.y1<=this.y2?-1:1,i=t*this.width*.5,r=e*this.height*.5;return{x1:i,x2:t*this.width*-.5,y1:r,y2:e*this.height*-.5}},_toSVG:function(){var t=this.calcLinePoints();return["\n']}}),e.Line.ATTRIBUTE_NAMES=e.SHARED_ATTRIBUTES.concat("x1 y1 x2 y2".split(" ")),e.Line.fromElement=function(t,r,n){n=n||{};var o=e.parseAttributes(t,e.Line.ATTRIBUTE_NAMES),s=[o.x1||0,o.y1||0,o.x2||0,o.y2||0];r(new e.Line(s,i(o,n)))},e.Line.fromObject=function(t,i){var n=r(t,!0);n.points=[t.x1,t.y1,t.x2,t.y2],e.Object._fromObject("Line",n,(function(t){delete t.points,i&&i(t)}),"points")})}(t),function(t){var e=t.fabric||(t.fabric={}),i=Math.PI;e.Circle?e.warn("fabric.Circle is already defined."):(e.Circle=e.util.createClass(e.Object,{type:"circle",radius:0,startAngle:0,endAngle:2*i,cacheProperties:e.Object.prototype.cacheProperties.concat("radius","startAngle","endAngle"),_set:function(t,e){return this.callSuper("_set",t,e),"radius"===t&&this.setRadius(e),this},toObject:function(t){return this.callSuper("toObject",["radius","startAngle","endAngle"].concat(t))},_toSVG:function(){var t,r=(this.endAngle-this.startAngle)%(2*i);if(0===r)t=["\n'];else{var n=e.util.cos(this.startAngle)*this.radius,o=e.util.sin(this.startAngle)*this.radius,s=e.util.cos(this.endAngle)*this.radius,a=e.util.sin(this.endAngle)*this.radius,h=r>i?"1":"0";t=['\n"]}return t},_render:function(t){t.beginPath(),t.arc(0,0,this.radius,this.startAngle,this.endAngle,!1),this._renderPaintInOrder(t)},getRadiusX:function(){return this.get("radius")*this.get("scaleX")},getRadiusY:function(){return this.get("radius")*this.get("scaleY")},setRadius:function(t){return this.radius=t,this.set("width",2*t).set("height",2*t)}}),e.Circle.ATTRIBUTE_NAMES=e.SHARED_ATTRIBUTES.concat("cx cy r".split(" ")),e.Circle.fromElement=function(t,i){var r,n=e.parseAttributes(t,e.Circle.ATTRIBUTE_NAMES);if(!("radius"in(r=n)&&r.radius>=0))throw new Error("value of `r` attribute is required and can not be negative");n.left=(n.left||0)-n.radius,n.top=(n.top||0)-n.radius,i(new e.Circle(n))},e.Circle.fromObject=function(t,i){e.Object._fromObject("Circle",t,i)})}(t),function(t){var e=t.fabric||(t.fabric={});e.Triangle?e.warn("fabric.Triangle is already defined"):(e.Triangle=e.util.createClass(e.Object,{type:"triangle",width:100,height:100,_render:function(t){var e=this.width/2,i=this.height/2;t.beginPath(),t.moveTo(-e,i),t.lineTo(0,-i),t.lineTo(e,i),t.closePath(),this._renderPaintInOrder(t)},_toSVG:function(){var 
t=this.width/2,e=this.height/2;return["']}}),e.Triangle.fromObject=function(t,i){return e.Object._fromObject("Triangle",t,i)})}(t),function(t){var e=t.fabric||(t.fabric={}),i=2*Math.PI;e.Ellipse?e.warn("fabric.Ellipse is already defined."):(e.Ellipse=e.util.createClass(e.Object,{type:"ellipse",rx:0,ry:0,cacheProperties:e.Object.prototype.cacheProperties.concat("rx","ry"),initialize:function(t){this.callSuper("initialize",t),this.set("rx",t&&t.rx||0),this.set("ry",t&&t.ry||0)},_set:function(t,e){switch(this.callSuper("_set",t,e),t){case"rx":this.rx=e,this.set("width",2*e);break;case"ry":this.ry=e,this.set("height",2*e)}return this},getRx:function(){return this.get("rx")*this.get("scaleX")},getRy:function(){return this.get("ry")*this.get("scaleY")},toObject:function(t){return this.callSuper("toObject",["rx","ry"].concat(t))},_toSVG:function(){return["\n']},_render:function(t){t.beginPath(),t.save(),t.transform(1,0,0,this.ry/this.rx,0,0),t.arc(0,0,this.rx,0,i,!1),t.restore(),this._renderPaintInOrder(t)}}),e.Ellipse.ATTRIBUTE_NAMES=e.SHARED_ATTRIBUTES.concat("cx cy rx ry".split(" ")),e.Ellipse.fromElement=function(t,i){var r=e.parseAttributes(t,e.Ellipse.ATTRIBUTE_NAMES);r.left=(r.left||0)-r.rx,r.top=(r.top||0)-r.ry,i(new e.Ellipse(r))},e.Ellipse.fromObject=function(t,i){e.Object._fromObject("Ellipse",t,i)})}(t),function(t){var e=t.fabric||(t.fabric={}),i=e.util.object.extend;e.Rect?e.warn("fabric.Rect is already defined"):(e.Rect=e.util.createClass(e.Object,{stateProperties:e.Object.prototype.stateProperties.concat("rx","ry"),type:"rect",rx:0,ry:0,cacheProperties:e.Object.prototype.cacheProperties.concat("rx","ry"),initialize:function(t){this.callSuper("initialize",t),this._initRxRy()},_initRxRy:function(){this.rx&&!this.ry?this.ry=this.rx:this.ry&&!this.rx&&(this.rx=this.ry)},_render:function(t){var e=this.rx?Math.min(this.rx,this.width/2):0,i=this.ry?Math.min(this.ry,this.height/2):0,r=this.width,n=this.height,o=-this.width/2,s=-this.height/2,a=0!==e||0!==i,h=.4477152502;t.beginPath(),t.moveTo(o+e,s),t.lineTo(o+r-e,s),a&&t.bezierCurveTo(o+r-h*e,s,o+r,s+h*i,o+r,s+i),t.lineTo(o+r,s+n-i),a&&t.bezierCurveTo(o+r,s+n-h*i,o+r-h*e,s+n,o+r-e,s+n),t.lineTo(o+e,s+n),a&&t.bezierCurveTo(o+h*e,s+n,o,s+n-h*i,o,s+n-i),t.lineTo(o,s+i),a&&t.bezierCurveTo(o,s+h*i,o+h*e,s,o+e,s),t.closePath(),this._renderPaintInOrder(t)},toObject:function(t){return this.callSuper("toObject",["rx","ry"].concat(t))},_toSVG:function(){return["\n']}}),e.Rect.ATTRIBUTE_NAMES=e.SHARED_ATTRIBUTES.concat("x y rx ry width height".split(" ")),e.Rect.fromElement=function(t,r,n){if(!t)return r(null);n=n||{};var o=e.parseAttributes(t,e.Rect.ATTRIBUTE_NAMES);o.left=o.left||0,o.top=o.top||0,o.height=o.height||0,o.width=o.width||0;var s=new e.Rect(i(n?e.util.object.clone(n):{},o));s.visible=s.visible&&s.width>0&&s.height>0,r(s)},e.Rect.fromObject=function(t,i){return e.Object._fromObject("Rect",t,i)})}(t),function(t){var e=t.fabric||(t.fabric={}),i=e.util.object.extend,r=e.util.array.min,n=e.util.array.max,o=e.util.toFixed;e.Polyline?e.warn("fabric.Polyline is already defined"):(e.Polyline=e.util.createClass(e.Object,{type:"polyline",points:null,cacheProperties:e.Object.prototype.cacheProperties.concat("points"),initialize:function(t,e){e=e||{},this.points=t||[],this.callSuper("initialize",e),this._setPositionDimensions(e)},_setPositionDimensions:function(t){var 
e,i=this._calcDimensions(t);this.width=i.width,this.height=i.height,t.fromSVG||(e=this.translateToGivenOrigin({x:i.left-this.strokeWidth/2,y:i.top-this.strokeWidth/2},"left","top",this.originX,this.originY)),void 0===t.left&&(this.left=t.fromSVG?i.left:e.x),void 0===t.top&&(this.top=t.fromSVG?i.top:e.y),this.pathOffset={x:i.left+this.width/2,y:i.top+this.height/2}},_calcDimensions:function(){var t=this.points,e=r(t,"x")||0,i=r(t,"y")||0;return{left:e,top:i,width:(n(t,"x")||0)-e,height:(n(t,"y")||0)-i}},toObject:function(t){return i(this.callSuper("toObject",t),{points:this.points.concat()})},_toSVG:function(){for(var t=[],i=this.pathOffset.x,r=this.pathOffset.y,n=e.Object.NUM_FRACTION_DIGITS,s=0,a=this.points.length;s\n']},commonRender:function(t){var e,i=this.points.length,r=this.pathOffset.x,n=this.pathOffset.y;if(!i||isNaN(this.points[i-1].y))return!1;t.beginPath(),t.moveTo(this.points[0].x-r,this.points[0].y-n);for(var o=0;o"},toObject:function(t){return n(this.callSuper("toObject",t),{path:this.path.map((function(t){return t.slice()}))})},toDatalessObject:function(t){var e=this.toObject(["sourcePath"].concat(t));return e.sourcePath&&delete e.path,e},_toSVG:function(){return["\n"]},_getOffsetTransform:function(){var t=e.Object.NUM_FRACTION_DIGITS;return" translate("+s(-this.pathOffset.x,t)+", "+s(-this.pathOffset.y,t)+")"},toClipPathSVG:function(t){var e=this._getOffsetTransform();return"\t"+this._createBaseClipPathSVGMarkup(this._toSVG(),{reviver:t,additionalTransform:e})},toSVG:function(t){var e=this._getOffsetTransform();return this._createBaseSVGMarkup(this._toSVG(),{reviver:t,additionalTransform:e})},complexity:function(){return this.path.length},_calcDimensions:function(){for(var t,n,o=[],s=[],a=0,h=0,c=0,l=0,u=0,f=this.path.length;u"},addWithUpdate:function(t){var i=!!this.group;return this._restoreObjectsState(),e.util.resetObjectTransform(this),t&&(i&&e.util.removeTransformFromObject(t,this.group.calcTransformMatrix()),this._objects.push(t),t.group=this,t._set("canvas",this.canvas)),this._calcBounds(),this._updateObjectsCoords(),this.dirty=!0,i?this.group.addWithUpdate():this.setCoords(),this},removeWithUpdate:function(t){return this._restoreObjectsState(),e.util.resetObjectTransform(this),this.remove(t),this._calcBounds(),this._updateObjectsCoords(),this.setCoords(),this.dirty=!0,this},_onObjectAdded:function(t){this.dirty=!0,t.group=this,t._set("canvas",this.canvas)},_onObjectRemoved:function(t){this.dirty=!0,delete t.group},_set:function(t,i){var r=this._objects.length;if(this.useSetOnGroup)for(;r--;)this._objects[r].setOnGroup(t,i);if("canvas"===t)for(;r--;)this._objects[r]._set(t,i);e.Object.prototype._set.call(this,t,i)},toObject:function(t){var i=this.includeDefaultValues,r=this._objects.filter((function(t){return!t.excludeFromExport})).map((function(e){var r=e.includeDefaultValues;e.includeDefaultValues=i;var n=e.toObject(t);return e.includeDefaultValues=r,n})),n=e.Object.prototype.toObject.call(this,t);return n.objects=r,n},toDatalessObject:function(t){var i,r=this.sourcePath;if(r)i=r;else{var n=this.includeDefaultValues;i=this._objects.map((function(e){var i=e.includeDefaultValues;e.includeDefaultValues=n;var r=e.toDatalessObject(t);return e.includeDefaultValues=i,r}))}var o=e.Object.prototype.toDatalessObject.call(this,t);return o.objects=i,o},render:function(t){this._transformDone=!0,this.callSuper("render",t),this._transformDone=!1},shouldCache:function(){var t=e.Object.prototype.shouldCache.call(this);if(t)for(var 
i=0,r=this._objects.length;i\n"],i=0,r=this._objects.length;i\n"),e},getSvgStyles:function(){var t=void 0!==this.opacity&&1!==this.opacity?"opacity: "+this.opacity+";":"",e=this.visible?"":" visibility: hidden;";return[t,this.getSvgFilter(),e].join("")},toClipPathSVG:function(t){for(var e=[],i=0,r=this._objects.length;i"},shouldCache:function(){return!1},isOnACache:function(){return!1},_renderControls:function(t,e,i){t.save(),t.globalAlpha=this.isMoving?this.borderOpacityWhenMoving:1,this.callSuper("_renderControls",t,e),void 0===(i=i||{}).hasControls&&(i.hasControls=!1),i.forActiveSelection=!0;for(var r=0,n=this._objects.length;r\n','\t\n',"\n"),s=' clip-path="url(#imageCrop_'+h+')" '}if(this.imageSmoothing||(a='" image-rendering="optimizeSpeed'),i.push("\t\n"),this.stroke||this.strokeDashArray){var c=this.fill;this.fill=null,t=["\t\n'],this.fill=c}return e="fill"!==this.paintFirst?e.concat(t,i):e.concat(i,t)},getSrc:function(t){var e=t?this._element:this._originalElement;return e?e.toDataURL?e.toDataURL():this.srcFromAttribute?e.getAttribute("src"):e.src:this.src||""},setSrc:function(t,e,i){return T.util.loadImage(t,(function(t,r){this.setElement(t,i),this._setWidthHeight(),e&&e(this,r)}),this,i&&i.crossOrigin),this},toString:function(){return'#'},applyResizeFilters:function(){var t=this.resizeFilter,e=this.minimumScaleTrigger,i=this.getTotalObjectScaling(),r=i.scaleX,n=i.scaleY,o=this._filteredEl||this._originalElement;if(this.group&&this.set("dirty",!0),!t||r>e&&n>e)return this._element=o,this._filterScalingX=1,this._filterScalingY=1,this._lastScaleX=r,void(this._lastScaleY=n);T.filterBackend||(T.filterBackend=T.initFilterBackend());var s=T.util.createCanvasElement(),a=this._filteredEl?this.cacheKey+"_filtered":this.cacheKey,h=o.width,c=o.height;s.width=h,s.height=c,this._element=s,this._lastScaleX=t.scaleX=r,this._lastScaleY=t.scaleY=n,T.filterBackend.applyFilters([t],o,h,c,this._element,a),this._filterScalingX=s.width/this._originalElement.width,this._filterScalingY=s.height/this._originalElement.height},applyFilters:function(t){if(t=(t=t||this.filters||[]).filter((function(t){return t&&!t.isNeutralState()})),this.set("dirty",!0),this.removeTexture(this.cacheKey+"_filtered"),0===t.length)return this._element=this._originalElement,this._filteredEl=null,this._filterScalingX=1,this._filterScalingY=1,this;var e=this._originalElement,i=e.naturalWidth||e.width,r=e.naturalHeight||e.height;if(this._element===this._originalElement){var n=T.util.createCanvasElement();n.width=i,n.height=r,this._element=n,this._filteredEl=n}else this._element=this._filteredEl,this._filteredEl.getContext("2d").clearRect(0,0,i,r),this._lastScaleX=1,this._lastScaleY=1;return T.filterBackend||(T.filterBackend=T.initFilterBackend()),T.filterBackend.applyFilters(t,this._originalElement,i,r,this._element,this.cacheKey),this._originalElement.width===this._element.width&&this._originalElement.height===this._element.height||(this._filterScalingX=this._element.width/this._originalElement.width,this._filterScalingY=this._element.height/this._originalElement.height),this},_render:function(t){T.util.setImageSmoothing(t,this.imageSmoothing),!0!==this.isMoving&&this.resizeFilter&&this._needsResize()&&this.applyResizeFilters(),this._stroke(t),this._renderPaintInOrder(t)},drawCacheOnCanvas:function(t){T.util.setImageSmoothing(t,this.imageSmoothing),T.Object.prototype.drawCacheOnCanvas.call(this,t)},shouldCache:function(){return this.needsItsOwnCache()},_renderFill:function(t){var e=this._element;if(e){var 
i=this._filterScalingX,r=this._filterScalingY,n=this.width,o=this.height,s=Math.min,a=Math.max,h=a(this.cropX,0),c=a(this.cropY,0),l=e.naturalWidth||e.width,u=e.naturalHeight||e.height,f=h*i,d=c*r,g=s(n*i,l-f),p=s(o*r,u-d),v=-n/2,m=-o/2,y=s(n,l/i-h),_=s(o,u/r-c);e&&t.drawImage(e,f,d,g,p,v,m,y,_)}},_needsResize:function(){var t=this.getTotalObjectScaling();return t.scaleX!==this._lastScaleX||t.scaleY!==this._lastScaleY},_resetWidthHeight:function(){this.set(this.getOriginalSize())},_initElement:function(t,e){this.setElement(T.util.getById(t),e),T.util.addClass(this.getElement(),T.Image.CSS_CANVAS)},_initConfig:function(t){t||(t={}),this.setOptions(t),this._setWidthHeight(t)},_initFilters:function(t,e){t&&t.length?T.util.enlivenObjects(t,(function(t){e&&e(t)}),"fabric.Image.filters"):e&&e()},_setWidthHeight:function(t){t||(t={});var e=this.getElement();this.width=t.width||e.naturalWidth||e.width||0,this.height=t.height||e.naturalHeight||e.height||0},parsePreserveAspectRatioAttribute:function(){var t,e=T.util.parsePreserveAspectRatioAttribute(this.preserveAspectRatio||""),i=this._element.width,r=this._element.height,n=1,o=1,s=0,a=0,h=0,c=0,l=this.width,u=this.height,f={width:l,height:u};return!e||"none"===e.alignX&&"none"===e.alignY?(n=l/i,o=u/r):("meet"===e.meetOrSlice&&(t=(l-i*(n=o=T.util.findScaleToFit(this._element,f)))/2,"Min"===e.alignX&&(s=-t),"Max"===e.alignX&&(s=t),t=(u-r*o)/2,"Min"===e.alignY&&(a=-t),"Max"===e.alignY&&(a=t)),"slice"===e.meetOrSlice&&(t=i-l/(n=o=T.util.findScaleToCover(this._element,f)),"Mid"===e.alignX&&(h=t/2),"Max"===e.alignX&&(h=t),t=r-u/o,"Mid"===e.alignY&&(c=t/2),"Max"===e.alignY&&(c=t),i=l/n,r=u/o)),{width:i,height:r,scaleX:n,scaleY:o,offsetLeft:s,offsetTop:a,cropX:h,cropY:c}}}),T.Image.CSS_CANVAS="canvas-img",T.Image.prototype.getSvgSrc=T.Image.prototype.getSrc,T.Image.fromObject=function(t,e){var i=T.util.object.clone(t);T.util.loadImage(i.src,(function(t,r){r?e&&e(null,!0):T.Image.prototype._initFilters.call(i,i.filters,(function(r){i.filters=r||[],T.Image.prototype._initFilters.call(i,[i.resizeFilter],(function(r){i.resizeFilter=r[0],T.util.enlivenObjects([i.clipPath],(function(r){i.clipPath=r[0];var n=new T.Image(t,i);e(n,!1)}))}))}))}),null,i.crossOrigin)},T.Image.fromURL=function(t,e,i){T.util.loadImage(t,(function(t,r){e&&e(new T.Image(t,i),r)}),null,i&&i.crossOrigin)},T.Image.ATTRIBUTE_NAMES=T.SHARED_ATTRIBUTES.concat("x y width height preserveAspectRatio xlink:href crossOrigin image-rendering".split(" ")),T.Image.fromElement=function(t,i,r){var n=T.parseAttributes(t,T.Image.ATTRIBUTE_NAMES);T.Image.fromURL(n["xlink:href"],i,e(r?T.util.object.clone(r):{},n))})}(t),T.util.object.extend(T.Object.prototype,{_getAngleValueForStraighten:function(){var t=this.angle%360;return t>0?90*Math.round((t-1)/90):90*Math.round(t/90)},straighten:function(){return this.rotate(this._getAngleValueForStraighten()),this},fxStraighten:function(t){var e=function(){},i=(t=t||{}).onComplete||e,r=t.onChange||e,n=this;return T.util.animate({startValue:this.get("angle"),endValue:this._getAngleValueForStraighten(),duration:this.FX_DURATION,onChange:function(t){n.rotate(t),r()},onComplete:function(){n.setCoords(),i()}}),this}}),T.util.object.extend(T.StaticCanvas.prototype,{straightenObject:function(t){return t.straighten(),this.requestRenderAll(),this},fxStraightenObject:function(t){return t.fxStraighten({onChange:this.requestRenderAllBound}),this}}),function(){function t(t,e){var i="precision "+e+" float;\nvoid main(){}",r=t.createShader(t.FRAGMENT_SHADER);return 
t.shaderSource(r,i),t.compileShader(r),!!t.getShaderParameter(r,t.COMPILE_STATUS)}function e(t){t&&t.tileSize&&(this.tileSize=t.tileSize),this.setupGLContext(this.tileSize,this.tileSize),this.captureGPUInfo()}T.isWebglSupported=function(e){if(T.isLikelyNode)return!1;e=e||T.WebglFilterBackend.prototype.tileSize;var i=document.createElement("canvas"),r=i.getContext("webgl")||i.getContext("experimental-webgl"),n=!1;if(r){T.maxTextureSize=r.getParameter(r.MAX_TEXTURE_SIZE),n=T.maxTextureSize>=e;for(var o=["highp","mediump","lowp"],s=0;s<3;s++)if(t(r,o[s])){T.webGlPrecision=o[s];break}}return this.isSupported=n,n},T.WebglFilterBackend=e,e.prototype={tileSize:2048,resources:{},setupGLContext:function(t,e){this.dispose(),this.createWebGLCanvas(t,e),this.aPosition=new Float32Array([0,0,0,1,1,0,1,1]),this.chooseFastestCopyGLTo2DMethod(t,e)},chooseFastestCopyGLTo2DMethod:function(t,e){var i,r=void 0!==window.performance;try{new ImageData(1,1),i=!0}catch(t){i=!1}var n="undefined"!=typeof ArrayBuffer,o="undefined"!=typeof Uint8ClampedArray;if(r&&i&&n&&o){var s=T.util.createCanvasElement(),a=new ArrayBuffer(t*e*4);if(T.forceGLPutImageData)return this.imageBuffer=a,void(this.copyGLTo2D=A);var h,c,l={imageBuffer:a,destinationWidth:t,destinationHeight:e,targetCanvas:s};s.width=t,s.height=e,h=window.performance.now(),k.call(l,this.gl,l),c=window.performance.now()-h,h=window.performance.now(),A.call(l,this.gl,l),c>window.performance.now()-h?(this.imageBuffer=a,this.copyGLTo2D=A):this.copyGLTo2D=k}},createWebGLCanvas:function(t,e){var i=T.util.createCanvasElement();i.width=t,i.height=e;var r={alpha:!0,premultipliedAlpha:!1,depth:!1,stencil:!1,antialias:!1},n=i.getContext("webgl",r);n||(n=i.getContext("experimental-webgl",r)),n&&(n.clearColor(0,0,0,0),this.canvas=i,this.gl=n)},applyFilters:function(t,e,i,r,n,o){var s,a=this.gl;o&&(s=this.getCachedTexture(o,e));var h={originalWidth:e.width||e.originalWidth,originalHeight:e.height||e.originalHeight,sourceWidth:i,sourceHeight:r,destinationWidth:i,destinationHeight:r,context:a,sourceTexture:this.createTexture(a,i,r,!s&&e),targetTexture:this.createTexture(a,i,r),originalTexture:s||this.createTexture(a,i,r,!s&&e),passes:t.length,webgl:!0,aPosition:this.aPosition,programCache:this.programCache,pass:0,filterBackend:this,targetCanvas:n},c=a.createFramebuffer();return a.bindFramebuffer(a.FRAMEBUFFER,c),t.forEach((function(t){t&&t.applyTo(h)})),function(t){var e=t.targetCanvas,i=e.width,r=e.height,n=t.destinationWidth,o=t.destinationHeight;i===n&&r===o||(e.width=n,e.height=o)}(h),this.copyGLTo2D(a,h),a.bindTexture(a.TEXTURE_2D,null),a.deleteTexture(h.sourceTexture),a.deleteTexture(h.targetTexture),a.deleteFramebuffer(c),n.getContext("2d").setTransform(1,0,0,1,0,0),h},dispose:function(){this.canvas&&(this.canvas=null,this.gl=null),this.clearWebGLCaches()},clearWebGLCaches:function(){this.programCache={},this.textureCache={}},createTexture:function(t,e,i,r){var n=t.createTexture();return t.bindTexture(t.TEXTURE_2D,n),t.texParameteri(t.TEXTURE_2D,t.TEXTURE_MAG_FILTER,t.NEAREST),t.texParameteri(t.TEXTURE_2D,t.TEXTURE_MIN_FILTER,t.NEAREST),t.texParameteri(t.TEXTURE_2D,t.TEXTURE_WRAP_S,t.CLAMP_TO_EDGE),t.texParameteri(t.TEXTURE_2D,t.TEXTURE_WRAP_T,t.CLAMP_TO_EDGE),r?t.texImage2D(t.TEXTURE_2D,0,t.RGBA,t.RGBA,t.UNSIGNED_BYTE,r):t.texImage2D(t.TEXTURE_2D,0,t.RGBA,e,i,0,t.RGBA,t.UNSIGNED_BYTE,null),n},getCachedTexture:function(t,e){if(this.textureCache[t])return this.textureCache[t];var i=this.createTexture(this.gl,e.width,e.height,e);return 
this.textureCache[t]=i,i},evictCachesForKey:function(t){this.textureCache[t]&&(this.gl.deleteTexture(this.textureCache[t]),delete this.textureCache[t])},copyGLTo2D:k,captureGPUInfo:function(){if(this.gpuInfo)return this.gpuInfo;var t=this.gl,e={renderer:"",vendor:""};if(!t)return e;var i=t.getExtension("WEBGL_debug_renderer_info");if(i){var r=t.getParameter(i.UNMASKED_RENDERER_WEBGL),n=t.getParameter(i.UNMASKED_VENDOR_WEBGL);r&&(e.renderer=r.toLowerCase()),n&&(e.vendor=n.toLowerCase())}return this.gpuInfo=e,e}}}(),function(){var t=function(){};function e(){}T.Canvas2dFilterBackend=e,e.prototype={evictCachesForKey:t,dispose:t,clearWebGLCaches:t,resources:{},applyFilters:function(t,e,i,r,n){var o=n.getContext("2d");o.drawImage(e,0,0,i,r);var s={sourceWidth:i,sourceHeight:r,imageData:o.getImageData(0,0,i,r),originalEl:e,originalImageData:o.getImageData(0,0,i,r),canvasEl:n,ctx:o,filterBackend:this};return t.forEach((function(t){t.applyTo(s)})),s.imageData.width===i&&s.imageData.height===r||(n.width=s.imageData.width,n.height=s.imageData.height),o.putImageData(s.imageData,0,0),s}}}(),T.Image=T.Image||{},T.Image.filters=T.Image.filters||{},T.Image.filters.BaseFilter=T.util.createClass({type:"BaseFilter",vertexSource:"attribute vec2 aPosition;\nvarying vec2 vTexCoord;\nvoid main() {\nvTexCoord = aPosition;\ngl_Position = vec4(aPosition * 2.0 - 1.0, 0.0, 1.0);\n}",fragmentSource:"precision highp float;\nvarying vec2 vTexCoord;\nuniform sampler2D uTexture;\nvoid main() {\ngl_FragColor = texture2D(uTexture, vTexCoord);\n}",initialize:function(t){t&&this.setOptions(t)},setOptions:function(t){for(var e in t)this[e]=t[e]},createProgram:function(t,e,i){e=e||this.fragmentSource,i=i||this.vertexSource,"highp"!==T.webGlPrecision&&(e=e.replace(/precision highp float/g,"precision "+T.webGlPrecision+" float"));var r=t.createShader(t.VERTEX_SHADER);if(t.shaderSource(r,i),t.compileShader(r),!t.getShaderParameter(r,t.COMPILE_STATUS))throw new Error("Vertex shader compile error for "+this.type+": "+t.getShaderInfoLog(r));var n=t.createShader(t.FRAGMENT_SHADER);if(t.shaderSource(n,e),t.compileShader(n),!t.getShaderParameter(n,t.COMPILE_STATUS))throw new Error("Fragment shader compile error for "+this.type+": "+t.getShaderInfoLog(n));var o=t.createProgram();if(t.attachShader(o,r),t.attachShader(o,n),t.linkProgram(o),!t.getProgramParameter(o,t.LINK_STATUS))throw new Error('Shader link error for "${this.type}" '+t.getProgramInfoLog(o));var s=this.getAttributeLocations(t,o),a=this.getUniformLocations(t,o)||{};return a.uStepW=t.getUniformLocation(o,"uStepW"),a.uStepH=t.getUniformLocation(o,"uStepH"),{program:o,attributeLocations:s,uniformLocations:a}},getAttributeLocations:function(t,e){return{aPosition:t.getAttribLocation(e,"aPosition")}},getUniformLocations:function(){return{}},sendAttributeData:function(t,e,i){var r=e.aPosition,n=t.createBuffer();t.bindBuffer(t.ARRAY_BUFFER,n),t.enableVertexAttribArray(r),t.vertexAttribPointer(r,2,t.FLOAT,!1,0,0),t.bufferData(t.ARRAY_BUFFER,i,t.STATIC_DRAW)},_setupFrameBuffer:function(t){var e,i,r=t.context;t.passes>1?(e=t.destinationWidth,i=t.destinationHeight,t.sourceWidth===e&&t.sourceHeight===i||(r.deleteTexture(t.targetTexture),t.targetTexture=t.filterBackend.createTexture(r,e,i)),r.framebufferTexture2D(r.FRAMEBUFFER,r.COLOR_ATTACHMENT0,r.TEXTURE_2D,t.targetTexture,0)):(r.bindFramebuffer(r.FRAMEBUFFER,null),r.finish())},_swapTextures:function(t){t.passes--,t.pass++;var e=t.targetTexture;t.targetTexture=t.sourceTexture,t.sourceTexture=e},isNeutralState:function(){var 
t=this.mainParameter,e=T.Image.filters[this.type].prototype;if(t){if(Array.isArray(e[t])){for(var i=e[t].length;i--;)if(this[t][i]!==e[t][i])return!1;return!0}return e[t]===this[t]}return!1},applyTo:function(t){t.webgl?(this._setupFrameBuffer(t),this.applyToWebGL(t),this._swapTextures(t)):this.applyTo2d(t)},retrieveShader:function(t){return t.programCache.hasOwnProperty(this.type)||(t.programCache[this.type]=this.createProgram(t.context)),t.programCache[this.type]},applyToWebGL:function(t){var e=t.context,i=this.retrieveShader(t);0===t.pass&&t.originalTexture?e.bindTexture(e.TEXTURE_2D,t.originalTexture):e.bindTexture(e.TEXTURE_2D,t.sourceTexture),e.useProgram(i.program),this.sendAttributeData(e,i.attributeLocations,t.aPosition),e.uniform1f(i.uniformLocations.uStepW,1/t.sourceWidth),e.uniform1f(i.uniformLocations.uStepH,1/t.sourceHeight),this.sendUniformData(e,i.uniformLocations),e.viewport(0,0,t.destinationWidth,t.destinationHeight),e.drawArrays(e.TRIANGLE_STRIP,0,4)},bindAdditionalTexture:function(t,e,i){t.activeTexture(i),t.bindTexture(t.TEXTURE_2D,e),t.activeTexture(t.TEXTURE0)},unbindAdditionalTexture:function(t,e){t.activeTexture(e),t.bindTexture(t.TEXTURE_2D,null),t.activeTexture(t.TEXTURE0)},getMainParameter:function(){return this[this.mainParameter]},setMainParameter:function(t){this[this.mainParameter]=t},sendUniformData:function(){},createHelpLayer:function(t){if(!t.helpLayer){var e=document.createElement("canvas");e.width=t.sourceWidth,e.height=t.sourceHeight,t.helpLayer=e}},toObject:function(){var t={type:this.type},e=this.mainParameter;return e&&(t[e]=this[e]),t},toJSON:function(){return this.toObject()}}),T.Image.filters.BaseFilter.fromObject=function(t,e){var i=new T.Image.filters[t.type](t);return e&&e(i),i},function(t){var e=t.fabric||(t.fabric={}),i=e.Image.filters,r=e.util.createClass;i.ColorMatrix=r(i.BaseFilter,{type:"ColorMatrix",fragmentSource:"precision highp float;\nuniform sampler2D uTexture;\nvarying vec2 vTexCoord;\nuniform mat4 uColorMatrix;\nuniform vec4 uConstants;\nvoid main() {\nvec4 color = texture2D(uTexture, vTexCoord);\ncolor *= uColorMatrix;\ncolor += uConstants;\ngl_FragColor = color;\n}",matrix:[1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0],mainParameter:"matrix",colorsOnly:!0,initialize:function(t){this.callSuper("initialize",t),this.matrix=this.matrix.slice(0)},applyTo2d:function(t){var e,i,r,n,o,s=t.imageData.data,a=s.length,h=this.matrix,c=this.colorsOnly;for(o=0;o=b||s<0||s>=_||(h=4*(a*_+s),c=v[d*m+f],e+=p[h]*c,i+=p[h+1]*c,r+=p[h+2]*c,w||(n+=p[h+3]*c));C[o]=e,C[o+1]=i,C[o+2]=r,C[o+3]=w?p[o+3]:n}t.imageData=x},getUniformLocations:function(t,e){return{uMatrix:t.getUniformLocation(e,"uMatrix"),uOpaque:t.getUniformLocation(e,"uOpaque"),uHalfSize:t.getUniformLocation(e,"uHalfSize"),uSize:t.getUniformLocation(e,"uSize")}},sendUniformData:function(t,e){t.uniform1fv(e.uMatrix,this.matrix)},toObject:function(){return i(this.callSuper("toObject"),{opaque:this.opaque,matrix:this.matrix})}}),e.Image.filters.Convolute.fromObject=e.Image.filters.BaseFilter.fromObject}(t),function(t){var e=t.fabric||(t.fabric={}),i=e.Image.filters,r=e.util.createClass;i.Grayscale=r(i.BaseFilter,{type:"Grayscale",fragmentSource:{average:"precision highp float;\nuniform sampler2D uTexture;\nvarying vec2 vTexCoord;\nvoid main() {\nvec4 color = texture2D(uTexture, vTexCoord);\nfloat average = (color.r + color.b + color.g) / 3.0;\ngl_FragColor = vec4(average, average, average, color.a);\n}",lightness:"precision highp float;\nuniform sampler2D uTexture;\nuniform int uMode;\nvarying vec2 
vTexCoord;\nvoid main() {\nvec4 col = texture2D(uTexture, vTexCoord);\nfloat average = (max(max(col.r, col.g),col.b) + min(min(col.r, col.g),col.b)) / 2.0;\ngl_FragColor = vec4(average, average, average, col.a);\n}",luminosity:"precision highp float;\nuniform sampler2D uTexture;\nuniform int uMode;\nvarying vec2 vTexCoord;\nvoid main() {\nvec4 col = texture2D(uTexture, vTexCoord);\nfloat average = 0.21 * col.r + 0.72 * col.g + 0.07 * col.b;\ngl_FragColor = vec4(average, average, average, col.a);\n}"},mode:"average",mainParameter:"mode",applyTo2d:function(t){var e,i,r=t.imageData.data,n=r.length,o=this.mode;for(e=0;ec[0]&&n>c[1]&&o>c[2]&&r 0.0) {\n"+this.fragmentSource[t]+"}\n}"},retrieveShader:function(t){var e,i=this.type+"_"+this.mode;return t.programCache.hasOwnProperty(i)||(e=this.buildSource(this.mode),t.programCache[i]=this.createProgram(t.context,e)),t.programCache[i]},applyTo2d:function(t){var i,r,n,o,s,a,h,c=t.imageData.data,l=c.length,u=1-this.alpha;i=(h=new e.Color(this.color).getSource())[0]*this.alpha,r=h[1]*this.alpha,n=h[2]*this.alpha;for(var f=0;f=t||e<=-t)return 0;if(e<1.1920929e-7&&e>-1.1920929e-7)return 1;var i=(e*=Math.PI)/t;return a(e)/e*a(i)/i}},applyTo2d:function(t){var e=t.imageData,i=this.scaleX,r=this.scaleY;this.rcpScaleX=1/i,this.rcpScaleY=1/r;var n,o=e.width,a=e.height,h=s(o*i),c=s(a*r);"sliceHack"===this.resizeType?n=this.sliceByTwo(t,o,a,h,c):"hermite"===this.resizeType?n=this.hermiteFastResize(t,o,a,h,c):"bilinear"===this.resizeType?n=this.bilinearFiltering(t,o,a,h,c):"lanczos"===this.resizeType&&(n=this.lanczosResize(t,o,a,h,c)),t.imageData=n},sliceByTwo:function(t,i,n,o,s){var a,h,c=t.imageData,l=.5,u=!1,f=!1,d=i*l,g=n*l,p=e.filterBackend.resources,v=0,m=0,y=i,_=0;for(p.sliceByTwo||(p.sliceByTwo=document.createElement("canvas")),((a=p.sliceByTwo).width<1.5*i||a.height=e)){M=r(1e3*o(S-x.x)),b[M]||(b[M]={});for(var F=C.y-_;F<=C.y+_;F++)F<0||F>=s||(j=r(1e3*o(F-x.y)),b[M][j]||(b[M][j]=d(n(i(M*v,2)+i(j*m,2))/1e3)),(T=b[M][j])>0&&(P+=T,E+=T*l[O=4*(F*e+S)],k+=T*l[O+1],A+=T*l[O+2],D+=T*l[O+3]))}f[O=4*(w*a+h)]=E/P,f[O+1]=k/P,f[O+2]=A/P,f[O+3]=D/P}return++h1&&j<-1||(_=2*j*j*j-3*j*j+1)>0&&(T+=_*d[(M=4*(D+P*e))+3],x+=_,d[M+3]<255&&(_=_*d[M+3]/250),C+=_*d[M],w+=_*d[M+1],S+=_*d[M+2],b+=_)}p[y]=C/b,p[y+1]=w/b,p[y+2]=S/b,p[y+3]=T/x}return g},toObject:function(){return{type:this.type,scaleX:this.scaleX,scaleY:this.scaleY,resizeType:this.resizeType,lanczosLobes:this.lanczosLobes}}}),e.Image.filters.Resize.fromObject=e.Image.filters.BaseFilter.fromObject}(t),function(t){var e=t.fabric||(t.fabric={}),i=e.Image.filters,r=e.util.createClass;i.Contrast=r(i.BaseFilter,{type:"Contrast",fragmentSource:"precision highp float;\nuniform sampler2D uTexture;\nuniform float uContrast;\nvarying vec2 vTexCoord;\nvoid main() {\nvec4 color = texture2D(uTexture, vTexCoord);\nfloat contrastF = 1.015 * (uContrast + 1.0) / (1.0 * (1.015 - uContrast));\ncolor.rgb = contrastF * (color.rgb - 0.5) + 0.5;\ngl_FragColor = color;\n}",contrast:0,mainParameter:"contrast",applyTo2d:function(t){if(0!==this.contrast){var e,i=t.imageData.data,r=i.length,n=Math.floor(255*this.contrast),o=259*(n+255)/(255*(259-n));for(e=0;e1&&(e=1/this.aspectRatio):this.aspectRatio<1&&(e=this.aspectRatio),t=e*this.blur*.12,this.horizontal?i[0]=t:i[1]=t,i}}),i.Blur.fromObject=e.Image.filters.BaseFilter.fromObject}(t),function(t){var e=t.fabric||(t.fabric={}),i=e.Image.filters,r=e.util.createClass;i.Gamma=r(i.BaseFilter,{type:"Gamma",fragmentSource:"precision highp float;\nuniform sampler2D uTexture;\nuniform vec3 
uGamma;\nvarying vec2 vTexCoord;\nvoid main() {\nvec4 color = texture2D(uTexture, vTexCoord);\nvec3 correction = (1.0 / uGamma);\ncolor.r = pow(color.r, correction.r);\ncolor.g = pow(color.g, correction.g);\ncolor.b = pow(color.b, correction.b);\ngl_FragColor = color;\ngl_FragColor.rgb *= color.a;\n}",gamma:[1,1,1],mainParameter:"gamma",initialize:function(t){this.gamma=[1,1,1],i.BaseFilter.prototype.initialize.call(this,t)},applyTo2d:function(t){var e,i=t.imageData.data,r=this.gamma,n=i.length,o=1/r[0],s=1/r[1],a=1/r[2];for(this.rVals||(this.rVals=new Uint8Array(256),this.gVals=new Uint8Array(256),this.bVals=new Uint8Array(256)),e=0,n=256;e'},_getCacheCanvasDimensions:function(){var t=this.callSuper("_getCacheCanvasDimensions"),e=this.fontSize;return t.width+=e*t.zoomX,t.height+=e*t.zoomY,t},_render:function(t){var e=this.path;e&&!e.isNotVisible()&&e._render(t),this._setTextStyles(t),this._renderTextLinesBackground(t),this._renderTextDecoration(t,"underline"),this._renderText(t),this._renderTextDecoration(t,"overline"),this._renderTextDecoration(t,"linethrough")},_renderText:function(t){"stroke"===this.paintFirst?(this._renderTextStroke(t),this._renderTextFill(t)):(this._renderTextFill(t),this._renderTextStroke(t))},_setTextStyles:function(t,e,i){t.textBaseline="alphabetic",t.font=this._getFontDeclaration(e,i)},calcTextWidth:function(){for(var t=this.getLineWidth(0),e=1,i=this._textLines.length;et&&(t=r)}return t},_renderTextLine:function(t,e,i,r,n,o){this._renderChars(t,e,i,r,n,o)},_renderTextLinesBackground:function(t){if(this.textBackgroundColor||this.styleHas("textBackgroundColor")){for(var e,i,r,n,o,s,a,h=t.fillStyle,c=this._getLeftOffset(),l=this._getTopOffset(),u=0,f=0,d=this.path,g=0,p=this._textLines.length;g=0:ia?u%=a:u<0&&(u+=a),this._setGraphemeOnPath(u,o,s),u+=o.kernedWidth}return{width:h,numOfSpaces:0}},_setGraphemeOnPath:function(t,i,r){var n=t+i.kernedWidth/2,o=this.path,s=e.util.getPointOnPath(o.path,n,o.segmentsInfo);i.renderLeft=s.x-r.x,i.renderTop=s.y-r.y,i.angle=s.angle+("right"===this.pathSide?Math.PI:0)},_getGraphemeBox:function(t,e,i,r,n){var o,s=this.getCompleteStyleDeclaration(e,i),a=r?this.getCompleteStyleDeclaration(e,i-1):{},h=this._measureChar(t,s,r,a),c=h.kernedWidth,l=h.width;0!==this.charSpacing&&(l+=o=this._getWidthOfCharSpacing(),c+=o);var u={width:l,left:0,height:s.fontSize,kernedWidth:c,deltaY:s.deltaY};if(i>0&&!n){var f=this.__charBounds[e][i-1];u.left=f.left+f.width+h.kernedWidth-h.width}return u},getHeightOfLine:function(t){if(this.__lineHeights[t])return this.__lineHeights[t];for(var e=this._textLines[t],i=this.getHeightOfChar(t,0),r=1,n=e.length;r0){var P=y+o+u;"rtl"===this.direction&&(P=this.width-P-f),c&&m&&(t.fillStyle=m,t.fillRect(P,l+C*r+s,f,this.fontSize/15)),u=d.left,f=d.width,c=g,m=v,r=n,s=a}else f+=d.kernedWidth;P=y+o+u;"rtl"===this.direction&&(P=this.width-P-f),t.fillStyle=v,g&&v&&t.fillRect(P,l+C*r+s,f-x,this.fontSize/15),_+=i}else _+=i;this._removeShadow(t)}},_getFontDeclaration:function(t,i){var r=t||this,n=this.fontFamily,o=e.Text.genericFonts.indexOf(n.toLowerCase())>-1,s=void 0===n||n.indexOf("'")>-1||n.indexOf(",")>-1||n.indexOf('"')>-1||o?r.fontFamily:'"'+r.fontFamily+'"';return[e.isLikelyNode?r.fontWeight:r.fontStyle,e.isLikelyNode?r.fontStyle:r.fontWeight,i?this.CACHE_FONT_SIZE+"px":r.fontSize+"px",s].join(" 
")},render:function(t){this.visible&&(this.canvas&&this.canvas.skipOffscreen&&!this.group&&!this.isOnScreen()||(this._shouldClearDimensionCache()&&this.initDimensions(),this.callSuper("render",t)))},_splitTextIntoLines:function(t){for(var i=t.split(this._reNewline),r=new Array(i.length),n=["\n"],o=[],s=0;s-1&&(t.underline=!0),t.textDecoration.indexOf("line-through")>-1&&(t.linethrough=!0),t.textDecoration.indexOf("overline")>-1&&(t.overline=!0),delete t.textDecoration)}T.IText=T.util.createClass(T.Text,T.Observable,{type:"i-text",selectionStart:0,selectionEnd:0,selectionColor:"rgba(17,119,255,0.3)",isEditing:!1,editable:!0,editingBorderColor:"rgba(102,153,255,0.25)",cursorWidth:2,cursorColor:"",cursorDelay:1e3,cursorDuration:600,caching:!0,hiddenTextareaContainer:null,_reSpace:/\s|\n/,_currentCursorOpacity:0,_selectionDirection:null,_abortCursorAnimation:!1,__widthOfSpace:[],inCompositionMode:!1,initialize:function(t,e){this.callSuper("initialize",t,e),this.initBehavior()},setSelectionStart:function(t){t=Math.max(t,0),this._updateAndFire("selectionStart",t)},setSelectionEnd:function(t){t=Math.min(t,this.text.length),this._updateAndFire("selectionEnd",t)},_updateAndFire:function(t,e){this[t]!==e&&(this._fireSelectionChanged(),this[t]=e),this._updateTextarea()},_fireSelectionChanged:function(){this.fire("selection:changed"),this.canvas&&this.canvas.fire("text:selection:changed",{target:this})},initDimensions:function(){this.isEditing&&this.initDelayedCursor(),this.clearContextTop(),this.callSuper("initDimensions")},render:function(t){this.clearContextTop(),this.callSuper("render",t),this.cursorOffsetCache={},this.renderCursorOrSelection()},_render:function(t){this.callSuper("_render",t)},clearContextTop:function(t){if(this.isEditing&&this.canvas&&this.canvas.contextTop){var e=this.canvas.contextTop,i=this.canvas.viewportTransform;e.save(),e.transform(i[0],i[1],i[2],i[3],i[4],i[5]),this.transform(e),this._clearTextArea(e),t||e.restore()}},renderCursorOrSelection:function(){if(this.isEditing&&this.canvas&&this.canvas.contextTop){var t=this._getCursorBoundaries(),e=this.canvas.contextTop;this.clearContextTop(!0),this.selectionStart===this.selectionEnd?this.renderCursor(t,e):this.renderSelection(t,e),e.restore()}},_clearTextArea:function(t){var e=this.width+4,i=this.height+4;t.clearRect(-e/2,-i/2,e,i)},_getCursorBoundaries:function(t){void 0===t&&(t=this.selectionStart);var e=this._getLeftOffset(),i=this._getTopOffset(),r=this._getCursorBoundariesOffsets(t);return{left:e,top:i,leftOffset:r.left,topOffset:r.top}},_getCursorBoundariesOffsets:function(t){if(this.cursorOffsetCache&&"top"in this.cursorOffsetCache)return this.cursorOffsetCache;var e,i,r,n,o=0,s=0,a=this.get2DCursorLocation(t);r=a.charIndex,i=a.lineIndex;for(var h=0;h0?s:0)},"rtl"===this.direction&&(n.left*=-1),this.cursorOffsetCache=n,this.cursorOffsetCache},renderCursor:function(t,e){var i=this.get2DCursorLocation(),r=i.lineIndex,n=i.charIndex>0?i.charIndex-1:0,o=this.getValueOfPropertyAt(r,n,"fontSize"),s=this.scaleX*this.canvas.getZoom(),a=this.cursorWidth/s,h=t.topOffset,c=this.getValueOfPropertyAt(r,n,"deltaY");h+=(1-this._fontSizeFraction)*this.getHeightOfLine(r)/this.lineHeight-o*(1-this._fontSizeFraction),this.inCompositionMode&&this.renderSelection(t,e),e.fillStyle=this.cursorColor||this.getValueOfPropertyAt(r,n,"fill"),e.globalAlpha=this.__isMousedown?1:this._currentCursorOpacity,e.fillRect(t.left+t.leftOffset-a/2,h+t.top+c,a,o)},renderSelection:function(t,e){for(var 
i=this.inCompositionMode?this.hiddenTextarea.selectionStart:this.selectionStart,r=this.inCompositionMode?this.hiddenTextarea.selectionEnd:this.selectionEnd,n=-1!==this.textAlign.indexOf("justify"),o=this.get2DCursorLocation(i),s=this.get2DCursorLocation(r),a=o.lineIndex,h=s.lineIndex,c=o.charIndex<0?0:o.charIndex,l=s.charIndex<0?0:s.charIndex,u=a;u<=h;u++){var f,d=this._getLineLeftOffset(u)||0,g=this.getHeightOfLine(u),p=0,v=0;if(u===a&&(p=this.__charBounds[a][c].left),u>=a&&u1)&&(g/=this.lineHeight);var y=t.left+d+p,_=v-p,b=g,x=0;this.inCompositionMode?(e.fillStyle=this.compositionColor||"black",b=1,x=g):e.fillStyle=this.selectionColor,"rtl"===this.direction&&(y=this.width-y-_),e.fillRect(y,t.top+t.topOffset+x,_,b),t.topOffset+=f}},getCurrentCharFontSize:function(){var t=this._getCurrentCharIndex();return this.getValueOfPropertyAt(t.l,t.c,"fontSize")},getCurrentCharColor:function(){var t=this._getCurrentCharIndex();return this.getValueOfPropertyAt(t.l,t.c,"fill")},_getCurrentCharIndex:function(){var t=this.get2DCursorLocation(this.selectionStart,!0),e=t.charIndex>0?t.charIndex-1:0;return{l:t.lineIndex,c:e}}}),T.IText.fromObject=function(e,i){if(t(e),e.styles)for(var r in e.styles)for(var n in e.styles[r])t(e.styles[r][n]);T.Object._fromObject("IText",e,i,"text")}}(),S=T.util.object.clone,T.util.object.extend(T.IText.prototype,{initBehavior:function(){this.initAddedHandler(),this.initRemovedHandler(),this.initCursorSelectionHandlers(),this.initDoubleClickSimulation(),this.mouseMoveHandler=this.mouseMoveHandler.bind(this)},onDeselect:function(){this.isEditing&&this.exitEditing(),this.selected=!1},initAddedHandler:function(){var t=this;this.on("added",(function(){var e=t.canvas;e&&(e._hasITextHandlers||(e._hasITextHandlers=!0,t._initCanvasHandlers(e)),e._iTextInstances=e._iTextInstances||[],e._iTextInstances.push(t))}))},initRemovedHandler:function(){var t=this;this.on("removed",(function(){var e=t.canvas;e&&(e._iTextInstances=e._iTextInstances||[],T.util.removeFromArray(e._iTextInstances,t),0===e._iTextInstances.length&&(e._hasITextHandlers=!1,t._removeCanvasHandlers(e)))}))},_initCanvasHandlers:function(t){t._mouseUpITextHandler=function(){t._iTextInstances&&t._iTextInstances.forEach((function(t){t.__isMousedown=!1}))},t.on("mouse:up",t._mouseUpITextHandler)},_removeCanvasHandlers:function(t){t.off("mouse:up",t._mouseUpITextHandler)},_tick:function(){this._currentTickState=this._animateCursor(this,1,this.cursorDuration,"_onTickComplete")},_animateCursor:function(t,e,i,r){var n;return n={isAborted:!1,abort:function(){this.isAborted=!0}},t.animate("_currentCursorOpacity",e,{duration:i,onComplete:function(){n.isAborted||t[r]()},onChange:function(){t.canvas&&t.selectionStart===t.selectionEnd&&t.renderCursorOrSelection()},abort:function(){return n.isAborted}}),n},_onTickComplete:function(){var t=this;this._cursorTimeout1&&clearTimeout(this._cursorTimeout1),this._cursorTimeout1=setTimeout((function(){t._currentTickCompleteState=t._animateCursor(t,0,this.cursorDuration/2,"_tick")}),100)},initDelayedCursor:function(t){var e=this,i=t?0:this.cursorDelay;this.abortCursorAnimation(),this._currentCursorOpacity=1,this._cursorTimeout2=setTimeout((function(){e._tick()}),i)},abortCursorAnimation:function(){var 
t=this._currentTickState||this._currentTickCompleteState,e=this.canvas;this._currentTickState&&this._currentTickState.abort(),this._currentTickCompleteState&&this._currentTickCompleteState.abort(),clearTimeout(this._cursorTimeout1),clearTimeout(this._cursorTimeout2),this._currentCursorOpacity=0,t&&e&&e.clearContext(e.contextTop||e.contextContainer)},selectAll:function(){return this.selectionStart=0,this.selectionEnd=this._text.length,this._fireSelectionChanged(),this._updateTextarea(),this},getSelectedText:function(){return this._text.slice(this.selectionStart,this.selectionEnd).join("")},findWordBoundaryLeft:function(t){var e=0,i=t-1;if(this._reSpace.test(this._text[i]))for(;this._reSpace.test(this._text[i]);)e++,i--;for(;/\S/.test(this._text[i])&&i>-1;)e++,i--;return t-e},findWordBoundaryRight:function(t){var e=0,i=t;if(this._reSpace.test(this._text[i]))for(;this._reSpace.test(this._text[i]);)e++,i++;for(;/\S/.test(this._text[i])&&i-1;)e++,i--;return t-e},findLineBoundaryRight:function(t){for(var e=0,i=t;!/\n/.test(this._text[i])&&i0&&rthis.__selectionStartOnMouseDown?(this.selectionStart=this.__selectionStartOnMouseDown,this.selectionEnd=e):(this.selectionStart=e,this.selectionEnd=this.__selectionStartOnMouseDown),this.selectionStart===i&&this.selectionEnd===r||(this.restartCursorIfNeeded(),this._fireSelectionChanged(),this._updateTextarea(),this.renderCursorOrSelection()))}},_setEditingProps:function(){this.hoverCursor="text",this.canvas&&(this.canvas.defaultCursor=this.canvas.moveCursor="text"),this.borderColor=this.editingBorderColor,this.hasControls=this.selectable=!1,this.lockMovementX=this.lockMovementY=!0},fromStringToGraphemeSelection:function(t,e,i){var r=i.slice(0,t),n=T.util.string.graphemeSplit(r).length;if(t===e)return{selectionStart:n,selectionEnd:n};var o=i.slice(t,e);return{selectionStart:n,selectionEnd:n+T.util.string.graphemeSplit(o).length}},fromGraphemeToStringSelection:function(t,e,i){var r=i.slice(0,t).join("").length;return t===e?{selectionStart:r,selectionEnd:r}:{selectionStart:r,selectionEnd:r+i.slice(t,e).join("").length}},_updateTextarea:function(){if(this.cursorOffsetCache={},this.hiddenTextarea){if(!this.inCompositionMode){var t=this.fromGraphemeToStringSelection(this.selectionStart,this.selectionEnd,this._text);this.hiddenTextarea.selectionStart=t.selectionStart,this.hiddenTextarea.selectionEnd=t.selectionEnd}this.updateTextareaPosition()}},updateFromTextArea:function(){if(this.hiddenTextarea){this.cursorOffsetCache={},this.text=this.hiddenTextarea.value,this._shouldClearDimensionCache()&&(this.initDimensions(),this.setCoords());var t=this.fromStringToGraphemeSelection(this.hiddenTextarea.selectionStart,this.hiddenTextarea.selectionEnd,this.hiddenTextarea.value);this.selectionEnd=this.selectionStart=t.selectionEnd,this.inCompositionMode||(this.selectionStart=t.selectionStart),this.updateTextareaPosition()}},updateTextareaPosition:function(){if(this.selectionStart===this.selectionEnd){var t=this._calcTextareaPosition();this.hiddenTextarea.style.left=t.left,this.hiddenTextarea.style.top=t.top}},_calcTextareaPosition:function(){if(!this.canvas)return{x:1,y:1};var 
t=this.inCompositionMode?this.compositionStart:this.selectionStart,e=this._getCursorBoundaries(t),i=this.get2DCursorLocation(t),r=i.lineIndex,n=i.charIndex,o=this.getValueOfPropertyAt(r,n,"fontSize")*this.lineHeight,s=e.leftOffset,a=this.calcTransformMatrix(),h={x:e.left+s,y:e.top+e.topOffset+o},c=this.canvas.getRetinaScaling(),l=this.canvas.upperCanvasEl,u=l.width/c,f=l.height/c,d=u-o,g=f-o,p=l.clientWidth/u,v=l.clientHeight/f;return h=T.util.transformPoint(h,a),(h=T.util.transformPoint(h,this.canvas.viewportTransform)).x*=p,h.y*=v,h.x<0&&(h.x=0),h.x>d&&(h.x=d),h.y<0&&(h.y=0),h.y>g&&(h.y=g),h.x+=this.canvas._offset.left,h.y+=this.canvas._offset.top,{left:h.x+"px",top:h.y+"px",fontSize:o+"px",charHeight:o}},_saveEditingProps:function(){this._savedProps={hasControls:this.hasControls,borderColor:this.borderColor,lockMovementX:this.lockMovementX,lockMovementY:this.lockMovementY,hoverCursor:this.hoverCursor,selectable:this.selectable,defaultCursor:this.canvas&&this.canvas.defaultCursor,moveCursor:this.canvas&&this.canvas.moveCursor}},_restoreEditingProps:function(){this._savedProps&&(this.hoverCursor=this._savedProps.hoverCursor,this.hasControls=this._savedProps.hasControls,this.borderColor=this._savedProps.borderColor,this.selectable=this._savedProps.selectable,this.lockMovementX=this._savedProps.lockMovementX,this.lockMovementY=this._savedProps.lockMovementY,this.canvas&&(this.canvas.defaultCursor=this._savedProps.defaultCursor,this.canvas.moveCursor=this._savedProps.moveCursor))},exitEditing:function(){var t=this._textBeforeEdit!==this.text,e=this.hiddenTextarea;return this.selected=!1,this.isEditing=!1,this.selectionEnd=this.selectionStart,e&&(e.blur&&e.blur(),e.parentNode&&e.parentNode.removeChild(e)),this.hiddenTextarea=null,this.abortCursorAnimation(),this._restoreEditingProps(),this._currentCursorOpacity=0,this._shouldClearDimensionCache()&&(this.initDimensions(),this.setCoords()),this.fire("editing:exited"),t&&this.fire("modified"),this.canvas&&(this.canvas.off("mouse:move",this.mouseMoveHandler),this.canvas.fire("text:editing:exited",{target:this}),t&&this.canvas.fire("object:modified",{target:this})),this},_removeExtraneousStyles:function(){for(var t in this.styles)this._textLines[t]||delete this.styles[t]},removeStyleFromTo:function(t,e){var i,r,n=this.get2DCursorLocation(t,!0),o=this.get2DCursorLocation(e,!0),s=n.lineIndex,a=n.charIndex,h=o.lineIndex,c=o.charIndex;if(s!==h){if(this.styles[s])for(i=a;i=c&&(r[l-f]=r[u],delete r[u])}},shiftLineStyles:function(t,e){var i=S(this.styles);for(var r in this.styles){var n=parseInt(r,10);n>t&&(this.styles[n+e]=i[n],i[n-e]||delete this.styles[n])}},restartCursorIfNeeded:function(){this._currentTickState&&!this._currentTickState.isAborted&&this._currentTickCompleteState&&!this._currentTickCompleteState.isAborted||this.initDelayedCursor()},insertNewlineStyleObject:function(t,e,i,r){var n,o={},s=!1,a=this._unwrappedTextLines[t].length===e;for(var h in i||(i=1),this.shiftLineStyles(t,i),this.styles[t]&&(n=this.styles[t][0===e?e:e-1]),this.styles[t]){var c=parseInt(h,10);c>=e&&(s=!0,o[c-e]=this.styles[t][h],a&&0===e||delete this.styles[t][h])}var l=!1;for(s&&!a&&(this.styles[t+i]=o,l=!0),l&&i--;i>0;)r&&r[i-1]?this.styles[t+i]={0:S(r[i-1])}:n?this.styles[t+i]={0:S(n)}:delete this.styles[t+i],i--;this._forceClearCache=!0},insertCharStyleObject:function(t,e,i,r){this.styles||(this.styles={});var n=this.styles[t],o=n?S(n):{};for(var s in i||(i=1),o){var a=parseInt(s,10);a>=e&&(n[a+i]=o[a],o[a-i]||delete 
n[a])}if(this._forceClearCache=!0,r)for(;i--;)Object.keys(r[i]).length&&(this.styles[t]||(this.styles[t]={}),this.styles[t][e+i]=S(r[i]));else if(n)for(var h=n[e?e-1:1];h&&i--;)this.styles[t][e+i]=S(h)},insertNewStyleBlock:function(t,e,i){for(var r=this.get2DCursorLocation(e,!0),n=[0],o=0,s=0;s0&&(this.insertCharStyleObject(r.lineIndex,r.charIndex,n[0],i),i=i&&i.slice(n[0]+1)),o&&this.insertNewlineStyleObject(r.lineIndex,r.charIndex+n[0],o),s=1;s0?this.insertCharStyleObject(r.lineIndex+s,0,n[s],i):i&&(this.styles[r.lineIndex+s][0]=i[0]),i=i&&i.slice(n[s]+1);n[s]>0&&this.insertCharStyleObject(r.lineIndex+s,0,n[s],i)},setSelectionStartEndWithShift:function(t,e,i){i<=t?(e===t?this._selectionDirection="left":"right"===this._selectionDirection&&(this._selectionDirection="left",this.selectionEnd=t),this.selectionStart=i):i>t&&it?this.selectionStart=t:this.selectionStart<0&&(this.selectionStart=0),this.selectionEnd>t?this.selectionEnd=t:this.selectionEnd<0&&(this.selectionEnd=0)}}),T.util.object.extend(T.IText.prototype,{initDoubleClickSimulation:function(){this.__lastClickTime=+new Date,this.__lastLastClickTime=+new Date,this.__lastPointer={},this.on("mousedown",this.onMouseDown)},onMouseDown:function(t){if(this.canvas){this.__newClickTime=+new Date;var e=t.pointer;this.isTripleClick(e)&&(this.fire("tripleclick",t),this._stopEvent(t.e)),this.__lastLastClickTime=this.__lastClickTime,this.__lastClickTime=this.__newClickTime,this.__lastPointer=e,this.__lastIsEditing=this.isEditing,this.__lastSelected=this.selected}},isTripleClick:function(t){return this.__newClickTime-this.__lastClickTime<500&&this.__lastClickTime-this.__lastLastClickTime<500&&this.__lastPointer.x===t.x&&this.__lastPointer.y===t.y},_stopEvent:function(t){t.preventDefault&&t.preventDefault(),t.stopPropagation&&t.stopPropagation()},initCursorSelectionHandlers:function(){this.initMousedownHandler(),this.initMouseupHandler(),this.initClicks()},doubleClickHandler:function(t){this.isEditing&&this.selectWord(this.getSelectionStartFromPointer(t.e))},tripleClickHandler:function(t){this.isEditing&&this.selectLine(this.getSelectionStartFromPointer(t.e))},initClicks:function(){this.on("mousedblclick",this.doubleClickHandler),this.on("tripleclick",this.tripleClickHandler)},_mouseDownHandler:function(t){!this.canvas||!this.editable||t.e.button&&1!==t.e.button||(this.__isMousedown=!0,this.selected&&(this.inCompositionMode=!1,this.setCursorByClick(t.e)),this.isEditing&&(this.__selectionStartOnMouseDown=this.selectionStart,this.selectionStart===this.selectionEnd&&this.abortCursorAnimation(),this.renderCursorOrSelection()))},_mouseDownHandlerBefore:function(t){!this.canvas||!this.editable||t.e.button&&1!==t.e.button||(this.selected=this===this.canvas._activeObject)},initMousedownHandler:function(){this.on("mousedown",this._mouseDownHandler),this.on("mousedown:before",this._mouseDownHandlerBefore)},initMouseupHandler:function(){this.on("mouseup",this.mouseUpHandler)},mouseUpHandler:function(t){if(this.__isMousedown=!1,!(!this.editable||this.group||t.transform&&t.transform.actionPerformed||t.e.button&&1!==t.e.button)){if(this.canvas){var e=this.canvas._activeObject;if(e&&e!==this)return}this.__lastSelected&&!this.__corner?(this.selected=!1,this.__lastSelected=!1,this.enterEditing(t.e),this.selectionStart===this.selectionEnd?this.initDelayedCursor(!0):this.renderCursorOrSelection()):this.selected=!0}},setCursorByClick:function(t){var 
e=this.getSelectionStartFromPointer(t),i=this.selectionStart,r=this.selectionEnd;t.shiftKey?this.setSelectionStartEndWithShift(i,r,e):(this.selectionStart=e,this.selectionEnd=e),this.isEditing&&(this._fireSelectionChanged(),this._updateTextarea())},getSelectionStartFromPointer:function(t){for(var e,i=this.getLocalPointer(t),r=0,n=0,o=0,s=0,a=0,h=0,c=this._textLines.length;h0&&(s+=this._textLines[h-1].length+this.missingNewlineOffset(h-1));n=this._getLineLeftOffset(a)*this.scaleX,e=this._textLines[a],"rtl"===this.direction&&(i.x=this.width*this.scaleX-i.x+n);for(var l=0,u=e.length;lo||s<0?0:1);return this.flipX&&(a=n-a),a>this._text.length&&(a=this._text.length),a}}),T.util.object.extend(T.IText.prototype,{initHiddenTextarea:function(){this.hiddenTextarea=T.document.createElement("textarea"),this.hiddenTextarea.setAttribute("autocapitalize","off"),this.hiddenTextarea.setAttribute("autocorrect","off"),this.hiddenTextarea.setAttribute("autocomplete","off"),this.hiddenTextarea.setAttribute("spellcheck","false"),this.hiddenTextarea.setAttribute("data-fabric-hiddentextarea",""),this.hiddenTextarea.setAttribute("wrap","off");var t=this._calcTextareaPosition();this.hiddenTextarea.style.cssText="position: absolute; top: "+t.top+"; left: "+t.left+"; z-index: -999; opacity: 0; width: 1px; height: 1px; font-size: 1px; paddingーtop: "+t.fontSize+";",this.hiddenTextareaContainer?this.hiddenTextareaContainer.appendChild(this.hiddenTextarea):T.document.body.appendChild(this.hiddenTextarea),T.util.addListener(this.hiddenTextarea,"keydown",this.onKeyDown.bind(this)),T.util.addListener(this.hiddenTextarea,"keyup",this.onKeyUp.bind(this)),T.util.addListener(this.hiddenTextarea,"input",this.onInput.bind(this)),T.util.addListener(this.hiddenTextarea,"copy",this.copy.bind(this)),T.util.addListener(this.hiddenTextarea,"cut",this.copy.bind(this)),T.util.addListener(this.hiddenTextarea,"paste",this.paste.bind(this)),T.util.addListener(this.hiddenTextarea,"compositionstart",this.onCompositionStart.bind(this)),T.util.addListener(this.hiddenTextarea,"compositionupdate",this.onCompositionUpdate.bind(this)),T.util.addListener(this.hiddenTextarea,"compositionend",this.onCompositionEnd.bind(this)),!this._clickHandlerInitialized&&this.canvas&&(T.util.addListener(this.canvas.upperCanvasEl,"click",this.onClick.bind(this)),this._clickHandlerInitialized=!0)},keysMap:{9:"exitEditing",27:"exitEditing",33:"moveCursorUp",34:"moveCursorDown",35:"moveCursorRight",36:"moveCursorLeft",37:"moveCursorLeft",38:"moveCursorUp",39:"moveCursorRight",40:"moveCursorDown"},keysMapRtl:{9:"exitEditing",27:"exitEditing",33:"moveCursorUp",34:"moveCursorDown",35:"moveCursorLeft",36:"moveCursorRight",37:"moveCursorRight",38:"moveCursorUp",39:"moveCursorLeft",40:"moveCursorDown"},ctrlKeysMapUp:{67:"copy",88:"cut"},ctrlKeysMapDown:{65:"selectAll"},onClick:function(){this.hiddenTextarea&&this.hiddenTextarea.focus()},onKeyDown:function(t){if(this.isEditing){var e="rtl"===this.direction?this.keysMapRtl:this.keysMap;if(t.keyCode in e)this[e[t.keyCode]](t);else{if(!(t.keyCode in this.ctrlKeysMapDown)||!t.ctrlKey&&!t.metaKey)return;this[this.ctrlKeysMapDown[t.keyCode]](t)}t.stopImmediatePropagation(),t.preventDefault(),t.keyCode>=33&&t.keyCode<=40?(this.inCompositionMode=!1,this.clearContextTop(),this.renderCursorOrSelection()):this.canvas&&this.canvas.requestRenderAll()}},onKeyUp:function(t){!this.isEditing||this._copyDone||this.inCompositionMode?this._copyDone=!1:t.keyCode in 
this.ctrlKeysMapUp&&(t.ctrlKey||t.metaKey)&&(this[this.ctrlKeysMapUp[t.keyCode]](t),t.stopImmediatePropagation(),t.preventDefault(),this.canvas&&this.canvas.requestRenderAll())},onInput:function(t){var e=this.fromPaste;if(this.fromPaste=!1,t&&t.stopPropagation(),this.isEditing){var i,r,n,o,s,a=this._splitTextIntoLines(this.hiddenTextarea.value).graphemeText,h=this._text.length,c=a.length,l=c-h,u=this.selectionStart,f=this.selectionEnd,d=u!==f;if(""===this.hiddenTextarea.value)return this.styles={},this.updateFromTextArea(),this.fire("changed"),void(this.canvas&&(this.canvas.fire("text:changed",{target:this}),this.canvas.requestRenderAll()));var g=this.fromStringToGraphemeSelection(this.hiddenTextarea.selectionStart,this.hiddenTextarea.selectionEnd,this.hiddenTextarea.value),p=u>g.selectionStart;d?(i=this._text.slice(u,f),l+=f-u):c0&&(r+=(i=this.__charBounds[t][e-1]).left+i.width),r},getDownCursorOffset:function(t,e){var i=this._getSelectionForOffset(t,e),r=this.get2DCursorLocation(i),n=r.lineIndex;if(n===this._textLines.length-1||t.metaKey||34===t.keyCode)return this._text.length-i;var o=r.charIndex,s=this._getWidthBeforeCursor(n,o),a=this._getIndexOnLine(n+1,s);return this._textLines[n].slice(o).length+a+1+this.missingNewlineOffset(n)},_getSelectionForOffset:function(t,e){return t.shiftKey&&this.selectionStart!==this.selectionEnd&&e?this.selectionEnd:this.selectionStart},getUpCursorOffset:function(t,e){var i=this._getSelectionForOffset(t,e),r=this.get2DCursorLocation(i),n=r.lineIndex;if(0===n||t.metaKey||33===t.keyCode)return-i;var o=r.charIndex,s=this._getWidthBeforeCursor(n,o),a=this._getIndexOnLine(n-1,s),h=this._textLines[n].slice(0,o),c=this.missingNewlineOffset(n-1);return-this._textLines[n-1].length+a-h.length+(1-c)},_getIndexOnLine:function(t,e){for(var i,r,n=this._textLines[t],o=this._getLineLeftOffset(t),s=0,a=0,h=n.length;ae){r=!0;var c=o-i,l=o,u=Math.abs(c-e);s=Math.abs(l-e)=this._text.length&&this.selectionEnd>=this._text.length||this._moveCursorUpOrDown("Down",t)},moveCursorUp:function(t){0===this.selectionStart&&0===this.selectionEnd||this._moveCursorUpOrDown("Up",t)},_moveCursorUpOrDown:function(t,e){var i=this["get"+t+"CursorOffset"](e,"right"===this._selectionDirection);e.shiftKey?this.moveCursorWithShift(i):this.moveCursorWithoutShift(i),0!==i&&(this.setSelectionInBoundaries(),this.abortCursorAnimation(),this._currentCursorOpacity=1,this.initDelayedCursor(),this._fireSelectionChanged(),this._updateTextarea())},moveCursorWithShift:function(t){var e="left"===this._selectionDirection?this.selectionStart+t:this.selectionEnd+t;return this.setSelectionStartEndWithShift(this.selectionStart,this.selectionEnd,e),0!==t},moveCursorWithoutShift:function(t){return t<0?(this.selectionStart+=t,this.selectionEnd=this.selectionStart):(this.selectionEnd+=t,this.selectionStart=this.selectionEnd),0!==t},moveCursorLeft:function(t){0===this.selectionStart&&0===this.selectionEnd||this._moveCursorLeftOrRight("Left",t)},_move:function(t,e,i){var r;if(t.altKey)r=this["findWordBoundary"+i](this[e]);else{if(!t.metaKey&&35!==t.keyCode&&36!==t.keyCode)return this[e]+="Left"===i?-1:1,!0;r=this["findLineBoundary"+i](this[e])}if(void 0!==typeof r&&this[e]!==r)return this[e]=r,!0},_moveLeft:function(t,e){return this._move(t,e,"Left")},_moveRight:function(t,e){return this._move(t,e,"Right")},moveCursorLeftWithoutShift:function(t){var e=!0;return 
this._selectionDirection="left",this.selectionEnd===this.selectionStart&&0!==this.selectionStart&&(e=this._moveLeft(t,"selectionStart")),this.selectionEnd=this.selectionStart,e},moveCursorLeftWithShift:function(t){return"right"===this._selectionDirection&&this.selectionStart!==this.selectionEnd?this._moveLeft(t,"selectionEnd"):0!==this.selectionStart?(this._selectionDirection="left",this._moveLeft(t,"selectionStart")):void 0},moveCursorRight:function(t){this.selectionStart>=this._text.length&&this.selectionEnd>=this._text.length||this._moveCursorLeftOrRight("Right",t)},_moveCursorLeftOrRight:function(t,e){var i="moveCursor"+t+"With";this._currentCursorOpacity=1,e.shiftKey?i+="Shift":i+="outShift",this[i](e)&&(this.abortCursorAnimation(),this.initDelayedCursor(),this._fireSelectionChanged(),this._updateTextarea())},moveCursorRightWithShift:function(t){return"left"===this._selectionDirection&&this.selectionStart!==this.selectionEnd?this._moveRight(t,"selectionStart"):this.selectionEnd!==this._text.length?(this._selectionDirection="right",this._moveRight(t,"selectionEnd")):void 0},moveCursorRightWithoutShift:function(t){var e=!0;return this._selectionDirection="right",this.selectionStart===this.selectionEnd?(e=this._moveRight(t,"selectionStart"),this.selectionEnd=this.selectionStart):this.selectionStart=this.selectionEnd,e},removeChars:function(t,e){void 0===e&&(e=t+1),this.removeStyleFromTo(t,e),this._text.splice(t,e-t),this.text=this._text.join(""),this.set("dirty",!0),this._shouldClearDimensionCache()&&(this.initDimensions(),this.setCoords()),this._removeExtraneousStyles()},insertChars:function(t,e,i,r){void 0===r&&(r=i),r>i&&this.removeStyleFromTo(i,r);var n=T.util.string.graphemeSplit(t);this.insertNewStyleBlock(n,i,e),this._text=[].concat(this._text.slice(0,i),n,this._text.slice(r)),this.text=this._text.join(""),this.set("dirty",!0),this._shouldClearDimensionCache()&&(this.initDimensions(),this.setCoords()),this._removeExtraneousStyles()}}),function(){var t=T.util.toFixed,e=/ +/g;T.util.object.extend(T.Text.prototype,{_toSVG:function(){var t=this._getSVGLeftTopOffsets(),e=this._getSVGTextAndBg(t.textTop,t.textLeft);return this._wrapSVGTextAndBg(e)},toSVG:function(t){return this._createBaseSVGMarkup(this._toSVG(),{reviver:t,noStyle:!0,withShadow:!0})},_getSVGLeftTopOffsets:function(){return{textLeft:-this.width/2,textTop:-this.height/2,lineTop:this.getHeightOfLine(0)}},_wrapSVGTextAndBg:function(t){var e=this.getSvgTextDecoration(this);return[t.textBgRects.join(""),'\t\t",t.textSpans.join(""),"\n"]},_getSVGTextAndBg:function(t,e){var i,r=[],n=[],o=t;this._setSVGBg(n);for(var s=0,a=this._textLines.length;s",T.util.string.escapeXml(i),""].join("")},_setSVGTextLineText:function(t,e,i,r){var n,o,s,a,h,c=this.getHeightOfLine(e),l=-1!==this.textAlign.indexOf("justify"),u="",f=0,d=this._textLines[e];r+=c*(1-this._fontSizeFraction)/this.lineHeight;for(var g=0,p=d.length-1;g<=p;g++)h=g===p||this.charSpacing,u+=d[g],s=this.__charBounds[e][g],0===f?(i+=s.kernedWidth-s.width,f+=s.width):f+=s.kernedWidth,l&&!h&&this._reSpaceAndTab.test(d[g])&&(h=!0),h||(n=n||this.getCompleteStyleDeclaration(e,g),o=this.getCompleteStyleDeclaration(e,g+1),h=this._hasStyleChangedForSvg(n,o)),h&&(a=this._getStyleDeclaration(e,g)||{},t.push(this._createTextCharSpan(u,a,i,r)),u="",n=o,i+=f,f=0)},_pushTextBgRect:function(e,i,r,n,o,s){var a=T.Object.NUM_FRACTION_DIGITS;e.push("\t\t\n')},_setSVGTextLineBg:function(t,e,i,r){for(var 
n,o,s=this._textLines[e],a=this.getHeightOfLine(e)/this.lineHeight,h=0,c=0,l=this.getValueOfPropertyAt(e,0,"textBackgroundColor"),u=0,f=s.length;uthis.width&&this._set("width",this.dynamicMinWidth),-1!==this.textAlign.indexOf("justify")&&this.enlargeSpaces(),this.height=this.calcTextHeight(),this.saveState({propertySet:"_dimensionAffectingProps"}))},_generateStyleMap:function(t){for(var e=0,i=0,r=0,n={},o=0;o0?(i=0,r++,e++):!this.splitByGrapheme&&this._reSpaceAndTab.test(t.graphemeText[r])&&o>0&&(i++,r++),n[o]={line:e,offset:i},r+=t.graphemeLines[o].length,i+=t.graphemeLines[o].length;return n},styleHas:function(t,i){if(this._styleMap&&!this.isWrapping){var r=this._styleMap[i];r&&(i=r.line)}return e.Text.prototype.styleHas.call(this,t,i)},isEmptyStyles:function(t){if(!this.styles)return!0;var e,i,r=0,n=!1,o=this._styleMap[t],s=this._styleMap[t+1];for(var a in o&&(t=o.line,r=o.offset),s&&(n=s.line===t,e=s.offset),i=void 0===t?this.styles:{line:this.styles[t]})for(var h in i[a])if(h>=r&&(!n||hr&&!v?(a.push(h),h=[],o=d,v=!0):o+=m,v||s||h.push(f),h=h.concat(l),g=s?0:this._measureWord([f],i,u),u++,v=!1,d>p&&(p=d);return y&&a.push(h),p+n>this.dynamicMinWidth&&(this.dynamicMinWidth=p-m+n),a},isEndOfWrapping:function(t){return!this._styleMap[t+1]||this._styleMap[t+1].line!==this._styleMap[t].line},missingNewlineOffset:function(t){return this.splitByGrapheme?this.isEndOfWrapping(t)?1:0:1},_splitTextIntoLines:function(t){for(var i=e.Text.prototype._splitTextIntoLines.call(this,t),r=this._wrapText(i.lines,this.width),n=new Array(r.length),o=0;o","",e.toSVG(t.reviver),"",""];return e._objects[0].fill=i,r.join("\n")}return""},_createBaseSVGMarkup:function(t,e){var i=this.getEraser();if(i){var n=this.eraserToSVG(e);this.clipPath=null;var o=r.call(this,t,e);return this.clipPath=i,[n,o.replace(">",'mask="url(#'+i.clipPathId+')" >')].join("\n")}return r.call(this,t,e)}});var n=T.Group.prototype._restoreObjectsState,o=T.Group.prototype.toObject,s=T.Group.prototype._getBounds;T.util.object.extend(T.Group.prototype,{_getBounds:function(t,e,i){if(this.eraser)return this.width=this._objects[0].width,void(this.height=this._objects[0].height);s.call(this,t,e,i)},_addEraserPathToObjects:function(t){this._objects.forEach((function(e){T.EraserBrush.prototype._addPathToObjectEraser.call(T.EraserBrush.prototype,e,t)}))},applyEraserToObjects:function(){var t=this;if(this.getEraser()){var e=t.calcTransformMatrix();t.getEraser().clone((function(i){var r=i._objects[0].clipPath;t.clipPath=r||void 0,i.getObjects("path").forEach((function(i){var n=T.util.multiplyTransformMatrices(e,i.calcTransformMatrix());T.util.applyTransformToObject(i,n),r?r.clone((function(r){T.EraserBrush.prototype.applyClipPathToPath.call(T.EraserBrush.prototype,i,r,e),t._addEraserPathToObjects(i)})):t._addEraserPathToObjects(i)}))}))}},_restoreObjectsState:function(){return!0===this.erasable&&this.applyEraserToObjects(),n.call(this)},toObject:function(t){return o.call(this,["eraser"].concat(t))}}),T.util.object.extend(T.Canvas.prototype,{isErasing:function(){return this.isDrawingMode&&this.freeDrawingBrush&&"eraser"===this.freeDrawingBrush.type&&this.freeDrawingBrush._isErasing},renderAll:function(){if(!this.contextTopDirty||this._groupSelector||this.isDrawingMode||(this.clearContext(this.contextTop),this.contextTopDirty=!1),!this.isErasing()){this.hasLostContext&&this.renderTopLayer(this.contextTop);var t=this.contextContainer;return 
this.renderCanvas(t,this._chooseObjectsToRender()),this}this.freeDrawingBrush._render()}}),T.EraserBrush=T.util.createClass(T.PencilBrush,{type:"eraser",_ready:!1,_drawOverlayOnTop:!1,_isErasing:!1,initialize:function(t){this.callSuper("initialize",t),this._renderBound=this._render.bind(this),this.render=this.render.bind(this)},hideObject:function(t){t&&(t._originalOpacity=t.opacity,t.set({opacity:0}))},restoreObjectVisibility:function(t){t&&t._originalOpacity&&(t.set({opacity:t._originalOpacity}),t._originalOpacity=void 0)},_isErasable:function(t){return!1!==t.erasable},prepareCanvasBackgroundForLayer:function(t){if("overlay"!==t){var e=this.canvas.backgroundImage,i="top"===t;e&&this._isErasable(e)===!i&&this.hideObject(e)}},prepareCanvasOverlayForLayer:function(t){var e=this.canvas,i=e.overlayImage,r=!!e.overlayColor;if(e.overlayColor&&"overlay"!==t&&(this.__overlayColor=e.overlayColor,delete e.overlayColor),"bottom"===t)return this.hideObject(i),!1;var n="top"===t,o=i&&!this._isErasable(i)||r;return i&&this._isErasable(i)===!n&&this.hideObject(i),o},restoreCanvasDrawables:function(){var t=this.canvas;this.__overlayColor&&(t.overlayColor=this.__overlayColor,delete this.__overlayColor),this.restoreObjectVisibility(t.backgroundImage),this.restoreObjectVisibility(t.overlayImage)},prepareCollectionTraversal:function(t){var e=this;t.forEachObject((function(t){t.forEachObject&&"deep"===t.erasable?e.prepareCollectionTraversal(t):t.erasable&&e.hideObject(t)}))},restoreCollectionTraversal:function(t){var e=this;t.forEachObject((function(t){t.forEachObject&&"deep"===t.erasable?e.restoreCollectionTraversal(t):e.restoreObjectVisibility(t)}))},prepareCanvasObjectsForLayer:function(t){"bottom"===t&&this.prepareCollectionTraversal(this.canvas)},restoreCanvasObjectsFromLayer:function(t){"bottom"===t&&this.restoreCollectionTraversal(this.canvas)},prepareCanvasForLayer:function(t){return this.prepareCanvasBackgroundForLayer(t),this.prepareCanvasObjectsForLayer(t),this.prepareCanvasOverlayForLayer(t)},restoreCanvasFromLayer:function(t){this.restoreCanvasDrawables(),this.restoreCanvasObjectsFromLayer(t)},renderBottomLayer:function(){var t=this.canvas;this.prepareCanvasForLayer("bottom"),t.renderCanvas(t.getContext(),t.getObjects().filter((function(t){return!t.erasable||t.forEachObject}))),this.restoreCanvasFromLayer("bottom")},renderTopLayer:function(){var t=this.canvas;this._drawOverlayOnTop=this.prepareCanvasForLayer("top"),t.renderCanvas(t.contextTop,t.getObjects()),this.callSuper("_render"),this.restoreCanvasFromLayer("top")},renderOverlay:function(){this.prepareCanvasForLayer("overlay");var t=this.canvas,e=t.contextTop;t._renderOverlay(e),this.restoreCanvasFromLayer("overlay")},_saveAndTransform:function(t){this.callSuper("_saveAndTransform",t),t.globalCompositeOperation="destination-out"},needsFullRender:function(){return this.callSuper("needsFullRender")||this._drawOverlayOnTop},onMouseDown:function(t,e){this.canvas._isMainEvent(e.e)&&(this._prepareForDrawing(t),this._captureDrawingPath(t),this._isErasing=!0,this.canvas.fire("erasing:start"),this._ready=!0,this._render())},_render:function(){this._ready&&(this.isRendering=1,this.renderBottomLayer(),this.renderTopLayer(),this.renderOverlay(),this.isRendering=0)},render:function(){return!!this._isErasing&&(this.isRendering?this.isRendering=T.util.requestAnimFrame(this._renderBound):this._render(),!0)},applyClipPathToPath:function(t,e,i){var 
r=t.calcTransformMatrix(),n=e.calcTransformMatrix(),o=T.util.multiplyTransformMatrices(T.util.invertTransform(r),i);return T.util.applyTransformToObject(e,T.util.multiplyTransformMatrices(o,n)),t.clipPath=e,t},clonePathWithClipPath:function(t,e,i){var r=e.calcTransformMatrix(),n=e.getClipPath(),o=this;t.clone((function(t){n.clone((function(e){i(o.applyClipPathToPath(t,e,r))}))}))},_addPathToObjectEraser:function(t,e){var i,r=this;if(t.forEachObject&&"deep"===t.erasable){var n=t._objects.filter((function(t){return t.erasable}));n.length>0&&t.clipPath?this.clonePathWithClipPath(e,t,(function(t){n.forEach((function(e){r._addPathToObjectEraser(e,t)}))})):n.length>0&&n.forEach((function(t){r._addPathToObjectEraser(t,e)}))}else{if(t.getEraser())i=t.clipPath;else{var o=t._getNonTransformedDimensions(),s=new T.Rect({fill:"rgb(0,0,0)",width:o.x,height:o.y,clipPath:t.clipPath,originX:"center",originY:"center"});i=new T.Group([s],{eraser:!0})}e.clone((function(e){e.globalCompositeOperation="destination-out";var n=T.util.multiplyTransformMatrices(T.util.invertTransform(t.calcTransformMatrix()),e.calcTransformMatrix());T.util.applyTransformToObject(e,n),i.addWithUpdate(e),t.set({clipPath:i,dirty:!0}),t.fire("erasing:end",{path:e}),t.group&&Array.isArray(r.__subTargets)&&r.__subTargets.push(t)}))}},applyEraserToCanvas:function(t){var e=this.canvas,i={};return["backgroundImage","overlayImage"].forEach((function(r){var n=e[r];n&&n.erasable&&(this._addPathToObjectEraser(n,t),i[r]=n)}),this),i},_finalizeAndAddPath:function(){var t=this.canvas.contextTop,e=this.canvas;t.closePath(),this.decimate&&(this._points=this.decimatePoints(this._points,this.decimate)),e.clearContext(e.contextTop),this._isErasing=!1;var i=this._points&&this._points.length>1?this.convertPointsToSVGPath(this._points):null;if(!i||this._isEmptySVGPath(i))return e.fire("erasing:end"),void e.requestRenderAll();var r=this.createPath(i);r.setCoords(),e.fire("before:path:created",{path:r});var n=this.applyEraserToCanvas(r),o=this;this.__subTargets=[];var s=[];e.forEachObject((function(t){t.erasable&&t.intersectsWithObject(r,!0,!0)&&(o._addPathToObjectEraser(t,r),s.push(t))})),e.fire("erasing:end",{path:r,targets:s,subTargets:this.__subTargets,drawables:n}),delete this.__subTargets,e.requestRenderAll(),r.setCoords(),this._resetShadow(),e.fire("path:created",{path:r})}})}()}(tt);var rt=tt.fabric;export{tt as default,rt as fabric}; +//# sourceMappingURL=/sm/4595deba885e6b95e012cf1ed9deed8a7b30edf3ae4c7d4bd5cc11d619b75c11.map \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/css/account.css b/ComfyUI-Easy-Use/web_version/v1/css/account.css new file mode 100644 index 0000000000000000000000000000000000000000..626aa07c42783ce5377421f3207c4db105957b4e --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/css/account.css @@ -0,0 +1,93 @@ +.easyuse-account{ + +} +.easyuse-account-user{ + font-size: 10px; + color:var(--descrip-text); + text-align: center; +} +.easyuse-account-user-info{ + display: flex; + justify-content: space-between; + align-items: center; + padding-bottom:10px; + cursor: pointer; +} +.easyuse-account-user-info .user{ + display: flex; + align-items: center; +} +.easyuse-account-user-info .edit{ + padding:5px 10px; + background: var(--comfy-menu-bg); + border-radius:4px; +} +.easyuse-account-user-info:hover{ + filter:brightness(110%); +} +.easyuse-account-user-info h5{ + margin:0; + font-size: 10px; + text-align: left; +} +.easyuse-account-user-info h6{ + margin:0; + font-size: 8px; + text-align: left; + 
font-weight: 300; +} +.easyuse-account-user-info .remark{ + margin-top: 4px; +} +.easyuse-account-user-info .avatar{ + width: 36px; + height: 36px; + background: var(--comfy-input-bg); + border-radius: 50%; + margin-right: 5px; + display: flex; + justify-content: center; + align-items: center; + font-size: 16px; + overflow: hidden; +} +.easyuse-account-user-info .avatar img{ + width: 100%; + height: 100%; +} +.easyuse-account-dialog{ + width: 600px; +} +.easyuse-account-dialog-main a, .easyuse-account-dialog-main a:visited{ + font-weight: 400; + color: var(--theme-color-light); +} +.easyuse-account-dialog-item{ + display: flex; + justify-content: flex-start; + align-items: center; + padding: 10px 0; + border-bottom: 1px solid var(--border-color); +} +.easyuse-account-dialog-item input{ + padding:5px; + margin-right:5px; +} +.easyuse-account-dialog-item input.key{ + flex:1; +} +.easyuse-account-dialog-item button{ + cursor: pointer; + margin-left:5px!important; + padding:5px!important; + font-size: 16px!important; +} +.easyuse-account-dialog-item button:hover{ + filter:brightness(120%); +} +.easyuse-account-dialog-item button.choose { + background: var(--theme-color); +} +.easyuse-account-dialog-item button.delete{ + background: var(--error-color); +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/css/chooser.css b/ComfyUI-Easy-Use/web_version/v1/css/chooser.css new file mode 100644 index 0000000000000000000000000000000000000000..0c1696bf36fcbe2a5b1b0e83ea00897be97bf391 --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/css/chooser.css @@ -0,0 +1,35 @@ +.easyuse-chooser-dialog{ + max-width: 600px; +} +.easyuse-chooser-dialog-title{ + font-size: 18px; + font-weight: 700; + text-align: center; + color:var(--input-text); + margin:0; +} +.easyuse-chooser-dialog-images{ + margin-top:10px; + display: flex; + flex-wrap: wrap; + width: 100%; + box-sizing: border-box; +} +.easyuse-chooser-dialog-images img{ + width: 50%; + height: auto; + cursor: pointer; + box-sizing: border-box; + filter:brightness(80%); +} +.easyuse-chooser-dialog-images img:hover{ + filter:brightness(100%); +} +.easyuse-chooser-dialog-images img.selected{ + border: 4px solid var(--success-color); +} + +.easyuse-chooser-hidden{ + display: none; + height:0; +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/css/contextmenu.css b/ComfyUI-Easy-Use/web_version/v1/css/contextmenu.css new file mode 100644 index 0000000000000000000000000000000000000000..9fc0e9583b19394cf73259ead2cba816eac3e351 --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/css/contextmenu.css @@ -0,0 +1,20 @@ +.easyuse-model{ + position:relative; +} +.easyuse-model:hover img{ + display: block; + opacity: 1; +} +.easyuse-model img{ + position: absolute; + z-index:1; + right:-155px; + top:0; + width:150px; + height:auto; + display: none; + filter:brightness(70%); + -webkit-filter: brightness(70%); + opacity: 0; + transition:all 0.5s ease-in-out; +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/css/dropdown.css b/ComfyUI-Easy-Use/web_version/v1/css/dropdown.css new file mode 100644 index 0000000000000000000000000000000000000000..176ff618ff050358844309bb344511758dcae6a5 --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/css/dropdown.css @@ -0,0 +1,68 @@ +.easy-dropdown, .easy-nested-dropdown { + position: relative; + box-sizing: border-box; + background-color: #171717; + box-shadow: 0 4px 4px rgba(255, 255, 255, .25); + padding: 0; + margin: 0; + list-style: none; + z-index: 1000; + 
overflow: visible; + max-height: fit-content; + max-width: fit-content; +} + +.easy-dropdown { + position: absolute; + border-radius: 0; +} + +/* Style for final items */ +.easy-dropdown li.item, .easy-nested-dropdown li.item { + font-weight: normal; + min-width: max-content; +} + +/* Style for folders (parent items) */ +.easy-dropdown li.folder, .easy-nested-dropdown li.folder { + cursor: default; + position: relative; + border-right: 3px solid cyan; +} + +.easy-dropdown li.folder::after, .easy-nested-dropdown li.folder::after { + content: ">"; + position: absolute; + right: 2px; + font-weight: normal; +} + +.easy-dropdown li, .easy-nested-dropdown li { + padding: 4px 10px; + cursor: pointer; + font-family: system-ui; + font-size: 0.7rem; + position: relative; +} + +/* Style for nested dropdowns */ +.easy-nested-dropdown { + position: absolute; + top: 0; + left: 100%; + margin: 0; + border: none; + display: none; +} + +.easy-dropdown li.selected > .easy-nested-dropdown, +.easy-nested-dropdown li.selected > .easy-nested-dropdown { + display: block; + border: none; +} + +.easy-dropdown li.selected, +.easy-nested-dropdown li.selected { + background-color: #e5e5e5; + border: none; +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/css/easy.css b/ComfyUI-Easy-Use/web_version/v1/css/easy.css new file mode 100644 index 0000000000000000000000000000000000000000..b2f835372f8daba1de442a008b4cf196cd9e2a83 --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/css/easy.css @@ -0,0 +1,130 @@ + +.pysssss-workflow-popup{ + min-width:220px!important; + /*right:0px!important;*/ + /*left:auto!important;*/ +} +body{ + font-family: var(--font-family)!important; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} +textarea{ + font-family: var(--font-family)!important; +} + +.comfy-multiline-input{ + background-color: transparent; + border:1px solid var(--border-color); + border-radius:8px; + padding: 8px; + font-size: 12px; +} +.comfy-modal { + border:1px solid var(--border-color); + box-shadow:none; + backdrop-filter: blur(8px) brightness(120%); +} +.comfy-menu{ + border-radius:16px; + box-shadow:0 0 1px var(--descrip-text); + backdrop-filter: blur(8px) brightness(120%); +} +.comfy-menu button,.comfy-modal button { + font-size: 14px; + padding:4px 0; + margin-bottom:4px; +} +.comfy-menu button.comfy-settings-btn{ + font-size: 12px; +} +.comfy-menu-btns { + margin-bottom: 4px; +} +.comfy-menu-btns button,.comfy-list-actions button{ + font-size: 10px; +} +.comfy-menu > button, +.comfy-menu-btns button, +.comfy-menu .comfy-list button, +.comfy-modal button { + border-width:1px; +} +.comfy-modal-content{ + width: 100%; +} + + +dialog{ + border:1px solid var(--border-color); + background:transparent; + backdrop-filter: blur(8px) brightness(120%); + box-shadow:none; +} +.cm-title{ + background-color:transparent!important; +} +.cm-notice-board{ + border-radius:10px!important; + border:1px solid var(--border-color)!important; +} +.cm-menu-container{ + margin-bottom:50px!important; +} +hr{ + border:1px solid var(--border-color); +} +#comfy-dev-save-api-button{ + justify-content: center; +} +#shareButton{ + background:linear-gradient(to left,var(--theme-color),var(--theme-color-light))!important; + color:white!important; +} +#queue-button{ + position:relative; + overflow:hidden; + min-height:30px; + z-index:1; +} + +#queue-button:after{ + clear: both; + content:attr(data-attr); + background:green; + color:#FFF; + width:var(--process-bar-width); + height:100%; + 
position:absolute; + top:0; + left:0; + z-index:0; + text-align:center; + display:flex; + justify-content:center; + align-items:center; +} + +.litegraph .litemenu-entry.has_submenu { + border-right: 2px solid var(--theme-color); +} +::-webkit-scrollbar { + width: 0em; +} +::-webkit-scrollbar-track { + background-color: transparent; +} +::-webkit-scrollbar-thumb { + background-color: transparent; + border-radius: 2px; +} +::-webkit-scrollbar-thumb:hover { + background-color: transparent; +} + +[data-theme="dark"] .workspace_manager .chakra-card{ + background-color:var(--comfy-menu-bg)!important; +} +.workspace_manager .chakra-card{ + width: 400px; +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/css/groupmap.css b/ComfyUI-Easy-Use/web_version/v1/css/groupmap.css new file mode 100644 index 0000000000000000000000000000000000000000..75aa8679c80bbc80e06ee174d4d570fb9f1b6844 --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/css/groupmap.css @@ -0,0 +1,34 @@ +#easyuse_groups_map{ + flex-direction: column; + align-items: end; + display:flex;position: absolute; + top: 50px; left: 10px; width: 180px; + border-radius:12px; + min-height:100px; + max-height:400px; + color: var(--descrip-text); + background-color: var(--comfy-menu-bg); + padding: 10px 4px; + border: 1px solid var(--border-color); + z-index: 399; + padding-top: 0; +} +#easyuse_groups_map .icon{ + width: 12px; + height:12px; +} +#easyuse_groups_map .closeBtn{ + float: right; + color: var(--input-text); + border-radius:30px; + background-color: var(--comfy-input-bg); + border: 1px solid var(--border-color); + cursor: pointer; + aspect-ratio: 1 / 1; + display: flex; + justify-content: center; + align-items: center; +} +#easyuse_groups_map .closeBtn:hover{ + filter:brightness(120%); +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/css/index.css b/ComfyUI-Easy-Use/web_version/v1/css/index.css new file mode 100644 index 0000000000000000000000000000000000000000..5c148a2153dc91441e6b9581b1d6794ea2159161 --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/css/index.css @@ -0,0 +1,11 @@ +@import "theme.css"; +@import "dropdown.css"; +@import "selector.css"; +@import "groupmap.css"; +@import "contextmenu.css"; +@import "modelinfo.css"; +@import "toast.css"; +@import "account.css"; +@import "chooser.css"; +@import "toolbar.css"; +@import "sliderControl.css"; \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/css/modelinfo.css b/ComfyUI-Easy-Use/web_version/v1/css/modelinfo.css new file mode 100644 index 0000000000000000000000000000000000000000..370c21f9a1c0625ce4d88780ceb1abc9e9c1e25f --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/css/modelinfo.css @@ -0,0 +1,265 @@ +.easyuse-model-info { + color: white; + max-width: 90vw; + font-family: var(--font-family); +} +.easyuse-model-content { + display: flex; + flex-direction: column; + overflow: hidden; +} +.easyuse-model-header{ + margin:0 0 15px 0; +} +.easyuse-model-header-remark{ + display: flex; + align-items: center; + margin-top:5px; +} +.easyuse-model-info h2 { + text-align: left; + margin:0; +} +.easyuse-model-info h5 { + text-align: left; + margin:0 15px 0 0px; + font-weight: 400; + color:var(--descrip-text); +} +.easyuse-model-info p { + margin: 5px 0; +} +.easyuse-model-info a { + color: var(--theme-color-light); +} +.easyuse-model-info a:hover { + text-decoration: underline; +} +.easyuse-model-tags-list { + display: flex; + flex-wrap: wrap; + list-style: none; + gap: 10px; + max-height: 200px; + overflow: auto; 
+ margin: 10px 0; + padding: 0; +} +.easyuse-model-tag { + background-color: var(--comfy-input-bg); + border: 2px solid var(--border-color); + color: var(--input-text); + display: flex; + align-items: center; + gap: 5px; + border-radius: 5px; + padding: 2px 5px; + cursor: pointer; +} +.easyuse-model-tag--selected span::before { + content: "✅"; + position: absolute; + background-color: var(--theme-color-light); + left: 0; + top: 0; + right: 0; + bottom: 0; + text-align: center; +} +.easyuse-model-tag:hover { + border: 2px solid var(--theme-color-light); +} +.easyuse-model-tag p { + margin: 0; +} +.easyuse-model-tag span { + text-align: center; + border-radius: 5px; + background-color: var(--theme-color-light); + padding: 2px; + position: relative; + min-width: 20px; + overflow: hidden; + color: #fff; +} + +.easyuse-model-metadata .comfy-modal-content { + max-width: 100%; +} +.easyuse-model-metadata label { + margin-right: 1ch; + color: #ccc; +} + +.easyuse-model-metadata span { + color: var(--theme-color-light); +} + +.easyuse-preview { + max-width:660px; + margin-right: 15px; + position: relative; +} +.easyuse-preview-group{ + position: relative; + overflow: hidden; + border-radius:.5rem; + width: 660px; +} +.easyuse-preview-list{ + display: flex; + flex-wrap: nowrap; + width: 100%; + transition: all .5s ease-in-out; +} +.easyuse-preview-list.no-transition{ + transition: none; +} +.easyuse-preview-slide{ + display: flex; + flex-basis: calc(50% - 5px); + flex-grow: 0; + flex-shrink: 0; + position: relative; + justify-content: center; + align-items: center; + padding-right:5px; + padding-left:0; +} +.easyuse-preview-slide:nth-child(even){ + padding-left:5px; + padding-right:0; +} +.easyuse-preview-slide-content{ + position: relative; + min-height:150px; + width: 100%; +} +.easyuse-preview-slide-content .save{ + position: absolute; + right: 6px; + z-index: 12; + bottom: 6px; + display: flex; + align-items: center; + height: 26px; + padding: 0 9px; + color: var(--input-text); + font-size: 12px; + line-height: 26px; + background: rgba(0, 0, 0, .5); + border-radius: 13px; + cursor: pointer; + min-width:80px; + text-align: center; +} +.easyuse-preview-slide-content .save:hover{ + filter: brightness(120%); + will-change: auto; +} + +.easyuse-preview-slide-content img { + border-radius: 14px; + object-position: center center; + max-width: 100%; + max-height:700px; + border-style: none; + vertical-align: middle; +} +.easyuse-preview button { + position: absolute; + z-index:10; + top: 50%; + display: flex; + align-items: center; + justify-content: center; + width:30px; + height:30px; + border-radius:15px; + border:1px solid rgba(66, 63, 78, .15); + background-color: rgba(66, 63, 78, .5); + color:hsla(0, 0%, 100%, .8); + transition-property: color, background-color, border-color, text-decoration-color, fill, stroke; + transition-timing-function: cubic-bezier(.4,0,.2,1); + transition-duration: .15s; + transform: translateY(-50%); +} +.easyuse-preview button.left{ + left:10px; +} +.easyuse-preview button.right{ + right:10px; +} + +.easyuse-model-detail{ + margin-top: 16px; + overflow: hidden; + border: 1px solid var(--border-color); + border-radius: 8px; + width:300px; +} +.easyuse-model-detail-head{ + height: 40px; + padding: 0 10px; + font-weight: 500; + font-size: 14px; + font-style: normal; + line-height: 40px; +} +.easyuse-model-detail-body{ + box-sizing: border-box; + font-size: 12px; +} +.easyuse-model-detail-item{ + display: flex; + justify-content: flex-start; + border-top: 1px solid 
var(--border-color); +} +.easyuse-model-detail-item-label{ + flex-shrink: 0; + width: 88px; + padding-top: 5px; + padding-bottom: 5px; + padding-left: 10px; + border-right: 1px solid var(--border-color); + color: var(--input-text); + font-weight: 400; +} +.easyuse-model-detail-item-value{ + display: flex; + flex-wrap: wrap; + padding: 5px 10px 5px 10px; + color: var(--input-text); +} +.easyuse-model-detail-textarea{ + border-top:1px solid var(--border-color); + padding:10px; + height:100px; + overflow-y: auto; + font-size: 12px; +} +.easyuse-model-detail-textarea textarea{ + width:100%; + height:100%; + border:0; + background-color:transparent; + color: var(--input-text); +} +.easyuse-model-detail-textarea textarea::placeholder{ + color:var(--descrip-text); +} +.easyuse-model-detail-textarea.empty{ + display: flex; + justify-content: center; + align-items: center; + color: var(--descrip-text); +} + +.easyuse-model-notes { + background-color: rgba(0, 0, 0, 0.25); + padding: 5px; + margin-top: 5px; +} +.easyuse-model-notes:empty { + display: none; +} diff --git a/ComfyUI-Easy-Use/web_version/v1/css/selector.css b/ComfyUI-Easy-Use/web_version/v1/css/selector.css new file mode 100644 index 0000000000000000000000000000000000000000..4ecce1e4f7ebca4c207bd55b87c150d2061bd9bd --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/css/selector.css @@ -0,0 +1,108 @@ +.easyuse-prompt-styles{ + overflow: auto; +} +.easyuse-prompt-styles .tools{ + display:flex; + justify-content:space-between; + height:30px; + padding-bottom:10px; + border-bottom:2px solid var(--border-color); +} +.easyuse-prompt-styles .tools button.delete{ + height:30px; + border-radius: 8px; + border: 2px solid var(--border-color); + font-size:11px; + background:var(--comfy-input-bg); + color:var(--error-text); + box-shadow:none; + cursor:pointer; +} +.easyuse-prompt-styles .tools button.delete:hover{ + filter: brightness(1.2); +} +.easyuse-prompt-styles .tools textarea.search{ + flex:1; + margin-left:10px; + height:20px; + line-height:20px; + border-radius: 8px; + border: 2px solid var(--border-color); + font-size:11px; + background:var(--comfy-input-bg); + color:var(--input-text); + box-shadow:none; + padding:4px 10px; + outline: none; + resize: none; + appearance:none; +} +.easyuse-prompt-styles-list{ + list-style: none; + padding: 0; + margin: 0; + min-height: 150px; + height: calc(100% - 40px); + overflow: auto; + /*display: flex;*/ + /*flex-wrap: wrap;*/ +} +.easyuse-prompt-styles-list.no-top{ + height: auto; +} + +.easyuse-prompt-styles-tag{ + display: inline-block; + vertical-align: middle; + margin-top: 8px; + margin-right: 8px; + padding:4px; + color: var(--input-text); + background-color: var(--comfy-input-bg); + border-radius: 8px; + border: 2px solid var(--border-color); + font-size:11px; + cursor:pointer; +} +.easyuse-prompt-styles-tag.hide{ + display:none; +} +.easyuse-prompt-styles-tag:hover{ + filter: brightness(1.2); +} +.easyuse-prompt-styles-tag input{ + --ring-color: transparent; + position: relative; + box-shadow: none; + border: 2px solid var(--border-color); + border-radius: 2px; + background: linear-gradient(135deg, var(--comfy-menu-bg) 0%, var(--comfy-input-bg) 60%); +} +.easyuse-prompt-styles-tag input[type=checkbox]:checked{ + border: 1px solid var(--theme-color-light); + background-color: var(--theme-color-light); + background-image: url("data:image/svg+xml,%3csvg viewBox='0 0 16 16' fill='white' xmlns='http://www.w3.org/2000/svg'%3e%3cpath d='M12.207 4.793a1 1 0 010 1.414l-5 5a1 1 0 01-1.414 0l-2-2a1 1 0 
011.414-1.414L6.5 9.086l4.293-4.293a1 1 0 011.414 0z'/%3e%3c/svg%3e"); +} +.easyuse-prompt-styles-tag input[type=checkbox]{ + color-adjust: exact; + display: inline-block; + flex-shrink: 0; + vertical-align: middle; + appearance: none; + border: 2px solid var(--border-color); + background-origin: border-box; + padding: 0; + width: 1rem; + height: 1rem; + border-radius:4px; + color:var(--theme-color-light); + user-select: none; +} +.easyuse-prompt-styles-tag span{ + margin:0 4px; + vertical-align: middle; +} +#show_image_id{ + width:128px; + height:128px; +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/css/sliderControl.css b/ComfyUI-Easy-Use/web_version/v1/css/sliderControl.css new file mode 100644 index 0000000000000000000000000000000000000000..42e749b45422289c02be96ab3f17ceb33ca9a9cd --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/css/sliderControl.css @@ -0,0 +1,66 @@ +.easyuse-slider{ + width:100%; + height:100%; + display: flex; + flex-direction: row; + justify-content: space-between; + position: relative; +} +.easyuse-slider-item{ + height: inherit; + min-width: 25px; + justify-content: center; + display: flex; + flex-direction: column; + align-items: center; +} +.easyuse-slider-item.positive .easyuse-slider-item-label{ + color: var(--success-color); +} +.easyuse-slider-item.negative .easyuse-slider-item-label{ + color: var(--error-color); +} +.easyuse-slider-item-input{ + height:15px; + font-size: 10px; + color: var(--input-text); +} +.easyuse-slider-item-label{ + height:15px; + border: none; + color: var(--descrip-text); + font-size: 8px; +} +.easyuse-slider-item-scroll { + width: 5px; + height: calc(100% - 30px); + background: var(--comfy-input-bg); + border-radius: 10px; + position: relative; +} +.easyuse-slider-item-bar{ + width: 10px; + height: 10px; + background: linear-gradient(to bottom, var(--input-text), var(--descrip-text)); + border-radius:100%; + box-shadow: 0 2px 10px var(--bg-color); + position: absolute; + top: 0; + left:-2.5px; + cursor: pointer; + z-index:1; +} +.easyuse-slider-item-area{ + width: 100%; + border-radius:20px; + position: absolute; + bottom: 0; + background: var(--input-text); + z-index:0; +} +.easyuse-slider-item.positive .easyuse-slider-item-area{ + background: var(--success-color); +} +.easyuse-slider-item.negative .easyuse-slider-item-area{ + background: var(--error-color); +} diff --git a/ComfyUI-Easy-Use/web_version/v1/css/theme.css b/ComfyUI-Easy-Use/web_version/v1/css/theme.css new file mode 100644 index 0000000000000000000000000000000000000000..d2754c8f1ba54e020d971ffb46cbcab3ec537f36 --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/css/theme.css @@ -0,0 +1,10 @@ +:root { + /*--theme-color:#3f3eed;*/ + /*--theme-color-light: #008ecb;*/ + --theme-color:#236692; + --theme-color-light: #3485bb; + --success-color: #52c41a; + --error-color: #ff4d4f; + --warning-color: #faad14; + --font-family: Inter, -apple-system, BlinkMacSystemFont, Helvetica Neue, sans-serif; +} diff --git a/ComfyUI-Easy-Use/web_version/v1/css/toast.css b/ComfyUI-Easy-Use/web_version/v1/css/toast.css new file mode 100644 index 0000000000000000000000000000000000000000..b0166fb4d2c7e5c54773128be7d200260b1cb89a --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/css/toast.css @@ -0,0 +1,110 @@ +.easyuse-toast-container{ + position: fixed; + z-index: 99999; + top: 0; + left: 0; + width: 100%; + height: 0; + display: flex; + flex-direction: column; + align-items: center; + justify-content: start; + padding:10px 0; +} +.easyuse-toast-container > 
div { + position: relative; + height: fit-content; + padding: 4px; + margin-top: -100px; /* re-set by JS */ + opacity: 0; + transition: all 0.33s ease-in-out; + z-index: 3; +} + +.easyuse-toast-container > div:last-child { + z-index: 2; +} + +.easyuse-toast-container > div:not(.-show) { + z-index: 1; +} + +.easyuse-toast-container > div.-show { + opacity: 1; + margin-top: 0px !important; +} + +.easyuse-toast-container > div.-show { + opacity: 1; + transform: translateY(0%); +} + +.easyuse-toast-container > div > div { + position: relative; + background: var(--comfy-menu-bg); + color: var(--input-text); + display: flex; + flex-direction: row; + align-items: center; + justify-content: center; + height: fit-content; + box-shadow: 0 0 10px rgba(0, 0, 0, 0.88); + padding: 9px 12px; + border-radius: 8px; + font-family: Arial, sans-serif; + font-size: 14px; + pointer-events: all; +} + +.easyuse-toast-container > div > div > span { + display: flex; + flex-direction: row; + align-items: center; + justify-content: center; +} + +.easyuse-toast-container > div > div > span svg { + width: 16px; + height: auto; + margin-right: 8px; +} + +.easyuse-toast-container > div > div > span svg[data-icon=info-circle]{ + fill: var(--theme-color-light); +} +.easyuse-toast-container > div > div > span svg[data-icon=check-circle]{ + fill: var(--success-color); +} +.easyuse-toast-container > div > div > span svg[data-icon=close-circle]{ + fill: var(--error-color); +} +.easyuse-toast-container > div > div > span svg[data-icon=exclamation-circle]{ + fill: var(--warning-color); +} +/*rotate animation*/ +@keyframes rotate { + 0% { + transform: rotate(0deg); + } + 100% { + transform: rotate(360deg); + } +} +.easyuse-toast-container > div > div > span svg[data-icon=loading]{ + fill: var(--theme-color); + animation: rotate 1s linear infinite; +} + +.easyuse-toast-container a { + cursor: pointer; + text-decoration: underline; + color: var(--theme-color-light); + margin-left: 4px; + display: inline-block; + line-height: 1; +} + +.easyuse-toast-container a:hover { + color: var(--theme-color-light); + text-decoration: none; +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/css/toolbar.css b/ComfyUI-Easy-Use/web_version/v1/css/toolbar.css new file mode 100644 index 0000000000000000000000000000000000000000..89a72439701e8a01149ab87895a0605858ef851d --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/css/toolbar.css @@ -0,0 +1,230 @@ +.easyuse-toolbar{ + background: rgba(15,15,15,.5); + backdrop-filter: blur(4px) brightness(120%); + border-radius:0 12px 12px 0; + min-width:50px; + height:24px; + position: fixed; + bottom:85px; + left:0px; + display: flex; + align-items: center; + z-index:10000; +} +.easyuse-toolbar.disable-render-info{ + bottom: 55px; +} +.easyuse-toolbar-item{ + border-radius:20px; + height: 20px; + width:20px; + cursor: pointer; + display: flex; + justify-content: center; + align-items: center; + transition: all 0.3s ease-in-out; + margin-left:2.5px; +} +.easyuse-toolbar-icon{ + width: 14px; + height: 14px; + display: flex; + justify-content: center; + align-items: center; + font-size: 12px; + color:white; + transition: all 0.3s ease-in-out; +} +.easyuse-toolbar-icon svg{ + width: 14px; + height: 14px; +} +.easyuse-toolbar-tips{ + visibility: hidden; + opacity: 0; + position: absolute; + top: -25px; + left: 0; + color: var(--descrip-text); + padding: 2px 5px; + border-radius: 5px; + font-size: 11px; + min-width:100px; + transition: all 0.3s ease-in-out; +} +.easyuse-toolbar-item:hover{ + 
background:rgba(12,12,12,1); +} +.easyuse-toolbar-item:hover .easyuse-toolbar-tips{ + opacity: 1; + visibility: visible; +} +.easyuse-toolbar-item:hover .easyuse-toolbar-icon.group{ + color:var(--warning-color); +} +.easyuse-toolbar-item:hover .easyuse-toolbar-icon.rocket{ + color:var(--theme-color-light); +} +.easyuse-toolbar-item:hover .easyuse-toolbar-icon.question{ + color:var(--success-color); +} + + +.easyuse-guide-dialog{ + max-width: 300px; + font-family: var(--font-family); + position: absolute; + z-index:100; + left:0; + bottom:140px; + background: rgba(25,25,25,.85); + backdrop-filter: blur(8px) brightness(120%); + border-radius:0 12px 12px 0; + padding:10px; + transition: .5s all ease-in-out; + visibility: visible; + opacity: 1; + transform: translateX(0%); +} +.easyuse-guide-dialog.disable-render-info{ + bottom:110px; +} +.easyuse-guide-dialog-top{ + display: flex; + justify-content: space-between; + align-items: center; +} +.easyuse-guide-dialog-top .icon{ + width: 12px; + height:12px; +} +.easyuse-guide-dialog.hidden{ + opacity: 0; + transform: translateX(-50%); + visibility: hidden; +} +.easyuse-guide-dialog .closeBtn{ + float: right; + color: var(--input-text); + border-radius:30px; + background-color: var(--comfy-input-bg); + border: 1px solid var(--border-color); + cursor: pointer; + aspect-ratio: 1 / 1; + display: flex; + justify-content: center; + align-items: center; +} +.easyuse-guide-dialog .closeBtn:hover{ + filter:brightness(120%); +} +.easyuse-guide-dialog-title{ + color:var(--input-text); + font-size: 16px; + font-weight: bold; + margin-bottom: 5px; +} +.easyuse-guide-dialog-remark{ + color: var(--input-text); + font-size: 12px; + margin-top: 5px; +} +.easyuse-guide-dialog-content{ + max-height: 600px; + overflow: auto; +} +.easyuse-guide-dialog a, .easyuse-guide-dialog a:visited{ + color: var(--theme-color-light); + cursor: pointer; +} +.easyuse-guide-dialog-note{ + margin-top: 20px; + color:white; +} +.easyuse-guide-dialog p{ + margin:4px 0; + font-size: 12px; + font-weight: 300; +} +.markdown-body h1, .markdown-body h2, .markdown-body h3, .markdown-body h4, .markdown-body h5, .markdown-body h6 { + margin-top: 12px; + margin-bottom: 8px; + font-weight: 600; + line-height: 1.25; + padding-bottom: 5px; + border-bottom: 1px solid var(--border-color); + color: var(--input-text); +} +.markdown-body h1{ + font-size: 18px; +} +.markdown-body h2{ + font-size: 16px; +} +.markdown-body h3{ + font-size: 14px; +} +.markdown-body h4{ + font-size: 13px; +} +.markdown-body table { + display: block; + /*width: 100%;*/ + /*width: max-content;*/ + max-width: 300px; + overflow: auto; + color:var(--input-text); + box-sizing: border-box; + border: 1px solid var(--border-color); + text-align: left; + width: 100%; +} +.markdown-body table th, .markdown-body table td { + padding: 6px 13px; + font-size: 12px; + margin:0; + border-right: 1px solid var(--border-color); + border-bottom: 1px solid var(--border-color); +} +.markdown-body table td { + font-size: 12px; +} +.markdown-body table th:last-child, .markdown-body table td:last-child{ + border-right: none; +} +.markdown-body table tr:last-child td{ + border-bottom: none; +} +.markdown-body table th{ + font-weight: bold; + width: auto; + min-width: 70px; +} +.markdown-body table th:last-child{ + width:100%; +} +.markdown-body .warning{ + color:var(--warning-color) +} +.markdown-body .error{ + color:var(--error-color) +} +.markdown-body .success{ + color:var(--success-color) +} +.markdown-body .link{ + color:var(--theme-color-light) 
+} + +#comfyui-menu-monitor{ + width:120px; +} +#comfyui-menu-monitor #crystools-monitor-container{ + margin:0 auto!important; +} +#comfyui-menu-monitor #crystools-monitor-container > div{ + margin:2px 0!important; +} +#comfyui-menu-monitor #crystools-monitor-container > div > div > div{ + padding:0 4px!important; +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/bookmark.js b/ComfyUI-Easy-Use/web_version/v1/js/bookmark.js new file mode 100644 index 0000000000000000000000000000000000000000..554b484fe5d98712e3a7135ea2db52c2e0fd71be --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/bookmark.js @@ -0,0 +1,101 @@ +import { app } from "../../../scripts/app.js"; + + +app.registerExtension({ + name: "easy bookmark", + registerCustomNodes() { + class Bookmark { + type = 'easy bookmark' + title = "🔖"; + + slot_start_y = -20; + + ___collapsed_width = 0; + + get _collapsed_width() { + return this.___collapsed_width; + } + + set _collapsed_width(width){ + const canvas = app.canvas ; + const ctx = canvas.canvas.getContext('2d'); + if(ctx){ + const oldFont = ctx.font; + ctx.font = canvas.title_text_font; + this.___collapsed_width = 40 + ctx.measureText(this.title).width; + ctx.font = oldFont; + } + } + + isVirtualNode = true; + serialize_widgets = true; + keypressBound = null; + + constructor() { + + this.addWidget('text', 'shortcut_key', '1', (value) => { + value = value.trim()[0] || '1'; + if(value !== ''){ + this.title = "🔖 " + value; + } + },{ + y: 8, + }); + this.addWidget('number', 'zoom', 1, (value) => {}, { + y: 8 + LiteGraph.NODE_WIDGET_HEIGHT + 4, + max: 2, + min: 0.5, + precision: 2, + }); + this.keypressBound = this.onKeypress.bind(this); + } + + onAdded(){ + setTimeout(_=>{ + const value = this.widgets[0].value + if(value){ + this.title = "🔖 " + value; + } + },1) + window.addEventListener("keydown", this.keypressBound); + } + + onRemoved() { + window.removeEventListener("keydown", this.keypressBound); + } + + onKeypress(event){ + const target = event.target; + if (['input','textarea'].includes(target.localName)) { + return; + } + if (this.widgets[0] && event.key.toLocaleLowerCase() === this.widgets[0].value.toLocaleLowerCase()) { + this.canvasToBookmark(); + } + } + + canvasToBookmark() { + const canvas = app.canvas; + // ComfyUI seemed to break us again, but couldn't repro. No reason to not check, I guess. 
+ // https://github.com/rgthree/rgthree-comfy/issues/71 + if (canvas?.ds?.offset) { + canvas.ds.offset[0] = -this.pos[0] + 16; + canvas.ds.offset[1] = -this.pos[1] + 40; + } + if (canvas?.ds?.scale != null) { + canvas.ds.scale = Number(this.widgets[1].value || 1); + } + canvas.setDirty(true, true); + } + } + + LiteGraph.registerNodeType( + "easy bookmark", + Object.assign(Bookmark,{ + title: "Bookmark 🔖", + }) + ); + + Bookmark.category = "EasyUse/Util" + } +}) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/common/dropdown.js b/ComfyUI-Easy-Use/web_version/v1/js/common/dropdown.js new file mode 100644 index 0000000000000000000000000000000000000000..f1fd485f842be4c9beb6dbad823f0741cfb2725d --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/common/dropdown.js @@ -0,0 +1,218 @@ +let activeDropdown = null; + +export function removeDropdown() { + if (activeDropdown) { + activeDropdown.removeEventListeners(); + activeDropdown.dropdown.remove(); + activeDropdown = null; + } +} +export function createDropdown(inputEl, suggestions, onSelect, isDict = false) { + removeDropdown(); + new Dropdown(inputEl, suggestions, onSelect, isDict); +} + +class Dropdown { + constructor(inputEl, suggestions, onSelect, isDict = false) { + this.dropdown = document.createElement('ul'); + this.dropdown.setAttribute('role', 'listbox'); + this.dropdown.classList.add('easy-dropdown'); + this.selectedIndex = -1; + this.inputEl = inputEl; + this.suggestions = suggestions; + this.onSelect = onSelect; + this.isDict = isDict; + + this.focusedDropdown = this.dropdown; + + this.buildDropdown(); + + this.onKeyDownBound = this.onKeyDown.bind(this); + this.onWheelBound = this.onWheel.bind(this); + this.onClickBound = this.onClick.bind(this); + + this.addEventListeners(); + } + + buildDropdown() { + if (this.isDict) { + this.buildNestedDropdown(this.suggestions, this.dropdown); + } else { + this.suggestions.forEach((suggestion, index) => { + this.addListItem(suggestion, index, this.dropdown); + }); + } + + const inputRect = this.inputEl.getBoundingClientRect(); + this.dropdown.style.top = (inputRect.top + inputRect.height - 10) + 'px'; + this.dropdown.style.left = inputRect.left + 'px'; + + document.body.appendChild(this.dropdown); + activeDropdown = this; + } + + buildNestedDropdown(dictionary, parentElement) { + let index = 0; + Object.keys(dictionary).forEach((key) => { + const item = dictionary[key]; + if (typeof item === "object" && item !== null) { + const nestedDropdown = document.createElement('ul'); + nestedDropdown.setAttribute('role', 'listbox'); + nestedDropdown.classList.add('easy-nested-dropdown'); + const parentListItem = document.createElement('li'); + parentListItem.classList.add('folder'); + parentListItem.textContent = key; + parentListItem.appendChild(nestedDropdown); + parentListItem.addEventListener('mouseover', this.onMouseOver.bind(this, index, parentElement)); + parentElement.appendChild(parentListItem); + this.buildNestedDropdown(item, nestedDropdown); + index = index + 1; + } else { + const listItem = document.createElement('li'); + listItem.classList.add('item'); + listItem.setAttribute('role', 'option'); + listItem.textContent = key; + listItem.addEventListener('mouseover', this.onMouseOver.bind(this, index, parentElement)); + listItem.addEventListener('mousedown', this.onMouseDown.bind(this, key)); + parentElement.appendChild(listItem); + index = index + 1; + } + }); + } + + addListItem(item, index, parentElement) { + const listItem = document.createElement('li'); + 
listItem.setAttribute('role', 'option'); + listItem.textContent = item; + listItem.addEventListener('mouseover', this.onMouseOver.bind(this, index)); + listItem.addEventListener('mousedown', this.onMouseDown.bind(this, item)); + parentElement.appendChild(listItem); + } + + addEventListeners() { + document.addEventListener('keydown', this.onKeyDownBound); + this.dropdown.addEventListener('wheel', this.onWheelBound); + document.addEventListener('click', this.onClickBound); + } + + removeEventListeners() { + document.removeEventListener('keydown', this.onKeyDownBound); + this.dropdown.removeEventListener('wheel', this.onWheelBound); + document.removeEventListener('click', this.onClickBound); + } + + onMouseOver(index, parentElement) { + if (parentElement) { + this.focusedDropdown = parentElement; + } + this.selectedIndex = index; + this.updateSelection(); + } + + onMouseOut() { + this.selectedIndex = -1; + this.updateSelection(); + } + + onMouseDown(suggestion, event) { + event.preventDefault(); + this.onSelect(suggestion); + this.dropdown.remove(); + this.removeEventListeners(); + } + + onKeyDown(event) { + const enterKeyCode = 13; + const escKeyCode = 27; + const arrowUpKeyCode = 38; + const arrowDownKeyCode = 40; + const arrowRightKeyCode = 39; + const arrowLeftKeyCode = 37; + const tabKeyCode = 9; + + const items = Array.from(this.focusedDropdown.children); + const selectedItem = items[this.selectedIndex]; + + if (activeDropdown) { + if (event.keyCode === arrowUpKeyCode) { + event.preventDefault(); + this.selectedIndex = Math.max(0, this.selectedIndex - 1); + this.updateSelection(); + } + + else if (event.keyCode === arrowDownKeyCode) { + event.preventDefault(); + this.selectedIndex = Math.min(items.length - 1, this.selectedIndex + 1); + this.updateSelection(); + } + + else if (event.keyCode === arrowRightKeyCode) { + event.preventDefault(); + if (selectedItem && selectedItem.classList.contains('folder')) { + const nestedDropdown = selectedItem.querySelector('.easy-nested-dropdown'); + if (nestedDropdown) { + this.focusedDropdown = nestedDropdown; + this.selectedIndex = 0; + this.updateSelection(); + } + } + } + + else if (event.keyCode === arrowLeftKeyCode && this.focusedDropdown !== this.dropdown) { + const parentDropdown = this.focusedDropdown.closest('.easy-dropdown, .easy-nested-dropdown').parentNode.closest('.easy-dropdown, .easy-nested-dropdown'); + if (parentDropdown) { + this.focusedDropdown = parentDropdown; + this.selectedIndex = Array.from(parentDropdown.children).indexOf(this.focusedDropdown.parentNode); + this.updateSelection(); + } + } + + else if ((event.keyCode === enterKeyCode || event.keyCode === tabKeyCode) && this.selectedIndex >= 0) { + event.preventDefault(); + if (selectedItem.classList.contains('item')) { + this.onSelect(items[this.selectedIndex].textContent); + this.dropdown.remove(); + this.removeEventListeners(); + } + + const nestedDropdown = selectedItem.querySelector('.easy-nested-dropdown'); + if (nestedDropdown) { + this.focusedDropdown = nestedDropdown; + this.selectedIndex = 0; + this.updateSelection(); + } + } + + else if (event.keyCode === escKeyCode) { + this.dropdown.remove(); + this.removeEventListeners(); + } + } + } + + onWheel(event) { + const top = parseInt(this.dropdown.style.top); + if (localStorage.getItem("Comfy.Settings.Comfy.InvertMenuScrolling")) { + this.dropdown.style.top = (top + (event.deltaY < 0 ? 10 : -10)) + "px"; + } else { + this.dropdown.style.top = (top + (event.deltaY < 0 ? 
-10 : 10)) + "px"; + } + } + + onClick(event) { + if (!this.dropdown.contains(event.target) && event.target !== this.inputEl) { + this.dropdown.remove(); + this.removeEventListeners(); + } + } + + updateSelection() { + Array.from(this.focusedDropdown.children).forEach((li, index) => { + if (index === this.selectedIndex) { + li.classList.add('selected'); + } else { + li.classList.remove('selected'); + } + }); + } +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/common/i18n.js b/ComfyUI-Easy-Use/web_version/v1/js/common/i18n.js new file mode 100644 index 0000000000000000000000000000000000000000..643a11016b6e884ee5961c94a727445f1170cef0 --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/common/i18n.js @@ -0,0 +1,101 @@ +import {getLocale} from './utils.js' +const locale = getLocale() + +const zhCN = { + "Workflow created by": "工作流创建者", + "Watch more video content": "观看更多视频内容", + "Workflow Guide":"工作流指南", + // ExtraMenu + "💎 View Checkpoint Info...": "💎 查看 Checkpoint 信息...", + "💎 View Lora Info...": "💎 查看 Lora 信息...", + "🔃 Reload Node": "🔃 刷新节点", + // ModelInfo + "Updated At:": "最近更新:", + "Created At:": "首次发布:", + "✏️ Edit": "✏️ 编辑", + "💾 Save": "💾 保存", + "No notes": "当前还没有备注内容", + "Saving Notes...": "正在保存备注...", + "Type your notes here":"在这里输入备注内容", + "ModelName":"模型名称", + "Models Required":"所需模型", + "Download Model": "下载模型", + "Source Url": "模型源地址", + "Notes": "备注", + "Type": "类型", + "Trained Words": "训练词", + "BaseModel": "基础算法", + "Details": "详情", + "Description": "描述", + "Download": "下载量", + "Source": "来源", + "Saving Preview...": "正在保存预览图...", + "Saving Succeed":"保存成功", + "Clean SuccessFully":"清理成功", + "Clean Failed": "清理失败", + "Saving Failed":"保存失败", + "No COMBO link": "沒有找到COMBO连接", + "Reboot ComfyUI":"重启ComfyUI", + "Are you sure you'd like to reboot the server?": "是否要重启ComfyUI?", + // GroupMap + "Groups Map": "管理组", + "Cleanup Of GPU Usage": "清理GPU占用", + "Please stop all running tasks before cleaning GPU": "请在清理GPU之前停止所有运行中的任务", + "Always": "启用中", + "Bypass": "已忽略", + "Never": "已停用", + "Auto Sorting": "自动排序", + "Toggle `Show/Hide` can set mode of group, LongPress can set group nodes to never": "点击`启用中/已忽略`可设置组模式, 长按可停用该组节点", + // Quick + "Enable ALT+1~9 to paste nodes from nodes template (ComfyUI-Easy-Use)": "启用ALT1~9从节点模板粘贴到工作流 (ComfyUI-Easy-Use)", + "Enable process bar in queue button (ComfyUI-Easy-Use)": "启用提示词队列进度显示条 (ComfyUI-Easy-Use)", + "Enable ContextMenu Auto Nest Subdirectories (ComfyUI-Easy-Use)": "启用上下文菜单自动嵌套子目录 (ComfyUI-Easy-Use)", + "Enable tool bar fixed on the left-bottom (ComfyUI-Easy-Use)": "启用工具栏固定在左下角 (ComfyUI-Easy-Use)", + "Too many thumbnails, have closed the display": "模型缩略图太多啦,为您关闭了显示", + // selector + "Empty All": "清空所有", + "🔎 Type here to search styles ...": "🔎 在此处输入以搜索样式 ...", + // account + "Loading UserInfo...": "正在获取用户信息...", + "Please set the APIKEY first": "请先设置APIKEY", + "Setting APIKEY": "设置APIKEY", + "Save Account Info": "保存账号信息", + "Choose": "选择", + "Delete": "删除", + "Edit": "编辑", + "At least one account is required": "删除失败: 至少需要一个账户", + "APIKEY is not Empty": "APIKEY 不能为空", + "Add Account": "添加账号", + "Getting Your APIKEY": "获取您的APIKEY", + // choosers + "Choose Selected Images": "选择选中的图片", + "Choose images to continue": "选择图片以继续", + // seg + "Background": "背景", + "Hat": "帽子", + "Hair": "头发", + "Body": "身体", + "Face": "脸部", + "Clothes": "衣服", + "Others": "其他", + "Glove": "手套", + "Sunglasses": "太阳镜", + "Upper-clothes": "上衣", + "Dress": "连衣裙", + "Coat": "外套", + "Socks": "袜子", + "Pants": "裤子", + "Jumpsuits": "连体衣", + 
"Scarf": "围巾", + "Skirt": "裙子", + "Left-arm": "左臂", + "Right-arm": "右臂", + "Left-leg": "左腿", + "Right-leg": "右腿", + "Left-shoe": "左鞋", + "Right-shoe": "右鞋", +} +export const $t = (key) => { + const cn = zhCN[key] + return locale === 'zh-CN' && cn ? cn : key +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/common/icon.js b/ComfyUI-Easy-Use/web_version/v1/js/common/icon.js new file mode 100644 index 0000000000000000000000000000000000000000..e71a0a34ff16dcb668d164aeff2ede0abe87b9a0 --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/common/icon.js @@ -0,0 +1,25 @@ +export const logoIcon = ` + + + + + + + + + + + + + + + + + +` + +export const quesitonIcon = `` +export const rocketIcon = `` +export const groupIcon = `` +export const rebootIcon = `` +export const closeIcon = `` \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/common/model.js b/ComfyUI-Easy-Use/web_version/v1/js/common/model.js new file mode 100644 index 0000000000000000000000000000000000000000..b94928835526d17cf076f1612d4a8148ecec02af --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/common/model.js @@ -0,0 +1,683 @@ +import { $el, ComfyDialog } from "../../../../scripts/ui.js"; +import { api } from "../../../../scripts/api.js"; +import {formatTime} from './utils.js'; +import {$t} from "./i18n.js"; +import {toast} from "./toast.js"; + +class MetadataDialog extends ComfyDialog { + constructor() { + super(); + this.element.classList.add("easyuse-model-metadata"); + } + show(metadata) { + super.show( + $el( + "div", + Object.keys(metadata).map((k) => + $el("div", [$el("label", { textContent: k }), $el("span", { textContent: metadata[k] })]) + ) + ) + ); + } +} + +export class ModelInfoDialog extends ComfyDialog { + constructor(name) { + super(); + this.name = name; + this.element.classList.add("easyuse-model-info"); + } + + get customNotes() { + return this.metadata["easyuse.notes"]; + } + + set customNotes(v) { + this.metadata["easyuse.notes"] = v; + } + + get hash() { + return this.metadata["easyuse.sha256"]; + } + + async show(type, value) { + this.type = type; + + const req = api.fetchApi("/easyuse/metadata/" + encodeURIComponent(`${type}/${value}`)); + this.info = $el("div", { style: { flex: "auto" } }); + // this.img = $el("img", { style: { display: "none" } }); + this.imgCurrent = 0 + this.imgList = $el("div.easyuse-preview-list",{ + style: { display: "none" } + }) + this.imgWrapper = $el("div.easyuse-preview", [ + $el("div.easyuse-preview-group",[ + this.imgList + ]), + ]); + this.main = $el("main", { style: { display: "flex" } }, [this.imgWrapper, this.info]); + this.content = $el("div.easyuse-model-content", [ + $el("div.easyuse-model-header",[$el("h2", { textContent: this.name })]) + , this.main]); + + const loading = $el("div", { textContent: "ℹ️ Loading...", parent: this.content }); + + super.show(this.content); + + this.metadata = await (await req).json(); + this.viewMetadata.style.cursor = this.viewMetadata.style.opacity = ""; + this.viewMetadata.removeAttribute("disabled"); + + loading.remove(); + this.addInfo(); + } + + createButtons() { + const btns = super.createButtons(); + this.viewMetadata = $el("button", { + type: "button", + textContent: "View raw metadata", + disabled: "disabled", + style: { + opacity: 0.5, + cursor: "not-allowed", + }, + onclick: (e) => { + if (this.metadata) { + new MetadataDialog().show(this.metadata); + } + }, + }); + + btns.unshift(this.viewMetadata); + return btns; + } + + parseNote() { + if (!this.customNotes) return 
[]; + + let notes = []; + // Extract links from notes + const r = new RegExp("(\\bhttps?:\\/\\/[^\\s]+)", "g"); + let end = 0; + let m; + do { + m = r.exec(this.customNotes); + let pos; + let fin = 0; + if (m) { + pos = m.index; + fin = m.index + m[0].length; + } else { + pos = this.customNotes.length; + } + + let pre = this.customNotes.substring(end, pos); + if (pre) { + pre = pre.replaceAll("\n", "
"); + notes.push( + $el("span", { + innerHTML: pre, + }) + ); + } + if (m) { + notes.push( + $el("a", { + href: m[0], + textContent: m[0], + target: "_blank", + }) + ); + } + + end = fin; + } while (m); + return notes; + } + + addInfoEntry(name, value) { + return $el( + "p", + { + parent: this.info, + }, + [ + typeof name === "string" ? $el("label", { textContent: name + ": " }) : name, + typeof value === "string" ? $el("span", { textContent: value }) : value, + ] + ); + } + + async getCivitaiDetails() { + const req = await fetch("https://civitai.com/api/v1/model-versions/by-hash/" + this.hash); + if (req.status === 200) { + return await req.json(); + } else if (req.status === 404) { + throw new Error("Model not found"); + } else { + throw new Error(`Error loading info (${req.status}) ${req.statusText}`); + } + } + + addCivitaiInfo() { + const promise = this.getCivitaiDetails(); + const content = $el("span", { textContent: "ℹ️ Loading..." }); + + this.addInfoEntry( + $el("label", [ + $el("img", { + style: { + width: "18px", + position: "relative", + top: "3px", + margin: "0 5px 0 0", + }, + src: "https://civitai.com/favicon.ico", + }), + $el("span", { textContent: "Civitai: " }), + ]), + content + ); + + return promise + .then((info) => { + this.imgWrapper.style.display = 'block' + // 变更标题信息 + let header = this.element.querySelector('.easyuse-model-header') + if(header){ + header.replaceChildren( + $el("h2", { textContent: this.name }), + $el("div.easyuse-model-header-remark",[ + $el("h5", { textContent: $t("Updated At:") + formatTime(new Date(info.updatedAt),'yyyy/MM/dd')}), + $el("h5", { textContent: $t("Created At:") + formatTime(new Date(info.updatedAt),'yyyy/MM/dd')}), + ]) + ) + } + // 替换内容 + let textarea = null + let notes = this.parseNote.call(this) + let editText = $t("✏️ Edit") + console.log(notes) + let textarea_div = $el("div.easyuse-model-detail-textarea",[ + $el("p",notes?.length>0 ? notes : {textContent:$t('No notes')}), + ]) + if(!notes || notes.length == 0) textarea_div.classList.add('empty') + else textarea_div.classList.remove('empty') + this.info.replaceChildren( + $el("div.easyuse-model-detail",[ + $el("div.easyuse-model-detail-head.flex-b",[ + $el('span',$t("Notes")), + $el("a", { + textContent: editText, + href: "#", + style: { + fontSize: "12px", + float: "right", + color: "var(--warning-color)", + textDecoration: "none", + }, + onclick: async (e) => { + e.preventDefault(); + + if (textarea) { + if(textarea.value != this.customNotes){ + toast.showLoading($t('Saving Notes...')) + this.customNotes = textarea.value; + const resp = await api.fetchApi( + "/easyuse/metadata/notes/" + encodeURIComponent(`${this.type}/${this.name}`), + { + method: "POST", + body: this.customNotes, + } + ); + toast.hideLoading() + if (resp.status !== 200) { + toast.error($t('Saving Failed')) + console.error(resp); + alert(`Error saving notes (${resp.status}) ${resp.statusText}`); + return; + } + toast.success($t('Saving Succeed')) + notes = this.parseNote.call(this) + console.log(notes) + textarea_div.replaceChildren($el("p",notes?.length>0 ? 
notes : {textContent:$t('No notes')})); + if(textarea.value) textarea_div.classList.remove('empty') + else textarea_div.classList.add('empty') + }else { + textarea_div.replaceChildren($el("p",{textContent:$t('No notes')})); + textarea_div.classList.add('empty') + } + e.target.textContent = editText; + textarea.remove(); + textarea = null; + + } else { + e.target.textContent = "💾 Save"; + textarea = $el("textarea", { + placeholder: $t("Type your notes here"), + style: { + width: "100%", + minWidth: "200px", + minHeight: "50px", + height:"100px" + }, + textContent: this.customNotes, + }); + textarea_div.replaceChildren(textarea); + textarea.focus() + } + } + }) + ]), + textarea_div + ]), + $el("div.easyuse-model-detail",[ + $el("div.easyuse-model-detail-head",{textContent:$t("Details")}), + $el("div.easyuse-model-detail-body",[ + $el("div.easyuse-model-detail-item",[ + $el("div.easyuse-model-detail-item-label",{textContent:$t("Type")}), + $el("div.easyuse-model-detail-item-value",{textContent:info.model.type}), + ]), + $el("div.easyuse-model-detail-item",[ + $el("div.easyuse-model-detail-item-label",{textContent:$t("BaseModel")}), + $el("div.easyuse-model-detail-item-value",{textContent:info.baseModel}), + ]), + $el("div.easyuse-model-detail-item",[ + $el("div.easyuse-model-detail-item-label",{textContent:$t("Download")}), + $el("div.easyuse-model-detail-item-value",{textContent:info.stats?.downloadCount || 0}), + ]), + $el("div.easyuse-model-detail-item",[ + $el("div.easyuse-model-detail-item-label",{textContent:$t("Trained Words")}), + $el("div.easyuse-model-detail-item-value",{textContent:info?.trainedWords.join(',') || '-'}), + ]), + $el("div.easyuse-model-detail-item",[ + $el("div.easyuse-model-detail-item-label",{textContent:$t("Source")}), + $el("div.easyuse-model-detail-item-value",[ + $el("label", [ + $el("img", { + style: { + width: "14px", + position: "relative", + top: "3px", + margin: "0 5px 0 0", + }, + src: "https://civitai.com/favicon.ico", + }), + $el("a", { + href: "https://civitai.com/models/" + info.modelId, + textContent: "View " + info.model.name, + target: "_blank", + }) + ]) + ]), + ]) + ]), + ]) + ); + + if (info.images?.length) { + this.imgCurrent = 0 + this.isSaving = false + info.images.map(cate=> + cate.url && + this.imgList.appendChild( + $el('div.easyuse-preview-slide',[ + $el('div.easyuse-preview-slide-content',[ + $el('img',{src:(cate.url)}), + $el("div.save", { + textContent: "Save as preview", + onclick: async () => { + if(this.isSaving) return + this.isSaving = true + toast.showLoading($t('Saving Preview...')) + // Convert the preview to a blob + const blob = await (await fetch(cate.url)).blob(); + + // Store it in temp + const name = "temp_preview." 
+ new URL(cate.url).pathname.split(".")[1]; + const body = new FormData(); + body.append("image", new File([blob], name)); + body.append("overwrite", "true"); + body.append("type", "temp"); + + const resp = await api.fetchApi("/upload/image", { + method: "POST", + body, + }); + + if (resp.status !== 200) { + this.isSaving = false + toast.error($t('Saving Failed')) + toast.hideLoading() + console.error(resp); + alert(`Error saving preview (${req.status}) ${req.statusText}`); + return; + } + + // Use as preview + await api.fetchApi("/easyuse/save/" + encodeURIComponent(`${this.type}/${this.name}`), { + method: "POST", + body: JSON.stringify({ + filename: name, + type: "temp", + }), + headers: { + "content-type": "application/json", + }, + }).then(_=>{ + toast.success($t('Saving Succeed')) + toast.hideLoading() + }); + this.isSaving = false + app.refreshComboInNodes(); + }, + }) + ]) + ]) + ) + ) + let _this = this + this.imgDistance = (-660 * this.imgCurrent).toString() + this.imgList.style.display = '' + this.imgList.style.transform = 'translate3d(' + this.imgDistance +'px, 0px, 0px)' + this.slides = this.imgList.querySelectorAll('.easyuse-preview-slide') + // 添加按钮 + this.slideLeftButton = $el("button.left",{ + parent: this.imgWrapper, + style:{ + display:info.images.length <= 2 ? 'none' : 'block' + }, + innerHTML:``, + onclick: ()=>{ + if(info.images.length <= 2) return + _this.imgList.classList.remove("no-transition") + if(_this.imgCurrent == 0){ + _this.imgCurrent = (info.images.length/2)-1 + this.slides[this.slides.length-1].style.transform = 'translate3d(' + (-660 * (this.imgCurrent+1)).toString()+'px, 0px, 0px)' + this.slides[this.slides.length-2].style.transform = 'translate3d(' + (-660 * (this.imgCurrent+1)).toString()+'px, 0px, 0px)' + _this.imgList.style.transform = 'translate3d(660px, 0px, 0px)' + setTimeout(_=>{ + this.slides[this.slides.length-1].style.transform = 'translate3d(0px, 0px, 0px)' + this.slides[this.slides.length-2].style.transform = 'translate3d(0px, 0px, 0px)' + _this.imgDistance = (-660 * this.imgCurrent).toString() + _this.imgList.style.transform = 'translate3d(' + _this.imgDistance +'px, 0px, 0px)' + _this.imgList.classList.add("no-transition") + },500) + } + else { + _this.imgCurrent = _this.imgCurrent-1 + _this.imgDistance = (-660 * this.imgCurrent).toString() + _this.imgList.style.transform = 'translate3d(' + _this.imgDistance +'px, 0px, 0px)' + } + } + }) + this.slideRightButton = $el("button.right",{ + parent: this.imgWrapper, + style:{ + display:info.images.length <= 2 ? 
'none' : 'block' + }, + innerHTML:``, + onclick: ()=>{ + if(info.images.length <= 2) return + _this.imgList.classList.remove("no-transition") + + if( _this.imgCurrent >= (info.images.length/2)-1){ + _this.imgCurrent = 0 + const max = info.images.length/2 + this.slides[0].style.transform = 'translate3d(' + (660 * max).toString()+'px, 0px, 0px)' + this.slides[1].style.transform = 'translate3d(' + (660 * max).toString()+'px, 0px, 0px)' + _this.imgList.style.transform = 'translate3d(' + (-660 * max).toString()+'px, 0px, 0px)' + setTimeout(_=>{ + this.slides[0].style.transform = 'translate3d(0px, 0px, 0px)' + this.slides[1].style.transform = 'translate3d(0px, 0px, 0px)' + _this.imgDistance = (-660 * this.imgCurrent).toString() + _this.imgList.style.transform = 'translate3d(' + _this.imgDistance +'px, 0px, 0px)' + _this.imgList.classList.add("no-transition") + },500) + } + else { + _this.imgCurrent = _this.imgCurrent+1 + _this.imgDistance = (-660 * this.imgCurrent).toString() + _this.imgList.style.transform = 'translate3d(' + _this.imgDistance +'px, 0px, 0px)' + } + + } + }) + + } + + if(info.description){ + $el("div", { + parent: this.content, + innerHTML: info.description, + style: { + marginTop: "10px", + }, + }); + } + + return info; + }) + .catch((err) => { + this.imgWrapper.style.display = 'none' + content.textContent = "⚠️ " + err.message; + }) + .finally(_=>{ + }) + } +} + + +export class CheckpointInfoDialog extends ModelInfoDialog { + async addInfo() { + // super.addInfo(); + await this.addCivitaiInfo(); + } +} + +const MAX_TAGS = 500 +export class LoraInfoDialog extends ModelInfoDialog { + getTagFrequency() { + if (!this.metadata.ss_tag_frequency) return []; + + const datasets = JSON.parse(this.metadata.ss_tag_frequency); + const tags = {}; + for (const setName in datasets) { + const set = datasets[setName]; + for (const t in set) { + if (t in tags) { + tags[t] += set[t]; + } else { + tags[t] = set[t]; + } + } + } + + return Object.entries(tags).sort((a, b) => b[1] - a[1]); + } + + getResolutions() { + let res = []; + if (this.metadata.ss_bucket_info) { + const parsed = JSON.parse(this.metadata.ss_bucket_info); + if (parsed?.buckets) { + for (const { resolution, count } of Object.values(parsed.buckets)) { + res.push([count, `${resolution.join("x")} * ${count}`]); + } + } + } + res = res.sort((a, b) => b[0] - a[0]).map((a) => a[1]); + let r = this.metadata.ss_resolution; + if (r) { + const s = r.split(","); + const w = s[0].replace("(", ""); + const h = s[1].replace(")", ""); + res.push(`${w.trim()}x${h.trim()} (Base res)`); + } else if ((r = this.metadata["modelspec.resolution"])) { + res.push(r + " (Base res)"); + } + if (!res.length) { + res.push("⚠️ Unknown"); + } + return res; + } + + getTagList(tags) { + return tags.map((t) => + $el( + "li.easyuse-model-tag", + { + dataset: { + tag: t[0], + }, + $: (el) => { + el.onclick = () => { + el.classList.toggle("easyuse-model-tag--selected"); + }; + }, + }, + [ + $el("p", { + textContent: t[0], + }), + $el("span", { + textContent: t[1], + }), + ] + ) + ); + } + + addTags() { + let tags = this.getTagFrequency(); + let hasMore; + if (tags?.length) { + const c = tags.length; + let list; + if (c > MAX_TAGS) { + tags = tags.slice(0, MAX_TAGS); + hasMore = $el("p", [ + $el("span", { textContent: `⚠️ Only showing first ${MAX_TAGS} tags ` }), + $el("a", { + href: "#", + textContent: `Show all ${c}`, + onclick: () => { + list.replaceChildren(...this.getTagList(this.getTagFrequency())); + hasMore.remove(); + }, + }), + ]); + } + list =
$el("ol.easyuse-model-tags-list", this.getTagList(tags)); + this.tags = $el("div", [list]); + } else { + this.tags = $el("p", { textContent: "⚠️ No tag frequency metadata found" }); + } + + this.content.append(this.tags); + + if (hasMore) { + this.content.append(hasMore); + } + } + + async addInfo() { + // this.addInfoEntry("Name", this.metadata.ss_output_name || "⚠️ Unknown"); + // this.addInfoEntry("Base Model", this.metadata.ss_sd_model_name || "⚠️ Unknown"); + // this.addInfoEntry("Clip Skip", this.metadata.ss_clip_skip || "⚠️ Unknown"); + // + // this.addInfoEntry( + // "Resolution", + // $el( + // "select", + // this.getResolutions().map((r) => $el("option", { textContent: r })) + // ) + // ); + + // super.addInfo(); + const p = this.addCivitaiInfo(); + this.addTags(); + + const info = await p; + if (info) { + // $el( + // "p", + // { + // parent: this.content, + // textContent: "Trained Words: ", + // }, + // [ + // $el("pre", { + // textContent: info.trainedWords.join(", "), + // style: { + // whiteSpace: "pre-wrap", + // margin: "10px 0", + // background: "#222", + // padding: "5px", + // borderRadius: "5px", + // maxHeight: "250px", + // overflow: "auto", + // }, + // }), + // ] + // ); + $el("div", { + parent: this.content, + innerHTML: info.description, + style: { + maxHeight: "250px", + overflow: "auto", + }, + }); + } + } + + createButtons() { + const btns = super.createButtons(); + + function copyTags(e, tags) { + const textarea = $el("textarea", { + parent: document.body, + style: { + position: "fixed", + }, + textContent: tags.map((el) => el.dataset.tag).join(", "), + }); + textarea.select(); + try { + document.execCommand("copy"); + if (!e.target.dataset.text) { + e.target.dataset.text = e.target.textContent; + } + e.target.textContent = "Copied " + tags.length + " tags"; + setTimeout(() => { + e.target.textContent = e.target.dataset.text; + }, 1000); + } catch (ex) { + prompt("Copy to clipboard: Ctrl+C, Enter", text); + } finally { + document.body.removeChild(textarea); + } + } + + btns.unshift( + $el("button", { + type: "button", + textContent: "Copy Selected", + onclick: (e) => { + copyTags(e, [...this.tags.querySelectorAll(".easyuse-model-tag--selected")]); + }, + }), + $el("button", { + type: "button", + textContent: "Copy All", + onclick: (e) => { + copyTags(e, [...this.tags.querySelectorAll(".easyuse-model-tag")]); + }, + }) + ); + + return btns; + } +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/common/toast.js b/ComfyUI-Easy-Use/web_version/v1/js/common/toast.js new file mode 100644 index 0000000000000000000000000000000000000000..f32ab5e41a4a4212ca25894f94e234514249c6c5 --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/common/toast.js @@ -0,0 +1,127 @@ +import {sleep} from "./utils.js"; +import {$t} from "./i18n.js"; + +class Toast{ + + constructor() { + this.info_icon = `` + this.success_icon = `` + this.error_icon = `` + this.warn_icon = `` + this.loading_icon = `` + } + + async showToast(data){ + let container = document.querySelector(".easyuse-toast-container"); + if (!container) { + container = document.createElement("div"); + container.classList.add("easyuse-toast-container"); + document.body.appendChild(container); + } + await this.hideToast(data.id); + const toastContainer = document.createElement("div"); + const content = document.createElement("span"); + content.innerHTML = data.content; + toastContainer.appendChild(content); + for (let a = 0; a < (data.actions || []).length; a++) { + const action = data.actions[a]; 
+ if (a > 0) { + const sep = document.createElement("span"); + sep.innerHTML = " | "; + toastContainer.appendChild(sep); + } + const actionEl = document.createElement("a"); + actionEl.innerText = action.label; + if (action.href) { + actionEl.target = "_blank"; + actionEl.href = action.href; + } + if (action.callback) { + actionEl.onclick = (e) => { + return action.callback(e); + }; + } + toastContainer.appendChild(actionEl); + } + const animContainer = document.createElement("div"); + animContainer.setAttribute("toast-id", data.id); + animContainer.appendChild(toastContainer); + container.appendChild(animContainer); + await sleep(64); + animContainer.style.marginTop = `-${animContainer.offsetHeight}px`; + await sleep(64); + animContainer.classList.add("-show"); + if (data.duration) { + await sleep(data.duration); + this.hideToast(data.id); + } + } + async hideToast(id) { + const msg = document.querySelector(`.easyuse-toast-container > [toast-id="${id}"]`); + if (msg === null || msg === void 0 ? void 0 : msg.classList.contains("-show")) { + msg.classList.remove("-show"); + await sleep(750); + } + msg && msg.remove(); + } + async clearAllMessages() { + let container = document.querySelector(".easyuse-toast-container"); + container && (container.innerHTML = ""); + } + + async copyright(duration = 5000, actions = []) { + this.showToast({ + id: `toast-info`, + content: `${this.info_icon} ${$t('Workflow created by')} Yolain , ${$t('Watch more video content')} B站乱乱呀`, + duration, + actions + }); + } + async info(content, duration = 3000, actions = []) { + this.showToast({ + id: `toast-info`, + content: `${this.info_icon} ${content}`, + duration, + actions + }); + } + async success(content, duration = 3000, actions = []) { + this.showToast({ + id: `toast-success`, + content: `${this.success_icon} ${content}`, + duration, + actions + }); + } + async error(content, duration = 3000, actions = []) { + this.showToast({ + id: `toast-error`, + content: `${this.error_icon} ${content}`, + duration, + actions + }); + } + async warn(content, duration = 3000, actions = []) { + this.showToast({ + id: `toast-warn`, + content: `${this.warn_icon} ${content}`, + duration, + actions + }); + } + async showLoading(content, duration = 0, actions = []) { + this.showToast({ + id: `toast-loading`, + content: `${this.loading_icon} ${content}`, + duration, + actions + }); + } + + async hideLoading() { + this.hideToast("toast-loading"); + } + +} + +export const toast = new Toast(); diff --git a/ComfyUI-Easy-Use/web_version/v1/js/common/utils.js b/ComfyUI-Easy-Use/web_version/v1/js/common/utils.js new file mode 100644 index 0000000000000000000000000000000000000000..9a988a7a85da6fc5fe35e7bd43bcca4495c62adc --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/common/utils.js @@ -0,0 +1,187 @@ +export function sleep(ms = 100, value) { + return new Promise((resolve) => { + setTimeout(() => { + resolve(value); + }, ms); + }); +} +export function addPreconnect(href, crossorigin=false){ + const preconnect = document.createElement("link"); + preconnect.rel = 'preconnect' + preconnect.href = href + if(crossorigin) preconnect.crossorigin = '' + document.head.appendChild(preconnect); +} +export function addCss(href, base=true) { + const link = document.createElement("link"); + link.rel = "stylesheet"; + link.type = "text/css"; + link.href = base ? 
"extensions/ComfyUI-Easy-Use/"+href : href; + document.head.appendChild(link); +} + +export function addMeta(name, content) { + const meta = document.createElement("meta"); + meta.setAttribute("name", name); + meta.setAttribute('content', content); + document.head.appendChild(meta); +} + +export function deepEqual(obj1, obj2) { + if (typeof obj1 !== typeof obj2) { + return false + } + if (typeof obj1 !== 'object' || obj1 === null || obj2 === null) { + return obj1 === obj2 + } + const keys1 = Object.keys(obj1) + const keys2 = Object.keys(obj2) + if (keys1.length !== keys2.length) { + return false + } + for (let key of keys1) { + if (!deepEqual(obj1[key], obj2[key])) { + return false + } + } + return true +} + + +export function getLocale(){ + const locale = localStorage['AGL.Locale'] || localStorage['Comfy.Settings.AGL.Locale'] || 'en-US' + return locale +} + +export function spliceExtension(fileName){ + return fileName.substring(0,fileName.lastIndexOf('.')) +} +export function getExtension(fileName){ + return fileName.substring(fileName.lastIndexOf('.') + 1) +} + +export function formatTime(time, format) { + time = typeof (time) === "number" ? time : (time instanceof Date ? time.getTime() : parseInt(time)); + if (isNaN(time)) return null; + if (typeof (format) !== 'string' || !format) format = 'yyyy-MM-dd hh:mm:ss'; + let _time = new Date(time); + time = _time.toString().split(/[\s\:]/g).slice(0, -2); + time[1] = ['01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12'][_time.getMonth()]; + let _mapping = { + MM: 1, + dd: 2, + yyyy: 3, + hh: 4, + mm: 5, + ss: 6 + }; + return format.replace(/([Mmdhs]|y{2})\1/g, (key) => time[_mapping[key]]); +} + + +let origProps = {}; +export const findWidgetByName = (node, name) => node.widgets.find((w) => w.name === name); + +export const doesInputWithNameExist = (node, name) => node.inputs ? node.inputs.some((input) => input.name === name) : false; + +export function updateNodeHeight(node) {node.setSize([node.size[0], node.computeSize()[1]]);} + +export function toggleWidget(node, widget, show = false, suffix = "") { + if (!widget || doesInputWithNameExist(node, widget.name)) return; + if (!origProps[widget.name]) { + origProps[widget.name] = { origType: widget.type, origComputeSize: widget.computeSize }; + } + const origSize = node.size; + + widget.type = show ? origProps[widget.name].origType : "easyHidden" + suffix; + widget.computeSize = show ? origProps[widget.name].origComputeSize : () => [0, -4]; + + widget.linkedWidgets?.forEach(w => toggleWidget(node, w, ":" + widget.name, show)); + + const height = show ? Math.max(node.computeSize()[1], origSize[1]) : node.size[1]; + node.setSize([node.size[0], height]); +} + +export function isLocalNetwork(ip) { + const localNetworkRanges = [ + '192.168.', + '10.', + '127.', + /^172\.((1[6-9]|2[0-9]|3[0-1])\.)/ + ]; + + return localNetworkRanges.some(range => { + if (typeof range === 'string') { + return ip.startsWith(range); + } else { + return range.test(ip); + } + }); +} + + +/** +* accAdd 高精度加法 +* @since 1.0.10 +* @param {Number} arg1 +* @param {Number} arg2 +* @return {Number} +*/ +export function accAdd(arg1, arg2) { + let r1, r2, s1, s2,max; + s1 = typeof arg1 == 'string' ? arg1 : arg1.toString() + s2 = typeof arg2 == 'string' ? 
arg2 : arg2.toString() + try { r1 = s1.split(".")[1].length } catch (e) { r1 = 0 } + try { r2 = s2.split(".")[1].length } catch (e) { r2 = 0 } + max = Math.pow(10, Math.max(r1, r2)) + return (arg1 * max + arg2 * max) / max +} +/** + * accSub 高精度减法 + * @since 1.0.10 + * @param {Number} arg1 + * @param {Number} arg2 + * @return {Number} + */ +export function accSub(arg1, arg2) { + let r1, r2, max, min,s1,s2; + s1 = typeof arg1 == 'string' ? arg1 : arg1.toString() + s2 = typeof arg2 == 'string' ? arg2 : arg2.toString() + try { r1 = s1.split(".")[1].length } catch (e) { r1 = 0 } + try { r2 = s2.split(".")[1].length } catch (e) { r2 = 0 } + max = Math.pow(10, Math.max(r1, r2)); + //动态控制精度长度 + min = (r1 >= r2) ? r1 : r2; + return ((arg1 * max - arg2 * max) / max).toFixed(min) +} +/** + * accMul 高精度乘法 + * @since 1.0.10 + * @param {Number} arg1 + * @param {Number} arg2 + * @return {Number} + */ +export function accMul(arg1, arg2) { + let max = 0, s1 = typeof arg1 == 'string' ? arg1 : arg1.toString(), s2 = typeof arg2 == 'string' ? arg2 : arg2.toString(); + try { max += s1.split(".")[1].length } catch (e) { } + try { max += s2.split(".")[1].length } catch (e) { } + return Number(s1.replace(".", "")) * Number(s2.replace(".", "")) / Math.pow(10, max) +} +/** + * accDiv 高精度除法 + * @since 1.0.10 + * @param {Number} arg1 + * @param {Number} arg2 + * @return {Number} + */ +export function accDiv(arg1, arg2) { + let t1 = 0, t2 = 0, r1, r2,s1 = typeof arg1 == 'string' ? arg1 : arg1.toString(), s2 = typeof arg2 == 'string' ? arg2 : arg2.toString(); + try { t1 = s1.toString().split(".")[1].length } catch (e) { } + try { t2 = s2.toString().split(".")[1].length } catch (e) { } + r1 = Number(s1.toString().replace(".", "")) + r2 = Number(s2.toString().replace(".", "")) + return (r1 / r2) * Math.pow(10, t2 - t1) +} +Number.prototype.div = function (arg) { + return accDiv(this, arg); +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/easy/easy.js b/ComfyUI-Easy-Use/web_version/v1/js/easy/easy.js new file mode 100644 index 0000000000000000000000000000000000000000..b22d2fb2c4830ac0a8be8b29a0b0b64b9e70969e --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/easy/easy.js @@ -0,0 +1,610 @@ +import { api } from "../../../../scripts/api.js"; +import { app } from "../../../../scripts/app.js"; +import {deepEqual, addCss, addMeta, isLocalNetwork} from "../common/utils.js"; +import {logoIcon, quesitonIcon, rocketIcon, groupIcon, rebootIcon, closeIcon} from "../common/icon.js"; +import {$t} from '../common/i18n.js'; +import {toast} from "../common/toast.js"; +import {$el, ComfyDialog} from "../../../../scripts/ui.js"; + + +addCss('css/index.css') + +api.addEventListener("easyuse-toast",event=>{ + const content = event.detail.content + const type = event.detail.type + const duration = event.detail.duration + if(!type){ + toast.info(content, duration) + } + else{ + toast.showToast({ + id: `toast-${type}`, + content: `${toast[type+"_icon"]} ${content}`, + duration: duration || 3000, + }) + } +}) + + +let draggerEl = null +let isGroupMapcanMove = true +function createGroupMap(){ + let div = document.querySelector('#easyuse_groups_map') + if(div){ + div.style.display = div.style.display == 'none' ? 
'flex' : 'none' + return + } + let groups = app.canvas.graph._groups + let nodes = app.canvas.graph._nodes + let old_nodes = groups.length + div = document.createElement('div') + div.id = 'easyuse_groups_map' + div.innerHTML = '' + let btn = document.createElement('div') + btn.style = `display: flex; + width: calc(100% - 8px); + justify-content: space-between; + align-items: center; + padding: 0 6px; + height: 44px;` + let hideBtn = $el('button.closeBtn',{ + innerHTML:closeIcon, + onclick:_=>div.style.display = 'none' + }) + let textB = document.createElement('p') + btn.appendChild(textB) + btn.appendChild(hideBtn) + textB.style.fontSize = '11px' + textB.innerHTML = `${$t('Groups Map')} (EasyUse)` + div.appendChild(btn) + + div.addEventListener('mousedown', function (e) { + var startX = e.clientX + var startY = e.clientY + var offsetX = div.offsetLeft + var offsetY = div.offsetTop + + function moveBox (e) { + var newX = e.clientX + var newY = e.clientY + var deltaX = newX - startX + var deltaY = newY - startY + div.style.left = offsetX + deltaX + 'px' + div.style.top = offsetY + deltaY + 'px' + } + + function stopMoving () { + document.removeEventListener('mousemove', moveBox) + document.removeEventListener('mouseup', stopMoving) + } + + if(isGroupMapcanMove){ + document.addEventListener('mousemove', moveBox) + document.addEventListener('mouseup', stopMoving) + } + }) + + function updateGroups(groups, groupsDiv, autoSortDiv){ + if(groups.length>0){ + autoSortDiv.style.display = 'block' + }else autoSortDiv.style.display = 'none' + for (let index in groups) { + const group = groups[index] + const title = group.title + const show_text = $t('Always') + const hide_text = $t('Bypass') + const mute_text = $t('Never') + let group_item = document.createElement('div') + let group_item_style = `justify-content: space-between;display:flex;background-color: var(--comfy-input-bg);border-radius: 5px;border:1px solid var(--border-color);margin-top:5px;` + group_item.addEventListener("mouseover",event=>{ + event.preventDefault() + group_item.style = group_item_style + "filter:brightness(1.2);" + }) + group_item.addEventListener("mouseleave",event=>{ + event.preventDefault() + group_item.style = group_item_style + "filter:brightness(1);" + }) + group_item.addEventListener("dragstart",e=>{ + draggerEl = e.currentTarget; + e.currentTarget.style.opacity = "0.6"; + e.currentTarget.style.border = "1px dashed yellow"; + e.dataTransfer.effectAllowed = 'move'; + e.dataTransfer.setDragImage(emptyImg, 0, 0); + }) + group_item.addEventListener("dragend",e=>{ + e.target.style.opacity = "1"; + e.currentTarget.style.border = "1px dashed transparent"; + e.currentTarget.removeAttribute("draggable"); + document.querySelectorAll('.easyuse-group-item').forEach((el,i) => { + var prev_i = el.dataset.id; + if (el == draggerEl && prev_i != i ) { + groups.splice(i, 0, groups.splice(prev_i, 1)[0]); + } + el.dataset.id = i; + }); + isGroupMapcanMove = true + }) + group_item.addEventListener("dragover",e=>{ + e.preventDefault(); + if (e.currentTarget == draggerEl) return; + let rect = e.currentTarget.getBoundingClientRect(); + if (e.clientY > rect.top + rect.height / 2) { + e.currentTarget.parentNode.insertBefore(draggerEl, e.currentTarget.nextSibling); + } else { + e.currentTarget.parentNode.insertBefore(draggerEl, e.currentTarget); + } + isGroupMapcanMove = true + }) + + + group_item.setAttribute('data-id',index) + group_item.className = 'easyuse-group-item' + group_item.style = group_item_style + // 标题 + let text_group_title = 
document.createElement('div') + text_group_title.style = `flex:1;font-size:12px;color:var(--input-text);padding:4px;white-space: nowrap;overflow: hidden;text-overflow: ellipsis;cursor:pointer` + text_group_title.innerHTML = `${title}` + text_group_title.addEventListener('mousedown',e=>{ + isGroupMapcanMove = false + e.currentTarget.parentNode.draggable = 'true'; + }) + text_group_title.addEventListener('mouseleave',e=>{ + setTimeout(_=>{ + isGroupMapcanMove = true + },150) + }) + group_item.append(text_group_title) + // 按钮组 + let buttons = document.createElement('div') + group.recomputeInsideNodes(); + const nodesInGroup = group._nodes; + let isGroupShow = nodesInGroup && nodesInGroup.length>0 && nodesInGroup[0].mode == 0 + let isGroupMute = nodesInGroup && nodesInGroup.length>0 && nodesInGroup[0].mode == 2 + let go_btn = document.createElement('button') + go_btn.style = "margin-right:6px;cursor:pointer;font-size:10px;padding:2px 4px;color:var(--input-text);background-color: var(--comfy-input-bg);border: 1px solid var(--border-color);border-radius:4px;" + go_btn.innerText = "Go" + go_btn.addEventListener('click', () => { + app.canvas.ds.offset[0] = -group.pos[0] - group.size[0] * 0.5 + (app.canvas.canvas.width * 0.5) / app.canvas.ds.scale; + app.canvas.ds.offset[1] = -group.pos[1] - group.size[1] * 0.5 + (app.canvas.canvas.height * 0.5) / app.canvas.ds.scale; + app.canvas.setDirty(true, true); + app.canvas.setZoom(1) + }) + buttons.append(go_btn) + let see_btn = document.createElement('button') + let defaultStyle = `cursor:pointer;font-size:10px;;padding:2px;border: 1px solid var(--border-color);border-radius:4px;width:36px;` + see_btn.style = isGroupMute ? `background-color:var(--error-text);color:var(--input-text);` + defaultStyle : (isGroupShow ? `background-color:var(--theme-color);color:var(--input-text);` + defaultStyle : `background-color: var(--comfy-input-bg);color:var(--descrip-text);` + defaultStyle) + see_btn.innerText = isGroupMute ? mute_text : (isGroupShow ? show_text : hide_text) + let pressTimer + let firstTime =0, lastTime =0 + let isHolding = false + see_btn.addEventListener('click', () => { + if(isHolding){ + isHolding = false + return + } + for (const node of nodesInGroup) { + node.mode = isGroupShow ? 4 : 0; + node.graph.change(); + } + isGroupShow = nodesInGroup[0].mode == 0 ? true : false + isGroupMute = nodesInGroup[0].mode == 2 ? true : false + see_btn.style = isGroupMute ? `background-color:var(--error-text);color:var(--input-text);` + defaultStyle : (isGroupShow ? `background-color:#006691;color:var(--input-text);` + defaultStyle : `background-color: var(--comfy-input-bg);color:var(--descrip-text);` + defaultStyle) + see_btn.innerText = isGroupMute ? mute_text : (isGroupShow ? show_text : hide_text) + }) + see_btn.addEventListener('mousedown', () => { + firstTime = new Date().getTime(); + clearTimeout(pressTimer); + pressTimer = setTimeout(_=>{ + for (const node of nodesInGroup) { + node.mode = isGroupMute ? 0 : 2; + node.graph.change(); + } + isGroupShow = nodesInGroup[0].mode == 0 ? true : false + isGroupMute = nodesInGroup[0].mode == 2 ? true : false + see_btn.style = isGroupMute ? `background-color:var(--error-text);color:var(--input-text);` + defaultStyle : (isGroupShow ? `background-color:#006691;color:var(--input-text);` + defaultStyle : `background-color: var(--comfy-input-bg);color:var(--descrip-text);` + defaultStyle) + see_btn.innerText = isGroupMute ? mute_text : (isGroupShow ? 
show_text : hide_text) + },500) + }) + see_btn.addEventListener('mouseup', () => { + lastTime = new Date().getTime(); + if(lastTime - firstTime > 500) isHolding = true + clearTimeout(pressTimer); + }) + buttons.append(see_btn) + group_item.append(buttons) + + groupsDiv.append(group_item) + } + + } + + let groupsDiv = document.createElement('div') + groupsDiv.id = 'easyuse-groups-items' + groupsDiv.style = `overflow-y: auto;max-height: 400px;height:100%;width: 100%;` + + let autoSortDiv = document.createElement('button') + autoSortDiv.style = `cursor:pointer;font-size:10px;padding:2px 4px;color:var(--input-text);background-color: var(--comfy-input-bg);border: 1px solid var(--border-color);border-radius:4px;` + autoSortDiv.innerText = $t('Auto Sorting') + autoSortDiv.addEventListener('click',e=>{ + e.preventDefault() + groupsDiv.innerHTML = `` + let new_groups = groups.sort((a,b)=> a['pos'][0] - b['pos'][0]).sort((a,b)=> a['pos'][1] - b['pos'][1]) + updateGroups(new_groups, groupsDiv, autoSortDiv) + }) + + updateGroups(groups, groupsDiv, autoSortDiv) + + div.appendChild(groupsDiv) + + let remarkDiv = document.createElement('p') + remarkDiv.style = `text-align:center; font-size:10px; padding:0 10px;color:var(--descrip-text)` + remarkDiv.innerText = $t('Toggle `Show/Hide` can set mode of group, LongPress can set group nodes to never') + div.appendChild(groupsDiv) + div.appendChild(remarkDiv) + div.appendChild(autoSortDiv) + + let graphDiv = document.getElementById("graph-canvas") + graphDiv.addEventListener('mouseover', async () => { + groupsDiv.innerHTML = `` + let new_groups = app.canvas.graph._groups + updateGroups(new_groups, groupsDiv, autoSortDiv) + old_nodes = nodes + }) + + if (!document.querySelector('#easyuse_groups_map')){ + document.body.appendChild(div) + }else{ + div.style.display = 'flex' + } + +} + +async function cleanup(){ + try { + const {Running, Pending} = await api.getQueue() + if(Running.length>0 || Pending.length>0){ + toast.error($t("Clean Failed")+ ":"+ $t("Please stop all running tasks before cleaning GPU")) + return + } + api.fetchApi("/easyuse/cleangpu",{ + method:"POST" + }).then(res=>{ + if(res.status == 200){ + toast.success($t("Clean SuccessFully")) + }else{ + toast.error($t("Clean Failed")) + } + }) + + } catch (exception) {} +} + + +let guideDialog = null +let isDownloading = false +function download_model(url,local_dir){ + if(isDownloading || !url || !local_dir) return + isDownloading = true + let body = new FormData(); + body.append('url', url); + body.append('local_dir', local_dir); + api.fetchApi("/easyuse/model/download",{ + method:"POST", + body + }).then(res=>{ + if(res.status == 200){ + toast.success($t("Download SuccessFully")) + }else{ + toast.error($t("Download Failed")) + } + isDownloading = false + }) + +} +class GuideDialog { + + constructor(note, need_models){ + this.dialogDiv = null + this.modelsDiv = null + + if(need_models?.length>0){ + let tbody = [] + + for(let i=0;idownload_model(need_models[i]['download_url'],need_models[i]['local_dir']), target:"_blank", textContent:$t('Download Model')}) : '', + need_models[i]['source_url'] ? $el('a',{href:need_models[i]['source_url'], target:"_blank", textContent:$t('Source Url')}) : '', + need_models[i]['desciption'] ? 
$el('span',{textContent:need_models[i]['desciption']}) : '', + ]), + ])) + } + this.modelsDiv = $el('div.easyuse-guide-dialog-models.markdown-body',[ + $el('h3',{textContent:$t('Models Required')}), + $el('table',{cellpadding:0,cellspacing:0},[ + $el('thead',[ + $el('tr',[ + $el('th',{innerHTML:$t('ModelName')}), + $el('th',{innerHTML:$t('Description')}), + ]) + ]), + $el('tbody',tbody) + ]) + ]) + } + + this.dialogDiv = $el('div.easyuse-guide-dialog.hidden',[ + $el('div.easyuse-guide-dialog-header',[ + $el('div.easyuse-guide-dialog-top',[ + $el('div.easyuse-guide-dialog-title',{ + innerHTML:$t('Workflow Guide') + }), + $el('button.closeBtn',{innerHTML:closeIcon,onclick:_=>this.close()}) + ]), + + $el('div.easyuse-guide-dialog-remark',{ + innerHTML:`${$t('Workflow created by')} Yolain , ${$t('Watch more video content')} B站乱乱呀` + }) + ]), + $el('div.easyuse-guide-dialog-content.markdown-body',[ + $el('div.easyuse-guide-dialog-note',{ + innerHTML:note + }), + ...this.modelsDiv ? [this.modelsDiv] : [] + ]) + ]) + + if(disableRenderInfo){ + this.dialogDiv.classList.add('disable-render-info') + } + document.body.appendChild(this.dialogDiv) + } + show(){ + if(this.dialogDiv) this.dialogDiv.classList.remove('hidden') + } + + close(){ + if(this.dialogDiv){ + this.dialogDiv.classList.add('hidden') + } + } + toggle(){ + if(this.dialogDiv){ + if(this.dialogDiv.classList.contains('hidden')){ + this.show() + }else{ + this.close() + } + } + } + + remove(){ + if(this.dialogDiv) document.body.removeChild(this.dialogDiv) + } +} + +// toolbar +const toolBarId = "Comfy.EasyUse.toolBar" +const getEnableToolBar = _ => app.ui.settings.getSettingValue(toolBarId, true) +const getNewMenuPosition = _ => { + try{ + return app.ui.settings.getSettingValue('Comfy.UseNewMenu', 'Disabled') + }catch (e){ + return 'Disabled' + } +} + +let note = null +let toolbar = null +let enableToolBar = getEnableToolBar() && getNewMenuPosition() == 'Disabled' +let disableRenderInfo = localStorage['Comfy.Settings.Comfy.EasyUse.disableRenderInfo'] ? true : false +export function addToolBar(app) { + app.ui.settings.addSetting({ + id: toolBarId, + name: $t("Enable tool bar fixed on the left-bottom (ComfyUI-Easy-Use)"), + type: "boolean", + defaultValue: enableToolBar, + onChange(value) { + enableToolBar = !!value; + if(enableToolBar){ + showToolBar() + }else hideToolBar() + }, + }); +} +function showToolBar(){ + if(toolbar) toolbar.style.display = 'flex' +} +function hideToolBar(){ + if(toolbar) toolbar.style.display = 'none' +} +let monitor = null +function setCrystoolsUI(position){ + const crystools = document.getElementById('crystools-root')?.children || null + if(crystools?.length>0){ + if(!monitor){ + for (let i = 0; i < crystools.length; i++) { + if (crystools[i].id === 'crystools-monitor-container') { + monitor = crystools[i]; + break; + } + } + } + if(monitor){ + if(position == 'Disabled'){ + let replace = true + for (let i = 0; i < crystools.length; i++) { + if (crystools[i].id === 'crystools-monitor-container') { + replace = false + break; + } + } + document.getElementById('crystools-root').appendChild(monitor) + } + else { + let monitor_div = document.getElementById('comfyui-menu-monitor') + if(!monitor_div) app.menu.settingsGroup.element.before($el('div',{id:'comfyui-menu-monitor'},monitor)) + else monitor_div.appendChild(monitor) + } + } + } +} +const changeNewMenuPosition = app.ui.settings.settingsLookup?.['Comfy.UseNewMenu'] +if(changeNewMenuPosition) changeNewMenuPosition.onChange = v => { + v == 'Disabled' ? 
showToolBar() : hideToolBar() + setCrystoolsUI(v) +} + + + +app.registerExtension({ + name: "comfy.easyUse", + init() { + // Canvas Menu + const getCanvasMenuOptions = LGraphCanvas.prototype.getCanvasMenuOptions; + LGraphCanvas.prototype.getCanvasMenuOptions = function () { + const options = getCanvasMenuOptions.apply(this, arguments); + let emptyImg = new Image() + emptyImg.src = "data:image/gif;base64,R0lGODlhAQABAIAAAAUEBAAAACwAAAAAAQABAAACAkQBADs="; + + options.push(null, + // Groups Map + { + content: groupIcon.replace('currentColor','var(--warning-color)') + ' '+ $t('Groups Map') + ' (EasyUse)', + callback: async() => { + createGroupMap() + } + }, + // Force clean ComfyUI GPU Used 强制卸载模型GPU占用 + { + content: rocketIcon.replace('currentColor','var(--theme-color-light)') + ' '+ $t('Cleanup Of GPU Usage') + ' (EasyUse)', + callback: async() =>{ + await cleanup() + } + }, + // Only show the reboot option if the server is running on a local network 仅在本地或局域网环境可重启服务 + isLocalNetwork(window.location.host) ? { + content: rebootIcon.replace('currentColor','var(--error-color)') + ' '+ $t('Reboot ComfyUI') + ' (EasyUse)', + callback: _ =>{ + if (confirm($t("Are you sure you'd like to reboot the server?"))){ + try { + api.fetchApi("/easyuse/reboot"); + } catch (exception) {} + } + } + } : null, + ); + return options; + }; + + let renderInfoEvent = LGraphCanvas.prototype.renderInfo + if(disableRenderInfo){ + LGraphCanvas.prototype.renderInfo = function (ctx, x, y) {} + } + + if(!toolbar){ + toolbar = $el('div.easyuse-toolbar',[ + $el('div.easyuse-toolbar-item',{ + onclick:_=>{ + createGroupMap() + } + },[ + $el('div.easyuse-toolbar-icon.group', {innerHTML:groupIcon}), + $el('div.easyuse-toolbar-tips',$t('Groups Map')) + ]), + $el('div.easyuse-toolbar-item',{ + onclick:async()=>{ + await cleanup() + } + },[ + $el('div.easyuse-toolbar-icon.rocket',{innerHTML:rocketIcon}), + $el('div.easyuse-toolbar-tips',$t('Cleanup Of GPU Usage')) + ]), + ]) + if(disableRenderInfo){ + toolbar.classList.add('disable-render-info') + }else{ + toolbar.classList.remove('disable-render-info') + } + document.body.appendChild(toolbar) + } + + // rewrite handleFile + let loadGraphDataEvent = app.loadGraphData + app.loadGraphData = async function (data, clean=true) { + // if(data?.extra?.cpr){ + // toast.copyright() + // } + if(data?.extra?.note){ + if(guideDialog) { + guideDialog.remove() + guideDialog = null + } + if(note && toolbar) toolbar.removeChild(note) + const need_models = data.extra?.need_models || null + guideDialog = new GuideDialog(data.extra.note, need_models) + note = $el('div.easyuse-toolbar-item',{ + onclick:async()=>{ + guideDialog.toggle() + } + },[ + $el('div.easyuse-toolbar-icon.question',{innerHTML:quesitonIcon}), + $el('div.easyuse-toolbar-tips',$t('Workflow Guide')) + ]) + if(toolbar) toolbar.insertBefore(note, toolbar.firstChild) + } + else{ + if(note) { + toolbar.removeChild(note) + note = null + } + } + return await loadGraphDataEvent.apply(this, [...arguments]) + } + + addToolBar(app) + }, + async setup() { + // New style menu button + if(app.menu?.actionsGroup){ + const groupMap = new (await import('../../../../scripts/ui/components/button.js')).ComfyButton({ + icon:'list-box', + action:()=> createGroupMap(), + tooltip: "EasyUse Group Map", + // content: "EasyUse Group Map", + classList: "comfyui-button comfyui-menu-mobile-collapse" + }); + app.menu.actionsGroup.element.after(groupMap.element); + const position = getNewMenuPosition() + setCrystoolsUI(position) + if(position == 'Disabled') 
showToolBar() + else hideToolBar() + // const easyNewMenu = $el('div.easyuse-new-menu',[ + // $el('div.easyuse-new-menu-intro',[ + // $el('div.easyuse-new-menu-logo',{innerHTML:logoIcon}), + // $el('div.easyuse-new-menu-title',[ + // $el('div.title',{textContent:'ComfyUI-Easy-Use'}), + // $el('div.desc',{textContent:'Version:'}) + // ]) + // ]) + // ]) + // app.menu?.actionsGroup.element.after(new (await import('../../../../scripts/ui/components/splitButton.js')).ComfySplitButton({ + // primary: groupMap, + // mode:'click', + // position:'absolute', + // horizontal: 'right' + // },easyNewMenu).element); + } + + }, + beforeRegisterNodeDef(nodeType, nodeData, app) { + if (nodeData.name.startsWith("easy")) { + const origOnConfigure = nodeType.prototype.onConfigure; + nodeType.prototype.onConfigure = function () { + const r = origOnConfigure ? origOnConfigure.apply(this, arguments) : undefined; + return r; + }; + } + }, +}); \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/easy/easyAccount.js b/ComfyUI-Easy-Use/web_version/v1/js/easy/easyAccount.js new file mode 100644 index 0000000000000000000000000000000000000000..421432fa190616a84dff5a5b8cc482c5c14e4580 --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/easy/easyAccount.js @@ -0,0 +1,283 @@ +import { app } from "../../../../scripts/app.js"; +import { api } from "../../../../scripts/api.js"; +import { $el, ComfyDialog } from "../../../../scripts/ui.js"; +import { $t } from '../common/i18n.js' +import { toast } from "../common/toast.js"; +import {sleep, accSub} from "../common/utils.js"; + +let api_keys = [] +let api_current = 0 +let user_info = {} + +const api_cost = { + 'sd3': 6.5, + 'sd3-turbo': 4, +} + +class AccountDialog extends ComfyDialog { + constructor() { + super(); + this.lists = [] + this.dialog_div = null + this.user_div = null + } + + addItem(index, user_div){ + return $el('div.easyuse-account-dialog-item',[ + $el('input',{type:'text',placeholder:'Enter name',oninput: e=>{ + const dataIndex = Array.prototype.indexOf.call(this.dialog_div.querySelectorAll('.easyuse-account-dialog-item'), e.target.parentNode) + api_keys[dataIndex]['name'] = e.target.value + },value:api_keys[index]['name']}), + $el('input.key',{type:'text',oninput: e=>{ + const dataIndex = Array.prototype.indexOf.call(this.dialog_div.querySelectorAll('.easyuse-account-dialog-item'), e.target.parentNode) + api_keys[dataIndex]['key'] = e.target.value + },placeholder:'Enter APIKEY', value:api_keys[index]['key']}), + $el('button.choose',{textContent:$t('Choose'),onclick:async(e)=>{ + const dataIndex = Array.prototype.indexOf.call(this.dialog_div.querySelectorAll('.easyuse-account-dialog-item'), e.target.parentNode) + let name = api_keys[dataIndex]['name'] + let key = api_keys[dataIndex]['key'] + if(!name){ + toast.error($t('Please enter the account name')) + return + } + else if(!key){ + toast.error($t('Please enter the APIKEY')) + return + } + let missing = true + for(let i=0;i{ + new AccountDialog().show(user_div); + } + },[ + $el('div.user',[ + $el('div.avatar', avatar ? 
[$el('img',{src:avatar})] : '😀'), + $el('div.info', [ + $el('h5.name', email), + $el('h6.remark','Credits: '+ credits) + ]) + ]), + $el('div.edit', {textContent:$t('Edit')}) + ]) + ) + toast.success($t('Save Succeed')) + } + else toast.success($t('Save Succeed')) + this.close() + } else { + toast.error($t('Save Failed')) + } + }}), + $el('button.delete',{textContent:$t('Delete'),onclick:e=>{ + const dataIndex = Array.prototype.indexOf.call(this.dialog_div.querySelectorAll('.easyuse-account-dialog-item'), e.target.parentNode) + if(api_keys.length<=1){ + toast.error($t('At least one account is required')) + return + } + api_keys.splice(dataIndex,1) + this.dialog_div.removeChild(e.target.parentNode) + }}), + ]) + } + + show(userdiv) { + api_keys.forEach((item,index)=>{ + this.lists.push(this.addItem(index,userdiv)) + }) + this.dialog_div = $el("div.easyuse-account-dialog", this.lists) + super.show( + $el('div.easyuse-account-dialog-main',[ + $el('div',[ + $el('a',{href:'https://platform.stability.ai/account/keys',target:'_blank',textContent:$t('Getting Your APIKEY')}), + ]), + this.dialog_div, + ]) + ); + } + + createButtons() { + const btns = super.createButtons(); + btns.unshift($el('button',{ + type:'button', + textContent:$t('Save Account Info'), + onclick:_=>{ + let missing = true + for(let i=0;i { + if (res.status == 200) { + toast.success($t('Save Succeed')) + + } else { + toast.error($t('Save Failed')) + } + }) + } + } + })) + btns.unshift($el('button',{ + type:'button', + textContent:$t('Add Account'), + onclick:_=>{ + const name = 'Account '+(api_keys.length).toString() + api_keys.push({name,key:''}) + const item = this.addItem(api_keys.length - 1) + this.lists.push(item) + this.dialog_div.appendChild(item) + } + })) + return btns + } +} + + +app.registerExtension({ + name: 'comfy.easyUse.account', + async beforeRegisterNodeDef(nodeType, nodeData, app) { + if(nodeData.name == 'easy stableDiffusion3API'){ + + const onNodeCreated = nodeType.prototype.onNodeCreated; + nodeType.prototype.onNodeCreated = async function() { + onNodeCreated ? onNodeCreated?.apply(this, arguments) : undefined; + const seed_widget = this.widgets.find(w => ['seed_num','seed'].includes(w.name)) + const seed_control = this.widgets.find(w=> ['control_before_generate','control_after_generate'].includes(w.name)) + let model_widget = this.widgets.find(w => w.name == 'model') + model_widget.callback = value =>{ + cost_widget.value = '-'+api_cost[value] + } + const cost_widget = this.addWidget('text', 'cost_credit', '0', _=>{ + },{ + serialize:false, + }) + cost_widget.disabled = true + setTimeout(_=>{ + if(seed_control.name == 'control_before_generate' && seed_widget.value === 0){ + seed_widget.value = Math.floor(Math.random() * 4294967294) + } + cost_widget.value = '-'+api_cost[model_widget.value] + },100) + let user_div = $el('div.easyuse-account-user', [$t('Loading UserInfo...')]) + let account = this.addDOMWidget('account',"btn",$el('div.easyuse-account',user_div)); + // 更新balance信息 + api.addEventListener('stable-diffusion-api-generate-succeed', async ({detail}) => { + let remarkDiv = user_div.querySelectorAll('.remark') + if(remarkDiv && remarkDiv[0]){ + const credits = detail?.model ? 
api_cost[detail.model] : 0 + if(credits) { + let balance = accSub(parseFloat(remarkDiv[0].innerText.replace(/Credits: /g,'')),credits) + if(balance>0){ + remarkDiv[0].innerText = 'Credits: '+ balance.toString() + } + } + } + await sleep(10000) + const res = await api.fetchApi('/easyuse/stability/balance') + if(res.status == 200){ + const data = await res.json() + if(data?.balance){ + const credits = data.balance?.credits || 0 + if(remarkDiv && remarkDiv[0]){ + remarkDiv[0].innerText = 'Credits: ' + credits + } + } + } + }) + // 获取api_keys + const res = await api.fetchApi('/easyuse/stability/api_keys') + if (res.status == 200){ + let data = await res.json() + api_keys = data.keys + api_current = data.current + if (api_keys.length > 0 && api_current!==undefined){ + const api_key = api_keys[api_current]['key'] + const api_name = api_keys[api_current]['name'] + if(!api_key){ + user_div.replaceChildren( + $el('div.easyuse-account-user-info', { + onclick:_=>{ + new AccountDialog().show(user_div); + } + },[ + $el('div.user',[ + $el('div.avatar', '😀'), + $el('div.info', [ + $el('h5.name', api_name), + $el('h6.remark',$t('Click to set the APIKEY first')) + ]) + ]), + $el('div.edit', {textContent:$t('Edit')}) + ]) + ) + }else{ + // 获取账号信息 + const res = await api.fetchApi('/easyuse/stability/user_info') + if(res.status == 200){ + const data = await res.json() + if(data?.account && data?.balance){ + const avatar = data.account?.profile_picture || null + const email = data.account?.email || null + const credits = data.balance?.credits || 0 + user_div.replaceChildren( + $el('div.easyuse-account-user-info', { + onclick:_=>{ + new AccountDialog().show(user_div); + } + },[ + $el('div.user',[ + $el('div.avatar', avatar ? [$el('img',{src:avatar})] : '😀'), + $el('div.info', [ + $el('h5.name', email), + $el('h6.remark','Credits: '+ credits) + ]) + ]), + $el('div.edit', {textContent:$t('Edit')}) + ]) + ) + } + } + } + } + } + } + } + } +}) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/easy/easyContextMenu.js b/ComfyUI-Easy-Use/web_version/v1/js/easy/easyContextMenu.js new file mode 100644 index 0000000000000000000000000000000000000000..0abbdd684b3d0352d06161a4e4b98b3f68a0ea00 --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/easy/easyContextMenu.js @@ -0,0 +1,174 @@ +import {app} from "../../../../scripts/app.js"; +import {api} from "../../../../scripts/api.js"; +import {$el} from "../../../../scripts/ui.js"; +import {$t} from "../common/i18n.js"; +import {getExtension, spliceExtension} from '../common/utils.js' +import {toast} from "../common/toast.js"; + +const setting_id = "Comfy.EasyUse.MenuNestSub" +let enableMenuNestSub = false +let thumbnails = [] + +export function addMenuNestSubSetting(app) { + app.ui.settings.addSetting({ + id: setting_id, + name: $t("Enable ContextMenu Auto Nest Subdirectories (ComfyUI-Easy-Use)"), + type: "boolean", + defaultValue: enableMenuNestSub, + onChange(value) { + enableMenuNestSub = !!value; + }, + }); +} + +const getEnableMenuNestSub = _ => app.ui.settings.getSettingValue(setting_id, enableMenuNestSub) + + +const Loaders = ['easy fullLoader','easy a1111Loader','easy comfyLoader'] +app.registerExtension({ + name:"comfy.easyUse.contextMenu", + async setup(app){ + addMenuNestSubSetting(app) + // 获取所有模型图像 + const imgRes = await api.fetchApi(`/easyuse/models/thumbnail`) + if (imgRes.status === 200) { + let data = await imgRes.json(); + thumbnails = data + } + else if(getEnableMenuNestSub()){ + toast.error($t("Too many thumbnails, have closed 
the display")) + } + const existingContextMenu = LiteGraph.ContextMenu; + LiteGraph.ContextMenu = function(values,options){ + const threshold = 10; + const enabled = getEnableMenuNestSub(); + if(!enabled || (values?.length || 0) <= threshold || !(options?.callback) || values.some(i => typeof i !== 'string')){ + if(enabled){ + // console.log('Skipping context menu auto nesting for incompatible menu.'); + } + return existingContextMenu.apply(this,[...arguments]); + } + const compatValues = values; + const originalValues = [...compatValues]; + const folders = {}; + const specialOps = []; + const folderless = []; + for(const value of compatValues){ + const splitBy = value.indexOf('/') > -1 ? '/' : '\\'; + const valueSplit = value.split(splitBy); + if(valueSplit.length > 1){ + const key = valueSplit.shift(); + folders[key] = folders[key] || []; + folders[key].push(valueSplit.join(splitBy)); + }else if(value === 'CHOOSE' || value.startsWith('DISABLE ')){ + specialOps.push(value); + }else{ + folderless.push(value); + } + } + const foldersCount = Object.values(folders).length; + if(foldersCount > 0){ + const oldcallback = options.callback; + options.callback = null; + const newCallback = (item,options) => { + if(['None','无','無','なし'].includes(item.content)) oldcallback('None',options) + else oldcallback(originalValues.find(i => i.endsWith(item.content),options)); + }; + const addContent = (content, folderName='') => { + const name = folderName ? folderName + '\\' + spliceExtension(content) : spliceExtension(content); + const ext = getExtension(content) + const time = new Date().getTime() + let thumbnail = '' + if(['ckpt', 'pt', 'bin', 'pth', 'safetensors'].includes(ext)){ + for(let i=0;i { + let subs = [] + let less = [] + const b = folder.map(name=> { + const _folders = {}; + const splitBy = name.indexOf('/') > -1 ? 
'/' : '\\'; + const valueSplit = name.split(splitBy); + if(valueSplit.length > 1){ + const key = valueSplit.shift(); + _folders[key] = _folders[key] || []; + _folders[key].push(valueSplit.join(splitBy)); + } + const foldersCount = Object.values(folders).length; + if(foldersCount > 0){ + let key = Object.keys(_folders)[0] + if(key && _folders[key]) subs.push({key, value:_folders[key][0]}) + else{ + less.push(addContent(name,key)) + } + } + return addContent(name,folderName) + }) + if(subs.length>0){ + let subs_obj = {} + subs.forEach(item => { + subs_obj[item.key] = subs_obj[item.key] || [] + subs_obj[item.key].push(item.value) + }) + return [...Object.entries(subs_obj).map(f => { + return { + content: f[0], + has_submenu: true, + callback: () => {}, + submenu: { + options: add_sub_folder(f[1], f[0]), + } + } + }),...less] + } + else return b + } + + for(const [folderName,folder] of Object.entries(folders)){ + newValues.push({ + content:folderName, + has_submenu:true, + callback:() => {}, + submenu:{ + options:add_sub_folder(folder,folderName), + } + }); + } + newValues.push(...folderless.map(f => addContent(f, ''))); + if(specialOps.length > 0) newValues.push(...specialOps.map(f => addContent(f, ''))); + return existingContextMenu.call(this,newValues,options); + } + return existingContextMenu.apply(this,[...arguments]); + } + LiteGraph.ContextMenu.prototype = existingContextMenu.prototype; + }, + +}) + diff --git a/ComfyUI-Easy-Use/web_version/v1/js/easy/easyDynamicWidgets.js b/ComfyUI-Easy-Use/web_version/v1/js/easy/easyDynamicWidgets.js new file mode 100644 index 0000000000000000000000000000000000000000..7ab5a5af2ca5e0f5e67d5480bba63abab2075a05 --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/easy/easyDynamicWidgets.js @@ -0,0 +1,1274 @@ +import { app } from "../../../../scripts/app.js"; +import { api } from "../../../../scripts/api.js"; +import { ComfyWidgets } from "../../../../scripts/widgets.js"; +import { toast} from "../common/toast.js"; +import { $t } from '../common/i18n.js'; + +import { findWidgetByName, toggleWidget, updateNodeHeight} from "../common/utils.js"; + +const seedNodes = ["easy seed", "easy latentNoisy", "easy wildcards", "easy preSampling", "easy preSamplingAdvanced", "easy preSamplingNoiseIn", "easy preSamplingSdTurbo", "easy preSamplingCascade", "easy preSamplingDynamicCFG", "easy preSamplingLayerDiffusion", "easy fullkSampler", "easy fullCascadeKSampler"] +const loaderNodes = ["easy fullLoader", "easy a1111Loader", "easy comfyLoader", "easy fluxLoader", "easy hunyuanDiTLoader", "easy pixArtLoader"] + +function widgetLogic(node, widget) { + if (widget.name === 'lora_name') { + if (widget.value === "None") { + toggleWidget(node, findWidgetByName(node, 'lora_model_strength')) + toggleWidget(node, findWidgetByName(node, 'lora_clip_strength')) + } else { + toggleWidget(node, findWidgetByName(node, 'lora_model_strength'), true) + toggleWidget(node, findWidgetByName(node, 'lora_clip_strength'), true) + } + } + if (widget.name === 'rescale') { + let rescale_after_model = findWidgetByName(node, 'rescale_after_model').value + if (widget.value === 'by percentage' && rescale_after_model) { + toggleWidget(node, findWidgetByName(node, 'width')) + toggleWidget(node, findWidgetByName(node, 'height')) + toggleWidget(node, findWidgetByName(node, 'longer_side')) + toggleWidget(node, findWidgetByName(node, 'percent'), true) + } else if (widget.value === 'to Width/Height' && rescale_after_model) { + toggleWidget(node, findWidgetByName(node, 'width'), true) + toggleWidget(node, 
findWidgetByName(node, 'height'), true) + toggleWidget(node, findWidgetByName(node, 'percent')) + toggleWidget(node, findWidgetByName(node, 'longer_side')) + } else if (rescale_after_model) { + toggleWidget(node, findWidgetByName(node, 'longer_side'), true) + toggleWidget(node, findWidgetByName(node, 'width')) + toggleWidget(node, findWidgetByName(node, 'height')) + toggleWidget(node, findWidgetByName(node, 'percent')) + } + updateNodeHeight(node) + } + if (widget.name === 'upscale_method') { + if (widget.value === "None") { + toggleWidget(node, findWidgetByName(node, 'factor')) + toggleWidget(node, findWidgetByName(node, 'crop')) + } else { + toggleWidget(node, findWidgetByName(node, 'factor'), true) + toggleWidget(node, findWidgetByName(node, 'crop'), true) + } + updateNodeHeight(node) + } + if (widget.name === 'image_output') { + if (widget.value === 'Sender' || widget.value === 'Sender&Save'){ + toggleWidget(node, findWidgetByName(node, 'link_id'), true) + }else { + toggleWidget(node, findWidgetByName(node, 'link_id')) + } + if (widget.value === 'Hide' || widget.value === 'Preview' || widget.value == 'Preview&Choose' || widget.value === 'Sender') { + toggleWidget(node, findWidgetByName(node, 'save_prefix')) + toggleWidget(node, findWidgetByName(node, 'output_path')) + toggleWidget(node, findWidgetByName(node, 'embed_workflow')) + toggleWidget(node, findWidgetByName(node, 'number_padding')) + toggleWidget(node, findWidgetByName(node, 'overwrite_existing')) + } else if (widget.value === 'Save' || widget.value === 'Hide&Save' || widget.value === 'Sender&Save') { + toggleWidget(node, findWidgetByName(node, 'save_prefix'), true) + toggleWidget(node, findWidgetByName(node, 'output_path'), true) + toggleWidget(node, findWidgetByName(node, 'embed_workflow'), true) + toggleWidget(node, findWidgetByName(node, 'number_padding'), true) + toggleWidget(node, findWidgetByName(node, 'overwrite_existing'), true) + } + + if(widget.value === 'Hide' || widget.value === 'Hide&Save'){ + toggleWidget(node, findWidgetByName(node, 'decode_vae_name')) + }else{ + toggleWidget(node, findWidgetByName(node, 'decode_vae_name'), true) + } + } + if (widget.name === 'add_noise') { + let control_before_widget = findWidgetByName(node, 'control_before_generate') + let control_after_widget = findWidgetByName(node, 'control_after_generate') + if (widget.value === "disable") { + toggleWidget(node, findWidgetByName(node, 'seed')) + if(control_before_widget){ + control_before_widget.last_value = control_before_widget.value + control_before_widget.value = 'fixed' + toggleWidget(node, control_before_widget) + } + if(control_after_widget){ + control_after_widget.last_value = control_after_widget.value + control_after_widget.value = 'fixed' + toggleWidget(node, control_after_widget) + } + } else { + toggleWidget(node, findWidgetByName(node, 'seed'), true) + if(control_before_widget){ + if(control_before_widget?.last_value) control_before_widget.value = control_before_widget.last_value + toggleWidget(node, control_before_widget, true) + } + if(control_after_widget) { + if(control_after_widget?.last_value) control_after_widget.value = control_after_widget.last_value + toggleWidget(node, findWidgetByName(node, control_after_widget, true)) + } + } + updateNodeHeight(node) + } + if (widget.name === 'num_loras') { + let number_to_show = widget.value + 1 + for (let i = 0; i < number_to_show; i++) { + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_name'), true) + if (findWidgetByName(node, 'mode').value === "simple") { + 
toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_strength'), true) + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_model_strength')) + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_clip_strength')) + } else { + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_strength')) + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_model_strength'), true) + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_clip_strength'), true) + } + } + for (let i = number_to_show; i < 21; i++) { + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_name')) + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_strength')) + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_model_strength')) + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_clip_strength')) + } + updateNodeHeight(node) + } + if (widget.name === 'num_controlnet') { + let number_to_show = widget.value + 1 + for (let i = 0; i < number_to_show; i++) { + toggleWidget(node, findWidgetByName(node, 'controlnet_'+i), true) + toggleWidget(node, findWidgetByName(node, 'controlnet_'+i+'_strength'), true) + toggleWidget(node, findWidgetByName(node, 'scale_soft_weight_'+i),true) + if (findWidgetByName(node, 'mode').value === "simple") { + toggleWidget(node, findWidgetByName(node, 'start_percent_'+i)) + toggleWidget(node, findWidgetByName(node, 'end_percent_'+i)) + } else { + toggleWidget(node, findWidgetByName(node, 'start_percent_'+i),true) + toggleWidget(node, findWidgetByName(node, 'end_percent_'+i), true) + } + } + for (let i = number_to_show; i < 10; i++) { + toggleWidget(node, findWidgetByName(node, 'controlnet_'+i)) + toggleWidget(node, findWidgetByName(node, 'controlnet_'+i+'_strength')) + toggleWidget(node, findWidgetByName(node, 'start_percent_'+i)) + toggleWidget(node, findWidgetByName(node, 'end_percent_'+i)) + toggleWidget(node, findWidgetByName(node, 'scale_soft_weight_'+i)) + } + updateNodeHeight(node) + } + + if (widget.name === 'mode') { + switch (node.comfyClass) { + case 'easy loraStack': + for (let i = 0; i < (findWidgetByName(node, 'num_loras').value + 1); i++) { + if (widget.value === "simple") { + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_strength'), true) + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_model_strength')) + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_clip_strength')) + } else { + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_strength')) + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_model_strength'), true) + toggleWidget(node, findWidgetByName(node, 'lora_'+i+'_clip_strength'), true)} + } + updateNodeHeight(node) + break + case 'easy controlnetStack': + for (let i = 0; i < (findWidgetByName(node, 'num_controlnet').value + 1); i++) { + if (widget.value === "simple") { + toggleWidget(node, findWidgetByName(node, 'start_percent_'+i)) + toggleWidget(node, findWidgetByName(node, 'end_percent_'+i)) + } else { + toggleWidget(node, findWidgetByName(node, 'start_percent_' + i), true) + toggleWidget(node, findWidgetByName(node, 'end_percent_' + i), true) + } + } + updateNodeHeight(node) + break + case 'easy icLightApply': + if (widget.value === "Foreground") { + toggleWidget(node, findWidgetByName(node, 'lighting'), true) + toggleWidget(node, findWidgetByName(node, 'remove_bg'), true) + toggleWidget(node, findWidgetByName(node, 'source')) + } else { + toggleWidget(node, findWidgetByName(node, 'lighting')) + toggleWidget(node, findWidgetByName(node, 'source'), true) + toggleWidget(node, findWidgetByName(node, 'remove_bg')) + } + 
updateNodeHeight(node) + break + } + } + + if (widget.name === 'resolution') { + if(widget.value === "自定义 x 自定义"){ + widget.value = 'width x height (custom)' + } + if (widget.value === "自定义 x 自定义" || widget.value === 'width x height (custom)') { + toggleWidget(node, findWidgetByName(node, 'empty_latent_width'), true) + toggleWidget(node, findWidgetByName(node, 'empty_latent_height'), true) + } else { + toggleWidget(node, findWidgetByName(node, 'empty_latent_width'), false) + toggleWidget(node, findWidgetByName(node, 'empty_latent_height'), false) + } + } + if (widget.name === 'ratio') { + if (widget.value === "custom") { + toggleWidget(node, findWidgetByName(node, 'empty_latent_width'), true) + toggleWidget(node, findWidgetByName(node, 'empty_latent_height'), true) + } else { + toggleWidget(node, findWidgetByName(node, 'empty_latent_width'), false) + toggleWidget(node, findWidgetByName(node, 'empty_latent_height'), false) + } + } + if (widget.name === 'downscale_mode') { + const widget_names = ['block_number', 'downscale_factor', 'start_percent', 'end_percent', 'downscale_after_skip', 'downscale_method', 'upscale_method'] + if (widget.value === "None") widget_names.map(name=> toggleWidget(node, findWidgetByName(node, name))) + else if(widget.value == 'Auto') widget_names.map(name =>toggleWidget(node, findWidgetByName(node, name),name == 'block_number' ? true : false)) + else widget_names.map(name=> toggleWidget(node, findWidgetByName(node, name), true)) + updateNodeHeight(node) + } + + if (widget.name == 'range_mode'){ + if(widget.value == 'step'){ + toggleWidget(node, findWidgetByName(node, 'step'), true) + toggleWidget(node, findWidgetByName(node, 'num_steps')) + }else if(widget.value == 'num_steps'){ + toggleWidget(node, findWidgetByName(node, 'step')) + toggleWidget(node, findWidgetByName(node, 'num_steps'), true) + } + updateNodeHeight(node) + } + + if (widget.name === 'toggle') { + widget.type = 'toggle' + widget.options = {on: 'Enabled', off: 'Disabled'} + } + + if(widget.name == 'text_combine_mode'){ + if(widget.value == 'replace'){ + toggleWidget(node, findWidgetByName(node, 'replace_text'), true) + }else{ + toggleWidget(node, findWidgetByName(node, 'replace_text')) + } + updateNodeHeight(node) + } + + if (widget.name === 'conditioning_mode') { + if (["replace", "concat", "combine"].includes(widget.value)) { + toggleWidget(node, findWidgetByName(node, 'average_strength')) + toggleWidget(node, findWidgetByName(node, 'old_cond_start')) + toggleWidget(node, findWidgetByName(node, 'old_cond_end')) + toggleWidget(node, findWidgetByName(node, 'new_cond_start')) + toggleWidget(node, findWidgetByName(node, 'new_cond_end')) + } else if(widget.value == 'average'){ + toggleWidget(node, findWidgetByName(node, 'average_strength'), true) + toggleWidget(node, findWidgetByName(node, 'old_cond_start')) + toggleWidget(node, findWidgetByName(node, 'old_cond_end')) + toggleWidget(node, findWidgetByName(node, 'new_cond_start')) + toggleWidget(node, findWidgetByName(node, 'new_cond_end')) + }else if(widget.value == 'timestep'){ + toggleWidget(node, findWidgetByName(node, 'average_strength')) + toggleWidget(node, findWidgetByName(node, 'old_cond_start'), true) + toggleWidget(node, findWidgetByName(node, 'old_cond_end'), true) + toggleWidget(node, findWidgetByName(node, 'new_cond_start'), true) + toggleWidget(node, findWidgetByName(node, 'new_cond_end'), true) + } + } + + if (widget.name === 'preset') { + const normol_presets = [ + 'LIGHT - SD1.5 only (low strength)', + 'STANDARD (medium strength)', + 
'VIT-G (medium strength)', + 'PLUS (high strength)', 'PLUS FACE (portraits)', + 'FULL FACE - SD1.5 only (portraits stronger)', + ] + const faceid_presets = [ + 'FACEID', + 'FACEID PLUS - SD1.5 only', + 'FACEID PLUS KOLORS', + 'FACEID PLUS V2', + 'FACEID PORTRAIT (style transfer)', + 'FACEID PORTRAIT UNNORM - SDXL only (strong)' + ] + if(normol_presets.includes(widget.value)){ + toggleWidget(node, findWidgetByName(node, 'lora_strength')) + toggleWidget(node, findWidgetByName(node, 'provider')) + toggleWidget(node, findWidgetByName(node, 'weight_faceidv2')) + toggleWidget(node, findWidgetByName(node, 'weight_kolors')) + toggleWidget(node, findWidgetByName(node, 'use_tiled'), true) + let use_tiled = findWidgetByName(node, 'use_tiled') + if(use_tiled && use_tiled.value){ + toggleWidget(node, findWidgetByName(node, 'sharpening'), true) + }else { + toggleWidget(node, findWidgetByName(node, 'sharpening')) + } + + } + else if(faceid_presets.includes(widget.value)){ + toggleWidget(node, findWidgetByName(node, 'weight_faceidv2'), ['FACEID PLUS V2','FACEID PLUS KOLORS'].includes(widget.value) ? true : false); + toggleWidget(node, findWidgetByName(node, 'weight_kolors'), ['FACEID PLUS KOLORS'].includes(widget.value) ? true : false); + if(['FACEID PLUS KOLORS','FACEID PORTRAIT (style transfer)','FACEID PORTRAIT UNNORM - SDXL only (strong)'].includes(widget.value)){ + toggleWidget(node, findWidgetByName(node, 'lora_strength'), false) + } + else{ + toggleWidget(node, findWidgetByName(node, 'lora_strength'), true) + } + toggleWidget(node, findWidgetByName(node, 'provider'), true) + toggleWidget(node, findWidgetByName(node, 'use_tiled')) + toggleWidget(node, findWidgetByName(node, 'sharpening')) + } + updateNodeHeight(node) + } + + if (widget.name === 'use_tiled') { + if(widget.value) + toggleWidget(node, findWidgetByName(node, 'sharpening'), true) + else + toggleWidget(node, findWidgetByName(node, 'sharpening')) + updateNodeHeight(node) + } + + if (widget.name === 'num_embeds') { + let number_to_show = widget.value + 1 + for (let i = 0; i < number_to_show; i++) { + toggleWidget(node, findWidgetByName(node, 'weight'+i), true) + } + for (let i = number_to_show; i < 6; i++) { + toggleWidget(node, findWidgetByName(node, 'weight'+i)) + } + updateNodeHeight(node) + } + + if (widget.name === 'guider'){ + switch (widget.value){ + case 'Basic': + toggleWidget(node, findWidgetByName(node, 'cfg')) + toggleWidget(node, findWidgetByName(node, 'cfg_negative')) + break + case 'CFG': + toggleWidget(node, findWidgetByName(node, 'cfg'),true) + toggleWidget(node, findWidgetByName(node, 'cfg_negative')) + break + case 'IP2P+DualCFG': + case 'DualCFG': + toggleWidget(node, findWidgetByName(node, 'cfg'),true) + toggleWidget(node, findWidgetByName(node, 'cfg_negative'), true) + break + + } + updateNodeHeight(node) + } + + if (widget.name === 'scheduler'){ + if (['karrasADV','exponentialADV','polyExponential'].includes(widget.value)){ + toggleWidget(node, findWidgetByName(node, 'sigma_max'), true) + toggleWidget(node, findWidgetByName(node, 'sigma_min'), true) + toggleWidget(node, findWidgetByName(node, 'denoise')) + toggleWidget(node, findWidgetByName(node, 'beta_d')) + toggleWidget(node, findWidgetByName(node, 'beta_min')) + toggleWidget(node, findWidgetByName(node, 'eps_s')) + toggleWidget(node, findWidgetByName(node, 'coeff')) + if(widget.value != 'exponentialADV'){ + toggleWidget(node, findWidgetByName(node, 'rho'), true) + }else{ + toggleWidget(node, findWidgetByName(node, 'rho')) + } + }else if(widget.value == 'vp'){ + 
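// 'vp' (variance-preserving) scheduler: the sigma_max/sigma_min/rho/denoise/coeff
// widgets are hidden and the VP-specific beta_d / beta_min / eps_s widgets are shown instead.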
toggleWidget(node, findWidgetByName(node, 'sigma_max')) + toggleWidget(node, findWidgetByName(node, 'sigma_min')) + toggleWidget(node, findWidgetByName(node, 'denoise')) + toggleWidget(node, findWidgetByName(node, 'rho')) + toggleWidget(node, findWidgetByName(node, 'beta_d'),true) + toggleWidget(node, findWidgetByName(node, 'beta_min'),true) + toggleWidget(node, findWidgetByName(node, 'eps_s'),true) + toggleWidget(node, findWidgetByName(node, 'coeff')) + } + else{ + toggleWidget(node, findWidgetByName(node, 'denoise'),true) + toggleWidget(node, findWidgetByName(node, 'sigma_max')) + toggleWidget(node, findWidgetByName(node, 'sigma_min')) + toggleWidget(node, findWidgetByName(node, 'beta_d')) + toggleWidget(node, findWidgetByName(node, 'beta_min')) + toggleWidget(node, findWidgetByName(node, 'eps_s')) + toggleWidget(node, findWidgetByName(node, 'rho')) + if(widget.value == 'gits') toggleWidget(node, findWidgetByName(node, 'coeff'), true) + else toggleWidget(node, findWidgetByName(node, 'coeff')) + } + updateNodeHeight(node) + } + + if(widget.name === 'inpaint_mode'){ + switch (widget.value){ + case 'normal': + case 'fooocus_inpaint': + toggleWidget(node, findWidgetByName(node, 'dtype')) + toggleWidget(node, findWidgetByName(node, 'fitting')) + toggleWidget(node, findWidgetByName(node, 'function')) + toggleWidget(node, findWidgetByName(node, 'scale')) + toggleWidget(node, findWidgetByName(node, 'start_at')) + toggleWidget(node, findWidgetByName(node, 'end_at')) + break + case 'brushnet_random': + case 'brushnet_segmentation': + toggleWidget(node, findWidgetByName(node, 'dtype'), true) + toggleWidget(node, findWidgetByName(node, 'fitting')) + toggleWidget(node, findWidgetByName(node, 'function')) + toggleWidget(node, findWidgetByName(node, 'scale'), true) + toggleWidget(node, findWidgetByName(node, 'start_at'), true) + toggleWidget(node, findWidgetByName(node, 'end_at'), true) + break + case 'powerpaint': + toggleWidget(node, findWidgetByName(node, 'dtype'), true) + toggleWidget(node, findWidgetByName(node, 'fitting'),true) + toggleWidget(node, findWidgetByName(node, 'function'),true) + toggleWidget(node, findWidgetByName(node, 'scale'), true) + toggleWidget(node, findWidgetByName(node, 'start_at'), true) + toggleWidget(node, findWidgetByName(node, 'end_at'), true) + break + } + updateNodeHeight(node) + } + + if(widget.name == 't5_type'){ + switch (widget.value){ + case 'sd3': + toggleWidget(node, findWidgetByName(node, 'clip_name'), true) + toggleWidget(node, findWidgetByName(node, 'padding'), true) + toggleWidget(node, findWidgetByName(node, 't5_name')) + toggleWidget(node, findWidgetByName(node, 'device')) + toggleWidget(node, findWidgetByName(node, 'dtype')) + break + case 't5v11': + toggleWidget(node, findWidgetByName(node, 'clip_name')) + toggleWidget(node, findWidgetByName(node, 'padding')) + toggleWidget(node, findWidgetByName(node, 't5_name'),true) + toggleWidget(node, findWidgetByName(node, 'device'),true) + toggleWidget(node, findWidgetByName(node, 'dtype'),true) + } + updateNodeHeight(node) + } + + if(widget.name == 'rem_mode'){ + switch (widget.value){ + case 'Inspyrenet': + toggleWidget(node, findWidgetByName(node, 'torchscript_jit'), true) + break + default: + toggleWidget(node, findWidgetByName(node, 'torchscript_jit'), false) + break + } + } +} + +function widgetLogic2(node, widget) { + if (widget.name === 'sampler_name') { + const widget_names = ['eta','s_noise','upscale_ratio','start_step','end_step','upscale_n_step','unsharp_kernel_size','unsharp_sigma','unsharp_strength'] 
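// widgetLogic2 backs the 'easy preSamplingSdTurbo' node (see getSetters below): the
// eta/s_noise and upscale/unsharp widgets listed above are toggled whenever
// sampler_name changes, based on whether it is one of the ancestral / SDE / LCM
// samplers. For reference, the show / hide forms used by similar handlers in this
// file (e.g. the downscale_mode handler) look like:
//   widget_names.map(name => toggleWidget(node, findWidgetByName(node, name), true))  // show all
//   widget_names.map(name => toggleWidget(node, findWidgetByName(node, name)))        // hide all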
+ if (["euler_ancestral", "dpmpp_2s_ancestral", "dpmpp_2m_sde", "lcm"].includes(widget.value)) { + widget_names.map(name=> toggleWidget(node, findWidgetByName(node, name)), true) + } else { + widget_names.map(name=> toggleWidget(node, findWidgetByName(node, name))) + } + updateNodeHeight(node) + } +} + +function widgetLogic3(node, widget){ + if (widget.name === 'target_parameter') { + if (node.comfyClass == 'easy XYInputs: Steps'){ + switch (widget.value){ + case "steps": + toggleWidget(node, findWidgetByName(node, 'first_step'), true) + toggleWidget(node, findWidgetByName(node, 'last_step'), true) + toggleWidget(node, findWidgetByName(node, 'first_start_step')) + toggleWidget(node, findWidgetByName(node, 'last_start_step')) + toggleWidget(node, findWidgetByName(node, 'first_end_step')) + toggleWidget(node, findWidgetByName(node, 'last_end_step')) + break + case "start_at_step": + toggleWidget(node, findWidgetByName(node, 'first_step')) + toggleWidget(node, findWidgetByName(node, 'last_step')) + toggleWidget(node, findWidgetByName(node, 'first_start_step'), true) + toggleWidget(node, findWidgetByName(node, 'last_start_step'), true) + toggleWidget(node, findWidgetByName(node, 'first_end_step')) + toggleWidget(node, findWidgetByName(node, 'last_end_step')) + break + case "end_at_step": + toggleWidget(node, findWidgetByName(node, 'first_step')) + toggleWidget(node, findWidgetByName(node, 'last_step')) + toggleWidget(node, findWidgetByName(node, 'first_start_step')) + toggleWidget(node, findWidgetByName(node, 'last_start_step')) + toggleWidget(node, findWidgetByName(node, 'first_end_step'),true) + toggleWidget(node, findWidgetByName(node, 'last_end_step'),true) + break + } + } + if (node.comfyClass == 'easy XYInputs: Sampler/Scheduler'){ + let number_to_show = findWidgetByName(node, 'input_count').value + 1 + for (let i = 0; i < number_to_show; i++) { + switch (widget.value) { + case "sampler": + toggleWidget(node, findWidgetByName(node, 'sampler_'+i), true) + toggleWidget(node, findWidgetByName(node, 'scheduler_'+i)) + break + case "scheduler": + toggleWidget(node, findWidgetByName(node, 'scheduler_'+i), true) + toggleWidget(node, findWidgetByName(node, 'sampler_'+i)) + break + default: + toggleWidget(node, findWidgetByName(node, 'sampler_'+i), true) + toggleWidget(node, findWidgetByName(node, 'scheduler_'+i), true) + break + } + } + updateNodeHeight(node) + } + if (node.comfyClass == 'easy XYInputs: ControlNet'){ + switch (widget.value){ + case "strength": + toggleWidget(node, findWidgetByName(node, 'first_strength'), true) + toggleWidget(node, findWidgetByName(node, 'last_strength'), true) + toggleWidget(node, findWidgetByName(node, 'strength')) + toggleWidget(node, findWidgetByName(node, 'start_percent'), true) + toggleWidget(node, findWidgetByName(node, 'end_percent'), true) + toggleWidget(node, findWidgetByName(node, 'first_start_percent')) + toggleWidget(node, findWidgetByName(node, 'last_start_percent')) + toggleWidget(node, findWidgetByName(node, 'first_end_percent')) + toggleWidget(node, findWidgetByName(node, 'last_end_percent')) + break + case "start_percent": + toggleWidget(node, findWidgetByName(node, 'first_strength')) + toggleWidget(node, findWidgetByName(node, 'last_strength')) + toggleWidget(node, findWidgetByName(node, 'strength'), true) + toggleWidget(node, findWidgetByName(node, 'start_percent')) + toggleWidget(node, findWidgetByName(node, 'end_percent'), true) + toggleWidget(node, findWidgetByName(node, 'first_start_percent'), true) + toggleWidget(node, 
findWidgetByName(node, 'last_start_percent'), true) + toggleWidget(node, findWidgetByName(node, 'first_end_percent')) + toggleWidget(node, findWidgetByName(node, 'last_end_percent')) + break + case "end_percent": + toggleWidget(node, findWidgetByName(node, 'first_strength')) + toggleWidget(node, findWidgetByName(node, 'last_strength')) + toggleWidget(node, findWidgetByName(node, 'strength'), true) + toggleWidget(node, findWidgetByName(node, 'start_percent'), true) + toggleWidget(node, findWidgetByName(node, 'end_percent')) + toggleWidget(node, findWidgetByName(node, 'first_start_percent')) + toggleWidget(node, findWidgetByName(node, 'last_start_percent')) + toggleWidget(node, findWidgetByName(node, 'first_end_percent'), true) + toggleWidget(node, findWidgetByName(node, 'last_end_percent'), true) + break + } + updateNodeHeight(node) + } + + } + if (node.comfyClass == 'easy XYInputs: PromptSR'){ + let number_to_show = findWidgetByName(node, 'replace_count').value + 1 + for (let i = 0; i < number_to_show; i++) { + toggleWidget(node, findWidgetByName(node, 'replace_'+i), true) + } + for (let i = number_to_show; i < 31; i++) { + toggleWidget(node, findWidgetByName(node, 'replace_'+i)) + } + updateNodeHeight(node) + } + + if(widget.name == 'input_count'){ + let number_to_show = widget.value + 1 + for (let i = 0; i < number_to_show; i++) { + if (findWidgetByName(node, 'target_parameter').value === "sampler") { + toggleWidget(node, findWidgetByName(node, 'sampler_'+i), true) + toggleWidget(node, findWidgetByName(node, 'scheduler_'+i)) + } + else if (findWidgetByName(node, 'target_parameter').value === "scheduler") { + toggleWidget(node, findWidgetByName(node, 'scheduler_'+i), true) + toggleWidget(node, findWidgetByName(node, 'sampler_'+i)) + } else { + toggleWidget(node, findWidgetByName(node, 'sampler_'+i), true) + toggleWidget(node, findWidgetByName(node, 'scheduler_'+i), true) + } + } + for (let i = number_to_show; i < 31; i++) { + toggleWidget(node, findWidgetByName(node, 'sampler_'+i)) + toggleWidget(node, findWidgetByName(node, 'scheduler_'+i)) + } + updateNodeHeight(node) + } + if (widget.name === 'lora_count') { + let number_to_show = widget.value + 1 + const isWeight = findWidgetByName(node, 'input_mode').value.indexOf("Weights") == -1 + for (let i = 0; i < number_to_show; i++) { + toggleWidget(node, findWidgetByName(node, 'lora_name_'+i), true) + if (isWeight) { + toggleWidget(node, findWidgetByName(node, 'lora_name_'+i), true) + toggleWidget(node, findWidgetByName(node, 'model_str_'+i)) + toggleWidget(node, findWidgetByName(node, 'clip_str_'+i)) + } else { + toggleWidget(node, findWidgetByName(node, 'lora_name_'+i), true) + toggleWidget(node, findWidgetByName(node, 'model_str_'+i),true) + toggleWidget(node, findWidgetByName(node, 'clip_str_'+i), true) + } + } + for (let i = number_to_show; i < 11; i++) { + toggleWidget(node, findWidgetByName(node, 'lora_name_'+i)) + toggleWidget(node, findWidgetByName(node, 'model_str_'+i)) + toggleWidget(node, findWidgetByName(node, 'clip_str_'+i)) + } + updateNodeHeight(node) + } + if (widget.name === 'ckpt_count') { + let number_to_show = widget.value + 1 + const hasClipSkip = findWidgetByName(node, 'input_mode').value.indexOf("ClipSkip") != -1 + const hasVae = findWidgetByName(node, 'input_mode').value.indexOf("VAE") != -1 + for (let i = 0; i < number_to_show; i++) { + toggleWidget(node, findWidgetByName(node, 'ckpt_name_'+i), true) + if (hasClipSkip && hasVae) { + toggleWidget(node, findWidgetByName(node, 'clip_skip_'+i), true) + toggleWidget(node, 
findWidgetByName(node, 'vae_name_'+i), true) + } else if (hasVae){ + toggleWidget(node, findWidgetByName(node, 'clip_skip_' + i)) + toggleWidget(node, findWidgetByName(node, 'vae_name_' + i), true) + }else{ + toggleWidget(node, findWidgetByName(node, 'clip_skip_' + i)) + toggleWidget(node, findWidgetByName(node, 'vae_name_' + i)) + } + } + for (let i = number_to_show; i < 11; i++) { + toggleWidget(node, findWidgetByName(node, 'ckpt_name_'+i)) + toggleWidget(node, findWidgetByName(node, 'clip_skip_'+i)) + toggleWidget(node, findWidgetByName(node, 'vae_name_'+i)) + } + updateNodeHeight(node) + } + + if (widget.name === 'input_mode') { + if(node.comfyClass == 'easy XYInputs: Lora'){ + let number_to_show = findWidgetByName(node, 'lora_count').value + 1 + const hasWeight = widget.value.indexOf("Weights") != -1 + for (let i = 0; i < number_to_show; i++) { + toggleWidget(node, findWidgetByName(node, 'lora_name_'+i), true) + if (hasWeight) { + toggleWidget(node, findWidgetByName(node, 'model_str_'+i), true) + toggleWidget(node, findWidgetByName(node, 'clip_str_'+i), true) + } else { + toggleWidget(node, findWidgetByName(node, 'model_str_' + i)) + toggleWidget(node, findWidgetByName(node, 'clip_str_' + i)) + } + } + if(hasWeight){ + toggleWidget(node, findWidgetByName(node, 'model_strength')) + toggleWidget(node, findWidgetByName(node, 'clip_strength')) + }else{ + toggleWidget(node, findWidgetByName(node, 'model_strength'), true) + toggleWidget(node, findWidgetByName(node, 'clip_strength'),true) + } + } + else if(node.comfyClass == 'easy XYInputs: Checkpoint'){ + let number_to_show = findWidgetByName(node, 'ckpt_count').value + 1 + const hasClipSkip = widget.value.indexOf("ClipSkip") != -1 + const hasVae = widget.value.indexOf("VAE") != -1 + for (let i = 0; i < number_to_show; i++) { + toggleWidget(node, findWidgetByName(node, 'ckpt_name_'+i), true) + if (hasClipSkip && hasVae) { + toggleWidget(node, findWidgetByName(node, 'clip_skip_'+i), true) + toggleWidget(node, findWidgetByName(node, 'vae_name_'+i), true) + } else if (hasClipSkip){ + toggleWidget(node, findWidgetByName(node, 'clip_skip_' + i), true) + toggleWidget(node, findWidgetByName(node, 'vae_name_' + i)) + }else{ + toggleWidget(node, findWidgetByName(node, 'clip_skip_' + i)) + toggleWidget(node, findWidgetByName(node, 'vae_name_' + i)) + } + } + } + + updateNodeHeight(node) + } + + // if(widget.name == 'replace_count'){ + // let number_to_show = widget.value + 1 + // for (let i = 0; i < number_to_show; i++) { + // toggleWidget(node, findWidgetByName(node, 'replace_'+i), true) + // } + // for (let i = number_to_show; i < 31; i++) { + // toggleWidget(node, findWidgetByName(node, 'replace_'+i)) + // } + // updateNodeHeight(node) + // } +} + +app.registerExtension({ + name: "comfy.easyUse.dynamicWidgets", + + nodeCreated(node) { + switch (node.comfyClass){ + case "easy fullLoader": + case "easy a1111Loader": + case "easy fluxLoader": + case "easy comfyLoader": + case "easy cascadeLoader": + case "easy svdLoader": + case "easy dynamiCrafterLoader": + case "easy hunyuanDiTLoader": + case "easy pixArtLoader": + case "easy kolorsLoader": + case "easy loraStack": + case "easy controlnetStack": + case "easy latentNoisy": + case "easy preSampling": + case "easy preSamplingAdvanced": + case "easy preSamplingNoiseIn": + case "easy preSamplingCustom": + case "easy preSamplingSdTurbo": + case "easy preSamplingCascade": + case "easy preSamplingLayerDiffusion": + case "easy fullkSampler": + case "easy kSampler": + case "easy kSamplerSDTurbo": + case 
"easy kSamplerTiled": + case "easy kSamplerLayerDiffusion": + case "easy kSamplerInpainting": + case "easy kSamplerDownscaleUnet": + case "easy fullCascadeKSampler": + case "easy cascadeKSampler": + case "easy hiresFix": + case "easy detailerFix": + case "easy imageRemBg": + case "easy imageColorMatch": + case "easy imageDetailTransfer": + case "easy loadImageBase64": + case "easy XYInputs: Steps": + case "easy XYInputs: Sampler/Scheduler": + case 'easy XYInputs: Checkpoint': + case "easy XYInputs: Lora": + case "easy XYInputs: PromptSR": + case "easy XYInputs: ControlNet": + case "easy rangeInt": + case "easy rangeFloat": + case 'easy latentCompositeMaskedWithCond': + case 'easy pipeEdit': + case 'easy icLightApply': + case 'easy ipadapterApply': + case 'easy ipadapterApplyADV': + case 'easy ipadapterApplyFaceIDKolors': + case 'easy ipadapterApplyEncoder': + case 'easy applyInpaint': + getSetters(node) + break + case "easy wildcards": + const wildcard_text_widget_index = node.widgets.findIndex((w) => w.name == 'text'); + const wildcard_text_widget = node.widgets[wildcard_text_widget_index]; + + // lora selector, wildcard selector + let combo_id = 1; + + Object.defineProperty(node.widgets[combo_id], "value", { + set: (value) => { + const stackTrace = new Error().stack; + if(stackTrace.includes('inner_value_change')) { + if(value != "Select the LoRA to add to the text") { + let lora_name = value; + if (lora_name.endsWith('.safetensors')) { + lora_name = lora_name.slice(0, -12); + } + + wildcard_text_widget.value += ``; + } + } + }, + get: () => { return "Select the LoRA to add to the text"; } + }); + + Object.defineProperty(node.widgets[combo_id+1], "value", { + set: (value) => { + const stackTrace = new Error().stack; + if(stackTrace.includes('inner_value_change')) { + if(value != "Select the Wildcard to add to the text") { + if(wildcard_text_widget.value != '') + wildcard_text_widget.value += ', ' + + wildcard_text_widget.value += value; + } + } + }, + get: () => { return "Select the Wildcard to add to the text"; } + }); + + // Preventing validation errors from occurring in any situation. + node.widgets[combo_id].serializeValue = () => { return "Select the LoRA to add to the text"; } + node.widgets[combo_id+1].serializeValue = () => { return "Select the Wildcard to add to the text"; } + break + case "easy detailerFix": + const textarea_widget_index = node.widgets.findIndex((w) => w.type === "customtext"); + if(textarea_widget_index == -1) return + node.widgets[textarea_widget_index].dynamicPrompts = false + node.widgets[textarea_widget_index].inputEl.placeholder = "wildcard spec: if kept empty, this option will be ignored"; + node.widgets[textarea_widget_index].serializeValue = () => {return node.widgets[textarea_widget_index].value}; + break + case "easy XYInputs: ModelMergeBlocks": + let preset_i = 3; + let vector_i = 4; + let file_i = 5; + node._value = "Preset"; + + let valuesWidget = node.widgets[vector_i] + Object.defineProperty(node.widgets[preset_i], "value", { + set: (value) => { + const stackTrace = new Error().stack; + if(stackTrace.includes('inner_value_change')) { + if(value != "Preset") { + if(!value.startsWith('@') && valuesWidget.value) + valuesWidget.value += "\n"; + if(value.startsWith('@')) { + let spec = value.split(':')[1]; + var n; + var sub_n = null; + var block = null; + + if(isNaN(spec)) { + let sub_spec = spec.split(','); + + if(sub_spec.length != 3) { + valuesWidget = '!! 
SPEC ERROR !!'; + node._value = ''; + return; + } + + n = parseInt(sub_spec[0].trim()); + sub_n = parseInt(sub_spec[1].trim()); + block = parseInt(sub_spec[2].trim()); + } + else { + n = parseInt(spec.trim()); + } + + valuesWidget.value = ""; + if(sub_n == null) { + for(let i=1; i<=n; i++) { + var temp = "1,1"; + for(let j=1; j<=n; j++) { + if(temp!='') + temp += ','; + if(j==i) + temp += '1'; + else + temp += '0'; + } + temp += ',1; '; + + valuesWidget.value += `B${i}:${temp}\n`; + } + } + else { + for(let i=1; i<=sub_n; i++) { + var temp = ""; + for(let j=1; j<=n; j++) { + if(temp!='') + temp += ','; + + if(block!=j) + temp += '0'; + else { + temp += ' '; + for(let k=1; k<=sub_n; k++) { + if(k==i) + temp += '1 '; + else + temp += '0 '; + } + } + } + + valuesWidget.value += `B${block}.SUB${i}:${temp}\n`; + } + } + } + else { + valuesWidget.value += `${value}; `; + } + // if(node.widgets_values) { + // valuesWidget.value = node.widgets[preset_i].value+ `; `; + // } + } + } + + node._value = value; + }, + get: () => { + return node._value; + } + }); + + const cb = node.callback; + valuesWidget.callback = function () { + if (cb) { + return cb.apply(this, arguments); + } + }; + + // upload .csv + async function uploadFile(file) { + try { + const body = new FormData(); + body.append("csv", file); + const resp = await api.fetchApi("/easyuse/upload/csv", { + method: "POST", + body, + }); + + if (resp.status === 200) { + const data = await resp.json(); + node.widgets[vector_i].value = data + } else { + alert(resp.status + " - " + resp.statusText); + } + } catch (error) { + alert(error); + } + } + + const fileInput = document.createElement("input"); + Object.assign(fileInput, { + type: "file", + accept: "text/csv", + style: "show: none", + onchange: async (event) => { + if (fileInput.files.length) { + await uploadFile(fileInput.files[0], true); + event.target.value = '' + } + }, + }); + document.body.append(fileInput); + + const name = "choose .csv file into values" + let uploadWidget = node.addWidget("button", name, "csv", () => { + fileInput.click(); + }); + uploadWidget.label = name; + uploadWidget.serialize = false; + + break + } + + }, + async beforeRegisterNodeDef(nodeType, nodeData, app) { + function addText(arr_text) { + var text = ''; + for (let i = 0; i < arr_text.length; i++) { + text += arr_text[i]; + } + return text + } + + if (["easy showSpentTime"].includes(nodeData.name)) { + function populate(text) { + if (this.widgets) { + const pos = this.widgets.findIndex((w) => w.name === "spent_time"); + if (pos !== -1 && this.widgets[pos]) { + const w = this.widgets[pos] + console.log(text) + w.value = text; + } + } + } + + // When the node is executed we will be sent the input text, show this in the widget + const onExecuted = nodeType.prototype.onExecuted; + nodeType.prototype.onExecuted = function (message) { + onExecuted?.apply(this, arguments); + const text = addText(message.text) + populate.call(this, text); + }; + } + + if (["easy showLoaderSettingsNames"].includes(nodeData.name)) { + function populate(text) { + if (this.widgets) { + const pos = this.widgets.findIndex((w) => w.name === "names"); + if (pos !== -1 && this.widgets[pos]) { + const w = this.widgets[pos] + w.value = text; + } + } + } + + // When the node is executed we will be sent the input text, show this in the widget + const onExecuted = nodeType.prototype.onExecuted; + nodeType.prototype.onExecuted = function (message) { + onExecuted?.apply(this, arguments); + const text = addText(message.text) + populate.call(this, 
text); + }; + } + + if (loaderNodes.includes(nodeData.name)) { + function populate(text, type = 'positive') { + if (this.widgets) { + const pos = this.widgets.findIndex((w) => w.name === type + "_prompt"); + const className = "comfy-multiline-input wildcard_" + type + '_' + this.id.toString() + if (pos == -1 && text) { + const inputEl = document.createElement("textarea"); + inputEl.className = className; + inputEl.placeholder = "Wildcard Prompt (" + type + ")" + const widget = this.addDOMWidget(type + "_prompt", "customtext", inputEl, { + getValue() { + return inputEl.value; + }, + setValue(v) { + inputEl.value = v; + }, + serialize: false, + }); + widget.inputEl = inputEl; + widget.inputEl.readOnly = true + inputEl.addEventListener("input", () => { + widget.callback?.(widget.value); + }); + widget.value = text; + } else if (this.widgets[pos]) { + if (text) { + const w = this.widgets[pos] + w.value = text; + } else { + this.widgets.splice(pos, 1); + const element = document.getElementsByClassName(className) + if (element && element[0]) element[0].remove() + } + } + } + } + + const onExecuted = nodeType.prototype.onExecuted; + nodeType.prototype.onExecuted = function (message) { + onExecuted?.apply(this, arguments); + const positive = addText(message.positive) + const negative = addText(message.negative) + populate.call(this, positive, "positive"); + populate.call(this, negative, "negative"); + }; + } + + if(["easy sv3dLoader"].includes(nodeData.name)){ + function changeSchedulerText(mode, batch_size, inputEl) { + console.log(mode) + switch (mode){ + case 'azimuth': + inputEl.readOnly = true + inputEl.style.opacity = 0.6 + return `0:(0.0,0.0)` + (batch_size > 1 ? `\n${batch_size-1}:(360.0,0.0)` : '') + case 'elevation': + inputEl.readOnly = true + inputEl.style.opacity = 0.6 + return `0:(-90.0,0.0)` + (batch_size > 1 ? `\n${batch_size-1}:(90.0,0.0)` : '') + case 'custom': + inputEl.readOnly = false + inputEl.style.opacity = 1 + return `0:(0.0,0.0)\n9:(180.0,0.0)\n20:(360.0,0.0)` + } + } + + + const onNodeCreated = nodeType.prototype.onNodeCreated; + nodeType.prototype.onNodeCreated = async function () { + onNodeCreated ? onNodeCreated.apply(this, []) : undefined; + const easing_mode_widget = this.widgets.find(w => w.name == 'easing_mode') + const batch_size = this.widgets.find(w => w.name == 'batch_size') + const scheduler = this.widgets.find(w => w.name == 'scheduler') + setTimeout(_=>{ + if(!scheduler.value) scheduler.value = changeSchedulerText(easing_mode_widget.value, batch_size.value, scheduler.inputEl) + },1) + easing_mode_widget.callback = value=>{ + scheduler.value = changeSchedulerText(value, batch_size.value, scheduler.inputEl) + } + batch_size.callback = value =>{ + scheduler.value = changeSchedulerText(easing_mode_widget.value, value, scheduler.inputEl) + } + } + } + + if (seedNodes.includes(nodeData.name)) { + const onNodeCreated = nodeType.prototype.onNodeCreated; + nodeType.prototype.onNodeCreated = async function () { + onNodeCreated ? 
onNodeCreated.apply(this, []) : undefined; + const seed_widget = this.widgets.find(w => ['seed_num','seed'].includes(w.name)) + const seed_control = this.widgets.find(w=> ['control_before_generate','control_after_generate'].includes(w.name)) + if(nodeData.name == 'easy seed'){ + const randomSeedButton = this.addWidget("button", "🎲 Manual Random Seed", null, _=>{ + if(seed_control.value != 'fixed') seed_control.value = 'fixed' + seed_widget.value = Math.floor(Math.random() * 1125899906842624) + app.queuePrompt(0, 1) + },{ serialize:false}) + seed_widget.linkedWidgets = [randomSeedButton, seed_control]; + } + } + const onAdded = nodeType.prototype.onAdded; + nodeType.prototype.onAdded = async function () { + onAdded ? onAdded.apply(this, []) : undefined; + const seed_widget = this.widgets.find(w => ['seed_num','seed'].includes(w.name)) + const seed_control = this.widgets.find(w=> ['control_before_generate','control_after_generate'].includes(w.name)) + setTimeout(_=>{ + if(seed_control.name == 'control_before_generate' && seed_widget.value === 0) { + seed_widget.value = Math.floor(Math.random() * 1125899906842624) + } + },1) + } + } + + if (nodeData.name == 'easy imageInsetCrop') { + function setWidgetStep(a) { + const measurementWidget = a.widgets[0] + for (let i = 1; i <= 4; i++) { + if (measurementWidget.value === 'Pixels') { + a.widgets[i].options.step = 80; + a.widgets[i].options.max = 8192; + } else { + a.widgets[i].options.step = 10; + a.widgets[i].options.max = 99; + } + } + } + + nodeType.prototype.onAdded = async function (graph) { + const measurementWidget = this.widgets[0]; + let callback = measurementWidget.callback; + measurementWidget.callback = (...args) => { + setWidgetStep(this); + callback && callback.apply(measurementWidget, [...args]); + }; + setTimeout(_=>{ + setWidgetStep(this); + },1) + } + } + + if(['easy showAnything', 'easy showTensorShape', 'easy imageInterrogator'].includes(nodeData.name)){ + function populate(text) { + if (this.widgets) { + const pos = this.widgets.findIndex((w) => w.name === "text"); + if (pos !== -1) { + for (let i = pos; i < this.widgets.length; i++) { + this.widgets[i].onRemove?.(); + } + this.widgets.length = pos; + } + } + + for (const list of text) { + const w = ComfyWidgets["STRING"](this, "text", ["STRING", { multiline: true }], app).widget; + w.inputEl.readOnly = true; + w.inputEl.style.opacity = 0.6; + w.value = list; + } + + requestAnimationFrame(() => { + const sz = this.computeSize(); + if (sz[0] < this.size[0]) { + sz[0] = this.size[0]; + } + if (sz[1] < this.size[1]) { + sz[1] = this.size[1]; + } + this.onResize?.(sz); + app.graph.setDirtyCanvas(true, false); + }); + } + + // When the node is executed we will be sent the input text, display this in the widget + const onExecuted = nodeType.prototype.onExecuted; + nodeType.prototype.onExecuted = function (message) { + onExecuted?.apply(this, arguments); + populate.call(this, message.text); + }; + + if(!['easy imageInterrogator'].includes(nodeData.name)) { + const onConfigure = nodeType.prototype.onConfigure; + nodeType.prototype.onConfigure = function () { + onConfigure?.apply(this, arguments); + if (this.widgets_values?.length) { + populate.call(this, this.widgets_values); + } + }; + } + } + + if(nodeData.name == 'easy convertAnything'){ + const onNodeCreated = nodeType.prototype.onNodeCreated; + nodeType.prototype.onNodeCreated = async function () { + onNodeCreated ? 
onNodeCreated.apply(this, []) : undefined; + setTimeout(_=>{ + const type_control = this.widgets[this.widgets.findIndex((w) => w.name === "output_type")] + let _this = this + type_control.callback = async() => { + _this.outputs[0].type = (type_control.value).toUpperCase() + _this.outputs[0].name = type_control.value + _this.outputs[0].label = type_control.value + } + },300) + + } + } + + if (nodeData.name == 'easy promptLine') { + const onAdded = nodeType.prototype.onAdded; + nodeType.prototype.onAdded = async function () { + onAdded ? onAdded.apply(this, []) : undefined; + let prompt_widget = this.widgets.find(w => w.name == "prompt") + const button = this.addWidget("button", "get values from COMBO link", '', () => { + const output_link = this.outputs[1]?.links?.length>0 ? this.outputs[1]['links'][0] : null + const all_nodes = app.graph._nodes + const node = all_nodes.find(cate=> cate.inputs?.find(input=> input.link == output_link)) + if(!output_link || !node){ + toast.error($t('No COMBO link'), 3000) + return + } + else{ + const input = node.inputs.find(input=> input.link == output_link) + const widget_name = input.widget.name + const widgets = node.widgets + const widget = widgets.find(cate=> cate.name == widget_name) + let values = widget?.options.values || null + if(values){ + values = values.join('\n') + prompt_widget.value = values + } + } + }, { + serialize: false + }) + } + } + } +}); + + +const getSetWidgets = ['rescale_after_model', 'rescale', + 'lora_name', 'lora1_name', 'lora2_name', 'lora3_name', + 'refiner_lora1_name', 'refiner_lora2_name', 'upscale_method', + 'image_output', 'add_noise', 'info', 'sampler_name', + 'ckpt_B_name', 'ckpt_C_name', 'save_model', 'refiner_ckpt_name', + 'num_loras', 'num_controlnet', 'mode', 'toggle', 'resolution', 'ratio', 'target_parameter', + 'input_count', 'replace_count', 'downscale_mode', 'range_mode','text_combine_mode', 'input_mode', + 'lora_count','ckpt_count', 'conditioning_mode', 'preset', 'use_tiled', 'use_batch', 'num_embeds', + "easing_mode", "guider", "scheduler", "inpaint_mode", 't5_type', 'rem_mode' +] + +function getSetters(node) { + if (node.widgets) + for (const w of node.widgets) { + if (getSetWidgets.includes(w.name)) { + if(node.comfyClass.indexOf("easy XYInputs:") != -1) widgetLogic3(node, w) + else if(w.name == 'sampler_name' && node.comfyClass == 'easy preSamplingSdTurbo') widgetLogic2(node, w); + else widgetLogic(node, w); + let widgetValue = w.value; + + // Define getters and setters for widget values + Object.defineProperty(w, 'value', { + get() { + return widgetValue; + }, + set(newVal) { + if (newVal !== widgetValue) { + widgetValue = newVal; + if(node.comfyClass.indexOf("easy XYInputs:") != -1) widgetLogic3(node, w) + else if(w.name == 'sampler_name' && node.comfyClass == 'easy preSamplingSdTurbo') widgetLogic2(node, w); + else widgetLogic(node, w); + } + } + }); + } + } +} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/easy/easyExtraMenu.js b/ComfyUI-Easy-Use/web_version/v1/js/easy/easyExtraMenu.js new file mode 100644 index 0000000000000000000000000000000000000000..b4ea61fc9276ab291e9d4a9747f92d2831cbd8ef --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/easy/easyExtraMenu.js @@ -0,0 +1,593 @@ +import {app} from "../../../../scripts/app.js"; +import {$t} from '../common/i18n.js' +import {CheckpointInfoDialog, LoraInfoDialog} from "../common/model.js"; + +const loaders = ['easy fullLoader', 'easy a1111Loader', 'easy comfyLoader', 'easy kolorsLoader', 'easy hunyuanDiTLoader', 'easy 
pixArtLoader'] +const preSampling = ['easy preSampling', 'easy preSamplingAdvanced', 'easy preSamplingDynamicCFG', 'easy preSamplingNoiseIn', 'easy preSamplingCustom', 'easy preSamplingLayerDiffusion', 'easy fullkSampler'] +const kSampler = ['easy kSampler', 'easy kSamplerTiled', 'easy kSamplerInpainting', 'easy kSamplerDownscaleUnet', 'easy kSamplerLayerDiffusion'] +const controlnet = ['easy controlnetLoader', 'easy controlnetLoaderADV', 'easy controlnetLoader++', 'easy instantIDApply', 'easy instantIDApplyADV'] +const ipadapter = ['easy ipadapterApply', 'easy ipadapterApplyADV', 'easy ipadapterApplyFaceIDKolors', 'easy ipadapterStyleComposition', 'easy ipadapterApplyFromParams', 'easy pulIDApply', 'easy pulIDApplyADV'] +const positive_prompt = ['easy positive', 'easy wildcards'] +const imageNode = ['easy loadImageBase64', 'LoadImage', 'LoadImageMask'] +const inpaint = ['easy applyBrushNet', 'easy applyPowerPaint', 'easy applyInpaint'] +const widgetMapping = { + "positive_prompt":{ + "text": "positive", + "positive": "text" + }, + "loaders":{ + "ckpt_name": "ckpt_name", + "vae_name": "vae_name", + "clip_skip": "clip_skip", + "lora_name": "lora_name", + "resolution": "resolution", + "empty_latent_width": "empty_latent_width", + "empty_latent_height": "empty_latent_height", + "positive": "positive", + "negative": "negative", + "batch_size": "batch_size", + "a1111_prompt_style": "a1111_prompt_style" + }, + "preSampling":{ + "steps": "steps", + "cfg": "cfg", + "cfg_scale_min": "cfg", + "sampler_name": "sampler_name", + "scheduler": "scheduler", + "denoise": "denoise", + "seed_num": "seed_num", + "seed": "seed" + }, + "kSampler":{ + "image_output": "image_output", + "save_prefix": "save_prefix", + "link_id": "link_id" + }, + "controlnet":{ + "control_net_name":"control_net_name", + "strength": ["strength", "cn_strength"], + "scale_soft_weights": ["scale_soft_weights","cn_soft_weights"], + "cn_strength": ["strength", "cn_strength"], + "cn_soft_weights": ["scale_soft_weights","cn_soft_weights"], + }, + "ipadapter":{ + "preset":"preset", + "lora_strength": "lora_strength", + "provider": "provider", + "weight":"weight", + "weight_faceidv2": "weight_faceidv2", + "start_at": "start_at", + "end_at": "end_at", + "cache_mode": "cache_mode", + "use_tiled": "use_tiled", + "insightface": "insightface", + "pulid_file": "pulid_file" + }, + "load_image":{ + "image":"image", + "base64_data":"base64_data", + "channel": "channel" + }, + "inpaint":{ + "dtype": "dtype", + "fitting": "fitting", + "function": "function", + "scale": "scale", + "start_at": "start_at", + "end_at": "end_at" + } +} +const inputMapping = { + "loaders":{ + "optional_lora_stack": "optional_lora_stack", + "positive": "positive", + "negative": "negative" + }, + "preSampling":{ + "pipe": "pipe", + "image_to_latent": "image_to_latent", + "latent": "latent" + }, + "kSampler":{ + "pipe": "pipe", + "model": "model" + }, + "controlnet":{ + "pipe": "pipe", + "image": "image", + "image_kps": "image_kps", + "control_net": "control_net", + "positive": "positive", + "negative": "negative", + "mask": "mask" + }, + "positive_prompt":{ + + }, + "ipadapter":{ + "model":"model", + "image":"image", + "image_style": "image", + "attn_mask":"attn_mask", + "optional_ipadapter":"optional_ipadapter" + }, + "inpaint":{ + "pipe": "pipe", + "image": "image", + "mask": "mask" + } +}; + +const outputMapping = { + "loaders":{ + "pipe": "pipe", + "model": "model", + "vae": "vae", + "clip": null, + "positive": null, + "negative": null, + "latent": null, + }, + 
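// A null value here means the old node's output has no counterpart on the
// replacement node; replaceNode() below checks for null and skips reconnecting,
// so links attached to that output are simply dropped during the swap.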
"preSampling":{ + "pipe":"pipe" + }, + "kSampler":{ + "pipe": "pipe", + "image": "image" + }, + "controlnet":{ + "pipe": "pipe", + "positive": "positive", + "negative": "negative" + }, + "positive_prompt":{ + "text": "positive", + "positive": "text" + }, + "load_image":{ + "IMAGE":"IMAGE", + "MASK": "MASK" + }, + "ipadapter":{ + "model":"model", + "tiles":"tiles", + "masks":"masks", + "ipadapter":"ipadapter" + }, + "inpaint":{ + "pipe": "pipe", + } +}; + +// 替换节点 +function replaceNode(oldNode, newNodeName, type) { + const newNode = LiteGraph.createNode(newNodeName); + if (!newNode) { + return; + } + app.graph.add(newNode); + + newNode.pos = oldNode.pos.slice(); + newNode.size = oldNode.size.slice(); + + oldNode.widgets.forEach(widget => { + if(widgetMapping[type][widget.name]){ + const newName = widgetMapping[type][widget.name]; + if (newName) { + const newWidget = findWidgetByName(newNode, newName); + if (newWidget) { + newWidget.value = widget.value; + if(widget.name == 'seed_num'){ + newWidget.linkedWidgets[0].value = widget.linkedWidgets[0].value + } + if(widget.type == 'converted-widget'){ + convertToInput(newNode, newWidget, widget); + } + } + } + } + + }); + + if(oldNode.inputs){ + oldNode.inputs.forEach((input, index) => { + if (input && input.link && inputMapping[type][input.name]) { + const newInputName = inputMapping[type][input.name]; + // If the new node does not have this output, skip + if (newInputName === null) { + return; + } + const newInputIndex = newNode.findInputSlot(newInputName); + if (newInputIndex !== -1) { + const originLinkInfo = oldNode.graph.links[input.link]; + if (originLinkInfo) { + const originNode = oldNode.graph.getNodeById(originLinkInfo.origin_id); + if (originNode) { + originNode.connect(originLinkInfo.origin_slot, newNode, newInputIndex); + } + } + } + } + }); + } + + if(oldNode.outputs){ + oldNode.outputs.forEach((output, index) => { + if (output && output.links && outputMapping[type] && outputMapping[type][output.name]) { + const newOutputName = outputMapping[type][output.name]; + // If the new node does not have this output, skip + if (newOutputName === null) { + return; + } + const newOutputIndex = newNode.findOutputSlot(newOutputName); + if (newOutputIndex !== -1) { + output.links.forEach(link => { + const targetLinkInfo = oldNode.graph.links[link]; + if (targetLinkInfo) { + const targetNode = oldNode.graph.getNodeById(targetLinkInfo.target_id); + if (targetNode) { + newNode.connect(newOutputIndex, targetNode, targetLinkInfo.target_slot); + } + } + }); + } + } + }); + } + + + // Remove old node + app.graph.remove(oldNode); + + // Remove others + if(newNode.type == 'easy fullkSampler'){ + const link_output_id = newNode.outputs[0].links + if(link_output_id && link_output_id[0]){ + const nodes = app.graph._nodes + const node = nodes.find(cate=> cate.inputs && cate.inputs[0] && cate.inputs[0]['link'] == link_output_id[0]) + if(node){ + app.graph.remove(node); + } + } + }else if(preSampling.includes(newNode.type)){ + const link_output_id = newNode.outputs[0].links + if(!link_output_id || !link_output_id[0]){ + const ksampler = LiteGraph.createNode('easy kSampler'); + app.graph.add(ksampler); + ksampler.pos = newNode.pos.slice(); + ksampler.pos[0] = ksampler.pos[0] + newNode.size[0] + 20; + const newInputIndex = newNode.findInputSlot('pipe'); + if (newInputIndex !== -1) { + if (newNode) { + newNode.connect(0, ksampler, newInputIndex); + } + } + } + } + + // autoHeight + newNode.setSize([newNode.size[0], newNode.computeSize()[1]]); +} + +export function 
findWidgetByName(node, widgetName) { + return node.widgets.find(widget => typeof widgetName == 'object' ? widgetName.includes(widget.name) : widget.name === widgetName); +} +function replaceNodeMenuCallback(currentNode, targetNodeName, type) { + return function() { + replaceNode(currentNode, targetNodeName, type); + }; +} +const addMenuHandler = (nodeType, cb)=> { + const getOpts = nodeType.prototype.getExtraMenuOptions; + nodeType.prototype.getExtraMenuOptions = function () { + const r = getOpts.apply(this, arguments); + cb.apply(this, arguments); + return r; + }; +} +const addMenu = (content, type, nodes_include, nodeType, has_submenu=true) => { + addMenuHandler(nodeType, function (_, options) { + options.unshift({ + content: content, + has_submenu: has_submenu, + callback: (value, options, e, menu, node) => showSwapMenu(value, options, e, menu, node, type, nodes_include) + }) + if(type == 'loaders') { + options.unshift({ + content: $t("💎 View Lora Info..."), + callback: (value, options, e, menu, node) => { + const widget = node.widgets.find(cate => cate.name == 'lora_name') + let name = widget.value; + if (!name || name == 'None') return + new LoraInfoDialog(name).show('loras', name); + } + }) + options.unshift({ + content: $t("💎 View Checkpoint Info..."), + callback: (value, options, e, menu, node) => { + let name = node.widgets[0].value; + if (!name || name == 'None') return + new CheckpointInfoDialog(name).show('checkpoints', name); + } + }) + } + }) +} +const showSwapMenu = (value, options, e, menu, node, type, nodes_include) => { + const swapOptions = []; + nodes_include.map(cate=>{ + if (node.type !== cate) { + swapOptions.push({ + content: `${cate}`, + callback: replaceNodeMenuCallback(node, cate, type) + }); + } + }) + new LiteGraph.ContextMenu(swapOptions, { + event: e, + callback: null, + parentMenu: menu, + node: node + }); + return false; +} + +// 重载节点 +const CONVERTED_TYPE = "converted-widget"; +const GET_CONFIG = Symbol(); + +function hideWidget(node, widget, suffix = "") { + widget.origType = widget.type; + widget.origComputeSize = widget.computeSize; + widget.origSerializeValue = widget.serializeValue; + widget.computeSize = () => [0, -4]; // -4 is due to the gap litegraph adds between widgets automatically + widget.type = CONVERTED_TYPE + suffix; + widget.serializeValue = () => { + // Prevent serializing the widget if we have no input linked + if (!node.inputs) { + return undefined; + } + let node_input = node.inputs.find((i) => i.widget?.name === widget.name); + + if (!node_input || !node_input.link) { + return undefined; + } + return widget.origSerializeValue ? widget.origSerializeValue() : widget.value; + }; + + // Hide any linked widgets, e.g. 
seed+seedControl + if (widget.linkedWidgets) { + for (const w of widget.linkedWidgets) { + hideWidget(node, w, ":" + widget.name); + } + } +} +function convertToInput(node, widget, config) { + console.log('config:', config) + hideWidget(node, widget); + + const { type } = getWidgetType(config); + + // Add input and store widget config for creating on primitive node + const sz = node.size; + if(!widget.options || !widget.options.forceInput){ + node.addInput(widget.name, type, { + widget: { name: widget.name, [GET_CONFIG]: () => config }, + }); + } + + for (const widget of node.widgets) { + widget.last_y += LiteGraph.NODE_SLOT_HEIGHT; + } + + // Restore original size but grow if needed + node.setSize([Math.max(sz[0], node.size[0]), Math.max(sz[1], node.size[1])]); +} + +function getWidgetType(config) { + // Special handling for COMBO so we restrict links based on the entries + let type = config[0]; + if (type instanceof Array) { + type = "COMBO"; + } + return { type }; +} + +const reloadNode = function (node) { + const nodeType = node.constructor.type; + const origVals = node.properties.origVals || {}; + + const nodeTitle = origVals.title || node.title; + const nodeColor = origVals.color || node.color; + const bgColor = origVals.bgcolor || node.bgcolor; + const oldNode = node + const options = { + 'size': [...node.size], + 'color': nodeColor, + 'bgcolor': bgColor, + 'pos': [...node.pos] + } + + let inputLinks = [] + let outputLinks = [] + if(node.inputs){ + for (const input of node.inputs) { + if (input.link) { + const input_name = input.name + const input_slot = node.findInputSlot(input_name) + const input_node = node.getInputNode(input_slot) + const input_link = node.getInputLink(input_slot) + inputLinks.push([input_link.origin_slot, input_node, input_name]) + } + } + } + if(node.outputs) { + for (const output of node.outputs) { + if (output.links) { + const output_name = output.name + + for (const linkID of output.links) { + const output_link = graph.links[linkID] + const output_node = graph._nodes_by_id[output_link.target_id] + outputLinks.push([output_name, output_node, output_link.target_slot]) + } + } + } + } + + app.graph.remove(node) + const newNode = app.graph.add(LiteGraph.createNode(nodeType, nodeTitle, options)); + + function handleLinks() { + // re-convert inputs + if(oldNode.widgets) { + for (let w of oldNode.widgets) { + if (w.type === 'converted-widget') { + const WidgetToConvert = newNode.widgets.find((nw) => nw.name === w.name); + for (let i of oldNode.inputs) { + if (i.name === w.name) { + convertToInput(newNode, WidgetToConvert, i.widget); + } + } + } + } + } + // replace input and output links + for (let input of inputLinks) { + const [output_slot, output_node, input_name] = input; + output_node.connect(output_slot, newNode.id, input_name) + } + for (let output of outputLinks) { + const [output_name, input_node, input_slot] = output; + newNode.connect(output_name, input_node, input_slot) + } + } + + // fix widget values + let values = oldNode.widgets_values; + if (!values && newNode.widgets?.length>0) { + newNode.widgets.forEach((newWidget, index) => { + const oldWidget = oldNode.widgets[index]; + if (newWidget.name === oldWidget.name && newWidget.type === oldWidget.type) { + newWidget.value = oldWidget.value; + } + }); + handleLinks(); + return; + } + let pass = false + const isIterateForwards = values?.length <= newNode.widgets?.length; + let vi = isIterateForwards ? 
0 : values.length - 1; + function evalWidgetValues(testValue, newWidg) { + if (testValue === true || testValue === false) { + if (newWidg.options?.on && newWidg.options?.off) { + return { value: testValue, pass: true }; + } + } else if (typeof testValue === "number") { + if (newWidg.options?.min <= testValue && testValue <= newWidg.options?.max) { + return { value: testValue, pass: true }; + } + } else if (newWidg.options?.values?.includes(testValue)) { + return { value: testValue, pass: true }; + } else if (newWidg.inputEl && typeof testValue === "string") { + return { value: testValue, pass: true }; + } + return { value: newWidg.value, pass: false }; + } + const updateValue = (wi) => { + const oldWidget = oldNode.widgets[wi]; + let newWidget = newNode.widgets[wi]; + if (newWidget.name === oldWidget.name && newWidget.type === oldWidget.type) { + while ((isIterateForwards ? vi < values.length : vi >= 0) && !pass) { + let { value, pass } = evalWidgetValues(values[vi], newWidget); + if (pass && value !== null) { + newWidget.value = value; + break; + } + vi += isIterateForwards ? 1 : -1; + } + vi++ + if (!isIterateForwards) { + vi = values.length - (newNode.widgets?.length - 1 - wi); + } + } + }; + if (isIterateForwards && newNode.widgets?.length>0) { + for (let wi = 0; wi < newNode.widgets.length; wi++) { + updateValue(wi); + } + } else if(newNode.widgets?.length>0){ + for (let wi = newNode.widgets.length - 1; wi >= 0; wi--) { + updateValue(wi); + } + } + handleLinks(); +}; + + +app.registerExtension({ + name: "comfy.easyUse.extraMenu", + async beforeRegisterNodeDef(nodeType, nodeData, app) { + // 刷新节点 + addMenuHandler(nodeType, function (_, options) { + options.unshift({ + content: $t("🔃 Reload Node"), + callback: (value, options, e, menu, node) => { + let graphcanvas = LGraphCanvas.active_canvas; + if (!graphcanvas.selected_nodes || Object.keys(graphcanvas.selected_nodes).length <= 1) { + reloadNode(node); + } else { + for (let i in graphcanvas.selected_nodes) { + reloadNode(graphcanvas.selected_nodes[i]); + } + } + } + }) + // ckptNames + if(nodeData.name == 'easy ckptNames'){ + options.unshift({ + content: $t("💎 View Checkpoint Info..."), + callback: (value, options, e, menu, node) => { + let name = node.widgets[0].value; + if (!name || name == 'None') return + new CheckpointInfoDialog(name).show('checkpoints', name); + } + }) + } + }) + + // Swap提示词 + if (positive_prompt.includes(nodeData.name)) { + addMenu("↪️ Swap EasyPrompt", 'positive_prompt', positive_prompt, nodeType) + } + // Swap加载器 + if (loaders.includes(nodeData.name)) { + addMenu("↪️ Swap EasyLoader", 'loaders', loaders, nodeType) + } + // Swap预采样器 + if (preSampling.includes(nodeData.name)) { + addMenu("↪️ Swap EasyPreSampling", 'preSampling', preSampling, nodeType) + } + // Swap kSampler + if (kSampler.includes(nodeData.name)) { + addMenu("↪️ Swap EasyKSampler", 'preSampling', kSampler, nodeType) + } + // Swap ControlNet + if (controlnet.includes(nodeData.name)) { + addMenu("↪️ Swap EasyControlnet", 'controlnet', controlnet, nodeType) + } + // Swap IPAdapater + if (ipadapter.includes(nodeData.name)) { + addMenu("↪️ Swap EasyAdapater", 'ipadapter', ipadapter, nodeType) + } + // Swap Image + if (imageNode.includes(nodeData.name)) { + addMenu("↪️ Swap LoadImage", 'load_image', imageNode, nodeType) + } + // Swap inpaint + if (inpaint.includes(nodeData.name)) { + addMenu("↪️ Swap InpaintNode", 'inpaint', inpaint, nodeType) + } + } +}); + diff --git a/ComfyUI-Easy-Use/web_version/v1/js/easy/easyInterface.js 
b/ComfyUI-Easy-Use/web_version/v1/js/easy/easyInterface.js new file mode 100644 index 0000000000000000000000000000000000000000..5f73e3390ddb6a9ef30ae75d8336d41502f44f06 --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/easy/easyInterface.js @@ -0,0 +1,788 @@ +import { app } from "../../../../scripts/app.js"; +import { api } from "../../../../scripts/api.js"; +import { $el } from "../../../../scripts/ui.js"; +import {addPreconnect, addCss} from "../common/utils.js"; + +const locale = localStorage['AGL.Locale'] || localStorage['Comfy.Settings.AGL.Locale'] || 'en-US' + +const customThemeColor = "#236692" +const customThemeColorLight = "#3485bb" +// 增加Slot颜色 +const customPipeLineLink = "#7737AA" +const customPipeLineSDXLLink = "#7737AA" +const customIntLink = "#29699C" +const customXYPlotLink = "#74DA5D" +const customLoraStackLink = "#94dccd" +const customXYLink = "#38291f" + +var customLinkColors = JSON.parse(localStorage.getItem('Comfy.Settings.ttN.customLinkColors')) || {}; +if (!customLinkColors["PIPE_LINE"] || !LGraphCanvas.link_type_colors["PIPE_LINE"]) {customLinkColors["PIPE_LINE"] = customPipeLineLink;} +if (!customLinkColors["PIPE_LINE_SDXL"] || !LGraphCanvas.link_type_colors["PIPE_LINE_SDXL"]) {customLinkColors["PIPE_LINE_SDXL"] = customPipeLineSDXLLink;} +if (!customLinkColors["INT"] || !LGraphCanvas.link_type_colors["INT"]) {customLinkColors["INT"] = customIntLink;} +if (!customLinkColors["XYPLOT"] || !LGraphCanvas.link_type_colors["XYPLOT"]) {customLinkColors["XYPLOT"] = customXYPlotLink;} +if (!customLinkColors["X_Y"] || !LGraphCanvas.link_type_colors["X_Y"]) {customLinkColors["X_Y"] = customXYLink;} +if (!customLinkColors["LORA_STACK"] || !LGraphCanvas.link_type_colors["LORA_STACK"]) {customLinkColors["LORA_STACK"] = customLoraStackLink;} +if (!customLinkColors["CONTROL_NET_STACK"] || !LGraphCanvas.link_type_colors["CONTROL_NET_STACK"]) {customLinkColors["CONTROL_NET_STACK"] = customLoraStackLink;} + +localStorage.setItem('Comfy.Settings.easyUse.customLinkColors', JSON.stringify(customLinkColors)); + +// 增加自定义主题 +const ui = { + "version": 102, + "id": "obsidian", + "name": "Obsidian", + "colors": { + "node_slot": { + "CLIP": "#FFD500", + "CLIP_VISION": "#A8DADC", + "CLIP_VISION_OUTPUT": "#ad7452", + "CONDITIONING": "#FFA931", + "CONTROL_NET": "#6EE7B7", + "IMAGE": "#64B5F6", + "LATENT": "#FF9CF9", + "MASK": "#81C784", + "MODEL": "#B39DDB", + "STYLE_MODEL": "#C2FFAE", + "VAE": "#FF6E6E", + "TAESD": "#DCC274", + "PIPE_LINE": customPipeLineLink, + "PIPE_LINE_SDXL": customPipeLineSDXLLink, + "INT": customIntLink, + "XYPLOT": customXYPlotLink, + "X_Y": customXYLink + }, + "litegraph_base": { + "BACKGROUND_IMAGE": "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABkCAIAAAD/gAIDAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAQBJREFUeNrs1rEKwjAUhlETUkj3vP9rdmr1Ysammk2w5wdxuLgcMHyptfawuZX4pJSWZTnfnu/lnIe/jNNxHHGNn//HNbbv+4dr6V+11uF527arU7+u63qfa/bnmh8sWLBgwYJlqRf8MEptXPBXJXa37BSl3ixYsGDBMliwFLyCV/DeLIMFCxYsWLBMwSt4Be/NggXLYMGCBUvBK3iNruC9WbBgwYJlsGApeAWv4L1ZBgsWLFiwYJmCV/AK3psFC5bBggULloJX8BpdwXuzYMGCBctgwVLwCl7Be7MMFixYsGDBsu8FH1FaSmExVfAxBa/gvVmwYMGCZbBg/W4vAQYA5tRF9QYlv/QAAAAASUVORK5CYII=", + "CLEAR_BACKGROUND_COLOR": "#222222", + "NODE_TITLE_COLOR": "rgba(255,255,255,.75)", + "NODE_SELECTED_TITLE_COLOR": "#FFF", + "NODE_TEXT_SIZE": 14, + "NODE_TEXT_COLOR": "#b8b8b8", + "NODE_SUBTEXT_SIZE": 12, + "NODE_DEFAULT_COLOR": "rgba(0,0,0,.8)", + "NODE_DEFAULT_BGCOLOR": "rgba(22,22,22,.8)", + "NODE_DEFAULT_BOXCOLOR": "rgba(255,255,255,.75)", + "NODE_DEFAULT_SHAPE": "box", + 
"NODE_BOX_OUTLINE_COLOR": customThemeColor, + "DEFAULT_SHADOW_COLOR": "rgba(0,0,0,0)", + "DEFAULT_GROUP_FONT": 24, + + "WIDGET_BGCOLOR": "#242424", + "WIDGET_OUTLINE_COLOR": "#333", + "WIDGET_TEXT_COLOR": "#a3a3a8", + "WIDGET_SECONDARY_TEXT_COLOR": "#97979c", + + "LINK_COLOR": "#9A9", + "EVENT_LINK_COLOR": "#A86", + "CONNECTING_LINK_COLOR": "#AFA" + }, + "comfy_base": { + "fg-color": "#fff", + "bg-color": "#242424", + "comfy-menu-bg": "rgba(24,24,24,.9)", + "comfy-input-bg": "#262626", + "input-text": "#ddd", + "descrip-text": "#999", + "drag-text": "#ccc", + "error-text": "#ff4444", + "border-color": "#29292c", + "tr-even-bg-color": "rgba(28,28,28,.9)", + "tr-odd-bg-color": "rgba(19,19,19,.9)" + } + } +} + +let custom_theme = null +let control_mode = null +try{ + custom_theme = localStorage.getItem('Comfy.Settings.Comfy.CustomColorPalettes') ? JSON.parse(localStorage.getItem('Comfy.Settings.Comfy.CustomColorPalettes')) : {}; +} +catch (e) {custom_theme = {}} +try{ + const dark_bg = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABkCAIAAAD/gAIDAAAACXBIWXMAAAsTAAALEwEAmpwYAAAGlmlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPD94cGFja2V0IGJlZ2luPSLvu78iIGlkPSJXNU0wTXBDZWhpSHpyZVN6TlRjemtjOWQiPz4gPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iQWRvYmUgWE1QIENvcmUgOS4xLWMwMDEgNzkuMTQ2Mjg5OSwgMjAyMy8wNi8yNS0yMDowMTo1NSAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczpkYz0iaHR0cDovL3B1cmwub3JnL2RjL2VsZW1lbnRzLzEuMS8iIHhtbG5zOnBob3Rvc2hvcD0iaHR0cDovL25zLmFkb2JlLmNvbS9waG90b3Nob3AvMS4wLyIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0RXZ0PSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VFdmVudCMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIDI1LjEgKFdpbmRvd3MpIiB4bXA6Q3JlYXRlRGF0ZT0iMjAyMy0xMS0xM1QwMDoxODowMiswMTowMCIgeG1wOk1vZGlmeURhdGU9IjIwMjMtMTEtMTVUMDI6MDQ6NTkrMDE6MDAiIHhtcDpNZXRhZGF0YURhdGU9IjIwMjMtMTEtMTVUMDI6MDQ6NTkrMDE6MDAiIGRjOmZvcm1hdD0iaW1hZ2UvcG5nIiBwaG90b3Nob3A6Q29sb3JNb2RlPSIzIiB4bXBNTTpJbnN0YW5jZUlEPSJ4bXAuaWlkOmIyYzRhNjA5LWJmYTctYTg0MC1iOGFlLTk3MzE2ZjM1ZGIyNyIgeG1wTU06RG9jdW1lbnRJRD0iYWRvYmU6ZG9jaWQ6cGhvdG9zaG9wOjk0ZmNlZGU4LTE1MTctZmQ0MC04ZGU3LWYzOTgxM2E3ODk5ZiIgeG1wTU06T3JpZ2luYWxEb2N1bWVudElEPSJ4bXAuZGlkOjIzMWIxMGIwLWI0ZmItMDI0ZS1iMTJlLTMwNTMwM2NkMDdjOCI+IDx4bXBNTTpIaXN0b3J5PiA8cmRmOlNlcT4gPHJkZjpsaSBzdEV2dDphY3Rpb249ImNyZWF0ZWQiIHN0RXZ0Omluc3RhbmNlSUQ9InhtcC5paWQ6MjMxYjEwYjAtYjRmYi0wMjRlLWIxMmUtMzA1MzAzY2QwN2M4IiBzdEV2dDp3aGVuPSIyMDIzLTExLTEzVDAwOjE4OjAyKzAxOjAwIiBzdEV2dDpzb2Z0d2FyZUFnZW50PSJBZG9iZSBQaG90b3Nob3AgMjUuMSAoV2luZG93cykiLz4gPHJkZjpsaSBzdEV2dDphY3Rpb249InNhdmVkIiBzdEV2dDppbnN0YW5jZUlEPSJ4bXAuaWlkOjQ4OWY1NzlmLTJkNjUtZWQ0Zi04OTg0LTA4NGE2MGE1ZTMzNSIgc3RFdnQ6d2hlbj0iMjAyMy0xMS0xNVQwMjowNDo1OSswMTowMCIgc3RFdnQ6c29mdHdhcmVBZ2VudD0iQWRvYmUgUGhvdG9zaG9wIDI1LjEgKFdpbmRvd3MpIiBzdEV2dDpjaGFuZ2VkPSIvIi8+IDxyZGY6bGkgc3RFdnQ6YWN0aW9uPSJzYXZlZCIgc3RFdnQ6aW5zdGFuY2VJRD0ieG1wLmlpZDpiMmM0YTYwOS1iZmE3LWE4NDAtYjhhZS05NzMxNmYzNWRiMjciIHN0RXZ0OndoZW49IjIwMjMtMTEtMTVUMDI6MDQ6NTkrMDE6MDAiIHN0RXZ0OnNvZnR3YXJlQWdlbnQ9IkFkb2JlIFBob3Rvc2hvcCAyNS4xIChXaW5kb3dzKSIgc3RFdnQ6Y2hhbmdlZD0iLyIvPiA8L3JkZjpTZXE+IDwveG1wTU06SGlzdG9yeT4gPC9yZGY6RGVzY3JpcHRpb24+IDwvcmRmOlJERj4gPC94OnhtcG1ldGE+IDw/eHBhY2tldCBlbmQ9InIiPz4OTe6GAAAAx0lEQVR42u3WMQoAIQxFwRzJys77X8vSLiRgITif7bYbgrwYc/mKXyBoY4VVBgsWLFiwYFmOlTv+9jfDOjHmr8u6eVkGCxYsWLBgmc5S8ApewXvgYRksWLBgKXidpeBdloL3wMOCBctgwVLwCl7BuyyDBQsWLFiwTGcpeAWv4D3wsAwWLFiwFLzOUvAuS8F74G
HBgmWwYCl4Ba/gXZbBggULFixYprMUvIJX8B54WAYLFixYCl5nKXiXpeA98LBgwTJYsGC9tg1o8f4TTtqzNQAAAABJRU5ErkJggg==" + // 修改自定义主题 + if(!custom_theme || !custom_theme.obsidian || !custom_theme.obsidian.version || custom_theme.obsidian.version0) node.execute_triggered--; + if (node.action_triggered>0) node.action_triggered--; + }; + LGraphCanvas.prototype.drawNodeWidgets = function( + node, + posY, + ctx, + active_widget + ) { + if (!node.widgets || !node.widgets.length) { + return 0; + } + var width = node.size[0]; + var widgets = node.widgets; + posY += 2; + var H = LiteGraph.NODE_WIDGET_HEIGHT; + var show_text = this.ds.scale > 0.5; + ctx.save(); + ctx.globalAlpha = this.editor_alpha; + var outline_color = LiteGraph.WIDGET_OUTLINE_COLOR; + var background_color = LiteGraph.WIDGET_BGCOLOR; + var text_color = LiteGraph.WIDGET_TEXT_COLOR; + var secondary_text_color = LiteGraph.WIDGET_SECONDARY_TEXT_COLOR; + var margin = 12; + + for (var i = 0; i < widgets.length; ++i) { + var w = widgets[i]; + var y = posY; + if (w.y) { + y = w.y; + } + w.last_y = y; + ctx.strokeStyle = outline_color; + ctx.fillStyle = "#222"; + ctx.textAlign = "left"; + ctx.lineWidth = 1; + if(w.disabled) + ctx.globalAlpha *= 0.5; + var widget_width = w.width || width; + + switch (w.type) { + case "button": + ctx.font = "10px Inter" + ctx.fillStyle = background_color; + if (w.clicked) { + ctx.fillStyle = "#AAA"; + w.clicked = false; + this.dirty_canvas = true; + } + ctx.beginPath(); + ctx.roundRect(margin, y, widget_width - margin * 2, H, [H * 0.25]); + ctx.fill(); + if(show_text && !w.disabled) + ctx.stroke(); + if (show_text) { + ctx.textAlign = "center"; + ctx.fillStyle = text_color; + ctx.fillText(w.label || w.name, widget_width * 0.5, y + H * 0.7); + } + break; + case "toggle": + ctx.font = "10px Inter" + ctx.textAlign = "left"; + ctx.strokeStyle = outline_color; + ctx.fillStyle = background_color; + ctx.beginPath(); + if (show_text) + ctx.roundRect(margin, y, widget_width - margin * 2, H, [H * 0.25]); + else + ctx.rect(margin, y, widget_width - margin * 2, H ); + ctx.fill(); + if(show_text && !w.disabled) + ctx.stroke(); + ctx.fillStyle = w.value ? customThemeColor : "#333"; + ctx.beginPath(); + ctx.arc( widget_width - margin * 2, y + H * 0.5, H * 0.25, 0, Math.PI * 2 ); + ctx.fill(); + if (show_text) { + ctx.fillStyle = secondary_text_color; + const label = w.label || w.name; + if (label != null) { + ctx.fillText(label, margin * 1.6, y + H * 0.7); + } + ctx.font = "10px Inter" + ctx.fillStyle = w.value ? text_color : secondary_text_color; + ctx.textAlign = "right"; + ctx.fillText( + w.value + ? w.options.on || "true" + : w.options.off || "false", + widget_width - 35, + y + H * 0.7 + ); + } + break; + case "slider": + ctx.font = "10px Inter" + ctx.fillStyle = background_color; + ctx.strokeStyle = outline_color; + ctx.beginPath(); + ctx.roundRect(margin, y, widget_width - margin * 2, H, [H*0.25]); + ctx.fill(); + ctx.stroke() + var range = w.options.max - w.options.min; + var nvalue = (w.value - w.options.min) / range; + if(nvalue < 0.0) nvalue = 0.0; + if(nvalue > 1.0) nvalue = 1.0; + ctx.fillStyle = w.options.hasOwnProperty("slider_color") ? w.options.slider_color : (active_widget == w ? 
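+                    // Slider fill colour: an explicit options.slider_color always wins; otherwise the
+                    // widget currently being interacted with is drawn muted ("#333") and all other
+                    // sliders use the custom theme colour.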
"#333" : customThemeColor); + ctx.beginPath(); + ctx.roundRect(margin, y, nvalue * (widget_width - margin * 2), H, [H*0.25]); + ctx.fill(); + if (w.marker) { + var marker_nvalue = (w.marker - w.options.min) / range; + if(marker_nvalue < 0.0) marker_nvalue = 0.0; + if(marker_nvalue > 1.0) marker_nvalue = 1.0; + ctx.fillStyle = w.options.hasOwnProperty("marker_color") ? w.options.marker_color : "#AA9"; + ctx.roundRect( margin + marker_nvalue * (widget_width - margin * 2), y, 2, H , [H * 0.25] ); + } + if (show_text) { + ctx.textAlign = "center"; + ctx.fillStyle = text_color; + var text = (w.label || w.name) + ": " + (Number(w.value).toFixed(w.options.precision != null ? w.options.precision : 3)).toString() + ctx.fillText( + text, + widget_width * 0.5, + y + H * 0.7 + ); + + } + break; + case "number": + case "combo": + ctx.textAlign = "left"; + ctx.strokeStyle = outline_color; + ctx.fillStyle = background_color; + ctx.beginPath(); + if(show_text) + ctx.roundRect(margin, y, widget_width - margin * 2, H, [H * 0.25] ); + else + ctx.rect(margin, y, widget_width - margin * 2, H ); + ctx.fill(); + if (show_text) { + if(!w.disabled) + ctx.stroke(); + ctx.fillStyle = text_color; + if(!w.disabled) + { + ctx.beginPath(); + ctx.moveTo(margin + 12, y + 6.5); + ctx.lineTo(margin + 6, y + H * 0.5); + ctx.lineTo(margin + 12, y + H - 6.5); + ctx.fill(); + ctx.beginPath(); + ctx.moveTo(widget_width - margin - 12, y + 6.5); + ctx.lineTo(widget_width - margin - 6, y + H * 0.5); + ctx.lineTo(widget_width - margin - 12, y + H - 6.5); + ctx.fill(); + } + ctx.fillStyle = secondary_text_color; + ctx.font = "10px Inter" + ctx.fillText(w.label || w.name, margin * 2 + 5, y + H * 0.7); + ctx.fillStyle = text_color; + ctx.textAlign = "right"; + var rightDistance = 6 + if (w.type == "number") { + ctx.font = "10px Inter,JetBrains Mono,monospace" + ctx.fillText( + Number(w.value).toFixed( + w.options.precision !== undefined + ? w.options.precision + : 3 + ), + widget_width - margin * 2 - rightDistance, + y + H * 0.7 + ); + } else { + var v = w.value; + if( w.options.values ) + { + var values = w.options.values; + if( values.constructor === Function ) + values = values(); + if(values && values.constructor !== Array) + v = values[ w.value ]; + } + ctx.fillText( + v, + widget_width - margin * 2 - rightDistance, + y + H * 0.7 + ); + } + } + break; + case "string": + case "text": + ctx.textAlign = "left"; + ctx.strokeStyle = outline_color; + ctx.fillStyle = background_color; + ctx.beginPath(); + if (show_text) + ctx.roundRect(margin, y, widget_width - margin * 2, H, [H * 0.25]); + else + ctx.rect( margin, y, widget_width - margin * 2, H ); + ctx.fill(); + if (show_text) { + if(!w.disabled) + ctx.stroke(); + ctx.save(); + ctx.beginPath(); + ctx.rect(margin, y, widget_width - margin * 2, H); + ctx.clip(); + + //ctx.stroke(); + ctx.fillStyle = secondary_text_color; + const label = w.label || w.name; + ctx.font = "10px Inter" + if (label != null) { + ctx.fillText(label, margin * 2, y + H * 0.7); + } + ctx.fillStyle = text_color; + ctx.textAlign = "right"; + ctx.fillText(String(w.value).substr(0,30), widget_width - margin * 2, y + H * 0.7); //30 chars max + ctx.restore(); + } + break; + default: + if (w.draw) { + w.draw(ctx, node, widget_width, y, H); + } + break; + } + posY += (w.computeSize ? 
w.computeSize(widget_width)[1] : H) + 4; + ctx.globalAlpha = this.editor_alpha; + + } + ctx.restore(); + ctx.textAlign = "left"; + }; + } +}catch(e){ + console.error(e) +} + +function updateControlWidgetLabel(widget, controlValueRunBefore=false) { + let replacement = "after"; + let find = "before"; + if (controlValueRunBefore) { + [find, replacement] = [replacement, find] + } + widget.label = (widget.label ?? widget.name).replace(find, replacement); + widget.name = widget.label; +} + +// 节点颜色 +const COLOR_THEMES = LGraphCanvas.node_colors +const NODE_COLORS = { + "easy positive":"green", + "easy negative":"red", + "easy promptList":"cyan", + "easy promptLine":"cyan", + "easy promptConcat":"cyan", + "easy promptReplace":"cyan", + "easy XYInputs: Seeds++ Batch": customXYLink, + "easy XYInputs: ModelMergeBlocks": customXYLink, + 'easy textSwitch': "pale_blue" +} + +function setNodeColors(node, theme) { + if (!theme) {return;} + if(theme.color) node.color = theme.color; + if(theme.bgcolor) node.bgcolor = theme.bgcolor; +} + + +app.registerExtension({ + name: "comfy.easyUse.interface", + setup() { + Object.assign(app.canvas.default_connection_color_byType, customLinkColors); + Object.assign(LGraphCanvas.link_type_colors, customLinkColors); + }, + + async nodeCreated(node) { + if (NODE_COLORS.hasOwnProperty(node.comfyClass)) { + const colorKey = NODE_COLORS[node.comfyClass] + const theme = COLOR_THEMES[colorKey]; + setNodeColors(node, theme); + } + // 修复官方bug: 应该初始化修改节点的control_mode name + if(control_mode && control_mode == 'before'){ + const controlValueRunBefore = control_mode == 'before' + if(node.widgets && node.widgets.length>0) { + for (const w of node.widgets) { + if (['control_before_generate', 'control_after_generate'].includes(w.name)) { + await updateControlWidgetLabel(w, controlValueRunBefore); + if (w.linkedWidgets) { + for (const l of w.linkedWidgets) { + await updateControlWidgetLabel(l, controlValueRunBefore); + } + } + } + } + } + } + }, +}) diff --git a/ComfyUI-Easy-Use/web_version/v1/js/easy/easyQuick.js b/ComfyUI-Easy-Use/web_version/v1/js/easy/easyQuick.js new file mode 100644 index 0000000000000000000000000000000000000000..394bd8a31508a9c1f2f669f2030ea5847d36699e --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/easy/easyQuick.js @@ -0,0 +1,250 @@ +// 1.0.2 +import { app } from "../../../../scripts/app.js"; +import { GroupNodeConfig } from "../../../../extensions/core/groupNode.js"; +import { api } from "../../../../scripts/api.js"; +import { $t } from "../common/i18n.js" + +const nodeTemplateShortcutId = "Comfy.EasyUse.NodeTemplateShortcut" +const processBarId = "Comfy.EasyUse.queueProcessBar" + +let enableNodeTemplateShortcut = true +let enableQueueProcess = false + +export function addNodeTemplateShortcutSetting(app) { + app.ui.settings.addSetting({ + id: nodeTemplateShortcutId, + name: $t("Enable ALT+1~9 to paste nodes from nodes template (ComfyUI-Easy-Use)"), + type: "boolean", + defaultValue: enableNodeTemplateShortcut, + onChange(value) { + enableNodeTemplateShortcut = !!value; + }, + }); +} +export function addQueueProcessSetting(app) { + app.ui.settings.addSetting({ + id: processBarId, + name: $t("Enable process bar in queue button (ComfyUI-Easy-Use)"), + type: "boolean", + defaultValue: enableQueueProcess, + onChange(value) { + enableQueueProcess = !!value; + }, + }); +} +const getEnableNodeTemplateShortcut = _ => app.ui.settings.getSettingValue(nodeTemplateShortcutId, true) +const getQueueProcessSetting = _ => app.ui.settings.getSettingValue(processBarId, 
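+// Both getters re-read the stored setting on every call, so toggling either option in the
+// settings dialog takes effect immediately without reloading the page.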
false) + +function loadTemplate(){ + return localStorage['Comfy.NodeTemplates'] ? JSON.parse(localStorage['Comfy.NodeTemplates']) : null +} +const clipboardAction = async (cb) => { + const old = localStorage.getItem("litegrapheditor_clipboard"); + await cb(); + localStorage.setItem("litegrapheditor_clipboard", old); +}; +async function addTemplateToCanvas(t){ + const data = JSON.parse(t.data); + await GroupNodeConfig.registerFromWorkflow(data.groupNodes, {}); + localStorage.setItem("litegrapheditor_clipboard", t.data); + app.canvas.pasteFromClipboard(); +} + +app.registerExtension({ + name: 'comfy.easyUse.quick', + init() { + const keybindListener = async function (event) { + let modifierPressed = event.altKey; + const isEnabled = getEnableNodeTemplateShortcut() + if(isEnabled){ + const mac_alt_nums = ['¡','™','£','¢','∞','§','¶','•','ª'] + const nums = ['1','2','3','4','5','6','7','8','9'] + let key = event.key + if(mac_alt_nums.includes(key)){ + const idx = mac_alt_nums.findIndex(cate=> cate == key) + key = nums[idx] + modifierPressed = true + } + if(['1','2','3','4','5','6','7','8','9'].includes(key) && modifierPressed) { + const template = loadTemplate() + const idx = parseInt(key) - 1 + if (template && template[idx]) { + let t = template[idx] + try{ + let data = JSON.parse(t.data) + data.title = t.name + t.data = JSON.stringify(data) + clipboardAction(_ => { + addTemplateToCanvas(t) + }) + }catch (e){ + console.error(e) + } + + } + if (event.ctrlKey || event.altKey || event.metaKey) { + return; + } + } + } + + } + window.addEventListener("keydown", keybindListener, true); + }, + + setup(app) { + addNodeTemplateShortcutSetting(app) + addQueueProcessSetting(app) + registerListeners() + } +}); + +const registerListeners = () => { + const queue_button = document.getElementById("queue-button") + const old_queue_button_text = queue_button.innerText + api.addEventListener('progress', ({ + detail, + }) => { + const isEnabled = getQueueProcessSetting() + if(isEnabled){ + const { + value, max, node, + } = detail; + const progress = Math.floor((value / max) * 100); + // console.log(progress) + if (!isNaN(progress) && progress >= 0 && progress <= 100) { + queue_button.innerText = progress ==0 || progress == 100 ? old_queue_button_text : "ㅤ " + const width = progress ==0 || progress == 100 ? '0%' : progress.toString() + '%' + let bar = document.createElement("div") + queue_button.setAttribute('data-attr', progress ==0 || progress == 100 ? 
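+                // While a prompt is running, the queue button text is blanked and the progress is exposed
+                // both as a 'data-attr' attribute on the button and as the --process-bar-width CSS variable;
+                // the extension's stylesheet presumably renders the bar from these two hooks.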
"" : progress.toString() + '%') + document.documentElement.style.setProperty('--process-bar-width', width) + } + } + + }, false); + + api.addEventListener('status', ({ + detail, + }) => { + const queueRemaining = detail?.exec_info.queue_remaining; + if(queueRemaining === 0){ + let queue_button = document.getElementById("queue-button") + queue_button.innerText = old_queue_button_text + queue_button.setAttribute('data-attr', "") + document.documentElement.style.setProperty('--process-bar-width', '0%') + } + }, false); +}; + + +// 修改粘贴指令 +LGraphCanvas.prototype.pasteFromClipboard = function(isConnectUnselected = false) { + // if ctrl + shift + v is off, return when isConnectUnselected is true (shift is pressed) to maintain old behavior + if (!LiteGraph.ctrl_shift_v_paste_connect_unselected_outputs && isConnectUnselected) { + return; + } + var data = localStorage.getItem("litegrapheditor_clipboard"); + if (!data) { + return; + } + + this.graph.beforeChange(); + + //create nodes + var clipboard_info = JSON.parse(data); + // calculate top-left node, could work without this processing but using diff with last node pos :: clipboard_info.nodes[clipboard_info.nodes.length-1].pos + var posMin = false; + var posMinIndexes = false; + for (var i = 0; i < clipboard_info.nodes.length; ++i) { + if (posMin){ + if(posMin[0]>clipboard_info.nodes[i].pos[0]){ + posMin[0] = clipboard_info.nodes[i].pos[0]; + posMinIndexes[0] = i; + } + if(posMin[1]>clipboard_info.nodes[i].pos[1]){ + posMin[1] = clipboard_info.nodes[i].pos[1]; + posMinIndexes[1] = i; + } + } + else{ + posMin = [clipboard_info.nodes[i].pos[0], clipboard_info.nodes[i].pos[1]]; + posMinIndexes = [i, i]; + } + } + var nodes = []; + var left_arr = [], right_arr = [], top_arr =[], bottom_arr =[]; + + for (var i = 0; i < clipboard_info.nodes.length; ++i) { + var node_data = clipboard_info.nodes[i]; + var node = LiteGraph.createNode(node_data.type); + if (node) { + + node.configure(node_data); + //paste in last known mouse position + node.pos[0] += this.graph_mouse[0] - posMin[0]; //+= 5; + node.pos[1] += this.graph_mouse[1] - posMin[1]; //+= 5; + + left_arr.push(node.pos[0]) + right_arr.push(node.pos[0] + node.size[0]) + top_arr.push(node.pos[1]) + bottom_arr.push(node.pos[1] + node.size[1]) + + this.graph.add(node,{doProcessChange:false}); + + nodes.push(node); + + } + } + + if(clipboard_info.title){ + var l = Math.min(...left_arr) - 15; + var r = Math.max(...right_arr) - this.graph_mouse[0] + 30; + var t = Math.min(...top_arr) - 100; + var b = Math.max(...bottom_arr) - this.graph_mouse[1] + 130; + + // create group + const groups = [ + { + "title": clipboard_info.title, + "bounding": [ + l, + t, + r, + b + ], + "color": "#3f789e", + "font_size": 24, + "locked": false + } + ] + + for (var i = 0; i < groups.length; ++i) { + var group = new LiteGraph.LGraphGroup(); + group.configure(groups[i]); + this.graph.add(group); + } + } + + //create links + for (var i = 0; i < clipboard_info.links.length; ++i) { + var link_info = clipboard_info.links[i]; + var origin_node; + var origin_node_relative_id = link_info[0]; + if (origin_node_relative_id != null) { + origin_node = nodes[origin_node_relative_id]; + } else if (LiteGraph.ctrl_shift_v_paste_connect_unselected_outputs && isConnectUnselected) { + var origin_node_id = link_info[4]; + if (origin_node_id) { + origin_node = this.graph.getNodeById(origin_node_id); + } + } + var target_node = nodes[link_info[2]]; + if( origin_node && target_node ) + origin_node.connect(link_info[1], target_node, link_info[3]); + else 
+ console.warn("Warning, nodes missing on pasting"); + } + + this.selectNodes(nodes); + this.graph.afterChange(); +}; \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/easy/easySaveImage.js b/ComfyUI-Easy-Use/web_version/v1/js/easy/easySaveImage.js new file mode 100644 index 0000000000000000000000000000000000000000..35adb752ed1e167fa47083fafa149310dffedd0f --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/easy/easySaveImage.js @@ -0,0 +1,36 @@ +import { app } from "../../../../scripts/app.js"; +import { applyTextReplacements } from "../../../../scripts/utils.js"; + +const extraNodes = ["easy imageSave", "easy fullkSampler", "easy kSampler", "easy kSamplerTiled","easy kSamplerInpainting", "easy kSamplerDownscaleUnet", "easy kSamplerSDTurbo","easy detailerFix"] + +app.registerExtension({ + name: "Comfy.Easy.SaveImageExtraOutput", + async beforeRegisterNodeDef(nodeType, nodeData, app) { + if (extraNodes.includes(nodeData.name)) { + const onNodeCreated = nodeType.prototype.onNodeCreated; + // When the SaveImage node is created we want to override the serialization of the output name widget to run our S&R + nodeType.prototype.onNodeCreated = function () { + const r = onNodeCreated ? onNodeCreated.apply(this, arguments) : undefined; + + const widget = this.widgets.find((w) => w.name === "filename_prefix" || w.name === 'save_prefix'); + widget.serializeValue = () => { + return applyTextReplacements(app, widget.value); + }; + + return r; + }; + } else { + // When any other node is created add a property to alias the node + const onNodeCreated = nodeType.prototype.onNodeCreated; + nodeType.prototype.onNodeCreated = function () { + const r = onNodeCreated ? onNodeCreated.apply(this, arguments) : undefined; + + if (!this.properties || !("Node name for S&R" in this.properties)) { + this.addProperty("Node name for S&R", this.constructor.type, "string"); + } + + return r; + }; + } + }, +}); diff --git a/ComfyUI-Easy-Use/web_version/v1/js/easy/easySeg.js b/ComfyUI-Easy-Use/web_version/v1/js/easy/easySeg.js new file mode 100644 index 0000000000000000000000000000000000000000..11e8feb61f3e0b3f8358c5a38c9a6df9730c965e --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/easy/easySeg.js @@ -0,0 +1,136 @@ +import {app} from "../../../../scripts/app.js"; +import {$el} from "../../../../scripts/ui.js"; +import {$t} from "../common/i18n.js"; +import {findWidgetByName, toggleWidget} from "../common/utils.js"; + + +const tags = { + "selfie_multiclass_256x256": ["Background", "Hair", "Body", "Face", "Clothes", "Others",], + "human_parsing_lip":["Background","Hat","Hair","Glove","Sunglasses","Upper-clothes","Dress","Coat","Socks","Pants","Jumpsuits","Scarf","Skirt","Face","Left-arm","Right-arm","Left-leg","Right-leg","Left-shoe","Right-shoe"], +} +function getTagList(tags) { + let rlist=[] + tags.forEach((k,i) => { + rlist.push($el( + "label.easyuse-prompt-styles-tag", + { + dataset: { + tag: i, + name: $t(k), + index: i + }, + $: (el) => { + el.children[0].onclick = () => { + el.classList.toggle("easyuse-prompt-styles-tag-selected"); + }; + }, + }, + [ + $el("input",{ + type: 'checkbox', + name: i + }), + $el("span",{ + textContent: $t(k), + }) + ] + )) + }); + return rlist +} + + +app.registerExtension({ + name: 'comfy.easyUse.seg', + async beforeRegisterNodeDef(nodeType, nodeData, app) { + + if (nodeData.name == 'easy humanSegmentation') { + // 创建时 + const onNodeCreated = nodeType.prototype.onNodeCreated; + nodeType.prototype.onNodeCreated = function () { + onNodeCreated ? 
onNodeCreated?.apply(this, arguments) : undefined; + const method = this.widgets.findIndex((w) => w.name == 'method'); + const list = $el("ul.easyuse-prompt-styles-list.no-top", []); + let method_values = '' + this.setProperty("values", []) + + let selector = this.addDOMWidget('mask_components',"btn",$el('div.easyuse-prompt-styles',[list])) + + Object.defineProperty(this.widgets[method],'value',{ + set:(value)=>{ + method_values = value + if(method_values){ + selector.element.children[0].innerHTML = '' + if(method_values == 'selfie_multiclass_256x256'){ + toggleWidget(this, findWidgetByName(this, 'confidence'), true) + this.setSize([300, 260]); + }else{ + toggleWidget(this, findWidgetByName(this, 'confidence')) + this.setSize([300, 500]); + } + let list = getTagList(tags[method_values]); + selector.element.children[0].append(...list) + } + }, + get: () => { + return method_values + } + }) + + let mask_select_values = '' + + Object.defineProperty(selector, "value", { + set: (value) => { + setTimeout(_=>{ + selector.element.children[0].querySelectorAll(".easyuse-prompt-styles-tag").forEach(el => { + let arr = value.split(',') + if (arr.includes(el.dataset.tag)) { + el.classList.add("easyuse-prompt-styles-tag-selected"); + el.children[0].checked = true + } + }) + },100) + }, + get: () => { + selector.element.children[0].querySelectorAll(".easyuse-prompt-styles-tag").forEach(el => { + if(el.classList.value.indexOf("easyuse-prompt-styles-tag-selected")>=0){ + if(!this.properties["values"].includes(el.dataset.tag)){ + this.properties["values"].push(el.dataset.tag); + } + }else{ + if(this.properties["values"].includes(el.dataset.tag)){ + this.properties["values"]= this.properties["values"].filter(v=>v!=el.dataset.tag); + } + } + }); + mask_select_values = this.properties["values"].join(','); + return mask_select_values; + } + }); + + let old_values = '' + let mask_lists_dom = selector.element.children[0] + + // 初始化 + setTimeout(_=>{ + if(!method_values) { + method_values = 'selfie_multiclass_256x256' + selector.element.children[0].innerHTML = '' + // 重新排序 + let list = getTagList(tags[method_values]); + selector.element.children[0].append(...list) + } + if(method_values == 'selfie_multiclass_256x256'){ + toggleWidget(this, findWidgetByName(this, 'confidence'), true) + this.setSize([300, 260]); + }else{ + toggleWidget(this, findWidgetByName(this, 'confidence')) + this.setSize([300, 500]); + } + },1) + + return onNodeCreated; + } + } + } +}) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/easy/easySelector.js b/ComfyUI-Easy-Use/web_version/v1/js/easy/easySelector.js new file mode 100644 index 0000000000000000000000000000000000000000..c9dde34da828d7bed2d0361c9aaf489e4ed2d88a --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/easy/easySelector.js @@ -0,0 +1,296 @@ +// 1.0.3 +import { app } from "../../../../scripts/app.js"; +import { api } from "../../../../scripts/api.js"; +import { $el } from "../../../../scripts/ui.js"; +import { $t } from "../common/i18n.js"; + +// 获取风格列表 +let styles_list_cache = {} +let styles_image_cache = {} +async function getStylesList(name){ + if(styles_list_cache[name]) return styles_list_cache[name] + else{ + const resp = await api.fetchApi(`/easyuse/prompt/styles?name=${name}`); + if (resp.status === 200) { + let data = await resp.json(); + styles_list_cache[name] = data; + return data; + } + return undefined; + } +} +async function getStylesImage(name, styles_name){ + if(!styles_image_cache[styles_name]) styles_image_cache[styles_name] = 
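+    // Lazily create a per-styles-name bucket so each preview image URL is only fetched once.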
{} + if(styles_image_cache[styles_name][name]) return styles_image_cache[styles_name][name] + else{ + const resp = await api.fetchApi(`/easyuse/prompt/styles/image?name=${name}&styles_name=${styles_name}`); + if (resp.status === 200) { + const text = await resp.text() + if(text.startsWith('http')){ + styles_image_cache[styles_name][name] = text + return text + } + const url = `/easyuse/prompt/styles/image?name=${name}&styles_name=${styles_name}` + styles_image_cache[styles_name][name] = url + return url + } + return undefined; + } +} + +function getTagList(tags, styleName, language='en-US') { + let rlist=[] + tags.forEach((k,i) => { + rlist.push($el( + "label.easyuse-prompt-styles-tag", + { + dataset: { + tag: k['name'], + name: language == 'zh-CN' && k['name_cn'] ? k['name_cn'] : k['name'], + imgName: k['imgName'], + index: i + }, + $: (el) => { + el.children[0].onclick = () => { + el.classList.toggle("easyuse-prompt-styles-tag-selected"); + }; + el.onmousemove = (e) => { + displayImage(el.dataset.imgName, styleName, e) + }; + el.onmouseout = () => { + hiddenImage() + }; + el.onmouseover = (e) => { + displayImage(el.dataset.imgName, styleName) + }; + }, + }, + [ + $el("input",{ + type: 'checkbox', + name: k['name'] + }), + $el("span",{ + textContent: language == 'zh-CN' && k['name_cn'] ? k['name_cn'] : k['name'], + }) + ] + )) + }); + return rlist +} + +const foocus_base_path = "https://raw.githubusercontent.com/lllyasviel/Fooocus/main/sdxl_styles/samples/" +const empty_img = "data:image/jpeg;base64,/9j/4QAYRXhpZgAASUkqAAgAAAAAAAAAAAAAAP/sABFEdWNreQABAAQAAAA8AAD/4QNLaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wLwA8P3hwYWNrZXQgYmVnaW49Iu+7vyIgaWQ9Ilc1TTBNcENlaGlIenJlU3pOVGN6a2M5ZCI/PiA8eDp4bXBtZXRhIHhtbG5zOng9ImFkb2JlOm5zOm1ldGEvIiB4OnhtcHRrPSJBZG9iZSBYTVAgQ29yZSA5LjEtYzAwMSA3OS4xNDYyODk5Nzc3LCAyMDIzLzA2LzI1LTIzOjU3OjE0ICAgICAgICAiPiA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPiA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtbG5zOnhtcE1NPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvbW0vIiB4bWxuczpzdFJlZj0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL3NUeXBlL1Jlc291cmNlUmVmIyIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgMjUuMSAoMjAyMzA5MDUubS4yMzE2IDk3OWM4NmQpICAoV2luZG93cykiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6RjA3NEU1QzNCNUJBMTFFRUExMUVDNkZDRjI0NzlBN0QiIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6RjA3NEU1QzRCNUJBMTFFRUExMUVDNkZDRjI0NzlBN0QiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDpGMDc0RTVDMUI1QkExMUVFQTExRUM2RkNGMjQ3OUE3RCIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDpGMDc0RTVDMkI1QkExMUVFQTExRUM2RkNGMjQ3OUE3RCIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/Pv/uAA5BZG9iZQBkwAAAAAH/2wCEAAYEBAQFBAYFBQYJBgUGCQsIBgYICwwKCgsKCgwQDAwMDAwMEAwODxAPDgwTExQUExMcGxsbHB8fHx8fHx8fHx8BBwcHDQwNGBAQGBoVERUaHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fH//AABEIAIAAgAMBEQACEQEDEQH/xACLAAEAAgMBAQEAAAAAAAAAAAAABAUCAwYBBwgBAQADAQEBAAAAAAAAAAAAAAABAgMEBQYQAAEEAgECAwUHAwUAAAAAAAEAAgMEEQUhEgYxEwdBYSIyFFFxgVJyIxWRoTOxwdFiJBEBAAICAQQBBAIDAAAAAAAAAAECEQMxIUESBBOB0SIyUXGCIwX/2gAMAwEAAhEDEQA/AP1SgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICDXJYgj+d4afsVopM8KWvEcy8it1pXdMcjXO/Lnn+im2u0cwV2VniW1UXEBAQEBAQEBAQEBAQRNlc+mgyDh7zhv+5WunX5Sw37fCHM2dh48r06ank7N6rn2Ja7qa4hw5BBwQV010uK+/DsO29v/J68SOI86Jxjl95HIP4gryPc0fHfHaXu+j7Py68zzHSVquV2iAgICAgICAgICDyTr6HdHz4PTnwypjnqic46OauNbY6mGX99p+L8w9xaeV6OufHt0eXtr59M9VFb194E9LmuH3kf6rv17avO2ets7YVcuuuk/uOa3PgBlxP4BdMbq9nLPqbJ5xDbSM9azFXpyujuSO+Bo5kcf0NPy
j25We2YtEzaPxdfr6519Kz+UvqEIlELBKQZQ0eYRwC7HOPxXzVsZ6cPpK5x15ZKEiAgICAgICAgICCNc1tG40CzA2XHg4j4h9zhyFpr22p+s4Z7NNL/ALRlTX+1dVFBJOJrcTI2lxZHYcBx+sldWv3bzOMVn6fZy39OkRnNo+v3aoOx9JOxks8tqwHDPS+1IW8+IzGWZVrf9DZHSMR/j9yvo656zMz9V1rdLqdYwsoVIqwd87mNAc79Tvmd+JXJt332ftMy6temlP1jCasmggICAgICAgICAgwlmiib1SPDB7zhWrWZ4VtaI5QXb2l5ojYHvLjjIGB/dbR61sZlhPtVziFb3PYdd0luCvAZbXludVZ1huZQPgyTx4/atvWj4rxaZ6d/6Ye1/t1zSI6zx/bzti5YqaOpBeg8u41n/oa14cA4ccH7lPs1jZebVn8eyPUtOrXFbR+XdYx9xa90pjeXROaSCXDj+oysZ9S+Mx1bR7uvOJ6LGOWKVgfG8PafAtOQueazHLqraJjMMlCRAQEBAQEBAQRLNp4HTFx/2/4WtKR3Y32T2Udl8j3knk/aeSu6kREPPvaZlpY3DmyY8DyrzPZWv8tkvmFv7bg12RyR1DGeeMj2KnjE9JaeUx1hi1sgaet/U7JIOMcE8Dj7FMREcK2zPKMasr5XO6fmOVt5xEOadVplYU45IAOhxa72kLm2TFuXXqrNeF1WtlwDZeHfmHguO+vHDupszylLJsICAgICAg8cMjCQiYR5IVpFmc1Q5qLXHPgfbhbV2MLaYlqNQAYA4V/kV+PDA1fcp81fjYurtYMu4CmLZRNYhtZWBAI8CqzdaKN8df3LObtIokxwe5ZzZrFUloIGFnLWHqhIgICAgICAgxMbSpyjDAwAq3kr4MTWCnzR4MX02PGHDISNmETqieWba7QABwB4KJumKNgjaFXK0VZYChYQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEHzvuv1G7k1W9s6/Xamtaq15oaonmnsCR008HntaI4K8/s4HOeEGXZXqTud7uqtG7r6kNa5HdMU9aaw9zZde+FkrHsnr1+M2MZBPIKDRe9cO2K2mjs/V0m7X61lWzq32W+ZFEbfkSSO4B+GL9zw4QWm99TqFVmjsaSu7fUtxeNM2aTmSMBbHI9zWHqHVJlnDTxjPKCJL6sea502t1D7Ouhr0rNqxNM2CSNuwnkgjAi6ZOotdEc/Egibf1j/j+7JNL9DWdWg84TWn2ywtdFKyMZb5Tg0nLyG55x48IJ3bXqe/ea/a26dFtyTXtldDUqyOdNL5VqaDHS5gwXRxMe3xz1Y9iDKP1Sa7uefUnR7TyYqUVoEU5jY6pJZIz1RY4ZiMYd7TkexBA749Wr2gtCKlrIpGs17NjK29LLWmPmMsyiFkbIZsPEdKQu6y0eAQWdD1E2L93W1tzRyCDY3paev2NaxVlhIjidMfMb5vmse1kbi9pZ7MeKDt0BAQEBAQfEPU+lFY2++q2K1uSSezTnrReVsTTmiZVYHOd9LVuQyubIwANkbxz4FA7FsQ0NrrLNXX7N0eo1+3darGDYPjb5j6prxVRajjDetsRAjj4yM4CDre2uxO7q2hqtm7nua6w9rp5tfXgoSxwyTOMr42PlrPe4Nc8jJJQRDb3Oz1fYFrcV7As0mu3u7nbWkBZ9LSfG5nlxs/yySWRiNozwcBBx9EXadGTXz62+LG41+jZS6adhzS6vfnlkEjgzEZax7T8ePFBu3nbPdUXqJZsw6S5cqbCW1YdIY2lxhhfEGMjfHtoG9HxucwPEZy4/A7kMC87aq2Kmv7mdvxuqGmklFjUU4G2Yp21rdyW00t+kJkFl88pY9vDgwNDvEoK9np73FBcHdkrt2+rZd5FjQx7O0b8WvbzDKZhN1SSse573QdeAHkN+Ichj3p2rBvZq9vUnY2tcNQPqpZYZpJ44GxXqzHdVlzZZpib73mLHViI85c1BZ6OpsIe/6/XSuntevdsz6+8+pI0/yM1dtWVr2Z644P8rmyuj6S53jxkh9aQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBB/9k=" +async function displayImage(imgName, styleName) { + var e = event || window.event; + var img = document.getElementById("show_image_id"); + var pxy= img.parentElement.getBoundingClientRect(); + if(imgName) { + const url = await getStylesImage(imgName, styleName) + img.src = url + img.onerror = _ =>{ + img.src = empty_img + } + } + var scale = app?.canvas?.ds?.scale || 1; + var x = (e.pageX-pxy.x-100)/scale; + var y = (e.pageY-pxy.y+25)/scale; + img.style.left = x+"px"; + img.style.top = y+"px"; + img.style.display = "block"; + img.style.borderRadius = "10px"; + img.style.borderColor = "var(--fg-color)" + img.style.borderWidth = "1px"; + img.style.borderStyle = "solid"; +} +function hiddenImage(){ //theEvent用来传入事件,Firefox的方式 + var img = document.getElementById('show_image_id'); + img.style.display = "none"; +} + +// StylePromptSelector +app.registerExtension({ + name: 'comfy.easyUse.styleSelector', + async beforeRegisterNodeDef(nodeType, nodeData, app) { + + if(nodeData.name == 'easy stylesSelector'){ + // 创建时 + const onNodeCreated = nodeType.prototype.onNodeCreated; + nodeType.prototype.onNodeCreated = function() { + onNodeCreated ? 
onNodeCreated?.apply(this, arguments) : undefined; + const styles_id = this.widgets.findIndex((w) => w.name == 'styles'); + const language = localStorage['AGL.Locale'] || localStorage['Comfy.Settings.AGL.Locale'] || 'en-US' + const list = $el("ul.easyuse-prompt-styles-list",[]); + let styles_values = '' + this.setProperty("values", []) + + let selector = this.addDOMWidget('select_styles',"btn",$el('div.easyuse-prompt-styles',[$el('div.tools', [ + $el('button.delete',{ + textContent: $t('Empty All'), + style:{}, + onclick:()=>{ + selector.element.children[0].querySelectorAll(".search").forEach(el=>{ + el.value = '' + }) + selector.element.children[1].querySelectorAll(".easyuse-prompt-styles-tag-selected").forEach(el => { + el.classList.remove("easyuse-prompt-styles-tag-selected"); + el.children[0].checked = false + }) + selector.element.children[1].querySelectorAll(".easyuse-prompt-styles-tag").forEach(el => { + el.classList.remove('hide') + }) + this.setProperty("values", []) + }} + ), + $el('textarea.search',{ + dir:"ltr", + style:{"overflow-y": "scroll"}, + rows:1, + placeholder:$t("🔎 Type here to search styles ..."), + oninput:(e)=>{ + let value = e.target.value + selector.element.children[1].querySelectorAll(".easyuse-prompt-styles-tag").forEach(el => { + const name = el.dataset.name.toLowerCase() + const tag = el.dataset.tag.toLowerCase() + const lower_value = value.toLowerCase() + if(name.indexOf(lower_value) != -1 || tag.indexOf(lower_value) != -1 || el.classList.value.indexOf("easyuse-prompt-styles-tag-selected")!=-1){ + el.classList.remove('hide') + } + else{ + el.classList.add('hide') + } + }) + } + }) + ]),list, + $el('img',{id:'show_image_id', + style:{display:'none',position:'absolute'}, + src:``, + onerror:()=>{ + this.src = empty_img + } + }) + ])); + + Object.defineProperty(this.widgets[styles_id],'value',{ + set:(value)=>{ + styles_values = value + if(styles_values){ + getStylesList(styles_values).then(_=>{ + selector.element.children[1].innerHTML='' + if(styles_list_cache[styles_values]){ + let tags = styles_list_cache[styles_values] + // 重新排序 + if(selector.value) tags = tags.sort((a,b)=> selector.value.includes(b.name) - selector.value.includes(a.name)) + this.properties["values"] = [] + let list = getTagList(tags, value, language); + selector.element.children[1].append(...list) + selector.element.children[1].querySelectorAll(".easyuse-prompt-styles-tag").forEach(el => { + if (this.properties["values"].includes(el.dataset.tag)) { + el.classList.add("easyuse-prompt-styles-tag-selected"); + } + if(this.size?.[0]<150 || this.size?.[1]<150) this.setSize([425, 500]); + }) + } + }) + } + }, + get: () => { + return styles_values + } + }) + + + let style_select_values = '' + Object.defineProperty(selector, "value", { + set: (value) => { + setTimeout(_=>{ + selector.element.children[1].querySelectorAll(".easyuse-prompt-styles-tag").forEach(el => { + let arr = value.split(',') + if (arr.includes(el.dataset.tag)) { + el.classList.add("easyuse-prompt-styles-tag-selected"); + el.children[0].checked = true + } + }) + },300) + }, + get: () => { + selector.element.children[1].querySelectorAll(".easyuse-prompt-styles-tag").forEach(el => { + if(el.classList.value.indexOf("easyuse-prompt-styles-tag-selected")>=0){ + if(!this.properties["values"].includes(el.dataset.tag)){ + this.properties["values"].push(el.dataset.tag); + } + }else{ + if(this.properties["values"].includes(el.dataset.tag)){ + this.properties["values"]= this.properties["values"].filter(v=>v!=el.dataset.tag); + } + } + }); + 
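+                    // The loop above has synced the checked tags into this.properties["values"]; the widget's
+                    // value is then reported as a comma-separated string, e.g. "styleA,styleB" (names illustrative).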
style_select_values = this.properties["values"].join(','); + return style_select_values; + } + }); + + let old_values = '' + let style_lists_dom = selector.element.children[1] + style_lists_dom.addEventListener('mouseenter', function (e) { + let value = '' + style_lists_dom.querySelectorAll(".easyuse-prompt-styles-tag-selected").forEach(el=> value+=el.dataset.tag) + old_values = value + }) + style_lists_dom.addEventListener('mouseleave', function (e) { + let value = '' + style_lists_dom.querySelectorAll(".easyuse-prompt-styles-tag-selected").forEach(el=> value+=el.dataset.tag) + let new_values = value + if(old_values != new_values){ + // console.log("选项发生了变化") + // 获取搜索值 + const search_value = document.getElementsByClassName('search')[0]['value'] + // 重新排序 + const tags = styles_list_cache[styles_values].sort((a,b)=> new_values.includes(b.name) - new_values.includes(a.name)) + style_lists_dom.innerHTML = '' + let list = getTagList(tags, styles_values, language); + style_lists_dom.append(...list) + style_lists_dom.querySelectorAll(".easyuse-prompt-styles-tag").forEach(el => { + if (new_values.includes(el.dataset.tag)) { + el.classList.add("easyuse-prompt-styles-tag-selected"); + el.children[0].checked = true; + } + if(search_value){ + if(el.dataset.name.indexOf(search_value) != -1 || el.dataset.tag.indexOf(search_value) != -1 || el.classList.value.indexOf("easyuse-prompt-styles-tag-selected")!=-1){ + el.classList.remove('hide') + } + else{ + el.classList.add('hide') + } + } + + }) + } + }) + + + // 初始化 + setTimeout(_=>{ + if(!styles_values) { + styles_values = 'fooocus_styles' + getStylesList(styles_values).then(_=>{ + selector.element.children[1].innerHTML='' + if(styles_list_cache[styles_values]){ + let tags = styles_list_cache[styles_values] + // 重新排序 + if(selector.value) tags = tags.sort((a,b)=> selector.value.includes(b.name) - selector.value.includes(a.name)) + let list = getTagList(tags, styles_values, language); + selector.element.children[1].append(...list) + } + }) + } + if(this.size?.[0]<150 || this.size?.[1]<150) this.setSize([425, 500]); + // + },100) + + return onNodeCreated; + } + } + } +}) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/easy/easySliderControl.js b/ComfyUI-Easy-Use/web_version/v1/js/easy/easySliderControl.js new file mode 100644 index 0000000000000000000000000000000000000000..17fc5094c5e773dc88698c9e6beb4758f52819fb --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/easy/easySliderControl.js @@ -0,0 +1,173 @@ +import { app } from "../../../../scripts/app.js"; +import { api } from "../../../../scripts/api.js"; +import { $el } from "../../../../scripts/ui.js"; +import { $t } from "../common/i18n.js"; +import { sleep } from "../common/utils.js"; + + +const calculatePercent = (value, min, max) => ((value-min)/(max-min)*100) + +const getLayerDefaultValue = (index) => { + switch (index){ + case 3: + return 2.5 + case 6: + return 1 + default: + return 0 + } +} + +const addLayer = (_this, layer_total, arrays, sliders, i) => { + let scroll = $el('div.easyuse-slider-item-scroll') + let value = $el('div.easyuse-slider-item-input', {textContent: arrays[i]['value']}) + let label = $el('div.easyuse-slider-item-label', {textContent: 'L'+i}) + let girdTotal = (arrays[i]['max'] - arrays[i]['min']) / arrays[i]['step'] + let area = $el('div.easyuse-slider-item-area', {style:{ height: calculatePercent(arrays[i]['default'],arrays[i]['min'],arrays[i]['max']) + '%'}}) + let bar = $el('div.easyuse-slider-item-bar', { + style:{ top: 
(100-calculatePercent(arrays[i]['default'],arrays[i]['min'],arrays[i]['max'])) + '%'}, + onmousedown: (e) => { + let event = e || window.event; + var y = event.clientY - bar.offsetTop; + document.onmousemove = (e) => { + let event = e || window.event; + let top = event.clientY - y; + if(top < 0){ + top = 0; + } + else if(top > scroll.offsetHeight - bar.offsetHeight){ + top = scroll.offsetHeight - bar.offsetHeight; + } + // top到最近的girdHeight值 + let girlHeight = (scroll.offsetHeight - bar.offsetHeight)/ girdTotal + top = Math.round(top / girlHeight) * girlHeight; + bar.style.top = Math.floor(top/(scroll.offsetHeight - bar.offsetHeight)* 100) + '%'; + area.style.height = Math.floor((scroll.offsetHeight - bar.offsetHeight - top)/(scroll.offsetHeight - bar.offsetHeight)* 100) + '%'; + value.innerText = parseFloat(parseFloat(arrays[i]['max'] - (arrays[i]['max']-arrays[i]['min']) * (top/(scroll.offsetHeight - bar.offsetHeight))).toFixed(2)) + arrays[i]['value'] = value.innerText + _this.properties['values'][i] = i+':'+value.innerText + window.getSelection ? window.getSelection().removeAllRanges() : document.selection.empty(); + } + }, + ondblclick:_=>{ + bar.style.top = (100-calculatePercent(arrays[i]['default'],arrays[i]['min'],arrays[i]['max'])) + '%' + area.style.height = calculatePercent(arrays[i]['default'],arrays[i]['min'],arrays[i]['max']) + '%' + value.innerText = arrays[i]['default'] + arrays[i]['value'] = arrays[i]['default'] + _this.properties['values'][i] = i+':'+value.innerText + } + }) + document.onmouseup = _=> document.onmousemove = null; + + scroll.replaceChildren(bar,area) + let item_div = $el('div.easyuse-slider-item',[ + value, + scroll, + label + ]) + if(i == 3 ) layer_total == 12 ? item_div.classList.add('negative') : item_div.classList.remove('negative') + else if(i == 6) layer_total == 12 ? item_div.classList.add('positive') : item_div.classList.remove('positive') + sliders.push(item_div) + return item_div +} + +const setSliderValue = (_this, type, refresh=false, values_div, sliders_value) => { + let layer_total = type == 'sdxl' ? 12 : 16 + let sliders = [] + let arrays = Array.from({length: layer_total}, (v, i) => ({default: layer_total == 12 ? getLayerDefaultValue(i) : 0, min: -1, max: 3, step: 0.05, value:layer_total == 12 ? getLayerDefaultValue(i) : 0})) + _this.setProperty("values", Array.from({length: layer_total}, (v, i) => i+':'+arrays[i]['value'])) + for (let i = 0; i < layer_total; i++) { + addLayer(_this, layer_total, arrays, sliders, i) + } + if(refresh) values_div.replaceChildren(...sliders) + else{ + values_div = $el('div.easyuse-slider', sliders) + sliders_value = _this.addDOMWidget('values',"btn",values_div) + } + + Object.defineProperty(sliders_value, 'value', { + set: function() {}, + get: function() { + return _this.properties.values.join(','); + } + }); + return {sliders, arrays, values_div, sliders_value} +} + + +app.registerExtension({ + name: 'comfy.easyUse.sliderControl', + async beforeRegisterNodeDef(nodeType, nodeData, app) { + if(nodeData.name == 'easy sliderControl'){ + // 创建时 + const onNodeCreated = nodeType.prototype.onNodeCreated; + nodeType.prototype.onNodeCreated = function() { + onNodeCreated && onNodeCreated.call(this); + const mode = this.widgets[0]; + const model_type = this.widgets[1]; + let layer_total = model_type.value == 'sdxl' ? 
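+                // The number of per-layer sliders follows the model_type widget: 12 for 'sdxl', 16 otherwise,
+                // with per-layer defaults from getLayerDefaultValue() in the 12-layer case and 0 elsewhere.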
12 : 16 + let _this = this + let values_div = null + let sliders_value = null + mode.callback = async()=>{ + switch (mode.value) { + case 'ipadapter layer weights': + nodeData.output_name = ['layer_weights'] + _this.outputs[0]['name'] = 'layer_weights' + _this.outputs[0]['label'] = 'layer_weights' + break + } + } + + model_type.callback = async()=>{ + if(values_div) { + let r2 = setSliderValue(_this, model_type.value, true, values_div, sliders_value) + values_div = r2.values_div + sliders_value = r2.sliders_value + } + _this.setSize(model_type.value == 'sdxl' ? [375,320] : [455,320]) + } + + let r1 = setSliderValue(_this, model_type.value, false, values_div, sliders_value) + let sliders = r1.sliders + let arrays = r1.arrays + values_div = r1.values_div + sliders_value = r1.sliders_value + setTimeout(_=>{ + let values_widgets_index = this.widgets.findIndex((w) => w.name == 'values'); + if(values_widgets_index != -1){ + let old_values_widget = this.widgets[values_widgets_index]; + let old_value = old_values_widget.value.split(',') + let layer_total = _this.widgets[1].value == 'sdxl' ? 12 : 16 + for (let i = 0; i < layer_total; i++) { + let value = parseFloat(parseFloat(old_value[i].split(':')[1]).toFixed(2)) + let item_div = sliders[i] || null + // 存在层即修改 + if(arrays[i]){ + arrays[i]['value'] = value + _this.properties['values'][i] = old_value[i] + }else{ + arrays.push({default: layer_total == 12 ? getLayerDefaultValue(i) : 0, min: -1, max: 3, step: 0.05, value:layer_total == 12 ? getLayerDefaultValue(i) : 0}) + _this.properties['values'].push(i+':'+arrays[i]['value']) + // 添加缺失层 + item_div = addLayer(_this, layer_total, arrays, sliders, i) + values_div.appendChild(item_div) + } + // todo: 修改bar位置等 + let input = item_div.getElementsByClassName('easyuse-slider-item-input')[0] + let bar = item_div.getElementsByClassName('easyuse-slider-item-bar')[0] + let area = item_div.getElementsByClassName('easyuse-slider-item-area')[0] + if(i == 3 ) layer_total == 12 ? item_div.classList.add('negative') : item_div.classList.remove('negative') + else if(i == 6) layer_total == 12 ? item_div.classList.add('positive') : item_div.classList.remove('positive') + input.textContent = value + bar.style.top = (100-calculatePercent(value,arrays[i]['min'],arrays[i]['max'])) + '%' + area.style.height = calculatePercent(value,arrays[i]['min'],arrays[i]['max']) + '%' + } + } + _this.setSize(model_type.value == 'sdxl' ? 
[375,320] : [455,320]) + },1) + return onNodeCreated; + } + } + } +}) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/easy/easySuggestion.js b/ComfyUI-Easy-Use/web_version/v1/js/easy/easySuggestion.js new file mode 100644 index 0000000000000000000000000000000000000000..76dbf34ffeab71786606672e23ab4b25eae0b9e0 --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/easy/easySuggestion.js @@ -0,0 +1,524 @@ +import {app} from "../../../../scripts/app.js"; +import {api} from "../../../../scripts/api.js"; +import {$el} from "../../../../scripts/ui.js"; + +const propmts = ["easy wildcards", "easy positive", "easy negative", "easy stylesSelector", "easy promptConcat", "easy promptReplace"] +const loaders = ["easy a1111Loader", "easy comfyLoader", "easy fullLoader", "easy svdLoader", "easy cascadeLoader", "easy sv3dLoader"] +const preSamplingNodes = ["easy preSampling", "easy preSamplingAdvanced", "easy preSamplingNoiseIn", "easy preSamplingCustom", "easy preSamplingDynamicCFG","easy preSamplingSdTurbo", "easy preSamplingLayerDiffusion"] +const kSampler = ["easy kSampler", "easy kSamplerTiled","easy kSamplerInpainting", "easy kSamplerDownscaleUnet", "easy kSamplerSDTurbo"] +const controlNetNodes = ["easy controlnetLoader", "easy controlnetLoaderADV"] +const instantIDNodes = ["easy instantIDApply", "easy instantIDApplyADV"] +const ipadapterNodes = ["easy ipadapterApply", "easy ipadapterApplyADV" ,"easy ipadapterApplyFaceIDKolors", "easy ipadapterStyleComposition"] +const pipeNodes = ['easy pipeIn','easy pipeOut', 'easy pipeEdit'] +const xyNodes = ['easy XYPlot', 'easy XYPlotAdvanced'] +const extraNodes = ['easy setNode'] +const modelNormalNodes = [...["Reroute"],...['RescaleCFG','LoraLoaderModelOnly','LoraLoader','FreeU','FreeU_v2'],...ipadapterNodes,...extraNodes] +const suggestions = { + // prompt + "easy seed":{ + "from":{ + "INT": [...["Reroute"],...preSamplingNodes,...['easy fullkSampler']] + } + }, + "easy positive":{ + "from":{ + "STRING": [...["Reroute"],...propmts] + } + }, + "easy negative":{ + "from":{ + "STRING": [...["Reroute"],...propmts] + } + }, + "easy wildcards":{ + "from":{ + "STRING": [...["Reroute","easy showAnything"],...propmts,] + } + }, + "easy stylesSelector":{ + "from":{ + "STRING": [...["Reroute","easy showAnything"],...propmts,] + } + }, + "easy promptConcat":{ + "from":{ + "STRING": [...["Reroute","easy showAnything"],...propmts,] + } + }, + "easy promptReplace":{ + "from":{ + "STRING": [...["Reroute","easy showAnything"],...propmts,] + } + }, + // sd相关 + "easy fullLoader": { + "from":{ + "PIPE_LINE": [...["Reroute"],...preSamplingNodes,...['easy fullkSampler'],...pipeNodes,...extraNodes], + "MODEL":modelNormalNodes + }, + "to":{ + "STRING": [...["Reroute"],...propmts] + } + }, + "easy a1111Loader": { + "from": { + "PIPE_LINE": [...["Reroute"], ...preSamplingNodes, ...controlNetNodes, ...instantIDNodes, ...pipeNodes, ...extraNodes], + "MODEL": modelNormalNodes + }, + "to":{ + "STRING": [...["Reroute"],...propmts] + } + }, + "easy comfyLoader": { + "from": { + "PIPE_LINE": [...["Reroute"], ...preSamplingNodes, ...controlNetNodes, ...instantIDNodes, ...pipeNodes, ...extraNodes], + "MODEL": modelNormalNodes + }, + "to":{ + "STRING": [...["Reroute"],...propmts] + } + }, + "easy svdLoader":{ + "from": { + "PIPE_LINE": [...["Reroute"], ...["easy preSampling", "easy preSamplingAdvanced", "easy preSamplingDynamicCFG"], ...pipeNodes, ...extraNodes], + "MODEL": modelNormalNodes + }, + "to":{ + "STRING": [...["Reroute"],...propmts] + } + }, + "easy 
zero123Loader":{ + "from": { + "PIPE_LINE": [...["Reroute"], ...["easy preSampling", "easy preSamplingAdvanced", "easy preSamplingDynamicCFG"], ...pipeNodes, ...extraNodes], + "MODEL": modelNormalNodes + }, + "to":{ + "STRING": [...["Reroute"],...propmts] + } + }, + "easy sv3dLoader":{ + "from": { + "PIPE_LINE": [...["Reroute"], ...["easy preSampling", "easy preSamplingAdvanced", "easy preSamplingDynamicCFG"], ...pipeNodes, ...extraNodes], + "MODEL": modelNormalNodes + }, + "to":{ + "STRING": [...["Reroute"],...propmts] + } + }, + "easy preSampling": { + "from": { + "PIPE_LINE": [...["Reroute"], ...kSampler, ...pipeNodes, ...controlNetNodes, ...xyNodes, ...extraNodes] + }, + }, + "easy preSamplingAdvanced": { + "from": { + "PIPE_LINE": [...["Reroute"], ...kSampler, ...pipeNodes, ...controlNetNodes, ...xyNodes, ...extraNodes] + } + }, + "easy preSamplingDynamicCFG": { + "from": { + "PIPE_LINE": [...["Reroute"], ...kSampler, ...pipeNodes, ...controlNetNodes, ...xyNodes, ...extraNodes] + } + }, + "easy preSamplingCustom": { + "from": { + "PIPE_LINE": [...["Reroute"], ...kSampler, ...pipeNodes, ...controlNetNodes, ...xyNodes, ...extraNodes] + } + }, + "easy preSamplingLayerDiffusion": { + "from": { + "PIPE_LINE": [...["Reroute", "easy kSamplerLayerDiffusion"], ...kSampler, ...pipeNodes, ...controlNetNodes, ...xyNodes, ...extraNodes] + } + }, + "easy preSamplingNoiseIn": { + "from": { + "PIPE_LINE": [...["Reroute"], ...kSampler, ...pipeNodes, ...controlNetNodes, ...xyNodes, ...extraNodes] + } + }, + // ksampler + "easy fullkSampler": { + "from": { + "PIPE_LINE": [...["Reroute"], ...pipeNodes.reverse(), ...['easy preDetailerFix', 'easy preMaskDetailerFix'], ...preSamplingNodes, ...extraNodes] + } + }, + "easy kSampler": { + "from": { + "PIPE_LINE": [...["Reroute"], ...pipeNodes.reverse(), ...['easy preDetailerFix', 'easy preMaskDetailerFix', 'easy hiresFix'], ...preSamplingNodes, ...extraNodes], + } + }, + // cn + "easy controlnetLoader": { + "from": { + "PIPE_LINE": [...["Reroute"], ...preSamplingNodes, ...controlNetNodes, ...instantIDNodes, ...pipeNodes, ...extraNodes] + } + }, + "easy controlnetLoaderADV":{ + "from": { + "PIPE_LINE": [...["Reroute"], ...preSamplingNodes, ...controlNetNodes, ...instantIDNodes, ...pipeNodes, ...extraNodes] + } + }, + // instant + "easy instantIDApply": { + "from": { + "PIPE_LINE": [...["Reroute"], ...preSamplingNodes, ...controlNetNodes, ...instantIDNodes, ...pipeNodes, ...extraNodes], + "MODEL": modelNormalNodes + }, + "to":{ + "COMBO": [...["Reroute", "easy promptLine"]] + } + }, + "easy instantIDApplyADV":{ + "from": { + "PIPE_LINE": [...["Reroute"], ...preSamplingNodes, ...controlNetNodes, ...instantIDNodes, ...pipeNodes, ...extraNodes], + "MODEL": modelNormalNodes + }, + "to":{ + "COMBO": [...["Reroute", "easy promptLine"]] + } + }, + "easy ipadapterApply":{ + "to":{ + "COMBO": [...["Reroute", "easy promptLine"]] + } + }, + "easy ipadapterApplyADV":{ + "to":{ + "STRING": [...["Reroute", "easy sliderControl"], ...propmts], + "COMBO": [...["Reroute", "easy promptLine"]] + } + }, + "easy ipadapterStyleComposition":{ + "to":{ + "COMBO": [...["Reroute", "easy promptLine"]] + } + }, + // fix + "easy preDetailerFix":{ + "from": { + "PIPE_LINE": [...["Reroute", "easy detailerFix"], ...pipeNodes, ...extraNodes] + }, + "to":{ + "PIPE_LINE": [...["Reroute", "easy ultralyticsDetectorPipe", "easy samLoaderPipe", "easy kSampler", "easy fullkSampler"]] + } + }, + "easy preMaskDetailerFix":{ + "from": { + "PIPE_LINE": [...["Reroute", "easy detailerFix"], ...pipeNodes, 
...extraNodes] + } + }, + "easy samLoaderPipe": { + "from":{ + "PIPE_LINE": [...["Reroute", "easy preDetailerFix"], ...pipeNodes, ...extraNodes] + } + }, + "easy ultralyticsDetectorPipe": { + "from":{ + "PIPE_LINE": [...["Reroute", "easy preDetailerFix"], ...pipeNodes, ...extraNodes] + } + }, + // cascade相关 + "easy cascadeLoader":{ + "from": { + "PIPE_LINE": [...["Reroute"], ...["easy fullCascadeKSampler", 'easy preSamplingCascade'], ...controlNetNodes, ...pipeNodes, ...extraNodes], + "MODEL": modelNormalNodes.filter(cate => !ipadapterNodes.includes(cate)) + } + }, + "easy fullCascadeKSampler":{ + "from": { + "PIPE_LINE": [...["Reroute"], ...["easy preSampling", "easy preSamplingAdvanced"], ...pipeNodes, ...extraNodes] + } + }, + "easy preSamplingCascade":{ + "from": { + "PIPE_LINE": [...["Reroute"], ...["easy cascadeKSampler",], ...pipeNodes, ...extraNodes] + } + }, + "easy cascadeKSampler": { + "from": { + "PIPE_LINE": [...["Reroute"], ...["easy preSampling", "easy preSamplingAdvanced"], ...pipeNodes, ...extraNodes] + } + }, +} + + +app.registerExtension({ + name: "comfy.easyuse.suggestions", + async setup(app) { + LGraphCanvas.prototype.createDefaultNodeForSlot = function(optPass) { // addNodeMenu for connection + var optPass = optPass || {}; + var opts = Object.assign({ nodeFrom: null // input + ,slotFrom: null // input + ,nodeTo: null // output + ,slotTo: null // output + ,position: [] // pass the event coords + ,nodeType: null // choose a nodetype to add, AUTO to set at first good + ,posAdd:[0,0] // adjust x,y + ,posSizeFix:[0,0] // alpha, adjust the position x,y based on the new node size w,h + } + ,optPass + ); + var that = this; + + var isFrom = opts.nodeFrom && opts.slotFrom!==null; + var isTo = !isFrom && opts.nodeTo && opts.slotTo!==null; + + if (!isFrom && !isTo){ + console.warn("No data passed to createDefaultNodeForSlot "+opts.nodeFrom+" "+opts.slotFrom+" "+opts.nodeTo+" "+opts.slotTo); + return false; + } + if (!opts.nodeType){ + console.warn("No type to createDefaultNodeForSlot"); + return false; + } + + var nodeX = isFrom ? opts.nodeFrom : opts.nodeTo; + var slotX = isFrom ? opts.slotFrom : opts.slotTo; + var nodeType = nodeX.type + + var iSlotConn = false; + switch (typeof slotX){ + case "string": + iSlotConn = isFrom ? nodeX.findOutputSlot(slotX,false) : nodeX.findInputSlot(slotX,false); + slotX = isFrom ? nodeX.outputs[slotX] : nodeX.inputs[slotX]; + break; + case "object": + // ok slotX + iSlotConn = isFrom ? nodeX.findOutputSlot(slotX.name) : nodeX.findInputSlot(slotX.name); + break; + case "number": + iSlotConn = slotX; + slotX = isFrom ? nodeX.outputs[slotX] : nodeX.inputs[slotX]; + break; + case "undefined": + default: + // bad ? + //iSlotConn = 0; + console.warn("Cant get slot information "+slotX); + return false; + } + + if (slotX===false || iSlotConn===false){ + console.warn("createDefaultNodeForSlot bad slotX "+slotX+" "+iSlotConn); + } + + // check for defaults nodes for this slottype + var fromSlotType = slotX.type==LiteGraph.EVENT?"_event_":slotX.type; + var slotTypesDefault = isFrom ? LiteGraph.slot_types_default_out : LiteGraph.slot_types_default_in; + if(slotTypesDefault && slotTypesDefault[fromSlotType]){ + if (slotX.link !== null) { + // is connected + }else{ + // is not not connected + } + let nodeNewType = false; + const fromOrTo = isFrom ? 
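+                // The hand-written `suggestions` table above takes priority over LiteGraph's generic
+                // slot_types_default_* lists: it is keyed by the source node type, the direction
+                // ('from' for outputs, 'to' for inputs) and the slot type (e.g. "PIPE_LINE").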
'from' : 'to' + if(suggestions[nodeType] && suggestions[nodeType][fromOrTo] && suggestions[nodeType][fromOrTo][fromSlotType]?.length>0){ + for(var typeX in suggestions[nodeType][fromOrTo][fromSlotType]){ + if (opts.nodeType == suggestions[nodeType][fromOrTo][fromSlotType][typeX] || opts.nodeType == "AUTO") { + nodeNewType = suggestions[nodeType][fromOrTo][fromSlotType][typeX]; + break + } + } + } + else if(typeof slotTypesDefault[fromSlotType] == "object" || typeof slotTypesDefault[fromSlotType] == "array"){ + for(var typeX in slotTypesDefault[fromSlotType]){ + if (opts.nodeType == slotTypesDefault[fromSlotType][typeX] || opts.nodeType == "AUTO"){ + nodeNewType = slotTypesDefault[fromSlotType][typeX]; + // console.log("opts.nodeType == slotTypesDefault[fromSlotType][typeX] :: "+opts.nodeType); + break; // -------- + } + } + }else{ + if (opts.nodeType == slotTypesDefault[fromSlotType] || opts.nodeType == "AUTO") nodeNewType = slotTypesDefault[fromSlotType]; + } + if (nodeNewType) { + var nodeNewOpts = false; + if (typeof nodeNewType == "object" && nodeNewType.node){ + nodeNewOpts = nodeNewType; + nodeNewType = nodeNewType.node; + } + + //that.graph.beforeChange(); + + var newNode = LiteGraph.createNode(nodeNewType); + if(newNode){ + // if is object pass options + if (nodeNewOpts){ + if (nodeNewOpts.properties) { + for (var i in nodeNewOpts.properties) { + newNode.addProperty( i, nodeNewOpts.properties[i] ); + } + } + if (nodeNewOpts.inputs) { + newNode.inputs = []; + for (var i in nodeNewOpts.inputs) { + newNode.addOutput( + nodeNewOpts.inputs[i][0], + nodeNewOpts.inputs[i][1] + ); + } + } + if (nodeNewOpts.outputs) { + newNode.outputs = []; + for (var i in nodeNewOpts.outputs) { + newNode.addOutput( + nodeNewOpts.outputs[i][0], + nodeNewOpts.outputs[i][1] + ); + } + } + if (nodeNewOpts.title) { + newNode.title = nodeNewOpts.title; + } + if (nodeNewOpts.json) { + newNode.configure(nodeNewOpts.json); + } + + } + + // add the node + that.graph.add(newNode); + newNode.pos = [ opts.position[0]+opts.posAdd[0]+(opts.posSizeFix[0]?opts.posSizeFix[0]*newNode.size[0]:0) + ,opts.position[1]+opts.posAdd[1]+(opts.posSizeFix[1]?opts.posSizeFix[1]*newNode.size[1]:0)]; //that.last_click_position; //[e.canvasX+30, e.canvasX+5];*/ + + //that.graph.afterChange(); + + // connect the two! + if (isFrom){ + opts.nodeFrom.connectByType( iSlotConn, newNode, fromSlotType ); + }else{ + opts.nodeTo.connectByTypeOutput( iSlotConn, newNode, fromSlotType ); + } + + // if connecting in between + if (isFrom && isTo){ + // TODO + } + + return true; + + }else{ + console.log("failed creating "+nodeNewType); + } + } + } + return false; + } + + LGraphCanvas.prototype.showConnectionMenu = function(optPass) { // addNodeMenu for connection + var optPass = optPass || {}; + var opts = Object.assign({ nodeFrom: null // input + ,slotFrom: null // input + ,nodeTo: null // output + ,slotTo: null // output + ,e: null + } + ,optPass + ); + var that = this; + + var isFrom = opts.nodeFrom && opts.slotFrom; + var isTo = !isFrom && opts.nodeTo && opts.slotTo; + + if (!isFrom && !isTo){ + console.warn("No data passed to showConnectionMenu"); + return false; + } + + var nodeX = isFrom ? opts.nodeFrom : opts.nodeTo; + var slotX = isFrom ? opts.slotFrom : opts.slotTo; + + var iSlotConn = false; + switch (typeof slotX){ + case "string": + iSlotConn = isFrom ? nodeX.findOutputSlot(slotX,false) : nodeX.findInputSlot(slotX,false); + slotX = isFrom ? 
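+                    // slotX may be passed as a slot name, a slot object or a numeric index; this switch
+                    // normalizes it to the slot object and records its index in iSlotConn.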
nodeX.outputs[slotX] : nodeX.inputs[slotX]; + break; + case "object": + // ok slotX + iSlotConn = isFrom ? nodeX.findOutputSlot(slotX.name) : nodeX.findInputSlot(slotX.name); + break; + case "number": + iSlotConn = slotX; + slotX = isFrom ? nodeX.outputs[slotX] : nodeX.inputs[slotX]; + break; + default: + // bad ? + //iSlotConn = 0; + console.warn("Cant get slot information "+slotX); + return false; + } + + var options = ["Add Node",null]; + + if (that.allow_searchbox){ + options.push("Search"); + options.push(null); + } + + // get defaults nodes for this slottype + var fromSlotType = slotX.type==LiteGraph.EVENT?"_event_":slotX.type; + var slotTypesDefault = isFrom ? LiteGraph.slot_types_default_out : LiteGraph.slot_types_default_in; + var nodeType = nodeX.type + if(slotTypesDefault && slotTypesDefault[fromSlotType]){ + const fromOrTo = isFrom ? 'from' : 'to' + if(suggestions[nodeType] && suggestions[nodeType][fromOrTo] && suggestions[nodeType][fromOrTo][fromSlotType]?.length>0){ + for(var typeX in suggestions[nodeType][fromOrTo][fromSlotType]){ + options.push(suggestions[nodeType][fromOrTo][fromSlotType][typeX]); + } + } + else if(typeof slotTypesDefault[fromSlotType] == "object" || typeof slotTypesDefault[fromSlotType] == "array"){ + for(var typeX in slotTypesDefault[fromSlotType]){ + options.push(slotTypesDefault[fromSlotType][typeX]); + } + }else{ + options.push(slotTypesDefault[fromSlotType]); + } + } + + // build menu + var menu = new LiteGraph.ContextMenu(options, { + event: opts.e, + title: (slotX && slotX.name!="" ? (slotX.name + (fromSlotType?" | ":"")) : "")+(slotX && fromSlotType ? fromSlotType : ""), + callback: inner_clicked + }); + + // callback + function inner_clicked(v,options,e) { + //console.log("Process showConnectionMenu selection"); + switch (v) { + case "Add Node": + LGraphCanvas.onMenuAdd(null, null, e, menu, function(node){ + if (isFrom){ + opts.nodeFrom.connectByType( iSlotConn, node, fromSlotType ); + }else{ + opts.nodeTo.connectByTypeOutput( iSlotConn, node, fromSlotType ); + } + }); + break; + case "Search": + if(isFrom){ + that.showSearchBox(e,{node_from: opts.nodeFrom, slot_from: slotX, type_filter_in: fromSlotType}); + }else{ + that.showSearchBox(e,{node_to: opts.nodeTo, slot_from: slotX, type_filter_out: fromSlotType}); + } + break; + default: + // check for defaults nodes for this slottype + var nodeCreated = that.createDefaultNodeForSlot(Object.assign(opts,{ position: [opts.e.canvasX, opts.e.canvasY] + ,nodeType: v + })); + if (nodeCreated){ + // new node created + //console.log("node "+v+" created") + }else{ + // failed or v is not in defaults + } + break; + } + } + + return false; + }; + } +}) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/easy/easyWidgets.js b/ComfyUI-Easy-Use/web_version/v1/js/easy/easyWidgets.js new file mode 100644 index 0000000000000000000000000000000000000000..899befed6a35139d51e23ed99b427bc0e05224ea --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/easy/easyWidgets.js @@ -0,0 +1,400 @@ +import { app } from "../../../../scripts/app.js"; +import { ComfyWidgets } from "../../../../scripts/widgets.js"; + +const KEY_CODES = { ENTER: 13, ESC: 27, ARROW_DOWN: 40, ARROW_UP: 38 }; +const WIDGET_GAP = -4; + +function hideInfoWidget(e, node, widget) { + let dropdownShouldBeRemoved = false; + let selectionIndex = -1; + + if (e) { + e.preventDefault(); + e.stopPropagation(); + displayDropdown(widget); + } else { + hideWidget(widget, node); + } + + function createDropdownElement() { + const dropdown = 
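+    // The dropdown is a plain <ul role="listbox"> appended to document.body and positioned under the
+    // widget's input element; the items created below act as the "hide this widget" commands.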
document.createElement('ul'); + dropdown.id = 'hideinfo-dropdown'; + dropdown.setAttribute('role', 'listbox'); + dropdown.classList.add('hideInfo-dropdown'); + return dropdown; + } + + function createDropdownItem(textContent, action) { + const listItem = document.createElement('li'); + listItem.id = `hideInfo-item-${textContent.replace(/ /g, '')}`; + listItem.classList.add('hideInfo-item'); + listItem.setAttribute('role', 'option'); + listItem.textContent = textContent; + listItem.addEventListener('mousedown', (event) => { + event.preventDefault(); + action(widget, node); // perform the action when dropdown item is clicked + removeDropdown(); + dropdownShouldBeRemoved = false; + }); + listItem.dataset.action = textContent.replace(/ /g, ''); // store the action in a data attribute + return listItem; + } + + function displayDropdown(widget) { + removeDropdown(); + + const dropdown = createDropdownElement(); + const listItemHide = createDropdownItem('Hide info Widget', hideWidget); + const listItemHideAll = createDropdownItem('Hide for all of this node-type', hideWidgetForNodetype); + + dropdown.appendChild(listItemHide); + dropdown.appendChild(listItemHideAll); + + const inputRect = widget.inputEl.getBoundingClientRect(); + dropdown.style.top = `${inputRect.top + inputRect.height}px`; + dropdown.style.left = `${inputRect.left}px`; + dropdown.style.width = `${inputRect.width}px`; + + document.body.appendChild(dropdown); + dropdownShouldBeRemoved = true; + + widget.inputEl.removeEventListener('keydown', handleKeyDown); + widget.inputEl.addEventListener('keydown', handleKeyDown); + document.addEventListener('click', handleDocumentClick); + } + + function removeDropdown() { + const dropdown = document.getElementById('hideinfo-dropdown'); + if (dropdown) { + dropdown.remove(); + widget.inputEl.removeEventListener('keydown', handleKeyDown); + } + document.removeEventListener('click', handleDocumentClick); + + } + + function handleKeyDown(event) { + const dropdownItems = document.querySelectorAll('.hideInfo-item'); + + if (event.keyCode === KEY_CODES.ENTER && dropdownShouldBeRemoved) { + event.preventDefault(); + if (selectionIndex !== -1) { + const selectedAction = dropdownItems[selectionIndex].dataset.action; + if (selectedAction === 'HideinfoWidget') { + hideWidget(widget, node); + } else if (selectedAction === 'Hideforall') { + hideWidgetForNodetype(widget, node); + } + removeDropdown(); + dropdownShouldBeRemoved = false; + } + } else if (event.keyCode === KEY_CODES.ARROW_DOWN && dropdownShouldBeRemoved) { + event.preventDefault(); + if (selectionIndex !== -1) { + dropdownItems[selectionIndex].classList.remove('selected'); + } + selectionIndex = (selectionIndex + 1) % dropdownItems.length; + dropdownItems[selectionIndex].classList.add('selected'); + } else if (event.keyCode === KEY_CODES.ARROW_UP && dropdownShouldBeRemoved) { + event.preventDefault(); + if (selectionIndex !== -1) { + dropdownItems[selectionIndex].classList.remove('selected'); + } + selectionIndex = (selectionIndex - 1 + dropdownItems.length) % dropdownItems.length; + dropdownItems[selectionIndex].classList.add('selected'); + } else if (event.keyCode === KEY_CODES.ESC && dropdownShouldBeRemoved) { + event.preventDefault(); + removeDropdown(); + } + } + + function hideWidget(widget, node) { + node.properties['infoWidgetHidden'] = true; + widget.type = "esayHidden"; + widget.computeSize = () => [0, WIDGET_GAP]; + node.setSize([node.size[0], node.size[1]]); + } + + function hideWidgetForNodetype(widget, node) { + hideWidget(widget, 
node) + const hiddenNodeTypes = JSON.parse(localStorage.getItem('hiddenWidgetNodeTypes') || "[]"); + if (!hiddenNodeTypes.includes(node.constructor.type)) { + hiddenNodeTypes.push(node.constructor.type); + } + localStorage.setItem('hiddenWidgetNodeTypes', JSON.stringify(hiddenNodeTypes)); + } + + function handleDocumentClick(event) { + const dropdown = document.getElementById('hideinfo-dropdown'); + + // If the click was outside the dropdown and the dropdown should be removed, remove it + if (dropdown && !dropdown.contains(event.target) && dropdownShouldBeRemoved) { + removeDropdown(); + dropdownShouldBeRemoved = false; + } + } +} + + +var styleElement = document.createElement("style"); +const cssCode = ` +.easy-info_widget { + background-color: var(--comfy-input-bg); + color: var(--input-text); + overflow: hidden; + padding: 2px; + resize: none; + border: none; + box-sizing: border-box; + font-size: 10px; + border-radius: 7px; + text-align: center; + text-wrap: balance; +} +.hideInfo-dropdown { + position: absolute; + box-sizing: border-box; + background-color: #121212; + border-radius: 7px; + box-shadow: 0 2px 4px rgba(255, 255, 255, .25); + padding: 0; + margin: 0; + list-style: none; + z-index: 1000; + overflow: auto; + max-height: 200px; +} + +.hideInfo-dropdown li { + padding: 4px 10px; + cursor: pointer; + font-family: system-ui; + font-size: 0.7rem; +} + +.hideInfo-dropdown li:hover, +.hideInfo-dropdown li.selected { + background-color: #e5e5e5; + border-radius: 7px; +} +` +styleElement.innerHTML = cssCode +document.head.appendChild(styleElement); + +const InfoSymbol = Symbol(); +const InfoResizeSymbol = Symbol(); + + + + +// WIDGET FUNCTIONS +function addInfoWidget(node, name, opts, app) { + const INFO_W_SIZE = 50; + + node.addProperty('infoWidgetHidden', false) + + function computeSize(size) { + if (node.widgets[0].last_y == null) return; + + let y = node.widgets[0].last_y; + + // Compute the height of all non easyInfo widgets + let widgetHeight = 0; + const infoWidges = []; + for (let i = 0; i < node.widgets.length; i++) { + const w = node.widgets[i]; + if (w.type === "easyInfo") { + infoWidges.push(w); + } else { + if (w.computeSize) { + widgetHeight += w.computeSize()[1] + 4; + } else { + widgetHeight += LiteGraph.NODE_WIDGET_HEIGHT + 4; + } + } + } + + let infoWidgetSpace = infoWidges.length * INFO_W_SIZE; // Height for all info widgets + + // Check if there's enough space for all widgets + if (size[1] < y + widgetHeight + infoWidgetSpace) { + // There isn't enough space for all the widgets, increase the size of the node + node.size[1] = y + widgetHeight + infoWidgetSpace; + node.graph.setDirtyCanvas(true); + } + + // Position each of the widgets + for (const w of node.widgets) { + w.y = y; + if (w.type === "easyInfo") { + y += INFO_W_SIZE; + } else if (w.computeSize) { + y += w.computeSize()[1] + 4; + } else { + y += LiteGraph.NODE_WIDGET_HEIGHT + 4; + } + } + } + + const widget = { + type: "easyInfo", + name, + get value() { + return this.inputEl.value; + }, + set value(x) { + this.inputEl.value = x; + }, + draw: function (ctx, _, widgetWidth, y, widgetHeight) { + if (!this.parent.inputHeight) { + // If we are initially offscreen when created we wont have received a resize event + // Calculate it here instead + computeSize(node.size); + } + const visible = app.canvas.ds.scale > 0.5 && this.type === "easyInfo"; + const margin = 10; + const elRect = ctx.canvas.getBoundingClientRect(); + const transform = new DOMMatrix() + .scaleSelf(elRect.width / ctx.canvas.width, 
elRect.height / ctx.canvas.height) + .multiplySelf(ctx.getTransform()) + .translateSelf(margin, margin + y); + + Object.assign(this.inputEl.style, { + transformOrigin: "0 0", + transform: transform, + left: "0px", + top: "0px", + width: `${widgetWidth - (margin * 2)}px`, + height: `${this.parent.inputHeight - (margin * 2)}px`, + position: "absolute", + background: (!node.color)?'':node.color, + color: (!node.color)?'':'white', + zIndex: app.graph._nodes.indexOf(node), + }); + this.inputEl.hidden = !visible; + }, + }; + widget.inputEl = document.createElement("textarea"); + widget.inputEl.className = "easy-info_widget"; + widget.inputEl.value = opts.defaultVal; + widget.inputEl.placeholder = opts.placeholder || ""; + widget.inputEl.readOnly = true; + widget.parent = node; + + document.body.appendChild(widget.inputEl); + + node.addCustomWidget(widget); + + app.canvas.onDrawBackground = function () { + // Draw node isnt fired once the node is off the screen + // if it goes off screen quickly, the input may not be removed + // this shifts it off screen so it can be moved back if the node is visible. + for (let n in app.graph._nodes) { + n = app.graph._nodes[n]; + for (let w in n.widgets) { + let wid = n.widgets[w]; + if (Object.hasOwn(wid, "inputEl")) { + wid.inputEl.style.left = -8000 + "px"; + wid.inputEl.style.position = "absolute"; + } + } + } + }; + + node.onRemoved = function () { + // When removing this node we need to remove the input from the DOM + for (let y in this.widgets) { + if (this.widgets[y].inputEl) { + this.widgets[y].inputEl.remove(); + } + } + }; + + widget.onRemove = () => { + widget.inputEl?.remove(); + + // Restore original size handler if we are the last + if (!--node[InfoSymbol]) { + node.onResize = node[InfoResizeSymbol]; + delete node[InfoSymbol]; + delete node[InfoResizeSymbol]; + } + }; + + if (node[InfoSymbol]) { + node[InfoSymbol]++; + } else { + node[InfoSymbol] = 1; + const onResize = (node[InfoResizeSymbol] = node.onResize); + + node.onResize = function (size) { + computeSize(size); + + // Call original resizer handler + if (onResize) { + console.log(this, arguments) + onResize.apply(this, arguments); + } + }; + } + + return { widget }; +} + +// WIDGETS +const easyCustomWidgets = { + INFO(node, inputName, inputData, app) { + const defaultVal = inputData[1].default || ""; + return addInfoWidget(node, inputName, { defaultVal, ...inputData[1] }, app); + }, +} + + + +app.registerExtension({ + name: "comfy.easy.widgets", + getCustomWidgets(app) { + return easyCustomWidgets; + }, + nodeCreated(node) { + if (node.widgets) { + // Locate info widgets + const widgets = node.widgets.filter((n) => (n.type === "easyInfo")); + for (const widget of widgets) { + widget.inputEl.addEventListener('contextmenu', function(e) { + hideInfoWidget(e, node, widget); + }); + widget.inputEl.addEventListener('click', function(e) { + hideInfoWidget(e, node, widget); + }); + } + } + }, + async beforeRegisterNodeDef(nodeType, nodeData, app) { + const hiddenNodeTypes = JSON.parse(localStorage.getItem('hiddenWidgetNodeTypes') || "[]"); + const origOnConfigure = nodeType.prototype.onConfigure; + nodeType.prototype.onConfigure = function () { + const r = origOnConfigure ? 
origOnConfigure.apply(this, arguments) : undefined; + if (this.properties['infoWidgetHidden']) { + for (let i in this.widgets) { + if (this.widgets[i].type == "easyInfo") { + hideInfoWidget(null, this, this.widgets[i]); + } + } + } + return r; + }; + const origOnAdded = nodeType.prototype.onAdded; + nodeType.prototype.onAdded = function () { + const r = origOnAdded ? origOnAdded.apply(this, arguments) : undefined; + if (hiddenNodeTypes.includes(this.type)) { + for (let i in this.widgets) { + if (this.widgets[i].type == "easyInfo") { + this.properties['infoWidgetHidden'] = true; + } + } + } + return r; + } + } +}); \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/easy/easyXYPlot.js b/ComfyUI-Easy-Use/web_version/v1/js/easy/easyXYPlot.js new file mode 100644 index 0000000000000000000000000000000000000000..6f27007b926511d99cdbbc97fc3747f557c4f3f7 --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/easy/easyXYPlot.js @@ -0,0 +1,212 @@ +import { app } from "../../../../scripts/app.js"; +import {removeDropdown, createDropdown} from "../common/dropdown.js"; + +function generateNumList(dictionary) { + const minimum = dictionary["min"] || 0; + const maximum = dictionary["max"] || 0; + const step = dictionary["step"] || 1; + + if (step === 0) { + return []; + } + + const result = []; + let currentValue = minimum; + + while (currentValue <= maximum) { + if (Number.isInteger(step)) { + result.push(Math.round(currentValue) + '; '); + } else { + let formattedValue = currentValue.toFixed(3); + if(formattedValue == -0.000){ + formattedValue = '0.000'; + } + if (!/\.\d{3}$/.test(formattedValue)) { + formattedValue += "0"; + } + result.push(formattedValue + "; "); + } + currentValue += step; + } + + if (maximum >= 0 && minimum >= 0) { + //low to high + return result; + } + else { + //high to low + return result.reverse(); + } +} + +let plotDict = {}; +let currentOptionsDict = {}; + +function getCurrentOptionLists(node, widget) { + const nodeId = String(node.id); + const widgetName = widget.name; + const widgetValue = widget.value.replace(/^(loader|preSampling):\s/, ''); + + if (!currentOptionsDict[widgetName]) { + currentOptionsDict = {...currentOptionsDict, [widgetName]: plotDict[widgetValue]}; + } else if (currentOptionsDict[widgetName] != plotDict[widgetValue]) { + currentOptionsDict[widgetName] = plotDict[widgetValue]; + } +} + +function addGetSetters(node) { + if (node.widgets) + for (const w of node.widgets) { + if (w.name === "x_axis" || + w.name === "y_axis") { + let widgetValue = w.value; + + // Define getters and setters for widget values + Object.defineProperty(w, 'value', { + + get() { + return widgetValue; + }, + set(newVal) { + if (newVal !== widgetValue) { + widgetValue = newVal; + getCurrentOptionLists(node, w); + } + } + }); + } + } +} + +function dropdownCreator(node) { + if (node.widgets) { + const widgets = node.widgets.filter( + (n) => (n.type === "customtext" && n.dynamicPrompts !== false) || n.dynamicPrompts + ); + + for (const w of widgets) { + function replaceOptionSegments(selectedOption, inputSegments, cursorSegmentIndex, optionsList) { + if (selectedOption) { + inputSegments[cursorSegmentIndex] = selectedOption; + } + + return inputSegments.map(segment => verifySegment(segment, optionsList)) + .filter(item => item !== '') + .join(''); + } + + function verifySegment(segment, optionsList) { + segment = cleanSegment(segment); + + if (isInOptionsList(segment, optionsList)) { + return segment + '; '; + } + + let matchedOptions = 
findMatchedOptions(segment, optionsList); + + if (matchedOptions.length === 1 || matchedOptions.length === 2) { + return matchedOptions[0]; + } + + if (isInOptionsList(formatNumberSegment(segment), optionsList)) { + return formatNumberSegment(segment) + '; '; + } + + return ''; + } + + function cleanSegment(segment) { + return segment.replace(/(\n|;| )/g, ''); + } + + function isInOptionsList(segment, optionsList) { + return optionsList.includes(segment + '; '); + } + + function findMatchedOptions(segment, optionsList) { + return optionsList.filter(option => option.toLowerCase().includes(segment.toLowerCase())); + } + + function formatNumberSegment(segment) { + if (Number(segment)) { + return Number(segment).toFixed(3); + } + + if (['0', '0.', '0.0', '0.00', '00'].includes(segment)) { + return '0.000'; + } + return segment; + } + + + const onInput = function () { + const axisWidgetName = w.name[0] + '_axis'; + let optionsList = currentOptionsDict?.[axisWidgetName] || []; + if (optionsList.length === 0) {return} + + const inputText = w.inputEl.value; + const cursorPosition = w.inputEl.selectionStart; + let inputSegments = inputText.split('; '); + + const cursorSegmentIndex = inputText.substring(0, cursorPosition).split('; ').length - 1; + const currentSegment = inputSegments[cursorSegmentIndex]; + const currentSegmentLower = currentSegment.replace(/\n/g, '').toLowerCase(); + const filteredOptionsList = optionsList.filter(option => option.toLowerCase().includes(currentSegmentLower)).map(option => option.replace(/; /g, '')); + + if (filteredOptionsList.length > 0) { + createDropdown(w.inputEl, filteredOptionsList, (selectedOption) => { + const verifiedText = replaceOptionSegments(selectedOption, inputSegments, cursorSegmentIndex, optionsList); + w.inputEl.value = verifiedText; + }); + } + else { + removeDropdown(); + const verifiedText = replaceOptionSegments(null, inputSegments, cursorSegmentIndex, optionsList); + w.inputEl.value = verifiedText; + } + }; + + w.inputEl.removeEventListener('input', onInput); + w.inputEl.addEventListener('input', onInput); + w.inputEl.removeEventListener('mouseup', onInput); + w.inputEl.addEventListener('mouseup', onInput); + } + } +} + +app.registerExtension({ + name: "comfy.easy.xyPlot", + async beforeRegisterNodeDef(nodeType, nodeData, app) { + if (nodeData.name === "easy XYPlot") { + plotDict = nodeData.input.hidden.plot_dict[0]; + + for (const key in plotDict) { + const value = plotDict[key]; + if (Array.isArray(value)) { + let updatedValues = []; + for (const v of value) { + updatedValues.push(v + '; '); + } + plotDict[key] = updatedValues; + } else if (typeof(value) === 'object') { + if(key == 'seed'){ + plotDict[key] = value + '; '; + } + else { + plotDict[key] = generateNumList(value); + } + } else { + plotDict[key] = value + '; '; + } + } + plotDict["None"] = []; + plotDict["---------------------"] = []; + } + }, + nodeCreated(node) { + if (node.comfyClass === "easy XYPlot") { + addGetSetters(node); + dropdownCreator(node); + } + } +}); \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/getset.js b/ComfyUI-Easy-Use/web_version/v1/js/getset.js new file mode 100644 index 0000000000000000000000000000000000000000..78957b283f57fd3a0eaefb89b3c99e00c1b7f202 --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/getset.js @@ -0,0 +1,311 @@ +import { app } from "../../../scripts/app.js"; + +// Node that allows you to tunnel connections for cleaner graphs + +app.registerExtension({ + name: "easy setNode", + registerCustomNodes() { + 
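+        // SetNode is the "Set" half of a purely frontend Set/Get tunnel pair:
+        // it adopts the type of whatever link is wired into its single input,
+        // renames itself to "Set_<Constant>", and pushes that type/name to any
+        // matching "easy getNode" so values can be routed without long wires.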
class SetNode { + defaultVisibility = true; + serialize_widgets = true; + constructor() { + if (!this.properties) { + this.properties = { + "previousName": "" + }; + } + this.properties.showOutputText = SetNode.defaultVisibility; + + const node = this; + node.color = LGraphCanvas.node_colors.blue.color; + + this.addWidget( + "text", + "Constant", + '', + (s, t, u, v, x) => { + node.validateName(node.graph); + if(this.widgets[0].value !== ''){ + this.title = "Set_" + this.widgets[0].value; + } + this.update(); + this.properties.previousName = this.widgets[0].value; + }, + {} + ) + + this.addInput("*", "*"); + + + this.onConnectionsChange = function( + slotType, //1 = input, 2 = output + slot, + isChangeConnect, + link_info, + output + ) { + // console.log("onConnectionsChange"); + //On Disconnect + if (slotType == 1 && !isChangeConnect) { + this.inputs[slot].type = '*'; + this.inputs[slot].name = '*'; + } + + //On Connect + if (link_info && node.graph && slotType == 1 && isChangeConnect) { + const fromNode = node.graph._nodes.find((otherNode) => otherNode.id == link_info.origin_id); + const type = fromNode.outputs[link_info.origin_slot].type; + + if (this.title === "Set"){ + this.title = "Set_" + type; + } + if (this.widgets[0].value === '*'){ + this.widgets[0].value = type + } + + this.validateName(node.graph); + this.inputs[0].type = type; + this.inputs[0].name = type; + + setTimeout(_=>{ + if(type != this.widgets[0].value){ + this.title = "Set_" + this.widgets[0].value; + } + },1) + } + + //Update either way + this.update(); + } + + this.validateName = function(graph) { + let widgetValue = node.widgets[0].value; + if (widgetValue != '') { + let tries = 0; + let collisions = []; + + do { + collisions = graph._nodes.filter((otherNode) => { + if (otherNode == this) { + return false; + } + if (otherNode.type == 'easy setNode' && otherNode.widgets[0].value === widgetValue) { + return true; + } + return false; + }) + if (collisions.length > 0) { + widgetValue = node.widgets[0].value + "_" + tries; + } + tries++; + } while (collisions.length > 0) + node.widgets[0].value = widgetValue; + this.update(); + } + } + + this.clone = function () { + const cloned = SetNode.prototype.clone.apply(this); + cloned.inputs[0].name = '*'; + cloned.inputs[0].type = '*'; + cloned.properties.previousName = ''; + cloned.size = cloned.computeSize(); + return cloned; + }; + + this.onAdded = function(graph) { + this.validateName(graph); + } + + + this.update = function() { + if (node.graph) { + this.findGetters(node.graph).forEach((getter) => { + getter.setType(this.inputs[0].type); + }); + if (this.widgets[0].value) { + this.findGetters(node.graph, true).forEach((getter) => { + getter.setName(this.widgets[0].value) + }); + } + + const allGetters = node.graph._nodes.filter((otherNode) => otherNode.type == "easy getNode"); + allGetters.forEach((otherNode) => { + if (otherNode.setComboValues) { + otherNode.setComboValues(); + } + }) + } + } + + + this.findGetters = function(graph, checkForPreviousName) { + const name = checkForPreviousName ? 
this.properties.previousName : this.widgets[0].value; + return graph._nodes.filter((otherNode) => { + if (otherNode.type == 'easy getNode' && otherNode.widgets[0].value === name && name != '') { + return true; + } + return false; + }) + } + // This node is purely frontend and does not impact the resulting prompt so should not be serialized + this.isVirtualNode = true; + } + + onRemoved() { + const allGetters = this.graph._nodes.filter((otherNode) => otherNode.type == "easy getNode"); + allGetters.forEach((otherNode) => { + if (otherNode.setComboValues) { + otherNode.setComboValues([this]); + } + }) + } + } + + + LiteGraph.registerNodeType( + "easy setNode", + Object.assign(SetNode, { + title: "Set", + }) + ); + + SetNode.category = "EasyUse/Util"; + }, +}); + + +app.registerExtension({ + name: "easy getNode", + registerCustomNodes() { + class GetNode { + + defaultVisibility = true; + serialize_widgets = true; + + constructor() { + if (!this.properties) { + this.properties = {}; + } + this.properties.showOutputText = GetNode.defaultVisibility; + + const node = this; + node.color = LGraphCanvas.node_colors.blue.color; + this.addWidget( + "combo", + "Constant", + "", + (e) => { + this.onRename(); + }, + { + values: () => { + const setterNodes = node.graph._nodes.filter((otherNode) => otherNode.type == 'easy setNode'); + return setterNodes.map((otherNode) => otherNode.widgets[0].value).sort(); + } + } + ) + + + this.addOutput("*", '*'); + + + this.onConnectionsChange = function( + slotType, //0 = output, 1 = input + slot, //self-explanatory + isChangeConnect, + link_info, + output + ) { + this.validateLinks(); + setTimeout(_=>{ + this.title = 'Get_' + this.widgets[0].value + },1) + } + + + this.setName = function(name) { + node.widgets[0].value = name; + node.onRename(); + node.serialize(); + } + + + this.onRename = function() { + const setter = this.findSetter(node.graph); + if (setter) { + this.setType(setter.inputs[0].type); + this.title = "Get_" + setter.widgets[0].value; + } else { + this.setType('*'); + } + } + + this.clone = function () { + const cloned = GetNode.prototype.clone.apply(this); + cloned.size = cloned.computeSize(); + return cloned; + }; + + this.validateLinks = function() { + if (this.outputs[0].type != '*' && this.outputs[0].links) { + this.outputs[0].links.forEach((linkId) => { + const link = node.graph.links[linkId]; + if (link && link.type != this.outputs[0].type && link.type != '*') { + node.graph.removeLink(linkId) + } + }) + } + } + + this.setType = function(type) { + this.outputs[0].name = type; + this.outputs[0].type = type; + this.validateLinks(); + } + + this.findSetter = function(graph) { + const name = this.widgets[0].value; + return graph._nodes.find((otherNode) => { + if (otherNode.type == 'easy setNode' && otherNode.widgets[0].value === name && name != '') { + return true; + } + return false; + }) + } + + // This node is purely frontend and does not impact the resulting prompt so should not be serialized + this.isVirtualNode = true; + } + + + getInputLink(slot) { + const setter = this.findSetter(this.graph); + + if (setter) { + const slot_info = setter.inputs[slot]; + const link = this.graph.links[ slot_info.link ]; + return link; + } else { + throw new Error("No setter found for " + this.widgets[0].value + "(" + this.type + ")"); + } + + } + onAdded(graph) { + //this.setComboValues(); + //this.validateName(graph); + } + + } + + + LiteGraph.registerNodeType( + "easy getNode", + Object.assign(GetNode, { + title: "Get", + }) + ); + + GetNode.category = 
"EasyUse/Util"; + }, +}); \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/image.js b/ComfyUI-Easy-Use/web_version/v1/js/image.js new file mode 100644 index 0000000000000000000000000000000000000000..7d8b45ca893a4a6dcc9ecd8aa8763fe60970ce18 --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/image.js @@ -0,0 +1,66 @@ +import { app } from "../../../scripts/app.js"; + + +app.registerExtension({ + name: "comfy.easyUse.imageWidgets", + + nodeCreated(node) { + if (["easy imageSize","easy imageSizeBySide","easy imageSizeByLongerSide","easy imageSizeShow", "easy imageRatio", "easy imagePixelPerfect"].includes(node.comfyClass)) { + + const inputEl = document.createElement("textarea"); + inputEl.className = "comfy-multiline-input"; + inputEl.readOnly = true + + const widget = node.addDOMWidget("info", "customtext", inputEl, { + getValue() { + return inputEl.value; + }, + setValue(v) { + inputEl.value = v; + }, + serialize: false + }); + widget.inputEl = inputEl; + + inputEl.addEventListener("input", () => { + widget.callback?.(widget.value); + }); + } + }, + + beforeRegisterNodeDef(nodeType, nodeData, app) { + if (["easy imageSize","easy imageSizeBySide","easy imageSizeByLongerSide", "easy imageSizeShow", "easy imageRatio", "easy imagePixelPerfect"].includes(nodeData.name)) { + function populate(arr_text) { + var text = ''; + for (let i = 0; i < arr_text.length; i++){ + text += arr_text[i]; + } + if (this.widgets) { + const pos = this.widgets.findIndex((w) => w.name === "info"); + if (pos !== -1 && this.widgets[pos]) { + const w = this.widgets[pos] + w.value = text; + } + } + requestAnimationFrame(() => { + const sz = this.computeSize(); + if (sz[0] < this.size[0]) { + sz[0] = this.size[0]; + } + if (sz[1] < this.size[1]) { + sz[1] = this.size[1]; + } + this.onResize?.(sz); + app.graph.setDirtyCanvas(true, false); + }); + } + + // When the node is executed we will be sent the input text, display this in the widget + const onExecuted = nodeType.prototype.onExecuted; + nodeType.prototype.onExecuted = function (message) { + onExecuted?.apply(this, arguments); + populate.call(this, message.text); + }; + } + } +}) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/image_chooser/chooser.js b/ComfyUI-Easy-Use/web_version/v1/js/image_chooser/chooser.js new file mode 100644 index 0000000000000000000000000000000000000000..e1c153dc6adc0432af90c653c05c6fde0ec1de59 --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/image_chooser/chooser.js @@ -0,0 +1,237 @@ +import { app } from "../../../../scripts/app.js"; +import { api } from "../../../../scripts/api.js"; +import { ComfyDialog, $el } from "../../../../scripts/ui.js"; + +import { restart_from_here } from "./prompt.js"; +import { hud, FlowState } from "./state.js"; +import { send_cancel, send_message, send_onstart, skip_next_restart_message } from "./messaging.js"; +import { display_preview_images, additionalDrawBackground, click_is_in_image } from "./preview.js"; +import {$t} from "../common/i18n.js"; + + +class chooserImageDialog extends ComfyDialog { + + constructor() { + super(); + this.node = null + this.select_index = [] + this.dialog_div = null + } + + show(image,node){ + this.select_index = [] + this.node = node + + const images_div = image.map((img, index) => { + const imgEl = $el('img', { + src: img.src, + onclick: _ => { + if(this.select_index.includes(index)){ + this.select_index = this.select_index.filter(i => i !== index) + imgEl.classList.remove('selected') + } else { + 
this.select_index.push(index) + imgEl.classList.add('selected') + } + if (node.selected.has(index)) node.selected.delete(index); + else node.selected.add(index); + } + }) + return imgEl + }) + super.show($el('div.easyuse-chooser-dialog',[ + $el('h5.easyuse-chooser-dialog-title', $t('Choose images to continue')), + $el('div.easyuse-chooser-dialog-images',images_div) + ])) + } + createButtons() { + const btns = super.createButtons(); + btns[0].onclick = _ => { + if (FlowState.running()) { send_cancel();} + super.close() + } + btns.unshift($el('button', { + type: 'button', + textContent: $t('Choose Selected Images'), + onclick: _ => { + if (FlowState.paused()) { + send_message(this.node.id, [...this.node.selected, -1, ...this.node.anti_selected]); + } + if (FlowState.idle()) { + skip_next_restart_message(); + restart_from_here(this.node.id).then(() => { send_message(this.node.id, [...this.node.selected, -1, ...this.node.anti_selected]); }); + } + super.close() + } + })) + return btns + } + +} + +function progressButtonPressed() { + const node = app.graph._nodes_by_id[this.node_id]; + if (node) { + const selected = [...node.selected] + if(selected?.length>0){ + node.setProperty('values',selected) + } + if (FlowState.paused()) { + send_message(node.id, [...node.selected, -1, ...node.anti_selected]); + } + if (FlowState.idle()) { + skip_next_restart_message(); + restart_from_here(node.id).then(() => { send_message(node.id, [...node.selected, -1, ...node.anti_selected]); }); + } + } +} + +function cancelButtonPressed() { + + if (FlowState.running()) { send_cancel();} +} + +function enable_disabling(button) { + Object.defineProperty(button, 'clicked', { + get : function() { return this._clicked; }, + set : function(v) { this._clicked = (v && this.name!=''); } + }) +} + +function disable_serialize(widget) { + if (!widget.options) widget.options = { }; + widget.options.serialize = false; +} + +app.registerExtension({ + name:'comfy.easyuse.imageChooser', + init() { + window.addEventListener("beforeunload", send_cancel, true); + }, + setup(app) { + + const draw = LGraphCanvas.prototype.draw; + LGraphCanvas.prototype.draw = function() { + if (hud.update()) { + app.graph._nodes.forEach((node)=> { if (node.update) { node.update(); } }) + } + draw.apply(this,arguments); + } + + + function easyuseImageChooser(event) { + const {node,image,isKSampler} = display_preview_images(event); + if(isKSampler) { + const dialog = new chooserImageDialog(); + dialog.show(image,node) + } + } + api.addEventListener("easyuse-image-choose", easyuseImageChooser); + + /* + If a run is interrupted, send a cancel message (unless we're doing the cancelling, to avoid infinite loop) + */ + const original_api_interrupt = api.interrupt; + api.interrupt = function () { + if (FlowState.paused() && !FlowState.cancelling) send_cancel(); + original_api_interrupt.apply(this, arguments); + } + + /* + At the start of execution + */ + function on_execution_start() { + if (send_onstart()) { + app.graph._nodes.forEach((node)=> { + if (node.selected || node.anti_selected) { + node.selected.clear(); + node.anti_selected.clear(); + node.update(); + } + }) + } + } + api.addEventListener("execution_start", on_execution_start); + }, + + async nodeCreated(node, app) { + + if(node.comfyClass == 'easy imageChooser'){ + node.setProperty('values',[]) + + /* A property defining the top of the image when there is just one */ + if(node?.imageIndex === undefined){ + Object.defineProperty(node, 'imageIndex', { + get : function() { return null; }, + set: 
function (v) {node.overIndex= v}, + }) + } + if(node?.imagey === undefined){ + Object.defineProperty(node, 'imagey', { + get : function() { return null; }, + set: function (v) {return node.widgets[node.widgets.length-1].last_y+LiteGraph.NODE_WIDGET_HEIGHT;}, + }) + } + + /* Capture clicks */ + const org_onMouseDown = node.onMouseDown; + node.onMouseDown = function( e, pos, canvas ) { + if (e.isPrimary) { + const i = click_is_in_image(node, pos); + if (i>=0) { this.imageClicked(i); } + } + return (org_onMouseDown && org_onMouseDown.apply(this, arguments)); + } + + node.send_button_widget = node.addWidget("button", "", "", progressButtonPressed); + node.cancel_button_widget = node.addWidget("button", "", "", cancelButtonPressed); + enable_disabling(node.cancel_button_widget); + enable_disabling(node.send_button_widget); + disable_serialize(node.cancel_button_widget); + disable_serialize(node.send_button_widget); + + } + }, + + beforeRegisterNodeDef(nodeType, nodeData, app) { + if(nodeData?.name == 'easy imageChooser'){ + + const onDrawBackground = nodeType.prototype.onDrawBackground; + nodeType.prototype.onDrawBackground = function(ctx) { + onDrawBackground.apply(this, arguments); + additionalDrawBackground(this, ctx); + } + + nodeType.prototype.imageClicked = function (imageIndex) { + if (nodeType?.comfyClass==="easy imageChooser") { + if (this.selected.has(imageIndex)) this.selected.delete(imageIndex); + else this.selected.add(imageIndex); + this.update(); + } + } + + const update = nodeType.prototype.update; + nodeType.prototype.update = function() { + if (update) update.apply(this,arguments); + if (this.send_button_widget) { + this.send_button_widget.node_id = this.id; + const selection = ( this.selected ? this.selected.size : 0 ) + ( this.anti_selected ? this.anti_selected.size : 0 ) + const maxlength = this.imgs?.length || 0; + if (FlowState.paused_here(this.id) && selection>0) { + this.send_button_widget.name = (selection>1) ? "Progress selected (" + selection + '/' + maxlength +")" : "Progress selected image"; + } else if (selection>0) { + this.send_button_widget.name = (selection>1) ? "Progress selected (" + selection + '/' + maxlength +")" : "Progress selected image as restart"; + } + else { + this.send_button_widget.name = ""; + } + } + if (this.cancel_button_widget) { + const isRunning = FlowState.running() + this.cancel_button_widget.name = isRunning ? 
"Cancel current run" : ""; + } + this.setDirtyCanvas(true,true); + } + } + } +}) \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/image_chooser/messaging.js b/ComfyUI-Easy-Use/web_version/v1/js/image_chooser/messaging.js new file mode 100644 index 0000000000000000000000000000000000000000..b5864462dc1ab3daaa34afa885bb7bfe2d71b74e --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/image_chooser/messaging.js @@ -0,0 +1,34 @@ +import { api } from "../../../../scripts/api.js"; +import { FlowState } from "./state.js"; + +function send_message_from_pausing_node(message) { + const id = app.runningNodeId; + send_message(id, message); +} + +function send_message(id, message) { + const body = new FormData(); + body.append('message',message); + body.append('id', id); + api.fetchApi("/easyuse/image_chooser_message", { method: "POST", body, }); +} + +function send_cancel() { + send_message(-1,'__cancel__'); + FlowState.cancelling = true; + api.interrupt(); + FlowState.cancelling = false; +} + +var skip_next = 0; +function skip_next_restart_message() { skip_next += 1; } +function send_onstart() { + if (skip_next>0) { + skip_next -= 1; + return false; + } + send_message(-1,'__start__'); + return true; +} + +export { send_message_from_pausing_node, send_cancel, send_message, send_onstart, skip_next_restart_message } \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/image_chooser/preview.js b/ComfyUI-Easy-Use/web_version/v1/js/image_chooser/preview.js new file mode 100644 index 0000000000000000000000000000000000000000..6f8fe4407812fa809f2b4e6a118baba5ccdb084c --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/image_chooser/preview.js @@ -0,0 +1,90 @@ +import { app } from "../../../../scripts/app.js"; + +const kSampler = ['easy kSampler', 'easy kSamplerTiled', 'easy fullkSampler'] + +function display_preview_images(event) { + const node = app.graph._nodes_by_id[event.detail.id]; + if (node) { + node.selected = new Set(); + node.anti_selected = new Set(); + const image = showImages(node, event.detail.urls); + return {node,image,isKSampler:kSampler.includes(node.type)} + } else { + console.log(`Image Chooser Preview - failed to find ${event.detail.id}`) + } +} + +function showImages(node, urls) { + node.imgs = []; + urls.forEach((u)=> { + const img = new Image(); + node.imgs.push(img); + img.onload = () => { app.graph.setDirtyCanvas(true); }; + img.src = `/view?filename=${encodeURIComponent(u.filename)}&type=temp&subfolder=${app.getPreviewFormatParam()}` + }) + node.setSizeForImage?.(); + return node.imgs +} + +function drawRect(node, s, ctx) { + const padding = 1; + var rect; + if (node.imageRects) { + rect = node.imageRects[s]; + } else { + const y = node.imagey; + rect = [padding,y+padding,node.size[0]-2*padding,node.size[1]-y-2*padding]; + } + ctx.strokeRect(rect[0]+padding, rect[1]+padding, rect[2]-padding*2, rect[3]-padding*2); +} + +function additionalDrawBackground(node, ctx) { + if (!node.imgs) return; + if (node.imageRects) { + for (let i = 0; i < node.imgs.length; i++) { + // delete underlying image + ctx.fillStyle = "#000"; + ctx.fillRect(...node.imageRects[i]) + // draw the new one + const img = node.imgs[i]; + const cellWidth = node.imageRects[i][2]; + const cellHeight = node.imageRects[i][3]; + + let wratio = cellWidth/img.width; + let hratio = cellHeight/img.height; + var ratio = Math.min(wratio, hratio); + + let imgHeight = ratio * img.height; + let imgWidth = ratio * img.width; + + const imgX = node.imageRects[i][0] + (cellWidth - 
imgWidth)/2;
+            const imgY = node.imageRects[i][1] + (cellHeight - imgHeight)/2;
+            const cell_padding = 2;
+            ctx.drawImage(img, imgX+cell_padding, imgY+cell_padding, imgWidth-cell_padding*2, imgHeight-cell_padding*2);
+
+        }
+    }
+    ctx.lineWidth = 2;
+    ctx.strokeStyle = "green";
+    node?.selected?.forEach((s) => { drawRect(node,s, ctx) })
+    ctx.strokeStyle = "#F88";
+    node?.anti_selected?.forEach((s) => { drawRect(node,s, ctx) })
+}
+
+// Returns the index of the image cell under the click position, or -1 if none.
+function click_is_in_image(node, pos) {
+    if (node.imgs?.length>1) {
+        for (var i = 0; i < node.imageRects.length; i++) {
+            const dx = pos[0] - node.imageRects[i][0];
+            const dy = pos[1] - node.imageRects[i][1];
+            if ( dx > 0 && dx < node.imageRects[i][2] &&
+                 dy > 0 && dy < node.imageRects[i][3] ) {
+                return i;
+            }
+        }
+    } else if (node.imgs?.length==1) {
+        if (pos[1]>node.imagey) return 0;
+    }
+    return -1;
+}
+
+export { display_preview_images, additionalDrawBackground, click_is_in_image }
\ No newline at end of file
diff --git a/ComfyUI-Easy-Use/web_version/v1/js/image_chooser/prompt.js b/ComfyUI-Easy-Use/web_version/v1/js/image_chooser/prompt.js
new file mode 100644
index 0000000000000000000000000000000000000000..c9c89b6289ce05d522a68823c60aa3bce35f60ad
--- /dev/null
+++ b/ComfyUI-Easy-Use/web_version/v1/js/image_chooser/prompt.js
@@ -0,0 +1,114 @@
+import { app } from "../../../../scripts/app.js";
+
+function links_with(p, node_id, down, up) {
+    const links_with = [];
+    p.workflow.links.forEach((l) => {
+        if (down && l[1]===node_id && !links_with.includes(l[3])) links_with.push(l[3])
+        if (up && l[3]===node_id && !links_with.includes(l[1])) links_with.push(l[1])
+    });
+    return links_with;
+}
+
+function _all_v_nodes(p, here_id) {
+    /*
+    Make a list of all downstream nodes.
+    */
+    const downstream = [];
+    const to_process = [here_id]
+    while(to_process.length>0) {
+        const id = to_process.pop();
+        downstream.push(id);
+        to_process.push(
+            ...links_with(p,id,true,false).filter((nid)=>{
+                return !(downstream.includes(nid) || to_process.includes(nid))
+            })
+        )
+    }
+
+    /*
+    Now all upstream nodes from any of the downstream nodes (except us).
+ Put us on the result list so we don't flow up through us + */ + to_process.push(...downstream.filter((n)=>{ return n!=here_id})); + const back_upstream = [here_id]; + while(to_process.length>0) { + const id = to_process.pop(); + back_upstream.push(id); + to_process.push( + ...links_with(p,id,false,true).filter((nid)=>{ + return !(back_upstream.includes(nid) || to_process.includes(nid)) + }) + ) + } + + const keep = []; + keep.push(...downstream); + keep.push(...back_upstream.filter((n)=>{return !keep.includes(n)})); + + console.log(`Nodes to keep: ${keep}`); + return keep; +} + +async function all_v_nodes(here_id) { + const p = structuredClone(await app.graphToPrompt()); + const all_nodes = []; + p.workflow.nodes.forEach((node)=>{all_nodes.push(node.id)}) + p.workflow.links = p.workflow.links.filter((l)=>{ return (all_nodes.includes(l[1]) && all_nodes.includes(l[3]))} ) + return _all_v_nodes(p,here_id); +} + +async function restart_from_here(here_id, go_down_to_chooser=false) { + const p = structuredClone(await app.graphToPrompt()); + /* + Make a list of all nodes, and filter out links that are no longer valid + */ + const all_nodes = []; + p.workflow.nodes.forEach((node)=>{all_nodes.push(node.id)}) + p.workflow.links = p.workflow.links.filter((l)=>{ return (all_nodes.includes(l[1]) && all_nodes.includes(l[3]))} ) + + /* Move downstream to a chooser */ + if (go_down_to_chooser) { + while (!app.graph._nodes_by_id[here_id].isChooser) { + here_id = links_with(p, here_id, true, false)[0]; + } + } + + const keep = _all_v_nodes(p, here_id); + + /* + Filter p.workflow.nodes and p.workflow.links + */ + p.workflow.nodes = p.workflow.nodes.filter((node) => { + if (node.id===here_id) node.inputs.forEach((i)=>{i.link=null}) // remove our upstream links + return (keep.includes(node.id)) // only keep keepers + }) + p.workflow.links = p.workflow.links.filter((l) => {return (keep.includes(l[1]) && keep.includes(l[3]))}) + + /* + Filter the p.output object to only include nodes we're keeping + */ + const new_output = {} + for (const [key, value] of Object.entries(p.output)) { + if (keep.includes(parseInt(key))) new_output[key] = value; + } + /* + Filter the p.output entry for the start node to remove any list (ie link) inputs + */ + const new_inputs = {}; + for (const [key, value] of Object.entries(new_output[here_id.toString()].inputs)) { + if (!Array.isArray(value)) new_inputs[key] = value; + } + new_output[here_id.toString()].inputs = new_inputs; + + p.output = new_output; + + // temporarily hijack graph_to_prompt with a version that restores the old one but returns this prompt + const gtp_was = app.graphToPrompt; + app.graphToPrompt = () => { + app.graphToPrompt = gtp_was; + return p; + } + app.queuePrompt(0); +} + +export { restart_from_here, all_v_nodes } \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/image_chooser/state.js b/ComfyUI-Easy-Use/web_version/v1/js/image_chooser/state.js new file mode 100644 index 0000000000000000000000000000000000000000..69437d72a217a76afd86c5489130e521bcbd41e9 --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/image_chooser/state.js @@ -0,0 +1,55 @@ +import { app } from "../../../../scripts/app.js"; + + +class HUD { + constructor() { + this.current_node_id = undefined; + this.class_of_current_node = null; + this.current_node_is_chooser = false; + } + + update() { + if (app.runningNodeId==this.current_node_id) return false; + + this.current_node_id = app.runningNodeId; + + if (this.current_node_id) { + this.class_of_current_node = 
app.graph?._nodes_by_id[app.runningNodeId.toString()]?.comfyClass; + this.current_node_is_chooser = this.class_of_current_node === "easy imageChooser" + } else { + this.class_of_current_node = undefined; + this.current_node_is_chooser = false; + } + return true; + } +} + +const hud = new HUD(); + + +class FlowState { + constructor(){} + static idle() { + return (!app.runningNodeId); + } + static paused() { + return true; + } + static paused_here(node_id) { + return (FlowState.paused() && FlowState.here(node_id)) + } + static running() { + return (!FlowState.idle()); + } + static here(node_id) { + return (app.runningNodeId==node_id); + } + static state() { + if (FlowState.paused()) return "Paused"; + if (FlowState.running()) return "Running"; + return "Idle"; + } + static cancelling = false; +} + +export { hud, FlowState} \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v1/js/poseEditor.js b/ComfyUI-Easy-Use/web_version/v1/js/poseEditor.js new file mode 100644 index 0000000000000000000000000000000000000000..bfd6813efd86ca7483db7f00b8677837ac93043e --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v1/js/poseEditor.js @@ -0,0 +1,666 @@ +import { app } from "../../../scripts/app.js"; +import { fabric } from "../lib/fabric.js"; + +fabric.Object.prototype.transparentCorners = false; +fabric.Object.prototype.cornerColor = "#108ce6"; +fabric.Object.prototype.borderColor = "#108ce6"; +fabric.Object.prototype.cornerSize = 10; + +let connect_keypoints = [ + [0, 1], + [1, 2], + [2, 3], + [3, 4], + [1, 5], + [5, 6], + [6, 7], + [1, 8], + [8, 9], + [9, 10], + [1, 11], + [11, 12], + [12, 13], + [0, 14], + [14, 16], + [0, 15], + [15, 17], +]; + +let connect_color = [ + [0, 0, 255], + [255, 0, 0], + [255, 170, 0], + [255, 255, 0], + [255, 85, 0], + [170, 255, 0], + [85, 255, 0], + [0, 255, 0], + [0, 255, 85], + [0, 255, 170], + [0, 255, 255], + [0, 170, 255], + [0, 85, 255], + [85, 0, 255], + [170, 0, 255], + [255, 0, 255], + [255, 0, 170], + [255, 0, 85], +]; + +const default_keypoints = [ + [241, 77], + [241, 120], + [191, 118], + [177, 183], + [163, 252], + [298, 118], + [317, 182], + [332, 245], + [225, 241], + [213, 359], + [215, 454], + [270, 240], + [282, 360], + [286, 456], + [232, 59], + [253, 60], + [225, 70], + [260, 72], +]; + +class OpenPose { + constructor(node, canvasElement) { + this.lockMode = false; + this.visibleEyes = true; + this.flipped = false; + this.node = node; + this.undo_history = LS_Poses[node.name].undo_history || []; + this.redo_history = LS_Poses[node.name].redo_history || []; + this.history_change = false; + this.canvas = this.initCanvas(canvasElement); + this.image = node.widgets.find((w) => w.name === "image"); + } + + setPose(keypoints) { + this.canvas.clear(); + + this.canvas.backgroundColor = "#000"; + + const res = []; + for (let i = 0; i < keypoints.length; i += 18) { + const chunk = keypoints.slice(i, i + 18); + res.push(chunk); + } + + for (let item of res) { + this.addPose(item); + this.canvas.discardActiveObject(); + } + } + + addPose(keypoints = undefined) { + if (keypoints === undefined) { + keypoints = default_keypoints; + } + + const group = new fabric.Group(); + + const makeCircle = ( + color, + left, + top, + line1, + line2, + line3, + line4, + line5 + ) => { + let c = new fabric.Circle({ + left: left, + top: top, + strokeWidth: 1, + radius: 5, + fill: color, + stroke: color, + }); + + c.hasControls = c.hasBorders = false; + c.line1 = line1; + c.line2 = line2; + c.line3 = line3; + c.line4 = line4; + c.line5 = line5; + + return c; + }; + 
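+      // Each keypoint circle keeps references to the limb lines that touch it
+      // (line1..line5); the canvas "object:moving/scaling/rotating" handlers use
+      // those references to move the line endpoints so the skeleton stays joined
+      // while a joint or the whole pose group is dragged.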
+ const makeLine = (coords, color) => { + return new fabric.Line(coords, { + fill: color, + stroke: color, + strokeWidth: 10, + selectable: false, + evented: false, + }); + }; + + const lines = []; + const circles = []; + + for (let i = 0; i < connect_keypoints.length; i++) { + // 接続されるidxを指定 [0, 1]なら0と1つなぐ + const item = connect_keypoints[i]; + const line = makeLine( + keypoints[item[0]].concat(keypoints[item[1]]), + `rgba(${connect_color[i].join(", ")}, 0.7)` + ); + lines.push(line); + this.canvas.add(line); + } + + for (let i = 0; i < keypoints.length; i++) { + let list = []; + + connect_keypoints.filter((item, idx) => { + if (item.includes(i)) { + list.push(lines[idx]); + return idx; + } + }); + const circle = makeCircle( + `rgb(${connect_color[i].join(", ")})`, + keypoints[i][0], + keypoints[i][1], + ...list + ); + circle["id"] = i; + circles.push(circle); + group.addWithUpdate(circle); + } + + this.canvas.discardActiveObject(); + this.canvas.setActiveObject(group); + this.canvas.add(group); + group.toActiveSelection(); + this.canvas.requestRenderAll(); + } + + initCanvas() { + this.canvas = new fabric.Canvas(this.canvas, { + backgroundColor: "#000", + preserveObjectStacking: true, + }); + + const updateLines = (target) => { + if ("_objects" in target) { + const flipX = target.flipX ? -1 : 1; + const flipY = target.flipY ? -1 : 1; + this.flipped = flipX * flipY === -1; + const showEyes = this.flipped ? !this.visibleEyes : this.visibleEyes; + + if (target.angle === 0) { + const rtop = target.top; + const rleft = target.left; + for (const item of target._objects) { + let p = item; + p.scaleX = 1; + p.scaleY = 1; + const top = + rtop + + p.top * target.scaleY * flipY + + (target.height * target.scaleY) / 2; + const left = + rleft + + p.left * target.scaleX * flipX + + (target.width * target.scaleX) / 2; + p["_top"] = top; + p["_left"] = left; + if (p["id"] === 0) { + p.line1 && p.line1.set({ x1: left, y1: top }); + } else { + p.line1 && p.line1.set({ x2: left, y2: top }); + } + if (p["id"] === 14 || p["id"] === 15) { + p.radius = showEyes ? 5 : 0; + if (p.line1) p.line1.strokeWidth = showEyes ? 10 : 0; + if (p.line2) p.line2.strokeWidth = showEyes ? 10 : 0; + } + p.line2 && p.line2.set({ x1: left, y1: top }); + p.line3 && p.line3.set({ x1: left, y1: top }); + p.line4 && p.line4.set({ x1: left, y1: top }); + p.line5 && p.line5.set({ x1: left, y1: top }); + } + } else { + const aCoords = target.aCoords; + const center = { + x: (aCoords.tl.x + aCoords.br.x) / 2, + y: (aCoords.tl.y + aCoords.br.y) / 2, + }; + const rad = (target.angle * Math.PI) / 180; + const sin = Math.sin(rad); + const cos = Math.cos(rad); + + for (const item of target._objects) { + let p = item; + const p_top = p.top * target.scaleY * flipY; + const p_left = p.left * target.scaleX * flipX; + const left = center.x + p_left * cos - p_top * sin; + const top = center.y + p_left * sin + p_top * cos; + p["_top"] = top; + p["_left"] = left; + if (p["id"] === 0) { + p.line1 && p.line1.set({ x1: left, y1: top }); + } else { + p.line1 && p.line1.set({ x2: left, y2: top }); + } + if (p["id"] === 14 || p["id"] === 15) { + p.radius = showEyes ? 5 : 0.3; + if (p.line1) p.line1.strokeWidth = showEyes ? 10 : 0; + if (p.line2) p.line2.strokeWidth = showEyes ? 
10 : 0; + } + p.line2 && p.line2.set({ x1: left, y1: top }); + p.line3 && p.line3.set({ x1: left, y1: top }); + p.line4 && p.line4.set({ x1: left, y1: top }); + p.line5 && p.line5.set({ x1: left, y1: top }); + } + } + } else { + var p = target; + if (p["id"] === 0) { + p.line1 && p.line1.set({ x1: p.left, y1: p.top }); + } else { + p.line1 && p.line1.set({ x2: p.left, y2: p.top }); + } + p.line2 && p.line2.set({ x1: p.left, y1: p.top }); + p.line3 && p.line3.set({ x1: p.left, y1: p.top }); + p.line4 && p.line4.set({ x1: p.left, y1: p.top }); + p.line5 && p.line5.set({ x1: p.left, y1: p.top }); + } + this.canvas.renderAll(); + }; + + this.canvas.on("object:moving", (e) => { + updateLines(e.target); + }); + + this.canvas.on("object:scaling", (e) => { + updateLines(e.target); + this.canvas.renderAll(); + }); + + this.canvas.on("object:rotating", (e) => { + updateLines(e.target); + this.canvas.renderAll(); + }); + + this.canvas.on("object:modified", () => { + if ( + this.lockMode || + this.canvas.getActiveObject().type == "activeSelection" + ) + return; + this.undo_history.push(this.getJSON()); + this.redo_history.length = 0; + this.history_change = true; + this.uploadPoseFile(this.node.name); + }); + + if (!LS_Poses[this.node.name].undo_history.length) { + this.setPose(default_keypoints); + this.undo_history.push(this.getJSON()); + } + return this.canvas; + } + + undo() { + if (this.undo_history.length > 0) { + this.lockMode = true; + if (this.undo_history.length > 1) + this.redo_history.push(this.undo_history.pop()); + + const content = this.undo_history[this.undo_history.length - 1]; + this.loadPreset(content); + this.canvas.renderAll(); + this.lockMode = false; + this.history_change = true; + this.uploadPoseFile(this.node.name); + } + } + + redo() { + if (this.redo_history.length > 0) { + this.lockMode = true; + const content = this.redo_history.pop(); + this.undo_history.push(content); + this.loadPreset(content); + this.canvas.renderAll(); + this.lockMode = false; + this.history_change = true; + this.uploadPoseFile(this.node.name); + } + } + + resetCanvas() { + this.canvas.clear(); + this.canvas.backgroundColor = "#000"; + this.addPose(); + } + + updateHistoryData() { + if (this.history_change) { + LS_Poses[this.node.name].undo_history = this.undo_history; + LS_Poses[this.node.name].redo_history = this.redo_history; + LS_Save(); + this.history_change = false; + } + } + + uploadPoseFile(fileName) { + // Upload pose to temp folder ComfyUI + + const uploadFile = async (blobFile) => { + try { + const resp = await fetch("/upload/image", { + method: "POST", + body: blobFile, + }); + + if (resp.status === 200) { + const data = await resp.json(); + + if (!this.image.options.values.includes(data.name)) { + this.image.options.values.push(data.name); + } + + this.image.value = data.name; + this.updateHistoryData(); + } else { + alert(resp.status + " - " + resp.statusText); + } + } catch (error) { + console.error(error); + } + }; + + this.canvas.lowerCanvasEl.toBlob(function (blob) { + let formData = new FormData(); + formData.append("image", blob, fileName); + formData.append("overwrite", "true"); + formData.append("type", "temp"); + uploadFile(formData); + }, "image/png"); + // - end + + const callb = this.node.callback, + self = this; + this.image.callback = function () { + this.image.value = self.node.name; + if (callb) { + return callb.apply(this, arguments); + } + }; + } + + getJSON() { + const json = { + keypoints: this.canvas + .getObjects() + .filter((item) => { + if (item.type === "circle") 
return item; + }) + .map((item) => { + return [Math.round(item.left), Math.round(item.top)]; + }), + }; + + return json; + } + + loadPreset(json) { + try { + if (json["keypoints"].length % 18 === 0) { + this.setPose(json["keypoints"]); + } else { + throw new Error("keypoints is invalid"); + } + } catch (e) { + console.error(e); + } + } +} + +// Create OpenPose widget +function createOpenPose(node, inputName, inputData, app) { + node.name = inputName; + const widget = { + type: "openpose", + name: `w${inputName}`, + + draw: function (ctx, _, widgetWidth, y, widgetHeight) { + const margin = 10, + visible = app.canvas.ds.scale > 0.5 && this.type === "openpose", + clientRectBound = ctx.canvas.getBoundingClientRect(), + transform = new DOMMatrix() + .scaleSelf( + clientRectBound.width / ctx.canvas.width, + clientRectBound.height / ctx.canvas.height + ) + .multiplySelf(ctx.getTransform()) + .translateSelf(margin, margin + y), + w = (widgetWidth - margin * 2 - 3) * transform.a; + + Object.assign(this.openpose.style, { + left: `${transform.a * margin + transform.e}px`, + top: `${transform.d + transform.f}px`, + width: w + "px", + height: w + "px", + position: "absolute", + zIndex: app.graph._nodes.indexOf(node), + }); + + Object.assign(this.openpose.children[0].style, { + width: w + "px", + height: w + "px", + }); + + Object.assign(this.openpose.children[1].style, { + width: w + "px", + height: w + "px", + }); + + Array.from(this.openpose.children[2].children).forEach((element) => { + Object.assign(element.style, { + width: `${28.0 * transform.a}px`, + height: `${22.0 * transform.d}px`, + fontSize: `${transform.d * 10.0}px`, + }); + element.hidden = !visible; + }); + }, + }; + + // Fabric canvas + let canvasOpenPose = document.createElement("canvas"); + node.openPose = new OpenPose(node, canvasOpenPose); + + node.openPose.canvas.setWidth(512); + node.openPose.canvas.setHeight(512); + + let widgetCombo = node.widgets.filter((w) => w.type === "combo"); + widgetCombo[0].value = node.name; + + widget.openpose = node.openPose.canvas.wrapperEl; + widget.parent = node; + + // Create elements undo, redo, clear history + let panelButtons = document.createElement("div"), + undoButton = document.createElement("button"), + redoButton = document.createElement("button"), + historyClearButton = document.createElement("button"); + + panelButtons.className = "panelButtons comfy-menu-btns"; + undoButton.textContent = "⟲"; + redoButton.textContent = "⟳"; + historyClearButton.textContent = "✖"; + undoButton.title = "Undo"; + redoButton.title = "Redo"; + historyClearButton.title = "Clear History"; + + undoButton.addEventListener("click", () => node.openPose.undo()); + redoButton.addEventListener("click", () => node.openPose.redo()); + historyClearButton.addEventListener("click", () => { + if (confirm(`Delete all pose history of a node "${node.name}"?`)) { + node.openPose.undo_history = []; + node.openPose.redo_history = []; + node.openPose.setPose(default_keypoints); + node.openPose.undo_history.push(node.openPose.getJSON()); + node.openPose.history_change = true; + node.openPose.updateHistoryData(); + } + }); + + panelButtons.appendChild(undoButton); + panelButtons.appendChild(redoButton); + panelButtons.appendChild(historyClearButton); + node.openPose.canvas.wrapperEl.appendChild(panelButtons); + + document.body.appendChild(widget.openpose); + + // Add buttons add, reset, undo, redo poses + node.addWidget("button", "Add pose", "add_pose", () => { + node.openPose.addPose(); + }); + + node.addWidget("button", "Reset 
pose", "reset_pose", () => { + node.openPose.resetCanvas(); + }); + + // Add customWidget to node + node.addCustomWidget(widget); + + node.onRemoved = () => { + if (Object.hasOwn(LS_Poses, node.name)) { + delete LS_Poses[node.name]; + LS_Save(); + } + + // When removing this node we need to remove the input from the DOM + for (let y in node.widgets) { + if (node.widgets[y].openpose) { + node.widgets[y].openpose.remove(); + } + } + }; + + widget.onRemove = () => { + widget.openpose?.remove(); + }; + + app.canvas.onDrawBackground = function () { + // Draw node isnt fired once the node is off the screen + // if it goes off screen quickly, the input may not be removed + // this shifts it off screen so it can be moved back if the node is visible. + for (let n in app.graph._nodes) { + n = graph._nodes[n]; + for (let w in n.widgets) { + let wid = n.widgets[w]; + if (Object.hasOwn(wid, "openpose")) { + wid.openpose.style.left = -8000 + "px"; + wid.openpose.style.position = "absolute"; + } + } + } + }; + return { widget: widget }; +} + +window.LS_Poses = {}; +function LS_Save() { + ///console.log("Save:", LS_Poses); + localStorage.setItem("ComfyUI_Poses", JSON.stringify(LS_Poses)); +} + +app.registerExtension({ + name: "comfy.easyuse.poseEditor", + async init(app) { + // Any initial setup to run as soon as the page loads + let style = document.createElement("style"); + style.innerText = `.panelButtons{ + position: absolute; + padding: 4px; + display: flex; + gap: 4px; + flex-direction: column; + width: fit-content; + } + .panelButtons button:last-child{ + border-color: var(--error-text); + color: var(--error-text) !important; + } + + `; + document.head.appendChild(style); + }, + async setup(app) { + let openPoseNode = app.graph._nodes.filter((wi) => wi.type == "easy poseEditor"); + + if (openPoseNode.length) { + openPoseNode.map((n) => { + console.log(`Setup PoseNode: ${n.name}`); + let widgetImage = n.widgets.find((w) => w.name == "image"); + if (widgetImage && Object.hasOwn(LS_Poses, n.name)) { + let pose_ls = LS_Poses[n.name].undo_history; + n.openPose.loadPreset( + pose_ls.length > 0 + ? pose_ls[pose_ls.length - 1] + : { keypoints: default_keypoints } + ); + } + }); + } + }, + async beforeRegisterNodeDef(nodeType, nodeData, app) { + if (nodeData.name === "easy poseEditor") { + const onNodeCreated = nodeType.prototype.onNodeCreated; + + nodeType.prototype.onNodeCreated = function () { + const r = onNodeCreated + ? 
diff --git a/ComfyUI-Easy-Use/web_version/v1/lib/fabric.js b/ComfyUI-Easy-Use/web_version/v1/lib/fabric.js
new file mode 100644
index 0000000000000000000000000000000000000000..fa7bd3ce09e2b1c4168a380f625cb8a96c05ab0a
--- /dev/null
+++ b/ComfyUI-Easy-Use/web_version/v1/lib/fabric.js
@@ -0,0 +1,10 @@
+/**
+ * Bundled by jsDelivr using Rollup v2.79.1 and Terser v5.17.1.
+ * Original file: /npm/fabric-with-all-modules@4.6.1/dist/fabric.js
+ *
+ * Do NOT use SRI with dynamically generated files!
More information: https://www.jsdelivr.com/using-sri-with-dynamic-files + */ +function t(t,e){return e.forEach((function(e){e&&"string"!=typeof e&&!Array.isArray(e)&&Object.keys(e).forEach((function(i){if("default"!==i&&!(i in t)){var r=Object.getOwnPropertyDescriptor(e,i);Object.defineProperty(t,i,r.get?r:{enumerable:!0,get:function(){return e[i]}})}}))})),Object.freeze(t)}var e="undefined"!=typeof global?global:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{},i=[],r=[],n="undefined"!=typeof Uint8Array?Uint8Array:Array,o=!1;function s(){o=!0;for(var t="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",e=0;e<64;++e)i[e]=t[e],r[t.charCodeAt(e)]=e;r["-".charCodeAt(0)]=62,r["_".charCodeAt(0)]=63}function a(t,e,r){for(var n,o,s=[],a=e;a>18&63]+i[o>>12&63]+i[o>>6&63]+i[63&o]);return s.join("")}function h(t){var e;o||s();for(var r=t.length,n=r%3,h="",c=[],l=16383,u=0,f=r-n;uf?f:u+l));return 1===n?(e=t[r-1],h+=i[e>>2],h+=i[e<<4&63],h+="=="):2===n&&(e=(t[r-2]<<8)+t[r-1],h+=i[e>>10],h+=i[e>>4&63],h+=i[e<<2&63],h+="="),c.push(h),c.join("")}function c(t,e,i,r,n){var o,s,a=8*n-r-1,h=(1<>1,l=-7,u=i?n-1:0,f=i?-1:1,d=t[e+u];for(u+=f,o=d&(1<<-l)-1,d>>=-l,l+=a;l>0;o=256*o+t[e+u],u+=f,l-=8);for(s=o&(1<<-l)-1,o>>=-l,l+=r;l>0;s=256*s+t[e+u],u+=f,l-=8);if(0===o)o=1-c;else{if(o===h)return s?NaN:1/0*(d?-1:1);s+=Math.pow(2,r),o-=c}return(d?-1:1)*s*Math.pow(2,o-r)}function l(t,e,i,r,n,o){var s,a,h,c=8*o-n-1,l=(1<>1,f=23===n?Math.pow(2,-24)-Math.pow(2,-77):0,d=r?0:o-1,g=r?1:-1,p=e<0||0===e&&1/e<0?1:0;for(e=Math.abs(e),isNaN(e)||e===1/0?(a=isNaN(e)?1:0,s=l):(s=Math.floor(Math.log(e)/Math.LN2),e*(h=Math.pow(2,-s))<1&&(s--,h*=2),(e+=s+u>=1?f/h:f*Math.pow(2,1-u))*h>=2&&(s++,h/=2),s+u>=l?(a=0,s=l):s+u>=1?(a=(e*h-1)*Math.pow(2,n),s+=u):(a=e*Math.pow(2,u-1)*Math.pow(2,n),s=0));n>=8;t[i+d]=255&a,d+=g,a/=256,n-=8);for(s=s<0;t[i+d]=255&s,d+=g,s/=256,c-=8);t[i+d-g]|=128*p}var u={}.toString,f=Array.isArray||function(t){return"[object Array]"==u.call(t)};function d(){return p.TYPED_ARRAY_SUPPORT?2147483647:1073741823}function g(t,e){if(d()=d())throw new RangeError("Attempt to allocate Buffer larger than maximum size: 0x"+d().toString(16)+" bytes");return 0|t}function x(t){return!(null==t||!t._isBuffer)}function C(t,e){if(x(t))return t.length;if("undefined"!=typeof ArrayBuffer&&"function"==typeof ArrayBuffer.isView&&(ArrayBuffer.isView(t)||t instanceof ArrayBuffer))return t.byteLength;"string"!=typeof t&&(t=""+t);var i=t.length;if(0===i)return 0;for(var r=!1;;)switch(e){case"ascii":case"latin1":case"binary":return i;case"utf8":case"utf-8":case void 0:return K(t).length;case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return 2*i;case"hex":return i>>>1;case"base64":return J(t).length;default:if(r)return K(t).length;e=(""+e).toLowerCase(),r=!0}}function w(t,e,i){var r=!1;if((void 0===e||e<0)&&(e=0),e>this.length)return"";if((void 0===i||i>this.length)&&(i=this.length),i<=0)return"";if((i>>>=0)<=(e>>>=0))return"";for(t||(t="utf8");;)switch(t){case"hex":return B(this,e,i);case"utf8":case"utf-8":return F(this,e,i);case"ascii":return I(this,e,i);case"latin1":case"binary":return R(this,e,i);case"base64":return j(this,e,i);case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return Y(this,e,i);default:if(r)throw new TypeError("Unknown encoding: "+t);t=(t+"").toLowerCase(),r=!0}}function S(t,e,i){var r=t[e];t[e]=t[i],t[i]=r}function T(t,e,i,r,n){if(0===t.length)return-1;if("string"==typeof 
i?(r=i,i=0):i>2147483647?i=2147483647:i<-2147483648&&(i=-2147483648),i=+i,isNaN(i)&&(i=n?0:t.length-1),i<0&&(i=t.length+i),i>=t.length){if(n)return-1;i=t.length-1}else if(i<0){if(!n)return-1;i=0}if("string"==typeof e&&(e=p.from(e,r)),x(e))return 0===e.length?-1:O(t,e,i,r,n);if("number"==typeof e)return e&=255,p.TYPED_ARRAY_SUPPORT&&"function"==typeof Uint8Array.prototype.indexOf?n?Uint8Array.prototype.indexOf.call(t,e,i):Uint8Array.prototype.lastIndexOf.call(t,e,i):O(t,[e],i,r,n);throw new TypeError("val must be string, number or Buffer")}function O(t,e,i,r,n){var o,s=1,a=t.length,h=e.length;if(void 0!==r&&("ucs2"===(r=String(r).toLowerCase())||"ucs-2"===r||"utf16le"===r||"utf-16le"===r)){if(t.length<2||e.length<2)return-1;s=2,a/=2,h/=2,i/=2}function c(t,e){return 1===s?t[e]:t.readUInt16BE(e*s)}if(n){var l=-1;for(o=i;oa&&(i=a-h),o=i;o>=0;o--){for(var u=!0,f=0;fn&&(r=n):r=n;var o=e.length;if(o%2!=0)throw new TypeError("Invalid hex string");r>o/2&&(r=o/2);for(var s=0;s>8,n=i%256,o.push(n),o.push(r);return o}(e,t.length-i),t,i,r)}function j(t,e,i){return 0===e&&i===t.length?h(t):h(t.slice(e,i))}function F(t,e,i){i=Math.min(t.length,i);for(var r=[],n=e;n239?4:c>223?3:c>191?2:1;if(n+u<=i)switch(u){case 1:c<128&&(l=c);break;case 2:128==(192&(o=t[n+1]))&&(h=(31&c)<<6|63&o)>127&&(l=h);break;case 3:o=t[n+1],s=t[n+2],128==(192&o)&&128==(192&s)&&(h=(15&c)<<12|(63&o)<<6|63&s)>2047&&(h<55296||h>57343)&&(l=h);break;case 4:o=t[n+1],s=t[n+2],a=t[n+3],128==(192&o)&&128==(192&s)&&128==(192&a)&&(h=(15&c)<<18|(63&o)<<12|(63&s)<<6|63&a)>65535&&h<1114112&&(l=h)}null===l?(l=65533,u=1):l>65535&&(l-=65536,r.push(l>>>10&1023|55296),l=56320|1023&l),r.push(l),n+=u}return function(t){var e=t.length;if(e<=L)return String.fromCharCode.apply(String,t);var i="",r=0;for(;r0&&(t=this.toString("hex",0,50).match(/.{2}/g).join(" "),this.length>50&&(t+=" ... 
")),""},p.prototype.compare=function(t,e,i,r,n){if(!x(t))throw new TypeError("Argument must be a Buffer");if(void 0===e&&(e=0),void 0===i&&(i=t?t.length:0),void 0===r&&(r=0),void 0===n&&(n=this.length),e<0||i>t.length||r<0||n>this.length)throw new RangeError("out of range index");if(r>=n&&e>=i)return 0;if(r>=n)return-1;if(e>=i)return 1;if(this===t)return 0;for(var o=(n>>>=0)-(r>>>=0),s=(i>>>=0)-(e>>>=0),a=Math.min(o,s),h=this.slice(r,n),c=t.slice(e,i),l=0;ln)&&(i=n),t.length>0&&(i<0||e<0)||e>this.length)throw new RangeError("Attempt to write outside buffer bounds");r||(r="utf8");for(var o=!1;;)switch(r){case"hex":return P(this,t,e,i);case"utf8":case"utf-8":return E(this,t,e,i);case"ascii":return k(this,t,e,i);case"latin1":case"binary":return A(this,t,e,i);case"base64":return D(this,t,e,i);case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return M(this,t,e,i);default:if(o)throw new TypeError("Unknown encoding: "+r);r=(""+r).toLowerCase(),o=!0}},p.prototype.toJSON=function(){return{type:"Buffer",data:Array.prototype.slice.call(this._arr||this,0)}};var L=4096;function I(t,e,i){var r="";i=Math.min(t.length,i);for(var n=e;nr)&&(i=r);for(var n="",o=e;oi)throw new RangeError("Trying to access beyond buffer length")}function U(t,e,i,r,n,o){if(!x(t))throw new TypeError('"buffer" argument must be a Buffer instance');if(e>n||et.length)throw new RangeError("Index out of range")}function G(t,e,i,r){e<0&&(e=65535+e+1);for(var n=0,o=Math.min(t.length-i,2);n>>8*(r?n:1-n)}function H(t,e,i,r){e<0&&(e=4294967295+e+1);for(var n=0,o=Math.min(t.length-i,4);n>>8*(r?n:3-n)&255}function W(t,e,i,r,n,o){if(i+r>t.length)throw new RangeError("Index out of range");if(i<0)throw new RangeError("Index out of range")}function z(t,e,i,r,n){return n||W(t,0,i,4),l(t,e,i,r,23,4),i+4}function N(t,e,i,r,n){return n||W(t,0,i,8),l(t,e,i,r,52,8),i+8}p.prototype.slice=function(t,e){var i,r=this.length;if((t=~~t)<0?(t+=r)<0&&(t=0):t>r&&(t=r),(e=void 0===e?r:~~e)<0?(e+=r)<0&&(e=0):e>r&&(e=r),e0&&(n*=256);)r+=this[t+--e]*n;return r},p.prototype.readUInt8=function(t,e){return e||X(t,1,this.length),this[t]},p.prototype.readUInt16LE=function(t,e){return e||X(t,2,this.length),this[t]|this[t+1]<<8},p.prototype.readUInt16BE=function(t,e){return e||X(t,2,this.length),this[t]<<8|this[t+1]},p.prototype.readUInt32LE=function(t,e){return e||X(t,4,this.length),(this[t]|this[t+1]<<8|this[t+2]<<16)+16777216*this[t+3]},p.prototype.readUInt32BE=function(t,e){return e||X(t,4,this.length),16777216*this[t]+(this[t+1]<<16|this[t+2]<<8|this[t+3])},p.prototype.readIntLE=function(t,e,i){t|=0,e|=0,i||X(t,e,this.length);for(var r=this[t],n=1,o=0;++o=(n*=128)&&(r-=Math.pow(2,8*e)),r},p.prototype.readIntBE=function(t,e,i){t|=0,e|=0,i||X(t,e,this.length);for(var r=e,n=1,o=this[t+--r];r>0&&(n*=256);)o+=this[t+--r]*n;return o>=(n*=128)&&(o-=Math.pow(2,8*e)),o},p.prototype.readInt8=function(t,e){return e||X(t,1,this.length),128&this[t]?-1*(255-this[t]+1):this[t]},p.prototype.readInt16LE=function(t,e){e||X(t,2,this.length);var i=this[t]|this[t+1]<<8;return 32768&i?4294901760|i:i},p.prototype.readInt16BE=function(t,e){e||X(t,2,this.length);var i=this[t+1]|this[t]<<8;return 32768&i?4294901760|i:i},p.prototype.readInt32LE=function(t,e){return e||X(t,4,this.length),this[t]|this[t+1]<<8|this[t+2]<<16|this[t+3]<<24},p.prototype.readInt32BE=function(t,e){return e||X(t,4,this.length),this[t]<<24|this[t+1]<<16|this[t+2]<<8|this[t+3]},p.prototype.readFloatLE=function(t,e){return e||X(t,4,this.length),c(this,t,!0,23,4)},p.prototype.readFloatBE=function(t,e){return 
e||X(t,4,this.length),c(this,t,!1,23,4)},p.prototype.readDoubleLE=function(t,e){return e||X(t,8,this.length),c(this,t,!0,52,8)},p.prototype.readDoubleBE=function(t,e){return e||X(t,8,this.length),c(this,t,!1,52,8)},p.prototype.writeUIntLE=function(t,e,i,r){(t=+t,e|=0,i|=0,r)||U(this,t,e,i,Math.pow(2,8*i)-1,0);var n=1,o=0;for(this[e]=255&t;++o=0&&(o*=256);)this[e+n]=t/o&255;return e+i},p.prototype.writeUInt8=function(t,e,i){return t=+t,e|=0,i||U(this,t,e,1,255,0),p.TYPED_ARRAY_SUPPORT||(t=Math.floor(t)),this[e]=255&t,e+1},p.prototype.writeUInt16LE=function(t,e,i){return t=+t,e|=0,i||U(this,t,e,2,65535,0),p.TYPED_ARRAY_SUPPORT?(this[e]=255&t,this[e+1]=t>>>8):G(this,t,e,!0),e+2},p.prototype.writeUInt16BE=function(t,e,i){return t=+t,e|=0,i||U(this,t,e,2,65535,0),p.TYPED_ARRAY_SUPPORT?(this[e]=t>>>8,this[e+1]=255&t):G(this,t,e,!1),e+2},p.prototype.writeUInt32LE=function(t,e,i){return t=+t,e|=0,i||U(this,t,e,4,4294967295,0),p.TYPED_ARRAY_SUPPORT?(this[e+3]=t>>>24,this[e+2]=t>>>16,this[e+1]=t>>>8,this[e]=255&t):H(this,t,e,!0),e+4},p.prototype.writeUInt32BE=function(t,e,i){return t=+t,e|=0,i||U(this,t,e,4,4294967295,0),p.TYPED_ARRAY_SUPPORT?(this[e]=t>>>24,this[e+1]=t>>>16,this[e+2]=t>>>8,this[e+3]=255&t):H(this,t,e,!1),e+4},p.prototype.writeIntLE=function(t,e,i,r){if(t=+t,e|=0,!r){var n=Math.pow(2,8*i-1);U(this,t,e,i,n-1,-n)}var o=0,s=1,a=0;for(this[e]=255&t;++o>0)-a&255;return e+i},p.prototype.writeIntBE=function(t,e,i,r){if(t=+t,e|=0,!r){var n=Math.pow(2,8*i-1);U(this,t,e,i,n-1,-n)}var o=i-1,s=1,a=0;for(this[e+o]=255&t;--o>=0&&(s*=256);)t<0&&0===a&&0!==this[e+o+1]&&(a=1),this[e+o]=(t/s>>0)-a&255;return e+i},p.prototype.writeInt8=function(t,e,i){return t=+t,e|=0,i||U(this,t,e,1,127,-128),p.TYPED_ARRAY_SUPPORT||(t=Math.floor(t)),t<0&&(t=255+t+1),this[e]=255&t,e+1},p.prototype.writeInt16LE=function(t,e,i){return t=+t,e|=0,i||U(this,t,e,2,32767,-32768),p.TYPED_ARRAY_SUPPORT?(this[e]=255&t,this[e+1]=t>>>8):G(this,t,e,!0),e+2},p.prototype.writeInt16BE=function(t,e,i){return t=+t,e|=0,i||U(this,t,e,2,32767,-32768),p.TYPED_ARRAY_SUPPORT?(this[e]=t>>>8,this[e+1]=255&t):G(this,t,e,!1),e+2},p.prototype.writeInt32LE=function(t,e,i){return t=+t,e|=0,i||U(this,t,e,4,2147483647,-2147483648),p.TYPED_ARRAY_SUPPORT?(this[e]=255&t,this[e+1]=t>>>8,this[e+2]=t>>>16,this[e+3]=t>>>24):H(this,t,e,!0),e+4},p.prototype.writeInt32BE=function(t,e,i){return t=+t,e|=0,i||U(this,t,e,4,2147483647,-2147483648),t<0&&(t=4294967295+t+1),p.TYPED_ARRAY_SUPPORT?(this[e]=t>>>24,this[e+1]=t>>>16,this[e+2]=t>>>8,this[e+3]=255&t):H(this,t,e,!1),e+4},p.prototype.writeFloatLE=function(t,e,i){return z(this,t,e,!0,i)},p.prototype.writeFloatBE=function(t,e,i){return z(this,t,e,!1,i)},p.prototype.writeDoubleLE=function(t,e,i){return N(this,t,e,!0,i)},p.prototype.writeDoubleBE=function(t,e,i){return N(this,t,e,!1,i)},p.prototype.copy=function(t,e,i,r){if(i||(i=0),r||0===r||(r=this.length),e>=t.length&&(e=t.length),e||(e=0),r>0&&r=this.length)throw new RangeError("sourceStart out of bounds");if(r<0)throw new RangeError("sourceEnd out of bounds");r>this.length&&(r=this.length),t.length-e=0;--n)t[n+e]=this[n+i];else if(o<1e3||!p.TYPED_ARRAY_SUPPORT)for(n=0;n>>=0,i=void 0===i?this.length:i>>>0,t||(t=0),"number"==typeof t)for(o=e;o55295&&i<57344){if(!n){if(i>56319){(e-=3)>-1&&o.push(239,191,189);continue}if(s+1===r){(e-=3)>-1&&o.push(239,191,189);continue}n=i;continue}if(i<56320){(e-=3)>-1&&o.push(239,191,189),n=i;continue}i=65536+(n-55296<<10|i-56320)}else n&&(e-=3)>-1&&o.push(239,191,189);if(n=null,i<128){if((e-=1)<0)break;o.push(i)}else 
if(i<2048){if((e-=2)<0)break;o.push(i>>6|192,63&i|128)}else if(i<65536){if((e-=3)<0)break;o.push(i>>12|224,i>>6&63|128,63&i|128)}else{if(!(i<1114112))throw new Error("Invalid code point");if((e-=4)<0)break;o.push(i>>18|240,i>>12&63|128,i>>6&63|128,63&i|128)}}return o}function J(t){return function(t){var e,i,a,h,c,l;o||s();var u=t.length;if(u%4>0)throw new Error("Invalid string. Length must be a multiple of 4");c="="===t[u-2]?2:"="===t[u-1]?1:0,l=new n(3*u/4-c),a=c>0?u-4:u;var f=0;for(e=0,i=0;e>16&255,l[f++]=h>>8&255,l[f++]=255&h;return 2===c?(h=r[t.charCodeAt(e)]<<2|r[t.charCodeAt(e+1)]>>4,l[f++]=255&h):1===c&&(h=r[t.charCodeAt(e)]<<10|r[t.charCodeAt(e+1)]<<4|r[t.charCodeAt(e+2)]>>2,l[f++]=h>>8&255,l[f++]=255&h),l}(function(t){if((t=function(t){return t.trim?t.trim():t.replace(/^\s+|\s+$/g,"")}(t).replace(V,"")).length<2)return"";for(;t.length%4!=0;)t+="=";return t}(t))}function Z(t,e,i,r){for(var n=0;n=e.length||n>=t.length);++n)e[n+i]=t[n];return n}function $(t){return!!t.constructor&&"function"==typeof t.constructor.isBuffer&&t.constructor.isBuffer(t)}function Q(t){if(t.__esModule)return t;var e=Object.defineProperty({},"__esModule",{value:!0});return Object.keys(t).forEach((function(i){var r=Object.getOwnPropertyDescriptor(t,i);Object.defineProperty(e,i,r.get?r:{enumerable:!0,get:function(){return t[i]}})})),e}var tt={},et={},it=Q(t({__proto__:null,default:et},[et]));!function(t){ +/*! Fabric.js Copyright 2008-2015, Printio (Juriy Zaytsev, Maxim Chernyak) */ +var e,i,r,n,o,s,a,h,c,l,u,f,d,g,v,m,y,_,b,x,C,w,S,T=T||{version:"4.6.0"};if(t.fabric=T,"undefined"!=typeof document&&"undefined"!=typeof window)document instanceof("undefined"!=typeof HTMLDocument?HTMLDocument:Document)?T.document=document:T.document=document.implementation.createHTMLDocument(""),T.window=window;else{var O=new it.JSDOM(decodeURIComponent("%3C!DOCTYPE%20html%3E%3Chtml%3E%3Chead%3E%3C%2Fhead%3E%3Cbody%3E%3C%2Fbody%3E%3C%2Fhtml%3E"),{features:{FetchExternalResources:["img"]},resources:"usable"}).window;T.document=O.document,T.jsdomImplForWrapper=it.implForWrapper,T.nodeCanvas=it.Canvas,T.window=O,DOMParser=T.window.DOMParser}if(T.isTouchSupported="ontouchstart"in T.window||"ontouchstart"in T.document||T.window&&T.window.navigator&&T.window.navigator.maxTouchPoints>0,T.isLikelyNode=void 0!==p&&"undefined"==typeof window,T.SHARED_ATTRIBUTES=["display","transform","fill","fill-opacity","fill-rule","opacity","stroke","stroke-dasharray","stroke-linecap","stroke-dashoffset","stroke-linejoin","stroke-miterlimit","stroke-opacity","stroke-width","id","paint-order","vector-effect","instantiated_by_use","clip-path"],T.DPI=96,T.reNum="(?:[-+]?(?:\\d+|\\d*\\.\\d+)(?:[eE][-+]?\\d+)?)",T.commaWsp="(?:\\s+,?\\s*|,\\s*)",T.rePathCommand=/([-+]?((\d+\.\d+)|((\d+)|(\.\d+)))(?:[eE][-+]?\d+)?)/gi,T.reNonWord=/[ \n\.,;!\?\-]/,T.fontPaths={},T.iMatrix=[1,0,0,1,0,0],T.svgNS="http://www.w3.org/2000/svg",T.perfLimitSizeTotal=2097152,T.maxCacheSideLimit=4096,T.minCacheSideLimit=256,T.charWidthsCache={},T.textureSize=2048,T.disableStyleCopyPaste=!1,T.enableGLFiltering=!0,T.devicePixelRatio=T.window.devicePixelRatio||T.window.webkitDevicePixelRatio||T.window.mozDevicePixelRatio||1,T.browserShadowBlurConstant=1,T.arcToSegmentsCache={},T.boundsOfCurveCache={},T.cachesBoundsOfCurve=!0,T.forceGLPutImageData=!1,T.initFilterBackend=function(){return T.enableGLFiltering&&T.isWebglSupported&&T.isWebglSupported(T.textureSize)?(console.log("max texture size: "+T.maxTextureSize),new 
T.WebglFilterBackend({tileSize:T.textureSize})):T.Canvas2dFilterBackend?new T.Canvas2dFilterBackend:void 0},"undefined"!=typeof document&&"undefined"!=typeof window&&(window.fabric=T),void 0===P)var P={};if(function(t){t.modifyEventListener=!1,t.modifySelectors=!1,t.configure=function(e){isFinite(e.modifyEventListener)&&(t.modifyEventListener=e.modifyEventListener),isFinite(e.modifySelectors)&&(t.modifySelectors=e.modifySelectors),!1===d&&t.modifyEventListener&&g(),!1===p&&t.modifySelectors&&v()},t.add=function(t,e,r,n){return i(t,e,r,n,"add")},t.remove=function(t,e,r,n){return i(t,e,r,n,"remove")},t.returnFalse=function(t){return!1},t.stop=function(t){t&&(t.stopPropagation&&t.stopPropagation(),t.cancelBubble=!0,t.cancelBubbleCount=0)},t.prevent=function(t){t&&(t.preventDefault?t.preventDefault():t.preventManipulation?t.preventManipulation():t.returnValue=!1)},t.cancel=function(e){t.stop(e),t.prevent(e)},t.blur=function(){var t=document.activeElement;if(t){var e=document.activeElement.nodeName;"INPUT"!==e&&"TEXTAREA"!==e&&"true"!==t.contentEditable||t.blur&&t.blur()}},t.getEventSupport=function(t,e){if("string"==typeof t&&(e=t,t=window),(e="on"+e)in t)return!0;if(t.setAttribute||(t=document.createElement("div")),t.setAttribute&&t.removeAttribute){t.setAttribute(e,"");var i="function"==typeof t[e];return void 0!==t[e]&&(t[e]=null),t.removeAttribute(e),i}};var e=function(t){if(!t||"object"!=typeof t)return t;var i=new t.constructor;for(var r in t)t[r]&&"object"==typeof t[r]?i[r]=e(t[r]):i[r]=t[r];return i},i=function(o,s,c,d,g,p){if(d=d||{},"[object Object]"===String(o)){var v=o;if(o=v.target,delete v.target,!v.type||!v.listener){for(var m in v){var y=v[m];"function"!=typeof y&&(d[m]=y)}var _={};for(var b in v){m=b.split(",");var x=v[b],C={};for(var w in d)C[w]=d[w];if("function"==typeof x)c=x;else{if("function"!=typeof x.listener)continue;c=x.listener;for(var w in x)"function"!=typeof x[w]&&(C[w]=x[w])}for(var S=0;SO&&window.clearInterval(k),document.querySelector(o)&&(window.clearInterval(k),setTimeout(c,1))}),E);return}s="load",o=window}if("string"==typeof o){if(0===(o=document.querySelectorAll(o)).length)return n("Missing target on listener!",arguments);1===o.length&&(o=o[0])}var A,D={};if(o.length>0&&o!==window){for(var M=0,j=o.length;M=r.maxFingers){var d=[];for(var f in r.tracker)d.push(f);return i.identifier=d.join(","),s}for(var g in a)if(a[g].up){delete a[g],o(u,f),r.cancel=!0;break}if(a[f])continue;o(u,f)}else a=r.tracker={},i.bbox=r.bbox=t.getBoundingBox(r.target),r.fingers=0,r.cancel=!1,o(u,f)}d=[];for(var f in r.tracker)d.push(f);return i.identifier=d.join(","),s},t.pointerEnd=function(t,e,i,r){for(var n=t.touches||[],o=n.length,s={},a=0;ao.x1&&a>o.y1&&h0&&l0&&u0?x:-x,void 0!==v.DEG2&&(x>0?v.rotation+=v.DEG1-v.DEG2:v.rotation-=v.DEG1-v.DEG2,g+=v.rotation),f.push(v.move)}}n.touches=f,n.fingers=r.fingers,n.scale=d/r.fingers,n.rotation=g/r.fingers,n.state="change",r.listener(o,n)}},r.onPointerUp=function(e){var i=r.fingers;t.pointerEnd(e,n,r)&&(P.remove(r.doc,"mousemove",r.onPointerMove),P.remove(r.doc,"mouseup",r.onPointerUp)),i===r.minFingers&&r.fingers4){var g=h*u/d,p=Math.abs(g+f.value);f.value&&p<200?(f.value=g,f.count++,3===f.count&&(t.listener(o,e),i=h,f.value=0,f.count=0)):(f.value=g,f.count=1)}}else t.listener(o,e)};if(window.addEventListener)return window.addEventListener("devicemotion",o,!1),e},P.Gesture=P.Gesture||{},P.Gesture._gestureHandlers=P.Gesture._gestureHandlers||{},P.Gesture._gestureHandlers.shake=t.shake,t}(P.proxy),void 0===P)P={};if(void 
0===P.proxy&&(P.proxy={}),P.proxy=function(t){var e=Math.PI/180;return t.swipe=function(i){i.snap=i.snap||90,i.threshold=i.threshold||1,i.gesture=i.gesture||"swipe",i.onPointerDown=function(e){t.pointerStart(e,r,i)&&(P.add(i.doc,"mousemove",i.onPointerMove).listener(e),P.add(i.doc,"mouseup",i.onPointerUp))},i.onPointerMove=function(e){for(var r=e.changedTouches||t.getCoords(e),n=r.length,o=0;o=_&&o>i.threshold&&(a.x/=l,a.y/=l,r.start=a,r.x=h/l,r.y=c/l,r.angle=-(((s/i.snap+.5>>0)*i.snap||360)-360),r.velocity=o,r.fingers=_,r.state="swipe",i.listener(n,r))}};var r=t.pointerSetup(i);return P.add(i.target,"mousedown",i.onPointerDown),r},P.Gesture=P.Gesture||{},P.Gesture._gestureHandlers=P.Gesture._gestureHandlers||{},P.Gesture._gestureHandlers.swipe=t.swipe,t}(P.proxy),void 0===P)P={};if(void 0===P.proxy&&(P.proxy={}),P.proxy=function(t){return t.longpress=function(e){return e.gesture="longpress",t.tap(e)},t.tap=function(e){var i,r;e.delay=e.delay||500,e.timeout=e.timeout||250,e.driftDeviance=e.driftDeviance||10,e.gesture=e.gesture||"tap",e.onPointerDown=function(o){if(t.pointerStart(o,n,e)){if(i=(new Date).getTime(),P.add(e.doc,"mousemove",e.onPointerMove).listener(o),P.add(e.doc,"mouseup",e.onPointerUp),"longpress"!==e.gesture)return;r=setTimeout((function(){if(!(o.cancelBubble&&++o.cancelBubbleCount>1)){var t=0;for(var i in e.tracker){var r=e.tracker[i];if(!0===r.end)return;if(e.cancel)return;t++}e.minFingers<=t&&e.maxFingers>=t&&(n.state="start",n.fingers=t,n.x=r.start.x,n.y=r.start.y,e.listener(o,n))}}),e.delay)}},e.onPointerMove=function(i){for(var r=e.bbox,n=i.changedTouches||t.getCoords(i),o=n.length,s=0;s0&&l0&&u1)return;if("longpress"===e.gesture)return void("start"===n.state&&(n.state="end",e.listener(o,n)));if(e.cancel)return;if((new Date).getTime()-i>e.timeout)return;var s=e.gestureFingers;e.minFingers<=s&&e.maxFingers>=s&&(n.state="tap",n.fingers=e.gestureFingers,e.listener(o,n))}};var n=t.pointerSetup(e);return P.add(e.target,"mousedown",e.onPointerDown),n},P.Gesture=P.Gesture||{},P.Gesture._gestureHandlers=P.Gesture._gestureHandlers||{},P.Gesture._gestureHandlers.tap=t.tap,P.Gesture._gestureHandlers.longpress=t.longpress,t}(P.proxy),void 0===P)P={};if(void 0===P.proxy&&(P.proxy={}),P.proxy=function(t){return t.wheelPreventElasticBounce=function(t){t&&("string"==typeof t&&(t=document.querySelector(t)),P.add(t,"wheel",(function(t,e){e.preventElasticBounce(),P.stop(t)})))},t.wheel=function(t){var e,i=t.timeout||150,r=0,n={gesture:"wheel",state:"start",wheelDelta:0,target:t.target,listener:t.listener,preventElasticBounce:function(t){var e=this.target,i=e.scrollTop;(i+e.offsetHeight===e.scrollHeight&&this.wheelDelta<=0||0===i&&this.wheelDelta>=0)&&P.cancel(t),P.stop(t)},add:function(){t.target[s](h,o,!1)},remove:function(){t.target[a](h,o,!1)}},o=function(o){o=o||window.event,n.state=r++?"change":"start",n.wheelDelta=o.detail?-20*o.detail:o.wheelDelta,t.listener(o,n),clearTimeout(e),e=setTimeout((function(){r=0,n.state="end",n.wheelDelta=0,t.listener(o,n)}),i)},s=document.addEventListener?"addEventListener":"attachEvent",a=document.removeEventListener?"removeEventListener":"detachEvent",h=P.getEventSupport("mousewheel")?"mousewheel":"DOMMouseScroll";return t.target[s](h,o,!1),n},P.Gesture=P.Gesture||{},P.Gesture._gestureHandlers=P.Gesture._gestureHandlers||{},P.Gesture._gestureHandlers.wheel=t.wheel,t}(P.proxy),void 0===E)var E={};function k(t,e){var i=t.canvas,r=e.targetCanvas,n=r.getContext("2d");n.translate(0,r.height),n.scale(1,-1);var 
o=i.height-r.height;n.drawImage(i,0,o,r.width,r.height,0,0,r.width,r.height)}function A(t,e){var i=e.targetCanvas.getContext("2d"),r=e.destinationWidth,n=e.destinationHeight,o=r*n*4,s=new Uint8Array(this.imageBuffer,0,o),a=new Uint8ClampedArray(this.imageBuffer,0,o);t.readPixels(0,0,r,n,t.RGBA,t.UNSIGNED_BYTE,s);var h=new ImageData(a,r,n);i.putImageData(h,0,0)}void 0===E.proxy&&(E.proxy={}),E.proxy=function(t){return t.orientation=function(t){var e={gesture:"orientationchange",previous:null,current:window.orientation,target:t.target,listener:t.listener,remove:function(){window.removeEventListener("orientationchange",i,!1)}},i=function(i){e.previous=e.current,e.current=window.orientation,null===e.previous||e.previous==e.current||t.listener(i,e)};return window.DeviceOrientationEvent&&window.addEventListener("orientationchange",i,!1),e},E.Gesture=E.Gesture||{},E.Gesture._gestureHandlers=E.Gesture._gestureHandlers||{},E.Gesture._gestureHandlers.orientation=t.orientation,t}(E.proxy),function(){function t(t,e){if(this.__eventListeners[t]){var i=this.__eventListeners[t];e?i[i.indexOf(e)]=!1:T.util.array.fill(i,!1)}}function e(t,e){var i=function(){e.apply(this,arguments),this.off(t,i)}.bind(this);this.on(t,i)}T.Observable={fire:function(t,e){if(!this.__eventListeners)return this;var i=this.__eventListeners[t];if(!i)return this;for(var r=0,n=i.length;r-1||!!e&&this._objects.some((function(e){return"function"==typeof e.contains&&e.contains(t,!0)}))},complexity:function(){return this._objects.reduce((function(t,e){return t+=e.complexity?e.complexity():0}),0)}},T.CommonMethods={_setOptions:function(t){for(var e in t)this.set(e,t[e])},_initGradient:function(t,e){!t||!t.colorStops||t instanceof T.Gradient||this.set(e,new T.Gradient(t))},_initPattern:function(t,e,i){!t||!t.source||t instanceof T.Pattern?i&&i():this.set(e,new T.Pattern(t,i))},_setObject:function(t){for(var e in t)this._set(e,t[e])},set:function(t,e){return"object"==typeof t?this._setObject(t):this._set(t,e),this},_set:function(t,e){this[t]=e},toggle:function(t){var e=this.get(t);return"boolean"==typeof e&&this.set(t,!e),this},get:function(t){return this[t]}},n=t,o=Math.sqrt,s=Math.atan2,a=Math.pow,h=Math.PI/180,c=Math.PI/2,T.util={cos:function(t){if(0===t)return 1;switch(t<0&&(t=-t),t/c){case 1:case 3:return 0;case 2:return-1}return Math.cos(t)},sin:function(t){if(0===t)return 0;var e=1;switch(t<0&&(e=-1),t/c){case 1:return e;case 2:return 0;case 3:return-e}return Math.sin(t)},removeFromArray:function(t,e){var i=t.indexOf(e);return-1!==i&&t.splice(i,1),t},getRandomInt:function(t,e){return Math.floor(Math.random()*(e-t+1))+t},degreesToRadians:function(t){return t*h},radiansToDegrees:function(t){return t/h},rotatePoint:function(t,e,i){var r=new T.Point(t.x-e.x,t.y-e.y),n=T.util.rotateVector(r,i);return new T.Point(n.x,n.y).addEquals(e)},rotateVector:function(t,e){var i=T.util.sin(e),r=T.util.cos(e);return{x:t.x*r-t.y*i,y:t.x*i+t.y*r}},transformPoint:function(t,e,i){return i?new T.Point(e[0]*t.x+e[2]*t.y,e[1]*t.x+e[3]*t.y):new T.Point(e[0]*t.x+e[2]*t.y+e[4],e[1]*t.x+e[3]*t.y+e[5])},makeBoundingBoxFromPoints:function(t,e){if(e)for(var i=0;ie;)(e+=a[d++%f])>l&&(e=l),t[g?"lineTo":"moveTo"](e,0),g=!g;t.restore()},createCanvasElement:function(){return T.document.createElement("canvas")},copyCanvasElement:function(t){var e=T.util.createCanvasElement();return e.width=t.width,e.height=t.height,e.getContext("2d").drawImage(t,0,0),e},toDataURL:function(t,e,i){return t.toDataURL("image/"+e,i)},createImage:function(){return 
T.document.createElement("img")},multiplyTransformMatrices:function(t,e,i){return[t[0]*e[0]+t[2]*e[1],t[1]*e[0]+t[3]*e[1],t[0]*e[2]+t[2]*e[3],t[1]*e[2]+t[3]*e[3],i?0:t[0]*e[4]+t[2]*e[5]+t[4],i?0:t[1]*e[4]+t[3]*e[5]+t[5]]},qrDecompose:function(t){var e=s(t[1],t[0]),i=a(t[0],2)+a(t[1],2),r=o(i),n=(t[0]*t[3]-t[2]*t[1])/r,c=s(t[0]*t[2]+t[1]*t[3],i);return{angle:e/h,scaleX:r,scaleY:n,skewX:c/h,skewY:0,translateX:t[4],translateY:t[5]}},calcRotateMatrix:function(t){if(!t.angle)return T.iMatrix.concat();var e=T.util.degreesToRadians(t.angle),i=T.util.cos(e),r=T.util.sin(e);return[i,r,-r,i,0,0]},calcDimensionsMatrix:function(t){var e=void 0===t.scaleX?1:t.scaleX,i=void 0===t.scaleY?1:t.scaleY,r=[t.flipX?-e:e,0,0,t.flipY?-i:i,0,0],n=T.util.multiplyTransformMatrices,o=T.util.degreesToRadians;return t.skewX&&(r=n(r,[1,0,Math.tan(o(t.skewX)),1],!0)),t.skewY&&(r=n(r,[1,Math.tan(o(t.skewY)),0,1],!0)),r},composeMatrix:function(t){var e=[1,0,0,1,t.translateX||0,t.translateY||0],i=T.util.multiplyTransformMatrices;return t.angle&&(e=i(e,T.util.calcRotateMatrix(t))),(1!==t.scaleX||1!==t.scaleY||t.skewX||t.skewY||t.flipX||t.flipY)&&(e=i(e,T.util.calcDimensionsMatrix(t))),e},resetObjectTransform:function(t){t.scaleX=1,t.scaleY=1,t.skewX=0,t.skewY=0,t.flipX=!1,t.flipY=!1,t.rotate(0)},saveObjectTransform:function(t){return{scaleX:t.scaleX,scaleY:t.scaleY,skewX:t.skewX,skewY:t.skewY,angle:t.angle,left:t.left,flipX:t.flipX,flipY:t.flipY,top:t.top}},isTransparent:function(t,e,i,r){r>0&&(e>r?e-=r:e=0,i>r?i-=r:i=0);var n,o=!0,s=t.getImageData(e,i,2*r||1,2*r||1),a=s.data.length;for(n=3;n0?A-=2*c:1===a&&A<0&&(A+=2*c);for(var D=Math.ceil(Math.abs(A/c*2)),M=[],j=A/D,F=8/3*Math.sin(j/4)*Math.sin(j/4)/Math.sin(j/2),L=k+j,I=0;I=n?o-n:2*Math.PI-(n-o)}function s(e,i,r,n,o,s,a,h){var c;if(T.cachesBoundsOfCurve&&(c=t.call(arguments),T.boundsOfCurveCache[c]))return T.boundsOfCurveCache[c];var l,u,f,d,g,p,v,m,y=Math.sqrt,_=Math.min,b=Math.max,x=Math.abs,C=[],w=[[],[]];u=6*e-12*r+6*o,l=-3*e+9*r-9*o+3*a,f=3*r-3*e;for(var S=0;S<2;++S)if(S>0&&(u=6*i-12*n+6*s,l=-3*i+9*n-9*s+3*h,f=3*n-3*i),x(l)<1e-12){if(x(u)<1e-12)continue;0<(d=-f/u)&&d<1&&C.push(d)}else(v=u*u-4*f*l)<0||(0<(g=(-u+(m=y(v)))/(2*l))&&g<1&&C.push(g),0<(p=(-u-m)/(2*l))&&p<1&&C.push(p));for(var O,P,E,k=C.length,A=k;k--;)O=(E=1-(d=C[k]))*E*E*e+3*E*E*d*r+3*E*d*d*o+d*d*d*a,w[0][k]=O,P=E*E*E*i+3*E*E*d*n+3*E*d*d*s+d*d*d*h,w[1][k]=P;w[0][A]=e,w[1][A]=i,w[0][A+1]=a,w[1][A+1]=h;var D=[{x:_.apply(null,w[0]),y:_.apply(null,w[1])},{x:b.apply(null,w[0]),y:b.apply(null,w[1])}];return T.cachesBoundsOfCurve&&(T.boundsOfCurveCache[c]=D),D}function a(t,e,i){for(var r=i[1],o=i[2],s=i[3],a=i[4],h=i[5],c=n(i[6]-t,i[7]-e,r,o,a,h,s),l=0,u=c.length;l1e-4;)i=a(o),n=o,(r=h(c.x,c.y,i.x,i.y))+s>e?o-=l/=2:(c=i,o+=l,s+=r);return i.angle=u(n),i}function p(t){for(var e,i,r,n,o=0,s=t.length,a=0,g=0,p=0,v=0,m=[],y=0;yC)for(var S=1,O=p.length;S2;for(e=e||0,c&&(a=t[2].xt[i-2].x?1:n.x===t[i-2].x?0:-1,h=n.y>t[i-2].y?1:n.y===t[i-2].y?0:-1),r.push(["L",n.x+a*e,n.y+h*e]),r},T.util.getPathSegmentsInfo=p,T.util.getBoundsOfCurve=s,T.util.getPointOnPath=function(t,e,i){i||(i=p(t));for(var r=0;e-i[r].length>0&&r=e}))}}}(),function(){function t(e,i,r){if(r)if(!T.isLikelyNode&&i instanceof Element)e=i;else if(i instanceof Array){e=[];for(var n=0,o=i.length;n57343)return t.charAt(e);if(55296<=i&&i<=56319){if(t.length<=e+1)throw"High surrogate without following low surrogate";var r=t.charCodeAt(e+1);if(56320>r||r>57343)throw"High surrogate without following low surrogate";return t.charAt(e)+t.charAt(e+1)}if(0===e)throw"Low 
surrogate without preceding high surrogate";var n=t.charCodeAt(e-1);if(55296>n||n>56319)throw"Low surrogate without preceding high surrogate";return!1}T.util.string={camelize:function(t){return t.replace(/-+(.)?/g,(function(t,e){return e?e.toUpperCase():""}))},capitalize:function(t,e){return t.charAt(0).toUpperCase()+(e?t.slice(1):t.slice(1).toLowerCase())},escapeXml:function(t){return t.replace(/&/g,"&").replace(/"/g,""").replace(/'/g,"'").replace(//g,">")},graphemeSplit:function(e){var i,r=0,n=[];for(r=0;r-1?t.prototype[n]=function(t){return function(){var i=this.constructor.superclass;this.constructor.superclass=r;var n=e[t].apply(this,arguments);if(this.constructor.superclass=i,"initialize"!==t)return n}}(n):t.prototype[n]=e[n],i&&(e.toString!==Object.prototype.toString&&(t.prototype.toString=e.toString),e.valueOf!==Object.prototype.valueOf&&(t.prototype.valueOf=e.valueOf))};function n(){}function o(e){for(var i=null,r=this;r.constructor.superclass;){var n=r.constructor.superclass.prototype[e];if(r[e]!==n){i=n;break}r=r.constructor.superclass.prototype}return i?arguments.length>1?i.apply(this,t.call(arguments,1)):i.call(this):console.log("tried to callSuper "+e+", method not found in prototype chain",this)}T.util.createClass=function(){var i=null,s=t.call(arguments,0);function a(){this.initialize.apply(this,arguments)}"function"==typeof s[0]&&(i=s.shift()),a.superclass=i,a.subclasses=[],i&&(n.prototype=i.prototype,a.prototype=new n,i.subclasses.push(a));for(var h=0,c=s.length;h-1||"touch"===t.pointerType},f=T.document.createElement("div"),d="string"==typeof f.style.opacity,g="string"==typeof f.style.filter,v=/alpha\s*\(\s*opacity\s*=\s*([^\)]+)\)/,m=function(t){return t},d?m=function(t,e){return t.style.opacity=e,t}:g&&(m=function(t,e){var i=t.style;return t.currentStyle&&!t.currentStyle.hasLayout&&(i.zoom=1),v.test(i.filter)?(e=e>=.9999?"":"alpha(opacity="+100*e+")",i.filter=i.filter.replace(v,e)):i.filter+=" alpha(opacity="+100*e+")",t}),T.util.setStyle=function(t,e){var i=t.style;if(!i)return t;if("string"==typeof e)return t.style.cssText+=";"+e,e.indexOf("opacity")>-1?m(t,e.match(/opacity:\s*(\d?\.?\d*)/)[1]):t;for(var r in e)"opacity"===r?m(t,e[r]):i["float"===r||"cssFloat"===r?void 0===i.styleFloat?"cssFloat":"styleFloat":r]=e[r];return t},function(){var t=Array.prototype.slice;var e,i,r,n,o=function(e){return t.call(e,0)};try{e=o(T.document.childNodes)instanceof Array}catch(t){}function s(t,e){var i=T.document.createElement(t);for(var r in e)"class"===r?i.className=e[r]:"for"===r?i.htmlFor=e[r]:i.setAttribute(r,e[r]);return i}function a(t){for(var e=0,i=0,r=T.document.documentElement,n=T.document.body||{scrollLeft:0,scrollTop:0};t&&(t.parentNode||t.host)&&((t=t.parentNode||t.host)===T.document?(e=n.scrollLeft||r.scrollLeft||0,i=n.scrollTop||r.scrollTop||0):(e+=t.scrollLeft||0,i+=t.scrollTop||0),1!==t.nodeType||"fixed"!==t.style.position););return{left:e,top:i}}e||(o=function(t){for(var e=new Array(t.length),i=t.length;i--;)e[i]=t[i];return e}),i=T.document.defaultView&&T.document.defaultView.getComputedStyle?function(t,e){var i=T.document.defaultView.getComputedStyle(t,null);return i?i[e]:void 0}:function(t,e){var i=t.style[e];return!i&&t.currentStyle&&(i=t.currentStyle[e]),i},r=T.document.documentElement.style,n="userSelect"in r?"userSelect":"MozUserSelect"in r?"MozUserSelect":"WebkitUserSelect"in r?"WebkitUserSelect":"KhtmlUserSelect"in r?"KhtmlUserSelect":"",T.util.makeElementUnselectable=function(t){return void 
0!==t.onselectstart&&(t.onselectstart=T.util.falseFunction),n?t.style[n]="none":"string"==typeof t.unselectable&&(t.unselectable="on"),t},T.util.makeElementSelectable=function(t){return void 0!==t.onselectstart&&(t.onselectstart=null),n?t.style[n]="":"string"==typeof t.unselectable&&(t.unselectable=""),t},T.util.setImageSmoothing=function(t,e){t.imageSmoothingEnabled=t.imageSmoothingEnabled||t.webkitImageSmoothingEnabled||t.mozImageSmoothingEnabled||t.msImageSmoothingEnabled||t.oImageSmoothingEnabled,t.imageSmoothingEnabled=e},T.util.getById=function(t){return"string"==typeof t?T.document.getElementById(t):t},T.util.toArray=o,T.util.addClass=function(t,e){t&&-1===(" "+t.className+" ").indexOf(" "+e+" ")&&(t.className+=(t.className?" ":"")+e)},T.util.makeElement=s,T.util.wrapElement=function(t,e,i){return"string"==typeof e&&(e=s(e,i)),t.parentNode&&t.parentNode.replaceChild(e,t),e.appendChild(t),e},T.util.getScrollLeftTop=a,T.util.getElementOffset=function(t){var e,r,n=t&&t.ownerDocument,o={left:0,top:0},s={left:0,top:0},h={borderLeftWidth:"left",borderTopWidth:"top",paddingLeft:"left",paddingTop:"top"};if(!n)return s;for(var c in h)s[h[c]]+=parseInt(i(t,c),10)||0;return e=n.documentElement,void 0!==t.getBoundingClientRect&&(o=t.getBoundingClientRect()),r=a(t),{left:o.left+r.left-(e.clientLeft||0)+s.left,top:o.top+r.top-(e.clientTop||0)+s.top}},T.util.getNodeCanvas=function(t){var e=T.jsdomImplForWrapper(t);return e._canvas||e._image},T.util.cleanUpJsdomNode=function(t){if(T.isLikelyNode){var e=T.jsdomImplForWrapper(t);e&&(e._image=null,e._canvas=null,e._currentSrc=null,e._attributes=null,e._classList=null)}}}(),function(){function t(){}T.util.request=function(e,i){i||(i={});var r=i.method?i.method.toUpperCase():"GET",n=i.onComplete||function(){},o=new T.window.XMLHttpRequest,s=i.body||i.parameters;return o.onreadystatechange=function(){4===o.readyState&&(n(o),o.onreadystatechange=t)},"GET"===r&&(s=null,"string"==typeof i.parameters&&(e=function(t,e){return t+(/\?/.test(t)?"&":"?")+e}(e,i.parameters))),o.open(r,e,!0),"POST"!==r&&"PUT"!==r||o.setRequestHeader("Content-Type","application/x-www-form-urlencoded"),o.send(s),o}}(),T.log=console.log,T.warn=console.warn,function(){function t(){return!1}function e(t,e,i,r){return-i*Math.cos(t/r*(Math.PI/2))+i+e}var i=T.window.requestAnimationFrame||T.window.webkitRequestAnimationFrame||T.window.mozRequestAnimationFrame||T.window.oRequestAnimationFrame||T.window.msRequestAnimationFrame||function(t){return T.window.setTimeout(t,1e3/60)},r=T.window.cancelAnimationFrame||T.window.clearTimeout;function n(){return i.apply(T.window,arguments)}T.util.animate=function(i){var r=!1;return n((function(o){i||(i={});var s,a=o||+new Date,h=i.duration||500,c=a+h,l=i.onChange||t,u=i.abort||t,f=i.onComplete||t,d=i.easing||e,g="startValue"in i?i.startValue:0,p="endValue"in i?i.endValue:100,v=i.byValue||p-g;i.onStart&&i.onStart(),function t(e){var i=(s=e||+new Date)>c?h:s-a,o=i/h,m=d(i,g,v,h),y=Math.abs((m-g)/v);if(!r){if(!u(m,y,o))return s>c?(l(p,1,1),void f(p,1,1)):(l(m,y,o),void n(t));f(p,1,1)}}(a)})),function(){r=!0}},T.util.requestAnimFrame=n,T.util.cancelAnimFrame=function(){return r.apply(T.window,arguments)}}(),function(){function t(t,e,i){var r="rgba("+parseInt(t[0]+i*(e[0]-t[0]),10)+","+parseInt(t[1]+i*(e[1]-t[1]),10)+","+parseInt(t[2]+i*(e[2]-t[2]),10);return r+=","+(t&&e?parseFloat(t[3]+i*(e[3]-t[3])):1),r+=")"}T.util.animateColor=function(e,i,r,n){var o=new T.Color(e).getSource(),s=new T.Color(i).getSource(),a=n.onComplete,h=n.onChange;return 
n=n||{},T.util.animate(T.util.object.extend(n,{duration:r||500,startValue:o,endValue:s,byValue:s,easing:function(e,i,r,o){return t(i,r,n.colorEasing?n.colorEasing(e,o):1-Math.cos(e/o*(Math.PI/2)))},onComplete:function(e,i,r){if(a)return a(t(s,s,0),i,r)},onChange:function(e,i,r){if(h){if(Array.isArray(e))return h(t(e,e,0),i,r);h(e,i,r)}}}))}}(),function(){function t(t,e,i,r){return t-1&&l>-1&&l-1)&&(i="stroke")}else{if("href"===t||"xlink:href"===t||"font"===t)return i;if("imageSmoothing"===t)return"optimizeQuality"===i;a=h?i.map(o):o(i,n)}}else i="";return!h&&isNaN(a)?i:a}function d(t){return new RegExp("^("+t.join("|")+")\\b","i")}function g(t,e){var i,r,n,o,s=[];for(n=0,o=e.length;n1;)h.shift(),c=e.util.multiplyTransformMatrices(c,h[0]);return c}}();var y=new RegExp("^\\s*("+e.reNum+"+)\\s*,?\\s*("+e.reNum+"+)\\s*,?\\s*("+e.reNum+"+)\\s*,?\\s*("+e.reNum+"+)\\s*$");function _(t){if(!e.svgViewBoxElementsRegEx.test(t.nodeName))return{};var i,r,n,s,a,h,c=t.getAttribute("viewBox"),l=1,u=1,f=t.getAttribute("width"),d=t.getAttribute("height"),g=t.getAttribute("x")||0,p=t.getAttribute("y")||0,v=t.getAttribute("preserveAspectRatio")||"",m=!c||!(c=c.match(y)),_=!f||!d||"100%"===f||"100%"===d,b=m&&_,x={},C="",w=0,S=0;if(x.width=0,x.height=0,x.toBeParsed=b,m&&(g||p)&&t.parentNode&&"#document"!==t.parentNode.nodeName&&(C=" translate("+o(g)+" "+o(p)+") ",a=(t.getAttribute("transform")||"")+C,t.setAttribute("transform",a),t.removeAttribute("x"),t.removeAttribute("y")),b)return x;if(m)return x.width=o(f),x.height=o(d),x;if(i=-parseFloat(c[1]),r=-parseFloat(c[2]),n=parseFloat(c[3]),s=parseFloat(c[4]),x.minX=i,x.minY=r,x.viewBoxWidth=n,x.viewBoxHeight=s,_?(x.width=n,x.height=s):(x.width=o(f),x.height=o(d),l=x.width/n,u=x.height/s),"none"!==(v=e.util.parsePreserveAspectRatioAttribute(v)).alignX&&("meet"===v.meetOrSlice&&(u=l=l>u?u:l),"slice"===v.meetOrSlice&&(u=l=l>u?l:u),w=x.width-n*l,S=x.height-s*l,"Mid"===v.alignX&&(w/=2),"Mid"===v.alignY&&(S/=2),"Min"===v.alignX&&(w=0),"Min"===v.alignY&&(S=0)),1===l&&1===u&&0===i&&0===r&&0===g&&0===p)return x;if((g||p)&&"#document"!==t.parentNode.nodeName&&(C=" translate("+o(g)+" "+o(p)+") "),a=C+" matrix("+l+" 0 0 "+u+" "+(i*l+w)+" "+(r*u+S)+") ","svg"===t.nodeName){for(h=t.ownerDocument.createElementNS(e.svgNS,"g");t.firstChild;)h.appendChild(t.firstChild);t.appendChild(h)}else(h=t).removeAttribute("x"),h.removeAttribute("y"),a=h.getAttribute("transform")+a;return h.setAttribute("transform",a),x}function b(t,e){var i="xlink:href",r=m(t,e.getAttribute(i).substr(1));if(r&&r.getAttribute(i)&&b(t,r),["gradientTransform","x1","x2","y1","y2","gradientUnits","cx","cy","r","fx","fy"].forEach((function(t){r&&!e.hasAttribute(t)&&r.hasAttribute(t)&&e.setAttribute(t,r.getAttribute(t))})),!e.children.length)for(var n=r.cloneNode(!0);n.firstChild;)e.appendChild(n.firstChild);e.removeAttribute(i)}e.parseSVGDocument=function(t,i,n,o){if(t){!function(t){for(var i=g(t,["use","svg:use"]),r=0;i.length&&rt.x&&this.y>t.y},gte:function(t){return this.x>=t.x&&this.y>=t.y},lerp:function(t,e){return void 0===e&&(e=.5),e=Math.max(Math.min(1,e),0),new i(this.x+(t.x-this.x)*e,this.y+(t.y-this.y)*e)},distanceFrom:function(t){var e=this.x-t.x,i=this.y-t.y;return Math.sqrt(e*e+i*i)},midPointFrom:function(t){return this.lerp(t)},min:function(t){return new i(Math.min(this.x,t.x),Math.min(this.y,t.y))},max:function(t){return new i(Math.max(this.x,t.x),Math.max(this.y,t.y))},toString:function(){return this.x+","+this.y},setXY:function(t,e){return this.x=t,this.y=e,this},setX:function(t){return 
this.x=t,this},setY:function(t){return this.y=t,this},setFromPoint:function(t){return this.x=t.x,this.y=t.y,this},swap:function(t){var e=this.x,i=this.y;this.x=t.x,this.y=t.y,t.x=e,t.y=i},clone:function(){return new i(this.x,this.y)}})}(t),function(t){var e=t.fabric||(t.fabric={});function i(t){this.status=t,this.points=[]}e.Intersection?e.warn("fabric.Intersection is already defined"):(e.Intersection=i,e.Intersection.prototype={constructor:i,appendPoint:function(t){return this.points.push(t),this},appendPoints:function(t){return this.points=this.points.concat(t),this}},e.Intersection.intersectLineLine=function(t,r,n,o){var s,a=(o.x-n.x)*(t.y-n.y)-(o.y-n.y)*(t.x-n.x),h=(r.x-t.x)*(t.y-n.y)-(r.y-t.y)*(t.x-n.x),c=(o.y-n.y)*(r.x-t.x)-(o.x-n.x)*(r.y-t.y);if(0!==c){var l=a/c,u=h/c;0<=l&&l<=1&&0<=u&&u<=1?(s=new i("Intersection")).appendPoint(new e.Point(t.x+l*(r.x-t.x),t.y+l*(r.y-t.y))):s=new i}else s=new i(0===a||0===h?"Coincident":"Parallel");return s},e.Intersection.intersectLinePolygon=function(t,e,r){var n,o,s,a,h=new i,c=r.length;for(a=0;a0&&(h.status="Intersection"),h},e.Intersection.intersectPolygonPolygon=function(t,e){var r,n=new i,o=t.length;for(r=0;r0&&(n.status="Intersection"),n},e.Intersection.intersectPolygonRectangle=function(t,r,n){var o=r.min(n),s=r.max(n),a=new e.Point(s.x,o.y),h=new e.Point(o.x,s.y),c=i.intersectLinePolygon(o,a,t),l=i.intersectLinePolygon(a,s,t),u=i.intersectLinePolygon(s,h,t),f=i.intersectLinePolygon(h,o,t),d=new i;return d.appendPoints(c.points),d.appendPoints(l.points),d.appendPoints(u.points),d.appendPoints(f.points),d.points.length>0&&(d.status="Intersection"),d})}(t),function(t){var e=t.fabric||(t.fabric={});function i(t){t?this._tryParsingColor(t):this.setSource([0,0,0,1])}function r(t,e,i){return i<0&&(i+=1),i>1&&(i-=1),i<1/6?t+6*(e-t)*i:i<.5?e:i<2/3?t+(e-t)*(2/3-i)*6:t}e.Color?e.warn("fabric.Color is already defined."):(e.Color=i,e.Color.prototype={_tryParsingColor:function(t){var e;t in i.colorNameMap&&(t=i.colorNameMap[t]),"transparent"===t&&(e=[255,255,255,0]),e||(e=i.sourceFromHex(t)),e||(e=i.sourceFromRgb(t)),e||(e=i.sourceFromHsl(t)),e||(e=[0,0,0,1]),e&&this.setSource(e)},_rgbToHsl:function(t,i,r){t/=255,i/=255,r/=255;var n,o,s,a=e.util.array.max([t,i,r]),h=e.util.array.min([t,i,r]);if(s=(a+h)/2,a===h)n=o=0;else{var c=a-h;switch(o=s>.5?c/(2-a-h):c/(a+h),a){case t:n=(i-r)/c+(i0)-(t<0)||+t};function d(t,e){var i=t.angle+u(Math.atan2(e.y,e.x))+360;return Math.round(i%360/45)}function g(t,i){var r=i.transform.target,n=r.canvas,o=e.util.object.clone(i);o.target=r,n&&n.fire("object:"+t,o),r.fire(t,i)}function p(t,e){var i=e.canvas,r=t[i.uniScaleKey];return i.uniformScaling&&!r||!i.uniformScaling&&r}function v(t){return t.originX===c&&t.originY===c}function m(t,e,i){var r=t.lockScalingX,n=t.lockScalingY;return!(!r||!n)||(!(e||!r&&!n||!i)||(!(!r||"x"!==e)||!(!n||"y"!==e)))}function y(t,e,i,r){return{e:t,transform:e,pointer:{x:i,y:r}}}function _(t){return function(e,i,r,n){var o=i.target,s=o.getCenterPoint(),a=o.translateToOriginPoint(s,i.originX,i.originY),h=t(e,i,r,n);return o.setPositionByOrigin(a,i.originX,i.originY),h}}function b(t,e){return function(i,r,n,o){var s=e(i,r,n,o);return s&&g(t,y(i,r,n,o)),s}}function x(t,i,r,n,o){var s=t.target,a=s.controls[t.corner],h=s.canvas.getZoom(),c=s.padding/h,l=s.toLocalPoint(new e.Point(n,o),i,r);return l.x>=c&&(l.x-=c),l.x<=-c&&(l.x+=c),l.y>=c&&(l.y-=c),l.y<=c&&(l.y+=c),l.x-=a.offsetX,l.y-=a.offsetY,l}function C(t){return t.flipX!==t.flipY}function w(t,e,i,r,n){if(0!==t[e]){var 
o=n/t._getTransformedDimensions()[r]*t[i];t.set(i,o)}}function S(t,e,i,r){var n,c=e.target,l=c._getTransformedDimensions(0,c.skewY),f=x(e,e.originX,e.originY,i,r),d=Math.abs(2*f.x)-l.x,g=c.skewX;d<2?n=0:(n=u(Math.atan2(d/c.scaleX,l.y/c.scaleY)),e.originX===o&&e.originY===h&&(n=-n),e.originX===a&&e.originY===s&&(n=-n),C(c)&&(n=-n));var p=g!==n;if(p){var v=c._getTransformedDimensions().y;c.set("skewX",n),w(c,"skewY","scaleY","y",v)}return p}function T(t,e,i,r){var n,c=e.target,l=c._getTransformedDimensions(c.skewX,0),f=x(e,e.originX,e.originY,i,r),d=Math.abs(2*f.y)-l.y,g=c.skewY;d<2?n=0:(n=u(Math.atan2(d/c.scaleY,l.x/c.scaleX)),e.originX===o&&e.originY===h&&(n=-n),e.originX===a&&e.originY===s&&(n=-n),C(c)&&(n=-n));var p=g!==n;if(p){var v=c._getTransformedDimensions().x;c.set("skewY",n),w(c,"skewX","scaleX","x",v)}return p}function O(t,e,i,r,n){n=n||{};var o,s,a,h,c,u,d=e.target,g=d.lockScalingX,y=d.lockScalingY,_=n.by,b=p(t,d),C=m(d,_,b),w=e.gestureScale;if(C)return!1;if(w)s=e.scaleX*w,a=e.scaleY*w;else{if(o=x(e,e.originX,e.originY,i,r),c="y"!==_?f(o.x):1,u="x"!==_?f(o.y):1,e.signX||(e.signX=c),e.signY||(e.signY=u),d.lockScalingFlip&&(e.signX!==c||e.signY!==u))return!1;if(h=d._getTransformedDimensions(),b&&!_){var S=Math.abs(o.x)+Math.abs(o.y),T=e.original,O=S/(Math.abs(h.x*T.scaleX/d.scaleX)+Math.abs(h.y*T.scaleY/d.scaleY));s=T.scaleX*O,a=T.scaleY*O}else s=Math.abs(o.x*d.scaleX/h.x),a=Math.abs(o.y*d.scaleY/h.y);v(e)&&(s*=2,a*=2),e.signX!==c&&"y"!==_&&(e.originX=l[e.originX],s*=-1,e.signX=c),e.signY!==u&&"x"!==_&&(e.originY=l[e.originY],a*=-1,e.signY=u)}var P=d.scaleX,E=d.scaleY;return _?("x"===_&&d.set("scaleX",s),"y"===_&&d.set("scaleY",a)):(!g&&d.set("scaleX",s),!y&&d.set("scaleY",a)),P!==d.scaleX||E!==d.scaleY}n.scaleCursorStyleHandler=function(t,e,r){var n=p(t,r),o="";if(0!==e.x&&0===e.y?o="x":0===e.x&&0!==e.y&&(o="y"),m(r,o,n))return"not-allowed";var s=d(r,e);return i[s]+"-resize"},n.skewCursorStyleHandler=function(t,e,i){var n="not-allowed";if(0!==e.x&&i.lockSkewingY)return n;if(0!==e.y&&i.lockSkewingX)return n;var o=d(i,e)%4;return r[o]+"-resize"},n.scaleSkewCursorStyleHandler=function(t,e,i){return t[i.canvas.altActionKey]?n.skewCursorStyleHandler(t,e,i):n.scaleCursorStyleHandler(t,e,i)},n.rotationWithSnapping=b("rotating",_((function(t,e,i,r){var n=e,o=n.target,s=o.translateToOriginPoint(o.getCenterPoint(),n.originX,n.originY);if(o.lockRotation)return!1;var a,h=Math.atan2(n.ey-s.y,n.ex-s.x),c=Math.atan2(r-s.y,i-s.x),l=u(c-h+n.theta);if(o.snapAngle>0){var f=o.snapAngle,d=o.snapThreshold||f,g=Math.ceil(l/f)*f,p=Math.floor(l/f)*f;Math.abs(l-p)0?o:a:(l>0&&(n=u===s?o:a),l<0&&(n=u===s?a:o),C(h)&&(n=n===o?a:o)),e.originX=n,b("skewing",_(S))(t,e,i,r))},n.skewHandlerY=function(t,e,i,r){var n,a=e.target,l=a.skewY,u=e.originX;return!a.lockSkewingY&&(0===l?n=x(e,c,c,i,r).y>0?s:h:(l>0&&(n=u===o?s:h),l<0&&(n=u===o?h:s),C(a)&&(n=n===s?h:s)),e.originY=n,b("skewing",_(T))(t,e,i,r))},n.dragHandler=function(t,e,i,r){var n=e.target,o=i-e.offsetX,s=r-e.offsetY,a=!n.get("lockMovementX")&&n.left!==o,h=!n.get("lockMovementY")&&n.top!==s;return a&&n.set("left",o),h&&n.set("top",s),(a||h)&&g("moving",y(t,e,i,r)),a||h},n.scaleOrSkewActionName=function(t,e,i){var r=t[i.canvas.altActionKey];return 0===e.x?r?"skewX":"scaleY":0===e.y?r?"skewY":"scaleX":void 0},n.rotationStyleHandler=function(t,e,i){return i.lockRotation?"not-allowed":e.cursorStyle},n.fireEvent=g,n.wrapWithFixedAnchor=_,n.wrapWithFireEvent=b,n.getLocalPoint=x,e.controlsUtils=n}(t),function(t){var 
e=t.fabric||(t.fabric={}),i=e.util.degreesToRadians,r=e.controlsUtils;r.renderCircleControl=function(t,e,i,r,n){r=r||{};var o,s=this.sizeX||r.cornerSize||n.cornerSize,a=this.sizeY||r.cornerSize||n.cornerSize,h=void 0!==r.transparentCorners?r.transparentCorners:n.transparentCorners,c=h?"stroke":"fill",l=!h&&(r.cornerStrokeColor||n.cornerStrokeColor),u=e,f=i;t.save(),t.fillStyle=r.cornerColor||n.cornerColor,t.strokeStyle=r.cornerStrokeColor||n.cornerStrokeColor,s>a?(o=s,t.scale(1,a/s),f=i*s/a):a>s?(o=a,t.scale(s/a,1),u=e*a/s):o=s,t.lineWidth=1,t.beginPath(),t.arc(u,f,o/2,0,2*Math.PI,!1),t[c](),l&&t.stroke(),t.restore()},r.renderSquareControl=function(t,e,r,n,o){n=n||{};var s=this.sizeX||n.cornerSize||o.cornerSize,a=this.sizeY||n.cornerSize||o.cornerSize,h=void 0!==n.transparentCorners?n.transparentCorners:o.transparentCorners,c=h?"stroke":"fill",l=!h&&(n.cornerStrokeColor||o.cornerStrokeColor),u=s/2,f=a/2;t.save(),t.fillStyle=n.cornerColor||o.cornerColor,t.strokeStyle=n.cornerStrokeColor||o.cornerStrokeColor,t.lineWidth=1,t.translate(e,r),t.rotate(i(o.angle)),t[c+"Rect"](-u,-f,s,a),l&&t.strokeRect(-u,-f,s,a),t.restore()}}(t),function(t){var e=t.fabric||(t.fabric={});e.Control=function(t){for(var e in t)this[e]=t[e]},e.Control.prototype={visible:!0,actionName:"scale",angle:0,x:0,y:0,offsetX:0,offsetY:0,sizeX:null,sizeY:null,touchSizeX:null,touchSizeY:null,cursorStyle:"crosshair",withConnection:!1,actionHandler:function(){},mouseDownHandler:function(){},mouseUpHandler:function(){},getActionHandler:function(){return this.actionHandler},getMouseDownHandler:function(){return this.mouseDownHandler},getMouseUpHandler:function(){return this.mouseUpHandler},cursorStyleHandler:function(t,e){return e.cursorStyle},getActionName:function(t,e){return e.actionName},getVisibility:function(t,e){var i=t._controlsVisibility;return i&&void 0!==i[e]?i[e]:this.visible},setVisibility:function(t){this.visible=t},positionHandler:function(t,i){return e.util.transformPoint({x:this.x*t.x+this.offsetX,y:this.y*t.y+this.offsetY},i)},calcCornerCoords:function(t,i,r,n,o){var s,a,h,c,l=o?this.touchSizeX:this.sizeX,u=o?this.touchSizeY:this.sizeY;if(l&&u&&l!==u){var f=Math.atan2(u,l),d=Math.sqrt(l*l+u*u)/2,g=f-e.util.degreesToRadians(t),p=Math.PI/2-f-e.util.degreesToRadians(t);s=d*e.util.cos(g),a=d*e.util.sin(g),h=d*e.util.cos(p),c=d*e.util.sin(p)}else{d=.7071067812*(l&&u?l:i);g=e.util.degreesToRadians(45-t);s=h=d*e.util.cos(g),a=c=d*e.util.sin(g)}return{tl:{x:r-c,y:n-h},tr:{x:r+s,y:n-a},bl:{x:r-s,y:n+a},br:{x:r+c,y:n+h}}},render:function(t,i,r,n,o){if("circle"===((n=n||{}).cornerStyle||o.cornerStyle))e.controlsUtils.renderCircleControl.call(this,t,i,r,n,o);else e.controlsUtils.renderSquareControl.call(this,t,i,r,n,o)}}}(t),function(){function t(t,e){var i,r,n,o,s=t.getAttribute("style"),a=t.getAttribute("offset")||0;if(a=(a=parseFloat(a)/(/%$/.test(a)?100:1))<0?0:a>1?1:a,s){var h=s.split(/\s*;\s*/);for(""===h[h.length-1]&&h.pop(),o=h.length;o--;){var c=h[o].split(/\s*:\s*/),l=c[0].trim(),u=c[1].trim();"stop-color"===l?i=u:"stop-opacity"===l&&(n=u)}}return i||(i=t.getAttribute("stop-color")||"rgb(0,0,0)"),n||(n=t.getAttribute("stop-opacity")),r=(i=new T.Color(i)).getAlpha(),n=isNaN(parseFloat(n))?1:parseFloat(n),n*=r*e,{offset:a,color:i.toRgb(),opacity:n}}var e=T.util.object.clone;T.Gradient=T.util.createClass({offsetX:0,offsetY:0,gradientTransform:null,gradientUnits:"pixels",type:"linear",initialize:function(t){t||(t={}),t.coords||(t.coords={});var 
e,i=this;Object.keys(t).forEach((function(e){i[e]=t[e]})),this.id?this.id+="_"+T.Object.__uid++:this.id=T.Object.__uid++,e={x1:t.coords.x1||0,y1:t.coords.y1||0,x2:t.coords.x2||0,y2:t.coords.y2||0},"radial"===this.type&&(e.r1=t.coords.r1||0,e.r2=t.coords.r2||0),this.coords=e,this.colorStops=t.colorStops.slice()},addColorStop:function(t){for(var e in t){var i=new T.Color(t[e]);this.colorStops.push({offset:parseFloat(e),color:i.toRgb(),opacity:i.getAlpha()})}return this},toObject:function(t){var e={type:this.type,coords:this.coords,colorStops:this.colorStops,offsetX:this.offsetX,offsetY:this.offsetY,gradientUnits:this.gradientUnits,gradientTransform:this.gradientTransform?this.gradientTransform.concat():this.gradientTransform};return T.util.populateWithProperties(this,e,t),e},toSVG:function(t,i){var r,n,o,s,a=e(this.coords,!0),h=(i=i||{},e(this.colorStops,!0)),c=a.r1>a.r2,l=this.gradientTransform?this.gradientTransform.concat():T.iMatrix.concat(),u=-this.offsetX,f=-this.offsetY,d=!!i.additionalTransform,g="pixels"===this.gradientUnits?"userSpaceOnUse":"objectBoundingBox";if(h.sort((function(t,e){return t.offset-e.offset})),"objectBoundingBox"===g?(u/=t.width,f/=t.height):(u+=t.width/2,f+=t.height/2),"path"===t.type&&"percentage"!==this.gradientUnits&&(u-=t.pathOffset.x,f-=t.pathOffset.y),l[4]-=u,l[5]-=f,s='id="SVGID_'+this.id+'" gradientUnits="'+g+'"',s+=' gradientTransform="'+(d?i.additionalTransform+" ":"")+T.util.matrixToSVG(l)+'" ',"linear"===this.type?o=["\n']:"radial"===this.type&&(o=["\n']),"radial"===this.type){if(c)for((h=h.concat()).reverse(),r=0,n=h.length;r0){var v=p/Math.max(a.r1,a.r2);for(r=0,n=h.length;r\n')}return o.push("linear"===this.type?"\n":"\n"),o.join("")},toLive:function(t){var e,i,r,n=T.util.object.clone(this.coords);if(this.type){for("linear"===this.type?e=t.createLinearGradient(n.x1,n.y1,n.x2,n.y2):"radial"===this.type&&(e=t.createRadialGradient(n.x1,n.y1,n.r1,n.x2,n.y2,n.r2)),i=0,r=this.colorStops.length;i1?1:o,isNaN(o)&&(o=1);var s,a,h,c,l=e.getElementsByTagName("stop"),u="userSpaceOnUse"===e.getAttribute("gradientUnits")?"pixels":"percentage",f=e.getAttribute("gradientTransform")||"",d=[],g=0,p=0;for("linearGradient"===e.nodeName||"LINEARGRADIENT"===e.nodeName?(s="linear",a=function(t){return{x1:t.getAttribute("x1")||0,y1:t.getAttribute("y1")||0,x2:t.getAttribute("x2")||"100%",y2:t.getAttribute("y2")||0}}(e)):(s="radial",a=function(t){return{x1:t.getAttribute("fx")||t.getAttribute("cx")||"50%",y1:t.getAttribute("fy")||t.getAttribute("cy")||"50%",r1:0,x2:t.getAttribute("cx")||"50%",y2:t.getAttribute("cy")||"50%",r2:t.getAttribute("r")||"50%"}}(e)),h=l.length;h--;)d.push(t(l[h],o));return c=T.parseTransformAttribute(f),function(t,e,i,r){var n,o;Object.keys(e).forEach((function(t){"Infinity"===(n=e[t])?o=1:"-Infinity"===n?o=0:(o=parseFloat(e[t],10),"string"==typeof n&&/^(\d+\.\d+)%|(\d+)%$/.test(n)&&(o*=.01,"pixels"===r&&("x1"!==t&&"x2"!==t&&"r2"!==t||(o*=i.viewBoxWidth||i.width),"y1"!==t&&"y2"!==t||(o*=i.viewBoxHeight||i.height)))),e[t]=o}))}(0,a,n,u),"pixels"===u&&(g=-i.left,p=-i.top),new T.Gradient({id:e.getAttribute("id"),type:s,coords:a,colorStops:d,gradientUnits:u,gradientTransform:c,offsetX:g,offsetY:p})}})}(),_=T.util.toFixed,T.Pattern=T.util.createClass({repeat:"repeat",offsetX:0,offsetY:0,crossOrigin:"",patternTransform:null,initialize:function(t,e){if(t||(t={}),this.id=T.Object.__uid++,this.setOptions(t),!t.source||t.source&&"string"!=typeof t.source)e&&e(this);else{var 
i=this;this.source=T.util.createImage(),T.util.loadImage(t.source,(function(t,r){i.source=t,e&&e(i,r)}),null,this.crossOrigin)}},toObject:function(t){var e,i,r=T.Object.NUM_FRACTION_DIGITS;return"string"==typeof this.source.src?e=this.source.src:"object"==typeof this.source&&this.source.toDataURL&&(e=this.source.toDataURL()),i={type:"pattern",source:e,repeat:this.repeat,crossOrigin:this.crossOrigin,offsetX:_(this.offsetX,r),offsetY:_(this.offsetY,r),patternTransform:this.patternTransform?this.patternTransform.concat():null},T.util.populateWithProperties(this,i,t),i},toSVG:function(t){var e="function"==typeof this.source?this.source():this.source,i=e.width/t.width,r=e.height/t.height,n=this.offsetX/t.width,o=this.offsetY/t.height,s="";return"repeat-x"!==this.repeat&&"no-repeat"!==this.repeat||(r=1,o&&(r+=Math.abs(o))),"repeat-y"!==this.repeat&&"no-repeat"!==this.repeat||(i=1,n&&(i+=Math.abs(n))),e.src?s=e.src:e.toDataURL&&(s=e.toDataURL()),'\n\n\n'},setOptions:function(t){for(var e in t)this[e]=t[e]},toLive:function(t){var e=this.source;if(!e)return"";if(void 0!==e.src){if(!e.complete)return"";if(0===e.naturalWidth||0===e.naturalHeight)return""}return t.createPattern(e,this.repeat)}}),function(t){var e=t.fabric||(t.fabric={}),i=e.util.toFixed;e.Shadow?e.warn("fabric.Shadow is already defined."):(e.Shadow=e.util.createClass({color:"rgb(0,0,0)",blur:0,offsetX:0,offsetY:0,affectStroke:!1,includeDefaultValues:!0,nonScaling:!1,initialize:function(t){for(var i in"string"==typeof t&&(t=this._parseShadow(t)),t)this[i]=t[i];this.id=e.Object.__uid++},_parseShadow:function(t){var i=t.trim(),r=e.Shadow.reOffsetsAndBlur.exec(i)||[];return{color:(i.replace(e.Shadow.reOffsetsAndBlur,"")||"rgb(0,0,0)").trim(),offsetX:parseFloat(r[1],10)||0,offsetY:parseFloat(r[2],10)||0,blur:parseFloat(r[3],10)||0}},toString:function(){return[this.offsetX,this.offsetY,this.blur,this.color].join("px ")},toSVG:function(t){var r=40,n=40,o=e.Object.NUM_FRACTION_DIGITS,s=e.util.rotateVector({x:this.offsetX,y:this.offsetY},e.util.degreesToRadians(-t.angle)),a=new e.Color(this.color);return t.width&&t.height&&(r=100*i((Math.abs(s.x)+this.blur)/t.width,o)+20,n=100*i((Math.abs(s.y)+this.blur)/t.height,o)+20),t.flipX&&(s.x*=-1),t.flipY&&(s.y*=-1),'\n\t\n\t\n\t\n\t\n\t\n\t\t\n\t\t\n\t\n\n'},toObject:function(){if(this.includeDefaultValues)return{color:this.color,blur:this.blur,offsetX:this.offsetX,offsetY:this.offsetY,affectStroke:this.affectStroke,nonScaling:this.nonScaling};var t={},i=e.Shadow.prototype;return["color","blur","offsetX","offsetY","affectStroke","nonScaling"].forEach((function(e){this[e]!==i[e]&&(t[e]=this[e])}),this),t}}),e.Shadow.reOffsetsAndBlur=/(?:\s|^)(-?\d+(?:\.\d*)?(?:px)?(?:\s?|$))?(-?\d+(?:\.\d*)?(?:px)?(?:\s?|$))?(\d+(?:\.\d*)?(?:px)?)?(?:\s?|$)(?:$|\s)/)}(t),function(){if(T.StaticCanvas)T.warn("fabric.StaticCanvas is already defined.");else{var t=T.util.object.extend,e=T.util.getElementOffset,i=T.util.removeFromArray,r=T.util.toFixed,n=T.util.transformPoint,o=T.util.invertTransform,s=T.util.getNodeCanvas,a=T.util.createCanvasElement,h=new Error("Could not initialize `canvas` 
element");T.StaticCanvas=T.util.createClass(T.CommonMethods,{initialize:function(t,e){e||(e={}),this.renderAndResetBound=this.renderAndReset.bind(this),this.requestRenderAllBound=this.requestRenderAll.bind(this),this._initStatic(t,e)},backgroundColor:"",backgroundImage:null,overlayColor:"",overlayImage:null,includeDefaultValues:!0,stateful:!1,renderOnAddRemove:!0,controlsAboveOverlay:!1,allowTouchScrolling:!1,imageSmoothingEnabled:!0,viewportTransform:T.iMatrix.concat(),backgroundVpt:!0,overlayVpt:!0,enableRetinaScaling:!0,vptCoords:{},skipOffscreen:!0,clipPath:void 0,_initStatic:function(t,e){var i=this.requestRenderAllBound;this._objects=[],this._createLowerCanvas(t),this._initOptions(e),this.interactive||this._initRetinaScaling(),e.overlayImage&&this.setOverlayImage(e.overlayImage,i),e.backgroundImage&&this.setBackgroundImage(e.backgroundImage,i),e.backgroundColor&&this.setBackgroundColor(e.backgroundColor,i),e.overlayColor&&this.setOverlayColor(e.overlayColor,i),this.calcOffset()},_isRetinaScaling:function(){return 1!==T.devicePixelRatio&&this.enableRetinaScaling},getRetinaScaling:function(){return this._isRetinaScaling()?T.devicePixelRatio:1},_initRetinaScaling:function(){if(this._isRetinaScaling()){var t=T.devicePixelRatio;this.__initRetinaScaling(t,this.lowerCanvasEl,this.contextContainer),this.upperCanvasEl&&this.__initRetinaScaling(t,this.upperCanvasEl,this.contextTop)}},__initRetinaScaling:function(t,e,i){e.setAttribute("width",this.width*t),e.setAttribute("height",this.height*t),i.scale(t,t)},calcOffset:function(){return this._offset=e(this.lowerCanvasEl),this},setOverlayImage:function(t,e,i){return this.__setBgOverlayImage("overlayImage",t,e,i)},setBackgroundImage:function(t,e,i){return this.__setBgOverlayImage("backgroundImage",t,e,i)},setOverlayColor:function(t,e){return this.__setBgOverlayColor("overlayColor",t,e)},setBackgroundColor:function(t,e){return this.__setBgOverlayColor("backgroundColor",t,e)},__setBgOverlayImage:function(t,e,i,r){return"string"==typeof e?T.util.loadImage(e,(function(e,n){if(e){var o=new T.Image(e,r);this[t]=o,o.canvas=this}i&&i(e,n)}),this,r&&r.crossOrigin):(r&&e.setOptions(r),this[t]=e,e&&(e.canvas=this),i&&i(e,!1)),this},__setBgOverlayColor:function(t,e,i){return this[t]=e,this._initGradient(e,t),this._initPattern(e,t,i),this},_createCanvasElement:function(){var t=a();if(!t)throw h;if(t.style||(t.style={}),void 0===t.getContext)throw h;return t},_initOptions:function(t){var e=this.lowerCanvasEl;this._setOptions(t),this.width=this.width||parseInt(e.width,10)||0,this.height=this.height||parseInt(e.height,10)||0,this.lowerCanvasEl.style&&(e.width=this.width,e.height=this.height,e.style.width=this.width+"px",e.style.height=this.height+"px",this.viewportTransform=this.viewportTransform.slice())},_createLowerCanvas:function(t){t&&t.getContext?this.lowerCanvasEl=t:this.lowerCanvasEl=T.util.getById(t)||this._createCanvasElement(),T.util.addClass(this.lowerCanvasEl,"lower-canvas"),this._originalCanvasStyle=this.lowerCanvasEl.style,this.interactive&&this._applyCanvasStyle(this.lowerCanvasEl),this.contextContainer=this.lowerCanvasEl.getContext("2d")},getWidth:function(){return this.width},getHeight:function(){return this.height},setWidth:function(t,e){return this.setDimensions({width:t},e)},setHeight:function(t,e){return this.setDimensions({height:t},e)},setDimensions:function(t,e){var i;for(var r in e=e||{},t)i=t[r],e.cssOnly||(this._setBackstoreDimension(r,t[r]),i+="px",this.hasLostContext=!0),e.backstoreOnly||this._setCssDimension(r,i);return 
this._isCurrentlyDrawing&&this.freeDrawingBrush&&this.freeDrawingBrush._setBrushStyles(),this._initRetinaScaling(),this.calcOffset(),e.cssOnly||this.requestRenderAll(),this},_setBackstoreDimension:function(t,e){return this.lowerCanvasEl[t]=e,this.upperCanvasEl&&(this.upperCanvasEl[t]=e),this.cacheCanvasEl&&(this.cacheCanvasEl[t]=e),this[t]=e,this},_setCssDimension:function(t,e){return this.lowerCanvasEl.style[t]=e,this.upperCanvasEl&&(this.upperCanvasEl.style[t]=e),this.wrapperEl&&(this.wrapperEl.style[t]=e),this},getZoom:function(){return this.viewportTransform[0]},setViewportTransform:function(t){var e,i,r,n=this._activeObject,o=this.backgroundImage,s=this.overlayImage;for(this.viewportTransform=t,i=0,r=this._objects.length;i\n'),this._setSVGBgOverlayColor(i,"background"),this._setSVGBgOverlayImage(i,"backgroundImage",e),this._setSVGObjects(i,e),this.clipPath&&i.push("\n"),this._setSVGBgOverlayColor(i,"overlay"),this._setSVGBgOverlayImage(i,"overlayImage",e),i.push(""),i.join("")},_setSVGPreamble:function(t,e){e.suppressPreamble||t.push('\n','\n')},_setSVGHeader:function(t,e){var i,n=e.width||this.width,o=e.height||this.height,s='viewBox="0 0 '+this.width+" "+this.height+'" ',a=T.Object.NUM_FRACTION_DIGITS;e.viewBox?s='viewBox="'+e.viewBox.x+" "+e.viewBox.y+" "+e.viewBox.width+" "+e.viewBox.height+'" ':this.svgViewportTransformation&&(i=this.viewportTransform,s='viewBox="'+r(-i[4]/i[0],a)+" "+r(-i[5]/i[3],a)+" "+r(this.width/i[0],a)+" "+r(this.height/i[3],a)+'" '),t.push("\n',"Created with Fabric.js ",T.version,"\n","\n",this.createSVGFontFacesMarkup(),this.createSVGRefElementsMarkup(),this.createSVGClipPathMarkup(e),"\n")},createSVGClipPathMarkup:function(t){var e=this.clipPath;return e?(e.clipPathId="CLIPPATH_"+T.Object.__uid++,'\n'+this.clipPath.toClipPathSVG(t.reviver)+"\n"):""},createSVGRefElementsMarkup:function(){var t=this;return["background","overlay"].map((function(e){var i=t[e+"Color"];if(i&&i.toLive){var r=t[e+"Vpt"],n=t.viewportTransform,o={width:t.width/(r?n[0]:1),height:t.height/(r?n[3]:1)};return i.toSVG(o,{additionalTransform:r?T.util.matrixToSVG(n):""})}})).join("")},createSVGFontFacesMarkup:function(){var t,e,i,r,n,o,s,a,h="",c={},l=T.fontPaths,u=[];for(this._objects.forEach((function t(e){u.push(e),e._objects&&e._objects.forEach(t)})),s=0,a=u.length;s',"\n",h,"","\n"].join("")),h},_setSVGObjects:function(t,e){var i,r,n,o=this._objects;for(r=0,n=o.length;r\n")}else t.push('\n")},sendToBack:function(t){if(!t)return this;var e,r,n,o=this._activeObject;if(t===o&&"activeSelection"===t.type)for(e=(n=o._objects).length;e--;)r=n[e],i(this._objects,r),this._objects.unshift(r);else i(this._objects,t),this._objects.unshift(t);return this.renderOnAddRemove&&this.requestRenderAll(),this},bringToFront:function(t){if(!t)return this;var e,r,n,o=this._activeObject;if(t===o&&"activeSelection"===t.type)for(n=o._objects,e=0;e0+c&&(s=o-1,i(this._objects,n),this._objects.splice(s,0,n)),c++;else 0!==(o=this._objects.indexOf(t))&&(s=this._findNewLowerIndex(t,o,e),i(this._objects,t),this._objects.splice(s,0,t));return this.renderOnAddRemove&&this.requestRenderAll(),this},_findNewLowerIndex:function(t,e,i){var r,n;if(i)for(r=e,n=e-1;n>=0;--n){if(t.intersectsWithObject(this._objects[n])||t.isContainedWithinObject(this._objects[n])||this._objects[n].isContainedWithinObject(t)){r=n;break}}else r=e-1;return r},bringForward:function(t,e){if(!t)return this;var 
r,n,o,s,a,h=this._activeObject,c=0;if(t===h&&"activeSelection"===t.type)for(r=(a=h._objects).length;r--;)n=a[r],(o=this._objects.indexOf(n))"}}),t(T.StaticCanvas.prototype,T.Observable),t(T.StaticCanvas.prototype,T.Collection),t(T.StaticCanvas.prototype,T.DataURLExporter),t(T.StaticCanvas,{EMPTY_JSON:'{"objects": [], "background": "white"}',supports:function(t){var e=a();if(!e||!e.getContext)return null;var i=e.getContext("2d");return i&&"setLineDash"===t?void 0!==i.setLineDash:null}}),T.StaticCanvas.prototype.toJSON=T.StaticCanvas.prototype.toObject,T.isLikelyNode&&(T.StaticCanvas.prototype.createPNGStream=function(){var t=s(this.lowerCanvasEl);return t&&t.createPNGStream()},T.StaticCanvas.prototype.createJPEGStream=function(t){var e=s(this.lowerCanvasEl);return e&&e.createJPEGStream(t)})}}(),T.BaseBrush=T.util.createClass({color:"rgb(0, 0, 0)",width:1,shadow:null,strokeLineCap:"round",strokeLineJoin:"round",strokeMiterLimit:10,strokeDashArray:null,limitedToCanvasSize:!1,_setBrushStyles:function(){var t=this.canvas.contextTop;t.strokeStyle=this.color,t.lineWidth=this.width,t.lineCap=this.strokeLineCap,t.miterLimit=this.strokeMiterLimit,t.lineJoin=this.strokeLineJoin,t.setLineDash(this.strokeDashArray||[])},_saveAndTransform:function(t){var e=this.canvas.viewportTransform;t.save(),t.transform(e[0],e[1],e[2],e[3],e[4],e[5])},_setShadow:function(){if(this.shadow){var t=this.canvas,e=this.shadow,i=t.contextTop,r=t.getZoom();t&&t._isRetinaScaling()&&(r*=T.devicePixelRatio),i.shadowColor=e.color,i.shadowBlur=e.blur*r,i.shadowOffsetX=e.offsetX*r,i.shadowOffsetY=e.offsetY*r}},needsFullRender:function(){return new T.Color(this.color).getAlpha()<1||!!this.shadow},_resetShadow:function(){var t=this.canvas.contextTop;t.shadowColor="",t.shadowBlur=t.shadowOffsetX=t.shadowOffsetY=0},_isOutSideCanvas:function(t){return t.x<0||t.x>this.canvas.getWidth()||t.y<0||t.y>this.canvas.getHeight()}}),T.PencilBrush=T.util.createClass(T.BaseBrush,{decimate:.4,initialize:function(t){this.canvas=t,this._points=[]},_drawSegment:function(t,e,i){var r=e.midPointFrom(i);return t.quadraticCurveTo(e.x,e.y,r.x,r.y),r},onMouseDown:function(t,e){this.canvas._isMainEvent(e.e)&&(this._prepareForDrawing(t),this._captureDrawingPath(t),this._render())},onMouseMove:function(t,e){if(this.canvas._isMainEvent(e.e)&&(!0!==this.limitedToCanvasSize||!this._isOutSideCanvas(t))&&this._captureDrawingPath(t)&&this._points.length>1)if(this.needsFullRender())this.canvas.clearContext(this.canvas.contextTop),this._render();else{var i=this._points,r=i.length,n=this.canvas.contextTop;this._saveAndTransform(n),this.oldEnd&&(n.beginPath(),n.moveTo(this.oldEnd.x,this.oldEnd.y)),this.oldEnd=this._drawSegment(n,i[r-2],i[r-1],!0),n.stroke(),n.restore()}},onMouseUp:function(t){return!this.canvas._isMainEvent(t.e)||(this.oldEnd=void 0,this._finalizeAndAddPath(),!1)},_prepareForDrawing:function(t){var e=new T.Point(t.x,t.y);this._reset(),this._addPoint(e),this.canvas.contextTop.moveTo(e.x,e.y)},_addPoint:function(t){return!(this._points.length>1&&t.eq(this._points[this._points.length-1])||(this._points.push(t),0))},_reset:function(){this._points=[],this._setBrushStyles(),this._setShadow()},_captureDrawingPath:function(t){var e=new T.Point(t.x,t.y);return this._addPoint(e)},_render:function(){var t,e,i=this.canvas.contextTop,r=this._points[0],n=this._points[1];if(this._saveAndTransform(i),i.beginPath(),2===this._points.length&&r.x===n.x&&r.y===n.y){var o=this.width/1e3;r=new T.Point(r.x,r.y),n=new 
T.Point(n.x,n.y),r.x-=o,n.x+=o}for(i.moveTo(r.x,r.y),t=1,e=this._points.length;t=n&&(s=t[i],a.push(s));return a.push(t[o]),a},_finalizeAndAddPath:function(){this.canvas.contextTop.closePath(),this.decimate&&(this._points=this.decimatePoints(this._points,this.decimate));var t=this.convertPointsToSVGPath(this._points);if(this._isEmptySVGPath(t))this.canvas.requestRenderAll();else{var e=this.createPath(t);this.canvas.clearContext(this.canvas.contextTop),this.canvas.fire("before:path:created",{path:e}),this.canvas.add(e),this.canvas.requestRenderAll(),e.setCoords(),this._resetShadow(),this.canvas.fire("path:created",{path:e})}}}),T.CircleBrush=T.util.createClass(T.BaseBrush,{width:10,initialize:function(t){this.canvas=t,this.points=[]},drawDot:function(t){var e=this.addPoint(t),i=this.canvas.contextTop;this._saveAndTransform(i),this.dot(i,e),i.restore()},dot:function(t,e){t.fillStyle=e.fill,t.beginPath(),t.arc(e.x,e.y,e.radius,0,2*Math.PI,!1),t.closePath(),t.fill()},onMouseDown:function(t){this.points.length=0,this.canvas.clearContext(this.canvas.contextTop),this._setShadow(),this.drawDot(t)},_render:function(){var t,e,i=this.canvas.contextTop,r=this.points;for(this._saveAndTransform(i),t=0,e=r.length;t0&&!this.preserveObjectStacking){e=[],i=[];for(var n=0,o=this._objects.length;n1&&(this._activeObject._objects=i),e.push.apply(e,i)}else e=this._objects;return e},renderAll:function(){!this.contextTopDirty||this._groupSelector||this.isDrawingMode||(this.clearContext(this.contextTop),this.contextTopDirty=!1),this.hasLostContext&&this.renderTopLayer(this.contextTop);var t=this.contextContainer;return this.renderCanvas(t,this._chooseObjectsToRender()),this},renderTopLayer:function(t){t.save(),this.isDrawingMode&&this._isCurrentlyDrawing&&(this.freeDrawingBrush&&this.freeDrawingBrush._render(),this.contextTopDirty=!0),this.selection&&this._groupSelector&&(this._drawSelection(t),this.contextTopDirty=!0),t.restore()},renderTop:function(){var t=this.contextTop;return this.clearContext(t),this.renderTopLayer(t),this.fire("after:render"),this},_normalizePointer:function(t,e){var i=t.calcTransformMatrix(),r=T.util.invertTransform(i),n=this.restorePointerVpt(e);return T.util.transformPoint(n,r)},isTargetTransparent:function(t,e,i){if(t.shouldCache()&&t._cacheCanvas&&t!==this._activeObject){var r=this._normalizePointer(t,{x:e,y:i}),n=Math.max(t.cacheTranslationX+r.x*t.zoomX,0),o=Math.max(t.cacheTranslationY+r.y*t.zoomY,0);return T.util.isTransparent(t._cacheContext,Math.round(n),Math.round(o),this.targetFindTolerance)}var s=this.contextCache,a=t.selectionBackgroundColor,h=this.viewportTransform;return t.selectionBackgroundColor="",this.clearContext(s),s.save(),s.transform(h[0],h[1],h[2],h[3],h[4],h[5]),t.render(s),s.restore(),t.selectionBackgroundColor=a,T.util.isTransparent(s,e,i,this.targetFindTolerance)},_isSelectionKeyPressed:function(t){return"[object Array]"===Object.prototype.toString.call(this.selectionKey)?!!this.selectionKey.find((function(e){return!0===t[e]})):t[this.selectionKey]},_shouldClearSelection:function(t,e){var i=this.getActiveObjects(),r=this._activeObject;return!e||e&&r&&i.length>1&&-1===i.indexOf(e)&&r!==e&&!this._isSelectionKeyPressed(t)||e&&!e.evented||e&&!e.selectable&&r&&r!==e},_shouldCenterTransform:function(t,e,i){var r;if(t)return"scale"===e||"scaleX"===e||"scaleY"===e||"resizing"===e?r=this.centeredScaling||t.centeredScaling:"rotate"===e&&(r=this.centeredRotation||t.centeredRotation),r?!i:i},_getOriginFromCorner:function(t,e){var 
i={x:t.originX,y:t.originY};return"ml"===e||"tl"===e||"bl"===e?i.x="right":"mr"!==e&&"tr"!==e&&"br"!==e||(i.x="left"),"tl"===e||"mt"===e||"tr"===e?i.y="bottom":"bl"!==e&&"mb"!==e&&"br"!==e||(i.y="top"),i},_getActionFromCorner:function(t,e,i,r){if(!e||!t)return"drag";var n=r.controls[e];return n.getActionName(i,n,r)},_setupCurrentTransform:function(t,i,r){if(i){var n=this.getPointer(t),o=i.__corner,s=i.controls[o],a=r&&o?s.getActionHandler(t,i,s):T.controlsUtils.dragHandler,h=this._getActionFromCorner(r,o,t,i),c=this._getOriginFromCorner(i,o),l=t[this.centeredKey],u={target:i,action:h,actionHandler:a,corner:o,scaleX:i.scaleX,scaleY:i.scaleY,skewX:i.skewX,skewY:i.skewY,offsetX:n.x-i.left,offsetY:n.y-i.top,originX:c.x,originY:c.y,ex:n.x,ey:n.y,lastX:n.x,lastY:n.y,theta:e(i.angle),width:i.width*i.scaleX,shiftKey:t.shiftKey,altKey:l,original:T.util.saveObjectTransform(i)};this._shouldCenterTransform(i,h,l)&&(u.originX="center",u.originY="center"),u.original.originX=c.x,u.original.originY=c.y,this._currentTransform=u,this._beforeTransform(t)}},setCursor:function(t){this.upperCanvasEl.style.cursor=t},_drawSelection:function(t){var e=this._groupSelector,i=new T.Point(e.ex,e.ey),r=T.util.transformPoint(i,this.viewportTransform),n=new T.Point(e.ex+e.left,e.ey+e.top),o=T.util.transformPoint(n,this.viewportTransform),s=Math.min(r.x,o.x),a=Math.min(r.y,o.y),h=Math.max(r.x,o.x),c=Math.max(r.y,o.y),l=this.selectionLineWidth/2;this.selectionColor&&(t.fillStyle=this.selectionColor,t.fillRect(s,a,h-s,c-a)),this.selectionLineWidth&&this.selectionBorderColor&&(t.lineWidth=this.selectionLineWidth,t.strokeStyle=this.selectionBorderColor,s+=l,a+=l,h-=l,c-=l,T.Object.prototype._setLineDash.call(this,t,this.selectionDashArray),t.strokeRect(s,a,h-s,c-a))},findTarget:function(t,e){if(!this.skipTargetFind){var r,n,o=this.getPointer(t,!0),s=this._activeObject,a=this.getActiveObjects(),h=i(t),c=a.length>1&&!e||1===a.length;if(this.targets=[],c&&s._findTargetCorner(o,h))return s;if(a.length>1&&!e&&s===this._searchPossibleTargets([s],o))return s;if(1===a.length&&s===this._searchPossibleTargets([s],o)){if(!this.preserveObjectStacking)return s;r=s,n=this.targets,this.targets=[]}var l=this._searchPossibleTargets(this._objects,o);return t[this.altSelectionKey]&&l&&r&&l!==r&&(l=r,this.targets=n),l}},_checkTarget:function(t,e,i){if(e&&e.visible&&e.evented&&e.containsPoint(t)){if(!this.perPixelTargetFind&&!e.perPixelTargetFind||e.isEditing)return!0;if(!this.isTargetTransparent(e,i.x,i.y))return!0}},_searchPossibleTargets:function(t,e){for(var i,r,n=t.length;n--;){var o=t[n],s=o.group?this._normalizePointer(o.group,e):e;if(this._checkTarget(s,o,e)){(i=t[n]).subTargetCheck&&i instanceof T.Group&&(r=this._searchPossibleTargets(i._objects,e))&&this.targets.push(r);break}}return i},restorePointerVpt:function(t){return T.util.transformPoint(t,T.util.invertTransform(this.viewportTransform))},getPointer:function(e,i){if(this._absolutePointer&&!i)return this._absolutePointer;if(this._pointer&&i)return this._pointer;var r,n=t(e),o=this.upperCanvasEl,s=o.getBoundingClientRect(),a=s.width||0,h=s.height||0;a&&h||("top"in s&&"bottom"in s&&(h=Math.abs(s.top-s.bottom)),"right"in s&&"left"in s&&(a=Math.abs(s.right-s.left))),this.calcOffset(),n.x=n.x-this._offset.left,n.y=n.y-this._offset.top,i||(n=this.restorePointerVpt(n));var c=this.getRetinaScaling();return 1!==c&&(n.x/=c,n.y/=c),r=0===a||0===h?{width:1,height:1}:{width:o.width/a,height:o.height/h},{x:n.x*r.width,y:n.y*r.height}},_createUpperCanvas:function(){var 
t=this.lowerCanvasEl.className.replace(/\s*lower-canvas\s*/,""),e=this.lowerCanvasEl,i=this.upperCanvasEl;i?i.className="":(i=this._createCanvasElement(),this.upperCanvasEl=i),T.util.addClass(i,"upper-canvas "+t),this.wrapperEl.appendChild(i),this._copyCanvasStyle(e,i),this._applyCanvasStyle(i),this.contextTop=i.getContext("2d")},_createCacheCanvas:function(){this.cacheCanvasEl=this._createCanvasElement(),this.cacheCanvasEl.setAttribute("width",this.width),this.cacheCanvasEl.setAttribute("height",this.height),this.contextCache=this.cacheCanvasEl.getContext("2d")},_initWrapperElement:function(){this.wrapperEl=T.util.wrapElement(this.lowerCanvasEl,"div",{class:this.containerClass}),T.util.setStyle(this.wrapperEl,{width:this.width+"px",height:this.height+"px",position:"relative"}),T.util.makeElementUnselectable(this.wrapperEl)},_applyCanvasStyle:function(t){var e=this.width||t.width,i=this.height||t.height;T.util.setStyle(t,{position:"absolute",width:e+"px",height:i+"px",left:0,top:0,"touch-action":this.allowTouchScrolling?"manipulation":"none","-ms-touch-action":this.allowTouchScrolling?"manipulation":"none"}),t.width=e,t.height=i,T.util.makeElementUnselectable(t)},_copyCanvasStyle:function(t,e){e.style.cssText=t.style.cssText},getSelectionContext:function(){return this.contextTop},getSelectionElement:function(){return this.upperCanvasEl},getActiveObject:function(){return this._activeObject},getActiveObjects:function(){var t=this._activeObject;return t?"activeSelection"===t.type&&t._objects?t._objects.slice(0):[t]:[]},_onObjectRemoved:function(t){t===this._activeObject&&(this.fire("before:selection:cleared",{target:t}),this._discardActiveObject(),this.fire("selection:cleared",{target:t}),t.fire("deselected")),t===this._hoveredTarget&&(this._hoveredTarget=null,this._hoveredTargets=[]),this.callSuper("_onObjectRemoved",t)},_fireSelectionEvents:function(t,e){var i=!1,r=this.getActiveObjects(),n=[],o=[];t.forEach((function(t){-1===r.indexOf(t)&&(i=!0,t.fire("deselected",{e:e,target:t}),o.push(t))})),r.forEach((function(r){-1===t.indexOf(r)&&(i=!0,r.fire("selected",{e:e,target:r}),n.push(r))})),t.length>0&&r.length>0?i&&this.fire("selection:updated",{e:e,selected:n,deselected:o,updated:n[0]||o[0],target:this._activeObject}):r.length>0?this.fire("selection:created",{e:e,selected:n,target:this._activeObject}):t.length>0&&this.fire("selection:cleared",{e:e,deselected:o})},setActiveObject:function(t,e){var i=this.getActiveObjects();return this._setActiveObject(t,e),this._fireSelectionEvents(i,e),this},_setActiveObject:function(t,e){return this._activeObject!==t&&(!!this._discardActiveObject(e,t)&&(!t.onSelect({e:e})&&(this._activeObject=t,!0)))},_discardActiveObject:function(t,e){var i=this._activeObject;if(i){if(i.onDeselect({e:t,object:e}))return!1;this._activeObject=null}return!0},discardActiveObject:function(t){var e=this.getActiveObjects(),i=this.getActiveObject();return e.length&&this.fire("before:selection:cleared",{target:i,e:t}),this._discardActiveObject(t),this._fireSelectionEvents(e,t),this},dispose:function(){var t=this.wrapperEl;return this.removeListeners(),t.removeChild(this.upperCanvasEl),t.removeChild(this.lowerCanvasEl),this.contextCache=null,this.contextTop=null,["upperCanvasEl","cacheCanvasEl"].forEach(function(t){T.util.cleanUpJsdomNode(this[t]),this[t]=void 0}.bind(this)),t.parentNode&&t.parentNode.replaceChild(this.lowerCanvasEl,this.wrapperEl),delete this.wrapperEl,T.StaticCanvas.prototype.dispose.call(this),this},clear:function(){return 
this.discardActiveObject(),this.clearContext(this.contextTop),this.callSuper("clear")},drawControls:function(t){var e=this._activeObject;e&&e._renderControls(t)},_toObject:function(t,e,i){var r=this._realizeGroupTransformOnObject(t),n=this.callSuper("_toObject",t,e,i);return this._unwindGroupTransformOnObject(t,r),n},_realizeGroupTransformOnObject:function(t){if(t.group&&"activeSelection"===t.group.type&&this._activeObject===t.group){var e={};return["angle","flipX","flipY","left","scaleX","scaleY","skewX","skewY","top"].forEach((function(i){e[i]=t[i]})),T.util.addTransformToObject(t,this._activeObject.calcOwnMatrix()),e}return null},_unwindGroupTransformOnObject:function(t,e){e&&t.set(e)},_setSVGObject:function(t,e,i){var r=this._realizeGroupTransformOnObject(e);this.callSuper("_setSVGObject",t,e,i),this._unwindGroupTransformOnObject(e,r)},setViewportTransform:function(t){this.renderOnAddRemove&&this._activeObject&&this._activeObject.isEditing&&this._activeObject.clearContextTop(),T.StaticCanvas.prototype.setViewportTransform.call(this,t)}}),T.StaticCanvas)"prototype"!==r&&(T.Canvas[r]=T.StaticCanvas[r])}(),function(){var t=T.util.addListener,e=T.util.removeListener,i={passive:!1};function r(t,e){return t.button&&t.button===e-1}T.util.object.extend(T.Canvas.prototype,{mainTouchId:null,_initEventListeners:function(){this.removeListeners(),this._bindEvents(),this.addOrRemove(t,"add")},_getEventPrefix:function(){return this.enablePointerEvents?"pointer":"mouse"},addOrRemove:function(t,e){var r=this.upperCanvasEl,n=this._getEventPrefix();t(T.window,"resize",this._onResize),t(r,n+"down",this._onMouseDown),t(r,n+"move",this._onMouseMove,i),t(r,n+"out",this._onMouseOut),t(r,n+"enter",this._onMouseEnter),t(r,"wheel",this._onMouseWheel),t(r,"contextmenu",this._onContextMenu),t(r,"dblclick",this._onDoubleClick),t(r,"dragover",this._onDragOver),t(r,"dragenter",this._onDragEnter),t(r,"dragleave",this._onDragLeave),t(r,"drop",this._onDrop),this.enablePointerEvents||t(r,"touchstart",this._onTouchStart,i),void 0!==P&&e in P&&(P[e](r,"gesture",this._onGesture),P[e](r,"drag",this._onDrag),P[e](r,"orientation",this._onOrientationChange),P[e](r,"shake",this._onShake),P[e](r,"longpress",this._onLongPress))},removeListeners:function(){this.addOrRemove(e,"remove");var 
t=this._getEventPrefix();e(T.document,t+"up",this._onMouseUp),e(T.document,"touchend",this._onTouchEnd,i),e(T.document,t+"move",this._onMouseMove,i),e(T.document,"touchmove",this._onMouseMove,i)},_bindEvents:function(){this.eventsBound||(this._onMouseDown=this._onMouseDown.bind(this),this._onTouchStart=this._onTouchStart.bind(this),this._onMouseMove=this._onMouseMove.bind(this),this._onMouseUp=this._onMouseUp.bind(this),this._onTouchEnd=this._onTouchEnd.bind(this),this._onResize=this._onResize.bind(this),this._onGesture=this._onGesture.bind(this),this._onDrag=this._onDrag.bind(this),this._onShake=this._onShake.bind(this),this._onLongPress=this._onLongPress.bind(this),this._onOrientationChange=this._onOrientationChange.bind(this),this._onMouseWheel=this._onMouseWheel.bind(this),this._onMouseOut=this._onMouseOut.bind(this),this._onMouseEnter=this._onMouseEnter.bind(this),this._onContextMenu=this._onContextMenu.bind(this),this._onDoubleClick=this._onDoubleClick.bind(this),this._onDragOver=this._onDragOver.bind(this),this._onDragEnter=this._simpleEventHandler.bind(this,"dragenter"),this._onDragLeave=this._simpleEventHandler.bind(this,"dragleave"),this._onDrop=this._simpleEventHandler.bind(this,"drop"),this.eventsBound=!0)},_onGesture:function(t,e){this.__onTransformGesture&&this.__onTransformGesture(t,e)},_onDrag:function(t,e){this.__onDrag&&this.__onDrag(t,e)},_onMouseWheel:function(t){this.__onMouseWheel(t)},_onMouseOut:function(t){var e=this._hoveredTarget;this.fire("mouse:out",{target:e,e:t}),this._hoveredTarget=null,e&&e.fire("mouseout",{e:t});var i=this;this._hoveredTargets.forEach((function(r){i.fire("mouse:out",{target:e,e:t}),r&&e.fire("mouseout",{e:t})})),this._hoveredTargets=[],this._iTextInstances&&this._iTextInstances.forEach((function(t){t.isEditing&&t.hiddenTextarea.focus()}))},_onMouseEnter:function(t){this._currentTransform||this.findTarget(t)||(this.fire("mouse:over",{target:null,e:t}),this._hoveredTarget=null,this._hoveredTargets=[])},_onOrientationChange:function(t,e){this.__onOrientationChange&&this.__onOrientationChange(t,e)},_onShake:function(t,e){this.__onShake&&this.__onShake(t,e)},_onLongPress:function(t,e){this.__onLongPress&&this.__onLongPress(t,e)},_onDragOver:function(t){t.preventDefault();var e=this._simpleEventHandler("dragover",t);this._fireEnterLeaveEvents(e,t)},_onContextMenu:function(t){return this.stopContextMenu&&(t.stopPropagation(),t.preventDefault()),!1},_onDoubleClick:function(t){this._cacheTransformEventData(t),this._handleEvent(t,"dblclick"),this._resetTransformEventData(t)},getPointerId:function(t){var e=t.changedTouches;return e?e[0]&&e[0].identifier:this.enablePointerEvents?t.pointerId:-1},_isMainEvent:function(t){return!0===t.isPrimary||!1!==t.isPrimary&&("touchend"===t.type&&0===t.touches.length||(!t.changedTouches||t.changedTouches[0].identifier===this.mainTouchId))},_onTouchStart:function(r){r.preventDefault(),null===this.mainTouchId&&(this.mainTouchId=this.getPointerId(r)),this.__onMouseDown(r),this._resetTransformEventData();var n=this.upperCanvasEl,o=this._getEventPrefix();t(T.document,"touchend",this._onTouchEnd,i),t(T.document,"touchmove",this._onMouseMove,i),e(n,o+"down",this._onMouseDown)},_onMouseDown:function(r){this.__onMouseDown(r),this._resetTransformEventData();var 
n=this.upperCanvasEl,o=this._getEventPrefix();e(n,o+"move",this._onMouseMove,i),t(T.document,o+"up",this._onMouseUp),t(T.document,o+"move",this._onMouseMove,i)},_onTouchEnd:function(r){if(!(r.touches.length>0)){this.__onMouseUp(r),this._resetTransformEventData(),this.mainTouchId=null;var n=this._getEventPrefix();e(T.document,"touchend",this._onTouchEnd,i),e(T.document,"touchmove",this._onMouseMove,i);var o=this;this._willAddMouseDown&&clearTimeout(this._willAddMouseDown),this._willAddMouseDown=setTimeout((function(){t(o.upperCanvasEl,n+"down",o._onMouseDown),o._willAddMouseDown=0}),400)}},_onMouseUp:function(r){this.__onMouseUp(r),this._resetTransformEventData();var n=this.upperCanvasEl,o=this._getEventPrefix();this._isMainEvent(r)&&(e(T.document,o+"up",this._onMouseUp),e(T.document,o+"move",this._onMouseMove,i),t(n,o+"move",this._onMouseMove,i))},_onMouseMove:function(t){!this.allowTouchScrolling&&t.preventDefault&&t.preventDefault(),this.__onMouseMove(t)},_onResize:function(){this.calcOffset()},_shouldRender:function(t){var e=this._activeObject;return!!(!!e!=!!t||e&&t&&e!==t)||(e&&e.isEditing,!1)},__onMouseUp:function(t){var e,i=this._currentTransform,n=this._groupSelector,o=!1,s=!n||0===n.left&&0===n.top;if(this._cacheTransformEventData(t),e=this._target,this._handleEvent(t,"up:before"),r(t,3))this.fireRightClick&&this._handleEvent(t,"up",3,s);else{if(r(t,2))return this.fireMiddleClick&&this._handleEvent(t,"up",2,s),void this._resetTransformEventData();if(this.isDrawingMode&&this._isCurrentlyDrawing)this._onMouseUpInDrawingMode(t);else if(this._isMainEvent(t)){if(i&&(this._finalizeCurrentTransform(t),o=i.actionPerformed),!s){var a=e===this._activeObject;this._maybeGroupObjects(t),o||(o=this._shouldRender(e)||!a&&e===this._activeObject)}if(e){if(e.selectable&&e!==this._activeObject&&"up"===e.activeOn)this.setActiveObject(e,t),o=!0;else{var h=e._findTargetCorner(this.getPointer(t,!0),T.util.isTouchEvent(t)),c=e.controls[h],l=c&&c.getMouseUpHandler(t,e,c);if(l){var u=this.getPointer(t);l(t,i,u.x,u.y)}}e.isMoving=!1}this._setCursorFromEvent(t,e),this._handleEvent(t,"up",1,s),this._groupSelector=null,this._currentTransform=null,e&&(e.__corner=0),o?this.requestRenderAll():s||this.renderTop()}}},_simpleEventHandler:function(t,e){var i=this.findTarget(e),r=this.targets,n={e:e,target:i,subTargets:r};if(this.fire(t,n),i&&i.fire(t,n),!r)return i;for(var o=0;o1&&(e=new T.ActiveSelection(i.reverse(),{canvas:this}),this.setActiveObject(e,t))},_collectObjects:function(t){for(var e,i=[],r=this._groupSelector.ex,n=this._groupSelector.ey,o=r+this._groupSelector.left,s=n+this._groupSelector.top,a=new T.Point(b(r,o),b(n,s)),h=new T.Point(x(r,o),x(n,s)),c=!this.selectionFullyContained,l=r===o&&n===s,u=this._objects.length;u--&&!((e=this._objects[u])&&e.selectable&&e.visible&&(c&&e.intersectsWithRect(a,h,!0)||e.isContainedWithinRect(a,h,!0)||c&&e.containsPoint(a,null,!0)||c&&e.containsPoint(h,null,!0))&&(i.push(e),l)););return i.length>1&&(i=i.filter((function(e){return!e.onSelect({e:t})}))),i},_maybeGroupObjects:function(t){this.selection&&this._groupSelector&&this._groupSelectedObjects(t),this.setCursor(this.defaultCursor),this._groupSelector=null}}),T.util.object.extend(T.StaticCanvas.prototype,{toDataURL:function(t){t||(t={});var e=t.format||"png",i=t.quality||1,r=(t.multiplier||1)*(t.enableRetinaScaling?this.getRetinaScaling():1),n=this.toCanvasElement(r,t);return T.util.toDataURL(n,e,i)},toCanvasElement:function(t,e){t=t||1;var 
i=((e=e||{}).width||this.width)*t,r=(e.height||this.height)*t,n=this.getZoom(),o=this.width,s=this.height,a=n*t,h=this.viewportTransform,c=(h[4]-(e.left||0))*t,l=(h[5]-(e.top||0))*t,u=this.interactive,f=[a,0,0,a,c,l],d=this.enableRetinaScaling,g=T.util.createCanvasElement(),p=this.contextTop;return g.width=i,g.height=r,this.contextTop=null,this.enableRetinaScaling=!1,this.interactive=!1,this.viewportTransform=f,this.width=i,this.height=r,this.calcViewportBoundaries(),this.renderCanvas(g.getContext("2d"),this._objects),this.viewportTransform=h,this.width=o,this.height=s,this.calcViewportBoundaries(),this.interactive=u,this.enableRetinaScaling=d,this.contextTop=p,g}}),T.util.object.extend(T.StaticCanvas.prototype,{loadFromJSON:function(t,e,i){if(t){var r="string"==typeof t?JSON.parse(t):T.util.object.clone(t),n=this,o=r.clipPath,s=this.renderOnAddRemove;return this.renderOnAddRemove=!1,delete r.clipPath,this._enlivenObjects(r.objects,(function(t){n.clear(),n._setBgOverlay(r,(function(){o?n._enlivenObjects([o],(function(i){n.clipPath=i[0],n.__setupCanvas.call(n,r,t,s,e)})):n.__setupCanvas.call(n,r,t,s,e)}))}),i),this}},__setupCanvas:function(t,e,i,r){var n=this;e.forEach((function(t,e){n.insertAt(t,e)})),this.renderOnAddRemove=i,delete t.objects,delete t.backgroundImage,delete t.overlayImage,delete t.background,delete t.overlay,this._setOptions(t),this.renderAll(),r&&r()},_setBgOverlay:function(t,e){var i={backgroundColor:!1,overlayColor:!1,backgroundImage:!1,overlayImage:!1};if(t.backgroundImage||t.overlayImage||t.background||t.overlay){var r=function(){i.backgroundImage&&i.overlayImage&&i.backgroundColor&&i.overlayColor&&e&&e()};this.__setBgOverlay("backgroundImage",t.backgroundImage,i,r),this.__setBgOverlay("overlayImage",t.overlayImage,i,r),this.__setBgOverlay("backgroundColor",t.background,i,r),this.__setBgOverlay("overlayColor",t.overlay,i,r)}else e&&e()},__setBgOverlay:function(t,e,i,r){var n=this;if(!e)return i[t]=!0,void(r&&r());"backgroundImage"===t||"overlayImage"===t?T.util.enlivenObjects([e],(function(e){n[t]=e[0],i[t]=!0,r&&r()})):this["set"+T.util.string.capitalize(t,!0)](e,(function(){i[t]=!0,r&&r()}))},_enlivenObjects:function(t,e,i){t&&0!==t.length?T.util.enlivenObjects(t,(function(t){e&&e(t)}),null,i):e&&e([])},_toDataURL:function(t,e){this.clone((function(i){e(i.toDataURL(t))}))},_toDataURLWithMultiplier:function(t,e,i){this.clone((function(r){i(r.toDataURLWithMultiplier(t,e))}))},clone:function(t,e){var i=JSON.stringify(this.toJSON(e));this.cloneWithoutData((function(e){e.loadFromJSON(i,(function(){t&&t(e)}))}))},cloneWithoutData:function(t){var e=T.util.createCanvasElement();e.width=this.width,e.height=this.height;var i=new T.Canvas(e);this.backgroundImage?(i.setBackgroundImage(this.backgroundImage.src,(function(){i.renderAll(),t&&t(i)})),i.backgroundImageOpacity=this.backgroundImageOpacity,i.backgroundImageStretch=this.backgroundImageStretch):t&&t(i)}}),C=T.util.degreesToRadians,w=T.util.radiansToDegrees,T.util.object.extend(T.Canvas.prototype,{__onTransformGesture:function(t,e){if(!this.isDrawingMode&&t.touches&&2===t.touches.length&&"gesture"===e.gesture){var i=this.findTarget(t);void 0!==i&&(this.__gesturesParams={e:t,self:e,target:i},this.__gesturesRenderer()),this.fire("touch:gesture",{target:i,e:t,self:e})}},__gesturesParams:null,__gesturesRenderer:function(){if(null!==this.__gesturesParams&&null!==this._currentTransform){var 
t=this.__gesturesParams.self,e=this._currentTransform,i=this.__gesturesParams.e;e.action="scale",e.originX=e.originY="center",this._scaleObjectBy(t.scale,i),0!==t.rotation&&(e.action="rotate",this._rotateObjectByAngle(t.rotation,i)),this.requestRenderAll(),e.action="drag"}},__onDrag:function(t,e){this.fire("touch:drag",{e:t,self:e})},__onOrientationChange:function(t,e){this.fire("touch:orientation",{e:t,self:e})},__onShake:function(t,e){this.fire("touch:shake",{e:t,self:e})},__onLongPress:function(t,e){this.fire("touch:longpress",{e:t,self:e})},_scaleObjectBy:function(t,e){var i=this._currentTransform,r=i.target;return i.gestureScale=t,r._scaling=!0,T.controlsUtils.scalingEqually(e,i,0,0)},_rotateObjectByAngle:function(t,e){var i=this._currentTransform;i.target.get("lockRotation")||(i.target.rotate(w(C(t)+i.theta)),this._fire("rotating",{target:i.target,e:e,transform:i}))}}),function(t){var e=t.fabric||(t.fabric={}),i=e.util.object.extend,r=e.util.object.clone,n=e.util.toFixed,o=e.util.string.capitalize,s=e.util.degreesToRadians,a=!e.isLikelyNode;e.Object||(e.Object=e.util.createClass(e.CommonMethods,{type:"object",originX:"left",originY:"top",top:0,left:0,width:0,height:0,scaleX:1,scaleY:1,flipX:!1,flipY:!1,opacity:1,angle:0,skewX:0,skewY:0,cornerSize:13,touchCornerSize:24,transparentCorners:!0,hoverCursor:null,moveCursor:null,padding:0,borderColor:"rgb(178,204,255)",borderDashArray:null,cornerColor:"rgb(178,204,255)",cornerStrokeColor:null,cornerStyle:"rect",cornerDashArray:null,centeredScaling:!1,centeredRotation:!0,fill:"rgb(0,0,0)",fillRule:"nonzero",globalCompositeOperation:"source-over",backgroundColor:"",selectionBackgroundColor:"",stroke:null,strokeWidth:1,strokeDashArray:null,strokeDashOffset:0,strokeLineCap:"butt",strokeLineJoin:"miter",strokeMiterLimit:4,shadow:null,borderOpacityWhenMoving:.4,borderScaleFactor:1,minScaleLimit:0,selectable:!0,evented:!0,visible:!0,hasControls:!0,hasBorders:!0,perPixelTargetFind:!1,includeDefaultValues:!0,lockMovementX:!1,lockMovementY:!1,lockRotation:!1,lockScalingX:!1,lockScalingY:!1,lockSkewingX:!1,lockSkewingY:!1,lockScalingFlip:!1,excludeFromExport:!1,objectCaching:a,statefullCache:!1,noScaleCache:!0,strokeUniform:!1,dirty:!0,__corner:0,paintFirst:"fill",activeOn:"down",stateProperties:"top left width height scaleX scaleY flipX flipY originX originY transformMatrix stroke strokeWidth strokeDashArray strokeLineCap strokeDashOffset strokeLineJoin strokeMiterLimit angle opacity fill globalCompositeOperation shadow visible backgroundColor skewX skewY fillRule paintFirst clipPath strokeUniform".split(" "),cacheProperties:"fill stroke strokeWidth strokeDashArray width height paintFirst strokeUniform strokeLineCap strokeDashOffset strokeLineJoin strokeMiterLimit backgroundColor clipPath".split(" "),colorProperties:"fill stroke backgroundColor".split(" "),clipPath:void 0,inverted:!1,absolutePositioned:!1,initialize:function(t){t&&this.setOptions(t)},_createCacheCanvas:function(){this._cacheProperties={},this._cacheCanvas=e.util.createCanvasElement(),this._cacheContext=this._cacheCanvas.getContext("2d"),this._updateCacheCanvas(),this.dirty=!0},_limitCacheSize:function(t){var i=e.perfLimitSizeTotal,r=t.width,n=t.height,o=e.maxCacheSideLimit,s=e.minCacheSideLimit;if(r<=o&&n<=o&&r*n<=i)return rl&&(t.zoomX/=r/l,t.width=l,t.capped=!0),n>u&&(t.zoomY/=n/u,t.height=u,t.capped=!0),t},_getCacheCanvasDimensions:function(){var 
t=this.getTotalObjectScaling(),e=this._getTransformedDimensions(0,0),i=e.x*t.scaleX/this.scaleX,r=e.y*t.scaleY/this.scaleY;return{width:i+2,height:r+2,zoomX:t.scaleX,zoomY:t.scaleY,x:i,y:r}},_updateCacheCanvas:function(){var t=this.canvas;if(this.noScaleCache&&t&&t._currentTransform){var i=t._currentTransform.target,r=t._currentTransform.action;if(this===i&&r.slice&&"scale"===r.slice(0,5))return!1}var n,o,s=this._cacheCanvas,a=this._limitCacheSize(this._getCacheCanvasDimensions()),h=e.minCacheSideLimit,c=a.width,l=a.height,u=a.zoomX,f=a.zoomY,d=c!==this.cacheWidth||l!==this.cacheHeight,g=this.zoomX!==u||this.zoomY!==f,p=d||g,v=0,m=0,y=!1;if(d){var _=this._cacheCanvas.width,b=this._cacheCanvas.height,x=c>_||l>b;y=x||(c<.9*_||l<.9*b)&&_>h&&b>h,x&&!a.capped&&(c>h||l>h)&&(v=.1*c,m=.1*l)}return this instanceof e.Text&&this.path&&(p=!0,y=!0,v+=this.getHeightOfLine(0)*this.zoomX,m+=this.getHeightOfLine(0)*this.zoomY),!!p&&(y?(s.width=Math.ceil(c+v),s.height=Math.ceil(l+m)):(this._cacheContext.setTransform(1,0,0,1,0,0),this._cacheContext.clearRect(0,0,s.width,s.height)),n=a.x/2,o=a.y/2,this.cacheTranslationX=Math.round(s.width/2-n)+n,this.cacheTranslationY=Math.round(s.height/2-o)+o,this.cacheWidth=c,this.cacheHeight=l,this._cacheContext.translate(this.cacheTranslationX,this.cacheTranslationY),this._cacheContext.scale(u,f),this.zoomX=u,this.zoomY=f,!0)},setOptions:function(t){this._setOptions(t),this._initGradient(t.fill,"fill"),this._initGradient(t.stroke,"stroke"),this._initPattern(t.fill,"fill"),this._initPattern(t.stroke,"stroke")},transform:function(t){var e=this.group&&!this.group._transformDone||this.group&&this.canvas&&t===this.canvas.contextTop,i=this.calcTransformMatrix(!e);t.transform(i[0],i[1],i[2],i[3],i[4],i[5])},toObject:function(t){var i=e.Object.NUM_FRACTION_DIGITS,r={type:this.type,version:e.version,originX:this.originX,originY:this.originY,left:n(this.left,i),top:n(this.top,i),width:n(this.width,i),height:n(this.height,i),fill:this.fill&&this.fill.toObject?this.fill.toObject():this.fill,stroke:this.stroke&&this.stroke.toObject?this.stroke.toObject():this.stroke,strokeWidth:n(this.strokeWidth,i),strokeDashArray:this.strokeDashArray?this.strokeDashArray.concat():this.strokeDashArray,strokeLineCap:this.strokeLineCap,strokeDashOffset:this.strokeDashOffset,strokeLineJoin:this.strokeLineJoin,strokeUniform:this.strokeUniform,strokeMiterLimit:n(this.strokeMiterLimit,i),scaleX:n(this.scaleX,i),scaleY:n(this.scaleY,i),angle:n(this.angle,i),flipX:this.flipX,flipY:this.flipY,opacity:n(this.opacity,i),shadow:this.shadow&&this.shadow.toObject?this.shadow.toObject():this.shadow,visible:this.visible,backgroundColor:this.backgroundColor,fillRule:this.fillRule,paintFirst:this.paintFirst,globalCompositeOperation:this.globalCompositeOperation,skewX:n(this.skewX,i),skewY:n(this.skewY,i)};return this.clipPath&&!this.clipPath.excludeFromExport&&(r.clipPath=this.clipPath.toObject(t),r.clipPath.inverted=this.clipPath.inverted,r.clipPath.absolutePositioned=this.clipPath.absolutePositioned),e.util.populateWithProperties(this,r,t),this.includeDefaultValues||(r=this._removeDefaultValues(r)),r},toDatalessObject:function(t){return this.toObject(t)},_removeDefaultValues:function(t){var i=e.util.getKlass(t.type).prototype;return i.stateProperties.forEach((function(e){"left"!==e&&"top"!==e&&(t[e]===i[e]&&delete t[e],"[object Array]"===Object.prototype.toString.call(t[e])&&"[object Array]"===Object.prototype.toString.call(i[e])&&0===t[e].length&&0===i[e].length&&delete 
t[e])})),t},toString:function(){return"#"},getObjectScaling:function(){if(!this.group)return{scaleX:this.scaleX,scaleY:this.scaleY};var t=e.util.qrDecompose(this.calcTransformMatrix());return{scaleX:Math.abs(t.scaleX),scaleY:Math.abs(t.scaleY)}},getTotalObjectScaling:function(){var t=this.getObjectScaling(),e=t.scaleX,i=t.scaleY;if(this.canvas){var r=this.canvas.getZoom(),n=this.canvas.getRetinaScaling();e*=r*n,i*=r*n}return{scaleX:e,scaleY:i}},getObjectOpacity:function(){var t=this.opacity;return this.group&&(t*=this.group.getObjectOpacity()),t},_set:function(t,i){var r="scaleX"===t||"scaleY"===t,n=this[t]!==i,o=!1;return r&&(i=this._constrainScale(i)),"scaleX"===t&&i<0?(this.flipX=!this.flipX,i*=-1):"scaleY"===t&&i<0?(this.flipY=!this.flipY,i*=-1):"shadow"!==t||!i||i instanceof e.Shadow?"dirty"===t&&this.group&&this.group.set("dirty",i):i=new e.Shadow(i),this[t]=i,n&&(o=this.group&&this.group.isOnACache(),this.cacheProperties.indexOf(t)>-1?(this.dirty=!0,o&&this.group.set("dirty",!0)):o&&this.stateProperties.indexOf(t)>-1&&this.group.set("dirty",!0)),this},setOnGroup:function(){},getViewportTransform:function(){return this.canvas&&this.canvas.viewportTransform?this.canvas.viewportTransform:e.iMatrix.concat()},isNotVisible:function(){return 0===this.opacity||!this.width&&!this.height&&0===this.strokeWidth||!this.visible},render:function(t){this.isNotVisible()||this.canvas&&this.canvas.skipOffscreen&&!this.group&&!this.isOnScreen()||(t.save(),this._setupCompositeOperation(t),this.drawSelectionBackground(t),this.transform(t),this._setOpacity(t),this._setShadow(t,this),this.shouldCache()?(this.renderCache(),this.drawCacheOnCanvas(t)):(this._removeCacheCanvas(),this.dirty=!1,this.drawObject(t),this.objectCaching&&this.statefullCache&&this.saveState({propertySet:"cacheProperties"})),t.restore())},renderCache:function(t){t=t||{},this._cacheCanvas||this._createCacheCanvas(),this.isCacheDirty()&&(this.statefullCache&&this.saveState({propertySet:"cacheProperties"}),this.drawObject(this._cacheContext,t.forClipping),this.dirty=!1)},_removeCacheCanvas:function(){this._cacheCanvas=null,this.cacheWidth=0,this.cacheHeight=0},hasStroke:function(){return this.stroke&&"transparent"!==this.stroke&&0!==this.strokeWidth},hasFill:function(){return this.fill&&"transparent"!==this.fill},needsItsOwnCache:function(){return!("stroke"!==this.paintFirst||!this.hasFill()||!this.hasStroke()||"object"!=typeof this.shadow)||!!this.clipPath},shouldCache:function(){return this.ownCaching=this.needsItsOwnCache()||this.objectCaching&&(!this.group||!this.group.isOnACache()),this.ownCaching},willDrawShadow:function(){return!!this.shadow&&(0!==this.shadow.offsetX||0!==this.shadow.offsetY)},drawClipPathOnCache:function(t){var i=this.clipPath;if(t.save(),i.inverted?t.globalCompositeOperation="destination-out":t.globalCompositeOperation="destination-in",i.absolutePositioned){var r=e.util.invertTransform(this.calcTransformMatrix());t.transform(r[0],r[1],r[2],r[3],r[4],r[5])}i.transform(t),t.scale(1/i.zoomX,1/i.zoomY),t.drawImage(i._cacheCanvas,-i.cacheTranslationX,-i.cacheTranslationY),t.restore()},drawObject:function(t,e){var i=this.fill,r=this.stroke;e?(this.fill="black",this.stroke="",this._setClippingProperties(t)):this._renderBackground(t),this._render(t),this._drawClipPath(t),this.fill=i,this.stroke=r},_drawClipPath:function(t){var 
e=this.clipPath;e&&(e.canvas=this.canvas,e.shouldCache(),e._transformDone=!0,e.renderCache({forClipping:!0}),this.drawClipPathOnCache(t))},drawCacheOnCanvas:function(t){t.scale(1/this.zoomX,1/this.zoomY),t.drawImage(this._cacheCanvas,-this.cacheTranslationX,-this.cacheTranslationY)},isCacheDirty:function(t){if(this.isNotVisible())return!1;if(this._cacheCanvas&&!t&&this._updateCacheCanvas())return!0;if(this.dirty||this.clipPath&&this.clipPath.absolutePositioned||this.statefullCache&&this.hasStateChanged("cacheProperties")){if(this._cacheCanvas&&!t){var e=this.cacheWidth/this.zoomX,i=this.cacheHeight/this.zoomY;this._cacheContext.clearRect(-e/2,-i/2,e,i)}return!0}return!1},_renderBackground:function(t){if(this.backgroundColor){var e=this._getNonTransformedDimensions();t.fillStyle=this.backgroundColor,t.fillRect(-e.x/2,-e.y/2,e.x,e.y),this._removeShadow(t)}},_setOpacity:function(t){this.group&&!this.group._transformDone?t.globalAlpha=this.getObjectOpacity():t.globalAlpha*=this.opacity},_setStrokeStyles:function(t,e){var i=e.stroke;i&&(t.lineWidth=e.strokeWidth,t.lineCap=e.strokeLineCap,t.lineDashOffset=e.strokeDashOffset,t.lineJoin=e.strokeLineJoin,t.miterLimit=e.strokeMiterLimit,i.toLive?"percentage"===i.gradientUnits||i.gradientTransform||i.patternTransform?this._applyPatternForTransformedGradient(t,i):(t.strokeStyle=i.toLive(t,this),this._applyPatternGradientTransform(t,i)):t.strokeStyle=e.stroke)},_setFillStyles:function(t,e){var i=e.fill;i&&(i.toLive?(t.fillStyle=i.toLive(t,this),this._applyPatternGradientTransform(t,e.fill)):t.fillStyle=i)},_setClippingProperties:function(t){t.globalAlpha=1,t.strokeStyle="transparent",t.fillStyle="#000000"},_setLineDash:function(t,e){e&&0!==e.length&&(1&e.length&&e.push.apply(e,e),t.setLineDash(e))},_renderControls:function(t,i){var r,n,o,a=this.getViewportTransform(),h=this.calcTransformMatrix();n=void 0!==(i=i||{}).hasBorders?i.hasBorders:this.hasBorders,o=void 0!==i.hasControls?i.hasControls:this.hasControls,h=e.util.multiplyTransformMatrices(a,h),r=e.util.qrDecompose(h),t.save(),t.translate(r.translateX,r.translateY),t.lineWidth=1*this.borderScaleFactor,this.group||(t.globalAlpha=this.isMoving?this.borderOpacityWhenMoving:1),t.rotate(s(r.angle)),i.forActiveSelection||this.group?n&&this.drawBordersInGroup(t,r,i):n&&this.drawBorders(t,i),o&&this.drawControls(t,i),t.restore()},_setShadow:function(t){if(this.shadow){var i,r=this.shadow,n=this.canvas,o=n&&n.viewportTransform[0]||1,s=n&&n.viewportTransform[3]||1;i=r.nonScaling?{scaleX:1,scaleY:1}:this.getObjectScaling(),n&&n._isRetinaScaling()&&(o*=e.devicePixelRatio,s*=e.devicePixelRatio),t.shadowColor=r.color,t.shadowBlur=r.blur*e.browserShadowBlurConstant*(o+s)*(i.scaleX+i.scaleY)/4,t.shadowOffsetX=r.offsetX*o*i.scaleX,t.shadowOffsetY=r.offsetY*s*i.scaleY}},_removeShadow:function(t){this.shadow&&(t.shadowColor="",t.shadowBlur=t.shadowOffsetX=t.shadowOffsetY=0)},_applyPatternGradientTransform:function(t,e){if(!e||!e.toLive)return{offsetX:0,offsetY:0};var 
i=e.gradientTransform||e.patternTransform,r=-this.width/2+e.offsetX||0,n=-this.height/2+e.offsetY||0;return"percentage"===e.gradientUnits?t.transform(this.width,0,0,this.height,r,n):t.transform(1,0,0,1,r,n),i&&t.transform(i[0],i[1],i[2],i[3],i[4],i[5]),{offsetX:r,offsetY:n}},_renderPaintInOrder:function(t){"stroke"===this.paintFirst?(this._renderStroke(t),this._renderFill(t)):(this._renderFill(t),this._renderStroke(t))},_render:function(){},_renderFill:function(t){this.fill&&(t.save(),this._setFillStyles(t,this),"evenodd"===this.fillRule?t.fill("evenodd"):t.fill(),t.restore())},_renderStroke:function(t){if(this.stroke&&0!==this.strokeWidth){if(this.shadow&&!this.shadow.affectStroke&&this._removeShadow(t),t.save(),this.strokeUniform&&this.group){var e=this.getObjectScaling();t.scale(1/e.scaleX,1/e.scaleY)}else this.strokeUniform&&t.scale(1/this.scaleX,1/this.scaleY);this._setLineDash(t,this.strokeDashArray),this._setStrokeStyles(t,this),t.stroke(),t.restore()}},_applyPatternForTransformedGradient:function(t,i){var r,n=this._limitCacheSize(this._getCacheCanvasDimensions()),o=e.util.createCanvasElement(),s=this.canvas.getRetinaScaling(),a=n.x/this.scaleX/s,h=n.y/this.scaleY/s;o.width=a,o.height=h,(r=o.getContext("2d")).beginPath(),r.moveTo(0,0),r.lineTo(a,0),r.lineTo(a,h),r.lineTo(0,h),r.closePath(),r.translate(a/2,h/2),r.scale(n.zoomX/this.scaleX/s,n.zoomY/this.scaleY/s),this._applyPatternGradientTransform(r,i),r.fillStyle=i.toLive(t),r.fill(),t.translate(-this.width/2-this.strokeWidth/2,-this.height/2-this.strokeWidth/2),t.scale(s*this.scaleX/n.zoomX,s*this.scaleY/n.zoomY),t.strokeStyle=r.createPattern(o,"no-repeat")},_findCenterFromElement:function(){return{x:this.left+this.width/2,y:this.top+this.height/2}},_assignTransformMatrixProps:function(){if(this.transformMatrix){var t=e.util.qrDecompose(this.transformMatrix);this.flipX=!1,this.flipY=!1,this.set("scaleX",t.scaleX),this.set("scaleY",t.scaleY),this.angle=t.angle,this.skewX=t.skewX,this.skewY=0}},_removeTransformMatrix:function(t){var i=this._findCenterFromElement();this.transformMatrix&&(this._assignTransformMatrixProps(),i=e.util.transformPoint(i,this.transformMatrix)),this.transformMatrix=null,t&&(this.scaleX*=t.scaleX,this.scaleY*=t.scaleY,this.cropX=t.cropX,this.cropY=t.cropY,i.x+=t.offsetLeft,i.y+=t.offsetTop,this.width=t.width,this.height=t.height),this.setPositionByOrigin(i,"center","center")},clone:function(t,i){var r=this.toObject(i);this.constructor.fromObject?this.constructor.fromObject(r,t):e.Object._fromObject("Object",r,t)},cloneAsImage:function(t,i){var r=this.toCanvasElement(i);return t&&t(new e.Image(r)),this},toCanvasElement:function(t){t||(t={});var i=e.util,r=i.saveObjectTransform(this),n=this.group,o=this.shadow,s=Math.abs,a=(t.multiplier||1)*(t.enableRetinaScaling?e.devicePixelRatio:1);delete this.group,t.withoutTransform&&i.resetObjectTransform(this),t.withoutShadow&&(this.shadow=null);var h,c,l,u,f=e.util.createCanvasElement(),d=this.getBoundingRect(!0,!0),g=this.shadow,p={x:0,y:0};g&&(c=g.blur,h=g.nonScaling?{scaleX:1,scaleY:1}:this.getObjectScaling(),p.x=2*Math.round(s(g.offsetX)+c)*s(h.scaleX),p.y=2*Math.round(s(g.offsetY)+c)*s(h.scaleY)),l=d.width+p.x,u=d.height+p.y,f.width=Math.ceil(l),f.height=Math.ceil(u);var v=new e.StaticCanvas(f,{enableRetinaScaling:!1,renderOnAddRemove:!1,skipOffscreen:!1});"jpeg"===t.format&&(v.backgroundColor="#fff"),this.setPositionByOrigin(new e.Point(v.width/2,v.height/2),"center","center");var m=this.canvas;v.add(this);var y=v.toCanvasElement(a||1,t);return 
this.shadow=o,this.set("canvas",m),n&&(this.group=n),this.set(r).setCoords(),v._objects=[],v.dispose(),v=null,y},toDataURL:function(t){return t||(t={}),e.util.toDataURL(this.toCanvasElement(t),t.format||"png",t.quality||1)},isType:function(t){return this.type===t},complexity:function(){return 1},toJSON:function(t){return this.toObject(t)},rotate:function(t){var e=("center"!==this.originX||"center"!==this.originY)&&this.centeredRotation;return e&&this._setOriginToCenter(),this.set("angle",t),e&&this._resetOrigin(),this},centerH:function(){return this.canvas&&this.canvas.centerObjectH(this),this},viewportCenterH:function(){return this.canvas&&this.canvas.viewportCenterObjectH(this),this},centerV:function(){return this.canvas&&this.canvas.centerObjectV(this),this},viewportCenterV:function(){return this.canvas&&this.canvas.viewportCenterObjectV(this),this},center:function(){return this.canvas&&this.canvas.centerObject(this),this},viewportCenter:function(){return this.canvas&&this.canvas.viewportCenterObject(this),this},getLocalPointer:function(t,i){i=i||this.canvas.getPointer(t);var r=new e.Point(i.x,i.y),n=this._getLeftTopCoords();return this.angle&&(r=e.util.rotatePoint(r,n,s(-this.angle))),{x:r.x-n.x,y:r.y-n.y}},_setupCompositeOperation:function(t){this.globalCompositeOperation&&(t.globalCompositeOperation=this.globalCompositeOperation)}}),e.util.createAccessors&&e.util.createAccessors(e.Object),i(e.Object.prototype,e.Observable),e.Object.NUM_FRACTION_DIGITS=2,e.Object._fromObject=function(t,i,n,o){var s=e[t];i=r(i,!0),e.util.enlivenPatterns([i.fill,i.stroke],(function(t){void 0!==t[0]&&(i.fill=t[0]),void 0!==t[1]&&(i.stroke=t[1]),e.util.enlivenObjects([i.clipPath],(function(t){i.clipPath=t[0];var e=o?new s(i[o],i):new s(i);n&&n(e)}))}))},e.Object.__uid=0)}(t),function(){var t=T.util.degreesToRadians,e={left:-.5,center:0,right:.5},i={top:-.5,center:0,bottom:.5};T.util.object.extend(T.Object.prototype,{translateToGivenOrigin:function(t,r,n,o,s){var a,h,c,l=t.x,u=t.y;return"string"==typeof r?r=e[r]:r-=.5,"string"==typeof o?o=e[o]:o-=.5,"string"==typeof n?n=i[n]:n-=.5,"string"==typeof s?s=i[s]:s-=.5,h=s-n,((a=o-r)||h)&&(c=this._getTransformedDimensions(),l=t.x+a*c.x,u=t.y+h*c.y),new T.Point(l,u)},translateToCenterPoint:function(e,i,r){var n=this.translateToGivenOrigin(e,i,r,"center","center");return this.angle?T.util.rotatePoint(n,e,t(this.angle)):n},translateToOriginPoint:function(e,i,r){var n=this.translateToGivenOrigin(e,"center","center",i,r);return this.angle?T.util.rotatePoint(n,e,t(this.angle)):n},getCenterPoint:function(){var t=new T.Point(this.left,this.top);return this.translateToCenterPoint(t,this.originX,this.originY)},getPointByOrigin:function(t,e){var i=this.getCenterPoint();return this.translateToOriginPoint(i,t,e)},toLocalPoint:function(e,i,r){var n,o,s=this.getCenterPoint();return n=void 0!==i&&void 0!==r?this.translateToGivenOrigin(s,"center","center",i,r):new T.Point(this.left,this.top),o=new T.Point(e.x,e.y),this.angle&&(o=T.util.rotatePoint(o,s,-t(this.angle))),o.subtractEquals(n)},setPositionByOrigin:function(t,e,i){var r=this.translateToCenterPoint(t,e,i),n=this.translateToOriginPoint(r,this.originX,this.originY);this.set("left",n.x),this.set("top",n.y)},adjustPosition:function(i){var r,n,o=t(this.angle),s=this.getScaledWidth(),a=T.util.cos(o)*s,h=T.util.sin(o)*s;r="string"==typeof this.originX?e[this.originX]:this.originX-.5,n="string"==typeof 
i?e[i]:i-.5,this.left+=a*(n-r),this.top+=h*(n-r),this.setCoords(),this.originX=i},_setOriginToCenter:function(){this._originalOriginX=this.originX,this._originalOriginY=this.originY;var t=this.getCenterPoint();this.originX="center",this.originY="center",this.left=t.x,this.top=t.y},_resetOrigin:function(){var t=this.translateToOriginPoint(this.getCenterPoint(),this._originalOriginX,this._originalOriginY);this.originX=this._originalOriginX,this.originY=this._originalOriginY,this.left=t.x,this.top=t.y,this._originalOriginX=null,this._originalOriginY=null},_getLeftTopCoords:function(){return this.translateToOriginPoint(this.getCenterPoint(),"left","top")}})}(),function(){var t=T.util,e=t.degreesToRadians,i=t.multiplyTransformMatrices,r=t.transformPoint;t.object.extend(T.Object.prototype,{oCoords:null,aCoords:null,lineCoords:null,ownMatrixCache:null,matrixCache:null,controls:{},_getCoords:function(t,e){return e?t?this.calcACoords():this.calcLineCoords():(this.aCoords&&this.lineCoords||this.setCoords(!0),t?this.aCoords:this.lineCoords)},getCoords:function(t,e){return i=this._getCoords(t,e),[new T.Point(i.tl.x,i.tl.y),new T.Point(i.tr.x,i.tr.y),new T.Point(i.br.x,i.br.y),new T.Point(i.bl.x,i.bl.y)];var i},intersectsWithRect:function(t,e,i,r){var n=this.getCoords(i,r);return"Intersection"===T.Intersection.intersectPolygonRectangle(n,t,e).status},intersectsWithObject:function(t,e,i){return"Intersection"===T.Intersection.intersectPolygonPolygon(this.getCoords(e,i),t.getCoords(e,i)).status||t.isContainedWithinObject(this,e,i)||this.isContainedWithinObject(t,e,i)},isContainedWithinObject:function(t,e,i){for(var r=this.getCoords(e,i),n=e?t.aCoords:t.lineCoords,o=0,s=t._getImageLines(n);o<4;o++)if(!t.containsPoint(r[o],s))return!1;return!0},isContainedWithinRect:function(t,e,i,r){var n=this.getBoundingRect(i,r);return n.left>=t.x&&n.left+n.width<=e.x&&n.top>=t.y&&n.top+n.height<=e.y},containsPoint:function(t,e,i,r){var n=this._getCoords(i,r),o=(e=e||this._getImageLines(n),this._findCrossPoints(t,e));return 0!==o&&o%2==1},isOnScreen:function(t){if(!this.canvas)return!1;var e=this.canvas.vptCoords.tl,i=this.canvas.vptCoords.br;return!!this.getCoords(!0,t).some((function(t){return t.x<=i.x&&t.x>=e.x&&t.y<=i.y&&t.y>=e.y}))||(!!this.intersectsWithRect(e,i,!0,t)||this._containsCenterOfCanvas(e,i,t))},_containsCenterOfCanvas:function(t,e,i){var r={x:(t.x+e.x)/2,y:(t.y+e.y)/2};return!!this.containsPoint(r,null,!0,i)},isPartiallyOnScreen:function(t){if(!this.canvas)return!1;var e=this.canvas.vptCoords.tl,i=this.canvas.vptCoords.br;return!!this.intersectsWithRect(e,i,!0,t)||this.getCoords(!0,t).every((function(t){return(t.x>=i.x||t.x<=e.x)&&(t.y>=i.y||t.y<=e.y)}))&&this._containsCenterOfCanvas(e,i,t)},_getImageLines:function(t){return{topline:{o:t.tl,d:t.tr},rightline:{o:t.tr,d:t.br},bottomline:{o:t.br,d:t.bl},leftline:{o:t.bl,d:t.tl}}},_findCrossPoints:function(t,e){var i,r,n,o=0;for(var s in e)if(!((n=e[s]).o.y=t.y&&n.d.y>=t.y||(n.o.x===n.d.x&&n.o.x>=t.x?r=n.o.x:(0,i=(n.d.y-n.o.y)/(n.d.x-n.o.x),r=-(t.y-0*t.x-(n.o.y-i*n.o.x))/(0-i)),r>=t.x&&(o+=1),2!==o)))break;return o},getBoundingRect:function(e,i){var r=this.getCoords(e,i);return t.makeBoundingBoxFromPoints(r)},getScaledWidth:function(){return this._getTransformedDimensions().x},getScaledHeight:function(){return this._getTransformedDimensions().y},_constrainScale:function(t){return Math.abs(t)\n')}},toSVG:function(t){return 
this._createBaseSVGMarkup(this._toSVG(t),{reviver:t})},toClipPathSVG:function(t){return"\t"+this._createBaseClipPathSVGMarkup(this._toSVG(t),{reviver:t})},_createBaseClipPathSVGMarkup:function(t,e){var i=(e=e||{}).reviver,r=e.additionalTransform||"",n=[this.getSvgTransform(!0,r),this.getSvgCommons()].join(""),o=t.indexOf("COMMON_PARTS");return t[o]=n,i?i(t.join("")):t.join("")},_createBaseSVGMarkup:function(t,e){var i,r,n=(e=e||{}).noStyle,o=e.reviver,s=n?"":'style="'+this.getSvgStyles()+'" ',a=e.withShadow?'style="'+this.getSvgFilter()+'" ':"",h=this.clipPath,c=this.strokeUniform?'vector-effect="non-scaling-stroke" ':"",l=h&&h.absolutePositioned,u=this.stroke,f=this.fill,d=this.shadow,g=[],p=t.indexOf("COMMON_PARTS"),v=e.additionalTransform;return h&&(h.clipPathId="CLIPPATH_"+T.Object.__uid++,r='\n'+h.toClipPathSVG(o)+"\n"),l&&g.push("\n"),g.push("\n"),i=[s,c,n?"":this.addPaintOrder()," ",v?'transform="'+v+'" ':""].join(""),t[p]=i,f&&f.toLive&&g.push(f.toSVG(this)),u&&u.toLive&&g.push(u.toSVG(this)),d&&g.push(d.toSVG(this)),h&&g.push(r),g.push(t.join("")),g.push("\n"),l&&g.push("\n"),o?o(g.join("")):g.join("")},addPaintOrder:function(){return"fill"!==this.paintFirst?' paint-order="'+this.paintFirst+'" ':""}})}(),function(){var t=T.util.object.extend,e="stateProperties";function i(e,i,r){var n={};r.forEach((function(t){n[t]=e[t]})),t(e[i],n,!0)}function r(t,e,i){if(t===e)return!0;if(Array.isArray(t)){if(!Array.isArray(e)||t.length!==e.length)return!1;for(var n=0,o=t.length;n=0;h--)if(n=a[h],this.isControlVisible(n)&&(r=this._getImageLines(e?this.oCoords[n].touchCorner:this.oCoords[n].corner),0!==(i=this._findCrossPoints({x:o,y:s},r))&&i%2==1))return this.__corner=n,n;return!1},forEachControl:function(t){for(var e in this.controls)t(this.controls[e],e,this)},_setCornerCoords:function(){var t=this.oCoords;for(var e in t){var i=this.controls[e];t[e].corner=i.calcCornerCoords(this.angle,this.cornerSize,t[e].x,t[e].y,!1),t[e].touchCorner=i.calcCornerCoords(this.angle,this.touchCornerSize,t[e].x,t[e].y,!0)}},drawSelectionBackground:function(e){if(!this.selectionBackgroundColor||this.canvas&&!this.canvas.interactive||this.canvas&&this.canvas._activeObject!==this)return this;e.save();var i=this.getCenterPoint(),r=this._calculateCurrentDimensions(),n=this.canvas.viewportTransform;return e.translate(i.x,i.y),e.scale(1/n[0],1/n[3]),e.rotate(t(this.angle)),e.fillStyle=this.selectionBackgroundColor,e.fillRect(-r.x/2,-r.y/2,r.x,r.y),e.restore(),this},drawBorders:function(t,e){e=e||{};var i=this._calculateCurrentDimensions(),r=this.borderScaleFactor,n=i.x+r,o=i.y+r,s=void 0!==e.hasControls?e.hasControls:this.hasControls,a=!1;return t.save(),t.strokeStyle=e.borderColor||this.borderColor,this._setLineDash(t,e.borderDashArray||this.borderDashArray),t.strokeRect(-n/2,-o/2,n,o),s&&(t.beginPath(),this.forEachControl((function(e,i,r){e.withConnection&&e.getVisibility(r,i)&&(a=!0,t.moveTo(e.x*n,e.y*o),t.lineTo(e.x*n+e.offsetX,e.y*o+e.offsetY))})),a&&t.stroke()),t.restore(),this},drawBordersInGroup:function(t,e,i){i=i||{};var r=T.util.sizeAfterTransform(this.width,this.height,e),n=this.strokeWidth,o=this.strokeUniform,s=this.borderScaleFactor,a=r.x+n*(o?this.canvas.getZoom():e.scaleX)+s,h=r.y+n*(o?this.canvas.getZoom():e.scaleY)+s;return t.save(),this._setLineDash(t,i.borderDashArray||this.borderDashArray),t.strokeStyle=i.borderColor||this.borderColor,t.strokeRect(-a/2,-h/2,a,h),t.restore(),this},drawControls:function(t,e){e=e||{},t.save();var i,r,n=this.canvas.getRetinaScaling();return 
t.setTransform(n,0,0,n,0,0),t.strokeStyle=t.fillStyle=e.cornerColor||this.cornerColor,this.transparentCorners||(t.strokeStyle=e.cornerStrokeColor||this.cornerStrokeColor),this._setLineDash(t,e.cornerDashArray||this.cornerDashArray),this.setCoords(),this.group&&(i=this.group.calcTransformMatrix()),this.forEachControl((function(n,o,s){r=s.oCoords[o],n.getVisibility(s,o)&&(i&&(r=T.util.transformPoint(r,i)),n.render(t,r.x,r.y,e,s))})),t.restore(),this},isControlVisible:function(t){return this.controls[t]&&this.controls[t].getVisibility(this,t)},setControlVisible:function(t,e){return this._controlsVisibility||(this._controlsVisibility={}),this._controlsVisibility[t]=e,this},setControlsVisibility:function(t){for(var e in t||(t={}),t)this.setControlVisible(e,t[e]);return this},onDeselect:function(){},onSelect:function(){}})}(),T.util.object.extend(T.StaticCanvas.prototype,{FX_DURATION:500,fxCenterObjectH:function(t,e){var i=function(){},r=(e=e||{}).onComplete||i,n=e.onChange||i,o=this;return T.util.animate({startValue:t.left,endValue:this.getCenter().left,duration:this.FX_DURATION,onChange:function(e){t.set("left",e),o.requestRenderAll(),n()},onComplete:function(){t.setCoords(),r()}}),this},fxCenterObjectV:function(t,e){var i=function(){},r=(e=e||{}).onComplete||i,n=e.onChange||i,o=this;return T.util.animate({startValue:t.top,endValue:this.getCenter().top,duration:this.FX_DURATION,onChange:function(e){t.set("top",e),o.requestRenderAll(),n()},onComplete:function(){t.setCoords(),r()}}),this},fxRemove:function(t,e){var i=function(){},r=(e=e||{}).onComplete||i,n=e.onChange||i,o=this;return T.util.animate({startValue:t.opacity,endValue:0,duration:this.FX_DURATION,onChange:function(e){t.set("opacity",e),o.requestRenderAll(),n()},onComplete:function(){o.remove(t),r()}}),this}}),T.util.object.extend(T.Object.prototype,{animate:function(){if(arguments[0]&&"object"==typeof arguments[0]){var t,e,i=[];for(t in arguments[0])i.push(t);for(var r=0,n=i.length;r-1||n&&o.colorProperties.indexOf(n[1])>-1,a=n?this.get(n[0])[n[1]]:this.get(t);"from"in i||(i.from=a),s||(e=~e.indexOf("=")?a+parseFloat(e.replace("=","")):parseFloat(e));var h={startValue:i.from,endValue:e,byValue:i.by,easing:i.easing,duration:i.duration,abort:i.abort&&function(t,e,r){return i.abort.call(o,t,e,r)},onChange:function(e,s,a){n?o[n[0]][n[1]]=e:o.set(t,e),r||i.onChange&&i.onChange(e,s,a)},onComplete:function(t,e,n){r||(o.setCoords(),i.onComplete&&i.onComplete(t,e,n))}};return s?T.util.animateColor(h.startValue,h.endValue,h.duration,h):T.util.animate(h)}}),function(t){var e=t.fabric||(t.fabric={}),i=e.util.object.extend,r=e.util.object.clone,n={x1:1,x2:1,y1:1,y2:1};function o(t,e){var i=t.origin,r=t.axis1,n=t.axis2,o=t.dimension,s=e.nearest,a=e.center,h=e.farthest;return function(){switch(this.get(i)){case s:return Math.min(this.get(r),this.get(n));case a:return Math.min(this.get(r),this.get(n))+.5*this.get(o);case h:return Math.max(this.get(r),this.get(n))}}}e.Line?e.warn("fabric.Line is already defined"):(e.Line=e.util.createClass(e.Object,{type:"line",x1:0,y1:0,x2:0,y2:0,cacheProperties:e.Object.prototype.cacheProperties.concat("x1","x2","y1","y2"),initialize:function(t,e){t||(t=[0,0,0,0]),this.callSuper("initialize",e),this.set("x1",t[0]),this.set("y1",t[1]),this.set("x2",t[2]),this.set("y2",t[3]),this._setWidthHeight(e)},_setWidthHeight:function(t){t||(t={}),this.width=Math.abs(this.x2-this.x1),this.height=Math.abs(this.y2-this.y1),this.left="left"in t?t.left:this._getLeftToOriginX(),this.top="top"in 
t?t.top:this._getTopToOriginY()},_set:function(t,e){return this.callSuper("_set",t,e),void 0!==n[t]&&this._setWidthHeight(),this},_getLeftToOriginX:o({origin:"originX",axis1:"x1",axis2:"x2",dimension:"width"},{nearest:"left",center:"center",farthest:"right"}),_getTopToOriginY:o({origin:"originY",axis1:"y1",axis2:"y2",dimension:"height"},{nearest:"top",center:"center",farthest:"bottom"}),_render:function(t){t.beginPath();var e=this.calcLinePoints();t.moveTo(e.x1,e.y1),t.lineTo(e.x2,e.y2),t.lineWidth=this.strokeWidth;var i=t.strokeStyle;t.strokeStyle=this.stroke||t.fillStyle,this.stroke&&this._renderStroke(t),t.strokeStyle=i},_findCenterFromElement:function(){return{x:(this.x1+this.x2)/2,y:(this.y1+this.y2)/2}},toObject:function(t){return i(this.callSuper("toObject",t),this.calcLinePoints())},_getNonTransformedDimensions:function(){var t=this.callSuper("_getNonTransformedDimensions");return"butt"===this.strokeLineCap&&(0===this.width&&(t.y-=this.strokeWidth),0===this.height&&(t.x-=this.strokeWidth)),t},calcLinePoints:function(){var t=this.x1<=this.x2?-1:1,e=this.y1<=this.y2?-1:1,i=t*this.width*.5,r=e*this.height*.5;return{x1:i,x2:t*this.width*-.5,y1:r,y2:e*this.height*-.5}},_toSVG:function(){var t=this.calcLinePoints();return["\n']}}),e.Line.ATTRIBUTE_NAMES=e.SHARED_ATTRIBUTES.concat("x1 y1 x2 y2".split(" ")),e.Line.fromElement=function(t,r,n){n=n||{};var o=e.parseAttributes(t,e.Line.ATTRIBUTE_NAMES),s=[o.x1||0,o.y1||0,o.x2||0,o.y2||0];r(new e.Line(s,i(o,n)))},e.Line.fromObject=function(t,i){var n=r(t,!0);n.points=[t.x1,t.y1,t.x2,t.y2],e.Object._fromObject("Line",n,(function(t){delete t.points,i&&i(t)}),"points")})}(t),function(t){var e=t.fabric||(t.fabric={}),i=Math.PI;e.Circle?e.warn("fabric.Circle is already defined."):(e.Circle=e.util.createClass(e.Object,{type:"circle",radius:0,startAngle:0,endAngle:2*i,cacheProperties:e.Object.prototype.cacheProperties.concat("radius","startAngle","endAngle"),_set:function(t,e){return this.callSuper("_set",t,e),"radius"===t&&this.setRadius(e),this},toObject:function(t){return this.callSuper("toObject",["radius","startAngle","endAngle"].concat(t))},_toSVG:function(){var t,r=(this.endAngle-this.startAngle)%(2*i);if(0===r)t=["\n'];else{var n=e.util.cos(this.startAngle)*this.radius,o=e.util.sin(this.startAngle)*this.radius,s=e.util.cos(this.endAngle)*this.radius,a=e.util.sin(this.endAngle)*this.radius,h=r>i?"1":"0";t=['\n"]}return t},_render:function(t){t.beginPath(),t.arc(0,0,this.radius,this.startAngle,this.endAngle,!1),this._renderPaintInOrder(t)},getRadiusX:function(){return this.get("radius")*this.get("scaleX")},getRadiusY:function(){return this.get("radius")*this.get("scaleY")},setRadius:function(t){return this.radius=t,this.set("width",2*t).set("height",2*t)}}),e.Circle.ATTRIBUTE_NAMES=e.SHARED_ATTRIBUTES.concat("cx cy r".split(" ")),e.Circle.fromElement=function(t,i){var r,n=e.parseAttributes(t,e.Circle.ATTRIBUTE_NAMES);if(!("radius"in(r=n)&&r.radius>=0))throw new Error("value of `r` attribute is required and can not be negative");n.left=(n.left||0)-n.radius,n.top=(n.top||0)-n.radius,i(new e.Circle(n))},e.Circle.fromObject=function(t,i){e.Object._fromObject("Circle",t,i)})}(t),function(t){var e=t.fabric||(t.fabric={});e.Triangle?e.warn("fabric.Triangle is already defined"):(e.Triangle=e.util.createClass(e.Object,{type:"triangle",width:100,height:100,_render:function(t){var e=this.width/2,i=this.height/2;t.beginPath(),t.moveTo(-e,i),t.lineTo(0,-i),t.lineTo(e,i),t.closePath(),this._renderPaintInOrder(t)},_toSVG:function(){var 
t=this.width/2,e=this.height/2;return["']}}),e.Triangle.fromObject=function(t,i){return e.Object._fromObject("Triangle",t,i)})}(t),function(t){var e=t.fabric||(t.fabric={}),i=2*Math.PI;e.Ellipse?e.warn("fabric.Ellipse is already defined."):(e.Ellipse=e.util.createClass(e.Object,{type:"ellipse",rx:0,ry:0,cacheProperties:e.Object.prototype.cacheProperties.concat("rx","ry"),initialize:function(t){this.callSuper("initialize",t),this.set("rx",t&&t.rx||0),this.set("ry",t&&t.ry||0)},_set:function(t,e){switch(this.callSuper("_set",t,e),t){case"rx":this.rx=e,this.set("width",2*e);break;case"ry":this.ry=e,this.set("height",2*e)}return this},getRx:function(){return this.get("rx")*this.get("scaleX")},getRy:function(){return this.get("ry")*this.get("scaleY")},toObject:function(t){return this.callSuper("toObject",["rx","ry"].concat(t))},_toSVG:function(){return["\n']},_render:function(t){t.beginPath(),t.save(),t.transform(1,0,0,this.ry/this.rx,0,0),t.arc(0,0,this.rx,0,i,!1),t.restore(),this._renderPaintInOrder(t)}}),e.Ellipse.ATTRIBUTE_NAMES=e.SHARED_ATTRIBUTES.concat("cx cy rx ry".split(" ")),e.Ellipse.fromElement=function(t,i){var r=e.parseAttributes(t,e.Ellipse.ATTRIBUTE_NAMES);r.left=(r.left||0)-r.rx,r.top=(r.top||0)-r.ry,i(new e.Ellipse(r))},e.Ellipse.fromObject=function(t,i){e.Object._fromObject("Ellipse",t,i)})}(t),function(t){var e=t.fabric||(t.fabric={}),i=e.util.object.extend;e.Rect?e.warn("fabric.Rect is already defined"):(e.Rect=e.util.createClass(e.Object,{stateProperties:e.Object.prototype.stateProperties.concat("rx","ry"),type:"rect",rx:0,ry:0,cacheProperties:e.Object.prototype.cacheProperties.concat("rx","ry"),initialize:function(t){this.callSuper("initialize",t),this._initRxRy()},_initRxRy:function(){this.rx&&!this.ry?this.ry=this.rx:this.ry&&!this.rx&&(this.rx=this.ry)},_render:function(t){var e=this.rx?Math.min(this.rx,this.width/2):0,i=this.ry?Math.min(this.ry,this.height/2):0,r=this.width,n=this.height,o=-this.width/2,s=-this.height/2,a=0!==e||0!==i,h=.4477152502;t.beginPath(),t.moveTo(o+e,s),t.lineTo(o+r-e,s),a&&t.bezierCurveTo(o+r-h*e,s,o+r,s+h*i,o+r,s+i),t.lineTo(o+r,s+n-i),a&&t.bezierCurveTo(o+r,s+n-h*i,o+r-h*e,s+n,o+r-e,s+n),t.lineTo(o+e,s+n),a&&t.bezierCurveTo(o+h*e,s+n,o,s+n-h*i,o,s+n-i),t.lineTo(o,s+i),a&&t.bezierCurveTo(o,s+h*i,o+h*e,s,o+e,s),t.closePath(),this._renderPaintInOrder(t)},toObject:function(t){return this.callSuper("toObject",["rx","ry"].concat(t))},_toSVG:function(){return["\n']}}),e.Rect.ATTRIBUTE_NAMES=e.SHARED_ATTRIBUTES.concat("x y rx ry width height".split(" ")),e.Rect.fromElement=function(t,r,n){if(!t)return r(null);n=n||{};var o=e.parseAttributes(t,e.Rect.ATTRIBUTE_NAMES);o.left=o.left||0,o.top=o.top||0,o.height=o.height||0,o.width=o.width||0;var s=new e.Rect(i(n?e.util.object.clone(n):{},o));s.visible=s.visible&&s.width>0&&s.height>0,r(s)},e.Rect.fromObject=function(t,i){return e.Object._fromObject("Rect",t,i)})}(t),function(t){var e=t.fabric||(t.fabric={}),i=e.util.object.extend,r=e.util.array.min,n=e.util.array.max,o=e.util.toFixed;e.Polyline?e.warn("fabric.Polyline is already defined"):(e.Polyline=e.util.createClass(e.Object,{type:"polyline",points:null,cacheProperties:e.Object.prototype.cacheProperties.concat("points"),initialize:function(t,e){e=e||{},this.points=t||[],this.callSuper("initialize",e),this._setPositionDimensions(e)},_setPositionDimensions:function(t){var 
e,i=this._calcDimensions(t);this.width=i.width,this.height=i.height,t.fromSVG||(e=this.translateToGivenOrigin({x:i.left-this.strokeWidth/2,y:i.top-this.strokeWidth/2},"left","top",this.originX,this.originY)),void 0===t.left&&(this.left=t.fromSVG?i.left:e.x),void 0===t.top&&(this.top=t.fromSVG?i.top:e.y),this.pathOffset={x:i.left+this.width/2,y:i.top+this.height/2}},_calcDimensions:function(){var t=this.points,e=r(t,"x")||0,i=r(t,"y")||0;return{left:e,top:i,width:(n(t,"x")||0)-e,height:(n(t,"y")||0)-i}},toObject:function(t){return i(this.callSuper("toObject",t),{points:this.points.concat()})},_toSVG:function(){for(var t=[],i=this.pathOffset.x,r=this.pathOffset.y,n=e.Object.NUM_FRACTION_DIGITS,s=0,a=this.points.length;s\n']},commonRender:function(t){var e,i=this.points.length,r=this.pathOffset.x,n=this.pathOffset.y;if(!i||isNaN(this.points[i-1].y))return!1;t.beginPath(),t.moveTo(this.points[0].x-r,this.points[0].y-n);for(var o=0;o"},toObject:function(t){return n(this.callSuper("toObject",t),{path:this.path.map((function(t){return t.slice()}))})},toDatalessObject:function(t){var e=this.toObject(["sourcePath"].concat(t));return e.sourcePath&&delete e.path,e},_toSVG:function(){return["\n"]},_getOffsetTransform:function(){var t=e.Object.NUM_FRACTION_DIGITS;return" translate("+s(-this.pathOffset.x,t)+", "+s(-this.pathOffset.y,t)+")"},toClipPathSVG:function(t){var e=this._getOffsetTransform();return"\t"+this._createBaseClipPathSVGMarkup(this._toSVG(),{reviver:t,additionalTransform:e})},toSVG:function(t){var e=this._getOffsetTransform();return this._createBaseSVGMarkup(this._toSVG(),{reviver:t,additionalTransform:e})},complexity:function(){return this.path.length},_calcDimensions:function(){for(var t,n,o=[],s=[],a=0,h=0,c=0,l=0,u=0,f=this.path.length;u"},addWithUpdate:function(t){var i=!!this.group;return this._restoreObjectsState(),e.util.resetObjectTransform(this),t&&(i&&e.util.removeTransformFromObject(t,this.group.calcTransformMatrix()),this._objects.push(t),t.group=this,t._set("canvas",this.canvas)),this._calcBounds(),this._updateObjectsCoords(),this.dirty=!0,i?this.group.addWithUpdate():this.setCoords(),this},removeWithUpdate:function(t){return this._restoreObjectsState(),e.util.resetObjectTransform(this),this.remove(t),this._calcBounds(),this._updateObjectsCoords(),this.setCoords(),this.dirty=!0,this},_onObjectAdded:function(t){this.dirty=!0,t.group=this,t._set("canvas",this.canvas)},_onObjectRemoved:function(t){this.dirty=!0,delete t.group},_set:function(t,i){var r=this._objects.length;if(this.useSetOnGroup)for(;r--;)this._objects[r].setOnGroup(t,i);if("canvas"===t)for(;r--;)this._objects[r]._set(t,i);e.Object.prototype._set.call(this,t,i)},toObject:function(t){var i=this.includeDefaultValues,r=this._objects.filter((function(t){return!t.excludeFromExport})).map((function(e){var r=e.includeDefaultValues;e.includeDefaultValues=i;var n=e.toObject(t);return e.includeDefaultValues=r,n})),n=e.Object.prototype.toObject.call(this,t);return n.objects=r,n},toDatalessObject:function(t){var i,r=this.sourcePath;if(r)i=r;else{var n=this.includeDefaultValues;i=this._objects.map((function(e){var i=e.includeDefaultValues;e.includeDefaultValues=n;var r=e.toDatalessObject(t);return e.includeDefaultValues=i,r}))}var o=e.Object.prototype.toDatalessObject.call(this,t);return o.objects=i,o},render:function(t){this._transformDone=!0,this.callSuper("render",t),this._transformDone=!1},shouldCache:function(){var t=e.Object.prototype.shouldCache.call(this);if(t)for(var 
i=0,r=this._objects.length;i\n"],i=0,r=this._objects.length;i\n"),e},getSvgStyles:function(){var t=void 0!==this.opacity&&1!==this.opacity?"opacity: "+this.opacity+";":"",e=this.visible?"":" visibility: hidden;";return[t,this.getSvgFilter(),e].join("")},toClipPathSVG:function(t){for(var e=[],i=0,r=this._objects.length;i"},shouldCache:function(){return!1},isOnACache:function(){return!1},_renderControls:function(t,e,i){t.save(),t.globalAlpha=this.isMoving?this.borderOpacityWhenMoving:1,this.callSuper("_renderControls",t,e),void 0===(i=i||{}).hasControls&&(i.hasControls=!1),i.forActiveSelection=!0;for(var r=0,n=this._objects.length;r\n','\t\n',"\n"),s=' clip-path="url(#imageCrop_'+h+')" '}if(this.imageSmoothing||(a='" image-rendering="optimizeSpeed'),i.push("\t\n"),this.stroke||this.strokeDashArray){var c=this.fill;this.fill=null,t=["\t\n'],this.fill=c}return e="fill"!==this.paintFirst?e.concat(t,i):e.concat(i,t)},getSrc:function(t){var e=t?this._element:this._originalElement;return e?e.toDataURL?e.toDataURL():this.srcFromAttribute?e.getAttribute("src"):e.src:this.src||""},setSrc:function(t,e,i){return T.util.loadImage(t,(function(t,r){this.setElement(t,i),this._setWidthHeight(),e&&e(this,r)}),this,i&&i.crossOrigin),this},toString:function(){return'#'},applyResizeFilters:function(){var t=this.resizeFilter,e=this.minimumScaleTrigger,i=this.getTotalObjectScaling(),r=i.scaleX,n=i.scaleY,o=this._filteredEl||this._originalElement;if(this.group&&this.set("dirty",!0),!t||r>e&&n>e)return this._element=o,this._filterScalingX=1,this._filterScalingY=1,this._lastScaleX=r,void(this._lastScaleY=n);T.filterBackend||(T.filterBackend=T.initFilterBackend());var s=T.util.createCanvasElement(),a=this._filteredEl?this.cacheKey+"_filtered":this.cacheKey,h=o.width,c=o.height;s.width=h,s.height=c,this._element=s,this._lastScaleX=t.scaleX=r,this._lastScaleY=t.scaleY=n,T.filterBackend.applyFilters([t],o,h,c,this._element,a),this._filterScalingX=s.width/this._originalElement.width,this._filterScalingY=s.height/this._originalElement.height},applyFilters:function(t){if(t=(t=t||this.filters||[]).filter((function(t){return t&&!t.isNeutralState()})),this.set("dirty",!0),this.removeTexture(this.cacheKey+"_filtered"),0===t.length)return this._element=this._originalElement,this._filteredEl=null,this._filterScalingX=1,this._filterScalingY=1,this;var e=this._originalElement,i=e.naturalWidth||e.width,r=e.naturalHeight||e.height;if(this._element===this._originalElement){var n=T.util.createCanvasElement();n.width=i,n.height=r,this._element=n,this._filteredEl=n}else this._element=this._filteredEl,this._filteredEl.getContext("2d").clearRect(0,0,i,r),this._lastScaleX=1,this._lastScaleY=1;return T.filterBackend||(T.filterBackend=T.initFilterBackend()),T.filterBackend.applyFilters(t,this._originalElement,i,r,this._element,this.cacheKey),this._originalElement.width===this._element.width&&this._originalElement.height===this._element.height||(this._filterScalingX=this._element.width/this._originalElement.width,this._filterScalingY=this._element.height/this._originalElement.height),this},_render:function(t){T.util.setImageSmoothing(t,this.imageSmoothing),!0!==this.isMoving&&this.resizeFilter&&this._needsResize()&&this.applyResizeFilters(),this._stroke(t),this._renderPaintInOrder(t)},drawCacheOnCanvas:function(t){T.util.setImageSmoothing(t,this.imageSmoothing),T.Object.prototype.drawCacheOnCanvas.call(this,t)},shouldCache:function(){return this.needsItsOwnCache()},_renderFill:function(t){var e=this._element;if(e){var 
i=this._filterScalingX,r=this._filterScalingY,n=this.width,o=this.height,s=Math.min,a=Math.max,h=a(this.cropX,0),c=a(this.cropY,0),l=e.naturalWidth||e.width,u=e.naturalHeight||e.height,f=h*i,d=c*r,g=s(n*i,l-f),p=s(o*r,u-d),v=-n/2,m=-o/2,y=s(n,l/i-h),_=s(o,u/r-c);e&&t.drawImage(e,f,d,g,p,v,m,y,_)}},_needsResize:function(){var t=this.getTotalObjectScaling();return t.scaleX!==this._lastScaleX||t.scaleY!==this._lastScaleY},_resetWidthHeight:function(){this.set(this.getOriginalSize())},_initElement:function(t,e){this.setElement(T.util.getById(t),e),T.util.addClass(this.getElement(),T.Image.CSS_CANVAS)},_initConfig:function(t){t||(t={}),this.setOptions(t),this._setWidthHeight(t)},_initFilters:function(t,e){t&&t.length?T.util.enlivenObjects(t,(function(t){e&&e(t)}),"fabric.Image.filters"):e&&e()},_setWidthHeight:function(t){t||(t={});var e=this.getElement();this.width=t.width||e.naturalWidth||e.width||0,this.height=t.height||e.naturalHeight||e.height||0},parsePreserveAspectRatioAttribute:function(){var t,e=T.util.parsePreserveAspectRatioAttribute(this.preserveAspectRatio||""),i=this._element.width,r=this._element.height,n=1,o=1,s=0,a=0,h=0,c=0,l=this.width,u=this.height,f={width:l,height:u};return!e||"none"===e.alignX&&"none"===e.alignY?(n=l/i,o=u/r):("meet"===e.meetOrSlice&&(t=(l-i*(n=o=T.util.findScaleToFit(this._element,f)))/2,"Min"===e.alignX&&(s=-t),"Max"===e.alignX&&(s=t),t=(u-r*o)/2,"Min"===e.alignY&&(a=-t),"Max"===e.alignY&&(a=t)),"slice"===e.meetOrSlice&&(t=i-l/(n=o=T.util.findScaleToCover(this._element,f)),"Mid"===e.alignX&&(h=t/2),"Max"===e.alignX&&(h=t),t=r-u/o,"Mid"===e.alignY&&(c=t/2),"Max"===e.alignY&&(c=t),i=l/n,r=u/o)),{width:i,height:r,scaleX:n,scaleY:o,offsetLeft:s,offsetTop:a,cropX:h,cropY:c}}}),T.Image.CSS_CANVAS="canvas-img",T.Image.prototype.getSvgSrc=T.Image.prototype.getSrc,T.Image.fromObject=function(t,e){var i=T.util.object.clone(t);T.util.loadImage(i.src,(function(t,r){r?e&&e(null,!0):T.Image.prototype._initFilters.call(i,i.filters,(function(r){i.filters=r||[],T.Image.prototype._initFilters.call(i,[i.resizeFilter],(function(r){i.resizeFilter=r[0],T.util.enlivenObjects([i.clipPath],(function(r){i.clipPath=r[0];var n=new T.Image(t,i);e(n,!1)}))}))}))}),null,i.crossOrigin)},T.Image.fromURL=function(t,e,i){T.util.loadImage(t,(function(t,r){e&&e(new T.Image(t,i),r)}),null,i&&i.crossOrigin)},T.Image.ATTRIBUTE_NAMES=T.SHARED_ATTRIBUTES.concat("x y width height preserveAspectRatio xlink:href crossOrigin image-rendering".split(" ")),T.Image.fromElement=function(t,i,r){var n=T.parseAttributes(t,T.Image.ATTRIBUTE_NAMES);T.Image.fromURL(n["xlink:href"],i,e(r?T.util.object.clone(r):{},n))})}(t),T.util.object.extend(T.Object.prototype,{_getAngleValueForStraighten:function(){var t=this.angle%360;return t>0?90*Math.round((t-1)/90):90*Math.round(t/90)},straighten:function(){return this.rotate(this._getAngleValueForStraighten()),this},fxStraighten:function(t){var e=function(){},i=(t=t||{}).onComplete||e,r=t.onChange||e,n=this;return T.util.animate({startValue:this.get("angle"),endValue:this._getAngleValueForStraighten(),duration:this.FX_DURATION,onChange:function(t){n.rotate(t),r()},onComplete:function(){n.setCoords(),i()}}),this}}),T.util.object.extend(T.StaticCanvas.prototype,{straightenObject:function(t){return t.straighten(),this.requestRenderAll(),this},fxStraightenObject:function(t){return t.fxStraighten({onChange:this.requestRenderAllBound}),this}}),function(){function t(t,e){var i="precision "+e+" float;\nvoid main(){}",r=t.createShader(t.FRAGMENT_SHADER);return 
t.shaderSource(r,i),t.compileShader(r),!!t.getShaderParameter(r,t.COMPILE_STATUS)}function e(t){t&&t.tileSize&&(this.tileSize=t.tileSize),this.setupGLContext(this.tileSize,this.tileSize),this.captureGPUInfo()}T.isWebglSupported=function(e){if(T.isLikelyNode)return!1;e=e||T.WebglFilterBackend.prototype.tileSize;var i=document.createElement("canvas"),r=i.getContext("webgl")||i.getContext("experimental-webgl"),n=!1;if(r){T.maxTextureSize=r.getParameter(r.MAX_TEXTURE_SIZE),n=T.maxTextureSize>=e;for(var o=["highp","mediump","lowp"],s=0;s<3;s++)if(t(r,o[s])){T.webGlPrecision=o[s];break}}return this.isSupported=n,n},T.WebglFilterBackend=e,e.prototype={tileSize:2048,resources:{},setupGLContext:function(t,e){this.dispose(),this.createWebGLCanvas(t,e),this.aPosition=new Float32Array([0,0,0,1,1,0,1,1]),this.chooseFastestCopyGLTo2DMethod(t,e)},chooseFastestCopyGLTo2DMethod:function(t,e){var i,r=void 0!==window.performance;try{new ImageData(1,1),i=!0}catch(t){i=!1}var n="undefined"!=typeof ArrayBuffer,o="undefined"!=typeof Uint8ClampedArray;if(r&&i&&n&&o){var s=T.util.createCanvasElement(),a=new ArrayBuffer(t*e*4);if(T.forceGLPutImageData)return this.imageBuffer=a,void(this.copyGLTo2D=A);var h,c,l={imageBuffer:a,destinationWidth:t,destinationHeight:e,targetCanvas:s};s.width=t,s.height=e,h=window.performance.now(),k.call(l,this.gl,l),c=window.performance.now()-h,h=window.performance.now(),A.call(l,this.gl,l),c>window.performance.now()-h?(this.imageBuffer=a,this.copyGLTo2D=A):this.copyGLTo2D=k}},createWebGLCanvas:function(t,e){var i=T.util.createCanvasElement();i.width=t,i.height=e;var r={alpha:!0,premultipliedAlpha:!1,depth:!1,stencil:!1,antialias:!1},n=i.getContext("webgl",r);n||(n=i.getContext("experimental-webgl",r)),n&&(n.clearColor(0,0,0,0),this.canvas=i,this.gl=n)},applyFilters:function(t,e,i,r,n,o){var s,a=this.gl;o&&(s=this.getCachedTexture(o,e));var h={originalWidth:e.width||e.originalWidth,originalHeight:e.height||e.originalHeight,sourceWidth:i,sourceHeight:r,destinationWidth:i,destinationHeight:r,context:a,sourceTexture:this.createTexture(a,i,r,!s&&e),targetTexture:this.createTexture(a,i,r),originalTexture:s||this.createTexture(a,i,r,!s&&e),passes:t.length,webgl:!0,aPosition:this.aPosition,programCache:this.programCache,pass:0,filterBackend:this,targetCanvas:n},c=a.createFramebuffer();return a.bindFramebuffer(a.FRAMEBUFFER,c),t.forEach((function(t){t&&t.applyTo(h)})),function(t){var e=t.targetCanvas,i=e.width,r=e.height,n=t.destinationWidth,o=t.destinationHeight;i===n&&r===o||(e.width=n,e.height=o)}(h),this.copyGLTo2D(a,h),a.bindTexture(a.TEXTURE_2D,null),a.deleteTexture(h.sourceTexture),a.deleteTexture(h.targetTexture),a.deleteFramebuffer(c),n.getContext("2d").setTransform(1,0,0,1,0,0),h},dispose:function(){this.canvas&&(this.canvas=null,this.gl=null),this.clearWebGLCaches()},clearWebGLCaches:function(){this.programCache={},this.textureCache={}},createTexture:function(t,e,i,r){var n=t.createTexture();return t.bindTexture(t.TEXTURE_2D,n),t.texParameteri(t.TEXTURE_2D,t.TEXTURE_MAG_FILTER,t.NEAREST),t.texParameteri(t.TEXTURE_2D,t.TEXTURE_MIN_FILTER,t.NEAREST),t.texParameteri(t.TEXTURE_2D,t.TEXTURE_WRAP_S,t.CLAMP_TO_EDGE),t.texParameteri(t.TEXTURE_2D,t.TEXTURE_WRAP_T,t.CLAMP_TO_EDGE),r?t.texImage2D(t.TEXTURE_2D,0,t.RGBA,t.RGBA,t.UNSIGNED_BYTE,r):t.texImage2D(t.TEXTURE_2D,0,t.RGBA,e,i,0,t.RGBA,t.UNSIGNED_BYTE,null),n},getCachedTexture:function(t,e){if(this.textureCache[t])return this.textureCache[t];var i=this.createTexture(this.gl,e.width,e.height,e);return 
this.textureCache[t]=i,i},evictCachesForKey:function(t){this.textureCache[t]&&(this.gl.deleteTexture(this.textureCache[t]),delete this.textureCache[t])},copyGLTo2D:k,captureGPUInfo:function(){if(this.gpuInfo)return this.gpuInfo;var t=this.gl,e={renderer:"",vendor:""};if(!t)return e;var i=t.getExtension("WEBGL_debug_renderer_info");if(i){var r=t.getParameter(i.UNMASKED_RENDERER_WEBGL),n=t.getParameter(i.UNMASKED_VENDOR_WEBGL);r&&(e.renderer=r.toLowerCase()),n&&(e.vendor=n.toLowerCase())}return this.gpuInfo=e,e}}}(),function(){var t=function(){};function e(){}T.Canvas2dFilterBackend=e,e.prototype={evictCachesForKey:t,dispose:t,clearWebGLCaches:t,resources:{},applyFilters:function(t,e,i,r,n){var o=n.getContext("2d");o.drawImage(e,0,0,i,r);var s={sourceWidth:i,sourceHeight:r,imageData:o.getImageData(0,0,i,r),originalEl:e,originalImageData:o.getImageData(0,0,i,r),canvasEl:n,ctx:o,filterBackend:this};return t.forEach((function(t){t.applyTo(s)})),s.imageData.width===i&&s.imageData.height===r||(n.width=s.imageData.width,n.height=s.imageData.height),o.putImageData(s.imageData,0,0),s}}}(),T.Image=T.Image||{},T.Image.filters=T.Image.filters||{},T.Image.filters.BaseFilter=T.util.createClass({type:"BaseFilter",vertexSource:"attribute vec2 aPosition;\nvarying vec2 vTexCoord;\nvoid main() {\nvTexCoord = aPosition;\ngl_Position = vec4(aPosition * 2.0 - 1.0, 0.0, 1.0);\n}",fragmentSource:"precision highp float;\nvarying vec2 vTexCoord;\nuniform sampler2D uTexture;\nvoid main() {\ngl_FragColor = texture2D(uTexture, vTexCoord);\n}",initialize:function(t){t&&this.setOptions(t)},setOptions:function(t){for(var e in t)this[e]=t[e]},createProgram:function(t,e,i){e=e||this.fragmentSource,i=i||this.vertexSource,"highp"!==T.webGlPrecision&&(e=e.replace(/precision highp float/g,"precision "+T.webGlPrecision+" float"));var r=t.createShader(t.VERTEX_SHADER);if(t.shaderSource(r,i),t.compileShader(r),!t.getShaderParameter(r,t.COMPILE_STATUS))throw new Error("Vertex shader compile error for "+this.type+": "+t.getShaderInfoLog(r));var n=t.createShader(t.FRAGMENT_SHADER);if(t.shaderSource(n,e),t.compileShader(n),!t.getShaderParameter(n,t.COMPILE_STATUS))throw new Error("Fragment shader compile error for "+this.type+": "+t.getShaderInfoLog(n));var o=t.createProgram();if(t.attachShader(o,r),t.attachShader(o,n),t.linkProgram(o),!t.getProgramParameter(o,t.LINK_STATUS))throw new Error('Shader link error for "${this.type}" '+t.getProgramInfoLog(o));var s=this.getAttributeLocations(t,o),a=this.getUniformLocations(t,o)||{};return a.uStepW=t.getUniformLocation(o,"uStepW"),a.uStepH=t.getUniformLocation(o,"uStepH"),{program:o,attributeLocations:s,uniformLocations:a}},getAttributeLocations:function(t,e){return{aPosition:t.getAttribLocation(e,"aPosition")}},getUniformLocations:function(){return{}},sendAttributeData:function(t,e,i){var r=e.aPosition,n=t.createBuffer();t.bindBuffer(t.ARRAY_BUFFER,n),t.enableVertexAttribArray(r),t.vertexAttribPointer(r,2,t.FLOAT,!1,0,0),t.bufferData(t.ARRAY_BUFFER,i,t.STATIC_DRAW)},_setupFrameBuffer:function(t){var e,i,r=t.context;t.passes>1?(e=t.destinationWidth,i=t.destinationHeight,t.sourceWidth===e&&t.sourceHeight===i||(r.deleteTexture(t.targetTexture),t.targetTexture=t.filterBackend.createTexture(r,e,i)),r.framebufferTexture2D(r.FRAMEBUFFER,r.COLOR_ATTACHMENT0,r.TEXTURE_2D,t.targetTexture,0)):(r.bindFramebuffer(r.FRAMEBUFFER,null),r.finish())},_swapTextures:function(t){t.passes--,t.pass++;var e=t.targetTexture;t.targetTexture=t.sourceTexture,t.sourceTexture=e},isNeutralState:function(){var 
t=this.mainParameter,e=T.Image.filters[this.type].prototype;if(t){if(Array.isArray(e[t])){for(var i=e[t].length;i--;)if(this[t][i]!==e[t][i])return!1;return!0}return e[t]===this[t]}return!1},applyTo:function(t){t.webgl?(this._setupFrameBuffer(t),this.applyToWebGL(t),this._swapTextures(t)):this.applyTo2d(t)},retrieveShader:function(t){return t.programCache.hasOwnProperty(this.type)||(t.programCache[this.type]=this.createProgram(t.context)),t.programCache[this.type]},applyToWebGL:function(t){var e=t.context,i=this.retrieveShader(t);0===t.pass&&t.originalTexture?e.bindTexture(e.TEXTURE_2D,t.originalTexture):e.bindTexture(e.TEXTURE_2D,t.sourceTexture),e.useProgram(i.program),this.sendAttributeData(e,i.attributeLocations,t.aPosition),e.uniform1f(i.uniformLocations.uStepW,1/t.sourceWidth),e.uniform1f(i.uniformLocations.uStepH,1/t.sourceHeight),this.sendUniformData(e,i.uniformLocations),e.viewport(0,0,t.destinationWidth,t.destinationHeight),e.drawArrays(e.TRIANGLE_STRIP,0,4)},bindAdditionalTexture:function(t,e,i){t.activeTexture(i),t.bindTexture(t.TEXTURE_2D,e),t.activeTexture(t.TEXTURE0)},unbindAdditionalTexture:function(t,e){t.activeTexture(e),t.bindTexture(t.TEXTURE_2D,null),t.activeTexture(t.TEXTURE0)},getMainParameter:function(){return this[this.mainParameter]},setMainParameter:function(t){this[this.mainParameter]=t},sendUniformData:function(){},createHelpLayer:function(t){if(!t.helpLayer){var e=document.createElement("canvas");e.width=t.sourceWidth,e.height=t.sourceHeight,t.helpLayer=e}},toObject:function(){var t={type:this.type},e=this.mainParameter;return e&&(t[e]=this[e]),t},toJSON:function(){return this.toObject()}}),T.Image.filters.BaseFilter.fromObject=function(t,e){var i=new T.Image.filters[t.type](t);return e&&e(i),i},function(t){var e=t.fabric||(t.fabric={}),i=e.Image.filters,r=e.util.createClass;i.ColorMatrix=r(i.BaseFilter,{type:"ColorMatrix",fragmentSource:"precision highp float;\nuniform sampler2D uTexture;\nvarying vec2 vTexCoord;\nuniform mat4 uColorMatrix;\nuniform vec4 uConstants;\nvoid main() {\nvec4 color = texture2D(uTexture, vTexCoord);\ncolor *= uColorMatrix;\ncolor += uConstants;\ngl_FragColor = color;\n}",matrix:[1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0],mainParameter:"matrix",colorsOnly:!0,initialize:function(t){this.callSuper("initialize",t),this.matrix=this.matrix.slice(0)},applyTo2d:function(t){var e,i,r,n,o,s=t.imageData.data,a=s.length,h=this.matrix,c=this.colorsOnly;for(o=0;o=b||s<0||s>=_||(h=4*(a*_+s),c=v[d*m+f],e+=p[h]*c,i+=p[h+1]*c,r+=p[h+2]*c,w||(n+=p[h+3]*c));C[o]=e,C[o+1]=i,C[o+2]=r,C[o+3]=w?p[o+3]:n}t.imageData=x},getUniformLocations:function(t,e){return{uMatrix:t.getUniformLocation(e,"uMatrix"),uOpaque:t.getUniformLocation(e,"uOpaque"),uHalfSize:t.getUniformLocation(e,"uHalfSize"),uSize:t.getUniformLocation(e,"uSize")}},sendUniformData:function(t,e){t.uniform1fv(e.uMatrix,this.matrix)},toObject:function(){return i(this.callSuper("toObject"),{opaque:this.opaque,matrix:this.matrix})}}),e.Image.filters.Convolute.fromObject=e.Image.filters.BaseFilter.fromObject}(t),function(t){var e=t.fabric||(t.fabric={}),i=e.Image.filters,r=e.util.createClass;i.Grayscale=r(i.BaseFilter,{type:"Grayscale",fragmentSource:{average:"precision highp float;\nuniform sampler2D uTexture;\nvarying vec2 vTexCoord;\nvoid main() {\nvec4 color = texture2D(uTexture, vTexCoord);\nfloat average = (color.r + color.b + color.g) / 3.0;\ngl_FragColor = vec4(average, average, average, color.a);\n}",lightness:"precision highp float;\nuniform sampler2D uTexture;\nuniform int uMode;\nvarying vec2 
vTexCoord;\nvoid main() {\nvec4 col = texture2D(uTexture, vTexCoord);\nfloat average = (max(max(col.r, col.g),col.b) + min(min(col.r, col.g),col.b)) / 2.0;\ngl_FragColor = vec4(average, average, average, col.a);\n}",luminosity:"precision highp float;\nuniform sampler2D uTexture;\nuniform int uMode;\nvarying vec2 vTexCoord;\nvoid main() {\nvec4 col = texture2D(uTexture, vTexCoord);\nfloat average = 0.21 * col.r + 0.72 * col.g + 0.07 * col.b;\ngl_FragColor = vec4(average, average, average, col.a);\n}"},mode:"average",mainParameter:"mode",applyTo2d:function(t){var e,i,r=t.imageData.data,n=r.length,o=this.mode;for(e=0;ec[0]&&n>c[1]&&o>c[2]&&r 0.0) {\n"+this.fragmentSource[t]+"}\n}"},retrieveShader:function(t){var e,i=this.type+"_"+this.mode;return t.programCache.hasOwnProperty(i)||(e=this.buildSource(this.mode),t.programCache[i]=this.createProgram(t.context,e)),t.programCache[i]},applyTo2d:function(t){var i,r,n,o,s,a,h,c=t.imageData.data,l=c.length,u=1-this.alpha;i=(h=new e.Color(this.color).getSource())[0]*this.alpha,r=h[1]*this.alpha,n=h[2]*this.alpha;for(var f=0;f=t||e<=-t)return 0;if(e<1.1920929e-7&&e>-1.1920929e-7)return 1;var i=(e*=Math.PI)/t;return a(e)/e*a(i)/i}},applyTo2d:function(t){var e=t.imageData,i=this.scaleX,r=this.scaleY;this.rcpScaleX=1/i,this.rcpScaleY=1/r;var n,o=e.width,a=e.height,h=s(o*i),c=s(a*r);"sliceHack"===this.resizeType?n=this.sliceByTwo(t,o,a,h,c):"hermite"===this.resizeType?n=this.hermiteFastResize(t,o,a,h,c):"bilinear"===this.resizeType?n=this.bilinearFiltering(t,o,a,h,c):"lanczos"===this.resizeType&&(n=this.lanczosResize(t,o,a,h,c)),t.imageData=n},sliceByTwo:function(t,i,n,o,s){var a,h,c=t.imageData,l=.5,u=!1,f=!1,d=i*l,g=n*l,p=e.filterBackend.resources,v=0,m=0,y=i,_=0;for(p.sliceByTwo||(p.sliceByTwo=document.createElement("canvas")),((a=p.sliceByTwo).width<1.5*i||a.height=e)){M=r(1e3*o(S-x.x)),b[M]||(b[M]={});for(var F=C.y-_;F<=C.y+_;F++)F<0||F>=s||(j=r(1e3*o(F-x.y)),b[M][j]||(b[M][j]=d(n(i(M*v,2)+i(j*m,2))/1e3)),(T=b[M][j])>0&&(P+=T,E+=T*l[O=4*(F*e+S)],k+=T*l[O+1],A+=T*l[O+2],D+=T*l[O+3]))}f[O=4*(w*a+h)]=E/P,f[O+1]=k/P,f[O+2]=A/P,f[O+3]=D/P}return++h1&&j<-1||(_=2*j*j*j-3*j*j+1)>0&&(T+=_*d[(M=4*(D+P*e))+3],x+=_,d[M+3]<255&&(_=_*d[M+3]/250),C+=_*d[M],w+=_*d[M+1],S+=_*d[M+2],b+=_)}p[y]=C/b,p[y+1]=w/b,p[y+2]=S/b,p[y+3]=T/x}return g},toObject:function(){return{type:this.type,scaleX:this.scaleX,scaleY:this.scaleY,resizeType:this.resizeType,lanczosLobes:this.lanczosLobes}}}),e.Image.filters.Resize.fromObject=e.Image.filters.BaseFilter.fromObject}(t),function(t){var e=t.fabric||(t.fabric={}),i=e.Image.filters,r=e.util.createClass;i.Contrast=r(i.BaseFilter,{type:"Contrast",fragmentSource:"precision highp float;\nuniform sampler2D uTexture;\nuniform float uContrast;\nvarying vec2 vTexCoord;\nvoid main() {\nvec4 color = texture2D(uTexture, vTexCoord);\nfloat contrastF = 1.015 * (uContrast + 1.0) / (1.0 * (1.015 - uContrast));\ncolor.rgb = contrastF * (color.rgb - 0.5) + 0.5;\ngl_FragColor = color;\n}",contrast:0,mainParameter:"contrast",applyTo2d:function(t){if(0!==this.contrast){var e,i=t.imageData.data,r=i.length,n=Math.floor(255*this.contrast),o=259*(n+255)/(255*(259-n));for(e=0;e1&&(e=1/this.aspectRatio):this.aspectRatio<1&&(e=this.aspectRatio),t=e*this.blur*.12,this.horizontal?i[0]=t:i[1]=t,i}}),i.Blur.fromObject=e.Image.filters.BaseFilter.fromObject}(t),function(t){var e=t.fabric||(t.fabric={}),i=e.Image.filters,r=e.util.createClass;i.Gamma=r(i.BaseFilter,{type:"Gamma",fragmentSource:"precision highp float;\nuniform sampler2D uTexture;\nuniform vec3 
uGamma;\nvarying vec2 vTexCoord;\nvoid main() {\nvec4 color = texture2D(uTexture, vTexCoord);\nvec3 correction = (1.0 / uGamma);\ncolor.r = pow(color.r, correction.r);\ncolor.g = pow(color.g, correction.g);\ncolor.b = pow(color.b, correction.b);\ngl_FragColor = color;\ngl_FragColor.rgb *= color.a;\n}",gamma:[1,1,1],mainParameter:"gamma",initialize:function(t){this.gamma=[1,1,1],i.BaseFilter.prototype.initialize.call(this,t)},applyTo2d:function(t){var e,i=t.imageData.data,r=this.gamma,n=i.length,o=1/r[0],s=1/r[1],a=1/r[2];for(this.rVals||(this.rVals=new Uint8Array(256),this.gVals=new Uint8Array(256),this.bVals=new Uint8Array(256)),e=0,n=256;e'},_getCacheCanvasDimensions:function(){var t=this.callSuper("_getCacheCanvasDimensions"),e=this.fontSize;return t.width+=e*t.zoomX,t.height+=e*t.zoomY,t},_render:function(t){var e=this.path;e&&!e.isNotVisible()&&e._render(t),this._setTextStyles(t),this._renderTextLinesBackground(t),this._renderTextDecoration(t,"underline"),this._renderText(t),this._renderTextDecoration(t,"overline"),this._renderTextDecoration(t,"linethrough")},_renderText:function(t){"stroke"===this.paintFirst?(this._renderTextStroke(t),this._renderTextFill(t)):(this._renderTextFill(t),this._renderTextStroke(t))},_setTextStyles:function(t,e,i){t.textBaseline="alphabetic",t.font=this._getFontDeclaration(e,i)},calcTextWidth:function(){for(var t=this.getLineWidth(0),e=1,i=this._textLines.length;et&&(t=r)}return t},_renderTextLine:function(t,e,i,r,n,o){this._renderChars(t,e,i,r,n,o)},_renderTextLinesBackground:function(t){if(this.textBackgroundColor||this.styleHas("textBackgroundColor")){for(var e,i,r,n,o,s,a,h=t.fillStyle,c=this._getLeftOffset(),l=this._getTopOffset(),u=0,f=0,d=this.path,g=0,p=this._textLines.length;g=0:ia?u%=a:u<0&&(u+=a),this._setGraphemeOnPath(u,o,s),u+=o.kernedWidth}return{width:h,numOfSpaces:0}},_setGraphemeOnPath:function(t,i,r){var n=t+i.kernedWidth/2,o=this.path,s=e.util.getPointOnPath(o.path,n,o.segmentsInfo);i.renderLeft=s.x-r.x,i.renderTop=s.y-r.y,i.angle=s.angle+("right"===this.pathSide?Math.PI:0)},_getGraphemeBox:function(t,e,i,r,n){var o,s=this.getCompleteStyleDeclaration(e,i),a=r?this.getCompleteStyleDeclaration(e,i-1):{},h=this._measureChar(t,s,r,a),c=h.kernedWidth,l=h.width;0!==this.charSpacing&&(l+=o=this._getWidthOfCharSpacing(),c+=o);var u={width:l,left:0,height:s.fontSize,kernedWidth:c,deltaY:s.deltaY};if(i>0&&!n){var f=this.__charBounds[e][i-1];u.left=f.left+f.width+h.kernedWidth-h.width}return u},getHeightOfLine:function(t){if(this.__lineHeights[t])return this.__lineHeights[t];for(var e=this._textLines[t],i=this.getHeightOfChar(t,0),r=1,n=e.length;r0){var P=y+o+u;"rtl"===this.direction&&(P=this.width-P-f),c&&m&&(t.fillStyle=m,t.fillRect(P,l+C*r+s,f,this.fontSize/15)),u=d.left,f=d.width,c=g,m=v,r=n,s=a}else f+=d.kernedWidth;P=y+o+u;"rtl"===this.direction&&(P=this.width-P-f),t.fillStyle=v,g&&v&&t.fillRect(P,l+C*r+s,f-x,this.fontSize/15),_+=i}else _+=i;this._removeShadow(t)}},_getFontDeclaration:function(t,i){var r=t||this,n=this.fontFamily,o=e.Text.genericFonts.indexOf(n.toLowerCase())>-1,s=void 0===n||n.indexOf("'")>-1||n.indexOf(",")>-1||n.indexOf('"')>-1||o?r.fontFamily:'"'+r.fontFamily+'"';return[e.isLikelyNode?r.fontWeight:r.fontStyle,e.isLikelyNode?r.fontStyle:r.fontWeight,i?this.CACHE_FONT_SIZE+"px":r.fontSize+"px",s].join(" 
")},render:function(t){this.visible&&(this.canvas&&this.canvas.skipOffscreen&&!this.group&&!this.isOnScreen()||(this._shouldClearDimensionCache()&&this.initDimensions(),this.callSuper("render",t)))},_splitTextIntoLines:function(t){for(var i=t.split(this._reNewline),r=new Array(i.length),n=["\n"],o=[],s=0;s-1&&(t.underline=!0),t.textDecoration.indexOf("line-through")>-1&&(t.linethrough=!0),t.textDecoration.indexOf("overline")>-1&&(t.overline=!0),delete t.textDecoration)}T.IText=T.util.createClass(T.Text,T.Observable,{type:"i-text",selectionStart:0,selectionEnd:0,selectionColor:"rgba(17,119,255,0.3)",isEditing:!1,editable:!0,editingBorderColor:"rgba(102,153,255,0.25)",cursorWidth:2,cursorColor:"",cursorDelay:1e3,cursorDuration:600,caching:!0,hiddenTextareaContainer:null,_reSpace:/\s|\n/,_currentCursorOpacity:0,_selectionDirection:null,_abortCursorAnimation:!1,__widthOfSpace:[],inCompositionMode:!1,initialize:function(t,e){this.callSuper("initialize",t,e),this.initBehavior()},setSelectionStart:function(t){t=Math.max(t,0),this._updateAndFire("selectionStart",t)},setSelectionEnd:function(t){t=Math.min(t,this.text.length),this._updateAndFire("selectionEnd",t)},_updateAndFire:function(t,e){this[t]!==e&&(this._fireSelectionChanged(),this[t]=e),this._updateTextarea()},_fireSelectionChanged:function(){this.fire("selection:changed"),this.canvas&&this.canvas.fire("text:selection:changed",{target:this})},initDimensions:function(){this.isEditing&&this.initDelayedCursor(),this.clearContextTop(),this.callSuper("initDimensions")},render:function(t){this.clearContextTop(),this.callSuper("render",t),this.cursorOffsetCache={},this.renderCursorOrSelection()},_render:function(t){this.callSuper("_render",t)},clearContextTop:function(t){if(this.isEditing&&this.canvas&&this.canvas.contextTop){var e=this.canvas.contextTop,i=this.canvas.viewportTransform;e.save(),e.transform(i[0],i[1],i[2],i[3],i[4],i[5]),this.transform(e),this._clearTextArea(e),t||e.restore()}},renderCursorOrSelection:function(){if(this.isEditing&&this.canvas&&this.canvas.contextTop){var t=this._getCursorBoundaries(),e=this.canvas.contextTop;this.clearContextTop(!0),this.selectionStart===this.selectionEnd?this.renderCursor(t,e):this.renderSelection(t,e),e.restore()}},_clearTextArea:function(t){var e=this.width+4,i=this.height+4;t.clearRect(-e/2,-i/2,e,i)},_getCursorBoundaries:function(t){void 0===t&&(t=this.selectionStart);var e=this._getLeftOffset(),i=this._getTopOffset(),r=this._getCursorBoundariesOffsets(t);return{left:e,top:i,leftOffset:r.left,topOffset:r.top}},_getCursorBoundariesOffsets:function(t){if(this.cursorOffsetCache&&"top"in this.cursorOffsetCache)return this.cursorOffsetCache;var e,i,r,n,o=0,s=0,a=this.get2DCursorLocation(t);r=a.charIndex,i=a.lineIndex;for(var h=0;h0?s:0)},"rtl"===this.direction&&(n.left*=-1),this.cursorOffsetCache=n,this.cursorOffsetCache},renderCursor:function(t,e){var i=this.get2DCursorLocation(),r=i.lineIndex,n=i.charIndex>0?i.charIndex-1:0,o=this.getValueOfPropertyAt(r,n,"fontSize"),s=this.scaleX*this.canvas.getZoom(),a=this.cursorWidth/s,h=t.topOffset,c=this.getValueOfPropertyAt(r,n,"deltaY");h+=(1-this._fontSizeFraction)*this.getHeightOfLine(r)/this.lineHeight-o*(1-this._fontSizeFraction),this.inCompositionMode&&this.renderSelection(t,e),e.fillStyle=this.cursorColor||this.getValueOfPropertyAt(r,n,"fill"),e.globalAlpha=this.__isMousedown?1:this._currentCursorOpacity,e.fillRect(t.left+t.leftOffset-a/2,h+t.top+c,a,o)},renderSelection:function(t,e){for(var 
i=this.inCompositionMode?this.hiddenTextarea.selectionStart:this.selectionStart,r=this.inCompositionMode?this.hiddenTextarea.selectionEnd:this.selectionEnd,n=-1!==this.textAlign.indexOf("justify"),o=this.get2DCursorLocation(i),s=this.get2DCursorLocation(r),a=o.lineIndex,h=s.lineIndex,c=o.charIndex<0?0:o.charIndex,l=s.charIndex<0?0:s.charIndex,u=a;u<=h;u++){var f,d=this._getLineLeftOffset(u)||0,g=this.getHeightOfLine(u),p=0,v=0;if(u===a&&(p=this.__charBounds[a][c].left),u>=a&&u1)&&(g/=this.lineHeight);var y=t.left+d+p,_=v-p,b=g,x=0;this.inCompositionMode?(e.fillStyle=this.compositionColor||"black",b=1,x=g):e.fillStyle=this.selectionColor,"rtl"===this.direction&&(y=this.width-y-_),e.fillRect(y,t.top+t.topOffset+x,_,b),t.topOffset+=f}},getCurrentCharFontSize:function(){var t=this._getCurrentCharIndex();return this.getValueOfPropertyAt(t.l,t.c,"fontSize")},getCurrentCharColor:function(){var t=this._getCurrentCharIndex();return this.getValueOfPropertyAt(t.l,t.c,"fill")},_getCurrentCharIndex:function(){var t=this.get2DCursorLocation(this.selectionStart,!0),e=t.charIndex>0?t.charIndex-1:0;return{l:t.lineIndex,c:e}}}),T.IText.fromObject=function(e,i){if(t(e),e.styles)for(var r in e.styles)for(var n in e.styles[r])t(e.styles[r][n]);T.Object._fromObject("IText",e,i,"text")}}(),S=T.util.object.clone,T.util.object.extend(T.IText.prototype,{initBehavior:function(){this.initAddedHandler(),this.initRemovedHandler(),this.initCursorSelectionHandlers(),this.initDoubleClickSimulation(),this.mouseMoveHandler=this.mouseMoveHandler.bind(this)},onDeselect:function(){this.isEditing&&this.exitEditing(),this.selected=!1},initAddedHandler:function(){var t=this;this.on("added",(function(){var e=t.canvas;e&&(e._hasITextHandlers||(e._hasITextHandlers=!0,t._initCanvasHandlers(e)),e._iTextInstances=e._iTextInstances||[],e._iTextInstances.push(t))}))},initRemovedHandler:function(){var t=this;this.on("removed",(function(){var e=t.canvas;e&&(e._iTextInstances=e._iTextInstances||[],T.util.removeFromArray(e._iTextInstances,t),0===e._iTextInstances.length&&(e._hasITextHandlers=!1,t._removeCanvasHandlers(e)))}))},_initCanvasHandlers:function(t){t._mouseUpITextHandler=function(){t._iTextInstances&&t._iTextInstances.forEach((function(t){t.__isMousedown=!1}))},t.on("mouse:up",t._mouseUpITextHandler)},_removeCanvasHandlers:function(t){t.off("mouse:up",t._mouseUpITextHandler)},_tick:function(){this._currentTickState=this._animateCursor(this,1,this.cursorDuration,"_onTickComplete")},_animateCursor:function(t,e,i,r){var n;return n={isAborted:!1,abort:function(){this.isAborted=!0}},t.animate("_currentCursorOpacity",e,{duration:i,onComplete:function(){n.isAborted||t[r]()},onChange:function(){t.canvas&&t.selectionStart===t.selectionEnd&&t.renderCursorOrSelection()},abort:function(){return n.isAborted}}),n},_onTickComplete:function(){var t=this;this._cursorTimeout1&&clearTimeout(this._cursorTimeout1),this._cursorTimeout1=setTimeout((function(){t._currentTickCompleteState=t._animateCursor(t,0,this.cursorDuration/2,"_tick")}),100)},initDelayedCursor:function(t){var e=this,i=t?0:this.cursorDelay;this.abortCursorAnimation(),this._currentCursorOpacity=1,this._cursorTimeout2=setTimeout((function(){e._tick()}),i)},abortCursorAnimation:function(){var 
t=this._currentTickState||this._currentTickCompleteState,e=this.canvas;this._currentTickState&&this._currentTickState.abort(),this._currentTickCompleteState&&this._currentTickCompleteState.abort(),clearTimeout(this._cursorTimeout1),clearTimeout(this._cursorTimeout2),this._currentCursorOpacity=0,t&&e&&e.clearContext(e.contextTop||e.contextContainer)},selectAll:function(){return this.selectionStart=0,this.selectionEnd=this._text.length,this._fireSelectionChanged(),this._updateTextarea(),this},getSelectedText:function(){return this._text.slice(this.selectionStart,this.selectionEnd).join("")},findWordBoundaryLeft:function(t){var e=0,i=t-1;if(this._reSpace.test(this._text[i]))for(;this._reSpace.test(this._text[i]);)e++,i--;for(;/\S/.test(this._text[i])&&i>-1;)e++,i--;return t-e},findWordBoundaryRight:function(t){var e=0,i=t;if(this._reSpace.test(this._text[i]))for(;this._reSpace.test(this._text[i]);)e++,i++;for(;/\S/.test(this._text[i])&&i-1;)e++,i--;return t-e},findLineBoundaryRight:function(t){for(var e=0,i=t;!/\n/.test(this._text[i])&&i0&&rthis.__selectionStartOnMouseDown?(this.selectionStart=this.__selectionStartOnMouseDown,this.selectionEnd=e):(this.selectionStart=e,this.selectionEnd=this.__selectionStartOnMouseDown),this.selectionStart===i&&this.selectionEnd===r||(this.restartCursorIfNeeded(),this._fireSelectionChanged(),this._updateTextarea(),this.renderCursorOrSelection()))}},_setEditingProps:function(){this.hoverCursor="text",this.canvas&&(this.canvas.defaultCursor=this.canvas.moveCursor="text"),this.borderColor=this.editingBorderColor,this.hasControls=this.selectable=!1,this.lockMovementX=this.lockMovementY=!0},fromStringToGraphemeSelection:function(t,e,i){var r=i.slice(0,t),n=T.util.string.graphemeSplit(r).length;if(t===e)return{selectionStart:n,selectionEnd:n};var o=i.slice(t,e);return{selectionStart:n,selectionEnd:n+T.util.string.graphemeSplit(o).length}},fromGraphemeToStringSelection:function(t,e,i){var r=i.slice(0,t).join("").length;return t===e?{selectionStart:r,selectionEnd:r}:{selectionStart:r,selectionEnd:r+i.slice(t,e).join("").length}},_updateTextarea:function(){if(this.cursorOffsetCache={},this.hiddenTextarea){if(!this.inCompositionMode){var t=this.fromGraphemeToStringSelection(this.selectionStart,this.selectionEnd,this._text);this.hiddenTextarea.selectionStart=t.selectionStart,this.hiddenTextarea.selectionEnd=t.selectionEnd}this.updateTextareaPosition()}},updateFromTextArea:function(){if(this.hiddenTextarea){this.cursorOffsetCache={},this.text=this.hiddenTextarea.value,this._shouldClearDimensionCache()&&(this.initDimensions(),this.setCoords());var t=this.fromStringToGraphemeSelection(this.hiddenTextarea.selectionStart,this.hiddenTextarea.selectionEnd,this.hiddenTextarea.value);this.selectionEnd=this.selectionStart=t.selectionEnd,this.inCompositionMode||(this.selectionStart=t.selectionStart),this.updateTextareaPosition()}},updateTextareaPosition:function(){if(this.selectionStart===this.selectionEnd){var t=this._calcTextareaPosition();this.hiddenTextarea.style.left=t.left,this.hiddenTextarea.style.top=t.top}},_calcTextareaPosition:function(){if(!this.canvas)return{x:1,y:1};var 
t=this.inCompositionMode?this.compositionStart:this.selectionStart,e=this._getCursorBoundaries(t),i=this.get2DCursorLocation(t),r=i.lineIndex,n=i.charIndex,o=this.getValueOfPropertyAt(r,n,"fontSize")*this.lineHeight,s=e.leftOffset,a=this.calcTransformMatrix(),h={x:e.left+s,y:e.top+e.topOffset+o},c=this.canvas.getRetinaScaling(),l=this.canvas.upperCanvasEl,u=l.width/c,f=l.height/c,d=u-o,g=f-o,p=l.clientWidth/u,v=l.clientHeight/f;return h=T.util.transformPoint(h,a),(h=T.util.transformPoint(h,this.canvas.viewportTransform)).x*=p,h.y*=v,h.x<0&&(h.x=0),h.x>d&&(h.x=d),h.y<0&&(h.y=0),h.y>g&&(h.y=g),h.x+=this.canvas._offset.left,h.y+=this.canvas._offset.top,{left:h.x+"px",top:h.y+"px",fontSize:o+"px",charHeight:o}},_saveEditingProps:function(){this._savedProps={hasControls:this.hasControls,borderColor:this.borderColor,lockMovementX:this.lockMovementX,lockMovementY:this.lockMovementY,hoverCursor:this.hoverCursor,selectable:this.selectable,defaultCursor:this.canvas&&this.canvas.defaultCursor,moveCursor:this.canvas&&this.canvas.moveCursor}},_restoreEditingProps:function(){this._savedProps&&(this.hoverCursor=this._savedProps.hoverCursor,this.hasControls=this._savedProps.hasControls,this.borderColor=this._savedProps.borderColor,this.selectable=this._savedProps.selectable,this.lockMovementX=this._savedProps.lockMovementX,this.lockMovementY=this._savedProps.lockMovementY,this.canvas&&(this.canvas.defaultCursor=this._savedProps.defaultCursor,this.canvas.moveCursor=this._savedProps.moveCursor))},exitEditing:function(){var t=this._textBeforeEdit!==this.text,e=this.hiddenTextarea;return this.selected=!1,this.isEditing=!1,this.selectionEnd=this.selectionStart,e&&(e.blur&&e.blur(),e.parentNode&&e.parentNode.removeChild(e)),this.hiddenTextarea=null,this.abortCursorAnimation(),this._restoreEditingProps(),this._currentCursorOpacity=0,this._shouldClearDimensionCache()&&(this.initDimensions(),this.setCoords()),this.fire("editing:exited"),t&&this.fire("modified"),this.canvas&&(this.canvas.off("mouse:move",this.mouseMoveHandler),this.canvas.fire("text:editing:exited",{target:this}),t&&this.canvas.fire("object:modified",{target:this})),this},_removeExtraneousStyles:function(){for(var t in this.styles)this._textLines[t]||delete this.styles[t]},removeStyleFromTo:function(t,e){var i,r,n=this.get2DCursorLocation(t,!0),o=this.get2DCursorLocation(e,!0),s=n.lineIndex,a=n.charIndex,h=o.lineIndex,c=o.charIndex;if(s!==h){if(this.styles[s])for(i=a;i=c&&(r[l-f]=r[u],delete r[u])}},shiftLineStyles:function(t,e){var i=S(this.styles);for(var r in this.styles){var n=parseInt(r,10);n>t&&(this.styles[n+e]=i[n],i[n-e]||delete this.styles[n])}},restartCursorIfNeeded:function(){this._currentTickState&&!this._currentTickState.isAborted&&this._currentTickCompleteState&&!this._currentTickCompleteState.isAborted||this.initDelayedCursor()},insertNewlineStyleObject:function(t,e,i,r){var n,o={},s=!1,a=this._unwrappedTextLines[t].length===e;for(var h in i||(i=1),this.shiftLineStyles(t,i),this.styles[t]&&(n=this.styles[t][0===e?e:e-1]),this.styles[t]){var c=parseInt(h,10);c>=e&&(s=!0,o[c-e]=this.styles[t][h],a&&0===e||delete this.styles[t][h])}var l=!1;for(s&&!a&&(this.styles[t+i]=o,l=!0),l&&i--;i>0;)r&&r[i-1]?this.styles[t+i]={0:S(r[i-1])}:n?this.styles[t+i]={0:S(n)}:delete this.styles[t+i],i--;this._forceClearCache=!0},insertCharStyleObject:function(t,e,i,r){this.styles||(this.styles={});var n=this.styles[t],o=n?S(n):{};for(var s in i||(i=1),o){var a=parseInt(s,10);a>=e&&(n[a+i]=o[a],o[a-i]||delete 
n[a])}if(this._forceClearCache=!0,r)for(;i--;)Object.keys(r[i]).length&&(this.styles[t]||(this.styles[t]={}),this.styles[t][e+i]=S(r[i]));else if(n)for(var h=n[e?e-1:1];h&&i--;)this.styles[t][e+i]=S(h)},insertNewStyleBlock:function(t,e,i){for(var r=this.get2DCursorLocation(e,!0),n=[0],o=0,s=0;s0&&(this.insertCharStyleObject(r.lineIndex,r.charIndex,n[0],i),i=i&&i.slice(n[0]+1)),o&&this.insertNewlineStyleObject(r.lineIndex,r.charIndex+n[0],o),s=1;s0?this.insertCharStyleObject(r.lineIndex+s,0,n[s],i):i&&(this.styles[r.lineIndex+s][0]=i[0]),i=i&&i.slice(n[s]+1);n[s]>0&&this.insertCharStyleObject(r.lineIndex+s,0,n[s],i)},setSelectionStartEndWithShift:function(t,e,i){i<=t?(e===t?this._selectionDirection="left":"right"===this._selectionDirection&&(this._selectionDirection="left",this.selectionEnd=t),this.selectionStart=i):i>t&&it?this.selectionStart=t:this.selectionStart<0&&(this.selectionStart=0),this.selectionEnd>t?this.selectionEnd=t:this.selectionEnd<0&&(this.selectionEnd=0)}}),T.util.object.extend(T.IText.prototype,{initDoubleClickSimulation:function(){this.__lastClickTime=+new Date,this.__lastLastClickTime=+new Date,this.__lastPointer={},this.on("mousedown",this.onMouseDown)},onMouseDown:function(t){if(this.canvas){this.__newClickTime=+new Date;var e=t.pointer;this.isTripleClick(e)&&(this.fire("tripleclick",t),this._stopEvent(t.e)),this.__lastLastClickTime=this.__lastClickTime,this.__lastClickTime=this.__newClickTime,this.__lastPointer=e,this.__lastIsEditing=this.isEditing,this.__lastSelected=this.selected}},isTripleClick:function(t){return this.__newClickTime-this.__lastClickTime<500&&this.__lastClickTime-this.__lastLastClickTime<500&&this.__lastPointer.x===t.x&&this.__lastPointer.y===t.y},_stopEvent:function(t){t.preventDefault&&t.preventDefault(),t.stopPropagation&&t.stopPropagation()},initCursorSelectionHandlers:function(){this.initMousedownHandler(),this.initMouseupHandler(),this.initClicks()},doubleClickHandler:function(t){this.isEditing&&this.selectWord(this.getSelectionStartFromPointer(t.e))},tripleClickHandler:function(t){this.isEditing&&this.selectLine(this.getSelectionStartFromPointer(t.e))},initClicks:function(){this.on("mousedblclick",this.doubleClickHandler),this.on("tripleclick",this.tripleClickHandler)},_mouseDownHandler:function(t){!this.canvas||!this.editable||t.e.button&&1!==t.e.button||(this.__isMousedown=!0,this.selected&&(this.inCompositionMode=!1,this.setCursorByClick(t.e)),this.isEditing&&(this.__selectionStartOnMouseDown=this.selectionStart,this.selectionStart===this.selectionEnd&&this.abortCursorAnimation(),this.renderCursorOrSelection()))},_mouseDownHandlerBefore:function(t){!this.canvas||!this.editable||t.e.button&&1!==t.e.button||(this.selected=this===this.canvas._activeObject)},initMousedownHandler:function(){this.on("mousedown",this._mouseDownHandler),this.on("mousedown:before",this._mouseDownHandlerBefore)},initMouseupHandler:function(){this.on("mouseup",this.mouseUpHandler)},mouseUpHandler:function(t){if(this.__isMousedown=!1,!(!this.editable||this.group||t.transform&&t.transform.actionPerformed||t.e.button&&1!==t.e.button)){if(this.canvas){var e=this.canvas._activeObject;if(e&&e!==this)return}this.__lastSelected&&!this.__corner?(this.selected=!1,this.__lastSelected=!1,this.enterEditing(t.e),this.selectionStart===this.selectionEnd?this.initDelayedCursor(!0):this.renderCursorOrSelection()):this.selected=!0}},setCursorByClick:function(t){var 
e=this.getSelectionStartFromPointer(t),i=this.selectionStart,r=this.selectionEnd;t.shiftKey?this.setSelectionStartEndWithShift(i,r,e):(this.selectionStart=e,this.selectionEnd=e),this.isEditing&&(this._fireSelectionChanged(),this._updateTextarea())},getSelectionStartFromPointer:function(t){for(var e,i=this.getLocalPointer(t),r=0,n=0,o=0,s=0,a=0,h=0,c=this._textLines.length;h0&&(s+=this._textLines[h-1].length+this.missingNewlineOffset(h-1));n=this._getLineLeftOffset(a)*this.scaleX,e=this._textLines[a],"rtl"===this.direction&&(i.x=this.width*this.scaleX-i.x+n);for(var l=0,u=e.length;lo||s<0?0:1);return this.flipX&&(a=n-a),a>this._text.length&&(a=this._text.length),a}}),T.util.object.extend(T.IText.prototype,{initHiddenTextarea:function(){this.hiddenTextarea=T.document.createElement("textarea"),this.hiddenTextarea.setAttribute("autocapitalize","off"),this.hiddenTextarea.setAttribute("autocorrect","off"),this.hiddenTextarea.setAttribute("autocomplete","off"),this.hiddenTextarea.setAttribute("spellcheck","false"),this.hiddenTextarea.setAttribute("data-fabric-hiddentextarea",""),this.hiddenTextarea.setAttribute("wrap","off");var t=this._calcTextareaPosition();this.hiddenTextarea.style.cssText="position: absolute; top: "+t.top+"; left: "+t.left+"; z-index: -999; opacity: 0; width: 1px; height: 1px; font-size: 1px; paddingーtop: "+t.fontSize+";",this.hiddenTextareaContainer?this.hiddenTextareaContainer.appendChild(this.hiddenTextarea):T.document.body.appendChild(this.hiddenTextarea),T.util.addListener(this.hiddenTextarea,"keydown",this.onKeyDown.bind(this)),T.util.addListener(this.hiddenTextarea,"keyup",this.onKeyUp.bind(this)),T.util.addListener(this.hiddenTextarea,"input",this.onInput.bind(this)),T.util.addListener(this.hiddenTextarea,"copy",this.copy.bind(this)),T.util.addListener(this.hiddenTextarea,"cut",this.copy.bind(this)),T.util.addListener(this.hiddenTextarea,"paste",this.paste.bind(this)),T.util.addListener(this.hiddenTextarea,"compositionstart",this.onCompositionStart.bind(this)),T.util.addListener(this.hiddenTextarea,"compositionupdate",this.onCompositionUpdate.bind(this)),T.util.addListener(this.hiddenTextarea,"compositionend",this.onCompositionEnd.bind(this)),!this._clickHandlerInitialized&&this.canvas&&(T.util.addListener(this.canvas.upperCanvasEl,"click",this.onClick.bind(this)),this._clickHandlerInitialized=!0)},keysMap:{9:"exitEditing",27:"exitEditing",33:"moveCursorUp",34:"moveCursorDown",35:"moveCursorRight",36:"moveCursorLeft",37:"moveCursorLeft",38:"moveCursorUp",39:"moveCursorRight",40:"moveCursorDown"},keysMapRtl:{9:"exitEditing",27:"exitEditing",33:"moveCursorUp",34:"moveCursorDown",35:"moveCursorLeft",36:"moveCursorRight",37:"moveCursorRight",38:"moveCursorUp",39:"moveCursorLeft",40:"moveCursorDown"},ctrlKeysMapUp:{67:"copy",88:"cut"},ctrlKeysMapDown:{65:"selectAll"},onClick:function(){this.hiddenTextarea&&this.hiddenTextarea.focus()},onKeyDown:function(t){if(this.isEditing){var e="rtl"===this.direction?this.keysMapRtl:this.keysMap;if(t.keyCode in e)this[e[t.keyCode]](t);else{if(!(t.keyCode in this.ctrlKeysMapDown)||!t.ctrlKey&&!t.metaKey)return;this[this.ctrlKeysMapDown[t.keyCode]](t)}t.stopImmediatePropagation(),t.preventDefault(),t.keyCode>=33&&t.keyCode<=40?(this.inCompositionMode=!1,this.clearContextTop(),this.renderCursorOrSelection()):this.canvas&&this.canvas.requestRenderAll()}},onKeyUp:function(t){!this.isEditing||this._copyDone||this.inCompositionMode?this._copyDone=!1:t.keyCode in 
this.ctrlKeysMapUp&&(t.ctrlKey||t.metaKey)&&(this[this.ctrlKeysMapUp[t.keyCode]](t),t.stopImmediatePropagation(),t.preventDefault(),this.canvas&&this.canvas.requestRenderAll())},onInput:function(t){var e=this.fromPaste;if(this.fromPaste=!1,t&&t.stopPropagation(),this.isEditing){var i,r,n,o,s,a=this._splitTextIntoLines(this.hiddenTextarea.value).graphemeText,h=this._text.length,c=a.length,l=c-h,u=this.selectionStart,f=this.selectionEnd,d=u!==f;if(""===this.hiddenTextarea.value)return this.styles={},this.updateFromTextArea(),this.fire("changed"),void(this.canvas&&(this.canvas.fire("text:changed",{target:this}),this.canvas.requestRenderAll()));var g=this.fromStringToGraphemeSelection(this.hiddenTextarea.selectionStart,this.hiddenTextarea.selectionEnd,this.hiddenTextarea.value),p=u>g.selectionStart;d?(i=this._text.slice(u,f),l+=f-u):c0&&(r+=(i=this.__charBounds[t][e-1]).left+i.width),r},getDownCursorOffset:function(t,e){var i=this._getSelectionForOffset(t,e),r=this.get2DCursorLocation(i),n=r.lineIndex;if(n===this._textLines.length-1||t.metaKey||34===t.keyCode)return this._text.length-i;var o=r.charIndex,s=this._getWidthBeforeCursor(n,o),a=this._getIndexOnLine(n+1,s);return this._textLines[n].slice(o).length+a+1+this.missingNewlineOffset(n)},_getSelectionForOffset:function(t,e){return t.shiftKey&&this.selectionStart!==this.selectionEnd&&e?this.selectionEnd:this.selectionStart},getUpCursorOffset:function(t,e){var i=this._getSelectionForOffset(t,e),r=this.get2DCursorLocation(i),n=r.lineIndex;if(0===n||t.metaKey||33===t.keyCode)return-i;var o=r.charIndex,s=this._getWidthBeforeCursor(n,o),a=this._getIndexOnLine(n-1,s),h=this._textLines[n].slice(0,o),c=this.missingNewlineOffset(n-1);return-this._textLines[n-1].length+a-h.length+(1-c)},_getIndexOnLine:function(t,e){for(var i,r,n=this._textLines[t],o=this._getLineLeftOffset(t),s=0,a=0,h=n.length;ae){r=!0;var c=o-i,l=o,u=Math.abs(c-e);s=Math.abs(l-e)=this._text.length&&this.selectionEnd>=this._text.length||this._moveCursorUpOrDown("Down",t)},moveCursorUp:function(t){0===this.selectionStart&&0===this.selectionEnd||this._moveCursorUpOrDown("Up",t)},_moveCursorUpOrDown:function(t,e){var i=this["get"+t+"CursorOffset"](e,"right"===this._selectionDirection);e.shiftKey?this.moveCursorWithShift(i):this.moveCursorWithoutShift(i),0!==i&&(this.setSelectionInBoundaries(),this.abortCursorAnimation(),this._currentCursorOpacity=1,this.initDelayedCursor(),this._fireSelectionChanged(),this._updateTextarea())},moveCursorWithShift:function(t){var e="left"===this._selectionDirection?this.selectionStart+t:this.selectionEnd+t;return this.setSelectionStartEndWithShift(this.selectionStart,this.selectionEnd,e),0!==t},moveCursorWithoutShift:function(t){return t<0?(this.selectionStart+=t,this.selectionEnd=this.selectionStart):(this.selectionEnd+=t,this.selectionStart=this.selectionEnd),0!==t},moveCursorLeft:function(t){0===this.selectionStart&&0===this.selectionEnd||this._moveCursorLeftOrRight("Left",t)},_move:function(t,e,i){var r;if(t.altKey)r=this["findWordBoundary"+i](this[e]);else{if(!t.metaKey&&35!==t.keyCode&&36!==t.keyCode)return this[e]+="Left"===i?-1:1,!0;r=this["findLineBoundary"+i](this[e])}if(void 0!==typeof r&&this[e]!==r)return this[e]=r,!0},_moveLeft:function(t,e){return this._move(t,e,"Left")},_moveRight:function(t,e){return this._move(t,e,"Right")},moveCursorLeftWithoutShift:function(t){var e=!0;return 
this._selectionDirection="left",this.selectionEnd===this.selectionStart&&0!==this.selectionStart&&(e=this._moveLeft(t,"selectionStart")),this.selectionEnd=this.selectionStart,e},moveCursorLeftWithShift:function(t){return"right"===this._selectionDirection&&this.selectionStart!==this.selectionEnd?this._moveLeft(t,"selectionEnd"):0!==this.selectionStart?(this._selectionDirection="left",this._moveLeft(t,"selectionStart")):void 0},moveCursorRight:function(t){this.selectionStart>=this._text.length&&this.selectionEnd>=this._text.length||this._moveCursorLeftOrRight("Right",t)},_moveCursorLeftOrRight:function(t,e){var i="moveCursor"+t+"With";this._currentCursorOpacity=1,e.shiftKey?i+="Shift":i+="outShift",this[i](e)&&(this.abortCursorAnimation(),this.initDelayedCursor(),this._fireSelectionChanged(),this._updateTextarea())},moveCursorRightWithShift:function(t){return"left"===this._selectionDirection&&this.selectionStart!==this.selectionEnd?this._moveRight(t,"selectionStart"):this.selectionEnd!==this._text.length?(this._selectionDirection="right",this._moveRight(t,"selectionEnd")):void 0},moveCursorRightWithoutShift:function(t){var e=!0;return this._selectionDirection="right",this.selectionStart===this.selectionEnd?(e=this._moveRight(t,"selectionStart"),this.selectionEnd=this.selectionStart):this.selectionStart=this.selectionEnd,e},removeChars:function(t,e){void 0===e&&(e=t+1),this.removeStyleFromTo(t,e),this._text.splice(t,e-t),this.text=this._text.join(""),this.set("dirty",!0),this._shouldClearDimensionCache()&&(this.initDimensions(),this.setCoords()),this._removeExtraneousStyles()},insertChars:function(t,e,i,r){void 0===r&&(r=i),r>i&&this.removeStyleFromTo(i,r);var n=T.util.string.graphemeSplit(t);this.insertNewStyleBlock(n,i,e),this._text=[].concat(this._text.slice(0,i),n,this._text.slice(r)),this.text=this._text.join(""),this.set("dirty",!0),this._shouldClearDimensionCache()&&(this.initDimensions(),this.setCoords()),this._removeExtraneousStyles()}}),function(){var t=T.util.toFixed,e=/ +/g;T.util.object.extend(T.Text.prototype,{_toSVG:function(){var t=this._getSVGLeftTopOffsets(),e=this._getSVGTextAndBg(t.textTop,t.textLeft);return this._wrapSVGTextAndBg(e)},toSVG:function(t){return this._createBaseSVGMarkup(this._toSVG(),{reviver:t,noStyle:!0,withShadow:!0})},_getSVGLeftTopOffsets:function(){return{textLeft:-this.width/2,textTop:-this.height/2,lineTop:this.getHeightOfLine(0)}},_wrapSVGTextAndBg:function(t){var e=this.getSvgTextDecoration(this);return[t.textBgRects.join(""),'\t\t",t.textSpans.join(""),"\n"]},_getSVGTextAndBg:function(t,e){var i,r=[],n=[],o=t;this._setSVGBg(n);for(var s=0,a=this._textLines.length;s",T.util.string.escapeXml(i),""].join("")},_setSVGTextLineText:function(t,e,i,r){var n,o,s,a,h,c=this.getHeightOfLine(e),l=-1!==this.textAlign.indexOf("justify"),u="",f=0,d=this._textLines[e];r+=c*(1-this._fontSizeFraction)/this.lineHeight;for(var g=0,p=d.length-1;g<=p;g++)h=g===p||this.charSpacing,u+=d[g],s=this.__charBounds[e][g],0===f?(i+=s.kernedWidth-s.width,f+=s.width):f+=s.kernedWidth,l&&!h&&this._reSpaceAndTab.test(d[g])&&(h=!0),h||(n=n||this.getCompleteStyleDeclaration(e,g),o=this.getCompleteStyleDeclaration(e,g+1),h=this._hasStyleChangedForSvg(n,o)),h&&(a=this._getStyleDeclaration(e,g)||{},t.push(this._createTextCharSpan(u,a,i,r)),u="",n=o,i+=f,f=0)},_pushTextBgRect:function(e,i,r,n,o,s){var a=T.Object.NUM_FRACTION_DIGITS;e.push("\t\t\n')},_setSVGTextLineBg:function(t,e,i,r){for(var 
n,o,s=this._textLines[e],a=this.getHeightOfLine(e)/this.lineHeight,h=0,c=0,l=this.getValueOfPropertyAt(e,0,"textBackgroundColor"),u=0,f=s.length;uthis.width&&this._set("width",this.dynamicMinWidth),-1!==this.textAlign.indexOf("justify")&&this.enlargeSpaces(),this.height=this.calcTextHeight(),this.saveState({propertySet:"_dimensionAffectingProps"}))},_generateStyleMap:function(t){for(var e=0,i=0,r=0,n={},o=0;o0?(i=0,r++,e++):!this.splitByGrapheme&&this._reSpaceAndTab.test(t.graphemeText[r])&&o>0&&(i++,r++),n[o]={line:e,offset:i},r+=t.graphemeLines[o].length,i+=t.graphemeLines[o].length;return n},styleHas:function(t,i){if(this._styleMap&&!this.isWrapping){var r=this._styleMap[i];r&&(i=r.line)}return e.Text.prototype.styleHas.call(this,t,i)},isEmptyStyles:function(t){if(!this.styles)return!0;var e,i,r=0,n=!1,o=this._styleMap[t],s=this._styleMap[t+1];for(var a in o&&(t=o.line,r=o.offset),s&&(n=s.line===t,e=s.offset),i=void 0===t?this.styles:{line:this.styles[t]})for(var h in i[a])if(h>=r&&(!n||hr&&!v?(a.push(h),h=[],o=d,v=!0):o+=m,v||s||h.push(f),h=h.concat(l),g=s?0:this._measureWord([f],i,u),u++,v=!1,d>p&&(p=d);return y&&a.push(h),p+n>this.dynamicMinWidth&&(this.dynamicMinWidth=p-m+n),a},isEndOfWrapping:function(t){return!this._styleMap[t+1]||this._styleMap[t+1].line!==this._styleMap[t].line},missingNewlineOffset:function(t){return this.splitByGrapheme?this.isEndOfWrapping(t)?1:0:1},_splitTextIntoLines:function(t){for(var i=e.Text.prototype._splitTextIntoLines.call(this,t),r=this._wrapText(i.lines,this.width),n=new Array(r.length),o=0;o","",e.toSVG(t.reviver),"",""];return e._objects[0].fill=i,r.join("\n")}return""},_createBaseSVGMarkup:function(t,e){var i=this.getEraser();if(i){var n=this.eraserToSVG(e);this.clipPath=null;var o=r.call(this,t,e);return this.clipPath=i,[n,o.replace(">",'mask="url(#'+i.clipPathId+')" >')].join("\n")}return r.call(this,t,e)}});var n=T.Group.prototype._restoreObjectsState,o=T.Group.prototype.toObject,s=T.Group.prototype._getBounds;T.util.object.extend(T.Group.prototype,{_getBounds:function(t,e,i){if(this.eraser)return this.width=this._objects[0].width,void(this.height=this._objects[0].height);s.call(this,t,e,i)},_addEraserPathToObjects:function(t){this._objects.forEach((function(e){T.EraserBrush.prototype._addPathToObjectEraser.call(T.EraserBrush.prototype,e,t)}))},applyEraserToObjects:function(){var t=this;if(this.getEraser()){var e=t.calcTransformMatrix();t.getEraser().clone((function(i){var r=i._objects[0].clipPath;t.clipPath=r||void 0,i.getObjects("path").forEach((function(i){var n=T.util.multiplyTransformMatrices(e,i.calcTransformMatrix());T.util.applyTransformToObject(i,n),r?r.clone((function(r){T.EraserBrush.prototype.applyClipPathToPath.call(T.EraserBrush.prototype,i,r,e),t._addEraserPathToObjects(i)})):t._addEraserPathToObjects(i)}))}))}},_restoreObjectsState:function(){return!0===this.erasable&&this.applyEraserToObjects(),n.call(this)},toObject:function(t){return o.call(this,["eraser"].concat(t))}}),T.util.object.extend(T.Canvas.prototype,{isErasing:function(){return this.isDrawingMode&&this.freeDrawingBrush&&"eraser"===this.freeDrawingBrush.type&&this.freeDrawingBrush._isErasing},renderAll:function(){if(!this.contextTopDirty||this._groupSelector||this.isDrawingMode||(this.clearContext(this.contextTop),this.contextTopDirty=!1),!this.isErasing()){this.hasLostContext&&this.renderTopLayer(this.contextTop);var t=this.contextContainer;return 
this.renderCanvas(t,this._chooseObjectsToRender()),this}this.freeDrawingBrush._render()}}),T.EraserBrush=T.util.createClass(T.PencilBrush,{type:"eraser",_ready:!1,_drawOverlayOnTop:!1,_isErasing:!1,initialize:function(t){this.callSuper("initialize",t),this._renderBound=this._render.bind(this),this.render=this.render.bind(this)},hideObject:function(t){t&&(t._originalOpacity=t.opacity,t.set({opacity:0}))},restoreObjectVisibility:function(t){t&&t._originalOpacity&&(t.set({opacity:t._originalOpacity}),t._originalOpacity=void 0)},_isErasable:function(t){return!1!==t.erasable},prepareCanvasBackgroundForLayer:function(t){if("overlay"!==t){var e=this.canvas.backgroundImage,i="top"===t;e&&this._isErasable(e)===!i&&this.hideObject(e)}},prepareCanvasOverlayForLayer:function(t){var e=this.canvas,i=e.overlayImage,r=!!e.overlayColor;if(e.overlayColor&&"overlay"!==t&&(this.__overlayColor=e.overlayColor,delete e.overlayColor),"bottom"===t)return this.hideObject(i),!1;var n="top"===t,o=i&&!this._isErasable(i)||r;return i&&this._isErasable(i)===!n&&this.hideObject(i),o},restoreCanvasDrawables:function(){var t=this.canvas;this.__overlayColor&&(t.overlayColor=this.__overlayColor,delete this.__overlayColor),this.restoreObjectVisibility(t.backgroundImage),this.restoreObjectVisibility(t.overlayImage)},prepareCollectionTraversal:function(t){var e=this;t.forEachObject((function(t){t.forEachObject&&"deep"===t.erasable?e.prepareCollectionTraversal(t):t.erasable&&e.hideObject(t)}))},restoreCollectionTraversal:function(t){var e=this;t.forEachObject((function(t){t.forEachObject&&"deep"===t.erasable?e.restoreCollectionTraversal(t):e.restoreObjectVisibility(t)}))},prepareCanvasObjectsForLayer:function(t){"bottom"===t&&this.prepareCollectionTraversal(this.canvas)},restoreCanvasObjectsFromLayer:function(t){"bottom"===t&&this.restoreCollectionTraversal(this.canvas)},prepareCanvasForLayer:function(t){return this.prepareCanvasBackgroundForLayer(t),this.prepareCanvasObjectsForLayer(t),this.prepareCanvasOverlayForLayer(t)},restoreCanvasFromLayer:function(t){this.restoreCanvasDrawables(),this.restoreCanvasObjectsFromLayer(t)},renderBottomLayer:function(){var t=this.canvas;this.prepareCanvasForLayer("bottom"),t.renderCanvas(t.getContext(),t.getObjects().filter((function(t){return!t.erasable||t.forEachObject}))),this.restoreCanvasFromLayer("bottom")},renderTopLayer:function(){var t=this.canvas;this._drawOverlayOnTop=this.prepareCanvasForLayer("top"),t.renderCanvas(t.contextTop,t.getObjects()),this.callSuper("_render"),this.restoreCanvasFromLayer("top")},renderOverlay:function(){this.prepareCanvasForLayer("overlay");var t=this.canvas,e=t.contextTop;t._renderOverlay(e),this.restoreCanvasFromLayer("overlay")},_saveAndTransform:function(t){this.callSuper("_saveAndTransform",t),t.globalCompositeOperation="destination-out"},needsFullRender:function(){return this.callSuper("needsFullRender")||this._drawOverlayOnTop},onMouseDown:function(t,e){this.canvas._isMainEvent(e.e)&&(this._prepareForDrawing(t),this._captureDrawingPath(t),this._isErasing=!0,this.canvas.fire("erasing:start"),this._ready=!0,this._render())},_render:function(){this._ready&&(this.isRendering=1,this.renderBottomLayer(),this.renderTopLayer(),this.renderOverlay(),this.isRendering=0)},render:function(){return!!this._isErasing&&(this.isRendering?this.isRendering=T.util.requestAnimFrame(this._renderBound):this._render(),!0)},applyClipPathToPath:function(t,e,i){var 
r=t.calcTransformMatrix(),n=e.calcTransformMatrix(),o=T.util.multiplyTransformMatrices(T.util.invertTransform(r),i);return T.util.applyTransformToObject(e,T.util.multiplyTransformMatrices(o,n)),t.clipPath=e,t},clonePathWithClipPath:function(t,e,i){var r=e.calcTransformMatrix(),n=e.getClipPath(),o=this;t.clone((function(t){n.clone((function(e){i(o.applyClipPathToPath(t,e,r))}))}))},_addPathToObjectEraser:function(t,e){var i,r=this;if(t.forEachObject&&"deep"===t.erasable){var n=t._objects.filter((function(t){return t.erasable}));n.length>0&&t.clipPath?this.clonePathWithClipPath(e,t,(function(t){n.forEach((function(e){r._addPathToObjectEraser(e,t)}))})):n.length>0&&n.forEach((function(t){r._addPathToObjectEraser(t,e)}))}else{if(t.getEraser())i=t.clipPath;else{var o=t._getNonTransformedDimensions(),s=new T.Rect({fill:"rgb(0,0,0)",width:o.x,height:o.y,clipPath:t.clipPath,originX:"center",originY:"center"});i=new T.Group([s],{eraser:!0})}e.clone((function(e){e.globalCompositeOperation="destination-out";var n=T.util.multiplyTransformMatrices(T.util.invertTransform(t.calcTransformMatrix()),e.calcTransformMatrix());T.util.applyTransformToObject(e,n),i.addWithUpdate(e),t.set({clipPath:i,dirty:!0}),t.fire("erasing:end",{path:e}),t.group&&Array.isArray(r.__subTargets)&&r.__subTargets.push(t)}))}},applyEraserToCanvas:function(t){var e=this.canvas,i={};return["backgroundImage","overlayImage"].forEach((function(r){var n=e[r];n&&n.erasable&&(this._addPathToObjectEraser(n,t),i[r]=n)}),this),i},_finalizeAndAddPath:function(){var t=this.canvas.contextTop,e=this.canvas;t.closePath(),this.decimate&&(this._points=this.decimatePoints(this._points,this.decimate)),e.clearContext(e.contextTop),this._isErasing=!1;var i=this._points&&this._points.length>1?this.convertPointsToSVGPath(this._points):null;if(!i||this._isEmptySVGPath(i))return e.fire("erasing:end"),void e.requestRenderAll();var r=this.createPath(i);r.setCoords(),e.fire("before:path:created",{path:r});var n=this.applyEraserToCanvas(r),o=this;this.__subTargets=[];var s=[];e.forEachObject((function(t){t.erasable&&t.intersectsWithObject(r,!0,!0)&&(o._addPathToObjectEraser(t,r),s.push(t))})),e.fire("erasing:end",{path:r,targets:s,subTargets:this.__subTargets,drawables:n}),delete this.__subTargets,e.requestRenderAll(),r.setCoords(),this._resetShadow(),e.fire("path:created",{path:r})}})}()}(tt);var rt=tt.fabric;export{tt as default,rt as fabric}; +//# sourceMappingURL=/sm/4595deba885e6b95e012cf1ed9deed8a7b30edf3ae4c7d4bd5cc11d619b75c11.map \ No newline at end of file diff --git a/ComfyUI-Easy-Use/web_version/v2/assets/extensions-Wh9Wq1Mi.js b/ComfyUI-Easy-Use/web_version/v2/assets/extensions-Wh9Wq1Mi.js new file mode 100644 index 0000000000000000000000000000000000000000..d5d718e273832e358496a934580d970291b84252 --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v2/assets/extensions-Wh9Wq1Mi.js @@ -0,0 +1 @@ +var e,t,n,s,o,i,a,l,r,d,u,c,p,h,m,g=Object.defineProperty,f=(e,t,n)=>((e,t,n)=>t in e?g(e,t,{enumerable:!0,configurable:!0,writable:!0,value:n}):e[t]=n)(e,"symbol"!=typeof t?t+"":t,n);import{d as y,h as _}from"./vendor-DT1J-jWa.js";import{c as v}from"./lodash-CZi7izHi.js";let w=(null==(t=null==(e=window.comfyAPI)?void 0:e.app)?void 0:t.app)||null,b=(null==(s=null==(n=window.comfyAPI)?void 0:n.api)?void 0:s.api)||null,L=(null==(i=null==(o=window.comfyAPI)?void 0:o.ui)?void 0:i.$el)||null,E=(null==(l=null==(a=window.comfyAPI)?void 0:a.dialog)?void 0:l.ComfyDialog)||null,S=(null==(d=null==(r=window.comfyAPI)?void 0:r.widgets)?void 
0:d.ComfyWidgets)||null,C=(null==(c=null==(u=window.comfyAPI)?void 0:u.utils)?void 0:c.applyTextReplacements)||null,A=(null==(h=null==(p=window.comfyAPI)?void 0:p.groupNode)?void 0:h.GroupNodeConfig)||null;const k=(e,t=void 0)=>{var n,s;return e?null==(s=null==(n=null==w?void 0:w.ui)?void 0:n.settings)?void 0:s.getSettingValue(e,t):null};function I(e,t=null,n=void 0){try{let s=e?k(e,n):null;return void 0===s&&t&&(s=localStorage[e]),s}catch(s){return null}}function x(e,t=e=>{}){var n;const s=null==(n=w.ui.settings.settingsLookup)?void 0:n[e];s&&(s.onChange=e=>t(e))}async function N(e,t,n=null){var s,o;try{(null==(o=null==(s=null==w?void 0:w.ui)?void 0:s.settings)?void 0:o.setSettingValue)?w.ui.settings.setSettingValue(e,t):await b.storeSetting(e,t),n&&(localStorage[n]="object"==typeof t?JSON.stringify(t):t)}catch(i){}}function T(e){w.ui.settings.addSetting(e)}function O(e,t){if(e="number"==typeof e?e:e instanceof Date?e.getTime():parseInt(e),isNaN(e))return null;let n=new Date(e);(e=n.toString().split(/[\s\:]/g).slice(0,-2))[1]=["01","02","03","04","05","06","07","08","09","10","11","12"][n.getMonth()];let s={MM:1,dd:2,yyyy:3,hh:4,mm:5,ss:6};return t.replace(/([Mmdhs]|y{2})\1/g,(t=>e[s[t]]))}const D="comfyui-easyuse-",G="dark-theme",R="#236692",M={PIPE_LINE:"#7737AA",PIPE_LINE_SDXL:"#7737AA",INT:"#29699C",X_Y:"#38291f",XYPLOT:"#74DA5D",LORA_STACK:"#94dccd",CONTROL_NET_STACK:"#94dccd",FLOW_CONTROL:"#373780"},P=0x4000000000000,F=["loaders","latent","image","mask","sampling","_for_testing","advanced","utils","api"],U={ALWAYS:0,NEVER:2,BYPASS:4},B="easyuse_nodes_map",W=LGraphCanvas.node_colors.bgcolor,z={ColorPalette:{version:105,id:"obsidian",name:"Obsidian",colors:{node_slot:{CLIP:"#FFD500",CLIP_VISION:"#A8DADC",CLIP_VISION_OUTPUT:"#ad7452",CONDITIONING:"#FFA931",CONTROL_NET:"#6EE7B7",IMAGE:"#64B5F6",LATENT:"#FF9CF9",MASK:"#81C784",MODEL:"#B39DDB",STYLE_MODEL:"#C2FFAE",VAE:"#FF6E6E",TAESD:"#DCC274",PIPE_LINE:"#7737AA",PIPE_LINE_SDXL:"#7737AA",INT:"#29699C",X_Y:"#38291f",XYPLOT:"#74DA5D",LORA_STACK:"#94dccd",CONTROL_NET_STACK:"#94dccd"},litegraph_base:{BACKGROUND_IMAGE:"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABkCAIAAAD/gAIDAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAQBJREFUeNrs1rEKwjAUhlETUkj3vP9rdmr1Ysammk2w5wdxuLgcMHyptfawuZX4pJSWZTnfnu/lnIe/jNNxHHGNn//HNbbv+4dr6V+11uF527arU7+u63qfa/bnmh8sWLBgwYJlqRf8MEptXPBXJXa37BSl3ixYsGDBMliwFLyCV/DeLIMFCxYsWLBMwSt4Be/NggXLYMGCBUvBK3iNruC9WbBgwYJlsGApeAWv4L1ZBgsWLFiwYJmCV/AK3psFC5bBggULloJX8BpdwXuzYMGCBctgwVLwCl7Be7MMFixYsGDBsu8FH1FaSmExVfAxBa/gvVmwYMGCZbBg/W4vAQYA5tRF9QYlv/QAAAAASUVORK5CYII=",CLEAR_BACKGROUND_COLOR:"#222222",NODE_TITLE_COLOR:"#d4d4d8",NODE_SELECTED_TITLE_COLOR:"#ffffff",NODE_TEXT_SIZE:14,NODE_TEXT_COLOR:"#ffffff",NODE_SUBTEXT_SIZE:12,NODE_DEFAULT_COLOR:"#09090b",NODE_DEFAULT_BGCOLOR:"rgba(24,24,27,.9)",NODE_DEFAULT_BOXCOLOR:"rgba(255,255,255,.75)",NODE_DEFAULT_SHAPE:"box",NODE_BOX_OUTLINE_COLOR:R,DEFAULT_SHADOW_COLOR:"rgba(0,0,0,0)",DEFAULT_GROUP_FONT:24,WIDGET_BGCOLOR:"#242427",WIDGET_OUTLINE_COLOR:"#3f3f46",WIDGET_TEXT_COLOR:"#d4d4d8",WIDGET_SECONDARY_TEXT_COLOR:"#d4d4d8",LINK_COLOR:"#9A9",EVENT_LINK_COLOR:"#A86",CONNECTING_LINK_COLOR:"#AFA"},comfy_base:{"fg-color":"#fff","bg-color":"#09090b","comfy-menu-bg":"rgba(24,24,24,.9)","comfy-input-bg":"#262626","input-text":"#ddd","descrip-text":"#999","drag-text":"#ccc","error-text":"#ff4444","border-color":"#29292c","tr-even-bg-color":"rgba(28,28,28,.9)","tr-odd-bg-color":"rgba(19,19,19,.9)"}}},NODE_COLORS:{red:{color:"#af3535",bgcolor:W,groupcolor:"#A88"},brown:{color:"
#38291f",bgcolor:W,groupcolor:"#b06634"},green:{color:"#346434",bgcolor:W,groupcolor:"#8A8"},blue:{color:"#1f1f48",bgcolor:W,groupcolor:"#88A"},pale_blue:{color:"#006691",bgcolor:W,groupcolor:"#3f789e"},cyan:{color:"#008181",bgcolor:W,groupcolor:"#8AA"},purple:{color:"#422342",bgcolor:W,groupcolor:"#a1309b"},yellow:{color:"#c09430",bgcolor:W,groupcolor:"#b58b2a"},black:{color:"rgba(0,0,0,.8)",bgcolor:W,groupcolor:"#444"}}};let j=JSON.parse(JSON.stringify(z));delete j.NODE_COLORS,j.ColorPalette.id="obsidian_dark",j.ColorPalette.name="Obsidian Dark",j.ColorPalette.colors.litegraph_base.BACKGROUND_IMAGE="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABkCAIAAAD/gAIDAAAACXBIWXMAAAsTAAALEwEAmpwYAAAGlmlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPD94cGFja2V0IGJlZ2luPSLvu78iIGlkPSJXNU0wTXBDZWhpSHpyZVN6TlRjemtjOWQiPz4gPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iQWRvYmUgWE1QIENvcmUgOS4xLWMwMDEgNzkuMTQ2Mjg5OSwgMjAyMy8wNi8yNS0yMDowMTo1NSAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczpkYz0iaHR0cDovL3B1cmwub3JnL2RjL2VsZW1lbnRzLzEuMS8iIHhtbG5zOnBob3Rvc2hvcD0iaHR0cDovL25zLmFkb2JlLmNvbS9waG90b3Nob3AvMS4wLyIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0RXZ0PSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VFdmVudCMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIDI1LjEgKFdpbmRvd3MpIiB4bXA6Q3JlYXRlRGF0ZT0iMjAyMy0xMS0xM1QwMDoxODowMiswMTowMCIgeG1wOk1vZGlmeURhdGU9IjIwMjMtMTEtMTVUMDI6MDQ6NTkrMDE6MDAiIHhtcDpNZXRhZGF0YURhdGU9IjIwMjMtMTEtMTVUMDI6MDQ6NTkrMDE6MDAiIGRjOmZvcm1hdD0iaW1hZ2UvcG5nIiBwaG90b3Nob3A6Q29sb3JNb2RlPSIzIiB4bXBNTTpJbnN0YW5jZUlEPSJ4bXAuaWlkOmIyYzRhNjA5LWJmYTctYTg0MC1iOGFlLTk3MzE2ZjM1ZGIyNyIgeG1wTU06RG9jdW1lbnRJRD0iYWRvYmU6ZG9jaWQ6cGhvdG9zaG9wOjk0ZmNlZGU4LTE1MTctZmQ0MC04ZGU3LWYzOTgxM2E3ODk5ZiIgeG1wTU06T3JpZ2luYWxEb2N1bWVudElEPSJ4bXAuZGlkOjIzMWIxMGIwLWI0ZmItMDI0ZS1iMTJlLTMwNTMwM2NkMDdjOCI+IDx4bXBNTTpIaXN0b3J5PiA8cmRmOlNlcT4gPHJkZjpsaSBzdEV2dDphY3Rpb249ImNyZWF0ZWQiIHN0RXZ0Omluc3RhbmNlSUQ9InhtcC5paWQ6MjMxYjEwYjAtYjRmYi0wMjRlLWIxMmUtMzA1MzAzY2QwN2M4IiBzdEV2dDp3aGVuPSIyMDIzLTExLTEzVDAwOjE4OjAyKzAxOjAwIiBzdEV2dDpzb2Z0d2FyZUFnZW50PSJBZG9iZSBQaG90b3Nob3AgMjUuMSAoV2luZG93cykiLz4gPHJkZjpsaSBzdEV2dDphY3Rpb249InNhdmVkIiBzdEV2dDppbnN0YW5jZUlEPSJ4bXAuaWlkOjQ4OWY1NzlmLTJkNjUtZWQ0Zi04OTg0LTA4NGE2MGE1ZTMzNSIgc3RFdnQ6d2hlbj0iMjAyMy0xMS0xNVQwMjowNDo1OSswMTowMCIgc3RFdnQ6c29mdHdhcmVBZ2VudD0iQWRvYmUgUGhvdG9zaG9wIDI1LjEgKFdpbmRvd3MpIiBzdEV2dDpjaGFuZ2VkPSIvIi8+IDxyZGY6bGkgc3RFdnQ6YWN0aW9uPSJzYXZlZCIgc3RFdnQ6aW5zdGFuY2VJRD0ieG1wLmlpZDpiMmM0YTYwOS1iZmE3LWE4NDAtYjhhZS05NzMxNmYzNWRiMjciIHN0RXZ0OndoZW49IjIwMjMtMTEtMTVUMDI6MDQ6NTkrMDE6MDAiIHN0RXZ0OnNvZnR3YXJlQWdlbnQ9IkFkb2JlIFBob3Rvc2hvcCAyNS4xIChXaW5kb3dzKSIgc3RFdnQ6Y2hhbmdlZD0iLyIvPiA8L3JkZjpTZXE+IDwveG1wTU06SGlzdG9yeT4gPC9yZGY6RGVzY3JpcHRpb24+IDwvcmRmOlJERj4gPC94OnhtcG1ldGE+IDw/eHBhY2tldCBlbmQ9InIiPz4OTe6GAAAAx0lEQVR42u3WMQoAIQxFwRzJys77X8vSLiRgITif7bYbgrwYc/mKXyBoY4VVBgsWLFiwYFmOlTv+9jfDOjHmr8u6eVkGCxYsWLBgmc5S8ApewXvgYRksWLBgKXidpeBdloL3wMOCBctgwVLwCl7BuyyDBQsWLFiwTGcpeAWv4D3wsAwWLFiwFLzOUvAuS8F74GHBgmWwYCl4Ba/gXZbBggULFixYprMUvIJX8B54WAYLFixYCl5nKXiXpeA98LBgwTJYsGC9tg1o8f4TTtqzNQAAAABJRU5ErkJggg==",j.ColorPalette.colors.litegraph_base.CLEAR_BACKGROUND_COLOR="#09090b";const V=LGraphCanvas.node_colors.bgcolor,Y={ColorPalette:{id:"milk_white",name:"Milk 
White",colors:{node_slot:{CLIP:"#FFA726",CLIP_VISION:"#5C6BC0",CLIP_VISION_OUTPUT:"#8D6E63",CONDITIONING:"#EF5350",CONTROL_NET:"#66BB6A",IMAGE:"#42A5F5",LATENT:"#AB47BC",MASK:"#9CCC65",MODEL:"#7E57C2",STYLE_MODEL:"#D4E157",VAE:"#FF7043",PIPE_LINE:"#7737AA",PIPE_LINE_SDXL:"#7737AA",INT:"#29699C",X_Y:"#38291f",XYPLOT:"#74DA5D",LORA_STACK:"#94dccd",CONTROL_NET_STACK:"#94dccd"},litegraph_base:{BACKGROUND_IMAGE:"data:image/gif;base64,R0lGODlhZABkALMAAAAAAP///+vr6+rq6ujo6Ofn5+bm5uXl5d3d3f///wAAAAAAAAAAAAAAAAAAAAAAACH5BAEAAAkALAAAAABkAGQAAAT/UMhJq7046827HkcoHkYxjgZhnGG6si5LqnIM0/fL4qwwIMAg0CAsEovBIxKhRDaNy2GUOX0KfVFrssrNdpdaqTeKBX+dZ+jYvEaTf+y4W66mC8PUdrE879f9d2mBeoNLfH+IhYBbhIx2jkiHiomQlGKPl4uZe3CaeZifnnijgkESBqipqqusra6vsLGys62SlZO4t7qbuby7CLa+wqGWxL3Gv3jByMOkjc2lw8vOoNSi0czAncXW3Njdx9Pf48/Z4Kbbx+fQ5evZ4u3k1fKR6cn03vHlp7T9/v8A/8Gbp4+gwXoFryXMB2qgwoMMHyKEqA5fxX322FG8tzBcRnMW/zlulPbRncmQGidKjMjyYsOSKEF2FBlJQMCbOHP6c9iSZs+UnGYCdbnSo1CZI5F64kn0p1KnTH02nSoV3dGTV7FFHVqVq1dtWcMmVQZTbNGu72zqXMuW7danVL+6e4t1bEy6MeueBYLXrNO5Ze36jQtWsOG97wIj1vt3St/DjTEORss4nNq2mDP3e7w4r1bFkSET5hy6s2TRlD2/mSxXtSHQhCunXo26NevCpmvD/UU6tuullzULH76q92zdZG/Ltv1a+W+osI/nRmyc+fRi1Xdbh+68+0vv10dH3+77KD/i6IdnX669/frn5Zsjh4/2PXju8+8bzc9/6fj27LFnX11/+IUnXWl7BJfegm79FyB9JOl3oHgSklefgxAC+FmFGpqHIYcCfkhgfCohSKKJVo044YUMttggiBkmp6KFXw1oII24oYhjiDByaKOOHcp3Y5BD/njikSkO+eBREQAAOw==",CLEAR_BACKGROUND_COLOR:"lightgray",NODE_TITLE_COLOR:"#222",NODE_SELECTED_TITLE_COLOR:"#000",NODE_TEXT_SIZE:14,NODE_TEXT_COLOR:"#444",NODE_SUBTEXT_SIZE:12,NODE_DEFAULT_COLOR:"#F7F7F7",NODE_DEFAULT_BGCOLOR:"#F5F5F5",NODE_DEFAULT_BOXCOLOR:"#555",NODE_DEFAULT_SHAPE:"box",NODE_BOX_OUTLINE_COLOR:"#000",DEFAULT_SHADOW_COLOR:"rgba(0,0,0,0.1)",DEFAULT_GROUP_FONT:24,WIDGET_BGCOLOR:"#D4D4D4",WIDGET_OUTLINE_COLOR:"#999",WIDGET_TEXT_COLOR:"#222",WIDGET_SECONDARY_TEXT_COLOR:"#555",LINK_COLOR:"#9A9",EVENT_LINK_COLOR:"#FF9800",CONNECTING_LINK_COLOR:"#222"},comfy_base:{"fg-color":"#222","bg-color":"#DDD","comfy-menu-bg":"#F5F5F5","comfy-input-bg":"#C9C9C9","input-text":"#222","descrip-text":"#444","drag-text":"#555","error-text":"#F44336","border-color":"#bbb","tr-even-bg-color":"#f9f9f9","tr-odd-bg-color":"#fff","content-bg":"#e0e0e0","content-fg":"#222","content-hover-bg":"#adadad","content-hover-fg":"#222"}}},NODE_COLORS:{red:{color:"#af3535",bgcolor:V,groupcolor:"#A88"},brown:{color:"#38291f",bgcolor:V,groupcolor:"#b06634"},green:{color:"#346434",bgcolor:V,groupcolor:"#8A8"},blue:{color:"#1f1f48",bgcolor:V,groupcolor:"#88A"},pale_blue:{color:"#006691",bgcolor:V,groupcolor:"#3f789e"},cyan:{color:"#008181",bgcolor:V,groupcolor:"#8AA"},purple:{color:"#422342",bgcolor:V,groupcolor:"#a1309b"},yellow:{color:"#c09430",bgcolor:V,groupcolor:"#b58b2a"},black:{color:"rgba(0,0,0,.8)",bgcolor:V,groupcolor:"#444"}}},H={"Workflow created by":"工作流创建者","Watch more video content":"观看更多视频内容","Workflow Guide":"工作流指南","💎 View Checkpoint Info...":"💎 查看 Checkpoint 信息...","💎 View Lora Info...":"💎 查看 Lora 信息...","🔃 Reload Node":"🔃 刷新节点","Updated At:":"最近更新:","Created At:":"首次发布:","✏️ Edit":"✏️ 编辑","💾 Save":"💾 保存","No notes":"当前还没有备注内容","Saving Notes...":"正在保存备注...","Type your notes here":"在这里输入备注内容",ModelName:"模型名称","Models Required":"所需模型","Download Model":"下载模型","Source Url":"模型源地址",Notes:"备注",Type:"类型","Trained Words":"训练词",BaseModel:"基础算法",Details:"详情",Description:"描述",Download:"下载量",Source:"来源","Saving Preview...":"正在保存预览图...","Saving Succeed":"保存成功","Clean SuccessFully":"清理成功","Clean Failed":"清理失败","Saving Failed":"保存失败","No COMBO link":"沒有找到COMBO连接","Reboot ComfyUI":"重启ComfyUI","Are you sure 
you'd like to reboot the server?":"是否要重启ComfyUI?","Nodes Map":"管理节点组","Nodes map sorting mode":"管理节点组排序模式","No Nodes":"未找到节点","No nodes found in the map":"在工作流程中没有找到节点","Expand All":"展开所有组","Collapse All":"折叠所有组",Close:"关闭","Default automatic sorting, if set to manual, groups can be dragged and dropped and the sorting results saved.":"默认自动排序,如果设置为手动,组可以拖放并保存排序结果。","For drag and drop sorting, please find Nodes map sorting mode in Settings->EasyUse and change it to manual":"如需拖拽排序请在设置->EasyUse节点中找到管理节点组排序模式并修改成 manual",Queue:"队列","Cleanup Of VRAM Usage":"清理显存占用","Please stop all running tasks before cleaning GPU":"请在清理GPU之前停止所有运行中的任务",Always:"启用中",Bypass:"已忽略",Never:"已停用","Auto Sorting":"自动排序","Toggle `Show/Hide` can set mode of group, LongPress can set group nodes to never":"点击`启用中/已忽略`可设置组模式, 长按可停用该组节点","Enable Shift+Up/Down/Left/Right key to align selected nodes":"启用 `Shift+上/下/左/右` 键对齐选中的节点","Enable Shift+g to add selected nodes to a group":"启用 `Shift+g` 键将选中的节点添加一个组","Enable Shift+r to unload models and node cache":"启用 `Shift+r` 键卸载模型和节点缓存","Enable Up/Down/Left/Right key to jump nearest nodes":"启用 `上/下/左/右` 键跳转到最近的前后节点","Enable ALT+1~9 to paste nodes from nodes template":"启用 `ALT+1~9` 从节点模板粘贴到工作流中","Enable contextMenu auto nest subdirectories":"启用上下文菜单自动嵌套子目录","Enable right-click menu to add node A~Z sorting":"启用右键菜单中新建节点A~Z排序","Enable model thumbnails display":"启动模型预览图显示","Enable nodes runtime display":"启动节点运行时间显示","Enable chain get node and set node with parent nodes":"启用将获取点和设置点与父节点链在一起","Maximum number of model thumbnails displayed":"显示的模型缩略图的最大数量","Too many thumbnails will affect the first loading time, set the maximum value to not load the thumbnail function when there are too many models's thumbnail":"太多的缩略图会影响首次加载时间,当模型缩略图太多时,设置最大值以不加载缩略图功能","Too many thumbnails, have closed the display":"模型缩略图太多啦,为您关闭了显示","Get styles list Failed":"获取样式列表失败","Get style image Failed":"获取样式图片失败","Empty All":"清空所有","Type here to search styles ...":"在此处输入以搜索样式 ...","Loading UserInfo...":"正在获取用户信息...","Please set the APIKEY first":"请先设置APIKEY","Setting APIKEY":"设置APIKEY","Save Account Info":"保存账号信息",Choose:"选择",Delete:"删除",Edit:"编辑","At least one account is required":"删除失败: 至少需要一个账户","APIKEY is not Empty":"APIKEY 不能为空","Add Account":"添加账号","Getting Your APIKEY":"获取您的APIKEY","Choose Selected Images":"选择选中的图片","Choose images to continue":"选择图片以继续",Background:"背景",Hat:"帽子",Hair:"头发",Body:"身体",Face:"脸部",Clothes:"衣服",Others:"其他",Glove:"手套",Sunglasses:"太阳镜","Upper-clothes":"上衣",Dress:"连衣裙",Coat:"外套",Socks:"袜子",Pants:"裤子",Jumpsuits:"连体衣",Scarf:"围巾",Skirt:"裙子","Left-arm":"左臂","Right-arm":"右臂","Left-leg":"左腿","Right-leg":"右腿","Left-shoe":"左鞋","Right-shoe":"右鞋",s:"秒","No Node Templates Found":"未找到节点模板预设","Get Node Templates File Failed":"获取节点模板文件失败","Node template with {key} not set":"未设置快捷键为{key}的节点预设","ComfyUI Basic":"ComfyUI 基础节点","Recommend Nodes":"推荐节点","Others A~Z":"其他节点 A~Z"},X=I("AGL.Locale"),Z=(e,t=!1)=>"zh-CN"===(t?navigator.language:X)&&H[e]||e,K={addGroup:{id:"EasyUse.Hotkeys.AddGroup",name:Z("Enable Shift+g to add selected nodes to a group"),type:"boolean",defaultValue:!0},cleanVRAMUsed:{id:"EasyUse.Hotkeys.cleanVRAMUsed",name:Z("Enable Shift+r to unload models and node cache"),type:"boolean",defaultValue:!0},alignSelectedNodes:{id:"EasyUse.Hotkeys.AlignSelectedNodes",name:Z("Enable Shift+Up/Down/Left/Right key to align selected nodes"),type:"boolean",defaultValue:!0},nodesTemplate:{id:"EasyUse.Hotkeys.NodesTemplate",name:Z("Enable ALT+1~9 to paste nodes from nodes 
template"),type:"boolean",defaultValue:!0},jumpNearestNodes:{id:"EasyUse.Hotkeys.JumpNearestNodes",name:Z("Enable Up/Down/Left/Right key to jump nearest nodes"),type:"boolean",defaultValue:!0},subDirectories:{id:"EasyUse.ContextMenu.SubDirectories",name:Z("Enable contextMenu auto nest subdirectories"),type:"boolean",defaultValue:!1},modelsThumbnails:{id:"EasyUse.ContextMenu.ModelsThumbnails",name:Z("Enable model thumbnails display"),type:"boolean",defaultValue:!1},modelsThumbnailsLimit:{id:"EasyUse.ContextMenu.ModelsThumbnailsLimit",name:Z("Maximum number of model thumbnails displayed"),tooltip:Z("Too many thumbnails will affect the first loading time, set the maximum value to not load the thumbnail function when there are too many models's thumbnail"),type:"slider",attrs:{min:0,max:5e3,step:100},defaultValue:500},rightMenuNodesSort:{id:"EasyUse.ContextMenu.NodesSort",name:Z("Enable right-click menu to add node A~Z sorting"),type:"boolean",defaultValue:!0},nodesRuntime:{id:"EasyUse.Nodes.Runtime",name:Z("Enable nodes runtime display"),type:"boolean",defaultValue:!0},chainGetSet:{id:"EasyUse.Nodes.ChainGetSet",name:Z("Enable chain get node and set node with parent nodes"),type:"boolean",defaultValue:!0},nodesMap:{id:"EasyUse.NodesMap.Sorting",name:Z("Nodes map sorting mode"),tooltip:Z("Default automatic sorting, if set to manual, groups can be dragged and dropped and the sorting results saved."),type:"combo",options:["Auto sorting","Manual drag&drop sorting"],defaultValue:"Auto sorting"}};function J(e=100,t){return new Promise((n=>{setTimeout((()=>{n(t)}),e)}))}const $=y("groups",{state:e=>({groups:[],nodes:[],isWatching:!1}),getters:{groups_nodes(){var e;let t=[],n=[];if((null==(e=this.nodes)?void 0:e.length)>0){this.nodes.map((e=>{let s=e.pos,o=!1;for(let n=0;ni.pos[0]&&s[0]i.pos[1]&&s[1]e.pos[0]-t.pos[0])).sort(((e,t)=>e.pos[1]-t.pos[1])))},setNodes(e){this.nodes=v(e)},update(){(w.extensionManager.activeSidebarTab===B||this.isWatching)&&setTimeout((e=>{this.setGroups(w.canvas.graph._groups),this.setNodes(w.canvas.graph._nodes)}),1)},watchGraph(e=!1){e&&(this.isWatching=!0);let t=this;this.update();const n=w.graph.onNodeAdded;w.graph.onNodeAdded=function(e){t.update();const s=e.onRemoved;return e.onRemoved=function(){return t.update(),null==s?void 0:s.apply(this,arguments)},null==n?void 0:n.apply(this,arguments)},w.canvas.onNodeMoved=function(e){t.update()};const s=LGraphCanvas.onNodeAlign;LGraphCanvas.onNodeAlign=function(e){return t.update(),null==s?void 0:s.apply(this,arguments)};const o=LGraphCanvas.onGroupAdd;LGraphCanvas.onGroupAdd=function(){return t.update(),null==o?void 0:o.apply(this,arguments)};const i=LGraphCanvas.onGroupAlign;LGraphCanvas.onGroupAlign=function(e){return t.update(),null==i?void 0:i.apply(this,arguments)};const a=LGraphCanvas.onMenuNodeRemove;LGraphCanvas.onMenuNodeRemove=function(e){return t.update(),null==a?void 0:a.apply(this,arguments)}},unwatchGraph(){this.isWatching=!1}}});let q=null;const Q=["custom_obsidian","custom_obsidian_dark","custom_milk_white"],ee={"easy positive":"green","easy negative":"red","easy promptList":"cyan","easy promptLine":"cyan","easy promptConcat":"cyan","easy promptReplace":"cyan","easy forLoopStart":"blue","easy forLoopEnd":"blue"};let te=LGraphCanvas.node_colors,ne=null,se=null,oe=null,ie=null;for(let dn in K)"Disabled"==I("Comfy.UseNewMenu")?T({...K[dn],name:"👽 "+K[dn].name}):T(K[dn]);function ae(e,t=!1){let n="after",s="before";t&&([s,n]=[n,s]),e.label=(e.label??e.name).replace(s,n),e.name=e.label}function 
le(e,t,n,s,o,i,a){t.strokeStyle=s,t.fillStyle=o;let l=LiteGraph.NODE_TITLE_HEIGHT,r=this.ds.scale<.5,d=e._shape||e.constructor.shape||LiteGraph.ROUND_SHAPE,u=e.constructor.title_mode,c=!0;u==LiteGraph.TRANSPARENT_TITLE||u==LiteGraph.NO_TITLE?c=!1:u==LiteGraph.AUTOHIDE_TITLE&&mouse_over&&(c=!0);let p=new Float32Array(4);p=[0,c?-l:0,n[0]+1,c?n[1]+l:n[1]];let h=t.globalAlpha;if(t.lineWidth=1,t.beginPath(),d==LiteGraph.BOX_SHAPE||r?t.fillRect(p[0],p[1],p[2],p[3]):d==LiteGraph.ROUND_SHAPE||d==LiteGraph.CARD_SHAPE?t.roundRect(p[0],p[1],p[2],p[3],d==LiteGraph.CARD_SHAPE?[this.round_radius,this.round_radius,0,0]:[this.round_radius]):d==LiteGraph.CIRCLE_SHAPE&&t.arc(.5*n[0],.5*n[1],.5*n[0],0,2*Math.PI),t.strokeStyle=LiteGraph.WIDGET_OUTLINE_COLOR,t.stroke(),t.strokeStyle=s,t.fill(),!e.flags.collapsed&&c&&(t.shadowColor="transparent",t.fillStyle="rgba(0,0,0,0.2)",t.fillRect(0,-1,p[2],2)),t.shadowColor="transparent",e.onDrawBackground&&e.onDrawBackground(t,this,this.canvas,this.graph_mouse),c||u==LiteGraph.TRANSPARENT_TITLE){const o="dark"==function(e){let t=e.replace("#","");return n=parseInt(t.substring(0,2),16),s=parseInt(t.substring(2,4),16),o=parseInt(t.substring(4,6),16),.299*n+.587*s+.114*o>127.5?"light":"dark";var n,s,o}((null==e?void 0:e.color)||"#ffffff");if(e.onDrawTitleBar)e.onDrawTitleBar(t,l,n,this.ds.scale,s);else if(u!=LiteGraph.TRANSPARENT_TITLE&&(e.constructor.title_color||this.render_title_colored)){let o=e.constructor.title_color||s;if(e.flags.collapsed&&(t.shadowColor=LiteGraph.DEFAULT_SHADOW_COLOR),this.use_gradients){let e=LGraphCanvas.gradients[o];e||(e=LGraphCanvas.gradients[o]=t.createLinearGradient(0,0,400,0),e.addColorStop(0,o),e.addColorStop(1,"#000")),t.fillStyle=e}else t.fillStyle=o;t.beginPath(),d==LiteGraph.BOX_SHAPE||r?t.rect(0,-l,n[0]+1,l):d!=LiteGraph.ROUND_SHAPE&&d!=LiteGraph.CARD_SHAPE||t.roundRect(0,-l,n[0]+1,l,e.flags.collapsed?[this.round_radius]:[this.round_radius,this.round_radius,0,0]),t.fill(),t.shadowColor="transparent"}let a=!1;LiteGraph.node_box_coloured_by_mode&&LiteGraph.NODE_MODES_COLORS[e.mode]&&(a=LiteGraph.NODE_MODES_COLORS[e.mode]),LiteGraph.node_box_coloured_when_on&&(a=e.action_triggered?"#FFF":e.execute_triggered?"#AAA":a);let c=10;if(e.onDrawTitleBox)e.onDrawTitleBox(t,l,n,this.ds.scale);else if(d==LiteGraph.ROUND_SHAPE||d==LiteGraph.CIRCLE_SHAPE||d==LiteGraph.CARD_SHAPE){const n=o?"#ffffff":LiteGraph.NODE_SELECTED_TITLE_COLOR,s=o?"#eeeeee":e.boxcolor||a||LiteGraph.NODE_DEFAULT_BOXCOLOR;t.fillStyle=i?n:s,t.beginPath(),t.fillRect(10,0-1.05*c-1,1.1*c,.125*c),t.fillRect(10,0-1.45*c-1,1.1*c,.125*c),t.fillRect(10,0-1.85*c-1,1.1*c,.125*c)}else t.fillStyle=e.boxcolor||a||LiteGraph.NODE_DEFAULT_BOXCOLOR,t.fillRect(.5*(l-c),-.5*(l+c),c,c);if(t.globalAlpha=h,e.onDrawTitleText&&e.onDrawTitleText(t,l,n,this.ds.scale,this.title_text_font,i),!r){t.font=this.title_text_font;let n=String(e.getTitle());n&&(t.fillStyle=i?o?"#ffffff":LiteGraph.NODE_SELECTED_TITLE_COLOR:o?"#ffffff":e.constructor.title_text_color||this.node_title_color,e.flags.collapsed?(t.textAlign="left",t.measureText(n),t.fillText(n.substr(0,20),l,LiteGraph.NODE_TITLE_TEXT_Y-l),t.textAlign="left"):(t.textAlign="left",t.fillText(n,l,LiteGraph.NODE_TITLE_TEXT_Y-l)))}if(!e.flags.collapsed&&e.subgraph&&!e.skip_subgraph_button){let 
n=LiteGraph.NODE_TITLE_HEIGHT,s=e.size[0]-n,o=LiteGraph.isInsideRectangle(this.graph_mouse[0]-e.pos[0],this.graph_mouse[1]-e.pos[1],s+2,2-n,n-4,n-4);t.fillStyle=o?"#888":"#555",d==LiteGraph.BOX_SHAPE||r?t.fillRect(s+2,2-n,n-4,n-4):(t.beginPath(),t.roundRect(s+2,2-n,n-4,n-4,[4]),t.fill()),t.fillStyle="#333",t.beginPath(),t.moveTo(s+.2*n,.6*-n),t.lineTo(s+.8*n,.6*-n),t.lineTo(s+.5*n,.3*-n),t.fill()}e.onDrawTitle&&e.onDrawTitle(t)}if(i){e.onBounding&&e.onBounding(p),u==LiteGraph.TRANSPARENT_TITLE&&(p[1]-=l,p[3]+=l),t.lineWidth=2,t.globalAlpha=.8,t.beginPath();let o=0,i=0,a=1;d==LiteGraph.BOX_SHAPE?t.rect(o+p[0],o+p[1],i+p[2],i+p[3]):d==LiteGraph.ROUND_SHAPE||d==LiteGraph.CARD_SHAPE&&e.flags.collapsed?t.roundRect(o+p[0],o+p[1],i+p[2],i+p[3],[this.round_radius*a]):d==LiteGraph.CARD_SHAPE?t.roundRect(o+p[0],o+p[1],i+p[2],i+p[3],[this.round_radius*a,a,this.round_radius*a,a]):d==LiteGraph.CIRCLE_SHAPE&&t.arc(.5*n[0],.5*n[1],.5*n[0]+6,0,2*Math.PI),t.strokeStyle=LiteGraph.NODE_BOX_OUTLINE_COLOR,t.stroke(),t.strokeStyle=s,t.globalAlpha=1}e.execute_triggered>0&&e.execute_triggered--,e.action_triggered>0&&e.action_triggered--}function re(e,t,n,s){if(!e.widgets||!e.widgets.length)return 0;let o=e.size[0],i=e.widgets;t+=2;let a=LiteGraph.NODE_WIDGET_HEIGHT,l=this.ds.scale>.5;n.save(),n.globalAlpha=this.editor_alpha;let r=LiteGraph.WIDGET_OUTLINE_COLOR,d=LiteGraph.WIDGET_BGCOLOR,u=LiteGraph.WIDGET_TEXT_COLOR,c=LiteGraph.WIDGET_SECONDARY_TEXT_COLOR,p=12;for(let h=0;h1&&(o=1),n.fillStyle=m.options.hasOwnProperty("slider_color")?m.options.slider_color:s==m?r:R,n.beginPath(),n.roundRect(p,g,o*(f-24),a,[.25*a]),n.fill(),m.marker){let e=(m.marker-m.options.min)/t;e<0&&(e=0),e>1&&(e=1),n.fillStyle=m.options.hasOwnProperty("marker_color")?m.options.marker_color:"#AA9",n.roundRect(p+e*(f-24),g,2,a,[.25*a])}if(l){n.textAlign="center",n.fillStyle=u;let e=(m.label||m.name)+": "+Number(m.value).toFixed(null!=m.options.precision?m.options.precision:3).toString();n.fillText(e,.5*f,g+.7*a)}break;case"number":case"combo":if(n.textAlign="left",n.strokeStyle=r,n.fillStyle=d,n.beginPath(),l?n.roundRect(p,g,f-24,a,[.25*a]):n.rect(p,g,f-24,a),n.fill(),l){m.disabled||n.stroke(),n.fillStyle=u,m.disabled||(n.beginPath(),n.moveTo(24,g+6.5),n.lineTo(18,g+.5*a),n.lineTo(24,g+a-6.5),n.fill(),n.beginPath(),n.moveTo(f-p-12,g+6.5),n.lineTo(f-p-6,g+.5*a),n.lineTo(f-p-12,g+a-6.5),n.fill()),n.fillStyle=c,n.font="10px Inter",n.fillText(m.label||m.name,29,g+.7*a),n.fillStyle=u,n.textAlign="right";let e=6;if("number"==m.type)n.font="10px Inter",n.fillText(Number(m.value).toFixed(void 0!==m.options.precision?m.options.precision:3),f-24-e,g+.7*a);else{let t=m.value;if(m.options.values){let e=m.options.values;e.constructor===Function&&(e=e()),e&&e.constructor!==Array&&(t=e[m.value])}n.fillText(t,f-24-e,g+.7*a)}}break;case"string":case"text":if(n.textAlign="left",n.strokeStyle=r,n.fillStyle=d,n.beginPath(),l?n.roundRect(p,g,f-24,a,[.25*a]):n.rect(p,g,f-24,a),n.fill(),l){m.disabled||n.stroke(),n.save(),n.beginPath(),n.rect(p,g,f-24,a),n.clip(),n.fillStyle=c;const e=m.label||m.name;n.font="10px Inter",null!=e&&n.fillText(e,24,g+.7*a),n.fillStyle=u,n.textAlign="right",n.fillText(String(m.value).substr(0,30),f-24,g+.7*a),n.restore()}break;default:m.draw&&m.draw(n,e,f,g,a)}t+=(m.computeSize?m.computeSize(f)[1]:a)+4,n.globalAlpha=this.editor_alpha}n.restore(),n.textAlign="left"}function de(e,t,n,s,o){return new LiteGraph.ContextMenu(LiteGraph.NODE_MODES,{event:n,callback:function(e){if(!o)return;var 
t=Object.values(LiteGraph.NODE_MODES).indexOf(e),n=function(e){t>=0&&LiteGraph.NODE_MODES[t]?e.changeMode(t):e.changeMode(LiteGraph.ALWAYS),q||(q=$()),q.update()},s=LGraphCanvas.active_canvas;if(!s.selected_nodes||Object.keys(s.selected_nodes).length<=1)n(o);else for(var i in s.selected_nodes)n(s.selected_nodes[i])},parentMenu:s,node:o}),!1}function ue(e,t,n,s,o){if(!o)throw"no node for color";var i=[];for(var a in i.push({value:null,content:"No color"}),LGraphCanvas.node_colors){var l=LGraphCanvas.node_colors[a];e={value:a,content:""+a+""};i.push(e)}return new LiteGraph.ContextMenu(i,{event:n,callback:function(e){if(!o)return;var t=e.value?LGraphCanvas.node_colors[e.value]:null,n=function(e){t?e.constructor===LiteGraph.LGraphGroup?e.color=t.groupcolor:(e.color=t.color,e.bgcolor=t.bgcolor):(delete e.color,delete e.bgcolor),q||(q=$()),q.update()},s=LGraphCanvas.active_canvas;if(!s.selected_nodes||Object.keys(s.selected_nodes).length<=1)n(o);else for(var i in s.selected_nodes)n(s.selected_nodes[i]);o.setDirtyCanvas(!0,!0)},parentMenu:s,node:o}),!1}function ce(e,t,n,s,o){var i=e.property||"title",a=o[i],l=document.createElement("div");l.is_modified=!1,l.className="graphdialog",l.innerHTML="",l.close=function(){l.parentNode&&l.parentNode.removeChild(l)},l.querySelector(".name").innerText=i;var r=l.querySelector(".value");r&&(r.value=a,r.addEventListener("blur",(function(e){this.focus()})),r.addEventListener("keydown",(function(e){if(l.is_modified=!0,27==e.keyCode)l.close();else if(13==e.keyCode)m();else if(13!=e.keyCode&&"textarea"!=e.target.localName)return;e.preventDefault(),e.stopPropagation()})));var d=LGraphCanvas.active_canvas.canvas,u=d.getBoundingClientRect(),c=-20,p=-20;u&&(c-=u.left,p-=u.top),event?(l.style.left=event.clientX+c+"px",l.style.top=event.clientY+p+"px"):(l.style.left=.5*d.width+c+"px",l.style.top=.5*d.height+p+"px"),l.querySelector("button").addEventListener("click",m),d.parentNode.appendChild(l),r&&r.focus();var h=null;function m(){r&&function(t){"Number"==e.type?t=Number(t):"Boolean"==e.type&&(t=Boolean(t));o[i]=t,l.parentNode&&l.parentNode.removeChild(l);o.setDirtyCanvas(!0,!0),q||(q=$());q.update()}(r.value)}l.addEventListener("mouseleave",(function(e){LiteGraph.dialog_close_on_mouse_leave&&!l.is_modified&&LiteGraph.dialog_close_on_mouse_leave&&(h=setTimeout(l.close,LiteGraph.dialog_close_on_mouse_leave_delay))})),l.addEventListener("mouseenter",(function(e){LiteGraph.dialog_close_on_mouse_leave&&h&&clearTimeout(h)}))}w.registerExtension({name:"Comfy.EasyUse.UI",init(){var e,t;const n="Comfy.CustomColorPalettes",s="Comfy.Settings.Comfy.CustomColorPalettes";if(se||(se=I(n,s)),oe||(oe=I("Comfy.ColorPalette","Comfy.Settings.Comfy.ColorPalette")||"dark"),(!(null==(e=null==se?void 0:se.obsidian)?void 0:e.version)||se.obsidian.version{(null==e?void 0:e.value)&&(null==e?void 0:e.oldValue)&&(await J(1),Object.assign(w.canvas.default_connection_color_byType,M),Object.assign(LGraphCanvas.link_type_colors,M)),"custom_milk_white"==e.value&&document.body.classList.remove(G)})),setTimeout((e=>he(I("Comfy.UseNewMenu")||"Disabled")),1)},async nodeCreated(e){var t;if(ee.hasOwnProperty(e.comfyClass)){const t=ee[e.comfyClass],n=te[t];if(!n)return;n.color&&(e.color=n.color),n.bgcolor&&(e.bgcolor=n.bgcolor)}if(ne||(ne=I("Comfy.WidgetControlMode")),"before"==ne){const n="before"==ne;if((null==(t=e.widgets)?void 0:t.length)>0)for(const t of e.widgets)if(["control_before_generate","control_after_generate"].includes(t.name)&&(await ae(t,n),t.linkedWidgets))for(const e of 
t.linkedWidgets)await ae(e,n)}}});const pe=null==(m=w.ui.settings.settingsLookup)?void 0:m["Comfy.UseNewMenu"];pe&&(pe.onChange=e=>he(e));const he=e=>{var t;const n=(null==(t=document.getElementById("crystools-root"))?void 0:t.children)||null;if((null==n?void 0:n.length)>0){if(!ie)for(let e=0;ee.widgets.find((e=>e.name===t)),fe=(e,t,n=!1,s="")=>{var o;if(!t||((e,t)=>!!e.inputs&&e.inputs.some((e=>e.name===t)))(e,t.name))return;me[t.name]||(me[t.name]={origType:t.type,origComputeSize:t.computeSize});const i=e.size;t.type=n?me[t.name].origType:"easyHidden"+s,t.computeSize=n?me[t.name].origComputeSize:()=>[0,-4],null==(o=t.linkedWidgets)||o.forEach((s=>fe(e,s,":"+t.name,n)));const a=n?Math.max(e.computeSize()[1],i[1]):e.size[1];e.setSize([e.size[0],a])},ye=(e,t=0)=>{var n,s;if(e)return(null==(n=e.widgets)?void 0:n[t])?e.widgets[t].value:e.widgets_values?null==(s=e.widgets_values)?void 0:s[t]:void 0},_e=e=>e.setSize([e.size[0],e.computeSize()[1]]),ve=(e,t)=>graph.getNodeById(e),we=e=>{var t;try{return Object.values(null==(t=null==graph?void 0:graph.list_of_graphcanvas[0])?void 0:t.selected_nodes)}catch(n){return[]}};function be(e,t,n){return e+(s=n,(.5-.5*Math.cos(Math.PI*s))*(t-e));var s}const Le=(e,t=!0)=>{var n,s;const o=(null==(s=null==(n=e.graph)?void 0:n.list_of_graphcanvas)?void 0:s[0])||null;if(!o)return;const[i,a]=e.pos,[l,r]=e.size;(([e,t],n)=>{const s=n.ds,o=document.body.clientWidth,i=document.body.clientHeight,a=s.scale,l=.5*o/a-e,r=.5*i/a-t,d=Date.now()+250,u=s.offset[0],c=s.offset[1],p=()=>{const e=d-Date.now();if(!(Date.now(){const t=ve(e);t&&Le(t)},Se=(e,t=(()=>graph.links??[])())=>t[e],Ce=e=>e.toLowerCase().replace(/_./g,(e=>e.replace("_","").toUpperCase())),Ae=e=>"easy getNode"===e.type,ke=e=>"easy setNode"===e.type,Ie=e=>Ae(e)||ke(e),xe=(e=(()=>graph._nodes??[])())=>e.filter((e=>Ie(e))),Ne=(e,t,n=0)=>{e.widgets_values||(e.widgets_values=[]),e.widgets_values[n]=t,e.widgets[n].value=t},Te=e=>graph.add(e),Oe=e=>graph.remove(e),De=(e,t=0)=>{var n,s;if("Reroute"!==e.type)return[e,t];const o=e,i=null==(s=null==(n=o.inputs)?void 0:n[0])?void 0:s.link;if(!i)return[o,t];const a=Se(i);if(!a)return[o,t];const l=ve(a.origin_id);return l?(setTimeout((()=>{Oe(o)})),De(l,a.origin_slot)):[o,t]},Ge=e=>{var t,n,s;if("Reroute"!==e.type)return e;const o=e,i=null==(n=null==(t=o.outputs)?void 0:t[0])?void 0:n.links;if(!i)return o;const a=i[0];if(!a)return o;const l=Se(a);if(!l)return o;const r=ve(l.target_id);return r?(1===(null==(s=o.outputs[0].links)?void 0:s.length)&&setTimeout((()=>{Oe(o)})),Ge(r)):o};const Re=new class{constructor(){f(this,"element",L(`div.${D}toast`)),f(this,"children",HTMLElement),f(this,"container",document.body),this.container.appendChild(this.element)}async show(e){let t=L(`div.${D}toast-container`,[L("div",[L("span",[...e.icon?[L("i",{className:e.icon})]:[],L("span",e.content)])])]);t.setAttribute("toast-id",e.id),this.element.replaceChildren(t),this.container.appendChild(this.element),await J(64),t.style.marginTop=`-${t.offsetHeight}px`,await J(64),t.classList.add("show"),e.duration&&(await J(e.duration),this.hide(e.id))}async hide(e){const t=document.querySelector(`.${D}toast > [toast-id="${e}"]`);(null==t?void 0:t.classList.contains("show"))&&(t.classList.remove("show"),await J(750)),t&&t.remove()}async clearAllMessages(){let e=document.querySelector(`.${D}container`);e&&(e.innerHTML="")}async info(e,t=3e3,n=[]){this.show({id:"toast-info",icon:`mdi mdi-information ${D}theme`,content:e,duration:t})}async success(e,t=3e3){this.show({id:"toast-success",icon:`mdi 
mdi-check-circle ${D}success`,content:e,duration:t})}async error(e,t=3e3){this.show({id:"toast-error",icon:`mdi mdi-close-circle ${D}error`,content:e,duration:t})}async warn(e,t=3e3){this.show({id:"toast-warn",icon:`mdi mdi-alert-circle ${D}warning`,content:e,duration:t})}async showLoading(e,t=0){this.show({id:"toast-loading",icon:"mdi mdi-rotate-right loading",content:e,duration:t})}async hideLoading(){this.hide("toast-loading")}},Me=["rescale_after_model","rescale","lora_name","upscale_method","image_output","add_noise","info","sampler_name","ckpt_B_name","ckpt_C_name","save_model","refiner_ckpt_name","num_loras","num_controlnet","mode","toggle","resolution","ratio","target_parameter","input_count","replace_count","downscale_mode","range_mode","text_combine_mode","input_mode","lora_count","ckpt_count","conditioning_mode","preset","use_tiled","use_batch","num_embeds","easing_mode","guider","scheduler","inpaint_mode","t5_type","rem_mode"],Pe=["LIGHT - SD1.5 only (low strength)","STANDARD (medium strength)","VIT-G (medium strength)","PLUS (high strength)","PLUS FACE (portraits)","FULL FACE - SD1.5 only (portraits stronger)"],Fe=["FACEID","FACEID PLUS - SD1.5 only","FACEID PLUS V2","FACEID PLUS KOLORS","FACEID PORTRAIT (style transfer)","FACEID PORTRAIT UNNORM - SDXL only (strong)"],Ue=["easy seed","easy latentNoisy","easy wildcards","easy preSampling","easy preSamplingAdvanced","easy preSamplingNoiseIn","easy preSamplingSdTurbo","easy preSamplingCascade","easy preSamplingDynamicCFG","easy preSamplingLayerDiffusion","easy fullkSampler","easy fullCascadeKSampler"],Be=["easy fullLoader","easy a1111Loader","easy comfyLoader","easy hyditLoader","easy pixArtLoader"],We=["easy imageSize","easy imageSizeBySide","easy imageSizeByLongerSide","easy imageSizeShow","easy imageRatio","easy imagePixelPerfect"],ze=["easy forLoopStart","easy forLoopEnd","easy whileLoopStart","easy whileLoopEnd"],je=["easy anythingIndexSwitch","easy imageIndexSwitch","easy textIndexSwitch","easy conditioningIndexSwitch"],Ve=[...ze,...je],Ye={"easy anythingIndexSwitch":"value","easy imageIndexSwitch":"image","easy textIndexSwitch":"text","easy conditioningIndexSwitch":"cond"};function He(e,t){const n=e.comfyClass,s=t.value;switch(t.name){case"range_mode":fe(e,ge(e,"step"),"step"==s),fe(e,ge(e,"num_steps"),"num_steps"==s),_e(e);break;case"text_combine_mode":fe(e,ge(e,"replace_text"),"replace"==s);break;case"lora_name":["lora_model_strength","lora_clip_strength"].map((t=>fe(e,ge(e,t),"None"!==s)));break;case"resolution":"自定义 x 自定义"===s&&(t.value="width x height (custom)"),["empty_latent_width","empty_latent_height"].map((t=>fe(e,ge(e,t),"width x height (custom)"===s)));break;case"ratio":["empty_latent_width","empty_latent_height"].map((t=>fe(e,ge(e,t),"custom"===s)));break;case"num_loras":var o=s+1,i=ge(e,"mode").value;for(let t=0;tfe(e,ge(e,t),"simple"!==i)));for(let t=o;t<21;t++)["lora_"+t+"_name","lora_"+t+"_strength","lora_"+t+"_model_strength","lora_"+t+"_clip_strength"].map((t=>fe(e,ge(e,t),!1)));_e(e);break;case"num_controlnet":o=s+1,i=ge(e,"mode").value;for(let t=0;tfe(e,ge(e,t),!0))),["start_percent_"+t,"end_percent_"+t].map((t=>fe(e,ge(e,t),"simple"!==i)));for(let t=o;t<21;t++)["controlnet_"+t,"controlnet_"+t+"_strength","scale_soft_weight_"+t,"start_percent_"+t,"end_percent_"+t].map((t=>fe(e,ge(e,t),!1)));_e(e);break;case"mode":switch(null==e?void 0:e.comfyClass){case"easy loraStack":o=ge(e,"num_loras").value+1,i=s;for(let t=0;tfe(e,ge(e,t),"simple"!==i)));_e(e);break;case"easy 
controlnetStack":o=ge(e,"num_controlnet").value+1,i=s;for(let t=0;tfe(e,ge(e,t),"simple"!==i)));_e(e);break;case"easy icLightApply":i=s;["lighting","remove_bg"].map((t=>fe(e,ge(e,t),"Foreground"===i))),fe(e,ge(e,"source"),"Foreground"!==i),_e(e)}break;case"toggle":t.type="toggle",t.options={on:"Enabled",off:"Disabled"};break;case"t5_type":["clip_name","padding"].map((t=>fe(e,ge(e,t),"sd3"==s))),["t5_name","device","dtype"].map((t=>fe(e,ge(e,t),"t5v11"==s))),_e(e);break;case"preset":if(Pe.includes(s)){let t=ge(e,"use_tiled");fe(e,ge(e,"lora_strength")),fe(e,ge(e,"provider")),fe(e,ge(e,"weight_faceidv2")),fe(e,ge(e,"weight_kolors")),fe(e,ge(e,"use_tiled"),!0),fe(e,ge(e,"sharpening"),t&&t.value)}else Fe.includes(s)&&(fe(e,ge(e,"weight_faceidv2"),!!["FACEID PLUS V2","FACEID PLUS KOLORS"].includes(s)),fe(e,ge(e,"weight_kolors"),!!["FACEID PLUS KOLORS"].includes(t.value)),["FACEID PLUS KOLORS","FACEID PORTRAIT (style transfer)","FACEID PORTRAIT UNNORM - SDXL only (strong)"].includes(s)?fe(e,ge(e,"lora_strength"),!1):fe(e,ge(e,"lora_strength"),!0),fe(e,ge(e,"provider"),!0),fe(e,ge(e,"use_tiled")),fe(e,ge(e,"sharpening")));_e(e);break;case"use_tiled":fe(e,ge(e,"sharpening"),!!s),_e(e);break;case"num_embeds":o=s+1;for(let t=0;tfe(e,ge(e,t),!1)));break;case"brushnet_random":case"brushnet_segmentation":["dtype","scale","start_at","end_at"].map((t=>fe(e,ge(e,t),!0))),["fitting","function"].map((t=>fe(e,ge(e,t),!1)));break;case"powerpaint":["dtype","fitting","function","scale","start_at","end_at"].map((t=>fe(e,ge(e,t),!0)))}_e(e);break;case"image_output":fe(e,ge(e,"link_id"),!!["Sender","Sender&Save"].includes(s)),fe(e,ge(e,"decode_vae_name"),!!["Hide","Hide&Save"].includes(s)),["save_prefix","output_path","embed_workflow","number_padding","overwrite_existing"].map((t=>fe(e,ge(e,t),!!["Save","Hide&Save","Sender&Save"].includes(s))));break;case"add_noise":var a=ge(e,"control_before_generate"),l=ge(e,"control_after_generate")||a;"disable"===s?(fe(e,ge(e,"seed")),l&&(l.last_value=l.value,l.value="fixed",fe(e,l))):(fe(e,ge(e,"seed"),!0),l&&((null==l?void 0:l.last_value)&&(l.value=l.last_value),fe(e,l,!0))),_e(e);break;case"guider":switch(s){case"Basic":["cfg","cfg_negative"].map((t=>fe(e,ge(e,t))));break;case"CFG":fe(e,ge(e,"cfg"),!0),fe(e,ge(e,"cfg_negative"));break;case"IP2P+DualCFG":case"DualCFG":["cfg","cfg_negative"].map((t=>fe(e,ge(e,t),!0)))}_e(e);break;case"scheduler":["karrasADV","exponentialADV","polyExponential"].includes(s)?(["sigma_max","sigma_min"].map((t=>fe(e,ge(e,t),!0))),["denoise","beta_d","beta_min","eps_s","coeff"].map((t=>fe(e,ge(e,t))),!1),fe(e,ge(e,"rho"),"exponentialADV"!=s)):"vp"==s?(["sigma_max","sigma_min","denoise","rho","coeff"].map((t=>fe(e,ge(e,t)))),["beta_d","beta_min","eps_s"].map((t=>fe(e,ge(e,t),!0)))):(["sigma_max","sigma_min","beta_d","beta_min","eps_s","rho"].map((t=>fe(e,ge(e,t)))),fe(e,ge(e,"coeff"),"gits"==s),fe(e,ge(e,"denoise"),!0)),_e(e);break;case"conditioning_mode":["replace","concat","combine"].includes(s)?["average_strength","old_cond_start","old_cond_end","new_cond_start","new_cond_end"].map((t=>fe(e,ge(e,t)))):"average"==s?(fe(e,ge(e,"average_strength"),!0),["old_cond_start","old_cond_end","new_cond_start","new_cond_end"].map((t=>fe(e,ge(e,t),!1)))):"timestep"==s&&(["average_strength"].map((t=>fe(e,ge(e,t),!1))),["old_cond_start","old_cond_end","new_cond_start","new_cond_end"].map((t=>fe(e,ge(e,t)))));break;case"rescale":ge(e,"rescale_after_model").value,fe(e,ge(e,"width"),"to Width/Height"===s),fe(e,ge(e,"height"),"to 
Width/Height"===s),fe(e,ge(e,"percent"),"by percentage"===s),fe(e,ge(e,"longer_side"),"to longer side - maintain aspect"===s),_e(e);break;case"upscale_method":["factor","crop"].map((t=>fe(e,ge(e,t),"None"!==s)));break;case"target_parameter":switch(n){case"easy XYInputs: Steps":["first_step","last_step"].map((t=>fe(e,ge(e,t),"steps"==s))),["first_start_step","last_start_step"].map((t=>fe(e,ge(e,t),"start_at_step"==s))),["first_end_step","last_end_step"].map((t=>fe(e,ge(e,t),"end_at_step"==s)));break;case"easy XYInputs: Sampler/Scheduler":let t=ge(e,"input_count").value+1;for(let n=0;nfe(e,ge(e,t),"strength"==s))),["first_start_percent","last_start_percent"].map((t=>fe(e,ge(e,t),"start_percent"==s))),["first_end_percent","last_end_percent"].map((t=>fe(e,ge(e,t),"end_percent"==s))),["strength","start_percent","end_percent"].map((t=>fe(e,ge(e,t),s!=t))),_e(e)}case"replace_count":o=s+1;for(let t=0;tfe(e,ge(e,t),!r)));for(let t=o;t<11;t++)["lora_name_"+t,"model_str_"+t,"clip_str_"+t].map((t=>fe(e,ge(e,t),!1)));_e(e);break;case"ckpt_count":o=s+1;var d=-1!=ge(e,"input_mode").value.indexOf("ClipSkip"),u=-1!=ge(e,"input_mode").value.indexOf("VAE");for(let t=0;tfe(e,ge(e,t),!1)));_e(e);break;case"input_count":o=s+1;var c=ge(e,"target_parameter").value;for(let t=0;tfe(e,ge(e,n),!t)));["model_strength","clip_strength"].map((n=>fe(e,ge(e,n),!t)));break;case"easy XYInputs: Checkpoint":o=ge(e,"ckpt_count").value+1,d=-1!=ge(e,"input_mode").value.indexOf("ClipSkip"),u=-1!=ge(e,"input_mode").value.indexOf("VAE");for(let n=0;ne.name===t));if(-1!==e){for(let t=e;t{var e;const t=this.computeSize();t[0]"info"===e.name));if(-1!==e&&this.widgets[e]){this.widgets[e].value=t}}requestAnimationFrame((()=>{var e;const t=this.computeSize();t[0]"prompt"==e.name));this.addWidget("button","get values from COMBO link","",(()=>{var t,s;const o=(null==(s=null==(t=this.outputs[1])?void 0:t.links)?void 0:s.length)>0?this.outputs[1].links[0]:null,i=n.graph._nodes.find((e=>{var t;return null==(t=e.inputs)?void 0:t.find((e=>e.link==o))}));if(o&&i){const t=i.inputs.find((e=>e.link==o)).widget.name,n=i.widgets.find((e=>e.name==t));let s=(null==n?void 0:n.options.values)||null;s&&(s=s.join("\n"),e.value=s)}else Re.error(Z("No COMBO link"),3e3)}),{serialize:!1})}),Be.includes(t.name)){let t=function(e){var t="";for(let n=0;ne.name===t+"_prompt")),s="comfy-multiline-input wildcard_"+t+"_"+this.id.toString();if(-1==n&&e){const n=document.createElement("textarea");n.className=s,n.placeholder="Wildcard Prompt ("+t+")";const o=this.addDOMWidget(t+"_prompt","customtext",n,{getValue:e=>n.value,setValue(e){n.value=e},serialize:!1});o.inputEl=n,o.inputEl.readOnly=!0,n.addEventListener("input",(()=>{var e;null==(e=o.callback)||e.call(o,o.value)})),o.value=e}else if(this.widgets[n])if(e){this.widgets[n].value=e}else{this.widgets.splice(n,1);const e=document.getElementsByClassName(s);e&&e[0]&&e[0].remove()}}};e.prototype.onExecuted=function(e){null==l||l.apply(this,arguments);const s=t(e.positive),o=t(e.negative);n.call(this,s,"positive"),n.call(this,o,"negative")}}if(["easy sv3dLoader"].includes(t.name)){let t=function(e,t,n){switch(e){case"azimuth":return n.readOnly=!0,n.style.opacity=.6,"0:(0.0,0.0)"+(t>1?`\n${t-1}:(360.0,0.0)`:"");case"elevation":return n.readOnly=!0,n.style.opacity=.6,"0:(-90.0,0.0)"+(t>1?`\n${t-1}:(90.0,0.0)`:"");case"custom":return n.readOnly=!1,n.style.opacity=1,"0:(0.0,0.0)\n9:(180.0,0.0)\n20:(360.0,0.0)"}};e.prototype.onNodeCreated=async function(){i&&i.apply(this,[]);const 
e=this.widgets.find((e=>"easing_mode"==e.name)),n=this.widgets.find((e=>"batch_size"==e.name)),s=this.widgets.find((e=>"scheduler"==e.name));setTimeout((o=>{s.value||(s.value=t(e.value,n.value,s.inputEl))}),1),e.callback=e=>{s.value=t(e,n.value,s.inputEl)},n.callback=n=>{s.value=t(e.value,n,s.inputEl)}}}if(Ue.includes(s)&&(e.prototype.onNodeCreated=async function(){i&&i.apply(this,[]);const e=this.widgets.find((e=>["seed_num","seed"].includes(e.name))),s=this.widgets.find((e=>["control_before_generate","control_after_generate"].includes(e.name)));if("easy seed"==t.name){const t=this.addWidget("button","🎲 Manual Random Seed",null,(t=>{"fixed"!=s.value&&(s.value="fixed"),e.value=Math.floor(Math.random()*P),n.queuePrompt(0,1)}),{serialize:!1});e.linkedWidgets=[t,s]}},e.prototype.onAdded=async function(){o&&o.apply(this,[]);const e=this.widgets.find((e=>["seed_num","seed"].includes(e.name))),t=this.widgets.find((e=>["control_before_generate","control_after_generate"].includes(e.name)));setTimeout((n=>{"control_before_generate"==t.name&&0===e.value&&(e.value=Math.floor(Math.random()*P))}),1)}),"easy convertAnything"==s&&(e.prototype.onNodeCreated=async function(){i&&i.apply(this,[]);const e=this.widgets.find((e=>"output_type"==e.name)),t=t=>{this.outputs[0].type=e.value.toUpperCase(),this.outputs[0].name=e.value,this.outputs[0].label=e.value};setTimeout((e=>t()),10),e.callback=e=>t()}),"easy imageInsetCrop"==s){let t=function(e){const t=e.widgets[0];for(let n=1;n<=4;n++)"Pixels"===t.value?(e.widgets[n].options.step=80,e.widgets[n].options.max=8192):(e.widgets[n].options.step=10,e.widgets[n].options.max=99)};e.prototype.onAdded=async function(e){const n=this.widgets[0];let s=n.callback;n.callback=(...e)=>{t(this),s&&s.apply(n,[...e])},setTimeout((e=>{t(this)}),1)}}if(Ve.includes(s)){const t=e=>{switch(s){case"easy forLoopStart":return 0;case"easy forLoopEnd":return 1}},n=e=>{switch(s){case"easy forLoopStart":return 2;case"easy forLoopEnd":return 0}};e.prototype.onNodeCreated=async function(){if(ze.includes(s)){const e=this.inputs.findIndex((e=>"flow"===e.name)),o=this.outputs.findIndex((e=>"flow"===e.name));if(-1!==e&&(this.inputs[e].shape=5),-1!==o&&(this.outputs[o].shape=5),"easy whileLoopStart"==s||"easy whileLoopEnd"==s)return;this.inputs=this.inputs.filter(((e,n)=>n<=t())),this.outputs=this.outputs.filter(((e,t)=>t<=n())),_e(this)}return null==i?void 0:i.apply(this,arguments)},e.prototype.onConnectionsChange=function(e,o,i,a){var l;if("easy whileLoopStart"!=s&&"easy whileLoopEnd"!=s&&a)if(1==e){let e=this.inputs.every((e=>null!==e.link)),n=this.inputs.filter((e=>!["condition","index","total"].includes(e.name)));if(ze.includes(s)){if(e){if(n.length>=10)return void Re.warn(Z("The maximum number of inputs is 10"));let e=n[n.length-1],t=parseInt(e.name.split("initial_value")[1])+1;if(this.inputs.find((e=>e.name==="initial_value"+t)))return;let s="initial_value"+t,o="value"+t;this.addInput(s,"*"),this.addOutput(o,"*")}else if(!i){const e=t();let n=this.inputs.findLastIndex((e=>e.link));if(o>=e&&(-1===n||o>=n)){let e=this.inputs[o];if(!e.name||["condition","total"].includes(e.name))return;let t=parseInt(e.name.split("initial_value")[1])+1,n=this.inputs.findIndex((e=>e.name==="initial_value"+t)),s=this.outputs.findIndex((e=>e.name==="value"+t));-1!==n&&this.removeInput(n),-1!==s&&this.removeOutput(s)}}}else if(je.includes(s))if(e){if(n.length>=10)return void Re.warn(Z("The maximum number of inputs is 10"));let e=Ye[s]+n.length;this.addInput(e,"*")}else 
i||o==this.inputs.length-2&&this.removeInput(o+1)}else if(2==e){let e=this.outputs.filter((e=>!["flow","index"].includes(e.name))),t=e.every((e=>{var t;return(null==(t=e.links)?void 0:t.length)>0}));if(ze.includes(s))if(t){if(e.length>=10)return void Re.warn(Z("The maximum number of inputs is 10"));let t=e[e.length-1],n=parseInt(t.name.split("value")[1])+1;if(this.inputs.find((e=>e.name==="initial_value"+n)))return;if(this.outputs.find((e=>e.name==="value"+n)))return;let s="initial_value"+n,o="value"+n;this.addInput(s,"*"),this.addOutput(o,"*")}else if(!i){const e=n();let t=a.origin_slot,s=this.outputs.findLastIndex((e=>{var t;return(null==(t=e.links)?void 0:t.length)>0}));if(t>=e&&(-1===s||t>=s)){let e=this.outputs[t];if(!e.name||["flow","index"].includes(e.name))return;let n=parseInt(e.name.split("value")[1])+1,s=this.inputs.findIndex((e=>e.name==="initial_value"+n)),o=this.outputs.findIndex((e=>e.name==="value"+n));if(-1!==s&&(null==(l=this.inputs[s])?void 0:l.link))return;-1!==s&&this.removeInput(s),-1!==o&&this.removeOutput(o)}}}}}},nodeCreated(e){if(e.comfyClass.startsWith("easy ")){if(e.widgets)for(const n of e.widgets){if(!Me.includes(n.name))continue;let t=n.value;He(e,n),Object.defineProperty(n,"value",{get:e=>t,set(s){s!==t&&(t=s,He(e,n))}})}const t=e.comfyClass;if("easy preDetailerFix"==t){const t=e.widgets.find((e=>"customtext"===e.type));if(!t)return;t.dynamicPrompts=!1,t.inputEl.placeholder="wildcard spec: if kept empty, this option will be ignored",t.serializeValue=()=>t.value}if("easy wildcards"==t){const t=e.widgets.find((e=>"text"==e.name));let n=1;Object.defineProperty(e.widgets[n],"value",{set:e=>{if((new Error).stack.includes("inner_value_change")&&"Select the LoRA to add to the text"!=e){let n=e;n.endsWith(".safetensors")&&(n=n.slice(0,-12)),t.value+=``}},get:e=>"Select the LoRA to add to the text"}),Object.defineProperty(e.widgets[n+1],"value",{set:e=>{(new Error).stack.includes("inner_value_change")&&"Select the Wildcard to add to the text"!=e&&(""!=t.value&&(t.value+=", "),t.value+=e)},get:e=>"Select the Wildcard to add to the text"}),e.widgets[n].serializeValue=e=>"Select the LoRA to add to the text",e.widgets[n+1].serializeValue=e=>"Select the Wildcard to add to the text"}if(We.includes(t)){const t=document.createElement("textarea");t.className="comfy-multiline-input",t.readOnly=!0;const n=e.addDOMWidget("info","customtext",t,{getValue:e=>t.value,setValue:e=>t.value=e,serialize:!1});n.inputEl=t,t.addEventListener("input",(()=>{var e;null==(e=n.callback)||e.call(n,n.value)}))}}}});const Xe=LiteGraph.LGraphNode;w.registerExtension({name:"easy bookmark",registerCustomNodes(){class e extends Xe{constructor(){super("🔖"),f(this,"type","easy bookmark"),f(this,"title","🔖"),f(this,"slot_start_y",-20),f(this,"___collapsed_width",0),f(this,"isVirtualNode",!0),f(this,"serialize_widgets",!0),f(this,"keypressBound",null),this.addWidget("text","shortcut_key","1",(e=>{""!==(e=e.trim()[0]||"1")&&(this.title="🔖 "+e)}),{y:8}),this.addWidget("number","zoom",1,(e=>{}),{y:8+LiteGraph.NODE_WIDGET_HEIGHT+4,max:2,min:.5,precision:2}),this.keypressBound=this.onKeypress.bind(this)}get _collapsed_width(){return this.___collapsed_width}set _collapsed_width(e){const t=w.canvas,n=t.canvas.getContext("2d");if(n){const e=n.font;n.font=t.title_text_font,this.___collapsed_width=40+n.measureText(this.title).width,n.font=e}}onAdded(){setTimeout((e=>{const t=this.widgets[0].value;t&&(this.title="🔖 
"+t)}),1),window.addEventListener("keydown",this.keypressBound)}onRemoved(){window.removeEventListener("keydown",this.keypressBound)}onKeypress(e){const t=e.target;["input","textarea"].includes(t.localName)||this.widgets[0]&&e.key.toLocaleLowerCase()===this.widgets[0].value.toLocaleLowerCase()&&this.canvasToBookmark()}canvasToBookmark(){var e,t;const n=w.canvas;(null==(e=null==n?void 0:n.ds)?void 0:e.offset)&&(n.ds.offset[0]=16-this.pos[0],n.ds.offset[1]=40-this.pos[1]),null!=(null==(t=null==n?void 0:n.ds)?void 0:t.scale)&&(n.ds.scale=Number(this.widgets[1].value||1)),n.setDirty(!0,!0)}}LiteGraph.registerNodeType("easy bookmark",Object.assign(e,{title:"Bookmark 🔖"})),e.category="EasyUse/Util"}}),w.registerExtension({name:"Comfy.EasyUse.ChainNode",init(){w.canvas._mousemove_callback=e=>{if(!I("EasyUse.Nodes.ChainGetSet",null,!0))return;((e,t=!1,n={})=>{var s,o,i,a,l;if(0===e.length)return;const r=n.inputX||160,d=n.ouputX||60;if(e.filter((e=>Ie(e))).length>1)return;for(const c of e){let a=0,l=0;const u=n.inputY||10,p=n.outputY||30,h=[];if(c.graph){for(const e of c.inputs??[]){const t=e.link;if(!t)continue;const{origin_id:n,target_slot:s}=Se(t),o=ve(n);if(!o)continue;if(!Ie(o))continue;const i=c.getConnectionPos(!0,s);o.pos=[i[0]-r,i[1]+15+a*u],a+=1,h.push(o),o.flags.collapsed=!0}for(const e of c.outputs??[])if(e.links&&c.graph)for(const t of e.links){const{target_id:e}=Se(t),n=ve(e);if(!n)continue;if(!Ie(n))continue;const o=null==(s=n.outputs)?void 0:s.links;if((null==o?void 0:o.length)>1)return;const i=c.getConnectionPos(!1,0);n.pos=[i[0]+d,i[1]+15+l*p],l+=1,h.push(n),n.flags.collapsed=!0}if(t&&1===e.length){const e=[c,...h];(null==(i=null==(o=c.graph)?void 0:o.list_of_graphcanvas)?void 0:i[0]).selectNodes(e)}}}const u=e[0];if(!u)return;(null==(l=null==(a=u.graph)?void 0:a.list_of_graphcanvas)?void 0:l[0]).setDirty(!0,!0)})(we())};const e=LGraphCanvas.prototype.showLinkMenu;LGraphCanvas.prototype.showLinkMenu=function(t,n){return n.shiftKey?(((e,t=!1)=>{var n,s,o,i,a,l,r,d,u,c;const{type:p}=e;if("*"===p)return;let{origin_id:h,target_id:m,origin_slot:g,target_slot:f}=e,y=ve(h),_=ve(m);if(!y||!_)return!1;if("Reroute"===y.type){let e=0;[y,e]=De(y),h=null==y?void 0:y.id,g=e,void 0!==g&&-1!==g||(g=0)}if("Reroute"===_.type&&(_=Ge(_),m=null==_?void 0:_.id,f=null==_?void 0:_.inputs.findIndex((e=>e.type===p)),void 0!==f&&-1!==f||(f=0)),void 0===h||void 0===m||!y||!_)return!1;if(t&&(Ie(y)||Ie(_)))return!1;let v=Ce((null==(n=_.getInputInfo(f))?void 0:n.name)??p.toLowerCase());v||(v=Ce((null==(o=null==(s=null==y?void 0:y.outputs)?void 0:s[g])?void 0:o.name)??(null==(a=null==(i=null==y?void 0:y.outputs)?void 0:i[g])?void 0:a.type.toString())??v+`_from_${h}_to_${m}`));let w,b=!1,L=!1;if(Ie(y))v=ye(y),L=!0;else{const e=null==(r=null==(l=y.outputs)?void 0:l[g])?void 0:r.links;if(e)for(const t of e){const e=ve((null==(d=Se(t))?void 0:d.target_id)??-1);e&&Ie(e)&&ke(e)&&(v=ye(e),L=!0)}if(!L){for(const e of xe()){if(v!==ye(e)||!ke(e))continue;const t=null==(u=e.inputs[0])?void 0:u.link;(null==(c=Se(t))?void 0:c.origin_id)===y.id?L=!0:b=!0}b&&(v+=`_from_${h}_to_${m}`)}}if(!L){w=LiteGraph.createNode("easy setNode"),w.is_auto_link=!0;const e=y.getConnectionPos(!1,g);w.pos=[e[0]+20,e[1]],w.inputs[0].name=v,w.inputs[0].type=p,w.inputs[0].widget=_.inputs[f].widget,Ne(w,v),Te(w),w.flags.collapsed=!0;let 
t=[];y.widgets?t=Object.values(y.widgets).map((e=>e.value)):y.widgets_values&&(t=JSON.parse(JSON.stringify(y.widgets_values))),y.connect(g,w,0),y.widgets_values=t,"PrimitiveNode"===y.type&&setTimeout((()=>{if(y){y.connect(g,w,0);for(const[e,n]of t.entries())Ne(y,n,e);null!==w&&w.setSize(w.computeSize())}}))}const E=LiteGraph.createNode("easy getNode"),S=_.getConnectionPos(!0,f);E.pos=[S[0]-150,S[1]],E.outputs[0].name=v,E.outputs[0].type=p,E.outputs[0].widget=_.inputs[f].widget,Te(E),Ne(E,v),null===E||(E.flags.collapsed=!0,E.setSize(E.computeSize()),E.connect(0,_,f))})(t),!1):(e.apply(this,[t,n]),!1)}}});const Ze=async()=>{try{const{Running:e,Pending:t}=await b.getQueue();if(e.length>0||t.length>0)return void Re.error(Z("Clean Failed")+":"+Z("Please stop all running tasks before cleaning GPU"));200==(await b.fetchApi("/easyuse/cleangpu",{method:"POST"})).status?Re.success(Z("Clean SuccessFully")):Re.error(Z("Clean Failed"))}catch(e){}};let Ke=[];function Je(e,t,n,s,o){var i=LGraphCanvas.active_canvas,a=i.getCanvasWindow(),l=i.graph;if(l)return function e(t,s){var r=LiteGraph.getNodeTypesCategories(i.filter||l.filter).filter((function(e){return e.startsWith(t)})),d=[];r.map((function(n){if(n){var s=new RegExp("^("+t+")"),o=n.replace(s,"").split("/")[0],i=""===t?o+"/":t+o+"/",a=o;-1!=a.indexOf("::")&&(a=a.split("::")[1]),-1===d.findIndex((function(e){return e.value===i}))&&d.push({value:i,content:a,has_submenu:!0,callback:function(t,n,s,o){e(t.value,o)}})}})),LiteGraph.getNodeTypesInCategory(t.slice(0,-1),i.filter||l.filter).map((function(e){if(!e.skip_list){var t={value:e.type,content:e.title,has_submenu:!1,callback:function(e,t,n,s){var a=s.getFirstEvent();i.graph.beforeChange();var l=LiteGraph.createNode(e.value);l&&(l.pos=i.convertEventToCanvasOffset(a),i.graph.add(l)),o&&o(l),i.graph.afterChange()}};d.push(t)}}));const u=I("EasyUse.ContextMenu.NodesSort",null,!0);""===t&&u&&(d=function(e){let t=[],n=[];return e.forEach((e=>{(null==e?void 0:e.value)&&F.includes(e.value.split("/")[0])?t.push(e):n.push(e)})),[{title:Z("ComfyUI Basic"),is_category_title:!0},...t,{title:Z("Others A~Z"),is_category_title:!0},...n.sort(((e,t)=>e.content.localeCompare(t.content)))]}(d)),new LiteGraph.ContextMenu(d,{event:n,parentMenu:s},a)}("",s),!1}w.registerExtension({name:"Comfy.EasyUse.ContextMenu",async setup(){LGraphCanvas.onMenuAdd=Je;const e=I("EasyUse.ContextMenu.ModelsThumbnailsLimit",null,500),t=await b.fetchApi(`/easyuse/models/thumbnail?limit=${e}`);if(200===t.status){let e=await t.json();Ke=e}else Re.error(Z("Too many thumbnails, have closed the display"));const n=LiteGraph.ContextMenu;LiteGraph.ContextMenu=function(e,t){if(I("EasyUse.ContextMenu.SubDirectories",null,!1)&&(null==t?void 0:t.callback)&&!e.some((e=>"string"!=typeof e))){const s=function(e,t){const n=e,s=[...n],o={},i=[],a=[],l=["ckpt","pt","bin","pth","safetensors"];if((null==e?void 0:e.length)>0){const t=et(e[e.length-1]);if(!l.includes(t))return null}for(const r of n){const e=r.indexOf("/")>-1?"/":"\\",t=r.split(e);if(t.length>1){const n=t.shift();o[n]=o[n]||[],o[n].push(t.join(e))}else"CHOOSE"===r||r.startsWith("DISABLE ")?i.push(r):a.push(r)}if(Object.values(o).length>0){const e=t.callback;t.callback=null;const n=(t,n)=>{["None","无","無","なし"].includes(t.content)?e("None",n):e(s.find((e=>e.endsWith(t.content)),n))},r=(e,t="")=>{const s=t?t+"\\"+Qe(e):Qe(e),o=et(e),i=(new Date).getTime();let a,r="";if(l.includes(o))for(let n=0;n{let n=[],s=[];const i=e.map((e=>{const 
i={},a=e.indexOf("/")>-1?"/":"\\",l=e.split(a);if(l.length>1){const e=l.shift();i[e]=i[e]||[],i[e].push(l.join(a))}if(Object.values(o).length>0){let t=Object.keys(i)[0];t&&i[t]?n.push({key:t,value:i[t][0]}):s.push(r(e,t))}return r(e,t)}));if(n.length>0){let e={};return n.forEach((t=>{e[t.key]=e[t.key]||[],e[t.key].push(t.value)})),[...Object.entries(e).map((e=>({content:e[0],has_submenu:!0,callback:()=>{},submenu:{options:u(e[1],e[0])}}))),...s]}return i};for(const[t,s]of Object.entries(o))d.push({content:t,has_submenu:!0,callback:()=>{},submenu:{options:u(s,t)}});return d.push(...a.map((e=>r(e,"")))),i.length>0&&d.push(...i.map((e=>r(e,"")))),d}return null}(e,t);return s?n.call(this,s,t):n.apply(this,[...arguments])}return t.parentMenu||t.extra||t.scale||t.hasOwnProperty("extra")&&(e.unshift(null),s=window.location.host,["192.168.","10.","127.",/^172\.((1[6-9]|2[0-9]|3[0-1])\.)/].some((e=>"string"==typeof e?s.startsWith(e):e.test(s)))&&e.unshift({content:`${Z("Reboot ComfyUI")}`,callback:e=>(async()=>{if(confirm(Z("Are you sure you'd like to reboot the server?")))try{b.fetchApi("/easyuse/reboot")}catch(e){}})()}),e.unshift({content:`${Z("Cleanup Of VRAM Usage")}`,callback:e=>Ze()})),n.apply(this,[...arguments]);var s},LiteGraph.ContextMenu.prototype=n.prototype,I("EasyUse.ContextMenu.NodesSort",null,!0)&&(LiteGraph.ContextMenu.prototype.addItem=qe)}});const $e=e=>e&&"object"==typeof e&&"image"in e&&e.content;function qe(e,t,n){var s=this;n=n||{};var o=document.createElement("div");o.className="litemenu-entry submenu";var i,a=!1;function l(e){var t=this.value,o=!0;(s.current_submenu&&s.current_submenu.close(e),n.callback)&&(!0===n.callback.call(this,t,n,e,s,n.node)&&(o=!1));if(t){if(t.callback&&!n.ignore_item_callbacks&&!0!==t.disabled)!0===t.callback.call(this,t,n,e,s,n.extra)&&(o=!1);if(t.submenu){if(!t.submenu.options)throw"ContextMenu submenu needs options";new s.constructor(t.submenu.options,{callback:t.submenu.callback,event:e,parentMenu:s,ignore_item_callbacks:t.submenu.ignore_item_callbacks,title:t.submenu.title,extra:t.submenu.extra,autoopen:n.autoopen}),o=!1}}o&&!s.lock&&s.close()}return null===t?o.classList.add("separator"):t.is_category_title?(o.classList.remove("litemenu-entry"),o.classList.remove("submenu"),o.classList.add("litemenu-title"),o.innerHTML=t.title):(o.innerHTML=t&&t.title?t.title:e,o.value=t,t&&(t.disabled&&(a=!0,o.classList.add("disabled")),(t.submenu||t.has_submenu)&&o.classList.add("has_submenu")),"function"==typeof t?(o.dataset.value=e,o.onclick_callback=t):o.dataset.value=t,t.className&&(o.className+=" "+t.className)),o&&$e(t)&&(null==t?void 0:t.image)&&!t.submenu&&(o.textContent+=" *",L("div.pysssss-combo-image",{parent:o,style:{backgroundImage:`url(/pysssss/view/${i=t.image,encodeURIComponent(i).replace(/[!'()*]/g,(e=>`%${e.charCodeAt(0).toString(16).toUpperCase()}`))})`}})),this.root.appendChild(o),a||o.addEventListener("click",l),!a&&n.autoopen&&LiteGraph.pointerListenerAdd(o,"enter",(function(e){var t=this.value;if(!t||!t.has_submenu)return;l.call(this,e)})),o}function Qe(e){return null==e?void 0:e.substring(0,e.lastIndexOf("."))}function et(e){return null==e?void 0:e.substring(e.lastIndexOf(".")+1)}class tt extends E{constructor(){super(),this.element.classList.add("easyuse-model-metadata")}show(e){super.show(L("div",Object.keys(e).map((t=>L("div",[L("label",{textContent:t}),L("span",{textContent:e[t]})])))))}}class nt extends E{constructor(e){super(),this.name=e,this.element.classList.add("easyuse-model-info")}get customNotes(){return 
this.metadata["easyuse.notes"]}set customNotes(e){this.metadata["easyuse.notes"]=e}get hash(){return this.metadata["easyuse.sha256"]}async show(e,t){this.type=e;const n=b.fetchApi("/easyuse/metadata/"+encodeURIComponent(`${e}/${t}`));this.info=L("div",{style:{flex:"auto"}}),this.imgCurrent=0,this.imgList=L("div.easyuse-preview-list",{style:{display:"none"}}),this.imgWrapper=L("div.easyuse-preview",[L("div.easyuse-preview-group",[this.imgList])]),this.main=L("main",{style:{display:"flex"}},[this.imgWrapper,this.info]),this.content=L("div.easyuse-model-content",[L("div.easyuse-model-header",[L("h2",{textContent:this.name})]),this.main]);const s=L("div",{textContent:"ℹ️ Loading...",parent:this.content});super.show(this.content),this.metadata=await(await n).json(),this.viewMetadata.style.cursor=this.viewMetadata.style.opacity="",this.viewMetadata.removeAttribute("disabled"),s.remove(),this.addInfo()}createButtons(){const e=super.createButtons();return this.viewMetadata=L("button",{type:"button",textContent:"View raw metadata",disabled:"disabled",style:{opacity:.5,cursor:"not-allowed"},onclick:e=>{this.metadata&&(new tt).show(this.metadata)}}),e.unshift(this.viewMetadata),e}parseNote(){if(!this.customNotes)return[];let e=[];const t=new RegExp("(\\bhttps?:\\/\\/[^\\s]+)","g");let n,s=0;do{let o;n=t.exec(this.customNotes);let i=0;n?(o=n.index,i=n.index+n[0].length):o=this.customNotes.length;let a=this.customNotes.substring(s,o);a&&(a=a.replaceAll("\n","
"),e.push(L("span",{innerHTML:a}))),n&&e.push(L("a",{href:n[0],textContent:n[0],target:"_blank"})),s=i}while(n);return e}addInfoEntry(e,t){return L("p",{parent:this.info},["string"==typeof e?L("label",{textContent:e+": "}):e,"string"==typeof t?L("span",{textContent:t}):t])}async getCivitaiDetails(){const e=await fetch("https://civitai.com/api/v1/model-versions/by-hash/"+this.hash);if(200===e.status)return await e.json();throw 404===e.status?new Error("Model not found"):new Error(`Error loading info (${e.status}) ${e.statusText}`)}addCivitaiInfo(){const e=this.getCivitaiDetails(),t=L("span",{textContent:"ℹ️ Loading..."});return this.addInfoEntry(L("label",[L("img",{style:{width:"18px",position:"relative",top:"3px",margin:"0 5px 0 0"},src:"https://civitai.com/favicon.ico"}),L("span",{textContent:"Civitai: "})]),t),e.then((e=>{var t,n;this.imgWrapper.style.display="block";let s=this.element.querySelector(".easyuse-model-header");s&&s.replaceChildren(L("h2",{textContent:this.name}),L("div.easyuse-model-header-remark",[L("h5",{textContent:Z("Updated At:")+O(new Date(e.updatedAt),"yyyy/MM/dd")}),L("h5",{textContent:Z("Created At:")+O(new Date(e.updatedAt),"yyyy/MM/dd")})]));let o=null,i=this.parseNote.call(this),a=Z("✏️ Edit"),l=L("div.easyuse-model-detail-textarea",[L("p",(null==i?void 0:i.length)>0?i:{textContent:Z("No notes")})]);if(i&&0!=i.length?l.classList.remove("empty"):l.classList.add("empty"),this.info.replaceChildren(L("div.easyuse-model-detail",[L("div.easyuse-model-detail-head.flex-b",[L("span",Z("Notes")),L("a",{textContent:a,href:"#",style:{fontSize:"12px",float:"right",color:"var(--warning-color)",textDecoration:"none"},onclick:async e=>{if(e.preventDefault(),o){if(o.value!=this.customNotes){toast.showLoading(Z("Saving Notes...")),this.customNotes=o.value;const e=await b.fetchApi("/easyuse/metadata/notes/"+encodeURIComponent(`${this.type}/${this.name}`),{method:"POST",body:this.customNotes});if(toast.hideLoading(),200!==e.status)return toast.error(Z("Saving Failed")),void alert(`Error saving notes (${e.status}) ${e.statusText}`);toast.success(Z("Saving Succeed")),i=this.parseNote.call(this),l.replaceChildren(L("p",(null==i?void 0:i.length)>0?i:{textContent:Z("No notes")})),o.value?l.classList.remove("empty"):l.classList.add("empty")}else l.replaceChildren(L("p",{textContent:Z("No notes")})),l.classList.add("empty");e.target.textContent=a,o.remove(),o=null}else e.target.textContent="💾 Save",o=L("textarea",{placeholder:Z("Type your notes here"),style:{width:"100%",minWidth:"200px",minHeight:"50px",height:"100px"},textContent:this.customNotes}),l.replaceChildren(o),o.focus()}})]),l]),L("div.easyuse-model-detail",[L("div.easyuse-model-detail-head",{textContent:Z("Details")}),L("div.easyuse-model-detail-body",[L("div.easyuse-model-detail-item",[L("div.easyuse-model-detail-item-label",{textContent:Z("Type")}),L("div.easyuse-model-detail-item-value",{textContent:e.model.type})]),L("div.easyuse-model-detail-item",[L("div.easyuse-model-detail-item-label",{textContent:Z("BaseModel")}),L("div.easyuse-model-detail-item-value",{textContent:e.baseModel})]),L("div.easyuse-model-detail-item",[L("div.easyuse-model-detail-item-label",{textContent:Z("Download")}),L("div.easyuse-model-detail-item-value",{textContent:(null==(t=e.stats)?void 0:t.downloadCount)||0})]),L("div.easyuse-model-detail-item",[L("div.easyuse-model-detail-item-label",{textContent:Z("Trained Words")}),L("div.easyuse-model-detail-item-value",{textContent:(null==e?void 
0:e.trainedWords.join(","))||"-"})]),L("div.easyuse-model-detail-item",[L("div.easyuse-model-detail-item-label",{textContent:Z("Source")}),L("div.easyuse-model-detail-item-value",[L("label",[L("img",{style:{width:"14px",position:"relative",top:"3px",margin:"0 5px 0 0"},src:"https://civitai.com/favicon.ico"}),L("a",{href:"https://civitai.com/models/"+e.modelId,textContent:"View "+e.model.name,target:"_blank"})])])])])])),null==(n=e.images)?void 0:n.length){this.imgCurrent=0,this.isSaving=!1,e.images.map((e=>e.url&&this.imgList.appendChild(L("div.easyuse-preview-slide",[L("div.easyuse-preview-slide-content",[L("img",{src:e.url}),L("div.save",{textContent:"Save as preview",onclick:async()=>{if(this.isSaving)return;this.isSaving=!0,toast.showLoading(Z("Saving Preview..."));const t=await(await fetch(e.url)).blob(),n="temp_preview."+new URL(e.url).pathname.split(".")[1],s=new FormData;s.append("image",new File([t],n)),s.append("overwrite","true"),s.append("type","temp");if(200!==(await b.fetchApi("/upload/image",{method:"POST",body:s})).status)return this.isSaving=!1,toast.error(Z("Saving Failed")),toast.hideLoading(),void alert(`Error saving preview (${req.status}) ${req.statusText}`);await b.fetchApi("/easyuse/save/"+encodeURIComponent(`${this.type}/${this.name}`),{method:"POST",body:JSON.stringify({filename:n,type:"temp"}),headers:{"content-type":"application/json"}}).then((e=>{toast.success(Z("Saving Succeed")),toast.hideLoading()})),this.isSaving=!1,app.refreshComboInNodes()}})])]))));let t=this;this.imgDistance=(-660*this.imgCurrent).toString(),this.imgList.style.display="",this.imgList.style.transform="translate3d("+this.imgDistance+"px, 0px, 0px)",this.slides=this.imgList.querySelectorAll(".easyuse-preview-slide"),this.slideLeftButton=L("button.left",{parent:this.imgWrapper,style:{display:e.images.length<=2?"none":"block"},innerHTML:'',onclick:()=>{e.images.length<=2||(t.imgList.classList.remove("no-transition"),0==t.imgCurrent?(t.imgCurrent=e.images.length/2-1,this.slides[this.slides.length-1].style.transform="translate3d("+(-660*(this.imgCurrent+1)).toString()+"px, 0px, 0px)",this.slides[this.slides.length-2].style.transform="translate3d("+(-660*(this.imgCurrent+1)).toString()+"px, 0px, 0px)",t.imgList.style.transform="translate3d(660px, 0px, 0px)",setTimeout((e=>{this.slides[this.slides.length-1].style.transform="translate3d(0px, 0px, 0px)",this.slides[this.slides.length-2].style.transform="translate3d(0px, 0px, 0px)",t.imgDistance=(-660*this.imgCurrent).toString(),t.imgList.style.transform="translate3d("+t.imgDistance+"px, 0px, 0px)",t.imgList.classList.add("no-transition")}),500)):(t.imgCurrent=t.imgCurrent-1,t.imgDistance=(-660*this.imgCurrent).toString(),t.imgList.style.transform="translate3d("+t.imgDistance+"px, 0px, 0px)"))}}),this.slideRightButton=L("button.right",{parent:this.imgWrapper,style:{display:e.images.length<=2?"none":"block"},innerHTML:'',onclick:()=>{if(!(e.images.length<=2))if(t.imgList.classList.remove("no-transition"),t.imgCurrent>=e.images.length/2-1){t.imgCurrent=0;const n=e.images.length/2;this.slides[0].style.transform="translate3d("+(660*n).toString()+"px, 0px, 0px)",this.slides[1].style.transform="translate3d("+(660*n).toString()+"px, 0px, 0px)",t.imgList.style.transform="translate3d("+(-660*n).toString()+"px, 0px, 0px)",setTimeout((e=>{this.slides[0].style.transform="translate3d(0px, 0px, 0px)",this.slides[1].style.transform="translate3d(0px, 0px, 
0px)",t.imgDistance=(-660*this.imgCurrent).toString(),t.imgList.style.transform="translate3d("+t.imgDistance+"px, 0px, 0px)",t.imgList.classList.add("no-transition")}),500)}else t.imgCurrent=t.imgCurrent+1,t.imgDistance=(-660*this.imgCurrent).toString(),t.imgList.style.transform="translate3d("+t.imgDistance+"px, 0px, 0px)"}})}return e.description&&L("div",{parent:this.content,innerHTML:e.description,style:{marginTop:"10px"}}),e})).catch((e=>{this.imgWrapper.style.display="none",t.textContent="⚠️ "+e.message})).finally((e=>{}))}}class st extends nt{async addInfo(){await this.addCivitaiInfo()}}class ot extends nt{getTagFrequency(){if(!this.metadata.ss_tag_frequency)return[];const e=JSON.parse(this.metadata.ss_tag_frequency),t={};for(const n in e){const s=e[n];for(const e in s)e in t?t[e]+=s[e]:t[e]=s[e]}return Object.entries(t).sort(((e,t)=>t[1]-e[1]))}getResolutions(){let e=[];if(this.metadata.ss_bucket_info){const t=JSON.parse(this.metadata.ss_bucket_info);if(null==t?void 0:t.buckets)for(const{resolution:n,count:s}of Object.values(t.buckets))e.push([s,`${n.join("x")} * ${s}`])}e=e.sort(((e,t)=>t[0]-e[0])).map((e=>e[1]));let t=this.metadata.ss_resolution;if(t){const n=t.split(","),s=n[0].replace("(",""),o=n[1].replace(")","");e.push(`${s.trim()}x${o.trim()} (Base res)`)}else(t=this.metadata["modelspec.resolution"])&&e.push(t+" (Base res");return e.length||e.push("⚠️ Unknown"),e}getTagList(e){return e.map((e=>L("li.easyuse-model-tag",{dataset:{tag:e[0]},$:e=>{e.onclick=()=>{e.classList.toggle("easyuse-model-tag--selected")}}},[L("p",{textContent:e[0]}),L("span",{textContent:e[1]})])))}addTags(){let e,t=this.getTagFrequency();if(null==t?void 0:t.length){const n=t.length;let s;n>500&&(t=t.slice(0,500),e=L("p",[L("span",{textContent:"⚠️ Only showing first 500 tags "}),L("a",{href:"#",textContent:`Show all ${n}`,onclick:()=>{s.replaceChildren(...this.getTagList(this.getTagFrequency())),e.remove()}})])),s=L("ol.easyuse-model-tags-list",this.getTagList(t)),this.tags=L("div",[s])}else this.tags=L("p",{textContent:"⚠️ No tag frequency metadata found"});this.content.append(this.tags),e&&this.content.append(e)}async addInfo(){const e=this.addCivitaiInfo();this.addTags();const t=await e;t&&L("div",{parent:this.content,innerHTML:t.description,style:{maxHeight:"250px",overflow:"auto"}})}createButtons(){const e=super.createButtons();function t(e,t){const n=L("textarea",{parent:document.body,style:{position:"fixed"},textContent:t.map((e=>e.dataset.tag)).join(", ")});n.select();try{document.execCommand("copy"),e.target.dataset.text||(e.target.dataset.text=e.target.textContent),e.target.textContent="Copied "+t.length+" tags",setTimeout((()=>{e.target.textContent=e.target.dataset.text}),1e3)}catch(s){prompt("Copy to clipboard: Ctrl+C, Enter",text)}finally{document.body.removeChild(n)}}return e.unshift(L("button",{type:"button",textContent:"Copy Selected",onclick:e=>{t(e,[...this.tags.querySelectorAll(".easyuse-model-tag--selected")])}}),L("button",{type:"button",textContent:"Copy All",onclick:e=>{t(e,[...this.tags.querySelectorAll(".easyuse-model-tag")])}})),e}}const it={pipe:{category:"Easy Pipe",nodes:["easy pipeIn","easy pipeOut","easy pipeEdit","easy pipeEditPrompt","easy pipeBatchIndex"],input:{pipe:"pipe"},output:{pipe:"pipe"},widget:{optional_positive:"optional_positive",optional_negative:"optional_negative"}},loaders:{category:"Easy Loaders",nodes:["easy fullLoader","easy a1111Loader","easy comfyLoader","easy kolorsLoader","easy hunyuanDiTLoader","easy pixArtLoader","easy 
fluxLoader"],input:{optional_lora_stack:"optional_lora_stack",optional_controlnet_stack:"optional_controlnet_stack",positive:"positive",negative:"negative"},output:{pipe:"pipe",model:"model",vae:"vae",clip:null,positive:null,negative:null,latent:null},widget:{ckpt_name:"ckpt_name",vae_name:"vae_name",clip_skip:"clip_skip",lora_name:"lora_name",resolution:"resolution",empty_latent_width:"empty_latent_width",empty_latent_height:"empty_latent_height",positive:"positive",negative:"negative",batch_size:"batch_size",a1111_prompt_style:"a1111_prompt_style"}},preSampling:{category:"Easy PreSampling",nodes:["easy preSampling","easy preSamplingAdvanced","easy preSamplingDynamicCFG","easy preSamplingNoiseIn","easy preSamplingCustom","easy preSamplingLayerDiffusion","easy fullkSampler"],input:{pipe:"pipe",image_to_latent:"image_to_latent",latent:"latent"},output:{pipe:"pipe"},widget:{steps:"steps",cfg:"cfg",cfg_scale_min:"cfg",sampler_name:"sampler_name",scheduler:"scheduler",denoise:"denoise",seed_num:"seed_num",seed:"seed"}},samplers:{category:"Custom Sampler",nodes:["KSamplerSelect","SamplerEulerAncestral","SamplerEulerAncestralCFG++","SamplerLMS","SamplerDPMPP_3M_SDE","SamplerDPMPP_2M_SDE","SamplerDPMPP_SDE","SamplerDPMAdaptative","SamplerLCMUpscale","SamplerTCD","SamplerTCD EulerA"],output:{SAMPLER:"SAMPLER"}},sigmas:{category:"Custom Sigmas",nodes:["BasicScheduler","KarrasScheduler","ExponentialScheduler","PolyexponentialScheduler","VPScheduler","BetaSamplingScheduler","SDTurboScheduler","SplitSigmas","SplitSigmasDenoise","FlipSigmas","AlignYourStepsScheduler","GITSScheduler"],output:{SIGMAS:"SIGMAS"}},kSampler:{category:"Easy kSampler",nodes:["easy kSampler","easy kSamplerTiled","easy kSamplerInpainting","easy kSamplerDownscaleUnet","easy kSamplerLayerDiffusion"],input:{pipe:"pipe",model:"model"},output:{pipe:"pipe",image:"image"},widget:{image_output:"image_output",save_prefix:"save_prefix",link_id:"link_id"}},controlNet:{category:"Easy ControlNet",nodes:["easy controlnetLoader","easy controlnetLoaderADV","easy controlnetLoader++","easy instantIDApply","easy instantIDApplyADV"],input:{pipe:"pipe",image:"image",image_kps:"image_kps",control_net:"control_net",positive:"positive",negative:"negative",mask:"mask"},output:{pipe:"pipe",positive:"positive",negative:"negative"},widget:{control_net_name:"control_net_name",strength:["strength","cn_strength"],scale_soft_weights:["scale_soft_weights","cn_soft_weights"],cn_strength:["strength","cn_strength"],cn_soft_weights:["scale_soft_weights","cn_soft_weights"]}},adapter:{category:"Easy Adapter",nodes:["easy ipadapterApply","easy ipadapterApplyADV","easy ipadapterApplyFaceIDKolors","easy ipadapterStyleComposition","easy ipadapterApplyFromParams","easy pulIDApply","easy pulIDApplyADV"],input:{model:"model",image:"image",image_style:"image",attn_mask:"attn_mask",optional_ipadapter:"optional_ipadapter"},output:{model:"model",tiles:"tiles",masks:"masks",ipadapter:"ipadapter"},widget:{preset:"preset",lora_strength:"lora_strength",provider:"provider",weight:"weight",weight_faceidv2:"weight_faceidv2",start_at:"start_at",end_at:"end_at",cache_mode:"cache_mode",use_tiled:"use_tiled",insightface:"insightface",pulid_file:"pulid_file"}},positive:{category:"Easy Positive",nodes:["easy positive","easy wildcards"],input:{},output:{text:"positive",positive:"text"},widget:{text:"positive",positive:"text"}},loadImage:{category:"Easy LoadImage",nodes:["easy loadImageBase64","easy loadImage","easy 
loadImageMask"],input:{pipe:"pipe",image:"image",mask:"mask"},output:{IMAGE:"IMAGE",MASK:"MASK"},widget:{image:"image",base64_data:"base64_data",channel:"channel"}},saveImage:{category:"Save/Preview Image",nodes:["SaveImage","PreviewImage"]},inPaint:{category:"Easy Inpaint",nodes:["easy applyBrushNet","easy applyPowerPaint","easy applyInpaint"],input:{},output:{pipe:"pipe"},widget:{dtype:"dtype",fitting:"fitting",function:"function",scale:"scale",start_at:"start_at",end_at:"end_at"}},persona:{category:"LLM Party Persona",nodes:["load_persona","classify_persona","classify_persona_plus","custom_persona","translate_persona","flux_persona"],input:{file_content:"file_content"},output:{system_prompt:"system_prompt"},widget:{is_enable:"is_enable"}},llmModelLoader:{category:"LLM Model Loader",nodes:["LLM_api_loader","genai_api_loader","LLM_local_loader"],output:{model:"model"}},llmModelChain:{category:"LLM Model Chain",nodes:["LLM","LLM_local"],input:{model:"model",image:"images",images:"image",extra_parameters:"extra_parameters",system_prompt_input:"system_prompt_input",user_prompt_input:"user_prompt_input",tools:"tools",file_content:"file_content"},output:{assistant_response:"assistant_response",history:"history",tool:"tool",image:"image"},widget:{system_prompt:"system_prompt",user_prompt:"user_prompt",temperature:"temperature",is_memory:"is_memory",is_tools_in_sys_prompt:"is_tools_in_sys_prompt",max_length:"max_length",main_brain:"main_brain",conversation_rounds:"conversation_rounds",history_record:"history_record",is_enable:"is_enable"}},maskModify:{category:"Mask Modify",nodes:["CropMask","ThresholdMask","GrowMask","FeatherMask","LayerMask: MaskGrain","LayerMask: MaskEdgeUltraDetail","LayerMask: MaskEdgeUltraDetail V2"],input:{mask:"mask"},output:{MASK:"MASK",mask:"mask",image:"image"}},maskModifyWAS:{category:"Mask Modify (WAS)",nodes:["Mask Dilate Region","Mask Gaussian Region"],input:{masks:"masks"},output:{MASKS:"MASKS"}}};function at(e,t,n){return function(){!function(e,t,n){var s;const o=LiteGraph.createNode(t);if(o){if(w.graph.add(o),o.pos=e.pos.slice(),o.size=e.size.slice(),(null==(s=e.widgets)?void 0:s.length)>0&&e.widgets.forEach((e=>{var t,s,i;if(null==(s=null==(t=it[n])?void 0:t.widget)?void 0:s[e.name]){const t=it[n].widget[e.name];if(t&&o.widgets){const n=(i=t,o.widgets.find((e=>"object"==typeof i?i.includes(e.name):e.name===i)));n&&(n.value=e.value,"seed_num"==e.name&&(n.linkedWidgets[0].value=e.linkedWidgets[0].value),"converted-widget"==e.type&&ht(o,n,e))}}})),e.inputs&&e.inputs.forEach(((t,s)=>{var i,a,l;if(t&&t.link&&(null==(a=null==(i=it[n])?void 0:i.input)?void 0:a[t.name])){const s=null==(l=it[n])?void 0:l.input[t.name];if(null===s)return;const i=o.findInputSlot(s);if(-1!==i){const n=e.graph.links[t.link];if(n){const t=e.graph.getNodeById(n.origin_id);t&&t.connect(n.origin_slot,o,i)}}}})),e.outputs&&e.outputs.forEach(((t,s)=>{var i,a;if(t&&t.links&&(null==(a=null==(i=it[n])?void 0:i.output)?void 0:a[t.name])){const s=it[n].output[t.name];if(null===s)return;const i=o.findOutputSlot(s);-1!==i&&t.links.forEach((t=>{const n=e.graph.links[t];if(n){const t=e.graph.getNodeById(n.target_id);t&&o.connect(i,t,n.target_slot)}}))}})),w.graph.remove(e),"easy fullkSampler"==o.type){const e=o.outputs[0].links;if(e&&e[0]){const t=w.graph._nodes.find((t=>t.inputs&&t.inputs[0]&&t.inputs[0].link==e[0]));t&&w.graph.remove(t)}}else if(it.preSampling.nodes.includes(o.type)){const e=o.outputs[0].links;if(!e||!e[0]){const e=LiteGraph.createNode("easy 
kSampler");w.graph.add(e),e.pos=o.pos.slice(),e.pos[0]=e.pos[0]+o.size[0]+20;const t=o.findInputSlot("pipe");-1!==t&&o&&o.connect(0,e,t)}}o.setSize([o.size[0],o.computeSize()[1]])}}(e,t,n)}}const lt=(e,t)=>{const n=e.prototype.getExtraMenuOptions;e.prototype.getExtraMenuOptions=function(){const e=n.apply(this,arguments);return t.apply(this,arguments),e}},rt=(e,t,n,s,o=!0)=>{lt(s,(function(s,i){i.unshift({content:e,has_submenu:o,callback:(e,s,o,i,a)=>dt(e,s,o,i,a,t,n)}),"loaders"==t&&(i.unshift({content:Z("💎 View Lora Info..."),callback:(e,t,n,s,o)=>{let i=o.widgets.find((e=>"lora_name"==e.name)).value;i&&"None"!=i&&new ot(i).show("loras",i)}}),i.unshift({content:Z("💎 View Checkpoint Info..."),callback:(e,t,n,s,o)=>{let i=o.widgets[0].value;i&&"None"!=i&&new st(i).show("checkpoints",i)}}))}))},dt=(e,t,n,s,o,i,a)=>{const l=[];return a.map((e=>{o.type!==e&&l.push({content:`${e}`,callback:at(o,e,i)})})),new LiteGraph.ContextMenu(l,{event:n,callback:null,parentMenu:s,node:o}),!1},ut="converted-widget",ct=Symbol();function pt(e,t,n=""){if(t.origType=t.type,t.origComputeSize=t.computeSize,t.origSerializeValue=t.serializeValue,t.computeSize=()=>[0,-4],t.type=ut+n,t.serializeValue=()=>{if(!e.inputs)return;let n=e.inputs.find((e=>{var n;return(null==(n=e.widget)?void 0:n.name)===t.name}));return n&&n.link?t.origSerializeValue?t.origSerializeValue():t.value:void 0},t.linkedWidgets)for(const s of t.linkedWidgets)pt(e,s,":"+t.name)}function ht(e,t,n){pt(e,t);const{type:s}=function(e){let t=e[0];t instanceof Array&&(t="COMBO");return{type:t}}(n),o=e.size;t.options&&t.options.forceInput||e.addInput(t.name,s,{widget:{name:t.name,[ct]:()=>n}});for(const i of e.widgets)i.last_y+=LiteGraph.NODE_SLOT_HEIGHT;e.setSize([Math.max(o[0],e.size[0]),Math.max(o[1],e.size[1])])}const mt=function(e){var t,n,s,o;const i=e.constructor.type,a=e.properties.origVals||{},l=a.title||e.title,r=a.color||e.color,d=a.bgcolor||e.bgcolor,u=e,c={size:[...e.size],color:r,bgcolor:d,pos:[...e.pos]};let p=[],h=[];if(e.inputs)for(const y of e.inputs)if(y.link){const t=y.name,n=e.findInputSlot(t),s=e.getInputNode(n),o=e.getInputLink(n);p.push([o.origin_slot,s,t])}if(e.outputs)for(const y of e.outputs)if(y.links){const e=y.name;for(const t of y.links){const n=graph.links[t],s=graph._nodes_by_id[n.target_id];h.push([e,s,n.target_slot])}}w.graph.remove(e);const m=w.graph.add(LiteGraph.createNode(i,l,c));function g(){if(u.widgets)for(let e of u.widgets)if("converted-widget"===e.type){const t=m.widgets.find((t=>t.name===e.name));for(let n of u.inputs)n.name===e.name&&ht(m,t,n.widget)}for(let e of p){const[t,n,s]=e;n.connect(t,m.id,s)}for(let e of h){const[t,n,s]=e;m.connect(t,n,s)}}let f=u.widgets_values;if(!f&&(null==(t=m.widgets)?void 0:t.length)>0)return m.widgets.forEach(((e,t)=>{const n=u.widgets[t];e.name===n.name&&e.type===n.type&&(e.value=n.value)})),void g();if(f){let e=function(e,t){var n,s,o,i,a,l;if(!0===e||!1===e){if((null==(n=t.options)?void 0:n.on)&&(null==(s=t.options)?void 0:s.off))return{value:e,pass:!0}}else if("number"==typeof e){if((null==(o=t.options)?void 0:o.min)<=e&&e<=(null==(i=t.options)?void 0:i.max))return{value:e,pass:!0}}else{if(null==(l=null==(a=t.options)?void 0:a.values)?void 0:l.includes(e))return{value:e,pass:!0};if(t.inputEl&&"string"==typeof e)return{value:e,pass:!0}}return{value:t.value,pass:!1}},t=!1;const i=(null==f?void 0:f.length)<=(null==(n=m.widgets)?void 0:n.length);let a=i?0:f.length-1;const l=n=>{var s;const o=u.widgets[n];let 
l=m.widgets[n];if(l.name===o.name&&l.type===o.type){for(;(i?a=0)&&!t;){let{value:t,pass:n}=e(f[a],l);if(n&&null!==t){l.value=t;break}a+=i?1:-1}a++,i||(a=f.length-((null==(s=m.widgets)?void 0:s.length)-1-n))}};if(i&&(null==(s=m.widgets)?void 0:s.length)>0)for(let n=0;n0)for(let n=m.widgets.length-1;n>=0;n--)l(n)}g()};w.registerExtension({name:"Comfy.EasyUse.ExtraMenu",async beforeRegisterNodeDef(e,t,n){lt(e,(function(e,n){n.unshift({content:Z("🔃 Reload Node"),callback:(e,t,n,s,o)=>{let i=LGraphCanvas.active_canvas;if(!i.selected_nodes||Object.keys(i.selected_nodes).length<=1)mt(o);else for(let a in i.selected_nodes)mt(i.selected_nodes[a])}}),"easy ckptNames"==t.name&&n.unshift({content:Z("💎 View Checkpoint Info..."),callback:(e,t,n,s,o)=>{o.widgets[0].value}})}));for(const s in it)it[s].nodes.includes(t.name)&&rt(`↪️ Swap ${it[s].category}`,s,it[s].nodes,e)}});const gt=LiteGraph.LGraphNode,ft="➡️";w.registerExtension({name:"easy setNode",registerCustomNodes(){class e extends gt{constructor(t){super("Set"),f(this,"defaultVisibility",!0),f(this,"serialize_widgets",!0),this.properties||(this.properties={previousName:""}),this.properties.showOutputText=e.defaultVisibility;const n=this;n.color=LGraphCanvas.node_colors.blue.color,this.addWidget("text","Constant","",((e,t,s,o,i)=>{n.validateName(n.graph),""!==this.widgets[0].value&&(this.title=ft+this.widgets[0].value),this.update(),this.properties.previousName=this.widgets[0].value}),{}),this.addInput("*","*"),this.onConnectionsChange=function(e,t,s,o,i){if(1!=e||s||(this.inputs[t].type="*",this.inputs[t].name="*",this.title="Set"),o&&n.graph&&1==e&&s){const e=n.graph._nodes.find((e=>e.id==o.origin_id)).outputs[o.origin_slot],t=e.type,s=n.is_auto_link?this.widgets[0].value:e.name;"Set"===this.title&&(this.title=ft+s,this.widgets[0].value=s),"*"===this.widgets[0].value&&(this.widgets[0].value=s),this.validateName(n.graph),this.inputs[0].type=t,this.inputs[0].name=s,setTimeout((e=>{this.title=ft+this.widgets[0].value}),1)}this.update()},this.validateName=function(e){let t=n.widgets[0].value;if(""!=t){let s=0,o=[];do{o=e._nodes.filter((e=>e!=this&&("easy setNode"==e.type&&e.widgets[0].value===t))),o.length>0&&(t=n.widgets[0].value+s),s++}while(o.length>0);n.widgets[0].value=t,this.update()}},this.clone=function(){const t=e.prototype.clone.apply(this);return t.inputs[0].name="*",t.inputs[0].type="*",t.properties.previousName="",t.size=t.computeSize(),t},this.onAdded=function(e){this.validateName(e)},this.update=function(){if(n.graph){this.findGetters(n.graph).forEach((e=>{e.setType(this.inputs[0].type)})),this.widgets[0].value&&this.findGetters(n.graph,!0).forEach((e=>{e.setName(this.widgets[0].value)}));n.graph._nodes.filter((e=>"easy getNode"==e.type)).forEach((e=>{e.setComboValues&&e.setComboValues()}))}},this.findGetters=function(e,t){const n=t?this.properties.previousName:this.widgets[0].value;return e._nodes.filter((e=>"easy getNode"==e.type&&e.widgets[0].value===n&&""!=n))},this.isVirtualNode=!0}onRemoved(){this.graph._nodes.filter((e=>"easy getNode"==e.type)).forEach((e=>{e.setComboValues&&e.setComboValues([this])}))}}LiteGraph.registerNodeType("easy setNode",Object.assign(e,{title:"Set"})),e.category="EasyUse/Util"}}),w.registerExtension({name:"easy getNode",registerCustomNodes(){class e extends gt{constructor(t){super("Get"),f(this,"defaultVisibility",!0),f(this,"serialize_widgets",!0),this.properties||(this.properties={}),this.properties.showOutputText=e.defaultVisibility;const 
n=this;n.color=LGraphCanvas.node_colors.blue.color,this.addWidget("combo","Constant","",(e=>{this.onRename()}),{values:()=>n.graph._nodes.filter((e=>"easy setNode"==e.type)).map((e=>e.widgets[0].value)).sort()}),this.addOutput("*","*"),this.onConnectionsChange=function(e,t,n,s,o){this.validateLinks(),2!=e||n?(this.onRename(),setTimeout((e=>{this.title="⬅️"+this.widgets[0].value}),1)):(this.outputs[t].type="*",this.outputs[t].name="*",this.title="Get")},this.setName=function(e){n.widgets[0].value=e,n.onRename(),n.serialize()},this.onRename=function(e=0){const t=this.findSetter(n.graph);if(t){const n=t.inputs[0].type,s=t.inputs[0].name;this.setType(n,s),this.outputs[e].type=n,this.outputs[e].name=s,this.title="⬅️"+t.widgets[0].value}else this.setType("*","*"),this.outputs[e].type="*",this.outputs[e].name="*"},this.clone=function(){const t=e.prototype.clone.apply(this);return t.size=t.computeSize(),t},this.validateLinks=function(){"*"!=this.outputs[0].type&&this.outputs[0].links&&this.outputs[0].links.forEach((e=>{const t=n.graph.links[e];t&&t.type!=this.outputs[0].type&&"*"!=t.type&&n.graph.removeLink(e)}))},this.setType=function(e,t){this.outputs[0].name=t,this.outputs[0].type=e,this.validateLinks()},this.findSetter=function(e){const t=this.widgets[0].value;return e._nodes.find((e=>"easy setNode"==e.type&&e.widgets[0].value===t&&""!=t))},this.isVirtualNode=!0}getInputLink(e){const t=this.findSetter(this.graph);if(t){const n=t.inputs[e];return this.graph.links[n.link]}throw new Error("No setter found for "+this.widgets[0].value+"("+this.type+")")}onAdded(e){}}LiteGraph.registerNodeType("easy getNode",Object.assign(e,{title:"Get"})),e.category="EasyUse/Util"}}),b.addEventListener("easyuse-global-seed",(function(e){let t=app.graph._nodes_by_id;for(let n in t){let s=t[n];if("easy globalSeed"==s.type){if(s.widgets){const t=s.widgets.find((e=>"value"==e.name));s.widgets.find((e=>"last_seed"==e.name)).value=t.value,t.value=e.detail.value}}else if(s.widgets){const t=s.widgets.find((e=>"seed_num"==e.name||"seed"==e.name||"noise_seed"==e.name));t&&null!=e.detail.seed_map[s.id]&&(t.value=e.detail.seed_map[s.id])}}}));const yt=b.queuePrompt;b.queuePrompt=async function(e,{output:t,workflow:n}){n.seed_widgets={};for(let s in app.graph._nodes_by_id){let e=app.graph._nodes_by_id[s].widgets;if(e)for(let t in e)"seed_num"!=e[t].name&&"seed"!=e[t].name&&"noise_seed"!=e[t].name||"converted-widget"==e[t].type||(n.seed_widgets[s]=parseInt(t))}return await yt.call(b,e,{output:t,workflow:n})};const _t=["easy imageSave","easy fullkSampler","easy kSampler","easy kSamplerTiled","easy kSamplerInpainting","easy kSamplerDownscaleUnet","easy kSamplerSDTurbo","easy detailerFix"];w.registerExtension({name:"Comfy.EasyUse.SaveImageExtraOutput",async beforeRegisterNodeDef(e,t,n){if(_t.includes(t.name)){const t=e.prototype.onNodeCreated;e.prototype.onNodeCreated=function(){const e=t?t.apply(this,arguments):void 0,s=this.widgets.find((e=>"filename_prefix"===e.name||"save_prefix"===e.name));return s.serializeValue=()=>C(n,s.value),e}}else{const t=e.prototype.onNodeCreated;e.prototype.onNodeCreated=function(){const e=t?t.apply(this,arguments):void 0;return this.properties&&"Node name for S&R"in this.properties||this.addProperty("Node name for S&R",this.constructor.type,"string"),e}}}});const vt=["easy wildcards","easy positive","easy negative","easy stylesSelector","easy promptConcat","easy promptReplace"],wt=["easy preSampling","easy preSamplingAdvanced","easy preSamplingNoiseIn","easy preSamplingCustom","easy 
preSamplingDynamicCFG","easy preSamplingSdTurbo","easy preSamplingLayerDiffusion"],bt=["easy kSampler","easy kSamplerTiled","easy kSamplerInpainting","easy kSamplerDownscaleUnet","easy kSamplerSDTurbo"],Lt=["easy controlnetLoader","easy controlnetLoaderADV"],Et=["easy instantIDApply","easy instantIDApplyADV"],St=["easy ipadapterApply","easy ipadapterApplyADV","easy ipadapterApplyFaceIDKolors","easy ipadapterStyleComposition"],Ct=["easy pipeIn","easy pipeOut","easy pipeEdit"],At=["easy XYPlot","easy XYPlotAdvanced"],kt=["easy setNode"],It=["Reroute","RescaleCFG","LoraLoaderModelOnly","LoraLoader","FreeU","FreeU_v2",...St,...kt],xt={"easy seed":{from:{INT:["Reroute",...wt,"easy fullkSampler"]}},"easy positive":{from:{STRING:["Reroute",...vt]}},"easy negative":{from:{STRING:["Reroute",...vt]}},"easy wildcards":{from:{STRING:["Reroute","easy showAnything",...vt]}},"easy stylesSelector":{from:{STRING:["Reroute","easy showAnything",...vt]}},"easy promptConcat":{from:{STRING:["Reroute","easy showAnything",...vt]}},"easy promptReplace":{from:{STRING:["Reroute","easy showAnything",...vt]}},"easy fullLoader":{from:{PIPE_LINE:["Reroute",...wt,"easy fullkSampler",...Ct,...kt],MODEL:It},to:{STRING:["Reroute",...vt]}},"easy a1111Loader":{from:{PIPE_LINE:["Reroute",...wt,...Lt,...Et,...Ct,...kt],MODEL:It},to:{STRING:["Reroute",...vt]}},"easy comfyLoader":{from:{PIPE_LINE:["Reroute",...wt,...Lt,...Et,...Ct,...kt],MODEL:It},to:{STRING:["Reroute",...vt]}},"easy hunyuanDiTLoader":{from:{PIPE_LINE:["Reroute",...wt,...Lt,...Et,...Ct,...kt],MODEL:It},to:{STRING:["Reroute",...vt]}},"easy kolorsLoader":{from:{PIPE_LINE:["Reroute",...wt,...Lt,...Et,...Ct,...kt],MODEL:It},to:{STRING:["Reroute",...vt]}},"easy pixArtLoader":{from:{PIPE_LINE:["Reroute",...wt,...Lt,...Et,...Ct,...kt],MODEL:It},to:{STRING:["Reroute",...vt]}},"easy svdLoader":{from:{PIPE_LINE:["Reroute","easy preSampling","easy preSamplingAdvanced","easy preSamplingDynamicCFG",...Ct,...kt],MODEL:It},to:{STRING:["Reroute",...vt]}},"easy zero123Loader":{from:{PIPE_LINE:["Reroute","easy preSampling","easy preSamplingAdvanced","easy preSamplingDynamicCFG",...Ct,...kt],MODEL:It},to:{STRING:["Reroute",...vt]}},"easy sv3dLoader":{from:{PIPE_LINE:["Reroute","easy preSampling","easy preSamplingAdvanced","easy preSamplingDynamicCFG",...Ct,...kt],MODEL:It},to:{STRING:["Reroute",...vt]}},"easy preSampling":{from:{PIPE_LINE:["Reroute",...bt,...Ct,...Lt,...At,...kt]}},"easy preSamplingAdvanced":{from:{PIPE_LINE:["Reroute",...bt,...Ct,...Lt,...At,...kt]}},"easy preSamplingDynamicCFG":{from:{PIPE_LINE:["Reroute",...bt,...Ct,...Lt,...At,...kt]}},"easy preSamplingCustom":{from:{PIPE_LINE:["Reroute",...bt,...Ct,...Lt,...At,...kt]}},"easy preSamplingLayerDiffusion":{from:{PIPE_LINE:["Reroute","easy kSamplerLayerDiffusion",...bt,...Ct,...Lt,...At,...kt]}},"easy preSamplingNoiseIn":{from:{PIPE_LINE:["Reroute",...bt,...Ct,...Lt,...At,...kt]}},"easy fullkSampler":{from:{PIPE_LINE:["Reroute",...Ct.reverse(),"easy preDetailerFix","easy preMaskDetailerFix",...wt,...kt]}},"easy kSampler":{from:{PIPE_LINE:["Reroute",...Ct.reverse(),"easy preDetailerFix","easy preMaskDetailerFix","easy hiresFix",...wt,...kt]}},"easy controlnetLoader":{from:{PIPE_LINE:["Reroute",...wt,...Lt,...Et,...Ct,...kt]}},"easy controlnetLoaderADV":{from:{PIPE_LINE:["Reroute",...wt,...Lt,...Et,...Ct,...kt]}},"easy instantIDApply":{from:{PIPE_LINE:["Reroute",...wt,...Lt,...Et,...Ct,...kt],MODEL:It},to:{COMBO:["Reroute","easy promptLine"]}},"easy 
instantIDApplyADV":{from:{PIPE_LINE:["Reroute",...wt,...Lt,...Et,...Ct,...kt],MODEL:It},to:{COMBO:["Reroute","easy promptLine"]}},"easy ipadapterApply":{to:{COMBO:["Reroute","easy promptLine"]}},"easy ipadapterApplyADV":{to:{STRING:["Reroute","easy sliderControl",...vt],COMBO:["Reroute","easy promptLine"]}},"easy ipadapterStyleComposition":{to:{COMBO:["Reroute","easy promptLine"]}},"easy preDetailerFix":{from:{PIPE_LINE:["Reroute","easy detailerFix",...Ct,...kt]},to:{PIPE_LINE:["Reroute","easy ultralyticsDetectorPipe","easy samLoaderPipe","easy kSampler","easy fullkSampler"]}},"easy preMaskDetailerFix":{from:{PIPE_LINE:["Reroute","easy detailerFix",...Ct,...kt]}},"easy samLoaderPipe":{from:{PIPE_LINE:["Reroute","easy preDetailerFix",...Ct,...kt]}},"easy ultralyticsDetectorPipe":{from:{PIPE_LINE:["Reroute","easy preDetailerFix",...Ct,...kt]}},"easy cascadeLoader":{from:{PIPE_LINE:["Reroute","easy fullCascadeKSampler","easy preSamplingCascade",...Lt,...Ct,...kt],MODEL:It.filter((e=>!St.includes(e)))}},"easy fullCascadeKSampler":{from:{PIPE_LINE:["Reroute","easy preSampling","easy preSamplingAdvanced",...Ct,...kt]}},"easy preSamplingCascade":{from:{PIPE_LINE:["Reroute","easy cascadeKSampler",...Ct,...kt]}},"easy cascadeKSampler":{from:{PIPE_LINE:["Reroute","easy preSampling","easy preSamplingAdvanced",...Ct,...kt]}}};w.registerExtension({name:"Comfy.EasyUse.Suggestions",async setup(e){LGraphCanvas.prototype.createDefaultNodeForSlot=function(e){e=e||{};var t,n=Object.assign({nodeFrom:null,slotFrom:null,nodeTo:null,slotTo:null,position:[],nodeType:null,posAdd:[0,0],posSizeFix:[0,0]},e),s=n.nodeFrom&&null!==n.slotFrom,o=!s&&n.nodeTo&&null!==n.slotTo;if(!s&&!o)return!1;if(!n.nodeType)return!1;var i=s?n.nodeFrom:n.nodeTo,a=s?n.slotFrom:n.slotTo,l=i.type,r=!1;switch(typeof a){case"string":r=s?i.findOutputSlot(a,!1):i.findInputSlot(a,!1),a=s?i.outputs[a]:i.inputs[a];break;case"object":r=s?i.findOutputSlot(a.name):i.findInputSlot(a.name);break;case"number":r=a,a=s?i.outputs[a]:i.inputs[a];break;default:return!1}var d=a.type==LiteGraph.EVENT?"_event_":a.type,u=s?LiteGraph.slot_types_default_out:LiteGraph.slot_types_default_in;if(u&&u[d]){a.link;let e=!1;const o=s?"from":"to";if(xt[l]&&xt[l][o]&&(null==(t=xt[l][o][d])?void 0:t.length)>0){for(var c in xt[l][o][d])if(n.nodeType==xt[l][o][d][c]||"AUTO"==n.nodeType){e=xt[l][o][d][c];break}}else if("object"==typeof u[d]||"array"==typeof u[d]){for(var c in u[d])if(n.nodeType==u[d][c]||"AUTO"==n.nodeType){e=u[d][c];break}}else n.nodeType!=u[d]&&"AUTO"!=n.nodeType||(e=u[d]);if(e){var p=!1;"object"==typeof e&&e.node&&(p=e,e=e.node);var h=LiteGraph.createNode(e);if(h){if(p){if(p.properties)for(var m in p.properties)h.addProperty(m,p.properties[m]);if(p.inputs)for(var m in h.inputs=[],p.inputs)h.addOutput(p.inputs[m][0],p.inputs[m][1]);if(p.outputs)for(var m in h.outputs=[],p.outputs)h.addOutput(p.outputs[m][0],p.outputs[m][1]);p.title&&(h.title=p.title),p.json&&h.configure(p.json)}return this.graph.add(h),h.pos=[n.position[0]+n.posAdd[0]+(n.posSizeFix[0]?n.posSizeFix[0]*h.size[0]:0),n.position[1]+n.posAdd[1]+(n.posSizeFix[1]?n.posSizeFix[1]*h.size[1]:0)],s?n.nodeFrom.connectByType(r,h,d):n.nodeTo.connectByTypeOutput(r,h,d),!0}}}return!1},LGraphCanvas.prototype.showConnectionMenu=function(e){e=e||{};var t,n=Object.assign({nodeFrom:null,slotFrom:null,nodeTo:null,slotTo:null,e:null},e),s=this,o=n.nodeFrom&&n.slotFrom,i=!o&&n.nodeTo&&n.slotTo;if(!o&&!i)return!1;var a=o?n.nodeFrom:n.nodeTo,l=o?n.slotFrom:n.slotTo,r=!1;switch(typeof 
l){case"string":r=o?a.findOutputSlot(l,!1):a.findInputSlot(l,!1),l=o?a.outputs[l]:a.inputs[l];break;case"object":r=o?a.findOutputSlot(l.name):a.findInputSlot(l.name);break;case"number":r=l,l=o?a.outputs[l]:a.inputs[l];break;default:return!1}var d=["Add Node",null];s.allow_searchbox&&(d.push("Search"),d.push(null));var u=l.type==LiteGraph.EVENT?"_event_":l.type,c=o?LiteGraph.slot_types_default_out:LiteGraph.slot_types_default_in,p=a.type;if(c&&c[u]){const e=o?"from":"to";if(xt[p]&&xt[p][e]&&(null==(t=xt[p][e][u])?void 0:t.length)>0)for(var h in xt[p][e][u])d.push(xt[p][e][u][h]);else if("object"==typeof c[u]||"array"==typeof c[u])for(var h in c[u])d.push(c[u][h]);else d.push(c[u])}var m=new LiteGraph.ContextMenu(d,{event:n.e,title:(l&&""!=l.name?l.name+(u?" | ":""):"")+(l&&u?u:""),callback:function(e,t,i){switch(e){case"Add Node":LGraphCanvas.onMenuAdd(null,null,i,m,(function(e){o?n.nodeFrom.connectByType(r,e,u):n.nodeTo.connectByTypeOutput(r,e,u)}));break;case"Search":o?s.showSearchBox(i,{node_from:n.nodeFrom,slot_from:l,type_filter_in:u}):s.showSearchBox(i,{node_to:n.nodeTo,slot_from:l,type_filter_out:u});break;default:s.createDefaultNodeForSlot(Object.assign(n,{position:[n.e.canvasX,n.e.canvasY],nodeType:e}))}}});return!1}}}),w.registerExtension({name:"Comfy.EasyUse.TimeTaken",setup(){const e=new Map;let t=0;b.addEventListener("executing",(n=>{if(!I("EasyUse.Nodes.Runtime",null,!0))return;const s=(null==n?void 0:n.node)||(null==n?void 0:n.detail)||null,o=ve(s);o&&(o.executionDuration="");const i=e.get(t);if(e.delete(t),t&&i){const e=Date.now()-i,n=ve(t);n&&(n.executionDuration=`${(e/1e3).toFixed(2)}${Z("s")}`)}t=s,e.set(s,Date.now())}))},beforeRegisterNodeDef(e,t){const n=e.prototype.onDrawForeground;e.prototype.onDrawForeground=function(...e){const[t]=e;return function(e,t){if(!t)return;e.save(),e.fillStyle=LiteGraph.NODE_DEFAULT_BGCOLOR,function(e,t,n,s,o,i){e.beginPath(),e.moveTo(t+i,n),e.lineTo(t+s-i,n),e.arcTo(t+s,n,t+s,n+i,i),e.lineTo(t+s,n+o-i),e.arcTo(t+s,n+o,t+s-i,n+o,i),e.lineTo(t+i,n+o),e.arcTo(t,n+o,t,n+o-i,i),e.lineTo(t,n+i),e.arcTo(t,n,t+i,n,i),e.closePath()}(e,0,-LiteGraph.NODE_TITLE_HEIGHT-20,e.measureText(t).width+10,LiteGraph.NODE_TITLE_HEIGHT-10,4),e.fill(),function(e,t,n,s,o="#000",i=12,a="Inter"){e.font=`${i}px ${a}`,e.fillStyle=o,e.fillText(t,n,s)}(e,t,8,-LiteGraph.NODE_TITLE_HEIGHT-6,LiteGraph.NODE_TITLE_COLOR),e.restore()}(t,this.executionDuration||""),null==n?void 0:n.apply(this,e)}}});let Nt=null;w.registerExtension({name:"Comfy.EasyUse.HotKeys",setup(){if(void 0!==_){_("up,down,left,right",(function(e,t){var n,s,o,i,a,l,r,d,u,c,p,h,m,g,f;e.preventDefault();if(!I("EasyUse.Hotkeys.JumpNearestNodes",null,!0))return;const y=we();if(0===y.length)return;const _=y[0];switch(t.key){case"up":case"left":let e=null;if(Ae(_)){const e=null==(n=_.widgets_values)?void 0:n[0],t=null==(s=_.graph)?void 0:s._nodes,o=null==t?void 0:t.find((t=>{var n;if(ke(t)){if((null==(n=t.widgets_values)?void 0:n[0])===e)return t}return null}));o&&Le(o)}else if((null==(o=_.inputs)?void 0:o.length)>0){for(let t=0;t<_.inputs.length;t++)if(_.inputs[t].link){e=_.inputs[t].link;break}if(e){const t=null==(i=_.graph)?void 0:i.links;if(t[e]){const n=null==(a=t[e])?void 0:a.origin_id,s=null==(r=null==(l=_.graph)?void 0:l._nodes_by_id)?void 0:r[n];s&&Le(s)}}}break;case"down":case"right":let t=null;if(ke(_)){const e=null==(d=_.widgets_values)?void 0:d[0],t=null==(u=_.graph)?void 0:u._nodes,n=null==t?void 0:t.find((t=>{var n;if(Ae(t)){if((null==(n=t.widgets_values)?void 0:n[0])===e)return t}return 
null}));n&&Le(n)}else if((null==(c=_.outputs)?void 0:c.length)>0){for(let e=0;e<_.outputs.length;e++)if((null==(p=_.outputs[e].links)?void 0:p.length)>0&&_.outputs[e].links[0]){t=_.outputs[e].links[0];break}if(t){const e=null==(h=_.graph)?void 0:h.links;if(e[t]){const n=null==(m=e[t])?void 0:m.target_id,s=null==(f=null==(g=_.graph)?void 0:g._nodes_by_id)?void 0:f[n];s&&Le(s)}}}}})),_("shift+up,shift+down,shift+left,shift+right",(function(e,t){e.preventDefault();if(!I("EasyUse.Hotkeys.AlignSelectedNodes",null,!0))return;const n=we();if(n.length<=1)return;const s=n;switch(t.key){case"shift+up":LGraphCanvas.alignNodes(s,"top",s[0]);break;case"shift+down":LGraphCanvas.alignNodes(s,"bottom",s[0]);break;case"shift+left":LGraphCanvas.alignNodes(s,"left",s[0]);break;case"shift+right":LGraphCanvas.alignNodes(s,"right",s[0])}Nt||(Nt=$()),Nt&&Nt.update()})),_("shift+g",(function(e,t){e.preventDefault();I("EasyUse.Hotkeys.AddGroup",null,!0)&&(Ot(),Nt||(Nt=$()),Nt&&Nt.update())})),_("shift+r",(function(e,t){e.preventDefault();I("EasyUse.Hotkeys.cleanVRAMused",null,!0)&&Ze()}));const e=[];Array.from(Array(10).keys()).forEach((t=>e.push(`alt+${t}`))),_(e.join(","),(async function(e,t){e.preventDefault();if(!I("EasyUse.Hotkeys.NodesTemplate",null,!0))return;const n=t.key;let s=parseInt(n.split("+")[1]);const o=await b.getUserData("comfy.templates.json");let i=null;if(200==o.status)try{i=await o.json()}catch(l){Re.error(Z("Get Node Templates File Failed"))}else localStorage["Comfy.NodeTemplates"]?i=JSON.parse(localStorage["Comfy.NodeTemplates"]):Re.warn(Z("No Node Templates Found"));if(!i)return void Re.warn(Z("No Node Templates Found"));s=0===s?9:s-1;const a=i[s];if(a)try{const e=(null==a?void 0:a.name)||"Group",t=(null==a?void 0:a.data)?JSON.parse(a.data):[];Tt((async()=>{await A.registerFromWorkflow(t.groupNodes,{}),localStorage.litegrapheditor_clipboard=a.data,w.canvas.pasteFromClipboard(),t.groupNodes||Ot(e)}))}catch(l){Re.error(l)}else Re.warn(Z("Node template with {key} not set").replace("{key}",n))}));const t=async function(e){if(("b"===e.key||"m"==e.key)&&(e.metaKey||e.ctrlKey)){if(0===we().length)return;Nt||(Nt=$()),Nt&&Nt.update()}};window.addEventListener("keydown",t,!0)}}});const Tt=async e=>{const t=localStorage.litegrapheditor_clipboard;await e(),localStorage.litegrapheditor_clipboard=t},Ot=e=>{const t=we();if(0===t.length)return;const n=t;let s=new LiteGraph.LGraphGroup;s.title=e||"Group",((e,t=[],n=20)=>{var s,o,i,a,l,r,d,u,c,p;for(var h of(o=i=a=l=-1,r=d=u=c=-1,[e._nodes,t]))for(var m in h)r=(p=h[m]).pos[0],d=p.pos[1],u=p.pos[0]+p.size[0],c=p.pos[1]+p.size[1],"Reroute"!=p.type&&(d-=LiteGraph.NODE_TITLE_HEIGHT),(null==(s=p.flags)?void 0:s.collapsed)&&(c=d+LiteGraph.NODE_TITLE_HEIGHT,(null==p?void 0:p._collapsed_width)&&(u=r+Math.round(p._collapsed_width))),(-1==o||ra)&&(a=u),(-1==l||c>l)&&(l=c);i-=Math.round(1.4*e.font_size),e.pos=[o-n,i-n],e.size=[a-o+2*n,l-i+2*n]})(s,n),w.canvas.graph.add(s)};function Dt(e,t,n,s){const o=[];return e.workflow.links.forEach((e=>{n&&e[1]===t&&!o.includes(e[3])&&o.push(e[3]),s&&e[3]===t&&!o.includes(e[1])&&o.push(e[1])})),o}async function Gt(e,t=!1){const n=structuredClone(await w.graphToPrompt()),s=[];if(n.workflow.nodes.forEach((e=>{s.push(e.id)})),n.workflow.links=n.workflow.links.filter((e=>s.includes(e[1])&&s.includes(e[3]))),t)for(;!w.graph._nodes_by_id[e].isChooser;)e=Dt(n,e,!0,!1)[0];const o=function(e,t){const n=[],s=[t];for(;s.length>0;){const 
t=s.pop();n.push(t),s.push(...Dt(e,t,!0,!1).filter((e=>!(n.includes(e)||s.includes(e)))))}s.push(...n.filter((e=>e!=t)));const o=[t];for(;s.length>0;){const t=s.pop();o.push(t),s.push(...Dt(e,t,!1,!0).filter((e=>!(o.includes(e)||s.includes(e)))))}const i=[];return i.push(...n),i.push(...o.filter((e=>!i.includes(e)))),i}(n,e);n.workflow.nodes=n.workflow.nodes.filter((t=>(t.id===e&&t.inputs.forEach((e=>{e.link=null})),o.includes(t.id)))),n.workflow.links=n.workflow.links.filter((e=>o.includes(e[1])&&o.includes(e[3])));const i={};for(const[r,d]of Object.entries(n.output))o.includes(parseInt(r))&&(i[r]=d);const a={};for(const[r,d]of Object.entries(i[e.toString()].inputs))Array.isArray(d)||(a[r]=d);i[e.toString()].inputs=a,n.output=i;const l=w.graphToPrompt;w.graphToPrompt=()=>(w.graphToPrompt=l,n),w.queuePrompt(0)}const Rt=new class{constructor(){this.current_node_id=void 0,this.class_of_current_node=null,this.current_node_is_chooser=!1}update(){var e,t;return w.runningNodeId!=this.current_node_id&&(this.current_node_id=w.runningNodeId,this.current_node_id?(this.class_of_current_node=null==(t=null==(e=w.graph)?void 0:e._nodes_by_id[w.runningNodeId.toString()])?void 0:t.comfyClass,this.current_node_is_chooser="easy imageChooser"===this.class_of_current_node):(this.class_of_current_node=void 0,this.current_node_is_chooser=!1),!0)}},Mt=class e{constructor(){}static idle(){return!w.runningNodeId}static paused(){return!0}static paused_here(t){return e.here(t)}static running(){return!e.idle()}static here(e){return w.runningNodeId==e}static state(){return"Paused"}};f(Mt,"cancelling",!1);let Pt=Mt;function Ft(e,t){const n=new FormData;n.append("message",t),n.append("id",e),b.fetchApi("/easyuse/image_chooser_message",{method:"POST",body:n})}function Ut(){Ft(-1,"__cancel__"),Pt.cancelling=!0,b.interrupt(),Pt.cancelling=!1}var Bt=0;function Wt(){Bt+=1}const zt=["easy kSampler","easy kSamplerTiled","easy fullkSampler"];function jt(e){const t=w.graph._nodes_by_id[e.detail.id];if(t){t.selected=new Set,t.anti_selected=new Set;const n=function(e,t){var n;return e.imgs=[],t.forEach((t=>{const n=new Image;e.imgs.push(n),n.onload=()=>{w.graph.setDirtyCanvas(!0)},n.src=`/view?filename=${encodeURIComponent(t.filename)}&type=temp&subfolder=${w.getPreviewFormatParam()}`})),null==(n=e.setSizeForImage)||n.call(e),e.imgs}(t,e.detail.urls);return{node:t,image:n,isKSampler:zt.includes(t.type)}}}function Vt(e,t,n){var s;if(e.imageRects)s=e.imageRects[t];else{const t=e.imagey;s=[1,t+1,e.size[0]-2,e.size[1]-t-2]}n.strokeRect(s[0]+1,s[1]+1,s[2]-2,s[3]-2)}class Yt extends E{constructor(){super(),this.node=null,this.select_index=[],this.dialog_div=null}show(e,t){this.select_index=[],this.node=t;const n=e.map(((e,n)=>{const s=L("img",{src:e.src,onclick:e=>{this.select_index.includes(n)?(this.select_index=this.select_index.filter((e=>e!==n)),s.classList.remove("selected")):(this.select_index.push(n),s.classList.add("selected")),t.selected.has(n)?t.selected.delete(n):t.selected.add(n)}});return s}));super.show(L("div.comfyui-easyuse-chooser-dialog",[L("h5.comfyui-easyuse-chooser-dialog-title",Z("Choose images to continue")),L("div.comfyui-easyuse-chooser-dialog-images",n)]))}createButtons(){const e=super.createButtons();return e[0].onclick=e=>{Pt.running()&&Ut(),super.close()},e.unshift(L("button",{type:"button",textContent:Z("Choose Selected 
Images"),onclick:e=>{Ft(this.node.id,[...this.node.selected,-1,...this.node.anti_selected]),Pt.idle()&&(Wt(),Gt(this.node.id).then((()=>{Ft(this.node.id,[...this.node.selected,-1,...this.node.anti_selected])}))),super.close()}})),e}}function Ht(){const e=w.graph._nodes_by_id[this.node_id];if(e){const t=[...e.selected];(null==t?void 0:t.length)>0&&e.setProperty("values",t),Ft(e.id,[...e.selected,-1,...e.anti_selected]),Pt.idle()&&(Wt(),Gt(e.id).then((()=>{Ft(e.id,[...e.selected,-1,...e.anti_selected])})))}}function Xt(){Pt.running()&&Ut()}function Zt(e){Object.defineProperty(e,"clicked",{get:function(){return this._clicked},set:function(e){this._clicked=e&&""!=this.name}})}function Kt(e){e.options||(e.options={}),e.options.serialize=!1}w.registerExtension({name:"Comfy.EasyUse.imageChooser",init(){window.addEventListener("beforeunload",Ut,!0)},setup(e){const t=LGraphCanvas.prototype.draw;LGraphCanvas.prototype.draw=function(){Rt.update()&&e.graph._nodes.forEach((e=>{e.update&&e.update()})),t.apply(this,arguments)},b.addEventListener("easyuse-image-choose",(function(e){const{node:t,image:n,isKSampler:s}=jt(e);if(s){(new Yt).show(n,t)}}));const n=b.interrupt;b.interrupt=function(){Pt.cancelling||Ut(),n.apply(this,arguments)},b.addEventListener("execution_start",(function(){(Bt>0?(Bt-=1,0):(Ft(-1,"__start__"),1))&&e.graph._nodes.forEach((e=>{(e.selected||e.anti_selected)&&(e.selected.clear(),e.anti_selected.clear(),e.update())}))}))},async nodeCreated(e,t){if("easy imageChooser"==e.comfyClass){e.setProperty("values",[]),void 0===(null==e?void 0:e.imageIndex)&&Object.defineProperty(e,"imageIndex",{get:function(){return null},set:function(t){e.overIndex=t}}),void 0===(null==e?void 0:e.imagey)&&Object.defineProperty(e,"imagey",{get:function(){return null},set:function(t){return e.widgets[e.widgets.length-1].last_y+LiteGraph.NODE_WIDGET_HEIGHT}});const t=e.onMouseDown;e.onMouseDown=function(n,s,o){if(n.isPrimary){const t=function(e,t){var n,s;if((null==(n=e.imgs)?void 0:n.length)>1)for(var o=0;o0&&n0&&se.imagey)return 0;return-1}(e,s);t>=0&&this.imageClicked(t)}return t&&t.apply(this,arguments)},e.send_button_widget=e.addWidget("button","","",Ht),e.cancel_button_widget=e.addWidget("button","","",Xt),Zt(e.cancel_button_widget),Zt(e.send_button_widget),Kt(e.cancel_button_widget),Kt(e.send_button_widget)}},beforeRegisterNodeDef(e,t,n){if("easy imageChooser"==(null==t?void 0:t.name)){const t=e.prototype.onDrawBackground;e.prototype.onDrawBackground=function(e){t.apply(this,arguments),function(e,t){var n,s;if(e.imgs){if(e.imageRects)for(let n=0;n{Vt(e,n,t)})),t.strokeStyle="#F88",null==(s=null==e?void 0:e.anti_selected)||s.forEach((n=>{Vt(e,n,t)}))}}(this,e)},e.prototype.imageClicked=function(t){"easy imageChooser"===(null==e?void 0:e.comfyClass)&&(this.selected.has(t)?this.selected.delete(t):this.selected.add(t),this.update())};const n=e.prototype.update;e.prototype.update=function(){var e;if(n&&n.apply(this,arguments),this.send_button_widget){this.send_button_widget.node_id=this.id;const t=(this.selected?this.selected.size:0)+(this.anti_selected?this.anti_selected.size:0),n=(null==(e=this.imgs)?void 0:e.length)||0;Pt.paused_here(this.id)&&t>0?this.send_button_widget.name=t>1?"Progress selected ("+t+"/"+n+")":"Progress selected image":this.send_button_widget.name=t>0?t>1?"Progress selected ("+t+"/"+n+")":"Progress selected image as restart":""}if(this.cancel_button_widget){const e=Pt.running();this.cancel_button_widget.name=e?"Cancel current 
run":""}this.setDirtyCanvas(!0,!0)}}}}),Number.prototype.div=function(e){return function(e,t){let n,s,o=0,i=0,a="string"==typeof e?e:e.toString(),l="string"==typeof t?t:t.toString();try{o=a.toString().split(".")[1].length}catch(r){}try{i=l.toString().split(".")[1].length}catch(r){}return n=Number(a.toString().replace(".","")),s=Number(l.toString().replace(".","")),n/s*Math.pow(10,i-o)}(this,e)};let Jt=[],$t=0;const qt={sd3:6.5,"sd3-turbo":4};class Qt extends E{constructor(){super(),this.lists=[],this.dialog_div=null,this.user_div=null}addItem(e,t){return L("div.easyuse-account-dialog-item",[L("input",{type:"text",placeholder:"Enter name",oninput:e=>{const t=Array.prototype.indexOf.call(this.dialog_div.querySelectorAll(".easyuse-account-dialog-item"),e.target.parentNode);Jt[t].name=e.target.value},value:Jt[e].name}),L("input.key",{type:"text",oninput:e=>{const t=Array.prototype.indexOf.call(this.dialog_div.querySelectorAll(".easyuse-account-dialog-item"),e.target.parentNode);Jt[t].key=e.target.value},placeholder:"Enter APIKEY",value:Jt[e].key}),L("button.choose",{textContent:Z("Choose"),onclick:async e=>{var n,s,o;const i=Array.prototype.indexOf.call(this.dialog_div.querySelectorAll(".easyuse-account-dialog-item"),e.target.parentNode);let a=Jt[i].name,l=Jt[i].key;if(!a)return void Re.error(Z("Please enter the account name"));if(!l)return void Re.error(Z("Please enter the APIKEY"));let r=!0;for(let t=0;t{(new Qt).show(t)}},[L("div.user",[L("div.avatar",i?[L("img",{src:i})]:"😀"),L("div.info",[L("h5.name",a),L("h6.remark","Credits: "+l)])]),L("div.edit",{textContent:Z("Edit")})])),Re.success(Z("Save Succeed"))}else Re.success(Z("Save Succeed"));this.close()}else Re.error(Z("Save Failed"))}}),L("button.delete",{textContent:Z("Delete"),onclick:e=>{const t=Array.prototype.indexOf.call(this.dialog_div.querySelectorAll(".easyuse-account-dialog-item"),e.target.parentNode);Jt.length<=1?Re.error(Z("At least one account is required")):(Jt.splice(t,1),this.dialog_div.removeChild(e.target.parentNode))}})])}show(e){Jt.forEach(((t,n)=>{this.lists.push(this.addItem(n,e))})),this.dialog_div=L("div.easyuse-account-dialog",this.lists),super.show(L("div.easyuse-account-dialog-main",[L("div",[L("a",{href:"https://platform.stability.ai/account/keys",target:"_blank",textContent:Z("Getting Your APIKEY")})]),this.dialog_div]))}createButtons(){const e=super.createButtons();return e.unshift(L("button",{type:"button",textContent:Z("Save Account Info"),onclick:e=>{let t=!0;for(let n=0;n{200==e.status?Re.success(Z("Save Succeed")):Re.error(Z("Save Failed"))}))}else Re.error(Z("APIKEY is not Empty"))}})),e.unshift(L("button",{type:"button",textContent:Z("Add Account"),onclick:e=>{const t="Account "+Jt.length.toString();Jt.push({name:t,key:""});const n=this.addItem(Jt.length-1);this.lists.push(n),this.dialog_div.appendChild(n)}})),e}}w.registerExtension({name:"Comfy.EasyUse.API.SD3",async beforeRegisterNodeDef(e,t,n){if("easy stableDiffusion3API"==t.name){const t=e.prototype.onNodeCreated;e.prototype.onNodeCreated=async function(){var e,n,s;t&&(null==t||t.apply(this,arguments));const o=this.widgets.find((e=>["seed_num","seed"].includes(e.name))),i=this.widgets.find((e=>["control_before_generate","control_after_generate"].includes(e.name)));let a=this.widgets.find((e=>"model"==e.name));a.callback=e=>{l.value="-"+qt[e]};const 
l=this.addWidget("text","cost_credit","0",(e=>{}),{serialize:!1});l.disabled=!0,setTimeout((e=>{"control_before_generate"==i.name&&0===o.value&&(o.value=Math.floor(4294967294*Math.random())),l.value="-"+qt[a.value]}),100);let r=L("div.easyuse-account-user",[Z("Loading UserInfo...")]);this.addDOMWidget("account","btn",L("div.easyuse-account",r)),b.addEventListener("stable-diffusion-api-generate-succeed",(async({detail:e})=>{var t;let n=r.querySelectorAll(".remark");if(n&&n[0]){const t=(null==e?void 0:e.model)?qt[e.model]:0;if(t){let e=function(e,t){let n,s,o,i,a,l;a="string"==typeof e?e:e.toString(),l="string"==typeof t?t:t.toString();try{n=a.split(".")[1].length}catch(r){n=0}try{s=l.split(".")[1].length}catch(r){s=0}return o=Math.pow(10,Math.max(n,s)),i=n>=s?n:s,((e*o-t*o)/o).toFixed(i)}(parseFloat(n[0].innerText.replace(/Credits: /g,"")),t);e>0&&(n[0].innerText="Credits: "+e.toString())}}await J(1e4);const s=await b.fetchApi("/easyuse/stability/balance");if(200==s.status){const e=await s.json();if(null==e?void 0:e.balance){const s=(null==(t=e.balance)?void 0:t.credits)||0;n&&n[0]&&(n[0].innerText="Credits: "+s)}}}));const d=await b.fetchApi("/easyuse/stability/api_keys");if(200==d.status){let t=await d.json();if(Jt=t.keys,$t=t.current,Jt.length>0&&void 0!==$t){const t=Jt[$t].key,o=Jt[$t].name;if(t){const t=await b.fetchApi("/easyuse/stability/user_info");if(200==t.status){const o=await t.json();if((null==o?void 0:o.account)&&(null==o?void 0:o.balance)){const t=(null==(e=o.account)?void 0:e.profile_picture)||null,i=(null==(n=o.account)?void 0:n.email)||null,a=(null==(s=o.balance)?void 0:s.credits)||0;r.replaceChildren(L("div.easyuse-account-user-info",{onclick:e=>{(new Qt).show(r)}},[L("div.user",[L("div.avatar",t?[L("img",{src:t})]:"😀"),L("div.info",[L("h5.name",i),L("h6.remark","Credits: "+a)])]),L("div.edit",{textContent:Z("Edit")})]))}}}else r.replaceChildren(L("div.easyuse-account-user-info",{onclick:e=>{(new Qt).show(r)}},[L("div.user",[L("div.avatar","😀"),L("div.info",[L("h5.name",o),L("h6.remark",Z("Click to set the APIKEY first"))])]),L("div.edit",{textContent:Z("Edit")})]))}}}}}});let en=null;function tn(){en&&(en.removeEventListeners(),en.dropdown.remove(),en=null)}function nn(e,t,n,s=!1){tn(),new sn(e,t,n,s)}class sn{constructor(e,t,n,s=!1){this.dropdown=document.createElement("ul"),this.dropdown.setAttribute("role","listbox"),this.dropdown.classList.add("easy-dropdown"),this.selectedIndex=-1,this.inputEl=e,this.suggestions=t,this.onSelect=n,this.isDict=s,this.focusedDropdown=this.dropdown,this.buildDropdown(),this.onKeyDownBound=this.onKeyDown.bind(this),this.onWheelBound=this.onWheel.bind(this),this.onClickBound=this.onClick.bind(this),this.addEventListeners()}buildDropdown(){this.isDict?this.buildNestedDropdown(this.suggestions,this.dropdown):this.suggestions.forEach(((e,t)=>{this.addListItem(e,t,this.dropdown)}));const e=this.inputEl.getBoundingClientRect();this.dropdown.style.top=e.top+e.height-10+"px",this.dropdown.style.left=e.left+"px",document.body.appendChild(this.dropdown),en=this}buildNestedDropdown(e,t){let n=0;Object.keys(e).forEach((s=>{const o=e[s];if("object"==typeof o&&null!==o){const e=document.createElement("ul");e.setAttribute("role","listbox"),e.classList.add("easy-nested-dropdown");const i=document.createElement("li");i.classList.add("folder"),i.textContent=s,i.appendChild(e),i.addEventListener("mouseover",this.onMouseOver.bind(this,n,t)),t.appendChild(i),this.buildNestedDropdown(o,e),n+=1}else{const 
e=document.createElement("li");e.classList.add("item"),e.setAttribute("role","option"),e.textContent=s,e.addEventListener("mouseover",this.onMouseOver.bind(this,n,t)),e.addEventListener("mousedown",this.onMouseDown.bind(this,s)),t.appendChild(e),n+=1}}))}addListItem(e,t,n){const s=document.createElement("li");s.setAttribute("role","option"),s.textContent=e,s.addEventListener("mouseover",this.onMouseOver.bind(this,t)),s.addEventListener("mousedown",this.onMouseDown.bind(this,e)),n.appendChild(s)}addEventListeners(){document.addEventListener("keydown",this.onKeyDownBound),this.dropdown.addEventListener("wheel",this.onWheelBound),document.addEventListener("click",this.onClickBound)}removeEventListeners(){document.removeEventListener("keydown",this.onKeyDownBound),this.dropdown.removeEventListener("wheel",this.onWheelBound),document.removeEventListener("click",this.onClickBound)}onMouseOver(e,t){t&&(this.focusedDropdown=t),this.selectedIndex=e,this.updateSelection()}onMouseOut(){this.selectedIndex=-1,this.updateSelection()}onMouseDown(e,t){t.preventDefault(),this.onSelect(e),this.dropdown.remove(),this.removeEventListeners()}onKeyDown(e){const t=Array.from(this.focusedDropdown.children),n=t[this.selectedIndex];if(en)if(38===e.keyCode)e.preventDefault(),this.selectedIndex=Math.max(0,this.selectedIndex-1),this.updateSelection();else if(40===e.keyCode)e.preventDefault(),this.selectedIndex=Math.min(t.length-1,this.selectedIndex+1),this.updateSelection();else if(39===e.keyCode){if(e.preventDefault(),n&&n.classList.contains("folder")){const e=n.querySelector(".easy-nested-dropdown");e&&(this.focusedDropdown=e,this.selectedIndex=0,this.updateSelection())}}else if(37===e.keyCode&&this.focusedDropdown!==this.dropdown){const e=this.focusedDropdown.closest(".easy-dropdown, .easy-nested-dropdown").parentNode.closest(".easy-dropdown, .easy-nested-dropdown");e&&(this.focusedDropdown=e,this.selectedIndex=Array.from(e.children).indexOf(this.focusedDropdown.parentNode),this.updateSelection())}else if((13===e.keyCode||9===e.keyCode)&&this.selectedIndex>=0){e.preventDefault(),n.classList.contains("item")&&(this.onSelect(t[this.selectedIndex].textContent),this.dropdown.remove(),this.removeEventListeners());const s=n.querySelector(".easy-nested-dropdown");s&&(this.focusedDropdown=s,this.selectedIndex=0,this.updateSelection())}else 27===e.keyCode&&(this.dropdown.remove(),this.removeEventListeners())}onWheel(e){const t=parseInt(this.dropdown.style.top);localStorage.getItem("Comfy.Settings.Comfy.InvertMenuScrolling")?this.dropdown.style.top=t+(e.deltaY<0?10:-10)+"px":this.dropdown.style.top=t+(e.deltaY<0?-10:10)+"px"}onClick(e){this.dropdown.contains(e.target)||e.target===this.inputEl||(this.dropdown.remove(),this.removeEventListeners())}updateSelection(){Array.from(this.focusedDropdown.children).forEach(((e,t)=>{t===this.selectedIndex?e.classList.add("selected"):e.classList.remove("selected")}))}}function on(e){const t=e.min||0,n=e.max||0,s=e.step||1;if(0===s)return[];const o=[];let i=t;for(;i<=n;){if(Number.isInteger(s))o.push(Math.round(i)+"; ");else{let e=i.toFixed(3);-0==e&&(e="0.000"),/\.\d{3}$/.test(e)||(e+="0"),o.push(e+"; ")}i+=s}return n>=0&&t>=0?o:o.reverse()}let an={},ln={};function rn(e,t){String(e.id);const n=t.name,s=t.value.replace(/^(loader|preSampling):\s/,"");ln[n]?ln[n]!=an[s]&&(ln[n]=an[s]):ln={...ln,[n]:an[s]}}w.registerExtension({name:"Comfy.EasyUse.XYPlot",async beforeRegisterNodeDef(e,t,n){if("easy XYPlot"===t.name){an=t.input.hidden.plot_dict[0];for(const e in an){const 
t=an[e];if(Array.isArray(t)){let n=[];for(const e of t)n.push(e+"; ");an[e]=n}else an[e]="object"==typeof t?"seed"==e?t+"; ":on(t):t+"; "}an.None=[],an["---------------------"]=[]}},nodeCreated(e){"easy XYPlot"===e.comfyClass&&(function(e){if(e.widgets)for(const t of e.widgets)if("x_axis"===t.name||"y_axis"===t.name){let n=t.value;Object.defineProperty(t,"value",{get:()=>n,set(s){s!==n&&(n=s,rn(e,t))}})}}(e),function(e){if(e.widgets){const t=e.widgets.filter((e=>"customtext"===e.type&&!1!==e.dynamicPrompts||e.dynamicPrompts));for(const e of t){let t=function(e,t,s,o){return e&&(t[s]=e),t.map((e=>n(e,o))).filter((e=>""!==e)).join("")},n=function(e,t){if(e=s(e),o(e,t))return e+"; ";let n=i(e,t);return 1===n.length||2===n.length?n[0]:o(a(e),t)?a(e)+"; ":""},s=function(e){return e.replace(/(\n|;| )/g,"")},o=function(e,t){return t.includes(e+"; ")},i=function(e,t){return t.filter((t=>t.toLowerCase().includes(e.toLowerCase())))},a=function(e){return Number(e)?Number(e).toFixed(3):["0","0.","0.0","0.00","00"].includes(e)?"0.000":e};const l=function(){const n=e.name[0]+"_axis";let s=(null==ln?void 0:ln[n])||[];if(0===s.length)return;const o=e.inputEl.value,i=e.inputEl.selectionStart;let a=o.split("; ");const l=o.substring(0,i).split("; ").length-1,r=a[l].replace(/\n/g,"").toLowerCase(),d=s.filter((e=>e.toLowerCase().includes(r))).map((e=>e.replace(/; /g,"")));if(d.length>0)nn(e.inputEl,d,(n=>{const o=t(n,a,l,s);e.inputEl.value=o}));else{tn();const n=t(null,a,l,s);e.inputEl.value=n}};e.inputEl.removeEventListener("input",l),e.inputEl.addEventListener("input",l),e.inputEl.removeEventListener("mouseup",l),e.inputEl.addEventListener("mouseup",l)}}}(e))}});export{Z as $,U as N,b as a,w as b,fe as c,I as d,Ze as e,B as f,ge as g,x as h,Ee as j,X as l,J as s,Re as t,$ as u}; diff --git a/ComfyUI-Easy-Use/web_version/v2/assets/lodash-CZi7izHi.js b/ComfyUI-Easy-Use/web_version/v2/assets/lodash-CZi7izHi.js new file mode 100644 index 0000000000000000000000000000000000000000..915496d175046833a3d852d4bf7e3b50fdcd098c --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v2/assets/lodash-CZi7izHi.js @@ -0,0 +1 @@ +var t="undefined"!=typeof globalThis?globalThis:"undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:{};function r(t){return t&&t.__esModule&&Object.prototype.hasOwnProperty.call(t,"default")?t.default:t}var e=function(){this.__data__=[],this.size=0};var n=function(t,r){return t===r||t!=t&&r!=r},o=n;var a=function(t,r){for(var e=t.length;e--;)if(o(t[e][0],r))return e;return-1},c=a,u=Array.prototype.splice;var i=a;var s=a;var f=a;var p=e,v=function(t){var r=this.__data__,e=c(r,t);return!(e<0)&&(e==r.length-1?r.pop():u.call(r,e,1),--this.size,!0)},l=function(t){var r=this.__data__,e=i(r,t);return e<0?void 0:r[e][1]},b=function(t){return s(this.__data__,t)>-1},y=function(t,r){var e=this.__data__,n=f(e,t);return n<0?(++this.size,e.push([t,r])):e[n][1]=r,this};function j(t){var r=-1,e=null==t?0:t.length;for(this.clear();++r-1&&t%1==0&&t-1&&t%1==0&&t<=9007199254740991},Mr=k,Tr=Ir,Er=sr,Br={};Br["[object Float32Array]"]=Br["[object Float64Array]"]=Br["[object Int8Array]"]=Br["[object Int16Array]"]=Br["[object Int32Array]"]=Br["[object Uint8Array]"]=Br["[object Uint8ClampedArray]"]=Br["[object Uint16Array]"]=Br["[object Uint32Array]"]=!0,Br["[object Arguments]"]=Br["[object Array]"]=Br["[object ArrayBuffer]"]=Br["[object Boolean]"]=Br["[object DataView]"]=Br["[object Date]"]=Br["[object Error]"]=Br["[object Function]"]=Br["[object Map]"]=Br["[object 
Number]"]=Br["[object Object]"]=Br["[object RegExp]"]=Br["[object Set]"]=Br["[object String]"]=Br["[object WeakMap]"]=!1;var Dr=function(t){return Er(t)&&Tr(t.length)&&!!Br[Mr(t)]};var $r=function(t){return function(r){return t(r)}},kr={exports:{}};!function(t,r){var e=A,n=r&&!r.nodeType&&r,o=n&&t&&!t.nodeType&&t,a=o&&o.exports===n&&e.process,c=function(){try{var t=o&&o.require&&o.require("util").types;return t||a&&a.binding&&a.binding("util")}catch(r){}}();t.exports=c}(kr,kr.exports);var Rr=kr.exports,Vr=Dr,Cr=$r,Nr=Rr&&Rr.isTypedArray,Wr=Nr?Cr(Nr):Vr,Lr=ir,qr=mr,Gr=Sr,Hr=Pr,Jr=Ur,Kr=Wr,Qr=Object.prototype.hasOwnProperty;var Xr=function(t,r){var e=Gr(t),n=!e&&qr(t),o=!e&&!n&&Hr(t),a=!e&&!n&&!o&&Kr(t),c=e||n||o||a,u=c?Lr(t.length,String):[],i=u.length;for(var s in t)!r&&!Qr.call(t,s)||c&&("length"==s||o&&("offset"==s||"parent"==s)||a&&("buffer"==s||"byteLength"==s||"byteOffset"==s)||Jr(s,i))||u.push(s);return u},Yr=Object.prototype;var Zr=function(t){var r=t&&t.constructor;return t===("function"==typeof r&&r.prototype||Yr)};var te=function(t,r){return function(e){return t(r(e))}},re=te(Object.keys,Object),ee=Zr,ne=re,oe=Object.prototype.hasOwnProperty;var ae=W,ce=Ir;var ue=function(t){return null!=t&&ce(t.length)&&!ae(t)},ie=Xr,se=function(t){if(!ee(t))return ne(t);var r=[];for(var e in Object(t))oe.call(t,e)&&"constructor"!=e&&r.push(e);return r},fe=ue;var pe=function(t){return fe(t)?ie(t):se(t)},ve=ur,le=pe;var be=function(t,r){return t&&ve(r,le(r),t)};var ye=R,je=Zr,he=function(t){var r=[];if(null!=t)for(var e in Object(t))r.push(e);return r},_e=Object.prototype.hasOwnProperty;var de=Xr,ge=function(t){if(!ye(t))return he(t);var r=je(t),e=[];for(var n in t)("constructor"!=n||!r&&_e.call(t,n))&&e.push(n);return e},Oe=ue;var we=function(t){return Oe(t)?de(t,!0):ge(t)},Ae=ur,xe=we;var me=function(t,r){return t&&Ae(r,xe(r),t)},Se={exports:{}};!function(t,r){var e=S,n=r&&!r.nodeType&&r,o=n&&t&&!t.nodeType&&t,a=o&&o.exports===n?e.Buffer:void 0,c=a?a.allocUnsafe:void 0;t.exports=function(t,r){if(r)return t.slice();var e=t.length,n=c?c(e):new t.constructor(e);return t.copy(n),n}}(Se,Se.exports);var ze=Se.exports;var Pe=function(t,r){var e=-1,n=t.length;for(r||(r=Array(n));++er in t?e(t,r,{enumerable:!0,configurable:!0,writable:!0,value:s}):t[r]=s;function a(e){return null==e||""===e||Array.isArray(e)&&0===e.length||!(e instanceof Date)&&"object"==typeof e&&0===Object.keys(e).length}function o(e){return!!(e&&e.constructor&&e.call&&e.apply)}function i(e){return!a(e)}function l(e,t=!0){return e instanceof Object&&e.constructor===Object&&(t||0!==Object.keys(e).length)}function c(e,...t){return o(e)?e(...t):e}function u(e,t=!0){return"string"==typeof e&&(t||""!==e)}function d(e){return u(e)?e.replace(/(-|_)/g,"").toLowerCase():e}function m(e,t="",r={}){const s=d(t).split("."),n=s.shift();return n?l(e)?m(c(e[Object.keys(e).find((e=>d(e)===n))||""],r),s.join("."),r):void 0:c(e,r)}function h(e,t=!0){return Array.isArray(e)&&(t||0!==e.length)}function p(e){return i(e)&&!isNaN(e)}function f(e,t){if(t){const r=t.test(e);return t.lastIndex=0,r}return!1}function g(...e){const a=(e={},o={})=>{const i=((e,a)=>{for(var o in a||(a={}))r.call(a,o)&&n(e,o,a[o]);if(t)for(var o of t(a))s.call(a,o)&&n(e,o,a[o]);return e})({},e);return Object.keys(o).forEach((t=>{l(o[t])&&t in e&&l(e[t])?i[t]=a(e[t],o[t]):i[t]=o[t]})),i};return e.reduce(((e,t,r)=>0===r?t:a(e,t)),{})}function y(e){return e?e.replace(/\/\*(?:(?!\*\/)[\s\S])*\*\/|[\r\n\t]+/g,"").replace(/ {2,}/g," ").replace(/ ([{:}]) /g,"$1").replace(/([;,]) 
/g,"$1").replace(/ !/g,"!").replace(/: /g,":"):e}function v(e){return u(e,!1)?e[0].toUpperCase()+e.slice(1):e}function b(e){return u(e)?e.replace(/(_)/g,"-").replace(/[A-Z]/g,((e,t)=>0===t?e:"-"+e.toLowerCase())).toLowerCase():e}function S(e){return u(e)?e.replace(/[A-Z]/g,((e,t)=>0===t?e:"."+e.toLowerCase())).toLowerCase():e}function $(){const e=new Map;return{on(t,r){let s=e.get(t);return s?s.push(r):s=[r],e.set(t,s),this},off(t,r){let s=e.get(t);return s&&s.splice(s.indexOf(r)>>>0,1),this},emit(t,r){let s=e.get(t);s&&s.slice().map((e=>{e(r)}))},clear(){e.clear()}}}var w=Object.defineProperty,k=Object.defineProperties,L=Object.getOwnPropertyDescriptors,N=Object.getOwnPropertySymbols,O=Object.prototype.hasOwnProperty,j=Object.prototype.propertyIsEnumerable,C=(e,t,r)=>t in e?w(e,t,{enumerable:!0,configurable:!0,writable:!0,value:r}):e[t]=r,x=(e,t)=>{for(var r in t||(t={}))O.call(t,r)&&C(e,r,t[r]);if(N)for(var r of N(t))j.call(t,r)&&C(e,r,t[r]);return e},_=(e,t)=>k(e,L(t)),P=(e,t)=>{var r={};for(var s in e)O.call(e,s)&&t.indexOf(s)<0&&(r[s]=e[s]);if(null!=e&&N)for(var s of N(e))t.indexOf(s)<0&&j.call(e,s)&&(r[s]=e[s]);return r},T=$();function E(e,t){h(e)?e.push(...t||[]):l(e)&&Object.assign(e,t)}function A(e,t=""){if(!["opacity","z-index","line-height","font-weight","flex","flex-grow","flex-shrink","order"].some((e=>t.endsWith(e)))){return`${e}`.trim().split(" ").map((e=>p(e)?`${e}px`:e)).join(" ")}return e}function V(e="",t=""){return function(e){return e.replaceAll(/ /g,"").replace(/[^\w]/g,"-")}(`${u(e,!1)&&u(t,!1)?`${e}-`:e}${t}`)}function R(e="",t=""){return`--${V(e,t)}`}function F(e,t="",r="",s=[],n){if(u(e)){const a=/{([^}]*)}/g,o=e.trim();if(f(o,a)){const e=o.replaceAll(a,(e=>{const t=e.replace(/{|}/g,"").split(".").filter((e=>!s.some((t=>f(e,t)))));return`var(${R(r,b(t.join("-")))}${i(n)?`, ${n}`:""})`})),t=/(\d+\s+[\+\-\*\/]\s+\d+)/g,l=/var\([^)]+\)/g;return f(e.replace(l,"0"),t)?`calc(${e})`:e}return A(o,t)}if(p(e))return A(e,t)}function D(e,t){return e?`${e}{${t}}`:""}var W=(...e)=>z(I.getTheme(),...e),z=(e={},t,r,s="variable")=>{if(t){const{variable:n,options:a}=I.defaults||{},{prefix:o,transform:i}=(null==e?void 0:e.options)||a||{},l=f(t,/{([^}]*)}/g)?t:`{${t}}`;return"value"===s||"strict"===i?I.getTokenValue(t):F(l,void 0,o,[n.excludedKeyRegex],r)}return""};function H(e,t={}){const r=I.defaults.variable,{prefix:s=r.prefix,selector:n=r.selector,excludedKeyRegex:a=r.excludedKeyRegex}=t,o=(e,t="")=>Object.entries(e).reduce(((e,[r,n])=>{const i=f(r,a)?V(t):V(t,b(r)),c=function(e){return l(e)&&e.hasOwnProperty("value")&&e.hasOwnProperty("type")?e.value:e}(n);if(l(c)){const{variables:t,tokens:r}=o(c,i);E(e.tokens,r),E(e.variables,t)}else e.tokens.push((s?i.replace(`${s}-`,""):i).replaceAll("-",".")),function(e,t,r){u(t,!1)&&e.push(`${t}:${r};`)}(e.variables,R(i),F(c,i,s,[a]));return e}),{variables:[],tokens:[]}),{variables:i,tokens:c}=o(e,s);return{value:i,tokens:c,declarations:i.join(""),css:D(n,i.join(""))}}var M={regex:{rules:{class:{pattern:/^\.([a-zA-Z][\w-]*)$/,resolve(e){return{type:"class",selector:e,matched:this.pattern.test(e.trim())}}},attr:{pattern:/^\[(.*)\]$/,resolve(e){return{type:"attr",selector:`:root${e}`,matched:this.pattern.test(e.trim())}}},media:{pattern:/^@media (.*)$/,resolve(e){return{type:"media",selector:`${e}{:root{[CSS]}}`,matched:this.pattern.test(e.trim())}}},system:{pattern:/^system$/,resolve(e){return{type:"system",selector:"@media (prefers-color-scheme: 
dark){:root{[CSS]}}",matched:this.pattern.test(e.trim())}}},custom:{resolve:e=>({type:"custom",selector:e,matched:!0})}},resolve(e){const t=Object.keys(this.rules).filter((e=>"custom"!==e)).map((e=>this.rules[e]));return[e].flat().map((e=>{var r;return null!=(r=t.map((t=>t.resolve(e))).find((e=>e.matched)))?r:this.rules.custom.resolve(e)}))}},_toVariables:(e,t)=>H(e,{prefix:null==t?void 0:t.prefix}),getCommon({name:e="",theme:t={},params:r,set:s,defaults:n}){var a,o,l,c;const{preset:u,options:d}=t;let m,h,p,f;if(i(u)){const{primitive:t,semantic:r}=u,g=r||{},{colorScheme:y}=g,v=P(g,["colorScheme"]),b=y||{},{dark:S}=b,$=P(b,["dark"]),w=i(t)?this._toVariables({primitive:t},d):{},k=i(v)?this._toVariables({semantic:v},d):{},L=i($)?this._toVariables({light:$},d):{},N=i(S)?this._toVariables({dark:S},d):{},[O,j]=[null!=(a=w.declarations)?a:"",w.tokens],[C,x]=[null!=(o=k.declarations)?o:"",k.tokens||[]],[_,T]=[null!=(l=L.declarations)?l:"",L.tokens||[]],[E,A]=[null!=(c=N.declarations)?c:"",N.tokens||[]];m=this.transformCSS(e,O,"light","variable",d,s,n),h=j;p=`${this.transformCSS(e,`${C}${_}color-scheme:light`,"light","variable",d,s,n)}${this.transformCSS(e,`${E}color-scheme:dark`,"dark","variable",d,s,n)}`,f=[...new Set([...x,...T,...A])]}return{primitive:{css:m,tokens:h},semantic:{css:p,tokens:f}}},getPreset({name:e="",preset:t={},options:r,params:s,set:n,defaults:a,selector:o}){var l,c,u;const d=e.replace("-directive",""),m=t,{colorScheme:h}=m,p=P(m,["colorScheme"]),f=h||{},{dark:g}=f,y=P(f,["dark"]),v=i(p)?this._toVariables({[d]:p},r):{},b=i(y)?this._toVariables({[d]:y},r):{},S=i(g)?this._toVariables({[d]:g},r):{},[$,w]=[null!=(l=v.declarations)?l:"",v.tokens||[]],[k,L]=[null!=(c=b.declarations)?c:"",b.tokens||[]],[N,O]=[null!=(u=S.declarations)?u:"",S.tokens||[]],j=[...new Set([...w,...L,...O])];return{css:`${this.transformCSS(d,`${$}${k}`,"light","variable",r,n,a,o)}${this.transformCSS(d,N,"dark","variable",r,n,a,o)}`,tokens:j}},getPresetC({name:e="",theme:t={},params:r,set:s,defaults:n}){var a;const{preset:o,options:i}=t,l=null==(a=null==o?void 0:o.components)?void 0:a[e];return this.getPreset({name:e,preset:l,options:i,params:r,set:s,defaults:n})},getPresetD({name:e="",theme:t={},params:r,set:s,defaults:n}){var a;const o=e.replace("-directive",""),{preset:i,options:l}=t,c=null==(a=null==i?void 0:i.directives)?void 0:a[o];return this.getPreset({name:o,preset:c,options:l,params:r,set:s,defaults:n})},getColorSchemeOption(e,t){var r;return this.regex.resolve(null!=(r=e.darkModeSelector)?r:t.options.darkModeSelector)},getLayerOrder(e,t={},r,s){const{cssLayer:n}=t;if(n){return`@layer ${c(n.order||"primeui",r)}`}return""},getCommonStyleSheet({name:e="",theme:t={},params:r,props:s={},set:n,defaults:a}){const o=this.getCommon({name:e,theme:t,params:r,set:n,defaults:a}),i=Object.entries(s).reduce(((e,[t,r])=>e.push(`${t}="${r}"`)&&e),[]).join(" ");return Object.entries(o||{}).reduce(((e,[t,r])=>{if(null==r?void 0:r.css){const s=y(null==r?void 0:r.css),n=`${t}-variables`;e.push(``)}return e}),[]).join("")},getStyleSheet({name:e="",theme:t={},params:r,props:s={},set:n,defaults:a}){var o;const i={name:e,theme:t,params:r,set:n,defaults:a},l=null==(o=e.includes("-directive")?this.getPresetD(i):this.getPresetC(i))?void 0:o.css,c=Object.entries(s).reduce(((e,[t,r])=>e.push(`${t}="${r}"`)&&e),[]).join(" ");return l?``:""},createTokens(e={},t,r="",s="",n={}){return Object.entries(e).forEach((([e,o])=>{const 
i=f(e,t.variable.excludedKeyRegex)?r:r?`${r}.${S(e)}`:S(e),c=s?`${s}.${e}`:e;l(o)?this.createTokens(o,t,i,c,n):(n[i]||(n[i]={paths:[],computed(e,t={}){if(e){const r=this.paths.find((t=>t.scheme===e))||this.paths.find((e=>"none"===e.scheme));return null==r?void 0:r.computed(e,t.binding)}return this.paths.map((e=>e.computed(e.scheme,t[e.scheme])))}}),n[i].paths.push({path:c,value:o,scheme:c.includes("colorScheme.light")?"light":c.includes("colorScheme.dark")?"dark":"none",computed(e,t={}){const r=/{([^}]*)}/g;let s=o;if(t.name=this.path,t.binding||(t.binding={}),f(o,r)){const a=o.trim().replaceAll(r,(r=>{var s,a;const o=r.replace(/{|}/g,"");return null==(a=null==(s=n[o])?void 0:s.computed(e,t))?void 0:a.value})),i=/(\d+\w*\s+[\+\-\*\/]\s+\d+\w*)/g,l=/var\([^)]+\)/g;s=f(a.replace(l,"0"),i)?`calc(${a})`:a}return a(t.binding)&&delete t.binding,{colorScheme:e,path:this.path,paths:t,value:s.includes("undefined")?void 0:s}}}))})),n},getTokenValue(e,t,r){var s;const n=t.split(".").filter((e=>!f(e.toLowerCase(),r.variable.excludedKeyRegex))).join(".");const a=t.includes("colorScheme.light")?"light":t.includes("colorScheme.dark")?"dark":void 0,o=[null==(s=e[n])?void 0:s.computed(a)].flat().filter((e=>e));return 1===o.length?o[0].value:o.reduce(((e={},t)=>{const r=t,{colorScheme:s}=r,n=P(r,["colorScheme"]);return e[s]=n,e}),void 0)},transformCSS(e,t,r,s,n={},a,o,u){if(i(t)){const{cssLayer:d}=n;if("style"!==s){const e=this.getColorSchemeOption(n,o),s=u?D(u,t):t;t="dark"===r?e.reduce(((e,{selector:t})=>(i(t)&&(e+=t.includes("[CSS]")?t.replace("[CSS]",s):D(t,s)),e)),""):D(null!=u?u:":root",t)}if(d){const r={name:"primeui",order:"primeui"};l(d)&&(r.name=c(d.name,{name:e,type:s})),i(r.name)&&(t=D(`@layer ${r.name}`,t),null==a||a.layerNames(r.name))}return t}return""}},I={defaults:{variable:{prefix:"p",selector:":root",excludedKeyRegex:/^(primitive|semantic|components|directives|variables|colorscheme|light|dark|common|root|states)$/gi},options:{prefix:"p",darkModeSelector:"system",cssLayer:!1}},_theme:void 0,_layerNames:new Set,_loadedStyleNames:new Set,_loadingStyles:new Set,_tokens:{},update(e={}){const{theme:t}=e;t&&(this._theme=_(x({},t),{options:x(x({},this.defaults.options),t.options)}),this._tokens=M.createTokens(this.preset,this.defaults),this.clearLoadedStyleNames())},get theme(){return this._theme},get preset(){var e;return(null==(e=this.theme)?void 0:e.preset)||{}},get options(){var e;return(null==(e=this.theme)?void 0:e.options)||{}},get tokens(){return this._tokens},getTheme(){return this.theme},setTheme(e){this.update({theme:e}),T.emit("theme:change",e)},getPreset(){return this.preset},setPreset(e){this._theme=_(x({},this.theme),{preset:e}),this._tokens=M.createTokens(e,this.defaults),this.clearLoadedStyleNames(),T.emit("preset:change",e),T.emit("theme:change",this.theme)},getOptions(){return this.options},setOptions(e){this._theme=_(x({},this.theme),{options:e}),this.clearLoadedStyleNames(),T.emit("options:change",e),T.emit("theme:change",this.theme)},getLayerNames(){return[...this._layerNames]},setLayerNames(e){this._layerNames.add(e)},getLoadedStyleNames(){return this._loadedStyleNames},isStyleNameLoaded(e){return this._loadedStyleNames.has(e)},setLoadedStyleName(e){this._loadedStyleNames.add(e)},deleteLoadedStyleName(e){this._loadedStyleNames.delete(e)},clearLoadedStyleNames(){this._loadedStyleNames.clear()},getTokenValue(e){return M.getTokenValue(this.tokens,e,this.defaults)},getCommon(e="",t){return 
M.getCommon({name:e,theme:this.theme,params:t,defaults:this.defaults,set:{layerNames:this.setLayerNames.bind(this)}})},getComponent(e="",t){const r={name:e,theme:this.theme,params:t,defaults:this.defaults,set:{layerNames:this.setLayerNames.bind(this)}};return M.getPresetC(r)},getDirective(e="",t){const r={name:e,theme:this.theme,params:t,defaults:this.defaults,set:{layerNames:this.setLayerNames.bind(this)}};return M.getPresetD(r)},getCustomPreset(e="",t,r,s){const n={name:e,preset:t,options:this.options,selector:r,params:s,defaults:this.defaults,set:{layerNames:this.setLayerNames.bind(this)}};return M.getPreset(n)},getLayerOrderCSS(e=""){return M.getLayerOrder(e,this.options,{names:this.getLayerNames()},this.defaults)},transformCSS(e="",t,r="style",s){return M.transformCSS(e,t,s,r,this.options,{layerNames:this.setLayerNames.bind(this)},this.defaults)},getCommonStyleSheet(e="",t,r={}){return M.getCommonStyleSheet({name:e,theme:this.theme,params:t,props:r,defaults:this.defaults,set:{layerNames:this.setLayerNames.bind(this)}})},getStyleSheet(e,t,r={}){return M.getStyleSheet({name:e,theme:this.theme,params:t,props:r,defaults:this.defaults,set:{layerNames:this.setLayerNames.bind(this)}})},onStyleMounted(e){this._loadingStyles.add(e)},onStyleUpdated(e){this._loadingStyles.add(e)},onStyleLoaded(e,{name:t}){this._loadingStyles.size&&(this._loadingStyles.delete(t),T.emit(`theme:${t}:load`,e),!this._loadingStyles.size&&T.emit("theme:load"))}};function B(e,t){return!!e&&(e.classList?e.classList.contains(t):new RegExp("(^| )"+t+"( |$)","gi").test(e.className))}function K(e,t){if(e&&t){const r=t=>{B(e,t)||(e.classList?e.classList.add(t):e.className+=" "+t)};[t].flat().filter(Boolean).forEach((e=>e.split(" ").forEach(r)))}}function Z(e,t){if(e&&t){const r=t=>{e.classList?e.classList.remove(t):e.className=e.className.replace(new RegExp("(^|\\b)"+t.split(" ").join("|")+"(\\b|$)","gi")," ")};[t].flat().filter(Boolean).forEach((e=>e.split(" ").forEach(r)))}}function q(){let e=window,t=document,r=t.documentElement,s=t.getElementsByTagName("body")[0];return{width:e.innerWidth||r.clientWidth||s.clientWidth,height:e.innerHeight||r.clientHeight||s.clientHeight}}function X(){let e=document.documentElement;return(window.pageXOffset||e.scrollLeft)-(e.clientLeft||0)}function Y(){let e=document.documentElement;return(window.pageYOffset||e.scrollTop)-(e.clientTop||0)}function U(e,t){if(e instanceof HTMLElement){return e.offsetWidth}return 0}function G(e){return"object"==typeof HTMLElement?e instanceof HTMLElement:e&&"object"==typeof e&&null!==e&&1===e.nodeType&&"string"==typeof e.nodeName}function J(e,t={}){if(G(e)){const r=(t,s)=>{var n,a;const o=(null==(n=null==e?void 0:e.$attrs)?void 0:n[t])?[null==(a=null==e?void 0:e.$attrs)?void 0:a[t]]:[];return[s].flat().reduce(((e,s)=>{if(null!=s){const n=typeof s;if("string"===n||"number"===n)e.push(s);else if("object"===n){const n=Array.isArray(s)?r(t,s):Object.entries(s).map((([e,r])=>"style"!==t||!r&&0!==r?r?e:void 0:`${e.replace(/([a-z])([A-Z])/g,"$1-$2").toLowerCase()}:${r}`));e=n.length?e.concat(n.filter((e=>!!e))):e}}return e}),o)};Object.entries(t).forEach((([t,s])=>{if(null!=s){const n=t.match(/^on(.+)/);n?e.addEventListener(n[1].toLowerCase(),s):"p-bind"===t?J(e,s):(s="class"===t?[...new Set(r("class",s))].join(" ").trim():"style"===t?r("style",s).join(";").trim():s,(e.$attrs=e.$attrs||{})&&(e.$attrs[t]=s),e.setAttribute(t,s))}}))}}function Q(e,t={},...r){if(e){const s=document.createElement(e);return J(s,t),s.append(...r),s}}function 
ee(e,t){if(e){e.style.opacity="0";let r=+new Date,s="0",n=function(){s=""+(+e.style.opacity+((new Date).getTime()-r)/t),e.style.opacity=s,r=+new Date,+s<1&&(window.requestAnimationFrame&&requestAnimationFrame(n)||setTimeout(n,16))};n()}}function te(e,t){return G(e)?e.matches(t)?e:e.querySelector(t):null}function re(e,t){if(G(e)){const r=e.getAttribute(t);return isNaN(r)?"true"===r||"false"===r?"true"===r:r:+r}}function se(e){if(e){let t=e.offsetHeight,r=getComputedStyle(e);return t-=parseFloat(r.paddingTop)+parseFloat(r.paddingBottom)+parseFloat(r.borderTopWidth)+parseFloat(r.borderBottomWidth),t}return 0}function ne(e){if(e){let t=e.parentNode;return t&&t instanceof ShadowRoot&&t.host&&(t=t.host),t}return null}function ae(e){if(e){let t=e.getBoundingClientRect();return{top:t.top+(window.pageYOffset||document.documentElement.scrollTop||document.body.scrollTop||0),left:t.left+(window.pageXOffset||document.documentElement.scrollLeft||document.body.scrollLeft||0)}}return{top:"auto",left:"auto"}}function oe(e,t){if(e){return e.offsetHeight}return 0}function ie(e,t=[]){const r=ne(e);return null===r?t:ie(r,t.concat([r]))}function le(e){let t=[];if(e){let r=ie(e);const s=/(auto|scroll)/,n=e=>{try{let t=window.getComputedStyle(e,null);return s.test(t.getPropertyValue("overflow"))||s.test(t.getPropertyValue("overflowX"))||s.test(t.getPropertyValue("overflowY"))}catch(t){return!1}};for(let e of r){let r=1===e.nodeType&&e.dataset.scrollselectors;if(r){let s=r.split(",");for(let r of s){let s=te(e,r);s&&n(s)&&t.push(s)}}9!==e.nodeType&&n(e)&&t.push(e)}}return t}function ce(e){return!(null==e||!e.nodeName||!ne(e))}function ue(e){if(e){let t=e.offsetWidth,r=getComputedStyle(e);return t-=parseFloat(r.paddingLeft)+parseFloat(r.paddingRight)+parseFloat(r.borderLeftWidth)+parseFloat(r.borderRightWidth),t}return 0}function de(){return!("undefined"==typeof window||!window.document||!window.document.createElement)}function me(){return"ontouchstart"in window||navigator.maxTouchPoints>0||navigator.msMaxTouchPoints>0}function he(e,t="",r){G(e)&&null!=r&&e.setAttribute(t,r)}var pe={};function fe(e="pui_id_"){return pe.hasOwnProperty(e)||(pe[e]=0),pe[e]++,`${e}${pe[e]}`}var ge=function(){let e=[];const t=(t,r,s=0)=>[...e].reverse().find((e=>!0))||{key:t,value:s},r=e=>e&&parseInt(e.style.zIndex,10)||0;return{get:r,set:(r,s,n)=>{s&&(s.style.zIndex=String(((r,s,n=999)=>{const a=t(r,s,n),o=a.value+(a.key===r?0:n)+1;return e.push({key:r,value:o}),o})(r,!0,n)))},clear:t=>{var s;t&&(s=r(t),e=e.filter((e=>e.value!==s)),t.style.zIndex="")},getCurrent:e=>(e=>t(e).value)(e)}}();export{U as A,oe as B,ae as C,K as D,$ as E,re as F,ee as G,me as H,X as I,Y as J,q as K,B as L,ge as Z,he as a,de as b,I as c,W as d,o as e,te as f,le as g,i as h,ce as i,T as j,m as k,u as l,y as m,h as n,a as o,l as p,v as q,c as r,J as s,d as t,fe as u,g as v,Q as w,Z as x,se as y,ue as z}; diff --git a/ComfyUI-Easy-Use/web_version/v2/assets/primevue-BSs2m5Wu.js b/ComfyUI-Easy-Use/web_version/v2/assets/primevue-BSs2m5Wu.js new file mode 100644 index 0000000000000000000000000000000000000000..aaf7c2e3f52a913a2a0b9bd13e1a159600955d54 --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v2/assets/primevue-BSs2m5Wu.js @@ -0,0 +1 @@ +import{s as e,a as t,i as n,b as o,r as i,m as r,c as l,d as a,u as s,g as u,f as c,t as d,e as v,h as p,j as f,k as m,l as h,n as y,o as g,E as b,p as S,q as $,v as P}from"./primeuix-Be3xdh47.js";import{r as O,a as _,g as w,o as T,n as C,w as j,m as N,b as k,c as L,d as x,e as M}from"./vue-DjzFgvDF.js";var 
D={_loadedStyleNames:new Set,getLoadedStyleNames:function(){return this._loadedStyleNames},isStyleNameLoaded:function(e){return this._loadedStyleNames.has(e)},setLoadedStyleName:function(e){this._loadedStyleNames.add(e)},deleteLoadedStyleName:function(e){this._loadedStyleNames.delete(e)},clearLoadedStyleNames:function(){this._loadedStyleNames.clear()}};function V(e){return(V="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function E(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function A(e){for(var t=1;t1&&void 0!==arguments[1]?arguments[1]:{},l=O(!1),a=O(i),s=O(null),u=o()?window.document:void 0,c=r.document,d=void 0===c?u:c,v=r.immediate,p=void 0===v||v,f=r.manual,m=void 0!==f&&f,h=r.name,y=void 0===h?"style_".concat(++U):h,g=r.id,b=void 0===g?void 0:g,S=r.media,$=void 0===S?void 0:S,P=r.nonce,N=void 0===P?void 0:P,k=r.first,L=void 0!==k&&k,x=r.onMounted,M=void 0===x?void 0:x,D=r.onUpdated,V=void 0===D?void 0:D,E=r.onLoad,I=void 0===E?void 0:E,B=r.props,R=void 0===B?{}:B,F=function(){},z=function(n){var o=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};if(d){var r=A(A({},R),o),u=r.name||y,c=r.id||b,v=r.nonce||N;s.value=d.querySelector('style[data-primevue-style-id="'.concat(u,'"]'))||d.getElementById(c)||d.createElement("style"),s.value.isConnected||(a.value=n||i,e(s.value,{type:"text/css",id:c,media:$,nonce:v}),L?d.head.prepend(s.value):d.head.appendChild(s.value),t(s.value,"data-primevue-style-id",u),e(s.value,r),s.value.onload=function(e){return null==I?void 0:I(e,{name:u})},null==M||M(u)),l.value||(F=j(a,(function(e){s.value.textContent=e,null==V||V(u)}),{immediate:!0}),l.value=!0)}};return p&&!m&&function(e){var t=!(arguments.length>1&&void 0!==arguments[1])||arguments[1];w()?T(e):t?e():C(e)}(z),{id:b,name:y,el:s,css:a,unload:function(){d&&l.value&&(F(),n(s.value)&&d.head.removeChild(s.value),l.value=!1)},load:z,isLoaded:_(l)}}function R(e){return(R="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function F(e,t){return function(e){if(Array.isArray(e))return e}(e)||function(e,t){var n=null==e?null:"undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(null!=n){var o,i,r,l,a=[],s=!0,u=!1;try{if(r=(n=n.call(e)).next,0===t);else for(;!(s=(o=r.call(n)).done)&&(a.push(o.value),a.length!==t);s=!0);}catch(c){u=!0,i=c}finally{try{if(!s&&null!=n.return&&(l=n.return(),Object(l)!==l))return}finally{if(u)throw i}}return a}}(e,t)||function(e,t){if(e){if("string"==typeof e)return z(e,t);var n={}.toString.call(e).slice(8,-1);return"Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n?Array.from(e):"Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)?z(e,t):void 0}}(e,t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function z(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,o=Array(t);n1&&void 0!==arguments[1]?arguments[1]:{},n=(arguments.length>2&&void 0!==arguments[2]?arguments[2]:function(e){return e})(i(e,{dt:a}));return 
n?B(r(n),Y({name:this.name},t)):{}},loadCSS:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};return this.load(this.css,e)},loadTheme:function(){var e=this,t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};return this.load(this.theme,t,(function(n){return l.transformCSS(t.name||e.name,n)}))},getCommonTheme:function(e){return l.getCommon(this.name,e)},getComponentTheme:function(e){return l.getComponent(this.name,e)},getDirectiveTheme:function(e){return l.getDirective(this.name,e)},getPresetTheme:function(e,t,n){return l.getCustomPreset(this.name,e,t,n)},getLayerOrderThemeCSS:function(){return l.getLayerOrderCSS(this.name)},getStyleSheet:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"",t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};if(this.css){var n=i(this.css,{dt:a}),o=r("".concat(n).concat(e)),l=Object.entries(t).reduce((function(e,t){var n=F(t,2),o=n[0],i=n[1];return e.push("".concat(o,'="').concat(i,'"'))&&e}),[]).join(" ");return'")}return""},getCommonThemeStyleSheet:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};return l.getCommonStyleSheet(this.name,e,t)},getThemeStyleSheet:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=[l.getStyleSheet(this.name,e,t)];if(this.theme){var o="base"===this.name?"global-style":"".concat(this.name,"-style"),s=i(this.theme,{dt:a}),u=r(l.transformCSS(o,s)),c=Object.entries(t).reduce((function(e,t){var n=F(t,2),o=n[0],i=n[1];return e.push("".concat(o,'="').concat(i,'"'))&&e}),[]).join(" ");n.push('"))}return n.join("")},extend:function(e){return Y(Y({},this),{},{css:void 0,theme:void 0},e)}};function G(e){return(G="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function J(e,t,n){return t&&function(e,t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:function(){};!function(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}(this,e),this.element=t,this.listener=n}),[{key:"bindScrollListener",value:function(){this.scrollableParents=u(this.element);for(var e=0;e0&&void 0!==arguments[0]?arguments[0]:"pv_id_")}var X=q.extend({name:"common"});function ee(e){return(ee="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function te(e){return le(e)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(e)||ie(e)||oe()}function ne(e,t){return le(e)||function(e,t){var n=null==e?null:"undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(null!=n){var o,i,r,l,a=[],s=!0,u=!1;try{if(r=(n=n.call(e)).next,0===t){if(Object(n)!==n)return;s=!1}else for(;!(s=(o=r.call(n)).done)&&(a.push(o.value),a.length!==t);s=!0);}catch(c){u=!0,i=c}finally{try{if(!s&&null!=n.return&&(l=n.return(),Object(l)!==l))return}finally{if(u)throw i}}return a}}(e,t)||ie(e,t)||oe()}function oe(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function ie(e,t){if(e){if("string"==typeof e)return re(e,t);var 
n={}.toString.call(e).slice(8,-1);return"Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n?Array.from(e):"Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)?re(e,t):void 0}}function re(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,o=Array(t);n1?t-1:0),o=1;o0&&void 0!==arguments[0]?arguments[0]:function(){};D.clearLoadedStyleNames(),f.on("theme:change",e)},_getHostInstance:function(e){return e?this.$options.hostName?e.$.type.name===this.$options.hostName?e:this._getHostInstance(e.$parentInstance):e.$parentInstance:void 0},_getPropValue:function(e){var t;return this[e]||(null===(t=this._getHostInstance(this))||void 0===t?void 0:t[e])},_getOptionValue:function(e){return m(e,arguments.length>1&&void 0!==arguments[1]?arguments[1]:"",arguments.length>2&&void 0!==arguments[2]?arguments[2]:{})},_getPTValue:function(){var e,t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"",o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},i=!(arguments.length>3&&void 0!==arguments[3])||arguments[3],r=/./g.test(n)&&!!o[n.split(".")[0]],l=this._getPropValue("ptOptions")||(null===(e=this.$primevueConfig)||void 0===e?void 0:e.ptOptions)||{},a=l.mergeSections,s=void 0===a||a,u=l.mergeProps,c=void 0!==u&&u,d=i?r?this._useGlobalPT(this._getPTClassValue,n,o):this._useDefaultPT(this._getPTClassValue,n,o):void 0,v=r?void 0:this._getPTSelf(t,this._getPTClassValue,n,se(se({},o),{},{global:d||{}})),p=this._getPTDatasets(n);return s||!s&&v?c?this._mergeProps(c,d,v,p):se(se(se({},d),v),p):se(se({},v),p)},_getPTSelf:function(){for(var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=arguments.length,n=new Array(t>1?t-1:0),o=1;o0&&void 0!==arguments[0]?arguments[0]:"",o="data-pc-",i="root"===n&&p(null===(e=this.pt)||void 0===e?void 0:e["data-pc-section"]);return"transition"!==n&&se(se({},"root"===n&&se(ue({},"".concat(o,"name"),d(i?null===(t=this.pt)||void 0===t?void 0:t["data-pc-section"]:this.$.type.name)),i&&ue({},"".concat(o,"extend"),d(this.$.type.name)))),{},ue({},"".concat(o,"section"),d(n)))},_getPTClassValue:function(){var e=this._getOptionValue.apply(this,arguments);return h(e)||y(e)?{class:e}:e},_getPT:function(e){var t=this,n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"",o=arguments.length>2?arguments[2]:void 0,i=function(e){var i,r=arguments.length>1&&void 0!==arguments[1]&&arguments[1],l=o?o(e):e,a=d(n),s=d(t.$name);return null!==(i=r?a!==s?null==l?void 0:l[a]:void 0:null==l?void 0:l[a])&&void 0!==i?i:l};return null!=e&&e.hasOwnProperty("_usept")?{_usept:e._usept,originalValue:i(e.originalValue),value:i(e.value)}:i(e,!0)},_usePT:function(e,t,n,o){var i=function(e){return t(e,n,o)};if(null!=e&&e.hasOwnProperty("_usept")){var r,l=e._usept||(null===(r=this.$primevueConfig)||void 0===r?void 0:r.ptOptions)||{},a=l.mergeSections,s=void 0===a||a,u=l.mergeProps,c=void 0!==u&&u,d=i(e.originalValue),v=i(e.value);if(void 0===d&&void 0===v)return;return h(v)?v:h(d)?d:s||!s&&v?c?this._mergeProps(c,d,v):se(se({},d),v):v}return i(e)},_useGlobalPT:function(e,t,n){return this._usePT(this.globalPT,e,t,n)},_useDefaultPT:function(e,t,n){return this._usePT(this.defaultPT,e,t,n)},ptm:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"",t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};return this._getPTValue(this.pt,e,se(se({},this.$params),t))},ptmi:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"",t=arguments.length>1&&void 
0!==arguments[1]?arguments[1]:{};return N(this.$_attrsWithoutPT,this.ptm(e,t))},ptmo:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"",n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};return this._getPTValue(e,t,se({instance:this},n),!1)},cx:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"",t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};return this.isUnstyled?void 0:this._getOptionValue(this.$style.classes,e,se(se({},this.$params),t))},sx:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"",t=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};if(!(arguments.length>1&&void 0!==arguments[1])||arguments[1]){var n=this._getOptionValue(this.$style.inlineStyles,e,se(se({},this.$params),t));return[this._getOptionValue(X.inlineStyles,e,se(se({},this.$params),t)),n]}}},computed:{globalPT:function(){var e,t=this;return this._getPT(null===(e=this.$primevueConfig)||void 0===e?void 0:e.pt,void 0,(function(e){return i(e,{instance:t})}))},defaultPT:function(){var e,t=this;return this._getPT(null===(e=this.$primevueConfig)||void 0===e?void 0:e.pt,void 0,(function(e){return t._getOptionValue(e,t.$name,se({},t.$params))||i(e,se({},t.$params))}))},isUnstyled:function(){var e;return void 0!==this.unstyled?this.unstyled:null===(e=this.$primevueConfig)||void 0===e?void 0:e.unstyled},$theme:function(){var e;return null===(e=this.$primevueConfig)||void 0===e?void 0:e.theme},$style:function(){return se(se({classes:void 0,inlineStyles:void 0,load:function(){},loadCSS:function(){},loadTheme:function(){}},(this._getHostInstance(this)||{}).$style),this.$options.style)},$styleOptions:function(){var e;return{nonce:null===(e=this.$primevueConfig)||void 0===e||null===(e=e.csp)||void 0===e?void 0:e.nonce}},$primevueConfig:function(){var e;return null===(e=this.$primevue)||void 0===e?void 0:e.config},$name:function(){return this.$options.hostName||this.$.type.name},$params:function(){var e=this._getHostInstance(this)||this.$parent;return{instance:this,props:this.$props,state:this.$data,attrs:this.$attrs,parent:{instance:e,props:null==e?void 0:e.$props,state:null==e?void 0:e.$data,attrs:null==e?void 0:e.$attrs}}},$_attrsPT:function(){return Object.entries(this.$attrs||{}).filter((function(e){var t=ne(e,1)[0];return null==t?void 0:t.startsWith("pt:")})).reduce((function(e,t){var n=ne(t,2),o=n[0],i=n[1],r=te(o.split(":")).slice(1);return null==r||r.reduce((function(e,t,n,o){return!e[t]&&(e[t]=n===o.length-1?i:{}),e[t]}),e),e}),{})},$_attrsWithoutPT:function(){return Object.entries(this.$attrs||{}).filter((function(e){var t=ne(e,1)[0];return!(null!=t&&t.startsWith("pt:"))})).reduce((function(e,t){var n=ne(t,2),o=n[0],i=n[1];return e[o]=i,e}),{})},$attrSelector:function(){return Q("pc")}}},de=q.extend({name:"baseicon",css:"\n.p-icon {\n display: inline-block;\n vertical-align: baseline;\n}\n\n.p-icon-spin {\n -webkit-animation: p-icon-spin 2s infinite linear;\n animation: p-icon-spin 2s infinite linear;\n}\n\n@-webkit-keyframes p-icon-spin {\n 0% {\n -webkit-transform: rotate(0deg);\n transform: rotate(0deg);\n }\n 100% {\n -webkit-transform: rotate(359deg);\n transform: rotate(359deg);\n }\n}\n\n@keyframes p-icon-spin {\n 0% {\n -webkit-transform: rotate(0deg);\n transform: rotate(0deg);\n }\n 100% {\n -webkit-transform: rotate(359deg);\n transform: rotate(359deg);\n }\n}\n"});function ve(e){return(ve="function"==typeof Symbol&&"symbol"==typeof 
Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function pe(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function fe(e){for(var t=1;te.length)&&(t=e.length);for(var n=0,o=Array(t);n0&&void 0!==arguments[0]?arguments[0]:{},o=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},i=arguments.length>2&&void 0!==arguments[2]?arguments[2]:"",r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},l=!(arguments.length>4&&void 0!==arguments[4])||arguments[4],a=function(){var e=we._getOptionValue.apply(we,arguments);return h(e)||y(e)?{class:e}:e},s=(null===(e=n.binding)||void 0===e||null===(e=e.value)||void 0===e?void 0:e.ptOptions)||(null===(t=n.$primevueConfig)||void 0===t?void 0:t.ptOptions)||{},u=s.mergeSections,c=void 0===u||u,d=s.mergeProps,v=void 0!==d&&d,p=l?we._useDefaultPT(n,n.defaultPT(),a,i,r):void 0,f=we._usePT(n,we._getPT(o,n.$name),a,i,Oe(Oe({},r),{},{global:p||{}})),m=we._getPTDatasets(n,i);return c||!c&&f?v?we._mergeProps(n,v,p,f,m):Oe(Oe(Oe({},p),f),m):Oe(Oe({},f),m)},_getPTDatasets:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"",n="data-pc-";return Oe(Oe({},"root"===t&&_e({},"".concat(n,"name"),d(e.$name))),{},_e({},"".concat(n,"section"),d(t)))},_getPT:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"",n=arguments.length>2?arguments[2]:void 0,o=function(e){var o,i=n?n(e):e,r=d(t);return null!==(o=null==i?void 0:i[r])&&void 0!==o?o:i};return null!=e&&e.hasOwnProperty("_usept")?{_usept:e._usept,originalValue:o(e.originalValue),value:o(e.value)}:o(e)},_usePT:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=arguments.length>1?arguments[1]:void 0,n=arguments.length>2?arguments[2]:void 0,o=arguments.length>3?arguments[3]:void 0,i=arguments.length>4?arguments[4]:void 0,r=function(e){return n(e,o,i)};if(null!=t&&t.hasOwnProperty("_usept")){var l,a=t._usept||(null===(l=e.$primevueConfig)||void 0===l?void 0:l.ptOptions)||{},s=a.mergeSections,u=void 0===s||s,c=a.mergeProps,d=void 0!==c&&c,v=r(t.originalValue),p=r(t.value);if(void 0===v&&void 0===p)return;return h(p)?p:h(v)?v:u||!u&&p?d?we._mergeProps(e,d,v,p):Oe(Oe({},v),p):p}return r(t)},_useDefaultPT:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2?arguments[2]:void 0,o=arguments.length>3?arguments[3]:void 0,i=arguments.length>4?arguments[4]:void 0;return we._usePT(e,t,n,o,i)},_loadStyles:function(e,t,n){var o,i=we._getConfig(t,n),r={nonce:null==i||null===(o=i.csp)||void 0===o?void 0:o.nonce};we._loadCoreStyles(e.$instance,r),we._loadThemeStyles(e.$instance,r),we._loadScopedThemeStyles(e.$instance,r),we._themeChangeListener((function(){return we._loadThemeStyles(e.$instance,r)}))},_loadCoreStyles:function(){var e,t,n,o=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},i=arguments.length>1?arguments[1]:void 0;!D.isStyleNameLoaded(null===(e=o.$style)||void 0===e?void 0:e.name)&&null!==(t=o.$style)&&void 0!==t&&t.name&&(q.loadCSS(i),o.isUnstyled()&&(null===(n=o.$style)||void 0===n||n.loadCSS(i)),D.setLoadedStyleName(o.$style.name))},_loadThemeStyles:function(){var e,t,n=arguments.length>0&&void 
0!==arguments[0]?arguments[0]:{},o=arguments.length>1?arguments[1]:void 0;if(null==n||!n.isUnstyled()){if(!l.isStyleNameLoaded("common")){var i,r,a=(null===(i=n.$style)||void 0===i||null===(r=i.getCommonTheme)||void 0===r?void 0:r.call(i))||{},s=a.primitive,u=a.semantic;q.load(null==s?void 0:s.css,Oe({name:"primitive-variables"},o)),q.load(null==u?void 0:u.css,Oe({name:"semantic-variables"},o)),q.loadTheme(Oe({name:"global-style"},o)),l.setLoadedStyleName("common")}if(!l.isStyleNameLoaded(null===(e=n.$style)||void 0===e?void 0:e.name)&&null!==(t=n.$style)&&void 0!==t&&t.name){var c,d,v,p,f=((null===(c=n.$style)||void 0===c||null===(d=c.getDirectiveTheme)||void 0===d?void 0:d.call(c))||{}).css;null===(v=n.$style)||void 0===v||v.load(f,Oe({name:"".concat(n.$style.name,"-variables")},o)),null===(p=n.$style)||void 0===p||p.loadTheme(Oe({name:"".concat(n.$style.name,"-style")},o)),l.setLoadedStyleName(n.$style.name)}if(!l.isStyleNameLoaded("layer-order")){var m,h,y=null===(m=n.$style)||void 0===m||null===(h=m.getLayerOrderThemeCSS)||void 0===h?void 0:h.call(m);q.load(y,Oe({name:"layer-order",first:!0},o)),l.setLoadedStyleName("layer-order")}}},_loadScopedThemeStyles:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=arguments.length>1?arguments[1]:void 0,n=e.preset();if(n&&e.$attrSelector){var o,i,r,l=((null===(o=e.$style)||void 0===o||null===(i=o.getPresetTheme)||void 0===i?void 0:i.call(o,n,"[".concat(e.$attrSelector,"]")))||{}).css,a=null===(r=e.$style)||void 0===r?void 0:r.load(l,Oe({name:"".concat(e.$attrSelector,"-").concat(e.$style.name)},t));e.scopedStyleEl=a.el}},_themeChangeListener:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:function(){};D.clearLoadedStyleNames(),f.on("theme:change",e)},_hook:function(e,t,n,o,i,r){var l,a,s="on".concat($(t)),u=we._getConfig(o,i),c=null==n?void 0:n.$instance,d=we._usePT(c,we._getPT(null==o||null===(l=o.value)||void 0===l?void 0:l.pt,e),we._getOptionValue,"hooks.".concat(s)),v=we._useDefaultPT(c,null==u||null===(a=u.pt)||void 0===a||null===(a=a.directives)||void 0===a?void 0:a[e],we._getOptionValue,"hooks.".concat(s)),p={el:n,binding:o,vnode:i,prevVnode:r};null==d||d(c,p),null==v||v(c,p)},_mergeProps:function(){for(var e=arguments.length>1?arguments[1]:void 0,t=arguments.length,n=new Array(t>2?t-2:0),o=2;o1&&void 0!==arguments[1]?arguments[1]:{},n=function(n,o,i,r,l){var a,s,u;o._$instances=o._$instances||{};var c=we._getConfig(i,r),d=o._$instances[e]||{},v=g(d)?Oe(Oe({},t),null==t?void 0:t.methods):{};o._$instances[e]=Oe(Oe({},d),{},{$name:e,$host:o,$binding:i,$modifiers:null==i?void 0:i.modifiers,$value:null==i?void 0:i.value,$el:d.$el||o||void 0,$style:Oe({classes:void 0,inlineStyles:void 0,load:function(){},loadCSS:function(){},loadTheme:function(){}},null==t?void 0:t.style),$primevueConfig:c,$attrSelector:o.$attrSelector,defaultPT:function(){return we._getPT(null==c?void 0:c.pt,void 0,(function(t){var n;return null==t||null===(n=t.directives)||void 0===n?void 0:n[e]}))},isUnstyled:function(){var e,t;return void 0!==(null===(e=o.$instance)||void 0===e||null===(e=e.$binding)||void 0===e||null===(e=e.value)||void 0===e?void 0:e.unstyled)?null===(t=o.$instance)||void 0===t||null===(t=t.$binding)||void 0===t||null===(t=t.value)||void 0===t?void 0:t.unstyled:null==c?void 0:c.unstyled},theme:function(){var e;return null===(e=o.$instance)||void 0===e||null===(e=e.$primevueConfig)||void 0===e?void 0:e.theme},preset:function(){var e;return null===(e=o.$instance)||void 0===e||null===(e=e.$binding)||void 
0===e||null===(e=e.value)||void 0===e?void 0:e.dt},ptm:function(){var e,t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"",n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};return we._getPTValue(o.$instance,null===(e=o.$instance)||void 0===e||null===(e=e.$binding)||void 0===e||null===(e=e.value)||void 0===e?void 0:e.pt,t,Oe({},n))},ptmo:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"",n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};return we._getPTValue(o.$instance,e,t,n,!1)},cx:function(){var e,t,n=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"",i=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};return null!==(e=o.$instance)&&void 0!==e&&e.isUnstyled()?void 0:we._getOptionValue(null===(t=o.$instance)||void 0===t||null===(t=t.$style)||void 0===t?void 0:t.classes,n,Oe({},i))},sx:function(){var e,t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"",n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};return!(arguments.length>1&&void 0!==arguments[1])||arguments[1]?we._getOptionValue(null===(e=o.$instance)||void 0===e||null===(e=e.$style)||void 0===e?void 0:e.inlineStyles,t,Oe({},n)):void 0}},v),o.$instance=o._$instances[e],null===(a=(s=o.$instance)[n])||void 0===a||a.call(s,o,i,r,l),o["$".concat(e)]=o.$instance,we._hook(e,n,o,i,r,l),o.$pd||(o.$pd={}),o.$pd[e]=Oe(Oe({},null===(u=o.$pd)||void 0===u?void 0:u[e]),{},{name:e,instance:o.$instance})};return{created:function(e,t,o,i){n("created",e,t,o,i)},beforeMount:function(e,t,o,i){e.$attrSelector=Q("pd"),we._loadStyles(e,t,o),n("beforeMount",e,t,o,i),function(e){var t,n,o,i,r,l=null===(t=e.$instance)||void 0===t?void 0:t.watch;null==l||null===(n=l.config)||void 0===n||n.call(e.$instance,null===(o=e.$instance)||void 0===o?void 0:o.$primevueConfig),ge.on("config:change",(function(t){var n,o=t.newValue,i=t.oldValue;return null==l||null===(n=l.config)||void 0===n?void 0:n.call(e.$instance,o,i)})),null==l||null===(i=l["config.ripple"])||void 0===i||i.call(e.$instance,null===(r=e.$instance)||void 0===r||null===(r=r.$primevueConfig)||void 0===r?void 0:r.ripple),ge.on("config:ripple:change",(function(t){var n,o=t.newValue,i=t.oldValue;return null==l||null===(n=l["config.ripple"])||void 0===n?void 0:n.call(e.$instance,o,i)}))}(e)},mounted:function(e,t,o,i){we._loadStyles(e,t,o),n("mounted",e,t,o,i)},beforeUpdate:function(e,t,o,i){n("beforeUpdate",e,t,o,i)},updated:function(e,t,o,i){we._loadStyles(e,t,o),n("updated",e,t,o,i)},beforeUnmount:function(e,t,o,i){n("beforeUnmount",e,t,o,i)},unmounted:function(e,t,o,i){var r;null===(r=e.$instance)||void 0===r||null===(r=r.scopedStyleEl)||void 0===r||null===(r=r.value)||void 0===r||r.remove(),n("unmounted",e,t,o,i)}}},extend:function(){var e=Se(we._getMeta.apply(we,arguments),2),t=e[0],n=e[1];return Oe({extend:function(){var e=Se(we._getMeta.apply(we,arguments),2),t=e[0],o=e[1];return we.extend(t,Oe(Oe(Oe({},n),null==n?void 0:n.methods),o))}},we._extend(t,n))}},Te="equals",Ce="notEquals";function je(e){return(je="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function Ne(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function ke(e){for(var 
t=1;tet=t,ct=Symbol();function at(t){return t&&"object"==typeof t&&"[object Object]"===Object.prototype.toString.call(t)&&"function"!=typeof t.toJSON}var it,lt;function ut(){const e=t(!0),r=e.run((()=>n({})));let c=[],a=[];const i=o({install(t){rt(i),i._a=t,t.provide(ct,i),t.config.globalProperties.$pinia=i,a.forEach((t=>c.push(t))),a=[]},use(t){return this._a?c.push(t):a.push(t),this},_p:c,_a:null,_e:e,_s:new Map,state:r});return i}(lt=it||(it={})).direct="direct",lt.patchObject="patch object",lt.patchFunction="patch function";const st=()=>{};function dt(t,n,o,e=st){t.push(n);const r=()=>{const o=t.indexOf(n);o>-1&&(t.splice(o,1),e())};return!o&&l()&&u(r),r}function pt(t,...n){t.slice().forEach((t=>{t(...n)}))}const bt=t=>t(),ft=Symbol(),gt=Symbol();function ht(t,n){t instanceof Map&&n instanceof Map?n.forEach(((n,o)=>t.set(o,n))):t instanceof Set&&n instanceof Set&&n.forEach(t.add,t);for(const o in n){if(!n.hasOwnProperty(o))continue;const e=n[o],r=t[o];at(r)&&at(e)&&t.hasOwnProperty(o)&&!c(e)&&!a(e)?t[o]=ht(r,e):t[o]=e}return t}const vt=Symbol();const{assign:yt}=Object;function mt(o,l,u={},d,p,b){let f;const g=yt({actions:{}},u),h={deep:!0};let v,y,m,x=[],k=[];const w=d.state.value[o];let $;function _(t){let n;v=y=!1,"function"==typeof t?(t(d.state.value[o]),n={type:it.patchFunction,storeId:o,events:m}):(ht(d.state.value[o],t),n={type:it.patchObject,payload:t,storeId:o,events:m});const e=$=Symbol();s().then((()=>{$===e&&(v=!0)})),y=!0,pt(x,n,d.state.value[o])}b||w||(d.state.value[o]={}),n({});const S=b?function(){const{state:t}=u,n=t?t():{};this.$patch((t=>{yt(t,n)}))}:st;const E=(t,n="")=>{if(ft in t)return t[gt]=n,t;const e=function(){rt(d);const n=Array.from(arguments),r=[],c=[];let a;pt(k,{args:n,name:e[gt],store:A,after:function(t){r.push(t)},onError:function(t){c.push(t)}});try{a=t.apply(this&&this.$id===o?this:A,n)}catch(i){throw pt(c,i),i}return a instanceof Promise?a.then((t=>(pt(r,t),t))).catch((t=>(pt(c,t),Promise.reject(t)))):(pt(r,a),a)};return e[ft]=!0,e[gt]=n,e},O={_p:d,$id:o,$onAction:dt.bind(null,k),$patch:_,$reset:S,$subscribe(t,n={}){const r=dt(x,t,n.detached,(()=>c())),c=f.run((()=>e((()=>d.state.value[o]),(e=>{("sync"===n.flush?y:v)&&t({storeId:o,type:it.direct,events:m},e)}),yt({},h,n))));return r},$dispose:function(){f.stop(),x=[],k=[],d._s.delete(o)}},A=r(O);d._s.set(o,A);const T=(d._a&&d._a.runWithContext||bt)((()=>d._e.run((()=>(f=t()).run((()=>l({action:E})))))));for(const t in T){const n=T[t];if(c(n)&&(!c(j=n)||!j.effect)||a(n))b||(!w||at(C=n)&&C.hasOwnProperty(vt)||(c(n)?n.value=w[t]:ht(n,w[t])),d.state.value[o][t]=n);else if("function"==typeof n){const o=E(n,t);T[t]=o,g.actions[t]=n}}var C,j;return yt(A,T),yt(i(A),T),Object.defineProperty(A,"$state",{get:()=>d.state.value[o],set:t=>{_((n=>{yt(n,t)}))}}),d._p.forEach((t=>{yt(A,f.run((()=>t({store:A,app:d._a,pinia:d,options:g}))))})),w&&b&&u.hydrate&&u.hydrate(A.$state,w),v=!0,y=!0,A}function xt(t,n,e){let r,c;const a="function"==typeof n;function i(t,e){const i=f();(t=t||(i?d(ct,null):null))&&rt(t),(t=et)._s.has(r)||(a?mt(r,n,c,t):function(t,n,e){const{state:r,actions:c,getters:a}=n,i=e.state.value[t];let l;l=mt(t,(function(){i||(e.state.value[t]=r?r():{});const n=p(e.state.value[t]);return yt(n,c,Object.keys(a||{}).reduce(((n,r)=>(n[r]=o(b((()=>{rt(e);const n=e._s.get(t);return a[r].call(n,n)}))),n)),{}))}),n,e,0,!0)}(r,c,t));return t._s.get(r)}return"string"==typeof t?(r=t,c=a?e:n):(c=t,r=t.id),i.$id=r,i}function kt(t){{t=i(t);const n={};for(const o in t){const e=t[o];(c(e)||a(e))&&(n[o]=g(t,o))}return 
n}}const wt="undefined"!=typeof navigator&&navigator.userAgent.toLowerCase().indexOf("firefox")>0;function $t(t,n,o,e){t.addEventListener?t.addEventListener(n,o,e):t.attachEvent&&t.attachEvent("on".concat(n),o)}function _t(t,n,o,e){t.removeEventListener?t.removeEventListener(n,o,e):t.detachEvent&&t.detachEvent("on".concat(n),o)}function St(t,n){const o=n.slice(0,n.length-1);for(let e=0;e=0;)n[o-1]+=",",n.splice(o,1),o=n.lastIndexOf("");return n}const Ot={backspace:8,"⌫":8,tab:9,clear:12,enter:13,"↩":13,return:13,esc:27,escape:27,space:32,left:37,up:38,right:39,down:40,del:46,delete:46,ins:45,insert:45,home:36,end:35,pageup:33,pagedown:34,capslock:20,num_0:96,num_1:97,num_2:98,num_3:99,num_4:100,num_5:101,num_6:102,num_7:103,num_8:104,num_9:105,num_multiply:106,num_add:107,num_enter:108,num_subtract:109,num_decimal:110,num_divide:111,"⇪":20,",":188,".":190,"/":191,"`":192,"-":wt?173:189,"=":wt?61:187,";":wt?59:186,"'":222,"[":219,"]":221,"\\":220},At={"⇧":16,shift:16,"⌥":18,alt:18,option:18,"⌃":17,ctrl:17,control:17,"⌘":91,cmd:91,command:91},Tt={16:"shiftKey",18:"altKey",17:"ctrlKey",91:"metaKey",shiftKey:16,ctrlKey:17,altKey:18,metaKey:91},Ct={16:!1,18:!1,17:!1,91:!1},jt={};for(let vn=1;vn<20;vn++)Ot["f".concat(vn)]=111+vn;let Bt=[],It=null,Lt="all";const zt=new Map,Ht=t=>Ot[t.toLowerCase()]||At[t.toLowerCase()]||t.toUpperCase().charCodeAt(0);function Pt(t){Lt=t||"all"}function Kt(){return Lt||"all"}function Mt(t){if(void 0===t)Object.keys(jt).forEach((t=>{Array.isArray(jt[t])&&jt[t].forEach((t=>Dt(t))),delete jt[t]})),Rt(null);else if(Array.isArray(t))t.forEach((t=>{t.key&&Dt(t)}));else if("object"==typeof t)t.key&&Dt(t);else if("string"==typeof t){for(var n=arguments.length,o=new Array(n>1?n-1:0),e=1;e{let{key:n,scope:o,method:e,splitKey:r="+"}=t;Et(n).forEach((t=>{const n=t.split(r),c=n.length,a=n[c-1],i="*"===a?"*":Ht(a);if(!jt[i])return;o||(o=Kt());const l=c>1?St(At,n):[],u=[];jt[i]=jt[i].filter((t=>{const n=(!e||t.method===e)&&t.scope===o&&function(t,n){const o=t.length>=n.length?t:n,e=t.length>=n.length?n:t;let r=!0;for(let c=0;cRt(t)))}))};function Ft(t,n,o,e){if(n.element!==e)return;let r;if(n.scope===o||"all"===n.scope){r=n.mods.length>0;for(const t in Ct)Object.prototype.hasOwnProperty.call(Ct,t)&&(!Ct[t]&&n.mods.indexOf(+t)>-1||Ct[t]&&-1===n.mods.indexOf(+t))&&(r=!1);(0!==n.mods.length||Ct[16]||Ct[18]||Ct[17]||Ct[91])&&!r&&"*"!==n.shortcut||(n.keys=[],n.keys=n.keys.concat(Bt),!1===n.method(t,n)&&(t.preventDefault?t.preventDefault():t.returnValue=!1,t.stopPropagation&&t.stopPropagation(),t.cancelBubble&&(t.cancelBubble=!0)))}}function Ut(t,n){const o=jt["*"];let e=t.keyCode||t.which||t.charCode;if(!Vt.filter.call(this,t))return;if(93!==e&&224!==e||(e=91),-1===Bt.indexOf(e)&&229!==e&&Bt.push(e),["ctrlKey","altKey","shiftKey","metaKey"].forEach((n=>{const o=Tt[n];t[n]&&-1===Bt.indexOf(o)?Bt.push(o):!t[n]&&Bt.indexOf(o)>-1?Bt.splice(Bt.indexOf(o),1):"metaKey"===n&&t[n]&&3===Bt.length&&(t.ctrlKey||t.shiftKey||t.altKey||(Bt=Bt.slice(Bt.indexOf(o))))})),e in Ct){Ct[e]=!0;for(const t in At)At[t]===e&&(Vt[t]=!0);if(!o)return}for(const i in Ct)Object.prototype.hasOwnProperty.call(Ct,i)&&(Ct[i]=t[Tt[i]]);t.getModifierState&&(!t.altKey||t.ctrlKey)&&t.getModifierState("AltGraph")&&(-1===Bt.indexOf(17)&&Bt.push(17),-1===Bt.indexOf(18)&&Bt.push(18),Ct[17]=!0,Ct[18]=!0);const r=Kt();if(o)for(let i=0;i1&&(r=St(At,t)),(t="*"===(t=t[t.length-1])?"*":Ht(t))in jt||(jt[t]=[]),jt[t].push({keyup:l,keydown:u,scope:c,mods:r,shortcut:e[i],method:o,key:e[i],splitKey:s,element:a});if(void 
0!==a&&window){if(!zt.has(a)){const t=function(){return Ut(arguments.length>0&&void 0!==arguments[0]?arguments[0]:window.event,a)},n=function(){let t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:window.event;Ut(t,a),function(t){let n=t.keyCode||t.which||t.charCode;const o=Bt.indexOf(n);if(o>=0&&Bt.splice(o,1),t.key&&"meta"===t.key.toLowerCase()&&Bt.splice(0,Bt.length),93!==n&&224!==n||(n=91),n in Ct){Ct[n]=!1;for(const t in At)At[t]===n&&(Vt[t]=!1)}}(t)};zt.set(a,{keydownListener:t,keyupListenr:n,capture:d}),$t(a,"keydown",t,d),$t(a,"keyup",n,d)}if(!It){const t=()=>{Bt=[]};It={listener:t,capture:d},$t(window,"focus",t,d)}}}function Rt(t){const n=Object.values(jt).flat();if(n.findIndex((n=>{let{element:o}=n;return o===t}))<0){const{keydownListener:n,keyupListenr:o,capture:e}=zt.get(t)||{};n&&o&&(_t(t,"keyup",o,e),_t(t,"keydown",n,e),zt.delete(t))}if(n.length<=0||zt.size<=0){if(Object.keys(zt).forEach((t=>{const{keydownListener:n,keyupListenr:o,capture:e}=zt.get(t)||{};n&&o&&(_t(t,"keyup",o,e),_t(t,"keydown",n,e),zt.delete(t))})),zt.clear(),Object.keys(jt).forEach((t=>delete jt[t])),It){const{listener:t,capture:n}=It;_t(window,"focus",t,n),It=null}}}const Nt={getPressedKeyString:function(){return Bt.map((t=>{return n=t,Object.keys(Ot).find((t=>Ot[t]===n))||(t=>Object.keys(At).find((n=>At[n]===t)))(t)||String.fromCharCode(t);var n}))},setScope:Pt,getScope:Kt,deleteScope:function(t,n){let o,e;t||(t=Kt());for(const r in jt)if(Object.prototype.hasOwnProperty.call(jt,r))for(o=jt[r],e=0;e{let{element:n}=t;return Rt(n)}))}else e++;Kt()===t&&Pt(n||"all")},getPressedKeyCodes:function(){return Bt.slice(0)},getAllKeyCodes:function(){const t=[];return Object.keys(jt).forEach((n=>{jt[n].forEach((n=>{let{key:o,scope:e,mods:r,shortcut:c}=n;t.push({scope:e,shortcut:c,mods:r,keys:o.split("+").map((t=>Ht(t)))})}))})),t},isPressed:function(t){return"string"==typeof t&&(t=Ht(t)),-1!==Bt.indexOf(t)},filter:function(t){const n=t.target||t.srcElement,{tagName:o}=n;let e=!0;const r="INPUT"===o&&!["checkbox","radio","range","button","file","reset","submit","color"].includes(n.type);return(n.isContentEditable||(r||"TEXTAREA"===o||"SELECT"===o)&&!n.readOnly)&&(e=!1),e},trigger:function(t){let n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"all";Object.keys(jt).forEach((o=>{jt[o].filter((o=>o.scope===n&&o.shortcut===t)).forEach((t=>{t&&t.method&&t.method()}))}))},unbind:Mt,keyMap:Ot,modifier:At,modifierMap:Tt};for(const vn in Nt)Object.prototype.hasOwnProperty.call(Nt,vn)&&(Vt[vn]=Nt[vn]);if("undefined"!=typeof window){const t=window.hotkeys;Vt.noConflict=n=>(n&&window.hotkeys===Vt&&(window.hotkeys=t),Vt),window.hotkeys=Vt}var Gt={root:function(t){var n=t.props,o=t.instance;return["p-badge p-component",{"p-badge-circle":j(n.value)&&1===String(n.value).length,"p-badge-dot":B(n.value)&&!o.$slots.default,"p-badge-sm":"small"===n.size,"p-badge-lg":"large"===n.size,"p-badge-xl":"xlarge"===n.size,"p-badge-info":"info"===n.severity,"p-badge-success":"success"===n.severity,"p-badge-warn":"warn"===n.severity,"p-badge-danger":"danger"===n.severity,"p-badge-secondary":"secondary"===n.severity,"p-badge-contrast":"contrast"===n.severity}]}},Zt=W.extend({name:"badge",theme:function(t){var n=t.dt;return"\n.p-badge {\n display: inline-flex;\n border-radius: ".concat(n("badge.border.radius"),";\n align-items: center;\n justify-content: center;\n padding: ").concat(n("badge.padding"),";\n background: ").concat(n("badge.primary.background"),";\n color: ").concat(n("badge.primary.color"),";\n font-size: 
").concat(n("badge.font.size"),";\n font-weight: ").concat(n("badge.font.weight"),";\n min-width: ").concat(n("badge.min.width"),";\n height: ").concat(n("badge.height"),";\n}\n\n.p-badge-dot {\n width: ").concat(n("badge.dot.size"),";\n min-width: ").concat(n("badge.dot.size"),";\n height: ").concat(n("badge.dot.size"),";\n border-radius: 50%;\n padding: 0;\n}\n\n.p-badge-circle {\n padding: 0;\n border-radius: 50%;\n}\n\n.p-badge-secondary {\n background: ").concat(n("badge.secondary.background"),";\n color: ").concat(n("badge.secondary.color"),";\n}\n\n.p-badge-success {\n background: ").concat(n("badge.success.background"),";\n color: ").concat(n("badge.success.color"),";\n}\n\n.p-badge-info {\n background: ").concat(n("badge.info.background"),";\n color: ").concat(n("badge.info.color"),";\n}\n\n.p-badge-warn {\n background: ").concat(n("badge.warn.background"),";\n color: ").concat(n("badge.warn.color"),";\n}\n\n.p-badge-danger {\n background: ").concat(n("badge.danger.background"),";\n color: ").concat(n("badge.danger.color"),";\n}\n\n.p-badge-contrast {\n background: ").concat(n("badge.contrast.background"),";\n color: ").concat(n("badge.contrast.color"),";\n}\n\n.p-badge-sm {\n font-size: ").concat(n("badge.sm.font.size"),";\n min-width: ").concat(n("badge.sm.min.width"),";\n height: ").concat(n("badge.sm.height"),";\n}\n\n.p-badge-lg {\n font-size: ").concat(n("badge.lg.font.size"),";\n min-width: ").concat(n("badge.lg.min.width"),";\n height: ").concat(n("badge.lg.height"),";\n}\n\n.p-badge-xl {\n font-size: ").concat(n("badge.xl.font.size"),";\n min-width: ").concat(n("badge.xl.min.width"),";\n height: ").concat(n("badge.xl.height"),";\n}\n")},classes:Gt}),Jt={name:"Badge",extends:{name:"BaseBadge",extends:Y,props:{value:{type:[String,Number],default:null},severity:{type:String,default:null},size:{type:String,default:null}},style:Zt,provide:function(){return{$pcBadge:this,$parentInstance:this}}},inheritAttrs:!1};Jt.render=function(t,n,o,e,r,c){return h(),v("span",k({class:t.cx("root")},t.ptmi("root")),[y(t.$slots,"default",{},(function(){return[m(x(t.value),1)]}))],16)};var Xt=W.extend({name:"ripple-directive",theme:function(t){var n=t.dt;return"\n.p-ink {\n display: block;\n position: absolute;\n background: ".concat(n("ripple.background"),";\n border-radius: 100%;\n transform: scale(0);\n pointer-events: none;\n}\n\n.p-ink-active {\n animation: ripple 0.4s linear;\n}\n\n@keyframes ripple {\n 100% {\n opacity: 0;\n transform: scale(2.5);\n }\n}\n")},classes:{root:"p-ink"}});function qt(t){return(qt="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}function Wt(t){return function(t){if(Array.isArray(t))return Yt(t)}(t)||function(t){if("undefined"!=typeof Symbol&&null!=t[Symbol.iterator]||null!=t["@@iterator"])return Array.from(t)}(t)||function(t,n){if(t){if("string"==typeof t)return Yt(t,n);var o={}.toString.call(t).slice(8,-1);return"Object"===o&&t.constructor&&(o=t.constructor.name),"Map"===o||"Set"===o?Array.from(t):"Arguments"===o||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(o)?Yt(t,n):void 0}}(t)||function(){throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function Yt(t,n){(null==n||n>t.length)&&(n=t.length);for(var o=0,e=Array(n);ot.length)&&(n=t.length);for(var 
o=0,e=Array(n);oi.width||r<0||e<0||e+a>i.height},getTarget:function(t){return q(t,"p-inputwrapper")?J(t,"input"):t},getModifiers:function(t){return t.modifiers&&Object.keys(t.modifiers).length?t.modifiers:t.arg&&"object"===dn(t.arg)?Object.entries(t.arg).reduce((function(t,n){var o=ln(n,2),e=o[0],r=o[1];return"event"!==e&&"position"!==e||(t[r]=!0),t}),{}):{}}}}),bn={name:"Card",extends:{name:"BaseCard",extends:Y,style:W.extend({name:"card",theme:function(t){var n=t.dt;return"\n.p-card {\n background: ".concat(n("card.background"),";\n color: ").concat(n("card.color"),";\n box-shadow: ").concat(n("card.shadow"),";\n border-radius: ").concat(n("card.border.radius"),";\n display: flex;\n flex-direction: column;\n}\n\n.p-card-caption {\n display: flex;\n flex-direction: column;\n gap: ").concat(n("card.caption.gap"),";\n}\n\n.p-card-body {\n padding: ").concat(n("card.body.padding"),";\n display: flex;\n flex-direction: column;\n gap: ").concat(n("card.body.gap"),";\n}\n\n.p-card-title {\n font-size: ").concat(n("card.title.font.size"),";\n font-weight: ").concat(n("card.title.font.weight"),";\n}\n\n.p-card-subtitle {\n color: ").concat(n("card.subtitle.color"),";\n}\n")},classes:{root:"p-card p-component",header:"p-card-header",body:"p-card-body",caption:"p-card-caption",title:"p-card-title",subtitle:"p-card-subtitle",content:"p-card-content",footer:"p-card-footer"}}),provide:function(){return{$pcCard:this,$parentInstance:this}}},inheritAttrs:!1};bn.render=function(t,n,o,e,r,c){return h(),v("div",k({class:t.cx("root")},t.ptmi("root")),[t.$slots.header?(h(),v("div",k({key:0,class:t.cx("header")},t.ptm("header")),[y(t.$slots,"header")],16)):A("",!0),T("div",k({class:t.cx("body")},t.ptm("body")),[t.$slots.title||t.$slots.subtitle?(h(),v("div",k({key:0,class:t.cx("caption")},t.ptm("caption")),[t.$slots.title?(h(),v("div",k({key:0,class:t.cx("title")},t.ptm("title")),[y(t.$slots,"title")],16)):A("",!0),t.$slots.subtitle?(h(),v("div",k({key:1,class:t.cx("subtitle")},t.ptm("subtitle")),[y(t.$slots,"subtitle")],16)):A("",!0)],16)):A("",!0),T("div",k({class:t.cx("content")},t.ptm("content")),[y(t.$slots,"content")],16),t.$slots.footer?(h(),v("div",k({key:1,class:t.cx("footer")},t.ptm("footer")),[y(t.$slots,"footer")],16)):A("",!0)],16)],16)};var fn=W.extend({name:"inputtext",theme:function(t){var n=t.dt;return"\n.p-inputtext {\n font-family: inherit;\n font-feature-settings: inherit;\n font-size: 1rem;\n color: ".concat(n("inputtext.color"),";\n background: ").concat(n("inputtext.background"),";\n padding: ").concat(n("inputtext.padding.y")," ").concat(n("inputtext.padding.x"),";\n border: 1px solid ").concat(n("inputtext.border.color"),";\n transition: background ").concat(n("inputtext.transition.duration"),", color ").concat(n("inputtext.transition.duration"),", border-color ").concat(n("inputtext.transition.duration"),", outline-color ").concat(n("inputtext.transition.duration"),", box-shadow ").concat(n("inputtext.transition.duration"),";\n appearance: none;\n border-radius: ").concat(n("inputtext.border.radius"),";\n outline-color: transparent;\n box-shadow: ").concat(n("inputtext.shadow"),";\n}\n\n.p-inputtext:enabled:hover {\n border-color: ").concat(n("inputtext.hover.border.color"),";\n}\n\n.p-inputtext:enabled:focus {\n border-color: ").concat(n("inputtext.focus.border.color"),";\n box-shadow: ").concat(n("inputtext.focus.ring.shadow"),";\n outline: ").concat(n("inputtext.focus.ring.width")," ").concat(n("inputtext.focus.ring.style")," ").concat(n("inputtext.focus.ring.color"),";\n 
outline-offset: ").concat(n("inputtext.focus.ring.offset"),";\n}\n\n.p-inputtext.p-invalid {\n border-color: ").concat(n("inputtext.invalid.border.color"),";\n}\n\n.p-inputtext.p-variant-filled {\n background: ").concat(n("inputtext.filled.background"),";\n}\n\n.p-inputtext.p-variant-filled:enabled:focus {\n background: ").concat(n("inputtext.filled.focus.background"),";\n}\n\n.p-inputtext:disabled {\n opacity: 1;\n background: ").concat(n("inputtext.disabled.background"),";\n color: ").concat(n("inputtext.disabled.color"),";\n}\n\n.p-inputtext::placeholder {\n color: ").concat(n("inputtext.placeholder.color"),";\n}\n\n.p-inputtext-sm {\n font-size: ").concat(n("inputtext.sm.font.size"),";\n padding: ").concat(n("inputtext.sm.padding.y")," ").concat(n("inputtext.sm.padding.x"),";\n}\n\n.p-inputtext-lg {\n font-size: ").concat(n("inputtext.lg.font.size"),";\n padding: ").concat(n("inputtext.lg.padding.y")," ").concat(n("inputtext.lg.padding.x"),";\n}\n\n.p-inputtext-fluid {\n width: 100%;\n}\n")},classes:{root:function(t){var n=t.instance,o=t.props;return["p-inputtext p-component",{"p-filled":n.filled,"p-inputtext-sm":"small"===o.size,"p-inputtext-lg":"large"===o.size,"p-invalid":o.invalid,"p-variant-filled":o.variant?"filled"===o.variant:"filled"===n.$primevue.config.inputStyle||"filled"===n.$primevue.config.inputVariant,"p-inputtext-fluid":n.hasFluid}]}}}),gn={name:"InputText",extends:{name:"BaseInputText",extends:Y,props:{modelValue:null,size:{type:String,default:null},invalid:{type:Boolean,default:!1},variant:{type:String,default:null},fluid:{type:Boolean,default:null}},style:fn,provide:function(){return{$pcInputText:this,$parentInstance:this}}},inheritAttrs:!1,emits:["update:modelValue"],inject:{$pcFluid:{default:null}},methods:{getPTOptions:function(t){return("root"===t?this.ptmi:this.ptm)(t,{context:{filled:this.filled,disabled:this.$attrs.disabled||""===this.$attrs.disabled}})},onInput:function(t){this.$emit("update:modelValue",t.target.value)}},computed:{filled:function(){return null!=this.modelValue&&this.modelValue.toString().length>0},hasFluid:function(){return B(this.fluid)?!!this.$pcFluid:this.fluid}}},hn=["value","aria-invalid"];gn.render=function(t,n,o,e,r,c){return h(),v("input",k({type:"text",class:t.cx("root"),value:t.modelValue,"aria-invalid":t.invalid||void 0,onInput:n[0]||(n[0]=function(){return c.onInput&&c.onInput.apply(c,arguments)})},c.getPTOptions("root")),null,16,hn)};export{pn as T,cn as a,bn as b,gn as c,xt as d,ut as e,Vt as h,kt as s}; diff --git a/ComfyUI-Easy-Use/web_version/v2/assets/vue-DjzFgvDF.js b/ComfyUI-Easy-Use/web_version/v2/assets/vue-DjzFgvDF.js new file mode 100644 index 0000000000000000000000000000000000000000..35a6acd4e8577da244617c5c53051b224b9e07fe --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v2/assets/vue-DjzFgvDF.js @@ -0,0 +1,23 @@ +/** +* @vue/shared v3.4.36 +* (c) 2018-present Yuxi (Evan) You and Vue contributors +* @license MIT +**/ +/*! 
#__NO_SIDE_EFFECTS__ */ +function e(e,t){const n=new Set(e.split(","));return e=>n.has(e)}const t={},n=[],s=()=>{},r=()=>!1,o=e=>111===e.charCodeAt(0)&&110===e.charCodeAt(1)&&(e.charCodeAt(2)>122||e.charCodeAt(2)<97),l=e=>e.startsWith("onUpdate:"),i=Object.assign,c=(e,t)=>{const n=e.indexOf(t);n>-1&&e.splice(n,1)},u=Object.prototype.hasOwnProperty,a=(e,t)=>u.call(e,t),f=Array.isArray,p=e=>"[object Map]"===b(e),d=e=>"[object Set]"===b(e),h=e=>"function"==typeof e,v=e=>"string"==typeof e,g=e=>"symbol"==typeof e,_=e=>null!==e&&"object"==typeof e,m=e=>(_(e)||h(e))&&h(e.then)&&h(e.catch),y=Object.prototype.toString,b=e=>y.call(e),x=e=>b(e).slice(8,-1),w=e=>"[object Object]"===b(e),S=e=>v(e)&&"NaN"!==e&&"-"!==e[0]&&""+parseInt(e,10)===e,C=e(",key,ref,ref_for,ref_key,onVnodeBeforeMount,onVnodeMounted,onVnodeBeforeUpdate,onVnodeUpdated,onVnodeBeforeUnmount,onVnodeUnmounted"),k=e=>{const t=Object.create(null);return n=>t[n]||(t[n]=e(n))},O=/-(\w)/g,E=k((e=>e.replace(O,((e,t)=>t?t.toUpperCase():"")))),F=/\B([A-Z])/g,M=k((e=>e.replace(F,"-$1").toLowerCase())),P=k((e=>e.charAt(0).toUpperCase()+e.slice(1))),A=k((e=>e?`on${P(e)}`:"")),R=(e,t)=>!Object.is(e,t),j=(e,...t)=>{for(let n=0;n{Object.defineProperty(e,t,{configurable:!0,enumerable:!1,writable:s,value:n})},L=e=>{const t=parseFloat(e);return isNaN(t)?e:t};let I;const V=()=>I||(I="undefined"!=typeof globalThis?globalThis:"undefined"!=typeof self?self:"undefined"!=typeof window?window:"undefined"!=typeof global?global:{});function $(e){if(f(e)){const t={};for(let n=0;n{if(e){const n=e.split(U);n.length>1&&(t[n[0].trim()]=n[1].trim())}})),t}function W(e){let t="";if(v(e))t=e;else if(f(e))for(let n=0;n!(!e||!0!==e.__v_isRef),q=e=>v(e)?e:null==e?"":f(e)||_(e)&&(e.toString===y||!h(e.toString))?K(e)?q(e.value):JSON.stringify(e,G,2):String(e),G=(e,t)=>K(t)?G(e,t.value):p(t)?{[`Map(${t.size})`]:[...t.entries()].reduce(((e,[t,n],s)=>(e[J(t,s)+" =>"]=n,e)),{})}:d(t)?{[`Set(${t.size})`]:[...t.values()].map((e=>J(e)))}:g(t)?J(t):!_(t)||f(t)||w(t)?t:String(t),J=(e,t="")=>{var n;return g(e)?`Symbol(${null!=(n=e.description)?n:t})`:e}; +/** +* @vue/reactivity v3.4.36 +* (c) 2018-present Yuxi (Evan) You and Vue contributors +* @license MIT +**/ +let Q,X;class Z{constructor(e=!1){this.detached=e,this._active=!0,this.effects=[],this.cleanups=[],this.parent=Q,!e&&Q&&(this.index=(Q.scopes||(Q.scopes=[])).push(this)-1)}get active(){return this._active}run(e){if(this._active){const t=Q;try{return Q=this,e()}finally{Q=t}}}on(){Q=this}off(){Q=this.parent}stop(e){if(this._active){let t,n;for(t=0,n=this.effects.length;t=4))break}1===this._dirtyLevel&&(this._dirtyLevel=0),ae()}return this._dirtyLevel>=4}set dirty(e){this._dirtyLevel=e?4:0}run(){if(this._dirtyLevel=0,!this.active)return this.fn();let e=le,t=X;try{return le=!0,X=this,this._runnings++,se(this),this.fn()}finally{re(this),this._runnings--,X=t,le=e}}stop(){this.active&&(se(this),re(this),this.onStop&&this.onStop(),this.active=!1)}}function se(e){e._trackId++,e._depsLength=0}function re(e){if(e.deps.length>e._depsLength){for(let t=e._depsLength;t{const n=new Map;return n.cleanup=e,n.computed=t,n},_e=new WeakMap,me=Symbol(""),ye=Symbol("");function be(e,t,n){if(le&&X){let t=_e.get(e);t||_e.set(e,t=new Map);let s=t.get(n);s||t.set(n,s=ge((()=>t.delete(n)))),de(X,s)}}function xe(e,t,n,s,r,o){const l=_e.get(e);if(!l)return;let i=[];if("clear"===t)i=[...l.values()];else if("length"===n&&f(e)){const e=Number(s);l.forEach(((t,n)=>{("length"===n||!g(n)&&n>=e)&&i.push(t)}))}else switch(void 
0!==n&&i.push(l.get(n)),t){case"add":f(e)?S(n)&&i.push(l.get("length")):(i.push(l.get(me)),p(e)&&i.push(l.get(ye)));break;case"delete":f(e)||(i.push(l.get(me)),p(e)&&i.push(l.get(ye)));break;case"set":p(e)&&i.push(l.get(me))}fe();for(const c of i)c&&ve(c,4);pe()}const we=e("__proto__,__v_isRef,__isVue"),Se=new Set(Object.getOwnPropertyNames(Symbol).filter((e=>"arguments"!==e&&"caller"!==e)).map((e=>Symbol[e])).filter(g)),Ce=ke();function ke(){const e={};return["includes","indexOf","lastIndexOf"].forEach((t=>{e[t]=function(...e){const n=ft(this);for(let t=0,r=this.length;t{e[t]=function(...e){ue(),fe();const n=ft(this)[t].apply(this,e);return pe(),ae(),n}})),e}function Oe(e){g(e)||(e=String(e));const t=ft(this);return be(t,0,e),t.hasOwnProperty(e)}class Ee{constructor(e=!1,t=!1){this._isReadonly=e,this._isShallow=t}get(e,t,n){const s=this._isReadonly,r=this._isShallow;if("__v_isReactive"===t)return!s;if("__v_isReadonly"===t)return s;if("__v_isShallow"===t)return r;if("__v_raw"===t)return n===(s?r?st:nt:r?tt:et).get(e)||Object.getPrototypeOf(e)===Object.getPrototypeOf(n)?e:void 0;const o=f(e);if(!s){if(o&&a(Ce,t))return Reflect.get(Ce,t,n);if("hasOwnProperty"===t)return Oe}const l=Reflect.get(e,t,n);return(g(t)?Se.has(t):we(t))?l:(s||be(e,0,t),r?l:mt(l)?o&&S(t)?l:l.value:_(l)?s?ot(l):rt(l):l)}}class Fe extends Ee{constructor(e=!1){super(!1,e)}set(e,t,n,s){let r=e[t];if(!this._isShallow){const t=ct(r);if(ut(n)||ct(n)||(r=ft(r),n=ft(n)),!f(e)&&mt(r)&&!mt(n))return!t&&(r.value=n,!0)}const o=f(e)&&S(t)?Number(t)e,Te=e=>Reflect.getPrototypeOf(e);function Le(e,t,n=!1,s=!1){const r=ft(e=e.__v_raw),o=ft(t);n||(R(t,o)&&be(r,0,t),be(r,0,o));const{has:l}=Te(r),i=s?je:n?ht:dt;return l.call(r,t)?i(e.get(t)):l.call(r,o)?i(e.get(o)):void(e!==r&&e.get(t))}function Ie(e,t=!1){const n=this.__v_raw,s=ft(n),r=ft(e);return t||(R(e,r)&&be(s,0,e),be(s,0,r)),e===r?n.has(e):n.has(e)||n.has(r)}function Ve(e,t=!1){return e=e.__v_raw,!t&&be(ft(e),0,me),Reflect.get(e,"size",e)}function $e(e,t=!1){t||ut(e)||ct(e)||(e=ft(e));const n=ft(this);return Te(n).has.call(n,e)||(n.add(e),xe(n,"add",e,e)),this}function Ne(e,t,n=!1){n||ut(t)||ct(t)||(t=ft(t));const s=ft(this),{has:r,get:o}=Te(s);let l=r.call(s,e);l||(e=ft(e),l=r.call(s,e));const i=o.call(s,e);return s.set(e,t),l?R(t,i)&&xe(s,"set",e,t):xe(s,"add",e,t),this}function Ue(e){const t=ft(this),{has:n,get:s}=Te(t);let r=n.call(t,e);r||(e=ft(e),r=n.call(t,e)),s&&s.call(t,e);const o=t.delete(e);return r&&xe(t,"delete",e,void 0),o}function De(){const e=ft(this),t=0!==e.size,n=e.clear();return t&&xe(e,"clear",void 0,void 0),n}function Be(e,t){return function(n,s){const r=this,o=r.__v_raw,l=ft(o),i=t?je:e?ht:dt;return!e&&be(l,0,me),o.forEach(((e,t)=>n.call(s,i(e),i(t),r)))}}function We(e,t,n){return function(...s){const r=this.__v_raw,o=ft(r),l=p(o),i="entries"===e||e===Symbol.iterator&&l,c="keys"===e&&l,u=r[e](...s),a=n?je:t?ht:dt;return!t&&be(o,0,c?ye:me),{next(){const{value:e,done:t}=u.next();return t?{value:e,done:t}:{value:i?[a(e[0]),a(e[1])]:a(e),done:t}},[Symbol.iterator](){return this}}}}function ze(e){return function(...t){return"delete"!==e&&("clear"===e?void 0:this)}}function He(){const e={get(e){return Le(this,e)},get size(){return Ve(this)},has:Ie,add:$e,set:Ne,delete:Ue,clear:De,forEach:Be(!1,!1)},t={get(e){return Le(this,e,!1,!0)},get size(){return Ve(this)},has:Ie,add(e){return $e.call(this,e,!0)},set(e,t){return Ne.call(this,e,t,!0)},delete:Ue,clear:De,forEach:Be(!1,!0)},n={get(e){return Le(this,e,!0)},get size(){return Ve(this,!0)},has(e){return 
Ie.call(this,e,!0)},add:ze("add"),set:ze("set"),delete:ze("delete"),clear:ze("clear"),forEach:Be(!0,!1)},s={get(e){return Le(this,e,!0,!0)},get size(){return Ve(this,!0)},has(e){return Ie.call(this,e,!0)},add:ze("add"),set:ze("set"),delete:ze("delete"),clear:ze("clear"),forEach:Be(!0,!0)};return["keys","values","entries",Symbol.iterator].forEach((r=>{e[r]=We(r,!1,!1),n[r]=We(r,!0,!1),t[r]=We(r,!1,!0),s[r]=We(r,!0,!0)})),[e,n,t,s]}const[Ke,qe,Ge,Je]=He();function Qe(e,t){const n=t?e?Je:Ge:e?qe:Ke;return(t,s,r)=>"__v_isReactive"===s?!e:"__v_isReadonly"===s?e:"__v_raw"===s?t:Reflect.get(a(n,s)&&s in t?n:t,s,r)}const Xe={get:Qe(!1,!1)},Ze={get:Qe(!1,!0)},Ye={get:Qe(!0,!1)},et=new WeakMap,tt=new WeakMap,nt=new WeakMap,st=new WeakMap;function rt(e){return ct(e)?e:lt(e,!1,Pe,Xe,et)}function ot(e){return lt(e,!0,Ae,Ye,nt)}function lt(e,t,n,s,r){if(!_(e))return e;if(e.__v_raw&&(!t||!e.__v_isReactive))return e;const o=r.get(e);if(o)return o;const l=(i=e).__v_skip||!Object.isExtensible(i)?0:function(e){switch(e){case"Object":case"Array":return 1;case"Map":case"Set":case"WeakMap":case"WeakSet":return 2;default:return 0}}(x(i));var i;if(0===l)return e;const c=new Proxy(e,2===l?s:n);return r.set(e,c),c}function it(e){return ct(e)?it(e.__v_raw):!(!e||!e.__v_isReactive)}function ct(e){return!(!e||!e.__v_isReadonly)}function ut(e){return!(!e||!e.__v_isShallow)}function at(e){return!!e&&!!e.__v_raw}function ft(e){const t=e&&e.__v_raw;return t?ft(t):e}function pt(e){return Object.isExtensible(e)&&T(e,"__v_skip",!0),e}const dt=e=>_(e)?rt(e):e,ht=e=>_(e)?ot(e):e;class vt{constructor(e,t,n,s){this.getter=e,this._setter=t,this.dep=void 0,this.__v_isRef=!0,this.__v_isReadonly=!1,this.effect=new ne((()=>e(this._value)),(()=>_t(this,2===this.effect._dirtyLevel?2:3))),this.effect.computed=this,this.effect.active=this._cacheable=!s,this.__v_isReadonly=n}get value(){const e=ft(this);return e._cacheable&&!e.effect.dirty||!R(e._value,e._value=e.effect.run())||_t(e,4),gt(e),e.effect._dirtyLevel>=2&&_t(e,2),e._value}set value(e){this._setter(e)}get _dirty(){return this.effect.dirty}set _dirty(e){this.effect.dirty=e}}function gt(e){var t;le&&X&&(e=ft(e),de(X,null!=(t=e.dep)?t:e.dep=ge((()=>e.dep=void 0),e instanceof vt?e:void 0)))}function _t(e,t=4,n,s){const r=(e=ft(e)).dep;r&&ve(r,t)}function mt(e){return!(!e||!0!==e.__v_isRef)}function yt(e){return function(e,t){if(mt(e))return e;return new bt(e,t)}(e,!1)}class bt{constructor(e,t){this.__v_isShallow=t,this.dep=void 0,this.__v_isRef=!0,this._rawValue=t?e:ft(e),this._value=t?e:dt(e)}get value(){return gt(this),this._value}set value(e){const t=this.__v_isShallow||ut(e)||ct(e);e=t?e:ft(e),R(e,this._rawValue)&&(this._rawValue,this._rawValue=e,this._value=t?e:dt(e),_t(this,4))}}function xt(e){return mt(e)?e.value:e}const wt={get:(e,t,n)=>xt(Reflect.get(e,t,n)),set:(e,t,n,s)=>{const r=e[t];return mt(r)&&!mt(n)?(r.value=n,!0):Reflect.set(e,t,n,s)}};function St(e){return it(e)?e:new Proxy(e,wt)}function Ct(e){const t=f(e)?new Array(e.length):{};for(const n in e)t[n]=Ft(e,n);return t}class kt{constructor(e,t,n){this._object=e,this._key=t,this._defaultValue=n,this.__v_isRef=!0}get value(){const e=this._object[this._key];return void 0===e?this._defaultValue:e}set value(e){this._object[this._key]=e}get dep(){return function(e,t){const n=_e.get(e);return n&&n.get(t)}(ft(this._object),this._key)}}class Ot{constructor(e){this._getter=e,this.__v_isRef=!0,this.__v_isReadonly=!0}get value(){return this._getter()}}function Et(e,t,n){return mt(e)?e:h(e)?new 
Ot(e):_(e)&&arguments.length>1?Ft(e,t,n):yt(e)}function Ft(e,t,n){const s=e[t];return mt(s)?s:new kt(e,t,n)} +/** +* @vue/runtime-core v3.4.36 +* (c) 2018-present Yuxi (Evan) You and Vue contributors +* @license MIT +**/function Mt(e,t,n,s){try{return s?e(...s):e()}catch(r){At(r,t,n)}}function Pt(e,t,n,s){if(h(e)){const r=Mt(e,t,n,s);return r&&m(r)&&r.catch((e=>{At(e,t,n)})),r}if(f(e)){const r=[];for(let o=0;o>>1,r=Tt[s],o=Kt(r);oKt(e)-Kt(t)));if(It.length=0,Vt)return void Vt.push(...e);for(Vt=e,$t=0;$tnull==e.id?1/0:e.id,qt=(e,t)=>{const n=Kt(e)-Kt(t);if(0===n){if(e.pre&&!t.pre)return-1;if(t.pre&&!e.pre)return 1}return n};function Gt(e){jt=!1,Rt=!0,Tt.sort(qt);try{for(Lt=0;Lt{s._d&&Qs(-1);const r=Xt(t);let o;try{o=e(...n)}finally{Xt(r),s._d&&Qs(1)}return o};return s._n=!0,s._c=!0,s._d=!0,s}function Yt(e,n){if(null===Jt)return e;const s=Fr(Jt),r=e.dirs||(e.dirs=[]);for(let o=0;o!!e.type.__asyncLoader,sn=e=>e.type.__isKeepAlive;function rn(e,t){ln(e,"a",t)}function on(e,t){ln(e,"da",t)}function ln(e,t,n=gr){const s=e.__wdc||(e.__wdc=()=>{let t=n;for(;t;){if(t.isDeactivated)return;t=t.parent}return e()});if(un(t,s,n),n){let e=n.parent;for(;e&&e.parent;)sn(e.parent.vnode)&&cn(s,t,n,e),e=e.parent}}function cn(e,t,n,s){const r=un(t,e,s,!0);gn((()=>{c(s[t],r)}),n)}function un(e,t,n=gr,s=!1){if(n){const r=n[e]||(n[e]=[]),o=t.__weh||(t.__weh=(...s)=>{ue();const r=br(n),o=Pt(t,n,e,s);return r(),ae(),o});return s?r.unshift(o):r.push(o),o}}const an=e=>(t,n=gr)=>{Cr&&"sp"!==e||un(e,((...e)=>t(...e)),n)},fn=an("bm"),pn=an("m"),dn=an("bu"),hn=an("u"),vn=an("bum"),gn=an("um"),_n=an("sp"),mn=an("rtg"),yn=an("rtc");function bn(e,t=gr){un("ec",e,t)}const xn="components";function wn(e,t){return On(xn,e,!0,t)||e}const Sn=Symbol.for("v-ndc");function Cn(e){return v(e)?On(xn,e,!1)||e:e||Sn}function kn(e){return On("directives",e)}function On(e,t,n=!0,s=!1){const r=Jt||gr;if(r){const n=r.type;if(e===xn){const e=Mr(n,!1);if(e&&(e===t||e===E(t)||e===P(E(t))))return n}const o=En(r[e]||n[e],t)||En(r.appContext[e],t);return!o&&s?n:o}}function En(e,t){return e&&(e[t]||e[E(t)]||e[P(E(t))])}function Fn(e,t,n,s){let r;const o=n;if(f(e)||v(e)){r=new Array(e.length);for(let n=0,s=e.length;nt(e,n,void 0,o)));else{const n=Object.keys(e);r=new Array(n.length);for(let s=0,l=n.length;s!er(e)||e.type!==zs&&!(e.type===Bs&&!Pn(e.children))))?e:null}const An=e=>e?wr(e)?Fr(e):An(e.parent):null,Rn=i(Object.create(null),{$:e=>e,$el:e=>e.vnode.el,$data:e=>e.data,$props:e=>e.props,$attrs:e=>e.attrs,$slots:e=>e.slots,$refs:e=>e.refs,$parent:e=>An(e.parent),$root:e=>An(e.root),$emit:e=>e.emit,$options:e=>Un(e),$forceUpdate:e=>e.f||(e.f=()=>{e.effect.dirty=!0,Bt(e.update)}),$nextTick:e=>e.n||(e.n=Dt.bind(e.proxy)),$watch:e=>As.bind(e)}),jn=(e,n)=>e!==t&&!e.__isScriptSetup&&a(e,n),Tn={get({_:e},n){if("__v_skip"===n)return!0;const{ctx:s,setupState:r,data:o,props:l,accessCache:i,type:c,appContext:u}=e;let f;if("$"!==n[0]){const c=i[n];if(void 0!==c)switch(c){case 1:return r[n];case 2:return o[n];case 4:return s[n];case 3:return l[n]}else{if(jn(r,n))return i[n]=1,r[n];if(o!==t&&a(o,n))return i[n]=2,o[n];if((f=e.propsOptions[0])&&a(f,n))return i[n]=3,l[n];if(s!==t&&a(s,n))return i[n]=4,s[n];In&&(i[n]=0)}}const p=Rn[n];let d,h;return p?("$attrs"===n&&be(e.attrs,0,""),p(e)):(d=c.__cssModules)&&(d=d[n])?d:s!==t&&a(s,n)?(i[n]=4,s[n]):(h=u.config.globalProperties,a(h,n)?h[n]:void 0)},set({_:e},n,s){const{data:r,setupState:o,ctx:l}=e;return jn(o,n)?(o[n]=s,!0):r!==t&&a(r,n)?(r[n]=s,!0):!a(e.props,n)&&(("$"!==n[0]||!(n.slice(1)in 
e))&&(l[n]=s,!0))},has({_:{data:e,setupState:n,accessCache:s,ctx:r,appContext:o,propsOptions:l}},i){let c;return!!s[i]||e!==t&&a(e,i)||jn(n,i)||(c=l[0])&&a(c,i)||a(r,i)||a(Rn,i)||a(o.config.globalProperties,i)},defineProperty(e,t,n){return null!=n.get?e._.accessCache[t]=0:a(n,"value")&&this.set(e,t,n.value,null),Reflect.defineProperty(e,t,n)}};function Ln(e){return f(e)?e.reduce(((e,t)=>(e[t]=null,e)),{}):e}let In=!0;function Vn(e){const t=Un(e),n=e.proxy,r=e.ctx;In=!1,t.beforeCreate&&$n(t.beforeCreate,e,"bc");const{data:o,computed:l,methods:i,watch:c,provide:u,inject:a,created:p,beforeMount:d,mounted:v,beforeUpdate:g,updated:m,activated:y,deactivated:b,beforeDestroy:x,beforeUnmount:w,destroyed:S,unmounted:C,render:k,renderTracked:O,renderTriggered:E,errorCaptured:F,serverPrefetch:M,expose:P,inheritAttrs:A,components:R,directives:j,filters:T}=t;if(a&&function(e,t){f(e)&&(e=zn(e));for(const n in e){const s=e[n];let r;r=_(s)?"default"in s?Zn(s.from||n,s.default,!0):Zn(s.from||n):Zn(s),mt(r)?Object.defineProperty(t,n,{enumerable:!0,configurable:!0,get:()=>r.value,set:e=>r.value=e}):t[n]=r}}(a,r,null),i)for(const s in i){const e=i[s];h(e)&&(r[s]=e.bind(n))}if(o){const t=o.call(n,n);_(t)&&(e.data=rt(t))}if(In=!0,l)for(const f in l){const e=l[f],t=h(e)?e.bind(n,n):h(e.get)?e.get.bind(n,n):s,o=!h(e)&&h(e.set)?e.set.bind(n):s,i=Pr({get:t,set:o});Object.defineProperty(r,f,{enumerable:!0,configurable:!0,get:()=>i.value,set:e=>i.value=e})}if(c)for(const s in c)Nn(c[s],r,n,s);if(u){const e=h(u)?u.call(n):u;Reflect.ownKeys(e).forEach((t=>{!function(e,t){if(gr){let n=gr.provides;const s=gr.parent&&gr.parent.provides;s===n&&(n=gr.provides=Object.create(s)),n[e]=t}else;}(t,e[t])}))}function L(e,t){f(t)?t.forEach((t=>e(t.bind(n)))):t&&e(t.bind(n))}if(p&&$n(p,e,"c"),L(fn,d),L(pn,v),L(dn,g),L(hn,m),L(rn,y),L(on,b),L(bn,F),L(yn,O),L(mn,E),L(vn,w),L(gn,C),L(_n,M),f(P))if(P.length){const t=e.exposed||(e.exposed={});P.forEach((e=>{Object.defineProperty(t,e,{get:()=>n[e],set:t=>n[e]=t})}))}else e.exposed||(e.exposed={});k&&e.render===s&&(e.render=k),null!=A&&(e.inheritAttrs=A),R&&(e.components=R),j&&(e.directives=j)}function $n(e,t,n){Pt(f(e)?e.map((e=>e.bind(t.proxy))):e.bind(t.proxy),t,n)}function Nn(e,t,n,s){const r=s.includes(".")?Rs(n,s):()=>n[s];if(v(e)){const n=t[e];h(n)&&Ms(r,n)}else if(h(e))Ms(r,e.bind(n));else if(_(e))if(f(e))e.forEach((e=>Nn(e,t,n,s)));else{const s=h(e.handler)?e.handler.bind(n):t[e.handler];h(s)&&Ms(r,s,e)}}function Un(e){const t=e.type,{mixins:n,extends:s}=t,{mixins:r,optionsCache:o,config:{optionMergeStrategies:l}}=e.appContext,i=o.get(t);let c;return i?c=i:r.length||n||s?(c={},r.length&&r.forEach((e=>Dn(c,e,l,!0))),Dn(c,t,l)):c=t,_(t)&&o.set(t,c),c}function Dn(e,t,n,s=!1){const{mixins:r,extends:o}=t;o&&Dn(e,o,n,!0),r&&r.forEach((t=>Dn(e,t,n,!0)));for(const l in t)if(s&&"expose"===l);else{const s=Bn[l]||n&&n[l];e[l]=s?s(e[l],t[l]):t[l]}return e}const Bn={data:Wn,props:qn,emits:qn,methods:Kn,computed:Kn,beforeCreate:Hn,created:Hn,beforeMount:Hn,mounted:Hn,beforeUpdate:Hn,updated:Hn,beforeDestroy:Hn,beforeUnmount:Hn,destroyed:Hn,unmounted:Hn,activated:Hn,deactivated:Hn,errorCaptured:Hn,serverPrefetch:Hn,components:Kn,directives:Kn,watch:function(e,t){if(!e)return t;if(!t)return e;const n=i(Object.create(null),e);for(const s in t)n[s]=Hn(e[s],t[s]);return n},provide:Wn,inject:function(e,t){return Kn(zn(e),zn(t))}};function Wn(e,t){return t?e?function(){return i(h(e)?e.call(this,this):e,h(t)?t.call(this,this):t)}:t:e}function zn(e){if(f(e)){const t={};for(let 
n=0;n(o.has(e)||(e&&h(e.install)?(o.add(e),e.install(c,...t)):h(e)&&(o.add(e),e(c,...t))),c),mixin:e=>(r.mixins.includes(e)||r.mixins.push(e),c),component:(e,t)=>t?(r.components[e]=t,c):r.components[e],directive:(e,t)=>t?(r.directives[e]=t,c):r.directives[e],mount(o,i,u){if(!l){const a=or(n,s);return a.appContext=r,!0===u?u="svg":!1===u&&(u=void 0),i&&t?t(a,o):e(a,o,u),l=!0,c._container=o,o.__vue_app__=c,Fr(a.component)}},unmount(){l&&(e(null,c._container),delete c._container.__vue_app__)},provide:(e,t)=>(r.provides[e]=t,c),runWithContext(e){const t=Xn;Xn=c;try{return e()}finally{Xn=t}}};return c}}let Xn=null;function Zn(e,t,n=!1){const s=gr||Jt;if(s||Xn){const r=s?null==s.parent?s.vnode.appContext&&s.vnode.appContext.provides:s.parent.provides:Xn._context.provides;if(r&&e in r)return r[e];if(arguments.length>1)return n&&h(t)?t.call(s&&s.proxy):t}}function Yn(){return!!(gr||Jt||Xn)}const es={},ts=()=>Object.create(es),ns=e=>Object.getPrototypeOf(e)===es;function ss(e,t,n,s=!1){const r={},o=ts();e.propsDefaults=Object.create(null),rs(e,t,r,o);for(const l in e.propsOptions[0])l in r||(r[l]=void 0);n?e.props=s?r:lt(r,!1,Re,Ze,tt):e.type.props?e.props=r:e.props=o,e.attrs=o}function rs(e,n,s,r){const[o,l]=e.propsOptions;let i,c=!1;if(n)for(let t in n){if(C(t))continue;const u=n[t];let f;o&&a(o,f=E(t))?l&&l.includes(f)?(i||(i={}))[f]=u:s[f]=u:Vs(e.emitsOptions,t)||t in r&&u===r[t]||(r[t]=u,c=!0)}if(l){const n=ft(s),r=i||t;for(let t=0;t{d=!0;const[t,n]=is(e,s,!0);i(u,t),n&&p.push(...n)};!r&&s.mixins.length&&s.mixins.forEach(t),e.extends&&t(e.extends),e.mixins&&e.mixins.forEach(t)}if(!c&&!d)return _(e)&&o.set(e,n),n;if(f(c))for(let n=0;n"_"===e[0]||"$stable"===e,as=e=>f(e)?e.map(ur):[ur(e)],fs=(e,t,n)=>{if(t._n)return t;const s=Zt(((...e)=>as(t(...e))),n);return s._c=!1,s},ps=(e,t,n)=>{const s=e._ctx;for(const r in e){if(us(r))continue;const n=e[r];if(h(n))t[r]=fs(0,n,s);else if(null!=n){const e=as(n);t[r]=()=>e}}},ds=(e,t)=>{const n=as(t);e.slots.default=()=>n},hs=(e,t,n)=>{for(const s in t)(n||"_"!==s)&&(e[s]=t[s])},vs=(e,t,n)=>{const s=e.slots=ts();if(32&e.vnode.shapeFlag){const e=t._;e?(hs(s,t,n),n&&T(s,"_",e,!0)):ps(t,s)}else t&&ds(e,t)},gs=(e,n,s)=>{const{vnode:r,slots:o}=e;let l=!0,i=t;if(32&r.shapeFlag){const e=n._;e?s&&1===e?l=!1:hs(o,n,s):(l=!n.$stable,ps(n,o)),i=n}else n&&(ds(e,n),i={default:1});if(l)for(const t in o)us(t)||null!=i[t]||delete o[t]};function _s(e,n,s,r,o=!1){if(f(e))return void e.forEach(((e,t)=>_s(e,n&&(f(n)?n[t]:n),s,r,o)));if(nn(r)&&!o)return;const l=4&r.shapeFlag?Fr(r.component):r.el,i=o?null:l,{i:u,r:p}=e,d=n&&n.r,g=u.refs===t?u.refs={}:u.refs,_=u.setupState;if(null!=d&&d!==p&&(v(d)?(g[d]=null,a(_,d)&&(_[d]=null)):mt(d)&&(d.value=null)),h(p))Mt(p,u,12,[i,g]);else{const t=v(p),n=mt(p);if(t||n){const r=()=>{if(e.f){const n=t?a(_,p)?_[p]:g[p]:p.value;o?f(n)&&c(n,l):f(n)?n.includes(l)||n.push(l):t?(g[p]=[l],a(_,p)&&(_[p]=g[p])):(p.value=[l],e.k&&(g[e.k]=p.value))}else t?(g[p]=i,a(_,p)&&(_[p]=i)):n&&(p.value=i,e.k&&(g[e.k]=i))};i?(r.id=-1,ys(r,s)):r()}}}const ms=Symbol("_vte"),ys=function(e,t){t&&t.pendingBranch?f(e)?t.effects.push(...e):t.effects.push(e):(f(n=e)?It.push(...n):Vt&&Vt.includes(n,n.allowRecurse?$t+1:$t)||It.push(n),Wt());var n};function bs(e){return function(e){V().__VUE__=!0;const{insert:r,remove:o,patchProp:l,createElement:i,createText:c,createComment:u,setText:f,setElementText:p,parentNode:d,nextSibling:h,setScopeId:v=s,insertStaticContent:g}=e,_=(e,t,n,s=null,r=null,o=null,l=void 
0,i=null,c=!!t.dynamicChildren)=>{if(e===t)return;e&&!tr(e,t)&&(s=X(e),K(e,r,o,!0),e=null),-2===t.patchFlag&&(c=!1,t.dynamicChildren=null);const{type:u,ref:a,shapeFlag:f}=t;switch(u){case Ws:y(e,t,n,s);break;case zs:b(e,t,n,s);break;case Hs:null==e&&x(t,n,s,l);break;case Bs:L(e,t,n,s,r,o,l,i,c);break;default:1&f?k(e,t,n,s,r,o,l,i,c):6&f?I(e,t,n,s,r,o,l,i,c):(64&f||128&f)&&u.process(e,t,n,s,r,o,l,i,c,te)}null!=a&&r&&_s(a,e&&e.ref,o,t||e,!t)},y=(e,t,n,s)=>{if(null==e)r(t.el=c(t.children),n,s);else{const n=t.el=e.el;t.children!==e.children&&f(n,t.children)}},b=(e,t,n,s)=>{null==e?r(t.el=u(t.children||""),n,s):t.el=e.el},x=(e,t,n,s)=>{[e.el,e.anchor]=g(e.children,t,n,s,e.el,e.anchor)},w=({el:e,anchor:t},n,s)=>{let o;for(;e&&e!==t;)o=h(e),r(e,n,s),e=o;r(t,n,s)},S=({el:e,anchor:t})=>{let n;for(;e&&e!==t;)n=h(e),o(e),e=n;o(t)},k=(e,t,n,s,r,o,l,i,c)=>{"svg"===t.type?l="svg":"math"===t.type&&(l="mathml"),null==e?O(t,n,s,r,o,l,i,c):A(e,t,r,o,l,i,c)},O=(e,t,n,s,o,c,u,a)=>{let f,d;const{props:h,shapeFlag:v,transition:g,dirs:_}=e;if(f=e.el=i(e.type,c,h&&h.is,h),8&v?p(f,e.children):16&v&&P(e.children,f,null,s,o,xs(e,c),u,a),_&&en(e,null,s,"created"),F(f,e,e.scopeId,u,s),h){for(const e in h)"value"===e||C(e)||l(f,e,null,h[e],c,s);"value"in h&&l(f,"value",null,h.value,c),(d=h.onVnodeBeforeMount)&&dr(d,s,e)}_&&en(e,null,s,"beforeMount");const m=function(e,t){return(!e||e&&!e.pendingBranch)&&t&&!t.persisted}(o,g);m&&g.beforeEnter(f),r(f,t,n),((d=h&&h.onVnodeMounted)||m||_)&&ys((()=>{d&&dr(d,s,e),m&&g.enter(f),_&&en(e,null,s,"mounted")}),o)},F=(e,t,n,s,r)=>{if(n&&v(e,n),s)for(let o=0;o{for(let u=c;u{const u=n.el=e.el;let{patchFlag:a,dynamicChildren:f,dirs:d}=n;a|=16&e.patchFlag;const h=e.props||t,v=n.props||t;let g;if(s&&ws(s,!1),(g=v.onVnodeBeforeUpdate)&&dr(g,s,n,e),d&&en(n,e,s,"beforeUpdate"),s&&ws(s,!0),(h.innerHTML&&null==v.innerHTML||h.textContent&&null==v.textContent)&&p(u,""),f?R(e.dynamicChildren,f,u,s,r,xs(n,o),i):c||B(e,n,u,null,s,r,xs(n,o),i,!1),a>0){if(16&a)T(u,h,v,s,o);else if(2&a&&h.class!==v.class&&l(u,"class",null,v.class,o),4&a&&l(u,"style",h.style,v.style,o),8&a){const e=n.dynamicProps;for(let t=0;t{g&&dr(g,s,n,e),d&&en(n,e,s,"updated")}),r)},R=(e,t,n,s,r,o,l)=>{for(let i=0;i{if(n!==s){if(n!==t)for(const t in n)C(t)||t in s||l(e,t,n[t],null,o,r);for(const t in s){if(C(t))continue;const i=s[t],c=n[t];i!==c&&"value"!==t&&l(e,t,c,i,o,r)}"value"in s&&l(e,"value",n.value,s.value,o)}},L=(e,t,n,s,o,l,i,u,a)=>{const f=t.el=e?e.el:c(""),p=t.anchor=e?e.anchor:c("");let{patchFlag:d,dynamicChildren:h,slotScopeIds:v}=t;v&&(u=u?u.concat(v):v),null==e?(r(f,n,s),r(p,n,s),P(t.children||[],n,p,o,l,i,u,a)):d>0&&64&d&&h&&e.dynamicChildren?(R(e.dynamicChildren,h,n,o,l,i,u),(null!=t.key||o&&t===o.subTree)&&Ss(e,t,!0)):B(e,t,n,p,o,l,i,u,a)},I=(e,t,n,s,r,o,l,i,c)=>{t.slotScopeIds=i,null==e?512&t.shapeFlag?r.ctx.activate(t,n,s,l,c):$(t,n,s,r,o,l,c):N(e,t,c)},$=(e,n,s,r,o,l,i)=>{const c=e.component=function(e,n,s){const r=e.type,o=(n?n.appContext:e.appContext)||hr,l={uid:vr++,vnode:e,type:r,parent:n,appContext:o,root:null,next:null,subTree:null,effect:null,update:null,scope:new 
Z(!0),render:null,proxy:null,exposed:null,exposeProxy:null,withProxy:null,provides:n?n.provides:Object.create(o.provides),accessCache:null,renderCache:[],components:null,directives:null,propsOptions:is(r,o),emitsOptions:Is(r,o),emit:null,emitted:null,propsDefaults:t,inheritAttrs:r.inheritAttrs,ctx:t,data:t,props:t,attrs:t,slots:t,refs:t,setupState:t,setupContext:null,suspense:s,suspenseId:s?s.pendingId:0,asyncDep:null,asyncResolved:!1,isMounted:!1,isUnmounted:!1,isDeactivated:!1,bc:null,c:null,bm:null,m:null,bu:null,u:null,um:null,bum:null,da:null,a:null,rtg:null,rtc:null,ec:null,sp:null};l.ctx={_:l},l.root=n?n.root:l,l.emit=Ls.bind(null,l),e.ce&&e.ce(l);return l}(e,r,o);if(sn(e)&&(c.ctx.renderer=te),function(e,t=!1,n=!1){t&&yr(t);const{props:s,children:r}=e.vnode,o=wr(e);ss(e,s,o,t),vs(e,r,n);const l=o?function(e,t){const n=e.type;e.accessCache=Object.create(null),e.proxy=new Proxy(e.ctx,Tn);const{setup:s}=n;if(s){const n=e.setupContext=s.length>1?function(e){const t=t=>{e.exposed=t||{}};return{attrs:new Proxy(e.attrs,Er),slots:e.slots,emit:e.emit,expose:t}}(e):null,r=br(e);ue();const o=Mt(s,e,0,[e.props,n]);if(ae(),r(),m(o)){if(o.then(xr,xr),t)return o.then((n=>{kr(e,n,t)})).catch((t=>{At(t,e,0)}));e.asyncDep=o}else kr(e,o,t)}else Or(e,t)}(e,t):void 0;t&&yr(!1)}(c,!1,i),c.asyncDep){if(o&&o.registerDep(c,U,i),!e.el){const e=c.subTree=or(zs);b(null,e,n,s)}}else U(c,e,n,s,o,l,i)},N=(e,t,n)=>{const s=t.component=e.component;if(function(e,t,n){const{props:s,children:r,component:o}=e,{props:l,children:i,patchFlag:c}=t,u=o.emitsOptions;if(t.dirs||t.transition)return!0;if(!(n&&c>=0))return!(!r&&!i||i&&i.$stable)||s!==l&&(s?!l||Ds(s,l,u):!!l);if(1024&c)return!0;if(16&c)return s?Ds(s,l,u):!!l;if(8&c){const e=t.dynamicProps;for(let t=0;tLt&&Tt.splice(t,1)}(s.update),s.effect.dirty=!0,s.update()}else t.el=e.el,s.vnode=t},U=(e,t,n,r,o,l,i)=>{const c=()=>{if(e.isMounted){let{next:t,bu:n,u:s,parent:r,vnode:u}=e;{const n=Cs(e);if(n)return t&&(t.el=u.el,D(e,t,i)),void n.asyncDep.then((()=>{e.isUnmounted||c()}))}let a,f=t;ws(e,!1),t?(t.el=u.el,D(e,t,i)):t=u,n&&j(n),(a=t.props&&t.props.onVnodeBeforeUpdate)&&dr(a,r,t,u),ws(e,!0);const p=$s(e),h=e.subTree;e.subTree=p,_(h,p,d(h.el),X(h),e,o,l),t.el=p.el,null===f&&function({vnode:e,parent:t},n){for(;t;){const s=t.subTree;if(s.suspense&&s.suspense.activeBranch===e&&(s.el=e.el),s!==e)break;(e=t.vnode).el=n,t=t.parent}}(e,p.el),s&&ys(s,o),(a=t.props&&t.props.onVnodeUpdated)&&ys((()=>dr(a,r,t,u)),o)}else{let s;const{el:i,props:c}=t,{bm:u,m:a,parent:f}=e,p=nn(t);if(ws(e,!1),u&&j(u),!p&&(s=c&&c.onVnodeBeforeMount)&&dr(s,f,t),ws(e,!0),i&&re){const n=()=>{e.subTree=$s(e),re(i,e.subTree,e,o,null)};p?t.type.__asyncLoader().then((()=>!e.isUnmounted&&n())):n()}else{const s=e.subTree=$s(e);_(null,s,n,r,e,o,l),t.el=s.el}if(a&&ys(a,o),!p&&(s=c&&c.onVnodeMounted)){const e=t;ys((()=>dr(s,f,e)),o)}(256&t.shapeFlag||f&&nn(f.vnode)&&256&f.vnode.shapeFlag)&&e.a&&ys(e.a,o),e.isMounted=!0,t=n=r=null}},u=e.effect=new ne(c,s,(()=>Bt(a)),e.scope),a=e.update=()=>{u.dirty&&u.run()};a.i=e,a.id=e.uid,ws(e,!0),a()},D=(e,t,n)=>{t.component=e;const s=e.vnode.props;e.vnode=t,e.next=null,function(e,t,n,s){const{props:r,attrs:o,vnode:{patchFlag:l}}=e,i=ft(r),[c]=e.propsOptions;let u=!1;if(!(s||l>0)||16&l){let s;rs(e,t,r,o)&&(u=!0);for(const o in i)t&&(a(t,o)||(s=M(o))!==o&&a(t,s))||(c?!n||void 0===n[o]&&void 0===n[s]||(r[o]=os(c,i,o,void 0,e,!0)):delete r[o]);if(o!==i)for(const e in o)t&&a(t,e)||(delete o[e],u=!0)}else if(8&l){const n=e.vnode.dynamicProps;for(let s=0;s{const 
u=e&&e.children,a=e?e.shapeFlag:0,f=t.children,{patchFlag:d,shapeFlag:h}=t;if(d>0){if(128&d)return void z(u,f,n,s,r,o,l,i,c);if(256&d)return void W(u,f,n,s,r,o,l,i,c)}8&h?(16&a&&Q(u,r,o),f!==u&&p(n,f)):16&a?16&h?z(u,f,n,s,r,o,l,i,c):Q(u,r,o,!0):(8&a&&p(n,""),16&h&&P(f,n,s,r,o,l,i,c))},W=(e,t,s,r,o,l,i,c,u)=>{t=t||n;const a=(e=e||n).length,f=t.length,p=Math.min(a,f);let d;for(d=0;df?Q(e,o,l,!0,!1,p):P(t,s,r,o,l,i,c,u,p)},z=(e,t,s,r,o,l,i,c,u)=>{let a=0;const f=t.length;let p=e.length-1,d=f-1;for(;a<=p&&a<=d;){const n=e[a],r=t[a]=u?ar(t[a]):ur(t[a]);if(!tr(n,r))break;_(n,r,s,null,o,l,i,c,u),a++}for(;a<=p&&a<=d;){const n=e[p],r=t[d]=u?ar(t[d]):ur(t[d]);if(!tr(n,r))break;_(n,r,s,null,o,l,i,c,u),p--,d--}if(a>p){if(a<=d){const e=d+1,n=ed)for(;a<=p;)K(e[a],o,l,!0),a++;else{const h=a,v=a,g=new Map;for(a=v;a<=d;a++){const e=t[a]=u?ar(t[a]):ur(t[a]);null!=e.key&&g.set(e.key,a)}let m,y=0;const b=d-v+1;let x=!1,w=0;const S=new Array(b);for(a=0;a=b){K(n,o,l,!0);continue}let r;if(null!=n.key)r=g.get(n.key);else for(m=v;m<=d;m++)if(0===S[m-v]&&tr(n,t[m])){r=m;break}void 0===r?K(n,o,l,!0):(S[r-v]=a+1,r>=w?w=r:x=!0,_(n,t[r],s,null,o,l,i,c,u),y++)}const C=x?function(e){const t=e.slice(),n=[0];let s,r,o,l,i;const c=e.length;for(s=0;s>1,e[n[i]]0&&(t[s]=n[o-1]),n[o]=s)}}o=n.length,l=n[o-1];for(;o-- >0;)n[o]=l,l=t[l];return n}(S):n;for(m=C.length-1,a=b-1;a>=0;a--){const e=v+a,n=t[e],p=e+1{const{el:l,type:i,transition:c,children:u,shapeFlag:a}=e;if(6&a)return void H(e.component.subTree,t,n,s);if(128&a)return void e.suspense.move(t,n,s);if(64&a)return void i.move(e,t,n,te);if(i===Bs){r(l,t,n);for(let e=0;ec.enter(l)),o);else{const{leave:e,delayLeave:s,afterLeave:o}=c,i=()=>r(l,t,n),u=()=>{e(l,(()=>{i(),o&&o()}))};s?s(l,i,u):u()}else r(l,t,n)},K=(e,t,n,s=!1,r=!1)=>{const{type:o,props:l,ref:i,children:c,dynamicChildren:u,shapeFlag:a,patchFlag:f,dirs:p,cacheIndex:d}=e;if(-2===f&&(r=!1),null!=i&&_s(i,null,n,e,!0),null!=d&&(t.renderCache[d]=void 0),256&a)return void t.ctx.deactivate(e);const h=1&a&&p,v=!nn(e);let g;if(v&&(g=l&&l.onVnodeBeforeUnmount)&&dr(g,t,e),6&a)J(e.component,n,s);else{if(128&a)return void e.suspense.unmount(n,s);h&&en(e,null,t,"beforeUnmount"),64&a?e.type.remove(e,t,n,te,s):u&&!u.hasOnce&&(o!==Bs||f>0&&64&f)?Q(u,t,n,!1,!0):(o===Bs&&384&f||!r&&16&a)&&Q(c,t,n),s&&q(e)}(v&&(g=l&&l.onVnodeUnmounted)||h)&&ys((()=>{g&&dr(g,t,e),h&&en(e,null,t,"unmounted")}),n)},q=e=>{const{type:t,el:n,anchor:s,transition:r}=e;if(t===Bs)return void G(n,s);if(t===Hs)return void S(e);const l=()=>{o(n),r&&!r.persisted&&r.afterLeave&&r.afterLeave()};if(1&e.shapeFlag&&r&&!r.persisted){const{leave:t,delayLeave:s}=r,o=()=>t(n,l);s?s(e.el,l,o):o()}else l()},G=(e,t)=>{let n;for(;e!==t;)n=h(e),o(e),e=n;o(t)},J=(e,t,n)=>{const{bum:s,scope:r,update:o,subTree:l,um:i,m:c,a:u}=e;ks(c),ks(u),s&&j(s),r.stop(),o&&(o.active=!1,K(l,e,t,n)),i&&ys(i,t),ys((()=>{e.isUnmounted=!0}),t),t&&t.pendingBranch&&!t.isUnmounted&&e.asyncDep&&!e.asyncResolved&&e.suspenseId===t.pendingId&&(t.deps--,0===t.deps&&t.resolve())},Q=(e,t,n,s=!1,r=!1,o=0)=>{for(let l=o;l{if(6&e.shapeFlag)return X(e.component.subTree);if(128&e.shapeFlag)return e.suspense.next();const t=h(e.anchor||e.el),n=t&&t[ms];return n?h(n):t};let Y=!1;const ee=(e,t,n)=>{null==e?t._vnode&&K(t._vnode,null,null,!0):_(t._vnode||null,e,t,null,null,null,n),Y||(Y=!0,zt(),Ht(),Y=!1),t._vnode=e},te={p:_,um:K,m:H,r:q,mt:$,mc:P,pc:B,pbc:R,n:X,o:e};let se,re;return{render:ee,hydrate:se,createApp:Qn(ee,se)}}(e)}function 
xs({type:e,props:t},n){return"svg"===n&&"foreignObject"===e||"mathml"===n&&"annotation-xml"===e&&t&&t.encoding&&t.encoding.includes("html")?void 0:n}function ws({effect:e,update:t},n){e.allowRecurse=t.allowRecurse=n}function Ss(e,t,n=!1){const s=e.children,r=t.children;if(f(s)&&f(r))for(let o=0;oZn(Os),Fs={};function Ms(e,t,n){return Ps(e,t,n)}function Ps(e,n,{immediate:r,deep:o,flush:l,once:i,onTrack:u,onTrigger:a}=t){if(n&&i){const e=n;n=(...t)=>{e(...t),O()}}const p=gr,d=e=>!0===o?e:js(e,!1===o?1:void 0);let v,g,_=!1,m=!1;if(mt(e)?(v=()=>e.value,_=ut(e)):it(e)?(v=()=>d(e),_=!0):f(e)?(m=!0,_=e.some((e=>it(e)||ut(e))),v=()=>e.map((e=>mt(e)?e.value:it(e)?d(e):h(e)?Mt(e,p,2):void 0))):v=h(e)?n?()=>Mt(e,p,2):()=>(g&&g(),Pt(e,p,3,[b])):s,n&&o){const e=v;v=()=>js(e())}let y,b=e=>{g=C.onStop=()=>{Mt(e,p,4),g=C.onStop=void 0}};if(Cr){if(b=s,n?r&&Pt(n,p,3,[v(),m?[]:void 0,b]):v(),"sync"!==l)return s;{const e=Es();y=e.__watcherHandles||(e.__watcherHandles=[])}}let x=m?new Array(e.length).fill(Fs):Fs;const w=()=>{if(C.active&&C.dirty)if(n){const e=C.run();(o||_||(m?e.some(((e,t)=>R(e,x[t]))):R(e,x)))&&(g&&g(),Pt(n,p,3,[e,x===Fs?void 0:m&&x[0]===Fs?[]:x,b]),x=e)}else C.run()};let S;w.allowRecurse=!!n,"sync"===l?S=w:"post"===l?S=()=>ys(w,p&&p.suspense):(w.pre=!0,p&&(w.id=p.uid),S=()=>Bt(w));const C=new ne(v,s,S),k=ee(),O=()=>{C.stop(),k&&c(k.effects,C)};return n?r?w():x=C.run():"post"===l?ys(C.run.bind(C),p&&p.suspense):C.run(),y&&y.push(O),O}function As(e,t,n){const s=this.proxy,r=v(e)?e.includes(".")?Rs(s,e):()=>s[e]:e.bind(s,s);let o;h(t)?o=t:(o=t.handler,n=t);const l=br(this),i=Ps(r,o.bind(s),n);return l(),i}function Rs(e,t){const n=t.split(".");return()=>{let t=e;for(let e=0;e{js(e,t,n)}));else if(w(e)){for(const s in e)js(e[s],t,n);for(const s of Object.getOwnPropertySymbols(e))Object.prototype.propertyIsEnumerable.call(e,s)&&js(e[s],t,n)}return e}const Ts=(e,t)=>"modelValue"===t||"model-value"===t?e.modelModifiers:e[`${t}Modifiers`]||e[`${E(t)}Modifiers`]||e[`${M(t)}Modifiers`];function Ls(e,n,...s){if(e.isUnmounted)return;const r=e.vnode.props||t;let o=s;const l=n.startsWith("update:"),i=l&&Ts(r,n.slice(7));let c;i&&(i.trim&&(o=s.map((e=>v(e)?e.trim():e))),i.number&&(o=s.map(L)));let u=r[c=A(n)]||r[c=A(E(n))];!u&&l&&(u=r[c=A(M(n))]),u&&Pt(u,e,6,o);const a=r[c+"Once"];if(a){if(e.emitted){if(e.emitted[c])return}else e.emitted={};e.emitted[c]=!0,Pt(a,e,6,o)}}function Is(e,t,n=!1){const s=t.emitsCache,r=s.get(e);if(void 0!==r)return r;const o=e.emits;let l={},c=!1;if(!h(e)){const s=e=>{const n=Is(e,t,!0);n&&(c=!0,i(l,n))};!n&&t.mixins.length&&t.mixins.forEach(s),e.extends&&s(e.extends),e.mixins&&e.mixins.forEach(s)}return o||c?(f(o)?o.forEach((e=>l[e]=null)):i(l,o),_(e)&&s.set(e,l),l):(_(e)&&s.set(e,null),null)}function Vs(e,t){return!(!e||!o(t))&&(t=t.slice(2).replace(/Once$/,""),a(e,t[0].toLowerCase()+t.slice(1))||a(e,M(t))||a(e,t))}function $s(e){const{type:t,vnode:n,proxy:s,withProxy:r,propsOptions:[o],slots:i,attrs:c,emit:u,render:a,renderCache:f,props:p,data:d,setupState:h,ctx:v,inheritAttrs:g}=e,_=Xt(e);let m,y;try{if(4&n.shapeFlag){const e=r||s,t=e;m=ur(a.call(t,e,f,p,h,d,v)),y=c}else{const e=t;0,m=ur(e.length>1?e(p,{attrs:c,slots:i,emit:u}):e(p,null)),y=t.props?c:Ns(c)}}catch(x){Ks.length=0,At(x,e,1),m=or(zs)}let b=m;if(y&&!1!==g){const e=Object.keys(y),{shapeFlag:t}=b;e.length&&7&t&&(o&&e.some(l)&&(y=Us(y,o)),b=lr(b,y,!1,!0))}return n.dirs&&(b=lr(b,null,!1,!0),b.dirs=b.dirs?b.dirs.concat(n.dirs):n.dirs),n.transition&&(b.transition=n.transition),m=b,Xt(_),m}const Ns=e=>{let t;for(const n 
in e)("class"===n||"style"===n||o(n))&&((t||(t={}))[n]=e[n]);return t},Us=(e,t)=>{const n={};for(const s in e)l(s)&&s.slice(9)in t||(n[s]=e[s]);return n};function Ds(e,t,n){const s=Object.keys(t);if(s.length!==Object.keys(e).length)return!0;for(let r=0;r0?qs||n:null,Ks.pop(),qs=Ks[Ks.length-1]||null,Js>0&&qs&&qs.push(e),e}function Zs(e,t,n,s,r,o){return Xs(rr(e,t,n,s,r,o,!0))}function Ys(e,t,n,s,r){return Xs(or(e,t,n,s,r,!0))}function er(e){return!!e&&!0===e.__v_isVNode}function tr(e,t){return e.type===t.type&&e.key===t.key}const nr=({key:e})=>null!=e?e:null,sr=({ref:e,ref_key:t,ref_for:n})=>("number"==typeof e&&(e=""+e),null!=e?v(e)||mt(e)||h(e)?{i:Jt,r:e,k:t,f:!!n}:e:null);function rr(e,t=null,n=null,s=0,r=null,o=(e===Bs?0:1),l=!1,i=!1){const c={__v_isVNode:!0,__v_skip:!0,type:e,props:t,key:t&&nr(t),ref:t&&sr(t),scopeId:Qt,slotScopeIds:null,children:n,component:null,suspense:null,ssContent:null,ssFallback:null,dirs:null,transition:null,el:null,anchor:null,target:null,targetStart:null,targetAnchor:null,staticCount:0,shapeFlag:o,patchFlag:s,dynamicProps:r,dynamicChildren:null,appContext:null,ctx:Jt};return i?(fr(c,n),128&o&&e.normalize(c)):n&&(c.shapeFlag|=v(n)?8:16),Js>0&&!l&&qs&&(c.patchFlag>0||6&o)&&32!==c.patchFlag&&qs.push(c),c}const or=function(e,t=null,n=null,s=0,r=null,o=!1){e&&e!==Sn||(e=zs);if(er(e)){const s=lr(e,t,!0);return n&&fr(s,n),Js>0&&!o&&qs&&(6&s.shapeFlag?qs[qs.indexOf(e)]=s:qs.push(s)),s.patchFlag=-2,s}l=e,h(l)&&"__vccOpts"in l&&(e=e.__vccOpts);var l;if(t){t=function(e){return e?at(e)||ns(e)?i({},e):e:null}(t);let{class:e,style:n}=t;e&&!v(e)&&(t.class=W(e)),_(n)&&(at(n)&&!f(n)&&(n=i({},n)),t.style=$(n))}const c=v(e)?1:(e=>e.__isSuspense)(e)?128:(e=>e.__isTeleport)(e)?64:_(e)?4:h(e)?2:0;return rr(e,t,n,s,r,c,o,!0)};function lr(e,t,n=!1,s=!1){const{props:r,ref:o,patchFlag:l,children:i,transition:c}=e,u=t?pr(r||{},t):r,a={__v_isVNode:!0,__v_skip:!0,type:e.type,props:u,key:u&&nr(u),ref:t&&t.ref?n&&o?f(o)?o.concat(sr(t)):[o,sr(t)]:sr(t):o,scopeId:e.scopeId,slotScopeIds:e.slotScopeIds,children:i,target:e.target,targetStart:e.targetStart,targetAnchor:e.targetAnchor,staticCount:e.staticCount,shapeFlag:e.shapeFlag,patchFlag:t&&e.type!==Bs?-1===l?16:16|l:l,dynamicProps:e.dynamicProps,dynamicChildren:e.dynamicChildren,appContext:e.appContext,dirs:e.dirs,transition:c,component:e.component,suspense:e.suspense,ssContent:e.ssContent&&lr(e.ssContent),ssFallback:e.ssFallback&&lr(e.ssFallback),el:e.el,anchor:e.anchor,ctx:e.ctx,ce:e.ce};return c&&s&&tn(a,c.clone(a)),a}function ir(e=" ",t=0){return or(Ws,null,e,t)}function cr(e="",t=!1){return t?(Gs(),Ys(zs,null,e)):or(zs,null,e)}function ur(e){return null==e||"boolean"==typeof e?or(zs):f(e)?or(Bs,null,e.slice()):"object"==typeof e?ar(e):or(Ws,null,String(e))}function ar(e){return null===e.el&&-1!==e.patchFlag||e.memo?e:lr(e)}function fr(e,t){let n=0;const{shapeFlag:s}=e;if(null==t)t=null;else if(f(t))n=16;else if("object"==typeof t){if(65&s){const n=t.default;return void(n&&(n._c&&(n._d=!1),fr(e,n()),n._c&&(n._d=!0)))}{n=32;const s=t._;s||ns(t)?3===s&&Jt&&(1===Jt.slots._?t._=1:(t._=2,e.patchFlag|=1024)):t._ctx=Jt}}else h(t)?(t={default:t,_ctx:Jt},n=32):(t=String(t),64&s?(n=16,t=[ir(t)]):n=8);e.children=t,e.shapeFlag|=n}function pr(...e){const t={};for(let n=0;ngr||Jt;let mr,yr;{const e=V(),t=(t,n)=>{let s;return(s=e[t])||(s=e[t]=[]),s.push(n),e=>{s.length>1?s.forEach((t=>t(e))):s[0](e)}};mr=t("__VUE_INSTANCE_SETTERS__",(e=>gr=e)),yr=t("__VUE_SSR_SETTERS__",(e=>Cr=e))}const br=e=>{const t=gr;return 
mr(e),e.scope.on(),()=>{e.scope.off(),mr(t)}},xr=()=>{gr&&gr.scope.off(),mr(null)};function wr(e){return 4&e.vnode.shapeFlag}let Sr,Cr=!1;function kr(e,t,n){h(t)?e.type.__ssrInlineRender?e.ssrRender=t:e.render=t:_(t)&&(e.setupState=St(t)),Or(e,n)}function Or(e,t,n){const r=e.type;if(!e.render){if(!t&&Sr&&!r.render){const t=r.template||Un(e).template;if(t){const{isCustomElement:n,compilerOptions:s}=e.appContext.config,{delimiters:o,compilerOptions:l}=r,c=i(i({isCustomElement:n,delimiters:o},s),l);r.render=Sr(t,c)}}e.render=r.render||s}{const t=br(e);ue();try{Vn(e)}finally{ae(),t()}}}const Er={get:(e,t)=>(be(e,0,""),e[t])};function Fr(e){return e.exposed?e.exposeProxy||(e.exposeProxy=new Proxy(St(pt(e.exposed)),{get:(t,n)=>n in t?t[n]:n in Rn?Rn[n](e):void 0,has:(e,t)=>t in e||t in Rn})):e.proxy}function Mr(e,t=!0){return h(e)?e.displayName||e.name:e.name||t&&e.__name}const Pr=(e,t)=>{const n=function(e,t,n=!1){let r,o;const l=h(e);return l?(r=e,o=s):(r=e.get,o=e.set),new vt(r,o,l||!o,n)}(e,0,Cr);return n};function Ar(e,t,n){const s=arguments.length;return 2===s?_(t)&&!f(t)?er(t)?or(e,null,[t]):or(e,t):or(e,null,t):(s>3?n=Array.prototype.slice.call(arguments,2):3===s&&er(n)&&(n=[n]),or(e,t,n))}const Rr="3.4.36",jr="undefined"!=typeof document?document:null,Tr=jr&&jr.createElement("template"),Lr={insert:(e,t,n)=>{t.insertBefore(e,n||null)},remove:e=>{const t=e.parentNode;t&&t.removeChild(e)},createElement:(e,t,n,s)=>{const r="svg"===t?jr.createElementNS("http://www.w3.org/2000/svg",e):"mathml"===t?jr.createElementNS("http://www.w3.org/1998/Math/MathML",e):n?jr.createElement(e,{is:n}):jr.createElement(e);return"select"===e&&s&&null!=s.multiple&&r.setAttribute("multiple",s.multiple),r},createText:e=>jr.createTextNode(e),createComment:e=>jr.createComment(e),setText:(e,t)=>{e.nodeValue=t},setElementText:(e,t)=>{e.textContent=t},parentNode:e=>e.parentNode,nextSibling:e=>e.nextSibling,querySelector:e=>jr.querySelector(e),setScopeId(e,t){e.setAttribute(t,"")},insertStaticContent(e,t,n,s,r,o){const l=n?n.previousSibling:t.lastChild;if(r&&(r===o||r.nextSibling))for(;t.insertBefore(r.cloneNode(!0),n),r!==o&&(r=r.nextSibling););else{Tr.innerHTML="svg"===s?`${e}`:"mathml"===s?`${e}`:e;const r=Tr.content;if("svg"===s||"mathml"===s){const e=r.firstChild;for(;e.firstChild;)r.appendChild(e.firstChild);r.removeChild(e)}t.insertBefore(r,n)}return[l?l.nextSibling:t.firstChild,n?n.previousSibling:t.lastChild]}},Ir=Symbol("_vtc"); +/** +* @vue/runtime-dom v3.4.36 +* (c) 2018-present Yuxi (Evan) You and Vue contributors +* @license MIT +**/const Vr=Symbol("_vod"),$r=Symbol("_vsh"),Nr=Symbol(""),Ur=/(^|;)\s*display\s*:/;const Dr=/\s*!important$/;function Br(e,t,n){if(f(n))n.forEach((n=>Br(e,t,n)));else if(null==n&&(n=""),t.startsWith("--"))e.setProperty(t,n);else{const s=function(e,t){const n=zr[t];if(n)return n;let s=E(t);if("filter"!==s&&s in e)return zr[t]=s;s=P(s);for(let r=0;r{if(e._vts){if(e._vts<=n.attached)return}else e._vts=Date.now();Pt(function(e,t){if(f(t)){const n=e.stopImmediatePropagation;return e.stopImmediatePropagation=()=>{n.call(e),e._stopped=!0},t.map((e=>t=>!t._stopped&&e&&e(t)))}return t}(e,n.value),t,5,[e])};return n.value=e,n.attached=Yr(),n}(s,r);qr(e,n,l,i)}else l&&(!function(e,t,n,s){e.removeEventListener(t,n,s)}(e,n,l,i),o[t]=void 0)}}const Qr=/(?:Once|Passive|Capture)$/;let Xr=0;const Zr=Promise.resolve(),Yr=()=>Xr||(Zr.then((()=>Xr=0)),Xr=Date.now());const eo=e=>111===e.charCodeAt(0)&&110===e.charCodeAt(1)&&e.charCodeAt(2)>96&&e.charCodeAt(2)<123;const to=e=>{const 
t=e.props["onUpdate:modelValue"]||!1;return f(t)?e=>j(t,e):t};function no(e){e.target.composing=!0}function so(e){const t=e.target;t.composing&&(t.composing=!1,t.dispatchEvent(new Event("input")))}const ro=Symbol("_assign"),oo={created(e,{modifiers:{lazy:t,trim:n,number:s}},r){e[ro]=to(r);const o=s||r.props&&"number"===r.props.type;qr(e,t?"change":"input",(t=>{if(t.target.composing)return;let s=e.value;n&&(s=s.trim()),o&&(s=L(s)),e[ro](s)})),n&&qr(e,"change",(()=>{e.value=e.value.trim()})),t||(qr(e,"compositionstart",no),qr(e,"compositionend",so),qr(e,"change",so))},mounted(e,{value:t}){e.value=null==t?"":t},beforeUpdate(e,{value:t,oldValue:n,modifiers:{lazy:s,trim:r,number:o}},l){if(e[ro]=to(l),e.composing)return;const i=null==t?"":t;if((!o&&"number"!==e.type||/^0\d/.test(e.value)?e.value:L(e.value))!==i){if(document.activeElement===e&&"range"!==e.type){if(s&&t===n)return;if(r&&e.value.trim()===i)return}e.value=i}}},lo=["ctrl","shift","alt","meta"],io={stop:e=>e.stopPropagation(),prevent:e=>e.preventDefault(),self:e=>e.target!==e.currentTarget,ctrl:e=>!e.ctrlKey,shift:e=>!e.shiftKey,alt:e=>!e.altKey,meta:e=>!e.metaKey,left:e=>"button"in e&&0!==e.button,middle:e=>"button"in e&&1!==e.button,right:e=>"button"in e&&2!==e.button,exact:(e,t)=>lo.some((n=>e[`${n}Key`]&&!t.includes(n)))},co=(e,t)=>{const n=e._withMods||(e._withMods={}),s=t.join(".");return n[s]||(n[s]=(n,...s)=>{for(let e=0;e{const n=e._withKeys||(e._withKeys={}),s=t.join(".");return n[s]||(n[s]=n=>{if(!("key"in n))return;const s=M(n.key);return t.some((e=>e===s||uo[e]===s))?e(n):void 0})},fo=i({patchProp:(e,t,n,s,r,i)=>{const c="svg"===r;"class"===t?function(e,t,n){const s=e[Ir];s&&(t=(t?[t,...s]:[...s]).join(" ")),null==t?e.removeAttribute("class"):n?e.setAttribute("class",t):e.className=t}(e,s,c):"style"===t?function(e,t,n){const s=e.style,r=v(n);let o=!1;if(n&&!r){if(t)if(v(t))for(const e of t.split(";")){const t=e.slice(0,e.indexOf(":")).trim();null==n[t]&&Br(s,t,"")}else for(const e in t)null==n[e]&&Br(s,e,"");for(const e in n)"display"===e&&(o=!0),Br(s,e,n[e])}else if(r){if(t!==n){const e=s[Nr];e&&(n+=";"+e),s.cssText=n,o=Ur.test(n)}}else t&&e.removeAttribute("style");Vr in e&&(e[Vr]=o?s.display:"",e[$r]&&(s.display="none"))}(e,n,s):o(t)?l(t)||Jr(e,t,0,s,i):("."===t[0]?(t=t.slice(1),1):"^"===t[0]?(t=t.slice(1),0):function(e,t,n,s){if(s)return"innerHTML"===t||"textContent"===t||!!(t in e&&eo(t)&&h(n));if("spellcheck"===t||"draggable"===t||"translate"===t)return!1;if("form"===t)return!1;if("list"===t&&"INPUT"===e.tagName)return!1;if("type"===t&&"TEXTAREA"===e.tagName)return!1;if("width"===t||"height"===t){const t=e.tagName;if("IMG"===t||"VIDEO"===t||"CANVAS"===t||"SOURCE"===t)return!1}if(eo(t)&&v(n))return!1;return t in e}(e,t,s,c))?(!function(e,t,n){if("innerHTML"===t||"textContent"===t){if(null==n)return;return void(e[t]=n)}const s=e.tagName;if("value"===t&&"PROGRESS"!==s&&!s.includes("-")){const r="OPTION"===s?e.getAttribute("value")||"":e.value,o=null==n?"":String(n);return r===o&&"_value"in e||(e.value=o),null==n&&e.removeAttribute(t),void(e._value=n)}let r=!1;if(""===n||null==n){const s=typeof e[t];"boolean"===s?n=H(n):null==n&&"string"===s?(n="",r=!0):"number"===s&&(n=0,r=!0)}try{e[t]=n}catch(o){}r&&e.removeAttribute(t)}(e,t,s),e.tagName.includes("-")||"value"!==t&&"checked"!==t&&"selected"!==t||Kr(e,t,s,c,0,"value"!==t)):("true-value"===t?e._trueValue=s:"false-value"===t&&(e._falseValue=s),Kr(e,t,s,c))}},Lr);let po;function ho(){return po||(po=bs(fo))}const vo=(...e)=>{ho().render(...e)},go=(...e)=>{const 
t=ho().createApp(...e),{mount:n}=t;return t.mount=e=>{const s=function(e){if(v(e)){return document.querySelector(e)}return e}(e);if(!s)return;const r=t._component;h(r)||r.render||r.template||(r.template=s.innerHTML),s.innerHTML="";const o=n(s,!1,function(e){if(e instanceof SVGElement)return"svg";if("function"==typeof MathMLElement&&e instanceof MathMLElement)return"mathml"}(s));return s instanceof Element&&(s.removeAttribute("v-cloak"),s.setAttribute("data-v-app","")),o},t};export{wn as A,kn as B,Yt as C,Ys as D,Zt as E,W as F,cr as G,Cn as H,Bs as I,oo as J,xt as K,Fn as L,co as M,$ as N,or as O,ao as P,vo as Q,Ar as R,go as S,ot as a,Gs as b,Zs as c,rr as d,rt as e,Y as f,_r as g,pt as h,mt as i,it as j,ee as k,te as l,pr as m,Dt as n,pn as o,Zn as p,Ct as q,yt as r,Pr as s,ft as t,Yn as u,Et as v,Ms as w,Mn as x,ir as y,q as z}; diff --git a/ComfyUI-Easy-Use/web_version/v2/easyuse.js b/ComfyUI-Easy-Use/web_version/v2/easyuse.js new file mode 100644 index 0000000000000000000000000000000000000000..3138c8ec9f2a67857eae2ac398d2fe8546fc4b7c --- /dev/null +++ b/ComfyUI-Easy-Use/web_version/v2/easyuse.js @@ -0,0 +1,2 @@ +!function(){"use strict";try{if("undefined"!=typeof document){var e=document.createElement("style");e.appendChild(document.createTextNode('@charset "UTF-8";.easyuse-model-info{color:#fff;max-width:90vw;font-family:var(--font-family)}.easyuse-model-content{display:flex;flex-direction:column;overflow:hidden}.easyuse-model-header{margin:0 0 15px}.easyuse-model-header-remark{display:flex;align-items:center;margin-top:5px}.easyuse-model-info h2{text-align:left;margin:0}.easyuse-model-info h5{text-align:left;margin:0 15px 0 0;font-weight:400;color:var(--descrip-text)}.easyuse-model-info p{margin:5px 0}.easyuse-model-info a{color:var(--theme-color-light)}.easyuse-model-info a:hover{text-decoration:underline}.easyuse-model-tags-list{display:flex;flex-wrap:wrap;list-style:none;gap:10px;max-height:200px;overflow:auto;margin:10px 0;padding:0}.easyuse-model-tag{background-color:var(--comfy-input-bg);border:2px solid var(--border-color);color:var(--input-text);display:flex;align-items:center;gap:5px;border-radius:5px;padding:2px 5px;cursor:pointer}.easyuse-model-tag--selected span:before{content:"✅";position:absolute;background-color:var(--theme-color-light);left:0;top:0;right:0;bottom:0;text-align:center}.easyuse-model-tag:hover{border:2px solid var(--theme-color-light)}.easyuse-model-tag p{margin:0}.easyuse-model-tag span{text-align:center;border-radius:5px;background-color:var(--theme-color-light);padding:2px;position:relative;min-width:20px;overflow:hidden;color:#fff}.easyuse-model-metadata .comfy-modal-content{max-width:100%}.easyuse-model-metadata label{margin-right:1ch;color:#ccc}.easyuse-model-metadata span{color:var(--theme-color-light)}.easyuse-preview{max-width:660px;margin-right:15px;position:relative}.easyuse-preview-group{position:relative;overflow:hidden;border-radius:.5rem;width:660px}.easyuse-preview-list{display:flex;flex-wrap:nowrap;width:100%;transition:all .5s ease-in-out}.easyuse-preview-list.no-transition{transition:none}.easyuse-preview-slide{display:flex;flex-basis:calc(50% - 5px);flex-grow:0;flex-shrink:0;position:relative;justify-content:center;align-items:center;padding-right:5px;padding-left:0}.easyuse-preview-slide:nth-child(2n){padding-left:5px;padding-right:0}.easyuse-preview-slide-content{position:relative;min-height:150px;width:100%}.easyuse-preview-slide-content 
.save{position:absolute;right:6px;z-index:12;bottom:6px;display:flex;align-items:center;height:26px;padding:0 9px;color:var(--input-text);font-size:12px;line-height:26px;background:#00000080;border-radius:13px;cursor:pointer;min-width:80px;text-align:center}.easyuse-preview-slide-content .save:hover{filter:brightness(120%);will-change:auto}.easyuse-preview-slide-content img{border-radius:14px;object-position:center center;max-width:100%;max-height:700px;border-style:none;vertical-align:middle}.easyuse-preview button{position:absolute;z-index:10;top:50%;display:flex;align-items:center;justify-content:center;width:30px;height:30px;border-radius:15px;border:1px solid rgba(66,63,78,.15);background-color:#423f4e80;color:#fffc;transition-property:color,background-color,border-color,text-decoration-color,fill,stroke;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.15s;transform:translateY(-50%)}.easyuse-preview button.left{left:10px}.easyuse-preview button.right{right:10px}.easyuse-model-detail{margin-top:16px;overflow:hidden;border:1px solid var(--border-color);border-radius:8px;width:300px}.easyuse-model-detail-head{height:40px;padding:0 10px;font-weight:500;font-size:14px;font-style:normal;line-height:40px}.easyuse-model-detail-body{box-sizing:border-box;font-size:12px}.easyuse-model-detail-item{display:flex;justify-content:flex-start;border-top:1px solid var(--border-color)}.easyuse-model-detail-item-label{flex-shrink:0;width:88px;padding-top:5px;padding-bottom:5px;padding-left:10px;border-right:1px solid var(--border-color);color:var(--input-text);font-weight:400}.easyuse-model-detail-item-value{display:flex;flex-wrap:wrap;padding:5px 10px;color:var(--input-text)}.easyuse-model-detail-textarea{border-top:1px solid var(--border-color);padding:10px;height:100px;overflow-y:auto;font-size:12px}.easyuse-model-detail-textarea textarea{width:100%;height:100%;border:0;background-color:transparent;color:var(--input-text)}.easyuse-model-detail-textarea textarea::placeholder{color:var(--descrip-text)}.easyuse-model-detail-textarea.empty{display:flex;justify-content:center;align-items:center;color:var(--descrip-text)}.easyuse-model-notes{background-color:#00000040;padding:5px;margin-top:5px}.easyuse-model-notes:empty{display:none}.easyuse-account-user{font-size:10px;color:var(--descrip-text);text-align:center}.easyuse-account-user-info{display:flex;justify-content:space-between;align-items:center;padding-bottom:10px;cursor:pointer}.easyuse-account-user-info .user{display:flex;align-items:center}.easyuse-account-user-info .edit{padding:5px 10px;background:var(--comfy-menu-bg);border-radius:4px}.easyuse-account-user-info:hover{filter:brightness(110%)}.easyuse-account-user-info h5{margin:0;font-size:10px;text-align:left}.easyuse-account-user-info h6{margin:0;font-size:8px;text-align:left;font-weight:300}.easyuse-account-user-info .remark{margin-top:4px}.easyuse-account-user-info .avatar{width:36px;height:36px;background:var(--comfy-input-bg);border-radius:50%;margin-right:5px;display:flex;justify-content:center;align-items:center;font-size:16px;overflow:hidden}.easyuse-account-user-info .avatar img{width:100%;height:100%}.easyuse-account-dialog{width:600px}.easyuse-account-dialog-main a,.easyuse-account-dialog-main a:visited{font-weight:400;color:var(--theme-color-light)}.easyuse-account-dialog-item{display:flex;justify-content:flex-start;align-items:center;padding:10px 0;border-bottom:1px solid var(--border-color)}.easyuse-account-dialog-item 
input{padding:5px;margin-right:5px}.easyuse-account-dialog-item input.key{flex:1}.easyuse-account-dialog-item button{cursor:pointer;margin-left:5px!important;padding:5px!important;font-size:16px!important}.easyuse-account-dialog-item button:hover{filter:brightness(120%)}.easyuse-account-dialog-item button.choose{background:var(--theme-color)}.easyuse-account-dialog-item button.delete{background:var(--error-color)}.easy-dropdown,.easy-nested-dropdown{position:relative;box-sizing:border-box;background-color:#171717;box-shadow:0 4px 4px #ffffff40;padding:0;margin:0;list-style:none;z-index:1000;overflow:visible;max-height:fit-content;max-width:fit-content}.easy-dropdown{position:absolute;border-radius:0}.easy-dropdown li.item,.easy-nested-dropdown li.item{font-weight:400;min-width:max-content}.easy-dropdown li.folder,.easy-nested-dropdown li.folder{cursor:default;position:relative;border-right:3px solid cyan}.easy-dropdown li.folder:after,.easy-nested-dropdown li.folder:after{content:">";position:absolute;right:2px;font-weight:400}.easy-dropdown li,.easy-nested-dropdown li{padding:4px 10px;cursor:pointer;font-family:system-ui;font-size:.7rem;position:relative}.easy-nested-dropdown{position:absolute;top:0;left:100%;margin:0;border:none;display:none}.easy-dropdown li.selected>.easy-nested-dropdown,.easy-nested-dropdown li.selected>.easy-nested-dropdown{display:block;border:none}.easy-dropdown li.selected,.easy-nested-dropdown li.selected{background-color:#e5e5e5;border:none}:root{--theme-color:var(--primary-bg);--theme-color-light: var(--primary-hover-bg);--success-color: #52c41a;--error-color: #ff4d4f;--warning-color: #faad14;--font-family: Inter, -apple-system, BlinkMacSystemFont, Helvetica Neue, sans-serif;--p-inputtext-background: var(--p-form-field-background);--p-inputtext-disabled-background: var(--p-form-field-disabled-background);--p-inputtext-filled-background: var(--p-form-field-filled-background);--p-inputtext-filled-focus-background: var(--p-form-field-filled-focus-background);--p-inputtext-border-color: var(--p-form-field-border-color);--p-inputtext-hover-border-color: var(--p-form-field-hover-border-color);--p-inputtext-focus-border-color: var(--p-form-field-focus-border-color);--p-inputtext-invalid-border-color: var(--p-form-field-invalid-border-color);--p-inputtext-color: var(--p-form-field-color);--p-inputtext-disabled-color: var(--p-form-field-disabled-color);--p-inputtext-placeholder-color: var(--p-form-field-placeholder-color);--p-inputtext-shadow: var(--p-form-field-shadow);--p-inputtext-padding-x: var(--p-form-field-padding-x);--p-inputtext-padding-y: var(--p-form-field-padding-y);--p-inputtext-border-radius: var(--p-form-field-border-radius);--p-inputtext-focus-ring-width: var(--p-form-field-focus-ring-width);--p-inputtext-focus-ring-style: var(--p-form-field-focus-ring-style);--p-inputtext-focus-ring-color: var(--p-form-field-focus-ring-color);--p-inputtext-focus-ring-offset: var(--p-form-field-focus-ring-offset);--p-inputtext-focus-ring-shadow: var(--p-form-field-focus-ring-shadow);--p-inputtext-transition-duration: var(--p-form-field-transition-duration);--p-inputtext-sm-font-size: .875rem;--p-inputtext-sm-padding-x: .625rem;--p-inputtext-sm-padding-y: .375rem;--p-inputtext-lg-font-size: 1.125rem;--p-inputtext-lg-padding-x: .875rem;--p-inputtext-lg-padding-y: .625rem;--p-tooltip-max-width: 12.5rem;--p-tooltip-gutter: .25rem;--p-tooltip-shadow: var(--p-overlay-popover-shadow);--p-tooltip-padding: .5rem .75rem;--p-tooltip-border-radius: 
var(--p-overlay-popover-border-radius);--p-tooltip-background: var(--p-surface-700);--p-tooltip-color: var(--p-surface-0)}.comfyui-easyuse-theme,.comfyui-easyuse-primary{color:var(--theme-color-light)}.comfyui-easyuse-theme.point:hover,.comfyui-easyuse-primary.point:hover{opacity:.8}.comfyui-easyuse-success{color:var(--success-color)}.comfyui-easyuse-success.point:hover{opacity:.8}.comfyui-easyuse-error{color:var(--error-color)}.comfyui-easyuse-error.point:hover{opacity:.8}.comfyui-easyuse-warning,.comfyui-easyuse--warn{color:var(--warning-color)}.comfyui-easyuse-warning.point:hover,.comfyui-easyuse--warn.point:hover{opacity:.8}.grid-cols-1{grid-template-columns:repeat(1,minmax(0,1fr))}.grid-cols-12{grid-template-columns:repeat(12,minmax(0,1fr))}.grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.grid-cols-3{grid-template-columns:repeat(3,minmax(0,1fr))}.flex-row{flex-direction:row}.flex-row-reverse{flex-direction:row-reverse}.flex-col{flex-direction:column}.flex-col-reverse{flex-direction:column-reverse}.flex-wrap{flex-wrap:wrap}.flex-nowrap{flex-wrap:nowrap}.items-start{align-items:flex-start}.items-end{align-items:flex-end}.items-center{align-items:center}.items-stretch{align-items:stretch}.justify-start{justify-content:flex-start}.justify-end{justify-content:flex-end}.justify-center{justify-content:center}.justify-between{justify-content:space-between}.grid{display:grid}.gap-0\\.5{gap:.125rem}.gap-1{gap:.25rem}.gap-10{gap:2.5rem}.gap-12{gap:3rem}.gap-2{gap:.5rem}.gap-2\\.5{gap:.625rem}.gap-20{gap:5rem}.gap-3{gap:.75rem}.gap-3\\.5{gap:.875rem}.gap-4{gap:1rem}.gap-5{gap:1.25rem}.gap-6{gap:1.5rem}.gap-7{gap:1.75rem}.gap-8{gap:2rem}.gap-\\[0\\.28rem\\]{gap:.28rem}.gap-x-2{-moz-column-gap:.5rem;column-gap:.5rem}.gap-x-4{-moz-column-gap:1rem;column-gap:1rem}.gap-y-1{row-gap:.25rem}.gap-y-6{row-gap:1.5rem}@media (min-width: 576px){.sm\\:col-span-4{grid-column:span 4/span 4}.sm\\:col-span-6{grid-column:span 6/span 6}.sm\\:ml-8{margin-left:2rem}.sm\\:mt-0{margin-top:0}.sm\\:flex{display:flex}.sm\\:h-60{height:15rem}.sm\\:\\!w-64{width:16rem!important}.sm\\:w-40{width:10rem}.sm\\:w-44{width:11rem}.sm\\:w-56{width:14rem}.sm\\:w-60{width:15rem}.sm\\:w-64{width:16rem}.sm\\:w-80{width:20rem}.sm\\:w-96{width:24rem}.sm\\:w-\\[30rem\\]{width:30rem}.sm\\:w-auto{width:auto}.sm\\:min-w-\\[30rem\\]{min-width:30rem}.sm\\:flex-row{flex-direction:row}.sm\\:flex-col{flex-direction:column}.sm\\:flex-nowrap{flex-wrap:nowrap}.sm\\:items-start{align-items:flex-start}.sm\\:items-end{align-items:flex-end}.sm\\:items-center{align-items:center}.sm\\:justify-center{justify-content:center}.sm\\:justify-between{justify-content:space-between}.sm\\:gap-2{gap:.5rem}.sm\\:p-20{padding:5rem}.sm\\:px-10{padding-left:2.5rem;padding-right:2.5rem}.sm\\:py-10{padding-bottom:2.5rem;padding-top:2.5rem}.sm\\:py-5{padding-bottom:1.25rem;padding-top:1.25rem}.sm\\:pt-32{padding-top:8rem}.sm\\:text-left{text-align:left}.sm\\:text-sm{font-size:.875rem;line-height:1.25rem}}@media (min-width: 768px){.md\\:-bottom-12{bottom:-3rem}.md\\:-bottom-28{bottom:-7rem}.md\\:-bottom-8{bottom:-2rem}.md\\:-bottom-\\[26rem\\]{bottom:-26rem}.md\\:-left-12{left:-3rem}.md\\:-left-28{left:-7rem}.md\\:-left-32{left:-8rem}.md\\:-left-4{left:-1rem}.md\\:-left-48{left:-12rem}.md\\:-left-\\[22rem\\]{left:-22rem}.md\\:bottom-0{bottom:0}.md\\:left-10{left:2.5rem}.md\\:left-\\[32rem\\]{left:32rem}.md\\:left-\\[42rem\\]{left:42rem}.md\\:top-1\\/2{top:50%}.md\\:top-32{top:8rem}.md\\:top-8{top:2rem}.md\\:col-span-2{grid-column:span 2/span 
2}.md\\:col-span-4{grid-column:span 4/span 4}.md\\:col-span-6{grid-column:span 6/span 6}.md\\:ml-auto{margin-left:auto}.md\\:block{display:block}.md\\:flex{display:flex}.md\\:hidden{display:none}.md\\:h-\\[20rem\\]{height:20rem}.md\\:h-\\[32rem\\]{height:32rem}.md\\:\\!w-80{width:20rem!important}.md\\:w-2\\/12{width:16.666667%}.md\\:w-40{width:10rem}.md\\:w-5\\/12{width:41.666667%}.md\\:w-56{width:14rem}.md\\:w-6\\/12{width:50%}.md\\:w-60{width:15rem}.md\\:w-8\\/12{width:66.666667%}.md\\:w-80{width:20rem}.md\\:w-\\[100rem\\]{width:100rem}.md\\:w-\\[26rem\\]{width:26rem}.md\\:w-\\[30rem\\]{width:30rem}.md\\:w-\\[50rem\\]{width:50rem}.md\\:w-\\[52rem\\]{width:52rem}.md\\:w-\\[60rem\\]{width:60rem}.md\\:w-\\[95rem\\]{width:95rem}.md\\:w-screen{width:100vw}.md\\:flex-initial{flex:0 1 auto}.md\\:-translate-y-1\\/2{--tw-translate-y: -50% }.md\\:-translate-y-1\\/2,.md\\:translate-x-0{transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skew(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.md\\:translate-x-0{--tw-translate-x: 0px }.md\\:grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.md\\:flex-row{flex-direction:row}.md\\:flex-col{flex-direction:column}.md\\:items-end{align-items:flex-end}.md\\:items-center{align-items:center}.md\\:justify-start{justify-content:flex-start}.md\\:justify-center{justify-content:center}.md\\:gap-20{gap:5rem}.md\\:gap-4{gap:1rem}.md\\:p-5{padding:1.25rem}.md\\:p-8{padding:2rem}}@media (min-width: 992px){.lg\\:left-20{left:5rem}.lg\\:left-\\[36rem\\]{left:36rem}.lg\\:left-\\[50rem\\]{left:50rem}.lg\\:col-span-1{grid-column:span 1/span 1}.lg\\:col-span-2{grid-column:span 2/span 2}.lg\\:col-span-4{grid-column:span 4/span 4}.lg\\:col-span-6{grid-column:span 6/span 6}.lg\\:mb-0{margin-bottom:0}.lg\\:mt-0{margin-top:0}.lg\\:mt-20{margin-top:5rem}.lg\\:flex{display:flex}.lg\\:hidden{display:none}.lg\\:h-10{height:2.5rem}.lg\\:h-32{height:8rem}.lg\\:h-\\[28rem\\]{height:28rem}.lg\\:\\!w-\\[30rem\\]{width:30rem!important}.lg\\:w-3\\/12{width:25%}.lg\\:w-32{width:8rem}.lg\\:w-\\[28rem\\]{width:28rem}.lg\\:w-\\[64rem\\]{width:64rem}.lg\\:w-fit{width:-moz-fit-content;width:fit-content}.lg\\:max-w-6xl{max-width:72rem}.lg\\:flex-row{flex-direction:row}.lg\\:gap-0{gap:0}.lg\\:rounded-2xl{border-radius:1rem}.lg\\:rounded-3xl{border-radius:1.5rem}.lg\\:rounded-xl{border-radius:.75rem}.lg\\:p-7{padding:1.75rem}.lg\\:px-2{padding-left:.5rem;padding-right:.5rem}.lg\\:px-20{padding-left:5rem;padding-right:5rem}.lg\\:px-56{padding-left:14rem;padding-right:14rem}.lg\\:px-8{padding-left:2rem;padding-right:2rem}.lg\\:px-9{padding-left:2.25rem;padding-right:2.25rem}.lg\\:py-20{padding-bottom:5rem;padding-top:5rem}.lg\\:py-7{padding-bottom:1.75rem;padding-top:1.75rem}.lg\\:pt-0{padding-top:0}.lg\\:text-2xl{font-size:1.5rem;line-height:2rem}.lg\\:text-5xl{font-size:3rem;line-height:1}.lg\\:text-base{font-size:1rem;line-height:1.5rem}.lg\\:text-xl{font-size:1.25rem;line-height:1.75rem}}@media (min-width: 1200px){.xl\\:-left-12{left:-3rem}.xl\\:-left-28{left:-7rem}.xl\\:bottom-0{bottom:0}.xl\\:left-36{left:9rem}.xl\\:left-\\[42rem\\]{left:42rem}.xl\\:left-\\[60rem\\]{left:60rem}.xl\\:col-span-3{grid-column:span 3/span 3}.xl\\:col-span-4{grid-column:span 4/span 4}.xl\\:col-span-6{grid-column:span 6/span 
6}.xl\\:block{display:block}.xl\\:flex{display:flex}.xl\\:hidden{display:none}.xl\\:h-\\[36\\.25rem\\]{height:36.25rem}.xl\\:\\!w-40{width:10rem!important}.xl\\:w-3\\/12{width:25%}.xl\\:w-6\\/12{width:50%}.xl\\:w-96{width:24rem}.xl\\:w-\\[29rem\\]{width:29rem}.xl\\:max-w-36{max-width:9rem}.xl\\:grid-cols-4{grid-template-columns:repeat(4,minmax(0,1fr))}.xl\\:flex-row{flex-direction:row}.xl\\:items-start{align-items:flex-start}.xl\\:items-center{align-items:center}.xl\\:gap-1{gap:.25rem}.xl\\:gap-6{gap:1.5rem}.xl\\:text-left{text-align:left}}.comfyui-easyuse-toast{position:fixed;z-index:99999;top:0;left:0;height:0;width:100%;display:flex;flex-direction:column;align-items:center;justify-content:start}.comfyui-easyuse-toast-container{position:relative;height:fit-content;padding:4px;margin-top:-100px;opacity:0;z-index:3;-webkit-transition:all .3s ease-in-out;-khtml-transition:all .3s ease-in-out;-moz-transition:all .3s ease-in-out;-ms-transition:all .3s ease-in-out;-o-transition:all .3s ease-in-out;transition:all .3s ease-in-out}.comfyui-easyuse-toast-container:last-child{z-index:1}.comfyui-easyuse-toast-container.show{opacity:1;margin-top:0!important;transform:translateY(0)}.comfyui-easyuse-toast-container:not(.show){z-index:1}.comfyui-easyuse-toast-container>div{position:relative;background:var(--comfy-menu-bg);color:var(--input-text);height:fit-content;box-shadow:0 0 10px #000000e0;padding:9px 12px;border-radius:var(--border-radius);font-size:14px;pointer-events:all;display:-webkit-box;display:-moz-box;display:-ms-flexbox;display:-webkit-flex;display:flex;-webkit-justify-content:center;-khtml-justify-content:center;-moz-justify-content:center;-ms-justify-content:center;-o-justify-content:center;justify-content:center;-webkit-align-items:center;-khtml-align-items:center;-moz-align-items:center;-ms-align-items:center;-o-align-items:center;align-items:center}.comfyui-easyuse-toast-container>div>span{display:-webkit-box;display:-moz-box;display:-ms-flexbox;display:-webkit-flex;display:flex;-webkit-justify-content:center;-khtml-justify-content:center;-moz-justify-content:center;-ms-justify-content:center;-o-justify-content:center;justify-content:center;-webkit-align-items:center;-khtml-align-items:center;-moz-align-items:center;-ms-align-items:center;-o-align-items:center;align-items:center}.comfyui-easyuse-toast-container>div>span i{font-size:16px;margin-right:8px}.comfyui-easyuse-toast-container>div>span i.loading{animation:loading-rotate 1s linear infinite}.comfyui-easyuse-toast-container a{cursor:pointer;text-decoration:underline;color:var(--theme-color-light);margin-left:4px;display:inline-block;line-height:1}.comfyui-easyuse-toast-container a:hover{color:var(--theme-color-light);text-decoration:none}@keyframes loading-rotate{0%{transform:rotate(0)}to{transform:rotate(360deg)}}.comfyui-easyuse-selector{position:relative}.comfyui-easyuse-selector.hide{display:none}.comfyui-easyuse-selector__header{--height:26px;display:-webkit-box;display:-moz-box;display:-ms-flexbox;display:-webkit-flex;display:flex;-webkit-justify-content:space-between;-khtml-justify-content:space-between;-moz-justify-content:space-between;-ms-justify-content:space-between;-o-justify-content:space-between;justify-content:space-between;-webkit-align-items:center;-khtml-align-items:center;-moz-align-items:center;-ms-align-items:center;-o-align-items:center;align-items:center;height:var(--height);padding-bottom:10px;border-bottom:1px solid 
var(--border-color-solid)}.comfyui-easyuse-selector__header_button{height:var(--height);width:var(--height);border-radius:var(--border-radius);border:1px solid var(--border-color);font-size:11px;background:var(--bg-color);box-shadow:none;cursor:pointer;display:-webkit-box;display:-moz-box;display:-ms-flexbox;display:-webkit-flex;display:flex;-webkit-justify-content:center;-khtml-justify-content:center;-moz-justify-content:center;-ms-justify-content:center;-o-justify-content:center;justify-content:center;-webkit-align-items:center;-khtml-align-items:center;-moz-align-items:center;-ms-align-items:center;-o-align-items:center;align-items:center}.comfyui-easyuse-selector__header_button:hover{filter:brightness(1.2)}.comfyui-easyuse-selector__header_button:hover i{color:var(--error-color)}.comfyui-easyuse-selector__header_button i{font-size:16px;color:var(--input-text);-webkit-transition:all .3s ease-in-out;-khtml-transition:all .3s ease-in-out;-moz-transition:all .3s ease-in-out;-ms-transition:all .3s ease-in-out;-o-transition:all .3s ease-in-out;transition:all .3s ease-in-out}.comfyui-easyuse-selector__header_search{flex:1;margin-left:10px;border-radius:var(--border-radius);border:1px solid var(--border-color);font-size:11px;background:var(--bg-color);padding:0 8px;height:var(--height);display:-webkit-box;display:-moz-box;display:-ms-flexbox;display:-webkit-flex;display:flex;-webkit-align-items:center;-khtml-align-items:center;-moz-align-items:center;-ms-align-items:center;-o-align-items:center;align-items:center}.comfyui-easyuse-selector__header_search i{font-size:16px}.comfyui-easyuse-selector__header_search .search{vertical-align:middle;margin-left:5px;outline:none;resize:none;border:none;-webkit-appearance:none;-moz-appearance:none;appearance:none;box-shadow:none;overflow-y:scroll;background:transparent;width:100%;line-height:var(--height);font-size:12px;color:var(--input-text);-webkit-box-flex:1;-ms-flex:1;-webkit-flex:1;flex:1}.comfyui-easyuse-selector__content{list-style:none;padding:0;margin:0;min-height:150px;height:calc(100% - 28px);overflow-y:auto;overflow-x:hidden}.comfyui-easyuse-selector-item{display:inline-block;position:relative}.comfyui-easyuse-selector-item__tag{display:inline-block;vertical-align:middle;margin-top:8px;margin-right:8px;padding:4px;color:var(--input-text);background-color:var(--bg-color);border-radius:var(--border-radius);border:1px solid var(--border-color);font-size:11px;cursor:pointer;overflow:hidden;position:relative}.comfyui-easyuse-selector-item__tag:hover{filter:brightness(1.2)}.comfyui-easyuse-selector-item__tag.hide{display:none}.comfyui-easyuse-selector-item__tag input{--ring-color: transparent;position:relative;box-shadow:none;border:1px solid var(--border-color);border-radius:2px;background:linear-gradient(135deg,var(--comfy-menu-bg) 0%,var(--comfy-input-bg) 60%)}.comfyui-easyuse-selector-item__tag input[type=checkbox]{display:inline-block;flex-shrink:0;vertical-align:middle;-webkit-appearance:none;-moz-appearance:none;appearance:none;border:1px solid var(--border-color);background-origin:border-box;padding:0;width:1rem;height:1rem;border-radius:4px;color:var(--theme-color-light);-webkit-user-select:none;user-select:none}.comfyui-easyuse-selector-item__tag input[type=checkbox]:checked{border:1px solid var(--theme-color-light);background-color:var(--theme-color-light);background-image:url("data:image/svg+xml,%3csvg viewBox=\'0 0 16 16\' fill=\'white\' xmlns=\'http://www.w3.org/2000/svg\'%3e%3cpath d=\'M12.207 4.793a1 1 0 010 1.414l-5 5a1 1 0 
01-1.414 0l-2-2a1 1 0 011.414-1.414L6.5 9.086l4.293-4.293a1 1 0 011.414 0z\'/%3e%3c/svg%3e")}.comfyui-easyuse-selector-item__tag input span{margin:0 4px;vertical-align:middle}.comfyui-easyuse-selector-preview{position:absolute;left:-180px;top:-110px;z-index:2;border:1px solid var(--border-color);border-radius:var(--border-radius);background:var(--comfy-menu-bg);-webkit-backdrop-filter:saturate(180%) blur(40px);-khtml-backdrop-filter:saturate(180%) blur(40px);-moz-backdrop-filter:saturate(180%) blur(40px);-ms-backdrop-filter:saturate(180%) blur(40px);-o-backdrop-filter:saturate(180%) blur(40px);backdrop-filter:saturate(180%) blur(40px);overflow:hidden}.comfyui-easyuse-selector-preview img{width:150px;height:150px}.comfyui-easyuse-selector-preview__text{font-size:12px;padding:5px 10px;max-width:130px;color:var(--input-text)}.comfyui-easyuse-selector-preview__text h6{line-height:1.15;font-size:10px;margin:10px 0}.comfyui-easyuse-dialog{max-width:600px}.comfyui-easyuse-dialog-title{font-size:18px;font-weight:700;text-align:center;color:var(--input-text);margin:0}.comfyui-easyuse-dialog-images{margin-top:10px;display:flex;flex-wrap:wrap;width:100%;box-sizing:border-box}.comfyui-easyuse-dialog-images img{width:50%;height:auto;cursor:pointer;box-sizing:border-box;filter:brightness(80%)}.comfyui-easyuse-dialog-images img:hover{filter:brightness(100%)}.comfyui-easyuse-dialog-images.selected{border:4px solid var(--success-color)}.comfyui-easyuse-dialog-hidden{display:none;height:0}.comfyui-easyuse-contextmenu{--height: 26px;--padding: 8px;font-family:var(--font-family);position:fixed;top:0;left:0;width:100%;max-width:200px;min-width:100px;min-height:100px;padding:var(--padding) 0;box-shadow:0 0 10px #00000040;background-color:var(--tr-odd-bg-color);border-radius:var(--border-radius);z-index:10;will-change:transform}.comfyui-easyuse-contextmenu-item-divider{height:1px;width:100%;background-color:var(--border-color);margin:var(--padding) 0}.comfyui-easyuse-contextmenu-item-content{height:var(--height);padding:0 12px;cursor:pointer;display:-webkit-box;display:-moz-box;display:-ms-flexbox;display:-webkit-flex;display:flex;-webkit-justify-content:space-between;-khtml-justify-content:space-between;-moz-justify-content:space-between;-ms-justify-content:space-between;-o-justify-content:space-between;justify-content:space-between;-webkit-align-items:center;-khtml-align-items:center;-moz-align-items:center;-ms-align-items:center;-o-align-items:center;align-items:center}.comfyui-easyuse-contextmenu-item-content span{font-size:11px;color:var(--input-text);display:-webkit-box;-webkit-line-clamp:1;overflow:hidden;word-break:break-all;text-overflow:ellipsis;-webkit-box-orient:vertical;-webkit-box-flex:1;-ms-flex:1;-webkit-flex:1;flex:1}.comfyui-easyuse-contextmenu-item-content i{color:var(--input-text);margin-left:4px;display:block;width:0;height:0;border-width:4px 4px 0;border-style:solid;border-color:var(--descrip-text) transparent transparent;-webkit-transform:scaleY(.8) rotate(-90deg);-khtml-transform:scaleY(.8) rotate(-90deg);-moz-transform:scaleY(.8) rotate(-90deg);-ms-transform:scaleY(.8) rotate(-90deg);-o-transform:scaleY(.8) rotate(-90deg);transform:scaleY(.8) rotate(-90deg)}.comfyui-easyuse-contextmenu-item-content:hover{background:var(--theme-color)}.comfyui-easyuse-contextmenu-item.disabled .comfyui-easyuse-contextmenu-item-content span{color:var(--border-color);cursor:default}.comfyui-easyuse-contextmenu-item.disabled 
.comfyui-easyuse-contextmenu-item-content:hover{background:transparent}.comfyui-easyuse-contextmenu-submenu{font-family:var(--font-family);position:absolute;top:0;left:200px;max-width:200px;width:200px;min-width:100px;min-height:--height;padding:var(--padding) 0;box-shadow:0 0 10px #00000040;background-color:var(--tr-odd-bg-color);border-radius:var(--border-radius);z-index:10;will-change:transform}.comfyui-easyuse-contextmenu-model{position:relative}.comfyui-easyuse-contextmenu-model:hover img{display:block;opacity:1}.comfyui-easyuse-contextmenu-model img{position:absolute;z-index:1;right:-175px;top:-75px;width:150px;height:auto;display:none;filter:brightness(70%);-webkit-filter:brightness(70%);opacity:0;-webkit-transition:all .5s cubic-bezier(.55,0,.1,1);-khtml-transition:all .5s cubic-bezier(.55,0,.1,1);-moz-transition:all .5s cubic-bezier(.55,0,.1,1);-ms-transition:all .5s cubic-bezier(.55,0,.1,1);-o-transition:all .5s cubic-bezier(.55,0,.1,1);transition:all .5s cubic-bezier(.55,0,.1,1)}.comfyui-easyuse-slider{width:100%;height:100%;display:-webkit-box;display:-moz-box;display:-ms-flexbox;display:-webkit-flex;display:flex;-webkit-justify-content:space-between;-khtml-justify-content:space-between;-moz-justify-content:space-between;-ms-justify-content:space-between;-o-justify-content:space-between;justify-content:space-between;-webkit-align-items:center;-khtml-align-items:center;-moz-align-items:center;-ms-align-items:center;-o-align-items:center;align-items:center}.comfyui-easyuse-slider-item{height:inherit;min-width:25px;display:-webkit-box;display:-moz-box;display:-ms-flexbox;display:-webkit-flex;display:flex;-webkit-justify-content:center;-khtml-justify-content:center;-moz-justify-content:center;-ms-justify-content:center;-o-justify-content:center;justify-content:center;-webkit-align-items:center;-khtml-align-items:center;-moz-align-items:center;-ms-align-items:center;-o-align-items:center;align-items:center;-webkit-flex-direction:column;-khtml-flex-direction:column;-moz-flex-direction:column;-ms-flex-direction:column;-o-flex-direction:column;flex-direction:column}.comfyui-easyuse-slider-item-input{height:15px;font-size:10px;color:var(--input-text)}.comfyui-easyuse-slider-item-label{height:15px;border:none;color:var(--descrip-text);font-size:8px}.comfyui-easyuse-slider-item-scroll{width:5px;height:calc(100% - 30px);background:var(--comfy-input-bg);border-radius:10px;position:relative}.comfyui-easyuse-slider-item-bar{width:10px;height:10px;background:linear-gradient(to bottom,var(--input-text),var(--descrip-text));border-radius:100%;box-shadow:0 2px 10px var(--bg-color);position:absolute;top:0;left:-2.5px;cursor:pointer;z-index:1}.comfyui-easyuse-slider-item-area{width:100%;border-radius:20px;position:absolute;bottom:0;background:var(--input-text);z-index:0}.comfyui-easyuse-slider-item.positive .comfyui-easyuse-slider-item-label{color:var(--success-color)}.comfyui-easyuse-slider-item.positive .comfyui-easyuse-slider-item-area{background:var(--success-color)}.comfyui-easyuse-slider-item.negative .comfyui-easyuse-slider-item-label{color:var(--error-color)}.comfyui-easyuse-slider-item.negative .comfyui-easyuse-slider-item-area{background:var(--error-color)}.comfyui-easyuse-map{height:100%;background:var(--comfy-menu-bg)}.comfyui-easyuse-map .p-splitter-gutter-handle{height:1px!important}.comfyui-easyuse-map-nodes{height:100%;position:relative}.comfyui-easyuse-map-nodes__header{position:absolute;z-index:2;top:0;left:0;width:100%;padding:.25rem 0 .25rem 
1rem;height:2.7rem;background:var(--comfy-menu-bg);border-bottom:1px solid var(--border-color);display:-webkit-box;display:-moz-box;display:-ms-flexbox;display:-webkit-flex;display:flex;-webkit-justify-content:space-between;-khtml-justify-content:space-between;-moz-justify-content:space-between;-ms-justify-content:space-between;-o-justify-content:space-between;justify-content:space-between;-webkit-align-items:center;-khtml-align-items:center;-moz-align-items:center;-ms-align-items:center;-o-align-items:center;align-items:center;-webkit-backdrop-filter:saturate(180%) blur(40px);-khtml-backdrop-filter:saturate(180%) blur(40px);-moz-backdrop-filter:saturate(180%) blur(40px);-ms-backdrop-filter:saturate(180%) blur(40px);-o-backdrop-filter:saturate(180%) blur(40px);backdrop-filter:saturate(180%) blur(40px)}.comfyui-easyuse-map-nodes__header .title{font-size:13px;color:var(--input-text);font-weight:400;line-height:1.5;-webkit-user-select:none;user-select:none}.comfyui-easyuse-map-nodes__header .toolbar{display:-webkit-box;display:-moz-box;display:-ms-flexbox;display:-webkit-flex;display:flex;-webkit-align-items:center;-khtml-align-items:center;-moz-align-items:center;-ms-align-items:center;-o-align-items:center;align-items:center}.comfyui-easyuse-map-nodes__header .toolbar .icon{font-size:.85rem;margin-left:.25rem;cursor:pointer}.comfyui-easyuse-map-nodes__content{position:relative;padding:2.7rem 0 0;height:100%;overflow:auto}.comfyui-easyuse-map-nodes__content dl .label{padding-left:1rem}.comfyui-easyuse-map-nodes__content ol,.comfyui-easyuse-map-nodes__content dl{list-style-type:none;padding:0;margin:0}.comfyui-easyuse-map-nodes__content ol .toolbar span,.comfyui-easyuse-map-nodes__content dl .toolbar span{font-size:13px}.comfyui-easyuse-map-nodes__content ol .toolbar span.pi-eye,.comfyui-easyuse-map-nodes__content dl .toolbar span.pi-eye{color:var(--input-text)}.comfyui-easyuse-map-nodes__content ol .toolbar span.pi-eye-slash,.comfyui-easyuse-map-nodes__content dl .toolbar span.pi-eye-slash{color:var(--descrip-text)}.comfyui-easyuse-map-nodes__content ol .toolbar span.pi-eye-slash.never,.comfyui-easyuse-map-nodes__content dl .toolbar span.pi-eye-slash.never{opacity:.5}.comfyui-easyuse-map-nodes__content .no_result{display:-webkit-box;display:-moz-box;display:-ms-flexbox;display:-webkit-flex;display:flex;-webkit-justify-content:center;-khtml-justify-content:center;-moz-justify-content:center;-ms-justify-content:center;-o-justify-content:center;justify-content:center;-webkit-align-items:center;-khtml-align-items:center;-moz-align-items:center;-ms-align-items:center;-o-align-items:center;align-items:center;text-align:center}.comfyui-easyuse-map-nodes-group{position:relative;overflow:hidden;width:100%;height:2rem;cursor:default;display:-webkit-box;display:-moz-box;display:-ms-flexbox;display:-webkit-flex;display:flex;-webkit-justify-content:space-between;-khtml-justify-content:space-between;-moz-justify-content:space-between;-ms-justify-content:space-between;-o-justify-content:space-between;justify-content:space-between;-webkit-align-items:center;-khtml-align-items:center;-moz-align-items:center;-ms-align-items:center;-o-align-items:center;align-items:center;pointer-events:auto}.comfyui-easyuse-map-nodes-group .left,.comfyui-easyuse-map-nodes-group 
.right{height:100%;display:-webkit-box;display:-moz-box;display:-ms-flexbox;display:-webkit-flex;display:flex;-webkit-align-items:center;-khtml-align-items:center;-moz-align-items:center;-ms-align-items:center;-o-align-items:center;align-items:center}.comfyui-easyuse-map-nodes-group .left{padding-left:.5rem;margin-right:.25rem;-webkit-box-flex:1;-ms-flex:1;-webkit-flex:1;flex:1}.comfyui-easyuse-map-nodes-group .icon{font-size:.85rem;margin-right:.25rem}.comfyui-easyuse-map-nodes-group .label{display:-webkit-box;display:-moz-box;display:-ms-flexbox;display:-webkit-flex;display:flex;-webkit-align-items:center;-khtml-align-items:center;-moz-align-items:center;-ms-align-items:center;-o-align-items:center;align-items:center;height:100%;width:100%;min-width:80px}.comfyui-easyuse-map-nodes-group .label span{font-size:14px;color:var(--input-text);font-weight:400;line-height:1.5;display:-webkit-box;-webkit-line-clamp:1;overflow:hidden;word-break:break-all;text-overflow:ellipsis;-webkit-box-orient:vertical;-webkit-user-select:none;-khtml-user-select:none;-moz-user-select:none;-ms-user-select:none;-o-user-select:none;user-select:none}.comfyui-easyuse-map-nodes-group:hover{background:var(--content-hover-bg)!important}.comfyui-easyuse-map-nodes-group.active{background:var(--theme-color)!important}.comfyui-easyuse-map-nodes-group.active .label{color:#fff;cursor:default}.comfyui-easyuse-map-nodes-group.never .label{color:var(--descrip-text);opacity:.4}.comfyui-easyuse-map-nodes-group.bypass .label{color:var(--descrip-text)}.comfyui-easyuse-map-nodes-node{height:2rem;cursor:default;display:-webkit-box;display:-moz-box;display:-ms-flexbox;display:-webkit-flex;display:flex;-webkit-justify-content:space-between;-khtml-justify-content:space-between;-moz-justify-content:space-between;-ms-justify-content:space-between;-o-justify-content:space-between;justify-content:space-between;-webkit-align-items:center;-khtml-align-items:center;-moz-align-items:center;-ms-align-items:center;-o-align-items:center;align-items:center;position:relative;overflow:hidden;pointer-events:auto}.comfyui-easyuse-map-nodes-node .label{text-indent:.5rem;font-size:13px;color:var(--input-text);font-weight:400;line-height:1.5;-webkit-box-flex:1;-ms-flex:1;-webkit-flex:1;flex:1;-webkit-user-select:none;-khtml-user-select:none;-moz-user-select:none;-ms-user-select:none;-o-user-select:none;user-select:none;margin-right:.25rem;height:2rem;line-height:2rem;width:100%;display:-webkit-box;-webkit-line-clamp:1;overflow:hidden;word-break:break-all;text-overflow:ellipsis;-webkit-box-orient:vertical}.comfyui-easyuse-map-nodes-node .label.error{color:var(--error-color)}.comfyui-easyuse-map-nodes-node:hover{background:var(--content-hover-bg)!important}.comfyui-easyuse-map-nodes-node.never .label{color:var(--descrip-text);opacity:.5}.comfyui-easyuse-map-nodes-node.bypass .label{color:#f0f;opacity:.5}.comfyui-easyuse-map-nodes .nodes .label{text-indent:1rem}.comfyui-easyuse-toolbar{border-radius:0 12px 12px 0;min-width:50px;height:24px;position:fixed;bottom:85px;left:0;display:flex;align-items:center;z-index:1000;background-color:var(--comfy-menu-bg);border:1px solid var(--bg-color);-webkit-backdrop-filter:saturate(180%) blur(40px);-khtml-backdrop-filter:saturate(180%) blur(40px);-moz-backdrop-filter:saturate(180%) blur(40px);-ms-backdrop-filter:saturate(180%) blur(40px);-o-backdrop-filter:saturate(180%) blur(40px);backdrop-filter:saturate(180%) blur(40px)}.comfyui-easyuse-toolbar-icon{height:100%;padding:0 
4px;display:flex;justify-content:center;align-items:center;font-size:12px;color:#fff;transition:all .3s ease-in-out;cursor:pointer}.comfyui-easyuse-toolbar-icon svg{width:14px;height:14px}.comfyui-easyuse-toolbar-icon:hover.group{color:var(--warning-color)}.comfyui-easyuse-toolbar-icon:hover.rocket{color:var(--theme-color)}.comfyui-easyuse-toolbar-nodes-map{position:absolute;top:50px;left:10px;width:200px;border-radius:12px;min-height:100px;max-height:600px;color:var(--descrip-text);background-color:var(--comfy-menu-bg);border:1px solid var(--bg-color);-webkit-backdrop-filter:saturate(180%) blur(40px);-khtml-backdrop-filter:saturate(180%) blur(40px);-moz-backdrop-filter:saturate(180%) blur(40px);-ms-backdrop-filter:saturate(180%) blur(40px);-o-backdrop-filter:saturate(180%) blur(40px);backdrop-filter:saturate(180%) blur(40px);z-index:399;padding-top:0;overflow:hidden}.comfyui-easyuse-toolbar-nodes-map .no-result-placeholder-content{-webkit-transform:scale(.8);-khtml-transform:scale(.8);-moz-transform:scale(.8);-ms-transform:scale(.8);-o-transform:scale(.8);transform:scale(.8)}.comfyui-easyuse-toolbar-nodes-map .comfyui-easyuse-map-nodes{min-height:100px;max-height:600px}.comfyui-easyuse-toolbar-nodes-map .comfyui-easyuse-map-nodes__header:before{content:"…";position:absolute;left:.25rem;top:2.75rem;transform:translateY(-2rem) rotate(90deg);width:.5rem;height:.5rem;display:inline-block;overflow:hidden;line-height:5px;padding:3px 4px;cursor:move;vertical-align:middle;font-size:12px;font-family:sans-serif;letter-spacing:2px;color:var(--drag-text);z-index:3;text-shadow:1px 0 1px black}.comfyui-easyuse-toolbar-nodes-map .comfyui-easyuse-map-nodes__header .title{cursor:move;padding-left:.25rem}.comfyui-easyuse-toolbar-nodes-map .comfyui-easyuse-map-nodes__content{max-height:calc(600px - 2.7rem)}.no-result-placeholder{display:flex;justify-content:center;align-items:center;height:100%}.no-result-placeholder-content{text-align:center;display:-webkit-box;display:-moz-box;display:-ms-flexbox;display:-webkit-flex;display:flex;-webkit-flex-direction:column;-khtml-flex-direction:column;-moz-flex-direction:column;-ms-flex-direction:column;-o-flex-direction:column;flex-direction:column;-webkit-justify-content:space-between;-khtml-justify-content:space-between;-moz-justify-content:space-between;-ms-justify-content:space-between;-o-justify-content:space-between;justify-content:space-between}.no-result-placeholder .p-card{background-color:transparent!important;box-shadow:none;text-align:center}.no-result-placeholder h3{color:var(--input-text);margin-bottom:.5rem}.no-result-placeholder p{color:var(--descrip-text);margin-bottom:1rem;margin-top:0}#comfyui-easyuse-components{position:absolute;top:0;left:0;z-index:3}.comfyui-easyuse{--p-datatable-header-cell-padding: .15rem 1rem;--p-datatable-body-cell-padding: .15rem 1rem;--p-primary-color: var(--theme-color-light)!important;--border-color-solid: var(--border-color);--border-radius: 8px}.comfyui-easyuse.dark-theme{--fg-color: #fff;--bg-color: #242427;--content-bg:#18181b;--content-fg:#fff;--content-hover-bg: #27272a;--comfy-menu-bg: rgba(24,24,27,.9);--comfy-input-bg: #242427;--input-text: #ffffff;--descrip-text: #71717a;--drag-text: #ccc;--error-text: #ff4444;--border-color: #3f3f46;--border-color-solid: #2a2a2e;--tr-even-bg-color: rgba(28,28,28,.9);--tr-odd-bg-color: rgba(19,19,19,.9)}.comfyui-easyuse ::-webkit-scrollbar{width:0em}.comfyui-easyuse ::-webkit-scrollbar-track{background-color:transparent}.comfyui-easyuse 
::-webkit-scrollbar-thumb{background-color:transparent;border-radius:2px}.comfyui-easyuse ::-webkit-scrollbar-thumb:hover{background-color:transparent}.comfyui-easyuse body{font-family:var(--font-family)!important;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.comfyui-easyuse textarea{font-family:var(--font-family)!important}.comfyui-easyuse hr{border:1px solid var(--border-color)}.comfyui-easyuse .comfy-multiline-input{background-color:transparent;border:1px solid var(--border-color-solid);border-radius:var(--border-radius);padding:8px;font-size:12px}.comfyui-easyuse #comfy-settings-dialog{border:1px solid var(--border-color);background:transparent;-webkit-backdrop-filter:blur(8px) brightness(120%);backdrop-filter:blur(8px) brightness(120%);box-shadow:none}.comfyui-easyuse .comfy-modal{border:1px solid var(--border-color);box-shadow:none;-webkit-backdrop-filter:blur(8px) brightness(120%);backdrop-filter:blur(8px) brightness(120%)}.comfyui-easyuse .cm-title{background-color:transparent!important}.comfyui-easyuse .cm-notice-board{border-radius:10px!important;border:1px solid var(--border-color)!important}.comfyui-easyuse .cm-menu-container{margin-bottom:50px!important}.comfyui-easyuse .cn-manager-custom_milk_white .tg-column-name,.comfyui-easyuse .cmm-manager-custom_milk_white .tg-column-name{color:var(--input-text)}.comfyui-easyuse .cn-manager-custom_milk_white .tg-body-message,.comfyui-easyuse .cmm-manager-custom_milk_white .tg-body-message{text-align:center;color:var(--descrip-text)!important}.comfyui-easyuse .cn-manager-custom_milk_white .tg-body-frame .tg-cell,.comfyui-easyuse .cmm-manager-custom_milk_white .tg-body-frame .tg-cell{color:var(--input-text)}.comfyui-easyuse .cn-manager-custom_milk_white .tg-body-frame .cn-node-name a,.comfyui-easyuse .cn-manager-custom_milk_white .tg-body-frame .cmm-node-name a,.comfyui-easyuse .cmm-manager-custom_milk_white .tg-body-frame .cn-node-name a,.comfyui-easyuse .cmm-manager-custom_milk_white .tg-body-frame .cmm-node-name a{color:var(--theme-color)!important}.comfyui-easyuse .comfy-menu{border-radius:16px;box-shadow:0 0 1px var(--descrip-text);-webkit-backdrop-filter:blur(8px) brightness(120%);backdrop-filter:blur(8px) brightness(120%)}.comfyui-easyuse .comfy-menu button.comfy-settings-btn{font-size:12px}.comfyui-easyuse .comfy-menu-btns{margin-bottom:4px}.comfyui-easyuse .comfy-menu button,.comfyui-easyuse .comfy-modal button{font-size:14px;padding:4px 0;margin-bottom:4px}.comfyui-easyuse .comfy-menu-btns button,.comfyui-easyuse .comfy-list-actions button{font-size:10px}.comfyui-easyuse .comfy-menu>button,.comfyui-easyuse .comfy-menu-btns button,.comfyui-easyuse .comfy-menu .comfy-list button,.comfyui-easyuse .comfy-modal button{border-width:1px}.comfyui-easyuse #comfy-dev-save-api-button{justify-content:center}.comfyui-easyuse #queue-button{position:relative;overflow:hidden;min-height:30px;z-index:1}.comfyui-easyuse #queue-button:after{clear:both;content:attr(data-attr);background:green;color:#fff;width:var(--process-bar-width);height:100%;position:absolute;top:0;left:0;z-index:0;text-align:center;display:flex;justify-content:center;align-items:center}.comfyui-easyuse #shareButton{background:linear-gradient(to left,var(--theme-color),var(--theme-color-light))!important;color:#fff!important}.comfyui-easyuse .litegraph.litecontextmenu{--height: 24px;--padding: 6px;font-family:var(--font-family);padding:var(--padding) 0;border-radius:var(--border-radius);-webkit-backdrop-filter:blur(8px) 
brightness(120%);backdrop-filter:blur(8px) brightness(120%)}.comfyui-easyuse .litegraph.litecontextmenu .litemenu-title{padding:var(--padding)}.comfyui-easyuse .litegraph.litecontextmenu>div:first-child.litemenu-title{margin-top:-6px}.comfyui-easyuse .litegraph.litecontextmenu .submenu{height:var(--height);line-height:var(--height);padding:0 18px 0 12px;margin:0;background:transparent!important}.comfyui-easyuse .litegraph.litecontextmenu .submenu.has_submenu{border-right:none;position:relative}.comfyui-easyuse .litegraph.litecontextmenu .submenu.has_submenu:after{content:"";display:block;width:0;height:0;border-width:4px 4px 0;border-style:solid;border-color:var(--input-text) transparent transparent;transform:translateY(-50%) rotate(-90deg);top:50%;position:absolute}.comfyui-easyuse .litegraph.litecontextmenu .submenu.separator{height:1px;width:100%;background-color:var(--border-color)!important;margin:var(--padding) 0;border:none}.comfyui-easyuse .litegraph.litecontextmenu .submenu:last-child.separator{display:none}.comfyui-easyuse .litegraph.litecontextmenu .submenu:hover:not(.separator){background:var(--theme-color)!important}.comfyui-easyuse .litegraph.lite-search-item{background-color:var(--comfy-input-bg)!important;filter:brightness(100%)}.comfyui-easyuse .litegraph.lite-search-item:hover{filter:brightness(120%);color:var(--input-text)}.comfyui-easyuse .graphdialog{-webkit-backdrop-filter:blur(8px) brightness(120%);backdrop-filter:blur(8px) brightness(120%)}.comfyui-easyuse .graphdialog button{display:-webkit-box;display:-moz-box;display:-ms-flexbox;display:-webkit-flex;display:flex;-webkit-justify-content:center;-khtml-justify-content:center;-moz-justify-content:center;-ms-justify-content:center;-o-justify-content:center;justify-content:center;-webkit-align-items:center;-khtml-align-items:center;-moz-align-items:center;-ms-align-items:center;-o-align-items:center;align-items:center}.comfyui-easyuse .comfyui-menu{border-bottom:1px solid var(--bg-color)}.comfyui-easyuse .side-tool-bar-container{border-right:1px solid var(--bg-color)}.comfyui-easyuse .comfy-modal-content{width:100%}.comfyui-easyuse-poseEditor{display:-webkit-box;display:-moz-box;display:-ms-flexbox;display:-webkit-flex;display:flex;-webkit-justify-content:center;-khtml-justify-content:center;-moz-justify-content:center;-ms-justify-content:center;-o-justify-content:center;justify-content:center;-webkit-align-items:center;-khtml-align-items:center;-moz-align-items:center;-ms-align-items:center;-o-align-items:center;align-items:center;text-align:center;font-size:18px;line-height:1.5}#comfyui-menu-monitor{width:120px}#comfyui-menu-monitor #crystools-monitor-container{margin:0 auto!important}#comfyui-menu-monitor #crystools-monitor-container>div{margin:2px 0!important}#comfyui-menu-monitor #crystools-monitor-container>div>div>div{padding:0 4px!important}')),document.head.appendChild(e)}}catch(t){console.error("vite-plugin-css-injected-by-js",t)}}(); +var e;import{$ as s,l as t,a as l,t as a,b as o,s as n,g as i,c as r,u,N as d,j as c,d as p,e as v,f as m,h as g}from"./assets/extensions-Wh9Wq1Mi.js";import{r as y,w as h,e as f,b as A,c as w,I as S,d as x,F as M,C as E,J as b,K as C,L as _,M as k,z as B,G as I,o as N,N as H,D as Q,O as z,E as j,P as D,x as R,Q as L,R as Z,S as Y}from"./assets/vue-DjzFgvDF.js";import{d as O,s as V,a as P,b as G,c as W,T as U,e as F}from"./assets/vendor-DT1J-jWa.js";import{c as T}from"./assets/lodash-CZi7izHi.js";import{P as 
X}from"./assets/primevue-BSs2m5Wu.js";import"./assets/primeuix-Be3xdh47.js";const J=O("graphStore",{state:e=>({selectors:[],selectors_styles:{},seg_selectors:[],slider_controls:[]}),actions:{setSelectors(e){this.selectors=T(e)},setStyles(e,s){this.selectors_styles[e]||(this.selectors_styles[e]=s)},setSegSelectors(e){this.seg_selectors=T(e)},setSliderControls(e){this.slider_controls=T(e)}}}),q=["data-id"],K=[x("i",{class:"mdi mdi-trash-can"},null,-1)],$=x("i",{class:"mdi mdi-magnify"},null,-1),ee=["placeholder"],se=["onMouseenter","onMouseleave"],te=["onClick"],le=["name","checked"],ae=["src"],oe={key:0},ne=x("span",{class:"comfyui-easyuse-success"},"positive:",-1),ie={key:1},re=x("span",{class:"comfyui-easyuse-error"},"negative:",-1),ue="comfyui-easyuse-selector",de={__name:"stylesSelector",props:{id:{type:String|Number,default:""},type:{type:String,default:""},selectedStyles:{type:Array,default:[]},show:{type:Boolean,default:!1}},emits:["chooseStyle"],setup(e,{emit:o}){const n=e,i=J(),{selectors_styles:r}=V(i),u=y([]);h((e=>n.type),(async e=>{u.value=[],e&&await(async e=>{if(r.value[n.type])return!0;const t=await l.fetchApi(`/easyuse/prompt/styles?name=${e}`);if(200===t.status){let e=(await t.json()).map(((e,s)=>(e.index=s,e)));return await i.setStyles(n.type,e),!0}return a.error(s("Get styles list Failed")),!1})(e)&&c()}),{immediate:!0});const d=o,c=e=>{const s=n.selectedStyles,t=T(r.value[n.type]);u.value=t.sort(((e,s)=>e.index-s.index)).sort(((e,t)=>s.includes(t.name)-s.includes(e.name)))},p=y(""),v=e=>e.toLowerCase(),m=f({}),g=e=>{m.src="",m.name="",m.positive="",m.negative=""},N=async e=>{const s=await l.fetchApi(`/easyuse/prompt/styles/image?name=${e}&styles_name=${n.type}`);if(200===s.status){const t=await s.text();if(t.startsWith("http"))return 
t;return`/easyuse/prompt/styles/image?name=${e}&styles_name=${n.type}`}},H=e=>{e.target.src="data:image/jpeg;base64,/9j/4QAYRXhpZgAASUkqAAgAAAAAAAAAAAAAAP/sABFEdWNreQABAAQAAAA8AAD/4QNLaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wLwA8P3hwYWNrZXQgYmVnaW49Iu+7vyIgaWQ9Ilc1TTBNcENlaGlIenJlU3pOVGN6a2M5ZCI/PiA8eDp4bXBtZXRhIHhtbG5zOng9ImFkb2JlOm5zOm1ldGEvIiB4OnhtcHRrPSJBZG9iZSBYTVAgQ29yZSA5LjEtYzAwMSA3OS4xNDYyODk5Nzc3LCAyMDIzLzA2LzI1LTIzOjU3OjE0ICAgICAgICAiPiA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPiA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtbG5zOnhtcE1NPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvbW0vIiB4bWxuczpzdFJlZj0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL3NUeXBlL1Jlc291cmNlUmVmIyIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgMjUuMSAoMjAyMzA5MDUubS4yMzE2IDk3OWM4NmQpICAoV2luZG93cykiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6RjA3NEU1QzNCNUJBMTFFRUExMUVDNkZDRjI0NzlBN0QiIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6RjA3NEU1QzRCNUJBMTFFRUExMUVDNkZDRjI0NzlBN0QiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDpGMDc0RTVDMUI1QkExMUVFQTExRUM2RkNGMjQ3OUE3RCIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDpGMDc0RTVDMkI1QkExMUVFQTExRUM2RkNGMjQ3OUE3RCIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/Pv/uAA5BZG9iZQBkwAAAAAH/2wCEAAYEBAQFBAYFBQYJBgUGCQsIBgYICwwKCgsKCgwQDAwMDAwMEAwODxAPDgwTExQUExMcGxsbHB8fHx8fHx8fHx8BBwcHDQwNGBAQGBoVERUaHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fH//AABEIAIAAgAMBEQACEQEDEQH/xACLAAEAAgMBAQEAAAAAAAAAAAAABAUCAwYBBwgBAQADAQEBAAAAAAAAAAAAAAABAgMEBQYQAAEEAgECAwUHAwUAAAAAAAEAAgMEEQUhEgYxEwdBYSIyFFFxgVJyIxWRoTOxwdFiJBEBAAICAQQBBAIDAAAAAAAAAAECEQMxIUESBBOB0SIyUXGCIwX/2gAMAwEAAhEDEQA/AP1SgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICDXJYgj+d4afsVopM8KWvEcy8it1pXdMcjXO/Lnn+im2u0cwV2VniW1UXEBAQEBAQEBAQEBAQRNlc+mgyDh7zhv+5WunX5Sw37fCHM2dh48r06ank7N6rn2Ja7qa4hw5BBwQV010uK+/DsO29v/J68SOI86Jxjl95HIP4gryPc0fHfHaXu+j7Py68zzHSVquV2iAgICAgICAgICDyTr6HdHz4PTnwypjnqic46OauNbY6mGX99p+L8w9xaeV6OufHt0eXtr59M9VFb194E9LmuH3kf6rv17avO2ets7YVcuuuk/uOa3PgBlxP4BdMbq9nLPqbJ5xDbSM9azFXpyujuSO+Bo5kcf0NPyj25We2YtEzaPxdfr6519Kz+UvqEIlELBKQZQ0eYRwC7HOPxXzVsZ6cPpK5x15ZKEiAgICAgICAgICCNc1tG40CzA2XHg4j4h9zhyFpr22p+s4Z7NNL/ALRlTX+1dVFBJOJrcTI2lxZHYcBx+sldWv3bzOMVn6fZy39OkRnNo+v3aoOx9JOxks8tqwHDPS+1IW8+IzGWZVrf9DZHSMR/j9yvo656zMz9V1rdLqdYwsoVIqwd87mNAc79Tvmd+JXJt332ftMy6temlP1jCasmggICAgICAgICAgwlmiib1SPDB7zhWrWZ4VtaI5QXb2l5ojYHvLjjIGB/dbR61sZlhPtVziFb3PYdd0luCvAZbXludVZ1huZQPgyTx4/atvWj4rxaZ6d/6Ye1/t1zSI6zx/bzti5YqaOpBeg8u41n/oa14cA4ccH7lPs1jZebVn8eyPUtOrXFbR+XdYx9xa90pjeXROaSCXDj+oysZ9S+Mx1bR7uvOJ6LGOWKVgfG8PafAtOQueazHLqraJjMMlCRAQEBAQEBAQRLNp4HTFx/2/4WtKR3Y32T2Udl8j3knk/aeSu6kREPPvaZlpY3DmyY8DyrzPZWv8tkvmFv7bg12RyR1DGeeMj2KnjE9JaeUx1hi1sgaet/U7JIOMcE8Dj7FMREcK2zPKMasr5XO6fmOVt5xEOadVplYU45IAOhxa72kLm2TFuXXqrNeF1WtlwDZeHfmHguO+vHDupszylLJsICAgICAg8cMjCQiYR5IVpFmc1Q5qLXHPgfbhbV2MLaYlqNQAYA4V/kV+PDA1fcp81fjYurtYMu4CmLZRNYhtZWBAI8CqzdaKN8df3LObtIokxwe5ZzZrFUloIGFnLWHqhIgICAgICAgxMbSpyjDAwAq3kr4MTWCnzR4MX02PGHDISNmETqieWba7QABwB4KJumKNgjaFXK0VZYChYQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEHzvuv1G7k1W9s6/Xamtaq15oaonmnsCR008HntaI4K8/s4HOeEGXZXqTud7uqtG7r6kNa5HdMU9aaw9zZde+FkrHsnr1+M2MZBPIKDRe9cO2K2mjs/V0m7X61lWzq32W+ZFEbfkSSO4B+GL9zw4QWm99TqFVmjsaSu7fUtxeNM2aTmSMBbHI9zWHqHVJlnDTxjPKCJL6sea502t1D7Ouhr0rNqxNM2CSNuwnkgjAi6ZOotdEc/Egibf1j/j+7JNL9DWdWg84TWn2ywtdFKyMZb5Tg0nLyG55x48IJ3bXqe/ea/a26dFtyTXtldDUqyOdNL5VqaDHS5gwXRxMe3xz1Y9iDKP1Sa7uefUnR7TyYqUVoEU5jY6pJZIz1RY4ZiMYd7TkexBA749Wr2gtCKlrIpGs17NjK29LLWmPmMsyiF
kbIZsPEdKQu6y0eAQWdD1E2L93W1tzRyCDY3paev2NaxVlhIjidMfMb5vmse1kbi9pZ7MeKDt0BAQEBAQfEPU+lFY2++q2K1uSSezTnrReVsTTmiZVYHOd9LVuQyubIwANkbxz4FA7FsQ0NrrLNXX7N0eo1+3darGDYPjb5j6prxVRajjDetsRAjj4yM4CDre2uxO7q2hqtm7nua6w9rp5tfXgoSxwyTOMr42PlrPe4Nc8jJJQRDb3Oz1fYFrcV7As0mu3u7nbWkBZ9LSfG5nlxs/yySWRiNozwcBBx9EXadGTXz62+LG41+jZS6adhzS6vfnlkEjgzEZax7T8ePFBu3nbPdUXqJZsw6S5cqbCW1YdIY2lxhhfEGMjfHtoG9HxucwPEZy4/A7kMC87aq2Kmv7mdvxuqGmklFjUU4G2Yp21rdyW00t+kJkFl88pY9vDgwNDvEoK9np73FBcHdkrt2+rZd5FjQx7O0b8WvbzDKZhN1SSse573QdeAHkN+Ichj3p2rBvZq9vUnY2tcNQPqpZYZpJ44GxXqzHdVlzZZpib73mLHViI85c1BZ6OpsIe/6/XSuntevdsz6+8+pI0/yM1dtWVr2Z644P8rmyuj6S53jxkh9aQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBB/9k="};return(l,a)=>(A(),w("div",{class:M(ue+` ${ue}-styles`),"data-id":e.id,onMouseleave:g},[u.value.length>0&&e.show?(A(),w(S,{key:0},[x("div",{class:M(ue+"__header")},[x("div",{class:M(ue+"__header_button"),onClick:a[0]||(a[0]=e=>(d("chooseStyle",[]),void(p.value="")))},K,2),x("div",{class:M(ue+"__header_search")},[$,E(x("textarea",{class:"search","onUpdate:modelValue":a[1]||(a[1]=e=>p.value=e),dir:"ltr",rows:1,placeholder:C(s)("Type here to search styles ...")},null,8,ee),[[b,p.value]])],2)],2),x("div",{class:M(ue+"__content"),onMouseleave:c},[(A(!0),w(S,null,_(u.value,((s,l)=>(A(),w("div",{class:M(ue+"-item"),key:l,onMouseenter:e=>(async e=>{if(!e.imageSrc){if(e.imageLoading)return;e.imageLoading=!0;const s=await N(e.imgName).finally((()=>e.imageLoading=!1));e.imageSrc=s}m.name="zh-CN"==t&&e.name_cn?e.name_cn:e.name,m.positive=e.prompt,m.negative=e.negative_prompt,m.src=e.imageSrc})(s),onMouseleave:k((e=>g()),["stop"])},[x("span",{class:M([ue+"-item__tag",{hide:!(e.selectedStyles.includes(s.name)||-1!=v(s.name).indexOf(v(p.value))||s.name_cn&&-1!=v(s.name_cn).indexOf(v(p.value)))}]),onClick:e=>(e=>{let s=n.selectedStyles;s.includes(e.name)?s=s.filter((s=>s!==e.name)):s.push(e.name),d("chooseStyle",s)})(s)},[x("input",{type:"checkbox",name:s.name,checked:e.selectedStyles.includes(s.name)},null,8,le),x("span",null,B("zh-CN"==C(t)&&s.name_cn?s.name_cn:s.name),1)],10,te)],42,se)))),128))],34),(null==m?void 0:m.src)?(A(),w("div",{key:0,class:M(ue+"-preview")},[x("img",{src:m.src,ref:"image",alt:"preview",onError:H},null,40,ae),x("div",{class:M(ue+"-preview__text")},[x("b",null,B(m.name),1),x("div",{class:M(ue+"-preview__prompt")},[m.positive?(A(),w("h6",oe,[ne,x("span",null,B(m.positive),1)])):I("",!0),m.negative?(A(),w("h6",ie,[re,x("span",null,B(m.negative),1)])):I("",!0)],2)],2)],2)):I("",!0)],64)):I("",!0)],42,q))}},ce=["data-id"],pe=["onClick"],ve=["name","checked"],me="comfyui-easyuse-selector",ge={__name:"segSelector",props:{id:{type:String|Number,default:""},type:{type:String,default:""},selected:{type:Array,default:[]},show:{type:Boolean,default:!1}},emits:["select"],setup(e,{emit:t}){const l=e,a=y([]);h((e=>l.type),(async e=>{switch(e){case"selfie_multiclass_256x256":a.value=["Background","Hair","Body","Face","Clothes","Others"];break;case"human_parsing_lip":a.value=["Background","Hat","Hair","Glove","Sunglasses","Upper-clothes","Dress","Coat","Socks","Pants","Jumpsuits","Scarf","Skirt","Face","Left-arm","Right-arm","Left-leg","Right-leg","Left-shoe","Right-shoe"]}}),{immediate:!0});const o=t;return(t,n)=>{var i;return A(),w("div",{class:M(me+` ${me}-seg`),"data-id":e.id},[(null==(i=a.value)?void 0:i.length)>0&&e.show?(A(!0),w(S,{key:0},_(a.value,((t,a)=>(A(),w("div",{class:M(me+"-item"),key:a},[x("span",{class:M(me+"-item__tag"),onClick:e=>(e=>{let 
s=T(l.selected);s.includes(e)?s=s.filter((s=>s!==e)):s.push(e),o("select",s)})(a)},[x("input",{type:"checkbox",name:t,checked:e.selected.includes(a)},null,8,ve),x("span",null,B(C(s)(t)),1)],10,pe)],2)))),128)):I("",!0)],10,ce)}}},ye=["data-id"],he=["onMousedown","onDblclick"],fe="comfyui-easyuse-slider",Ae="ipadapter layer weights",we={__name:"sliderControl",props:{id:{type:String|Number,default:""},mode:{type:String,default:""},type:{type:String,default:""},values:{type:Array,default:[]},show:{type:Boolean,default:!1}},emits:["changeValues","showSlider"],setup(e,{emit:s}){const t=e,l=s,a=(e,s,t)=>(e-s)/(t-s)*100,o=(e,s,l=void 0)=>{if(t.mode===Ae){let t={3:2.5,6:1}[s]||0;return{default:12==e?t:0,min:-1,max:3,step:.05,value:void 0!==l?l:12==e?t:0,top:void 0!==l?100-a(l,-1,3)+"%":null,height:void 0!==l?a(l,-1,3)+"%":null}}};h((e=>t.mode),(async(e,s)=>{var a;if(e!==s&&e===Ae)if(!s&&(null==(a=t.values)?void 0:a.length)>0){const e=t.values.map((e=>{const s=e.split(":");return o(t.values.length,s[0],parseFloat(s[1]))}));await l("changeValues",e)}else{let e="sd1"==t.type?16:12,s=Array.from({length:e},((s,t)=>o(e,t)));await l("changeValues",s)}l("showSlider")}),{immediate:!0}),h((e=>t.type),((e,s)=>{if(e!=s&&t.mode==Ae){let e="sd1"==t.type?16:12,s=Array.from({length:e},((s,t)=>o(e,t)));l("changeValues",s)}}));const n=y(null),i=y(null);return N((()=>{document.onmouseup=e=>document.onmousemove=null})),(s,o)=>{var r;return A(),w("div",{class:M(fe),"data-id":e.id},[(null==(r=e.values)?void 0:r.length)>0&&e.show?(A(!0),w(S,{key:0},_(e.values,((s,o)=>(A(),w("div",{class:M([fe+"-item",{positive:3==o&&"sdxl"==e.type&&e.mode==Ae},{negative:6==o&&"sdxl"==e.type&&e.mode==Ae}]),key:o},[x("div",{class:M(fe+"-item-input")},B(s.value),3),x("div",{class:M(fe+"-item-scroll"),ref_for:!0,ref_key:"scroll",ref:n},[x("div",{class:M(fe+"-item-bar"),ref_for:!0,ref_key:"bar",ref:i,style:H({top:s.top||100-a(s.default,s.min,s.max)+"%"}),onMousedown:e=>((e,s,a)=>{let o=e||window.event,r=n.value[a],u=i.value[a],d=T(t.values),c=o.clientY-u.offsetTop;document.onmousemove=e=>{let t=(e||window.event).clientY-c;t<0?t=0:t>r.offsetHeight-u.offsetHeight&&(t=r.offsetHeight-u.offsetHeight);let o=(s.max-s.min)/s.step,n=(r.offsetHeight-u.offsetHeight)/o;t=Math.round(t/n)*n;const i=Math.floor(t/(r.offsetHeight-u.offsetHeight)*100)+"%",p=Math.floor((r.offsetHeight-u.offsetHeight-t)/(r.offsetHeight-u.offsetHeight)*100)+"%",v=parseFloat(parseFloat(s.max-(s.max-s.min)*(t/(r.offsetHeight-u.offsetHeight))).toFixed(2));d[a]={...d[a],top:i,height:p,value:v},l("changeValues",d),window.getSelection?window.getSelection().removeAllRanges():document.selection.empty()}})(e,s,o),onDblclick:e=>((e,s,a)=>{let o=T(t.values);o[a]={...o[a],top:null,height:null,value:s.default},l("changeValues",o)})(0,s,o)},null,46,he),x("div",{class:M(fe+"-item-area"),style:H({height:s.height||a(s.default,s.min,s.max)+"%"})},null,6)],2),x("div",{class:M(fe+"-item-label")},[x("span",null,B(s.label),1)],2)],2)))),128)):I("",!0)],8,ye)}}},Se={__name:"index",setup(e){const s=J(),{selectors:t,seg_selectors:l,slider_controls:a}=V(s),u=y({}),d=async e=>{var l,a,o,r,d,c,p;await n(1);const v=i(e,"styles"),m=(null==(l=e.properties.values)?void 0:l.length)>0?e.properties.values:[];let g=T(t.value);g.push({id:e.id,type:v.value,value:m,show:!1});const y=g.length-1;await s.setSelectors(g),(null==(a=e.flags)?void 0:a.collapsed)&&e.collapse();let h=null==(d=null==(r=null==(o=u.value[e.id])?void 0:o._)?void 0:r.vnode)?void 0:d.el;if(!h)return;let 
f=e.addDOMWidget("select_styles","btn",h);e.properties.values||e.setProperty("values",[]),g[y].show=!0,await s.setSelectors(g);let A=v.value;Object.defineProperty(v,"value",{set:t=>{A=t,g[y].type=t,e.properties.values=[],g[y].value=[],s.setSelectors(g)},get:e=>A}),Object.defineProperty(f,"value",{set:e=>{setTimeout((t=>{g[y].value=e.split(","),s.setSelectors(g)}),150)},get:s=>{var l,a;return e.properties.values=(null==(a=null==(l=t.value)?void 0:l[y])?void 0:a.value)||[],e.properties.values.join(",")}}),((null==(c=e.size)?void 0:c[0])<150||(null==(p=e.size)?void 0:p[1])<150)&&e.setSize([425,500]);const w=e.onRemoved;e.onRemoved=function(){if(w&&(null==w||w.apply(this,arguments)),void 0!==t.value.findIndex((s=>s.id==e.id))){let e=T(t.value);e.splice(y,1),s.setSelectors(e)}return w}},c=y({}),p=async e=>{var t,a,o,u,d;await n(1);const p=i(e,"method"),v=(null==(t=e.properties.values)?void 0:t.length)>0?e.properties.values:[];let m=T(l.value);m.push({id:e.id,type:p.value,value:v,show:!1});const g=m.length-1;await s.setSegSelectors(m),(null==(a=e.flags)?void 0:a.collapsed)&&e.collapse();let y=null==(d=null==(u=null==(o=c.value[e.id])?void 0:o._)?void 0:u.vnode)?void 0:d.el;if(!y)return;let h=e.addDOMWidget("mask_components","btn",y);e.properties.values||e.setProperty("values",[]),m[g].show=!0,await s.setSegSelectors(m);let f=p.value;Object.defineProperty(p,"value",{set:t=>{f=t,m[g].type=t,e.properties.values=[],m[g].value=[],r(e,i(e,"confidence"),"selfie_multiclass_256x256"===f),e.setSize([300,"selfie_multiclass_256x256"===f?260:500]),s.setSegSelectors(m)},get:e=>f}),Object.defineProperty(h,"value",{set:e=>{setTimeout((t=>{m[g].value=e.split(","),s.setSegSelectors(m)}),150)},get:s=>{var t;return e.properties.values=(null==(t=l.value)?void 0:t[g].value)||[],e.properties.values.join(",")}}),r(e,i(e,"confidence"),"selfie_multiclass_256x256"===f),e.setSize([300,"selfie_multiclass_256x256"===f?260:500]);const A=e.onRemoved;e.onRemoved=function(){if(A&&(null==A||A.apply(this,arguments)),void 0!==l.value.findIndex((s=>s.id==e.id))){let e=T(l.value);e.splice(g,1),s.setSegSelectors(e)}return A}},v=y({}),m=async e=>{var t,l,o,r,u;await n(1);const d=i(e,"mode"),c=i(e,"model_type"),p=(null==(t=e.properties.values)?void 0:t.length)>0?e.properties.values:[];(null==(l=e.flags)?void 0:l.collapsed)&&e.collapse();let m=T(a.value);m.push({id:e.id,type:c.value,mode:d.value,value:p,show:!1});const g=m.length-1;await s.setSliderControls(m);let y=null==(u=null==(r=null==(o=v.value[e.id])?void 0:o._)?void 0:r.vnode)?void 0:u.el;if(!y)return;let h=e.addDOMWidget("values","btn",y);e.properties.values||e.setProperty("values",[]),Object.defineProperty(h,"value",{set:function(){},get:s=>{var t;const l=(null==(t=a.value)?void 0:t[g].value)||[];return e.properties.values=l.map(((e,s)=>`${s}:${e.value}`)),e.properties.values.join(",")}}),e.setSize("sdxl"==c.value?[375,320]:[455,320]),c.callback=t=>{m=T(a.value),m[g].type!=t&&(e.setSize("sdxl"==t?[375,320]:[455,320]),m[g].value=[],m[g].type=t,s.setSliderControls(m))};const f=e.onRemoved;e.onRemoved=function(){if(f&&(null==f||f.apply(this,arguments)),void 0!==a.value.findIndex((s=>s.id==e.id))){let e=T(a.value);e.splice(g,1),s.setSliderControls(e)}return f}};return N((e=>{o.registerExtension({name:"Comfy.EasyUse.Components",async beforeRegisterNodeDef(e,s){const t=e.prototype.onNodeCreated;"easy stylesSelector"==s.name&&(e.prototype.onNodeCreated=async function(){return t&&(null==t||t.apply(this,arguments)),await d(this),t}),"easy 
humanSegmentation"==s.name&&(e.prototype.onNodeCreated=async function(){return t&&(null==t||t.apply(this,arguments)),await p(this),t}),"easy sliderControl"==s.name&&(e.prototype.onNodeCreated=async function(){return t&&(null==t||t.apply(this,arguments)),await m(this),t}),"easy poseEditor"==s.name&&(e.prototype.onNodeCreated=async function(){t&&(null==t||t.apply(this,arguments));const e=document.createElement("div");return e.className="comfyui-easyuse-poseEditor",e.innerHTML='
This node is about to be removed; you can use ComfyUI_Custom_Nodes_AlekPet to replace it.
',this.addDOMWidget("editor","btn",e),t})}})})),(e,o)=>(A(),w(S,null,[(A(!0),w(S,null,_(C(t),((e,l)=>(A(),Q(de,{ref_for:!0,ref:s=>{s&&(u.value[e.id]=s)},type:e.type,key:l,id:e.id,show:e.show,selectedStyles:e.value,onChooseStyle:e=>((e,l)=>{let a=T(t.value);a[l].value=e,s.setSelectors(a)})(e,l)},null,8,["type","id","show","selectedStyles","onChooseStyle"])))),128)),(A(!0),w(S,null,_(C(l),((e,t)=>(A(),Q(ge,{ref_for:!0,ref:s=>{s&&(c.value[e.id]=s)},type:e.type,key:t,id:e.id,show:e.show,selected:e.value,onSelect:e=>((e,t)=>{let a=T(l.value);a[t].value=e,s.setSegSelectors(a)})(e,t)},null,8,["type","id","show","selected","onSelect"])))),128)),(A(!0),w(S,null,_(C(a),((e,t)=>(A(),Q(we,{ref_for:!0,ref:s=>{s&&(v.value[e.id]=s)},type:e.type,key:t,id:e.id,show:e.show,mode:e.mode,values:e.value,onChangeValues:e=>((e,t)=>{let l=T(a.value);l[t].value=e,s.setSliderControls(l)})(e,t),onShowSlider:e=>(e=>{let t=T(a.value);t[e].show=!0,s.setSliderControls(t)})(t)},null,8,["type","id","show","mode","values","onChangeValues","onShowSlider"])))),128))],64))}},xe={class:"no-result-placeholder"},Me={class:"no-result-placeholder-content"},Ee={key:0},be={__name:"noResultsPlaceholder",props:{icon:{type:String,default:"",required:!1},iconSize:{type:String,default:"3rem",required:!1},title:{type:String,required:!0},message:{type:String,required:!1},buttonLabel:{type:String,default:"",required:!1}},emits:["action"],setup:e=>(s,t)=>(A(),w("div",xe,[z(C(G),null,{content:j((()=>[x("div",Me,[x("i",{class:M(e.icon),style:H({"font-size":e.iconSize,"margin-bottom":".5rem"})},null,6),x("h3",null,B(e.title),1),e.message?(A(),w("p",Ee,B(e.message),1)):I("",!0),e.buttonLabel?(A(),Q(C(P),{key:1,label:e.buttonLabel,onClick:t[0]||(t[0]=e=>s.$emit("action")),class:"p-button-text"},null,8,["label"])):I("",!0)])])),_:1})]))},Ce={class:"left flex-1"},_e={key:1,class:"edit"},ke={key:2,class:"label"},Be={class:"right toolbar"},Ie={key:0,class:"nodes"},Ne={__name:"group",props:{item:{type:Object,default:{}}},emits:["mousedown","mouseup","changeMode"],setup(e){const s=e,t=u(),l=y(!1),a=y(null),n=y(""),i=e=>{var l,a;let n=s.item;if(!(null==(l=n.info)?void 0:l.is_edit)&&(null==(a=n.children)?void 0:a.length)>0){let e=o.canvas.graph._groups.find((e=>e.pos[0]==n.info.pos[0]&&e.pos[1]==n.info.pos[1]));e&&(e.show_nodes=!e.show_nodes,t.setGroups(o.canvas.graph._groups))}},r=async()=>{let e=s.item,a=o.canvas.graph._groups.find((s=>s.pos[0]==e.info.pos[0]&&s.pos[1]==e.info.pos[1]));a?(a.is_edit=!1,a.title=n.value,await t.setGroups(o.canvas.graph._groups),l.value=!1):l.value=!1};return(u,c)=>{var p,v,m;return A(),w(S,null,[x("div",{class:M("comfyui-easyuse-map-nodes-group"),onClick:i},[x("div",Ce,[e.item.children?(A(),w("i",{key:0,class:M(["icon",e.item.info.show_nodes?"pi pi-folder-open":"pi pi-folder"]),style:H({color:e.item.info.color})},null,6)):I("",!0),(null==(p=e.item.info)?void 0:p.is_edit)?(A(),w("div",_e,[z(C(W),{ref_key:"modifyRef",ref:a,modelValue:n.value,"onUpdate:modelValue":c[0]||(c[0]=e=>n.value=e),variant:"outline",size:"small",type:"text",onBlur:r,onKeydown:[D(r,["enter"]),D(r,["esc"])],style:{width:"100%"}},null,8,["modelValue"])])):(A(),w("div",ke,[x("span",{onDblclick:c[1]||(c[1]=k((i=>(async()=>{var e,i;if(l.value)return;let r=s.item,u=o.canvas.graph._groups.find((e=>e.pos[0]==r.info.pos[0]&&e.pos[1]==r.info.pos[1]));u&&(u.is_edit=!u.is_edit,n.value=u.is_edit?r.info.title:"",await t.setGroups(o.canvas.graph._groups),l.value=!0,null==(i=null==(e=a.value)?void 
0:e[0])||i.$el.focus())})(e.item)),["stop"]))},B(e.item.info.title),33)]))]),x("div",Be,[(null==(v=e.item.children)?void 0:v.length)>0?(A(),Q(C(P),{key:0,size:"small",icon:e.item.children.find((e=>e.mode==C(d).ALWAYS))?"pi pi-eye":"pi pi-eye-slash",text:"",rounded:"",severity:"secondary",onClick:c[2]||(c[2]=k((e=>u.$emit("changeMode")),["stop"])),onMousedown:c[3]||(c[3]=k((e=>u.$emit("mousedown")),["stop"])),onMouseup:c[4]||(c[4]=k((e=>u.$emit("mouseup")),["stop"]))},null,8,["icon"])):I("",!0)])]),(null==(m=e.item.children)?void 0:m.length)>0&&e.item.info.show_nodes?(A(),w("div",Ie,[R(u.$slots,"default")])):I("",!0)],64)}}},He={key:1,class:"label error"},Qe={class:"right toolbar"},ze={__name:"node",props:{node:{type:Object,default:{}}},emits:["mousedown","mouseup","changeMode"],setup:e=>(s,t)=>(A(),w("div",{draggable:!1,class:M(["comfyui-easyuse-map-nodes-node",{never:void 0!==e.node.mode&&e.node.mode==C(d).NEVER},{bypass:void 0!==e.node.mode&&e.node.mode==C(d).BYPASS}])},[void 0!==e.node.title?(A(),w("span",{key:0,class:"label",onDblclick:t[0]||(t[0]=k((s=>C(c)(e.node.id)),["stop"]))},B(e.node.title),33)):(A(),w("span",He,B(e.node.type),1)),x("div",Qe,[z(C(P),{size:"small",icon:e.node.mode==C(d).ALWAYS?"pi pi-eye":"pi pi-eye-slash",text:"",rounded:"",severity:"secondary",onClick:t[1]||(t[1]=k((e=>s.$emit("changeMode")),["stop"])),onMousedown:t[2]||(t[2]=k((e=>s.$emit("mousedown")),["stop"])),onMouseup:t[3]||(t[3]=k((e=>s.$emit("mouseup")),["stop"]))},null,8,["icon"])])],2))},je={class:"title"},De={class:"toolbar"},Re={key:0},Le=["onDragstart","onDragend","onDragover"],Ze={key:1,class:"no_result",style:{height:"100%"}},Ye="comfyui-easyuse-map-nodes",Oe={__name:"nodesMap",emits:["handleHeader"],setup(e){const t=u(),{groups_nodes:l,groups:n}=V(t),i=y(!1),r=e=>{i.value=!i.value,o.canvas.graph._groups.forEach((e=>{e.show_nodes=i.value})),t.setGroups(o.canvas.graph._groups)};let c,v=0,m=0,g=!1;const h=(e,s=!1)=>{if(g)return void(g=!1);const l=e.children.find((e=>e.mode==d.ALWAYS)),a=e.children.map((e=>e.id));o.canvas.graph._nodes.forEach((e=>{a.includes(e.id)&&(e.mode=l?s?d.NEVER:d.BYPASS:d.ALWAYS,e.graph.change())})),t.setNodes(o.canvas.graph._nodes)},f=(e,s=!1)=>{if(g)return void(g=!1);const l=e.mode==d.ALWAYS,a=o.canvas.graph._nodes.find((s=>s.id==e.id));a&&(a.mode=l?s?d.NEVER:d.BYPASS:d.ALWAYS,a.graph.change(),t.setNodes(o.canvas.graph._nodes))},b=(e,s="group")=>{v=(new Date).getTime(),clearTimeout(c),c=setTimeout((t=>{"group"==s?h(e,!0):f(e,!0)}),500)},N=e=>{m=(new Date).getTime(),m-v>500&&(g=!0),clearTimeout(c)};let H=y(null),D=y(null);y(!1);return(e,u)=>{var d,c;return A(),w("div",{class:M(Ye)},[x("div",{class:M(Ye+"__header"),onMousedown:u[0]||(u[0]=s=>e.$emit("handleHeader",s))},[x("div",je,B(C(s)("Nodes Map",!0)),1),x("div",De,[(null==(d=C(n))?void 0:d.length)>0?E((A(),Q(C(P),{key:0,icon:i.value?"pi pi-angle-double-down":"pi pi-angle-double-up",text:"",rounded:"",severity:"secondary",onClick:k(r,["stop"]),size:"small"},null,8,["icon"])),[[C(U),i.value?C(s)("Collapse All"):C(s)("Expand All"),void 0,{top:!0}]]):I("",!0),R(e.$slots,"icon")])],34),x("div",{class:M(Ye+"__content")},[(null==(c=C(l))?void 0:c.length)>0?(A(),w("ol",Re,[(A(!0),w(S,null,_(C(l),((e,l)=>(A(),w("li",{key:l,onDragstart:e=>((e,s)=>{H.value=s,e.currentTarget.style.opacity="0.6",e.currentTarget.style.border="1px dashed yellow",e.dataTransfer.effectAllowed="move"})(e,l),onDragend:e=>(e=>{if(e.target.style.opacity="1",e.currentTarget.style.border="1px dashed transparent","Manual drag&drop 
sorting"!==p("EasyUse.NodesMap.Sorting"))return void a.warn(s("For drag and drop sorting, please find Nodes map sorting mode in Settings->EasyUse and change it to manual"));let l=o.canvas.graph._groups,n=l[H.value],i=l[D.value];o.canvas.graph._groups[H.value]=i,o.canvas.graph._groups[D.value]=n,t.setGroups(o.canvas.graph._groups)})(e),onDragover:e=>((e,s)=>{e.preventDefault(),e.currentIndex!=H.value&&(D.value=s)})(e,l),draggable:!0},[void 0!==e.children?(A(),Q(Ne,{key:0,item:e,onChangeMode:s=>h(e),onMousedown:s=>b(e,"group"),onMouseup:N},{default:j((()=>[(A(!0),w(S,null,_(e.children,((e,s)=>(A(),Q(ze,{key:s,node:e,onChangeMode:s=>f(e),onMousedown:s=>b(e,"node"),onMouseup:N},null,8,["node","onChangeMode","onMousedown"])))),128))])),_:2},1032,["item","onChangeMode","onMousedown"])):(A(),Q(ze,{key:1,node:e.info,onChangeMode:s=>f(e.info),onMousedown:s=>b(e.info,"node"),onMouseup:N},null,8,["node","onChangeMode","onMousedown"]))],40,Le)))),128))])):(A(),w("div",Ze,[z(be,{icon:"pi pi-sitemap",title:C(s)("No Nodes",!0),message:C(s)("No nodes found in the map",!0)},null,8,["title","message"])]))],2)])}}},Ve=[x("svg",{class:"icon",t:"1714565543756",viewBox:"0 0 1024 1024",version:"1.1",xmlns:"http://www.w3.org/2000/svg","p-id":"22538",width:"200",height:"200"},[x("path",{d:"M871.616 64H152.384c-31.488 0-60.416 25.28-60.416 58.24v779.52c0 32.896 26.24 58.24 60.352 58.24h719.232c34.112 0 60.352-25.344 60.352-58.24V122.24c0.128-32.96-28.8-58.24-60.288-58.24zM286.272 512c-23.616 0-44.672-20.224-44.672-43.008 0-22.784 20.992-43.008 44.608-43.008 23.616 0 44.608 20.224 44.608 43.008A43.328 43.328 0 0 1 286.272 512z m0-202.496c-23.616 0-44.608-20.224-44.608-43.008 0-22.784 20.992-43.008 44.608-43.008 23.616 0 44.608 20.224 44.608 43.008a43.456 43.456 0 0 1-44.608 43.008zM737.728 512H435.904c-23.68 0-44.672-20.224-44.672-43.008 0-22.784 20.992-43.008 44.608-43.008h299.264c23.616 0 44.608 20.224 44.608 43.008a42.752 42.752 0 0 1-41.984 43.008z m0-202.496H435.904c-23.616 0-44.608-20.224-44.608-43.008 0-22.784 20.992-43.008 44.608-43.008h299.264c23.616 0 44.608 20.224 44.608 43.008a42.88 42.88 0 0 1-42.048 43.008z","p-id":"22539",fill:"currentColor"})],-1)],Pe=[x("svg",{class:"icon",t:"1714565020764",viewBox:"0 0 1024 1024",version:"1.1",xmlns:"http://www.w3.org/2000/svg","p-id":"7999",width:"200",height:"200"},[x("path",{d:"M810.438503 379.664884l-71.187166-12.777183C737.426025 180.705882 542.117647 14.602496 532.991087 7.301248c-12.777184-10.951872-32.855615-10.951872-47.45811 0-9.12656 7.301248-204.434938 175.229947-206.26025 359.586453l-67.536542 10.951871c-18.253119 3.650624-31.030303 18.253119-31.030303 36.506239v189.832442c0 10.951872 5.475936 21.903743 12.777184 27.379679 7.301248 5.475936 14.602496 9.12656 23.729055 9.12656h5.475936l133.247772-23.729055c40.156863 47.458111 91.265597 73.012478 151.500891 73.012477 60.235294 0 111.344029-27.379679 151.500891-74.837789l136.898396 23.729055h5.475936c9.12656 0 16.427807-3.650624 23.729055-9.12656 9.12656-7.301248 12.777184-16.427807 12.777184-27.379679V412.520499c1.825312-14.602496-10.951872-29.204991-27.379679-32.855615zM620.606061 766.631016H401.568627c-20.078431 0-36.506239 16.427807-36.506238 36.506239v109.518716c0 14.602496 9.12656 29.204991 23.729055 34.680927 14.602496 5.475936 31.030303 1.825312 40.156863-9.126559l16.427807-18.25312 32.855615 80.313726c5.475936 14.602496 18.253119 23.729055 34.680927 23.729055 16.427807 0 27.379679-9.12656 34.680927-23.729055l32.855615-80.313726 16.427807 18.25312c10.951872 10.951872 25.554367 14.602496 40.156863 
9.126559 14.602496-5.475936 23.729055-18.253119 23.729055-34.680927v-109.518716c-3.650624-20.078431-20.078431-36.506239-40.156862-36.506239z",fill:"currentColor","p-id":"8000"})],-1)],Ge="comfyui-easyuse-toolbar",We={__name:"index",setup(e){const t=u(),l=y(!1);h((e=>l.value),(e=>{e?t.watchGraph(!0):t.unwatchGraph()}));const a=y(null),o=e=>{const s=a.value;var t=e.clientX||0,l=e.clientY||0,o=s.offsetLeft,n=s.offsetTop;function i(e){var a=e.clientX,i=e.clientY,r=a-t,u=i-l;s.style.left=o+r+"px",s.style.top=n+u+"px"}document.addEventListener("mousemove",i),document.addEventListener("mouseup",(function e(){document.removeEventListener("mousemove",i),document.removeEventListener("mouseup",e)}))};return(e,t)=>(A(),w(S,null,[x("div",{class:M(["flex-c",Ge])},[x("div",{class:M(["group flex-c",Ge+"-icon"]),onClick:t[0]||(t[0]=e=>l.value=!l.value)},Ve,2),x("div",{class:M(["rocket flex-c",Ge+"-icon"]),onClick:t[1]||(t[1]=(...e)=>C(v)&&C(v)(...e))},Pe,2)]),l.value?(A(),w("div",{key:0,ref_key:"nodesMapRef",ref:a,class:M(Ge+"-nodes-map")},[z(Oe,{onHandleHeader:o},{icon:j((()=>[E(z(C(P),{icon:"pi pi-times",text:"",rounded:"",severity:"secondary",onClick:t[2]||(t[2]=e=>l.value=!1),size:"small"},null,512),[[C(U),C(s)("Close"),void 0,{top:!0}]])])),_:1})],2)):I("",!0)],64))}},Ue={__name:"index",setup(e){const s=u();return N((e=>{s.watchGraph()})),(e,s)=>(A(),w("div",{class:M("comfyui-easyuse-map")},[z(Oe)]))}},Fe="Comfy.UseNewMenu",Te={__name:"App",setup(e){const t=y(p(Fe));return N((e=>{try{o.extensionManager.registerSidebarTab({id:m,icon:"pi pi-sitemap",title:s("Nodes Map",!0),tooltip:s("Nodes Map",!0),type:"custom",render:e=>{e.style.height="100%",L(Z(Ue,{}),e)}}),g(Fe,(e=>{t.value=e}))}catch(l){}})),(e,s)=>(A(),w(S,null,[z(Se),"Disabled"==t.value?(A(),Q(We,{key:0})):I("",!0)],64))}},Xe=null==(e=document.getElementsByClassName("graph-canvas-container"))?void 0:e[0],Je=document.createElement("div");Je.id="comfyui-easyuse-components",Xe?Xe.append(Je):document.body.append(Je);const qe=Y(Te);qe.use(X),qe.use(F()),qe.mount("#"+Je.id); diff --git a/ComfyUI-FluxExt-MZ/LICENSE b/ComfyUI-FluxExt-MZ/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..f288702d2fa16d3cdf0035b15a9fcbc552cd88e7 --- /dev/null +++ b/ComfyUI-FluxExt-MZ/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. 
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. 
+ + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. 
This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. 
This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. 
+ + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. 
+ + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. 
+ + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. 
If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. 
Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/ComfyUI-FluxExt-MZ/README.md b/ComfyUI-FluxExt-MZ/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..8fb43a944ba17da0f0661e8a8c8b4dc9c13d86be
--- /dev/null
+++ b/ComfyUI-FluxExt-MZ/README.md
@@ -0,0 +1,21 @@
+# ComfyUI-FluxExt-MZ
+
+### Tool nodes related to flux1
+
+
+#### MZ_Flux1UnetLoader_cpuDynOffload: UNET loader for flux1 with dynamic loading to / offloading from CUDA
+![image](https://github.com/user-attachments/assets/237b5fd8-d5ab-47bf-8fd8-0f0f09b0dbde)
+
+
+#### MZ_Flux1CheckpointLoaderNF4_cpuDynOffload: NF4 checkpoint loader for flux1 with dynamic loading to / offloading from CUDA, depends on https://github.com/comfyanonymous/ComfyUI_bitsandbytes_NF4
+![image](https://github.com/user-attachments/assets/29084146-a42e-4651-812e-dc1d84e4eafc)
+
+
+
+#### MZ_Flux1CheckpointLoader_cpuDynOffload: Checkpoint loader for flux1 with dynamic loading to / offloading from CUDA
+![image](https://github.com/user-attachments/assets/99eb78ba-f2e0-4a6d-9bfd-1fa92aefcbd2)
+
+
+#### MZ_Flux1PartialLoad_Patch: dynamically loads/offloads flux blocks to/from CUDA to reduce VRAM pressure (known bug when re-running after changing the parameters)
+
+![image](https://github.com/user-attachments/assets/dc80f3c7-a17b-43c5-9db8-ac576592a188)
diff --git a/ComfyUI-FluxExt-MZ/__init__.py b/ComfyUI-FluxExt-MZ/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..7bfa56dca9b6767459912bbae63289770bf60cc0
--- /dev/null
+++ b/ComfyUI-FluxExt-MZ/__init__.py
@@ -0,0 +1,149 @@
+
+
+import json
+import os
+import sys
+from nodes import MAX_RESOLUTION
+import comfy.utils
+import shutil
+import comfy.samplers
+import folder_paths
+
+
+WEB_DIRECTORY = "./web"
+
+AUTHOR_NAME = u"MinusZone"
+CATEGORY_NAME = f"{AUTHOR_NAME} - FluxExt"
+
+
+import importlib
+
+NODE_CLASS_MAPPINGS = {
+}
+
+
+NODE_DISPLAY_NAME_MAPPINGS = {
+}
+
+from . import mz_fluxext_core
+import importlib
+
+
+class MZ_Flux1PartialLoad_Patch:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {
+            "model": ("MODEL", ),
+            "double_blocks_cuda_size": ("INT", {"min": 0, "max": 16, "default": 7}),
+            "single_blocks_cuda_size": ("INT", {"min": 0, "max": 37, "default": 7}),
+        }}
+    RETURN_TYPES = ("MODEL",)
+    FUNCTION = "load_unet"
+
+    CATEGORY = f"{CATEGORY_NAME}"
+
+    def load_unet(self, **kwargs):
+        from . 
import mz_fluxext_core + importlib.reload(mz_fluxext_core) + return mz_fluxext_core.Flux1PartialLoad_Patch(kwargs) + + +NODE_CLASS_MAPPINGS["MZ_Flux1PartialLoad_Patch"] = MZ_Flux1PartialLoad_Patch +NODE_DISPLAY_NAME_MAPPINGS["MZ_Flux1PartialLoad_Patch"] = f"{AUTHOR_NAME} - Flux1PartialLoad_Patch" + +import nodes + + +class MZ_Flux1CheckpointLoaderNF4_cpuDynOffload: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "ckpt_name": (folder_paths.get_filename_list("checkpoints"), ), + "double_blocks_cuda_size": ("INT", {"min": 0, "max": 16, "default": 7}), + "single_blocks_cuda_size": ("INT", {"min": 0, "max": 37, "default": 7}), + }} + + RETURN_TYPES = ("MODEL", "CLIP", "VAE") + FUNCTION = "load_checkpoint" + + CATEGORY = f"{CATEGORY_NAME}" + + def load_checkpoint(self, ckpt_name, **kwargs): + CheckpointLoaderNF4 = nodes.NODE_CLASS_MAPPINGS.get( + "CheckpointLoaderNF4", None) + if CheckpointLoaderNF4 is None: + # 必须安装 https://github.com/comfyanonymous/ComfyUI_bitsandbytes_NF4 + raise Exception( + "Please install comfyanonymous/ComfyUI_bitsandbytes_NF4 to use this node.") + + model, clip, vae = CheckpointLoaderNF4().load_checkpoint(ckpt_name) + return mz_fluxext_core.Flux1PartialLoad_Patch({ + "model": model, + "double_blocks_cuda_size": kwargs.get("double_blocks_cuda_size", 7), + "single_blocks_cuda_size": kwargs.get("single_blocks_cuda_size", 7), + })[0], clip, vae + + +NODE_CLASS_MAPPINGS["MZ_Flux1CheckpointLoaderNF4_cpuDynOffload"] = MZ_Flux1CheckpointLoaderNF4_cpuDynOffload +NODE_DISPLAY_NAME_MAPPINGS[ + "MZ_Flux1CheckpointLoaderNF4_cpuDynOffload"] = f"{AUTHOR_NAME} - Flux1CheckpointLoaderNF4_cpuDynOffload" + + +class MZ_Flux1CheckpointLoader_cpuDynOffload: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "ckpt_name": (folder_paths.get_filename_list("checkpoints"), ), + "double_blocks_cuda_size": ("INT", {"min": 0, "max": 16, "default": 7}), + "single_blocks_cuda_size": ("INT", {"min": 0, "max": 37, "default": 7}), + }} + + RETURN_TYPES = ("MODEL", "CLIP", "VAE") + FUNCTION = "load_checkpoint" + + CATEGORY = f"{CATEGORY_NAME}" + + def load_checkpoint(self, ckpt_name, **kwargs): + model, clip, vae = nodes.CheckpointLoaderSimple().load_checkpoint( + ckpt_name=ckpt_name) + return mz_fluxext_core.Flux1PartialLoad_Patch({ + "model": model, + "double_blocks_cuda_size": kwargs.get("double_blocks_cuda_size", 7), + "single_blocks_cuda_size": kwargs.get("single_blocks_cuda_size", 7), + })[0], clip, vae + + +NODE_CLASS_MAPPINGS["MZ_Flux1CheckpointLoader_cpuDynOffload"] = MZ_Flux1CheckpointLoader_cpuDynOffload +NODE_DISPLAY_NAME_MAPPINGS[ + "MZ_Flux1CheckpointLoader_cpuDynOffload"] = f"{AUTHOR_NAME} - Flux1CheckpointLoader_cpuDynOffload" + + +class MZ_Flux1UnetLoader_cpuDynOffload: + @classmethod + def INPUT_TYPES(s): + args = nodes.UNETLoader().INPUT_TYPES() + args["required"]["double_blocks_cuda_size"] = ( + "INT", {"min": 0, "max": 16, "default": 7}) + args["required"]["single_blocks_cuda_size"] = ( + "INT", {"min": 0, "max": 37, "default": 7}) + return args + + RETURN_TYPES = ("MODEL",) + FUNCTION = "load_unet" + + CATEGORY = f"{CATEGORY_NAME}" + + def load_unet(self, **kwargs): + model = nodes.UNETLoader().load_unet( + **{k: v for k, v in kwargs.items() if k != "double_blocks_cuda_size" and k != "single_blocks_cuda_size"})[0] + + return mz_fluxext_core.Flux1PartialLoad_Patch({ + "model": model, + "double_blocks_cuda_size": kwargs.get("double_blocks_cuda_size", 7), + "single_blocks_cuda_size": kwargs.get("single_blocks_cuda_size", 7), + }) + + 
+NODE_CLASS_MAPPINGS["MZ_Flux1UnetLoader_cpuDynOffload"] = MZ_Flux1UnetLoader_cpuDynOffload +NODE_DISPLAY_NAME_MAPPINGS[ + "MZ_Flux1UnetLoader_cpuDynOffload"] = f"{AUTHOR_NAME} - Flux1UnetLoader_cpuDynOffload" diff --git a/ComfyUI-FluxExt-MZ/mz_fluxext_core.py b/ComfyUI-FluxExt-MZ/mz_fluxext_core.py new file mode 100644 index 0000000000000000000000000000000000000000..fe510b82951152e0c58ea58364276fab39a5233a --- /dev/null +++ b/ComfyUI-FluxExt-MZ/mz_fluxext_core.py @@ -0,0 +1,131 @@ + +import gc +import json +from types import MethodType +import safetensors.torch +import torch +import torch.nn as nn +import safetensors + + +from torch import Tensor, nn +import copy + + +def Flux1PartialLoad_Patch(args={}): + model = args.get("model") + + double_blocks_cuda_size = args.get("double_blocks_cuda_size") + single_blocks_cuda_size = args.get("single_blocks_cuda_size") + + def other_to_cpu(): + model.model.diffusion_model.img_in.to("cpu") + model.model.diffusion_model.time_in.to("cpu") + model.model.diffusion_model.guidance_in.to("cpu") + model.model.diffusion_model.vector_in.to("cpu") + model.model.diffusion_model.txt_in.to("cpu") + model.model.diffusion_model.pe_embedder.to("cpu") + + torch.cuda.empty_cache() + + def other_to_cuda(): + model.model.diffusion_model.img_in.to("cuda") + model.model.diffusion_model.time_in.to("cuda") + model.model.diffusion_model.guidance_in.to("cuda") + model.model.diffusion_model.vector_in.to("cuda") + model.model.diffusion_model.txt_in.to("cuda") + model.model.diffusion_model.pe_embedder.to("cuda") + + def double_blocks_to_cpu(layer_start=0, layer_size=-1): + if layer_size == -1: + model.model.diffusion_model.double_blocks.to("cpu") + else: + model.model.diffusion_model.double_blocks[layer_start:layer_start + + layer_size].to("cpu") + torch.cuda.empty_cache() + # gc.collect() + + def double_blocks_to_cuda(layer_start=0, layer_size=-1): + if layer_size == -1: + model.model.diffusion_model.double_blocks.to("cuda") + else: + model.model.diffusion_model.double_blocks[layer_start:layer_start + + layer_size].to("cuda") + + def single_blocks_to_cpu(layer_start=0, layer_size=-1): + if layer_size == -1: + model.model.diffusion_model.single_blocks.to("cpu") + else: + model.model.diffusion_model.single_blocks[layer_start:layer_start + + layer_size].to("cpu") + torch.cuda.empty_cache() + # gc.collect() + + def single_blocks_to_cuda(layer_start=0, layer_size=-1): + if layer_size == -1: + model.model.diffusion_model.single_blocks.to("cuda") + else: + model.model.diffusion_model.single_blocks[layer_start:layer_start + + layer_size].to("cuda") + + def generate_double_blocks_forward_hook(layer_start, layer_size): + def pre_only_double_blocks_forward_hook(module, inp): + + other_to_cpu() + + if layer_start > 0: + double_blocks_to_cpu(layer_start=0, layer_size=layer_start) + + double_blocks_to_cuda(layer_start=layer_start, + layer_size=layer_size) + # print("pre_only_double_blocks_forward_hook: ", + # layer_start, layer_size) + # input("Press Enter to continue...") + return inp + return pre_only_double_blocks_forward_hook + + def generate_single_blocks_forward_hook(layer_start, layer_size): + def pre_only_single_blocks_forward_hook(module, inp): + double_blocks_to_cpu() + if layer_start > 0: + single_blocks_to_cpu(layer_start=0, layer_size=layer_start) + + single_blocks_to_cuda(layer_start=layer_start, + layer_size=layer_size) + # print("pre_only_single_blocks_forward_hook: ", + # layer_start, layer_size) + # input("Press Enter to continue...") + return inp + return 
pre_only_single_blocks_forward_hook + + def pre_only_model_forward_hook(module, inp): + # print("double_blocks to cpu") + double_blocks_to_cpu() + # print("single_blocks to cpu") + single_blocks_to_cpu() + # print("other to cuda") + other_to_cuda() + return inp + + model.model.diffusion_model.register_forward_pre_hook( + pre_only_model_forward_hook) + + double_blocks_depth = len(model.model.diffusion_model.double_blocks) + steps = double_blocks_cuda_size + for i in range(0, double_blocks_depth, steps): + s = steps + if i + s > double_blocks_depth: + s = double_blocks_depth - i + model.model.diffusion_model.double_blocks[i].register_forward_pre_hook( + generate_double_blocks_forward_hook(i, s)) + + single_blocks_depth = len(model.model.diffusion_model.single_blocks) + steps = single_blocks_cuda_size + for i in range(0, single_blocks_depth, steps): + s = steps + if i + s > single_blocks_depth: + s = single_blocks_depth - i + model.model.diffusion_model.single_blocks[i].register_forward_pre_hook( + generate_single_blocks_forward_hook(i, s)) + + return (model,) diff --git a/ComfyUI-GGUF/LICENSE b/ComfyUI-GGUF/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/ComfyUI-GGUF/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/ComfyUI-GGUF/README.md b/ComfyUI-GGUF/README.md new file mode 100644 index 0000000000000000000000000000000000000000..5963d7740a777363fffb0e68888b524f87fe1437 --- /dev/null +++ b/ComfyUI-GGUF/README.md @@ -0,0 +1,44 @@ +# ComfyUI-GGUF +GGUF Quantization support for native ComfyUI models + +This is currently very much WIP. These custom nodes provide support for model files stored in the GGUF format popularized by [llama.cpp](https://github.com/ggerganov/llama.cpp). + +While quantization wasn't feasible for regular UNET models (conv2d), transformer/DiT models such as flux seem less affected by quantization. This allows running it in much lower bits per weight variable bitrate quants on low-end GPUs. For further VRAM savings, a node to load a quantized version of the T5 text encoder is also included. + +![Comfy_Flux1_dev_Q4_0_GGUF_1024](https://github.com/user-attachments/assets/70d16d97-c522-4ef4-9435-633f128644c8) + +Note: The "Force/Set CLIP Device" is **NOT** part of this node pack. 
Do not install it if you only have one GPU. Do not set it to cuda:0 and then complain about OOM errors if you do not understand what it is for. There is no need to copy the workflow above; just use your own workflow and replace the stock "Load Diffusion Model" node with the "Unet Loader (GGUF)" node.
+
+## Installation
+
+> [!IMPORTANT]
+> Make sure your ComfyUI is on a recent enough version to support custom ops when loading the UNET only.
+
+To install the custom node normally, git clone this repository into your custom nodes folder (`ComfyUI/custom_nodes`) and install the only dependency needed for inference (`pip install --upgrade gguf`):
+
+```
+git clone https://github.com/city96/ComfyUI-GGUF
+```
+
+To install the custom node on a standalone ComfyUI release, open a CMD inside the "ComfyUI_windows_portable" folder (where your `run_nvidia_gpu.bat` file is) and use the following commands:
+
+```
+git clone https://github.com/city96/ComfyUI-GGUF ComfyUI/custom_nodes/ComfyUI-GGUF
+.\python_embedded\python.exe -s -m pip install -r .\ComfyUI\custom_nodes\ComfyUI-GGUF\requirements.txt
+```
+
+## Usage
+
+Simply use the GGUF Unet loader found under the `bootleg` category. Place the .gguf model files in your `ComfyUI/models/unet` folder.
+
+LoRA loading is experimental, but it should work with just the built-in LoRA loader node(s).
+
+Pre-quantized models:
+
+- [flux1-dev GGUF](https://huggingface.co/city96/FLUX.1-dev-gguf)
+- [flux1-schnell GGUF](https://huggingface.co/city96/FLUX.1-schnell-gguf)
+
+Initial support for quantized T5 text encoders has also been added recently; these can be loaded with the various `*CLIPLoader (gguf)` nodes, which can be used in place of the regular ones. For the CLIP model, use whatever model you were using before. The loader can handle both types of files - `gguf` and regular `safetensors`/`bin`. A pre-quantized T5 encoder is linked below.
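+
+For reference, the snippet below is a minimal sketch of roughly what the "Unet Loader (GGUF)" node does when the graph runs. It is only an illustration: the `.gguf` file name is a hypothetical placeholder, and the code assumes it runs inside a live ComfyUI instance, where custom node classes have already been merged into the global node registry.
+
+```python
+# Minimal sketch (assumes a running ComfyUI with this node pack loaded).
+from nodes import NODE_CLASS_MAPPINGS  # ComfyUI's merged node registry
+
+loader = NODE_CLASS_MAPPINGS["UnetLoaderGGUF"]()
+# "flux1-dev-Q4_0.gguf" is a placeholder for a file placed in ComfyUI/models/unet.
+(model,) = loader.load_unet("flux1-dev-Q4_0.gguf")
+# `model` behaves like a normal ComfyUI MODEL wrapped in GGUFModelPatcher; the
+# custom ops added by this pack dequantize the GGUF weights on the fly.
+```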
+ +- [t5_v1.1-xxl GGUF](https://huggingface.co/city96/t5-v1_1-xxl-encoder-gguf) + diff --git a/ComfyUI-GGUF/__init__.py b/ComfyUI-GGUF/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a03726e3b0a08957ded67cdd21beb9544a3f6e4d --- /dev/null +++ b/ComfyUI-GGUF/__init__.py @@ -0,0 +1,9 @@ +# only import if running as a custom node +try: + import comfy.utils +except ImportError: + pass +else: + from .nodes import NODE_CLASS_MAPPINGS + NODE_DISPLAY_NAME_MAPPINGS = {k:v.TITLE for k,v in NODE_CLASS_MAPPINGS.items()} + __all__ = ['NODE_CLASS_MAPPINGS', 'NODE_DISPLAY_NAME_MAPPINGS'] diff --git a/ComfyUI-GGUF/dequant.py b/ComfyUI-GGUF/dequant.py new file mode 100644 index 0000000000000000000000000000000000000000..00930ea7e45c1fcc3777cb18a0a8325e30c2f3aa --- /dev/null +++ b/ComfyUI-GGUF/dequant.py @@ -0,0 +1,248 @@ +# (c) City96 || Apache-2.0 (apache.org/licenses/LICENSE-2.0) +import gguf +import torch +from tqdm import tqdm + + +TORCH_COMPATIBLE_QTYPES = {None, gguf.GGMLQuantizationType.F32, gguf.GGMLQuantizationType.F16} + +def is_torch_compatible(tensor): + return tensor is None or getattr(tensor, "tensor_type", None) in TORCH_COMPATIBLE_QTYPES + +def is_quantized(tensor): + return not is_torch_compatible(tensor) + +def dequantize_tensor(tensor, dtype=None, dequant_dtype=None): + qtype = getattr(tensor, "tensor_type", None) + oshape = getattr(tensor, "tensor_shape", tensor.shape) + + if qtype in TORCH_COMPATIBLE_QTYPES: + return tensor.to(dtype) + elif qtype in dequantize_functions: + dequant_dtype = dtype if dequant_dtype == "target" else dequant_dtype + return dequantize(tensor.data, qtype, oshape, dtype=dequant_dtype).to(dtype) + else: + # this is incredibly slow + tqdm.write(f"Falling back to numpy dequant for qtype: {qtype}") + new = gguf.quants.dequantize(tensor.cpu().numpy(), qtype) + return torch.from_numpy(new).to(tensor.device, dtype=dtype) + +def dequantize(data, qtype, oshape, dtype=None): + """ + Dequantize tensor back to usable shape/dtype + """ + block_size, type_size = gguf.GGML_QUANT_SIZES[qtype] + dequantize_blocks = dequantize_functions[qtype] + + rows = data.reshape( + (-1, data.shape[-1]) + ).view(torch.uint8) + + n_blocks = rows.numel() // type_size + blocks = rows.reshape((n_blocks, type_size)) + blocks = dequantize_blocks(blocks, block_size, type_size, dtype) + return blocks.reshape(oshape) + +def to_uint32(x): + # no uint32 :( + x = x.view(torch.uint8).to(torch.int32) + return (x[:, 0] | x[:, 1] << 8 | x[:, 2] << 16 | x[:, 3] << 24).unsqueeze(1) + +def split_block_dims(blocks, *args): + n_max = blocks.shape[1] + dims = list(args) + [n_max - sum(args)] + return torch.split(blocks, dims, dim=1) + +# Full weights # +def dequantize_blocks_BF16(blocks, block_size, type_size, dtype=None): + return (blocks.view(torch.int16).to(torch.int32) << 16).view(torch.float32) + +# Legacy Quants # +def dequantize_blocks_Q8_0(blocks, block_size, type_size, dtype=None): + d, x = split_block_dims(blocks, 2) + d = d.view(torch.float16).to(dtype) + x = x.view(torch.int8) + return (d * x) + +def dequantize_blocks_Q5_1(blocks, block_size, type_size, dtype=None): + n_blocks = blocks.shape[0] + + d, m, qh, qs = split_block_dims(blocks, 2, 2, 4) + d = d.view(torch.float16).to(dtype) + m = m.view(torch.float16).to(dtype) + qh = to_uint32(qh) + + qh = qh.reshape((n_blocks, 1)) >> torch.arange(32, device=d.device, dtype=torch.int32).reshape(1, 32) + ql = qs.reshape((n_blocks, -1, 1, block_size // 2)) >> torch.tensor([0, 4], device=d.device, dtype=torch.uint8).reshape(1, 1, 
2, 1) + qh = (qh & 1).to(torch.uint8) + ql = (ql & 0x0F).reshape((n_blocks, -1)) + + qs = (ql | (qh << 4)) + return (d * qs) + m + +def dequantize_blocks_Q5_0(blocks, block_size, type_size, dtype=None): + n_blocks = blocks.shape[0] + + d, qh, qs = split_block_dims(blocks, 2, 4) + d = d.view(torch.float16).to(dtype) + qh = to_uint32(qh) + + qh = qh.reshape(n_blocks, 1) >> torch.arange(32, device=d.device, dtype=torch.int32).reshape(1, 32) + ql = qs.reshape(n_blocks, -1, 1, block_size // 2) >> torch.tensor([0, 4], device=d.device, dtype=torch.uint8).reshape(1, 1, 2, 1) + + qh = (qh & 1).to(torch.uint8) + ql = (ql & 0x0F).reshape(n_blocks, -1) + + qs = (ql | (qh << 4)).to(torch.int8) - 16 + return (d * qs) + +def dequantize_blocks_Q4_1(blocks, block_size, type_size, dtype=None): + n_blocks = blocks.shape[0] + + d, m, qs = split_block_dims(blocks, 2, 2) + d = d.view(torch.float16).to(dtype) + m = m.view(torch.float16).to(dtype) + + qs = qs.reshape((n_blocks, -1, 1, block_size // 2)) >> torch.tensor([0, 4], device=d.device, dtype=torch.uint8).reshape(1, 1, 2, 1) + qs = (qs & 0x0F).reshape(n_blocks, -1) + + return (d * qs) + m + +def dequantize_blocks_Q4_0(blocks, block_size, type_size, dtype=None): + n_blocks = blocks.shape[0] + + d, qs = split_block_dims(blocks, 2) + d = d.view(torch.float16).to(dtype) + + qs = qs.reshape((n_blocks, -1, 1, block_size // 2)) >> torch.tensor([0, 4], device=d.device, dtype=torch.uint8).reshape((1, 1, 2, 1)) + qs = (qs & 0x0F).reshape((n_blocks, -1)).to(torch.int8) - 8 + return (d * qs) + +# K Quants # +QK_K = 256 +K_SCALE_SIZE = 12 + +def get_scale_min(scales): + n_blocks = scales.shape[0] + scales = scales.view(torch.uint8) + scales = scales.reshape((n_blocks, 3, 4)) + + d, m, m_d = torch.split(scales, scales.shape[-2] // 3, dim=-2) + + sc = torch.cat([d & 0x3F, (m_d & 0x0F) | ((d >> 2) & 0x30)], dim=-1) + min = torch.cat([m & 0x3F, (m_d >> 4) | ((m >> 2) & 0x30)], dim=-1) + + return (sc.reshape((n_blocks, 8)), min.reshape((n_blocks, 8))) + +def dequantize_blocks_Q6_K(blocks, block_size, type_size, dtype=None): + n_blocks = blocks.shape[0] + + ql, qh, scales, d, = split_block_dims(blocks, QK_K // 2, QK_K // 4, QK_K // 16) + + scales = scales.view(torch.int8).to(dtype) + d = d.view(torch.float16).to(dtype) + d = (d * scales).reshape((n_blocks, QK_K // 16, 1)) + + ql = ql.reshape((n_blocks, -1, 1, 64)) >> torch.tensor([0, 4], device=d.device, dtype=torch.uint8).reshape((1, 1, 2, 1)) + ql = (ql & 0x0F).reshape((n_blocks, -1, 32)) + qh = qh.reshape((n_blocks, -1, 1, 32)) >> torch.tensor([0, 2, 4, 6], device=d.device, dtype=torch.uint8).reshape((1, 1, 4, 1)) + qh = (qh & 0x03).reshape((n_blocks, -1, 32)) + q = (ql | (qh << 4)).to(torch.int8) - 32 + q = q.reshape((n_blocks, QK_K // 16, -1)) + + return (d * q).reshape((n_blocks, QK_K)) + +def dequantize_blocks_Q5_K(blocks, block_size, type_size, dtype=None): + n_blocks = blocks.shape[0] + + d, dmin, scales, qh, qs = split_block_dims(blocks, 2, 2, K_SCALE_SIZE, QK_K // 8) + + d = d.view(torch.float16).to(dtype) + dmin = dmin.view(torch.float16).to(dtype) + + sc, m = get_scale_min(scales) + + d = (d * sc).reshape((n_blocks, -1, 1)) + dm = (dmin * m).reshape((n_blocks, -1, 1)) + + ql = qs.reshape((n_blocks, -1, 1, 32)) >> torch.tensor([0, 4], device=d.device, dtype=torch.uint8).reshape((1, 1, 2, 1)) + qh = qh.reshape((n_blocks, -1, 1, 32)) >> torch.tensor([i for i in range(8)], device=d.device, dtype=torch.uint8).reshape((1, 1, 8, 1)) + ql = (ql & 0x0F).reshape((n_blocks, -1, 32)) + qh = (qh & 0x01).reshape((n_blocks, -1, 
32)) + q = (ql | (qh << 4)) + + return (d * q - dm).reshape((n_blocks, QK_K)) + +def dequantize_blocks_Q4_K(blocks, block_size, type_size, dtype=None): + n_blocks = blocks.shape[0] + + d, dmin, scales, qs = split_block_dims(blocks, 2, 2, K_SCALE_SIZE) + d = d.view(torch.float16).to(dtype) + dmin = dmin.view(torch.float16).to(dtype) + + sc, m = get_scale_min(scales) + + d = (d * sc).reshape((n_blocks, -1, 1)) + dm = (dmin * m).reshape((n_blocks, -1, 1)) + + qs = qs.reshape((n_blocks, -1, 1, 32)) >> torch.tensor([0, 4], device=d.device, dtype=torch.uint8).reshape((1, 1, 2, 1)) + qs = (qs & 0x0F).reshape((n_blocks, -1, 32)) + + return (d * qs - dm).reshape((n_blocks, QK_K)) + +def dequantize_blocks_Q3_K(blocks, block_size, type_size, dtype=None): + n_blocks = blocks.shape[0] + + hmask, qs, scales, d = split_block_dims(blocks, QK_K // 8, QK_K // 4, 12) + d = d.view(torch.float16).to(dtype) + + lscales, hscales = scales[:, :8], scales[:, 8:] + lscales = lscales.reshape((n_blocks, 1, 8)) >> torch.tensor([0, 4], device=d.device, dtype=torch.uint8).reshape((1, 2, 1)) + lscales = lscales.reshape((n_blocks, 16)) + hscales = hscales.reshape((n_blocks, 1, 4)) >> torch.tensor([0, 2, 4, 6], device=d.device, dtype=torch.uint8).reshape((1, 4, 1)) + hscales = hscales.reshape((n_blocks, 16)) + scales = (lscales & 0x0F) | ((hscales & 0x03) << 4) + scales = (scales.to(torch.int8) - 32) + + dl = (d * scales).reshape((n_blocks, 16, 1)) + + ql = qs.reshape((n_blocks, -1, 1, 32)) >> torch.tensor([0, 2, 4, 6], device=d.device, dtype=torch.uint8).reshape((1, 1, 4, 1)) + qh = hmask.reshape(n_blocks, -1, 1, 32) >> torch.tensor([i for i in range(8)], device=d.device, dtype=torch.uint8).reshape((1, 1, 8, 1)) + ql = ql.reshape((n_blocks, 16, QK_K // 16)) & 3 + qh = (qh.reshape((n_blocks, 16, QK_K // 16)) & 1) ^ 1 + q = (ql.to(torch.int8) - (qh << 2).to(torch.int8)) + + return (dl * q).reshape((n_blocks, QK_K)) + +def dequantize_blocks_Q2_K(blocks, block_size, type_size, dtype=None): + n_blocks = blocks.shape[0] + + scales, qs, d, dmin = split_block_dims(blocks, QK_K // 16, QK_K // 4, 2) + d = d.view(torch.float16).to(dtype) + dmin = dmin.view(torch.float16).to(dtype) + + # (n_blocks, 16, 1) + dl = (d * (scales & 0xF)).reshape((n_blocks, QK_K // 16, 1)) + ml = (dmin * (scales >> 4)).reshape((n_blocks, QK_K // 16, 1)) + + shift = torch.tensor([0, 2, 4, 6], device=d.device, dtype=torch.uint8).reshape((1, 1, 4, 1)) + + qs = (qs.reshape((n_blocks, -1, 1, 32)) >> shift) & 3 + qs = qs.reshape((n_blocks, QK_K // 16, 16)) + qs = dl * qs - ml + + return qs.reshape((n_blocks, -1)) + +dequantize_functions = { + gguf.GGMLQuantizationType.BF16: dequantize_blocks_BF16, + gguf.GGMLQuantizationType.Q8_0: dequantize_blocks_Q8_0, + gguf.GGMLQuantizationType.Q5_1: dequantize_blocks_Q5_1, + gguf.GGMLQuantizationType.Q5_0: dequantize_blocks_Q5_0, + gguf.GGMLQuantizationType.Q4_1: dequantize_blocks_Q4_1, + gguf.GGMLQuantizationType.Q4_0: dequantize_blocks_Q4_0, + gguf.GGMLQuantizationType.Q6_K: dequantize_blocks_Q6_K, + gguf.GGMLQuantizationType.Q5_K: dequantize_blocks_Q5_K, + gguf.GGMLQuantizationType.Q4_K: dequantize_blocks_Q4_K, + gguf.GGMLQuantizationType.Q3_K: dequantize_blocks_Q3_K, + gguf.GGMLQuantizationType.Q2_K: dequantize_blocks_Q2_K, +} diff --git a/ComfyUI-GGUF/nodes.py b/ComfyUI-GGUF/nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..47fcb4bb138ed7b222591297dbd235ab8a920a9d --- /dev/null +++ b/ComfyUI-GGUF/nodes.py @@ -0,0 +1,402 @@ +# (c) City96 || Apache-2.0 (apache.org/licenses/LICENSE-2.0) +import 
torch +import gguf +import copy +import logging + +import comfy.sd +import comfy.utils +import comfy.model_management +import comfy.model_patcher +import folder_paths + +from .ops import GGMLTensor, GGMLOps, move_patch_to_device +from .dequant import is_quantized, is_torch_compatible + +# Add a custom keys for files ending in .gguf +if "unet_gguf" not in folder_paths.folder_names_and_paths: + orig = folder_paths.folder_names_and_paths.get("diffusion_models", folder_paths.folder_names_and_paths.get("unet", [[], set()])) + folder_paths.folder_names_and_paths["unet_gguf"] = (orig[0], {".gguf"}) + +if "clip_gguf" not in folder_paths.folder_names_and_paths: + orig = folder_paths.folder_names_and_paths.get("clip", [[], set()]) + folder_paths.folder_names_and_paths["clip_gguf"] = (orig[0], {".gguf"}) + +def gguf_sd_loader_get_orig_shape(reader, tensor_name): + field_key = f"comfy.gguf.orig_shape.{tensor_name}" + field = reader.get_field(field_key) + if field is None: + return None + # Has original shape metadata, so we try to decode it. + if len(field.types) != 2 or field.types[0] != gguf.GGUFValueType.ARRAY or field.types[1] != gguf.GGUFValueType.INT32: + raise TypeError(f"Bad original shape metadata for {field_key}: Expected ARRAY of INT32, got {field.types}") + return torch.Size(tuple(int(field.parts[part_idx][0]) for part_idx in field.data)) + +def gguf_sd_loader(path, handle_prefix="model.diffusion_model."): + """ + Read state dict as fake tensors + """ + reader = gguf.GGUFReader(path) + + # filter and strip prefix + has_prefix = False + if handle_prefix is not None: + prefix_len = len(handle_prefix) + tensor_names = set(tensor.name for tensor in reader.tensors) + has_prefix = any(s.startswith(handle_prefix) for s in tensor_names) + + tensors = [] + for tensor in reader.tensors: + sd_key = tensor_name = tensor.name + if has_prefix: + if not tensor_name.startswith(handle_prefix): + continue + sd_key = tensor_name[prefix_len:] + tensors.append((sd_key, tensor)) + + # detect and verify architecture + compat = None + arch_str = None + arch_field = reader.get_field("general.architecture") + if arch_field is not None: + if len(arch_field.types) != 1 or arch_field.types[0] != gguf.GGUFValueType.STRING: + raise TypeError(f"Bad type for GGUF general.architecture key: expected string, got {arch_field.types!r}") + arch_str = str(arch_field.parts[arch_field.data[-1]], encoding="utf-8") + if arch_str not in {"flux", "sd1", "sdxl", "t5", "t5encoder"}: + raise ValueError(f"Unexpected architecture type in GGUF file, expected one of flux, sd1, sdxl, t5encoder but got {arch_str!r}") + else: # stable-diffusion.cpp + # import here to avoid changes to convert.py breaking regular models + from .tools.convert import detect_arch + arch_str = detect_arch(set(val[0] for val in tensors)) + compat = "sd.cpp" + + # main loading loop + state_dict = {} + qtype_dict = {} + for sd_key, tensor in tensors: + tensor_name = tensor.name + tensor_type_str = str(tensor.tensor_type) + torch_tensor = torch.from_numpy(tensor.data) # mmap + + shape = gguf_sd_loader_get_orig_shape(reader, tensor_name) + if shape is None: + shape = torch.Size(tuple(int(v) for v in reversed(tensor.shape))) + # Workaround for stable-diffusion.cpp SDXL detection. 
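+        # sd.cpp appears to store these SDXL proj_in/proj_out weights in 1x1 conv
+        # layout (trailing singleton dims); dropping those dims lets the tensors load
+        # as the 2D Linear weights ComfyUI expects for these layers.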
+ if compat == "sd.cpp" and arch_str == "sdxl": + if any([tensor_name.endswith(x) for x in (".proj_in.weight", ".proj_out.weight")]): + while len(shape) > 2 and shape[-1] == 1: + shape = shape[:-1] + + # add to state dict + if tensor.tensor_type in {gguf.GGMLQuantizationType.F32, gguf.GGMLQuantizationType.F16}: + torch_tensor = torch_tensor.view(*shape) + state_dict[sd_key] = GGMLTensor(torch_tensor, tensor_type=tensor.tensor_type, tensor_shape=shape) + qtype_dict[tensor_type_str] = qtype_dict.get(tensor_type_str, 0) + 1 + + # sanity check debug print + print("\nggml_sd_loader:") + for k,v in qtype_dict.items(): + print(f" {k:30}{v:3}") + + return state_dict + +# for remapping llama.cpp -> original key names +clip_sd_map = { + "enc.": "encoder.", + ".blk.": ".block.", + "token_embd": "shared", + "output_norm": "final_layer_norm", + "attn_q": "layer.0.SelfAttention.q", + "attn_k": "layer.0.SelfAttention.k", + "attn_v": "layer.0.SelfAttention.v", + "attn_o": "layer.0.SelfAttention.o", + "attn_norm": "layer.0.layer_norm", + "attn_rel_b": "layer.0.SelfAttention.relative_attention_bias", + "ffn_up": "layer.1.DenseReluDense.wi_1", + "ffn_down": "layer.1.DenseReluDense.wo", + "ffn_gate": "layer.1.DenseReluDense.wi_0", + "ffn_norm": "layer.1.layer_norm", +} + +def gguf_clip_loader(path): + raw_sd = gguf_sd_loader(path) + assert "enc.blk.23.ffn_up.weight" in raw_sd, "Invalid Text Encoder!" + sd = {} + for k,v in raw_sd.items(): + for s,d in clip_sd_map.items(): + k = k.replace(s,d) + sd[k] = v + return sd + +# TODO: Temporary fix for now +import collections +class GGUFModelPatcher(comfy.model_patcher.ModelPatcher): + patch_on_device = False + + def patch_weight_to_device(self, key, device_to=None, inplace_update=False): + if key not in self.patches: + return + weight = comfy.utils.get_attr(self.model, key) + + try: + from comfy.lora import calculate_weight + except Exception: + calculate_weight = self.calculate_weight + + patches = self.patches[key] + if is_quantized(weight): + out_weight = weight.to(device_to) + patches = move_patch_to_device(patches, self.load_device if self.patch_on_device else self.offload_device) + # TODO: do we ever have legitimate duplicate patches? (i.e. 
patch on top of patched weight) + out_weight.patches = [(calculate_weight, patches, key)] + else: + inplace_update = self.weight_inplace_update or inplace_update + if key not in self.backup: + self.backup[key] = collections.namedtuple('Dimension', ['weight', 'inplace_update'])( + weight.to(device=self.offload_device, copy=inplace_update), inplace_update + ) + + if device_to is not None: + temp_weight = comfy.model_management.cast_to_device(weight, device_to, torch.float32, copy=True) + else: + temp_weight = weight.to(torch.float32, copy=True) + + out_weight = calculate_weight(patches, temp_weight, key) + out_weight = comfy.float.stochastic_rounding(out_weight, weight.dtype) + + if inplace_update: + comfy.utils.copy_to_param(self.model, key, out_weight) + else: + comfy.utils.set_attr_param(self.model, key, out_weight) + + def unpatch_model(self, device_to=None, unpatch_weights=True): + if unpatch_weights: + for p in self.model.parameters(): + if is_torch_compatible(p): + continue + patches = getattr(p, "patches", []) + if len(patches) > 0: + p.patches = [] + # TODO: Find another way to not unload after patches + return super().unpatch_model(device_to=device_to, unpatch_weights=unpatch_weights) + + mmap_released = False + def load(self, *args, force_patch_weights=False, **kwargs): + # always call `patch_weight_to_device` even for lowvram + super().load(*args, force_patch_weights=True, **kwargs) + + # make sure nothing stays linked to mmap after first load + if not self.mmap_released: + linked = [] + if kwargs.get("lowvram_model_memory", 0) > 0: + for n, m in self.model.named_modules(): + if hasattr(m, "weight"): + device = getattr(m.weight, "device", None) + if device == self.offload_device: + linked.append((n, m)) + continue + if hasattr(m, "bias"): + device = getattr(m.bias, "device", None) + if device == self.offload_device: + linked.append((n, m)) + continue + if linked: + print(f"Attempting to release mmap ({len(linked)})") + for n, m in linked: + # TODO: possible to OOM, find better way to detach + m.to(self.load_device).to(self.offload_device) + self.mmap_released = True + + def clone(self, *args, **kwargs): + n = GGUFModelPatcher(self.model, self.load_device, self.offload_device, self.size, weight_inplace_update=self.weight_inplace_update) + n.patches = {} + for k in self.patches: + n.patches[k] = self.patches[k][:] + n.patches_uuid = self.patches_uuid + + n.object_patches = self.object_patches.copy() + n.model_options = copy.deepcopy(self.model_options) + n.backup = self.backup + n.object_patches_backup = self.object_patches_backup + n.patch_on_device = getattr(self, "patch_on_device", False) + return n + +class UnetLoaderGGUF: + @classmethod + def INPUT_TYPES(s): + unet_names = [x for x in folder_paths.get_filename_list("unet_gguf")] + return { + "required": { + "unet_name": (unet_names,), + } + } + + RETURN_TYPES = ("MODEL",) + FUNCTION = "load_unet" + CATEGORY = "bootleg" + TITLE = "Unet Loader (GGUF)" + + def load_unet(self, unet_name, dequant_dtype=None, patch_dtype=None, patch_on_device=None): + ops = GGMLOps() + + if dequant_dtype in ("default", None): + ops.Linear.dequant_dtype = None + elif dequant_dtype in ["target"]: + ops.Linear.dequant_dtype = dequant_dtype + else: + ops.Linear.dequant_dtype = getattr(torch, dequant_dtype) + + if patch_dtype in ("default", None): + ops.Linear.patch_dtype = None + elif patch_dtype in ["target"]: + ops.Linear.patch_dtype = patch_dtype + else: + ops.Linear.patch_dtype = getattr(torch, patch_dtype) + + # init model + unet_path = 
folder_paths.get_full_path("unet", unet_name) + sd = gguf_sd_loader(unet_path) + model = comfy.sd.load_diffusion_model_state_dict( + sd, model_options={"custom_operations": ops} + ) + if model is None: + logging.error("ERROR UNSUPPORTED UNET {}".format(unet_path)) + raise RuntimeError("ERROR: Could not detect model type of: {}".format(unet_path)) + model = GGUFModelPatcher.clone(model) + model.patch_on_device = patch_on_device + return (model,) + +class UnetLoaderGGUFAdvanced(UnetLoaderGGUF): + @classmethod + def INPUT_TYPES(s): + unet_names = [x for x in folder_paths.get_filename_list("unet_gguf")] + return { + "required": { + "unet_name": (unet_names,), + "dequant_dtype": (["default", "target", "float32", "float16", "bfloat16"], {"default": "default"}), + "patch_dtype": (["default", "target", "float32", "float16", "bfloat16"], {"default": "default"}), + "patch_on_device": ("BOOLEAN", {"default": False}), + } + } + TITLE = "Unet Loader (GGUF/Advanced)" + +clip_name_dict = { + "stable_diffusion": comfy.sd.CLIPType.STABLE_DIFFUSION, + "stable_cascade": comfy.sd.CLIPType.STABLE_CASCADE, + "stable_audio": comfy.sd.CLIPType.STABLE_AUDIO, + "sdxl": comfy.sd.CLIPType.STABLE_DIFFUSION, + "sd3": comfy.sd.CLIPType.SD3, + "flux": comfy.sd.CLIPType.FLUX, +} + +class CLIPLoaderGGUF: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "clip_name": (s.get_filename_list(),), + "type": (["stable_diffusion", "stable_cascade", "sd3", "stable_audio"],), + } + } + + RETURN_TYPES = ("CLIP",) + FUNCTION = "load_clip" + CATEGORY = "bootleg" + TITLE = "CLIPLoader (GGUF)" + + @classmethod + def get_filename_list(s): + files = [] + files += folder_paths.get_filename_list("clip") + files += folder_paths.get_filename_list("clip_gguf") + return sorted(files) + + def load_data(self, ckpt_paths): + clip_data = [] + for p in ckpt_paths: + if p.endswith(".gguf"): + clip_data.append(gguf_clip_loader(p)) + else: + sd = comfy.utils.load_torch_file(p, safe_load=True) + clip_data.append( + {k:GGMLTensor(v, tensor_type=gguf.GGMLQuantizationType.F16, tensor_shape=v.shape) for k,v in sd.items()} + ) + return clip_data + + def load_patcher(self, clip_paths, clip_type, clip_data): + clip = comfy.sd.load_text_encoder_state_dicts( + clip_type = clip_type, + state_dicts = clip_data, + model_options = { + "custom_operations": GGMLOps, + "initial_device": comfy.model_management.text_encoder_offload_device() + }, + embedding_directory = folder_paths.get_folder_paths("embeddings"), + ) + clip.patcher = GGUFModelPatcher.clone(clip.patcher) + + # for some reason this is just missing in some SAI checkpoints + if getattr(clip.cond_stage_model, "clip_l", None) is not None: + if getattr(clip.cond_stage_model.clip_l.transformer.text_projection.weight, "tensor_shape", None) is None: + clip.cond_stage_model.clip_l.transformer.text_projection = comfy.ops.manual_cast.Linear(768, 768) + if getattr(clip.cond_stage_model, "clip_g", None) is not None: + if getattr(clip.cond_stage_model.clip_g.transformer.text_projection.weight, "tensor_shape", None) is None: + clip.cond_stage_model.clip_g.transformer.text_projection = comfy.ops.manual_cast.Linear(1280, 1280) + + return clip + + def load_clip(self, clip_name, type="stable_diffusion"): + clip_path = folder_paths.get_full_path("clip", clip_name) + clip_type = clip_name_dict.get(type, comfy.sd.CLIPType.STABLE_DIFFUSION) + return (self.load_patcher([clip_path], clip_type, self.load_data([clip_path])),) + +class DualCLIPLoaderGGUF(CLIPLoaderGGUF): + @classmethod + def INPUT_TYPES(s): + 
file_options = (s.get_filename_list(), ) + return { + "required": { + "clip_name1": file_options, + "clip_name2": file_options, + "type": (("sdxl", "sd3", "flux"), ), + } + } + + TITLE = "DualCLIPLoader (GGUF)" + + def load_clip(self, clip_name1, clip_name2, type): + clip_path1 = folder_paths.get_full_path("clip", clip_name1) + clip_path2 = folder_paths.get_full_path("clip", clip_name2) + clip_paths = (clip_path1, clip_path2) + clip_type = clip_name_dict.get(type, comfy.sd.CLIPType.STABLE_DIFFUSION) + return (self.load_patcher(clip_paths, clip_type, self.load_data(clip_paths)),) + +class TripleCLIPLoaderGGUF(CLIPLoaderGGUF): + @classmethod + def INPUT_TYPES(s): + file_options = (s.get_filename_list(), ) + return { + "required": { + "clip_name1": file_options, + "clip_name2": file_options, + "clip_name3": file_options, + } + } + + TITLE = "TripleCLIPLoader (GGUF)" + + def load_clip(self, clip_name1, clip_name2, clip_name3, type="sd3"): + clip_path1 = folder_paths.get_full_path("clip", clip_name1) + clip_path2 = folder_paths.get_full_path("clip", clip_name2) + clip_path3 = folder_paths.get_full_path("clip", clip_name3) + clip_paths = (clip_path1, clip_path2, clip_path3) + clip_type = clip_name_dict.get(type, comfy.sd.CLIPType.STABLE_DIFFUSION) + return (self.load_patcher(clip_paths, clip_type, self.load_data(clip_paths)),) + +NODE_CLASS_MAPPINGS = { + "UnetLoaderGGUF": UnetLoaderGGUF, + "CLIPLoaderGGUF": CLIPLoaderGGUF, + "DualCLIPLoaderGGUF": DualCLIPLoaderGGUF, + "TripleCLIPLoaderGGUF": TripleCLIPLoaderGGUF, + "UnetLoaderGGUFAdvanced": UnetLoaderGGUFAdvanced, +} diff --git a/ComfyUI-GGUF/ops.py b/ComfyUI-GGUF/ops.py new file mode 100644 index 0000000000000000000000000000000000000000..94c056ea6804ec50617aa355d40398d48e9cf19f --- /dev/null +++ b/ComfyUI-GGUF/ops.py @@ -0,0 +1,212 @@ +# (c) City96 || Apache-2.0 (apache.org/licenses/LICENSE-2.0) +import gguf +import torch + +import comfy.ops +import comfy.model_management +from .dequant import dequantize_tensor, is_quantized + +class GGMLTensor(torch.Tensor): + """ + Main tensor-like class for storing quantized weights + """ + def __init__(self, *args, tensor_type, tensor_shape, patches=[], **kwargs): + super().__init__() + self.tensor_type = tensor_type + self.tensor_shape = tensor_shape + self.patches = patches + + def __new__(cls, *args, tensor_type, tensor_shape, patches=[], **kwargs): + return super().__new__(cls, *args, **kwargs) + + def to(self, *args, **kwargs): + new = super().to(*args, **kwargs) + new.tensor_type = getattr(self, "tensor_type", None) + new.tensor_shape = getattr(self, "tensor_shape", new.data.shape) + new.patches = getattr(self, "patches", []).copy() + return new + + def clone(self, *args, **kwargs): + return self + + def detach(self, *args, **kwargs): + return self + + def copy_(self, *args, **kwargs): + # fixes .weight.copy_ in comfy/clip_model/CLIPTextModel + try: + return super().copy_(*args, **kwargs) + except Exception as e: + print(f"ignoring 'copy_' on tensor: {e}") + + def __deepcopy__(self, *args, **kwargs): + # Intel Arc fix, ref#50 + new = super().__deepcopy__(*args, **kwargs) + new.tensor_type = getattr(self, "tensor_type", None) + new.tensor_shape = getattr(self, "tensor_shape", new.data.shape) + new.patches = getattr(self, "patches", []).copy() + return new + + @property + def shape(self): + if not hasattr(self, "tensor_shape"): + self.tensor_shape = self.size() + return self.tensor_shape + +class GGMLLayer(torch.nn.Module): + """ + This (should) be responsible for de-quantizing on the fly + """ + 
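+    # Note (descriptive, based on how these attributes are used below and in nodes.py):
+    # dequant_dtype / patch_dtype are optional overrides set by the UnetLoaderGGUF nodes;
+    # dequant_dtype is forwarded to dequantize_tensor() when unpacking quantized weights,
+    # patch_dtype is the dtype LoRA-style patches are applied in ("target" = the compute dtype).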
comfy_cast_weights = True + dequant_dtype = None + patch_dtype = None + torch_compatible_tensor_types = {None, gguf.GGMLQuantizationType.F32, gguf.GGMLQuantizationType.F16} + + def is_ggml_quantized(self, *, weight=None, bias=None): + if weight is None: + weight = self.weight + if bias is None: + bias = self.bias + return is_quantized(weight) or is_quantized(bias) + + def _load_from_state_dict(self, state_dict, prefix, *args, **kwargs): + weight, bias = state_dict.get(f"{prefix}weight"), state_dict.get(f"{prefix}bias") + # NOTE: using modified load for linear due to not initializing on creation, see GGMLOps todo + if self.is_ggml_quantized(weight=weight, bias=bias) or isinstance(self, torch.nn.Linear): + return self.ggml_load_from_state_dict(state_dict, prefix, *args, **kwargs) + return super()._load_from_state_dict(state_dict, prefix, *args, **kwargs) + + def ggml_load_from_state_dict(self, state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs): + prefix_len = len(prefix) + for k,v in state_dict.items(): + if k[prefix_len:] == "weight": + self.weight = torch.nn.Parameter(v, requires_grad=False) + elif k[prefix_len:] == "bias" and v is not None: + self.bias = torch.nn.Parameter(v, requires_grad=False) + else: + missing_keys.append(k) + + def _save_to_state_dict(self, *args, **kwargs): + if self.is_ggml_quantized(): + return self.ggml_save_to_state_dict(*args, **kwargs) + return super()._save_to_state_dict(*args, **kwargs) + + def ggml_save_to_state_dict(self, destination, prefix, keep_vars): + # This is a fake state dict for vram estimation + weight = torch.zeros_like(self.weight, device=torch.device("meta")) + destination[prefix + "weight"] = weight + if self.bias is not None: + bias = torch.zeros_like(self.bias, device=torch.device("meta")) + destination[prefix + "bias"] = bias + return + + # This would return the actual state dict + destination[prefix + "weight"] = self.get_weight(self.weight) + if bias is not None: + destination[prefix + "bias"] = self.get_weight(self.bias) + + def get_weight(self, tensor, dtype): + if tensor is None: + return + + # consolidate and load patches to GPU in async + patch_list = [] + device = tensor.device + for function, patches, key in getattr(tensor, "patches", []): + patch_list += move_patch_to_device(patches, device) + + # dequantize tensor while patches load + weight = dequantize_tensor(tensor, dtype, self.dequant_dtype) + + # apply patches + if patch_list: + if self.patch_dtype is None: + weight = function(patch_list, weight, key) + else: + # for testing, may degrade image quality + patch_dtype = dtype if self.patch_dtype == "target" else self.patch_dtype + weight = function(patch_list, weight, key, patch_dtype) + return weight + + def cast_bias_weight(s, input=None, dtype=None, device=None, bias_dtype=None): + if input is not None: + if dtype is None: + dtype = getattr(input, "dtype", torch.float32) + if bias_dtype is None: + bias_dtype = dtype + if device is None: + device = input.device + + bias = None + non_blocking = comfy.model_management.device_supports_non_blocking(device) + if s.bias is not None: + bias = s.get_weight(s.bias.to(device), dtype) + bias = comfy.ops.cast_to(bias, bias_dtype, device, non_blocking=non_blocking, copy=False) + + weight = s.get_weight(s.weight.to(device), dtype) + weight = comfy.ops.cast_to(weight, dtype, device, non_blocking=non_blocking, copy=False) + return weight, bias + + def forward_comfy_cast_weights(self, input, *args, **kwargs): + if self.is_ggml_quantized(): + return 
self.forward_ggml_cast_weights(input, *args, **kwargs) + return super().forward_comfy_cast_weights(input, *args, **kwargs) + + def forward_ggml_cast_weights(self, input): + raise NotImplementedError + +class GGMLOps(comfy.ops.manual_cast): + """ + Dequantize weights on the fly before doing the compute + """ + class Linear(GGMLLayer, comfy.ops.manual_cast.Linear): + def __init__(self, in_features, out_features, bias=True, device=None, dtype=None): + torch.nn.Module.__init__(self) + # TODO: better workaround for reserved memory spike on windows + # Issue is with `torch.empty` still reserving the full memory for the layer + # Windows doesn't over-commit memory so without this 24GB+ of pagefile is used + self.in_features = in_features + self.out_features = out_features + self.weight = None + self.bias = None + + def forward_ggml_cast_weights(self, input): + weight, bias = self.cast_bias_weight(input) + return torch.nn.functional.linear(input, weight, bias) + + class Conv2d(GGMLLayer, comfy.ops.manual_cast.Conv2d): + def forward_ggml_cast_weights(self, input): + weight, bias = self.cast_bias_weight(input) + return self._conv_forward(input, weight, bias) + + class Embedding(GGMLLayer, comfy.ops.manual_cast.Embedding): + def forward_ggml_cast_weights(self, input, out_dtype=None): + output_dtype = out_dtype + if self.weight.dtype == torch.float16 or self.weight.dtype == torch.bfloat16: + out_dtype = None + weight, _bias = self.cast_bias_weight(self, device=input.device, dtype=out_dtype) + return torch.nn.functional.embedding( + input, weight, self.padding_idx, self.max_norm, self.norm_type, self.scale_grad_by_freq, self.sparse + ).to(dtype=output_dtype) + + class LayerNorm(GGMLLayer, comfy.ops.manual_cast.LayerNorm): + def forward_ggml_cast_weights(self, input): + if self.weight is None: + return super().forward_comfy_cast_weights(input) + weight, bias = self.cast_bias_weight(input) + return torch.nn.functional.layer_norm(input, self.normalized_shape, weight, bias, self.eps) + + class GroupNorm(GGMLLayer, comfy.ops.manual_cast.GroupNorm): + def forward_ggml_cast_weights(self, input): + weight, bias = self.cast_bias_weight(input) + return torch.nn.functional.group_norm(input, self.num_groups, weight, bias, self.eps) + +def move_patch_to_device(item, device): + if isinstance(item, torch.Tensor): + return item.to(device, non_blocking=True) + elif isinstance(item, tuple): + return tuple(move_patch_to_device(x, device) for x in item) + elif isinstance(item, list): + return [move_patch_to_device(x, device) for x in item] + else: + return item diff --git a/ComfyUI-GGUF/requirements.txt b/ComfyUI-GGUF/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..13860344d5278716fbee9e12ba329fb3c79a5aa7 --- /dev/null +++ b/ComfyUI-GGUF/requirements.txt @@ -0,0 +1,2 @@ +gguf>=0.9.1 +numpy<2.0.0 diff --git a/ComfyUI-GGUF/tools/README.md b/ComfyUI-GGUF/tools/README.md new file mode 100644 index 0000000000000000000000000000000000000000..2852ccb612e9c32428fd98aee61f5d7fd9cc8d02 --- /dev/null +++ b/ComfyUI-GGUF/tools/README.md @@ -0,0 +1,49 @@ +This needs the llama.cpp version of gguf-py to work at the moment, not the pip one as that one does not have the python quantization code yet. 
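+
+If you are not sure which gguf-py is currently installed, a quick (unofficial) sanity check is to look for the quantization helpers that `convert.py` relies on; if this prints `False`, install the llama.cpp copy as shown below:
+```
+python -c "import gguf; print(hasattr(getattr(gguf, 'quants', None), 'quantize'))"
+```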
+
+```
+git clone https://github.com/ggerganov/llama.cpp
+pip install llama.cpp/gguf-py
+```
+
+
+To convert your initial source model to FP16 (or BF16), run the following command:
+```
+python convert.py --src E:\models\unet\flux1-dev.safetensors
+```
+
+
+To quantize the model, first apply the provided patch to the llama.cpp repo you've just cloned. If you get a "corrupt patch" error, you may have to [change the line endings in the patch file](https://github.com/city96/ComfyUI-GGUF/issues/90#issuecomment-2323011648).
+```
+cd llama.cpp
+git checkout tags/b3600
+git apply ..\lcpp.patch
+```
+
+
+Then, compile the llama-quantize binary. This example uses cmake; on Linux you can just use make.
+```
+mkdir build
+cd build
+cmake ..
+cmake --build . --config Debug -j10 --target llama-quantize
+cd ..
+cd ..
+```
+
+
+Now you can use the newly built binary to quantize your model to the desired format:
+```
+llama.cpp\build\bin\Debug\llama-quantize.exe E:\models\unet\flux1-dev-BF16.gguf E:\models\unet\flux1-dev-Q4_K_S.gguf Q4_K_S
+```
+
+
+You can extract the patch again with `git diff src\llama.cpp > lcpp.patch` if you wish to change something and contribute back.
+
+
+> [!WARNING]
+> Do not use the diffusers UNET for flux, it won't work; use the default/reference checkpoint format. This is due to q/k/v being merged into one qkv key. You can convert it by loading it in ComfyUI and saving it using the built-in "ModelSave" node.
+
+
+> [!WARNING]
+> Do not quantize SDXL / SD1 / other Conv2D heavy models. There's little to no benefit with these models. If you do, make sure to **extract the UNET model first**.
+> This should be obvious, but also don't use the resulting llama-quantize binary with LLMs.
diff --git a/ComfyUI-GGUF/tools/convert.py b/ComfyUI-GGUF/tools/convert.py
new file mode 100644
index 0000000000000000000000000000000000000000..362f564471f4a337990d4caf7cc0701d24e928d9
--- /dev/null
+++ b/ComfyUI-GGUF/tools/convert.py
@@ -0,0 +1,199 @@
+# (c) City96 || Apache-2.0 (apache.org/licenses/LICENSE-2.0)
+import os
+import torch
+import gguf # This needs to be the llama.cpp one specifically!
+import argparse
+from tqdm import tqdm
+
+from safetensors.torch import load_file
+
+QUANTIZATION_THRESHOLD = 1024
+REARRANGE_THRESHOLD = 512
+MAX_TENSOR_NAME_LENGTH = 127
+
+# Tuple of arch_name, match_lists.
+# Each item in match_lists is a tuple of keys that must match.
+# All keys in a match_lists item must exist for the architecture to match.
+# The architectures are checked in order and the first successful match terminates the search.
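+# For example, a state dict containing "transformer_blocks.0.attn.add_q_proj.weight"
+# (and matching neither flux key list) is detected as "sd3".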
+MODEL_DETECTION = ( + ("flux", ( + ("transformer_blocks.0.attn.norm_added_k.weight",), + ("double_blocks.0.img_attn.proj.weight",), + )), + ("sd3", ( + ("transformer_blocks.0.attn.add_q_proj.weight",), + )), + ("sdxl", ( + ("down_blocks.0.downsamplers.0.conv.weight", "add_embedding.linear_1.weight",), + ( + "input_blocks.3.0.op.weight", "input_blocks.6.0.op.weight", + "output_blocks.2.2.conv.weight", "output_blocks.5.2.conv.weight", + ), # Non-diffusers + ("label_emb.0.0.weight",), + )), + ("sd1", ( + ("down_blocks.0.downsamplers.0.conv.weight",), + ( + "input_blocks.3.0.op.weight", "input_blocks.6.0.op.weight", "input_blocks.9.0.op.weight", + "output_blocks.2.1.conv.weight", "output_blocks.5.2.conv.weight", "output_blocks.8.2.conv.weight" + ), # Non-diffusers + )), +) + + +def parse_args(): + parser = argparse.ArgumentParser(description="Generate F16 GGUF files from single UNET") + parser.add_argument("--src", required=True, help="Source model ckpt file.") + parser.add_argument("--dst", help="Output unet gguf file.") + args = parser.parse_args() + + if not os.path.isfile(args.src): + parser.error("No input provided!") + + return args + +def load_state_dict(path): + if any(path.endswith(x) for x in [".ckpt", ".pt", ".bin", ".pth"]): + state_dict = torch.load(path, map_location="cpu", weights_only=True) + state_dict = state_dict.get("model", state_dict) + else: + state_dict = load_file(path) + + # only keep unet with no prefix! + sd = {} + has_prefix = any(["model.diffusion_model." in x for x in state_dict.keys()]) + for k, v in state_dict.items(): + if has_prefix and "model.diffusion_model." not in k: + continue + if has_prefix: + k = k.replace("model.diffusion_model.", "") + sd[k] = v + + return sd + +def detect_arch(state_dict): + for arch, match_lists in MODEL_DETECTION: + for match_list in match_lists: + if all(key in state_dict for key in match_list): + return arch + breakpoint() + raise ValueError("Unknown model architecture!") + + +def load_model(path): + state_dict = load_state_dict(path) + arch = detect_arch(state_dict) + print(f"* Architecture detected from input: {arch}") + if arch == "flux" and "transformer_blocks.0.attn.norm_added_k.weight" in state_dict: + raise ValueError("The Diffusers UNET can not be used for this!") + writer = gguf.GGUFWriter(path=None, arch=arch) + return (writer, state_dict) + +def handle_tensors(args, writer, state_dict): + # TODO list: + # - do something about this being awful and hacky + + name_lengths = tuple(sorted( + ((key, len(key)) for key in state_dict.keys()), + key=lambda item: item[1], + reverse=True, + )) + if not name_lengths: + return + max_name_len = name_lengths[0][1] + if max_name_len > MAX_TENSOR_NAME_LENGTH: + bad_list = ", ".join(f"{key!r} ({namelen})" for key, namelen in name_lengths if namelen > MAX_TENSOR_NAME_LENGTH) + raise ValueError(f"Can only handle tensor names up to {MAX_TENSOR_NAME_LENGTH} characters. 
Tensors exceeding the limit: {bad_list}") + for key, data in tqdm(state_dict.items()): + old_dtype = data.dtype + + if data.dtype == torch.bfloat16: + data = data.to(torch.float32).numpy() + # this is so we don't break torch 2.0.X + elif data.dtype in [getattr(torch, "float8_e4m3fn", "_invalid"), getattr(torch, "float8_e5m2", "_invalid")]: + data = data.to(torch.float16).numpy() + else: + data = data.numpy() + + n_dims = len(data.shape) + data_shape = data.shape + data_qtype = getattr( + gguf.GGMLQuantizationType, + "BF16" if old_dtype == torch.bfloat16 else "F16" + ) + + # get number of parameters (AKA elements) in this tensor + n_params = 1 + for dim_size in data_shape: + n_params *= dim_size + + # keys to keep as max precision + blacklist = { + "time_embedding.", + "add_embedding.", + "time_in.", + "txt_in.", + "vector_in.", + "img_in.", + "guidance_in.", + "final_layer.", + } + + if old_dtype in (torch.float32, torch.bfloat16): + if n_dims == 1: + # one-dimensional tensors should be kept in F32 + # also speeds up inference due to not dequantizing + data_qtype = gguf.GGMLQuantizationType.F32 + + elif n_params <= QUANTIZATION_THRESHOLD: + # very small tensors + data_qtype = gguf.GGMLQuantizationType.F32 + + elif ".weight" in key and any(x in key for x in blacklist): + data_qtype = gguf.GGMLQuantizationType.F32 + + if ( n_dims > 1 # Skip one-dimensional tensors + and n_params >= REARRANGE_THRESHOLD # Only rearrange tensors meeting the size requirement + and (n_params / 256).is_integer() # Rearranging only makes sense if total elements is divisible by 256 + and not (data.shape[-1] / 256).is_integer() # Only need to rearrange if the last dimension is not divisible by 256 + ): + orig_shape = data.shape + data = data.reshape(n_params // 256, 256) + writer.add_array(f"comfy.gguf.orig_shape.{key}", tuple(int(dim) for dim in orig_shape)) + + try: + data = gguf.quants.quantize(data, data_qtype) + except (AttributeError, gguf.QuantError) as e: + tqdm.write(f"falling back to F16: {e}") + data_qtype = gguf.GGMLQuantizationType.F16 + data = gguf.quants.quantize(data, data_qtype) + + new_name = key # do we need to rename? 
+ + shape_str = f"{{{', '.join(str(n) for n in reversed(data.shape))}}}" + tqdm.write(f"{f'%-{max_name_len + 4}s' % f'{new_name}'} {old_dtype} --> {data_qtype.name}, shape = {shape_str}") + + writer.add_tensor(new_name, data, raw_dtype=data_qtype) + +if __name__ == "__main__": + args = parse_args() + path = args.src + writer, state_dict = load_model(path) + + writer.add_quantization_version(gguf.GGML_QUANT_VERSION) + if next(iter(state_dict.values())).dtype == torch.bfloat16: + out_path = f"{os.path.splitext(path)[0]}-BF16.gguf" + writer.add_file_type(gguf.LlamaFileType.MOSTLY_BF16) + else: + out_path = f"{os.path.splitext(path)[0]}-F16.gguf" + writer.add_file_type(gguf.LlamaFileType.MOSTLY_F16) + + out_path = args.dst or out_path + if os.path.isfile(out_path): + input("Output exists enter to continue or ctrl+c to abort!") + + handle_tensors(path, writer, state_dict) + writer.write_header_to_file(path=out_path) + writer.write_kv_data_to_file() + writer.write_tensors_to_file(progress=True) + writer.close() diff --git a/ComfyUI-GGUF/tools/read_tensors.py b/ComfyUI-GGUF/tools/read_tensors.py new file mode 100644 index 0000000000000000000000000000000000000000..6fff5aaa4b5c527e38b98b0280473fc92e6a76fd --- /dev/null +++ b/ComfyUI-GGUF/tools/read_tensors.py @@ -0,0 +1,21 @@ +#!/usr/bin/python3 +import os +import sys +import gguf + +def read_tensors(path): + reader = gguf.GGUFReader(path) + for tensor in reader.tensors: + if tensor.tensor_type == gguf.GGMLQuantizationType.F32: + continue + print(f"{str(tensor.tensor_type):32}: {tensor.name}") + +try: + path = sys.argv[1] + assert os.path.isfile(path), "Invalid path" + print(f"input: {path}") +except Exception as e: + input(f"failed: {e}") +else: + read_tensors(path) + input() diff --git a/ComfyUI-IC-Light/LICENSE b/ComfyUI-IC-Light/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..b09cd7856d58590578ee1a4f3ad45d1310a97f87 --- /dev/null +++ b/ComfyUI-IC-Light/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/ComfyUI-IC-Light/README.md b/ComfyUI-IC-Light/README.md new file mode 100644 index 0000000000000000000000000000000000000000..7d90f01fadc27cac62dcbdc4b513fe7020c87946 --- /dev/null +++ b/ComfyUI-IC-Light/README.md @@ -0,0 +1,36 @@ +# ComfyUI native implementation of IC-Light + +Huge thanks to lllyasviel for yet more amazing models! +Original repository: https://github.com/lllyasviel/IC-Light + +# Installation + +Recommended way is to use the manager. There should be no extra requirements needed. +Manual way is to clone this repo to the `ComfyUI/custom_nodes` -folder. + +The models are also available through the Manager, search for "IC-light". 
By default they go to `ComfyUI/models/unet/IC-Light` + +Alternatively download them from here and place anywhere in the `ComfyUI/models/unet` -folder: + +https://huggingface.co/lllyasviel/ic-light/tree/main + +Some of the example workflows require the very latest features in KJNodes: + +https://github.com/kijai/ComfyUI-KJNodes + + + + + +https://github.com/kijai/ComfyUI-IC-Light/assets/40791699/c545a84f-3546-430e-b5dd-adce2ff19b6d + + + +https://github.com/kijai/ComfyUI-IC-Light/assets/40791699/b406ee2b-c9cb-4f9a-9aac-6ab5f753420d + + +![ic_light_fbc_example_01](https://github.com/kijai/ComfyUI-IC-Light/blob/main/examples/ic_light_fbc_example_01.png?raw=true) + +![ic_light_example_02](https://github.com/kijai/ComfyUI-IC-Light/blob/main/examples/iclight_example_fc_controlled_gradient_01.png?raw=true) + +![ic_light_fbc_example_01](https://github.com/kijai/ComfyUI-IC-Light/blob/main/examples/iclight_normals_example_01.png?raw=true) diff --git a/ComfyUI-IC-Light/__init__.py b/ComfyUI-IC-Light/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b0dbf9be88f116b4ba794eb4edab657fd27c177d --- /dev/null +++ b/ComfyUI-IC-Light/__init__.py @@ -0,0 +1,2 @@ +from .nodes import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS +__all__ = ["NODE_CLASS_MAPPINGS", "NODE_DISPLAY_NAME_MAPPINGS"] \ No newline at end of file diff --git a/ComfyUI-IC-Light/nodes.py b/ComfyUI-IC-Light/nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..ffd4fe5b3fb5fcc9afc2a620dd1023836dc586de --- /dev/null +++ b/ComfyUI-IC-Light/nodes.py @@ -0,0 +1,537 @@ +import torch +import torchvision.transforms as transforms +import folder_paths +import os +import types +import numpy as np +import torch.nn.functional as F +from comfy.utils import load_torch_file +from .utils.convert_unet import convert_iclight_unet +from .utils.patches import calculate_weight_adjust_channel +from .utils.image import generate_gradient_image, LightPosition +from nodes import MAX_RESOLUTION +from comfy.model_patcher import ModelPatcher +from comfy import lora +import model_management +import logging + +from load_file_from_url import load_file_from_url, load_model_for_iclight + +class LoadAndApplyICLightUnet: + @classmethod + def INPUT_TYPES(s): + load_model_for_iclight() + return { + "required": { + "model": ("MODEL",), + "model_path": (folder_paths.get_filename_list("unet"), ) + } + } + + RETURN_TYPES = ("MODEL",) + FUNCTION = "load" + CATEGORY = "IC-Light" + DESCRIPTION = """ + +Loads and applies the diffusers SD1.5 IC-Light models available here: +https://huggingface.co/lllyasviel/ic-light/tree/main + +Used with ICLightConditioning -node +""" + + def load(self, model, model_path): + type_str = str(type(model.model.model_config).__name__) + if "SD15" not in type_str: + raise Exception(f"Attempted to load {type_str} model, IC-Light is only compatible with SD 1.5 models.") + + print("LoadAndApplyICLightUnet: Checking IC-Light Unet path") + model_full_path = folder_paths.get_full_path("unet", model_path) + if not os.path.exists(model_full_path): + raise Exception("Invalid model path") + else: + print("LoadAndApplyICLightUnet: Loading IC-Light Unet weights") + model_clone = model.clone() + + iclight_state_dict = load_torch_file(model_full_path) + + print("LoadAndApplyICLightUnet: Attempting to add patches with IC-Light Unet weights") + try: + if 'conv_in.weight' in iclight_state_dict: + iclight_state_dict = convert_iclight_unet(iclight_state_dict) + in_channels = 
iclight_state_dict["diffusion_model.input_blocks.0.0.weight"].shape[1] + for key in iclight_state_dict: + model_clone.add_patches({key: (iclight_state_dict[key],)}, 1.0, 1.0) + else: + for key in iclight_state_dict: + model_clone.add_patches({"diffusion_model." + key: (iclight_state_dict[key],)}, 1.0, 1.0) + + in_channels = iclight_state_dict["input_blocks.0.0.weight"].shape[1] + + except: + raise Exception("Could not patch model") + print("LoadAndApplyICLightUnet: Added LoadICLightUnet patches") + + #Patch ComfyUI's LoRA weight application to accept multi-channel inputs. Thanks @huchenlei + try: + if hasattr(lora, 'calculate_weight'): + lora.calculate_weight = calculate_weight_adjust_channel(lora.calculate_weight) + else: + raise Exception("IC-Light: The 'calculate_weight' function does not exist in 'lora'") + except Exception as e: + raise Exception(f"IC-Light: Could not patch calculate_weight - {str(e)}") + + # Mimic the existing IP2P class to enable extra_conds + def bound_extra_conds(self, **kwargs): + return ICLight.extra_conds(self, **kwargs) + new_extra_conds = types.MethodType(bound_extra_conds, model_clone.model) + model_clone.add_object_patch("extra_conds", new_extra_conds) + + + model_clone.model.model_config.unet_config["in_channels"] = in_channels + + return (model_clone, ) + +import comfy +class ICLight: + def extra_conds(self, **kwargs): + out = {} + + image = kwargs.get("concat_latent_image", None) + noise = kwargs.get("noise", None) + device = kwargs["device"] + + model_in_channels = self.model_config.unet_config['in_channels'] + input_channels = image.shape[1] + 4 + + if model_in_channels != input_channels: + raise Exception(f"Input channels {input_channels} does not match model in_channels {model_in_channels}, 'opt_background' latent input should be used with the IC-Light 'fbc' model, and only with it") + + if image is None: + image = torch.zeros_like(noise) + + if image.shape[1:] != noise.shape[1:]: + image = comfy.utils.common_upscale(image.to(device), noise.shape[-1], noise.shape[-2], "bilinear", "center") + + image = comfy.utils.resize_to_batch_size(image, noise.shape[0]) + + process_image_in = lambda image: image + out['c_concat'] = comfy.conds.CONDNoiseShape(process_image_in(image)) + + adm = self.encode_adm(**kwargs) + if adm is not None: + out['y'] = comfy.conds.CONDRegular(adm) + return out + +class ICLightConditioning: + @classmethod + def INPUT_TYPES(s): + return {"required": {"positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "vae": ("VAE", ), + "foreground": ("LATENT", ), + "multiplier": ("FLOAT", {"default": 0.18215, "min": 0.0, "max": 1.0, "step": 0.001}), + }, + "optional": { + "opt_background": ("LATENT", ), + }, + } + + RETURN_TYPES = ("CONDITIONING","CONDITIONING","LATENT") + RETURN_NAMES = ("positive", "negative", "empty_latent") + FUNCTION = "encode" + CATEGORY = "IC-Light" + DESCRIPTION = """ + +Conditioning for the IC-Light model. +To use the "opt_background" input, you also need to use the +"fbc" version of the IC-Light models. 
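+The foreground latent (and the optional background latent) are concatenated along
+the channel dimension, scaled by "multiplier" and attached to the conditioning as
+"concat_latent_image".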
+ +""" + + def encode(self, positive, negative, vae, foreground, multiplier, opt_background=None): + samples_1 = foreground["samples"] + + if opt_background is not None: + samples_2 = opt_background["samples"] + + repeats_1 = samples_2.size(0) // samples_1.size(0) + repeats_2 = samples_1.size(0) // samples_2.size(0) + if samples_1.shape[1:] != samples_2.shape[1:]: + samples_2 = comfy.utils.common_upscale(samples_2, samples_1.shape[-1], samples_1.shape[-2], "bilinear", "disabled") + + # Repeat the tensors to match the larger batch size + if repeats_1 > 1: + samples_1 = samples_1.repeat(repeats_1, 1, 1, 1) + if repeats_2 > 1: + samples_2 = samples_2.repeat(repeats_2, 1, 1, 1) + + concat_latent = torch.cat((samples_1, samples_2), dim=1) + else: + concat_latent = samples_1 + + out_latent = torch.zeros_like(samples_1) + + out = [] + for conditioning in [positive, negative]: + c = [] + for t in conditioning: + d = t[1].copy() + d["concat_latent_image"] = concat_latent * multiplier + n = [t[0], d] + c.append(n) + out.append(c) + return (out[0], out[1], {"samples": out_latent}) + +class LightSource: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "light_position": ([member.value for member in LightPosition],), + "multiplier": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step": 0.001}), + "start_color": ("STRING", {"default": "#FFFFFF"}), + "end_color": ("STRING", {"default": "#000000"}), + "width": ("INT", { "default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 8, }), + "height": ("INT", { "default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 8, }), + }, + "optional": { + "batch_size": ("INT", { "default": 1, "min": 1, "max": 4096, "step": 1, }), + "prev_image": ("IMAGE",), + } + } + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "IC-Light" + DESCRIPTION = """ +Generates a gradient image that can be used +as a simple light source. The color can be +specified in RGB or hex format. +""" + + def execute(self, light_position, multiplier, start_color, end_color, width, height, batch_size=1, prev_image=None): + def toRgb(color): + if color.startswith('#') and len(color) == 7: # e.g. "#RRGGBB" + color_rgb =tuple(int(color[i:i+2], 16) for i in (1, 3, 5)) + else: # e.g. "255,255,255" + color_rgb = tuple(int(i) for i in color.split(',')) + return color_rgb + lightPosition = LightPosition(light_position) + start_color_rgb = toRgb(start_color) + end_color_rgb = toRgb(end_color) + image = generate_gradient_image(width, height, start_color_rgb, end_color_rgb, multiplier, lightPosition) + + image = image.astype(np.float32) / 255.0 + image = torch.from_numpy(image)[None,] + image = image.repeat(batch_size, 1, 1, 1) + if prev_image is not None: + image = torch.cat((prev_image, image), dim=0) + return (image,) + +class CalculateNormalsFromImages: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "images": ("IMAGE",), + "sigma": ("FLOAT", { "default": 10.0, "min": 0.01, "max": 100.0, "step": 0.01, }), + "center_input_range": ("BOOLEAN", { "default": False, }), + }, + "optional": { + "mask": ("MASK",), + } + } + + RETURN_TYPES = ("IMAGE", "IMAGE",) + RETURN_NAMES = ("normal", "divided",) + FUNCTION = "execute" + CATEGORY = "IC-Light" + DESCRIPTION = """ +Calculates normal map from different directional exposures. 
+Takes in 4 images as a batch: +left, right, bottom, top + +""" + + def execute(self, images, sigma, center_input_range, mask=None): + B, H, W, C = images.shape + repetitions = B // 4 + + if center_input_range: + images = images * 0.5 + 0.5 + if mask is not None: + if mask.shape[-2:] != images[0].shape[:-1]: + mask = mask.unsqueeze(0) + mask = F.interpolate(mask, size=(images.shape[1], images.shape[2]), mode="bilinear") + mask = mask.squeeze(0) + + + + normal_list = [] + divided_list = [] + iteration_counter = 0 + + for i in range(0, B, 4): # Loop over every 4 images + index = torch.arange(iteration_counter, B, repetitions) + rearranged_images = images[index] + images_np = rearranged_images.numpy().astype(np.float32) + + left = images_np[0] + right = images_np[1] + bottom = images_np[2] + top = images_np[3] + + ambient = (left + right + bottom + top) / 4.0 + + def safe_divide(a, b): + e = 1e-5 + return ((a + e) / (b + e)) - 1.0 + + left = safe_divide(left, ambient) + right = safe_divide(right, ambient) + bottom = safe_divide(bottom, ambient) + top = safe_divide(top, ambient) + + u = (right - left) * 0.5 + v = (top - bottom) * 0.5 + + u = np.mean(u, axis=2) + v = np.mean(v, axis=2) + h = (1.0 - u ** 2.0 - v ** 2.0).clip(0, 1e5) ** (0.5 * sigma) + z = np.zeros_like(h) + + normal = np.stack([u, v, h], axis=2) + normal /= np.sum(normal ** 2.0, axis=2, keepdims=True) ** 0.5 + if mask is not None: + matting = mask[iteration_counter].unsqueeze(0).numpy().astype(np.float32) + matting = matting[..., np.newaxis] + normal = normal * matting + np.stack([z, z, 1 - z], axis=2) + normal = torch.from_numpy(normal) + #normal = normal.unsqueeze(0) + else: + normal = normal + np.stack([z, z, 1 - z], axis=2) + normal = torch.from_numpy(normal).unsqueeze(0) + + iteration_counter += 1 + normal = (normal - normal.min()) / ((normal.max() - normal.min())) + normal_list.append(normal) + divided = np.stack([left, right, bottom, top]) + divided = torch.from_numpy(divided) + divided = (divided - divided.min()) / ((divided.max() - divided.min())) + divided = torch.max(divided, dim=3, keepdim=True)[0].repeat(1, 1, 1, 3) + divided_list.append(divided) + + normal_out = torch.cat(normal_list, dim=0) + divided_out = torch.cat(divided_list, dim=0) + + return (normal_out, divided_out, ) + +class LoadHDRImage: + @classmethod + def INPUT_TYPES(s): + input_dir = folder_paths.get_input_directory() + files = [f for f in os.listdir(input_dir) if os.path.isfile(os.path.join(input_dir, f))] + return {"required": + {"image": (sorted(files), {"image_upload": False}), + "exposures": ("STRING", {"default": "-2,-1,0,1,2"}), + }, + } + + CATEGORY = "IC-Light" + RETURN_TYPES = ("IMAGE", "MASK") + FUNCTION = "loadhdrimage" + DESCRIPTION = """ +Loads a .hdr image from the input directory. +Output is a batch of LDR images with the selected exposures. 
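+Exposures are given as a comma-separated list of integer EV offsets; each output
+frame is computed as clip(hdr * 2**exposure, 0, 1).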
+ +""" + def loadhdrimage(self, image, exposures): + import cv2 + image_path = folder_paths.get_annotated_filepath(image) + # Load the HDR image + hdr_image = cv2.imread(image_path, cv2.IMREAD_ANYDEPTH) + + exposures = list(map(int, exposures.split(","))) + if not isinstance(exposures, list): + exposures = [exposures] # Example exposure values + ldr_images_tensors = [] + + for exposure in exposures: + # Scale pixel values to simulate different exposures + ldr_image = np.clip(hdr_image * (2**exposure), 0, 1) + # Convert to 8-bit image (LDR) by scaling to 255 + ldr_image_8bit = np.uint8(ldr_image * 255) + # Convert BGR to RGB + ldr_image_8bit = cv2.cvtColor(ldr_image_8bit, cv2.COLOR_BGR2RGB) + # Convert the LDR image to a torch tensor + tensor_image = torch.from_numpy(ldr_image_8bit).float() + # Normalize the tensor to the range [0, 1] + tensor_image = tensor_image / 255.0 + # Change the tensor shape to (C, H, W) + tensor_image = tensor_image.permute(2, 0, 1) + # Add the tensor to the list + ldr_images_tensors.append(tensor_image) + + batch_tensors = torch.stack(ldr_images_tensors) + batch_tensors = batch_tensors.permute(0, 2, 3, 1) + + return batch_tensors, + +class BackgroundScaler: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "mask": ("MASK",), + "scale": ("FLOAT", {"default": 0.5, "min": -10.0, "max": 10.0, "step": 0.001}), + "invert": ("BOOLEAN", { "default": False, }), + } + } + + CATEGORY = "IC-Light" + RETURN_TYPES = ("IMAGE",) + FUNCTION = "apply" + DESCRIPTION = """ +Sets the masked area color in grayscale range. +""" + + def apply(self, image: torch.Tensor, mask: torch.Tensor, scale: float, invert: bool): + + # Validate inputs + if not isinstance(image, torch.Tensor) or not isinstance(mask, torch.Tensor): + raise ValueError("image and mask must be torch.Tensor types.") + if image.ndim != 4 or mask.ndim not in [3, 4]: + raise ValueError("image must be a 4D tensor, and mask must be a 3D or 4D tensor.") + + # Adjust mask dimensions if necessary + if mask.ndim == 3: + # [B, H, W] => [B, H, W, C=1] + mask = mask.unsqueeze(-1) + + if invert: + mask = 1 - mask + image_out = image * mask + (1 - mask) * scale + image_out = torch.clamp(image_out, 0, 1).cpu().float() + + return (image_out,) + +class DetailTransfer: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "target": ("IMAGE", ), + "source": ("IMAGE", ), + "mode": ([ + "add", + "multiply", + "screen", + "overlay", + "soft_light", + "hard_light", + "color_dodge", + "color_burn", + "difference", + "exclusion", + "divide", + + ], + {"default": "add"} + ), + "blur_sigma": ("FLOAT", {"default": 1.0, "min": 0.1, "max": 100.0, "step": 0.01}), + "blend_factor": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.001, "round": 0.001}), + }, + "optional": { + "mask": ("MASK", ), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "process" + CATEGORY = "IC-Light" + + def adjust_mask(self, mask, target_tensor): + # Add a channel dimension and repeat to match the channel number of the target tensor + if len(mask.shape) == 3: + mask = mask.unsqueeze(1) # Add a channel dimension + target_channels = target_tensor.shape[1] + mask = mask.expand(-1, target_channels, -1, -1) # Expand the channel dimension to match the target tensor's channels + + return mask + + + def process(self, target, source, mode, blur_sigma, blend_factor, mask=None): + B, H, W, C = target.shape + device = model_management.get_torch_device() + target_tensor = target.permute(0, 3, 1, 2).clone().to(device) + 
source_tensor = source.permute(0, 3, 1, 2).clone().to(device) + + if target.shape[1:] != source.shape[1:]: + source_tensor = comfy.utils.common_upscale(source_tensor, W, H, "bilinear", "disabled") + + if source.shape[0] < B: + source = source[0].unsqueeze(0).repeat(B, 1, 1, 1) + + kernel_size = int(6 * int(blur_sigma) + 1) + + gaussian_blur = transforms.GaussianBlur(kernel_size=(kernel_size, kernel_size), sigma=(blur_sigma, blur_sigma)) + + blurred_target = gaussian_blur(target_tensor) + blurred_source = gaussian_blur(source_tensor) + + if mode == "add": + tensor_out = (source_tensor - blurred_source) + blurred_target + elif mode == "multiply": + tensor_out = source_tensor * blurred_target + elif mode == "screen": + tensor_out = 1 - (1 - source_tensor) * (1 - blurred_target) + elif mode == "overlay": + tensor_out = torch.where(blurred_target < 0.5, 2 * source_tensor * blurred_target, 1 - 2 * (1 - source_tensor) * (1 - blurred_target)) + elif mode == "soft_light": + tensor_out = (1 - 2 * blurred_target) * source_tensor**2 + 2 * blurred_target * source_tensor + elif mode == "hard_light": + tensor_out = torch.where(source_tensor < 0.5, 2 * source_tensor * blurred_target, 1 - 2 * (1 - source_tensor) * (1 - blurred_target)) + elif mode == "difference": + tensor_out = torch.abs(blurred_target - source_tensor) + elif mode == "exclusion": + tensor_out = 0.5 - 2 * (blurred_target - 0.5) * (source_tensor - 0.5) + elif mode == "color_dodge": + tensor_out = blurred_target / (1 - source_tensor) + elif mode == "color_burn": + tensor_out = 1 - (1 - blurred_target) / source_tensor + elif mode == "divide": + tensor_out = (source_tensor / blurred_source) * blurred_target + else: + tensor_out = source_tensor + + tensor_out = torch.lerp(target_tensor, tensor_out, blend_factor) + if mask is not None: + # Call the function and pass in mask and target_tensor + mask = self.adjust_mask(mask, target_tensor) + mask = mask.to(device) + tensor_out = torch.lerp(target_tensor, tensor_out, mask) + tensor_out = torch.clamp(tensor_out, 0, 1) + tensor_out = tensor_out.permute(0, 2, 3, 1).cpu().float() + return (tensor_out,) + + +NODE_CLASS_MAPPINGS = { + "LoadAndApplyICLightUnet": LoadAndApplyICLightUnet, + "ICLightConditioning": ICLightConditioning, + "LightSource": LightSource, + "CalculateNormalsFromImages": CalculateNormalsFromImages, + "LoadHDRImage": LoadHDRImage, + "BackgroundScaler": BackgroundScaler, + "DetailTransfer": DetailTransfer +} +NODE_DISPLAY_NAME_MAPPINGS = { + "LoadAndApplyICLightUnet": "Load And Apply IC-Light", + "ICLightConditioning": "IC-Light Conditioning", + "LightSource": "Simple Light Source", + "CalculateNormalsFromImages": "Calculate Normals From Images", + "LoadHDRImage": "Load HDR Image", + "BackgroundScaler": "Background Scaler", + "DetailTransfer": "Detail Transfer" +} diff --git a/ComfyUI-IC-Light/pyproject.toml b/ComfyUI-IC-Light/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..a48776fc86984bdc6f497c76d00f96e2d2c5a5dd --- /dev/null +++ b/ComfyUI-IC-Light/pyproject.toml @@ -0,0 +1,14 @@ +[project] +name = "comfyui-ic-light" +description = "ComfyUI native nodes for IC-Light" +version = "1.0.1" +license = { text = "Apache License 2.0" } +dependencies = ["opencv-python"] + +[project.urls] +Repository = "https://github.com/kijai/ComfyUI-IC-Light" + +[tool.comfy] +PublisherId = "kijai" +DisplayName = "ComfyUI-IC-Light" +Icon = "" diff --git a/ComfyUI-IC-Light/requirements.txt b/ComfyUI-IC-Light/requirements.txt new file mode 100644 index 
0000000000000000000000000000000000000000..1db7aea116e2b2026e2b660df58af81d997599e6 --- /dev/null +++ b/ComfyUI-IC-Light/requirements.txt @@ -0,0 +1 @@ +opencv-python \ No newline at end of file diff --git a/ComfyUI-IC-Light/utils/convert_unet.py b/ComfyUI-IC-Light/utils/convert_unet.py new file mode 100644 index 0000000000000000000000000000000000000000..7022f076e6dd831f221047710dd31cccb565d744 --- /dev/null +++ b/ComfyUI-IC-Light/utils/convert_unet.py @@ -0,0 +1,226 @@ +UNET_MAP_ATTENTIONS = { + "proj_in.weight", + "proj_in.bias", + "proj_out.weight", + "proj_out.bias", + "norm.weight", + "norm.bias", +} + +TRANSFORMER_BLOCKS = { + "norm1.weight", + "norm1.bias", + "norm2.weight", + "norm2.bias", + "norm3.weight", + "norm3.bias", + "attn1.to_q.weight", + "attn1.to_k.weight", + "attn1.to_v.weight", + "attn1.to_out.0.weight", + "attn1.to_out.0.bias", + "attn2.to_q.weight", + "attn2.to_k.weight", + "attn2.to_v.weight", + "attn2.to_out.0.weight", + "attn2.to_out.0.bias", + "ff.net.0.proj.weight", + "ff.net.0.proj.bias", + "ff.net.2.weight", + "ff.net.2.bias", +} + +UNET_MAP_RESNET = { + "in_layers.2.weight": "conv1.weight", + "in_layers.2.bias": "conv1.bias", + "emb_layers.1.weight": "time_emb_proj.weight", + "emb_layers.1.bias": "time_emb_proj.bias", + "out_layers.3.weight": "conv2.weight", + "out_layers.3.bias": "conv2.bias", + "skip_connection.weight": "conv_shortcut.weight", + "skip_connection.bias": "conv_shortcut.bias", + "in_layers.0.weight": "norm1.weight", + "in_layers.0.bias": "norm1.bias", + "out_layers.0.weight": "norm2.weight", + "out_layers.0.bias": "norm2.bias", +} + +UNET_MAP_BASIC = { + ("label_emb.0.0.weight", "class_embedding.linear_1.weight"), + ("label_emb.0.0.bias", "class_embedding.linear_1.bias"), + ("label_emb.0.2.weight", "class_embedding.linear_2.weight"), + ("label_emb.0.2.bias", "class_embedding.linear_2.bias"), + ("label_emb.0.0.weight", "add_embedding.linear_1.weight"), + ("label_emb.0.0.bias", "add_embedding.linear_1.bias"), + ("label_emb.0.2.weight", "add_embedding.linear_2.weight"), + ("label_emb.0.2.bias", "add_embedding.linear_2.bias"), + ("input_blocks.0.0.weight", "conv_in.weight"), + ("input_blocks.0.0.bias", "conv_in.bias"), + ("out.0.weight", "conv_norm_out.weight"), + ("out.0.bias", "conv_norm_out.bias"), + ("out.2.weight", "conv_out.weight"), + ("out.2.bias", "conv_out.bias"), + ("time_embed.0.weight", "time_embedding.linear_1.weight"), + ("time_embed.0.bias", "time_embedding.linear_1.bias"), + ("time_embed.2.weight", "time_embedding.linear_2.weight"), + ("time_embed.2.bias", "time_embedding.linear_2.bias") +} +TEMPORAL_TRANSFORMER_BLOCKS = { + "norm_in.weight", + "norm_in.bias", + "ff_in.net.0.proj.weight", + "ff_in.net.0.proj.bias", + "ff_in.net.2.weight", + "ff_in.net.2.bias", +} +TEMPORAL_TRANSFORMER_BLOCKS.update(TRANSFORMER_BLOCKS) + + +TEMPORAL_UNET_MAP_ATTENTIONS = { + "time_mixer.mix_factor", +} +TEMPORAL_UNET_MAP_ATTENTIONS.update(UNET_MAP_ATTENTIONS) + + +TEMPORAL_TRANSFORMER_MAP = { + "time_pos_embed.0.weight": "time_pos_embed.linear_1.weight", + "time_pos_embed.0.bias": "time_pos_embed.linear_1.bias", + "time_pos_embed.2.weight": "time_pos_embed.linear_2.weight", + "time_pos_embed.2.bias": "time_pos_embed.linear_2.bias", +} + + +TEMPORAL_RESNET = { + "time_mixer.mix_factor", +} + + +unet_config = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, 'adm_in_channels': None, + 'in_channels': 8, 'model_channels': 320, 'num_res_blocks': [2, 2, 2, 2], 'transformer_depth': [1, 1, 1, 1, 
1, 1, 0, 0], + 'channel_mult': [1, 2, 4, 4], 'transformer_depth_middle': 1, 'use_linear_in_transformer': False, 'context_dim': 768, 'num_heads': 8, + 'transformer_depth_output': [1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + +def convert_iclight_unet(state_dict): + num_res_blocks = unet_config["num_res_blocks"] + channel_mult = unet_config["channel_mult"] + transformer_depth = unet_config["transformer_depth"][:] + transformer_depth_output = unet_config["transformer_depth_output"][:] + num_blocks = len(channel_mult) + + transformers_mid = unet_config.get("transformer_depth_middle", None) + + diffusers_unet_map = {} + for x in range(num_blocks): + n = 1 + (num_res_blocks[x] + 1) * x + for i in range(num_res_blocks[x]): + for b in TEMPORAL_RESNET: + diffusers_unet_map["down_blocks.{}.resnets.{}.{}".format(x, i, b)] = "input_blocks.{}.0.{}".format(n, b) + for b in UNET_MAP_RESNET: + diffusers_unet_map["down_blocks.{}.resnets.{}.spatial_res_block.{}".format(x, i, UNET_MAP_RESNET[b])] = "input_blocks.{}.0.{}".format(n, b) + diffusers_unet_map["down_blocks.{}.resnets.{}.temporal_res_block.{}".format(x, i, UNET_MAP_RESNET[b])] = "input_blocks.{}.0.time_stack.{}".format(n, b) + diffusers_unet_map["down_blocks.{}.resnets.{}.{}".format(x, i, UNET_MAP_RESNET[b])] = "input_blocks.{}.0.{}".format(n, b) + num_transformers = transformer_depth.pop(0) + if num_transformers > 0: + for b in TEMPORAL_UNET_MAP_ATTENTIONS: + diffusers_unet_map["down_blocks.{}.attentions.{}.{}".format(x, i, b)] = "input_blocks.{}.1.{}".format(n, b) + for b in TEMPORAL_TRANSFORMER_MAP: + diffusers_unet_map["down_blocks.{}.attentions.{}.{}".format(x, i, TEMPORAL_TRANSFORMER_MAP[b])] = "input_blocks.{}.1.{}".format(n, b) + for t in range(num_transformers): + for b in TRANSFORMER_BLOCKS: + diffusers_unet_map["down_blocks.{}.attentions.{}.transformer_blocks.{}.{}".format(x, i, t, b)] = "input_blocks.{}.1.transformer_blocks.{}.{}".format(n, t, b) + for b in TEMPORAL_TRANSFORMER_BLOCKS: + diffusers_unet_map["down_blocks.{}.attentions.{}.temporal_transformer_blocks.{}.{}".format(x, i, t, b)] = "input_blocks.{}.1.time_stack.{}.{}".format(n, t, b) + n += 1 + for k in ["weight", "bias"]: + diffusers_unet_map["down_blocks.{}.downsamplers.0.conv.{}".format(x, k)] = "input_blocks.{}.0.op.{}".format(n, k) + + i = 0 + for b in TEMPORAL_UNET_MAP_ATTENTIONS: + diffusers_unet_map["mid_block.attentions.{}.{}".format(i, b)] = "middle_block.1.{}".format(b) + for b in TEMPORAL_TRANSFORMER_MAP: + diffusers_unet_map["mid_block.attentions.{}.{}".format(i, TEMPORAL_TRANSFORMER_MAP[b])] = "middle_block.1.{}".format(b) + for t in range(transformers_mid): + for b in TRANSFORMER_BLOCKS: + diffusers_unet_map["mid_block.attentions.{}.transformer_blocks.{}.{}".format(i, t, b)] = "middle_block.1.transformer_blocks.{}.{}".format(t, b) + for b in TEMPORAL_TRANSFORMER_BLOCKS: + diffusers_unet_map["mid_block.attentions.{}.temporal_transformer_blocks.{}.{}".format(i, t, b)] = "middle_block.1.time_stack.{}.{}".format(t, b) + + for i, n in enumerate([0, 2]): + for b in TEMPORAL_RESNET: + diffusers_unet_map["mid_block.resnets.{}.{}".format(i, b)] = "middle_block.{}.{}".format(n, b) + for b in UNET_MAP_RESNET: + diffusers_unet_map["mid_block.resnets.{}.spatial_res_block.{}".format(i, UNET_MAP_RESNET[b])] = "middle_block.{}.{}".format(n, b) + diffusers_unet_map["mid_block.resnets.{}.temporal_res_block.{}".format(i, UNET_MAP_RESNET[b])] = "middle_block.{}.time_stack.{}".format(n, b) + 
diffusers_unet_map["mid_block.resnets.{}.{}".format(i, UNET_MAP_RESNET[b])] = "middle_block.{}.{}".format(n, b) + + num_res_blocks = list(reversed(num_res_blocks)) + for x in range(num_blocks): + n = (num_res_blocks[x] + 1) * x + l = num_res_blocks[x] + 1 + for i in range(l): + for b in TEMPORAL_RESNET: + diffusers_unet_map["up_blocks.{}.resnets.{}.{}".format(x, i, b)] = "output_blocks.{}.0.{}".format(n, b) + c = 0 + for b in UNET_MAP_RESNET: + diffusers_unet_map["up_blocks.{}.resnets.{}.{}".format(x, i, UNET_MAP_RESNET[b])] = "output_blocks.{}.0.{}".format(n, b) + diffusers_unet_map["up_blocks.{}.resnets.{}.spatial_res_block.{}".format(x, i, UNET_MAP_RESNET[b])] = "output_blocks.{}.0.{}".format(n, b) + diffusers_unet_map["up_blocks.{}.resnets.{}.temporal_res_block.{}".format(x, i, UNET_MAP_RESNET[b])] = "output_blocks.{}.0.time_stack.{}".format(n, b) + for b in TEMPORAL_RESNET: + diffusers_unet_map["up_blocks.{}.resnets.{}".format(i, b)] = "output_blocks.{}.{}".format(n, b) + c += 1 + num_transformers = transformer_depth_output.pop() + if num_transformers > 0: + c += 1 + for b in UNET_MAP_ATTENTIONS: + diffusers_unet_map["up_blocks.{}.attentions.{}.{}".format(x, i, b)] = "output_blocks.{}.1.{}".format(n, b) + for b in TEMPORAL_TRANSFORMER_MAP: + diffusers_unet_map["up_blocks.{}.attentions.{}.{}".format(x, i, TEMPORAL_TRANSFORMER_MAP[b])] = "output_blocks.{}.1.{}".format(n, b) + for b in TEMPORAL_UNET_MAP_ATTENTIONS: + diffusers_unet_map["up_blocks.{}.attentions.{}.{}".format(x, i, b)] = "output_blocks.{}.1.{}".format(n, b) + + for t in range(num_transformers): + for b in TRANSFORMER_BLOCKS: + diffusers_unet_map["up_blocks.{}.attentions.{}.transformer_blocks.{}.{}".format(x, i, t, b)] = "output_blocks.{}.1.transformer_blocks.{}.{}".format(n, t, b) + for b in TEMPORAL_TRANSFORMER_BLOCKS: + diffusers_unet_map["up_blocks.{}.attentions.{}.temporal_transformer_blocks.{}.{}".format(x, i, t, b)] = "output_blocks.{}.1.time_stack.{}.{}".format(n, t, b) + if i == l - 1: + for k in ["weight", "bias"]: + diffusers_unet_map["up_blocks.{}.upsamplers.0.conv.{}".format(x, k)] = "output_blocks.{}.{}.conv.{}".format(n, c, k) + n += 1 + + for k in UNET_MAP_BASIC: + diffusers_unet_map[k[1]] = k[0] + + unet_state_dict = state_dict + diffusers_keys = diffusers_unet_map + + new_sd = {} + for k in diffusers_keys: + if k in unet_state_dict: + new_sd[diffusers_keys[k]] = unet_state_dict.pop(k) + + leftover_keys = unet_state_dict.keys() + if len(leftover_keys) > 0: + spatial_leftover_keys = [] + temporal_leftover_keys = [] + other_leftover_keys = [] + for key in leftover_keys: + if "spatial" in key: + spatial_leftover_keys.append(key) + elif "temporal" in key: + temporal_leftover_keys.append(key) + else: + other_leftover_keys.append(key) + print("spatial_leftover_keys:") + for key in spatial_leftover_keys: + print(key) + print("temporal_leftover_keys:") + for key in temporal_leftover_keys: + print(key) + print("other_leftover_keys:") + for key in other_leftover_keys: + print(key) + + new_sd = {"diffusion_model." 
+ k: v for k, v in new_sd.items()} + return new_sd \ No newline at end of file diff --git a/ComfyUI-IC-Light/utils/image.py b/ComfyUI-IC-Light/utils/image.py new file mode 100644 index 0000000000000000000000000000000000000000..6f148365a8495cc8e469acb32e6523a43ea01927 --- /dev/null +++ b/ComfyUI-IC-Light/utils/image.py @@ -0,0 +1,68 @@ +### Light Source +import numpy as np +from enum import Enum + +class LightPosition(Enum): + LEFT = "Left Light" + RIGHT = "Right Light" + TOP = "Top Light" + BOTTOM = "Bottom Light" + TOP_LEFT = "Top Left Light" + TOP_RIGHT = "Top Right Light" + BOTTOM_LEFT = "Bottom Left Light" + BOTTOM_RIGHT = "Bottom Right Light" + +def generate_gradient_image(width:int, height:int, start_color: tuple, end_color: tuple, multiplier: float, lightPosition:LightPosition): + """ + Generate a gradient image with a light source effect. + + Parameters: + width (int): Width of the image. + height (int): Height of the image. + start_color: Starting color RGB of the gradient. + end_color: Ending color RGB of the gradient. + multiplier: Weight of light. + lightPosition (LightPosition): Position of the light source. + + Returns: + np.array: 2D gradient image array. + """ + # Create a gradient from 0 to 1 and apply multiplier + if lightPosition == LightPosition.LEFT: + gradient = np.tile(np.linspace(0, 1, width)**multiplier, (height, 1)) + elif lightPosition == LightPosition.RIGHT: + gradient = np.tile(np.linspace(1, 0, width)**multiplier, (height, 1)) + elif lightPosition == LightPosition.TOP: + gradient = np.tile(np.linspace(0, 1, height)**multiplier, (width, 1)).T + elif lightPosition == LightPosition.BOTTOM: + gradient = np.tile(np.linspace(1, 0, height)**multiplier, (width, 1)).T + elif lightPosition == LightPosition.BOTTOM_RIGHT: + x = np.linspace(1, 0, width)**multiplier + y = np.linspace(1, 0, height)**multiplier + x_mesh, y_mesh = np.meshgrid(x, y) + gradient = np.sqrt(x_mesh**2 + y_mesh**2) / np.sqrt(2.0) + elif lightPosition == LightPosition.BOTTOM_LEFT: + x = np.linspace(0, 1, width)**multiplier + y = np.linspace(1, 0, height)**multiplier + x_mesh, y_mesh = np.meshgrid(x, y) + gradient = np.sqrt(x_mesh**2 + y_mesh**2) / np.sqrt(2.0) + elif lightPosition == LightPosition.TOP_RIGHT: + x = np.linspace(1, 0, width)**multiplier + y = np.linspace(0, 1, height)**multiplier + x_mesh, y_mesh = np.meshgrid(x, y) + gradient = np.sqrt(x_mesh**2 + y_mesh**2) / np.sqrt(2.0) + elif lightPosition == LightPosition.TOP_LEFT: + x = np.linspace(0, 1, width)**multiplier + y = np.linspace(0, 1, height)**multiplier + x_mesh, y_mesh = np.meshgrid(x, y) + gradient = np.sqrt(x_mesh**2 + y_mesh**2) / np.sqrt(2.0) + else: + raise ValueError(f"Unsupported position. 
Choose from {', '.join([member.value for member in LightPosition])}.") + + # Interpolate between start_color and end_color based on the gradient + gradient_img = np.zeros((height, width, 3), dtype=np.float32) + for i in range(3): + gradient_img[..., i] = start_color[i] + (end_color[i] - start_color[i]) * gradient + + gradient_img = np.clip(gradient_img, 0, 255).astype(np.uint8) + return gradient_img \ No newline at end of file diff --git a/ComfyUI-IC-Light/utils/patches.py b/ComfyUI-IC-Light/utils/patches.py new file mode 100644 index 0000000000000000000000000000000000000000..0c6dd97660366b119e6a402c2e3510b88e718aae --- /dev/null +++ b/ComfyUI-IC-Light/utils/patches.py @@ -0,0 +1,64 @@ + +#credit to huchenlei for this +#from https://github.com/huchenlei/ComfyUI-layerdiffuse/blob/151f7460bbc9d7437d4f0010f21f80178f7a84a6/layered_diffusion.py#L34-L96 + +import torch +import functools +from comfy.model_patcher import ModelPatcher +import comfy.model_management + +def calculate_weight_adjust_channel(func): + """Patches ComfyUI's LoRA weight application to accept multi-channel inputs.""" + + @functools.wraps(func) + def calculate_weight(patches, weight: torch.Tensor, key: str) -> torch.Tensor: + weight = func(patches, weight, key) + + for p in patches: + alpha = p[0] + v = p[1] + + # The recursion call should be handled in the main func call. + if isinstance(v, list): + continue + + if len(v) == 1: + patch_type = "diff" + elif len(v) == 2: + patch_type = v[0] + v = v[1] + + if patch_type == "diff": + w1 = v[0] + if all( + ( + alpha != 0.0, + w1.shape != weight.shape, + w1.ndim == weight.ndim == 4, + ) + ): + new_shape = [max(n, m) for n, m in zip(weight.shape, w1.shape)] + print( + f"IC-Light: Merged with {key} channel changed from {weight.shape} to {new_shape}" + ) + new_diff = alpha * comfy.model_management.cast_to_device( + w1, weight.device, weight.dtype + ) + new_weight = torch.zeros(size=new_shape).to(weight) + new_weight[ + : weight.shape[0], + : weight.shape[1], + : weight.shape[2], + : weight.shape[3], + ] = weight + new_weight[ + : new_diff.shape[0], + : new_diff.shape[1], + : new_diff.shape[2], + : new_diff.shape[3], + ] += new_diff + new_weight = new_weight.contiguous().clone() + weight = new_weight + return weight + + return calculate_weight \ No newline at end of file diff --git a/ComfyUI-KJNodes/LICENSE b/ComfyUI-KJNodes/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..f288702d2fa16d3cdf0035b15a9fcbc552cd88e7 --- /dev/null +++ b/ComfyUI-KJNodes/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. 
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. 
+ + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. 
This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. 
This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. 
+ + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. 
+ + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. 
+ + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. 
If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. 
Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. diff --git a/ComfyUI-KJNodes/README.md b/ComfyUI-KJNodes/README.md new file mode 100644 index 0000000000000000000000000000000000000000..d5aafe6ec23094b3ee2257d5864b5de14f99bb76 --- /dev/null +++ b/ComfyUI-KJNodes/README.md @@ -0,0 +1,65 @@ +# KJNodes for ComfyUI + +Various quality of life and masking related -nodes and scripts made by combining functionality of existing nodes for ComfyUI. + +I know I'm bad at documentation, especially this project that has grown from random practice nodes to... too many lines in one file. +I have however started to add descriptions to the nodes themselves, there's a small ? you can click for info what the node does. 
+This is still work in progress, like everything else. + +# Installation +1. Clone this repo into the `custom_nodes` folder. +2. Install dependencies: `pip install -r requirements.txt` + or if you use the portable install, run this in the ComfyUI_windows_portable folder: + + `python_embedded\python.exe -m pip install -r ComfyUI\custom_nodes\ComfyUI-KJNodes\requirements.txt` + + +## Javascript + +### browserstatus.js +Sets the favicon to a green circle when not processing anything, sets it to red when processing, and shows the progress percentage and the length of your queue. +Off by default, needs to be enabled from the options; overrides the Custom-Scripts favicon when enabled. + +## Nodes: + +### Set/Get + +JavaScript nodes to set and get constants to reduce unnecessary lines. Takes in and returns anything, purely visual nodes. +The right-click menu of these nodes now has an option to visualize the paths, as well as an option to jump to the corresponding node on the other end. + +**Known limitations**: + - Will not work with any node that dynamically sets its outputs, such as reroute or another Set/Get node + - Will not work when directly connected to a bypassed node + - Other possible conflicts with JavaScript-based nodes. + +### ColorToMask + +RGB color value to mask, works with batches and AnimateDiff. + +### ConditioningMultiCombine + +Combine any number of conditions, saves space. + +### ConditioningSetMaskAndCombine + +Mask and combine two sets of conditions, saves space. + +### GrowMaskWithBlur + +Grows or shrinks (with negative values) a mask, with an option to invert the input; returns the mask and the inverted mask. Additionally blurs the mask; this is a slow operation, especially with big batches. + +### RoundMask + +![image](https://github.com/kijai/ComfyUI-KJNodes/assets/40791699/52c85202-f74e-4b96-9dac-c8bda5ddcc40) + +### WidgetToString +Outputs the value of a widget on any node as a string. +![example of use](docs/images/2024-04-03_20_49_29-ComfyUI.png) + +Enable node ID display from the Manager menu to get the ID of the node you want to read a widget from: +![enable node id display](docs/images/319121636-706b5081-9120-4a29-bd76-901691ada688.png) + +Use the node ID of the target node, and add the name of the widget to read from: +![use node id and widget name](docs/images/319121566-05f66385-7568-4b1f-8bbc-11053660b02f.png) + +Recreating or reloading the target node will change its ID, and the WidgetToString node will no longer be able to find it until you update the node ID value with the new ID.
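The `__init__.py` added in the next hunk registers every node through a single `NODE_CONFIG` dict and a small `generate_node_mappings` helper, rather than two hand-maintained mapping dicts. A minimal, self-contained sketch of that registration pattern is shown below; the `ExampleNode` class and entry names are placeholders for illustration, not nodes from this repository.

```python
# Sketch of the NODE_CONFIG registration pattern used in ComfyUI-KJNodes/__init__.py.
# ExampleNode is a placeholder class, not a real node from the repository.

class ExampleNode:
    """Stand-in for a ComfyUI node class."""

NODE_CONFIG = {
    # key: internal node identifier; "class": implementing class;
    # "name": optional display name shown in the ComfyUI menus.
    "ExampleNode": {"class": ExampleNode, "name": "Example Node"},
    "ExampleNodeNoName": {"class": ExampleNode},  # display name falls back to the class __name__
}

def generate_node_mappings(node_config):
    node_class_mappings = {}
    node_display_name_mappings = {}
    for node_name, node_info in node_config.items():
        node_class_mappings[node_name] = node_info["class"]
        node_display_name_mappings[node_name] = node_info.get(
            "name", node_info["class"].__name__
        )
    return node_class_mappings, node_display_name_mappings

NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS = generate_node_mappings(NODE_CONFIG)

print(NODE_DISPLAY_NAME_MAPPINGS)
# {'ExampleNode': 'Example Node', 'ExampleNodeNoName': 'ExampleNode'}
```

ComfyUI reads `NODE_CLASS_MAPPINGS` and `NODE_DISPLAY_NAME_MAPPINGS` from a custom node package's `__init__.py` at startup, so anything added to `NODE_CONFIG` is picked up automatically.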
diff --git a/ComfyUI-KJNodes/__init__.py b/ComfyUI-KJNodes/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1df5bca4be0d1f76bacad3609caba41f329033f9 --- /dev/null +++ b/ComfyUI-KJNodes/__init__.py @@ -0,0 +1,173 @@ +from .nodes.nodes import * +from .nodes.curve_nodes import * +from .nodes.batchcrop_nodes import * +from .nodes.audioscheduler_nodes import * +from .nodes.image_nodes import * +from .nodes.intrinsic_lora_nodes import * +from .nodes.mask_nodes import * +NODE_CONFIG = { + #constants + "INTConstant": {"class": INTConstant, "name": "INT Constant"}, + "FloatConstant": {"class": FloatConstant, "name": "Float Constant"}, + "StringConstant": {"class": StringConstant, "name": "String Constant"}, + "StringConstantMultiline": {"class": StringConstantMultiline, "name": "String Constant Multiline"}, + #conditioning + "ConditioningMultiCombine": {"class": ConditioningMultiCombine, "name": "Conditioning Multi Combine"}, + "ConditioningSetMaskAndCombine": {"class": ConditioningSetMaskAndCombine, "name": "ConditioningSetMaskAndCombine"}, + "ConditioningSetMaskAndCombine3": {"class": ConditioningSetMaskAndCombine3, "name": "ConditioningSetMaskAndCombine3"}, + "ConditioningSetMaskAndCombine4": {"class": ConditioningSetMaskAndCombine4, "name": "ConditioningSetMaskAndCombine4"}, + "ConditioningSetMaskAndCombine5": {"class": ConditioningSetMaskAndCombine5, "name": "ConditioningSetMaskAndCombine5"}, + "CondPassThrough": {"class": CondPassThrough}, + #masking + "DownloadAndLoadCLIPSeg": {"class": DownloadAndLoadCLIPSeg, "name": "(Down)load CLIPSeg"}, + "BatchCLIPSeg": {"class": BatchCLIPSeg, "name": "Batch CLIPSeg"}, + "ColorToMask": {"class": ColorToMask, "name": "Color To Mask"}, + "CreateGradientMask": {"class": CreateGradientMask, "name": "Create Gradient Mask"}, + "CreateTextMask": {"class": CreateTextMask, "name": "Create Text Mask"}, + "CreateAudioMask": {"class": CreateAudioMask, "name": "Create Audio Mask"}, + "CreateFadeMask": {"class": CreateFadeMask, "name": "Create Fade Mask"}, + "CreateFadeMaskAdvanced": {"class": CreateFadeMaskAdvanced, "name": "Create Fade Mask Advanced"}, + "CreateFluidMask": {"class": CreateFluidMask, "name": "Create Fluid Mask"}, + "CreateShapeMask": {"class": CreateShapeMask, "name": "Create Shape Mask"}, + "CreateVoronoiMask": {"class": CreateVoronoiMask, "name": "Create Voronoi Mask"}, + "CreateMagicMask": {"class": CreateMagicMask, "name": "Create Magic Mask"}, + "GetMaskSizeAndCount": {"class": GetMaskSizeAndCount, "name": "Get Mask Size & Count"}, + "GrowMaskWithBlur": {"class": GrowMaskWithBlur, "name": "Grow Mask With Blur"}, + "MaskBatchMulti": {"class": MaskBatchMulti, "name": "Mask Batch Multi"}, + "OffsetMask": {"class": OffsetMask, "name": "Offset Mask"}, + "RemapMaskRange": {"class": RemapMaskRange, "name": "Remap Mask Range"}, + "ResizeMask": {"class": ResizeMask, "name": "Resize Mask"}, + "RoundMask": {"class": RoundMask, "name": "Round Mask"}, + #images + "AddLabel": {"class": AddLabel, "name": "Add Label"}, + "ColorMatch": {"class": ColorMatch, "name": "Color Match"}, + "CrossFadeImages": {"class": CrossFadeImages, "name": "Cross Fade Images"}, + "GetImagesFromBatchIndexed": {"class": GetImagesFromBatchIndexed, "name": "Get Images From Batch Indexed"}, + "GetImageRangeFromBatch": {"class": GetImageRangeFromBatch, "name": "Get Image or Mask Range From Batch"}, + "GetImageSizeAndCount": {"class": GetImageSizeAndCount, "name": "Get Image Size & Count"}, + "ImageAndMaskPreview": {"class": ImageAndMaskPreview}, + 
"ImageAddMulti": {"class": ImageAddMulti, "name": "Image Add Multi"}, + "ImageBatchMulti": {"class": ImageBatchMulti, "name": "Image Batch Multi"}, + "ImageBatchRepeatInterleaving": {"class": ImageBatchRepeatInterleaving}, + "ImageBatchTestPattern": {"class": ImageBatchTestPattern, "name": "Image Batch Test Pattern"}, + "ImageConcanate": {"class": ImageConcanate, "name": "Image Concatenate"}, + "ImageConcatFromBatch": {"class": ImageConcatFromBatch, "name": "Image Concatenate From Batch"}, + "ImageConcatMulti": {"class": ImageConcatMulti, "name": "Image Concatenate Multi"}, + "ImageGrabPIL": {"class": ImageGrabPIL, "name": "Image Grab PIL"}, + "ImageGridComposite2x2": {"class": ImageGridComposite2x2, "name": "Image Grid Composite 2x2"}, + "ImageGridComposite3x3": {"class": ImageGridComposite3x3, "name": "Image Grid Composite 3x3"}, + "ImageGridtoBatch": {"class": ImageGridtoBatch, "name": "Image Grid To Batch"}, + "ImageNormalize_Neg1_To_1": {"class": ImageNormalize_Neg1_To_1, "name": "Image Normalize -1 to 1"}, + "ImagePass": {"class": ImagePass}, + "ImagePadForOutpaintMasked": {"class": ImagePadForOutpaintMasked, "name": "Image Pad For Outpaint Masked"}, + "ImagePadForOutpaintTargetSize": {"class": ImagePadForOutpaintTargetSize, "name": "Image Pad For Outpaint Target Size"}, + "ImageResizeKJ": {"class": ImageResizeKJ, "name": "Resize Image"}, + "ImageUpscaleWithModelBatched": {"class": ImageUpscaleWithModelBatched, "name": "Image Upscale With Model Batched"}, + "InsertImagesToBatchIndexed": {"class": InsertImagesToBatchIndexed, "name": "Insert Images To Batch Indexed"}, + "LoadAndResizeImage": {"class": LoadAndResizeImage, "name": "Load & Resize Image"}, + "LoadImagesFromFolderKJ": {"class": LoadImagesFromFolderKJ, "name": "Load Images From Folder (KJ)"}, + "MergeImageChannels": {"class": MergeImageChannels, "name": "Merge Image Channels"}, + "PreviewAnimation": {"class": PreviewAnimation, "name": "Preview Animation"}, + "RemapImageRange": {"class": RemapImageRange, "name": "Remap Image Range"}, + "ReverseImageBatch": {"class": ReverseImageBatch, "name": "Reverse Image Batch"}, + "ReplaceImagesInBatch": {"class": ReplaceImagesInBatch, "name": "Replace Images In Batch"}, + "SaveImageWithAlpha": {"class": SaveImageWithAlpha, "name": "Save Image With Alpha"}, + "SplitImageChannels": {"class": SplitImageChannels, "name": "Split Image Channels"}, + #batch cropping + "BatchCropFromMask": {"class": BatchCropFromMask, "name": "Batch Crop From Mask"}, + "BatchCropFromMaskAdvanced": {"class": BatchCropFromMaskAdvanced, "name": "Batch Crop From Mask Advanced"}, + "FilterZeroMasksAndCorrespondingImages": {"class": FilterZeroMasksAndCorrespondingImages}, + "InsertImageBatchByIndexes": {"class": InsertImageBatchByIndexes, "name": "Insert Image Batch By Indexes"}, + "BatchUncrop": {"class": BatchUncrop, "name": "Batch Uncrop"}, + "BatchUncropAdvanced": {"class": BatchUncropAdvanced, "name": "Batch Uncrop Advanced"}, + "SplitBboxes": {"class": SplitBboxes, "name": "Split Bboxes"}, + "BboxToInt": {"class": BboxToInt, "name": "Bbox To Int"}, + "BboxVisualize": {"class": BboxVisualize, "name": "Bbox Visualize"}, + #noise + "GenerateNoise": {"class": GenerateNoise, "name": "Generate Noise"}, + "FlipSigmasAdjusted": {"class": FlipSigmasAdjusted, "name": "Flip Sigmas Adjusted"}, + "InjectNoiseToLatent": {"class": InjectNoiseToLatent, "name": "Inject Noise To Latent"}, + "CustomSigmas": {"class": CustomSigmas, "name": "Custom Sigmas"}, + #utility + "WidgetToString": {"class": WidgetToString, "name": "Widget To 
String"},
+    "DummyOut": {"class": DummyOut, "name": "Dummy Out"},
+    "GetLatentsFromBatchIndexed": {"class": GetLatentsFromBatchIndexed, "name": "Get Latents From Batch Indexed"},
+    "ScaleBatchPromptSchedule": {"class": ScaleBatchPromptSchedule, "name": "Scale Batch Prompt Schedule"},
+    "CameraPoseVisualizer": {"class": CameraPoseVisualizer, "name": "Camera Pose Visualizer"},
+    "JoinStrings": {"class": JoinStrings, "name": "Join Strings"},
+    "JoinStringMulti": {"class": JoinStringMulti, "name": "Join String Multi"},
+    "SomethingToString": {"class": SomethingToString, "name": "Something To String"},
+    "Sleep": {"class": Sleep, "name": "Sleep"},
+    "VRAM_Debug": {"class": VRAM_Debug, "name": "VRAM Debug"},
+    "SomethingToString": {"class": SomethingToString, "name": "Something To String"},
+    "EmptyLatentImagePresets": {"class": EmptyLatentImagePresets, "name": "Empty Latent Image Presets"},
+    "ModelPassThrough": {"class": ModelPassThrough, "name": "ModelPass"},
+    #audioscheduler stuff
+    "NormalizedAmplitudeToMask": {"class": NormalizedAmplitudeToMask},
+    "NormalizedAmplitudeToFloatList": {"class": NormalizedAmplitudeToFloatList},
+    "OffsetMaskByNormalizedAmplitude": {"class": OffsetMaskByNormalizedAmplitude},
+    "ImageTransformByNormalizedAmplitude": {"class": ImageTransformByNormalizedAmplitude},
+    #curve nodes
+    "SplineEditor": {"class": SplineEditor, "name": "Spline Editor"},
+    "CreateShapeImageOnPath": {"class": CreateShapeImageOnPath, "name": "Create Shape Image On Path"},
+    "CreateShapeMaskOnPath": {"class": CreateShapeMaskOnPath, "name": "Create Shape Mask On Path"},
+    "CreateTextOnPath": {"class": CreateTextOnPath, "name": "Create Text On Path"},
+    "CreateGradientFromCoords": {"class": CreateGradientFromCoords, "name": "Create Gradient From Coords"},
+    "GradientToFloat": {"class": GradientToFloat, "name": "Gradient To Float"},
+    "WeightScheduleExtend": {"class": WeightScheduleExtend, "name": "Weight Schedule Extend"},
+    "MaskOrImageToWeight": {"class": MaskOrImageToWeight, "name": "Mask Or Image To Weight"},
+    "WeightScheduleConvert": {"class": WeightScheduleConvert, "name": "Weight Schedule Convert"},
+    "FloatToMask": {"class": FloatToMask, "name": "Float To Mask"},
+    "FloatToSigmas": {"class": FloatToSigmas, "name": "Float To Sigmas"},
+    "PlotCoordinates": {"class": PlotCoordinates, "name": "Plot Coordinates"},
+    "InterpolateCoords": {"class": InterpolateCoords, "name": "Interpolate Coords"},
+    "PointsEditor": {"class": PointsEditor, "name": "Points Editor"},
+    #experimental
+    "StabilityAPI_SD3": {"class": StabilityAPI_SD3, "name": "Stability API SD3"},
+    "SoundReactive": {"class": SoundReactive, "name": "Sound Reactive"},
+    "StableZero123_BatchSchedule": {"class": StableZero123_BatchSchedule, "name": "Stable Zero123 Batch Schedule"},
+    "SV3D_BatchSchedule": {"class": SV3D_BatchSchedule, "name": "SV3D Batch Schedule"},
+    "LoadResAdapterNormalization": {"class": LoadResAdapterNormalization},
+    "Superprompt": {"class": Superprompt, "name": "Superprompt"},
+    "GLIGENTextBoxApplyBatchCoords": {"class": GLIGENTextBoxApplyBatchCoords},
+    "Intrinsic_lora_sampling": {"class": Intrinsic_lora_sampling, "name": "Intrinsic Lora Sampling"},
+    "CheckpointPerturbWeights": {"class": CheckpointPerturbWeights, "name": "CheckpointPerturbWeights"},
+    "Screencap_mss": {"class": Screencap_mss, "name": "Screencap mss"},
+    "WebcamCaptureCV2": {"class": WebcamCaptureCV2, "name": "Webcam Capture CV2"},
+    "DifferentialDiffusionAdvanced": {"class": DifferentialDiffusionAdvanced, "name": "Differential Diffusion Advanced"},
+    "FluxBlockLoraLoader": {"class": FluxBlockLoraLoader, "name": "Flux Block Lora Loader"},
+    "FluxBlockLoraSelect": {"class": FluxBlockLoraSelect, "name": "Flux Block Lora Select"},
+
+    #instance diffusion
+    "CreateInstanceDiffusionTracking": {"class": CreateInstanceDiffusionTracking},
+    "AppendInstanceDiffusionTracking": {"class": AppendInstanceDiffusionTracking},
+    "DrawInstanceDiffusionTracking": {"class": DrawInstanceDiffusionTracking},
+}
+
+def generate_node_mappings(node_config):
+    node_class_mappings = {}
+    node_display_name_mappings = {}
+
+    for node_name, node_info in node_config.items():
+        node_class_mappings[node_name] = node_info["class"]
+        node_display_name_mappings[node_name] = node_info.get("name", node_info["class"].__name__)
+
+    return node_class_mappings, node_display_name_mappings
+
+NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS = generate_node_mappings(NODE_CONFIG)
+
+__all__ = ["NODE_CLASS_MAPPINGS", "NODE_DISPLAY_NAME_MAPPINGS", "WEB_DIRECTORY"]
+
+WEB_DIRECTORY = "./web"
+
+from aiohttp import web
+from server import PromptServer
+from pathlib import Path
+
+if hasattr(PromptServer, "instance"):
+
+    # NOTE: we add an extra static path to avoid comfy mechanism
+    # that loads every script in web.
+    PromptServer.instance.app.add_routes(
+        [web.static("/kjweb_async", (Path(__file__).parent.absolute() / "kjweb_async").as_posix())]
+    )
\ No newline at end of file
diff --git a/ComfyUI-KJNodes/config.json b/ComfyUI-KJNodes/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..e44b55685c9f4e723dc50f6d854e29acc4ebafa6
--- /dev/null
+++ b/ComfyUI-KJNodes/config.json
@@ -0,0 +1,3 @@
+{
+    "sai_api_key": "your_api_key_here"
+}
\ No newline at end of file
diff --git a/ComfyUI-KJNodes/docs/images/2024-04-03_20_49_29-ComfyUI.png b/ComfyUI-KJNodes/docs/images/2024-04-03_20_49_29-ComfyUI.png
new file mode 100644
index 0000000000000000000000000000000000000000..b42cfe56374c6db142326761a2a3d96211519664
--- /dev/null
+++ b/ComfyUI-KJNodes/docs/images/2024-04-03_20_49_29-ComfyUI.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:85805d3c7ca8f5d281886ea0ad61f9a78edad755ef8014b3870f91b871807ac9
+size 176158
diff --git a/ComfyUI-KJNodes/docs/images/319121566-05f66385-7568-4b1f-8bbc-11053660b02f.png b/ComfyUI-KJNodes/docs/images/319121566-05f66385-7568-4b1f-8bbc-11053660b02f.png
new file mode 100644
index 0000000000000000000000000000000000000000..e749239c1c4ffd5ab29b51695dd8d8b51ed3597f
Binary files /dev/null and b/ComfyUI-KJNodes/docs/images/319121566-05f66385-7568-4b1f-8bbc-11053660b02f.png differ
diff --git a/ComfyUI-KJNodes/docs/images/319121636-706b5081-9120-4a29-bd76-901691ada688.png b/ComfyUI-KJNodes/docs/images/319121636-706b5081-9120-4a29-bd76-901691ada688.png
new file mode 100644
index 0000000000000000000000000000000000000000..b53ad666ff060d87971f3962e74101f0cb2a5c3f
Binary files /dev/null and b/ComfyUI-KJNodes/docs/images/319121636-706b5081-9120-4a29-bd76-901691ada688.png differ
diff --git a/ComfyUI-KJNodes/fonts/FreeMono.ttf b/ComfyUI-KJNodes/fonts/FreeMono.ttf
new file mode 100644
index 0000000000000000000000000000000000000000..1e35d08261b9a61f87b6f1e7393d5c1221828ed1
--- /dev/null
+++ b/ComfyUI-KJNodes/fonts/FreeMono.ttf
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a7c692ad545c308b7b8fc2db770760c4a5d15ca50f12addf58c8f5360370e831
+size 343980
diff --git a/ComfyUI-KJNodes/fonts/FreeMonoBoldOblique.otf b/ComfyUI-KJNodes/fonts/FreeMonoBoldOblique.otf
new file mode 100644
index 0000000000000000000000000000000000000000..3aafa30ff9d9f31df006993f3c3d5c2eb5953f4f
--- /dev/null
+++ b/ComfyUI-KJNodes/fonts/FreeMonoBoldOblique.otf
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:96187651ee033d0d9791dc2beeebfba5d1f070ab410fce1a5c16483ca249c588
+size 237600
diff --git a/ComfyUI-KJNodes/fonts/TTNorms-Black.otf b/ComfyUI-KJNodes/fonts/TTNorms-Black.otf
new file mode 100644
index 0000000000000000000000000000000000000000..2cd91a4fc9637af834c5e2793ab6487c5067063d
--- /dev/null
+++ b/ComfyUI-KJNodes/fonts/TTNorms-Black.otf
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:710977e683bf0db6416d6d41b427e0363c914e6c503a5291fcb330f30b8448ea
+size 152736
diff --git a/ComfyUI-KJNodes/intrinsic_loras/intrinsic_loras.txt b/ComfyUI-KJNodes/intrinsic_loras/intrinsic_loras.txt
new file mode 100644
index 0000000000000000000000000000000000000000..62ee933763a8aa9e1b232d228717ac754ab22751
--- /dev/null
+++ b/ComfyUI-KJNodes/intrinsic_loras/intrinsic_loras.txt
@@ -0,0 +1,4 @@
+source for the loras:
+https://github.com/duxiaodan/intrinsic-lora
+
+Renamed and conveted to .safetensors
\ No newline at end of file
diff --git a/ComfyUI-KJNodes/intristic_loras/intrinsic_loras.txt b/ComfyUI-KJNodes/intristic_loras/intrinsic_loras.txt
new file mode 100644
index 0000000000000000000000000000000000000000..62ee933763a8aa9e1b232d228717ac754ab22751
--- /dev/null
+++ b/ComfyUI-KJNodes/intristic_loras/intrinsic_loras.txt
@@ -0,0 +1,4 @@
+source for the loras:
+https://github.com/duxiaodan/intrinsic-lora
+
+Renamed and conveted to .safetensors
\ No newline at end of file
diff --git a/ComfyUI-KJNodes/kjweb_async/marked.min.js b/ComfyUI-KJNodes/kjweb_async/marked.min.js
new file mode 100644
index 0000000000000000000000000000000000000000..2e66c369c388c135cc68d399861a737f4c5e68cd
--- /dev/null
+++ b/ComfyUI-KJNodes/kjweb_async/marked.min.js
@@ -0,0 +1,6 @@
+/**
+ * marked v12.0.1 - a markdown parser
+ * Copyright (c) 2011-2024, Christopher Jeffrey.
(MIT Licensed) + * https://github.com/markedjs/marked + */ +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self).marked={})}(this,(function(e){"use strict";function t(){return{async:!1,breaks:!1,extensions:null,gfm:!0,hooks:null,pedantic:!1,renderer:null,silent:!1,tokenizer:null,walkTokens:null}}function n(t){e.defaults=t}e.defaults={async:!1,breaks:!1,extensions:null,gfm:!0,hooks:null,pedantic:!1,renderer:null,silent:!1,tokenizer:null,walkTokens:null};const s=/[&<>"']/,r=new RegExp(s.source,"g"),i=/[<>"']|&(?!(#\d{1,7}|#[Xx][a-fA-F0-9]{1,6}|\w+);)/,l=new RegExp(i.source,"g"),o={"&":"&","<":"<",">":">",'"':""","'":"'"},a=e=>o[e];function c(e,t){if(t){if(s.test(e))return e.replace(r,a)}else if(i.test(e))return e.replace(l,a);return e}const h=/&(#(?:\d+)|(?:#x[0-9A-Fa-f]+)|(?:\w+));?/gi;function p(e){return e.replace(h,((e,t)=>"colon"===(t=t.toLowerCase())?":":"#"===t.charAt(0)?"x"===t.charAt(1)?String.fromCharCode(parseInt(t.substring(2),16)):String.fromCharCode(+t.substring(1)):""))}const u=/(^|[^\[])\^/g;function k(e,t){let n="string"==typeof e?e:e.source;t=t||"";const s={replace:(e,t)=>{let r="string"==typeof t?t:t.source;return r=r.replace(u,"$1"),n=n.replace(e,r),s},getRegex:()=>new RegExp(n,t)};return s}function g(e){try{e=encodeURI(e).replace(/%25/g,"%")}catch(e){return null}return e}const f={exec:()=>null};function d(e,t){const n=e.replace(/\|/g,((e,t,n)=>{let s=!1,r=t;for(;--r>=0&&"\\"===n[r];)s=!s;return s?"|":" |"})).split(/ \|/);let s=0;if(n[0].trim()||n.shift(),n.length>0&&!n[n.length-1].trim()&&n.pop(),t)if(n.length>t)n.splice(t);else for(;n.length0)return{type:"space",raw:t[0]}}code(e){const t=this.rules.block.code.exec(e);if(t){const e=t[0].replace(/^ {1,4}/gm,"");return{type:"code",raw:t[0],codeBlockStyle:"indented",text:this.options.pedantic?e:x(e,"\n")}}}fences(e){const t=this.rules.block.fences.exec(e);if(t){const e=t[0],n=function(e,t){const n=e.match(/^(\s+)(?:```)/);if(null===n)return t;const s=n[1];return t.split("\n").map((e=>{const t=e.match(/^\s+/);if(null===t)return e;const[n]=t;return n.length>=s.length?e.slice(s.length):e})).join("\n")}(e,t[3]||"");return{type:"code",raw:e,lang:t[2]?t[2].trim().replace(this.rules.inline.anyPunctuation,"$1"):t[2],text:n}}}heading(e){const t=this.rules.block.heading.exec(e);if(t){let e=t[2].trim();if(/#$/.test(e)){const t=x(e,"#");this.options.pedantic?e=t.trim():t&&!/ $/.test(t)||(e=t.trim())}return{type:"heading",raw:t[0],depth:t[1].length,text:e,tokens:this.lexer.inline(e)}}}hr(e){const t=this.rules.block.hr.exec(e);if(t)return{type:"hr",raw:t[0]}}blockquote(e){const t=this.rules.block.blockquote.exec(e);if(t){const e=x(t[0].replace(/^ *>[ \t]?/gm,""),"\n"),n=this.lexer.state.top;this.lexer.state.top=!0;const s=this.lexer.blockTokens(e);return this.lexer.state.top=n,{type:"blockquote",raw:t[0],tokens:s,text:e}}}list(e){let t=this.rules.block.list.exec(e);if(t){let n=t[1].trim();const s=n.length>1,r={type:"list",raw:"",ordered:s,start:s?+n.slice(0,-1):"",loose:!1,items:[]};n=s?`\\d{1,9}\\${n.slice(-1)}`:`\\${n}`,this.options.pedantic&&(n=s?n:"[*+-]");const i=new RegExp(`^( {0,3}${n})((?:[\t ][^\\n]*)?(?:\\n|$))`);let l="",o="",a=!1;for(;e;){let n=!1;if(!(t=i.exec(e)))break;if(this.rules.block.hr.test(e))break;l=t[0],e=e.substring(l.length);let s=t[2].split("\n",1)[0].replace(/^\t+/,(e=>" 
".repeat(3*e.length))),c=e.split("\n",1)[0],h=0;this.options.pedantic?(h=2,o=s.trimStart()):(h=t[2].search(/[^ ]/),h=h>4?1:h,o=s.slice(h),h+=t[1].length);let p=!1;if(!s&&/^ *$/.test(c)&&(l+=c+"\n",e=e.substring(c.length+1),n=!0),!n){const t=new RegExp(`^ {0,${Math.min(3,h-1)}}(?:[*+-]|\\d{1,9}[.)])((?:[ \t][^\\n]*)?(?:\\n|$))`),n=new RegExp(`^ {0,${Math.min(3,h-1)}}((?:- *){3,}|(?:_ *){3,}|(?:\\* *){3,})(?:\\n+|$)`),r=new RegExp(`^ {0,${Math.min(3,h-1)}}(?:\`\`\`|~~~)`),i=new RegExp(`^ {0,${Math.min(3,h-1)}}#`);for(;e;){const a=e.split("\n",1)[0];if(c=a,this.options.pedantic&&(c=c.replace(/^ {1,4}(?=( {4})*[^ ])/g," ")),r.test(c))break;if(i.test(c))break;if(t.test(c))break;if(n.test(e))break;if(c.search(/[^ ]/)>=h||!c.trim())o+="\n"+c.slice(h);else{if(p)break;if(s.search(/[^ ]/)>=4)break;if(r.test(s))break;if(i.test(s))break;if(n.test(s))break;o+="\n"+c}p||c.trim()||(p=!0),l+=a+"\n",e=e.substring(a.length+1),s=c.slice(h)}}r.loose||(a?r.loose=!0:/\n *\n *$/.test(l)&&(a=!0));let u,k=null;this.options.gfm&&(k=/^\[[ xX]\] /.exec(o),k&&(u="[ ] "!==k[0],o=o.replace(/^\[[ xX]\] +/,""))),r.items.push({type:"list_item",raw:l,task:!!k,checked:u,loose:!1,text:o,tokens:[]}),r.raw+=l}r.items[r.items.length-1].raw=l.trimEnd(),r.items[r.items.length-1].text=o.trimEnd(),r.raw=r.raw.trimEnd();for(let e=0;e"space"===e.type)),n=t.length>0&&t.some((e=>/\n.*\n/.test(e.raw)));r.loose=n}if(r.loose)for(let e=0;e$/,"$1").replace(this.rules.inline.anyPunctuation,"$1"):"",s=t[3]?t[3].substring(1,t[3].length-1).replace(this.rules.inline.anyPunctuation,"$1"):t[3];return{type:"def",tag:e,raw:t[0],href:n,title:s}}}table(e){const t=this.rules.block.table.exec(e);if(!t)return;if(!/[:|]/.test(t[2]))return;const n=d(t[1]),s=t[2].replace(/^\||\| *$/g,"").split("|"),r=t[3]&&t[3].trim()?t[3].replace(/\n[ \t]*$/,"").split("\n"):[],i={type:"table",raw:t[0],header:[],align:[],rows:[]};if(n.length===s.length){for(const e of s)/^ *-+: *$/.test(e)?i.align.push("right"):/^ *:-+: *$/.test(e)?i.align.push("center"):/^ *:-+ *$/.test(e)?i.align.push("left"):i.align.push(null);for(const e of n)i.header.push({text:e,tokens:this.lexer.inline(e)});for(const e of r)i.rows.push(d(e,i.header.length).map((e=>({text:e,tokens:this.lexer.inline(e)}))));return i}}lheading(e){const t=this.rules.block.lheading.exec(e);if(t)return{type:"heading",raw:t[0],depth:"="===t[2].charAt(0)?1:2,text:t[1],tokens:this.lexer.inline(t[1])}}paragraph(e){const t=this.rules.block.paragraph.exec(e);if(t){const e="\n"===t[1].charAt(t[1].length-1)?t[1].slice(0,-1):t[1];return{type:"paragraph",raw:t[0],text:e,tokens:this.lexer.inline(e)}}}text(e){const t=this.rules.block.text.exec(e);if(t)return{type:"text",raw:t[0],text:t[0],tokens:this.lexer.inline(t[0])}}escape(e){const t=this.rules.inline.escape.exec(e);if(t)return{type:"escape",raw:t[0],text:c(t[1])}}tag(e){const t=this.rules.inline.tag.exec(e);if(t)return!this.lexer.state.inLink&&/^/i.test(t[0])&&(this.lexer.state.inLink=!1),!this.lexer.state.inRawBlock&&/^<(pre|code|kbd|script)(\s|>)/i.test(t[0])?this.lexer.state.inRawBlock=!0:this.lexer.state.inRawBlock&&/^<\/(pre|code|kbd|script)(\s|>)/i.test(t[0])&&(this.lexer.state.inRawBlock=!1),{type:"html",raw:t[0],inLink:this.lexer.state.inLink,inRawBlock:this.lexer.state.inRawBlock,block:!1,text:t[0]}}link(e){const t=this.rules.inline.link.exec(e);if(t){const e=t[2].trim();if(!this.options.pedantic&&/^$/.test(e))return;const t=x(e.slice(0,-1),"\\");if((e.length-t.length)%2==0)return}else{const e=function(e,t){if(-1===e.indexOf(t[1]))return-1;let n=0;for(let s=0;s-1){const 
n=(0===t[0].indexOf("!")?5:4)+t[1].length+e;t[2]=t[2].substring(0,e),t[0]=t[0].substring(0,n).trim(),t[3]=""}}let n=t[2],s="";if(this.options.pedantic){const e=/^([^'"]*[^\s])\s+(['"])(.*)\2/.exec(n);e&&(n=e[1],s=e[3])}else s=t[3]?t[3].slice(1,-1):"";return n=n.trim(),/^$/.test(e)?n.slice(1):n.slice(1,-1)),b(t,{href:n?n.replace(this.rules.inline.anyPunctuation,"$1"):n,title:s?s.replace(this.rules.inline.anyPunctuation,"$1"):s},t[0],this.lexer)}}reflink(e,t){let n;if((n=this.rules.inline.reflink.exec(e))||(n=this.rules.inline.nolink.exec(e))){const e=t[(n[2]||n[1]).replace(/\s+/g," ").toLowerCase()];if(!e){const e=n[0].charAt(0);return{type:"text",raw:e,text:e}}return b(n,e,n[0],this.lexer)}}emStrong(e,t,n=""){let s=this.rules.inline.emStrongLDelim.exec(e);if(!s)return;if(s[3]&&n.match(/[\p{L}\p{N}]/u))return;if(!(s[1]||s[2]||"")||!n||this.rules.inline.punctuation.exec(n)){const n=[...s[0]].length-1;let r,i,l=n,o=0;const a="*"===s[0][0]?this.rules.inline.emStrongRDelimAst:this.rules.inline.emStrongRDelimUnd;for(a.lastIndex=0,t=t.slice(-1*e.length+n);null!=(s=a.exec(t));){if(r=s[1]||s[2]||s[3]||s[4]||s[5]||s[6],!r)continue;if(i=[...r].length,s[3]||s[4]){l+=i;continue}if((s[5]||s[6])&&n%3&&!((n+i)%3)){o+=i;continue}if(l-=i,l>0)continue;i=Math.min(i,i+l+o);const t=[...s[0]][0].length,a=e.slice(0,n+s.index+t+i);if(Math.min(n,i)%2){const e=a.slice(1,-1);return{type:"em",raw:a,text:e,tokens:this.lexer.inlineTokens(e)}}const c=a.slice(2,-2);return{type:"strong",raw:a,text:c,tokens:this.lexer.inlineTokens(c)}}}}codespan(e){const t=this.rules.inline.code.exec(e);if(t){let e=t[2].replace(/\n/g," ");const n=/[^ ]/.test(e),s=/^ /.test(e)&&/ $/.test(e);return n&&s&&(e=e.substring(1,e.length-1)),e=c(e,!0),{type:"codespan",raw:t[0],text:e}}}br(e){const t=this.rules.inline.br.exec(e);if(t)return{type:"br",raw:t[0]}}del(e){const t=this.rules.inline.del.exec(e);if(t)return{type:"del",raw:t[0],text:t[2],tokens:this.lexer.inlineTokens(t[2])}}autolink(e){const t=this.rules.inline.autolink.exec(e);if(t){let e,n;return"@"===t[2]?(e=c(t[1]),n="mailto:"+e):(e=c(t[1]),n=e),{type:"link",raw:t[0],text:e,href:n,tokens:[{type:"text",raw:e,text:e}]}}}url(e){let t;if(t=this.rules.inline.url.exec(e)){let e,n;if("@"===t[2])e=c(t[0]),n="mailto:"+e;else{let s;do{s=t[0],t[0]=this.rules.inline._backpedal.exec(t[0])?.[0]??""}while(s!==t[0]);e=c(t[0]),n="www."===t[1]?"http://"+t[0]:t[0]}return{type:"link",raw:t[0],text:e,href:n,tokens:[{type:"text",raw:e,text:e}]}}}inlineText(e){const t=this.rules.inline.text.exec(e);if(t){let e;return e=this.lexer.state.inRawBlock?t[0]:c(t[0]),{type:"text",raw:t[0],text:e}}}}const m=/^ {0,3}((?:-[\t ]*){3,}|(?:_[ \t]*){3,}|(?:\*[ \t]*){3,})(?:\n+|$)/,y=/(?:[*+-]|\d{1,9}[.)])/,$=k(/^(?!bull |blockCode|fences|blockquote|heading|html)((?:.|\n(?!\s*?\n|bull |blockCode|fences|blockquote|heading|html))+?)\n {0,3}(=+|-+) *(?:\n+|$)/).replace(/bull/g,y).replace(/blockCode/g,/ {4}/).replace(/fences/g,/ {0,3}(?:`{3,}|~{3,})/).replace(/blockquote/g,/ {0,3}>/).replace(/heading/g,/ {0,3}#{1,6}/).replace(/html/g,/ {0,3}<[^\n>]+>\n/).getRegex(),z=/^([^\n]+(?:\n(?!hr|heading|lheading|blockquote|fences|list|html|table| +\n)[^\n]+)*)/,T=/(?!\s*\])(?:\\.|[^\[\]\\])+/,R=k(/^ {0,3}\[(label)\]: *(?:\n *)?([^<\s][^\s]*|<.*?>)(?:(?: +(?:\n *)?| *\n *)(title))? 
*(?:\n+|$)/).replace("label",T).replace("title",/(?:"(?:\\"?|[^"\\])*"|'[^'\n]*(?:\n[^'\n]+)*\n?'|\([^()]*\))/).getRegex(),_=k(/^( {0,3}bull)([ \t][^\n]+?)?(?:\n|$)/).replace(/bull/g,y).getRegex(),A="address|article|aside|base|basefont|blockquote|body|caption|center|col|colgroup|dd|details|dialog|dir|div|dl|dt|fieldset|figcaption|figure|footer|form|frame|frameset|h[1-6]|head|header|hr|html|iframe|legend|li|link|main|menu|menuitem|meta|nav|noframes|ol|optgroup|option|p|param|search|section|summary|table|tbody|td|tfoot|th|thead|title|tr|track|ul",S=/|$))/,I=k("^ {0,3}(?:<(script|pre|style|textarea)[\\s>][\\s\\S]*?(?:[^\\n]*\\n+|$)|comment[^\\n]*(\\n+|$)|<\\?[\\s\\S]*?(?:\\?>\\n*|$)|\\n*|$)|\\n*|$)|)[\\s\\S]*?(?:(?:\\n *)+\\n|$)|<(?!script|pre|style|textarea)([a-z][\\w-]*)(?:attribute)*? */?>(?=[ \\t]*(?:\\n|$))[\\s\\S]*?(?:(?:\\n *)+\\n|$)|(?=[ \\t]*(?:\\n|$))[\\s\\S]*?(?:(?:\\n *)+\\n|$))","i").replace("comment",S).replace("tag",A).replace("attribute",/ +[a-zA-Z:_][\w.:-]*(?: *= *"[^"\n]*"| *= *'[^'\n]*'| *= *[^\s"'=<>`]+)?/).getRegex(),E=k(z).replace("hr",m).replace("heading"," {0,3}#{1,6}(?:\\s|$)").replace("|lheading","").replace("|table","").replace("blockquote"," {0,3}>").replace("fences"," {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n").replace("list"," {0,3}(?:[*+-]|1[.)]) ").replace("html",")|<(?:script|pre|style|textarea|!--)").replace("tag",A).getRegex(),q={blockquote:k(/^( {0,3}> ?(paragraph|[^\n]*)(?:\n|$))+/).replace("paragraph",E).getRegex(),code:/^( {4}[^\n]+(?:\n(?: *(?:\n|$))*)?)+/,def:R,fences:/^ {0,3}(`{3,}(?=[^`\n]*(?:\n|$))|~{3,})([^\n]*)(?:\n|$)(?:|([\s\S]*?)(?:\n|$))(?: {0,3}\1[~`]* *(?=\n|$)|$)/,heading:/^ {0,3}(#{1,6})(?=\s|$)(.*)(?:\n+|$)/,hr:m,html:I,lheading:$,list:_,newline:/^(?: *(?:\n|$))+/,paragraph:E,table:f,text:/^[^\n]+/},Z=k("^ *([^\\n ].*)\\n {0,3}((?:\\| *)?:?-+:? *(?:\\| *:?-+:? *)*(?:\\| *)?)(?:\\n((?:(?! *\\n|hr|heading|blockquote|code|fences|list|html).*(?:\\n|$))*)\\n*|$)").replace("hr",m).replace("heading"," {0,3}#{1,6}(?:\\s|$)").replace("blockquote"," {0,3}>").replace("code"," {4}[^\\n]").replace("fences"," {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n").replace("list"," {0,3}(?:[*+-]|1[.)]) ").replace("html",")|<(?:script|pre|style|textarea|!--)").replace("tag",A).getRegex(),L={...q,table:Z,paragraph:k(z).replace("hr",m).replace("heading"," {0,3}#{1,6}(?:\\s|$)").replace("|lheading","").replace("table",Z).replace("blockquote"," {0,3}>").replace("fences"," {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n").replace("list"," {0,3}(?:[*+-]|1[.)]) ").replace("html",")|<(?:script|pre|style|textarea|!--)").replace("tag",A).getRegex()},P={...q,html:k("^ *(?:comment *(?:\\n|\\s*$)|<(tag)[\\s\\S]+? *(?:\\n{2,}|\\s*$)|\\s]*)*?/?> *(?:\\n{2,}|\\s*$))").replace("comment",S).replace(/tag/g,"(?!(?:a|em|strong|small|s|cite|q|dfn|abbr|data|time|code|var|samp|kbd|sub|sup|i|b|u|mark|ruby|rt|rp|bdi|bdo|span|br|wbr|ins|del|img)\\b)\\w+(?!:|[^\\w\\s@]*@)\\b").getRegex(),def:/^ *\[([^\]]+)\]: *]+)>?(?: +(["(][^\n]+[")]))? 
*(?:\n+|$)/,heading:/^(#{1,6})(.*)(?:\n+|$)/,fences:f,lheading:/^(.+?)\n {0,3}(=+|-+) *(?:\n+|$)/,paragraph:k(z).replace("hr",m).replace("heading"," *#{1,6} *[^\n]").replace("lheading",$).replace("|table","").replace("blockquote"," {0,3}>").replace("|fences","").replace("|list","").replace("|html","").replace("|tag","").getRegex()},Q=/^\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/,v=/^( {2,}|\\)\n(?!\s*$)/,B="\\p{P}\\p{S}",C=k(/^((?![*_])[\spunctuation])/,"u").replace(/punctuation/g,B).getRegex(),M=k(/^(?:\*+(?:((?!\*)[punct])|[^\s*]))|^_+(?:((?!_)[punct])|([^\s_]))/,"u").replace(/punct/g,B).getRegex(),O=k("^[^_*]*?__[^_*]*?\\*[^_*]*?(?=__)|[^*]+(?=[^*])|(?!\\*)[punct](\\*+)(?=[\\s]|$)|[^punct\\s](\\*+)(?!\\*)(?=[punct\\s]|$)|(?!\\*)[punct\\s](\\*+)(?=[^punct\\s])|[\\s](\\*+)(?!\\*)(?=[punct])|(?!\\*)[punct](\\*+)(?!\\*)(?=[punct])|[^punct\\s](\\*+)(?=[^punct\\s])","gu").replace(/punct/g,B).getRegex(),D=k("^[^_*]*?\\*\\*[^_*]*?_[^_*]*?(?=\\*\\*)|[^_]+(?=[^_])|(?!_)[punct](_+)(?=[\\s]|$)|[^punct\\s](_+)(?!_)(?=[punct\\s]|$)|(?!_)[punct\\s](_+)(?=[^punct\\s])|[\\s](_+)(?!_)(?=[punct])|(?!_)[punct](_+)(?!_)(?=[punct])","gu").replace(/punct/g,B).getRegex(),j=k(/\\([punct])/,"gu").replace(/punct/g,B).getRegex(),H=k(/^<(scheme:[^\s\x00-\x1f<>]*|email)>/).replace("scheme",/[a-zA-Z][a-zA-Z0-9+.-]{1,31}/).replace("email",/[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+(@)[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+(?![-_])/).getRegex(),U=k(S).replace("(?:--\x3e|$)","--\x3e").getRegex(),X=k("^comment|^|^<[a-zA-Z][\\w-]*(?:attribute)*?\\s*/?>|^<\\?[\\s\\S]*?\\?>|^|^").replace("comment",U).replace("attribute",/\s+[a-zA-Z:_][\w.:-]*(?:\s*=\s*"[^"]*"|\s*=\s*'[^']*'|\s*=\s*[^\s"'=<>`]+)?/).getRegex(),F=/(?:\[(?:\\.|[^\[\]\\])*\]|\\.|`[^`]*`|[^\[\]\\`])*?/,N=k(/^!?\[(label)\]\(\s*(href)(?:\s+(title))?\s*\)/).replace("label",F).replace("href",/<(?:\\.|[^\n<>\\])+>|[^\s\x00-\x1f]*/).replace("title",/"(?:\\"?|[^"\\])*"|'(?:\\'?|[^'\\])*'|\((?:\\\)?|[^)\\])*\)/).getRegex(),G=k(/^!?\[(label)\]\[(ref)\]/).replace("label",F).replace("ref",T).getRegex(),J=k(/^!?\[(ref)\](?:\[\])?/).replace("ref",T).getRegex(),K={_backpedal:f,anyPunctuation:j,autolink:H,blockSkip:/\[[^[\]]*?\]\([^\(\)]*?\)|`[^`]*?`|<[^<>]*?>/g,br:v,code:/^(`+)([^`]|[^`][\s\S]*?[^`])\1(?!`)/,del:f,emStrongLDelim:M,emStrongRDelimAst:O,emStrongRDelimUnd:D,escape:Q,link:N,nolink:J,punctuation:C,reflink:G,reflinkSearch:k("reflink|nolink(?!\\()","g").replace("reflink",G).replace("nolink",J).getRegex(),tag:X,text:/^(`+|[^`])(?:(?= {2,}\n)|[\s\S]*?(?:(?=[\\t+" ".repeat(n.length)));e;)if(!(this.options.extensions&&this.options.extensions.block&&this.options.extensions.block.some((s=>!!(n=s.call({lexer:this},e,t))&&(e=e.substring(n.raw.length),t.push(n),!0)))))if(n=this.tokenizer.space(e))e=e.substring(n.raw.length),1===n.raw.length&&t.length>0?t[t.length-1].raw+="\n":t.push(n);else if(n=this.tokenizer.code(e))e=e.substring(n.raw.length),s=t[t.length-1],!s||"paragraph"!==s.type&&"text"!==s.type?t.push(n):(s.raw+="\n"+n.raw,s.text+="\n"+n.text,this.inlineQueue[this.inlineQueue.length-1].src=s.text);else if(n=this.tokenizer.fences(e))e=e.substring(n.raw.length),t.push(n);else if(n=this.tokenizer.heading(e))e=e.substring(n.raw.length),t.push(n);else if(n=this.tokenizer.hr(e))e=e.substring(n.raw.length),t.push(n);else if(n=this.tokenizer.blockquote(e))e=e.substring(n.raw.length),t.push(n);else if(n=this.tokenizer.list(e))e=e.substring(n.raw.length),t.push(n);else 
if(n=this.tokenizer.html(e))e=e.substring(n.raw.length),t.push(n);else if(n=this.tokenizer.def(e))e=e.substring(n.raw.length),s=t[t.length-1],!s||"paragraph"!==s.type&&"text"!==s.type?this.tokens.links[n.tag]||(this.tokens.links[n.tag]={href:n.href,title:n.title}):(s.raw+="\n"+n.raw,s.text+="\n"+n.raw,this.inlineQueue[this.inlineQueue.length-1].src=s.text);else if(n=this.tokenizer.table(e))e=e.substring(n.raw.length),t.push(n);else if(n=this.tokenizer.lheading(e))e=e.substring(n.raw.length),t.push(n);else{if(r=e,this.options.extensions&&this.options.extensions.startBlock){let t=1/0;const n=e.slice(1);let s;this.options.extensions.startBlock.forEach((e=>{s=e.call({lexer:this},n),"number"==typeof s&&s>=0&&(t=Math.min(t,s))})),t<1/0&&t>=0&&(r=e.substring(0,t+1))}if(this.state.top&&(n=this.tokenizer.paragraph(r)))s=t[t.length-1],i&&"paragraph"===s.type?(s.raw+="\n"+n.raw,s.text+="\n"+n.text,this.inlineQueue.pop(),this.inlineQueue[this.inlineQueue.length-1].src=s.text):t.push(n),i=r.length!==e.length,e=e.substring(n.raw.length);else if(n=this.tokenizer.text(e))e=e.substring(n.raw.length),s=t[t.length-1],s&&"text"===s.type?(s.raw+="\n"+n.raw,s.text+="\n"+n.text,this.inlineQueue.pop(),this.inlineQueue[this.inlineQueue.length-1].src=s.text):t.push(n);else if(e){const t="Infinite loop on byte: "+e.charCodeAt(0);if(this.options.silent){console.error(t);break}throw new Error(t)}}return this.state.top=!0,t}inline(e,t=[]){return this.inlineQueue.push({src:e,tokens:t}),t}inlineTokens(e,t=[]){let n,s,r,i,l,o,a=e;if(this.tokens.links){const e=Object.keys(this.tokens.links);if(e.length>0)for(;null!=(i=this.tokenizer.rules.inline.reflinkSearch.exec(a));)e.includes(i[0].slice(i[0].lastIndexOf("[")+1,-1))&&(a=a.slice(0,i.index)+"["+"a".repeat(i[0].length-2)+"]"+a.slice(this.tokenizer.rules.inline.reflinkSearch.lastIndex))}for(;null!=(i=this.tokenizer.rules.inline.blockSkip.exec(a));)a=a.slice(0,i.index)+"["+"a".repeat(i[0].length-2)+"]"+a.slice(this.tokenizer.rules.inline.blockSkip.lastIndex);for(;null!=(i=this.tokenizer.rules.inline.anyPunctuation.exec(a));)a=a.slice(0,i.index)+"++"+a.slice(this.tokenizer.rules.inline.anyPunctuation.lastIndex);for(;e;)if(l||(o=""),l=!1,!(this.options.extensions&&this.options.extensions.inline&&this.options.extensions.inline.some((s=>!!(n=s.call({lexer:this},e,t))&&(e=e.substring(n.raw.length),t.push(n),!0)))))if(n=this.tokenizer.escape(e))e=e.substring(n.raw.length),t.push(n);else if(n=this.tokenizer.tag(e))e=e.substring(n.raw.length),s=t[t.length-1],s&&"text"===n.type&&"text"===s.type?(s.raw+=n.raw,s.text+=n.text):t.push(n);else if(n=this.tokenizer.link(e))e=e.substring(n.raw.length),t.push(n);else if(n=this.tokenizer.reflink(e,this.tokens.links))e=e.substring(n.raw.length),s=t[t.length-1],s&&"text"===n.type&&"text"===s.type?(s.raw+=n.raw,s.text+=n.text):t.push(n);else if(n=this.tokenizer.emStrong(e,a,o))e=e.substring(n.raw.length),t.push(n);else if(n=this.tokenizer.codespan(e))e=e.substring(n.raw.length),t.push(n);else if(n=this.tokenizer.br(e))e=e.substring(n.raw.length),t.push(n);else if(n=this.tokenizer.del(e))e=e.substring(n.raw.length),t.push(n);else if(n=this.tokenizer.autolink(e))e=e.substring(n.raw.length),t.push(n);else if(this.state.inLink||!(n=this.tokenizer.url(e))){if(r=e,this.options.extensions&&this.options.extensions.startInline){let t=1/0;const n=e.slice(1);let s;this.options.extensions.startInline.forEach((e=>{s=e.call({lexer:this},n),"number"==typeof 
s&&s>=0&&(t=Math.min(t,s))})),t<1/0&&t>=0&&(r=e.substring(0,t+1))}if(n=this.tokenizer.inlineText(r))e=e.substring(n.raw.length),"_"!==n.raw.slice(-1)&&(o=n.raw.slice(-1)),l=!0,s=t[t.length-1],s&&"text"===s.type?(s.raw+=n.raw,s.text+=n.text):t.push(n);else if(e){const t="Infinite loop on byte: "+e.charCodeAt(0);if(this.options.silent){console.error(t);break}throw new Error(t)}}else e=e.substring(n.raw.length),t.push(n);return t}}class se{options;constructor(t){this.options=t||e.defaults}code(e,t,n){const s=(t||"").match(/^\S*/)?.[0];return e=e.replace(/\n$/,"")+"\n",s?'
'+(n?e:c(e,!0))+"
\n":"
"+(n?e:c(e,!0))+"
\n"}blockquote(e){return`
\n${e}
\n`}html(e,t){return e}heading(e,t,n){return`${e}\n`}hr(){return"
\n"}list(e,t,n){const s=t?"ol":"ul";return"<"+s+(t&&1!==n?' start="'+n+'"':"")+">\n"+e+"\n"}listitem(e,t,n){return`
  • ${e}
  • \n`}checkbox(e){return"'}paragraph(e){return`

    ${e}

    \n`}table(e,t){return t&&(t=`${t}`),"\n\n"+e+"\n"+t+"
    \n"}tablerow(e){return`\n${e}\n`}tablecell(e,t){const n=t.header?"th":"td";return(t.align?`<${n} align="${t.align}">`:`<${n}>`)+e+`\n`}strong(e){return`${e}`}em(e){return`${e}`}codespan(e){return`${e}`}br(){return"
    "}del(e){return`${e}`}link(e,t,n){const s=g(e);if(null===s)return n;let r='
    ",r}image(e,t,n){const s=g(e);if(null===s)return n;let r=`${n}0&&"paragraph"===n.tokens[0].type?(n.tokens[0].text=e+" "+n.tokens[0].text,n.tokens[0].tokens&&n.tokens[0].tokens.length>0&&"text"===n.tokens[0].tokens[0].type&&(n.tokens[0].tokens[0].text=e+" "+n.tokens[0].tokens[0].text)):n.tokens.unshift({type:"text",text:e+" "}):o+=e+" "}o+=this.parse(n.tokens,i),l+=this.renderer.listitem(o,r,!!s)}n+=this.renderer.list(l,t,s);continue}case"html":{const e=r;n+=this.renderer.html(e.text,e.block);continue}case"paragraph":{const e=r;n+=this.renderer.paragraph(this.parseInline(e.tokens));continue}case"text":{let i=r,l=i.tokens?this.parseInline(i.tokens):i.text;for(;s+1{const r=e[s].flat(1/0);n=n.concat(this.walkTokens(r,t))})):e.tokens&&(n=n.concat(this.walkTokens(e.tokens,t)))}}return n}use(...e){const t=this.defaults.extensions||{renderers:{},childTokens:{}};return e.forEach((e=>{const n={...e};if(n.async=this.defaults.async||n.async||!1,e.extensions&&(e.extensions.forEach((e=>{if(!e.name)throw new Error("extension name required");if("renderer"in e){const n=t.renderers[e.name];t.renderers[e.name]=n?function(...t){let s=e.renderer.apply(this,t);return!1===s&&(s=n.apply(this,t)),s}:e.renderer}if("tokenizer"in e){if(!e.level||"block"!==e.level&&"inline"!==e.level)throw new Error("extension level must be 'block' or 'inline'");const n=t[e.level];n?n.unshift(e.tokenizer):t[e.level]=[e.tokenizer],e.start&&("block"===e.level?t.startBlock?t.startBlock.push(e.start):t.startBlock=[e.start]:"inline"===e.level&&(t.startInline?t.startInline.push(e.start):t.startInline=[e.start]))}"childTokens"in e&&e.childTokens&&(t.childTokens[e.name]=e.childTokens)})),n.extensions=t),e.renderer){const t=this.defaults.renderer||new se(this.defaults);for(const n in e.renderer){if(!(n in t))throw new Error(`renderer '${n}' does not exist`);if("options"===n)continue;const s=n,r=e.renderer[s],i=t[s];t[s]=(...e)=>{let n=r.apply(t,e);return!1===n&&(n=i.apply(t,e)),n||""}}n.renderer=t}if(e.tokenizer){const t=this.defaults.tokenizer||new w(this.defaults);for(const n in e.tokenizer){if(!(n in t))throw new Error(`tokenizer '${n}' does not exist`);if(["options","rules","lexer"].includes(n))continue;const s=n,r=e.tokenizer[s],i=t[s];t[s]=(...e)=>{let n=r.apply(t,e);return!1===n&&(n=i.apply(t,e)),n}}n.tokenizer=t}if(e.hooks){const t=this.defaults.hooks||new le;for(const n in e.hooks){if(!(n in t))throw new Error(`hook '${n}' does not exist`);if("options"===n)continue;const s=n,r=e.hooks[s],i=t[s];le.passThroughHooks.has(n)?t[s]=e=>{if(this.defaults.async)return Promise.resolve(r.call(t,e)).then((e=>i.call(t,e)));const n=r.call(t,e);return i.call(t,n)}:t[s]=(...e)=>{let n=r.apply(t,e);return!1===n&&(n=i.apply(t,e)),n}}n.hooks=t}if(e.walkTokens){const t=this.defaults.walkTokens,s=e.walkTokens;n.walkTokens=function(e){let n=[];return n.push(s.call(this,e)),t&&(n=n.concat(t.call(this,e))),n}}this.defaults={...this.defaults,...n}})),this}setOptions(e){return this.defaults={...this.defaults,...e},this}lexer(e,t){return ne.lex(e,t??this.defaults)}parser(e,t){return ie.parse(e,t??this.defaults)}#e(e,t){return(n,s)=>{const r={...s},i={...this.defaults,...r};!0===this.defaults.async&&!1===r.async&&(i.silent||console.warn("marked(): The async option was set to true by an extension. 
The async: false option sent to parse will be ignored."),i.async=!0);const l=this.#t(!!i.silent,!!i.async);if(null==n)return l(new Error("marked(): input parameter is undefined or null"));if("string"!=typeof n)return l(new Error("marked(): input parameter is of type "+Object.prototype.toString.call(n)+", string expected"));if(i.hooks&&(i.hooks.options=i),i.async)return Promise.resolve(i.hooks?i.hooks.preprocess(n):n).then((t=>e(t,i))).then((e=>i.hooks?i.hooks.processAllTokens(e):e)).then((e=>i.walkTokens?Promise.all(this.walkTokens(e,i.walkTokens)).then((()=>e)):e)).then((e=>t(e,i))).then((e=>i.hooks?i.hooks.postprocess(e):e)).catch(l);try{i.hooks&&(n=i.hooks.preprocess(n));let s=e(n,i);i.hooks&&(s=i.hooks.processAllTokens(s)),i.walkTokens&&this.walkTokens(s,i.walkTokens);let r=t(s,i);return i.hooks&&(r=i.hooks.postprocess(r)),r}catch(e){return l(e)}}}#t(e,t){return n=>{if(n.message+="\nPlease report this to https://github.com/markedjs/marked.",e){const e="

    An error occurred:

    "+c(n.message+"",!0)+"
    ";return t?Promise.resolve(e):e}if(t)return Promise.reject(n);throw n}}}const ae=new oe;function ce(e,t){return ae.parse(e,t)}ce.options=ce.setOptions=function(e){return ae.setOptions(e),ce.defaults=ae.defaults,n(ce.defaults),ce},ce.getDefaults=t,ce.defaults=e.defaults,ce.use=function(...e){return ae.use(...e),ce.defaults=ae.defaults,n(ce.defaults),ce},ce.walkTokens=function(e,t){return ae.walkTokens(e,t)},ce.parseInline=ae.parseInline,ce.Parser=ie,ce.parser=ie.parse,ce.Renderer=se,ce.TextRenderer=re,ce.Lexer=ne,ce.lexer=ne.lex,ce.Tokenizer=w,ce.Hooks=le,ce.parse=ce;const he=ce.options,pe=ce.setOptions,ue=ce.use,ke=ce.walkTokens,ge=ce.parseInline,fe=ce,de=ie.parse,xe=ne.lex;e.Hooks=le,e.Lexer=ne,e.Marked=oe,e.Parser=ie,e.Renderer=se,e.TextRenderer=re,e.Tokenizer=w,e.getDefaults=t,e.lexer=xe,e.marked=ce,e.options=he,e.parse=fe,e.parseInline=ge,e.parser=de,e.setOptions=pe,e.use=ue,e.walkTokens=ke})); diff --git a/ComfyUI-KJNodes/kjweb_async/protovis.min.js b/ComfyUI-KJNodes/kjweb_async/protovis.min.js new file mode 100644 index 0000000000000000000000000000000000000000..dfb84166521a49e4f7e41539933b101e126bd72f --- /dev/null +++ b/ComfyUI-KJNodes/kjweb_async/protovis.min.js @@ -0,0 +1,277 @@ +var a;if(!Array.prototype.map)Array.prototype.map=function(b,c){for(var d=this.length,f=new Array(d),g=0;g>>0,f=0;f=d)throw new Error("reduce: no values, no initial value");}for(;f=0&&d=69&&m<100?1900:0)});return"([0-9]+)";case "%Y":q.push(function(m){g=m});return"([0-9]+)";case "%%":q.push(function(){}); +return"%"}return p});(f=f.match(n))&&f.forEach(function(p,m){q[m](p)});return new Date(g,h,i,j,k,l)};return c}; +pv.Format.time=function(b){function c(f){f=Number(f);switch(b){case "short":if(f>=31536E6)return(f/31536E6).toFixed(1)+" years";else if(f>=6048E5)return(f/6048E5).toFixed(1)+" weeks";else if(f>=864E5)return(f/864E5).toFixed(1)+" days";else if(f>=36E5)return(f/36E5).toFixed(1)+" hours";else if(f>=6E4)return(f/6E4).toFixed(1)+" minutes";return(f/1E3).toFixed(1)+" seconds";case "long":var g=[],h=f%36E5/6E4>>0;g.push(d("0",2,f%6E4/1E3>>0));if(f>=36E5){var i=f%864E5/36E5>>0;g.push(d("0",2,h));if(f>=864E5){g.push(d("0", +2,i));g.push(Math.floor(f/864E5).toFixed())}else g.push(i.toFixed())}else g.push(h.toFixed());return g.reverse().join(":")}}var d=pv.Format.pad;c.format=c;c.parse=function(f){switch(b){case "short":for(var g=/([0-9,.]+)\s*([a-z]+)/g,h,i=0;h=g.exec(f);){var j=parseFloat(h[0].replace(",","")),k=0;switch(h[2].toLowerCase()){case "year":case "years":k=31536E6;break;case "week":case "weeks":k=6048E5;break;case "day":case "days":k=864E5;break;case "hour":case "hours":k=36E5;break;case "minute":case "minutes":k= +6E4;break;case "second":case "seconds":k=1E3;break}i+=j*k}return i;case "long":h=f.replace(",","").split(":").reverse();i=0;if(h.length)i+=parseFloat(h[0])*1E3;if(h.length>1)i+=parseFloat(h[1])*6E4;if(h.length>2)i+=parseFloat(h[2])*36E5;if(h.length>3)i+=parseFloat(h[3])*864E5;return i}};return c}; +pv.Format.number=function(){function b(r){if(Infinity>h)r=Math.round(r*i)/i;var s=String(Math.abs(r)).split("."),t=s[0];if(t.length>d)t=t.substring(t.length-d);if(l&&t.length3)t=t.replace(/\B(?=(?:\d{3})+(?!\d))/g,n);if(!l&&t.lengthd)s=s.substring(s.length-d);r=r[1]?Number("0."+r[1]):0;if(Infinity>h)r=Math.round(r*i)/i;return Math.round(s)+r};b.integerDigits=function(r,s){if(arguments.length){c=Number(r);d=arguments.length>1?Number(s):c;f=c+Math.floor(c/3)*n.length;return this}return[c,d]};b.fractionDigits=function(r,s){if(arguments.length){g= 
+Number(r);h=arguments.length>1?Number(s):g;i=Math.pow(10,h);return this}return[g,h]};b.integerPad=function(r){if(arguments.length){j=String(r);l=/\d/.test(j);return this}return j};b.fractionPad=function(r){if(arguments.length){k=String(r);return this}return k};b.decimal=function(r){if(arguments.length){q=String(r);return this}return q};b.group=function(r){if(arguments.length){n=r?String(r):"";f=c+Math.floor(c/3)*n.length;return this}return n};b.negativeAffix=function(r,s){if(arguments.length){p=String(r|| +"");m=String(s||"");return this}return[p,m]};return b};pv.map=function(b,c){var d={};return c?b.map(function(f,g){d.index=g;return c.call(d,f)}):b.slice()};pv.repeat=function(b,c){if(arguments.length==1)c=2;return pv.blend(pv.range(c).map(function(){return b}))};pv.cross=function(b,c){for(var d=[],f=0,g=b.length,h=c.length;fc){b.length=d;for(var f=c;fc?1:0}; +pv.reverseOrder=function(b,c){return cb?1:0};pv.search=function(b,c,d){if(!d)d=pv.identity;for(var f=0,g=b.length-1;f<=g;){var h=f+g>>1,i=d(b[h]);if(ic)g=h-1;else return h}return-f-1};pv.search.index=function(b,c,d){b=pv.search(b,c,d);return b<0?-b-1:b}; +pv.range=function(b,c,d){if(arguments.length==1){c=b;b=0}if(d==undefined)d=1;if((c-b)/d==Infinity)throw new Error("range must be finite");var f=[],g=0,h;c-=(c-b)*1.0E-10;if(d<0)for(;(h=b+d*g++)>c;)f.push(h);else for(;(h=b+d*g++)f){f=i;d=h}}return d}; +pv.min=function(b,c){if(c==pv.index)return 0;return Math.min.apply(null,c?pv.map(b,c):b)};pv.min.index=function(b,c){if(!b.length)return-1;if(c==pv.index)return 0;if(!c)c=pv.identity;for(var d=0,f=Infinity,g={},h=0;h0?Math.pow(c,Math.floor(pv.log(b,c))):-Math.pow(c,-Math.floor(-pv.log(-b,c)))};pv.logCeil=function(b,c){return b>0?Math.pow(c,Math.ceil(pv.log(b,c))):-Math.pow(c,-Math.ceil(-pv.log(-b,c)))}; +(function(){var b=Math.PI/180,c=180/Math.PI;pv.radians=function(d){return b*d};pv.degrees=function(d){return c*d}})();pv.keys=function(b){var c=[];for(var d in b)c.push(d);return c};pv.entries=function(b){var c=[];for(var d in b)c.push({key:d,value:b[d]});return c};pv.values=function(b){var c=[];for(var d in b)c.push(b[d]);return c};pv.dict=function(b,c){for(var d={},f={},g=0;g=94608E6){p=31536E6;u="%Y";o=function(w){w.setFullYear(w.getFullYear()+v)}}else if(t>=7776E6){p=2592E6;u="%m/%Y";o=function(w){w.setMonth(w.getMonth()+v)}}else if(t>=18144E5){p=6048E5;u="%m/%d";o=function(w){w.setDate(w.getDate()+7*v)}}else if(t>=2592E5){p=864E5;u="%m/%d";o=function(w){w.setDate(w.getDate()+v)}}else if(t>=108E5){p=36E5;u="%I:%M %p";o=function(w){w.setHours(w.getHours()+ +v)}}else if(t>=18E4){p=6E4;u="%I:%M %p";o=function(w){w.setMinutes(w.getMinutes()+v)}}else if(t>=3E3){p=1E3;u="%I:%M:%S";o=function(w){w.setSeconds(w.getSeconds()+v)}}else{p=1;u="%S.%Qs";o=function(w){w.setTime(w.getTime()+v)}}q=pv.Format.date(u);s=new Date(s);u=[];x(s,p);t=t/p;if(t>10)switch(p){case 36E5:v=t>20?6:3;s.setHours(Math.floor(s.getHours()/v)*v);break;case 2592E6:v=3;s.setMonth(Math.floor(s.getMonth()/v)*v);break;case 6E4:v=t>30?15:t>15?10:5;s.setMinutes(Math.floor(s.getMinutes()/v)*v); +break;case 1E3:v=t>90?15:t>60?10:5;s.setSeconds(Math.floor(s.getSeconds()/v)*v);break;case 1:v=t>1E3?250:t>200?100:t>100?50:t>50?25:5;s.setMilliseconds(Math.floor(s.getMilliseconds()/v)*v);break;default:v=pv.logCeil(t/15,10);if(t/v<2)v/=5;else if(t/v<5)v/=2;s.setFullYear(Math.floor(s.getFullYear()/v)*v);break}for(;;){o(s);if(s>m)break;u.push(new Date(s))}return r?u.reverse():u}arguments.length||(n=10);v=pv.logFloor(t/n,10);p=n/(t/v);if(p<=0.15)v*=10;else if(p<=0.35)v*=5;else 
if(p<=0.75)v*=2;p=Math.ceil(s/ +v)*v;m=Math.floor(m/v)*v;q=pv.Format.number().fractionDigits(Math.max(0,-Math.floor(pv.log(v,10)+0.01)));m=pv.range(p,m+v,v);return r?m.reverse():m};c.tickFormat=function(n){return q(n)};c.nice=function(){if(d.length!=2)return this;var n=d[0],p=d[d.length-1],m=p0;i--)l.push(-g(-j)*i);else{for(;jh[1];k--);return l.slice(j,k)};b.tickFormat=function(h){return h.toPrecision(1)}; +b.nice=function(){var h=b.domain();return b.domain(pv.logFloor(h[0],c),pv.logCeil(h[1],c))};b.base=function(h){if(arguments.length){c=Number(h);d=Math.log(c);b.transform(f,g);return this}return c};b.domain.apply(b,arguments);return b.base(10)};pv.Scale.root=function(){var b=pv.Scale.quantitative();b.power=function(c){if(arguments.length){var d=Number(c),f=1/d;b.transform(function(g){return Math.pow(g,f)},function(g){return Math.pow(g,d)});return this}return d};b.domain.apply(b,arguments);return b.power(2)}; +pv.Scale.ordinal=function(){function b(g){g in d||(d[g]=c.push(g)-1);return f[d[g]%f.length]}var c=[],d={},f=[];b.domain=function(g,h){if(arguments.length){g=g instanceof Array?arguments.length>1?pv.map(g,h):g:Array.prototype.slice.call(arguments);c=[];for(var i={},j=0;j1?pv.map(g,h):g:Array.prototype.slice.call(arguments); +if(typeof f[0]=="string")f=f.map(pv.color);return this}return f};b.split=function(g,h){var i=(h-g)/this.domain().length;f=pv.range(g+i/2,h,i);return this};b.splitFlush=function(g,h){var i=this.domain().length,j=(h-g)/(i-1);f=i==1?[(g+h)/2]:pv.range(g,h+j/2,j);return this};b.splitBanded=function(g,h,i){if(arguments.length<3)i=1;if(i<0){var j=this.domain().length;j=(h-g- -i*j)/(j+1);f=pv.range(g+j,h,j-i);f.band=-i}else{j=(h-g)/(this.domain().length+(1-i));f=pv.range(g+j*(1-i),h,j);f.band=j*i}return this}; +b.by=function(g){function h(){return b(g.apply(this,arguments))}for(var i in b)h[i]=b[i];return h};b.domain.apply(b,arguments);return b}; +pv.Scale.quantile=function(){function b(i){return h(Math.max(0,Math.min(d,pv.search.index(f,i)-1))/d)}var c=-1,d=-1,f=[],g=[],h=pv.Scale.linear();b.quantiles=function(i){if(arguments.length){c=Number(i);if(c<0){f=[g[0]].concat(g);d=g.length-1}else{f=[];f[0]=g[0];for(var j=1;j<=c;j++)f[j]=g[~~(j*(g.length-1)/c)];d=c-1}return this}return f};b.domain=function(i,j){if(arguments.length){g=i instanceof Array?pv.map(i,j):Array.prototype.slice.call(arguments);g.sort(pv.naturalOrder);b.quantiles(c);return this}return g}; +b.range=function(){if(arguments.length){h.range.apply(h,arguments);return this}return h.range()};b.by=function(i){function j(){return b(i.apply(this,arguments))}for(var k in b)j[k]=b[k];return j};b.domain.apply(b,arguments);return b}; +pv.histogram=function(b,c){var d=true;return{bins:function(f){var g=pv.map(b,c),h=[];arguments.length||(f=pv.Scale.linear(g).ticks());for(var i=0;i360)j-=360;else if(j<0)j+=360;if(j<60)return i+(h-i)*j/60;if(j<180)return h;if(j<240)return i+(h-i)*(240-j)/60;return i}function c(j){return Math.round(b(j)*255)}var d=this.h,f=this.s,g=this.l;d%=360;if(d<0)d+=360;f=Math.max(0,Math.min(f,1));g=Math.max(0,Math.min(g,1));var h=g<=0.5?g*(1+f):g+f-g*f,i=2*g-h;return pv.rgb(c(d+120),c(d),c(d-120),this.a)}; 
+pv.Color.names={aliceblue:"#f0f8ff",antiquewhite:"#faebd7",aqua:"#00ffff",aquamarine:"#7fffd4",azure:"#f0ffff",beige:"#f5f5dc",bisque:"#ffe4c4",black:"#000000",blanchedalmond:"#ffebcd",blue:"#0000ff",blueviolet:"#8a2be2",brown:"#a52a2a",burlywood:"#deb887",cadetblue:"#5f9ea0",chartreuse:"#7fff00",chocolate:"#d2691e",coral:"#ff7f50",cornflowerblue:"#6495ed",cornsilk:"#fff8dc",crimson:"#dc143c",cyan:"#00ffff",darkblue:"#00008b",darkcyan:"#008b8b",darkgoldenrod:"#b8860b",darkgray:"#a9a9a9",darkgreen:"#006400", +darkgrey:"#a9a9a9",darkkhaki:"#bdb76b",darkmagenta:"#8b008b",darkolivegreen:"#556b2f",darkorange:"#ff8c00",darkorchid:"#9932cc",darkred:"#8b0000",darksalmon:"#e9967a",darkseagreen:"#8fbc8f",darkslateblue:"#483d8b",darkslategray:"#2f4f4f",darkslategrey:"#2f4f4f",darkturquoise:"#00ced1",darkviolet:"#9400d3",deeppink:"#ff1493",deepskyblue:"#00bfff",dimgray:"#696969",dimgrey:"#696969",dodgerblue:"#1e90ff",firebrick:"#b22222",floralwhite:"#fffaf0",forestgreen:"#228b22",fuchsia:"#ff00ff",gainsboro:"#dcdcdc", +ghostwhite:"#f8f8ff",gold:"#ffd700",goldenrod:"#daa520",gray:"#808080",green:"#008000",greenyellow:"#adff2f",grey:"#808080",honeydew:"#f0fff0",hotpink:"#ff69b4",indianred:"#cd5c5c",indigo:"#4b0082",ivory:"#fffff0",khaki:"#f0e68c",lavender:"#e6e6fa",lavenderblush:"#fff0f5",lawngreen:"#7cfc00",lemonchiffon:"#fffacd",lightblue:"#add8e6",lightcoral:"#f08080",lightcyan:"#e0ffff",lightgoldenrodyellow:"#fafad2",lightgray:"#d3d3d3",lightgreen:"#90ee90",lightgrey:"#d3d3d3",lightpink:"#ffb6c1",lightsalmon:"#ffa07a", +lightseagreen:"#20b2aa",lightskyblue:"#87cefa",lightslategray:"#778899",lightslategrey:"#778899",lightsteelblue:"#b0c4de",lightyellow:"#ffffe0",lime:"#00ff00",limegreen:"#32cd32",linen:"#faf0e6",magenta:"#ff00ff",maroon:"#800000",mediumaquamarine:"#66cdaa",mediumblue:"#0000cd",mediumorchid:"#ba55d3",mediumpurple:"#9370db",mediumseagreen:"#3cb371",mediumslateblue:"#7b68ee",mediumspringgreen:"#00fa9a",mediumturquoise:"#48d1cc",mediumvioletred:"#c71585",midnightblue:"#191970",mintcream:"#f5fffa",mistyrose:"#ffe4e1", +moccasin:"#ffe4b5",navajowhite:"#ffdead",navy:"#000080",oldlace:"#fdf5e6",olive:"#808000",olivedrab:"#6b8e23",orange:"#ffa500",orangered:"#ff4500",orchid:"#da70d6",palegoldenrod:"#eee8aa",palegreen:"#98fb98",paleturquoise:"#afeeee",palevioletred:"#db7093",papayawhip:"#ffefd5",peachpuff:"#ffdab9",peru:"#cd853f",pink:"#ffc0cb",plum:"#dda0dd",powderblue:"#b0e0e6",purple:"#800080",red:"#ff0000",rosybrown:"#bc8f8f",royalblue:"#4169e1",saddlebrown:"#8b4513",salmon:"#fa8072",sandybrown:"#f4a460",seagreen:"#2e8b57", +seashell:"#fff5ee",sienna:"#a0522d",silver:"#c0c0c0",skyblue:"#87ceeb",slateblue:"#6a5acd",slategray:"#708090",slategrey:"#708090",snow:"#fffafa",springgreen:"#00ff7f",steelblue:"#4682b4",tan:"#d2b48c",teal:"#008080",thistle:"#d8bfd8",tomato:"#ff6347",turquoise:"#40e0d0",violet:"#ee82ee",wheat:"#f5deb3",white:"#ffffff",whitesmoke:"#f5f5f5",yellow:"#ffff00",yellowgreen:"#9acd32",transparent:pv.Color.transparent=pv.rgb(0,0,0,0)};(function(){var b=pv.Color.names;for(var c in b)b[c]=pv.color(b[c])})(); +pv.colors=function(){var b=pv.Scale.ordinal();b.range.apply(b,arguments);return b};pv.Colors={};pv.Colors.category10=function(){var b=pv.colors("#1f77b4","#ff7f0e","#2ca02c","#d62728","#9467bd","#8c564b","#e377c2","#7f7f7f","#bcbd22","#17becf");b.domain.apply(b,arguments);return b}; +pv.Colors.category20=function(){var 
b=pv.colors("#1f77b4","#aec7e8","#ff7f0e","#ffbb78","#2ca02c","#98df8a","#d62728","#ff9896","#9467bd","#c5b0d5","#8c564b","#c49c94","#e377c2","#f7b6d2","#7f7f7f","#c7c7c7","#bcbd22","#dbdb8d","#17becf","#9edae5");b.domain.apply(b,arguments);return b}; +pv.Colors.category19=function(){var b=pv.colors("#9c9ede","#7375b5","#4a5584","#cedb9c","#b5cf6b","#8ca252","#637939","#e7cb94","#e7ba52","#bd9e39","#8c6d31","#e7969c","#d6616b","#ad494a","#843c39","#de9ed6","#ce6dbd","#a55194","#7b4173");b.domain.apply(b,arguments);return b};pv.ramp=function(){var b=pv.Scale.linear();b.range.apply(b,arguments);return b}; +pv.Scene=pv.SvgScene={svg:"http://www.w3.org/2000/svg",xmlns:"http://www.w3.org/2000/xmlns",xlink:"http://www.w3.org/1999/xlink",xhtml:"http://www.w3.org/1999/xhtml",scale:1,events:["DOMMouseScroll","mousewheel","mousedown","mouseup","mouseover","mouseout","mousemove","click","dblclick"],implicit:{svg:{"shape-rendering":"auto","pointer-events":"painted",x:0,y:0,dy:0,"text-anchor":"start",transform:"translate(0,0)",fill:"none","fill-opacity":1,stroke:"none","stroke-opacity":1,"stroke-width":1.5,"stroke-linejoin":"miter"}, +css:{font:"10px sans-serif"}}};pv.SvgScene.updateAll=function(b){if(b.length&&b[0].reverse&&b.type!="line"&&b.type!="area"){for(var c=pv.extend(b),d=0,f=b.length-1;f>=0;d++,f--)c[d]=b[f];b=c}this.removeSiblings(this[b.type](b))};pv.SvgScene.create=function(b){return document.createElementNS(this.svg,b)}; +pv.SvgScene.expect=function(b,c,d,f){if(b){if(b.tagName=="a")b=b.firstChild;if(b.tagName!=c){c=this.create(c);b.parentNode.replaceChild(c,b);b=c}}else b=this.create(c);for(var g in d){c=d[g];if(c==this.implicit.svg[g])c=null;c==null?b.removeAttribute(g):b.setAttribute(g,c)}for(g in f){c=f[g];if(c==this.implicit.css[g])c=null;if(c==null)b.style.removeProperty(g);else b.style[g]=c}return b}; +pv.SvgScene.append=function(b,c,d){b.$scene={scenes:c,index:d};b=this.title(b,c[d]);b.parentNode||c.$g.appendChild(b);return b.nextSibling};pv.SvgScene.title=function(b,c){var d=b.parentNode;if(d&&d.tagName!="a")d=null;if(c.title){if(!d){d=this.create("a");b.parentNode&&b.parentNode.replaceChild(d,b);d.appendChild(b)}d.setAttributeNS(this.xlink,"title",c.title);return d}d&&d.parentNode.replaceChild(b,d);return b}; +pv.SvgScene.dispatch=pv.listener(function(b){var c=b.target.$scene;if(c){var d=b.type;switch(d){case "DOMMouseScroll":d="mousewheel";b.wheel=-480*b.detail;break;case "mousewheel":b.wheel=(window.opera?12:1)*b.wheelDelta;break}pv.Mark.dispatch(d,c.scenes,c.index)&&b.preventDefault()}});pv.SvgScene.removeSiblings=function(b){for(;b;){var c=b.nextSibling;b.parentNode.removeChild(b);b=c}};pv.SvgScene.undefined=function(){}; +pv.SvgScene.pathBasis=function(){function b(f,g,h,i,j){return{x:f[0]*g.left+f[1]*h.left+f[2]*i.left+f[3]*j.left,y:f[0]*g.top+f[1]*h.top+f[2]*i.top+f[3]*j.top}}var c=[[1/6,2/3,1/6,0],[0,2/3,1/3,0],[0,1/3,2/3,0],[0,1/6,2/3,1/6]],d=function(f,g,h,i){var j=b(c[1],f,g,h,i),k=b(c[2],f,g,h,i);f=b(c[3],f,g,h,i);return"C"+j.x+","+j.y+","+k.x+","+k.y+","+f.x+","+f.y};d.segment=function(f,g,h,i){var j=b(c[0],f,g,h,i),k=b(c[1],f,g,h,i),l=b(c[2],f,g,h,i);f=b(c[3],f,g,h,i);return"M"+j.x+","+j.y+"C"+k.x+","+k.y+ +","+l.x+","+l.y+","+f.x+","+f.y};return d}();pv.SvgScene.curveBasis=function(b){if(b.length<=2)return"";var c="",d=b[0],f=d,g=d,h=b[1];c+=this.pathBasis(d,f,g,h);for(var i=2;i1){j=c[1];h=b[k];k++;f+="C"+(g.left+i.x)+","+(g.top+i.y)+","+(h.left-j.x)+","+(h.top-j.y)+","+h.left+","+h.top;for(g=2;g9){k=3/Math.sqrt(k);f[h]= 
+k*i*d[h];f[h+1]=k*j*d[h]}}for(h=0;h2&&(g.interpolate=="basis"||g.interpolate=="cardinal"||g.interpolate=="monotone")?d:c)(l,q-1));l=q-1}}if(!j.length)return f;f=this.expect(f,"path",{"shape-rendering":g.antialias?null:"crispEdges","pointer-events":g.events,cursor:g.cursor,d:"M"+j.join("ZM")+"Z",fill:h.color,"fill-opacity":h.opacity|| +null,stroke:i.color,"stroke-opacity":i.opacity||null,"stroke-width":i.opacity?g.lineWidth/this.scale:null});return this.append(f,b,0)}; +pv.SvgScene.areaSegment=function(b){var c=b.$g.firstChild,d=b[0],f,g;if(d.interpolate=="basis"||d.interpolate=="cardinal"||d.interpolate=="monotone"){f=[];g=[];for(var h=0,i=b.length;h2&&(d.interpolate=="basis"||d.interpolate=="cardinal"||d.interpolate=="monotone"))switch(d.interpolate){case "basis":h+=this.curveBasis(b);break;case "cardinal":h+=this.curveCardinal(b,d.tension);break;case "monotone":h+=this.curveMonotone(b); +break}else for(var i=1;i1)break;return"A"+f+","+f+" 0 0,"+d+" "+c.left+","+c.top;case "step-before":return"V"+c.top+"H"+c.left;case "step-after":return"H"+c.left+"V"+c.top}return"L"+c.left+","+c.top};pv.SvgScene.lineIntersect=function(b,c,d,f){return b.plus(c.times(d.minus(b).dot(f.perp())/c.dot(f.perp())))}; +pv.SvgScene.pathJoin=function(b,c,d,f){var g=pv.vector(c.left,c.top);d=pv.vector(d.left,d.top);var h=d.minus(g),i=h.perp().norm(),j=i.times(c.lineWidth/(2*this.scale));c=g.plus(j);var k=d.plus(j),l=d.minus(j);j=g.minus(j);if(b&&b.visible){b=g.minus(b.left,b.top).perp().norm().plus(i);j=this.lineIntersect(g,b,j,h);c=this.lineIntersect(g,b,c,h)}if(f&&f.visible){f=pv.vector(f.left,f.top).minus(d).perp().norm().plus(i);l=this.lineIntersect(d,f,l,h);k=this.lineIntersect(d,f,k,h)}return"M"+c.x+","+c.y+ +"L"+k.x+","+k.y+" "+l.x+","+l.y+" "+j.x+","+j.y}; +pv.SvgScene.panel=function(b){for(var c=b.$g,d=c&&c.firstChild,f=0;f=2*Math.PI)i=i?"M0,"+j+"A"+j+","+j+" 0 1,1 0,"+-j+"A"+j+","+j+" 0 1,1 0,"+j+"M0,"+i+"A"+i+","+i+" 0 1,1 0,"+-i+"A"+i+","+i+" 0 1,1 0,"+i+"Z":"M0,"+j+"A"+j+","+j+" 0 1,1 0,"+-j+"A"+j+","+j+" 0 1,1 0,"+j+"Z";else{var l=Math.min(f.startAngle,f.endAngle),q=Math.max(f.startAngle,f.endAngle), +n=Math.cos(l),p=Math.cos(q);l=Math.sin(l);q=Math.sin(q);i=i?"M"+j*n+","+j*l+"A"+j+","+j+" 0 "+(k1?c:null)}; +a.anchor=function(b){b||(b="center");return(new pv.Anchor(this)).name(b).data(function(){return this.scene.target.map(function(c){return c.data})}).visible(function(){return this.scene.target[this.index].visible}).left(function(){var c=this.scene.target[this.index],d=c.width||0;switch(this.name()){case "bottom":case "top":case "center":return c.left+d/2;case "left":return null}return c.left+d}).top(function(){var c=this.scene.target[this.index],d=c.height||0;switch(this.name()){case "left":case "right":case "center":return c.top+ +d/2;case "top":return null}return c.top+d}).right(function(){var c=this.scene.target[this.index];return this.name()=="left"?c.right+(c.width||0):null}).bottom(function(){var c=this.scene.target[this.index];return this.name()=="top"?c.bottom+(c.height||0):null}).textAlign(function(){switch(this.name()){case "bottom":case "top":case "center":return"center";case "right":return"right"}return"left"}).textBaseline(function(){switch(this.name()){case "right":case "left":case "center":return"middle";case "top":return"top"}return"bottom"})}; +a.anchorTarget=function(){return this.target};a.margin=function(b){return this.left(b).right(b).top(b).bottom(b)};a.instance=function(b){var 
c=this.scene||this.parent.instance(-1).children[this.childIndex],d=!arguments.length||this.hasOwnProperty("index")?this.index:b;return c[d<0?c.length-1:d]}; +a.instances=function(b){for(var c=this,d=[],f;!(f=c.scene);){b=b.parent;d.push({index:b.index,childIndex:c.childIndex});c=c.parent}for(;d.length;){b=d.pop();f=f[b.index].children[b.childIndex]}if(this.hasOwnProperty("index")){d=pv.extend(f[this.index]);d.right=d.top=d.left=d.bottom=0;return[d]}return f};a.first=function(){return this.scene[0]};a.last=function(){return this.scene[this.scene.length-1]};a.sibling=function(){return this.index==0?null:this.scene[this.index-1]}; +a.cousin=function(){var b=this.parent;return(b=b&&b.sibling())&&b.children?b.children[this.childIndex][this.index]:null}; +a.render=function(){function b(i,j,k){i.scale=k;if(j=0;l--){var q=k[l];if(!(q.name in c)){c[q.name]=q;switch(q.name){case "data":f=q;break;case "visible":g=q;break;default:d[q.type].push(q);break}}}while(j=j.proto)}var c={},d=[[],[],[],[]],f,g;b(this);b(this.defaults);d[1].reverse();d[3].reverse();var h=this;do for(var i in h.properties)i in c||d[2].push(c[i]={name:i,type:2,value:null});while(h=h.proto);h=d[0].concat(d[1]);for(i=0;ih.id)d[g.name]={id:0,value:g.type&1?g.value.apply(this,c):g.value}}}d=this.binds.data;d=d.type& +1?d.value.apply(this,c):d.value;c.unshift(null);b.length=d.length;for(f=0;f0;l--){p=m[l];p.scale=q;q*=p.scene[p.index].transform.k}if(n.children){l=0;for(m=n.children.length;l=3*Math.PI/2};pv.Wedge.prototype.buildImplied=function(b){if(b.angle==null)b.angle=b.endAngle-b.startAngle;else if(b.endAngle==null)b.endAngle=b.startAngle+b.angle;pv.Mark.prototype.buildImplied.call(this,b)};pv.simulation=function(b){return new pv.Simulation(b)};pv.Simulation=function(b){for(var c=0;c=s,u=q.y>=t;l.leaf=false;switch((u<<1)+x){case 0:l=l.c1||(l.c1=new pv.Quadtree.Node);break;case 1:l=l.c2||(l.c2=new pv.Quadtree.Node);break;case 2:l=l.c3||(l.c3=new pv.Quadtree.Node);break;case 3:l=l.c4||(l.c4=new pv.Quadtree.Node); +break}if(x)n=s;else m=s;if(u)p=t;else r=t;c(l,q,n,p,m,r)}var f,g=Number.POSITIVE_INFINITY,h=g,i=Number.NEGATIVE_INFINITY,j=i;for(f=b;f;f=f.next){if(f.xi)i=f.x;if(f.y>j)j=f.y}f=i-g;var k=j-h;if(f>k)j=h+f;else i=g+k;this.xMin=g;this.yMin=h;this.xMax=i;this.yMax=j;this.root=new pv.Quadtree.Node;for(f=b;f;f=f.next)c(this.root,f,g,h,i,j)};pv.Quadtree.Node=function(){this.leaf=true;this.p=this.c4=this.c3=this.c2=this.c1=null};pv.Force={}; +pv.Force.charge=function(b){function c(l){function q(m){c(m);l.cn+=m.cn;n+=m.cn*m.cx;p+=m.cn*m.cy}var n=0,p=0;l.cn=0;if(!l.leaf){l.c1&&q(l.c1);l.c2&&q(l.c2);l.c3&&q(l.c3);l.c4&&q(l.c4)}if(l.p){l.cn+=b;n+=b*l.p.x;p+=b*l.p.y}l.cx=n/l.cn;l.cy=p/l.cn}function d(l,q,n,p,m,r){var s=l.cx-q.x,t=l.cy-q.y,x=1/Math.sqrt(s*s+t*t);if(l.leaf&&l.p!=q||(m-n)*xg)x=g;l=l.cn*x*x*x;s=s*l;t=t*l;q.fx+=s;q.fy+=t}}else if(!l.leaf){var u=(n+m)*0.5,o=(p+r)*0.5;l.c1&&d(l.c1,q,n,p,u,o);l.c2&&d(l.c2,q,u,p, +m,o);l.c3&&d(l.c3,q,n,o,u,r);l.c4&&d(l.c4,q,u,o,m,r);if(!(xg)x=g;if(l.p&&l.p!=q){l=b*x*x*x;s=s*l;t=t*l;q.fx+=s;q.fy+=t}}}}var f=2,g=1/f,h=500,i=1/h,j=0.9,k={};arguments.length||(b=-40);k.constant=function(l){if(arguments.length){b=Number(l);return k}return b};k.domain=function(l,q){if(arguments.length){f=Number(l);g=1/f;h=Number(q);i=1/h;return k}return[f,h]};k.theta=function(l){if(arguments.length){j=Number(l);return k}return j};k.apply=function(l,q){c(q.root);for(l=l;l;l=l.next)d(q.root, +l,q.xMin,q.yMin,q.xMax,q.yMax)};return k};pv.Force.drag=function(b){var 
c={};arguments.length||(b=0.1);c.constant=function(d){if(arguments.length){b=d;return c}return b};c.apply=function(d){if(b)for(d=d;d;d=d.next){d.fx-=b*d.vx;d.fy-=b*d.vy}};return c}; +pv.Force.spring=function(b){var c=0.1,d=20,f,g,h={};arguments.length||(b=0.1);h.links=function(i){if(arguments.length){f=i;g=i.map(function(j){return 1/Math.sqrt(Math.max(j.sourceNode.linkDegree,j.targetNode.linkDegree))});return h}return f};h.constant=function(i){if(arguments.length){b=Number(i);return h}return b};h.damping=function(i){if(arguments.length){c=Number(i);return h}return c};h.length=function(i){if(arguments.length){d=Number(i);return h}return d};h.apply=function(){for(var i=0;ig,o=sh){l.c1&&u&&c(l.c1,q,n,p,s,t);l.c2&&o&&c(l.c2,q,s,p,m,t)}if(x){l.c3&&u&&c(l.c3,q,n,t,s,r);l.c4&&o&&c(l.c4,q,s,t,m,r)}}if(l.p&&l.p!=q){n=q.x-l.p.x;p=q.y-l.p.y;m=Math.sqrt(n*n+p*p);r=f+b(l.p);if(mm)m=p}for(var r=0;rc.max?c.max:g.x;if(d)for(g=f;g;g=g.next)g.y=g.yd.max?d.max:g.y};return b};pv.Layout=function(){pv.Panel.call(this)};pv.Layout.prototype=pv.extend(pv.Panel); +pv.Layout.prototype.property=function(b,c){if(!this.hasOwnProperty("properties"))this.properties=pv.extend(this.properties);this.properties[b]=true;this.propertyMethod(b,false,pv.Mark.cast[b]=c);return this}; +pv.Layout.Network=function(){pv.Layout.call(this);var b=this;this.$id=pv.id();(this.node=(new pv.Mark).data(function(){return b.nodes()}).strokeStyle("#1f77b4").fillStyle("#fff").left(function(c){return c.x}).top(function(c){return c.y})).parent=this;this.link=(new pv.Mark).extend(this.node).data(function(c){return[c.sourceNode,c.targetNode]}).fillStyle(null).lineWidth(function(c,d){return d.linkValue*1.5}).strokeStyle("rgba(0,0,0,.2)");this.link.add=function(c){return b.add(pv.Panel).data(function(){return b.links()}).add(c).extend(this)}; +(this.label=(new pv.Mark).extend(this.node).textMargin(7).textBaseline("middle").text(function(c){return c.nodeName||c.nodeValue}).textAngle(function(c){c=c.midAngle;return pv.Wedge.upright(c)?c:c+Math.PI}).textAlign(function(c){return pv.Wedge.upright(c.midAngle)?"left":"right"})).parent=this}; +pv.Layout.Network.prototype=pv.extend(pv.Layout).property("nodes",function(b){return b.map(function(c,d){if(typeof c!="object")c={nodeValue:c};c.index=d;return c})}).property("links",function(b){return b.map(function(c){if(isNaN(c.linkValue))c.linkValue=isNaN(c.value)?1:c.value;return c})});pv.Layout.Network.prototype.reset=function(){this.$id=pv.id();return this};pv.Layout.Network.prototype.buildProperties=function(b,c){if((b.$id||0)=this.$id)return true;b.$id=this.$id;b.nodes.forEach(function(c){c.linkDegree=0});b.links.forEach(function(c){var d=c.linkValue;(c.sourceNode||(c.sourceNode=b.nodes[c.source])).linkDegree+=d;(c.targetNode||(c.targetNode=b.nodes[c.target])).linkDegree+=d})};pv.Layout.Hierarchy=function(){pv.Layout.Network.call(this);this.link.strokeStyle("#ccc")};pv.Layout.Hierarchy.prototype=pv.extend(pv.Layout.Network); +pv.Layout.Hierarchy.prototype.buildImplied=function(b){if(!b.links)b.links=pv.Layout.Hierarchy.links.call(this);pv.Layout.Network.prototype.buildImplied.call(this,b)};pv.Layout.Hierarchy.links=function(){return this.nodes().filter(function(b){return b.parentNode}).map(function(b){return{sourceNode:b,targetNode:b.parentNode,linkValue:1}})}; +pv.Layout.Hierarchy.NodeLink={buildImplied:function(b){function c(m){return m.parentNode?m.depth*(n-q)+q:0}function d(m){return m.parentNode?(m.breadth-0.25)*2*Math.PI:0}function f(m){switch(i){case "left":return m.depth*k;case "right":return 
k-m.depth*k;case "top":return m.breadth*k;case "bottom":return k-m.breadth*k;case "radial":return k/2+c(m)*Math.cos(m.midAngle)}}function g(m){switch(i){case "left":return m.breadth*l;case "right":return l-m.breadth*l;case "top":return m.depth*l;case "bottom":return l- +m.depth*l;case "radial":return l/2+c(m)*Math.sin(m.midAngle)}}var h=b.nodes,i=b.orient,j=/^(top|bottom)$/.test(i),k=b.width,l=b.height;if(i=="radial"){var q=b.innerRadius,n=b.outerRadius;if(q==null)q=0;if(n==null)n=Math.min(k,l)/2}for(b=0;bb.dy?0:-Math.PI/2});(this.leaf=(new pv.Mark).extend(this.node).fillStyle(null).strokeStyle(null).visible(function(b){return!b.firstChild})).parent= +this;delete this.link};pv.Layout.Treemap.prototype=pv.extend(pv.Layout.Hierarchy).property("round",Boolean).property("paddingLeft",Number).property("paddingRight",Number).property("paddingTop",Number).property("paddingBottom",Number).property("mode",String).property("order",String);a=pv.Layout.Treemap.prototype;a.defaults=(new pv.Layout.Treemap).extend(pv.Layout.Hierarchy.prototype.defaults).mode("squarify").order("ascending");a.padding=function(b){return this.paddingLeft(b).paddingRight(b).paddingTop(b).paddingBottom(b)}; +a.$size=function(b){return Number(b.nodeValue)};a.size=function(b){this.$size=pv.functor(b);return this}; +a.buildImplied=function(b){function c(r,s,t,x,u,o,v){for(var w=0,y=0;wt)t=v;u+=v}u*=u;s*=s;return Math.max(s*t/u,u/(s*x))}function f(r,s){function t(A){var D=o==y,G=pv.sum(A,n),E=y?p(G/y):0;c(A,G,D,x,u,D?o:E,D?E:v);if(D){u+=E;v-=E}else{x+= +E;o-=E}y=Math.min(o,v);return D}var x=r.x+j,u=r.y+l,o=r.dx-j-k,v=r.dy-l-q;if(m!="squarify")c(r.childNodes,r.size,m=="slice"?true:m=="dice"?false:s&1,x,u,o,v);else{var w=[];s=Infinity;var y=Math.min(o,v),z=o*v/r.size;if(!(r.size<=0)){r.visitBefore(function(A){A.size*=z});for(r=r.childNodes.slice();r.length;){var C=r[r.length-1];if(C.size){w.push(C);z=d(w,y);if(z<=s){r.pop();s=z}else{w.pop();t(w);w.length=0;s=Infinity}}else r.pop()}if(t(w))for(s=0;s0){i(k(C,o,v),o,B);A+=B;D+=B}G+=C.mod;A+=y.mod;E+=w.mod;D+=z.mod;C=h(C);y=g(y)}if(C&&!h(z)){z.thread=C;z.mod+=G-D}if(y&&!g(w)){w.thread=y;w.mod+=A-E;v=o}}return v}function g(o){return o.firstChild||o.thread}function h(o){return o.lastChild||o.thread}function i(o,v,w){var y=v.number-o.number;v.change-=w/y;v.shift+=w;o.change+= +w/y;v.prelim+=w;v.mod+=w}function j(o){var v=0,w=0;for(o=o.lastChild;o;o=o.previousSibling){o.prelim+=v;o.mod+=v;w+=o.change;v+=o.shift+w}}function k(o,v,w){return o.ancestor.parentNode==v.parentNode?o.ancestor:w}function l(o,v){return(v?1:t+1)/(m=="radial"?o:1)}function q(o){return m=="radial"?o.breadth/r:0}function n(o){switch(m){case "left":return o.depth;case "right":return x-o.depth;case "top":case "bottom":return o.breadth+x/2;case "radial":return x/2+o.depth*Math.cos(q(o))}}function p(o){switch(m){case "left":case "right":return o.breadth+ +u/2;case "top":return o.depth;case "bottom":return u-o.depth;case "radial":return u/2+o.depth*Math.sin(q(o))}}if(!pv.Layout.Hierarchy.prototype.buildImplied.call(this,b)){var m=b.orient,r=b.depth,s=b.breadth,t=b.group,x=b.width,u=b.height;b=b.nodes[0];b.visitAfter(function(o,v){o.ancestor=o;o.prelim=0;o.mod=0;o.change=0;o.shift=0;o.number=o.previousSibling?o.previousSibling.number+1:0;o.depth=v});c(b);d(b,-b.prelim,0);b.visitAfter(function(o){o.breadth*=s;o.depth*=r;o.midAngle=q(o);o.x=n(o);o.y=p(o); +if(o.firstChild)o.midAngle+=Math.PI;delete o.breadth;delete o.depth;delete o.ancestor;delete o.prelim;delete o.mod;delete o.change;delete o.shift;delete o.number;delete 
o.thread})}};pv.Layout.Indent=function(){pv.Layout.Hierarchy.call(this);this.link.interpolate("step-after")};pv.Layout.Indent.prototype=pv.extend(pv.Layout.Hierarchy).property("depth",Number).property("breadth",Number);pv.Layout.Indent.prototype.defaults=(new pv.Layout.Indent).extend(pv.Layout.Hierarchy.prototype.defaults).depth(15).breadth(15); +pv.Layout.Indent.prototype.buildImplied=function(b){function c(i,j,k){i.x=g+k++*f;i.y=h+j++*d;i.midAngle=0;for(i=i.firstChild;i;i=i.nextSibling)j=c(i,j,k);return j}if(!pv.Layout.Hierarchy.prototype.buildImplied.call(this,b)){var d=b.breadth,f=b.depth,g=0,h=0;c(b.nodes[0],1,1)}};pv.Layout.Pack=function(){pv.Layout.Hierarchy.call(this);this.node.radius(function(b){return b.radius}).strokeStyle("rgb(31, 119, 180)").fillStyle("rgba(31, 119, 180, .25)");this.label.textAlign("center");delete this.link}; +pv.Layout.Pack.prototype=pv.extend(pv.Layout.Hierarchy).property("spacing",Number).property("order",String);pv.Layout.Pack.prototype.defaults=(new pv.Layout.Pack).extend(pv.Layout.Hierarchy.prototype.defaults).spacing(1).order("ascending");pv.Layout.Pack.prototype.$radius=function(){return 1};pv.Layout.Pack.prototype.size=function(b){this.$radius=typeof b=="function"?function(){return Math.sqrt(b.apply(this,arguments))}:(b=Math.sqrt(b),function(){return b});return this}; +pv.Layout.Pack.prototype.buildImplied=function(b){function c(n){var p=pv.Mark.stack;p.unshift(null);for(var m=0,r=n.length;m0.0010}var t=Infinity,x=-Infinity,u=Infinity,o=-Infinity,v,w,y,z,C;v=n[0];v.x=-v.radius;v.y=0;p(v);if(n.length>1){w=n[1];w.x=w.radius;w.y=0;p(w);if(n.length>2){y=n[2];g(v,w,y);p(y);m(v,y);v.p= +y;m(y,w);w=v.n;for(var A=3;A0){r(v,z);w=z;A--}else if(D<0){r(z,w);v=z;A--}}}}v=(t+x)/2;w=(u+o)/2;for(A=y=0;An.min){n.sim.step(); +q=true}q&&d.render()},42)}else for(k=0;kg)g=j;i.size=i.firstChild?pv.sum(i.childNodes,function(k){return k.size}):c.$size.apply(c,(f[0]=i,f))});f.shift();switch(b.order){case "ascending":d.sort(function(i,j){return i.size-j.size});break;case "descending":d.sort(function(i,j){return j.size-i.size});break}var h=1/g;d.minBreadth=0;d.breadth= +0.5;d.maxBreadth=1;d.visitBefore(function(i){for(var j=i.minBreadth,k=i.maxBreadth-j,l=i.firstChild;l;l=l.nextSibling){l.minBreadth=j;l.maxBreadth=j+=l.size/i.size*k;l.breadth=(j+l.minBreadth)/2}});d.visitAfter(function(i,j){i.minDepth=(j-1)*h;i.maxDepth=i.depth=j*h});pv.Layout.Hierarchy.NodeLink.buildImplied.call(this,b)}};pv.Layout.Partition.Fill=function(){pv.Layout.Partition.call(this);pv.Layout.Hierarchy.Fill.constructor.call(this)};pv.Layout.Partition.Fill.prototype=pv.extend(pv.Layout.Partition); +pv.Layout.Partition.Fill.prototype.buildImplied=function(b){pv.Layout.Partition.prototype.buildImplied.call(this,b)||pv.Layout.Hierarchy.Fill.buildImplied.call(this,b)};pv.Layout.Arc=function(){pv.Layout.Network.call(this);var b,c,d,f=this.buildImplied;this.buildImplied=function(g){f.call(this,g);c=g.directed;b=g.orient=="radial"?"linear":"polar";d=g.orient=="right"||g.orient=="top"};this.link.data(function(g){var h=g.sourceNode;g=g.targetNode;return d!=(c||h.breadth>1)*f:null}).bottom(function(k,l){return d=="mirror"?l&1?null:(l+1>>1)*-f:(l&1||-1)*(l+1>>1)*f}).fillStyle(function(k,l){return(l&1?h:i)((l>>1)+1)});this.band.add=function(k){return b.add(pv.Panel).extend(c).add(k).extend(this)}};pv.Layout.Horizon.prototype=pv.extend(pv.Layout).property("bands",Number).property("mode",String).property("backgroundStyle",pv.color).property("positiveStyle",pv.color).property("negativeStyle",pv.color); 
+pv.Layout.Horizon.prototype.defaults=(new pv.Layout.Horizon).extend(pv.Layout.prototype.defaults).bands(2).mode("offset").backgroundStyle("white").positiveStyle("#1f77b4").negativeStyle("#d62728"); +pv.Layout.Rollup=function(){pv.Layout.Network.call(this);var b=this,c,d,f=b.buildImplied;this.buildImplied=function(g){f.call(this,g);c=g.$rollup.nodes;d=g.$rollup.links};this.node.data(function(){return c}).size(function(g){return g.nodes.length*20});this.link.interpolate("polar").eccentricity(0.8);this.link.add=function(g){return b.add(pv.Panel).data(function(){return d}).add(g).extend(this)}};pv.Layout.Rollup.prototype=pv.extend(pv.Layout.Network).property("directed",Boolean); +pv.Layout.Rollup.prototype.x=function(b){this.$x=pv.functor(b);return this};pv.Layout.Rollup.prototype.y=function(b){this.$y=pv.functor(b);return this}; +pv.Layout.Rollup.prototype.buildImplied=function(b){function c(r){return i[r]+","+j[r]}if(!pv.Layout.Network.prototype.buildImplied.call(this,b)){var d=b.nodes,f=b.links,g=b.directed,h=d.length,i=[],j=[],k=0,l={},q={},n=pv.Mark.stack,p={parent:this};n.unshift(null);for(var m=0;mk.index?k.index+","+d.index:d.index+","+k.index;(n=q[h])||(n=q[h]={sourceNode:d,targetNode:k,linkValue:0,links:[]});n.links.push(f[m]);n.linkValue+=f[m].linkValue}b.$rollup={nodes:pv.values(l),links:pv.values(q)}}}; +pv.Layout.Matrix=function(){pv.Layout.Network.call(this);var b,c,d,f,g,h=this.buildImplied;this.buildImplied=function(i){h.call(this,i);b=i.nodes.length;c=i.width/b;d=i.height/b;f=i.$matrix.labels;g=i.$matrix.pairs};this.link.data(function(){return g}).left(function(){return c*(this.index%b)}).top(function(){return d*Math.floor(this.index/b)}).width(function(){return c}).height(function(){return d}).lineWidth(1.5).strokeStyle("#fff").fillStyle(function(i){return i.linkValue?"#555":"#eee"}).parent= +this;delete this.link.add;this.label.data(function(){return f}).left(function(){return this.index&1?c*((this.index>>1)+0.5):0}).top(function(){return this.index&1?0:d*((this.index>>1)+0.5)}).textMargin(4).textAlign(function(){return this.index&1?"left":"right"}).textAngle(function(){return this.index&1?-Math.PI/2:0});delete this.node};pv.Layout.Matrix.prototype=pv.extend(pv.Layout.Network).property("directed",Boolean);pv.Layout.Matrix.prototype.sort=function(b){this.$sort=b;return this}; +pv.Layout.Matrix.prototype.buildImplied=function(b){if(!pv.Layout.Network.prototype.buildImplied.call(this,b)){var c=b.nodes,d=b.links,f=this.$sort,g=c.length,h=pv.range(g),i=[],j=[],k={};b.$matrix={labels:i,pairs:j};f&&h.sort(function(m,r){return f(c[m],c[r])});for(var l=0;lk)l=null;if(g){if(l&&g.scene==l.scene&&g.index==l.index)return;pv.Mark.dispatch("unpoint",g.scene,g.index)}if(g=l){pv.Mark.dispatch("point",l.scene,l.index);pv.listen(this.root.canvas(),"mouseout",f)}}function f(l){if(g&&!pv.ancestor(this,l.relatedTarget)){pv.Mark.dispatch("unpoint",g.scene,g.index);g=null}}var g,h=null,i=1,j=1,k=arguments.length?b*b:900;d.collapse=function(l){if(arguments.length){h=String(l);switch(h){case "y":i= +1;j=0;break;case "x":i=0;j=1;break;default:j=i=1;break}return d}return h};return d}; +pv.Behavior.select=function(){function b(j){g=this.index;f=this.scene;i=this.mouse();h=j;h.x=i.x;h.y=i.y;h.dx=h.dy=0;pv.Mark.dispatch("selectstart",f,g)}function c(){if(f){f.mark.context(f,g,function(){var 
j=this.mouse();h.x=Math.max(0,Math.min(i.x,j.x));h.y=Math.max(0,Math.min(i.y,j.y));h.dx=Math.min(this.width(),Math.max(j.x,i.x))-h.x;h.dy=Math.min(this.height(),Math.max(j.y,i.y))-h.y;this.render()});pv.Mark.dispatch("select",f,g)}}function d(){if(f){pv.Mark.dispatch("selectend",f,g);f=null}}var f, +g,h,i;pv.listen(window,"mousemove",c);pv.listen(window,"mouseup",d);return b}; +pv.Behavior.resize=function(b){function c(k){h=this.index;g=this.scene;j=this.mouse();i=k;switch(b){case "left":j.x=i.x+i.dx;break;case "right":j.x=i.x;break;case "top":j.y=i.y+i.dy;break;case "bottom":j.y=i.y;break}pv.Mark.dispatch("resizestart",g,h)}function d(){if(g){g.mark.context(g,h,function(){var k=this.mouse();i.x=Math.max(0,Math.min(j.x,k.x));i.y=Math.max(0,Math.min(j.y,k.y));i.dx=Math.min(this.parent.width(),Math.max(k.x,j.x))-i.x;i.dy=Math.min(this.parent.height(),Math.max(k.y,j.y))-i.y; +this.render()});pv.Mark.dispatch("resize",g,h)}}function f(){if(g){pv.Mark.dispatch("resizeend",g,h);g=null}}var g,h,i,j;pv.listen(window,"mousemove",d);pv.listen(window,"mouseup",f);return c}; +pv.Behavior.pan=function(){function b(){g=this.index;f=this.scene;i=pv.vector(pv.event.pageX,pv.event.pageY);h=this.transform();j=1/(h.k*this.scale);if(k)k={x:(1-h.k)*this.width(),y:(1-h.k)*this.height()}}function c(){if(f){f.mark.context(f,g,function(){var l=h.translate((pv.event.pageX-i.x)*j,(pv.event.pageY-i.y)*j);if(k){l.x=Math.max(k.x,Math.min(0,l.x));l.y=Math.max(k.y,Math.min(0,l.y))}this.transform(l).render()});pv.Mark.dispatch("pan",f,g)}}function d(){f=null}var f,g,h,i,j,k;b.bound=function(l){if(arguments.length){k= +Boolean(l);return this}return Boolean(k)};pv.listen(window,"mousemove",c);pv.listen(window,"mouseup",d);return b}; +pv.Behavior.zoom=function(b){function c(){var f=this.mouse(),g=pv.event.wheel*b;f=this.transform().translate(f.x,f.y).scale(g<0?1E3/(1E3-g):(1E3+g)/1E3).translate(-f.x,-f.y);if(d){f.k=Math.max(1,f.k);f.x=Math.max((1-f.k)*this.width(),Math.min(0,f.x));f.y=Math.max((1-f.k)*this.height(),Math.min(0,f.y))}this.transform(f).render();pv.Mark.dispatch("zoom",this.scene,this.index)}var d;arguments.length||(b=1/48);c.bound=function(f){if(arguments.length){d=Boolean(f);return this}return Boolean(d)};return c}; +pv.Geo=function(){}; +pv.Geo.projections={mercator:{project:function(b){return{x:b.lng/180,y:b.lat>85?1:b.lat<-85?-1:Math.log(Math.tan(Math.PI/4+pv.radians(b.lat)/2))/Math.PI}},invert:function(b){return{lng:b.x*180,lat:pv.degrees(2*Math.atan(Math.exp(b.y*Math.PI))-Math.PI/2)}}},"gall-peters":{project:function(b){return{x:b.lng/180,y:Math.sin(pv.radians(b.lat))}},invert:function(b){return{lng:b.x*180,lat:pv.degrees(Math.asin(b.y))}}},sinusoidal:{project:function(b){return{x:pv.radians(b.lng)*Math.cos(pv.radians(b.lat))/Math.PI, +y:b.lat/90}},invert:function(b){return{lng:pv.degrees(b.x*Math.PI/Math.cos(b.y*Math.PI/2)),lat:b.y*90}}},aitoff:{project:function(b){var c=pv.radians(b.lng);b=pv.radians(b.lat);var d=Math.acos(Math.cos(b)*Math.cos(c/2));return{x:2*(d?Math.cos(b)*Math.sin(c/2)*d/Math.sin(d):0)/Math.PI,y:2*(d?Math.sin(b)*d/Math.sin(d):0)/Math.PI}},invert:function(b){var c=b.y*Math.PI/2;return{lng:pv.degrees(b.x*Math.PI/2/Math.cos(c)),lat:pv.degrees(c)}}},hammer:{project:function(b){var c=pv.radians(b.lng);b=pv.radians(b.lat); +var d=Math.sqrt(1+Math.cos(b)*Math.cos(c/2));return{x:2*Math.SQRT2*Math.cos(b)*Math.sin(c/2)/d/3,y:Math.SQRT2*Math.sin(b)/d/1.5}},invert:function(b){var c=b.x*3;b=b.y*1.5;var 
d=Math.sqrt(1-c*c/16-b*b/4);return{lng:pv.degrees(2*Math.atan2(d*c,2*(2*d*d-1))),lat:pv.degrees(Math.asin(d*b))}}},identity:{project:function(b){return{x:b.lng/180,y:b.lat/90}},invert:function(b){return{lng:b.x*180,lat:b.y*90}}}}; +pv.Geo.scale=function(b){function c(m){if(!n||m.lng!=n.lng||m.lat!=n.lat){n=m;m=d(m);p={x:k(m.x),y:l(m.y)}}return p}function d(m){return j.project({lng:m.lng-q.lng,lat:m.lat})}function f(m){m=j.invert(m);m.lng+=q.lng;return m}var g={x:0,y:0},h={x:1,y:1},i=[],j=pv.Geo.projections.identity,k=pv.Scale.linear(-1,1).range(0,1),l=pv.Scale.linear(-1,1).range(1,0),q={lng:0,lat:0},n,p;c.x=function(m){return c(m).x};c.y=function(m){return c(m).y};c.ticks={lng:function(m){var r;if(i.length>1){var s=pv.Scale.linear(); +if(m==undefined)m=10;r=s.domain(i,function(t){return t.lat}).ticks(m);m=s.domain(i,function(t){return t.lng}).ticks(m)}else{r=pv.range(-80,81,10);m=pv.range(-180,181,10)}return m.map(function(t){return r.map(function(x){return{lat:x,lng:t}})})},lat:function(m){return pv.transpose(c.ticks.lng(m))}};c.invert=function(m){return f({x:k.invert(m.x),y:l.invert(m.y)})};c.domain=function(m,r){if(arguments.length){i=m instanceof Array?arguments.length>1?pv.map(m,r):m:Array.prototype.slice.call(arguments); +if(i.length>1){var s=i.map(function(x){return x.lng}),t=i.map(function(x){return x.lat});q={lng:(pv.max(s)+pv.min(s))/2,lat:(pv.max(t)+pv.min(t))/2};s=i.map(d);k.domain(s,function(x){return x.x});l.domain(s,function(x){return x.y})}else{q={lng:0,lat:0};k.domain(-1,1);l.domain(-1,1)}n=null;return this}return i};c.range=function(m,r){if(arguments.length){if(typeof m=="object"){g={x:Number(m.x),y:Number(m.y)};h={x:Number(r.x),y:Number(r.y)}}else{g={x:0,y:0};h={x:Number(m),y:Number(r)}}k.range(g.x,h.x); +l.range(h.y,g.y);n=null;return this}return[g,h]};c.projection=function(m){if(arguments.length){j=typeof m=="string"?pv.Geo.projections[m]||pv.Geo.projections.identity:m;return this.domain(i)}return m};c.by=function(m){function r(){return c(m.apply(this,arguments))}for(var s in c)r[s]=c[s];return r};arguments.length&&c.projection(b);return c}; diff --git a/ComfyUI-KJNodes/kjweb_async/purify.min.js b/ComfyUI-KJNodes/kjweb_async/purify.min.js new file mode 100644 index 0000000000000000000000000000000000000000..c2f5164618eebcc44b0186f594ccb8092639c670 --- /dev/null +++ b/ComfyUI-KJNodes/kjweb_async/purify.min.js @@ -0,0 +1,3 @@ +/*! 
@license DOMPurify 3.0.11 | (c) Cure53 and other contributors | Released under the Apache license 2.0 and Mozilla Public License 2.0 | github.com/cure53/DOMPurify/blob/3.0.11/LICENSE */ +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e="undefined"!=typeof globalThis?globalThis:e||self).DOMPurify=t()}(this,(function(){"use strict";const{entries:e,setPrototypeOf:t,isFrozen:n,getPrototypeOf:o,getOwnPropertyDescriptor:r}=Object;let{freeze:i,seal:a,create:l}=Object,{apply:c,construct:s}="undefined"!=typeof Reflect&&Reflect;i||(i=function(e){return e}),a||(a=function(e){return e}),c||(c=function(e,t,n){return e.apply(t,n)}),s||(s=function(e,t){return new e(...t)});const u=b(Array.prototype.forEach),m=b(Array.prototype.pop),p=b(Array.prototype.push),f=b(String.prototype.toLowerCase),d=b(String.prototype.toString),h=b(String.prototype.match),g=b(String.prototype.replace),T=b(String.prototype.indexOf),y=b(String.prototype.trim),E=b(Object.prototype.hasOwnProperty),A=b(RegExp.prototype.test),_=(N=TypeError,function(){for(var e=arguments.length,t=new Array(e),n=0;n1?n-1:0),r=1;r2&&void 0!==arguments[2]?arguments[2]:f;t&&t(e,null);let i=o.length;for(;i--;){let t=o[i];if("string"==typeof t){const e=r(t);e!==t&&(n(o)||(o[i]=e),t=e)}e[t]=!0}return e}function R(e){for(let t=0;t/gm),B=a(/\${[\w\W]*}/gm),W=a(/^data-[\-\w.\u00B7-\uFFFF]/),G=a(/^aria-[\-\w]+$/),Y=a(/^(?:(?:(?:f|ht)tps?|mailto|tel|callto|sms|cid|xmpp):|[^a-z]|[a-z+.\-]+(?:[^a-z+.\-:]|$))/i),j=a(/^(?:\w+script|data):/i),X=a(/[\u0000-\u0020\u00A0\u1680\u180E\u2000-\u2029\u205F\u3000]/g),q=a(/^html$/i),$=a(/^[a-z][.\w]*(-[.\w]+)+$/i);var K=Object.freeze({__proto__:null,MUSTACHE_EXPR:H,ERB_EXPR:z,TMPLIT_EXPR:B,DATA_ATTR:W,ARIA_ATTR:G,IS_ALLOWED_URI:Y,IS_SCRIPT_OR_DATA:j,ATTR_WHITESPACE:X,DOCTYPE_NAME:q,CUSTOM_ELEMENT:$});const V=function(){return"undefined"==typeof window?null:window},Z=function(e,t){if("object"!=typeof e||"function"!=typeof e.createPolicy)return null;let n=null;const o="data-tt-policy-suffix";t&&t.hasAttribute(o)&&(n=t.getAttribute(o));const r="dompurify"+(n?"#"+n:"");try{return e.createPolicy(r,{createHTML:e=>e,createScriptURL:e=>e})}catch(e){return console.warn("TrustedTypes policy "+r+" could not be created."),null}};var J=function t(){let n=arguments.length>0&&void 0!==arguments[0]?arguments[0]:V();const o=e=>t(e);if(o.version="3.0.11",o.removed=[],!n||!n.document||9!==n.document.nodeType)return o.isSupported=!1,o;let{document:r}=n;const a=r,c=a.currentScript,{DocumentFragment:s,HTMLTemplateElement:N,Node:b,Element:R,NodeFilter:H,NamedNodeMap:z=n.NamedNodeMap||n.MozNamedAttrMap,HTMLFormElement:B,DOMParser:W,trustedTypes:G}=n,j=R.prototype,X=L(j,"cloneNode"),$=L(j,"nextSibling"),J=L(j,"childNodes"),Q=L(j,"parentNode");if("function"==typeof N){const e=r.createElement("template");e.content&&e.content.ownerDocument&&(r=e.content.ownerDocument)}let ee,te="";const{implementation:ne,createNodeIterator:oe,createDocumentFragment:re,getElementsByTagName:ie}=r,{importNode:ae}=a;let le={};o.isSupported="function"==typeof e&&"function"==typeof Q&&ne&&void 0!==ne.createHTMLDocument;const{MUSTACHE_EXPR:ce,ERB_EXPR:se,TMPLIT_EXPR:ue,DATA_ATTR:me,ARIA_ATTR:pe,IS_SCRIPT_OR_DATA:fe,ATTR_WHITESPACE:de,CUSTOM_ELEMENT:he}=K;let{IS_ALLOWED_URI:ge}=K,Te=null;const ye=S({},[...D,...C,...O,...v,...M]);let Ee=null;const Ae=S({},[...I,...U,...P,...F]);let 
_e=Object.seal(l(null,{tagNameCheck:{writable:!0,configurable:!1,enumerable:!0,value:null},attributeNameCheck:{writable:!0,configurable:!1,enumerable:!0,value:null},allowCustomizedBuiltInElements:{writable:!0,configurable:!1,enumerable:!0,value:!1}})),Ne=null,be=null,Se=!0,Re=!0,we=!1,Le=!0,De=!1,Ce=!0,Oe=!1,xe=!1,ve=!1,ke=!1,Me=!1,Ie=!1,Ue=!0,Pe=!1;const Fe="user-content-";let He=!0,ze=!1,Be={},We=null;const Ge=S({},["annotation-xml","audio","colgroup","desc","foreignobject","head","iframe","math","mi","mn","mo","ms","mtext","noembed","noframes","noscript","plaintext","script","style","svg","template","thead","title","video","xmp"]);let Ye=null;const je=S({},["audio","video","img","source","image","track"]);let Xe=null;const qe=S({},["alt","class","for","id","label","name","pattern","placeholder","role","summary","title","value","style","xmlns"]),$e="http://www.w3.org/1998/Math/MathML",Ke="http://www.w3.org/2000/svg",Ve="http://www.w3.org/1999/xhtml";let Ze=Ve,Je=!1,Qe=null;const et=S({},[$e,Ke,Ve],d);let tt=null;const nt=["application/xhtml+xml","text/html"],ot="text/html";let rt=null,it=null;const at=r.createElement("form"),lt=function(e){return e instanceof RegExp||e instanceof Function},ct=function(){let e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};if(!it||it!==e){if(e&&"object"==typeof e||(e={}),e=w(e),tt=-1===nt.indexOf(e.PARSER_MEDIA_TYPE)?ot:e.PARSER_MEDIA_TYPE,rt="application/xhtml+xml"===tt?d:f,Te=E(e,"ALLOWED_TAGS")?S({},e.ALLOWED_TAGS,rt):ye,Ee=E(e,"ALLOWED_ATTR")?S({},e.ALLOWED_ATTR,rt):Ae,Qe=E(e,"ALLOWED_NAMESPACES")?S({},e.ALLOWED_NAMESPACES,d):et,Xe=E(e,"ADD_URI_SAFE_ATTR")?S(w(qe),e.ADD_URI_SAFE_ATTR,rt):qe,Ye=E(e,"ADD_DATA_URI_TAGS")?S(w(je),e.ADD_DATA_URI_TAGS,rt):je,We=E(e,"FORBID_CONTENTS")?S({},e.FORBID_CONTENTS,rt):Ge,Ne=E(e,"FORBID_TAGS")?S({},e.FORBID_TAGS,rt):{},be=E(e,"FORBID_ATTR")?S({},e.FORBID_ATTR,rt):{},Be=!!E(e,"USE_PROFILES")&&e.USE_PROFILES,Se=!1!==e.ALLOW_ARIA_ATTR,Re=!1!==e.ALLOW_DATA_ATTR,we=e.ALLOW_UNKNOWN_PROTOCOLS||!1,Le=!1!==e.ALLOW_SELF_CLOSE_IN_ATTR,De=e.SAFE_FOR_TEMPLATES||!1,Ce=!1!==e.SAFE_FOR_XML,Oe=e.WHOLE_DOCUMENT||!1,ke=e.RETURN_DOM||!1,Me=e.RETURN_DOM_FRAGMENT||!1,Ie=e.RETURN_TRUSTED_TYPE||!1,ve=e.FORCE_BODY||!1,Ue=!1!==e.SANITIZE_DOM,Pe=e.SANITIZE_NAMED_PROPS||!1,He=!1!==e.KEEP_CONTENT,ze=e.IN_PLACE||!1,ge=e.ALLOWED_URI_REGEXP||Y,Ze=e.NAMESPACE||Ve,_e=e.CUSTOM_ELEMENT_HANDLING||{},e.CUSTOM_ELEMENT_HANDLING&<(e.CUSTOM_ELEMENT_HANDLING.tagNameCheck)&&(_e.tagNameCheck=e.CUSTOM_ELEMENT_HANDLING.tagNameCheck),e.CUSTOM_ELEMENT_HANDLING&<(e.CUSTOM_ELEMENT_HANDLING.attributeNameCheck)&&(_e.attributeNameCheck=e.CUSTOM_ELEMENT_HANDLING.attributeNameCheck),e.CUSTOM_ELEMENT_HANDLING&&"boolean"==typeof e.CUSTOM_ELEMENT_HANDLING.allowCustomizedBuiltInElements&&(_e.allowCustomizedBuiltInElements=e.CUSTOM_ELEMENT_HANDLING.allowCustomizedBuiltInElements),De&&(Re=!1),Me&&(ke=!0),Be&&(Te=S({},M),Ee=[],!0===Be.html&&(S(Te,D),S(Ee,I)),!0===Be.svg&&(S(Te,C),S(Ee,U),S(Ee,F)),!0===Be.svgFilters&&(S(Te,O),S(Ee,U),S(Ee,F)),!0===Be.mathMl&&(S(Te,v),S(Ee,P),S(Ee,F))),e.ADD_TAGS&&(Te===ye&&(Te=w(Te)),S(Te,e.ADD_TAGS,rt)),e.ADD_ATTR&&(Ee===Ae&&(Ee=w(Ee)),S(Ee,e.ADD_ATTR,rt)),e.ADD_URI_SAFE_ATTR&&S(Xe,e.ADD_URI_SAFE_ATTR,rt),e.FORBID_CONTENTS&&(We===Ge&&(We=w(We)),S(We,e.FORBID_CONTENTS,rt)),He&&(Te["#text"]=!0),Oe&&S(Te,["html","head","body"]),Te.table&&(S(Te,["tbody"]),delete Ne.tbody),e.TRUSTED_TYPES_POLICY){if("function"!=typeof e.TRUSTED_TYPES_POLICY.createHTML)throw _('TRUSTED_TYPES_POLICY configuration option must provide a "createHTML" 
hook.');if("function"!=typeof e.TRUSTED_TYPES_POLICY.createScriptURL)throw _('TRUSTED_TYPES_POLICY configuration option must provide a "createScriptURL" hook.');ee=e.TRUSTED_TYPES_POLICY,te=ee.createHTML("")}else void 0===ee&&(ee=Z(G,c)),null!==ee&&"string"==typeof te&&(te=ee.createHTML(""));i&&i(e),it=e}},st=S({},["mi","mo","mn","ms","mtext"]),ut=S({},["foreignobject","desc","title","annotation-xml"]),mt=S({},["title","style","font","a","script"]),pt=S({},[...C,...O,...x]),ft=S({},[...v,...k]),dt=function(e){let t=Q(e);t&&t.tagName||(t={namespaceURI:Ze,tagName:"template"});const n=f(e.tagName),o=f(t.tagName);return!!Qe[e.namespaceURI]&&(e.namespaceURI===Ke?t.namespaceURI===Ve?"svg"===n:t.namespaceURI===$e?"svg"===n&&("annotation-xml"===o||st[o]):Boolean(pt[n]):e.namespaceURI===$e?t.namespaceURI===Ve?"math"===n:t.namespaceURI===Ke?"math"===n&&ut[o]:Boolean(ft[n]):e.namespaceURI===Ve?!(t.namespaceURI===Ke&&!ut[o])&&(!(t.namespaceURI===$e&&!st[o])&&(!ft[n]&&(mt[n]||!pt[n]))):!("application/xhtml+xml"!==tt||!Qe[e.namespaceURI]))},ht=function(e){p(o.removed,{element:e});try{e.parentNode.removeChild(e)}catch(t){e.remove()}},gt=function(e,t){try{p(o.removed,{attribute:t.getAttributeNode(e),from:t})}catch(e){p(o.removed,{attribute:null,from:t})}if(t.removeAttribute(e),"is"===e&&!Ee[e])if(ke||Me)try{ht(t)}catch(e){}else try{t.setAttribute(e,"")}catch(e){}},Tt=function(e){let t=null,n=null;if(ve)e=""+e;else{const t=h(e,/^[\r\n\t ]+/);n=t&&t[0]}"application/xhtml+xml"===tt&&Ze===Ve&&(e=''+e+"");const o=ee?ee.createHTML(e):e;if(Ze===Ve)try{t=(new W).parseFromString(o,tt)}catch(e){}if(!t||!t.documentElement){t=ne.createDocument(Ze,"template",null);try{t.documentElement.innerHTML=Je?te:o}catch(e){}}const i=t.body||t.documentElement;return e&&n&&i.insertBefore(r.createTextNode(n),i.childNodes[0]||null),Ze===Ve?ie.call(t,Oe?"html":"body")[0]:Oe?t.documentElement:i},yt=function(e){return oe.call(e.ownerDocument||e,e,H.SHOW_ELEMENT|H.SHOW_COMMENT|H.SHOW_TEXT|H.SHOW_PROCESSING_INSTRUCTION|H.SHOW_CDATA_SECTION,null)},Et=function(e){return e instanceof B&&("string"!=typeof e.nodeName||"string"!=typeof e.textContent||"function"!=typeof e.removeChild||!(e.attributes instanceof z)||"function"!=typeof e.removeAttribute||"function"!=typeof e.setAttribute||"string"!=typeof e.namespaceURI||"function"!=typeof e.insertBefore||"function"!=typeof e.hasChildNodes)},At=function(e){return"function"==typeof b&&e instanceof b},_t=function(e,t,n){le[e]&&u(le[e],(e=>{e.call(o,t,n,it)}))},Nt=function(e){let t=null;if(_t("beforeSanitizeElements",e,null),Et(e))return ht(e),!0;const n=rt(e.nodeName);if(_t("uponSanitizeElement",e,{tagName:n,allowedTags:Te}),e.hasChildNodes()&&!At(e.firstElementChild)&&A(/<[/\w]/g,e.innerHTML)&&A(/<[/\w]/g,e.textContent))return ht(e),!0;if(7===e.nodeType)return ht(e),!0;if(Ce&&8===e.nodeType&&A(/<[/\w]/g,e.data))return ht(e),!0;if(!Te[n]||Ne[n]){if(!Ne[n]&&St(n)){if(_e.tagNameCheck instanceof RegExp&&A(_e.tagNameCheck,n))return!1;if(_e.tagNameCheck instanceof Function&&_e.tagNameCheck(n))return!1}if(He&&!We[n]){const t=Q(e)||e.parentNode,n=J(e)||e.childNodes;if(n&&t){for(let o=n.length-1;o>=0;--o)t.insertBefore(X(n[o],!0),$(e))}}return ht(e),!0}return e instanceof R&&!dt(e)?(ht(e),!0):"noscript"!==n&&"noembed"!==n&&"noframes"!==n||!A(/<\/no(script|embed|frames)/i,e.innerHTML)?(De&&3===e.nodeType&&(t=e.textContent,u([ce,se,ue],(e=>{t=g(t,e," 
")})),e.textContent!==t&&(p(o.removed,{element:e.cloneNode()}),e.textContent=t)),_t("afterSanitizeElements",e,null),!1):(ht(e),!0)},bt=function(e,t,n){if(Ue&&("id"===t||"name"===t)&&(n in r||n in at))return!1;if(Re&&!be[t]&&A(me,t));else if(Se&&A(pe,t));else if(!Ee[t]||be[t]){if(!(St(e)&&(_e.tagNameCheck instanceof RegExp&&A(_e.tagNameCheck,e)||_e.tagNameCheck instanceof Function&&_e.tagNameCheck(e))&&(_e.attributeNameCheck instanceof RegExp&&A(_e.attributeNameCheck,t)||_e.attributeNameCheck instanceof Function&&_e.attributeNameCheck(t))||"is"===t&&_e.allowCustomizedBuiltInElements&&(_e.tagNameCheck instanceof RegExp&&A(_e.tagNameCheck,n)||_e.tagNameCheck instanceof Function&&_e.tagNameCheck(n))))return!1}else if(Xe[t]);else if(A(ge,g(n,de,"")));else if("src"!==t&&"xlink:href"!==t&&"href"!==t||"script"===e||0!==T(n,"data:")||!Ye[e]){if(we&&!A(fe,g(n,de,"")));else if(n)return!1}else;return!0},St=function(e){return"annotation-xml"!==e&&h(e,he)},Rt=function(e){_t("beforeSanitizeAttributes",e,null);const{attributes:t}=e;if(!t)return;const n={attrName:"",attrValue:"",keepAttr:!0,allowedAttributes:Ee};let r=t.length;for(;r--;){const i=t[r],{name:a,namespaceURI:l,value:c}=i,s=rt(a);let p="value"===a?c:y(c);if(n.attrName=s,n.attrValue=p,n.keepAttr=!0,n.forceKeepAttr=void 0,_t("uponSanitizeAttribute",e,n),p=n.attrValue,n.forceKeepAttr)continue;if(gt(a,e),!n.keepAttr)continue;if(!Le&&A(/\/>/i,p)){gt(a,e);continue}De&&u([ce,se,ue],(e=>{p=g(p,e," ")}));const f=rt(e.nodeName);if(bt(f,s,p)){if(!Pe||"id"!==s&&"name"!==s||(gt(a,e),p=Fe+p),ee&&"object"==typeof G&&"function"==typeof G.getAttributeType)if(l);else switch(G.getAttributeType(f,s)){case"TrustedHTML":p=ee.createHTML(p);break;case"TrustedScriptURL":p=ee.createScriptURL(p)}try{l?e.setAttributeNS(l,a,p):e.setAttribute(a,p),m(o.removed)}catch(e){}}}_t("afterSanitizeAttributes",e,null)},wt=function e(t){let n=null;const o=yt(t);for(_t("beforeSanitizeShadowDOM",t,null);n=o.nextNode();)_t("uponSanitizeShadowNode",n,null),Nt(n)||(n.content instanceof s&&e(n.content),Rt(n));_t("afterSanitizeShadowDOM",t,null)};return o.sanitize=function(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=null,r=null,i=null,l=null;if(Je=!e,Je&&(e="\x3c!--\x3e"),"string"!=typeof e&&!At(e)){if("function"!=typeof e.toString)throw _("toString is not a function");if("string"!=typeof(e=e.toString()))throw _("dirty is not a string, aborting")}if(!o.isSupported)return e;if(xe||ct(t),o.removed=[],"string"==typeof e&&(ze=!1),ze){if(e.nodeName){const t=rt(e.nodeName);if(!Te[t]||Ne[t])throw _("root node is forbidden and cannot be sanitized in-place")}}else if(e instanceof b)n=Tt("\x3c!----\x3e"),r=n.ownerDocument.importNode(e,!0),1===r.nodeType&&"BODY"===r.nodeName||"HTML"===r.nodeName?n=r:n.appendChild(r);else{if(!ke&&!De&&!Oe&&-1===e.indexOf("<"))return ee&&Ie?ee.createHTML(e):e;if(n=Tt(e),!n)return ke?null:Ie?te:""}n&&ve&&ht(n.firstChild);const c=yt(ze?e:n);for(;i=c.nextNode();)Nt(i)||(i.content instanceof s&&wt(i.content),Rt(i));if(ze)return e;if(ke){if(Me)for(l=re.call(n.ownerDocument);n.firstChild;)l.appendChild(n.firstChild);else l=n;return(Ee.shadowroot||Ee.shadowrootmode)&&(l=ae.call(a,l,!0)),l}let m=Oe?n.outerHTML:n.innerHTML;return Oe&&Te["!doctype"]&&n.ownerDocument&&n.ownerDocument.doctype&&n.ownerDocument.doctype.name&&A(q,n.ownerDocument.doctype.name)&&(m="\n"+m),De&&u([ce,se,ue],(e=>{m=g(m,e," ")})),ee&&Ie?ee.createHTML(m):m},o.setConfig=function(){let e=arguments.length>0&&void 
0!==arguments[0]?arguments[0]:{};ct(e),xe=!0},o.clearConfig=function(){it=null,xe=!1},o.isValidAttribute=function(e,t,n){it||ct({});const o=rt(e),r=rt(t);return bt(o,r,n)},o.addHook=function(e,t){"function"==typeof t&&(le[e]=le[e]||[],p(le[e],t))},o.removeHook=function(e){if(le[e])return m(le[e])},o.removeHooks=function(e){le[e]&&(le[e]=[])},o.removeAllHooks=function(){le={}},o}();return J})); +//# sourceMappingURL=purify.min.js.map diff --git a/ComfyUI-KJNodes/kjweb_async/svg-path-properties.min.js b/ComfyUI-KJNodes/kjweb_async/svg-path-properties.min.js new file mode 100644 index 0000000000000000000000000000000000000000..88d47e0de4c54f881083164c20045a7e8b621caf --- /dev/null +++ b/ComfyUI-KJNodes/kjweb_async/svg-path-properties.min.js @@ -0,0 +1,2 @@ +// http://geoexamples.com/path-properties/ v1.2.0 Copyright 2023 Roger Veciana i Rovira +!function(t,n){"object"==typeof exports&&"undefined"!=typeof module?n(exports):"function"==typeof define&&define.amd?define(["exports"],n):n((t="undefined"!=typeof globalThis?globalThis:t||self).svgPathProperties={})}(this,(function(t){"use strict";function n(t,n){for(var e=0;et.length)&&(n=t.length);for(var e=0,i=new Array(n);eu.length&&(t=u.length);var n=f({x:u.x0,y:u.y0},u.rx,u.ry,u.xAxisRotate,u.LargeArcFlag,u.SweepFlag,{x:u.x1,y:u.y1},t/u.length);return{x:n.x,y:n.y}})),i(this,"getTangentAtLength",(function(t){t<0?t=0:t>u.length&&(t=u.length);var n,e=.05,i=u.getPointAtLength(t);t<0?t=0:t>u.length&&(t=u.length);var r=(n=t1&&(n=Math.sqrt(c)*n,e=Math.sqrt(c)*e);var f=(Math.pow(n,2)*Math.pow(e,2)-Math.pow(n,2)*Math.pow(l.y,2)-Math.pow(e,2)*Math.pow(l.x,2))/(Math.pow(n,2)*Math.pow(l.y,2)+Math.pow(e,2)*Math.pow(l.x,2));f=f<0?0:f;var y=(r!==h?1:-1)*Math.sqrt(f),v=y*(n*l.y/e),M=y*(-e*l.x/n),L={x:Math.cos(o)*v-Math.sin(o)*M+(t.x+s.x)/2,y:Math.sin(o)*v+Math.cos(o)*M+(t.y+s.y)/2},d={x:(l.x-v)/n,y:(l.y-M)/e},A=w({x:1,y:0},d),b=w(d,{x:(-l.x-v)/n,y:(-l.y-M)/e});!h&&b>0?b-=2*Math.PI:h&&b<0&&(b+=2*Math.PI);var P=A+(b%=2*Math.PI)*a,m=n*Math.cos(P),T=e*Math.sin(P);return{x:Math.cos(o)*m-Math.sin(o)*T+L.x,y:Math.sin(o)*m+Math.cos(o)*T+L.y,ellipticalArcStartAngle:A,ellipticalArcEndAngle:A+b,ellipticalArcAngle:P,ellipticalArcCenter:L,resultantRx:n,resultantRy:e}},y=function(t,n){t=t||500;for(var e,i=0,r=[],h=[],s=n(0),a=0;a0?Math.sqrt(l*l+c):0,y=u*u+c>0?Math.sqrt(u*u+c):0,p=u+Math.sqrt(u*u+c)!==0&&(l+f)/(u+y)!=0?c*Math.log(Math.abs((l+f)/(u+y))):0;return Math.sqrt(a)/2*(l*f-u*y+p)},_=function(t,n,e){return{x:2*(1-e)*(t[1]-t[0])+2*e*(t[2]-t[1]),y:2*(1-e)*(n[1]-n[0])+2*e*(n[2]-n[1])}};function S(t,n,e){var i=N(1,e,t),r=N(1,e,n),h=i*i+r*r;return Math.sqrt(h)}var N=function t(n,e,i){var r,h,s=i.length-1;if(0===s)return 0;if(0===n){h=0;for(var a=0;a<=s;a++)h+=A[s][a]*Math.pow(1-e,s-a)*Math.pow(e,a)*i[a];return h}r=new Array(s);for(var o=0;o.001;){var a=e(r+h),o=Math.abs(t-a)/n;if(o500)break}return r},j=e((function(t,n,e,r,h,s,a,o){var g=this;i(this,"a",void 0),i(this,"b",void 0),i(this,"c",void 0),i(this,"d",void 0),i(this,"length",void 0),i(this,"getArcLength",void 0),i(this,"getPoint",void 0),i(this,"getDerivative",void 0),i(this,"getTotalLength",(function(){return g.length})),i(this,"getPointAtLength",(function(t){var n=[g.a.x,g.b.x,g.c.x,g.d.x],e=[g.a.y,g.b.y,g.c.y,g.d.y],i=C(t,g.length,(function(t){return g.getArcLength(n,e,t)}));return g.getPoint(n,e,i)})),i(this,"getTangentAtLength",(function(t){var n=[g.a.x,g.b.x,g.c.x,g.d.x],e=[g.a.y,g.b.y,g.c.y,g.d.y],i=C(t,g.length,(function(t){return 
g.getArcLength(n,e,t)})),r=g.getDerivative(n,e,i),h=Math.sqrt(r.x*r.x+r.y*r.y);return h>0?{x:r.x/h,y:r.y/h}:{x:0,y:0}})),i(this,"getPropertiesAtLength",(function(t){var n,e=[g.a.x,g.b.x,g.c.x,g.d.x],i=[g.a.y,g.b.y,g.c.y,g.d.y],r=C(t,g.length,(function(t){return g.getArcLength(e,i,t)})),h=g.getDerivative(e,i,r),s=Math.sqrt(h.x*h.x+h.y*h.y);n=s>0?{x:h.x/s,y:h.y/s}:{x:0,y:0};var a=g.getPoint(e,i,r);return{x:a.x,y:a.y,tangentX:n.x,tangentY:n.y}})),i(this,"getC",(function(){return g.c})),i(this,"getD",(function(){return g.d})),this.a={x:t,y:n},this.b={x:e,y:r},this.c={x:h,y:s},void 0!==a&&void 0!==o?(this.getArcLength=m,this.getPoint=b,this.getDerivative=P,this.d={x:a,y:o}):(this.getArcLength=q,this.getPoint=T,this.getDerivative=_,this.d={x:0,y:0}),this.length=this.getArcLength([this.a.x,this.b.x,this.c.x,this.d.x],[this.a.y,this.b.y,this.c.y,this.d.y],1)})),O=e((function(t){var n=this;i(this,"length",0),i(this,"partial_lengths",[]),i(this,"functions",[]),i(this,"initial_point",null),i(this,"getPartAtLength",(function(t){t<0?t=0:t>n.length&&(t=n.length);for(var e=n.partial_lengths.length-1;n.partial_lengths[e]>=t&&e>0;)e--;return e++,{fraction:t-n.partial_lengths[e-1],i:e}})),i(this,"getTotalLength",(function(){return n.length})),i(this,"getPointAtLength",(function(t){var e=n.getPartAtLength(t),i=n.functions[e.i];if(i)return i.getPointAtLength(e.fraction);if(n.initial_point)return n.initial_point;throw new Error("Wrong function at this part.")})),i(this,"getTangentAtLength",(function(t){var e=n.getPartAtLength(t),i=n.functions[e.i];if(i)return i.getTangentAtLength(e.fraction);if(n.initial_point)return{x:0,y:0};throw new Error("Wrong function at this part.")})),i(this,"getPropertiesAtLength",(function(t){var e=n.getPartAtLength(t),i=n.functions[e.i];if(i)return i.getPropertiesAtLength(e.fraction);if(n.initial_point)return{x:n.initial_point.x,y:n.initial_point.y,tangentX:0,tangentY:0};throw new Error("Wrong function at this part.")})),i(this,"getParts",(function(){for(var t=[],e=0;e0?t:"M0,0").match(o);if(!n)throw new Error("No path elements found in string ".concat(t));return n.reduce((function(t,n){var e=n.charAt(0),i=e.toLowerCase(),h=u(n.substring(1));if("m"===i&&h.length>2&&(t.push([e].concat(r(h.splice(0,2)))),i="l",e="m"===e?"l":"L"),"a"===i.toLowerCase()&&(5===h.length||6===h.length)){var s=n.substring(1).trim().split(" ");h=[Number(s[0]),Number(s[1]),Number(s[2]),Number(s[3].charAt(0)),Number(s[3].charAt(1)),Number(s[3].substring(2)),Number(s[4])]}for(;h.length>=0;){if(h.length===a[i]){t.push([e].concat(r(h.splice(0,a[i]))));break}if(h.length0?(this.length+=e.getTotalLength(),this.functions.push(e),s=[h[y][5]+s[0],h[y][6]+s[1]]):this.functions.push(new l(s[0],s[0],s[1],s[1]));else if("S"===h[y][0]){if(y>0&&["C","c","S","s"].indexOf(h[y-1][0])>-1){if(e){var p=e.getC();e=new j(s[0],s[1],2*s[0]-p.x,2*s[1]-p.y,h[y][1],h[y][2],h[y][3],h[y][4])}}else e=new j(s[0],s[1],s[0],s[1],h[y][1],h[y][2],h[y][3],h[y][4]);e&&(this.length+=e.getTotalLength(),s=[h[y][3],h[y][4]],this.functions.push(e))}else if("s"===h[y][0]){if(y>0&&["C","c","S","s"].indexOf(h[y-1][0])>-1){if(e){var x=e.getC(),v=e.getD();e=new j(s[0],s[1],s[0]+v.x-x.x,s[1]+v.y-x.y,s[0]+h[y][1],s[1]+h[y][2],s[0]+h[y][3],s[1]+h[y][4])}}else e=new j(s[0],s[1],s[0],s[1],s[0]+h[y][1],s[1]+h[y][2],s[0]+h[y][3],s[1]+h[y][4]);e&&(this.length+=e.getTotalLength(),s=[h[y][3]+s[0],h[y][4]+s[1]],this.functions.push(e))}else if("Q"===h[y][0]){if(s[0]==h[y][1]&&s[1]==h[y][2]){var M=new 
l(h[y][1],h[y][3],h[y][2],h[y][4]);this.length+=M.getTotalLength(),this.functions.push(M)}else e=new j(s[0],s[1],h[y][1],h[y][2],h[y][3],h[y][4],void 0,void 0),this.length+=e.getTotalLength(),this.functions.push(e);s=[h[y][3],h[y][4]],g=[h[y][1],h[y][2]]}else if("q"===h[y][0]){if(0!=h[y][1]||0!=h[y][2])e=new j(s[0],s[1],s[0]+h[y][1],s[1]+h[y][2],s[0]+h[y][3],s[1]+h[y][4],void 0,void 0),this.length+=e.getTotalLength(),this.functions.push(e);else{var w=new l(s[0]+h[y][1],s[0]+h[y][3],s[1]+h[y][2],s[1]+h[y][4]);this.length+=w.getTotalLength(),this.functions.push(w)}g=[s[0]+h[y][1],s[1]+h[y][2]],s=[h[y][3]+s[0],h[y][4]+s[1]]}else if("T"===h[y][0]){if(y>0&&["Q","q","T","t"].indexOf(h[y-1][0])>-1)e=new j(s[0],s[1],2*s[0]-g[0],2*s[1]-g[1],h[y][1],h[y][2],void 0,void 0),this.functions.push(e),this.length+=e.getTotalLength();else{var L=new l(s[0],h[y][1],s[1],h[y][2]);this.functions.push(L),this.length+=L.getTotalLength()}g=[2*s[0]-g[0],2*s[1]-g[1]],s=[h[y][1],h[y][2]]}else if("t"===h[y][0]){if(y>0&&["Q","q","T","t"].indexOf(h[y-1][0])>-1)e=new j(s[0],s[1],2*s[0]-g[0],2*s[1]-g[1],s[0]+h[y][1],s[1]+h[y][2],void 0,void 0),this.length+=e.getTotalLength(),this.functions.push(e);else{var d=new l(s[0],s[0]+h[y][1],s[1],s[1]+h[y][2]);this.length+=d.getTotalLength(),this.functions.push(d)}g=[2*s[0]-g[0],2*s[1]-g[1]],s=[h[y][1]+s[0],h[y][2]+s[1]]}else if("A"===h[y][0]){var A=new c(s[0],s[1],h[y][1],h[y][2],h[y][3],1===h[y][4],1===h[y][5],h[y][6],h[y][7]);this.length+=A.getTotalLength(),s=[h[y][6],h[y][7]],this.functions.push(A)}else if("a"===h[y][0]){var b=new c(s[0],s[1],h[y][1],h[y][2],h[y][3],1===h[y][4],1===h[y][5],s[0]+h[y][6],s[1]+h[y][7]);this.length+=b.getTotalLength(),s=[s[0]+h[y][6],s[1]+h[y][7]],this.functions.push(b)}this.partial_lengths.push(this.length)}})),E=e((function(t){var n=this;if(i(this,"inst",void 0),i(this,"getTotalLength",(function(){return n.inst.getTotalLength()})),i(this,"getPointAtLength",(function(t){return n.inst.getPointAtLength(t)})),i(this,"getTangentAtLength",(function(t){return n.inst.getTangentAtLength(t)})),i(this,"getPropertiesAtLength",(function(t){return n.inst.getPropertiesAtLength(t)})),i(this,"getParts",(function(){return n.inst.getParts()})),this.inst=new O(t),!(this instanceof E))return new E(t)}));t.svgPathProperties=E})); diff --git a/ComfyUI-KJNodes/nodes/audioscheduler_nodes.py b/ComfyUI-KJNodes/nodes/audioscheduler_nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..69d0422e7da875298f87fe60a7f6d1494530dca2 --- /dev/null +++ b/ComfyUI-KJNodes/nodes/audioscheduler_nodes.py @@ -0,0 +1,251 @@ +# to be used with https://github.com/a1lazydog/ComfyUI-AudioScheduler +import torch +from torchvision.transforms import functional as TF +from PIL import Image, ImageDraw +import numpy as np +from ..utility.utility import pil2tensor +from nodes import MAX_RESOLUTION + +class NormalizedAmplitudeToMask: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "normalized_amp": ("NORMALIZED_AMPLITUDE",), + "width": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "height": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "frame_offset": ("INT", {"default": 0,"min": -255, "max": 255, "step": 1}), + "location_x": ("INT", {"default": 256,"min": 0, "max": 4096, "step": 1}), + "location_y": ("INT", {"default": 256,"min": 0, "max": 4096, "step": 1}), + "size": ("INT", {"default": 128,"min": 8, "max": 4096, "step": 1}), + "shape": ( + [ + 'none', + 'circle', + 'square', + 'triangle', + ], + { + "default": 'none' + }), + 
"color": ( + [ + 'white', + 'amplitude', + ], + { + "default": 'amplitude' + }), + },} + + CATEGORY = "KJNodes/audio" + RETURN_TYPES = ("MASK",) + FUNCTION = "convert" + DESCRIPTION = """ +Works as a bridge to the AudioScheduler -nodes: +https://github.com/a1lazydog/ComfyUI-AudioScheduler +Creates masks based on the normalized amplitude. +""" + + def convert(self, normalized_amp, width, height, frame_offset, shape, location_x, location_y, size, color): + # Ensure normalized_amp is an array and within the range [0, 1] + normalized_amp = np.clip(normalized_amp, 0.0, 1.0) + + # Offset the amplitude values by rolling the array + normalized_amp = np.roll(normalized_amp, frame_offset) + + # Initialize an empty list to hold the image tensors + out = [] + # Iterate over each amplitude value to create an image + for amp in normalized_amp: + # Scale the amplitude value to cover the full range of grayscale values + if color == 'amplitude': + grayscale_value = int(amp * 255) + elif color == 'white': + grayscale_value = 255 + # Convert the grayscale value to an RGB format + gray_color = (grayscale_value, grayscale_value, grayscale_value) + finalsize = size * amp + + if shape == 'none': + shapeimage = Image.new("RGB", (width, height), gray_color) + else: + shapeimage = Image.new("RGB", (width, height), "black") + + draw = ImageDraw.Draw(shapeimage) + if shape == 'circle' or shape == 'square': + # Define the bounding box for the shape + left_up_point = (location_x - finalsize, location_y - finalsize) + right_down_point = (location_x + finalsize,location_y + finalsize) + two_points = [left_up_point, right_down_point] + + if shape == 'circle': + draw.ellipse(two_points, fill=gray_color) + elif shape == 'square': + draw.rectangle(two_points, fill=gray_color) + + elif shape == 'triangle': + # Define the points for the triangle + left_up_point = (location_x - finalsize, location_y + finalsize) # bottom left + right_down_point = (location_x + finalsize, location_y + finalsize) # bottom right + top_point = (location_x, location_y) # top point + draw.polygon([top_point, left_up_point, right_down_point], fill=gray_color) + + shapeimage = pil2tensor(shapeimage) + mask = shapeimage[:, :, :, 0] + out.append(mask) + + return (torch.cat(out, dim=0),) + +class NormalizedAmplitudeToFloatList: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "normalized_amp": ("NORMALIZED_AMPLITUDE",), + },} + + CATEGORY = "KJNodes/audio" + RETURN_TYPES = ("FLOAT",) + FUNCTION = "convert" + DESCRIPTION = """ +Works as a bridge to the AudioScheduler -nodes: +https://github.com/a1lazydog/ComfyUI-AudioScheduler +Creates a list of floats from the normalized amplitude. 
+""" + + def convert(self, normalized_amp): + # Ensure normalized_amp is an array and within the range [0, 1] + normalized_amp = np.clip(normalized_amp, 0.0, 1.0) + return (normalized_amp.tolist(),) + +class OffsetMaskByNormalizedAmplitude: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "normalized_amp": ("NORMALIZED_AMPLITUDE",), + "mask": ("MASK",), + "x": ("INT", { "default": 0, "min": -4096, "max": MAX_RESOLUTION, "step": 1, "display": "number" }), + "y": ("INT", { "default": 0, "min": -4096, "max": MAX_RESOLUTION, "step": 1, "display": "number" }), + "rotate": ("BOOLEAN", { "default": False }), + "angle_multiplier": ("FLOAT", { "default": 0.0, "min": -1.0, "max": 1.0, "step": 0.001, "display": "number" }), + } + } + + RETURN_TYPES = ("MASK",) + RETURN_NAMES = ("mask",) + FUNCTION = "offset" + CATEGORY = "KJNodes/audio" + DESCRIPTION = """ +Works as a bridge to the AudioScheduler -nodes: +https://github.com/a1lazydog/ComfyUI-AudioScheduler +Offsets masks based on the normalized amplitude. +""" + + def offset(self, mask, x, y, angle_multiplier, rotate, normalized_amp): + + # Ensure normalized_amp is an array and within the range [0, 1] + offsetmask = mask.clone() + normalized_amp = np.clip(normalized_amp, 0.0, 1.0) + + batch_size, height, width = mask.shape + + if rotate: + for i in range(batch_size): + rotation_amp = int(normalized_amp[i] * (360 * angle_multiplier)) + rotation_angle = rotation_amp + offsetmask[i] = TF.rotate(offsetmask[i].unsqueeze(0), rotation_angle).squeeze(0) + if x != 0 or y != 0: + for i in range(batch_size): + offset_amp = normalized_amp[i] * 10 + shift_x = min(x*offset_amp, width-1) + shift_y = min(y*offset_amp, height-1) + if shift_x != 0: + offsetmask[i] = torch.roll(offsetmask[i], shifts=int(shift_x), dims=1) + if shift_y != 0: + offsetmask[i] = torch.roll(offsetmask[i], shifts=int(shift_y), dims=0) + + return offsetmask, + +class ImageTransformByNormalizedAmplitude: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "normalized_amp": ("NORMALIZED_AMPLITUDE",), + "zoom_scale": ("FLOAT", { "default": 0.0, "min": -1.0, "max": 1.0, "step": 0.001, "display": "number" }), + "x_offset": ("INT", { "default": 0, "min": (1 -MAX_RESOLUTION), "max": MAX_RESOLUTION, "step": 1, "display": "number" }), + "y_offset": ("INT", { "default": 0, "min": (1 -MAX_RESOLUTION), "max": MAX_RESOLUTION, "step": 1, "display": "number" }), + "cumulative": ("BOOLEAN", { "default": False }), + "image": ("IMAGE",), + }} + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "amptransform" + CATEGORY = "KJNodes/audio" + DESCRIPTION = """ +Works as a bridge to the AudioScheduler -nodes: +https://github.com/a1lazydog/ComfyUI-AudioScheduler +Transforms image based on the normalized amplitude. 
+""" + + def amptransform(self, image, normalized_amp, zoom_scale, cumulative, x_offset, y_offset): + # Ensure normalized_amp is an array and within the range [0, 1] + normalized_amp = np.clip(normalized_amp, 0.0, 1.0) + transformed_images = [] + + # Initialize the cumulative zoom factor + prev_amp = 0.0 + + for i in range(image.shape[0]): + img = image[i] # Get the i-th image in the batch + amp = normalized_amp[i] # Get the corresponding amplitude value + + # Incrementally increase the cumulative zoom factor + if cumulative: + prev_amp += amp + amp += prev_amp + + # Convert the image tensor from BxHxWxC to CxHxW format expected by torchvision + img = img.permute(2, 0, 1) + + # Convert PyTorch tensor to PIL Image for processing + pil_img = TF.to_pil_image(img) + + # Calculate the crop size based on the amplitude + width, height = pil_img.size + crop_size = int(min(width, height) * (1 - amp * zoom_scale)) + crop_size = max(crop_size, 1) + + # Calculate the crop box coordinates (centered crop) + left = (width - crop_size) // 2 + top = (height - crop_size) // 2 + right = (width + crop_size) // 2 + bottom = (height + crop_size) // 2 + + # Crop and resize back to original size + cropped_img = TF.crop(pil_img, top, left, crop_size, crop_size) + resized_img = TF.resize(cropped_img, (height, width)) + + # Convert back to tensor in CxHxW format + tensor_img = TF.to_tensor(resized_img) + + # Convert the tensor back to BxHxWxC format + tensor_img = tensor_img.permute(1, 2, 0) + + # Offset the image based on the amplitude + offset_amp = amp * 10 # Calculate the offset magnitude based on the amplitude + shift_x = min(x_offset * offset_amp, img.shape[1] - 1) # Calculate the shift in x direction + shift_y = min(y_offset * offset_amp, img.shape[0] - 1) # Calculate the shift in y direction + + # Apply the offset to the image tensor + if shift_x != 0: + tensor_img = torch.roll(tensor_img, shifts=int(shift_x), dims=1) + if shift_y != 0: + tensor_img = torch.roll(tensor_img, shifts=int(shift_y), dims=0) + + # Add to the list + transformed_images.append(tensor_img) + + # Stack all transformed images into a batch + transformed_batch = torch.stack(transformed_images) + + return (transformed_batch,) \ No newline at end of file diff --git a/ComfyUI-KJNodes/nodes/batchcrop_nodes.py b/ComfyUI-KJNodes/nodes/batchcrop_nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..61e7446f7567f74421d4b05742cc3a340fec73c9 --- /dev/null +++ b/ComfyUI-KJNodes/nodes/batchcrop_nodes.py @@ -0,0 +1,757 @@ +from ..utility.utility import tensor2pil, pil2tensor +from PIL import Image, ImageDraw, ImageFilter +import numpy as np +import torch +from torchvision.transforms import Resize, CenterCrop, InterpolationMode +import math + +#based on nodes from mtb https://github.com/melMass/comfy_mtb + +def bbox_to_region(bbox, target_size=None): + bbox = bbox_check(bbox, target_size) + return (bbox[0], bbox[1], bbox[0] + bbox[2], bbox[1] + bbox[3]) + +def bbox_check(bbox, target_size=None): + if not target_size: + return bbox + + new_bbox = ( + bbox[0], + bbox[1], + min(target_size[0] - bbox[0], bbox[2]), + min(target_size[1] - bbox[1], bbox[3]), + ) + return new_bbox + +class BatchCropFromMask: + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "original_images": ("IMAGE",), + "masks": ("MASK",), + "crop_size_mult": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.001}), + "bbox_smooth_alpha": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + }, + } + + RETURN_TYPES = ( + 
"IMAGE", + "IMAGE", + "BBOX", + "INT", + "INT", + ) + RETURN_NAMES = ( + "original_images", + "cropped_images", + "bboxes", + "width", + "height", + ) + FUNCTION = "crop" + CATEGORY = "KJNodes/masking" + + def smooth_bbox_size(self, prev_bbox_size, curr_bbox_size, alpha): + if alpha == 0: + return prev_bbox_size + return round(alpha * curr_bbox_size + (1 - alpha) * prev_bbox_size) + + def smooth_center(self, prev_center, curr_center, alpha=0.5): + if alpha == 0: + return prev_center + return ( + round(alpha * curr_center[0] + (1 - alpha) * prev_center[0]), + round(alpha * curr_center[1] + (1 - alpha) * prev_center[1]) + ) + + def crop(self, masks, original_images, crop_size_mult, bbox_smooth_alpha): + + bounding_boxes = [] + cropped_images = [] + + self.max_bbox_width = 0 + self.max_bbox_height = 0 + + # First, calculate the maximum bounding box size across all masks + curr_max_bbox_width = 0 + curr_max_bbox_height = 0 + for mask in masks: + _mask = tensor2pil(mask)[0] + non_zero_indices = np.nonzero(np.array(_mask)) + min_x, max_x = np.min(non_zero_indices[1]), np.max(non_zero_indices[1]) + min_y, max_y = np.min(non_zero_indices[0]), np.max(non_zero_indices[0]) + width = max_x - min_x + height = max_y - min_y + curr_max_bbox_width = max(curr_max_bbox_width, width) + curr_max_bbox_height = max(curr_max_bbox_height, height) + + # Smooth the changes in the bounding box size + self.max_bbox_width = self.smooth_bbox_size(self.max_bbox_width, curr_max_bbox_width, bbox_smooth_alpha) + self.max_bbox_height = self.smooth_bbox_size(self.max_bbox_height, curr_max_bbox_height, bbox_smooth_alpha) + + # Apply the crop size multiplier + self.max_bbox_width = round(self.max_bbox_width * crop_size_mult) + self.max_bbox_height = round(self.max_bbox_height * crop_size_mult) + bbox_aspect_ratio = self.max_bbox_width / self.max_bbox_height + + # Then, for each mask and corresponding image... 
+ for i, (mask, img) in enumerate(zip(masks, original_images)): + _mask = tensor2pil(mask)[0] + non_zero_indices = np.nonzero(np.array(_mask)) + min_x, max_x = np.min(non_zero_indices[1]), np.max(non_zero_indices[1]) + min_y, max_y = np.min(non_zero_indices[0]), np.max(non_zero_indices[0]) + + # Calculate center of bounding box + center_x = np.mean(non_zero_indices[1]) + center_y = np.mean(non_zero_indices[0]) + curr_center = (round(center_x), round(center_y)) + + # If this is the first frame, initialize prev_center with curr_center + if not hasattr(self, 'prev_center'): + self.prev_center = curr_center + + # Smooth the changes in the center coordinates from the second frame onwards + if i > 0: + center = self.smooth_center(self.prev_center, curr_center, bbox_smooth_alpha) + else: + center = curr_center + + # Update prev_center for the next frame + self.prev_center = center + + # Create bounding box using max_bbox_width and max_bbox_height + half_box_width = round(self.max_bbox_width / 2) + half_box_height = round(self.max_bbox_height / 2) + min_x = max(0, center[0] - half_box_width) + max_x = min(img.shape[1], center[0] + half_box_width) + min_y = max(0, center[1] - half_box_height) + max_y = min(img.shape[0], center[1] + half_box_height) + + # Append bounding box coordinates + bounding_boxes.append((min_x, min_y, max_x - min_x, max_y - min_y)) + + # Crop the image from the bounding box + cropped_img = img[min_y:max_y, min_x:max_x, :] + + # Calculate the new dimensions while maintaining the aspect ratio + new_height = min(cropped_img.shape[0], self.max_bbox_height) + new_width = round(new_height * bbox_aspect_ratio) + + # Resize the image + resize_transform = Resize((new_height, new_width)) + resized_img = resize_transform(cropped_img.permute(2, 0, 1)) + + # Perform the center crop to the desired size + crop_transform = CenterCrop((self.max_bbox_height, self.max_bbox_width)) # swap the order here if necessary + cropped_resized_img = crop_transform(resized_img) + + cropped_images.append(cropped_resized_img.permute(1, 2, 0)) + + cropped_out = torch.stack(cropped_images, dim=0) + + return (original_images, cropped_out, bounding_boxes, self.max_bbox_width, self.max_bbox_height, ) + +class BatchUncrop: + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "original_images": ("IMAGE",), + "cropped_images": ("IMAGE",), + "bboxes": ("BBOX",), + "border_blending": ("FLOAT", {"default": 0.25, "min": 0.0, "max": 1.0, "step": 0.01}, ), + "crop_rescale": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "border_top": ("BOOLEAN", {"default": True}), + "border_bottom": ("BOOLEAN", {"default": True}), + "border_left": ("BOOLEAN", {"default": True}), + "border_right": ("BOOLEAN", {"default": True}), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "uncrop" + + CATEGORY = "KJNodes/masking" + + def uncrop(self, original_images, cropped_images, bboxes, border_blending, crop_rescale, border_top, border_bottom, border_left, border_right): + def inset_border(image, border_width, border_color, border_top, border_bottom, border_left, border_right): + draw = ImageDraw.Draw(image) + width, height = image.size + if border_top: + draw.rectangle((0, 0, width, border_width), fill=border_color) + if border_bottom: + draw.rectangle((0, height - border_width, width, height), fill=border_color) + if border_left: + draw.rectangle((0, 0, border_width, height), fill=border_color) + if border_right: + draw.rectangle((width - border_width, 0, width, height), fill=border_color) + return 
image + + if len(original_images) != len(cropped_images): + raise ValueError(f"The number of original_images ({len(original_images)}) and cropped_images ({len(cropped_images)}) should be the same") + + # Ensure there are enough bboxes, but drop the excess if there are more bboxes than images + if len(bboxes) > len(original_images): + print(f"Warning: Dropping excess bounding boxes. Expected {len(original_images)}, but got {len(bboxes)}") + bboxes = bboxes[:len(original_images)] + elif len(bboxes) < len(original_images): + raise ValueError("There should be at least as many bboxes as there are original and cropped images") + + input_images = tensor2pil(original_images) + crop_imgs = tensor2pil(cropped_images) + + out_images = [] + for i in range(len(input_images)): + img = input_images[i] + crop = crop_imgs[i] + bbox = bboxes[i] + + # uncrop the image based on the bounding box + bb_x, bb_y, bb_width, bb_height = bbox + + paste_region = bbox_to_region((bb_x, bb_y, bb_width, bb_height), img.size) + + # scale factors + scale_x = crop_rescale + scale_y = crop_rescale + + # scaled paste_region + paste_region = (round(paste_region[0]*scale_x), round(paste_region[1]*scale_y), round(paste_region[2]*scale_x), round(paste_region[3]*scale_y)) + + # rescale the crop image to fit the paste_region + crop = crop.resize((round(paste_region[2]-paste_region[0]), round(paste_region[3]-paste_region[1]))) + crop_img = crop.convert("RGB") + + if border_blending > 1.0: + border_blending = 1.0 + elif border_blending < 0.0: + border_blending = 0.0 + + blend_ratio = (max(crop_img.size) / 2) * float(border_blending) + + blend = img.convert("RGBA") + mask = Image.new("L", img.size, 0) + + mask_block = Image.new("L", (paste_region[2]-paste_region[0], paste_region[3]-paste_region[1]), 255) + mask_block = inset_border(mask_block, round(blend_ratio / 2), (0), border_top, border_bottom, border_left, border_right) + + mask.paste(mask_block, paste_region) + blend.paste(crop_img, paste_region) + + mask = mask.filter(ImageFilter.BoxBlur(radius=blend_ratio / 4)) + mask = mask.filter(ImageFilter.GaussianBlur(radius=blend_ratio / 4)) + + blend.putalpha(mask) + img = Image.alpha_composite(img.convert("RGBA"), blend) + out_images.append(img.convert("RGB")) + + return (pil2tensor(out_images),) + +class BatchCropFromMaskAdvanced: + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "original_images": ("IMAGE",), + "masks": ("MASK",), + "crop_size_mult": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "bbox_smooth_alpha": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + }, + } + + RETURN_TYPES = ( + "IMAGE", + "IMAGE", + "MASK", + "IMAGE", + "MASK", + "BBOX", + "BBOX", + "INT", + "INT", + ) + RETURN_NAMES = ( + "original_images", + "cropped_images", + "cropped_masks", + "combined_crop_image", + "combined_crop_masks", + "bboxes", + "combined_bounding_box", + "bbox_width", + "bbox_height", + ) + FUNCTION = "crop" + CATEGORY = "KJNodes/masking" + + def smooth_bbox_size(self, prev_bbox_size, curr_bbox_size, alpha): + return round(alpha * curr_bbox_size + (1 - alpha) * prev_bbox_size) + + def smooth_center(self, prev_center, curr_center, alpha=0.5): + return (round(alpha * curr_center[0] + (1 - alpha) * prev_center[0]), + round(alpha * curr_center[1] + (1 - alpha) * prev_center[1])) + + def crop(self, masks, original_images, crop_size_mult, bbox_smooth_alpha): + bounding_boxes = [] + combined_bounding_box = [] + cropped_images = [] + cropped_masks = [] + cropped_masks_out = [] + 
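# The "combined" outputs below use a single bounding box covering the union of all masks in the batch. +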
combined_crop_out = [] + combined_cropped_images = [] + combined_cropped_masks = [] + + def calculate_bbox(mask): + non_zero_indices = np.nonzero(np.array(mask)) + + # handle empty masks + min_x, max_x, min_y, max_y = 0, 0, 0, 0 + if len(non_zero_indices[1]) > 0 and len(non_zero_indices[0]) > 0: + min_x, max_x = np.min(non_zero_indices[1]), np.max(non_zero_indices[1]) + min_y, max_y = np.min(non_zero_indices[0]), np.max(non_zero_indices[0]) + + width = max_x - min_x + height = max_y - min_y + bbox_size = max(width, height) + return min_x, max_x, min_y, max_y, bbox_size + + combined_mask = torch.max(masks, dim=0)[0] + _mask = tensor2pil(combined_mask)[0] + new_min_x, new_max_x, new_min_y, new_max_y, combined_bbox_size = calculate_bbox(_mask) + center_x = (new_min_x + new_max_x) / 2 + center_y = (new_min_y + new_max_y) / 2 + half_box_size = round(combined_bbox_size // 2) + new_min_x = max(0, round(center_x - half_box_size)) + new_max_x = min(original_images[0].shape[1], round(center_x + half_box_size)) + new_min_y = max(0, round(center_y - half_box_size)) + new_max_y = min(original_images[0].shape[0], round(center_y + half_box_size)) + + combined_bounding_box.append((new_min_x, new_min_y, new_max_x - new_min_x, new_max_y - new_min_y)) + + self.max_bbox_size = 0 + + # First, calculate the maximum bounding box size across all masks + curr_max_bbox_size = max(calculate_bbox(tensor2pil(mask)[0])[-1] for mask in masks) + # Smooth the changes in the bounding box size + self.max_bbox_size = self.smooth_bbox_size(self.max_bbox_size, curr_max_bbox_size, bbox_smooth_alpha) + # Apply the crop size multiplier + self.max_bbox_size = round(self.max_bbox_size * crop_size_mult) + # Make sure max_bbox_size is divisible by 16, if not, round it upwards so it is + self.max_bbox_size = math.ceil(self.max_bbox_size / 16) * 16 + + if self.max_bbox_size > original_images[0].shape[0] or self.max_bbox_size > original_images[0].shape[1]: + # max_bbox_size can only be as big as our input's width or height, and it has to be even + self.max_bbox_size = math.floor(min(original_images[0].shape[0], original_images[0].shape[1]) / 2) * 2 + + # Then, for each mask and corresponding image... 
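+ # Each frame is cropped around its mask's smoothed center using the shared max_bbox_size; frames with an empty mask are passed through uncropped.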
+ for i, (mask, img) in enumerate(zip(masks, original_images)): + _mask = tensor2pil(mask)[0] + non_zero_indices = np.nonzero(np.array(_mask)) + + # check for empty masks + if len(non_zero_indices[0]) > 0 and len(non_zero_indices[1]) > 0: + min_x, max_x = np.min(non_zero_indices[1]), np.max(non_zero_indices[1]) + min_y, max_y = np.min(non_zero_indices[0]), np.max(non_zero_indices[0]) + + # Calculate center of bounding box + center_x = np.mean(non_zero_indices[1]) + center_y = np.mean(non_zero_indices[0]) + curr_center = (round(center_x), round(center_y)) + + # If this is the first frame, initialize prev_center with curr_center + if not hasattr(self, 'prev_center'): + self.prev_center = curr_center + + # Smooth the changes in the center coordinates from the second frame onwards + if i > 0: + center = self.smooth_center(self.prev_center, curr_center, bbox_smooth_alpha) + else: + center = curr_center + + # Update prev_center for the next frame + self.prev_center = center + + # Create bounding box using max_bbox_size + half_box_size = self.max_bbox_size // 2 + min_x = max(0, center[0] - half_box_size) + max_x = min(img.shape[1], center[0] + half_box_size) + min_y = max(0, center[1] - half_box_size) + max_y = min(img.shape[0], center[1] + half_box_size) + + # Append bounding box coordinates + bounding_boxes.append((min_x, min_y, max_x - min_x, max_y - min_y)) + + # Crop the image from the bounding box + cropped_img = img[min_y:max_y, min_x:max_x, :] + cropped_mask = mask[min_y:max_y, min_x:max_x] + + # Resize the cropped image to a fixed size + new_size = max(cropped_img.shape[0], cropped_img.shape[1]) + resize_transform = Resize(new_size, interpolation=InterpolationMode.NEAREST, max_size=max(img.shape[0], img.shape[1])) + resized_mask = resize_transform(cropped_mask.unsqueeze(0).unsqueeze(0)).squeeze(0).squeeze(0) + resized_img = resize_transform(cropped_img.permute(2, 0, 1)) + # Perform the center crop to the desired size + # Constrain the crop to the smaller of our bbox or our image so we don't expand past the image dimensions. 
+ crop_transform = CenterCrop((min(self.max_bbox_size, resized_img.shape[1]), min(self.max_bbox_size, resized_img.shape[2]))) + + cropped_resized_img = crop_transform(resized_img) + cropped_images.append(cropped_resized_img.permute(1, 2, 0)) + + cropped_resized_mask = crop_transform(resized_mask) + cropped_masks.append(cropped_resized_mask) + + combined_cropped_img = original_images[i][new_min_y:new_max_y, new_min_x:new_max_x, :] + combined_cropped_images.append(combined_cropped_img) + + combined_cropped_mask = masks[i][new_min_y:new_max_y, new_min_x:new_max_x] + combined_cropped_masks.append(combined_cropped_mask) + else: + bounding_boxes.append((0, 0, img.shape[1], img.shape[0])) + cropped_images.append(img) + cropped_masks.append(mask) + combined_cropped_images.append(img) + combined_cropped_masks.append(mask) + + cropped_out = torch.stack(cropped_images, dim=0) + combined_crop_out = torch.stack(combined_cropped_images, dim=0) + cropped_masks_out = torch.stack(cropped_masks, dim=0) + combined_crop_mask_out = torch.stack(combined_cropped_masks, dim=0) + + return (original_images, cropped_out, cropped_masks_out, combined_crop_out, combined_crop_mask_out, bounding_boxes, combined_bounding_box, self.max_bbox_size, self.max_bbox_size) + +class FilterZeroMasksAndCorrespondingImages: + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "masks": ("MASK",), + }, + "optional": { + "original_images": ("IMAGE",), + }, + } + + RETURN_TYPES = ("MASK", "IMAGE", "IMAGE", "INDEXES",) + RETURN_NAMES = ("non_zero_masks_out", "non_zero_mask_images_out", "zero_mask_images_out", "zero_mask_images_out_indexes",) + FUNCTION = "filter" + CATEGORY = "KJNodes/masking" + DESCRIPTION = """ +Filter out all the empty (i.e. all zero) mask in masks +Also filter out all the corresponding images in original_images by indexes if provide + +original_images (optional): If provided, need have same length as masks. 
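+zero_mask_images_out_indexes can be fed to InsertImageBatchByIndexes to put the filtered frames back in their original positions after processing.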
+""" + + def filter(self, masks, original_images=None): + non_zero_masks = [] + non_zero_mask_images = [] + zero_mask_images = [] + zero_mask_images_indexes = [] + + masks_num = len(masks) + also_process_images = False + if original_images is not None: + imgs_num = len(original_images) + if len(original_images) == masks_num: + also_process_images = True + else: + print(f"[WARNING] ignore input: original_images, due to number of original_images ({imgs_num}) is not equal to number of masks ({masks_num})") + + for i in range(masks_num): + non_zero_num = np.count_nonzero(np.array(masks[i])) + if non_zero_num > 0: + non_zero_masks.append(masks[i]) + if also_process_images: + non_zero_mask_images.append(original_images[i]) + else: + zero_mask_images.append(original_images[i]) + zero_mask_images_indexes.append(i) + + non_zero_masks_out = torch.stack(non_zero_masks, dim=0) + non_zero_mask_images_out = zero_mask_images_out = zero_mask_images_out_indexes = None + + if also_process_images: + non_zero_mask_images_out = torch.stack(non_zero_mask_images, dim=0) + if len(zero_mask_images) > 0: + zero_mask_images_out = torch.stack(zero_mask_images, dim=0) + zero_mask_images_out_indexes = zero_mask_images_indexes + + return (non_zero_masks_out, non_zero_mask_images_out, zero_mask_images_out, zero_mask_images_out_indexes) + +class InsertImageBatchByIndexes: + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "images": ("IMAGE",), + "images_to_insert": ("IMAGE",), + "insert_indexes": ("INDEXES",), + }, + } + + RETURN_TYPES = ("IMAGE", ) + RETURN_NAMES = ("images_after_insert", ) + FUNCTION = "insert" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +This node is designed to be use with node FilterZeroMasksAndCorrespondingImages +It inserts the images_to_insert into images according to insert_indexes + +Returns: + images_after_insert: updated original images with origonal sequence order +""" + + def insert(self, images, images_to_insert, insert_indexes): + images_after_insert = images + + if images_to_insert is not None and insert_indexes is not None: + images_to_insert_num = len(images_to_insert) + insert_indexes_num = len(insert_indexes) + if images_to_insert_num == insert_indexes_num: + images_after_insert = [] + + i_images = 0 + for i in range(len(images) + images_to_insert_num): + if i in insert_indexes: + images_after_insert.append(images_to_insert[insert_indexes.index(i)]) + else: + images_after_insert.append(images[i_images]) + i_images += 1 + + images_after_insert = torch.stack(images_after_insert, dim=0) + + else: + print(f"[WARNING] skip this node, due to number of images_to_insert ({images_to_insert_num}) is not equal to number of insert_indexes ({insert_indexes_num})") + + + return (images_after_insert, ) + +class BatchUncropAdvanced: + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "original_images": ("IMAGE",), + "cropped_images": ("IMAGE",), + "cropped_masks": ("MASK",), + "combined_crop_mask": ("MASK",), + "bboxes": ("BBOX",), + "border_blending": ("FLOAT", {"default": 0.25, "min": 0.0, "max": 1.0, "step": 0.01}, ), + "crop_rescale": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "use_combined_mask": ("BOOLEAN", {"default": False}), + "use_square_mask": ("BOOLEAN", {"default": True}), + }, + "optional": { + "combined_bounding_box": ("BBOX", {"default": None}), + }, + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "uncrop" + CATEGORY = "KJNodes/masking" + + + def uncrop(self, original_images, cropped_images, cropped_masks, 
combined_crop_mask, bboxes, border_blending, crop_rescale, use_combined_mask, use_square_mask, combined_bounding_box = None): + + def inset_border(image, border_width=20, border_color=(0)): + width, height = image.size + bordered_image = Image.new(image.mode, (width, height), border_color) + bordered_image.paste(image, (0, 0)) + draw = ImageDraw.Draw(bordered_image) + draw.rectangle((0, 0, width - 1, height - 1), outline=border_color, width=border_width) + return bordered_image + + if len(original_images) != len(cropped_images): + raise ValueError(f"The number of original_images ({len(original_images)}) and cropped_images ({len(cropped_images)}) should be the same") + + # Ensure there are enough bboxes, but drop the excess if there are more bboxes than images + if len(bboxes) > len(original_images): + print(f"Warning: Dropping excess bounding boxes. Expected {len(original_images)}, but got {len(bboxes)}") + bboxes = bboxes[:len(original_images)] + elif len(bboxes) < len(original_images): + raise ValueError("There should be at least as many bboxes as there are original and cropped images") + + crop_imgs = tensor2pil(cropped_images) + input_images = tensor2pil(original_images) + out_images = [] + + for i in range(len(input_images)): + img = input_images[i] + crop = crop_imgs[i] + bbox = bboxes[i] + + if use_combined_mask: + bb_x, bb_y, bb_width, bb_height = combined_bounding_box[0] + paste_region = bbox_to_region((bb_x, bb_y, bb_width, bb_height), img.size) + mask = combined_crop_mask[i] + else: + bb_x, bb_y, bb_width, bb_height = bbox + paste_region = bbox_to_region((bb_x, bb_y, bb_width, bb_height), img.size) + mask = cropped_masks[i] + + # scale paste_region + scale_x = scale_y = crop_rescale + paste_region = (round(paste_region[0]*scale_x), round(paste_region[1]*scale_y), round(paste_region[2]*scale_x), round(paste_region[3]*scale_y)) + + # rescale the crop image to fit the paste_region + crop = crop.resize((round(paste_region[2]-paste_region[0]), round(paste_region[3]-paste_region[1]))) + crop_img = crop.convert("RGB") + + #border blending + if border_blending > 1.0: + border_blending = 1.0 + elif border_blending < 0.0: + border_blending = 0.0 + + blend_ratio = (max(crop_img.size) / 2) * float(border_blending) + blend = img.convert("RGBA") + + if use_square_mask: + mask = Image.new("L", img.size, 0) + mask_block = Image.new("L", (paste_region[2]-paste_region[0], paste_region[3]-paste_region[1]), 255) + mask_block = inset_border(mask_block, round(blend_ratio / 2), (0)) + mask.paste(mask_block, paste_region) + else: + original_mask = tensor2pil(mask)[0] + original_mask = original_mask.resize((paste_region[2]-paste_region[0], paste_region[3]-paste_region[1])) + mask = Image.new("L", img.size, 0) + mask.paste(original_mask, paste_region) + + mask = mask.filter(ImageFilter.BoxBlur(radius=blend_ratio / 4)) + mask = mask.filter(ImageFilter.GaussianBlur(radius=blend_ratio / 4)) + + blend.paste(crop_img, paste_region) + blend.putalpha(mask) + + img = Image.alpha_composite(img.convert("RGBA"), blend) + out_images.append(img.convert("RGB")) + + return (pil2tensor(out_images),) + +class SplitBboxes: + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "bboxes": ("BBOX",), + "index": ("INT", {"default": 0,"min": 0, "max": 99999999, "step": 1}), + }, + } + + RETURN_TYPES = ("BBOX","BBOX",) + RETURN_NAMES = ("bboxes_a","bboxes_b",) + FUNCTION = "splitbbox" + CATEGORY = "KJNodes/masking" + DESCRIPTION = """ +Splits the specified bbox list at the given index into two lists. 
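+For example, index 2 on a list of 5 bboxes returns the first 2 as bboxes_a and the remaining 3 as bboxes_b.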
+""" + + def splitbbox(self, bboxes, index): + bboxes_a = bboxes[:index] # Sub-list from the start of bboxes up to (but not including) the index + bboxes_b = bboxes[index:] # Sub-list from the index to the end of bboxes + + return (bboxes_a, bboxes_b,) + +class BboxToInt: + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "bboxes": ("BBOX",), + "index": ("INT", {"default": 0,"min": 0, "max": 99999999, "step": 1}), + }, + } + + RETURN_TYPES = ("INT","INT","INT","INT","INT","INT",) + RETURN_NAMES = ("x_min","y_min","width","height", "center_x","center_y",) + FUNCTION = "bboxtoint" + CATEGORY = "KJNodes/masking" + DESCRIPTION = """ +Returns selected index from bounding box list as integers. +""" + def bboxtoint(self, bboxes, index): + x_min, y_min, width, height = bboxes[index] + center_x = int(x_min + width / 2) + center_y = int(y_min + height / 2) + + return (x_min, y_min, width, height, center_x, center_y,) + +class BboxVisualize: + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "images": ("IMAGE",), + "bboxes": ("BBOX",), + "line_width": ("INT", {"default": 1,"min": 1, "max": 10, "step": 1}), + }, + } + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("images",) + FUNCTION = "visualizebbox" + DESCRIPTION = """ +Visualizes the specified bbox on the image. +""" + + CATEGORY = "KJNodes/masking" + + def visualizebbox(self, bboxes, images, line_width): + image_list = [] + for image, bbox in zip(images, bboxes): + x_min, y_min, width, height = bbox + + # Ensure bbox coordinates are integers + x_min = int(x_min) + y_min = int(y_min) + width = int(width) + height = int(height) + + # Permute the image dimensions + image = image.permute(2, 0, 1) + + # Clone the image to draw bounding boxes + img_with_bbox = image.clone() + + # Define the color for the bbox, e.g., red + color = torch.tensor([1, 0, 0], dtype=torch.float32) + + # Ensure color tensor matches the image channels + if color.shape[0] != img_with_bbox.shape[0]: + color = color.unsqueeze(1).expand(-1, line_width) + + # Draw lines for each side of the bbox with the specified line width + for lw in range(line_width): + # Top horizontal line + if y_min + lw < img_with_bbox.shape[1]: + img_with_bbox[:, y_min + lw, x_min:x_min + width] = color[:, None] + + # Bottom horizontal line + if y_min + height - lw < img_with_bbox.shape[1]: + img_with_bbox[:, y_min + height - lw, x_min:x_min + width] = color[:, None] + + # Left vertical line + if x_min + lw < img_with_bbox.shape[2]: + img_with_bbox[:, y_min:y_min + height, x_min + lw] = color[:, None] + + # Right vertical line + if x_min + width - lw < img_with_bbox.shape[2]: + img_with_bbox[:, y_min:y_min + height, x_min + width - lw] = color[:, None] + + # Permute the image dimensions back + img_with_bbox = img_with_bbox.permute(1, 2, 0).unsqueeze(0) + image_list.append(img_with_bbox) + + return (torch.cat(image_list, dim=0),) + + return (torch.cat(image_list, dim=0),) \ No newline at end of file diff --git a/ComfyUI-KJNodes/nodes/curve_nodes.py b/ComfyUI-KJNodes/nodes/curve_nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..f1bb1f05cb7fef0543db6d3da30a0b4a830e3733 --- /dev/null +++ b/ComfyUI-KJNodes/nodes/curve_nodes.py @@ -0,0 +1,1393 @@ +import torch +from torchvision import transforms +import json +from PIL import Image, ImageDraw, ImageFont, ImageColor, ImageFilter +import numpy as np +from ..utility.utility import pil2tensor +import folder_paths +from comfy.utils import common_upscale + +def plot_coordinates_to_tensor(coordinates, 
height, width, bbox_height, bbox_width, size_multiplier, prompt): + import matplotlib + matplotlib.use('Agg') + from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas + text_color = '#999999' + bg_color = '#353535' + matplotlib.pyplot.rcParams['text.color'] = text_color + fig, ax = matplotlib.pyplot.subplots(figsize=(width/100, height/100), dpi=100) + fig.patch.set_facecolor(bg_color) + ax.set_facecolor(bg_color) + ax.grid(color=text_color, linestyle='-', linewidth=0.5) + ax.set_xlabel('x', color=text_color) + ax.set_ylabel('y', color=text_color) + for text in ax.get_xticklabels() + ax.get_yticklabels(): + text.set_color(text_color) + ax.set_title('position for: ' + prompt) + ax.set_xlabel('X Coordinate') + ax.set_ylabel('Y Coordinate') + #ax.legend().remove() + ax.set_xlim(0, width) # Set the x-axis to match the input latent width + ax.set_ylim(height, 0) # Set the y-axis to match the input latent height, with (0,0) at top-left + # Adjust the margins of the subplot + matplotlib.pyplot.subplots_adjust(left=0.08, right=0.95, bottom=0.05, top=0.95, wspace=0.2, hspace=0.2) + + cmap = matplotlib.pyplot.get_cmap('rainbow') + image_batch = [] + canvas = FigureCanvas(fig) + width, height = fig.get_size_inches() * fig.get_dpi() + # Draw a box at each coordinate + for i, ((x, y), size) in enumerate(zip(coordinates, size_multiplier)): + color_index = i / (len(coordinates) - 1) + color = cmap(color_index) + draw_height = bbox_height * size + draw_width = bbox_width * size + rect = matplotlib.patches.Rectangle((x - draw_width/2, y - draw_height/2), draw_width, draw_height, + linewidth=1, edgecolor=color, facecolor='none', alpha=0.5) + ax.add_patch(rect) + + # Check if there is a next coordinate to draw an arrow to + if i < len(coordinates) - 1: + x1, y1 = coordinates[i] + x2, y2 = coordinates[i + 1] + ax.annotate("", xy=(x2, y2), xytext=(x1, y1), + arrowprops=dict(arrowstyle="->", + linestyle="-", + lw=1, + color=color, + mutation_scale=20)) + canvas.draw() + image_np = np.frombuffer(canvas.tostring_rgb(), dtype='uint8').reshape(int(height), int(width), 3).copy() + image_tensor = torch.from_numpy(image_np).float() / 255.0 + image_tensor = image_tensor.unsqueeze(0) + image_batch.append(image_tensor) + + matplotlib.pyplot.close(fig) + image_batch_tensor = torch.cat(image_batch, dim=0) + + return image_batch_tensor + +class PlotCoordinates: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "coordinates": ("STRING", {"forceInput": True}), + "text": ("STRING", {"default": 'title', "multiline": False}), + "width": ("INT", {"default": 512, "min": 8, "max": 4096, "step": 8}), + "height": ("INT", {"default": 512, "min": 8, "max": 4096, "step": 8}), + "bbox_width": ("INT", {"default": 128, "min": 8, "max": 4096, "step": 8}), + "bbox_height": ("INT", {"default": 128, "min": 8, "max": 4096, "step": 8}), + }, + "optional": {"size_multiplier": ("FLOAT", {"default": [1.0], "forceInput": True})}, + } + RETURN_TYPES = ("IMAGE", "INT", "INT", "INT", "INT",) + RETURN_NAMES = ("images", "width", "height", "bbox_width", "bbox_height",) + FUNCTION = "append" + CATEGORY = "KJNodes/experimental" + DESCRIPTION = """ +Plots coordinates to sequence of images using Matplotlib. 
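+Coordinates are parsed from a JSON-style string of points such as [{'x': 100, 'y': 100}, {'x': 256, 'y': 256}], the same format produced by the Spline Editor node.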
+ +""" + + def append(self, coordinates, text, width, height, bbox_width, bbox_height, size_multiplier=[1.0]): + coordinates = json.loads(coordinates.replace("'", '"')) + coordinates = [(coord['x'], coord['y']) for coord in coordinates] + batch_size = len(coordinates) + if len(size_multiplier) != batch_size: + size_multiplier = size_multiplier * (batch_size // len(size_multiplier)) + size_multiplier[:batch_size % len(size_multiplier)] + + plot_image_tensor = plot_coordinates_to_tensor(coordinates, height, width, bbox_height, bbox_width, size_multiplier, text) + + return (plot_image_tensor, width, height, bbox_width, bbox_height) + +class SplineEditor: + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "points_store": ("STRING", {"multiline": False}), + "coordinates": ("STRING", {"multiline": False}), + "mask_width": ("INT", {"default": 512, "min": 8, "max": 4096, "step": 8}), + "mask_height": ("INT", {"default": 512, "min": 8, "max": 4096, "step": 8}), + "points_to_sample": ("INT", {"default": 16, "min": 2, "max": 1000, "step": 1}), + "sampling_method": ( + [ + 'path', + 'time', + 'controlpoints' + ], + { + "default": 'time' + }), + "interpolation": ( + [ + 'cardinal', + 'monotone', + 'basis', + 'linear', + 'step-before', + 'step-after', + 'polar', + 'polar-reverse', + ], + { + "default": 'cardinal' + }), + "tension": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "repeat_output": ("INT", {"default": 1, "min": 1, "max": 4096, "step": 1}), + "float_output_type": ( + [ + 'list', + 'pandas series', + 'tensor', + ], + { + "default": 'list' + }), + }, + "optional": { + "min_value": ("FLOAT", {"default": 0.0, "min": -10000.0, "max": 10000.0, "step": 0.01}), + "max_value": ("FLOAT", {"default": 1.0, "min": -10000.0, "max": 10000.0, "step": 0.01}), + } + } + + RETURN_TYPES = ("MASK", "STRING", "FLOAT", "INT", "STRING",) + RETURN_NAMES = ("mask", "coord_str", "float", "count", "normalized_str",) + FUNCTION = "splinedata" + CATEGORY = "KJNodes/weights" + DESCRIPTION = """ +# WORK IN PROGRESS +Do not count on this as part of your workflow yet, +probably contains lots of bugs and stability is not +guaranteed!! + +## Graphical editor to create values for various +## schedules and/or mask batches. + +**Shift + click** to add control point at end. +**Ctrl + click** to add control point (subdivide) between two points. +**Right click on a point** to delete it. +Note that you can't delete from start/end. + +Right click on canvas for context menu: +These are purely visual options, doesn't affect the output: + - Toggle handles visibility + - Display sample points: display the points to be returned. + +**points_to_sample** value sets the number of samples +returned from the **drawn spline itself**, this is independent from the +actual control points, so the interpolation type matters. 
+sampling_method: + - time: samples along the time axis, used for schedules + - path: samples along the path itself, useful for coordinates + +output types: + - mask batch + example compatible nodes: anything that takes masks + - list of floats + example compatible nodes: IPAdapter weights + - pandas series + example compatible nodes: anything that takes Fizz' + nodes Batch Value Schedule + - torch tensor + example compatible nodes: unknown +""" + + def splinedata(self, mask_width, mask_height, coordinates, float_output_type, interpolation, + points_to_sample, sampling_method, points_store, tension, repeat_output, min_value=0.0, max_value=1.0): + + coordinates = json.loads(coordinates) + normalized = [] + normalized_y_values = [] + for coord in coordinates: + coord['x'] = int(round(coord['x'])) + coord['y'] = int(round(coord['y'])) + norm_x = (1.0 - (coord['x'] / mask_height) - 0.0) * (max_value - min_value) + min_value + norm_y = (1.0 - (coord['y'] / mask_height) - 0.0) * (max_value - min_value) + min_value + normalized_y_values.append(norm_y) + normalized.append({'x':norm_x, 'y':norm_y}) + if float_output_type == 'list': + out_floats = normalized_y_values * repeat_output + elif float_output_type == 'pandas series': + try: + import pandas as pd + except: + raise Exception("MaskOrImageToWeight: pandas is not installed. Please install pandas to use this output_type") + out_floats = pd.Series(normalized_y_values * repeat_output), + elif float_output_type == 'tensor': + out_floats = torch.tensor(normalized_y_values * repeat_output, dtype=torch.float32) + # Create a color map for grayscale intensities + color_map = lambda y: torch.full((mask_height, mask_width, 3), y, dtype=torch.float32) + + # Create image tensors for each normalized y value + mask_tensors = [color_map(y) for y in normalized_y_values] + masks_out = torch.stack(mask_tensors) + masks_out = masks_out.repeat(repeat_output, 1, 1, 1) + masks_out = masks_out.mean(dim=-1) + return (masks_out, json.dumps(coordinates), out_floats, len(out_floats) , json.dumps(normalized)) + +class CreateShapeMaskOnPath: + + RETURN_TYPES = ("MASK", "MASK",) + RETURN_NAMES = ("mask", "mask_inverted",) + FUNCTION = "createshapemask" + CATEGORY = "KJNodes/masking/generate" + DESCRIPTION = """ +Creates a mask or batch of masks with the specified shape. +Locations are center locations. 
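+One mask is created per coordinate point, so the output batch size equals the number of points.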
+""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "shape": ( + [ 'circle', + 'square', + 'triangle', + ], + { + "default": 'circle' + }), + "coordinates": ("STRING", {"forceInput": True}), + "frame_width": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "frame_height": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "shape_width": ("INT", {"default": 128,"min": 8, "max": 4096, "step": 1}), + "shape_height": ("INT", {"default": 128,"min": 8, "max": 4096, "step": 1}), + }, + "optional": { + "size_multiplier": ("FLOAT", {"default": [1.0], "forceInput": True}), + } + } + + def createshapemask(self, coordinates, frame_width, frame_height, shape_width, shape_height, shape, size_multiplier=[1.0]): + # Define the number of images in the batch + coordinates = coordinates.replace("'", '"') + coordinates = json.loads(coordinates) + + batch_size = len(coordinates) + out = [] + color = "white" + if len(size_multiplier) != batch_size: + size_multiplier = size_multiplier * (batch_size // len(size_multiplier)) + size_multiplier[:batch_size % len(size_multiplier)] + for i, coord in enumerate(coordinates): + image = Image.new("RGB", (frame_width, frame_height), "black") + draw = ImageDraw.Draw(image) + + # Calculate the size for this frame and ensure it's not less than 0 + current_width = max(0, shape_width + i * size_multiplier[i]) + current_height = max(0, shape_height + i * size_multiplier[i]) + + location_x = coord['x'] + location_y = coord['y'] + + if shape == 'circle' or shape == 'square': + # Define the bounding box for the shape + left_up_point = (location_x - current_width // 2, location_y - current_height // 2) + right_down_point = (location_x + current_width // 2, location_y + current_height // 2) + two_points = [left_up_point, right_down_point] + + if shape == 'circle': + draw.ellipse(two_points, fill=color) + elif shape == 'square': + draw.rectangle(two_points, fill=color) + + elif shape == 'triangle': + # Define the points for the triangle + left_up_point = (location_x - current_width // 2, location_y + current_height // 2) # bottom left + right_down_point = (location_x + current_width // 2, location_y + current_height // 2) # bottom right + top_point = (location_x, location_y - current_height // 2) # top point + draw.polygon([top_point, left_up_point, right_down_point], fill=color) + + image = pil2tensor(image) + mask = image[:, :, :, 0] + out.append(mask) + outstack = torch.cat(out, dim=0) + return (outstack, 1.0 - outstack,) + +class CreateShapeImageOnPath: + + RETURN_TYPES = ("IMAGE", "MASK",) + RETURN_NAMES = ("image","mask", ) + FUNCTION = "createshapemask" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Creates an image or batch of images with the specified shape. +Locations are center locations. 
+""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "shape": ( + [ 'circle', + 'square', + 'triangle', + ], + { + "default": 'circle' + }), + "coordinates": ("STRING", {"forceInput": True}), + "frame_width": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "frame_height": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "shape_width": ("INT", {"default": 128,"min": 8, "max": 4096, "step": 1}), + "shape_height": ("INT", {"default": 128,"min": 8, "max": 4096, "step": 1}), + "shape_color": ("STRING", {"default": 'white'}), + "bg_color": ("STRING", {"default": 'black'}), + "blur_radius": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 100, "step": 0.1}), + "intensity": ("FLOAT", {"default": 1.0, "min": 0.01, "max": 100.0, "step": 0.01}), + }, + "optional": { + "size_multiplier": ("FLOAT", {"default": [1.0], "forceInput": True}), + } + } + + def createshapemask(self, coordinates, frame_width, frame_height, shape_width, shape_height, shape_color, + bg_color, blur_radius, shape, intensity, size_multiplier=[1.0]): + # Define the number of images in the batch + coordinates = coordinates.replace("'", '"') + coordinates = json.loads(coordinates) + + batch_size = len(coordinates) + images_list = [] + masks_list = [] + + if len(size_multiplier) != batch_size: + size_multiplier = size_multiplier * (batch_size // len(size_multiplier)) + size_multiplier[:batch_size % len(size_multiplier)] + for i, coord in enumerate(coordinates): + image = Image.new("RGB", (frame_width, frame_height), bg_color) + draw = ImageDraw.Draw(image) + + # Calculate the size for this frame and ensure it's not less than 0 + current_width = max(0, shape_width + i * size_multiplier[i]) + current_height = max(0, shape_height + i * size_multiplier[i]) + + location_x = coord['x'] + location_y = coord['y'] + + if shape == 'circle' or shape == 'square': + # Define the bounding box for the shape + left_up_point = (location_x - current_width // 2, location_y - current_height // 2) + right_down_point = (location_x + current_width // 2, location_y + current_height // 2) + two_points = [left_up_point, right_down_point] + + if shape == 'circle': + draw.ellipse(two_points, fill=shape_color) + elif shape == 'square': + draw.rectangle(two_points, fill=shape_color) + + elif shape == 'triangle': + # Define the points for the triangle + left_up_point = (location_x - current_width // 2, location_y + current_height // 2) # bottom left + right_down_point = (location_x + current_width // 2, location_y + current_height // 2) # bottom right + top_point = (location_x, location_y - current_height // 2) # top point + draw.polygon([top_point, left_up_point, right_down_point], fill=shape_color) + + if blur_radius != 0: + image = image.filter(ImageFilter.GaussianBlur(blur_radius)) + + image = pil2tensor(image) + image = image * intensity + mask = image[:, :, :, 0] + masks_list.append(mask) + images_list.append(image) + out_images = torch.cat(images_list, dim=0).cpu().float() + out_masks = torch.cat(masks_list, dim=0) + return (out_images, out_masks) + +class CreateTextOnPath: + + RETURN_TYPES = ("IMAGE", "MASK", "MASK",) + RETURN_NAMES = ("image", "mask", "mask_inverted",) + FUNCTION = "createtextmask" + CATEGORY = "KJNodes/masking/generate" + DESCRIPTION = """ +Creates a mask or batch of masks with the specified text. +Locations are center locations. 
+""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "coordinates": ("STRING", {"forceInput": True}), + "text": ("STRING", {"default": 'text', "multiline": True}), + "frame_width": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "frame_height": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "font": (folder_paths.get_filename_list("kjnodes_fonts"), ), + "font_size": ("INT", {"default": 42}), + "alignment": ( + [ 'left', + 'center', + 'right' + ], + {"default": 'center'} + ), + "text_color": ("STRING", {"default": 'white'}), + }, + "optional": { + "size_multiplier": ("FLOAT", {"default": [1.0], "forceInput": True}), + } + } + + def createtextmask(self, coordinates, frame_width, frame_height, font, font_size, text, text_color, alignment, size_multiplier=[1.0]): + coordinates = coordinates.replace("'", '"') + coordinates = json.loads(coordinates) + + batch_size = len(coordinates) + mask_list = [] + image_list = [] + color = text_color + font_path = folder_paths.get_full_path("kjnodes_fonts", font) + + if len(size_multiplier) != batch_size: + size_multiplier = size_multiplier * (batch_size // len(size_multiplier)) + size_multiplier[:batch_size % len(size_multiplier)] + + for i, coord in enumerate(coordinates): + image = Image.new("RGB", (frame_width, frame_height), "black") + draw = ImageDraw.Draw(image) + lines = text.split('\n') # Split the text into lines + # Apply the size multiplier to the font size for this iteration + current_font_size = int(font_size * size_multiplier[i]) + current_font = ImageFont.truetype(font_path, current_font_size) + line_heights = [current_font.getbbox(line)[3] for line in lines] # List of line heights + total_text_height = sum(line_heights) # Total height of text block + + # Calculate the starting Y position to center the block of text + start_y = coord['y'] - total_text_height // 2 + for j, line in enumerate(lines): + text_width, text_height = current_font.getbbox(line)[2], line_heights[j] + if alignment == 'left': + location_x = coord['x'] + elif alignment == 'center': + location_x = int(coord['x'] - text_width // 2) + elif alignment == 'right': + location_x = int(coord['x'] - text_width) + + location_y = int(start_y + sum(line_heights[:j])) + text_position = (location_x, location_y) + # Draw the text + try: + draw.text(text_position, line, fill=color, font=current_font, features=['-liga']) + except: + draw.text(text_position, line, fill=color, font=current_font) + + image = pil2tensor(image) + non_black_pixels = (image > 0).any(dim=-1) + mask = non_black_pixels.to(image.dtype) + mask_list.append(mask) + image_list.append(image) + + out_images = torch.cat(image_list, dim=0).cpu().float() + out_masks = torch.cat(mask_list, dim=0) + return (out_images, out_masks, 1.0 - out_masks,) + +class CreateGradientFromCoords: + + RETURN_TYPES = ("IMAGE", ) + RETURN_NAMES = ("image", ) + FUNCTION = "generate" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Creates a gradient image from coordinates. 
+""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "coordinates": ("STRING", {"forceInput": True}), + "frame_width": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "frame_height": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "start_color": ("STRING", {"default": 'white'}), + "end_color": ("STRING", {"default": 'black'}), + "multiplier": ("FLOAT", {"default": 1.0, "min": 0.01, "max": 100.0, "step": 0.01}), + }, + } + + def generate(self, coordinates, frame_width, frame_height, start_color, end_color, multiplier): + # Parse the coordinates + coordinates = json.loads(coordinates.replace("'", '"')) + + # Create an image + image = Image.new("RGB", (frame_width, frame_height)) + draw = ImageDraw.Draw(image) + + # Extract start and end points for the gradient + start_coord = coordinates[0] + end_coord = coordinates[1] + + start_color = ImageColor.getrgb(start_color) + end_color = ImageColor.getrgb(end_color) + + # Calculate the gradient direction (vector) + gradient_direction = (end_coord['x'] - start_coord['x'], end_coord['y'] - start_coord['y']) + gradient_length = (gradient_direction[0] ** 2 + gradient_direction[1] ** 2) ** 0.5 + + # Iterate over each pixel in the image + for y in range(frame_height): + for x in range(frame_width): + # Calculate the projection of the point on the gradient line + point_vector = (x - start_coord['x'], y - start_coord['y']) + projection = (point_vector[0] * gradient_direction[0] + point_vector[1] * gradient_direction[1]) / gradient_length + projection = max(min(projection, gradient_length), 0) # Clamp the projection value + + # Calculate the blend factor for the current pixel + blend = projection * multiplier / gradient_length + + # Determine the color of the current pixel + color = ( + int(start_color[0] + (end_color[0] - start_color[0]) * blend), + int(start_color[1] + (end_color[1] - start_color[1]) * blend), + int(start_color[2] + (end_color[2] - start_color[2]) * blend) + ) + + # Set the pixel color + draw.point((x, y), fill=color) + + # Convert the PIL image to a tensor (assuming such a function exists in your context) + image_tensor = pil2tensor(image) + + return (image_tensor,) + +class GradientToFloat: + + RETURN_TYPES = ("FLOAT", "FLOAT",) + RETURN_NAMES = ("float_x", "float_y", ) + FUNCTION = "sample" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Calculates list of floats from image. 
+""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE", ), + "steps": ("INT", {"default": 10, "min": 2, "max": 10000, "step": 1}), + }, + } + + def sample(self, image, steps): + # Assuming image is a tensor with shape [B, H, W, C] + B, H, W, C = image.shape + + # Sample along the width axis (W) + w_intervals = torch.linspace(0, W - 1, steps=steps, dtype=torch.int64) + # Assuming we're sampling from the first batch and the first channel + w_sampled = image[0, :, w_intervals, 0] + + # Sample along the height axis (H) + h_intervals = torch.linspace(0, H - 1, steps=steps, dtype=torch.int64) + # Assuming we're sampling from the first batch and the first channel + h_sampled = image[0, h_intervals, :, 0] + + # Taking the mean across the height for width sampling, and across the width for height sampling + w_values = w_sampled.mean(dim=0).tolist() + h_values = h_sampled.mean(dim=1).tolist() + + return (w_values, h_values) + +class MaskOrImageToWeight: + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "output_type": ( + [ + 'list', + 'pandas series', + 'tensor', + 'string' + ], + { + "default": 'list' + }), + }, + "optional": { + "images": ("IMAGE",), + "masks": ("MASK",), + }, + + } + RETURN_TYPES = ("FLOAT", "STRING",) + FUNCTION = "execute" + CATEGORY = "KJNodes/weights" + DESCRIPTION = """ +Gets the mean values from mask or image batch +and returns that as the selected output type. +""" + + def execute(self, output_type, images=None, masks=None): + mean_values = [] + if masks is not None and images is None: + for mask in masks: + mean_values.append(mask.mean().item()) + elif masks is None and images is not None: + for image in images: + mean_values.append(image.mean().item()) + elif masks is not None and images is not None: + raise Exception("MaskOrImageToWeight: Use either mask or image input only.") + + # Convert mean_values to the specified output_type + if output_type == 'list': + out = mean_values + elif output_type == 'pandas series': + try: + import pandas as pd + except: + raise Exception("MaskOrImageToWeight: pandas is not installed. Please install pandas to use this output_type") + out = pd.Series(mean_values), + elif output_type == 'tensor': + out = torch.tensor(mean_values, dtype=torch.float32), + return (out, [str(value) for value in mean_values],) + +class WeightScheduleConvert: + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "input_values": ("FLOAT", {"default": 0.0, "forceInput": True}), + "output_type": ( + [ + 'match_input', + 'list', + 'pandas series', + 'tensor', + ], + { + "default": 'list' + }), + "invert": ("BOOLEAN", {"default": False}), + "repeat": ("INT", {"default": 1,"min": 1, "max": 255, "step": 1}), + }, + "optional": { + "remap_to_frames": ("INT", {"default": 0}), + "interpolation_curve": ("FLOAT", {"forceInput": True}), + "remap_values": ("BOOLEAN", {"default": False}), + "remap_min": ("FLOAT", {"default": 0.0, "min": -100000, "max": 100000.0, "step": 0.01}), + "remap_max": ("FLOAT", {"default": 1.0, "min": -100000, "max": 100000.0, "step": 0.01}), + }, + + } + RETURN_TYPES = ("FLOAT", "STRING", "INT",) + FUNCTION = "execute" + CATEGORY = "KJNodes/weights" + DESCRIPTION = """ +Converts different value lists/series to another type. 
+""" + + def detect_input_type(self, input_values): + import pandas as pd + if isinstance(input_values, list): + return 'list' + elif isinstance(input_values, pd.Series): + return 'pandas series' + elif isinstance(input_values, torch.Tensor): + return 'tensor' + else: + raise ValueError("Unsupported input type") + + def execute(self, input_values, output_type, invert, repeat, remap_to_frames=0, interpolation_curve=None, remap_min=0.0, remap_max=1.0, remap_values=False): + import pandas as pd + input_type = self.detect_input_type(input_values) + + if input_type == 'pandas series': + float_values = input_values.tolist() + elif input_type == 'tensor': + float_values = input_values + else: + float_values = input_values + + if invert: + float_values = [1 - value for value in float_values] + + if interpolation_curve is not None: + interpolated_pattern = [] + orig_float_values = float_values + for value in interpolation_curve: + min_val = min(orig_float_values) + max_val = max(orig_float_values) + # Normalize the values to [0, 1] + normalized_values = [(value - min_val) / (max_val - min_val) for value in orig_float_values] + # Interpolate the normalized values to the new frame count + remapped_float_values = np.interp(np.linspace(0, 1, int(remap_to_frames * value)), np.linspace(0, 1, len(normalized_values)), normalized_values).tolist() + interpolated_pattern.extend(remapped_float_values) + float_values = interpolated_pattern + else: + # Remap float_values to match target_frame_amount + if remap_to_frames > 0 and remap_to_frames != len(float_values): + min_val = min(float_values) + max_val = max(float_values) + # Normalize the values to [0, 1] + normalized_values = [(value - min_val) / (max_val - min_val) for value in float_values] + # Interpolate the normalized values to the new frame count + float_values = np.interp(np.linspace(0, 1, remap_to_frames), np.linspace(0, 1, len(normalized_values)), normalized_values).tolist() + + float_values = float_values * repeat + if remap_values: + float_values = self.remap_values(float_values, remap_min, remap_max) + + if output_type == 'list': + out = float_values, + elif output_type == 'pandas series': + out = pd.Series(float_values), + elif output_type == 'tensor': + if input_type == 'pandas series': + out = torch.tensor(float_values.values, dtype=torch.float32), + else: + out = torch.tensor(float_values, dtype=torch.float32), + elif output_type == 'match_input': + out = float_values, + return (out, [str(value) for value in float_values], [int(value) for value in float_values]) + + def remap_values(self, values, target_min, target_max): + # Determine the current range + current_min = min(values) + current_max = max(values) + current_range = current_max - current_min + + # Determine the target range + target_range = target_max - target_min + + # Perform the linear interpolation for each value + remapped_values = [(value - current_min) / current_range * target_range + target_min for value in values] + + return remapped_values + + +class FloatToMask: + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "input_values": ("FLOAT", {"forceInput": True, "default": 0}), + "width": ("INT", {"default": 100, "min": 1}), + "height": ("INT", {"default": 100, "min": 1}), + }, + } + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "KJNodes/masking/generate" + DESCRIPTION = """ +Generates a batch of masks based on the input float values. +The batch size is determined by the length of the input float values. 
+Each mask is generated with the specified width and height. +""" + + def execute(self, input_values, width, height): + import pandas as pd + # Ensure input_values is a list + if isinstance(input_values, (float, int)): + input_values = [input_values] + elif isinstance(input_values, pd.Series): + input_values = input_values.tolist() + elif isinstance(input_values, list) and all(isinstance(item, list) for item in input_values): + input_values = [item for sublist in input_values for item in sublist] + + # Generate a batch of masks based on the input_values + masks = [] + for value in input_values: + # Assuming value is a float between 0 and 1 representing the mask's intensity + mask = torch.ones((height, width), dtype=torch.float32) * value + masks.append(mask) + masks_out = torch.stack(masks, dim=0) + + return(masks_out,) +class WeightScheduleExtend: + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "input_values_1": ("FLOAT", {"default": 0.0, "forceInput": True}), + "input_values_2": ("FLOAT", {"default": 0.0, "forceInput": True}), + "output_type": ( + [ + 'match_input', + 'list', + 'pandas series', + 'tensor', + ], + { + "default": 'match_input' + }), + }, + + } + RETURN_TYPES = ("FLOAT",) + FUNCTION = "execute" + CATEGORY = "KJNodes/weights" + DESCRIPTION = """ +Extends, and converts if needed, different value lists/series +""" + + def detect_input_type(self, input_values): + import pandas as pd + if isinstance(input_values, list): + return 'list' + elif isinstance(input_values, pd.Series): + return 'pandas series' + elif isinstance(input_values, torch.Tensor): + return 'tensor' + else: + raise ValueError("Unsupported input type") + + def execute(self, input_values_1, input_values_2, output_type): + import pandas as pd + input_type_1 = self.detect_input_type(input_values_1) + input_type_2 = self.detect_input_type(input_values_2) + # Convert input_values_2 to the same format as input_values_1 if they do not match + if not input_type_1 == input_type_2: + print("Converting input_values_2 to the same format as input_values_1") + if input_type_1 == 'pandas series': + # Convert input_values_2 to a pandas Series + float_values_2 = pd.Series(input_values_2) + elif input_type_1 == 'tensor': + # Convert input_values_2 to a tensor + float_values_2 = torch.tensor(input_values_2, dtype=torch.float32) + else: + print("Input types match, no conversion needed") + # If the types match, no conversion is needed + float_values_2 = input_values_2 + + float_values = input_values_1 + float_values_2 + + if output_type == 'list': + return float_values, + elif output_type == 'pandas series': + return pd.Series(float_values), + elif output_type == 'tensor': + if input_type_1 == 'pandas series': + return torch.tensor(float_values.values, dtype=torch.float32), + else: + return torch.tensor(float_values, dtype=torch.float32), + elif output_type == 'match_input': + return float_values, + else: + raise ValueError(f"Unsupported output_type: {output_type}") + +class FloatToSigmas: + @classmethod + def INPUT_TYPES(s): + return {"required": + { + "float_list": ("FLOAT", {"default": 0.0, "forceInput": True}), + } + } + RETURN_TYPES = ("SIGMAS",) + RETURN_NAMES = ("SIGMAS",) + CATEGORY = "KJNodes/noise" + FUNCTION = "customsigmas" + DESCRIPTION = """ +Creates a sigmas tensor from list of float values. 
+ +""" + def customsigmas(self, float_list): + return torch.tensor(float_list, dtype=torch.float32), + +class GLIGENTextBoxApplyBatchCoords: + @classmethod + def INPUT_TYPES(s): + return {"required": {"conditioning_to": ("CONDITIONING", ), + "latents": ("LATENT", ), + "clip": ("CLIP", ), + "gligen_textbox_model": ("GLIGEN", ), + "coordinates": ("STRING", {"forceInput": True}), + "text": ("STRING", {"multiline": True}), + "width": ("INT", {"default": 128, "min": 8, "max": 4096, "step": 8}), + "height": ("INT", {"default": 128, "min": 8, "max": 4096, "step": 8}), + }, + "optional": {"size_multiplier": ("FLOAT", {"default": [1.0], "forceInput": True})}, + } + RETURN_TYPES = ("CONDITIONING", "IMAGE", ) + RETURN_NAMES = ("conditioning", "coord_preview", ) + FUNCTION = "append" + CATEGORY = "KJNodes/experimental" + DESCRIPTION = """ +This node allows scheduling GLIGEN text box positions in a batch, +to be used with AnimateDiff-Evolved. Intended to pair with the +Spline Editor -node. + +GLIGEN model can be downloaded through the Manage's "Install Models" menu. +Or directly from here: +https://huggingface.co/comfyanonymous/GLIGEN_pruned_safetensors/tree/main + +Inputs: +- **latents** input is used to calculate batch size +- **clip** is your standard text encoder, use same as for the main prompt +- **gligen_textbox_model** connects to GLIGEN Loader +- **coordinates** takes a json string of points, directly compatible +with the spline editor node. +- **text** is the part of the prompt to set position for +- **width** and **height** are the size of the GLIGEN bounding box + +Outputs: +- **conditioning** goes between to clip text encode and the sampler +- **coord_preview** is an optional preview of the coordinates and +bounding boxes. + +""" + + def append(self, latents, coordinates, conditioning_to, clip, gligen_textbox_model, text, width, height, size_multiplier=[1.0]): + coordinates = json.loads(coordinates.replace("'", '"')) + coordinates = [(coord['x'], coord['y']) for coord in coordinates] + + batch_size = sum(tensor.size(0) for tensor in latents.values()) + if len(coordinates) != batch_size: + print("GLIGENTextBoxApplyBatchCoords WARNING: The number of coordinates does not match the number of latents") + + c = [] + _, cond_pooled = clip.encode_from_tokens(clip.tokenize(text), return_pooled=True) + + for t in conditioning_to: + n = [t[0], t[1].copy()] + + position_params_batch = [[] for _ in range(batch_size)] # Initialize a list of empty lists for each batch item + if len(size_multiplier) != batch_size: + size_multiplier = size_multiplier * (batch_size // len(size_multiplier)) + size_multiplier[:batch_size % len(size_multiplier)] + + for i in range(batch_size): + x_position, y_position = coordinates[i] + position_param = (cond_pooled, int((height // 8) * size_multiplier[i]), int((width // 8) * size_multiplier[i]), (y_position - height // 2) // 8, (x_position - width // 2) // 8) + position_params_batch[i].append(position_param) # Append position_param to the correct sublist + + prev = [] + if "gligen" in n[1]: + prev = n[1]['gligen'][2] + else: + prev = [[] for _ in range(batch_size)] + # Concatenate prev and position_params_batch, ensuring both are lists of lists + # and each sublist corresponds to a batch item + combined_position_params = [prev_item + batch_item for prev_item, batch_item in zip(prev, position_params_batch)] + n[1]['gligen'] = ("position_batched", gligen_textbox_model, combined_position_params) + c.append(n) + + image_height = latents['samples'].shape[-2] * 8 + image_width = 
latents['samples'].shape[-1] * 8 + plot_image_tensor = plot_coordinates_to_tensor(coordinates, image_height, image_width, height, width, size_multiplier, text) + + return (c, plot_image_tensor,) + +class CreateInstanceDiffusionTracking: + + RETURN_TYPES = ("TRACKING", "STRING", "INT", "INT", "INT", "INT",) + RETURN_NAMES = ("tracking", "prompt", "width", "height", "bbox_width", "bbox_height",) + FUNCTION = "tracking" + CATEGORY = "KJNodes/InstanceDiffusion" + DESCRIPTION = """ +Creates tracking data to be used with InstanceDiffusion: +https://github.com/logtd/ComfyUI-InstanceDiffusion + +InstanceDiffusion prompt format: +"class_id.class_name": "prompt", +for example: +"1.head": "((head))", +""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "coordinates": ("STRING", {"forceInput": True}), + "width": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "height": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "bbox_width": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "bbox_height": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "class_name": ("STRING", {"default": "class_name"}), + "class_id": ("INT", {"default": 0,"min": 0, "max": 255, "step": 1}), + "prompt": ("STRING", {"default": "prompt", "multiline": True}), + }, + "optional": { + "size_multiplier": ("FLOAT", {"default": [1.0], "forceInput": True}), + "fit_in_frame": ("BOOLEAN", {"default": True}), + } + } + + def tracking(self, coordinates, class_name, class_id, width, height, bbox_width, bbox_height, prompt, size_multiplier=[1.0], fit_in_frame=True): + # Define the number of images in the batch + coordinates = coordinates.replace("'", '"') + coordinates = json.loads(coordinates) + + tracked = {} + tracked[class_name] = {} + batch_size = len(coordinates) + # Initialize a list to hold the coordinates for the current ID + id_coordinates = [] + if len(size_multiplier) != batch_size: + size_multiplier = size_multiplier * (batch_size // len(size_multiplier)) + size_multiplier[:batch_size % len(size_multiplier)] + for i, coord in enumerate(coordinates): + x = coord['x'] + y = coord['y'] + adjusted_bbox_width = bbox_width * size_multiplier[i] + adjusted_bbox_height = bbox_height * size_multiplier[i] + # Calculate the top left and bottom right coordinates + top_left_x = x - adjusted_bbox_width // 2 + top_left_y = y - adjusted_bbox_height // 2 + bottom_right_x = x + adjusted_bbox_width // 2 + bottom_right_y = y + adjusted_bbox_height // 2 + + if fit_in_frame: + # Clip the coordinates to the frame boundaries + top_left_x = max(0, top_left_x) + top_left_y = max(0, top_left_y) + bottom_right_x = min(width, bottom_right_x) + bottom_right_y = min(height, bottom_right_y) + # Ensure width and height are positive + adjusted_bbox_width = max(1, bottom_right_x - top_left_x) + adjusted_bbox_height = max(1, bottom_right_y - top_left_y) + + # Update the coordinates with the new width and height + bottom_right_x = top_left_x + adjusted_bbox_width + bottom_right_y = top_left_y + adjusted_bbox_height + + # Append the top left and bottom right coordinates to the list for the current ID + id_coordinates.append([top_left_x, top_left_y, bottom_right_x, bottom_right_y, width, height]) + + class_id = int(class_id) + # Assign the list of coordinates to the specified ID within the class_id dictionary + tracked[class_name][class_id] = id_coordinates + + prompt_string = "" + for class_name, class_data in tracked.items(): + for class_id in class_data.keys(): + class_id_str = str(class_id) + 
# Use the incoming prompt for each class name and ID
+ prompt_string += f'"{class_id_str}.{class_name}": "({prompt})",\n'
+
+ # Remove the last comma and newline
+ prompt_string = prompt_string.rstrip(",\n")
+
+ return (tracked, prompt_string, width, height, bbox_width, bbox_height)
+
+class AppendInstanceDiffusionTracking:
+
+ RETURN_TYPES = ("TRACKING", "STRING",)
+ RETURN_NAMES = ("tracking", "prompt",)
+ FUNCTION = "append"
+ CATEGORY = "KJNodes/InstanceDiffusion"
+ DESCRIPTION = """
+Appends tracking data to be used with InstanceDiffusion:
+https://github.com/logtd/ComfyUI-InstanceDiffusion
+
+"""
+
+ @classmethod
+ def INPUT_TYPES(s):
+ return {
+ "required": {
+ "tracking_1": ("TRACKING", {"forceInput": True}),
+ "tracking_2": ("TRACKING", {"forceInput": True}),
+ },
+ "optional": {
+ "prompt_1": ("STRING", {"default": "", "forceInput": True}),
+ "prompt_2": ("STRING", {"default": "", "forceInput": True}),
+ }
+ }
+
+ def append(self, tracking_1, tracking_2, prompt_1="", prompt_2=""):
+ tracking_copy = tracking_1.copy()
+ # Merge tracking_2 into a copy of tracking_1; existing class names are kept
+ # and their class ID entries are updated instead of raising an error
+ for class_name, class_data in tracking_2.items():
+ if class_name not in tracking_copy:
+ tracking_copy[class_name] = class_data
+ else:
+ # If the class name exists, merge the class data from tracking_2 into tracking_copy
+ # This will add new class IDs under the same class name without raising an error
+ tracking_copy[class_name].update(class_data)
+ prompt_string = prompt_1 + "," + prompt_2
+ return (tracking_copy, prompt_string)
+
+class InterpolateCoords:
+
+ RETURN_TYPES = ("STRING",)
+ RETURN_NAMES = ("coordinates",)
+ FUNCTION = "interpolate"
+ CATEGORY = "KJNodes/experimental"
+ DESCRIPTION = """
+Interpolates coordinates based on a curve.
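As a minimal standalone sketch of the box construction CreateInstanceDiffusionTracking performs above (the helper name is illustrative, not part of the node): a centre point plus a box size becomes a per-frame [x1, y1, x2, y2, frame_w, frame_h] entry, clipped to the frame when fit_in_frame is on; the node additionally re-derives the clipped width/height with a 1-pixel minimum, which this sketch omits.

def center_to_bbox(x, y, bbox_w, bbox_h, frame_w, frame_h, fit_in_frame=True):
    # top-left / bottom-right corners around the centre point
    x1, y1 = x - bbox_w // 2, y - bbox_h // 2
    x2, y2 = x + bbox_w // 2, y + bbox_h // 2
    if fit_in_frame:
        # clamp the box to the frame boundaries
        x1, y1 = max(0, x1), max(0, y1)
        x2, y2 = min(frame_w, x2), min(frame_h, y2)
    return [x1, y1, x2, y2, frame_w, frame_h]

# a 100x100 box centred at (30, 256) in a 512x512 frame is clipped at the left edge
print(center_to_bbox(30, 256, 100, 100, 512, 512))  # [0, 206, 80, 306, 512, 512]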
+""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "coordinates": ("STRING", {"forceInput": True}), + "interpolation_curve": ("FLOAT", {"forceInput": True}), + + }, + } + + def interpolate(self, coordinates, interpolation_curve): + # Parse the JSON string to get the list of coordinates + coordinates = json.loads(coordinates.replace("'", '"')) + + # Convert the list of dictionaries to a list of (x, y) tuples for easier processing + coordinates = [(coord['x'], coord['y']) for coord in coordinates] + + # Calculate the total length of the original path + path_length = sum(np.linalg.norm(np.array(coordinates[i]) - np.array(coordinates[i-1])) + for i in range(1, len(coordinates))) + + # Initialize variables for interpolation + interpolated_coords = [] + current_length = 0 + current_index = 0 + + # Iterate over the normalized curve + for normalized_length in interpolation_curve: + target_length = normalized_length * path_length # Convert to the original scale + while current_index < len(coordinates) - 1: + segment_start, segment_end = np.array(coordinates[current_index]), np.array(coordinates[current_index + 1]) + segment_length = np.linalg.norm(segment_end - segment_start) + if current_length + segment_length >= target_length: + break + current_length += segment_length + current_index += 1 + + # Interpolate between the last two points + if current_index < len(coordinates) - 1: + p1, p2 = np.array(coordinates[current_index]), np.array(coordinates[current_index + 1]) + segment_length = np.linalg.norm(p2 - p1) + if segment_length > 0: + t = (target_length - current_length) / segment_length + interpolated_point = p1 + t * (p2 - p1) + interpolated_coords.append(interpolated_point.tolist()) + else: + interpolated_coords.append(p1.tolist()) + else: + # If the target_length is at or beyond the end of the path, add the last coordinate + interpolated_coords.append(coordinates[-1]) + + # Convert back to string format if necessary + interpolated_coords_str = "[" + ", ".join([f"{{'x': {round(coord[0])}, 'y': {round(coord[1])}}}" for coord in interpolated_coords]) + "]" + print(interpolated_coords_str) + + return (interpolated_coords_str,) + +class DrawInstanceDiffusionTracking: + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("image", ) + FUNCTION = "draw" + CATEGORY = "KJNodes/InstanceDiffusion" + DESCRIPTION = """ +Draws the tracking data from +CreateInstanceDiffusionTracking -node. 
+ +""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE", ), + "tracking": ("TRACKING", {"forceInput": True}), + "box_line_width": ("INT", {"default": 2, "min": 1, "max": 10, "step": 1}), + "draw_text": ("BOOLEAN", {"default": True}), + "font": (folder_paths.get_filename_list("kjnodes_fonts"), ), + "font_size": ("INT", {"default": 20}), + }, + } + + def draw(self, image, tracking, box_line_width, draw_text, font, font_size): + import matplotlib.cm as cm + + modified_images = [] + + colormap = cm.get_cmap('rainbow', len(tracking)) + if draw_text: + font_path = folder_paths.get_full_path("kjnodes_fonts", font) + font = ImageFont.truetype(font_path, font_size) + + # Iterate over each image in the batch + for i in range(image.shape[0]): + # Extract the current image and convert it to a PIL image + current_image = image[i, :, :, :].permute(2, 0, 1) + pil_image = transforms.ToPILImage()(current_image) + + draw = ImageDraw.Draw(pil_image) + + # Iterate over the bounding boxes for the current image + for j, (class_name, class_data) in enumerate(tracking.items()): + for class_id, bbox_list in class_data.items(): + # Check if the current index is within the bounds of the bbox_list + if i < len(bbox_list): + bbox = bbox_list[i] + # Ensure bbox is a list or tuple before unpacking + if isinstance(bbox, (list, tuple)): + x1, y1, x2, y2, _, _ = bbox + # Convert coordinates to integers + x1, y1, x2, y2 = int(x1), int(y1), int(x2), int(y2) + # Generate a color from the rainbow colormap + color = tuple(int(255 * x) for x in colormap(j / len(tracking)))[:3] + # Draw the bounding box on the image with the generated color + draw.rectangle([x1, y1, x2, y2], outline=color, width=box_line_width) + if draw_text: + # Draw the class name and ID as text above the box with the generated color + text = f"{class_id}.{class_name}" + # Calculate the width and height of the text + _, _, text_width, text_height = draw.textbbox((0, 0), text=text, font=font) + # Position the text above the top-left corner of the box + text_position = (x1, y1 - text_height) + draw.text(text_position, text, fill=color, font=font) + else: + print(f"Unexpected data type for bbox: {type(bbox)}") + + # Convert the drawn image back to a torch tensor and adjust back to (H, W, C) + modified_image_tensor = transforms.ToTensor()(pil_image).permute(1, 2, 0) + modified_images.append(modified_image_tensor) + + # Stack the modified images back into a batch + image_tensor_batch = torch.stack(modified_images).cpu().float() + + return image_tensor_batch, + +class PointsEditor: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "points_store": ("STRING", {"multiline": False}), + "coordinates": ("STRING", {"multiline": False}), + "neg_coordinates": ("STRING", {"multiline": False}), + "bbox_store": ("STRING", {"multiline": False}), + "bboxes": ("STRING", {"multiline": False}), + "bbox_format": ( + [ + 'xyxy', + 'xywh', + ], + ), + "width": ("INT", {"default": 512, "min": 8, "max": 4096, "step": 8}), + "height": ("INT", {"default": 512, "min": 8, "max": 4096, "step": 8}), + "normalize": ("BOOLEAN", {"default": False}), + }, + "optional": { + "bg_image": ("IMAGE", ), + }, + } + + RETURN_TYPES = ("STRING", "STRING", "BBOX", "MASK", "IMAGE") + RETURN_NAMES = ("positive_coords", "negative_coords", "bbox", "bbox_mask", "cropped_image") + FUNCTION = "pointdata" + CATEGORY = "KJNodes/experimental" + DESCRIPTION = """ +# WORK IN PROGRESS +Do not count on this as part of your workflow yet, +probably contains lots of bugs 
and stability is not +guaranteed!! + +## Graphical editor to create coordinates + +**Shift + click** to add a positive (green) point. +**Shift + right click** to add a negative (red) point. +**Ctrl + click** to draw a box. +**Right click on a point** to delete it. +Note that you can't delete from start/end of the points array. + +To add an image select the node and copy/paste or drag in the image. +Or from the bg_image input on queue (first frame of the batch). + +**THE IMAGE IS SAVED TO THE NODE AND WORKFLOW METADATA** +you can clear the image from the context menu by right clicking on the canvas + +""" + + def pointdata(self, points_store, bbox_store, width, height, coordinates, neg_coordinates, normalize, bboxes, bbox_format="xyxy", bg_image=None): + import io + import base64 + + coordinates = json.loads(coordinates) + pos_coordinates = [] + for coord in coordinates: + coord['x'] = int(round(coord['x'])) + coord['y'] = int(round(coord['y'])) + if normalize: + norm_x = coord['x'] / width + norm_y = coord['y'] / height + pos_coordinates.append({'x': norm_x, 'y': norm_y}) + else: + pos_coordinates.append({'x': coord['x'], 'y': coord['y']}) + + if neg_coordinates: + coordinates = json.loads(neg_coordinates) + neg_coordinates = [] + for coord in coordinates: + coord['x'] = int(round(coord['x'])) + coord['y'] = int(round(coord['y'])) + if normalize: + norm_x = coord['x'] / width + norm_y = coord['y'] / height + neg_coordinates.append({'x': norm_x, 'y': norm_y}) + else: + neg_coordinates.append({'x': coord['x'], 'y': coord['y']}) + + # Create a blank mask + mask = np.zeros((height, width), dtype=np.uint8) + bboxes = json.loads(bboxes) + print(bboxes) + valid_bboxes = [] + for bbox in bboxes: + if (bbox.get("startX") is None or + bbox.get("startY") is None or + bbox.get("endX") is None or + bbox.get("endY") is None): + continue # Skip this bounding box if any value is None + else: + # Ensure that endX and endY are greater than startX and startY + x_min = min(int(bbox["startX"]), int(bbox["endX"])) + y_min = min(int(bbox["startY"]), int(bbox["endY"])) + x_max = max(int(bbox["startX"]), int(bbox["endX"])) + y_max = max(int(bbox["startY"]), int(bbox["endY"])) + + valid_bboxes.append((x_min, y_min, x_max, y_max)) + + bboxes_xyxy = [] + for bbox in valid_bboxes: + x_min, y_min, x_max, y_max = bbox + bboxes_xyxy.append((x_min, y_min, x_max, y_max)) + mask[y_min:y_max, x_min:x_max] = 1 # Fill the bounding box area with 1s + + if bbox_format == "xywh": + bboxes_xywh = [] + for bbox in valid_bboxes: + x_min, y_min, x_max, y_max = bbox + width = x_max - x_min + height = y_max - y_min + bboxes_xywh.append((x_min, y_min, width, height)) + bboxes = bboxes_xywh + else: + bboxes = bboxes_xyxy + + mask_tensor = torch.from_numpy(mask) + mask_tensor = mask_tensor.unsqueeze(0).float().cpu() + + if bg_image is not None and len(valid_bboxes) > 0: + x_min, y_min, x_max, y_max = bboxes[0] + cropped_image = bg_image[:, y_min:y_max, x_min:x_max, :] + + elif bg_image is not None: + cropped_image = bg_image + + if bg_image is None: + return (json.dumps(pos_coordinates), json.dumps(neg_coordinates), bboxes, mask_tensor) + else: + transform = transforms.ToPILImage() + image = transform(bg_image[0].permute(2, 0, 1)) + buffered = io.BytesIO() + image.save(buffered, format="JPEG", quality=75) + + # Step 3: Encode the image bytes to a Base64 string + img_bytes = buffered.getvalue() + img_base64 = base64.b64encode(img_bytes).decode('utf-8') + + return { + "ui": {"bg_image": [img_base64]}, + "result": 
(json.dumps(pos_coordinates), json.dumps(neg_coordinates), bboxes, mask_tensor, cropped_image) + } \ No newline at end of file diff --git a/ComfyUI-KJNodes/nodes/image_nodes.py b/ComfyUI-KJNodes/nodes/image_nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..170175220ebe1f075841c1b4b62eeff409a4d7e4 --- /dev/null +++ b/ComfyUI-KJNodes/nodes/image_nodes.py @@ -0,0 +1,1941 @@ +import numpy as np +import time +import torch +import torch.nn.functional as F +import torchvision.transforms as T +import random +import math +import os +import re +import json +import hashlib +try: + import cv2 +except: + print("OpenCV not installed") + pass +from PIL import ImageGrab, ImageDraw, ImageFont, Image, ImageSequence, ImageOps + +from nodes import MAX_RESOLUTION, SaveImage +from comfy_extras.nodes_mask import ImageCompositeMasked +from comfy.cli_args import args +from comfy.utils import ProgressBar, common_upscale +import folder_paths +import model_management + +script_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + +class ImagePass: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + }, + "optional": { + "image": ("IMAGE",), + }, + } + RETURN_TYPES = ("IMAGE",) + FUNCTION = "passthrough" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Passes the image through without modifying it. +""" + + def passthrough(self, image=None): + return image, + +class ColorMatch: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "image_ref": ("IMAGE",), + "image_target": ("IMAGE",), + "method": ( + [ + 'mkl', + 'hm', + 'reinhard', + 'mvgd', + 'hm-mvgd-hm', + 'hm-mkl-hm', + ], { + "default": 'mkl' + }), + }, + "optional": { + "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + } + } + + CATEGORY = "KJNodes/image" + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("image",) + FUNCTION = "colormatch" + DESCRIPTION = """ +color-matcher enables color transfer across images which comes in handy for automatic +color-grading of photographs, paintings and film sequences as well as light-field +and stopmotion corrections. + +The methods behind the mappings are based on the approach from Reinhard et al., +the Monge-Kantorovich Linearization (MKL) as proposed by Pitie et al. and our analytical solution +to a Multi-Variate Gaussian Distribution (MVGD) transfer in conjunction with classical histogram +matching. As shown below our HM-MVGD-HM compound outperforms existing methods. +https://github.com/hahnec/color-matcher/ + +""" + + def colormatch(self, image_ref, image_target, method, strength=1.0): + try: + from color_matcher import ColorMatcher + except: + raise Exception("Can't import color-matcher, did you install requirements.txt? 
Manual install: pip install color-matcher") + cm = ColorMatcher() + image_ref = image_ref.cpu() + image_target = image_target.cpu() + batch_size = image_target.size(0) + out = [] + images_target = image_target.squeeze() + images_ref = image_ref.squeeze() + + image_ref_np = images_ref.numpy() + images_target_np = images_target.numpy() + + if image_ref.size(0) > 1 and image_ref.size(0) != batch_size: + raise ValueError("ColorMatch: Use either single reference image or a matching batch of reference images.") + + for i in range(batch_size): + image_target_np = images_target_np if batch_size == 1 else images_target[i].numpy() + image_ref_np_i = image_ref_np if image_ref.size(0) == 1 else images_ref[i].numpy() + try: + image_result = cm.transfer(src=image_target_np, ref=image_ref_np_i, method=method) + except BaseException as e: + print(f"Error occurred during transfer: {e}") + break + # Apply the strength multiplier + image_result = image_target_np + strength * (image_result - image_target_np) + out.append(torch.from_numpy(image_result)) + + out = torch.stack(out, dim=0).to(torch.float32) + out.clamp_(0, 1) + return (out,) + +class SaveImageWithAlpha: + def __init__(self): + self.output_dir = folder_paths.get_output_directory() + self.type = "output" + self.prefix_append = "" + + @classmethod + def INPUT_TYPES(s): + return {"required": + {"images": ("IMAGE", ), + "mask": ("MASK", ), + "filename_prefix": ("STRING", {"default": "ComfyUI"})}, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, + } + + RETURN_TYPES = () + FUNCTION = "save_images_alpha" + OUTPUT_NODE = True + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Saves an image and mask as .PNG with the mask as the alpha channel. +""" + + def save_images_alpha(self, images, mask, filename_prefix="ComfyUI_image_with_alpha", prompt=None, extra_pnginfo=None): + from PIL.PngImagePlugin import PngInfo + filename_prefix += self.prefix_append + full_output_folder, filename, counter, subfolder, filename_prefix = folder_paths.get_save_image_path(filename_prefix, self.output_dir, images[0].shape[1], images[0].shape[0]) + results = list() + if mask.dtype == torch.float16: + mask = mask.to(torch.float32) + def file_counter(): + max_counter = 0 + # Loop through the existing files + for existing_file in os.listdir(full_output_folder): + # Check if the file matches the expected format + match = re.fullmatch(fr"{filename}_(\d+)_?\.[a-zA-Z0-9]+", existing_file) + if match: + # Extract the numeric portion of the filename + file_counter = int(match.group(1)) + # Update the maximum counter value if necessary + if file_counter > max_counter: + max_counter = file_counter + return max_counter + + for image, alpha in zip(images, mask): + i = 255. * image.cpu().numpy() + a = 255. 
* alpha.cpu().numpy() + img = Image.fromarray(np.clip(i, 0, 255).astype(np.uint8)) + + # Resize the mask to match the image size + a_resized = Image.fromarray(a).resize(img.size, Image.LANCZOS) + a_resized = np.clip(a_resized, 0, 255).astype(np.uint8) + img.putalpha(Image.fromarray(a_resized, mode='L')) + metadata = None + if not args.disable_metadata: + metadata = PngInfo() + if prompt is not None: + metadata.add_text("prompt", json.dumps(prompt)) + if extra_pnginfo is not None: + for x in extra_pnginfo: + metadata.add_text(x, json.dumps(extra_pnginfo[x])) + + # Increment the counter by 1 to get the next available value + counter = file_counter() + 1 + file = f"{filename}_{counter:05}.png" + img.save(os.path.join(full_output_folder, file), pnginfo=metadata, compress_level=4) + results.append({ + "filename": file, + "subfolder": subfolder, + "type": self.type + }) + + return { "ui": { "images": results } } + +class ImageConcanate: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image1": ("IMAGE",), + "image2": ("IMAGE",), + "direction": ( + [ 'right', + 'down', + 'left', + 'up', + ], + { + "default": 'right' + }), + "match_image_size": ("BOOLEAN", {"default": True}), + }} + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "concanate" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Concatenates the image2 to image1 in the specified direction. +""" + + def concanate(self, image1, image2, direction, match_image_size, first_image_shape=None): + # Check if the batch sizes are different + batch_size1 = image1.shape[0] + batch_size2 = image2.shape[0] + + if batch_size1 != batch_size2: + # Calculate the number of repetitions needed + max_batch_size = max(batch_size1, batch_size2) + repeats1 = max_batch_size // batch_size1 + repeats2 = max_batch_size // batch_size2 + + # Repeat the images to match the largest batch size + image1 = image1.repeat(repeats1, 1, 1, 1) + image2 = image2.repeat(repeats2, 1, 1, 1) + + if match_image_size: + # Use first_image_shape if provided; otherwise, default to image1's shape + target_shape = first_image_shape if first_image_shape is not None else image1.shape + + original_height = image2.shape[1] + original_width = image2.shape[2] + original_aspect_ratio = original_width / original_height + + if direction in ['left', 'right']: + # Match the height and adjust the width to preserve aspect ratio + target_height = target_shape[1] # B, H, W, C format + target_width = int(target_height * original_aspect_ratio) + elif direction in ['up', 'down']: + # Match the width and adjust the height to preserve aspect ratio + target_width = target_shape[2] # B, H, W, C format + target_height = int(target_width / original_aspect_ratio) + + # Adjust image2 to the expected format for common_upscale + image2_for_upscale = image2.movedim(-1, 1) # Move C to the second position (B, C, H, W) + + # Resize image2 to match the target size while preserving aspect ratio + image2_resized = common_upscale(image2_for_upscale, target_width, target_height, "lanczos", "disabled") + + # Adjust image2 back to the original format (B, H, W, C) after resizing + image2_resized = image2_resized.movedim(1, -1) + else: + image2_resized = image2 + + # Concatenate based on the specified direction + if direction == 'right': + concatenated_image = torch.cat((image1, image2_resized), dim=2) # Concatenate along width + elif direction == 'down': + concatenated_image = torch.cat((image1, image2_resized), dim=1) # Concatenate along height + elif direction == 'left': + concatenated_image = 
torch.cat((image2_resized, image1), dim=2) # Concatenate along width + elif direction == 'up': + concatenated_image = torch.cat((image2_resized, image1), dim=1) # Concatenate along height + return concatenated_image, + +import torch # Make sure you have PyTorch installed + +class ImageConcatFromBatch: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "images": ("IMAGE",), + "num_columns": ("INT", {"default": 3, "min": 1, "max": 255, "step": 1}), + "match_image_size": ("BOOLEAN", {"default": False}), + "max_resolution": ("INT", {"default": 4096}), + }, + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "concat" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ + Concatenates images from a batch into a grid with a specified number of columns. + """ + + def concat(self, images, num_columns, match_image_size, max_resolution): + # Assuming images is a batch of images (B, H, W, C) + batch_size, height, width, channels = images.shape + num_rows = (batch_size + num_columns - 1) // num_columns # Calculate number of rows + + print(f"Initial dimensions: batch_size={batch_size}, height={height}, width={width}, channels={channels}") + print(f"num_rows={num_rows}, num_columns={num_columns}") + + if match_image_size: + target_shape = images[0].shape + + resized_images = [] + for image in images: + original_height = image.shape[0] + original_width = image.shape[1] + original_aspect_ratio = original_width / original_height + + if original_aspect_ratio > 1: + target_height = target_shape[0] + target_width = int(target_height * original_aspect_ratio) + else: + target_width = target_shape[1] + target_height = int(target_width / original_aspect_ratio) + + print(f"Resizing image from ({original_height}, {original_width}) to ({target_height}, {target_width})") + + # Resize the image to match the target size while preserving aspect ratio + resized_image = common_upscale(image.movedim(-1, 0), target_width, target_height, "lanczos", "disabled") + resized_image = resized_image.movedim(0, -1) # Move channels back to the last dimension + resized_images.append(resized_image) + + # Convert the list of resized images back to a tensor + images = torch.stack(resized_images) + + height, width = target_shape[:2] # Update height and width + + # Initialize an empty grid + grid_height = num_rows * height + grid_width = num_columns * width + + print(f"Grid dimensions before scaling: grid_height={grid_height}, grid_width={grid_width}") + + # Original scale factor calculation remains unchanged + scale_factor = min(max_resolution / grid_height, max_resolution / grid_width, 1.0) + + # Apply scale factor to height and width + scaled_height = height * scale_factor + scaled_width = width * scale_factor + + # Round scaled dimensions to the nearest number divisible by 8 + height = max(1, int(round(scaled_height / 8) * 8)) + width = max(1, int(round(scaled_width / 8) * 8)) + + if abs(scaled_height - height) > 4: + height = max(1, int(round((scaled_height + 4) / 8) * 8)) + if abs(scaled_width - width) > 4: + width = max(1, int(round((scaled_width + 4) / 8) * 8)) + + # Recalculate grid dimensions with adjusted height and width + grid_height = num_rows * height + grid_width = num_columns * width + print(f"Grid dimensions after scaling: grid_height={grid_height}, grid_width={grid_width}") + print(f"Final image dimensions: height={height}, width={width}") + + grid = torch.zeros((grid_height, grid_width, channels), dtype=images.dtype) + + for idx, image in enumerate(images): + resized_image = 
torch.nn.functional.interpolate(image.unsqueeze(0).permute(0, 3, 1, 2), size=(height, width), mode="bilinear").squeeze().permute(1, 2, 0) + row = idx // num_columns + col = idx % num_columns + grid[row*height:(row+1)*height, col*width:(col+1)*width, :] = resized_image + + return grid.unsqueeze(0), + +class ImageGridComposite2x2: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image1": ("IMAGE",), + "image2": ("IMAGE",), + "image3": ("IMAGE",), + "image4": ("IMAGE",), + }} + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "compositegrid" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Concatenates the 4 input images into a 2x2 grid. +""" + + def compositegrid(self, image1, image2, image3, image4): + top_row = torch.cat((image1, image2), dim=2) + bottom_row = torch.cat((image3, image4), dim=2) + grid = torch.cat((top_row, bottom_row), dim=1) + return (grid,) + +class ImageGridComposite3x3: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image1": ("IMAGE",), + "image2": ("IMAGE",), + "image3": ("IMAGE",), + "image4": ("IMAGE",), + "image5": ("IMAGE",), + "image6": ("IMAGE",), + "image7": ("IMAGE",), + "image8": ("IMAGE",), + "image9": ("IMAGE",), + }} + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "compositegrid" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Concatenates the 9 input images into a 3x3 grid. +""" + + def compositegrid(self, image1, image2, image3, image4, image5, image6, image7, image8, image9): + top_row = torch.cat((image1, image2, image3), dim=2) + mid_row = torch.cat((image4, image5, image6), dim=2) + bottom_row = torch.cat((image7, image8, image9), dim=2) + grid = torch.cat((top_row, mid_row, bottom_row), dim=1) + return (grid,) + +class ImageBatchTestPattern: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "batch_size": ("INT", {"default": 1,"min": 1, "max": 255, "step": 1}), + "start_from": ("INT", {"default": 0,"min": 0, "max": 255, "step": 1}), + "text_x": ("INT", {"default": 256,"min": 0, "max": 4096, "step": 1}), + "text_y": ("INT", {"default": 256,"min": 0, "max": 4096, "step": 1}), + "width": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "height": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "font": (folder_paths.get_filename_list("kjnodes_fonts"), ), + "font_size": ("INT", {"default": 255,"min": 8, "max": 4096, "step": 1}), + }} + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "generatetestpattern" + CATEGORY = "KJNodes/text" + + def generatetestpattern(self, batch_size, font, font_size, start_from, width, height, text_x, text_y): + out = [] + # Generate the sequential numbers for each image + numbers = np.arange(start_from, start_from + batch_size) + font_path = folder_paths.get_full_path("kjnodes_fonts", font) + + for number in numbers: + # Create a black image with the number as a random color text + image = Image.new("RGB", (width, height), color='black') + draw = ImageDraw.Draw(image) + + # Generate a random color for the text + font_color = (random.randint(0, 255), random.randint(0, 255), random.randint(0, 255)) + + font = ImageFont.truetype(font_path, font_size) + + # Get the size of the text and position it in the center + text = str(number) + + try: + draw.text((text_x, text_y), text, font=font, fill=font_color, features=['-liga']) + except: + draw.text((text_x, text_y), text, font=font, fill=font_color,) + + # Convert the image to a numpy array and normalize the pixel values + image_np = np.array(image).astype(np.float32) / 255.0 + image_tensor = 
torch.from_numpy(image_np).unsqueeze(0) + out.append(image_tensor) + out_tensor = torch.cat(out, dim=0) + + return (out_tensor,) + +class ImageGrabPIL: + + @classmethod + def IS_CHANGED(cls): + + return + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("image",) + FUNCTION = "screencap" + CATEGORY = "KJNodes/experimental" + DESCRIPTION = """ +Captures an area specified by screen coordinates. +Can be used for realtime diffusion with autoqueue. +""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "x": ("INT", {"default": 0,"min": 0, "max": 4096, "step": 1}), + "y": ("INT", {"default": 0,"min": 0, "max": 4096, "step": 1}), + "width": ("INT", {"default": 512,"min": 0, "max": 4096, "step": 1}), + "height": ("INT", {"default": 512,"min": 0, "max": 4096, "step": 1}), + "num_frames": ("INT", {"default": 1,"min": 1, "max": 255, "step": 1}), + "delay": ("FLOAT", {"default": 0.1,"min": 0.0, "max": 10.0, "step": 0.01}), + }, + } + + def screencap(self, x, y, width, height, num_frames, delay): + start_time = time.time() + captures = [] + bbox = (x, y, x + width, y + height) + + for _ in range(num_frames): + # Capture screen + screen_capture = ImageGrab.grab(bbox=bbox) + screen_capture_torch = torch.from_numpy(np.array(screen_capture, dtype=np.float32) / 255.0).unsqueeze(0) + captures.append(screen_capture_torch) + + # Wait for a short delay if more than one frame is to be captured + if num_frames > 1: + time.sleep(delay) + + elapsed_time = time.time() - start_time + print(f"screengrab took {elapsed_time} seconds.") + + return (torch.cat(captures, dim=0),) + +class Screencap_mss: + + @classmethod + def IS_CHANGED(cls): + + return + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("image",) + FUNCTION = "screencap" + CATEGORY = "KJNodes/experimental" + DESCRIPTION = """ +Captures an area specified by screen coordinates. +Can be used for realtime diffusion with autoqueue. +""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "x": ("INT", {"default": 0,"min": 0, "max": 4096, "step": 1}), + "y": ("INT", {"default": 0,"min": 0, "max": 4096, "step": 1}), + "width": ("INT", {"default": 512,"min": 0, "max": 4096, "step": 1}), + "height": ("INT", {"default": 512,"min": 0, "max": 4096, "step": 1}), + "num_frames": ("INT", {"default": 1,"min": 1, "max": 255, "step": 1}), + "delay": ("FLOAT", {"default": 0.1,"min": 0.0, "max": 10.0, "step": 0.01}), + }, + } + + def screencap(self, x, y, width, height, num_frames, delay): + from mss import mss + captures = [] + with mss() as sct: + bbox = {'top': y, 'left': x, 'width': width, 'height': height} + + for _ in range(num_frames): + sct_img = sct.grab(bbox) + img_np = np.array(sct_img) + img_torch = torch.from_numpy(img_np[..., [2, 1, 0]]).float() / 255.0 + captures.append(img_torch) + + if num_frames > 1: + time.sleep(delay) + + return (torch.stack(captures, 0),) + +class WebcamCaptureCV2: + + @classmethod + def IS_CHANGED(cls): + return + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("image",) + FUNCTION = "capture" + CATEGORY = "KJNodes/experimental" + DESCRIPTION = """ +Captures a frame from a webcam using CV2. +Can be used for realtime diffusion with autoqueue. 
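For reference, the Screencap_mss path above boils down to a few lines (assuming mss and torch are installed; the helper name is illustrative): grab a BGRA region, reorder it to RGB, and scale to the 0..1 float (1, H, W, 3) layout used for IMAGE tensors.

import numpy as np
import torch
from mss import mss

def grab_region(x, y, width, height):
    with mss() as sct:
        raw = np.array(sct.grab({"top": y, "left": x, "width": width, "height": height}))
    rgb = raw[..., [2, 1, 0]]                                  # BGRA -> RGB, drop alpha
    return torch.from_numpy(rgb).float().unsqueeze(0) / 255.0  # (1, H, W, 3) in 0..1

# frames = grab_region(0, 0, 256, 256)   # -> torch.Size([1, 256, 256, 3])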
+""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "x": ("INT", {"default": 0,"min": 0, "max": 4096, "step": 1}), + "y": ("INT", {"default": 0,"min": 0, "max": 4096, "step": 1}), + "width": ("INT", {"default": 512,"min": 0, "max": 4096, "step": 1}), + "height": ("INT", {"default": 512,"min": 0, "max": 4096, "step": 1}), + "cam_index": ("INT", {"default": 0,"min": 0, "max": 255, "step": 1}), + "release": ("BOOLEAN", {"default": False}), + }, + } + + def capture(self, x, y, cam_index, width, height, release): + # Check if the camera index has changed or the capture object doesn't exist + if not hasattr(self, "cap") or self.cap is None or self.current_cam_index != cam_index: + if hasattr(self, "cap") and self.cap is not None: + self.cap.release() + self.current_cam_index = cam_index + self.cap = cv2.VideoCapture(cam_index) + try: + self.cap.set(cv2.CAP_PROP_FRAME_WIDTH, width) + self.cap.set(cv2.CAP_PROP_FRAME_HEIGHT, height) + except: + pass + if not self.cap.isOpened(): + raise Exception("Could not open webcam") + + ret, frame = self.cap.read() + if not ret: + raise Exception("Failed to capture image from webcam") + + # Crop the frame to the specified bbox + frame = frame[y:y+height, x:x+width] + img_torch = torch.from_numpy(frame[..., [2, 1, 0]]).float() / 255.0 + + if release: + self.cap.release() + self.cap = None + + return (img_torch.unsqueeze(0),) + +class AddLabel: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image":("IMAGE",), + "text_x": ("INT", {"default": 10, "min": 0, "max": 4096, "step": 1}), + "text_y": ("INT", {"default": 2, "min": 0, "max": 4096, "step": 1}), + "height": ("INT", {"default": 48, "min": 0, "max": 4096, "step": 1}), + "font_size": ("INT", {"default": 32, "min": 0, "max": 4096, "step": 1}), + "font_color": ("STRING", {"default": "white"}), + "label_color": ("STRING", {"default": "black"}), + "font": (folder_paths.get_filename_list("kjnodes_fonts"), ), + "text": ("STRING", {"default": "Text"}), + "direction": ( + [ 'up', + 'down', + 'left', + 'right', + 'overlay' + ], + { + "default": 'up' + }), + }, + "optional":{ + "caption": ("STRING", {"default": "", "forceInput": True}), + } + } + RETURN_TYPES = ("IMAGE",) + FUNCTION = "addlabel" + CATEGORY = "KJNodes/text" + DESCRIPTION = """ +Creates a new with the given text, and concatenates it to +either above or below the input image. +Note that this changes the input image's height! 
+Fonts are loaded from this folder: +ComfyUI/custom_nodes/ComfyUI-KJNodes/fonts +""" + + def addlabel(self, image, text_x, text_y, text, height, font_size, font_color, label_color, font, direction, caption=""): + batch_size = image.shape[0] + width = image.shape[2] + + font_path = os.path.join(script_directory, "fonts", "TTNorms-Black.otf") if font == "TTNorms-Black.otf" else folder_paths.get_full_path("kjnodes_fonts", font) + + def process_image(input_image, caption_text): + if direction == 'overlay': + pil_image = Image.fromarray((input_image.cpu().numpy() * 255).astype(np.uint8)) + else: + label_image = Image.new("RGB", (width, height), label_color) + pil_image = label_image + + draw = ImageDraw.Draw(pil_image) + font = ImageFont.truetype(font_path, font_size) + + words = caption_text.split() + + lines = [] + current_line = [] + current_line_width = 0 + for word in words: + word_width = font.getbbox(word)[2] + if current_line_width + word_width <= width - 2 * text_x: + current_line.append(word) + current_line_width += word_width + font.getbbox(" ")[2] # Add space width + else: + lines.append(" ".join(current_line)) + current_line = [word] + current_line_width = word_width + + if current_line: + lines.append(" ".join(current_line)) + + y_offset = text_y + for line in lines: + try: + draw.text((text_x, y_offset), line, font=font, fill=font_color, features=['-liga']) + except: + draw.text((text_x, y_offset), line, font=font, fill=font_color) + y_offset += font_size # Move to the next line + + processed_image = torch.from_numpy(np.array(pil_image).astype(np.float32) / 255.0).unsqueeze(0) + return processed_image + + if caption == "": + processed_images = [process_image(img, text) for img in image] + else: + assert len(caption) == batch_size, f"Number of captions {(len(caption))} does not match number of images" + processed_images = [process_image(img, cap) for img, cap in zip(image, caption)] + processed_batch = torch.cat(processed_images, dim=0) + + # Combine images based on direction + if direction == 'down': + combined_images = torch.cat((image, processed_batch), dim=1) + elif direction == 'up': + combined_images = torch.cat((processed_batch, image), dim=1) + elif direction == 'left': + processed_batch = torch.rot90(processed_batch, 3, (2, 3)).permute(0, 3, 1, 2) + combined_images = torch.cat((processed_batch, image), dim=2) + elif direction == 'right': + processed_batch = torch.rot90(processed_batch, 3, (2, 3)).permute(0, 3, 1, 2) + combined_images = torch.cat((image, processed_batch), dim=2) + else: + combined_images = processed_batch + + return (combined_images,) + +class GetImageSizeAndCount: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image": ("IMAGE",), + }} + + RETURN_TYPES = ("IMAGE","INT", "INT", "INT",) + RETURN_NAMES = ("image", "width", "height", "count",) + FUNCTION = "getsize" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Returns width, height and batch size of the image, +and passes it through unchanged. + +""" + + def getsize(self, image): + width = image.shape[2] + height = image.shape[1] + count = image.shape[0] + return {"ui": { + "text": [f"{count}x{width}x{height}"]}, + "result": (image, width, height, count) + } + +class ImageBatchRepeatInterleaving: + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "repeat" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Repeats each image in a batch by the specified number of times. 
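The caption handling in AddLabel above relies on a simple greedy word-wrap; a minimal sketch of the idea (assumes a recent Pillow, since getbbox is used to measure pixel widths; the helper name is illustrative):

from PIL import ImageFont

def wrap_caption(text, font, max_width, margin=10):
    space_w = font.getbbox(" ")[2]
    lines, line, line_w = [], [], 0
    for word in text.split():
        word_w = font.getbbox(word)[2]
        if line and line_w + word_w > max_width - 2 * margin:
            lines.append(" ".join(line))    # line is full, start a new one
            line, line_w = [], 0
        line.append(word)
        line_w += word_w + space_w
    if line:
        lines.append(" ".join(line))
    return lines

print(wrap_caption("a caption that is far too long for one short line", ImageFont.load_default(), 120))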
+Example batch of 5 images: 0, 1 ,2, 3, 4 +with repeats 2 becomes batch of 10 images: 0, 0, 1, 1, 2, 2, 3, 3, 4, 4 +""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "images": ("IMAGE",), + "repeats": ("INT", {"default": 1, "min": 1, "max": 4096}), + }, + } + + def repeat(self, images, repeats): + + repeated_images = torch.repeat_interleave(images, repeats=repeats, dim=0) + return (repeated_images, ) + +class ImageUpscaleWithModelBatched: + @classmethod + def INPUT_TYPES(s): + return {"required": { "upscale_model": ("UPSCALE_MODEL",), + "images": ("IMAGE",), + "per_batch": ("INT", {"default": 16, "min": 1, "max": 4096, "step": 1}), + }} + RETURN_TYPES = ("IMAGE",) + FUNCTION = "upscale" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Same as ComfyUI native model upscaling node, +but allows setting sub-batches for reduced VRAM usage. +""" + def upscale(self, upscale_model, images, per_batch): + + device = model_management.get_torch_device() + upscale_model.to(device) + in_img = images.movedim(-1,-3) + + steps = in_img.shape[0] + pbar = ProgressBar(steps) + t = [] + + for start_idx in range(0, in_img.shape[0], per_batch): + sub_images = upscale_model(in_img[start_idx:start_idx+per_batch].to(device)) + t.append(sub_images.cpu()) + # Calculate the number of images processed in this batch + batch_count = sub_images.shape[0] + # Update the progress bar by the number of images processed in this batch + pbar.update(batch_count) + upscale_model.cpu() + + t = torch.cat(t, dim=0).permute(0, 2, 3, 1).cpu() + + return (t,) + +class ImageNormalize_Neg1_To_1: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "images": ("IMAGE",), + + }} + RETURN_TYPES = ("IMAGE",) + FUNCTION = "normalize" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Normalize the images to be in the range [-1, 1] +""" + + def normalize(self,images): + images = images * 2.0 - 1.0 + return (images,) + +class RemapImageRange: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image": ("IMAGE",), + "min": ("FLOAT", {"default": 0.0,"min": -10.0, "max": 1.0, "step": 0.01}), + "max": ("FLOAT", {"default": 1.0,"min": 0.0, "max": 10.0, "step": 0.01}), + "clamp": ("BOOLEAN", {"default": True}), + }, + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "remap" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Remaps the image values to the specified range. +""" + + def remap(self, image, min, max, clamp): + if image.dtype == torch.float16: + image = image.to(torch.float32) + image = min + image * (max - min) + if clamp: + image = torch.clamp(image, min=0.0, max=1.0) + return (image, ) + +class SplitImageChannels: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image": ("IMAGE",), + }, + } + + RETURN_TYPES = ("IMAGE", "IMAGE", "IMAGE", "MASK") + RETURN_NAMES = ("red", "green", "blue", "mask") + FUNCTION = "split" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Splits image channels into images where the selected channel +is repeated for all channels, and the alpha as a mask. 
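The VRAM-friendly loop in ImageUpscaleWithModelBatched above follows a generic sub-batching pattern; a sketch with a stand-in model callable (all names here are illustrative):

import torch
import torch.nn.functional as F

def run_in_sub_batches(model, images_nchw, per_batch=16, device="cpu"):
    chunks = []
    for start in range(0, images_nchw.shape[0], per_batch):
        batch = images_nchw[start:start + per_batch].to(device)
        with torch.no_grad():
            chunks.append(model(batch).cpu())   # keep results on CPU to bound VRAM
    return torch.cat(chunks, dim=0)

doubler = lambda x: F.interpolate(x, scale_factor=2, mode="nearest")
print(run_in_sub_batches(doubler, torch.rand(5, 3, 8, 8), per_batch=2).shape)  # (5, 3, 16, 16)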
+""" + + def split(self, image): + red = image[:, :, :, 0:1] # Red channel + green = image[:, :, :, 1:2] # Green channel + blue = image[:, :, :, 2:3] # Blue channel + alpha = image[:, :, :, 3:4] # Alpha channel + alpha = alpha.squeeze(-1) + + # Repeat the selected channel for all channels + red = torch.cat([red, red, red], dim=3) + green = torch.cat([green, green, green], dim=3) + blue = torch.cat([blue, blue, blue], dim=3) + return (red, green, blue, alpha) + +class MergeImageChannels: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "red": ("IMAGE",), + "green": ("IMAGE",), + "blue": ("IMAGE",), + + }, + "optional": { + "alpha": ("MASK", {"default": None}), + }, + } + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("image",) + FUNCTION = "merge" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Merges channel data into an image. +""" + + def merge(self, red, green, blue, alpha=None): + image = torch.stack([ + red[..., 0, None], # Red channel + green[..., 1, None], # Green channel + blue[..., 2, None] # Blue channel + ], dim=-1) + image = image.squeeze(-2) + if alpha is not None: + image = torch.cat([image, alpha.unsqueeze(-1)], dim=-1) + return (image,) + +class ImagePadForOutpaintMasked: + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "left": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + "top": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + "right": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + "bottom": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + "feathering": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + }, + "optional": { + "mask": ("MASK",), + } + } + + RETURN_TYPES = ("IMAGE", "MASK") + FUNCTION = "expand_image" + + CATEGORY = "image" + + def expand_image(self, image, left, top, right, bottom, feathering, mask=None): + if mask is not None: + if torch.allclose(mask, torch.zeros_like(mask)): + print("Warning: The incoming mask is fully black. 
Handling it as None.") + mask = None + B, H, W, C = image.size() + + new_image = torch.ones( + (B, H + top + bottom, W + left + right, C), + dtype=torch.float32, + ) * 0.5 + + new_image[:, top:top + H, left:left + W, :] = image + + if mask is None: + new_mask = torch.ones( + (B, H + top + bottom, W + left + right), + dtype=torch.float32, + ) + + t = torch.zeros( + (B, H, W), + dtype=torch.float32 + ) + else: + # If a mask is provided, pad it to fit the new image size + mask = F.pad(mask, (left, right, top, bottom), mode='constant', value=0) + mask = 1 - mask + t = torch.zeros_like(mask) + + if feathering > 0 and feathering * 2 < H and feathering * 2 < W: + + for i in range(H): + for j in range(W): + dt = i if top != 0 else H + db = H - i if bottom != 0 else H + + dl = j if left != 0 else W + dr = W - j if right != 0 else W + + d = min(dt, db, dl, dr) + + if d >= feathering: + continue + + v = (feathering - d) / feathering + + if mask is None: + t[:, i, j] = v * v + else: + t[:, top + i, left + j] = v * v + + if mask is None: + new_mask[:, top:top + H, left:left + W] = t + return (new_image, new_mask,) + else: + return (new_image, mask,) + +class ImagePadForOutpaintTargetSize: + upscale_methods = ["nearest-exact", "bilinear", "area", "bicubic", "lanczos"] + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "target_width": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + "target_height": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + "feathering": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "upscale_method": (s.upscale_methods,), + }, + "optional": { + "mask": ("MASK",), + } + } + + RETURN_TYPES = ("IMAGE", "MASK") + FUNCTION = "expand_image" + + CATEGORY = "image" + + def expand_image(self, image, target_width, target_height, feathering, upscale_method, mask=None): + B, H, W, C = image.size() + new_height = 0 + new_width = 0 + # Calculate the scaling factor while maintaining aspect ratio + scaling_factor = min(target_width / W, target_height / H) + + # Check if the image needs to be downscaled + if scaling_factor < 1: + image = image.movedim(-1,1) + # Calculate the new width and height after downscaling + new_width = int(W * scaling_factor) + new_height = int(H * scaling_factor) + + # Downscale the image + image_scaled = common_upscale(image, new_width, new_height, upscale_method, "disabled").movedim(1,-1) + if mask is not None: + mask_scaled = mask.unsqueeze(0) # Add an extra dimension for batch size + mask_scaled = F.interpolate(mask_scaled, size=(new_height, new_width), mode="nearest") + mask_scaled = mask_scaled.squeeze(0) # Remove the extra dimension after interpolation + else: + mask_scaled = mask + else: + # If downscaling is not needed, use the original image dimensions + image_scaled = image + mask_scaled = mask + + # Calculate how much padding is needed to reach the target dimensions + pad_top = max(0, (target_height - new_height) // 2) + pad_bottom = max(0, target_height - new_height - pad_top) + pad_left = max(0, (target_width - new_width) // 2) + pad_right = max(0, target_width - new_width - pad_left) + + # Now call the original expand_image with the calculated padding + return ImagePadForOutpaintMasked.expand_image(self, image_scaled, pad_left, pad_top, pad_right, pad_bottom, feathering, mask_scaled) + +class ImageAndMaskPreview(SaveImage): + def __init__(self): + self.output_dir = folder_paths.get_temp_directory() + self.type = "temp" + self.prefix_append = "_temp_" + 
''.join(random.choice("abcdefghijklmnopqrstupvxyz") for x in range(5))
+ self.compress_level = 4
+
+ @classmethod
+ def INPUT_TYPES(s):
+ return {
+ "required": {
+ "mask_opacity": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}),
+ "mask_color": ("STRING", {"default": "255, 255, 255"}),
+ "pass_through": ("BOOLEAN", {"default": False}),
+ },
+ "optional": {
+ "image": ("IMAGE",),
+ "mask": ("MASK",),
+ },
+ "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"},
+ }
+ RETURN_TYPES = ("IMAGE",)
+ RETURN_NAMES = ("composite",)
+ FUNCTION = "execute"
+ CATEGORY = "KJNodes"
+ DESCRIPTION = """
+Previews an image or a mask; when both inputs are used,
+the mask is composited on top of the image.
+With pass_through on, the preview is disabled and the
+composite is returned from the composite slot instead;
+this allows the result to be passed on to video combine
+nodes, for example.
+"""
+
+ def execute(self, mask_opacity, mask_color, pass_through, filename_prefix="ComfyUI", image=None, mask=None, prompt=None, extra_pnginfo=None):
+ if mask is not None and image is None:
+ preview = mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])).movedim(1, -1).expand(-1, -1, -1, 3)
+ elif mask is None and image is not None:
+ preview = image
+ elif mask is not None and image is not None:
+ mask_adjusted = mask * mask_opacity
+ mask_image = mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])).movedim(1, -1).expand(-1, -1, -1, 3).clone()
+
+ if ',' in mask_color:
+ color_list = np.clip([int(channel) for channel in mask_color.split(',')], 0, 255) # RGB format
+ else:
+ mask_color = mask_color.lstrip('#')
+ color_list = [int(mask_color[i:i+2], 16) for i in (0, 2, 4)] # Hex format
+ mask_image[:, :, :, 0] = color_list[0] / 255 # Red channel
+ mask_image[:, :, :, 1] = color_list[1] / 255 # Green channel
+ mask_image[:, :, :, 2] = color_list[2] / 255 # Blue channel
+
+ preview, = ImageCompositeMasked.composite(self, image, mask_image, 0, 0, True, mask_adjusted)
+ if pass_through:
+ return (preview, )
+ return(self.save_images(preview, filename_prefix, prompt, extra_pnginfo))
+
+class CrossFadeImages:
+
+ RETURN_TYPES = ("IMAGE",)
+ FUNCTION = "crossfadeimages"
+ CATEGORY = "KJNodes/image"
+
+ @classmethod
+ def INPUT_TYPES(s):
+ return {
+ "required": {
+ "images_1": ("IMAGE",),
+ "images_2": ("IMAGE",),
+ "interpolation": (["linear", "ease_in", "ease_out", "ease_in_out", "bounce", "elastic", "glitchy", "exponential_ease_out"],),
+ "transition_start_index": ("INT", {"default": 1,"min": 0, "max": 4096, "step": 1}),
+ "transitioning_frames": ("INT", {"default": 1,"min": 0, "max": 4096, "step": 1}),
+ "start_level": ("FLOAT", {"default": 0.0,"min": 0.0, "max": 1.0, "step": 0.01}),
+ "end_level": ("FLOAT", {"default": 1.0,"min": 0.0, "max": 1.0, "step": 0.01}),
+ },
+ }
+
+ def crossfadeimages(self, images_1, images_2, transition_start_index, transitioning_frames, interpolation, start_level, end_level):
+
+ def crossfade(images_1, images_2, alpha):
+ crossfade = (1 - alpha) * images_1 + alpha * images_2
+ return crossfade
+ def ease_in(t):
+ return t * t
+ def ease_out(t):
+ return 1 - (1 - t) * (1 - t)
+ def ease_in_out(t):
+ return 3 * t * t - 2 * t * t * t
+ def bounce(t):
+ if t < 0.5:
+ return ease_out(t * 2) * 0.5
+ else:
+ return ease_in((t - 0.5) * 2) * 0.5 + 0.5
+ def elastic(t):
+ return math.sin(13 * math.pi / 2 * t) * math.pow(2, 10 * (t - 1))
+ def glitchy(t):
+ return t + 0.1 * math.sin(40 * t)
+ def exponential_ease_out(t):
+ return 1 - (1 - t) ** 4
+
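+ # For reference: these curves reshape the linear alpha before the frames are blended,
+ # e.g. ease_in(0.5) = 0.25, ease_out(0.5) = 0.75, ease_in_out(0.5) = 0.5 and
+ # exponential_ease_out(0.5) = 0.9375; elastic and glitchy intentionally oscillate.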
easing_functions = { + "linear": lambda t: t, + "ease_in": ease_in, + "ease_out": ease_out, + "ease_in_out": ease_in_out, + "bounce": bounce, + "elastic": elastic, + "glitchy": glitchy, + "exponential_ease_out": exponential_ease_out, + } + + crossfade_images = [] + + alphas = torch.linspace(start_level, end_level, transitioning_frames) + for i in range(transitioning_frames): + alpha = alphas[i] + image1 = images_1[i + transition_start_index] + image2 = images_2[i + transition_start_index] + easing_function = easing_functions.get(interpolation) + alpha = easing_function(alpha) # Apply the easing function to the alpha value + + crossfade_image = crossfade(image1, image2, alpha) + crossfade_images.append(crossfade_image) + + # Convert crossfade_images to tensor + crossfade_images = torch.stack(crossfade_images, dim=0) + # Get the last frame result of the interpolation + last_frame = crossfade_images[-1] + # Calculate the number of remaining frames from images_2 + remaining_frames = len(images_2) - (transition_start_index + transitioning_frames) + # Crossfade the remaining frames with the last used alpha value + for i in range(remaining_frames): + alpha = alphas[-1] + image1 = images_1[i + transition_start_index + transitioning_frames] + image2 = images_2[i + transition_start_index + transitioning_frames] + easing_function = easing_functions.get(interpolation) + alpha = easing_function(alpha) # Apply the easing function to the alpha value + + crossfade_image = crossfade(image1, image2, alpha) + crossfade_images = torch.cat([crossfade_images, crossfade_image.unsqueeze(0)], dim=0) + # Append the beginning of images_1 + beginning_images_1 = images_1[:transition_start_index] + crossfade_images = torch.cat([beginning_images_1, crossfade_images], dim=0) + return (crossfade_images, ) + +class GetImageRangeFromBatch: + + RETURN_TYPES = ("IMAGE", "MASK", ) + FUNCTION = "imagesfrombatch" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Creates a new batch using images from the input, +batch, starting from start_index. +""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "start_index": ("INT", {"default": 0,"min": -1, "max": 4096, "step": 1}), + "num_frames": ("INT", {"default": 1,"min": 1, "max": 4096, "step": 1}), + }, + "optional": { + "images": ("IMAGE",), + "masks": ("MASK",), + } + } + + def imagesfrombatch(self, start_index, num_frames, images=None, masks=None): + + chosen_images = None + chosen_masks = None + + # Process images if provided + if images is not None: + if start_index == -1: + start_index = len(images) - num_frames + if start_index < 0 or start_index >= len(images): + raise ValueError("Start index is out of range") + end_index = start_index + num_frames + if end_index > len(images): + raise ValueError("End index is out of range") + chosen_images = images[start_index:end_index] + + # Process masks if provided + if masks is not None: + if start_index == -1: + start_index = len(masks) - num_frames + if start_index < 0 or start_index >= len(masks): + raise ValueError("Start index is out of range for masks") + end_index = start_index + num_frames + if end_index > len(masks): + raise ValueError("End index is out of range for masks") + chosen_masks = masks[start_index:end_index] + + return (chosen_images, chosen_masks,) + +class GetImagesFromBatchIndexed: + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "indexedimagesfrombatch" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Selects and returns the images at the specified indices as an image batch. 
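The selection rule GetImageRangeFromBatch applies above can be checked on a toy batch (helper name hypothetical): a start_index of -1 means "take the last num_frames", otherwise a bounds-checked slice is returned.

import torch

def take_range(batch, start_index, num_frames):
    if start_index == -1:
        start_index = len(batch) - num_frames
    if not 0 <= start_index < len(batch) or start_index + num_frames > len(batch):
        raise ValueError("Start index is out of range")
    return batch[start_index:start_index + num_frames]

frames = torch.arange(5).view(5, 1, 1, 1).float()      # stand-in for a 5-image batch
print(take_range(frames, -1, 2).flatten().tolist())    # [3.0, 4.0]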
+""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "images": ("IMAGE",), + "indexes": ("STRING", {"default": "0, 1, 2", "multiline": True}), + }, + } + + def indexedimagesfrombatch(self, images, indexes): + + # Parse the indexes string into a list of integers + index_list = [int(index.strip()) for index in indexes.split(',')] + + # Convert list of indices to a PyTorch tensor + indices_tensor = torch.tensor(index_list, dtype=torch.long) + + # Select the images at the specified indices + chosen_images = images[indices_tensor] + + return (chosen_images,) + +class InsertImagesToBatchIndexed: + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "insertimagesfrombatch" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Inserts images at the specified indices into the original image batch. +""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "original_images": ("IMAGE",), + "images_to_insert": ("IMAGE",), + "indexes": ("STRING", {"default": "0, 1, 2", "multiline": True}), + }, + } + + def insertimagesfrombatch(self, original_images, images_to_insert, indexes): + + # Parse the indexes string into a list of integers + index_list = [int(index.strip()) for index in indexes.split(',')] + + # Convert list of indices to a PyTorch tensor + indices_tensor = torch.tensor(index_list, dtype=torch.long) + + # Ensure the images_to_insert is a tensor + if not isinstance(images_to_insert, torch.Tensor): + images_to_insert = torch.tensor(images_to_insert) + + # Insert the images at the specified indices + for index, image in zip(indices_tensor, images_to_insert): + original_images[index] = image + + return (original_images,) + +class ReplaceImagesInBatch: + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "replace" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Replaces the images in a batch, starting from the specified start index, +with the replacement images. +""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "original_images": ("IMAGE",), + "replacement_images": ("IMAGE",), + "start_index": ("INT", {"default": 1,"min": 0, "max": 4096, "step": 1}), + }, + } + + def replace(self, original_images, replacement_images, start_index): + images = None + if start_index >= len(original_images): + raise ValueError("GetImageRangeFromBatch: Start index is out of range") + end_index = start_index + len(replacement_images) + if end_index > len(original_images): + raise ValueError("GetImageRangeFromBatch: End index is out of range") + # Create a copy of the original_images tensor + original_images_copy = original_images.clone() + original_images_copy[start_index:end_index] = replacement_images + images = original_images_copy + return (images, ) + + +class ReverseImageBatch: + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "reverseimagebatch" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Reverses the order of the images in a batch. +""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "images": ("IMAGE",), + }, + } + + def reverseimagebatch(self, images): + reversed_images = torch.flip(images, [0]) + return (reversed_images, ) + +class ImageBatchMulti: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "inputcount": ("INT", {"default": 2, "min": 2, "max": 1000, "step": 1}), + "image_1": ("IMAGE", ), + "image_2": ("IMAGE", ), + }, + } + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("images",) + FUNCTION = "combine" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Creates an image batch from multiple images. 
+You can set how many inputs the node has, +with the **inputcount** and clicking update. +""" + + def combine(self, inputcount, **kwargs): + from nodes import ImageBatch + image_batch_node = ImageBatch() + image = kwargs["image_1"] + for c in range(1, inputcount): + new_image = kwargs[f"image_{c + 1}"] + image, = image_batch_node.batch(image, new_image) + return (image,) + +class ImageAddMulti: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "inputcount": ("INT", {"default": 2, "min": 2, "max": 1000, "step": 1}), + "image_1": ("IMAGE", ), + "image_2": ("IMAGE", ), + "blending": ( + [ 'add', + 'subtract', + 'multiply', + 'difference', + ], + { + "default": 'add' + }), + }, + } + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("images",) + FUNCTION = "add" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Add blends multiple images together. +You can set how many inputs the node has, +with the **inputcount** and clicking update. +""" + + def add(self, inputcount, blending, **kwargs): + image = kwargs["image_1"] + for c in range(1, inputcount): + new_image = kwargs[f"image_{c + 1}"] + if blending == "add": + image = torch.add(image * 0.5, new_image * 0.5) + elif blending == "subtract": + image = torch.sub(image * 0.5, new_image * 0.5) + elif blending == "multiply": + image = torch.mul(image * 0.5, new_image * 0.5) + elif blending == "difference": + image = torch.sub(image, new_image) + return (image,) + +class ImageConcatMulti: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "inputcount": ("INT", {"default": 2, "min": 2, "max": 1000, "step": 1}), + "image_1": ("IMAGE", ), + "image_2": ("IMAGE", ), + "direction": ( + [ 'right', + 'down', + 'left', + 'up', + ], + { + "default": 'right' + }), + "match_image_size": ("BOOLEAN", {"default": False}), + }, + } + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("images",) + FUNCTION = "combine" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Creates an image from multiple images. +You can set how many inputs the node has, +with the **inputcount** and clicking update. +""" + + def combine(self, inputcount, direction, match_image_size, **kwargs): + image = kwargs["image_1"] + first_image_shape = None + if first_image_shape is None: + first_image_shape = image.shape + for c in range(1, inputcount): + new_image = kwargs[f"image_{c + 1}"] + image, = ImageConcanate.concanate(self, image, new_image, direction, match_image_size, first_image_shape=first_image_shape) + first_image_shape = None + return (image,) + +class PreviewAnimation: + def __init__(self): + self.output_dir = folder_paths.get_temp_directory() + self.type = "temp" + self.prefix_append = "_temp_" + ''.join(random.choice("abcdefghijklmnopqrstupvxyz") for x in range(5)) + self.compress_level = 1 + + methods = {"default": 4, "fastest": 0, "slowest": 6} + @classmethod + def INPUT_TYPES(s): + return {"required": + { + "fps": ("FLOAT", {"default": 8.0, "min": 0.01, "max": 1000.0, "step": 0.01}), + }, + "optional": { + "images": ("IMAGE", ), + "masks": ("MASK", ), + }, + } + + RETURN_TYPES = () + FUNCTION = "preview" + OUTPUT_NODE = True + CATEGORY = "KJNodes/image" + + def preview(self, fps, images=None, masks=None): + filename_prefix = "AnimPreview" + full_output_folder, filename, counter, subfolder, filename_prefix = folder_paths.get_save_image_path(filename_prefix, self.output_dir) + results = list() + + pil_images = [] + + if images is not None and masks is not None: + for image in images: + i = 255. 
* image.cpu().numpy() + img = Image.fromarray(np.clip(i, 0, 255).astype(np.uint8)) + pil_images.append(img) + for mask in masks: + if pil_images: + mask_np = mask.cpu().numpy() + mask_np = np.clip(mask_np * 255, 0, 255).astype(np.uint8) # Convert to values between 0 and 255 + mask_img = Image.fromarray(mask_np, mode='L') + img = pil_images.pop(0) # Remove and get the first image + img = img.convert("RGBA") # Convert base image to RGBA + + # Create a new RGBA image based on the grayscale mask + rgba_mask_img = Image.new("RGBA", img.size, (255, 255, 255, 255)) + rgba_mask_img.putalpha(mask_img) # Use the mask image as the alpha channel + + # Composite the RGBA mask onto the base image + composited_img = Image.alpha_composite(img, rgba_mask_img) + pil_images.append(composited_img) # Add the composited image back + + elif images is not None and masks is None: + for image in images: + i = 255. * image.cpu().numpy() + img = Image.fromarray(np.clip(i, 0, 255).astype(np.uint8)) + pil_images.append(img) + + elif masks is not None and images is None: + for mask in masks: + mask_np = 255. * mask.cpu().numpy() + mask_img = Image.fromarray(np.clip(mask_np, 0, 255).astype(np.uint8)) + pil_images.append(mask_img) + else: + print("PreviewAnimation: No images or masks provided") + return { "ui": { "images": results, "animated": (None,), "text": "empty" }} + + num_frames = len(pil_images) + + c = len(pil_images) + for i in range(0, c, num_frames): + file = f"{filename}_{counter:05}_.webp" + pil_images[i].save(os.path.join(full_output_folder, file), save_all=True, duration=int(1000.0/fps), append_images=pil_images[i + 1:i + num_frames], lossless=False, quality=80, method=4) + results.append({ + "filename": file, + "subfolder": subfolder, + "type": self.type + }) + counter += 1 + + animated = num_frames != 1 + return { "ui": { "images": results, "animated": (animated,), "text": [f"{num_frames}x{pil_images[0].size[0]}x{pil_images[0].size[1]}"] } } + +class ImageResizeKJ: + upscale_methods = ["nearest-exact", "bilinear", "area", "bicubic", "lanczos"] + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "width": ("INT", { "default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 8, }), + "height": ("INT", { "default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 8, }), + "upscale_method": (s.upscale_methods,), + "keep_proportion": ("BOOLEAN", { "default": False }), + "divisible_by": ("INT", { "default": 2, "min": 0, "max": 512, "step": 1, }), + }, + "optional" : { + "width_input": ("INT", { "forceInput": True}), + "height_input": ("INT", { "forceInput": True}), + "get_image_size": ("IMAGE",), + "crop": (["disabled","center"],), + } + } + + RETURN_TYPES = ("IMAGE", "INT", "INT",) + RETURN_NAMES = ("IMAGE", "width", "height",) + FUNCTION = "resize" + CATEGORY = "KJNodes/image" + DESCRIPTION = """ +Resizes the image to the specified width and height. +Size can be retrieved from the inputs, and the final scale +is determined in this order of importance: +- get_image_size +- width_input and height_input +- width and height widgets + +Keep proportions keeps the aspect ratio of the image, by +highest dimension. 
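+divisible_by rounds the final width and height down to the nearest multiple of the given value (it is ignored when get_image_size is connected).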
+""" + + def resize(self, image, width, height, keep_proportion, upscale_method, divisible_by, + width_input=None, height_input=None, get_image_size=None, crop="disabled"): + B, H, W, C = image.shape + + if width_input: + width = width_input + if height_input: + height = height_input + if get_image_size is not None: + _, height, width, _ = get_image_size.shape + + if keep_proportion and get_image_size is None: + # If one of the dimensions is zero, calculate it to maintain the aspect ratio + if width == 0 and height != 0: + ratio = height / H + width = round(W * ratio) + elif height == 0 and width != 0: + ratio = width / W + height = round(H * ratio) + elif width != 0 and height != 0: + # Scale based on which dimension is smaller in proportion to the desired dimensions + ratio = min(width / W, height / H) + width = round(W * ratio) + height = round(H * ratio) + else: + if width == 0: + width = W + if height == 0: + height = H + + if divisible_by > 1 and get_image_size is None: + width = width - (width % divisible_by) + height = height - (height % divisible_by) + + image = image.movedim(-1,1) + image = common_upscale(image, width, height, upscale_method, crop) + image = image.movedim(1,-1) + + return(image, image.shape[2], image.shape[1],) + +class LoadAndResizeImage: + _color_channels = ["alpha", "red", "green", "blue"] + @classmethod + def INPUT_TYPES(s): + input_dir = folder_paths.get_input_directory() + files = [f for f in os.listdir(input_dir) if os.path.isfile(os.path.join(input_dir, f))] + return {"required": + { + "image": (sorted(files), {"image_upload": True}), + "resize": ("BOOLEAN", { "default": False }), + "width": ("INT", { "default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 8, }), + "height": ("INT", { "default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 8, }), + "repeat": ("INT", { "default": 1, "min": 1, "max": 4096, "step": 1, }), + "keep_proportion": ("BOOLEAN", { "default": False }), + "divisible_by": ("INT", { "default": 2, "min": 0, "max": 512, "step": 1, }), + "mask_channel": (s._color_channels, {"tooltip": "Channel to use for the mask output"}), + "background_color": ("STRING", { "default": "", "tooltip": "Fills the alpha channel with the specified color."}), + }, + } + + CATEGORY = "KJNodes/image" + RETURN_TYPES = ("IMAGE", "MASK", "INT", "INT", "STRING",) + RETURN_NAMES = ("image", "mask", "width", "height","image_path",) + FUNCTION = "load_image" + + def load_image(self, image, resize, width, height, repeat, keep_proportion, divisible_by, mask_channel, background_color): + from PIL import ImageColor, Image, ImageOps, ImageSequence + import numpy as np + import torch + image_path = folder_paths.get_annotated_filepath(image) + + import node_helpers + img = node_helpers.pillow(Image.open, image_path) + + # Process the background_color + if background_color: + try: + # Try to parse as RGB tuple + bg_color_rgba = tuple(int(x.strip()) for x in background_color.split(',')) + except ValueError: + # If parsing fails, it might be a hex color or named color + if background_color.startswith('#') or background_color.lower() in ImageColor.colormap: + bg_color_rgba = ImageColor.getrgb(background_color) + else: + raise ValueError(f"Invalid background color: {background_color}") + + bg_color_rgba += (255,) # Add alpha channel + else: + bg_color_rgba = None # No background color specified + + output_images = [] + output_masks = [] + w, h = None, None + + excluded_formats = ['MPO'] + + W, H = img.size + if resize: + if keep_proportion: + ratio = min(width / W, height / H) + 
width = round(W * ratio) + height = round(H * ratio) + else: + if width == 0: + width = W + if height == 0: + height = H + + if divisible_by > 1: + width = width - (width % divisible_by) + height = height - (height % divisible_by) + else: + width, height = W, H + + for frame in ImageSequence.Iterator(img): + frame = node_helpers.pillow(ImageOps.exif_transpose, frame) + + if frame.mode == 'I': + frame = frame.point(lambda i: i * (1 / 255)) + + if frame.mode == 'P': + frame = frame.convert("RGBA") + elif 'A' in frame.getbands(): + frame = frame.convert("RGBA") + + # Extract alpha channel if it exists + if 'A' in frame.getbands() and bg_color_rgba: + alpha_mask = np.array(frame.getchannel('A')).astype(np.float32) / 255.0 + alpha_mask = 1. - torch.from_numpy(alpha_mask) + bg_image = Image.new("RGBA", frame.size, bg_color_rgba) + # Composite the frame onto the background + frame = Image.alpha_composite(bg_image, frame) + else: + alpha_mask = torch.zeros((64, 64), dtype=torch.float32, device="cpu") + + image = frame.convert("RGB") + + if len(output_images) == 0: + w = image.size[0] + h = image.size[1] + + if image.size[0] != w or image.size[1] != h: + continue + if resize: + image = image.resize((width, height), Image.Resampling.BILINEAR) + + image = np.array(image).astype(np.float32) / 255.0 + image = torch.from_numpy(image)[None,] + + c = mask_channel[0].upper() + if c in frame.getbands(): + if resize: + frame = frame.resize((width, height), Image.Resampling.BILINEAR) + mask = np.array(frame.getchannel(c)).astype(np.float32) / 255.0 + mask = torch.from_numpy(mask) + if c == 'A' and bg_color_rgba: + mask = alpha_mask + elif c == 'A': + mask = 1. - mask + else: + mask = torch.zeros((64, 64), dtype=torch.float32, device="cpu") + + output_images.append(image) + output_masks.append(mask.unsqueeze(0)) + + if len(output_images) > 1 and img.format not in excluded_formats: + output_image = torch.cat(output_images, dim=0) + output_mask = torch.cat(output_masks, dim=0) + else: + output_image = output_images[0] + output_mask = output_masks[0] + if repeat > 1: + output_image = output_image.repeat(repeat, 1, 1, 1) + output_mask = output_mask.repeat(repeat, 1, 1) + + return (output_image, output_mask, width, height, image_path) + + + @classmethod + def IS_CHANGED(s, image, **kwargs): + image_path = folder_paths.get_annotated_filepath(image) + m = hashlib.sha256() + with open(image_path, 'rb') as f: + m.update(f.read()) + return m.digest().hex() + + @classmethod + def VALIDATE_INPUTS(s, image): + if not folder_paths.exists_annotated_filepath(image): + return "Invalid image file: {}".format(image) + + return True + +class LoadImagesFromFolderKJ: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "folder": ("STRING", {"default": ""}), + }, + "optional": { + "image_load_cap": ("INT", {"default": 0, "min": 0, "step": 1}), + "start_index": ("INT", {"default": 0, "min": 0, "step": 1}), + } + } + + RETURN_TYPES = ("IMAGE", "MASK", "INT", "STRING",) + RETURN_NAMES = ("image", "mask", "count", "image_path",) + FUNCTION = "load_images" + + CATEGORY = "image" + + def load_images(self, folder, image_load_cap, start_index): + if not os.path.isdir(folder): + raise FileNotFoundError(f"Folder '{folder} cannot be found.'") + dir_files = os.listdir(folder) + if len(dir_files) == 0: + raise FileNotFoundError(f"No files in directory '{folder}'.") + + # Filter files by extension + valid_extensions = ['.jpg', '.jpeg', '.png', '.webp'] + dir_files = [f for f in dir_files if any(f.lower().endswith(ext) for ext in 
valid_extensions)] + + dir_files = sorted(dir_files) + dir_files = [os.path.join(folder, x) for x in dir_files] + + # start at start_index + dir_files = dir_files[start_index:] + + images = [] + masks = [] + image_path_list = [] + + limit_images = False + if image_load_cap > 0: + limit_images = True + image_count = 0 + + has_non_empty_mask = False + + for image_path in dir_files: + if os.path.isdir(image_path) and os.path.ex: + continue + if limit_images and image_count >= image_load_cap: + break + i = Image.open(image_path) + i = ImageOps.exif_transpose(i) + image = i.convert("RGB") + image = np.array(image).astype(np.float32) / 255.0 + image = torch.from_numpy(image)[None,] + if 'A' in i.getbands(): + mask = np.array(i.getchannel('A')).astype(np.float32) / 255.0 + mask = 1. - torch.from_numpy(mask) + has_non_empty_mask = True + else: + mask = torch.zeros((64, 64), dtype=torch.float32, device="cpu") + images.append(image) + masks.append(mask) + image_path_list.append(image_path) + image_count += 1 + + if len(images) == 1: + return (images[0], masks[0], 1) + + elif len(images) > 1: + image1 = images[0] + mask1 = None + + for image2 in images[1:]: + if image1.shape[1:] != image2.shape[1:]: + image2 = common_upscale(image2.movedim(-1, 1), image1.shape[2], image1.shape[1], "bilinear", "center").movedim(1, -1) + image1 = torch.cat((image1, image2), dim=0) + + for mask2 in masks[1:]: + if has_non_empty_mask: + if image1.shape[1:3] != mask2.shape: + mask2 = torch.nn.functional.interpolate(mask2.unsqueeze(0).unsqueeze(0), size=(image1.shape[2], image1.shape[1]), mode='bilinear', align_corners=False) + mask2 = mask2.squeeze(0) + else: + mask2 = mask2.unsqueeze(0) + else: + mask2 = mask2.unsqueeze(0) + + if mask1 is None: + mask1 = mask2 + else: + mask1 = torch.cat((mask1, mask2), dim=0) + + return (image1, mask1, len(images), image_path_list) + +class ImageGridtoBatch: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image": ("IMAGE", ), + "columns": ("INT", {"default": 3, "min": 2, "max": 8, "tooltip": "The number of columns in the grid."}), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "decompose" + CATEGORY = "KJNodes/image" + DESCRIPTION = "Converts a grid of images to a batch of images." 
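+    # NOTE: the grid is assumed to be square (columns x columns tiles), so the image height and width must both be divisible by columns.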
+ + def decompose(self, image, columns): + B, H, W, C = image.shape + orig_h = H // columns + orig_w = W // columns + + # Reshape and permute the image to get the grid + image = image.view(B, columns, orig_h, columns, orig_w, C) + image = image.permute(0, 1, 3, 2, 4, 5).contiguous() + image = image.view(B, columns * columns, orig_h, orig_w, C) + + # Reshape to the final batch tensor + img_tensor = image.view(-1, orig_h, orig_w, C) + + return img_tensor, \ No newline at end of file diff --git a/ComfyUI-KJNodes/nodes/intrinsic_lora_nodes.py b/ComfyUI-KJNodes/nodes/intrinsic_lora_nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..c8f125363836cc7721b4b61d100702594522d389 --- /dev/null +++ b/ComfyUI-KJNodes/nodes/intrinsic_lora_nodes.py @@ -0,0 +1,115 @@ +import folder_paths +import os +import torch +import torch.nn.functional as F +from comfy.utils import ProgressBar, load_torch_file +import comfy.sample +from nodes import CLIPTextEncode + +script_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +folder_paths.add_model_folder_path("intrinsic_loras", os.path.join(script_directory, "intrinsic_loras")) + +class Intrinsic_lora_sampling: + def __init__(self): + self.loaded_lora = None + + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "lora_name": (folder_paths.get_filename_list("intrinsic_loras"), ), + "task": ( + [ + 'depth map', + 'surface normals', + 'albedo', + 'shading', + ], + { + "default": 'depth map' + }), + "text": ("STRING", {"multiline": True, "default": ""}), + "clip": ("CLIP", ), + "vae": ("VAE", ), + "per_batch": ("INT", {"default": 16, "min": 1, "max": 4096, "step": 1}), + }, + "optional": { + "image": ("IMAGE",), + "optional_latent": ("LATENT",), + }, + } + + RETURN_TYPES = ("IMAGE", "LATENT",) + FUNCTION = "onestepsample" + CATEGORY = "KJNodes" + DESCRIPTION = """ +Sampler to use the intrinsic loras: +https://github.com/duxiaodan/intrinsic-lora +These LoRAs are tiny and thus included +with this node pack. 
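+Internally a single sampling step is run with zero added noise and a pass-through model sampling patch, so the model's x0 prediction is decoded directly.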
+""" + + def onestepsample(self, model, lora_name, clip, vae, text, task, per_batch, image=None, optional_latent=None): + pbar = ProgressBar(3) + + if optional_latent is None: + image_list = [] + for start_idx in range(0, image.shape[0], per_batch): + sub_pixels = vae.vae_encode_crop_pixels(image[start_idx:start_idx+per_batch]) + image_list.append(vae.encode(sub_pixels[:,:,:,:3])) + sample = torch.cat(image_list, dim=0) + else: + sample = optional_latent["samples"] + noise = torch.zeros(sample.size(), dtype=sample.dtype, layout=sample.layout, device="cpu") + prompt = task + "," + text + positive, = CLIPTextEncode.encode(self, clip, prompt) + negative = positive #negative shouldn't do anything in this scenario + + pbar.update(1) + + #custom model sampling to pass latent through as it is + class X0_PassThrough(comfy.model_sampling.EPS): + def calculate_denoised(self, sigma, model_output, model_input): + return model_output + def calculate_input(self, sigma, noise): + return noise + sampling_base = comfy.model_sampling.ModelSamplingDiscrete + sampling_type = X0_PassThrough + + class ModelSamplingAdvanced(sampling_base, sampling_type): + pass + model_sampling = ModelSamplingAdvanced(model.model.model_config) + + #load lora + model_clone = model.clone() + lora_path = folder_paths.get_full_path("intrinsic_loras", lora_name) + lora = load_torch_file(lora_path, safe_load=True) + self.loaded_lora = (lora_path, lora) + + model_clone_with_lora = comfy.sd.load_lora_for_models(model_clone, None, lora, 1.0, 0)[0] + + model_clone_with_lora.add_object_patch("model_sampling", model_sampling) + + samples = {"samples": comfy.sample.sample(model_clone_with_lora, noise, 1, 1.0, "euler", "simple", positive, negative, sample, + denoise=1.0, disable_noise=True, start_step=0, last_step=1, + force_full_denoise=True, noise_mask=None, callback=None, disable_pbar=True, seed=None)} + pbar.update(1) + + decoded = [] + for start_idx in range(0, samples["samples"].shape[0], per_batch): + decoded.append(vae.decode(samples["samples"][start_idx:start_idx+per_batch])) + image_out = torch.cat(decoded, dim=0) + + pbar.update(1) + + if task == 'depth map': + imax = image_out.max() + imin = image_out.min() + image_out = (image_out-imin)/(imax-imin) + image_out = torch.max(image_out, dim=3, keepdim=True)[0].repeat(1, 1, 1, 3) + elif task == 'surface normals': + image_out = F.normalize(image_out * 2 - 1, dim=3) / 2 + 0.5 + image_out = 1.0 - image_out + else: + image_out = image_out.clamp(-1.,1.) 
+ + return (image_out, samples,) \ No newline at end of file diff --git a/ComfyUI-KJNodes/nodes/mask_nodes.py b/ComfyUI-KJNodes/nodes/mask_nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..239e6a8c440c6cbeb2d71f8c330aa3ef78d4ffcf --- /dev/null +++ b/ComfyUI-KJNodes/nodes/mask_nodes.py @@ -0,0 +1,1249 @@ +import torch +import torch.nn.functional as F +from torchvision.transforms import functional as TF +from PIL import Image, ImageDraw, ImageFilter, ImageFont +import scipy.ndimage +import numpy as np +from contextlib import nullcontext +import os + +import model_management +from comfy.utils import ProgressBar +from nodes import MAX_RESOLUTION + +import folder_paths + +from ..utility.utility import tensor2pil, pil2tensor + +script_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + +class BatchCLIPSeg: + + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(s): + + return {"required": + { + "images": ("IMAGE",), + "text": ("STRING", {"multiline": False}), + "threshold": ("FLOAT", {"default": 0.5,"min": 0.0, "max": 10.0, "step": 0.001}), + "binary_mask": ("BOOLEAN", {"default": True}), + "combine_mask": ("BOOLEAN", {"default": False}), + "use_cuda": ("BOOLEAN", {"default": True}), + }, + "optional": + { + "blur_sigma": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 100.0, "step": 0.1}), + "opt_model": ("CLIPSEGMODEL", ), + "prev_mask": ("MASK", {"default": None}), + "image_bg_level": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "invert": ("BOOLEAN", {"default": False}), + } + } + + CATEGORY = "KJNodes/masking" + RETURN_TYPES = ("MASK", "IMAGE", ) + RETURN_NAMES = ("Mask", "Image", ) + FUNCTION = "segment_image" + DESCRIPTION = """ +Segments an image or batch of images using CLIPSeg. +""" + + def segment_image(self, images, text, threshold, binary_mask, combine_mask, use_cuda, blur_sigma=0.0, opt_model=None, prev_mask=None, invert= False, image_bg_level=0.5): + from transformers import CLIPSegProcessor, CLIPSegForImageSegmentation + import torchvision.transforms as transforms + offload_device = model_management.unet_offload_device() + device = model_management.get_torch_device() + if not use_cuda: + device = torch.device("cpu") + dtype = model_management.unet_dtype() + + if opt_model is None: + checkpoint_path = os.path.join(folder_paths.models_dir,'clip_seg', 'clipseg-rd64-refined-fp16') + if not hasattr(self, "model"): + try: + if not os.path.exists(checkpoint_path): + from huggingface_hub import snapshot_download + snapshot_download(repo_id="Kijai/clipseg-rd64-refined-fp16", local_dir=checkpoint_path, local_dir_use_symlinks=False) + self.model = CLIPSegForImageSegmentation.from_pretrained(checkpoint_path) + except: + checkpoint_path = "CIDAS/clipseg-rd64-refined" + self.model = CLIPSegForImageSegmentation.from_pretrained(checkpoint_path) + processor = CLIPSegProcessor.from_pretrained(checkpoint_path) + + else: + self.model = opt_model['model'] + processor = opt_model['processor'] + + self.model.to(dtype).to(device) + + B, H, W, C = images.shape + images = images.to(device) + + autocast_condition = (dtype != torch.float32) and not model_management.is_device_mps(device) + with torch.autocast(model_management.get_autocast_device(device), dtype=dtype) if autocast_condition else nullcontext(): + + PIL_images = [Image.fromarray(np.clip(255. 
* image.cpu().numpy().squeeze(), 0, 255).astype(np.uint8)) for image in images ] + prompt = [text] * len(images) + input_prc = processor(text=prompt, images=PIL_images, return_tensors="pt") + + for key in input_prc: + input_prc[key] = input_prc[key].to(device) + outputs = self.model(**input_prc) + + mask_tensor = torch.sigmoid(outputs.logits) + mask_tensor = (mask_tensor - mask_tensor.min()) / (mask_tensor.max() - mask_tensor.min()) + mask_tensor = torch.where(mask_tensor > (threshold), mask_tensor, torch.tensor(0, dtype=torch.float)) + print(mask_tensor.shape) + if len(mask_tensor.shape) == 2: + mask_tensor = mask_tensor.unsqueeze(0) + mask_tensor = F.interpolate(mask_tensor.unsqueeze(1), size=(H, W), mode='nearest') + mask_tensor = mask_tensor.squeeze(1) + + self.model.to(offload_device) + + if binary_mask: + mask_tensor = (mask_tensor > 0).float() + if blur_sigma > 0: + kernel_size = int(6 * int(blur_sigma) + 1) + blur = transforms.GaussianBlur(kernel_size=(kernel_size, kernel_size), sigma=(blur_sigma, blur_sigma)) + mask_tensor = blur(mask_tensor) + + if combine_mask: + mask_tensor = torch.max(mask_tensor, dim=0)[0] + mask_tensor = mask_tensor.unsqueeze(0).repeat(len(images),1,1) + + del outputs + model_management.soft_empty_cache() + + if prev_mask is not None: + if prev_mask.shape != mask_tensor.shape: + prev_mask = F.interpolate(prev_mask.unsqueeze(1), size=(H, W), mode='nearest') + mask_tensor = mask_tensor + prev_mask.to(device) + torch.clamp(mask_tensor, min=0.0, max=1.0) + + if invert: + mask_tensor = 1 - mask_tensor + + image_tensor = images * mask_tensor.unsqueeze(-1) + (1 - mask_tensor.unsqueeze(-1)) * image_bg_level + image_tensor = torch.clamp(image_tensor, min=0.0, max=1.0).cpu().float() + + mask_tensor = mask_tensor.cpu().float() + + return mask_tensor, image_tensor, + +class DownloadAndLoadCLIPSeg: + + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(s): + + return {"required": + { + "model": ( + [ 'Kijai/clipseg-rd64-refined-fp16', + 'CIDAS/clipseg-rd64-refined', + ], + ), + }, + } + + CATEGORY = "KJNodes/masking" + RETURN_TYPES = ("CLIPSEGMODEL",) + RETURN_NAMES = ("clipseg_model",) + FUNCTION = "segment_image" + DESCRIPTION = """ +Downloads and loads CLIPSeg model with huggingface_hub, +to ComfyUI/models/clip_seg +""" + + def segment_image(self, model): + from transformers import CLIPSegProcessor, CLIPSegForImageSegmentation + checkpoint_path = os.path.join(folder_paths.models_dir,'clip_seg', os.path.basename(model)) + if not hasattr(self, "model"): + if not os.path.exists(checkpoint_path): + from huggingface_hub import snapshot_download + snapshot_download(repo_id=model, local_dir=checkpoint_path, local_dir_use_symlinks=False) + self.model = CLIPSegForImageSegmentation.from_pretrained(checkpoint_path) + + processor = CLIPSegProcessor.from_pretrained(checkpoint_path) + + clipseg_model = {} + clipseg_model['model'] = self.model + clipseg_model['processor'] = processor + + return clipseg_model, + +class CreateTextMask: + + RETURN_TYPES = ("IMAGE", "MASK",) + FUNCTION = "createtextmask" + CATEGORY = "KJNodes/text" + DESCRIPTION = """ +Creates a text image and mask. +Looks for fonts from this folder: +ComfyUI/custom_nodes/ComfyUI-KJNodes/fonts + +If start_rotation and/or end_rotation are different values, +creates animation between them. 
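+Text is word-wrapped to fit the frame width, and the mask output is taken from the red channel of the rendered image.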
+""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "invert": ("BOOLEAN", {"default": False}), + "frames": ("INT", {"default": 1,"min": 1, "max": 4096, "step": 1}), + "text_x": ("INT", {"default": 0,"min": 0, "max": 4096, "step": 1}), + "text_y": ("INT", {"default": 0,"min": 0, "max": 4096, "step": 1}), + "font_size": ("INT", {"default": 32,"min": 8, "max": 4096, "step": 1}), + "font_color": ("STRING", {"default": "white"}), + "text": ("STRING", {"default": "HELLO!", "multiline": True}), + "font": (folder_paths.get_filename_list("kjnodes_fonts"), ), + "width": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "height": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "start_rotation": ("INT", {"default": 0,"min": 0, "max": 359, "step": 1}), + "end_rotation": ("INT", {"default": 0,"min": -359, "max": 359, "step": 1}), + }, + } + + def createtextmask(self, frames, width, height, invert, text_x, text_y, text, font_size, font_color, font, start_rotation, end_rotation): + # Define the number of images in the batch + batch_size = frames + out = [] + masks = [] + rotation = start_rotation + if start_rotation != end_rotation: + rotation_increment = (end_rotation - start_rotation) / (batch_size - 1) + + font_path = folder_paths.get_full_path("kjnodes_fonts", font) + # Generate the text + for i in range(batch_size): + image = Image.new("RGB", (width, height), "black") + draw = ImageDraw.Draw(image) + font = ImageFont.truetype(font_path, font_size) + + # Split the text into words + words = text.split() + + # Initialize variables for line creation + lines = [] + current_line = [] + current_line_width = 0 + try: #new pillow + # Iterate through words to create lines + for word in words: + word_width = font.getbbox(word)[2] + if current_line_width + word_width <= width - 2 * text_x: + current_line.append(word) + current_line_width += word_width + font.getbbox(" ")[2] # Add space width + else: + lines.append(" ".join(current_line)) + current_line = [word] + current_line_width = word_width + except: #old pillow + for word in words: + word_width = font.getsize(word)[0] + if current_line_width + word_width <= width - 2 * text_x: + current_line.append(word) + current_line_width += word_width + font.getsize(" ")[0] # Add space width + else: + lines.append(" ".join(current_line)) + current_line = [word] + current_line_width = word_width + + # Add the last line if it's not empty + if current_line: + lines.append(" ".join(current_line)) + + # Draw each line of text separately + y_offset = text_y + for line in lines: + text_width = font.getlength(line) + text_height = font_size + text_center_x = text_x + text_width / 2 + text_center_y = y_offset + text_height / 2 + try: + draw.text((text_x, y_offset), line, font=font, fill=font_color, features=['-liga']) + except: + draw.text((text_x, y_offset), line, font=font, fill=font_color) + y_offset += text_height # Move to the next line + + if start_rotation != end_rotation: + image = image.rotate(rotation, center=(text_center_x, text_center_y)) + rotation += rotation_increment + + image = np.array(image).astype(np.float32) / 255.0 + image = torch.from_numpy(image)[None,] + mask = image[:, :, :, 0] + masks.append(mask) + out.append(image) + + if invert: + return (1.0 - torch.cat(out, dim=0), 1.0 - torch.cat(masks, dim=0),) + return (torch.cat(out, dim=0),torch.cat(masks, dim=0),) + +class ColorToMask: + + RETURN_TYPES = ("MASK",) + FUNCTION = "clip" + CATEGORY = "KJNodes/masking" + DESCRIPTION = """ +Converts chosen RGB value 
to a mask. +With batch inputs, the **per_batch** +controls the number of images processed at once. +""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "images": ("IMAGE",), + "invert": ("BOOLEAN", {"default": False}), + "red": ("INT", {"default": 0,"min": 0, "max": 255, "step": 1}), + "green": ("INT", {"default": 0,"min": 0, "max": 255, "step": 1}), + "blue": ("INT", {"default": 0,"min": 0, "max": 255, "step": 1}), + "threshold": ("INT", {"default": 10,"min": 0, "max": 255, "step": 1}), + "per_batch": ("INT", {"default": 16, "min": 1, "max": 4096, "step": 1}), + }, + } + + def clip(self, images, red, green, blue, threshold, invert, per_batch): + + color = torch.tensor([red, green, blue], dtype=torch.uint8) + black = torch.tensor([0, 0, 0], dtype=torch.uint8) + white = torch.tensor([255, 255, 255], dtype=torch.uint8) + + if invert: + black, white = white, black + + steps = images.shape[0] + pbar = ProgressBar(steps) + tensors_out = [] + + for start_idx in range(0, images.shape[0], per_batch): + + # Calculate color distances + color_distances = torch.norm(images[start_idx:start_idx+per_batch] * 255 - color, dim=-1) + + # Create a mask based on the threshold + mask = color_distances <= threshold + + # Apply the mask to create new images + mask_out = torch.where(mask.unsqueeze(-1), white, black).float() + mask_out = mask_out.mean(dim=-1) + + tensors_out.append(mask_out.cpu()) + batch_count = mask_out.shape[0] + pbar.update(batch_count) + + tensors_out = torch.cat(tensors_out, dim=0) + tensors_out = torch.clamp(tensors_out, min=0.0, max=1.0) + return tensors_out, + +class CreateFluidMask: + + RETURN_TYPES = ("IMAGE", "MASK") + FUNCTION = "createfluidmask" + CATEGORY = "KJNodes/masking/generate" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "invert": ("BOOLEAN", {"default": False}), + "frames": ("INT", {"default": 1,"min": 1, "max": 4096, "step": 1}), + "width": ("INT", {"default": 256,"min": 16, "max": 4096, "step": 1}), + "height": ("INT", {"default": 256,"min": 16, "max": 4096, "step": 1}), + "inflow_count": ("INT", {"default": 3,"min": 0, "max": 255, "step": 1}), + "inflow_velocity": ("INT", {"default": 1,"min": 0, "max": 255, "step": 1}), + "inflow_radius": ("INT", {"default": 8,"min": 0, "max": 255, "step": 1}), + "inflow_padding": ("INT", {"default": 50,"min": 0, "max": 255, "step": 1}), + "inflow_duration": ("INT", {"default": 60,"min": 0, "max": 255, "step": 1}), + }, + } + #using code from https://github.com/GregTJ/stable-fluids + def createfluidmask(self, frames, width, height, invert, inflow_count, inflow_velocity, inflow_radius, inflow_padding, inflow_duration): + from ..utility.fluid import Fluid + try: + from scipy.special import erf + except: + from scipy.spatial import erf + out = [] + masks = [] + RESOLUTION = width, height + DURATION = frames + + INFLOW_PADDING = inflow_padding + INFLOW_DURATION = inflow_duration + INFLOW_RADIUS = inflow_radius + INFLOW_VELOCITY = inflow_velocity + INFLOW_COUNT = inflow_count + + print('Generating fluid solver, this may take some time.') + fluid = Fluid(RESOLUTION, 'dye') + + center = np.floor_divide(RESOLUTION, 2) + r = np.min(center) - INFLOW_PADDING + + points = np.linspace(-np.pi, np.pi, INFLOW_COUNT, endpoint=False) + points = tuple(np.array((np.cos(p), np.sin(p))) for p in points) + normals = tuple(-p for p in points) + points = tuple(r * p + center for p in points) + + inflow_velocity = np.zeros_like(fluid.velocity) + inflow_dye = np.zeros(fluid.shape) + for p, n in zip(points, normals): + mask 
= np.linalg.norm(fluid.indices - p[:, None, None], axis=0) <= INFLOW_RADIUS + inflow_velocity[:, mask] += n[:, None] * INFLOW_VELOCITY + inflow_dye[mask] = 1 + + + for f in range(DURATION): + print(f'Computing frame {f + 1} of {DURATION}.') + if f <= INFLOW_DURATION: + fluid.velocity += inflow_velocity + fluid.dye += inflow_dye + + curl = fluid.step()[1] + # Using the error function to make the contrast a bit higher. + # Any other sigmoid function e.g. smoothstep would work. + curl = (erf(curl * 2) + 1) / 4 + + color = np.dstack((curl, np.ones(fluid.shape), fluid.dye)) + color = (np.clip(color, 0, 1) * 255).astype('uint8') + image = np.array(color).astype(np.float32) / 255.0 + image = torch.from_numpy(image)[None,] + mask = image[:, :, :, 0] + masks.append(mask) + out.append(image) + + if invert: + return (1.0 - torch.cat(out, dim=0),1.0 - torch.cat(masks, dim=0),) + return (torch.cat(out, dim=0),torch.cat(masks, dim=0),) + +class CreateAudioMask: + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "createaudiomask" + CATEGORY = "KJNodes/deprecated" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "invert": ("BOOLEAN", {"default": False}), + "frames": ("INT", {"default": 16,"min": 1, "max": 255, "step": 1}), + "scale": ("FLOAT", {"default": 0.5,"min": 0.0, "max": 2.0, "step": 0.01}), + "audio_path": ("STRING", {"default": "audio.wav"}), + "width": ("INT", {"default": 256,"min": 16, "max": 4096, "step": 1}), + "height": ("INT", {"default": 256,"min": 16, "max": 4096, "step": 1}), + }, + } + + def createaudiomask(self, frames, width, height, invert, audio_path, scale): + try: + import librosa + except ImportError: + raise Exception("Can not import librosa. Install it with 'pip install librosa'") + batch_size = frames + out = [] + masks = [] + if audio_path == "audio.wav": #I don't know why relative path won't work otherwise... 
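+            # resolve the default filename relative to this node pack's directory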
+ audio_path = os.path.join(script_directory, audio_path) + audio, sr = librosa.load(audio_path) + spectrogram = np.abs(librosa.stft(audio)) + + for i in range(batch_size): + image = Image.new("RGB", (width, height), "black") + draw = ImageDraw.Draw(image) + frame = spectrogram[:, i] + circle_radius = int(height * np.mean(frame)) + circle_radius *= scale + circle_center = (width // 2, height // 2) # Calculate the center of the image + + draw.ellipse([(circle_center[0] - circle_radius, circle_center[1] - circle_radius), + (circle_center[0] + circle_radius, circle_center[1] + circle_radius)], + fill='white') + + image = np.array(image).astype(np.float32) / 255.0 + image = torch.from_numpy(image)[None,] + mask = image[:, :, :, 0] + masks.append(mask) + out.append(image) + + if invert: + return (1.0 - torch.cat(out, dim=0),) + return (torch.cat(out, dim=0),torch.cat(masks, dim=0),) + +class CreateGradientMask: + + RETURN_TYPES = ("MASK",) + FUNCTION = "createmask" + CATEGORY = "KJNodes/masking/generate" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "invert": ("BOOLEAN", {"default": False}), + "frames": ("INT", {"default": 0,"min": 0, "max": 255, "step": 1}), + "width": ("INT", {"default": 256,"min": 16, "max": 4096, "step": 1}), + "height": ("INT", {"default": 256,"min": 16, "max": 4096, "step": 1}), + }, + } + def createmask(self, frames, width, height, invert): + # Define the number of images in the batch + batch_size = frames + out = [] + # Create an empty array to store the image batch + image_batch = np.zeros((batch_size, height, width), dtype=np.float32) + # Generate the black to white gradient for each image + for i in range(batch_size): + gradient = np.linspace(1.0, 0.0, width, dtype=np.float32) + time = i / frames # Calculate the time variable + offset_gradient = gradient - time # Offset the gradient values based on time + image_batch[i] = offset_gradient.reshape(1, -1) + output = torch.from_numpy(image_batch) + mask = output + out.append(mask) + if invert: + return (1.0 - torch.cat(out, dim=0),) + return (torch.cat(out, dim=0),) + +class CreateFadeMask: + + RETURN_TYPES = ("MASK",) + FUNCTION = "createfademask" + CATEGORY = "KJNodes/deprecated" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "invert": ("BOOLEAN", {"default": False}), + "frames": ("INT", {"default": 2,"min": 2, "max": 255, "step": 1}), + "width": ("INT", {"default": 256,"min": 16, "max": 4096, "step": 1}), + "height": ("INT", {"default": 256,"min": 16, "max": 4096, "step": 1}), + "interpolation": (["linear", "ease_in", "ease_out", "ease_in_out"],), + "start_level": ("FLOAT", {"default": 1.0,"min": 0.0, "max": 1.0, "step": 0.01}), + "midpoint_level": ("FLOAT", {"default": 0.5,"min": 0.0, "max": 1.0, "step": 0.01}), + "end_level": ("FLOAT", {"default": 0.0,"min": 0.0, "max": 1.0, "step": 0.01}), + "midpoint_frame": ("INT", {"default": 0,"min": 0, "max": 4096, "step": 1}), + }, + } + + def createfademask(self, frames, width, height, invert, interpolation, start_level, midpoint_level, end_level, midpoint_frame): + def ease_in(t): + return t * t + + def ease_out(t): + return 1 - (1 - t) * (1 - t) + + def ease_in_out(t): + return 3 * t * t - 2 * t * t * t + + batch_size = frames + out = [] + image_batch = np.zeros((batch_size, height, width), dtype=np.float32) + + if midpoint_frame == 0: + midpoint_frame = batch_size // 2 + + for i in range(batch_size): + if i <= midpoint_frame: + t = i / midpoint_frame + if interpolation == "ease_in": + t = ease_in(t) + elif interpolation == 
"ease_out": + t = ease_out(t) + elif interpolation == "ease_in_out": + t = ease_in_out(t) + color = start_level - t * (start_level - midpoint_level) + else: + t = (i - midpoint_frame) / (batch_size - midpoint_frame) + if interpolation == "ease_in": + t = ease_in(t) + elif interpolation == "ease_out": + t = ease_out(t) + elif interpolation == "ease_in_out": + t = ease_in_out(t) + color = midpoint_level - t * (midpoint_level - end_level) + + color = np.clip(color, 0, 255) + image = np.full((height, width), color, dtype=np.float32) + image_batch[i] = image + + output = torch.from_numpy(image_batch) + mask = output + out.append(mask) + + if invert: + return (1.0 - torch.cat(out, dim=0),) + return (torch.cat(out, dim=0),) + +class CreateFadeMaskAdvanced: + + RETURN_TYPES = ("MASK",) + FUNCTION = "createfademask" + CATEGORY = "KJNodes/masking/generate" + DESCRIPTION = """ +Create a batch of masks interpolated between given frames and values. +Uses same syntax as Fizz' BatchValueSchedule. +First value is the frame index (not that this starts from 0, not 1) +and the second value inside the brackets is the float value of the mask in range 0.0 - 1.0 + +For example the default values: +0:(0.0) +7:(1.0) +15:(0.0) + +Would create a mask batch fo 16 frames, starting from black, +interpolating with the chosen curve to fully white at the 8th frame, +and interpolating from that to fully black at the 16th frame. +""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "points_string": ("STRING", {"default": "0:(0.0),\n7:(1.0),\n15:(0.0)\n", "multiline": True}), + "invert": ("BOOLEAN", {"default": False}), + "frames": ("INT", {"default": 16,"min": 2, "max": 255, "step": 1}), + "width": ("INT", {"default": 512,"min": 1, "max": 4096, "step": 1}), + "height": ("INT", {"default": 512,"min": 1, "max": 4096, "step": 1}), + "interpolation": (["linear", "ease_in", "ease_out", "ease_in_out"],), + }, + } + + def createfademask(self, frames, width, height, invert, points_string, interpolation): + def ease_in(t): + return t * t + + def ease_out(t): + return 1 - (1 - t) * (1 - t) + + def ease_in_out(t): + return 3 * t * t - 2 * t * t * t + + # Parse the input string into a list of tuples + points = [] + points_string = points_string.rstrip(',\n') + for point_str in points_string.split(','): + frame_str, color_str = point_str.split(':') + frame = int(frame_str.strip()) + color = float(color_str.strip()[1:-1]) # Remove parentheses around color + points.append((frame, color)) + + # Check if the last frame is already in the points + if len(points) == 0 or points[-1][0] != frames - 1: + # If not, add it with the color of the last specified frame + points.append((frames - 1, points[-1][1] if points else 0)) + + # Sort the points by frame number + points.sort(key=lambda x: x[0]) + + batch_size = frames + out = [] + image_batch = np.zeros((batch_size, height, width), dtype=np.float32) + + # Index of the next point to interpolate towards + next_point = 1 + + for i in range(batch_size): + while next_point < len(points) and i > points[next_point][0]: + next_point += 1 + + # Interpolate between the previous point and the next point + prev_point = next_point - 1 + t = (i - points[prev_point][0]) / (points[next_point][0] - points[prev_point][0]) + if interpolation == "ease_in": + t = ease_in(t) + elif interpolation == "ease_out": + t = ease_out(t) + elif interpolation == "ease_in_out": + t = ease_in_out(t) + elif interpolation == "linear": + pass # No need to modify `t` for linear interpolation + + color = 
points[prev_point][1] - t * (points[prev_point][1] - points[next_point][1]) + color = np.clip(color, 0, 255) + image = np.full((height, width), color, dtype=np.float32) + image_batch[i] = image + + output = torch.from_numpy(image_batch) + mask = output + out.append(mask) + + if invert: + return (1.0 - torch.cat(out, dim=0),) + return (torch.cat(out, dim=0),) + +class CreateMagicMask: + + RETURN_TYPES = ("MASK", "MASK",) + RETURN_NAMES = ("mask", "mask_inverted",) + FUNCTION = "createmagicmask" + CATEGORY = "KJNodes/masking/generate" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "frames": ("INT", {"default": 16,"min": 2, "max": 4096, "step": 1}), + "depth": ("INT", {"default": 12,"min": 1, "max": 500, "step": 1}), + "distortion": ("FLOAT", {"default": 1.5,"min": 0.0, "max": 100.0, "step": 0.01}), + "seed": ("INT", {"default": 123,"min": 0, "max": 99999999, "step": 1}), + "transitions": ("INT", {"default": 1,"min": 1, "max": 20, "step": 1}), + "frame_width": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "frame_height": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + }, + } + + def createmagicmask(self, frames, transitions, depth, distortion, seed, frame_width, frame_height): + from ..utility.magictex import coordinate_grid, random_transform, magic + import matplotlib.pyplot as plt + rng = np.random.default_rng(seed) + out = [] + coords = coordinate_grid((frame_width, frame_height)) + + # Calculate the number of frames for each transition + frames_per_transition = frames // transitions + + # Generate a base set of parameters + base_params = { + "coords": random_transform(coords, rng), + "depth": depth, + "distortion": distortion, + } + for t in range(transitions): + # Generate a second set of parameters that is at most max_diff away from the base parameters + params1 = base_params.copy() + params2 = base_params.copy() + + params1['coords'] = random_transform(coords, rng) + params2['coords'] = random_transform(coords, rng) + + for i in range(frames_per_transition): + # Compute the interpolation factor + alpha = i / frames_per_transition + + # Interpolate between the two sets of parameters + params = params1.copy() + params['coords'] = (1 - alpha) * params1['coords'] + alpha * params2['coords'] + + tex = magic(**params) + + dpi = frame_width / 10 + fig = plt.figure(figsize=(10, 10), dpi=dpi) + + ax = fig.add_subplot(111) + plt.subplots_adjust(left=0, right=1, bottom=0, top=1) + + ax.get_yaxis().set_ticks([]) + ax.get_xaxis().set_ticks([]) + ax.imshow(tex, aspect='auto') + + fig.canvas.draw() + img = np.array(fig.canvas.renderer._renderer) + + plt.close(fig) + + pil_img = Image.fromarray(img).convert("L") + mask = torch.tensor(np.array(pil_img)) / 255.0 + + out.append(mask) + + return (torch.stack(out, dim=0), 1.0 - torch.stack(out, dim=0),) + +class CreateShapeMask: + + RETURN_TYPES = ("MASK", "MASK",) + RETURN_NAMES = ("mask", "mask_inverted",) + FUNCTION = "createshapemask" + CATEGORY = "KJNodes/masking/generate" + DESCRIPTION = """ +Creates a mask or batch of masks with the specified shape. +Locations are center locations. +Grow value is the amount to grow the shape on each frame, creating animated masks. 
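+Negative grow values shrink the shape each frame; the size is clamped so it never goes below zero.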
+""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "shape": ( + [ 'circle', + 'square', + 'triangle', + ], + { + "default": 'circle' + }), + "frames": ("INT", {"default": 1,"min": 1, "max": 4096, "step": 1}), + "location_x": ("INT", {"default": 256,"min": 0, "max": 4096, "step": 1}), + "location_y": ("INT", {"default": 256,"min": 0, "max": 4096, "step": 1}), + "grow": ("INT", {"default": 0, "min": -512, "max": 512, "step": 1}), + "frame_width": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "frame_height": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "shape_width": ("INT", {"default": 128,"min": 8, "max": 4096, "step": 1}), + "shape_height": ("INT", {"default": 128,"min": 8, "max": 4096, "step": 1}), + }, + } + + def createshapemask(self, frames, frame_width, frame_height, location_x, location_y, shape_width, shape_height, grow, shape): + # Define the number of images in the batch + batch_size = frames + out = [] + color = "white" + for i in range(batch_size): + image = Image.new("RGB", (frame_width, frame_height), "black") + draw = ImageDraw.Draw(image) + + # Calculate the size for this frame and ensure it's not less than 0 + current_width = max(0, shape_width + i*grow) + current_height = max(0, shape_height + i*grow) + + if shape == 'circle' or shape == 'square': + # Define the bounding box for the shape + left_up_point = (location_x - current_width // 2, location_y - current_height // 2) + right_down_point = (location_x + current_width // 2, location_y + current_height // 2) + two_points = [left_up_point, right_down_point] + + if shape == 'circle': + draw.ellipse(two_points, fill=color) + elif shape == 'square': + draw.rectangle(two_points, fill=color) + + elif shape == 'triangle': + # Define the points for the triangle + left_up_point = (location_x - current_width // 2, location_y + current_height // 2) # bottom left + right_down_point = (location_x + current_width // 2, location_y + current_height // 2) # bottom right + top_point = (location_x, location_y - current_height // 2) # top point + draw.polygon([top_point, left_up_point, right_down_point], fill=color) + + image = pil2tensor(image) + mask = image[:, :, :, 0] + out.append(mask) + outstack = torch.cat(out, dim=0) + return (outstack, 1.0 - outstack,) + +class CreateVoronoiMask: + + RETURN_TYPES = ("MASK", "MASK",) + RETURN_NAMES = ("mask", "mask_inverted",) + FUNCTION = "createvoronoi" + CATEGORY = "KJNodes/masking/generate" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "frames": ("INT", {"default": 16,"min": 2, "max": 4096, "step": 1}), + "num_points": ("INT", {"default": 15,"min": 1, "max": 4096, "step": 1}), + "line_width": ("INT", {"default": 4,"min": 1, "max": 4096, "step": 1}), + "speed": ("FLOAT", {"default": 0.5,"min": 0.0, "max": 1.0, "step": 0.01}), + "frame_width": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "frame_height": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + }, + } + + def createvoronoi(self, frames, num_points, line_width, speed, frame_width, frame_height): + from scipy.spatial import Voronoi + # Define the number of images in the batch + batch_size = frames + out = [] + + # Calculate aspect ratio + aspect_ratio = frame_width / frame_height + + # Create start and end points for each point, considering the aspect ratio + start_points = np.random.rand(num_points, 2) + start_points[:, 0] *= aspect_ratio + + end_points = np.random.rand(num_points, 2) + end_points[:, 0] *= aspect_ratio + + for i in 
range(batch_size): + # Interpolate the points' positions based on the current frame + t = (i * speed) / (batch_size - 1) # normalize to [0, 1] over the frames + t = np.clip(t, 0, 1) # ensure t is in [0, 1] + points = (1 - t) * start_points + t * end_points # lerp + + # Adjust points for aspect ratio + points[:, 0] *= aspect_ratio + + vor = Voronoi(points) + + # Create a blank image with a white background + fig, ax = plt.subplots() + plt.subplots_adjust(left=0, right=1, bottom=0, top=1) + ax.set_xlim([0, aspect_ratio]); ax.set_ylim([0, 1]) # adjust x limits + ax.axis('off') + ax.margins(0, 0) + fig.set_size_inches(aspect_ratio * frame_height/100, frame_height/100) # adjust figure size + ax.fill_between([0, 1], [0, 1], color='white') + + # Plot each Voronoi ridge + for simplex in vor.ridge_vertices: + simplex = np.asarray(simplex) + if np.all(simplex >= 0): + plt.plot(vor.vertices[simplex, 0], vor.vertices[simplex, 1], 'k-', linewidth=line_width) + + fig.canvas.draw() + img = np.array(fig.canvas.renderer._renderer) + + plt.close(fig) + + pil_img = Image.fromarray(img).convert("L") + mask = torch.tensor(np.array(pil_img)) / 255.0 + + out.append(mask) + + return (torch.stack(out, dim=0), 1.0 - torch.stack(out, dim=0),) + +class GetMaskSizeAndCount: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "mask": ("MASK",), + }} + + RETURN_TYPES = ("MASK","INT", "INT", "INT",) + RETURN_NAMES = ("mask", "width", "height", "count",) + FUNCTION = "getsize" + CATEGORY = "KJNodes/masking" + DESCRIPTION = """ +Returns the width, height and batch size of the mask, +and passes it through unchanged. + +""" + + def getsize(self, mask): + width = mask.shape[2] + height = mask.shape[1] + count = mask.shape[0] + return {"ui": { + "text": [f"{count}x{width}x{height}"]}, + "result": (mask, width, height, count) + } + +class GrowMaskWithBlur: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "mask": ("MASK",), + "expand": ("INT", {"default": 0, "min": -MAX_RESOLUTION, "max": MAX_RESOLUTION, "step": 1}), + "incremental_expandrate": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 100.0, "step": 0.1}), + "tapered_corners": ("BOOLEAN", {"default": True}), + "flip_input": ("BOOLEAN", {"default": False}), + "blur_radius": ("FLOAT", { + "default": 0.0, + "min": 0.0, + "max": 100, + "step": 0.1 + }), + "lerp_alpha": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "decay_factor": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + }, + "optional": { + "fill_holes": ("BOOLEAN", {"default": False}), + }, + } + + CATEGORY = "KJNodes/masking" + RETURN_TYPES = ("MASK", "MASK",) + RETURN_NAMES = ("mask", "mask_inverted",) + FUNCTION = "expand_mask" + DESCRIPTION = """ +# GrowMaskWithBlur +- mask: Input mask or mask batch +- expand: Expand or contract mask or mask batch by a given amount +- incremental_expandrate: increase expand rate by a given amount per frame +- tapered_corners: use tapered corners +- flip_input: flip input mask +- blur_radius: value higher than 0 will blur the mask +- lerp_alpha: alpha value for interpolation between frames +- decay_factor: decay value for interpolation between frames +- fill_holes: fill holes in the mask (slow)""" + + def expand_mask(self, mask, expand, tapered_corners, flip_input, blur_radius, incremental_expandrate, lerp_alpha, decay_factor, fill_holes=False): + alpha = lerp_alpha + decay = decay_factor + if flip_input: + mask = 1.0 - mask + c = 0 if tapered_corners else 1 + kernel = np.array([[c, 1, c], + [1, 1, 1], + [c, 1, 
c]]) + growmask = mask.reshape((-1, mask.shape[-2], mask.shape[-1])).cpu() + out = [] + previous_output = None + current_expand = expand + for m in growmask: + output = m.numpy().astype(np.float32) + for _ in range(abs(round(current_expand))): + if current_expand < 0: + output = scipy.ndimage.grey_erosion(output, footprint=kernel) + else: + output = scipy.ndimage.grey_dilation(output, footprint=kernel) + if current_expand < 0: + current_expand -= abs(incremental_expandrate) + else: + current_expand += abs(incremental_expandrate) + if fill_holes: + binary_mask = output > 0 + output = scipy.ndimage.binary_fill_holes(binary_mask) + output = output.astype(np.float32) * 255 + output = torch.from_numpy(output) + if alpha < 1.0 and previous_output is not None: + # Interpolate between the previous and current frame + output = alpha * output + (1 - alpha) * previous_output + if decay < 1.0 and previous_output is not None: + # Add the decayed previous output to the current frame + output += decay * previous_output + output = output / output.max() + previous_output = output + out.append(output) + + if blur_radius != 0: + # Convert the tensor list to PIL images, apply blur, and convert back + for idx, tensor in enumerate(out): + # Convert tensor to PIL image + pil_image = tensor2pil(tensor.cpu().detach())[0] + # Apply Gaussian blur + pil_image = pil_image.filter(ImageFilter.GaussianBlur(blur_radius)) + # Convert back to tensor + out[idx] = pil2tensor(pil_image) + blurred = torch.cat(out, dim=0) + return (blurred, 1.0 - blurred) + else: + return (torch.stack(out, dim=0), 1.0 - torch.stack(out, dim=0),) + +class MaskBatchMulti: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "inputcount": ("INT", {"default": 2, "min": 2, "max": 1000, "step": 1}), + "mask_1": ("MASK", ), + "mask_2": ("MASK", ), + }, + } + + RETURN_TYPES = ("MASK",) + RETURN_NAMES = ("masks",) + FUNCTION = "combine" + CATEGORY = "KJNodes/masking" + DESCRIPTION = """ +Creates an image batch from multiple masks. +You can set how many inputs the node has, +with the **inputcount** and clicking update. +""" + + def combine(self, inputcount, **kwargs): + mask = kwargs["mask_1"] + for c in range(1, inputcount): + new_mask = kwargs[f"mask_{c + 1}"] + if mask.shape[1:] != new_mask.shape[1:]: + new_mask = F.interpolate(new_mask.unsqueeze(1), size=(mask.shape[1], mask.shape[2]), mode="bicubic").squeeze(1) + mask = torch.cat((mask, new_mask), dim=0) + return (mask,) + +class OffsetMask: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mask": ("MASK",), + "x": ("INT", { "default": 0, "min": -4096, "max": MAX_RESOLUTION, "step": 1, "display": "number" }), + "y": ("INT", { "default": 0, "min": -4096, "max": MAX_RESOLUTION, "step": 1, "display": "number" }), + "angle": ("INT", { "default": 0, "min": -360, "max": 360, "step": 1, "display": "number" }), + "duplication_factor": ("INT", { "default": 1, "min": 1, "max": 1000, "step": 1, "display": "number" }), + "roll": ("BOOLEAN", { "default": False }), + "incremental": ("BOOLEAN", { "default": False }), + "padding_mode": ( + [ + 'empty', + 'border', + 'reflection', + + ], { + "default": 'empty' + }), + } + } + + RETURN_TYPES = ("MASK",) + RETURN_NAMES = ("mask",) + FUNCTION = "offset" + CATEGORY = "KJNodes/masking" + DESCRIPTION = """ +Offsets the mask by the specified amount. 
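+With incremental enabled, the offset and rotation are scaled by the frame number, creating an animated offset across the batch.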
+ - mask: Input mask or mask batch + - x: Horizontal offset + - y: Vertical offset + - angle: Angle in degrees + - roll: roll edge wrapping + - duplication_factor: Number of times to duplicate the mask to form a batch + - border padding_mode: Padding mode for the mask +""" + + def offset(self, mask, x, y, angle, roll=False, incremental=False, duplication_factor=1, padding_mode="empty"): + # Create duplicates of the mask batch + mask = mask.repeat(duplication_factor, 1, 1).clone() + + batch_size, height, width = mask.shape + + if angle != 0 and incremental: + for i in range(batch_size): + rotation_angle = angle * (i+1) + mask[i] = TF.rotate(mask[i].unsqueeze(0), rotation_angle).squeeze(0) + elif angle > 0: + for i in range(batch_size): + mask[i] = TF.rotate(mask[i].unsqueeze(0), angle).squeeze(0) + + if roll: + if incremental: + for i in range(batch_size): + shift_x = min(x*(i+1), width-1) + shift_y = min(y*(i+1), height-1) + if shift_x != 0: + mask[i] = torch.roll(mask[i], shifts=shift_x, dims=1) + if shift_y != 0: + mask[i] = torch.roll(mask[i], shifts=shift_y, dims=0) + else: + shift_x = min(x, width-1) + shift_y = min(y, height-1) + if shift_x != 0: + mask = torch.roll(mask, shifts=shift_x, dims=2) + if shift_y != 0: + mask = torch.roll(mask, shifts=shift_y, dims=1) + else: + + for i in range(batch_size): + if incremental: + temp_x = min(x * (i+1), width-1) + temp_y = min(y * (i+1), height-1) + else: + temp_x = min(x, width-1) + temp_y = min(y, height-1) + if temp_x > 0: + if padding_mode == 'empty': + mask[i] = torch.cat([torch.zeros((height, temp_x)), mask[i, :, :-temp_x]], dim=1) + elif padding_mode in ['replicate', 'reflect']: + mask[i] = F.pad(mask[i, :, :-temp_x], (0, temp_x), mode=padding_mode) + elif temp_x < 0: + if padding_mode == 'empty': + mask[i] = torch.cat([mask[i, :, :temp_x], torch.zeros((height, -temp_x))], dim=1) + elif padding_mode in ['replicate', 'reflect']: + mask[i] = F.pad(mask[i, :, -temp_x:], (temp_x, 0), mode=padding_mode) + + if temp_y > 0: + if padding_mode == 'empty': + mask[i] = torch.cat([torch.zeros((temp_y, width)), mask[i, :-temp_y, :]], dim=0) + elif padding_mode in ['replicate', 'reflect']: + mask[i] = F.pad(mask[i, :-temp_y, :], (0, temp_y), mode=padding_mode) + elif temp_y < 0: + if padding_mode == 'empty': + mask[i] = torch.cat([mask[i, :temp_y, :], torch.zeros((-temp_y, width))], dim=0) + elif padding_mode in ['replicate', 'reflect']: + mask[i] = F.pad(mask[i, -temp_y:, :], (temp_y, 0), mode=padding_mode) + + return mask, + +class RoundMask: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "mask": ("MASK",), + }} + + RETURN_TYPES = ("MASK",) + FUNCTION = "round" + CATEGORY = "KJNodes/masking" + DESCRIPTION = """ +Rounds the mask or batch of masks to a binary mask. +RoundMask example + +""" + + def round(self, mask): + mask = mask.round() + return (mask,) + +class ResizeMask: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mask": ("MASK",), + "width": ("INT", { "default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 8, "display": "number" }), + "height": ("INT", { "default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 8, "display": "number" }), + "keep_proportions": ("BOOLEAN", { "default": False }), + } + } + + RETURN_TYPES = ("MASK", "INT", "INT",) + RETURN_NAMES = ("mask", "width", "height",) + FUNCTION = "resize" + CATEGORY = "KJNodes/masking" + DESCRIPTION = """ +Resizes the mask or batch of masks to the specified width and height. 
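+keep_proportions fits the target size using the smaller of the two scale factors, and interpolation is always nearest-neighbor so no intermediate mask values are introduced.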
+""" + + def resize(self, mask, width, height, keep_proportions): + if keep_proportions: + _, oh, ow = mask.shape + width = ow if width == 0 else width + height = oh if height == 0 else height + ratio = min(width / ow, height / oh) + width = round(ow*ratio) + height = round(oh*ratio) + outputs = mask.unsqueeze(1) + outputs = F.interpolate(outputs, size=(height, width), mode="nearest") + outputs = outputs.squeeze(1) + + return(outputs, outputs.shape[2], outputs.shape[1],) + +class RemapMaskRange: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mask": ("MASK",), + "min": ("FLOAT", {"default": 0.0,"min": -10.0, "max": 1.0, "step": 0.01}), + "max": ("FLOAT", {"default": 1.0,"min": 0.0, "max": 10.0, "step": 0.01}), + } + } + + RETURN_TYPES = ("MASK",) + RETURN_NAMES = ("mask",) + FUNCTION = "remap" + CATEGORY = "KJNodes/masking" + DESCRIPTION = """ +Sets new min and max values for the mask. +""" + + def remap(self, mask, min, max): + + # Find the maximum value in the mask + mask_max = torch.max(mask) + + # If the maximum mask value is zero, avoid division by zero by setting it to 1 + mask_max = mask_max if mask_max > 0 else 1 + + # Scale the mask values to the new range defined by min and max + # The highest pixel value in the mask will be scaled to max + scaled_mask = (mask / mask_max) * (max - min) + min + + # Clamp the values to ensure they are within [0.0, 1.0] + scaled_mask = torch.clamp(scaled_mask, min=0.0, max=1.0) + + return (scaled_mask, ) diff --git a/ComfyUI-KJNodes/nodes/nodes.py b/ComfyUI-KJNodes/nodes/nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..d84947da464dffec9d46fac441aa8b81f1e966be --- /dev/null +++ b/ComfyUI-KJNodes/nodes/nodes.py @@ -0,0 +1,1946 @@ +import torch +import numpy as np +from PIL import Image + +import json, re, os, io, time + +import model_management +import folder_paths +from nodes import MAX_RESOLUTION +from comfy.utils import common_upscale, ProgressBar + +script_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +folder_paths.add_model_folder_path("kjnodes_fonts", os.path.join(script_directory, "fonts")) + +class AnyType(str): + """A special class that is always equal in not equal comparisons. 
Credit to pythongosssss""" + + def __ne__(self, __value: object) -> bool: + return False +any = AnyType("*") + +class INTConstant: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "value": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + }, + } + RETURN_TYPES = ("INT",) + RETURN_NAMES = ("value",) + FUNCTION = "get_value" + CATEGORY = "KJNodes/constants" + + def get_value(self, value): + return (value,) + +class FloatConstant: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "value": ("FLOAT", {"default": 0.0, "min": -0xffffffffffffffff, "max": 0xffffffffffffffff, "step": 0.001}), + }, + } + + RETURN_TYPES = ("FLOAT",) + RETURN_NAMES = ("value",) + FUNCTION = "get_value" + CATEGORY = "KJNodes/constants" + + def get_value(self, value): + return (value,) + +class StringConstant: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "string": ("STRING", {"default": '', "multiline": False}), + } + } + RETURN_TYPES = ("STRING",) + FUNCTION = "passtring" + CATEGORY = "KJNodes/constants" + + def passtring(self, string): + return (string, ) + +class StringConstantMultiline: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "string": ("STRING", {"default": "", "multiline": True}), + "strip_newlines": ("BOOLEAN", {"default": True}), + } + } + RETURN_TYPES = ("STRING",) + FUNCTION = "stringify" + CATEGORY = "KJNodes/constants" + + def stringify(self, string, strip_newlines): + new_string = [] + for line in io.StringIO(string): + if not line.strip().startswith("\n") and strip_newlines: + line = line.replace("\n", '') + new_string.append(line) + new_string = "\n".join(new_string) + + return (new_string, ) + + + +class ScaleBatchPromptSchedule: + + RETURN_TYPES = ("STRING",) + FUNCTION = "scaleschedule" + CATEGORY = "KJNodes" + DESCRIPTION = """ +Scales a batch schedule from Fizz' nodes BatchPromptSchedule +to a different frame count. +""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "input_str": ("STRING", {"forceInput": True,"default": "0:(0.0),\n7:(1.0),\n15:(0.0)\n"}), + "old_frame_count": ("INT", {"forceInput": True,"default": 1,"min": 1, "max": 4096, "step": 1}), + "new_frame_count": ("INT", {"forceInput": True,"default": 1,"min": 1, "max": 4096, "step": 1}), + + }, + } + + def scaleschedule(self, old_frame_count, input_str, new_frame_count): + pattern = r'"(\d+)"\s*:\s*"(.*?)"(?:,|\Z)' + frame_strings = dict(re.findall(pattern, input_str)) + + # Calculate the scaling factor + scaling_factor = (new_frame_count - 1) / (old_frame_count - 1) + + # Initialize a dictionary to store the new frame numbers and strings + new_frame_strings = {} + + # Iterate over the frame numbers and strings + for old_frame, string in frame_strings.items(): + # Calculate the new frame number + new_frame = int(round(int(old_frame) * scaling_factor)) + + # Store the new frame number and corresponding string + new_frame_strings[new_frame] = string + + # Format the output string + output_str = ', '.join([f'"{k}":"{v}"' for k, v in sorted(new_frame_strings.items())]) + return (output_str,) + + +class GetLatentsFromBatchIndexed: + + RETURN_TYPES = ("LATENT",) + FUNCTION = "indexedlatentsfrombatch" + CATEGORY = "KJNodes" + DESCRIPTION = """ +Selects and returns the latents at the specified indices as an latent batch. 
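+
+An illustrative sketch (not part of the node code) of how the index string is
+parsed and applied by indexedlatentsfrombatch() below, assuming a batch of
+eight latents with shape [8, 4, 64, 64]:
+
+    import torch
+    latent_samples = torch.randn(8, 4, 64, 64)
+    index_list = [int(i.strip()) for i in "0, 2, 4".split(',')]
+    chosen = latent_samples[torch.tensor(index_list, dtype=torch.long)]
+    # chosen.shape -> torch.Size([3, 4, 64, 64])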
+""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "latents": ("LATENT",), + "indexes": ("STRING", {"default": "0, 1, 2", "multiline": True}), + }, + } + + def indexedlatentsfrombatch(self, latents, indexes): + + samples = latents.copy() + latent_samples = samples["samples"] + + # Parse the indexes string into a list of integers + index_list = [int(index.strip()) for index in indexes.split(',')] + + # Convert list of indices to a PyTorch tensor + indices_tensor = torch.tensor(index_list, dtype=torch.long) + + # Select the latents at the specified indices + chosen_latents = latent_samples[indices_tensor] + + samples["samples"] = chosen_latents + return (samples,) + + +class ConditioningMultiCombine: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "inputcount": ("INT", {"default": 2, "min": 2, "max": 20, "step": 1}), + "conditioning_1": ("CONDITIONING", ), + "conditioning_2": ("CONDITIONING", ), + }, + } + + RETURN_TYPES = ("CONDITIONING", "INT") + RETURN_NAMES = ("combined", "inputcount") + FUNCTION = "combine" + CATEGORY = "KJNodes/masking/conditioning" + DESCRIPTION = """ +Combines multiple conditioning nodes into one +""" + + def combine(self, inputcount, **kwargs): + from nodes import ConditioningCombine + cond_combine_node = ConditioningCombine() + cond = kwargs["conditioning_1"] + for c in range(1, inputcount): + new_cond = kwargs[f"conditioning_{c + 1}"] + cond = cond_combine_node.combine(new_cond, cond)[0] + return (cond, inputcount,) + + +class JoinStrings: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "string1": ("STRING", {"default": '', "forceInput": True}), + "string2": ("STRING", {"default": '', "forceInput": True}), + "delimiter": ("STRING", {"default": ' ', "multiline": False}), + } + } + RETURN_TYPES = ("STRING",) + FUNCTION = "joinstring" + CATEGORY = "KJNodes/constants" + + def joinstring(self, string1, string2, delimiter): + joined_string = string1 + delimiter + string2 + return (joined_string, ) + +class JoinStringMulti: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "inputcount": ("INT", {"default": 2, "min": 2, "max": 1000, "step": 1}), + "string_1": ("STRING", {"default": '', "forceInput": True}), + "string_2": ("STRING", {"default": '', "forceInput": True}), + "delimiter": ("STRING", {"default": ' ', "multiline": False}), + "return_list": ("BOOLEAN", {"default": False}), + }, + } + + RETURN_TYPES = ("STRING",) + RETURN_NAMES = ("string",) + FUNCTION = "combine" + CATEGORY = "KJNodes" + DESCRIPTION = """ +Creates single string, or a list of strings, from +multiple input strings. +You can set how many inputs the node has, +with the **inputcount** and clicking update. 
+""" + + def combine(self, inputcount, delimiter, **kwargs): + string = kwargs["string_1"] + return_list = kwargs["return_list"] + strings = [string] # Initialize a list with the first string + for c in range(1, inputcount): + new_string = kwargs[f"string_{c + 1}"] + if return_list: + strings.append(new_string) # Add new string to the list + else: + string = string + delimiter + new_string + if return_list: + return (strings,) # Return the list of strings + else: + return (string,) # Return the combined string + +class CondPassThrough: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + }, + "optional": { + "positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + }, + } + + RETURN_TYPES = ("CONDITIONING", "CONDITIONING",) + RETURN_NAMES = ("positive", "negative") + FUNCTION = "passthrough" + CATEGORY = "KJNodes/misc" + DESCRIPTION = """ + Simply passes through the positive and negative conditioning, + workaround for Set node not allowing bypassed inputs. +""" + + def passthrough(self, positive=None, negative=None): + return (positive, negative,) + +class ModelPassThrough: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + }, + "optional": { + "model": ("MODEL", ), + }, + } + + RETURN_TYPES = ("MODEL", ) + RETURN_NAMES = ("model",) + FUNCTION = "passthrough" + CATEGORY = "KJNodes/misc" + DESCRIPTION = """ + Simply passes through the model, + workaround for Set node not allowing bypassed inputs. +""" + + def passthrough(self, model=None): + return (model,) + +def append_helper(t, mask, c, set_area_to_bounds, strength): + n = [t[0], t[1].copy()] + _, h, w = mask.shape + n[1]['mask'] = mask + n[1]['set_area_to_bounds'] = set_area_to_bounds + n[1]['mask_strength'] = strength + c.append(n) + +class ConditioningSetMaskAndCombine: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "positive_1": ("CONDITIONING", ), + "negative_1": ("CONDITIONING", ), + "positive_2": ("CONDITIONING", ), + "negative_2": ("CONDITIONING", ), + "mask_1": ("MASK", ), + "mask_2": ("MASK", ), + "mask_1_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "mask_2_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "set_cond_area": (["default", "mask bounds"],), + } + } + + RETURN_TYPES = ("CONDITIONING","CONDITIONING",) + RETURN_NAMES = ("combined_positive", "combined_negative",) + FUNCTION = "append" + CATEGORY = "KJNodes/masking/conditioning" + DESCRIPTION = """ +Bundles multiple conditioning mask and combine nodes into one,functionality is identical to ComfyUI native nodes +""" + + def append(self, positive_1, negative_1, positive_2, negative_2, mask_1, mask_2, set_cond_area, mask_1_strength, mask_2_strength): + c = [] + c2 = [] + set_area_to_bounds = False + if set_cond_area != "default": + set_area_to_bounds = True + if len(mask_1.shape) < 3: + mask_1 = mask_1.unsqueeze(0) + if len(mask_2.shape) < 3: + mask_2 = mask_2.unsqueeze(0) + for t in positive_1: + append_helper(t, mask_1, c, set_area_to_bounds, mask_1_strength) + for t in positive_2: + append_helper(t, mask_2, c, set_area_to_bounds, mask_2_strength) + for t in negative_1: + append_helper(t, mask_1, c2, set_area_to_bounds, mask_1_strength) + for t in negative_2: + append_helper(t, mask_2, c2, set_area_to_bounds, mask_2_strength) + return (c, c2) + +class ConditioningSetMaskAndCombine3: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "positive_1": ("CONDITIONING", ), + "negative_1": ("CONDITIONING", ), + "positive_2": 
("CONDITIONING", ), + "negative_2": ("CONDITIONING", ), + "positive_3": ("CONDITIONING", ), + "negative_3": ("CONDITIONING", ), + "mask_1": ("MASK", ), + "mask_2": ("MASK", ), + "mask_3": ("MASK", ), + "mask_1_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "mask_2_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "mask_3_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "set_cond_area": (["default", "mask bounds"],), + } + } + + RETURN_TYPES = ("CONDITIONING","CONDITIONING",) + RETURN_NAMES = ("combined_positive", "combined_negative",) + FUNCTION = "append" + CATEGORY = "KJNodes/masking/conditioning" + DESCRIPTION = """ +Bundles multiple conditioning mask and combine nodes into one,functionality is identical to ComfyUI native nodes +""" + + def append(self, positive_1, negative_1, positive_2, positive_3, negative_2, negative_3, mask_1, mask_2, mask_3, set_cond_area, mask_1_strength, mask_2_strength, mask_3_strength): + c = [] + c2 = [] + set_area_to_bounds = False + if set_cond_area != "default": + set_area_to_bounds = True + if len(mask_1.shape) < 3: + mask_1 = mask_1.unsqueeze(0) + if len(mask_2.shape) < 3: + mask_2 = mask_2.unsqueeze(0) + if len(mask_3.shape) < 3: + mask_3 = mask_3.unsqueeze(0) + for t in positive_1: + append_helper(t, mask_1, c, set_area_to_bounds, mask_1_strength) + for t in positive_2: + append_helper(t, mask_2, c, set_area_to_bounds, mask_2_strength) + for t in positive_3: + append_helper(t, mask_3, c, set_area_to_bounds, mask_3_strength) + for t in negative_1: + append_helper(t, mask_1, c2, set_area_to_bounds, mask_1_strength) + for t in negative_2: + append_helper(t, mask_2, c2, set_area_to_bounds, mask_2_strength) + for t in negative_3: + append_helper(t, mask_3, c2, set_area_to_bounds, mask_3_strength) + return (c, c2) + +class ConditioningSetMaskAndCombine4: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "positive_1": ("CONDITIONING", ), + "negative_1": ("CONDITIONING", ), + "positive_2": ("CONDITIONING", ), + "negative_2": ("CONDITIONING", ), + "positive_3": ("CONDITIONING", ), + "negative_3": ("CONDITIONING", ), + "positive_4": ("CONDITIONING", ), + "negative_4": ("CONDITIONING", ), + "mask_1": ("MASK", ), + "mask_2": ("MASK", ), + "mask_3": ("MASK", ), + "mask_4": ("MASK", ), + "mask_1_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "mask_2_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "mask_3_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "mask_4_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "set_cond_area": (["default", "mask bounds"],), + } + } + + RETURN_TYPES = ("CONDITIONING","CONDITIONING",) + RETURN_NAMES = ("combined_positive", "combined_negative",) + FUNCTION = "append" + CATEGORY = "KJNodes/masking/conditioning" + DESCRIPTION = """ +Bundles multiple conditioning mask and combine nodes into one,functionality is identical to ComfyUI native nodes +""" + + def append(self, positive_1, negative_1, positive_2, positive_3, positive_4, negative_2, negative_3, negative_4, mask_1, mask_2, mask_3, mask_4, set_cond_area, mask_1_strength, mask_2_strength, mask_3_strength, mask_4_strength): + c = [] + c2 = [] + set_area_to_bounds = False + if set_cond_area != "default": + set_area_to_bounds = True + if len(mask_1.shape) < 3: + mask_1 = mask_1.unsqueeze(0) + if len(mask_2.shape) < 3: + mask_2 = 
mask_2.unsqueeze(0) + if len(mask_3.shape) < 3: + mask_3 = mask_3.unsqueeze(0) + if len(mask_4.shape) < 3: + mask_4 = mask_4.unsqueeze(0) + for t in positive_1: + append_helper(t, mask_1, c, set_area_to_bounds, mask_1_strength) + for t in positive_2: + append_helper(t, mask_2, c, set_area_to_bounds, mask_2_strength) + for t in positive_3: + append_helper(t, mask_3, c, set_area_to_bounds, mask_3_strength) + for t in positive_4: + append_helper(t, mask_4, c, set_area_to_bounds, mask_4_strength) + for t in negative_1: + append_helper(t, mask_1, c2, set_area_to_bounds, mask_1_strength) + for t in negative_2: + append_helper(t, mask_2, c2, set_area_to_bounds, mask_2_strength) + for t in negative_3: + append_helper(t, mask_3, c2, set_area_to_bounds, mask_3_strength) + for t in negative_4: + append_helper(t, mask_4, c2, set_area_to_bounds, mask_4_strength) + return (c, c2) + +class ConditioningSetMaskAndCombine5: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "positive_1": ("CONDITIONING", ), + "negative_1": ("CONDITIONING", ), + "positive_2": ("CONDITIONING", ), + "negative_2": ("CONDITIONING", ), + "positive_3": ("CONDITIONING", ), + "negative_3": ("CONDITIONING", ), + "positive_4": ("CONDITIONING", ), + "negative_4": ("CONDITIONING", ), + "positive_5": ("CONDITIONING", ), + "negative_5": ("CONDITIONING", ), + "mask_1": ("MASK", ), + "mask_2": ("MASK", ), + "mask_3": ("MASK", ), + "mask_4": ("MASK", ), + "mask_5": ("MASK", ), + "mask_1_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "mask_2_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "mask_3_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "mask_4_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "mask_5_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "set_cond_area": (["default", "mask bounds"],), + } + } + + RETURN_TYPES = ("CONDITIONING","CONDITIONING",) + RETURN_NAMES = ("combined_positive", "combined_negative",) + FUNCTION = "append" + CATEGORY = "KJNodes/masking/conditioning" + DESCRIPTION = """ +Bundles multiple conditioning mask and combine nodes into one,functionality is identical to ComfyUI native nodes +""" + + def append(self, positive_1, negative_1, positive_2, positive_3, positive_4, positive_5, negative_2, negative_3, negative_4, negative_5, mask_1, mask_2, mask_3, mask_4, mask_5, set_cond_area, mask_1_strength, mask_2_strength, mask_3_strength, mask_4_strength, mask_5_strength): + c = [] + c2 = [] + set_area_to_bounds = False + if set_cond_area != "default": + set_area_to_bounds = True + if len(mask_1.shape) < 3: + mask_1 = mask_1.unsqueeze(0) + if len(mask_2.shape) < 3: + mask_2 = mask_2.unsqueeze(0) + if len(mask_3.shape) < 3: + mask_3 = mask_3.unsqueeze(0) + if len(mask_4.shape) < 3: + mask_4 = mask_4.unsqueeze(0) + if len(mask_5.shape) < 3: + mask_5 = mask_5.unsqueeze(0) + for t in positive_1: + append_helper(t, mask_1, c, set_area_to_bounds, mask_1_strength) + for t in positive_2: + append_helper(t, mask_2, c, set_area_to_bounds, mask_2_strength) + for t in positive_3: + append_helper(t, mask_3, c, set_area_to_bounds, mask_3_strength) + for t in positive_4: + append_helper(t, mask_4, c, set_area_to_bounds, mask_4_strength) + for t in positive_5: + append_helper(t, mask_5, c, set_area_to_bounds, mask_5_strength) + for t in negative_1: + append_helper(t, mask_1, c2, set_area_to_bounds, mask_1_strength) + for t in negative_2: + 
append_helper(t, mask_2, c2, set_area_to_bounds, mask_2_strength) + for t in negative_3: + append_helper(t, mask_3, c2, set_area_to_bounds, mask_3_strength) + for t in negative_4: + append_helper(t, mask_4, c2, set_area_to_bounds, mask_4_strength) + for t in negative_5: + append_helper(t, mask_5, c2, set_area_to_bounds, mask_5_strength) + return (c, c2) + +class VRAM_Debug: + + @classmethod + + def INPUT_TYPES(s): + return { + "required": { + + "empty_cache": ("BOOLEAN", {"default": True}), + "gc_collect": ("BOOLEAN", {"default": True}), + "unload_all_models": ("BOOLEAN", {"default": False}), + }, + "optional": { + "any_input": (any, {}), + "image_pass": ("IMAGE",), + "model_pass": ("MODEL",), + } + } + + RETURN_TYPES = (any, "IMAGE","MODEL","INT", "INT",) + RETURN_NAMES = ("any_output", "image_pass", "model_pass", "freemem_before", "freemem_after") + FUNCTION = "VRAMdebug" + CATEGORY = "KJNodes/misc" + DESCRIPTION = """ +Returns the inputs unchanged, they are only used as triggers, +and performs comfy model management functions and garbage collection, +reports free VRAM before and after the operations. +""" + + def VRAMdebug(self, gc_collect, empty_cache, unload_all_models, image_pass=None, model_pass=None, any_input=None): + freemem_before = model_management.get_free_memory() + print("VRAMdebug: free memory before: ", f"{freemem_before:,.0f}") + if empty_cache: + model_management.soft_empty_cache() + if unload_all_models: + model_management.unload_all_models() + if gc_collect: + import gc + gc.collect() + freemem_after = model_management.get_free_memory() + print("VRAMdebug: free memory after: ", f"{freemem_after:,.0f}") + print("VRAMdebug: freed memory: ", f"{freemem_after - freemem_before:,.0f}") + return {"ui": { + "text": [f"{freemem_before:,.0f}x{freemem_after:,.0f}"]}, + "result": (any_input, image_pass, model_pass, freemem_before, freemem_after) + } + +class SomethingToString: + @classmethod + + def INPUT_TYPES(s): + return { + "required": { + "input": (any, {}), + }, + "optional": { + "prefix": ("STRING", {"default": ""}), + "suffix": ("STRING", {"default": ""}), + } + } + RETURN_TYPES = ("STRING",) + FUNCTION = "stringify" + CATEGORY = "KJNodes/text" + DESCRIPTION = """ +Converts any type to a string. +""" + + def stringify(self, input, prefix="", suffix=""): + if isinstance(input, (int, float, bool)): + stringified = str(input) + elif isinstance(input, list): + stringified = ', '.join(str(item) for item in input) + else: + return + if prefix: # Check if prefix is not empty + stringified = prefix + stringified # Add the prefix + if suffix: # Check if suffix is not empty + stringified = stringified + suffix # Add the suffix + + return (stringified,) + +class Sleep: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "input": (any, {}), + "minutes": ("INT", {"default": 0, "min": 0, "max": 1439}), + "seconds": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 59.99, "step": 0.01}), + }, + } + RETURN_TYPES = (any,) + FUNCTION = "sleepdelay" + CATEGORY = "KJNodes/misc" + DESCRIPTION = """ +Delays the execution for the input amount of time. 
+""" + + def sleepdelay(self, input, minutes, seconds): + total_seconds = minutes * 60 + seconds + time.sleep(total_seconds) + return input, + +class EmptyLatentImagePresets: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "dimensions": ( + [ '512 x 512', + '768 x 512', + '960 x 512', + '1024 x 512', + '1536 x 640', + '1344 x 768', + '1216 x 832', + '1152 x 896', + '1024 x 1024', + ], + { + "default": '512 x 512' + }), + + "invert": ("BOOLEAN", {"default": False}), + "batch_size": ("INT", { + "default": 1, + "min": 1, + "max": 4096 + }), + }, + } + + RETURN_TYPES = ("LATENT", "INT", "INT") + RETURN_NAMES = ("Latent", "Width", "Height") + FUNCTION = "generate" + CATEGORY = "KJNodes" + + def generate(self, dimensions, invert, batch_size): + from nodes import EmptyLatentImage + result = [x.strip() for x in dimensions.split('x')] + + if invert: + width = int(result[1].split(' ')[0]) + height = int(result[0]) + else: + width = int(result[0]) + height = int(result[1].split(' ')[0]) + latent = EmptyLatentImage().generate(width, height, batch_size)[0] + + return (latent, int(width), int(height),) + + + +class WidgetToString: + @classmethod + def IS_CHANGED(cls, **kwargs): + return float("NaN") + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "id": ("INT", {"default": 0}), + "widget_name": ("STRING", {"multiline": False}), + "return_all": ("BOOLEAN", {"default": False}), + }, + "optional": { + "any_input": (any, {}), + "node_title": ("STRING", {"multiline": False}), + }, + "hidden": {"extra_pnginfo": "EXTRA_PNGINFO", + "prompt": "PROMPT", + "unique_id": "UNIQUE_ID",}, + } + + RETURN_TYPES = ("STRING", ) + FUNCTION = "get_widget_value" + CATEGORY = "KJNodes/text" + DESCRIPTION = """ +Selects a node and it's specified widget and outputs the value as a string. +If no node id or title is provided it will use the 'any_input' link and use that node. +To see node id's, enable node id display from Manager badge menu. +Alternatively you can search with the node title. Node titles ONLY exist if they +are manually edited! +The 'any_input' is required for making sure the node you want the value from exists in the workflow. 
+""" + + def get_widget_value(self, id, widget_name, extra_pnginfo, prompt, unique_id, return_all=False, any_input=None, node_title=""): + workflow = extra_pnginfo["workflow"] + #print(json.dumps(workflow, indent=4)) + results = [] + node_id = None # Initialize node_id to handle cases where no match is found + link_id = None + link_to_node_map = {} + + for node in workflow["nodes"]: + if node_title: + if "title" in node: + if node["title"] == node_title: + node_id = node["id"] + break + else: + print("Node title not found.") + elif id != 0: + if node["id"] == id: + node_id = id + break + elif any_input is not None: + if node["type"] == "WidgetToString" and node["id"] == int(unique_id) and not link_id: + for node_input in node["inputs"]: + if node_input["name"] == "any_input": + link_id = node_input["link"] + + # Construct a map of links to node IDs for future reference + node_outputs = node.get("outputs", None) + if not node_outputs: + continue + for output in node_outputs: + node_links = output.get("links", None) + if not node_links: + continue + for link in node_links: + link_to_node_map[link] = node["id"] + if link_id and link == link_id: + break + + if link_id: + node_id = link_to_node_map.get(link_id, None) + + if node_id is None: + raise ValueError("No matching node found for the given title or id") + + values = prompt[str(node_id)] + if "inputs" in values: + if return_all: + results.append(', '.join(f'{k}: {str(v)}' for k, v in values["inputs"].items())) + elif widget_name in values["inputs"]: + v = str(values["inputs"][widget_name]) # Convert to string here + return (v, ) + else: + raise NameError(f"Widget not found: {node_id}.{widget_name}") + if not results: + raise NameError(f"Node not found: {node_id}") + return (', '.join(results).strip(', '), ) + +class DummyOut: + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "any_input": (any, {}), + } + } + + RETURN_TYPES = (any,) + FUNCTION = "dummy" + CATEGORY = "KJNodes/misc" + OUTPUT_NODE = True + DESCRIPTION = """ +Does nothing, used to trigger generic workflow output. +A way to get previews in the UI without saving anything to disk. 
+""" + + def dummy(self, any_input): + return (any_input,) + +class FlipSigmasAdjusted: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"sigmas": ("SIGMAS", ), + "divide_by_last_sigma": ("BOOLEAN", {"default": False}), + "divide_by": ("FLOAT", {"default": 1,"min": 1, "max": 255, "step": 0.01}), + "offset_by": ("INT", {"default": 1,"min": -100, "max": 100, "step": 1}), + } + } + RETURN_TYPES = ("SIGMAS", "STRING",) + RETURN_NAMES = ("SIGMAS", "sigmas_string",) + CATEGORY = "KJNodes/noise" + FUNCTION = "get_sigmas_adjusted" + + def get_sigmas_adjusted(self, sigmas, divide_by_last_sigma, divide_by, offset_by): + + sigmas = sigmas.flip(0) + if sigmas[0] == 0: + sigmas[0] = 0.0001 + adjusted_sigmas = sigmas.clone() + #offset sigma + for i in range(1, len(sigmas)): + offset_index = i - offset_by + if 0 <= offset_index < len(sigmas): + adjusted_sigmas[i] = sigmas[offset_index] + else: + adjusted_sigmas[i] = 0.0001 + if adjusted_sigmas[0] == 0: + adjusted_sigmas[0] = 0.0001 + if divide_by_last_sigma: + adjusted_sigmas = adjusted_sigmas / adjusted_sigmas[-1] + + sigma_np_array = adjusted_sigmas.numpy() + array_string = np.array2string(sigma_np_array, precision=2, separator=', ', threshold=np.inf) + adjusted_sigmas = adjusted_sigmas / divide_by + return (adjusted_sigmas, array_string,) + +class CustomSigmas: + @classmethod + def INPUT_TYPES(s): + return {"required": + { + "sigmas_string" :("STRING", {"default": "14.615, 6.475, 3.861, 2.697, 1.886, 1.396, 0.963, 0.652, 0.399, 0.152, 0.029","multiline": True}), + "interpolate_to_steps": ("INT", {"default": 10,"min": 0, "max": 255, "step": 1}), + } + } + RETURN_TYPES = ("SIGMAS",) + RETURN_NAMES = ("SIGMAS",) + CATEGORY = "KJNodes/noise" + FUNCTION = "customsigmas" + DESCRIPTION = """ +Creates a sigmas tensor from a string of comma separated values. +Examples: + +Nvidia's optimized AYS 10 step schedule for SD 1.5: +14.615, 6.475, 3.861, 2.697, 1.886, 1.396, 0.963, 0.652, 0.399, 0.152, 0.029 +SDXL: +14.615, 6.315, 3.771, 2.181, 1.342, 0.862, 0.555, 0.380, 0.234, 0.113, 0.029 +SVD: +700.00, 54.5, 15.886, 7.977, 4.248, 1.789, 0.981, 0.403, 0.173, 0.034, 0.002 +""" + def customsigmas(self, sigmas_string, interpolate_to_steps): + sigmas_list = sigmas_string.split(', ') + sigmas_float_list = [float(sigma) for sigma in sigmas_list] + sigmas_tensor = torch.FloatTensor(sigmas_float_list) + if len(sigmas_tensor) != interpolate_to_steps + 1: + sigmas_tensor = self.loglinear_interp(sigmas_tensor, interpolate_to_steps + 1) + sigmas_tensor[-1] = 0 + return (sigmas_tensor.float(),) + + def loglinear_interp(self, t_steps, num_steps): + """ + Performs log-linear interpolation of a given array of decreasing numbers. 
+ """ + t_steps_np = t_steps.numpy() + + xs = np.linspace(0, 1, len(t_steps_np)) + ys = np.log(t_steps_np[::-1]) + + new_xs = np.linspace(0, 1, num_steps) + new_ys = np.interp(new_xs, xs, ys) + + interped_ys = np.exp(new_ys)[::-1].copy() + interped_ys_tensor = torch.tensor(interped_ys) + return interped_ys_tensor + + +class InjectNoiseToLatent: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "latents":("LATENT",), + "strength": ("FLOAT", {"default": 0.1, "min": 0.0, "max": 200.0, "step": 0.0001}), + "noise": ("LATENT",), + "normalize": ("BOOLEAN", {"default": False}), + "average": ("BOOLEAN", {"default": False}), + }, + "optional":{ + "mask": ("MASK", ), + "mix_randn_amount": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1000.0, "step": 0.001}), + "seed": ("INT", {"default": 123,"min": 0, "max": 0xffffffffffffffff, "step": 1}), + } + } + + RETURN_TYPES = ("LATENT",) + FUNCTION = "injectnoise" + CATEGORY = "KJNodes/noise" + + def injectnoise(self, latents, strength, noise, normalize, average, mix_randn_amount=0, seed=None, mask=None): + samples = latents.copy() + if latents["samples"].shape != noise["samples"].shape: + raise ValueError("InjectNoiseToLatent: Latent and noise must have the same shape") + if average: + noised = (samples["samples"].clone() + noise["samples"].clone()) / 2 + else: + noised = samples["samples"].clone() + noise["samples"].clone() * strength + if normalize: + noised = noised / noised.std() + if mask is not None: + mask = torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(noised.shape[2], noised.shape[3]), mode="bilinear") + mask = mask.expand((-1,noised.shape[1],-1,-1)) + if mask.shape[0] < noised.shape[0]: + mask = mask.repeat((noised.shape[0] -1) // mask.shape[0] + 1, 1, 1, 1)[:noised.shape[0]] + noised = mask * noised + (1-mask) * latents["samples"] + if mix_randn_amount > 0: + if seed is not None: + generator = torch.manual_seed(seed) + rand_noise = torch.randn(noised.size(), dtype=noised.dtype, layout=noised.layout, generator=generator, device="cpu") + noised = noised + (mix_randn_amount * rand_noise) + samples["samples"] = noised + return (samples,) + +class SoundReactive: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "sound_level": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 99999, "step": 0.01}), + "start_range_hz": ("INT", {"default": 150, "min": 0, "max": 9999, "step": 1}), + "end_range_hz": ("INT", {"default": 2000, "min": 0, "max": 9999, "step": 1}), + "multiplier": ("FLOAT", {"default": 1.0, "min": 0.01, "max": 99999, "step": 0.01}), + "smoothing_factor": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "normalize": ("BOOLEAN", {"default": False}), + }, + } + + RETURN_TYPES = ("FLOAT","INT",) + RETURN_NAMES =("sound_level", "sound_level_int",) + FUNCTION = "react" + CATEGORY = "KJNodes/audio" + DESCRIPTION = """ +Reacts to the sound level of the input. +Uses your browsers sound input options and requires. +Meant to be used with realtime diffusion with autoqueue. 
+""" + + def react(self, sound_level, start_range_hz, end_range_hz, smoothing_factor, multiplier, normalize): + + sound_level *= multiplier + + if normalize: + sound_level /= 255 + + sound_level_int = int(sound_level) + return (sound_level, sound_level_int, ) + +class GenerateNoise: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "width": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "height": ("INT", {"default": 512,"min": 16, "max": 4096, "step": 1}), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096}), + "seed": ("INT", {"default": 123,"min": 0, "max": 0xffffffffffffffff, "step": 1}), + "multiplier": ("FLOAT", {"default": 1.0,"min": 0.0, "max": 4096, "step": 0.01}), + "constant_batch_noise": ("BOOLEAN", {"default": False}), + "normalize": ("BOOLEAN", {"default": False}), + }, + "optional": { + "model": ("MODEL", ), + "sigmas": ("SIGMAS", ), + "latent_channels": ( + [ '4', + '16', + ], + ), + } + } + + RETURN_TYPES = ("LATENT",) + FUNCTION = "generatenoise" + CATEGORY = "KJNodes/noise" + DESCRIPTION = """ +Generates noise for injection or to be used as empty latents on samplers with add_noise off. +""" + + def generatenoise(self, batch_size, width, height, seed, multiplier, constant_batch_noise, normalize, sigmas=None, model=None, latent_channels=4): + + generator = torch.manual_seed(seed) + noise = torch.randn([batch_size, int(latent_channels), height // 8, width // 8], dtype=torch.float32, layout=torch.strided, generator=generator, device="cpu") + if sigmas is not None: + sigma = sigmas[0] - sigmas[-1] + sigma /= model.model.latent_format.scale_factor + noise *= sigma + + noise *=multiplier + + if normalize: + noise = noise / noise.std() + if constant_batch_noise: + noise = noise[0].repeat(batch_size, 1, 1, 1) + + + return ({"samples":noise}, ) + +def camera_embeddings(elevation, azimuth): + elevation = torch.as_tensor([elevation]) + azimuth = torch.as_tensor([azimuth]) + embeddings = torch.stack( + [ + torch.deg2rad( + (90 - elevation) - (90) + ), # Zero123 polar is 90-elevation + torch.sin(torch.deg2rad(azimuth)), + torch.cos(torch.deg2rad(azimuth)), + torch.deg2rad( + 90 - torch.full_like(elevation, 0) + ), + ], dim=-1).unsqueeze(1) + + return embeddings + +def interpolate_angle(start, end, fraction): + # Calculate the difference in angles and adjust for wraparound if necessary + diff = (end - start + 540) % 360 - 180 + # Apply fraction to the difference + interpolated = start + fraction * diff + # Normalize the result to be within the range of -180 to 180 + return (interpolated + 180) % 360 - 180 + + +class StableZero123_BatchSchedule: + @classmethod + def INPUT_TYPES(s): + return {"required": { "clip_vision": ("CLIP_VISION",), + "init_image": ("IMAGE",), + "vae": ("VAE",), + "width": ("INT", {"default": 256, "min": 16, "max": MAX_RESOLUTION, "step": 8}), + "height": ("INT", {"default": 256, "min": 16, "max": MAX_RESOLUTION, "step": 8}), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096}), + "interpolation": (["linear", "ease_in", "ease_out", "ease_in_out"],), + "azimuth_points_string": ("STRING", {"default": "0:(0.0),\n7:(1.0),\n15:(0.0)\n", "multiline": True}), + "elevation_points_string": ("STRING", {"default": "0:(0.0),\n7:(0.0),\n15:(0.0)\n", "multiline": True}), + }} + + RETURN_TYPES = ("CONDITIONING", "CONDITIONING", "LATENT") + RETURN_NAMES = ("positive", "negative", "latent") + FUNCTION = "encode" + CATEGORY = "KJNodes/experimental" + + def encode(self, clip_vision, init_image, vae, width, height, batch_size, 
azimuth_points_string, elevation_points_string, interpolation): + output = clip_vision.encode_image(init_image) + pooled = output.image_embeds.unsqueeze(0) + pixels = common_upscale(init_image.movedim(-1,1), width, height, "bilinear", "center").movedim(1,-1) + encode_pixels = pixels[:,:,:,:3] + t = vae.encode(encode_pixels) + + def ease_in(t): + return t * t + def ease_out(t): + return 1 - (1 - t) * (1 - t) + def ease_in_out(t): + return 3 * t * t - 2 * t * t * t + + # Parse the azimuth input string into a list of tuples + azimuth_points = [] + azimuth_points_string = azimuth_points_string.rstrip(',\n') + for point_str in azimuth_points_string.split(','): + frame_str, azimuth_str = point_str.split(':') + frame = int(frame_str.strip()) + azimuth = float(azimuth_str.strip()[1:-1]) + azimuth_points.append((frame, azimuth)) + # Sort the points by frame number + azimuth_points.sort(key=lambda x: x[0]) + + # Parse the elevation input string into a list of tuples + elevation_points = [] + elevation_points_string = elevation_points_string.rstrip(',\n') + for point_str in elevation_points_string.split(','): + frame_str, elevation_str = point_str.split(':') + frame = int(frame_str.strip()) + elevation_val = float(elevation_str.strip()[1:-1]) + elevation_points.append((frame, elevation_val)) + # Sort the points by frame number + elevation_points.sort(key=lambda x: x[0]) + + # Index of the next point to interpolate towards + next_point = 1 + next_elevation_point = 1 + + positive_cond_out = [] + positive_pooled_out = [] + negative_cond_out = [] + negative_pooled_out = [] + + #azimuth interpolation + for i in range(batch_size): + # Find the interpolated azimuth for the current frame + while next_point < len(azimuth_points) and i >= azimuth_points[next_point][0]: + next_point += 1 + # If next_point is equal to the length of points, we've gone past the last point + if next_point == len(azimuth_points): + next_point -= 1 # Set next_point to the last index of points + prev_point = max(next_point - 1, 0) # Ensure prev_point is not less than 0 + + # Calculate fraction + if azimuth_points[next_point][0] != azimuth_points[prev_point][0]: # Prevent division by zero + fraction = (i - azimuth_points[prev_point][0]) / (azimuth_points[next_point][0] - azimuth_points[prev_point][0]) + if interpolation == "ease_in": + fraction = ease_in(fraction) + elif interpolation == "ease_out": + fraction = ease_out(fraction) + elif interpolation == "ease_in_out": + fraction = ease_in_out(fraction) + + # Use the new interpolate_angle function + interpolated_azimuth = interpolate_angle(azimuth_points[prev_point][1], azimuth_points[next_point][1], fraction) + else: + interpolated_azimuth = azimuth_points[prev_point][1] + # Interpolate the elevation + next_elevation_point = 1 + while next_elevation_point < len(elevation_points) and i >= elevation_points[next_elevation_point][0]: + next_elevation_point += 1 + if next_elevation_point == len(elevation_points): + next_elevation_point -= 1 + prev_elevation_point = max(next_elevation_point - 1, 0) + + if elevation_points[next_elevation_point][0] != elevation_points[prev_elevation_point][0]: + fraction = (i - elevation_points[prev_elevation_point][0]) / (elevation_points[next_elevation_point][0] - elevation_points[prev_elevation_point][0]) + if interpolation == "ease_in": + fraction = ease_in(fraction) + elif interpolation == "ease_out": + fraction = ease_out(fraction) + elif interpolation == "ease_in_out": + fraction = ease_in_out(fraction) + + interpolated_elevation = 
interpolate_angle(elevation_points[prev_elevation_point][1], elevation_points[next_elevation_point][1], fraction) + else: + interpolated_elevation = elevation_points[prev_elevation_point][1] + + cam_embeds = camera_embeddings(interpolated_elevation, interpolated_azimuth) + cond = torch.cat([pooled, cam_embeds.repeat((pooled.shape[0], 1, 1))], dim=-1) + + positive_pooled_out.append(t) + positive_cond_out.append(cond) + negative_pooled_out.append(torch.zeros_like(t)) + negative_cond_out.append(torch.zeros_like(pooled)) + + # Concatenate the conditions and pooled outputs + final_positive_cond = torch.cat(positive_cond_out, dim=0) + final_positive_pooled = torch.cat(positive_pooled_out, dim=0) + final_negative_cond = torch.cat(negative_cond_out, dim=0) + final_negative_pooled = torch.cat(negative_pooled_out, dim=0) + + # Structure the final output + final_positive = [[final_positive_cond, {"concat_latent_image": final_positive_pooled}]] + final_negative = [[final_negative_cond, {"concat_latent_image": final_negative_pooled}]] + + latent = torch.zeros([batch_size, 4, height // 8, width // 8]) + return (final_positive, final_negative, {"samples": latent}) + +def linear_interpolate(start, end, fraction): + return start + (end - start) * fraction + +class SV3D_BatchSchedule: + @classmethod + def INPUT_TYPES(s): + return {"required": { "clip_vision": ("CLIP_VISION",), + "init_image": ("IMAGE",), + "vae": ("VAE",), + "width": ("INT", {"default": 576, "min": 16, "max": MAX_RESOLUTION, "step": 8}), + "height": ("INT", {"default": 576, "min": 16, "max": MAX_RESOLUTION, "step": 8}), + "batch_size": ("INT", {"default": 21, "min": 1, "max": 4096}), + "interpolation": (["linear", "ease_in", "ease_out", "ease_in_out"],), + "azimuth_points_string": ("STRING", {"default": "0:(0.0),\n9:(180.0),\n20:(360.0)\n", "multiline": True}), + "elevation_points_string": ("STRING", {"default": "0:(0.0),\n9:(0.0),\n20:(0.0)\n", "multiline": True}), + }} + + RETURN_TYPES = ("CONDITIONING", "CONDITIONING", "LATENT") + RETURN_NAMES = ("positive", "negative", "latent") + FUNCTION = "encode" + CATEGORY = "KJNodes/experimental" + DESCRIPTION = """ +Allow scheduling of the azimuth and elevation conditions for SV3D. 
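+
+An illustrative sketch (not part of the node code) of how a schedule string
+such as "0:(0.0), 9:(180.0), 20:(360.0)" is parsed by encode() below into
+(frame, value) points that are then interpolated per batch index:
+
+    points = []
+    for point_str in "0:(0.0), 9:(180.0), 20:(360.0)".rstrip(',\n').split(','):
+        frame_str, value_str = point_str.split(':')
+        points.append((int(frame_str.strip()), float(value_str.strip()[1:-1])))
+    points.sort(key=lambda x: x[0])   # [(0, 0.0), (9, 180.0), (20, 360.0)]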
+Note that SV3D is still a video model and the schedule needs to always go forward +https://huggingface.co/stabilityai/sv3d +""" + + def encode(self, clip_vision, init_image, vae, width, height, batch_size, azimuth_points_string, elevation_points_string, interpolation): + output = clip_vision.encode_image(init_image) + pooled = output.image_embeds.unsqueeze(0) + pixels = common_upscale(init_image.movedim(-1,1), width, height, "bilinear", "center").movedim(1,-1) + encode_pixels = pixels[:,:,:,:3] + t = vae.encode(encode_pixels) + + def ease_in(t): + return t * t + def ease_out(t): + return 1 - (1 - t) * (1 - t) + def ease_in_out(t): + return 3 * t * t - 2 * t * t * t + + # Parse the azimuth input string into a list of tuples + azimuth_points = [] + azimuth_points_string = azimuth_points_string.rstrip(',\n') + for point_str in azimuth_points_string.split(','): + frame_str, azimuth_str = point_str.split(':') + frame = int(frame_str.strip()) + azimuth = float(azimuth_str.strip()[1:-1]) + azimuth_points.append((frame, azimuth)) + # Sort the points by frame number + azimuth_points.sort(key=lambda x: x[0]) + + # Parse the elevation input string into a list of tuples + elevation_points = [] + elevation_points_string = elevation_points_string.rstrip(',\n') + for point_str in elevation_points_string.split(','): + frame_str, elevation_str = point_str.split(':') + frame = int(frame_str.strip()) + elevation_val = float(elevation_str.strip()[1:-1]) + elevation_points.append((frame, elevation_val)) + # Sort the points by frame number + elevation_points.sort(key=lambda x: x[0]) + + # Index of the next point to interpolate towards + next_point = 1 + next_elevation_point = 1 + elevations = [] + azimuths = [] + # For azimuth interpolation + for i in range(batch_size): + # Find the interpolated azimuth for the current frame + while next_point < len(azimuth_points) and i >= azimuth_points[next_point][0]: + next_point += 1 + if next_point == len(azimuth_points): + next_point -= 1 + prev_point = max(next_point - 1, 0) + + if azimuth_points[next_point][0] != azimuth_points[prev_point][0]: + fraction = (i - azimuth_points[prev_point][0]) / (azimuth_points[next_point][0] - azimuth_points[prev_point][0]) + # Apply the ease function to the fraction + if interpolation == "ease_in": + fraction = ease_in(fraction) + elif interpolation == "ease_out": + fraction = ease_out(fraction) + elif interpolation == "ease_in_out": + fraction = ease_in_out(fraction) + + interpolated_azimuth = linear_interpolate(azimuth_points[prev_point][1], azimuth_points[next_point][1], fraction) + else: + interpolated_azimuth = azimuth_points[prev_point][1] + + # Interpolate the elevation + next_elevation_point = 1 + while next_elevation_point < len(elevation_points) and i >= elevation_points[next_elevation_point][0]: + next_elevation_point += 1 + if next_elevation_point == len(elevation_points): + next_elevation_point -= 1 + prev_elevation_point = max(next_elevation_point - 1, 0) + + if elevation_points[next_elevation_point][0] != elevation_points[prev_elevation_point][0]: + fraction = (i - elevation_points[prev_elevation_point][0]) / (elevation_points[next_elevation_point][0] - elevation_points[prev_elevation_point][0]) + # Apply the ease function to the fraction + if interpolation == "ease_in": + fraction = ease_in(fraction) + elif interpolation == "ease_out": + fraction = ease_out(fraction) + elif interpolation == "ease_in_out": + fraction = ease_in_out(fraction) + + interpolated_elevation = 
linear_interpolate(elevation_points[prev_elevation_point][1], elevation_points[next_elevation_point][1], fraction) + else: + interpolated_elevation = elevation_points[prev_elevation_point][1] + + azimuths.append(interpolated_azimuth) + elevations.append(interpolated_elevation) + + #print("azimuths", azimuths) + #print("elevations", elevations) + + # Structure the final output + final_positive = [[pooled, {"concat_latent_image": t, "elevation": elevations, "azimuth": azimuths}]] + final_negative = [[torch.zeros_like(pooled), {"concat_latent_image": torch.zeros_like(t),"elevation": elevations, "azimuth": azimuths}]] + + latent = torch.zeros([batch_size, 4, height // 8, width // 8]) + return (final_positive, final_negative, {"samples": latent}) + +class LoadResAdapterNormalization: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL",), + "resadapter_path": (folder_paths.get_filename_list("checkpoints"), ) + } + } + + RETURN_TYPES = ("MODEL",) + FUNCTION = "load_res_adapter" + CATEGORY = "KJNodes/experimental" + + def load_res_adapter(self, model, resadapter_path): + print("ResAdapter: Checking ResAdapter path") + resadapter_full_path = folder_paths.get_full_path("checkpoints", resadapter_path) + if not os.path.exists(resadapter_full_path): + raise Exception("Invalid model path") + else: + print("ResAdapter: Loading ResAdapter normalization weights") + from comfy.utils import load_torch_file + prefix_to_remove = 'diffusion_model.' + model_clone = model.clone() + norm_state_dict = load_torch_file(resadapter_full_path) + new_values = {key[len(prefix_to_remove):]: value for key, value in norm_state_dict.items() if key.startswith(prefix_to_remove)} + print("ResAdapter: Attempting to add patches with ResAdapter weights") + try: + for key in model.model.diffusion_model.state_dict().keys(): + if key in new_values: + original_tensor = model.model.diffusion_model.state_dict()[key] + new_tensor = new_values[key].to(model.model.diffusion_model.dtype) + if original_tensor.shape == new_tensor.shape: + model_clone.add_object_patch(f"diffusion_model.{key}.data", new_tensor) + else: + print("ResAdapter: No match for key: ",key) + except: + raise Exception("Could not patch model, this way of patching was added to ComfyUI on March 3rd 2024, is your ComfyUI up to date?") + print("ResAdapter: Added resnet normalization patches") + return (model_clone, ) + +class Superprompt: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "instruction_prompt": ("STRING", {"default": 'Expand the following prompt to add more detail', "multiline": True}), + "prompt": ("STRING", {"default": '', "multiline": True, "forceInput": True}), + "max_new_tokens": ("INT", {"default": 128, "min": 1, "max": 4096, "step": 1}), + } + } + + RETURN_TYPES = ("STRING",) + FUNCTION = "process" + CATEGORY = "KJNodes/text" + DESCRIPTION = """ +# SuperPrompt +A T5 model fine-tuned on the SuperPrompt dataset for +upsampling text prompts to more detailed descriptions. +Meant to be used as a pre-generation step for text-to-image +models that benefit from more detailed prompts. 
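+
+A rough sketch (not part of the node code) of how process() below assembles
+the model input, assuming the default instruction:
+
+    instruction_prompt = "Expand the following prompt to add more detail"
+    prompt = "a photo of a cat"
+    input_text = instruction_prompt + ": " + prompt
+    # -> "Expand the following prompt to add more detail: a photo of a cat"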
+https://huggingface.co/roborovski/superprompt-v1 +""" + + def process(self, instruction_prompt, prompt, max_new_tokens): + device = model_management.get_torch_device() + from transformers import T5Tokenizer, T5ForConditionalGeneration + + checkpoint_path = os.path.join(script_directory, "models","superprompt-v1") + if not os.path.exists(checkpoint_path): + print(f"Downloading model to: {checkpoint_path}") + from huggingface_hub import snapshot_download + snapshot_download(repo_id="roborovski/superprompt-v1", + local_dir=checkpoint_path, + local_dir_use_symlinks=False) + tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-small", legacy=False) + + model = T5ForConditionalGeneration.from_pretrained(checkpoint_path, device_map=device) + model.to(device) + input_text = instruction_prompt + ": " + prompt + + input_ids = tokenizer(input_text, return_tensors="pt").input_ids.to(device) + outputs = model.generate(input_ids, max_new_tokens=max_new_tokens) + out = (tokenizer.decode(outputs[0])) + out = out.replace('', '') + out = out.replace('', '') + + return (out, ) + + +class CameraPoseVisualizer: + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "pose_file_path": ("STRING", {"default": '', "multiline": False}), + "base_xval": ("FLOAT", {"default": 0.2,"min": 0, "max": 100, "step": 0.01}), + "zval": ("FLOAT", {"default": 0.3,"min": 0, "max": 100, "step": 0.01}), + "scale": ("FLOAT", {"default": 1.0,"min": 0.01, "max": 10.0, "step": 0.01}), + "use_exact_fx": ("BOOLEAN", {"default": False}), + "relative_c2w": ("BOOLEAN", {"default": True}), + "use_viewer": ("BOOLEAN", {"default": False}), + }, + "optional": { + "cameractrl_poses": ("CAMERACTRL_POSES", {"default": None}), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "plot" + CATEGORY = "KJNodes/misc" + DESCRIPTION = """ +Visualizes the camera poses, from Animatediff-Evolved CameraCtrl Pose +or a .txt file with RealEstate camera intrinsics and coordinates, in a 3D plot. 
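+
+An illustrative sketch (not part of the node code) of how plot() below reads a
+RealEstate-style pose line: field 1 is taken as fx and fields 7 onward as the
+flattened 3x4 world-to-camera matrix (the example values are made up):
+
+    import numpy as np
+    line = "0 0.58 0.58 0.5 0.5 0 0 1 0 0 0 0 1 0 0 0 0 1 0"
+    parts = line.strip().split(' ')
+    fx = float(parts[1])                                            # 0.58
+    w2c = np.asarray([float(p) for p in parts[7:]]).reshape(3, 4)   # 3x4 matrix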
+""" + + def plot(self, pose_file_path, scale, base_xval, zval, use_exact_fx, relative_c2w, use_viewer, cameractrl_poses=None): + import matplotlib as mpl + import matplotlib.pyplot as plt + from torchvision.transforms import ToTensor + + x_min = -2.0 * scale + x_max = 2.0 * scale + y_min = -2.0 * scale + y_max = 2.0 * scale + z_min = -2.0 * scale + z_max = 2.0 * scale + plt.rcParams['text.color'] = '#999999' + self.fig = plt.figure(figsize=(18, 7)) + self.fig.patch.set_facecolor('#353535') + self.ax = self.fig.add_subplot(projection='3d') + self.ax.set_facecolor('#353535') # Set the background color here + self.ax.grid(color='#999999', linestyle='-', linewidth=0.5) + self.plotly_data = None # plotly data traces + self.ax.set_aspect("auto") + self.ax.set_xlim(x_min, x_max) + self.ax.set_ylim(y_min, y_max) + self.ax.set_zlim(z_min, z_max) + self.ax.set_xlabel('x', color='#999999') + self.ax.set_ylabel('y', color='#999999') + self.ax.set_zlabel('z', color='#999999') + for text in self.ax.get_xticklabels() + self.ax.get_yticklabels() + self.ax.get_zticklabels(): + text.set_color('#999999') + print('initialize camera pose visualizer') + + if pose_file_path != "": + with open(pose_file_path, 'r') as f: + poses = f.readlines() + w2cs = [np.asarray([float(p) for p in pose.strip().split(' ')[7:]]).reshape(3, 4) for pose in poses[1:]] + fxs = [float(pose.strip().split(' ')[1]) for pose in poses[1:]] + #print(poses) + elif cameractrl_poses is not None: + poses = cameractrl_poses + w2cs = [np.array(pose[7:]).reshape(3, 4) for pose in cameractrl_poses] + fxs = [pose[1] for pose in cameractrl_poses] + else: + raise ValueError("Please provide either pose_file_path or cameractrl_poses") + + total_frames = len(w2cs) + transform_matrix = np.asarray([[1, 0, 0, 0], [0, 0, 1, 0], [0, -1, 0, 0], [0, 0, 0, 1]]).reshape(4, 4) + last_row = np.zeros((1, 4)) + last_row[0, -1] = 1.0 + + w2cs = [np.concatenate((w2c, last_row), axis=0) for w2c in w2cs] + c2ws = self.get_c2w(w2cs, transform_matrix, relative_c2w) + + for frame_idx, c2w in enumerate(c2ws): + self.extrinsic2pyramid(c2w, frame_idx / total_frames, hw_ratio=1/1, base_xval=base_xval, + zval=(fxs[frame_idx] if use_exact_fx else zval)) + + # Create the colorbar + cmap = mpl.cm.rainbow + norm = mpl.colors.Normalize(vmin=0, vmax=total_frames) + colorbar = self.fig.colorbar(mpl.cm.ScalarMappable(norm=norm, cmap=cmap), ax=self.ax, orientation='vertical') + + # Change the colorbar label + colorbar.set_label('Frame', color='#999999') # Change the label and its color + + # Change the tick colors + colorbar.ax.yaxis.set_tick_params(colors='#999999') # Change the tick color + + # Change the tick frequency + # Assuming you want to set the ticks at every 10th frame + ticks = np.arange(0, total_frames, 10) + colorbar.ax.yaxis.set_ticks(ticks) + + plt.title('') + plt.draw() + buf = io.BytesIO() + plt.savefig(buf, format='png', bbox_inches='tight', pad_inches=0) + buf.seek(0) + img = Image.open(buf) + tensor_img = ToTensor()(img) + buf.close() + tensor_img = tensor_img.permute(1, 2, 0).unsqueeze(0) + if use_viewer: + time.sleep(1) + plt.show() + return (tensor_img,) + + def extrinsic2pyramid(self, extrinsic, color_map='red', hw_ratio=1/1, base_xval=1, zval=3): + from mpl_toolkits.mplot3d.art3d import Poly3DCollection + vertex_std = np.array([[0, 0, 0, 1], + [base_xval, -base_xval * hw_ratio, zval, 1], + [base_xval, base_xval * hw_ratio, zval, 1], + [-base_xval, base_xval * hw_ratio, zval, 1], + [-base_xval, -base_xval * hw_ratio, zval, 1]]) + vertex_transformed = vertex_std 
@ extrinsic.T + meshes = [[vertex_transformed[0, :-1], vertex_transformed[1][:-1], vertex_transformed[2, :-1]], + [vertex_transformed[0, :-1], vertex_transformed[2, :-1], vertex_transformed[3, :-1]], + [vertex_transformed[0, :-1], vertex_transformed[3, :-1], vertex_transformed[4, :-1]], + [vertex_transformed[0, :-1], vertex_transformed[4, :-1], vertex_transformed[1, :-1]], + [vertex_transformed[1, :-1], vertex_transformed[2, :-1], vertex_transformed[3, :-1], vertex_transformed[4, :-1]]] + + color = color_map if isinstance(color_map, str) else plt.cm.rainbow(color_map) + + self.ax.add_collection3d( + Poly3DCollection(meshes, facecolors=color, linewidths=0.3, edgecolors=color, alpha=0.25)) + + def customize_legend(self, list_label): + from matplotlib.patches import Patch + import matplotlib.pyplot as plt + list_handle = [] + for idx, label in enumerate(list_label): + color = plt.cm.rainbow(idx / len(list_label)) + patch = Patch(color=color, label=label) + list_handle.append(patch) + plt.legend(loc='right', bbox_to_anchor=(1.8, 0.5), handles=list_handle) + + def get_c2w(self, w2cs, transform_matrix, relative_c2w): + if relative_c2w: + target_cam_c2w = np.array([ + [1, 0, 0, 0], + [0, 1, 0, 0], + [0, 0, 1, 0], + [0, 0, 0, 1] + ]) + abs2rel = target_cam_c2w @ w2cs[0] + ret_poses = [target_cam_c2w, ] + [abs2rel @ np.linalg.inv(w2c) for w2c in w2cs[1:]] + else: + ret_poses = [np.linalg.inv(w2c) for w2c in w2cs] + ret_poses = [transform_matrix @ x for x in ret_poses] + return np.array(ret_poses, dtype=np.float32) + + + +class StabilityAPI_SD3: + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "prompt": ("STRING", {"multiline": True}), + "n_prompt": ("STRING", {"multiline": True}), + "seed": ("INT", {"default": 123,"min": 0, "max": 4294967294, "step": 1}), + "model": ( + [ + 'sd3', + 'sd3-turbo', + ], + { + "default": 'sd3' + }), + "aspect_ratio": ( + [ + '1:1', + '16:9', + '21:9', + '2:3', + '3:2', + '4:5', + '5:4', + '9:16', + '9:21', + ], + { + "default": '1:1' + }), + "output_format": ( + [ + 'png', + 'jpeg', + ], + { + "default": 'jpeg' + }), + }, + "optional": { + "api_key": ("STRING", {"multiline": True}), + "image": ("IMAGE",), + "img2img_strength": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "disable_metadata": ("BOOLEAN", {"default": True}), + }, + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "apicall" + + CATEGORY = "KJNodes/experimental" + DESCRIPTION = """ +## Calls StabilityAI API + +Although you may have multiple keys in your account, +you should use the same key for all requests to this API. + +Get your API key here: https://platform.stability.ai/account/keys +Recommended to set the key in the config.json -file under this +node packs folder. +# WARNING: +Otherwise the API key may get saved in the image metadata even +with "disable_metadata" on if the workflow includes save nodes +separate from this node. 
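+
+A minimal sketch (the relative path is assumed, from ComfyUI's custom_nodes
+folder) of creating the config.json that apicall() below falls back to when
+the api_key input is left empty; only the "sai_api_key" field is read:
+
+    import json
+    with open("ComfyUI-KJNodes/config.json", "w") as f:
+        json.dump({"sai_api_key": "YOUR-STABILITY-API-KEY"}, f)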
+ +sd3 requires 6.5 credits per generation +sd3-turbo requires 4 credits per generation + +If no image is provided, mode is set to text-to-image + +""" + + def apicall(self, prompt, n_prompt, model, seed, aspect_ratio, output_format, + img2img_strength=0.5, image=None, disable_metadata=True, api_key=""): + from comfy.cli_args import args + if disable_metadata: + args.disable_metadata = True + else: + args.disable_metadata = False + + import requests + from torchvision import transforms + + data = { + "mode": "text-to-image", + "prompt": prompt, + "model": model, + "seed": seed, + "output_format": output_format + } + + if image is not None: + image = image.permute(0, 3, 1, 2).squeeze(0) + to_pil = transforms.ToPILImage() + pil_image = to_pil(image) + # Save the PIL Image to a BytesIO object + buffer = io.BytesIO() + pil_image.save(buffer, format='PNG') + buffer.seek(0) + files = {"image": ("image.png", buffer, "image/png")} + + data["mode"] = "image-to-image" + data["image"] = pil_image + data["strength"] = img2img_strength + else: + data["aspect_ratio"] = aspect_ratio, + files = {"none": ''} + + if model != "sd3-turbo": + data["negative_prompt"] = n_prompt + + headers={ + "accept": "image/*" + } + + if api_key != "": + headers["authorization"] = api_key + else: + config_file_path = os.path.join(script_directory,"config.json") + with open(config_file_path, 'r') as file: + config = json.load(file) + api_key_from_config = config.get("sai_api_key") + headers["authorization"] = api_key_from_config + + response = requests.post( + f"https://api.stability.ai/v2beta/stable-image/generate/sd3", + headers=headers, + files = files, + data = data, + ) + + if response.status_code == 200: + # Convert the response content to a PIL Image + image = Image.open(io.BytesIO(response.content)) + # Convert the PIL Image to a PyTorch tensor + transform = transforms.ToTensor() + tensor_image = transform(image) + tensor_image = tensor_image.unsqueeze(0) + tensor_image = tensor_image.permute(0, 2, 3, 1).cpu().float() + return (tensor_image,) + else: + try: + # Attempt to parse the response as JSON + error_data = response.json() + raise Exception(f"Server error: {error_data}") + except json.JSONDecodeError: + # If the response is not valid JSON, raise a different exception + raise Exception(f"Server error: {response.text}") + +class CheckpointPerturbWeights: + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "model": ("MODEL",), + "joint_blocks": ("FLOAT", {"default": 0.02, "min": 0.001, "max": 10.0, "step": 0.001}), + "final_layer": ("FLOAT", {"default": 0.02, "min": 0.001, "max": 10.0, "step": 0.001}), + "rest_of_the_blocks": ("FLOAT", {"default": 0.02, "min": 0.001, "max": 10.0, "step": 0.001}), + "seed": ("INT", {"default": 123,"min": 0, "max": 0xffffffffffffffff, "step": 1}), + } + } + RETURN_TYPES = ("MODEL",) + FUNCTION = "mod" + OUTPUT_NODE = True + + CATEGORY = "KJNodes/experimental" + + def mod(self, seed, model, joint_blocks, final_layer, rest_of_the_blocks): + import copy + torch.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + device = model_management.get_torch_device() + model_copy = copy.deepcopy(model) + model_copy.model.to(device) + keys = model_copy.model.diffusion_model.state_dict().keys() + + dict = {} + for key in keys: + dict[key] = model_copy.model.diffusion_model.state_dict()[key] + + pbar = ProgressBar(len(keys)) + for k in keys: + v = dict[k] + print(f'{k}: {v.std()}') + if k.startswith('joint_blocks'): + multiplier = joint_blocks + elif k.startswith('final_layer'): + 
multiplier = final_layer + else: + multiplier = rest_of_the_blocks + dict[k] += torch.normal(torch.zeros_like(v) * v.mean(), torch.ones_like(v) * v.std() * multiplier).to(device) + pbar.update(1) + model_copy.model.diffusion_model.load_state_dict(dict) + return model_copy, + +class DifferentialDiffusionAdvanced(): + @classmethod + def INPUT_TYPES(s): + return {"required": { + "model": ("MODEL", ), + "samples": ("LATENT",), + "mask": ("MASK",), + "multiplier": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.001}), + }} + RETURN_TYPES = ("MODEL", "LATENT") + FUNCTION = "apply" + CATEGORY = "_for_testing" + INIT = False + + def apply(self, model, samples, mask, multiplier): + self.multiplier = multiplier + model = model.clone() + model.set_model_denoise_mask_function(self.forward) + s = samples.copy() + s["noise_mask"] = mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])) + return (model, s) + + def forward(self, sigma: torch.Tensor, denoise_mask: torch.Tensor, extra_options: dict): + model = extra_options["model"] + step_sigmas = extra_options["sigmas"] + sigma_to = model.inner_model.model_sampling.sigma_min + if step_sigmas[-1] > sigma_to: + sigma_to = step_sigmas[-1] + sigma_from = step_sigmas[0] + + ts_from = model.inner_model.model_sampling.timestep(sigma_from) + ts_to = model.inner_model.model_sampling.timestep(sigma_to) + current_ts = model.inner_model.model_sampling.timestep(sigma[0]) + + threshold = (current_ts - ts_to) / (ts_from - ts_to) / self.multiplier + + return (denoise_mask >= threshold).to(denoise_mask.dtype) + +class FluxBlockLoraSelect: + def __init__(self): + self.loaded_lora = None + + @classmethod + def INPUT_TYPES(s): + arg_dict = {} + argument = ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1000.0, "step": 0.01}) + + for i in range(19): + arg_dict["double_blocks.{}.".format(i)] = argument + + for i in range(38): + arg_dict["single_blocks.{}.".format(i)] = argument + + return {"required": arg_dict} + + RETURN_TYPES = ("SELECTEDBLOCKS", ) + RETURN_NAMES = ("blocks", ) + OUTPUT_TOOLTIPS = ("The modified diffusion model.",) + FUNCTION = "load_lora" + + CATEGORY = "KJNodes/experimental" + DESCRIPTION = "Select individual block alpha values, value of 0 removes the block altogether" + + def load_lora(self, **kwargs): + return (kwargs,) + +class FluxBlockLoraLoader: + def __init__(self): + self.loaded_lora = None + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "model": ("MODEL", {"tooltip": "The diffusion model the LoRA will be applied to."}), + "strength_model": ("FLOAT", {"default": 1.0, "min": -100.0, "max": 100.0, "step": 0.01, "tooltip": "How strongly to modify the diffusion model. 
This value can be negative."}), + + }, + "optional": { + "lora_name": (folder_paths.get_filename_list("loras"), {"tooltip": "The name of the LoRA."}), + "opt_lora_path": ("STRING", {"forceInput": True, "tooltip": "Absolute path of the LoRA."}), + "blocks": ("SELECTEDBLOCKS",), + } + } + + RETURN_TYPES = ("MODEL", "STRING", ) + RETURN_NAMES = ("model", "rank", ) + OUTPUT_TOOLTIPS = ("The modified diffusion model.", "possible rank of the LoRA.") + FUNCTION = "load_lora" + CATEGORY = "KJNodes/experimental" + + def load_lora(self, model, strength_model, lora_name=None, opt_lora_path=None, blocks=None): + from comfy.utils import load_torch_file + import comfy.lora + + if opt_lora_path: + lora_path = opt_lora_path + else: + lora_path = folder_paths.get_full_path("loras", lora_name) + + lora = None + if self.loaded_lora is not None: + if self.loaded_lora[0] == lora_path: + lora = self.loaded_lora[1] + else: + temp = self.loaded_lora + self.loaded_lora = None + del temp + + if lora is None: + lora = load_torch_file(lora_path, safe_load=True) + # Find the first key that ends with "weight" + rank = "unknown" + weight_key = next((key for key in lora.keys() if key.endswith('weight')), None) + # Print the shape of the value corresponding to the key + if weight_key: + print(f"Shape of the first 'weight' key ({weight_key}): {lora[weight_key].shape}") + rank = str(lora[weight_key].shape[0]) + else: + print("No key ending with 'weight' found.") + rank = "Couldn't find rank" + self.loaded_lora = (lora_path, lora) + + key_map = {} + if model is not None: + key_map = comfy.lora.model_lora_keys_unet(model.model, key_map) + + loaded = comfy.lora.load_lora(lora, key_map) + + if blocks is not None: + keys_to_delete = [] + + for block in blocks: + for key in list(loaded.keys()): # Convert keys to a list to avoid runtime error due to size change + match = False + if isinstance(key, str) and block in key: + match = True + elif isinstance(key, tuple): + for k in key: + if block in k: + match = True + break + + if match: + ratio = blocks[block] + if ratio == 0: + keys_to_delete.append(key) # Collect keys to delete + else: + value = loaded[key] + if isinstance(value, tuple) and len(value) > 1 and isinstance(value[1], tuple): + # Handle the tuple format + if len(value[1]) > 3: + loaded[key] = (value[0], value[1][:-3] + (ratio, value[1][-2], value[1][-1])) + else: + loaded[key] = (value[0], value[1][:-2] + (ratio, value[1][-1])) + else: + # Handle the simpler format directly + loaded[key] = (value[0], ratio) + + # Now perform the deletion of keys + for key in keys_to_delete: + del loaded[key] + + print("loading lora keys:") + for key, value in loaded.items(): + if isinstance(value, tuple) and len(value) > 1 and isinstance(value[1], tuple): + # Handle the tuple format + if len(value[1]) > 2: + alpha = value[1][-3] # Assuming the alpha value is the third last element in the tuple + else: + alpha = value[1][-2] # Adjust according to the second format's structure + else: + # Handle the simpler format directly + alpha = value[1] if len(value) > 1 else None + print(f"Key: {key}, Alpha: {alpha}") + + + if model is not None: + new_modelpatcher = model.clone() + k = new_modelpatcher.add_patches(loaded, strength_model) + + k = set(k) + for x in loaded: + if (x not in k): + print("NOT LOADED {}".format(x)) + + return (new_modelpatcher, rank) \ No newline at end of file diff --git a/ComfyUI-KJNodes/pyproject.toml b/ComfyUI-KJNodes/pyproject.toml new file mode 100644 index 
0000000000000000000000000000000000000000..069193bfd8bd8fb12e7f7c06a0705bdcca417b8a --- /dev/null +++ b/ComfyUI-KJNodes/pyproject.toml @@ -0,0 +1,15 @@ +[project] +name = "comfyui-kjnodes" +description = "Various quality of life -nodes for ComfyUI, mostly just visual stuff to improve usability." +version = "1.0.1" +license = {file = "LICENSE"} +dependencies = ["librosa", "numpy", "pillow>=10.3.0", "scipy", "color-matcher", "matplotlib", "huggingface_hub"] + +[project.urls] +Repository = "https://github.com/kijai/ComfyUI-KJNodes" +# Used by Comfy Registry https://comfyregistry.org + +[tool.comfy] +PublisherId = "kijai" +DisplayName = "ComfyUI-KJNodes" +Icon = "" diff --git a/ComfyUI-KJNodes/requirements.txt b/ComfyUI-KJNodes/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..5bc18ca95b226298cbb88bd4de3307c157e0a88b --- /dev/null +++ b/ComfyUI-KJNodes/requirements.txt @@ -0,0 +1,7 @@ +pillow>=10.3.0 +scipy +color-matcher +matplotlib +huggingface_hub +mss +opencv-python \ No newline at end of file diff --git a/ComfyUI-KJNodes/utility/fluid.py b/ComfyUI-KJNodes/utility/fluid.py new file mode 100644 index 0000000000000000000000000000000000000000..c0691987f5249a031ecbb74329ba513d5788b691 --- /dev/null +++ b/ComfyUI-KJNodes/utility/fluid.py @@ -0,0 +1,67 @@ +import numpy as np +from scipy.ndimage import map_coordinates, spline_filter +from scipy.sparse.linalg import factorized + +from .numerical import difference, operator + + +class Fluid: + def __init__(self, shape, *quantities, pressure_order=1, advect_order=3): + self.shape = shape + self.dimensions = len(shape) + + # Prototyping is simplified by dynamically + # creating advected quantities as needed. + self.quantities = quantities + for q in quantities: + setattr(self, q, np.zeros(shape)) + + self.indices = np.indices(shape) + self.velocity = np.zeros((self.dimensions, *shape)) + + laplacian = operator(shape, difference(2, pressure_order)) + self.pressure_solver = factorized(laplacian) + + self.advect_order = advect_order + + def step(self): + # Advection is computed backwards in time as described in Stable Fluids. + advection_map = self.indices - self.velocity + + # SciPy's spline filter introduces checkerboard divergence. + # A linear blend of the filtered and unfiltered fields based + # on some value epsilon eliminates this error. + def advect(field, filter_epsilon=10e-2, mode='constant'): + filtered = spline_filter(field, order=self.advect_order, mode=mode) + field = filtered * (1 - filter_epsilon) + field * filter_epsilon + return map_coordinates(field, advection_map, prefilter=False, order=self.advect_order, mode=mode) + + # Apply advection to each axis of the + # velocity field and each user-defined quantity. + for d in range(self.dimensions): + self.velocity[d] = advect(self.velocity[d]) + + for q in self.quantities: + setattr(self, q, advect(getattr(self, q))) + + # Compute the jacobian at each point in the + # velocity field to extract curl and divergence. + jacobian_shape = (self.dimensions,) * 2 + partials = tuple(np.gradient(d) for d in self.velocity) + jacobian = np.stack(partials).reshape(*jacobian_shape, *self.shape) + + divergence = jacobian.trace() + + # If this curl calculation is extended to 3D, the y-axis value must be negated. + # This corresponds to the coefficients of the levi-civita symbol in that dimension. 
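+        # In the 2D case handled here, the mask selects the single off-diagonal pair,
+        # so curl reduces to the scalar field jacobian[0, 1] - jacobian[1, 0],
+        # i.e. d(velocity[0])/d(axis 1) minus d(velocity[1])/d(axis 0).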
+ # Higher dimensions do not have a vector -> scalar, or vector -> vector, + # correspondence between velocity and curl due to differing isomorphisms + # between exterior powers in dimensions != 2 or 3 respectively. + curl_mask = np.triu(np.ones(jacobian_shape, dtype=bool), k=1) + curl = (jacobian[curl_mask] - jacobian[curl_mask.T]).squeeze() + + # Apply the pressure correction to the fluid's velocity field. + pressure = self.pressure_solver(divergence.flatten()).reshape(self.shape) + self.velocity -= np.gradient(pressure) + + return divergence, curl, pressure \ No newline at end of file diff --git a/ComfyUI-KJNodes/utility/magictex.py b/ComfyUI-KJNodes/utility/magictex.py new file mode 100644 index 0000000000000000000000000000000000000000..e6d426f7deb3deb977604dd37581eb4e9fe9e6a9 --- /dev/null +++ b/ComfyUI-KJNodes/utility/magictex.py @@ -0,0 +1,95 @@ +"""Generates psychedelic color textures in the spirit of Blender's magic texture shader using Python/Numpy + +https://github.com/cheind/magic-texture +""" +from typing import Tuple, Optional +import numpy as np + + +def coordinate_grid(shape: Tuple[int, int], dtype=np.float32): + """Returns a three-dimensional coordinate grid of given shape for use in `magic`.""" + x = np.linspace(-1, 1, shape[1], endpoint=True, dtype=dtype) + y = np.linspace(-1, 1, shape[0], endpoint=True, dtype=dtype) + X, Y = np.meshgrid(x, y) + XYZ = np.stack((X, Y, np.ones_like(X)), -1) + return XYZ + + +def random_transform(coords: np.ndarray, rng: np.random.Generator = None): + """Returns randomly transformed coordinates""" + H, W = coords.shape[:2] + rng = rng or np.random.default_rng() + m = rng.uniform(-1.0, 1.0, size=(3, 3)).astype(coords.dtype) + return (coords.reshape(-1, 3) @ m.T).reshape(H, W, 3) + + +def magic( + coords: np.ndarray, + depth: Optional[int] = None, + distortion: Optional[int] = None, + rng: np.random.Generator = None, +): + """Returns color magic color texture. + + The implementation is based on Blender's (https://www.blender.org/) magic + texture shader. The following adaptions have been made: + - we exchange the nested if-cascade by a probabilistic iterative approach + + Kwargs + ------ + coords: HxWx3 array + Coordinates transformed into colors by this method. See + `magictex.coordinate_grid` to generate the default. + depth: int (optional) + Number of transformations applied. Higher numbers lead to more + nested patterns. If not specified, randomly sampled. + distortion: float (optional) + Distortion of patterns. Larger values indicate more distortion, + lower values tend to generate smoother patterns. If not specified, + randomly sampled. + rng: np.random.Generator + Optional random generator to draw samples from. 
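+        Passing a seeded generator, e.g. np.random.default_rng(0), makes the
+        texture reproducible; an illustrative call is
+        magic(coordinate_grid((256, 256)), rng=np.random.default_rng(0)),
+        which yields a 256x256x3 float array.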
+ + Returns + ------- + colors: HxWx3 array + Three channel color image in range [0,1] + """ + rng = rng or np.random.default_rng() + if distortion is None: + distortion = rng.uniform(1, 4) + if depth is None: + depth = rng.integers(1, 5) + + H, W = coords.shape[:2] + XYZ = coords + x = np.sin((XYZ[..., 0] + XYZ[..., 1] + XYZ[..., 2]) * distortion) + y = np.cos((-XYZ[..., 0] + XYZ[..., 1] - XYZ[..., 2]) * distortion) + z = -np.cos((-XYZ[..., 0] - XYZ[..., 1] + XYZ[..., 2]) * distortion) + + if depth > 0: + x *= distortion + y *= distortion + z *= distortion + y = -np.cos(x - y + z) + y *= distortion + + xyz = [x, y, z] + fns = [np.cos, np.sin] + for _ in range(1, depth): + axis = rng.choice(3) + fn = fns[rng.choice(2)] + signs = rng.binomial(n=1, p=0.5, size=4) * 2 - 1 + + xyz[axis] = signs[-1] * fn( + signs[0] * xyz[0] + signs[1] * xyz[1] + signs[2] * xyz[2] + ) + xyz[axis] *= distortion + + x, y, z = xyz + x /= 2 * distortion + y /= 2 * distortion + z /= 2 * distortion + c = 0.5 - np.stack((x, y, z), -1) + np.clip(c, 0, 1.0) + return c \ No newline at end of file diff --git a/ComfyUI-KJNodes/utility/numerical.py b/ComfyUI-KJNodes/utility/numerical.py new file mode 100644 index 0000000000000000000000000000000000000000..b5b88bc63c45d63d8913e56cbd06eb7ab413fe4f --- /dev/null +++ b/ComfyUI-KJNodes/utility/numerical.py @@ -0,0 +1,25 @@ +from functools import reduce +from itertools import cycle +from math import factorial + +import numpy as np +import scipy.sparse as sp + + +def difference(derivative, accuracy=1): + # Central differences implemented based on the article here: + # http://web.media.mit.edu/~crtaylor/calculator.html + derivative += 1 + radius = accuracy + derivative // 2 - 1 + points = range(-radius, radius + 1) + coefficients = np.linalg.inv(np.vander(points)) + return coefficients[-derivative] * factorial(derivative - 1), points + + +def operator(shape, *differences): + # Credit to Philip Zucker for figuring out + # that kronsum's argument order is reversed. + # Without that bit of wisdom I'd have lost it. 
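+    # With the 2nd-order central difference returned by difference(2) and a 2D
+    # shape, this builds the familiar 5-point Laplacian as a sparse CSC matrix,
+    # e.g. operator((64, 64), difference(2)) for a 64x64 grid.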
+ differences = zip(shape, cycle(differences)) + factors = (sp.diags(*diff, shape=(dim,) * 2) for dim, diff in differences) + return reduce(lambda a, f: sp.kronsum(f, a, format='csc'), factors) \ No newline at end of file diff --git a/ComfyUI-KJNodes/utility/utility.py b/ComfyUI-KJNodes/utility/utility.py new file mode 100644 index 0000000000000000000000000000000000000000..f3b5c425922784522791e33c225c29be1e8249e0 --- /dev/null +++ b/ComfyUI-KJNodes/utility/utility.py @@ -0,0 +1,39 @@ +import torch +import numpy as np +from PIL import Image +from typing import Union, List + +# Utility functions from mtb nodes: https://github.com/melMass/comfy_mtb +def pil2tensor(image: Union[Image.Image, List[Image.Image]]) -> torch.Tensor: + if isinstance(image, list): + return torch.cat([pil2tensor(img) for img in image], dim=0) + + return torch.from_numpy(np.array(image).astype(np.float32) / 255.0).unsqueeze(0) + + +def np2tensor(img_np: Union[np.ndarray, List[np.ndarray]]) -> torch.Tensor: + if isinstance(img_np, list): + return torch.cat([np2tensor(img) for img in img_np], dim=0) + + return torch.from_numpy(img_np.astype(np.float32) / 255.0).unsqueeze(0) + + +def tensor2np(tensor: torch.Tensor): + if len(tensor.shape) == 3: # Single image + return np.clip(255.0 * tensor.cpu().numpy(), 0, 255).astype(np.uint8) + else: # Batch of images + return [np.clip(255.0 * t.cpu().numpy(), 0, 255).astype(np.uint8) for t in tensor] + +def tensor2pil(image: torch.Tensor) -> List[Image.Image]: + batch_count = image.size(0) if len(image.shape) > 3 else 1 + if batch_count > 1: + out = [] + for i in range(batch_count): + out.extend(tensor2pil(image[i])) + return out + + return [ + Image.fromarray( + np.clip(255.0 * image.cpu().numpy().squeeze(), 0, 255).astype(np.uint8) + ) + ] \ No newline at end of file diff --git a/ComfyUI-KJNodes/web/green.png b/ComfyUI-KJNodes/web/green.png new file mode 100644 index 0000000000000000000000000000000000000000..900964e4b3907145fe1e75a5b58473567450e16d Binary files /dev/null and b/ComfyUI-KJNodes/web/green.png differ diff --git a/ComfyUI-KJNodes/web/js/appearance.js b/ComfyUI-KJNodes/web/js/appearance.js new file mode 100644 index 0000000000000000000000000000000000000000..d90b4aa34d4c52b22a4411194100972c83eed88d --- /dev/null +++ b/ComfyUI-KJNodes/web/js/appearance.js @@ -0,0 +1,23 @@ +import { app } from "../../../scripts/app.js"; + +app.registerExtension({ + name: "KJNodes.appearance", + nodeCreated(node) { + switch (node.comfyClass) { + case "INTConstant": + node.setSize([200, 58]); + node.color = "#1b4669"; + node.bgcolor = "#29699c"; + break; + case "FloatConstant": + node.setSize([200, 58]); + node.color = LGraphCanvas.node_colors.green.color; + node.bgcolor = LGraphCanvas.node_colors.green.bgcolor; + break; + case "ConditioningMultiCombine": + node.color = LGraphCanvas.node_colors.brown.color; + node.bgcolor = LGraphCanvas.node_colors.brown.bgcolor; + break; + } + } +}); diff --git a/ComfyUI-KJNodes/web/js/browserstatus.js b/ComfyUI-KJNodes/web/js/browserstatus.js new file mode 100644 index 0000000000000000000000000000000000000000..fd377e74a6c5ef6cfd66ab8a4940e1448654dba3 --- /dev/null +++ b/ComfyUI-KJNodes/web/js/browserstatus.js @@ -0,0 +1,53 @@ +import { api } from "../../../scripts/api.js"; +import { app } from "../../../scripts/app.js"; + +app.registerExtension({ + name: "KJNodes.browserstatus", + setup() { + if (!app.ui.settings.getSettingValue("KJNodes.browserStatus")) { + return; + } + api.addEventListener("status", ({ detail }) => { + let title = "ComfyUI"; + let favicon 
= "green"; + let queueRemaining = detail && detail.exec_info.queue_remaining; + + if (queueRemaining) { + favicon = "red"; + title = `00% - ${queueRemaining} | ${title}`; + } + let link = document.querySelector("link[rel~='icon']"); + if (!link) { + link = document.createElement("link"); + link.rel = "icon"; + document.head.appendChild(link); + } + link.href = new URL(`../${favicon}.png`, import.meta.url); + document.title = title; + }); + //add progress to the title + api.addEventListener("progress", ({ detail }) => { + const { value, max } = detail; + const progress = Math.floor((value / max) * 100); + let title = document.title; + + if (!isNaN(progress) && progress >= 0 && progress <= 100) { + const paddedProgress = String(progress).padStart(2, '0'); + title = `${paddedProgress}% ${title.replace(/^\d+%\s/, '')}`; + } + document.title = title; + }); + }, + init() { + if (!app.ui.settings.getSettingValue("KJNodes.browserStatus")) { + return; + } + const pythongossFeed = app.extensions.find( + (e) => e.name === 'pysssss.FaviconStatus', + ) + if (pythongossFeed) { + console.warn("KJNodes - Overriding pysssss.FaviconStatus") + app.extensions = app.extensions.filter(item => item !== pythongossFeed); + } + }, +}); \ No newline at end of file diff --git a/ComfyUI-KJNodes/web/js/contextmenu.js b/ComfyUI-KJNodes/web/js/contextmenu.js new file mode 100644 index 0000000000000000000000000000000000000000..e77301f4f1d7c4fcc13d94e99495260cccb4dab6 --- /dev/null +++ b/ComfyUI-KJNodes/web/js/contextmenu.js @@ -0,0 +1,152 @@ +import { app } from "../../../scripts/app.js"; + +// Adds context menu entries, code partly from pyssssscustom-scripts + +function addMenuHandler(nodeType, cb) { + const getOpts = nodeType.prototype.getExtraMenuOptions; + nodeType.prototype.getExtraMenuOptions = function () { + const r = getOpts.apply(this, arguments); + cb.apply(this, arguments); + return r; + }; +} + +function addNode(name, nextTo, options) { + console.log("name:", name); + console.log("nextTo:", nextTo); + options = { side: "left", select: true, shiftY: 0, shiftX: 0, ...(options || {}) }; + const node = LiteGraph.createNode(name); + app.graph.add(node); + + node.pos = [ + options.side === "left" ? 
nextTo.pos[0] - (node.size[0] + options.offset): nextTo.pos[0] + nextTo.size[0] + options.offset, + + nextTo.pos[1] + options.shiftY, + ]; + if (options.select) { + app.canvas.selectNode(node, false); + } + return node; +} + +app.registerExtension({ + name: "KJNodesContextmenu", + async beforeRegisterNodeDef(nodeType, nodeData, app) { + if (nodeData.input && nodeData.input.required) { + addMenuHandler(nodeType, function (_, options) { + options.unshift( + { + content: "Add GetNode", + callback: () => {addNode("GetNode", this, { side:"left", offset: 30});} + }, + { + content: "Add SetNode", + callback: () => {addNode("SetNode", this, { side:"right", offset: 30 }); + }, + }); + }); + } + }, + async setup(app) { + const onChange = (value) => { + if (value) { + const valuesToAddToIn = ["GetNode"]; + const valuesToAddToOut = ["SetNode"]; + + for (const arr of Object.values(LiteGraph.slot_types_default_in)) { + for (const valueToAdd of valuesToAddToIn) { + const idx = arr.indexOf(valueToAdd); + if (idx !== 0) { + arr.splice(idx, 1); + } + arr.unshift(valueToAdd); + } + } + + for (const arr of Object.values(LiteGraph.slot_types_default_out)) { + for (const valueToAdd of valuesToAddToOut) { + const idx = arr.indexOf(valueToAdd); + if (idx !== 0) { + arr.splice(idx, 1); + } + arr.unshift(valueToAdd); + } + } + } + }; + + app.ui.settings.addSetting({ + id: "KJNodes.SetGetMenu", + name: "KJNodes: Make Set/Get -nodes defaults (turn off and reload to disable)", + defaultValue: false, + type: "boolean", + options: (value) => [ + { + value: true, + text: "On", + selected: value === true, + }, + { + value: false, + text: "Off", + selected: value === false, + }, + ], + onChange: onChange, + + }); + app.ui.settings.addSetting({ + id: "KJNodes.DisableMiddleClickDefault", + name: "KJNodes: Middle click default node adding", + defaultValue: false, + type: "boolean", + options: (value) => [ + { value: true, text: "On", selected: value === true }, + { value: false, text: "Off", selected: value === false }, + ], + onChange: (value) => { + LiteGraph.middle_click_slot_add_default_node = value; + }, + }); + app.ui.settings.addSetting({ + id: "KJNodes.nodeAutoColor", + name: "KJNodes: Automatically set node colors", + defaultValue: true, + type: "boolean", + options: (value) => [ + { value: true, text: "On", selected: value === true }, + { value: false, text: "Off", selected: value === false }, + ], + }); + app.ui.settings.addSetting({ + id: "KJNodes.helpPopup", + name: "KJNodes: Help popups", + defaultValue: true, + type: "boolean", + options: (value) => [ + { value: true, text: "On", selected: value === true }, + { value: false, text: "Off", selected: value === false }, + ], + }); + app.ui.settings.addSetting({ + id: "KJNodes.disablePrefix", + name: "KJNodes: Disable automatic Set_ and Get_ prefix", + defaultValue: false, + type: "boolean", + options: (value) => [ + { value: true, text: "On", selected: value === true }, + { value: false, text: "Off", selected: value === false }, + ], + }); + app.ui.settings.addSetting({ + id: "KJNodes.browserStatus", + name: "KJNodes: 🟢 Stoplight browser status icon 🔴", + defaultValue: false, + type: "boolean", + options: (value) => [ + { value: true, text: "On", selected: value === true }, + { value: false, text: "Off", selected: value === false }, + ], + }); +} +}); diff --git a/ComfyUI-KJNodes/web/js/help_popup.js b/ComfyUI-KJNodes/web/js/help_popup.js new file mode 100644 index 0000000000000000000000000000000000000000..c05907a3cbbc04406af90139fb7a05691d1b1eb0 --- /dev/null +++ 
b/ComfyUI-KJNodes/web/js/help_popup.js @@ -0,0 +1,326 @@ +import { app } from "../../../scripts/app.js"; + +// code based on mtb nodes by Mel Massadian https://github.com/melMass/comfy_mtb/ +export const loadScript = ( + FILE_URL, + async = true, + type = 'text/javascript', +) => { + return new Promise((resolve, reject) => { + try { + // Check if the script already exists + const existingScript = document.querySelector(`script[src="${FILE_URL}"]`) + if (existingScript) { + resolve({ status: true, message: 'Script already loaded' }) + return + } + + const scriptEle = document.createElement('script') + scriptEle.type = type + scriptEle.async = async + scriptEle.src = FILE_URL + + scriptEle.addEventListener('load', (ev) => { + resolve({ status: true }) + }) + + scriptEle.addEventListener('error', (ev) => { + reject({ + status: false, + message: `Failed to load the script ${FILE_URL}`, + }) + }) + + document.body.appendChild(scriptEle) + } catch (error) { + reject(error) + } + }) +} + +loadScript('/kjweb_async/marked.min.js').catch((e) => { + console.log(e) +}) +loadScript('/kjweb_async/purify.min.js').catch((e) => { + console.log(e) +}) + +const categories = ["KJNodes", "SUPIR", "VoiceCraft", "Marigold", "IC-Light"]; +app.registerExtension({ + name: "KJNodes.HelpPopup", + async beforeRegisterNodeDef(nodeType, nodeData) { + + if (app.ui.settings.getSettingValue("KJNodes.helpPopup") === false) { + return; + } + try { + categories.forEach(category => { + if (nodeData?.category?.startsWith(category)) { + addDocumentation(nodeData, nodeType); + } + else return + }); + } catch (error) { + console.error("Error in registering KJNodes.HelpPopup", error); + } + }, +}); + +const create_documentation_stylesheet = () => { + const tag = 'kj-documentation-stylesheet' + + let styleTag = document.head.querySelector(tag) + + if (!styleTag) { + styleTag = document.createElement('style') + styleTag.type = 'text/css' + styleTag.id = tag + styleTag.innerHTML = ` + .kj-documentation-popup { + background: var(--comfy-menu-bg); + position: absolute; + color: var(--fg-color); + font: 12px monospace; + line-height: 1.5em; + padding: 10px; + border-radius: 10px; + border-style: solid; + border-width: medium; + border-color: var(--border-color); + z-index: 5; + overflow: hidden; + } + .content-wrapper { + overflow: auto; + max-height: 100%; + /* Scrollbar styling for Chrome */ + &::-webkit-scrollbar { + width: 6px; + } + &::-webkit-scrollbar-track { + background: var(--bg-color); + } + &::-webkit-scrollbar-thumb { + background-color: var(--fg-color); + border-radius: 6px; + border: 3px solid var(--bg-color); + } + + /* Scrollbar styling for Firefox */ + scrollbar-width: thin; + scrollbar-color: var(--fg-color) var(--bg-color); + a { + color: yellow; + } + a:visited { + color: orange; + } + a:hover { + color: red; + } + } + ` + document.head.appendChild(styleTag) + } + } + + /** Add documentation widget to the selected node */ + export const addDocumentation = ( + nodeData, + nodeType, + opts = { icon_size: 14, icon_margin: 4 },) => { + + opts = opts || {} + const iconSize = opts.icon_size ? opts.icon_size : 14 + const iconMargin = opts.icon_margin ? opts.icon_margin : 4 + let docElement = null + let contentWrapper = null + //if no description in the node python code, don't do anything + if (!nodeData.description) { + return + } + + const drawFg = nodeType.prototype.onDrawForeground + nodeType.prototype.onDrawForeground = function (ctx) { + const r = drawFg ? 
drawFg.apply(this, arguments) : undefined + if (this.flags.collapsed) return r + + // icon position + const x = this.size[0] - iconSize - iconMargin + + // create the popup + if (this.show_doc && docElement === null) { + docElement = document.createElement('div') + contentWrapper = document.createElement('div'); + docElement.appendChild(contentWrapper); + + create_documentation_stylesheet() + contentWrapper.classList.add('content-wrapper'); + docElement.classList.add('kj-documentation-popup') + + //parse the string from the python node code to html with marked, and sanitize the html with DOMPurify + contentWrapper.innerHTML = DOMPurify.sanitize(marked.parse(nodeData.description,)) + + // resize handle + const resizeHandle = document.createElement('div'); + resizeHandle.style.width = '0'; + resizeHandle.style.height = '0'; + resizeHandle.style.position = 'absolute'; + resizeHandle.style.bottom = '0'; + resizeHandle.style.right = '0'; + resizeHandle.style.cursor = 'se-resize'; + + // Add pseudo-elements to create a triangle shape + const borderColor = getComputedStyle(document.documentElement).getPropertyValue('--border-color').trim(); + resizeHandle.style.borderTop = '10px solid transparent'; + resizeHandle.style.borderLeft = '10px solid transparent'; + resizeHandle.style.borderBottom = `10px solid ${borderColor}`; + resizeHandle.style.borderRight = `10px solid ${borderColor}`; + + docElement.appendChild(resizeHandle) + let isResizing = false + let startX, startY, startWidth, startHeight + + resizeHandle.addEventListener('mousedown', function (e) { + e.preventDefault(); + e.stopPropagation(); + isResizing = true; + startX = e.clientX; + startY = e.clientY; + startWidth = parseInt(document.defaultView.getComputedStyle(docElement).width, 10); + startHeight = parseInt(document.defaultView.getComputedStyle(docElement).height, 10); + }, + { signal: this.docCtrl.signal }, + ); + + // close button + const closeButton = document.createElement('div'); + closeButton.textContent = '❌'; + closeButton.style.position = 'absolute'; + closeButton.style.top = '0'; + closeButton.style.right = '0'; + closeButton.style.cursor = 'pointer'; + closeButton.style.padding = '5px'; + closeButton.style.color = 'red'; + closeButton.style.fontSize = '12px'; + + docElement.appendChild(closeButton) + + closeButton.addEventListener('mousedown', (e) => { + e.stopPropagation(); + this.show_doc = !this.show_doc + docElement.parentNode.removeChild(docElement) + docElement = null + if (contentWrapper) { + contentWrapper.remove() + contentWrapper = null + } + }, + { signal: this.docCtrl.signal }, + ); + + document.addEventListener('mousemove', function (e) { + if (!isResizing) return; + const scale = app.canvas.ds.scale; + const newWidth = startWidth + (e.clientX - startX) / scale; + const newHeight = startHeight + (e.clientY - startY) / scale;; + docElement.style.width = `${newWidth}px`; + docElement.style.height = `${newHeight}px`; + }, + { signal: this.docCtrl.signal }, + ); + + document.addEventListener('mouseup', function () { + isResizing = false + }, + { signal: this.docCtrl.signal }, + ) + + document.body.appendChild(docElement) + } + // close the popup + else if (!this.show_doc && docElement !== null) { + docElement.parentNode.removeChild(docElement) + docElement = null + } + // update position of the popup + if (this.show_doc && docElement !== null) { + const rect = ctx.canvas.getBoundingClientRect() + const scaleX = rect.width / ctx.canvas.width + const scaleY = rect.height / ctx.canvas.height + + const transform = 
new DOMMatrix() + .scaleSelf(scaleX, scaleY) + .multiplySelf(ctx.getTransform()) + .translateSelf(this.size[0] * scaleX * Math.max(1.0,window.devicePixelRatio) , 0) + .translateSelf(10, -32) + + const scale = new DOMMatrix() + .scaleSelf(transform.a, transform.d); + const bcr = app.canvas.canvas.getBoundingClientRect() + + const styleObject = { + transformOrigin: '0 0', + transform: scale, + left: `${transform.a + bcr.x + transform.e}px`, + top: `${transform.d + bcr.y + transform.f}px`, + }; + Object.assign(docElement.style, styleObject); + } + + ctx.save() + ctx.translate(x - 2, iconSize - 34) + ctx.scale(iconSize / 32, iconSize / 32) + ctx.strokeStyle = 'rgba(255,255,255,0.3)' + ctx.lineCap = 'round' + ctx.lineJoin = 'round' + ctx.lineWidth = 2.4 + ctx.font = 'bold 36px monospace' + ctx.fillStyle = 'orange'; + ctx.fillText('?', 0, 24) + ctx.restore() + return r + } + // handle clicking of the icon + const mouseDown = nodeType.prototype.onMouseDown + nodeType.prototype.onMouseDown = function (e, localPos, canvas) { + const r = mouseDown ? mouseDown.apply(this, arguments) : undefined + const iconX = this.size[0] - iconSize - iconMargin + const iconY = iconSize - 34 + if ( + localPos[0] > iconX && + localPos[0] < iconX + iconSize && + localPos[1] > iconY && + localPos[1] < iconY + iconSize + ) { + if (this.show_doc === undefined) { + this.show_doc = true + } else { + this.show_doc = !this.show_doc + } + if (this.show_doc) { + this.docCtrl = new AbortController() + } else { + this.docCtrl.abort() + } + return true; + } + return r; + } + const onRem = nodeType.prototype.onRemoved + + nodeType.prototype.onRemoved = function () { + const r = onRem ? onRem.apply(this, []) : undefined + + if (docElement) { + docElement.remove() + docElement = null + } + + if (contentWrapper) { + contentWrapper.remove() + contentWrapper = null + } + return r + } +} \ No newline at end of file diff --git a/ComfyUI-KJNodes/web/js/jsnodes.js b/ComfyUI-KJNodes/web/js/jsnodes.js new file mode 100644 index 0000000000000000000000000000000000000000..aa71369c6e9970146c46bbf08df0f7402c2a6bed --- /dev/null +++ b/ComfyUI-KJNodes/web/js/jsnodes.js @@ -0,0 +1,354 @@ +import { app } from "../../../scripts/app.js"; + +app.registerExtension({ + name: "KJNodes.jsnodes", + async beforeRegisterNodeDef(nodeType, nodeData, app) { + if(!nodeData?.category?.startsWith("KJNodes")) { + return; + } + switch (nodeData.name) { + case "ConditioningMultiCombine": + nodeType.prototype.onNodeCreated = function () { + this.cond_type = "CONDITIONING" + this.inputs_offset = nodeData.name.includes("selective")?1:0 + this.addWidget("button", "Update inputs", null, () => { + if (!this.inputs) { + this.inputs = []; + } + const target_number_of_inputs = this.widgets.find(w => w.name === "inputcount")["value"]; + if(target_number_of_inputs===this.inputs.length)return; // already set, do nothing + + if(target_number_of_inputs < this.inputs.length){ + for(let i = this.inputs.length; i>=this.inputs_offset+target_number_of_inputs; i--) + this.removeInput(i) + } + else{ + for(let i = this.inputs.length+1-this.inputs_offset; i <= target_number_of_inputs; ++i) + this.addInput(`conditioning_${i}`, this.cond_type) + } + }); + } + break; + case "ImageBatchMulti": + case "ImageAddMulti": + case "ImageConcatMulti": + nodeType.prototype.onNodeCreated = function () { + this._type = "IMAGE" + this.inputs_offset = nodeData.name.includes("selective")?1:0 + this.addWidget("button", "Update inputs", null, () => { + if (!this.inputs) { + this.inputs = []; + } + const 
target_number_of_inputs = this.widgets.find(w => w.name === "inputcount")["value"]; + if(target_number_of_inputs===this.inputs.length)return; // already set, do nothing + + if(target_number_of_inputs < this.inputs.length){ + for(let i = this.inputs.length; i>=this.inputs_offset+target_number_of_inputs; i--) + this.removeInput(i) + } + else{ + for(let i = this.inputs.length+1-this.inputs_offset; i <= target_number_of_inputs; ++i) + this.addInput(`image_${i}`, this._type) + } + }); + } + break; + case "MaskBatchMulti": + nodeType.prototype.onNodeCreated = function () { + this._type = "MASK" + this.inputs_offset = nodeData.name.includes("selective")?1:0 + this.addWidget("button", "Update inputs", null, () => { + if (!this.inputs) { + this.inputs = []; + } + const target_number_of_inputs = this.widgets.find(w => w.name === "inputcount")["value"]; + if(target_number_of_inputs===this.inputs.length)return; // already set, do nothing + + if(target_number_of_inputs < this.inputs.length){ + for(let i = this.inputs.length; i>=this.inputs_offset+target_number_of_inputs; i--) + this.removeInput(i) + } + else{ + for(let i = this.inputs.length+1-this.inputs_offset; i <= target_number_of_inputs; ++i) + this.addInput(`mask_${i}`, this._type) + } + }); + } + break; + + case "FluxBlockLoraSelect": + nodeType.prototype.onNodeCreated = function () { + this.addWidget("button", "Set all", null, () => { + const userInput = prompt("Enter the values to set for widgets (e.g., s0,1,2-7=2.0, d0,1,2-7=2.0, or 1.0):", ""); + if (userInput) { + const regex = /([sd])?(\d+(?:,\d+|-?\d+)*?)?=(\d+(\.\d+)?)/; + const match = userInput.match(regex); + if (match) { + const type = match[1]; + const indicesPart = match[2]; + const value = parseFloat(match[3]); + + let targetWidgets = []; + if (type === 's') { + targetWidgets = this.widgets.filter(widget => widget.name.includes("single")); + } else if (type === 'd') { + targetWidgets = this.widgets.filter(widget => widget.name.includes("double")); + } else { + targetWidgets = this.widgets; // No type specified, all widgets + } + + if (indicesPart) { + const indices = indicesPart.split(',').flatMap(part => { + if (part.includes('-')) { + const [start, end] = part.split('-').map(Number); + return Array.from({ length: end - start + 1 }, (_, i) => start + i); + } + return Number(part); + }); + + for (const index of indices) { + if (index < targetWidgets.length) { + targetWidgets[index].value = value; + } + } + } else { + // No indices provided, set value for all target widgets + for (const widget of targetWidgets) { + widget.value = value; + } + } + } else if (!isNaN(parseFloat(userInput))) { + // Single value provided, set it for all widgets + const value = parseFloat(userInput); + for (const widget of this.widgets) { + widget.value = value; + } + } else { + alert("Invalid input format. Please use the format s0,1,2-7=2.0, d0,1,2-7=2.0, or 1.0"); + } + } else { + alert("Invalid input. Please enter a value."); + } + }); + }; + break; + + case "GetMaskSizeAndCount": + const onGetMaskSizeConnectInput = nodeType.prototype.onConnectInput; + nodeType.prototype.onConnectInput = function (targetSlot, type, output, originNode, originSlot) { + const v = onGetMaskSizeConnectInput? 
onGetMaskSizeConnectInput.apply(this, arguments): undefined + this.outputs[1]["name"] = "width" + this.outputs[2]["name"] = "height" + this.outputs[3]["name"] = "count" + return v; + } + const onGetMaskSizeExecuted = nodeType.prototype.onExecuted; + nodeType.prototype.onExecuted = function(message) { + const r = onGetMaskSizeExecuted? onGetMaskSizeExecuted.apply(this,arguments): undefined + let values = message["text"].toString().split('x').map(Number); + this.outputs[1]["name"] = values[1] + " width" + this.outputs[2]["name"] = values[2] + " height" + this.outputs[3]["name"] = values[0] + " count" + return r + } + break; + + case "GetImageSizeAndCount": + const onGetImageSizeConnectInput = nodeType.prototype.onConnectInput; + nodeType.prototype.onConnectInput = function (targetSlot, type, output, originNode, originSlot) { + const v = onGetImageSizeConnectInput? onGetImageSizeConnectInput.apply(this, arguments): undefined + this.outputs[1]["name"] = "width" + this.outputs[2]["name"] = "height" + this.outputs[3]["name"] = "count" + return v; + } + const onGetImageSizeExecuted = nodeType.prototype.onExecuted; + nodeType.prototype.onExecuted = function(message) { + const r = onGetImageSizeExecuted? onGetImageSizeExecuted.apply(this,arguments): undefined + let values = message["text"].toString().split('x').map(Number); + this.outputs[1]["name"] = values[1] + " width" + this.outputs[2]["name"] = values[2] + " height" + this.outputs[3]["name"] = values[0] + " count" + return r + } + break; + + case "PreviewAnimation": + const onPreviewAnimationConnectInput = nodeType.prototype.onConnectInput; + nodeType.prototype.onConnectInput = function (targetSlot, type, output, originNode, originSlot) { + const v = onPreviewAnimationConnectInput? onPreviewAnimationConnectInput.apply(this, arguments): undefined + this.title = "Preview Animation" + return v; + } + const onPreviewAnimationExecuted = nodeType.prototype.onExecuted; + nodeType.prototype.onExecuted = function(message) { + const r = onPreviewAnimationExecuted? onPreviewAnimationExecuted.apply(this,arguments): undefined + let values = message["text"].toString(); + this.title = "Preview Animation " + values + return r + } + break; + + case "VRAM_Debug": + const onVRAM_DebugConnectInput = nodeType.prototype.onConnectInput; + nodeType.prototype.onConnectInput = function (targetSlot, type, output, originNode, originSlot) { + const v = onVRAM_DebugConnectInput? onVRAM_DebugConnectInput.apply(this, arguments): undefined + this.outputs[3]["name"] = "freemem_before" + this.outputs[4]["name"] = "freemem_after" + return v; + } + const onVRAM_DebugExecuted = nodeType.prototype.onExecuted; + nodeType.prototype.onExecuted = function(message) { + const r = onVRAM_DebugExecuted? onVRAM_DebugExecuted.apply(this,arguments): undefined + let values = message["text"].toString().split('x'); + this.outputs[3]["name"] = values[0] + " freemem_before" + this.outputs[4]["name"] = values[1] + " freemem_after" + return r + } + break; + + case "JoinStringMulti": + const originalOnNodeCreated = nodeType.prototype.onNodeCreated || function() {}; + nodeType.prototype.onNodeCreated = function () { + originalOnNodeCreated.apply(this, arguments); + + this._type = "STRING"; + this.inputs_offset = nodeData.name.includes("selective") ? 
1 : 0; + this.addWidget("button", "Update inputs", null, () => { + if (!this.inputs) { + this.inputs = []; + } + const target_number_of_inputs = this.widgets.find(w => w.name === "inputcount")["value"]; + if (target_number_of_inputs === this.inputs.length) return; // already set, do nothing + + if (target_number_of_inputs < this.inputs.length) { + for (let i = this.inputs.length; i >= this.inputs_offset + target_number_of_inputs; i--) + this.removeInput(i); + } else { + for (let i = this.inputs.length + 1 - this.inputs_offset; i <= target_number_of_inputs; ++i) + this.addInput(`string_${i}`, this._type); + } + }); + } + break; + case "SoundReactive": + nodeType.prototype.onNodeCreated = function () { + let audioContext; + let microphoneStream; + let animationFrameId; + let analyser; + let dataArray; + let startRangeHz; + let endRangeHz; + let smoothingFactor = 0.5; + let smoothedSoundLevel = 0; + + // Function to update the widget value in real-time + const updateWidgetValueInRealTime = () => { + // Ensure analyser and dataArray are defined before using them + if (analyser && dataArray) { + analyser.getByteFrequencyData(dataArray); + + const startRangeHzWidget = this.widgets.find(w => w.name === "start_range_hz"); + if (startRangeHzWidget) startRangeHz = startRangeHzWidget.value; + const endRangeHzWidget = this.widgets.find(w => w.name === "end_range_hz"); + if (endRangeHzWidget) endRangeHz = endRangeHzWidget.value; + const smoothingFactorWidget = this.widgets.find(w => w.name === "smoothing_factor"); + if (smoothingFactorWidget) smoothingFactor = smoothingFactorWidget.value; + + // Calculate frequency bin width (frequency resolution) + const frequencyBinWidth = audioContext.sampleRate / analyser.fftSize; + // Convert the widget values from Hz to indices + const startRangeIndex = Math.floor(startRangeHz / frequencyBinWidth); + const endRangeIndex = Math.floor(endRangeHz / frequencyBinWidth); + + // Function to calculate the average value for a frequency range + const calculateAverage = (start, end) => { + const sum = dataArray.slice(start, end).reduce((acc, val) => acc + val, 0); + const average = sum / (end - start); + + // Apply exponential moving average smoothing + smoothedSoundLevel = (average * (1 - smoothingFactor)) + (smoothedSoundLevel * smoothingFactor); + return smoothedSoundLevel; + }; + // Calculate the average levels for each frequency range + const soundLevel = calculateAverage(startRangeIndex, endRangeIndex); + + // Update the widget values + + const lowLevelWidget = this.widgets.find(w => w.name === "sound_level"); + if (lowLevelWidget) lowLevelWidget.value = soundLevel; + + animationFrameId = requestAnimationFrame(updateWidgetValueInRealTime); + } + }; + + // Function to start capturing audio from the microphone + const startMicrophoneCapture = () => { + // Only create the audio context and analyser once + if (!audioContext) { + audioContext = new (window.AudioContext || window.webkitAudioContext)(); + // Access the sample rate of the audio context + console.log(`Sample rate: ${audioContext.sampleRate}Hz`); + analyser = audioContext.createAnalyser(); + analyser.fftSize = 2048; + dataArray = new Uint8Array(analyser.frequencyBinCount); + // Get the range values from widgets (assumed to be in Hz) + const lowRangeWidget = this.widgets.find(w => w.name === "low_range_hz"); + if (lowRangeWidget) startRangeHz = lowRangeWidget.value; + + const midRangeWidget = this.widgets.find(w => w.name === "mid_range_hz"); + if (midRangeWidget) endRangeHz = midRangeWidget.value; + } + + 
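+                        // Ask the browser for microphone access; on success, route the
+                        // stream through the analyser node and start the polling loop above.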
navigator.mediaDevices.getUserMedia({ audio: true }).then(stream => { + microphoneStream = stream; + const microphone = audioContext.createMediaStreamSource(stream); + microphone.connect(analyser); + updateWidgetValueInRealTime(); + }).catch(error => { + console.error('Access to microphone was denied or an error occurred:', error); + }); + }; + + // Function to stop capturing audio from the microphone + const stopMicrophoneCapture = () => { + if (animationFrameId) { + cancelAnimationFrame(animationFrameId); + } + if (microphoneStream) { + microphoneStream.getTracks().forEach(track => track.stop()); + } + if (audioContext) { + audioContext.close(); + // Reset audioContext to ensure it can be created again when starting + audioContext = null; + } + }; + + // Add start button + this.addWidget("button", "Start mic capture", null, startMicrophoneCapture); + + // Add stop button + this.addWidget("button", "Stop mic capture", null, stopMicrophoneCapture); + }; + break; + + } + + }, + async setup() { + // to keep Set/Get node virtual connections visible when offscreen + const originalComputeVisibleNodes = LGraphCanvas.prototype.computeVisibleNodes; + LGraphCanvas.prototype.computeVisibleNodes = function () { + const visibleNodesSet = new Set(originalComputeVisibleNodes.apply(this, arguments)); + for (const node of this.graph._nodes) { + if ((node.type === "SetNode" || node.type === "GetNode") && node.drawConnection) { + visibleNodesSet.add(node); + } + } + return Array.from(visibleNodesSet); + }; + + } +}); \ No newline at end of file diff --git a/ComfyUI-KJNodes/web/js/point_editor.js b/ComfyUI-KJNodes/web/js/point_editor.js new file mode 100644 index 0000000000000000000000000000000000000000..210591fa51d660296e28e29e37183757eaee05bb --- /dev/null +++ b/ComfyUI-KJNodes/web/js/point_editor.js @@ -0,0 +1,736 @@ +import { app } from '../../../scripts/app.js' + +//from melmass +export function makeUUID() { + let dt = new Date().getTime() + const uuid = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) => { + const r = ((dt + Math.random() * 16) % 16) | 0 + dt = Math.floor(dt / 16) + return (c === 'x' ? 
r : (r & 0x3) | 0x8).toString(16) + }) + return uuid +} + +export const loadScript = ( + FILE_URL, + async = true, + type = 'text/javascript', +) => { + return new Promise((resolve, reject) => { + try { + // Check if the script already exists + const existingScript = document.querySelector(`script[src="${FILE_URL}"]`) + if (existingScript) { + resolve({ status: true, message: 'Script already loaded' }) + return + } + + const scriptEle = document.createElement('script') + scriptEle.type = type + scriptEle.async = async + scriptEle.src = FILE_URL + + scriptEle.addEventListener('load', (ev) => { + resolve({ status: true }) + }) + + scriptEle.addEventListener('error', (ev) => { + reject({ + status: false, + message: `Failed to load the script ${FILE_URL}`, + }) + }) + + document.body.appendChild(scriptEle) + } catch (error) { + reject(error) + } + }) +} +const create_documentation_stylesheet = () => { + const tag = 'kj-pointseditor-stylesheet' + + let styleTag = document.head.querySelector(tag) + + if (!styleTag) { + styleTag = document.createElement('style') + styleTag.type = 'text/css' + styleTag.id = tag + styleTag.innerHTML = ` + .points-editor { + + position: absolute; + + font: 12px monospace; + line-height: 1.5em; + padding: 10px; + z-index: 0; + overflow: hidden; + } + ` + document.head.appendChild(styleTag) + } +} + +loadScript('/kjweb_async/svg-path-properties.min.js').catch((e) => { + console.log(e) +}) +loadScript('/kjweb_async/protovis.min.js').catch((e) => { + console.log(e) +}) +create_documentation_stylesheet() + +function chainCallback(object, property, callback) { + if (object == undefined) { + //This should not happen. + console.error("Tried to add callback to non-existant object") + return; + } + if (property in object) { + const callback_orig = object[property] + object[property] = function () { + const r = callback_orig.apply(this, arguments); + callback.apply(this, arguments); + return r + }; + } else { + object[property] = callback; + } +} +app.registerExtension({ + name: 'KJNodes.PointEditor', + + async beforeRegisterNodeDef(nodeType, nodeData) { + if (nodeData?.name === 'PointsEditor') { + chainCallback(nodeType.prototype, "onNodeCreated", function () { + + hideWidgetForGood(this, this.widgets.find(w => w.name === "coordinates")) + hideWidgetForGood(this, this.widgets.find(w => w.name === "neg_coordinates")) + hideWidgetForGood(this, this.widgets.find(w => w.name === "bboxes")) + + var element = document.createElement("div"); + this.uuid = makeUUID() + element.id = `points-editor-${this.uuid}` + + // fake image widget to allow copy/paste + const fakeimagewidget = this.addWidget("COMBO", "image", null, () => { }, {}); + hideWidgetForGood(this, fakeimagewidget) + + this.pointsEditor = this.addDOMWidget(nodeData.name, "PointsEditorWidget", element, { + serialize: false, + hideOnZoom: false, + }); + + // context menu + this.contextMenu = document.createElement("div"); + this.contextMenu.id = "context-menu"; + this.contextMenu.style.display = "none"; + this.contextMenu.style.position = "absolute"; + this.contextMenu.style.backgroundColor = "#202020"; + this.contextMenu.style.minWidth = "100px"; + this.contextMenu.style.boxShadow = "0px 8px 16px 0px rgba(0,0,0,0.2)"; + this.contextMenu.style.zIndex = "100"; + this.contextMenu.style.padding = "5px"; + + function styleMenuItem(menuItem) { + menuItem.style.display = "block"; + menuItem.style.padding = "5px"; + menuItem.style.color = "#FFF"; + menuItem.style.fontFamily = "Arial, sans-serif"; + menuItem.style.fontSize = "16px"; 
+ menuItem.style.textDecoration = "none"; + menuItem.style.marginBottom = "5px"; + } + function createMenuItem(id, textContent) { + let menuItem = document.createElement("a"); + menuItem.href = "#"; + menuItem.id = `menu-item-${id}`; + menuItem.textContent = textContent; + styleMenuItem(menuItem); + return menuItem; + } + + // Create an array of menu items using the createMenuItem function + this.menuItems = [ + createMenuItem(0, "Load Image"), + createMenuItem(1, "Clear Image"), + ]; + + // Add mouseover and mouseout event listeners to each menu item for styling + this.menuItems.forEach(menuItem => { + menuItem.addEventListener('mouseover', function () { + this.style.backgroundColor = "gray"; + }); + + menuItem.addEventListener('mouseout', function () { + this.style.backgroundColor = "#202020"; + }); + }); + + // Append each menu item to the context menu + this.menuItems.forEach(menuItem => { + this.contextMenu.appendChild(menuItem); + }); + + document.body.appendChild(this.contextMenu); + + this.addWidget("button", "New canvas", null, () => { + if (!this.properties || !("points" in this.properties)) { + this.editor = new PointsEditor(this); + this.addProperty("points", this.constructor.type, "string"); + this.addProperty("neg_points", this.constructor.type, "string"); + + } + else { + this.editor = new PointsEditor(this, true); + } + }); + + this.setSize([550, 550]); + this.resizable = false; + this.pointsEditor.parentEl = document.createElement("div"); + this.pointsEditor.parentEl.className = "points-editor"; + this.pointsEditor.parentEl.id = `points-editor-${this.uuid}` + element.appendChild(this.pointsEditor.parentEl); + + chainCallback(this, "onConfigure", function () { + try { + this.editor = new PointsEditor(this); + } catch (error) { + console.error("An error occurred while configuring the editor:", error); + } + }); + chainCallback(this, "onExecuted", function (message) { + let bg_image = message["bg_image"]; + this.properties.imgData = { + name: "bg_image", + base64: bg_image + }; + this.editor.refreshBackgroundImage(this); + }); + + }); // onAfterGraphConfigured + }//node created + } //before register +})//register + +class PointsEditor { + constructor(context, reset = false) { + this.node = context; + this.reset = reset; + const self = this; // Keep a reference to the main class context + + console.log("creatingPointEditor") + + this.node.pasteFile = (file) => { + if (file.type.startsWith("image/")) { + this.handleImageFile(file); + return true; + } + return false; + }; + + this.node.onDragOver = function (e) { + if (e.dataTransfer && e.dataTransfer.items) { + return [...e.dataTransfer.items].some(f => f.kind === "file" && f.type.startsWith("image/")); + } + return false; + }; + + // On drop upload files + this.node.onDragDrop = (e) => { + console.log("onDragDrop called"); + let handled = false; + for (const file of e.dataTransfer.files) { + if (file.type.startsWith("image/")) { + this.handleImageFile(file); + handled = true; + } + } + return handled; + }; + + // context menu + this.createContextMenu(); + + if (reset && context.pointsEditor.element) { + context.pointsEditor.element.innerHTML = ''; // Clear the container + } + this.pos_coordWidget = context.widgets.find(w => w.name === "coordinates"); + this.neg_coordWidget = context.widgets.find(w => w.name === "neg_coordinates"); + this.pointsStoreWidget = context.widgets.find(w => w.name === "points_store"); + this.widthWidget = context.widgets.find(w => w.name === "width"); + this.heightWidget = context.widgets.find(w => 
w.name === "height"); + this.bboxStoreWidget = context.widgets.find(w => w.name === "bbox_store"); + this.bboxWidget = context.widgets.find(w => w.name === "bboxes"); + + //widget callbacks + this.widthWidget.callback = () => { + this.width = this.widthWidget.value; + if (this.width > 256) { + context.setSize([this.width + 45, context.size[1]]); + } + this.vis.width(this.width); + this.updateData(); + } + this.heightWidget.callback = () => { + this.height = this.heightWidget.value + this.vis.height(this.height) + context.setSize([context.size[0], this.height + 300]); + this.updateData(); + } + this.pointsStoreWidget.callback = () => { + this.points = JSON.parse(pointsStoreWidget.value).positive; + this.neg_points = JSON.parse(pointsStoreWidget.value).negative; + this.updateData(); + } + this.bboxStoreWidget.callback = () => { + this.bbox = JSON.parse(bboxStoreWidget.value) + this.updateData(); + } + + this.width = this.widthWidget.value; + this.height = this.heightWidget.value; + var i = 3; + this.points = []; + this.neg_points = []; + this.bbox = [{}]; + var drawing = false; + + // Initialize or reset points array + if (!reset && this.pointsStoreWidget.value != "") { + this.points = JSON.parse(this.pointsStoreWidget.value).positive; + this.neg_points = JSON.parse(this.pointsStoreWidget.value).negative; + this.bbox = JSON.parse(this.bboxStoreWidget.value); + console.log(this.bbox) + } else { + this.points = [ + { + x: this.width / 2, // Middle point horizontally centered + y: this.height / 2 // Middle point vertically centered + } + ]; + this.neg_points = [ + { + x: 0, // Middle point horizontally centered + y: 0 // Middle point vertically centered + } + ]; + const combinedPoints = { + positive: this.points, + negative: this.neg_points, + }; + this.pointsStoreWidget.value = JSON.stringify(combinedPoints); + this.bboxStoreWidget.value = JSON.stringify(this.bbox); + } + + //create main canvas panel + this.vis = new pv.Panel() + .width(this.width) + .height(this.height) + .fillStyle("#222") + .strokeStyle("gray") + .lineWidth(2) + .antialias(false) + .margin(10) + .event("mousedown", function () { + if (pv.event.shiftKey && pv.event.button === 2) { // Use pv.event to access the event object + let scaledMouse = { + x: this.mouse().x / app.canvas.ds.scale, + y: this.mouse().y / app.canvas.ds.scale + }; + i = self.neg_points.push(scaledMouse) - 1; + self.updateData(); + return this; + } + else if (pv.event.shiftKey) { + let scaledMouse = { + x: this.mouse().x / app.canvas.ds.scale, + y: this.mouse().y / app.canvas.ds.scale + }; + i = self.points.push(scaledMouse) - 1; + self.updateData(); + return this; + } + else if (pv.event.ctrlKey) { + console.log("start drawing at " + this.mouse().x / app.canvas.ds.scale + ", " + this.mouse().y / app.canvas.ds.scale); + drawing = true; + self.bbox[0].startX = this.mouse().x / app.canvas.ds.scale; + self.bbox[0].startY = this.mouse().y / app.canvas.ds.scale; + } + else if (pv.event.button === 2) { + self.node.contextMenu.style.display = 'block'; + self.node.contextMenu.style.left = `${pv.event.clientX}px`; + self.node.contextMenu.style.top = `${pv.event.clientY}px`; + } + }) + .event("mousemove", function () { + if (drawing) { + self.bbox[0].endX = this.mouse().x / app.canvas.ds.scale; + self.bbox[0].endY = this.mouse().y / app.canvas.ds.scale; + self.vis.render(); + } + }) + .event("mouseup", function () { + console.log("end drawing at " + this.mouse().x / app.canvas.ds.scale + ", " + this.mouse().y / app.canvas.ds.scale); + drawing = false; + 
self.updateData(); + }); + + this.backgroundImage = this.vis.add(pv.Image).visible(false) + + //create bounding box + this.bounding_box = this.vis.add(pv.Area) + .data(function () { + if (drawing || (self.bbox && self.bbox[0] && Object.keys(self.bbox[0]).length > 0)) { + return [self.bbox[0].startX, self.bbox[0].endX]; + } else { + return []; + } + }) + .bottom(function () {return self.height - Math.max(self.bbox[0].startY, self.bbox[0].endY); }) + .left(function (d) {return d; }) + .height(function () {return Math.abs(self.bbox[0].startY - self.bbox[0].endY);}) + .fillStyle("rgba(70, 130, 180, 0.5)") + .strokeStyle("steelblue") + .visible(function () {return drawing || Object.keys(self.bbox[0]).length > 0; }) + .add(pv.Dot) + .visible(function () {return drawing || Object.keys(self.bbox[0]).length > 0; }) + .data(() => { + if (self.bbox && Object.keys(self.bbox[0]).length > 0) { + return [{ + x: self.bbox[0].endX, + y: self.bbox[0].endY + }]; + } else { + return []; + } + }) + .left(d => d.x) + .top(d => d.y) + .radius(Math.log(Math.min(self.width, self.height)) * 1) + .shape("square") + .cursor("move") + .strokeStyle("steelblue") + .lineWidth(2) + .fillStyle(function () { return "rgba(100, 100, 100, 0.6)"; }) + .event("mousedown", pv.Behavior.drag()) + .event("drag", function () { + let adjustedX = this.mouse().x / app.canvas.ds.scale; // Adjust the new position by the inverse of the scale factor + let adjustedY = this.mouse().y / app.canvas.ds.scale; + + // Adjust the new position if it would place the dot outside the bounds of the vis.Panel + adjustedX = Math.max(0, Math.min(self.vis.width(), adjustedX)); + adjustedY = Math.max(0, Math.min(self.vis.height(), adjustedY)); + self.bbox[0].endX = this.mouse().x / app.canvas.ds.scale; + self.bbox[0].endY = this.mouse().y / app.canvas.ds.scale; + self.vis.render(); + }) + .event("dragend", function () { + self.updateData(); + }); + + //create positive points + this.vis.add(pv.Dot) + .data(() => this.points) + .left(d => d.x) + .top(d => d.y) + .radius(Math.log(Math.min(self.width, self.height)) * 4) + .shape("circle") + .cursor("move") + .strokeStyle(function () { return i == this.index ? "#07f907" : "#139613"; }) + .lineWidth(4) + .fillStyle(function () { return "rgba(100, 100, 100, 0.6)"; }) + .event("mousedown", pv.Behavior.drag()) + .event("dragstart", function () { + i = this.index; + }) + .event("dragend", function () { + if (pv.event.button === 2 && i !== 0 && i !== self.points.length - 1) { + this.index = i; + self.points.splice(i--, 1); + } + self.updateData(); + + }) + .event("drag", function () { + let adjustedX = this.mouse().x / app.canvas.ds.scale; // Adjust the new X position by the inverse of the scale factor + let adjustedY = this.mouse().y / app.canvas.ds.scale; // Adjust the new Y position by the inverse of the scale factor + // Determine the bounds of the vis.Panel + const panelWidth = self.vis.width(); + const panelHeight = self.vis.height(); + + // Adjust the new position if it would place the dot outside the bounds of the vis.Panel + adjustedX = Math.max(0, Math.min(panelWidth, adjustedX)); + adjustedY = Math.max(0, Math.min(panelHeight, adjustedY)); + self.points[this.index] = { x: adjustedX, y: adjustedY }; // Update the point's position + self.vis.render(); // Re-render the visualization to reflect the new position + }) + + .anchor("center") + .add(pv.Label) + .left(d => d.x < this.width / 2 ? d.x + 30 : d.x - 35) // Shift label to right if on left half, otherwise shift to left + .top(d => d.y < this.height / 2 ? 
d.y + 25 : d.y - 25) // Shift label down if on top half, otherwise shift up + .font(25 + "px sans-serif") + .text(d => {return this.points.indexOf(d); }) + .textStyle("#139613") + .textShadow("2px 2px 2px black") + .add(pv.Dot) // Add smaller point in the center + .data(() => this.points) + .left(d => d.x) + .top(d => d.y) + .radius(2) // Smaller radius for the center point + .shape("circle") + .fillStyle("red") // Color for the center point + .lineWidth(1); // Stroke thickness for the center point + + //create negative points + this.vis.add(pv.Dot) + .data(() => this.neg_points) + .left(d => d.x) + .top(d => d.y) + .radius(Math.log(Math.min(self.width, self.height)) * 4) + .shape("circle") + .cursor("move") + .strokeStyle(function () { return i == this.index ? "#f91111" : "#891616"; }) + .lineWidth(4) + .fillStyle(function () { return "rgba(100, 100, 100, 0.6)"; }) + .event("mousedown", pv.Behavior.drag()) + .event("dragstart", function () { + i = this.index; + }) + .event("dragend", function () { + if (pv.event.button === 2 && i !== 0 && i !== self.neg_points.length - 1) { + this.index = i; + self.neg_points.splice(i--, 1); + } + self.updateData(); + + }) + .event("drag", function () { + let adjustedX = this.mouse().x / app.canvas.ds.scale; // Adjust the new X position by the inverse of the scale factor + let adjustedY = this.mouse().y / app.canvas.ds.scale; // Adjust the new Y position by the inverse of the scale factor + // Determine the bounds of the vis.Panel + const panelWidth = self.vis.width(); + const panelHeight = self.vis.height(); + + // Adjust the new position if it would place the dot outside the bounds of the vis.Panel + adjustedX = Math.max(0, Math.min(panelWidth, adjustedX)); + adjustedY = Math.max(0, Math.min(panelHeight, adjustedY)); + self.neg_points[this.index] = { x: adjustedX, y: adjustedY }; // Update the point's position + self.vis.render(); // Re-render the visualization to reflect the new position + }) + .anchor("center") + .add(pv.Label) + .left(d => d.x < this.width / 2 ? d.x + 30 : d.x - 35) // Shift label to right if on left half, otherwise shift to left + .top(d => d.y < this.height / 2 ? 
d.y + 25 : d.y - 25) // Shift label down if on top half, otherwise shift up + .font(25 + "px sans-serif") + .text(d => {return this.neg_points.indexOf(d); }) + .textStyle("red") + .textShadow("2px 2px 2px black") + .add(pv.Dot) // Add smaller point in the center + .data(() => this.neg_points) + .left(d => d.x) + .top(d => d.y) + .radius(2) // Smaller radius for the center point + .shape("circle") + .fillStyle("red") // Color for the center point + .lineWidth(1); // Stroke thickness for the center point + + if (this.points.length != 0) { + this.vis.render(); + } + + var svgElement = this.vis.canvas(); + svgElement.style['zIndex'] = "2" + svgElement.style['position'] = "relative" + this.node.pointsEditor.element.appendChild(svgElement); + + if (this.width > 256) { + this.node.setSize([this.width + 45, this.node.size[1]]); + } + this.node.setSize([this.node.size[0], this.height + 300]); + this.updateData(); + this.refreshBackgroundImage(); + + }//end constructor + + updateData = () => { + if (!this.points || this.points.length === 0) { + console.log("no points"); + return; + } + const combinedPoints = { + positive: this.points, + negative: this.neg_points, + }; + this.pointsStoreWidget.value = JSON.stringify(combinedPoints); + this.pos_coordWidget.value = JSON.stringify(this.points); + this.neg_coordWidget.value = JSON.stringify(this.neg_points); + + if (this.bbox.length != 0) { + let bboxString = JSON.stringify(this.bbox); + this.bboxStoreWidget.value = bboxString; + this.bboxWidget.value = bboxString; + } + + this.vis.render(); + }; + + handleImageLoad = (img, file, base64String) => { + console.log(img.width, img.height); // Access width and height here + this.widthWidget.value = img.width; + this.heightWidget.value = img.height; + + if (img.width != this.vis.width() || img.height != this.vis.height()) { + if (img.width > 256) { + this.node.setSize([img.width + 45, this.node.size[1]]); + } + this.node.setSize([this.node.size[0], img.height + 300]); + this.vis.width(img.width); + this.vis.height(img.height); + this.height = img.height; + this.width = img.width; + this.updateData(); + } + this.backgroundImage.url(file ? 
URL.createObjectURL(file) : `data:${this.node.properties.imgData.type};base64,${base64String}`).visible(true).root.render(); + }; + + processImage = (img, file) => { + const canvas = document.createElement('canvas'); + const ctx = canvas.getContext('2d'); + + const maxWidth = 800; // maximum width + const maxHeight = 600; // maximum height + let width = img.width; + let height = img.height; + + // Calculate the new dimensions while preserving the aspect ratio + if (width > height) { + if (width > maxWidth) { + height *= maxWidth / width; + width = maxWidth; + } + } else { + if (height > maxHeight) { + width *= maxHeight / height; + height = maxHeight; + } + } + + canvas.width = width; + canvas.height = height; + ctx.drawImage(img, 0, 0, width, height); + + // Get the compressed image data as a Base64 string + const base64String = canvas.toDataURL('image/jpeg', 0.5).replace('data:', '').replace(/^.+,/, ''); // 0.5 is the quality from 0 to 1 + + this.node.properties.imgData = { + name: file.name, + lastModified: file.lastModified, + size: file.size, + type: file.type, + base64: base64String + }; + handleImageLoad(img, file, base64String); +}; + + handleImageFile = (file) => { + const reader = new FileReader(); + reader.onloadend = () => { + const img = new Image(); + img.src = reader.result; + img.onload = () => processImage(img, file); + }; + reader.readAsDataURL(file); + + const imageUrl = URL.createObjectURL(file); + const img = new Image(); + img.src = imageUrl; + img.onload = () => this.handleImageLoad(img, file, null); + }; + + refreshBackgroundImage = () => { + if (this.node.properties.imgData && this.node.properties.imgData.base64) { + const base64String = this.node.properties.imgData.base64; + const imageUrl = `data:${this.node.properties.imgData.type};base64,${base64String}`; + const img = new Image(); + img.src = imageUrl; + img.onload = () => this.handleImageLoad(img, null, base64String); + } + }; + + createContextMenu = () => { + self = this; + document.addEventListener('contextmenu', function (e) { + e.preventDefault(); + }); + + document.addEventListener('click', function (e) { + if (!self.node.contextMenu.contains(e.target)) { + self.node.contextMenu.style.display = 'none'; + } + }); + + this.node.menuItems.forEach((menuItem, index) => { + self = this; + menuItem.addEventListener('click', function (e) { + e.preventDefault(); + switch (index) { + case 0: + // Create file input element + const fileInput = document.createElement('input'); + fileInput.type = 'file'; + fileInput.accept = 'image/*'; // Accept only image files + + // Listen for file selection + fileInput.addEventListener('change', function (event) { + const file = event.target.files[0]; // Get the selected file + + if (file) { + const imageUrl = URL.createObjectURL(file); + let img = new Image(); + img.src = imageUrl; + img.onload = () => self.handleImageLoad(img, file, null); + } + }); + + fileInput.click(); + + self.node.contextMenu.style.display = 'none'; + break; + case 1: + self.backgroundImage.visible(false).root.render(); + self.node.properties.imgData = null; + self.node.contextMenu.style.display = 'none'; + break; + } + }); + }); + }//end createContextMenu +}//end class + + +//from melmass +export function hideWidgetForGood(node, widget, suffix = '') { + widget.origType = widget.type + widget.origComputeSize = widget.computeSize + widget.origSerializeValue = widget.serializeValue + widget.computeSize = () => [0, -4] // -4 is due to the gap litegraph adds between widgets automatically + widget.type = 
"converted-widget" + suffix + // widget.serializeValue = () => { + // // Prevent serializing the widget if we have no input linked + // const w = node.inputs?.find((i) => i.widget?.name === widget.name); + // if (w?.link == null) { + // return undefined; + // } + // return widget.origSerializeValue ? widget.origSerializeValue() : widget.value; + // }; + + // Hide any linked widgets, e.g. seed+seedControl + if (widget.linkedWidgets) { + for (const w of widget.linkedWidgets) { + hideWidgetForGood(node, w, ':' + widget.name) + } + } +} \ No newline at end of file diff --git a/ComfyUI-KJNodes/web/js/setgetnodes.js b/ComfyUI-KJNodes/web/js/setgetnodes.js new file mode 100644 index 0000000000000000000000000000000000000000..bb2b19dfef5516cbe06df40cd806d47323ebad74 --- /dev/null +++ b/ComfyUI-KJNodes/web/js/setgetnodes.js @@ -0,0 +1,559 @@ +import { app } from "../../../scripts/app.js"; + +//based on diffus3's SetGet: https://github.com/diffus3/ComfyUI-extensions + +// Nodes that allow you to tunnel connections for cleaner graphs +function setColorAndBgColor(type) { + const colorMap = { + "MODEL": LGraphCanvas.node_colors.blue, + "LATENT": LGraphCanvas.node_colors.purple, + "VAE": LGraphCanvas.node_colors.red, + "CONDITIONING": LGraphCanvas.node_colors.brown, + "IMAGE": LGraphCanvas.node_colors.pale_blue, + "CLIP": LGraphCanvas.node_colors.yellow, + "FLOAT": LGraphCanvas.node_colors.green, + "MASK": { color: "#1c5715", bgcolor: "#1f401b"}, + "INT": { color: "#1b4669", bgcolor: "#29699c"}, + "CONTROL_NET": { color: "#156653", bgcolor: "#1c453b"}, + "NOISE": { color: "#2e2e2e", bgcolor: "#242121"}, + "GUIDER": { color: "#3c7878", bgcolor: "#1c453b"}, + "SAMPLER": { color: "#614a4a", bgcolor: "#3b2c2c"}, + "SIGMAS": { color: "#485248", bgcolor: "#272e27"}, + + }; + + const colors = colorMap[type]; + if (colors) { + this.color = colors.color; + this.bgcolor = colors.bgcolor; + } +} +let isAlertShown = false; +let disablePrefix = app.ui.settings.getSettingValue("KJNodes.disablePrefix") +const LGraphNode = LiteGraph.LGraphNode + +function showAlertWithThrottle(message, delay) { + if (!isAlertShown) { + isAlertShown = true; + alert(message); + setTimeout(() => isAlertShown = false, delay); + } +} +app.registerExtension({ + name: "SetNode", + registerCustomNodes() { + class SetNode extends LGraphNode { + defaultVisibility = true; + serialize_widgets = true; + drawConnection = false; + currentGetters = null; + slotColor = "#FFF"; + canvas = app.canvas; + menuEntry = "Show connections"; + + constructor(title) { + super(title) + if (!this.properties) { + this.properties = { + "previousName": "" + }; + } + this.properties.showOutputText = SetNode.defaultVisibility; + + const node = this; + + this.addWidget( + "text", + "Constant", + '', + (s, t, u, v, x) => { + node.validateName(node.graph); + if(this.widgets[0].value !== ''){ + this.title = (!disablePrefix ? 
"Set_" : "") + this.widgets[0].value; + } + this.update(); + this.properties.previousName = this.widgets[0].value; + }, + {} + ) + + this.addInput("*", "*"); + this.addOutput("*", '*'); + + this.onConnectionsChange = function( + slotType, //1 = input, 2 = output + slot, + isChangeConnect, + link_info, + output + ) { + //On Disconnect + if (slotType == 1 && !isChangeConnect) { + if(this.inputs[slot].name === ''){ + this.inputs[slot].type = '*'; + this.inputs[slot].name = '*'; + this.title = "Set" + } + } + if (slotType == 2 && !isChangeConnect) { + this.outputs[slot].type = '*'; + this.outputs[slot].name = '*'; + + } + //On Connect + if (link_info && node.graph && slotType == 1 && isChangeConnect) { + const fromNode = node.graph._nodes.find((otherNode) => otherNode.id == link_info.origin_id); + + if (fromNode && fromNode.outputs && fromNode.outputs[link_info.origin_slot]) { + const type = fromNode.outputs[link_info.origin_slot].type; + + if (this.title === "Set"){ + this.title = (!disablePrefix ? "Set_" : "") + type; + } + if (this.widgets[0].value === '*'){ + this.widgets[0].value = type + } + + this.validateName(node.graph); + this.inputs[0].type = type; + this.inputs[0].name = type; + + if (app.ui.settings.getSettingValue("KJNodes.nodeAutoColor")){ + setColorAndBgColor.call(this, type); + } + } else { + alert("Error: Set node input undefined. Most likely you're missing custom nodes"); + } + } + if (link_info && node.graph && slotType == 2 && isChangeConnect) { + const fromNode = node.graph._nodes.find((otherNode) => otherNode.id == link_info.origin_id); + + if (fromNode && fromNode.inputs && fromNode.inputs[link_info.origin_slot]) { + const type = fromNode.inputs[link_info.origin_slot].type; + + this.outputs[0].type = type; + this.outputs[0].name = type; + } else { + alert("Error: Get Set node output undefined. Most likely you're missing custom nodes"); + } + } + + + //Update either way + this.update(); + } + + this.validateName = function(graph) { + let widgetValue = node.widgets[0].value; + + if (widgetValue !== '') { + let tries = 0; + const existingValues = new Set(); + + graph._nodes.forEach(otherNode => { + if (otherNode !== this && otherNode.type === 'SetNode') { + existingValues.add(otherNode.widgets[0].value); + } + }); + + while (existingValues.has(widgetValue)) { + widgetValue = node.widgets[0].value + "_" + tries; + tries++; + } + + node.widgets[0].value = widgetValue; + this.update(); + } + } + + this.clone = function () { + const cloned = SetNode.prototype.clone.apply(this); + cloned.inputs[0].name = '*'; + cloned.inputs[0].type = '*'; + cloned.value = ''; + cloned.properties.previousName = ''; + cloned.size = cloned.computeSize(); + return cloned; + }; + + this.onAdded = function(graph) { + this.validateName(graph); + } + + + this.update = function() { + if (!node.graph) { + return; + } + + const getters = this.findGetters(node.graph); + getters.forEach(getter => { + getter.setType(this.inputs[0].type); + }); + + if (this.widgets[0].value) { + const gettersWithPreviousName = this.findGetters(node.graph, true); + gettersWithPreviousName.forEach(getter => { + getter.setName(this.widgets[0].value); + }); + } + + const allGetters = node.graph._nodes.filter(otherNode => otherNode.type === "GetNode"); + allGetters.forEach(otherNode => { + if (otherNode.setComboValues) { + otherNode.setComboValues(); + } + }); + } + + + this.findGetters = function(graph, checkForPreviousName) { + const name = checkForPreviousName ? 
this.properties.previousName : this.widgets[0].value; + return graph._nodes.filter(otherNode => otherNode.type === 'GetNode' && otherNode.widgets[0].value === name && name !== ''); + } + + + // This node is purely frontend and does not impact the resulting prompt so should not be serialized + this.isVirtualNode = true; + } + + + onRemoved() { + const allGetters = this.graph._nodes.filter((otherNode) => otherNode.type == "GetNode"); + allGetters.forEach((otherNode) => { + if (otherNode.setComboValues) { + otherNode.setComboValues([this]); + } + }) + } + getExtraMenuOptions(_, options) { + this.menuEntry = this.drawConnection ? "Hide connections" : "Show connections"; + options.unshift( + { + content: this.menuEntry, + callback: () => { + this.currentGetters = this.findGetters(this.graph); + if (this.currentGetters.length == 0) return; + let linkType = (this.currentGetters[0].outputs[0].type); + this.slotColor = this.canvas.default_connection_color_byType[linkType] + this.menuEntry = this.drawConnection ? "Hide connections" : "Show connections"; + this.drawConnection = !this.drawConnection; + this.canvas.setDirty(true, true); + + }, + has_submenu: true, + submenu: { + title: "Color", + options: [ + { + content: "Highlight", + callback: () => { + this.slotColor = "orange" + this.canvas.setDirty(true, true); + } + } + ], + }, + }, + { + content: "Hide all connections", + callback: () => { + const allGetters = this.graph._nodes.filter(otherNode => otherNode.type === "GetNode" || otherNode.type === "SetNode"); + allGetters.forEach(otherNode => { + otherNode.drawConnection = false; + console.log(otherNode); + }); + + this.menuEntry = "Show connections"; + this.drawConnection = false + this.canvas.setDirty(true, true); + + }, + + }, + ); + // Dynamically add a submenu for all getters + this.currentGetters = this.findGetters(this.graph); + if (this.currentGetters) { + + let gettersSubmenu = this.currentGetters.map(getter => ({ + + content: `${getter.title} id: ${getter.id}`, + callback: () => { + this.canvas.centerOnNode(getter); + this.canvas.selectNode(getter, false); + this.canvas.setDirty(true, true); + + }, + })); + + options.unshift({ + content: "Getters", + has_submenu: true, + submenu: { + title: "GetNodes", + options: gettersSubmenu, + } + }); + } + } + + + onDrawForeground(ctx, lGraphCanvas) { + if (this.drawConnection) { + this._drawVirtualLinks(lGraphCanvas, ctx); + } + } + // onDrawCollapsed(ctx, lGraphCanvas) { + // if (this.drawConnection) { + // this._drawVirtualLinks(lGraphCanvas, ctx); + // } + // } + _drawVirtualLinks(lGraphCanvas, ctx) { + if (!this.currentGetters?.length) return; + var title = this.getTitle ? 
this.getTitle() : this.title; + var title_width = ctx.measureText(title).width; + if (!this.flags.collapsed) { + var start_node_slotpos = [ + this.size[0], + LiteGraph.NODE_TITLE_HEIGHT * 0.5, + ]; + } + else { + + var start_node_slotpos = [ + title_width + 55, + -15, + + ]; + } + + for (const getter of this.currentGetters) { + if (!this.flags.collapsed) { + var end_node_slotpos = this.getConnectionPos(false, 0); + end_node_slotpos = [ + getter.pos[0] - end_node_slotpos[0] + this.size[0], + getter.pos[1] - end_node_slotpos[1] + ]; + } + else { + var end_node_slotpos = this.getConnectionPos(false, 0); + end_node_slotpos = [ + getter.pos[0] - end_node_slotpos[0] + title_width + 50, + getter.pos[1] - end_node_slotpos[1] - 30 + ]; + } + lGraphCanvas.renderLink( + ctx, + start_node_slotpos, + end_node_slotpos, + null, + false, + null, + this.slotColor, + LiteGraph.RIGHT, + LiteGraph.LEFT + ); + } + } + } + + LiteGraph.registerNodeType( + "SetNode", + Object.assign(SetNode, { + title: "Set", + }) + ); + + SetNode.category = "KJNodes"; + }, +}); + +app.registerExtension({ + name: "GetNode", + registerCustomNodes() { + class GetNode extends LGraphNode { + + defaultVisibility = true; + serialize_widgets = true; + drawConnection = false; + slotColor = "#FFF"; + currentSetter = null; + canvas = app.canvas; + + constructor(title) { + super(title) + if (!this.properties) { + this.properties = {}; + } + this.properties.showOutputText = GetNode.defaultVisibility; + const node = this; + this.addWidget( + "combo", + "Constant", + "", + (e) => { + this.onRename(); + }, + { + values: () => { + const setterNodes = node.graph._nodes.filter((otherNode) => otherNode.type == 'SetNode'); + return setterNodes.map((otherNode) => otherNode.widgets[0].value).sort(); + } + } + ) + + this.addOutput("*", '*'); + this.onConnectionsChange = function( + slotType, //0 = output, 1 = input + slot, //self-explanatory + isChangeConnect, + link_info, + output + ) { + this.validateLinks(); + } + + this.setName = function(name) { + node.widgets[0].value = name; + node.onRename(); + node.serialize(); + } + + this.onRename = function() { + const setter = this.findSetter(node.graph); + if (setter) { + let linkType = (setter.inputs[0].type); + + this.setType(linkType); + this.title = (!disablePrefix ? 
"Get_" : "") + setter.widgets[0].value; + + if (app.ui.settings.getSettingValue("KJNodes.nodeAutoColor")){ + setColorAndBgColor.call(this, linkType); + } + + } else { + this.setType('*'); + } + } + + this.clone = function () { + const cloned = GetNode.prototype.clone.apply(this); + cloned.size = cloned.computeSize(); + return cloned; + }; + + this.validateLinks = function() { + if (this.outputs[0].type !== '*' && this.outputs[0].links) { + this.outputs[0].links.filter(linkId => { + const link = node.graph.links[linkId]; + return link && (link.type !== this.outputs[0].type && link.type !== '*'); + }).forEach(linkId => { + node.graph.removeLink(linkId); + }); + } + }; + + this.setType = function(type) { + this.outputs[0].name = type; + this.outputs[0].type = type; + this.validateLinks(); + } + + this.findSetter = function(graph) { + const name = this.widgets[0].value; + const foundNode = graph._nodes.find(otherNode => otherNode.type === 'SetNode' && otherNode.widgets[0].value === name && name !== ''); + return foundNode; + }; + + this.goToSetter = function() { + const setter = this.findSetter(this.graph); + this.canvas.centerOnNode(setter); + this.canvas.selectNode(setter, false); + }; + + // This node is purely frontend and does not impact the resulting prompt so should not be serialized + this.isVirtualNode = true; + } + + getInputLink(slot) { + const setter = this.findSetter(this.graph); + + if (setter) { + const slotInfo = setter.inputs[slot]; + const link = this.graph.links[slotInfo.link]; + return link; + } else { + const errorMessage = "No SetNode found for " + this.widgets[0].value + "(" + this.type + ")"; + showAlertWithThrottle(errorMessage, 5000); + //throw new Error(errorMessage); + } + } + onAdded(graph) { + } + getExtraMenuOptions(_, options) { + let menuEntry = this.drawConnection ? "Hide connections" : "Show connections"; + + options.unshift( + { + content: "Go to setter", + callback: () => { + this.goToSetter(); + }, + }, + { + content: menuEntry, + callback: () => { + this.currentSetter = this.findSetter(this.graph); + if (this.currentSetter.length == 0) return; + let linkType = (this.currentSetter.inputs[0].type); + this.drawConnection = !this.drawConnection; + this.slotColor = this.canvas.default_connection_color_byType[linkType] + menuEntry = this.drawConnection ? 
"Hide connections" : "Show connections"; + this.canvas.setDirty(true, true); + }, + }, + ); + } + + onDrawForeground(ctx, lGraphCanvas) { + if (this.drawConnection) { + this._drawVirtualLink(lGraphCanvas, ctx); + } + } + // onDrawCollapsed(ctx, lGraphCanvas) { + // if (this.drawConnection) { + // this._drawVirtualLink(lGraphCanvas, ctx); + // } + // } + _drawVirtualLink(lGraphCanvas, ctx) { + if (!this.currentSetter) return; + + let start_node_slotpos = this.currentSetter.getConnectionPos(false, 0); + start_node_slotpos = [ + start_node_slotpos[0] - this.pos[0], + start_node_slotpos[1] - this.pos[1], + ]; + let end_node_slotpos = [0, -LiteGraph.NODE_TITLE_HEIGHT * 0.5]; + lGraphCanvas.renderLink( + ctx, + start_node_slotpos, + end_node_slotpos, + null, + false, + null, + this.slotColor + ); + } + } + + LiteGraph.registerNodeType( + "GetNode", + Object.assign(GetNode, { + title: "Get", + }) + ); + + GetNode.category = "KJNodes"; + }, +}); diff --git a/ComfyUI-KJNodes/web/js/spline_editor.js b/ComfyUI-KJNodes/web/js/spline_editor.js new file mode 100644 index 0000000000000000000000000000000000000000..ebed27fe8b95a4a7bfbf307f9690674d6b498311 --- /dev/null +++ b/ComfyUI-KJNodes/web/js/spline_editor.js @@ -0,0 +1,724 @@ +import { app } from '../../../scripts/app.js' + +//from melmass +export function makeUUID() { + let dt = new Date().getTime() + const uuid = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) => { + const r = ((dt + Math.random() * 16) % 16) | 0 + dt = Math.floor(dt / 16) + return (c === 'x' ? r : (r & 0x3) | 0x8).toString(16) + }) + return uuid +} + +export const loadScript = ( + FILE_URL, + async = true, + type = 'text/javascript', + ) => { + return new Promise((resolve, reject) => { + try { + // Check if the script already exists + const existingScript = document.querySelector(`script[src="${FILE_URL}"]`) + if (existingScript) { + resolve({ status: true, message: 'Script already loaded' }) + return + } + + const scriptEle = document.createElement('script') + scriptEle.type = type + scriptEle.async = async + scriptEle.src = FILE_URL + + scriptEle.addEventListener('load', (ev) => { + resolve({ status: true }) + }) + + scriptEle.addEventListener('error', (ev) => { + reject({ + status: false, + message: `Failed to load the script ${FILE_URL}`, + }) + }) + + document.body.appendChild(scriptEle) + } catch (error) { + reject(error) + } + }) + } + const create_documentation_stylesheet = () => { + const tag = 'kj-splineditor-stylesheet' + + let styleTag = document.head.querySelector(tag) + + if (!styleTag) { + styleTag = document.createElement('style') + styleTag.type = 'text/css' + styleTag.id = tag + styleTag.innerHTML = ` + .spline-editor { + + position: absolute; + + font: 12px monospace; + line-height: 1.5em; + padding: 10px; + z-index: 0; + overflow: hidden; + } + ` + document.head.appendChild(styleTag) + } + } + +loadScript('/kjweb_async/svg-path-properties.min.js').catch((e) => { + console.log(e) +}) +loadScript('/kjweb_async/protovis.min.js').catch((e) => { + console.log(e) +}) +create_documentation_stylesheet() + +function chainCallback(object, property, callback) { + if (object == undefined) { + //This should not happen. 
+ console.error("Tried to add callback to non-existant object") + return; + } + if (property in object) { + const callback_orig = object[property] + object[property] = function () { + const r = callback_orig.apply(this, arguments); + callback.apply(this, arguments); + return r + }; + } else { + object[property] = callback; + } +} +app.registerExtension({ + name: 'KJNodes.SplineEditor', + + async beforeRegisterNodeDef(nodeType, nodeData) { + if (nodeData?.name === 'SplineEditor') { + chainCallback(nodeType.prototype, "onNodeCreated", function () { + + hideWidgetForGood(this, this.widgets.find(w => w.name === "coordinates")) + + var element = document.createElement("div"); + this.uuid = makeUUID() + element.id = `spline-editor-${this.uuid}` + + this.splineEditor = this.addDOMWidget(nodeData.name, "SplineEditorWidget", element, { + serialize: false, + hideOnZoom: false, + }); + + // context menu + this.contextMenu = document.createElement("div"); + this.contextMenu.id = "context-menu"; + this.contextMenu.style.display = "none"; + this.contextMenu.style.position = "absolute"; + this.contextMenu.style.backgroundColor = "#202020"; + this.contextMenu.style.minWidth = "100px"; + this.contextMenu.style.boxShadow = "0px 8px 16px 0px rgba(0,0,0,0.2)"; + this.contextMenu.style.zIndex = "100"; + this.contextMenu.style.padding = "5px"; + + function styleMenuItem(menuItem) { + menuItem.style.display = "block"; + menuItem.style.padding = "5px"; + menuItem.style.color = "#FFF"; + menuItem.style.fontFamily = "Arial, sans-serif"; + menuItem.style.fontSize = "16px"; + menuItem.style.textDecoration = "none"; + menuItem.style.marginBottom = "5px"; + } + function createMenuItem(id, textContent) { + let menuItem = document.createElement("a"); + menuItem.href = "#"; + menuItem.id = `menu-item-${id}`; + menuItem.textContent = textContent; + styleMenuItem(menuItem); + return menuItem; + } + + // Create an array of menu items using the createMenuItem function + this.menuItems = [ + createMenuItem(1, "Toggle handles"), + createMenuItem(2, "Display sample points"), + createMenuItem(3, "Switch point shape"), + createMenuItem(4, "Background image"), + createMenuItem(5, "Invert point order") + ]; + + // Add mouseover and mouseout event listeners to each menu item for styling + this.menuItems.forEach(menuItem => { + menuItem.addEventListener('mouseover', function() { + this.style.backgroundColor = "gray"; + }); + + menuItem.addEventListener('mouseout', function() { + this.style.backgroundColor = "#202020"; + }); + }); + + // Append each menu item to the context menu + this.menuItems.forEach(menuItem => { + this.contextMenu.appendChild(menuItem); + }); + + document.body.appendChild( this.contextMenu); + + this.addWidget("button", "New spline", null, () => { + if (!this.properties || !("points" in this.properties)) { + createSplineEditor(this) + this.addProperty("points", this.constructor.type, "string"); + } + else { + createSplineEditor(this, true) + } + }); + + this.setSize([550, 950]); + this.resizable = false; + this.splineEditor.parentEl = document.createElement("div"); + this.splineEditor.parentEl.className = "spline-editor"; + this.splineEditor.parentEl.id = `spline-editor-${this.uuid}` + element.appendChild(this.splineEditor.parentEl); + + chainCallback(this, "onConfigure", function() { + createSplineEditor(this); + }); + + }); // onAfterGraphConfigured + }//node created + } //before register +})//register + + +function createSplineEditor(context, reset=false) { + console.log("creatingSplineEditor") + + // context 
menu + function createContextMenu() { + document.addEventListener('contextmenu', function(e) { + e.preventDefault(); + }); + + document.addEventListener('click', function(e) { + if (!context.contextMenu.contains(e.target)) { + context.contextMenu.style.display = 'none'; + } + }); + + context.menuItems.forEach((menuItem, index) => { + menuItem.addEventListener('click', function(e) { + e.preventDefault(); + // Logic specific to each menu item based on its index or id + switch (index) { + case 0: + e.preventDefault(); + if (!drawHandles) { + drawHandles = true + vis.add(pv.Line) + .data(() => points.map((point, index) => ({ + start: point, + end: [index] + }))) + .left(d => d.start.x) + .top(d => d.start.y) + .interpolate("linear") + .tension(0) // Straight lines + .strokeStyle("#ff7f0e") // Same color as control points + .lineWidth(1) + .visible(() => drawHandles); + vis.render(); + } else { + drawHandles = false + vis.render(); + } + context.contextMenu.style.display = 'none'; + break; + case 1: + e.preventDefault(); + drawSamplePoints = !drawSamplePoints; + updatePath(); + break; + case 2: + e.preventDefault(); + if (dotShape == "circle"){ + dotShape = "triangle" + } + else { + dotShape = "circle" + } + console.log(dotShape) + updatePath(); + break; + case 3: + // Create file input element + const fileInput = document.createElement('input'); + fileInput.type = 'file'; + fileInput.accept = 'image/*'; // Accept only image files + + // Listen for file selection + fileInput.addEventListener('change', function(event) { + const file = event.target.files[0]; // Get the selected file + + if (file) { + // Create a URL for the selected file + const imageUrl = URL.createObjectURL(file); + + // Set the backgroundImage with the new URL and make it visible + backgroundImage + .url(imageUrl) + .visible(true) + .root.render(); + } + }); + + // If the backgroundImage is already visible, hide it. Otherwise, show file input. 
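+ // (A second click on the "Background image" menu entry therefore acts as a toggle:
+ // it hides the current image rather than opening the file picker again.)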
+ if (backgroundImage.visible()) { + backgroundImage.visible(false) + .root.render(); + } else { + // Trigger the file input dialog + fileInput.click(); + } + context.contextMenu.style.display = 'none'; + break; + case 4: + e.preventDefault(); + points.reverse(); + updatePath(); + } + }); + }); + } + + var dotShape = "circle"; + var drawSamplePoints = false; + + createContextMenu(); + function updatePath() { + if (samplingMethod != "controlpoints") { + var coords = samplePoints(pathElements[0], points_to_sample, samplingMethod, w); + } + else { + var coords = points + } + + if (drawSamplePoints) { + if (pointsLayer) { + // Update the data of the existing points layer + pointsLayer.data(coords); + } else { + // Create the points layer if it doesn't exist + pointsLayer = vis.add(pv.Dot) + .data(coords) + .left(function(d) { return d.x; }) + .top(function(d) { return d.y; }) + .radius(5) // Adjust the radius as needed + .fillStyle("red") // Change the color as needed + .strokeStyle("black") // Change the stroke color as needed + .lineWidth(1); // Adjust the line width as needed + } + } else { + if (pointsLayer) { + // Remove the points layer + pointsLayer.data([]); + vis.render(); + } + } + let coordsString = JSON.stringify(coords); + pointsStoreWidget.value = JSON.stringify(points); + if (coordWidget) { + coordWidget.value = coordsString; + } + vis.render(); + } + + if (reset && context.splineEditor.element) { + context.splineEditor.element.innerHTML = ''; // Clear the container + } + const coordWidget = context.widgets.find(w => w.name === "coordinates"); + const interpolationWidget = context.widgets.find(w => w.name === "interpolation"); + const pointsWidget = context.widgets.find(w => w.name === "points_to_sample"); + const pointsStoreWidget = context.widgets.find(w => w.name === "points_store"); + const tensionWidget = context.widgets.find(w => w.name === "tension"); + const minValueWidget = context.widgets.find(w => w.name === "min_value"); + const maxValueWidget = context.widgets.find(w => w.name === "max_value"); + const samplingMethodWidget = context.widgets.find(w => w.name === "sampling_method"); + const widthWidget = context.widgets.find(w => w.name === "mask_width"); + const heightWidget = context.widgets.find(w => w.name === "mask_height"); + //const segmentedWidget = context.widgets.find(w => w.name === "segmented"); + + var interpolation = interpolationWidget.value + var tension = tensionWidget.value + var points_to_sample = pointsWidget.value + var rangeMin = minValueWidget.value + var rangeMax = maxValueWidget.value + var pointsLayer = null; + var samplingMethod = samplingMethodWidget.value + + if (samplingMethod == "path") { + dotShape = "triangle" + } + + + interpolationWidget.callback = () => { + interpolation = interpolationWidget.value + updatePath(); + } + samplingMethodWidget.callback = () => { + samplingMethod = samplingMethodWidget.value + if (samplingMethod == "path") { + dotShape = "triangle" + } + else if (samplingMethod == "controlpoints") { + dotShape = "circle" + drawSamplePoints = true; + } + updatePath(); + } + tensionWidget.callback = () => { + tension = tensionWidget.value + updatePath(); + } + pointsWidget.callback = () => { + points_to_sample = pointsWidget.value + updatePath(); + } + minValueWidget.callback = () => { + rangeMin = minValueWidget.value + updatePath(); + } + maxValueWidget.callback = () => { + rangeMax = maxValueWidget.value + updatePath(); + } + widthWidget.callback = () => { + w = widthWidget.value; + if (w > 256) { + context.setSize([w 
+ 45, context.size[1]]); + } + vis.width(w); + updatePath(); +} + heightWidget.callback = () => { + h = heightWidget.value + vis.height(h) + context.setSize([context.size[0], h + 430]); + updatePath(); + } + pointsStoreWidget.callback = () => { + points = JSON.parse(pointsStoreWidget.value); + updatePath(); + } + + // Initialize or reset points array + var drawHandles = false; + var hoverIndex = -1; + var isDragging = false; + var w = widthWidget.value; + var h = heightWidget.value; + var i = 3; + let points = []; + + if (!reset && pointsStoreWidget.value != "") { + points = JSON.parse(pointsStoreWidget.value); + } else { + points = pv.range(1, 4).map((i, index) => { + if (index === 0) { + // First point at the bottom-left corner + return { x: 0, y: h }; + } else if (index === 2) { + // Last point at the top-right corner + return { x: w, y: 0 }; + } else { + // Other points remain as they were + return { + x: i * w / 5, + y: 50 + Math.random() * (h - 100) + }; + } + }); + pointsStoreWidget.value = JSON.stringify(points); + } + + var vis = new pv.Panel() + .width(w) + .height(h) + .fillStyle("#222") + .strokeStyle("gray") + .lineWidth(2) + .antialias(false) + .margin(10) + .event("mousedown", function() { + if (pv.event.shiftKey) { // Use pv.event to access the event object + let scaledMouse = { + x: this.mouse().x / app.canvas.ds.scale, + y: this.mouse().y / app.canvas.ds.scale + }; + i = points.push(scaledMouse) - 1; + updatePath(); + return this; + } + else if (pv.event.ctrlKey) { + // Capture the clicked location + let clickedPoint = { + x: this.mouse().x / app.canvas.ds.scale, + y: this.mouse().y / app.canvas.ds.scale + }; + + // Find the two closest points to the clicked location + let { point1Index, point2Index } = findClosestPoints(points, clickedPoint); + + // Calculate the midpoint between the two closest points + let midpoint = { + x: (points[point1Index].x + points[point2Index].x) / 2, + y: (points[point1Index].y + points[point2Index].y) / 2 + }; + + // Insert the midpoint into the array + points.splice(point2Index, 0, midpoint); + i = point2Index; + updatePath(); + } + else if (pv.event.button === 2) { + context.contextMenu.style.display = 'block'; + context.contextMenu.style.left = `${pv.event.clientX}px`; + context.contextMenu.style.top = `${pv.event.clientY}px`; + } + }) + var backgroundImage = vis.add(pv.Image) + .visible(false) + vis.add(pv.Rule) + .data(pv.range(0, h, 64)) + .bottom(d => d) + .strokeStyle("gray") + .lineWidth(3) + + // vis.add(pv.Rule) + // .data(pv.range(0, points_to_sample, 1)) + // .left(d => d * 512 / (points_to_sample - 1)) + // .strokeStyle("gray") + // .lineWidth(2) + + vis.add(pv.Line) + .data(() => points) + .left(d => d.x) + .top(d => d.y) + .interpolate(() => interpolation) + .tension(() => tension) + .segmented(() => false) + .strokeStyle(pv.Colors.category10().by(pv.index)) + .lineWidth(3) + + vis.add(pv.Dot) + .data(() => points) + .left(d => d.x) + .top(d => d.y) + .radius(10) + .shape(function() { + return dotShape; + }) + .angle(function() { + const index = this.index; + let angle = 0; + + if (dotShape === "triangle") { + let dxNext = 0, dyNext = 0; + if (index < points.length - 1) { + dxNext = points[index + 1].x - points[index].x; + dyNext = points[index + 1].y - points[index].y; + } + + let dxPrev = 0, dyPrev = 0; + if (index > 0) { + dxPrev = points[index].x - points[index - 1].x; + dyPrev = points[index].y - points[index - 1].y; + } + + const dx = (dxNext + dxPrev) / 2; + const dy = (dyNext + dyPrev) / 2; + + angle = Math.atan2(dy, 
dx); + angle -= Math.PI / 2; + angle = (angle + 2 * Math.PI) % (2 * Math.PI); + } + + return angle; + }) + .cursor("move") + .strokeStyle(function() { return i == this.index ? "#ff7f0e" : "#1f77b4"; }) + .fillStyle(function() { return "rgba(100, 100, 100, 0.3)"; }) + .event("mousedown", pv.Behavior.drag()) + .event("dragstart", function() { + i = this.index; + hoverIndex = this.index; + isDragging = true; + if (pv.event.button === 2 && i !== 0 && i !== points.length - 1) { + points.splice(i--, 1); + vis.render(); + } + return this; + }) + .event("dragend", function() { + if (this.pathElements !== null) { + updatePath(); + } + isDragging = false; + }) + .event("drag", function() { + let adjustedX = this.mouse().x / app.canvas.ds.scale; // Adjust the new X position by the inverse of the scale factor + let adjustedY = this.mouse().y / app.canvas.ds.scale; // Adjust the new Y position by the inverse of the scale factor + // Determine the bounds of the vis.Panel + const panelWidth = vis.width(); + const panelHeight = vis.height(); + + // Adjust the new position if it would place the dot outside the bounds of the vis.Panel + adjustedX = Math.max(0, Math.min(panelWidth, adjustedX)); + adjustedY = Math.max(0, Math.min(panelHeight, adjustedY)); + points[this.index] = { x: adjustedX, y: adjustedY }; // Update the point's position + vis.render(); // Re-render the visualization to reflect the new position + }) + .event("mouseover", function() { + hoverIndex = this.index; // Set the hover index to the index of the hovered dot + vis.render(); // Re-render the visualization + }) + .event("mouseout", function() { + !isDragging && (hoverIndex = -1); // Reset the hover index when the mouse leaves the dot + vis.render(); // Re-render the visualization + }) + .anchor("center") + .add(pv.Label) + .visible(function() { + return hoverIndex === this.index; // Only show the label for the hovered dot + }) + .left(d => d.x < w / 2 ? d.x + 80 : d.x - 70) // Shift label to right if on left half, otherwise shift to left + .top(d => d.y < h / 2 ? 
d.y + 20 : d.y - 20) // Shift label down if on top half, otherwise shift up + .font(12 + "px sans-serif") + .text(d => { + if (samplingMethod == "path") { + return `X: ${Math.round(d.x)}, Y: ${Math.round(d.y)}`; + } else { + let frame = Math.round((d.x / w) * points_to_sample); + let normalizedY = (1.0 - (d.y / h) - 0.0) * (rangeMax - rangeMin) + rangeMin; + let normalizedX = (d.x / w); + return `F: ${frame}, X: ${normalizedX.toFixed(2)}, Y: ${normalizedY.toFixed(2)}`; + } + }) + .textStyle("orange") + + vis.render(); + var svgElement = vis.canvas(); + svgElement.style['zIndex'] = "2" + svgElement.style['position'] = "relative" + context.splineEditor.element.appendChild(svgElement); + var pathElements = svgElement.getElementsByTagName('path'); // Get all path elements + + if (w > 256) { + context.setSize([w + 45, context.size[1]]); + } + context.setSize([context.size[0], h + 430]); + updatePath(); +} + +function samplePoints(svgPathElement, numSamples, samplingMethod, width) { + var svgWidth = width; // Fixed width of the SVG element + var pathLength = svgPathElement.getTotalLength(); + var points = []; + + for (var i = 0; i < numSamples; i++) { + if (samplingMethod === "time") { + // Calculate the x-coordinate for the current sample based on the SVG's width + var x = (svgWidth / (numSamples - 1)) * i; + // Find the point on the path that intersects the vertical line at the calculated x-coordinate + var point = findPointAtX(svgPathElement, x, pathLength); + } + else if (samplingMethod === "path") { + // Calculate the distance along the path for the current sample + var distance = (pathLength / (numSamples - 1)) * i; + // Get the point at the current distance + var point = svgPathElement.getPointAtLength(distance); + } + + // Add the point to the array of points + points.push({ x: point.x, y: point.y }); + } + return points; +} + +function findClosestPoints(points, clickedPoint) { + // Calculate distances from clickedPoint to each point in the array + let distances = points.map(point => { + let dx = clickedPoint.x - point.x; + let dy = clickedPoint.y - point.y; + return { index: points.indexOf(point), distance: Math.sqrt(dx * dx + dy * dy) }; + }); + // Sort distances and get the indices of the two closest points + let sortedDistances = distances.sort((a, b) => a.distance - b.distance); + let closestPoint1Index = sortedDistances[0].index; + let closestPoint2Index = sortedDistances[1].index; + // Ensure point1Index is always the smaller index + if (closestPoint1Index > closestPoint2Index) { + [closestPoint1Index, closestPoint2Index] = [closestPoint2Index, closestPoint1Index]; + } + return { point1Index: closestPoint1Index, point2Index: closestPoint2Index }; +} + +function findPointAtX(svgPathElement, targetX, pathLength) { + let low = 0; + let high = pathLength; + let bestPoint = svgPathElement.getPointAtLength(0); + + while (low <= high) { + let mid = low + (high - low) / 2; + let point = svgPathElement.getPointAtLength(mid); + + if (Math.abs(point.x - targetX) < 1) { + return point; // The point is close enough to the target + } + + if (point.x < targetX) { + low = mid + 1; + } else { + high = mid - 1; + } + + // Keep track of the closest point found so far + if (Math.abs(point.x - targetX) < Math.abs(bestPoint.x - targetX)) { + bestPoint = point; + } + } + + // Return the closest point found + return bestPoint; +} + +//from melmass +export function hideWidgetForGood(node, widget, suffix = '') { + widget.origType = widget.type + widget.origComputeSize = widget.computeSize + 
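+ // The widget's original type, computeSize and serializeValue are stashed so they could be
+ // restored later; the overrides below collapse the widget row and rename its type so the
+ // hidden widget no longer renders or takes up space.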
widget.origSerializeValue = widget.serializeValue + widget.computeSize = () => [0, -4] // -4 is due to the gap litegraph adds between widgets automatically + widget.type = "converted-widget" + suffix + // widget.serializeValue = () => { + // // Prevent serializing the widget if we have no input linked + // const w = node.inputs?.find((i) => i.widget?.name === widget.name); + // if (w?.link == null) { + // return undefined; + // } + // return widget.origSerializeValue ? widget.origSerializeValue() : widget.value; + // }; + + // Hide any linked widgets, e.g. seed+seedControl + if (widget.linkedWidgets) { + for (const w of widget.linkedWidgets) { + hideWidgetForGood(node, w, ':' + widget.name) + } + } +} \ No newline at end of file diff --git a/ComfyUI-KJNodes/web/red.png b/ComfyUI-KJNodes/web/red.png new file mode 100644 index 0000000000000000000000000000000000000000..4352c118b2c5fa6f33edc4d99a5e4d22649ff827 Binary files /dev/null and b/ComfyUI-KJNodes/web/red.png differ diff --git a/ComfyUI-Kolors-MZ/ComfyUI_IPAdapter_plus/CrossAttentionPatch.py b/ComfyUI-Kolors-MZ/ComfyUI_IPAdapter_plus/CrossAttentionPatch.py new file mode 100644 index 0000000000000000000000000000000000000000..26fc5dc54dfcf89fb1ab23b28fe9de5d70a1af22 --- /dev/null +++ b/ComfyUI-Kolors-MZ/ComfyUI_IPAdapter_plus/CrossAttentionPatch.py @@ -0,0 +1,209 @@ +import torch +import math +import torch.nn.functional as F +from comfy.ldm.modules.attention import optimized_attention +from .utils import tensor_to_size + +class Attn2Replace: + def __init__(self, callback=None, **kwargs): + self.callback = [callback] + self.kwargs = [kwargs] + + def add(self, callback, **kwargs): + self.callback.append(callback) + self.kwargs.append(kwargs) + + for key, value in kwargs.items(): + setattr(self, key, value) + + def __call__(self, q, k, v, extra_options): + dtype = q.dtype + out = optimized_attention(q, k, v, extra_options["n_heads"]) + sigma = extra_options["sigmas"].detach().cpu()[0].item() if 'sigmas' in extra_options else 999999999.9 + + for i, callback in enumerate(self.callback): + if sigma <= self.kwargs[i]["sigma_start"] and sigma >= self.kwargs[i]["sigma_end"]: + out = out + callback(out, q, k, v, extra_options, **self.kwargs[i]) + + return out.to(dtype=dtype) + +def ipadapter_attention(out, q, k, v, extra_options, module_key='', ipadapter=None, weight=1.0, cond=None, cond_alt=None, uncond=None, weight_type="linear", mask=None, sigma_start=0.0, sigma_end=1.0, unfold_batch=False, embeds_scaling='V only', **kwargs): + dtype = q.dtype + cond_or_uncond = extra_options["cond_or_uncond"] + block_type = extra_options["block"][0] + #block_id = extra_options["block"][1] + t_idx = extra_options["transformer_index"] + layers = 11 if '101_to_k_ip' in ipadapter.ip_layers.to_kvs else 16 + k_key = module_key + "_to_k_ip" + v_key = module_key + "_to_v_ip" + + # extra options for AnimateDiff + ad_params = extra_options['ad_params'] if "ad_params" in extra_options else None + + b = q.shape[0] + seq_len = q.shape[1] + batch_prompt = b // len(cond_or_uncond) + _, _, oh, ow = extra_options["original_shape"] + + if weight_type == 'ease in': + weight = weight * (0.05 + 0.95 * (1 - t_idx / layers)) + elif weight_type == 'ease out': + weight = weight * (0.05 + 0.95 * (t_idx / layers)) + elif weight_type == 'ease in-out': + weight = weight * (0.05 + 0.95 * (1 - abs(t_idx - (layers/2)) / (layers/2))) + elif weight_type == 'reverse in-out': + weight = weight * (0.05 + 0.95 * (abs(t_idx - (layers/2)) / (layers/2))) + elif weight_type == 'weak input' and 
block_type == 'input': + weight = weight * 0.2 + elif weight_type == 'weak middle' and block_type == 'middle': + weight = weight * 0.2 + elif weight_type == 'weak output' and block_type == 'output': + weight = weight * 0.2 + elif weight_type == 'strong middle' and (block_type == 'input' or block_type == 'output'): + weight = weight * 0.2 + elif isinstance(weight, dict): + if t_idx not in weight: + return 0 + + if weight_type == "style transfer precise": + if layers == 11 and t_idx == 3: + uncond = cond + cond = cond * 0 + elif layers == 16 and (t_idx == 4 or t_idx == 5): + uncond = cond + cond = cond * 0 + elif weight_type == "composition precise": + if layers == 11 and t_idx != 3: + uncond = cond + cond = cond * 0 + elif layers == 16 and (t_idx != 4 and t_idx != 5): + uncond = cond + cond = cond * 0 + + weight = weight[t_idx] + + if cond_alt is not None and t_idx in cond_alt: + cond = cond_alt[t_idx] + del cond_alt + + if unfold_batch: + # Check AnimateDiff context window + if ad_params is not None and ad_params["sub_idxs"] is not None: + if isinstance(weight, torch.Tensor): + weight = tensor_to_size(weight, ad_params["full_length"]) + weight = torch.Tensor(weight[ad_params["sub_idxs"]]) + if torch.all(weight == 0): + return 0 + weight = weight.repeat(len(cond_or_uncond), 1, 1) # repeat for cond and uncond + elif weight == 0: + return 0 + + # if image length matches or exceeds full_length get sub_idx images + if cond.shape[0] >= ad_params["full_length"]: + cond = torch.Tensor(cond[ad_params["sub_idxs"]]) + uncond = torch.Tensor(uncond[ad_params["sub_idxs"]]) + # otherwise get sub_idxs images + else: + cond = tensor_to_size(cond, ad_params["full_length"]) + uncond = tensor_to_size(uncond, ad_params["full_length"]) + cond = cond[ad_params["sub_idxs"]] + uncond = uncond[ad_params["sub_idxs"]] + else: + if isinstance(weight, torch.Tensor): + weight = tensor_to_size(weight, batch_prompt) + if torch.all(weight == 0): + return 0 + weight = weight.repeat(len(cond_or_uncond), 1, 1) # repeat for cond and uncond + elif weight == 0: + return 0 + + cond = tensor_to_size(cond, batch_prompt) + uncond = tensor_to_size(uncond, batch_prompt) + + k_cond = ipadapter.ip_layers.to_kvs[k_key](cond) + k_uncond = ipadapter.ip_layers.to_kvs[k_key](uncond) + v_cond = ipadapter.ip_layers.to_kvs[v_key](cond) + v_uncond = ipadapter.ip_layers.to_kvs[v_key](uncond) + else: + # TODO: should we always convert the weights to a tensor? 
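+ # weight may be a scalar or a per-frame tensor: tensors are resized to the prompt batch via
+ # tensor_to_size() and repeated once per cond/uncond entry, and an all-zero weight
+ # short-circuits the IPAdapter contribution by returning 0.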
+ if isinstance(weight, torch.Tensor): + weight = tensor_to_size(weight, batch_prompt) + if torch.all(weight == 0): + return 0 + weight = weight.repeat(len(cond_or_uncond), 1, 1) # repeat for cond and uncond + elif weight == 0: + return 0 + + k_cond = ipadapter.ip_layers.to_kvs[k_key](cond).repeat(batch_prompt, 1, 1) + k_uncond = ipadapter.ip_layers.to_kvs[k_key](uncond).repeat(batch_prompt, 1, 1) + v_cond = ipadapter.ip_layers.to_kvs[v_key](cond).repeat(batch_prompt, 1, 1) + v_uncond = ipadapter.ip_layers.to_kvs[v_key](uncond).repeat(batch_prompt, 1, 1) + + if len(cond_or_uncond) == 3: # TODO: conxl, I need to check this + ip_k = torch.cat([(k_cond, k_uncond, k_cond)[i] for i in cond_or_uncond], dim=0) + ip_v = torch.cat([(v_cond, v_uncond, v_cond)[i] for i in cond_or_uncond], dim=0) + else: + ip_k = torch.cat([(k_cond, k_uncond)[i] for i in cond_or_uncond], dim=0) + ip_v = torch.cat([(v_cond, v_uncond)[i] for i in cond_or_uncond], dim=0) + + if embeds_scaling == 'K+mean(V) w/ C penalty': + scaling = float(ip_k.shape[2]) / 1280.0 + weight = weight * scaling + ip_k = ip_k * weight + ip_v_mean = torch.mean(ip_v, dim=1, keepdim=True) + ip_v = (ip_v - ip_v_mean) + ip_v_mean * weight + out_ip = optimized_attention(q, ip_k, ip_v, extra_options["n_heads"]) + del ip_v_mean + elif embeds_scaling == 'K+V w/ C penalty': + scaling = float(ip_k.shape[2]) / 1280.0 + weight = weight * scaling + ip_k = ip_k * weight + ip_v = ip_v * weight + out_ip = optimized_attention(q, ip_k, ip_v, extra_options["n_heads"]) + elif embeds_scaling == 'K+V': + ip_k = ip_k * weight + ip_v = ip_v * weight + out_ip = optimized_attention(q, ip_k, ip_v, extra_options["n_heads"]) + else: + #ip_v = ip_v * weight + out_ip = optimized_attention(q, ip_k, ip_v, extra_options["n_heads"]) + out_ip = out_ip * weight # I'm doing this to get the same results as before + + if mask is not None: + mask_h = oh / math.sqrt(oh * ow / seq_len) + mask_h = int(mask_h) + int((seq_len % int(mask_h)) != 0) + mask_w = seq_len // mask_h + + # check if using AnimateDiff and sliding context window + if (mask.shape[0] > 1 and ad_params is not None and ad_params["sub_idxs"] is not None): + # if mask length matches or exceeds full_length, get sub_idx masks + if mask.shape[0] >= ad_params["full_length"]: + mask = torch.Tensor(mask[ad_params["sub_idxs"]]) + mask = F.interpolate(mask.unsqueeze(1), size=(mask_h, mask_w), mode="bilinear").squeeze(1) + else: + mask = F.interpolate(mask.unsqueeze(1), size=(mask_h, mask_w), mode="bilinear").squeeze(1) + mask = tensor_to_size(mask, ad_params["full_length"]) + mask = mask[ad_params["sub_idxs"]] + else: + mask = F.interpolate(mask.unsqueeze(1), size=(mask_h, mask_w), mode="bilinear").squeeze(1) + mask = tensor_to_size(mask, batch_prompt) + + mask = mask.repeat(len(cond_or_uncond), 1, 1) + mask = mask.view(mask.shape[0], -1, 1).repeat(1, 1, out.shape[2]) + + # covers cases where extreme aspect ratios can cause the mask to have a wrong size + mask_len = mask_h * mask_w + if mask_len < seq_len: + pad_len = seq_len - mask_len + pad1 = pad_len // 2 + pad2 = pad_len - pad1 + mask = F.pad(mask, (0, 0, pad1, pad2), value=0.0) + elif mask_len > seq_len: + crop_start = (mask_len - seq_len) // 2 + mask = mask[:, crop_start:crop_start+seq_len, :] + + out_ip = out_ip * mask + + #out = out + out_ip + + return out_ip.to(dtype=dtype) diff --git a/ComfyUI-Kolors-MZ/ComfyUI_IPAdapter_plus/IPAdapterPlus.py b/ComfyUI-Kolors-MZ/ComfyUI_IPAdapter_plus/IPAdapterPlus.py new file mode 100644 index 
0000000000000000000000000000000000000000..91967e78ed05d3656afe892e560ea3daf4fc08a0 --- /dev/null +++ b/ComfyUI-Kolors-MZ/ComfyUI_IPAdapter_plus/IPAdapterPlus.py @@ -0,0 +1,1835 @@ +import torch +import os +import math +import folder_paths + +import comfy.model_management as model_management +from node_helpers import conditioning_set_values +from comfy.clip_vision import load as load_clip_vision +from comfy.sd import load_lora_for_models +import comfy.utils + +import torch.nn as nn +from PIL import Image +try: + import torchvision.transforms.v2 as T +except ImportError: + import torchvision.transforms as T + +from .image_proj_models import MLPProjModel, MLPProjModelFaceId, ProjModelFaceIdPlus, Resampler, ImageProjModel +from .CrossAttentionPatch import Attn2Replace, ipadapter_attention +from .utils import ( + encode_image_masked, + tensor_to_size, + contrast_adaptive_sharpening, + tensor_to_image, + image_to_tensor, + ipadapter_model_loader, + insightface_loader, + get_clipvision_file, + get_ipadapter_file, + get_lora_file, +) + +# set the models directory +if "ipadapter" not in folder_paths.folder_names_and_paths: + current_paths = [os.path.join(folder_paths.models_dir, "ipadapter")] +else: + current_paths, _ = folder_paths.folder_names_and_paths["ipadapter"] +folder_paths.folder_names_and_paths["ipadapter"] = (current_paths, folder_paths.supported_pt_extensions) + +WEIGHT_TYPES = ["linear", "ease in", "ease out", 'ease in-out', 'reverse in-out', 'weak input', 'weak output', 'weak middle', 'strong middle', 'style transfer', 'composition', 'strong style transfer', 'style and composition', 'style transfer precise', 'composition precise'] + +""" +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Main IPAdapter Class +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +class IPAdapter(nn.Module): + def __init__(self, ipadapter_model, cross_attention_dim=1024, output_cross_attention_dim=1024, clip_embeddings_dim=1024, clip_extra_context_tokens=4, is_sdxl=False, is_plus=False, is_full=False, is_faceid=False, is_portrait_unnorm=False, is_kwai_kolors=False, encoder_hid_proj=None): + super().__init__() + + self.clip_embeddings_dim = clip_embeddings_dim + self.cross_attention_dim = cross_attention_dim + self.output_cross_attention_dim = output_cross_attention_dim + self.clip_extra_context_tokens = clip_extra_context_tokens + self.is_sdxl = is_sdxl + self.is_full = is_full + self.is_plus = is_plus + self.is_portrait_unnorm = is_portrait_unnorm + self.is_kwai_kolors = is_kwai_kolors + + if is_faceid and not is_portrait_unnorm: + self.image_proj_model = self.init_proj_faceid() + elif is_full: + self.image_proj_model = self.init_proj_full() + elif is_plus or is_portrait_unnorm: + self.image_proj_model = self.init_proj_plus() + else: + self.image_proj_model = self.init_proj() + + self.image_proj_model.load_state_dict(ipadapter_model["image_proj"]) + + self.ip_layers = To_KV(ipadapter_model["ip_adapter"], encoder_hid_proj_state_dict=encoder_hid_proj) + + def init_proj(self): + image_proj_model = ImageProjModel( + cross_attention_dim=self.cross_attention_dim, + clip_embeddings_dim=self.clip_embeddings_dim, + clip_extra_context_tokens=self.clip_extra_context_tokens + ) + return image_proj_model + + def init_proj_plus(self): + image_proj_model = Resampler( + dim=self.cross_attention_dim, + depth=4, + dim_head=64, + heads=20 if self.is_sdxl and not self.is_kwai_kolors else 12, + num_queries=self.clip_extra_context_tokens, + 
embedding_dim=self.clip_embeddings_dim, + output_dim=self.output_cross_attention_dim, + ff_mult=4 + ) + return image_proj_model + + def init_proj_full(self): + image_proj_model = MLPProjModel( + cross_attention_dim=self.cross_attention_dim, + clip_embeddings_dim=self.clip_embeddings_dim + ) + return image_proj_model + + def init_proj_faceid(self): + if self.is_plus: + image_proj_model = ProjModelFaceIdPlus( + cross_attention_dim=self.cross_attention_dim, + id_embeddings_dim=512, + clip_embeddings_dim=self.clip_embeddings_dim, # 1280, + num_tokens=self.clip_extra_context_tokens, # 4, + ) + else: + image_proj_model = MLPProjModelFaceId( + cross_attention_dim=self.cross_attention_dim, + id_embeddings_dim=512, + num_tokens=self.clip_extra_context_tokens, + ) + return image_proj_model + + @torch.inference_mode() + def get_image_embeds(self, clip_embed, clip_embed_zeroed, batch_size): + torch_device = model_management.get_torch_device() + intermediate_device = model_management.intermediate_device() + + if batch_size == 0: + batch_size = clip_embed.shape[0] + intermediate_device = torch_device + elif batch_size > clip_embed.shape[0]: + batch_size = clip_embed.shape[0] + + clip_embed = torch.split(clip_embed, batch_size, dim=0) + clip_embed_zeroed = torch.split(clip_embed_zeroed, batch_size, dim=0) + + image_prompt_embeds = [] + uncond_image_prompt_embeds = [] + + for ce, cez in zip(clip_embed, clip_embed_zeroed): + image_prompt_embeds.append(self.image_proj_model(ce.to(torch_device)).to(intermediate_device)) + uncond_image_prompt_embeds.append(self.image_proj_model(cez.to(torch_device)).to(intermediate_device)) + + del clip_embed, clip_embed_zeroed + + image_prompt_embeds = torch.cat(image_prompt_embeds, dim=0) + uncond_image_prompt_embeds = torch.cat(uncond_image_prompt_embeds, dim=0) + + torch.cuda.empty_cache() + + #image_prompt_embeds = self.image_proj_model(clip_embed) + #uncond_image_prompt_embeds = self.image_proj_model(clip_embed_zeroed) + return image_prompt_embeds, uncond_image_prompt_embeds + + @torch.inference_mode() + def get_image_embeds_faceid_plus(self, face_embed, clip_embed, s_scale, shortcut, batch_size): + torch_device = model_management.get_torch_device() + intermediate_device = model_management.intermediate_device() + + if batch_size == 0: + batch_size = clip_embed.shape[0] + intermediate_device = torch_device + elif batch_size > clip_embed.shape[0]: + batch_size = clip_embed.shape[0] + + face_embed_batch = torch.split(face_embed, batch_size, dim=0) + clip_embed_batch = torch.split(clip_embed, batch_size, dim=0) + + embeds = [] + for face_embed, clip_embed in zip(face_embed_batch, clip_embed_batch): + embeds.append(self.image_proj_model(face_embed.to(torch_device), clip_embed.to(torch_device), scale=s_scale, shortcut=shortcut).to(intermediate_device)) + + del face_embed_batch, clip_embed_batch + + embeds = torch.cat(embeds, dim=0) + torch.cuda.empty_cache() + #embeds = self.image_proj_model(face_embed, clip_embed, scale=s_scale, shortcut=shortcut) + return embeds + +class To_KV(nn.Module): + def __init__(self, state_dict, encoder_hid_proj_state_dict=None): + super().__init__() + + if encoder_hid_proj_state_dict is not None: + encoder_hid_proj_linear = nn.Linear(encoder_hid_proj_state_dict["weight"].shape[1], encoder_hid_proj_state_dict["weight"].shape[0], bias=True) + encoder_hid_proj_linear.weight.data = encoder_hid_proj_state_dict["weight"] + encoder_hid_proj_linear.bias.data = encoder_hid_proj_state_dict["bias"] + + self.to_kvs = nn.ModuleDict() + for key, value in 
state_dict.items(): + if encoder_hid_proj_state_dict is None: + self.to_kvs[key.replace(".weight", "").replace(".", "_")] = nn.Linear(value.shape[1], value.shape[0], bias=False) + self.to_kvs[key.replace(".weight", "").replace(".", "_")].weight.data = value + else: + linear = nn.Linear(value.shape[1], value.shape[0], bias=False) + linear.weight.data = value + self.to_kvs[key.replace(".weight", "").replace(".", "_")] = nn.Sequential(encoder_hid_proj_linear, linear) + + +def set_model_patch_replace(model, patch_kwargs, key): + to = model.model_options["transformer_options"].copy() + if "patches_replace" not in to: + to["patches_replace"] = {} + else: + to["patches_replace"] = to["patches_replace"].copy() + + if "attn2" not in to["patches_replace"]: + to["patches_replace"]["attn2"] = {} + else: + to["patches_replace"]["attn2"] = to["patches_replace"]["attn2"].copy() + + if key not in to["patches_replace"]["attn2"]: + to["patches_replace"]["attn2"][key] = Attn2Replace(ipadapter_attention, **patch_kwargs) + model.model_options["transformer_options"] = to + else: + to["patches_replace"]["attn2"][key].add(ipadapter_attention, **patch_kwargs) + +def ipadapter_execute(model, + ipadapter, + clipvision, + insightface=None, + image=None, + image_composition=None, + image_negative=None, + weight=1.0, + weight_composition=1.0, + weight_faceidv2=None, + weight_type="linear", + combine_embeds="concat", + start_at=0.0, + end_at=1.0, + attn_mask=None, + pos_embed=None, + neg_embed=None, + unfold_batch=False, + embeds_scaling='V only', + layer_weights=None, + encode_batch_size=0, + style_boost=None, + composition_boost=None,): + device = model_management.get_torch_device() + dtype = model_management.unet_dtype() + if dtype not in [torch.float32, torch.float16, torch.bfloat16]: + dtype = torch.float16 if model_management.should_use_fp16() else torch.float32 + + is_full = "proj.3.weight" in ipadapter["image_proj"] + is_portrait = "proj.2.weight" in ipadapter["image_proj"] and not "proj.3.weight" in ipadapter["image_proj"] and not "0.to_q_lora.down.weight" in ipadapter["ip_adapter"] + is_portrait_unnorm = "portraitunnorm" in ipadapter + is_faceid = is_portrait or "0.to_q_lora.down.weight" in ipadapter["ip_adapter"] or is_portrait_unnorm + is_plus = (is_full or "latents" in ipadapter["image_proj"] or "perceiver_resampler.proj_in.weight" in ipadapter["image_proj"]) and not is_portrait_unnorm + is_faceidv2 = "faceidplusv2" in ipadapter + output_cross_attention_dim = ipadapter["ip_adapter"]["1.to_k_ip.weight"].shape[1] + is_sdxl = output_cross_attention_dim == 2048 + is_kwai_kolors = is_sdxl and "layers.0.0.to_out.weight" in ipadapter["image_proj"] and ipadapter["image_proj"]["layers.0.0.to_out.weight"].shape[0] == 2048 + + + # kwai-kolors faceid + is_kwai_kolors_faceid = "perceiver_resampler.layers.0.0.to_out.weight" in ipadapter["image_proj"] and ipadapter["image_proj"]["perceiver_resampler.layers.0.0.to_out.weight"].shape[0] == 4096 + if is_kwai_kolors_faceid: + is_faceid = True + is_kwai_kolors = True + is_faceidv2 = True + + if is_faceid and not insightface: + raise Exception("insightface model is required for FaceID models") + + if is_faceidv2: + weight_faceidv2 = weight_faceidv2 if weight_faceidv2 is not None else weight*2 + + cross_attention_dim = 1280 if (is_plus and is_sdxl and not is_faceid and not is_kwai_kolors) or is_portrait_unnorm else output_cross_attention_dim + clip_extra_context_tokens = 16 if (is_plus and not is_faceid) or is_portrait or is_portrait_unnorm else 4 + + if is_kwai_kolors_faceid: + 
clip_extra_context_tokens = 6 + cross_attention_dim = 4096 + + if image is not None and image.shape[1] != image.shape[2]: + print("\033[33mINFO: the IPAdapter reference image is not a square, CLIPImageProcessor will resize and crop it at the center. If the main focus of the picture is not in the middle the result might not be what you are expecting.\033[0m") + + if isinstance(weight, list): + weight = torch.tensor(weight).unsqueeze(-1).unsqueeze(-1).to(device, dtype=dtype) if unfold_batch else weight[0] + + if style_boost is not None: + weight_type = "style transfer precise" + elif composition_boost is not None: + weight_type = "composition precise" + + # special weight types + if layer_weights is not None and layer_weights != '': + weight = { int(k): float(v)*weight for k, v in [x.split(":") for x in layer_weights.split(",")] } + weight_type = weight_type if weight_type == "style transfer precise" or weight_type == "composition precise" else "linear" + elif weight_type == "style transfer": + weight = { 6:weight } if is_sdxl else { 0:weight, 1:weight, 2:weight, 3:weight, 9:weight, 10:weight, 11:weight, 12:weight, 13:weight, 14:weight, 15:weight } + elif weight_type == "composition": + weight = { 3:weight } if is_sdxl else { 4:weight*0.25, 5:weight } + elif weight_type == "strong style transfer": + if is_sdxl: + weight = { 0:weight, 1:weight, 2:weight, 4:weight, 5:weight, 6:weight, 7:weight, 8:weight, 9:weight, 10:weight } + else: + weight = { 0:weight, 1:weight, 2:weight, 3:weight, 6:weight, 7:weight, 8:weight, 9:weight, 10:weight, 11:weight, 12:weight, 13:weight, 14:weight, 15:weight } + elif weight_type == "style and composition": + if is_sdxl: + weight = { 3:weight_composition, 6:weight } + else: + weight = { 0:weight, 1:weight, 2:weight, 3:weight, 4:weight_composition*0.25, 5:weight_composition, 9:weight, 10:weight, 11:weight, 12:weight, 13:weight, 14:weight, 15:weight } + elif weight_type == "strong style and composition": + if is_sdxl: + weight = { 0:weight, 1:weight, 2:weight, 3:weight_composition, 4:weight, 5:weight, 6:weight, 7:weight, 8:weight, 9:weight, 10:weight } + else: + weight = { 0:weight, 1:weight, 2:weight, 3:weight, 4:weight_composition, 5:weight_composition, 6:weight, 7:weight, 8:weight, 9:weight, 10:weight, 11:weight, 12:weight, 13:weight, 14:weight, 15:weight } + elif weight_type == "style transfer precise": + weight_composition = style_boost if style_boost is not None else weight + if is_sdxl: + weight = { 3:weight_composition, 6:weight } + else: + weight = { 0:weight, 1:weight, 2:weight, 3:weight, 4:weight_composition*0.25, 5:weight_composition, 9:weight, 10:weight, 11:weight, 12:weight, 13:weight, 14:weight, 15:weight } + elif weight_type == "composition precise": + weight_composition = weight + weight = composition_boost if composition_boost is not None else weight + if is_sdxl: + weight = { 0:weight*.1, 1:weight*.1, 2:weight*.1, 3:weight_composition, 4:weight*.1, 5:weight*.1, 6:weight, 7:weight*.1, 8:weight*.1, 9:weight*.1, 10:weight*.1 } + else: + weight = { 0:weight, 1:weight, 2:weight, 3:weight, 4:weight_composition*0.25, 5:weight_composition, 6:weight*.1, 7:weight*.1, 8:weight*.1, 9:weight, 10:weight, 11:weight, 12:weight, 13:weight, 14:weight, 15:weight } + + image_size=224 if not is_kwai_kolors else 336 + + img_comp_cond_embeds = None + face_cond_embeds = None + if is_faceid: + if insightface is None: + raise Exception("Insightface model is required for FaceID models") + + from insightface.utils import face_align + + insightface.det_model.input_size = 
(640,640) # reset the detection size + image_iface = tensor_to_image(image) + face_cond_embeds = [] + image = [] + + face_image_size = 256 if is_sdxl else 224 + if is_kwai_kolors_faceid: + face_image_size = 336 + for i in range(image_iface.shape[0]): + for size in [(size, size) for size in range(640, 256, -64)]: + insightface.det_model.input_size = size # TODO: hacky but seems to be working + face = insightface.get(image_iface[i]) + if face: + if not is_portrait_unnorm: + face_cond_embeds.append(torch.from_numpy(face[0].normed_embedding).unsqueeze(0)) + else: + face_cond_embeds.append(torch.from_numpy(face[0].embedding).unsqueeze(0)) + image.append(image_to_tensor(face_align.norm_crop(image_iface[i], landmark=face[0].kps, image_size=face_image_size))) + + if 640 not in size: + print(f"\033[33mINFO: InsightFace detection resolution lowered to {size}.\033[0m") + break + else: + raise Exception('InsightFace: No face detected.') + face_cond_embeds = torch.stack(face_cond_embeds).to(device, dtype=dtype) + image = torch.stack(image) + del image_iface, face + + + if image is not None: + img_cond_embeds = encode_image_masked(clipvision, image, batch_size=encode_batch_size, size=image_size) + if image_composition is not None: + img_comp_cond_embeds = encode_image_masked(clipvision, image_composition, batch_size=encode_batch_size, size=image_size) + + if is_plus: + img_cond_embeds = img_cond_embeds.penultimate_hidden_states + image_negative = image_negative if image_negative is not None else torch.zeros([1, image_size, image_size, 3]) + img_uncond_embeds = encode_image_masked(clipvision, image_negative, batch_size=encode_batch_size, size=image_size).penultimate_hidden_states + if image_composition is not None: + img_comp_cond_embeds = img_comp_cond_embeds.penultimate_hidden_states + else: + img_cond_embeds = img_cond_embeds.image_embeds if not is_faceid else face_cond_embeds + if image_negative is not None and not is_faceid: + img_uncond_embeds = encode_image_masked(clipvision, image_negative, batch_size=encode_batch_size, size=image_size).image_embeds + else: + img_uncond_embeds = torch.zeros_like(img_cond_embeds) + if image_composition is not None: + img_comp_cond_embeds = img_comp_cond_embeds.image_embeds + del image_negative, image_composition + + image = None if not is_faceid else image # if it's face_id we need the cropped face for later + elif pos_embed is not None: + img_cond_embeds = pos_embed + + if neg_embed is not None: + img_uncond_embeds = neg_embed + else: + if is_plus: + img_uncond_embeds = encode_image_masked(clipvision, torch.zeros([1, image_size, image_size, 3]), size=image_size).penultimate_hidden_states + else: + img_uncond_embeds = torch.zeros_like(img_cond_embeds) + del pos_embed, neg_embed + else: + raise Exception("Images or Embeds are required") + + # ensure that cond and uncond have the same batch size + img_uncond_embeds = tensor_to_size(img_uncond_embeds, img_cond_embeds.shape[0]) + + img_cond_embeds = img_cond_embeds.to(device, dtype=dtype) + img_uncond_embeds = img_uncond_embeds.to(device, dtype=dtype) + if img_comp_cond_embeds is not None: + img_comp_cond_embeds = img_comp_cond_embeds.to(device, dtype=dtype) + + # combine the embeddings if needed + if combine_embeds != "concat" and img_cond_embeds.shape[0] > 1 and not unfold_batch: + if combine_embeds == "add": + img_cond_embeds = torch.sum(img_cond_embeds, dim=0).unsqueeze(0) + if face_cond_embeds is not None: + face_cond_embeds = torch.sum(face_cond_embeds, dim=0).unsqueeze(0) + if img_comp_cond_embeds is not None: 
+ img_comp_cond_embeds = torch.sum(img_comp_cond_embeds, dim=0).unsqueeze(0) + elif combine_embeds == "subtract": + img_cond_embeds = img_cond_embeds[0] - torch.mean(img_cond_embeds[1:], dim=0) + img_cond_embeds = img_cond_embeds.unsqueeze(0) + if face_cond_embeds is not None: + face_cond_embeds = face_cond_embeds[0] - torch.mean(face_cond_embeds[1:], dim=0) + face_cond_embeds = face_cond_embeds.unsqueeze(0) + if img_comp_cond_embeds is not None: + img_comp_cond_embeds = img_comp_cond_embeds[0] - torch.mean(img_comp_cond_embeds[1:], dim=0) + img_comp_cond_embeds = img_comp_cond_embeds.unsqueeze(0) + elif combine_embeds == "average": + img_cond_embeds = torch.mean(img_cond_embeds, dim=0).unsqueeze(0) + if face_cond_embeds is not None: + face_cond_embeds = torch.mean(face_cond_embeds, dim=0).unsqueeze(0) + if img_comp_cond_embeds is not None: + img_comp_cond_embeds = torch.mean(img_comp_cond_embeds, dim=0).unsqueeze(0) + elif combine_embeds == "norm average": + img_cond_embeds = torch.mean(img_cond_embeds / torch.norm(img_cond_embeds, dim=0, keepdim=True), dim=0).unsqueeze(0) + if face_cond_embeds is not None: + face_cond_embeds = torch.mean(face_cond_embeds / torch.norm(face_cond_embeds, dim=0, keepdim=True), dim=0).unsqueeze(0) + if img_comp_cond_embeds is not None: + img_comp_cond_embeds = torch.mean(img_comp_cond_embeds / torch.norm(img_comp_cond_embeds, dim=0, keepdim=True), dim=0).unsqueeze(0) + img_uncond_embeds = img_uncond_embeds[0].unsqueeze(0) # TODO: better strategy for uncond could be to average them + + if attn_mask is not None: + attn_mask = attn_mask.to(device, dtype=dtype) + + encoder_hid_proj = None + if is_kwai_kolors_faceid: + if hasattr(model.model, "diffusion_model") and hasattr(model.model.diffusion_model, "encoder_hid_proj"): + encoder_hid_proj = model.model.diffusion_model.encoder_hid_proj.state_dict() + + ipa = IPAdapter( + ipadapter, + cross_attention_dim=cross_attention_dim, + output_cross_attention_dim=output_cross_attention_dim, + clip_embeddings_dim=img_cond_embeds.shape[-1], + clip_extra_context_tokens=clip_extra_context_tokens, + is_sdxl=is_sdxl, + is_plus=is_plus, + is_full=is_full, + is_faceid=is_faceid, + is_portrait_unnorm=is_portrait_unnorm, + is_kwai_kolors=is_kwai_kolors, + encoder_hid_proj=encoder_hid_proj + ).to(device, dtype=dtype) + + if is_faceid and is_plus: + cond = ipa.get_image_embeds_faceid_plus(face_cond_embeds, img_cond_embeds, weight_faceidv2, is_faceidv2, encode_batch_size) + # TODO: check if noise helps with the uncond face embeds + uncond = ipa.get_image_embeds_faceid_plus(torch.zeros_like(face_cond_embeds), img_uncond_embeds, weight_faceidv2, is_faceidv2, encode_batch_size) + else: + cond, uncond = ipa.get_image_embeds(img_cond_embeds, img_uncond_embeds, encode_batch_size) + if img_comp_cond_embeds is not None: + cond_comp = ipa.get_image_embeds(img_comp_cond_embeds, img_uncond_embeds, encode_batch_size)[0] + + cond = cond.to(device, dtype=dtype) + uncond = uncond.to(device, dtype=dtype) + + cond_alt = None + if img_comp_cond_embeds is not None: + cond_alt = { 3: cond_comp.to(device, dtype=dtype) } + + del img_cond_embeds, img_uncond_embeds, img_comp_cond_embeds, face_cond_embeds + + sigma_start = model.get_model_object("model_sampling").percent_to_sigma(start_at) + sigma_end = model.get_model_object("model_sampling").percent_to_sigma(end_at) + + + patch_kwargs = { + "ipadapter": ipa, + "weight": weight, + "cond": cond, + "cond_alt": cond_alt, + "uncond": uncond, + "weight_type": weight_type, + "mask": attn_mask, + "sigma_start": 
sigma_start, + "sigma_end": sigma_end, + "unfold_batch": unfold_batch, + "embeds_scaling": embeds_scaling, + } + + number = 0 + if not is_sdxl: + for id in [1,2,4,5,7,8]: # id of input_blocks that have cross attention + patch_kwargs["module_key"] = str(number*2+1) + set_model_patch_replace(model, patch_kwargs, ("input", id)) + number += 1 + for id in [3,4,5,6,7,8,9,10,11]: # id of output_blocks that have cross attention + patch_kwargs["module_key"] = str(number*2+1) + set_model_patch_replace(model, patch_kwargs, ("output", id)) + number += 1 + patch_kwargs["module_key"] = str(number*2+1) + set_model_patch_replace(model, patch_kwargs, ("middle", 0)) + else: + for id in [4,5,7,8]: # id of input_blocks that have cross attention + block_indices = range(2) if id in [4, 5] else range(10) # transformer_depth + for index in block_indices: + patch_kwargs["module_key"] = str(number*2+1) + set_model_patch_replace(model, patch_kwargs, ("input", id, index)) + number += 1 + for id in range(6): # id of output_blocks that have cross attention + block_indices = range(2) if id in [3, 4, 5] else range(10) # transformer_depth + for index in block_indices: + patch_kwargs["module_key"] = str(number*2+1) + set_model_patch_replace(model, patch_kwargs, ("output", id, index)) + number += 1 + for index in range(10): + patch_kwargs["module_key"] = str(number*2+1) + set_model_patch_replace(model, patch_kwargs, ("middle", 0, index)) + number += 1 + + return (model, image) + +""" +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Loaders +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +class IPAdapterUnifiedLoader: + def __init__(self): + self.lora = None + self.clipvision = { "file": None, "model": None } + self.ipadapter = { "file": None, "model": None } + self.insightface = { "provider": None, "model": None } + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "model": ("MODEL", ), + "preset": (['LIGHT - SD1.5 only (low strength)', 'STANDARD (medium strength)', 'VIT-G (medium strength)', 'PLUS (high strength)', 'PLUS FACE (portraits)', 'FULL FACE - SD1.5 only (portraits stronger)'], ), + }, + "optional": { + "ipadapter": ("IPADAPTER", ), + }} + + RETURN_TYPES = ("MODEL", "IPADAPTER", ) + RETURN_NAMES = ("model", "ipadapter", ) + FUNCTION = "load_models" + CATEGORY = "ipadapter" + + def load_models(self, model, preset, lora_strength=0.0, provider="CPU", ipadapter=None): + pipeline = { "clipvision": { 'file': None, 'model': None }, "ipadapter": { 'file': None, 'model': None }, "insightface": { 'provider': None, 'model': None } } + if ipadapter is not None: + pipeline = ipadapter + + # 1. Load the clipvision model + clipvision_file = get_clipvision_file(preset) + if clipvision_file is None: + raise Exception("ClipVision model not found.") + + if clipvision_file != self.clipvision['file']: + if clipvision_file != pipeline['clipvision']['file']: + self.clipvision['file'] = clipvision_file + self.clipvision['model'] = load_clip_vision(clipvision_file) + print(f"\033[33mINFO: Clip Vision model loaded from {clipvision_file}\033[0m") + else: + self.clipvision = pipeline['clipvision'] + + # 2. 
Load the ipadapter model + is_sdxl = isinstance(model.model, (comfy.model_base.SDXL, comfy.model_base.SDXLRefiner, comfy.model_base.SDXL_instructpix2pix)) + ipadapter_file, is_insightface, lora_pattern = get_ipadapter_file(preset, is_sdxl) + if ipadapter_file is None: + raise Exception("IPAdapter model not found.") + + if ipadapter_file != self.ipadapter['file']: + if pipeline['ipadapter']['file'] != ipadapter_file: + self.ipadapter['file'] = ipadapter_file + self.ipadapter['model'] = ipadapter_model_loader(ipadapter_file) + print(f"\033[33mINFO: IPAdapter model loaded from {ipadapter_file}\033[0m") + else: + self.ipadapter = pipeline['ipadapter'] + + # 3. Load the lora model if needed + if lora_pattern is not None: + lora_file = get_lora_file(lora_pattern) + lora_model = None + if lora_file is None: + raise Exception("LoRA model not found.") + + if self.lora is not None: + if lora_file == self.lora['file']: + lora_model = self.lora['model'] + else: + self.lora = None + torch.cuda.empty_cache() + + if lora_model is None: + lora_model = comfy.utils.load_torch_file(lora_file, safe_load=True) + self.lora = { 'file': lora_file, 'model': lora_model } + print(f"\033[33mINFO: LoRA model loaded from {lora_file}\033[0m") + + if lora_strength > 0: + model, _ = load_lora_for_models(model, None, lora_model, lora_strength, 0) + + # 4. Load the insightface model if needed + if is_insightface: + if provider != self.insightface['provider']: + if pipeline['insightface']['provider'] != provider: + self.insightface['provider'] = provider + self.insightface['model'] = insightface_loader(provider) + print(f"\033[33mINFO: InsightFace model loaded with {provider} provider\033[0m") + else: + self.insightface = pipeline['insightface'] + + return (model, { 'clipvision': self.clipvision, 'ipadapter': self.ipadapter, 'insightface': self.insightface }, ) + +class IPAdapterUnifiedLoaderFaceID(IPAdapterUnifiedLoader): + @classmethod + def INPUT_TYPES(s): + return {"required": { + "model": ("MODEL", ), + "preset": (['FACEID', 'FACEID PLUS - SD1.5 only', 'FACEID PLUS V2', 'FACEID PORTRAIT (style transfer)', 'FACEID PORTRAIT UNNORM - SDXL only (strong)'], ), + "lora_strength": ("FLOAT", { "default": 0.6, "min": 0, "max": 1, "step": 0.01 }), + "provider": (["CPU", "CUDA", "ROCM", "DirectML", "OpenVINO", "CoreML"], ), + }, + "optional": { + "ipadapter": ("IPADAPTER", ), + }} + + RETURN_NAMES = ("MODEL", "ipadapter", ) + CATEGORY = "ipadapter/faceid" + +class IPAdapterUnifiedLoaderCommunity(IPAdapterUnifiedLoader): + @classmethod + def INPUT_TYPES(s): + return {"required": { + "model": ("MODEL", ), + "preset": (['Composition',], ), + }, + "optional": { + "ipadapter": ("IPADAPTER", ), + }} + + CATEGORY = "ipadapter/loaders" + +class IPAdapterModelLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { "ipadapter_file": (folder_paths.get_filename_list("ipadapter"), )}} + + RETURN_TYPES = ("IPADAPTER",) + FUNCTION = "load_ipadapter_model" + CATEGORY = "ipadapter/loaders" + + def load_ipadapter_model(self, ipadapter_file): + ipadapter_file = folder_paths.get_full_path("ipadapter", ipadapter_file) + return (ipadapter_model_loader(ipadapter_file),) + +class IPAdapterInsightFaceLoader: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "provider": (["CPU", "CUDA", "ROCM"], ), + }, + } + + RETURN_TYPES = ("INSIGHTFACE",) + FUNCTION = "load_insightface" + CATEGORY = "ipadapter/loaders" + + def load_insightface(self, provider): + return (insightface_loader(provider),) + +""" 
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Main Apply Nodes +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +class IPAdapterSimple: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 3, "step": 0.05 }), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "weight_type": (['standard', 'prompt is more important', 'style transfer'], ), + }, + "optional": { + "attn_mask": ("MASK",), + } + } + + RETURN_TYPES = ("MODEL",) + FUNCTION = "apply_ipadapter" + CATEGORY = "ipadapter" + + def apply_ipadapter(self, model, ipadapter, image, weight, start_at, end_at, weight_type, attn_mask=None): + if weight_type.startswith("style"): + weight_type = "style transfer" + elif weight_type == "prompt is more important": + weight_type = "ease out" + else: + weight_type = "linear" + + ipa_args = { + "image": image, + "weight": weight, + "start_at": start_at, + "end_at": end_at, + "attn_mask": attn_mask, + "weight_type": weight_type, + "insightface": ipadapter['insightface']['model'] if 'insightface' in ipadapter else None, + } + + if 'ipadapter' not in ipadapter: + raise Exception("IPAdapter model not present in the pipeline. Please load the models with the IPAdapterUnifiedLoader node.") + if 'clipvision' not in ipadapter: + raise Exception("CLIPVision model not present in the pipeline. Please load the models with the IPAdapterUnifiedLoader node.") + + return ipadapter_execute(model.clone(), ipadapter['ipadapter']['model'], ipadapter['clipvision']['model'], **ipa_args) + +class IPAdapterAdvanced: + def __init__(self): + self.unfold_batch = False + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 5, "step": 0.05 }), + "weight_type": (WEIGHT_TYPES, ), + "combine_embeds": (["concat", "add", "subtract", "average", "norm average"],), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + }, + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + } + } + + RETURN_TYPES = ("MODEL",) + FUNCTION = "apply_ipadapter" + CATEGORY = "ipadapter" + + def apply_ipadapter(self, model, ipadapter, start_at=0.0, end_at=1.0, weight=1.0, weight_style=1.0, weight_composition=1.0, expand_style=False, weight_type="linear", combine_embeds="concat", weight_faceidv2=None, image=None, image_style=None, image_composition=None, image_negative=None, clip_vision=None, attn_mask=None, insightface=None, embeds_scaling='V only', layer_weights=None, ipadapter_params=None, encode_batch_size=0, style_boost=None, composition_boost=None): + is_sdxl = isinstance(model.model, (comfy.model_base.SDXL, comfy.model_base.SDXLRefiner, comfy.model_base.SDXL_instructpix2pix)) + + if 'ipadapter' in ipadapter: + ipadapter_model = ipadapter['ipadapter']['model'] + clip_vision = clip_vision if clip_vision is not None else ipadapter['clipvision']['model'] + else: + ipadapter_model = ipadapter + + if clip_vision is None: + raise 
Exception("Missing CLIPVision model.") + + if image_style is not None: # we are doing style + composition transfer + if not is_sdxl: + raise Exception("Style + Composition transfer is only available for SDXL models at the moment.") # TODO: check feasibility for SD1.5 models + + image = image_style + weight = weight_style + if image_composition is None: + image_composition = image_style + + weight_type = "strong style and composition" if expand_style else "style and composition" + if ipadapter_params is not None: # we are doing batch processing + image = ipadapter_params['image'] + attn_mask = ipadapter_params['attn_mask'] + weight = ipadapter_params['weight'] + weight_type = ipadapter_params['weight_type'] + start_at = ipadapter_params['start_at'] + end_at = ipadapter_params['end_at'] + else: + # at this point weight can be a list from the batch-weight or a single float + weight = [weight] + + image = image if isinstance(image, list) else [image] + + work_model = model.clone() + + for i in range(len(image)): + if image[i] is None: + continue + + ipa_args = { + "image": image[i], + "image_composition": image_composition, + "image_negative": image_negative, + "weight": weight[i], + "weight_composition": weight_composition, + "weight_faceidv2": weight_faceidv2, + "weight_type": weight_type if not isinstance(weight_type, list) else weight_type[i], + "combine_embeds": combine_embeds, + "start_at": start_at if not isinstance(start_at, list) else start_at[i], + "end_at": end_at if not isinstance(end_at, list) else end_at[i], + "attn_mask": attn_mask if not isinstance(attn_mask, list) else attn_mask[i], + "unfold_batch": self.unfold_batch, + "embeds_scaling": embeds_scaling, + "insightface": insightface if insightface is not None else ipadapter['insightface']['model'] if 'insightface' in ipadapter else None, + "layer_weights": layer_weights, + "encode_batch_size": encode_batch_size, + "style_boost": style_boost, + "composition_boost": composition_boost, + } + + work_model, face_image = ipadapter_execute(work_model, ipadapter_model, clip_vision, **ipa_args) + + del ipadapter + return (work_model, face_image, ) + +class IPAdapterBatch(IPAdapterAdvanced): + def __init__(self): + self.unfold_batch = True + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 5, "step": 0.05 }), + "weight_type": (WEIGHT_TYPES, ), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + "encode_batch_size": ("INT", { "default": 0, "min": 0, "max": 4096 }), + }, + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + } + } + +class IPAdapterStyleComposition(IPAdapterAdvanced): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image_style": ("IMAGE",), + "image_composition": ("IMAGE",), + "weight_style": ("FLOAT", { "default": 1.0, "min": -1, "max": 5, "step": 0.05 }), + "weight_composition": ("FLOAT", { "default": 1.0, "min": -1, "max": 5, "step": 0.05 }), + "expand_style": ("BOOLEAN", { "default": False }), + "combine_embeds": (["concat", "add", "subtract", "average", "norm average"], {"default": "average"}), + "start_at": ("FLOAT", { "default": 0.0, 
"min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + }, + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + } + } + + CATEGORY = "ipadapter/style_composition" + +class IPAdapterStyleCompositionBatch(IPAdapterStyleComposition): + def __init__(self): + self.unfold_batch = True + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image_style": ("IMAGE",), + "image_composition": ("IMAGE",), + "weight_style": ("FLOAT", { "default": 1.0, "min": -1, "max": 5, "step": 0.05 }), + "weight_composition": ("FLOAT", { "default": 1.0, "min": -1, "max": 5, "step": 0.05 }), + "expand_style": ("BOOLEAN", { "default": False }), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + }, + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + } + } + +class IPAdapterFaceID(IPAdapterAdvanced): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 3, "step": 0.05 }), + "weight_faceidv2": ("FLOAT", { "default": 1.0, "min": -1, "max": 5.0, "step": 0.05 }), + "weight_type": (WEIGHT_TYPES, ), + "combine_embeds": (["concat", "add", "subtract", "average", "norm average"],), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + }, + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + "insightface": ("INSIGHTFACE",), + } + } + + CATEGORY = "ipadapter/faceid" + RETURN_TYPES = ("MODEL","IMAGE",) + RETURN_NAMES = ("MODEL", "face_image", ) + +class IPAAdapterFaceIDBatch(IPAdapterFaceID): + def __init__(self): + self.unfold_batch = True + +class IPAdapterTiled: + def __init__(self): + self.unfold_batch = False + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 3, "step": 0.05 }), + "weight_type": (WEIGHT_TYPES, ), + "combine_embeds": (["concat", "add", "subtract", "average", "norm average"],), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "sharpening": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.05 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + }, + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + } + } + + RETURN_TYPES = ("MODEL", "IMAGE", "MASK", ) + RETURN_NAMES = ("MODEL", "tiles", "masks", ) + FUNCTION = "apply_tiled" + CATEGORY = "ipadapter/tiled" + + def apply_tiled(self, model, ipadapter, image, weight, weight_type, start_at, end_at, sharpening, combine_embeds="concat", 
image_negative=None, attn_mask=None, clip_vision=None, embeds_scaling='V only', encode_batch_size=0): + # 1. Select the models + if 'ipadapter' in ipadapter: + ipadapter_model = ipadapter['ipadapter']['model'] + clip_vision = clip_vision if clip_vision is not None else ipadapter['clipvision']['model'] + else: + ipadapter_model = ipadapter + clip_vision = clip_vision + + if clip_vision is None: + raise Exception("Missing CLIPVision model.") + + del ipadapter + + # 2. Extract the tiles + tile_size = 256 # I'm using 256 instead of 224 as it is more likely divisible by the latent size, it will be downscaled to 224 by the clip vision encoder + _, oh, ow, _ = image.shape + if attn_mask is None: + attn_mask = torch.ones([1, oh, ow], dtype=image.dtype, device=image.device) + + image = image.permute([0,3,1,2]) + attn_mask = attn_mask.unsqueeze(1) + # the mask should have the same proportions as the reference image and the latent + attn_mask = T.Resize((oh, ow), interpolation=T.InterpolationMode.BICUBIC, antialias=True)(attn_mask) + + # if the image is almost a square, we crop it to a square + if oh / ow > 0.75 and oh / ow < 1.33: + # crop the image to a square + image = T.CenterCrop(min(oh, ow))(image) + resize = (tile_size*2, tile_size*2) + + attn_mask = T.CenterCrop(min(oh, ow))(attn_mask) + # otherwise resize the smallest side and the other proportionally + else: + resize = (int(tile_size * ow / oh), tile_size) if oh < ow else (tile_size, int(tile_size * oh / ow)) + + # using PIL for better results + imgs = [] + for img in image: + img = T.ToPILImage()(img) + img = img.resize(resize, resample=Image.Resampling['LANCZOS']) + imgs.append(T.ToTensor()(img)) + image = torch.stack(imgs) + del imgs, img + + # we don't need a high quality resize for the mask + attn_mask = T.Resize(resize[::-1], interpolation=T.InterpolationMode.BICUBIC, antialias=True)(attn_mask) + + # we allow a maximum of 4 tiles + if oh / ow > 4 or oh / ow < 0.25: + crop = (tile_size, tile_size*4) if oh < ow else (tile_size*4, tile_size) + image = T.CenterCrop(crop)(image) + attn_mask = T.CenterCrop(crop)(attn_mask) + + attn_mask = attn_mask.squeeze(1) + + if sharpening > 0: + image = contrast_adaptive_sharpening(image, sharpening) + + image = image.permute([0,2,3,1]) + + _, oh, ow, _ = image.shape + + # find the number of tiles for each side + tiles_x = math.ceil(ow / tile_size) + tiles_y = math.ceil(oh / tile_size) + overlap_x = max(0, (tiles_x * tile_size - ow) / (tiles_x - 1 if tiles_x > 1 else 1)) + overlap_y = max(0, (tiles_y * tile_size - oh) / (tiles_y - 1 if tiles_y > 1 else 1)) + + base_mask = torch.zeros([attn_mask.shape[0], oh, ow], dtype=image.dtype, device=image.device) + + # extract all the tiles from the image and create the masks + tiles = [] + masks = [] + for y in range(tiles_y): + for x in range(tiles_x): + start_x = int(x * (tile_size - overlap_x)) + start_y = int(y * (tile_size - overlap_y)) + tiles.append(image[:, start_y:start_y+tile_size, start_x:start_x+tile_size, :]) + mask = base_mask.clone() + mask[:, start_y:start_y+tile_size, start_x:start_x+tile_size] = attn_mask[:, start_y:start_y+tile_size, start_x:start_x+tile_size] + masks.append(mask) + del mask + + # 3. 
Apply the ipadapter to each group of tiles + model = model.clone() + for i in range(len(tiles)): + ipa_args = { + "image": tiles[i], + "image_negative": image_negative, + "weight": weight, + "weight_type": weight_type, + "combine_embeds": combine_embeds, + "start_at": start_at, + "end_at": end_at, + "attn_mask": masks[i], + "unfold_batch": self.unfold_batch, + "embeds_scaling": embeds_scaling, + "encode_batch_size": encode_batch_size, + } + # apply the ipadapter to the model without cloning it + model, _ = ipadapter_execute(model, ipadapter_model, clip_vision, **ipa_args) + + return (model, torch.cat(tiles), torch.cat(masks), ) + +class IPAdapterTiledBatch(IPAdapterTiled): + def __init__(self): + self.unfold_batch = True + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 3, "step": 0.05 }), + "weight_type": (WEIGHT_TYPES, ), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "sharpening": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.05 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + "encode_batch_size": ("INT", { "default": 0, "min": 0, "max": 4096 }), + }, + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + } + } + +class IPAdapterEmbeds: + def __init__(self): + self.unfold_batch = False + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "pos_embed": ("EMBEDS",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 3, "step": 0.05 }), + "weight_type": (WEIGHT_TYPES, ), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + }, + "optional": { + "neg_embed": ("EMBEDS",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + } + } + + RETURN_TYPES = ("MODEL",) + FUNCTION = "apply_ipadapter" + CATEGORY = "ipadapter/embeds" + + def apply_ipadapter(self, model, ipadapter, pos_embed, weight, weight_type, start_at, end_at, neg_embed=None, attn_mask=None, clip_vision=None, embeds_scaling='V only'): + ipa_args = { + "pos_embed": pos_embed, + "neg_embed": neg_embed, + "weight": weight, + "weight_type": weight_type, + "start_at": start_at, + "end_at": end_at, + "attn_mask": attn_mask, + "embeds_scaling": embeds_scaling, + "unfold_batch": self.unfold_batch, + } + + if 'ipadapter' in ipadapter: + ipadapter_model = ipadapter['ipadapter']['model'] + clip_vision = clip_vision if clip_vision is not None else ipadapter['clipvision']['model'] + else: + ipadapter_model = ipadapter + clip_vision = clip_vision + + if clip_vision is None and neg_embed is None: + raise Exception("Missing CLIPVision model.") + + del ipadapter + + return ipadapter_execute(model.clone(), ipadapter_model, clip_vision, **ipa_args) + +class IPAdapterEmbedsBatch(IPAdapterEmbeds): + def __init__(self): + self.unfold_batch = True + +class IPAdapterMS(IPAdapterAdvanced): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 5, 
"step": 0.05 }), + "weight_faceidv2": ("FLOAT", { "default": 1.0, "min": -1, "max": 5.0, "step": 0.05 }), + "weight_type": (WEIGHT_TYPES, ), + "combine_embeds": (["concat", "add", "subtract", "average", "norm average"],), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + "layer_weights": ("STRING", { "default": "", "multiline": True }), + }, + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + "insightface": ("INSIGHTFACE",), + } + } + + CATEGORY = "ipadapter/dev" + +class IPAdapterFromParams(IPAdapterAdvanced): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "ipadapter_params": ("IPADAPTER_PARAMS", ), + "combine_embeds": (["concat", "add", "subtract", "average", "norm average"],), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + }, + "optional": { + "image_negative": ("IMAGE",), + "clip_vision": ("CLIP_VISION",), + } + } + + CATEGORY = "ipadapter/params" + +class IPAdapterPreciseStyleTransfer(IPAdapterAdvanced): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 5, "step": 0.05 }), + "style_boost": ("FLOAT", { "default": 1.0, "min": -5, "max": 5, "step": 0.05 }), + "combine_embeds": (["concat", "add", "subtract", "average", "norm average"],), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + }, + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + } + } + +class IPAdapterPreciseStyleTransferBatch(IPAdapterPreciseStyleTransfer): + def __init__(self): + self.unfold_batch = True + +class IPAdapterPreciseComposition(IPAdapterAdvanced): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 5, "step": 0.05 }), + "composition_boost": ("FLOAT", { "default": 0.0, "min": -5, "max": 5, "step": 0.05 }), + "combine_embeds": (["concat", "add", "subtract", "average", "norm average"],), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + }, + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + } + } + +class IPAdapterPreciseCompositionBatch(IPAdapterPreciseComposition): + def __init__(self): + self.unfold_batch = True + +""" +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Helpers +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +class IPAdapterEncoder: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "ipadapter": ("IPADAPTER",), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1.0, "max": 3.0, "step": 0.01 }), + }, + "optional": { 
+ "mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + } + } + + RETURN_TYPES = ("EMBEDS", "EMBEDS",) + RETURN_NAMES = ("pos_embed", "neg_embed",) + FUNCTION = "encode" + CATEGORY = "ipadapter/embeds" + + def encode(self, ipadapter, image, weight, mask=None, clip_vision=None): + if 'ipadapter' in ipadapter: + ipadapter_model = ipadapter['ipadapter']['model'] + clip_vision = clip_vision if clip_vision is not None else ipadapter['clipvision']['model'] + else: + ipadapter_model = ipadapter + clip_vision = clip_vision + + if clip_vision is None: + raise Exception("Missing CLIPVision model.") + + is_plus = "proj.3.weight" in ipadapter_model["image_proj"] or "latents" in ipadapter_model["image_proj"] or "perceiver_resampler.proj_in.weight" in ipadapter_model["image_proj"] + is_kwai_kolors = is_plus and "layers.0.0.to_out.weight" in ipadapter_model["image_proj"] and ipadapter_model["image_proj"]["layers.0.0.to_out.weight"].shape[0] == 2048 + + image_size = 224 if not is_kwai_kolors else 336 + + # resize and crop the mask to 224x224 + if mask is not None and mask.shape[1:3] != torch.Size([image_size, image_size]): + mask = mask.unsqueeze(1) + transforms = T.Compose([ + T.CenterCrop(min(mask.shape[2], mask.shape[3])), + T.Resize((image_size, image_size), interpolation=T.InterpolationMode.BICUBIC, antialias=True), + ]) + mask = transforms(mask).squeeze(1) + #mask = T.Resize((image.shape[1], image.shape[2]), interpolation=T.InterpolationMode.BICUBIC, antialias=True)(mask.unsqueeze(1)).squeeze(1) + + img_cond_embeds = encode_image_masked(clip_vision, image, mask, size=image_size) + + if is_plus: + img_cond_embeds = img_cond_embeds.penultimate_hidden_states + img_uncond_embeds = encode_image_masked(clip_vision, torch.zeros([1, image_size, image_size, 3]), size=image_size).penultimate_hidden_states + else: + img_cond_embeds = img_cond_embeds.image_embeds + img_uncond_embeds = torch.zeros_like(img_cond_embeds) + + if weight != 1: + img_cond_embeds = img_cond_embeds * weight + + return (img_cond_embeds, img_uncond_embeds, ) + +class IPAdapterCombineEmbeds: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "embed1": ("EMBEDS",), + "method": (["concat", "add", "subtract", "average", "norm average", "max", "min"], ), + }, + "optional": { + "embed2": ("EMBEDS",), + "embed3": ("EMBEDS",), + "embed4": ("EMBEDS",), + "embed5": ("EMBEDS",), + }} + + RETURN_TYPES = ("EMBEDS",) + FUNCTION = "batch" + CATEGORY = "ipadapter/embeds" + + def batch(self, embed1, method, embed2=None, embed3=None, embed4=None, embed5=None): + if method=='concat' and embed2 is None and embed3 is None and embed4 is None and embed5 is None: + return (embed1, ) + + embeds = [embed1, embed2, embed3, embed4, embed5] + embeds = [embed for embed in embeds if embed is not None] + embeds = torch.cat(embeds, dim=0) + + if method == "add": + embeds = torch.sum(embeds, dim=0).unsqueeze(0) + elif method == "subtract": + embeds = embeds[0] - torch.mean(embeds[1:], dim=0) + embeds = embeds.unsqueeze(0) + elif method == "average": + embeds = torch.mean(embeds, dim=0).unsqueeze(0) + elif method == "norm average": + embeds = torch.mean(embeds / torch.norm(embeds, dim=0, keepdim=True), dim=0).unsqueeze(0) + elif method == "max": + embeds = torch.max(embeds, dim=0).values.unsqueeze(0) + elif method == "min": + embeds = torch.min(embeds, dim=0).values.unsqueeze(0) + + return (embeds, ) + +class IPAdapterNoise: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "type": (["fade", "dissolve", "gaussian", "shuffle"], ), + "strength": 
("FLOAT", { "default": 1.0, "min": 0, "max": 1, "step": 0.05 }), + "blur": ("INT", { "default": 0, "min": 0, "max": 32, "step": 1 }), + }, + "optional": { + "image_optional": ("IMAGE",), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "make_noise" + CATEGORY = "ipadapter/utils" + + def make_noise(self, type, strength, blur, image_optional=None): + if image_optional is None: + image = torch.zeros([1, 224, 224, 3]) + else: + transforms = T.Compose([ + T.CenterCrop(min(image_optional.shape[1], image_optional.shape[2])), + T.Resize((224, 224), interpolation=T.InterpolationMode.BICUBIC, antialias=True), + ]) + image = transforms(image_optional.permute([0,3,1,2])).permute([0,2,3,1]) + + seed = int(torch.sum(image).item()) % 1000000007 # hash the image to get a seed, grants predictability + torch.manual_seed(seed) + + if type == "fade": + noise = torch.rand_like(image) + noise = image * (1 - strength) + noise * strength + elif type == "dissolve": + mask = (torch.rand_like(image) < strength).float() + noise = torch.rand_like(image) + noise = image * (1-mask) + noise * mask + elif type == "gaussian": + noise = torch.randn_like(image) * strength + noise = image + noise + elif type == "shuffle": + transforms = T.Compose([ + T.ElasticTransform(alpha=75.0, sigma=(1-strength)*3.5), + T.RandomVerticalFlip(p=1.0), + T.RandomHorizontalFlip(p=1.0), + ]) + image = transforms(image.permute([0,3,1,2])).permute([0,2,3,1]) + noise = torch.randn_like(image) * (strength*0.75) + noise = image * (1-noise) + noise + + del image + noise = torch.clamp(noise, 0, 1) + + if blur > 0: + if blur % 2 == 0: + blur += 1 + noise = T.functional.gaussian_blur(noise.permute([0,3,1,2]), blur).permute([0,2,3,1]) + + return (noise, ) + +class PrepImageForClipVision: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image": ("IMAGE",), + "interpolation": (["LANCZOS", "BICUBIC", "HAMMING", "BILINEAR", "BOX", "NEAREST"],), + "crop_position": (["top", "bottom", "left", "right", "center", "pad"],), + "sharpening": ("FLOAT", {"default": 0.0, "min": 0, "max": 1, "step": 0.05}), + }, + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "prep_image" + + CATEGORY = "ipadapter/utils" + + def prep_image(self, image, interpolation="LANCZOS", crop_position="center", sharpening=0.0): + size = (224, 224) + _, oh, ow, _ = image.shape + output = image.permute([0,3,1,2]) + + if crop_position == "pad": + if oh != ow: + if oh > ow: + pad = (oh - ow) // 2 + pad = (pad, 0, pad, 0) + elif ow > oh: + pad = (ow - oh) // 2 + pad = (0, pad, 0, pad) + output = T.functional.pad(output, pad, fill=0) + else: + crop_size = min(oh, ow) + x = (ow-crop_size) // 2 + y = (oh-crop_size) // 2 + if "top" in crop_position: + y = 0 + elif "bottom" in crop_position: + y = oh-crop_size + elif "left" in crop_position: + x = 0 + elif "right" in crop_position: + x = ow-crop_size + + x2 = x+crop_size + y2 = y+crop_size + + output = output[:, :, y:y2, x:x2] + + imgs = [] + for img in output: + img = T.ToPILImage()(img) # using PIL for better results + img = img.resize(size, resample=Image.Resampling[interpolation]) + imgs.append(T.ToTensor()(img)) + output = torch.stack(imgs, dim=0) + del imgs, img + + if sharpening > 0: + output = contrast_adaptive_sharpening(output, sharpening) + + output = output.permute([0,2,3,1]) + + return (output, ) + +class IPAdapterSaveEmbeds: + def __init__(self): + self.output_dir = folder_paths.get_output_directory() + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "embeds": ("EMBEDS",), + "filename_prefix": ("STRING", 
{"default": "IP_embeds"}) + }, + } + + RETURN_TYPES = () + FUNCTION = "save" + OUTPUT_NODE = True + CATEGORY = "ipadapter/embeds" + + def save(self, embeds, filename_prefix): + full_output_folder, filename, counter, subfolder, filename_prefix = folder_paths.get_save_image_path(filename_prefix, self.output_dir) + file = f"{filename}_{counter:05}.ipadpt" + file = os.path.join(full_output_folder, file) + + torch.save(embeds, file) + return (None, ) + +class IPAdapterLoadEmbeds: + @classmethod + def INPUT_TYPES(s): + input_dir = folder_paths.get_input_directory() + files = [os.path.relpath(os.path.join(root, file), input_dir) for root, dirs, files in os.walk(input_dir) for file in files if file.endswith('.ipadpt')] + return {"required": {"embeds": [sorted(files), ]}, } + + RETURN_TYPES = ("EMBEDS", ) + FUNCTION = "load" + CATEGORY = "ipadapter/embeds" + + def load(self, embeds): + path = folder_paths.get_annotated_filepath(embeds) + return (torch.load(path).cpu(), ) + +class IPAdapterWeights: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "weights": ("STRING", {"default": '1.0, 0.0', "multiline": True }), + "timing": (["custom", "linear", "ease_in_out", "ease_in", "ease_out", "random"], { "default": "linear" } ), + "frames": ("INT", {"default": 0, "min": 0, "max": 9999, "step": 1 }), + "start_frame": ("INT", {"default": 0, "min": 0, "max": 9999, "step": 1 }), + "end_frame": ("INT", {"default": 9999, "min": 0, "max": 9999, "step": 1 }), + "add_starting_frames": ("INT", {"default": 0, "min": 0, "max": 9999, "step": 1 }), + "add_ending_frames": ("INT", {"default": 0, "min": 0, "max": 9999, "step": 1 }), + "method": (["full batch", "shift batches", "alternate batches"], { "default": "full batch" }), + }, "optional": { + "image": ("IMAGE",), + } + } + + RETURN_TYPES = ("FLOAT", "FLOAT", "INT", "IMAGE", "IMAGE", "WEIGHTS_STRATEGY") + RETURN_NAMES = ("weights", "weights_invert", "total_frames", "image_1", "image_2", "weights_strategy") + FUNCTION = "weights" + CATEGORY = "ipadapter/weights" + + def weights(self, weights='', timing='custom', frames=0, start_frame=0, end_frame=9999, add_starting_frames=0, add_ending_frames=0, method='full batch', weights_strategy=None, image=None): + import random + + frame_count = image.shape[0] if image is not None else 0 + if weights_strategy is not None: + weights = weights_strategy["weights"] + timing = weights_strategy["timing"] + frames = weights_strategy["frames"] + start_frame = weights_strategy["start_frame"] + end_frame = weights_strategy["end_frame"] + add_starting_frames = weights_strategy["add_starting_frames"] + add_ending_frames = weights_strategy["add_ending_frames"] + method = weights_strategy["method"] + frame_count = weights_strategy["frame_count"] + else: + weights_strategy = { + "weights": weights, + "timing": timing, + "frames": frames, + "start_frame": start_frame, + "end_frame": end_frame, + "add_starting_frames": add_starting_frames, + "add_ending_frames": add_ending_frames, + "method": method, + "frame_count": frame_count, + } + + # convert the string to a list of floats separated by commas or newlines + weights = weights.replace("\n", ",") + weights = [float(weight) for weight in weights.split(",") if weight.strip() != ""] + + if timing != "custom": + frames = max(frames, 2) + start = 0.0 + end = 1.0 + + if len(weights) > 0: + start = weights[0] + end = weights[-1] + + weights = [] + + end_frame = min(end_frame, frames) + duration = end_frame - start_frame + if start_frame > 0: + weights.extend([start] * start_frame) + + for 
i in range(duration): + n = duration - 1 + if timing == "linear": + weights.append(start + (end - start) * i / n) + elif timing == "ease_in_out": + weights.append(start + (end - start) * (1 - math.cos(i / n * math.pi)) / 2) + elif timing == "ease_in": + weights.append(start + (end - start) * math.sin(i / n * math.pi / 2)) + elif timing == "ease_out": + weights.append(start + (end - start) * (1 - math.cos(i / n * math.pi / 2))) + elif timing == "random": + weights.append(random.uniform(start, end)) + + weights[-1] = end if timing != "random" else weights[-1] + if end_frame < frames: + weights.extend([end] * (frames - end_frame)) + + if len(weights) == 0: + weights = [0.0] + + frames = len(weights) + + # repeat the images for cross fade + image_1 = None + image_2 = None + + # Calculate the min and max of the weights + min_weight = min(weights) + max_weight = max(weights) + + if image is not None: + + if "shift" in method: + image_1 = image[:-1] + image_2 = image[1:] + + weights = weights * image_1.shape[0] + image_1 = image_1.repeat_interleave(frames, 0) + image_2 = image_2.repeat_interleave(frames, 0) + elif "alternate" in method: + image_1 = image[::2].repeat_interleave(2, 0) + image_1 = image_1[1:] + image_2 = image[1::2].repeat_interleave(2, 0) + + # Invert the weights relative to their own range + mew_weights = weights + [max_weight - (w - min_weight) for w in weights] + + mew_weights = mew_weights * (image_1.shape[0] // 2) + if image.shape[0] % 2: + image_1 = image_1[:-1] + else: + image_2 = image_2[:-1] + mew_weights = mew_weights + weights + + weights = mew_weights + image_1 = image_1.repeat_interleave(frames, 0) + image_2 = image_2.repeat_interleave(frames, 0) + else: + weights = weights * image.shape[0] + image_1 = image.repeat_interleave(frames, 0) + + # add starting and ending frames + if add_starting_frames > 0: + weights = [weights[0]] * add_starting_frames + weights + image_1 = torch.cat([image[:1].repeat(add_starting_frames, 1, 1, 1), image_1], dim=0) + if image_2 is not None: + image_2 = torch.cat([image[:1].repeat(add_starting_frames, 1, 1, 1), image_2], dim=0) + if add_ending_frames > 0: + weights = weights + [weights[-1]] * add_ending_frames + image_1 = torch.cat([image_1, image[-1:].repeat(add_ending_frames, 1, 1, 1)], dim=0) + if image_2 is not None: + image_2 = torch.cat([image_2, image[-1:].repeat(add_ending_frames, 1, 1, 1)], dim=0) + + # reverse the weights array + weights_invert = weights[::-1] + + frame_count = len(weights) + + return (weights, weights_invert, frame_count, image_1, image_2, weights_strategy,) + +class IPAdapterWeightsFromStrategy(IPAdapterWeights): + @classmethod + def INPUT_TYPES(s): + return {"required": { + "weights_strategy": ("WEIGHTS_STRATEGY",), + }, "optional": { + "image": ("IMAGE",), + } + } + +class IPAdapterPromptScheduleFromWeightsStrategy(): + @classmethod + def INPUT_TYPES(s): + return {"required": { + "weights_strategy": ("WEIGHTS_STRATEGY",), + "prompt": ("STRING", {"default": "", "multiline": True }), + }} + + RETURN_TYPES = ("STRING",) + RETURN_NAMES = ("prompt_schedule", ) + FUNCTION = "prompt_schedule" + CATEGORY = "ipadapter/weights" + + def prompt_schedule(self, weights_strategy, prompt=""): + frames = weights_strategy["frames"] + add_starting_frames = weights_strategy["add_starting_frames"] + add_ending_frames = weights_strategy["add_ending_frames"] + frame_count = weights_strategy["frame_count"] + + out = "" + + prompt = [p for p in prompt.split("\n") if p.strip() != ""] + + if len(prompt) > 0 and frame_count > 0: + # 
prompt_pos must be the same size as the image batch + if len(prompt) > frame_count: + prompt = prompt[:frame_count] + elif len(prompt) < frame_count: + prompt += [prompt[-1]] * (frame_count - len(prompt)) + + if add_starting_frames > 0: + out += f"\"0\": \"{prompt[0]}\",\n" + for i in range(frame_count): + out += f"\"{i * frames + add_starting_frames}\": \"{prompt[i]}\",\n" + if add_ending_frames > 0: + out += f"\"{frame_count * frames + add_starting_frames}\": \"{prompt[-1]}\",\n" + + return (out, ) + +class IPAdapterCombineWeights: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "weights_1": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.05 }), + "weights_2": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.05 }), + }} + RETURN_TYPES = ("FLOAT", "INT") + RETURN_NAMES = ("weights", "count") + FUNCTION = "combine" + CATEGORY = "ipadapter/utils" + + def combine(self, weights_1, weights_2): + if not isinstance(weights_1, list): + weights_1 = [weights_1] + if not isinstance(weights_2, list): + weights_2 = [weights_2] + weights = weights_1 + weights_2 + + return (weights, len(weights), ) + +class IPAdapterRegionalConditioning: + @classmethod + def INPUT_TYPES(s): + return {"required": { + #"set_cond_area": (["default", "mask bounds"],), + "image": ("IMAGE",), + "image_weight": ("FLOAT", { "default": 1.0, "min": -1.0, "max": 3.0, "step": 0.05 }), + "prompt_weight": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 10.0, "step": 0.05 }), + "weight_type": (WEIGHT_TYPES, ), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + }, "optional": { + "mask": ("MASK",), + "positive": ("CONDITIONING",), + "negative": ("CONDITIONING",), + }} + + RETURN_TYPES = ("IPADAPTER_PARAMS", "CONDITIONING", "CONDITIONING", ) + RETURN_NAMES = ("IPADAPTER_PARAMS", "POSITIVE", "NEGATIVE") + FUNCTION = "conditioning" + + CATEGORY = "ipadapter/params" + + def conditioning(self, image, image_weight, prompt_weight, weight_type, start_at, end_at, mask=None, positive=None, negative=None): + set_area_to_bounds = False #if set_cond_area == "default" else True + + if mask is not None: + if positive is not None: + positive = conditioning_set_values(positive, {"mask": mask, "set_area_to_bounds": set_area_to_bounds, "mask_strength": prompt_weight}) + if negative is not None: + negative = conditioning_set_values(negative, {"mask": mask, "set_area_to_bounds": set_area_to_bounds, "mask_strength": prompt_weight}) + + ipadapter_params = { + "image": [image], + "attn_mask": [mask], + "weight": [image_weight], + "weight_type": [weight_type], + "start_at": [start_at], + "end_at": [end_at], + } + + return (ipadapter_params, positive, negative, ) + +class IPAdapterCombineParams: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "params_1": ("IPADAPTER_PARAMS",), + "params_2": ("IPADAPTER_PARAMS",), + }, "optional": { + "params_3": ("IPADAPTER_PARAMS",), + "params_4": ("IPADAPTER_PARAMS",), + "params_5": ("IPADAPTER_PARAMS",), + }} + + RETURN_TYPES = ("IPADAPTER_PARAMS",) + FUNCTION = "combine" + CATEGORY = "ipadapter/params" + + def combine(self, params_1, params_2, params_3=None, params_4=None, params_5=None): + ipadapter_params = { + "image": params_1["image"] + params_2["image"], + "attn_mask": params_1["attn_mask"] + params_2["attn_mask"], + "weight": params_1["weight"] + params_2["weight"], + "weight_type": params_1["weight_type"] + params_2["weight_type"], + 
"start_at": params_1["start_at"] + params_2["start_at"], + "end_at": params_1["end_at"] + params_2["end_at"], + } + + if params_3 is not None: + ipadapter_params["image"] += params_3["image"] + ipadapter_params["attn_mask"] += params_3["attn_mask"] + ipadapter_params["weight"] += params_3["weight"] + ipadapter_params["weight_type"] += params_3["weight_type"] + ipadapter_params["start_at"] += params_3["start_at"] + ipadapter_params["end_at"] += params_3["end_at"] + if params_4 is not None: + ipadapter_params["image"] += params_4["image"] + ipadapter_params["attn_mask"] += params_4["attn_mask"] + ipadapter_params["weight"] += params_4["weight"] + ipadapter_params["weight_type"] += params_4["weight_type"] + ipadapter_params["start_at"] += params_4["start_at"] + ipadapter_params["end_at"] += params_4["end_at"] + if params_5 is not None: + ipadapter_params["image"] += params_5["image"] + ipadapter_params["attn_mask"] += params_5["attn_mask"] + ipadapter_params["weight"] += params_5["weight"] + ipadapter_params["weight_type"] += params_5["weight_type"] + ipadapter_params["start_at"] += params_5["start_at"] + ipadapter_params["end_at"] += params_5["end_at"] + + return (ipadapter_params, ) + \ No newline at end of file diff --git a/ComfyUI-Kolors-MZ/ComfyUI_IPAdapter_plus/image_proj_models.py b/ComfyUI-Kolors-MZ/ComfyUI_IPAdapter_plus/image_proj_models.py new file mode 100644 index 0000000000000000000000000000000000000000..da61b53bb20e94cdc203c121a66129f745e3f4f0 --- /dev/null +++ b/ComfyUI-Kolors-MZ/ComfyUI_IPAdapter_plus/image_proj_models.py @@ -0,0 +1,275 @@ +import math +import torch +import torch.nn as nn +from einops import rearrange +from einops.layers.torch import Rearrange + + +# FFN +def FeedForward(dim, mult=4): + inner_dim = int(dim * mult) + return nn.Sequential( + nn.LayerNorm(dim), + nn.Linear(dim, inner_dim, bias=False), + nn.GELU(), + nn.Linear(inner_dim, dim, bias=False), + ) + + +def reshape_tensor(x, heads): + bs, length, width = x.shape + # (bs, length, width) --> (bs, length, n_heads, dim_per_head) + x = x.view(bs, length, heads, -1) + # (bs, length, n_heads, dim_per_head) --> (bs, n_heads, length, dim_per_head) + x = x.transpose(1, 2) + # (bs, n_heads, length, dim_per_head) --> (bs*n_heads, length, dim_per_head) + x = x.reshape(bs, heads, length, -1) + return x + + +class PerceiverAttention(nn.Module): + def __init__(self, *, dim, dim_head=64, heads=8): + super().__init__() + self.scale = dim_head**-0.5 + self.dim_head = dim_head + self.heads = heads + inner_dim = dim_head * heads + + self.norm1 = nn.LayerNorm(dim) + self.norm2 = nn.LayerNorm(dim) + + self.to_q = nn.Linear(dim, inner_dim, bias=False) + self.to_kv = nn.Linear(dim, inner_dim * 2, bias=False) + self.to_out = nn.Linear(inner_dim, dim, bias=False) + + def forward(self, x, latents): + """ + Args: + x (torch.Tensor): image features + shape (b, n1, D) + latent (torch.Tensor): latent features + shape (b, n2, D) + """ + x = self.norm1(x) + latents = self.norm2(latents) + + b, l, _ = latents.shape + + q = self.to_q(latents) + kv_input = torch.cat((x, latents), dim=-2) + k, v = self.to_kv(kv_input).chunk(2, dim=-1) + + q = reshape_tensor(q, self.heads) + k = reshape_tensor(k, self.heads) + v = reshape_tensor(v, self.heads) + + # attention + scale = 1 / math.sqrt(math.sqrt(self.dim_head)) + weight = (q * scale) @ (k * scale).transpose(-2, -1) # More stable with f16 than dividing afterwards + weight = torch.softmax(weight.float(), dim=-1).type(weight.dtype) + out = weight @ v + + out = out.permute(0, 2, 1, 3).reshape(b, 
l, -1) + + return self.to_out(out) + + +class Resampler(nn.Module): + def __init__( + self, + dim=1024, + depth=8, + dim_head=64, + heads=16, + num_queries=8, + embedding_dim=768, + output_dim=1024, + ff_mult=4, + max_seq_len: int = 257, # CLIP tokens + CLS token + apply_pos_emb: bool = False, + num_latents_mean_pooled: int = 0, # number of latents derived from mean pooled representation of the sequence + ): + super().__init__() + self.pos_emb = nn.Embedding(max_seq_len, embedding_dim) if apply_pos_emb else None + + self.latents = nn.Parameter(torch.randn(1, num_queries, dim) / dim**0.5) + + self.proj_in = nn.Linear(embedding_dim, dim) + + self.proj_out = nn.Linear(dim, output_dim) + self.norm_out = nn.LayerNorm(output_dim) + + self.to_latents_from_mean_pooled_seq = ( + nn.Sequential( + nn.LayerNorm(dim), + nn.Linear(dim, dim * num_latents_mean_pooled), + Rearrange("b (n d) -> b n d", n=num_latents_mean_pooled), + ) + if num_latents_mean_pooled > 0 + else None + ) + + self.layers = nn.ModuleList([]) + for _ in range(depth): + self.layers.append( + nn.ModuleList( + [ + PerceiverAttention(dim=dim, dim_head=dim_head, heads=heads), + FeedForward(dim=dim, mult=ff_mult), + ] + ) + ) + + def forward(self, x): + if self.pos_emb is not None: + n, device = x.shape[1], x.device + pos_emb = self.pos_emb(torch.arange(n, device=device)) + x = x + pos_emb + + latents = self.latents.repeat(x.size(0), 1, 1) + + x = self.proj_in(x) + + if self.to_latents_from_mean_pooled_seq: + meanpooled_seq = masked_mean(x, dim=1, mask=torch.ones(x.shape[:2], device=x.device, dtype=torch.bool)) + meanpooled_latents = self.to_latents_from_mean_pooled_seq(meanpooled_seq) + latents = torch.cat((meanpooled_latents, latents), dim=-2) + + for attn, ff in self.layers: + latents = attn(x, latents) + latents + latents = ff(latents) + latents + + latents = self.proj_out(latents) + return self.norm_out(latents) + + +def masked_mean(t, *, dim, mask=None): + if mask is None: + return t.mean(dim=dim) + + denom = mask.sum(dim=dim, keepdim=True) + mask = rearrange(mask, "b n -> b n 1") + masked_t = t.masked_fill(~mask, 0.0) + + return masked_t.sum(dim=dim) / denom.clamp(min=1e-5) + + +class FacePerceiverResampler(nn.Module): + def __init__( + self, + *, + dim=768, + depth=4, + dim_head=64, + heads=16, + embedding_dim=1280, + output_dim=768, + ff_mult=4, + ): + super().__init__() + + self.proj_in = nn.Linear(embedding_dim, dim) + self.proj_out = nn.Linear(dim, output_dim) + self.norm_out = nn.LayerNorm(output_dim) + self.layers = nn.ModuleList([]) + for _ in range(depth): + self.layers.append( + nn.ModuleList( + [ + PerceiverAttention(dim=dim, dim_head=dim_head, heads=heads), + FeedForward(dim=dim, mult=ff_mult), + ] + ) + ) + + def forward(self, latents, x): + x = self.proj_in(x) + for attn, ff in self.layers: + latents = attn(x, latents) + latents + latents = ff(latents) + latents + latents = self.proj_out(latents) + return self.norm_out(latents) + + +class MLPProjModel(nn.Module): + def __init__(self, cross_attention_dim=1024, clip_embeddings_dim=1024): + super().__init__() + + self.proj = nn.Sequential( + nn.Linear(clip_embeddings_dim, clip_embeddings_dim), + nn.GELU(), + nn.Linear(clip_embeddings_dim, cross_attention_dim), + nn.LayerNorm(cross_attention_dim) + ) + + def forward(self, image_embeds): + clip_extra_context_tokens = self.proj(image_embeds) + return clip_extra_context_tokens + +class MLPProjModelFaceId(nn.Module): + def __init__(self, cross_attention_dim=768, id_embeddings_dim=512, num_tokens=4): + super().__init__() + + 
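# Added descriptive note (not in upstream source): projects a single face-ID embedding of size id_embeddings_dim into num_tokens cross-attention tokens of width cross_attention_dim; see forward() below. +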
self.cross_attention_dim = cross_attention_dim + self.num_tokens = num_tokens + + self.proj = nn.Sequential( + nn.Linear(id_embeddings_dim, id_embeddings_dim*2), + nn.GELU(), + nn.Linear(id_embeddings_dim*2, cross_attention_dim*num_tokens), + ) + self.norm = nn.LayerNorm(cross_attention_dim) + + def forward(self, id_embeds): + x = self.proj(id_embeds) + x = x.reshape(-1, self.num_tokens, self.cross_attention_dim) + x = self.norm(x) + return x + +class ProjModelFaceIdPlus(nn.Module): + def __init__(self, cross_attention_dim=768, id_embeddings_dim=512, clip_embeddings_dim=1280, num_tokens=4): + super().__init__() + + self.cross_attention_dim = cross_attention_dim + self.num_tokens = num_tokens + + self.proj = nn.Sequential( + nn.Linear(id_embeddings_dim, id_embeddings_dim*2), + nn.GELU(), + nn.Linear(id_embeddings_dim*2, cross_attention_dim*num_tokens), + ) + self.norm = nn.LayerNorm(cross_attention_dim) + + self.perceiver_resampler = FacePerceiverResampler( + dim=cross_attention_dim, + depth=4, + dim_head=64, + heads=cross_attention_dim // 64, + embedding_dim=clip_embeddings_dim, + output_dim=cross_attention_dim, + ff_mult=4, + ) + + def forward(self, id_embeds, clip_embeds, scale=1.0, shortcut=False): + x = self.proj(id_embeds) + x = x.reshape(-1, self.num_tokens, self.cross_attention_dim) + x = self.norm(x) + out = self.perceiver_resampler(x, clip_embeds) + if shortcut: + out = x + scale * out + return out + +class ImageProjModel(nn.Module): + def __init__(self, cross_attention_dim=1024, clip_embeddings_dim=1024, clip_extra_context_tokens=4): + super().__init__() + + self.cross_attention_dim = cross_attention_dim + self.clip_extra_context_tokens = clip_extra_context_tokens + self.proj = nn.Linear(clip_embeddings_dim, self.clip_extra_context_tokens * cross_attention_dim) + self.norm = nn.LayerNorm(cross_attention_dim) + + def forward(self, image_embeds): + embeds = image_embeds + x = self.proj(embeds).reshape(-1, self.clip_extra_context_tokens, self.cross_attention_dim) + x = self.norm(x) + return x diff --git a/ComfyUI-Kolors-MZ/ComfyUI_IPAdapter_plus/utils.py b/ComfyUI-Kolors-MZ/ComfyUI_IPAdapter_plus/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..21612ae686cb67159cf54387e3fd3ab0caf42189 --- /dev/null +++ b/ComfyUI-Kolors-MZ/ComfyUI_IPAdapter_plus/utils.py @@ -0,0 +1,270 @@ +import re +import torch +import os +import folder_paths +from comfy.clip_vision import clip_preprocess, Output +import comfy.utils +import comfy.model_management as model_management +try: + import torchvision.transforms.v2 as T +except ImportError: + import torchvision.transforms as T + +def get_clipvision_file(preset): + preset = preset.lower() + clipvision_list = folder_paths.get_filename_list("clip_vision") + + if preset.startswith("vit-g"): + pattern = r'(ViT.bigG.14.*39B.b160k|ipadapter.*sdxl|sdxl.*model\.(bin|safetensors))' + else: + pattern = r'(ViT.H.14.*s32B.b79K|ipadapter.*sd15|sd1.?5.*model\.(bin|safetensors))' + clipvision_file = [e for e in clipvision_list if re.search(pattern, e, re.IGNORECASE)] + + clipvision_file = folder_paths.get_full_path("clip_vision", clipvision_file[0]) if clipvision_file else None + + return clipvision_file + +def get_ipadapter_file(preset, is_sdxl): + preset = preset.lower() + ipadapter_list = folder_paths.get_filename_list("ipadapter") + is_insightface = False + lora_pattern = None + + if preset.startswith("light"): + if is_sdxl: + raise Exception("light model is not supported for SDXL") + pattern = r'sd15.light.v11\.(safetensors|bin)$' + # if 
v11 is not found, try with the old version + if not [e for e in ipadapter_list if re.search(pattern, e, re.IGNORECASE)]: + pattern = r'sd15.light\.(safetensors|bin)$' + elif preset.startswith("standard"): + if is_sdxl: + pattern = r'ip.adapter.sdxl.vit.h\.(safetensors|bin)$' + else: + pattern = r'ip.adapter.sd15\.(safetensors|bin)$' + elif preset.startswith("vit-g"): + if is_sdxl: + pattern = r'ip.adapter.sdxl\.(safetensors|bin)$' + else: + pattern = r'sd15.vit.g\.(safetensors|bin)$' + elif preset.startswith("plus ("): + if is_sdxl: + pattern = r'plus.sdxl.vit.h\.(safetensors|bin)$' + else: + pattern = r'ip.adapter.plus.sd15\.(safetensors|bin)$' + elif preset.startswith("plus face"): + if is_sdxl: + pattern = r'plus.face.sdxl.vit.h\.(safetensors|bin)$' + else: + pattern = r'plus.face.sd15\.(safetensors|bin)$' + elif preset.startswith("full"): + if is_sdxl: + raise Exception("full face model is not supported for SDXL") + pattern = r'full.face.sd15\.(safetensors|bin)$' + elif preset.startswith("faceid portrait ("): + if is_sdxl: + pattern = r'portrait.sdxl\.(safetensors|bin)$' + else: + pattern = r'portrait.v11.sd15\.(safetensors|bin)$' + # if v11 is not found, try with the old version + if not [e for e in ipadapter_list if re.search(pattern, e, re.IGNORECASE)]: + pattern = r'portrait.sd15\.(safetensors|bin)$' + is_insightface = True + elif preset.startswith("faceid portrait unnorm"): + if is_sdxl: + pattern = r'portrait.sdxl.unnorm\.(safetensors|bin)$' + else: + raise Exception("portrait unnorm model is not supported for SD1.5") + is_insightface = True + elif preset == "faceid": + if is_sdxl: + pattern = r'faceid.sdxl\.(safetensors|bin)$' + lora_pattern = r'faceid.sdxl.lora\.safetensors$' + else: + pattern = r'faceid.sd15\.(safetensors|bin)$' + lora_pattern = r'faceid.sd15.lora\.safetensors$' + is_insightface = True + elif preset.startswith("faceid plus -"): + if is_sdxl: + raise Exception("faceid plus model is not supported for SDXL") + pattern = r'faceid.plus.sd15\.(safetensors|bin)$' + lora_pattern = r'faceid.plus.sd15.lora\.safetensors$' + is_insightface = True + elif preset.startswith("faceid plus v2"): + if is_sdxl: + pattern = r'faceid.plusv2.sdxl\.(safetensors|bin)$' + lora_pattern = r'faceid.plusv2.sdxl.lora\.safetensors$' + else: + pattern = r'faceid.plusv2.sd15\.(safetensors|bin)$' + lora_pattern = r'faceid.plusv2.sd15.lora\.safetensors$' + is_insightface = True + # Community's models + elif preset.startswith("composition"): + if is_sdxl: + pattern = r'plus.composition.sdxl\.safetensors$' + else: + pattern = r'plus.composition.sd15\.safetensors$' + else: + raise Exception(f"invalid type '{preset}'") + + ipadapter_file = [e for e in ipadapter_list if re.search(pattern, e, re.IGNORECASE)] + ipadapter_file = folder_paths.get_full_path("ipadapter", ipadapter_file[0]) if ipadapter_file else None + + return ipadapter_file, is_insightface, lora_pattern + +def get_lora_file(pattern): + lora_list = folder_paths.get_filename_list("loras") + lora_file = [e for e in lora_list if re.search(pattern, e, re.IGNORECASE)] + lora_file = folder_paths.get_full_path("loras", lora_file[0]) if lora_file else None + + return lora_file + +def ipadapter_model_loader(file): + model = comfy.utils.load_torch_file(file, safe_load=True) + + if file.lower().endswith(".safetensors"): + st_model = {"image_proj": {}, "ip_adapter": {}} + for key in model.keys(): + if key.startswith("image_proj."): + st_model["image_proj"][key.replace("image_proj.", "")] = model[key] + elif key.startswith("ip_adapter."): + 
st_model["ip_adapter"][key.replace("ip_adapter.", "")] = model[key] + model = st_model + del st_model + + if "adapter_modules" in model.keys(): + model["ip_adapter"] = model["adapter_modules"] + del model["adapter_modules"] + + if not "ip_adapter" in model.keys() or not model["ip_adapter"]: + raise Exception("invalid IPAdapter model {}".format(file)) + + if 'plusv2' in file.lower(): + model["faceidplusv2"] = True + + if 'unnorm' in file.lower(): + model["portraitunnorm"] = True + + return model + +def insightface_loader(provider): + try: + from insightface.app import FaceAnalysis + except ImportError as e: + raise Exception(e) + + path = os.path.join(folder_paths.models_dir, "insightface") + model = FaceAnalysis(name="antelopev2", root=path, providers=[provider + 'ExecutionProvider',]) + model.prepare(ctx_id=0, det_size=(640, 640)) + return model + +def encode_image_masked(clip_vision, image, mask=None, batch_size=0, size=224): + model_management.load_model_gpu(clip_vision.patcher) + outputs = Output() + + if batch_size == 0: + batch_size = image.shape[0] + elif batch_size > image.shape[0]: + batch_size = image.shape[0] + + image_batch = torch.split(image, batch_size, dim=0) + + for img in image_batch: + img = img.to(clip_vision.load_device) + + pixel_values = clip_preprocess(img.to(clip_vision.load_device), size=size).float() + + # TODO: support for multiple masks + if mask is not None: + pixel_values = pixel_values * mask.to(clip_vision.load_device) + + out = clip_vision.model(pixel_values=pixel_values, intermediate_output=-2) + + if not hasattr(outputs, "last_hidden_state"): + outputs["last_hidden_state"] = out[0].to(model_management.intermediate_device()) + outputs["image_embeds"] = out[2].to(model_management.intermediate_device()) + outputs["penultimate_hidden_states"] = out[1].to(model_management.intermediate_device()) + else: + outputs["last_hidden_state"] = torch.cat((outputs["last_hidden_state"], out[0].to(model_management.intermediate_device())), dim=0) + outputs["image_embeds"] = torch.cat((outputs["image_embeds"], out[2].to(model_management.intermediate_device())), dim=0) + outputs["penultimate_hidden_states"] = torch.cat((outputs["penultimate_hidden_states"], out[1].to(model_management.intermediate_device())), dim=0) + + del img, pixel_values, out + torch.cuda.empty_cache() + + return outputs + +def tensor_to_size(source, dest_size): + if isinstance(dest_size, torch.Tensor): + dest_size = dest_size.shape[0] + source_size = source.shape[0] + + if source_size < dest_size: + shape = [dest_size - source_size] + [1]*(source.dim()-1) + source = torch.cat((source, source[-1:].repeat(shape)), dim=0) + elif source_size > dest_size: + source = source[:dest_size] + + return source + +def min_(tensor_list): + # return the element-wise min of the tensor list. + x = torch.stack(tensor_list) + mn = x.min(axis=0)[0] + return torch.clamp(mn, min=0) + +def max_(tensor_list): + # return the element-wise max of the tensor list. 
+ x = torch.stack(tensor_list) + mx = x.max(axis=0)[0] + return torch.clamp(mx, max=1) + +# From https://github.com/Jamy-L/Pytorch-Contrast-Adaptive-Sharpening/ +def contrast_adaptive_sharpening(image, amount): + img = T.functional.pad(image, (1, 1, 1, 1)).cpu() + + a = img[..., :-2, :-2] + b = img[..., :-2, 1:-1] + c = img[..., :-2, 2:] + d = img[..., 1:-1, :-2] + e = img[..., 1:-1, 1:-1] + f = img[..., 1:-1, 2:] + g = img[..., 2:, :-2] + h = img[..., 2:, 1:-1] + i = img[..., 2:, 2:] + + # Computing contrast + cross = (b, d, e, f, h) + mn = min_(cross) + mx = max_(cross) + + diag = (a, c, g, i) + mn2 = min_(diag) + mx2 = max_(diag) + mx = mx + mx2 + mn = mn + mn2 + + # Computing local weight + inv_mx = torch.reciprocal(mx) + amp = inv_mx * torch.minimum(mn, (2 - mx)) + + # scaling + amp = torch.sqrt(amp) + w = - amp * (amount * (1/5 - 1/8) + 1/8) + div = torch.reciprocal(1 + 4*w) + + output = ((b + d + f + h)*w + e) * div + output = torch.nan_to_num(output) + output = output.clamp(0, 1) + + return output + +def tensor_to_image(tensor): + image = tensor.mul(255).clamp(0, 255).byte().cpu() + image = image[..., [2, 1, 0]].numpy() + return image + +def image_to_tensor(image): + tensor = torch.clamp(torch.from_numpy(image).float() / 255., 0, 1) + tensor = tensor[..., [2, 1, 0]] + return tensor diff --git a/ComfyUI-Kolors-MZ/LICENSE b/ComfyUI-Kolors-MZ/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..f288702d2fa16d3cdf0035b15a9fcbc552cd88e7 --- /dev/null +++ b/ComfyUI-Kolors-MZ/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. 
+ + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. 
If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. 
+ + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/ComfyUI-Kolors-MZ/README.md b/ComfyUI-Kolors-MZ/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..9e1ad8e082b3181329edf565d83f18d79228ad8a
--- /dev/null
+++ b/ComfyUI-Kolors-MZ/README.md
@@ -0,0 +1,203 @@
+![image](./examples/workflow_ipa.png)
+
+## Recent changes
+* [2024-08-02] 支持faceid,新增一些相关节点,工作流见examples/workflow_ipa_faceid.png
+* [2024-07-27] 新增MZ_KolorsControlNetLoader节点,用于加载可图ControlNet官方模型
+* [2024-07-26] 新增MZ_ApplySDXLSamplingSettings节点,用于V2版本重新回到SDXL的scheduler配置.
+ ![image](https://github.com/user-attachments/assets/8c3be6bf-4744-478f-8660-4842a4558a1f) + +* [2024-07-25] 修正sampling_settings,参数来自 [scheduler_config.json](https://huggingface.co/Kwai-Kolors/Kolors/blob/main/scheduler/scheduler_config.json),仅V2生效 +* [2024-07-21] 感谢来自yiwangsimple对Mac修复和测试的分支 https://github.com/yiwangsimple/ComfyUI-Kolors-MZ +* [2024-07-21] 新增MZ_ChatGLM3TextEncodeAdvanceV2节点 +* [2024-07-18] IPA相关节点已在ComfyUI_IPAdapter_plus中支持 +* [2024-07-17] 新增支持IPAdapter_plus的加载器和高级应用节点 MZ_KolorsCLIPVisionLoader,MZ_IPAdapterModelLoaderKolors,MZ_IPAdapterAdvancedKolors +* [2024-07-14] 删除自动兼容ControlNet, 新增MZ_KolorsControlNetPatch节点 + ![image](https://github.com/user-attachments/assets/73ae6447-c69d-4781-9c66-94e0029709ed) + +## ComfyUI上Kolors的实现 + +参考自 https://github.com/kijai/ComfyUI-KwaiKolorsWrapper + +使用ComfyUI原生采样 + +工作流在examples/workflow.png中获取 + +### UNET模型下载 +unet模型放置在 models/unet/ 文件夹下 + +模型主页: https://huggingface.co/Kwai-Kolors/Kolors + +下载地址: https://huggingface.co/Kwai-Kolors/Kolors/resolve/main/unet/diffusion_pytorch_model.fp16.safetensors + + +### ChatGLM3模型下载 +chatglm3放置在 models/LLM/ 文件夹下 + +模型主页: https://huggingface.co/Kijai/ChatGLM3-safetensors + +下载地址: https://huggingface.co/Kijai/ChatGLM3-safetensors/resolve/main/chatglm3-fp16.safetensors + + +## IPAdapter实现推荐使用 [cubiq/ComfyUI_IPAdapter_plus](https://github.com/cubiq/ComfyUI_IPAdapter_plus) +faceid相关节点已经在其中支持,IPAdapter实现需要更新ComfyUI_IPAdapter_plus到最新版本 +### IPAdapter工作流 +https://github.com/cubiq/ComfyUI_IPAdapter_plus/blob/main/examples/ipadapter_kolors.json + +### IPAdapter_FaceIDv2工作流 +https://github.com/cubiq/ComfyUI_IPAdapter_plus/blob/main/examples/IPAdapter_FaceIDv2_Kolors.json + + +### 官方IP-Adapter-Plus模型下载地址 +模型主页: https://huggingface.co/Kwai-Kolors/Kolors-IP-Adapter-Plus + +https://huggingface.co/Kwai-Kolors/Kolors-IP-Adapter-Plus/resolve/main/ip_adapter_plus_general.bin 下载至 models/ipadapter/ + +https://huggingface.co/Kwai-Kolors/Kolors-IP-Adapter-Plus/resolve/main/image_encoder/pytorch_model.bin 下载至 models/clip_vision/ + +### 官方Kolors-IP-Adapter-FaceID-Plus模型下载地址 +模型主页: https://huggingface.co/Kwai-Kolors/Kolors-IP-Adapter-FaceID-Plus + +https://huggingface.co/Kwai-Kolors/Kolors-IP-Adapter-FaceID-Plus/resolve/main/ipa-faceid-plus.bin 下载至 models/ipadapter/ + +https://huggingface.co/Kwai-Kolors/Kolors-IP-Adapter-FaceID-Plus/resolve/main/clip-vit-large-patch14-336/pytorch_model.bin 下载至 models/clip_vision/ + +https://huggingface.co/MonsterMMORPG/tools/resolve/main/antelopev2.zip 下载并解压至 models/insightface/models/antelopev2/*.onnx + +### 官方ControlNet模型下载地址 +模型主页(Depth): https://huggingface.co/Kwai-Kolors/Kolors-ControlNet-Depth + +模型主页(Canny): https://huggingface.co/Kwai-Kolors/Kolors-ControlNet-Canny + +### Kolors-Inpainting模型下载地址 +模型主页: https://huggingface.co/Kwai-Kolors/Kolors-Inpainting + +https://huggingface.co/Kwai-Kolors/Kolors-Inpainting/resolve/main/unet/diffusion_pytorch_model.safetensors 下载至 models/unet/ + + + +## Implementation of Kolors on ComfyUI + +Reference from https://github.com/kijai/ComfyUI-KwaiKolorsWrapper + +Using ComfyUI Native Sampling + +The workflow is obtained in examples/workflow.png + + +### UNET model download +The unet model is placed in the models/unet/ folder + +Model homepage: https://huggingface.co/Kwai-Kolors/Kolors + +Download link: +https://huggingface.co/Kwai-Kolors/Kolors/resolve/main/unet/diffusion_pytorch_model.fp16.safetensors + + +### ChatGLM3 model download +The chatglm3 is placed in the models/LLM/ folder + +Model homepage: https://huggingface.co/Kijai/ChatGLM3-safetensors 
+ +Download link: +https://huggingface.co/Kijai/ChatGLM3-safetensors/resolve/main/chatglm3-fp16.safetensors + + +## IPAdapter implementation is recommended to use [cubiq/ComfyUI_IPAdapter_plus](https://github.com/cubiq/ComfyUI_IPAdapter_plus) +The faceid related nodes have been supported in it, and the IPAdapter implementation needs to update ComfyUI_IPAdapter_plus to the latest version +### IPAdapter workflow +https://github.com/cubiq/ComfyUI_IPAdapter_plus/blob/main/examples/ipadapter_kolors.json + +### IPAdapter_FaceIDv2 workflow +https://github.com/cubiq/ComfyUI_IPAdapter_plus/blob/main/examples/IPAdapter_FaceIDv2_Kolors.json + + +### Official IP-Adapter-Plus model download link + +Model homepage: https://huggingface.co/Kwai-Kolors/Kolors-IP-Adapter-Plus + +https://huggingface.co/Kwai-Kolors/Kolors-IP-Adapter-Plus/resolve/main/ip_adapter_plus_general.bin Download to models/ipadapter/ + +https://huggingface.co/Kwai-Kolors/Kolors-IP-Adapter-Plus/resolve/main/image_encoder/pytorch_model.bin Download to models/clip_vision/ + + +### Official Kolors-IP-Adapter-FaceID-Plus model download link +Model homepage: https://huggingface.co/Kwai-Kolors/Kolors-IP-Adapter-FaceID-Plus + +https://huggingface.co/Kwai-Kolors/Kolors-IP-Adapter-FaceID-Plus/resolve/main/ipa-faceid-plus.bin Download to models/ipadapter/ + +https://huggingface.co/Kwai-Kolors/Kolors-IP-Adapter-FaceID-Plus/resolve/main/clip-vit-large-patch14-336/pytorch_model.bin Download to models/clip_vision/ + +https://huggingface.co/MonsterMMORPG/tools/resolve/main/antelopev2.zip Download and unzip to models/insightface/models/antelopev2/*.onnx + +### Official ControlNet model download link +Model homepage(Depth): https://huggingface.co/Kwai-Kolors/Kolors-ControlNet-Depth + +Model homepage(Canny): https://huggingface.co/Kwai-Kolors/Kolors-ControlNet-Canny + +### Kolors-Inpainting model download link +Model homepage: https://huggingface.co/Kwai-Kolors/Kolors-Inpainting + +https://huggingface.co/Kwai-Kolors/Kolors-Inpainting/resolve/main/unet/diffusion_pytorch_model.safetensors Download to models/unet/ + + +## 使用ComfyUI-KwaiKolorsWrapper在相同种子下测试结果 (Testing results with the same seed using ComfyUI-KwaiKolorsWrapper) +测试工作流来自examples/workflow_same_seed_test.png + +The test workflow comes from examples/workflow_same_seed_test.png + +![image](./examples/workflow_same_seed_test.png) + +## FAQ +加载模型时出现的错误 ++ 目前kolors有两个版本, 一种是unet类型采用unet加载器, 一种是放checkpoints文件夹用KolorsCheckpointLoaderSimple加载器, 如果你的模型来自huggingface的unet文件夹, 优先尝试使用unet加载器 (Currently, there are two versions of kolors, one is unet type using unet loader, and the other is placed in the checkpoints folder using KolorsCheckpointLoaderSimple loader. 
If your model comes from the huggingface unet folder, try the unet loader first)
++ 如果你无法确定模型类型, 那就都试一下 (If you are not sure about the model type, try both)
+
+Mac用户无法使用(Mac users cannot use it)
++ Mac用户可移步至[ComfyUI-Kolors-MZ](https://github.com/yiwangsimple/ComfyUI-Kolors-MZ) (Mac users can go to [ComfyUI-Kolors-MZ](https://github.com/yiwangsimple/ComfyUI-Kolors-MZ))
+
+和IPAdapter有关的错误(Errors related to IPAdapter)
++ 确保ComfyUI本体和ComfyUI_IPAdapter_plus已经更新到最新版本(Make sure ComfyUI itself and ComfyUI_IPAdapter_plus are updated to the latest version)
+
+name 'round_up' is not defined
++ 参考:https://github.com/THUDM/ChatGLM2-6B/issues/272#issuecomment-1632164243 , 使用 pip install cpm_kernels 或者 pip install -U cpm_kernels 更新 cpm_kernels (See the linked issue and update cpm_kernels with pip install cpm_kernels or pip install -U cpm_kernels)
+
+module 'comfy.model_detection' has no attribute 'unet_prefix_from_state_dict'
++ 更新ComfyUI本体到最新版本(Update ComfyUI itself to the latest version)
+
+RuntimeError: Only Tensors of floating point dtype can require gradients
++ 尝试使用fp16版本的模型: https://huggingface.co/Kijai/ChatGLM3-safetensors/blob/main/chatglm3-fp16.safetensors (Try the fp16 version of the model)
+
+Error occurred when executing MZ_ChatGLM3Loader: 'ChatGLMModel' object has no attribute 'transformer'
++ 检查ChatGLM3Loader节点选择的模型是否已经正确下载 (Check that the model selected in the ChatGLM3Loader node has been downloaded correctly)
+
+
+## Credits
+
+- [Kolors](https://github.com/Kwai-Kolors/Kolors)
+- [ComfyUI](https://github.com/comfyanonymous/ComfyUI)
+- [ComfyUI_IPAdapter_plus](https://github.com/cubiq/ComfyUI_IPAdapter_plus)
+
+## Star History
+
+
    + + + + Star History Chart + + + + +## Contact +- 微信Wechat: minrszone +- Bilibili: [minus_zone](https://space.bilibili.com/5950992) +- 小红书: [MinusZoneAI](https://www.xiaohongshu.com/user/profile/5f072e990000000001005472) + +## Stargazers +[![Stargazers repo roster for @MinusZoneAI/ComfyUI-Kolors-MZ](https://reporoster.com/stars/MinusZoneAI/ComfyUI-Kolors-MZ)](https://github.com/MinusZoneAI/ComfyUI-Kolors-MZ/stargazers) + + +## Sponsorship + + + diff --git a/ComfyUI-Kolors-MZ/__init__.py b/ComfyUI-Kolors-MZ/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..06bba576c11b6267ae40d79fb67a53fcb9bd9cc2 --- /dev/null +++ b/ComfyUI-Kolors-MZ/__init__.py @@ -0,0 +1,339 @@ +import inspect +import json +import os +import folder_paths +import importlib + + +NODE_CLASS_MAPPINGS = { +} + + +NODE_DISPLAY_NAME_MAPPINGS = { +} + +MAX_RESOLUTION = 16384 + +AUTHOR_NAME = "MinusZone" +CATEGORY_NAME = f"{AUTHOR_NAME} - Kolors" +folder_paths.add_model_folder_path( + "LLM", os.path.join(folder_paths.models_dir, "LLM")) + + +class MZ_ChatGLM3Loader: + @classmethod + def INPUT_TYPES(s): + # from .mz_kolors_utils import Utils + # llm_dir = os.path.join(Utils.get_models_path(), "LLM") + # print("llm_dir:", llm_dir) + llm_models = folder_paths.get_filename_list("LLM") + + # 筛选safetensors结尾的文件 + llm_models = [ + model for model in llm_models if model.endswith("safetensors")] + + return {"required": { + "chatglm3_checkpoint": (llm_models,), + }} + + RETURN_TYPES = ("CHATGLM3MODEL",) + RETURN_NAMES = ("chatglm3_model",) + FUNCTION = "load_chatglm3" + CATEGORY = CATEGORY_NAME + + def load_chatglm3(self, **kwargs): + from . import mz_kolors_core + importlib.reload(mz_kolors_core) + return mz_kolors_core.MZ_ChatGLM3Loader_call(kwargs) + + +NODE_CLASS_MAPPINGS["MZ_ChatGLM3Loader"] = MZ_ChatGLM3Loader +NODE_DISPLAY_NAME_MAPPINGS["MZ_ChatGLM3Loader"] = f"{AUTHOR_NAME} - ChatGLM3Loader" + + +class MZ_ChatGLM3TextEncodeV2: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "chatglm3_model": ("CHATGLM3MODEL", ), + "text": ("STRING", {"multiline": True, "dynamicPrompts": True}), + } + } + + RETURN_TYPES = ("CONDITIONING",) + + FUNCTION = "encode" + CATEGORY = CATEGORY_NAME + + def encode(self, **kwargs): + from . import mz_kolors_core + importlib.reload(mz_kolors_core) + return mz_kolors_core.MZ_ChatGLM3TextEncodeV2_call(kwargs) + + +NODE_CLASS_MAPPINGS["MZ_ChatGLM3_V2"] = MZ_ChatGLM3TextEncodeV2 +NODE_DISPLAY_NAME_MAPPINGS[ + "MZ_ChatGLM3_V2"] = f"{AUTHOR_NAME} - ChatGLM3TextEncodeV2" + + +class MZ_ChatGLM3Embeds2Conditioning: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "kolors_embeds": ("KOLORS_EMBEDS", ), + "width": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + "height": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + "crop_w": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION}), + "crop_h": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION}), + "target_width": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + "target_height": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + } + } + + RETURN_TYPES = ("CONDITIONING", "CONDITIONING",) + RETURN_NAMES = ("positive", "negative",) + + FUNCTION = "embeds2conditioning" + CATEGORY = CATEGORY_NAME + + def embeds2conditioning(self, **kwargs): + from . 
import mz_kolors_core + importlib.reload(mz_kolors_core) + return mz_kolors_core.MZ_ChatGLM3Embeds2Conditioning_call(kwargs) + + +NODE_CLASS_MAPPINGS["MZ_ChatGLM3Embeds2Conditioning"] = MZ_ChatGLM3Embeds2Conditioning +NODE_DISPLAY_NAME_MAPPINGS[ + "MZ_ChatGLM3Embeds2Conditioning"] = f"{AUTHOR_NAME} - ChatGLM3Embeds2Conditioning" + + +# for 2048 resolution +class MZ_ChatGLM3TextEncodeAdvanceV2: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "chatglm3_model": ("CHATGLM3MODEL", ), + "text": ("STRING", {"multiline": True, "dynamicPrompts": True}), + "width": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + "height": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + "crop_w": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION}), + "crop_h": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION}), + "target_width": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + "target_height": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + } + } + + RETURN_TYPES = ("CONDITIONING",) + + FUNCTION = "encode" + CATEGORY = CATEGORY_NAME + + def encode(self, **kwargs): + from . import mz_kolors_core + importlib.reload(mz_kolors_core) + return mz_kolors_core.MZ_ChatGLM3TextEncodeV2_call(kwargs) + + +NODE_CLASS_MAPPINGS["MZ_ChatGLM3_Advance_V2"] = MZ_ChatGLM3TextEncodeAdvanceV2 +NODE_DISPLAY_NAME_MAPPINGS[ + "MZ_ChatGLM3_Advance_V2"] = f"{AUTHOR_NAME} - ChatGLM3TextEncodeAdvanceV2" + + +class MZ_KolorsCheckpointLoaderSimple(): + @classmethod + def INPUT_TYPES(s): + return {"required": {"ckpt_name": (folder_paths.get_filename_list("checkpoints"), ), + }} + RETURN_TYPES = ("MODEL", "VAE") + FUNCTION = "load_checkpoint" + + CATEGORY = CATEGORY_NAME + + def load_checkpoint(self, **kwargs): + from . import mz_kolors_core + importlib.reload(mz_kolors_core) + return mz_kolors_core.MZ_KolorsCheckpointLoaderSimple_call(kwargs) + + +NODE_CLASS_MAPPINGS["MZ_KolorsCheckpointLoaderSimple"] = MZ_KolorsCheckpointLoaderSimple +NODE_DISPLAY_NAME_MAPPINGS[ + "MZ_KolorsCheckpointLoaderSimple"] = f"{AUTHOR_NAME} - KolorsCheckpointLoaderSimple" + + +class MZ_KolorsControlNetLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "control_net_name": (folder_paths.get_filename_list("controlnet"), ), + # "seed": ("INT", {"default": 0, "min": 0, "max": 1000000}), + }} + + RETURN_TYPES = ("CONTROL_NET",) + RETURN_NAMES = ("ControlNet",) + FUNCTION = "load_controlnet" + + CATEGORY = CATEGORY_NAME + + def load_controlnet(self, **kwargs): + from . import mz_kolors_core + importlib.reload(mz_kolors_core) + return mz_kolors_core.MZ_KolorsControlNetLoader_call(kwargs) + + +NODE_CLASS_MAPPINGS["MZ_KolorsControlNetLoader"] = MZ_KolorsControlNetLoader +NODE_DISPLAY_NAME_MAPPINGS[ + "MZ_KolorsControlNetLoader"] = f"{AUTHOR_NAME} - KolorsControlNetLoader" + + +class MZ_KolorsUNETLoaderV2(): + @classmethod + def INPUT_TYPES(s): + return {"required": { + "unet_name": (folder_paths.get_filename_list("unet"), ), + }} + + RETURN_TYPES = ("MODEL",) + + RETURN_NAMES = ("model",) + + FUNCTION = "load_unet" + + CATEGORY = CATEGORY_NAME + + def load_unet(self, **kwargs): + from . 
import mz_kolors_core + importlib.reload(mz_kolors_core) + return mz_kolors_core.MZ_KolorsUNETLoaderV2_call(kwargs) + + +NODE_CLASS_MAPPINGS["MZ_KolorsUNETLoaderV2"] = MZ_KolorsUNETLoaderV2 +NODE_DISPLAY_NAME_MAPPINGS[ + "MZ_KolorsUNETLoaderV2"] = f"{AUTHOR_NAME} - KolorsUNETLoaderV2" + + +class MZ_KolorsControlNetPatch: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "control_net": ("CONTROL_NET", ), + "model": ("MODEL", ), + } + } + + RETURN_TYPES = ("CONTROL_NET",) + + FUNCTION = "start" + CATEGORY = CATEGORY_NAME + + def start(self, **kwargs): + from . import mz_kolors_core + importlib.reload(mz_kolors_core) + return mz_kolors_core.MZ_KolorsControlNetPatch_call(kwargs) + + +NODE_CLASS_MAPPINGS["MZ_KolorsControlNetPatch"] = MZ_KolorsControlNetPatch +NODE_DISPLAY_NAME_MAPPINGS[ + "MZ_KolorsControlNetPatch"] = f"{AUTHOR_NAME} - KolorsControlNetPatch" + + +class MZ_KolorsCLIPVisionLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": {"clip_name": (folder_paths.get_filename_list("clip_vision"), ), + }} + RETURN_TYPES = ("CLIP_VISION",) + FUNCTION = "load_clip" + + CATEGORY = CATEGORY_NAME + "/Legacy" + + def load_clip(self, **kwargs): + from . import mz_kolors_core + importlib.reload(mz_kolors_core) + return mz_kolors_core.MZ_KolorsCLIPVisionLoader_call(kwargs) + + +NODE_CLASS_MAPPINGS["MZ_KolorsCLIPVisionLoader"] = MZ_KolorsCLIPVisionLoader +NODE_DISPLAY_NAME_MAPPINGS["MZ_KolorsCLIPVisionLoader"] = f"{AUTHOR_NAME} - KolorsCLIPVisionLoader - Legacy" + + +class MZ_ApplySDXLSamplingSettings(): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + } + } + + RETURN_TYPES = ("MODEL", ) + + FUNCTION = "apply_sampling_settings" + CATEGORY = CATEGORY_NAME + + def apply_sampling_settings(self, **kwargs): + from . import mz_kolors_core + importlib.reload(mz_kolors_core) + return mz_kolors_core.MZ_ApplySDXLSamplingSettings_call(kwargs) + + +NODE_CLASS_MAPPINGS["MZ_ApplySDXLSamplingSettings"] = MZ_ApplySDXLSamplingSettings +NODE_DISPLAY_NAME_MAPPINGS[ + "MZ_ApplySDXLSamplingSettings"] = f"{AUTHOR_NAME} - ApplySDXLSamplingSettings" + + +class MZ_ApplyCUDAGenerator(): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + } + } + + RETURN_TYPES = ("MODEL", ) + + FUNCTION = "apply_cuda_generator" + CATEGORY = CATEGORY_NAME + + def apply_cuda_generator(self, **kwargs): + from . 
import mz_kolors_core + importlib.reload(mz_kolors_core) + return mz_kolors_core.MZ_ApplyCUDAGenerator_call(kwargs) + + +NODE_CLASS_MAPPINGS["MZ_ApplyCUDAGenerator"] = MZ_ApplyCUDAGenerator +NODE_DISPLAY_NAME_MAPPINGS[ + "MZ_ApplyCUDAGenerator"] = f"{AUTHOR_NAME} - ApplyCUDAGenerator" + + +from .ComfyUI_IPAdapter_plus.IPAdapterPlus import IPAdapterAdvanced, IPAdapterModelLoader, IPAdapterInsightFaceLoader, IPAdapterFaceID + +IPAdapterModelLoader.CATEGORY = CATEGORY_NAME + "/IPAdapter" +NODE_CLASS_MAPPINGS["MZ_IPAdapterModelLoaderKolors"] = IPAdapterModelLoader +NODE_DISPLAY_NAME_MAPPINGS[ + "MZ_IPAdapterModelLoaderKolors"] = f"IPAdapterModelLoader(kolors) - Legacy" + +IPAdapterAdvanced.CATEGORY = CATEGORY_NAME + "/IPAdapter" +NODE_CLASS_MAPPINGS["MZ_IPAdapterAdvancedKolors"] = IPAdapterAdvanced +NODE_DISPLAY_NAME_MAPPINGS[ + "MZ_IPAdapterAdvancedKolors"] = f"IPAdapterAdvanced(kolors) - Legacy" + +IPAdapterInsightFaceLoader.CATEGORY = CATEGORY_NAME + "/IPAdapter" +NODE_CLASS_MAPPINGS["MZ_IPAdapterInsightFaceLoader"] = IPAdapterInsightFaceLoader + +NODE_DISPLAY_NAME_MAPPINGS[ + "MZ_IPAdapterInsightFaceLoader"] = f"IPAdapterInsightFaceLoader(kolors) - Legacy" + +IPAdapterFaceID.CATEGORY = CATEGORY_NAME + "/IPAdapter" +NODE_CLASS_MAPPINGS["MZ_IPAdapterFaceID"] = IPAdapterFaceID + +NODE_DISPLAY_NAME_MAPPINGS[ + "MZ_IPAdapterFaceID"] = f"IPAdapterFaceID(kolors) - Legacy" + +from . import mz_kolors_legacy +NODE_CLASS_MAPPINGS.update(mz_kolors_legacy.NODE_CLASS_MAPPINGS) +NODE_DISPLAY_NAME_MAPPINGS.update(mz_kolors_legacy.NODE_DISPLAY_NAME_MAPPINGS) diff --git a/ComfyUI-Kolors-MZ/chatglm3/__init__.py b/ComfyUI-Kolors-MZ/chatglm3/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI-Kolors-MZ/chatglm3/configuration_chatglm.py b/ComfyUI-Kolors-MZ/chatglm3/configuration_chatglm.py new file mode 100644 index 0000000000000000000000000000000000000000..ec32e66d3e601916be56caa5250bac1b27240a8a --- /dev/null +++ b/ComfyUI-Kolors-MZ/chatglm3/configuration_chatglm.py @@ -0,0 +1,61 @@ +from transformers import PretrainedConfig + + +class ChatGLMConfig(PretrainedConfig): + model_type = "chatglm" + def __init__( + self, + num_layers=28, + padded_vocab_size=65024, + hidden_size=4096, + ffn_hidden_size=13696, + kv_channels=128, + num_attention_heads=32, + seq_length=2048, + hidden_dropout=0.0, + classifier_dropout=None, + attention_dropout=0.0, + layernorm_epsilon=1e-5, + rmsnorm=True, + apply_residual_connection_post_layernorm=False, + post_layer_norm=True, + add_bias_linear=False, + add_qkv_bias=False, + bias_dropout_fusion=True, + multi_query_attention=False, + multi_query_group_num=1, + apply_query_key_layer_scaling=True, + attention_softmax_in_fp32=True, + fp32_residual_connection=False, + quantization_bit=0, + pre_seq_len=None, + prefix_projection=False, + **kwargs + ): + self.num_layers = num_layers + self.vocab_size = padded_vocab_size + self.padded_vocab_size = padded_vocab_size + self.hidden_size = hidden_size + self.ffn_hidden_size = ffn_hidden_size + self.kv_channels = kv_channels + self.num_attention_heads = num_attention_heads + self.seq_length = seq_length + self.hidden_dropout = hidden_dropout + self.classifier_dropout = classifier_dropout + self.attention_dropout = attention_dropout + self.layernorm_epsilon = layernorm_epsilon + self.rmsnorm = rmsnorm + self.apply_residual_connection_post_layernorm = apply_residual_connection_post_layernorm + self.post_layer_norm = post_layer_norm + self.add_bias_linear = 
add_bias_linear + self.add_qkv_bias = add_qkv_bias + self.bias_dropout_fusion = bias_dropout_fusion + self.multi_query_attention = multi_query_attention + self.multi_query_group_num = multi_query_group_num + self.apply_query_key_layer_scaling = apply_query_key_layer_scaling + self.attention_softmax_in_fp32 = attention_softmax_in_fp32 + self.fp32_residual_connection = fp32_residual_connection + self.quantization_bit = quantization_bit + self.pre_seq_len = pre_seq_len + self.prefix_projection = prefix_projection + super().__init__(**kwargs) diff --git a/ComfyUI-Kolors-MZ/chatglm3/modeling_chatglm.py b/ComfyUI-Kolors-MZ/chatglm3/modeling_chatglm.py new file mode 100644 index 0000000000000000000000000000000000000000..0cfb314510cf6c8a91e58ecf9e6c9ac0e8a0dfdd --- /dev/null +++ b/ComfyUI-Kolors-MZ/chatglm3/modeling_chatglm.py @@ -0,0 +1,1298 @@ +""" PyTorch ChatGLM model. """ + +import math +import copy +import warnings +import re +import sys + +import torch +import torch.utils.checkpoint +import torch.nn.functional as F +from torch import nn +from torch.nn import CrossEntropyLoss, LayerNorm +from torch.nn import CrossEntropyLoss, LayerNorm, MSELoss, BCEWithLogitsLoss +from torch.nn.utils import skip_init +from typing import Optional, Tuple, Union, List, Callable, Dict, Any +from copy import deepcopy + +from transformers.modeling_outputs import ( + BaseModelOutputWithPast, + CausalLMOutputWithPast, + SequenceClassifierOutputWithPast, +) +from transformers.modeling_utils import PreTrainedModel +from transformers.utils import logging +from transformers.generation.logits_process import LogitsProcessor +from transformers.generation.utils import LogitsProcessorList, StoppingCriteriaList, GenerationConfig, ModelOutput + +try: + from .configuration_chatglm import ChatGLMConfig +except: + from configuration_chatglm import ChatGLMConfig + + +# flags required to enable jit fusion kernels + +if sys.platform != 'darwin': + torch._C._jit_set_profiling_mode(False) + torch._C._jit_set_profiling_executor(False) + torch._C._jit_override_can_fuse_on_cpu(True) + torch._C._jit_override_can_fuse_on_gpu(True) + +logger = logging.get_logger(__name__) + +_CHECKPOINT_FOR_DOC = "THUDM/ChatGLM" +_CONFIG_FOR_DOC = "ChatGLM6BConfig" + +CHATGLM_6B_PRETRAINED_MODEL_ARCHIVE_LIST = [ + "THUDM/chatglm3-6b-base", + # See all ChatGLM models at https://huggingface.co/models?filter=chatglm +] + + +def default_init(cls, *args, **kwargs): + return cls(*args, **kwargs) + + +class InvalidScoreLogitsProcessor(LogitsProcessor): + def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor: + if torch.isnan(scores).any() or torch.isinf(scores).any(): + scores.zero_() + scores[..., 5] = 5e4 + return scores + + +class PrefixEncoder(torch.nn.Module): + """ + The torch.nn model to encode the prefix + Input shape: (batch-size, prefix-length) + Output shape: (batch-size, prefix-length, 2*layers*hidden) + """ + + def __init__(self, config: ChatGLMConfig): + super().__init__() + self.prefix_projection = config.prefix_projection + if self.prefix_projection: + # Use a two-layer MLP to encode the prefix + kv_size = config.num_layers * config.kv_channels * config.multi_query_group_num * 2 + self.embedding = torch.nn.Embedding(config.pre_seq_len, kv_size) + self.trans = torch.nn.Sequential( + torch.nn.Linear(kv_size, config.hidden_size), + torch.nn.Tanh(), + torch.nn.Linear(config.hidden_size, kv_size) + ) + else: + self.embedding = torch.nn.Embedding(config.pre_seq_len, + config.num_layers * config.kv_channels * 
config.multi_query_group_num * 2) + + def forward(self, prefix: torch.Tensor): + if self.prefix_projection: + prefix_tokens = self.embedding(prefix) + past_key_values = self.trans(prefix_tokens) + else: + past_key_values = self.embedding(prefix) + return past_key_values + + +def split_tensor_along_last_dim( + tensor: torch.Tensor, + num_partitions: int, + contiguous_split_chunks: bool = False, +) -> List[torch.Tensor]: + """Split a tensor along its last dimension. + + Arguments: + tensor: input tensor. + num_partitions: number of partitions to split the tensor + contiguous_split_chunks: If True, make each chunk contiguous + in memory. + + Returns: + A list of Tensors + """ + # Get the size and dimension. + last_dim = tensor.dim() - 1 + last_dim_size = tensor.size()[last_dim] // num_partitions + # Split. + tensor_list = torch.split(tensor, last_dim_size, dim=last_dim) + # Note: torch.split does not create contiguous tensors by default. + if contiguous_split_chunks: + return tuple(chunk.contiguous() for chunk in tensor_list) + + return tensor_list + + +class RotaryEmbedding(nn.Module): + def __init__(self, dim, original_impl=False, device=None, dtype=None): + super().__init__() + inv_freq = 1.0 / (10000 ** (torch.arange(0, dim, 2, device=device).to(dtype=dtype) / dim)) + self.register_buffer("inv_freq", inv_freq) + self.dim = dim + self.original_impl = original_impl + + def forward_impl( + self, seq_len: int, n_elem: int, dtype: torch.dtype, device: torch.device, base: int = 10000 + ): + """Enhanced Transformer with Rotary Position Embedding. + + Derived from: https://github.com/labmlai/annotated_deep_learning_paper_implementations/blob/master/labml_nn/ + transformers/rope/__init__.py. MIT License: + https://github.com/labmlai/annotated_deep_learning_paper_implementations/blob/master/license. 
+ """ + # $\Theta = {\theta_i = 10000^{\frac{2(i-1)}{d}}, i \in [1, 2, ..., \frac{d}{2}]}$ + theta = 1.0 / (base ** (torch.arange(0, n_elem, 2, dtype=torch.float, device=device) / n_elem)) + + # Create position indexes `[0, 1, ..., seq_len - 1]` + seq_idx = torch.arange(seq_len, dtype=torch.float, device=device) + + # Calculate the product of position index and $\theta_i$ + idx_theta = torch.outer(seq_idx, theta).float() + + cache = torch.stack([torch.cos(idx_theta), torch.sin(idx_theta)], dim=-1) + + # this is to mimic the behaviour of complex32, else we will get different results + if dtype in (torch.float16, torch.bfloat16, torch.int8): + cache = cache.bfloat16() if dtype == torch.bfloat16 else cache.half() + return cache + + def forward(self, max_seq_len, offset=0): + return self.forward_impl( + max_seq_len, self.dim, dtype=self.inv_freq.dtype, device=self.inv_freq.device + ) + + +@torch.jit.script +def apply_rotary_pos_emb(x: torch.Tensor, rope_cache: torch.Tensor) -> torch.Tensor: + # x: [sq, b, np, hn] + sq, b, np, hn = x.size(0), x.size(1), x.size(2), x.size(3) + rot_dim = rope_cache.shape[-2] * 2 + x, x_pass = x[..., :rot_dim], x[..., rot_dim:] + # truncate to support variable sizes + rope_cache = rope_cache[:sq] + xshaped = x.reshape(sq, -1, np, rot_dim // 2, 2) + rope_cache = rope_cache.view(sq, -1, 1, xshaped.size(3), 2) + x_out2 = torch.stack( + [ + xshaped[..., 0] * rope_cache[..., 0] - xshaped[..., 1] * rope_cache[..., 1], + xshaped[..., 1] * rope_cache[..., 0] + xshaped[..., 0] * rope_cache[..., 1], + ], + -1, + ) + x_out2 = x_out2.flatten(3) + return torch.cat((x_out2, x_pass), dim=-1) + + +class RMSNorm(torch.nn.Module): + def __init__(self, normalized_shape, eps=1e-5, device=None, dtype=None, **kwargs): + super().__init__() + self.weight = torch.nn.Parameter(torch.empty(normalized_shape, device=device, dtype=dtype)) + self.eps = eps + + def forward(self, hidden_states: torch.Tensor): + input_dtype = hidden_states.dtype + variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True) + hidden_states = hidden_states * torch.rsqrt(variance + self.eps) + + return (self.weight * hidden_states).to(input_dtype) + + +class CoreAttention(torch.nn.Module): + def __init__(self, config: ChatGLMConfig, layer_number): + super(CoreAttention, self).__init__() + + self.apply_query_key_layer_scaling = config.apply_query_key_layer_scaling + self.attention_softmax_in_fp32 = config.attention_softmax_in_fp32 + if self.apply_query_key_layer_scaling: + self.attention_softmax_in_fp32 = True + self.layer_number = max(1, layer_number) + + projection_size = config.kv_channels * config.num_attention_heads + + # Per attention head and per partition values. 
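+ # There is no tensor parallelism in this standalone copy of the model, so the
+ # "per partition" sizes below are simply the full projection sizes. norm_factor
+ # is the sqrt(head_dim) scale applied to the query-key dot products; with
+ # query/key layer scaling enabled it is additionally multiplied by the layer number.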
+ self.hidden_size_per_partition = projection_size + self.hidden_size_per_attention_head = projection_size // config.num_attention_heads + self.num_attention_heads_per_partition = config.num_attention_heads + + coeff = None + self.norm_factor = math.sqrt(self.hidden_size_per_attention_head) + if self.apply_query_key_layer_scaling: + coeff = self.layer_number + self.norm_factor *= coeff + self.coeff = coeff + + self.attention_dropout = torch.nn.Dropout(config.attention_dropout) + + def forward(self, query_layer, key_layer, value_layer, attention_mask): + pytorch_major_version = int(torch.__version__.split('.')[0]) + if pytorch_major_version >= 2: + query_layer, key_layer, value_layer = [k.permute(1, 2, 0, 3) for k in [query_layer, key_layer, value_layer]] + if attention_mask is None and query_layer.shape[2] == key_layer.shape[2]: + context_layer = torch.nn.functional.scaled_dot_product_attention(query_layer, key_layer, value_layer, + is_causal=True) + else: + if attention_mask is not None: + attention_mask = ~attention_mask + context_layer = torch.nn.functional.scaled_dot_product_attention(query_layer, key_layer, value_layer, + attention_mask) + context_layer = context_layer.permute(2, 0, 1, 3) + new_context_layer_shape = context_layer.size()[:-2] + (self.hidden_size_per_partition,) + context_layer = context_layer.reshape(*new_context_layer_shape) + else: + # Raw attention scores + + # [b, np, sq, sk] + output_size = (query_layer.size(1), query_layer.size(2), query_layer.size(0), key_layer.size(0)) + + # [sq, b, np, hn] -> [sq, b * np, hn] + query_layer = query_layer.view(output_size[2], output_size[0] * output_size[1], -1) + # [sk, b, np, hn] -> [sk, b * np, hn] + key_layer = key_layer.view(output_size[3], output_size[0] * output_size[1], -1) + + # preallocting input tensor: [b * np, sq, sk] + matmul_input_buffer = torch.empty( + output_size[0] * output_size[1], output_size[2], output_size[3], dtype=query_layer.dtype, + device=query_layer.device + ) + + # Raw attention scores. [b * np, sq, sk] + matmul_result = torch.baddbmm( + matmul_input_buffer, + query_layer.transpose(0, 1), # [b * np, sq, hn] + key_layer.transpose(0, 1).transpose(1, 2), # [b * np, hn, sk] + beta=0.0, + alpha=(1.0 / self.norm_factor), + ) + + # change view to [b, np, sq, sk] + attention_scores = matmul_result.view(*output_size) + + # =========================== + # Attention probs and dropout + # =========================== + + # attention scores and attention mask [b, np, sq, sk] + if self.attention_softmax_in_fp32: + attention_scores = attention_scores.float() + if self.coeff is not None: + attention_scores = attention_scores * self.coeff + if attention_mask is None and attention_scores.shape[2] == attention_scores.shape[3]: + attention_mask = torch.ones(output_size[0], 1, output_size[2], output_size[3], + device=attention_scores.device, dtype=torch.bool) + attention_mask.tril_() + attention_mask = ~attention_mask + if attention_mask is not None: + attention_scores = attention_scores.masked_fill(attention_mask, float("-inf")) + attention_probs = F.softmax(attention_scores, dim=-1) + attention_probs = attention_probs.type_as(value_layer) + + # This is actually dropping out entire tokens to attend to, which might + # seem a bit unusual, but is taken from the original Transformer paper. + attention_probs = self.attention_dropout(attention_probs) + # ========================= + # Context layer. [sq, b, hp] + # ========================= + + # value_layer -> context layer. 
+ # [sk, b, np, hn] --> [b, np, sq, hn] + + # context layer shape: [b, np, sq, hn] + output_size = (value_layer.size(1), value_layer.size(2), query_layer.size(0), value_layer.size(3)) + # change view [sk, b * np, hn] + value_layer = value_layer.view(value_layer.size(0), output_size[0] * output_size[1], -1) + # change view [b * np, sq, sk] + attention_probs = attention_probs.view(output_size[0] * output_size[1], output_size[2], -1) + # matmul: [b * np, sq, hn] + context_layer = torch.bmm(attention_probs, value_layer.transpose(0, 1)) + # change view [b, np, sq, hn] + context_layer = context_layer.view(*output_size) + # [b, np, sq, hn] --> [sq, b, np, hn] + context_layer = context_layer.permute(2, 0, 1, 3).contiguous() + # [sq, b, np, hn] --> [sq, b, hp] + new_context_layer_shape = context_layer.size()[:-2] + (self.hidden_size_per_partition,) + context_layer = context_layer.view(*new_context_layer_shape) + + return context_layer + + +class SelfAttention(torch.nn.Module): + """Parallel self-attention layer abstract class. + + Self-attention layer takes input with size [s, b, h] + and returns output of the same size. + """ + + def __init__(self, config: ChatGLMConfig, layer_number, device=None): + super(SelfAttention, self).__init__() + self.layer_number = max(1, layer_number) + + self.projection_size = config.kv_channels * config.num_attention_heads + + # Per attention head and per partition values. + self.hidden_size_per_attention_head = self.projection_size // config.num_attention_heads + self.num_attention_heads_per_partition = config.num_attention_heads + + self.multi_query_attention = config.multi_query_attention + self.qkv_hidden_size = 3 * self.projection_size + if self.multi_query_attention: + self.num_multi_query_groups_per_partition = config.multi_query_group_num + self.qkv_hidden_size = ( + self.projection_size + 2 * self.hidden_size_per_attention_head * config.multi_query_group_num + ) + self.query_key_value = nn.Linear(config.hidden_size, self.qkv_hidden_size, + bias=config.add_bias_linear or config.add_qkv_bias, + device=device, **_config_to_kwargs(config) + ) + + self.core_attention = CoreAttention(config, self.layer_number) + + # Output. + self.dense = nn.Linear(self.projection_size, config.hidden_size, bias=config.add_bias_linear, + device=device, **_config_to_kwargs(config) + ) + + def _allocate_memory(self, inference_max_sequence_len, batch_size, device=None, dtype=None): + if self.multi_query_attention: + num_attention_heads = self.num_multi_query_groups_per_partition + else: + num_attention_heads = self.num_attention_heads_per_partition + return torch.empty( + inference_max_sequence_len, + batch_size, + num_attention_heads, + self.hidden_size_per_attention_head, + dtype=dtype, + device=device, + ) + + def forward( + self, hidden_states, attention_mask, rotary_pos_emb, kv_cache=None, use_cache=True + ): + # hidden_states: [sq, b, h] + + # ================================================= + # Pre-allocate memory for key-values for inference. 
+ # ================================================= + # ===================== + # Query, Key, and Value + # ===================== + + # Attention heads [sq, b, h] --> [sq, b, (np * 3 * hn)] + mixed_x_layer = self.query_key_value(hidden_states) + + if self.multi_query_attention: + (query_layer, key_layer, value_layer) = mixed_x_layer.split( + [ + self.num_attention_heads_per_partition * self.hidden_size_per_attention_head, + self.num_multi_query_groups_per_partition * self.hidden_size_per_attention_head, + self.num_multi_query_groups_per_partition * self.hidden_size_per_attention_head, + ], + dim=-1, + ) + query_layer = query_layer.view( + query_layer.size()[:-1] + (self.num_attention_heads_per_partition, self.hidden_size_per_attention_head) + ) + key_layer = key_layer.view( + key_layer.size()[:-1] + (self.num_multi_query_groups_per_partition, self.hidden_size_per_attention_head) + ) + value_layer = value_layer.view( + value_layer.size()[:-1] + + (self.num_multi_query_groups_per_partition, self.hidden_size_per_attention_head) + ) + else: + new_tensor_shape = mixed_x_layer.size()[:-1] + \ + (self.num_attention_heads_per_partition, + 3 * self.hidden_size_per_attention_head) + mixed_x_layer = mixed_x_layer.view(*new_tensor_shape) + + # [sq, b, np, 3 * hn] --> 3 [sq, b, np, hn] + (query_layer, key_layer, value_layer) = split_tensor_along_last_dim(mixed_x_layer, 3) + + # apply relative positional encoding (rotary embedding) + if rotary_pos_emb is not None: + query_layer = apply_rotary_pos_emb(query_layer, rotary_pos_emb) + key_layer = apply_rotary_pos_emb(key_layer, rotary_pos_emb) + + # adjust key and value for inference + if kv_cache is not None: + cache_k, cache_v = kv_cache + key_layer = torch.cat((cache_k, key_layer), dim=0) + value_layer = torch.cat((cache_v, value_layer), dim=0) + if use_cache: + kv_cache = (key_layer, value_layer) + else: + kv_cache = None + + if self.multi_query_attention: + key_layer = key_layer.unsqueeze(-2) + key_layer = key_layer.expand( + -1, -1, -1, self.num_attention_heads_per_partition // self.num_multi_query_groups_per_partition, -1 + ) + key_layer = key_layer.contiguous().view( + key_layer.size()[:2] + (self.num_attention_heads_per_partition, self.hidden_size_per_attention_head) + ) + value_layer = value_layer.unsqueeze(-2) + value_layer = value_layer.expand( + -1, -1, -1, self.num_attention_heads_per_partition // self.num_multi_query_groups_per_partition, -1 + ) + value_layer = value_layer.contiguous().view( + value_layer.size()[:2] + (self.num_attention_heads_per_partition, self.hidden_size_per_attention_head) + ) + + # ================================== + # core attention computation + # ================================== + + context_layer = self.core_attention(query_layer, key_layer, value_layer, attention_mask) + + # ================= + # Output. [sq, b, h] + # ================= + + output = self.dense(context_layer) + + return output, kv_cache + + +def _config_to_kwargs(args): + common_kwargs = { + "dtype": args.torch_dtype, + } + return common_kwargs + + +class MLP(torch.nn.Module): + """MLP. + + MLP will take the input with h hidden state, project it to 4*h + hidden dimension, perform nonlinear transformation, and project the + state back into h hidden dimension. + """ + + def __init__(self, config: ChatGLMConfig, device=None): + super(MLP, self).__init__() + + self.add_bias = config.add_bias_linear + + # Project to 4h. 
If using swiglu double the output width, see https://arxiv.org/pdf/2002.05202.pdf + self.dense_h_to_4h = nn.Linear( + config.hidden_size, + config.ffn_hidden_size * 2, + bias=self.add_bias, + device=device, + **_config_to_kwargs(config) + ) + + def swiglu(x): + x = torch.chunk(x, 2, dim=-1) + return F.silu(x[0]) * x[1] + + self.activation_func = swiglu + + # Project back to h. + self.dense_4h_to_h = nn.Linear( + config.ffn_hidden_size, + config.hidden_size, + bias=self.add_bias, + device=device, + **_config_to_kwargs(config) + ) + + def forward(self, hidden_states): + # [s, b, 4hp] + intermediate_parallel = self.dense_h_to_4h(hidden_states) + intermediate_parallel = self.activation_func(intermediate_parallel) + # [s, b, h] + output = self.dense_4h_to_h(intermediate_parallel) + return output + + +class GLMBlock(torch.nn.Module): + """A single transformer layer. + + Transformer layer takes input with size [s, b, h] and returns an + output of the same size. + """ + + def __init__(self, config: ChatGLMConfig, layer_number, device=None): + super(GLMBlock, self).__init__() + self.layer_number = layer_number + + self.apply_residual_connection_post_layernorm = config.apply_residual_connection_post_layernorm + + self.fp32_residual_connection = config.fp32_residual_connection + + LayerNormFunc = RMSNorm if config.rmsnorm else LayerNorm + # Layernorm on the input data. + self.input_layernorm = LayerNormFunc(config.hidden_size, eps=config.layernorm_epsilon, device=device, + dtype=config.torch_dtype) + + # Self attention. + self.self_attention = SelfAttention(config, layer_number, device=device) + self.hidden_dropout = config.hidden_dropout + + # Layernorm on the attention output + self.post_attention_layernorm = LayerNormFunc(config.hidden_size, eps=config.layernorm_epsilon, device=device, + dtype=config.torch_dtype) + + # MLP + self.mlp = MLP(config, device=device) + + def forward( + self, hidden_states, attention_mask, rotary_pos_emb, kv_cache=None, use_cache=True, + ): + # hidden_states: [s, b, h] + + # Layer norm at the beginning of the transformer layer. + layernorm_output = self.input_layernorm(hidden_states) + # Self attention. + attention_output, kv_cache = self.self_attention( + layernorm_output, + attention_mask, + rotary_pos_emb, + kv_cache=kv_cache, + use_cache=use_cache + ) + + # Residual connection. + if self.apply_residual_connection_post_layernorm: + residual = layernorm_output + else: + residual = hidden_states + + layernorm_input = torch.nn.functional.dropout(attention_output, p=self.hidden_dropout, training=self.training) + layernorm_input = residual + layernorm_input + + # Layer norm post the self attention. + layernorm_output = self.post_attention_layernorm(layernorm_input) + + # MLP. + mlp_output = self.mlp(layernorm_output) + + # Second residual connection. + if self.apply_residual_connection_post_layernorm: + residual = layernorm_output + else: + residual = layernorm_input + + output = torch.nn.functional.dropout(mlp_output, p=self.hidden_dropout, training=self.training) + output = residual + output + + return output, kv_cache + + +class GLMTransformer(torch.nn.Module): + """Transformer class.""" + + def __init__(self, config: ChatGLMConfig, device=None): + super(GLMTransformer, self).__init__() + + self.fp32_residual_connection = config.fp32_residual_connection + self.post_layer_norm = config.post_layer_norm + + # Number of layers. + self.num_layers = config.num_layers + + # Transformer layers. 
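+ # Layers are built with a 1-based layer_number: CoreAttention reuses that number
+ # as the query/key layer-scaling coefficient, so it must never be zero.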
+ def build_layer(layer_number): + return GLMBlock(config, layer_number, device=device) + + self.layers = torch.nn.ModuleList([build_layer(i + 1) for i in range(self.num_layers)]) + + if self.post_layer_norm: + LayerNormFunc = RMSNorm if config.rmsnorm else LayerNorm + # Final layer norm before output. + self.final_layernorm = LayerNormFunc(config.hidden_size, eps=config.layernorm_epsilon, device=device, + dtype=config.torch_dtype) + + self.gradient_checkpointing = False + + def _get_layer(self, layer_number): + return self.layers[layer_number] + + def forward( + self, hidden_states, attention_mask, rotary_pos_emb, kv_caches=None, + use_cache: Optional[bool] = True, + output_hidden_states: Optional[bool] = False, + ): + if not kv_caches: + kv_caches = [None for _ in range(self.num_layers)] + presents = () if use_cache else None + if self.gradient_checkpointing and self.training: + if use_cache: + logger.warning_once( + "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..." + ) + use_cache = False + + all_self_attentions = None + all_hidden_states = () if output_hidden_states else None + for index in range(self.num_layers): + if output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states,) + + layer = self._get_layer(index) + if self.gradient_checkpointing and self.training: + layer_ret = torch.utils.checkpoint.checkpoint( + layer, + hidden_states, + attention_mask, + rotary_pos_emb, + kv_caches[index], + use_cache + ) + else: + layer_ret = layer( + hidden_states, + attention_mask, + rotary_pos_emb, + kv_cache=kv_caches[index], + use_cache=use_cache + ) + hidden_states, kv_cache = layer_ret + if use_cache: + presents = presents + (kv_cache,) + + if output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states,) + + # Final layer norm. + if self.post_layer_norm: + hidden_states = self.final_layernorm(hidden_states) + + return hidden_states, presents, all_hidden_states, all_self_attentions + + +class ChatGLMPreTrainedModel(PreTrainedModel): + """ + An abstract class to handle weights initialization and + a simple interface for downloading and loading pretrained models. 
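+ It also provides the shared get_masks and get_position_ids helpers, which build the
+ causal attention mask (extended for cached keys/values and padding) and the position ids.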
+ """ + + is_parallelizable = False + supports_gradient_checkpointing = True + config_class = ChatGLMConfig + base_model_prefix = "transformer" + _no_split_modules = ["GLMBlock"] + + def _init_weights(self, module: nn.Module): + """Initialize the weights.""" + return + + def get_masks(self, input_ids, past_key_values, padding_mask=None): + batch_size, seq_length = input_ids.shape + full_attention_mask = torch.ones(batch_size, seq_length, seq_length, device=input_ids.device) + full_attention_mask.tril_() + past_length = 0 + if past_key_values: + past_length = past_key_values[0][0].shape[0] + if past_length: + full_attention_mask = torch.cat((torch.ones(batch_size, seq_length, past_length, + device=input_ids.device), full_attention_mask), dim=-1) + if padding_mask is not None: + full_attention_mask = full_attention_mask * padding_mask.unsqueeze(1) + if not past_length and padding_mask is not None: + full_attention_mask -= padding_mask.unsqueeze(-1) - 1 + full_attention_mask = (full_attention_mask < 0.5).bool() + full_attention_mask.unsqueeze_(1) + return full_attention_mask + + def get_position_ids(self, input_ids, device): + batch_size, seq_length = input_ids.shape + position_ids = torch.arange(seq_length, dtype=torch.long, device=device).unsqueeze(0).repeat(batch_size, 1) + return position_ids + + def _set_gradient_checkpointing(self, module, value=False): + if isinstance(module, GLMTransformer): + module.gradient_checkpointing = value + + +class Embedding(torch.nn.Module): + """Language model embeddings.""" + + def __init__(self, config: ChatGLMConfig, device=None): + super(Embedding, self).__init__() + + self.hidden_size = config.hidden_size + # Word embeddings (parallel). + self.word_embeddings = nn.Embedding( + config.padded_vocab_size, + self.hidden_size, + dtype=config.torch_dtype, + device=device + ) + self.fp32_residual_connection = config.fp32_residual_connection + + def forward(self, input_ids): + # Embeddings. + words_embeddings = self.word_embeddings(input_ids) + embeddings = words_embeddings + # Data format change to avoid explicit tranposes : [b s h] --> [s b h]. + embeddings = embeddings.transpose(0, 1).contiguous() + # If the input flag for fp32 residual connection is set, convert for float. 
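+ # Casting the embeddings (and therefore the residual stream) to float32 trades extra
+ # memory for numerical stability when the transformer blocks run in fp16/bf16.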
+ if self.fp32_residual_connection: + embeddings = embeddings.float() + return embeddings + + +class ChatGLMModel(ChatGLMPreTrainedModel): + def __init__(self, config: ChatGLMConfig, device=None, empty_init=True): + super().__init__(config) + if empty_init: + init_method = skip_init + else: + init_method = default_init + init_kwargs = {} + if device is not None: + init_kwargs["device"] = device + self.embedding = init_method(Embedding, config, **init_kwargs) + self.num_layers = config.num_layers + self.multi_query_group_num = config.multi_query_group_num + self.kv_channels = config.kv_channels + + # Rotary positional embeddings + self.seq_length = config.seq_length + rotary_dim = ( + config.hidden_size // config.num_attention_heads if config.kv_channels is None else config.kv_channels + ) + + self.rotary_pos_emb = RotaryEmbedding(rotary_dim // 2, original_impl=config.original_rope, device=device, + dtype=config.torch_dtype) + self.encoder = init_method(GLMTransformer, config, **init_kwargs) + self.output_layer = init_method(nn.Linear, config.hidden_size, config.padded_vocab_size, bias=False, + dtype=config.torch_dtype, **init_kwargs) + self.pre_seq_len = config.pre_seq_len + self.prefix_projection = config.prefix_projection + if self.pre_seq_len is not None: + for param in self.parameters(): + param.requires_grad = False + self.prefix_tokens = torch.arange(self.pre_seq_len).long() + self.prefix_encoder = PrefixEncoder(config) + self.dropout = torch.nn.Dropout(0.1) + + def get_input_embeddings(self): + return self.embedding.word_embeddings + + def get_prompt(self, batch_size, device, dtype=torch.half): + prefix_tokens = self.prefix_tokens.unsqueeze(0).expand(batch_size, -1).to(device) + past_key_values = self.prefix_encoder(prefix_tokens).type(dtype) + past_key_values = past_key_values.view( + batch_size, + self.pre_seq_len, + self.num_layers * 2, + self.multi_query_group_num, + self.kv_channels + ) + # seq_len, b, nh, hidden_size + past_key_values = self.dropout(past_key_values) + past_key_values = past_key_values.permute([2, 1, 0, 3, 4]).split(2) + return past_key_values + + def forward( + self, + input_ids, + position_ids: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.BoolTensor] = None, + full_attention_mask: Optional[torch.BoolTensor] = None, + past_key_values: Optional[Tuple[Tuple[torch.Tensor, torch.Tensor], ...]] = None, + inputs_embeds: Optional[torch.Tensor] = None, + use_cache: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ): + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + use_cache = use_cache if use_cache is not None else self.config.use_cache + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + batch_size, seq_length = input_ids.shape + + if inputs_embeds is None: + inputs_embeds = self.embedding(input_ids) + + if self.pre_seq_len is not None: + if past_key_values is None: + past_key_values = self.get_prompt(batch_size=batch_size, device=input_ids.device, + dtype=inputs_embeds.dtype) + if attention_mask is not None: + attention_mask = torch.cat([attention_mask.new_ones((batch_size, self.pre_seq_len)), + attention_mask], dim=-1) + + if full_attention_mask is None: + if (attention_mask is not None and not attention_mask.all()) or (past_key_values and seq_length != 1): + full_attention_mask = self.get_masks(input_ids, past_key_values, padding_mask=attention_mask) + + # Rotary 
positional embeddings + rotary_pos_emb = self.rotary_pos_emb(self.seq_length) + if position_ids is not None: + rotary_pos_emb = rotary_pos_emb[position_ids] + else: + rotary_pos_emb = rotary_pos_emb[None, :seq_length] + rotary_pos_emb = rotary_pos_emb.transpose(0, 1).contiguous() + + # Run encoder. + hidden_states, presents, all_hidden_states, all_self_attentions = self.encoder( + inputs_embeds, full_attention_mask, rotary_pos_emb=rotary_pos_emb, + kv_caches=past_key_values, use_cache=use_cache, output_hidden_states=output_hidden_states + ) + + if not return_dict: + return tuple(v for v in [hidden_states, presents, all_hidden_states, all_self_attentions] if v is not None) + + return BaseModelOutputWithPast( + last_hidden_state=hidden_states, + past_key_values=presents, + hidden_states=all_hidden_states, + attentions=all_self_attentions, + ) + + def quantize(self, weight_bit_width: int): + from .quantization import quantize + quantize(self.encoder, weight_bit_width) + return self + + +class ChatGLMForConditionalGeneration(ChatGLMPreTrainedModel): + def __init__(self, config: ChatGLMConfig, empty_init=True, device=None): + super().__init__(config) + + self.max_sequence_length = config.max_length + self.transformer = ChatGLMModel(config, empty_init=empty_init, device=device) + self.config = config + self.quantized = False + + if self.config.quantization_bit: + self.quantize(self.config.quantization_bit, empty_init=True) + + def _update_model_kwargs_for_generation( + self, + outputs: ModelOutput, + model_kwargs: Dict[str, Any], + is_encoder_decoder: bool = False, + standardize_cache_format: bool = False, + ) -> Dict[str, Any]: + # update past_key_values + model_kwargs["past_key_values"] = self._extract_past_from_model_output( + outputs, standardize_cache_format=standardize_cache_format + ) + + # update attention mask + if "attention_mask" in model_kwargs: + attention_mask = model_kwargs["attention_mask"] + model_kwargs["attention_mask"] = torch.cat( + [attention_mask, attention_mask.new_ones((attention_mask.shape[0], 1))], dim=-1 + ) + + # update position ids + if "position_ids" in model_kwargs: + position_ids = model_kwargs["position_ids"] + new_position_id = position_ids[..., -1:].clone() + new_position_id += 1 + model_kwargs["position_ids"] = torch.cat( + [position_ids, new_position_id], dim=-1 + ) + + model_kwargs["is_first_forward"] = False + return model_kwargs + + def prepare_inputs_for_generation( + self, + input_ids: torch.LongTensor, + past_key_values: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.Tensor] = None, + use_cache: Optional[bool] = None, + is_first_forward: bool = True, + **kwargs + ) -> dict: + # only last token for input_ids if past is not None + if position_ids is None: + position_ids = self.get_position_ids(input_ids, device=input_ids.device) + if not is_first_forward: + if past_key_values is not None: + position_ids = position_ids[..., -1:] + input_ids = input_ids[:, -1:] + return { + "input_ids": input_ids, + "past_key_values": past_key_values, + "position_ids": position_ids, + "attention_mask": attention_mask, + "return_last_logit": True, + "use_cache": use_cache + } + + def forward( + self, + input_ids: Optional[torch.Tensor] = None, + position_ids: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + past_key_values: Optional[Tuple[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.Tensor] = None, + labels: Optional[torch.Tensor] = None, + use_cache: 
Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + return_last_logit: Optional[bool] = False, + ): + use_cache = use_cache if use_cache is not None else self.config.use_cache + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + transformer_outputs = self.transformer( + input_ids=input_ids, + position_ids=position_ids, + attention_mask=attention_mask, + past_key_values=past_key_values, + inputs_embeds=inputs_embeds, + use_cache=use_cache, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + hidden_states = transformer_outputs[0] + if return_last_logit: + hidden_states = hidden_states[-1:] + lm_logits = self.transformer.output_layer(hidden_states) + lm_logits = lm_logits.transpose(0, 1).contiguous() + + loss = None + if labels is not None: + lm_logits = lm_logits.to(torch.float32) + + # Shift so that tokens < n predict n + shift_logits = lm_logits[..., :-1, :].contiguous() + shift_labels = labels[..., 1:].contiguous() + # Flatten the tokens + loss_fct = CrossEntropyLoss(ignore_index=-100) + loss = loss_fct(shift_logits.view(-1, shift_logits.size(-1)), shift_labels.view(-1)) + + lm_logits = lm_logits.to(hidden_states.dtype) + loss = loss.to(hidden_states.dtype) + + if not return_dict: + output = (lm_logits,) + transformer_outputs[1:] + return ((loss,) + output) if loss is not None else output + + return CausalLMOutputWithPast( + loss=loss, + logits=lm_logits, + past_key_values=transformer_outputs.past_key_values, + hidden_states=transformer_outputs.hidden_states, + attentions=transformer_outputs.attentions, + ) + + @staticmethod + def _reorder_cache( + past: Tuple[Tuple[torch.Tensor, torch.Tensor], ...], beam_idx: torch.LongTensor + ) -> Tuple[Tuple[torch.Tensor, torch.Tensor], ...]: + """ + This function is used to re-order the `past_key_values` cache if [`~PreTrainedModel.beam_search`] or + [`~PreTrainedModel.beam_sample`] is called. This is required to match `past_key_values` with the correct + beam_idx at every generation step. + + Output shares the same memory storage as `past`. 
+ """ + return tuple( + ( + layer_past[0].index_select(1, beam_idx.to(layer_past[0].device)), + layer_past[1].index_select(1, beam_idx.to(layer_past[1].device)), + ) + for layer_past in past + ) + + def process_response(self, output, history): + content = "" + history = deepcopy(history) + for response in output.split("<|assistant|>"): + metadata, content = response.split("\n", maxsplit=1) + if not metadata.strip(): + content = content.strip() + history.append({"role": "assistant", "metadata": metadata, "content": content}) + content = content.replace("[[训练时间]]", "2023年") + else: + history.append({"role": "assistant", "metadata": metadata, "content": content}) + if history[0]["role"] == "system" and "tools" in history[0]: + content = "\n".join(content.split("\n")[1:-1]) + def tool_call(**kwargs): + return kwargs + parameters = eval(content) + content = {"name": metadata.strip(), "parameters": parameters} + else: + content = {"name": metadata.strip(), "content": content} + return content, history + + @torch.inference_mode() + def chat(self, tokenizer, query: str, history: List[Tuple[str, str]] = None, role: str = "user", + max_length: int = 8192, num_beams=1, do_sample=True, top_p=0.8, temperature=0.8, logits_processor=None, + **kwargs): + if history is None: + history = [] + if logits_processor is None: + logits_processor = LogitsProcessorList() + logits_processor.append(InvalidScoreLogitsProcessor()) + gen_kwargs = {"max_length": max_length, "num_beams": num_beams, "do_sample": do_sample, "top_p": top_p, + "temperature": temperature, "logits_processor": logits_processor, **kwargs} + inputs = tokenizer.build_chat_input(query, history=history, role=role) + inputs = inputs.to(self.device) + eos_token_id = [tokenizer.eos_token_id, tokenizer.get_command("<|user|>"), + tokenizer.get_command("<|observation|>")] + outputs = self.generate(**inputs, **gen_kwargs, eos_token_id=eos_token_id) + outputs = outputs.tolist()[0][len(inputs["input_ids"][0]):-1] + response = tokenizer.decode(outputs) + history.append({"role": role, "content": query}) + response, history = self.process_response(response, history) + return response, history + + @torch.inference_mode() + def stream_chat(self, tokenizer, query: str, history: List[Tuple[str, str]] = None, role: str = "user", + past_key_values=None,max_length: int = 8192, do_sample=True, top_p=0.8, temperature=0.8, + logits_processor=None, return_past_key_values=False, **kwargs): + if history is None: + history = [] + if logits_processor is None: + logits_processor = LogitsProcessorList() + logits_processor.append(InvalidScoreLogitsProcessor()) + eos_token_id = [tokenizer.eos_token_id, tokenizer.get_command("<|user|>"), + tokenizer.get_command("<|observation|>")] + gen_kwargs = {"max_length": max_length, "do_sample": do_sample, "top_p": top_p, + "temperature": temperature, "logits_processor": logits_processor, **kwargs} + if past_key_values is None: + inputs = tokenizer.build_chat_input(query, history=history, role=role) + else: + inputs = tokenizer.build_chat_input(query, role=role) + inputs = inputs.to(self.device) + if past_key_values is not None: + past_length = past_key_values[0][0].shape[0] + if self.transformer.pre_seq_len is not None: + past_length -= self.transformer.pre_seq_len + inputs.position_ids += past_length + attention_mask = inputs.attention_mask + attention_mask = torch.cat((attention_mask.new_ones(1, past_length), attention_mask), dim=1) + inputs['attention_mask'] = attention_mask + history.append({"role": role, "content": query}) + for outputs 
in self.stream_generate(**inputs, past_key_values=past_key_values, + eos_token_id=eos_token_id, return_past_key_values=return_past_key_values, + **gen_kwargs): + if return_past_key_values: + outputs, past_key_values = outputs + outputs = outputs.tolist()[0][len(inputs["input_ids"][0]):-1] + response = tokenizer.decode(outputs) + if response and response[-1] != "�": + response, new_history = self.process_response(response, history) + if return_past_key_values: + yield response, new_history, past_key_values + else: + yield response, new_history + + @torch.inference_mode() + def stream_generate( + self, + input_ids, + generation_config: Optional[GenerationConfig] = None, + logits_processor: Optional[LogitsProcessorList] = None, + stopping_criteria: Optional[StoppingCriteriaList] = None, + prefix_allowed_tokens_fn: Optional[Callable[[int, torch.Tensor], List[int]]] = None, + return_past_key_values=False, + **kwargs, + ): + batch_size, input_ids_seq_length = input_ids.shape[0], input_ids.shape[-1] + + if generation_config is None: + generation_config = self.generation_config + generation_config = copy.deepcopy(generation_config) + model_kwargs = generation_config.update(**kwargs) + model_kwargs["use_cache"] = generation_config.use_cache + bos_token_id, eos_token_id = generation_config.bos_token_id, generation_config.eos_token_id + + if isinstance(eos_token_id, int): + eos_token_id = [eos_token_id] + eos_token_id_tensor = torch.tensor(eos_token_id).to(input_ids.device) if eos_token_id is not None else None + + has_default_max_length = kwargs.get("max_length") is None and generation_config.max_length is not None + if has_default_max_length and generation_config.max_new_tokens is None: + warnings.warn( + f"Using `max_length`'s default ({generation_config.max_length}) to control the generation length. " + "This behaviour is deprecated and will be removed from the config in v5 of Transformers -- we" + " recommend using `max_new_tokens` to control the maximum length of the generation.", + UserWarning, + ) + elif generation_config.max_new_tokens is not None: + generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length + if not has_default_max_length: + logger.warn( + f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(=" + f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. " + "Please refer to the documentation for more information. " + "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)", + UserWarning, + ) + + if input_ids_seq_length >= generation_config.max_length: + input_ids_string = "decoder_input_ids" if self.config.is_encoder_decoder else "input_ids" + logger.warning( + f"Input length of {input_ids_string} is {input_ids_seq_length}, but `max_length` is set to" + f" {generation_config.max_length}. This can lead to unexpected behavior. You should consider" + " increasing `max_new_tokens`." + ) + + # 2. 
Set generation parameters if not already defined + logits_processor = logits_processor if logits_processor is not None else LogitsProcessorList() + stopping_criteria = stopping_criteria if stopping_criteria is not None else StoppingCriteriaList() + + logits_processor = self._get_logits_processor( + generation_config=generation_config, + input_ids_seq_length=input_ids_seq_length, + encoder_input_ids=input_ids, + prefix_allowed_tokens_fn=prefix_allowed_tokens_fn, + logits_processor=logits_processor, + ) + + stopping_criteria = self._get_stopping_criteria( + generation_config=generation_config, stopping_criteria=stopping_criteria + ) + logits_warper = self._get_logits_warper(generation_config) + + unfinished_sequences = input_ids.new(input_ids.shape[0]).fill_(1) + scores = None + while True: + model_inputs = self.prepare_inputs_for_generation(input_ids, **model_kwargs) + # forward pass to get next token + outputs = self( + **model_inputs, + return_dict=True, + output_attentions=False, + output_hidden_states=False, + ) + + next_token_logits = outputs.logits[:, -1, :] + + # pre-process distribution + next_token_scores = logits_processor(input_ids, next_token_logits) + next_token_scores = logits_warper(input_ids, next_token_scores) + + # sample + probs = nn.functional.softmax(next_token_scores, dim=-1) + if generation_config.do_sample: + next_tokens = torch.multinomial(probs, num_samples=1).squeeze(1) + else: + next_tokens = torch.argmax(probs, dim=-1) + # update generated ids, model inputs, and length for next step + input_ids = torch.cat([input_ids, next_tokens[:, None]], dim=-1) + model_kwargs = self._update_model_kwargs_for_generation( + outputs, model_kwargs, is_encoder_decoder=self.config.is_encoder_decoder + ) + unfinished_sequences = unfinished_sequences.mul( + next_tokens.tile(eos_token_id_tensor.shape[0], 1).ne(eos_token_id_tensor.unsqueeze(1)).prod(dim=0) + ) + if return_past_key_values: + yield input_ids, outputs.past_key_values + else: + yield input_ids + # stop when each sentence is finished, or if we exceed the maximum length + if unfinished_sequences.max() == 0 or stopping_criteria(input_ids, scores): + break + + def quantize(self, bits: int, empty_init=False, device=None, **kwargs): + if bits == 0: + return + + from .quantization import quantize + + if self.quantized: + logger.info("Already quantized.") + return self + + self.quantized = True + + self.config.quantization_bit = bits + + self.transformer.encoder = quantize(self.transformer.encoder, bits, empty_init=empty_init, device=device, + **kwargs) + return self + + +class ChatGLMForSequenceClassification(ChatGLMPreTrainedModel): + def __init__(self, config: ChatGLMConfig, empty_init=True, device=None): + super().__init__(config) + + self.num_labels = config.num_labels + self.transformer = ChatGLMModel(config, empty_init=empty_init, device=device) + + self.classifier_head = nn.Linear(config.hidden_size, config.num_labels, bias=True, dtype=torch.half) + if config.classifier_dropout is not None: + self.dropout = nn.Dropout(config.classifier_dropout) + else: + self.dropout = None + self.config = config + + if self.config.quantization_bit: + self.quantize(self.config.quantization_bit, empty_init=True) + + def forward( + self, + input_ids: Optional[torch.LongTensor] = None, + position_ids: Optional[torch.LongTensor] = None, + attention_mask: Optional[torch.Tensor] = None, + full_attention_mask: Optional[torch.Tensor] = None, + past_key_values: Optional[Tuple[Tuple[torch.Tensor, torch.Tensor], ...]] = None, + inputs_embeds: 
Optional[torch.LongTensor] = None, + labels: Optional[torch.LongTensor] = None, + use_cache: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ) -> Union[Tuple[torch.Tensor, ...], SequenceClassifierOutputWithPast]: + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + transformer_outputs = self.transformer( + input_ids=input_ids, + position_ids=position_ids, + attention_mask=attention_mask, + full_attention_mask=full_attention_mask, + past_key_values=past_key_values, + inputs_embeds=inputs_embeds, + use_cache=use_cache, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + hidden_states = transformer_outputs[0] + pooled_hidden_states = hidden_states[-1] + if self.dropout is not None: + pooled_hidden_states = self.dropout(pooled_hidden_states) + logits = self.classifier_head(pooled_hidden_states) + + loss = None + if labels is not None: + if self.config.problem_type is None: + if self.num_labels == 1: + self.config.problem_type = "regression" + elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int): + self.config.problem_type = "single_label_classification" + else: + self.config.problem_type = "multi_label_classification" + + if self.config.problem_type == "regression": + loss_fct = MSELoss() + if self.num_labels == 1: + loss = loss_fct(logits.squeeze().float(), labels.squeeze()) + else: + loss = loss_fct(logits.float(), labels) + elif self.config.problem_type == "single_label_classification": + loss_fct = CrossEntropyLoss() + loss = loss_fct(logits.view(-1, self.num_labels).float(), labels.view(-1)) + elif self.config.problem_type == "multi_label_classification": + loss_fct = BCEWithLogitsLoss() + loss = loss_fct(logits.float(), labels.view(-1, self.num_labels)) + + if not return_dict: + output = (logits,) + transformer_outputs[1:] + return ((loss,) + output) if loss is not None else output + + return SequenceClassifierOutputWithPast( + loss=loss, + logits=logits, + past_key_values=transformer_outputs.past_key_values, + hidden_states=transformer_outputs.hidden_states, + attentions=transformer_outputs.attentions, + ) diff --git a/ComfyUI-Kolors-MZ/chatglm3/quantization.py b/ComfyUI-Kolors-MZ/chatglm3/quantization.py new file mode 100644 index 0000000000000000000000000000000000000000..812e0017a791bfdd143662c9717aa5df354cb002 --- /dev/null +++ b/ComfyUI-Kolors-MZ/chatglm3/quantization.py @@ -0,0 +1,211 @@ +from torch.nn import Linear +from torch.nn.parameter import Parameter + +import bz2 +import torch +import base64 +import ctypes +from transformers.utils import logging + +from typing import List +from functools import partial + +logger = logging.get_logger(__name__) + +try: + from cpm_kernels.kernels.base import LazyKernelCModule, KernelFunction, round_up + + import cpm_kernels.library.base + + original_windows_find_lib = cpm_kernels.library.base.windows_find_lib + + def windows_find_lib(name): + result = original_windows_find_lib(name) + if result is not None: + return result + import torch + import os + torch_dir = os.path.dirname(torch.__file__) + torch_lib_dir = os.path.join(torch_dir, "lib") + + for name in os.listdir(torch_lib_dir): + if name.startswith(name) and name.lower().endswith(".dll"): + return os.path.join(torch_lib_dir, name) + + return None + cpm_kernels.library.base.windows_find_lib = windows_find_lib + + class Kernel: + def __init__(self, code: bytes, function_names: List[str]): + self.code = code + 
self._function_names = function_names + self._cmodule = LazyKernelCModule(self.code) + + for name in self._function_names: + setattr(self, name, KernelFunction(self._cmodule, name)) + + quantization_code = "$QlpoOTFBWSZTWU9yuJUAQHN//////////f/n/8/n///n//bt4dTidcVx8X3V9FV/92/v4B7/AD5FBQFAAAChSgKpFCFAFVSigUAAAEKhSgUUqgFBKigqVREQAABQBQIANDTTIGI00BkZBkNGE0A0BkBkGQGRkaNAaAGQNBoGgDIAAYIGTI0DQAQAaGmmQMRpoDIyDIaMJoBoDIDIMgMjI0aA0AMgaDQNAGQAAwQMmRoGgAgA0NNMgYjTQGRkGQ0YTQDQGQGQZAZGRo0BoAZA0GgaAMgABggZMjQNABABoaaZAxGmgMjIMhowmgGgMgMgyAyMjRoDQAyBoNA0AZAADBAyZGgaAAmqU1NEgJqnptU/Sn4jRR6J6epk2pqb1Q/SgAPUGgyNNGjQ2SBpoAZAAGg0NB6mgDIAAAAA2oaApSREBNAARhGiYEaEwU8pvImlP0k2aam1GaGqbFNM1MHpTwmkepmyU9R6nqPKekHqNNPUxNGhp6n6p6QaZ6o9TG1GMqcoV9ly6nRanHlq6zPNbnGZNi6HSug+2nPiZ13XcnFYZW+45W11CumhzYhchOJ2GLLV1OBjBjGf4TptOddTSOcVxhqYZMYwZXZZY00zI1paX5X9J+b+f4e+x43RXSxXPOdquiGpduatGyXneN696M9t4HU2eR5XX/kPhP261NTx3JO1Ow7LyuDmeo9a7d351T1ZxnvnrvYnrXv/hXxPCeuYx2XsNmO003eg9J3Z6U7b23meJ4ri01OdzTk9BNO96brz+qT5nuvvH3ds/G+m/JcG/F2XYuhXlvO+jP7U3XgrzPN/lr8Sf1n6j4j7jZs+s/T0tNaNNYzTs12rxjwztHlnire3Nzc3N1wuBwOBwXBvZfoHpD7rFmR99V5vj3aXza3xdBbXMalubTg/jIv5dfAi54Pdc75j4z412n3Npj3Ld/ENm7a3b/Cod6h/ret1/5vn/C+l+gdslMvgPSLJ8d8q+U66fevYn/tW1chleEtNTGlcHCbLRlq0tHzF5tsbbZZfHjjLgZu42XCuC3NrdjTasZGNzgxPIrGqp7r3p7L2p5XjnpPSmTd5XtzqnB6U87zzg1Ol0zd0zsLszxR6lkxp35u6/teL0L0W922cR7Lu1lpL9CsHirzuM2T+BgsyViT6LHcm0/Vr6U/7LGGyJeqTEjt0PHWhF5mCT7R9mtlDwriYv0Tyr/OxYt6qp5r0mPVT0608TqnqMZaarU2nFwrTzzlrs1ed7z1ux60wyr4ydCaTi3enW8x68x0zU7tXSlcmPSW1mGpWJMg4zmPC2lK96tp0OE80y4MfEvnZj8zGluR6b22ki1Ou9V2nCd9xovcPvcYMZYy0lvN60ScZ45vN6yeCeeXFb1lVjnnCar5fwXwE2bzJ4HI1XVPXfXZMm44GUsMpYsmLB65TuVdm0cl0b+i/wGNN66XjeV7zuPpHcnK/juhhjdfId5jMdE5nN0dGmmm2zZs2cexD5n9p/dY352XsvXHaZNWWsmmS1atjR452nYudzvqv2HMRyvNNnlMcDl3R2+yx2uVrBubTW9icHDVtbNXlZm7jma1rM4VurZZd2y6nUau7ZXZ7bVU+mnoOVxZGMrVmvX60605JwmzGZhhhjTWtaaaMaaGTGmNMZasY0iX8VMUl8eepaIrzGSpemWOQyZORk2bNpjUybMmxqYmknCGCFynutfksaZpjTNMaaatM0xsxcGR0sociNqxNSmhhR1ZJPbsn8qyF0t2qH6iYBclclalbtTTcHTDsPaX6rlnElph2Jyumumtynv2Kk8GI7rsvXbIcJgHJOSaSXnnGaI3m87RtVXJOZ/YtgdTE6Wpha6ZlE8ayXkef1fh602r2WwvfMXtMdLlkfnLFdYYwYso+bWqm7yJqHXZGw2nrS5ZanSYnWlxBxMF1V940K2wdrI7R6OYf7DGGamMmTSbRhlS45xmVOumF1EyPCmHrrN8wwZOOrdNtLeMtzFzDlWnfTBxMk2NaXIZHBYxYLD4w8yju0ao65Vz1OIXoS9dLanwCe1PWrYuWMqf1if1z2k2yYfKJ741PDgno1ZQ8DRqvUny3mNoWTzGO6m1DkrJI8JiR5cSd+vZdGOO8nrMoc5+NDUFsMSXaZJeNlMmGLtJsovOsUp7I9S5VojKxF6bTVEelXqlfJobQr3LozSh2Jk7VcrVMfhXqszGWMzNqGhqZY0OadxkyyMssKugZR0KNFXBHlqwmJgTE/BNVMk6ItJXZMR0H47GpXv/DMOvNkmVuaV1PRfEdxuqc7Hcd+ZV/zTLaRxWk0nl9CdCeM6mn5rstHIBcpiuwmUZXeq81DacHI2rmrZ5SuE5mOZd6LQrZg9mx32TprA8BMo5jKN6yLTCi3WzQaZSuhzTtM1fUTGVpG8Tw+KXI0tjEpiWxtLYynOlktSbVlaI5kxP8TDH8kx50xoxi5KcA4pcja8KWLRlO/Ks6q06ergnvm1ca3Tq8Uw7LTUsmWyctXPWmpitl/uvGcWTGXGuAXDfhqazGmjkxcJW5hMMMMpYsXl2TZYtVOddG3XCarUt6Ptq9CZXSNzyuRzqRZOjsxdBbFVz6OA5HI43r1jityVlVpVkxmOsyaYWE1NTGq1sOVh36mHMcxtSvcy70edG0ZGR3I1Go1GRlV7mWWo1G0ZGRqlvH40l7o4m5xMWLLLYyNjnqc8556mdPqLJ31n/1nWOncxzG1tizrHs/Z+d2vP/B/l8wdJ6rHUn2nbbDq4p6htFtYzMMMTaZis1K5GKzGNmxhmUx2DDlZ/qNnIx41xnaMfCZWYaZWtNLTNW8ND4Fw1MyZOCdM428suKG1ehW8TesOydg7J+YYcD4cYR+8dFK6M4E3HM9ZfRNNL+Sn6rsl4DsrDl2HpPCnfxjGXtbZtYys1ttlyJ4T+BvexjGWRjMszK4Jpc77D3GyuVD7q0+G8m9G+2+rGm7cOR2y7FdtY2XUYx/oNlfRYxhMYyYZkyyg55enna9Kt/FFi6GMMwYwdwxWgxGMLKYmUyGExTKMZkMFhkymKuh0NOBNnBu+23LdwDoZYYzGGMxtORaTU1pjTGWTTGGtMrNWUsyyTTLLG1qy2ZjbK2DBllWqxMtBMaYZQmcE7zvvRcTkclUwdkxTaSdyySt/7fpL+T1v516Ji97fwr5JbLu305zMn5+GMTTZ9F+y7ExwmGVfG44yxn3dLv6l5i+Wth1jCrDq21nW9LqvvDzz3Vf3LLH/O/32TJ/erx3bXftO4eF+G956D952K/An4NfvOpjFjExjevP/UmE0fIoZXx6/w6lX/no3D0bLt+ixjieBM6ksRd0yB4Lt2SwYNE+gd1detlZWUnpiZf
GfFaK+4PyCa/v18V8X75pe9fLXzp7l3VjF76vWZmHwGz1IZNWT7b8yddJ4q5kyrVdfru6atWc7bVYztL9Jf4GXvT+Y8m9/YsXP6H018a8D4XVOqvfzqeR+6yZOD8dPv0+U7/q5Pl+2dNb0MjzGVH5p6MNQ7cOWvw62U9aHE8DprDek+McLyvDz+te+9Zhq5+YTruufMcWMabqysTmZVWjKPfnK0wyVcrsuhjZRdLkHNvD72b9abriOSGIxiLixMOoalNPXzy+wT/tf+U6HHONfsz+xe8ufHBdQWWGWLA9if0rsnmrxK5LvRZQeWsTCsrmOYy8VteVfuRfcVTtDLItLIsMYxZLdU/DbtSemxF6Z6Zo5WBXE4tFdCyVMMXMTEMZXVlS6Xec2T4e0tHsRcEuWshcJ2YsNF5rUx1E8ifCq6Z+ZP7qdCeu/aTwFd53l16/o0NOw6O3dLavP4Hbi4RdmuDk6DoYaninC0+o4uZjbJ7Rxeu0/FbuFg+q7DVS6fQe0rZ6NDGUNNU6DEqOaLTicKnYZMnBWruljQxoaS3dZhocDge0bSTyOvdAbG5hxe2xji7E/L55xX13wWNDi6HCekcFxfCPGxY0MXC+s7afWaMdDyjyr+o8Rudm/NabOZvdl274zH4f5XK9z6On1Pe/K5TdPAslg77BjuO6Y3eO7GqvOPG/stknp1leyvLL0Z7bl9I4noMvLkzytLhWYzrOZzLXCORe028rORzOg4N/L0HlMOQ3Pgmnbb6KczlabORpu980q37TBqRu0/p3PO6234Bl03Ynuz+9W7gnsEcmvYaYY3aMYY0wx3pYd+ujsXauWdaY5Xkbtl23fPzFHiDB/QMo0yFjBllYxTQYYyxkrwn7JufwJ/PfgJ+C83X69ni6zvXcnyXabv0ncbLwsceS+RNlyN2mnneJtX0ngYO0+e+0+UnA+Wch3ji8hj5an4h+i6XBySU4n+R0roVcbw5yvHrmr4Yw8Y7x6c+9POPYHI5HI5HI5HI5HGXGww4nE4nrVyOR8XeqPEO7PLOiukYa3Novk5hV4cdtYZLI93e+uxff2jRo0aNGjRo0aNG1bVtW1dy3m83m8+tQ5ZzHw3nObwOu8La9Rc1dtkdS8A3eTk823tnktXWlxN6Oixe06zrN70Isd9jiOgZFq9yfkPqP/SLhN2Myl8jDM43bl1nbcb4cO57jlh8Jow6pzXZdL4dyODTuuhu77FyO27DdwdRxmvO+O+3N2+BdqyTwLHVczDVY4UPE4O66/ZO2cx1LFzVdSXtF7G4HMbrauOHRw6c8FdZ5m9fHZHYZXfTlZquyynSyTTKke6vcffSD9pzPA/G7n7jxPmuhc1DHMynPMrGL6AdewYmwu5ko+UUyTwrMv27rPH1v1nGqd87+p6N6LU8k3NEng53xXyHS97+44OSg/sy/hn+Se6yfYNjW0/uTgP+PvWYzLMmjhcLB/gGpri6H83/84eUXWT6T9Hsv7785z/7z4icpW+zfXypuR7rx/gMdZb1/wC678pcs8/2a3mDitGHxl9mfPlll5MafWWqxk/eYuTDgcNMzDGWLWvsuglNxs53GtN6uWpktlW1tZZYcuinMMWmnNnJydze3b2Y1McBxrBkXw799izLMZZYyy0TkbsGM4p03S2uVu5s/XXUdSdec6smVxZYYGpVmT8A+8ajuEyV5FatkvVru2x6uxGXXbH4A+jvgP4GMYy3iPLXzq/6z65+E005ey+cwMZD3fZcqc6xpjTFjQ0P3U+e++cPYmTIwj0nrK5NPTfl3WvpfLtXDcb2HQMudYOxFXQBor4L4T6vrOauFctYXJQ++NUWmJe5bmx1jDiZS1dTqWxo4GR8jm3fttpmPHppk9PEyv4/y8/sO07XacOmcqc0x2Vi9BvNJvN5oW8x4mOsydpidRxMYJPx06m1bqPzq9KtK8sxXNXFodD/+MYYaJTLwOhc9brCsV18oOR1i4tXChyTkq4lf4y1Ke+9axjDHqs1mfBbMXuP4Hzi+X7t8vzv7bHerrUPgPCxhjre4fXdfLNtNM+Jd+Zdh8xd8wP87uNPoPgv4W7/5P2BuxfsMabNnMnza+54Pdi5U671GPZY8CehX8Voeoo7FHpkeEc6715FwHZrIrUrHaviPUbPZHND+IhczrP6FcYvhOZ0Di/ETt0OI+YwNWR9r7tpf6WDeZKZDB1+z2IthOl1mPyb5FluvEx9h9d0NnM0Y1XPFkWIsk1WotJ0PBMmkvjvQTd0e71tfeV+8r8lQ/tpzpsmxJ+InrI/dj2UajUajVTUajatRqNRtGo1Go1Go4wjeMpZFMVV9CHbofPraLsJ3JpWV2XOoanCuFky4y3PPNxucK2uKC1Lbdb1eo+m5XomN6HfeZsabHLHRX/K+offtNGGmHWctcVcG44MdSqsOLY9VzX+Zxfxn2HPdWTpzWvkrtJ8M5zorrKcquRytJ5N5DZmcaW02l76nWO+BqPXm1A2Ry/0q71dH/mqrqeFjkYxjEXtsX8qubTk67rGycyqsdm4tZx5D6D5hhi0waaWmiaMP81Yjii5qxPlPuU/GfTL1Y5E6Jyfiq63qTa39A4J0sOGDgO9WF9bOXl0XfPRbsY2bPNKPy1YrFYrFYmRhhlTIyMjJWJYZHXuCXI8OoXsvfljGLFicNifpp2XunoPiG1wtx3p1Tah+/DD66OnVtVXP9rKbVxOnL0tR/rHtqB5UDErUVcl11D4qqvjpOcxX7armUNJB3LpW6bxVvD08e8h3odKKvyCFZBdSh2FVcST9xV3n3T8t1j7Kr9qgrqXg+13Pt5U7JCvFXVIV1YG5lRhkVYZJYYDDD4KOIMoHCp26WS8GB7uBh2zIdgq/PKyInjV2STShuoapUdCpX1yTwqq/z1VvET7Kh5nVPkO8YyxjLt2MaaMmWTLQvx3qnzltnXW0p2jxgbEtSny/Osv8Y9pLMXYoHVPAhkVdWVeODhR6q9/Sxe2liwwZWMVvFXfRkeIDxAePUPIrdJ4ey6yquzH+PD/bUOWAu05qVHtFd8rrKHSoeNIOUqrYr3FXyToqfYJgwmJdKpXXOwYYegNNGMzfZPp/t3t/DVs4zjNTN61rRqaWaa4NYbRjTa0tWwy2Y2tGN8ZO8ofNKq4j9SL7I+cSm4/6ovLV5HNXLI0jJidwrtk6ynCaP6Z++GjRlWS3tLeW129Mi9evxU9mtz6s5J3Z7M2ngTgnKvmpomxpaLCzPfmx0JWE+m3NLDDGOX47RctdYYNK5jakdqLkRlI39n590T5zctGSwwZZDJj6kW8XSi6ot2MmWWJ0DUT3nuvebBudScjZ79g8cWJ8av0k+/bE5WKd5MdbFpbDVMxu1DVMmtNZGJvq1mtRbn6M+g/kP0FwDwr7quZs7xosNGpbscyxhhd9TyJyFwbLcxlTasg75vW7TsV5K7ji44XPMMrdoj+Y3rT0Hie62nlYV/pwczzOmdLqLhYkzGMzCZWGMQzGMSsZYY6Di1t4nlJ+Em63mJxrVLxPbYxNEdgc1dU2iOKyoYYWjNrEeHTYybVk0atSa7
ehuwsWMWTqn1TrnS6hYsi71d1+s+k+ic70e20fzE/VaTdxT9ZtU4GIXdeNx3X77guYYfpHeTQjaMX6brOu4OY4K7Y2d9mbHarI5ox3p4GpJ2Vd/Tst60f7j999pppjR+Q/Qf8J/VaORs3cji7FfFuN61+ui9s8hix1OCh5KGVV23BPXvZfz3CLyHpix+exi8z/KnCnosY2eunor+cxyPO/xJ0vKey9OvE9VjqaYu0x3Z3jd6o2b1T12D+F8l232lwaaacD5LE8LBxu7WTlbWraWpew8Xexjel3E+wWD4APITdNqR8F3R3T0lunCQ4GaE9R37DxeCYfcHi4xci5ovKfxVs55y2hf+65E/Xdp6jR5nrebTmi5incpkyOjs50JvrZwstbbW6kfuuQw+2mykf/EXNFzxfKTrxew929TR6bWnGL//F3JFOFCQT3K4lQ" + + kernels = Kernel( + bz2.decompress(base64.b64decode(quantization_code)), + [ + "int4WeightCompression", + "int4WeightExtractionFloat", + "int4WeightExtractionHalf", + "int8WeightExtractionFloat", + "int8WeightExtractionHalf", + ], + ) +except Exception as exception: + kernels = None + logger.warning("Failed to load cpm_kernels:" + str(exception)) + + +class W8A16Linear(torch.autograd.Function): + @staticmethod + def forward(ctx, inp: torch.Tensor, quant_w: torch.Tensor, scale_w: torch.Tensor, weight_bit_width): + ctx.inp_shape = inp.size() + ctx.weight_bit_width = weight_bit_width + out_features = quant_w.size(0) + inp = inp.contiguous().view(-1, inp.size(-1)) + weight = extract_weight_to_half(quant_w, scale_w, weight_bit_width) + ctx.weight_shape = weight.size() + output = inp.mm(weight.t()) + ctx.save_for_backward(inp, quant_w, scale_w) + return output.view(*(ctx.inp_shape[:-1] + (out_features,))) + + @staticmethod + def backward(ctx, grad_output: torch.Tensor): + inp, quant_w, scale_w = ctx.saved_tensors + weight = extract_weight_to_half(quant_w, scale_w, ctx.weight_bit_width) + grad_output = grad_output.contiguous().view(-1, weight.size(0)) + grad_input = grad_output.mm(weight) + grad_weight = grad_output.t().mm(inp) + return grad_input.view(ctx.inp_shape), grad_weight.view(ctx.weight_shape), None, None + + +def compress_int4_weight(weight: torch.Tensor): # (n, m) + with torch.cuda.device(weight.device): + n, m = weight.size(0), weight.size(1) + assert m % 2 == 0 + m = m // 2 + out = torch.empty(n, m, dtype=torch.int8, device="cuda") + stream = torch.cuda.current_stream() + + gridDim = (n, 1, 1) + blockDim = (min(round_up(m, 32), 1024), 1, 1) + + kernels.int4WeightCompression( + gridDim, + blockDim, + 0, + stream, + [ctypes.c_void_p(weight.data_ptr()), ctypes.c_void_p(out.data_ptr()), ctypes.c_int32(n), ctypes.c_int32(m)], + ) + return out + + +def extract_weight_to_half(weight: torch.Tensor, scale_list: torch.Tensor, source_bit_width: int): + assert scale_list.dtype in [torch.half, torch.bfloat16] + assert weight.dtype in [torch.int8] + if source_bit_width == 8: + return weight.to(scale_list.dtype) * scale_list[:, None] + elif source_bit_width == 4: + func = ( + kernels.int4WeightExtractionHalf if scale_list.dtype == torch.half else kernels.int4WeightExtractionBFloat16 + ) + else: + assert False, "Unsupported bit-width" + + with torch.cuda.device(weight.device): + n, m = weight.size(0), weight.size(1) + out = torch.empty(n, m * (8 // source_bit_width), dtype=scale_list.dtype, device="cuda") + stream = torch.cuda.current_stream() + + gridDim = (n, 1, 1) + blockDim = (min(round_up(m, 32), 1024), 1, 1) + + func( + gridDim, + blockDim, + 0, + stream, + [ + ctypes.c_void_p(weight.data_ptr()), + ctypes.c_void_p(scale_list.data_ptr()), + ctypes.c_void_p(out.data_ptr()), + ctypes.c_int32(n), + ctypes.c_int32(m), + ], + ) + return out + + +class QuantizedLinear(torch.nn.Module): + def __init__(self, weight_bit_width: int, weight, bias=None, device="cpu", dtype=None, empty_init=False, *args, + **kwargs): + super().__init__() + self.weight_bit_width = 
weight_bit_width + + shape = weight.shape + + if weight is None or empty_init: + self.weight = torch.empty(shape[0], shape[1] * weight_bit_width // 8, dtype=torch.int8, device=device) + self.weight_scale = torch.empty(shape[0], dtype=dtype, device=device) + else: + self.weight_scale = weight.abs().max(dim=-1).values / ((2 ** (weight_bit_width - 1)) - 1) + self.weight = torch.round(weight / self.weight_scale[:, None]).to(torch.int8) + if weight_bit_width == 4: + self.weight = compress_int4_weight(self.weight) + + try: + self.weight = Parameter(self.weight.to(device), requires_grad=False) + except: + self.weight.to(device, dtype=self.weight.dtype) + self.weight_scale = Parameter(self.weight_scale.to(device), requires_grad=False) + self.bias = Parameter(bias.to(device), requires_grad=False) if bias is not None else None + + def forward(self, input): + output = W8A16Linear.apply(input, self.weight, self.weight_scale, self.weight_bit_width) + if self.bias is not None: + output = output + self.bias + return output + + +def quantize(model, weight_bit_width, empty_init=False, device=None): + """Replace fp16 linear with quantized linear""" + for layer in model.layers: + layer.self_attention.query_key_value = QuantizedLinear( + weight_bit_width=weight_bit_width, + weight=layer.self_attention.query_key_value.weight.to(torch.cuda.current_device()), + bias=layer.self_attention.query_key_value.bias, + dtype=layer.self_attention.query_key_value.weight.dtype, + device=layer.self_attention.query_key_value.weight.device if device is None else device, + empty_init=empty_init + ) + layer.self_attention.dense = QuantizedLinear( + weight_bit_width=weight_bit_width, + weight=layer.self_attention.dense.weight.to(torch.cuda.current_device()), + bias=layer.self_attention.dense.bias, + dtype=layer.self_attention.dense.weight.dtype, + device=layer.self_attention.dense.weight.device if device is None else device, + empty_init=empty_init + ) + layer.mlp.dense_h_to_4h = QuantizedLinear( + weight_bit_width=weight_bit_width, + weight=layer.mlp.dense_h_to_4h.weight.to(torch.cuda.current_device()), + bias=layer.mlp.dense_h_to_4h.bias, + dtype=layer.mlp.dense_h_to_4h.weight.dtype, + device=layer.mlp.dense_h_to_4h.weight.device if device is None else device, + empty_init=empty_init + ) + layer.mlp.dense_4h_to_h = QuantizedLinear( + weight_bit_width=weight_bit_width, + weight=layer.mlp.dense_4h_to_h.weight.to(torch.cuda.current_device()), + bias=layer.mlp.dense_4h_to_h.bias, + dtype=layer.mlp.dense_4h_to_h.weight.dtype, + device=layer.mlp.dense_4h_to_h.weight.device if device is None else device, + empty_init=empty_init + ) + + return model diff --git a/ComfyUI-Kolors-MZ/chatglm3/tokenization_chatglm.py b/ComfyUI-Kolors-MZ/chatglm3/tokenization_chatglm.py new file mode 100644 index 0000000000000000000000000000000000000000..50e44b05e4b3e54d2f1c3f0cab8247ea53a7d4e5 --- /dev/null +++ b/ComfyUI-Kolors-MZ/chatglm3/tokenization_chatglm.py @@ -0,0 +1,300 @@ +import json +import os +import re +from typing import List, Optional, Union, Dict +from sentencepiece import SentencePieceProcessor +from transformers import PreTrainedTokenizer +from transformers.utils import logging, PaddingStrategy +from transformers.tokenization_utils_base import EncodedInput, BatchEncoding + + +class SPTokenizer: + def __init__(self, model_path: str): + # reload tokenizer + assert os.path.isfile(model_path), model_path + self.sp_model = SentencePieceProcessor(model_file=model_path) + + # BOS / EOS token IDs + self.n_words: int = self.sp_model.vocab_size() + 
self.bos_id: int = self.sp_model.bos_id() + self.eos_id: int = self.sp_model.eos_id() + self.pad_id: int = self.sp_model.unk_id() + assert self.sp_model.vocab_size() == self.sp_model.get_piece_size() + + role_special_tokens = ["<|system|>", "<|user|>", "<|assistant|>", "<|observation|>"] + special_tokens = ["[MASK]", "[gMASK]", "[sMASK]", "sop", "eop"] + role_special_tokens + self.special_tokens = {} + self.index_special_tokens = {} + for token in special_tokens: + self.special_tokens[token] = self.n_words + self.index_special_tokens[self.n_words] = token + self.n_words += 1 + self.role_special_token_expression = "|".join([re.escape(token) for token in role_special_tokens]) + + def tokenize(self, s: str, encode_special_tokens=False): + if encode_special_tokens: + last_index = 0 + t = [] + for match in re.finditer(self.role_special_token_expression, s): + if last_index < match.start(): + t.extend(self.sp_model.EncodeAsPieces(s[last_index:match.start()])) + t.append(s[match.start():match.end()]) + last_index = match.end() + if last_index < len(s): + t.extend(self.sp_model.EncodeAsPieces(s[last_index:])) + return t + else: + return self.sp_model.EncodeAsPieces(s) + + def encode(self, s: str, bos: bool = False, eos: bool = False) -> List[int]: + assert type(s) is str + t = self.sp_model.encode(s) + if bos: + t = [self.bos_id] + t + if eos: + t = t + [self.eos_id] + return t + + def decode(self, t: List[int]) -> str: + text, buffer = "", [] + for token in t: + if token in self.index_special_tokens: + if buffer: + text += self.sp_model.decode(buffer) + buffer = [] + text += self.index_special_tokens[token] + else: + buffer.append(token) + if buffer: + text += self.sp_model.decode(buffer) + return text + + def decode_tokens(self, tokens: List[str]) -> str: + text = self.sp_model.DecodePieces(tokens) + return text + + def convert_token_to_id(self, token): + """ Converts a token (str) in an id using the vocab. 
""" + if token in self.special_tokens: + return self.special_tokens[token] + return self.sp_model.PieceToId(token) + + def convert_id_to_token(self, index): + """Converts an index (integer) in a token (str) using the vocab.""" + if index in self.index_special_tokens: + return self.index_special_tokens[index] + if index in [self.eos_id, self.bos_id, self.pad_id] or index < 0: + return "" + return self.sp_model.IdToPiece(index) + + +class ChatGLMTokenizer(PreTrainedTokenizer): + vocab_files_names = {"vocab_file": "tokenizer.model"} + + model_input_names = ["input_ids", "attention_mask", "position_ids"] + + def __init__(self, vocab_file, padding_side="left", clean_up_tokenization_spaces=False, encode_special_tokens=False, + **kwargs): + self.name = "GLMTokenizer" + + self.vocab_file = vocab_file + self.tokenizer = SPTokenizer(vocab_file) + self.special_tokens = { + "": self.tokenizer.bos_id, + "": self.tokenizer.eos_id, + "": self.tokenizer.pad_id + } + self.encode_special_tokens = encode_special_tokens + super().__init__(padding_side=padding_side, clean_up_tokenization_spaces=clean_up_tokenization_spaces, + encode_special_tokens=encode_special_tokens, + **kwargs) + + def get_command(self, token): + if token in self.special_tokens: + return self.special_tokens[token] + assert token in self.tokenizer.special_tokens, f"{token} is not a special token for {self.name}" + return self.tokenizer.special_tokens[token] + + @property + def unk_token(self) -> str: + return "" + + @property + def pad_token(self) -> str: + return "" + + @property + def pad_token_id(self): + return self.get_command("") + + @property + def eos_token(self) -> str: + return "" + + @property + def eos_token_id(self): + return self.get_command("") + + @property + def vocab_size(self): + return self.tokenizer.n_words + + def get_vocab(self): + """ Returns vocab as a dict """ + vocab = {self._convert_id_to_token(i): i for i in range(self.vocab_size)} + vocab.update(self.added_tokens_encoder) + return vocab + + def _tokenize(self, text, **kwargs): + return self.tokenizer.tokenize(text, encode_special_tokens=self.encode_special_tokens) + + def _convert_token_to_id(self, token): + """ Converts a token (str) in an id using the vocab. """ + return self.tokenizer.convert_token_to_id(token) + + def _convert_id_to_token(self, index): + """Converts an index (integer) in a token (str) using the vocab.""" + return self.tokenizer.convert_id_to_token(index) + + def convert_tokens_to_string(self, tokens: List[str]) -> str: + return self.tokenizer.decode_tokens(tokens) + + def save_vocabulary(self, save_directory, filename_prefix=None): + """ + Save the vocabulary and special tokens file to a directory. + + Args: + save_directory (`str`): + The directory in which to save the vocabulary. + filename_prefix (`str`, *optional*): + An optional prefix to add to the named of the saved files. + + Returns: + `Tuple(str)`: Paths to the files saved. 
+ """ + if os.path.isdir(save_directory): + vocab_file = os.path.join( + save_directory, self.vocab_files_names["vocab_file"] + ) + else: + vocab_file = save_directory + + with open(self.vocab_file, 'rb') as fin: + proto_str = fin.read() + + with open(vocab_file, "wb") as writer: + writer.write(proto_str) + + return (vocab_file,) + + def get_prefix_tokens(self): + prefix_tokens = [self.get_command("[gMASK]"), self.get_command("sop")] + return prefix_tokens + + def build_single_message(self, role, metadata, message): + assert role in ["system", "user", "assistant", "observation"], role + role_tokens = [self.get_command(f"<|{role}|>")] + self.tokenizer.encode(f"{metadata}\n") + message_tokens = self.tokenizer.encode(message) + tokens = role_tokens + message_tokens + return tokens + + def build_chat_input(self, query, history=None, role="user"): + if history is None: + history = [] + input_ids = [] + for item in history: + content = item["content"] + if item["role"] == "system" and "tools" in item: + content = content + "\n" + json.dumps(item["tools"], indent=4, ensure_ascii=False) + input_ids.extend(self.build_single_message(item["role"], item.get("metadata", ""), content)) + input_ids.extend(self.build_single_message(role, "", query)) + input_ids.extend([self.get_command("<|assistant|>")]) + return self.batch_encode_plus([input_ids], return_tensors="pt", is_split_into_words=True) + + def build_inputs_with_special_tokens( + self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None + ) -> List[int]: + """ + Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and + adding special tokens. A BERT sequence has the following format: + + - single sequence: `[CLS] X [SEP]` + - pair of sequences: `[CLS] A [SEP] B [SEP]` + + Args: + token_ids_0 (`List[int]`): + List of IDs to which the special tokens will be added. + token_ids_1 (`List[int]`, *optional*): + Optional second list of IDs for sequence pairs. + + Returns: + `List[int]`: List of [input IDs](../glossary#input-ids) with the appropriate special tokens. + """ + prefix_tokens = self.get_prefix_tokens() + token_ids_0 = prefix_tokens + token_ids_0 + if token_ids_1 is not None: + token_ids_0 = token_ids_0 + token_ids_1 + [self.get_command("")] + return token_ids_0 + + def _pad( + self, + encoded_inputs: Union[Dict[str, EncodedInput], BatchEncoding], + max_length: Optional[int] = None, + padding_strategy: PaddingStrategy = PaddingStrategy.DO_NOT_PAD, + pad_to_multiple_of: Optional[int] = None, + return_attention_mask: Optional[bool] = None, + ) -> dict: + """ + Pad encoded inputs (on left/right and up to predefined length or max length in the batch) + + Args: + encoded_inputs: + Dictionary of tokenized inputs (`List[int]`) or batch of tokenized inputs (`List[List[int]]`). + max_length: maximum length of the returned list and optionally padding length (see below). + Will truncate by taking into account the special tokens. + padding_strategy: PaddingStrategy to use for padding. + + - PaddingStrategy.LONGEST Pad to the longest sequence in the batch + - PaddingStrategy.MAX_LENGTH: Pad to the max length (default) + - PaddingStrategy.DO_NOT_PAD: Do not pad + The tokenizer padding sides are defined in self.padding_side: + + - 'left': pads on the left of the sequences + - 'right': pads on the right of the sequences + pad_to_multiple_of: (optional) Integer if set will pad the sequence to a multiple of the provided value. 
+ This is especially useful to enable the use of Tensor Core on NVIDIA hardware with compute capability + `>= 7.5` (Volta). + return_attention_mask: + (optional) Set to False to avoid returning attention mask (default: set to model specifics) + """ + # Load from model defaults + assert self.padding_side == "left" + + required_input = encoded_inputs[self.model_input_names[0]] + seq_length = len(required_input) + + if padding_strategy == PaddingStrategy.LONGEST: + max_length = len(required_input) + + if max_length is not None and pad_to_multiple_of is not None and (max_length % pad_to_multiple_of != 0): + max_length = ((max_length // pad_to_multiple_of) + 1) * pad_to_multiple_of + + needs_to_be_padded = padding_strategy != PaddingStrategy.DO_NOT_PAD and len(required_input) != max_length + + # Initialize attention mask if not present. + if "attention_mask" not in encoded_inputs: + encoded_inputs["attention_mask"] = [1] * seq_length + + if "position_ids" not in encoded_inputs: + encoded_inputs["position_ids"] = list(range(seq_length)) + + if needs_to_be_padded: + difference = max_length - len(required_input) + + if "attention_mask" in encoded_inputs: + encoded_inputs["attention_mask"] = [0] * difference + encoded_inputs["attention_mask"] + if "position_ids" in encoded_inputs: + encoded_inputs["position_ids"] = [0] * difference + encoded_inputs["position_ids"] + encoded_inputs[self.model_input_names[0]] = [self.pad_token_id] * difference + required_input + + return encoded_inputs diff --git a/ComfyUI-Kolors-MZ/clip_vit_336/config.json b/ComfyUI-Kolors-MZ/clip_vit_336/config.json new file mode 100644 index 0000000000000000000000000000000000000000..2458615a6fa10c29155bfa241388525435e314f0 --- /dev/null +++ b/ComfyUI-Kolors-MZ/clip_vit_336/config.json @@ -0,0 +1,19 @@ +{ + "attention_dropout": 0.0, + "dropout": 0.0, + "hidden_act": "quick_gelu", + "hidden_size": 1024, + "image_size": 336, + "initializer_factor": 1.0, + "initializer_range": 0.02, + "intermediate_size": 4096, + "layer_norm_eps": 1e-05, + "model_type": "clip_vision_model", + "num_attention_heads": 16, + "num_channels": 3, + "num_hidden_layers": 24, + "patch_size": 14, + "projection_dim": 768, + "torch_dtype": "float32" +} + diff --git a/ComfyUI-Kolors-MZ/configs/text_encoder_config.json b/ComfyUI-Kolors-MZ/configs/text_encoder_config.json new file mode 100644 index 0000000000000000000000000000000000000000..c6e19300822b25ae0a07125bbc171c6581dbeda4 --- /dev/null +++ b/ComfyUI-Kolors-MZ/configs/text_encoder_config.json @@ -0,0 +1,42 @@ +{ + "_name_or_path": "THUDM/chatglm3-6b-base", + "model_type": "chatglm", + "architectures": [ + "ChatGLMModel" + ], + "auto_map": { + "AutoConfig": "configuration_chatglm.ChatGLMConfig", + "AutoModel": "modeling_chatglm.ChatGLMForConditionalGeneration", + "AutoModelForCausalLM": "modeling_chatglm.ChatGLMForConditionalGeneration", + "AutoModelForSeq2SeqLM": "modeling_chatglm.ChatGLMForConditionalGeneration", + "AutoModelForSequenceClassification": "modeling_chatglm.ChatGLMForSequenceClassification" + }, + "add_bias_linear": false, + "add_qkv_bias": true, + "apply_query_key_layer_scaling": true, + "apply_residual_connection_post_layernorm": false, + "attention_dropout": 0.0, + "attention_softmax_in_fp32": true, + "bias_dropout_fusion": true, + "ffn_hidden_size": 13696, + "fp32_residual_connection": false, + "hidden_dropout": 0.0, + "hidden_size": 4096, + "kv_channels": 128, + "layernorm_epsilon": 1e-05, + "multi_query_attention": true, + "multi_query_group_num": 2, + "num_attention_heads": 32, + 
"num_layers": 28, + "original_rope": true, + "padded_vocab_size": 65024, + "post_layer_norm": true, + "rmsnorm": true, + "seq_length": 32768, + "use_cache": true, + "torch_dtype": "float16", + "transformers_version": "4.30.2", + "tie_word_embeddings": false, + "eos_token_id": 2, + "pad_token_id": 0 +} \ No newline at end of file diff --git a/ComfyUI-Kolors-MZ/configs/tokenizer/tokenizer.model b/ComfyUI-Kolors-MZ/configs/tokenizer/tokenizer.model new file mode 100644 index 0000000000000000000000000000000000000000..8a8007697b7cc3d3868dcffbbebf8c1f2bd690ba --- /dev/null +++ b/ComfyUI-Kolors-MZ/configs/tokenizer/tokenizer.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e7dc4c393423b76e4373e5157ddc34803a0189ba96b21ddbb40269d31468a6f2 +size 1018370 diff --git a/ComfyUI-Kolors-MZ/configs/tokenizer/tokenizer_config.json b/ComfyUI-Kolors-MZ/configs/tokenizer/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..f6f13c88707490cebd8023da86e8bf7a56fa21e3 --- /dev/null +++ b/ComfyUI-Kolors-MZ/configs/tokenizer/tokenizer_config.json @@ -0,0 +1,12 @@ +{ + "name_or_path": "THUDM/chatglm3-6b-base", + "remove_space": false, + "do_lower_case": false, + "tokenizer_class": "ChatGLMTokenizer", + "auto_map": { + "AutoTokenizer": [ + "tokenization_chatglm.ChatGLMTokenizer", + null + ] + } +} diff --git a/ComfyUI-Kolors-MZ/configs/tokenizer/vocab.txt b/ComfyUI-Kolors-MZ/configs/tokenizer/vocab.txt new file mode 100644 index 0000000000000000000000000000000000000000..8a8007697b7cc3d3868dcffbbebf8c1f2bd690ba --- /dev/null +++ b/ComfyUI-Kolors-MZ/configs/tokenizer/vocab.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e7dc4c393423b76e4373e5157ddc34803a0189ba96b21ddbb40269d31468a6f2 +size 1018370 diff --git a/ComfyUI-Kolors-MZ/hook_comfyui_kolors_v1.py b/ComfyUI-Kolors-MZ/hook_comfyui_kolors_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..fca212321816a0bd0aff912770b22505f9024598 --- /dev/null +++ b/ComfyUI-Kolors-MZ/hook_comfyui_kolors_v1.py @@ -0,0 +1,85 @@ +from comfy.model_detection import * +import comfy.model_detection as model_detection +import comfy.supported_models + + +class Kolors(comfy.supported_models.SDXL): + unet_config = { + "model_channels": 320, + "use_linear_in_transformer": True, + "transformer_depth": [0, 0, 2, 2, 10, 10], + "context_dim": 2048, + "adm_in_channels": 5632, + "use_temporal_attention": False, + } + + +def kolors_unet_config_from_diffusers_unet(state_dict, dtype=None): + match = {} + transformer_depth = [] + + attn_res = 1 + down_blocks = count_blocks(state_dict, "down_blocks.{}") + for i in range(down_blocks): + attn_blocks = count_blocks( + state_dict, "down_blocks.{}.attentions.".format(i) + '{}') + res_blocks = count_blocks( + state_dict, "down_blocks.{}.resnets.".format(i) + '{}') + for ab in range(attn_blocks): + transformer_count = count_blocks( + state_dict, "down_blocks.{}.attentions.{}.transformer_blocks.".format(i, ab) + '{}') + transformer_depth.append(transformer_count) + if transformer_count > 0: + match["context_dim"] = state_dict["down_blocks.{}.attentions.{}.transformer_blocks.0.attn2.to_k.weight".format( + i, ab)].shape[1] + + attn_res *= 2 + if attn_blocks == 0: + for i in range(res_blocks): + transformer_depth.append(0) + + match["transformer_depth"] = transformer_depth + + match["model_channels"] = state_dict["conv_in.weight"].shape[0] + match["in_channels"] = state_dict["conv_in.weight"].shape[1] + match["adm_in_channels"] = None + if 
"class_embedding.linear_1.weight" in state_dict: + match["adm_in_channels"] = state_dict["class_embedding.linear_1.weight"].shape[1] + elif "add_embedding.linear_1.weight" in state_dict: + match["adm_in_channels"] = state_dict["add_embedding.linear_1.weight"].shape[1] + + Kolors = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 5632, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 2, 2, 10, 10], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 10, + 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 0, 2, 2, 2, 10, 10, 10], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + supported_models = [Kolors] + + for unet_config in supported_models: + matches = True + for k in match: + if match[k] != unet_config[k]: + print("key {} does not match".format( + k), match[k], "||", unet_config[k]) + matches = False + break + if matches: + return convert_config(unet_config) + return None + + +class apply_kolors: + def __enter__(self): + import comfy.supported_models + self.old_supported_models = comfy.supported_models.models + comfy.supported_models.models = [Kolors] + + self.old_unet_config_from_diffusers_unet = model_detection.unet_config_from_diffusers_unet + model_detection.unet_config_from_diffusers_unet = kolors_unet_config_from_diffusers_unet + + def __exit__(self, type, value, traceback): + model_detection.unet_config_from_diffusers_unet = self.old_unet_config_from_diffusers_unet + + import comfy.supported_models + comfy.supported_models.models = self.old_supported_models diff --git a/ComfyUI-Kolors-MZ/hook_comfyui_kolors_v2.py b/ComfyUI-Kolors-MZ/hook_comfyui_kolors_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..05dbbddebb29cc10a345dbc259a0a834ef3e0af7 --- /dev/null +++ b/ComfyUI-Kolors-MZ/hook_comfyui_kolors_v2.py @@ -0,0 +1,318 @@ +import os +from types import MethodType +import warnings +from comfy.model_detection import * +import comfy.model_detection as model_detection +import comfy.supported_models +import comfy.utils + +import torch +from comfy import model_base +from comfy.model_base import sdxl_pooled, CLIPEmbeddingNoiseAugmentation, Timestep, ModelType + + +from comfy.ldm.modules.diffusionmodules.openaimodel import UNetModel +from comfy.cldm.cldm import ControlNet + +# try: +# import comfy.samplers as samplers +# original_CFGGuider_inner_set_conds = samplers.CFGGuider.set_conds + +# def patched_set_conds(self, positive, negative): +# if isinstance(self.model_patcher.model, KolorsSDXL): +# import copy +# if "control" in positive[0][1]: +# if hasattr(positive[0][1]["control"], "control_model"): +# if positive[0][1]["control"].control_model.label_emb.shape[1] == 5632: +# return + + +# warnings.warn("该方法不再维护") +# positive = copy.deepcopy(positive) +# negative = copy.deepcopy(negative) +# hid_proj = self.model_patcher.model.encoder_hid_proj +# if hid_proj is not None: +# positive[0][0] = hid_proj(positive[0][0]) +# negative[0][0] = hid_proj(negative[0][0]) + +# if "control" in positive[0][1]: +# if hasattr(positive[0][1]["control"], "control_model"): +# positive[0][1]["control"].control_model.label_emb = self.model_patcher.model.diffusion_model.label_emb + +# if "control" in negative[0][1]: +# if hasattr(negative[0][1]["control"], "control_model"): +# 
negative[0][1]["control"].control_model.label_emb = self.model_patcher.model.diffusion_model.label_emb + +# return original_CFGGuider_inner_set_conds(self, positive, negative) + +# samplers.CFGGuider.set_conds = patched_set_conds +# except ImportError: +# print("CFGGuider not found, skipping patching") + + +class KolorsUNetModel(UNetModel): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.encoder_hid_proj = nn.Linear( + 4096, 2048, bias=True) + + def forward(self, *args, **kwargs): + with torch.cuda.amp.autocast(enabled=True): + if "context" in kwargs: + kwargs["context"] = self.encoder_hid_proj( + kwargs["context"]) + + # if "y" in kwargs: + # if kwargs["y"].shape[1] == 2816: + # # 扩展至5632 + # kwargs["y"] = torch.cat( + # torch.zeros(kwargs["y"].shape[0], 2816).to(kwargs["y"].device), kwargs["y"], dim=1) + + result = super().forward(*args, **kwargs) + return result + + +class KolorsSDXL(model_base.SDXL): + def __init__(self, model_config, model_type=ModelType.EPS, device=None): + model_config.sampling_settings["beta_schedule"] = "linear" + model_config.sampling_settings["linear_start"] = 0.00085 + model_config.sampling_settings["linear_end"] = 0.014 + model_config.sampling_settings["timesteps"] = 1100 + model_type = ModelType.EPS + model_base.BaseModel.__init__( + self, model_config, model_type, device=device, unet_model=KolorsUNetModel) + self.embedder = Timestep(256) + self.noise_augmentor = CLIPEmbeddingNoiseAugmentation( + **{"noise_schedule_config": {"timesteps": 1100, "beta_schedule": "linear", "linear_start": 0.00085, "linear_end": 0.014}, "timestep_dim": 1280}) + + def encode_adm(self, **kwargs): + clip_pooled = sdxl_pooled(kwargs, self.noise_augmentor) + width = kwargs.get("width", 768) + height = kwargs.get("height", 768) + crop_w = kwargs.get("crop_w", 0) + crop_h = kwargs.get("crop_h", 0) + target_width = kwargs.get("target_width", width) + target_height = kwargs.get("target_height", height) + + out = [] + out.append(self.embedder(torch.Tensor([height]))) + out.append(self.embedder(torch.Tensor([width]))) + out.append(self.embedder(torch.Tensor([crop_h]))) + out.append(self.embedder(torch.Tensor([crop_w]))) + out.append(self.embedder(torch.Tensor([target_height]))) + out.append(self.embedder(torch.Tensor([target_width]))) + flat = torch.flatten(torch.cat(out)).unsqueeze( + dim=0).repeat(clip_pooled.shape[0], 1) + return torch.cat((clip_pooled.to(flat.device), flat), dim=1) + + +class KolorsSupported(comfy.supported_models.SDXL): + unet_config = { + "model_channels": 320, + "use_linear_in_transformer": True, + "transformer_depth": [0, 0, 2, 2, 10, 10], + "context_dim": 2048, + "adm_in_channels": 5632, + "use_temporal_attention": False, + } + + def get_model(self, state_dict, prefix="", device=None): + out = KolorsSDXL(self, model_type=self.model_type( + state_dict, prefix), device=device,) + out.__class__ = model_base.SDXL + if self.inpaint_model(): + out.set_inpaint() + return out + + +def kolors_unet_config_from_diffusers_unet(state_dict, dtype=None): + match = {} + transformer_depth = [] + + attn_res = 1 + down_blocks = count_blocks(state_dict, "down_blocks.{}") + for i in range(down_blocks): + attn_blocks = count_blocks( + state_dict, "down_blocks.{}.attentions.".format(i) + '{}') + res_blocks = count_blocks( + state_dict, "down_blocks.{}.resnets.".format(i) + '{}') + for ab in range(attn_blocks): + transformer_count = count_blocks( + state_dict, "down_blocks.{}.attentions.{}.transformer_blocks.".format(i, ab) + '{}') + 
transformer_depth.append(transformer_count) + if transformer_count > 0: + match["context_dim"] = state_dict["down_blocks.{}.attentions.{}.transformer_blocks.0.attn2.to_k.weight".format( + i, ab)].shape[1] + + attn_res *= 2 + if attn_blocks == 0: + for i in range(res_blocks): + transformer_depth.append(0) + + match["transformer_depth"] = transformer_depth + + match["model_channels"] = state_dict["conv_in.weight"].shape[0] + match["in_channels"] = state_dict["conv_in.weight"].shape[1] + match["adm_in_channels"] = None + if "class_embedding.linear_1.weight" in state_dict: + match["adm_in_channels"] = state_dict["class_embedding.linear_1.weight"].shape[1] + elif "add_embedding.linear_1.weight" in state_dict: + match["adm_in_channels"] = state_dict["add_embedding.linear_1.weight"].shape[1] + + Kolors = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 5632, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 2, 2, 10, 10], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 10, + 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 0, 2, 2, 2, 10, 10, 10], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + Kolors_inpaint = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 5632, 'dtype': dtype, 'in_channels': 9, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 2, 2, 10, 10], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 10, + 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 0, 2, 2, 2, 10, 10, 10], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + Kolors_ip2p = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 5632, 'dtype': dtype, 'in_channels': 8, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 2, 2, 10, 10], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 10, + 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 0, 2, 2, 2, 10, 10, 10], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SDXL = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 2, 2, 10, 10], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 10, + 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 0, 2, 2, 2, 10, 10, 10], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SDXL_mid_cnet = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 0, 0, 1, 1], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 1, + 'use_linear_in_transformer': True, 
'context_dim': 2048, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 0, 0, 0, 0, 1, 1, 1], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SDXL_small_cnet = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 0, 0, 0, 0], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 0, + 'use_linear_in_transformer': True, 'num_head_channels': 64, 'context_dim': 1, 'transformer_depth_output': [0, 0, 0, 0, 0, 0, 0, 0, 0], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + supported_models = [Kolors, Kolors_inpaint, + Kolors_ip2p, SDXL, SDXL_mid_cnet, SDXL_small_cnet] + + for unet_config in supported_models: + matches = True + for k in match: + if match[k] != unet_config[k]: + print("key {} does not match".format( + k), match[k], "||", unet_config[k]) + matches = False + break + if matches: + return convert_config(unet_config) + return None + + +import comfy.ldm.modules.diffusionmodules.openaimodel +from torch import nn + + +def load_clipvision_336_from_sd(sd, prefix="", convert_keys=False): + from comfy.clip_vision import ClipVisionModel, convert_to_transformers + + json_config = os.path.join(os.path.dirname( + os.path.realpath(__file__)), "clip_vit_336", "config.json") + + clip = ClipVisionModel(json_config) + + m, u = clip.load_sd(sd) + if len(m) > 0: + logging.warning("missing clip vision: {}".format(m)) + u = set(u) + keys = list(sd.keys()) + for k in keys: + if k not in u: + t = sd.pop(k) + del t + + # def vis_forward(self, pixel_values, attention_mask=None, intermediate_output=None): + # pixel_values = nn.functional.interpolate( + # pixel_values, size=(336, 336), mode='bilinear', align_corners=False) + # x = self.embeddings(pixel_values) + # x = self.pre_layrnorm(x) + # # TODO: attention_mask? 
+ # x, i = self.encoder( + # x, mask=None, intermediate_output=intermediate_output) + # pooled_output = self.post_layernorm(x[:, 0, :]) + # return x, i, pooled_output + + # clip.model.vision_model.forward = MethodType( + # vis_forward, clip.model.vision_model + # ) + + return clip + + +class KolorsControlNet(ControlNet): + def __init__(self, *args, **kwargs): + adm_in_channels = kwargs["adm_in_channels"] + if adm_in_channels == 2816: + # 异常: 该加载器不支持SDXL类型, 请使用ControlNet加载器+KolorsControlNetPatch节点 + raise Exception( + "This loader does not support SDXL type, please use ControlNet loader + KolorsControlNetPatch node") + + super().__init__(*args, **kwargs) + self.encoder_hid_proj = nn.Linear( + 4096, 2048, bias=True) + + def forward(self, *args, **kwargs): + with torch.cuda.amp.autocast(enabled=True): + if "context" in kwargs: + kwargs["context"] = self.encoder_hid_proj( + kwargs["context"]) + + result = super().forward(*args, **kwargs) + return result + + +class apply_kolors: + def __enter__(self): + import comfy.ldm.modules.diffusionmodules.openaimodel + import comfy.cldm.cldm + import comfy.utils + import comfy.clip_vision + + self.original_load_clipvision_from_sd = comfy.clip_vision.load_clipvision_from_sd + comfy.clip_vision.load_clipvision_from_sd = load_clipvision_336_from_sd + + self.original_UNET_MAP_BASIC = comfy.utils.UNET_MAP_BASIC.copy() + comfy.utils.UNET_MAP_BASIC.add( + ("encoder_hid_proj.weight", "encoder_hid_proj.weight"), + ) + comfy.utils.UNET_MAP_BASIC.add( + ("encoder_hid_proj.bias", "encoder_hid_proj.bias"), + ) + + self.original_unet_config_from_diffusers_unet = model_detection.unet_config_from_diffusers_unet + model_detection.unet_config_from_diffusers_unet = kolors_unet_config_from_diffusers_unet + + import comfy.supported_models + self.original_supported_models = comfy.supported_models.models + comfy.supported_models.models = [KolorsSupported] + + self.original_controlnet = comfy.cldm.cldm.ControlNet + comfy.cldm.cldm.ControlNet = KolorsControlNet + + def __exit__(self, type, value, traceback): + import comfy.ldm.modules.diffusionmodules.openaimodel + import comfy.cldm.cldm + import comfy.utils + comfy.utils.UNET_MAP_BASIC = self.original_UNET_MAP_BASIC + + model_detection.unet_config_from_diffusers_unet = self.original_unet_config_from_diffusers_unet + + import comfy.supported_models + comfy.supported_models.models = self.original_supported_models + + import comfy.clip_vision + comfy.clip_vision.load_clipvision_from_sd = self.original_load_clipvision_from_sd + + comfy.cldm.cldm.ControlNet = self.original_controlnet diff --git a/ComfyUI-Kolors-MZ/mz_kolors_core.py b/ComfyUI-Kolors-MZ/mz_kolors_core.py new file mode 100644 index 0000000000000000000000000000000000000000..73c45b5a5db4c5bf511d3044641e0f97464db0af --- /dev/null +++ b/ComfyUI-Kolors-MZ/mz_kolors_core.py @@ -0,0 +1,394 @@ + + +import gc +import json +import os +import random +import re +import subprocess +import sys +from types import MethodType + +import torch +import folder_paths +import comfy.model_management as mm + + +def chatglm3_text_encode(chatglm3_model, prompt): + device = mm.get_torch_device() + offload_device = mm.unet_offload_device() + mm.unload_all_models() + mm.soft_empty_cache() + # Function to randomly select an option from the brackets + + def choose_random_option(match): + options = match.group(1).split('|') + return random.choice(options) + + prompt = re.sub(r'\{([^{}]*)\}', choose_random_option, prompt) + + # Define tokenizers and text encoders + tokenizer = 
chatglm3_model['tokenizer'] + text_encoder = chatglm3_model['text_encoder'] + text_encoder.to(device) + text_inputs = tokenizer( + prompt, + padding="max_length", + max_length=256, + truncation=True, + return_tensors="pt", + ).to(device) + + output = text_encoder( + input_ids=text_inputs['input_ids'], + attention_mask=text_inputs['attention_mask'], + position_ids=text_inputs['position_ids'], + output_hidden_states=True) + + # [batch_size, 77, 4096] + prompt_embeds = output.hidden_states[-2].permute(1, 0, 2).clone() + text_proj = output.hidden_states[-1][-1, + :, :].clone() # [batch_size, 4096] + bs_embed, seq_len, _ = prompt_embeds.shape + prompt_embeds = prompt_embeds.repeat(1, 1, 1) + prompt_embeds = prompt_embeds.view( + bs_embed, seq_len, -1) + + bs_embed = text_proj.shape[0] + text_proj = text_proj.repeat(1, 1).view( + bs_embed, -1 + ) + text_encoder.to(offload_device) + mm.soft_empty_cache() + gc.collect() + return prompt_embeds, text_proj + + +def MZ_ChatGLM3Loader_call(args): + # from .mz_kolors_utils import Utils + # llm_dir = os.path.join(Utils.get_models_path(), "LLM") + chatglm3_checkpoint = args.get("chatglm3_checkpoint") + + chatglm3_checkpoint_path = folder_paths.get_full_path( + 'LLM', chatglm3_checkpoint) + + if not os.path.exists(chatglm3_checkpoint_path): + raise RuntimeError( + f"ERROR: Could not find chatglm3 checkpoint: {chatglm3_checkpoint_path}") + + from .chatglm3.configuration_chatglm import ChatGLMConfig + from .chatglm3.modeling_chatglm import ChatGLMModel + from .chatglm3.tokenization_chatglm import ChatGLMTokenizer + + offload_device = mm.unet_offload_device() + + text_encoder_config = os.path.join( + os.path.dirname(__file__), 'configs', 'text_encoder_config.json') + with open(text_encoder_config, 'r') as file: + config = json.load(file) + + text_encoder_config = ChatGLMConfig(**config) + + from comfy.utils import load_torch_file + from contextlib import nullcontext + is_accelerate_available = False + try: + from accelerate import init_empty_weights + from accelerate.utils import set_module_tensor_to_device + is_accelerate_available = True + except: + pass + + with (init_empty_weights() if is_accelerate_available else nullcontext()): + with torch.no_grad(): + # 打印版本号 + print("torch version:", torch.__version__) + text_encoder = ChatGLMModel(text_encoder_config).eval() + if '4bit' in chatglm3_checkpoint: + try: + import cpm_kernels + except ImportError: + print("Installing cpm_kernels...") + subprocess.run( + [sys.executable, "-m", "pip", "install", "cpm_kernels"], check=True) + pass + text_encoder.quantize(4) + elif '8bit' in chatglm3_checkpoint: + text_encoder.quantize(8) + text_encoder_sd = load_torch_file(chatglm3_checkpoint_path) + if is_accelerate_available: + for key in text_encoder_sd: + set_module_tensor_to_device( + text_encoder, key, device=offload_device, value=text_encoder_sd[key]) + else: + print("WARNING: Accelerate not available, use load_state_dict load model") + text_encoder.load_state_dict(text_encoder_sd) + + tokenizer_path = os.path.join( + os.path.dirname(__file__), 'configs', "tokenizer") + tokenizer = ChatGLMTokenizer.from_pretrained(tokenizer_path) + + return ({"text_encoder": text_encoder, "tokenizer": tokenizer},) + + +def MZ_ChatGLM3TextEncodeV2_call(args): + text = args.get("text") + chatglm3_model = args.get("chatglm3_model") + prompt_embeds, pooled_output = chatglm3_text_encode( + chatglm3_model, + text, + ) + extra_kwargs = { + "pooled_output": pooled_output, + } + extra_cond_keys = [ + "width", + "height", + "crop_w", + 
"crop_h", + "target_width", + "target_height" + ] + for key, value in args.items(): + if key in extra_cond_keys: + extra_kwargs[key] = value + return ([[ + prompt_embeds, + # {"pooled_output": pooled_output}, + extra_kwargs + ]], ) + + +def MZ_ChatGLM3Embeds2Conditioning_call(args): + kolors_embeds = args.get("kolors_embeds") + + # kolors_embeds = { + # 'prompt_embeds': prompt_embeds, + # 'negative_prompt_embeds': negative_prompt_embeds, + # 'pooled_prompt_embeds': text_proj, + # 'negative_pooled_prompt_embeds': negative_text_proj + # } + + positive = [[ + kolors_embeds['prompt_embeds'], + { + "pooled_output": kolors_embeds['pooled_prompt_embeds'], + "width": args.get("width"), + "height": args.get("height"), + "crop_w": args.get("crop_w"), + "crop_h": args.get("crop_h"), + "target_width": args.get("target_width"), + "target_height": args.get("target_height") + } + ]] + + negative = [[ + kolors_embeds['negative_prompt_embeds'], + { + "pooled_output": kolors_embeds['negative_pooled_prompt_embeds'], + } + ]] + + return (positive, negative) + + +def MZ_KolorsUNETLoaderV2_call(kwargs): + + from . import hook_comfyui_kolors_v2 + import comfy.sd + + with hook_comfyui_kolors_v2.apply_kolors(): + unet_name = kwargs.get("unet_name") + unet_path = folder_paths.get_full_path("unet", unet_name) + import comfy.utils + sd = comfy.utils.load_torch_file(unet_path) + model = comfy.sd.load_unet_state_dict(sd) + if model is None: + raise RuntimeError( + "ERROR: Could not detect model type of: {}".format(unet_path)) + + return (model, ) + + +def MZ_KolorsCheckpointLoaderSimple_call(kwargs): + checkpoint_name = kwargs.get("ckpt_name") + + ckpt_path = folder_paths.get_full_path("checkpoints", checkpoint_name) + + from . import hook_comfyui_kolors_v2 + import comfy.sd + + with hook_comfyui_kolors_v2.apply_kolors(): + out = comfy.sd.load_checkpoint_guess_config( + ckpt_path, output_vae=True, output_clip=False, embedding_directory=folder_paths.get_folder_paths("embeddings")) + + unet, _, vae = out[:3] + return (unet, vae) + + +from comfy.cldm.cldm import ControlNet +from comfy.controlnet import ControlLora + + +def MZ_KolorsControlNetLoader_call(kwargs): + control_net_name = kwargs.get("control_net_name") + controlnet_path = folder_paths.get_full_path( + "controlnet", control_net_name) + + from torch import nn + from . import hook_comfyui_kolors_v2 + import comfy.controlnet + + with hook_comfyui_kolors_v2.apply_kolors(): + control_net = comfy.controlnet.load_controlnet(controlnet_path) + return (control_net, ) + + +def MZ_KolorsControlNetPatch_call(kwargs): + import copy + from . 
import hook_comfyui_kolors_v2 + import comfy.model_management + import comfy.model_patcher + + model = kwargs.get("model") + control_net = kwargs.get("control_net") + + if hasattr(control_net, "control_model") and hasattr(control_net.control_model, "encoder_hid_proj"): + return (control_net,) + + control_net = copy.deepcopy(control_net) + + import comfy.controlnet + if isinstance(control_net, ControlLora): + del_keys = [] + for k in control_net.control_weights: + if k.startswith("label_emb.0.0."): + del_keys.append(k) + + for k in del_keys: + control_net.control_weights.pop(k) + + super_pre_run = ControlLora.pre_run + super_forward = ControlNet.forward + + def KolorsControlNet_forward(self, x, hint, timesteps, context, **kwargs): + with torch.cuda.amp.autocast(enabled=True): + context = self.encoder_hid_proj(context) + return super_forward(self, x, hint, timesteps, context, **kwargs) + + def KolorsControlLora_pre_run(self, *args, **kwargs): + result = super_pre_run(self, *args, **kwargs) + + if hasattr(self, "control_model"): + if hasattr(self.control_model, "encoder_hid_proj"): + return result + + setattr(self.control_model, "encoder_hid_proj", + model.model.diffusion_model.encoder_hid_proj) + + self.control_model.forward = MethodType( + KolorsControlNet_forward, self.control_model) + + return result + + control_net.pre_run = MethodType( + KolorsControlLora_pre_run, control_net) + + super_copy = ControlLora.copy + + def KolorsControlLora_copy(self, *args, **kwargs): + c = super_copy(self, *args, **kwargs) + c.pre_run = MethodType( + KolorsControlLora_pre_run, c) + return c + + control_net.copy = MethodType( + KolorsControlLora_copy, control_net) + + control_net = copy.deepcopy(control_net) + + elif isinstance(control_net, comfy.controlnet.ControlNet): + model_label_emb = model.model.diffusion_model.label_emb + + control_net.control_model.label_emb = model_label_emb + setattr(control_net.control_model, "encoder_hid_proj", + model.model.diffusion_model.encoder_hid_proj) + + control_net.control_model_wrapped = comfy.model_patcher.ModelPatcher( + control_net.control_model, load_device=control_net.load_device, offload_device=comfy.model_management.unet_offload_device()) + + super_forward = ControlNet.forward + + def KolorsControlNet_forward(self, x, hint, timesteps, context, **kwargs): + with torch.cuda.amp.autocast(enabled=True): + context = self.encoder_hid_proj(context) + return super_forward(self, x, hint, timesteps, context, **kwargs) + + control_net.control_model.forward = MethodType( + KolorsControlNet_forward, control_net.control_model) + + else: + raise NotImplementedError( + f"Type {control_net} not supported for KolorsControlNetPatch") + + return (control_net,) + + +def MZ_KolorsCLIPVisionLoader_call(kwargs): + import comfy.clip_vision + from . 
import hook_comfyui_kolors_v2 + clip_name = kwargs.get("clip_name") + clip_path = folder_paths.get_full_path("clip_vision", clip_name) + with hook_comfyui_kolors_v2.apply_kolors(): + clip_vision = comfy.clip_vision.load(clip_path) + return (clip_vision,) + + +def MZ_ApplySDXLSamplingSettings_call(kwargs): + model = kwargs.get("model").clone() + + import comfy.model_sampling + sampling_base = comfy.model_sampling.ModelSamplingDiscrete + sampling_type = comfy.model_sampling.EPS + + class SDXLSampling(sampling_base, sampling_type): + pass + + model.model.model_config.sampling_settings["beta_schedule"] = "linear" + model.model.model_config.sampling_settings["linear_start"] = 0.00085 + model.model.model_config.sampling_settings["linear_end"] = 0.012 + model.model.model_config.sampling_settings["timesteps"] = 1000 + + model_sampling = SDXLSampling(model.model.model_config) + + model.add_object_patch("model_sampling", model_sampling) + + return (model,) + + +def MZ_ApplyCUDAGenerator_call(kwargs): + model = kwargs.get("model") + + def prepare_noise(latent_image, seed, noise_inds=None): + """ + creates random noise given a latent image and a seed. + optional arg skip can be used to skip and discard x number of noise generations for a given seed + """ + generator = torch.Generator(device="cuda").manual_seed(seed) + if noise_inds is None: + return torch.randn(latent_image.size(), dtype=latent_image.dtype, layout=latent_image.layout, generator=generator, device="cuda") + + unique_inds, inverse = np.unique(noise_inds, return_inverse=True) + noises = [] + for i in range(unique_inds[-1] + 1): + noise = torch.randn([1] + list(latent_image.size())[1:], dtype=latent_image.dtype, + layout=latent_image.layout, generator=generator, device="cuda") + if i in unique_inds: + noises.append(noise) + noises = [noises[i] for i in inverse] + noises = torch.cat(noises, axis=0) + return noises + + import comfy.sample + comfy.sample.prepare_noise = prepare_noise + return (model,) diff --git a/ComfyUI-Kolors-MZ/mz_kolors_legacy.py b/ComfyUI-Kolors-MZ/mz_kolors_legacy.py new file mode 100644 index 0000000000000000000000000000000000000000..cf057018e09d8065a3124159bd38abd5bb653e73 --- /dev/null +++ b/ComfyUI-Kolors-MZ/mz_kolors_legacy.py @@ -0,0 +1,226 @@ + + +import gc +import json +import os +import random +import re + +import torch +import folder_paths +import comfy.model_management as mm +from . 
import mz_kolors_core + + +def MZ_ChatGLM3TextEncode_call(args): + + text = args.get("text") + chatglm3_model = args.get("chatglm3_model") + + prompt_embeds, pooled_output = mz_kolors_core.chatglm3_text_encode( + chatglm3_model, + text, + ) + + from torch import nn + hid_proj: nn.Linear = args.get("hid_proj") + + if hid_proj.weight.dtype != prompt_embeds.dtype: + with torch.cuda.amp.autocast(dtype=hid_proj.weight.dtype): + prompt_embeds = hid_proj(prompt_embeds) + else: + prompt_embeds = hid_proj(prompt_embeds) + + return ([[ + prompt_embeds, + {"pooled_output": pooled_output}, + ]], ) + + +def load_unet_state_dict(sd): # load unet in diffusers or regular format + from comfy import model_management, model_detection + import comfy.utils + + # Allow loading unets from checkpoint files + checkpoint = False + diffusion_model_prefix = model_detection.unet_prefix_from_state_dict(sd) + temp_sd = comfy.utils.state_dict_prefix_replace( + sd, {diffusion_model_prefix: ""}, filter_keys=True) + if len(temp_sd) > 0: + sd = temp_sd + checkpoint = True + + parameters = comfy.utils.calculate_parameters(sd) + unet_dtype = model_management.unet_dtype(model_params=parameters) + load_device = model_management.get_torch_device() + + from torch import nn + hid_proj: nn.Linear = None + if True: + model_config = model_detection.model_config_from_diffusers_unet(sd) + if model_config is None: + return None + + diffusers_keys = comfy.utils.unet_to_diffusers( + model_config.unet_config) + + new_sd = {} + for k in diffusers_keys: + if k in sd: + new_sd[diffusers_keys[k]] = sd.pop(k) + else: + print("{} {}".format(diffusers_keys[k], k)) + + encoder_hid_proj_weight = sd.pop("encoder_hid_proj.weight") + encoder_hid_proj_bias = sd.pop("encoder_hid_proj.bias") + hid_proj = nn.Linear( + encoder_hid_proj_weight.shape[1], encoder_hid_proj_weight.shape[0]) + hid_proj.weight.data = encoder_hid_proj_weight + hid_proj.bias.data = encoder_hid_proj_bias + hid_proj = hid_proj.to(load_device) + + offload_device = model_management.unet_offload_device() + unet_dtype = model_management.unet_dtype( + model_params=parameters, supported_dtypes=model_config.supported_inference_dtypes) + manual_cast_dtype = model_management.unet_manual_cast( + unet_dtype, load_device, model_config.supported_inference_dtypes) + model_config.set_inference_dtype(unet_dtype, manual_cast_dtype) + model = model_config.get_model(new_sd, "") + model = model.to(offload_device) + model.load_model_weights(new_sd, "") + left_over = sd.keys() + if len(left_over) > 0: + print("left over keys in unet: {}".format(left_over)) + return comfy.model_patcher.ModelPatcher(model, load_device=load_device, offload_device=offload_device), hid_proj + + +def MZ_KolorsUNETLoader_call(kwargs): + + from . 
import hook_comfyui_kolors_v1 + with hook_comfyui_kolors_v1.apply_kolors(): + unet_name = kwargs.get("unet_name") + unet_path = folder_paths.get_full_path("unet", unet_name) + import comfy.utils + sd = comfy.utils.load_torch_file(unet_path) + model, hid_proj = load_unet_state_dict(sd) + if model is None: + raise RuntimeError( + "ERROR: Could not detect model type of: {}".format(unet_path)) + return (model, hid_proj) + + +def MZ_FakeCond_call(kwargs): + import torch + cond = torch.zeros(2, 256, 4096) + pool = torch.zeros(2, 4096) + + dtype = kwargs.get("dtype") + if dtype == "fp16": + print("fp16") + cond = cond.half() + pool = pool.half() + elif dtype == "bf16": + print("bf16") + cond = cond.bfloat16() + pool = pool.bfloat16() + else: + print("fp32") + cond = cond.float() + pool = pool.float() + + return ([[ + cond, + {"pooled_output": pool}, + ]],) + + +NODE_CLASS_MAPPINGS = { +} + + +NODE_DISPLAY_NAME_MAPPINGS = { +} + +AUTHOR_NAME = "MinusZone" +CATEGORY_NAME = f"{AUTHOR_NAME} - Kolors" + + +class MZ_ChatGLM3TextEncode: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "chatglm3_model": ("CHATGLM3MODEL", ), + "text": ("STRING", {"multiline": True, "dynamicPrompts": True}), + "hid_proj": ("TorchLinear", ), + } + } + + RETURN_TYPES = ("CONDITIONING",) + + FUNCTION = "encode" + CATEGORY = CATEGORY_NAME + "/Legacy" + + def encode(self, **kwargs): + return MZ_ChatGLM3TextEncode_call(kwargs) + + +NODE_CLASS_MAPPINGS["MZ_ChatGLM3"] = MZ_ChatGLM3TextEncode +NODE_DISPLAY_NAME_MAPPINGS[ + "MZ_ChatGLM3"] = f"{AUTHOR_NAME} - ChatGLM3TextEncode" + + +class MZ_KolorsUNETLoader(): + @classmethod + def INPUT_TYPES(s): + return {"required": { + "unet_name": (folder_paths.get_filename_list("unet"), ), + }} + + RETURN_TYPES = ("MODEL", "TorchLinear") + + RETURN_NAMES = ("model", "hid_proj") + + FUNCTION = "load_unet" + + CATEGORY = CATEGORY_NAME + "/Legacy" + + def load_unet(self, **kwargs): + return MZ_KolorsUNETLoader_call(kwargs) + + +NODE_CLASS_MAPPINGS["MZ_KolorsUNETLoader"] = MZ_KolorsUNETLoader +NODE_DISPLAY_NAME_MAPPINGS[ + "MZ_KolorsUNETLoader"] = f"{AUTHOR_NAME} - Kolors UNET Loader" + + +class MZ_FakeCond: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "seed": ("INT", {"default": 0}), + "dtype": ([ + "fp32", + "fp16", + "bf16", + ],), + } + } + + RETURN_TYPES = ("CONDITIONING", ) + RETURN_NAMES = ("prompt", ) + FUNCTION = "encode" + CATEGORY = CATEGORY_NAME + + def encode(self, **kwargs): + return MZ_FakeCond_call(kwargs) + + +try: + if os.environ.get("MZ_DEV", None) is not None: + NODE_CLASS_MAPPINGS["MZ_FakeCond"] = MZ_FakeCond + NODE_DISPLAY_NAME_MAPPINGS[ + "MZ_FakeCond"] = f"{AUTHOR_NAME} - FakeCond" +except ImportError: + pass diff --git a/ComfyUI-Kolors-MZ/mz_kolors_utils.py b/ComfyUI-Kolors-MZ/mz_kolors_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..5ee66ba21881833d43967c48e4fee58ea2a979de --- /dev/null +++ b/ComfyUI-Kolors-MZ/mz_kolors_utils.py @@ -0,0 +1,935 @@ + +import json +import os +import shutil +import subprocess +import sys +import threading +import time +import numpy as np +import folder_paths +import base64 +from PIL import Image, ImageFilter +import io +import torch +import re +import hashlib +import cv2 +# sys.path.append(os.path.join(os.path.dirname(__file__))) +temp_directory = folder_paths.get_temp_directory() +from tqdm import tqdm +import requests +import comfy.utils + + +CACHE_POOL = {} + + +class Utils: + def Md5(str): + return hashlib.md5(str.encode('utf-8')).hexdigest() + + def 
check_frames_path(frames_path): + + if frames_path == "" or frames_path.startswith(".") or frames_path.startswith("/") or frames_path.endswith("/") or frames_path.endswith("\\"): + return "frames_path不能为空" + + frames_path = os.path.join( + folder_paths.get_output_directory(), frames_path) + + if frames_path == folder_paths.get_output_directory(): + return "frames_path不能为output目录" + + return "" + + def base64_to_pil_image(base64_str): + if base64_str is None: + return None + if len(base64_str) == 0: + return None + if type(base64_str) not in [str, bytes]: + return None + if base64_str.startswith("data:image/png;base64,"): + base64_str = base64_str.split(",")[-1] + base64_str = base64_str.encode("utf-8") + base64_str = base64.b64decode(base64_str) + return Image.open(io.BytesIO(base64_str)) + + def pil_image_to_base64(pil_image): + buffered = io.BytesIO() + pil_image.save(buffered, format="PNG") + img_str = base64.b64encode(buffered.getvalue()) + img_str = str(img_str, encoding="utf-8") + return f"data:image/png;base64,{img_str}" + + def listdir_png(path): + try: + files = os.listdir(path) + new_files = [] + for file in files: + if file.endswith(".png"): + new_files.append(file) + files = new_files + files.sort(key=lambda x: int(os.path.basename(x).split(".")[0])) + return files + except Exception as e: + return [] + + def listdir_models(path): + try: + relative_paths = [] + for root, dirs, files in os.walk(path): + for file in files: + relative_paths.append(os.path.relpath( + os.path.join(root, file), path)) + relative_paths = [f for f in relative_paths if f.endswith(".safetensors") or f.endswith( + ".pt") or f.endswith(".pth") or f.endswith(".onnx")] + return relative_paths + + except Exception as e: + + return [] + + def tensor2pil(image): + return Image.fromarray(np.clip(255.0 * image.cpu().numpy().squeeze(), 0, 255).astype(np.uint8)) + + # Convert PIL to Tensor + + def pil2tensor(image): + return torch.from_numpy(np.array(image).astype(np.float32) / 255.0).unsqueeze(0)[0] + + def pil2cv(image): + return cv2.cvtColor(np.array(image), cv2.COLOR_RGB2BGR) + + def cv2pil(image): + return Image.fromarray(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)) + + def list_tensor2tensor(data): + result_tensor = torch.stack(data) + return result_tensor + + def loadImage(path): + img = Image.open(path) + img = img.convert("RGB") + return img + + def vae_encode_crop_pixels(pixels): + x = (pixels.shape[1] // 8) * 8 + y = (pixels.shape[2] // 8) * 8 + if pixels.shape[1] != x or pixels.shape[2] != y: + x_offset = (pixels.shape[1] % 8) // 2 + y_offset = (pixels.shape[2] % 8) // 2 + pixels = pixels[:, x_offset:x + x_offset, y_offset:y + y_offset, :] + return pixels + + def native_vae_encode(vae, image): + pixels = Utils.vae_encode_crop_pixels(image) + t = vae.encode(pixels[:, :, :, :3]) + return {"samples": t} + + def native_vae_encode_for_inpaint(vae, pixels, mask): + x = (pixels.shape[1] // 8) * 8 + y = (pixels.shape[2] // 8) * 8 + mask = torch.nn.functional.interpolate(mask.reshape( + (-1, 1, mask.shape[-2], mask.shape[-1])), size=(pixels.shape[1], pixels.shape[2]), mode="bilinear") + + pixels = pixels.clone() + if pixels.shape[1] != x or pixels.shape[2] != y: + x_offset = (pixels.shape[1] % 8) // 2 + y_offset = (pixels.shape[2] % 8) // 2 + pixels = pixels[:, x_offset:x + x_offset, y_offset:y + y_offset, :] + mask = mask[:, :, x_offset:x + x_offset, y_offset:y + y_offset] + + # grow mask by a few pixels to keep things seamless in latent space + + mask_erosion = mask + + m = (1.0 - mask.round()).squeeze(1) + for i 
in range(3): + pixels[:, :, :, i] -= 0.5 + pixels[:, :, :, i] *= m + pixels[:, :, :, i] += 0.5 + t = vae.encode(pixels) + + return {"samples": t, "noise_mask": (mask_erosion[:, :, :x, :y].round())} + + def native_vae_decode(vae, samples): + return vae.decode(samples["samples"]) + + def native_clip_text_encode(clip, text): + tokens = clip.tokenize(text) + cond, pooled = clip.encode_from_tokens(tokens, return_pooled=True) + return [[cond, {"pooled_output": pooled}]] + + def a1111_clip_text_encode(clip, text): + try: + from . import ADV_CLIP_emb_encode + cond, pooled = ADV_CLIP_emb_encode.advanced_encode( + clip, text, "none", "A1111", w_max=1.0, apply_to_pooled=False) + return [[cond, {"pooled_output": pooled}]] + except Exception as e: + import nodes + return nodes.CLIPTextEncode().encode(clip, text)[0] + + def cache_get(key): + return CACHE_POOL.get(key, None) + + def cache_set(key, value): + global CACHE_POOL + CACHE_POOL[key] = value + return True + + def get_models_path(): + return folder_paths.models_dir + + def get_gguf_models_path(): + models_path = os.path.join( + folder_paths.models_dir, "gguf") + os.makedirs(models_path, exist_ok=True) + return models_path + + def get_translate_object(from_code, to_code): + try: + is_disabel_argostranslate = Utils.cache_get( + "is_disabel_argostranslate") + + if is_disabel_argostranslate is not None: + return None + + try: + import argostranslate + from argostranslate import translate, package + except ImportError: + subprocess.run([ + sys.executable, "-m", + "pip", "install", "argostranslate"], check=True) + + try: + import argostranslate + from argostranslate import translate, package + except ImportError: + Utils.cache_set("is_disabel_argostranslate", True) + print( + "argostranslate not found and install failed , will disable it") + return None + + packages = package.get_installed_packages() + installed_packages = {} + for p in packages: + installed_packages[f"{p.from_code}_{p.to_code}"] = p + + argosmodel_dir = os.path.join( + Utils.get_models_path(), "argosmodel") + if not os.path.exists(argosmodel_dir): + os.makedirs(argosmodel_dir) + + model_name = None + if from_code == "zh" and to_code == "en": + model_name = "zh_en" + elif from_code == "en" and to_code == "zh": + model_name = "en_zh" + else: + return None + + if Utils.cache_get(f"argostranslate_{model_name}") is not None: + return Utils.cache_get(f"argostranslate_{model_name}") + + if installed_packages.get(model_name, None) is None: + if not os.path.exists(os.path.join(argosmodel_dir, f"translate-{model_name}-1_9.argosmodel")): + argosmodel_file = Utils.download_file( + url=f"https://www.modelscope.cn/api/v1/models/wailovet/MinusZoneAIModels/repo?Revision=master&FilePath=argosmodel%2Ftranslate-{model_name}-1_9.argosmodel", + filepath=os.path.join( + argosmodel_dir, f"translate-{model_name}-1_9.argosmodel"), + ) + else: + argosmodel_file = os.path.join( + argosmodel_dir, f"translate-{model_name}-1_9.argosmodel") + package.install_from_path(argosmodel_file) + + translate_object = translate.get_translation_from_codes( + from_code=from_code, to_code=to_code) + + Utils.cache_set(f"argostranslate_{model_name}", translate_object) + + return translate_object + except Exception as e: + Utils.cache_set("is_disabel_argostranslate", True) + print( + "argostranslate not found and install failed , will disable it") + print(f"get_translate_object error: {e}") + return None + + def translate_text(text, from_code, to_code): + translation = Utils.get_translate_object(from_code, to_code) + if translation 
is None: + return text + + # Translate + translatedText = translation.translate( + text) + + return translatedText + + def zh2en(text): + try: + return Utils.translate_text(text, "zh", "en") + except Exception as e: + print(f"zh2en error: {e}") + return text + + def en2zh(text): + try: + return Utils.translate_text(text, "en", "zh") + except Exception as e: + print(f"en2zh error: {e}") + return text + + def prompt_zh_to_en(prompt): + prompt = prompt.replace(",", ",") + prompt = prompt.replace("。", ",") + prompt = prompt.replace("\n", ",") + tags = prompt.split(",") + # 判断是否有中文 + for i, tag in enumerate(tags): + if re.search(u'[\u4e00-\u9fff]', tag): + tags[i] = Utils.zh2en(tag) + # 如果第一个字母是大写,转为小写 + if tags[i][0].isupper(): + tags[i] = tags[i].lower().replace(".", "") + + return ",".join(tags) + + def mask_resize(mask, width, height): + mask = mask.unsqueeze(0).unsqueeze(0) + mask = torch.nn.functional.interpolate( + mask, size=(height, width), mode="bilinear") + mask = mask.squeeze(0).squeeze(0) + return mask + + def mask_threshold(interested_mask): + mask_image = Utils.tensor2pil(interested_mask) + mask_image_cv2 = Utils.pil2cv(mask_image) + ret, thresh1 = cv2.threshold( + mask_image_cv2, 127, 255, cv2.THRESH_BINARY) + thresh1 = Utils.cv2pil(thresh1) + thresh1 = np.array(thresh1) + thresh1 = thresh1[:, :, 0] + return Utils.pil2tensor(thresh1) + + def mask_erode(interested_mask, value): + value = int(value) + mask_image = Utils.tensor2pil(interested_mask) + mask_image_cv2 = Utils.pil2cv(mask_image) + kernel = np.ones((5, 5), np.uint8) + erosion = cv2.erode(mask_image_cv2, kernel, iterations=value) + erosion = Utils.cv2pil(erosion) + erosion = np.array(erosion) + erosion = erosion[:, :, 0] + return Utils.pil2tensor(erosion) + + def mask_dilate(interested_mask, value): + value = int(value) + mask_image = Utils.tensor2pil(interested_mask) + mask_image_cv2 = Utils.pil2cv(mask_image) + kernel = np.ones((5, 5), np.uint8) + dilation = cv2.dilate(mask_image_cv2, kernel, iterations=value) + dilation = Utils.cv2pil(dilation) + dilation = np.array(dilation) + dilation = dilation[:, :, 0] + return Utils.pil2tensor(dilation) + + def mask_edge_opt(interested_mask, edge_feathering): + + mask_image = Utils.tensor2pil(interested_mask) + mask_image_cv2 = Utils.pil2cv(mask_image) + + # 高斯模糊 + dilation2 = Utils.cv2pil(mask_image_cv2) + dilation2 = mask_image.filter( + ImageFilter.GaussianBlur(edge_feathering)) + + # mask_image dilation2 图片蒙版叠加 + dilation2 = Utils.pil2cv(dilation2) + # dilation2[mask_image_cv2 < 127] = 0 + dilation2 = Utils.cv2pil(dilation2) + # to RGB + dilation2 = np.array(dilation2) + dilation2 = dilation2[:, :, 0] + return Utils.pil2tensor(dilation2) + + def mask_composite(destination, source, x, y, mask=None, multiplier=8, resize_source=False): + source = source.to(destination.device) + if resize_source: + source = torch.nn.functional.interpolate(source, size=( + destination.shape[2], destination.shape[3]), mode="bilinear") + + source = comfy.utils.repeat_to_batch_size(source, destination.shape[0]) + + x = max(-source.shape[3] * multiplier, + min(x, destination.shape[3] * multiplier)) + y = max(-source.shape[2] * multiplier, + min(y, destination.shape[2] * multiplier)) + + left, top = (x // multiplier, y // multiplier) + right, bottom = (left + source.shape[3], top + source.shape[2],) + + if mask is None: + mask = torch.ones_like(source) + else: + mask = mask.to(destination.device, copy=True) + mask = torch.nn.functional.interpolate(mask.reshape( + (-1, 1, mask.shape[-2], 
mask.shape[-1])), size=(source.shape[2], source.shape[3]), mode="bilinear") + mask = comfy.utils.repeat_to_batch_size(mask, source.shape[0]) + + # calculate the bounds of the source that will be overlapping the destination + # this prevents the source trying to overwrite latent pixels that are out of bounds + # of the destination + visible_width, visible_height = ( + destination.shape[3] - left + min(0, x), destination.shape[2] - top + min(0, y),) + + mask = mask[:, :, :visible_height, :visible_width] + inverse_mask = torch.ones_like(mask) - mask + + source_portion = mask * source[:, :, :visible_height, :visible_width] + destination_portion = inverse_mask * \ + destination[:, :, top:bottom, left:right] + + destination[:, :, top:bottom, + left:right] = source_portion + destination_portion + return destination + + def latent_upscale_by(samples, scale_by): + s = samples.copy() + width = round(samples["samples"].shape[3] * scale_by) + height = round(samples["samples"].shape[2] * scale_by) + s["samples"] = comfy.utils.common_upscale( + samples["samples"], width, height, "nearest-exact", "disabled") + return s + + def resize_by(image, percent): + # 判断类型是否为PIL + if not isinstance(image, Image.Image): + image = Image.fromarray(image) + + width, height = image.size + new_width = int(width * percent) + new_height = int(height * percent) + return image.resize((new_width, new_height), Image.LANCZOS) + + def resize_max(im, dst_w, dst_h): + src_w, src_h = im.size + + if src_h < src_w: + newWidth = dst_w + newHeight = dst_w * src_h // src_w + else: + newWidth = dst_h * src_w // src_h + newHeight = dst_h + + newHeight = newHeight // 8 * 8 + newWidth = newWidth // 8 * 8 + + return im.resize((newWidth, newHeight), Image.Resampling.LANCZOS) + + def get_device(): + return comfy.model_management.get_torch_device() + + def download_small_file(url, filepath): + response = requests.get(url) + os.makedirs(os.path.dirname(filepath), exist_ok=True) + with open(filepath, "wb") as f: + f.write(response.content) + return filepath + + def download_file(url, filepath, threads=8, retries=6): + + get_size_tmp = requests.get(url, stream=True) + total_size = int(get_size_tmp.headers.get("content-length", 0)) + + print(f"Downloading {url} to {filepath} with size {total_size} bytes") + + # 如果文件大小小于 50MB,使用download_small_file + if total_size < 50 * 1024 * 1024: + return Utils.download_small_file(url, filepath) + + base_filename = os.path.basename(filepath) + cache_dir = os.path.join(os.path.dirname( + filepath), f"{base_filename}.t_{threads}_cache") + os.makedirs(cache_dir, exist_ok=True) + + def get_total_existing_size(): + fs = os.listdir(cache_dir) + existing_size = 0 + for f in fs: + if f.startswith("block_"): + existing_size += os.path.getsize( + os.path.join(cache_dir, f)) + return existing_size + + total_existing_size = get_total_existing_size() + + if total_size != 0 and total_existing_size != total_size: + + with tqdm(total=total_size, initial=total_existing_size, unit="B", unit_scale=True) as progress_bar: + all_threads = [] + + for i in range(threads): + cache_filepath = os.path.join(cache_dir, f"block_{i}") + + start = total_size // threads * i + end = total_size // threads * (i + 1) - 1 + + if i == threads - 1: + end = total_size + + # Check if the file already exists + if os.path.exists(cache_filepath): + # Get the size of the existing file + existing_size = os.path.getsize(cache_filepath) + else: + existing_size = 0 + + headers = {"Range": f"bytes={start + existing_size}-{end}"} + if end == total_size: + headers 
= {"Range": f"bytes={start + existing_size}-"} + if start + existing_size >= end: + continue + # print(f"Downloading {cache_filepath} with headers bytes={start + existing_size}-{end}") + + # Streaming, so we can iterate over the response. + response = requests.get(url, stream=True, headers=headers) + + def download_file_thread(response, cache_filepath): + block_size = 1024 + if end - (start + existing_size) < block_size: + block_size = end - (start + existing_size) + with open(cache_filepath, "ab") as file: + for data in response.iter_content(block_size): + file.write(data) + progress_bar.update( + len(data) + ) + + t = threading.Thread( + target=download_file_thread, args=(response, cache_filepath)) + + all_threads.append(t) + + t.start() + + for t in all_threads: + t.join() + + if total_size != 0 and get_total_existing_size() > total_size: + # 文件下载失败 + shutil.rmtree(cache_dir) + raise RuntimeError("Download failed, file is incomplete") + + if total_size != 0 and total_size != get_total_existing_size(): + if retries > 0: + retries -= 1 + print( + f"Download failed: {total_size} != {get_total_existing_size()}, retrying... {retries} retries left") + return Utils.download_file(url, filepath, threads, retries) + + # 文件损坏 + raise RuntimeError( + f"Download failed: {total_size} != {get_total_existing_size()}") + + if os.path.exists(filepath): + shutil.move(filepath, filepath + ".old." + + time.strftime("%Y%m%d%H%M%S")) + + # merge the files + with open(filepath, "wb") as f: + for i in range(threads): + cache_filepath = os.path.join(cache_dir, f"block_{i}") + with open(cache_filepath, "rb") as cf: + f.write(cf.read()) + + shutil.rmtree(cache_dir) + return filepath + + def hf_download_model(url, only_get_path=False): + if not url.startswith("https://"): + raise ValueError("URL must start with https://") + if url.startswith("https://huggingface.co/") or url.startswith("https://hf-mirror.com/"): + base_model_path = os.path.abspath(os.path.join( + Utils.get_models_path(), "transformers_models")) + # https://huggingface.co/FaradayDotDev/llama-3-8b-Instruct-GGUF/resolve/main/llama-3-8b-Instruct.Q2_K.gguf?download=true + texts = url.split("?")[0].split("/") + file_name = texts[-1] + zone_path = f"{texts[3]}/{texts[4]}" + + save_path = os.path.join(base_model_path, zone_path, file_name) + + if os.path.exists(save_path) is False: + if only_get_path: + return None + os.makedirs(os.path.join( + base_model_path, zone_path), exist_ok=True) + Utils.download_file(url, save_path) + + # Utils.print_log( + # f"File {save_path} => {os.path.getsize(save_path)} ") + + # 获取大小 + if os.path.getsize(save_path) == 0: + if only_get_path: + return None + os.remove(save_path) + raise ValueError(f"Download failed: {url}") + return save_path + else: + texts = url.split("?")[0].split("/") + host = texts[2].replace(".", "_") + base_model_path = os.path.abspath(os.path.join( + Utils.get_models_path(), f"{host}_models")) + + file_name = texts[-1] + file_name_no_ext = os.path.splitext(file_name)[0] + file_ext = os.path.splitext(file_name)[1] + md5_hash = Utils.Md5(url) + + save_path = os.path.join( + base_model_path, f"{file_name_no_ext}.{md5_hash}{file_ext}") + + if os.path.exists(save_path) is False: + if only_get_path: + return None + os.makedirs(base_model_path, exist_ok=True) + Utils.download_file(url, save_path) + + return save_path + + def print_log(*args): + if os.environ.get("MZ_DEV", None) is not None: + print(*args) + + def modelscope_download_model(model_type, model_name, only_get_path=False): + if model_type not in 
modelscope_models_map: + if only_get_path: + return None + raise ValueError(f"模型类型 {model_type} 不支持") + + if model_name not in modelscope_models_map[model_type]: + if only_get_path: + return None + error_info = "魔搭可选模型名称列表:\n" + for key in modelscope_models_map[model_type].keys(): + error_info += f"> {key}\n" + raise ValueError(error_info) + + model_info = modelscope_models_map[model_type][model_name] + url = model_info["url"] + output = model_info["output"] + save_path = os.path.abspath( + os.path.join(Utils.get_models_path(), output)) + if not os.path.exists(save_path): + if only_get_path: + return None + save_path = Utils.download_file(url, save_path) + return save_path + + def progress_bar(steps): + class pb: + def __init__(self, steps): + self.steps = steps + self.pbar = comfy.utils.ProgressBar(steps) + + def update(self, step, total_steps, pil_img): + if pil_img is None: + self.pbar.update(step, total_steps) + + else: + if pil_img.mode != "RGB": + pil_img = pil_img.convert("RGB") + self.pbar.update_absolute( + step, total_steps, ("JPEG", pil_img, 512)) + + return pb(steps) + + def split_en_to_zh(text: str): + if text.find("(") != -1 and text.find(")") != -1: + sentences = [ + "", + ] + for word_index in range(len(text)): + if text[word_index] == "(" or text[word_index] == ")": + sentences.append(str(text[word_index])) + sentences.append("") + else: + sentences[-1] += str(text[word_index]) + + Utils.print_log("not_translated:", sentences) + for i in range(len(sentences)): + if sentences[i] != "(" and sentences[i] != ")": + sentences[i] = Utils.split_en_to_zh(sentences[i]) + + Utils.print_log("translated:", sentences) + + return "".join(sentences) + + # 中文标点转英文标点 + text = text.replace(",", ",") + text = text.replace("。", ".") + text = text.replace("?", "?") + text = text.replace("!", "!") + text = text.replace(";", ";") + + result = [] + if text.find("\n") != -1: + text = text.split("\n") + for t in text: + if t != "": + result.append(Utils.split_en_to_zh(t)) + else: + result.append(t) + return "\n".join(result) + + if text.find(".") != -1: + text = text.split(".") + for t in text: + if t != "": + result.append(Utils.split_en_to_zh(t)) + else: + result.append(t) + return ".".join(result) + + if text.find("?") != -1: + text = text.split("?") + for t in text: + if t != "": + result.append(Utils.split_en_to_zh(t)) + else: + result.append(t) + return "?".join(result) + + if text.find("!") != -1: + text = text.split("!") + for t in text: + if t != "": + result.append(Utils.split_en_to_zh(t)) + else: + result.append(t) + return "!".join(result) + + if text.find(";") != -1: + text = text.split(";") + for t in text: + if t != "": + result.append(Utils.split_en_to_zh(t)) + else: + result.append(t) + return ";".join(result) + + if text.find(",") != -1: + text = text.split(",") + for t in text: + if t != "": + result.append(Utils.split_en_to_zh(t)) + else: + result.append(t) + return ",".join(result) + + if text.find(":") != -1: + text = text.split(":") + for t in text: + if t != "": + result.append(Utils.split_en_to_zh(t)) + else: + result.append(t) + return ":".join(result) + + # 如果是纯数字,不翻译 + if text.isdigit() or text.replace(".", "").isdigit() or text.replace(" ", "").isdigit() or text.replace("-", "").isdigit(): + return text + + return Utils.en2zh(text) + + def to_debug_prompt(p): + if p is None: + return "" + zh = Utils.en2zh(p) + if p == zh: + return p + zh = Utils.split_en_to_zh(p) + p = p.strip() + return f""" +原文: +{p} + +中文翻译: +{zh} +""" + + def get_gguf_files(): + gguf_dir = 
Utils.get_gguf_models_path() + if not os.path.exists(gguf_dir): + os.makedirs(gguf_dir) + gguf_files = [] + # walk gguf_dir + for root, dirs, files in os.walk(gguf_dir): + for file in files: + if file.endswith(".gguf"): + gguf_files.append( + os.path.relpath(os.path.join(root, file), gguf_dir)) + + return gguf_files + + def get_comfyui_models_path(): + return folder_paths.models_dir + + def download_model(model_info, only_get_path=False): + + url = model_info["url"] + output = model_info["output"] + save_path = os.path.abspath( + os.path.join(Utils.get_comfyui_models_path(), output)) + if not os.path.exists(save_path): + if only_get_path: + return None + save_path = Utils.download_file(url, save_path) + return save_path + + def file_hash(file_path, hash_method): + if not os.path.isfile(file_path): + return '' + h = hash_method() + with open(file_path, 'rb') as f: + while b := f.read(8192): + h.update(b) + return h.hexdigest() + + def get_cache_by_local(key): + try: + cache_json_file = os.path.join( + Utils.get_models_path(), f"caches.json") + + if not os.path.exists(cache_json_file): + return None + + with open(cache_json_file, "r", encoding="utf-8") as f: + cache_json = json.load(f) + return cache_json.get(key, None) + except: + return None + + def set_cache_by_local(key, value): + try: + cache_json_file = os.path.join( + Utils.get_models_path(), f"caches.json") + + if not os.path.exists(cache_json_file): + cache_json = {} + else: + with open(cache_json_file, "r", encoding="utf-8") as f: + cache_json = json.load(f) + + cache_json[key] = value + + with open(cache_json_file, "w", encoding="utf-8") as f: + json.dump(cache_json, f, indent=4) + except: + pass + + def file_sha256(file_path): + # 获取文件的更新时间 + file_stat = os.stat(file_path) + file_mtime = file_stat.st_mtime + file_size = file_stat.st_size + cache_key = f"{file_path}_{file_mtime}_{file_size}" + cache_value = Utils.get_cache_by_local(cache_key) + if cache_value is not None: + return cache_value + + sha256 = Utils.file_hash(file_path, hashlib.sha256) + Utils.set_cache_by_local(cache_key, sha256) + return sha256 + + def get_auto_model_fullpath(model_name): + fullpath = Utils.cache_get(f"get_auto_model_fullpath_{model_name}") + Utils.print_log(f"get_auto_model_fullpath_{model_name} => {fullpath}") + if fullpath is not None: + if os.path.exists(fullpath): + return fullpath + + find_paths = [] + target_sha256 = "" + file_path = "" + download_url = "" + + MODEL_ZOO = Utils.get_model_zoo() + for model in MODEL_ZOO: + if model["model"] == model_name: + find_paths = model["find_path"] + target_sha256 = model["SHA256"] + file_path = model["file_path"] + download_url = model["url"] + break + + if target_sha256 == "": + raise ValueError(f"Model {model_name} not found in MODEL_ZOO") + + if os.path.exists(file_path): + if Utils.file_sha256(file_path) != target_sha256: + print(f"Model {model_name} file hash not match...") + return file_path + + for find_path in find_paths: + find_fullpath = os.path.join( + Utils.get_comfyui_models_path(), find_path) + + if os.path.exists(find_fullpath): + for root, dirs, files in os.walk(find_fullpath): + for file in files: + if target_sha256 == Utils.file_sha256(os.path.join(root, file)): + Utils.cache_set( + f"get_auto_model_fullpath_{model_name}", os.path.join(root, file)) + return os.path.join(root, file) + else: + Utils.print_log( + f"Model {os.path.join(root, file)} file hash not match, {target_sha256} != {Utils.file_sha256(os.path.join(root, file))}") + + result = Utils.download_model( + {"url": 
download_url, "output": file_path}) + Utils.cache_set(f"get_auto_model_fullpath_{model_name}", result) + return result + + def testDownloadSpeed(url): + try: + print(f"Testing download speed for {url}") + start = time.time() + # 下载2M数据 + headers = {"Range": "bytes=0-2097151"} + _ = requests.get(url, headers=headers, timeout=5) + end = time.time() + print( + f"Download speed: {round(5.00 / (float(end) - float(start)) / 1024, 2)} KB/s") + return float(end) - float(start) < 4 + except Exception as e: + print(f"Test download speed failed: {e}") + return False + + def get_model_zoo(tags_filter=None): + source_model_zoo_file = os.path.join( + os.path.dirname(__file__), "configs", "model_zoo.json") + source_model_zoo_json = [] + try: + with open(source_model_zoo_file, "r", encoding="utf-8") as f: + source_model_zoo_json = json.load(f) + except: + pass + + # Utils.print_log(f"source_model_zoo_json: {json.dumps(source_model_zoo_json, indent=4)}") + if tags_filter is not None: + source_model_zoo_json = [ + m for m in source_model_zoo_json if tags_filter in m["tags"]] + + return source_model_zoo_json + + + +modelscope_models_map = { + +} diff --git a/ComfyUI-Kolors-MZ/pyproject.toml b/ComfyUI-Kolors-MZ/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..1eb6bde2978ddd4a66a3e078c2973d1a6150233b --- /dev/null +++ b/ComfyUI-Kolors-MZ/pyproject.toml @@ -0,0 +1,14 @@ +[project] +name = "comfyui-kolors-mz" +description = "Implementation of Kolors on ComfyUI\nReference from [a/https://github.com/kijai/ComfyUI-KwaiKolorsWrapper](https://github.com/kijai/ComfyUI-KwaiKolorsWrapper)\nUsing ComfyUI Native Sampling" +version = "1.0.0" +license = { file = "GPL-3.0 license" } + +[project.urls] +Repository = "https://github.com/MinusZoneAI/ComfyUI-Kolors-MZ" +# Used by Comfy Registry https://comfyregistry.org + +[tool.comfy] +PublisherId = "wailovet" +DisplayName = "ComfyUI-Kolors-MZ" +Icon = "" diff --git a/ComfyUI-KwaiKolorsWrapper/LICENSE b/ComfyUI-KwaiKolorsWrapper/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/ComfyUI-KwaiKolorsWrapper/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/ComfyUI-KwaiKolorsWrapper/MODEL_LICENSE b/ComfyUI-KwaiKolorsWrapper/MODEL_LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..4e34338da34cd35b092b2e0b694eef31f6446908 --- /dev/null +++ b/ComfyUI-KwaiKolorsWrapper/MODEL_LICENSE @@ -0,0 +1,91 @@ +模型许可协议 +模型发布日期:2024/7/6 + +通过点击同意或使用、复制、修改、分发、表演或展示模型作品的任何部分或元素,您将被视为已承认并接受本协议的内容,本协议立即生效。 + +1.定义。 +a. “协议”指本协议中所规定的使用、复制、分发、修改、表演和展示模型作品或其任何部分或元素的条款和条件。 +b. “材料”是指根据本协议提供的专有的模型和文档(及其任何部分)的统称。 +c. “模型”指大型语言模型、图像/视频/音频/3D 生成模型、多模态大型语言模型及其软件和算法,包括训练后的模型权重、参数(包括优化器状态)、机器学习模型代码、推理支持代码、训练支持代码、微调支持代码以及我们公开提供的前述其他元素。 +d. “输出”是指通过操作或以其他方式使用模型或模型衍生品而产生的模型或模型衍生品的信息和/或内容输出。 +e. “模型衍生品”包括:(i)对模型或任何模型衍生物的修改;(ii)基于模型的任何模型衍生物的作品;或(iii)通过将模型或模型的任何模型衍生物的权重、参数、操作或输出的模式转移到该模型而创建的任何其他机器学习模型,以使该模型的性能类似于模型或模型衍生物。为清楚起见,输出本身不被视为模型衍生物。 +f. “模型作品”包括:(i)材料;(ii)模型衍生品;及(iii)其所有衍生作品。 +g. “许可人”或“我们”指作品所有者或作品所有者授权的授予许可的实体,包括可能对模型和/或分发模型拥有权利的个人或实体。 +h.“被许可人”、“您”或“您的”是指行使本协议授予的权利和/或为任何目的和在任何使用领域使用模型作品的自然人或法人实体。 +i.“第三方”是指不受我们或您共同控制的个人或法人实体。 + +2. 
许可内容。 +a.我们授予您非排他性的、全球性的、不可转让的、免版税的许可(在我们的知识产权或我们拥有的体现在材料中或利用材料的其他权利的范围内),允许您仅根据本协议的条款使用、复制、分发、创作衍生作品(包括模型衍生品)和对材料进行修改,并且您不得违反(或鼓励、或允许任何其他人违反)本协议的任何条款。 +b.在遵守本协议的前提下,您可以分发或向第三方提供模型作品,您须满足以下条件: +(i)您必须向所有该模型作品或使用该作品的产品或服务的任何第三方接收者提供模型作品的来源和本协议的副本; +(ii)您必须在任何修改过的文档上附加明显的声明,说明您更改了这些文档; +(iii)您可以在您的修改中添加您自己的版权声明,并且,在您对该作品的使用、复制、修改、分发、表演和展示符合本协议的条款和条件的前提下,您可以为您的修改或任何此类模型衍生品的使用、复制或分发提供额外或不同的许可条款和条件。 +c. 附加商业条款: 若您希望将模型及模型衍生品用作商业用途,则您必须向许可人申请许可,许可人可自行决定向您授予许可。除非许可人另行明确授予您该等权利,否则您无权行使本协议项下的任何权利。 + +3.使用限制。 +a. 您对本模型作品的使用必须遵守适用法律法规(包括贸易合规法律法规),并遵守《服务协议》(https://kolors.kuaishou.com/agreement)。您必须将本第 3(a) 和 3(b) 条中提及的使用限制作为可执行条款纳入任何规范本模型作品使用和/或分发的协议(例如许可协议、使用条款等),并且您必须向您分发的后续用户发出通知,告知其本模型作品受本第 3(a) 和 3(b) 条中的使用限制约束。 +b. 您不得使用本模型作品或本模型作品的任何输出或成果来改进任何其他模型(本模型或其模型衍生品除外)。 + +4.知识产权。 +a. 我们保留模型的所有权及其相关知识产权。在遵守本协议条款和条件的前提下,对于您制作的材料的任何衍生作品和修改,您是且将是此类衍生作品和修改的所有者。 +b. 本协议不授予任何商标、商号、服务标记或产品名称的标识许可,除非出于描述和分发本模型作品的合理和惯常用途。 +c. 如果您对我们或任何个人或实体提起诉讼或其他程序(包括诉讼中的交叉索赔或反索赔),声称材料或任何输出或任何上述内容的任何部分侵犯您拥有或可许可的任何知识产权或其他权利,则根据本协议授予您的所有许可应于提起此类诉讼或其他程序之日起终止。 + +5. 免责声明和责任限制。 +a. 本模型作品及其任何输出和结果按“原样”提供,不作任何明示或暗示的保证,包括适销性、非侵权性或适用于特定用途的保证。我们不对材料及其任何输出的安全性或稳定性作任何保证,也不承担任何责任。 +b. 在任何情况下,我们均不对您承担任何损害赔偿责任,包括但不限于因您使用或无法使用材料或其任何输出而造成的任何直接、间接、特殊或后果性损害赔偿责任,无论该损害赔偿责任是如何造成的。 + +6. 存续和终止。 +a. 本协议期限自您接受本协议或访问材料之日起开始,并将持续完全有效,直至根据本协议条款和条件终止。 +b. 如果您违反本协议的任何条款或条件,我们可终止本协议。本协议终止后,您必须立即删除并停止使用本模型作品。第 4(a)、4(c)、5和 7 条在本协议终止后仍然有效。 + +7. 适用法律和管辖权。 +a. 本协议及由本协议引起的或与本协议有关的任何争议均受中华人民共和国大陆地区(仅为本协议目的,不包括香港、澳门和台湾)法律管辖,并排除冲突法的适用,且《联合国国际货物销售合同公约》不适用于本协议。 +b. 因本协议引起或与本协议有关的任何争议,由许可人住所地人民法院管辖。 + +请注意,许可证可能会更新到更全面的版本。 有关许可和版权的任何问题,请通过 kolors_opensource@kuaishou.com 与我们联系。 +  + +英文版 + +MODEL LICENSE AGREEMENT +Release Date: 2024/7/6 +By clicking to agree or by using, reproducing, modifying, distributing, performing or displaying any portion or element of the Model Works, You will be deemed to have recognized and accepted the content of this Agreement, which is effective immediately. +1. DEFINITIONS. +a. “Agreement” shall mean the terms and conditions for use, reproduction, distribution, modification, performance and displaying of the Model Works or any portion or element thereof set forth herein. +b. “Documentation” shall mean the specifications, manuals and documentation for Tencent Hunyuan made publicly available by Tencent. +c. “Model” shall mean the large language models, image/video/audio/3D generation models, and multimodal large language models and their software and algorithms, including trained model weights, parameters (including optimizer states), machine-learning model code, inference-enabling code, training-enabling code, fine-tuning enabling code and other elements of the foregoing made publicly available by Us . +d. “Output” shall mean the information and/or content output of Model or a Model Derivative that results from operating or otherwise using Model or a Model Derivative. +e. “Model Derivatives” shall mean all: (i) modifications to the Model or any Model Derivative; (ii) works based on the Model or any Model Derivative; or (iii) any other machine learning model which is created by transfer of patterns of the weights, parameters, operations, or Output of the Model or any Model Derivative, to that model in order to cause that model to perform similarly to the Model or a Model Derivative, including distillation methods, methods that use intermediate data representations, or methods based on the generation of synthetic data Outputs or a Model Derivative for training that model. For clarity, Outputs by themselves are not deemed Model Derivatives. 
+f. “Model Works” shall mean: (i) the Materials; (ii) Model Derivatives; and (iii) all derivative works thereof. +g. “Licensor” , “We” or “Us” shall mean the copyright owner or entity authorized by the copyright owner that is granting the License, including the persons or entities that may have rights in the Model and/or distributing the Model. +h. “Licensee”, “You” or “Your” shall mean a natural person or legal entity exercising the rights granted by this Agreement and/or using the Model Works for any purpose and in any field of use. +i. “Third Party” or “Third Parties” shall mean individuals or legal entities that are not under common control with Us or You. + +2. LICENSE CONTENT. +a. We grant You a non-exclusive, worldwide, non-transferable and royalty-free limited license under the intellectual property or other rights owned by Us embodied in or utilized by the Materials to use, reproduce, distribute, create derivative works of (including Model Derivatives), and make modifications to the Materials, only in accordance with the terms of this Agreement and the Acceptable Use Policy, and You must not violate (or encourage or permit anyone else to violate) any term of this Agreement or the Acceptable Use Policy. +b. You may, subject to Your compliance with this Agreement, distribute or make available to Third Parties the Model Works, provided that You meet all of the following conditions: + (i) You must provide all such Third Party recipients of the Model Works or products or services using them the source of the Model and a copy of this Agreement; +(ii) You must cause any modified documents to carry prominent notices stating that You changed the documents; +(iii) You may add Your own copyright statement to Your modifications and, may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Model Derivatives as a whole, provided Your use, reproduction, modification, distribution, performance and display of the work otherwise complies with the terms and conditions of this Agreement. +c. additional commercial terms: If, You wish to use the model or its derivatives for commercial purposes, You must request a license from licensor, which the licensor may grant to You in its sole discretion, and You are not authorized to exercise any of the rights under this Agreement unless or until Tencent otherwise expressly grants You such rights. +3. LICENSE RESTRICITIONS. +a. Your use of the Model Works must comply with applicable laws and regulations (including trade compliance laws and regulations) and adhere to the Service Agreement (https://kolors.kuaishou.com/agreement). You must include the use restrictions referenced in these Sections 3(a) and 3(b) as an enforceable provision in any agreement (e.g., license agreement, terms of use, etc.) governing the use and/or distribution of Model Works and You must provide notice to subsequent users to whom You distribute that Model Works are subject to the use restrictions in these Sections 3(a) and 3(b). +b. You must not use the Model Works or any Output or results of the Model Works to improve any other large language model (other than Model or Model Derivatives thereof). +4. INTELLECTUAL PROPERTY. +a. We retain ownership of all intellectual property rights in and to the Materials and derivatives. 
Conditioned upon compliance with the terms and conditions of this Agreement, with respect to any derivative works and modifications of the Materials that are made by you, you are and will be the owner of such derivative works and modifications. +b. No trademark license is granted to use the trade names, trademarks, service marks, or product names of Us, except as required to fulfill notice requirements under this Agreement or as required for reasonable and customary use in describing and redistributing the Materials. +c. If You commence a lawsuit or other proceedings (including a cross-claim or counterclaim in a lawsuit) against Us or any person or entity alleging that the Materials or any Output, or any portion of any of the foregoing, infringe any intellectual property or other right owned or licensable by You, then all licenses granted to You under this Agreement shall terminate as of the date such lawsuit or other proceeding is filed. +5. DISCLAIMERS OF WARRANTY AND LIMITATIONS OF LIABILITY. +a. THE MODEL WORKS AND ANY OUTPUT AND RESULTS THERE FROM ARE PROVIDED "AS IS" WITHOUT ANY EXPRESS OR IMPLIED WARRANTY OF ANY KIND INCLUDING WARRANTIES OF MERCHANTABILITY, NONINFRINGEMENT, OR FITNESS FOR A PARTICULAR PURPOSE. WE MAKE NO WARRANTY AND ASSUME NO RESPONSIBILITY FOR THE SAFETY OR STABILITY OF THE MATERIALS AND ANY OUTPUT THEREFROM. +b. IN NO EVENT SHALL WE BE LIABLE TO YOU FOR ANY DAMAGES, INCLUDING, BUT NOT LIMITED TO ANY DIRECT, OR INDIRECT, SPECIAL OR CONSEQUENTIAL DAMAGES ARISING FROM YOUR USE OR INABILITY TO USE THE MATERIALS OR ANY OUTPUT OF IT, NO MATTER HOW IT’S CAUSED. +c. You will defend, indemnify and hold harmless Us from and against any claim by any third party arising out of or related to your use or distribution of the Materials. + +6. SURVIVAL AND TERMINATION. +a. The term of this Agreement shall commence upon Your acceptance of this Agreement or access to the Materials and will continue in full force and effect until terminated in accordance with the terms and conditions herein. +b. We may terminate this Agreement if You breach any of the terms or conditions of this Agreement. Upon termination of this Agreement, You must promptly delete and cease use of the Model Works. Sections 4(a), 4(c), 5 and 7 shall survive the termination of this Agreement. +7. GOVERNING LAW AND JURISDICTION. +a. This Agreement and any dispute arising out of or relating to it will be governed by the laws of China (for the purpose of this agreement only, excluding Hong Kong, Macau, and Taiwan), without regard to conflict of law principles, and the UN Convention on Contracts for the International Sale of Goods does not apply to this Agreement. +b. Any disputes arising from or related to this Agreement shall be under the jurisdiction of the People's Court where the Licensor is located. + +Note that the license is subject to update to a more comprehensive version. For any questions related to the license and copyright, please contact us at kolors_opensource@kuaishou.com. diff --git a/ComfyUI-KwaiKolorsWrapper/README.md b/ComfyUI-KwaiKolorsWrapper/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b5c1a255f76d214720310c75ac49bf5dc7fc93de --- /dev/null +++ b/ComfyUI-KwaiKolorsWrapper/README.md @@ -0,0 +1,79 @@ +# ComfyUI wrapper for Kwai-Kolors + +Rudimentary wrapper that runs Kwai-Kolors text2image pipeline using diffusers. 
+
+## Update - safetensors
+
+Added an alternative way to load the ChatGLM3 model from a single safetensors file (the configs are already included in this repo).
+Including already quantized models:
+
+![image](https://github.com/kijai/ComfyUI-KwaiKolorsWrapper/assets/40791699/e161eee6-ffd8-4945-8905-1ca47f2a5ef1)
+
+https://huggingface.co/Kijai/ChatGLM3-safetensors/upload/main
+
+goes into:
+
+`ComfyUI\models\LLM\checkpoints`
+![image](https://github.com/kijai/ComfyUI-KwaiKolorsWrapper/assets/40791699/2a6c6f3f-e159-4a82-b16f-4956f9affb25)
+
+![image](https://github.com/kijai/ComfyUI-KwaiKolorsWrapper/assets/40791699/a31ab13a-b321-4cc6-b853-4a4e078eb6dc)
+
+
+## Installation:
+
+Clone this repository into the `ComfyUI/custom_nodes` folder.
+
+Install the dependencies in requirements.txt; transformers version 4.38.0 or newer is required:
+
+`pip install -r requirements.txt`
+
+or, if you use the portable install (run this in the ComfyUI_windows_portable folder):
+
+`python_embedded\python.exe -m pip install -r ComfyUI\custom_nodes\ComfyUI-KwaiKolorsWrapper\requirements.txt`
+
+
+Models (fp16, 16.5GB) are automatically downloaded from https://huggingface.co/Kwai-Kolors/Kolors/tree/main
+
+to `ComfyUI/models/diffusers/Kolors`
+
+The model folder structure needs to be the following:
+
+```
+PS C:\ComfyUI_windows_portable\ComfyUI\models\diffusers\Kolors> tree /F
+│   model_index.json
+│
+├───scheduler
+│       scheduler_config.json
+│
+├───text_encoder
+│       config.json
+│       pytorch_model-00001-of-00007.bin
+│       pytorch_model-00002-of-00007.bin
+│       pytorch_model-00003-of-00007.bin
+│       pytorch_model-00004-of-00007.bin
+│       pytorch_model-00005-of-00007.bin
+│       pytorch_model-00006-of-00007.bin
+│       pytorch_model-00007-of-00007.bin
+│       pytorch_model.bin.index.json
+│       tokenizer.model
+│       tokenizer_config.json
+│       vocab.txt
+│
+└───unet
+        config.json
+        diffusion_pytorch_model.fp16.safetensors
+```
+To run this, the text encoder is what takes most of the VRAM, but it can be quantized to fit in approximately these amounts:
+
+| Model  | Size   |
+|--------|--------|
+| fp16   | ~13 GB |
+| quant8 | ~8 GB  |
+| quant4 | ~4 GB  |
+
+After that, sampling a single image at 1024 can be expected to take a similar amount of VRAM as SDXL. The base SDXL VAE is used for the VAE.
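+
+For reference, here is a minimal sketch (not part of the wrapper's node code) of what the quant8/quant4 rows correspond to, using the `quantize()` method defined in the bundled `kolors/models/modeling_chatglm.py`. It assumes `cpm_kernels` is installed and the fp16 text encoder is already in the folder shown above; the import path and model path are illustrative:
+
+```python
+# Illustrative only: load the bundled ChatGLM3 text encoder and quantize it in place.
+# quantize() is defined in kolors/models/modeling_chatglm.py and requires cpm_kernels.
+import torch
+from kolors.models.modeling_chatglm import ChatGLMForConditionalGeneration  # illustrative import path
+
+text_encoder_dir = "ComfyUI/models/diffusers/Kolors/text_encoder"  # adjust to your install
+
+text_encoder = ChatGLMForConditionalGeneration.from_pretrained(
+    text_encoder_dir,
+    torch_dtype=torch.float16,
+)
+text_encoder = text_encoder.quantize(8)  # 8 -> ~8 GB, 4 -> ~4 GB (see the table above)
+text_encoder = text_encoder.eval().to("cuda")
+```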
+ +![image](https://github.com/kijai/ComfyUI-KwaiKolorsWrapper/assets/40791699/ada4ac93-58ee-4957-96cd-2b327579d4f8) + +![image](https://github.com/kijai/ComfyUI-KwaiKolorsWrapper/assets/40791699/b6a17074-be09-4075-b66f-7857c871057a) + diff --git a/ComfyUI-KwaiKolorsWrapper/__init__.py b/ComfyUI-KwaiKolorsWrapper/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2e96bd6ab3db650f769ae7886e0c13515752bd16 --- /dev/null +++ b/ComfyUI-KwaiKolorsWrapper/__init__.py @@ -0,0 +1,3 @@ +from .nodes import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS + +__all__ = ["NODE_CLASS_MAPPINGS", "NODE_DISPLAY_NAME_MAPPINGS"] \ No newline at end of file diff --git a/ComfyUI-KwaiKolorsWrapper/configs/text_encoder_config.json b/ComfyUI-KwaiKolorsWrapper/configs/text_encoder_config.json new file mode 100644 index 0000000000000000000000000000000000000000..c6e19300822b25ae0a07125bbc171c6581dbeda4 --- /dev/null +++ b/ComfyUI-KwaiKolorsWrapper/configs/text_encoder_config.json @@ -0,0 +1,42 @@ +{ + "_name_or_path": "THUDM/chatglm3-6b-base", + "model_type": "chatglm", + "architectures": [ + "ChatGLMModel" + ], + "auto_map": { + "AutoConfig": "configuration_chatglm.ChatGLMConfig", + "AutoModel": "modeling_chatglm.ChatGLMForConditionalGeneration", + "AutoModelForCausalLM": "modeling_chatglm.ChatGLMForConditionalGeneration", + "AutoModelForSeq2SeqLM": "modeling_chatglm.ChatGLMForConditionalGeneration", + "AutoModelForSequenceClassification": "modeling_chatglm.ChatGLMForSequenceClassification" + }, + "add_bias_linear": false, + "add_qkv_bias": true, + "apply_query_key_layer_scaling": true, + "apply_residual_connection_post_layernorm": false, + "attention_dropout": 0.0, + "attention_softmax_in_fp32": true, + "bias_dropout_fusion": true, + "ffn_hidden_size": 13696, + "fp32_residual_connection": false, + "hidden_dropout": 0.0, + "hidden_size": 4096, + "kv_channels": 128, + "layernorm_epsilon": 1e-05, + "multi_query_attention": true, + "multi_query_group_num": 2, + "num_attention_heads": 32, + "num_layers": 28, + "original_rope": true, + "padded_vocab_size": 65024, + "post_layer_norm": true, + "rmsnorm": true, + "seq_length": 32768, + "use_cache": true, + "torch_dtype": "float16", + "transformers_version": "4.30.2", + "tie_word_embeddings": false, + "eos_token_id": 2, + "pad_token_id": 0 +} \ No newline at end of file diff --git a/ComfyUI-KwaiKolorsWrapper/configs/tokenizer/tokenizer.model b/ComfyUI-KwaiKolorsWrapper/configs/tokenizer/tokenizer.model new file mode 100644 index 0000000000000000000000000000000000000000..8a8007697b7cc3d3868dcffbbebf8c1f2bd690ba --- /dev/null +++ b/ComfyUI-KwaiKolorsWrapper/configs/tokenizer/tokenizer.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e7dc4c393423b76e4373e5157ddc34803a0189ba96b21ddbb40269d31468a6f2 +size 1018370 diff --git a/ComfyUI-KwaiKolorsWrapper/configs/tokenizer/tokenizer_config.json b/ComfyUI-KwaiKolorsWrapper/configs/tokenizer/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..f6f13c88707490cebd8023da86e8bf7a56fa21e3 --- /dev/null +++ b/ComfyUI-KwaiKolorsWrapper/configs/tokenizer/tokenizer_config.json @@ -0,0 +1,12 @@ +{ + "name_or_path": "THUDM/chatglm3-6b-base", + "remove_space": false, + "do_lower_case": false, + "tokenizer_class": "ChatGLMTokenizer", + "auto_map": { + "AutoTokenizer": [ + "tokenization_chatglm.ChatGLMTokenizer", + null + ] + } +} diff --git a/ComfyUI-KwaiKolorsWrapper/configs/tokenizer/vocab.txt b/ComfyUI-KwaiKolorsWrapper/configs/tokenizer/vocab.txt new 
file mode 100644 index 0000000000000000000000000000000000000000..8a8007697b7cc3d3868dcffbbebf8c1f2bd690ba --- /dev/null +++ b/ComfyUI-KwaiKolorsWrapper/configs/tokenizer/vocab.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e7dc4c393423b76e4373e5157ddc34803a0189ba96b21ddbb40269d31468a6f2 +size 1018370 diff --git a/ComfyUI-KwaiKolorsWrapper/kolors/__init__.py b/ComfyUI-KwaiKolorsWrapper/kolors/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI-KwaiKolorsWrapper/kolors/models/__init__.py b/ComfyUI-KwaiKolorsWrapper/kolors/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI-KwaiKolorsWrapper/kolors/models/configuration_chatglm.py b/ComfyUI-KwaiKolorsWrapper/kolors/models/configuration_chatglm.py new file mode 100644 index 0000000000000000000000000000000000000000..ec32e66d3e601916be56caa5250bac1b27240a8a --- /dev/null +++ b/ComfyUI-KwaiKolorsWrapper/kolors/models/configuration_chatglm.py @@ -0,0 +1,61 @@ +from transformers import PretrainedConfig + + +class ChatGLMConfig(PretrainedConfig): + model_type = "chatglm" + def __init__( + self, + num_layers=28, + padded_vocab_size=65024, + hidden_size=4096, + ffn_hidden_size=13696, + kv_channels=128, + num_attention_heads=32, + seq_length=2048, + hidden_dropout=0.0, + classifier_dropout=None, + attention_dropout=0.0, + layernorm_epsilon=1e-5, + rmsnorm=True, + apply_residual_connection_post_layernorm=False, + post_layer_norm=True, + add_bias_linear=False, + add_qkv_bias=False, + bias_dropout_fusion=True, + multi_query_attention=False, + multi_query_group_num=1, + apply_query_key_layer_scaling=True, + attention_softmax_in_fp32=True, + fp32_residual_connection=False, + quantization_bit=0, + pre_seq_len=None, + prefix_projection=False, + **kwargs + ): + self.num_layers = num_layers + self.vocab_size = padded_vocab_size + self.padded_vocab_size = padded_vocab_size + self.hidden_size = hidden_size + self.ffn_hidden_size = ffn_hidden_size + self.kv_channels = kv_channels + self.num_attention_heads = num_attention_heads + self.seq_length = seq_length + self.hidden_dropout = hidden_dropout + self.classifier_dropout = classifier_dropout + self.attention_dropout = attention_dropout + self.layernorm_epsilon = layernorm_epsilon + self.rmsnorm = rmsnorm + self.apply_residual_connection_post_layernorm = apply_residual_connection_post_layernorm + self.post_layer_norm = post_layer_norm + self.add_bias_linear = add_bias_linear + self.add_qkv_bias = add_qkv_bias + self.bias_dropout_fusion = bias_dropout_fusion + self.multi_query_attention = multi_query_attention + self.multi_query_group_num = multi_query_group_num + self.apply_query_key_layer_scaling = apply_query_key_layer_scaling + self.attention_softmax_in_fp32 = attention_softmax_in_fp32 + self.fp32_residual_connection = fp32_residual_connection + self.quantization_bit = quantization_bit + self.pre_seq_len = pre_seq_len + self.prefix_projection = prefix_projection + super().__init__(**kwargs) diff --git a/ComfyUI-KwaiKolorsWrapper/kolors/models/modeling_chatglm.py b/ComfyUI-KwaiKolorsWrapper/kolors/models/modeling_chatglm.py new file mode 100644 index 0000000000000000000000000000000000000000..9bffd6a82829d22229598fe8213cefa76066eac0 --- /dev/null +++ b/ComfyUI-KwaiKolorsWrapper/kolors/models/modeling_chatglm.py @@ -0,0 +1,1319 @@ +""" PyTorch ChatGLM model. 
""" + +import math +import copy +import warnings +import re +import sys + +import torch +import torch.utils.checkpoint +import torch.nn.functional as F +from torch import nn +from torch.nn import CrossEntropyLoss, LayerNorm +from torch.nn import CrossEntropyLoss, LayerNorm, MSELoss, BCEWithLogitsLoss +from torch.nn.utils import skip_init +from typing import Optional, Tuple, Union, List, Callable, Dict, Any +from copy import deepcopy + +from transformers.modeling_outputs import ( + BaseModelOutputWithPast, + CausalLMOutputWithPast, + SequenceClassifierOutputWithPast, +) +from transformers.modeling_utils import PreTrainedModel +from transformers.utils import logging +from transformers.generation.logits_process import LogitsProcessor +from transformers.generation.utils import LogitsProcessorList, StoppingCriteriaList, GenerationConfig, ModelOutput + +try: + from .configuration_chatglm import ChatGLMConfig +except: + from configuration_chatglm import ChatGLMConfig + +try: + import xformers.ops + xformers_available = True +except ImportError: + xformers_available = False + +# flags required to enable jit fusion kernels + +if sys.platform != 'darwin': + torch._C._jit_set_profiling_mode(False) + torch._C._jit_set_profiling_executor(False) + torch._C._jit_override_can_fuse_on_cpu(True) + torch._C._jit_override_can_fuse_on_gpu(True) + +logger = logging.get_logger(__name__) + +_CHECKPOINT_FOR_DOC = "THUDM/ChatGLM" +_CONFIG_FOR_DOC = "ChatGLM6BConfig" + +CHATGLM_6B_PRETRAINED_MODEL_ARCHIVE_LIST = [ + "THUDM/chatglm3-6b-base", + # See all ChatGLM models at https://huggingface.co/models?filter=chatglm +] + + +def default_init(cls, *args, **kwargs): + return cls(*args, **kwargs) + + +class InvalidScoreLogitsProcessor(LogitsProcessor): + def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor: + if torch.isnan(scores).any() or torch.isinf(scores).any(): + scores.zero_() + scores[..., 5] = 5e4 + return scores + + +class PrefixEncoder(torch.nn.Module): + """ + The torch.nn model to encode the prefix + Input shape: (batch-size, prefix-length) + Output shape: (batch-size, prefix-length, 2*layers*hidden) + """ + + def __init__(self, config: ChatGLMConfig): + super().__init__() + self.prefix_projection = config.prefix_projection + if self.prefix_projection: + # Use a two-layer MLP to encode the prefix + kv_size = config.num_layers * config.kv_channels * config.multi_query_group_num * 2 + self.embedding = torch.nn.Embedding(config.pre_seq_len, kv_size) + self.trans = torch.nn.Sequential( + torch.nn.Linear(kv_size, config.hidden_size), + torch.nn.Tanh(), + torch.nn.Linear(config.hidden_size, kv_size) + ) + else: + self.embedding = torch.nn.Embedding(config.pre_seq_len, + config.num_layers * config.kv_channels * config.multi_query_group_num * 2) + + def forward(self, prefix: torch.Tensor): + if self.prefix_projection: + prefix_tokens = self.embedding(prefix) + past_key_values = self.trans(prefix_tokens) + else: + past_key_values = self.embedding(prefix) + return past_key_values + + +def split_tensor_along_last_dim( + tensor: torch.Tensor, + num_partitions: int, + contiguous_split_chunks: bool = False, +) -> List[torch.Tensor]: + """Split a tensor along its last dimension. + + Arguments: + tensor: input tensor. + num_partitions: number of partitions to split the tensor + contiguous_split_chunks: If True, make each chunk contiguous + in memory. + + Returns: + A list of Tensors + """ + # Get the size and dimension. 
+ last_dim = tensor.dim() - 1 + last_dim_size = tensor.size()[last_dim] // num_partitions + # Split. + tensor_list = torch.split(tensor, last_dim_size, dim=last_dim) + # Note: torch.split does not create contiguous tensors by default. + if contiguous_split_chunks: + return tuple(chunk.contiguous() for chunk in tensor_list) + + return tensor_list + + +class RotaryEmbedding(nn.Module): + def __init__(self, dim, original_impl=False, device=None, dtype=None): + super().__init__() + inv_freq = 1.0 / (10000 ** (torch.arange(0, dim, 2, device=device).to(dtype=dtype) / dim)) + self.register_buffer("inv_freq", inv_freq) + self.dim = dim + self.original_impl = original_impl + + def forward_impl( + self, seq_len: int, n_elem: int, dtype: torch.dtype, device: torch.device, base: int = 10000 + ): + """Enhanced Transformer with Rotary Position Embedding. + + Derived from: https://github.com/labmlai/annotated_deep_learning_paper_implementations/blob/master/labml_nn/ + transformers/rope/__init__.py. MIT License: + https://github.com/labmlai/annotated_deep_learning_paper_implementations/blob/master/license. + """ + # $\Theta = {\theta_i = 10000^{\frac{2(i-1)}{d}}, i \in [1, 2, ..., \frac{d}{2}]}$ + theta = 1.0 / (base ** (torch.arange(0, n_elem, 2, dtype=torch.float, device=device) / n_elem)) + + # Create position indexes `[0, 1, ..., seq_len - 1]` + seq_idx = torch.arange(seq_len, dtype=torch.float, device=device) + + # Calculate the product of position index and $\theta_i$ + idx_theta = torch.outer(seq_idx, theta).float() + + cache = torch.stack([torch.cos(idx_theta), torch.sin(idx_theta)], dim=-1) + + # this is to mimic the behaviour of complex32, else we will get different results + if dtype in (torch.float16, torch.bfloat16, torch.int8): + cache = cache.bfloat16() if dtype == torch.bfloat16 else cache.half() + return cache + + def forward(self, max_seq_len, offset=0): + return self.forward_impl( + max_seq_len, self.dim, dtype=self.inv_freq.dtype, device=self.inv_freq.device + ) + + +@torch.jit.script +def apply_rotary_pos_emb(x: torch.Tensor, rope_cache: torch.Tensor) -> torch.Tensor: + # x: [sq, b, np, hn] + sq, b, np, hn = x.size(0), x.size(1), x.size(2), x.size(3) + rot_dim = rope_cache.shape[-2] * 2 + x, x_pass = x[..., :rot_dim], x[..., rot_dim:] + # truncate to support variable sizes + rope_cache = rope_cache[:sq] + xshaped = x.reshape(sq, -1, np, rot_dim // 2, 2) + rope_cache = rope_cache.view(sq, -1, 1, xshaped.size(3), 2) + x_out2 = torch.stack( + [ + xshaped[..., 0] * rope_cache[..., 0] - xshaped[..., 1] * rope_cache[..., 1], + xshaped[..., 1] * rope_cache[..., 0] + xshaped[..., 0] * rope_cache[..., 1], + ], + -1, + ) + x_out2 = x_out2.flatten(3) + return torch.cat((x_out2, x_pass), dim=-1) + + +class RMSNorm(torch.nn.Module): + def __init__(self, normalized_shape, eps=1e-5, device=None, dtype=None, **kwargs): + super().__init__() + self.weight = torch.nn.Parameter(torch.empty(normalized_shape, device=device, dtype=dtype)) + self.eps = eps + + def forward(self, hidden_states: torch.Tensor): + input_dtype = hidden_states.dtype + variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True) + hidden_states = hidden_states * torch.rsqrt(variance + self.eps) + + return (self.weight * hidden_states).to(input_dtype) + + +class CoreAttention(torch.nn.Module): + def __init__(self, config: ChatGLMConfig, layer_number): + super(CoreAttention, self).__init__() + + self.apply_query_key_layer_scaling = config.apply_query_key_layer_scaling + self.attention_softmax_in_fp32 = 
config.attention_softmax_in_fp32 + if self.apply_query_key_layer_scaling: + self.attention_softmax_in_fp32 = True + self.layer_number = max(1, layer_number) + + projection_size = config.kv_channels * config.num_attention_heads + + # Per attention head and per partition values. + self.hidden_size_per_partition = projection_size + self.hidden_size_per_attention_head = projection_size // config.num_attention_heads + self.num_attention_heads_per_partition = config.num_attention_heads + + coeff = None + self.norm_factor = math.sqrt(self.hidden_size_per_attention_head) + if self.apply_query_key_layer_scaling: + coeff = self.layer_number + self.norm_factor *= coeff + self.coeff = coeff + + self.attention_dropout = torch.nn.Dropout(config.attention_dropout) + + def forward(self, query_layer, key_layer, value_layer, attention_mask): + if False: #xformers_available: + print('attention forward with xformers.') + query_layer = query_layer.permute(1, 2, 0, 3) + key_layer = key_layer.permute(1, 2, 0, 3) + value_layer = value_layer.permute(1, 2, 0, 3) + if attention_mask is None and query_layer.shape[2] == key_layer.shape[2]: + context_layer = xformers.ops.memory_efficient_attention(query_layer, key_layer, value_layer, attn_bias=None) + else: + if attention_mask is not None: + attention_mask = ~attention_mask + context_layer = xformers.ops.memory_efficient_attention(query_layer, key_layer, value_layer, attn_bias=attention_mask) + context_layer = context_layer.permute(2, 0, 1, 3) + new_context_layer_shape = context_layer.size()[:-2] + (self.hidden_size_per_partition,) + context_layer = context_layer.reshape(*new_context_layer_shape) + else: + #print('attention forward with pytorch.') + pytorch_major_version = int(torch.__version__.split('.')[0]) + if pytorch_major_version >= 2: + query_layer, key_layer, value_layer = [k.permute(1, 2, 0, 3) for k in [query_layer, key_layer, value_layer]] + if attention_mask is None and query_layer.shape[2] == key_layer.shape[2]: + context_layer = torch.nn.functional.scaled_dot_product_attention(query_layer, key_layer, value_layer, + is_causal=True) + else: + if attention_mask is not None: + attention_mask = ~attention_mask + context_layer = torch.nn.functional.scaled_dot_product_attention(query_layer, key_layer, value_layer, + attention_mask) + context_layer = context_layer.permute(2, 0, 1, 3) + new_context_layer_shape = context_layer.size()[:-2] + (self.hidden_size_per_partition,) + context_layer = context_layer.reshape(*new_context_layer_shape) + else: + # Raw attention scores + + # [b, np, sq, sk] + output_size = (query_layer.size(1), query_layer.size(2), query_layer.size(0), key_layer.size(0)) + + # [sq, b, np, hn] -> [sq, b * np, hn] + query_layer = query_layer.view(output_size[2], output_size[0] * output_size[1], -1) + # [sk, b, np, hn] -> [sk, b * np, hn] + key_layer = key_layer.view(output_size[3], output_size[0] * output_size[1], -1) + + # preallocting input tensor: [b * np, sq, sk] + matmul_input_buffer = torch.empty( + output_size[0] * output_size[1], output_size[2], output_size[3], dtype=query_layer.dtype, + device=query_layer.device + ) + + # Raw attention scores. 
[b * np, sq, sk] + matmul_result = torch.baddbmm( + matmul_input_buffer, + query_layer.transpose(0, 1), # [b * np, sq, hn] + key_layer.transpose(0, 1).transpose(1, 2), # [b * np, hn, sk] + beta=0.0, + alpha=(1.0 / self.norm_factor), + ) + + # change view to [b, np, sq, sk] + attention_scores = matmul_result.view(*output_size) + + # =========================== + # Attention probs and dropout + # =========================== + + # attention scores and attention mask [b, np, sq, sk] + if self.attention_softmax_in_fp32: + attention_scores = attention_scores.float() + if self.coeff is not None: + attention_scores = attention_scores * self.coeff + if attention_mask is None and attention_scores.shape[2] == attention_scores.shape[3]: + attention_mask = torch.ones(output_size[0], 1, output_size[2], output_size[3], + device=attention_scores.device, dtype=torch.bool) + attention_mask.tril_() + attention_mask = ~attention_mask + if attention_mask is not None: + attention_scores = attention_scores.masked_fill(attention_mask, float("-inf")) + attention_probs = F.softmax(attention_scores, dim=-1) + attention_probs = attention_probs.type_as(value_layer) + + # This is actually dropping out entire tokens to attend to, which might + # seem a bit unusual, but is taken from the original Transformer paper. + attention_probs = self.attention_dropout(attention_probs) + # ========================= + # Context layer. [sq, b, hp] + # ========================= + + # value_layer -> context layer. + # [sk, b, np, hn] --> [b, np, sq, hn] + + # context layer shape: [b, np, sq, hn] + output_size = (value_layer.size(1), value_layer.size(2), query_layer.size(0), value_layer.size(3)) + # change view [sk, b * np, hn] + value_layer = value_layer.view(value_layer.size(0), output_size[0] * output_size[1], -1) + # change view [b * np, sq, sk] + attention_probs = attention_probs.view(output_size[0] * output_size[1], output_size[2], -1) + # matmul: [b * np, sq, hn] + context_layer = torch.bmm(attention_probs, value_layer.transpose(0, 1)) + # change view [b, np, sq, hn] + context_layer = context_layer.view(*output_size) + # [b, np, sq, hn] --> [sq, b, np, hn] + context_layer = context_layer.permute(2, 0, 1, 3).contiguous() + # [sq, b, np, hn] --> [sq, b, hp] + new_context_layer_shape = context_layer.size()[:-2] + (self.hidden_size_per_partition,) + context_layer = context_layer.view(*new_context_layer_shape) + + return context_layer + + +class SelfAttention(torch.nn.Module): + """Parallel self-attention layer abstract class. + + Self-attention layer takes input with size [s, b, h] + and returns output of the same size. + """ + + def __init__(self, config: ChatGLMConfig, layer_number, device=None): + super(SelfAttention, self).__init__() + self.layer_number = max(1, layer_number) + + self.projection_size = config.kv_channels * config.num_attention_heads + + # Per attention head and per partition values. 
+ self.hidden_size_per_attention_head = self.projection_size // config.num_attention_heads + self.num_attention_heads_per_partition = config.num_attention_heads + + self.multi_query_attention = config.multi_query_attention + self.qkv_hidden_size = 3 * self.projection_size + if self.multi_query_attention: + self.num_multi_query_groups_per_partition = config.multi_query_group_num + self.qkv_hidden_size = ( + self.projection_size + 2 * self.hidden_size_per_attention_head * config.multi_query_group_num + ) + self.query_key_value = nn.Linear(config.hidden_size, self.qkv_hidden_size, + bias=config.add_bias_linear or config.add_qkv_bias, + device=device, **_config_to_kwargs(config) + ) + + self.core_attention = CoreAttention(config, self.layer_number) + + # Output. + self.dense = nn.Linear(self.projection_size, config.hidden_size, bias=config.add_bias_linear, + device=device, **_config_to_kwargs(config) + ) + + def _allocate_memory(self, inference_max_sequence_len, batch_size, device=None, dtype=None): + if self.multi_query_attention: + num_attention_heads = self.num_multi_query_groups_per_partition + else: + num_attention_heads = self.num_attention_heads_per_partition + return torch.empty( + inference_max_sequence_len, + batch_size, + num_attention_heads, + self.hidden_size_per_attention_head, + dtype=dtype, + device=device, + ) + + def forward( + self, hidden_states, attention_mask, rotary_pos_emb, kv_cache=None, use_cache=True + ): + # hidden_states: [sq, b, h] + + # ================================================= + # Pre-allocate memory for key-values for inference. + # ================================================= + # ===================== + # Query, Key, and Value + # ===================== + + # Attention heads [sq, b, h] --> [sq, b, (np * 3 * hn)] + mixed_x_layer = self.query_key_value(hidden_states) + + if self.multi_query_attention: + (query_layer, key_layer, value_layer) = mixed_x_layer.split( + [ + self.num_attention_heads_per_partition * self.hidden_size_per_attention_head, + self.num_multi_query_groups_per_partition * self.hidden_size_per_attention_head, + self.num_multi_query_groups_per_partition * self.hidden_size_per_attention_head, + ], + dim=-1, + ) + query_layer = query_layer.view( + query_layer.size()[:-1] + (self.num_attention_heads_per_partition, self.hidden_size_per_attention_head) + ) + key_layer = key_layer.view( + key_layer.size()[:-1] + (self.num_multi_query_groups_per_partition, self.hidden_size_per_attention_head) + ) + value_layer = value_layer.view( + value_layer.size()[:-1] + + (self.num_multi_query_groups_per_partition, self.hidden_size_per_attention_head) + ) + else: + new_tensor_shape = mixed_x_layer.size()[:-1] + \ + (self.num_attention_heads_per_partition, + 3 * self.hidden_size_per_attention_head) + mixed_x_layer = mixed_x_layer.view(*new_tensor_shape) + + # [sq, b, np, 3 * hn] --> 3 [sq, b, np, hn] + (query_layer, key_layer, value_layer) = split_tensor_along_last_dim(mixed_x_layer, 3) + + # apply relative positional encoding (rotary embedding) + if rotary_pos_emb is not None: + query_layer = apply_rotary_pos_emb(query_layer, rotary_pos_emb) + key_layer = apply_rotary_pos_emb(key_layer, rotary_pos_emb) + + # adjust key and value for inference + if kv_cache is not None: + cache_k, cache_v = kv_cache + key_layer = torch.cat((cache_k, key_layer), dim=0) + value_layer = torch.cat((cache_v, value_layer), dim=0) + if use_cache: + kv_cache = (key_layer, value_layer) + else: + kv_cache = None + + if self.multi_query_attention: + key_layer = 
key_layer.unsqueeze(-2) + key_layer = key_layer.expand( + -1, -1, -1, self.num_attention_heads_per_partition // self.num_multi_query_groups_per_partition, -1 + ) + key_layer = key_layer.contiguous().view( + key_layer.size()[:2] + (self.num_attention_heads_per_partition, self.hidden_size_per_attention_head) + ) + value_layer = value_layer.unsqueeze(-2) + value_layer = value_layer.expand( + -1, -1, -1, self.num_attention_heads_per_partition // self.num_multi_query_groups_per_partition, -1 + ) + value_layer = value_layer.contiguous().view( + value_layer.size()[:2] + (self.num_attention_heads_per_partition, self.hidden_size_per_attention_head) + ) + + # ================================== + # core attention computation + # ================================== + + context_layer = self.core_attention(query_layer, key_layer, value_layer, attention_mask) + + # ================= + # Output. [sq, b, h] + # ================= + + output = self.dense(context_layer) + + return output, kv_cache + + +def _config_to_kwargs(args): + common_kwargs = { + "dtype": args.torch_dtype, + } + return common_kwargs + + +class MLP(torch.nn.Module): + """MLP. + + MLP will take the input with h hidden state, project it to 4*h + hidden dimension, perform nonlinear transformation, and project the + state back into h hidden dimension. + """ + + def __init__(self, config: ChatGLMConfig, device=None): + super(MLP, self).__init__() + + self.add_bias = config.add_bias_linear + + # Project to 4h. If using swiglu double the output width, see https://arxiv.org/pdf/2002.05202.pdf + self.dense_h_to_4h = nn.Linear( + config.hidden_size, + config.ffn_hidden_size * 2, + bias=self.add_bias, + device=device, + **_config_to_kwargs(config) + ) + + def swiglu(x): + x = torch.chunk(x, 2, dim=-1) + return F.silu(x[0]) * x[1] + + self.activation_func = swiglu + + # Project back to h. + self.dense_4h_to_h = nn.Linear( + config.ffn_hidden_size, + config.hidden_size, + bias=self.add_bias, + device=device, + **_config_to_kwargs(config) + ) + + def forward(self, hidden_states): + # [s, b, 4hp] + intermediate_parallel = self.dense_h_to_4h(hidden_states) + intermediate_parallel = self.activation_func(intermediate_parallel) + # [s, b, h] + output = self.dense_4h_to_h(intermediate_parallel) + return output + + +class GLMBlock(torch.nn.Module): + """A single transformer layer. + + Transformer layer takes input with size [s, b, h] and returns an + output of the same size. + """ + + def __init__(self, config: ChatGLMConfig, layer_number, device=None): + super(GLMBlock, self).__init__() + self.layer_number = layer_number + + self.apply_residual_connection_post_layernorm = config.apply_residual_connection_post_layernorm + + self.fp32_residual_connection = config.fp32_residual_connection + + LayerNormFunc = RMSNorm if config.rmsnorm else LayerNorm + # Layernorm on the input data. + self.input_layernorm = LayerNormFunc(config.hidden_size, eps=config.layernorm_epsilon, device=device, + dtype=config.torch_dtype) + + # Self attention. 
+ self.self_attention = SelfAttention(config, layer_number, device=device) + self.hidden_dropout = config.hidden_dropout + + # Layernorm on the attention output + self.post_attention_layernorm = LayerNormFunc(config.hidden_size, eps=config.layernorm_epsilon, device=device, + dtype=config.torch_dtype) + + # MLP + self.mlp = MLP(config, device=device) + + def forward( + self, hidden_states, attention_mask, rotary_pos_emb, kv_cache=None, use_cache=True, + ): + # hidden_states: [s, b, h] + + # Layer norm at the beginning of the transformer layer. + layernorm_output = self.input_layernorm(hidden_states) + # Self attention. + attention_output, kv_cache = self.self_attention( + layernorm_output, + attention_mask, + rotary_pos_emb, + kv_cache=kv_cache, + use_cache=use_cache + ) + + # Residual connection. + if self.apply_residual_connection_post_layernorm: + residual = layernorm_output + else: + residual = hidden_states + + layernorm_input = torch.nn.functional.dropout(attention_output, p=self.hidden_dropout, training=self.training) + layernorm_input = residual + layernorm_input + + # Layer norm post the self attention. + layernorm_output = self.post_attention_layernorm(layernorm_input) + + # MLP. + mlp_output = self.mlp(layernorm_output) + + # Second residual connection. + if self.apply_residual_connection_post_layernorm: + residual = layernorm_output + else: + residual = layernorm_input + + output = torch.nn.functional.dropout(mlp_output, p=self.hidden_dropout, training=self.training) + output = residual + output + + return output, kv_cache + + +class GLMTransformer(torch.nn.Module): + """Transformer class.""" + + def __init__(self, config: ChatGLMConfig, device=None): + super(GLMTransformer, self).__init__() + + self.fp32_residual_connection = config.fp32_residual_connection + self.post_layer_norm = config.post_layer_norm + + # Number of layers. + self.num_layers = config.num_layers + + # Transformer layers. + def build_layer(layer_number): + return GLMBlock(config, layer_number, device=device) + + self.layers = torch.nn.ModuleList([build_layer(i + 1) for i in range(self.num_layers)]) + + if self.post_layer_norm: + LayerNormFunc = RMSNorm if config.rmsnorm else LayerNorm + # Final layer norm before output. + self.final_layernorm = LayerNormFunc(config.hidden_size, eps=config.layernorm_epsilon, device=device, + dtype=config.torch_dtype) + + self.gradient_checkpointing = False + + def _get_layer(self, layer_number): + return self.layers[layer_number] + + def forward( + self, hidden_states, attention_mask, rotary_pos_emb, kv_caches=None, + use_cache: Optional[bool] = True, + output_hidden_states: Optional[bool] = False, + ): + if not kv_caches: + kv_caches = [None for _ in range(self.num_layers)] + presents = () if use_cache else None + if self.gradient_checkpointing and self.training: + if use_cache: + logger.warning_once( + "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..." 
+ ) + use_cache = False + + all_self_attentions = None + all_hidden_states = () if output_hidden_states else None + for index in range(self.num_layers): + if output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states,) + + layer = self._get_layer(index) + if self.gradient_checkpointing and self.training: + layer_ret = torch.utils.checkpoint.checkpoint( + layer, + hidden_states, + attention_mask, + rotary_pos_emb, + kv_caches[index], + use_cache + ) + else: + layer_ret = layer( + hidden_states, + attention_mask, + rotary_pos_emb, + kv_cache=kv_caches[index], + use_cache=use_cache + ) + hidden_states, kv_cache = layer_ret + if use_cache: + presents = presents + (kv_cache,) + + if output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states,) + + # Final layer norm. + if self.post_layer_norm: + hidden_states = self.final_layernorm(hidden_states) + + return hidden_states, presents, all_hidden_states, all_self_attentions + + +class ChatGLMPreTrainedModel(PreTrainedModel): + """ + An abstract class to handle weights initialization and + a simple interface for downloading and loading pretrained models. + """ + + is_parallelizable = False + supports_gradient_checkpointing = True + config_class = ChatGLMConfig + base_model_prefix = "transformer" + _no_split_modules = ["GLMBlock"] + + def _init_weights(self, module: nn.Module): + """Initialize the weights.""" + return + + def get_masks(self, input_ids, past_key_values, padding_mask=None): + batch_size, seq_length = input_ids.shape + full_attention_mask = torch.ones(batch_size, seq_length, seq_length, device=input_ids.device) + full_attention_mask.tril_() + past_length = 0 + if past_key_values: + past_length = past_key_values[0][0].shape[0] + if past_length: + full_attention_mask = torch.cat((torch.ones(batch_size, seq_length, past_length, + device=input_ids.device), full_attention_mask), dim=-1) + if padding_mask is not None: + full_attention_mask = full_attention_mask * padding_mask.unsqueeze(1) + if not past_length and padding_mask is not None: + full_attention_mask -= padding_mask.unsqueeze(-1) - 1 + full_attention_mask = (full_attention_mask < 0.5).bool() + full_attention_mask.unsqueeze_(1) + return full_attention_mask + + def get_position_ids(self, input_ids, device): + batch_size, seq_length = input_ids.shape + position_ids = torch.arange(seq_length, dtype=torch.long, device=device).unsqueeze(0).repeat(batch_size, 1) + return position_ids + + def _set_gradient_checkpointing(self, module, value=False): + if isinstance(module, GLMTransformer): + module.gradient_checkpointing = value + + +class Embedding(torch.nn.Module): + """Language model embeddings.""" + + def __init__(self, config: ChatGLMConfig, device=None): + super(Embedding, self).__init__() + + self.hidden_size = config.hidden_size + # Word embeddings (parallel). + self.word_embeddings = nn.Embedding( + config.padded_vocab_size, + self.hidden_size, + dtype=config.torch_dtype, + device=device + ) + self.fp32_residual_connection = config.fp32_residual_connection + + def forward(self, input_ids): + # Embeddings. + words_embeddings = self.word_embeddings(input_ids) + embeddings = words_embeddings + # Data format change to avoid explicit tranposes : [b s h] --> [s b h]. + embeddings = embeddings.transpose(0, 1).contiguous() + # If the input flag for fp32 residual connection is set, convert for float. 
+ if self.fp32_residual_connection: + embeddings = embeddings.float() + return embeddings + + +class ChatGLMModel(ChatGLMPreTrainedModel): + def __init__(self, config: ChatGLMConfig, device=None, empty_init=True): + super().__init__(config) + if empty_init: + init_method = skip_init + else: + init_method = default_init + init_kwargs = {} + if device is not None: + init_kwargs["device"] = device + self.embedding = init_method(Embedding, config, **init_kwargs) + self.num_layers = config.num_layers + self.multi_query_group_num = config.multi_query_group_num + self.kv_channels = config.kv_channels + + # Rotary positional embeddings + self.seq_length = config.seq_length + rotary_dim = ( + config.hidden_size // config.num_attention_heads if config.kv_channels is None else config.kv_channels + ) + + self.rotary_pos_emb = RotaryEmbedding(rotary_dim // 2, original_impl=config.original_rope, device=device, + dtype=config.torch_dtype) + self.encoder = init_method(GLMTransformer, config, **init_kwargs) + self.output_layer = init_method(nn.Linear, config.hidden_size, config.padded_vocab_size, bias=False, + dtype=config.torch_dtype, **init_kwargs) + self.pre_seq_len = config.pre_seq_len + self.prefix_projection = config.prefix_projection + if self.pre_seq_len is not None: + for param in self.parameters(): + param.requires_grad = False + self.prefix_tokens = torch.arange(self.pre_seq_len).long() + self.prefix_encoder = PrefixEncoder(config) + self.dropout = torch.nn.Dropout(0.1) + + def get_input_embeddings(self): + return self.embedding.word_embeddings + + def get_prompt(self, batch_size, device, dtype=torch.half): + prefix_tokens = self.prefix_tokens.unsqueeze(0).expand(batch_size, -1).to(device) + past_key_values = self.prefix_encoder(prefix_tokens).type(dtype) + past_key_values = past_key_values.view( + batch_size, + self.pre_seq_len, + self.num_layers * 2, + self.multi_query_group_num, + self.kv_channels + ) + # seq_len, b, nh, hidden_size + past_key_values = self.dropout(past_key_values) + past_key_values = past_key_values.permute([2, 1, 0, 3, 4]).split(2) + return past_key_values + + def forward( + self, + input_ids, + position_ids: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.BoolTensor] = None, + full_attention_mask: Optional[torch.BoolTensor] = None, + past_key_values: Optional[Tuple[Tuple[torch.Tensor, torch.Tensor], ...]] = None, + inputs_embeds: Optional[torch.Tensor] = None, + use_cache: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ): + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + use_cache = use_cache if use_cache is not None else self.config.use_cache + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + batch_size, seq_length = input_ids.shape + + if inputs_embeds is None: + inputs_embeds = self.embedding(input_ids) + + if self.pre_seq_len is not None: + if past_key_values is None: + past_key_values = self.get_prompt(batch_size=batch_size, device=input_ids.device, + dtype=inputs_embeds.dtype) + if attention_mask is not None: + attention_mask = torch.cat([attention_mask.new_ones((batch_size, self.pre_seq_len)), + attention_mask], dim=-1) + + if full_attention_mask is None: + if (attention_mask is not None and not attention_mask.all()) or (past_key_values and seq_length != 1): + full_attention_mask = self.get_masks(input_ids, past_key_values, padding_mask=attention_mask) + + # Rotary 
positional embeddings + rotary_pos_emb = self.rotary_pos_emb(self.seq_length) + if position_ids is not None: + rotary_pos_emb = rotary_pos_emb[position_ids] + else: + rotary_pos_emb = rotary_pos_emb[None, :seq_length] + rotary_pos_emb = rotary_pos_emb.transpose(0, 1).contiguous() + + # Run encoder. + hidden_states, presents, all_hidden_states, all_self_attentions = self.encoder( + inputs_embeds, full_attention_mask, rotary_pos_emb=rotary_pos_emb, + kv_caches=past_key_values, use_cache=use_cache, output_hidden_states=output_hidden_states + ) + + if not return_dict: + return tuple(v for v in [hidden_states, presents, all_hidden_states, all_self_attentions] if v is not None) + + return BaseModelOutputWithPast( + last_hidden_state=hidden_states, + past_key_values=presents, + hidden_states=all_hidden_states, + attentions=all_self_attentions, + ) + + def quantize(self, weight_bit_width: int): + from .quantization import quantize + quantize(self.encoder, weight_bit_width) + return self + + +class ChatGLMForConditionalGeneration(ChatGLMPreTrainedModel): + def __init__(self, config: ChatGLMConfig, empty_init=True, device=None): + super().__init__(config) + + self.max_sequence_length = config.max_length + self.transformer = ChatGLMModel(config, empty_init=empty_init, device=device) + self.config = config + self.quantized = False + + if self.config.quantization_bit: + self.quantize(self.config.quantization_bit, empty_init=True) + + def _update_model_kwargs_for_generation( + self, + outputs: ModelOutput, + model_kwargs: Dict[str, Any], + is_encoder_decoder: bool = False, + standardize_cache_format: bool = False, + ) -> Dict[str, Any]: + # update past_key_values + model_kwargs["past_key_values"] = self._extract_past_from_model_output( + outputs, standardize_cache_format=standardize_cache_format + ) + + # update attention mask + if "attention_mask" in model_kwargs: + attention_mask = model_kwargs["attention_mask"] + model_kwargs["attention_mask"] = torch.cat( + [attention_mask, attention_mask.new_ones((attention_mask.shape[0], 1))], dim=-1 + ) + + # update position ids + if "position_ids" in model_kwargs: + position_ids = model_kwargs["position_ids"] + new_position_id = position_ids[..., -1:].clone() + new_position_id += 1 + model_kwargs["position_ids"] = torch.cat( + [position_ids, new_position_id], dim=-1 + ) + + model_kwargs["is_first_forward"] = False + return model_kwargs + + def prepare_inputs_for_generation( + self, + input_ids: torch.LongTensor, + past_key_values: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.Tensor] = None, + use_cache: Optional[bool] = None, + is_first_forward: bool = True, + **kwargs + ) -> dict: + # only last token for input_ids if past is not None + if position_ids is None: + position_ids = self.get_position_ids(input_ids, device=input_ids.device) + if not is_first_forward: + if past_key_values is not None: + position_ids = position_ids[..., -1:] + input_ids = input_ids[:, -1:] + return { + "input_ids": input_ids, + "past_key_values": past_key_values, + "position_ids": position_ids, + "attention_mask": attention_mask, + "return_last_logit": True, + "use_cache": use_cache + } + + def forward( + self, + input_ids: Optional[torch.Tensor] = None, + position_ids: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + past_key_values: Optional[Tuple[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.Tensor] = None, + labels: Optional[torch.Tensor] = None, + use_cache: 
Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + return_last_logit: Optional[bool] = False, + ): + use_cache = use_cache if use_cache is not None else self.config.use_cache + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + transformer_outputs = self.transformer( + input_ids=input_ids, + position_ids=position_ids, + attention_mask=attention_mask, + past_key_values=past_key_values, + inputs_embeds=inputs_embeds, + use_cache=use_cache, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + hidden_states = transformer_outputs[0] + if return_last_logit: + hidden_states = hidden_states[-1:] + lm_logits = self.transformer.output_layer(hidden_states) + lm_logits = lm_logits.transpose(0, 1).contiguous() + + loss = None + if labels is not None: + lm_logits = lm_logits.to(torch.float32) + + # Shift so that tokens < n predict n + shift_logits = lm_logits[..., :-1, :].contiguous() + shift_labels = labels[..., 1:].contiguous() + # Flatten the tokens + loss_fct = CrossEntropyLoss(ignore_index=-100) + loss = loss_fct(shift_logits.view(-1, shift_logits.size(-1)), shift_labels.view(-1)) + + lm_logits = lm_logits.to(hidden_states.dtype) + loss = loss.to(hidden_states.dtype) + + if not return_dict: + output = (lm_logits,) + transformer_outputs[1:] + return ((loss,) + output) if loss is not None else output + + return CausalLMOutputWithPast( + loss=loss, + logits=lm_logits, + past_key_values=transformer_outputs.past_key_values, + hidden_states=transformer_outputs.hidden_states, + attentions=transformer_outputs.attentions, + ) + + @staticmethod + def _reorder_cache( + past: Tuple[Tuple[torch.Tensor, torch.Tensor], ...], beam_idx: torch.LongTensor + ) -> Tuple[Tuple[torch.Tensor, torch.Tensor], ...]: + """ + This function is used to re-order the `past_key_values` cache if [`~PreTrainedModel.beam_search`] or + [`~PreTrainedModel.beam_sample`] is called. This is required to match `past_key_values` with the correct + beam_idx at every generation step. + + Output shares the same memory storage as `past`. 
+ """ + return tuple( + ( + layer_past[0].index_select(1, beam_idx.to(layer_past[0].device)), + layer_past[1].index_select(1, beam_idx.to(layer_past[1].device)), + ) + for layer_past in past + ) + + def process_response(self, output, history): + content = "" + history = deepcopy(history) + for response in output.split("<|assistant|>"): + metadata, content = response.split("\n", maxsplit=1) + if not metadata.strip(): + content = content.strip() + history.append({"role": "assistant", "metadata": metadata, "content": content}) + content = content.replace("[[训练时间]]", "2023年") + else: + history.append({"role": "assistant", "metadata": metadata, "content": content}) + if history[0]["role"] == "system" and "tools" in history[0]: + content = "\n".join(content.split("\n")[1:-1]) + def tool_call(**kwargs): + return kwargs + parameters = eval(content) + content = {"name": metadata.strip(), "parameters": parameters} + else: + content = {"name": metadata.strip(), "content": content} + return content, history + + @torch.inference_mode() + def chat(self, tokenizer, query: str, history: List[Tuple[str, str]] = None, role: str = "user", + max_length: int = 8192, num_beams=1, do_sample=True, top_p=0.8, temperature=0.8, logits_processor=None, + **kwargs): + if history is None: + history = [] + if logits_processor is None: + logits_processor = LogitsProcessorList() + logits_processor.append(InvalidScoreLogitsProcessor()) + gen_kwargs = {"max_length": max_length, "num_beams": num_beams, "do_sample": do_sample, "top_p": top_p, + "temperature": temperature, "logits_processor": logits_processor, **kwargs} + inputs = tokenizer.build_chat_input(query, history=history, role=role) + inputs = inputs.to(self.device) + eos_token_id = [tokenizer.eos_token_id, tokenizer.get_command("<|user|>"), + tokenizer.get_command("<|observation|>")] + outputs = self.generate(**inputs, **gen_kwargs, eos_token_id=eos_token_id) + outputs = outputs.tolist()[0][len(inputs["input_ids"][0]):-1] + response = tokenizer.decode(outputs) + history.append({"role": role, "content": query}) + response, history = self.process_response(response, history) + return response, history + + @torch.inference_mode() + def stream_chat(self, tokenizer, query: str, history: List[Tuple[str, str]] = None, role: str = "user", + past_key_values=None,max_length: int = 8192, do_sample=True, top_p=0.8, temperature=0.8, + logits_processor=None, return_past_key_values=False, **kwargs): + if history is None: + history = [] + if logits_processor is None: + logits_processor = LogitsProcessorList() + logits_processor.append(InvalidScoreLogitsProcessor()) + eos_token_id = [tokenizer.eos_token_id, tokenizer.get_command("<|user|>"), + tokenizer.get_command("<|observation|>")] + gen_kwargs = {"max_length": max_length, "do_sample": do_sample, "top_p": top_p, + "temperature": temperature, "logits_processor": logits_processor, **kwargs} + if past_key_values is None: + inputs = tokenizer.build_chat_input(query, history=history, role=role) + else: + inputs = tokenizer.build_chat_input(query, role=role) + inputs = inputs.to(self.device) + if past_key_values is not None: + past_length = past_key_values[0][0].shape[0] + if self.transformer.pre_seq_len is not None: + past_length -= self.transformer.pre_seq_len + inputs.position_ids += past_length + attention_mask = inputs.attention_mask + attention_mask = torch.cat((attention_mask.new_ones(1, past_length), attention_mask), dim=1) + inputs['attention_mask'] = attention_mask + history.append({"role": role, "content": query}) + for outputs 
in self.stream_generate(**inputs, past_key_values=past_key_values, + eos_token_id=eos_token_id, return_past_key_values=return_past_key_values, + **gen_kwargs): + if return_past_key_values: + outputs, past_key_values = outputs + outputs = outputs.tolist()[0][len(inputs["input_ids"][0]):-1] + response = tokenizer.decode(outputs) + if response and response[-1] != "�": + response, new_history = self.process_response(response, history) + if return_past_key_values: + yield response, new_history, past_key_values + else: + yield response, new_history + + @torch.inference_mode() + def stream_generate( + self, + input_ids, + generation_config: Optional[GenerationConfig] = None, + logits_processor: Optional[LogitsProcessorList] = None, + stopping_criteria: Optional[StoppingCriteriaList] = None, + prefix_allowed_tokens_fn: Optional[Callable[[int, torch.Tensor], List[int]]] = None, + return_past_key_values=False, + **kwargs, + ): + batch_size, input_ids_seq_length = input_ids.shape[0], input_ids.shape[-1] + + if generation_config is None: + generation_config = self.generation_config + generation_config = copy.deepcopy(generation_config) + model_kwargs = generation_config.update(**kwargs) + model_kwargs["use_cache"] = generation_config.use_cache + bos_token_id, eos_token_id = generation_config.bos_token_id, generation_config.eos_token_id + + if isinstance(eos_token_id, int): + eos_token_id = [eos_token_id] + eos_token_id_tensor = torch.tensor(eos_token_id).to(input_ids.device) if eos_token_id is not None else None + + has_default_max_length = kwargs.get("max_length") is None and generation_config.max_length is not None + if has_default_max_length and generation_config.max_new_tokens is None: + warnings.warn( + f"Using `max_length`'s default ({generation_config.max_length}) to control the generation length. " + "This behaviour is deprecated and will be removed from the config in v5 of Transformers -- we" + " recommend using `max_new_tokens` to control the maximum length of the generation.", + UserWarning, + ) + elif generation_config.max_new_tokens is not None: + generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length + if not has_default_max_length: + logger.warn( + f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(=" + f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. " + "Please refer to the documentation for more information. " + "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)", + UserWarning, + ) + + if input_ids_seq_length >= generation_config.max_length: + input_ids_string = "decoder_input_ids" if self.config.is_encoder_decoder else "input_ids" + logger.warning( + f"Input length of {input_ids_string} is {input_ids_seq_length}, but `max_length` is set to" + f" {generation_config.max_length}. This can lead to unexpected behavior. You should consider" + " increasing `max_new_tokens`." + ) + + # 2. 
Set generation parameters if not already defined + logits_processor = logits_processor if logits_processor is not None else LogitsProcessorList() + stopping_criteria = stopping_criteria if stopping_criteria is not None else StoppingCriteriaList() + + logits_processor = self._get_logits_processor( + generation_config=generation_config, + input_ids_seq_length=input_ids_seq_length, + encoder_input_ids=input_ids, + prefix_allowed_tokens_fn=prefix_allowed_tokens_fn, + logits_processor=logits_processor, + ) + + stopping_criteria = self._get_stopping_criteria( + generation_config=generation_config, stopping_criteria=stopping_criteria + ) + logits_warper = self._get_logits_warper(generation_config) + + unfinished_sequences = input_ids.new(input_ids.shape[0]).fill_(1) + scores = None + while True: + model_inputs = self.prepare_inputs_for_generation(input_ids, **model_kwargs) + # forward pass to get next token + outputs = self( + **model_inputs, + return_dict=True, + output_attentions=False, + output_hidden_states=False, + ) + + next_token_logits = outputs.logits[:, -1, :] + + # pre-process distribution + next_token_scores = logits_processor(input_ids, next_token_logits) + next_token_scores = logits_warper(input_ids, next_token_scores) + + # sample + probs = nn.functional.softmax(next_token_scores, dim=-1) + if generation_config.do_sample: + next_tokens = torch.multinomial(probs, num_samples=1).squeeze(1) + else: + next_tokens = torch.argmax(probs, dim=-1) + # update generated ids, model inputs, and length for next step + input_ids = torch.cat([input_ids, next_tokens[:, None]], dim=-1) + model_kwargs = self._update_model_kwargs_for_generation( + outputs, model_kwargs, is_encoder_decoder=self.config.is_encoder_decoder + ) + unfinished_sequences = unfinished_sequences.mul( + next_tokens.tile(eos_token_id_tensor.shape[0], 1).ne(eos_token_id_tensor.unsqueeze(1)).prod(dim=0) + ) + if return_past_key_values: + yield input_ids, outputs.past_key_values + else: + yield input_ids + # stop when each sentence is finished, or if we exceed the maximum length + if unfinished_sequences.max() == 0 or stopping_criteria(input_ids, scores): + break + + def quantize(self, bits: int, empty_init=False, device=None, **kwargs): + if bits == 0: + return + + from .quantization import quantize + + if self.quantized: + logger.info("Already quantized.") + return self + + self.quantized = True + + self.config.quantization_bit = bits + + self.transformer.encoder = quantize(self.transformer.encoder, bits, empty_init=empty_init, device=device, + **kwargs) + return self + + +class ChatGLMForSequenceClassification(ChatGLMPreTrainedModel): + def __init__(self, config: ChatGLMConfig, empty_init=True, device=None): + super().__init__(config) + + self.num_labels = config.num_labels + self.transformer = ChatGLMModel(config, empty_init=empty_init, device=device) + + self.classifier_head = nn.Linear(config.hidden_size, config.num_labels, bias=True, dtype=torch.half) + if config.classifier_dropout is not None: + self.dropout = nn.Dropout(config.classifier_dropout) + else: + self.dropout = None + self.config = config + + if self.config.quantization_bit: + self.quantize(self.config.quantization_bit, empty_init=True) + + def forward( + self, + input_ids: Optional[torch.LongTensor] = None, + position_ids: Optional[torch.LongTensor] = None, + attention_mask: Optional[torch.Tensor] = None, + full_attention_mask: Optional[torch.Tensor] = None, + past_key_values: Optional[Tuple[Tuple[torch.Tensor, torch.Tensor], ...]] = None, + inputs_embeds: 
Optional[torch.LongTensor] = None, + labels: Optional[torch.LongTensor] = None, + use_cache: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ) -> Union[Tuple[torch.Tensor, ...], SequenceClassifierOutputWithPast]: + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + transformer_outputs = self.transformer( + input_ids=input_ids, + position_ids=position_ids, + attention_mask=attention_mask, + full_attention_mask=full_attention_mask, + past_key_values=past_key_values, + inputs_embeds=inputs_embeds, + use_cache=use_cache, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + hidden_states = transformer_outputs[0] + pooled_hidden_states = hidden_states[-1] + if self.dropout is not None: + pooled_hidden_states = self.dropout(pooled_hidden_states) + logits = self.classifier_head(pooled_hidden_states) + + loss = None + if labels is not None: + if self.config.problem_type is None: + if self.num_labels == 1: + self.config.problem_type = "regression" + elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int): + self.config.problem_type = "single_label_classification" + else: + self.config.problem_type = "multi_label_classification" + + if self.config.problem_type == "regression": + loss_fct = MSELoss() + if self.num_labels == 1: + loss = loss_fct(logits.squeeze().float(), labels.squeeze()) + else: + loss = loss_fct(logits.float(), labels) + elif self.config.problem_type == "single_label_classification": + loss_fct = CrossEntropyLoss() + loss = loss_fct(logits.view(-1, self.num_labels).float(), labels.view(-1)) + elif self.config.problem_type == "multi_label_classification": + loss_fct = BCEWithLogitsLoss() + loss = loss_fct(logits.float(), labels.view(-1, self.num_labels)) + + if not return_dict: + output = (logits,) + transformer_outputs[1:] + return ((loss,) + output) if loss is not None else output + + return SequenceClassifierOutputWithPast( + loss=loss, + logits=logits, + past_key_values=transformer_outputs.past_key_values, + hidden_states=transformer_outputs.hidden_states, + attentions=transformer_outputs.attentions, + ) diff --git a/ComfyUI-KwaiKolorsWrapper/kolors/models/quantization.py b/ComfyUI-KwaiKolorsWrapper/kolors/models/quantization.py new file mode 100644 index 0000000000000000000000000000000000000000..cb95bfe82b203ff6a2aa962326d2c7a438d6a52f --- /dev/null +++ b/ComfyUI-KwaiKolorsWrapper/kolors/models/quantization.py @@ -0,0 +1,188 @@ +from torch.nn import Linear +from torch.nn.parameter import Parameter + +import bz2 +import torch +import base64 +import ctypes +from transformers.utils import logging + +from typing import List +from functools import partial + +logger = logging.get_logger(__name__) + +try: + from cpm_kernels.kernels.base import LazyKernelCModule, KernelFunction, round_up + + class Kernel: + def __init__(self, code: bytes, function_names: List[str]): + self.code = code + self._function_names = function_names + self._cmodule = LazyKernelCModule(self.code) + + for name in self._function_names: + setattr(self, name, KernelFunction(self._cmodule, name)) + + quantization_code = 
"$QlpoOTFBWSZTWU9yuJUAQHN//////////f/n/8/n///n//bt4dTidcVx8X3V9FV/92/v4B7/AD5FBQFAAAChSgKpFCFAFVSigUAAAEKhSgUUqgFBKigqVREQAABQBQIANDTTIGI00BkZBkNGE0A0BkBkGQGRkaNAaAGQNBoGgDIAAYIGTI0DQAQAaGmmQMRpoDIyDIaMJoBoDIDIMgMjI0aA0AMgaDQNAGQAAwQMmRoGgAgA0NNMgYjTQGRkGQ0YTQDQGQGQZAZGRo0BoAZA0GgaAMgABggZMjQNABABoaaZAxGmgMjIMhowmgGgMgMgyAyMjRoDQAyBoNA0AZAADBAyZGgaAAmqU1NEgJqnptU/Sn4jRR6J6epk2pqb1Q/SgAPUGgyNNGjQ2SBpoAZAAGg0NB6mgDIAAAAA2oaApSREBNAARhGiYEaEwU8pvImlP0k2aam1GaGqbFNM1MHpTwmkepmyU9R6nqPKekHqNNPUxNGhp6n6p6QaZ6o9TG1GMqcoV9ly6nRanHlq6zPNbnGZNi6HSug+2nPiZ13XcnFYZW+45W11CumhzYhchOJ2GLLV1OBjBjGf4TptOddTSOcVxhqYZMYwZXZZY00zI1paX5X9J+b+f4e+x43RXSxXPOdquiGpduatGyXneN696M9t4HU2eR5XX/kPhP261NTx3JO1Ow7LyuDmeo9a7d351T1ZxnvnrvYnrXv/hXxPCeuYx2XsNmO003eg9J3Z6U7b23meJ4ri01OdzTk9BNO96brz+qT5nuvvH3ds/G+m/JcG/F2XYuhXlvO+jP7U3XgrzPN/lr8Sf1n6j4j7jZs+s/T0tNaNNYzTs12rxjwztHlnire3Nzc3N1wuBwOBwXBvZfoHpD7rFmR99V5vj3aXza3xdBbXMalubTg/jIv5dfAi54Pdc75j4z412n3Npj3Ld/ENm7a3b/Cod6h/ret1/5vn/C+l+gdslMvgPSLJ8d8q+U66fevYn/tW1chleEtNTGlcHCbLRlq0tHzF5tsbbZZfHjjLgZu42XCuC3NrdjTasZGNzgxPIrGqp7r3p7L2p5XjnpPSmTd5XtzqnB6U87zzg1Ol0zd0zsLszxR6lkxp35u6/teL0L0W922cR7Lu1lpL9CsHirzuM2T+BgsyViT6LHcm0/Vr6U/7LGGyJeqTEjt0PHWhF5mCT7R9mtlDwriYv0Tyr/OxYt6qp5r0mPVT0608TqnqMZaarU2nFwrTzzlrs1ed7z1ux60wyr4ydCaTi3enW8x68x0zU7tXSlcmPSW1mGpWJMg4zmPC2lK96tp0OE80y4MfEvnZj8zGluR6b22ki1Ou9V2nCd9xovcPvcYMZYy0lvN60ScZ45vN6yeCeeXFb1lVjnnCar5fwXwE2bzJ4HI1XVPXfXZMm44GUsMpYsmLB65TuVdm0cl0b+i/wGNN66XjeV7zuPpHcnK/juhhjdfId5jMdE5nN0dGmmm2zZs2cexD5n9p/dY352XsvXHaZNWWsmmS1atjR452nYudzvqv2HMRyvNNnlMcDl3R2+yx2uVrBubTW9icHDVtbNXlZm7jma1rM4VurZZd2y6nUau7ZXZ7bVU+mnoOVxZGMrVmvX60605JwmzGZhhhjTWtaaaMaaGTGmNMZasY0iX8VMUl8eepaIrzGSpemWOQyZORk2bNpjUybMmxqYmknCGCFynutfksaZpjTNMaaatM0xsxcGR0sociNqxNSmhhR1ZJPbsn8qyF0t2qH6iYBclclalbtTTcHTDsPaX6rlnElph2Jyumumtynv2Kk8GI7rsvXbIcJgHJOSaSXnnGaI3m87RtVXJOZ/YtgdTE6Wpha6ZlE8ayXkef1fh602r2WwvfMXtMdLlkfnLFdYYwYso+bWqm7yJqHXZGw2nrS5ZanSYnWlxBxMF1V940K2wdrI7R6OYf7DGGamMmTSbRhlS45xmVOumF1EyPCmHrrN8wwZOOrdNtLeMtzFzDlWnfTBxMk2NaXIZHBYxYLD4w8yju0ao65Vz1OIXoS9dLanwCe1PWrYuWMqf1if1z2k2yYfKJ741PDgno1ZQ8DRqvUny3mNoWTzGO6m1DkrJI8JiR5cSd+vZdGOO8nrMoc5+NDUFsMSXaZJeNlMmGLtJsovOsUp7I9S5VojKxF6bTVEelXqlfJobQr3LozSh2Jk7VcrVMfhXqszGWMzNqGhqZY0OadxkyyMssKugZR0KNFXBHlqwmJgTE/BNVMk6ItJXZMR0H47GpXv/DMOvNkmVuaV1PRfEdxuqc7Hcd+ZV/zTLaRxWk0nl9CdCeM6mn5rstHIBcpiuwmUZXeq81DacHI2rmrZ5SuE5mOZd6LQrZg9mx32TprA8BMo5jKN6yLTCi3WzQaZSuhzTtM1fUTGVpG8Tw+KXI0tjEpiWxtLYynOlktSbVlaI5kxP8TDH8kx50xoxi5KcA4pcja8KWLRlO/Ks6q06ergnvm1ca3Tq8Uw7LTUsmWyctXPWmpitl/uvGcWTGXGuAXDfhqazGmjkxcJW5hMMMMpYsXl2TZYtVOddG3XCarUt6Ptq9CZXSNzyuRzqRZOjsxdBbFVz6OA5HI43r1jityVlVpVkxmOsyaYWE1NTGq1sOVh36mHMcxtSvcy70edG0ZGR3I1Go1GRlV7mWWo1G0ZGRqlvH40l7o4m5xMWLLLYyNjnqc8556mdPqLJ31n/1nWOncxzG1tizrHs/Z+d2vP/B/l8wdJ6rHUn2nbbDq4p6htFtYzMMMTaZis1K5GKzGNmxhmUx2DDlZ/qNnIx41xnaMfCZWYaZWtNLTNW8ND4Fw1MyZOCdM428suKG1ehW8TesOydg7J+YYcD4cYR+8dFK6M4E3HM9ZfRNNL+Sn6rsl4DsrDl2HpPCnfxjGXtbZtYys1ttlyJ4T+BvexjGWRjMszK4Jpc77D3GyuVD7q0+G8m9G+2+rGm7cOR2y7FdtY2XUYx/oNlfRYxhMYyYZkyyg55enna9Kt/FFi6GMMwYwdwxWgxGMLKYmUyGExTKMZkMFhkymKuh0NOBNnBu+23LdwDoZYYzGGMxtORaTU1pjTGWTTGGtMrNWUsyyTTLLG1qy2ZjbK2DBllWqxMtBMaYZQmcE7zvvRcTkclUwdkxTaSdyySt/7fpL+T1v516Ji97fwr5JbLu305zMn5+GMTTZ9F+y7ExwmGVfG44yxn3dLv6l5i+Wth1jCrDq21nW9LqvvDzz3Vf3LLH/O/32TJ/erx3bXftO4eF+G956D952K/An4NfvOpjFjExjevP/UmE0fIoZXx6/w6lX/no3D0bLt+ixjieBM6ksRd0yB4Lt2SwYNE+gd1detlZWUnpiZfGfFaK+4PyCa/v18V8X75pe9fLXzp7l3VjF76vWZmHwGz1IZNWT7b8yddJ4q5kyrVdfru6atWc7bVYztL9Jf4GXvT+Y8m9/YsXP6H018a8D4XVOqvfzqeR+6yZOD8dPv0+U7/q5Pl+2dNb0MjzGVH5p6MNQ7cOWvw62U9aHE8DprDek+McLyvDz+te+9Zhq5+YTruufMcWMabqy
sTmZVWjKPfnK0wyVcrsuhjZRdLkHNvD72b9abriOSGIxiLixMOoalNPXzy+wT/tf+U6HHONfsz+xe8ufHBdQWWGWLA9if0rsnmrxK5LvRZQeWsTCsrmOYy8VteVfuRfcVTtDLItLIsMYxZLdU/DbtSemxF6Z6Zo5WBXE4tFdCyVMMXMTEMZXVlS6Xec2T4e0tHsRcEuWshcJ2YsNF5rUx1E8ifCq6Z+ZP7qdCeu/aTwFd53l16/o0NOw6O3dLavP4Hbi4RdmuDk6DoYaninC0+o4uZjbJ7Rxeu0/FbuFg+q7DVS6fQe0rZ6NDGUNNU6DEqOaLTicKnYZMnBWruljQxoaS3dZhocDge0bSTyOvdAbG5hxe2xji7E/L55xX13wWNDi6HCekcFxfCPGxY0MXC+s7afWaMdDyjyr+o8Rudm/NabOZvdl274zH4f5XK9z6On1Pe/K5TdPAslg77BjuO6Y3eO7GqvOPG/stknp1leyvLL0Z7bl9I4noMvLkzytLhWYzrOZzLXCORe028rORzOg4N/L0HlMOQ3Pgmnbb6KczlabORpu980q37TBqRu0/p3PO6234Bl03Ynuz+9W7gnsEcmvYaYY3aMYY0wx3pYd+ujsXauWdaY5Xkbtl23fPzFHiDB/QMo0yFjBllYxTQYYyxkrwn7JufwJ/PfgJ+C83X69ni6zvXcnyXabv0ncbLwsceS+RNlyN2mnneJtX0ngYO0+e+0+UnA+Wch3ji8hj5an4h+i6XBySU4n+R0roVcbw5yvHrmr4Yw8Y7x6c+9POPYHI5HI5HI5HI5HGXGww4nE4nrVyOR8XeqPEO7PLOiukYa3Novk5hV4cdtYZLI93e+uxff2jRo0aNGjRo0aNG1bVtW1dy3m83m8+tQ5ZzHw3nObwOu8La9Rc1dtkdS8A3eTk823tnktXWlxN6Oixe06zrN70Isd9jiOgZFq9yfkPqP/SLhN2Myl8jDM43bl1nbcb4cO57jlh8Jow6pzXZdL4dyODTuuhu77FyO27DdwdRxmvO+O+3N2+BdqyTwLHVczDVY4UPE4O66/ZO2cx1LFzVdSXtF7G4HMbrauOHRw6c8FdZ5m9fHZHYZXfTlZquyynSyTTKke6vcffSD9pzPA/G7n7jxPmuhc1DHMynPMrGL6AdewYmwu5ko+UUyTwrMv27rPH1v1nGqd87+p6N6LU8k3NEng53xXyHS97+44OSg/sy/hn+Se6yfYNjW0/uTgP+PvWYzLMmjhcLB/gGpri6H83/84eUXWT6T9Hsv7785z/7z4icpW+zfXypuR7rx/gMdZb1/wC678pcs8/2a3mDitGHxl9mfPlll5MafWWqxk/eYuTDgcNMzDGWLWvsuglNxs53GtN6uWpktlW1tZZYcuinMMWmnNnJydze3b2Y1McBxrBkXw799izLMZZYyy0TkbsGM4p03S2uVu5s/XXUdSdec6smVxZYYGpVmT8A+8ajuEyV5FatkvVru2x6uxGXXbH4A+jvgP4GMYy3iPLXzq/6z65+E005ey+cwMZD3fZcqc6xpjTFjQ0P3U+e++cPYmTIwj0nrK5NPTfl3WvpfLtXDcb2HQMudYOxFXQBor4L4T6vrOauFctYXJQ++NUWmJe5bmx1jDiZS1dTqWxo4GR8jm3fttpmPHppk9PEyv4/y8/sO07XacOmcqc0x2Vi9BvNJvN5oW8x4mOsydpidRxMYJPx06m1bqPzq9KtK8sxXNXFodD/+MYYaJTLwOhc9brCsV18oOR1i4tXChyTkq4lf4y1Ke+9axjDHqs1mfBbMXuP4Hzi+X7t8vzv7bHerrUPgPCxhjre4fXdfLNtNM+Jd+Zdh8xd8wP87uNPoPgv4W7/5P2BuxfsMabNnMnza+54Pdi5U671GPZY8CehX8Voeoo7FHpkeEc6715FwHZrIrUrHaviPUbPZHND+IhczrP6FcYvhOZ0Di/ETt0OI+YwNWR9r7tpf6WDeZKZDB1+z2IthOl1mPyb5FluvEx9h9d0NnM0Y1XPFkWIsk1WotJ0PBMmkvjvQTd0e71tfeV+8r8lQ/tpzpsmxJ+InrI/dj2UajUajVTUajatRqNRtGo1Go1Go4wjeMpZFMVV9CHbofPraLsJ3JpWV2XOoanCuFky4y3PPNxucK2uKC1Lbdb1eo+m5XomN6HfeZsabHLHRX/K+offtNGGmHWctcVcG44MdSqsOLY9VzX+Zxfxn2HPdWTpzWvkrtJ8M5zorrKcquRytJ5N5DZmcaW02l76nWO+BqPXm1A2Ry/0q71dH/mqrqeFjkYxjEXtsX8qubTk67rGycyqsdm4tZx5D6D5hhi0waaWmiaMP81Yjii5qxPlPuU/GfTL1Y5E6Jyfiq63qTa39A4J0sOGDgO9WF9bOXl0XfPRbsY2bPNKPy1YrFYrFYmRhhlTIyMjJWJYZHXuCXI8OoXsvfljGLFicNifpp2XunoPiG1wtx3p1Tah+/DD66OnVtVXP9rKbVxOnL0tR/rHtqB5UDErUVcl11D4qqvjpOcxX7armUNJB3LpW6bxVvD08e8h3odKKvyCFZBdSh2FVcST9xV3n3T8t1j7Kr9qgrqXg+13Pt5U7JCvFXVIV1YG5lRhkVYZJYYDDD4KOIMoHCp26WS8GB7uBh2zIdgq/PKyInjV2STShuoapUdCpX1yTwqq/z1VvET7Kh5nVPkO8YyxjLt2MaaMmWTLQvx3qnzltnXW0p2jxgbEtSny/Osv8Y9pLMXYoHVPAhkVdWVeODhR6q9/Sxe2liwwZWMVvFXfRkeIDxAePUPIrdJ4ey6yquzH+PD/bUOWAu05qVHtFd8rrKHSoeNIOUqrYr3FXyToqfYJgwmJdKpXXOwYYegNNGMzfZPp/t3t/DVs4zjNTN61rRqaWaa4NYbRjTa0tWwy2Y2tGN8ZO8ofNKq4j9SL7I+cSm4/6ovLV5HNXLI0jJidwrtk6ynCaP6Z++GjRlWS3tLeW129Mi9evxU9mtz6s5J3Z7M2ngTgnKvmpomxpaLCzPfmx0JWE+m3NLDDGOX47RctdYYNK5jakdqLkRlI39n590T5zctGSwwZZDJj6kW8XSi6ot2MmWWJ0DUT3nuvebBudScjZ79g8cWJ8av0k+/bE5WKd5MdbFpbDVMxu1DVMmtNZGJvq1mtRbn6M+g/kP0FwDwr7quZs7xosNGpbscyxhhd9TyJyFwbLcxlTasg75vW7TsV5K7ji44XPMMrdoj+Y3rT0Hie62nlYV/pwczzOmdLqLhYkzGMzCZWGMQzGMSsZYY6Di1t4nlJ+Em63mJxrVLxPbYxNEdgc1dU2iOKyoYYWjNrEeHTYybVk0atSa7ehuwsWMWTqn1TrnS6hYsi71d1+s+k+ic70e20fzE/VaTdxT9ZtU4GIXdeNx3X77guYYfpHeTQjaMX6brOu4OY4K7Y2d9mbHarI5ox3p4GpJ2Vd/Tst60f7j999pppjR+Q/Qf8J/VaORs3cji7FfFuN61+ui9s8hix1OCh5KGVV23BPXvZfz3CLyHpix+exi8z/KnCnosY2euno
r+cxyPO/xJ0vKey9OvE9VjqaYu0x3Z3jd6o2b1T12D+F8l232lwaaacD5LE8LBxu7WTlbWraWpew8Xexjel3E+wWD4APITdNqR8F3R3T0lunCQ4GaE9R37DxeCYfcHi4xci5ovKfxVs55y2hf+65E/Xdp6jR5nrebTmi5incpkyOjs50JvrZwstbbW6kfuuQw+2mykf/EXNFzxfKTrxew929TR6bWnGL//F3JFOFCQT3K4lQ" + + kernels = Kernel( + bz2.decompress(base64.b64decode(quantization_code)), + [ + "int4WeightCompression", + "int4WeightExtractionFloat", + "int4WeightExtractionHalf", + "int8WeightExtractionFloat", + "int8WeightExtractionHalf", + ], + ) +except Exception as exception: + kernels = None + logger.warning("Failed to load cpm_kernels:" + str(exception)) + + +class W8A16Linear(torch.autograd.Function): + @staticmethod + def forward(ctx, inp: torch.Tensor, quant_w: torch.Tensor, scale_w: torch.Tensor, weight_bit_width): + ctx.inp_shape = inp.size() + ctx.weight_bit_width = weight_bit_width + out_features = quant_w.size(0) + inp = inp.contiguous().view(-1, inp.size(-1)) + weight = extract_weight_to_half(quant_w, scale_w, weight_bit_width) + ctx.weight_shape = weight.size() + output = inp.mm(weight.t()) + ctx.save_for_backward(inp, quant_w, scale_w) + return output.view(*(ctx.inp_shape[:-1] + (out_features,))) + + @staticmethod + def backward(ctx, grad_output: torch.Tensor): + inp, quant_w, scale_w = ctx.saved_tensors + weight = extract_weight_to_half(quant_w, scale_w, ctx.weight_bit_width) + grad_output = grad_output.contiguous().view(-1, weight.size(0)) + grad_input = grad_output.mm(weight) + grad_weight = grad_output.t().mm(inp) + return grad_input.view(ctx.inp_shape), grad_weight.view(ctx.weight_shape), None, None + + +def compress_int4_weight(weight: torch.Tensor): # (n, m) + with torch.cuda.device(weight.device): + n, m = weight.size(0), weight.size(1) + assert m % 2 == 0 + m = m // 2 + out = torch.empty(n, m, dtype=torch.int8, device="cuda") + stream = torch.cuda.current_stream() + + gridDim = (n, 1, 1) + blockDim = (min(round_up(m, 32), 1024), 1, 1) + + kernels.int4WeightCompression( + gridDim, + blockDim, + 0, + stream, + [ctypes.c_void_p(weight.data_ptr()), ctypes.c_void_p(out.data_ptr()), ctypes.c_int32(n), ctypes.c_int32(m)], + ) + return out + + +def extract_weight_to_half(weight: torch.Tensor, scale_list: torch.Tensor, source_bit_width: int): + assert scale_list.dtype in [torch.half, torch.bfloat16] + assert weight.dtype in [torch.int8] + if source_bit_width == 8: + return weight.to(scale_list.dtype) * scale_list[:, None] + elif source_bit_width == 4: + func = ( + kernels.int4WeightExtractionHalf if scale_list.dtype == torch.half else kernels.int4WeightExtractionBFloat16 + ) + else: + assert False, "Unsupported bit-width" + + with torch.cuda.device(weight.device): + n, m = weight.size(0), weight.size(1) + out = torch.empty(n, m * (8 // source_bit_width), dtype=scale_list.dtype, device="cuda") + stream = torch.cuda.current_stream() + + gridDim = (n, 1, 1) + blockDim = (min(round_up(m, 32), 1024), 1, 1) + + func( + gridDim, + blockDim, + 0, + stream, + [ + ctypes.c_void_p(weight.data_ptr()), + ctypes.c_void_p(scale_list.data_ptr()), + ctypes.c_void_p(out.data_ptr()), + ctypes.c_int32(n), + ctypes.c_int32(m), + ], + ) + return out + + +class QuantizedLinear(torch.nn.Module): + def __init__(self, weight_bit_width: int, weight, bias=None, device="cpu", dtype=None, empty_init=False, *args, + **kwargs): + super().__init__() + self.weight_bit_width = weight_bit_width + + shape = weight.shape + + if weight is None or empty_init: + self.weight = torch.empty(shape[0], shape[1] * weight_bit_width // 8, dtype=torch.int8, device=device) + self.weight_scale = 
torch.empty(shape[0], dtype=dtype, device=device) + else: + self.weight_scale = weight.abs().max(dim=-1).values / ((2 ** (weight_bit_width - 1)) - 1) + self.weight = torch.round(weight / self.weight_scale[:, None]).to(torch.int8) + if weight_bit_width == 4: + self.weight = compress_int4_weight(self.weight) + + self.weight = Parameter(self.weight.to(device), requires_grad=False) + self.weight_scale = Parameter(self.weight_scale.to(device), requires_grad=False) + self.bias = Parameter(bias.to(device), requires_grad=False) if bias is not None else None + + def forward(self, input): + output = W8A16Linear.apply(input, self.weight, self.weight_scale, self.weight_bit_width) + if self.bias is not None: + output = output + self.bias + return output + + +def quantize(model, weight_bit_width, empty_init=False, device=None): + """Replace fp16 linear with quantized linear""" + for layer in model.layers: + layer.self_attention.query_key_value = QuantizedLinear( + weight_bit_width=weight_bit_width, + weight=layer.self_attention.query_key_value.weight.to(torch.cuda.current_device()), + bias=layer.self_attention.query_key_value.bias, + dtype=layer.self_attention.query_key_value.weight.dtype, + device=layer.self_attention.query_key_value.weight.device if device is None else device, + empty_init=empty_init + ) + layer.self_attention.dense = QuantizedLinear( + weight_bit_width=weight_bit_width, + weight=layer.self_attention.dense.weight.to(torch.cuda.current_device()), + bias=layer.self_attention.dense.bias, + dtype=layer.self_attention.dense.weight.dtype, + device=layer.self_attention.dense.weight.device if device is None else device, + empty_init=empty_init + ) + layer.mlp.dense_h_to_4h = QuantizedLinear( + weight_bit_width=weight_bit_width, + weight=layer.mlp.dense_h_to_4h.weight.to(torch.cuda.current_device()), + bias=layer.mlp.dense_h_to_4h.bias, + dtype=layer.mlp.dense_h_to_4h.weight.dtype, + device=layer.mlp.dense_h_to_4h.weight.device if device is None else device, + empty_init=empty_init + ) + layer.mlp.dense_4h_to_h = QuantizedLinear( + weight_bit_width=weight_bit_width, + weight=layer.mlp.dense_4h_to_h.weight.to(torch.cuda.current_device()), + bias=layer.mlp.dense_4h_to_h.bias, + dtype=layer.mlp.dense_4h_to_h.weight.dtype, + device=layer.mlp.dense_4h_to_h.weight.device if device is None else device, + empty_init=empty_init + ) + + return model diff --git a/ComfyUI-KwaiKolorsWrapper/kolors/models/tokenization_chatglm.py b/ComfyUI-KwaiKolorsWrapper/kolors/models/tokenization_chatglm.py new file mode 100644 index 0000000000000000000000000000000000000000..50e44b05e4b3e54d2f1c3f0cab8247ea53a7d4e5 --- /dev/null +++ b/ComfyUI-KwaiKolorsWrapper/kolors/models/tokenization_chatglm.py @@ -0,0 +1,300 @@ +import json +import os +import re +from typing import List, Optional, Union, Dict +from sentencepiece import SentencePieceProcessor +from transformers import PreTrainedTokenizer +from transformers.utils import logging, PaddingStrategy +from transformers.tokenization_utils_base import EncodedInput, BatchEncoding + + +class SPTokenizer: + def __init__(self, model_path: str): + # reload tokenizer + assert os.path.isfile(model_path), model_path + self.sp_model = SentencePieceProcessor(model_file=model_path) + + # BOS / EOS token IDs + self.n_words: int = self.sp_model.vocab_size() + self.bos_id: int = self.sp_model.bos_id() + self.eos_id: int = self.sp_model.eos_id() + self.pad_id: int = self.sp_model.unk_id() + assert self.sp_model.vocab_size() == self.sp_model.get_piece_size() + + role_special_tokens = 
["<|system|>", "<|user|>", "<|assistant|>", "<|observation|>"] + special_tokens = ["[MASK]", "[gMASK]", "[sMASK]", "sop", "eop"] + role_special_tokens + self.special_tokens = {} + self.index_special_tokens = {} + for token in special_tokens: + self.special_tokens[token] = self.n_words + self.index_special_tokens[self.n_words] = token + self.n_words += 1 + self.role_special_token_expression = "|".join([re.escape(token) for token in role_special_tokens]) + + def tokenize(self, s: str, encode_special_tokens=False): + if encode_special_tokens: + last_index = 0 + t = [] + for match in re.finditer(self.role_special_token_expression, s): + if last_index < match.start(): + t.extend(self.sp_model.EncodeAsPieces(s[last_index:match.start()])) + t.append(s[match.start():match.end()]) + last_index = match.end() + if last_index < len(s): + t.extend(self.sp_model.EncodeAsPieces(s[last_index:])) + return t + else: + return self.sp_model.EncodeAsPieces(s) + + def encode(self, s: str, bos: bool = False, eos: bool = False) -> List[int]: + assert type(s) is str + t = self.sp_model.encode(s) + if bos: + t = [self.bos_id] + t + if eos: + t = t + [self.eos_id] + return t + + def decode(self, t: List[int]) -> str: + text, buffer = "", [] + for token in t: + if token in self.index_special_tokens: + if buffer: + text += self.sp_model.decode(buffer) + buffer = [] + text += self.index_special_tokens[token] + else: + buffer.append(token) + if buffer: + text += self.sp_model.decode(buffer) + return text + + def decode_tokens(self, tokens: List[str]) -> str: + text = self.sp_model.DecodePieces(tokens) + return text + + def convert_token_to_id(self, token): + """ Converts a token (str) in an id using the vocab. """ + if token in self.special_tokens: + return self.special_tokens[token] + return self.sp_model.PieceToId(token) + + def convert_id_to_token(self, index): + """Converts an index (integer) in a token (str) using the vocab.""" + if index in self.index_special_tokens: + return self.index_special_tokens[index] + if index in [self.eos_id, self.bos_id, self.pad_id] or index < 0: + return "" + return self.sp_model.IdToPiece(index) + + +class ChatGLMTokenizer(PreTrainedTokenizer): + vocab_files_names = {"vocab_file": "tokenizer.model"} + + model_input_names = ["input_ids", "attention_mask", "position_ids"] + + def __init__(self, vocab_file, padding_side="left", clean_up_tokenization_spaces=False, encode_special_tokens=False, + **kwargs): + self.name = "GLMTokenizer" + + self.vocab_file = vocab_file + self.tokenizer = SPTokenizer(vocab_file) + self.special_tokens = { + "": self.tokenizer.bos_id, + "": self.tokenizer.eos_id, + "": self.tokenizer.pad_id + } + self.encode_special_tokens = encode_special_tokens + super().__init__(padding_side=padding_side, clean_up_tokenization_spaces=clean_up_tokenization_spaces, + encode_special_tokens=encode_special_tokens, + **kwargs) + + def get_command(self, token): + if token in self.special_tokens: + return self.special_tokens[token] + assert token in self.tokenizer.special_tokens, f"{token} is not a special token for {self.name}" + return self.tokenizer.special_tokens[token] + + @property + def unk_token(self) -> str: + return "" + + @property + def pad_token(self) -> str: + return "" + + @property + def pad_token_id(self): + return self.get_command("") + + @property + def eos_token(self) -> str: + return "" + + @property + def eos_token_id(self): + return self.get_command("") + + @property + def vocab_size(self): + return self.tokenizer.n_words + + def get_vocab(self): + """ 
Returns vocab as a dict """ + vocab = {self._convert_id_to_token(i): i for i in range(self.vocab_size)} + vocab.update(self.added_tokens_encoder) + return vocab + + def _tokenize(self, text, **kwargs): + return self.tokenizer.tokenize(text, encode_special_tokens=self.encode_special_tokens) + + def _convert_token_to_id(self, token): + """ Converts a token (str) in an id using the vocab. """ + return self.tokenizer.convert_token_to_id(token) + + def _convert_id_to_token(self, index): + """Converts an index (integer) in a token (str) using the vocab.""" + return self.tokenizer.convert_id_to_token(index) + + def convert_tokens_to_string(self, tokens: List[str]) -> str: + return self.tokenizer.decode_tokens(tokens) + + def save_vocabulary(self, save_directory, filename_prefix=None): + """ + Save the vocabulary and special tokens file to a directory. + + Args: + save_directory (`str`): + The directory in which to save the vocabulary. + filename_prefix (`str`, *optional*): + An optional prefix to add to the named of the saved files. + + Returns: + `Tuple(str)`: Paths to the files saved. + """ + if os.path.isdir(save_directory): + vocab_file = os.path.join( + save_directory, self.vocab_files_names["vocab_file"] + ) + else: + vocab_file = save_directory + + with open(self.vocab_file, 'rb') as fin: + proto_str = fin.read() + + with open(vocab_file, "wb") as writer: + writer.write(proto_str) + + return (vocab_file,) + + def get_prefix_tokens(self): + prefix_tokens = [self.get_command("[gMASK]"), self.get_command("sop")] + return prefix_tokens + + def build_single_message(self, role, metadata, message): + assert role in ["system", "user", "assistant", "observation"], role + role_tokens = [self.get_command(f"<|{role}|>")] + self.tokenizer.encode(f"{metadata}\n") + message_tokens = self.tokenizer.encode(message) + tokens = role_tokens + message_tokens + return tokens + + def build_chat_input(self, query, history=None, role="user"): + if history is None: + history = [] + input_ids = [] + for item in history: + content = item["content"] + if item["role"] == "system" and "tools" in item: + content = content + "\n" + json.dumps(item["tools"], indent=4, ensure_ascii=False) + input_ids.extend(self.build_single_message(item["role"], item.get("metadata", ""), content)) + input_ids.extend(self.build_single_message(role, "", query)) + input_ids.extend([self.get_command("<|assistant|>")]) + return self.batch_encode_plus([input_ids], return_tensors="pt", is_split_into_words=True) + + def build_inputs_with_special_tokens( + self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None + ) -> List[int]: + """ + Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and + adding special tokens. A BERT sequence has the following format: + + - single sequence: `[CLS] X [SEP]` + - pair of sequences: `[CLS] A [SEP] B [SEP]` + + Args: + token_ids_0 (`List[int]`): + List of IDs to which the special tokens will be added. + token_ids_1 (`List[int]`, *optional*): + Optional second list of IDs for sequence pairs. + + Returns: + `List[int]`: List of [input IDs](../glossary#input-ids) with the appropriate special tokens. 
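+
+        Note: despite the BERT-style format quoted above, this implementation (see the
+        body below) prepends the `[gMASK] sop` prefix tokens, so a single sequence becomes
+        `[gMASK] sop A` and a pair becomes `[gMASK] sop A B` followed by the eos command token.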
+ """ + prefix_tokens = self.get_prefix_tokens() + token_ids_0 = prefix_tokens + token_ids_0 + if token_ids_1 is not None: + token_ids_0 = token_ids_0 + token_ids_1 + [self.get_command("")] + return token_ids_0 + + def _pad( + self, + encoded_inputs: Union[Dict[str, EncodedInput], BatchEncoding], + max_length: Optional[int] = None, + padding_strategy: PaddingStrategy = PaddingStrategy.DO_NOT_PAD, + pad_to_multiple_of: Optional[int] = None, + return_attention_mask: Optional[bool] = None, + ) -> dict: + """ + Pad encoded inputs (on left/right and up to predefined length or max length in the batch) + + Args: + encoded_inputs: + Dictionary of tokenized inputs (`List[int]`) or batch of tokenized inputs (`List[List[int]]`). + max_length: maximum length of the returned list and optionally padding length (see below). + Will truncate by taking into account the special tokens. + padding_strategy: PaddingStrategy to use for padding. + + - PaddingStrategy.LONGEST Pad to the longest sequence in the batch + - PaddingStrategy.MAX_LENGTH: Pad to the max length (default) + - PaddingStrategy.DO_NOT_PAD: Do not pad + The tokenizer padding sides are defined in self.padding_side: + + - 'left': pads on the left of the sequences + - 'right': pads on the right of the sequences + pad_to_multiple_of: (optional) Integer if set will pad the sequence to a multiple of the provided value. + This is especially useful to enable the use of Tensor Core on NVIDIA hardware with compute capability + `>= 7.5` (Volta). + return_attention_mask: + (optional) Set to False to avoid returning attention mask (default: set to model specifics) + """ + # Load from model defaults + assert self.padding_side == "left" + + required_input = encoded_inputs[self.model_input_names[0]] + seq_length = len(required_input) + + if padding_strategy == PaddingStrategy.LONGEST: + max_length = len(required_input) + + if max_length is not None and pad_to_multiple_of is not None and (max_length % pad_to_multiple_of != 0): + max_length = ((max_length // pad_to_multiple_of) + 1) * pad_to_multiple_of + + needs_to_be_padded = padding_strategy != PaddingStrategy.DO_NOT_PAD and len(required_input) != max_length + + # Initialize attention mask if not present. 
+ if "attention_mask" not in encoded_inputs: + encoded_inputs["attention_mask"] = [1] * seq_length + + if "position_ids" not in encoded_inputs: + encoded_inputs["position_ids"] = list(range(seq_length)) + + if needs_to_be_padded: + difference = max_length - len(required_input) + + if "attention_mask" in encoded_inputs: + encoded_inputs["attention_mask"] = [0] * difference + encoded_inputs["attention_mask"] + if "position_ids" in encoded_inputs: + encoded_inputs["position_ids"] = [0] * difference + encoded_inputs["position_ids"] + encoded_inputs[self.model_input_names[0]] = [self.pad_token_id] * difference + required_input + + return encoded_inputs diff --git a/ComfyUI-KwaiKolorsWrapper/kolors/pipelines/__init__.py b/ComfyUI-KwaiKolorsWrapper/kolors/pipelines/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI-KwaiKolorsWrapper/kolors/pipelines/pipeline_stable_diffusion_xl_chatglm_256.py b/ComfyUI-KwaiKolorsWrapper/kolors/pipelines/pipeline_stable_diffusion_xl_chatglm_256.py new file mode 100644 index 0000000000000000000000000000000000000000..fee50de5413515ae833d2e14244f1a5cc4d71e21 --- /dev/null +++ b/ComfyUI-KwaiKolorsWrapper/kolors/pipelines/pipeline_stable_diffusion_xl_chatglm_256.py @@ -0,0 +1,590 @@ +# Copyright 2023 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ...kolors.models.modeling_chatglm import ChatGLMModel +from ...kolors.models.tokenization_chatglm import ChatGLMTokenizer +import inspect +from typing import Any, Callable, Dict, List, Optional, Tuple, Union +import torch + +from diffusers.loaders import FromSingleFileMixin, LoraLoaderMixin +from diffusers.models import UNet2DConditionModel + +from diffusers.schedulers import KarrasDiffusionSchedulers +from diffusers.utils import ( + is_accelerate_available, + is_accelerate_version, + logging, + replace_example_docstring, +) +try: + from diffusers.utils import randn_tensor +except: + from diffusers.utils.torch_utils import randn_tensor +from diffusers.pipelines.pipeline_utils import DiffusionPipeline +from diffusers.pipelines.stable_diffusion_xl import StableDiffusionXLPipelineOutput + +from comfy.utils import ProgressBar + + +logger = logging.get_logger(__name__) # pylint: disable=invalid-name + +EXAMPLE_DOC_STRING = """ + Examples: + ```py + >>> import torch + >>> from diffusers import StableDiffusionXLPipeline + + >>> pipe = StableDiffusionXLPipeline.from_pretrained( + ... "stabilityai/stable-diffusion-xl-base-0.9", torch_dtype=torch.float16 + ... ) + >>> pipe = pipe.to("cuda") + + >>> prompt = "a photo of an astronaut riding a horse on mars" + >>> image = pipe(prompt).images[0] + ``` +""" + + +# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.rescale_noise_cfg +def rescale_noise_cfg(noise_cfg, noise_pred_text, guidance_rescale=0.0): + """ + Rescale `noise_cfg` according to `guidance_rescale`. 
Based on findings of [Common Diffusion Noise Schedules and + Sample Steps are Flawed](https://arxiv.org/pdf/2305.08891.pdf). See Section 3.4 + """ + std_text = noise_pred_text.std(dim=list(range(1, noise_pred_text.ndim)), keepdim=True) + std_cfg = noise_cfg.std(dim=list(range(1, noise_cfg.ndim)), keepdim=True) + # rescale the results from guidance (fixes overexposure) + noise_pred_rescaled = noise_cfg * (std_text / std_cfg) + # mix with the original results from guidance by factor guidance_rescale to avoid "plain looking" images + noise_cfg = guidance_rescale * noise_pred_rescaled + (1 - guidance_rescale) * noise_cfg + return noise_cfg + + +class StableDiffusionXLPipeline(DiffusionPipeline, FromSingleFileMixin, LoraLoaderMixin): + r""" + Pipeline for text-to-image generation using Stable Diffusion XL. + + This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the + library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.) + + In addition the pipeline inherits the following loading methods: + - *Textual-Inversion*: [`loaders.TextualInversionLoaderMixin.load_textual_inversion`] + - *LoRA*: [`loaders.LoraLoaderMixin.load_lora_weights`] + - *Ckpt*: [`loaders.FromSingleFileMixin.from_single_file`] + + as well as the following saving methods: + - *LoRA*: [`loaders.LoraLoaderMixin.save_lora_weights`] + + Args: + vae ([`AutoencoderKL`]): + Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations. + text_encoder ([`CLIPTextModel`]): + Frozen text-encoder. Stable Diffusion XL uses the text portion of + [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically + the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant. + + tokenizer (`CLIPTokenizer`): + Tokenizer of class + [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer). + + unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents. + scheduler ([`SchedulerMixin`]): + A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of + [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`]. + """ + + def __init__( + self, + # text_encoder: ChatGLMModel, + # tokenizer: ChatGLMTokenizer, + unet: UNet2DConditionModel, + scheduler: KarrasDiffusionSchedulers, + ): + super().__init__() + + self.register_modules( + #vae=vae, + #text_encoder=text_encoder, + #tokenizer=tokenizer, + unet=unet, + scheduler=scheduler, + ) + self.vae_scale_factor = 8 + self.default_sample_size = self.unet.config.sample_size + + def enable_sequential_cpu_offload(self, gpu_id=0): + r""" + Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet, + text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a + `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called. + Note that offloading happens on a submodule basis. Memory savings are higher than with + `enable_model_cpu_offload`, but performance is lower. 
+ """ + if is_accelerate_available() and is_accelerate_version(">=", "0.14.0"): + from accelerate import cpu_offload + else: + raise ImportError("`enable_sequential_cpu_offload` requires `accelerate v0.14.0` or higher") + + device = torch.device(f"cuda:{gpu_id}") + + if self.device.type != "cpu": + self.to("cpu", silence_dtype_warnings=True) + torch.cuda.empty_cache() # otherwise we don't see the memory savings (but they probably exist) + + for cpu_offloaded_model in [self.unet, self.text_encoder]: + cpu_offload(cpu_offloaded_model, device) + + def enable_model_cpu_offload(self, gpu_id=0): + r""" + Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared + to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward` + method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with + `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`. + """ + if is_accelerate_available() and is_accelerate_version(">=", "0.17.0.dev0"): + from accelerate import cpu_offload_with_hook + else: + raise ImportError("`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.") + + device = torch.device(f"cuda:{gpu_id}") + + if self.device.type != "cpu": + self.to("cpu", silence_dtype_warnings=True) + torch.cuda.empty_cache() # otherwise we don't see the memory savings (but they probably exist) + + model_sequence = ( + [self.text_encoder] + ) + model_sequence.extend([self.unet]) + + hook = None + for cpu_offloaded_model in model_sequence: + _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook) + + # We'll offload the last model manually. + self.final_offload_hook = hook + + @property + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device + def _execution_device(self): + r""" + Returns the device on which the pipeline's models will be executed. After calling + `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module + hooks. + """ + if not hasattr(self.unet, "_hf_hook"): + return self.device + for module in self.unet.modules(): + if ( + hasattr(module, "_hf_hook") + and hasattr(module._hf_hook, "execution_device") + and module._hf_hook.execution_device is not None + ): + return torch.device(module._hf_hook.execution_device) + return self.device + + def get_timesteps(self, num_inference_steps, strength, device, denoising_start=None): + # get the original timestep using init_timestep + if denoising_start is None: + init_timestep = min(int(num_inference_steps * strength), num_inference_steps) + t_start = max(num_inference_steps - init_timestep, 0) + else: + t_start = 0 + + timesteps = self.scheduler.timesteps[t_start * self.scheduler.order :] + + # Strength is irrelevant if we directly request a timestep to start at; + # that is, strength is determined by the denoising_start instead. 
+ if denoising_start is not None: + discrete_timestep_cutoff = int( + round( + self.scheduler.config.num_train_timesteps + - (denoising_start * self.scheduler.config.num_train_timesteps) + ) + ) + + num_inference_steps = (timesteps < discrete_timestep_cutoff).sum().item() + if self.scheduler.order == 2 and num_inference_steps % 2 == 0: + # if the scheduler is a 2nd order scheduler we might have to do +1 + # because `num_inference_steps` might be even given that every timestep + # (except the highest one) is duplicated. If `num_inference_steps` is even it would + # mean that we cut the timesteps in the middle of the denoising step + # (between 1st and 2nd derivative) which leads to incorrect results. By adding 1 + # we ensure that the denoising process always ends after the 2nd derivate step of the scheduler + num_inference_steps = num_inference_steps + 1 + + # because t_n+1 >= t_n, we slice the timesteps starting from the end + timesteps = timesteps[-num_inference_steps:] + return timesteps, num_inference_steps + + return timesteps, num_inference_steps - t_start + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs + def prepare_extra_step_kwargs(self, generator, eta): + # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature + # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers. + # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502 + # and should be between [0, 1] + + accepts_eta = "eta" in set(inspect.signature(self.scheduler.step).parameters.keys()) + extra_step_kwargs = {} + if accepts_eta: + extra_step_kwargs["eta"] = eta + + # check if the scheduler accepts generator + accepts_generator = "generator" in set(inspect.signature(self.scheduler.step).parameters.keys()) + if accepts_generator: + extra_step_kwargs["generator"] = generator + return extra_step_kwargs + + def check_inputs( + self, + prompt, + height, + width, + callback_steps, + negative_prompt=None, + prompt_embeds=None, + negative_prompt_embeds=None, + pooled_prompt_embeds=None, + negative_pooled_prompt_embeds=None, + ): + if height % 8 != 0 or width % 8 != 0: + raise ValueError(f"`height` and `width` have to be divisible by 8 but are {height} and {width}.") + + if (callback_steps is None) or ( + callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0) + ): + raise ValueError( + f"`callback_steps` has to be a positive integer but is {callback_steps} of type" + f" {type(callback_steps)}." + ) + + if prompt is not None and prompt_embeds is not None: + raise ValueError( + f"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to" + " only forward one of the two." + ) + elif prompt is None and prompt_embeds is None: + raise ValueError( + "Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined." + ) + elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)): + raise ValueError(f"`prompt` has to be of type `str` or `list` but is {type(prompt)}") + + if negative_prompt is not None and negative_prompt_embeds is not None: + raise ValueError( + f"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:" + f" {negative_prompt_embeds}. Please make sure to only forward one of the two." 
+ ) + + if prompt_embeds is not None and negative_prompt_embeds is not None: + if prompt_embeds.shape != negative_prompt_embeds.shape: + raise ValueError( + "`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but" + f" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`" + f" {negative_prompt_embeds.shape}." + ) + + if prompt_embeds is not None and pooled_prompt_embeds is None: + raise ValueError( + "If `prompt_embeds` are provided, `pooled_prompt_embeds` also have to be passed. Make sure to generate `pooled_prompt_embeds` from the same text encoder that was used to generate `prompt_embeds`." + ) + + if negative_prompt_embeds is not None and negative_pooled_prompt_embeds is None: + raise ValueError( + "If `negative_prompt_embeds` are provided, `negative_pooled_prompt_embeds` also have to be passed. Make sure to generate `negative_pooled_prompt_embeds` from the same text encoder that was used to generate `negative_prompt_embeds`." + ) + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents + def prepare_latents(self, batch_size, num_channels_latents, timesteps, num_images_per_prompt, height, width, dtype, device, generator, latents=None): + shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor) + if isinstance(generator, list) and len(generator) != batch_size: + raise ValueError( + f"You have passed a list of generators of length {len(generator)}, but requested an effective batch" + f" size of {batch_size}. Make sure the batch size matches the length of the generators." + ) + + if latents is None: + latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype) + latents = latents * self.scheduler.init_noise_sigma + else: + latent_timestep = timesteps[:1].repeat(batch_size * num_images_per_prompt) + latents = latents.to(device) + shape = latents.shape + noise = randn_tensor(shape, generator=generator, device=device, dtype=dtype) + # get latents + latents = self.scheduler.add_noise(latents, noise, latent_timestep) + + return latents + + + def _get_add_time_ids(self, original_size, crops_coords_top_left, target_size, dtype): + add_time_ids = list(original_size + crops_coords_top_left + target_size) + + passed_add_embed_dim = ( + self.unet.config.addition_time_embed_dim * len(add_time_ids) + 4096 + ) + expected_add_embed_dim = self.unet.add_embedding.linear_1.in_features + + if expected_add_embed_dim != passed_add_embed_dim: + raise ValueError( + f"Model expects an added time embedding vector of length {expected_add_embed_dim}, but a vector of {passed_add_embed_dim} was created. The model has an incorrect config. Please check `unet.config.time_embedding_type` and `text_encoder_2.config.projection_dim`." 
+ ) + + add_time_ids = torch.tensor([add_time_ids], dtype=dtype) + return add_time_ids + + @torch.no_grad() + @replace_example_docstring(EXAMPLE_DOC_STRING) + def __call__( + self, + prompt: Union[str, List[str]] = None, + height: Optional[int] = None, + width: Optional[int] = None, + num_inference_steps: int = 50, + denoising_end: Optional[float] = None, + strength: Optional[float] = 1.0, + guidance_scale: float = 5.0, + negative_prompt: Optional[Union[str, List[str]]] = None, + num_images_per_prompt: Optional[int] = 1, + eta: float = 0.0, + generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None, + latents: Optional[torch.FloatTensor] = None, + prompt_embeds: Optional[torch.FloatTensor] = None, + negative_prompt_embeds: Optional[torch.FloatTensor] = None, + pooled_prompt_embeds: Optional[torch.FloatTensor] = None, + negative_pooled_prompt_embeds: Optional[torch.FloatTensor] = None, + callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None, + callback_steps: int = 1, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + guidance_rescale: float = 0.0, + original_size: Optional[Tuple[int, int]] = None, + crops_coords_top_left: Tuple[int, int] = (0, 0), + target_size: Optional[Tuple[int, int]] = None, + use_dynamic_threshold: Optional[bool] = False, + ): + r""" + Function invoked when calling the pipeline for generation. + + Args: + prompt (`str` or `List[str]`, *optional*): + The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`. + instead. + height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor): + The height in pixels of the generated image. + width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor): + The width in pixels of the generated image. + num_inference_steps (`int`, *optional*, defaults to 50): + The number of denoising steps. More denoising steps usually lead to a higher quality image at the + expense of slower inference. + denoising_end (`float`, *optional*): + When specified, determines the fraction (between 0.0 and 1.0) of the total denoising process to be + completed before it is intentionally prematurely terminated. For instance, if denoising_end is set to + 0.7 and `num_inference_steps` is fixed at 50, the process will execute only 35 (i.e., 0.7 * 50) + Denoisers" multi-pipeline setup, as elaborated in [**Refining the Image + Output**](https://huggingface.co/docs/diffusers/api/pipelines/stable_diffusion/stable_diffusion_xl#refining-the-image-output) + guidance_scale (`float`, *optional*, defaults to 7.5): + `guidance_scale` is defined as `w` of equation 2. of [Imagen + Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale > + 1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`, + negative_prompt (`str` or `List[str]`, *optional*): + The prompt or prompts not to guide the image generation. If not defined, one has to pass + `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is + less than `1`). + num_images_per_prompt (`int`, *optional*, defaults to 1): + The number of images to generate per prompt. + eta (`float`, *optional*, defaults to 0.0): + Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to + [`schedulers.DDIMScheduler`], will be ignored for others. 
+ generator (`torch.Generator` or `List[torch.Generator]`, *optional*): + One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html) + to make generation deterministic. + latents (`torch.FloatTensor`, *optional*): + Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image + generation. Can be used to tweak the same generation with different prompts. If not provided, a latents + tensor will ge generated by sampling using the supplied random `generator`. + prompt_embeds (`torch.FloatTensor`, *optional*): + Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not + provided, text embeddings will be generated from `prompt` input argument. + negative_prompt_embeds (`torch.FloatTensor`, *optional*): + Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt + weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input + argument. + pooled_prompt_embeds (`torch.FloatTensor`, *optional*): + Pre-generated pooled text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. + If not provided, pooled text embeddings will be generated from `prompt` input argument. + negative_pooled_prompt_embeds (`torch.FloatTensor`, *optional*): + output_type (`str`, *optional*, defaults to `"pil"`): + The output format of the generate image. Choose between + [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`. + return_dict (`bool`, *optional*, defaults to `True`): + Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionXLPipelineOutput`] instead of a + callback (`Callable`, *optional*): + A function that will be called every `callback_steps` steps during inference. The function will be + callback_steps (`int`, *optional*, defaults to 1): + The frequency at which the `callback` function will be called. If not specified, the callback will be + called at every step. + cross_attention_kwargs (`dict`, *optional*): + A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under + `self.processor` in + [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py). + guidance_rescale (`float`, *optional*, defaults to 0.7): + Guidance rescale factor proposed by [Common Diffusion Noise Schedules and Sample Steps are + Flawed](https://arxiv.org/pdf/2305.08891.pdf) `guidance_scale` is defined as `φ` in equation 16. of + [Common Diffusion Noise Schedules and Sample Steps are Flawed](https://arxiv.org/pdf/2305.08891.pdf). + Guidance rescale factor should fix overexposure when using zero terminal SNR. + original_size (`Tuple[int]`, *optional*, defaults to (1024, 1024)): + TODO + crops_coords_top_left (`Tuple[int]`, *optional*, defaults to (0, 0)): + TODO + target_size (`Tuple[int]`, *optional*, defaults to (1024, 1024)): + TODO + + Examples: + + Returns: + [`~pipelines.stable_diffusion.StableDiffusionXLPipelineOutput`] or `tuple`: + [`~pipelines.stable_diffusion.StableDiffusionXLPipelineOutput`] if `return_dict` is True, otherwise a + `tuple. When returning a tuple, the first element is a list with the generated images, and the second + element is a list of `bool`s denoting whether the corresponding generated image likely represents + "not-safe-for-work" (nsfw) content, according to the `safety_checker`. + """ + # 0. 
Default height and width to unet + height = height or self.default_sample_size * self.vae_scale_factor + width = width or self.default_sample_size * self.vae_scale_factor + + original_size = original_size or (height, width) + target_size = target_size or (height, width) + + # 1. Check inputs. Raise error if not correct + self.check_inputs( + prompt, + height, + width, + callback_steps, + negative_prompt, + prompt_embeds, + negative_prompt_embeds, + pooled_prompt_embeds, + negative_pooled_prompt_embeds, + ) + + batch_size = prompt_embeds.shape[0] + + device = self._execution_device + + # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2) + # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1` + # corresponds to doing no classifier free guidance. + do_classifier_free_guidance = guidance_scale > 1.0 + + # 4. Prepare timesteps + self.scheduler.set_timesteps(num_inference_steps, device=device) + + timesteps, num_inference_steps = self.get_timesteps( + num_inference_steps, + strength, + device, + denoising_start=None, + ) + # 5. Prepare latent variables + num_channels_latents = self.unet.config.in_channels + latents = self.prepare_latents( + batch_size * num_images_per_prompt, + num_channels_latents, + timesteps, + num_images_per_prompt, + height, + width, + prompt_embeds.dtype, + device, + generator, + latents, + ) + + # 6. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline + extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta) + + # 7. Prepare added time ids & embeddings + add_text_embeds = pooled_prompt_embeds + add_time_ids = self._get_add_time_ids( + original_size, crops_coords_top_left, target_size, dtype=prompt_embeds.dtype + ) + + if do_classifier_free_guidance: + prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds], dim=0) + add_text_embeds = torch.cat([negative_pooled_prompt_embeds, add_text_embeds], dim=0) + add_time_ids = torch.cat([add_time_ids, add_time_ids], dim=0) + + prompt_embeds = prompt_embeds.to(device) + add_text_embeds = add_text_embeds.to(device) + add_time_ids = add_time_ids.to(device).repeat(batch_size * num_images_per_prompt, 1) + + # 8. 
Denoising loop + num_warmup_steps = max(len(timesteps) - num_inference_steps * self.scheduler.order, 0) + + # 7.1 Apply denoising_end + if denoising_end is not None: + num_inference_steps = int(round(denoising_end * num_inference_steps)) + timesteps = timesteps[: num_warmup_steps + self.scheduler.order * num_inference_steps] + comfy_pbar = ProgressBar(num_inference_steps) + with self.progress_bar(total=num_inference_steps) as progress_bar: + for i, t in enumerate(timesteps): + # expand the latents if we are doing classifier free guidance + latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents + + latent_model_input = self.scheduler.scale_model_input(latent_model_input, t) + + # predict the noise residual + added_cond_kwargs = {"text_embeds": add_text_embeds, "time_ids": add_time_ids} + noise_pred = self.unet( + latent_model_input, + t, + encoder_hidden_states=prompt_embeds, + cross_attention_kwargs=cross_attention_kwargs, + added_cond_kwargs=added_cond_kwargs, + return_dict=False, + )[0] + + # perform guidance + if do_classifier_free_guidance: + noise_pred_uncond, noise_pred_text = noise_pred.chunk(2) + noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond) + if use_dynamic_threshold: + DynamicThresh = DynThresh(maxSteps=num_inference_steps, experiment_mode=0) + noise_pred = DynamicThresh.dynthresh(noise_pred_text, + noise_pred_uncond, + guidance_scale, + None) + + if do_classifier_free_guidance and guidance_rescale > 0.0: + # Based on 3.4. in https://arxiv.org/pdf/2305.08891.pdf + noise_pred = rescale_noise_cfg(noise_pred, noise_pred_text, guidance_rescale=guidance_rescale) + + # compute the previous noisy sample x_t -> x_t-1 + latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs, return_dict=False)[0] + + # call the callback, if provided + if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0): + progress_bar.update() + comfy_pbar.update(1) + if callback is not None and i % callback_steps == 0: + callback(i, t, latents) + + return StableDiffusionXLPipelineOutput(images=latents) + +if __name__ == "__main__": + pass diff --git a/ComfyUI-KwaiKolorsWrapper/nodes.py b/ComfyUI-KwaiKolorsWrapper/nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..25d1bb1ad4bb9f2ea6c9322c11a0c2e4b416b69f --- /dev/null +++ b/ComfyUI-KwaiKolorsWrapper/nodes.py @@ -0,0 +1,476 @@ +import torch +import os +import random +import re +import gc +import json +import psutil +import comfy.model_management as mm +from comfy.utils import ProgressBar, load_torch_file + +import folder_paths + +script_directory = os.path.dirname(os.path.abspath(__file__)) + +folder_paths.add_model_folder_path("llms", os.path.join(folder_paths.models_dir, "llms", "checkpoints")) + +from .kolors.pipelines.pipeline_stable_diffusion_xl_chatglm_256 import StableDiffusionXLPipeline +from .kolors.models.modeling_chatglm import ChatGLMModel, ChatGLMConfig +from .kolors.models.tokenization_chatglm import ChatGLMTokenizer +from diffusers import UNet2DConditionModel +from diffusers import (DPMSolverMultistepScheduler, + EulerDiscreteScheduler, + EulerAncestralDiscreteScheduler, + DEISMultistepScheduler, + UniPCMultistepScheduler +) + +from contextlib import nullcontext +try: + from accelerate import init_empty_weights + from accelerate.utils import set_module_tensor_to_device + is_accelerate_available = True +except: + pass +from comfy.utils import ProgressBar + +class DownloadAndLoadKolorsModel: + 
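+    """Downloads the Kwai-Kolors/Kolors diffusers checkpoint (fp16 UNet and scheduler
+    config only; VAE, text encoder and tokenizer are skipped) into models/diffusers if
+    missing, then wraps it in the bundled ChatGLM-conditioned StableDiffusionXLPipeline.
+    """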
@classmethod + def INPUT_TYPES(s): + return {"required": { + "model": ( + [ + 'Kwai-Kolors/Kolors', + ], + ), + "precision": ([ 'fp16'], + { + "default": 'fp16' + }), + }, + } + + RETURN_TYPES = ("KOLORSMODEL",) + RETURN_NAMES = ("kolors_model",) + FUNCTION = "loadmodel" + CATEGORY = "KwaiKolorsWrapper" + + def loadmodel(self, model, precision): + device = mm.get_torch_device() + offload_device = mm.unet_offload_device() + dtype = {"bf16": torch.bfloat16, "fp16": torch.float16, "fp32": torch.float32}[precision] + + pbar = ProgressBar(4) + + model_name = model.rsplit('/', 1)[-1] + model_path = os.path.join(folder_paths.models_dir, "diffusers", model_name) + + if not os.path.exists(model_path): + print(f"Downloading Kolor model to: {model_path}") + from huggingface_hub import snapshot_download + snapshot_download(repo_id=model, + allow_patterns=['*fp16.safetensors*', '*.json'], + ignore_patterns=['vae/*', 'text_encoder/*', 'tokenizer/*'], + local_dir=model_path, + local_dir_use_symlinks=False) + pbar.update(1) + + ram_rss_start = psutil.Process().memory_info().rss + scheduler = EulerDiscreteScheduler.from_pretrained(model_path, subfolder= 'scheduler') + + print(f'Load UNET...') + unet = UNet2DConditionModel.from_pretrained(model_path, subfolder= 'unet', variant="fp16", revision=None, low_cpu_mem_usage=True).to(dtype).eval() + ram_rss_end = psutil.Process().memory_info().rss + print(f'Kolors-unet: RAM allocated = {(ram_rss_end-ram_rss_start)/(1024*1024*1024):.3f}GB') + pipeline = StableDiffusionXLPipeline( + unet=unet, + scheduler=scheduler, + ) + + kolors_model = { + 'pipeline': pipeline, + 'dtype': dtype + } + + return (kolors_model,) + +class LoadChatGLM3: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "chatglm3_checkpoint": (folder_paths.get_filename_list("llms"),), + }, + } + + RETURN_TYPES = ("CHATGLM3MODEL",) + RETURN_NAMES = ("chatglm3_model",) + FUNCTION = "loadmodel" + CATEGORY = "KwaiKolorsWrapper" + + def loadmodel(self, chatglm3_checkpoint): + device=mm.get_torch_device() + offload_device=mm.unet_offload_device() + print(f'chatglm3: device={device}, offload_device={offload_device}') + + pbar = ProgressBar(2) + chatglm3_path = folder_paths.get_full_path("llms", chatglm3_checkpoint) + print("Load TEXT_ENCODER...") + text_encoder_config = os.path.join(script_directory, 'configs', 'text_encoder_config.json') + with open(text_encoder_config, 'r') as file: + config = json.load(file) + + text_encoder_config = ChatGLMConfig(**config) + with (init_empty_weights() if is_accelerate_available else nullcontext()): + text_encoder = ChatGLMModel(text_encoder_config) + if '4bit' in chatglm3_checkpoint: + text_encoder.quantize(4) + elif '8bit' in chatglm3_checkpoint: + text_encoder.quantize(8) + + text_encoder_sd = load_torch_file(chatglm3_path) + + if is_accelerate_available: + for key in text_encoder_sd: + set_module_tensor_to_device(text_encoder, key, device=offload_device, value=text_encoder_sd[key]) + else: + text_encoder.load_state_dict() + + tokenizer_path = os.path.join(script_directory,'configs',"tokenizer") + tokenizer = ChatGLMTokenizer.from_pretrained(tokenizer_path) + pbar.update(1) + + chatglm3_model = { + 'text_encoder': text_encoder, + 'tokenizer': tokenizer + } + + return (chatglm3_model,) + +class DownloadAndLoadChatGLM3: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "precision": ([ 'fp16', 'quant4', 'quant8'], + { + "default": 'fp16' + }), + }, + } + + RETURN_TYPES = ("CHATGLM3MODEL",) + RETURN_NAMES = ("chatglm3_model",) + FUNCTION = 
"loadmodel" + CATEGORY = "KwaiKolorsWrapper" + + def loadmodel(self, precision): + + pbar = ProgressBar(2) + model = "Kwai-Kolors/Kolors" + model_name = model.rsplit('/', 1)[-1] + model_path = os.path.join(folder_paths.models_dir, "diffusers", model_name) + text_encoder_path = os.path.join(model_path, "text_encoder") + + if not os.path.exists(text_encoder_path): + print(f"Downloading ChatGLM3 to: {text_encoder_path}") + from huggingface_hub import snapshot_download + snapshot_download(repo_id=model, + allow_patterns=['text_encoder/*'], + ignore_patterns=['*.py', '*.pyc'], + local_dir=model_path, + local_dir_use_symlinks=False) + pbar.update(1) + + ram_rss_start = psutil.Process().memory_info().rss + device = mm.get_torch_device() + offload_device = mm.unet_offload_device() + print(f"Load TEXT_ENCODER..., {precision}, {offload_device}") + text_encoder = ChatGLMModel.from_pretrained( + text_encoder_path, + torch_dtype=torch.float16 + ).to(offload_device) + if precision == 'quant8': + text_encoder.quantize(8) + elif precision == 'quant4': + text_encoder.quantize(4) + #device_text = next(text_encoder.parameters()).device + #print(f'chatglm3: device={device_text}, torch_device={device}, offload_device={offload_device}') + + tokenizer = ChatGLMTokenizer.from_pretrained(text_encoder_path) + pbar.update(1) + + chatglm3_model = { + 'text_encoder': text_encoder, + 'tokenizer': tokenizer + } + ram_rss_end = psutil.Process().memory_info().rss + print(f'chatglm3: RAM allocated = {(ram_rss_end-ram_rss_start)/(1024*1024*1024):.3f}GB') + return (chatglm3_model,) + +class KolorsTextEncode: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "chatglm3_model": ("CHATGLM3MODEL", ), + "prompt": ("STRING", {"multiline": True, "default": "",}), + "negative_prompt": ("STRING", {"multiline": True, "default": "",}), + "num_images_per_prompt": ("INT", {"default": 1, "min": 1, "max": 128, "step": 1}), + }, + } + + RETURN_TYPES = ("KOLORS_EMBEDS",) + RETURN_NAMES =("kolors_embeds",) + FUNCTION = "encode" + CATEGORY = "KwaiKolorsWrapper" + + def encode(self, chatglm3_model, prompt, negative_prompt, num_images_per_prompt): + device = mm.get_torch_device() + offload_device = mm.unet_offload_device() + mm.unload_all_models() + mm.soft_empty_cache() + # Function to randomly select an option from the brackets + def choose_random_option(match): + options = match.group(1).split('|') + return random.choice(options) + + # Randomly choose between options in brackets for prompt and negative_prompt + prompt = re.sub(r'\{([^{}]*)\}', choose_random_option, prompt) + negative_prompt = re.sub(r'\{([^{}]*)\}', choose_random_option, negative_prompt) + + if "|" in prompt: + prompt = prompt.split("|") + negative_prompt = [negative_prompt] * len(prompt) # Replicate negative_prompt to match length of prompt list + + + print(prompt) + do_classifier_free_guidance = True + + if prompt is not None and isinstance(prompt, str): + batch_size = 1 + elif prompt is not None and isinstance(prompt, list): + batch_size = len(prompt) + + # Define tokenizers and text encoders + tokenizer = chatglm3_model['tokenizer'] + text_encoder = chatglm3_model['text_encoder'] + + text_encoder.to(device) + + text_inputs = tokenizer( + prompt, + padding="max_length", + max_length=256, + truncation=True, + return_tensors="pt", + ).to(device) + + output = text_encoder( + input_ids=text_inputs['input_ids'] , + attention_mask=text_inputs['attention_mask'], + position_ids=text_inputs['position_ids'], + output_hidden_states=True) + + prompt_embeds = 
output.hidden_states[-2].permute(1, 0, 2).clone() # [batch_size, 77, 4096] + text_proj = output.hidden_states[-1][-1, :, :].clone() # [batch_size, 4096] + bs_embed, seq_len, _ = prompt_embeds.shape + prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1) + prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1) + + + if do_classifier_free_guidance: + uncond_tokens = [] + if negative_prompt is None: + uncond_tokens = [""] * batch_size + elif prompt is not None and type(prompt) is not type(negative_prompt): + raise TypeError( + f"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=" + f" {type(prompt)}." + ) + elif isinstance(negative_prompt, str): + uncond_tokens = [negative_prompt] + elif batch_size != len(negative_prompt): + raise ValueError( + f"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:" + f" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches" + " the batch size of `prompt`." + ) + else: + uncond_tokens = negative_prompt + + + max_length = prompt_embeds.shape[1] + uncond_input = tokenizer( + uncond_tokens, + padding="max_length", + max_length=max_length, + truncation=True, + return_tensors="pt", + ).to(device) + output = text_encoder( + input_ids=uncond_input['input_ids'] , + attention_mask=uncond_input['attention_mask'], + position_ids=uncond_input['position_ids'], + output_hidden_states=True) + negative_prompt_embeds = output.hidden_states[-2].permute(1, 0, 2).clone() # [batch_size, 77, 4096] + negative_text_proj = output.hidden_states[-1][-1, :, :].clone() # [batch_size, 4096] + + if do_classifier_free_guidance: + # duplicate unconditional embeddings for each generation per prompt, using mps friendly method + seq_len = negative_prompt_embeds.shape[1] + + negative_prompt_embeds = negative_prompt_embeds.to(dtype=text_encoder.dtype, device=device) + + negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1) + negative_prompt_embeds = negative_prompt_embeds.view( + batch_size * num_images_per_prompt, seq_len, -1 + ) + + bs_embed = text_proj.shape[0] + text_proj = text_proj.repeat(1, num_images_per_prompt).view( + bs_embed * num_images_per_prompt, -1 + ) + negative_text_proj = negative_text_proj.repeat(1, num_images_per_prompt).view( + bs_embed * num_images_per_prompt, -1 + ) + text_encoder.to(offload_device) + mm.soft_empty_cache() + gc.collect() + kolors_embeds = { + 'prompt_embeds': prompt_embeds, + 'negative_prompt_embeds': negative_prompt_embeds, + 'pooled_prompt_embeds': text_proj, + 'negative_pooled_prompt_embeds': negative_text_proj + } + + return (kolors_embeds,) + + +class KolorsSampler: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "kolors_model": ("KOLORSMODEL", ), + "kolors_embeds": ("KOLORS_EMBEDS", ), + + "width": ("INT", {"default": 1024, "min": 64, "max": 2048, "step": 64}), + "height": ("INT", {"default": 1024, "min": 64, "max": 2048, "step": 64}), + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 25, "min": 1, "max": 200, "step": 1}), + "cfg": ("FLOAT", {"default": 5.0, "min": 0.0, "max": 20.0, "step": 0.01}), + + "scheduler": ( + [ + "EulerDiscreteScheduler", + "EulerAncestralDiscreteScheduler", + "DPMSolverMultistepScheduler", + "DPMSolverMultistepScheduler_SDE_karras", + "UniPCMultistepScheduler", + "DEISMultistepScheduler", + ], + { + "default": 'EulerDiscreteScheduler' + } + ), + }, + "optional": { + 
"latent": ("LATENT", ), + "denoise_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + } + } + + RETURN_TYPES = ("LATENT",) + RETURN_NAMES =("latent",) + FUNCTION = "process" + CATEGORY = "KwaiKolorsWrapper" + + def process(self, kolors_model, kolors_embeds, width, height, seed, steps, cfg, scheduler, latent=None, denoise_strength=1.0): + device = mm.get_torch_device() + offload_device = mm.unet_offload_device() + + vae_scaling_factor = 0.13025 #SDXL scaling factor + + mm.soft_empty_cache() + gc.collect() + + pipeline = kolors_model['pipeline'] + + scheduler_config = { + "beta_schedule": "scaled_linear", + "beta_start": 0.00085, + "beta_end": 0.014, + "dynamic_thresholding_ratio": 0.995, + "num_train_timesteps": 1100, + "prediction_type": "epsilon", + "rescale_betas_zero_snr": False, + "steps_offset": 1, + "timestep_spacing": "leading", + "trained_betas": None, + } + if scheduler == "DPMSolverMultistepScheduler": + noise_scheduler = DPMSolverMultistepScheduler(**scheduler_config) + elif scheduler == "DPMSolverMultistepScheduler_SDE_karras": + scheduler_config.update({"algorithm_type": "sde-dpmsolver++"}) + scheduler_config.update({"use_karras_sigmas": True}) + noise_scheduler = DPMSolverMultistepScheduler(**scheduler_config) + elif scheduler == "DEISMultistepScheduler": + scheduler_config.pop("rescale_betas_zero_snr") + noise_scheduler = DEISMultistepScheduler(**scheduler_config) + elif scheduler == "EulerDiscreteScheduler": + scheduler_config.update({"interpolation_type": "linear"}) + scheduler_config.pop("dynamic_thresholding_ratio") + noise_scheduler = EulerDiscreteScheduler(**scheduler_config) + elif scheduler == "EulerAncestralDiscreteScheduler": + scheduler_config.pop("dynamic_thresholding_ratio") + noise_scheduler = EulerAncestralDiscreteScheduler(**scheduler_config) + elif scheduler == "UniPCMultistepScheduler": + scheduler_config.pop("rescale_betas_zero_snr") + noise_scheduler = UniPCMultistepScheduler(**scheduler_config) + + pipeline.scheduler = noise_scheduler + + generator= torch.Generator(device).manual_seed(seed) + + pipeline.unet.to(device) + + if latent is not None: + samples_in = latent['samples'] + samples_in = samples_in * vae_scaling_factor + samples_in = samples_in.to(pipeline.unet.dtype).to(device) + + latent_out = pipeline( + prompt=None, + latents=samples_in if latent is not None else None, + prompt_embeds = kolors_embeds['prompt_embeds'], + pooled_prompt_embeds = kolors_embeds['pooled_prompt_embeds'], + negative_prompt_embeds = kolors_embeds['negative_prompt_embeds'], + negative_pooled_prompt_embeds = kolors_embeds['negative_pooled_prompt_embeds'], + height=height, + width=width, + num_inference_steps=steps, + guidance_scale=cfg, + num_images_per_prompt=1, + generator= generator, + strength=denoise_strength, + ).images + + pipeline.unet.to(offload_device) + + latent_out = latent_out / vae_scaling_factor + + return ({'samples': latent_out},) + +NODE_CLASS_MAPPINGS = { + "DownloadAndLoadKolorsModel": DownloadAndLoadKolorsModel, + "DownloadAndLoadChatGLM3": DownloadAndLoadChatGLM3, + "KolorsSampler": KolorsSampler, + "KolorsTextEncode": KolorsTextEncode, + "LoadChatGLM3": LoadChatGLM3 +} +NODE_DISPLAY_NAME_MAPPINGS = { + "DownloadAndLoadKolorsModel": "(Down)load Kolors Model", + "DownloadAndLoadChatGLM3": "(Down)load ChatGLM3 Model", + "KolorsSampler": "Kolors Sampler", + "KolorsTextEncode": "Kolors Text Encode", + "LoadChatGLM3": "Load ChatGLM3 Model" +} diff --git a/ComfyUI-KwaiKolorsWrapper/pyproject.toml 
b/ComfyUI-KwaiKolorsWrapper/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..c49bebf3fcf2a76de75b383a32ccae53bf6785e5 --- /dev/null +++ b/ComfyUI-KwaiKolorsWrapper/pyproject.toml @@ -0,0 +1,15 @@ +[project] +name = "comfyui-kwaikolorswrapper" +description = "Rudimentary wrapper that runs [a/Kwai-Kolors](https://huggingface.co/Kwai-Kolors/Kolors) text2image pipeline using diffusers." +version = "1.0.1" +license = "Apache-2.0" +dependencies = ["diffusers>=0.28.2", "transformers>=4.26.1", "sentencepiece", "accelerate", "cpm-kernels"] + +[project.urls] +Repository = "https://github.com/kijai/ComfyUI-KwaiKolorsWrapper" +# Used by Comfy Registry https://comfyregistry.org + +[tool.comfy] +PublisherId = "kijai" +DisplayName = "ComfyUI-KwaiKolorsWrapper" +Icon = "" diff --git a/ComfyUI-KwaiKolorsWrapper/requirements.txt b/ComfyUI-KwaiKolorsWrapper/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..5eb41f285a49f33171efff9c11e130db2592f429 --- /dev/null +++ b/ComfyUI-KwaiKolorsWrapper/requirements.txt @@ -0,0 +1,5 @@ +diffusers>=0.28.2 +transformers>=4.26.1 +sentencepiece +accelerate +cpm-kernels \ No newline at end of file diff --git a/ComfyUI-SUPIR/CKPT_PTH.py b/ComfyUI-SUPIR/CKPT_PTH.py new file mode 100644 index 0000000000000000000000000000000000000000..974b8ac0e2bb8acd280305fc9653548611f649da --- /dev/null +++ b/ComfyUI-SUPIR/CKPT_PTH.py @@ -0,0 +1,4 @@ +LLAVA_CLIP_PATH = None +LLAVA_MODEL_PATH = None +SDXL_CLIP1_PATH = None +SDXL_CLIP2_CKPT_PTH = None \ No newline at end of file diff --git a/ComfyUI-SUPIR/LICENSE b/ComfyUI-SUPIR/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..49080ca81cb3c89df746f1d80056f1b127cdad0a --- /dev/null +++ b/ComfyUI-SUPIR/LICENSE @@ -0,0 +1,13 @@ +License + +Copyright (c) 2024 XPixel Group, Especially the author team of SUPIR. + +The SUPIR ("Software") is made available for use, reproduction, and distribution strictly for non-commercial purposes. For the purposes of this declaration, "non-commercial" is defined as not primarily intended for or directed towards commercial advantage or monetary compensation. + +By using, reproducing, or distributing the Software, you agree to abide by this restriction and not to use the Software for any commercial purposes without obtaining prior written permission from Dr. Jinjin Gu. + +This declaration does not in any way limit the rights under any open source license that may apply to the Software; it solely adds a condition that the Software shall not be used for commercial purposes. + +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +For inquiries or to obtain permission for commercial use, please contact Dr. Jinjin Gu (jinjin.gu@suppixel.ai). diff --git a/ComfyUI-SUPIR/README.md b/ComfyUI-SUPIR/README.md new file mode 100644 index 0000000000000000000000000000000000000000..7e1d117dc415431af6348f65e10a2bf4e9e7f858 --- /dev/null +++ b/ComfyUI-SUPIR/README.md @@ -0,0 +1,100 @@ +# ComfyUI SUPIR upscaler wrapper node +# UPDATE3: +Pruned models in safetensors format now available here: +https://huggingface.co/Kijai/SUPIR_pruned/tree/main +# UPDATE2: +![image](https://github.com/kijai/ComfyUI-SUPIR/assets/40791699/65baec3e-cb4a-4eec-8d45-2b08157b1e86) +Added a better way to load the SDXL model, which also allows using LoRAs. 
The old node will remain for now to not break old workflows, and it is dubbed Legacy along with the single node, as I do not want to maintain those.
+
+# UPDATE:
+
+As I have learned a lot with this project, I have now separated the single node into multiple nodes that make more sense to use in ComfyUI and make it clearer how SUPIR works. This is still a wrapper, though the whole thing has deviated from the original with much wider hardware support, more efficient model loading, far less memory usage and more sampler options. Here's a quick example (workflow is included) of using a Lightning model; quality suffers a bit, but it's very fast, and I recommend starting with it, as faster sampling makes it a lot easier to learn what the settings do.
+
+Under the hood SUPIR is an SDXL img2img pipeline, the biggest custom part being their ControlNet. What they call "first stage" is a denoising process using their special "denoise encoder" VAE (a short code sketch of this step is included at the end of this README). This is not to be confused with the Gradio demo's "first stage", which is labeled as such for the Llava preprocessing; the Gradio "Stage2" still runs the denoising process anyway. This step can be fully skipped with the nodes, or replaced with any other preprocessing node such as a model upscaler or anything you want.
+
+https://github.com/kijai/ComfyUI-SUPIR/assets/40791699/5cae2a24-d425-462c-b89d-df7dcf01595c
+
+
+
+# Installing
+Either use the Manager and install from git, or clone this repo to custom_nodes and run:
+
+`pip install -r requirements.txt`
+
+or if you use portable (run this in the ComfyUI_windows_portable -folder):
+
+`python_embedded\python.exe -m pip install -r ComfyUI\custom_nodes\ComfyUI-SUPIR\requirements.txt`
+
+The PyTorch version should be fairly new too; the latest stable release (2.2.1) works.
+
+`xformers` is automatically detected and enabled if found, but it's not necessary; in some cases it can be a bit faster though:
+
+`pip install -U xformers --no-dependencies` (for portable `python_embedded\python.exe -m pip install -U xformers --no-dependencies` )
+
+Get the SUPIR model(s) from the original links below; they are loaded from the normal `ComfyUI/models/checkpoints` -folder. In addition you need an SDXL model, which is loaded from the same folder.
+
+I have not included Llava in this, but you can input any captions to the node and thus use anything you want to generate them, or just don't; it seems to work great even without.
+
+Memory requirements are directly related to the input image resolution. The "scale_by" in the node simply scales the input, so you can leave it at 1.0 and size your input with any other node as well. In my testing I was able to run 512x512 to 1024x1024 with a 10GB 3080 GPU, and other tests on a 24GB GPU up to 3072x3072. System RAM requirements are also hefty; I don't have exact numbers, but I would guess under 32GB is going to have issues (tested with 64GB).
+
+## Updates:
+- fp8 seems to work fine for the unet; I was able to do 512p to 2048p with under 10GB VRAM used. For the VAE it seems to cause artifacts, so I recommend using tiled_vae instead.
+- CLIP models are no longer needed separately; instead they are loaded from your selected SDXL checkpoint.
+______
+Mirror for the models: https://huggingface.co/camenduru/SUPIR/tree/main
+
+# Tests
+Video upscale test (currently the node does frames one by one from the input batch):
+
+Original: https://github.com/kijai/ComfyUI-SUPIR/assets/40791699/33621520-a429-4155-aa3a-ac5cd15bda56
+
+Upscaled 3x: https://github.com/kijai/ComfyUI-SUPIR/assets/40791699/d6c60e0a-11c3-496d-82c6-a724758a131a
+
+Image upscale 3x from 512p:
+https://github.com/kijai/ComfyUI-SUPIR/assets/40791699/545ddce4-8324-45cb-a545-6d1f527d8750
+
+
+
+-------------------------------------------
+
+
+Original repo:
+https://github.com/Fanghua-Yu/SUPIR
+
+#### Models we provided:
+* `SUPIR-v0Q`: [Baidu Netdisk](https://pan.baidu.com/s/1lnefCZhBTeDWijqbj1jIyw?pwd=pjq6), [Google Drive](https://drive.google.com/drive/folders/1yELzm5SvAi9e7kPcO_jPp2XkTs4vK6aR?usp=sharing)
+
+    Default training settings from the paper. High generalization and high image quality in most cases.
+
+* `SUPIR-v0F`: [Baidu Netdisk](https://pan.baidu.com/s/1AECN8NjiVuE3hvO8o-Ua6A?pwd=k2uz), [Google Drive](https://drive.google.com/drive/folders/1yELzm5SvAi9e7kPcO_jPp2XkTs4vK6aR?usp=sharing)
+
+    Training with light degradation settings. The Stage1 encoder of `SUPIR-v0F` retains more details when facing light degradations.
+
+
+## BibTeX
+    @misc{yu2024scaling,
+      title={Scaling Up to Excellence: Practicing Model Scaling for Photo-Realistic Image Restoration In the Wild},
+      author={Fanghua Yu and Jinjin Gu and Zheyuan Li and Jinfan Hu and Xiangtao Kong and Xintao Wang and Jingwen He and Yu Qiao and Chao Dong},
+      year={2024},
+      eprint={2401.13627},
+      archivePrefix={arXiv},
+      primaryClass={cs.CV}
+    }
+
+---
+
+## 📧 Contact
+If you have any questions, please email `fanghuayu96@gmail.com`.
+
+---
+## Non-Commercial Use Only Declaration
+The SUPIR ("Software") is made available for use, reproduction, and distribution strictly for non-commercial purposes. For the purposes of this declaration, "non-commercial" is defined as not primarily intended for or directed towards commercial advantage or monetary compensation.
+
+By using, reproducing, or distributing the Software, you agree to abide by this restriction and not to use the Software for any commercial purposes without obtaining prior written permission from Dr. Jinjin Gu.
+
+This declaration does not in any way limit the rights under any open source license that may apply to the Software; it solely adds a condition that the Software shall not be used for commercial purposes.
+
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+For inquiries or to obtain permission for commercial use, please contact Dr. Jinjin Gu (hellojasongt@gmail.com).
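+
+## What the "first stage" does, in code
+
+Purely as an illustration of the stage-1 denoise described above (this is a rough sketch, not a separate API), the relevant calls in `SUPIR/models/SUPIR_model.py` boil down to the following; the method names are taken from that file:
+
+```python
+import torch
+
+@torch.no_grad()
+def supir_first_stage(model, x):
+    # x: image batch in [-1, 1], shape [N, C, H, W]
+    _z = model.encode_first_stage_with_denoise(x, use_sample=False)  # latent from the special "denoise encoder"; also used as the ControlNet condition
+    x_stage1 = model.decode_first_stage(_z)                          # decoded, cleaned-up image
+    z_stage1 = model.encode_first_stage(x_stage1)                    # re-encoded with the regular encoder; used to center the restoration guidance
+    return x_stage1, z_stage1
+```
+
+In the node graph this step can be skipped entirely or swapped for any other preprocessor, as noted above.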
diff --git a/ComfyUI-SUPIR/SUPIR/__init__.py b/ComfyUI-SUPIR/SUPIR/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI-SUPIR/SUPIR/models/SUPIR_model.py b/ComfyUI-SUPIR/SUPIR/models/SUPIR_model.py new file mode 100644 index 0000000000000000000000000000000000000000..2d69eda48bb37090886b2c478aa20339656e49cd --- /dev/null +++ b/ComfyUI-SUPIR/SUPIR/models/SUPIR_model.py @@ -0,0 +1,203 @@ +import torch +from ...sgm.models.diffusion import DiffusionEngine +from ...sgm.util import instantiate_from_config +import copy +from ...sgm.modules.distributions.distributions import DiagonalGaussianDistribution +import random +from ...SUPIR.utils.colorfix import wavelet_reconstruction, adaptive_instance_normalization +from pytorch_lightning import seed_everything +from ...SUPIR.utils.tilevae import VAEHook +from ...SUPIR.util import convert_dtype +from contextlib import nullcontext +import comfy.model_management + +device = comfy.model_management.get_torch_device() + +class SUPIRModel(DiffusionEngine): + def __init__(self, control_stage_config, ae_dtype='fp32', diffusion_dtype='fp32', p_p='', n_p='', *args, **kwargs): + super().__init__(*args, **kwargs) + control_model = instantiate_from_config(control_stage_config) + self.model.load_control_model(control_model) + self.first_stage_model.denoise_encoder = copy.deepcopy(self.first_stage_model.encoder) + self.sampler_config = kwargs['sampler_config'] + + self.ae_dtype = convert_dtype(ae_dtype) + self.model.dtype = convert_dtype(diffusion_dtype) + + self.p_p = p_p + self.n_p = n_p + + @torch.no_grad() + def encode_first_stage(self, x): + #with torch.autocast(device, dtype=self.ae_dtype): + autocast_condition = (self.ae_dtype == torch.float16 or self.ae_dtype == torch.bfloat16) and not comfy.model_management.is_device_mps(device) + with torch.autocast(comfy.model_management.get_autocast_device(device), dtype=self.ae_dtype) if autocast_condition else nullcontext(): + z = self.first_stage_model.encode(x) + z = self.scale_factor * z + return z + + @torch.no_grad() + def encode_first_stage_with_denoise(self, x, use_sample=True, is_stage1=False): + #with torch.autocast(device, dtype=self.ae_dtype): + self.first_stage_model.to(self.ae_dtype) + autocast_condition = (self.model.dtype == torch.float16 or self.model.dtype == torch.bfloat16) and not comfy.model_management.is_device_mps(device) + with torch.autocast(comfy.model_management.get_autocast_device(device), dtype=self.ae_dtype) if autocast_condition else nullcontext(): + if is_stage1: + h = self.first_stage_model.denoise_encoder_s1(x) + else: + h = self.first_stage_model.denoise_encoder(x) + moments = self.first_stage_model.quant_conv(h) + posterior = DiagonalGaussianDistribution(moments) + if use_sample: + z = posterior.sample() + else: + z = posterior.mode() + z = self.scale_factor * z + return z + + @torch.no_grad() + def decode_first_stage(self, z): + z = 1.0 / self.scale_factor * z + autocast_condition = (self.ae_dtype == torch.float16 or self.ae_dtype == torch.bfloat16) and not comfy.model_management.is_device_mps(device) + with torch.autocast(comfy.model_management.get_autocast_device(device), dtype=self.ae_dtype) if autocast_condition else nullcontext(): + out = self.first_stage_model.decode(z) + return out.float() + + @torch.no_grad() + def batchify_denoise(self, x, is_stage1=False): + ''' + [N, C, H, W], [-1, 1], RGB + ''' + x = self.encode_first_stage_with_denoise(x, use_sample=False, is_stage1=is_stage1) + return 
self.decode_first_stage(x) + + @torch.no_grad() + def batchify_sample(self, x, p, p_p='default', n_p='default', num_steps=100, restoration_scale=4.0, s_churn=0, s_noise=1.003, cfg_scale=4.0, seed=-1, + num_samples=1, control_scale=1, color_fix_type='None', use_linear_CFG=False, use_linear_control_scale=False, + cfg_scale_start=1.0, control_scale_start=0.0, **kwargs): + ''' + [N, C], [-1, 1], RGB + ''' + assert len(x) == len(p) + assert color_fix_type in ['Wavelet', 'AdaIn', 'None'] + + N = len(x) + if num_samples > 1: + assert N == 1 + N = num_samples + x = x.repeat(N, 1, 1, 1) + p = p * N + + if p_p == 'default': + p_p = self.p_p + if n_p == 'default': + n_p = self.n_p + + self.sampler_config.params.num_steps = num_steps + if use_linear_CFG: + self.sampler_config.params.guider_config.params.scale_min = cfg_scale + self.sampler_config.params.guider_config.params.scale = cfg_scale_start + else: + self.sampler_config.params.guider_config.params.scale_min = cfg_scale + self.sampler_config.params.guider_config.params.scale = cfg_scale + self.sampler_config.params.restore_cfg = restoration_scale + self.sampler_config.params.s_churn = s_churn + self.sampler_config.params.s_noise = s_noise + self.sampler = instantiate_from_config(self.sampler_config) + + print("Sampler: ", self.sampler_config.target) + print("sampler_config: ", self.sampler_config.params) + + if seed == -1: + seed = random.randint(0, 65535) + seed_everything(seed) + + + self.model.to('cpu') + self.conditioner.to('cpu') + + # stage 1: encode/decode/encode + self.first_stage_model.to(device) + _z = self.encode_first_stage_with_denoise(x, use_sample=False) + x_stage1 = self.decode_first_stage(_z) + z_stage1 = self.encode_first_stage(x_stage1) + self.first_stage_model.to('cpu') + + #conditioning + self.conditioner.to(device) + c, uc = self.prepare_condition(_z, p, p_p, n_p, N) + self.conditioner.to('cpu') + + denoiser = lambda input, sigma, c, control_scale: self.denoiser( + self.model, input, sigma, c, control_scale, **kwargs + ) + noised_z = torch.randn_like(_z).to(_z.device) + + comfy.model_management.soft_empty_cache() + + #sampling + self.model.diffusion_model.to(device) + self.model.control_model.to(device) + self.denoiser.to(device) + + _samples = self.sampler(denoiser, noised_z, cond=c, uc=uc, x_center=z_stage1, control_scale=control_scale, + use_linear_control_scale=use_linear_control_scale, control_scale_start=control_scale_start) + self.model.diffusion_model.to('cpu') + self.model.control_model.to('cpu') + + #decoding + self.first_stage_model.to(device) + samples = self.decode_first_stage(_samples) + self.first_stage_model.to('cpu') + + if color_fix_type == 'Wavelet': + samples = wavelet_reconstruction(samples, x_stage1) + elif color_fix_type == 'AdaIn': + samples = adaptive_instance_normalization(samples, x_stage1) + return samples + + def init_tile_vae(self, encoder_tile_size=512, decoder_tile_size=64): + self.first_stage_model.denoise_encoder.original_forward = self.first_stage_model.denoise_encoder.forward + self.first_stage_model.encoder.original_forward = self.first_stage_model.encoder.forward + self.first_stage_model.decoder.original_forward = self.first_stage_model.decoder.forward + self.first_stage_model.denoise_encoder.forward = VAEHook( + self.first_stage_model.denoise_encoder, encoder_tile_size, is_decoder=False, fast_decoder=False, + fast_encoder=False, color_fix=False, to_gpu=True) + self.first_stage_model.encoder.forward = VAEHook( + self.first_stage_model.encoder, encoder_tile_size, is_decoder=False, 
fast_decoder=False, + fast_encoder=False, color_fix=False, to_gpu=True) + self.first_stage_model.decoder.forward = VAEHook( + self.first_stage_model.decoder, decoder_tile_size, is_decoder=True, fast_decoder=False, + fast_encoder=False, color_fix=False, to_gpu=True) + + def prepare_condition(self, _z, p, p_p, n_p, N): + batch = {} + batch['original_size_as_tuple'] = torch.tensor([1024, 1024]).repeat(N, 1).to(_z.device) + batch['crop_coords_top_left'] = torch.tensor([0, 0]).repeat(N, 1).to(_z.device) + batch['target_size_as_tuple'] = torch.tensor([1024, 1024]).repeat(N, 1).to(_z.device) + batch['aesthetic_score'] = torch.tensor([9.0]).repeat(N, 1).to(_z.device) + batch['control'] = _z + + batch_uc = copy.deepcopy(batch) + batch_uc['txt'] = [n_p for _ in p] + autocast_condition = (self.model.dtype == torch.float16 or self.model.dtype == torch.bfloat16) and not comfy.model_management.is_device_mps(device) + if not isinstance(p[0], list): + print("Using local prompt: ") + batch['txt'] = [''.join([_p, p_p]) for _p in p] + print(batch['txt']) + with torch.autocast(comfy.model_management.get_autocast_device(device), dtype=self.model.dtype) if autocast_condition else nullcontext(): + c, uc = self.conditioner.get_unconditional_conditioning(batch, batch_uc) + else: + print("Using tile prompts") + assert len(p) == 1, 'Support bs=1 only for local prompt conditioning.' + p_tiles = p[0] + c = [] + for i, p_tile in enumerate(p_tiles): + batch['txt'] = [''.join([p_tile, p_p])] + with torch.autocast(comfy.model_management.get_autocast_device(device), dtype=self.model.dtype) if autocast_condition else nullcontext(): + if i == 0: + _c, uc = self.conditioner.get_unconditional_conditioning(batch, batch_uc) + else: + _c, _ = self.conditioner.get_unconditional_conditioning(batch, None) + c.append(_c) + return c, uc \ No newline at end of file diff --git a/ComfyUI-SUPIR/SUPIR/models/SUPIR_model_v2.py b/ComfyUI-SUPIR/SUPIR/models/SUPIR_model_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..968fc8e334aa4695993aa5d3c450d14842ac985d --- /dev/null +++ b/ComfyUI-SUPIR/SUPIR/models/SUPIR_model_v2.py @@ -0,0 +1,11 @@ +from ...sgm.models.diffusion import DiffusionEngine +from ...sgm.util import instantiate_from_config +import copy + +class SUPIRModel(DiffusionEngine): + def __init__(self, control_stage_config, ae_dtype='fp32', diffusion_dtype='fp32', p_p='', n_p='', *args, **kwargs): + super().__init__(*args, **kwargs) + control_model = instantiate_from_config(control_stage_config) + self.model.load_control_model(control_model) + self.first_stage_model.denoise_encoder = copy.deepcopy(self.first_stage_model.encoder) + self.sampler_config = kwargs['sampler_config'] \ No newline at end of file diff --git a/ComfyUI-SUPIR/SUPIR/models/__init__.py b/ComfyUI-SUPIR/SUPIR/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI-SUPIR/SUPIR/modules/SUPIR_v0.py b/ComfyUI-SUPIR/SUPIR/modules/SUPIR_v0.py new file mode 100644 index 0000000000000000000000000000000000000000..6d91f334ce8e8e140e7755616c87882a820bf2a2 --- /dev/null +++ b/ComfyUI-SUPIR/SUPIR/modules/SUPIR_v0.py @@ -0,0 +1,722 @@ +# from einops._torch_specific import allow_ops_in_compiled_graph +# allow_ops_in_compiled_graph() +import einops +import torch +import torch as th +import torch.nn as nn +from einops import rearrange, repeat + +from ...sgm.modules.diffusionmodules.util import ( + avg_pool_nd, + checkpoint, + conv_nd, + linear, + normalization, + 
timestep_embedding, + zero_module, +) + +from ...sgm.modules.diffusionmodules.openaimodel import Downsample, Upsample, UNetModel, Timestep, \ + TimestepEmbedSequential, ResBlock, AttentionBlock, TimestepBlock +from ...sgm.modules.attention import SpatialTransformer, MemoryEfficientCrossAttention, CrossAttention +from ...sgm.util import default, log_txt_as_img, exists, instantiate_from_config +import re +import torch +from functools import partial + +import comfy.model_management +device = comfy.model_management.get_torch_device() + +import comfy.ops +ops = comfy.ops.manual_cast + +try: + import xformers + import xformers.ops + XFORMERS_IS_AVAILBLE = True +except: + XFORMERS_IS_AVAILBLE = False + +# dummy replace +def convert_module_to_f16(x): + pass + + +def convert_module_to_f32(x): + pass + + +class ZeroConv(nn.Module): + def __init__(self, label_nc, norm_nc, mask=False): + super().__init__() + self.zero_conv = zero_module(conv_nd(2, label_nc, norm_nc, 1, 1, 0)) + self.mask = mask + + def forward(self, c, h, h_ori=None): + # with torch.cuda.amp.autocast(enabled=False, dtype=torch.float32): + if not self.mask: + h = h + self.zero_conv(c) + else: + h = h + self.zero_conv(c) * torch.zeros_like(h) + if h_ori is not None: + h = th.cat([h_ori, h], dim=1) + return h + + +class ZeroSFT(nn.Module): + def __init__(self, label_nc, norm_nc, concat_channels=0, norm=True, mask=False): + super().__init__() + + # param_free_norm_type = str(parsed.group(1)) + ks = 3 + pw = ks // 2 + + self.norm = norm + if self.norm: + self.param_free_norm = normalization(norm_nc + concat_channels) + else: + self.param_free_norm = nn.Identity() + + nhidden = 128 + + self.mlp_shared = nn.Sequential( + ops.Conv2d(label_nc, nhidden, kernel_size=ks, padding=pw), + nn.SiLU() + ) + self.zero_mul = zero_module(ops.Conv2d(nhidden, norm_nc + concat_channels, kernel_size=ks, padding=pw)) + self.zero_add = zero_module(ops.Conv2d(nhidden, norm_nc + concat_channels, kernel_size=ks, padding=pw)) + # self.zero_mul = ops.Conv2d(nhidden, norm_nc + concat_channels, kernel_size=ks, padding=pw) + # self.zero_add = ops.Conv2d(nhidden, norm_nc + concat_channels, kernel_size=ks, padding=pw) + + self.zero_conv = zero_module(conv_nd(2, label_nc, norm_nc, 1, 1, 0)) + self.pre_concat = bool(concat_channels != 0) + self.mask = mask + + def forward(self, c, h, h_ori=None, control_scale=1): + assert self.mask is False + if h_ori is not None and self.pre_concat: + h_raw = th.cat([h_ori, h], dim=1) + else: + h_raw = h + + if self.mask: + h = h + self.zero_conv(c) * torch.zeros_like(h) + else: + h = h + self.zero_conv(c) + if h_ori is not None and self.pre_concat: + h = th.cat([h_ori, h], dim=1) + actv = self.mlp_shared(c) + gamma = self.zero_mul(actv) + beta = self.zero_add(actv) + if self.mask: + gamma = gamma * torch.zeros_like(gamma) + beta = beta * torch.zeros_like(beta) + h = self.param_free_norm(h) * (gamma + 1) + beta + if h_ori is not None and not self.pre_concat: + h = th.cat([h_ori, h], dim=1) + return h * control_scale + h_raw * (1 - control_scale) + + +class ZeroCrossAttn(nn.Module): + ATTENTION_MODES = { + "softmax": CrossAttention, # vanilla attention + "softmax-xformers": MemoryEfficientCrossAttention + } + + def __init__(self, context_dim, query_dim, zero_out=True, mask=False): + super().__init__() + attn_mode = "softmax-xformers" if XFORMERS_IS_AVAILBLE else "softmax" + assert attn_mode in self.ATTENTION_MODES + attn_cls = self.ATTENTION_MODES[attn_mode] + self.attn = attn_cls(query_dim=query_dim, context_dim=context_dim, 
heads=query_dim//64, dim_head=64) + self.norm1 = normalization(query_dim) + self.norm2 = normalization(context_dim) + + self.mask = mask + + # if zero_out: + # # for p in self.attn.to_out.parameters(): + # # p.detach().zero_() + # self.attn.to_out = zero_module(self.attn.to_out) + + def forward(self, context, x, control_scale=1): + assert self.mask is False + x_in = x + x = self.norm1(x) + context = self.norm2(context) + b, c, h, w = x.shape + x = rearrange(x, 'b c h w -> b (h w) c').contiguous() + context = rearrange(context, 'b c h w -> b (h w) c').contiguous() + x = self.attn(x, context) + x = rearrange(x, 'b (h w) c -> b c h w', h=h, w=w).contiguous() + if self.mask: + x = x * torch.zeros_like(x) + x = x_in + x * control_scale + + return x + + +class GLVControl(nn.Module): + def __init__( + self, + in_channels, + model_channels, + out_channels, + num_res_blocks, + attention_resolutions, + dropout=0, + channel_mult=(1, 2, 4, 8), + conv_resample=True, + dims=2, + num_classes=None, + use_checkpoint=False, + use_fp16=False, + num_heads=-1, + num_head_channels=-1, + num_heads_upsample=-1, + use_scale_shift_norm=False, + resblock_updown=False, + use_new_attention_order=False, + use_spatial_transformer=False, # custom transformer support + transformer_depth=1, # custom transformer support + context_dim=None, # custom transformer support + n_embed=None, # custom support for prediction of discrete ids into codebook of first stage vq model + legacy=True, + disable_self_attentions=None, + num_attention_blocks=None, + disable_middle_self_attn=False, + use_linear_in_transformer=False, + spatial_transformer_attn_type="softmax", + adm_in_channels=None, + use_fairscale_checkpoint=False, + offload_to_cpu=False, + transformer_depth_middle=None, + input_upscale=1, + ): + super().__init__() + from omegaconf.listconfig import ListConfig + + if use_spatial_transformer: + assert ( + context_dim is not None + ), "Fool!! You forgot to include the dimension of your cross-attention conditioning..." + + if context_dim is not None: + assert ( + use_spatial_transformer + ), "Fool!! You forgot to use the spatial transformer for your cross-attention conditioning..." 
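+            # OmegaConf may pass context_dim as a ListConfig; the next lines normalize it to a plain Python list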
+ if type(context_dim) == ListConfig: + context_dim = list(context_dim) + + if num_heads_upsample == -1: + num_heads_upsample = num_heads + + if num_heads == -1: + assert ( + num_head_channels != -1 + ), "Either num_heads or num_head_channels has to be set" + + if num_head_channels == -1: + assert ( + num_heads != -1 + ), "Either num_heads or num_head_channels has to be set" + + self.in_channels = in_channels + self.model_channels = model_channels + self.out_channels = out_channels + if isinstance(transformer_depth, int): + transformer_depth = len(channel_mult) * [transformer_depth] + elif isinstance(transformer_depth, ListConfig): + transformer_depth = list(transformer_depth) + transformer_depth_middle = default( + transformer_depth_middle, transformer_depth[-1] + ) + + if isinstance(num_res_blocks, int): + self.num_res_blocks = len(channel_mult) * [num_res_blocks] + else: + if len(num_res_blocks) != len(channel_mult): + raise ValueError( + "provide num_res_blocks either as an int (globally constant) or " + "as a list/tuple (per-level) with the same length as channel_mult" + ) + self.num_res_blocks = num_res_blocks + # self.num_res_blocks = num_res_blocks + if disable_self_attentions is not None: + # should be a list of booleans, indicating whether to disable self-attention in TransformerBlocks or not + assert len(disable_self_attentions) == len(channel_mult) + if num_attention_blocks is not None: + assert len(num_attention_blocks) == len(self.num_res_blocks) + assert all( + map( + lambda i: self.num_res_blocks[i] >= num_attention_blocks[i], + range(len(num_attention_blocks)), + ) + ) + print( + f"Constructor of UNetModel received num_attention_blocks={num_attention_blocks}. " + f"This option has LESS priority than attention_resolutions {attention_resolutions}, " + f"i.e., in cases where num_attention_blocks[i] > 0 but 2**i not in attention_resolutions, " + f"attention will still not be set." 
+ ) # todo: convert to warning + + self.attention_resolutions = attention_resolutions + self.dropout = dropout + self.channel_mult = channel_mult + self.conv_resample = conv_resample + self.num_classes = num_classes + self.use_checkpoint = use_checkpoint + if use_fp16: + print("WARNING: use_fp16 was dropped and has no effect anymore.") + # self.dtype = th.float16 if use_fp16 else th.float32 + self.num_heads = num_heads + self.num_head_channels = num_head_channels + self.num_heads_upsample = num_heads_upsample + self.predict_codebook_ids = n_embed is not None + + assert use_fairscale_checkpoint != use_checkpoint or not ( + use_checkpoint or use_fairscale_checkpoint + ) + + self.use_fairscale_checkpoint = False + checkpoint_wrapper_fn = ( + partial(checkpoint_wrapper, offload_to_cpu=offload_to_cpu) + if self.use_fairscale_checkpoint + else lambda x: x + ) + + time_embed_dim = model_channels * 4 + self.time_embed = checkpoint_wrapper_fn( + nn.Sequential( + linear(model_channels, time_embed_dim), + nn.SiLU(), + linear(time_embed_dim, time_embed_dim), + ) + ) + + if self.num_classes is not None: + if isinstance(self.num_classes, int): + self.label_emb = nn.Embedding(num_classes, time_embed_dim) + elif self.num_classes == "continuous": + print("setting up linear c_adm embedding layer") + self.label_emb = ops.Linear(1, time_embed_dim) + elif self.num_classes == "timestep": + self.label_emb = checkpoint_wrapper_fn( + nn.Sequential( + Timestep(model_channels), + nn.Sequential( + linear(model_channels, time_embed_dim), + nn.SiLU(), + linear(time_embed_dim, time_embed_dim), + ), + ) + ) + elif self.num_classes == "sequential": + assert adm_in_channels is not None + self.label_emb = nn.Sequential( + nn.Sequential( + linear(adm_in_channels, time_embed_dim), + nn.SiLU(), + linear(time_embed_dim, time_embed_dim), + ) + ) + else: + raise ValueError() + + self.input_blocks = nn.ModuleList( + [ + TimestepEmbedSequential( + conv_nd(dims, in_channels, model_channels, 3, padding=1) + ) + ] + ) + self._feature_size = model_channels + input_block_chans = [model_channels] + ch = model_channels + ds = 1 + for level, mult in enumerate(channel_mult): + for nr in range(self.num_res_blocks[level]): + layers = [ + checkpoint_wrapper_fn( + ResBlock( + ch, + time_embed_dim, + dropout, + out_channels=mult * model_channels, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ) + ) + ] + ch = mult * model_channels + if ds in attention_resolutions: + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + # num_heads = 1 + dim_head = ( + ch // num_heads + if use_spatial_transformer + else num_head_channels + ) + if exists(disable_self_attentions): + disabled_sa = disable_self_attentions[level] + else: + disabled_sa = False + + if ( + not exists(num_attention_blocks) + or nr < num_attention_blocks[level] + ): + layers.append( + checkpoint_wrapper_fn( + AttentionBlock( + ch, + use_checkpoint=use_checkpoint, + num_heads=num_heads, + num_head_channels=dim_head, + use_new_attention_order=use_new_attention_order, + ) + ) + if not use_spatial_transformer + else checkpoint_wrapper_fn( + SpatialTransformer( + ch, + num_heads, + dim_head, + depth=transformer_depth[level], + context_dim=context_dim, + disable_self_attn=disabled_sa, + use_linear=use_linear_in_transformer, + attn_type=spatial_transformer_attn_type, + use_checkpoint=use_checkpoint, + ) + ) + ) + 
self.input_blocks.append(TimestepEmbedSequential(*layers)) + self._feature_size += ch + input_block_chans.append(ch) + if level != len(channel_mult) - 1: + out_ch = ch + self.input_blocks.append( + TimestepEmbedSequential( + checkpoint_wrapper_fn( + ResBlock( + ch, + time_embed_dim, + dropout, + out_channels=out_ch, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + down=True, + ) + ) + if resblock_updown + else Downsample( + ch, conv_resample, dims=dims, out_channels=out_ch + ) + ) + ) + ch = out_ch + input_block_chans.append(ch) + ds *= 2 + self._feature_size += ch + + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + # num_heads = 1 + dim_head = ch // num_heads if use_spatial_transformer else num_head_channels + self.middle_block = TimestepEmbedSequential( + checkpoint_wrapper_fn( + ResBlock( + ch, + time_embed_dim, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ) + ), + checkpoint_wrapper_fn( + AttentionBlock( + ch, + use_checkpoint=use_checkpoint, + num_heads=num_heads, + num_head_channels=dim_head, + use_new_attention_order=use_new_attention_order, + ) + ) + if not use_spatial_transformer + else checkpoint_wrapper_fn( + SpatialTransformer( # always uses a self-attn + ch, + num_heads, + dim_head, + depth=transformer_depth_middle, + context_dim=context_dim, + disable_self_attn=disable_middle_self_attn, + use_linear=use_linear_in_transformer, + attn_type=spatial_transformer_attn_type, + use_checkpoint=use_checkpoint, + ) + ), + checkpoint_wrapper_fn( + ResBlock( + ch, + time_embed_dim, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ) + ), + ) + + self.input_upscale = input_upscale + self.input_hint_block = TimestepEmbedSequential( + zero_module(conv_nd(dims, in_channels, model_channels, 3, padding=1)) + ) + + def convert_to_fp16(self): + """ + Convert the torso of the model to float16. + """ + self.input_blocks.apply(convert_module_to_f16) + self.middle_block.apply(convert_module_to_f16) + + def convert_to_fp32(self): + """ + Convert the torso of the model to float32. 
+ """ + self.input_blocks.apply(convert_module_to_f32) + self.middle_block.apply(convert_module_to_f32) + + def forward(self, x, timesteps, xt, context=None, y=None, **kwargs): + # with torch.cuda.amp.autocast(enabled=False, dtype=torch.float32): + # x = x.to(torch.float32) + # timesteps = timesteps.to(torch.float32) + # xt = xt.to(torch.float32) + # context = context.to(torch.float32) + # y = y.to(torch.float32) + # print(x.dtype) + xt, context, y = xt.to(x.dtype), context.to(x.dtype), y.to(x.dtype) + + if self.input_upscale != 1: + x = nn.functional.interpolate(x, scale_factor=self.input_upscale, mode='bilinear', antialias=True) + assert (y is not None) == ( + self.num_classes is not None + ), "must specify y if and only if the model is class-conditional" + hs = [] + t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False).to(x.dtype) + # import pdb + # pdb.set_trace() + emb = self.time_embed(t_emb) + + if self.num_classes is not None: + assert y.shape[0] == xt.shape[0] + emb = emb + self.label_emb(y) + + guided_hint = self.input_hint_block(x, emb, context) + + # h = x.type(self.dtype) + h = xt + for module in self.input_blocks: + if guided_hint is not None: + h = module(h, emb, context) + h += guided_hint + guided_hint = None + else: + h = module(h, emb, context) + hs.append(h) + # print(module) + # print(h.shape) + h = self.middle_block(h, emb, context) + hs.append(h) + return hs + + +class LightGLVUNet(UNetModel): + def __init__(self, mode='', project_type='ZeroSFT', project_channel_scale=1, + *args, **kwargs): + super().__init__(*args, **kwargs) + if mode == 'XL-base': + cond_output_channels = [320] * 4 + [640] * 3 + [1280] * 3 + project_channels = [160] * 4 + [320] * 3 + [640] * 3 + concat_channels = [320] * 2 + [640] * 3 + [1280] * 4 + [0] + cross_attn_insert_idx = [6, 3] + self.progressive_mask_nums = [0, 3, 7, 11] + elif mode == 'XL-refine': + cond_output_channels = [384] * 4 + [768] * 3 + [1536] * 6 + project_channels = [192] * 4 + [384] * 3 + [768] * 6 + concat_channels = [384] * 2 + [768] * 3 + [1536] * 7 + [0] + cross_attn_insert_idx = [9, 6, 3] + self.progressive_mask_nums = [0, 3, 6, 10, 14] + else: + raise NotImplementedError + + project_channels = [int(c * project_channel_scale) for c in project_channels] + + self.project_modules = nn.ModuleList() + for i in range(len(cond_output_channels)): + # if i == len(cond_output_channels) - 1: + # _project_type = 'ZeroCrossAttn' + # else: + # _project_type = project_type + _project_type = project_type + if _project_type == 'ZeroSFT': + self.project_modules.append(ZeroSFT(project_channels[i], cond_output_channels[i], + concat_channels=concat_channels[i])) + elif _project_type == 'ZeroCrossAttn': + self.project_modules.append(ZeroCrossAttn(cond_output_channels[i], project_channels[i])) + else: + raise NotImplementedError + + for i in cross_attn_insert_idx: + self.project_modules.insert(i, ZeroCrossAttn(cond_output_channels[i], concat_channels[i])) + # print(self.project_modules[i]) + + def step_progressive_mask(self): + if len(self.progressive_mask_nums) > 0: + mask_num = self.progressive_mask_nums.pop() + for i in range(len(self.project_modules)): + if i < mask_num: + self.project_modules[i].mask = True + else: + self.project_modules[i].mask = False + return + # print(f'step_progressive_mask, current masked layers: {mask_num}') + else: + return + # print('step_progressive_mask, no more masked layers') + # for i in range(len(self.project_modules)): + # print(self.project_modules[i].mask) + + + def forward(self, 
x, timesteps=None, context=None, y=None, control=None, control_scale=1, **kwargs): + """ + Apply the model to an input batch. + :param x: an [N x C x ...] Tensor of inputs. + :param timesteps: a 1-D batch of timesteps. + :param context: conditioning plugged in via crossattn + :param y: an [N] Tensor of labels, if class-conditional. + :return: an [N x C x ...] Tensor of outputs. + """ + assert (y is not None) == ( + self.num_classes is not None + ), "must specify y if and only if the model is class-conditional" + hs = [] + + _dtype = control[0].dtype + x, context, y = x.to(_dtype), context.to(_dtype), y.to(_dtype) + + with torch.no_grad(): + t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False).to(x.dtype) + emb = self.time_embed(t_emb) + + if self.num_classes is not None: + assert y.shape[0] == x.shape[0] + emb = emb + self.label_emb(y) + + # h = x.type(self.dtype) + h = x + for module in self.input_blocks: + h = module(h, emb, context) + hs.append(h) + + adapter_idx = len(self.project_modules) - 1 + control_idx = len(control) - 1 + h = self.middle_block(h, emb, context) + h = self.project_modules[adapter_idx](control[control_idx], h, control_scale=control_scale) + adapter_idx -= 1 + control_idx -= 1 + + for i, module in enumerate(self.output_blocks): + _h = hs.pop() + h = self.project_modules[adapter_idx](control[control_idx], _h, h, control_scale=control_scale) + adapter_idx -= 1 + # h = th.cat([h, _h], dim=1) + if len(module) == 3: + assert isinstance(module[2], Upsample) + for layer in module[:2]: + if isinstance(layer, TimestepBlock): + h = layer(h, emb) + elif isinstance(layer, SpatialTransformer): + h = layer(h, context) + else: + h = layer(h) + # print('cross_attn_here') + h = self.project_modules[adapter_idx](control[control_idx], h, control_scale=control_scale) + adapter_idx -= 1 + h = module[2](h) + else: + h = module(h, emb, context) + control_idx -= 1 + # print(module) + # print(h.shape) + + h = h.type(x.dtype) + if self.predict_codebook_ids: + assert False, "not supported anymore. what the f*** are you doing?" 
+ else: + return self.out(h) + +if __name__ == '__main__': + from omegaconf import OmegaConf + + # refiner + # opt = OmegaConf.load('../../options/train/debug_p2_xl.yaml') + # + # model = instantiate_from_config(opt.model.params.control_stage_config) + # hint = model(torch.randn([1, 4, 64, 64]), torch.randn([1]), torch.randn([1, 4, 64, 64])) + # hint = [h.device for h in hint] + # print(sum(map(lambda hint: hint.numel(), model.parameters()))) + # + # unet = instantiate_from_config(opt.model.params.network_config) + # unet = unet.device + # + # _output = unet(torch.randn([1, 4, 64, 64]).device, torch.randn([1]).device, torch.randn([1, 77, 1280]).device, + # torch.randn([1, 2560]).device, hint) + # print(sum(map(lambda _output: _output.numel(), unet.parameters()))) + + # base + with torch.no_grad(): + opt = OmegaConf.load('../../options/dev/SUPIR_tmp.yaml') + + model = instantiate_from_config(opt.model.params.control_stage_config) + model = model.to(device) + + hint = model(torch.randn([1, 4, 64, 64]).device, torch.randn([1]).device, torch.randn([1, 4, 64, 64]).device, torch.randn([1, 77, 2048]).device, + torch.randn([1, 2816]).device) + + #for h in hint: + # print(h.shape) + # + unet = instantiate_from_config(opt.model.params.network_config) + unet = unet.to(device) + _output = unet(torch.randn([1, 4, 64, 64]).device, torch.randn([1]).device, torch.randn([1, 77, 2048]).device, + torch.randn([1, 2816]).device, hint) + + + # model = instantiate_from_config(opt.model.params.control_stage_config) + # model = model.device + # # hint = model(torch.randn([1, 4, 64, 64]), torch.randn([1]), torch.randn([1, 4, 64, 64])) + # hint = model(torch.randn([1, 4, 64, 64]).device, torch.randn([1]).device, torch.randn([1, 4, 64, 64]).device, torch.randn([1, 77, 1280]).device, + # torch.randn([1, 2560]).device) + # # hint = [h.device for h in hint] + # + # for h in hint: + # print(h.shape) + # + # unet = instantiate_from_config(opt.model.params.network_config) + # unet = unet.device + # _output = unet(torch.randn([1, 4, 64, 64]).device, torch.randn([1]).device, torch.randn([1, 77, 1280]).device, + # torch.randn([1, 2560]).device, hint) diff --git a/ComfyUI-SUPIR/SUPIR/modules/__init__.py b/ComfyUI-SUPIR/SUPIR/modules/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..7161cc1ef68c3d12b51cebf432aaa871619242bd --- /dev/null +++ b/ComfyUI-SUPIR/SUPIR/modules/__init__.py @@ -0,0 +1,11 @@ +SDXL_BASE_CHANNEL_DICT = { + 'cond_output_channels': [320] * 4 + [640] * 3 + [1280] * 3, + 'project_channels': [160] * 4 + [320] * 3 + [640] * 3, + 'concat_channels': [320] * 2 + [640] * 3 + [1280] * 4 + [0] +} + +SDXL_REFINE_CHANNEL_DICT = { + 'cond_output_channels': [384] * 4 + [768] * 3 + [1536] * 6, + 'project_channels': [192] * 4 + [384] * 3 + [768] * 6, + 'concat_channels': [384] * 2 + [768] * 3 + [1536] * 7 + [0] +} \ No newline at end of file diff --git a/ComfyUI-SUPIR/SUPIR/util.py b/ComfyUI-SUPIR/SUPIR/util.py new file mode 100644 index 0000000000000000000000000000000000000000..62b98fc3d3814089418e60dcc532c75e210677e9 --- /dev/null +++ b/ComfyUI-SUPIR/SUPIR/util.py @@ -0,0 +1,173 @@ +import os +import torch +import numpy as np +#import cv2 +from PIL import Image +from torch.nn.functional import interpolate +from omegaconf import OmegaConf +from ..sgm.util import instantiate_from_config + + +def get_state_dict(d): + return d.get('state_dict', d) + + +def load_state_dict(ckpt_path, location='cpu'): + _, extension = os.path.splitext(ckpt_path) + if extension.lower() == ".safetensors": + import 
safetensors.torch + state_dict = safetensors.torch.load_file(ckpt_path, device=location) + else: + state_dict = get_state_dict(torch.load(ckpt_path, map_location=torch.device(location))) + state_dict = get_state_dict(state_dict) + print(f'Loaded state_dict from [{ckpt_path}]') + return state_dict + + +def create_model(config_path): + config = OmegaConf.load(config_path) + model = instantiate_from_config(config.model).cpu() + print(f'Loaded model config from [{config_path}]') + return model + + +def create_SUPIR_model(config_path, SUPIR_sign=None): + config = OmegaConf.load(config_path) + model = instantiate_from_config(config.model).cpu() + print(f'Loaded model config from [{config_path}]') + if config.SDXL_CKPT is not None: + model.load_state_dict(load_state_dict(config.SDXL_CKPT), strict=False) + if config.SUPIR_CKPT is not None: + model.load_state_dict(load_state_dict(config.SUPIR_CKPT), strict=False) + if SUPIR_sign is not None: + assert SUPIR_sign in ['F', 'Q'] + if SUPIR_sign == 'F': + model.load_state_dict(load_state_dict(config.SUPIR_CKPT_F), strict=False) + elif SUPIR_sign == 'Q': + model.load_state_dict(load_state_dict(config.SUPIR_CKPT_Q), strict=False) + return model + +def load_QF_ckpt(config_path): + config = OmegaConf.load(config_path) + ckpt_F = torch.load(config.SUPIR_CKPT_F, map_location='cpu') + ckpt_Q = torch.load(config.SUPIR_CKPT_Q, map_location='cpu') + return ckpt_Q, ckpt_F + + +def PIL2Tensor(img, upsacle=1, min_size=1024): + ''' + PIL.Image -> Tensor[C, H, W], RGB, [-1, 1] + ''' + # size + w, h = img.size + w *= upsacle + h *= upsacle + w0, h0 = round(w), round(h) + if min(w, h) < min_size: + _upsacle = min_size / min(w, h) + w *= _upsacle + h *= _upsacle + else: + _upsacle = 1 + w = int(np.round(w / 64.0)) * 64 + h = int(np.round(h / 64.0)) * 64 + x = img.resize((w, h), Image.BICUBIC) + x = np.array(x).round().clip(0, 255).astype(np.uint8) + x = x / 255 * 2 - 1 + x = torch.tensor(x, dtype=torch.float32).permute(2, 0, 1) + return x, h0, w0 + + +def Tensor2PIL(x, h0, w0): + ''' + Tensor[C, H, W], RGB, [-1, 1] -> PIL.Image + ''' + x = x.unsqueeze(0) + x = interpolate(x, size=(h0, w0), mode='bicubic') + x = (x.squeeze(0).permute(1, 2, 0) * 127.5 + 127.5).cpu().numpy().clip(0, 255).astype(np.uint8) + return Image.fromarray(x) + + +def HWC3(x): + assert x.dtype == np.uint8 + if x.ndim == 2: + x = x[:, :, None] + assert x.ndim == 3 + H, W, C = x.shape + assert C == 1 or C == 3 or C == 4 + if C == 3: + return x + if C == 1: + return np.concatenate([x, x, x], axis=2) + if C == 4: + color = x[:, :, 0:3].astype(np.float32) + alpha = x[:, :, 3:4].astype(np.float32) / 255.0 + y = color * alpha + 255.0 * (1.0 - alpha) + y = y.clip(0, 255).astype(np.uint8) + return y + + +def upscale_image(input_image, upscale, min_size=None, unit_resolution=64): + H, W, C = input_image.shape + H = float(H) + W = float(W) + H *= upscale + W *= upscale + if min_size is not None: + if min(H, W) < min_size: + _upsacle = min_size / min(W, H) + W *= _upsacle + H *= _upsacle + H = int(np.round(H / unit_resolution)) * unit_resolution + W = int(np.round(W / unit_resolution)) * unit_resolution + img = cv2.resize(input_image, (W, H), interpolation=cv2.INTER_LANCZOS4 if upscale > 1 else cv2.INTER_AREA) + img = img.round().clip(0, 255).astype(np.uint8) + return img + + +def fix_resize(input_image, size=512, unit_resolution=64): + H, W, C = input_image.shape + H = float(H) + W = float(W) + upscale = size / min(H, W) + H *= upscale + W *= upscale + H = int(np.round(H / unit_resolution)) * unit_resolution + W 
= int(np.round(W / unit_resolution)) * unit_resolution + img = cv2.resize(input_image, (W, H), interpolation=cv2.INTER_LANCZOS4 if upscale > 1 else cv2.INTER_AREA) + img = img.round().clip(0, 255).astype(np.uint8) + return img + + + +def Numpy2Tensor(img): + ''' + np.array[H, w, C] [0, 255] -> Tensor[C, H, W], RGB, [-1, 1] + ''' + # size + img = np.array(img) / 255 * 2 - 1 + img = torch.tensor(img, dtype=torch.float32).permute(2, 0, 1) + return img + + +def Tensor2Numpy(x, h0=None, w0=None): + ''' + Tensor[C, H, W], RGB, [-1, 1] -> PIL.Image + ''' + if h0 is not None and w0 is not None: + x = x.unsqueeze(0) + x = interpolate(x, size=(h0, w0), mode='bicubic') + x = x.squeeze(0) + x = (x.permute(1, 2, 0) * 127.5 + 127.5).cpu().numpy().clip(0, 255).astype(np.uint8) + return x + + +def convert_dtype(dtype_str): + if dtype_str == 'fp32': + return torch.float32 + elif dtype_str == 'fp16': + return torch.float16 + elif dtype_str == 'bf16': + return torch.bfloat16 + else: + raise NotImplementedError diff --git a/ComfyUI-SUPIR/SUPIR/utils/__init__.py b/ComfyUI-SUPIR/SUPIR/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI-SUPIR/SUPIR/utils/colorfix.py b/ComfyUI-SUPIR/SUPIR/utils/colorfix.py new file mode 100644 index 0000000000000000000000000000000000000000..32f2537ec858c172e2340a9e2867cd3f8c8eaaca --- /dev/null +++ b/ComfyUI-SUPIR/SUPIR/utils/colorfix.py @@ -0,0 +1,120 @@ +''' +# -------------------------------------------------------------------------------- +# Color fixed script from Li Yi (https://github.com/pkuliyi2015/sd-webui-stablesr/blob/master/srmodule/colorfix.py) +# -------------------------------------------------------------------------------- +''' + +import torch +from PIL import Image +from torch import Tensor +from torch.nn import functional as F + +from torchvision.transforms import ToTensor, ToPILImage + +def adain_color_fix(target: Image, source: Image): + # Convert images to tensors + to_tensor = ToTensor() + target_tensor = to_tensor(target).unsqueeze(0) + source_tensor = to_tensor(source).unsqueeze(0) + + # Apply adaptive instance normalization + result_tensor = adaptive_instance_normalization(target_tensor, source_tensor) + + # Convert tensor back to image + to_image = ToPILImage() + result_image = to_image(result_tensor.squeeze(0).clamp_(0.0, 1.0)) + + return result_image + +def wavelet_color_fix(target: Image, source: Image): + # Convert images to tensors + to_tensor = ToTensor() + target_tensor = to_tensor(target).unsqueeze(0) + source_tensor = to_tensor(source).unsqueeze(0) + + # Apply wavelet reconstruction + result_tensor = wavelet_reconstruction(target_tensor, source_tensor) + + # Convert tensor back to image + to_image = ToPILImage() + result_image = to_image(result_tensor.squeeze(0).clamp_(0.0, 1.0)) + + return result_image + +def calc_mean_std(feat: Tensor, eps=1e-5): + """Calculate mean and std for adaptive_instance_normalization. + Args: + feat (Tensor): 4D tensor. + eps (float): A small value added to the variance to avoid + divide-by-zero. Default: 1e-5. + """ + size = feat.size() + assert len(size) == 4, 'The input feature should be 4D tensor.' 
+ b, c = size[:2] + feat_var = feat.reshape(b, c, -1).var(dim=2) + eps + feat_std = feat_var.sqrt().reshape(b, c, 1, 1) + feat_mean = feat.reshape(b, c, -1).mean(dim=2).reshape(b, c, 1, 1) + return feat_mean, feat_std + +def adaptive_instance_normalization(content_feat:Tensor, style_feat:Tensor): + """Adaptive instance normalization. + Adjust the reference features to have similar color and illumination + as those in the degraded features. + Args: + content_feat (Tensor): The reference feature. + style_feat (Tensor): The degraded features. + """ + size = content_feat.size() + style_mean, style_std = calc_mean_std(style_feat) + content_mean, content_std = calc_mean_std(content_feat) + normalized_feat = (content_feat - content_mean.expand(size)) / content_std.expand(size) + return normalized_feat * style_std.expand(size) + style_mean.expand(size) + +def wavelet_blur(image: Tensor, radius: int): + """ + Apply wavelet blur to the input tensor. + """ + # input shape: (1, 3, H, W) + # convolution kernel + kernel_vals = [ + [0.0625, 0.125, 0.0625], + [0.125, 0.25, 0.125], + [0.0625, 0.125, 0.0625], + ] + kernel = torch.tensor(kernel_vals, dtype=image.dtype, device=image.device) + # add channel dimensions to the kernel to make it a 4D tensor + kernel = kernel[None, None] + # repeat the kernel across all input channels + kernel = kernel.repeat(3, 1, 1, 1) + image = F.pad(image, (radius, radius, radius, radius), mode='replicate') + # apply convolution + output = F.conv2d(image, kernel, groups=3, dilation=radius) + return output + +def wavelet_decomposition(image: Tensor, levels=5): + """ + Apply wavelet decomposition to the input tensor. + This function only returns the low frequency & the high frequency. + """ + high_freq = torch.zeros_like(image) + for i in range(levels): + radius = 2 ** i + low_freq = wavelet_blur(image, radius) + high_freq += (image - low_freq) + image = low_freq + + return high_freq, low_freq + +def wavelet_reconstruction(content_feat:Tensor, style_feat:Tensor): + """ + Apply wavelet decomposition, so that the content will have the same color as the style.
+ """ + # calculate the wavelet decomposition of the content feature + content_high_freq, content_low_freq = wavelet_decomposition(content_feat) + del content_low_freq + # calculate the wavelet decomposition of the style feature + style_high_freq, style_low_freq = wavelet_decomposition(style_feat) + del style_high_freq + # reconstruct the content feature with the style's high frequency + return content_high_freq + style_low_freq + diff --git a/ComfyUI-SUPIR/SUPIR/utils/devices.py b/ComfyUI-SUPIR/SUPIR/utils/devices.py new file mode 100644 index 0000000000000000000000000000000000000000..3b1da17735097fde5321f9e832e7c4d0d94c69c1 --- /dev/null +++ b/ComfyUI-SUPIR/SUPIR/utils/devices.py @@ -0,0 +1,137 @@ +import sys +import contextlib +from functools import lru_cache + +import torch +#from modules import errors + +if sys.platform == "darwin": + from modules import mac_specific + + +def has_mps() -> bool: + if sys.platform != "darwin": + return False + else: + return mac_specific.has_mps + + +def get_cuda_device_string(): + return "cuda" + + +def get_optimal_device_name(): + if torch.cuda.is_available(): + return get_cuda_device_string() + + if has_mps(): + return "mps" + + return "cpu" + + +def get_optimal_device(): + return torch.device(get_optimal_device_name()) + + +def get_device_for(task): + return get_optimal_device() + + +def torch_gc(): + + if torch.cuda.is_available(): + with torch.cuda.device(get_cuda_device_string()): + torch.cuda.empty_cache() + torch.cuda.ipc_collect() + + if has_mps(): + mac_specific.torch_mps_gc() + + +def enable_tf32(): + if torch.cuda.is_available(): + + # enabling benchmark option seems to enable a range of cards to do fp16 when they otherwise can't + # see https://github.com/AUTOMATIC1111/stable-diffusion-webui/pull/4407 + if any(torch.cuda.get_device_capability(devid) == (7, 5) for devid in range(0, torch.cuda.device_count())): + torch.backends.cudnn.benchmark = True + + torch.backends.cuda.matmul.allow_tf32 = True + torch.backends.cudnn.allow_tf32 = True + + +enable_tf32() +#errors.run(enable_tf32, "Enabling TF32") + +cpu = torch.device("cpu") +device = device_interrogate = device_gfpgan = device_esrgan = device_codeformer = torch.device("cuda") +dtype = torch.float16 +dtype_vae = torch.float16 +dtype_unet = torch.float16 +unet_needs_upcast = False + + +def cond_cast_unet(input): + return input.to(dtype_unet) if unet_needs_upcast else input + + +def cond_cast_float(input): + return input.float() if unet_needs_upcast else input + + +def randn(seed, shape): + torch.manual_seed(seed) + return torch.randn(shape, device=device) + + +def randn_without_seed(shape): + return torch.randn(shape, device=device) + + +def autocast(disable=False): + if disable: + return contextlib.nullcontext() + + return torch.autocast("cuda") + + +def without_autocast(disable=False): + return torch.autocast("cuda", enabled=False) if torch.is_autocast_enabled() and not disable else contextlib.nullcontext() + + +class NansException(Exception): + pass + + +def test_for_nans(x, where): + if not torch.all(torch.isnan(x)).item(): + return + + if where == "unet": + message = "A tensor with all NaNs was produced in Unet." + + elif where == "vae": + message = "A tensor with all NaNs was produced in VAE." + + else: + message = "A tensor with all NaNs was produced." + + message += " Use --disable-nan-check commandline argument to disable this check." 
+ + raise NansException(message) + +@lru_cache +def first_time_calculation(): + """ + just do any calculation with pytorch layers - the first time this is done it allocates about 700MB of memory and + spends about 2.7 seconds doing that, at least with NVidia. + """ + + x = torch.zeros((1, 1)).to(device, dtype) + linear = torch.nn.Linear(1, 1).to(device, dtype) + linear(x) + + x = torch.zeros((1, 1, 3, 3)).to(device, dtype) + conv2d = torch.nn.Conv2d(1, 1, (3, 3)).to(device, dtype) + conv2d(x) \ No newline at end of file diff --git a/ComfyUI-SUPIR/SUPIR/utils/tilevae.py b/ComfyUI-SUPIR/SUPIR/utils/tilevae.py new file mode 100644 index 0000000000000000000000000000000000000000..c7e2c81fd4f11e82aa26b32da7bba00e43ced4c8 --- /dev/null +++ b/ComfyUI-SUPIR/SUPIR/utils/tilevae.py @@ -0,0 +1,978 @@ +# ------------------------------------------------------------------------ +# +# Ultimate VAE Tile Optimization +# +# Introducing a revolutionary new optimization designed to make +# the VAE work with giant images on limited VRAM! +# Say goodbye to the frustration of OOM and hello to seamless output! +# +# ------------------------------------------------------------------------ +# +# This script is a wild hack that splits the image into tiles, +# encodes each tile separately, and merges the result back together. +# +# Advantages: +# - The VAE can now work with giant images on limited VRAM +# (~10 GB for 8K images!) +# - The merged output is completely seamless without any post-processing. +# +# Drawbacks: +# - Giant RAM needed. To store the intermediate results for a 4096x4096 +# image, you need a 32 GB RAM machine (it consumes ~20 GB); for 8192x8192 +# you need a 128 GB RAM machine (it consumes ~100 GB) +# - NaNs always appear for 8k images when you use fp16 (half) VAE +# You must use --no-half-vae to disable half VAE for that giant image. +# - Slow speed. With default tile size, it takes around 50/200 seconds +# to encode/decode a 4096x4096 image; and 200/900 seconds to encode/decode +# an 8192x8192 image. (The speed is limited by both the GPU and the CPU.) +# - The gradient calculation is not compatible with this hack. It +# will break any backward() or torch.autograd.grad() that passes through the VAE. +# (But you can still use the VAE to generate training data.) +# +# How it works: +# 1) The image is split into tiles. +# - To ensure perfect results, each tile is padded with 32 pixels +# on each side. +# - Then the conv2d/silu/upsample/downsample can produce identical +# results to the original image without splitting. +# 2) The original forward is decomposed into a task queue and a task worker. +# - The task queue is a list of functions that will be executed in order. +# - The task worker is a loop that executes the tasks in the queue. +# 3) The task queue is executed for each tile. +# - The current tile is sent to the GPU. +# - Local operations are executed directly. +# - Group norm calculation is temporarily suspended until the mean +# and var of all tiles are calculated. +# - The residual is pre-calculated, stored, and added back later. +# - When moving on to the next tile, the current tile is sent back to the CPU. +# 4) After all tiles are processed, the tiles are merged on the CPU and returned. +# +# Enjoy! +# +# @author: LI YI @ Nanyang Technological University - Singapore +# @date: 2023-03-02 +# @license: MIT License +# +# Please give me a star if you like this project!
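+#
+# A rough usage sketch (hypothetical wiring, based on the VAEHook class defined
+# below, which expects the wrapped encoder/decoder module to keep its unpatched
+# forward available as `original_forward`):
+#   decoder.original_forward = decoder.forward
+#   decoder.forward = VAEHook(decoder, tile_size=128, is_decoder=True,
+#                             fast_decoder=True, fast_encoder=False, color_fix=False)
+# With these illustrative numbers (tile_size=128, decoder padding=11), a 512x512
+# latent (a 4096x4096 output image) is split into ceil((512 - 2*11) / 128) = 4
+# tiles per side, 16 tiles in total, each decoded from at most 150x150 latent pixels.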
+# +# ------------------------------------------------------------------------- + +import gc +from time import time +import math +from tqdm import tqdm + +import torch +import torch.version +import torch.nn.functional as F +from einops import rearrange + +import comfy.model_management +device = comfy.model_management.get_torch_device() + +if comfy.model_management.XFORMERS_IS_AVAILABLE: + try: + import xformers + import xformers.ops + XFORMERS_IS_AVAILABLE = True + except: + XFORMERS_IS_AVAILABLE = False + print("no module 'xformers'. Processing without...") +else: + XFORMERS_IS_AVAILABLE = False + +sd_flag = True + +def get_recommend_encoder_tile_size(): + if torch.cuda.is_available(): + total_memory = torch.cuda.get_device_properties( + device).total_memory // 2**20 + if total_memory > 16*1000: + ENCODER_TILE_SIZE = 3072 + elif total_memory > 12*1000: + ENCODER_TILE_SIZE = 2048 + elif total_memory > 8*1000: + ENCODER_TILE_SIZE = 1536 + else: + ENCODER_TILE_SIZE = 960 + else: + ENCODER_TILE_SIZE = 512 + return ENCODER_TILE_SIZE + + +def get_recommend_decoder_tile_size(): + if torch.cuda.is_available(): + total_memory = torch.cuda.get_device_properties( + device).total_memory // 2**20 + if total_memory > 30*1000: + DECODER_TILE_SIZE = 256 + elif total_memory > 16*1000: + DECODER_TILE_SIZE = 192 + elif total_memory > 12*1000: + DECODER_TILE_SIZE = 128 + elif total_memory > 8*1000: + DECODER_TILE_SIZE = 96 + else: + DECODER_TILE_SIZE = 64 + else: + DECODER_TILE_SIZE = 64 + return DECODER_TILE_SIZE + + +if 'global const': + DEFAULT_ENABLED = False + DEFAULT_MOVE_TO_GPU = False + DEFAULT_FAST_ENCODER = True + DEFAULT_FAST_DECODER = True + DEFAULT_COLOR_FIX = 0 + DEFAULT_ENCODER_TILE_SIZE = get_recommend_encoder_tile_size() + DEFAULT_DECODER_TILE_SIZE = get_recommend_decoder_tile_size() + + +# inplace version of silu +def inplace_nonlinearity(x): + # Test: fix for Nans + return F.silu(x, inplace=True) + +# extracted from ldm.modules.diffusionmodules.model + +# from diffusers lib +def attn_forward_new(self, h_): + batch_size, channel, height, width = h_.shape + hidden_states = h_.view(batch_size, channel, height * width).transpose(1, 2) + + attention_mask = None + encoder_hidden_states = None + batch_size, sequence_length, _ = hidden_states.shape + attention_mask = self.prepare_attention_mask(attention_mask, sequence_length, batch_size) + + query = self.to_q(hidden_states) + + if encoder_hidden_states is None: + encoder_hidden_states = hidden_states + elif self.norm_cross: + encoder_hidden_states = self.norm_encoder_hidden_states(encoder_hidden_states) + + key = self.to_k(encoder_hidden_states) + value = self.to_v(encoder_hidden_states) + + query = self.head_to_batch_dim(query) + key = self.head_to_batch_dim(key) + value = self.head_to_batch_dim(value) + + attention_probs = self.get_attention_scores(query, key, attention_mask) + hidden_states = torch.bmm(attention_probs, value) + hidden_states = self.batch_to_head_dim(hidden_states) + + # linear proj + hidden_states = self.to_out[0](hidden_states) + # dropout + hidden_states = self.to_out[1](hidden_states) + + hidden_states = hidden_states.transpose(-1, -2).reshape(batch_size, channel, height, width) + + return hidden_states + +def attn_forward_new_pt2_0(self, hidden_states,): + scale = 1 + attention_mask = None + encoder_hidden_states = None + + input_ndim = hidden_states.ndim + + if input_ndim == 4: + batch_size, channel, height, width = hidden_states.shape + hidden_states = hidden_states.view(batch_size, channel, height * width).transpose(1, 
2) + + batch_size, sequence_length, _ = ( + hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape + ) + + if attention_mask is not None: + attention_mask = self.prepare_attention_mask(attention_mask, sequence_length, batch_size) + # scaled_dot_product_attention expects attention_mask shape to be + # (batch, heads, source_length, target_length) + attention_mask = attention_mask.view(batch_size, self.heads, -1, attention_mask.shape[-1]) + + if self.group_norm is not None: + hidden_states = self.group_norm(hidden_states.transpose(1, 2)).transpose(1, 2) + + query = self.to_q(hidden_states, scale=scale) + + if encoder_hidden_states is None: + encoder_hidden_states = hidden_states + elif self.norm_cross: + encoder_hidden_states = self.norm_encoder_hidden_states(encoder_hidden_states) + + key = self.to_k(encoder_hidden_states, scale=scale) + value = self.to_v(encoder_hidden_states, scale=scale) + + inner_dim = key.shape[-1] + head_dim = inner_dim // self.heads + + query = query.view(batch_size, -1, self.heads, head_dim).transpose(1, 2) + + key = key.view(batch_size, -1, self.heads, head_dim).transpose(1, 2) + value = value.view(batch_size, -1, self.heads, head_dim).transpose(1, 2) + + # the output of sdp = (batch, num_heads, seq_len, head_dim) + # TODO: add support for attn.scale when we move to Torch 2.1 + hidden_states = F.scaled_dot_product_attention( + query, key, value, attn_mask=attention_mask, dropout_p=0.0, is_causal=False + ) + + hidden_states = hidden_states.transpose(1, 2).reshape(batch_size, -1, self.heads * head_dim) + hidden_states = hidden_states.to(query.dtype) + + # linear proj + hidden_states = self.to_out[0](hidden_states, scale=scale) + # dropout + hidden_states = self.to_out[1](hidden_states) + + if input_ndim == 4: + hidden_states = hidden_states.transpose(-1, -2).reshape(batch_size, channel, height, width) + + return hidden_states + +def attn_forward_new_xformers(self, hidden_states): + scale = 1 + attention_op = None + attention_mask = None + encoder_hidden_states = None + + input_ndim = hidden_states.ndim + + if input_ndim == 4: + batch_size, channel, height, width = hidden_states.shape + hidden_states = hidden_states.view(batch_size, channel, height * width).transpose(1, 2) + + batch_size, key_tokens, _ = ( + hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape + ) + + attention_mask = self.prepare_attention_mask(attention_mask, key_tokens, batch_size) + if attention_mask is not None: + # expand our mask's singleton query_tokens dimension: + # [batch*heads, 1, key_tokens] -> + # [batch*heads, query_tokens, key_tokens] + # so that it can be added as a bias onto the attention scores that xformers computes: + # [batch*heads, query_tokens, key_tokens] + # we do this explicitly because xformers doesn't broadcast the singleton dimension for us. 
+ _, query_tokens, _ = hidden_states.shape + attention_mask = attention_mask.expand(-1, query_tokens, -1) + + if self.group_norm is not None: + hidden_states = self.group_norm(hidden_states.transpose(1, 2)).transpose(1, 2) + + query = self.to_q(hidden_states, scale=scale) + + if encoder_hidden_states is None: + encoder_hidden_states = hidden_states + elif self.norm_cross: + encoder_hidden_states = self.norm_encoder_hidden_states(encoder_hidden_states) + + key = self.to_k(encoder_hidden_states, scale=scale) + value = self.to_v(encoder_hidden_states, scale=scale) + + query = self.head_to_batch_dim(query).contiguous() + key = self.head_to_batch_dim(key).contiguous() + value = self.head_to_batch_dim(value).contiguous() + + hidden_states = xformers.ops.memory_efficient_attention( + query, key, value, attn_bias=attention_mask, op=attention_op#, scale=scale + ) + hidden_states = hidden_states.to(query.dtype) + hidden_states = self.batch_to_head_dim(hidden_states) + + # linear proj + hidden_states = self.to_out[0](hidden_states, scale=scale) + # dropout + hidden_states = self.to_out[1](hidden_states) + + if input_ndim == 4: + hidden_states = hidden_states.transpose(-1, -2).reshape(batch_size, channel, height, width) + + return hidden_states + +def attn_forward(self, h_): + q = self.q(h_) + k = self.k(h_) + v = self.v(h_) + + # compute attention + b, c, h, w = q.shape + q = q.reshape(b, c, h*w) + q = q.permute(0, 2, 1) # b,hw,c + k = k.reshape(b, c, h*w) # b,c,hw + w_ = torch.bmm(q, k) # b,hw,hw w[b,i,j]=sum_c q[b,i,c]k[b,c,j] + w_ = w_ * (int(c)**(-0.5)) + w_ = torch.nn.functional.softmax(w_, dim=2) + + # attend to values + v = v.reshape(b, c, h*w) + w_ = w_.permute(0, 2, 1) # b,hw,hw (first hw of k, second of q) + # b, c,hw (hw of q) h_[b,c,j] = sum_i v[b,c,i] w_[b,i,j] + h_ = torch.bmm(v, w_) + h_ = h_.reshape(b, c, h, w) + + h_ = self.proj_out(h_) + + return h_ + + +def xformer_attn_forward(self, h_): + q = self.q(h_) + k = self.k(h_) + v = self.v(h_) + + # compute attention + B, C, H, W = q.shape + q, k, v = map(lambda x: rearrange(x, 'b c h w -> b (h w) c'), (q, k, v)) + + q, k, v = map( + lambda t: t.unsqueeze(3) + .reshape(B, t.shape[1], 1, C) + .permute(0, 2, 1, 3) + .reshape(B * 1, t.shape[1], C) + .contiguous(), + (q, k, v), + ) + out = xformers.ops.memory_efficient_attention( + q, k, v, attn_bias=None, op=self.attention_op) + + out = ( + out.unsqueeze(0) + .reshape(B, 1, out.shape[1], C) + .permute(0, 2, 1, 3) + .reshape(B, out.shape[1], C) + ) + out = rearrange(out, 'b (h w) c -> b c h w', b=B, h=H, w=W, c=C) + out = self.proj_out(out) + return out + + +def attn2task(task_queue, net): + if False: #isinstance(net, AttnBlock): + task_queue.append(('store_res', lambda x: x)) + task_queue.append(('pre_norm', net.norm)) + task_queue.append(('attn', lambda x, net=net: attn_forward(net, x))) + task_queue.append(['add_res', None]) + elif False: #isinstance(net, MemoryEfficientAttnBlock): + task_queue.append(('store_res', lambda x: x)) + task_queue.append(('pre_norm', net.norm)) + task_queue.append( + ('attn', lambda x, net=net: xformer_attn_forward(net, x))) + task_queue.append(['add_res', None]) + else: + task_queue.append(('store_res', lambda x: x)) + task_queue.append(('pre_norm', net.norm)) + if XFORMERS_IS_AVAILABLE: + # task_queue.append(('attn', lambda x, net=net: attn_forward_new_xformers(net, x))) + task_queue.append( + ('attn', lambda x, net=net: xformer_attn_forward(net, x))) + elif hasattr(F, "scaled_dot_product_attention"): + task_queue.append(('attn', lambda x, net=net: 
attn_forward(net, x))) + #task_queue.append(('attn', lambda x, net=net: attn_forward_new_pt2_0(net, x))) + else: + task_queue.append(('attn', lambda x, net=net: attn_forward_new(net, x))) + task_queue.append(['add_res', None]) + +def resblock2task(queue, block): + """ + Turn a ResNetBlock into a sequence of tasks and append to the task queue + + @param queue: the target task queue + @param block: ResNetBlock + + """ + if block.in_channels != block.out_channels: + if sd_flag: + if block.use_conv_shortcut: + queue.append(('store_res', block.conv_shortcut)) + else: + queue.append(('store_res', block.nin_shortcut)) + else: + if block.use_in_shortcut: + queue.append(('store_res', block.conv_shortcut)) + else: + queue.append(('store_res', block.nin_shortcut)) + + else: + queue.append(('store_res', lambda x: x)) + queue.append(('pre_norm', block.norm1)) + queue.append(('silu', inplace_nonlinearity)) + queue.append(('conv1', block.conv1)) + queue.append(('pre_norm', block.norm2)) + queue.append(('silu', inplace_nonlinearity)) + queue.append(('conv2', block.conv2)) + queue.append(['add_res', None]) + + +def build_sampling(task_queue, net, is_decoder): + """ + Build the sampling part of a task queue + @param task_queue: the target task queue + @param net: the network + @param is_decoder: currently building decoder or encoder + """ + if is_decoder: + if sd_flag: + resblock2task(task_queue, net.mid.block_1) + attn2task(task_queue, net.mid.attn_1) + #print(task_queue) + resblock2task(task_queue, net.mid.block_2) + resolution_iter = reversed(range(net.num_resolutions)) + block_ids = net.num_res_blocks + 1 + condition = 0 + module = net.up + func_name = 'upsample' + else: + resblock2task(task_queue, net.mid_block.resnets[0]) + attn2task(task_queue, net.mid_block.attentions[0]) + resblock2task(task_queue, net.mid_block.resnets[1]) + resolution_iter = (range(len(net.up_blocks))) # net.num_resolutions = 3 + block_ids = 2 + 1 + condition = len(net.up_blocks) - 1 + module = net.up_blocks + func_name = 'upsamplers' + else: + if sd_flag: + resolution_iter = range(net.num_resolutions) + block_ids = net.num_res_blocks + condition = net.num_resolutions - 1 + module = net.down + func_name = 'downsample' + else: + resolution_iter = range(len(net.down_blocks)) + block_ids = 2 + condition = len(net.down_blocks) - 1 + module = net.down_blocks + func_name = 'downsamplers' + + for i_level in resolution_iter: + for i_block in range(block_ids): + if sd_flag: + resblock2task(task_queue, module[i_level].block[i_block]) + else: + resblock2task(task_queue, module[i_level].resnets[i_block]) + if i_level != condition: + if sd_flag: + task_queue.append((func_name, getattr(module[i_level], func_name))) + else: + if is_decoder: + task_queue.append((func_name, module[i_level].upsamplers[0])) + else: + task_queue.append((func_name, module[i_level].downsamplers[0])) + + if not is_decoder: + if sd_flag: + resblock2task(task_queue, net.mid.block_1) + attn2task(task_queue, net.mid.attn_1) + resblock2task(task_queue, net.mid.block_2) + else: + resblock2task(task_queue, net.mid_block.resnets[0]) + attn2task(task_queue, net.mid_block.attentions[0]) + resblock2task(task_queue, net.mid_block.resnets[1]) + + +def build_task_queue(net, is_decoder): + """ + Build a single task queue for the encoder or decoder + @param net: the VAE decoder or encoder network + @param is_decoder: currently building decoder or encoder + @return: the task queue + """ + task_queue = [] + task_queue.append(('conv_in', net.conv_in)) + + # construct the sampling part of 
the task queue + # because encoder and decoder share the same architecture, we extract the sampling part + build_sampling(task_queue, net, is_decoder) + if is_decoder and not sd_flag: + net.give_pre_end = False + net.tanh_out = False + + if not is_decoder or not net.give_pre_end: + if sd_flag: + task_queue.append(('pre_norm', net.norm_out)) + else: + task_queue.append(('pre_norm', net.conv_norm_out)) + task_queue.append(('silu', inplace_nonlinearity)) + task_queue.append(('conv_out', net.conv_out)) + if is_decoder and net.tanh_out: + task_queue.append(('tanh', torch.tanh)) + + return task_queue + + +def clone_task_queue(task_queue): + """ + Clone a task queue + @param task_queue: the task queue to be cloned + @return: the cloned task queue + """ + return [[item for item in task] for task in task_queue] + + +def get_var_mean(input, num_groups, eps=1e-6): + """ + Get mean and var for group norm + """ + b, c = input.size(0), input.size(1) + channel_in_group = int(c/num_groups) + input_reshaped = input.contiguous().view( + 1, int(b * num_groups), channel_in_group, *input.size()[2:]) + var, mean = torch.var_mean( + input_reshaped, dim=[0, 2, 3, 4], unbiased=False) + return var, mean + + +def custom_group_norm(input, num_groups, mean, var, weight=None, bias=None, eps=1e-6): + """ + Custom group norm with fixed mean and var + + @param input: input tensor + @param num_groups: number of groups. by default, num_groups = 32 + @param mean: mean, must be pre-calculated by get_var_mean + @param var: var, must be pre-calculated by get_var_mean + @param weight: weight, should be fetched from the original group norm + @param bias: bias, should be fetched from the original group norm + @param eps: epsilon, by default, eps = 1e-6 to match the original group norm + + @return: normalized tensor + """ + b, c = input.size(0), input.size(1) + channel_in_group = int(c/num_groups) + input_reshaped = input.contiguous().view( + 1, int(b * num_groups), channel_in_group, *input.size()[2:]) + + out = F.batch_norm(input_reshaped, mean, var, weight=None, bias=None, + training=False, momentum=0, eps=eps) + + out = out.view(b, c, *input.size()[2:]) + + # post affine transform + if weight is not None: + out *= weight.view(1, -1, 1, 1) + if bias is not None: + out += bias.view(1, -1, 1, 1) + return out + + +def crop_valid_region(x, input_bbox, target_bbox, is_decoder): + """ + Crop the valid region from the tile + @param x: input tile + @param input_bbox: original input bounding box + @param target_bbox: output bounding box + @param scale: scale factor + @return: cropped tile + """ + padded_bbox = [i * 8 if is_decoder else i//8 for i in input_bbox] + margin = [target_bbox[i] - padded_bbox[i] for i in range(4)] + return x[:, :, margin[2]:x.size(2)+margin[3], margin[0]:x.size(3)+margin[1]] + +# ↓↓↓ https://github.com/Kahsolt/stable-diffusion-webui-vae-tile-infer ↓↓↓ + + +def perfcount(fn): + def wrapper(*args, **kwargs): + ts = time() + + if torch.cuda.is_available(): + torch.cuda.reset_peak_memory_stats(device) + comfy.model_management.soft_empty_cache() + gc.collect() + + ret = fn(*args, **kwargs) + + comfy.model_management.soft_empty_cache() + gc.collect() + if torch.cuda.is_available(): + vram = torch.cuda.max_memory_allocated(device) / 2**20 + torch.cuda.reset_peak_memory_stats(device) + print( + f'[Tiled VAE]: Done in {time() - ts:.3f}s, max VRAM alloc {vram:.3f} MB') + else: + print(f'[Tiled VAE]: Done in {time() - ts:.3f}s') + + return ret + return wrapper + +# copy end :) + + +class GroupNormParam: + def 
__init__(self): + self.var_list = [] + self.mean_list = [] + self.pixel_list = [] + self.weight = None + self.bias = None + + def add_tile(self, tile, layer): + var, mean = get_var_mean(tile, 32) + # For giant images, the variance can be larger than max float16 + # In this case we create a copy to float32 + if var.dtype == torch.float16 and var.isinf().any(): + fp32_tile = tile.float() + var, mean = get_var_mean(fp32_tile, 32) + # ============= DEBUG: test for infinite ============= + # if torch.isinf(var).any(): + # print('var: ', var) + # ==================================================== + self.var_list.append(var) + self.mean_list.append(mean) + self.pixel_list.append( + tile.shape[2]*tile.shape[3]) + if hasattr(layer, 'weight'): + self.weight = layer.weight + self.bias = layer.bias + else: + self.weight = None + self.bias = None + + def summary(self): + """ + summarize the mean and var and return a function + that apply group norm on each tile + """ + if len(self.var_list) == 0: + return None + var = torch.vstack(self.var_list) + mean = torch.vstack(self.mean_list) + max_value = max(self.pixel_list) + pixels = torch.tensor( + self.pixel_list, dtype=torch.float32, device=device) / max_value + sum_pixels = torch.sum(pixels) + pixels = pixels.unsqueeze( + 1) / sum_pixels + var = torch.sum( + var * pixels, dim=0) + mean = torch.sum( + mean * pixels, dim=0) + return lambda x: custom_group_norm(x, 32, mean, var, self.weight, self.bias) + + @staticmethod + def from_tile(tile, norm): + """ + create a function from a single tile without summary + """ + var, mean = get_var_mean(tile, 32) + if var.dtype == torch.float16 and var.isinf().any(): + fp32_tile = tile.float() + var, mean = get_var_mean(fp32_tile, 32) + # if it is a macbook, we need to convert back to float16 + if var.device.type == 'mps': + # clamp to avoid overflow + var = torch.clamp(var, 0, 60000) + var = var.half() + mean = mean.half() + if hasattr(norm, 'weight'): + weight = norm.weight + bias = norm.bias + else: + weight = None + bias = None + + def group_norm_func(x, mean=mean, var=var, weight=weight, bias=bias): + return custom_group_norm(x, 32, mean, var, weight, bias, 1e-6) + return group_norm_func + + +class VAEHook: + def __init__(self, net, tile_size, is_decoder, fast_decoder, fast_encoder, color_fix, to_gpu=False): + self.net = net # encoder | decoder + self.tile_size = tile_size + self.is_decoder = is_decoder + self.fast_mode = (fast_encoder and not is_decoder) or ( + fast_decoder and is_decoder) + self.color_fix = color_fix and not is_decoder + self.to_gpu = to_gpu + self.pad = 11 if is_decoder else 32 + + def __call__(self, x): + B, C, H, W = x.shape + original_device = next(self.net.parameters()).device + try: + if self.to_gpu: + self.net.to(device) + if max(H, W) <= self.pad * 2 + self.tile_size: + print("[Tiled VAE]: the input size is tiny and unnecessary to tile.") + return self.net.original_forward(x) + else: + return self.vae_tile_forward(x) + finally: + self.net.to(original_device) + + def get_best_tile_size(self, lowerbound, upperbound): + """ + Get the best tile size for GPU memory + """ + divider = 32 + while divider >= 2: + remainer = lowerbound % divider + if remainer == 0: + return lowerbound + candidate = lowerbound - remainer + divider + if candidate <= upperbound: + return candidate + divider //= 2 + return lowerbound + + def split_tiles(self, h, w): + """ + Tool function to split the image into tiles + @param h: height of the image + @param w: width of the image + @return: tile_input_bboxes, 
tile_output_bboxes + """ + tile_input_bboxes, tile_output_bboxes = [], [] + tile_size = self.tile_size + pad = self.pad + num_height_tiles = math.ceil((h - 2 * pad) / tile_size) + num_width_tiles = math.ceil((w - 2 * pad) / tile_size) + # If any of the numbers are 0, we let it be 1 + # This is to deal with long and thin images + num_height_tiles = max(num_height_tiles, 1) + num_width_tiles = max(num_width_tiles, 1) + + # Suggestions from https://github.com/Kahsolt: auto shrink the tile size + real_tile_height = math.ceil((h - 2 * pad) / num_height_tiles) + real_tile_width = math.ceil((w - 2 * pad) / num_width_tiles) + real_tile_height = self.get_best_tile_size(real_tile_height, tile_size) + real_tile_width = self.get_best_tile_size(real_tile_width, tile_size) + + print(f'[Tiled VAE]: split to {num_height_tiles}x{num_width_tiles} = {num_height_tiles*num_width_tiles} tiles. ' + + f'Optimal tile size {real_tile_width}x{real_tile_height}, original tile size {tile_size}x{tile_size}') + + for i in range(num_height_tiles): + for j in range(num_width_tiles): + # bbox: [x1, x2, y1, y2] + # the padding is is unnessary for image borders. So we directly start from (32, 32) + input_bbox = [ + pad + j * real_tile_width, + min(pad + (j + 1) * real_tile_width, w), + pad + i * real_tile_height, + min(pad + (i + 1) * real_tile_height, h), + ] + + # if the output bbox is close to the image boundary, we extend it to the image boundary + output_bbox = [ + input_bbox[0] if input_bbox[0] > pad else 0, + input_bbox[1] if input_bbox[1] < w - pad else w, + input_bbox[2] if input_bbox[2] > pad else 0, + input_bbox[3] if input_bbox[3] < h - pad else h, + ] + + # scale to get the final output bbox + output_bbox = [x * 8 if self.is_decoder else x // 8 for x in output_bbox] + tile_output_bboxes.append(output_bbox) + + # indistinguishable expand the input bbox by pad pixels + tile_input_bboxes.append([ + max(0, input_bbox[0] - pad), + min(w, input_bbox[1] + pad), + max(0, input_bbox[2] - pad), + min(h, input_bbox[3] + pad), + ]) + + return tile_input_bboxes, tile_output_bboxes + + @torch.no_grad() + def estimate_group_norm(self, z, task_queue, color_fix): + device = z.device + tile = z + last_id = len(task_queue) - 1 + while last_id >= 0 and task_queue[last_id][0] != 'pre_norm': + last_id -= 1 + if last_id <= 0 or task_queue[last_id][0] != 'pre_norm': + raise ValueError('No group norm found in the task queue') + # estimate until the last group norm + for i in range(last_id + 1): + task = task_queue[i] + if task[0] == 'pre_norm': + group_norm_func = GroupNormParam.from_tile(tile, task[1]) + task_queue[i] = ('apply_norm', group_norm_func) + if i == last_id: + return True + tile = group_norm_func(tile) + elif task[0] == 'store_res': + task_id = i + 1 + while task_id < last_id and task_queue[task_id][0] != 'add_res': + task_id += 1 + if task_id >= last_id: + continue + task_queue[task_id][1] = task[1](tile) + elif task[0] == 'add_res': + tile += task[1].to(device) + task[1] = None + elif color_fix and task[0] == 'downsample': + for j in range(i, last_id + 1): + if task_queue[j][0] == 'store_res': + task_queue[j] = ('store_res_cpu', task_queue[j][1]) + return True + else: + tile = task[1](tile) + try: + devices.test_for_nans(tile, "vae") + except: + print(f'Nan detected in fast mode estimation. Fast mode disabled.') + return False + + raise IndexError('Should not reach here') + + @perfcount + @torch.no_grad() + def vae_tile_forward(self, z): + """ + Decode a latent vector z into an image in a tiled manner. 
+ @param z: latent vector + @return: image + """ + device = next(self.net.parameters()).device + dtype = z.dtype + net = self.net + tile_size = self.tile_size + is_decoder = self.is_decoder + + z = z.detach() # detach the input to avoid backprop + + N, height, width = z.shape[0], z.shape[2], z.shape[3] + net.last_z_shape = z.shape + + # Split the input into tiles and build a task queue for each tile + print(f'[Tiled VAE]: input_size: {z.shape}, tile_size: {tile_size}, padding: {self.pad}') + + in_bboxes, out_bboxes = self.split_tiles(height, width) + + # Prepare tiles by split the input latents + tiles = [] + for input_bbox in in_bboxes: + tile = z[:, :, input_bbox[2]:input_bbox[3], input_bbox[0]:input_bbox[1]].cpu() + tiles.append(tile) + + num_tiles = len(tiles) + num_completed = 0 + + # Build task queues + single_task_queue = build_task_queue(net, is_decoder) + #print(single_task_queue) + if self.fast_mode: + # Fast mode: downsample the input image to the tile size, + # then estimate the group norm parameters on the downsampled image + scale_factor = tile_size / max(height, width) + z = z.to(device) + downsampled_z = F.interpolate(z, scale_factor=scale_factor, mode='nearest-exact') + # use nearest-exact to keep statictics as close as possible + print(f'[Tiled VAE]: Fast mode enabled, estimating group norm parameters on {downsampled_z.shape[3]} x {downsampled_z.shape[2]} image') + + # ======= Special thanks to @Kahsolt for distribution shift issue ======= # + # The downsampling will heavily distort its mean and std, so we need to recover it. + std_old, mean_old = torch.std_mean(z, dim=[0, 2, 3], keepdim=True) + std_new, mean_new = torch.std_mean(downsampled_z, dim=[0, 2, 3], keepdim=True) + downsampled_z = (downsampled_z - mean_new) / std_new * std_old + mean_old + del std_old, mean_old, std_new, mean_new + # occasionally the std_new is too small or too large, which exceeds the range of float16 + # so we need to clamp it to max z's range. 
+ downsampled_z = torch.clamp_(downsampled_z, min=z.min(), max=z.max()) + estimate_task_queue = clone_task_queue(single_task_queue) + if self.estimate_group_norm(downsampled_z, estimate_task_queue, color_fix=self.color_fix): + single_task_queue = estimate_task_queue + del downsampled_z + + task_queues = [clone_task_queue(single_task_queue) for _ in range(num_tiles)] + + # Dummy result + result = None + result_approx = None + #try: + # with devices.autocast(): + # result_approx = torch.cat([F.interpolate(cheap_approximation(x).unsqueeze(0), scale_factor=opt_f, mode='nearest-exact') for x in z], dim=0).cpu() + #except: pass + # Free memory of input latent tensor + del z + + # Task queue execution + pbar = tqdm(total=num_tiles * len(task_queues[0]), desc=f"[Tiled VAE]: Executing {'Decoder' if is_decoder else 'Encoder'} Task Queue: ") + pbar_comfy = comfy.utils.ProgressBar(num_tiles * len(task_queues[0])) + # execute the task back and forth when switch tiles so that we always + # keep one tile on the GPU to reduce unnecessary data transfer + forward = True + interrupted = False + #state.interrupted = interrupted + while True: + #if state.interrupted: interrupted = True ; break + + group_norm_param = GroupNormParam() + for i in range(num_tiles) if forward else reversed(range(num_tiles)): + #if state.interrupted: interrupted = True ; break + + tile = tiles[i].to(device) + input_bbox = in_bboxes[i] + task_queue = task_queues[i] + + interrupted = False + while len(task_queue) > 0: + #if state.interrupted: interrupted = True ; break + + # DEBUG: current task + # print('Running task: ', task_queue[0][0], ' on tile ', i, '/', num_tiles, ' with shape ', tile.shape) + task = task_queue.pop(0) + if task[0] == 'pre_norm': + group_norm_param.add_tile(tile, task[1]) + break + elif task[0] == 'store_res' or task[0] == 'store_res_cpu': + task_id = 0 + res = task[1](tile) + if not self.fast_mode or task[0] == 'store_res_cpu': + res = res.cpu() + while task_queue[task_id][0] != 'add_res': + task_id += 1 + task_queue[task_id][1] = res + elif task[0] == 'add_res': + tile += task[1].to(device) + task[1] = None + else: + tile = task[1](tile) + #print(tiles[i].shape, tile.shape, task) + pbar.update(1) + pbar_comfy.update(1) + + if interrupted: break + + # check for NaNs in the tile. + # If there are NaNs, we abort the process to save user's time + #devices.test_for_nans(tile, "vae") + + #print(tiles[i].shape, tile.shape, i, num_tiles) + if len(task_queue) == 0: + tiles[i] = None + num_completed += 1 + if result is None: # NOTE: dim C varies from different cases, can only be inited dynamically + result = torch.zeros((N, tile.shape[1], height * 8 if is_decoder else height // 8, width * 8 if is_decoder else width // 8), device=device, requires_grad=False) + result[:, :, out_bboxes[i][2]:out_bboxes[i][3], out_bboxes[i][0]:out_bboxes[i][1]] = crop_valid_region(tile, in_bboxes[i], out_bboxes[i], is_decoder) + del tile + elif i == num_tiles - 1 and forward: + forward = False + tiles[i] = tile + elif i == 0 and not forward: + forward = True + tiles[i] = tile + else: + tiles[i] = tile.cpu() + del tile + + if interrupted: break + if num_completed == num_tiles: break + + # insert the group norm task to the head of each task queue + group_norm_func = group_norm_param.summary() + if group_norm_func is not None: + for i in range(num_tiles): + task_queue = task_queues[i] + task_queue.insert(0, ('apply_norm', group_norm_func)) + + # Done! 
+ pbar.close() + return result.to(dtype) if result is not None else result_approx.to(device) \ No newline at end of file diff --git a/ComfyUI-SUPIR/__init__.py b/ComfyUI-SUPIR/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5f7a30d511d1eda13c15cc2acbc5afeb8cb8599e --- /dev/null +++ b/ComfyUI-SUPIR/__init__.py @@ -0,0 +1,28 @@ +from .nodes import SUPIR_Upscale +from .nodes_v2 import SUPIR_sample, SUPIR_model_loader, SUPIR_first_stage, SUPIR_encode, SUPIR_decode, SUPIR_conditioner, SUPIR_tiles, SUPIR_model_loader_v2, SUPIR_model_loader_v2_clip + +NODE_CLASS_MAPPINGS = { + "SUPIR_Upscale": SUPIR_Upscale, + "SUPIR_sample": SUPIR_sample, + "SUPIR_model_loader": SUPIR_model_loader, + "SUPIR_first_stage": SUPIR_first_stage, + "SUPIR_encode": SUPIR_encode, + "SUPIR_decode": SUPIR_decode, + "SUPIR_conditioner": SUPIR_conditioner, + "SUPIR_tiles": SUPIR_tiles, + "SUPIR_model_loader_v2": SUPIR_model_loader_v2, + "SUPIR_model_loader_v2_clip": SUPIR_model_loader_v2_clip +} +NODE_DISPLAY_NAME_MAPPINGS = { + "SUPIR_Upscale": "SUPIR Upscale (Legacy)", + "SUPIR_sample": "SUPIR Sampler", + "SUPIR_model_loader": "SUPIR Model Loader (Legacy)", + "SUPIR_first_stage": "SUPIR First Stage (Denoiser)", + "SUPIR_encode": "SUPIR Encode", + "SUPIR_decode": "SUPIR Decode", + "SUPIR_conditioner": "SUPIR Conditioner", + "SUPIR_tiles": "SUPIR Tiles Preview", + "SUPIR_model_loader_v2": "SUPIR Model Loader (v2)", + "SUPIR_model_loader_v2_clip": "SUPIR Model Loader (v2) (Clip)" +} +__all__ = ["NODE_CLASS_MAPPINGS", "NODE_DISPLAY_NAME_MAPPINGS"] \ No newline at end of file diff --git a/ComfyUI-SUPIR/configs/clip_vit_config.json b/ComfyUI-SUPIR/configs/clip_vit_config.json new file mode 100644 index 0000000000000000000000000000000000000000..4d3e873ab5086ad989f407abd50fdce66db8d657 --- /dev/null +++ b/ComfyUI-SUPIR/configs/clip_vit_config.json @@ -0,0 +1,25 @@ +{ + "_name_or_path": "openai/clip-vit-large-patch14", + "architectures": [ + "CLIPTextModel" + ], + "attention_dropout": 0.0, + "bos_token_id": 0, + "dropout": 0.0, + "eos_token_id": 2, + "hidden_act": "quick_gelu", + "hidden_size": 768, + "initializer_factor": 1.0, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "max_position_embeddings": 77, + "model_type": "clip_text_model", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "pad_token_id": 1, + "projection_dim": 768, + "torch_dtype": "float32", + "transformers_version": "4.22.0.dev0", + "vocab_size": 49408 +} diff --git a/ComfyUI-SUPIR/configs/tokenizer/config.json b/ComfyUI-SUPIR/configs/tokenizer/config.json new file mode 100644 index 0000000000000000000000000000000000000000..2c19f6666e0e163c7954df66cb901353fcad088e --- /dev/null +++ b/ComfyUI-SUPIR/configs/tokenizer/config.json @@ -0,0 +1,171 @@ +{ + "_name_or_path": "clip-vit-large-patch14/", + "architectures": [ + "CLIPModel" + ], + "initializer_factor": 1.0, + "logit_scale_init_value": 2.6592, + "model_type": "clip", + "projection_dim": 768, + "text_config": { + "_name_or_path": "", + "add_cross_attention": false, + "architectures": null, + "attention_dropout": 0.0, + "bad_words_ids": null, + "bos_token_id": 0, + "chunk_size_feed_forward": 0, + "cross_attention_hidden_size": null, + "decoder_start_token_id": null, + "diversity_penalty": 0.0, + "do_sample": false, + "dropout": 0.0, + "early_stopping": false, + "encoder_no_repeat_ngram_size": 0, + "eos_token_id": 2, + "finetuning_task": null, + "forced_bos_token_id": null, + "forced_eos_token_id": null, + "hidden_act": "quick_gelu", + 
"hidden_size": 768, + "id2label": { + "0": "LABEL_0", + "1": "LABEL_1" + }, + "initializer_factor": 1.0, + "initializer_range": 0.02, + "intermediate_size": 3072, + "is_decoder": false, + "is_encoder_decoder": false, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1 + }, + "layer_norm_eps": 1e-05, + "length_penalty": 1.0, + "max_length": 20, + "max_position_embeddings": 77, + "min_length": 0, + "model_type": "clip_text_model", + "no_repeat_ngram_size": 0, + "num_attention_heads": 12, + "num_beam_groups": 1, + "num_beams": 1, + "num_hidden_layers": 12, + "num_return_sequences": 1, + "output_attentions": false, + "output_hidden_states": false, + "output_scores": false, + "pad_token_id": 1, + "prefix": null, + "problem_type": null, + "projection_dim" : 768, + "pruned_heads": {}, + "remove_invalid_values": false, + "repetition_penalty": 1.0, + "return_dict": true, + "return_dict_in_generate": false, + "sep_token_id": null, + "task_specific_params": null, + "temperature": 1.0, + "tie_encoder_decoder": false, + "tie_word_embeddings": true, + "tokenizer_class": null, + "top_k": 50, + "top_p": 1.0, + "torch_dtype": null, + "torchscript": false, + "transformers_version": "4.16.0.dev0", + "use_bfloat16": false, + "vocab_size": 49408 + }, + "text_config_dict": { + "hidden_size": 768, + "intermediate_size": 3072, + "num_attention_heads": 12, + "num_hidden_layers": 12, + "projection_dim": 768 + }, + "torch_dtype": "float32", + "transformers_version": null, + "vision_config": { + "_name_or_path": "", + "add_cross_attention": false, + "architectures": null, + "attention_dropout": 0.0, + "bad_words_ids": null, + "bos_token_id": null, + "chunk_size_feed_forward": 0, + "cross_attention_hidden_size": null, + "decoder_start_token_id": null, + "diversity_penalty": 0.0, + "do_sample": false, + "dropout": 0.0, + "early_stopping": false, + "encoder_no_repeat_ngram_size": 0, + "eos_token_id": null, + "finetuning_task": null, + "forced_bos_token_id": null, + "forced_eos_token_id": null, + "hidden_act": "quick_gelu", + "hidden_size": 1024, + "id2label": { + "0": "LABEL_0", + "1": "LABEL_1" + }, + "image_size": 224, + "initializer_factor": 1.0, + "initializer_range": 0.02, + "intermediate_size": 4096, + "is_decoder": false, + "is_encoder_decoder": false, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1 + }, + "layer_norm_eps": 1e-05, + "length_penalty": 1.0, + "max_length": 20, + "min_length": 0, + "model_type": "clip_vision_model", + "no_repeat_ngram_size": 0, + "num_attention_heads": 16, + "num_beam_groups": 1, + "num_beams": 1, + "num_hidden_layers": 24, + "num_return_sequences": 1, + "output_attentions": false, + "output_hidden_states": false, + "output_scores": false, + "pad_token_id": null, + "patch_size": 14, + "prefix": null, + "problem_type": null, + "projection_dim" : 768, + "pruned_heads": {}, + "remove_invalid_values": false, + "repetition_penalty": 1.0, + "return_dict": true, + "return_dict_in_generate": false, + "sep_token_id": null, + "task_specific_params": null, + "temperature": 1.0, + "tie_encoder_decoder": false, + "tie_word_embeddings": true, + "tokenizer_class": null, + "top_k": 50, + "top_p": 1.0, + "torch_dtype": null, + "torchscript": false, + "transformers_version": "4.16.0.dev0", + "use_bfloat16": false + }, + "vision_config_dict": { + "hidden_size": 1024, + "intermediate_size": 4096, + "num_attention_heads": 16, + "num_hidden_layers": 24, + "patch_size": 14, + "projection_dim": 768 + } +} diff --git a/ComfyUI-SUPIR/configs/tokenizer/merges.txt b/ComfyUI-SUPIR/configs/tokenizer/merges.txt new 
file mode 100644 index 0000000000000000000000000000000000000000..76e821f1b6f0a9709293c3b6b51ed90980b3166b --- /dev/null +++ b/ComfyUI-SUPIR/configs/tokenizer/merges.txt @@ -0,0 +1,48895 @@ +#version: 0.2 +i n +t h +a n +r e +a r +e r +th e +in g +o u +o n +s t +o r +e n +o n +a l +a t +e r +i t +i n +t o +r o +i s +l e +i c +a t +an d +e d +o f +c h +o r +e s +i l +e l +s t +a c +o m +a m +l o +a n +a y +s h +r i +l i +t i +f or +n e +ð Ł +r a +h a +d e +o l +v e +s i +u r +a l +s e +' s +u n +d i +b e +l a +w h +o o +d ay +e n +m a +n o +l e +t o +ou r +i r +g h +w it +i t +y o +a s +s p +th is +t s +at i +yo u +wit h +a d +i s +a b +l y +w e +th e +t e +a s +a g +v i +p p +s u +h o +m y +. . +b u +c om +s e +er s +m e +m e +al l +c on +m o +k e +g e +ou t +en t +c o +f e +v er +a r +f ro +a u +p o +c e +gh t +ar e +s s +fro m +c h +t r +ou n +on e +b y +d o +t h +w or +er e +k e +p ro +f or +d s +b o +t a +w e +g o +h e +t er +in g +d e +b e +ati on +m or +a y +e x +il l +p e +k s +s c +l u +f u +q u +v er +ðŁ ĺ +j u +m u +at e +an d +v e +k ing +m ar +o p +h i +.. . +p re +a d +r u +th at +j o +o f +c e +ne w +a m +a p +g re +s s +d u +no w +y e +t ing +y our +it y +n i +c i +p ar +g u +f i +a f +p er +t er +u p +s o +g i +on s +g r +g e +b r +p l +' t +m i +in e +we e +b i +u s +sh o +ha ve +to day +a v +m an +en t +ac k +ur e +ou r +â Ģ +c u +l d +lo o +i m +ic e +s om +f in +re d +re n +oo d +w as +ti on +p i +i r +th er +t y +p h +ar d +e c +! ! +m on +mor e +w ill +t ra +c an +c ol +p u +t e +w n +m b +s o +it i +ju st +n ing +h ere +t u +p a +p r +bu t +wh at +al ly +f ir +m in +c a +an t +s a +t ed +e v +m ent +f a +ge t +am e +ab out +g ra +no t +ha pp +ay s +m an +h is +ti me +li ke +g h +ha s +th an +lo ve +ar t +st e +d ing +h e +c re +w s +w at +d er +it e +s er +ac e +ag e +en d +st r +a w +st or +r e +c ar +el l +al l +p s +f ri +p ho +p or +d o +a k +w i +f re +wh o +sh i +b oo +s on +el l +wh en +il l +ho w +gre at +w in +e l +b l +s si +al i +som e +ðŁ Ĵ +t on +d er +le s +p la +ï ¸ +e d +s ch +h u +on g +d on +k i +s h +an n +c or +. . +oun d +a z +in e +ar y +fu l +st u +ou ld +st i +g o +se e +ab le +ar s +l l +m is +b er +c k +w a +en ts +n o +si g +f e +fir st +e t +sp e +ac k +i f +ou s +' m +st er +a pp +an g +an ce +an s +g ood +b re +e ver +the y +t ic +com e +of f +b ack +as e +ing s +ol d +i ght +f o +h er +happ y +p ic +it s +v ing +u s +m at +h om +d y +e m +s k +y ing +the ir +le d +r y +u l +h ar +c k +t on +on al +h el +r ic +b ir +vi e +w ay +t ri +d a +p le +b ro +st o +oo l +ni ght +tr u +b a +re ad +re s +ye ar +f r +t or +al s +c oun +c la +t ure +v el +at ed +le c +en d +th ing +v o +ic i +be st +c an +wor k +la st +af ter +en ce +p ri +p e +e s +i l +âĢ ¦ +d re +y s +o ver +i es +ðŁ ij +com m +t w +in k +s un +c l +li fe +t t +a ch +l and +s y +t re +t al +p ol +s m +du c +s al +f t +' re +ch e +w ar +t ur +ati ons +ac h +m s +il e +p m +ou gh +at e +st ar +wee k +! !! +c lu +th ere +n er +t om +s el +ï¸ ı +wor ld +v es +c am +go t +in ter +of f +u m +ton ight +o ther +h ou +loo k +j e +i d +si on +be au +at t +el i +or t +re c +f f +st er +su pp +g en +be en +il y +te am +m m +i c +pe op +it t +at s +on ly +mb er +en g +b ri +m p +k now +b ur +b ar +in s +lo w +sh e +ro w +â Ŀ +t ro +peop le +vi a +lo w +ag a +be t +x t +f ac +ch ar +e ar +w al +s en +f am +b le +n ati +is h +n or +g ame +li ve +s co +le y +d on +ic k +b all +ver y +the se +p an +i a +at ing +c r +a re +g ir +ma ke +st re +sho w +. 
" +f l +u p +d r +than ks +il li +w om +st s +i g +s ur +ever y +c ur +vie w +le t +in to +mo st +n a +in di +g ar +ha d +s ou +v ed +an t +iti on +ma de +f ol +un i +it ed +ðŁ ı +ic al +th r +read y +ch ec +d ra +k es +boo k +e p +si c +mor ning +ne ws +c au +c t +w ell +an c +pho to +th an +or s +bir th +g g +ou t +ne xt +som e +en ing +stor y +ch ri +do wn +hom e +f fe +fre e +d a +b or +f il +ci al +than k +si de +le ar +qu e +l ine +t en +at es +ye ars +m y +pho to +beau ti +ri ght +n u +for m +shi p +b an +th er +d ays +g am +as on +g y +ðŁ İ +birth day +se t +ic k +e t +st ill +com ing +ta ke +ðŁ ĩ +b b +s ol +s on +d en +e p +mu sic +the m +de n +wh y +f oo +c ra +am az +w n +h ol +t ting +w r +u e +ma g +c ro +l an +c lo +b ra +a k +s ing +c al +re ad +' ve +jo h +b ab +d ri +b lo +bi g +er ic +in t +t or +tr y +l a +le g +hou se +m ic +v al +beauti ful +l itt +chec k +ne w +ver s +s w +ar i +pla y +h er +âĢ ĵ +w in +m a +con gr +sch ool +f un +. @ +he al +ic h +d el +wh ere +l on +ke t +tw o +mu ch +wat ch +v en +d ed +a st +k ed +b as +go ing +m p +e ver +w ays +ro o +de sig +l y +s ed +to p +l in +ch an +to o +it ing +d ent +gh ts +t y +sp o +ne ed +b lu +in st +be ing +âĿ ¤ +w el +l s +hi m +m ay +st ing +n a +el y +litt le +g a +n at +tom or +m c +h on +w ant +a ir +pi c +am eric +p er +le ss +wee k +ve l +a h +c ap +ch am +g er +ti m +tomor row +ne ss +st ate +h al +ser v +z e +o s +p at +v is +ex c +s in +f f +c ity +c en +an y +b el +su mm +t in +w ould +loo king +k o +ce le +fam ily +m er +po w +hel p +bu s +c o +c le +sel f +en s +ic s +th o +an i +ch o +le ad +b s +t wee +th ink +for e +ch il +vi de +di d +al e +ch i +v il +en ds +w ing +p as +' ll +v ol +s a +g s +man y +j ec +be fore +gra ph +n y +ur ing +w il +d d +bu il +f av +st ed +tr an +l ing +ou d +d ge +fi el +nati onal +st a +c er +w ere +in a +se ason +c ou +n ed +amaz ing +ti ons +cele br +n s +a th +he ad +s day +d ar +lo c +v in +an other +g oo +s at +n y +jo in +pre s +s es +s ing +an a +in ing +.. .. +c our +ï¸ ı +ac t +cau se +li ght +am s +t a +b al +f c +hi gh +off ici +t t +chri st +d ic +d ay +ra l +h or +: ) +vi si +n am +o b +ma s +gh t +re ally +t un +fin d +thr ough +por t +u t +ti ve +st y +n e +or e +ðŁĺ Ĥ +supp ort +ne ver +ev en +ðŁ Ķ +h a +y a +l d +u k +r an +j am +wi th +me di +d es +ne y +ch ing +al e +h y +k in +! ! 
+d y +pl ace +al so +b le +wh ich +bl ack +b li +s ay +par k +pl ay +ir e +vide o +week end +a il +ke y +p t +w ard +fri day +d in +ine ss +g ro +b en +al ways +t ball +ag o +m il +c y +pro duc +di sc +un der +ple ase +sp or +fu ll +e y +ðŁ Ļ +is e +iti es +c at +k no +u se +fo re +k er +ar t +hi gh +op en +s an +e f +our s +sh ed +st ri +d ro +aga in +i m +ðŁ ĵ +en jo +fu n +ge tting +p en +g er +c li +an y +ever y +e u +wom en +â ľ +e st +c ould +r y +" @ +th ou +sh a +comm un +b er +d ents +di s +wh ile +aw ay +di o +h am +g la +d ate +k a +mis s +un ch +w on +in f +roo m +g a +re al +ex per +di rec +sh ould +sp r +g ol +l ong +bet ter +or i +e y +i ence +il s +z z +h an +f ound +v s +â Ļ +po st +ti c +par t +m en +ren ce +ce ss +v ic +s il +sho p +ðŁĺ Ĥ +f ood +v al +sti c +y ou +s ays +e lec +st ar +o c +l and +i d +c tion +fiel d +s of +st art +wat er +fri ends +on es +ðŁ Į +f la +f ar +wh ite +par ty +in st +gr ou +t v +every one +m ent +j a +ch a +pr in +an ts +d uring +l at +l ar +we st +th en +k a +y oun +in sp +in te +we en +visi t +aga inst +re le +he ad +c es +to wn +loo ks +th re +re gi +ren t +pro jec +gir l +se ar +w o +m om +c ar +h un +pu bli +d i +p le +c all +c ri +u m +for d +per fe +fri end +h ard +ssi on +te st +pla ying +ar ound +be cause +ke ts +me et +sat ur +ar ti +wor k +j un +v en +r un +me mber +por t +su per +t wit +s am +el s +t ly +ad v +ati ve +at h +s ure +av ail +la r +s qu +ar ds +ev ent +m en +l l +o ver +lo gy +it al +tim es +m al +b ack +c oo +ma king +st ru +â ģ +it u +sh ar +g an +c as +s n +summ er +pic ture +f an +h in +christ mas +c y +pr oud +cham pi +desig n +pp ing +ho pe +c a +avail able +ma y +we d +photo graph +spe cial +sal e +sto p +er y +a we +al ity +hi story +am a +pre si +b ru +wor king +d one +d r +k en +fe at +w ood +ate st +sun day +mo vi +vel y +s le +f ace +sp ec +stu dents +b y +ha m +sp on +bus iness +d at +i e +i p +so ci +g lo +h and +re cor +r s +me e +ke ep +p ur +heal th +sh e +com ple +go d +da vi +col lec +li st +r a +clu b +t ers +in clu +th ings +pl an +â ĺ +joh n +sh ing +at ul +so on +blu e +g or +satur day +w on +congr atul +se e +âĿ¤ ï¸ı +tho se +ðŁĺ į +fin al +d ou +it h +o wn +ro ad +t our +a st +indi a +ti l +n d +f er +fav or +su l +lear n +fir e +ju st +grou p +a h +r ac +bo dy +u r +c are +à ¸ +p lo +o h +po s +gi ve +te ch +su b +c ent +er ing +y m +il ity +f ic +lon don +v ir +gu ys +b a +ðŁ ¤ +bab y +sc re +ðŁĺ į +tru mp +un der +chan ge +i an +col le +ss es +l er +ss ed +n ice +ann oun +pow er +s ar +a king +min i +s li +s wee +k ar +fu l +c ru +ac tion +a ther +) . 
+st and +de vel +a a +g an +le ft +lo l +re l +tran s +m ents +in t +e f +man ag +di g +gen er +do wn +p au +ti v +k u +th ur +k en +st on +f ans +tal k +twee t +t oo +sty le +pro te +se con +fr on +awe some +g l +p al +ne t +s or +la u +g on +sin ce +t ty +ser ies +me mor +b eli +fil m +di d +di es +o t +congratul ations +p ra +e ve +w oo +offici al +su c +in cre +b on +par t +pp ed +cla ss +si ve +bo y +cu l +perfe ct +t ou +d am +wel come +foo tball +h i +p ap +wa it +ad a +congr ats +youn g +exc ited +re ce +j an +v a +re d +st ra +medi a +' d +do es +le t +mu l +ill s +gre en +m el +to ge +fu ture +ye ster +vers ity +for m +ta in +i de +ch es +ki ds +qu i +ha ha +de ta +bi g +favor ite +gir ls +con tin +do m +sear ch +u al +a ir +d ers +mon th +c er +yester day +commun ity +ad e +do g +vil le +ic es +d eli +sy ste +ru n +is m +he art +c up +en ti +fe w +presi dent +e ds +un til +fe sti +o k +f lo +sa id +ol e +me d +tra vel + £ +ph one +toge ther +fa st +lo t +gam es +sh ir +bet ween +y es +th ers +do ing +m ac +at or +b and +fol low +projec t +devel op +di ffe +con fe +spe ci +ca st +y s +bo ard +r d +i al +sh oo +r am +ha ving +sh are +fol low +on e +n ame +m r +pu t +disc u +or y +c ame +ou s +s ite +twit ter +t b +t it +fin ally +z ed +su per +com pan +us ing +all s +li st +r is +sho t +g al +t ar +de l +joh n +âĢ Ķ +some thing +ra m +inte re +wh e +b it +ðŁ į +stre et +oun d +a i +tic kets +movi e +re al +k y +ta king +o pp +c c +l am +m oun +in ve +bl ack +us ed +on line +y or +loc al +gu e +c ks +o w +ge st +bo ys +illi on +con t +re ci +in ed +eu ro +no w +se en +p h +te ach +de f +sou th +su ch +aw ard +mu st +is su +ca re +fe el +p lu +l atest +spor ts +we b +te x +e ment +s k +fi c +w an +te ch +o t +bo x +n er +fre e +t al +a sh +c ase +ho t +won der +mee ting +er a +ch all +ðŁ IJ +jo b +il i +c ool +j our +th s +m o +f el +di e +mic ha +e le +te am +serv ice +st and +ma kes +p ing +ear ly +com es +e k +ho li +v ers +ag ue +s au +thre e +mon day +fa shi +some one +th ro +se a +b ad +supp or +tur n +ur y +m ing +photograph y +n ic +mar k +pre tty +ss ing +wat ching +me mb +ar ri +coun ty +be ach +fr an +cen ter +pol ice +b at +publi c +t an +pre ss +s af +s y +ge ts +ro y +n ers +y our +bu y +st ers +sho w +as ed +chil dre +af ric +in es +sp ace +sc ri +h all +pa in +ar ing +hom e +m ur +heal th +ch ed +s and +rece i +gu y +e a +americ an +re si +childre n +- - +i ri +ing ton +coun try +ro ss +le n +ann a +boo ks +b c +e ce +d om +lo vely +k h +pe t +g y +g ri +st age +off ice +ro ck +m on +b ay +t able +su n +m ed +th in +l or +f low +( @ +uni versity +stor e +fron t +goo d +z a +vo te +nor th +he y +an im +or der +mi d +with out +a de +re member +mar ket +? ? 
+mu s +tra ining +e duc +bu t +co ver +st an +sc en +b la +bre ak +l ou +s ame +g old +a in +o s +bo th +l it +ver n +a i +al bu +p a +enjo y +be g +ell ing +thur sday +inf o +s an +americ a +ha ir +te l +mar ch +con cer +colle ge +confe rence +ap p +h our +ch ang +â ļ +s our +ol s +we ather +w ar +p hi +festi val +secon d +cu te +pr ac +en er +str y +le a +pol it +s av +se n +o w +m i +ne ar +ou ght +z e +co ffe +w illi +d an +se y +davi d +e se +f an +de ci +the at +no v +ati on +tr ac +sc i +re view +c el +e m +u n +ju ly +or ig +ti on +d ru +form er +st ay +af ter +in v +too k +dat a +b al +tu es +d an +ev ening +ðŁĺĤ ðŁĺĤ +d ol +u res +pro vi +t s +e st +sig n +j ac +u k +s ong +ye t +bo w +in du +j ap +h oo +po int +any one +z y +i st +h ur +it al +buil ding +wom an +ch ur +j er +per for +co ach +le ague +ce ss +ne t +i mag +nati on +br it +qu e +aw ards +ag es +wor ks +c ed +man ce +l ate +ig n +mon ey +tru e +i i +t ell +pl ac +p ac +as y +wor ld +be hin +im port +read ing +gra m +gi ving +me t +h it +for ward +st om +pres ent +jun e +so cial +no on +mar t +hal f +s we +go vern +k er +deta ils +li sh +_ _ +ac y +si a +ber t +f all +! !!! +) , +th i +d iti +sp ort +k ing +f it +st af +c at +mu se +cen tr +y er +con tro +b loo +wal k +ac tu +did n +li m +lear ning +re search +wed ne +au th +h ours +k y +f ar +h en +.. .. +it ch +ri l +str ong +sk y +que sti +jam es +r on +d g +f ur +c in +do es +app ro +mar ke +tu res +ful ly +ch at +behin d +te m +fin i +mis sion +b att +fe el +he av +every thing +b ar +w ish +pre mi +i ma +exper ience +e ach +re port +swee t +tic s +spr ing +re spon +syste m +vic tor +l in +sa w +al ready +gh ter +f le +ã ĥ +br ing +albu m +- - +ell s +st an +to m +inter national +w ent +an ni +mat ch +pp er +st one +sm all +ra in +fashi on +are a +v an +ag ram +k o +thou ght +wor th +v an +m er +coffe e +it es +g n +arti st +c on +ar ch +c ir +se cre +gr ound +is o +h and +co m +bri dge +h s +x i +l ink +pu l +sp l +r ace +f li +ri ver +g as +di sco +d al +play er +f it +photo s +it y +o k +j or +tr a +ap ril +ad s +a di +sol u +beau ty +do or +me ss +up date +ali a +sch o +en ed +mom ent +sco t +sc ience +i or +ti es +ac ross +ous ly +sh es +does n +p age +wat er +m illion +cla ssi +l ic +ca st +form ation +micha el +ell o +s mo +in ts +vi sion +op ening +ld n +au str +tues day +win ner +po ssi +r ound +shir t +di t +b o +u es +il led +al ong +tri p +star ting +im pro +k an +per son +no t +re co +ne eds +c le +li e +re st +r ing +win ter +si mp +mo m +be er +fac e +tor s +us a +collec tion +ge or +se ssion +tr ying +la s +la ke +j en +orig in +stu dent +se cur +v in +pic s +ex pe +com p +gon na +e qu +b ad +le y +a u +memb ers +bre ak +w all +gi c +din ner +bu l +insp ir +r i +min d +ic a +win ning +tal king +t ren +s is +t en +wonder ful +s now +he ar +th om +no thing +gu i +st in +blo g +fe st +b un +le e +war ds +ch ance +dre ss +re n +pau l +p es +tech no +ru ssi +c ard +e ast +mar i +w ine +t i +la w +str ic +k i +ap e +au gu +pro fe +as h +cour se +ma il +ren tly +d un +m un +lo ve +is land +dri ve +s l +end ed +ma in +lo st +nat ure +âĿ¤ ï¸ı +ch ic +re por +p in +pr o +st ation +ce p +ta kes +compan y +go es +on d +ma ch +ra dio +d ad +ro ck +j a +p ay +champi on +e e +in de +tt a +ati c +t ab +beli eve +ener gy +z i +t at +wor d +on ce +re sul +y l +and re +an o +inst agram +clo se +t am +cu stom +w a +con om +sho ws +li fe +k in +ro b +t age +n ation +al most +list en +sa ve +re li +ac e +mar y +tre e +for get +j ack +wa iting +direc tor +h ill +bor n 
+te mp +f l +st e +on a +sing le +wedne sday +un ited +in o +@ _ +ne l +celebr ate +en ding +de al +j i +can ada +hu ge +tr ack +âĢ ¢ +f y +fan ta +an g +yor k +rele ase +p un +ep iso +wor ds +t our +p ack +i gh +classi c +perfor mance +ke t +after noon +recor d +win s +pro ble +âĿ ¤ +f our +b ed +ban k +d ance +s la +cal led +mi ght +a p +pa st +ðŁ ļ +diffe rent +it e +gi ft +ssi ve +chur ch +c us +pro gram +ho tel +ic e +ma d +secur ity +en ge +d c +en ough +st a +e ty +de ad +g un +he ar +m ir +hu man +gre ss +oun ds +pi ece +bre aking +gar den +fi ght +vie ws +f ish +star ted +run ning +gre en +ser i +s m +as k +d or +de ath +e conom +er i +ir d +s er +l unch +âģ ¦ +bo x +nat u +ba se +b an +f al +glo bal +wil d +wo w +out side +mo ve +le ad +an al +muse um +on g +ha w +pow er +than k +b ac +char ac +cam pa +dig ital +r o +op er +de v +w ol +p ati +f a +m ale +pap er +ill ing +c s +â ĥ +educ ation +ta ken +e ffe +m ou +s ad +" . +bas ed +staf f +inclu ding +li ving +a c +ch ina +mo b +stor m +lu ck +ph il +o o +y n +tra vel +k el +ti al +pr ice +boo k +import ant +bi o +p ool +ny c +f ab +lo ad +? ! +chall enge +cr y +ser ve +we ar +bu s +ta in +nu mber +ro r +k at +i z +th ough +ho sp +m m +fa ir +ut es +ho t +po p +fi ed +cam p +develop ment +li br +c ali +em s +âģ¦ @ +b ol +is ed +stand ing +mo del +it a +g le +bro wn +ima ge +ve red +for ce +o il +par tic +sh u +da ily +la w +se c +cla ss +cam p +holi day +cl in +k ers +pres ent +gam e +incre di +er ship +inter view +b ill +du e +and y +ab o +in nov +ke y +ac ade +p il +mo der +st ars +br and +f er +wee ks +con si +pr e +sa fe +wr it +di um +la unch +marke ting +ann ual +as si +cour t +la dy +c ted +and a +in side +chil d +opp or +sm ith +centr e +gu e +âģ © +f ren +st y +for t +ent ly +is n +ke ep +to ber +on y +bo y +al d +col la +de mo +le vel +com pet +ad o +b our +fanta stic +m ate +s u +sou th +oppor tun +vers ary +lat er +bu d +face book +la un +ster n +p it +! " +ma j +gr am +tb t +fi re +happ y +a ks +wh ole +actu ally +ill er +ell a +lo ts +al ex +an ge +lan ds +ðŁĺ Ń +en ter +r ou +episo de +p ed +in ten +sh ire +wh o +pl an +h o +ca ke +we st +mag az +fre sh +c c +n ar +ch ris +wr iting +w er +n om +l o +mi dd +dre am +o l +ti onal +de b +> > +be come +s i +gr and +all ing +hi stor +ri de +i red +saf e +que en +ci l +in tro +vi l +d ani +.. . +ar tic +st at +sh ort +or ing +sel fi +mis si +do c +b it +g all +b om +i re +se lec +d ition +ðŁĶ ¥ +fri end +be at +gh ting +ðŁĺ Ĭ +pe ace +ex hi +ant a +ab ility +il lu +j on +qu ality +tri bu +m es +play ers +fa ir +cu t +c ab +suc cess +b i +su s +pro mo +sch e +an ge +ic o +comm it +cat ch +ill a +kin d +feel ing +qu o +s ay +anni versary +spo t +mo ther +an e +p end +your self +op s +app le +min utes +p o +gr and +ri es +ha ha +care er +ed ition +de c +ric k +am i +concer t +iti ve +ge ous +d ly +t te +adv ent +i g +li ghts +ak er +sk y +âĥ £ +r ay +fini shed +w ay +s d +ac coun +ðŁĴ ķ +ck y +ch el +lit er +pain ting +lo s +st un +techno logy +n as +ma r +b il +afric a +ki e +ey es +gol f +plu s +ni a +it ec +serv ices +wed ding +kno wn +te le +.. ... 
+star ts +pa ren +w ants +ati onal +mon ths +win do +fav our +er t +magaz ine +ex clu +re ve +b c +origin al +e ss +n al +an ti +st ro +t ice +stu dy +à ¤ +v ac +nation al +fi ve +ra in +ve ment +u te +ver se +em er +ar my +possi ble +gue ss +val ley +ther n +cro w +m r +col or +on to +pic k +cle ar +dar k +t ac +wan ted +it ting +can cer +govern ment +di e +ri se +z ing +col d +f oun +stu dio +str ation +bro ther +a head +sh el +mic ro +ic ally +d au +sig ned +vi ol +a x +as se +i o +w re +spl ay +ch ick +augu st +pl at +ti ps +sp i +hu man +e asy +lo gi +mi ke +gro w +ag re +w w +sh ad +mo tiv +wi de +tur ns +om g +v ar +de fin +su g +j im +ðŁĶ ¥ +t d +campa ign +nam ed +re tweet +co p +t v +le av +k is +dou ble +s mar +issu e +vil la +in formation +li es +sto ck +n t +di stric +sh or +mi x +er o +se p +me x +see ing +li ve +re min +co de +g ur +s c +wil d +l un +h ood +spo t +fa ther +fore ver +up d +tra f +f ly +ne ed +gra du +tra in +ma ke +s ab +be y +si ze +lead er +tal ks +e u +lo g +fo x +gor geous +le ss +le ts +sur pri +my self +no te +li ves +f ru +lo ved +se ver +de m +j i +so c +h old +do gs +n i +â ŀ +lea ve +air port +ben ef +ex pl +shi ps +comple te +ach i +gre at +vin tage +j ack +ro c +woo d +pri v +off er +ey e +ver sion +te a +co ach +off ic +w ell +g en +s at +h h +you th +o x +? " +m t +mi x +g g +d le +natu ral +buil d +break fast +thin king +theat re +mo on +ber g +go als +geor ge +en e +exc ell +il ing +tun e +y ed +g ate +m it +net work +jo e +h ello +f b +tu be +we aring +ath le +stru c +har d +gla ss +g ers +thro w +g es +b t +indu stry +manag ement +ali st +go al +stre am +y el +a vi +ici ous +o thers +s ki +chri sti +bir d +e sc +m in +tr o +l t +j an +im p +ri ghts +sh a +or gan +cent ral +ar a +ro ll +favour ite +che ster +el se +p ay +car s +m ine +ste p +prac tice +maj or +h ang +ðŁĺ ĺ +n on +v ari +eng ine +vol un +di a +i led +arch itec +p ink +d s +th y +wa sh +web site +ba g +contro l +el li +f ra +an sw +d ence +y u +r on +ol a +g in +dr in +li c +cou ple +sp ar +g on +cre ate +c t +celebr ating +de ep +e at +te e +vo ice +dro p +vis it +at ors +sta dium +f t +w is +ro l +gra de +fam il +po ints +re pre +w as +traf fic +jap an +or g +hon or +tex as +man u +âĻ ¥ +safe ty +re r +b ag +em plo +rele ased +re gu +ak a +n av +ro le +sen ior +spec t +cro ss +lin es +be st +p ack +s in +ti e +mis sing +sun set +li ber +is ing +j ay +sk i +champion ship +ac tiv +la dies +play ed +y y +pu bl +al o +pri de +s r +pa ki +lu x +sur vi +ck ed +e ts +cho col +austr alia +par is +mi les +h at +ment al +al a +me an +mob ile +en a +in si +f ound +chi ef +t ag +incredi ble +re turn +à © +goo gle +fren ch +cre w +hal lo +ali an +j az +ch er +sil ver +nor th +eng lish +base ball +c af +lim ited +follow ing +app reci +ear th +k ir +ve mber +w ed +p tion +g ed +oc tober +fl ori +c r +en cy +ga ve +lor d +stu ff +ber ry +po st +sm ile +bro ad +st ate +gg er +me ans +ic y +gu n +y o +ma ster +bur g +han ds +ni e +/ / +uni on +brit ish +big gest +distric t +am ing +h il +o ce +per son +pas s +en vir +scho ols +arri ved +anc es +insp ired +ex pla +be n +libr ary +bo tt +am p +ste ph +cont act +b ang +m s +cali for +t old +batt le +b b +chic ago +âľ ¨ +str ate +sh i +de ce +- ) +ad d +la b +j ones +leg end +cast le +ing er +st ance +be l +ur a +re fu +lead ers +po t +se x +h ic +artic le +ki d +fr ance +x x +ex e +gui de +volun te +pr int +al i +ce o +twee ts +w x +scen e +vol u +ant i +h an +as soci +shar ing +ro se +mini ster +sh er +in ste +cle an +demo cr +po ster +sk in +p 
sy +pro per +cra zy +i am +o re +in i +any thing +po d +mo ving +cl ick +ex plo +com b +cra ft +f i +bloo d +is ra +publ ic +d ent +ol ym +eng land +a si +ch er +fac t +envir on +har ry +g one +me dic +enjo ying +just ice +j r +indi an +wi fe +s ound +t es +dra wing +p al +ide a +cr it +ju li +il er +war m +cl ar +thou ghts +def en +coun cil +intro duc +di ed +jan u +an i +s end +li er +m l +intere sting +tra de +win d +b ay +s ac +anc y +sour ce +b es +org ani +ar ly +lar ge +ff ici +ta g +u t +de sp +o es +tit le +sy m +pic tures +op en +wom en +sho wing +ri a +le ast +lead ership +cur rent +elec tr +val ent +list ening +c key +gener al +de ser +du ce +; ) +c ent +ðŁĺį ðŁĺį +sco tt +po or +selfi e +ev ents +i on +wr ong +de v +h ill +sep te +cul ture +l ine +sor ry +s ent +si ster +ce pt +k ri +no vember +ar i +announ ce +z ation +br an +g ent +d u +l en +per s +f m +mart in +o p +e mb +om e +midd le +suc cess +pe ter +janu ary +f lu +rac ing +d av +bi ke +ðŁı » +pe t +shoo t +profe ssi +feat uring +septe mber +now playing +sta ur +z a +on ic +qu ick +bas ke +spe aking +mil it +z er +chick en +b ell +s ad +co ast +lo ving +y ers +d j +pan el +ver age +s wit +ic ks +b ou +califor nia +s am +paren ts +er o +k illed +ph ys +jo bs +mi gr +an th +e mo +hallo ween +and er +c m +compet ition +e ag +s ket +sp ir +may be +exclu sive +app e +jour ney +scre en +for d +i o +h ate +u g +sou l +her o +soci ety +sy n +gu it +n h +d j +as es +im pre +ti me +sal es +d d +f ts +summ it +stun ning +om s +tur ned +cle an +sof t +be at +re staur +de red +en ces +ma gic +di o +sh ine +gu est +health y +exhi b +stor ies +po pu +n is +el a +bel ow +fun ny +resul ts +s ne +cur rently +ar d +down load +f light +m al +f ine +p ad +ch u +ent ed +h at +ðŁij ı +ste ve +j o +mar k +r at +b all +p c +p on +b by +o li +ar ts +as ure +bow l +att ack +mi c +de ar +ran ge +en ter +chocol ate +br illi +ac cess +, " +? ?? +ch ap +con st +t n +mat ter +blu e +gall ery +em p +work shop +lead ing +y ours +baske tball +w anna +th u +_ _ +mar ri +sle ep +bi a +ch e +ma d +imp act +o wn +si r +chan nel +euro pe +e sp +k itch +hosp ital +w ra +roy al +f s +ne u +qu ar +ne y +ac ks +ch ase +pp y +st al +at ely +ti m +dece mber +r are +per form +cre am +we ight +ch oo +ni ght +ha ven +fr anc +kh an +buil t +hel ping +tru st +ty pe +gol den +ta x +s now +s wi +di sa +questi ons +ve y +li ght +c n +cl oud +thom as +ag ed +sh ou +te ams +gr an +re ason +a a +you tube +v p +pi zz +manag er +bur y +cre dit +tre at +ma x +i k +ma in +g ing +de ad +pro bab +ye ah +ã Ĥ +br and +so li +pl ant +ta yl +gir l +ðŁĺ Ń +nam ent +au to +mess age +ko re +n ur +ter r +ag u +ma p +sen ting +lo ves +gi ves +g ab +z en +ro bert +con fir +w ars +o m +sta in +cam era +and er +won der +a b +ca p +s old +su it +wal king +contin ue +effe c +dau ghter +d anc +cha in +mul ti +ki d +y an +champi on +v o +ta ins +ho st +min i +mis sed +re sc +ly n +fin ish +del icious +s as +tayl or +i b +pro mis +produc ts +moun tain +flori da +regi ster +tre at +rec ent +fe male +boo th +mat t +ve hic +s op +mo tor +suppor ting +phi c +ex tre +dr ink +lan e +th ird +p s +con stru +ce re +far m +ðŁİ ī +tu red +ðŁij ī +c ats +a j +gi e +shoo ting +as ked +paki stan +am e +m b +g il +leg al +squ are +in vol +dra w +oo oo +!! !! 
+opportun ity +p y +e i +b ts +teach er +charac ter +john son +br on +ly wood +ch ine +c ing +c ine +d ge +gam ing +russi a +ci a +quo te +ric h +go v +flow ers +sp iri +st in +grow th +ðŁı ¼ +comm er +j uni +mu m +r an +s na +a ren +c b +ac tor +col or +si t +pa ir +ch i +bo w +acade my +hel d +r ang +me tal +y l +ac tive +probab ly +t ch +need ed +spe e +cho ice +ital y +ry an +ðŁĩ º +flow er +v it +m n +found ation +b ak +si ons +ne igh +f loo +he ard +re mo +fre sh +ing ing +re f +to wn +cl ou +je sus +spiri t +cou ldn +z es +ðŁĴ Ļ +willi ams +pro ce +moder n +pro cess +sho es +cre ated +tri c +issu es +ann e +att en +de but +h r +n it +sti g +a po +e ps +z u +ã Ģ +si x +car ds +lan gu +fam ous +tour nament +se l +e bay +y n +st on +k ick +announ ced +k am +vo c +brilli ant +hou se +che ese +war ri +mus ic +ho ckey +ðŁĺĤ ðŁĺĤ +sk ills +au tom +smar t +med ical +mon y +e x +gu ar +gi ve +pers onal +ven tion +al li +pre ss +flo or +m c +victor y +hi m +simp le +th or +ðŁĩº ðŁĩ +ta il +lu cky +ale x +qu ite +bo t +ssi ons +chall eng +c ann +amaz on +h ell +b ought +) : +ed y +secre t +produc tion +inde pend +de fe +ad ded +p r +p ag +be d +gre atest +with in +j ay +ðŁ ¥ +ire land +re ly +s d +te xt +dri ving +pro gram +spe ed +col um +str on +à © +fore st +â ĸ +mach ine +co in +sc ar +oun t +bi e +¡ ï¸ı +por tra +comm on +wre st +recei ved +kno w +inve st +pl ans +ac cor +ad op +ter y +re ali +p p +k al +art work +me an +go d +inste ad +an ci +motiv ation +as ing +inspir ation +up coming +polit ical +euro pe +m ers +heav y +ðŁij į +fe bru +scot land +ou gh +b t +bo ss +sche du +spe ak +n ick +u red +in o +e k +ri sk +tor y +pres ents +b on +ru g +st ates +exhib ition +il o +m ill +br ought +: -) +tou ri +com e +offici ally +champi ons +do ors +re p +po se +ex tra +k ings +soc cer +squ ad +app lic +at a +some times +t ari +excell ent +ðŁĺ ĺ +stra ight +car ol +ri p +âĢ į +gra phic +m ol +elec tion +febru ary +as ons +l i +di r +m t +n ick +u su +m rs +com ics +inst itu +cor por +v i +ðŁĻ ı +tu ral +di se +ac ci +we are +am ong +sho pping +t ill +wh at +cha ir +sp an +chine se +innov ation +jo y +k it +cent ury +ob ama +ph ili +f c +re ach +c iti +ul ous +n on +d ang +happ ening +bur n +p el +or ange +d v +k ick +cla im +ing ham +ph y +no v +pod cast +wh i +ni ghts +ear lier +be ar +la h +exc iting +or a +gi ven +s lo +memor ies +contin ues +produc t +gh o +c d +kno ws +ðŁİ ī +publi shed +discu ss +y ard +i phone +tri es +w all +fe b +are n +tru th +win ners +tu re +diti onal +milit ary +proble m +m and +do g +lo ss +c ric +can adi +ve ter +villa ge +" , +y r +un g +don ald +ag ing +bir ds +sci enti +le s +th is +regi on +tic al +itt en +il a +ðŁĺ İ +d ad +di am +abo ve +st ren +li t +p ir +la b +fo cus +bus y +d ur +app ly +s ma +auth or +ac i +exe cu +dom in +re la +jack son +at o +wash ington +ðŁĻ Į +k ill +popu lar +ce ment +ro ad +e ating +loc ation +v ent +ar re +n an +cu sto +advent ure +or din +spor t +ul t +lo ck +questi on +dri ver +land sc +on i +k ins +p d +jor dan +te red +k k +a f +chil d +s p +just in +en i +s elling +z o +wh it +bo ston +partic ip +sig ning +happ ened +he at +m am +dre ams +lo ws +gra ph +the day +head ing +br o +ble ssed +vi c +ve gas +h d +in ning +ro man +and ro +den ti +u se +c it +pro gress +writ er +bo b +ff s +gro wing +b ly +aw are +ex am +sp ent +be t +sc ore +bey ond +do cu +ad el +s f +cou ra +colla bor +in c +priv ate +bo at +* * +z one +p ha +b ill +to tal +plan ning +to wards +plac es +pre view +cre ative +dam n +ide as +se ems +po ten +say ing 
+di splay +s w +a qu +lou is +by e +li l +e mail +we stern +ger many +ell er +re s +f ant +ment ary +de als +ric hard +jer sey +stren g +ra d +pizz a +mon d +w are +l ac +g i +ar chi +c d +yel low +rec ently +re ach +à ¹ +kitch en +desig ned +tr y +g al +restaur ant +at ure +w w +j as +l ma +ðŁij Į +pa in +av o +min ute +sch ol +ther ap +tic ket +d ry +jap an +diti ons +ter ri +sel ves +happ en +t up +ma g +cop y +sh er +free dom +f ile +speci ally +tor onto +lo ad +g ary +re y +answ er +lo y +cau ght +pri ze +u ne +fic ation +ni ger +sy d +tou ch +feat ure +jaz z +recor ds +him self +di sh +ro ber +spot ted +ma ster +wa ve +fin als +bu ll +for um +al d +re comm +ch a +a e +d oo +inst ru +tru ly +l g +in k +bro thers +de st +j im +m it +clo sed +is on +tri ed +s anta +af fe +w an +hor se +g row +camp us +rel ation +nati ve +jour n +go v +o ct +k it +b ound +part ner +re ma +crow d +! ) +c alls +ra il +qu ali +solu tion +con test +con vers +sn ap +b ase +in iti +ta x +y e +ent repre +it or +constru ction +foo d +present ed +n ings +cli mate +k m +mo del +b j +blo ck +present ation +dre am +fi x +c alling +bus ine +con gress +under stand +we b +val ue +ï¸ı âĥ£ +mex ico +it ely +ki m +char ity +ref lec +bl an +fl ying +anal y +famil ies +b and +reci pe +celebr ation +ac cep +ar y +to t +g b +intere sted +cap tain +âĻ ¥ +ti p +ab sol +bra z +inve stig +o logy +de c +tru ck +ver ing +c lear +don t +go tta +ad vis +beg ins +ma ss +de scri +blo ck +k im +davi d +son gs +memor ial +feat ures +su stain +' . +gra b +jo se +v a +con serv +se ts +man chester +fi ghting +de gre +ag a +in d +sle ep +pos ition +ha ir +sig ns +pol icy +it o +al ert +st am +sp end +w y +absol ut +d m +anim al +my ster +success ful +proble ms +ro bo +k ay +gar den +p d +may or +d ale +t ol +off ers +vis iting +friend ly +tre es +offic er +accoun t +ke vin +ðŁij į +gi ant +contin u +con su +tr act +n fl +ðŁĺ Ĭ +h q +b ility +a ar +dis ney +te en +on ed +wh ite +tra iler +de dic +al one +absolut ely +dig ital +willi am +in ation +s wa +e e +enti re +ger man +ro ll +h its +co st +st ay +th a +ali ve +accor ding +co t +liter ally +her it +re ti +haha ha +exper i +li kes +g t +ste el +__ __ +ch air +christi an +to wer +diffe rence +m d +tre ss +mi d +prin ce +afric an +fe der +foo t +car ri +ser ved +r ice +sh all +feat ured +ck er +rec ru +po e +sen se +ni fic +com edy +cont ent +f at +po sted +con tribu +tim ate +li ver +mb le +inter net +ag e +europe an +cl ing +gla d +ff ic +sc o +ak es +el le +ter min +ton y +p ale +col our +seri ous +pat ri +movi es +b m +professi onal +ad o +al u +br inging +f alls +isra el +ter m +langu age +bro ok +man n +commun ic +can not +ac ti +p he +y an +entrepre ne +tur key +log ical +lon g +ar m +ur s +work ers +ing ly +gg s +ri c +tu al +recei ve +op ens +ge ar +soci al +fe et +c king +ad ver +fin an +fe els +sp la +h r +ea ster +bra in +ã ģ +fi g +le dge +ne arly +prote ct +ma ssive +e th +aw a +ðŁĺ ģ +y rs +aware ness +defin itely +k n +imag ine +k u +syste ms +ðŁij ı +f as +li k +provi de +am o +disco ver +inf lu +ma ker +g az +fit ness +stre et +er s +te d +w c +ys is +pos itive +hel ped +que st +andre w +bra d +b in +hang ing +l ing +bri ght +se ction +ma ss +ðŁĻ Į +follow ers +ho sting +tem por +fla g +a ve +let ter +k ur +re qui +of ten +cry p +su ff +âļ ½ +russi an +treat ment +al le +ha y +l an +keep ing +hol y +power ful +pre dic +fun d +e specially +windo w +je wel +il y +ðŁĴ ľ +gener ation +app a +seri ously +o d +ðŁĺĤðŁĺĤ ðŁĺĤ +cer ti +iri sh +ðŁij Į +mi ami +be th +v ity +se cu 
+che f +cri me +graph y +ma x +arti sts +re volu +gu ard +spee ch +u c +upd ates +fac es +st ant +chang ed +repor ts +low er +pe ar +n c +k il +loo ked +spe aker +s f +re spect +ok ay +oce an +s itting +architec ture +tra il +se at +i ra +le g +japan ese +d am +u lar +sw im +polit ics +finan cial +ol d +mou th +at temp +de stin +fi shing +atten tion +me m +chang es +deci ded +reli gi +g in +c av +z z +ad am +ma c +wr ite +beg in +sc ul +al ter +is s +ath on +imag es +m oo +jo ined +ðŁĺ ī +âŀ ¡ï¸ı +pas sed +mu sli +h ir +lar gest +cam er +com ic +gh ted +rug by +bur gh +gg ing +te sting +pre par +lau gh +al ed +impro ve +beli ev +adv ice +sha res +he art +tur ning +s b +t el +caf e +n es +dani el +pat ter +t z +se tt +par k +c and +st ick +happ ens +bri an +ne west +e pic +ad or +ki es +war ning +anim als +custo m +ar c +di an +gol d +cor e +t f +c ity +pan ts +re ality +con fi +in ju +fo x +gu il +k new +âĺ º +cor rec +itu de +d den +. # +re duc +pas s +f on +y a +ow ner +re turns +n c +e ast +ap ol +in sur +th o +si m +juni or +be e +ang el +att le +elec tric +hor ror +cra sh +e ye +pat h +sou thern +emplo ye +ge o +t an +ha z +r ally +ðŁı » +proper ty +was n +enjo yed +gre y +g as +bre w +nor thern +hol ding +g p +ta ke +ch art +ly n +dr ama +z o +pa id +throw back +cu p +discu ssion +down town +w ill +le w +b is +t ary +bre ad +up on +r ate +teach ers +it ation +anc ed +cy cle +choo se +d c +ir an +co w +da ve +ra ise +prin cess +fa ith +- > +indu stri +sp ain +guit ar +fac ts +m n +sp en +cour te +go tt +projec ts +au di +o sc +pe ter +s and +intere st +happ iness +ven ue +sol di +surpri se +poten tial +per io +custom er +i i +g ni +manu fac +e co +bro ken +sing er +vel s +wal es +hu s +in j +f our +tal ent +d ying +mat the +fil m +jo ining +s ell +j ar +lma o +sur ger +bb c +sour ces +au stin +ni k +char les +f am +prin ci +ange l +cas h +lo t +o red +pla ys +pl ate +don e +memor y +br ings +n ba +solu tions +teach ing +gr ace +cir cu +hel ps +foun der +mar y +expl ore +de cor +par ts +ch o +inte gr +ha u +is es +pu tting +in er +r it +v y +mic hel +blu es +every day +for ms +bi o +ye ar +p in +t ter +spr ing +) ) +po t +al ing +perform ing +sh an +plan et +mus ical +head s +it alian +stru gg +âĢį âĻ +w ings +pu mp +h h +tr ou +a id +pri me +ear th +pa int +mon t +am y +bb c +fab ulous +fru it +andro id +bour ne +cere mony +enti al +? ? +deb ate +on ing +dra ft +sol ar +t x +j am +cor n +!! !!! +bro o +mil k +po sed +o hi +mo vement +b ren +part ner +p g +et te +ar ies +sh out +n g +leav ing +t ells +sen s +ta ste +kel ly +wor l +gy m +ric h +e gy +pi d +ma s +â Ĥ +courte sy +fran k +incre ase +wr itten +pp ers +re l +ha i +s as +s ound +tt i +w ich +ri ver +.. ." +a g +fel low +ro me +sm all +gen cy +ic an +lux ury +pro of +me t +wild life +mom ents +ra ther +cor ner +com pe +canadi an +lik ely +therap y +li am +econom ic +indi e +rou te +fi ght +ho pe +se tting +ant ly +cro ss +fant asy +de e +sket ch +comp li +ym i +ru les +engine ering +fig ure +ro w +. 
, +f w +syd ney +w ou +t ation +dre w +us es +the re +sp read +struc ture +pat rick +appa rently +ro s +h ills +w we +ann y +com mission +di v +f ying +con sul +anal ysis +ex i +ten nis +vehic le +ðŁĺŃ ðŁĺŃ +as s +high ly +op ened +b ann +ðŁĴ Ļ +mp h +wi shing +v or +fi f +give away +r r +ra y +je ss +g at +ic ymi +x it +high est +yor k +pi e +invol ved +high er +ri e +mal ay +int elli +desp ite +che e +sar ah +be an +reco gni +ar sen +tal ented +pas sion +ic h +ab c +lead s +dise ase +v is +se c +pre senting +m illi +hol e +sho ts +de part +surger y +gov t +b in +du al +e vi +lon ger +ev ol +scre en +portra it +et c +lo se +ch at +p en +p i +om a +s ick +er c +compan ies +en try +plan e +gr y +ven e +liver pool +premi ere +sha red +a red +fil ms +ir a +holi days +cric ket +ici an +v ing +. ) +ul timate +di vision +con duc +se pt +for ces +mon t +s mart +disa pp +sun shine +in d +b less +ma de +col ors +fran k +ir on +bott le +s go +m ood +j ason +er ic +bir th +te en +respon se +tar get +state ment +fe ar +th el +al um +ar ab +bl in +direc tion +ste ps +er ial +wor ked +at l +ðŁĴ ķ +fel t +pol i +scen es +hom es +b ell +e at +ate ful +t in +l ace +fol ks +p se +an n +wis dom +fa v +but ter +s r +are as +sm oo +bi z +dg es +app o +mo re +the m +effe ct +windo ws +sun ny +cap ital +tot ally +c ities +gr ant +mb ers +s low +au tu +il ities +w ro +ri sing +st ics +viol ence +i gh +qu ot +h it +t c +herit age +bu ff +ne s +z ar +den tial +ex ac +ed ge +de ep +aren a +be came +benef its +mar ks +mb er +a z +am es +pre ci +dra gon +re g +d ings +do s +ðŁĴ ª +n el +s ity +me al +di st +leg end +pur chase +pic al +st ick +f at +du ba +profe ss +car to +pro f +coun tries +respon si +se qu +fa b +tribu te +hon ored +prac tic +pur ple +an ton +pa red +t ough +summ er +environ ment +s ons +ðŁĻ ı +m ps +gi es +her oes +t elling +hen ry +f en +know ledge +Ģ ï¸ı +f r +ne g +u re +ac king +hear ts +s oo +hol lywood +ju mp +sau ce +schedu le +tur n +yo ga +cre ating +c ket +cre ek +â Ń +custom ers +ma dri +gu l +asse mb +moun t +c ell +to p +st al +dav is +t wi +sig n +premi er +iti ons +he aring +un k +pati ents +app ear +heav en +al ty +doc tor +a e +plat form +je ff +ðŁĵ · +regi onal +bi d +box ing +ex ten +or ity +a w +w ise +il le +sever al +bi e +s itu +sy ria +âľ ħ +remin der +enter tain +li on +part ners +in n +ph ar +f au +pl s +expe cted +sug ar +deci sion +s b +ch ron +associ ation +leav es +vis ited +sh ap +ðŁĴ ĸ +fur ther +h ann +w i +run s +l er +fun ding +fil led +.. .... 
+tin y +han g +or g +co ol +se min +ðŁı Ĩ +spon s +nav y +sa int +dru g +d al +r oun +co vered +tra ditional +invest ment +de te +al ism +f low +n is +sun rise +fe at +f ted +we ird +je re +ve gan +medic ine +an o +ac cu +deli very +temp le +chang ing +wil son +phili pp +re fe +n d +is er +g ay +r and +ati ves +t ely +p and +intelli g +g are +am bas +de mon +commit tee +strate gy +refu ge +bud get +prote c +pi er +ex press +nom in +econom y +al low +ic on +gal ax +o h +indi vi +dem and +vir gin +lu ke +ali sts +man i +s mi +ju dge +ent y +mic hi +resul t +am ed +spe aks +' , +hou ston +sh in +b ing +fl y +ch em +au to +v as +ge t +ar m +thank s +d in +gan g +x x +si on +loc ated +p l +jo sh +in fo +jo ins +adver ti +ot d +el d +si e +re asons +v ent +ðŁĩºðŁĩ ¸ +â ł +convers ation +stu di +ðŁĶ¥ ðŁĶ¥ +go s +s ounds +un it +mu sc +ge l +ack ed +pac i +co s +de re +u u +a o +la m +inspir ing +ar ms +tw are +mat ters +ad dic +du de +ex t +cri sis +b ath +me et +sing h +expe ct +del hi +resc ue +wor st +au g +shi pping +ser ving +st o +dar k +ac es +histor ic +landsc ape +desig ner +b illion +gr ateful +wa ke +e ve +m iller +hou sing +dy nam +is co +be ha +sh op +pr ou +e as +a sia +e ding +k on +depart ment +aw ar +mar ine +in ci +photograph er +ta pe +lo go +r ings +d it +-- -- +vin yl +w c +vo ting +se ven +ambas sad +dal las +t u +com ment +k ra +b les +w ag +u d +au dio +stri ke +offici al +o ts +me tho +to ols +ra di +al an +hun t +wat ched +a ke +fa ke +drin king +mer ry +m l +b day +ri o +ni ke +c ant +re pe +co stu +mur der +ak ers +ch ers +ou ts +beg inning +so s +ad es +n in +not es +wro te +sol o +c i +li ghting +ur ban +bre xit +att end +shir ts +pla yo +ac tress +pl ic +stand ard +quot es +par ade +anci ent + © +tur ing +re e +pri mary +fla sh +citi z +mat es +ste in +z i +clin ton +sk in +gen e +hu m +g ar +t le +y i +fo cu +de an +pl ants +cy ber +b u +om e +ho p +ad dress +ti x +gi fts +relation ship +sub scri +fe ed +exac tly +haw ks +ex o +stre ss +s n +arre sted +an e +sof tware +z ero +the me +mu mb +im migr +mi a +make up +ple asure +uni vers +har b +eng ine +ap er +r in +br a +institu te +le ather +al th +sing ing +co s +gh ty +me as +st ic +si de +insur ance +co t +pit ch +moun tains +cri min +su pre +valent ine +at er +wou ldn +sc ale +rel ated +re gar +star tup +pack ed +mi ke +week ly +p ts +coun t +ha r +gott en +min d +ber lin +con ditions +swit ch +cor n +sa ve +g li +emer gency +tun ed +sto ck +discu ssing +every body +s day +whe ther +wrest ling +ec es +gen der +ch en +ðŁij Ģ +madri d +mar athon +e gg +i er +th x +as king +kore a +wol f +ay a +g m +g au +at ory +v r +gra ss +k illing +b ble +ur o +un i +e th +sh ore +th en +re ale +bot tom +ex erc +k ar +or ies +ad ri +san ds +se x +. ' +volunte ers +per form +par liam +inclu de +deli ghted +execu tive +fu el +kis s +ã ħ +char ge +h u +ca kes +ve t +g lu +agre e +pr ices +n au +h l +g ru +ra j +streng th +b ic +sp ending +al es +av en +b last +: ( +yo f +nor mal +si x +qu ick +se a +d aw +mee ts +lo vers +upd ated +po tat +comple ted +coo k +opportun ities +p ure +organ ic +tem per +c am +avo id +par king +duba i +and o +di stri +to y +comple tely +don ald +tri al +bas s +b oun +back ground +v as +mar vel +lu m +ru s +t ool +com missi +throw back +fin ding +is lam +! ? 
+st op +e vil +or al +resi dents +i denti +o ak +ðŁİ ¶ +l il +span ish +chap ter +sto pped +direc t +ho sted +pic ked +lab our +lew is +defen se +à ® +health care +wh is +mat h +pe ak +ra ised +fi x +bu ll +th ir +chel sea +fol k +tr e +can di +pau l +ei ther +ad am +poe try +jewel ry +ðŁ ¦ +pr ay +Ø § +g c +o z +wi shes +fore ign +sun g +lear ned +en e +n ing +micha el +illu stration +legend ary +w av +b au +ðŁļ ¨ +cal end +stre ets +â Ĩ +mon ster +bu ck +g r +scho ol +ba th +wa ste +ne ck +ha wa +be ach +re plac +jec t +on er +fac tory +coun t +ðŁĵ ¸ +mor gan +der ing +se an +steph en +de p +no vel +vide os +ic al +press ure +arsen al +ex pre +ir s +tren ding +ss a +fla sh +re sear +thr ough +profess or +scul p +to s +gg ed +mm a +be e +a pe +hun ter +am i +he i +pla stic +bu cks +uni verse +le gen +niger ia +ple ased +ri s +thin ks +autu mn +i ds +d is +anth ony +ðŁı ½ +ak ed +gla sses +fin ance +z er +k as +con tract +nu mbers +sh aw +partner ship +t il +laun ched +s al +victor ia +theat er +usu al +nam es +perio d +eli za +i th +bar cel +ro cks +bag s +mat e +distri bu +j on +di ffic +ali zed +cur ren +sco red +b ha +du blin +ro se +in ted +soli d +beha vi +wal ker +simp ly +garden s +head ed +in i +ohi o +we ap +f o +gl en +e state +ran dom +th under +thr u +k ill +jac ket +it i +entertain ment +thanks giving +ent al +en coura +el o +a ther +tan k +high lights +f ting +ru le +model s +bor der +bj p +hus band +in done +ken ya +be ars +al o +n inten +pi x +str o +or ders +sal ad +ro ads +n or +l ation +sop hi +ðŁı ¼ +pi eces +b one +min s +inclu des +nu tr +phi l +s ent +fun dra +ga in +bor ough +n ad +mon day +activ ity +it ems +be coming +ken ne +de tro +car di +gue sts +u x +world wide +sever e +new s +thank ful +fic tion +ve ge +m all +si an +er al +inj ury +le e +men u +danc ing +scot ti +exam ple +( # +na i +studi os +ba i +ðŁĴ Ľ +j av +diam ond +vin ce +ric k +prote ction +lin col +cham ps +appro ach +d ar +m ile +clou ds +je ff +in fin +l ers +p les +pe ace +go p +âĻ ¡ +tech n +str a +a verage +ef fort +introduc ing +di versity +austr alian +am p +boo st +s ke +pati ent +appreci ate +ici ans +pu r +f ell +woo ds +illu str +ðŁ ĸ +ag ency +ac tions +brit ain +under way +se attle +el and +ag o +f ill +stre aming +pro test +challeng es +ky o +et sy +coo king +exper t +ru ss +rain bow +commer cial +sp in +be ats +c ry +val u +el i +th row +gr ams +le vels +michi gan +c ad +ador able +const itu +w s +pu b +mid night +th at +net fli +braz il +die go +regu lar +jo y +âĤ ¬ +li qu +ea stern +k ni +fl at +n p +bro wn +w er +se y +tt ers +ac ting +v anc +cy cling +program me +ra w +comple x +tat too +throwback thursday +se ssions +ro oms +si ght +speci es +bom b +lau gh +ke eps +mo on +offic ers +con ver +t r +ha sh +t ack +ri ous +ad ap +a j +reco gn +ex po +sug ge +confir med +rol ling +dre ssing +ic t +fri day +ph ones +ri dge +con cept +ro y +ke ys +ef for +c ate +k ne +ev en +l ay +commun ities +mo d +n az +every where +al ab +bit coin +ban ks +out door +feder al +sto res +h p +c al +m ely +sig nific +be ar +re public +clo ser +al lah +pic k +x d +pal ace +ch ill +b am +er ous +un a +al len +out standing +olym pic +supp ly +fi gu +v au +l p +char lie +un es +> >> +legen ds +ici al +co ast +benef it +mul ti +f its +far mers +am ount +si sters +har ve +hon ey +que en +b ers +pl ann +âŃ IJ +m u +barcel ona +al ber +stat us +re main +ex tra +c andy +vi ous +âľ Į +o v +warri ors +-- > +ju mp +am ar +x mas +stu dies +i ors +k or +don ate +pre p +fi sh +im a +pain ted +ad mini +co splay 
+spor ts +dro ps +fi ghter +evi dence +ðŁĴ ª +la ke +ro b +cine ma +pro file +à ± +stan ds +leg acy +sh ape +ro of +ci vil +i ans +sy l +sh am +vo ted +re tail +ph illi +li sted +du ty +n b +th es +f are +au ction +ffici al +stor ms +d p +l oun +sh ops +al y +ani me +multi ple +ðŁĺį ðŁĺį +psy cho +je an +ap art +candi date +gg y +con f +jose ph +w ick +me at +fr ame +c l +for got +ph y +f ing +li ed +re p +se ed +f all +u fc +nu t +lin d +mo de +fiel ds +en ce +s ley +ðŁ¤ Ķ +ch ill +follow ed +announ ces +cor ru +tro phy +them selves +ac le +al du +k ong +l on +s v +bro ke +ander son +ta i +stor y +tempor ary +activ ities +k ati +ari z +cry stal +spo ke +extre mely +tra ding +ðŁĴ ļ +à ¼ +in ch +ed in +out fit +equ ip +ma di +form ed +be ef +po p +ti ger +this day +ti red +neigh b +re tro +is a +un t +t as +kan sas +de st +secon ds +ta y +hur ric +o u +galax y +dad dy +bro w +bur ger +en ced +de sk +ac cur +secre tary +el ite +k ab +ch in +touri sm +bud dy +ici de +dre ssed +u d +vac ation +che ers +com for +charac ters +j et +bu ying +l ins +n ap +reale state +li e +af c +i ii +f ame +n r +b at +ag ent +ma kers +âĢ ¼ +sec tor +op ti +le on +di et +pra yer +hi p +mi r +le x +br y +an a +pas sing +w en +reco very +ak i +po pul +res ort +mar ia +stu ck +read s +ti er +perfe c +netfli x +p oo +cham p +o c +re duce +we red +comm ents +cla im +acci dent +s ag +h ack +sal t +kin da +k iller +i os +z y +ex change +lec ture +eng er +ic king +t au +reve als +pri son +z om +gh an +u l +jour nal +i ot +tr in +jon a +govern or +cap e +quar ter +spec tive +impre ssive +bab ies +t x +m ill +o y +har ri +jo int +su e +collabor ation +tren d +revolu tion +re new +alum ni +ge tt +sh ell +sun day +ent u +ni c +donald trump +block chain +paci fic +expla ins +sp y +ad voc +par adi +to f +star ring +p av +fe ed +br ac +smo ke +ham p +y am +to kyo +si mon +d h +e ffici +phys ical +n j +ell i +s low +gradu ate +americ ans +ti fy +f red +ap ore +fin ds +rob in +we t +not ice +se mi +un ve +k om +pil ot +scre ening +da ily +ðŁĴ Ĺ +roy al +sp a +vo tes +n ag +wh ate +att ending +exper im +ad dition +k ate +sto l +m ali +foo t +chri st +ch an +de e +lic en +glo bal +mo ore +ti a +bri gh +myster y +y ay +âĿ¤ï¸ı âĿ¤ï¸ı +cre ati +me chan +clo ck +di c +âĢ Ķ +pp er +al ph +through out +al low +re sources +selec tion +ham il +bb q +aa aa +virgin ia +dis ney +en g +so red +drin ks +f ancy +consi der +end a +jan e +hand made +du l +on tari +i us +s ville +color ado +whate ver +whe el +promis e +ne ver +desig ns +ab ly +sex ual +vanc ou +at i +con vention +cul tural +sing apore +pro mo +load ed +gla sgo +pp l +n oo +ke e +ste m +men tion +i do +cru ise +ri ding +be comes +be y +âļ½ ï¸ı +tw in +dedic ated +na sh +de si +work out +jen ni +i v +grou ps +rela x +pho eni +li ft +mix ed +m ck +p c +mu st +me tro +ci es +y ar +a im +ang er +i e +rec y +marri ed +dro pped +eng ag +le st +ambassad or +op h +de s +w ick +assi stant +nat ur +fa il +l td +shor t +k ap +sha w +bi gger +rema ins +crit ical +sur vey +co verage +er son +win d +n b +bil ly +let es +ac ts +jim my +at lan +al and +t c +import ance +dam age +f g +stor age +tw t +bon d +bal ance +cr ying +pu ppy +vo te +pu sh +ðŁĴ ľ +pol y +me l +lon don +terr ori +effec tive +corpor ate +atl anta +jac o +nas a +gre ek +sen ate +i sh +ev a +intellig ence +effor ts +al co +k un +h all +di ag +claim s +fir st +h b +ba e +v ul +pu ll + ° +se par +spe ed +vic ti +on thisday +audi ence +r ates +te ach +fil ming +bu sh +son g +y um +br un +ra ine +aw a +par ks +ð Ŀ +ra bb +ra ch +ra 
id +reach ed +ra il +mo ves +selec ted +fr i +ra ising +om y +st ones +su k +franc isco +cas es +cap it +con fu +w tf +po ke +equip ment +gre g +ess ential +off ering +ne x +pi es +be c +cre ation +chair man +cro wn +w al +john ny +shi ft +ne ck +ban g +bir d +ðŁĺ ı +du ck +re serve +de pu +ma sters +over all +no tic +ju ice +sne ak +che er +cla sses +eag les +n ca +car pet +ci vil +coach es +har ris +u ps +b alls +dec or +mar tin +ro s +v ice +announ cement +who se +ti gers +ste red +c ts +dr am +ste el +youn g +inst all +supp o +recor ding +de ck +se ats +l der +ang le +bo t +sty les +elec tions +for tun +n ab +but ter +ari an +ka sh +in ner +ou red +be ast +we i +ic onic +exper ts +ne cess +b eng +jam es +li a +gre ece +ðŁĵ · +ðŁĺ ģ +good bye +m itch +tw ice +mumb ai +ste am +ru sh +med al +ne tt +fashi on +t ar +r s +sav ing +ric ul +l m +sleep ing +brook lyn +mis s +sen ding +disco vered +sp here +of theday +k icks +missi ons +w right +er n +ght ly +i ous +mel bourne +star tu +mo ved +car ry +d ak +ag ues +bel gi +e ma +way ne +do t +er ie +pe l +it unes +matthe w +no body +est ab +cal m +win ds +lu c +prep are +tren ds +exerc ise +adv ant +ðŁĴ ¯ +athle tics +app s +c tions +adv ance +laun ches +litt le +real donaldtrump +eliza beth +carol ina +hu b +hi dden +n w +us er +pol l +great er +mo st +f ed +p at +life style +s ati +sco res +marri age +l r +aven ue +de serve +ri f +ðŁ Ĺ +wat ch +champion ships +gr ay +en ni +cot ton +g om +whe re +pack age +su m +ab solu +new ly +foo ds +ty ler +assemb ly +musli m +ban k +re memb +op tions +produc er +land o +fun ds +u pper +shad ow +pro gre +co p +ing e +leg s +detro it +hill ary +jo se +gi ants +sou p +sustain able +t us +clo thes +roc king +n z +min ne +mat eri +bru ce +ear t +ca sting +independ ent +thou sands +ta h +de cl +veter ans +li ons +wra p +âĢ ¦ +de ss +bl ing +st ine +e ggs +o on +clo sing +z ay +at t +bac on +fa il +ariz ona +de pre +gho st +new sp +w ers +vi p +li ked +id ent +volunte er +ad ult +pu pp +cir cle +mat erial +degre e +gro wn +boo m +calend ar +su r +vie wing +ath letes +ch and +re ll +asi an +en tr +vol ley +victi ms +bo dy +m ama +trans fer +ge ek +in dic +sav ed +ma i +g ent +it s +loun ge +k ol +the ory +situ ation +is lands +ar th +z oo +floo d +vi ously +show ed +parliam ent +ch ev +el ine +at trac +ab ad +ta il +h rs +lu s +por tu +gor y +provi des +to ys +de ath +in fe +an ce +g le +li am +lo ver +hu d +dv d +reve aled +g w +re ment +ca the +l ying +ra dio +der by +stor s +che mi +hosp it +âľ ¨ +' : +ilo ve +le mon +re public +s ni +ne ss +do or +re action +pre gn +fla v +schol ar +spo tify +is ation +vis ual +aw are +spon sored +jo ke +less ons +leg is +lo ck +si mil +ðŁĺ ĭ +kin d +la y +ma h +ho ping +vancou ver +as er +clean ing +gal a +thre at +la p +ach e +ro mance +ex pen +re post +z am +e pi +mir ror +o ak +ad ul +bat man +s lu +l c +vie wed +re views +d ates +indone sia +acti vi +off en +lea f +i si +ag ricul +costu me +s ites +spir itu +appear ance +ir y +st air +applic ation +spec tac +ic ity +ski es +hand le +pun k +paradi se +t n +de al +provi ding +do c +recei ving +bre w +micro soft +à ¶ +fer r +me tro +th ail +y um +car ter +à ¡ +gent le +bre aks +coo per +show case +cu tting +egy pt +bab y +semin ar +gl ori +ss on +fa ve +re hear +lo tte +la dy +al as +pre p +deli vered +nu clear +ir o +engag ement +at ta +con ven +z an +gl ory +hol ds +busine sses +str ange +sch e +it self +gra d +mar kets +f alling +st ats +ge on +bu dd +li s +she et +thi si +co lo +deser t +regi stration +ig n +expla in 
+inter ior +la ws +writ ers +spr ings +k r +fri ed +blo om +inf ra +a o +cre d +pa st +line up +bo o +bre a +boo ts +celebr ity +att acks +bro ok +ev es +ex cu +cher ry +oo p +fas cin +boy friend +se as +n ine +effec ts +po wered +k ha +ðŁĺ Ģ +sh out +con dition +i j +her o +enter pri +win ter +applic ations +sho e +g el +batt le +pro grams +w art +ðŁĴ ¥ +ra p +ho l +dang erous +di a +coun ter +ric s +i or +k night +co at +emo tional +at ures +d as +whe el +fore cast +tran sport +glasgo w +king dom +prepar ing +im medi +ff in +awar ded +prin ting +ro man +fight ers +any more +bel t +p ine +win e +x i +employe es +logi es +al led +de mo +birth day +ange les +lo g +dri vers +neck lace +k ath +s it +athle te +ef s +s burg +pur pose +resi stance +rele ases +t is +vari ous +deli ver +ch al +s anc +opp o +cra w +neu ro +dr a +suppor ters +sna p +diffic ult +swe ar +logi st +pa th +attemp t +à ¥ +swim ming +ste ve +hur t +inclu ded +b ap +wa re +ðŁĴ ĭ +end ers +ja ke +le eds +cli mb +l b +im ple +li sa +clo thing +ðŁĺ İ +d t +com pla +sw ing +stra w +v als +k le +us ers +stor m +cu ts +ontari o +p an +hand some +i ow +ar gu +chec king +scotti sh +Ķ ï¸ı +si er +em ma +po d +patter n +de sh +en h +ed ward +t ing +k h +hal f +lincol n +mo ther +al leg +r c +volley ball +d n +g ay +all y +le ton +gro ve +l oud +adv anced +re spec +cli ent +supre me +thail and +ho w +gi g +to i +do t +dol lar +ðŁij ĩ +p it +r b +h n +produc ed +gg ers +âĨ Ĵ +ml b +can vas +fin eart +us d +in the +p son +actu al +s l +t b +ip ad +en sure +u mb +w d +sk a +mar s +k end +f eli +th ing +count down +absolu te +r out +dra l +p y +inju red +min t +hun ting +mm er +s age +li gh +ac ity +ex pan +mur ray +ar o +sec ure +four th +eag le +reli ef +st akes +industri al +clar k +under standing +see m +pl enty +sil ver +cla u +thre at +sa il +pro duce +ab str +is is +b r +eng ers +wor ry +bie ber +s j +just in +reali ze +ky le +esp n +fil ter +s ch +ty pes +game dev +d ing +twit ter +soldi ers +p om +car bon +y ards +child hood +ri ed +ke l +ele ph +t ons +key note +qui et +wi re +po sting +is sa +repre senting +bac ks +alex ander +celebr ates +ta ining +| | +ch or +esc ape +pe ek +ti ves +fiel d +ssi e +im pac +spons or +r c +we dd +cann ab +si des +trac ks +com par +con trac +techn ical +bi ble +expl oring +sh are +tra v +n ate +ill o +sc ru +m ingham +gun s +of the +sh ame +se es +ca tho +ac cess +ce l +repor ted + » +mari o +p ad +hope fully +ou se +y on +disapp o +ol o +p itt +pa c +ga p +cru sh +s g +k le +ge m +emp ire +dir ty +a is +avi ation +ze aland +fac ing +high way +d anny +spi der +ot ta +ðŁĺ Ħ +w y +col ours +in fl +co sts +olym pics +au s +h m +ho ward +pas ses +lau ren +mu sh +op in +r ho +disc ount +oper ation +em ily +mm m +cham ber +d il +to yo +shi p +sam u +pic tured +un ic +po l +keep er +carto on +st en +ig nor +n ations +n l +ta sting +deta il +offici als +mo tor +franc is +ed itor +ðŁij ĩ +pe ts +rang ers +t g +r n +w ri +nic hol +i se +spo ts +ani e +chec k +tri ple +ku mar +spe akers +ic ing +pre pared +ab use +friend ship +mon th +swi m +air e +sc ent +hamil ton +indi an +j es +yum my +te ars +da wn +i zed +worl ds +ðŁ ķ +b illi +st one +n hs +ba sic +p or +st le +ir on +ol der +cle vel +e ing +ðŁĺįðŁĺį ðŁĺį +prin ts +fir m +air craft +fin est +devel op +aar on +t z +gra ham +own ers +fo li +less on +qu es +bab e +cra ft +ph en +ju n +bir mingham +v ine +ll er +i an +fineart america +evol u +st ab +im per +war d +com ic +wi z +inv ited +du ke +mat ch +por ts +ro ger +diag no +ke pt +te st +vis u +r hy 
+so c +to x +b aker +sur face +co vers +man s +b its +x box +ff le +n an +gar d +h art +wat ers +v illa +re tro +light ning +catho lic +democr acy +neigh bor +pen n +cr an +jona than +la ura +vi bes +su b +coach ing +clear ly +uk raine +bra ve +commit ment +t all +mar t +ra p +mo di +sco tt +bro s +show er +ðŁı ¾ +âĺº ï¸ı +cou sin +appro ach +br e +com pos +hil ari +phil ly +g ad +quick ly +ri an +t m +vir tual +hou ses +k t +phoeni x +w ire +ff y +b unch +anc ing +tal e +snap chat +star ter +h t +k icking +ap art +th y +) ! +blo gger +it z +com fort +ang els +w ash +" : +ar gent +re quest +hon est +mi ghty +bo bby +k g +ro l +thou se +ex po +h c +tab les +mag ical +po sts +de m +n w +or lando +ab er +* ** +ðŁĺ ľ +environ mental +trans formation +mi le +w ic +hir ing +ma ine +bo ar +r ying +ti s +nit ure +twee ted +anton io +opin ion +fin ale +di y +f is +th in +trou ble +le go +fi les +qu art +sp a +curren cy +cli mate +fan art +rail way +sp ace +ban ds +dani el +mo tion +l eng +hol der +oc cu +mar ie +cathe dral +bu zz +bi es +nas car +bm w +bat tery +char lotte +doc tor +zz le +se ven +in san +d dy +st en +lab or +thr illed +se ren +docu mentary +wav es +cer tain +can did +allow ed +ninten do +star wars +ta p +home made +d les +ther ing +bre e +emp ty +pi ano +pos iti +coun try +por k +pu ts +per ry +m atic +spot light +ti st +or ities +we alth +c p +bar bar +commit ted +as sau +pro fit +e ight +hu l +fini shing +run ner +ss o +insp ec +char ged +christ op +lo sing +co al +ho o +ele v +de le +mo ham +don ation +c able +clin ic +j in +manag ed +ter ing +â ¬ +ur ban +depu ty +bb er +bur n +acade mic +o tt +sta ke +it er +sto wn +ack er +advent ures +ad ams +gre g +pro m +vo l +ac qu +con gre +pa int +citiz ens +c all +af ford +v c +as ks +the tic +independ ence +â Ľ +h itting +bl on +fu ture +â ı +in no +gen e +bo ards +di stance +se t +re mem +th al +pre vent +l ang +ob jec +su sp +mat t +in duc +bor o +pi one +re di +vir tu +prin ted +sco pe +shar k +suc ce +a stron +il legal +j ag +c ting +ine e +at o +rob in +nutr ition +b f +du tch +b n +fur niture +for gotten +at ar +ru p +hy per +bran ch +communic ation +degre es +on ia +un cle +promo te +or che +wi i +j s +but ton +ma jor +c bs +bri stol +premi um +ordin ary +e dit +m g +we ed +st even +: ' +gu s +te s +cap tured +dru gs +do w +wr ites +bi shop +whe els +ali zation +disco very +w r +rach el +ne il +hy dr +cu test +entreprene ur +kore an +ore gon +ul ty +perfec tly +suppor ted +histor ical +t wins +ell y +we l +de vil +in come +scienti sts +de leg +h en +on i +ic ed +gi o +cur ry +reve al +e g +buff alo +n ol +op era +camer on +haha haha +j ab +gradu ation +cra ig +r al +i f +organi zation +le ge +g ang +su d +edin burgh +l ack +fli es +g ate +thr ones +q b +the real +e leg +pp in +c les +jam ie +tn am +cryp to +ou l +p ages +a se +roo ts +stu pid +a did +boo t +prote in +s ap +si um +su s +end or +fun ction +don t +en na +ch y +squ e +wor ker +m tv +e a +k an +ðŁĴ ļ +mu s +professi on +t to +oper ations +al lo +c tor +inv ite +sc and +ou th +z im +lin ks +cli ents +sam sung +discu sses +n ell +ul tra +some where +ste wart +ine t +de z +b out +fac tor +ti an +tr ans +jere my +d b +ðŁĩ ¬ +or n +develop ing +spo l +coo per +ma u +rememb ering +tre k +famil y +sen iors +fo ster +att ended +w ing +trans form +ele mentary +hor iz +li sting +malay sia +it ch +warri or +philipp ines +russ ell +m end +initi ative +cre ep +to ps +br iti +a ur +shar p +adverti sing +ug ly +achi ev +materi als +bu g +dev ice +bon us +fac ility +col e +nh l +y 
as +plann ed +pol e +excell ence +tr ick +con fl +r p +achi eve +lo an +swa g +jess ica +ho we +p our +sc u +z oo +r ated +dre sses +re bel +mex ican +co ordin +me ss +atlan tic +t l +osc ar +wal ks +phar mac +investig ation +... # +cc i +eas ily +monday motivation +y ment +au ti +for ced +ar med +colle agues +pap ers +pro per +sha ke +bu c +le an +exhi bit +e vement +co tt +bi z +sp er +k ent +sw an +/ @ +girl friend +haw k +âĺ Ģï¸ı +mon o +ðŁĴ Ľ +stat ue +ðŁĺ ³ +ra s +te eth +preci ous +t ile +p am +swi ft +v ali +no se +dr unk +experi ences +come back +gen ius +wor se +sh ef +ra d +ed it +hon our +au spol +lar ry +h ire +gor don +achi evement +.... .... +su icide +alter native +su p +sur roun +sha ke +ke ith +pe pper +tur k +crimin al +be ck +su m +w alls +cn n +an tic +of fe +col li +win es +high light +hawa ii +emb ar +l fc +ðŁĩ ® +m v +> > +at mo +wor d +car l +shout out +bre wing +ì Ŀ +do f +s ic +hot test +col on +hh h +shu t +low ing +volu me +apart ment +agre ement +de stro +we e +religi ous +iow a +ro d +land ing +re present +ðŁĵ· : +la s +usu ally +h l +c ac +sal v +al ong +laugh ing +be ans +remin ds +pha se +some body +ma sk +ran ked +dest roy +sc i +â̼ ï¸ı +gab ri +le o +ro a +fa iled +si l +refuge es +re vi +r ing +ber ries +coo kies +y y +conserv ation +sh ab +human s +de termin +a in +ni all +as su +mb a +fro m +extre me +vic es +commer ce +ght ful +or dered +suppor ts +re cap +v or +dro pping +correc t +pay ing +mean ing +n j +qui z +" # +busine ss +ðŁĩ® ðŁĩ +indi gen +du st +box es +bl ind +x xx +zz y +ðŁĩ¬ ðŁĩ +ss els +s ant +dd le +hilari ous +desig n +wonder ing +vehic les +k re +ju d +rece ption +par ker +Ã Ń +pri vi +hy dro +sof tball +pol lu +lo cked +ba h +e ar +scri pt +di vi +br ace +geor ge +the ast +bel o +j al +tion ary +dent al +roc ket +pur ch +sh ak +manufac turing +e z +it is +con cep +tb all +ch s +direc ted +pra yers +oo k +phil os +vari ety +che ss +ser ver +g and +bal ti +ðŁĵ ¸ +sel y +cru z +spectac ular +bur ning +re present +i z +t one +mer ce +h ell +bed room +estab li +bo l +com mon +ãĥ » +ab or +kit ty +hei ghts +re pair +willi am +qu ake +alab ama +popul ation +re v +re tt +i sts +n ite +le m +a ha +clevel and +r m +po ver +ob se +mon tre +man ia + ® +con ne +car ni +sh ah +f y +u a +sc or +strugg le +bo b +' ' +appro pri +deci de +ff ed +ca ster +s ort +hun gry +dra g +ا Ù +gr ounds +d w +sli ghtly +car din +dead line +bron ze +web in +bar ry +sil ence +e uro +op tion +ear n +ðŁĴ ĸ +howe ver +na ren +na ils +bath room +v ine +ph d +min ing +gar age +( ) +shou lder +defe at +di r +o v +liber ty +ple as +x on +com pre +a v +j in +ab les +sil ent +fam ili +vis its +di pl +ha bit +milli ons +regar ding +innov ative +sen ator +r ts +v on +k l +wh il +requi red +âĿ Ħ +lu v +presi dential +po cket +hun dre +sho wn +fro zen +to ward +fa st +confi dence +r ough +indivi dual +qu et +ðŁı ½ +dom e +fi fa +engine er +z en +re mix +ðŁĺ ĥ +pl ant +min or +robin son +as y +pul led +cer tain +potat o +( : +pre s +oc ca +w it +it em +si e +d ating +thom pson +own ed +an u +vi e +te dly +good night +ex cept +ðŁĮ Ł +ira q +ki e +ren ces +li p +simil ar +sau di +vi g +arth ur +pic ks +mil an +hon da +ma xi +o g +ste st +ar ch +analy tics +ba sti +pear l +ter ry +hor se +ast ro +ac ce +laun ching +inter national +s no +ta sty +den ver +ir l +pe te +tor n +advant age +var sity +" " +sol e +g c +lan g +demon str +ol ds +un ity +ne ts +insp ire +cre te +nash ville +nel son +e ter +wal k +hy un +m ack +tre as +see king +ra ge +bru sh +ab and +whil st +co con +h ong 
+shel ter +i p +possi bly +so o +it ed +â Ħ +rac es +war ming +qu in +tele vision +mat ches +ra pi +ment al +pal m +jenni fer +rol ls +indi ana +b ars +cat ching +resc u +candid ates +fa re +âł Ģ +se o +vie tnam +alph a +michel le +visi ble +re gre +wn ed +app le +li p +f fe +li z +york shire +ha il +se asons +be gan +m d +k c +la p +fascin ating +hel p +ur y +u ms +nu ts +se m +along side +bri dge +ori al +o ve +world cup +briti sh +comfor table +i ve +hot els +fair s +hor ri +so x +d ining +stre am +bar ri +ss y +w im +ter ms +v u +pe re +l ens +wal ked +r or +l ars +shi eld +dou bt +pro to +cro ssing +me ant +medi um +ad ding +e b +che ap +fun c +pap er +bran ds +ry an +feed back +col lins +un known +tro pical +sand wich +fal len +for mu +selec t +lo ads +answ ers +or i +mag a +d or +du o +ali e +dru m +ur i +de er +sou l +sh ut +âĺ º +sto len +don ated +bu zz +patri ots +ha l +na sty +nomin ated +mon te +ki a +th ri +ing u +te sts +pe tro +ðŁij ij +ho sts +ne st +to pic +pat ch +m my +hu gh +ab ilities +ma the +s miles +g b +ag enda +insi ghts +chi p +ph an +fail ure +dg ers +ha i +signific ant +sho ck +ru ral +gl am +figu res +pot us +o ta +mini stry +appe ars +fe ar +r h +americ an +h att +son y +fi res +e di +n ou +e qui +wh en +univers al +mad ness +i x +sculp ture +b ach +t to +swe den +et a +en to +develop ed +month ly +ma ps +ra h +le d +del ta +sa ints +is lam +ben ch +fif th +v ard +so cks +wel coming +j e +tur ner +v b +ad i +nor way +ad y +hurric ane +por sche +tra dition +ex am +newsp aper +lu ci +a ver +ide al +d na +madi son +ðŁ § +wit ness +ac ou +insi ght +si mon +robo t +sna ke +n bc +ac o +ro ss +sh ment +religi on +ch ann +in su +camp bell +inst alled +we ather +hor ses +ol i +rober t +k az +ðŁı Ģ +veter an +th read +quar ter +ea sier +cap ture +hi pho +law rence +roman tic +pas sion +cl ay +ox ford +th ai +stu dying +fi a +elec ted +most ly +c b +tu mb +âĢįâĻ Ĥ +x l +sh an +fa ster +ev ans +sli de +sh ri +see k +mi es +chemi stry +pump kin +tu m +, , +ro om +fi red +li ps +pres ence +af f +brew ery +arri ve +sw ag +photo graph +pen gu +chi ps +at tor +val ues +accur ate +con temporary +princi pal +cannab is +ari o +any where +gi a +democr ats +buil dings +li ved +ap s +neg ative +m are +bal lo +li on +diam on +loo k +re form +tom my +il la +tre ats +hundre ds +port land +wor thy +ex cep +ar ia +ido l +be er +cd n +y u +aw k +ðŁĩ ¨ +c ells +à ³ +ident ity +dra wn +de vil +f inger +th am +ðŁij Ĭ +ear ned +fin tech +dol ph +twee ting +evolu tion +ðŁĵ į +est im +m vp +n one +ðŁĩºðŁĩ ¸ +toyo ta +au x +mar in +b old +l bs +ste ak +mur phy +it able +lou is +sol ve +pi a +sk ir +ill ino +webin ar +ban ana +lo v +th on +vo ters +afford able +defe ated +lm fa +air lines +super b +any way +deb t +bo red +ver si +me tal +responsi ble +m k +s se +f ay +cau sed +f p +recomm end +pla za +spor ting +alli ance +au stri +n n +t ours +surpri sed +arti f +th under +sur ve +wor e +bri ef +necess ary +z ie +ash ley +dra ke +r t +kni fe +im mun +char ges +a the +bri de +rep ly +g av +broad cast +pu er +brace let +cap acity +harve st +id k +perfor man +d ding +il ers +par a +jam a +pro vince +ch in +id ers +har i +te aser +ch en +re stor +r at +fl at +col om +ðŁĴ ŀ +ðŁĩ¨ ðŁĩ +smoo th +r t +p itch +stay ing +isra eli +t cot +per spective +do ck +open er +lo vel +x o +class room +l ington +go al +kenne dy +sh am +sp aces +mitch ell +home coming +uk i +claim ed +recru it +ing o +mu fc +mon it +g roo +resi dent +per cent +per man +otta wa +int ment +an xi +stand ards +wor ship +sche me +f x 
+pot ter +bi an +athle tic +af gh +s se +sat ell +par ties +âĿ¤ âĿ¤ +infra structure +rela x +mo du +wor n +smo king +y ach +practic es +wc w +am b +dome stic +tay lor +k entu +provi ded +mo di +ve g +" ... +ob serv +ðŁĺ © +be ard +m our +an gry +ðŁĺ ± +startu ps +woo den +di ve +na il +anti que +ro ses +torn ado +m at +^ ^ +su spect +far m +de vices +me ga +tu l +scholar ship +ge e +disa ster +arri val +po in +mar c +kati e +bb ed +fal se +deser ves +ric hard +ju ana +fre y +tion ed +hy bri +r w +sar ah +ach i +c ure +o le +mor ris +ch ic +broad way +la bel +pa k +pover ty +gol f +e red +f u +er ies +be es +alo gue +st el +wire less +je wish +ti de +blo cked +life time +b har +sp lit +am ster +th i +jo shu +br unch +ha ps +s for +oo ps +ka poor +hi king +suppo sed +ro of +re as +tra in +ti ght +tru mp +bas ically +r r +ea red +see ds +entr ance +c p +wi e +son ic +vic tim +he re +e h +ear rings +sal mon +arc tic +an ne +dou gla +corru ption +hann ah +ha sn +vo ices +con ce +att a +fle et +clin ical +democr atic +ton y +st ood +le f +twit ch +a il +honest ly +incre ased +dro me +don na +accep ted +visit ors +ap ar +ad or +p ar +jer ry +ra i +brand on +ab u +!! !!!! +me me +in gh +glori ous +b hu +pu mp +j ol +li ke +fi sher +ma z +ag an +destin ation +play list +le tters +gen u +br ace +celebr ated +bann er +r he +dra gon +ðŁĺ ħ +sig nature +gre y +âľ Ķï¸ı +al ice +be red +ph er +ber n +ca th +ga thering +sc oring +influ ence +sm iling +de pt +lo cal +a x +ac u +reti rement +hon or +her self +chem ical +asse ss +y all +fre qu +appreci ation +ac a +cho ir +cu z +so il +c il +repor ting +u h +enterpri se +gr at +jaco b +ru m +fe e +j ak +sp in +bi kes +phi a +ste re +p is +bloo d +t att +ra ft +war ren +sh eri +back stage +mar sh +hash tag +ther ine +re in +game day +guar an +reci pes +min ds +stron ger +issu ed +bic y +n ak +ment ed +sc ary +u x +pre vious +tt le +th ats +ac tors +u ma +tin a +bun ny +promo tion +u ss +oli ver +montre al +what s +appreci ated +la kes +excu se +kno wing +pri zes +musc le +shad es +sco t +ing redi +electr onic +ju an +comb at +s ri +e h +turk ish +l om +stri kes +pri son +re e +po pe +vi d +ol dest +dol l +sw iss +certi fied +cli p +re turning +lat or +le igh +tt es +wat son +heal ing +el im +per haps +ha ss +k au +d der +mou se +new castle +indigen ous +wel comes +co le +tau ght +no ise +appe ar +jo e +can on +wedne sday +u tah +c tive +dri ven +i v +c ell +stri p +ac c +focu sed +ar rest +sto cks +wo o +â Ĺ +notic ed +shad o +di spla +ter ror +bor ne +secon d +que ens +wo ke +ja il +no tt +cam bridge +har t +se af +fa x +ac cept +âĺ ħ +goo ds +k at +t win +h s +thou sand +s ins +su ite +amp ton +ar n +rele v +ric har +hoo ps +n bc +class ic +p ab +soldi er +de plo +le ans +install ation +cla sh +le ban +ee e +ti re +belo ved +fu sion +travel ing +ne i +coo kie +glo be +phys ics +s q +co l +wol ves +d l +ex it +" - +foo tball +le af +ster ling +hi de +minne so +fresh man +natu re +indi e +supp lies +bri s +iri sh +ink tober +doo dle +ic op +mess ages +adul ts +recor ded +fix ed +ar do +offe red +under ground +dr one +p ine +ma inten +and re +ham mer +s x +r ound +hi ke +bra d +ro me +fu ll +on ey +ro ws +colum bia +archi ves +appro ved +bat ch +illino is +recogn ition +shou ldn +fo g +nca a +ke vin +human ity +al though +pow ers +p ou +s ar +pe st +alco hol +con sci +phil adel +en o +t m +ok la +cate gory +particip ate +accu sed +bri ef +po em +clu bs +consul t +ja b +big data +amster dam +ac ing +certi fic +n u +d at +impro ved +and y +campa ig +pale stin 
+p ace +mo bi +feel ings +wol f +bra in +pro pos +inter active +prin ce +inde x +c is +cha e +peace ful +co vering +ac o +cour ses +mon key +re place +b l +bloo dy +tal es +brigh ton +neighbor hood +g ates +spiritu al +af raid +bre ast +b ones +ðŁij ī +vide o +w au +tou ch +inju ries +car l +ri x +une x +âĢ ¢ +fre d +consi dered +thu si +an ch +on y +u sa +graph ics +ac re +ðŁĺ © +com memor +com mod +go ti +guar dian +star bucks +pre vention +haha haha +admini stration +portu gal +fac ulty +bet a +ul a +al bert +bre ath +er i +le tting +tr ic +ment ation +incredi bly +ten nes +v d +ðŁĻ Ī +ed die +br ick +gr ill +bt w +wat ches +resear chers +t ney +ni e +p as +a ster +vi br +poke mon +ch rome +go at +pitt s +il ly +festi ve +y d +can al +ðŁ Ĩ +fi es +car los +re que +partic i +tra ins +sam ple +temper ature +sym ph +pic king +in door +z ers +playo ffs +____ ____ +ap es +ly rics +islam ic +performan ces +d ick +spar k +se as +hom a +gr ound +disc i +employe e +com mu +alas ka +al an +fe ast +dg ing +ban king +manu el +slow ly +tru cks +mc car +oo o +sc rat +orche stra +indivi du +m x +bre ath +stair s +equ ality +bla ke +loc ations +cocon ut +balti more +aa a +l c +ðŁı Ĩ +har vey +resi st +immigr ation +adid as +fil i +re f +lg bt +mo s +pp i +ken ny +terr or +ban e +apol is +s g +social media +ka i +hon est +as sas +bol lywood +âĢįâĻ Ģï¸ı +ferr ari +hor n +cryp to +bo om +mainten ance +i di +s man +w l +ext ended +in sul +ve s +go sp +tr i +pi g +tar ge +cel er +st ati +sm h +ri dic +appe al +? ) +con clu +cos me +she ep +christop her +en thusi +po lish +me ts +oun ded +sustain ability +creati vity +con crete +ra i +ali en +ble ss +te es +clu b +ro t +bo s +ex ist +perfe ction +lu ck +rock y +expen sive +mean while +happy birthday +pre t +thr iller +ca ve +playo ff +som er +l u +le x +def ence +am writing +home less +pro phe +ch et +past or +ðŁ¤ £ +land er +ww w +Ģ ï¸ı +tic a +! # +o tic +rad ar +po sters +pow der +po li +ha un +tra p +bl in +assau lt +shor ts +re y +sh y +squ ir +rac ist +gar lic +fu r +remo te +sm ell +impre ssed +fing ers +âł Ģ +din o +le ment +s nu +promo ting +str ing +produc tive +b age +ma son +ra z +direc tly +j k +ev al +ðŁij Ĭ +doc tors +co w +ri der +st v +re move +w u +na than +ro d +n r += > +affe cted +inve st +mp tion +g inger +o d +agricul ture +s que +mu g +coun ting +ke e +mag nific +coo k +ani stan +roo t +plac ed +sym po +gh ana +un d +che er +thro wing +secre ts +f illing +opti mi +butter fly +bu bb +ðŁĺ ī +terri ble +d g +sil k +obse ssed +lo u +ai de +sal ute +mon u +philadel phia +scienti fic +i st +u ae +dess ert +bott les +can yon +ðŁĺ Ī +car ib +o ther +w ich +re source +guil ty +un d +le on +e ss +kan e +el e +tra iner +he im +an te +man age +roo kie +tre ated +po ses +rs vp +cau ses +aw ak +je well +le tt +on ics +tit les +cardi ff +g aga +bu mp +use ful +? ! 
+loo se +bb ing +: : +argent ina +de bu +cy cl +wh el +dis gu +j el +k ills +bio logy +ex ter +tra sh +bo dies +tr am +circu it +expe ct +la ds +w ells +sho t +ge e +naren dr +fa stest +b ent +b ills +mar shall +h ats +intro duce +citi zen +im possible +gi b +az z +net working +r ant +thin k +in dy +st ops +f theday +bri an +* * +amo di +dom e +coura ge +pac king +af fairs +g n +si zed +ent ary +pol and +swit zer +afgh anistan +w u +ten der +subscri be +mo sco +att end +republic an +hon ey +âĢ ĭ +si mul +we ster +foo die +or o +midd le +ab t +co pies +ma je +narendr amodi +ty pical +inspir ational +vit am +wis con +cu bs +tiv ity +h ali +e ars +k ay +d are +mari juana +cu rious +an ia +tom ato +re mind +ðŁĩ · +sc ared +cou p +po et +land ed +ri d +wra pped +mor ri +climb ing +e ws +fe eding +con tra +tho logy +gri d +ti vely +read er +la ser +di ving +di g +lat in +ti ed +shake spe +o ci +ad m +show ers +chu ck +mar cus +oo s +kne e +o live +ow l +dy lan +an no +g ym +deci sions +well ness +arri ves +sati s +chri s +thur s +ðŁ¤ £ +inter views +thank you +switzer land +over night +journ alist +ser ves +vol can +.... ... +plo t +nic ol +car rying +mag ne +tre asure +ex p +be ver +ðŁĺ ¢ +mar ty +mo le +don ations +recogni zed +b h +du s +sh ann +al do +success fully +ent e +ðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤ +cab inet +cu is +tit led +d as +so l +strate gies +deli vering +ad ds +ani an +ne ther +ðŁĴ ĥ +con tain +su its +pa irs +to dd +rel la +ro pe +ci o +cro p +paint ings +su z +re jec +bu st +d h +fra ud +m h +contro l +je al +destroy ed +al lows +wo ol +minneso ta +om en +j u +sympo sium +d af +lim it +accoun ts +load ing +inter n +re solution +hol land +qu al +meet ings +gra ve +cam ping +v am +re nov +liber al +am ber +gre e +hu mb +fe ver +el ing +broo ks +à ² +be th +ad ed +al t +ro e +perform ed +jo sh +frank lin +nic ole +de ss +bb s +m g +net works +min im +al t +weap ons +gu y +jas on +g ha +harb our +at on +pra ise +kentu cky +bel fast +st icks +blo ss +ho pes +an thro +famili ar +wa it +ch ile +depre ssion +la x +je ts +le ice +recei ves +si er +an k +de x +inde ed +fle xi +fab ric +lam b +hel icop +am anda +âĢĶ âĢĶ +compe te +sn ack +techno logies +sy rian +mom s +mu ham +cho sen +an at +dev on +shar ks +re t +fundra iser +selfi es +st ations +communic ations +tennes see +tu tor +ro t +valu able +dynam ic +nur se +i ed +earth quake +deser ved +a ve +sar a +stre tch +dougla s +ne pal +à § +ob viously +d ame +ra pe +any body +k w +pat rol +hol ders +h anna +info graphic +ec o +be ating +stan ley +bo ats +ri bb +e z +wit ch +inv a +ac id +boar ding +- @ +gi l +da ve +care ers +opp os +l loy +in ter +do pe +re su +j agu +sh ade +in dy +on ist +rel ations +ag en +ab le +inci dent +me ter +shar ma +id r +pro ve +immedi ately +tro ops +am an +g low +gaz a +blo cks +person al +chron ic +all er +si d +sh r +whats app +lu cy +ar chae +ho u +journ alism +our selves +go t +the med +shap ed +we ak +cas ual +leng th +sla m +ab bey +e v +coun ter +est a +reci pi +cha pel +expan sion +sel f +suff ering +sp ice +n z +sp art +desp er +boo king +quart ers +y on +ðŁĴ Ĺ +p k +continu ed +- # +man hatt +tal ked +sh en +com bo +hybri d +je ans +liqu id +se al +re tweets +ac celer +collec tive +t as +: )) +profession als +ra w +o tt +su san +ir ing +okla homa +re ven +survi val +cre ator +tran sit +st ac +sur f +i k +ed iting +ch illing +bai ley +ste al +ra ble +pa rent +hun ger +sn app +collec t +philos oph +dedic ation +c f +c m +le ep +repe at +re ha +un fortun +a er +a ero +abstr act +mon itor +ag ents +bu l +sci 
ence +harb or +drag ons +floo ding +ac compli +d ash +juli a +the red +tues day +cy ber +b low +ta ined +le m +refe rence +pp o +ne goti +char le +con nor +au lt +access ories +commissi oner +rain y +re ar +advis ory +luc as +ma id +co al +k av +pol o +ðŁı ¾ +tran sport +mar gare +straw berry +bur ns +gre ens +ne v +partici pants +col in +belgi um +col our +in form +d ell +br on +cal y +kick off +strate gic +re union +hon ors +li b +egy p +âŃIJ ï¸ı +hy po +si zes +regi stered +bet es +relax ing +bloo m +inten se +valent ines +insan e +w wii +p x +tri o +bla de +wiscon sin +con e +plat in +ali ze +ra ven +incre asing +indi ans +il ian +bl u +rabb it +exten sion +je f +au di +fer ry +s ell +a day +us b +swe at +cham pag +metho d +mem ph +assi st +s by +ca pe +remo ved +mag n +v t +r ams +f bi +tack le +phe w +h on +motor cycle +su spec +eleph ant +sub ject +let te +da iry +whe at +awk ward +ac t +tro l +mit ted +zay n +sheri ff +ene my +con s +ke tt +bul ls +ev alu +bt c +satell ite +ho lo +por ter +dia betes +bet ter +rele asing +sur f +: - +se basti +collec ting +en cing +e thi +go ds +al ley +health y +m ills +sma sh +co pper +cr ack +read ers +sp ac +licen se +bas ket +bang la +en tic +om i +m ere +si vely +anim ation +lan es +dent ally +chill in +fi e +k aren +dep th +li pse +n g +ri p +mel o +sand y +ðŁijı ðŁijı +vin cent +nu t +hu g +who le +cre ates +? ??? +âĿ¤ï¸ı âĿ¤ï¸ı +bak ed +up grade +rober ts +har a +carib bean +auth entic +mb s +mosco w +attor ney +wi ki +ch lo +hu ll +cor k +" ! +sty lish +ðŁĵ¸ : +di ary +impro ving +ex pand +bri ght +pollu tion +k nights +person ality +chec ked +fac ilities +z el +bow ling +gu er +ðŁİ Ĥ +on going +un its +hoo k +be ck +confl ict +to dd +far ming +educ ational +k ak +cla y +stro ke +bel ly +explo re +mill enni +th m +loo p +sm s +consi st +cir ca +br yan +d ab +youn ger +soli dar +pp a +experi enced +b ella +bo ard +shef field +steph en +consu mer +sub mit +spon sor +t ang +ag gre +comb ined +trac king +sand ers +b az +survi ve +fer red +equ al +se p +re ed +str ong +priv acy +st ap +un g +ac ry +pa sta +pir ates +ag er +fair y +du p +introduc ed +wi p +let s +spr ay +ðŁĵ º +gre w +a sts +pitts burgh +new york +jo ey +lau ren +tra de +ch op +pi pe +cla ire +behavi or +v ap +cre ws +lap top +ðŁ¤ Ĺ +che ster +disci pl +d f +out doors +k s +go ver +super star +cas ino +far mer +; -) +re turned +ðŁı Ī +ma il +roa sted +co sta +v ill +pe z +gard ening +distribu tion +sh ining +inve stors +ra sp +dec ades +reali zed +bar n +p ti +st able +ut d +pan thers +m ens +b n +ca de +bu cket +yn n +when ever +wa ke +da is +ber nie +lo dge +ju lie +atmo sphere +ðŁĺĺ ðŁĺĺ +major ity +par ti +exc it +cu t +me h +musli ms +be gun +fli ghts +vene ss +ce me +po sing +so le +g ou +dark ness +pe ach +cel tic +auth ority +grand ma +ful ness +smi th +speci fic +gar cia +co ins +good ness +aldu b +recru iting +den nis +gar y +sle eve +weap on +pl z +disco ver +harri son +recruit ment +ja i +ch im +com pared +tom s +mo thers +am y +archi ve +t ask +ben jam +se g +law yer +al um +inve sting +mi e +che z +j p +a ke +fl am +wall paper +âĻ¥ ï¸ı +t ton +che st +favor ites +we igh +coo lest +r ating +relev ant +lo gan +ma ple +run ners +pri or +peop le +ma ur +terrori st +te sted +carni val +su spen +me asure +m v +cyber security +app ren +terror ism +o z +v ital +ni es +gon z +fun ded +twi st +assess ment +die sel +en for +colum n +ad dressing +ca sts +pay ment +x ton +fi er +, ' +la st +ne e +un less +clo se +sk ill +cuis ine +fun eral +ti les +a un +k ru +relation ships 
+ðŁĴ ¯ +ev ent +âĢįâĻĤ ï¸ı +kind ness +pro posed +acou stic +a es +defen der +dan ce +h tt +w at +vo y +ðŁ¤ ĺ +au s +cli ff +sear ching +beauti fully +in qu +at l +speci alist +ðŁIJ ¶ +da i +tra ils +class ics +inst ant +v ous +re venue +mar ch +kir k +fr inge +fire works +tri via +âĺ ħ +tr action +wal ter +mo to +l ily +att itude +cli mb +sc an +sav ings +c w +fa ith +cred its +ab led +gra ff +auto graph +he he +ran ch +ha d +ro gers +ðŁĮ ¹ +f in +re qu +fol k +ad ditional +lyn n +u ber +dol lars +lo gic +wor th +so m +the sis +p ound +bi c +st ur +cer am +spen cer +en tered +v amp +organi zed +âľ Ī +pp s +tr on +merce des +no ti +compet itive +do w +ous ness +vic tor +gr illed +na i +pu tin +ab ra +bl ame +alex and +anim al +dec ent +p ent +inter ior +:' ) +but ler +bal let +ðŁĴ Ķ +albu ms +down s +la d +si r +pla in +p ers +blon de +dis c +paki stan +se ment +ga a +w age +ch as +man i +co ps +terr it +lo l +lau ghter +ri vers +magnific ent +lam p +w b +new sle +char ts +ble ssing +p unch +lon gest +fl oral +cu tie +fare well +sto pping +mb b +bu d +chee se +de cla +si m +mc donald +de ter +you th +t ch +fre der +kin dle +fer n +at or +as leep +p ond +spr int +p ounds +la zy +gh e +fundra ising +dead ly +gran de +dou g +he y +lin da +consi dering +i um +gol den +vi k +auth ors +di ss +u ally +appropri ate +mor ning +y le +hon oring +foli o +be c +re bec +fin land +formu la +corn wall +sh ay +cau sing +bl end +sig nal +t ent +kash mir +nation als +har mony +sc out +acce ssi +he ight +medi eval +impro vement +ke es +prac tical +car d +de par +hu n +om ing +cal gary +ste l +bu bble +gur u +ma h +unex pe +n h +ed a +me at +i ge +si o +god dess +in ches +tun es +br itt +sti on +ra j +âĻ « +mer cy +ðŁĴ ĺ +sen ds +i est +pol ici +val e +reduc ed +as ap +vi jay +defen sive +celebr ations +ri ders +med itation +har mon +g ing + ¡ +program ming +in au +sud den +m h +replac ement +sk u +j ar +gra des +ta st +k itt +brand ing +k aw +boo t +f ought +p ays +g f +iz ation +ho p +k k +activi st +v end +coast al +cha os +ðŁĶ ´ +se me +bill board +li fting +cu mb +sc al +ðŁĸ ¤ +stru ck +l v +indie dev +beat en +jun gle +al right +destin y +m ing +k c +ch ances +om an +q atar +cra f +tra ined +pri x +char m +o tive +s mu +e c +and ers +hand ed +al ban +certain ly +arri ving +i ze +sa i +tr ack +pain ter +hu mble +appo intment +head line +manag ing +mo d +as pe +andre a +à ¤ +ethi op +un ited +exi st +bal i +k ad +n t +d red +re x +recogni ze +tam pa +be ers +ati a +he els +no te +transport ation +tur tle +re de +hipho p +sp icy +sp urs +⬠ĩ +cor p +ther n +to ast +hur ry +proper ties +ma ge +mar co +ele ments +bou ti +syn drome +ms g +develop er +gra ders +he im +re sil +off ices +del ay +di men +vin tag +barbar a +ðŁĺ ± +vene zu +cu lar +fac ed +bar n +ðŁĺ Ĩ +survi vor +wor m +confu sed +passion ate +Ø ± +identi fy +electr icity +sou ls +brad ley +repor tedly +lun ch +shel f +eli a +swee t +smoo th +emplo yment +am el +manhatt an +ste am +oun ts +ye p +li ving +un e +descri be +ca res +man ila +sha wn +ac ted +bas h +st even +re st +pet ition +div ine +wel sh +rac e +platin um +ðŁĮ ¸ +p b +extra ordinary +solidar ity +m all +on ion +schedu led +game of +fer gu +de ms +nor m +p k +tri als +polici es +publi shing +st ole +fron t +charac ter +van ia +ex ce +sti e +sc a +resi dential +sa iling +ðŁĶ¥ðŁĶ¥ ðŁĶ¥ +spons ors +th ick +champag ne +she pher +continu ing +ven ice +per th +na p +a ster +y ak +un limited +cho ices +ne o +hi v +repor ter +bru ssels +f old +dy s +se mi +la wn +it alia +wi fi +as k +em ed 
+fr ame +monit oring +ste ad +i da +gr in +is a +fli p +re stric +offen sive +atta ched +di sh +wh y +philli ps +gre et +p als +mix tape +v ou +fiel der +spar k +alber ta +g len +ca sh +s ri +u ri +ro dri +entreprene urs +climate change +p sy +d le +em ents +lin ked +nether lands +acci dentally +oppos ition +vel vet +ra ys +c w +om o +m f +lmfa o +newsle tter +: ) +toi let +liter ature +di sp +phili p +uni form +sudden ly +head er +cool er +-- - +prou d +bri g +nis san +scienti st +j ah +con centr +pac ks +appo inted +so ap +eng age +cho se +âĻ ¡ +se tup +jeal ous +har ry +g ation +tun nel +te mp +osc ars +dec ade +recomm ended +child ren +ab a +anxi ety +ve ments +sal on +pho too +organi z +mach ines +ab s +vil le +hy pe +ti ff +emer ging +av geek +[ # +contribu tion +bra dy +re sto +g mail +fit z +photo shoot +hel met +h t +eleg ant +ug anda +nur sing +or leans +pen n +na h +foo tage +em a +w o +w ad +concer ns +ve re +re mark +who ever +str ang +p t +qu it +sh ang +histor y +s ick +perman ent +ill ness +col d +visi on +he m +ar row +con vic +pin k +oc cup +bal d +ex hau +u of +am o +on t +ãĥ » +adop t +la id +smo ked +inter pre +ess enti +associ ated +b d +bb y +fi er +inst all +dipl om +con diti +c f +w ak +any a +gr aci +fi sher +s ss +ap r +il it +mus ician +symph ony +cor d +h ack +le gi +l v +bless ings +hum or +sc ra +e ti +min ster +trav elling +bu sh +jewell ery +li me +!! ! +pregn ant +pe e +lo b +cap ital +ip a +pen cil +la bor +duc ks +prou dly +wedd ing +dere k +m w +pe g +valent ine +an gu +re treat +pro spect +dang er +vul ner +up set +, # +sr k +x im +thur sday +n fl +kis ses +re ds +cr ack +re ward +c u +ko k +me te +aband oned +it t +me als +sp ell +stan bul +del ays +ru m +le op +gu m +no va +super man +ch ick +m is +dram atic +inno cent +r ounds +re c +auti sm +bangla desh +mor al +mo vie +sp oo +k la +âĥ £ +ou ting +mess i +ab road +loo kin +a im +q i +st ack +colla ge +à ¯ +hud son +sc an +ho e +ch au +oc cur +comm ander +ho les +ðŁİ Ħ +bi as +v on +stick er +ma k +responsi bility +colum bus +sa int +ed mon +rac ism +far ms +w en +gul f +may o +!!!! !!!! +corpor ation +ba chel +el a +inter nal +je ep +fol lows +di alogue +de rer +smart phone +he len +rich mond +equ ity +s land +b g +ne ar +av i +memph is +we ir +discu ssed +bad ge +p up +mi stake +phen omen +un ite +ðŁ Ľ +de pic +ri des +in augu +n at +sof twitter +comb ination +gosp el +âļ ¾ +ad mission +retro gaming +ðŁIJ ¾ +sch u +mb o +jun ction +al arm +à ¦ +gr ac +kh ali +k ul +m ale +cap tion +wi sh +te re +cor ps +ru bber +play station +er in +effici ent +l or +jo kes +in ary +nor man +lu is +inaugu ral +ch ed +âļ½ ï¸ı +di p +to e +str at +aa c +am u +pi er +co tt +comm and +tt en +sn oo +cu be +clo ses +class ical +s word +expre ssion +reach ing +n app +co st +affe ct +ric o +gi f +brea the +tri be +or tho +h ay +l g +fri es +n m +hi ding +richar ds +en de +mic ro +capit ol +cop y +ro m +regi me +mary land +tax i +di al +embar ra +un believ +ch t +v s +elim in +o dd +pen ny +sound track +l ings +trans ition +rema ining +a is +mali k +? !? 
+rand om +def end +ul tra +tru m +danc er +st ol +dri ve +a ver +ro ast +defin ition +se an +excit ement +partic ul +su rely +sh av +ber y +di shes +com m +is ol +i am +ob li +gho st +hugh es +chi efs +b as +conserv ative +speci al +fe min +sh ri +n ancy +inte l +tu ne +ðŁĩ ª +jo el +gg le +mo to +ðŁĺ Ķ +bu ck +d ag +antic ip +mont ana +gu id +fro g +ec raft +op e +dri ves +nu mer +x y +color ful +wednesday wisdom +illu min +bey on +inau gur +deep ly +pre fer +for tune +coo ked +ti ble +âĺ ķ +swe ater +it ter +tt y +u i +gi e +com plic +~ ~ +tax es +cu ps +di verse +sam anth +âłĢ âłĢ +ba king +sy mp +wa i +be half +mer cur +travel s +ðŁİī ðŁİ +or ia +eng aged +jump ing +reti red +n aked +p uni +speed way +sci ences +rehear sal +on ym +dy ou +pl ates +r ati +kri sh +jaz z +car ol +ra f +pen alty +tim eline +ru by +engine ers +ra f +bel le +do se +che on +esc ap +me g +ran k +or d +me gan +mer ch +ec lipse +âĺº ï¸ı +ple dge +kir k +per si +leice ster +sa k +w k +saf ely +yy y +je t +promis ed +j c +en ne +no ah +re no +re a +ðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤ +tra il +ðŁij Ģ +f d +soo o +ri min +w k +ภ² +i al +x ox +bis cu +d ale +fan dom +particip ating +fla g +privi lege +pe ach +mach ine +bo ston +gro ss +o g +mir acle +adop tion +u ss +mon sters +be ij +clar ke +pu shing +pra ying +ar o +d n +ell is +apol lo +od ds +refuge e +to w +b p +ðŁĩ¬ðŁĩ § +h end +app eared +memb ership +pe an +du m +viol ent +v y +potat oes +aw w +greet ings +t ts +ac on +sh ane +photograph ed +cra b +temper atures +cu ba +c fc +wel com +he l +in nings +m k +co de +kno ck +gra ss +swe dish +p ta +ick y +v at +lin ing +s q +sa p +ar c +announ cing +sk ins +cit yof +br ing +co x +gam er +it arian +i da +h d +ros se +sad ly +ge o +âļ ¡ï¸ı +tag s +fa ther +chan ge +l ance +whis key +adel aide +te c +stick ers +marke t +class y +bad ass +flo rence +lin er +fro st +k ate +ac on +scand al +es sex +ðŁĺ ı +vi vi +dr ill +blo ggers +recomm end +d ha +ac res +ro ma +bu y +gro cer +er ia +ma har +ff er +patter ns +ver i +com pu +st ev +ang a +ment or +do o +it ali +cdn poli +on ly +conduc t +elec tro +de f +wh ale +prepar ation +bicy cle +vi ral +turn out +bra ss +qu ad +hospit ality +pack aging +den cy +ceme tery +abo ard +dre aming +pic ture +t all +inv ent +ad mi +o e +tem ps +qu an +fun dam +pro mp +resi dence +mu d +sour i +âĦ ¢ +graff iti +gi f +d nd +com p +s war +pe eps +pale stine +devil s +san g +assi stance +bi ke +missi ssi +inter viewed +ne phew +dru ms +v and +gentle men +n sw +inst a +leban on +ee ee +oli via +ver y +rou gh +industri es +m ation +ðŁĺ Ĵ +bar rel +n ay +po ps +moder n +ill y +are st +on ents +protec ting +v ans +e o +vi kings +restaur ants +re ck +jac kie +andre w +w illing +he ath +citiz en +disc rimin +à¹ Ī +stu art +m ys +hi p +tran sp +" ? 
+te x +su shi +ke d +cro ssed +dist ur +pe dia +f ate +some how +mo th +proce ssing +is s +r in +u ts +yy c +ver t +lg bt +re id +on to +arab ia +habit at += = +stre ak +simp son +addic tion +wim ble +deli vers +challeng ing +ðŁİ ¶ +fran ch +e du +s me +ai ds +hur st +th am +tari an +remem bered +palestin ian +fe es +tru m +sket ch +ur u +fit ting +jes se +ðŁĶ¥ ðŁĶ¥ +---- ---- +ba ch +ici a +colo red +da h +associ ate +int el +s eller +p u +stu ffed +ac s +b s +sh in +cooper ation +certific ate +ab u +ingredi ents +re v +in ge +el der +christi an +bun dle +th ic +dir t +beij ing +comm it +ted dy +ed u +to day +s field +w yn +confir ms +lo o +j v +ene ss +al pha +vir us +ari um +gr ind +bri dges +introduc tion +pol ls +bac ter +z ach +termin al +ra iders +fla vor +zom bie +vo d +sp reading +gameof thrones +effici ency +lat ely +ale m +twee t +cri mes +cl er +de y +dg ed +hy un +pay ments +cir cus +ðŁĺŃ ðŁĺŃ +mis souri +lu b +episo des +c age +po s +mat ching +tumb lr +lin ed +ge st +am bi +nar r +ing ton +regu l +blo wn +is le +co co +on don +joshu a +tour ing +sm a +sau sage +best friend +bo eing +desi re +sav age +ra pper +de vo +te ar +take over +cow boys +po ker +par ag +pp e +h int +we ars +se th +ro les +l anc +man ga +form at +fl yer +c ay +mo or +ba ke +spla sh +v ad +ker ala +proce eds +sil ly +reflec tion +di str +wi d +su it +ci vic +yan kees +by n +migr ation +di stin +or ch +fe mini +quali fying +tu ri +o be +hun dred +cra p +wan g +mathe mat +bu re +expo sure +fergu son +seme ster +re serv +pl ym +a hu +fac ial +wa x +wor ried +ca b +vi o +as a +co d +to pics +p cs +hal o +rescu ed +horiz on +ar k +âļ ª +hol ly +el f +ul ti +pu p +quali fied +attend ance +ati vely +destro y +y c +for th +photoo ftheday +c ents +ic eland +meas ures +de sk +port folio +artic les +direc tors +dat ab +e w +creep y +oun ding +hon oured +mi st +j it +men tioned +port able +iti c +d ann +friday feeling +am id +ti ger +scri p +helicop ter +hard ware +expl or +work place +austri a +beat les +ber nar +spi der +disc o +cul t +lim its +shor tly +fin al +nin ja +lu ke +le bron +wal mart +o il +van illa +shi re +ye g +ak y +c s +bl er +collec ted +t g +rol led +speci als +b ff +pier re +sh im +vi er +flash back +restor ation +individu als +pro d +fre aking +tu rer +o a +re fre +mor oc +gre et +re yn +care ful +our ing +u sh +is d +g ill +vie w +thunder storm +b led +pic nic +guar di +pi g +ar k +syl vania +bann ed +u cl +vi jay +ori um +av engers +believ es +eu r +monu ment +concer ned +la bs +ber g +a ap +vi sh +sing les +can cel +z el +ar ab +ru th +too th +ar ta +sh af +chair s +r ack +dise ases +crow d +cl y +fle x +christ ma +artif icial +tom at +fin e +dra ws +advoc ate +fran ce +Ù Ĭ +ðŁĺ ³ +heav y +s our +compre hen +no ble +aa p +hin du +cor al +g ars +ow en +n l +st all +yel low +mar ina +in ver +suppor t +tou gh +promis es +pi e +master piece +sco re +for ce +mor tg +crypto currency +o x +r ors +rock in +pro vin +ho g +no stal +oak land +pat rick +inclu sion +tra ffic +ah med +a ha +lux ury +con secu +de mon +âĸ º +b lowing +st ag +: " +encoura ge +ben e +sku ll +do dge +bu ster +kin son +wit ne +er ror +lo west +fel low +à ° +sh re +bl ur +vir gin +compos er +sli p +mor nings +ga ins +tab le +gra in +ari st +braz ilian +w we +tu es +ribb on +an ag +di st +sac rif +em brace +entreprene ur +af fili +de o +t ali +touri st +fat al +ì Ĭ +autom atic +ðŁĩ µ +we ak +wel fare +confir m +benjam in +fi ghts +alleg ed +me ad +strugg ling +pro secu +che f +à ¨ +propos al +er n +ðŁĺ Ħ +dy k +on gs +hon g +m 
ack +mel on +on ent +ru sh +d ap +tol er +pro pag +c ze +trans lation +wal let +cott age +sa il +constitu tion +ðŁĴ Ģ +mun ici +fav or +storm hour +i h +ðŁĺ Į +approach ing +pin ned +j ed +niger ian +n ach +sh at +particul arly +mc don +camer as +anni e +admini str +he at +electr ical +char ming +gib son +bouti que +ex posed +ac tor +pil low +beach es +genu ine +margare t +ben nett +lou isi +pos itions +el y +shin y +ten tion +architec t +ren tal +ac qui +goo gle +sub way +mom ent +ðŁļ ¨ +ri m +metho ds +cy cli +nor folk +Ù Ī +over whel +ra pid +we ar +happy birthday +progre ssive +ðŁĴ ¥ +co gn +pap a +f ool +philosoph y +pol ar +jim my +wi g +ðŁĴ ĭ +oper ating +reduc tion +ph i +fla gs +to the +o di +a res +k oo +k ang +ar kansas +ash ton +wimble don +sci fi +attrac tive +mississi ppi +logi sts +ral ph +la bel +gradu ates +ma ha +home town +âľĮ ï¸ı +foun ded +on the +li z +trans l +mini mum +pre sti +ta m +gener ations +re bel +journ alists +par am +mc m +acry lic +death s +tes la +w t +bry ant +jer us +i stanbul +muham mad +ri ley +k ris +work shops +is o +coun ts +stre t +prote cted +trin ity +man ual +r hin +r il +pleas ant +le mon +ner d +har der +dar ren +bur y +ra h +bas is +mi gu +occa sion +li sts +âĿ¤ï¸ıâĿ¤ï¸ı âĿ¤ï¸ı +e b +de cre +hamp ton +ìĿ ´ +tra vis +trans form +puer to +nh l +av oc +tri ps +unexpe cted +ve t +di dyou +bar ber +st ages +m son +re presented +for t +l al +pp le +nic ely +ignor e +qu il +qu inn +h k +carri er +remin ded +am ong +pass enger +el len +gue z +sc ape +mu ral +youn gest +ma sh +d ill +rout ine +stain less +jack son +gand hi +th al +on ers +edit orial +convers ations +sd ale +autom ation +i ke +า ภ+ðŁĩ ª +hau l +la ying +men tions +am en +abor tion +i bi +coun ties +ca therine +man ds +jam e +roll er +au t +n am +o logical +cep tion +ran king +tox ic +sn acks +victor ian +bang kok +psycho logy +re g +ang ela +respon d +sty le +sophi e +dak ota +achiev ed +mar ked +imper ial +in as +glo ves +sli m +confi dent +att acked +gg er +lon ely +valentine sday +re b +craft beer +orig in +zim bab +ce iling +te ens +other wise +w b +f ers +day sof +advis or +y ah +âĻ ª +en der +republic ans +av a +skir t +pi pel +chi e +jan e +ja x +ðŁĺ ĭ +âľ Ĭ +j ays +bre tt +bal o +cru cial +d har +as is +de au +lloy d +chat ting +âĿĦ ï¸ı +rel ay +remark able +n s +we t +bris bane +ðŁĶ ´ +tion ally +f k +la yer +house hold +consecu tive +es is +pend ant +st ir +crit ic +su gar +photo shop +pa res +arti stic +do dgers +c un +cra fted +am end +bo at +âŃIJ ï¸ı +egyp tian +sa w +tra ge +small er +ox y +pa ired +nex t +i res +tac o +o y +u c +st i +a erial +: // +dr o +dot com +gg ins +r pg +ay e +le an +stri ker +lo bby +prote sts +pri ority +congre ss +am ate +inv it +r ington +mom my +th us +allow ing +pione er +enfor cement +g ori +tal k +dra g +du mb +bul let +san ge +er y +tar gets +ðŁĩ ¦ +he ather +consi der +seaf ood +ve st +ris ks +% . +p g +sac red +he ating +kick ed +tto t +. 
- +chan di +co ven +po ol +pul se +i a +ro ster +shakespe are +es a +car go +pean ut +tro op +ac tion +tab let +home work +cast le +stru ction +mus icians +free zing +bu tt +justin bieber +j j +bah rain +an them +au dit +didyou know +na vig +guid ance +âĸ ¶ +tur f +n un +fic ations +ye men +char ging +x c +bron cos +su bur +p ale +bor ing +among st +for the +em per +om fg +p j +expe cting +ðŁĴ « +st l +ad min +expect ations +sw an +shoo t +oooo o +min ent +ãĢ IJ +wall ace +stan g +satur day +adop ted +dou bles +hom ie +ome z +d han +vent ure +surroun ding +fi le +mob ility +de es +w ski +broo ke +emb ro +re members +kar a +test im +bo tan +m tv +sacrif ice +jerus alem +d l + ´ +proper ly +ili on +as i +leg it +co pe +m cla +recy cling +lar ger +ðŁĴ ĵ +pat ric +gener ous +ja red +p f +mol ly +thom as +ju dges +h b +sor ts +bl vd +o ven +enter ing +plan es +be et +integr ation +boo ked +fre ed +ver n +ash es +to pped +de pot +welcom ed +ren a +m ick +d and +see ks +gam er +ran kings +ren e +mu t +whis ky +fire fighters +gu es +ga ther +tour ney +de men +y ang +new ton +autom otive +back yard +deta iled +mi st +to bac +fi ber +un usual +grat itude +sp are +ne ys +: * +per i +flo ating +fin alist +don ating +dre ss +bro ad +be the +econom ics +tai wan +ed wards +plu g +pra iri +val en +bab a +f ad +an as +har per +dis order +app lied +p att +bi kin +li ver +cu ri +carol ine +ann er +juli an +wal king +mal col +screen shot +co ding +skin care +activi sts +myster ious +ex act +blo cking +mercur y +bat ter +du mp +âľ Į +en se +li sh +ridic ulous +prote sters +ðŁĻ Ī +lu st +swe at +as s +ali ke +co dy +re ments +win ds +as pir +vi enna +pra y +.. .@ +bo i +cand le +assi sts +te e +der son +p ony +f ence +con spir +âĺħ âĺħ +oo th +e pic +ba rely +a unt +b am +diamon ds +end less +scre ens +can cer +gr o +p st +pro spec +mo sque +help ful +ou ri +bro ther +gu jar +cri sti +ine z +to wers +ad dresses +gra y +bur ton +re tweeted +ðŁ¤ Ķ +n ity +du ck +super vis +jo an +kin der +sanc tu +pi ed +âı ° +ł ï¸ı +m ati +reven ge +ce ster +eli fe +desig ners +back ed +bo li +wei ght +cou ch +su res +s its +shri mp +la gos +auth orities +os ity +hol ly +compu ting +fac tors +ab e +pan els +ram ad +sent ence +missi on +hol m +r b +d ads +shang hai +mon ey +she ets +sk ate +thre w +cup cakes +infin ite +l is +practic ing +ess ay +ka i +as ci +mo b +u gh +hol mes +re gg +ik h +mo ck +collec tions +pe p +o va +sal t +nan dez +co y +thre ats +tex ts +cin nam +pregn ancy +pen ding +stam p +flow er +g is +agre ed +pay ne +ro ver +ph ra +sof t +f fin +fa thers +pass engers +aw ays +al a +h es +li van +in s +samu el +ingu i +h of +j j +chen nai +cat al +om ic +he ath +ni ece +pump ed +integr ated +are l +no m +produc tivity +wan ting +vis a +di ana +tw il +it v +cam ps +ro wing +d ley +black and +gu ards +b ells +re verse +vi be +ric ky +mo ss +ny t +âĺ Ģï¸ı +el le +tro y +cu dd +ev an +women s +fo to +mi stakes +wick ed +mi l +c led +me mes +co smo +schol ar +ren o +ðŁĺ Ģ +v ents +# â̦ +terrori sts +ca sey +cardin als +ðŁĺĬ ðŁĺĬ +venezu ela +bol a +liter acy +t w +en o +con tains +au stin +fin anci +ev an +har vard +origin ally +chev ro +her ald +nott ingham +manag ers +âŀ ¡ +accep ting +wal sh +tutor ial +entrepreneur ship +yach t +requi rements +glen n +pe de +unfortun ately +ach ing +dais y +gi an +night mare +âĿ Ĺ +r ina +b art +ema ils +oppo site +who m +sa ke +pu zzle +da shi +par ty +blan ket +bus es +lo re +beau ty +reas on +pun jab +winds or +func tional +exi sting +hel lo +gli mp +con vin +la k +scre aming 
+rebec ca +bli ss +north west +infin ity +cosme tics +pul ling +coffe e +pl ing +op ho +colom bia +interior design +( + +emo tions +sa c +sun glasses +sav es +d f +six th +al y +ðŁĺ » +de en +dev ast +polit icians +lac rosse +g u +pe i +jav a +comb ine +coal ition +er ts +survi v +ch ad +stri an +n n +de vi +coun c +concer n +contro ller +bre ast +j ury +tu m +introduc es +la di +mobi le +al z +ste ady +nur ses +h acking +on line +oce an +ðŁİ Ħ +a am +ju ven +ic c +louisi ana +ar te +street art +is on +wn s +fr m +p anda +no ir +main tain +del ay +symp toms +thor n +ge ome +ter n +carri ed +p ru +pan or +as sy +per u +clou d +sp ra +pe di +e ste +tag ged +ðŁĺ Ŀ +shado ws +naz i +ا٠Ħ +cor ri +âĻ¥ âĻ¥ +j ad +ðŁĩ « +form al +spo ken +ðŁĮ ŀ +enjo y +lo pez +out look +in ho +w ander +Ù ħ +ma ya +pe e +d ine +ãĢ ij +brief ing +suppor ter +ar ily +ght ers +natur ally +doctor who +j en +v ar +new year +re se +si mm +re x +con sequ +tomat oes +bur st +bra vo +bur gers +cr acking +nor theast +bi om +mush room +mar que +dou ble +ni er +v ag +tw enty +key board +win ni +jama ica +par ish +: - +mental health +ali zing +ren der +wa king +ðŁİ Ĥ +g ly +na than +wa shing +mel issa +jun g +loy al +chil i +song writer +guit arist +bo wie +neighb ors +onym ous +as set +ta i +head quarters +ðŁĮ Ī +i hear +ci gare +sur g +) " +re pl +dar ling +ðŁĻ Ħ +z ak +sa re +ãħ ĭ +mic key +ware house +mass age +ine es +did nt +i w +hur ts +eng aging +mag ic +women in +k itten +mor s +c art +tit ans +colle ague +compe ting +er an +k hal +mar ble +dem and +del ight +et ary +bli zz +lou ise +m ls +fini shes +experim ent +conduc ted +electr onics +itt ers +car ing +wh ats +sym bol +jun g +e cu +pi x +con text +char ger +ðŁĺ ĩ +re ig +fra g +ë ĭ +ch ad +tru e +ker ry +def ending +a int +au ton +check out +bar nes +less ly +d t +m me +clou dy +second ary +are z +_ : +app a +const ant +" ) +ve ts +jo b +i ent +ðŁĺŃðŁĺŃ ðŁĺŃ +m j +fren ch +di ver +davi es +hh hh +e book +๠ī +mar iti +bree ze +susp ended +mat o +vi et +ra hu +se i +bol t +en ary +le is +kar l +fr amed +expla ining +ab c +de aling +nat o +ja ke +exp and +leon ard +establi shed +du b +ar men +el led +voc al +nichol as +ori ent +k yo +illustr ated +ah h +danc ers +milli on +ge ta +po pp +as u +mur dered +gi ble +sto ked +gri ffin +maxi mum +adri an +en counter +ther o +david son +ðŁį » +holi day +ev o +asse ts +car son +memor able +âļ ½ +ob am +represent ative +cb d +tr icks +vo gue +vo ice +mm mm +sebasti an +cli f +ath y +par alle +ðŁ¤ · +pa k +ev acu +e ats +ا Ø +tou ched +organ ised +spir its +can ad +gui ded +frame work +ðŁĮ Ł +pe d +natur al +ag ar +replac ed +anch or +ti t +sha h +organ is +super ior +r n +ch ro +eric a +st ill +cor on +chu ck +loc ks +or gan +ro sen +sc am +ben ed +/ # +ke en +tre vor +vamp ire +sor ted +! 
' +af ford +in tro +gr ace +ðŁĺ ľ +sau r +kick starter +influ en +v u +y up +po c +ðŁİ ¥ +a ar +s ang +tre k +et sy +tb h +scre am +chevro let +pix el +shepher d +an or +gabri el +tw ood +sd cc +me ters +develop ers +clo sure +v w +twit ch +ì Ĺ +se oul +pr ice +ho g +n ish +hill ary +scrat ch +in cen +wag on +dis ability +pan ther +ch ats +g d +wit z +sus sex +l ate +den mark +ger ald +cancel led +net te +i x +nav al +bap tist +te t +y ad +ma th +ho y +r andy +po int +intel lec +fru its +w ool +gu in +pr on +the ft +con dem +mar ry +n ola +architec ts +cin cin +roc kets +gentle man +ex plan +t ate +do e +ra ises +wild life +w l +insi der +blan c +w p +for sale +ny c +po well +unbeliev able +pen s +goo dies +mu stang +p ens +st ays +squ ash +xox o +near by +ever ton +co co +le agu +k han +stu d +south west +con struc +s worth +cro atia +le a +su ms +aim s +e an +van ess +iti ous +pa thy +arc ade +b end +sugge sts +sac ram +roy als +ri er +em ir +in cl +an k +clar k +ri ght +vac c +ठ¾ +tan e +li b +u sc +sal es +hu h +s ally +ver a +p ga +gro ws +dru m +tre e +eth ics +sug gest +is ab +se aled +pre viously +anim ated +ab du +ri ses +glo b +pre dat +scar f +del ic +om ar +ll i +sx sw +py thon +ne bra +fun k +reflec t +pav ilion +tic ally +ch asing +bak ery +inva sion +ko h +believ ed +co hen +con qu +cra fts +nat i +cle ver +govern ance +sam ples +fa ils +â Ķ +ti mo +r itu +stri king +inclu sive +sho cking +can t +requi res +dra wings +à¸ Ń +purch ased +du m +z ach +war ner +con sole +man sion +foun tain +circu m +e sh +is land +mil k +pro fits +hali fax +ri val +âľĪ ï¸ı +jen ny +sand ra +ny e +k elly +y al +qu ad +no s +inste in +fin alists +mid fielder +cu e +excep tional +a an +sa pp +gett in +sa a +f ati +sl ice +vol k +s wal +la sting +sum mary +it as +sm o +s z +âĺ Ĩ +ip l +fl ames +ene ws +ha v +hoo die +pitch er +win dy +re vol +centr al +ton ite +ðŁİī ðŁİī +sol ved +mil wau +organiz ations +wee ts +re fin +s th +ãĥ ¼ +el in +ton a +cinnam on +ðŁİ ¨ +ðŁİ ģ +ron aldo +pen insu +ome ga +el ds +desig ning +e igh +blu et +ben z +nu g +ash a +robo ts +su dan +choo sing +en do +ser ge +clo sely +hand y +fing er +be ing +ar te +survi ved +fl ame +mile stone +gu t +d war +fu tures +é e +el o +fri dge +eli c +ou ch +u b +p v +tit an +col lar +st ation +nev ada +aur ora +r d +dun can +âģ ł +bri en +mar sh +Ð ¾ +to tal +ch ry +s ers +su ffe +ra chel +colle ge +to days +cour ts +ch it +re united +gym na +gen esis +be side +re presentation +ch ant +collec tor +ra k +ath ens +ni gh +mun ich +langu ages +fl u +particip ation +__ _ +c v +spec trum +so da +co ver +refe ren +ab bo +ap a +public ation +ed m +mon ica +ar my +ðŁļ Ģ +div or +dr y +stre ams +robo tics +ci der +bull ying +appro val +sto ke +plat forms +sier ra +ex tin +i b +ha yes +succe ed +suff er +at ically +da i +lyn ch +h ound +del ines +ack now +d ated +exclu sively +he res +fac ilit +dam aged +char ter +la kers +fal con +unve iled +wel ove +e ase +pati ence +l one +gent le +gene tic +produc ing +g our +shann on +bil ities +zimbab we +p int +dau ghters +liter ary +bel le +cl am +surroun ded +k any +ne il +pir ate +rang er +hb d +nat alie +bel ong +olym pi +emb assy +sc ol +en er +ak in +lo ren +b h +: / +di va +den im +hi pp +ðŁĩµ ðŁĩ +arn old +? 
' +we ren +em power +dis abled +man or +rasp berry +b af +aw ful +dru mmer +kar dashi +n ash +machine learning +ch u +rebel s +tim ing +mon roe +ton gue +ran ge +pup ils +re ss +amaz on +b z +har ley +pal mer +ballo on +s ings +ic ec +j b +c ers +g ps +whi st +ri se +l t +oo oo +c attle +shoo ter +vod ka +uc l +mt g +le sli +jon as +di spo +at ric +ste in +vintag e +fir ms +flo yd +cow boy +soo oo +is aac +war craft +disney land +beauti ful +be am +franch ise +bu n +k ag +an on +tur bo +swee p +made in +kar achi +dete ctive +penn sylvania +contro versi +vitam in +a side +chron ic +descri bes +remo val +ha h +ap er +ten ed +u to +bad ly +mir ac +f ry +ye a +in jec +ther mal +comp act +th or +te ed +ur gent +l ite +g illi +sop hom +ic o +che m +p m +for k +fre ak +ch ak +recipi ent +i y +ni k +model ing +c ans +ðŁı Ģ +del ux +se am +surviv ors +rad ical +investig ating +reli able +f m +tur t +ligh thouse +to ol +go wn +) ) +bo ts +auto graph +a id +bu ffe +h mm +horri ble +ssi onal +ann i +à¹ Ģ +k its +sch i +eter nal +hu ss +sens itive +r u +tast es +chec ks +im o +por tion +sk ate +e den +half time +fri ed +ri hanna +ti se +fl ick +ca in +s gt +âľ Ķ +sh au +sta ined +ra ffle +dro ve +sal man +princi ples +sh o +ar u +je ss +gu ine +gar bage +my an +jel ly +dis ru +z ia +q ld +ent ries +la v +fle w +ad mit +objec ts +comp are +ny times +cann es +p n +suff ol +ro c +d ana +e gg +hi st +coun sel +' ! +phy si +imag ination +ad just +explo sion +plym outh +hor ror +elli ott +bour ne +de x +bre ed +au dio +lob ster +disappo inted +nation wide +( ( +incre ases +austr ali +ce dar +star ing +rac ial +e is +g mt +visi ons +stay ed +discu ssions +de an +cur tis +mai den +stel lar +happ iest +h wy +pre season +car av +mon days +hospit als +glimp se +schol ars +ja i +ter race +ann a +goo se +gra ded +lot us +hun g +grocer y +stam ps +emper or +sc oop +in ser +c as +exist ence +he al +fal cons +mar vel +reduc ing +terri fic +magne tic +perfor ms +bar re +p us +tre ating +ic on +w h +decla red +tra uma +do d +come dian +nik on +bu gs +as m +mont gom +ibi za +comprehen sive +ha s +san ti +fellow ship +da sh +p sal +louis ville +sp y +fau lt +d the +fi led +vi sta +de sc +fe ars +you tu +sp s +es p +ri g +cri me +ber ger +wonder land +k ent +in formed +stev ens +my th +ast on +ir i +visit or +at ri +produc ers +al la +person ally +separ ate +agen cies +af ri +il an +spo ke +n ina +squ ad +di ves +de pend +li v +fier ce +enter taining +cha in +sc at +bor ders +pal ette +sp ro +os is +der by +tobac co +zi o +willi e +ju vent +zoo m +hol y +enti rely +af e +mart inez +be ds +pe a +bull dogs +ðŁĩª ðŁĩ +ib m +ne on +ethiop ia +team mates +plan ting +tw er +any time +for bes +ó n +run way +ner vous +ro ger +p ile +ch anc +apo caly +u w +o i +dr ought +territ ory +br ick +cre atures +go in +w aff +gre n +sou theast +je an +am bul +ed ited +stra p +c v +aar on +ãĥ» ãĥ» +t su +descri ption +kin dly +clu tch +im mer +en or +women sday +or ange +ra g +ob vious +hy der +chann els +man go +me yer +ra ining +ge tty +pil gri +coordin ator +up load +ninten do +don uts +san chez +app arel +j r +zz i +, @ +jeff erson +accessi ble +great ly +e id +initi al +budd ha +par is +ma scot +â¬ĩ ï¸ı +sch war +si ri +sp inning +mortg age +e cho +end ange +ge dly +chlo e +enh ance +kar nat +k ry +explo res +ðŁĴ ģ +af fair +ic als +all a +dar t +dolph ins +diffe rences +squir rel +au gh +dr ones +ell en +re store +pa w +un for +pi ke +hil ton +colla b +consu mers +co inci +out comes +pp p +a q +coup on +li est +si ms +k ho +av es +spo 
on +pu dding +cor byn +hat ers +ex ams +sla ve +. ! +p sa +app les +tam il +se d +co ke +zz o +lo sange +car bon +cla ir +... ) +k hu +cra ig +explor ation +sanctu ary +su e +al way +demen tia +won ders +super hero +pakistan i +brown s +bluet ooth +lo cker +mar c +ev entu +delux e +rodri guez +âĿ¤ âĿ¤ +ro bb +ðŁĴ ¦ +lin ux +ten s +intellig ent +se ed +vo ter +s ler +pe aks +inter n +teen age +peninsu la +hand ling +ti e +cou sins +wen dy +me e +à¹Ģ ภ+din o +ðŁĴ ° +ðŁĺ ĥ +ze e +s bury +trage dy +b k +bo re +z in +war ns +idi ot +tou ching +contin ental +tac os +saf ari +wa shed +po dium +morri son +fore sts +c bc +al on +partic ular +be ads +inv ented +lo ch +li ghter +where ver +i de +docu ments +a we +k r +no where +min er +st it +ro x +contribu te +har dy +cl an +ob ject +ca it +ðŁĴķ ðŁĴķ +happ ier +vege tables +t art +g ag +nom inee +heav ily +pan ic +j d +there sa +at m +u ph +s fc +su ri +drin k +n al +re vel +k l +avoc ado +nom ination +ma donna +shar on +malcol m +control led +sh ers +revi val +legis lation +shoo ts +n in +comm entary +pro s +human rights +str anger +mit ch +pipel ine +leg ally +th u +gil bert +tol l +gran ted +gh s +ir anian +refre shing +du k +ab i +pri me +jose ph +mo sa +stati stics +produc tions +mer ry +pat el +sa x +human itarian +struc tures +e missions +town s +fre el +ster ing +rat ings +alle gedly +cab in +st l +w ade +fl yers +tri m +promis ing +z u +bal lot +compar ison +free ze +ou ter +great ness +as sign +snow y +r ale +tor ies +med iter +kno ck +consult ant +cincin nati +analy st +sc oo +je ws +appro xim +pu re +portra its +cy rus +ation al +lo ans +acqu is +el u +accep table +uni on +water color +ru st +batt les +per fu +seas onal +ser ial +mind set +ri ot +fel d +enni al +clo set +pri est +tan ks +int l +scre w +bu m +ab dul +ou x +expla ined +ric a +imag ing +law yers +bu ried +ãĥ»ãĥ» ãĥ» +ear l +âĢ ķ +l ton +resto red +stri pes +fo ss +de mands +ste aling +alex is +mun d +ak er +ur us +war dro +hu gs +gen re +e go +Ù Ħ +particip ated +bab es +ban quet +ti ous +he mi +ds b +lo st +milwau kee +jen ner +ge m +ou tra +lo ses +id i +re ps +ðŁİ § +regu lation +fla w +f ang +vibr ant +ram p +ra ins +well being +so viet +vie wers +de po +libr aries +bi go +ser y +g ill +de struction +co z +c x +bri dal +al ds +plan ted +amate ur +lu d +che ering +show cas +pro file +i u +ver tical +pack ers +wiz ard +ski p +s light +be au +air ways +mu ch +re ra +ðŁĮ Ĭ +ab sor +pati o +pack ages +s ells +ment ally +ðŁĺ ¢ +reyn olds +k are +tri bun +wal t +kn it +ta ste +sur rey +boun ce +cre ature +b are +bet ting +su re +mi ley +laugh s +al ore +cy n +t l +arti st +ann ah +war mer +dynam ics +lunch time +mariti me +vulner able +ðŁĴ ĥ +wol ver +dur ham +const antly +am in +si bl +: @ +bul let +k ach +angel o +wil der +doo m +desk top +law suit +k ca +hen derson +inv iting +bet ty +ta wards +ra fa +le aked +and i +ge ms +af l +vel o +mediter ran +pro be +to tten +steph anie +sn ation +com be +q s +over come +assas sin +ra v +fil ip +winni peg +sh il +determin ed +k as +ou tre +regre t +gui des +aa a +ðŁĺ Ī +wi ves +mani fe +er ly +sm y +sh ima +x ing +pix el +jac ob +ac commod +to y +on o +po o +ti er +an swe +ðŁĴ ģ +ro sa +le ase +bel ongs +th ar +eventu ally +nei ther +go a +ski ing +at ra +ag h +broad casting +f ury +py ram +d ice +volk swag +wom ens +provi der +bom bs +miss ile +whi p +d ick +nor we +back up +el der +mat ure +concer ts +gi ous +sque e +good morning +bra ves +^ _ +au ssie +lun a +mal es +he ck +for tn +rome o +steel ers +p n +pe er +re presents + « 
+kat y +migu el +requ ire +cha ins +l ur +immedi ate +ti mber +âĸ¶ ï¸ı +advoc acy +ex port +an z +tiff any +auth or +ðŁİ Ī +du des +chil ly +hi d +har m +bu g +mon ster +terri er +tu c +story telling +ta k +in ti +immigr ants +b is +reach es +com passion +john ny +contribu tions +ðŁIJ ¶ +mechan ical +impre ssion +ran ks +ko be +men ting +bloss om +pab lo +buil der +bom bing +tw el +sul livan +om o +pe te +de mi +ku dos +w bb +t gif +mass ach +neighb or +che fs +eng ines +pun e +ga ined +phan tom +s days +ext end +gr an +cent ers +jac qu +dat asci +sleep y +el vis +answe red +s lot +con y +flexi ble +ti ally +le tics +% , +andre ws +si ble +mom ma +vin o +do x +invit ational +twil ight +j ade +ill ery +joh ns +f ou +p v +-- -> +break down +billi on +prin ter +mon d +c bc +mag gie +legi on +du b +kur t +po or +paren ting +regi ons +bikin i +be ware +si onal +au burn +kid ding +amp les +sp an +con tempor +c ic +ha bits +ak o +pre fe +bud dies +it z +em ily +person nel +moun tain +ver sus +ðŁĺ ¬ +ear ning +s ink +dar i +u u +s win +i ster +bru tal +n ac +kat a +clo th +am and +ðŁĶ Ĺ +ne o +alu min +week ends +nebra ska +co des +delay ed +brun o +pro ven +in c +i ght +fl an +or o +lam bert +regu lat +w f +massach use +kardashi an +bern ard +fi esta +volcan o +grand pa +anc a +d re +st itu +mean ing +fo am +au ck +at ed +r l +hot el +pers ons +dy nasty +ell or +ma i +am ne +sty ling +avi er +e g +vege tarian +, â̦ +foun ders +sta in +g d +cy cles +sky line +trac tor +exi sts +tra l +kid ney +mar il +inst ag +se tte +addic t +tri angle +flash back +controversi al +z on +p ins +i as +tr ay +town ship +deleg ates +sp am +h ms +cr ane +peop les +o lo +fac tion +but es +on ica +deleg ation +new profile +eli er +mc a +w and +g ely +losange les +ber ke +ti ve +dis rup +zz a +cas a +jor dan +ford shire +ga thered +ic hi +atten dees +à¸Ń ภ+pe ppers +co in +bour bon +ern ity +ro tary +behavi our +jere my +team work +compli ance +tre mend +ðŁĩ § +bu hari +cam bo +bu yers +ha gen +bu ds +bay ern +mon te +sm ells +an za +ath lon +descri bed +work force +gi ving +ap i +invest ments +da il +sel ena +datab ase +th um +mor tal +stu dent +bu yer +do ver +gar ten +att le +loy alty +gen oci +holo cau +theat ers +ru ling +ven us +pat ent +ch un +ab by +awa ke +mass acre +bang alore +break ing +simm ons +ju sti +hal e +ed chat +gg les +haw k +mar king +head lines +stro m +co ve +breath taking +med als +hair cut +christ ine +tele graph +gujar at +ju ra +can e +sho re +propag anda +mu eller +.... .... 
+sa vi +stom ach +thro ws +ta b +war m +j ong +reno wned +hi r +ra is +mush rooms +guaran teed +bo a +m j +revolu tionary +certi fication +bru ins +jo in +w es +pas sport +c g +sex u +cap able +w v +ton es +jac kets +ac compan +spin ach +fore ver +bla ir +wat ts +g l +cou ples +prairi e +newprofile pic +logi stics +massachuse tts +jagu ar +o id +we al +under water +mo z +y i +ma ths +myan mar +pre ps +suffe red +tr ace +wal i +ah hh +bor g +st itch +cu lin +real ise +infe ction +discrimin ation +sh ame +an kle +hu mid +y t +brac ket +tru ck +tri u +ea ster +commun ity +post card +invol ving +ty ler +car amel +over view +ex amples +integr ity +base ment +instru ments +ani um +at us +gh er +laun dry +achi eve +gen eva +pr icing +hyder abad +beli ef +me ta +j aw +accoun ting +lead er +cristi ano +cou ture +cy p +vis ed +, ,, +k nu +h ick +break er +br am +ra b +mo or +ham as +gradu ating +pupp ies +ak h +ta h +ach es +ri e +op ini +g ta +re ign +tra gic +re ver +p ill +pine apple +tou ches +da re +le ys +il o +inter iors +sc outs +bar t +en zie +don o +bro ck +christi ans +ense mble + · +cine mas +new port +air line +win ston +le igh +cont ents +pre scri +ur ge +tr out +fic ally +il ia +sub si +are r +âļ¾ ï¸ı +w ounded +ðŁĻ Ĥ +pe pper +ðŁĴ ŀ +fit ted +af f +re sur +thursday thoughts +z ero +archae ology +di v +je e +i on +awa iting +co zy +beauti es +bal d +dat a +gri zz +stal k +kin ds +cle ared +jess ic +regu lar +ali ens +plac e +bo s +bi zar +thisi s +ðŁĴ Ģ +totten ham +ma fia +s lam +ari ana +car roll +back pack +care y +uni v +r g +pe p +dig it +tatt oos +ag on +volunte ering +diffe ren +consu mption +ka thr +head phones +t shirt +o b +ele ment +re tail +sh ru +al gori +contain er +consci ous +fi l +com ing +ra sh +u rope +def ine +gi or +femini st +flow ing +rout es +gl aci +fer t +somer set +ant es +twee ps +$ $ +h our +endange red +year sof +ro h +po pped +bac king +ba sil +bra ke +mon aco +lgbt q +pra gue +ut ility +cas si +gate way +haun ted +sch ul +ðŁİ µ +shou ld +walking dead +comple ting +dann y +montgom ery +pengu in +ss i +mer chandi +ðŁij ij +chur ch +h ates +cap tain +brea thing +ce t +fair ly +approach es +compan ion +surpri sing +kany e +pe y +hin di +targe ted +lor ds +de ut +di gging +ger man +ru t +ener gy +close st +y un +apo logi +ภ± +s ack +ru p +dd y +port al +d ough +b ats +ðŁĵ ° +at ur +graph er +pi res +mo tors +ðŁĮ ¹ +j c +dan g +tu k +clu e +us c +pag e +d less +bro ws +ju s +ad ing +re marks +oo m +car dio +ste fan +arm strong +âĢ¢ âĢ¢ +ni est +belgi an +bi op +so y +lo f +í ĥ +q t +flashback friday +ce e +ģ ภ+wre ck +mar ines +amend ment +wardro be +vo y +bur ned +guit ars +ra inf +li fel +ssi l +oun ce +exter nal +c key +me sh +she ikh +inv itation +sugge sti +pop corn +phenomen al +an onymous +tun a +chic ago +o val +del y +loc als +( & +pro f +no vel +fin der +spar ks +la ven +in fu +nic ks +qu ant +ra e +exe c +dist ingui +st ances +mu tual +sh al +unve ils +edmon ton +zan ia +a dio +vie wer +brad ford +audit orium +qu is +re act +htt p +l ero +chee ky +impac ts +ta k +ed t +desper ate +t ay +ì Ħ +sett le +bar gain +resu me +un ite +thro wn +ke st +se ys +mar ching +am it +decl ine +sch ar +me tr +stan ford +lin ke +ber ra +dol ls +rug by +jam i +b or +road trip +dino saur +mi k +sun der +re m +b k +over seas +nau ghty +imple mentation +iam srk +lun cheon +fir ing +mi ami +pere z +the e +z on +gi fted +con version +ceram ic +¡ ï¸ı +pe dro +ì Ĩ +v ick +! 
@ +he ed +si d +b w +docu ment +pl un +gr ants +fant asy +predic tions +vali d +car ved +gradu ated +ðŁijį ðŁı» +nation ally +ch y +af l +re sso +blan k +ri vals +j ig +e ties +om ics +une mp +b ound +sk o +inspec tion +par al +high s +cri sp +b ans +ob a +[ @ +co spla +costu mes +rec all +mou th +ni gel +b ts +ter a +ko v +do cs +west minster +dic t +gra vity +kar i +ro gue +t ted +war k +ida ho +w end +aw i +queen sland +proce sses +cli ffe +m ick +com pens +op ol +the y +cl ari +wiki pedia +salman khan +haz ard +pre ston +swee test +pd f +che es +tr ilo +south africa +bur nt +( $ +con tain +t p +sub mitted +sound cloud +at u +re z +word press +corru pt +n f +ma ker +í ķ +par as +adv ent +ri al +ca fe +fo ssil +!!!! !!! +co ws +c j +sp ur +institu tions +land mark +ent it +re ut +h is +alz heim +we mb +regg ae +mo squ +st at +identi fied +deal er +re am +re land +ten sion +ðŁĩ © +wra pping +deep er +fr at +red dit +ar is +moroc co +.. " +b low +ma pping +pri orities +ing a +swa p +re wards +conspir acy +creati ve +c j +congre ssional +vau lt +ple x +sophom ore +shad ow +ele ss +ðŁĺ ħ +dar ts +aldu b +anno ying +pro ps +n as +alumin um +h bo +offen se +j ill +oni ons +la ur +ta e +har dest +sh ro +ga ining +meas ure +ed tech +cyp rus +tar a +ang eli +car lo +go on +all i +im plic +ju pit +resil ience +ha il +bal anced +) ... +joy ce +gr a +th eli +defin ed +shi pped +main ly +min a +l m +sac ri +o ber +p im +claim ing +ent ers +co rey +bo k +cri ed +cool ing +dani elle +pharmac y +thor ough +ca ke +k lo +outre ach +z ens +digital marketing +val ent +sn p +her b +mr w +caf é +cap tures +no tre +triu mph +pan cakes +cu mber +spi ke +d ation +bi gg +sp er +crit ical +am al +too th +foun ding +a stro +' # +quan tum +th ames +un c +pri de +air bus +kno cked +un defeated +mediterran ean +cal cu +clo wn +sens or +ham mer +for give +cu shi +ber ry +maje stic +elec t +polit an +g ta +k ari +bur ke +sea hawks +volkswag en +re i +landsc apes +cas u +grand father +list ened +/ / +star trek +rainf all +fur ry +vi er +star k +rif le +ff a +leg es +hillary clinton +min us +correc tly +architec tural +pre ce +up side +box er +ðŁĻĮ ðŁı¼ +is ai +de t +pro vo +tis sue +spoo ky +ve led +re con +prospec ts +que bec +âļ « +ig no +anat omy +shap es +w p +p interest +hor e +an es +pick up +ti p +pra desh +hu gh +co e +po k +gram my +well ington +sti gate +ri gh +lea p +king ston +scen ic +go sh +v ani +au g +s ary +zi er +bure au +lin son +con te +fra gr +all an +g aw +lan a +colli sion +surve ill +ren ais +ar range +s ali +do in +br ance +bren dan +our se +in coming +suspen sion +à ´ +l la +educ ators +in tri +da e +bio graphy +bul gar +villa in +go thic +rw anda +e w +may or +meet up +democr at +mor gan +su dden +te sco +car rot +bom ber +mck in +re ne +fun day +agricul tural +haha h +show time +form ing +col a +scor pi +quo te +po ppy +s life +d az +tu b +ne n +mo t +ðŁĺ » +s ore +elder ly +o ve +skin ny +um i +anc o +man ship +we re +g v +k ah +fol ding +ne at +samanth a +dan ish +uk rain +humid ity +nu tri +jak arta +cand les +oooo oooo +at ile +streng th +i bra +bap ti +charle ston +fr ames +girl s +clear ing +glu ten +# # +super natural +ju bi +ph one +he in +dr un +le ak +invest or +y er +dom ain +ball room +mi sh +app li +off shore +bla ze +dor o +âĺķ ï¸ı +win ery +shar if +ad ore +n ir +saf er +si gh +as cri +strong ly +trac y +ck er +ol l +faith ful +ey ed +deli ghtful +vis m +karnat aka +tit an +wh ar +jer seys +re fur +heav en +gri p +pan ama +pre li +glu ten +o dd +cont ent +pon ti +tion ing +e 
commerce +feder ation +flaw less +ge ar +ti res +by r +pol ice +cu ban +tri butes +tic ul +chur ches +nur sery +di aries +muse ums +snapp ed +i van +wi ght +touri sts +ramad an +t rent +prophe t +won dered +focu sing +hi d +ic ons +i q +ambul ance +pi st +fun niest +time less +sr ilan +bu ys +ki ds +colour ful +a shi +ch ir +mu m +ðŁĵ ļ +let ter +x en +reut ers +pre serve +in ting +ste p +fu ji +uni ver +i u +show down +po ems +surveill ance +suspec ted +ta e +sol ving +tom b +mother sday +car pen +recru it +pil ots +bro c +mix ing +fri days +ty r +represent atives +tra pped +abdu l +free style +clu ster +âļ łï¸ı +k d +sk ill +pit t +ex o +commer ci +muse um +loc ally +g ina +no bel +immun e +fr ac +cap su +main ed +attemp ts +bull dog +be spoke +sing ers +sp elling +seg ment +nat ures +tic k +lip stick +clean er +gett able +preci sion +â̼ ï¸ı +th ood +re ef +no pe +bill y +di gi +mu si +ri val +figu red +tal ity +sun ny +ber k +aw ww +awa its +un real +co pen +asy lum +ex otic +bu en +mo ck +en able +arch y +fr a +pla stic +al mond +amp li +displa ys +abbo tt +s me +x p +ðŁĻ ĥ +graph ic +i ved +mar a +cau tion +lea ks +en berg +ul u +unic orn +cann on +appren tic +ðŁĺĺ ðŁĺĺ +b ball +wil low +at ics +am as +manufac turer +campaig ns +port ers +flo ors +l su +ty pe +ke j +honor ary +it im +to le +min ecraft +d x +ma sh +ri o +consequ ences +ron ald +go ssi +suffol k +mu se +r bi +live music +i van +ðŁİ ¤ +le u +patri ot +man it +lan ca +home decor +de ar +sig ma +ti de +str ings +v ita +sequ el +try na +inve stigate +bor is +ve gan +barri er +mind fulness +web b +hu stle +in da +tan zania +str ay +tex as +c ag +diagno sis +wom an +g w +ob session +l ative +nu fc +fl ynn +moment um +sof a +wal d +vege table +tu cker +supp er +se ab +ar ro +se ag +ven ting +counc ill +sp lat +cal cul +.. # +com fy +odi sha +sto pp +war fare +ca es +à ¨ +co y +price less +in sec +ðŁĺ Ľ +contro ls +empower ment +datasci ence +per pe +gen ic +e res +tru deau +man o +sla very +expand ing +ma he +fa iling +s aga +photograph s +cre st +re on +surf ing +hi e +ðŁį Ģ +ja e +fel lows +south ampton +sol om +ce ster +tab ility +hor n +se ct +he e +cole man +at las +explo rer +consul tation +copy right +organi zing +den ied +mon keys +noo dles +br is +fl or +dou gh +bon ds +sho cked +eco system +care fully +w m +apart ments +cur ve +san diego +must ard +comm en +cere mon +e ch +ru th +ðŁĻĮ ðŁı» +hawa i +fil med +te ar +as ingly +ca ir +wat t +instru ment +ou tta +ye ol +river side +ë ° +. 
: +nor wich +alo g +migr ants +new man +ri de +spr ink +targe ting +beli eve +tor ch +reflec ts +per mission +ff man +ene mies +bas ics +se ized +sun days +le i +hass an +en do +h c +st ad +le ments +kk kk +nan o +shar k +man a +on ic +treat ments +ear ly +collabor ative +shu ttle +bran ches +mis ses +mained cm +ap ers +ky le +carri e +leis ure +sh et +bir ding +adv ances +ðŁĵ Ŀ +popu lar +di ane +a be +re war +neigh bour +k pop +remem brance +play ground +ru b +krish na +e bola +inqu iry +ep a +lu min +organ isation +abra ham +norm ally +pre ten +jan et +w t +ðŁĴ İ +encoura ging +a stic +bu mp +syd ney +s z +ss ss +gar rett +ðŁĵ » +consul ting +roman ia +spo tting +chanc ellor +ar ma +presti gious +ðĿ IJ +t ad +cry st +compe tit +rati o +cat aly +bro w +j ur +vi king +commu te +y day +la yers +du mb +esc al +genoci de +f ill +gu pta +ste pping +se i +fo to +wild cats +col i +projec t +ear nings +st r +ge ons +comple tion +b m +decor ated +craw ford +af ghan +sc are +visi bility +hi b +direc tion +stro ll +christ ina +alter nate +cl are +sty list +be hold +s ance +leop ard +acqui red +narr ative +ash i +the a +?? ?? +pe as +at ch +sli des +le en +renew able +eng lish +qu ir +co aster +r x +fo ols +match day +mis m +amaz ing +z ig +ke ting +won t +to wel +di ab +sta ke +n m +mel t +e than +gra pe +polit ician +sm en +í ĺ +re o +wedd ings +cat cher +or acle +me mo +ðŁĮ ´ +ec k +rob bie +norwe gian +oper ator +am or +se wing +ju l +x ie +u v +fif ty +me ga +tatt oo +liber als +u pri +traffic king +richard son +su v +ki p +mess y +tremend ous +gl ou +cour tney +la d +stere o +my ers +i dio +^_ ^ +man ning +dy e +w d +thr one +jun k +as u +provin cial +k ook +wr c +fine art +hamp shire +renais sance +b red +fall out +s j +sn l +al am +tor ture +fy i +sh ines +pa w +ch ar +hen ry +c row +aci ous +di an +pa ige +ba re +stock holm +scen ery +ðŁĩ · +jef frey +pu sh +decor ation +ne d +cu te +brig ade +laven der +inv ites +e sports +vo ir +dri ed +tran spl +sur geon +no vels +pul ls +son y +lun ar +man e +i vy +fru str +dor set +sa i +tor res +ssi on +shut down +suggesti ons +writ ing +e o +battle field +u ga +ðŁIJ ¾ +vac u +spl ac +g it +u g +high land +% ) +mer maid +sacram ento +ta ils +p w +ka h +t ell +enh anced +ì ķ +auck land +cru el +ðŁ¤ © +au dre +sail or +gram mar +g love +de on +infl am +fresh ly +k ell +zi p +christi e +mil d +di xon +instru ctor +g ence +ãħ ł +sub jec +constitu tional +crow ds +in visible +ru ins +da k +si p +pla que +p ouring +comple x +z ine +ste ad +f let +trans mission +lo way +ar un +incre asingly +au d +transp aren +cro wned +sc oun +blizz ard +lux u +fi ers +achieve ments +hun ters +rock ed +bas in +vio let +pro ves +achiev ing +pro sper +se ga +flo at +vi an +xi v +pol ic +tur a +approxim ately +wander lust +keep ers +geta way +co d +pol is +br yan +col ts +tal ents +yo gur +gluten free +wri st +gr y +cze ch +ðŁİ Ī +ev ille +ðŁı Ī +to x +dani els +am er +bi ds +weare one +me tab +g t +boy z +pd x +pos session +pu shed +shr ine +reali stic +tri gger +na vi +ru mors +n af +jen kins +tr un +comm uni +Ã Ĺ +gam ers +arm or +moham med +bal cony +y ah +stron gest +rhy thm +unfor gettable +k p +ho bb +custo dy +greg or +r ita +aes thetic +il ation +sponsor ing +n ay +kid napp +sh s +ra jas +me g +signific antly +butt ons +la c +ver sions +essenti als +opini ons +k ro +d printing +wi dely +d k +ur an +y al +reque sted +c n +cur ric +plu m +gr un +v m +dev on +m yo +rel ation +juvent us +rou ge +min ority +min es +jupit er +n ine +oxy gen +fran kie +une sco +fab ric 
+disgu sting +sal man +dete ction +lan ka +d ac +ðŁĩ« ðŁĩ· +argu ment +shel ves +cel tics +rober to +pi gs +he dge +fau l +pow ering +butter flies +fi r +re make +att i +com o +emp ha +kend all +poke mon +se ating +d ans +bald win +ðŁij » +lesli e +one direction +ti mber +im an +fon t +e der +di on +ste ph +for mat +gre gory +pro p +he x +ru in +sor y +inf er +n aw +bar ak +sd gs +kar ao +lu sh +v ander +end ent +g is +a fro +soc cer +ay an +t uni +lun g +da yof +alex a +mar ath +addic ted +ag ile +hy gi +light weight +ì § +mand ela +jo ey +anc y +hu m +bi r +memor ial +jim in +ging er +v ak +jav ascri +cro ps +orig ins +d ari +pi per +im port +aggre ssive +predic tion +re pairs +cr acker +voy age +ni ke +mu mmy +linke din +country side +bor der +gla ss +per t +s als +sho e +autograph ed +wal nut +colle gi +sal ary +pa iring +ðŁĮ ¸ +cath ol +swee the +defe ats +streng then +roof top +impro vements +barri ers +ur u +t ally +ru led +ðŁĨ ļ +nai ja +emo ji +per cent +gi o +pro bs +on ce +adm its +pa ths +li ar +day tona +pe ters +cal i +cal li +mu g +o sa +ap h +ab y +hy de +eth nic +pla ins +ol f +haha hahaha +holi c +?! ?! +su bli +bl acks +mo t +gh ton +lo vin +b rent +bar u +l ati +de w +ate au +q a +pain ful +bu sters +st atic +ðŁĩ¨ðŁĩ ¦ +note book +out fits +si es +r f +floo ds +Ñ Ģ +thro at +su ici +ro vers +beng al +pre pares +blo g +mini ature +Ø ¨ +am phi +com b +r sp +in timate +green e +Ì ĩ +al tar +surg ical +ves sel +... ? +gav in +g ator +threat ened +z ar +rob bery +di er +promo ted +y g +x s +su bs +inter viewing +threat ening +do zen +me ado +water fall +nintendo switch +cal um +mini sters +dro p +univers ities +war ned +tac tics +ðŁĩ ² +refu se +ad ju +v ast +ðŁĺ ´ +mc fc +lib ya +no filter +distribu ted +re ser +ron nie +de co +javascri pt +mon k +intere sts +fle x +mar tha +sti es +oo d +ðŁ¤£ ðŁ¤£ +e un +b ali +g omez +sti mul +moder ate +d ity +ir is +stra w +consist ent +direc tions +adop t +sal sa +cro o +reco vered +black friday +lan caster +accep t +weareone exo +buil ds +free man +air plane +diti on +bel ong +jam ie +pit ching +li f +om in +cri spy +pre pping +ve g +chan g +accompli shed +graci as +dolph in +elec tor +culin ary +super bowl +wal a +pur suit +black berry +be an +cardin al +pro ved +immigr ant +stric tly +holocau st +pass age +ha us +cou p +pur se +har ass +< < +le ed +ado be +st ad +legis lat +par ked +pri yan +sil va +kri st +s the +fun ky +ig a +sett lement +ph s +t mrw +stre ssed +hun t +ho ckey +treas ures +cham bers +ol u +hu t +mar ley +tex ture +wilder ness +mm ing +poten tially +om aha +ju dy +to es +spo iler +distingui shed +feli x +ah u +recommend ations +zom bies +hit ler +tri ple +colla pse +motiv ated +ulti mat +gg ling +so y +ci gar +fo ren +vine yard +gl itter +fin dings +colon ial +hun ter +eri k +den s +beet le +lot te +sub tle +s matter +tru sted +experim ental +nam ents +ðŁĺ Ĩ +regi on +acquis ition +bre eding +quarter back +am reading +oo td +ru de +initi atives +st out +hy ung +out come +al fred +mic s +exper tise +bacter ia +pengu ins +jump er +valen cia +bar k +ing day +sell ers +contrac ts +hou ston +commissi oned +adap tation +swan sea +santi ago +common wealth +ju dging +sub mission +sco rer +tom my +ñ o +ex quis +fil ing +explan ation +alli son +wemb ley +ri dge +chev y +san tos +own ership +cogn itive +favour ites +sh ed +phil anthro +dele ted +go dd +s nor +gui delines +ff ing +je ep +cli ps +sw amp +an or +guil d +bol ton +spring field +munici pal +goal keeper +ye on +ðŁĺįðŁĺį ðŁĺįðŁĺį +ãħĭ ãħĭ +water front +gra ve 
+contempor ary +ar ity +ÃŃ a +sle eps +sy rup +al am +pi re +co yo +moto gp +ty son +kej ri +cir cul +sing ly +cr unch +complic ated +nostal gia +k op +mo ve +k ale +mac ro +mid west +h ans +tri bal +nu de +௠į +bey once +congratul ate +cat er +leagu e +ðŁĻ Ĭ +la dder +cra shed +tech nic +karao ke +harass ment +ro ts +experi encing +kri sten +ðŁĩ ³ +ðŁ¤ Ĺ +reflec tions +guin ness +illustr ator +ðŁĻı ðŁı» +cen ter +nar row +comm ons +regul ations +Ù Ĩ +har m +cro ft +cu ssion +hong kong +st ical +intern ship +zo e +cho p +hoo ds +estim ated +batter ies +berke ley +smooth ie +shau n +cro s +~ ~ +cam pe +hu mp +b g +proto type +cl ick +shaw n +re viewed +tem pl +p f +jed i +blo gs +ray mond +as th +ba h +av ail +scot ch +leaf s +nik ki +to k +hol low +ur ges +of t +un like +lat in +u e +cat ering +mil i +alter nati +ma ver +Ð ¸ +ag le +pre order +lu x +cu cu +ðŁijı ðŁijı +t art +âĿ¤âĿ¤ âĿ¤ +arab ic +rapi dly +ar rang +all en +travel tuesday +pa ws +flo ws +st ability +flu id +ca pp +can berra +uu uu +sp ani +demon stration +m la +plac ement +m w +presi dents +awe som +bever ly +ani st +ne al +father sday +referen dum +la hore +o aks +deb bie +half way +gho sts +de bor +matthe ws +fi at +t fw +pre sen +rob i +de d +bro ck +laugh ed +am ounts +bam boo +kinder garten +eat en +mtv hottest +break out +u sic +fra ser +legis lative +p ang +modu le +sam my +go ver +ear ns +expe dition +gar h +concep ts +char lie +la va +bachel or +veg gies +deter mine +el lie +un locked +fru it +dal la +cou pe +wash ington +depo sit +iv ory +pau la +chic ag +gu cci +ðŁİ ĥ +cul tiv +pier ce +li fted +stu mb +re cover +musc les +conduc ting +cb s +mcla ren +sophi a +cel lu +oce ans +up loaded +game play +mal dives +kim ber +avo i +rac er +ca ine +cav s +h ana +li ga +ra ven +inter vention +inaugur ation +oo h +at traction +merchandi se +tune in +li king +juni ors +int ended +att acking +aqu arium +i wd +comp onents +sur ing +cent u +yogur t +ðŁı ĥ +show room +op tical +ty our +ju dge +yi eld +an to +pl c +transparen cy +recy cled +chi ef +ar om +ambassad ors +plan et +âĿĦ ï¸ı +om ed +vaness a +cour t +mar gar +hal ey +v r +reg ina +pd ates +hi span +live stream +âģ £ +ya hoo +gal la +secu red +w ir +bene ath +off l +n il +am b +ye g +out let +u te +pe ep +lind say +bent ley +... ! 
+he el +trilo gy +vo s +ty re +there fore +tor onto +ab i +simp li +ja e +exten sive +eleph ants +s or +orient ation +im peach +re play +constru cted +peter son +pa is +por ted +custom s +colla p +ad u +high lands +sal em +shel by +ko vic +stra in +ro sie +sen ators +snap s +bo bb +suz uki +bla des +k p +lo lo +gener ate +si ght +ma e +struc tural +predic t +jump ed +ah mad +sun g +just ice +gla m +vol vo +jubi lee +de tention +lo sses +pu ri +every time +Ð ° +ra o +ed ge +li mer +rese mb +har old +re tri +sacri fic +surpri ses +am c +srilan ka +bar bie +men s +fin n +ag s +ukrain ian +em brac +î IJ +flav ors +hom er +lau re +ou th +pr iced +ver de +fir m +ah s +cu b +tre y +par anor +pro fit +in dv +who a +har sh +al ot +crit ics +hu bby +fi gur +gi ra +ca stro +chan el +in put +origin als +ten ant +yy yy +ture rs +lincol n +co on +lear n +ch ou +ac are +o les +din er +hy p +bizar re +mc r +let sgo +decor ating +ðŁĮ İ +al ison +ar vin +f d +reha b +mccar thy +lot tery +da h +minne apolis +eli gible +diagno sed +emer ald +destin ations +s ans +or y +bla zers +n v +ba il +digital art +no c +mal ta +sol ar +pi pes +alleg ations +no ck +po pe +bri d +premi er +n x +present ations +ef a +bo ws +val ve +opp onent +Į ë +visu al +ing le +cate gor +e ter +po is +dan i +at tract +neu tral +th ene +cra shes +fred die +ut ili +c st +awak ening +slo ven +quali fy +pro of +fair y +le v +fre ight +enjo ys +cup cake +flav our +â ķ +protec tive +ðŁijı ðŁı» +is u +ad mir +h mmm +continu ous +ai res +rap tors +showcas ing +y uk +pa ste +follow er +instru ctions +sp ru +@ __ +the o +debu ts +ve tte +sto w +es of +ach ed +sul tan +sand wich +som alia +franc o +car ne +flu ffy +al pine +jas mine +he ated +viol in +ple ss +divor ce +per former +phi es +port sm +dar a +kir by +lo p +chill i +for th +sky pe +ðŁĩ®ðŁĩ ¹ +celebr ities +ed y +ve e +po ison +ey el +gra bs +ssi c +un o +wester n +rail road +am er +numer ous +s v +fo w +fi st +âĢ ĭ +reque sts +mar tial +em my +accept ance +lau ra +ภ´ +er up +hyun dai +out lander +u tt +wrest le +esp resso +demand ing +g dp +geo graphy +sas kat +tro ll +confe der +su es +se m +be ts +t ful +to sh +teach es +col oured +gal way +mac y +dis orders +bb cra +at em +fen der +lit ter +e sh +provi ders +renov ation +nomin ate +ps g +nomin ations +jen na +shar p +some day +z ur +bra ins +che shire +pre y +hu go + ¿ +to ken +r v +car r +tac tical +zel da +kay la +fern ando +photograph ers +j our +umb rella +woo dy +congress man +du mp +le vy +ju an +d azz +sign als +la in +an u +mic hel +por ch +al den +sibl ings +y ale +pe el +sw ick +gg in +ll c +k ale +s con +il d +pat reon +re el +qu in +wit t +mar ty +moo dy +ton i +der y +g ators +speci fically +dd in +ly on +tr ick +meado ws +p j +bor gh +vi k +tu r +bron x +pu ff +lan tern +ðŁ¤ ¦ +g ently +be stie +fac t +refu sed +fas ci +mp y +ðŁĶ µ +cross over +mead ow +indian apolis +duc ation +sle y +loo m +mix er +new music +film maker +prosper ity +li m +week end +cre amy +neu tr +lu ther +h v +nor thern +tw o +h ra +cat ches +appear ances +ha bit +kitt ens +n v +illa c +inf an +regar dless +liz ard +dun k +cur tain +ac om +in tu +ve z +e min +fl ats +calend ars +em power +ru ined +hun gary +vi d +we x +u lum +aber deen +o sa +k t +ma ssi +se emed +s den +' ? 
+tele phone +de fi +insp ires +me ow +z ones +bl ind +pl y +tuc son +advent ure +ge d +oy ster +ðŁijıðŁijı ðŁijı +out put +tt t +metal lic +sma sh +ucl a +sco ts +perfe ct +lu cy +regular ly +sp ic +rel ative +ath ers +mis e +batt ling +deci des +mat a +occu pied +random ly +cat softwitter +gi an +ball y +al ties +al lies +im men +sy rac +ðŁĴľ ðŁĴľ +l lan +au r +k ut +lam ar +affe cts +n ra +star war +ðŁ¤ ĺ +sc ram +en chan +pro cess +luxu rious +ar ray +sher lock +comp ati +dor f +stre ss +m su +s with +sal a +sof instagram +fo il +under stood +qu ay +r p +c ade +ja w +en ab +en coun +ðŁİī : +do ck +satur n +mu ll +lay out +ra rely +happ ily +fix ture +or ph +over looking +her bs +m itt +pil lar +nol an +pe tty +str y +u i +mu k +o res +o vers +á µ +re creation +we sley +ri t +kejri wal +sto cking +g v +subscri bers +moo se +ma e +ber t +opp re +assign ment +u ro +high lighting +cal vin +we igh +cambo dia +av on +ke m +dis abilities +read y +char gers +p ads +iz ing +illi an +tru ste +col leges +associ ates +alban y +mil ton +cr on +bu r +har dly +si ghts +anti ques +e cho +surpri singly +ha iti +cap t +ph p +op io +ine quality +equ al +ken y +sch mid +autograph s +ren t +qu er +cit rus +challeng ed +te c +epi de +fe st +z hou +li me +citizen ship +cry stal +convin ced +mess enger +copen hagen +âĿĹ ï¸ı +war ran +develop ments +ï¸ı âĥ£ +fore x +hi ro +sne akers +xi de +vi va +stere o +bat ting +ss el +ho st +beng al +critic ism +q c +cr un +attemp ted +ry e +determin ation +cre ations +d read +label s +pos se +anc er +joh an +si ster +partner ships +les bian +k st +guaran tee +bar o +fix ing +ma son +m ous +chem icals +t less +bio diversity +par o +bhar at +ac ol +refu ge +en te +t iti +dys sey +respon ds +lef to +in er +se vel +rahu l +ol ine +frank fur +cho reo +enjoy able +c to +strugg les +wood land +heavy weight +gen s +rece p +ac cred +ðŁĺ ¡ +trans formed +list en +at op +n k +sur ge +be re +gover nor +prison ers +clau de +t ill +mu lator +emo tion +water loo +star t +ðŁĩ º +clean ed +grand mother +fear less +afric an +astron omy +ðŁı ģ +à¸ Ļ +the world +su itable +anth ony +k and +tt en +meaning ful +disc lo +jaco bs +à ¸ +tom linson +ghe tti +ty pho +sub stan +as co +te k +nag ar +mu d +am on +vacc ine +f ty +fle sh +no el +infl ation +portu gue +glam our +tra m +v re +te qu +roun dup +w yn +rejec ted +mosa ic +si ghting +cal f +o ta +com position +go pro +gonz ale +e ed +b ard +tu e +effec tively +we en +al to +ri bs +rel ate +thir sty +fu rious +di m +ch ard +perfu me +s ny +chur chill +k of +master class +wa ve +ðŁĶ µ +er in +own s +to be +sk illed +te m +go f +en i +tor i +cra zy +l ick +resi stant +ici al +ag ar +! 
: +g ali +del aware +bl itz +koh li +pu ck +avail ability +hi malay +influ ential +cro chet +victor i +read ing +ho bby +vie t +j as +en gra +sk ul +ðŁĩ² ðŁĩ +educ ate +tech no +distric ts +blu es +se tt +seven th +lear ns +ee ee +apocaly pse +hang out +cru el +mu tu +bru h +hel en +she er +c tion +kle in +tex ans +ce real +sh ine +ne red +gra s +am bro +f ella +hin du +matthe w +li ma +mir anda +je wel +so ho +euro vision +neighb ours +chand ler +be sides +ðŁ¥ ° +ast ros +thu mbs +ren ault +ra ve +hi red +ðŁĸ ¤ +it ary +z or +bla zer +k ine +ea u +kat y +dc comics +pe c +ro dgers +water proof +kill ers +super int +pre serv +as so +brew ers +promo tional +sc am +villa ges +sket ches +ju icy +for life +au dit +so lo +fundam ental +len e +philipp ine +t end +conserv atives +sponsor ship +dd le +a ine +h tc +os i +hul k +w af +à¸ Ļ +evalu ation +ant ine +sle e +robert son +roo sevel +ag i +sophi stic +emplo yers +bubb les +ko wski +inter action +sh u +bou le +ic an +j are +han k +leg itim +k nicks +kar ma +recei ver +per ks +u h +sta ir +sun i +labor atory +gra ves +voc als +oo t +c ture +thri ve +tic o +ãĥ ³ +b w +carto ons +mcdon alds +dra w +y ung +pl er +li d +eth ical +groo ve +ent a +international womensday +pat ron +wor ries +ðŁİ ħ +ðŁij ĭ +ka therine +di az +tor i +bach chan +tru st +min eral +ic om +buil ders +bor n +col oring +lat te +ca se +revolu tion +tra der +ox id +chi pot +inst antly +sou thern +se hun +pro b +her nandez +lis bon +hu awe +p ong +me a +ro oney +wheel chair +ke en +be tt +cor in +regulat ory +di splac +ka ren +sch em +sun sets +wh ales +remin is +he p +hi de +mar cel +pand ora +do yle +th fc +ot to +no kia +trans gender +ko v +hawai ian +sha ve +so vere +exc er +nick i +pu g +st or +ro th +wee t +leg al +dig nity +po w +hom age +ðŁĩ³ ðŁĩ +s re +can on +la x +wo ah +quart z +ñ a +gree ting +flick r +nai robi +advoc ates +an c +vi i +eu gene +th ra +c re +el an +pen sion +th letics +ton i +re agan +x v +sto re +ben ch +har lem +todd ler +sent enced +âĻ¥ ï¸ı +glob ally +che aper +u f +ma m +nic o +ik u +tho u +ni st +dam i +th ala +rho des +sal e +bow ls +â Ī +las vegas +sanc tions +adm ire +mat ched +un able +travel er +ele ven +straw berries +âĢĶâĢĶ âĢĶâĢĶ +stu dio +jac ques +im s +valu ed +s no +cheese cake +n xt +e os +s x +f x +ton ic +hat ch +chic ks +gra ds +hand ic +r ory +as p +ri pped +denti st +n en +lu fc +âľ Ĭ +di ge +hop kins +sher man +f da +for all +ash ley +str and +h y +liqu or +buffe t +ess ence +phar ma +suri ya +ðŁĴĻ ðŁĴĻ +festi vals +z an +re fresh +pur ple +uni forms +kenne th += ) +as an +hel sin +transform ers +k ali +person alized +chal k +bo bby +â Į +the mes +depar ture +prin t +illustr ations +qui et +agre es +gri ff +Ø ³ +m iti +toge ther +conven ience +ab ar +car lo +turt les +info sec +some what +ar lington +scholar ships +emir ates +mu ms +st ella +auton om +fe ather +g ore +nom inees +fragr ance +Ñ Ĥ +w ong +thea stern +gr e +z illa +is i +bump er +go o +do zens +ab duc +âļª ï¸ı +o ils +don ors +sil icon +i pod +fortn ite +ðŁĴ ¨ +tor o +spark ling +consci ousness +pal a +nu m +moun ted +ffin s +thi eves +team mate +pra b +om er +ta pes +bo d +mit su +ste w +e re +p bs +tu sc +lo we +ra de +parliam entary +h m +ed gar +ðŁijĩ ðŁijĩ +to a +a gh +hon i +s late +ge ek +ap t +hard t +ta p +horiz on +grow th +make over +hi l +paper back +id an +reha bil +gi u +possi bilities +let tu +fran co +bo ss +ach er +does nt +mo e +ta ker +huss ain +ml k +di l +th ia +ham a +real ised +raven s +curric ulum +m ith +k night +ted x +r v +isai ah +cumb 
ria +birth days +f ing +pre z +mu barak +exquis ite +clear ance +y en +par i +ev o +à º +modi fied +app lying +imple ment +disco vering +chap man +indie game +dis k +crowd funding +mach in +li vel +sty led +âĿ Į +ma king +rehear sals +nutr iti +subscri ption +and ro +cre ators +car ries +ky lie +cam den +appren tice +tax pay +c ca +tuesday thoughts +pis sed +er man +dete c +freed om +mer i +.. ! +psal m +sun light +per spec +be ings +book store +rock star +fun ctions +p ence +fav es +z n +obam acare +sp ill +coven try +pi geon +pi vo +ba it +kol kata +av al +don or +wa h +privi leg +tra ditions +rajas than +ten ess +portugue se +yn es +tack les +de fic +tor n +pol ling +thor ne +in a +bened ict +bar ry +cal ories +ver dict +save the +nor ton +off ice +main stream +impro ves +fr on +respon ding +real tor +scotti sh +de clar +r l +shi v +supp lier +re sting +swee ts +qu i +. â̦ +whit ney +startu p +thank you +teach er +h alls +ha ve +hand made +pro ving +quar tet +ro chester +li an +virtu al +mend es +of icial +mid lands +x box +meas uring +o vo +accommod ation +bri des +collegi ate +intellec tual +in car +ni ag +ðŁį · +sf w +coco a +co ats +civil ians +presi dency +mat rix +sweethe art +tri athlon +wag ner +ra dic +plann er +the o +execu tion +k um +the walkingdead +sc ar +ro tation +blo gging +bom b +re son +bb les +st are +assi sted +e do +brand ed +war nings +thor pe +acknow le +satis fied +sho res +ri d +dor a +phys ically +bi gh +appro ves +ha h +ric al +vers atile +pret end +lu m +ab hi +ye e +sp it +ãĢ Į +dj s +ash tra +j t +ven ues +gram mys +cy clo +tr acker +over watch +repl ica +el yn +nr l +lind sey +hom o +ballo ons +kitch en +si s +am os +ende av +ðŁĴ » +a rec +thu g +hoo ked +hr c +new york +bur gh +americ as +patric ia +ug u +ap athy +ha st +psy chi +cor k +petro l +ðŁİ ¬ +ak u +po pping +psycho logical +au x +g ma +cad illac +wa ste +auth ent +bri stol +nam e +que er +to ber +jer ry +com in +ch ant +privileg ed +op ar +lo ser +tex t +mar ker +stri es +equ ally +ak i +christ mas +gare th +ble w +em ma +imag in +se als +che at +conditi oning +j ana +ren s +dar ies +o asis +disc ounts +coun cil +i ka +shir ley +vou cher +al ps +w x +q r +dri ft +attemp ting +ut c +Ø ª +gonzale z +m f +jo ker +paralle l +pa re +aspe cts +proce du +n p +am a +rale igh +bright en +gu ire +radi ation +cre scent +ho b +il le +str and +v ore +n ard +che st +di wali +av atar +al der +d ling +pa thetic +ðŁĴ ĺ +spir it +jor ge +film making +ðŁĻı ðŁĻı +challeng er +b j +down town +ht ml +ade qu +twi sted +in ely +( ' +wra ps +oper ational +y ne +n us +mag net +market place +health ier +snap shot +dam on +inter ven +fe derer +ow ls +biscu its +j p +ro deo +blue berry +lec tion +fron tier +summ ers +re yes +pede strian +go l +caf fe +refur bi +bou lder +me ghan +speci alty +la ss +e i +suspec ts +appro x +rr r +ra th +st im +cru shed +he d +wh un +lo af +cr ore +river a +gene tics +so ck +wa sted +ny pd +answ ering +do ve +bel la +ol in +du n +fi ji +pre tty +spar kle +y un +j d +euro pa +li fts +am ber +mu r +te k +boy d +roy alty +in do +ri b +go tham +ti est +inst alling +ke mp +the photo +cos mic +) )) +whole sale +loy ment +eas y +su ing +sett led +af p +pro ver +suppor tive +re es +ne ath +deli ber +c é +wel come +pic oftheday +new born +pat ty +sun s +si est +fl int +diffe rently +spo ilers +troop er +g ins +cor y +look out +equi pped +ta pe +to by +resear cher +u sh +ke yes +al ma +induc tion +k w +k har +sl ick +bri de +e ur +cra ving +book ings +ch es +tr unk +vern on +sp her +cryst als +rel 
atively +pom pe +uni ons +val ley +par a +w ant +ok c +de af +ser gio +len non +sh ay +cr a +v at +he e +t we +liqu id +pol y +ðŁİ ģ +b ent +be aring +motor sport +bar be +te sti +han i +fin ancing +astron aut +water colour +ri sh +comic con +gar t +wr ong +ber n +it an +ste pped +fil ters +c low +me x +dem ons +all o +expand ed +comm and +et ers +go ats +si ri +y r +pot tery +mari on +i le +el an +san to +person a +du ke +hom eless +li ghted +wheel er +chang er +cab bage +sur real +ham burg +sma shed +str an +k not +i art +ob i +be dro +di al +th ick +b ingo +fu s +vacu um +con ve +ati ve +accur acy +accoun t +re fer +ri z +spider man +ban a +r ite +u b +ab s +medic al +lin k +si em +> >>> +be tra +g lowing +re actions +pupp et +spa ghetti +ang s +re medi +pray for +roy ce +char lotte +£ ï¸ı +gh et +affe cting +ro de +soci alist +mo ses +az i +o it +re porters +cd t +ap ing +s nat +minim al +wa ist +sie ge +>> >> +ri g +schmid t +h are +ec a +thor n +he mp +es the +cly de +th a +don ut +moham ed +ling erie +le gg +carpen ter +perform ers +de a +imag ined +cur se +la sh +ct r +agu a +ro ar +gr i +ro le +j fk +resur rec +roosevel t +maril yn +sm alle +will is +wa ited +char ities +the res +li k +origin al +car i +c ough +cru ci +la gun +contra st +k ou +arm our +re moving +t ent +maz da +bri ghter +thi ef +cor ner +tequ ila +buzz ing +al bi +p am +az ure +disc oun +pixel art +possi bility +ham ont +tra des +bu da +hi ve +vers y +fin ch +tran spa +em i +terri fying +in qui +g ba +sub stitu +collec ti +plac ing +cin dy +k ann +pa tho +diamon d +mour inho +guine a +anthro po +air s +pu mps +ì ļ +pas o +cur ling +an ita +resi dency +ne wh +jo on +cigare tte +que ue +ex trac +gam es +spl en +ex press +public ly +bon nie +tribun e +ba ek +reason able +c or +timo thy +she eran +Ä ± +f dn +su tton +concentr ation +carav an +x avier +al ger +cy lin +freder ick +ner ve +pe ak +lettu ce +j ail +pre game +kav an +up graded +eco logy +squad ron +gra pes +goo g +pa stry +ðŁĹ £ +ãĥ¼ ãĥ +mil ano +awa z +presen ter +ðŁĮ ¿ +her d +king s +tem plate +fl our +h v +k ley +i ya +spe c +at er +frankfur t +co ch +tex ting +del i +communi st +regi ment +ele anor +anticip ated +ðŁijĮ ðŁı» +thephoto hour +ran o +survi ving +simul ation +daw son +ar in +aqu a +m or +â̦ . +cin o +ira qi +sh az +dun dee +we s +dra u +hann ah +s news +occup ation +ste en +x m +ang les +sett ings +gur u +kno x +or ca +shap ing +w ent +dr illing +zz ie +br i +kis sing +fin d +ma ine +âŃIJï¸ı âŃIJï¸ı +ðŁĮ į +lar ry +bu sted +ta vern +acti vely +- " +replac ing +no d +un lock +. 
" +âŀ ¤ +affili ate +to w +l n +happy newyear +di f +j m +green wich +contro versy +daw g +con dol +sav annah +compens ation +touch down +te o +amb itious +embro i +convic ted +iart g +bar ack +tr ance +testim ony +au dition +thum b +my ths +be x +que z +orch id +den y +entit led +hoo d +gr ant +in box +blue jays +r illa +smalle st +bur den +in famous +divi ded +boun daries +t ter +el t +wy oming +be verage +me sm +one ws +budd hist +y ana +as sad +is ms +bar rett +predic ted +back to +tw it +e there +cap tains +escap ed +ay o +lam borgh +gard ner +la ps +k al +adverti sement +insec ts +na po +am en +ac y +r and +g k +te h +k athle +tri dge +pan cake +at ro +pyram id +bu la +paral ym +gau ge +en cies +tom y +biscu it +but cher +quali fier +coun ty +ke i +po ols +dar ker +should ers +ðŁĩºðŁĩ¸ ðŁĩºðŁĩ¸ +sp re +( " +writ ers +g m +ðŁİ ĵ +k nit +hu ff +mt b +philli es +o st +den is +g art +licen sed +inter face +ex cel +d well +from the +co fficial +az zi +appear ing +fore st +n ana +ke ith +manufac turers +beck ham +) ? +e se +col ony +delic ate +ut ter +mc in +transpl ant +pre ferred +par d +ari e +hu b +po ds +perspec tives +pic t +del u +app er +be than +p mo +crimin als +femin ism +sh ack +circum stances +fel las +prote sting +wa x +sugge sted +t ator +dre w +om ni +fa ke +kath y +re b +del ine +ber ni +mi sty +ðŁij © +er able +break through +men swear +millenni als +chan yeol +la z +inser t +rep lies +phra se +n x +ihear tawards +audre y +gran ite +rac ec +ori e +ter ra +innov ations +britt any +at eral +pe ar +bio logical +sh ments +institu tion +m sn +frequ ency +d man +neg lec +t f +ste fan +fox news +ty po +comm s +sequ ence +car men +wh ites +econom ist +exe ter +se um +re sorts +cas ually +bun de +divi de +Ø ¹ +ga g +cre ed +reti re +cau cus +rapi ds +wrestle mania +tul sa +sunder land +fundam ent +o di +yam aha +v ary +intri gu +el se +be acon +an gie +tra ded +tran sm +g ents +kn itting +gal ac +ðĿ Ĺ +u to +sea side +hol t +re rs +far go +train ers +mon soon +b ale +sou ght +mad die +h w +co li +fr an +fav s +ðŁĴ Ķ +int ent +r ally +s bs +lemon ade +barack obama +bre ad +stick y +explo sive +chel ten +t j +as soc +ram en +hom ies +v log +mi ster +lor d +âĢįâĻ Ģï¸ı +aly ssa +sketch book +ru mble +cat ch +migr ant +discipl ine +un likely +chronic les +fl ora +sl ams +am id +s boro +coo p +ju mps +tran qu +mel is +sof ia +en ri +gab e +sy ri +nicol as +cha i +w v +be cky +foo ty +ta o +suppo se +ðŁĺįðŁĺį ðŁĺįðŁĺį +plu sh +ri sh +ðŁ¤ ĵ +k ha +satur days +ac cent +he c +lim it +carl ton +wi red +taylor swift +ðŁĺ ij +sq l +har ro +recipi ents +g at +go p +th of +amaz ed +gh an +ðŁıĨ ðŁıĨ +por to +cla re +di stant +na c +ohi o +ðŁĻı ðŁı¼ +mt n +anti bio +dino sa +me sa +par tial +b v +lear nt +lov ato +questi on +ex tract +gossi p +gi bb +niag ara +ðŁij ¨ +displa yed +so oner +ste vie +nug gets +ml n +bro m +tur b +give aways +stu pi +bl ink +c ili +conven ient +mo h +vi ve +f ric +cau se +cham ber +cu les +ne arest +is se +small biz +t j +canadi ans +smar ter +bra sil +ra re +que tte +w ha +cand le +at omic +ðŁijį ðŁijį +warri or +relax ed +stri ps +ne ur +k ka +r fc +jen sen +reco vering +respon ses +sal am +ortho dox +acti ve +ell ers +n it +âŃ IJ +metro politan +centu ries +vi da +gra ding +transpa rent +sim ple +do ts +superint endent +elev ator +autom ated +red skins +ima m +summer time +jona than +ge aring +michel le +confl ic +m ice +to te +publi sh +pa x +) - +na iled +á ´ +tele scope +ser bia +ba b +ape u +st ically +sen ti +r ats +isol ated +grou p +hat red +paranor mal 
+stan ley +ali on +safe ty +l s +ठ° +nex us +alexand ra +mas ks ++ + +tr on +au k +brother hood +brow se +mix es +sim one +mu sk +appro ve +lo la +ex p +per th +fu turi +un seen +d m +chel se +sc outing +o we +portsm outh +k ram +mi ze +di spen +su p +d lc +adver t +tere sa +is le +cy cle +met all +shi elds +marin ers +ra z +ing en +fun d +an go +jon es +o ka +mad den +broc coli +domin ic +situ ations +mer o +cric ke +puni shment +d b +sha king +ðŁĺ ļ +m q +ari ans +le h +cla w +we ds +d ure +ni el +j elly +gour met +tra ders +le vi +w ages +kne es +wi se +heaven ly +avi d +melo dy +z ack +ban anas +apprentic e +pro p +fun ny +o de +respec ted +me gan +fe wer +dra fted +med it +gra pe +us army +cru sad +vo cali +prepar ations +non sense +us age +th r +ro th +wiz ards +insi de +promo tions +mon a +red sox +si g +eleg ance +ch ia +univer sal +ãĢ į +ra ja +un ga +pol lin +filip ino +ak a +t sun +ik on +bi king +decor ations +z ac +cade ts +hum our +ag m +re ppin +vac cin +elo ve +u w +dia be +galla gher +az er +do l +a while +pro minent +wel sh +t ann +' ) +bi en +wa g +in al +c wc +wic ket +ur st +q anon +x e +out door +dun n +star r +co logy +ric ky +u efa +reb ounds +s music +inf ant +ðŁĻ ĭ +so p +u mber +hand ing +beg in +sor ting +ha sh +sp ati +re k +buda pest +black hawks +dele te +ro m +can did +auth ori +de bris +spe cul +inter section +marri ott +im ran +ðŁĺģ ðŁĺģ +cru ises +ram sey +rafa el +aware ness +vas cular +beyon cé +ru g +ðŁĺ Į +festi v +ar am +s able +bas il +p ill +flo oring +un beaten +implic ations +u f +w ound +for ge +poin ting +po ts +popular ity +ðŁijı ðŁı» +mani pul +s lots +deb ates +abs ence +ver mont +never forget +wri st +gl oria +ren ce +hu sk +mel ting +ðŁİ Ł +br aces +tim ely +transform ing +am ps +ma k +po e +ah an +gener ally +nd p +ale ppo +unic ef +pro fs +nor d +ma sk +jackson ville +v v +sh ells +bloom ing +oper ators +char coal +ne ville +ma gi +chi p +sam a +ir an +re forms +accu mul +ru e +æ ľ +web sites +ga on +devast ating +sto s +glaci er +ra pp +chipot le +pr a +or ous +rom ney +seas on +decor ative +c isco +dit ch +compla in +ll o +assu me +ðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤðŁĺĤ +n els +cent ric +ft w +car rots +tat a +can ter +per ience +li ers +demo s +bl unt +oper ate +reserv ations +le ah +sub stance +di son +an te +elec tion +v ue +squ are +non profit +ca a +f su +y am +ãĤ ¤ +v ladi +comple tes +mar i +philli p +ne ill +er as +ka it +men do +mahar ashtra +g p +dan e +provi dence +ther apeu +juven ile +me mo +in corpor +aa aa +seven teen +teen ager +à £ +or ns +wi de +cu teness +tw d +ff les +bar a +com edy +over time +y az +bar on +unemp loyment +ðŁij ĭ +exter ior +den se +cent res +match up +history month +artif icial +qu it +e sk +war n +cr itic +j af +ðŁĵ ² +inform ative +fu els +recy cle +nam ing +stri pe +sol ic +mole cular +dee pi +con vo +s sel +na e +de scent +ti z +accoun tability +ter ry +r ito +sl ay +em o +dem ol +sens ation +co v +tor e +round table +y ol +excu ses +ॠį +tur quo +hh hh +pod casts +cele b +me ssi +li o +man n +contribu ted +u z +gener ator +ele ts +veg gie +indu l +en suring +detro it +pun jab +tran spor +instru ction +ad d +por cel +pan eli +cir cles +persi st +clay ton +sp n +dog softwitter +is nt +sp r +retail ers +p w +hun gar +el ena +mon aster +gu atem +je ssie +an z +ra shi +fle e +car ving +fau x +l al +hen ri +d jo +du ll +s ana +lar a +glo be +cri mson +com pass +pau se +na b +lion el +ba ths +u fo +invent ory +sin gh +sat an +ðŁĩ ¸ +ce ments +in form +gener ated +bi den +av g +tas ks +de er +sa u +ja iled +pa stel +sc 
c +na il +steel e +per is +lamborgh ini +pur sue +mar gin +u ch +bo sch +dra in +cl ara +bo m +lat ino +web ster +rose mary +r ha +s oun +billion aire +not ch +percent age +con or +' " +hom es +earth day +h ort +big gest +di sin +wal ton +edit ors +im ma +om ar +equi valent +pharmac eu +ah med +cam eo +han ni +under rated +ge ment +micro bi +v oo +honor able +obe sity +âļ ¡ï¸ı +limer ick +invol vement +st agram +boule vard +bur g +blackand white +liber ation +fi ve +inter im +sm m +rival ry +cap abilities +stat ements +thu mb +ve d +sw ans +bar ber +e que +seren a +hel m +noo dle +sam pling +n awaz +sing le +thunder storms +sh on +in ev +ë ¯ +to pp +orch ard +bi an +ðŁĺ Ķ +door step +salv ation +marke ting +r ons +cle mson +ra vi +in take +stand with +sin a +ha iku +ple y +elector al +ph illy +la ys +electr ic +cap turing +u pp +er gy +believ ing +cul tures +es day +inva sive +ed ed +spee ch +end ur +viet nam +boy cott +pe de +deli ver +ðŁĴĸ ðŁĴĸ +mer chant +st ir +den ies +poc kets +o ti +cu ddle +ro land +mm ed +den ed +lear ners +hoo p +sour cing +h acked +di m +environ ments +ben son +jud icial +wor cester +pear ls +govern ments +arri vals +cor ners +tun ing +la bour +y m +or dering +le wi +i fe +hygi ene +thou ghtful +indone sian +campaig ning +princi ple +assau l +ru bb +at v +wil ly +en tre +il i +ph on +du ties +âĻ¥ âĻ¥ +sn akes +lo op +am ar +conver tible +bon ding +ment oring +max well +ethere um +destro ying +ax is +ca iro +fin nish +sho ck +ðŁĺ IJ +cal eb +com a +pe dal +co re +contin ent +el son +temp o +helsin ki +ac p +tack ling +st ated +bl a +dou b +sma shing +a ja +camer on +disru ption +warm th +being salmankhan +bullet in +o de +syrac use +ar an +mc gregor +bul k +an ton +confir mation +sp ine +im ran +instru c +jac ks +chi o +pal m +str e +embarra ssing +un t +elimin ate +to ss +c ise +a ws +oni sts +sh inee +jo s +ho se +li vely +opp onents +mo vements +recogni zing +sandwich es +sh akes +exerc ises +se at +profe ssion +merry christmas +lu gg +adopt dont +mar vin +byr ne +un le +he t +ku wait +rah man +aspe ct +humb led +gen es +f and +long time +) ; +cam pu +an gus +ðŁijį ðŁı¼ +q uran +sle eves +s lic +¸ ë +twel ve +your e +i ke +go gh +b st +dic tionary +reflec ting +to on +yar n +em bed +ðŁı ´ +re serves +floo ded +ver iz +du sk +estab lish +pro li +au d +ritu al +or bit +declar ation +recor dings +cam o +cas sette +good luck +cu tter +bo p +b ho +che ating +paci fic +ma res +tim er +col t +tr ous +tomor row +han sen +ci e +w ang +ban i +circu lar +ac ute +far mer +co ys +p se +ir ving +w j +haw kins +b ison +ur day +cru ising +o te +k ath +whi stle +your selves +ant is +sla sh +thorough ly +ke sh +ser ie +ex em +en ig +guil d +sh red +ho gan +ap o +ä ¸ +pu zz +ne tball +au ssi +panor ama +ws j +av is +ar ming +hum ph +brow ser +cri es +fo ggy +mat te +ðŁĮ » +it er +tal lest +by ron +cap tiv +je su +any ways +flag ship +p ton +we y +fay ette +financi al +f oul +solom on +jenni fer +cucu mber +ar gue +tex tile +wrest ler +john ston +pa stor +ðŁĺŃðŁĺŃ ðŁĺŃðŁĺŃ +cac tus +edi ble +re served +ric hie +met res +ingredi ent +h ella +un to +ch ol +cele bs +po ets +gra ham +hay den +coinci dence +b aw +communic ate +flet cher +/ - +tole do +ecu ador +coun sel +s laughter +line ar +at p +os u +jo el +ev ed +conqu er +ru stic +plic ity +recogn ise +room mate +cr acked +jas per +ph er +ðŁĮ º +wo ven +mo ist +ff c +ste ering +ni sh +stand ings +frequ ent +ar di +haz el +as msg +bau m +d art +si dd +nat h +ch ero +card board +c ss +n sfw +pa ir +ðŁĺį ðŁĺĺ +occur red +homeless 
ness +mal one +ph e +xi a +pad dy +decl are +theat re +b f +per sian +ta d +ax e +susp icious +lam b +mu cho +sen ior +st as +k ite +st ing +gra d +k af +wat ering +Ø ¯ +spi ral +th ms +educ ator +jer ome +of c +clo ck +su l +pe mb +.... ..... +park way +de aux +restric tions +m ons +need le +e j +le agues +water melon +am an +pl enary +max im +w ab +coming soon +bry ce +vi gil +super market +fortun ate +turquo ise +presi dent +li v +inter ns +feel in +fix tures +stun t +st aged +premi eres +lo k +prac titi +shor tage +log ne +ve c +con cor +roc ke +li g +com posed +syn thetic +di p +cam ila +ch is +j ou +su san +eye brows +supp lement +satis faction +moham mad +ti bet +house of +pu n +as sam +shado whun +psy ched +se duc +mand atory +her bert +sc allo +stream ers +proto col +block buster +produc es +sch nei +lau rel +tri be +time hop +pl a +mod elling +tv time +mtv stars +wi dow +me tric +ch am +con do +flow ering +ale c +d ms +inten sity + ¨ +mccar tney +islam abad +k b +f fi +ph al +anal og +f ond +h acks +positi vity +treat y +sub marine +conne ct +sel en +categor ies +cu b +organi ze +si k +quote oftheday +remin ding +am or +loc king +ðŁijı ðŁı¼ +comp ound +et te +b out +rec ur +fe rence +mi zz +tren d +hip ster +for tress +forth coming +preli min +o dyssey +ang p +del ici +even ings +ðŁĶ ¹ +i q +d w +da ir +kathr yn +christian ity +moon light +ha b +wh oo +f bf +se th +genu inely +pa x +char ity +deplo yed +b nb +bu cs +ju dg +con ge +plant ation +im press +car a +sc lub +sco py +land ers +compla ints +b ama +re build +x y +real ism +sh our +le in +brac elets +mer a +assas sin +an chor +ðŁijĮ ðŁı¼ +lin en +con fron +chronic le +comm ent +cat alog +il les +gor ge +me try +jung kook +love my +sent in +se em +fit ness +alli ed +ts man +digital transformation +pr an +lo ft +min ton +alden richards +en vel +cher ish +certain ty +zz z +rhin o +per kins +en rich +cape town +ome ter +sec tions +ske leton +def enders +ðŁĺ Ŀ +pen c +bri t +ja h +capital ism +ðŁ¥ ĩ +baz aar +re me +ex t +kk k +conver t +stor my +b ye +kar an +chry sler +ad os +pre ssed +syn c +ation day +dang er +bad ges +refu ses +em powering +ly m +ex ports +adoptdont shop +ðŁĩ ¯ +th c +awa ited +focu ses +fin ed +o at +haha hah +âģ © +n family +fi ona +luck ily +thr illing +ty ping +out break +di es +he u +craw l +ne sses +o ath +scri pts +gee ks +ðŁIJ Ŀ +p b +mathemat ics +al is +________ ________ +gymna stics +acti vism +recommend ation +gre n +wa in +cour ty +n apol +cau li +hor nets +g als +jo ckey +dir ty +at ar +enor mous +pe st +greg ation +an os +ii ii +def ends +black historymonth +at x +mb c +lugg age +wit ch +co b +la sts +cu m +gg g +ba thing +n ar +ce bu +ðŁį ĥ +navig ation +min e +re jo +ðŁİ Ģ +gif tide +re ta +use less +pu ll +defic it +al lu +ati me +it v +tr illion +pu e +ac ies +proce dure +l ori +jen ny +c ad +ul ously +dr ac +promo tes +ing the +can u +woo hoo +na omi +zar dari +ts u +be ir +sd g +le ver +we ber +ab ud +lun d +crow ded +deplo yment +ter rain +ken ny +ho f +witne ssed +lo ch +j k +bul ly +w ren +poe try +do ff +ww i +mo red +din i +cul ture +promp t + ¥ +maur ice +to pps +r m +cor respon +ab out +jewel s +gi br +eag le +ðŁĺĺ ðŁĺĺðŁĺĺ +l ending +sou ven +ç Ķ +contemporary art +establi shment +j ong +â̦ " +gat or +patri otic +mc coy +v ape +human e +feli z +coach ella +re posting +ste als +fu ller +n ering +at ra +( - +bla ke +he ather +wor ms +discipl inary +rede mption +y ard +am in +" @_ +d nc +t ds +k appa +ne wark +comm its +spe ars +j ams +t and +msn bc +inter medi +aim ed +at ic +teen 
th +observ ation +kash mir +kavan augh +ou l +san francisco +re u +bel ated +cho w +pass word +st ills +deta ined +sar i +day ton +dar ren +itali an +ar th +amu sic +ar bit +w m +v m +he m +dou g +my r +a sho +pre v +vin d +bra h +sta g +ภµ +pre views +gu k +con taining +leon ardo +sad dle +ru shing +st av +lon gh +gam bling +ve gas +reserv ation +end ale +bal a +fl a +vari ant +he dge +bulgar ia +nat ali +we aver +sol st +encoura ged +ap c +as parag +ne st +cycli sts +fe l +ìĬ ¤ +overwhel ming +pey ton +j it +a post +mb le +ble eding +neighbour hood +a very +expre ssions +mac donald +gi gs +mon ds +illu sion +n ct +cam ero +over head +my th +ol y +vi o +et v +lau rie +unve iling +pri or +con n +iron man +di ff +day in +crit ici +con go +re vision +wal e +direc tor +p ines +black pink +gar ner +cur ated +manit oba +h ac +common ly +bar ton +.... # +mor tality +live smatter +philos op +shor ter +con vince +fre ak +vend ors +insi ghtful +el ly +sens ors +e led +s berg +weight loss +u kip +sp ur +priv ate +qu a +ss c +, ... +supervis or +advis er +amaz ingly +less er +at es +mah on +oooo oo +sar as +pmo india +waff le +un ders +toler ance +sculp tures +her sh +kno cking +smo ke +cathol ic +gri m +tra veled +fli p +ge off +dinosa urs +sle pt +scar let +ok i +compla int +ob sc +nam i +la g +cross fit +u fc +mc cain +refe ree +sad ness +pen ny +li eu +mo de +ki er +vol s +w is +el on +she a +ba o +son ia +cla ire +em manuel +moist ure +di gest +vi ii +t eller +ch on +access ory +night club +foss il +aw an +hu sky +ab original +brand on +ffici ent +cou gars +ste d +ad mitted +igno red +content marketing +ag as +v ase +execu ted +negoti ations +she ad +n and +tab lets +go th +ts al +d fw +on ep +protec tor +sp ho +gaz ette +andre as +ss er +comp ilation +ha v +contain ers +bro ker +soc al +porcel ain +hy uk +air ing +ðŁĴ ° +publi sher +scen ario +spart ans +re viewing +itu des +ed el +pear son +ba sh +mau i +a ad +ðŁĮ Ĭ +li u +ul ate +program mes +fav our +web design +real ty +motiv ational +cro sses +' ... +bus ch +adjust able +ar jun +mist ak +dimen sion +pi stol +weigh s +en y +unve il +indy car +gor don +f ade +fran ken +qual ities +bet t +loc ate +ker r +sp c +confu sion +ne e +luck y +bas es +dep ends +fire fighter +ol a +re t +mar oon +ðŁĶ Ĭ +w am +defin ing +whe at +bi l +é s +b hai +psy ch +ta u +ic ans +thi k +ob ile +inspec tor +ìĨ Įë +ill on +go s +ev angel +fa i +si st +voc ation +bur ge +chi stan +renew ed +enthusi asm +en ting +ag ri +ike a +m sc +aero space +sens iti +memo ir +hosp ice +co caine +der ry +mechan ics +Ħ ภ+tin o +reduc es +collec tors +in justice +supp re +v ana +ab un +nap a +su sa +os lo +e ff +en core +lic ence +ched dar +z al +moun t +ðŁĴ IJ +threat ens +!! 
" +archi e +fu tsal +scu ba +jo s +gn on +se xi +s official +compar ing +domin ant +tof theday +fa it +propos als +gi ft +y as +cn c +l r +ha b +reser voir +beli efs +gener al +mar ti +t d +est e +ì ł +wi l +ðŁij ¯ +ðŁĶ « +sp x +et work +excer pt +e instein +hir o +sil hou +team ed +per ception +corri dor +mental health +hin ts +ben ny +induc ted +sw x +wi desp +spe ak +cher yl +dru g +ðŁĺ ķ +h f +asparag us +myster ies +fitz gerald +off er +therap ist +care er +dam aging +ts d +per u +wei bo +y ay +phoeni x +disc re +mac book +bar ker +stig ma +sp read +roc kies +kang ar +bri dg +pa i +bi shop +ta iled +capsu le +ðŁĴ ĵ +ge of +roy ale +short listed +o ste +ash amed +ch app +key e +cl a +screen shot +austri an +nati ve +en ight +juli et +michel e +ðŁĮ ´ +travel ers +pi l +football er +win chester +ðŁĻ Ħ +azer bai +gold eng +organis ations +interpre tation +predat or +ofthe week +lo gan +pok é +mari e +cal la +t nt +cin de +ge tic +fit fam +gra v +ow ens +ðŁĮ ± +shoot out +sal is +commissi ons +co he +p tic +ni xon +hi a +amb ition +mar ine +cruel ty +t k +cru de +sal ty +jim a +mon go +ir ony +on wards +arre sts +strang ers +ig er +cycli st +ra g +exten ds +tra dio +bour g +mo i +el la +e able +lex us +au l +der a +histor ian +mor ton +ti ff +man ner +ko t +d k +po inted +mar qu +a an +en ey +du blin +on poli +em ili +secre t +fl o +âļ ¡ +ba j +ste ep +accompan ied +rum ours +dev i +purch asing +fi g +pu b +sch oo +autonom ous +go alie +x ia +autom atically +re vers +ter o +fu ku +titan ic +shoo k +sand als +see kers +exc av +nor dic +bigo live +ba ke +r att +z ak +ne p +ðŁĺ ¤ +cand y +billi ons +book worm +pp et +à ³ +sur faces +sc ars +phil ip +do gg +ci gars +co te +transl ated +cur ator +sin dh +han gover +bre wer +on es +el ton +ðŁĴª ðŁı¼ +mar cu +elli ot +righ te +di oce +ru ss +rail ways +grand son +as cen +apo logy +awa it +mob ili +re spir +parti san +oli vi +stri ke +yo o +white house +expre ssed +pu ps +bed ford +cul tur +fro gs +fly ing +cav ali +c ds +fri ger +street photography +re solve +tali ban +kan g +cru shing +ju m +ðŁĺ Ĵ +william son +tan g +cur ly +t man +veter an +fa ire +artificial intelligence +un anim +pre n +back drop +fr ances +oc cer +doro thy +work ing +ar thr +conver ted +day light +serv ant +pad dle +compla ining +thir ty +nad al +ak u +ibra him +ad dressed +p iss +green house +batt alion +si mulator +out lets +embroi dery +ðŁĵ ± +fis cal +ger ard +sas sy +ðŁİī ðŁİīðŁİī +vent ures +mer it +public ity +ðŁij Ī +sophistic ated +c tu +conven tional +condol ences +isra el +tra dition +ar an +te ss +gla d +ðŁĺĬ ðŁĺĬ +correc tion +ge on +am d +or ship +be ast +ch ment +ì ŀ +nic o +wk nd +wel s +cushi on +beli e +vo c +idio ts +under neath +pu ma +corn ell +en ation +lu l +swa ch +ab ig +u rer +mi e +form erly +ca f +er nal +chor us +juli us +sen ator +âľ į +wh ir +salv ador +ph d +uni fied +boo ster +graph ical +w rec +son ny +mi z +dere rs +s all +ven s +tusc any +wi d +y ong +kur ds +w az +trol ls +mac ro +cat urday +pre ssing +sa sha +cent ennial +gu sts +em c +be fore +den ise +cu st +ðŁĵ ¢ +lo oo +base l +eng land +y olo +ar du +manife sto +do ha +ì ľ +kni ves +bourne mouth +bi bl +bar b +al icia +Ø © +com er +cycl one +g it +ane ws +character i +vent ura +in tra +sf giants +hu t +be a +dar win +ell er +al v +re ese +bl y +kar an +conclu sion +man ny +fla kes +unite blue +nad u +co pp +ed ges +lanca shire +i als +o tta +philipp e +l ent +che e +ment ors +festi val +an ism +compli mentary +r j +pu g +d ine +we i +cli ffs +sar my +ti veness +treas ury +il and 
+after math +rabb i +ou n +bou quet +herit age +zi on +sur render +shen an +in ks +kar l +gh ty +pol icing +exam ination +ce y +per su +measure ment +hydro gen +lu han +âłĢâłĢ âłĢâłĢ +war i +о Ð +j y +fow ler +mis h +al fre +âĺ ij +bb naija +cat alogue +recogn ised +sa ver +hu skies +col in +mun do +si va +p ng +discoun ted +man utd +fre sno +de vin +prelimin ary +tro phies +pla stics +du g +pro cu +indi go +g ard +dy lan +pit ches +ground breaking +in son +bl ac +an thology +f h +expl ic +r ard +admi ral +so chi +la shes +splen did +en vy +ad v +sex y +festiv ities +stic king +bi b +thr ill +op p +ari el +botan ical +endur ance +fe males +br icks +vat ican +black pool +ber mu +br ough +roll er +bi d +sue de +sloven ia +mm ing +ml b +med alist +di ans +rehabil itation +ne on +s go +li thu +ram os +z ed +pi anist +inten sive +broad band +stu dy +peter sburg +lu ca +ah hhh +phys ician +dill on +tele com +gri ef +mu n +ac ro +si ded +s ly +blo ws +classic cars +tri um +ar gy +? : +h ri +marsh mal +âĢ ĵ +to pping +war saw +tran sc +preserv ation +b av +re friger +experim ents +ä º +gl it +sli ga +g age +fac tor +flav ours +br ony +sp o +cook book +carri age +aw ay +ny fw +on ian +w g +simp sons +ro lex +ðŁı ¿ +cro sby +ãħ ¤ +cre di +syn dic +pu bs +ali fe +poor ly +mac ed +ðŁĺ ŀ +behin dthe +w enger +n ats +ðŁİ Ł +rubb ish +procedu res +typho on +opho bia +er do +fu el +vi era +bu mps +millenni um +new zealand +lec tures +it on +mil ky +respon ded +ê ° +landsc ape +.. @ +bo ther +âĸ ¶ +z hang +huawe i +tu ition +s worn +in u +y or +pa olo +au ditions +ab il +malay sian +ho ps +fe athers +mp le +au ts +ã o +boun ty +ic he +ì ĺ +sh q +pin ot +ge ars +disapp ear +video games +t na +alzheim er +ðŁĮ ŀ +a ji +under wear +swit ching +sign age +o scar +ec on +dro w +cl int +pl ated +gun dy +emb lem +ho es +ici st +nel ly +juni or +road show +miner als +at le +alexand ria +ac claimed +v ell +shi va +ad he +en ne +amne sty +h ounds +councill or +ðŁĴ ¦ +aes the +part nering +influ enced +mag no +fl are +extin ction +civil ian +maje sty +va il +law makers +rac ks +mc c +ori an +sp ices +er rors +may er +co ca +pa i +s ooooo +reti ring +ba thro +ðŁĻĮ ðŁĻĮ +âĸ ª +su f +endor sement +buil ding +broo ch +pal la +arvin d +ag ent +kar ate +r hi +c tv +ta ine +um m +ba x +reig ns +uni of +enterpri ses +adel e +fla ke +at tire +bru ce +ba hamas +gra vy +sa in +che ek +tri vi +lo v +e en +bb lo +lady gaga +itt a +. 
"- +du stin +observ atory +eigh th +bloom berg +kh s +f cc +gi st +commemor ate +ve er +sexu ality +ed c +nic ole +vac ancy +u ser +son a +:' ( +dipl oma +t end +up grades +Å Ł +jura ssic +cardi ac +dr s +widesp read +à ł +dail ies +vend or +sim plicity +wi der +len ses +supp lements +de pos +ob served +vin es +parti ally +renew al +collabor ate +ali g +fin ity +ph u +zz y +pe tit +ðŁĵ ħ +z in +i gu +sm ack +fall on +ðŁĵ £ +back wards +comp onent +o so +compati ble +bin ding +zur ich +thom e +w ounds +ly ric +fresh men +sne aky +fi bro +di et +emplo yer +in sect +h ated +sch er +raz or +n sw +boo ker +califor ni +av fc + ° +preten ding +pep si +al is +un titled +k art +grand parents +e the +o ck +lux emb +visu als +small business +abdul lah +min ho +su baru +h ra +reve aling +heart breaking +clar ity +am g +sl r +** ** +âŀ ĸ +recor d +ici ary +min ded +ye h +exce ssive +knu ck +icec ream +tru th +ev ic +ta stic +ant arc +ren dering +, , +mit t +loren zo +st patrick +bound ary +zi g +vo cab +osa ka +fur n +tu n +gu l +s ounding +blo gger +utter ly +g af +adv ancing +l cd +mar gin +lifel ong +solst ice +sh ra +wa its +ple ar +bre ach +en ligh +ad er +itt le +c ation +ho on +stu died +?? ??? +k ash +ev angeli +ps l +wei ghts +met als +ty res +tur no +wi e +car b +g ale +se al +sun ite +am ic +patter son +á n +eu ph +up stairs +quali fiers +khali fa +apple music +ìĨĮë ħ +vau ghan +al ter +cru iser +mu a +t ana +kat rina +id ols +spo iled +secre tly +fi bre +part nered +um es +gi ov +com et +screenshot saturday +k eller +fil tr +fe t +con way +pe u +bad minton +gi d +m ound +don key +bu ff +lea ther +lar gely +bro ch +int ments +am use +r k +sto ve +impac ted +con t +cr acks +prison er +bar i +contrac tor +ori oles +domin ate +pol ar +am elia +dr c +ðŁijĮ ðŁijĮ +vi st +su arez +injec tion +blo oms +ðŁļ¨ ðŁļ¨ +sti ff +pay pal +sno wing +thur sdays +goo se +we dge +educ ated +weak ness +de cker +abud ha +bree zy +Û Į +hope ful +o bi +rai der +gh am +de u +se ve +par tly +fu t +infu sed +mer ri +than e +some time +hu e +me in +cre dit +sli ding +ran de +cher ry +dead pool +sh ol +ar am +under wood +sky e +distur bing +m nt +poli shed +guardi ans +ha dn +pic asso +ari us +ak shay +ir ri +j h +happ en +la kh +dal ton +at the +s well +mar sha +re h +cour s +j kt +top us +serv ice +r ink +hack ers +dono van +hor o +tc m +may hem +cha se +dev ops +ken sing +sc up +sh ere +quali fication +c live +ton g +n ancy +mar is +der dale +ber man +cinde rella +jol ly +ci c +loo t +collecti bles +hom icide +g ge +epide mic +su ites +mu ddy +gi mme +e rec +- * +tal la +lis le +embro ide +ðŁĩ© ðŁĩª +veriz on +ve ctor +be anie +arti san +ga in +flo res +vi gil +u so +ðŁĻı ðŁı½ +grin ding +gh er +air ports +respon sive +shaf t +can cel +ceremon ies +e me +at ari +bru shes +eag er +bo hemi +children s +yan kee +ma a +suspen se +mor an +mac ar +sun flower +cre w +vo id +ke ar +fashi oned +jen nings +sunday funday +sub missions +me ad +her man +wa i +crit ically +le um +baek hyun +for cing +co bra +ãģ ® +acqu ire +al k +ge ology +pri mar +import antly +ire z +bunde sliga +curi osity +sen a +stric t +con soli +win ters +ven om +chelten ham +ðŁį º +cen a +t at +ba in +glo ver +under cover +as ses +car n +memorial day +am eli +i rene +ch on +syn thesis +spe edy +mitsu bi +sla yer +compos ite +under stands +pe w +inter rup +hen ri +mor row +an om +thof july +g lee +thre e +ðŁĺ ® +and hi +ch att +renew ables +ye s +trans fers +!!!! !!!! 
+bab u +du ter +lo ops +pe ers +o ilers +pau lo +ic ation +h mu +war a +mer cer +hom eland +fu ji +ale y +year book +re m +re en +ab sur +bo is +] : +caes ar +shot gun +kur dish +o ren +ra e +anci es +ty pic +f h +def ault +re plic +lu k +trans actions +r ys +infan try +ðŁį ¾ +cho w +chick ens +ba gh +wy att +ay e +gg i +bre ws +ed itions +mi ra +commen cement +pre su +peris cope +ic hi +guatem ala +zam bia +pain ts +wit ches +wan i +un dere +cro y +vo ws +us mc +hear ted +theat res +shu ffle +le vel +mul tic +squee ze +fer n +app et +post al +mal t +on board +ld nt +co o +s sc +k ac +ðŁĺ ĩ +sc rap +mar cos +deal ers +ann u +mill er +co ve +ul ary +vladi mir +be ef +th ur +pick led +se same +bengal uru +mo tt +kathle en +hi st +no tor +dr ank +du chess +snow fall +e ff +tin y +j n +sy our +speci alists +scot us +bay lor +eve rest +mali bu +pre m +harm ful +l ali +b ates +g ye +differen ti +and ra +geome try +el over +black out +== == +ko ta +inter act +asi an +la yo +samu rai +fi del +exhau sted +gla di +pd t +spher ic +anti qu +guit ar +stu ri +ho pper +ang le +f ills +sla p +mi th +rod ney +ong i +in som +pre venting +cassi dy +ap ho +ore gon +lo in +ham mond +contribu ting +f n +gar ri +ori on +comp elling +escap ing +aim ing +plu mb +bi stro +be asts +concer ning +bo e +do pp +shop local +stumb led +âĤ ¹ +naz is +âĢįâĻĤ ï¸ı +gest ure +war ts +us open +hi ggins +char li +hang s +bom bers +° : +fe eds +c ch +st il +nic ola +ðŁĵ º +clam ation +tro pic +af ro +ou k +expen ses +der rick +al ine +fa w +reg ard +im er +sat in +thi um +ry der +pear l +te ss +mm mmm +sen ses +ðŁĩ ¹ +positi ve +exhau st +occu r +nor ris +lil ly +is les +direc ting +yo fficial +count less +sam ar +on stage +flo ck +mir rors +arch er +mo i +k d +vi v +in os +si kh +le i +sen sory +br its +kno x +chest nut +op y +coli seum +z af +di vin +adap ter +:) )) +tem ple +ku n +hel mets +t df +gu ide +m old +o ids +lu ther +he is +monaster y +sp ree +k lu +brit ney +jagu ars +gre ats +c cc +ky rie +machin ery +cric ket +re ro +ab o +aspir ing +semi finals +ale ss +sig natures +var d +me th +her bal +hol den +king dom +ap or +reg gie +ore o +palestin ians +em mys +sec tional +ro i +ney mar +qu el +cu ll +l ka +haz el +estim ate +ul ties +go w +be a +purch ases +bel ts +protec ts +m é +gue ssing +bb o +clau dia +fr acking +jon ny +el k +cel tic +al mighty +ra je +courty ard +ig i +can es +ðŁĴª ðŁı» +bank rup +le thal +âľĮ ï¸ı +graphic design +vad er +penc ils +rough ly +dan te +m fg +const ell +cam el +j b +bloss oms +en to +balo chistan +cine mato +ill ard +jer sey +con sent +dent ed +con templ +sch er +hol i +lou gh +st our +a yo +begin ners +cur b +v hs +a jax +du ff +av eng +dom est +commit ting +ai red +cha p +hedge hog +disappo inting +freel ance +in land +char ms +ðŁĺį âĿ¤ï¸ı +ai sh +m x +buck le +ti dal +per mit +bo ating +ra cha +kend rick +b ello +b hi +ple a +estim ates +l b +apo logies +jay a +bb l +ast oni +inter state +main taining +el bow +mu p +ep it +ðŁĺ ¡ +viol ations +def end +be h +sl c +am ir +pur i +ti um +fi fa +blur ry +scri m +ðŁĻı ðŁı¾ +ma ple +rel atives +âĺ Ŀ +cho c +con nor +⾨ ⾨ +whi sp +list ings +ma ze +than king +ri dd +grass roots +shi fting +desper ately +gor illa +den i +ju les +stra th +g ley +ja in +bu ick +t anner +ðŁĴ Ŀ +ga e +pri m +it ors +n ano +separ ation +armen ia +bor deaux +ðŁ ħ +pj net +bu rial +e bon +glo ss +re new +gri er +spe eds +comic books +sym boli +pur poses +ãħł ãħł +spati al +no table +ci on +n ps +ho ffman +nor man +rt g +du sty +situ ated +tr an +k fc +em en +nic 
kel +hast ings +sett ling +gr it +l ena +w aw +art s +gu m +ca regi +le wis +sapp hire +rememb er +embed ded +t lc +bl at +serge ant +el sa +boot camp +bow man +photo graphic +pill ars +direction ers +classi fied +no is +ve er +barre ls +wh oop +ðŁĺ± ðŁĺ± +fe male +petro leum +medi a +e fc +poké mon +ठķ +enthusi astic +var un +pro files +pedi atric +acci dents +con rad +jan g +jo jo +ac or +ob server +l f +live stock +for gi +fo s +el m +an and +go e +c ere +avoi ding +gri t +om an +thank fully +scat tered +nick y +cylin der +chees y +di ver +mahe sh +cav es +ear liest +qu inte +subjec ts +b end +gul f +vocali st +glu e +pat ches +un stopp +sny der +demonstr ating +pi o +hor ns +wic kets +and the +r ama +yo on +stra ight +bed time +or ang +bul lets +sa urus +min ers +inci dents +! ... +ðŁİ ¸ +ag ers +hand les +stat es +in ity +d ons +incredi ble +emin em +avi v +ru dy +moz art +folk lore +appli ances +mt l +fre y +di as +hu a +page ant +stri ve +im prison +bul lish +r ana +al erts +bb mas +hy per +derby shire +re cre +re dd +debor ah +cosmo s +law son +mel anie +psy cho +ho or +doo dles +sni per +shad y +man tle +canadi an +new year +inter actions +separ ated +cor ds +spiritu ality +ap u +it o +p ct +pel osi +rebel lion +se iz +wor cester +sec tors +ul i +san ta +Ð µ +ðŁĩªðŁĩ ¸ +bi ased +class ical +gam ma +dee plear +emer ge +back er +sur ance +hand crafted +ðŁİ ¥ +franc is +mill an +ic i +cro wn +wo w +stri ped +un fair +relax ation +³ ï¸ı +embrac ing +she alth +pale o +martin i +dist illery +wr ink +or k +na th +hay ley +cour thouse +si ber +sa di +quiet ly +mel t +m sm +me h +smart phones +rel ent +pp ing +war wick +co logne +gli a +cot ton +pro g +lon e +ip sw +star ters +expan ds +u mp +su ed +ski pper +infe ctions +ing le +à ¡ +cler k +demonstr ate +ac ar +ðŁĺĤðŁĺĤ ðŁĺĤ +ti bet +bun s +alo m +demol ition +ssi a +g st +[ ] +so ar +âĺ Ģ +ðŁĺ ª +ðŁĵ Ĭ +dee pest +beyon d +are t +att ends +activ ated +di mit +âļª ï¸ı +high lighted +magaz ines +rum or +az za +steph ens +dol ph +sho ckey +mat s +we av +mel an +serv ers +tra um +ku sh +æ Ĺ +bab ys +pa z +a al +la use +break ers +canter bury +ul ture +mi ri +euro s +tane ous +impre ssions +du tch +il d +gh i +pur due +adequ ate +l p +sy ner +ang ler +du rable +gal ore +ro wn +mg mt +ðŁĵ Į +lu cia +âĺij ï¸ı +zay n +bor row +. 
( +north umber +cru sh +eng a +su sh +extra vag +t out +ma hal +ali stic +ther mo +gall eries +es se +chi bi +attrac tions +lex ington +legislat ure +docu mented +resi den +brow nies +w f +st ool +plan ets +sho ppers +conduc tor +ms p +tr icky +fru ity +end ra +feel the +whi pped +hair style +re fer +oo k +oc topus +audi ences +ku mar +after no +op tim +c fl +ni p +gen i +alpha bet +ann ab +lam in +accep ts +l ng +ðŁĺ « +t ine +ac om +cheer leaders +t k +gr on +v g +k ung +ja x +dha bi +r ss +mack enzie +beir ut +clean up +gy psy +st ell +bur ger +hurric anes +educ ation +st ina +âĻ¡ âĻ¡ +unfortun ate +jere mi +bad ger +at ers +: â̦ +ter ra +subli me +stu d +y mca +mr u +duter te +bren nan +bul b +mel o +yl on +hack er +c red +gu d +as an +pad illa +embroide red +vietnam ese +pione ers +projec tion +re boot +id c +an ey +pri mer +suff ers +win ding +p on +sto day +mor n +u ch +all in +adid as +eliza beth +tu ck +o graphy +ðŁļ Ģ +be g +os borne +ghet to +r h +cn n +ir ma +ma kin +cab les +mur ders +oc ks +inst a +al as +si k +cu ff +la re +foo dies +o vic +at om +geome tric +em pathy +ภµ +cent enary +newsp apers +administr ative +ðŁİ Ĭ +sti ve +contrac tors +le tt +tas mania +awesom eness +den sity +ve en +prince ton +frequ ently +re ject +gh i +modu lar +ceram ics +sh ag +ki wi +can vas +sweat shirt +an j +ti mm +napol i +il er +appe als +hamil ton +ma yo +we ave +arrang ed +whar f +occu py +b vb +as aki +ot ter +nor m +vi es +de tox +tion al +dere k +id ad +ad missions +constitu ency +u pper +woo t +allo y +se ve +lu b +un comfortable +ed win +ab re +d wight +ar che +virtu ally +sp ol +pri e +ai i +er r +swit ch +bar ack +se ok +cou l +wn t +pou l +o live +caffe ine +cardi ff +notor ious +de mp +ex cess +bar r +t ford +a jay +bump ed +my thology +shel ley +fal con +shakespe are +must angs +no ted +bon e +civil ization +sy d +par sons +un official +hy ped +sp ends +oppo sed +v ings +space x +noti fication +deci ding +bio tech +out si +sal ah +! . +fe d +ss y +c ms +bad gers +cr o +ela ine +n ba +dy our +n ant +honey moon +climb ed +conom y +ath a +m ell +ne bula +nature photography +juli e +bm x +inve sted +mon o +lieu tenant +wat kins +techn ician +o se +ka e +ì Ľ +mc queen +pre ach +trav eller +flexi bility +ze bra +reta iler +p ant +ben der +brand t +squ id +war rant +veri fied +cas s +pier cing +hon ours +t ying +mor ris +kis sed +op rah +panor amic +me i +splat oon +wich ita +ari as +gal li +indy ref +good times +athe ist +confe ssion +ow ski +re pping +ad ditions +mechan ism +z im +j ans +su f +cho pped +beg innings +vitam ins +ãħ¤ ãħ¤ +or th +po les +ru b +antarc tica +indie film +web cam +ket ch +bre tt +cle ment +her on +defe ating +hydr o +buc ket +wand ering +sid ney +future of +b inge +on ies +knock out +administr ator +syn the +l ent +jan i +bar ley +premier league +ner ds +cr m +bra s +bot any +evol ved +rot ter +ro wed +tum or +weal thy +Â Ń +mon arch +li shed +da hl +ðŁİ ĥ +bu ch +ken yan +Ø § +red ness +assemb led +se mit +hud der +shro p +ran i +lear ning +mor y +iti a +geo graphic +worl dof +f b +pho sp +boo gie +am ped +? ... 
+che w +dwar f +ar us +s sen +ru sty +recru its +h k +gar de +app lause +vol umes +invol ves +ta c +hand bag +trans late +ffe l +se ym +aqu atic +trans fer +zo di +and r +acade mia +cr ater +te z +ar se +adap t +col oni +snow man +mal i +hang in +di schar +oy sters +pho e +colon el +w ba +hispan ic +thri ving +sh y +ag les +sales force +cre me +so les +la fayette +â ī +ter ia +ach a +sp erson +go go +car ly +the ore +am ore +vo x +af t +ãĤ ¹ +stap le +mu ffin +di agram +ino x +su stained +av ent +me ta +arbit r +dec ay +ado le +Ð ½ +ec ol +ph o +n k +o cu +gr anny +ç a +luxemb our +stad t +alber to +le vit +am as +d x +or phan +co bb +as c +lo gy +immen se +chan ts +off line +p ent +bre x +w inger +plan e +i el +nichol s +ca thy +nar uto +low ed +/ // +ignor ance +cat astro +you ts +sch en +buil d +haz i +s ine +critical role +du g +dete ct +lo gs +en amel +stpatrick sday +ed die +co pa +cigare ttes +ho ff +kay a +la goon +ra pha +air borne +choo se +puer tor +ke v +gui ding +fro sty +bor ough +mir a +ðŁİ Ĭ +cade t +anu sh +yo gi +e ger +fl ing +slo pe +nin th +we ston +foot wear +f n +may weather +a am +pla in +stair case +witne sses +work outs +ro bust +dex ter +co hort +ðŁļ Ĺ +sp ell +ha ze +o om +organ ising +wild fire +cont acts +av on +min o +upd ating +ðŁį » +li thium +ing ual +k is +au ga +lo com +de duc +u da +th ak +boy le +mp er +hot tie +eri k +re vised +is la +travel photography +oo za +en qui +confe rences +clo ver +g room +cur ves +live on +per f +displac ed +bo log +xx xx +ðŁĺ© ðŁĺ© +te al +ve ssels +rain forest +cal ci +pan ther +gira ffe +ta sted +imag ery +pad res +day time +bas s +ri pe +opio id +nu e +vin yl +invent or +sen s +process or +mu t +gad gets +bibl ical +shann on +jacqu eline +car y +the resistance +ali en +n vi +co sy +bi har +fo ley +ren d +mu gs +fa ken +cl one +ni allo +gra bbed +chi hu +power house +n tt +chero kee +spon ge +imple menting +rh ine +le one +ðŁį Ģ +pret tiest +infra red +impro v +swit ched +tu bes +con tr +bl k +projec ted +be aver +yo t +bbcra dio +thi gh +per secu +apologi ze +w ack +po ster +oli ver +az a +lou d +( ?) 
+f the +women shi +spar row +blu sh +us able +sc ales +it ative +peu ge +ne eding +legg ings +glam orous +mat ur +c z +wat t +da b +tam ar +et sym +bau er +heart felt +h n +else where +bir ch +alu mini +hu ck +e me +j l +traf ford +d z +por tions +ana sta +arthr itis +esp n +ber gen +viol ation +yo shi +c z +northumber land +clo sures +ðŁĩ¯ ðŁĩ +smi ley +r w +tel ugu +inten si +gre gg +ve ga +dun geon +south bound +ba il +domin ican +semi final +chap ters +h itch +van ity +trans iti +recomm ends +sati sf +bar ca +queen s +( ( +de struc +stra it +ra vi +dess erts +in tru +har am +k os +fo e +fat ty +pais ley +magn itude +dri dge +com ey +schem es +vision ary +our t +down loaded +ðŁĻĮ ðŁı½ +gd pr +lan i +p wc +gu ad +nic est +stake holders +re ferred +george town +arvind kejriwal +schnei der +in doors +all star +strand ed +gen der +ze pp +ma sses +ðŁIJ ± +pati ently +bl dg +z ab +we arab +vi vid +he ck +d ella +sy mb +je opar +la ger +à ª +comb ines +ne c +br ay +flo p +tx wx +jo ys +pon t +pro found +sur round +mad hu +ma ble +ay r +te as +n sa +open ly +er nest +ãĥ © +to po +g na +anti oxid +ti an +e tr +c ello +ma thi +gener osity +b iting +man ic +kel sey +chee ks +ten der +w th +pron oun +ultimat ely +gu sta +ari anag +ger ry +ble ed +red dy +mic h +mitsubi shi +oper ated +sex ually +ma u +cl lr +vi ds +co c +mel ted +ðŁĮ Ī +q ld +ite ch +instru mental +end game +ðŁĵ ĸ +ener gi +brow nie +tam il +at in +domin ated +pra ises +fire place +sens ational +men a +k arti +un prece +ru pt +ori ental +mc cor +tour naments +scen ter +re eves +prescri ption +sam e +fra u +tru ffle +em bo +roman s +bla sts +techno logical +pr at +b sb +y ar +tren dy +ac l +al ad +ðŁį ģ +o hh +bankrup t +tho ven +regar ds +is er +war wick +vine yards +real m +niallo fficial +do ta +ge mini +to do +v able +¨ ¨ +la u +wre ath +ju ve +nat asha +le ver +lor i +hor ser +cc tv +air bnb +es anders +sin clair +ema biggest +high school +con test +optimi stic +t te +ðŁĴķ ðŁĴķ +ss d +ye e +hel ena +con sen +ric ks +jes se +an ic +ðŁİ ¯ +re acts +ro be +independ ence +vol tage +m ington +s ant +à¸Ļ ภ+-------- -------- +sentin el +ke tt +rehear sing +aaaa aaaa +sof the +stir ling +sear ch +wi gan +stand out +sna il +pent agon +Ä ģ +ch lor +cru st +net any +chemi st +disapp eared +ric ardo +sp iders +bo se +war ren +me ssing +bann ers +gu el +par ach +ma id +coun ted +epi le +bon fire +speech less +se tter +meas ured +rejec ts +nik ki +le ster +foren sic +fab rics +alo ha +pre served +wat ford +deta iling +dar th +bo u +car ly +... 
' +tail gate +noti fications +å ¤ +pas sive +trous ers +balo ch +ro ther +typic ally +à ¥ +sp it +wi z +sic ily +technic ally +ex pose +st age +hu bb +cre am +cap s +po ke +sle ek +ju ne +tempor arily +de z +awak ens +l ame +_ - +ji ha +tues days +advis ed +advis ors +exi sted +dis agree +news room +lo sers +world tour +dr ying +al di +har ness +foot print +hobb it +p mln +i ro +que red +asse ss +gaz e +sa b +th ian +í Ĭ +ti f +ob serve +ev il +dra wer +swee p +cor y +co dy +kyo to +cal lum +n inj +lau rent +be i +sket ching +custom ized +du r +regre ts +knox ville +ìķ Ħ +mess aging +grac ie +abun dance +bi dding +bre wed +fl ouri +therapeu tic +alt itude +ho gs +bur ner +elec tro +wonder fully +he ater +post pon +li very +r all +ad as +a ac +sau l +brook lyn +play house +âĻ¥âĻ¥ âĻ¥ +char itable +in y +z ah +compet itions +be av +plu gged +o is +do om +astron om +speci alized +max i +ta ps +cellu lar +depre ssed +folklore thursday +cri b +e mul +ë° © +fi gh +ru z +car lisle +spe ar +side walk +de i +depend ent +lac es +nh s +ðŁĮ Ļ +reali zing +net work +ric he +re gin +re fresh +st ral +pa thology +pla id +psyched elic +hin d +u ka +algori thm +lin king +progre ssi +fe y +d ade +hydr ated +b ant +fam ed +cot sw +bo ise +as c +rac ing +ja vier +ww en +mar lins +poo p +swe pt +toni ghts +we f +ani me +slo vak +âŀĸ âŀĸ +cla us +lem me +cli ppers +re ls +arianag rande +r te +ko t +thal apathy +hungar ian +zu ma +y von +is u +jour neys +clin ics +be be +ww f +n ws +super heroes +er it +sle ague +identi fication +mo tto +ba i +sour ced +ill er +ap i +pri se +unprece dented +dam as +tuni sia +dra in +undere stim +e ther +quarter ly +rewar ding +al ham +wolver ine +cab ine +hyp no +nad ine +hav ana +da e +ðŁĵ Ī +dr on +read ings +b ati +pic o +mer ci +iti an +wal kers +el ope +mi key +god zilla +bur lington +abu ja +social ism +at ility +sh ell +harry potter +g no +ab ur +re leg +fel ici +ro gen +neuro science +inst in +ath am +vou chers +j arre +fu se +def ici +monte rey +de port +mid day +pp ard +fre ed +ame ter +wil t +n ingham +pr att +liber ty +slo gan +o to +pr i +co ated +c pd +ne tt +il las +mal awi +evol ve +accessi bility +ðŁĶ¥ðŁĶ¥ ðŁĶ¥ðŁĶ¥ +or nament +b p +el is +son line +chi ro +fl ick +ib m +ar ak +en ables +gar land +san e +cu ties +tri p +rotter dam +n ys +lam ps +lu cas +bo g +ra ils +travel led +hic ks +en u +sab ha +scru b +hi er +hart ford +fo o +fer nandez +tre vor +mat tress +appo intments +ale j +fe i +o logist +saf ar +oc ta +sr c +sha un +ambi ent +dri c +bi ker +she e +must ache +h ta +bo one +her ty +car dio +bra kes +rec ital +consi sts +overwhel med +cau l +robb ins +im it +al th +ur l +bi bli +on ne +black livesmatter +diffic ulties +tel ang +tall er +ðŁĵ Ĩ +deb ating +bur rito +mo vember +strength ening +bo e +te stam +mirac les +base ball +re nee +ðŁijī ðŁı» +al fa +âĺ ĺ +unstopp able +ec s +g mo +giftide as +path way +fen cing +ðŁİ ¤ +b ham +ra s +sk o +d led +thel ast +magn um +bin ary +wil de +wil der +wh ati +barbe cue +h ism +can oe +kur di +eli ve +advant ages +mad ame +bi er +mis sing +enter tain +air force +y ama +c is +hash tags +j is +ve il +dream y +ten se +may ward +ch ateau +hunt ington +âļ ĵ +v all +up on +bl ouse +dun es +ðŁĺ ´ +fert ility +m ole +curren cies +st u +ber lin +toa sted +div as +wal t +lar k +por a +hit ter +um er +chil led +bal ancing +fa is +y in +or tiz +east enders +h ate +ur al +ap ril +tim el +à ± +per o +sto cked +respec ts +th t +best friends +giving tuesday +be ad +inv ent +im i +nap les +comb ining +tok ens +thir st +ma sc +par 
rot +sp u +dent on +* -* +t res +subur ban +wid th +si ve +con tender +siri us +lo k +troop ers +outra ge +tur bo +frag ile +me ssed +do h +disc ord +netany ahu +re sign +forgi veness +mo han +mun ch +cam ou +identi fying +enab ling +hot ter +thorn ton +jai pur +ar ya +ðŁı» âĢįâĻĢï¸ı +mu staf +maj ors +o ke +du ffy +roh ing +til t +ðŁĩ®ðŁĩ ³ +rock star +she ep +hend rix +ra v +in vention +do u +lagun a +gru mpy +sw is +im pe +) ' +you ths +bun ker +st ache +oppo se +indi es +acceler ate +ml p +ed en +w ann +k ail +akshay kumar +su pt +pol ym +midd leton +extra ordin +wil son +australi an +alumini um +way ne +alum nus +mat ics +gri m +er nie +opp a +competit ors +rand all +h ence +decla res +pre aching +sha he +can e +sustain able +stap les +le dge +ad ena +doctor al +bur gundy +decor ate +ren dered +ri sen +pr ank +di or +bee thoven +flo or +ac com +to t +ho dg +touri sm +say in +objec tive +mar kers +premi ership +en abled +camou fla +gi ant +Ñ ģ +smo key +ric ket +pan g +de pending +s ation +evol ving +inter cep +cen sus +tof the +re en +mendo za +trum pet +marke ters +an it +ðŁĻ Ĭ +north western +v la +foto gra +blackand white +che wan +wi g +tro om +ginger bread +k n +ro mero +n fc +or chi +fun ko +sour ce +f s +ra ped +o st +tar ot +ann ually +ðŁĺ ¬ +r ill +del av +.. !! +se s +can n +medic are +ph el +ape x +guardi an +rema ined +r pm +a ñ +story month +instag ood +neighb our +p ing +sem ite +my stic +as cot +mat er +hand ful +dang ers +ti d +ana heim +opol y +sh allow +nami bia +tor ia +procu rement +big bang +announ cements +prosecu tor +beng als +sal le +en roll +ga stro +sugge stion +ba k +ha ul +budd hism +berni esanders +flu te +fati gue +cyn thia +cho i +ir win +gu a +str ous +h p +ba p +satisf ying +play a +ðŁİ ¼ +inst ap +al ice +t p +irri gation +ðŁĩ¬ðŁĩ § +in tric +clu es +ple x +sa x +he pat +dump ed +signific ance +by u +medic ation +pro v +tough est +corn ish +âŀ ľ +kel ley +u v +si zz +si bling +me st +di stor +diplom atic +aun tie +b hat +son ic +bren da +pump kins +ro ch +black burn +ur ged +shi a +arrange ments +floo d +sa unders +lec turer +nou ri +popul ations +diplom acy +consist ently +ðŁ¤ Ļ +t mund +cauli flower +l ily +vocab ulary +vari eties +coo ker +up town +qu ent +mo sa +re inde +velo city +spru ce +social medi +i ber +volun tary +proce ssed +bal tic +y ang +leban ese +d p +dol ly +arrange ment +y uri +cran berry +kal yan +elev ation +cli ff +pu shes +ìĬ ¤ +sil ic +co wx +eter nity +sla ves +vine gar +glou cester +con tained +breaking news +aga inst +renov ated +norm andy +hero in +ys m +mo ds +gre ek +un di +tren ch +v h +encoura ges +head ache +gr ange +: ' +ever green +Ù Ĭ +reck on +ab used +th ru +cho ice +ti dy +col der +scho ice +ha in +bru m +li ars +bre it +yor ker +sh ack +he idi +micha els +sco pic +fasci st +play ful +ca c +yas ss +sh ad +.. ? 
+qu en +ram irez +clif ton +pr s +best fan +âģ ł +gener ating +head set +disappo intment +abstr act +bo iled +paren thood +azerbai jan +exhib iting +bom bay +oli vier +ko so +un lea +mat ernity +iz er +si ves +r hu +col l +saskat chewan +fre akin +de k +na g +stab ili +ðŁį ķ +organi zer +bo sses +ar u +u va +at able +ta un +after wards +fert ili +ver ge +az i +mor ph +๠ģภ+jer k +cosme tic +ko w +stru st +ap ache +post cards +for mul +ì ĭ +spin al +jack pot +elec tri +Ã Ń +lo y +gra der +diab lo +ar di +he sit +f w +arch ery +pa sh +the ories +repe al +re live +per cy +âĺ Ĩ +im in +syn chron +sham poo +coup ons +o to +la i +thou ght +luxembour g +mo v +ðŁĺ ¥ +ge mma +se ated +m ga +strat ford +un certainty +shi fts +est o +fo ol +fire arms +cor rie +ki ki +appa rent +p ills +olym pia +fi d +elev ated +de cks +ignor ing +av alan +ro v +whist le +p tsd +milit ants +robo tic +pac ers +quil t +bankrupt cy +lic h +per cussion +celebr ity +al s +( ; +su t +pokemon go +h g +off s +gibr altar +scre ams +billi e +gen ome +mar in +be ams +arch bishop +em in +bedro oms +g ated +ol ly +warran ty +at own +cudd les +gun na +k ic +vi ve +cy mru +nar row +pro b +le o +refe rences +manufac tured +cho pper +brun swick +sem is +don ia +r ye +man o +hur ting +? # +hol li +investig ations +c els +ðŁĵ ŀ +le ster +temp les +sto rey +mc mahon +toi lets +wo of +ï¸ İ +le verage +at om +night mares +victor ious +haun ting +custom er +ag i +yo ongi +mon ty +ver onica +w ur +inti mid +blan kets +volu tion +j m +âĺ İ +am on +jud ith +ðŁĺİ ðŁĺİ +distr acted +dri p +hurric ane +and es +revel ation +tro op +ab leg +col lin +tibet an +wor rying +inter nationally +eat er +camero on +brad or +y uk +ðŁĴĹ ðŁĴĹ +tra k +slo pes +ci er +ne a +ol er +ta ka +albi on +volcan ic +am n +a fi +ob stac +face time +ger ing +n pr +metall ica +organ ic +ðŁĴ ¡ +ki dd +d ances +pemb ro +wash er +m its +om er +emo tionally +tan go +ip o +do cks +scan ning +spec s +tho m +the ology +emer gen +om i +g pa +selec tions +un necessary +ima ge +ter s +induc ed +gi gan +rent als +supp lied +m fa +shan kar +lat er +pa jam +cla ve +Ù ģ +ma hin +carl son +avi an +ano va +kati e +aj ith +design ated +chocol ates +investig ators +gla zed +prin cess +er ry +ra gn +ou rable +hr u +sun dance +peuge ot +steam punk +gh lin +gre ase +hi res +z ap +per ce +j ill +tom e +he hehe +joy ful +mae stro +ni shed +gene alo +v ich +p its +fox es +good man +emer son +lo bes +con verse +o ats +thom son +ra him +mal ware +ah i +man kind +re sin +im g +sw ood +kin der +sc roll +ar a +sak ura +ro bbed +xi on +ny a +c ism +ce dar +be in +mour ning +tor to +heath row +done gal +bar b +hydr ation +k or +elim ination +su pdates +hill s +appe ti +star red +ko m +gw en +dd d +cra y +sc anner +personal ised +seren ity +re design +meta ph +box ed +judg ment +no se +ë ¹ +er ad +ac ne +supp liers +ener getic +v om +as ap +ðŁĶ ¸ +ir vine +hat ch +la ss +ad ren +waff les +accur ately +ici o +itt le +se un +occup y +web cam +thene w +ent es +ga i +j w +accoun table +vis or +ir rit +licen sing +hudder sfield +gen ie +ðŁİ ¾ +atmo spheric +ten sions +spart an +clif ford +ol an +north bound +ame en +cen sor +u el +ster y +$ $ +far rell +hy ster +cl t +se dan +rep lied +descri bing +micro wave +sla b +pro sp +assi sting +ru bio +e than +hh hhh +gu ay +z man +ra ise +roll ing +o e +n ile +ambro se +scar borough +hero ic +coo ks +mor t +chop ra +ðŁĮ · +to b +shav ing +stac ey +dor m +motor sports +wi ki +fol ds +sp iced +stress ful +liter al +fu dge +pe ggy +wa ite +tre sses +se sh +pr ic +ðŁİ ħ 
+fri ght +r va +mumb ai +po m +tt v +cel lar +tom e +andro id +dor is +tsun ami +tin der +o ec +m wc +dor tmund +no thin +l iti +so u +believe in +at u +kno cks +mag ni +ss sss +ro hit +ine ws +ang i +m andy +ke ttle +intermedi ate +av ant +cur l +endor sed +ori o +ur t +consider ation +wi res +shel ters +b ino +vik ram +imple mented +ly dia +bu k +paro dy +c news +under graduate +canu cks +sam i +polit ically +ro tten +gh z +tex tiles +over load +moder ni +recre ational +fli r +bat on +typo graphy +ov ation +intrigu ing +pilgri mage +al ge +ad ays +tcm party +sp elled +cur ls +boo ze +ste m +ann es +ir ls +spon ge +sho pper +sig nation +bra ss +mi stress +le ah +beg inner +lau derdale +augu st +pre school +ta ping +tai pei +execu tives +b d +rhe tor +esc or +immun o +deeplear ning +stat ues +it us +manu script +ly ric +cor vette +mol ly +la ge +de p +cn bc +le st +je ssi +fi fe +griff ith +oppo sing +ran g +dr ills +respec tful +p ity +d ell +har ding +play boy +blo ke +shut out +k ili +o sp +se attle +bc poli +mis es +journ als +team ing +es ther +fre ddy +Ķ ï¸ı +metr ics +no tre +gar ry +for ty +navi gate +perio ds +bened ic +j id +da w +ance stors +restor ing +con g +aller gy +tit anium +c ence +lean ing +ab bas +v ast +uc f +roof ing +e man +seve rely +vo gue +ve au +in bound +d z +tane ously +stret ching +man chester +dr yer +dav is +kan th +the game +it ted +re tain +el les +conge stion +frat ernity +ol lie +lo ki +fre ely +cho o +pon y +sc ep +tab ly +bal t +rock n +di me +lo gging +ðŁį · +ad u +ha voc +water ford +char is +swee tie +run ning +ner d +erdo gan +z ara +weigh ing +fif ty +pre cise +low ell +kurdi stan +r yo +or th +syn th +lin ers +phenomen on +art illery +il legally +constru ct +nostal gic +gar th +al ta +shel ton +a sean +w ander +dur ban +di versi +bon o +cl on +le man +sh un +obstac les +appet ite +fe eder +respir atory +di xie +formu la +an to +so ber +extin ct +au c +ing les +legitim ate +; ; +min nie +ipsw ich +dram atically +ðŁijı ðŁı¼ +ingh am +milit ary +mon et +us navy +for k +dun no +play er +q otd +st oo +ex or +ethiop ian +film fest +pe red +c ate +sau di +in ner +sin cere +tion ality +ale e +de eds +cooper ative +ir onic +cro cod +br ary +post season +cam per +can ary +e in +exten sions +nb d +sher wood +spo kane +hu mp +jit su +ê ¹ +dar yl +p si +stab bed +offer ings +expe cts +cav al +body building +fr aming +f ca +ye arly +bom bed +sk il +resear ching +jud iciary +gree ted +tu dor +mil o +innov ate +ðŁĺ Ľ +r hs +ru by +contribu tor +fam er +soci ally +m lin +fi ery +ut ter +beau t +it os +de voted +rain bow +bar ney +pe ren +ar jun +r na +gab by +ut i +hann ity +pick le +ser v +qu akes +pp e +fe m +wh itec +j n +victor ies +ðŁ§ ¡ +gol fer +congratul ates +resul ting +mechan ic +ur ve +cen tered +kie v +an s +in cub +< < +c mo +bestfan army +dap h +en ham +on cology +ku sh +t xt +ori ented +fashion able +c sr +sa hara +r ack +pd p +han son +ภĩ +ti ers +ra r +pan am +in sky +sa hi +testam ent +asth ma +in her +fisher ies +or der +ho we +gall on +ep is +suz anne +drow ning +paneli sts +ðŁĺ ² +ë ¦ +al ach +commemor ative +at tribu +ðŁij » +mo o +visi onal +week sary +gu st +ak in +poin te +ee e +di spar +ni pp +dent al +st all +pi an +bor e +ul ster +tic k +ir r +tae hyung +micro phone +bermu da +ga ard +el er +plumb ing +hu gely +âļ« ï¸ı +race way +cam bridge +mar cel +burn ley +to ast +holly wood +fa sting +me red +hib ition +ca pped +benef icial +ow ning +cont amin +arab ian +to on +cap ac +hul u +sm ir +nutri ents +se in +graph s +con ditional +ðŁij 
ħ +or ac +play in +nor the +tor nad +mar ian +ju mbo +lex i +incredible india +road to +uk one +confu sing +sp h +shan k +pi ed +mq m +positi vely +sher ry +path ways +consi ders +tof u +argu ments +resil ient +che tt +with dra +ter o +ated ly +sw ana +he b +fli ght +har ley +decre ase +kind le +book shop +³ ï¸ı +marty rs +sm ur +mc cl +concer to +sti me +rejo ice +app lau +cle ment +mer kel +jai me +im mortal +isle of +mar co +youtu ber +stal king +me too +st ack +sp ouse +u st +lu v +âļ¾ ï¸ı +eque strian +ev ing +fl in +nick name +the big +as ar +st acks +wal ker +bor a +kidnapp ed +hur ling +humb old +rec alls +co pper +ann is +se o +mer ger +mu ir +ad dy +ðŁĴª ðŁĴª +be x +cr acy +con an +congratul ation +mid st +âĻ ¬ +for bi +op tic +cr ate +crocod ile +mad agas +secur ing +ast on +o gue +savi or +salis bury +love it +fuji film +cast les +as st +ar rows +sp acious +tr s +poly vore +progre ssion +m ri +nel son +bi m +indic ator +o da +pe pe +re signation +gu t +sne aker +log ically +az y +are lla +te aring +jo shi +ssion ism +q pr +mari ah +p x +ble ed +mi an +med ley +we iss +ker ry +gat ory +at al +madi son +av enger +nab y +pl and +gi les +fresh water +d ington +ta j +demonstr ates +n tv +bul bs +sunday morning +pe ake +souven ir +wa h +ton nes +m kt +complex ity +con den +ross i +b ing +y ds +su k +n go +mid land +ol y +life is +ri pple +mo reno +dd ers +tu s +á ĥ +bou l +x a +hol dings +wn y +shadowhun ters +ke i +asp ire +m ous +ow en +so ak +skir ts +moun taine +stor ming +ch rome +ri ots +sar ato +amaz e +less ness +nav ar +crit eria +ra fa +indul ge +ay er +por to +nam o +........ ........ +yi elds +val le +j h +mac ron +sa ins +dur ant +tra ilers +wo t +confeder ate +sh rin +id ol +form ally +ten e +motor cycles +than g +no de +bang er +dal y +p ats +enroll ment +au ctions +at al +ar bor +lo gos +de arest +trans action +dom ingo +fle a +ser mon +de ck +sin cere +questi oning +juli o +was p +pre tz +armen ian +k ham +inflam mation +picture sque +acci dental +film makers +ðŁĺ ļ +ðŁĴ į +ca sey +so b +yee zy +good will +parag ra +ss ly +fe ather +dy ed +assassin ation +na de +b cs +app lies +femin ine +fe u +ext ent +depu ties +l ack +psy chic +go i +kill ings +pse u +ðŁ¤ ª +un c +mar l +tan e +mck enna +sur fer +influ ences +free way +hack ney +mal aria +el and +te au +rema stered +Ø ± +raz or +gg y +cor ro +lak sh +fla ir +honest y +hoor ay +de pp +am c +wedne sdays +q a +ed its +- $ +se villa +dou bled +human ities +c cot +som os +r ine +af a +si oux +re construction +wel ding +th reads +am ish +encoura gement +po der +bo ck +bal m +p tions +stand up +accompli shments +guar ding +convic tion +ac ion +napo leon +depic ting +att ack +su i +wear able +âĸª ï¸ı +pot ter +esc ort +vis e +to ts +bo on +event profs +angu lar +womenshi storymonth +bar row +sch i +ac comp +ti k +l end +kensing ton +wol fe +st acked +cra shing +exhi bit +wing ed +sab rina +ma sa +k ms +alway s +et t +pla sma +counsel ing +pick les +nfl draft +mr s +inev itable +coura geous +staf ford +writers life +ho s +e j +gh yun +trade mark +adri an +influen cer +coron ation +ra ging +explo red +usa f +excep tion +eu x +tan ker +sw ami +pac ket +ðŁij¨ âĢį +f en +she en +a ero +j l +re gal +nw t +au ster +meh ta +char ge +a ste +b ate +inf eld +racec ourse +collap sed +fle ece +z il +al lie +alternati ves +geor ges +ðŁĵ į +quir ky +fc b +nat geo +philanthro py +bra i +every day +ðŁIJ ° +ach ers +ja an +fin es +q i +fisher man +distin ct +gri mes +nation alist +comm ence +ro wn +âĢ ³ +z ing +f ter +hr w +baro que +bl 
ender +kitt y +hoo ks +c ited +w anda +consen sus +reinde er +an and +supp ly +me ds +v n +ol ph +rat chet +shel don +secur ities +ë°© íĥ +cro m +mosqu ito +j eric +im mac +dimen sions +â ¤ +di ssi +sponge bob +dami en +steven son +jo anne +del ish +yi kes +than x +surve ys +postpon ed +alco holic +al ised +ðŁĻı ðŁı» +do ch +sen tim +mered ith +com pares +b ago +happy days +mo ss +ãħ ĭ +ne c +gn ment +frustr ated +comb in +ri v +ec lec +col lo +compli ment +actor slife +ct to +nic ar +op hon +apar the +man t +ja de +trol ley +optimi zation +eye on +eco logical +qui st +ep he +ॠĩ +cin co +appo ints +old school +c pr +behavi oral +min aj +:- ( +tag ging +ev al +jo aqu +ðŁĺ « +ha k +de me +jama ican +so s +hy att +hand book +libr arian +hanni bal +pump ing +ch om +f man +ga i +hu ll +respon ders +green ville +n us +vau gh +ðŁİī ðŁİī +ta xi +gold berg +man tra +te ase +forbi dden +metho dist +ati vity +* *** +ec t +mc gr +Ħ ëĭ +se b +amid st +disapp ear +thy ro +phili ps +er ina +v icious +stream er +million aire +ma p +str ick +hack athon +gh a +ed ic +mi ka +pe ck +ill i +anto ine +ar ca +op tic +ma ure +ðŁĩ¦ ðŁĩº +cla shes +man ly +âĺ ģ +al var +and res +me i +el m +ww ww +al tered +l te +ê¹ Ģ +mo jo +for rest +thal ai +non t +spee ches +acknow ledge +ign ite +x factor +ðŁ¥ Ĥ +mead ow +disru pt +debu ted +scrim mage +pharmaceu tical +fi dd +found ations +philosop her +et al +publi shers +bo ys +c ke +ru gged +opti mism +re be +phil harmon +nar cis +ral lies +lu is +go blue +fol ded +un acceptable +optim al +li sa +pol aro ++ . +en za +âĿ £ï¸ı +mon opoly +grace ful +dair y +du a +diffic ulty +judge ment +o si +mer sey +flu x +new found +ter ns +dimen sional +in vic +al ba +am it +abudha bi +alger ia +autom obile +the ad +lo tion +acceler ator +vac ant +iti on +lu f +al ic +pl l +bla zing +ba z +sen e +ðŁij ¼ +villa ins +direc tory +eis en +to ck +broch ure +ri pp +hb d +zayn malik +nic he +lo lol +certific ates +mor se +fac up +x ham +un wanted +im ports +carne gie +fan sign +mo u +r alph +destroy er +sw ing +trek king +cili ation +pit bull +g aps +ho well +defin itive +mc le +f ps +et z +bol ly +lyn n +gan o +at ure +fur suit +co il +na v +but ts +tro jans +eu re +en ko +sch umer +horri fic +install ment +br b +subur bs +a bel +vi r +de sh +cun ningham +ðŁIJ » +span n +sch we +ke mp +tr u +ste alth +qu es +le w +deli ghts +ko ch +hu mili +cr iti +il t +sp ells +mi ley +car ic +ðŁį ´ +lc fc +substitu te +oun g +? !! +af fir +predic table +class of +er r +cy press +chand ra +age ing +__ __ +ther land +don caster +el in +yo shi +sail ors +har ris +jo anna +niger ians +h ers +pla gue +pro cra +k no +can ton +busine s +un h +pra kash +c in +bow en +co ating +m als +be gging +smith son +ponti ac +sp ies +dam ian +pl ine +und ant +al ta +one ss +shame less +da q +bb m +wal es +stam pede +ser um +Ù Ĩ +cataly st +x n +ab sc +free zer +ch un +ari os +mc cre +fore head +he ars +damas cus +tac oma +ardu ino +encoun ters +stan ton +lg b +ab as +" .. 
+ke te +drac ula +ele m +g ne +zepp elin +la brador +pul p +op tional +or n +russi ans +san itation +hil ary +etsym ntt +pen alties +au st +ig ans +olympi an +medic aid +vers ace +va pe +re stra +pe ep +sexi est +st alls +di le +the a +punjab i +pupp y +tuesday motivation +ðŁĵ ļ +the flash +roc ket +mo dest +chihu ahu +on na +k sa +hur dles +ca ve +fail ures +sp lit +bo ho +gur l +disappo int +ho ward +nug get +fran z +stal ert +kaz akh +for getting +sch ri +ag ate +am at +eve rett +du et +veter inary +juli an +ch ills +bra ve +ghost busters +lan do +gre ets +profit able +d é +ti r +ze e +om en +pd x +gray son +har i +fix es +stab bing +swim mer +symb ols +compli ments +po se +func tioning +th nx +gi r +corpor ations +bar low +lo e +off season +distin ctive +marvel ous +nik on +enri que +ky u +ja ws +amo to +lom bar +travel blogger +fa h +ouri sm +tri stan +so e +ce ase +ðŁı ħ +z ac +mck enzie +taxpay ers +swim suit +bl o +les ley +kan sas +w ks +ki el +provo king +my les +str ing +kangar oo +galac tic +fif th +s ke +we ir +ll is +mat ory +ðŁĩ ¿ +un ci +re productive +roo ting +ti des +gad get +.... ...... +alex ander +bow ler +scre w +apo log +eri ka +wal ters +shet ty +lan e +ban ter +as ant +me so +v ain +" "" +us i +fer din +accomp lish +man sfield +bom bar +collabor ating +cla p +it ure +s da +smo ky +na k +im person +car la +com ra +bur gl +lo co +ti es +in hi +trac ey +se is +diss er +rr rr +dra y +prote ct +cor ona +hun ger +ck en +c eli +trou bled +predat ors +fic tional +shav ed +riche st +metab oli +ful ham +gro oming +mono chrome +wa sting +as co +ast e +ti sta +remedi es +ung soo +south end +perman ently +bu mble +procra stin +ident ical +practic ally +ma scul +su ke +assu red +val erie +devi ant +grizz lies +thi er +pur a +ne pal +not ts +bil ateral +spo il +car mel +cine matic +ph l +ni fty +ma o +hypo cri +la ser +pan try +mathemat ical +el isa +coordin ation +bel mont +a it +radi ant +bo iler +man g +f ag +cr c +h ams +br in +â¬ĩ ï¸ı +famil ia +âĿ £ +sab er +ru pert +gg an +rit z +mic h +sal ford +le vi +gra l +ðŁĴ ¤ +n ino +ce d +business man +ul tr +sim ply +compre ssion +pa ins +hal t +ë°©íĥ Ħ +landsc aping +n f +croo ked +er d +itt in +ddle ston +sur passed +ino a +da g +bl en +exten ding +at ing +al gae +ball er +u mar +snoo ker +col lu +flo wn +thu b +ridic ulously +ki sh +op le +di re +as ser +ari sto +sc iss +h ating +trou ble +syl via +suc cul +plo ts +sincere ly +al er +laure ate +br ack +att n +rif les +me to +collec tible +cu omo +conte stant +consist ency +ant z +rang es +abig ail +de b +mini ster +grow ers +an oo +hoo ver +dream er +nu cle +resear ch +mi y +sha hid +ma v +d honi +cin i +do j +hin dus +part ying +dal i +alon so +inform al +clark son +it ton +ki an +cit yo +mor i +la sted +as pen +libr ary +susp ici +qu at +den ial +fol der +ch ori +swee ping +eni x +ðŁį Ĥ +Ø Ń +nas car +handmade hour +mou l +heat wave +em er +exam ine +ib n +gr ind +po v +tion ist +m bo +she ila +integr ate +om es +take away +cer v +con nie +tic ket +ce led +bi en +visu ally +madagas car +sor ry +gu i +park run +tra its +la be +pois oning +à¥ Ģ +vi able +bohemi an +denti stry +bad os +spr outs +mask ed +te ddy +ðŁĺ · +sa f +sa as +ji ang +ti ght +spe aker +withdra wal +bc n +as signed +class rooms +fle ming +ðŁĴ « +super girl +tot als +table top +e books +horizon tal +cra z +flu sh +j ard +c dc +er son +ãħ ł +green wood +ni h +co x +ad a +lit re +go ing +v icky +cur ved +lou ie +gra ins +hy e +lon ge +reme dy +tra inee +san jay +super stars +ma ser +man u +s age +wh l +ðŁĺĤ 
ðŁĺŃ +ðŁijį ðŁı» +m sd +en z +rab hu +j oo +gh u +ac er +e po +resurrec tion +justice for +bl ended +mo da +avalan che +france sco +re spective +g s +ye ast +wel ch +devo tion +ge tin +athe ism +am ic +carol yn +lo c +ld nont +ave c +us da +le gged +bra very +b lower +cow boy +he h +sti ble +buff al +chann el +run chat +âĺķ ï¸ı +ide ology +best seller +y oo +pe anu +bon ne +fel ic +edi son +fr actu +naren dra +pp ets +seym our +ri viera +he ctor +necess arily +bi anca +soci eties +the best +w g +sent ences +win k +vacc ines +pal ooza +jam ming +as f +mp us +agre ements +ec k +ba c +hon ore +com pul +wild cat +im posed +yo ga +hud son +can celed +l ich +fu zzy +es que +ch uk +w vu +se k +fli pping +r hon +wi shed +wh a +cap ability +len ovo +ìĨĮëħ Ħëĭ +vi vo +tv d +nor a +sil k +pas adena +yo semite +valu ation +clo cks +u ber +mr c +dar kest +au bre +ss o +bell y +wrest lers +kill in +lou der +buck ley +ge el +ad on +un s +appe aling +ðŁij ¯ +semit ism +list ens +fit z +ãĥ³ ãĥ +ny lon +ar ty +seem ingly +hal a +su ited +et y +she ds +mu ffins +ap ric +um ents +u ta +jam mu +chelse afc +star z +yo ko +roo t +clean sing +di ar +pione ering +ihear tradio +dig iti +fin dyour +can o +ðŁĴ İ +z ol +spac ecraft +six ers +moi sturi +b ile +ti sts +hor ton +rang ing +colum bi +mete oro +senti ment +ep l +foo th +text book +drain age +r ly +sc ue +imran khan +ðŁĴ ¸ +margar ita +ed dy +predic ts +gamer gate +advis e +growth hacking +love you +ug and +v f +beng hazi +s later +ne wor +ch el +independence day +p np +cul len +hoo dies +num bered +brit t +t sa +kl tu +s ages +mom o +onep lus +col l +gu ts +w ta +mesm eri +enh ancing +chiro prac +j is +teen agers +m one +constell ation +sweep stakes +e ze +slovak ia +la ye +pear ce +wa ver +po gba +k ron +sur geons +mar x +ti d +gg a +desc end +p ours +upri sing +wal la +sab bath +bachel ore +mack in +k am +peter borough +hor a +ðŁĮŁ ðŁĮŁ +think big +r j +hy drau +sp al +univers it +ðŁı ī +mail online +league of +ten ants +w ally +lan ce +heav ens +dd r +bol ts +am ir +i phone +ci gar +en du +re i +el abor +r inging +john son +characteri stics +sal oon +algori thms +tal kin +m tn +di ve +region als +ff ice +hat i +deviant art +so tto +shir o +l ama +k we +f aded +por ting +tu mmy +est ates +buen os +ðŁ¦ ģ +beli ever +pen etr +dar n +sp ite +can opy +fashi oni +t illa +pet als +eli jah +bra wl +marty r +ë°©íĥĦ ìĨĮëħĦëĭ +mid town +eric h +d apper +sm town +me gam +ww w +le le +on s +cat fish +fir th +fossil friday +ball park +th aw +pot ent +illi e +cre ep +car p +so ap +gun dam +infe c +yy yyy +ठ¨ +z ag +rit t +calcu lator +bo ca +ok o +to ad +threat en +refin ed +olym pic +accompli shment +bacter ial +a ji +tat um +feli z +she ed +j at +th ic +jam al +ðĿ ĺ +lin a +ðŁIJ ¯ +jo king +yot po +pin ch +ak ron +her b +motiv ation +li a +ho stage +cre ek +gam ble +russ ell +patt i +fo tos +c pc +bro ken +back the +cla ys +u mm +stock ton +mat ernal +ü r +la kel +cent ury +be k +infe cted +ภ¡ +smack down +man ned +ta hoe +sm es +bas a +su la +augu sta +. 
* +rohing ya +gre ed +counsel or +silhou ette +gra vit +cla use +' - +bo bc +occa sions +now adays +dic tat +be ard +n ally +brigh test +kab ul +inc india +dhan ush +archae ological +che ape +mizz ou +d hi +ov ski +bax ter +asse mble +à ¢ +gi gi +ac am +wis ely +haz ard +north ampton +âľĪ ï¸ı +me th +bla sting +re unite +mu lus +ali zes +t read +mil a +ed ward +ko va +pe sto +ðŁij ¶ +vit z +hydrau lic +refurbi shed +mo tel +isab ella +hom me +sever ance +uph ol +mis erable +f ari +lat ter +ef er +crack ers +es l +ac io +yy j +in an +ec b +z ind +pan as +tru cking +re ed +sh aker +burge ss +em pire +ag nes +n ington +art works +fr s +ti le +bi ome +eu n +ch ong +americ ana +god father +go blin +i shi +! ). +temp ted +gen omics +mand ate +ck y +ðŁĴĻ ðŁĴĽ +som ali +br andy +in ven +spoke sperson +pc b +yu an +h g +fa z +starwar s +ro wan +blue grass +don g +d day +trin idad +er ton +ban ning +re tention +cu red +tober fest +re set +we is +deta ched +behindthe scenes +immun ity +ph a +bra y +ðŁij ½ +ran cho +ram say +est onia +nd tv +] . +cab aret +tar o +d v +show cases +plu m +ðŁij ¸ +son oma +pre pa +memor ab +e stu +drive way +u les +magn us +x r +nn n +much as +en ge +stre amed +fore stry +audio book +tro y +reck less +kil om +ru ler +ra k +proce ssion +i ons +po ole +noc tur +wh s +farm house +per a +par me +hypocri sy +s ics +v ant +cas k +holi stic +au st +Ð ¿ +in do +ðŁij© âĢį +di so +disp atch +ol sen +make it +en nis +cent re +ar range +ðŁĮ ¼ +sal ted +ea siest +f ate +reg atta +mo zz +ac an +sin i +g ically +ch ops +chick en +work in +ha gg +invol ve +wee ds +book day +wake up +ky r +michel in +fu ss +re juven +vac ancies +incar cer +m st +sc ents +sovere ign +kick er +à § +bo d +âĢĶ > +sa h +mob il +shrop shire +oph one +dress er +mis suni +hep burn +i mo +foli age +diagno stic +as san +cycl ing +guil t +c sa +puertor ico +win elover +wake field +do ggy +k he +pa pp +co g +al lot +cu ck +poe tic +mi o +re vit +mag ician +ç ¥ +ant enna +west wood +mber g +lux e +oat meal +Ø ¬ +te at +ffe e +sear ches +l ly +plu to +el on +let tering +inno cence +fa i +ann on +telang ana +ma it +neu ral +can ni +ar oma +a stor +fe x +co cac +mon etary +f ent +un sure +' @ +indi rec +teh ran +isol ation +li bs +make up +merce des +ff y +he tero +de o +sco m +cur sed +veteran sday +franken stein +shre ws +de co +ge ese +lefto ver +ha did +vari able +acade mics +carol in +under going +vari ation +na h +ssi er +gamer sunite +pur suing +emer ged +ll ers +control ling +ro aring +mete or +vol t +daw gs +be aver +is life +bathro oms +aci onal +pre vent +lake district +in als +y ani +gra bbing +sac ks +le z +sw ay +k ool +time s +klo pp +la de +con cord +resul ted +revi ve +recon ciliation +ol and +az z +gir o +mand arin +de en +nutriti onal +is coming +van i +aw www +der ived +love your +stop the +shou ting +nov ak +ðŁĻĮ ðŁı¾ +lo af +displa ying +sunday with +ma guire +ch eri +ðŁı Ł +re match +qu ic +Ú © +y in +ðŁĺ ¹ +ili ve +z ip +our ke +down loads +sw at +missi ss +care rs +t ment +proper ty +hahahaha haha +gi bbs +sur rey +ar ise +tic ism +sti a +ir ling +fro g +co se +bas sist +fore ig +lea u +pil lows +hol la +eli e +disclo sure +peanu ts +inte ch +ww c +plun ge +trium ph +cor i +sli ppers +ðŁĻı ðŁĻı +neutr ality +ma re +hair y +gang ster +hu mming +cust ard +mer lin +ale a +s by +dam p +mo han +ver bal +j st +gu tted +b jor +un finished +ðŁĩ¯ðŁĩ µ +un happy +âļ« ï¸ı +by pass +at su +fis cher +sa v +afric ans +re use +mid way +demo lished +ger rard +her cules +Ä Ł +medic ines +cl icking +sur round +jo 
ong +wav ing +tri bes +wet lands +offici el +argu ing +l le +do va +su zy +club house +ne gro +ob tain +ga o +gl ance +assi st +ch os +ãĤ ¢ +âĺ ķ +adri d +occur s +st ans +par don +livel i +emplo yed +re visit +ff xiv +bb le +ne aring +min er +ðŁĺ ¹ +giov anni +up to +mar vell +mar se +to wels +cb n +engine ered +y elling +spart an +si ans +ðŁĻĮ ðŁı¼ +se v +coyo te +sta di +t cm +app en +shenan igans +open access +so aked +ma squ +le vine +stro kes +l k +aparthe id +hipho p +char don +may may +ha asan +stri pped +fr o +scri ption +f ton +h f +pri sons +marsh al +ķ ãĤ +an cho +com promise +classi fication +buzz feed +bblo ggers +deser ving +) / +s way +ob o +camp ers +poder nfamily +p oured +bri e +squir rels +se ize +: # +le k +ti mb +st acy +nas daq +repe atedly +br at +mi ghty +competit or +mah one +de si +o ke +bm w +shi e +f cb +cheape st +minim alist +par amount +n ate +har as +insan ity +lat eral +ment ality +mo zam +ta pped +yad av +u sp +b way +the od +bil t +ra ids +em press +adap ted +pat ron +nut shell +ag ra +be aded +sundaywith marsha +vi king +proce ed +main tained +thinkbig sundaywithmarsha +sn es +mus ica +to wer +ch ab +bo k +sm t +insul t +harve sting +windo w +ru ther +be ige +dec al +indic ate +ma iling +ri ft +po le +ander son +ch oral +sp ride +l ili +ev elyn +imrankhan pti +.... " +ke red +un dp +water falls +se ars +le mans +world series +ri el +ani e +app ar +score rs +lam p +a than +phys icians +qu inoa +refu sing +vu itton +unle ash +s la +pat i +shou ts +inten tions +fo amed +europe an +neighbor hoods +me er +man son +du h +br at +con es +bow l +kazakh stan +ठ¿ +in appropriate +del hi +ketch up +ful ton +s ys +consul t +gar field +to go +f ml +f led +b ds +facilit ate +ree bok +selfi e +elev ate +activ ate +bi ble +ca wx +b ys +cam ille +sy ou +sk ool +her t +w bc +ple dges +recor der +po sh +ac re +so aking +mat il +v sco +shoot ings +pla r +e con +ðŁĻĮ ðŁı» +rashi d +u bi +ðŁ¤ ¤ +sw inging +wi pe +rap tor +m su +music video +dur ham +at tic +apar ty +fe tus +activ ation +aa z +motiv ate +ðŁĴķ ðŁĴķðŁĴķ +j al +ठ® +ag on +sche er +stal ker +fo ster +az zo +tele gram +vi gor +s laugh +screen shots +entrepre neu +kri stin +inten tion +ch illi +fr action +don a +ge a +tc u +s ite +la k +em il +d nt +bor o +wil kinson +re cu +ato day +t anya +bl anco +cd n +brilli antly +g cc +ac c +evacu ated +ther ine +den ny +cait lin +she pard +pou ch +hand held +sou theastern +ha a +à ´ +re solutions +led ger +sr in +r ar +shat tered +chim ney +im with +mete or +hand led +ra ke +town send +en han +shi py +duc t +tw x +inflam matory +war hammer +theat rical +gro s +sk ar +sco tty +ni el +tit o +tin i +conne ction +_ . 
+goldeng lobes +sha q +ðŁı ³ï¸ı +hall way +fron ts +effec tiveness +gla ston +d hs +ex pi +to h +c pl +sc s +re o +ha g +resemb lance +hor an +abu sive +qu er +virtu e +cho lester +a q +shan e +m ce +carri ers +di stress +re wind + ¡ +voo doo +int act +ann o +ðŁĺ ¤ +pi led +adi a +ãĥ ³ +en ow +di gs +light ly +goo fy +turb ine +governor s +con te +re open +pa h +i ve +cra fting +swee ps +jo di +an de +zu cker +kaw aii +o ko +v ai +out line +kri sti +ts n +insp o +qu int +fil thy +lyn ne +listen ers +depar ting +or d +t weed +, & +ale k +sel fish +nor ther +recogni zes +i ps +be s +a ed +w ills +pe at +surround ings +mon uments +ais le +be cker +la v +quant ity +v ah +helicop ters +tu cked +alv arez +sha pe +o bey +ad diti +road side +m ite +bl ers +ep age +j au +ignor ant +b ins +lu lu +x o +c fo +ee eee +apprentice ship +shef fiel +to i +ho k +faken ews +deplo y +aid an +husk ers +ãĢ İ +west brook +mi ster +confi gur +car r +fic a +proceed ings +ha w +ste ak +mur derer +pay day +a jo +p vc +don ates +bi af +nom nom +be it +k ali +x rp +ahmed abad +se mic +che y +x tra +an twer +head lining +squ ares +roun ded +flu ore +bol d +disa sters +am oo +gener ic +cran es +brief ly +gi g +auster ity +anticip ation +for ti +treas urer +cann y +ce cil +dete cted +check list +ภ§ +pam ela +bar bados +an field +hear ty +tx lege +peren ni +arro g +ing ram +âĹ ı +ty ne +spo on +r ation +am ba +m be +cam el +h hs +york shire +reflec tive +fre aks +to k +ju do +partic les +du bs +ban jo +accred itation +prover bs +over dose +inte gral +gu ang +mc s +super car +af b +al vin +ail s +x tre +st aging +tw ent +rabb its +mar o +inste m +dol l +cr ay +sant ana +ble ach +mini ons +che ap +man t +di vers +catal onia +lo is +mat ri +cou gar +kay ak +e gre +p so +a ia +å ® +char lton +tr acked +sc ari +pe tt +f wd +x in +gra vel +br ic +bigg boss +ar den +hu gging +pal ms +st v +li mb +the movie +handic ap +ri me +z ai +stu b +indi a +lithu ania +rhy th +p ita +maced onia +high ered +brid get +schwar z +ske let +hi kes +ant arctic +c ps +mash up +Ð ° +n ell +chand ra +he ir +an us +sher idan +mi mi +muse u +bec ca +an ir +bar rie +dioce se +compar able +ðŁı³ï¸ı âĢį +yuk on +me p +hor mon +mer ic +al f +con quered +christ church +ðŁĴĻ ðŁĴĻ +hazard ous +poo h +cont ing +retro spective +par ame +na ir +con sor +ho tra +astoni shing +cater pillar +u man +ti sm +t vs +serv ic +croy don +mor ales +c g +cu m +te ur +scan ada +s all +magno lia +el ise +th our +à® ¿ +ag omez +phel ps +ë°©íĥĦìĨĮëħĦëĭ ¨ +wh os +weav ing +si sd +pro poses +cro ws +pre sale +econom ies +bernar do +sha hid +air show +mc cann +hor ticul +nr l +du el +mongo lia +tou lou +requi rement +struc tured +ed i +o lives +he a +cu ter +Ð º +enthusi ast +harri et +domin ion +sub mer +ðŁį ĥ +sa ab +nes burg +mo ff +def ended +bur t +rewar ded +gold man +op tics +khali d +house holds +buc kets +ce cil +che ss +substan tial +ef l +oper ation +evalu ate +st n +rece ssion +l ll +tom as +tru ths +ak bar +s words +p act +embarra ss +ha o +ay urve +scrip ture +ny cc +op t +di ameter +sc ented +organi zers +re lat +ha e +dream ers +de se +ðŁĮ » +restric ted +n ale +r hp +dol an +mun ster +ha ired +consult ants +jo ints +hu mil +d ill +relent less +t é +af il +ut ilities +japan ese +condem n +pet ite +colli de +q f +peach es +cou rier +l ore +âĺİ ï¸ı +reli ability +ch uk +ðŁĻ ĥ +stu res +ge ther +ho stel +bi er +- _- +â ĩ +e ze +ta ilo +di ent +blu ff +chu ffed +pil ip +mon arch +e em +bu chan +b ick +op au +ku ps +ภ¢ +pist ons +sp ins +m and +ce st +bur ne +v ile +cher 
ries +bec kett +need les +pan ch +ë Ĥ +haha h +trou bles +insi sts +do you +g mc +mor tar +deleg ate +in n +g anda +sin atra +ठ¤ +spee ding +pu pil +pre mises +ali gnment +pi kach +as us +j alan +Ø µ +lime stone +fol kl +parme san +ce il +mo y +shawn mendes +ac up +hu st +ot es +med ina +ma di +gta v +censor ship +ar g +swe eney +sy kes +col o +foot steps +cann ed +adv ance +gta online +healthy living +ðŁį ¾ +a ig +p ality +oc s +he brew +im minent +berk shire +jeremi ah +out going +bak er +entr ata +ma ids +gro ves +bo c +a del +m fw +con science +arm ys +nut ella +conte stalert +novel ist +la h +ban ker +marque z +ðŁı ¡ +to ff +out age +gr p +ðŁĺŃðŁĺŃ ðŁĺŃðŁĺŃ +musc le +du dley +nvi dia +mi di +m uni +ess ays +dat ac +car ter +ภ£ +t ans +i ves +public ations +al er +ok wx +il u +cu tt +har p +out law +luther an +br ill +bo lic +do well +green land +be sties +path i +pay ton +gue st +har den +ðŁ¤ © +ann ed +evacu ation +po ised +mc der +b han +o i +envel ope +ci d +ca vi +ta pas +book review +grey hound +âĻ ª +fe ud +lun gs +for te +rai der +ff er +oni x +dep end +yn wa +rel ating +de vs +ðŁĴ IJ +acqui res +d ha +j yo +priv ati +can ine +k b +cra b +sar din +imag ining +k j +em por +down hill +ne z +ta eyeon +nick imin +gb p +à µ +w ap +sec co +ma shed +ðŁĴ¥ ðŁĴ¥ +augu stine +diss ol +dic tator +â ĵ +vi per +ed fringe +vau x +hard work +book let +no x +chi ff +ðŁĴ ¨ +observ ations +xbox one +u sher +ke er +lu p +dal las +cal gary +ma dra +di ous +k bs +wood ward +hero ine +lu mber +sea world +o ws +mc ke +maver ick +gu la +cross roads +fan g +s ade +nik ol +chee tah +me c +pp g +er ick +ðŁİ µ +tox ic +bj j +viol a +sp ire +ch ino +tra vis +institu tional +ha as +low ry +w ac +ea e +hu mid +mp ton +ru ck +je w +c ine +zim mer +se f +bhar at +fre es +aam ir +ðŁĴ ħ +z inc +wan e +multi player +royal wedding +e el +preci pit +qu ery +kimber ly +isa bel +ful fill +ig an +vau l +pan e +sc y +dig it +gun n +u tah +dog day +fi on +xia omi +da c +el ast +cha vez +ro blo +g ine +ten th +ab h +ke to +hur dle +na dia +memorab ilia +ha bs +qu an +h w +hv ac +pix ar +ec cle +kram er +accu ses +ðŁĴļ ðŁĴļ +per se +mean time +wa hl +atle tico +âĢ¢âĢ¢ âĢ¢âĢ¢ +ott oman +no vo +k us +conne cted +tru sts +d mv +spen cer +rahu lg +do ve +sto kes +bolog na +enthusi asts +à ª +rockstar games +ted cruz +du ras +s acked +late x +immer sive +cer t +lu cin +princi pals +fa res +sa ils +far n +am ent +saf fron +quent in +check point +fer ris +ex cur +ðŁijī ðŁı¼ +bai ley +se h +ter re +mad am +s band +wan derers +cumber batch +yy c +digit ally +blackandwhite photography +roll in +moroc can +ðŁĮ ħ +din ner +d well +to om +m ye +ez ra +cp fc +war hol +me er +jon ah +no aa +s gate +so on +secu lar +g ating +ti o +dri ver +si ssy +assan ge +ta th +ed mund +bobc ats +ra ji +po stage +stu ds +m gm +kat o +edin burgh +meet the +shir t +fa a +mens fashion +sp reads +wi m +car ts +phoe be +j ars +bot swana +Ù Ĥ +ed war +sk ar +ri ve +gu sty +c tv +ferdin and +su therland +nickimin aj +k v +si us +bee ch +re z +desi res +on ial +camp o +quar ry +lor raine +gil more +ig gy +µ ï¸ı +ho pping +avi z +ðŁĮ º +uni sex +dedic ate +att itudes +ste er +jun kie +rail way +y b +whi sper +key an +k us +ju g +di x +a ins +sum mon +ov ich +sy ed +her ald +ma ison +me ded +wild flower +main land +ri sky +ru kh +over looked +ki c +destro ys +nam an +ki p +z ano +champion sleague +ban dit +quin cy +smi le +cal vin +open ings +ta pp +ol ulu +spec tro +accred ited +ap k +pra ised +bar nett +pol len +premi ered +selen agomez +tou red +screen ings 
+uu u +mis o +en se +adam lambert +guel ph +har yana +hu tto +le ar +l tc +po ached +brex it +æ Ŀ +tt c +pa vement +mon gers +ro e +ad ers +ling ton +particip ant +ca red +ga il +y ates +lan tic +dash board +jo o +feli pe +ssi onist +bu m +s end +a eri +thu gs +luci fer +a he +dete ctor +fil ly +gas oline +ham per +hump day +the ta +the band +fore casts +o hhh +lo bb +hol l +cp u +az u +ad ar +hai ley +bu b +car t +quo ted +an archy +pan cre +twit art +al den +st ash +the less +or ni +belie bers +mor mon +partic le +avi ation +⬠Ĩ +webcam toy +sad dened +cru is +ham let +n ct +roll ins +marque e +saw yer +reli ance +a ura +di ec +soo thing +sig nings +ak is +à ³ +at kins +aer op +ðŁĮ ¿ +y ab +sh ari +con nol +du bbed +manufac ture +convin cing +feelthe bern +ra u +pu lit +on ec +gem stone +ur ging +bag u +ga h +aci ds +fi anc +zodi ac +sn oop +her rera +initi ated +ven ge +profess ors +pro di +stron ger +e mission +bb a +hal le +ta pp +haw an +wh im +compe ted +myr tle +ir port +cold play +ach e +ske p +m son +ss ic +calli graphy +swim mers +me y +pp c +thri ft +po c +re places +commu ter +âģ¦ âģ¦@ +go ers +lo gue +para dig +bas kets +sensiti vity +joh an +atl antis +& & +suit case +anxi ous +l h +str i +gal loway +stre ad +war den +gr ounded +ffici ency +li feat +reli c +disgu ise +island ers +f cofficial +classical music +b mc +en field +bi que +oak ley +bat man +sla ying +ner ves +mul tit +calci um +projec tor +scott sdale +ant ino +gri ps +kim mel +des mond +prote stors +hi atus +metaboli sm +conclu ded +press er +ti pping +sli de +e to +hun ting +aus open +ri k +pp ery +innov ators +pitch ers +ag ger +fun gi +z ad +proli fic +rockn roll +bl ames +ct ar +stam ford +q ad +mozz arella +insan ely +den ver +ph ouse +nom ad +ï ¿ +s ris +pro du +hen ley +pag an +am trak +ru bi +in cl +tu tor +sco tia +wo es +sing apo +fun nel +turn bull +know ledge +gri mm +real madrid +we are +missi les +con sol +emo jis +sne ak +smi ths +ru iz +br ou +i el +ha ver +ðŁĮ ļ +kin gof +basil ica +circul ation +prin ters +ta pping +ri dley +dra gged +ha j +writ er +fundament als +personal ities +me tre +stereo types +bur le +best of +n ffc +ha th +mini stries +a ali +trac ing +pav ed +ł ï¸ı +g ic +insp ire +tu g +ha re +repe ated +ex pon +lol li +rho de +pre cin +install ations +instag ram +az ar +i es +sole ly +du kes +mission ary +van guard +fursuit friday +on d +pol ari +ma st +har an +jos é +jack ed +ec oun +al ities +ne ph +ra vel +moder ated +sco w +s fb +uru guay +as o +ni g +au du +p ints +lat ina +ben z +m itting +char ted +mat ology +cit ro +biop ic +ðŁij Ń +djo kovic +fox y +agu il +so to +an ada +sin king +sc rap +hair s +bethan y +fact friday +ðŁIJ IJ +unlea shed +) ( +contra dic +ram on +coast line +y ong +sn sd +li gan +p ome +mit age +ge tt +wat i +ri sk +so aring +bru sh +f pl +av an +å Ĩ +lar son +sh ear +mul til +blu r +multi media +chun ky +par i +n ani +weir d +cholester ol +char les +dream ed +tan ning +puzz les +fr am +hand ball +ch ag +beli ze +al u +bang s +Ñ Ħ +detec tives +mc g +ish q +bo thered +saf c +mp ing +ten eri +g ays +sail or +an gi +mul ticul +gue ssed +ros é +high ways +bro om +chatt anoo +- ' +see ker +on ed +at f +lu c +> < +bar i +per cep +jewel ry +as ph +sor row +sl ing +mam moth +jac kie +ë § +wilt shire +sa o +can cell +im paired +tor ial +bre ed +guy en +jud ice +tit le +pro spective +applic ants +ðŁį Ĭ +epis cop +e id +b yo +stock ings +ðŁĴĥ ðŁĴĥ +ll p +sna g +keep it +l ough +ol son +matur ity +!! !" 
+cop ter +i sha +bl i +wil mington +tr youts +th ai +ðŁ¥ ³ +pe bble +kra ft +f p + º +ssi vely +li vin +contest ants +tex tures +jo an +h dr +film festival +prov ence +wi do +op end +c si +sto wn +cro ati +ad just +host ile +analy sts +il an +cu ppa +bru m +newfound land +good win +me tt +mall orca +plu gs +bu k +bb hutto +wrest le +sa ire +sho pped +for za +le head +vi vo +ba st +ro xy +reg is +hard working +hon olulu +desp air +young sters +ni g +impro mp +roll tide +de emed +tre ason +ru shed +for ged +ff f +pikach u +bri ggs +do it +ac cent +la us +gla ze +compet ent +a ho +photo g +mid field +le go +har vard +min orities +re illy +slic ed +once upon +initi ally +financi ally +landscape photography +har dro +qu o +mm ers +par kinson +smu gg +read iness +bru tally +glou cester +mp ed +bbhutto zardari +mur der +ye d +dat aviz +sr t +dow ning +bi ans +m ü +fle ck +fli pped +s ly +brilli ance +ri m +k um +bubb a +ko i +knit ted +sor g +ma is +ðŁĮ ² +ti ss +su stain +sen su +ak han +zi est +exam ines +chardon nay +user name +short list +re bs +on o +dar ing +hard wood +che que +righte ous +light ening +dir k +shra dd +du ra +down stairs +sh al +ami gos +ru ff +s law +ri es +red nation +man us +ðŁĩ§ ðŁĩ· +distin ction +u bun +dur an +mi gra +thi ans +la ver +domest ic +k x +jaz zy +justi fy +belong ing +insul ation +color stv +drun ken +chann eling +qu and +xi ii +enligh ten +kan o +fati ma +teen choice +terri fied +p ba +as ley +met museum +dun e +pack er +ki o +ðŁĴľ ðŁĴľ +bo iler +fas cism +ar mored +back grounds +in mates +embarra ssed +defin es +th d +we go +silic one +lo on +el ding +bor rowed +he mp +ak sh +kaw asaki +br y +de af +kill er +dispo sal +ðŁĩ ° +glaston bury +un covered +o xide +po ff +d ant +k j +ku ro +dri zzle +peop les +fe e +pro pri +dd lovato +pi ggy +ot is +aller gies +u bis +pengu in +ser a +vi z +prosp erous +ici des +tornad oes +sene gal +web cast +sto red +enchan ted +bb cone +bay area +entrepreneu rial +rednation rising +experim enting +ang an +lot to +they re +por e +er p +seren e +east wood +bro kers +bar ge +stal lion +timber lake +tailo red +dy stop +b ate +lat ors +di xit +bran son +dynam o +ky lie +shame ful +bt wn +spring time +mix ture +s ounded +lu ton +dad es +mal a +op ra +en ic +rahulg andhi +se wer +~~ ~~ +ky u +nor theastern +ca er +bc u +nir vana +kitch ens +ous y +al m +river dale +hid den +fl int +sp d +pat rons +katy perry +au gh +exhib itions +sm c +shu ts +at ore +da in +some thing +ber th +bo g +por ter +gen to +con cussion +ang lic +ro we +gr illing +scar lett +master ing +mor nin +comm ented +si me +si zing +christ y +ce os +st m +at ry +tari ffs +vac ation +pre judice +p su +paren tal +far age +can a +cap com +koso vo +you re +men stru +stal in +grape fruit +br an +che sa +dav en +exc el +!! 
) +๠Į +distribu tor +ce a +bride sma +millenni al +wa in +ob serving +mis ery +plan etary +expo sing +bra ised +comp ton +don gha +q l +spring steen +th ul +syl ve +cab o +pal ad +niel sen +gaz ing +ba ja +r oud +orchi ds +johan nesburg +se man +d ji +oper ative +affe ction +eclec tic +at c +mut ant +aw x +nic e +mel bourne +indu lg +tu lip +dias pora +wel p +big gie +mississ auga +retri ever +or an +tam my +c ta +hipp o +seas oned +ger mans +eng v +marvell ous +im f +rela ys +mon tan +maur iti +me ister +as surance +reig ning +su fficient +han e +no thing +pos se +nav y +in love +brigh ton +en qu +ch ung +sweat y +es c +cal ed +man s +nicar agua +sl ices +mo cha +washington post +bb n +dam ned +grow ing +en burg +lo an +me s +wh oops +believ ers +spi el +vo daf +l at +s led +cricke ter +brown e +golf ers +bar ra +wat chers +lu igi +sw amy +mom s +pit ched +san tor +cr s +si re +sc amp +bo de +ste war +jon ny +ent ity +pac qui +mind ful +min india +bear ded +temp t +scorpi on +eat on +authori zed +ar to +s vp +op athy +cch ini +house music +disney world +âĢĶ @ +pro pose +di y +expen se +ten g +pupp ets +sm el +d aca +per ry +fin n +boo sting +lefto vers +cou gs +satell ites +man y +az e +g ong +fi e +metho do +fer ries +ðŁ¤Ķ ðŁ¤Ķ +explore rs +load er +attrac ted +il ton +godd amn +pi azza +doc tr +sav ing +paragra ph +visu alization +may ors +work flow +ack les +ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ +ठ¸ +twer k +clu t +lo ver +te ases +si an +o te +deter ior +accor d +l fw +swar ovski +nat al +tra ps +k ina +analy ze +laye red +bever ages +un it +ran som +pe shaw +dest ined +astro logy +si pping +miley cyrus +cam ino +marshmal low +bli ss +out back +fa q +int oler +humil ity +po ppin +hallo ween +mon tene +op hy +nu n +tattoo ed +a as +ðŁĮ ³ +dale y +qual ity +du sa +fisher men +swi f +ter rac +st au +le in +trol ling +ship ment +garden er +march madness +head band +gr t +bur nett +w and +!!!! !!!!! 
+gh e +du x +hu d +war ner +ðŁĩ ¦ +ex ile +rescu e +rat a +d han +duc ati +dro wn +bl ends +spi e +alli gator +simul taneously +broo ke +u ke +k har +comm union +ri ka +ford fc +chin atown +you rown +me y +can al +syste matic +de pri +ox ford +an il +w ut +equ ation +be z +fle ur +the good +lang ley +ad ity +ed ith +al fie +о ÑĤ +en cry +br ill +ex emp +ce sar +mb ling +ab ri +sc icom +j ing +school ing +mi ka +mechan isms +impromp tu +rhe a +moo re +crime a +be sto +wri ght +el ders +ro ds +kam al +folkl ore +be et +mini on +reli eve +thr o +team usa +pas cal +made with +boli via +itt i +free bies +desi red +best selling +l iness +la den +ke ane +mi sts +hipp ie +atta chment +@ / +se w +flan agan +âĿĹ ï¸ı +supre mac +stl cards +si as +q u +rh ys +ste ep +val leys +v w +pav ing +disp at +al ison +por te +id u +new sc +soc ket +mo s +co star +re vo +prote ins +stanley cup +m cal +ear ring +se cs +mc lean +cap ric +nick elo +ad en +v c +shou se +adap tive +maxi mize +entertain er +pro se +gri ffi +six teen +lam ar +mi rage +saudi arabia +awe ather +ru st +in filtr +fashion week +ðŁĺĬðŁĺĬ ðŁĺĬ +selec tive +bubb le +a den +fen nel +deci sive +m ta +mock ing +mb les +st amp +mu le +bernar do +gr in +po tt +j ingle +vet tel +colom bian +cam o +motivation monday +ba han +p ly +dh ary +k ami +x men +sleep er +gar a +my sti +confi dential +conflic ts +p neu +ce s +insur tech +clean se +me rely +va is +tu x +the great +shar on +ma j +hol a +eco systems +aj ay +aa j +hu sh +har mon +backto school +wiki leaks +reflec ted +ðŁĺ ĵ +commemor ating +ac et +buck ingham +messi ah +tu ous +hor net +to be +d q +he ine +mi g +pl ate +nichol son +sp ie +cumber land +nor mal +pho bia +happy halloween +city fc +mc el +gilli an +ke to +lu de +de mise +su ga +str ate +mcgr ath +visit scotland +foo led +cb r +gc se +col ori +po td +missuni verse +fin ances +ma poli +for ks +Ø ´ +cann on +medic inal +ðŁĹ ĵ +kh o +wre ck +pan to +bag el +gu ll +syndic ate +ic y +pr c +ki en +zi ka +ti sh +pe ta +c co +li za +ch ut +ex traction +el g +gl i +fu eled +pos it +respec tively +leice ster +br ink +vulner ability +im ported +e sha +ðŁ¦ ħ +r ural +re ll +gam ing +atlan tic +aband on +no ah +re solved +pro state +aller gic +ps d +âĺ ¹ +dun geon +fang irl +illumin ated +m hs +white sox +d ently +ck o +endor se +over ly +dazz ling +prior iti +night life +ut il +be have +flam en +east bound +ðŁĴ Ł +ilove you +gov uk +mozam bique +alle gi +dr i +testim onial +ath s +ì§ Ģ +mm y +shab by +pro secco +friend ships +cal am +dam ages +off set +jura ssic +jun o +arre ll +ðŁĴ © +interven tions +dare devil +car ver +run away +ran e +truste es +ha ute +dep ths +ðŁİ Ń +me in +sacrific es +con cier +ne sting +i zzy +me tam +ilove my +ur ine +du lu +mal hotra +ve ins +night ly +co at +an di +he witt +lon el +ci ble +wr ite +jen nie +sant ac +ĸ ï¸ı +str ato +singapo re +sop rano +kri sten +cheer ful +flee twood +fa iri +m eli +wa st +tur nt +sfor sale +sc rolling +angel ina +ren dition +jeric ho +nick y +or b +fla vo +patri ot +ash eville +sick ness +re fund +aggre ssion +b pl +ãĥ ĥ +elu sive +thi story +hang er +bu ffs +vil las +at kinson +sp h +ja it +decl ined +wo k +supre macy +oo tball +ey ang +ðŁİ ĵ +s ford +ath i +consu me +road ster +e so +u pro +reci pe +au f +uc i +ar on +oo oh +cs go +re ich +mc d +min ute +ladi es +pun k +rut gers +mee k +ariz on +ta j +land lord +de gra +autu mn +lyn x +us f +b hi +fairy tale +dongha e +bet sy +explo ded +chen nai +op a +pro tag +br ant +ðŁĵ °: +g f +pal li +ðŁı¼ âĢįâĻĢï¸ı +su t +ill ini +colum nist 
+shir tless +de centr +sear ched +ec or +bu ggy +s ack +ðŁĺĤ ðŁĺŃ +de t +ther i +or naments +bring back +to v +quarter finals +ic he +con stra +gi er +buchan an +vi x +kay aking +mu stread +swal low +mel b +sc af +op al +may oral +har at +ðŁ¦ ĭ +schedu les +id f +ha gue +ro z +a ah +d mc +du plic +ca che +orph an +frac ture +rec on +ch av +bun nies +al ain +mustaf a +ðŁİ Ļ +vac ations +dynam ite +tex ted +broad caster +ðŁĴ £ +ste amed +rock er +di etary +luxury travel +inaugur ated +sa wards +vaugh n +lincoln shire +click ed +kra ja +f anc +remo ves +layo ffs +mc far +bre eds +win nie +jon ghyun +incen tive +vari ations +pat ton +atur day +persist ent +pr un +pi ers +dal es +æ ĸ +breast feeding +r ance +ta wa +Ĥ âĸ +mur doch +cap tive +thi stle +nic a +commod ity +cou ldnt +board walk +graci ous +practiti oners +n gc +scru m +ner o +camoufla ge +col on +he i +phys icist +saturday morning +ten er +si won +colum ns +bru ne +y vr +ba ir +reti res +hal am +cab er +shaz am +min u +cas cade +milk shake +gri d +d ren +vin cent +so dium +plat ter +cheer leader +chen ko +y ak +elimin ated +ty po +y man +re think +âĿ Ĺ +ts ville +bernardo kath +ex tr +ðŁĺģ ðŁĺģðŁĺģ +ta o +re per +mo ths +em powered +c iting +transpor ted +mon ks +san at +cle ars +bachelore tte +camp bell +racha el +har le +hand ler +climb s +inter ference +rele ase +sh and +r bs +hr h +ãģ ª +val le +r é +sli me +w akes +chu bby +slo an +el ves +ath en +attor neys +micro scope +ston er +sc aling +o be +c out +se man +mid week +bal sam +ðŁĺį âĿ¤ +ti ful +v ish +lo tta +ri pping +re mn +ti re +le ap +ha vent +la by +hi mach +whisp ers +we in +ðŁİ ¸ +wild flowers +se le +u cc +li ability +az ine +sw ings +k ya +ta ir +re main +e do +flo ps +poc ket +grand ad +exam iner +gr is +ffe ct +ðŁijĬ ðŁı» +stud ded +heart beat +de acon +firm ly +infec tious +ste f +out lines +le asing +cla ws +sen se +tab s +hoo t +mo sul +spa wn +co a +hog warts +ve in +alban ia +manu el +b ino +vaux hall +scot land +go bucks +mat ty +phy sio +tor ino +const able +investig ated +s lower +mistak en +bay er +wild fires +vo ic +x on +time to +chas sis +bar ric +pi on +bald head +woo k +regi str +dra fts +b hs +li gue +l ick +staf fordshire +baf ta +dar ry +je anne +ven ding +cor p +⼠³ï¸ı +kid dos +fen way +ca o +west bound +ðŁĺ Ļ +dv r +quick er +bla h +goo die +ðŁĴĭ ðŁĴĭ +vo x +esp er +fac ade +cor relation +red bull +rou p +decl ining +chi ve +mc gee +tur o +in der +f eller +fu g +il ysm +mar di +peshaw ar +ki eran +ine ma +meat balls +pe ck +depre ssing +sen sing +gi z +dd ington +spring watch +ro aming +yellow stone +horse shoe +am man +week day +ol or +ðŁ¥ ° +boo sts +spr int +scar ves +je e +bee tro +cl an +all the +ìĦ ¸ë +enlighten ment +ado be +re generation +? @ +cont ag +yach ts +to u +mor a +en voy +r ani +go li +dhanush kraja +wood working +streng ths +se di +disc s +ar ina +sc on +lit e +ano ther +ðŁ¥ Ĭ +ye men +gu ern +sav vy +lo yed +biom ed +heart break +comra des +milli e +pat ch +un f +jar vis +bl aming +commemor ation +ge y +å ¥ +cardio vascular +alig ned +docu ment +. ? +aesthe tics +em u +the irs +le h +ps ic +si f +pl ateau +ex pend +domin ating +rob es +mauriti us +excep tionally +hom er +discover ies +bra un +ten nant +insul in +ðŁİ ® +car bs +te as +? !" +zi e +franco is +brow sing +th ol +cla rence +hel per +ob tained +cas sie +le es +! 
, +pome gran +hu bs +presti ge +] [ +mach er +bott led +pun ch +pi pe +o ch +gall ons +deliver ies +u ra +un day +mon de +depic ts +re gency +outra geous +khal ed +car o +he arti +za g +develop mental +over coming +stati stical +flavo red +for ds +cre atives +lau rence +di as +sun screen +in ked +pre acher +n ul +impac ting +auti stic +âļ Ķï¸ı +o ss +pel icans +cele ste +v b +ru mp +mc gra +fair fax +hu mor +bbc news +row ling +cal der +seam less +ag ne +p ti +mix ed +t shirts +mer ci +b tob +women instem +genealo gy +pre ven +l our +cra dle +gi use +Ð ¾ +chron o +fair ness +chocol ate +tor y +as da +pre scott +stret ched +al man +u il +re charge +in tre +ob st +hosp ital +hay ward +teneri fe +fried man +vap ing +confe ssions +ye ah +bal li +luck now +cor pse +sculp tor +amp ton +t pp +indic ates +sur plus +tru man +ðĿ Ļ +sin ha +in vo +sovere ign +ke v +establi shing +engra ved +assu ming +ðŁı ģ +sou za +fab i +ton ed +oun ge +del oit +dow ney +no ble +om or +car tridge +ðŁı IJ +u hur +hol loway +succe sses +r sa +âĦ ¢ +ma zz +tw d +disc ourse +. < +y at +satis fy +com pri +ठ¹ +graph ite +disser tation +ar ter +í Ķ +b ally +zom bi +ly ons +a ic +u bc +pra da +e il +da x +cla i +grand daughter +extravag anza +chall enge +ðŁ¤ ŀ +po ver +primar ily +dad dy +man a +bi kers +inqui ries +da un +fel ine +gener ative +he f +benef iting +lind sey +pol ka +demonstr ated +al le +rand y +o su +low key +weir dest +red bull +our y +n ous +wood stock +cre denti +nic er +g ado +aly ss +ap h +prepa redness +station ary +incorpor ated +dy er +sarato ga +cele sti +: " +antibio tics +or gs +inde fin +ap ron +и Ð +fif teen +no f +ðŁĶ Ŀ +ph x +te ga +m z +organiz ational +on air +band ung +pleas ures +mor i +secre tari +rac coon +ca shi +pil ates +k on +geof frey +la o +kam p +depart ments +back packing +an am +à « +crack down +aun ty +on do +li zzie +ph ers +cu n +ðŁĩ ± +k pop +pu t +inten tional +connol ly +bar clays +hs fb +swin don +u ku +s ally +a int +âľ ħ +pen ang +up lifting +epile psy +inter ro +bun gal +go ku +blue berries +ठ¦ +u ssia +sil ky +mou red +i stic +bri efs +me ats +go b +ch aser +state wide +pra sad +gl itch +ar in +ban ff +memb er +ðŁĺŃ âĿ¤ï¸ı +lo ving +hall a +ภ¡ +smo kers +yak u +scicom m +physi o +sw ol +lem ons +gel ato +ch ool +capit als +ki stan +ti ghts +spi kes +trav ellers +ik lan +commissi oning +ar ine +emabiggest fans +empha sis +front line +pad dock +destruc tive +ba ha +l inger +je wish +shet land +mc gin +mon key +ko z +s one +raj ini +te h +y en +c vs +masqu er +gir ly +we sle +was nt +bro dy +termin ator +gil le +mag gi +bir die +jeopar dy +cu bic +vm ware +intric ate +an up +to pia +east on +sab res +investig ates +bu sting +bil ingual +valent ino +in format +fer re +advent ur +hydr ate +for sy +az iz +san to +e de +whist ler +continu ously +d ham +un used +ji had +addic tive +vi dy +do b +i do +fi ed +ni versary +n one +fu er +ðŁĺį ðŁĺĺ +coven ant +prin table +immac ulate +o em +cl t +serv ants +consu med +un released +sc um +pack aged +me re +ìĦ¸ë ¸ +to by +ta f +spo ons +me al +f ball +fair field +jan et +silver stone +dart mouth +follow me +voy ager +kom bat +anni ver +ene w +mag dal +ho ve +sa th +grizz ly +car di +gart ner +sand y +kan ye +post ure +po ign +im pulse +radio logy +horiz ons +si am +aish war += => +no che +tr is +el yn +com me +du i +ce c +councill ors +cudd ling +creep ing +loc ke +manag es +trans ferred +ne cks +di er +dan o +v ick +lun ches +d he +en sures +cri ss +ul ster +bann on +cont enders +sp am +sweet ness +med al +hon duras +arc tic +ultra 
sound +in fr +disco vers +ei ffel +ca sters +ru ben +du st +awe ed +atri um +lest we +se ared +ðŁĵº : +ty ne +ex changes +little mix +l le +astron auts +hersh ey +work day +kno b +so v +re signs +today show +der man +an th +af c +ta ster +sw oo +sa eed +per ing +narrow ly +rn li +best buy +panas onic +obst acle +farmer s +ðŁİ Ļ +pa wan +ki est +ang ers +absur d +oh my +sin o +pist achi +sp ice +giu li +prime time +ko w +k ens +ex agger +! ?! +u ba +midd les +ju dd +e jec +slam med +pen sions +of a +re create +b hp +xx l +liver pool +thre sh +pur ity +ni eu +hol ics +wr ath +ra do +gli o +am ma +dile mma +cr u +lets go +.... @ +âĿ ĵ +sugge sting +tru mps +hor us +f v +ic om +refer ring +predic tive +tar ts +ge tte +so ck +glo ssy +pin ky +al ec +thy me +ou ra +thero ad +pe tr +cr am +p fi +dv n +me ier +incen tives +tun nels +mobi l +rec ap +extra s +upri ght +rev amp +per severance +, - +ot p +mir ror +ar wx +ger ry +ma her +g or +hom epage +am is +ag ra +made le +best friend +sirius xm +bun dles +admir ing +t dsb +ðŁį ģ +ch as +slow ing +ro h +wall papers +â̦ / +tek ken +gang s +tal a +lind say +shou l +line backer +tool kit +ur anium +caly p +ab rams +mat thi +ðŁı ¿ +hon ourable +da yo +ver sail +tan k +st c +fr itz +spl end +pat ag +anno yed +on day +devast ated +chattanoo ga +national ism +mas sey +jen n +tail or +dev gn +org ans +zu cchini +on fox +sat ire +wex ford +dis grace +no to +vol ta +âĿ¤ï¸ıâĿ¤ï¸ı âĿ¤ï¸ıâĿ¤ï¸ı +à ¶ +home owners +poin ter +m cr +au sten +day sto +mo ons +pal ma +gra zing +e so +influen cers +shahid kapoor +compli ant +measure ments +develop s +y d +par l +p vt +rand olph +tor tured +ger ald +eli as +deepi kap +war mup +hick ory +g ap +co ffin +am our +re neg +moun ting +seven s +ig le +hi er +dec ad +tri ght +esc apes +wer ner +t fl +ful filled +ni ger +sour dough +re aper +choo ses +spin ner +week nd +fil tered +sh uk +kat i +old ham +open source +kh anna +at elier +conne c +opho bic +gla s +complic ations +ar son +counc ils +sm ol +as sy +lur king +ling ui +han ks +e in +Ù ħ +ru gs +n guyen +nou veau +men ace +le v +alad din +ru ining +round about +k m +con or +shoo ps +may day +traum atic +prab has +ka iser +k ita +rou ter +pe dro +re tar +stun ner +spani sh +distur bed +acade my +e learning +wit ty +sen g +fer al +av y +sta b +ke aton +ur du +ko to +hu i +coo ke +ari an +the personal +u ma +se ap +a sting +rhetor ic +hand writing +munici pality +consor tium +ðŁIJ Ł +glasgo w +ra ya +eli za +polym er +bro th +prac ti +correspon dent +addic ts +gay le +ail ing +o fe +p li +hear tw +st itch +sight ings +prie sts +sam o +slo th +good wood +roc co +sab c +summ it +l ace +pres ley +itt en +cin cy +thepersonal network +s week +pe gas +af con +regi stry +ci m +le th +dic ap +cand ice +flu ent +sm ack +pede stri +al oud +car ac +priyan kach +p gh +ir ons +dol ce +lat via +dece ased +thero ck +cla p +cen e +fo am +morris sey +gre t +essenti ally +com cast +be agle +argu es +ing ed +- â̦ +sa g +ha san +ðŁĻ Ĩ +ðŁį ° +nh ra +kann ada +indic ators +on er +bri xton +at as +screen play +sor ority +sha heed +he em +class mates +tain ment +es i +breast cancer +zucker berg +aur or +en cia +ref ers +kae per +vor tex +com part +lym ph +photograph ing +ste ff +rest ling +par sley +mom ento +th man +lac king +du tt +ocu lus +fin o +fren zy +ra sc +der n +dis missed +noo k +met gala +sh ill +rapha el +maver icks +exhib its +eag erly +c pa +amen ities +. 
âłĢ +exo dus +ern st +lit a +deal t +womens march +i ain +score board +campe ones +c en +ti ki +garri son +fidel ity +bra g +road map +psy chop +lo e +ble u +ðŁijĬ ðŁı¼ +sau vi +spr inger +temp tation +ru dolph +ac ura +wic z +parach ute +stro l +len ny +zi k +dom s +nb af +al pac +vivi an +ro ve +pre et +perpe tu +sna ke +air soft +infl atable +prin ces +ati e +ffe y +pati ent +m ire +chel le +sl ack +groo vy +# : +up loading +!!!!!!!! !!!!!!!! +siem ens +provi sion +v fx +need y +f ats +to poli +bhu tto +sa thletics +alu ms +t winning +south western +adop ting +last night +man ne +la ga +tw ell +ac ia +-- -- +eye wear +hur ley +fle e +sa ch +pe cker +cost ly +is k +cr ates +polic y +ero sion +in go +wer k +ðŁIJ į +torto ise +therap ies +inter net +chihuahu a +ri ps +fre i +ed or +tai ji +t fc +do d +demp sey +christ in +chen g +hi ps +gra eme +com passionate +cavali ers +histor ic +soul ful +crimin al +ja c +vin ci +expi red +sur at +turi smo +k ona +se aweed +ber ts +le ica +expre ssing +a al +wor t +break fast +her ring +am used +rhu barb +mar tian +cospla yer +y ash +stri al +ra ul +refer ral +dw ts +j w +ad ler +cur tains +gu r +val ence +tyr one +sw fc +coach ed +re born +diabe tic +cho ke +nor folk +investig ative +ðŁĴ¯ ðŁĴ¯ +z id +v mas +phi e +objec tives +âľ ĭ +over due +di vers +mat su +ðŁİŁ ï¸ı +casu alties +ภ§ +al k +stand ardi +re alist +arti facts +pand or +ke x +in vin +( !) +ine y +par aly +mr t +fay e +the voice +on ga +de ed +skin ner +az wx +speci men +priyankach opra +nu evo +bar kley +toulou se +resu mes +football ers +cit i +fe tch +è re +lestwe forget +ðŁĻ ĭ +ch unk +dri fting +manipul ation +equ als +pu tt +ky ungsoo +âĿ¤ï¸ı # +ela stic +par ano +fo y +do ping +cin cy +ss ler +interrup ted +al ay +ado res +ame thy +con voy +ãĢ ı +Ĭ ãģ +black list +gener als +sa chin +bru shed +oun ces +non stop +illi ams +bt sarmy +u av +ru ff +bur ma +bi k +defen ce +schul tz +bo asts +lonel iness +go re +trans forms +alum na +@ @ +ra ppers +ne hru +car o +himalay an +wearab les +ge h +pepper mint +re development +flam ingo +cos by +big baldhead +ag ri +bare foot +sco pes +re gram +gh ana +ðŁİ « +i heart +sa die +carri e +microbi al +ku ala +sk ater +quer que +âĻ © +gen res +reas oning +ch ased +as o +sli pped +en can +vam os +ker s +ad verse +mo il +commod ities +with you +sil ent +hy pe +an de +am ination +whi spe +lit z +âļ½ï¸ı âļ½ï¸ı +ri ff +pp y +lam bs +gan esh +ab sent +regu lator +marse ille +en roll +par cel +wa p +by rd +ðŁĩ Ń +tu ber +country music +par l +contro llers +responsi bilities +we y +ch ate +montene gro +chic o +mil an +l ms +tra inees +appropri ately +un certain +popp ies +ed sheeran +nutr itious +gar o +deut sch +awe some +ãĥ ¼ +comfor tably +land marks +et i +re usable +daniel le +ro sal +co les +just ic +c cs +f anny +ni m +mc u +clin ch +at ene +mer ge +im db +ang lo +uc cino +pan ini +an not +bur berry +feat ure +predic ting +fashioni sta +s ask +imag inary +mm o +south sudan +spe ar +hu bble +jo inthe +coyo tes +sli go +ko dak +sit com +polaro id +roo ted +corru p +ðŁĻĮ ðŁĻĮ +bris ban +at z +ah l +re my +tal ent +aval on +ra da +pau line +locom otive +go ons +ne mo +maser ati +ic u +stu tt +histor ically +sm b +pres by +avo id +so oners +rhine stone +w ad +ri sing +tro t +mo des +reg ent +optimi ze +re ece +sm u +ver ti +newyork city +cor tez +ra c +in case +sin c +fiel ding +e tta +tiff any +al monds +sad dle +k rat +mat ter +g low +star ving +gl o +cra ppy +sl ur +st d +monit ors +recei pt +maymay entrata +mc il +un is +rain bows +cal dwell +pacqui 
ao +j op +a fe +hoo k +es sen +wiz ard +medi an +fla ws +com s +âĿ Ħ +ing h +ha ynes +anton io +tem plates +ou ter +na w +cardi gan +bel grade +ðŁĴ ī +hom o +a ise +ro pes +no ve +what you +tri gge +concep tion +ad ukone +na di +fri ars +sw er +adju sted +hot line +san ity +kau r +down loading +c gi +ten or +eth nic +app alach +ภ¸ +pa g +gol ds +on set +investig ator +car tel +peace fully +jarre tt +cat alan +poli o +n um +fru stration +dhar ma +my life +âľĮ ðŁı» +aber deen +mu sa +bin der +spark ly +fle eing +instin ct +co ping +domin ance +ill ers +er a +u conn +lo oms +living ston +gal i +he s +c ma +bel a +se ley +mon k +la ch +mar x + ´ +m erica +woman in +es sex +ra ina +jim i +nep tune +z ack +chine se +mart ins +chand elier +her n +with us +ear l +asph alt +modu les +st p +ul la +psychi atric +mile age +captiv ating +si der +men to +mor t +tran ce +tal bot +ab by +ì ĥ +âľĮ ðŁı¼ +j ak +daw n +turn up +scre wed +fe ds +blue print +ðŁĴĸ ðŁĴĸ +har sh +er os +insom nia +ban kers +ta emin +mis conduct +hu mber +gi di +edu ardo +con a +musc ular +consu ming +ra sh +don nie +di pped +col lie +samu el +melt down +ðŁĺįðŁĺį ðŁĺį +me z +exam ining +schwar tz +pri stine +ðŁIJ Ŀ +ve it +ful filling +an esthe +gue sses +dra ft +som me +soli d +pati onal +ho ped +evolu tionary +all er +enter tained +sli ps +lud wig +conclu des +sen sible +bon net +cra ze +tra s +haz ards +const antine +ed ics +star trek +to c +occu pational +in cheon +deepikap adukone +pizz as +new comer +de part +oppre ssion +ebon y +foss ils +tro jan +el en +ste aks +k hou +positi oning +ug by +red cross +ak h +dol ce +us mnt +pp en +dil ig +ma vs +call er +cost ello +⼠Ħ +dy n +thing s +rhin os +a xi +sar kar +con vocation +att ers +ss ss +fun gus +eu gen +russ o +squ at +w sb +eli on +william sburg +s off +defici ency +be arer +o kin +key stone +t wain +cal ming +break able +wa res +horser acing +com bs +bun ting +u it +t land +ðŁĴĻðŁĴĻ ðŁĴĻ +ga stron +sab ot +ick ers +commissi oners +sen ate +ii ot +ath ena +nit rogen +an tony +ero tic +di alo +mis sou +hypo cr +âľ Ī +kaeper nick +can v +d roo +clevel and +o sh +mon sta +stefan o +^ ) +sh ul +po ison +ha e +commerci als +ma ul +nit ro +co worker +alo e +vap or +t ents +russi an +qu id +question able +mid get +po ker +girl friends +sin the +erit rea +ten ure +depos its +buc keyes +spot ter +theod ore +trin ity +joaqu in +u cci +follow the +caf c +mp a +ðŁIJ » +plo tting +dom ino +ta ek +sion ally +dicap rio +pa p +car mel +ig er +bt cc +beth le +www bigbaldhead +foo die +bagh dad +mason ry +off ended +à · +ภģ +sc ro +vers es +ori ent +ar ches +pi yu +know your +gre e +ta kers +gu ard +dish on +bucket list +bha fc +war dly +ðŁİīðŁİ Ĭ +leigh ton +pe w +stra y +assaul ted +in hal +ly fe +amar keting +l x +kat z +ubun tu +me o +carto onist +turno ver +mi z +dis like +mul len +mo f +bl and +hi des +emer ges +chori zo +truste e +ma hog +lan sing +paralym pic +fa int +fa una +ch al +sn ar +cat h +bent on +cast illo +sli ppery +apric ot +oec d +bar o +l z +he ming +clow ns +co workers +peru vian +commu ters +y ell +ðŁļ ´ +under ing +v j +tt p +fli pk +w ana +soc ent +Ĥâĸ Ĥâĸ +ठĤ +oo sa +jag ger +di sm +e less +d ham +cali f +a official +ec lip +harro gate +gra pp +com rade +n tr +concentr ate +thi ghs +bit coin +bel arus +ë ĵ +end uring +now watching +industri al +pi p +ar on +ar at + ® +whit by +oooo ooo +sa ree +tic als +mis leading +yo on +year s +sle igh +roman ian +sciss ors +vam pires +ac up +ab ba +th weeksary +cent ri +fl ye +u o +c bi +bu ena +sin d +mar ino +bur r +re 
building +ठ² +anniver saire +ac ca +ðŁĴĢ ðŁĴĢ +gett ing +tu lips +wolf pack +âľį ï¸ı +more than +ta kin +ð٤ĺ ðŁı» +u be +mon ic +dou bts +mo wer +co balt +don ne +specul ation +argu ably +kak u +htt ps +prosecu tion +din ah +stam atic +disclo sed +bever ly +fl wx +cra bs +extraordin aire +war mest +imper i +o logists +trac es +par c +lake side +am r +ter i +hour ly +domin ation +ar row +shrews bury +ance stry +wr angler +trigge red +pen sac +roo ster +survi ves +a on +bo ko +val or +love is +la g +pe y +fo cal +out laws +bl anc +artic ho +wit s +marsh all +die go +support small +u ca +sa h +je et +syn ago +gover ning +ðŁĴ ¬ +sal ads +cre ate +miri am +cen sored +ami de +no u +z eta +allegi ance +* ) +bl m +ric an +pa stors +oly mpus +blo c +whir l +star ry +pr one +y k +p ne +congratul ating +be v +so ber +love island +sa ir +an ing +tutor ials +q e +lun d +in ist +cle ver +taxpay er +ali z +wren ch +dd ling +cap ri +h pa +ðŁı» âĢįâĻĤï¸ı +na j +o j +futuri stic +jelly fish +ðŁĶ¥ðŁĶ¥ ðŁĶ¥ðŁĶ¥ +cel ery +plan k +fil a +ne me +un healthy +lec tions +ðŁ§ ¡ +rit chie +n ws +mi kha +wonder woman +âĢ İ +hip stamatic +ka g +ðŁĴľðŁĴľ ðŁĴľ +poul try +mo w +wor ds +lo ff +ðŁ¤£ ðŁ¤£ +relat able +re mixes +keny atta +ke m +re signed +fo d +stra igh +j lo +hu tch +box ers +colle en +mag s +instruc tional +ko l +attrac ts +pra g +account ant +go ggles +br u +th ole +mar row +leu ke +oc to +pon ds +bubb ly +he ist +ìĹ ij +im p +a har +ha unt +hall mark +psy ch +kkkk kkkk +col umb +jump suit +cost co +si delines +ag gies +over turned +ni b +key chain +fu k +f af +mi am +assist ants +cy cled +ri der +dam mit +red wings +mag es +kin s +ì Ĥ +ho d +son t +carol ine +" ' +cu le +bra id +fel ony +ar ities +ruther ford +depic tion +isab elle +ro ach +k day +fifth harmony +em y +li gam +bari sta +albu querque +gro ss +ðŁį º +oo ks +ðŁij ¼ +dun can +try in +jag s +g ould +li tho +âģ £ +а Ð +sam my +tun g +cas ser +apo lo +aaaa a +man g +as ics +sh en +p ye +tur bul +ss p +saint sfc +on lin +n anny +he ster +do z +à¸ Ķ +th read +ren ts +kh and +ðŁĴª ðŁı½ +un conditional +rob son +car re +ph on +sacrific ed + £ +auto s +par ker +oc a +log in +kee gan +hard cover +dough nuts +ðŁĮ İ +spit fire +refresh ments +saskat oon +commod ore +j f +rub ber +halam adrid +child care +stra da +io m +ri k +dak ar +ther mom +cro pped +gar u +ali k +ven i +i ft +si ka +ritu als +z ul +e ch + © +su dan +l land +i me +do cker +ì ¤ +fe ared +fa o +wal ter +no g +mutu als +l h +ali gn +mon ia +concep tart +ðŁĻı ðŁı¼ +sco e +compet ence +sw ine +ly me +laun ch +green er +abstract art +inqu is +gran ada +ga elic +flu ff +d backs +grave yard +ba be +acade mic +adventur ous +joh ann +~ ! 
+bi bi +| # +pl ings +gett y +as b +âĿ¤ï¸ı @ +staf f +religi ons +bang or +world bookday +me gh +de vin +ash ore +meri dian +gi thub +qui z +all stars +be stest +ir resi +ack er +do te +war rington +pol ly +newor leans +cr ou +wi gs +che y +smithson ian +la sag +de tour +bor is +stra ps +mari ah +inten tionally +ko h +ðŁį ¸ +ssi an +mar issa +cor al +episcop al +casu alty +tom o +supply chain +sam p +on go +ro o +cavi ar +p fw +clau dio +buff alo +s ations +mat ty +snap back +l ds +al arms +mat te +âĺ Ķï¸ı +conditi oner +d ors +he x +fi zz +a stri +sus sex +secur ity +qa eda +all star +cocac ola +as one +cl icks +sc ans +mu te +he avier +ðŁİ § +âĺ ŀ +lv l +book boost +youtu be +fla shes +f jor +c su +explo de +do dge +cair n +gonz ales +th ill +pel le +hart ley +renew able +re tin +e stre +costar ica +shipy ard +nc fc +pri ya +a ghan +an ath +plu gin +co rey +re bound +or u +kat rin +hor mone +gi m +mahin dra +s sus +park land +har per +fanta stic +infer no +ep ilo +wrest ling +fe ct +c it +ac oun +to ssed +monu mental +char tered +bu st +pe tra +âĮ ļ +wildflower hour +sweat ers +* . +bl er +ate ch +go wan +demo graphic +bra l +suici de +renov ations +vu el +sin ister +ar mani +miso gy +ph arrell +nap s +un iting +crusad ers +cor gi +insu red +than i +no or +g q +d ada +bicy cles +snu ggle +sch an +ten berg +ss al +fe mme +bo il +½ ï¸ı +re ap +occur ring +hus sein +divi d +sto ke +sh alom +na ia +o lic +frustr ating +Ù ĩ +ig s +gro ver +scen arios +n ds +bru tality +med alli +bu on +sas s +skate boarding +ony x +lor ry +ny u +gau tam +mm ings +gu g +end i +lo thian +comm ando +chal k +ph ora +asse ssing +ti gh +crun chy +ad ay +is l +ci ara +pilgri ms +kam al +p to +brit anni +t ani +sm c +l ure +app store +ab y +golf ing +cl c +fa u +an as +shu tting +regul ated +carn age +scow boys +all enge +c ma +humbold t +rel le +ku mb +her i +refin ery +sound check +d wayne +bos nia +i sp +the alth +anni v +relev ance +my a +bag gage +dre ad +s bc +th ed +bu h +hi jab +lo id +ke w +c te +respec t +lovel ies +cu bes +celebr ate +dir t +sav ers +_ , +gar ment +pulit zer +mas jid +beat port +al arts +encry ption +s ner +ple ads +found ry +sym metry +ru mi +birth place +scallo ps +supp le +pivo tal +t ati +no de +so d +pro xim +tr ics +col dest +bren t +mand u +cla ir +e ach +and alu +hi ddleston +ðŁIJ º +mel ts +v ance +pin n +se ments +scre ened +sa chs +o bl +ic ha +âĺĺ ï¸ı +school ers +heal ed +lo gged +ð٤ĺ ðŁı¼ +ic us +bore dom +b ish +b ffs +tal king +sure sh +hoo kem +de on +de fl +ei leen +ðŁį ķ +women intech +ri sotto +rang er +adverti se +ภģภ+tel ly +la go +dart moor +d ong +sk ates +lo go +un ner +mail box +ma sala +lo oooo +amethy st +che wing +c bb +australi ans +rc mp +game art +# ... +kor n +extre mism +fruit ful +anci ent +pu bg +pol ite +wh it +mur als +m gr +line man +dav ao +ste ms +ten nis +av age +tu pac +gigan tic +hs bc +auto biography +up the +ี à¹Ī +re gal +fig uring +ku l +mis sy +hoo p +gra s +for ums +back lash +abduc ted +p nw +min ic +bu tt +bott oms +at on +ven g +ðŁĮ ı +del aney +prab hu +fan club +over haul +health ye +sy no +aa f +ren amed +kim i +un cle +man city +se u +qu anti +este em +um in +en zo +mel vin +under go +j har +far ah +coast ers +humph rey +mh z +children s +^ . 
+d hi +disrup tive +integr ating +r nb +over sized +a ide +ne au +docu mentation +ðŁijĢ ðŁijĢ +pal o +hear th +ri yad +pun ctu +abc news +secu res +boy band +bir ch +ju co +tra ff +legislat ors +bay a +ãĤ ¯ +no ises +collec ts +s warm +k ner +bi shops +stur geon +snapp ing +mo l +fre aky +chair person +tro p +lyn ch +car cin +art sy +e sto +cha i +fl ur +inv ali +sau sages +im el +j or +fun fact +wit ter +puni shed +ac ons +h ya +re versi +em c +dif fu +z x +sp aw +cla d +d mit +hol land +fre sco +pay roll +ab undant +stu ffing +mor o +c ny +boy cott +wend y +ele ven +pro voc +pil ot +tr x +be ad +climate action +ri on +assi e +ì ĸ +o sm +islam ic +ho ar +good reads +al ici +afterno ons +spoke sman +jo lie +it as +masc ara +âĻ© âĻ« +pre vail +beetro ot +lu jah +k li +dod ger + » +ru le +l n +scre am +ho bart +col bert +r tc +er m +pat ro +quo ting +s live +que st +non fiction +semin ary +prosecu tors +ve st +express way +g ge +nau tical +et f +ðŁİīðŁİ Ĭ +dur ation +cha ired +the film +fab io +she h +can o +ðŁĴª ðŁı» +with draw +! :) +cor pus +phen om +yel p +la wn +ent om +snapp er +but te +pin ball +pro xy +libr e +alle vi +n ada +gabri el +fo wl +eure ka +daph ne +tu nes +pun ched +wh ore +jo g +ren tial +man ners +o pe +wh ufc +gu th +revol t +sne aker +philharmon ic +ho ste +sovereign ty +ðŁĻıðŁĻı ðŁĻı +fish ing +sci art +fe ta +i pp +dump ing +kel own +gir i +dig its +sal u +san jay +twee ters +sp as +col chester +sc ab +ma dd +๠Ħภ+Ä ĩ +ged don +march for +do p +maure en +un plugged +di do +fashion blogger +up a +mex ic +tar y +pol ye +jame son +v t +grin der +mad dy +consult ancy +¬ ë +leagueof legends +ac cents +um ni +jane iro +tu ss +h ens +ampli fier +to shi +pret tier +pre vents +new town +red wood +vant age +ball ard +ar tof +a she +a sion +lac ey +ap at +gro ve +ภĦ +rw and +real tors +tra itor +bed ding +ö r +zi on +fla shing +cam pan +boom er +secretari at +ab ol +liti gation +cont amination +se dly +shred ded +in for +do herty +bench mark +ro che +skate board +sho vel +i zz +to pper +o ster +laby rin +autu m +k ong +hum mus +vi z +tech news +kla us +am using +socialmedi amarketing +i des +cast ell +ste e +underestim ate +cal ab +pa ign +b illing +unanim ously +g mb +fly fishing +hath away +commerci al +colour ing +skul ls +pivo t +te p +tb c +motor way +x press +construc tive +pu k +under lying +kir sten +mani ac +cha o +se ma +chiff on +ðŁijĮ ðŁı» +ver ona +kom o +stan doff +wi ped +c ated +bla ir +wor kin +m sc +bethle hem +swi pe +unexpe c +pe es +pe tri +orig ami +ðŁij ħ +mex ico +flav or +ru dd +cannab is +mar u +ri ddle +wor shi +sil on +sch at +ap se +tang er +bi ous +e er +questi oned +o zar +dan k +angle sey +char an +bak u +compe ten +re pri +bat ter +sa xon +cal ves +leng ths +$ $$ +âŀ ¡ï¸ı +immer sion +ga unt +car ry +cy to +b anda +shu tt +experi ence +el gin +mous se +ta z +ê µ +in correct +en z +b ham +mor on +so ver +ar un +ti pped +la ble +de arly +bau tista +í Ļ +mor tal +woo p +dt la +sho cks +dav os +ðŁĵ Ŀ +swim wear +her man +ðŁijĩ ðŁijĩ +z ir +neglec ted +grac ed +campu ses +av s +ar ora +swach hb +live pd +ac cra +enqui ries +shoo ters +kur t +vancou ver +brad ley +gar da +g ü +ol la +attrac ting +up ton +ne win +lu mia +furn ace +ev ers +e on +sw a +roo kies +a oc +v ss +bris ket +tor ch +yo da +heart land +tac o +ph ony +food bank +ab bey +bab ylon +u y +gre ate +expre sses +d andy +sc apes +survi vor +ron d +e ci +ha vin +ab el +chil dish +tor que +wav y +ur self +kanye west +year of +ale stine +o brien +al fon +sk ag +kore an +anchor age +val eri 
+de w +ðŁİ ¨ +land slide +car ole +christ en +go phers +af i +priyan ka +q q +power of +it te +pc so +tw ol +pr y +intellec tu +guer rero +pi les +wish list +w ren +time table +ë ı +prodi gy +gibb ons +. / +ne ur +anz ac +mur ray +vie st +pla ster +la ir +art gallery +inter continental +g br +bell ator +nam joon +mam mals +am el +y aw +saras ota +cam ar +bud ding +sum mari +aco sta +la sh +ey ou +post graduate +instruc tors +ti g +const ant +were wolf +ic os +cla s +glen n +bud ge +ðŁĻ Ĥ +er ta +sta ins +persecu tion +cumb ri +o ch +syner gy +hu ang +scand in +mid terms +comment ator +regar ded +perpe tual +bo iling +al p +lan ge +sch le +fac eli +twee ta +ri dden +ok toberfest +charlotte sville +ik lan +jo u +ch atham +b sc +ðŁį ¦ +stra uss +mel low +xx xx +happy hour +re actor +ww er +distr action +at orial +ðŁĴª ðŁı¼ +twin peaks +fay ette +a or +ko k +bro om +sy fy +ou se +am ag +Ø · +ubis oft +lu lu +hall mark +stu art +it ya +si deline +venge ance +re lu +sex ism +boun cing +un ites +gu stav +te ssa +stu mp +pro clamation +ima x +divid end +col by +ðŁį İ +play wright +un safe +co smo +ðŁĩ²ðŁĩ ½ +cup board +constitu ents +ang lia +ram page +ðŁĺįðŁĺį ðŁĺįðŁĺįðŁĺį +than ked +take aways +shro ff +de bat +kh ur +conduc ts +format s +à © +port age +graph ers +u ten +pre m +mo ines +condem ns +s ous +l ps +f cs +deal ership +leuke mia +bure au +ski d +guardi ola +ca ster +thir d +avoi ded +en cyclo +c sr +vi xx +analy zing +she ar +dulu th +shap iro +chan ting +stre sses +as be +mil itia +ãĥ ª +col lin +arsen e +sure sh +teach ings +yi xing +sh ill +nu des +sv u +clear water +war ped +pro life +artist son +it u +versail les +galax y +ax el +spring st +cal a +hu hu +sc u +commit ments +exe ter +poign ant +mo tion +conserv atory +row dy +rec alled +mu sk +emb elli +so the +âĺ Ģ +sto pper +sch ild +to pe +el mo +zi el +j om +barn sley +snow den +on tour +jour ney +hills borough +par ole +w ts +mo ving +ag ility +tiv o +ff ers +kindle unlimited +g wen +ann an +ah mad +tex tured +hepat itis +dra m +insi ders +tis sues +ãĥ Ħ +fc barcelona +cr atic +na acp +pe can +f gm +custom ize +concer t +g sm +pe g +p one +justin trudeau +super cars +happy holidays +bu lar +ado x +lap tops +digital health +destin ation +gradu ally +áĥ ¦ +popp y +ss l +inhi bit +star light +of fro +glo omy +x per +hal der +im plants +le to +hass el +a as +un told +en ci +liber ia +or an +con tests +il ah +sma g +sc out +mari anne +cr yo +schedu ling +lo s +kan e +stutt gart +ne se +law rence +da in +pho tom +car ou +ภ£ +g wy +national dogday +roa sting +band camp +kentu cky +stret ches +ke rel +ca she +ãĤ ¸ +sta x +tran si +dog gie +at ric +hal le +ci vic +brow ning +lein ster +cat day +high land +joy ous +in cumb +or lando +ro mo +col ton +del ta +car ab +ro tc +aster oid +goose bumps +mo logy +yo ko +an ds +tomor rows +red carpet +sm p +ca sio +ðŁ¤£ðŁ¤£ ðŁ¤£ +se au +rejec tion +rot ating +bi partisan +th un +mat i +bon i +ol l +ener gye +do it +l j +mother hood +lou ise +neck laces +el ite +ni x +l cs +en v +gl u +le sh +cran k +su sie +m clau +so tu +crow ley +rat ri +use d +bre ton +alfre do +ye o +travel pics +ti pp +elli son +sax ophone +me red +heu ghan +ta ine +f es +vi ro +suppo sedly +i as +dige stive +y le +li zzy +wildlife photography +bri anna +west field +ra ined +am her +ðŁĺĦ ðŁĺĦ +distribu te +bott om +pre serving +oil and +craf ty +de scen +col ling +shakespeare sunday +r wc +ang led +ci an +t ations +mon tage +me yers +france sca +ðŁĮ · +wi ggins +san ford +volunte er +car ra +bar k +vari ed +pl in +am u +kap 
il +rock ers +qu ind +br ane +in mate +ent al +impro vis +michi gan +re tweeting +progre ssing +mercedes benz +smo ker +physi ology +dor ado +watt pad +h wa +sr bachchan +w ga +vol atility +hi re +ac ap +wn ba +hein z +stit ches +kidnapp ing +bur ys +lim b +f itters +thumb nail +ton e +mir and +desi rable +ad dison +tar an +tamil nadu +spec tator +soci ology +amit shah +remo tely +âĻ ¦ +ham id +r ds +g lee +smooth ly +sch ro +er c +lali ga +he als +us f +ni shi +d hu +un il +h le +tro mb +bhu tan +pilip inas +se ung +whit man +te y +min ce +snow boarding +re au +k ker +av o +zach ary +ran veer +ti k +gover n +qu al +beck y +anthropo logy +att en +grocer ies +de bit +war p +sil icon +hawa ii +ðŁĴ ħ +pomegran ate +pe er +orang es +people schoice +end ure +ðŁĴĽ ðŁĴĽ +ãĤ¹ ãĥ +ac ial +a haha +stu k +imper ial +bl ond +pow der +kno ts +vin ce +wood lands +den a +watch in +mat cha +ma hat +galax ies +middles brough +k ö +stre e +resc ues +wal do +lero y +desp ic +real ities +tm nt +ha q +un o +pe c +bolly wood +blin ds +design thinking +he ms +and hra +ab sen +fan s +ste ch +shire hour +bla ine +shak ti +pu rely +ðŁı ı +tra fal +ke ynes +gr ate +to bias +spon taneous +satur ated +caval ry +pri sc +ðŁĺ ij +wh t +pas si +~~ ~ +vir at +patt inson +la o +weir do +sym pathy +ju da +occa sionally +cred ited +stat u +es co +hil ly +esc ape +dischar ge +se er +may nard +sud bury +z lat +or al +we er +encoun tered +sm elling +over sight +ê ¸ +that cher +mack ay +you can +fre ep +freed oms +prophe cy +ho e +ishq ba +dra ke +qu its +pel led +tur k +o vi +wesle yan +new music +leg g +ch eng +h illi +ay y +pan ties +ad versity +ad jac +vaccin ation +ju ke +ga c +exce ed +time sof +sta ining +ep cot +v ital +up ward +bethe sda +apar k +ma hi +camp fire +enchan ting +rha pso +h z +na ver +fa x +vali dation +ac ad +ny r +as ym +coordin ated +depar ted +all ery +var ies +spr ite +chap lin +ss occer +s wat +bre t +relu ct +tunes app +super star +reminis cing +o co +home grown +dough nut +un canny +la pd +thyro id +! 
âĿ¤ï¸ı +botan ic +bre s +sp ade +i ste +echo es +du lil +bur sting +qui ero +ðŁij İ +loy ola +amuse ment +ha ils +sleep y +burgl ary +âľ ı +ro gue +cot land +mo ors +low er +wic ked +ðŁĶ Ĭ +compet iti +argent ine +yvon ne +karti keyan +ili ary +gat sby +precin ct +six ty +na ji +cam s +practiti oner +ðŁĺ³ ðŁĺ³ +pu ne +neg li +juli en +inv aded +cali br +cla m +duba i +mu k +lan tic +produc t +fe dex +ï¸ı : +eu ra +dari us +s ling +virtual reality +home stead +ðŁı³ï¸ıâĢį ðŁĮĪ +pac ed +in ha +pul mon +la zy +premi ering +ma stered +in he +con gregation +ba jo +sport ing +new jersey +hor ny +lma oo +leng thy +du t +yo gh +swe aring +philosoph ical +pap ua +in ski +know les +dy ke +âĢ ² +to ken +mc guire +ri ot +probab ility +mc con +gro s +su mat +c ite +da a +on da +mad dow +che w +board games +spar ked +re claimed +ad hd +ny se +imwith her +equ inox +boo ths +balsam ic +ha zy +dor chester +ag os +se aw +moder ator +seri ea +ander sen +pilgri m +âŃIJ âŃIJ +itch en +hal li +x ton +nathan iel +mun ition +celesti al +ga f +zo om +mark le +pen thouse +cal e +s fa +bar king +tu cket +em ery +cal orie +li que +ad ar +mc nam +tor tilla +wood pecker +mo town +bad ger +ayr shire +scram ble +dd ay +cra ziest +per rie +cho co +cast e +i ot +wre cked +selec ting +uss r +gra ft +pun t +lab ou +ir st +ba ek +Û Į +su ki +que u +ach at +te ster +aug mented +wc vb +sin ks +ðŁĵ » +ra ke +inter ne +be cause +belle vue +une arth +light en +ðŁĺ £ +turn around +labe led +unemp loyed +twitter kurds +le ia +h ye +great er +ðŁIJ İ +tim ed +i red +e tt +limit ations +cab e +s out +bee ch +anni hil +re trac +yo ona +ang er +den nis +supp lying +di z +" ( +sc ur +gun man +su ho +sauvi gnon +ภ¥ +wi ley +land on +choreo graphy +pre historic +ðŁı ĥ +var gas +assess ments +pinn acle +di i +chamber lain +ì Ī +v p +present ers +deut sche +sun shine +sal utes +r one +bu siest +- .- +motor ists +hemi sphere +al wx +ps p +ow a +den ying +cho c +gu tier +han uk +mus kete +jait ley +se wage +t ame +thin kers +shi m +se quo +pap ar +middle east +k wa +ke g +patag onia +no y +bar ça +take off +he a +à ¬ +n sc +g dc +ðŁij Ī +mou stache +mel ania +thr a +â¬Ĩ ï¸ı +pier ced +ze us +fon ts +ber a +it iner +q atar +contr ary +ire land +i fy +ou los +commun al +fin s +un paid +pa a +ðŁijĩ ðŁı» +ri os +ou p +f iller +cafe teria +à¸ Ń +kas i +cali ber +z ulu +v sco +ts ford +dragon fly +smo kin +pi st +psycho logist +diplom at +we bs +buc cane +à® ¾ +motiv ational +du ne +ba e +c fs +with out +er on +i ac +ate e +pen sion +fra zier +en sis +sk is +par ting +ger y +territ ories +nach os +eni ght +ever lasting +msd honi +tel e +sp un +po di +sab ah +environ mentally +ce ase +beau mont +mar ta +kel vin +ho ff +sun il +n da +co b +sh ale +ree dus +un boxing +u bio +re opened +n all +capsu les +mar r +himalay as +swee ter +ja z +f mr +twee ter +dha ka +na u +de mi +d fs +ta urus +fad ing +it utes +ci p +over flow +jef frey +don ny +car tunesapp +ðŁį ij +prefe cture +danc ed +c pt +ple asing +ital k +earth quakes +ul ation +hi o +ãĢ ĭ +ant an +nutri ent +de ere +selec ts +enrich ment +r iti +tram pol +bl amed +j ia +contribu tors +chesa peake +pi geons +tribun al +mad uro +w su +ilo ve +effici ently +dar cy +war ms +ar ra +ec u +ho wer +strugg led +rajini kanth +ðŁĺ¢ ðŁĺ¢ +hou sing +str at +eli x +disp ro +raf fic +thi erry +na sty +c fb +staf fing +al ma +back ers +hen son +sky walker +reale state +roo s +ness y +chan ce +cair ns +c ci +pe dal +ly ft +cross word +wait er +only in +kru ger +k ir +alej andro +car tier +car rera +re paired +ou at 
+un clear +un breakable +today in +qu eries +jo dy +gen ital +win ner +to l +kelown a +fascin ated +ãĥ ¬ +sris ri +squ ared +spr ung +negoti ate +priv ately +av en +>> >>> +g ical +gav in +chester field +zu mba +or r +nat alia +impeach ment +mn l +car at +criti que +credi ble +trac y +tan i +musi k +jig saw +gam bia +tol kien +fe u +as per +sav ory +fo xx +f itt +mar lon +l rt +v ell +p br +imprison ed +i om +chu l +wind shield +kay e +ba a +chor d +s art +al gon +minister ial +nat geo +la zio +nor ms +ðŁijį ðŁijį +lic king +fut bol +un sung +dalla scowboys +sh red +distur b +dev ine +be ards +ch f +b day +ro sso +ig or +ay i +si ren +k air +sti les +ro f +mag nets +un cover +mou se +bang ing +si ghted +spe ople +impac t +row land +kir a +environ ment +love the +p sis +mish ra +gl endale +ca jun +o che +de ception +sex ist +stra ws +s ga +buff er +apost le +sp l +pop up +ðŁļ Ĺ +r g +up er +ball in +i dy +occa sional +national park +ðŁı Ĭ +u an +innov ation +ภ« +te aparty +re tte +counter fe +b ha +rec s +ig en +ðŁĮ IJ +humming bird +cu r +ha ven +la zar +pue blo +: : +zi onist +op ath +inver ness +promo ter +carto on +cabine ts +mahog any +surve ying +r ational +feel ing +testi fy +so w +oc on +ภ¢ +ne el +mar is +sol itary +che mo +rad cliffe +sim ons +ros ary +new er +jo die +re tali +pra wn +pad dy +hen ge +k ala +im plant +at y +bren twood +par adox +ene z +re designed +p our +wy d +al de +௠ģ +sol d +biomed ical +๠Ĥ +tt tt +mat teo +ys er +new ton +de bun +ner dy +loo l +wo on +elisa beth +ec c +wh i +ach o +salv age +sal aries +qu ity +navig ating +oph thal +con soles +re built +o pec +ast ers +sho red +set list +kathr yn +rhy mes +re visiting +ash ish +li ft +re post +sole il +âı ± +weal th +sa at +we c +king james +flipk art +field work +se gu +mo dal +bu b +are rs +ðŁį Ĵ +clo oney +pad dington +necess ity +guth rie +pen te +li mo +jo sie +ar tin +en c +l hs +betra yal +info graphics +i er +mo a +hear ings +bon jour +sym bolic +ag ro +wed ges +krist ina +wild flower +athle tic +photograph y +pe sh +ca hill +chi lean +gou l +fi oren +ðŁij ¶ +z il +sk im +bad oo +deli a +tre ble +n cc +ðŁĩ¦ ðŁĩ +a house +bul lock +sol itude +ا٠Ĩ +can cers +futureof work +hu tch +water shed +war mongers +sp illed +colom bo +mo th +associ ations +weigh ed +global goals +not just +christ i +tor g +swe ating +man eu +clu sters +â̼ï¸ı â̼ï¸ı +ta ped +ul y +tru sting +yu suf +te in +ra b +, ,,, +sin ai +audi ble +explic it +cro wns +sch iz +at least +ðŁĹ £ +de bra +je suit +ene gger +z hen +one sie +i it +ss f +gur gaon +chak ra +bear cats +k ran +k awa +reque sting +han over +g end +sor os +mer cy +lovel y +do omed +tim my +ku z +ul l +ab ram +sa ison +ãĥ « +clean ers +re mo +circu its +bar red +o th +mo ist +madele ine +gall o +u j +per mits +hea viest +car ols +az te +gior gio +flo ats +decl aring +us rc +min at +craf ts +pri ma +conven i +nickelo deon +danc ing +ceremon ial +blo gg +tw p +anglic an +she k +k nick +( (( +hubb ard +harve y +hit man +fen g +we some +for za +s word +op us +bro m +gi bility +z al +m unch +dance hall +gre edy +hd mi +re birth +ðŁĺĭ ðŁĺĭ +s world +figur ine +com post +k f +engra ving +gior no +st ana +k man +ham ster +compos ers +aj e +func tionality +pol k +is ons +air planes +te se +hor rors +musc at +gi ven +sp ence +ðŁĩ¸ ðŁĩ +eli ot +ach illes +fre ck +crypto currencies +sou ther +hal o +bor neo +polit ic +hahahaha h +up state +si ena +obsc ure +hau sen +lloy d +happy friday +motor bike +bon a +americ as +hol s +- ( +spor ty +un aware +reven ues +christop her +bank sy +av an 
+ev apor +com press +eyel iner +to dos +buff y +renewable energy +ly rical +ar chan +rapi st +fair trade +lma ooo +beat z +pro active +la pse +ir ical +revers al +po de +mcin tyre +mac au +ãĥ ķãĤ +nash grier +f sa +g all +çĶ Ł +perpe tr +il ya +configur ation +% ; +str ange +rac i +ภĩ +pic kups +kov sky +mam mal +w ps +g able +compar ative +z h +save our +da vey +on etsy +mu ssels +mis er +cri stina +electr on +cra ve +lo ren +precipit ation +m z +ðŁį « +vin cen +snow board +no ida +ah n +marin ated +g tr +town hall +min is +bethe l +adv an +su ra +shi el +fur ry +ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤ +lyn d +so il +sc ence +sen eca +shar jah +dick ens +credenti als +av ar +per k +requ iring +pre fer +j ian +de ca +r ach +ing for +del e +be ep +ðŁĴ » +cis ely +hu ddle +green sboro +haw king +ho ax +hang ar +ç ľ +mis o +lo vin +gre ta +ab ad +logi e +at an +snow flake +mahe sh +fear the +al kal +bobb lehead +ba hn +ju dged +fu tu +feli x +ðŁį ĵ +pi ke +der iv +notic es +au er +dis super +or da +wi pes +am ino +stri kers +foo tb +dram as +pun ching +score less +heming way +bi h +bal lad +chat ter +am mo +kle in +fabric ation +kari m +z end +hi sto +vol ta +rock y +marke ter +xtre me +sequ encing +paradig m +cle ats +boom ing +âģł âģł +block ade +promp ts +yogh urt +pur pose +nu r +regu late +nois y +ing rid +bird watching +bar tender +Ù ĥ +wor dof +cha otic +shor ty +el dest +z app +onceupon atime +fl yo +rit os +mike quind +ðŁIJ ´ +regi stering +. ] +ad ol +gg gg +pur ge +kid lit +ar bor +val ves +synago gue +o th +unanim ous +veri fication +dar rell +ãģ Ħ +vander bilt +tape stry +pro sper +did dy +dra fting +de cep +marqu is +st int +michael jackson +pee led +men us +bb b +sc are +ema il +wri gley +it is +f ell +some thin +bar ra +ed gar +di pping +pu ddle +sla de +lear ner +jal en +ðŁ§ IJ +the daily +mikequind azzi +ju x +iq bal +mckin ney +ra iser +ef an +dr one +cat o +pic ket +cro we +l att +uk o +giuse ppe +hin i +synthe si +ponti fex +song writing +to d +swit ches +din ners +h q +gabri elle +pensac ola +cir cle +expo ses +ev s +riyad h +pro men +o ck +sa j +cit ation +brew co +jo si +ep aper +dri f +point less +tang led +cri pp +line ups +fairi es +daz e +mour n +bla dder +sal z +bur undi +book mark +the people +sub sequ +princi pal +sk er +court ney +a oki +rac ers +ad m +mom a +critical role +hou n +shed ding +sa ka +ace ous +mck ay +hus bands + ½ +me da +accu sations +ro sel +nc is +witne ssing +or ama +go ds +hil ton +el man +ÃŃ n +meg ap +cra ven +announ cer +crit eri +sheffiel dissuper +milit ant +consu l +hoo ded +aby ss +b x +ma dam +lo cu +mary am +manic ure +grat is +ac tresses +ros ario +this dayin +king ly +gn ome +cel ine +r ous +he el +lil ac +vish al +ab h +thor ns +s ls +ne al +construc ting +be ren +s lang +ma ins +far ra +sar ko +pai ge +gu iller +l ala +ice berg +nou n +plann ers +u mmm +ou ses +ill ary +ma an +box ing +zi pper +srin agar +migu el +o str +mp o +responsi bly +lan terns +appli ance +x b +gren ade +neglec t +dy sle +ham mock +ne ctar +wit cher +r gv +di ence +ser bian +seed ed +cru z +bi sh +sp he +e q +sky rim +alge bra +phil ately +bungal ow +ge off +y ves +demand ed +consider ations +the vamp +pawan kalyan +co ded +grit ty +erup tion +se infeld +uni denti +ëĭ Ī +wor m +ac us +se ung +dun g +ro land +su d +di visions +ab lanc +shor test +j f +p oun +plant based +be to +tough er +mc o +don et +mark us +v fl +ðŁı ł +open ing +co ward +caber net +o xi +burle sque +sand ra +su mo +consi st +tho t +cay man +motor ola +gutier rez +d slr +y w +no bel +nov ice +moms 
demand +grun ge +sp or +d cc +pre sses +sli st +allot ment +voc ational +ft c +pu ja +lo ven +utt arak +tan dem +sh ep +come dians +anat om +cant wait +healthye ating +west side +mar gins +chi ang +asbe stos +stupi dity +proble matic +fit bit +: $ +ceil ings +shu a +protec tions +bio tic +beng ali +re sts +bien nale +tim o +cul min +e minent +affe ction +unbeliev ably +individu ally +canvas sing +wh itt +nov asco +chin son +h pe +go w +gloucester shire +pa o +thresh old +chev ron +s ine +we ther +pp ie +aqu ino +antwer p +âĸ ¬ +po on +inst af +equ ine +cinemato graphy +nbaf inals +vali ant +kil kenny +te rence +syste mic +sr l +p ound +made ira +pl ough +tre cht +mat ed +mp d +ransom ware +ph in +li qui +bb ce +boom er +i standwith +con ju +r te +nar a +foo lish +da shing +vier nes +br ite +da u +juni per +ai da +you now +ra zer +de i +repe ating +comfor ting +adjac ent +e to +ca sted +chat ur +mu er +syn th +san itary +mac le +independ ent +law ful +e erie +h or +ðŁĴ Ń +am rit +vel o +station ery +mu f +may may +contempl ating +elabor ate +gre gor +dri es +ac col +ภļ +schwarz enegger +ill nesses +day break +follow back +collu sion +electr onic +jo vi +hiro shima +ta w +hom ec +mic ah +qu itting +fro sting +ben fica +hel i +s ical +pic cad +corpor ate +ment orship +you are +sing er +shi va +ru ne +ing er +ri um +play able +doo p +wil low +ter re +ni p +at d +war bler +profession ally +er ase +proce ed +pedestri ans +mis chief +ben ding +alas kan +c kett +mo p +dd les +shut ter +ge ared +atene o +ma deline +g ations +o sha +der ick +sw ild +an gry +pat ents +hun k +decre ased +fr y +ðŁĴĸðŁĴĸ ðŁĴĸ +sal on +quant ities +d ario +ni gel +ku ma +jen n +happ ye +xx x +rex perience +pro s +au sch +rele ssly +ham burger +fuku shima +er ne +stat ec +ren d +may field +j one +lef ty +bern stein +sm il +gener ates +fore station +band its +ta yo +r ca +ac ci +rodri go +kn app +elo vers +vege tation +u ral +le ft +ħ ï¸ı +worl dre +sur i +embar k +w son +ba you +mu ller +mo vers +ðŁķ º +presby ter +l f +cre e +bat b +sal am +demonstr ations +an ec +n pc +it ics +to graphy +re inst +thur st +tal e +off ences +smart city +bro tha +ofthe year +in valuable +ear n +ðŁijı ðŁı½ +kre mlin +gra dy +town fc +guern sey +ma ha +contag ious +dre x +be en +( £ +nati vity +k tm +somer halder +comp ounds +íķ ĺ +" â̦ +af g +ott news +h ound +fire fly +cil an +donet sk +volunte ered +ak ira +è ª +sing ul +st h +dro wned +mand o +he ir +ðŁİīðŁİ Ī +tax is +y uki +vel d +k ans +el k +ran ts +hash tag +t eng +ro g +a at +gru b +e ber +in india +colo ssus +sig ni +so ever +mile stones +der o +differen tial +phu ket +master mind +an gh +mel ani +bro ker +actor vijay +stun ned +continu ity +af fl +vo cal +perenni al +fianc é +in complete +hun ts +re issue +domin ates +tur meric +ro am +ri on +bag ged +nas sau +fu t +x ox +national trust +jo ye +san o +hearth stone +dis respect +le es +h se +siber ian +offe e +re stock +wolf gang +re gan +plan o +un wind +re par +mil le +] , +skul l +fat ally +concep tual +ðŁĮ ² +f é +ber to +b ms +u a +mag na +notre dame +le te +la undering +heartw arming +buffe tt +go at +pe abo +wind mill +v ac +continu ally +az alea +mem brane +can cels +make yourown +athe red +p to +tor pe +ðŁĺ ł +ðŁĴ § +sc ares +le aking +z et +pix els +ac i +kh il +marath i +ðŁĻı ðŁı½ +u la +tam u +chandi garh +z agre +aa b +pronoun ced +aubre y +sand er +pun ta +har low +ic elan +celebr atory +so t +unci ation +stru ly +mc dowell +deepi ka +remin ders +my stical +ct c +chat ted +s ica +bar gains +ch hat +ru bin +m net 
+oiland gas +pel ican +o at +mor ality +k our +i h +nu clear +gc u +ric her +vene zia +m ma +le ith +ac company +rich mond +sports net +ba ahu +smu ggling +mm i +ðŁĩ®ðŁĩ ª +twi sts +sahi b +.... . +amb itions +il lo +histor ical +fo rec +show biz +pon ies +chas ers +remo del +will ing +prince sses +am ple +cushi ons +ac les +lot r +da ch +an the +in corporate +new bury +ki ri +fried rich +ab v +ball ers +alber t +ðŁij Ń +let i +nan op +ci de +anal o +n sf +)) )) +griffi ths +valen ci +ro ano +fun run +babys itting +ca day +ent re +u ck +slu g +tic al +the sims +ro ar +car ney +g am +sto we +fi d +bun ny +sham rock +pe cu +mol ina +go cougs +con tributes +transform ation +mo y +v aj +sever y +antioxid ants +thir teen +sight seeing +l j +reversi ble +odd ly +hoo kah +nou vel +hal al +fe i +stab les +mul t +ho pped +bra ids +inter change +ghana ian +ww ww +eth no +con junction +ago v +ye ti +earth and +ts p +con serve +heir loom +metaph or +woo f +tor io +self less +n wa +em ilia +yl ene +y xe +gi ar +moder ating +pro bz +b fi +ne er +du mmy +hanuk kah +we bber +k v +eye brow +dag ger +su mp +ra ges +ork ney +tb o +hal sey +assign ments +tr onic +scri b +co on +an war +# âĢİ +jal ape +flori da +qu aid +haw keyes +âĻ¡ âĻ¡ +street car +ro g +dat lantic +gran ola +un changed +expect ation +Ù ĩ +mar lin +gu mmy +ðŁĻı ðŁı¾ +awareness month +oil painting +mu th +per ch +jun to +villa gers +mor g +che ated +web comic +the future +d ps +la kings +men tioning +vo or +ident ities +accor d +mc gu +l pga +rum our +massi vely +m pls +heal y +d ate +sp oli +re visited +on t +al and +scru tiny +lakel and +bl ending +< / +an kara +jami edor +metab olic +f ences +ann y +å ħ +semic on +oo tt +space ship +wack y +le ta +ap ac +she e +in herit +do res +ðŁĩ¨ðŁĩ ¦ +gent e +tw ick +ri ms +gal ve +de ville +king fisher +scorpi o +ow l +al ar +vari an +ðŁĹ ĵ +vene tian +star dust +then orth +q ing +har rington +consul ate +spectac le +ho bbs +tur ks +gre er +mat ing +ðŁİ Ģ +ðŁĮ Ģ +direc ts +í ĭ +pompe o +vo iced +la os +tz u +pro me +pri sm +mer c +fortun ately +bc fc +mcdon nell +not sorry +smi led +t ba +for war +mid term +dar by +we instein +up grading +wol ff +bron co +cab ello +ðŁ¥ ĩ +fi able +shar pe +bat tered +sat o +myth ical +instap ic +pre pped +eni um +e spo +di aper +explan ations +who pping +ragn ar +pe el +antibio tic +l acks +harri son +li sm +au l +qu ail +martin a +sent encing +sc ams +di di +tr onics +ãħł ãħł +go ff +za in +param ore +cha ined +clin ton +li ff +cott ages +em on +reve rend +consu mer +ce an +t any +lum pur +e bay +sto ol +ðŁĺ» ðŁĺ» +ta pro +h ath +modern art +just ine +prover b +app y +tra x +mani fest +am bu +nai k +pe pp +r sd +mer chants +kitch ener +shi fted +li zz +âĺħâĺħ âĺħâĺħ +âĢĶâĢĶâĢĶâĢĶ âĢĶâĢĶâĢĶâĢĶ +uto pia +tom o +ou ted +com ers +chiroprac tic +book club +cin dy +pro hibition +se uss +ë¯ ¼ +thin kin +rr rr +go fund +t ack +om b +catastro phic +ling u +guild ford +bo td +ॠĭ +plan ter +^ ^ +win k +kath mandu +sto ppers +smooth ies +re efs +hin d +bell amy +Ħ ë +waste water +vo or +nat l +! 
] +re el +y ap +scoo by +work space +corin thians +bl un +obli gation +g bbo +dy son +cra vings +ell ington +dap l +wre xham +earthand clouds +uk runchat +positi oned +kal b +four square +jo ck +im pending +even ing +ath y +pro claimed +c ites +ann apolis +san i +mar th +ir l +accom mo +ka a +fin a +y aa +di sper +ec ar +bha k +will y +ðŁĺĢ ðŁĺĢ +mcder mott +mo j +gener ational +u said +train ing +lon ely +lo res +impe cc +âĢ IJ +beav ers +ma ki +he b +aap l +å ı +wolver hampton +leader board +me u +c fa +easter n +hu r +civil war +ou rage +hor ned +le high +awar ds +evi dent +gi gab +r ous +ma del +ro byn +ur gently +k ors +en as +heis man +bam bam +fab ian +f om +evalu ating +assemb ly +out sourcing +hun tsville +ðŁĶ ª +justi fied +cashi er +sp aper +buc keye +analy tical +illumin ati +au tho +o j +sha de +geel ong +wh ey +he aton +terri bly +ele k +un charted +sd live +moto cross +her mes +dar shan +dar lington +cash mere +gri pping +cilan tro +pun ish +... : +ðŁĴ Ħ +inst ance +der i +lo bal +muk her +sp ar +thin ker +fre mont +com piled +color ado +vig ne +sm d +whe ad +villa ge +le ek +formula e +ta res +persist ence +?? ???? +ped ago +he z +alzheim ers +vul ture +off ence +is great +suff ra +kick in +h mmmm +broad way +ï¸ı @ +art i +alli son +endor ses +ry u +lolli pop +soy bean +kend all +cer a +inv ade +( ðŁĵ·: +conver ter +car pets +ho bo +fr it +pe ac +es qu +ern an +ou f +an il +di ffer +ch ing +bre cht +sp g +daven port +stra va +sever n +n gos +stor ians +fe te +parame dic +j hb +al amo +sne aking +gold coast +roof s +isi l +depic ted +projec tions +nu mb +o ss +ep i +glu cose +zid ane +infin iti +íĺ Ħ +ran som +ton ics +fal k +g ler +ou tw +re ss +week ly +the on +n ole +ðŁĩªðŁĩ º +vol ley +sum mar +neg ativity +sam son +ye w +aus votes +ju l +ju dy +f art +pra yed +pal ate +multicul tural +double header +cycl ones +pier re +ãģ ¨ +âĺ łï¸ı +rt w +conver ting +wir ral +l ari +ir relevant +austin mahone +an che +ya an +sd f +$ . 
+explo ding +ulti mate +prof ici +gofund me +cell ence +ep stein +bul lied +sep tic +à® ¤ +lu mber +cu ff +vsco cam +pl or +ภ¥ +se ok +ro to +venezu elan +sor ta +spir ited +daniel padilla +team sisd +radio active +icelan dic +ðŁĴ ¤ +ver e +accommo date +shi pp +ot ter +ol ina +e go +su la +san antonio +de as +simil arities +âļ ¾ +y om +bro ward +å ° +can cun +veri fy +on te +candle light +ìł ķ +inf ants +az am +ðŁĺ ° +le ven +un stable +bloom ington +x ford +con tour +y p +innov ator +histor ies +po y +lolo lol +ex pires +cat alo +bill boards +an ab +el ic +novasco tia +fa ire +ìĿ ´ +rock well +gr ille +az tec +joh or +ur struly +fi ren +dun lop +id le +port man +jo es +tx hsfb +hol m +cham ele +under world +lo ss +ti em +therap ists +past ure +pa ste +ing now +vul can +ra gon +lar kin +o shi +ho co +child hood +umb rel +success or +kath y +iz en +° ï¸ı +share holders +ol ga +ai b +he ap +fl aming +ro u +air tel +rat t +z ane +vo w +thor ough +sn ag +par th +un conscious +ve y +new release +gh ee +croati an +facilit ating +swan son +astor ia +to logy +master y +ðŁ¤ ij +bil bao +trou pe +the ori +chey enne +ro tt +shore line +gra sso +master chef ++ ) +vi x +ellen show +as g +an ak +ku ya +safar ilive +debu ting +blu m +list ener +v ins +book shelf +smart cities +makeyourown lane +; ; +ðŁIJ ¯ +ri zz +on ward +bull dog +bear ish +vir uses +fri gh +lin den +we iser +sn t +gon a +dre sden +fl anders +cu k +wheel ing +ba u +atu esday +surf ers +swi ft +mc call +arbitr ation +aw d +mon c +b ine +at x +re fr +mi ro +po sey +n are +rit ter +âģ ¦ +play book +blow out +sports manship +s oooooo +malay alam +gri ms +bur bank +infin ity +sar gent +oit nb +joseph ine +ski pping +par kin +excur sion +semin ars +jo har +par tridge +post game +ll ll +blan che +temp ting +m na +lu ka +is ers +to ffee +bar ron +he mmings +sa e +go hawks +cu pid +li mbs +con se +un common +z ada +head shot +so ils +pione er +mam ma +sem itic +pan dey +jamiedor nan +spl its +vel a +son i +ra ff +t mobile +âŀ ĸ +pra wns +lit er +enjo yment +egg plant +tu b +cultur al +us ic +suspici on +sy cam +summ ed +ma du +ho ck +up wards +eye ing +ri ve +assas sins +âĤ ¬ +out fy +chi ves +t ner +la is +por ridge +sad dest +w cc +vick i +sna ils +biz italk +mill an +ðŁĮ į +sam oa +j ing +mi key +gu j +chel ms +eli gibility +arma da +thro p +surger ies +ãĤ ¿ +mo hawk +ex its +me m +is lington +c me +land fill +kait lyn +ðŁİ ¼ +combin ations +tomorrow land +ver b +cor a +pre cisely +na om +ðŁĨ ķ +shr ink +sof tly +merce de +mand el +poo dle +ball erina +sop h +jux ta +y at +ary an +hesit ate +lo wered +gu lar +dungeon sand +ron an +my ri +sp f +men opau +gra sp +pa thi +fe asi +fla w +shi story +ste ward +gg le +fay re +cli que +credi bility +yo g +sec tion +mu sko +se ville +no tt +cal m +mate o +indic ted +fi ba +by l +lin o +u kin +!! # +enig ma +siri us +bu sc +ðŁį Ĭ +mac kerel +psal ms +a at +tomorrow spaper +ðŁĺ ĸ +p fc +........ ... +shre k +mul let +o sh +danger ously +immen sely +am ur +ðŁį Ĥ +pro por +sy a +london marathon +abo ve +obli gatory +pro v +ra cha +alex is +pri mary +sh h +ether net +d stv +cou gar +un lucky +ni l +steak house +mel a +fc bayern +cause way +ca therine +fluore scent +nx t +to kyo +au sp +releg ation +qui zz +shored itch +proud tobe +promo s +inter acting +home brew +da esh +w pg +stead ily +provin ces +bal lots +i ah +al to +< << +you u +ri ley +prefe rence +tra verse +incen se +am munition +ho dges +# @ +hail state +tart an +witch craft +vent ilation +liber tarian +! â̦ +ow es +% ! 
+ong chang +bru shing +le ic +fi ber +under attack +down load +ex pir +hy o +pompe y +mc bride +y ag +stre e +com bat +ten ding +ai ra +gug gen +ab ra +in na +fli ps +aw al +m ach +dol lar +inspir ations +z um +o du +it ty +video game +aqu aman +har u +bel fast +je b +but ch +us gs +calcu lus +go yal +mor gen +x finity +stand up +contrac ep +sab re +na be +in secure +gener ously +epit ome +l w +t ca +narr atives +don nell +pand as +ber gh +tu t +ker al +fel icity +br ampton +quinte t +nom ore +ðŁĶ ij +lo i +alham dulil +ðŁĶ¥ ðŁĶĹ +ston er +shaw l +clin ical +bren dan +gon e +fla wed +tri ppy +j g +al location +po aching +ve vo +mo cks +lef tist +bon uses +condem ned +abil ity +st ating +microbi ome +bio logist +for you +wahl berg +ss or +ift ar +w ul +ÑĦ оÑĤ +pom er +me me +ver te +tre ll +tra it +in let +hormon es +deliber ately +vill ar +battle ship +p bl +tw enti +ho kies +dal ail +say a +may fair +han s +die ts +⾨ ⾨ +od in +hot spur +pap i +k ana +k amp +fin na +flo tus +ti ans +unic orns +tribe ca +chang ers +fore ground +out a +inv aders +gett ys +tomorrowspaper stoday +mac millan +hand written +w fp +u de +state of +base d +âĺģ ï¸ı +cas m +psy ched +histor ians +fol d +d da +ag grav +p ans +green way +au sv +ðŁĺ ¶ +shradd ha +inde x +be sti +zim mer +t ness +eye shadow +ot te +go ts +distribu ting +pro min +yo l +ace a +tram rahim +hoo per +supre me +jam min +intu itive +quali fications +sli m +sid di +jay ne +tri pping +g tx +pun s +e manuel +om g +mid summer +in to +succul ent +ri en +new mexico +o or +hoo king +in f +ðŁ¤ Ŀ +flir ting +na hi +g friend +t ps +hel ix +z s +on ie +ct f +kri s +irresi stible +fla p +ðŁijıðŁı» ðŁijıðŁı» +us wnt +ru d +ram ps +pin oy +ot w +lol z +low ering +favor ite +t mc +phra ses +her mi +aver aging +em br +ben o +estu ary +sle eve +ribb ons +ta sh +ภ¹ +x f +aw gs +sun ited +brew eries +anir ud +pun ches +ol die +ip ads +wi fey +land lords +d ji +gun ner +íķ ´ +tex an +ex op +cas sandra +s off +ðŁļ « +igh ton +bak ers +awareness week +v all +ear p +bts bbmas +apologi zes +âļĵ ï¸ı +was ps +states man +snat ch +watch dog +ra fi +after party +spi ke +j er +peri ph +r nc +mu ll +le en +shi es +li eu +urstruly mahesh +mer ton +de sai +shi f +ðŁĮ ± +pe dic +gos ling +arrang ing +ww g +gen y +you uu +netfli x +e ttes +k wi +bernar dino +am iga +Ø ¨ +kashmir i +t ings +emer itus +de cat +ab domin +dc i +pha ses +d jan +be am +op ry +i shed +the ellenshow +the st +habit ats +to ons +mclau ghlin +ri pper +micro biology +tal aga +clu eless +ss u +cro che +bro mance +longe vity +zagre b +prev ented +tra ve +spo ilt +darry l +migra ine +al cat +dd dd +vi v +ser pent +mat tel +jam a +con quest +î Ħ +sam sung +presbyter ian +ket ch +fire fox +mo tif +le c +cho pping +cher no +j ann +ðŁIJ ° +pro lon +wake up +conver gence +mersey side +heart broken +lo oming +hal lucin +mai ze +commun ism +mo h +twitter storians +serge y +res eller +favor able +ed gy +re iter +mal aga +live me +ka hn +pul sion +big g +kim kardashian +ati o +tyr anny +ru ption +q ant +pro ven +by z +pu shaw +kri stin +e er +tar dis +ri z +awak en +mi ko +un documented +path finder +indirec t +resemb les +h ler +conce aled +scand al +re im +d nb +cr itters +attend ant +apprentice ships +aa u +scre amed +l su +fa h +har bour +ed d +bat sman +li ss +mi sha +spani el +it f +advan cement +fa c +close up +cecil ia +medi c +narcis si +lav ish +gi ac +ma ys +le it +wine wednesday +pushaw ard +let to +curren ts +bug atti +out ine +w j +un do +ler osis +devo tional +ðŁij « +on na +fais al +sa una +himach al 
+am ii +à® ® +di zzy +screen writing +ph x +sp n +ick i +ag irl +fi shes +wb z +pi m +bo ar +ac id +! .. +rocke feller +n ga +dra stically +simpli fy +dru mming +autum nal +gur mee +lor de +jo ann +give up +b our +am ura +der land +sim pler +wat son +tri dent +concor dia +bel lum +bre k +dum plings +vi on +dungeonsand dragons +sp ri +ascen sion +wil datlantic +u st +rob ins +legi on +insi st +jar o +gue ss +so b +bigh it +pool side +negoti ating +mc gill +bil d +techn icians +miti gation +ajay devgn +b to +ant en +cosmo politan +ðŁĺĬðŁĺĬ ðŁĺĬðŁĺĬ +patri oti +temp er +promen ade +nav ajo +nam m +wrink les +dc fc +le ach +bru nette +r f +cout inho +al ti +tradition ally +op tome +na z +accord ingly +rec ard +de ets +sw ell +po sure +whit ening +strang er +illi on +here ford +u wu +ro bber +cotsw olds +cl en +gor ge +nam aste +re lish +gri ff +adren aline +bla sio +val e +ê ² +toler ate +rail minindia +jen sen +ho ven +el lu +ob sole +eisen hower +unidenti fied +than niversary +body guard +Ø ¯ +i dge +sch al +stock port +sn i +re taining +po po +pix ie +oli thic +ki er +ha jj +sa z +cor bin +!!!! !!!!!! +v it +me gat +de h +circu it +af fleck +theore tical +hope less +u ab +slu mp +b ice +jam med +let stalk +can i +side ways +labyrin th +re fs +ha hn +jare d +ðŁį ¹ +jam bo +ph yl +enhan cement +c tr +ful lest +se ye +do ba +cho ic +yo s +cb j +andr é +re watch +pri ma +doctr ine +for gets +u hm +ar ound +u le +art lovers +shi raz +har th +ex tor +Å ¡ +unexpec tedly +eli us +y x +em my +se ac +ðŁijĩðŁijĩ ðŁijĩ +correc ted +com bu +wom anc +cou gh +what son +publi shes +divers ity +back bone +lock down +mesmeri zing +nor te +ma b +desig ner +í ģ +ra gh +mole cules +get outside +the beatles +semicon duc +nach o +lun es +ham mers +sul tan +o on +fe ren +att ach +ar qu +uttarak hand +s ash +; - +tre ad +i ko +ar thur +scandin avian +r ation +ga el +charge able +fish y +v ma +hand bags +char a +ay ne +de fam +sett lers +qad ri +pal ais +in wx +apocaly ptic +poo ja +a es +at ories +proof ing +n lp +ts la +v ina +li do +dee phouse +informat ics +v v +pp ings +di ss +à ¯ +uhur u +st ony +betra yed +b aff +my ra +as pen +allow ance +tam ara +ci f +cor bett +ser ge +di go +ambi gu +pain ters +p cr +p ca +nom s +lo ft +ve e +opend ata +ðŁIJ ± +alex andre +identi fies +fantasy football +re production +brom ley +ware agle +mm er +p ss +cu es +ay at +hut chinson +sar ac +jack man +ira h +ap ink +col s +aussi es +ex ecs +day ton +ðŁĻ Ĩ +im v +har am +chuck le +authent icity +ar do +incub ator +ภª +photo shopped +embrac ed +fight for +gor man +zz zz +schol astic +cri sps +te apo +mid night +ga ine +col lier +s ate +de tte +å Ń +imag ine +i ff +tw ili +i fication +teat ro +nor ma +es ur +emergen cies +rise up +r inger +hass le +cait lyn +tranqu il +vers a +se b +over look +gin i +bo go +se re +may ne +henri k +contamin ated +rhapso dy +pro portion +wildatlantic way +âģ© . +organis ers +tran e +stand ard +sper m +laun cher +ric ci +her ts +paper work +showcas ed +mer yl +pen a +p imp +disa strous +^. 
^ +phar a +x is +fron tal +sw irl +sp ills +swag ger +smart watch +sizz ling +savi our +cat ar +bb cr +refurbi shment +dr is +citro en +absor b +patrioti sm +il leg +chro mo +fresh ers +ru s +lim iting +ef ish +down ed +man dir +hazel nut +p all +mac on +disappear ing +quali fies +bo on +bar racks +am ine +gen dere +ðŁļ ĺ +j es +ãĥ Ń +qu ito +middle weight +sch au +quad ru +aci ones +limit less +ðŁijĮ ðŁı½ +ch man +ar av +regulat ors +it up +batter sea +mil ford +g z +tic king +gh ou +cru shes +tu tu +dread ful +fam ine +for change +dalail ama +ðŁĴ į +whit aker +hash mi +h us +vo d +bet te +aa ah +iso o +ðŁ¥ Ī +ha ar +la ine +b v +all day +spr out +indie games +free bie +gree ks +but ler +ill in +ha al +ware ness +si ma +public health +gam a +wa a +oun g +goo oo +okin awa +off enders +im pose +ho c +young ster +story teller +sc ap +figh ter ++ , +whit es +music monday +re za +go ducks +bri a +mi um +cas per +cru mbs +a ad +marti alarts +ch p +ri gged +tn g +harve sted +sa k +do jo +mill wall +b nw +oc d +histor yof +t mr +si rens +fan ci +caregi vers +vir a +son i +recur ring +acknowle dged +ðŁı Ł +oph ile +bu cky +stre ssing +roo k +di gger +vi val +san do +fle et +si ers +sel caday +refre shed +anti fa +a que +po lo +disappear ance +de mb +âĮļ ï¸ı +ren ted +ber ger +g mb +cu la +ss al +goo dy +u hh +marcel o +w anna +soft ware +shop small +turt le +tom as +fri sco +ðŁĺį ðŁĴķ +jim enez +c su +day z +an do +wyn ne +choreo grapher +cerv ical +trail blazers +ed g +zend aya +travel blog +el s +whole some +co g +lab out +ar ney +del le +su isse +ma si +ine se +om be +fi ddle +re claim +pa u +wat cher +sla in +ber ty +opti mum +el ites +min is +tur key +patro ls +ger ard +au reli +wild ly +wal tz +br gy +w ob +cre st ++ ++ +ve z +fro sted +davi do +the x +param edics +p into +han k +du pont +ur g +fo stering +micro poetry +spec tre +---- > +ne uro +fri da +music al +galve ston +e ffic +sc ape +pal azzo +th all +pro visional +p js +au re +ðŁĶ ľ +mam amoo +kit ties +cre e +wa k +lo ool +lu pus +cn blue +à º +ðŁİ ¬ +rac ed +tro se +om as +stri de +co ors +⤠µï¸ı +in comparable +cy ril +broad er +arec lipse +ðŁį Ķ +inter val +ti ru +co working +w aco +a ham +a bee +flouri sh +the times +ol ini +kick boxing +lu cer +at la +as un +casser ole +mi aw +lobb ying +jan ice +cir que +re flex +le ary +sanat omy +tem pest +se mb +mur dering +us av +ro bo +on et +p cc +nati ves +life of +sa ha +ruth less +rel ates +appeti zer +pye ongchang +nor d +er u +a thing +ug ly +pl ying +bran ce +organ ise +kend ra +dat o +chees es +par ma +burn out +a stra +pre toria +adjust ment +uk u +sl o +li ken +fav ors +cli ve +be ets +snow donia +go tv +sy n +open house +pan i +portra yed +sl ated +me cca +ren al +supportsmall streamers +staf fs +da o +bi ker +vik tor +tit us +admi red +ðŁĵ ± +hurric an +he ats +gl ory +photo genic +mer i +de por +burn ham +or angu +dj ing +impre ssionism +ign ition +ca i +w ynn +de pe +cove ted +colla gen +sau s +or nam +administr ators +ss on +nh politics +hahahaha hahahaha +aspir ations +r gb +swol len +so we +sc r +diver gent +hou ghton +han oi +d ory +ni ki +land ry +b cci +ðŁijĮ ðŁijĮ +is mail +tri pod +her d +bhat t +dress age +tab by +ingu ish +hur on +à³ į +à ł +to das +evangel ical +chor ds +st john +slo ppy +marty r +face book +ali ght +sen sei +kath niel +r ites +zi one +u o +revel ations +weight lifting +pan o +nc wx +ac ton +à® ķ +Ø ² +som a +à¸ Ĺ +respec ting +mar che +fore man +be tty +ki k +shi bu +po on +argy le +k swx +et z +mar bella +brac kets +stand by +fire side +defi ance 
+v ex +britanni a +in habit +appo int +piyu sh +le ash +sci ento +fla sk +sen na +> : +at roc +sand erson +id lib +dhan ush +ðŁĺ Ļ +en thr +hit ch +de dly +al ley +dor k +mon do +cudd ly +mis sin +ye sss +night ing +j pn +w ary +ump ire +ma z +ê ³ +bab s +ĭ ãģ +stan ford +posse ssed +exce eded +ðŁĶ ¶ +wall art +tra p +j il +hi bis +sp ying +scri be +khali l +trans lator +lu mb +di zed +ch c +super vision +shut ter +ja g +_ * +yester days +ms f +hi hi +gonz aga +gille spie +vive k +ec static +this morning +ch us +ed es +ston ed +be es +ðŁĩ¹ ðŁĩ +tur in +ho ver +at rics +ster n +sam heughan +auti sm +mi ya +eye witness +writ ings +travel tips +chut ney +px rtg +keny ans +my stic +k rit +/ $ +red head +world ly +am us +op la +le ve +gab bana +se en +o clock +gang a +keen an +sc ent +ol dies +go green +corner stone +comp ly +con cours +ðŁİ¶ ðŁİ¶ +ha an +con fis +aw son +cle op +î Ģ +su zu +sau té +al gar +subscri ber +este emed +ãĤ¤ ãĥ +worth while +mel rose +flo ck +bri ghtly +viol inist +p ere +sli pping +and co +si gh +ha van +cu lo +m sa +fibro sis +matil da +ra fting +aw ard +ë ª +mm mm +ge aux +ste iner +sin n +help ers +beet les +ai mee +tai wan +pistachi o +mac beth +m zan +descend ants +on sale +in r +il m +grou se +sa ig +mo w +bi gre +adjust ments +tu la +mathe w +transl ates +mu h +bol lah +ðŁĴĽ ðŁĴĻ +amo res +ab outs +bomb shell +bla ster +x avi +s ns +k roger +ga ther +erad ic +daf t +chem o +ben ches +ðŁĩ© ðŁĩ +ut v +our a +n ko +gator ade +biaf ra +ok state +im danielpadilla +dom ains +open ingday +kid do +do i +ric e +day care +mac millan +ba thurst +cheer leading +ðŁ¦ ģ +cash back +k won +hob bies +exem pl +ries ling +âļ ª +ag les +ny s +every thing +nav is +ad di +magne sium +faceli ft +ark ham +grand es +extre mist +don at +vit ality +pump kin +be tta +sl td +arti san +li by +pe aked +ah hhhh +mary am +assi m +un sc +ment e +al aya +low ers +ar as +gri ev +le ip +gr ati +cri ses +spr ints +exe cute +w to +ms d +mag ical +re viewer +spark les +juke box +ðŁĺĤ âĿ¤ï¸ı +pay back +licen ses +dun kin +bel t +lake wood +h ateful +bud gets +rev amped +ph erson +ky iv +went worth +ro sen +cru ise +gi ggle +def star +assassin scre +ym outh +win kle +w fc +band wagon +b kk +w iring +kear ney +south side +pe tit +! 
ðŁĺį +nor dic +mir za +mu gabe +v l +scon es +k tv +sand al +du c +m alls +ðŁĴŀ ðŁĴŀ +it c +al ay +im pair +un rest +flo ss +c é +ab ou +var ying +muse o +ser ver +di ya +hibis cus +ero y +mer ritt +fin dom +f pp +un usually +go tt +conting ent +ali aa +ball on +jo l +hi ked +zy me +ay r +ag n +ga z +perio dic +spar ty +practi sing +lin ton +tal is +cy pri +womanin biz +radio disney +ðŁĮ ¼ +jump ers +endo cr +ðŁļ¨ ðŁļ¨ +and on +shar apo +mi er +ma sonic +fac tories +vi en +bb ers +ìĽ IJ +hol d +ke bab +be ak +approach ed +ac milan +mun ro +ko sher +excell ency +negoti ation +walt disneyworld +cr ouch +te asing +suppre ssion +en ya +b ce +transformation tuesday +cal lie +vis was +p gat +ic ted +end ings +esc u +recru ited +it fc +collabor ations +g ino +snu ck +ausch witz +i fc +x ii +ke sha +ger vais +clo ak +x l +sa ad +prob ation +pre cau +mac in +anasta si +le k +e azy +daysof code +mariah carey +yo g +stit ched +boy friends +sh ar +ph ile +ag u +twin kle +phi shing +week ender +ic ton +gurmee tramrahim +al ton +l eness +all an +pen ultimate +kry stal +go u +lan de +dis mant +ab using +nor se +pat erson +ed mun +ap an +xi umin +sk el +cat walk +re act +wal led +t angle +br yn +ve to +super moon +cas ablanc +appreci ates +ski d +bo th +catal ina +ele ague +cyber monday +cau tious +ðŁ¤ ĵ +nov o +hamp ton +ha ye +jose f +var an +lo bos +roano ke +orph ans +tt in +squ ads +ishqba aaz +black panther +e tu +k sh +cru mble +cess na +reli eved +scul ly +pollin ators +explore canada +ki es +kam loops +kir an +pri mal +sett lements +hot spot +brain storming +ce dric +bi ennial +sh ant +âĻ¡âĻ¡ âĻ¡ +do on +hear n +walk way +fe m +ve al +deport ation +tox ins +elimin ating +descen ding +by the +bla sphe +ha sta +comple ment +as cent +ri ga +provo st +âĸ ª +wee ping +anti semitism +employe e +unearth ed +pin o +natali e +bla d +ang ola +lock heed +in ian +ag r +ni ster +im pala +m ke +fan atic +âĺħ âĺħ +ðŁij ¸ +lu ch +simpli fied +gall ery +econom ic +cy borg +con i +sel ma +in ception +ko ala +dv ds +cre sted +m mor +visi ble +n sd +ðŁĻĮ ðŁı½ +w under +refriger ator +re opening +e era +carou sel +as p +balli stic +victor y +mo tive +tre y +sharapo va +si i +mon ter +int end +west chester +sp e +cy mb +vi dal +ll ama +uni v +fin er +crafts manship +jazz fest +b ch +ag gio +n cc +lamb da +tranqu ility +cis co +ba den +so bbing +of i +go ta +ru mored +war med +ore an +ac ton +mar ci +gh ani +âľ ĵ +as sorted +pembro ke +pen elope +da f +at ty +aim o +pretz el +carni val +than os +ko chi +mer sal +ham radio +ar twit +cas c +guer rilla +kush ner +k app +al ise +todd lers +steward ship +o tti +ter ri +tem pe +rest less +vit o +zay ed +rsp b +pi on +hi ppo +haw thorne +in as +am ily +nut cracker +lo p +d ali +tro pic +ðŁ¤ ł +ul o +jare dle +py rene +pale o +usa ir +m ould +it ated +gene tically +biom ass +ðŁĩ³ðŁĩ ± +do dd +practic ed +monarch s +un manned +m buhari +am al +photo gra +ko ol +bren don +ju ices +cu re +world bank +poin ters +ðŁĴ Ŀ +tur f +le ds +bor ussia +bapti sm +warwick shire +moun ts +gay o +be gg +co pied +asi ans +k g +moder nist +gi d +front man +concentr ated +y t +sc avenger +iron ically +adi c +ps n +ðŁ¥ ī +cultur ally +yu v +mac arthur +fertili zer +be withyou +ri gor +min ors +z oning +âĸ ł +ri r +adole scent +vin ny +ren g +sand stone +gu et +we sth +ple dged +lac ed +sp ide +v ai +ty coon +seiz ure +du p +appalach ian +ro k +cathol ics +sey chel +posse ss +la ger +jo di +cham p +stra s +d ina +cent uri +cal der +blur ay +ðŁĩ¨ðŁĩ ³ +mo do +an nette +youtu bers +chap s +ang ling 
+label ing +a qui +pk wy +ly le +bi sexual +lit ur +dug out +li bby +grey sanatomy +sub stances +august us +rall ying +fi del +ing ue +äº º +hallmark channel +tooth brush +m á +adi rond +ag gi +ðŁĵį : +cru sade +tax ation +k z +i ver +dou bling +room ie +wa b +en rolled +az on +a ju +grand children +as df +ðŁ¥ º +mat ic +ough ton +utili ze +ðŁĴ £ +pon der +rais in +dys function +co bain +butter nut +e man +su red +dri an +and friends +with the +on omy +heine ken +bri dal +leader ship +pyram ids +deutsch land +jo cel +bo wel +y qr +horse power +be acon +ing eni +gra dient +fer mented +mo om +thing y +pot assi +wrist band +bor d +bo died +ðŁĺŃ ðŁĺį +ma pp +ka u +cyber punk +ph ish +loo king +co ates +ap ur +am ie +uk labour +at in +g la +adop table +shel by +v illi +ri ya +m ingly +cli mber +bumble bee +ðŁĺ ¸ +c sd +âĿ ¥ +hospit alized +c ki +hat er +ch r +re tina +it a +fan base +beat rice +gwy ne +go ss +fo s +favor ited +swachhb harat +mal ade +mon mouth +" [ +si van +sh hh +command ing +sains burys +wee d +g man +ss w +rep tile +iv y +tro pics +roll ers +over cast +ex position +masquer ade +man crush +wa ist +spr inter +sle et +le vin +j pg +_ ( +o pel +explo it +ap a +po we +wrec king +jong in +or b +er ick +bo sco +pra ising +ber tr +to wing +in security +ku t +resto cked +rr p +prescri bed +trafal gar +per t +g ases +app rais +g har +music als +âĸ¬ âĸ¬ +mc fad +ag ony +conditi on +equi p +shi k +atra vel +ðŁĩ¿ ðŁĩ¦ +ke h +abduc tion +pe oria +wil kins +g ms +as d +ev i +ðŁĴĹ ðŁĴĹðŁĴĹ +u z +mo c +halle lujah +guad alu +lou vre +dra wing +go ve +ph ant +fri e +web dev +program mer +z able +games com +clari fy +li th +kin ky +âĿ £ +labour doorstep +son ata +ju ris +mai den +vi adu +buch arest +conditi oned +capit alist +u de +ps b +sp ca +lul la +footh ills +kay o +bon d +wom b +roun der +ce sar +bur sts +ap ra +sw oon +sab rin +fra grant +cle arer +ku brick +cli max +jour no +ag le +ðŁı½ âĢįâĻĢï¸ı +poo ch +hal e +sol it +sal mon +organis ms +bron son +art en +hodg son +alo ve +vent ure +bb i +ae a +ðŁIJ ¢ +ld n +d nr +o zone +el las +man ny +azz ur +un beat +tru ffles +th ong +ma ñ +las ers +ley e +gettys burg +back packs +or is +ma ison +craw ling +la bra +cl ing +dra gging +ste al +dou bt +de van +ck ers +agent sof +photo bomb +elon musk +abo y +dist ances +story line +sp i +nor than +europe ans +wh ale +ser pent +ðŁļ ² +fi or +tr it +ox o +awar ding +class mate +su fc +smar test +rich es +pr k +big foot +ar mb +bi polar +dw elling +om ars +k wan +gri me +m eng +freder ick +navar ro +sorry notsorry +jaredle to +pa ve +sl ack +barn sley +att ar +evic tion +accumul ation +o ir +cat chy +wel ter +vik as +has see +nik ita +mo yes +mathe ws +shi v +gat wick +pro filing +compan ions +mar rake +an tics +ðŁĻĮðŁĻĮ ðŁĻĮ +se se +bo i +bart lett +poison ous +ab uses +ym m +kam pala +guggen heim +imv kohli +dol om +bre e +thro ttle +gare th +fitz patrick +un ya +par ad +mar got +j nr +we a +potassi um +p nc +disgu ised +cra sh +ren ergy +ill ic +coup led +ni els +ci ones +æĹ ¥ +im ent +despic able +d ye +what cha +conne ctions +paralym pics +gaunt let +wait rose +suici dal +star ship +vap or +st ou +law maker +coo led +si mo +then o +offro ad +ja den +bas que +vick y +lu kaku +centr o +tri sh +strate gist +medic ations +hor st +b fc +gra il +sharp ly +ad itya +tom b +kau fman +tri pad +sam ba +pastor al +brit ney +sag an +hill side +mas ons +sar a +z one +x u +to tes +rob bie +app en +mon tag +der o +short film +charis matic +tat ors +ki ba +and ri +al arming +split ting +ic ar +th ug +scari est 
+sylve ster +an an +u trecht +a difference +me ade +bu ster +air strikes +cu ffs +account ants +ðŁĺ¡ ðŁĺ¡ +new t +bo tt +issu ing +cl ancy +wwen etwork +kyu hyun +rese mble +pajam as +sin k +kin ney +sul ph +or k +li es +la gh +or ton +ra hul +d sc +we will +re am +collo qui +shar ia +hec tic +sar casm +land er +tm z +endor f +ro z +ham mered +fri s +w adi +pope francis +he it +flash light +un born +op es +hol iness +ðŁIJ ¦ +nach t +im sa +gr acing +bj p +ver ts +c sc +home owner +a que +bigo try +anni e +bag h +âĿ¤ï¸ı ðŁĺį +car i +thom p +dispo sable +cardio logy +pat ented +hh hhhh +ld r +stephen son +cro res +fan ning +cli mat +ðŁijį ðŁijįðŁijį +ðŁijį ðŁı¼ +aer on +piccad illy +bank rupt +sil via +emplo y +don ny +commen ting +screen writer +io ta +ce an +anc ers +tu an +street wear +ठ¯ +sk ine +esp a +asi f +os ce +she ppard +more cam +bott le +der s +orac le +google play +aver aged +edmon ton +steph an +sister hood +cru sted +stag gering +methodo logy +congress woman +c abo +tri ggers +mil ky +gli de +tooth paste +room mates +nu ff +gu am +sprink les +alternati ve +wat fordfc +uof t +hal ey +cont acted +bun dy +pro stitu +gh ar +pre ston +on site +hil ar +g ts +c att +hamp stead +? ?! +ðŁĩ§ ðŁĩ +bbc qt +aless andro +resi st +ma idan +t ko +shad ing +pin up +gal lo +sin u +at ec +fun k +ac lu +stri des +rhy me +wet land +bbc springwatch +t ins +wild card +st our +flamen co +pau la +onto logy +gang sta +am ade +ãĤ « +t bs +skelet al +run ner +jard in +harri er +hun ted +z hen +believein film +de mean +au diti +re start +chon dri +âĿ¤ï¸ı ðŁĴĻ +mcla ren +ga b +sh um +au sa +lewi sham +y pg +k jv +fur nished +dor o +bon ded +mor ty +lat itude +_ ) +lo va +water ways +vin ai +shor th +drun k +c ay +ay ana +kap lan +capp uccino +spr o +life boat +has bro +spol ice +tor on +do ing +dam n +sh ree +foun tains +ent ation +mar u +boar der +to pless +j ada +chan ning +ul ls +en closure +gib son +fractu red +brit ton +à ¶ +t ous +por th +dra f +tra iling +mar gate +eli fe +down ward +lin n +gla des +girl power +ak rish +u ki +ron da +ts c +appreci ationday +vis ing +lo om +ðŁį ³ +mex ican +ar gos +y ya +jad ine +south port +d end +si sta +rede em +men g +bra xton +antioxid ant +s key +mp g +fin ding +vibr ation +ce u +kh art +di mini +cl ine +shel ly +hin es +ī ï¸ı +to pical +no ver +ma xx +prim itive +illustr ate +b ounds +tren ton +join tly +breed ers +u chi +wakeup america +b ada +ðŁĹ £ï¸ı +gu acam +sp heres +pere gr +youth ful +lo lo +bir min +t ly +jeremy corbyn +defe cts +co sm +a rent +v aa +bag els +medi ac +cori ander +ic ago +g haz +ab bas +re model +struc turing +pu m +out law +ad ani +r bc +gul ls +n li +confu se +ðŁijĩ ðŁı¼ +vil a +mcnam ara +correc tions +mug hal +ser i +re gain +ss b +lea ve +haha hah +gran de +di stressed +re chargeable +ho a +hou sed +sti l +attribu ted +opath ic +di ps +pri t +head phone +conclu de +pil o +he t +ut sa +nit in +je m +sni ppet +tutor ing +op er +sun k +en sla +cha u +ac orn +quinte ss +ran kin +affili ated +our lives +cl int +se ater +isa ac +ba shing +sme ar +nur se +doo dling +" ; +sa ku +atroc ities +im am +g fs +viol ating +comm end +brad shaw +er ville +b illed +b be +thul hu +i phones +moo se +di os +re w +me thane +strang ely +whis ky +ti ghtly +spiel berg +radi us +notic ing +wi f +ig nati +i fa +ap is +w ali +ha itian +bu shes +y z +v l +ex ited +asse l +tru ec +dom en +ash er +in king +newyear seve +hend ricks +bat i +ìĿ´ ì +rich ter +mon santo +con line +agre at +ðŁ¤ ¯ +master pieces +ar n +rough s +cle ve +se v +fashi ons +to ya +sh ail 
+cop eland +aqu ari +dec als +are you +y aya +a str +fon t +ml m +ar ca +pp or +pol lock +xper ia +conserv ation +chain saw +ag gie +?! ?!? +si le +sh on +ìĹ IJ +note books +marque tte +de us +bb led +spic er +mc cabe +nor wich +modi fication +boo sted +stru m +sales man +bang le +nis san +hez bollah +brea sts +a af +anth us +sk er +ow ed +her os +gi fs +fo sters +eat ers +du es +_ / +lymph oma +sf am +me gal +afri di +ag ic +p amp +jeal ousy +ðŁijĮ ðŁı¼ +calcul ate +napp ing +g ale +ðŁ¦ Ħ +lub bock +assu med +ren ting +íĥ ľ +subur b +ãĤ · +tech nic +u cla +in front +gar net +ster oids +stri ving +ho war +mo ver +le ton +bull do +is in +ci ao +sn z +fore front +d ams +mid wife +ma wards +cla pton +we in +subsi dies +spr oud +rother ham +phan tom +ar ach +spi el +rac ket +sel amat +no on +l bc +enti ally +ðŁĴ ¸ +sil ve +m oud +kine tic +y asi +ðŁİ © +o ol +mi ku +i za +fer a +flo ren +barber shop +groo t +z est +ne ars +stan is +z and +police man +juris dic +form ations +appar atus +sp d +arti fact +to sc +motiv ating +womanc rush +re dro +diagno stics +ra za +out fitters +el xn +dod gy +ry n +sh d +ortho don +ol de +jay anti +bal ances +quic kest +can ton +friday reads +! * +na a +a ak +ðŁĶ · +behavi ors +rasp berries +ä » +polit ical +cam il +å ľ +di k +ast ounding +lie be +novel ty +tur moil +sul ly +spring break +hon ouring +cc g +ðŁı Ĵ +my little +ky c +pro ms +ðŁķ Ĭ +à ¨ +bi ge +av ril +ðŁĩµðŁĩ ° +mari on +as ants +sur ya +oc tag +luf than +ac ron +fayette ville +ti que +love s +en ca +de kalb +ta ver +de vote +aux iliary +joh annes +tread mill +ay an +qu r +donald son +cher yl +" .... +s ven +kir sty +gun ners +ra dish +o ahu +v sky +i ble +con course +b ps +elo qu +ash ford +te bow +roblo x +ma da +dri ving +th day +spro ject +m ms +band ed +. !! +libr arians +flan nel +intoler ance +her al +ç µ +neme sis +list a +tar ak +cry pt +star plus +vish nu +sc ale +cr is +% ), +j illian +regg ae +pegas us +ol in +ip ment +man ic +l fc +godd ard +ite am +parl our +anch ors +lee minho +talla hassee +ant it +d ho +kid ney +y ash +batt led +az ad +gar is +faul kner +sni ff +papar azzi +ed m +phy llis +con tested +aa ay +se ca +k ton +vel ve +rain ier +for um +tam pab +ho sp +trac tors +ox fordshire +no tion +guang zhou +ðŁĺ ¯ +ref ill +wednesday motivation +sli der +mukher jee +pr att +fon taine +alph on +af ar +ts i +pest icides +fi ends +mo cking +bra w +tran sat +do ses +co res +hom ophobia +docu menting +zlat an +con doms +s é +sun set +kun st +ton ga +ภª +v ation +sp ray +chow der +ra ps +palla dium +nor wood +music history +hoo ker +si si +osp rey +ph ys +conce ded +bob cat +ar mad +ze it +Ù Ħ +ðŁĺģ ðŁĺģ +mer idi +ðŁĩ· ðŁĩº +corn wall +! ), +touch downs +ze it +chal et +mm m +al che +gor illa +fo ss +ati ku +lumin ous +ivan ka +be ek +sta res +sw iss +âĿ¤âĿ¤ âĿ¤âĿ¤ +scru bs +me ath +gusta v +jo gging +confe tti +as os +ers fc +breit bart +applic able +autho red +ya ho +h in +displac ement +j v +ðŁĮ¹ ðŁĮ¹ +ot c +non profits +diec ast +gu sto +inte stin +c ages +me en +lu kas +moon ey +ðŁĺ · +very day +tor ah +is sion +wa c +lever aging +ish able +cu se +le wood +may an +turn table +ju ice +tru sty +tu p +eti quette +supervis ors +stu n +gu zman +confe ren +ric o +fe ast +back ward +pol aris +mic he +jo g +h ing +field house +vel ing +sho cker +esc ence +ठ¾ +vi be +anasta sia +mar ched +kill ing +Ķ ë +fe tt +exop lan +... 
( +snow day +lo h +ir ani +la khs +del a +po caly +boom ers +dictat orship +ac er +tur keys +quarter final +muskete ers +ðŁĴĽ ðŁĴļ +sf x +museum week +sc ala +ri sis +( ðŁĵ· +ãĢ Ĥ +z ies +bo eh +hu es +lu sci +dol a +impeach trump +roo d +don caster +tor re +hero es +fo yer +tar i +blur red +ke w +frank ly +dro id +ap al +Ð ¼ +y af +bre t +par agu +cac ao +ðŁĻĮ ðŁı¾ +ru e +head aches +shaw ty +char ley +pal er +go wns +correc tional +ðŁĺ© ðŁĺ© +breaking bad +ol ing +da p +endeav our +cit adel +tra d +incumb ent +medit ate +foo ted +ðŁĴ µ +shab bat +dayof the +wil lem +gal way +to red +marri age +f illion +sleeve less +aud itor +jin young +invin cible +kad una +a and +volcan oes +mon eti +indie gogo +buccane ers +ðŁijī ðŁı½ +ãĢ Ĥ +lay ton +cuck oo +hu mber +buzz er +Ï ī +to re +stra ins +sto m +pa ine +s we +du ff +z ou +si mi +li pp +ur n +se agu +ðŁĶ ® +sun dae +hi c +ðŁĺ ¨ +bull pen +u per +flyo ver +al dridge +glo bes +ali es +ken zie +ge es +y cle +sp lin +mag enta +j ha +bal u +gh orn +ti pper +wick er +taste of +con clave +ch ale +inv asi +cat er +dio xide +me gab +win n +at p +transform ative +nest led +hi g +bri dging +lil ies +chee red +bad dest +sc rolls +real is +dipl o +ðŁĶ « +conce ssion +prefe rences +explo des +er gon +introduc tory +ine au +ch af +som es +land rover +spir ation +sex y +sco recard +illustr ates +soul mate +wi en +inter disciplinary +fore casting +ent ities +glu ed +en lar +cur t +percep tions +boot leg +mi re +asho k +v az +hor ne +cal le +ac ulture +ther oy +night time +oc al +character design +ar mist +ðŁĺı ðŁĺı +yah oo +ac eae +to se +even to +sou t +nay anth +wh om +v are +ri gging +gen us +hi ve +com mands +sti e +day a +ethan ol +en f +hi fi +flu ence +cle mson +re invent +thermom eter +humor ous +emer ging +aci ón +ðŁĺĺ ðŁĺį +s ity +haw ke +accompan ying +t ility +ðŁĺ ª +re cess +protag onist +l ery +dun dal +int l +britt any +q bs +off the +marri ages +how to +viol ated +adel aide +wit t +lanc er +pak v +hu me +st ade +bra gging +ou tright +ad c +super st +real time +cu res +garden ers +ero ck +dale jr +ver o +bar tol +mo ti +mc fly +v pn +st ink +over rated +guer ra +e tis +ath ome +twd family +th ab +tn x +rafa el +family travel +x ley +sat anic +equ ations +ru dy +wal dorf +stan i +tu be +meas les +zimmer man +obli gations +i ously +bow ser +trans former +sho ppe +shak en +gh ouse +to d +ke tball +share holder +mar ca +kp mg +ak an +given chy +coast al +au th +roller coaster +mar ches +coordin ate +cine ma +apprentic es +par lor +mit o +men on +consider able +bar re +glo ss +enh ances +jaz eera +fal mouth +thra sh +stat en +k zn +eng el +samanth ap +flo ppy +sal om +ðŁıĨ ðŁıĨ +w ack +deliber ate +osc ill +herit ag +du sted +orni thology +pad dle +fer ns +bar un +cl ans +anticip ate +a ay +mat ically +é ĩ +tu mble +post man +unic ef +tro tter +op d +leaf let +ge ist +cease fire +scre ws +cre ation +wal nuts +longh orns +under statement +ab b +proxim ity +na x +un ity +turn pike +orda ined +dub step +chak ra +me ch +love her +look alike +donne in +vir on +Ù Ī +bang ers +vari ants +out dated +in ta +cri sto +sp elt +food and +f on +stefan i +margin al +hu tton +ti ara +tel ford +qu en +fair grounds +que tta +mikha il +heal er +v ball +ty re +under grad +gl end +hom ers +scri bed +main tains +po che +mis sal +mar ko +u as +á n +sh p +con vey +pad re +sab a +pu glia +madhu ri +pa xton +chap lain +n ago +ca si +... !!! 
+fli rt +sal eh +k are +di re +stam ped +extre me +ðŁĺĥ ðŁĺĥ +ho ppy +guadalu pe +advant aged +eu char +p low +un n +mac qu +port land +cla sh +pe s +lou bout +y p +keep ing +arca dia +fran kie +fi u +de th +encyclo pedia +si ze +inve sts +ðŁį © +geo logical +fran ç +con front +ðŁĺ ¥ +d ys +af m +tex an +graph ene +repost app +ac f +ur sula +gaz a +dd led +fu m +wsb tv +m be +fron tiers +chrono graph +ke s +inter faith +tab oo +spar ta +won do +flori st +em braces +ca w +no el +arch ers +ðŁIJ · +roman o +ban an +sh akers +melo dies +geo thermal +se phora +ìļ ° +оР´ +pro c +hand shake +pan de +popul ated +slow down +hor tons +registr ations +un deni +lan ts +pas sover +thak ur +li ef +adhe sive +pe tal +micro scopy +memph is +confir ming +air drop +mesm er +perce ived +ming le +lifel ine +gh j +worcester shire +pas sions +ach er +el lar +ah o +firen ze +bar ang +letter man +hat field +lu cha +je ter +e shop +william s +horo scope +pre de +east bourne +dur ga +di version +al trin +seis mic +premi osm +nar co +ti r +ori g +or m +land fall +ci ous +lin do +max ine +x ico +tra y +os wald +c ba +ric otta +n cr +mar au +ภ² +gladi ator +ch ery +lun g +u me +po psic +lon ging +can als +ta ya +decentr alized +sho pp +pres sures +mahar aj +eti had +wal greens +succe ssion +sign aling +li g +staf fer +north korea +def ying +as ma +de g +peri meter +oak ville +m sk +balti more +rece ip +de ple +ðŁĺŃ ðŁĺĤ +jambo ree +> .< +rsp b +puni sher +consider ably +in tothe +pari sian +acceler ated +polye ster +low es +fr ying +sauté ed +mou ths +seychel les +ra x +go dis +dak ota +house wives +the me +mat inee +black bird +ye sung +pre fers +pelle gr +in ated +trun ks +stronger together +re pet +re pairing +ped als +toler ant +her r +dun ne +indic ation +decat ur +b tv +exhibit ors +ik on +friday motivation +bra gg +live tweet +al ves +womens art +foreig ners +wal lets +min dy +lan ey +bb in +tv miaw +lif ter +tar get +tam e +dr ou +astro photography +mp c +g pu +nord strom +fric tion +run off +lov able +sp nfamily +ext ingui +bloo dy +sch el +arti stry +sw ish +scar ce +ph ils +max im +pos sum +com promised +sty li +sc fc +is sa +birmin gham +sket ched +angel ica +ordin ance +je ts +conqu er +ðŁĺ IJ +online shopping +s ori +reason ably +nue stro +ar turo +ch l +benef ici +spho to +wel t +ni kk +ðŁ¤ ŀ +dan ao +for mid +as se +af irst +âľ Ĥ +gil lette +as sor +an onym +sel ca +fe mi +bear able +y and +ar mory +cre pe +celtic fc +bra vo +in expensive +de lec +ge cko +new market +snow flakes +kab ir +con tra +can ning +mor pho +gar wal +ðŁĴĥ ðŁı» +fight ing +mu tation +woo dy +ju gg +gr aces +premiosm tvmiaw +kenne dy +gu p +sa e +op ha +off spring +fini sher +bet ts +span ning +mar j +h one +sh ing +contin ents +samanthap rabhu +un related +l acy +explo sions +benjam in +sophi e +no ting +micro soft +as sen +a hoy +i ker +ho fer +mo e +ah madi +yan n +an ak +ma hi +be u +aha h +creep er +baahu bali +am at +pri ory +haw keye +deloit te +sko da +print making +assemb ling +mirac ulous +no ch +sw o +leg a +oper ates +border lands +eli e +stron gh +rep tiles +pir ate +un fold + ¯ +qual comm +un predictable +ot r +rose wood +direc tional +counsel ors +corn ell +liber ated +j ad +ir regular +bulgar ian +high ness +vodaf one +sw ild +mini mize +gra zie +๠ĩ +r stats +stre ep +ome tric +humb le +lu mp +l ille +b ü +home depot +tripad visor +ki wan +a via +er z +ex ico +du f +blu men +mi zing +ar ma +in im +con stan +sor a +ju al +au n +tw ell +tren ches +her a +r k +po plar +recipe oftheday +ll an +bhu ban +short ages +ing 
don +bridge water +ðŁIJ ĺ +fortn ite +cam den +un cture +pro w +colon ies +t ks +n go +b hm +live pd +spl ace +sli ke +happye aster +ter rence +revol ver +j ed +yy yy +office of +m ts +exist ential +r ourke +explore bc +sse d +pri est +vix en +si ding +k pa +a har +ju ic +ob struc +foren sics +uk mfg +cancell ation +we ary +ab q +ele c +pri zed +deb ts +me zz +salv atore +m dc +gre tte +c gc +th on +snow storm +ts ch +cook ery +å ¹ +wa xing +n acional +mur s +ra ve +cap es +ger main +dri pping +sub mitting +ome lette +iter ation +aj es +shim mer +fu eling +ðŁĩ§ ðŁĩª +li po +bo bble +un follow +islam ist +hi ber +cat s +agentsof shield +sen si +____ _ +ster ia +inst al +ausp icious +har row +over land +femini sts +inst ant +char iot +blind ness +sp ed +sc arec +nu it +mini atures +ho seok +glo ck +fifa worldcup +e te +dis m +we iner +ex foli +ear ts +à¸ Ķ +my art +man il +iss ant +form a +in cu +buffal ob +in tim +mc cul +anj ali +po po +un doub +hil a +fun gal +thank ful +fu tur +en dish +ren ds +th ar +she ff +ring o +nichol ls +io wa +po tom +cl ams +ãģ Ħ +acon f +stadi ums +di mp +di k +residen ces +do v +caric ature +seagu ll +kl m +confe ss +sla pped +cele b +turb ines +pp v +nur ture +el ab +.... .# +tu ff +de press +al far +amii bo +di spon +e wing +que er +friend s +for re +âĺ ¼ +sw t +aqu arius +head liner +cur d +fi gs +o tters +love fl +kare em +go vegan +fri yay +consol ation +at ri +ì§ Ħ +âĺĿ ï¸ı +poly ne +gu ed +o ya +la us +intestin al +cam illa +scal p +pi r +leed s +horri fying +bore tum +dand elion +fer rer +ell ic +as x +so ren +re loaded +ale ague +navig ator +ine tte +add ams +al chemist +ak shay +dystop ian +awe c +n aya +al isa +ai led +ag or +avi ator +ali zer +smo bile +findyour park +cop ying +to ddy +sh ti +mon ger +cal houn +nap kin +break up +y atra +se thu +ric hi +eras mus +fer ry +am ore +prac tise +bo bo +power point +oo se +li ffe +chin a +sh ka +fad navis +du ane +war on +fal se +ðŁļ Ĥ +wa shes +disc ip +==== ==== +g k +ab b +stub born +medi eval +p ci +ðŁį ª +maril yn +h yo +man di +cr i +prede cess +continu ation +om usic +s lat +wh al +mall ory +bon n +shen zhen +ca i +âĺ ĥ +sa fest +for wards +dra wers +bla sted +sle e +mor phe +mb ta +dumb ass +ÑĦоÑĤ о +alhamdulil lah +ec lub +al beit +heal ey +ayurve da +adverti sed +cro cs +itt les +bry son +be i +nj pw +honore e +fu sed +ðŁĶ ĺ +mul tin +n aga +de parts +ko p +kin o +jhar khand +ed na +ax le +mil ton +supremac ist +marrake ch +domin ic +tran script +] [# +: ). +wo c +sur rounds +o gil +leaf lets +co well +whe w +tru de +proli fer +succe s +sports man +con dom +po che +k up +imprison ment +{ } +scram bled +å Ľ +ka ine +cell phone +metam or +con i +remn ants +ee z +down pour +afterno on +exerc ising +ber ser +architec ture +wick low +m ns +is p +bo c +n iss +mn wild +stu mble +r si +lu ffy +sil en +dd ad +bul lies +haw ker +bb cc +scu ba +e pp +que ts +for aging +pal let +ha di +cinemato grapher +cat chers +to aster +k hi +lite coin +kid lit +amher st +maur icio +ip ad +mar malade +fe y +don nelly +g to +est as +cere bral +ant grasso +zz led +vir gil +swa pped +ðŁĺħ ðŁĺħ +no dapl +greate st +nhl bruins +fra ser +b mo +ane w +. 
âĿ¤ï¸ı +se gregation +remark ably +mccor mick +lo gger +er as +contrac ting +âłĢ âłĢ +yor ks +uku lele +touch screen +de cked +ben n +south wark +ra vin +nu mis +ðŁ¤ Ļ +ru t +gre co +eth ic +red neck +ar r +t cs +ih ri +ðŁĩ« ðŁĩ· +l k +inher ited +zy k +viadu ct +marty red +hi gu +ss n +be in +street style +fer gie +bank of +æĹ ¥ +stake holder +exempl ary +cre ss +ess a +ero tica +intre pid +gom es +bra un +bethan y +bang tan +pulmon ary +m illing +doctor ate +trump russia +ठ° +s ani +bl att +pla u +depri ved +t le +ful ly +bour n +st ak +lufthan sa +kio sk +far oo +def y +bad an +ðŁĺĺ âĿ¤ï¸ı +rit z +tri sha +ran ds +middle sex +arab s +pro j +sport scenter +repe ats +iv f +bleed blue +as sure +o bs +territ orial +ele n +bever ley +ann ah +âĿ¤ï¸ıâĿ¤ï¸ı âĿ¤ï¸ıâĿ¤ï¸ı +z l +for good +science fiction +gla u +son ya +pri th +st weets +mix ers +mari o +ant elope +writing community +went z +den ham +be di +sf o +harley davidson +look book +immuno therapy +or phe +es ville +ed ged +tas k +sb ball +corro sion +kilom eters +co sting +play back +ke ke +di visi +u ter +re location +yel led +pen g +up beat +ser ve +âļ ł +hal en +stir ring +reh man +en v +schu macher +frag ment +alkal ine +sb k +resil i +share point +rol lover +tra sh +counter part +âĻ « +ob itu +à ½ +ãĤ ¹ +mul berry +ðŁİ Ĩ +auton omy +spra ying +nat l +love you +fran ki +nu k +esc ar +can teen +ali baba +de plor +mole cule +pu d +fort night +blon die +sp hin +portra yal +ta che +bu te +consi sting +freep alestine +c sp +im mort +d ns +ðŁĴ¥ ðŁĴ¥ +tour de +coo king +archi val +ga thers +bit t +b anc +pre mature +snow ball +poetry day +lou dly +fug itive +ed ay +em ra +ðŁĩ¸ ðŁĩª +sci en +node js +jur gen +je ong +band ana +un is +fox sports +v andy +pro visions +wee p +tu k +i ko +h oun +zig gy +z r +fil let +bat a +tin k +con e +we want +k ilo +hor ace +sl t +sc t +stay tuned +victor ia +umb ria +att acker +ingham shire +fright ening +no ir +fr at +con tempt +lia ison +ho i +br ink +tr ill +ni agar +kick ass +dun das +not my +rho de +bu mble +no xi +fa g +spec tators +mancrush monday +jin ping +distr act +dais y +wal den +portra it +ar thistory +vol tron +ev el +is c +ac m +r ite +na o +de ported +swe ats +ru fus +lo bo +labor day +gam o +ihri thik +bl it +abdomin al +ãħ¤ãħ¤ ãħ¤ãħ¤ +i it +e q +bu sy +allu arjun +un disclosed +de ton +pro create +ki l +ðŁİĤ ðŁİĤ +mitch ell +ki i +inherit ance +al p +jo burg +pat rolling +compul sory +un signed +ni am +l ga +eshop suk +tr illi +ma w +appreci ating +rock ab +mañ ana +an tal +mal vern +roy o +grand prix +sut ton +go ftheday +dig i +ãħĭãħĭ ãħĭãħĭ +t les +varan asi +erec ted +discip les +cont act +ðŁĺ µ +li d +⬠ĩ +scen tre +radi ator +ing tips +trans itions +thursday motivation +chem ical +separ ati +sal is +mi m +geo graphical +book fest +/ . 
+âľ ĭ +v ae +cur rie +ag garwal +acceler ation +the ses +lg m +u mass +pro portions +nat a +ani ans +ku ch +be acons +ap r +@ # +ðŁĴª ðŁı¾ +nu ke +sher aton +ki o +ma kati +polit ico +mor ale +ì Ļ +econom ically +gg ly +ss en +pa stries +intern ships +vic ente +fanta ken +aveng ers +accu se +slee pover +indic ated +the dream +ster one +ren ders +fro st +ou i +gre gg +d ore +⾨ ⾨⾨ +pu gs +sat y +nu mb +hems worth +tam i +la ssic +schi ff +igle sias +ag awa +] " +re shi +game stop +divor ced +theat er +clau di +un conventional +prophe ts +ac in +twel f +tow ering +t ml +sc lerosis +k wan +ge ts +distur b +na ira +ener g +pir acy +pru itt +noti fied +hen na +bra m +ground water +bl s +opti mis +$ ) +luci e +biz hour +fang irling +gr ills +or l +ver se +c ina +law less +artistson twitter +tele vised +marshmal lows +radio head +bar r +m fc +bre vi +mmor pg +g aya +âĸ « +sub titles +j t +disney land +to bago +nh m +groo ve +fi awec +" / +ba o +scra bble +om ni +ff l +um c +si mba +ali er +ter rell +plu me +mi di +dig nit +co c +bru t +ad ata +alche my +d sm +ðŁĺĨ ðŁĺĨ +win try +spa res +cu er +conclu sions +to ys +od or +fl ann +gar vey +scrip tions +inspec tions +cat ap +ang lo +st louis +heim er +at ay +tr ich +en yc +chil ds +vent il +mont p +guiller mo +circu lare +z ell +mode led +craf tsman +al ina +stimul ation +cashe w +ju das +best of +to ire +susp ends +scol lege +real ising +by tes +bloo ds +as si +ðŁĴ ¿ +o hs +ðŁį ĭ +scallo p +ठµ +gi fting +camo gie +wil kes +o zzy +ðŁ¤ ¤ +ver onic +sav oy +deme tri +baby girl +ðŁĺį ðŁĺŃ +so x +cly de +induc tee +count down +self care +ठľ +vi ka +tor re +phd chat +pe ars +aw h +suff rage +le sn +admir ation +mp p +shark week +schul z +santor ini +clo ver +( * +stras bourg +ex iting +so yu +finger print +che a +ãĢ ľ +vin dic +song writers +so a +prou der +nam a += )) +simple st +delici ously +gil les +u q +mn wx +ep p +sh un +ken nel +fall on +ðŁIJ £ +sin d +tra gically +out es +modern ism +co ke +gy n +spi on +âĺ¹ ï¸ı +le am +compress or +apolog ise +twent yon +fan atics +âĻ » +sco tsman +sa wa +ko u +as er +ภļ +welter weight +phen om +twick enham +stri a +p out +ka z +gi am +cd p +ho y +emplo y +red mond +ภĦภ+sm ere +trance family +proto cols +pie ce +lu iz +iter acy +carl s +united states +har med +phd life +ch aw +foot prints +l é +cho ker +z ana +sli pper +eric sson +insul ting +articho ke +advis ing +acquis itions +op or +mut ations +re ar +ॠģ +pod cast +wi ther +kun g +íĺ ¸ +win slow +di apers +ðŁĵ¸ @ +ec ker +col lar +hu ey +gi ro +mono gram +kas ich +si veness +malay si +arom atic +gre s +gali leo +u ji +rob b +dr m +none theless +as a +: > +lo a +l np +at work +ag t +laksh mi +pipel ines +id al +stre l +re all +chain z +stone wall +san sk +ðŁı ´ +pied mont +hoste ss +ci u +t é +analy ses +wil helm +scott y +rw by +mosqu it +use mb +qu ins +ðŁij İ +tu cker +s conf +speci fications +psychi atry +broo kes +s ils +ol af +de to +co di +cli p +fil th +womancrush wednesday +go to +ang erous +be ale +w tc +paneli st +ne x +lar sen +emili o +tab leau +h itters +conce ived +americ ani +or tega +mar di +Ñ ĥ +pain tball +thir sty +new yorker +etis ation +go ss +we aker +u gh +tro ll +har ga +du al +ght ning +at ine +ðŁĺİ ðŁĺİðŁĺİ +cook out +pyrene es +po ss +authent ication +sports wear +yun ho +kir o +archi pel +shen ko +ren der +nov ation +divin ity +ðŁij £ +su fi +humb ling +ge opol +devote es +wait ress +tr ough +py ro +i ba +bl ing +gra f +epilo ts +bt r +of tball +bas king +domin os +so om +r ath +sher yl +qu el +astronom ical +wel d +track list 
+sig nee +slee pless +com man +ch ron +summ on +pure michigan +cri spr +sli p +la gi +ra q +um u +thal ap +char med +scru mp +quad copter +ski p +peter sen +mun i +ðŁĮ ¾ +mon aghan +tra ys +ick ed +canad aday +te gr +ï¿ ½ +hot ness +heavy metal +ab ar +gop debate +az ul +spider man +sun flowers +ľ ë +web comics +bar d +Ð ² +nichol as +slu sh +ram an +mark ham +ffici al +ff ler +íĬ ¸ +ple ss +anush ka +to to +sk aters +pro wrestling +compet es +ay ala +myster y +thr ills +mp g +independ ently +y ul +imper ative +formid able +tire less +st acking +ton gues +mal tese +pot ts +mat ti +char ting +chill out +super nova +ome o +sky sports +nu tty +ðŁĹĵ ï¸ı +ro han +insp ired +concier ge +ser ra +ma kk +gal at +chi pp +ye v +ì £ +reim bur +op ul +kimber ley +i eee +bre men +ch itec +or in +nak u +bon kers +foo ty +emer gence +ðŁĨ ĺ +sti p +serge i +zo ey +ai me +wou ld +dy es +destin y +vinai grette +dri er +circulare conomy +an archi +ss r +sch el +cin er +gro om +determin ing +gar min +cal ais +incarcer ation +bu kit +no i +chelms ford +mckin ley +chi pped +belong ed +tu mors +str oud +mi i +influen za +wwen xt +tun dra +tele communications +cat sofinstagram +t ages +beat ty +o du +ml kday +oo per +dang le +ak ley +cru mb +anti gua +ti mbers +rou hani +ðŁĴª ðŁĴªðŁĴª +ha fi +... !! +w cs +coo p +sn c +lit res +ãĢ Ĭ +ha z +co z +k ant +green field +cur ti +y ale +flye agles +what soever +wor thing +rou lette +flyeagles fly +un da +a inted +stand ing +lusci ous +h pc +effic acy +ash land +me ghan +ky wx +n pr +bath tub +ac os +h ani +mar cor +man tis +da isi +bo ba +ab bie +mu til +vi al +spy der +po z +g ti +el fie +nigh tw +metro id +anton i +mad die +dh ry +dar lings +ten ds +taek wondo +atlan ta +me ow +chlo e +ãĥ İ +ym es +siber ia +k con +gu es +mar iner +fac il +azz le +[ ... +han nover +bav aria +vir go +te uk +u sps +) # +wall a +sam pson +need less +ver bally +hay ley +bow led +pi us +lam pard +ham string +vol vo +road safety +cho king +sor bet +a hem +healthy food +brai ded +horticul ture +cr ative +che ek +ad do +the force +ko ko +schiz oph +j ie +w ada +twentyon epilots +h bcu +pro ton +pau ls +lou isa +lat am +kyr gy +com pac +sd k +sap i +?? ? 
+liber alism +ep silon +ai den +w usa +spra yed +baske tball +kim ono +blue wave +ali as +ë§ Ī +mug shot +ce c +do gre +ad ora +ðŁĵ· @ +kra kow +intrigu ed +exhau sting +astron omer +ven ison +lady bug +ci v +bra e +us m +bri be +acup uncture +pembro ke +ke ating +chi e +y ad +t si +sm i +see ding +gate shead +lis boa +gy p +canv ass +ðŁĶ´ âļªï¸ı +op i +ni r +soci etal +ly te +ati es +c sm +ar tery +al in +aka poor +abstr acts +â̦ â̦ +teen wolf +ne we +travel gram +sentim ental +per ched +han del +ho ek +f ay +coordin ating +anim ate +man ian +effor t +jer ky +f ck +adri enne +ma bly +tra ding +my el +spi ro +sol a +stor ing +over drive +monday morning +dream team +pul se +bon di +ber nie +pgat our +tri poli +son am +plat t +âļ ¡ +ag roup +îIJ Ĵ +inv ading +v cu +k ell +ñ os +un dead +pod casting +mercede sam +mana fort +cor tex +que so +impecc able +pal mer +wil doz +sport sc +guacam ole +dispen ser +cate gori +stun ts +per il +invit ations +dune din +xi e +achi eves +saf er +pre ds +ph an +knuck les +k ak +igno res +lovemy job +aru ba +ound ation +datac enter +co vert +gr ing +cou ple +ا ر +vol i +mc cle +arti sans +lu do +kal am +arom a +under taker +hu la +wiz kid +gu mb +god frey +bakers field +ker n +engine er +car ve +pal in +guaran tees +pe bbles +b ays +zi eg +fin k +â¬ĩï¸ı â¬ĩï¸ı +down pours +ro chelle +rasp berry +ðŁĺ ® +gra phies +stom p +caf es +ari zed +utt ar +cal vary +dri e +crusad er +bus an +tux edo +si u +seam us +cul tured +blan chard +town house +ge red +butter milk +flu ctu +roger federer +hel i +ðŁ¦ ĥ +u ous +ram esh +mu ppets +email marketing +ye ss +br ice +ri zio +pel o +donnein arte +u rable +inve stin +bump ing +raji v +sav a +thro wer +fore x +o hhhh +th rust +pull man +r fid +sep sis +le ed +fri ght +roun ding +ne b +ph ins +ai sha +utili zing +squ ats +gold smith +j ic +bo ks +vau s +i po +exclu sion +tari ff +po kes +min al +land s +en force +washington dc +or char +g x +mar ys +ey our +aussi e +bak ers +un popular +latin os +lar ge +pu tnam +bol o +wa de +pel o +di zz +ob struction +fla ppy +weare the +depend ence +pajam a +e te +y ann +e wan +disc la +a ay +kar ina +e ic +an trim +w soc +neg atively +kai do +fotogra fia +dh ru +colo ssal +mcle od +k wang +mani pu +ex hilar +us atoday +summer slam +co les +tapro om +unbeat able +de ma +tic ks +k ling +fil s +campaig ners +ภķ +brew ster +audu bon +qu ay +ch s +ki gali +d ler +strength ens +som al +sign ingday +gol ds +pig ment +orche stral +g q +lin kin +ðŁı ĩ +ta w +algar ve +ho v +ear le +gold fish +am ig +ex er +ben in +dru id +ðŁIJ ¸ +she m +quat tro +mer cen +men te +incorpor ating +bon anza +state fair +en de +concep tions +e es +âĻ¥ï¸ı âĻ¥ï¸ı +d son +fire arm +orb ital +we h +multi p +fo b +requi em +p light +thou se +sa id +oc re +remem brance +n old +chi pping +be v +er t +ca thy +sy m +ri ggs +m ley +dialo gues +sl ender +how l +gau teng +wd w +to bi +smo kes +im plo +b pm +ad n +mom basa +cap sul +bloom field +artic ul +cle o +goog led +flu ffy +l ard +en zyme +ve sti +ibra hi +fl ame +e mea +out ages +dispro por +ble ak +an sel +ick er +st louis +stock market +good friday +sau lt +stal led +pro m +ep som +b é +the se +sau ces +me w +lit fest +pre d +re u +kar ak +si enna +ell in +bio technology +ï¸ıâĥ£ - +tac tic +sa in +por k +mon za +ka j +lu sh +compart ment +chang ing +shraddha kapoor +fo al +ar tem +cu ando +can ola +ori ente +me sse +d ited +br c +box er +bbc two +s st +ment day +em ing +de wey +kof i +âŀĸâŀĸ âŀĸâŀĸ +reali zation +smo l +tw ood +san je +flag staff +ber wick +cor set +can 
ary +whistle blower +et ched +com posing +squee zed +bow er +auto desk +ne h +mathi eu +ba ja +Å Ĥ +hy dra +da im +am eri +insi sted +mer lot +gar ros +heart news +gaine sville +cut ler +bo de +ðŁĺī ðŁĺī +lew es +scoun try +g sa +us u +cc m +god awgs +phara oh +cra e +mor ley +hyp noti +f ades +neur ons +fu zz +ing co +high landers +star k +vig ne +pac kets +amar illo +reu ben +insul ts +bas ic +vec tor +n me +ac ruz +tro s +transm itter +ðŁĺ ŀ +interpre t +ðŁĺ ² +pre quel +mc gowan +dis semin +ðŁĴĺ ðŁĴĺ +mascul inity +indie gamedev +ali ve +te t +pe tal +ema iled +ar med +ko o +he er +ba ird +super junior +metro polis +delav in +decl ines +stit utes +Û ģ +p tbo +g lan +cho res +e aling +chri ssy +ste mc +vi an +assassin ated +pron ounce +illeg als +discover y +cav ill +fri fotos +f al +so i +sabot age +t int +p dc +ðŁİīðŁİ Ī +ãĤ Ĭãģ +ji o +endeav or +in sig +commit tees +she arer +me tz +mar rying +h dd +g by +fre t +tri sh +pu l +scrip ted +sa ki +l w +ke ye +shim i +nan aimo +ca h +à « +tem pered +ici an +du gg +dish washer +air field +s rugby +gr inch +y st +r ms +mahat ma +lan kan +disc ar +dige stion +no des +l ls +om ic +gu tter +tis garh +feder ico +election day +bo he +master card +fire ball +âľ Ķï¸ı +oy ster +p ong +do k +en route +m vc +beat the +ali stair +shu b +sh aming +cherno byl +ghi bli +the s +pin ion +d bs +sal ts +ic tion +epi ph +nc pol +in convenience +whit ley +inspec ting +wood ley +wi ener +skil let +no les +m ca +h ina +a sha +willing ness +well ness +tam ed +show time +dis advantaged +ber nat +us n +mission aries +coun selling +arrog ant +quant itative +leg alization +ho dge +energye fficiency +cameron dallas +pos sessions +p bb +harris burg +v g +hindu ism +happy thanksgiving +fi b +re acting +tweeta picture +pol iti +mu ppet +hur rah +pac e +coast guard +guar ded +as am +par ry +fore very +x q +oom f +ke anu +j ind +ri st +customer service +sac red +ðŁĺ º +ton er +occur rence +mat u +val dez +red d +is ak +power rangers +pe asant +raj ini +abra ham +e mil +car do +tr il +hair styles +obsole te +sam pler +direc tive +delavin kisses +ver ton +glo s +sp ay +paler mo +com ets +man ziel +chicag of +ski pped +pic torial +h ant +b mi +a ol +re opens +pad dling +devo s +fra ud +bas eline +que ues +sp ired +sn are +eu ve +descri ptions +daisi es +ca ching +gall eria +tri mmed +stin o +recy cla +ic ular +bir ken +raw lings +fli x +chic as +b gt +lik eli +argy ll +thel ove +ga ston +bl anca +ha k +f one +sailor moon +h aci +ima c +fl yn +de can +bel les +ap ic +zo g +taun ton +con stance +lasag na +ker nel +in ka +har bor +collec tively +calcul ated +av ille +shil pa +pur du +gi mm +fun er +a est +pembroke shire +nighting ale +n unes +hyper tension +hu bert +sli ders +infer tility +comm ended +transat lantic +metr ical +!! 
@ +Å Ł +ss g +bac ca +inver ted +fun factfriday +it ans +albu m +acqu ainted +ri er +whel an +sar ab +mu e +snoo ze +pi ff +agre eing +sp itting +jer maine +n ye +âľı ï¸ı +am bush +ze ph +con greg +univers ity +s app +wann abe +pat rice +ib d +do glo +fri dges +sun d +king ston +ar gon +kam en +hardro ck +ds ley +do lores +ì ° +ota ku +pi ping +be having +âŃIJï¸ıâŃIJï¸ı âŃIJï¸ı +blue bird +an sari +teapo t +fire work +cro p +log ans +ty ped +thick ness +ig ers +c fp +dys functional +contra sting +et ty +aston martin +tx st +dra grace +at tributes +marath on +manu scripts +john stone +ðŁĺ± ðŁĺ± +bo er +ay u +aru gula +poo rest +con du +assu mption +anag h +no h +delav in +sit ter +g ö +mor ow +kick start +com i +gl acial +ghe ad +ba in +ker shaw +en dof +fre ud +om at +i af +hu g +sign up +each other +defin ite +tu bing +shak ira +ðŁijı ðŁı½ +uu uu +sw in +sham bles +ol as +sk ell +brit ain +kn w +clu tter +om y +j ens +hang ed +city scape +scra ps +un locking +dead liest +er no +breast cancer +a it +inspec t +fu ri +ðŁĴ Į +ku d +ju le +or ah +mi ds +m dt +bur gring +r attle +pu sa +stal k +cle ans +iss ance +z ek +worth it +nam eis +musko ka +council man +urban art +bar rac +un solved +tu l +g ita +white board +soy beans +em ent +cont i +saturday motivation +conveni ently +doc king +t ado +âı © +sp ino +puppy love +po f +fabric ated +robb ers +adop ts +ti fied +kk r +indulg ence +notic eable +macqu arie +chap el +sensu al +ki ko +melan oma +lore tta +li ance +ab en +sp lus +ga al +ac ele +lib dems +compar isons +ðŁĮ µ +rhy thms +mer y +en capsul +nap ier +ðŁijĮ ðŁijĮðŁijĮ +ðŁij IJ +plat z +fre sno +re formed +ran bir +el it +the best +bhu shan +vin nie +impro vised +s ittin +re created +e ba +ec ker +ac rob +pon te +cor d +gi ddy +eur usd +fe ver +intu ition +gar i +dum mies +bud weiser +amend ments +te tra +sch nit +ay as +mar ys +ci st +k ani +ker mit +ðŁĺ±ðŁĺ± ðŁĺ± +tin ker +strol ling +di visional +niger i +omin ous +menstru al +kar ab +k hy +bw fc +pan handle +l illi +well er +stra pped +son the +transfer ring +ethe real +sne aks +ru dol +gab les +jac king +cin code +for tune +canadi ens +con for +ab normal +frank lin +tit a +mu la +persi st +cu ties +ki el +ðŁĩ± ðŁĩ +her mann +aw k +fi asco +ko to +we ta +hi ker +budd y +preven tive +mcgra w +game boy +forsy th +top shop +si ob +sad h +in tram +follow art +so aps +dragon ball +ou x +morri son +๠ĥ +lu bric +adul thood +morri sons +âļ łï¸ı +her mo +ta ka +stall one +mis use +team gb +ra gha +con fined +at y +hom ophobic +nw o +sky news +ho ya +ac rosse +wi iu +pur ée +jed dah +ðŁ¤ § +advis ers +ph ine +an is +scrump tious +ë° ķ +c ke +vin y +ter m +s dc +o do +home school +vas c +leop ards +debor ah +illic it +cur ran +as roma +nau ght +mar ig +brand i +em p +ðŁĺį ðŁijĮ +î Į +su spend +lu z +initi ation +sch aft +jensen ackles +craw ler +post doc +des ks +trail blazer +den omin +tri x +no ise +po et +± ï¸ı +s mug +vol atile +proof s +pharmac ist +sardin ia +mash able +kim chi +co ed +schal ke +doo dled +c sw +sh ur +ro x +do k +chris brown +mathemat ician +ab ound +ang elic +rock ford +d ole +yor kers +ms n +g man +xavi er +bor rowing +mark ings +longh orn +k ja +diver ted +mm it +euph oria +ay yy +te a +pa h +ck i +un cut +li ven +ky ung +fan art +mer ing +red ding +amo vie +gri di +c thulhu +schol arly +ju dah +th bewithyou +eu calyp +ðŁIJ ķ +hert fordshire +cour troom +by u +auc tioned +ple ase +mar cia +ê° ĵ +succe eded +el as +arvin d +t lot +saig on +re tt +ra kesh +fd ny +as en +se bring +gladi ators +you know +v lad +gol a +par 
ap +ÑĢ Ð¸ +sab cnews +one team +oh l +sun e +ri j +cd c +star gate +run down +plat o +ph c +chat ter +ra viol +mn f +mand ala +li et +ภķ +mari a +hun gover +consoli dation +fer rell +tradition al +ilove art +gal ap +ðŁı Į +que zon +espa ña +ðŁĩ¨ðŁĩ Ń +ho bby +steam boat +mali gn +guil lau +pro hi +its me +íĥ Ģ +in scription +al z +mari an +k ade +mm on +adju sting +ne sts +intern ally +ci r +vik ram +mal ala +k ph +fel icia +the real +cap tivity +at is +marcor ubio +kale ido +che v +mano j +le more +gent ri +vi ps +tro pe +" âĢĶ +pair ings +mal nutrition +fr ay +desig nation +brun omars +az e +tor rential +pan zer +ga il +under the +the ological +schizoph re +dazz le +freder ic +mo par +ad illa +so ggy +ra un +medi ocre +colo rec +i fe +p inst +blu ef + ² +world water +gir oud +clar inet +ad olf +tar antino +receip ts +assu mp +ðŁij Ł +coffe es +âľĬ ðŁı¾ +du plex +s of +r x +lin o +timber wolves +pan dit +mo tm +e ga +ay ama +ach s +outsi der +ll en +co er +til ly +cheese burger +ma ds +ple dis +emp ty +national parks +az iz +p mi +jun kies +f ener +sq n +è s +gener ation +cleop atra +bhuban es +mosqu es +ty free +popp ins +tw c +or well +n age +ka whi +hol low +dal ai +¨¨ ¨¨ +ou ro +m health +gi on +az o +vis as +reneg ade +re ic +w sop +ðŁĴļ ðŁĴĽ +e chel +tox icity +mü n +bun k +stimul ating +asth our +\ ' +ep h +ende mic +cn bc +shrin king +peabo dy +michel angelo +can yon +wal e +su mi +si ders +inu it +? . +profession alism +dr acing +plat oon +p ons +out bound +maple leafs +de sol +cen cy +a than +ver ma +ru bbing +ok an +ðŁij ł +mull ins +authent ic +Å į +alman ac +ga ia +bb q +on imo +ke h +ty a +tou ts +y av +re posit +, . +wi ght +se eyou +cal lof +done sia +bar gaining +gr anth +sd su +amphi theater +p su +re watching +wine tasting +peak district +dete cting +thur man +phe e +èª ķ +u mich +re r +sculp ted +go le +name sake +ðŁĶ ģ +serv icing +bau gh +pu gh +pen cil +dar th +munch kin +at orium +ten ers +sun y +rolling stones +mag ing +star rer +i dris +fe instein +ag ron +âĺºï¸ı âĺºï¸ı +supervis ed +chamele on +aggre gate +succe ssive +mo gul +inst yle +pol dark +custom e +ohio state +ha ya +ci des +broker age +angel ou +fifa wwc +de forestation +al ton +pam ph +hu gged +ho bo +change able +ku ber +bur roughs +demon etisation +cape cod +vers atility +or ice +le ila +womenin science +tu a +he dges +embarrass ment +ali fe +so ars +ni ghter +hy mn +gi pp +chas u +tech s +ni all +k illa +hi ka +cam els +valu e + ¢ +sc oops +mah moud +clu sive +adri ana +pac o +oz il +un as +transl ations +whispe rer +s bi +bu xton +bio tics +indi ffe +ken ney +k lar +et ching +barra best +inst ability +se ine +vo tel +blo gged +whis key +my space +t ant +lan dia +give back +illu s +aw ak +ac ab +f bloggers +cloud computing +blat ant +syri ans +band ra +sty n +an em +ke ted +kar thik +barun sob +pin ot +gu bernat +gay e +arti ste +i fied +conven tions +hu an +geni uses +eeee ee +fol ly +somer ville +pride month +ðŁĩºðŁĩ¸ ðŁĩºðŁĩ¸ +chemo therapy +paul s +bak ar +ìĦ¸ë¸ IJ +taiwan ese +fol lo +c ss +re ign +nn nn +fla un +catastro phe +iti es +frag ments +extre mists +ym oun +car men +eze kiel +conne cting +se h +man ta +remodel ing +we ymouth +at oms +ce m +ne well +lu mi +the open +mo c +mili band +g land +z shq +mag gie +mani acs +m sp +ad y +cre ams +le anne +e sta +py g +af finity +pray er +dun bar +ligh troom +ac adi +wyn onna +roman tic +state dept +sick le +wh os +lam o +et our +fin ity +shru b +shar pen +pun dit +ed on +af ore +mar s +jeff ery +ter ps +medal list +kath arine +accu sing +ta z 
+roy d +from home +confron tation +alle gh +ðŁijī ðŁijī +refresh er +ran veer +never land +jo jo +lu crative +en am +ca ver +pa edi +man jaro +flu ids +the ssal +oppre ssed +mu ss +joh anna +Ø ® +cn g +buil dthe +sett les +s ith +fu ego +cl amp +ar ag +pay er +ted x +mand y +inter stellar +fr c +ch and +b cc +mo lo +len til +johan sson +grims by +nature lovers +ðŁļ¨ ðŁļ¨ðŁļ¨ +shin de +x in +international dayof +transiti onal +sat a +cad dy +wo d +if u +ha ys +holl yo +j ang +ir c +co im +grad able +" " +ðŁį ´ +ঠ¾ +a el +n yo +west lake +time out +sof i +phenom ena +cultiv ation +ag no +un armed +so t +con j +gen o +royal navy +nutriti on +fair mont +ti relessly +sn g +re ty +mic a +lu cent +slo ane +droo l +riz al +od ell +critici zed +. '" +la ze +deser ted +co der +pra s +l illian +itiner ary +dav y +an ap +whi pping +hobo ken +kare ena +çľ Ł +vi us +ter n +nan tucket +mis understood +bu laga +st ant +chin ook +z am +reli es +d ss +ed mond +sket chy +m ell +fe x +rec tor +dist ill +day dream +wine maker +ri pley +billion aires +hel ene +ati f +cul prit +bertr and +wou ldnt +ma pped +v ak +gla dly +parliam ent +kidlit art +ware ness +goli ath +âĨ ĵ +view point +tat ted +fu ls +dor sey +ang lers +li ds +ki ya +bow les +be h +b ite +compati bility +ance stral +pro x +beha ved +gubernat orial +ch field +sab an +z h +teen y +shibu ya +holli day +pan cy +âĿĦï¸ı âĿĦï¸ı +seun gri +? , +ðŁĩ¦ ðŁĩ· +im itation +impac tful +any i +gene vie +añ os +bate man +gli der +af ar +ra sheed +effor tless +sh war +dach sh +er un +at os +kin i +ch d +kha ki +k lin +felici dades +bel o +as l +to ppers +fin ley +stac ey +rigor ous +kar ting +le ppard +car michael +be ret +c se +ak hi +mer ingue +ab an +ha ke +ger i +er jee +re sto +comm anders +pr it +fl or +ad ven +ex termin +remain der +å IJ +es g +martin o +lulla by +| @ +mi gn +in store +big bang +cor di +cau ley +ante bellum +dg ate +cro ck +span dex +scaf folding +ore os +ê°ĵ ìĦ¸ë¸IJ +pom ona +ma uro +uni versi +re mi +af ootball +t ant +sm alls +ne h +worl do +tropic al +mor ph +jav elin +gla r +arqu itec +reminis cent +tu bs +spide y +make u +syl la +progressi ves +blo t +shor ten +keep in +ch ak +ang st +super food +decad ent +ston y +neuro logical +ar boretum +ann ak +fe ma +per cu +dis respectful +small biz +lo x +co om +c sc +bs bi +pre valence +him ss +esp an +mo ga +fr ampton +sky map +mas se +levi athan +( ). +noctur nal +car ameli +ang or +amne sia +outsi ders +she alth +rhin o +ant ag +ag io +ðŁĴ° ðŁĴ° +take me +kab addi +c si +m sh +coch rane +thessal oni +sil a +ha us +du sting +obe se +mack lemore +mani sh +len in +m dc +gro wn +shef field +s rs +ke le +car son +ch um +dah lia +can tore +opp o +how ling +cyber crime +sur realism +sc ran +fa iz +thre n +rac ists +r out +pk not +se mana +sin i +mc cull +ma chi +alfon so +y b +sar dar +kend rick +den g +reci pro +on f +doom sday +bri bery +custom iz +art is +c pi +ðŁĻĪ ðŁĻĪ +sla va +let te +en s +âĿ¤ï¸ı ðŁĺĺ +cra yon +ad an +tr c +migr ate +simp son +row ers +king sley +farmers market +shee han +ne phe +bor non +car ton +mic key +all ure +u lu +sli pknot +heb do +gui do +dog celebration +online marketing +acceler ating +) .. 
+origin ated +macar oni +ed tech +out field +mit z +disc us +adverti ser +man or +ha shi +descri p +cap ita +ful bright +recep tor +con n +con ey +spion age +r attle +pre st +u li +blog post +acker ay +) â̦ +red velvet +mat th +inspir ing +b sd +ker ri +po con +mil lar +re pur +accent ure +ä ¹ +ram bo +ragnar ok +dele ting +british museum +pat ory +leip zig +flori an +sci fi +in ers +br ate +yo y +melis sa +ab er +ma sa +po te +mosquit oes +transpl ant +r pa +; )) +bast ille +yl an +joye ux +melo dic +cap tions +atri st +roch dale +gott i +pew die +cuties aturday +who is +aqu aculture +tiv a +sp el +he ss +ha ji +fred die +co per +brand o +v k +photo book +* , +my dayin +micha ela +brune i +sr ini +in te +Ä ± +de ol +d fc +separ ately +bun d +ve sts +to c +me ck +rein forced +constra ints +car roll +sq ft +re ver +cam per +bird man +in action +gener ators +triumph ant +pe sts +o vo +gy pt +al amo +sc aled +suresh pp +sd n +is mo +gi os +) @ +justic eleague +restaur ant +gab i +den gue +next gen +exemp li +ap ex +inspir ational +down side +kid z +u pl +et na +alvar o +fel dman +bar net +m ha +es ch +bloo ded +>>>> >>>> +kan i +ho fficial +casablanc a +bir ds +ty ga +sw amp +o day +new castle +nb ap +ci sion +cho ols +af lo +ne p +mon ton +ak b +super model +down time +th os +sc wx +snoo py +ag greg +yo ke +nor cal +we tt +prolon ged +me tast +beat er +f ta +t lap +disgu sted +y h +voice over +itch y +ip c +ðŁİ ¾ +phe asant +stra its +ram pant +j g +fer til +assu res +fortun es +sal inas +liz ards +kett le +i bs +cyn thi +he g +mc cr +soccer oos +happen ings +cor den +ðŁĺĤ ðŁijĮ +t ches +egre t +wolver ines +congratul ated +ho gg +bott ling +wr i +fer ri +bo sch +af ire +og den +s jo +j dm +sv t +con tex +tol lywood +min k +me se +super sonic +op oulos +å ¸ +âĶ ģ +knuck le +gu ise +gam i +chu cky +z inger +radi al +compla ined +bo da +fe tal +discipl ines +cor ro +ðŁĩ®ðŁĩ ¹ +op ted +filtr ation +ad nan +em cee +mi stre +insom ni +fer gus +tra jec +on don +med tech +tanger ine +madra s +gru e +cab s +z hu +sureshpp rabhu +insul ated +day swild +pp m +band ai +v day +s ff +squ id +lo thing +not dead +expre ssive +cu ll +ala stair +x u +up front +fish ers +en es +um d +dis missal +sti er +sel s +lu st +re active +prote ster +eyel ashes +al im +goo de +gre eng +da ir +com pen +anush ka +proto typing +ma pu +bear ings +ðŁIJ Ł +for me +bsbi botany +timo thy +out skirts +am bed +are tha +wend ell +stre aks +ni m +k pk +sne e +fit ter +quo ta +p ate +win ning +ðŁį Ń +sho pping +ma inst +cul ver +ste vie +mcfad den +counter parts +gren fell +fol som +dor set +tech crunch +⬠ħï¸ı +tip tuesday +us l +tre x +geor gie +ranveer official +lic ks +se wn +k f +' â̦ +jap s +p ate +orth op +fe sta +stra s +mon tal +hammer smith +fore most +wido ws +mad re +ite z +mito chondri +lig ans +z ona +cari bou +m ss +andre i +weather channel +gh c +: ... 
+ta ft +awe ather +al isation +bru tal +bliss ful +nik ola +mal icious +q m +mpg vip +bro die +bl itz +applau d +dri bb +v ague +dog go +transl ating +interpre ted +hat ched +ge tyour +benefici aries +spar ring +caes ars +aw illiams +la hat +bro ke +ti mp +virtu es +rel ying +pie tro +k tn +ici sts +pab lo +lou i +a ag +pn pp +cha st +pul ses +fini sh +usair force +type writer +thomp son +dog s +ut to +ãģ į +sand al +new ly +do ge +z w +wan kers +ne gr +mu cha +determin es +black fish +sk unk +mu ps +instru ment +phy to +daysto go +skin ned +hai der +con ten +ðŁIJ¾ ðŁIJ¾ +we iler +undoub tedly +chair ing +wall is +sh ard +zind abad +adul t +absor ption +pre sto +deplo ying +drum mond +battle front +seag ulls +how dy +juda ism +des de +part ition +âľ Ŀ +no logy +national bestfriend +lesn ar +film fare +co asts +christen sen +ac an +mb u +co pped +ru bble +sw c +fun nier +far ther +where as +nano technology +with stand +pil low +bow ers +to pe +it ly +con fit +ma kar +comfor ts +bo sh +cli pper +bal la +sti k +mil b +safe guard +musi que +eas port +ya z +pad ded +bad er +fore ign +chop in +archi ve +o ka +tran sporting +tml talk +aj it +consequ ence +sc roo +ff o +collabor ated +pug chat +ye mi +jav ed +au burn +o of +ma w +sau cer +miti gate +i les +evangeli st +ter ie +re cl +indic tment +cat a +bright ness +may the +whim sical +un lv +key word +cu min +med way +west world +tra w +im posing +form ity +coul ter +ab z +ny pd +grass i +kel sey +qld pol +clock work +f dr +di anne +âĺ ij +ad h +p ann +bra vely +ae ge +un lawful +ver di +pocaly pse +phar o +kar la +reson ance +ma stiff +la dak +bu u +ma iled +hi i +craw ley +tor rent +mach ado +liby an +effort lessly +fal sely +q vist +ke ef +craf thour +cheri shed +val kyrie +s ari +kal amaz +be he +ðŁĮ Ļ +th im +ro ddy +col trane +but chers +ach im +wk end +awk ward +cab rera +:) ))) +fran c +decl an +con dos +a ja +pandor amusic +char ter +ph ill +mon trose +hatch back +handic app +gre aves +eucalyp tus +ut most +t son +bur ton +mid wives +in cur +ðŁĺį # +moo d +compre ssed +tom a +must ang +mo g +as ana +te stic +sho tel +in sol +cor sair +nh q +ben ny +sm ma +kap ur +in con +jon as +ener gies +don al +as ad +se z +n pa +archi ved +stimul ate +do p +hy d +gri eving +ãĥ Ī +ron a +why te +tree house +ss ell +sand ro +ko bo +ther most +se clu +hi ya +ge ez +mam as +prisc illa +flav oured +fas s +w old +maker space +cospla y +p tv +happy valentinesday +sequo ia +love craft +gu an +d tm +ci i +yoko hama +pos thum +re q +ðŁĶµ âļªï¸ı +galat asar +dol by +hamp tons +disturb ance +stone henge +ok c +disrup ting +month sary +jun gle +head lights +du stin +micro sof +happy mothersday +ko ko +gra zi +te sto +na idu +mal ay +ari al +ru mb +ab oo +har man +tra pe +spo ils +je ho +go dly +lock screen +z un +pi ous +ma gento +l enders +prob able +corpor al +m our +aw al +su a +call me +ton ne +go vin +devast ation +x j +gear box +war lock +per me +it ate +gaza underattack +du val +paras ite +clement e +le th +i va +fro zen +tho les +to bin +cair n +s ill +luc kiest +conver ts +st ale +pan cra +euro pale +wis dom +sch ur +ì ¶ +verti go +bi j +u bc +nu re +righte ousness +mt c +factor y +ver st +revers ed +hur i +hee chul +fab er +ar r +ul ous +ven om +ph at +green ery +bra dy +à ¦ +: (( +never giveup +di sha +mo ta +health care +dun ham +dex po +den zel +bb ins +f ics +wh am +mc g +eli an +wat a +str alia +tel lu +pe sky +spin off +ar moured +re acted +do fficial +te du +sag ar +mor ally +paralle led +fi os +dow ner +dau gh +re do +world cup +tari q +bar ne 
+glaci ers +oc cult +barbar ian +her mosa +!! !) +y ur +inter nation +p ss +sit u +p int +american air +sw am +dopp ler +ðŁĴĻ ðŁĴľ +cincode mayo +le van +hell enic +mc ne +ju di +yu h +st x +qu are +ðŁĺĤ . +sti g +g els +mot ley +hard work +euro zone +e ad +ç¥ Ń +seab ir +ci us +la id +alpac a +presu mably +pewdie pie +boo ted +am ari +tam ine +sol ace +bar row +acade mies +x ian +om ination +dun geons +b ma +de ity +ai k +stab il +hir a +affection ate +ving ne +new port +ãħĭ ãħĭ +thir ds +re tains +aroma therapy +ski er +ni ma +do pe +cr inge +con domin +to or +anim ator +sar aj +seas cape +minim alism +lake shore +calla way +berg man +à¤ Ĺ +whisp ering +stupi d +ri ghtful +requ is +ir n +se va +ut pol +tuber culo +squ ish +de but +govern mental +christ ine +all man +weap on +s ito +bur i +lo lita +leaf y +fu ch +tin ted +mck en +a hahaha +ðŁĩµðŁĩ ¹ +repe al +ne gan +ðŁķ Ĭ +tail gating +game insight +ðŁıŁ ï¸ı +yaku za +z t +ti ring +pro posing +bow lers +tra itors +ak shi +cler gy +cit o +up sets +tu scal +symph onic +sil ently +shu ff +black well +ðŁĺĤ ) +ko be +rober to +ri dg +dc u +mer ino +ft p +east side +. ~ +nb l +mn leg +ts for +frau dul +ca pping +in my +gymna st +ston es +ss in +twe aks +shag gy +oak land +dem sin +sang ria +mm va +hen nessy +down ton +ri ghtly +in it +aga ve +ob last +northe ast +friend ship +dal a +tro phy +ðŁij ½ +mag in +margar itas +ê · +ww fc +fa sh +di ke +cu d +char t +ðŁij ® +refuge es +jop lin +n cs +imp y +firm ware +pas cu +flam in +health tech +bell letstalk +w aka +ol ls +la go +co wan +bombar dier +sh ome +ðŁĻ ħ +mc master +na ve +well s +u ta +tell ers +mis fits +kap il +face off +af firm +a pro +whit epaper +super yacht +speci mens +al located +... , +- __ +ka w +dachsh und +djo ker +s work +qui ere +or um +ðŁIJ ł +som m +c mt +ingh our +skin ny +lgb ti +gi ggles +break away +resear ched +par ity +my al +ms l +re tained +si vity +make inindia +sol ves +defam ation +wal tham +sri racha +road way +concep tu +al in +iw ant +å Ī +del ft +tender loin +ga ins +faul ts +sw ire +st ellen +pol lo +dy ne +bornon thisday +asdf ghj +sq l +sali m +advis es +vo ip +ìĹij ìĨ +un touched +she il +ontari o +uph ill +so bre +de shi +nov ella +du tton +craw fish +ا٠Ĩ +ma a +tw ine +kal in +ðŁĩµðŁĩ Ń +ye ss +brook s +hoo siers +ton ka +umbrel las +ay ers +ate am +acqu iring +su ction +ä n +wi es +tari ans +soci o +mat tb +shepher ds +o so +charity tuesday +s logans +ninj as +al bat +by te +bash ir +trampol ine +mydayin la +i ja +bas el +ror y +gol die +fi rec +un noticed +pecu liar +sch a +ker son +mour ns +liquid ity +qu ipment +hi bs +ar s +aeron au +slide show +sla bs +delici ousness +sk itchen +hta fc +full erton +cre ighton +aer ob +procrastin ation +az ores +white hall +uss occer +medi ation +djoker nole +and me +um en +noxi ous +jo ss +ili fe +anni vers +sudan ese +et res +under mine +whole foods +diso be +kor i +ade le +eli z +can ti +al on +gymna sium +sarko die +meteoro logist +yl de +ste en +stamp collecting +nas al +lo tt +fran ks +ex ol +ack i +good year +animal rights +y les +vio lets +mm es +s thel +ra pping +tu scan +wai ver +tur ner +eat local +northe asthour +anim ations +tom morow +t sh +ff ame +bra e +pe tron +glam our +br yn +d cs +bal es +ðŁĶ ¶ +bro v +bre v +b ons +physi que +car ne +x e +elix ir +vol ved +l oma +ìľ ł +æ ĺ +van u +ri gs +bal ance +va res +bon ita +sprink le +perfec to +di on +le ak +calcu tta +o ba +d ma +c mon +tun er +pneu monia +bo gus +apolo ge +cl ough +bor ne +)) )) +revi ved +o varian +ner f +c legg +fan fest +cho u 
+reali zes +mc n +li gu +leg alize +just saying +for ster +bo sni +k hi +in dom +hei del +en cryp +si ss +ed di +mar bles +brisban e +y ing +pre paid +wal sall +cooper ate +orche str +mar isa +ho wie +che wy +bren ner +andro meda +e gan +sto cki +cav endish +ag an +ban o +de ir +go g +bl k +re thinking +ch ig +rhe u +sni p +p eng +semin ole +m swx +an nex +lyn da +lewisham ilton +cu mul +tb l +dolph in +agu ero +........ .... +pre lude +at our +gr anger +too ting +ro tun +dis ar +home items +da res +**** **** +ðŁij Ĩ +compre h +jin x +as well +iri e +circul ating +ðŁIJ ¥ +over board +cultiv ate +rhe tt +oriente ering +ca k +bal kans +s itt +jas min +britney spears +ro tor +se aling +g bc +oc ci +f as +eman cip +com er +war time +tic kle +son ny +pac es +log g +at rix +sr p +g win +do bbs +uz be +the wanted +dru sh +ex tru +m icky +honore es +dar win +re dux +mm j +ram i +jalape ño +io c +do ver +ju ju +whit ney +s eng +en ly +au ch +archipel ago +vigil ant +man gal +wil dest +parano id +hal i +bb ly +sanc tioned +real ms +con co +u ddin +c sk +play time +libr a +sav ag +oc tane +rec tan +re turn +par rish +mor rha +cc p +c mu +sa iled +se vent +ro sie +pil ing +he w +boar ded +seg ments +neph ro +( . +cr ats +bak es +ðŁį ¸ +back tothe +sibl ing +kirk land +ke o +gu wa +bre ads +ðŁĺľ ðŁĺľ +t q +haras sed +ga u +wil bur +j isoo +ep er +li sam +tri ppin +sh ino +ru kh +beast mode +cho a +inst aweather +rich land +gar i +fe z +cowboy snation +fur suit +k run +a en +sycam ore +se gun +ent ennial +di h +o ax +demsin philly +ðŁĻ Ģ +sn hl +pen nies +pass words +ma kin +ty e +d eng +kni gh +jeep life +hel pline +a for +zz zz +ste amy +pic ker +iter ate +happen ingnow +ki b +bloom berg +martyr dom +bul ly +assor tment +a hora +zo e +no i +illu stri +agar wal +p sc +electr onica +recruit er +gar diner +rad ha +naf ta +dot net +pi ero +geor g +bel s +ðŁĺĤ ðŁĺį +tuberculo sis +run nin +mor is +haul ing +ev oc +bre thren +sha ir +frame works +a stu +ri gid +ku ma +kre me +jin nah +insu rers +ny u +f ere +nol lywood +good vibes +- ... +toi le +sk ril +instaweather pro +cze ch +pa vel +one piece +nike plus +fi let +cav ity +ðŁı½ âĢįâĻĤï¸ı +ðŁİ £ +dra stic +dail ys +siam ese +re bu +oste o +lar k +f re +sh elling +p é +glad ys +ðŁıĢ ðŁıĢ +gusta ve +submer ged +grand stand +att u +won t +f pv +b ley +jon i +ang ames +weigh ted +al ou +ठ¶ +les bians +f j +anni es +am l +dor ia +dav in +be ta +can c +madewith unity +ha j +bad lands +mu l +blu ec +pa wn +cov ington +neuro logy +htt weets +dysle xia +thel ove +ne at +fork lift +autom ate +une ven +monte ss +he in +ha g +rel ics +competiti veness +can elo +mar tens +bullet proof +sk ittles +g ya +pri mo +americ afirst +woo o +abor tions +?? !! 
+ma che +ld ers +rl ly +preli ms +direc t +cour se +swa in +super cell +ec centric +sting ray +ple ts +wil cox +west in +okan agan +kir an +car bo +bomb ings +ra rest +bo h +gaw d +di gg +mo ana +enti rety +en closed +dodge ball +par ton +milky way +at r +thorough bred +re ally +qant as +epiph any +ine e +aero smith +spi eth +ar thro +ell ini +du bu +bra ving +âļ½ âļ½ +re structuring +illumin ate +equ ili +mp i +ash ton +pony tail +ma scots +flat tering +cru m +ast a +à® ° +stranger things +bar nab +ر ÙĬ +make shift +got cha +will am +cho irs +kilom etres +gho sh +eu than +dol ly +un ning +the ar +cre we +w sw +j ace +dis miss +ke an +ho ta +kh at +~ > +thir u +ren dez +hart man +tee ssi +cas ca +z ah +hydr ange +fo d +aw p +mzan si +thick er +nago ya +ne va +sti que +cast el +dam ian +there by +ji ang +ale k +music islife +ra q +calla han +gou ache +somal iland +sean hannity +ra heem +lo se +elo ve +whar ton +rectan gular +illustr ating +har ne +auti sma +scra pped +ell and +decre e +nag pur +ki pp +so re +n md +ma as +gun a +gart ner +bel li +then ight +je on +gendere quality +gi ver +a el +gar ments +ne u +mardi gras +mar sden +ro wer +pollu ted +camer aman +vin od +be asley +cro c +ji u +hollyo aks +anesthe sia +al les +ste ward +lati mes +ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸ ðŁĩºðŁĩ¸ +tic ian +gor ia +come dic +ðŁ¤Ķ ð٤ĶðŁ¤Ķ +nai ve +sli ons +ł Ī +bur glar +ðŁĺŃðŁĺŃ ðŁĺŃðŁĺŃðŁĺŃ +york shi +se ñ +fan boy +lau rel +inci dence +potom ac +rober ta +presi den +pr yor +os bourne +w ku +te me +pal ae +ðŁ¥ º +re boun +itu de +red dish +k hand +coloni alism +north carolina +ðĿ Ĵ +manne quin +lady bird +ta sty +knowledge able +g shore +ðŁĮ Į +à® © +qu aker +salz burg +med alists +chy na +bridesma id +ma ori +ro p +outra ged +in adequate +truck ers +al ana +ìĿ ¼ +ri x +oooo oooo +command ments +lam beth +aa j +eco friendly +bla z +morecam be +boun cy +rou x +rai ded +mi zed +sh c +gaw x +labor atories +ru bs +rest room +consult ations +ca jun +virgin i +so ir +rev ue +ple in +wag er +ç ¹ +we do +growing up +! ðŁĺĬ +face ted +sin ners +ho vering +ti ene +seas oning +an ja +leg go +il is +fla x +dev o +ash ram +mati sse +ker i +go wer +bo tox +mar shes +unh cr +ts m +opti mus +dun i +stu ffs +so k +order ly +n bad +islam ophobia +raviol i +fab er +cre ds +won ka +in fusion +over weight +daily news +assi mil +acol lege +medalli on +kili manjaro +sti ff +tham es +sun ken +th ard +my dubai +hilari ously +han nel +plu mber +fair view +separ ating +rasc al +qui en +necess ities +confeder ation +ll ll +: ] +weak nesses +bron co +ra ffles +el ot +ãĤ¸ ãĥ +advent calendar +ðŁİ ¹ +stra vel +tun ic +k su +im peach +e spionage +! 
- +di ment +cur rant +bio de +commu ting +by ron +ðŁĴĵ ðŁĴĵ +shad ed +tr uro +cray ons +ar ne +h sc +fre aked +dram ati +fle ek +u cd +marl borough +^ - +cross ings +mal o +black ops +bin ance +cho ked +chen ey +pl o +ge stures +val edic +ryan air +rem ington +v cs +mc kee +ec z +be gs +nail art +mayor of +happy fathersday +war t +pet itions +n ingly +clean energy +bro x +sl alom +exist ent +ab ay +ug liest +tom p +stom a +sel by +goal scorer +ben ji +overwhel mingly +lan s +semiconduc tor +south korea +re scheduled +sk yl +en listed +dow ski +si del +rosen berg +nas ser +white head +pri us +har are +en n +ry der +í Ĥ +mon g +clas ico +transpor ter +po tty +is me +** *** +vic e +sk it +ode ssa +l mp +her n +raci ally +pin oy +paragu ay +obitu ary +go es +bu cha +side walks +angu lar +un constitutional +transiti oning +i bu +gu ys +un packing +oooo oo +black girl +ber gs + ¯ +wordof theday +trump train +thunder bolt +m si +fasci sts +ठ¬ +t sk +collap ses +raje sh +loveis love +migr ating +set back +ðŁĺĬ âĿ¤ï¸ı +t els +safety first +nar rated +jae joong +un answered +lique ur +en nes +dal go +bill ings +salt water +mer maids +lon gs +clap ham +we arec +pic collage +n ach +h ace +pois oned +lo th +ag na +adel rey +guar dia +poli shing +peace keeping +d all +p isa +la pland +process ors +de andre +so bs +p once +dra ins +c be +ðŁİ¥ : +spla sh +meat ball +fon tana +worcester shirehour +ne v +bri sk +b int +ac r +po x +cay enne +skril lex +j fc +hahahaha hahaha +gla s +en gul +tempor al +oni zed +con cre +com pose +vibr ations +plant ers +fer t +criticalrole fanart +t bli +sch allenge +huck abee +munici pal +iam bic +radi os +ne vis +dura bility +mc cla +horse back +inst itutes +ful fill +atta ch +ate ur +ak an +resi sting +illumin ation +hand le +hair care +om ent +macle od +ka iser +g no +bear down +ly f +gl omer +distor tion +z m +san k +roo sters +is now +as ports +ag en +wo ken +st george +ro mper +my le +econom ists +ru to +t will +health and +d ito +ws l +tair p +pra kash +mic heal +h ts +w rights +kat su +fioren tina +defen seman +d itch +var sity +texan scheer +ba ham +sc anned +we il +seduc tive +ðŁijį ðŁı½ +fu e +er win +dav ison +ter ran +moo ds +wool f +re source +@ . +cu sh +ðŁį ° +regre ssion +cur led +la zer +jo anne +ab bott +mo z +down ers +mm mmmm +valent ina +k hair +dream t +cro ok +che k +ste aming +nephe ws +cl eric +as ober +indefin itely +w ye +us news +joy ce +flu shing +wynonna earp +ron do +kis s +hot dog +bar ns +sax ophon +far ley +gas p +decre asing +al way +pe x +l sd +shi ft +p outine +ra zz +rescu ing +ni ko +ho ch +cc l +u aap +n ts +m car +il wx +conqu ering +ket tering +stur dy +delay ing +sto k +vani shed +cath ar +bin gham +in v +ic hiro +he mo +budge ting +[... ] +be ss +sebasti an +slow ed +ðĿ ij +musli m +stun s +acton climate +ve a +se ton +rose tta +oun t +hard in +flu id +ca w +ðŁ¥ Ĥ +yach t +un l +sp hy +provoc ative +or ic +is back +__ _ +nicol as +gy an +loo se +fl in +reb ate +: :: +! 
"@ +com icon +she ff +down stream +chic hester +beach life +mom life +diabe te +ar ra +van e +ok u +ye o +man go +try out +app ell +he irs +arjun a +dd u +na veen +movi c +soci alists +s back +criteri on +soyu z +k her +da z +yol anda +wine oclock +re ina +one w +leon ard +en dez +u bs +support local +facilit ated +carameli zed +b pa +vuel ta +my tho +m ami +spe are +nbap layoffs +fe vre +nick jonas +im print +c so +craig slist +la salle +gi deon +ha doop +dis regard +w ud +tu c +ma gee +acou stics +ta a +qui e +pol a +cr t +dw yer +dis sec +capit ol +men tion +kn oll +he igh +fin ders +plac ements +l se +indi ra +gur i +madhuri dixit +kingdom s +iambic pent +geor gina +je ky +conflic ting +bay an +aga tha +uph old +dr on +vic ar +ex pat +periph eral +pe ssi +fa f +ance stor +? .. +wid get +pun c +comm enced +beav s +air waves +ad dis +po a +de sses +co den +vu e +ru pee +kar in +spo ck +m sy +ภ° +pr ick +fill more +ti fication +thing sto +sar de +em ile +pere ira +n ad +bright ening +arre sting +wo king +usc g +sp ill +raspberry pi +hu go +ite c +is ma +cuff links +optimi zed +oc c +mi wx +en ka +el ited +afford able +sa kh +coron ado +ho h +at ul +ai oli +jim cantore +accoun ted +vin ay +her mit +groo ves +ran ch +r illa +we tter +ou tof +veter in +ni kov +ki an +fair banks +ram apho +n iti +k ko +ru sty +ne stle +tv xq +shahe er +âĿ¤âĿ¤ âĿ¤âĿ¤ +penn ant +gem stones +dem debate +ðŁIJ Ĭ +auton ews +support indiefilm +mach o +ve x +new sat +ne ti +conce ssions +can died +yof the +mac au +den ds +cricke ters +san iti +mari ano +gh at +ar toftheday +¡ ľ +e gos +gen oa +chat bots +bri er +al labout +mon ty +spi ed +r tr +comfor t +sni ppets +real time +gra in +exam ined +en lightening +tt u +god bless +release the +sing ular +ki ans +ha ka +sor ren +defe ct +mar g +equ ities +d orian +su ka +per l +aishwar ya +pul lover +preci sion +fair way +ne ve +rive ting +vill anova +en com +ak o +passion ately +europale ague +siem pre +x vi +enligh tened +c fr +âĺħâĺħ âĺħâĺħ +wast eland +is f +new comers +emergen cy +amphi theatre +- . 
+text books +figur ative +tre mb +pe sc +ab hin +ab bot +ac acia +har ds +por sche +kau ai +el isa +car rick +abo u +elli er +be ch +neu tron +galap agos +ru ben +in nis +how to +nun s +sab ine +i ac +clin ched +no tori +fi ves +cairn gor +per i +gr c +ðŁĴ¯ ðŁĴ¯ +mal m +twelf th +di ff +rout ines +marty n +lin den +synthesi zer +nu mber +game cube +fal kirk +byz antine +queu ing +gr ill +scal able +char red +rou ting +her bali +gri zz +ðŁĺŃðŁĺŃ ðŁĺŃ +tol l +termin als +l pc +ab d +war mups +remo vable +¯ \ +vi go +pap aya +ne ve +lov ingly +jo kers +ib les +sse tt +poten ti +pel e +gi gi +sadi q +leg acy +son o +ru pees +retar ded +ele e +par r +fi ance +ey re +say ers +pend ants +mak nae +al bans +adap ting +p ff +pu berty +ji u +ing rad +hypocr ite +diplom ats +phys ical +rob by +bon sai +ãģ · +f att +catal unya +âľ ĸï¸ı +ro ma +more land +so e +conver sions +stl blues +shol m +gra ssy +pra do +on u +assaul ting +> _ +sett es +dis graceful +aph ra +âļ½ï¸ı âļ½ï¸ı +ठª +kil n +goal tender +s ru +philanthro pist +b als +th n +stu den +sando val +dogre scue +eli ons +asse ssed +lar go +hec tares +sh rm +sa if +cle avage +no ches +n ene +fat alities +cur ing +clean ser +al es +p vp +south bank +pizz eria +marsh als +kni fe +an dover +tbli ghtning +sr sly +ou te +digi mon +timesof india +prome the +le bo +f su +wit z +rever e +man as +mam ba +ch ica +gu an +exhibit or +csr racing +d ere +xx xxx +gu sta +story time +ston ey +organ ics +and u +se am +min ogue +anushka sharma +ab a +ðŁİĻ ï¸ı +ugand an +chro matic +as sn +document aries +sh t +ru paul +loy d +k ats +e us +ite ch +me dusa +pan ty +kel logg +et to +talla de +sha a +do st +p ms +mari ana +je ster +croo ks +ðŁĶ ¬ +min danao +ind hoven +ðŁ¤ ª +le xi +tv n +jan is +co te +ãģ Ĩ +ser rano +iw m +ðŁIJ ¬ +k ke +distribu tors +cap u +counterfe it +camp site +ag gie +ðŁĺ ¼ +chhat tisgarh +~ @ +state u +san di +prevent able +cl s +can ne +mm c +i ver +sa haran +pal is +night out +do s +ap ia +absc bn +manag erial +aro se +mo wx +aro sa +ðŁĮ ³ +under dog +remo ver +astronom ers +lent ils +su scep +smoo ther +pend leton +fau cet +e mory +dal mati +af cb +tic us +exem pt +en rol +d heim +ðŁIJ º +restric tion +star fish +sto w +snor kel +thunder birds +she ad +homo sexual +dy n +as li +andre tti +dou che +dom o +tar mac +slu mber +pr onto +first dayof +mini ature +mari achi +argu s +recomm ending +mobi les +in ce +illustri ous +or c +adver ts +gr its +wea sel +pag oda +over pass +gre ys +maxi mus +arma gh +wood land +sun ni +ðŁĴ ī +ë Ŀ +ti one +soci o +ho s +ðŁ¤Ĺ ðŁ¤Ĺ +wind sor +subsequ ent +munch ies +id h +exclu ding +e mi +cu th +z ai +week days +law suits +barn ard +Ø ª +pe tting +net es +mul ligan +pharmac ists +ra quel +e ton +cran ston +gil ded +cle ary +ce ph +ra a +pam per +lombar di +as in +sher ry +pro d +for te +ari anism +buffalob ills +æľ ¬ +ðŁĶ¥ # +uu u +just ices +car ina +nat in +mas low +dro oling +cog nac +cam ber +el ong +r dr +in en +convic tions +am use +tro ck +harm less +visit ation +gen omic +bl and +beno it +chim p +tuscal oosa +gre asy +x po +gil t +se q +per mitted +christma seve +book s +mu e +old school +human right +be ati +ðŁĶ Ŀ +sh at +sculp ting +h wan +fern andes +sci utto +fu entes +endeav ors +maid stone +un paralleled +shou ted +queen of +mer c +band ic +ve da +sel angor +pi le +ja han +intimid ating +disapp ears +cl ich +za ha +w urst +hi v +fod ils +cor dless +aaaa aa +hy dra +bel inda +e els +bu f +su staining +rugby league +no c +brig itte +( ðŁĵ¸: +tromb one +soo the +smo g +ad p +stab le +ing ley +diagno 
se +ms g +we ss +tic keting +one e +nsw pol +e up +auto psy +adity anath +sun down +river front +si ya +p is +hier archy +dur ango +di jk +ren shaw +he aps +epide mi +david bowie +interne tof +dd i +nation ality +mb ar +air y +win der +w alia +elli ott +c x +bav arian +pl att +an tw +wi wx +sof ter +ne ha +h eller +th and +dani ela +bo ast +degra dation +ðŁĴ¦ ðŁĴ¦ +transform ing +man e +av ut +ðŁĺĪ ðŁĺĪ +vo ter +the e +t ate +pu ff +in door +sop roud +boy ce +boris johnson +wait in +immun ology +ðŁıĨðŁıĨ ðŁıĨ +âĿ Į +street food +liz asober +cavali er +c elia +need le +motor ing +g ato +, ) +ra de +harve st +t ms +jar pad +on ey +air men +v re +impair ment +abhi shek +snoo p +l ant +fam ously +bl ou +s ze +g ander +un touch +tu f +dee jay +col lateral +b ind +ðŁļ © +pin ning +ic n +' ; +the economist +ul tram +worldwater day +ti poff +the i +feed ers +campa ign +sc umb +day weekend +yo m +pe dic +h ough +ps v +pl in +on de +boston marathon +az zy +* _* +con ley +thi ago +hoo o +gal erie +luci d +je tt +gl itz +final fantasy +achiev ers +y ung +peregr ine +op hi +dam es +biom ar +âĺĢï¸ı âĺĢï¸ı +sk c +l ics +fl ank +ar rahman +ho of +uphol stery +t ats +wo z + ¿ +snor ing +ra er +l ju +ap d +pl ating +kan u +im ation +fragr ances +m ra +mor ay +mo tt +im muni +hearti es +bho pal +tim ers +g ata +color way +car nation +win get +si ghs +s ville +optimi st +chate au +olympi ans +ci o +singer songwriter +ny o +fi bers +bur ch +ag ro +mil ne +ig bo +cr amer +ation als +dan ube +pad ma +nor mani +en forced +bre ck +boeh ner +ar den +sur rendered +pros thetic +om a +ha iled +calcul ations +w fa +bi b +fcb live +fon da +west coast +que sts +friend ly +to wie +fit ch +bal ot +star dom +scrat ching +ho sa +thi ka +o ven +stro ke +out post +pharmaceu ticals +hi kari +mu y +af d +fallon tonight +squ at +or u +dra ined +chocol at +ë¯ ¼ +wor ths +ri b +mu j +that s +residen te +it el +boo st +mi gos +mul led +la a +etsy shop +don keys +me k +p tc +flin ders +e hs +ro hit +mu ir +g ad +compos itions +åĨ Ļ +combu stion +i kh +yemen i +wav ed +gar ci +ak os +oo ds +fu sion +se que +s lan +pl ur +kic chasu +shenan do +s ams +worl den +horo witz +with me +mic robes +k ki +ðŁĴĶ ðŁĴĶ +w su +patch work +fre er +y aki +the art +symboli sm +mil er +bt n +ma bu +side kick +motiv ates +sag itt +natur als +serv iced +ps ori +pa ola +qu ig +i badan +gi ggs +ë ³ +sciento logy +si oux +salam at +d res +cad bury +d hawan +ci ón +_ ' +swa pping +maris ka +james bond +explo sives +ay les +af er +s agu +cen sor +tom a +jeff erson +ring ed +par tist +ir responsible +aguil ar +vac ay +equ itable +altrin cham +ac ur +man ish +ger min +schoo led +pu tter +ed ad +nav al +toast y +sol areclipse +dish u +coy ne +ac co +mu ck +mar an +el os +len der +cro ix +worth less +ha ber +gun men +ðŁį ĵ +zen ith +t enders +hur st +hol tz +itali ans +car low +u cd +characteri stic +bun g +av l +u th +sa sia +rs l +red man +neighbor ing +green peace +sti ps +follow party +y gk +en os +omni bus +na issance +chri ssy +secu re +call back +ji hoon +memor y +block er +l anta +daf fodils +bil t +ffer ty +fau st +ie c +nipp les +so g +m nd +jagu ar +bol dly +ab poli +pro position +gun sense +evan sville +cu tters +we go +dou n +do x +stal lions +ka j +shi ppers +j awa +vol o +le ven +pap rika +kov ich +jor di +induc tees +app alling +dial ysis +allevi ate +âĢĶ âĢĶ +pie ter +mid wi +q tr +juli ette +inter mission +haw ks +act ment +one ill +k lin +vam ps +fam ous +cou ld +autom obi +da an +west end +elli p +nh c +mel anch +web series +ton gue +snat ched 
+smy th +tan gible +sl i +e asing +bar stool +over lay +afford ability +ting ed +ter as +ay ush +wanna one +rh ine +dan a +sh ana +kend al +fer tile +w ir +repl eni +lar vae +is ro +con vos +ab brevi +u cc +hun gry +bur rows +ag er +nav i +mat in +du per +cer n +ma don +ķ ï¸ı +é ģ +tu ps +hy att +sh ep +friday night +wis er +hei di +hat ton +p gh +foun tain +wrist bands +ahmadi yya +aeri al +subscri bed +so los +m ace +sla yed +for fe +dul ce +christ mass +arun jaitley +viol ate +ob stru +ni eces +w vu +idy l +fa ze +pre serves +infr inge +premi ers +inter vals +agen cy +( © +stand alone +di mes +bo er +param eters +ge tit +ðŁĺĺðŁĺĺ ðŁĺĺðŁĺĺ +tu lane +for given +scol l +mb ps +smash bros +rob bi +prima vera +ali st +ghost ly +ay at +ye ats +impre ssionist +ear phones +caul field +wai kiki +sal ute +sc ou +mu ay +louis vuitton +bak hta +ado g +inven tions +hur d +forec lo +stream line +thalai var +ch snews +will ard +t sn +euro parl +cru sher +my sore +gro wer +ra ping +pat ti +g den +sm w +muf ti +kid man +ab r +soun ders +skep tical +ðŁĶ İ +sun dar +i me +fer g +feather weight +ar lington +pas qu +ag azine +wearab le +nati c +mccl ure +inter mitt +hor de +six ties +car te +bha v +ze al +experi ential +ador ned +som mer +eno te +hypo thesis +stin ky +pro to +dead lines +vo gel +mus ings +monc ton +gu ter +f le +aci on +voice of +ta sha +inhabit ants +type face +s ba +bts x +ðŁĶ Ĵ +wor x +u hc +jo ko +cell ars +gor o +continu um +... & +weather cee +ha p +sr k +ris ers +lonely planet +un named +co eur +ðŁį Į +the world +ili ke +fa sten +ami go +ri ba +ramapho sa +staf fers +had ley +? ?" +fi ore +sal ut +hu ff +bez os +Ñ ĭ +ra der +kam ala +in line +fill ers +um atic +all in +shat ter +re in +o ku +ch ases +fla gged +baby metal +water stones +ts b +cut out +op hel +aam a +rockab illy +sto lic +jet blue +ich ick +down ton +uzbe kistan +pat na +la q +gr ange +) _/ +subsi di +sc p +newsc ast +it sa +twee tyour +e mor +archae ologists +uni fication +por ta +q x +protec tors +pro hib +charis ma +car tag +ren fre +scul pt +guwa hati +de ma +boo p +unf pa +dex ter +lay la +alleg es +sou ps +never again +l ys +cal c +bar oness +visu alize +ger ber +absor bed +i ers +a han +fon tein +detec tors +verst appen +sv c +formul ated +ac dc +li x +in competent +bh k +lour des +water house +snow ed +appreci ative +sig ma +lizasober ano +pen ned +pay check +tall inn +fanc afe +par isi +av alley +vi g +ru fc +hard ship +so cute +po ise +ì ¹ +roth schild +k ly +???? ???? +l hp +il ay +f hs +am ad +ide als +brad bury +bal boa +nic ot +kid nap +wol ve +tas manian +op t +matthi as +ãĥ³ ãĤ +super markets +mylittle pony +me lee +li ster +gr oun +fe dora +kind ness +en en +bra hms +¯\ _( +ros well +mar lene +ic u +re formation +or ail +he brides +dispar ities +terrac otta +swal lows +re id +influ encing +flu or +den e +tum our +blon des +thunder bird +sh eva +moga dishu +ka b +cre eps +i ving +ene ed +anno y +âĶ Ģ +intri gue +enqu iry +ar aj +tur al +kuber netes +end lessly +divi dends +tor a +ti sh +commemor ates +un ra +tri b +pon ty +ne m +diss ent +brew ingco +ðŁĺ ½ +nor mali +bi of +( ... 
+chil len +ì£ ¼ +mell on +av is +mccor mack +ing ra +enrich ed +custome rexperience +testo sterone +snu g +sett i +ger onimo +inqui rer +bre aches +very thing +bloom ing +mu ra +dispo s +bi de +de va +shade sof +in trin +sh ev +s ven +nayanth ara +gan esha +c ws +ber ta +label led +use um +nick named +ma han +car uso +ap ur +ðŁij Ĩ +w q +orphan age +discar ded +mag nu +lu e +je on +bridge port +pac ing +mercur y +( ðŁĵ¸ +marx ist +amphi bious +transplant ation +stit ching +then burg +gradu al +ãĤ Į +ro ft +ma ils +ine c +guy ana +dopp elg +ver o +re write +head less +harb augh +gate way +car sforsale +sw i +st is +mach t +un de +sura baya +stap leton +nur turing +mil ner +ya o +lma oooo +ko sh +arsen al +k ame +er ry +ar royo +dis misses +ru bbed +rc b +lew d +dil u +and or +vi de +ur in +inter sec +ha ar +al b +year swith +app leton +é al +ul livan +suc cu +monter rey +d mx +artem is +ron nie +farm land +s football +gro tto +anth i +ãĢ ģ +à® Ł +vid ya +jimmy fallon +ൠį +t zer +gravit ational +w thr +u hhh +e hr +tin ker +ti juana +scran ton +ram charan +bar clay +re van +m si +ka p +wr s +we thenorth +tor al +sat u +gro m +fac ep +erick son +z yn +se dge +oo dle +spur sofficial +ds p +sic ilian +soli hull +recei vers +ladak h +hend rick +ther i +presi ding +mc guinness +litt ers +gun nar +gh oul +wi b +n tv +kar o +fro ck +b lau +ampli fy +all is +ul lah +memo irs +kh loe +intercep tions +pet day +lo oney +con fin +ch ay +piyush goyal +frequ encies +ut z +event ual +warm ly +obli vion +an ka +ta it +âĿ¤ï¸ı . +director ial +ru lers +prince s +mu ck +stur ridge +deu ce +abri dged +bagu ette +un cles +pen du +min ding +forre ster +av ila +wall er +wall street +ment or +hin o +high way +crom well +fanart friday +mb i +co yle +a hi +tro ve +spie gel +pay tm +mcin tosh +jan sen +nit i +nash ville +len o +leicester shire +le gos +dic t +ðŁĵ ½ +sp ad +beverly hills +sy rah +separ ates +z ain +un fit +dra gs +tan ia +over flowing +hri thik +haw thorn +z ani +mac far +fi de +to tem +pe ds +fundament ally +cal ico +sin ner +j ä +hil de +ds d +ten ay +ta hit +mil f +lie b +inform ing +up lift +ra el +mortg ages +lec t +ii ii +guillau me +compos ites +old smobile +l end +gar th +com mish +bapti zed +scorpi ons +ru cker +bringback our +alli ance +thalap athy +tal i +sp ans +eri dge +wither spoon +lin da +sky lar +kor n +hom s +Ä į +sil enced +caf fe +ar ty +dist inguish +to wed +pun g +jessic a +ear nest +beau fort +t ama +study abroad +si khs +new bie +nav ratri +mar ble +loun ging +lit ter +dal it +so sa +iz es +gra de +com promising +tr iton +de tta +v j +chau ffe +spec tral +powe red +montess ori +artic ulate +hal ton +al co +ye y +mn twins +acoun ty +ðŁijı ðŁı¾ +âī Ī +mad men +kal a +gru m +chi k +ati s +su me +akh tar +job search +high lighter +bo ath +âĦ ¹ +tar zan +lam bo +âĽĦ ï¸ı +ox fam +dump ster +pretz els +mac os +incl ined +fac tual +adverti sers +shu i +pu ree +ml pfi +anti dote +cap o +pa str +merc ado +but ton +ar min +ag g +lol la +horri bly +er rands +christop he +time snow +monday motiv +li ss +scand als +mc i +dispropor tion +âĺ İ +sur pass +samar itan +so tho +pu rest +fl att +trivi atuesday +delec table +leop old +hermi one +chou dhary +en rich +¡ ¡ +subsi diary +ine qualities +bachel or +auto immune +la kota +i hop +ad jec +the simpsons +sh es +se k +gret chen +up stream +hin akhan +coper nic +x tina +lu g +tough ness +e ad +cli pped +bi us +sl v +fah ren +dee pak +ca u +x an +im mature +dig ni +bo bs +shred ding +but tery +accommod ations +de ven +chun ks +super league +sky bet 
+kil dare +je et +ë į +ce k +wrec ks +pro pane +oh l +tb d +quo i +trum pp +mi mo +reluct ant +ver ne +o ic +ma gh +ar nau +se ver +li dge +stair way +kicchasu deep +ðŁĶ º +mach ining +aama admi +ot i +c da +al it +pan y +inst alls +ac ct +e shop +di em +hard well +fulfill ment +sc afe +qu ack +extrac ts +swee tened +fi ghton +f di +d inger +wal tham +us ur +refe rees +seok jin +gran n +af rin +th n +sch af +par cels +bet is +amar ine +nom an +kh tar +mor itz +cou pling +bar ons +ðŁIJ ¸ +à ¸ +sl p +sad ler +x ander +tri ad +mc millan +kh z +divi ding +ìĹijìĨ Į +dar yl +zed d +le ys +pla ques +flu ori +tipper ary +on nell +di dier +lang ford +im c +the sun +bir dies +ar cha +ye ssss +t di +dar ia +cand ace +al tam +pal aces +ch it +sant am +event ful +book of +ad b +mon stax +cre ole +co el +âĸ ½ +we aren +sten nis +she ath +ati sm +gron ingen +mlpfi m +le pre +wrong ly +rsp ca +rendez vous +acknowle dging +pel vic +solic itor +sla ys +nue stra +lo d +is lander +fer oci +fashion show +ra ss +dge on +adole scents +sma shes +negli gence +grate ful +ved ere +sw oop +ing l +apol ice +vand alism +gan n +jo ao +di supdates +zimbab we +under age +radi ance +w of +bour geo +pla s +cr ani +gh ue +wrec kem +warran ts +re form +jim mie +at wood +ys l +neil himself +l bj +i man +tan to +nois se +ver bs +equip o +al together +mam ent +l ice +dou glass +tier ney +pri med +j hal +furn itu +braz ili +v ill +past els +n ison +u ff +paral ysis +jay e +im po +ðŁij ģ +strate gically +pakistan is +was sup +super bike +thank u +tru elove +sha ikh +israel is +vi p +to g +li en +la ker +grey hounds +cul ars +bian chi +balot elli +ar ran +loo s +str ates +he bron +ar vo +sunder land +the al +tomb stone +sand man +c pac +thanks giving +love him +lat ino +an in +aka if +ĭ ãĤ +tor quay +di est +alli anz +ðŁĺ ķ +golf club +cl lr +wal cott +sch nau +promp ted +nomin ating +len nox +val et +mon ro +may ward +e ph +ðŁĶ Ķ +inter oper +r da +re flex +arm chair +ê° ķ +stri pper +por ti +ph arm +ham za +ni reland +ne ue +h pv +port foli +sun burn +fris bee +be al +bapti ste +x h +ty m +pr ati +o vers +haz rat +deser t +der ry +us ky +em mett +ach arya +)_/ ¯ +shu d +may a +ham ill +ra im +nr c +fitt ings +cur vy +ðŁı ĩ +ster ling +à¥ Ģ +wal kin +short cuts +mil ly +ast ur +alpha be +pl i +pe z +miss you +rad ford +ml g +ta eyang +notjust lakes +du mps +seren dip +le ur +ra ving +e ster +de priv +absc bn +ðŁijĩ ðŁı» +scar city +o cr +mean ings +cap t +da hl +fer mentation +bri oche +to win +out lander +massi mo +en cro +ðŁ¥ ³ +buil t +po tam +kir i +tm w +monit ored +k ites +peoples vote +gray son +íģ ¬ +afri ka +a dies +i vote +gy ne +g annon +di x +c mc +ou ral +fox andfriends +bel i +ig ne +gl an +katrin akaif +co politics +qual itative +p si +lu cci +disc oura +âĺ ® +kel li +gau tam +carac as +reale st +pu la +in us +hill top +make aw +atten borough +tw y +r arity +peck ham +ma hon +corn elius +clin icians +ton line +tb i +paradi se +ka si +inev it +fresh ness +colling wood +lun atic +defen se +cop d +in fra +wain wright +sains bury +alab am +te ma +lac o +chec ker +releg ated +tren t +stal ks +huff post +bhubanes war +ast ral +share your +prim rose +hi me +cat an +end ment +en dow +cle mens +mal oney +hil ary +game time +den ise +collabor ators +b wo +radic als +gue tta +ici on +au a +snap matic +sat chel +excav ation +base man +s ão +gn ation +fel d +surve y +shah zad +ma st +anirud hofficial +tru cker +ot ago +geo graph +ethe l +âļ¡ï¸ı âļ¡ï¸ı +s ver +mu tt +internetof things +ancho red +wh ouse +bang la +bal main +ç¹ 
ĭãģ +break fa +á Ģ +twi ster +te tris +ca v +stag s +g z +au b +stor med +hel ens +yar mouth +st asy +gustav o +co sc +vin son +up p +sc ricket +assump tions +app e +nu h +u er +pre mise +n aga +e amon +coron ary +na f +north side +el mer +ro tar +out lining +el f +re surg +kat elyn +in can +hyster ia +ce e +am bani +pro lly +Į ãĤĬãģ +ax es +san jose +rem brandt +mag pie +even ly +scor sese +qu aint +f g +b buk +indian football +weare all +spd wy +pis ces +ec g +âĺħâĺħâĺħâĺħ âĺħ +pre orders +: | +ni pple +sal azar +ju me +jail break +min n +bas sett +ze tta +jef free +ad jun +tic on +san diego +drink local +chol era +solic itors +o bo +com post +ni an +wr a +tre ach +ic ic +profession al +del ve +leg ate +histor ia +cro issant +con noisse +nam o +palli ative +chem trails +i ority +global warming +comic art +behavi oural +re sted +li as +cli mates +Ł ãģĦ +rut land +nou rish +menopau se +hot ties +demen ti +ve spa +mel ville +anal ogue +tz man +str ung +im perfect +gl are +cir cling +ros berg +rec o +oc ity +lo ire +em be +do ssier +ne el +nan do +me a +gal vani +fin esse +ag p +berke ley +asi m +âĺº âĺº +quil ted +ish ere +un matched +po tion +for z +at re +selfi es +juli ana +ðŁļ ¶ +âĸ º +mel ton +âłĢâłĢâłĢâłĢ âłĢâłĢâłĢâłĢ +spin rilla +pur cell +ed p +at leti +tony awards +ra ja +pro gno +mol ten +stu ff +p ally +nobel prize +âĻ» ï¸ı +spiritu al +spe ake +sa sha +bri um +tru ss +critici ze +assassinscre ed +yor uba +u lo +fire man +workin progress +ef cc +fla res +ro bot +hi kers +cl l +shado wing +pat sy +leh man +c ns +å ± +guad al +à± į +ra pe +r honda +paralle ls +son ja +langu age +land ings +z ola +cr amps +bur ning +apprais al +jol la +ham m +kas a +gul ly +f go +uly sses +ri be +ðŁĴ Ħ +ib u +eti enne +bri ar +fin ely +comb ating +y ql +go tham +we chat +to paz +primar ies +l se +iz z +hel e +dispon ible +cy stic +bel ichick +th rush +kansas city +ge om +soli di +red bubble +by stand +cambridge shire +par fait +ast le +ow o +ind ore +stom ping +sm elly +ðŁ¤ ĸ +locom o +adm itting +hol me +clock wise +min sk +mc co +for get +ev p +cam ra +ab ella +yo tes +universit yof +mé xico +silver ado +ric ket +crom bie +pu j +eradic ate +deli ght +y go +glam ping +vic a +du ggan +coun ters +cf d +sc our +react js +pu ram +paras ites +in ki +vill en +stel la +li mbo +ang as +k cr +ðŁĴļðŁĴļ ðŁĴļ +vap ori +mum ford +oli gar +à ¼ +al oo +boo ties +ad r +k elli +dru mmers +av ici +nature uk +ron al +in trac +un splash +le che +g oma +el ine +envir o +bi onic +bu eno +mi k +av in +star ling +em powers +cake day +boy cot +ðŁĴļ ðŁĴļ +ðŁĮ¸ ðŁĮ¸ +v ach +m ci +fractu res +ger i +sk ing +exclu ded +lu ce +ja ve +ig gy +evi den +aki stan +a wn +mor als +luci fer +ha ban +tumb ling +sunday motivation +mo sley +captain america +sch icago +the one +mo td +d ts +ðŁIJ ¼ +rep ell +ii i +locu st +geo spatial +mer sey +immer se +desc end +ber nade +j s +boat sales +win der +cran k +sing leton +candid acy +ben a +ðŁı» âĢį +high lander +ol t +k prs +healthy lifestyle +four teen +end the +ith aca +circul ated +r ans +pre valent +ha vas +splend or +roo ster +kalamaz oo +jewell ers +enne dy +rou sey +es y +cann ons +ornam ental +// // +ren don +win ne +mol ding +eid mubarak +coun tess +simon a +ha wa +fo es +du ster +sb u +por tray +mar ries +goo dday +cho co +achi ever +ðŁĺ¹ ðŁĺ¹ +pre neur +tr amp +tom i +n bat +garden chat +farra khan +ever glades +ab ru +sou sa +se ce +homes wee +terre strial +bar it +sri devi +ol u +mel inda +f rick +can dies +ðŁĺŃ ðŁĴķ +qu reshi +family fun +exor cist +cardin al +ny t +dies el +cu 
mulus +capric orn +si ology +lor na +dou gie +an die +super sport +c fl +п ÑĢи +say ang +pe ek +ภĬ +lo be +j em +ing lis +gg led +c sn +amne sty +chu ps +ba es +sau er +ðŁı IJ +mongo lian +en et +back street +dr illed +acce ssing +ce o +b se +ai ken +pur r +wor sen +whe res +war k +testi fying +bu ri +bla st +aw g +ðŁĵ ĭ +re defining +hear ing +u ci +c mp +bon i +tail oring +ta ji +noc chi +em t +stephen king +ne et +compla ins +campaig ner +luci ano +twili ght +ti esto +pas sports +flo yd +cathe dr +na ked +caregi ver +b coz +ade cides +ku ri +ly k +br aries +dren ched +disc lose +ðŁĴª ðŁı½ +le blanc +je tty +gar ty +chip mun +b su +rhyth mic +ic z +fri d +anne x +ame x +solo ist +lanc ers +arro whead +speci fication +simul ated +na is +inver te +bo wing +wor ship +f z +abo ss +sha q +ì¶ ķ +challeng ers +an arch +aamaadmi party +ãħĭãħĭ ãħĭ +suffol k +so corro +sn ell +cla dding +absor bing +shaw a +particip ates +ðŁį Ķ +book stores +bak u +seap ort +ko jima +gab y +pack ard +electr ician +let it +mo wing +fa wad +young jae +hot mail +men ing +u rie +intim acy +con ti +: ") +lifeis good +in ciner +i dri +craz iness +jour nos +fran chi +bott len +al da +ff es +k x +south we +air a +clay ton +sco ti +f j +bri ga +ð٤ĺ ðŁı» +demonstr ators +y z +stor k +na q +casc ades +travel chat +plat a +pad ma +fran ci +at tain +bat girl +lom bard +hoo s +d dos +neon atal +discla imer +r ss +r ant +di sen +tex aste +so cal +frac tal +cam ry +stri fe +sn acking +mu h +sant ander +mor ons +gra f +par ades +hu ston +dru pal +mi ento +kir stel +hy de +vom it +forti fied +sphin x +da v +bir yani +win nings +s baseball +mer ged +lovel ondon +ling ering +dream big +car leton +liveli hood +djan go +astri d +gri ds +down e +bru ised +s ne +scarec row +hel ium +f nc +bi ggs +an ter +restor ative +em pires +ab del +life style +kiwan is +colloqui um +me en +pr ick +anti que +ze b +mi mic +edmon ds +ðŁijĬ ðŁijĬ +q ing +pp el +mc gill +interpre ting +âŀ ķ +rash ad +do ka +narr ator +electro magnetic +ash by +sau ra +iran deal +âģ īï¸ı +krish nan +in di +ff en +bre a +os man +multin ational +chi ppe +recruit ers +aus biz +p ounding +re gen +cur sor +refu sal +mac s +in ak +ax ial +wa ifu +up cycled +hindu stan +cas sini +carly le +scrat ches +re ef +man atee +eat ery +ðŁĵ ¢ +un condition +sen pai +on ther +comic book +pro sciutto +de mar +mi se +ma ge +fre ec +aye sha +al der +android games +ley ton +ho ck +door way +chicagof ire +aali yah +sw elling +bi x +. 
ðŁĺĤ +evan kirstel +torpe do +kon stant +genevie ve +ma ia +ha user +do torg +hide ous +fi k +sp raw +e ek +z appa +wan dered +' ' +ra jan +bam bi +( $) +wid ening +tool box +sa ir +illumin ating +pra ys +out patient +i w +day o +lo b +sw fl +sha des +gu ms +coo kin +ko di +gri ffin +traum ati +ste a +slaugh tered +god bless +air time +pseu do +b sa +hau led +ar if +à¸Ńภĩ +le l +wc po +mil iti +char ters +worl da +ru k +k gs +digital india +is able +idyl lic +esp ino +marie tta +e bo +team canada +ab our +wil ton +rock stars +fav ored +phys ic +wrink le +tb r +d print +ball arat +ad al +z ey +ðŁĺį ðŁĶ¥ +tom lin +mt r +pal sy +fener bah +tight en +phil ia +ir oning +ry u +b ant +enqu ire +ca ir +abur ger +tru n +green berg +chau han +ir ina +sh ani +trend setter +pre tt +zaf ar +alo ve +v ici +pan ic +no o +lu stre +disrup ted +bal lis +son sof +mon si +inst ac +ake st +ëĭ ¤ +kw ame +horror movies +distric t +sau cy +mb an +ar mies +with drawn +med ics +loft us +er oom +be kind +ar ns +all on +un ison +davi ds +cr at +nicot ine +so or +sm x +on co +cospla ying +zombi es +har ms +e ger +ro sy +moon shine +fe in +ce tt +du brov +reg ents +ben itez +ðŁijıðŁı¼ ðŁijıðŁı¼ +ste c +m alia +prioriti ze +ic eland +ft se +v amo +lam ont +homo sexuality +bre es +regu i +cb p +te j +sky sports +deter gent +sha sta +de rel +conserv ancy +colori zed +accol ades +vis o +show your +nan ow +bice ps +us ability +bi m +dailys ketch +pearl jam +stran gest +mega deth +broad casts +bar ren +ar ton +chri ss +confi gu +lu res +is the +e ul +railway ana +global health +gi anni +u aap +s lum +consci ously +ab re +n up +bud get +v ada +e sch +real ness +er ased +th unt +be z +armist ice +ðŁij ¹ +sh run +o led +driver less +ðŁ¤· ðŁı»âĢįâĻĢï¸ı +won dr +sk an +sal aam +mother land +h wang +gen o +gang nam +tw right +endor sing +en ic +ador ation +pau sed +patric ks +do cked +plat te +ff xv +ethnic ity +auto show +side show +after life +re located +orphan ed +food network +dare to +and ra +sla ps +v live +swim s +re imagined +mist le +re vise +real ity +bhar ti +ðŁĴĻ ðŁĴĽ +late st +prou dest +gra sses +lan yard +fresh est +carcin oma +anom aly +zieg ler +sum ner +ly rix +gor g +is d +av el +swild life +me squ +john cena +euro league +sab er +master ful +yar ra +cogn ition +jacob son +abo lic +sir loin +shuk la +moj ito +su pere +st weet +me z +e sa +rudol f +gur a +where you +tt m +win s +trust worthy +ny k +bra den +table top +good food +es on +be k +lingui stic +gra ys +ch ath +h cs +mon i +de ans +cu ssions +ch ell +slo ws +he mi +d app +shar pie +boo sters +a os +str ack +se dona +mu eller +hard wick +or nate +thor a +sal ud +o twol +ch um +mi ho +for age +thel ittle +tear ful +ones elf +min dy +sm g +gmb h +emer ald +ðŁĶ´ âļªï¸ı +tu tti +recep tions +re vising +i brox +tope ka +sal ami +expan se +i books +dob son +cli o +at s +ðŁļ Į +mo ha +is ance +shu tters +moo t +jan ine +marvel comics +jor dani +pos er +kenne th +hy ung +de ja +ase ball +speci ality +eu ston +classic car +had ith +ðŁIJ ī +chas ing +iz o +gros ven +ag lia +thisdayin history +t row +om ile +hu ar +by n +sal ine +div ine +demon ic +ty ran +han dover +revit alization +pa ella +cryp tic +se dg +m end +dun kirk +bre d +wal d +sport scar +a ard +whe aton +da ener +k lan +br t +bakhta war +spi res +schu bert +ro ti +poli sh +o se +ag ame +wonder con +prote stant +bo sa +ðŁĺ Ł +d ü +joy ride +ger trude +âĿ Ŀ +gil a +v h +tw a +tra v +swal lowed +star ve +la in +ent ren +rei ki +su kh +cra ic +az u +web page +kee fe +hypo the +hir sch +hel le +camp ground 
+w amy +tra vi +sha hi +san deep +ru i +han uman +dw p +reposit ory +no or +no ff +un real +p ell +black history +har vick +ma scar +pay ee +pa sha +gastron omy +d ÃŃ +ai g +rosen thal +open day +embelli shed +t tip +sun bathing +go pack +end ome +ï¸ı # +invali d +final four +st fu +squish y +ra sta +mo sch +jam esc +die trich +sel a +mel b +el vi +t dp +sun i +sli t +j ha +bi za +spi ked +l li +l illard +vam pi +syno psis +az har +kendrick lamar +ĮãĤĬãģ ŁãģĦ +heart less +country file +air play +arrog ance +pre e +virtu oso +ãħłãħł ãħłãħł +raj u +le bu +for ward +tu g +dro s +mondaymotiv aton +concep cion +thel o +pad i +looo ol +ÑĢ Ð¾Ð´ +it ss +eth ical +end uro +__ : +expend iture +mon ste +mas king +terri ers +ib is +e mber +cu mple +punctu ation +pi per +ir vin +ade e +yy yyyy +flash backs +cel sius +don nie +bo gota +ben evol +the script +shil pa +pro se +fin dia +ze ke +ne ko +do ves +blues lyrix +fro sh +sowe to +mp lo +al ai +sab i +raq qa +wf tv +stro ller +ian somerhalder +ðŁĶ ª +an on +mo seley +! ?!? +sta king +mol y +car tri +c sg +ast or +transc end +ma er +de ux +cow girl +sas k +pun ter +ma ken +o ates +love tt +grow ler +sag in +v n +ssi ble +officeof rg +y mc +sab ar +faul ty +ap ha +ak on +ðŁij « +snow don +ae w +raise the +ðĿ ĵ +grue some +clement ine +sp ing +lat a +worlden viron +mi mic +can aria +bakhtawar bz +ao a +fal a +ãĤ Ń +avi va +you uuu +thi gh +la dders +gu mbo +tz ky +fu zz +plastic pollution +est ate +strength ened +k ant +dr in +cal vert +transform ational +frigh tened +mac lean +elited angerous +ear thy +t son +to da +j nu +.. , +mic hal +i ban +je ong +is real +sim coe +exclu sives +blue bells +ben e +te u +pil sner +pens ke +athe ists +m pu +cartag ena +ðŁĴĹ ðŁĴĹ +million aires +kk kk +it ar +subscri ptions +remo te +ma fi +hin ton +w cc +ho k +ds b +ab leton +sevent y +pun ks +e indhoven +sh one +mcfar lane +lim popo +empha si +à ¼ +sin fo +pe tre +man grove +ch ino +ber tie +play lists +push awards +p af +deb bie +c do +r ino +ðŁı¾ âĢįâĻĤï¸ı +fol ke +bon nar +th ine +sl an +hal ter +evi e +aw some +vul tures +spar ky +seiz ures +âľ Ķ +ram one +ine ffe +al n +pro ctor +ast ra +the voice +gro te +sci on +dead line +am aya +tain ted +patter ned +exce eding +cross fit +kay lee +drop box +ru shes +tack led +mo by +retro gamer +n cbd +benef itting +shay kh +guild hall +gen try +dream cast +dread ed +bun dled +th aw +revol ving +n pt +kylie jenner +imagin ative +ron i +over came +family time +ds burg +car naval +relation ship +recogni zable +cor oner +ho le +fan fic +emir ates +bur ritos +analy se +thin ner +ne es +galli poli +bl r +cat woman +-- >> +au lt +ada ily +nau ghty +ili o +solit aire +mtv br +jocel yn +arun ach +rep ent +south gate +hy acin +essenti al +fent on +and um +it or +go pal +sl inger +po sei +aw il +wi elding +ra ila +eli as +a sto +à ¤ +tend ency +str ata +ker t +< - +im acele +da es +sti mulus +han ley +fit nes +ec stasy +lim ous +ha iling +ðŁ¤ Ń +chis wick +tar ies +sla v +pul i +moderni zation +black mail +b ingham +h fx ++ + +ðŁĩ®ðŁĩ ³ +ni v +we a +profess or +k off +bol ster +su ave +sequ ences +pepper oni +not te +dre n +ãģ¨ ç¹ĭãģ +hs v +o ga +ap tly +z ad +excel si +rin ka +mol dova +min n +ma bel +conferen cing +bas ing +of er +ob si +hamill himself +care less +brief ed +inhe rent +par ish +dub nation +town sville +sar awak +gee ky +doncaster isgreat +was abi +gu p +phen o +dra inthe +carrie underwood +ble eds +bbc world +ane w +alta f +dul wich +ani ston +w ti +sumat ra +gra fton +bl n +me ster +bode ga +re go +es q +an jo 
+sump tuous +mai sie +ï¿ ½ +wil t +jak ob +el vis +se pul +mu ster +air pollution +president e +happy monday +exten sively +fl ondon +t ls +play ing +pe ed +din ho +var dy +pi ka +n iro +au cus +ðŁį ¦ +nu ll +el ondon +juvent us +imag ines +dis ab +lit o +d ura +work places +promo te +mc caf +wood work +waw x +à® ª +tt ino +shar i +sem per +better together +ðŁijĬ ðŁı» +ze bra +pon dering +en chil +ho m +cosm ic +tan z +mo cked +ec cc +ath ed +abo lish +prop eller +paris agreement +assemb lies +indu stry +fraudul ent +pe sa +chang min +ax x +ðŁĴ µ +irr ational +cu sa +ramad han +octa via +on elove +jac ki +bar ak +taxi der +seri ous +nathan fillion +mc en +ch k +po part +grav ity +copp ola +reading fc +illu sions +j ig +ww x +re sh +ex porting +buzz ard +âĻ ¤ +p cm +lan apar +ko s +arom as +antal ya +ww dc +ven a +phil a +ball in +ðŁij Ħ +quin ta +ma o +f ery +eigh ty +sentim ents +safe guarding +r wa +pu ffs +luc ille +de cath +sl u +nu gent +de ter +braz il +ze iss +super bowl +subsi dy +alter n +hi dalgo +enz ymes +ä ½ +tag ne +hair dresser +adri en +walk out +oppo ses +can tina +bed side +af an +ðŁĶ Ĺ +prophe tic +dan es +un successful +super charged +pk k +exem ption +hart le +secu lar +cli pping +br s +united way +c net +pat chy +ha gan +e en +âļ ľ +var a +sym pathi +never trump +affir mation +om f +ny cfc +ma ja +sur ro +keer th +up scale +sandal wood +mon archy +kno bs +å ĭ +po tholes +hunger games +ter races +na sir +coun sell +welcome to +wa q +se aman +m ita +stun ningly +on theroad +in ability +) !! +bon go +ant v +sp ut +worldenviron mentday +resu sc +y td +fi m +eun hyuk +sa chin +rose anne +cler mont +ape c +am ina +v ening +n antes +al most +sin us +ex as +ty l +ti en +ple ad +lanc s +bur naby +re k +jo om +observ ers +disco graphy +cl g +âĻ ¦ +sn ack +r ti +o ily +crystal li +bru te +web development +topp ings +la f +an is +ad der +reli ving +car lin +battle of +we g +syri an +pon t +n dc +lagh ate +yu ma +sp p +p iti +ro bbing +mart ing +rey kja +raj put +nc ds +kie wicz +âĢ¢ âĢ¢ +vam pire +substan tially +opio ids +nepal i +k line +ar oo +under stand +lit t +u it +thro mbo +sar ies +qu ot +b alling +t tr +s gh +philip p +br ant +ac l +m ello +whit taker +. 
; +defi ant +b gc +repl ying +mir ren +metamor pho +sch wab +bul ge +utili zed +pick ering +par don +d sa +à¸ Ī +doo ley +cumul ative +Ð » +ur gency +e mir ++ /- +¦ Ī +ot as +âı ³ +station ed +grape vine +ar ac +karan johar +f ancy +sau l +coo gs +lgbt q +ا٠ħ +jav i +u mmer +pl l +den is +dai pur +pu ffin +lewi sham +fand om +co pe +ves matter +s ve +hel pless +deo dor +ostr ich +kaz an +friday the +con dor +v x +sophom ores +rob les +cu tt +cli mbers +ë¦ ¬ +sle g +sn f +mac ys +hydr ating +grou pe +po yn +mou lin +hg tv +lmfa ooo +sulph ur +asdfghj kl +annab elle +hump back +bra ved +viswas am +multi purpose +hu midi +escor ted +barb ican +f ad +cor sa +ðŁ¤ « +pi ppa +here to +can y +ser gi +or cas +o vie +ed ou +s any +glob alization +man cini +food truck +f is +defi brill +sch re +sma fia +love wins +la ut +k aka +hol lande +game on +resurg ence +out side +olympi ad +int an +abstr action +rapi d +pal om +cal le +jas min +attack ers +swag g +mit ra +ky lo +à® ² +her mitage +gor do +e ira +so sfam +roll out +exc ite +sy nod +mer rill +c als +as sa +liveli hoods +ju ve +the black +gopack go +ant lers +alban ian +wool ly +qu iche +puri fication +are th +smar thome +ne k +all blacks +mex icans +is m +ger ms +comple xion +mar ck +u shi +ðŁIJ IJ +char l +ca stic +till erson +giuli ani +biode gradable +mal bec +bo is +ju bil +im es +r ame +gene tic +esp nu +ch ley +so ho +go pher +g sc +buu ren +cu be +bridesma ids +webin ars +to e +mani pur +viol ently +notic ias +ex changing +chi ev +replac eable +muay thai +bu ss +sp il +instal ment +div ya +cait lin +o lim +fil tering +whirl wind +sta red +prior it +pr am +pompe ii +mono logue +k ite +bu ka +â̦ .. +vac cine +bre ro +woz ni +sol ent +re ferr +my rt +gridi ron +galatasar ay +fro ze +clare mont +ðŁ¥ ĥ +victori as +ssel dorf +pa stures +net neutrality +ch or +ðŁij ģ +ಠ¿ +we ho +symp tom +jo sel +in ous +dragon con +power ball +p te +four thofjuly +ec la +ear buds +where abouts +salt life +depriv ation +ch ter +wi ggle +syste m +ps st +ch az +d any +ri mo +oax aca +lanapar rilla +barcel on +melanch oly +way back +ho tro +n si +l illy +kur o +ja han +intellec t +board game +ðŁı Ĭ +sneak peek +k prc +jail s +cand el +zan zi +mor timer +star ch +ra gs +p fa +long live +k art +gir ona +cro cker +christop h +precau tions +war ship +per m +paren t +van gogh +gif ford +allegh eny +ra yn +ut m +sten cil +rec alling +pen ney +z azzle +ìĥ Ŀ +hin ds +aren as +nu ev +law ler +gu in +do this +ðŁij ķ +ì¶ķ íķĺ +we g +ti b +ri din +complex es +turbul ent +pe sos +de marcus +vall arta +sam sun +kis ses +hein rich +deport es +wil ms +ur d +then ext +inki gayo +ho wi +fir sts +carri age +clean liness +mas war +is ch +ax el +si zzle +road house +fr ans +ent ourage +co bble +boo th +benedic t +tal on +fc u +year ofthe +ray on +raider nation +fo yle +ko val +pi anos +l pg +bur mese +man ure +geo caching +cosc ino +b np +fer ra +stro phy +mar ais +ce es +legen dof +kat niss +eno ch +av ed +you know +d prk +ðŁĺ¢ ðŁĺ¢ +sp un +pro st +sor rows +cent red +ke a +gal icia +? 
ðŁ¤Ķ +ÑĢод а +bou chard +ðŁĴĻ ðŁĴľ +yu i +seed lings +jon ah +reco vers +ny rd +board room +su ma +my japs +tun g +sha i +ir gc +eli o +wag ons +ka shi +polic emen +john nie +ale coscino +shop ify +dot ted +de tri +va w +to fficial +in your +chal mers +trac ed +no vi +by es +ari el +nipp on +la pel +gri ez +b gs +fool ing +d ita +vijay sethu +nm wx +as ot +kr anti +hel m +ve di +sic kest +mo chi +k abo +shru bs +he red +b sp +sq m +ham r +dul kar +anth a +nr f +avoid ance +at en +publi x +be arers +nas i +ha p +h ells +ðŁĸ ¥ +ภ· +thelast jedi +oh wx +ðŁį « +wa hoo +there se +rec aps +ss nhq +bird photography +v ay +pet ti +pau lo +bel vedere +( * +gr l +du vet +c pec +sa it +por sch +meas urable +avi ators +fre mantle +bre en +on om +me and +life saving +eu ref +en don +embar as +aira sia +el is +dun kin +star magic +s ill +porto bello +ki efer +ex e +mu ted +ãģ ¦ +we thepeople +logi a +liber al +theforce awakens +min ed +haun ts +freck les +care taker +s india +âķ IJ +dev lin +list on +direction er +oh n +fi garo +em manuel +du bois +cl ones +bru ise +ðŁİĪ ðŁİī +disin fe +der matology +as r +s watch +dis comfort +tam anna +pi day +mack en +k atic +delu sional +shaw nee +gu d +al bino +p ali +din gh +cucu mbers +coffe y +anticip ating +treas ured +web summit +shel tered +sav or +pedago gy +m gs +sh ma +s bu +den ali +cam pos +bubble gum +o ir +le aps +y ler +r one +sansk rit +min t +meat less +futuri st +du de +a vel +prote sted +squ ire +z aki +sz n +har court +cycl one +bour dain +gather ings +d ant +advent urer +parag on +alt man +dd ing +ban erjee +snorkel ing +mother well +mis sy +en der +glo ws +ki wis +chick pea +por o +e fron +app t +u y +speci fied +gab by +e strada +com bos +bour bon +vin i +var un +steph ani +key words +car vings +amit abh +wr ought +tw al +re els +clu bbing +ubi quit +cri t +ambed kar +æ Ļ +prun ing +vaccin ated +boe ing +s ks +lo ona +hypno sis +edel man +pho l +he w +colo sse +mckin sey +u on +to te +sacrific ing +ox i +n ang +e mu +пÑĢи ÑĢода +m th +kers wednesday +argu ed +timel apse +ris king +regul ating +ni gh +likeli hood +cu bic +au ction +rein for +pi stor +no ses +ye l +snu ggles +pe i +jean ette +ta ku +ri th +guy z +ภŀ +y te +ver ted +pay soff +jau regui +hoo ligans +procedu ral +mi b +har dy +el eng +chec kers +all ine +the met +prou dof +keerth yofficial +collabor ator +ni u +infl icted +adv ani +re twee +memor iam +f icial +ti ghter +sal em +re viewers +br ics +ben digo +am ell +tur kish +sush maswar +paul son +pal awan +mol lie +stitch er +s burgh +ir u +hay dn +en ers +aro a +u zzi +saraj evo +hel a +apol lo +nine ty +vac a +sp on +vent u +jel ena +hei fer +avo ids +sp ine +pri ze +mar ist +re creating +me de +woo den +find lay +ro fl +n di +compreh end +yu go +y ü +to work +u fos +son ar +pi ston +recor ding +tent ative +art forsale +pel lets +fre do +ÙĪ Ø± +mu ses +custom ization +pro found +is ner +ide ally +si am +plan kton +cm dr +man ger +fran ken +customiz able +ठ® +walk away +swi vel +vast ly +no ton +lex a +ex moor +z as +tan te +reduc tions +lol ly +hip sters +benef ited +ë ² +ww www +mascul ine +fi ji +dre y +ph ill +ane ous +nic ol +men dez +disapp ro +ch ner +through s +shen mue +east man +ðŁIJ İ +yu ck +under tale +re ys +go beavs +eng en +c na +mer r +bir k +ãģ¨ç¹ĭãģ ĮãĤĬãģŁãģĦ +âĥ£ @ +yn na +ste ed +offen der +at um +vani shing +presi denti +love them +g nocchi +fri ggin +per il +mad hya +ag ne +dee jay +mar nock +m tb +fold able +@ ___ +stand re +bron x +bow ski +fin ite +cro ckett +b sf +ge tit +seren awilliams +mir o +ignati 
us +sla y +rin se +fon due +sel dom +s more +gan i +dy ce +dmit ry +cru mb +late post +pri mark +oh ana +flor als +do a +remembrance day +d ds +azi one +toon ami +air port +æĿ ± +th ad +fi st +dine sh +dr who +ad words +admi rer +pro je +kyrgy z +à « +manife station +le wan +j ic +thi bau +le ased +van ity +nouri shed +never theless +aug mente +fu elled +che ad +wil shere +ru di +p z +my co +mor ro +herbali fe +hardro ck +de man +dre ality +sp ades +ce vic +bha i +bar on +ultimat efan +hou news +to bi +stru t +ke el +affili ation +the masters +sm al +hu e +este ban +con v +om nic +datab ases +co v +ter ti +st g +snoop dogg +metab ol +leth bridge +ðŁı» âĢįâĻĢï¸ı +year ling +residente vil +nws l +iy aki +griez mann +c ous +ðŁĵĿ : +tor ian +sam i +ðŁĶ¥ðŁĶ¥ ðŁĶ¥ðŁĶ¥ðŁĶ¥ +g are +alli ances +whit field +we ther +refin ing +coy i +kra ken +ðŁĺĺ âĿ¤ +singul arity +lil i +h ns +bol dand +waw rinka +misogy ny +lo vers +c q +b dg +ad ona +gar ter +women of +sc d +recogn ising +mun a +str ou +sign alling +lare do +hell boy +alek sand +un available +pedi atric +as in +mer ia +ri shi +futuri sm +w ye +polari zed +e we +pro pel +in forms +cre ase +~ " +arti ston +like for +heidel berg +er ra +life in +len ny +inter rupt +cohe rent +ca z +vick ers +le veled +f bs +cab ins +bu mmed +apost les +we h +ten don +souven irs +infu ri +pier ce +asse t +m las +go th +di ggin +ann as +yl or +th waite +sw el +pan era +mur derers +croo ked +bs go +ac u +a on +re an +one of +ko hl +bloo dh +pest icide +lost dog +fle xing +ëĤ ĺ +su pra +eter nally +ðŁļ Ļ +pa olo +ol an +mom o +is elle +captain marvel +s lou +mistak enly +akhi lesh +mer t +il inan +bu on +bal kan +mir ro +mill en +der ail +dam on +tit i +bi os +re don +pic ard +par te +ðŁ¤ Ł +Ø º +son ics +fir sth +dd c +veg ans +tur ban +ni gan +lot tie +lyn don +star buck +pink floyd +life styles +am ara +a she +r sc +val a +sm er +cw gc +cli ent +buen as +jag an +coo ps +ðŁijij ðŁijij +speci alizes +snag ged +g lar +ben net +wildlife wednesday +bow den +pi k +art in +empor ium +ar l +re ba +pas ser +disappo ints +additi ve +âľĬ ðŁı½ +bay er +missou la +ha skell +comm ences +ni x +ne man +explo ited +plastic surgery +cc d +aso cial +vo t +sie gel +fro ome +kap am +far a +e ha +pro bes +mw f +meet ing +p bb +ak ins +mistle toe +kingdom hearts +for kids +ec r +bal e +escor ts +adidas originals +k wa +k ts +hallo ffame +ðŁĺį . 
+wag s +pot ted +o wing +honey comb +he fty +uro logy +mer le +b pd +stri pping +re ich +k state +gu ay +yon ge +shak ti +g loom +bat t +son om +n ery +el ba +blan ks +hel le +triple ts +bom bay +ak arta +ab ia +transm itted +rol f +ja is +angular js +fi erc +m ss +trac e +ॠĩ +tom bs +old man +kom bucha +fo l +e health +cere als +are lli +in ari +ðŁĴ © +wo l +liber ties +fa wn +af firm +nun avut +hyster ical +k drama +art es +âĢ¢âĢ¢âĢ¢âĢ¢ âĢ¢âĢ¢âĢ¢âĢ¢ +valent in +man slaughter +gal es +eo in +energi zed +del s +with draws +st les +sar castic +ram esh +incredi bles +lock hart +ya wn +ultimatefan live +oooooooo oooooooo +mu en +guru dev +te er +pe eling +new snow +lingui stics +direc tv +ag end +uni lever +ru ger +han dedly +ero se +li mel +the c +royal ties +fini shers +nr g +m gt +fid get +com ps +bac on +aggre ssively +ab it +ch â +tar de +slu gger +q anda +gre ening +d ats +ensla ved +spec tor +o ye +fre ef +b hand +stop brexit +mis conceptions +cav a +ðŁĺįðŁĺįðŁĺįðŁĺį ðŁĺįðŁĺįðŁĺįðŁĺį +multit asking +hou sel +ferre ira +cen time +ank les +jo dh +hel ly +fro me +out tuesday +nar nia +bal aji +l bloggers +jyo ti +ðŁį ĩ +lan cia +cap ri +y ap +nat ash +down fall +." âĢĶ +à ® +ligam ent +coat ings +ai ded +hi ko +fall ing +encryp ted +yeg food +infringe ment +cu di +ce p +ðŁĺį ðŁĺĤ +tra d +super rugby +ed win +wh iche +vi meo +lay ne +in vigor +he he +dubrov nik +bie ber +u tr +sham an +op ers +ham ill +en ig +di f +ar um +scrap book +min h +diver gence +mckin non +life time +guter res +wil le +ple as +patt y +mic ron +k z +dom aine +ru sher +m ds +ches ney +screw driver +âģ© , +sle dge +hau er +chan a +stam ina +sprink ler +pl n +he ff +bol ton +om on +car rington +accor dion +jor ge +inter ception +in puts +gu ll +tran scription +vanu atu +it ical +eth os +tic h +spac ey +pee king +u mi +ha ger +psycho tic +illi an +illi a +bonnar oo +an ese +pu c +laghate parth +en hall +econom ical +dre dge +% - +u we +tu bular +scoun cil +pe asants +fl er +tumb ler +he p +ford ham +row ley +initi als +ev asion +er nation +plu gins +coch ran +c attle +acid ity +ðŁİĬ ðŁİī +re grann +jump man +ef ace +x ma +patri archy +esco bar +cristi an +tip ton +nu eva +hack ney +back seat +kill arney +aid an +sta dion +simul taneous +ida ho +a je +u th +figu re +clo s +bur k +volun tar +rec ite +macfar lane +cur few +bou do +w gn +sti x +sla p +scrat ched +philli p +jour ne +ex pelled +wa z +u ke +tati ana +ou e +ho pp +dimit ri +ðŁĵ £ +mato logist +electri fying +blu ffs +bill smafia +az cardinals +y aa +x mas +shar a +r ith +g ills +dre s +bar ton +authori zation +imperi alism +home of +to do +foot path +band width +visit spain +moh sin +erup ted +mi ki +insig nia +mike l +ss h +ger a +bank holiday +aw an +t weak +star craft +e al +construc tion +skelet ons +le ep +ine m +bar clay +ship wreck +monsi eur +yo h +ron t +form ative +ser o +le p +horse man +hoo sier +haz mat +cylin ders +cen ti +ðŁĴ¥ðŁĴ¥ ðŁĴ¥ +re em +na ire +mus ically +gras shopper +est onian +termin ology +ro main +blogger rt +tox in +stan ce +cultiv ated +an ast +ðŁIJ į +shi mano +go pher +ene i +recycla ble +gam ification +fight for +c q +avoc ados +ke ys +eli ke +gly cer +shak ur +mobili zation +gal ley +expla in +ex changed +pe th +obe dience +illa ge +en nis +ãĥ ŀ +wi v +walla bies +ma ar +ig ers +fin tech +fin alized +wo j +meaning less +in field +onna ise +e et +bron te +pass ages +ðŁij § +strick land +northern lights +lom ond +h tc +wr ay +shi fter +di alog +ðŁį į +>> >>>> +te atime +ste ch +sic huan +qu ill +fran ca +comple mentary +bar 
rington +marcu s +mal am +goo oo +for sa +elec tra +af s +âĹ Ĩ +tri fe +sn azzy +fo lia +and olan +after dark +wood son +stra de +litt lest +o gun +con wy +co wards +ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤðŁĺĤ +íĬ ¸ +se ul +mur phy +dun ks +kapil shar +jo achim +wom ack +equal ity +aver ages +a ine +ðŁ¦ Ī +tac ular +dis ability +u ked +mid century +bar thol +teas ers +tab ern +nj caa +sp out +op i +ku bball +bl om +so ar +popu lism +meth yl +ðŁijĬ ðŁı¼ +o spre +alo ils +ðŁĵ ĸ +ðŁĮ ļ +x er +sp illing +publ ica +car dam +adi sh +sa cha +p kg +bu da +lyric ist +i bc +gru mp +ho ver +hal ep +anti body +anem one +âĻ¥âĻ¥ âĻ¥âĻ¥ +m cl +litho graph +cc u +s fest +path ic +calli ster +otta wa +gun sn +rut ger +hali but +en vision +differenti ate +ðŁļĢ ðŁļĢ +pir an +lat el +uc n +trou bad +ra ine +fierc ely +learn english +lea se +wex mondays +em it +dray ton +bur rell +scuba diving +hol ler +dr u +clo cked +w ral +ap ro +trans lucent +w bo +patri arch +mo ja +lan nister +fish ery +ne derland +mil dly +mi rai +ma ko +ja p +ðŁĺ©ðŁĺ© ðŁĺ© +pro statec +p anna +ar ama +under taking +tomp kins +ne op +soli ds +sav oury +e ames +cut lery +wood bridge +steam er +ri zzo +wild cat +rat na +lamin ated +kin eni +jal ap +ai des +acknowle dges +?! ?!?! +! ðŁİī +w afc +mag gio +ha ves +dar je +of i +gr il +v asi +bru x +mo hd +fake speare +arn old +r mb +for be +wal leye +ro di +therapeu tics +strate gi +ob ste +mu dder +download able +dd ings +d ca +asi angames +campe on +appropri ation +th century +ram atta +dra ped +bul lion +mu c +one x +se greg +ophel ia +bod ily +âĿ¤ ðŁĺį +wi zar +te ased +ade my +to id +sur a +lazar us +sn ickers +ma se +lo h +bow ed +bibli o +x change +har lan +gho shal +flavor ful +bha gat +alle z +whiche ver +ten stein +disc er +organ iser +mt g +dream liner +t se +hok kaido +mo k +indulg ent +hick man +blin ded +al yn +aaa ah +sp ool +lough borough +inter pret +et v +aristo tle +optimi zing +avici i +madu rai +ju li +naw az +mat chups +ab ide +paint ing +w elling +vel i +octag on +in scribed +po king +plac er +life cycle +kili g +g sp +eli ves +cle ments +na sheed +me sut +incarcer ated +dist illed +wal ang +delic acy +del gado +che z +ch ita +ad ero +tu x +pati l +o do +abh cosmetics +tv c +p bc +in accurate +hardwork paysoff +ball er +quot ation +merchandi sing +ga stri +defen ses +dro gba +bex hill +ban kno +win ona +si eg +p gs +hahah ha +agu chi +su bram +mirac le +de sch +li bre +ba cher +ent ine +bbcra di +lou dest +r ps +pi erc +fr yer +storm trooper +rafael nadal +pas co +exhau stion +epic onetsy +rc tid +kel lie +ga ines +d bz +sm riti +s bridge +lim ited +cla w +technic al +bio graphical +ado red +ภ° +exclu de +ac adia +key boards +fur man +so ca +sur u +ni ps +sw aps +server less +run e +pu ffy +north ampton +nish ings +hen der +cartri dges +gun shot +ðŁĵ ¹ +fil ament +respon dents +pey ton +mountaine er +mer ging +life span +intimid ation +p afc +nl wx +expan sive +pur r +f ck +ca e +at ti +tele thon +so hn +mend el +lo pes +dor i +un broken +te red +tast ings +in active +disin tegr +t assel +share the +pi ano +is lay +air space +z awa +ricci ardo +ming ton +fresh er +cur ry +re vs +pharo ah +h mv +exhilar ating +wh oo +lin kin +kri spy +competen cy +ste wards +ne bu +kat su +ad mins +baz ar +as ar +giving back +s summit +song z +lin us +raj kumar +farm ington +fanta sia +ðŁĺ´ ðŁĺ´ +so bri +lis se +barry more +pri sm +blo b +sen ew +mono xide +exp ire +eigh teen +di pper +xi ao +kil t +hin ch +bbc sport +bam boo +p ter +ex al +ðŁ¦ ĭ +ham lin +expe ditions +star gazing +food security +wy 
lie +ul f +st ingly +on storm +lo eb +bro ome +bn ha +pancre atic +eli ve +!!!!!!!! !!! +ther apper +ortho pedic +avengers endgame +antit rust +ìļ ° +go te +om d +off side +gy llen +win eries +white water +ad l +lu pita +exce eds +consi sted +chew bacca +ash leigh +nhl jets +is san +sh ld +hay at +cran berries +ð٤ĺ ðŁı½ +rock the +spring training +fall out +dairy free +wa j +un decided +so wn +rc n +north wales +htt r +fu mble +d its +comp elled +popu list +min ted +blan chett +. '' +pro pulsion +m illa +au berg +her tz +h ta +u daipur +serendip ity +azte cs +als ace +ðŁIJ ij +lu n +sho es +char li +gar za +ðŁĴ Ł +pro biotics +fox tv +ol is +mi ff +loc alized +diffu ser +si gue +fun ko +rend ous +ðŁĴ ij +jeky ll diff --git a/ComfyUI-SUPIR/configs/tokenizer/preprocessor_config.json b/ComfyUI-SUPIR/configs/tokenizer/preprocessor_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5a12a1eb250987a4eee0e3e7d7338c4b22724be1 --- /dev/null +++ b/ComfyUI-SUPIR/configs/tokenizer/preprocessor_config.json @@ -0,0 +1,19 @@ +{ + "crop_size": 224, + "do_center_crop": true, + "do_normalize": true, + "do_resize": true, + "feature_extractor_type": "CLIPFeatureExtractor", + "image_mean": [ + 0.48145466, + 0.4578275, + 0.40821073 + ], + "image_std": [ + 0.26862954, + 0.26130258, + 0.27577711 + ], + "resample": 3, + "size": 224 +} diff --git a/ComfyUI-SUPIR/configs/tokenizer/special_tokens_map.json b/ComfyUI-SUPIR/configs/tokenizer/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..9bfb42aa97dcd61e89f279ccaee988bccb4fabae --- /dev/null +++ b/ComfyUI-SUPIR/configs/tokenizer/special_tokens_map.json @@ -0,0 +1 @@ +{"bos_token": {"content": "<|startoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "eos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "unk_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "pad_token": "<|endoftext|>"} \ No newline at end of file diff --git a/ComfyUI-SUPIR/configs/tokenizer/tokenizer.json b/ComfyUI-SUPIR/configs/tokenizer/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..580c79c6862f31d1f9bd08dd1a415ba0d0502cd9 --- /dev/null +++ b/ComfyUI-SUPIR/configs/tokenizer/tokenizer.json @@ -0,0 +1,98393 @@ +{ + "version": "1.0", + "truncation": null, + "padding": null, + "added_tokens": [ + { + "id": 49406, + "content": "<|startoftext|>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": true, + "special": true + }, + { + "id": 49407, + "content": "<|endoftext|>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + } + ], + "normalizer": { + "type": "Sequence", + "normalizers": [ + { + "type": "NFC" + }, + { + "type": "Replace", + "pattern": { + "Regex": "\\s+" + }, + "content": " " + }, + { + "type": "Lowercase" + } + ] + }, + "pre_tokenizer": { + "type": "Sequence", + "pretokenizers": [ + { + "type": "Split", + "pattern": { + "Regex": "'s|'t|'re|'ve|'m|'ll|'d|[\\p{L}]+|[\\p{N}]|[^\\s\\p{L}\\p{N}]+" + }, + "behavior": "Removed", + "invert": true + }, + { + "type": "ByteLevel", + "add_prefix_space": false, + "trim_offsets": true + } + ] + }, + "post_processor": { + "type": "RobertaProcessing", + "sep": [ + "<|endoftext|>", + 49407 + ], + "cls": [ + "<|startoftext|>", + 49406 + ], + "trim_offsets": false, + "add_prefix_space": false + }, + 
"decoder": { + "type": "ByteLevel", + "add_prefix_space": true, + "trim_offsets": true + }, + "model": { + "type": "BPE", + "dropout": null, + "unk_token": "<|endoftext|>", + "continuing_subword_prefix": "", + "end_of_word_suffix": "", + "fuse_unk": false, + "vocab": { + "!": 0, + "\"": 1, + "#": 2, + "$": 3, + "%": 4, + "&": 5, + "'": 6, + "(": 7, + ")": 8, + "*": 9, + "+": 10, + ",": 11, + "-": 12, + ".": 13, + "/": 14, + "0": 15, + "1": 16, + "2": 17, + "3": 18, + "4": 19, + "5": 20, + "6": 21, + "7": 22, + "8": 23, + "9": 24, + ":": 25, + ";": 26, + "<": 27, + "=": 28, + ">": 29, + "?": 30, + "@": 31, + "A": 32, + "B": 33, + "C": 34, + "D": 35, + "E": 36, + "F": 37, + "G": 38, + "H": 39, + "I": 40, + "J": 41, + "K": 42, + "L": 43, + "M": 44, + "N": 45, + "O": 46, + "P": 47, + "Q": 48, + "R": 49, + "S": 50, + "T": 51, + "U": 52, + "V": 53, + "W": 54, + "X": 55, + "Y": 56, + "Z": 57, + "[": 58, + "\\": 59, + "]": 60, + "^": 61, + "_": 62, + "`": 63, + "a": 64, + "b": 65, + "c": 66, + "d": 67, + "e": 68, + "f": 69, + "g": 70, + "h": 71, + "i": 72, + "j": 73, + "k": 74, + "l": 75, + "m": 76, + "n": 77, + "o": 78, + "p": 79, + "q": 80, + "r": 81, + "s": 82, + "t": 83, + "u": 84, + "v": 85, + "w": 86, + "x": 87, + "y": 88, + "z": 89, + "{": 90, + "|": 91, + "}": 92, + "~": 93, + "¡": 94, + "¢": 95, + "£": 96, + "¤": 97, + "¥": 98, + "¦": 99, + "§": 100, + "¨": 101, + "©": 102, + "ª": 103, + "«": 104, + "¬": 105, + "®": 106, + "¯": 107, + "°": 108, + "±": 109, + "²": 110, + "³": 111, + "´": 112, + "µ": 113, + "¶": 114, + "·": 115, + "¸": 116, + "¹": 117, + "º": 118, + "»": 119, + "¼": 120, + "½": 121, + "¾": 122, + "¿": 123, + "À": 124, + "Á": 125, + "Â": 126, + "Ã": 127, + "Ä": 128, + "Å": 129, + "Æ": 130, + "Ç": 131, + "È": 132, + "É": 133, + "Ê": 134, + "Ë": 135, + "Ì": 136, + "Í": 137, + "Î": 138, + "Ï": 139, + "Ð": 140, + "Ñ": 141, + "Ò": 142, + "Ó": 143, + "Ô": 144, + "Õ": 145, + "Ö": 146, + "×": 147, + "Ø": 148, + "Ù": 149, + "Ú": 150, + "Û": 151, + "Ü": 152, + "Ý": 153, + "Þ": 154, + "ß": 155, + "à": 156, + "á": 157, + "â": 158, + "ã": 159, + "ä": 160, + "å": 161, + "æ": 162, + "ç": 163, + "è": 164, + "é": 165, + "ê": 166, + "ë": 167, + "ì": 168, + "í": 169, + "î": 170, + "ï": 171, + "ð": 172, + "ñ": 173, + "ò": 174, + "ó": 175, + "ô": 176, + "õ": 177, + "ö": 178, + "÷": 179, + "ø": 180, + "ù": 181, + "ú": 182, + "û": 183, + "ü": 184, + "ý": 185, + "þ": 186, + "ÿ": 187, + "Ā": 188, + "ā": 189, + "Ă": 190, + "ă": 191, + "Ą": 192, + "ą": 193, + "Ć": 194, + "ć": 195, + "Ĉ": 196, + "ĉ": 197, + "Ċ": 198, + "ċ": 199, + "Č": 200, + "č": 201, + "Ď": 202, + "ď": 203, + "Đ": 204, + "đ": 205, + "Ē": 206, + "ē": 207, + "Ĕ": 208, + "ĕ": 209, + "Ė": 210, + "ė": 211, + "Ę": 212, + "ę": 213, + "Ě": 214, + "ě": 215, + "Ĝ": 216, + "ĝ": 217, + "Ğ": 218, + "ğ": 219, + "Ġ": 220, + "ġ": 221, + "Ģ": 222, + "ģ": 223, + "Ĥ": 224, + "ĥ": 225, + "Ħ": 226, + "ħ": 227, + "Ĩ": 228, + "ĩ": 229, + "Ī": 230, + "ī": 231, + "Ĭ": 232, + "ĭ": 233, + "Į": 234, + "į": 235, + "İ": 236, + "ı": 237, + "IJ": 238, + "ij": 239, + "Ĵ": 240, + "ĵ": 241, + "Ķ": 242, + "ķ": 243, + "ĸ": 244, + "Ĺ": 245, + "ĺ": 246, + "Ļ": 247, + "ļ": 248, + "Ľ": 249, + "ľ": 250, + "Ŀ": 251, + "ŀ": 252, + "Ł": 253, + "ł": 254, + "Ń": 255, + "!": 256, + "\"": 257, + "#": 258, + "$": 259, + "%": 260, + "&": 261, + "'": 262, + "(": 263, + ")": 264, + "*": 265, + "+": 266, + ",": 267, + "-": 268, + ".": 269, + "/": 270, + "0": 271, + "1": 272, + "2": 273, + "3": 274, + "4": 275, + "5": 276, + "6": 277, + "7": 278, + "8": 279, + "9": 280, + ":": 281, + 
";": 282, + "<": 283, + "=": 284, + ">": 285, + "?": 286, + "@": 287, + "A": 288, + "B": 289, + "C": 290, + "D": 291, + "E": 292, + "F": 293, + "G": 294, + "H": 295, + "I": 296, + "J": 297, + "K": 298, + "L": 299, + "M": 300, + "N": 301, + "O": 302, + "P": 303, + "Q": 304, + "R": 305, + "S": 306, + "T": 307, + "U": 308, + "V": 309, + "W": 310, + "X": 311, + "Y": 312, + "Z": 313, + "[": 314, + "\\": 315, + "]": 316, + "^": 317, + "_": 318, + "`": 319, + "a": 320, + "b": 321, + "c": 322, + "d": 323, + "e": 324, + "f": 325, + "g": 326, + "h": 327, + "i": 328, + "j": 329, + "k": 330, + "l": 331, + "m": 332, + "n": 333, + "o": 334, + "p": 335, + "q": 336, + "r": 337, + "s": 338, + "t": 339, + "u": 340, + "v": 341, + "w": 342, + "x": 343, + "y": 344, + "z": 345, + "{": 346, + "|": 347, + "}": 348, + "~": 349, + "¡": 350, + "¢": 351, + "£": 352, + "¤": 353, + "¥": 354, + "¦": 355, + "§": 356, + "¨": 357, + "©": 358, + "ª": 359, + "«": 360, + "¬": 361, + "®": 362, + "¯": 363, + "°": 364, + "±": 365, + "²": 366, + "³": 367, + "´": 368, + "µ": 369, + "¶": 370, + "·": 371, + "¸": 372, + "¹": 373, + "º": 374, + "»": 375, + "¼": 376, + "½": 377, + "¾": 378, + "¿": 379, + "À": 380, + "Á": 381, + "Â": 382, + "Ã": 383, + "Ä": 384, + "Å": 385, + "Æ": 386, + "Ç": 387, + "È": 388, + "É": 389, + "Ê": 390, + "Ë": 391, + "Ì": 392, + "Í": 393, + "Î": 394, + "Ï": 395, + "Ð": 396, + "Ñ": 397, + "Ò": 398, + "Ó": 399, + "Ô": 400, + "Õ": 401, + "Ö": 402, + "×": 403, + "Ø": 404, + "Ù": 405, + "Ú": 406, + "Û": 407, + "Ü": 408, + "Ý": 409, + "Þ": 410, + "ß": 411, + "à": 412, + "á": 413, + "â": 414, + "ã": 415, + "ä": 416, + "å": 417, + "æ": 418, + "ç": 419, + "è": 420, + "é": 421, + "ê": 422, + "ë": 423, + "ì": 424, + "í": 425, + "î": 426, + "ï": 427, + "ð": 428, + "ñ": 429, + "ò": 430, + "ó": 431, + "ô": 432, + "õ": 433, + "ö": 434, + "÷": 435, + "ø": 436, + "ù": 437, + "ú": 438, + "û": 439, + "ü": 440, + "ý": 441, + "þ": 442, + "ÿ": 443, + "Ā": 444, + "ā": 445, + "Ă": 446, + "ă": 447, + "Ą": 448, + "ą": 449, + "Ć": 450, + "ć": 451, + "Ĉ": 452, + "ĉ": 453, + "Ċ": 454, + "ċ": 455, + "Č": 456, + "č": 457, + "Ď": 458, + "ď": 459, + "Đ": 460, + "đ": 461, + "Ē": 462, + "ē": 463, + "Ĕ": 464, + "ĕ": 465, + "Ė": 466, + "ė": 467, + "Ę": 468, + "ę": 469, + "Ě": 470, + "ě": 471, + "Ĝ": 472, + "ĝ": 473, + "Ğ": 474, + "ğ": 475, + "Ġ": 476, + "ġ": 477, + "Ģ": 478, + "ģ": 479, + "Ĥ": 480, + "ĥ": 481, + "Ħ": 482, + "ħ": 483, + "Ĩ": 484, + "ĩ": 485, + "Ī": 486, + "ī": 487, + "Ĭ": 488, + "ĭ": 489, + "Į": 490, + "į": 491, + "İ": 492, + "ı": 493, + "IJ": 494, + "ij": 495, + "Ĵ": 496, + "ĵ": 497, + "Ķ": 498, + "ķ": 499, + "ĸ": 500, + "Ĺ": 501, + "ĺ": 502, + "Ļ": 503, + "ļ": 504, + "Ľ": 505, + "ľ": 506, + "Ŀ": 507, + "ŀ": 508, + "Ł": 509, + "ł": 510, + "Ń": 511, + "in": 512, + "th": 513, + "an": 514, + "re": 515, + "ar": 516, + "er": 517, + "the": 518, + "ing": 519, + "ou": 520, + "on": 521, + "st": 522, + "or": 523, + "en": 524, + "on": 525, + "al": 526, + "at": 527, + "er": 528, + "it": 529, + "in": 530, + "to": 531, + "ro": 532, + "is": 533, + "le": 534, + "ic": 535, + "at": 536, + "and": 537, + "ed": 538, + "of": 539, + "ch": 540, + "or": 541, + "es": 542, + "il": 543, + "el": 544, + "st": 545, + "ac": 546, + "om": 547, + "am": 548, + "lo": 549, + "an": 550, + "ay": 551, + "sh": 552, + "ri": 553, + "li": 554, + "ti": 555, + "for": 556, + "ne": 557, + "ðŁ": 558, + "ra": 559, + "ha": 560, + "de": 561, + "ol": 562, + "ve": 563, + "si": 564, + "ur": 565, + "al": 566, + "se": 567, + "'s": 568, + "un": 569, + "di": 570, + "be": 571, + "la": 
572, + "wh": 573, + "oo": 574, + "day": 575, + "en": 576, + "ma": 577, + "no": 578, + "le": 579, + "to": 580, + "our": 581, + "ir": 582, + "gh": 583, + "wit": 584, + "it": 585, + "yo": 586, + "as": 587, + "sp": 588, + "this": 589, + "ts": 590, + "ati": 591, + "you": 592, + "with": 593, + "ad": 594, + "is": 595, + "ab": 596, + "ly": 597, + "we": 598, + "the": 599, + "te": 600, + "as": 601, + "ag": 602, + "vi": 603, + "pp": 604, + "su": 605, + "ho": 606, + "my": 607, + "..": 608, + "bu": 609, + "com": 610, + "se": 611, + "ers": 612, + "me": 613, + "me": 614, + "all": 615, + "con": 616, + "mo": 617, + "ke": 618, + "ge": 619, + "out": 620, + "ent": 621, + "co": 622, + "fe": 623, + "ver": 624, + "ar": 625, + "fro": 626, + "au": 627, + "po": 628, + "ce": 629, + "ght": 630, + "are": 631, + "ss": 632, + "from": 633, + "ch": 634, + "tr": 635, + "oun": 636, + "one": 637, + "by": 638, + "do": 639, + "th": 640, + "wor": 641, + "ere": 642, + "ke": 643, + "pro": 644, + "for": 645, + "ds": 646, + "bo": 647, + "ta": 648, + "we": 649, + "go": 650, + "he": 651, + "ter": 652, + "ing": 653, + "de": 654, + "be": 655, + "ation": 656, + "mor": 657, + "ay": 658, + "ex": 659, + "ill": 660, + "pe": 661, + "ks": 662, + "sc": 663, + "lu": 664, + "fu": 665, + "qu": 666, + "ver": 667, + "ðŁĺ": 668, + "ju": 669, + "mu": 670, + "ate": 671, + "and": 672, + "ve": 673, + "king": 674, + "mar": 675, + "op": 676, + "hi": 677, + "...": 678, + "pre": 679, + "ad": 680, + "ru": 681, + "that": 682, + "jo": 683, + "of": 684, + "ce": 685, + "new": 686, + "am": 687, + "ap": 688, + "gre": 689, + "ss": 690, + "du": 691, + "now": 692, + "ye": 693, + "ting": 694, + "your": 695, + "ity": 696, + "ni": 697, + "ci": 698, + "par": 699, + "gu": 700, + "fi": 701, + "af": 702, + "per": 703, + "ter": 704, + "up": 705, + "so": 706, + "gi": 707, + "ons": 708, + "gr": 709, + "ge": 710, + "br": 711, + "pl": 712, + "'t": 713, + "mi": 714, + "ine": 715, + "wee": 716, + "bi": 717, + "us": 718, + "sho": 719, + "have": 720, + "today": 721, + "av": 722, + "man": 723, + "ent": 724, + "ack": 725, + "ure": 726, + "our": 727, + "âĢ": 728, + "cu": 729, + "ld": 730, + "loo": 731, + "im": 732, + "ice": 733, + "som": 734, + "fin": 735, + "red": 736, + "ren": 737, + "ood": 738, + "was": 739, + "tion": 740, + "pi": 741, + "ir": 742, + "ther": 743, + "ty": 744, + "ph": 745, + "ard": 746, + "ec": 747, + "!!": 748, + "mon": 749, + "more": 750, + "will": 751, + "tra": 752, + "can": 753, + "col": 754, + "pu": 755, + "te": 756, + "wn": 757, + "mb": 758, + "so": 759, + "iti": 760, + "just": 761, + "ning": 762, + "here": 763, + "tu": 764, + "pa": 765, + "pr": 766, + "but": 767, + "what": 768, + "ally": 769, + "fir": 770, + "min": 771, + "ca": 772, + "ant": 773, + "sa": 774, + "ted": 775, + "ev": 776, + "ment": 777, + "fa": 778, + "get": 779, + "ame": 780, + "about": 781, + "gra": 782, + "not": 783, + "happ": 784, + "ays": 785, + "man": 786, + "his": 787, + "time": 788, + "like": 789, + "gh": 790, + "has": 791, + "than": 792, + "love": 793, + "art": 794, + "ste": 795, + "ding": 796, + "he": 797, + "cre": 798, + "ws": 799, + "wat": 800, + "der": 801, + "ite": 802, + "ser": 803, + "ace": 804, + "age": 805, + "end": 806, + "str": 807, + "aw": 808, + "stor": 809, + "re": 810, + "car": 811, + "ell": 812, + "all": 813, + "ps": 814, + "fri": 815, + "pho": 816, + "por": 817, + "do": 818, + "ak": 819, + "wi": 820, + "fre": 821, + "who": 822, + "shi": 823, + "boo": 824, + "son": 825, + "ell": 826, + "when": 827, + "ill": 828, + "how": 829, + "great": 830, + "win": 831, + "el": 832, + 
"bl": 833, + "ssi": 834, + "ali": 835, + "some": 836, + "ðŁĴ": 837, + "ton": 838, + "der": 839, + "les": 840, + "pla": 841, + "ï¸": 842, + "ed": 843, + "sch": 844, + "hu": 845, + "ong": 846, + "don": 847, + "ki": 848, + "sh": 849, + "ann": 850, + "cor": 851, + "..": 852, + "ound": 853, + "az": 854, + "ine": 855, + "ary": 856, + "ful": 857, + "stu": 858, + "ould": 859, + "sti": 860, + "go": 861, + "see": 862, + "able": 863, + "ars": 864, + "ll": 865, + "mis": 866, + "ber": 867, + "ck": 868, + "wa": 869, + "ents": 870, + "no": 871, + "sig": 872, + "fe": 873, + "first": 874, + "et": 875, + "spe": 876, + "ack": 877, + "if": 878, + "ous": 879, + "'m": 880, + "ster": 881, + "app": 882, + "ang": 883, + "ance": 884, + "ans": 885, + "good": 886, + "bre": 887, + "ever": 888, + "they": 889, + "tic": 890, + "come": 891, + "off": 892, + "back": 893, + "ase": 894, + "ings": 895, + "old": 896, + "ight": 897, + "fo": 898, + "her": 899, + "happy": 900, + "pic": 901, + "its": 902, + "ving": 903, + "us": 904, + "mat": 905, + "hom": 906, + "dy": 907, + "em": 908, + "sk": 909, + "ying": 910, + "their": 911, + "led": 912, + "ry": 913, + "ul": 914, + "har": 915, + "ck": 916, + "ton": 917, + "onal": 918, + "hel": 919, + "ric": 920, + "bir": 921, + "vie": 922, + "way": 923, + "tri": 924, + "da": 925, + "ple": 926, + "bro": 927, + "sto": 928, + "ool": 929, + "night": 930, + "tru": 931, + "ba": 932, + "read": 933, + "res": 934, + "year": 935, + "fr": 936, + "tor": 937, + "als": 938, + "coun": 939, + "cla": 940, + "ture": 941, + "vel": 942, + "ated": 943, + "lec": 944, + "end": 945, + "thing": 946, + "vo": 947, + "ici": 948, + "best": 949, + "can": 950, + "work": 951, + "last": 952, + "after": 953, + "ence": 954, + "pri": 955, + "pe": 956, + "es": 957, + "il": 958, + "â̦": 959, + "dre": 960, + "ys": 961, + "over": 962, + "ies": 963, + "ðŁij": 964, + "comm": 965, + "tw": 966, + "ink": 967, + "sun": 968, + "cl": 969, + "life": 970, + "tt": 971, + "ach": 972, + "land": 973, + "sy": 974, + "tre": 975, + "tal": 976, + "pol": 977, + "sm": 978, + "duc": 979, + "sal": 980, + "ft": 981, + "'re": 982, + "che": 983, + "war": 984, + "tur": 985, + "ations": 986, + "ach": 987, + "ms": 988, + "ile": 989, + "pm": 990, + "ough": 991, + "ate": 992, + "star": 993, + "week": 994, + "!!!": 995, + "clu": 996, + "there": 997, + "ner": 998, + "tom": 999, + "sel": 1000, + "ï¸ı": 1001, + "world": 1002, + "ves": 1003, + "cam": 1004, + "got": 1005, + "inter": 1006, + "off": 1007, + "um": 1008, + "tonight": 1009, + "other": 1010, + "hou": 1011, + "look": 1012, + "je": 1013, + "id": 1014, + "sion": 1015, + "beau": 1016, + "att": 1017, + "eli": 1018, + "ort": 1019, + "rec": 1020, + "ff": 1021, + "ster": 1022, + "supp": 1023, + "gen": 1024, + "been": 1025, + "ily": 1026, + "team": 1027, + "mm": 1028, + "ic": 1029, + "peop": 1030, + "itt": 1031, + "ats": 1032, + "only": 1033, + "mber": 1034, + "eng": 1035, + "bri": 1036, + "mp": 1037, + "know": 1038, + "bur": 1039, + "bar": 1040, + "ins": 1041, + "low": 1042, + "she": 1043, + "row": 1044, + "âĿ": 1045, + "tro": 1046, + "people": 1047, + "via": 1048, + "low": 1049, + "aga": 1050, + "bet": 1051, + "xt": 1052, + "fac": 1053, + "char": 1054, + "ear": 1055, + "wal": 1056, + "sen": 1057, + "fam": 1058, + "ble": 1059, + "nati": 1060, + "ish": 1061, + "nor": 1062, + "game": 1063, + "live": 1064, + "sco": 1065, + "ley": 1066, + "don": 1067, + "ick": 1068, + "ball": 1069, + "very": 1070, + "these": 1071, + "pan": 1072, + "ia": 1073, + "ating": 1074, + "cr": 1075, + "are": 1076, + "gir": 1077, + "make": 1078, 
+ "stre": 1079, + "show": 1080, + ".\"": 1081, + "fl": 1082, + "up": 1083, + "dr": 1084, + "thanks": 1085, + "illi": 1086, + "wom": 1087, + "sts": 1088, + "ig": 1089, + "sur": 1090, + "every": 1091, + "cur": 1092, + "view": 1093, + "let": 1094, + "into": 1095, + "most": 1096, + "na": 1097, + "indi": 1098, + "gar": 1099, + "had": 1100, + "sou": 1101, + "ved": 1102, + "ant": 1103, + "ition": 1104, + "made": 1105, + "fol": 1106, + "uni": 1107, + "ited": 1108, + "ðŁı": 1109, + "ical": 1110, + "thr": 1111, + "ready": 1112, + "chec": 1113, + "dra": 1114, + "kes": 1115, + "book": 1116, + "ep": 1117, + "sic": 1118, + "morning": 1119, + "news": 1120, + "cau": 1121, + "ct": 1122, + "well": 1123, + "anc": 1124, + "photo": 1125, + "than": 1126, + "ors": 1127, + "birth": 1128, + "gg": 1129, + "out": 1130, + "next": 1131, + "some": 1132, + "ening": 1133, + "story": 1134, + "chri": 1135, + "down": 1136, + "home": 1137, + "ffe": 1138, + "free": 1139, + "da": 1140, + "bor": 1141, + "fil": 1142, + "cial": 1143, + "thank": 1144, + "side": 1145, + "lear": 1146, + "que": 1147, + "line": 1148, + "ten": 1149, + "ates": 1150, + "years": 1151, + "my": 1152, + "photo": 1153, + "beauti": 1154, + "right": 1155, + "nu": 1156, + "form": 1157, + "ship": 1158, + "ban": 1159, + "ther": 1160, + "days": 1161, + "gam": 1162, + "ason": 1163, + "gy": 1164, + "ðŁİ": 1165, + "birthday": 1166, + "set": 1167, + "ick": 1168, + "et": 1169, + "still": 1170, + "coming": 1171, + "take": 1172, + "ðŁĩ": 1173, + "bb": 1174, + "sol": 1175, + "son": 1176, + "den": 1177, + "ep": 1178, + "music": 1179, + "them": 1180, + "den": 1181, + "why": 1182, + "foo": 1183, + "cra": 1184, + "amaz": 1185, + "wn": 1186, + "hol": 1187, + "tting": 1188, + "wr": 1189, + "ue": 1190, + "mag": 1191, + "cro": 1192, + "lan": 1193, + "clo": 1194, + "bra": 1195, + "ak": 1196, + "sing": 1197, + "cal": 1198, + "read": 1199, + "'ve": 1200, + "joh": 1201, + "bab": 1202, + "dri": 1203, + "blo": 1204, + "big": 1205, + "eric": 1206, + "int": 1207, + "tor": 1208, + "try": 1209, + "la": 1210, + "leg": 1211, + "house": 1212, + "mic": 1213, + "val": 1214, + "beautiful": 1215, + "litt": 1216, + "check": 1217, + "new": 1218, + "vers": 1219, + "sw": 1220, + "ari": 1221, + "play": 1222, + "her": 1223, + "âĢĵ": 1224, + "win": 1225, + "ma": 1226, + "congr": 1227, + "school": 1228, + "fun": 1229, + ".@": 1230, + "heal": 1231, + "ich": 1232, + "del": 1233, + "where": 1234, + "lon": 1235, + "ket": 1236, + "two": 1237, + "much": 1238, + "watch": 1239, + "ven": 1240, + "ded": 1241, + "ast": 1242, + "ked": 1243, + "bas": 1244, + "going": 1245, + "mp": 1246, + "ever": 1247, + "ways": 1248, + "roo": 1249, + "desig": 1250, + "ly": 1251, + "sed": 1252, + "top": 1253, + "lin": 1254, + "chan": 1255, + "too": 1256, + "iting": 1257, + "dent": 1258, + "ghts": 1259, + "ty": 1260, + "spo": 1261, + "need": 1262, + "blu": 1263, + "inst": 1264, + "being": 1265, + "âĿ¤": 1266, + "wel": 1267, + "ls": 1268, + "him": 1269, + "may": 1270, + "sting": 1271, + "na": 1272, + "ely": 1273, + "little": 1274, + "ga": 1275, + "nat": 1276, + "tomor": 1277, + "mc": 1278, + "hon": 1279, + "want": 1280, + "air": 1281, + "pic": 1282, + "americ": 1283, + "per": 1284, + "less": 1285, + "week": 1286, + "vel": 1287, + "ah": 1288, + "cap": 1289, + "cham": 1290, + "ger": 1291, + "tim": 1292, + "tomorrow": 1293, + "ness": 1294, + "state": 1295, + "hal": 1296, + "serv": 1297, + "ze": 1298, + "os": 1299, + "pat": 1300, + "vis": 1301, + "exc": 1302, + "sin": 1303, + "ff": 1304, + "city": 1305, + "cen": 1306, + "any": 1307, + 
"bel": 1308, + "summ": 1309, + "tin": 1310, + "would": 1311, + "looking": 1312, + "ko": 1313, + "cele": 1314, + "family": 1315, + "mer": 1316, + "pow": 1317, + "help": 1318, + "bus": 1319, + "co": 1320, + "cle": 1321, + "self": 1322, + "ens": 1323, + "ics": 1324, + "tho": 1325, + "ani": 1326, + "cho": 1327, + "lead": 1328, + "bs": 1329, + "twee": 1330, + "think": 1331, + "fore": 1332, + "chil": 1333, + "vide": 1334, + "did": 1335, + "ale": 1336, + "chi": 1337, + "vil": 1338, + "ends": 1339, + "wing": 1340, + "pas": 1341, + "'ll": 1342, + "vol": 1343, + "sa": 1344, + "gs": 1345, + "many": 1346, + "jec": 1347, + "before": 1348, + "graph": 1349, + "ny": 1350, + "uring": 1351, + "wil": 1352, + "dd": 1353, + "buil": 1354, + "fav": 1355, + "sted": 1356, + "tran": 1357, + "ling": 1358, + "oud": 1359, + "dge": 1360, + "fiel": 1361, + "national": 1362, + "sta": 1363, + "cer": 1364, + "were": 1365, + "ina": 1366, + "season": 1367, + "cou": 1368, + "ned": 1369, + "amazing": 1370, + "tions": 1371, + "celebr": 1372, + "ns": 1373, + "ath": 1374, + "head": 1375, + "sday": 1376, + "dar": 1377, + "loc": 1378, + "vin": 1379, + "another": 1380, + "goo": 1381, + "sat": 1382, + "ny": 1383, + "join": 1384, + "pres": 1385, + "ses": 1386, + "sing": 1387, + "ana": 1388, + "ining": 1389, + "....": 1390, + "cour": 1391, + "ï¸ı": 1392, + "act": 1393, + "cause": 1394, + "light": 1395, + "ams": 1396, + "ta": 1397, + "bal": 1398, + "fc": 1399, + "high": 1400, + "offici": 1401, + "tt": 1402, + "christ": 1403, + "dic": 1404, + "day": 1405, + "ral": 1406, + "hor": 1407, + ":)": 1408, + "visi": 1409, + "nam": 1410, + "ob": 1411, + "mas": 1412, + "ght": 1413, + "really": 1414, + "tun": 1415, + "find": 1416, + "through": 1417, + "port": 1418, + "ut": 1419, + "tive": 1420, + "sty": 1421, + "ne": 1422, + "ore": 1423, + "ðŁĺĤ": 1424, + "support": 1425, + "never": 1426, + "even": 1427, + "ðŁĶ": 1428, + "ha": 1429, + "ya": 1430, + "ld": 1431, + "uk": 1432, + "ran": 1433, + "jam": 1434, + "with": 1435, + "medi": 1436, + "des": 1437, + "ney": 1438, + "ching": 1439, + "ale": 1440, + "hy": 1441, + "kin": 1442, + "!!": 1443, + "dy": 1444, + "place": 1445, + "also": 1446, + "ble": 1447, + "which": 1448, + "black": 1449, + "bli": 1450, + "say": 1451, + "park": 1452, + "play": 1453, + "ire": 1454, + "video": 1455, + "weekend": 1456, + "ail": 1457, + "key": 1458, + "pt": 1459, + "ward": 1460, + "friday": 1461, + "din": 1462, + "iness": 1463, + "gro": 1464, + "ben": 1465, + "always": 1466, + "tball": 1467, + "ago": 1468, + "mil": 1469, + "cy": 1470, + "produc": 1471, + "disc": 1472, + "under": 1473, + "please": 1474, + "spor": 1475, + "full": 1476, + "ey": 1477, + "ðŁĻ": 1478, + "ise": 1479, + "ities": 1480, + "cat": 1481, + "kno": 1482, + "use": 1483, + "fore": 1484, + "ker": 1485, + "art": 1486, + "high": 1487, + "open": 1488, + "san": 1489, + "ef": 1490, + "ours": 1491, + "shed": 1492, + "stri": 1493, + "dro": 1494, + "again": 1495, + "im": 1496, + "ðŁĵ": 1497, + "enjo": 1498, + "fun": 1499, + "getting": 1500, + "pen": 1501, + "ger": 1502, + "cli": 1503, + "any": 1504, + "every": 1505, + "eu": 1506, + "women": 1507, + "âľ": 1508, + "est": 1509, + "could": 1510, + "ry": 1511, + "\"@": 1512, + "thou": 1513, + "sha": 1514, + "commun": 1515, + "ber": 1516, + "dents": 1517, + "dis": 1518, + "while": 1519, + "away": 1520, + "dio": 1521, + "ham": 1522, + "gla": 1523, + "date": 1524, + "ka": 1525, + "miss": 1526, + "unch": 1527, + "won": 1528, + "inf": 1529, + "room": 1530, + "ga": 1531, + "real": 1532, + "exper": 1533, + "direc": 1534, + 
"should": 1535, + "spr": 1536, + "gol": 1537, + "long": 1538, + "better": 1539, + "ori": 1540, + "ey": 1541, + "ience": 1542, + "ils": 1543, + "zz": 1544, + "han": 1545, + "found": 1546, + "vs": 1547, + "âĻ": 1548, + "post": 1549, + "tic": 1550, + "part": 1551, + "men": 1552, + "rence": 1553, + "cess": 1554, + "vic": 1555, + "sil": 1556, + "shop": 1557, + "ðŁĺĤ": 1558, + "food": 1559, + "val": 1560, + "stic": 1561, + "you": 1562, + "says": 1563, + "elec": 1564, + "star": 1565, + "oc": 1566, + "land": 1567, + "id": 1568, + "ction": 1569, + "field": 1570, + "sof": 1571, + "start": 1572, + "water": 1573, + "friends": 1574, + "ones": 1575, + "ðŁĮ": 1576, + "fla": 1577, + "far": 1578, + "white": 1579, + "party": 1580, + "inst": 1581, + "grou": 1582, + "tv": 1583, + "everyone": 1584, + "ment": 1585, + "ja": 1586, + "cha": 1587, + "prin": 1588, + "ants": 1589, + "during": 1590, + "lat": 1591, + "lar": 1592, + "west": 1593, + "then": 1594, + "ka": 1595, + "youn": 1596, + "insp": 1597, + "inte": 1598, + "ween": 1599, + "visit": 1600, + "against": 1601, + "rele": 1602, + "head": 1603, + "ces": 1604, + "town": 1605, + "looks": 1606, + "thre": 1607, + "regi": 1608, + "rent": 1609, + "projec": 1610, + "girl": 1611, + "sear": 1612, + "wo": 1613, + "mom": 1614, + "car": 1615, + "hun": 1616, + "publi": 1617, + "di": 1618, + "ple": 1619, + "call": 1620, + "cri": 1621, + "um": 1622, + "ford": 1623, + "perfe": 1624, + "friend": 1625, + "hard": 1626, + "ssion": 1627, + "test": 1628, + "playing": 1629, + "around": 1630, + "because": 1631, + "kets": 1632, + "meet": 1633, + "satur": 1634, + "arti": 1635, + "work": 1636, + "jun": 1637, + "ven": 1638, + "run": 1639, + "member": 1640, + "port": 1641, + "super": 1642, + "twit": 1643, + "sam": 1644, + "els": 1645, + "tly": 1646, + "adv": 1647, + "ative": 1648, + "ath": 1649, + "sure": 1650, + "avail": 1651, + "lar": 1652, + "squ": 1653, + "ards": 1654, + "event": 1655, + "men": 1656, + "ll": 1657, + "over": 1658, + "logy": 1659, + "ital": 1660, + "times": 1661, + "mal": 1662, + "back": 1663, + "coo": 1664, + "making": 1665, + "stru": 1666, + "âģ": 1667, + "itu": 1668, + "shar": 1669, + "gan": 1670, + "cas": 1671, + "sn": 1672, + "summer": 1673, + "picture": 1674, + "fan": 1675, + "hin": 1676, + "christmas": 1677, + "cy": 1678, + "proud": 1679, + "champi": 1680, + "design": 1681, + "pping": 1682, + "hope": 1683, + "ca": 1684, + "available": 1685, + "may": 1686, + "wed": 1687, + "photograph": 1688, + "special": 1689, + "sale": 1690, + "stop": 1691, + "ery": 1692, + "awe": 1693, + "ality": 1694, + "history": 1695, + "ama": 1696, + "presi": 1697, + "bru": 1698, + "working": 1699, + "done": 1700, + "dr": 1701, + "ken": 1702, + "feat": 1703, + "wood": 1704, + "atest": 1705, + "sunday": 1706, + "movi": 1707, + "vely": 1708, + "sle": 1709, + "face": 1710, + "spec": 1711, + "students": 1712, + "by": 1713, + "ham": 1714, + "spon": 1715, + "business": 1716, + "dat": 1717, + "ie": 1718, + "ip": 1719, + "soci": 1720, + "glo": 1721, + "hand": 1722, + "recor": 1723, + "rs": 1724, + "mee": 1725, + "keep": 1726, + "pur": 1727, + "health": 1728, + "she": 1729, + "comple": 1730, + "god": 1731, + "davi": 1732, + "collec": 1733, + "list": 1734, + "ra": 1735, + "club": 1736, + "ters": 1737, + "inclu": 1738, + "things": 1739, + "plan": 1740, + "âĺ": 1741, + "john": 1742, + "shing": 1743, + "atul": 1744, + "soon": 1745, + "blue": 1746, + "gor": 1747, + "saturday": 1748, + "won": 1749, + "congratul": 1750, + "see": 1751, + "âĿ¤ï¸ı": 1752, + "those": 1753, + "ðŁĺį": 1754, + "final": 1755, + 
"dou": 1756, + "ith": 1757, + "own": 1758, + "road": 1759, + "tour": 1760, + "ast": 1761, + "india": 1762, + "til": 1763, + "nd": 1764, + "fer": 1765, + "favor": 1766, + "sul": 1767, + "learn": 1768, + "fire": 1769, + "just": 1770, + "group": 1771, + "ah": 1772, + "rac": 1773, + "body": 1774, + "ur": 1775, + "care": 1776, + "à¸": 1777, + "plo": 1778, + "oh": 1779, + "pos": 1780, + "give": 1781, + "tech": 1782, + "sub": 1783, + "cent": 1784, + "ering": 1785, + "ym": 1786, + "ility": 1787, + "fic": 1788, + "london": 1789, + "vir": 1790, + "guys": 1791, + "ba": 1792, + "ð٤": 1793, + "baby": 1794, + "scre": 1795, + "ðŁĺį": 1796, + "trump": 1797, + "under": 1798, + "change": 1799, + "ian": 1800, + "colle": 1801, + "sses": 1802, + "ler": 1803, + "ssed": 1804, + "nice": 1805, + "announ": 1806, + "power": 1807, + "sar": 1808, + "aking": 1809, + "mini": 1810, + "sli": 1811, + "swee": 1812, + "kar": 1813, + "ful": 1814, + "cru": 1815, + "action": 1816, + "ather": 1817, + ").": 1818, + "stand": 1819, + "devel": 1820, + "aa": 1821, + "gan": 1822, + "left": 1823, + "lol": 1824, + "rel": 1825, + "trans": 1826, + "ments": 1827, + "int": 1828, + "ef": 1829, + "manag": 1830, + "dig": 1831, + "gener": 1832, + "down": 1833, + "pau": 1834, + "tiv": 1835, + "ku": 1836, + "thur": 1837, + "ken": 1838, + "ston": 1839, + "fans": 1840, + "talk": 1841, + "tweet": 1842, + "too": 1843, + "style": 1844, + "prote": 1845, + "secon": 1846, + "fron": 1847, + "awesome": 1848, + "gl": 1849, + "pal": 1850, + "net": 1851, + "sor": 1852, + "lau": 1853, + "gon": 1854, + "since": 1855, + "tty": 1856, + "series": 1857, + "memor": 1858, + "beli": 1859, + "film": 1860, + "did": 1861, + "dies": 1862, + "ot": 1863, + "congratulations": 1864, + "pra": 1865, + "eve": 1866, + "woo": 1867, + "official": 1868, + "suc": 1869, + "incre": 1870, + "bon": 1871, + "part": 1872, + "pped": 1873, + "class": 1874, + "sive": 1875, + "boy": 1876, + "cul": 1877, + "perfect": 1878, + "tou": 1879, + "dam": 1880, + "welcome": 1881, + "football": 1882, + "hi": 1883, + "pap": 1884, + "wait": 1885, + "ada": 1886, + "congrats": 1887, + "young": 1888, + "excited": 1889, + "rece": 1890, + "jan": 1891, + "va": 1892, + "red": 1893, + "stra": 1894, + "media": 1895, + "'d": 1896, + "does": 1897, + "let": 1898, + "mul": 1899, + "ills": 1900, + "green": 1901, + "mel": 1902, + "toge": 1903, + "future": 1904, + "yester": 1905, + "versity": 1906, + "form": 1907, + "tain": 1908, + "ide": 1909, + "ches": 1910, + "kids": 1911, + "qui": 1912, + "haha": 1913, + "deta": 1914, + "big": 1915, + "favorite": 1916, + "girls": 1917, + "contin": 1918, + "dom": 1919, + "search": 1920, + "ual": 1921, + "air": 1922, + "ders": 1923, + "month": 1924, + "cer": 1925, + "yesterday": 1926, + "community": 1927, + "ade": 1928, + "dog": 1929, + "ville": 1930, + "ices": 1931, + "deli": 1932, + "syste": 1933, + "run": 1934, + "ism": 1935, + "heart": 1936, + "cup": 1937, + "enti": 1938, + "few": 1939, + "president": 1940, + "eds": 1941, + "until": 1942, + "festi": 1943, + "ok": 1944, + "flo": 1945, + "said": 1946, + "ole": 1947, + "med": 1948, + "travel": 1949, + "£": 1950, + "phone": 1951, + "together": 1952, + "fast": 1953, + "lot": 1954, + "games": 1955, + "shir": 1956, + "between": 1957, + "yes": 1958, + "thers": 1959, + "doing": 1960, + "mac": 1961, + "ator": 1962, + "band": 1963, + "follow": 1964, + "project": 1965, + "develop": 1966, + "diffe": 1967, + "confe": 1968, + "speci": 1969, + "cast": 1970, + "ys": 1971, + "board": 1972, + "rd": 1973, + "ial": 1974, + "shoo": 1975, + "ram": 1976, + 
"having": 1977, + "share": 1978, + "follow": 1979, + "one": 1980, + "name": 1981, + "mr": 1982, + "put": 1983, + "discu": 1984, + "ory": 1985, + "came": 1986, + "ous": 1987, + "site": 1988, + "twitter": 1989, + "tb": 1990, + "tit": 1991, + "finally": 1992, + "zed": 1993, + "super": 1994, + "compan": 1995, + "using": 1996, + "alls": 1997, + "list": 1998, + "ris": 1999, + "shot": 2000, + "gal": 2001, + "tar": 2002, + "del": 2003, + "john": 2004, + "âĢĶ": 2005, + "something": 2006, + "ram": 2007, + "intere": 2008, + "whe": 2009, + "bit": 2010, + "ðŁį": 2011, + "street": 2012, + "ound": 2013, + "ai": 2014, + "tickets": 2015, + "movie": 2016, + "real": 2017, + "ky": 2018, + "taking": 2019, + "opp": 2020, + "cc": 2021, + "lam": 2022, + "moun": 2023, + "inve": 2024, + "black": 2025, + "used": 2026, + "online": 2027, + "yor": 2028, + "local": 2029, + "gue": 2030, + "cks": 2031, + "ow": 2032, + "gest": 2033, + "boys": 2034, + "illion": 2035, + "cont": 2036, + "reci": 2037, + "ined": 2038, + "euro": 2039, + "now": 2040, + "seen": 2041, + "ph": 2042, + "teach": 2043, + "def": 2044, + "south": 2045, + "such": 2046, + "award": 2047, + "must": 2048, + "issu": 2049, + "care": 2050, + "feel": 2051, + "plu": 2052, + "latest": 2053, + "sports": 2054, + "web": 2055, + "tex": 2056, + "ement": 2057, + "sk": 2058, + "fic": 2059, + "wan": 2060, + "tech": 2061, + "ot": 2062, + "box": 2063, + "ner": 2064, + "free": 2065, + "tal": 2066, + "ash": 2067, + "case": 2068, + "hot": 2069, + "wonder": 2070, + "meeting": 2071, + "era": 2072, + "chall": 2073, + "ðŁIJ": 2074, + "job": 2075, + "ili": 2076, + "cool": 2077, + "jour": 2078, + "ths": 2079, + "mo": 2080, + "fel": 2081, + "die": 2082, + "micha": 2083, + "ele": 2084, + "team": 2085, + "service": 2086, + "stand": 2087, + "makes": 2088, + "ping": 2089, + "early": 2090, + "comes": 2091, + "ek": 2092, + "holi": 2093, + "vers": 2094, + "ague": 2095, + "sau": 2096, + "three": 2097, + "monday": 2098, + "fashi": 2099, + "someone": 2100, + "thro": 2101, + "sea": 2102, + "bad": 2103, + "suppor": 2104, + "turn": 2105, + "ury": 2106, + "ming": 2107, + "photography": 2108, + "nic": 2109, + "mark": 2110, + "pretty": 2111, + "ssing": 2112, + "watching": 2113, + "memb": 2114, + "arri": 2115, + "county": 2116, + "beach": 2117, + "fran": 2118, + "center": 2119, + "police": 2120, + "bat": 2121, + "public": 2122, + "tan": 2123, + "press": 2124, + "saf": 2125, + "sy": 2126, + "gets": 2127, + "roy": 2128, + "ners": 2129, + "your": 2130, + "buy": 2131, + "sters": 2132, + "show": 2133, + "ased": 2134, + "childre": 2135, + "afric": 2136, + "ines": 2137, + "space": 2138, + "scri": 2139, + "hall": 2140, + "pain": 2141, + "aring": 2142, + "home": 2143, + "mur": 2144, + "health": 2145, + "ched": 2146, + "sand": 2147, + "recei": 2148, + "guy": 2149, + "ea": 2150, + "american": 2151, + "resi": 2152, + "children": 2153, + "--": 2154, + "iri": 2155, + "ington": 2156, + "country": 2157, + "ross": 2158, + "len": 2159, + "anna": 2160, + "books": 2161, + "bc": 2162, + "ece": 2163, + "dom": 2164, + "lovely": 2165, + "kh": 2166, + "pet": 2167, + "gy": 2168, + "gri": 2169, + "stage": 2170, + "office": 2171, + "rock": 2172, + "mon": 2173, + "bay": 2174, + "table": 2175, + "sun": 2176, + "med": 2177, + "thin": 2178, + "lor": 2179, + "flow": 2180, + "(@": 2181, + "university": 2182, + "store": 2183, + "front": 2184, + "good": 2185, + "za": 2186, + "vote": 2187, + "north": 2188, + "hey": 2189, + "anim": 2190, + "order": 2191, + "mid": 2192, + "without": 2193, + "ade": 2194, + "remember": 2195, + "market": 
2196, + "??": 2197, + "mus": 2198, + "training": 2199, + "educ": 2200, + "but": 2201, + "cover": 2202, + "stan": 2203, + "scen": 2204, + "bla": 2205, + "break": 2206, + "lou": 2207, + "same": 2208, + "gold": 2209, + "ain": 2210, + "os": 2211, + "both": 2212, + "lit": 2213, + "vern": 2214, + "ai": 2215, + "albu": 2216, + "pa": 2217, + "enjoy": 2218, + "beg": 2219, + "elling": 2220, + "thursday": 2221, + "info": 2222, + "san": 2223, + "america": 2224, + "hair": 2225, + "tel": 2226, + "march": 2227, + "concer": 2228, + "college": 2229, + "conference": 2230, + "app": 2231, + "hour": 2232, + "chang": 2233, + "âļ": 2234, + "sour": 2235, + "ols": 2236, + "weather": 2237, + "war": 2238, + "phi": 2239, + "festival": 2240, + "second": 2241, + "cute": 2242, + "prac": 2243, + "ener": 2244, + "stry": 2245, + "lea": 2246, + "polit": 2247, + "sav": 2248, + "sen": 2249, + "ow": 2250, + "mi": 2251, + "near": 2252, + "ought": 2253, + "ze": 2254, + "coffe": 2255, + "willi": 2256, + "dan": 2257, + "sey": 2258, + "david": 2259, + "ese": 2260, + "fan": 2261, + "deci": 2262, + "theat": 2263, + "nov": 2264, + "ation": 2265, + "trac": 2266, + "sci": 2267, + "review": 2268, + "cel": 2269, + "em": 2270, + "un": 2271, + "july": 2272, + "orig": 2273, + "tion": 2274, + "dru": 2275, + "former": 2276, + "stay": 2277, + "after": 2278, + "inv": 2279, + "took": 2280, + "data": 2281, + "bal": 2282, + "tues": 2283, + "dan": 2284, + "evening": 2285, + "ðŁĺĤðŁĺĤ": 2286, + "dol": 2287, + "ures": 2288, + "provi": 2289, + "ts": 2290, + "est": 2291, + "sign": 2292, + "jac": 2293, + "uk": 2294, + "song": 2295, + "yet": 2296, + "bow": 2297, + "indu": 2298, + "jap": 2299, + "hoo": 2300, + "point": 2301, + "anyone": 2302, + "zy": 2303, + "ist": 2304, + "hur": 2305, + "ital": 2306, + "building": 2307, + "woman": 2308, + "chur": 2309, + "jer": 2310, + "perfor": 2311, + "coach": 2312, + "league": 2313, + "cess": 2314, + "net": 2315, + "imag": 2316, + "nation": 2317, + "brit": 2318, + "que": 2319, + "awards": 2320, + "ages": 2321, + "works": 2322, + "ced": 2323, + "mance": 2324, + "late": 2325, + "ign": 2326, + "money": 2327, + "true": 2328, + "ii": 2329, + "tell": 2330, + "plac": 2331, + "pac": 2332, + "asy": 2333, + "world": 2334, + "behin": 2335, + "import": 2336, + "reading": 2337, + "gram": 2338, + "giving": 2339, + "met": 2340, + "hit": 2341, + "forward": 2342, + "stom": 2343, + "present": 2344, + "june": 2345, + "social": 2346, + "noon": 2347, + "mart": 2348, + "half": 2349, + "swe": 2350, + "govern": 2351, + "ker": 2352, + "details": 2353, + "lish": 2354, + "__": 2355, + "acy": 2356, + "sia": 2357, + "bert": 2358, + "fall": 2359, + "!!!!": 2360, + "),": 2361, + "thi": 2362, + "diti": 2363, + "sport": 2364, + "king": 2365, + "fit": 2366, + "staf": 2367, + "cat": 2368, + "muse": 2369, + "centr": 2370, + "yer": 2371, + "contro": 2372, + "bloo": 2373, + "walk": 2374, + "actu": 2375, + "didn": 2376, + "lim": 2377, + "learning": 2378, + "research": 2379, + "wedne": 2380, + "auth": 2381, + "hours": 2382, + "ky": 2383, + "far": 2384, + "hen": 2385, + "....": 2386, + "itch": 2387, + "ril": 2388, + "strong": 2389, + "sky": 2390, + "questi": 2391, + "james": 2392, + "ron": 2393, + "dg": 2394, + "fur": 2395, + "cin": 2396, + "does": 2397, + "appro": 2398, + "marke": 2399, + "tures": 2400, + "fully": 2401, + "chat": 2402, + "behind": 2403, + "tem": 2404, + "fini": 2405, + "mission": 2406, + "batt": 2407, + "feel": 2408, + "heav": 2409, + "everything": 2410, + "bar": 2411, + "wish": 2412, + "premi": 2413, + "ima": 2414, + "experience": 2415, + 
"each": 2416, + "report": 2417, + "sweet": 2418, + "tics": 2419, + "spring": 2420, + "respon": 2421, + "system": 2422, + "victor": 2423, + "lin": 2424, + "saw": 2425, + "already": 2426, + "ghter": 2427, + "fle": 2428, + "ãĥ": 2429, + "bring": 2430, + "album": 2431, + "--": 2432, + "ells": 2433, + "stan": 2434, + "tom": 2435, + "international": 2436, + "went": 2437, + "anni": 2438, + "match": 2439, + "pper": 2440, + "stone": 2441, + "small": 2442, + "rain": 2443, + "fashion": 2444, + "area": 2445, + "van": 2446, + "agram": 2447, + "ko": 2448, + "thought": 2449, + "worth": 2450, + "van": 2451, + "mer": 2452, + "coffee": 2453, + "ites": 2454, + "gn": 2455, + "artist": 2456, + "con": 2457, + "arch": 2458, + "cir": 2459, + "secre": 2460, + "ground": 2461, + "iso": 2462, + "hand": 2463, + "com": 2464, + "bridge": 2465, + "hs": 2466, + "xi": 2467, + "link": 2468, + "pul": 2469, + "spl": 2470, + "race": 2471, + "fli": 2472, + "river": 2473, + "gas": 2474, + "disco": 2475, + "dal": 2476, + "player": 2477, + "fit": 2478, + "photos": 2479, + "ity": 2480, + "ok": 2481, + "jor": 2482, + "tra": 2483, + "april": 2484, + "ads": 2485, + "adi": 2486, + "solu": 2487, + "beauty": 2488, + "door": 2489, + "mess": 2490, + "update": 2491, + "alia": 2492, + "scho": 2493, + "ened": 2494, + "moment": 2495, + "scot": 2496, + "science": 2497, + "ior": 2498, + "ties": 2499, + "across": 2500, + "ously": 2501, + "shes": 2502, + "doesn": 2503, + "page": 2504, + "water": 2505, + "million": 2506, + "classi": 2507, + "lic": 2508, + "cast": 2509, + "formation": 2510, + "michael": 2511, + "ello": 2512, + "smo": 2513, + "ints": 2514, + "vision": 2515, + "opening": 2516, + "ldn": 2517, + "austr": 2518, + "tuesday": 2519, + "winner": 2520, + "possi": 2521, + "round": 2522, + "shirt": 2523, + "dit": 2524, + "bo": 2525, + "ues": 2526, + "illed": 2527, + "along": 2528, + "trip": 2529, + "starting": 2530, + "impro": 2531, + "kan": 2532, + "person": 2533, + "not": 2534, + "reco": 2535, + "needs": 2536, + "cle": 2537, + "lie": 2538, + "rest": 2539, + "ring": 2540, + "winter": 2541, + "simp": 2542, + "mom": 2543, + "beer": 2544, + "face": 2545, + "tors": 2546, + "usa": 2547, + "collection": 2548, + "geor": 2549, + "session": 2550, + "trying": 2551, + "las": 2552, + "lake": 2553, + "jen": 2554, + "origin": 2555, + "student": 2556, + "secur": 2557, + "vin": 2558, + "pics": 2559, + "expe": 2560, + "comp": 2561, + "gonna": 2562, + "equ": 2563, + "bad": 2564, + "ley": 2565, + "au": 2566, + "members": 2567, + "break": 2568, + "wall": 2569, + "gic": 2570, + "dinner": 2571, + "bul": 2572, + "inspir": 2573, + "ri": 2574, + "mind": 2575, + "ica": 2576, + "winning": 2577, + "talking": 2578, + "tren": 2579, + "sis": 2580, + "ten": 2581, + "wonderful": 2582, + "snow": 2583, + "hear": 2584, + "thom": 2585, + "nothing": 2586, + "gui": 2587, + "stin": 2588, + "blog": 2589, + "fest": 2590, + "bun": 2591, + "lee": 2592, + "wards": 2593, + "chance": 2594, + "dress": 2595, + "ren": 2596, + "paul": 2597, + "pes": 2598, + "techno": 2599, + "russi": 2600, + "card": 2601, + "east": 2602, + "mari": 2603, + "wine": 2604, + "ti": 2605, + "law": 2606, + "stric": 2607, + "ki": 2608, + "ape": 2609, + "augu": 2610, + "profe": 2611, + "ash": 2612, + "course": 2613, + "mail": 2614, + "rently": 2615, + "dun": 2616, + "mun": 2617, + "love": 2618, + "island": 2619, + "drive": 2620, + "sl": 2621, + "ended": 2622, + "main": 2623, + "lost": 2624, + "nature": 2625, + "âĿ¤ï¸ı": 2626, + "chic": 2627, + "repor": 2628, + "pin": 2629, + "pro": 2630, + "station": 2631, + "cep": 
2632, + "takes": 2633, + "company": 2634, + "goes": 2635, + "ond": 2636, + "mach": 2637, + "radio": 2638, + "dad": 2639, + "rock": 2640, + "ja": 2641, + "pay": 2642, + "champion": 2643, + "ee": 2644, + "inde": 2645, + "tta": 2646, + "atic": 2647, + "tab": 2648, + "believe": 2649, + "energy": 2650, + "zi": 2651, + "tat": 2652, + "word": 2653, + "once": 2654, + "resul": 2655, + "yl": 2656, + "andre": 2657, + "ano": 2658, + "instagram": 2659, + "close": 2660, + "tam": 2661, + "custom": 2662, + "wa": 2663, + "conom": 2664, + "shows": 2665, + "life": 2666, + "kin": 2667, + "rob": 2668, + "tage": 2669, + "nation": 2670, + "almost": 2671, + "listen": 2672, + "save": 2673, + "reli": 2674, + "ace": 2675, + "mary": 2676, + "tree": 2677, + "forget": 2678, + "jack": 2679, + "waiting": 2680, + "director": 2681, + "hill": 2682, + "born": 2683, + "temp": 2684, + "fl": 2685, + "ste": 2686, + "ona": 2687, + "single": 2688, + "wednesday": 2689, + "united": 2690, + "ino": 2691, + "@_": 2692, + "nel": 2693, + "celebrate": 2694, + "ending": 2695, + "deal": 2696, + "ji": 2697, + "canada": 2698, + "huge": 2699, + "track": 2700, + "âĢ¢": 2701, + "fy": 2702, + "fanta": 2703, + "ang": 2704, + "york": 2705, + "release": 2706, + "pun": 2707, + "episo": 2708, + "words": 2709, + "tour": 2710, + "pack": 2711, + "igh": 2712, + "classic": 2713, + "performance": 2714, + "ket": 2715, + "afternoon": 2716, + "record": 2717, + "wins": 2718, + "proble": 2719, + "âĿ¤": 2720, + "four": 2721, + "bed": 2722, + "bank": 2723, + "dance": 2724, + "sla": 2725, + "called": 2726, + "might": 2727, + "ap": 2728, + "past": 2729, + "ðŁļ": 2730, + "different": 2731, + "ite": 2732, + "gift": 2733, + "ssive": 2734, + "church": 2735, + "cus": 2736, + "program": 2737, + "hotel": 2738, + "ice": 2739, + "mad": 2740, + "security": 2741, + "enge": 2742, + "dc": 2743, + "enough": 2744, + "sta": 2745, + "ety": 2746, + "dead": 2747, + "gun": 2748, + "hear": 2749, + "mir": 2750, + "human": 2751, + "gress": 2752, + "ounds": 2753, + "piece": 2754, + "breaking": 2755, + "garden": 2756, + "fight": 2757, + "views": 2758, + "fish": 2759, + "started": 2760, + "running": 2761, + "green": 2762, + "seri": 2763, + "sm": 2764, + "ask": 2765, + "dor": 2766, + "death": 2767, + "econom": 2768, + "eri": 2769, + "ird": 2770, + "ser": 2771, + "lunch": 2772, + "âģ¦": 2773, + "box": 2774, + "natu": 2775, + "base": 2776, + "ban": 2777, + "fal": 2778, + "global": 2779, + "wild": 2780, + "wow": 2781, + "outside": 2782, + "move": 2783, + "lead": 2784, + "anal": 2785, + "museum": 2786, + "ong": 2787, + "haw": 2788, + "power": 2789, + "thank": 2790, + "bac": 2791, + "charac": 2792, + "campa": 2793, + "digital": 2794, + "ro": 2795, + "oper": 2796, + "dev": 2797, + "wol": 2798, + "pati": 2799, + "fa": 2800, + "male": 2801, + "paper": 2802, + "illing": 2803, + "cs": 2804, + "âĥ": 2805, + "education": 2806, + "taken": 2807, + "effe": 2808, + "mou": 2809, + "sad": 2810, + "\".": 2811, + "based": 2812, + "staff": 2813, + "including": 2814, + "living": 2815, + "ac": 2816, + "china": 2817, + "mob": 2818, + "storm": 2819, + "luck": 2820, + "phil": 2821, + "oo": 2822, + "yn": 2823, + "travel": 2824, + "kel": 2825, + "tial": 2826, + "price": 2827, + "book": 2828, + "important": 2829, + "bio": 2830, + "pool": 2831, + "nyc": 2832, + "fab": 2833, + "load": 2834, + "?!": 2835, + "challenge": 2836, + "cry": 2837, + "serve": 2838, + "wear": 2839, + "bus": 2840, + "tain": 2841, + "number": 2842, + "ror": 2843, + "kat": 2844, + "iz": 2845, + "though": 2846, + "hosp": 2847, + "mm": 2848, + "fair": 
2849, + "utes": 2850, + "hot": 2851, + "pop": 2852, + "fied": 2853, + "camp": 2854, + "development": 2855, + "libr": 2856, + "cali": 2857, + "ems": 2858, + "âģ¦@": 2859, + "bol": 2860, + "ised": 2861, + "standing": 2862, + "model": 2863, + "ita": 2864, + "gle": 2865, + "brown": 2866, + "image": 2867, + "vered": 2868, + "force": 2869, + "oil": 2870, + "partic": 2871, + "shu": 2872, + "daily": 2873, + "law": 2874, + "sec": 2875, + "class": 2876, + "camp": 2877, + "holiday": 2878, + "clin": 2879, + "kers": 2880, + "present": 2881, + "game": 2882, + "incredi": 2883, + "ership": 2884, + "interview": 2885, + "bill": 2886, + "due": 2887, + "andy": 2888, + "abo": 2889, + "innov": 2890, + "key": 2891, + "acade": 2892, + "pil": 2893, + "moder": 2894, + "stars": 2895, + "brand": 2896, + "fer": 2897, + "weeks": 2898, + "consi": 2899, + "pre": 2900, + "safe": 2901, + "writ": 2902, + "dium": 2903, + "launch": 2904, + "marketing": 2905, + "annual": 2906, + "assi": 2907, + "court": 2908, + "lady": 2909, + "cted": 2910, + "anda": 2911, + "inside": 2912, + "child": 2913, + "oppor": 2914, + "smith": 2915, + "centre": 2916, + "gue": 2917, + "âģ©": 2918, + "fren": 2919, + "sty": 2920, + "fort": 2921, + "ently": 2922, + "isn": 2923, + "keep": 2924, + "tober": 2925, + "ony": 2926, + "boy": 2927, + "ald": 2928, + "colla": 2929, + "demo": 2930, + "level": 2931, + "compet": 2932, + "ado": 2933, + "bour": 2934, + "fantastic": 2935, + "mate": 2936, + "su": 2937, + "south": 2938, + "opportun": 2939, + "versary": 2940, + "later": 2941, + "bud": 2942, + "facebook": 2943, + "laun": 2944, + "stern": 2945, + "pit": 2946, + "!\"": 2947, + "maj": 2948, + "gram": 2949, + "tbt": 2950, + "fire": 2951, + "happy": 2952, + "aks": 2953, + "whole": 2954, + "actually": 2955, + "iller": 2956, + "ella": 2957, + "lots": 2958, + "alex": 2959, + "ange": 2960, + "lands": 2961, + "ðŁĺŃ": 2962, + "enter": 2963, + "rou": 2964, + "episode": 2965, + "ped": 2966, + "inten": 2967, + "shire": 2968, + "who": 2969, + "plan": 2970, + "ho": 2971, + "cake": 2972, + "west": 2973, + "magaz": 2974, + "fresh": 2975, + "cc": 2976, + "nar": 2977, + "chris": 2978, + "writing": 2979, + "wer": 2980, + "nom": 2981, + "lo": 2982, + "midd": 2983, + "dream": 2984, + "ol": 2985, + "tional": 2986, + "deb": 2987, + ">>": 2988, + "become": 2989, + "si": 2990, + "grand": 2991, + "alling": 2992, + "histor": 2993, + "ride": 2994, + "ired": 2995, + "safe": 2996, + "queen": 2997, + "cil": 2998, + "intro": 2999, + "vil": 3000, + "dani": 3001, + "...": 3002, + "artic": 3003, + "stat": 3004, + "short": 3005, + "oring": 3006, + "selfi": 3007, + "missi": 3008, + "doc": 3009, + "bit": 3010, + "gall": 3011, + "bom": 3012, + "ire": 3013, + "selec": 3014, + "dition": 3015, + "ðŁĶ¥": 3016, + "friend": 3017, + "beat": 3018, + "ghting": 3019, + "ðŁĺĬ": 3020, + "peace": 3021, + "exhi": 3022, + "anta": 3023, + "ability": 3024, + "illu": 3025, + "jon": 3026, + "quality": 3027, + "tribu": 3028, + "mes": 3029, + "players": 3030, + "fair": 3031, + "cut": 3032, + "cab": 3033, + "success": 3034, + "bi": 3035, + "sus": 3036, + "promo": 3037, + "sche": 3038, + "ange": 3039, + "ico": 3040, + "commit": 3041, + "catch": 3042, + "illa": 3043, + "kind": 3044, + "feeling": 3045, + "quo": 3046, + "say": 3047, + "anniversary": 3048, + "spot": 3049, + "mother": 3050, + "ane": 3051, + "pend": 3052, + "yourself": 3053, + "ops": 3054, + "apple": 3055, + "minutes": 3056, + "po": 3057, + "grand": 3058, + "ries": 3059, + "haha": 3060, + "career": 3061, + "edition": 3062, + "dec": 3063, + "rick": 3064, + "ami": 
3065, + "concert": 3066, + "itive": 3067, + "geous": 3068, + "dly": 3069, + "tte": 3070, + "advent": 3071, + "ig": 3072, + "lights": 3073, + "aker": 3074, + "sky": 3075, + "âĥ£": 3076, + "ray": 3077, + "finished": 3078, + "way": 3079, + "sd": 3080, + "accoun": 3081, + "ðŁĴķ": 3082, + "cky": 3083, + "chel": 3084, + "liter": 3085, + "painting": 3086, + "los": 3087, + "stun": 3088, + "technology": 3089, + "nas": 3090, + "mar": 3091, + "bil": 3092, + "africa": 3093, + "kie": 3094, + "eyes": 3095, + "golf": 3096, + "plus": 3097, + "nia": 3098, + "itec": 3099, + "services": 3100, + "wedding": 3101, + "known": 3102, + "tele": 3103, + ".....": 3104, + "starts": 3105, + "paren": 3106, + "wants": 3107, + "ational": 3108, + "months": 3109, + "windo": 3110, + "favour": 3111, + "ert": 3112, + "magazine": 3113, + "exclu": 3114, + "reve": 3115, + "bc": 3116, + "original": 3117, + "ess": 3118, + "nal": 3119, + "anti": 3120, + "stro": 3121, + "tice": 3122, + "study": 3123, + "à¤": 3124, + "vac": 3125, + "national": 3126, + "five": 3127, + "rain": 3128, + "vement": 3129, + "ute": 3130, + "verse": 3131, + "emer": 3132, + "army": 3133, + "possible": 3134, + "guess": 3135, + "valley": 3136, + "thern": 3137, + "crow": 3138, + "mr": 3139, + "color": 3140, + "onto": 3141, + "pick": 3142, + "clear": 3143, + "dark": 3144, + "tac": 3145, + "wanted": 3146, + "itting": 3147, + "cancer": 3148, + "government": 3149, + "die": 3150, + "rise": 3151, + "zing": 3152, + "cold": 3153, + "foun": 3154, + "studio": 3155, + "stration": 3156, + "brother": 3157, + "ahead": 3158, + "shel": 3159, + "micro": 3160, + "ically": 3161, + "dau": 3162, + "signed": 3163, + "viol": 3164, + "ax": 3165, + "asse": 3166, + "io": 3167, + "wre": 3168, + "splay": 3169, + "chick": 3170, + "august": 3171, + "plat": 3172, + "tips": 3173, + "spi": 3174, + "human": 3175, + "easy": 3176, + "logi": 3177, + "mike": 3178, + "grow": 3179, + "agre": 3180, + "ww": 3181, + "shad": 3182, + "motiv": 3183, + "wide": 3184, + "turns": 3185, + "omg": 3186, + "var": 3187, + "defin": 3188, + "sug": 3189, + "jim": 3190, + "ðŁĶ¥": 3191, + "td": 3192, + "campaign": 3193, + "named": 3194, + "retweet": 3195, + "cop": 3196, + "tv": 3197, + "leav": 3198, + "kis": 3199, + "double": 3200, + "smar": 3201, + "issue": 3202, + "villa": 3203, + "information": 3204, + "lies": 3205, + "stock": 3206, + "nt": 3207, + "distric": 3208, + "shor": 3209, + "mix": 3210, + "ero": 3211, + "sep": 3212, + "mex": 3213, + "seeing": 3214, + "live": 3215, + "remin": 3216, + "code": 3217, + "gur": 3218, + "sc": 3219, + "wild": 3220, + "lun": 3221, + "hood": 3222, + "spot": 3223, + "father": 3224, + "forever": 3225, + "upd": 3226, + "traf": 3227, + "fly": 3228, + "need": 3229, + "gradu": 3230, + "train": 3231, + "make": 3232, + "sab": 3233, + "bey": 3234, + "size": 3235, + "leader": 3236, + "talks": 3237, + "eu": 3238, + "log": 3239, + "fox": 3240, + "gorgeous": 3241, + "less": 3242, + "lets": 3243, + "surpri": 3244, + "myself": 3245, + "note": 3246, + "lives": 3247, + "fru": 3248, + "loved": 3249, + "sever": 3250, + "dem": 3251, + "ji": 3252, + "soc": 3253, + "hold": 3254, + "dogs": 3255, + "ni": 3256, + "âŀ": 3257, + "leave": 3258, + "airport": 3259, + "benef": 3260, + "expl": 3261, + "ships": 3262, + "complete": 3263, + "achi": 3264, + "great": 3265, + "vintage": 3266, + "jack": 3267, + "roc": 3268, + "wood": 3269, + "priv": 3270, + "offer": 3271, + "eye": 3272, + "version": 3273, + "tea": 3274, + "coach": 3275, + "offic": 3276, + "well": 3277, + "gen": 3278, + "sat": 3279, + "hh": 3280, + "youth": 
3281, + "ox": 3282, + "?\"": 3283, + "mt": 3284, + "mix": 3285, + "gg": 3286, + "dle": 3287, + "natural": 3288, + "build": 3289, + "breakfast": 3290, + "thinking": 3291, + "theatre": 3292, + "moon": 3293, + "berg": 3294, + "goals": 3295, + "george": 3296, + "ene": 3297, + "excell": 3298, + "iling": 3299, + "tune": 3300, + "yed": 3301, + "gate": 3302, + "mit": 3303, + "network": 3304, + "joe": 3305, + "hello": 3306, + "fb": 3307, + "tube": 3308, + "wearing": 3309, + "athle": 3310, + "struc": 3311, + "hard": 3312, + "glass": 3313, + "gers": 3314, + "throw": 3315, + "ges": 3316, + "bt": 3317, + "industry": 3318, + "management": 3319, + "alist": 3320, + "goal": 3321, + "stream": 3322, + "yel": 3323, + "avi": 3324, + "icious": 3325, + "others": 3326, + "ski": 3327, + "christi": 3328, + "bird": 3329, + "esc": 3330, + "min": 3331, + "tro": 3332, + "lt": 3333, + "jan": 3334, + "imp": 3335, + "rights": 3336, + "sha": 3337, + "organ": 3338, + "central": 3339, + "ara": 3340, + "roll": 3341, + "favourite": 3342, + "chester": 3343, + "else": 3344, + "pay": 3345, + "cars": 3346, + "mine": 3347, + "step": 3348, + "practice": 3349, + "major": 3350, + "hang": 3351, + "ðŁĺĺ": 3352, + "non": 3353, + "vari": 3354, + "engine": 3355, + "volun": 3356, + "dia": 3357, + "iled": 3358, + "architec": 3359, + "pink": 3360, + "ds": 3361, + "thy": 3362, + "wash": 3363, + "website": 3364, + "bag": 3365, + "control": 3366, + "elli": 3367, + "fra": 3368, + "answ": 3369, + "dence": 3370, + "yu": 3371, + "ron": 3372, + "ola": 3373, + "gin": 3374, + "drin": 3375, + "lic": 3376, + "couple": 3377, + "spar": 3378, + "gon": 3379, + "create": 3380, + "ct": 3381, + "celebrating": 3382, + "deep": 3383, + "eat": 3384, + "tee": 3385, + "voice": 3386, + "drop": 3387, + "visit": 3388, + "ators": 3389, + "stadium": 3390, + "ft": 3391, + "wis": 3392, + "rol": 3393, + "grade": 3394, + "famil": 3395, + "points": 3396, + "repre": 3397, + "was": 3398, + "traffic": 3399, + "japan": 3400, + "org": 3401, + "honor": 3402, + "texas": 3403, + "manu": 3404, + "âĻ¥": 3405, + "safety": 3406, + "rer": 3407, + "bag": 3408, + "emplo": 3409, + "released": 3410, + "regu": 3411, + "aka": 3412, + "nav": 3413, + "role": 3414, + "senior": 3415, + "spect": 3416, + "cross": 3417, + "lines": 3418, + "best": 3419, + "pack": 3420, + "sin": 3421, + "tie": 3422, + "missing": 3423, + "sunset": 3424, + "liber": 3425, + "ising": 3426, + "jay": 3427, + "ski": 3428, + "championship": 3429, + "activ": 3430, + "ladies": 3431, + "played": 3432, + "yy": 3433, + "publ": 3434, + "alo": 3435, + "pride": 3436, + "sr": 3437, + "paki": 3438, + "lux": 3439, + "survi": 3440, + "cked": 3441, + "ets": 3442, + "chocol": 3443, + "australia": 3444, + "paris": 3445, + "miles": 3446, + "hat": 3447, + "mental": 3448, + "ala": 3449, + "mean": 3450, + "mobile": 3451, + "ena": 3452, + "insi": 3453, + "found": 3454, + "chief": 3455, + "tag": 3456, + "incredible": 3457, + "return": 3458, + "é": 3459, + "google": 3460, + "french": 3461, + "crew": 3462, + "hallo": 3463, + "alian": 3464, + "jaz": 3465, + "cher": 3466, + "silver": 3467, + "north": 3468, + "english": 3469, + "baseball": 3470, + "caf": 3471, + "limited": 3472, + "following": 3473, + "appreci": 3474, + "earth": 3475, + "kir": 3476, + "vember": 3477, + "wed": 3478, + "ption": 3479, + "ged": 3480, + "october": 3481, + "flori": 3482, + "cr": 3483, + "ency": 3484, + "gave": 3485, + "lord": 3486, + "stuff": 3487, + "berry": 3488, + "post": 3489, + "smile": 3490, + "broad": 3491, + "state": 3492, + "gger": 3493, + "means": 3494, + "icy": 
3495, + "gun": 3496, + "yo": 3497, + "master": 3498, + "burg": 3499, + "hands": 3500, + "nie": 3501, + "//": 3502, + "union": 3503, + "british": 3504, + "biggest": 3505, + "district": 3506, + "aming": 3507, + "hil": 3508, + "oce": 3509, + "person": 3510, + "pass": 3511, + "envir": 3512, + "schools": 3513, + "arrived": 3514, + "ances": 3515, + "inspired": 3516, + "expla": 3517, + "ben": 3518, + "library": 3519, + "bott": 3520, + "amp": 3521, + "steph": 3522, + "contact": 3523, + "bang": 3524, + "ms": 3525, + "califor": 3526, + "told": 3527, + "battle": 3528, + "bb": 3529, + "chicago": 3530, + "⾨": 3531, + "strate": 3532, + "shi": 3533, + "dece": 3534, + "-)": 3535, + "add": 3536, + "lab": 3537, + "jones": 3538, + "legend": 3539, + "castle": 3540, + "inger": 3541, + "stance": 3542, + "bel": 3543, + "ura": 3544, + "refu": 3545, + "leaders": 3546, + "pot": 3547, + "sex": 3548, + "hic": 3549, + "article": 3550, + "kid": 3551, + "france": 3552, + "xx": 3553, + "exe": 3554, + "guide": 3555, + "volunte": 3556, + "print": 3557, + "ali": 3558, + "ceo": 3559, + "tweets": 3560, + "wx": 3561, + "scene": 3562, + "volu": 3563, + "anti": 3564, + "han": 3565, + "associ": 3566, + "sharing": 3567, + "rose": 3568, + "minister": 3569, + "sher": 3570, + "inste": 3571, + "clean": 3572, + "democr": 3573, + "poster": 3574, + "skin": 3575, + "psy": 3576, + "proper": 3577, + "crazy": 3578, + "iam": 3579, + "ore": 3580, + "ini": 3581, + "anything": 3582, + "pod": 3583, + "moving": 3584, + "click": 3585, + "explo": 3586, + "comb": 3587, + "craft": 3588, + "fi": 3589, + "blood": 3590, + "isra": 3591, + "public": 3592, + "dent": 3593, + "olym": 3594, + "england": 3595, + "asi": 3596, + "cher": 3597, + "fact": 3598, + "environ": 3599, + "harry": 3600, + "gone": 3601, + "medic": 3602, + "enjoying": 3603, + "justice": 3604, + "jr": 3605, + "indian": 3606, + "wife": 3607, + "sound": 3608, + "tes": 3609, + "drawing": 3610, + "pal": 3611, + "idea": 3612, + "crit": 3613, + "juli": 3614, + "iler": 3615, + "warm": 3616, + "clar": 3617, + "thoughts": 3618, + "defen": 3619, + "council": 3620, + "introduc": 3621, + "died": 3622, + "janu": 3623, + "ani": 3624, + "send": 3625, + "lier": 3626, + "ml": 3627, + "interesting": 3628, + "trade": 3629, + "wind": 3630, + "bay": 3631, + "sac": 3632, + "ancy": 3633, + "source": 3634, + "bes": 3635, + "organi": 3636, + "arly": 3637, + "large": 3638, + "ffici": 3639, + "tag": 3640, + "ut": 3641, + "desp": 3642, + "oes": 3643, + "title": 3644, + "sym": 3645, + "pictures": 3646, + "open": 3647, + "women": 3648, + "showing": 3649, + "ria": 3650, + "least": 3651, + "leadership": 3652, + "current": 3653, + "electr": 3654, + "valent": 3655, + "listening": 3656, + "ckey": 3657, + "general": 3658, + "deser": 3659, + "duce": 3660, + ";)": 3661, + "cent": 3662, + "ðŁĺįðŁĺį": 3663, + "scott": 3664, + "poor": 3665, + "selfie": 3666, + "events": 3667, + "ion": 3668, + "wrong": 3669, + "dev": 3670, + "hill": 3671, + "septe": 3672, + "culture": 3673, + "line": 3674, + "sorry": 3675, + "sent": 3676, + "sister": 3677, + "cept": 3678, + "kri": 3679, + "november": 3680, + "ari": 3681, + "announce": 3682, + "zation": 3683, + "bran": 3684, + "gent": 3685, + "du": 3686, + "len": 3687, + "pers": 3688, + "fm": 3689, + "martin": 3690, + "op": 3691, + "emb": 3692, + "ome": 3693, + "middle": 3694, + "success": 3695, + "peter": 3696, + "january": 3697, + "flu": 3698, + "racing": 3699, + "dav": 3700, + "bike": 3701, + "ðŁı»": 3702, + "pet": 3703, + "shoot": 3704, + "professi": 3705, + "featuring": 3706, + "september": 3707, 
+ "nowplaying": 3708, + "staur": 3709, + "za": 3710, + "onic": 3711, + "quick": 3712, + "baske": 3713, + "speaking": 3714, + "milit": 3715, + "zer": 3716, + "chicken": 3717, + "bell": 3718, + "sad": 3719, + "coast": 3720, + "loving": 3721, + "yers": 3722, + "dj": 3723, + "panel": 3724, + "verage": 3725, + "swit": 3726, + "icks": 3727, + "bou": 3728, + "california": 3729, + "sam": 3730, + "parents": 3731, + "ero": 3732, + "killed": 3733, + "phys": 3734, + "jobs": 3735, + "migr": 3736, + "anth": 3737, + "emo": 3738, + "halloween": 3739, + "ander": 3740, + "cm": 3741, + "competition": 3742, + "eag": 3743, + "sket": 3744, + "spir": 3745, + "maybe": 3746, + "exclusive": 3747, + "appe": 3748, + "journey": 3749, + "screen": 3750, + "ford": 3751, + "io": 3752, + "hate": 3753, + "ug": 3754, + "soul": 3755, + "hero": 3756, + "society": 3757, + "syn": 3758, + "guit": 3759, + "nh": 3760, + "dj": 3761, + "ases": 3762, + "impre": 3763, + "time": 3764, + "sales": 3765, + "dd": 3766, + "fts": 3767, + "summit": 3768, + "stunning": 3769, + "oms": 3770, + "turned": 3771, + "clean": 3772, + "soft": 3773, + "beat": 3774, + "restaur": 3775, + "dered": 3776, + "ences": 3777, + "magic": 3778, + "dio": 3779, + "shine": 3780, + "guest": 3781, + "healthy": 3782, + "exhib": 3783, + "stories": 3784, + "popu": 3785, + "nis": 3786, + "ela": 3787, + "below": 3788, + "funny": 3789, + "results": 3790, + "sne": 3791, + "currently": 3792, + "ard": 3793, + "download": 3794, + "flight": 3795, + "mal": 3796, + "fine": 3797, + "pad": 3798, + "chu": 3799, + "ented": 3800, + "hat": 3801, + "ðŁijı": 3802, + "steve": 3803, + "jo": 3804, + "mark": 3805, + "rat": 3806, + "ball": 3807, + "pc": 3808, + "pon": 3809, + "bby": 3810, + "oli": 3811, + "arts": 3812, + "asure": 3813, + "bowl": 3814, + "attack": 3815, + "mic": 3816, + "dear": 3817, + "range": 3818, + "enter": 3819, + "chocolate": 3820, + "brilli": 3821, + "access": 3822, + ",\"": 3823, + "???": 3824, + "chap": 3825, + "const": 3826, + "tn": 3827, + "matter": 3828, + "blue": 3829, + "gallery": 3830, + "emp": 3831, + "workshop": 3832, + "leading": 3833, + "yours": 3834, + "basketball": 3835, + "wanna": 3836, + "thu": 3837, + "__": 3838, + "marri": 3839, + "sleep": 3840, + "bia": 3841, + "che": 3842, + "mad": 3843, + "impact": 3844, + "own": 3845, + "sir": 3846, + "channel": 3847, + "europe": 3848, + "esp": 3849, + "kitch": 3850, + "hospital": 3851, + "wra": 3852, + "royal": 3853, + "fs": 3854, + "neu": 3855, + "quar": 3856, + "ney": 3857, + "acks": 3858, + "chase": 3859, + "ppy": 3860, + "stal": 3861, + "ately": 3862, + "tim": 3863, + "december": 3864, + "rare": 3865, + "perform": 3866, + "cream": 3867, + "weight": 3868, + "choo": 3869, + "night": 3870, + "haven": 3871, + "franc": 3872, + "khan": 3873, + "built": 3874, + "helping": 3875, + "trust": 3876, + "type": 3877, + "golden": 3878, + "tax": 3879, + "snow": 3880, + "swi": 3881, + "disa": 3882, + "questions": 3883, + "vey": 3884, + "light": 3885, + "cn": 3886, + "cloud": 3887, + "thomas": 3888, + "aged": 3889, + "shou": 3890, + "teams": 3891, + "gran": 3892, + "reason": 3893, + "aa": 3894, + "youtube": 3895, + "vp": 3896, + "pizz": 3897, + "manager": 3898, + "bury": 3899, + "credit": 3900, + "treat": 3901, + "max": 3902, + "ik": 3903, + "main": 3904, + "ging": 3905, + "dead": 3906, + "probab": 3907, + "yeah": 3908, + "ãĤ": 3909, + "brand": 3910, + "soli": 3911, + "plant": 3912, + "tayl": 3913, + "girl": 3914, + "ðŁĺŃ": 3915, + "nament": 3916, + "auto": 3917, + "message": 3918, + "kore": 3919, + "nur": 3920, + "terr": 3921, + 
"agu": 3922, + "map": 3923, + "senting": 3924, + "loves": 3925, + "gives": 3926, + "gab": 3927, + "zen": 3928, + "robert": 3929, + "confir": 3930, + "wars": 3931, + "om": 3932, + "stain": 3933, + "camera": 3934, + "ander": 3935, + "wonder": 3936, + "ab": 3937, + "cap": 3938, + "sold": 3939, + "suit": 3940, + "walking": 3941, + "continue": 3942, + "effec": 3943, + "daughter": 3944, + "danc": 3945, + "chain": 3946, + "multi": 3947, + "kid": 3948, + "yan": 3949, + "champion": 3950, + "vo": 3951, + "tains": 3952, + "host": 3953, + "mini": 3954, + "missed": 3955, + "resc": 3956, + "lyn": 3957, + "finish": 3958, + "delicious": 3959, + "sas": 3960, + "taylor": 3961, + "ib": 3962, + "promis": 3963, + "products": 3964, + "mountain": 3965, + "florida": 3966, + "register": 3967, + "treat": 3968, + "recent": 3969, + "female": 3970, + "booth": 3971, + "matt": 3972, + "vehic": 3973, + "sop": 3974, + "motor": 3975, + "supporting": 3976, + "phic": 3977, + "extre": 3978, + "drink": 3979, + "lane": 3980, + "third": 3981, + "ps": 3982, + "constru": 3983, + "cere": 3984, + "farm": 3985, + "ðŁİī": 3986, + "tured": 3987, + "ðŁijī": 3988, + "cats": 3989, + "aj": 3990, + "gie": 3991, + "shooting": 3992, + "asked": 3993, + "pakistan": 3994, + "ame": 3995, + "mb": 3996, + "gil": 3997, + "legal": 3998, + "square": 3999, + "invol": 4000, + "draw": 4001, + "oooo": 4002, + "!!!!": 4003, + "opportunity": 4004, + "py": 4005, + "ei": 4006, + "bts": 4007, + "teacher": 4008, + "character": 4009, + "johnson": 4010, + "bron": 4011, + "lywood": 4012, + "chine": 4013, + "cing": 4014, + "cine": 4015, + "dge": 4016, + "gaming": 4017, + "russia": 4018, + "cia": 4019, + "quote": 4020, + "rich": 4021, + "gov": 4022, + "flowers": 4023, + "spiri": 4024, + "stin": 4025, + "growth": 4026, + "ðŁı¼": 4027, + "commer": 4028, + "juni": 4029, + "mum": 4030, + "ran": 4031, + "sna": 4032, + "aren": 4033, + "cb": 4034, + "actor": 4035, + "color": 4036, + "sit": 4037, + "pair": 4038, + "chi": 4039, + "bow": 4040, + "academy": 4041, + "held": 4042, + "rang": 4043, + "metal": 4044, + "yl": 4045, + "active": 4046, + "probably": 4047, + "tch": 4048, + "needed": 4049, + "spee": 4050, + "choice": 4051, + "italy": 4052, + "ryan": 4053, + "ðŁĩº": 4054, + "flower": 4055, + "vit": 4056, + "mn": 4057, + "foundation": 4058, + "bak": 4059, + "sions": 4060, + "neigh": 4061, + "floo": 4062, + "heard": 4063, + "remo": 4064, + "fresh": 4065, + "inging": 4066, + "ref": 4067, + "town": 4068, + "clou": 4069, + "jesus": 4070, + "spirit": 4071, + "couldn": 4072, + "zes": 4073, + "ðŁĴĻ": 4074, + "williams": 4075, + "proce": 4076, + "modern": 4077, + "process": 4078, + "shoes": 4079, + "created": 4080, + "tric": 4081, + "issues": 4082, + "anne": 4083, + "atten": 4084, + "debut": 4085, + "hr": 4086, + "nit": 4087, + "stig": 4088, + "apo": 4089, + "eps": 4090, + "zu": 4091, + "ãĢ": 4092, + "six": 4093, + "cards": 4094, + "langu": 4095, + "famous": 4096, + "tournament": 4097, + "sel": 4098, + "ebay": 4099, + "yn": 4100, + "ston": 4101, + "kick": 4102, + "announced": 4103, + "kam": 4104, + "voc": 4105, + "brilliant": 4106, + "house": 4107, + "cheese": 4108, + "warri": 4109, + "music": 4110, + "hockey": 4111, + "ðŁĺĤðŁĺĤ": 4112, + "skills": 4113, + "autom": 4114, + "smart": 4115, + "medical": 4116, + "mony": 4117, + "ex": 4118, + "guar": 4119, + "give": 4120, + "personal": 4121, + "vention": 4122, + "alli": 4123, + "press": 4124, + "floor": 4125, + "mc": 4126, + "victory": 4127, + "him": 4128, + "simple": 4129, + "thor": 4130, + "ðŁĩºðŁĩ": 4131, + "tail": 4132, + "lucky": 
4133, + "alex": 4134, + "quite": 4135, + "bot": 4136, + "ssions": 4137, + "challeng": 4138, + "cann": 4139, + "amazon": 4140, + "hell": 4141, + "bought": 4142, + "):": 4143, + "edy": 4144, + "secret": 4145, + "production": 4146, + "independ": 4147, + "defe": 4148, + "added": 4149, + "pr": 4150, + "pag": 4151, + "bed": 4152, + "greatest": 4153, + "within": 4154, + "jay": 4155, + "ðŁ¥": 4156, + "ireland": 4157, + "rely": 4158, + "sd": 4159, + "text": 4160, + "driving": 4161, + "program": 4162, + "speed": 4163, + "colum": 4164, + "stron": 4165, + "é": 4166, + "forest": 4167, + "âĸ": 4168, + "machine": 4169, + "coin": 4170, + "scar": 4171, + "ount": 4172, + "bie": 4173, + "¡ï¸ı": 4174, + "portra": 4175, + "common": 4176, + "wrest": 4177, + "received": 4178, + "know": 4179, + "invest": 4180, + "plans": 4181, + "accor": 4182, + "adop": 4183, + "tery": 4184, + "reali": 4185, + "pp": 4186, + "kal": 4187, + "artwork": 4188, + "mean": 4189, + "god": 4190, + "instead": 4191, + "anci": 4192, + "motivation": 4193, + "asing": 4194, + "inspiration": 4195, + "upcoming": 4196, + "political": 4197, + "europe": 4198, + "mers": 4199, + "heavy": 4200, + "ðŁijį": 4201, + "febru": 4202, + "scotland": 4203, + "ough": 4204, + "bt": 4205, + "boss": 4206, + "schedu": 4207, + "speak": 4208, + "nick": 4209, + "ured": 4210, + "ino": 4211, + "ek": 4212, + "risk": 4213, + "tory": 4214, + "presents": 4215, + "bon": 4216, + "rug": 4217, + "states": 4218, + "exhibition": 4219, + "ilo": 4220, + "mill": 4221, + "brought": 4222, + ":-)": 4223, + "touri": 4224, + "come": 4225, + "officially": 4226, + "champions": 4227, + "doors": 4228, + "rep": 4229, + "pose": 4230, + "extra": 4231, + "kings": 4232, + "soccer": 4233, + "squad": 4234, + "applic": 4235, + "ata": 4236, + "sometimes": 4237, + "tari": 4238, + "excellent": 4239, + "ðŁĺĺ": 4240, + "straight": 4241, + "carol": 4242, + "rip": 4243, + "âĢį": 4244, + "graphic": 4245, + "mol": 4246, + "election": 4247, + "february": 4248, + "asons": 4249, + "li": 4250, + "dir": 4251, + "mt": 4252, + "nick": 4253, + "usu": 4254, + "mrs": 4255, + "comics": 4256, + "institu": 4257, + "corpor": 4258, + "vi": 4259, + "ðŁĻı": 4260, + "tural": 4261, + "dise": 4262, + "acci": 4263, + "weare": 4264, + "among": 4265, + "shopping": 4266, + "till": 4267, + "what": 4268, + "chair": 4269, + "span": 4270, + "chinese": 4271, + "innovation": 4272, + "joy": 4273, + "kit": 4274, + "century": 4275, + "obama": 4276, + "phili": 4277, + "fc": 4278, + "reach": 4279, + "citi": 4280, + "ulous": 4281, + "non": 4282, + "dang": 4283, + "happening": 4284, + "burn": 4285, + "pel": 4286, + "orange": 4287, + "dv": 4288, + "kick": 4289, + "claim": 4290, + "ingham": 4291, + "phy": 4292, + "nov": 4293, + "podcast": 4294, + "whi": 4295, + "nights": 4296, + "earlier": 4297, + "bear": 4298, + "lah": 4299, + "exciting": 4300, + "ora": 4301, + "given": 4302, + "slo": 4303, + "memories": 4304, + "continues": 4305, + "product": 4306, + "gho": 4307, + "cd": 4308, + "knows": 4309, + "ðŁİī": 4310, + "published": 4311, + "discuss": 4312, + "yard": 4313, + "iphone": 4314, + "tries": 4315, + "wall": 4316, + "feb": 4317, + "aren": 4318, + "truth": 4319, + "winners": 4320, + "ture": 4321, + "ditional": 4322, + "military": 4323, + "problem": 4324, + "mand": 4325, + "dog": 4326, + "loss": 4327, + "cric": 4328, + "canadi": 4329, + "veter": 4330, + "village": 4331, + "\",": 4332, + "yr": 4333, + "ung": 4334, + "donald": 4335, + "aging": 4336, + "birds": 4337, + "scienti": 4338, + "les": 4339, + "this": 4340, + "region": 4341, + "tical": 4342, 
+ "itten": 4343, + "ila": 4344, + "ðŁĺİ": 4345, + "dad": 4346, + "diam": 4347, + "above": 4348, + "stren": 4349, + "lit": 4350, + "pir": 4351, + "lab": 4352, + "focus": 4353, + "busy": 4354, + "dur": 4355, + "apply": 4356, + "sma": 4357, + "author": 4358, + "aci": 4359, + "execu": 4360, + "domin": 4361, + "rela": 4362, + "jackson": 4363, + "ato": 4364, + "washington": 4365, + "ðŁĻĮ": 4366, + "kill": 4367, + "popular": 4368, + "cement": 4369, + "road": 4370, + "eating": 4371, + "location": 4372, + "vent": 4373, + "arre": 4374, + "nan": 4375, + "custo": 4376, + "adventure": 4377, + "ordin": 4378, + "sport": 4379, + "ult": 4380, + "lock": 4381, + "question": 4382, + "driver": 4383, + "landsc": 4384, + "oni": 4385, + "kins": 4386, + "pd": 4387, + "jordan": 4388, + "tered": 4389, + "kk": 4390, + "af": 4391, + "child": 4392, + "sp": 4393, + "justin": 4394, + "eni": 4395, + "selling": 4396, + "zo": 4397, + "whit": 4398, + "boston": 4399, + "particip": 4400, + "signing": 4401, + "happened": 4402, + "heat": 4403, + "mam": 4404, + "dreams": 4405, + "lows": 4406, + "graph": 4407, + "theday": 4408, + "heading": 4409, + "bro": 4410, + "blessed": 4411, + "vic": 4412, + "vegas": 4413, + "hd": 4414, + "inning": 4415, + "roman": 4416, + "andro": 4417, + "denti": 4418, + "use": 4419, + "cit": 4420, + "progress": 4421, + "writer": 4422, + "bob": 4423, + "ffs": 4424, + "growing": 4425, + "bly": 4426, + "aware": 4427, + "exam": 4428, + "spent": 4429, + "bet": 4430, + "score": 4431, + "beyond": 4432, + "docu": 4433, + "adel": 4434, + "sf": 4435, + "coura": 4436, + "collabor": 4437, + "inc": 4438, + "private": 4439, + "boat": 4440, + "**": 4441, + "zone": 4442, + "pha": 4443, + "bill": 4444, + "total": 4445, + "planning": 4446, + "towards": 4447, + "places": 4448, + "preview": 4449, + "creative": 4450, + "damn": 4451, + "ideas": 4452, + "seems": 4453, + "poten": 4454, + "saying": 4455, + "display": 4456, + "sw": 4457, + "aqu": 4458, + "louis": 4459, + "bye": 4460, + "lil": 4461, + "email": 4462, + "western": 4463, + "germany": 4464, + "eller": 4465, + "res": 4466, + "fant": 4467, + "mentary": 4468, + "deals": 4469, + "richard": 4470, + "jersey": 4471, + "streng": 4472, + "rad": 4473, + "pizza": 4474, + "mond": 4475, + "ware": 4476, + "lac": 4477, + "gi": 4478, + "archi": 4479, + "cd": 4480, + "yellow": 4481, + "recently": 4482, + "reach": 4483, + "à¹": 4484, + "kitchen": 4485, + "designed": 4486, + "try": 4487, + "gal": 4488, + "restaurant": 4489, + "ature": 4490, + "ww": 4491, + "jas": 4492, + "lma": 4493, + "ðŁijĮ": 4494, + "pain": 4495, + "avo": 4496, + "minute": 4497, + "schol": 4498, + "therap": 4499, + "ticket": 4500, + "dry": 4501, + "japan": 4502, + "ditions": 4503, + "terri": 4504, + "selves": 4505, + "happen": 4506, + "tup": 4507, + "mag": 4508, + "copy": 4509, + "sher": 4510, + "freedom": 4511, + "file": 4512, + "specially": 4513, + "toronto": 4514, + "load": 4515, + "gary": 4516, + "rey": 4517, + "answer": 4518, + "loy": 4519, + "caught": 4520, + "prize": 4521, + "une": 4522, + "fication": 4523, + "niger": 4524, + "syd": 4525, + "touch": 4526, + "feature": 4527, + "jazz": 4528, + "records": 4529, + "himself": 4530, + "dish": 4531, + "rober": 4532, + "spotted": 4533, + "master": 4534, + "wave": 4535, + "finals": 4536, + "bull": 4537, + "forum": 4538, + "ald": 4539, + "recomm": 4540, + "cha": 4541, + "ae": 4542, + "doo": 4543, + "instru": 4544, + "truly": 4545, + "lg": 4546, + "ink": 4547, + "brothers": 4548, + "dest": 4549, + "jim": 4550, + "mit": 4551, + "closed": 4552, + "ison": 4553, + "tried": 
4554, + "santa": 4555, + "affe": 4556, + "wan": 4557, + "horse": 4558, + "grow": 4559, + "campus": 4560, + "relation": 4561, + "native": 4562, + "journ": 4563, + "gov": 4564, + "oct": 4565, + "kit": 4566, + "bound": 4567, + "partner": 4568, + "rema": 4569, + "crowd": 4570, + "!)": 4571, + "calls": 4572, + "rail": 4573, + "quali": 4574, + "solution": 4575, + "contest": 4576, + "convers": 4577, + "snap": 4578, + "base": 4579, + "initi": 4580, + "tax": 4581, + "ye": 4582, + "entrepre": 4583, + "itor": 4584, + "construction": 4585, + "food": 4586, + "presented": 4587, + "nings": 4588, + "climate": 4589, + "km": 4590, + "model": 4591, + "bj": 4592, + "block": 4593, + "presentation": 4594, + "dream": 4595, + "fix": 4596, + "calling": 4597, + "busine": 4598, + "congress": 4599, + "understand": 4600, + "web": 4601, + "value": 4602, + "ï¸ıâĥ£": 4603, + "mexico": 4604, + "itely": 4605, + "kim": 4606, + "charity": 4607, + "reflec": 4608, + "blan": 4609, + "flying": 4610, + "analy": 4611, + "families": 4612, + "band": 4613, + "recipe": 4614, + "celebration": 4615, + "accep": 4616, + "ary": 4617, + "tot": 4618, + "gb": 4619, + "interested": 4620, + "captain": 4621, + "âĻ¥": 4622, + "tip": 4623, + "absol": 4624, + "braz": 4625, + "investig": 4626, + "ology": 4627, + "dec": 4628, + "truck": 4629, + "vering": 4630, + "clear": 4631, + "dont": 4632, + "gotta": 4633, + "advis": 4634, + "begins": 4635, + "mass": 4636, + "descri": 4637, + "block": 4638, + "kim": 4639, + "david": 4640, + "songs": 4641, + "memorial": 4642, + "features": 4643, + "sustain": 4644, + "'.": 4645, + "grab": 4646, + "jose": 4647, + "va": 4648, + "conserv": 4649, + "sets": 4650, + "manchester": 4651, + "fighting": 4652, + "degre": 4653, + "aga": 4654, + "ind": 4655, + "sleep": 4656, + "position": 4657, + "hair": 4658, + "signs": 4659, + "policy": 4660, + "ito": 4661, + "alert": 4662, + "stam": 4663, + "spend": 4664, + "wy": 4665, + "absolut": 4666, + "dm": 4667, + "animal": 4668, + "myster": 4669, + "successful": 4670, + "problems": 4671, + "robo": 4672, + "kay": 4673, + "garden": 4674, + "pd": 4675, + "mayor": 4676, + "dale": 4677, + "tol": 4678, + "offers": 4679, + "visiting": 4680, + "friendly": 4681, + "trees": 4682, + "officer": 4683, + "account": 4684, + "kevin": 4685, + "ðŁijį": 4686, + "giant": 4687, + "continu": 4688, + "consu": 4689, + "tract": 4690, + "nfl": 4691, + "ðŁĺĬ": 4692, + "hq": 4693, + "bility": 4694, + "aar": 4695, + "disney": 4696, + "teen": 4697, + "oned": 4698, + "white": 4699, + "trailer": 4700, + "dedic": 4701, + "alone": 4702, + "absolutely": 4703, + "digital": 4704, + "william": 4705, + "ination": 4706, + "swa": 4707, + "ee": 4708, + "entire": 4709, + "german": 4710, + "roll": 4711, + "hits": 4712, + "cost": 4713, + "stay": 4714, + "tha": 4715, + "alive": 4716, + "according": 4717, + "cot": 4718, + "literally": 4719, + "herit": 4720, + "reti": 4721, + "hahaha": 4722, + "experi": 4723, + "likes": 4724, + "gt": 4725, + "steel": 4726, + "____": 4727, + "chair": 4728, + "christian": 4729, + "tower": 4730, + "difference": 4731, + "md": 4732, + "tress": 4733, + "mid": 4734, + "prince": 4735, + "african": 4736, + "feder": 4737, + "foot": 4738, + "carri": 4739, + "served": 4740, + "rice": 4741, + "shall": 4742, + "featured": 4743, + "cker": 4744, + "recru": 4745, + "poe": 4746, + "sense": 4747, + "nific": 4748, + "comedy": 4749, + "content": 4750, + "fat": 4751, + "posted": 4752, + "contribu": 4753, + "timate": 4754, + "liver": 4755, + "mble": 4756, + "internet": 4757, + "age": 4758, + "european": 4759, + "cling": 
4760, + "glad": 4761, + "ffic": 4762, + "sco": 4763, + "akes": 4764, + "elle": 4765, + "termin": 4766, + "tony": 4767, + "pale": 4768, + "colour": 4769, + "serious": 4770, + "patri": 4771, + "movies": 4772, + "bm": 4773, + "professional": 4774, + "ado": 4775, + "alu": 4776, + "bringing": 4777, + "falls": 4778, + "israel": 4779, + "term": 4780, + "language": 4781, + "brook": 4782, + "mann": 4783, + "communic": 4784, + "cannot": 4785, + "acti": 4786, + "phe": 4787, + "yan": 4788, + "entreprene": 4789, + "turkey": 4790, + "logical": 4791, + "long": 4792, + "arm": 4793, + "urs": 4794, + "workers": 4795, + "ingly": 4796, + "ggs": 4797, + "ric": 4798, + "tual": 4799, + "receive": 4800, + "opens": 4801, + "gear": 4802, + "social": 4803, + "feet": 4804, + "cking": 4805, + "adver": 4806, + "finan": 4807, + "feels": 4808, + "spla": 4809, + "hr": 4810, + "easter": 4811, + "brain": 4812, + "ãģ": 4813, + "fig": 4814, + "ledge": 4815, + "nearly": 4816, + "protect": 4817, + "massive": 4818, + "eth": 4819, + "awa": 4820, + "ðŁĺģ": 4821, + "yrs": 4822, + "awareness": 4823, + "definitely": 4824, + "kn": 4825, + "imagine": 4826, + "ku": 4827, + "systems": 4828, + "ðŁijı": 4829, + "fas": 4830, + "lik": 4831, + "provide": 4832, + "amo": 4833, + "discover": 4834, + "influ": 4835, + "maker": 4836, + "gaz": 4837, + "fitness": 4838, + "street": 4839, + "ers": 4840, + "ted": 4841, + "wc": 4842, + "ysis": 4843, + "positive": 4844, + "helped": 4845, + "quest": 4846, + "andrew": 4847, + "brad": 4848, + "bin": 4849, + "hanging": 4850, + "ling": 4851, + "bright": 4852, + "section": 4853, + "mass": 4854, + "ðŁĻĮ": 4855, + "followers": 4856, + "hosting": 4857, + "tempor": 4858, + "flag": 4859, + "ave": 4860, + "letter": 4861, + "kur": 4862, + "requi": 4863, + "often": 4864, + "cryp": 4865, + "suff": 4866, + "âļ½": 4867, + "russian": 4868, + "treatment": 4869, + "alle": 4870, + "hay": 4871, + "lan": 4872, + "keeping": 4873, + "holy": 4874, + "powerful": 4875, + "predic": 4876, + "fund": 4877, + "especially": 4878, + "window": 4879, + "jewel": 4880, + "ily": 4881, + "ðŁĴľ": 4882, + "generation": 4883, + "appa": 4884, + "seriously": 4885, + "od": 4886, + "ðŁĺĤðŁĺĤðŁĺĤ": 4887, + "certi": 4888, + "irish": 4889, + "ðŁijĮ": 4890, + "miami": 4891, + "beth": 4892, + "vity": 4893, + "secu": 4894, + "chef": 4895, + "crime": 4896, + "graphy": 4897, + "max": 4898, + "artists": 4899, + "revolu": 4900, + "guard": 4901, + "speech": 4902, + "uc": 4903, + "updates": 4904, + "faces": 4905, + "stant": 4906, + "changed": 4907, + "reports": 4908, + "lower": 4909, + "pear": 4910, + "nc": 4911, + "kil": 4912, + "looked": 4913, + "speaker": 4914, + "sf": 4915, + "respect": 4916, + "okay": 4917, + "ocean": 4918, + "sitting": 4919, + "architecture": 4920, + "trail": 4921, + "seat": 4922, + "ira": 4923, + "leg": 4924, + "japanese": 4925, + "dam": 4926, + "ular": 4927, + "swim": 4928, + "politics": 4929, + "financial": 4930, + "old": 4931, + "mouth": 4932, + "attemp": 4933, + "destin": 4934, + "fishing": 4935, + "attention": 4936, + "mem": 4937, + "changes": 4938, + "decided": 4939, + "religi": 4940, + "gin": 4941, + "cav": 4942, + "zz": 4943, + "adam": 4944, + "mac": 4945, + "write": 4946, + "begin": 4947, + "scul": 4948, + "alter": 4949, + "iss": 4950, + "athon": 4951, + "images": 4952, + "moo": 4953, + "joined": 4954, + "ðŁĺī": 4955, + "âŀ¡ï¸ı": 4956, + "passed": 4957, + "musli": 4958, + "hir": 4959, + "largest": 4960, + "camer": 4961, + "comic": 4962, + "ghted": 4963, + "rugby": 4964, + "burgh": 4965, + "gging": 4966, + "testing": 4967, + 
"prepar": 4968, + "laugh": 4969, + "aled": 4970, + "improve": 4971, + "believ": 4972, + "advice": 4973, + "shares": 4974, + "heart": 4975, + "turning": 4976, + "sb": 4977, + "tel": 4978, + "cafe": 4979, + "nes": 4980, + "daniel": 4981, + "patter": 4982, + "tz": 4983, + "sett": 4984, + "park": 4985, + "cand": 4986, + "stick": 4987, + "happens": 4988, + "brian": 4989, + "newest": 4990, + "epic": 4991, + "ador": 4992, + "kies": 4993, + "warning": 4994, + "animals": 4995, + "custom": 4996, + "arc": 4997, + "dian": 4998, + "gold": 4999, + "core": 5000, + "tf": 5001, + "city": 5002, + "pants": 5003, + "reality": 5004, + "confi": 5005, + "inju": 5006, + "fox": 5007, + "guil": 5008, + "knew": 5009, + "âĺº": 5010, + "correc": 5011, + "itude": 5012, + "dden": 5013, + ".#": 5014, + "reduc": 5015, + "pass": 5016, + "fon": 5017, + "ya": 5018, + "owner": 5019, + "returns": 5020, + "nc": 5021, + "east": 5022, + "apol": 5023, + "insur": 5024, + "tho": 5025, + "sim": 5026, + "junior": 5027, + "bee": 5028, + "angel": 5029, + "attle": 5030, + "electric": 5031, + "horror": 5032, + "crash": 5033, + "eye": 5034, + "path": 5035, + "southern": 5036, + "employe": 5037, + "geo": 5038, + "tan": 5039, + "haz": 5040, + "rally": 5041, + "ðŁı»": 5042, + "property": 5043, + "wasn": 5044, + "enjoyed": 5045, + "grey": 5046, + "gas": 5047, + "brew": 5048, + "northern": 5049, + "holding": 5050, + "gp": 5051, + "take": 5052, + "chart": 5053, + "lyn": 5054, + "drama": 5055, + "zo": 5056, + "paid": 5057, + "throwback": 5058, + "cup": 5059, + "discussion": 5060, + "downtown": 5061, + "will": 5062, + "lew": 5063, + "bis": 5064, + "tary": 5065, + "bread": 5066, + "upon": 5067, + "rate": 5068, + "teachers": 5069, + "itation": 5070, + "anced": 5071, + "cycle": 5072, + "choose": 5073, + "dc": 5074, + "iran": 5075, + "cow": 5076, + "dave": 5077, + "raise": 5078, + "princess": 5079, + "faith": 5080, + "->": 5081, + "industri": 5082, + "spain": 5083, + "guitar": 5084, + "facts": 5085, + "mn": 5086, + "spen": 5087, + "courte": 5088, + "gott": 5089, + "projects": 5090, + "audi": 5091, + "osc": 5092, + "peter": 5093, + "sand": 5094, + "interest": 5095, + "happiness": 5096, + "venue": 5097, + "soldi": 5098, + "surprise": 5099, + "potential": 5100, + "perio": 5101, + "customer": 5102, + "ii": 5103, + "gni": 5104, + "manufac": 5105, + "eco": 5106, + "broken": 5107, + "singer": 5108, + "vels": 5109, + "wales": 5110, + "hus": 5111, + "inj": 5112, + "four": 5113, + "talent": 5114, + "dying": 5115, + "matthe": 5116, + "film": 5117, + "joining": 5118, + "sell": 5119, + "jar": 5120, + "lmao": 5121, + "surger": 5122, + "bbc": 5123, + "sources": 5124, + "austin": 5125, + "nik": 5126, + "charles": 5127, + "fam": 5128, + "princi": 5129, + "angel": 5130, + "cash": 5131, + "lot": 5132, + "ored": 5133, + "plays": 5134, + "plate": 5135, + "done": 5136, + "memory": 5137, + "brings": 5138, + "nba": 5139, + "solutions": 5140, + "teaching": 5141, + "grace": 5142, + "circu": 5143, + "helps": 5144, + "founder": 5145, + "mary": 5146, + "explore": 5147, + "decor": 5148, + "parts": 5149, + "cho": 5150, + "integr": 5151, + "hau": 5152, + "ises": 5153, + "putting": 5154, + "iner": 5155, + "rit": 5156, + "vy": 5157, + "michel": 5158, + "blues": 5159, + "everyday": 5160, + "forms": 5161, + "bio": 5162, + "year": 5163, + "pin": 5164, + "tter": 5165, + "spring": 5166, + "))": 5167, + "pot": 5168, + "aling": 5169, + "performing": 5170, + "shan": 5171, + "planet": 5172, + "musical": 5173, + "heads": 5174, + "italian": 5175, + "strugg": 5176, + "âĢįâĻ": 5177, + "wings": 
5178, + "pump": 5179, + "hh": 5180, + "trou": 5181, + "aid": 5182, + "prime": 5183, + "earth": 5184, + "paint": 5185, + "mont": 5186, + "amy": 5187, + "bbc": 5188, + "fabulous": 5189, + "fruit": 5190, + "android": 5191, + "bourne": 5192, + "ceremony": 5193, + "ential": 5194, + "??": 5195, + "debate": 5196, + "oning": 5197, + "draft": 5198, + "solar": 5199, + "tx": 5200, + "jam": 5201, + "corn": 5202, + "!!!!!": 5203, + "broo": 5204, + "milk": 5205, + "posed": 5206, + "ohi": 5207, + "movement": 5208, + "bren": 5209, + "partner": 5210, + "pg": 5211, + "ette": 5212, + "aries": 5213, + "shout": 5214, + "ng": 5215, + "leaving": 5216, + "tells": 5217, + "sens": 5218, + "taste": 5219, + "kelly": 5220, + "worl": 5221, + "gym": 5222, + "rich": 5223, + "egy": 5224, + "pid": 5225, + "mas": 5226, + "âĤ": 5227, + "courtesy": 5228, + "frank": 5229, + "increase": 5230, + "written": 5231, + "ppers": 5232, + "rel": 5233, + "hai": 5234, + "sas": 5235, + "sound": 5236, + "tti": 5237, + "wich": 5238, + "river": 5239, + "...\"": 5240, + "ag": 5241, + "fellow": 5242, + "rome": 5243, + "small": 5244, + "gency": 5245, + "ican": 5246, + "luxury": 5247, + "proof": 5248, + "met": 5249, + "wildlife": 5250, + "moments": 5251, + "rather": 5252, + "corner": 5253, + "compe": 5254, + "canadian": 5255, + "likely": 5256, + "therapy": 5257, + "liam": 5258, + "economic": 5259, + "indie": 5260, + "route": 5261, + "fight": 5262, + "hope": 5263, + "setting": 5264, + "antly": 5265, + "cross": 5266, + "fantasy": 5267, + "dee": 5268, + "sketch": 5269, + "compli": 5270, + "ymi": 5271, + "rules": 5272, + "engineering": 5273, + "figure": 5274, + "row": 5275, + ".,": 5276, + "fw": 5277, + "sydney": 5278, + "wou": 5279, + "tation": 5280, + "drew": 5281, + "uses": 5282, + "there": 5283, + "spread": 5284, + "structure": 5285, + "patrick": 5286, + "apparently": 5287, + "ros": 5288, + "hills": 5289, + "wwe": 5290, + "anny": 5291, + "commission": 5292, + "div": 5293, + "fying": 5294, + "consul": 5295, + "analysis": 5296, + "exi": 5297, + "tennis": 5298, + "vehicle": 5299, + "ðŁĺŃðŁĺŃ": 5300, + "ass": 5301, + "highly": 5302, + "opened": 5303, + "bann": 5304, + "ðŁĴĻ": 5305, + "mph": 5306, + "wishing": 5307, + "vor": 5308, + "fif": 5309, + "giveaway": 5310, + "rr": 5311, + "ray": 5312, + "jess": 5313, + "gat": 5314, + "icymi": 5315, + "xit": 5316, + "highest": 5317, + "york": 5318, + "pie": 5319, + "involved": 5320, + "higher": 5321, + "rie": 5322, + "malay": 5323, + "intelli": 5324, + "despite": 5325, + "chee": 5326, + "sarah": 5327, + "bean": 5328, + "recogni": 5329, + "arsen": 5330, + "talented": 5331, + "passion": 5332, + "ich": 5333, + "abc": 5334, + "leads": 5335, + "disease": 5336, + "vis": 5337, + "sec": 5338, + "presenting": 5339, + "milli": 5340, + "hole": 5341, + "shots": 5342, + "depart": 5343, + "surgery": 5344, + "govt": 5345, + "bin": 5346, + "dual": 5347, + "evi": 5348, + "longer": 5349, + "evol": 5350, + "screen": 5351, + "portrait": 5352, + "etc": 5353, + "lose": 5354, + "chat": 5355, + "pen": 5356, + "pi": 5357, + "oma": 5358, + "sick": 5359, + "erc": 5360, + "companies": 5361, + "entry": 5362, + "plane": 5363, + "gry": 5364, + "vene": 5365, + "liverpool": 5366, + "premiere": 5367, + "shared": 5368, + "ared": 5369, + "films": 5370, + "ira": 5371, + "holidays": 5372, + "cricket": 5373, + "ician": 5374, + "ving": 5375, + ".)": 5376, + "ultimate": 5377, + "division": 5378, + "conduc": 5379, + "sept": 5380, + "forces": 5381, + "mont": 5382, + "smart": 5383, + "disapp": 5384, + "sunshine": 5385, + "ind": 5386, + "bless": 5387, + 
"made": 5388, + "colors": 5389, + "frank": 5390, + "iron": 5391, + "bottle": 5392, + "sgo": 5393, + "mood": 5394, + "jason": 5395, + "eric": 5396, + "birth": 5397, + "teen": 5398, + "response": 5399, + "target": 5400, + "statement": 5401, + "fear": 5402, + "thel": 5403, + "alum": 5404, + "arab": 5405, + "blin": 5406, + "direction": 5407, + "steps": 5408, + "erial": 5409, + "worked": 5410, + "atl": 5411, + "ðŁĴķ": 5412, + "felt": 5413, + "poli": 5414, + "scenes": 5415, + "homes": 5416, + "bell": 5417, + "eat": 5418, + "ateful": 5419, + "tin": 5420, + "lace": 5421, + "folks": 5422, + "pse": 5423, + "ann": 5424, + "wisdom": 5425, + "fav": 5426, + "butter": 5427, + "sr": 5428, + "areas": 5429, + "smoo": 5430, + "biz": 5431, + "dges": 5432, + "appo": 5433, + "more": 5434, + "them": 5435, + "effect": 5436, + "windows": 5437, + "sunny": 5438, + "capital": 5439, + "totally": 5440, + "cities": 5441, + "grant": 5442, + "mbers": 5443, + "slow": 5444, + "autu": 5445, + "ilities": 5446, + "wro": 5447, + "rising": 5448, + "stics": 5449, + "violence": 5450, + "igh": 5451, + "quot": 5452, + "hit": 5453, + "tc": 5454, + "heritage": 5455, + "buff": 5456, + "nes": 5457, + "zar": 5458, + "dential": 5459, + "exac": 5460, + "edge": 5461, + "deep": 5462, + "arena": 5463, + "became": 5464, + "benefits": 5465, + "marks": 5466, + "mber": 5467, + "az": 5468, + "ames": 5469, + "preci": 5470, + "dragon": 5471, + "reg": 5472, + "dings": 5473, + "dos": 5474, + "ðŁĴª": 5475, + "nel": 5476, + "sity": 5477, + "meal": 5478, + "dist": 5479, + "legend": 5480, + "purchase": 5481, + "pical": 5482, + "stick": 5483, + "fat": 5484, + "duba": 5485, + "profess": 5486, + "carto": 5487, + "prof": 5488, + "countries": 5489, + "responsi": 5490, + "sequ": 5491, + "fab": 5492, + "tribute": 5493, + "honored": 5494, + "practic": 5495, + "purple": 5496, + "anton": 5497, + "pared": 5498, + "tough": 5499, + "summer": 5500, + "environment": 5501, + "sons": 5502, + "ðŁĻı": 5503, + "mps": 5504, + "gies": 5505, + "heroes": 5506, + "telling": 5507, + "henry": 5508, + "fen": 5509, + "knowledge": 5510, + "Ģï¸ı": 5511, + "fr": 5512, + "neg": 5513, + "ure": 5514, + "acking": 5515, + "hearts": 5516, + "soo": 5517, + "hollywood": 5518, + "jump": 5519, + "sauce": 5520, + "schedule": 5521, + "turn": 5522, + "yoga": 5523, + "creating": 5524, + "cket": 5525, + "creek": 5526, + "âŃ": 5527, + "customers": 5528, + "madri": 5529, + "gul": 5530, + "assemb": 5531, + "mount": 5532, + "cell": 5533, + "top": 5534, + "stal": 5535, + "davis": 5536, + "twi": 5537, + "sign": 5538, + "premier": 5539, + "itions": 5540, + "hearing": 5541, + "unk": 5542, + "patients": 5543, + "appear": 5544, + "heaven": 5545, + "alty": 5546, + "doctor": 5547, + "ae": 5548, + "platform": 5549, + "jeff": 5550, + "ðŁĵ·": 5551, + "regional": 5552, + "bid": 5553, + "boxing": 5554, + "exten": 5555, + "ority": 5556, + "aw": 5557, + "wise": 5558, + "ille": 5559, + "several": 5560, + "bie": 5561, + "situ": 5562, + "syria": 5563, + "âľħ": 5564, + "reminder": 5565, + "entertain": 5566, + "lion": 5567, + "partners": 5568, + "inn": 5569, + "phar": 5570, + "fau": 5571, + "pls": 5572, + "expected": 5573, + "sugar": 5574, + "decision": 5575, + "sb": 5576, + "chron": 5577, + "association": 5578, + "leaves": 5579, + "visited": 5580, + "shap": 5581, + "ðŁĴĸ": 5582, + "further": 5583, + "hann": 5584, + "wi": 5585, + "runs": 5586, + "ler": 5587, + "funding": 5588, + "filled": 5589, + "......": 5590, + "tiny": 5591, + "hang": 5592, + "org": 5593, + "cool": 5594, + "semin": 5595, + "ðŁıĨ": 5596, + "spons": 5597, + 
"navy": 5598, + "saint": 5599, + "drug": 5600, + "dal": 5601, + "roun": 5602, + "covered": 5603, + "traditional": 5604, + "investment": 5605, + "dete": 5606, + "alism": 5607, + "flow": 5608, + "nis": 5609, + "sunrise": 5610, + "feat": 5611, + "fted": 5612, + "weird": 5613, + "jere": 5614, + "vegan": 5615, + "medicine": 5616, + "ano": 5617, + "accu": 5618, + "delivery": 5619, + "temple": 5620, + "changing": 5621, + "wilson": 5622, + "philipp": 5623, + "refe": 5624, + "nd": 5625, + "iser": 5626, + "gay": 5627, + "rand": 5628, + "atives": 5629, + "tely": 5630, + "pand": 5631, + "intellig": 5632, + "gare": 5633, + "ambas": 5634, + "demon": 5635, + "committee": 5636, + "strategy": 5637, + "refuge": 5638, + "budget": 5639, + "protec": 5640, + "pier": 5641, + "express": 5642, + "nomin": 5643, + "economy": 5644, + "allow": 5645, + "icon": 5646, + "galax": 5647, + "oh": 5648, + "indivi": 5649, + "demand": 5650, + "virgin": 5651, + "luke": 5652, + "alists": 5653, + "mani": 5654, + "smi": 5655, + "judge": 5656, + "enty": 5657, + "michi": 5658, + "result": 5659, + "amed": 5660, + "speaks": 5661, + "',": 5662, + "houston": 5663, + "shin": 5664, + "bing": 5665, + "fly": 5666, + "chem": 5667, + "auto": 5668, + "vas": 5669, + "get": 5670, + "arm": 5671, + "thanks": 5672, + "din": 5673, + "gang": 5674, + "xx": 5675, + "sion": 5676, + "located": 5677, + "pl": 5678, + "josh": 5679, + "info": 5680, + "joins": 5681, + "adverti": 5682, + "otd": 5683, + "eld": 5684, + "sie": 5685, + "reasons": 5686, + "vent": 5687, + "ðŁĩºðŁĩ¸": 5688, + "âł": 5689, + "conversation": 5690, + "studi": 5691, + "ðŁĶ¥ðŁĶ¥": 5692, + "gos": 5693, + "sounds": 5694, + "unit": 5695, + "musc": 5696, + "gel": 5697, + "acked": 5698, + "paci": 5699, + "cos": 5700, + "dere": 5701, + "uu": 5702, + "ao": 5703, + "lam": 5704, + "inspiring": 5705, + "arms": 5706, + "tware": 5707, + "matters": 5708, + "addic": 5709, + "dude": 5710, + "ext": 5711, + "crisis": 5712, + "bath": 5713, + "meet": 5714, + "singh": 5715, + "expect": 5716, + "delhi": 5717, + "rescue": 5718, + "worst": 5719, + "aug": 5720, + "shipping": 5721, + "serving": 5722, + "sto": 5723, + "dark": 5724, + "aces": 5725, + "historic": 5726, + "landscape": 5727, + "designer": 5728, + "billion": 5729, + "grateful": 5730, + "wake": 5731, + "eve": 5732, + "miller": 5733, + "housing": 5734, + "dynam": 5735, + "isco": 5736, + "beha": 5737, + "shop": 5738, + "prou": 5739, + "eas": 5740, + "asia": 5741, + "eding": 5742, + "kon": 5743, + "department": 5744, + "awar": 5745, + "marine": 5746, + "inci": 5747, + "photographer": 5748, + "tape": 5749, + "logo": 5750, + "rings": 5751, + "dit": 5752, + "----": 5753, + "vinyl": 5754, + "wc": 5755, + "voting": 5756, + "seven": 5757, + "ambassad": 5758, + "dallas": 5759, + "tu": 5760, + "comment": 5761, + "kra": 5762, + "bles": 5763, + "wag": 5764, + "ud": 5765, + "audio": 5766, + "strike": 5767, + "official": 5768, + "ots": 5769, + "metho": 5770, + "tools": 5771, + "radi": 5772, + "alan": 5773, + "hunt": 5774, + "watched": 5775, + "ake": 5776, + "fake": 5777, + "drinking": 5778, + "merry": 5779, + "ml": 5780, + "bday": 5781, + "rio": 5782, + "nike": 5783, + "cant": 5784, + "repe": 5785, + "costu": 5786, + "murder": 5787, + "akers": 5788, + "chers": 5789, + "outs": 5790, + "beginning": 5791, + "sos": 5792, + "ades": 5793, + "nin": 5794, + "notes": 5795, + "wrote": 5796, + "solo": 5797, + "ci": 5798, + "lighting": 5799, + "urban": 5800, + "brexit": 5801, + "attend": 5802, + "shirts": 5803, + "playo": 5804, + "actress": 5805, + "plic": 5806, + "standard": 5807, 
+ "quotes": 5808, + "parade": 5809, + "ancient": 5810, + "©": 5811, + "turing": 5812, + "ree": 5813, + "primary": 5814, + "flash": 5815, + "citiz": 5816, + "mates": 5817, + "stein": 5818, + "zi": 5819, + "clinton": 5820, + "skin": 5821, + "gene": 5822, + "hum": 5823, + "gar": 5824, + "tle": 5825, + "yi": 5826, + "focu": 5827, + "dean": 5828, + "plants": 5829, + "cyber": 5830, + "bu": 5831, + "ome": 5832, + "hop": 5833, + "address": 5834, + "tix": 5835, + "gifts": 5836, + "relationship": 5837, + "subscri": 5838, + "feed": 5839, + "exactly": 5840, + "hawks": 5841, + "exo": 5842, + "stress": 5843, + "sn": 5844, + "arrested": 5845, + "ane": 5846, + "software": 5847, + "zero": 5848, + "theme": 5849, + "mumb": 5850, + "immigr": 5851, + "mia": 5852, + "makeup": 5853, + "pleasure": 5854, + "univers": 5855, + "harb": 5856, + "engine": 5857, + "aper": 5858, + "rin": 5859, + "bra": 5860, + "institute": 5861, + "leather": 5862, + "alth": 5863, + "singing": 5864, + "cos": 5865, + "ghty": 5866, + "meas": 5867, + "stic": 5868, + "side": 5869, + "insurance": 5870, + "cot": 5871, + "pitch": 5872, + "mountains": 5873, + "crimin": 5874, + "supre": 5875, + "valentine": 5876, + "ater": 5877, + "wouldn": 5878, + "scale": 5879, + "related": 5880, + "regar": 5881, + "startup": 5882, + "packed": 5883, + "mike": 5884, + "weekly": 5885, + "pts": 5886, + "count": 5887, + "har": 5888, + "gotten": 5889, + "mind": 5890, + "berlin": 5891, + "conditions": 5892, + "switch": 5893, + "corn": 5894, + "save": 5895, + "gli": 5896, + "emergency": 5897, + "tuned": 5898, + "stock": 5899, + "discussing": 5900, + "everybody": 5901, + "sday": 5902, + "whether": 5903, + "wrestling": 5904, + "eces": 5905, + "gender": 5906, + "chen": 5907, + "ðŁijĢ": 5908, + "madrid": 5909, + "marathon": 5910, + "egg": 5911, + "ier": 5912, + "thx": 5913, + "asking": 5914, + "korea": 5915, + "wolf": 5916, + "aya": 5917, + "gm": 5918, + "gau": 5919, + "atory": 5920, + "vr": 5921, + "grass": 5922, + "killing": 5923, + "bble": 5924, + "uro": 5925, + "uni": 5926, + "eth": 5927, + "shore": 5928, + "then": 5929, + "reale": 5930, + "bottom": 5931, + "exerc": 5932, + "kar": 5933, + "ories": 5934, + "adri": 5935, + "sands": 5936, + "sex": 5937, + ".'": 5938, + "volunteers": 5939, + "perform": 5940, + "parliam": 5941, + "include": 5942, + "delighted": 5943, + "executive": 5944, + "fuel": 5945, + "kiss": 5946, + "ãħ": 5947, + "charge": 5948, + "hu": 5949, + "cakes": 5950, + "vet": 5951, + "glu": 5952, + "agree": 5953, + "prices": 5954, + "nau": 5955, + "hl": 5956, + "gru": 5957, + "raj": 5958, + "strength": 5959, + "bic": 5960, + "spending": 5961, + "ales": 5962, + "aven": 5963, + "blast": 5964, + ":(": 5965, + "yof": 5966, + "normal": 5967, + "six": 5968, + "quick": 5969, + "sea": 5970, + "daw": 5971, + "meets": 5972, + "lovers": 5973, + "updated": 5974, + "potat": 5975, + "completed": 5976, + "cook": 5977, + "opportunities": 5978, + "pure": 5979, + "organic": 5980, + "temper": 5981, + "cam": 5982, + "avoid": 5983, + "parking": 5984, + "dubai": 5985, + "ando": 5986, + "distri": 5987, + "toy": 5988, + "completely": 5989, + "donald": 5990, + "trial": 5991, + "bass": 5992, + "boun": 5993, + "background": 5994, + "vas": 5995, + "marvel": 5996, + "lum": 5997, + "rus": 5998, + "tool": 5999, + "commissi": 6000, + "throwback": 6001, + "finding": 6002, + "islam": 6003, + "!?": 6004, + "stop": 6005, + "evil": 6006, + "oral": 6007, + "residents": 6008, + "identi": 6009, + "oak": 6010, + "ðŁİ¶": 6011, + "lil": 6012, + "spanish": 6013, + "chapter": 6014, + "stopped": 6015, + 
"direct": 6016, + "hosted": 6017, + "picked": 6018, + "labour": 6019, + "lewis": 6020, + "defense": 6021, + "à®": 6022, + "healthcare": 6023, + "whis": 6024, + "math": 6025, + "peak": 6026, + "raised": 6027, + "fix": 6028, + "bull": 6029, + "thir": 6030, + "chelsea": 6031, + "folk": 6032, + "tre": 6033, + "candi": 6034, + "paul": 6035, + "either": 6036, + "adam": 6037, + "poetry": 6038, + "jewelry": 6039, + "ð٦": 6040, + "pray": 6041, + "ا": 6042, + "gc": 6043, + "oz": 6044, + "wishes": 6045, + "foreign": 6046, + "sung": 6047, + "learned": 6048, + "ene": 6049, + "ning": 6050, + "michael": 6051, + "illustration": 6052, + "legendary": 6053, + "wav": 6054, + "bau": 6055, + "ðŁļ¨": 6056, + "calend": 6057, + "streets": 6058, + "âĨ": 6059, + "monster": 6060, + "buck": 6061, + "gr": 6062, + "school": 6063, + "bath": 6064, + "waste": 6065, + "neck": 6066, + "hawa": 6067, + "beach": 6068, + "replac": 6069, + "ject": 6070, + "oner": 6071, + "factory": 6072, + "count": 6073, + "ðŁĵ¸": 6074, + "morgan": 6075, + "dering": 6076, + "sean": 6077, + "stephen": 6078, + "dep": 6079, + "novel": 6080, + "videos": 6081, + "ical": 6082, + "pressure": 6083, + "arsenal": 6084, + "expre": 6085, + "irs": 6086, + "trending": 6087, + "ssa": 6088, + "flash": 6089, + "resear": 6090, + "through": 6091, + "professor": 6092, + "sculp": 6093, + "tos": 6094, + "gged": 6095, + "mma": 6096, + "bee": 6097, + "ape": 6098, + "hunter": 6099, + "ami": 6100, + "hei": 6101, + "plastic": 6102, + "bucks": 6103, + "universe": 6104, + "legen": 6105, + "nigeria": 6106, + "pleased": 6107, + "ris": 6108, + "thinks": 6109, + "autumn": 6110, + "ids": 6111, + "dis": 6112, + "anthony": 6113, + "ðŁı½": 6114, + "aked": 6115, + "glasses": 6116, + "finance": 6117, + "zer": 6118, + "kas": 6119, + "contract": 6120, + "numbers": 6121, + "shaw": 6122, + "partnership": 6123, + "til": 6124, + "launched": 6125, + "sal": 6126, + "victoria": 6127, + "theater": 6128, + "usual": 6129, + "names": 6130, + "period": 6131, + "eliza": 6132, + "ith": 6133, + "barcel": 6134, + "rocks": 6135, + "bags": 6136, + "mate": 6137, + "distribu": 6138, + "jon": 6139, + "diffic": 6140, + "alized": 6141, + "curren": 6142, + "scored": 6143, + "bha": 6144, + "dublin": 6145, + "rose": 6146, + "inted": 6147, + "solid": 6148, + "behavi": 6149, + "walker": 6150, + "simply": 6151, + "gardens": 6152, + "headed": 6153, + "ini": 6154, + "ohio": 6155, + "weap": 6156, + "fo": 6157, + "glen": 6158, + "estate": 6159, + "random": 6160, + "thunder": 6161, + "thru": 6162, + "kill": 6163, + "jacket": 6164, + "iti": 6165, + "entertainment": 6166, + "thanksgiving": 6167, + "ental": 6168, + "encoura": 6169, + "elo": 6170, + "ather": 6171, + "tank": 6172, + "highlights": 6173, + "fting": 6174, + "rule": 6175, + "models": 6176, + "border": 6177, + "bjp": 6178, + "husband": 6179, + "indone": 6180, + "kenya": 6181, + "bears": 6182, + "alo": 6183, + "ninten": 6184, + "pix": 6185, + "stro": 6186, + "orders": 6187, + "salad": 6188, + "roads": 6189, + "nor": 6190, + "lation": 6191, + "sophi": 6192, + "ðŁı¼": 6193, + "pieces": 6194, + "bone": 6195, + "mins": 6196, + "includes": 6197, + "nutr": 6198, + "phil": 6199, + "sent": 6200, + "fundra": 6201, + "gain": 6202, + "borough": 6203, + "nad": 6204, + "monday": 6205, + "activity": 6206, + "items": 6207, + "becoming": 6208, + "kenne": 6209, + "detro": 6210, + "cardi": 6211, + "guests": 6212, + "ux": 6213, + "worldwide": 6214, + "severe": 6215, + "news": 6216, + "thankful": 6217, + "fiction": 6218, + "vege": 6219, + "mall": 6220, + "sian": 6221, + "eral": 6222, 
+ "injury": 6223, + "lee": 6224, + "menu": 6225, + "dancing": 6226, + "scotti": 6227, + "example": 6228, + "(#": 6229, + "nai": 6230, + "studios": 6231, + "bai": 6232, + "ðŁĴĽ": 6233, + "jav": 6234, + "diamond": 6235, + "vince": 6236, + "rick": 6237, + "protection": 6238, + "lincol": 6239, + "champs": 6240, + "approach": 6241, + "dar": 6242, + "mile": 6243, + "clouds": 6244, + "jeff": 6245, + "infin": 6246, + "lers": 6247, + "ples": 6248, + "peace": 6249, + "gop": 6250, + "âĻ¡": 6251, + "techn": 6252, + "stra": 6253, + "average": 6254, + "effort": 6255, + "introducing": 6256, + "diversity": 6257, + "australian": 6258, + "amp": 6259, + "boost": 6260, + "ske": 6261, + "patient": 6262, + "appreciate": 6263, + "icians": 6264, + "pur": 6265, + "fell": 6266, + "woods": 6267, + "illustr": 6268, + "ðŁĸ": 6269, + "agency": 6270, + "actions": 6271, + "britain": 6272, + "underway": 6273, + "seattle": 6274, + "eland": 6275, + "ago": 6276, + "fill": 6277, + "streaming": 6278, + "protest": 6279, + "challenges": 6280, + "kyo": 6281, + "etsy": 6282, + "cooking": 6283, + "expert": 6284, + "russ": 6285, + "rainbow": 6286, + "commercial": 6287, + "spin": 6288, + "beats": 6289, + "cry": 6290, + "valu": 6291, + "eli": 6292, + "throw": 6293, + "grams": 6294, + "levels": 6295, + "michigan": 6296, + "cad": 6297, + "adorable": 6298, + "constitu": 6299, + "ws": 6300, + "pub": 6301, + "midnight": 6302, + "that": 6303, + "netfli": 6304, + "brazil": 6305, + "diego": 6306, + "regular": 6307, + "joy": 6308, + "âĤ¬": 6309, + "liqu": 6310, + "eastern": 6311, + "kni": 6312, + "flat": 6313, + "np": 6314, + "brown": 6315, + "wer": 6316, + "sey": 6317, + "tters": 6318, + "acting": 6319, + "vanc": 6320, + "cycling": 6321, + "programme": 6322, + "raw": 6323, + "complex": 6324, + "tattoo": 6325, + "throwbackthursday": 6326, + "sessions": 6327, + "rooms": 6328, + "sight": 6329, + "species": 6330, + "bomb": 6331, + "laugh": 6332, + "keeps": 6333, + "moon": 6334, + "officers": 6335, + "conver": 6336, + "tr": 6337, + "hash": 6338, + "tack": 6339, + "rious": 6340, + "adap": 6341, + "aj": 6342, + "recogn": 6343, + "expo": 6344, + "sugge": 6345, + "confirmed": 6346, + "rolling": 6347, + "dressing": 6348, + "ict": 6349, + "friday": 6350, + "phones": 6351, + "ridge": 6352, + "concept": 6353, + "roy": 6354, + "keys": 6355, + "effor": 6356, + "cate": 6357, + "kne": 6358, + "even": 6359, + "lay": 6360, + "communities": 6361, + "mod": 6362, + "naz": 6363, + "everywhere": 6364, + "alab": 6365, + "bitcoin": 6366, + "banks": 6367, + "outdoor": 6368, + "federal": 6369, + "stores": 6370, + "hp": 6371, + "cal": 6372, + "mely": 6373, + "signific": 6374, + "bear": 6375, + "republic": 6376, + "closer": 6377, + "allah": 6378, + "pick": 6379, + "xd": 6380, + "palace": 6381, + "chill": 6382, + "bam": 6383, + "erous": 6384, + "una": 6385, + "allen": 6386, + "outstanding": 6387, + "olympic": 6388, + "supply": 6389, + "figu": 6390, + "vau": 6391, + "lp": 6392, + "charlie": 6393, + "unes": 6394, + ">>>": 6395, + "legends": 6396, + "icial": 6397, + "coast": 6398, + "benefit": 6399, + "multi": 6400, + "fits": 6401, + "farmers": 6402, + "amount": 6403, + "sisters": 6404, + "harve": 6405, + "honey": 6406, + "queen": 6407, + "bers": 6408, + "plann": 6409, + "âŃIJ": 6410, + "mu": 6411, + "barcelona": 6412, + "alber": 6413, + "status": 6414, + "remain": 6415, + "extra": 6416, + "candy": 6417, + "vious": 6418, + "âľĮ": 6419, + "ov": 6420, + "warriors": 6421, + "-->": 6422, + "jump": 6423, + "amar": 6424, + "xmas": 6425, + "studies": 6426, + "iors": 6427, + "kor": 
6428, + "donate": 6429, + "prep": 6430, + "fish": 6431, + "ima": 6432, + "painted": 6433, + "admini": 6434, + "cosplay": 6435, + "sports": 6436, + "drops": 6437, + "fighter": 6438, + "evidence": 6439, + "ðŁĴª": 6440, + "lake": 6441, + "rob": 6442, + "cinema": 6443, + "profile": 6444, + "ñ": 6445, + "stands": 6446, + "legacy": 6447, + "shape": 6448, + "roof": 6449, + "civil": 6450, + "ians": 6451, + "syl": 6452, + "sham": 6453, + "voted": 6454, + "retail": 6455, + "philli": 6456, + "listed": 6457, + "duty": 6458, + "nb": 6459, + "thes": 6460, + "fare": 6461, + "auction": 6462, + "fficial": 6463, + "storms": 6464, + "dp": 6465, + "loun": 6466, + "shops": 6467, + "aly": 6468, + "anime": 6469, + "multiple": 6470, + "ðŁĺįðŁĺį": 6471, + "psycho": 6472, + "jean": 6473, + "apart": 6474, + "candidate": 6475, + "ggy": 6476, + "conf": 6477, + "joseph": 6478, + "wick": 6479, + "meat": 6480, + "frame": 6481, + "cl": 6482, + "forgot": 6483, + "phy": 6484, + "fing": 6485, + "lied": 6486, + "rep": 6487, + "seed": 6488, + "fall": 6489, + "ufc": 6490, + "nut": 6491, + "lind": 6492, + "mode": 6493, + "fields": 6494, + "ence": 6495, + "sley": 6496, + "ð٤Ķ": 6497, + "chill": 6498, + "followed": 6499, + "announces": 6500, + "corru": 6501, + "trophy": 6502, + "themselves": 6503, + "acle": 6504, + "aldu": 6505, + "kong": 6506, + "lon": 6507, + "sv": 6508, + "broke": 6509, + "anderson": 6510, + "tai": 6511, + "story": 6512, + "temporary": 6513, + "activities": 6514, + "kati": 6515, + "ariz": 6516, + "crystal": 6517, + "spoke": 6518, + "extremely": 6519, + "trading": 6520, + "ðŁĴļ": 6521, + "ü": 6522, + "inch": 6523, + "edin": 6524, + "outfit": 6525, + "equip": 6526, + "madi": 6527, + "formed": 6528, + "beef": 6529, + "pop": 6530, + "tiger": 6531, + "thisday": 6532, + "tired": 6533, + "neighb": 6534, + "retro": 6535, + "isa": 6536, + "unt": 6537, + "tas": 6538, + "kansas": 6539, + "dest": 6540, + "seconds": 6541, + "tay": 6542, + "hurric": 6543, + "ou": 6544, + "galaxy": 6545, + "daddy": 6546, + "brow": 6547, + "burger": 6548, + "enced": 6549, + "desk": 6550, + "accur": 6551, + "secretary": 6552, + "elite": 6553, + "kab": 6554, + "chin": 6555, + "tourism": 6556, + "buddy": 6557, + "icide": 6558, + "dressed": 6559, + "ud": 6560, + "vacation": 6561, + "cheers": 6562, + "comfor": 6563, + "characters": 6564, + "jet": 6565, + "buying": 6566, + "lins": 6567, + "nap": 6568, + "realestate": 6569, + "lie": 6570, + "afc": 6571, + "iii": 6572, + "fame": 6573, + "nr": 6574, + "bat": 6575, + "agent": 6576, + "makers": 6577, + "â̼": 6578, + "sector": 6579, + "opti": 6580, + "leon": 6581, + "diet": 6582, + "prayer": 6583, + "hip": 6584, + "mir": 6585, + "lex": 6586, + "bry": 6587, + "ana": 6588, + "passing": 6589, + "wen": 6590, + "recovery": 6591, + "aki": 6592, + "popul": 6593, + "resort": 6594, + "maria": 6595, + "stuck": 6596, + "reads": 6597, + "tier": 6598, + "perfec": 6599, + "netflix": 6600, + "poo": 6601, + "champ": 6602, + "oc": 6603, + "reduce": 6604, + "wered": 6605, + "comments": 6606, + "claim": 6607, + "accident": 6608, + "sag": 6609, + "hack": 6610, + "salt": 6611, + "kinda": 6612, + "killer": 6613, + "ios": 6614, + "zy": 6615, + "exchange": 6616, + "lecture": 6617, + "enger": 6618, + "icking": 6619, + "tau": 6620, + "reveals": 6621, + "prison": 6622, + "zom": 6623, + "ghan": 6624, + "ul": 6625, + "journal": 6626, + "iot": 6627, + "trin": 6628, + "jona": 6629, + "governor": 6630, + "cape": 6631, + "quarter": 6632, + "spective": 6633, + "impressive": 6634, + "babies": 6635, + "tx": 6636, + "mill": 6637, + "oy": 
6638, + "harri": 6639, + "joint": 6640, + "sue": 6641, + "collaboration": 6642, + "trend": 6643, + "revolution": 6644, + "renew": 6645, + "alumni": 6646, + "gett": 6647, + "shell": 6648, + "sunday": 6649, + "entu": 6650, + "nic": 6651, + "donaldtrump": 6652, + "blockchain": 6653, + "pacific": 6654, + "explains": 6655, + "spy": 6656, + "advoc": 6657, + "paradi": 6658, + "tof": 6659, + "starring": 6660, + "pav": 6661, + "feed": 6662, + "brac": 6663, + "smoke": 6664, + "hamp": 6665, + "yam": 6666, + "tokyo": 6667, + "simon": 6668, + "dh": 6669, + "effici": 6670, + "physical": 6671, + "nj": 6672, + "elli": 6673, + "slow": 6674, + "graduate": 6675, + "americans": 6676, + "tify": 6677, + "fred": 6678, + "apore": 6679, + "finds": 6680, + "robin": 6681, + "wet": 6682, + "notice": 6683, + "semi": 6684, + "unve": 6685, + "kom": 6686, + "pilot": 6687, + "screening": 6688, + "daily": 6689, + "ðŁĴĹ": 6690, + "royal": 6691, + "spa": 6692, + "votes": 6693, + "nag": 6694, + "whate": 6695, + "attending": 6696, + "experim": 6697, + "addition": 6698, + "kate": 6699, + "stol": 6700, + "mali": 6701, + "foot": 6702, + "christ": 6703, + "chan": 6704, + "dee": 6705, + "licen": 6706, + "global": 6707, + "moore": 6708, + "tia": 6709, + "brigh": 6710, + "mystery": 6711, + "yay": 6712, + "âĿ¤ï¸ıâĿ¤ï¸ı": 6713, + "creati": 6714, + "mechan": 6715, + "clock": 6716, + "dic": 6717, + "âĢĶ": 6718, + "pper": 6719, + "alph": 6720, + "throughout": 6721, + "allow": 6722, + "resources": 6723, + "selection": 6724, + "hamil": 6725, + "bbq": 6726, + "aaaa": 6727, + "virginia": 6728, + "disney": 6729, + "eng": 6730, + "sored": 6731, + "drinks": 6732, + "fancy": 6733, + "consider": 6734, + "enda": 6735, + "jane": 6736, + "handmade": 6737, + "dul": 6738, + "ontari": 6739, + "ius": 6740, + "sville": 6741, + "colorado": 6742, + "whatever": 6743, + "wheel": 6744, + "promise": 6745, + "never": 6746, + "designs": 6747, + "ably": 6748, + "sexual": 6749, + "vancou": 6750, + "ati": 6751, + "convention": 6752, + "cultural": 6753, + "singapore": 6754, + "promo": 6755, + "loaded": 6756, + "glasgo": 6757, + "ppl": 6758, + "noo": 6759, + "kee": 6760, + "stem": 6761, + "mention": 6762, + "ido": 6763, + "cruise": 6764, + "riding": 6765, + "becomes": 6766, + "bey": 6767, + "âļ½ï¸ı": 6768, + "twin": 6769, + "dedicated": 6770, + "nash": 6771, + "desi": 6772, + "workout": 6773, + "jenni": 6774, + "iv": 6775, + "groups": 6776, + "relax": 6777, + "phoeni": 6778, + "lift": 6779, + "mixed": 6780, + "mck": 6781, + "pc": 6782, + "must": 6783, + "metro": 6784, + "cies": 6785, + "yar": 6786, + "aim": 6787, + "anger": 6788, + "ie": 6789, + "recy": 6790, + "married": 6791, + "dropped": 6792, + "engag": 6793, + "lest": 6794, + "ambassador": 6795, + "oph": 6796, + "des": 6797, + "wick": 6798, + "assistant": 6799, + "natur": 6800, + "fail": 6801, + "ltd": 6802, + "short": 6803, + "kap": 6804, + "shaw": 6805, + "bigger": 6806, + "remains": 6807, + "critical": 6808, + "survey": 6809, + "coverage": 6810, + "erson": 6811, + "wind": 6812, + "nb": 6813, + "billy": 6814, + "letes": 6815, + "acts": 6816, + "jimmy": 6817, + "atlan": 6818, + "aland": 6819, + "tc": 6820, + "importance": 6821, + "damage": 6822, + "fg": 6823, + "storage": 6824, + "twt": 6825, + "bond": 6826, + "balance": 6827, + "crying": 6828, + "puppy": 6829, + "vote": 6830, + "push": 6831, + "ðŁĴľ": 6832, + "poly": 6833, + "mel": 6834, + "london": 6835, + "terrori": 6836, + "effective": 6837, + "corporate": 6838, + "atlanta": 6839, + "jaco": 6840, + "nasa": 6841, + "greek": 6842, + "senate": 6843, + "ish": 
6844, + "eva": 6845, + "intelligence": 6846, + "efforts": 6847, + "alco": 6848, + "kun": 6849, + "hall": 6850, + "diag": 6851, + "claims": 6852, + "first": 6853, + "hb": 6854, + "bae": 6855, + "vul": 6856, + "pull": 6857, + "°": 6858, + "separ": 6859, + "speed": 6860, + "victi": 6861, + "onthisday": 6862, + "audience": 6863, + "rates": 6864, + "teach": 6865, + "filming": 6866, + "bush": 6867, + "song": 6868, + "yum": 6869, + "brun": 6870, + "raine": 6871, + "awa": 6872, + "parks": 6873, + "ðĿ": 6874, + "rabb": 6875, + "rach": 6876, + "raid": 6877, + "reached": 6878, + "rail": 6879, + "moves": 6880, + "selected": 6881, + "fri": 6882, + "raising": 6883, + "omy": 6884, + "stones": 6885, + "suk": 6886, + "francisco": 6887, + "cases": 6888, + "capit": 6889, + "confu": 6890, + "wtf": 6891, + "poke": 6892, + "equipment": 6893, + "greg": 6894, + "essential": 6895, + "offering": 6896, + "nex": 6897, + "pies": 6898, + "bec": 6899, + "creation": 6900, + "chairman": 6901, + "crown": 6902, + "wal": 6903, + "johnny": 6904, + "shift": 6905, + "neck": 6906, + "bang": 6907, + "bird": 6908, + "ðŁĺı": 6909, + "duck": 6910, + "reserve": 6911, + "depu": 6912, + "masters": 6913, + "overall": 6914, + "notic": 6915, + "juice": 6916, + "sneak": 6917, + "cheer": 6918, + "classes": 6919, + "eagles": 6920, + "nca": 6921, + "carpet": 6922, + "civil": 6923, + "coaches": 6924, + "harris": 6925, + "ups": 6926, + "balls": 6927, + "decor": 6928, + "martin": 6929, + "ros": 6930, + "vice": 6931, + "announcement": 6932, + "whose": 6933, + "tigers": 6934, + "stered": 6935, + "cts": 6936, + "dram": 6937, + "steel": 6938, + "young": 6939, + "install": 6940, + "suppo": 6941, + "recording": 6942, + "deck": 6943, + "seats": 6944, + "lder": 6945, + "angle": 6946, + "bot": 6947, + "styles": 6948, + "elections": 6949, + "fortun": 6950, + "nab": 6951, + "butter": 6952, + "arian": 6953, + "kash": 6954, + "inner": 6955, + "oured": 6956, + "beast": 6957, + "wei": 6958, + "iconic": 6959, + "experts": 6960, + "necess": 6961, + "beng": 6962, + "james": 6963, + "lia": 6964, + "greece": 6965, + "ðŁĵ·": 6966, + "ðŁĺģ": 6967, + "goodbye": 6968, + "mitch": 6969, + "twice": 6970, + "mumbai": 6971, + "steam": 6972, + "rush": 6973, + "medal": 6974, + "nett": 6975, + "fashion": 6976, + "tar": 6977, + "rs": 6978, + "saving": 6979, + "ricul": 6980, + "lm": 6981, + "sleeping": 6982, + "brooklyn": 6983, + "miss": 6984, + "sending": 6985, + "discovered": 6986, + "sphere": 6987, + "oftheday": 6988, + "kicks": 6989, + "missions": 6990, + "wright": 6991, + "ern": 6992, + "ghtly": 6993, + "ious": 6994, + "melbourne": 6995, + "startu": 6996, + "moved": 6997, + "carry": 6998, + "dak": 6999, + "agues": 7000, + "belgi": 7001, + "ema": 7002, + "wayne": 7003, + "dot": 7004, + "erie": 7005, + "pel": 7006, + "itunes": 7007, + "matthew": 7008, + "nobody": 7009, + "estab": 7010, + "calm": 7011, + "winds": 7012, + "luc": 7013, + "prepare": 7014, + "trends": 7015, + "exercise": 7016, + "advant": 7017, + "ðŁĴ¯": 7018, + "athletics": 7019, + "apps": 7020, + "ctions": 7021, + "advance": 7022, + "launches": 7023, + "little": 7024, + "realdonaldtrump": 7025, + "elizabeth": 7026, + "carolina": 7027, + "hub": 7028, + "hidden": 7029, + "nw": 7030, + "user": 7031, + "poll": 7032, + "greater": 7033, + "most": 7034, + "fed": 7035, + "pat": 7036, + "lifestyle": 7037, + "sati": 7038, + "scores": 7039, + "marriage": 7040, + "lr": 7041, + "avenue": 7042, + "deserve": 7043, + "rif": 7044, + "ðŁĹ": 7045, + "watch": 7046, + "championships": 7047, + "gray": 7048, + "enni": 7049, + 
"cotton": 7050, + "gom": 7051, + "where": 7052, + "package": 7053, + "sum": 7054, + "absolu": 7055, + "newly": 7056, + "foods": 7057, + "tyler": 7058, + "assembly": 7059, + "muslim": 7060, + "bank": 7061, + "rememb": 7062, + "options": 7063, + "producer": 7064, + "lando": 7065, + "funds": 7066, + "upper": 7067, + "shadow": 7068, + "progre": 7069, + "cop": 7070, + "inge": 7071, + "legs": 7072, + "detroit": 7073, + "hillary": 7074, + "jose": 7075, + "giants": 7076, + "soup": 7077, + "sustainable": 7078, + "tus": 7079, + "clothes": 7080, + "rocking": 7081, + "nz": 7082, + "minne": 7083, + "materi": 7084, + "bruce": 7085, + "eart": 7086, + "casting": 7087, + "independent": 7088, + "thousands": 7089, + "tah": 7090, + "decl": 7091, + "veterans": 7092, + "lions": 7093, + "wrap": 7094, + "â̦": 7095, + "dess": 7096, + "bling": 7097, + "stine": 7098, + "eggs": 7099, + "oon": 7100, + "closing": 7101, + "zay": 7102, + "att": 7103, + "bacon": 7104, + "fail": 7105, + "arizona": 7106, + "depre": 7107, + "ghost": 7108, + "newsp": 7109, + "wers": 7110, + "vip": 7111, + "liked": 7112, + "ident": 7113, + "volunteer": 7114, + "adult": 7115, + "pupp": 7116, + "circle": 7117, + "material": 7118, + "degree": 7119, + "grown": 7120, + "boom": 7121, + "calendar": 7122, + "sur": 7123, + "viewing": 7124, + "athletes": 7125, + "chand": 7126, + "rell": 7127, + "asian": 7128, + "entr": 7129, + "volley": 7130, + "victims": 7131, + "body": 7132, + "mama": 7133, + "transfer": 7134, + "geek": 7135, + "indic": 7136, + "saved": 7137, + "mai": 7138, + "gent": 7139, + "its": 7140, + "lounge": 7141, + "kol": 7142, + "theory": 7143, + "situation": 7144, + "islands": 7145, + "arth": 7146, + "zoo": 7147, + "flood": 7148, + "viously": 7149, + "showed": 7150, + "parliament": 7151, + "chev": 7152, + "eline": 7153, + "attrac": 7154, + "abad": 7155, + "tail": 7156, + "hrs": 7157, + "lus": 7158, + "portu": 7159, + "gory": 7160, + "provides": 7161, + "toys": 7162, + "death": 7163, + "infe": 7164, + "ance": 7165, + "gle": 7166, + "liam": 7167, + "lover": 7168, + "hud": 7169, + "dvd": 7170, + "revealed": 7171, + "gw": 7172, + "rement": 7173, + "cathe": 7174, + "lying": 7175, + "radio": 7176, + "derby": 7177, + "stors": 7178, + "chemi": 7179, + "hospit": 7180, + "⾨": 7181, + "':": 7182, + "ilove": 7183, + "lemon": 7184, + "republic": 7185, + "sni": 7186, + "ness": 7187, + "door": 7188, + "reaction": 7189, + "pregn": 7190, + "flav": 7191, + "scholar": 7192, + "spotify": 7193, + "isation": 7194, + "visual": 7195, + "aware": 7196, + "sponsored": 7197, + "joke": 7198, + "lessons": 7199, + "legis": 7200, + "lock": 7201, + "simil": 7202, + "ðŁĺĭ": 7203, + "kind": 7204, + "lay": 7205, + "mah": 7206, + "hoping": 7207, + "vancouver": 7208, + "aser": 7209, + "cleaning": 7210, + "gala": 7211, + "threat": 7212, + "lap": 7213, + "ache": 7214, + "romance": 7215, + "expen": 7216, + "repost": 7217, + "zam": 7218, + "epi": 7219, + "mirror": 7220, + "oak": 7221, + "adul": 7222, + "batman": 7223, + "slu": 7224, + "lc": 7225, + "viewed": 7226, + "reviews": 7227, + "dates": 7228, + "indonesia": 7229, + "activi": 7230, + "offen": 7231, + "leaf": 7232, + "isi": 7233, + "agricul": 7234, + "costume": 7235, + "sites": 7236, + "spiritu": 7237, + "appearance": 7238, + "iry": 7239, + "stair": 7240, + "application": 7241, + "spectac": 7242, + "icity": 7243, + "skies": 7244, + "handle": 7245, + "punk": 7246, + "paradise": 7247, + "tn": 7248, + "deal": 7249, + "providing": 7250, + "doc": 7251, + "receiving": 7252, + "brew": 7253, + "microsoft": 7254, + "ö": 7255, + "ferr": 
7256, + "metro": 7257, + "thail": 7258, + "yum": 7259, + "carter": 7260, + "á": 7261, + "gentle": 7262, + "breaks": 7263, + "cooper": 7264, + "showcase": 7265, + "cutting": 7266, + "egypt": 7267, + "baby": 7268, + "seminar": 7269, + "glori": 7270, + "sson": 7271, + "fave": 7272, + "rehear": 7273, + "lotte": 7274, + "lady": 7275, + "alas": 7276, + "prep": 7277, + "delivered": 7278, + "nuclear": 7279, + "iro": 7280, + "engagement": 7281, + "atta": 7282, + "conven": 7283, + "zan": 7284, + "glory": 7285, + "holds": 7286, + "businesses": 7287, + "strange": 7288, + "sche": 7289, + "itself": 7290, + "grad": 7291, + "markets": 7292, + "falling": 7293, + "stats": 7294, + "geon": 7295, + "budd": 7296, + "lis": 7297, + "sheet": 7298, + "thisi": 7299, + "colo": 7300, + "desert": 7301, + "registration": 7302, + "ign": 7303, + "explain": 7304, + "interior": 7305, + "laws": 7306, + "writers": 7307, + "springs": 7308, + "kr": 7309, + "fried": 7310, + "bloom": 7311, + "infra": 7312, + "ao": 7313, + "cred": 7314, + "past": 7315, + "lineup": 7316, + "boo": 7317, + "brea": 7318, + "boots": 7319, + "celebrity": 7320, + "attacks": 7321, + "brook": 7322, + "eves": 7323, + "excu": 7324, + "cherry": 7325, + "oop": 7326, + "fascin": 7327, + "boyfriend": 7328, + "seas": 7329, + "nine": 7330, + "effects": 7331, + "powered": 7332, + "kha": 7333, + "ðŁĺĢ": 7334, + "shout": 7335, + "condition": 7336, + "ij": 7337, + "hero": 7338, + "enterpri": 7339, + "winter": 7340, + "applications": 7341, + "shoe": 7342, + "gel": 7343, + "battle": 7344, + "programs": 7345, + "wart": 7346, + "ðŁĴ¥": 7347, + "rap": 7348, + "hol": 7349, + "dangerous": 7350, + "dia": 7351, + "counter": 7352, + "rics": 7353, + "ior": 7354, + "knight": 7355, + "coat": 7356, + "emotional": 7357, + "atures": 7358, + "das": 7359, + "wheel": 7360, + "forecast": 7361, + "transport": 7362, + "glasgow": 7363, + "kingdom": 7364, + "preparing": 7365, + "immedi": 7366, + "ffin": 7367, + "awarded": 7368, + "printing": 7369, + "roman": 7370, + "fighters": 7371, + "anymore": 7372, + "belt": 7373, + "pine": 7374, + "wine": 7375, + "xi": 7376, + "employees": 7377, + "logies": 7378, + "alled": 7379, + "demo": 7380, + "birthday": 7381, + "angeles": 7382, + "log": 7383, + "drivers": 7384, + "necklace": 7385, + "kath": 7386, + "sit": 7387, + "athlete": 7388, + "efs": 7389, + "sburg": 7390, + "purpose": 7391, + "resistance": 7392, + "releases": 7393, + "tis": 7394, + "various": 7395, + "deliver": 7396, + "chal": 7397, + "sanc": 7398, + "oppo": 7399, + "craw": 7400, + "neuro": 7401, + "dra": 7402, + "supporters": 7403, + "snap": 7404, + "difficult": 7405, + "swear": 7406, + "logist": 7407, + "path": 7408, + "attempt": 7409, + "à¥": 7410, + "swimming": 7411, + "steve": 7412, + "hurt": 7413, + "included": 7414, + "bap": 7415, + "ware": 7416, + "ðŁĴĭ": 7417, + "enders": 7418, + "jake": 7419, + "leeds": 7420, + "climb": 7421, + "lb": 7422, + "imple": 7423, + "lisa": 7424, + "clothing": 7425, + "ðŁĺİ": 7426, + "dt": 7427, + "compla": 7428, + "swing": 7429, + "straw": 7430, + "vals": 7431, + "kle": 7432, + "users": 7433, + "storm": 7434, + "cuts": 7435, + "ontario": 7436, + "pan": 7437, + "handsome": 7438, + "iow": 7439, + "argu": 7440, + "checking": 7441, + "scottish": 7442, + "Ķï¸ı": 7443, + "sier": 7444, + "emma": 7445, + "pod": 7446, + "pattern": 7447, + "desh": 7448, + "enh": 7449, + "edward": 7450, + "ting": 7451, + "kh": 7452, + "half": 7453, + "lincoln": 7454, + "mother": 7455, + "alleg": 7456, + "rc": 7457, + "volleyball": 7458, + "dn": 7459, + "gay": 7460, + "ally": 7461, + 
"leton": 7462, + "grove": 7463, + "loud": 7464, + "advanced": 7465, + "respec": 7466, + "client": 7467, + "supreme": 7468, + "thailand": 7469, + "how": 7470, + "gig": 7471, + "toi": 7472, + "dot": 7473, + "dollar": 7474, + "ðŁijĩ": 7475, + "pit": 7476, + "rb": 7477, + "hn": 7478, + "produced": 7479, + "ggers": 7480, + "âĨĴ": 7481, + "mlb": 7482, + "canvas": 7483, + "fineart": 7484, + "usd": 7485, + "inthe": 7486, + "pson": 7487, + "actual": 7488, + "sl": 7489, + "tb": 7490, + "ipad": 7491, + "ensure": 7492, + "umb": 7493, + "wd": 7494, + "ska": 7495, + "mars": 7496, + "kend": 7497, + "feli": 7498, + "thing": 7499, + "countdown": 7500, + "absolute": 7501, + "rout": 7502, + "dral": 7503, + "py": 7504, + "injured": 7505, + "mint": 7506, + "hunting": 7507, + "mmer": 7508, + "sage": 7509, + "ligh": 7510, + "acity": 7511, + "expan": 7512, + "murray": 7513, + "aro": 7514, + "secure": 7515, + "fourth": 7516, + "eagle": 7517, + "relief": 7518, + "stakes": 7519, + "industrial": 7520, + "clark": 7521, + "understanding": 7522, + "seem": 7523, + "plenty": 7524, + "silver": 7525, + "clau": 7526, + "threat": 7527, + "sail": 7528, + "produce": 7529, + "abstr": 7530, + "isis": 7531, + "br": 7532, + "engers": 7533, + "worry": 7534, + "bieber": 7535, + "sj": 7536, + "justin": 7537, + "realize": 7538, + "kyle": 7539, + "espn": 7540, + "filter": 7541, + "sch": 7542, + "types": 7543, + "gamedev": 7544, + "ding": 7545, + "twitter": 7546, + "soldiers": 7547, + "pom": 7548, + "carbon": 7549, + "yards": 7550, + "childhood": 7551, + "ried": 7552, + "kel": 7553, + "eleph": 7554, + "tons": 7555, + "keynote": 7556, + "quiet": 7557, + "wire": 7558, + "posting": 7559, + "issa": 7560, + "representing": 7561, + "backs": 7562, + "alexander": 7563, + "celebrates": 7564, + "taining": 7565, + "||": 7566, + "chor": 7567, + "escape": 7568, + "peek": 7569, + "tives": 7570, + "field": 7571, + "ssie": 7572, + "impac": 7573, + "sponsor": 7574, + "rc": 7575, + "wedd": 7576, + "cannab": 7577, + "sides": 7578, + "tracks": 7579, + "compar": 7580, + "contrac": 7581, + "technical": 7582, + "bible": 7583, + "exploring": 7584, + "share": 7585, + "trav": 7586, + "nate": 7587, + "illo": 7588, + "scru": 7589, + "mingham": 7590, + "guns": 7591, + "ofthe": 7592, + "shame": 7593, + "sees": 7594, + "catho": 7595, + "access": 7596, + "cel": 7597, + "reported": 7598, + "»": 7599, + "mario": 7600, + "pad": 7601, + "hopefully": 7602, + "ouse": 7603, + "yon": 7604, + "disappo": 7605, + "olo": 7606, + "pitt": 7607, + "pac": 7608, + "gap": 7609, + "crush": 7610, + "sg": 7611, + "kle": 7612, + "gem": 7613, + "empire": 7614, + "dirty": 7615, + "ais": 7616, + "aviation": 7617, + "zealand": 7618, + "facing": 7619, + "highway": 7620, + "danny": 7621, + "spider": 7622, + "otta": 7623, + "ðŁĺĦ": 7624, + "wy": 7625, + "colours": 7626, + "infl": 7627, + "costs": 7628, + "olympics": 7629, + "aus": 7630, + "hm": 7631, + "howard": 7632, + "passes": 7633, + "lauren": 7634, + "mush": 7635, + "opin": 7636, + "rho": 7637, + "discount": 7638, + "operation": 7639, + "emily": 7640, + "mmm": 7641, + "chamber": 7642, + "dil": 7643, + "toyo": 7644, + "ship": 7645, + "samu": 7646, + "pictured": 7647, + "unic": 7648, + "pol": 7649, + "keeper": 7650, + "cartoon": 7651, + "sten": 7652, + "ignor": 7653, + "nations": 7654, + "nl": 7655, + "tasting": 7656, + "detail": 7657, + "officials": 7658, + "motor": 7659, + "francis": 7660, + "editor": 7661, + "ðŁijĩ": 7662, + "pets": 7663, + "rangers": 7664, + "tg": 7665, + "rn": 7666, + "wri": 7667, + "nichol": 7668, + "ise": 7669, + 
"spots": 7670, + "anie": 7671, + "check": 7672, + "triple": 7673, + "kumar": 7674, + "speakers": 7675, + "icing": 7676, + "prepared": 7677, + "abuse": 7678, + "friendship": 7679, + "month": 7680, + "swim": 7681, + "aire": 7682, + "scent": 7683, + "hamilton": 7684, + "indian": 7685, + "jes": 7686, + "yummy": 7687, + "tears": 7688, + "dawn": 7689, + "ized": 7690, + "worlds": 7691, + "ðŁķ": 7692, + "billi": 7693, + "stone": 7694, + "nhs": 7695, + "basic": 7696, + "por": 7697, + "stle": 7698, + "iron": 7699, + "older": 7700, + "clevel": 7701, + "eing": 7702, + "ðŁĺįðŁĺįðŁĺį": 7703, + "prints": 7704, + "firm": 7705, + "aircraft": 7706, + "finest": 7707, + "develop": 7708, + "aaron": 7709, + "tz": 7710, + "graham": 7711, + "owners": 7712, + "foli": 7713, + "lesson": 7714, + "ques": 7715, + "babe": 7716, + "craft": 7717, + "phen": 7718, + "jun": 7719, + "birmingham": 7720, + "vine": 7721, + "ller": 7722, + "ian": 7723, + "fineartamerica": 7724, + "evolu": 7725, + "stab": 7726, + "imper": 7727, + "ward": 7728, + "comic": 7729, + "wiz": 7730, + "invited": 7731, + "duke": 7732, + "match": 7733, + "ports": 7734, + "roger": 7735, + "diagno": 7736, + "kept": 7737, + "test": 7738, + "visu": 7739, + "rhy": 7740, + "soc": 7741, + "tox": 7742, + "baker": 7743, + "surface": 7744, + "covers": 7745, + "mans": 7746, + "bits": 7747, + "xbox": 7748, + "ffle": 7749, + "nan": 7750, + "gard": 7751, + "hart": 7752, + "waters": 7753, + "villa": 7754, + "retro": 7755, + "lightning": 7756, + "catholic": 7757, + "democracy": 7758, + "neighbor": 7759, + "penn": 7760, + "cran": 7761, + "jonathan": 7762, + "laura": 7763, + "vibes": 7764, + "sub": 7765, + "coaching": 7766, + "clearly": 7767, + "ukraine": 7768, + "brave": 7769, + "commitment": 7770, + "tall": 7771, + "mart": 7772, + "rap": 7773, + "modi": 7774, + "scott": 7775, + "bros": 7776, + "shower": 7777, + "ðŁı¾": 7778, + "âĺºï¸ı": 7779, + "cousin": 7780, + "approach": 7781, + "bre": 7782, + "compos": 7783, + "hilari": 7784, + "philly": 7785, + "gad": 7786, + "quickly": 7787, + "rian": 7788, + "tm": 7789, + "virtual": 7790, + "houses": 7791, + "kt": 7792, + "phoenix": 7793, + "wire": 7794, + "ffy": 7795, + "bunch": 7796, + "ancing": 7797, + "tale": 7798, + "snapchat": 7799, + "starter": 7800, + "ht": 7801, + "kicking": 7802, + "apart": 7803, + "thy": 7804, + ")!": 7805, + "blogger": 7806, + "itz": 7807, + "comfort": 7808, + "angels": 7809, + "wash": 7810, + "\":": 7811, + "argent": 7812, + "request": 7813, + "honest": 7814, + "mighty": 7815, + "bobby": 7816, + "kg": 7817, + "rol": 7818, + "thouse": 7819, + "expo": 7820, + "hc": 7821, + "tables": 7822, + "magical": 7823, + "posts": 7824, + "dem": 7825, + "nw": 7826, + "orlando": 7827, + "aber": 7828, + "***": 7829, + "ðŁĺľ": 7830, + "environmental": 7831, + "transformation": 7832, + "mile": 7833, + "wic": 7834, + "hiring": 7835, + "maine": 7836, + "boar": 7837, + "rying": 7838, + "tis": 7839, + "niture": 7840, + "tweeted": 7841, + "antonio": 7842, + "opinion": 7843, + "finale": 7844, + "diy": 7845, + "fis": 7846, + "thin": 7847, + "trouble": 7848, + "lego": 7849, + "files": 7850, + "quart": 7851, + "spa": 7852, + "currency": 7853, + "climate": 7854, + "fanart": 7855, + "railway": 7856, + "space": 7857, + "bands": 7858, + "daniel": 7859, + "motion": 7860, + "leng": 7861, + "holder": 7862, + "occu": 7863, + "marie": 7864, + "cathedral": 7865, + "buzz": 7866, + "bies": 7867, + "nascar": 7868, + "bmw": 7869, + "battery": 7870, + "charlotte": 7871, + "doctor": 7872, + "zzle": 7873, + "seven": 7874, + "insan": 7875, + "ddy": 
7876, + "sten": 7877, + "labor": 7878, + "thrilled": 7879, + "seren": 7880, + "documentary": 7881, + "waves": 7882, + "certain": 7883, + "candid": 7884, + "allowed": 7885, + "nintendo": 7886, + "starwars": 7887, + "tap": 7888, + "homemade": 7889, + "dles": 7890, + "thering": 7891, + "bree": 7892, + "empty": 7893, + "piano": 7894, + "positi": 7895, + "country": 7896, + "pork": 7897, + "puts": 7898, + "perry": 7899, + "matic": 7900, + "spotlight": 7901, + "tist": 7902, + "orities": 7903, + "wealth": 7904, + "cp": 7905, + "barbar": 7906, + "committed": 7907, + "assau": 7908, + "profit": 7909, + "eight": 7910, + "hul": 7911, + "finishing": 7912, + "runner": 7913, + "sso": 7914, + "inspec": 7915, + "charged": 7916, + "christop": 7917, + "losing": 7918, + "coal": 7919, + "hoo": 7920, + "elev": 7921, + "dele": 7922, + "moham": 7923, + "donation": 7924, + "cable": 7925, + "clinic": 7926, + "jin": 7927, + "managed": 7928, + "tering": 7929, + "â¬": 7930, + "urban": 7931, + "deputy": 7932, + "bber": 7933, + "burn": 7934, + "academic": 7935, + "ott": 7936, + "stake": 7937, + "iter": 7938, + "stown": 7939, + "acker": 7940, + "adventures": 7941, + "adams": 7942, + "greg": 7943, + "prom": 7944, + "vol": 7945, + "acqu": 7946, + "congre": 7947, + "paint": 7948, + "citizens": 7949, + "call": 7950, + "afford": 7951, + "vc": 7952, + "asks": 7953, + "thetic": 7954, + "independence": 7955, + "âĽ": 7956, + "hitting": 7957, + "blon": 7958, + "future": 7959, + "âı": 7960, + "inno": 7961, + "gene": 7962, + "boards": 7963, + "distance": 7964, + "set": 7965, + "remem": 7966, + "thal": 7967, + "prevent": 7968, + "lang": 7969, + "objec": 7970, + "susp": 7971, + "matt": 7972, + "induc": 7973, + "boro": 7974, + "pione": 7975, + "redi": 7976, + "virtu": 7977, + "printed": 7978, + "scope": 7979, + "shark": 7980, + "succe": 7981, + "astron": 7982, + "illegal": 7983, + "jag": 7984, + "cting": 7985, + "inee": 7986, + "ato": 7987, + "robin": 7988, + "nutrition": 7989, + "bf": 7990, + "dutch": 7991, + "bn": 7992, + "furniture": 7993, + "forgotten": 7994, + "atar": 7995, + "rup": 7996, + "hyper": 7997, + "branch": 7998, + "communication": 7999, + "degrees": 8000, + "onia": 8001, + "uncle": 8002, + "promote": 8003, + "orche": 8004, + "wii": 8005, + "js": 8006, + "button": 8007, + "major": 8008, + "cbs": 8009, + "bristol": 8010, + "premium": 8011, + "ordinary": 8012, + "edit": 8013, + "mg": 8014, + "weed": 8015, + "steven": 8016, + ":'": 8017, + "gus": 8018, + "tes": 8019, + "captured": 8020, + "drugs": 8021, + "dow": 8022, + "writes": 8023, + "bishop": 8024, + "wheels": 8025, + "alization": 8026, + "discovery": 8027, + "wr": 8028, + "rachel": 8029, + "neil": 8030, + "hydr": 8031, + "cutest": 8032, + "entrepreneur": 8033, + "korean": 8034, + "oregon": 8035, + "ulty": 8036, + "perfectly": 8037, + "supported": 8038, + "historical": 8039, + "twins": 8040, + "elly": 8041, + "wel": 8042, + "devil": 8043, + "income": 8044, + "scientists": 8045, + "deleg": 8046, + "hen": 8047, + "oni": 8048, + "iced": 8049, + "gio": 8050, + "curry": 8051, + "reveal": 8052, + "eg": 8053, + "buffalo": 8054, + "nol": 8055, + "opera": 8056, + "cameron": 8057, + "hahahaha": 8058, + "jab": 8059, + "graduation": 8060, + "craig": 8061, + "ral": 8062, + "if": 8063, + "organization": 8064, + "lege": 8065, + "gang": 8066, + "sud": 8067, + "edinburgh": 8068, + "lack": 8069, + "flies": 8070, + "gate": 8071, + "thrones": 8072, + "qb": 8073, + "thereal": 8074, + "eleg": 8075, + "ppin": 8076, + "cles": 8077, + "jamie": 8078, + "tnam": 8079, + "crypto": 8080, + "oul": 
8081, + "pages": 8082, + "ase": 8083, + "roots": 8084, + "stupid": 8085, + "adid": 8086, + "boot": 8087, + "protein": 8088, + "sap": 8089, + "sium": 8090, + "sus": 8091, + "endor": 8092, + "function": 8093, + "dont": 8094, + "enna": 8095, + "chy": 8096, + "sque": 8097, + "worker": 8098, + "mtv": 8099, + "ea": 8100, + "kan": 8101, + "ðŁĴļ": 8102, + "mus": 8103, + "profession": 8104, + "tto": 8105, + "operations": 8106, + "allo": 8107, + "ctor": 8108, + "invite": 8109, + "scand": 8110, + "outh": 8111, + "zim": 8112, + "links": 8113, + "clients": 8114, + "samsung": 8115, + "discusses": 8116, + "nell": 8117, + "ultra": 8118, + "somewhere": 8119, + "stewart": 8120, + "inet": 8121, + "dez": 8122, + "bout": 8123, + "factor": 8124, + "tian": 8125, + "trans": 8126, + "jeremy": 8127, + "db": 8128, + "ðŁĩ¬": 8129, + "orn": 8130, + "developing": 8131, + "spol": 8132, + "cooper": 8133, + "mau": 8134, + "remembering": 8135, + "trek": 8136, + "family": 8137, + "seniors": 8138, + "foster": 8139, + "attended": 8140, + "wing": 8141, + "transform": 8142, + "elementary": 8143, + "horiz": 8144, + "listing": 8145, + "malaysia": 8146, + "itch": 8147, + "warrior": 8148, + "philippines": 8149, + "russell": 8150, + "mend": 8151, + "initiative": 8152, + "creep": 8153, + "tops": 8154, + "briti": 8155, + "aur": 8156, + "sharp": 8157, + "advertising": 8158, + "ugly": 8159, + "achiev": 8160, + "materials": 8161, + "bug": 8162, + "device": 8163, + "bonus": 8164, + "facility": 8165, + "cole": 8166, + "nhl": 8167, + "yas": 8168, + "planned": 8169, + "pole": 8170, + "excellence": 8171, + "trick": 8172, + "confl": 8173, + "rp": 8174, + "achieve": 8175, + "loan": 8176, + "swag": 8177, + "jessica": 8178, + "howe": 8179, + "pour": 8180, + "scu": 8181, + "zoo": 8182, + "rated": 8183, + "dresses": 8184, + "rebel": 8185, + "mexican": 8186, + "coordin": 8187, + "mess": 8188, + "atlantic": 8189, + "tl": 8190, + "oscar": 8191, + "walks": 8192, + "pharmac": 8193, + "investigation": 8194, + "...#": 8195, + "cci": 8196, + "easily": 8197, + "mondaymotivation": 8198, + "yment": 8199, + "auti": 8200, + "forced": 8201, + "armed": 8202, + "colleagues": 8203, + "papers": 8204, + "proper": 8205, + "shake": 8206, + "buc": 8207, + "lean": 8208, + "exhibit": 8209, + "evement": 8210, + "cott": 8211, + "biz": 8212, + "sper": 8213, + "kent": 8214, + "swan": 8215, + "/@": 8216, + "girlfriend": 8217, + "hawk": 8218, + "âĺĢï¸ı": 8219, + "mono": 8220, + "ðŁĴĽ": 8221, + "statue": 8222, + "ðŁĺ³": 8223, + "ras": 8224, + "teeth": 8225, + "precious": 8226, + "tile": 8227, + "pam": 8228, + "swift": 8229, + "vali": 8230, + "nose": 8231, + "drunk": 8232, + "experiences": 8233, + "comeback": 8234, + "genius": 8235, + "worse": 8236, + "shef": 8237, + "rad": 8238, + "edit": 8239, + "honour": 8240, + "auspol": 8241, + "larry": 8242, + "hire": 8243, + "gordon": 8244, + "achievement": 8245, + "........": 8246, + "suicide": 8247, + "alternative": 8248, + "sup": 8249, + "surroun": 8250, + "shake": 8251, + "keith": 8252, + "pepper": 8253, + "turk": 8254, + "criminal": 8255, + "beck": 8256, + "sum": 8257, + "walls": 8258, + "cnn": 8259, + "antic": 8260, + "offe": 8261, + "colli": 8262, + "wines": 8263, + "highlight": 8264, + "hawaii": 8265, + "embar": 8266, + "lfc": 8267, + "ðŁĩ®": 8268, + "mv": 8269, + ">>": 8270, + "atmo": 8271, + "word": 8272, + "carl": 8273, + "shoutout": 8274, + "brewing": 8275, + "ìĿ": 8276, + "dof": 8277, + "sic": 8278, + "hottest": 8279, + "colon": 8280, + "hhh": 8281, + "shut": 8282, + "lowing": 8283, + "volume": 8284, + "apartment": 8285, + 
"agreement": 8286, + "destro": 8287, + "wee": 8288, + "religious": 8289, + "iowa": 8290, + "rod": 8291, + "landing": 8292, + "represent": 8293, + "ðŁĵ·:": 8294, + "las": 8295, + "usually": 8296, + "hl": 8297, + "cac": 8298, + "salv": 8299, + "along": 8300, + "laughing": 8301, + "beans": 8302, + "reminds": 8303, + "phase": 8304, + "somebody": 8305, + "mask": 8306, + "ranked": 8307, + "destroy": 8308, + "sci": 8309, + "â̼ï¸ı": 8310, + "gabri": 8311, + "leo": 8312, + "roa": 8313, + "failed": 8314, + "sil": 8315, + "refugees": 8316, + "revi": 8317, + "ring": 8318, + "berries": 8319, + "cookies": 8320, + "yy": 8321, + "conservation": 8322, + "shab": 8323, + "humans": 8324, + "determin": 8325, + "ain": 8326, + "niall": 8327, + "assu": 8328, + "mba": 8329, + "from": 8330, + "extreme": 8331, + "vices": 8332, + "commerce": 8333, + "ghtful": 8334, + "ordered": 8335, + "supports": 8336, + "recap": 8337, + "vor": 8338, + "dropping": 8339, + "correct": 8340, + "paying": 8341, + "meaning": 8342, + "nj": 8343, + "quiz": 8344, + "\"#": 8345, + "business": 8346, + "ðŁĩ®ðŁĩ": 8347, + "indigen": 8348, + "dust": 8349, + "boxes": 8350, + "blind": 8351, + "xxx": 8352, + "zzy": 8353, + "ðŁĩ¬ðŁĩ": 8354, + "ssels": 8355, + "sant": 8356, + "ddle": 8357, + "hilarious": 8358, + "design": 8359, + "wondering": 8360, + "vehicles": 8361, + "kre": 8362, + "jud": 8363, + "reception": 8364, + "parker": 8365, + "ÃŃ": 8366, + "privi": 8367, + "hydro": 8368, + "softball": 8369, + "pollu": 8370, + "locked": 8371, + "bah": 8372, + "ear": 8373, + "script": 8374, + "divi": 8375, + "brace": 8376, + "george": 8377, + "theast": 8378, + "belo": 8379, + "jal": 8380, + "tionary": 8381, + "dental": 8382, + "rocket": 8383, + "purch": 8384, + "shak": 8385, + "manufacturing": 8386, + "ez": 8387, + "itis": 8388, + "concep": 8389, + "tball": 8390, + "chs": 8391, + "directed": 8392, + "prayers": 8393, + "ook": 8394, + "philos": 8395, + "variety": 8396, + "chess": 8397, + "server": 8398, + "gand": 8399, + "balti": 8400, + "ðŁĵ¸": 8401, + "sely": 8402, + "cruz": 8403, + "spectacular": 8404, + "burning": 8405, + "represent": 8406, + "iz": 8407, + "tone": 8408, + "merce": 8409, + "hell": 8410, + "bedroom": 8411, + "establi": 8412, + "bol": 8413, + "common": 8414, + "ãĥ»": 8415, + "abor": 8416, + "kitty": 8417, + "heights": 8418, + "repair": 8419, + "william": 8420, + "quake": 8421, + "alabama": 8422, + "population": 8423, + "rev": 8424, + "rett": 8425, + "ists": 8426, + "nite": 8427, + "lem": 8428, + "aha": 8429, + "cleveland": 8430, + "rm": 8431, + "pover": 8432, + "obse": 8433, + "montre": 8434, + "mania": 8435, + "®": 8436, + "conne": 8437, + "carni": 8438, + "shah": 8439, + "fy": 8440, + "ua": 8441, + "scor": 8442, + "struggle": 8443, + "bob": 8444, + "''": 8445, + "appropri": 8446, + "decide": 8447, + "ffed": 8448, + "caster": 8449, + "sort": 8450, + "hungry": 8451, + "drag": 8452, + "اÙ": 8453, + "grounds": 8454, + "dw": 8455, + "slightly": 8456, + "cardin": 8457, + "deadline": 8458, + "bronze": 8459, + "webin": 8460, + "barry": 8461, + "silence": 8462, + "euro": 8463, + "option": 8464, + "earn": 8465, + "ðŁĴĸ": 8466, + "however": 8467, + "naren": 8468, + "nails": 8469, + "bathroom": 8470, + "vine": 8471, + "phd": 8472, + "mining": 8473, + "garage": 8474, + "()": 8475, + "shoulder": 8476, + "defeat": 8477, + "dir": 8478, + "ov": 8479, + "liberty": 8480, + "pleas": 8481, + "xon": 8482, + "compre": 8483, + "av": 8484, + "jin": 8485, + "ables": 8486, + "silent": 8487, + "famili": 8488, + "visits": 8489, + "dipl": 8490, + "habit": 8491, + 
"millions": 8492, + "regarding": 8493, + "innovative": 8494, + "senator": 8495, + "rts": 8496, + "von": 8497, + "kl": 8498, + "whil": 8499, + "required": 8500, + "âĿĦ": 8501, + "luv": 8502, + "presidential": 8503, + "pocket": 8504, + "hundre": 8505, + "shown": 8506, + "frozen": 8507, + "toward": 8508, + "fast": 8509, + "confidence": 8510, + "rough": 8511, + "individual": 8512, + "quet": 8513, + "ðŁı½": 8514, + "dome": 8515, + "fifa": 8516, + "engineer": 8517, + "zen": 8518, + "remix": 8519, + "ðŁĺĥ": 8520, + "plant": 8521, + "minor": 8522, + "robinson": 8523, + "asy": 8524, + "pulled": 8525, + "certain": 8526, + "potato": 8527, + "(:": 8528, + "pres": 8529, + "occa": 8530, + "wit": 8531, + "item": 8532, + "sie": 8533, + "dating": 8534, + "thompson": 8535, + "owned": 8536, + "anu": 8537, + "vie": 8538, + "tedly": 8539, + "goodnight": 8540, + "except": 8541, + "ðŁĮŁ": 8542, + "iraq": 8543, + "kie": 8544, + "rences": 8545, + "lip": 8546, + "similar": 8547, + "saudi": 8548, + "vig": 8549, + "arthur": 8550, + "picks": 8551, + "milan": 8552, + "honda": 8553, + "maxi": 8554, + "og": 8555, + "stest": 8556, + "arch": 8557, + "analytics": 8558, + "basti": 8559, + "pearl": 8560, + "terry": 8561, + "horse": 8562, + "astro": 8563, + "acce": 8564, + "launching": 8565, + "international": 8566, + "sno": 8567, + "tasty": 8568, + "denver": 8569, + "irl": 8570, + "pete": 8571, + "torn": 8572, + "advantage": 8573, + "varsity": 8574, + "\"\"": 8575, + "sole": 8576, + "gc": 8577, + "lang": 8578, + "demonstr": 8579, + "olds": 8580, + "unity": 8581, + "nets": 8582, + "inspire": 8583, + "crete": 8584, + "nashville": 8585, + "nelson": 8586, + "eter": 8587, + "walk": 8588, + "hyun": 8589, + "mack": 8590, + "treas": 8591, + "seeking": 8592, + "rage": 8593, + "brush": 8594, + "aband": 8595, + "whilst": 8596, + "cocon": 8597, + "hong": 8598, + "shelter": 8599, + "ip": 8600, + "possibly": 8601, + "soo": 8602, + "ited": 8603, + "âĦ": 8604, + "races": 8605, + "warming": 8606, + "quin": 8607, + "television": 8608, + "matches": 8609, + "rapi": 8610, + "mental": 8611, + "palm": 8612, + "jennifer": 8613, + "rolls": 8614, + "indiana": 8615, + "bars": 8616, + "catching": 8617, + "rescu": 8618, + "candidates": 8619, + "fare": 8620, + "âłĢ": 8621, + "seo": 8622, + "vietnam": 8623, + "alpha": 8624, + "michelle": 8625, + "visible": 8626, + "regre": 8627, + "wned": 8628, + "apple": 8629, + "lip": 8630, + "ffe": 8631, + "liz": 8632, + "yorkshire": 8633, + "hail": 8634, + "seasons": 8635, + "began": 8636, + "md": 8637, + "kc": 8638, + "lap": 8639, + "fascinating": 8640, + "help": 8641, + "ury": 8642, + "ums": 8643, + "nuts": 8644, + "sem": 8645, + "alongside": 8646, + "bridge": 8647, + "orial": 8648, + "ove": 8649, + "worldcup": 8650, + "british": 8651, + "comfortable": 8652, + "ive": 8653, + "hotels": 8654, + "fairs": 8655, + "horri": 8656, + "sox": 8657, + "dining": 8658, + "stream": 8659, + "barri": 8660, + "ssy": 8661, + "wim": 8662, + "terms": 8663, + "vu": 8664, + "pere": 8665, + "lens": 8666, + "walked": 8667, + "ror": 8668, + "lars": 8669, + "shield": 8670, + "doubt": 8671, + "proto": 8672, + "crossing": 8673, + "meant": 8674, + "medium": 8675, + "adding": 8676, + "eb": 8677, + "cheap": 8678, + "func": 8679, + "paper": 8680, + "brands": 8681, + "ryan": 8682, + "feedback": 8683, + "collins": 8684, + "unknown": 8685, + "tropical": 8686, + "sandwich": 8687, + "fallen": 8688, + "formu": 8689, + "select": 8690, + "loads": 8691, + "answers": 8692, + "ori": 8693, + "maga": 8694, + "dor": 8695, + "duo": 8696, + "alie": 8697, + "drum": 
8698, + "uri": 8699, + "deer": 8700, + "soul": 8701, + "shut": 8702, + "âĺº": 8703, + "stolen": 8704, + "donated": 8705, + "buzz": 8706, + "patriots": 8707, + "hal": 8708, + "nasty": 8709, + "nominated": 8710, + "monte": 8711, + "kia": 8712, + "thri": 8713, + "ingu": 8714, + "tests": 8715, + "petro": 8716, + "ðŁijij": 8717, + "hosts": 8718, + "nest": 8719, + "topic": 8720, + "patch": 8721, + "mmy": 8722, + "hugh": 8723, + "abilities": 8724, + "mathe": 8725, + "smiles": 8726, + "gb": 8727, + "agenda": 8728, + "insights": 8729, + "chip": 8730, + "phan": 8731, + "failure": 8732, + "dgers": 8733, + "hai": 8734, + "significant": 8735, + "shock": 8736, + "rural": 8737, + "glam": 8738, + "figures": 8739, + "potus": 8740, + "ota": 8741, + "ministry": 8742, + "appears": 8743, + "fear": 8744, + "rh": 8745, + "american": 8746, + "hatt": 8747, + "sony": 8748, + "fires": 8749, + "edi": 8750, + "nou": 8751, + "equi": 8752, + "when": 8753, + "universal": 8754, + "madness": 8755, + "ix": 8756, + "sculpture": 8757, + "bach": 8758, + "tto": 8759, + "sweden": 8760, + "eta": 8761, + "ento": 8762, + "developed": 8763, + "monthly": 8764, + "maps": 8765, + "rah": 8766, + "led": 8767, + "delta": 8768, + "saints": 8769, + "islam": 8770, + "bench": 8771, + "fifth": 8772, + "vard": 8773, + "socks": 8774, + "welcoming": 8775, + "je": 8776, + "turner": 8777, + "vb": 8778, + "adi": 8779, + "norway": 8780, + "ady": 8781, + "hurricane": 8782, + "porsche": 8783, + "tradition": 8784, + "exam": 8785, + "newspaper": 8786, + "luci": 8787, + "aver": 8788, + "ideal": 8789, + "dna": 8790, + "madison": 8791, + "ð٧": 8792, + "witness": 8793, + "acou": 8794, + "insight": 8795, + "simon": 8796, + "robot": 8797, + "snake": 8798, + "nbc": 8799, + "aco": 8800, + "ross": 8801, + "shment": 8802, + "religion": 8803, + "chann": 8804, + "insu": 8805, + "campbell": 8806, + "installed": 8807, + "weather": 8808, + "horses": 8809, + "oli": 8810, + "robert": 8811, + "kaz": 8812, + "ðŁıĢ": 8813, + "veteran": 8814, + "thread": 8815, + "quarter": 8816, + "easier": 8817, + "capture": 8818, + "hipho": 8819, + "lawrence": 8820, + "romantic": 8821, + "passion": 8822, + "clay": 8823, + "oxford": 8824, + "thai": 8825, + "studying": 8826, + "fia": 8827, + "elected": 8828, + "mostly": 8829, + "cb": 8830, + "tumb": 8831, + "âĢįâĻĤ": 8832, + "xl": 8833, + "shan": 8834, + "faster": 8835, + "evans": 8836, + "slide": 8837, + "shri": 8838, + "seek": 8839, + "mies": 8840, + "chemistry": 8841, + "pumpkin": 8842, + "tum": 8843, + ",,": 8844, + "room": 8845, + "fired": 8846, + "lips": 8847, + "presence": 8848, + "aff": 8849, + "brewery": 8850, + "arrive": 8851, + "swag": 8852, + "photograph": 8853, + "pengu": 8854, + "chips": 8855, + "attor": 8856, + "values": 8857, + "accurate": 8858, + "contemporary": 8859, + "principal": 8860, + "cannabis": 8861, + "ario": 8862, + "anywhere": 8863, + "gia": 8864, + "democrats": 8865, + "buildings": 8866, + "lived": 8867, + "aps": 8868, + "negative": 8869, + "mare": 8870, + "ballo": 8871, + "lion": 8872, + "diamon": 8873, + "look": 8874, + "reform": 8875, + "tommy": 8876, + "illa": 8877, + "treats": 8878, + "hundreds": 8879, + "portland": 8880, + "worthy": 8881, + "excep": 8882, + "aria": 8883, + "idol": 8884, + "beer": 8885, + "cdn": 8886, + "yu": 8887, + "awk": 8888, + "ðŁĩ¨": 8889, + "cells": 8890, + "ó": 8891, + "identity": 8892, + "drawn": 8893, + "devil": 8894, + "finger": 8895, + "tham": 8896, + "ðŁijĬ": 8897, + "earned": 8898, + "fintech": 8899, + "dolph": 8900, + "tweeting": 8901, + "evolution": 8902, + "ðŁĵį": 8903, + 
"estim": 8904, + "mvp": 8905, + "none": 8906, + "ðŁĩºðŁĩ¸": 8907, + "toyota": 8908, + "aux": 8909, + "marin": 8910, + "bold": 8911, + "lbs": 8912, + "steak": 8913, + "murphy": 8914, + "itable": 8915, + "louis": 8916, + "solve": 8917, + "pia": 8918, + "skir": 8919, + "illino": 8920, + "webinar": 8921, + "banana": 8922, + "lov": 8923, + "thon": 8924, + "voters": 8925, + "affordable": 8926, + "defeated": 8927, + "lmfa": 8928, + "airlines": 8929, + "superb": 8930, + "anyway": 8931, + "debt": 8932, + "bored": 8933, + "versi": 8934, + "metal": 8935, + "responsible": 8936, + "mk": 8937, + "sse": 8938, + "fay": 8939, + "caused": 8940, + "fp": 8941, + "recommend": 8942, + "plaza": 8943, + "sporting": 8944, + "alliance": 8945, + "austri": 8946, + "nn": 8947, + "tours": 8948, + "surprised": 8949, + "artif": 8950, + "thunder": 8951, + "surve": 8952, + "wore": 8953, + "brief": 8954, + "necessary": 8955, + "zie": 8956, + "ashley": 8957, + "drake": 8958, + "rt": 8959, + "knife": 8960, + "immun": 8961, + "charges": 8962, + "athe": 8963, + "bride": 8964, + "reply": 8965, + "gav": 8966, + "broadcast": 8967, + "puer": 8968, + "bracelet": 8969, + "capacity": 8970, + "harvest": 8971, + "idk": 8972, + "performan": 8973, + "dding": 8974, + "ilers": 8975, + "para": 8976, + "jama": 8977, + "province": 8978, + "chin": 8979, + "iders": 8980, + "hari": 8981, + "teaser": 8982, + "chen": 8983, + "restor": 8984, + "rat": 8985, + "flat": 8986, + "colom": 8987, + "ðŁĴŀ": 8988, + "ðŁĩ¨ðŁĩ": 8989, + "smooth": 8990, + "rt": 8991, + "pitch": 8992, + "staying": 8993, + "israeli": 8994, + "tcot": 8995, + "perspective": 8996, + "dock": 8997, + "opener": 8998, + "lovel": 8999, + "xo": 9000, + "classroom": 9001, + "lington": 9002, + "goal": 9003, + "kennedy": 9004, + "sham": 9005, + "spaces": 9006, + "mitchell": 9007, + "homecoming": 9008, + "uki": 9009, + "claimed": 9010, + "recruit": 9011, + "ingo": 9012, + "mufc": 9013, + "monit": 9014, + "groo": 9015, + "resident": 9016, + "percent": 9017, + "perman": 9018, + "ottawa": 9019, + "intment": 9020, + "anxi": 9021, + "standards": 9022, + "worship": 9023, + "scheme": 9024, + "fx": 9025, + "potter": 9026, + "bian": 9027, + "athletic": 9028, + "afgh": 9029, + "sse": 9030, + "satell": 9031, + "parties": 9032, + "âĿ¤âĿ¤": 9033, + "infrastructure": 9034, + "relax": 9035, + "modu": 9036, + "worn": 9037, + "smoking": 9038, + "yach": 9039, + "practices": 9040, + "wcw": 9041, + "amb": 9042, + "domestic": 9043, + "taylor": 9044, + "kentu": 9045, + "provided": 9046, + "modi": 9047, + "veg": 9048, + "\"...": 9049, + "observ": 9050, + "ðŁĺ©": 9051, + "beard": 9052, + "mour": 9053, + "angry": 9054, + "ðŁĺ±": 9055, + "startups": 9056, + "wooden": 9057, + "dive": 9058, + "nail": 9059, + "antique": 9060, + "roses": 9061, + "tornado": 9062, + "mat": 9063, + "^^": 9064, + "suspect": 9065, + "farm": 9066, + "devices": 9067, + "mega": 9068, + "tul": 9069, + "scholarship": 9070, + "gee": 9071, + "disaster": 9072, + "arrival": 9073, + "poin": 9074, + "marc": 9075, + "katie": 9076, + "bbed": 9077, + "false": 9078, + "deserves": 9079, + "richard": 9080, + "juana": 9081, + "frey": 9082, + "tioned": 9083, + "hybri": 9084, + "rw": 9085, + "sarah": 9086, + "achi": 9087, + "cure": 9088, + "ole": 9089, + "morris": 9090, + "chic": 9091, + "broadway": 9092, + "label": 9093, + "pak": 9094, + "poverty": 9095, + "golf": 9096, + "ered": 9097, + "fu": 9098, + "eries": 9099, + "bees": 9100, + "alogue": 9101, + "stel": 9102, + "wireless": 9103, + "jewish": 9104, + "tide": 9105, + "blocked": 9106, + "lifetime": 9107, + 
"bhar": 9108, + "split": 9109, + "amster": 9110, + "thi": 9111, + "joshu": 9112, + "brunch": 9113, + "haps": 9114, + "sfor": 9115, + "oops": 9116, + "kapoor": 9117, + "hiking": 9118, + "supposed": 9119, + "roof": 9120, + "reas": 9121, + "train": 9122, + "tight": 9123, + "trump": 9124, + "basically": 9125, + "rr": 9126, + "eared": 9127, + "seeds": 9128, + "entrance": 9129, + "cp": 9130, + "wie": 9131, + "sonic": 9132, + "victim": 9133, + "here": 9134, + "eh": 9135, + "earrings": 9136, + "salmon": 9137, + "arctic": 9138, + "anne": 9139, + "dougla": 9140, + "corruption": 9141, + "hannah": 9142, + "hasn": 9143, + "voices": 9144, + "conce": 9145, + "atta": 9146, + "fleet": 9147, + "clinical": 9148, + "democratic": 9149, + "tony": 9150, + "stood": 9151, + "lef": 9152, + "twitch": 9153, + "ail": 9154, + "honestly": 9155, + "increased": 9156, + "drome": 9157, + "donna": 9158, + "accepted": 9159, + "visitors": 9160, + "apar": 9161, + "ador": 9162, + "par": 9163, + "jerry": 9164, + "rai": 9165, + "brandon": 9166, + "abu": 9167, + "!!!!!!": 9168, + "meme": 9169, + "ingh": 9170, + "glorious": 9171, + "bhu": 9172, + "pump": 9173, + "jol": 9174, + "like": 9175, + "fisher": 9176, + "maz": 9177, + "agan": 9178, + "destination": 9179, + "playlist": 9180, + "letters": 9181, + "genu": 9182, + "brace": 9183, + "celebrated": 9184, + "banner": 9185, + "rhe": 9186, + "dragon": 9187, + "ðŁĺħ": 9188, + "signature": 9189, + "grey": 9190, + "âľĶï¸ı": 9191, + "alice": 9192, + "bered": 9193, + "pher": 9194, + "bern": 9195, + "cath": 9196, + "gathering": 9197, + "scoring": 9198, + "influence": 9199, + "smiling": 9200, + "dept": 9201, + "local": 9202, + "ax": 9203, + "acu": 9204, + "retirement": 9205, + "honor": 9206, + "herself": 9207, + "chemical": 9208, + "assess": 9209, + "yall": 9210, + "frequ": 9211, + "appreciation": 9212, + "aca": 9213, + "choir": 9214, + "cuz": 9215, + "soil": 9216, + "cil": 9217, + "reporting": 9218, + "uh": 9219, + "enterprise": 9220, + "grat": 9221, + "jacob": 9222, + "rum": 9223, + "fee": 9224, + "jak": 9225, + "spin": 9226, + "bikes": 9227, + "phia": 9228, + "stere": 9229, + "pis": 9230, + "blood": 9231, + "tatt": 9232, + "raft": 9233, + "warren": 9234, + "sheri": 9235, + "backstage": 9236, + "marsh": 9237, + "hashtag": 9238, + "therine": 9239, + "rein": 9240, + "gameday": 9241, + "guaran": 9242, + "recipes": 9243, + "minds": 9244, + "stronger": 9245, + "issued": 9246, + "bicy": 9247, + "nak": 9248, + "mented": 9249, + "scary": 9250, + "ux": 9251, + "previous": 9252, + "ttle": 9253, + "thats": 9254, + "actors": 9255, + "uma": 9256, + "tina": 9257, + "bunny": 9258, + "promotion": 9259, + "uss": 9260, + "oliver": 9261, + "montreal": 9262, + "whats": 9263, + "appreciated": 9264, + "lakes": 9265, + "excuse": 9266, + "knowing": 9267, + "prizes": 9268, + "muscle": 9269, + "shades": 9270, + "scot": 9271, + "ingredi": 9272, + "electronic": 9273, + "juan": 9274, + "combat": 9275, + "sri": 9276, + "eh": 9277, + "turkish": 9278, + "lom": 9279, + "strikes": 9280, + "prison": 9281, + "ree": 9282, + "pope": 9283, + "vid": 9284, + "oldest": 9285, + "doll": 9286, + "swiss": 9287, + "certified": 9288, + "clip": 9289, + "returning": 9290, + "lator": 9291, + "leigh": 9292, + "ttes": 9293, + "watson": 9294, + "healing": 9295, + "elim": 9296, + "perhaps": 9297, + "hass": 9298, + "kau": 9299, + "dder": 9300, + "mouse": 9301, + "newcastle": 9302, + "indigenous": 9303, + "welcomes": 9304, + "cole": 9305, + "taught": 9306, + "noise": 9307, + "appear": 9308, + "joe": 9309, + "canon": 9310, + "wednesday": 9311, + 
"utah": 9312, + "ctive": 9313, + "driven": 9314, + "iv": 9315, + "cell": 9316, + "strip": 9317, + "acc": 9318, + "focused": 9319, + "arrest": 9320, + "stocks": 9321, + "woo": 9322, + "âĹ": 9323, + "noticed": 9324, + "shado": 9325, + "displa": 9326, + "terror": 9327, + "borne": 9328, + "second": 9329, + "queens": 9330, + "woke": 9331, + "jail": 9332, + "nott": 9333, + "cambridge": 9334, + "hart": 9335, + "seaf": 9336, + "fax": 9337, + "accept": 9338, + "âĺħ": 9339, + "goods": 9340, + "kat": 9341, + "twin": 9342, + "hs": 9343, + "thousand": 9344, + "sins": 9345, + "suite": 9346, + "ampton": 9347, + "arn": 9348, + "relev": 9349, + "richar": 9350, + "hoops": 9351, + "nbc": 9352, + "classic": 9353, + "pab": 9354, + "soldier": 9355, + "deplo": 9356, + "leans": 9357, + "installation": 9358, + "clash": 9359, + "leban": 9360, + "eee": 9361, + "tire": 9362, + "beloved": 9363, + "fusion": 9364, + "traveling": 9365, + "nei": 9366, + "cookie": 9367, + "globe": 9368, + "physics": 9369, + "sq": 9370, + "col": 9371, + "wolves": 9372, + "dl": 9373, + "exit": 9374, + "\"-": 9375, + "football": 9376, + "leaf": 9377, + "sterling": 9378, + "hide": 9379, + "minneso": 9380, + "freshman": 9381, + "nature": 9382, + "indie": 9383, + "supplies": 9384, + "bris": 9385, + "irish": 9386, + "inktober": 9387, + "doodle": 9388, + "icop": 9389, + "messages": 9390, + "adults": 9391, + "recorded": 9392, + "fixed": 9393, + "ardo": 9394, + "offered": 9395, + "underground": 9396, + "drone": 9397, + "pine": 9398, + "mainten": 9399, + "andre": 9400, + "hammer": 9401, + "sx": 9402, + "round": 9403, + "hike": 9404, + "brad": 9405, + "rome": 9406, + "full": 9407, + "oney": 9408, + "rows": 9409, + "columbia": 9410, + "archives": 9411, + "approved": 9412, + "batch": 9413, + "illinois": 9414, + "recognition": 9415, + "shouldn": 9416, + "fog": 9417, + "ncaa": 9418, + "kevin": 9419, + "humanity": 9420, + "although": 9421, + "powers": 9422, + "pou": 9423, + "sar": 9424, + "pest": 9425, + "alcohol": 9426, + "consci": 9427, + "philadel": 9428, + "eno": 9429, + "tm": 9430, + "okla": 9431, + "category": 9432, + "participate": 9433, + "accused": 9434, + "brief": 9435, + "poem": 9436, + "clubs": 9437, + "consult": 9438, + "jab": 9439, + "bigdata": 9440, + "amsterdam": 9441, + "acing": 9442, + "certific": 9443, + "nu": 9444, + "dat": 9445, + "improved": 9446, + "andy": 9447, + "campaig": 9448, + "palestin": 9449, + "pace": 9450, + "mobi": 9451, + "feelings": 9452, + "wolf": 9453, + "brain": 9454, + "propos": 9455, + "interactive": 9456, + "prince": 9457, + "index": 9458, + "cis": 9459, + "chae": 9460, + "peaceful": 9461, + "covering": 9462, + "aco": 9463, + "courses": 9464, + "monkey": 9465, + "replace": 9466, + "bl": 9467, + "bloody": 9468, + "tales": 9469, + "brighton": 9470, + "neighborhood": 9471, + "gates": 9472, + "spiritual": 9473, + "afraid": 9474, + "breast": 9475, + "bones": 9476, + "ðŁijī": 9477, + "video": 9478, + "wau": 9479, + "touch": 9480, + "injuries": 9481, + "carl": 9482, + "rix": 9483, + "unex": 9484, + "âĢ¢": 9485, + "fred": 9486, + "considered": 9487, + "thusi": 9488, + "anch": 9489, + "ony": 9490, + "usa": 9491, + "graphics": 9492, + "acre": 9493, + "ðŁĺ©": 9494, + "commemor": 9495, + "commod": 9496, + "goti": 9497, + "guardian": 9498, + "starbucks": 9499, + "prevention": 9500, + "hahahaha": 9501, + "administration": 9502, + "portugal": 9503, + "faculty": 9504, + "beta": 9505, + "ula": 9506, + "albert": 9507, + "breath": 9508, + "eri": 9509, + "letting": 9510, + "tric": 9511, + "mentation": 9512, + "incredibly": 9513, + 
"tennes": 9514, + "vd": 9515, + "ðŁĻĪ": 9516, + "eddie": 9517, + "brick": 9518, + "grill": 9519, + "btw": 9520, + "watches": 9521, + "researchers": 9522, + "tney": 9523, + "nie": 9524, + "pas": 9525, + "aster": 9526, + "vibr": 9527, + "pokemon": 9528, + "chrome": 9529, + "goat": 9530, + "pitts": 9531, + "illy": 9532, + "festive": 9533, + "yd": 9534, + "canal": 9535, + "ðŁĨ": 9536, + "fies": 9537, + "carlos": 9538, + "reque": 9539, + "partici": 9540, + "trains": 9541, + "sample": 9542, + "temperature": 9543, + "symph": 9544, + "picking": 9545, + "indoor": 9546, + "zers": 9547, + "playoffs": 9548, + "________": 9549, + "apes": 9550, + "lyrics": 9551, + "islamic": 9552, + "performances": 9553, + "dick": 9554, + "spark": 9555, + "seas": 9556, + "homa": 9557, + "ground": 9558, + "disci": 9559, + "employee": 9560, + "commu": 9561, + "alaska": 9562, + "alan": 9563, + "feast": 9564, + "dging": 9565, + "banking": 9566, + "manuel": 9567, + "slowly": 9568, + "trucks": 9569, + "mccar": 9570, + "ooo": 9571, + "scrat": 9572, + "orchestra": 9573, + "individu": 9574, + "mx": 9575, + "breath": 9576, + "stairs": 9577, + "equality": 9578, + "blake": 9579, + "locations": 9580, + "coconut": 9581, + "baltimore": 9582, + "aaa": 9583, + "lc": 9584, + "ðŁıĨ": 9585, + "harvey": 9586, + "resist": 9587, + "immigration": 9588, + "adidas": 9589, + "fili": 9590, + "ref": 9591, + "lgbt": 9592, + "mos": 9593, + "ppi": 9594, + "kenny": 9595, + "terror": 9596, + "bane": 9597, + "apolis": 9598, + "sg": 9599, + "socialmedia": 9600, + "kai": 9601, + "honest": 9602, + "assas": 9603, + "bollywood": 9604, + "âĢįâĻĢï¸ı": 9605, + "ferrari": 9606, + "horn": 9607, + "crypto": 9608, + "boom": 9609, + "maintenance": 9610, + "idi": 9611, + "sman": 9612, + "wl": 9613, + "extended": 9614, + "insul": 9615, + "ves": 9616, + "gosp": 9617, + "tri": 9618, + "pig": 9619, + "targe": 9620, + "celer": 9621, + "stati": 9622, + "smh": 9623, + "ridic": 9624, + "appeal": 9625, + "?)": 9626, + "conclu": 9627, + "cosme": 9628, + "sheep": 9629, + "christopher": 9630, + "enthusi": 9631, + "polish": 9632, + "mets": 9633, + "ounded": 9634, + "sustainability": 9635, + "creativity": 9636, + "concrete": 9637, + "rai": 9638, + "alien": 9639, + "bless": 9640, + "tees": 9641, + "club": 9642, + "rot": 9643, + "bos": 9644, + "exist": 9645, + "perfection": 9646, + "luck": 9647, + "rocky": 9648, + "expensive": 9649, + "meanwhile": 9650, + "happybirthday": 9651, + "pret": 9652, + "thriller": 9653, + "cave": 9654, + "playoff": 9655, + "somer": 9656, + "lu": 9657, + "lex": 9658, + "defence": 9659, + "amwriting": 9660, + "homeless": 9661, + "prophe": 9662, + "chet": 9663, + "pastor": 9664, + "ðŁ¤£": 9665, + "lander": 9666, + "www": 9667, + "Ģï¸ı": 9668, + "tica": 9669, + "!#": 9670, + "otic": 9671, + "radar": 9672, + "posters": 9673, + "powder": 9674, + "poli": 9675, + "haun": 9676, + "trap": 9677, + "blin": 9678, + "assault": 9679, + "shorts": 9680, + "rey": 9681, + "shy": 9682, + "squir": 9683, + "racist": 9684, + "garlic": 9685, + "fur": 9686, + "remote": 9687, + "smell": 9688, + "impressed": 9689, + "fingers": 9690, + "âłĢ": 9691, + "dino": 9692, + "lement": 9693, + "snu": 9694, + "promoting": 9695, + "string": 9696, + "productive": 9697, + "bage": 9698, + "mason": 9699, + "raz": 9700, + "directly": 9701, + "jk": 9702, + "eval": 9703, + "ðŁijĬ": 9704, + "doctors": 9705, + "cow": 9706, + "rider": 9707, + "stv": 9708, + "remove": 9709, + "wu": 9710, + "nathan": 9711, + "rod": 9712, + "nr": 9713, + "=>": 9714, + "affected": 9715, + "invest": 9716, + "mption": 9717, + 
"ginger": 9718, + "od": 9719, + "agriculture": 9720, + "sque": 9721, + "mug": 9722, + "counting": 9723, + "kee": 9724, + "magnific": 9725, + "cook": 9726, + "anistan": 9727, + "root": 9728, + "placed": 9729, + "sympo": 9730, + "ghana": 9731, + "und": 9732, + "cheer": 9733, + "throwing": 9734, + "secrets": 9735, + "filling": 9736, + "optimi": 9737, + "butterfly": 9738, + "bubb": 9739, + "ðŁĺī": 9740, + "terrible": 9741, + "dg": 9742, + "silk": 9743, + "obsessed": 9744, + "lou": 9745, + "aide": 9746, + "salute": 9747, + "monu": 9748, + "philadelphia": 9749, + "scientific": 9750, + "ist": 9751, + "uae": 9752, + "dessert": 9753, + "bottles": 9754, + "canyon": 9755, + "ðŁĺĪ": 9756, + "carib": 9757, + "other": 9758, + "wich": 9759, + "resource": 9760, + "guilty": 9761, + "und": 9762, + "leon": 9763, + "ess": 9764, + "kane": 9765, + "ele": 9766, + "trainer": 9767, + "heim": 9768, + "ante": 9769, + "manage": 9770, + "rookie": 9771, + "treated": 9772, + "poses": 9773, + "rsvp": 9774, + "causes": 9775, + "awak": 9776, + "jewell": 9777, + "lett": 9778, + "onics": 9779, + "titles": 9780, + "cardiff": 9781, + "gaga": 9782, + "bump": 9783, + "useful": 9784, + "?!": 9785, + "loose": 9786, + "bbing": 9787, + "::": 9788, + "argentina": 9789, + "debu": 9790, + "cycl": 9791, + "whel": 9792, + "disgu": 9793, + "jel": 9794, + "kills": 9795, + "biology": 9796, + "exter": 9797, + "trash": 9798, + "bodies": 9799, + "tram": 9800, + "circuit": 9801, + "expect": 9802, + "lads": 9803, + "wells": 9804, + "shot": 9805, + "gee": 9806, + "narendr": 9807, + "fastest": 9808, + "bent": 9809, + "bills": 9810, + "marshall": 9811, + "hats": 9812, + "introduce": 9813, + "citizen": 9814, + "impossible": 9815, + "gib": 9816, + "azz": 9817, + "networking": 9818, + "rant": 9819, + "think": 9820, + "indy": 9821, + "stops": 9822, + "ftheday": 9823, + "brian": 9824, + "**": 9825, + "amodi": 9826, + "dome": 9827, + "courage": 9828, + "packing": 9829, + "affairs": 9830, + "gn": 9831, + "sized": 9832, + "entary": 9833, + "poland": 9834, + "switzer": 9835, + "afghanistan": 9836, + "wu": 9837, + "tender": 9838, + "subscribe": 9839, + "mosco": 9840, + "attend": 9841, + "republican": 9842, + "honey": 9843, + "âĢĭ": 9844, + "simul": 9845, + "wester": 9846, + "foodie": 9847, + "oro": 9848, + "middle": 9849, + "abt": 9850, + "copies": 9851, + "maje": 9852, + "narendramodi": 9853, + "typical": 9854, + "inspirational": 9855, + "vitam": 9856, + "wiscon": 9857, + "cubs": 9858, + "tivity": 9859, + "hali": 9860, + "ears": 9861, + "kay": 9862, + "dare": 9863, + "marijuana": 9864, + "curious": 9865, + "ania": 9866, + "tomato": 9867, + "remind": 9868, + "ðŁĩ·": 9869, + "scared": 9870, + "coup": 9871, + "poet": 9872, + "landed": 9873, + "rid": 9874, + "wrapped": 9875, + "morri": 9876, + "climbing": 9877, + "ews": 9878, + "feeding": 9879, + "contra": 9880, + "thology": 9881, + "grid": 9882, + "tively": 9883, + "reader": 9884, + "laser": 9885, + "diving": 9886, + "dig": 9887, + "latin": 9888, + "tied": 9889, + "shakespe": 9890, + "oci": 9891, + "adm": 9892, + "showers": 9893, + "chuck": 9894, + "marcus": 9895, + "oos": 9896, + "knee": 9897, + "olive": 9898, + "owl": 9899, + "dylan": 9900, + "anno": 9901, + "gym": 9902, + "decisions": 9903, + "wellness": 9904, + "arrives": 9905, + "satis": 9906, + "chris": 9907, + "thurs": 9908, + "ðŁ¤£": 9909, + "interviews": 9910, + "thankyou": 9911, + "switzerland": 9912, + "overnight": 9913, + "journalist": 9914, + "serves": 9915, + "volcan": 9916, + ".......": 9917, + "plot": 9918, + "nicol": 9919, + "carrying": 9920, 
+ "magne": 9921, + "treasure": 9922, + "exp": 9923, + "bever": 9924, + "ðŁĺ¢": 9925, + "marty": 9926, + "mole": 9927, + "donations": 9928, + "recognized": 9929, + "bh": 9930, + "dus": 9931, + "shann": 9932, + "aldo": 9933, + "successfully": 9934, + "ente": 9935, + "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 9936, + "cabinet": 9937, + "cuis": 9938, + "titled": 9939, + "das": 9940, + "sol": 9941, + "strategies": 9942, + "delivering": 9943, + "adds": 9944, + "anian": 9945, + "nether": 9946, + "ðŁĴĥ": 9947, + "contain": 9948, + "suits": 9949, + "pairs": 9950, + "todd": 9951, + "rella": 9952, + "rope": 9953, + "cio": 9954, + "crop": 9955, + "paintings": 9956, + "suz": 9957, + "rejec": 9958, + "bust": 9959, + "dh": 9960, + "fraud": 9961, + "mh": 9962, + "control": 9963, + "jeal": 9964, + "destroyed": 9965, + "allows": 9966, + "wool": 9967, + "minnesota": 9968, + "omen": 9969, + "ju": 9970, + "symposium": 9971, + "daf": 9972, + "limit": 9973, + "accounts": 9974, + "loading": 9975, + "intern": 9976, + "resolution": 9977, + "holland": 9978, + "qual": 9979, + "meetings": 9980, + "grave": 9981, + "camping": 9982, + "vam": 9983, + "renov": 9984, + "liberal": 9985, + "amber": 9986, + "gree": 9987, + "humb": 9988, + "fever": 9989, + "eling": 9990, + "brooks": 9991, + "à²": 9992, + "beth": 9993, + "aded": 9994, + "alt": 9995, + "roe": 9996, + "performed": 9997, + "josh": 9998, + "franklin": 9999, + "nicole": 10000, + "dess": 10001, + "bbs": 10002, + "mg": 10003, + "networks": 10004, + "minim": 10005, + "alt": 10006, + "weapons": 10007, + "guy": 10008, + "jason": 10009, + "gha": 10010, + "harbour": 10011, + "aton": 10012, + "praise": 10013, + "kentucky": 10014, + "belfast": 10015, + "sticks": 10016, + "bloss": 10017, + "hopes": 10018, + "anthro": 10019, + "familiar": 10020, + "wait": 10021, + "chile": 10022, + "depression": 10023, + "lax": 10024, + "jets": 10025, + "leice": 10026, + "receives": 10027, + "sier": 10028, + "ank": 10029, + "dex": 10030, + "indeed": 10031, + "flexi": 10032, + "fabric": 10033, + "lamb": 10034, + "helicop": 10035, + "amanda": 10036, + "âĢĶâĢĶ": 10037, + "compete": 10038, + "snack": 10039, + "technologies": 10040, + "syrian": 10041, + "moms": 10042, + "muham": 10043, + "chosen": 10044, + "anat": 10045, + "devon": 10046, + "sharks": 10047, + "ret": 10048, + "fundraiser": 10049, + "selfies": 10050, + "stations": 10051, + "communications": 10052, + "tennessee": 10053, + "tutor": 10054, + "rot": 10055, + "valuable": 10056, + "dynamic": 10057, + "nurse": 10058, + "ied": 10059, + "earthquake": 10060, + "deserved": 10061, + "ave": 10062, + "sara": 10063, + "stretch": 10064, + "douglas": 10065, + "nepal": 10066, + "ç": 10067, + "obviously": 10068, + "dame": 10069, + "rape": 10070, + "anybody": 10071, + "kw": 10072, + "patrol": 10073, + "holders": 10074, + "hanna": 10075, + "infographic": 10076, + "eco": 10077, + "beating": 10078, + "stanley": 10079, + "boats": 10080, + "ribb": 10081, + "ez": 10082, + "witch": 10083, + "inva": 10084, + "acid": 10085, + "boarding": 10086, + "-@": 10087, + "gil": 10088, + "dave": 10089, + "careers": 10090, + "oppos": 10091, + "lloy": 10092, + "inter": 10093, + "dope": 10094, + "resu": 10095, + "jagu": 10096, + "shade": 10097, + "indy": 10098, + "onist": 10099, + "relations": 10100, + "agen": 10101, + "able": 10102, + "incident": 10103, + "meter": 10104, + "sharma": 10105, + "idr": 10106, + "prove": 10107, + "immediately": 10108, + "troops": 10109, + "aman": 10110, + "glow": 10111, + "gaza": 10112, + "blocks": 10113, + "personal": 10114, + "chronic": 10115, + "aller": 10116, + 
"sid": 10117, + "shr": 10118, + "whatsapp": 10119, + "lucy": 10120, + "archae": 10121, + "hou": 10122, + "journalism": 10123, + "ourselves": 10124, + "got": 10125, + "themed": 10126, + "shaped": 10127, + "weak": 10128, + "casual": 10129, + "length": 10130, + "slam": 10131, + "abbey": 10132, + "ev": 10133, + "counter": 10134, + "esta": 10135, + "recipi": 10136, + "chapel": 10137, + "expansion": 10138, + "self": 10139, + "suffering": 10140, + "spice": 10141, + "nz": 10142, + "spart": 10143, + "desper": 10144, + "booking": 10145, + "quarters": 10146, + "yon": 10147, + "ðŁĴĹ": 10148, + "pk": 10149, + "continued": 10150, + "-#": 10151, + "manhatt": 10152, + "talked": 10153, + "shen": 10154, + "combo": 10155, + "hybrid": 10156, + "jeans": 10157, + "liquid": 10158, + "seal": 10159, + "retweets": 10160, + "acceler": 10161, + "collective": 10162, + "tas": 10163, + ":))": 10164, + "professionals": 10165, + "raw": 10166, + "ott": 10167, + "susan": 10168, + "iring": 10169, + "oklahoma": 10170, + "reven": 10171, + "survival": 10172, + "creator": 10173, + "transit": 10174, + "stac": 10175, + "surf": 10176, + "ik": 10177, + "editing": 10178, + "chilling": 10179, + "bailey": 10180, + "steal": 10181, + "rable": 10182, + "parent": 10183, + "hunger": 10184, + "snapp": 10185, + "collect": 10186, + "philosoph": 10187, + "dedication": 10188, + "cf": 10189, + "cm": 10190, + "leep": 10191, + "repeat": 10192, + "reha": 10193, + "unfortun": 10194, + "aer": 10195, + "aero": 10196, + "abstract": 10197, + "monitor": 10198, + "agents": 10199, + "bul": 10200, + "science": 10201, + "harbor": 10202, + "dragons": 10203, + "flooding": 10204, + "accompli": 10205, + "dash": 10206, + "julia": 10207, + "thered": 10208, + "tuesday": 10209, + "cyber": 10210, + "blow": 10211, + "tained": 10212, + "lem": 10213, + "reference": 10214, + "ppo": 10215, + "negoti": 10216, + "charle": 10217, + "connor": 10218, + "ault": 10219, + "accessories": 10220, + "commissioner": 10221, + "rainy": 10222, + "rear": 10223, + "advisory": 10224, + "lucas": 10225, + "maid": 10226, + "coal": 10227, + "kav": 10228, + "polo": 10229, + "ðŁı¾": 10230, + "transport": 10231, + "margare": 10232, + "strawberry": 10233, + "burns": 10234, + "greens": 10235, + "nev": 10236, + "participants": 10237, + "colin": 10238, + "belgium": 10239, + "colour": 10240, + "inform": 10241, + "dell": 10242, + "bron": 10243, + "caly": 10244, + "kickoff": 10245, + "strategic": 10246, + "reunion": 10247, + "honors": 10248, + "lib": 10249, + "egyp": 10250, + "âŃIJï¸ı": 10251, + "hypo": 10252, + "sizes": 10253, + "registered": 10254, + "betes": 10255, + "relaxing": 10256, + "bloom": 10257, + "intense": 10258, + "valentines": 10259, + "insane": 10260, + "wwii": 10261, + "px": 10262, + "trio": 10263, + "blade": 10264, + "wisconsin": 10265, + "cone": 10266, + "platin": 10267, + "alize": 10268, + "raven": 10269, + "increasing": 10270, + "indians": 10271, + "ilian": 10272, + "blu": 10273, + "rabbit": 10274, + "extension": 10275, + "jef": 10276, + "audi": 10277, + "ferry": 10278, + "sell": 10279, + "aday": 10280, + "usb": 10281, + "sweat": 10282, + "champag": 10283, + "method": 10284, + "memph": 10285, + "assist": 10286, + "sby": 10287, + "cape": 10288, + "removed": 10289, + "magn": 10290, + "vt": 10291, + "rams": 10292, + "fbi": 10293, + "tackle": 10294, + "phew": 10295, + "hon": 10296, + "motorcycle": 10297, + "suspec": 10298, + "elephant": 10299, + "subject": 10300, + "lette": 10301, + "dairy": 10302, + "wheat": 10303, + "awkward": 10304, + "act": 10305, + "trol": 10306, + "mitted": 10307, + 
"zayn": 10308, + "sheriff": 10309, + "enemy": 10310, + "cons": 10311, + "kett": 10312, + "bulls": 10313, + "evalu": 10314, + "btc": 10315, + "satellite": 10316, + "holo": 10317, + "porter": 10318, + "diabetes": 10319, + "better": 10320, + "releasing": 10321, + "surf": 10322, + ":-": 10323, + "sebasti": 10324, + "collecting": 10325, + "encing": 10326, + "ethi": 10327, + "gods": 10328, + "alley": 10329, + "healthy": 10330, + "mills": 10331, + "smash": 10332, + "copper": 10333, + "crack": 10334, + "readers": 10335, + "spac": 10336, + "license": 10337, + "basket": 10338, + "bangla": 10339, + "entic": 10340, + "omi": 10341, + "mere": 10342, + "sively": 10343, + "animation": 10344, + "lanes": 10345, + "dentally": 10346, + "chillin": 10347, + "fie": 10348, + "karen": 10349, + "depth": 10350, + "lipse": 10351, + "ng": 10352, + "rip": 10353, + "melo": 10354, + "sandy": 10355, + "ðŁijıðŁijı": 10356, + "vincent": 10357, + "nut": 10358, + "hug": 10359, + "whole": 10360, + "creates": 10361, + "????": 10362, + "âĿ¤ï¸ıâĿ¤ï¸ı": 10363, + "baked": 10364, + "upgrade": 10365, + "roberts": 10366, + "hara": 10367, + "caribbean": 10368, + "authentic": 10369, + "mbs": 10370, + "moscow": 10371, + "attorney": 10372, + "wiki": 10373, + "chlo": 10374, + "hull": 10375, + "cork": 10376, + "\"!": 10377, + "stylish": 10378, + "ðŁĵ¸:": 10379, + "diary": 10380, + "improving": 10381, + "expand": 10382, + "bright": 10383, + "pollution": 10384, + "knights": 10385, + "personality": 10386, + "checked": 10387, + "facilities": 10388, + "zel": 10389, + "bowling": 10390, + "guer": 10391, + "ðŁİĤ": 10392, + "ongoing": 10393, + "units": 10394, + "hook": 10395, + "beck": 10396, + "conflict": 10397, + "todd": 10398, + "farming": 10399, + "educational": 10400, + "kak": 10401, + "clay": 10402, + "stroke": 10403, + "belly": 10404, + "explore": 10405, + "millenni": 10406, + "thm": 10407, + "loop": 10408, + "sms": 10409, + "consist": 10410, + "circa": 10411, + "bryan": 10412, + "dab": 10413, + "younger": 10414, + "solidar": 10415, + "ppa": 10416, + "experienced": 10417, + "bella": 10418, + "board": 10419, + "sheffield": 10420, + "stephen": 10421, + "consumer": 10422, + "submit": 10423, + "sponsor": 10424, + "tang": 10425, + "aggre": 10426, + "combined": 10427, + "tracking": 10428, + "sanders": 10429, + "baz": 10430, + "survive": 10431, + "ferred": 10432, + "equal": 10433, + "sep": 10434, + "reed": 10435, + "strong": 10436, + "privacy": 10437, + "stap": 10438, + "ung": 10439, + "acry": 10440, + "pasta": 10441, + "pirates": 10442, + "ager": 10443, + "fairy": 10444, + "dup": 10445, + "introduced": 10446, + "wip": 10447, + "lets": 10448, + "spray": 10449, + "ðŁĵº": 10450, + "grew": 10451, + "asts": 10452, + "pittsburgh": 10453, + "newyork": 10454, + "joey": 10455, + "lauren": 10456, + "trade": 10457, + "chop": 10458, + "pipe": 10459, + "claire": 10460, + "behavior": 10461, + "vap": 10462, + "crews": 10463, + "laptop": 10464, + "ð٤Ĺ": 10465, + "chester": 10466, + "discipl": 10467, + "df": 10468, + "outdoors": 10469, + "ks": 10470, + "gover": 10471, + "superstar": 10472, + "casino": 10473, + "farmer": 10474, + ";-)": 10475, + "returned": 10476, + "ðŁıĪ": 10477, + "mail": 10478, + "roasted": 10479, + "costa": 10480, + "vill": 10481, + "pez": 10482, + "gardening": 10483, + "distribution": 10484, + "shining": 10485, + "investors": 10486, + "rasp": 10487, + "decades": 10488, + "realized": 10489, + "barn": 10490, + "pti": 10491, + "stable": 10492, + "utd": 10493, + "panthers": 10494, + "mens": 10495, + "bn": 10496, + "cade": 10497, + "bucket": 10498, 
+ "ynn": 10499, + "whenever": 10500, + "wake": 10501, + "dais": 10502, + "bernie": 10503, + "lodge": 10504, + "julie": 10505, + "atmosphere": 10506, + "ðŁĺĺðŁĺĺ": 10507, + "majority": 10508, + "parti": 10509, + "excit": 10510, + "cut": 10511, + "meh": 10512, + "muslims": 10513, + "begun": 10514, + "flights": 10515, + "veness": 10516, + "ceme": 10517, + "posing": 10518, + "sole": 10519, + "gou": 10520, + "darkness": 10521, + "peach": 10522, + "celtic": 10523, + "authority": 10524, + "grandma": 10525, + "fulness": 10526, + "smith": 10527, + "specific": 10528, + "garcia": 10529, + "coins": 10530, + "goodness": 10531, + "aldub": 10532, + "recruiting": 10533, + "dennis": 10534, + "gary": 10535, + "sleeve": 10536, + "weapon": 10537, + "plz": 10538, + "discover": 10539, + "harrison": 10540, + "recruitment": 10541, + "jai": 10542, + "chim": 10543, + "compared": 10544, + "toms": 10545, + "mothers": 10546, + "amy": 10547, + "archive": 10548, + "task": 10549, + "benjam": 10550, + "seg": 10551, + "lawyer": 10552, + "alum": 10553, + "investing": 10554, + "mie": 10555, + "chez": 10556, + "jp": 10557, + "ake": 10558, + "flam": 10559, + "wallpaper": 10560, + "âĻ¥ï¸ı": 10561, + "tton": 10562, + "chest": 10563, + "favorites": 10564, + "weigh": 10565, + "coolest": 10566, + "rating": 10567, + "relevant": 10568, + "logan": 10569, + "maple": 10570, + "runners": 10571, + "prior": 10572, + "people": 10573, + "maur": 10574, + "terrorist": 10575, + "tested": 10576, + "carnival": 10577, + "suspen": 10578, + "measure": 10579, + "mv": 10580, + "cybersecurity": 10581, + "appren": 10582, + "terrorism": 10583, + "oz": 10584, + "vital": 10585, + "nies": 10586, + "gonz": 10587, + "funded": 10588, + "twist": 10589, + "assessment": 10590, + "diesel": 10591, + "enfor": 10592, + "column": 10593, + "addressing": 10594, + "casts": 10595, + "payment": 10596, + "xton": 10597, + "fier": 10598, + ",'": 10599, + "last": 10600, + "nee": 10601, + "unless": 10602, + "close": 10603, + "skill": 10604, + "cuisine": 10605, + "funeral": 10606, + "tiles": 10607, + "aun": 10608, + "kru": 10609, + "relationships": 10610, + "ðŁĴ¯": 10611, + "event": 10612, + "âĢįâĻĤï¸ı": 10613, + "kindness": 10614, + "proposed": 10615, + "acoustic": 10616, + "aes": 10617, + "defender": 10618, + "dance": 10619, + "htt": 10620, + "wat": 10621, + "voy": 10622, + "ð٤ĺ": 10623, + "aus": 10624, + "cliff": 10625, + "searching": 10626, + "beautifully": 10627, + "inqu": 10628, + "atl": 10629, + "specialist": 10630, + "ðŁIJ¶": 10631, + "dai": 10632, + "trails": 10633, + "classics": 10634, + "instant": 10635, + "vous": 10636, + "revenue": 10637, + "march": 10638, + "kirk": 10639, + "fringe": 10640, + "fireworks": 10641, + "trivia": 10642, + "âĺħ": 10643, + "traction": 10644, + "walter": 10645, + "moto": 10646, + "lily": 10647, + "attitude": 10648, + "climb": 10649, + "scan": 10650, + "savings": 10651, + "cw": 10652, + "faith": 10653, + "credits": 10654, + "abled": 10655, + "graff": 10656, + "autograph": 10657, + "hehe": 10658, + "ranch": 10659, + "had": 10660, + "rogers": 10661, + "ðŁĮ¹": 10662, + "fin": 10663, + "requ": 10664, + "folk": 10665, + "additional": 10666, + "lynn": 10667, + "uber": 10668, + "dollars": 10669, + "logic": 10670, + "worth": 10671, + "som": 10672, + "thesis": 10673, + "pound": 10674, + "bic": 10675, + "stur": 10676, + "ceram": 10677, + "spencer": 10678, + "entered": 10679, + "vamp": 10680, + "organized": 10681, + "âľĪ": 10682, + "pps": 10683, + "tron": 10684, + "mercedes": 10685, + "noti": 10686, + "competitive": 10687, + "dow": 10688, + "ousness": 
10689, + "victor": 10690, + "grilled": 10691, + "nai": 10692, + "putin": 10693, + "abra": 10694, + "blame": 10695, + "alexand": 10696, + "animal": 10697, + "decent": 10698, + "pent": 10699, + "interior": 10700, + ":')": 10701, + "butler": 10702, + "ballet": 10703, + "ðŁĴĶ": 10704, + "albums": 10705, + "downs": 10706, + "lad": 10707, + "sir": 10708, + "plain": 10709, + "pers": 10710, + "blonde": 10711, + "disc": 10712, + "pakistan": 10713, + "sement": 10714, + "gaa": 10715, + "wage": 10716, + "chas": 10717, + "mani": 10718, + "cops": 10719, + "territ": 10720, + "lol": 10721, + "laughter": 10722, + "rivers": 10723, + "magnificent": 10724, + "lamp": 10725, + "wb": 10726, + "newsle": 10727, + "charts": 10728, + "blessing": 10729, + "punch": 10730, + "longest": 10731, + "floral": 10732, + "cutie": 10733, + "farewell": 10734, + "stopping": 10735, + "mbb": 10736, + "bud": 10737, + "cheese": 10738, + "decla": 10739, + "sim": 10740, + "mcdonald": 10741, + "deter": 10742, + "youth": 10743, + "tch": 10744, + "freder": 10745, + "kindle": 10746, + "fern": 10747, + "ator": 10748, + "asleep": 10749, + "pond": 10750, + "sprint": 10751, + "pounds": 10752, + "lazy": 10753, + "ghe": 10754, + "fundraising": 10755, + "deadly": 10756, + "grande": 10757, + "doug": 10758, + "hey": 10759, + "linda": 10760, + "considering": 10761, + "ium": 10762, + "golden": 10763, + "vik": 10764, + "authors": 10765, + "diss": 10766, + "ually": 10767, + "appropriate": 10768, + "morning": 10769, + "yle": 10770, + "honoring": 10771, + "folio": 10772, + "bec": 10773, + "rebec": 10774, + "finland": 10775, + "formula": 10776, + "cornwall": 10777, + "shay": 10778, + "causing": 10779, + "blend": 10780, + "signal": 10781, + "tent": 10782, + "kashmir": 10783, + "nationals": 10784, + "harmony": 10785, + "scout": 10786, + "accessi": 10787, + "height": 10788, + "medieval": 10789, + "improvement": 10790, + "kees": 10791, + "practical": 10792, + "card": 10793, + "depar": 10794, + "hun": 10795, + "oming": 10796, + "calgary": 10797, + "stel": 10798, + "bubble": 10799, + "guru": 10800, + "mah": 10801, + "unexpe": 10802, + "nh": 10803, + "eda": 10804, + "meat": 10805, + "ige": 10806, + "sio": 10807, + "goddess": 10808, + "inches": 10809, + "tunes": 10810, + "britt": 10811, + "stion": 10812, + "raj": 10813, + "âĻ«": 10814, + "mercy": 10815, + "ðŁĴĺ": 10816, + "sends": 10817, + "iest": 10818, + "polici": 10819, + "vale": 10820, + "reduced": 10821, + "asap": 10822, + "vijay": 10823, + "defensive": 10824, + "celebrations": 10825, + "riders": 10826, + "meditation": 10827, + "harmon": 10828, + "ging": 10829, + "¡": 10830, + "programming": 10831, + "inau": 10832, + "sudden": 10833, + "mh": 10834, + "replacement": 10835, + "sku": 10836, + "jar": 10837, + "grades": 10838, + "tast": 10839, + "kitt": 10840, + "branding": 10841, + "kaw": 10842, + "boot": 10843, + "fought": 10844, + "pays": 10845, + "gf": 10846, + "ization": 10847, + "hop": 10848, + "kk": 10849, + "activist": 10850, + "vend": 10851, + "coastal": 10852, + "chaos": 10853, + "ðŁĶ´": 10854, + "seme": 10855, + "billboard": 10856, + "lifting": 10857, + "cumb": 10858, + "scal": 10859, + "ðŁĸ¤": 10860, + "struck": 10861, + "lv": 10862, + "indiedev": 10863, + "beaten": 10864, + "jungle": 10865, + "alright": 10866, + "destiny": 10867, + "ming": 10868, + "kc": 10869, + "chances": 10870, + "oman": 10871, + "qatar": 10872, + "craf": 10873, + "trained": 10874, + "prix": 10875, + "charm": 10876, + "otive": 10877, + "smu": 10878, + "ec": 10879, + "anders": 10880, + "handed": 10881, + "alban": 10882, + 
"certainly": 10883, + "arriving": 10884, + "ize": 10885, + "sai": 10886, + "track": 10887, + "painter": 10888, + "humble": 10889, + "appointment": 10890, + "headline": 10891, + "managing": 10892, + "mod": 10893, + "aspe": 10894, + "andrea": 10895, + "ä": 10896, + "ethiop": 10897, + "united": 10898, + "exist": 10899, + "bali": 10900, + "kad": 10901, + "nt": 10902, + "dred": 10903, + "rex": 10904, + "recognize": 10905, + "tampa": 10906, + "beers": 10907, + "atia": 10908, + "heels": 10909, + "note": 10910, + "transportation": 10911, + "turtle": 10912, + "rede": 10913, + "hiphop": 10914, + "spicy": 10915, + "spurs": 10916, + "â¬ĩ": 10917, + "corp": 10918, + "thern": 10919, + "toast": 10920, + "hurry": 10921, + "properties": 10922, + "mage": 10923, + "marco": 10924, + "elements": 10925, + "bouti": 10926, + "syndrome": 10927, + "msg": 10928, + "developer": 10929, + "graders": 10930, + "heim": 10931, + "resil": 10932, + "offices": 10933, + "delay": 10934, + "dimen": 10935, + "vintag": 10936, + "barbara": 10937, + "ðŁĺ±": 10938, + "venezu": 10939, + "cular": 10940, + "faced": 10941, + "barn": 10942, + "ðŁĺĨ": 10943, + "survivor": 10944, + "worm": 10945, + "confused": 10946, + "passionate": 10947, + "ر": 10948, + "identify": 10949, + "electricity": 10950, + "souls": 10951, + "bradley": 10952, + "reportedly": 10953, + "lunch": 10954, + "shelf": 10955, + "elia": 10956, + "sweet": 10957, + "smooth": 10958, + "employment": 10959, + "amel": 10960, + "manhattan": 10961, + "steam": 10962, + "ounts": 10963, + "yep": 10964, + "living": 10965, + "une": 10966, + "describe": 10967, + "cares": 10968, + "manila": 10969, + "shawn": 10970, + "acted": 10971, + "bash": 10972, + "steven": 10973, + "rest": 10974, + "petition": 10975, + "divine": 10976, + "welsh": 10977, + "race": 10978, + "platinum": 10979, + "ðŁĮ¸": 10980, + "pb": 10981, + "extraordinary": 10982, + "solidarity": 10983, + "mall": 10984, + "onion": 10985, + "scheduled": 10986, + "gameof": 10987, + "fergu": 10988, + "dems": 10989, + "norm": 10990, + "pk": 10991, + "trials": 10992, + "policies": 10993, + "publishing": 10994, + "stole": 10995, + "front": 10996, + "character": 10997, + "vania": 10998, + "exce": 10999, + "stie": 11000, + "sca": 11001, + "residential": 11002, + "sailing": 11003, + "ðŁĶ¥ðŁĶ¥ðŁĶ¥": 11004, + "sponsors": 11005, + "thick": 11006, + "champagne": 11007, + "shepher": 11008, + "continuing": 11009, + "venice": 11010, + "perth": 11011, + "nap": 11012, + "aster": 11013, + "yak": 11014, + "unlimited": 11015, + "choices": 11016, + "neo": 11017, + "hiv": 11018, + "reporter": 11019, + "brussels": 11020, + "fold": 11021, + "dys": 11022, + "semi": 11023, + "lawn": 11024, + "italia": 11025, + "wifi": 11026, + "ask": 11027, + "emed": 11028, + "frame": 11029, + "monitoring": 11030, + "stead": 11031, + "ida": 11032, + "grin": 11033, + "isa": 11034, + "flip": 11035, + "restric": 11036, + "offensive": 11037, + "attached": 11038, + "dish": 11039, + "why": 11040, + "phillips": 11041, + "greet": 11042, + "pals": 11043, + "mixtape": 11044, + "vou": 11045, + "fielder": 11046, + "spark": 11047, + "alberta": 11048, + "glen": 11049, + "cash": 11050, + "sri": 11051, + "uri": 11052, + "rodri": 11053, + "entrepreneurs": 11054, + "climatechange": 11055, + "psy": 11056, + "dle": 11057, + "ements": 11058, + "linked": 11059, + "netherlands": 11060, + "accidentally": 11061, + "opposition": 11062, + "velvet": 11063, + "rays": 11064, + "cw": 11065, + "omo": 11066, + "mf": 11067, + "lmfao": 11068, + "newsletter": 11069, + ":)": 11070, + "toilet": 11071, + 
"literature": 11072, + "disp": 11073, + "philip": 11074, + "uniform": 11075, + "suddenly": 11076, + "header": 11077, + "cooler": 11078, + "---": 11079, + "proud": 11080, + "brig": 11081, + "nissan": 11082, + "scientist": 11083, + "jah": 11084, + "concentr": 11085, + "packs": 11086, + "appointed": 11087, + "soap": 11088, + "engage": 11089, + "chose": 11090, + "âĻ¡": 11091, + "setup": 11092, + "jealous": 11093, + "harry": 11094, + "gation": 11095, + "tunnel": 11096, + "temp": 11097, + "oscars": 11098, + "decade": 11099, + "recommended": 11100, + "children": 11101, + "aba": 11102, + "anxiety": 11103, + "vements": 11104, + "salon": 11105, + "photoo": 11106, + "organiz": 11107, + "machines": 11108, + "abs": 11109, + "ville": 11110, + "hype": 11111, + "tiff": 11112, + "emerging": 11113, + "avgeek": 11114, + "[#": 11115, + "contribution": 11116, + "brady": 11117, + "resto": 11118, + "gmail": 11119, + "fitz": 11120, + "photoshoot": 11121, + "helmet": 11122, + "ht": 11123, + "elegant": 11124, + "uganda": 11125, + "nursing": 11126, + "orleans": 11127, + "penn": 11128, + "nah": 11129, + "footage": 11130, + "ema": 11131, + "wo": 11132, + "wad": 11133, + "concerns": 11134, + "vere": 11135, + "remark": 11136, + "whoever": 11137, + "strang": 11138, + "pt": 11139, + "quit": 11140, + "shang": 11141, + "history": 11142, + "sick": 11143, + "permanent": 11144, + "illness": 11145, + "cold": 11146, + "vision": 11147, + "hem": 11148, + "arrow": 11149, + "convic": 11150, + "pink": 11151, + "occup": 11152, + "bald": 11153, + "exhau": 11154, + "uof": 11155, + "amo": 11156, + "ont": 11157, + "ãĥ»": 11158, + "adopt": 11159, + "laid": 11160, + "smoked": 11161, + "interpre": 11162, + "essenti": 11163, + "associated": 11164, + "bd": 11165, + "bby": 11166, + "fier": 11167, + "install": 11168, + "diplom": 11169, + "conditi": 11170, + "cf": 11171, + "wak": 11172, + "anya": 11173, + "graci": 11174, + "fisher": 11175, + "sss": 11176, + "apr": 11177, + "ilit": 11178, + "musician": 11179, + "symphony": 11180, + "cord": 11181, + "hack": 11182, + "legi": 11183, + "lv": 11184, + "blessings": 11185, + "humor": 11186, + "scra": 11187, + "eti": 11188, + "minster": 11189, + "travelling": 11190, + "bush": 11191, + "jewellery": 11192, + "lime": 11193, + "!!!": 11194, + "pregnant": 11195, + "pee": 11196, + "lob": 11197, + "capital": 11198, + "ipa": 11199, + "pencil": 11200, + "labor": 11201, + "ducks": 11202, + "proudly": 11203, + "wedding": 11204, + "derek": 11205, + "mw": 11206, + "peg": 11207, + "valentine": 11208, + "angu": 11209, + "retreat": 11210, + "prospect": 11211, + "danger": 11212, + "vulner": 11213, + "upset": 11214, + ",#": 11215, + "srk": 11216, + "xim": 11217, + "thursday": 11218, + "nfl": 11219, + "kisses": 11220, + "reds": 11221, + "crack": 11222, + "reward": 11223, + "cu": 11224, + "kok": 11225, + "mete": 11226, + "abandoned": 11227, + "itt": 11228, + "meals": 11229, + "spell": 11230, + "stanbul": 11231, + "delays": 11232, + "rum": 11233, + "leop": 11234, + "gum": 11235, + "nova": 11236, + "superman": 11237, + "chick": 11238, + "mis": 11239, + "dramatic": 11240, + "innocent": 11241, + "rounds": 11242, + "rec": 11243, + "autism": 11244, + "bangladesh": 11245, + "moral": 11246, + "movie": 11247, + "spoo": 11248, + "kla": 11249, + "âĥ£": 11250, + "outing": 11251, + "messi": 11252, + "abroad": 11253, + "lookin": 11254, + "aim": 11255, + "qi": 11256, + "stack": 11257, + "collage": 11258, + "à¯": 11259, + "hudson": 11260, + "scan": 11261, + "hoe": 11262, + "chau": 11263, + "occur": 11264, + "commander": 11265, + "holes": 
11266, + "ðŁİĦ": 11267, + "bias": 11268, + "von": 11269, + "sticker": 11270, + "mak": 11271, + "responsibility": 11272, + "columbus": 11273, + "saint": 11274, + "edmon": 11275, + "racism": 11276, + "farms": 11277, + "wen": 11278, + "gulf": 11279, + "mayo": 11280, + "!!!!!!!!": 11281, + "corporation": 11282, + "bachel": 11283, + "ela": 11284, + "internal": 11285, + "jeep": 11286, + "follows": 11287, + "dialogue": 11288, + "derer": 11289, + "smartphone": 11290, + "helen": 11291, + "richmond": 11292, + "equity": 11293, + "sland": 11294, + "bg": 11295, + "near": 11296, + "avi": 11297, + "memphis": 11298, + "weir": 11299, + "discussed": 11300, + "badge": 11301, + "pup": 11302, + "mistake": 11303, + "phenomen": 11304, + "unite": 11305, + "ðŁĽ": 11306, + "depic": 11307, + "rides": 11308, + "inaugu": 11309, + "nat": 11310, + "softwitter": 11311, + "combination": 11312, + "gospel": 11313, + "âļ¾": 11314, + "admission": 11315, + "retrogaming": 11316, + "ðŁIJ¾": 11317, + "schu": 11318, + "mbo": 11319, + "junction": 11320, + "alarm": 11321, + "à¦": 11322, + "grac": 11323, + "khali": 11324, + "kul": 11325, + "male": 11326, + "caption": 11327, + "wish": 11328, + "tere": 11329, + "corps": 11330, + "rubber": 11331, + "playstation": 11332, + "erin": 11333, + "efficient": 11334, + "lor": 11335, + "jokes": 11336, + "inary": 11337, + "norman": 11338, + "luis": 11339, + "inaugural": 11340, + "ched": 11341, + "âļ½ï¸ı": 11342, + "dip": 11343, + "toe": 11344, + "strat": 11345, + "aac": 11346, + "amu": 11347, + "pier": 11348, + "cott": 11349, + "command": 11350, + "tten": 11351, + "snoo": 11352, + "cube": 11353, + "closes": 11354, + "classical": 11355, + "sword": 11356, + "expression": 11357, + "reaching": 11358, + "napp": 11359, + "cost": 11360, + "affect": 11361, + "rico": 11362, + "gif": 11363, + "breathe": 11364, + "tribe": 11365, + "ortho": 11366, + "hay": 11367, + "lg": 11368, + "fries": 11369, + "nm": 11370, + "hiding": 11371, + "richards": 11372, + "ende": 11373, + "micro": 11374, + "capitol": 11375, + "copy": 11376, + "rom": 11377, + "regime": 11378, + "maryland": 11379, + "taxi": 11380, + "dial": 11381, + "embarra": 11382, + "unbeliev": 11383, + "cht": 11384, + "vs": 11385, + "elimin": 11386, + "odd": 11387, + "penny": 11388, + "soundtrack": 11389, + "lings": 11390, + "transition": 11391, + "remaining": 11392, + "ais": 11393, + "malik": 11394, + "?!?": 11395, + "random": 11396, + "defend": 11397, + "ultra": 11398, + "trum": 11399, + "dancer": 11400, + "stol": 11401, + "drive": 11402, + "aver": 11403, + "roast": 11404, + "definition": 11405, + "sean": 11406, + "excitement": 11407, + "particul": 11408, + "surely": 11409, + "shav": 11410, + "bery": 11411, + "dishes": 11412, + "comm": 11413, + "isol": 11414, + "iam": 11415, + "obli": 11416, + "ghost": 11417, + "hughes": 11418, + "chiefs": 11419, + "bas": 11420, + "conservative": 11421, + "special": 11422, + "femin": 11423, + "shri": 11424, + "nancy": 11425, + "intel": 11426, + "tune": 11427, + "ðŁĩª": 11428, + "joel": 11429, + "ggle": 11430, + "moto": 11431, + "ðŁĺĶ": 11432, + "buck": 11433, + "dag": 11434, + "anticip": 11435, + "montana": 11436, + "guid": 11437, + "frog": 11438, + "ecraft": 11439, + "ope": 11440, + "drives": 11441, + "numer": 11442, + "xy": 11443, + "colorful": 11444, + "wednesdaywisdom": 11445, + "illumin": 11446, + "beyon": 11447, + "inaugur": 11448, + "deeply": 11449, + "prefer": 11450, + "fortune": 11451, + "cooked": 11452, + "tible": 11453, + "âĺķ": 11454, + "sweater": 11455, + "itter": 11456, + "tty": 11457, + "ui": 11458, + "gie": 
11459, + "complic": 11460, + "~~": 11461, + "taxes": 11462, + "cups": 11463, + "diverse": 11464, + "samanth": 11465, + "âłĢâłĢ": 11466, + "baking": 11467, + "symp": 11468, + "wai": 11469, + "behalf": 11470, + "mercur": 11471, + "travels": 11472, + "ðŁİīðŁİ": 11473, + "oria": 11474, + "engaged": 11475, + "jumping": 11476, + "retired": 11477, + "naked": 11478, + "puni": 11479, + "speedway": 11480, + "sciences": 11481, + "rehearsal": 11482, + "onym": 11483, + "dyou": 11484, + "plates": 11485, + "rati": 11486, + "krish": 11487, + "jazz": 11488, + "carol": 11489, + "raf": 11490, + "penalty": 11491, + "timeline": 11492, + "ruby": 11493, + "engineers": 11494, + "raf": 11495, + "belle": 11496, + "dose": 11497, + "cheon": 11498, + "escap": 11499, + "meg": 11500, + "rank": 11501, + "ord": 11502, + "megan": 11503, + "merch": 11504, + "eclipse": 11505, + "âĺºï¸ı": 11506, + "pledge": 11507, + "kirk": 11508, + "persi": 11509, + "leicester": 11510, + "sak": 11511, + "wk": 11512, + "safely": 11513, + "yyy": 11514, + "jet": 11515, + "promised": 11516, + "jc": 11517, + "enne": 11518, + "noah": 11519, + "reno": 11520, + "rea": 11521, + "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 11522, + "trail": 11523, + "ðŁijĢ": 11524, + "fd": 11525, + "sooo": 11526, + "rimin": 11527, + "wk": 11528, + "า": 11529, + "ial": 11530, + "xox": 11531, + "biscu": 11532, + "dale": 11533, + "fandom": 11534, + "participating": 11535, + "flag": 11536, + "privilege": 11537, + "peach": 11538, + "machine": 11539, + "boston": 11540, + "gross": 11541, + "og": 11542, + "miracle": 11543, + "adoption": 11544, + "uss": 11545, + "monsters": 11546, + "beij": 11547, + "clarke": 11548, + "pushing": 11549, + "praying": 11550, + "aro": 11551, + "dn": 11552, + "ellis": 11553, + "apollo": 11554, + "odds": 11555, + "refugee": 11556, + "tow": 11557, + "bp": 11558, + "ðŁĩ¬ðŁĩ§": 11559, + "hend": 11560, + "appeared": 11561, + "membership": 11562, + "pean": 11563, + "dum": 11564, + "violent": 11565, + "vy": 11566, + "potatoes": 11567, + "aww": 11568, + "greetings": 11569, + "tts": 11570, + "acon": 11571, + "shane": 11572, + "photographed": 11573, + "crab": 11574, + "temperatures": 11575, + "cuba": 11576, + "cfc": 11577, + "welcom": 11578, + "hel": 11579, + "innings": 11580, + "mk": 11581, + "code": 11582, + "knock": 11583, + "grass": 11584, + "swedish": 11585, + "pta": 11586, + "icky": 11587, + "vat": 11588, + "lining": 11589, + "sq": 11590, + "sap": 11591, + "arc": 11592, + "announcing": 11593, + "skins": 11594, + "cityof": 11595, + "bring": 11596, + "cox": 11597, + "gamer": 11598, + "itarian": 11599, + "ida": 11600, + "hd": 11601, + "rosse": 11602, + "sadly": 11603, + "geo": 11604, + "âļ¡ï¸ı": 11605, + "tags": 11606, + "father": 11607, + "change": 11608, + "lance": 11609, + "whiskey": 11610, + "adelaide": 11611, + "tec": 11612, + "stickers": 11613, + "market": 11614, + "classy": 11615, + "badass": 11616, + "florence": 11617, + "liner": 11618, + "frost": 11619, + "kate": 11620, + "acon": 11621, + "scandal": 11622, + "essex": 11623, + "ðŁĺı": 11624, + "vivi": 11625, + "drill": 11626, + "bloggers": 11627, + "recommend": 11628, + "dha": 11629, + "acres": 11630, + "roma": 11631, + "buy": 11632, + "grocer": 11633, + "eria": 11634, + "mahar": 11635, + "ffer": 11636, + "patterns": 11637, + "veri": 11638, + "compu": 11639, + "stev": 11640, + "anga": 11641, + "mentor": 11642, + "doo": 11643, + "itali": 11644, + "cdnpoli": 11645, + "only": 11646, + "conduct": 11647, + "electro": 11648, + "def": 11649, + "whale": 11650, + "preparation": 11651, + "bicycle": 11652, + "viral": 11653, + 
"turnout": 11654, + "brass": 11655, + "quad": 11656, + "hospitality": 11657, + "packaging": 11658, + "dency": 11659, + "cemetery": 11660, + "aboard": 11661, + "dreaming": 11662, + "picture": 11663, + "tall": 11664, + "invent": 11665, + "admi": 11666, + "oe": 11667, + "temps": 11668, + "quan": 11669, + "fundam": 11670, + "promp": 11671, + "residence": 11672, + "mud": 11673, + "souri": 11674, + "âĦ¢": 11675, + "graffiti": 11676, + "gif": 11677, + "dnd": 11678, + "comp": 11679, + "swar": 11680, + "peeps": 11681, + "palestine": 11682, + "devils": 11683, + "sang": 11684, + "assistance": 11685, + "bike": 11686, + "mississi": 11687, + "interviewed": 11688, + "nephew": 11689, + "drums": 11690, + "vand": 11691, + "gentlemen": 11692, + "nsw": 11693, + "insta": 11694, + "lebanon": 11695, + "eeee": 11696, + "olivia": 11697, + "very": 11698, + "rough": 11699, + "industries": 11700, + "mation": 11701, + "ðŁĺĴ": 11702, + "barrel": 11703, + "nay": 11704, + "pops": 11705, + "modern": 11706, + "illy": 11707, + "arest": 11708, + "onents": 11709, + "protecting": 11710, + "vans": 11711, + "eo": 11712, + "vikings": 11713, + "restaurants": 11714, + "reck": 11715, + "jackie": 11716, + "andrew": 11717, + "willing": 11718, + "heath": 11719, + "citizen": 11720, + "discrimin": 11721, + "à¹Ī": 11722, + "stuart": 11723, + "mys": 11724, + "hip": 11725, + "transp": 11726, + "\"?": 11727, + "tex": 11728, + "sushi": 11729, + "ked": 11730, + "crossed": 11731, + "distur": 11732, + "pedia": 11733, + "fate": 11734, + "somehow": 11735, + "moth": 11736, + "processing": 11737, + "iss": 11738, + "rin": 11739, + "uts": 11740, + "yyc": 11741, + "vert": 11742, + "lgbt": 11743, + "reid": 11744, + "onto": 11745, + "arabia": 11746, + "habitat": 11747, + "==": 11748, + "streak": 11749, + "simpson": 11750, + "addiction": 11751, + "wimble": 11752, + "delivers": 11753, + "challenging": 11754, + "ðŁİ¶": 11755, + "franch": 11756, + "edu": 11757, + "sme": 11758, + "aids": 11759, + "hurst": 11760, + "tham": 11761, + "tarian": 11762, + "remembered": 11763, + "palestinian": 11764, + "fees": 11765, + "trum": 11766, + "sketch": 11767, + "uru": 11768, + "fitting": 11769, + "jesse": 11770, + "ðŁĶ¥ðŁĶ¥": 11771, + "--------": 11772, + "bach": 11773, + "icia": 11774, + "colored": 11775, + "dah": 11776, + "associate": 11777, + "intel": 11778, + "seller": 11779, + "pu": 11780, + "stuffed": 11781, + "acs": 11782, + "bs": 11783, + "shin": 11784, + "cooperation": 11785, + "certificate": 11786, + "abu": 11787, + "ingredients": 11788, + "rev": 11789, + "inge": 11790, + "elder": 11791, + "christian": 11792, + "bundle": 11793, + "thic": 11794, + "dirt": 11795, + "beijing": 11796, + "commit": 11797, + "teddy": 11798, + "edu": 11799, + "today": 11800, + "sfield": 11801, + "wyn": 11802, + "confirms": 11803, + "loo": 11804, + "jv": 11805, + "eness": 11806, + "alpha": 11807, + "virus": 11808, + "arium": 11809, + "grind": 11810, + "bridges": 11811, + "introduction": 11812, + "polls": 11813, + "bacter": 11814, + "zach": 11815, + "terminal": 11816, + "raiders": 11817, + "flavor": 11818, + "zombie": 11819, + "vod": 11820, + "spreading": 11821, + "gameofthrones": 11822, + "efficiency": 11823, + "lately": 11824, + "alem": 11825, + "tweet": 11826, + "crimes": 11827, + "cler": 11828, + "dey": 11829, + "dged": 11830, + "hyun": 11831, + "payments": 11832, + "circus": 11833, + "ðŁĺŃðŁĺŃ": 11834, + "missouri": 11835, + "lub": 11836, + "episodes": 11837, + "cage": 11838, + "pos": 11839, + "matching": 11840, + "tumblr": 11841, + "lined": 11842, + "gest": 11843, + "ambi": 11844, + 
"narr": 11845, + "ington": 11846, + "regul": 11847, + "blown": 11848, + "isle": 11849, + "coco": 11850, + "ondon": 11851, + "joshua": 11852, + "touring": 11853, + "sma": 11854, + "sausage": 11855, + "bestfriend": 11856, + "boeing": 11857, + "desire": 11858, + "savage": 11859, + "rapper": 11860, + "devo": 11861, + "tear": 11862, + "takeover": 11863, + "cowboys": 11864, + "poker": 11865, + "parag": 11866, + "ppe": 11867, + "hint": 11868, + "wears": 11869, + "seth": 11870, + "roles": 11871, + "lanc": 11872, + "manga": 11873, + "format": 11874, + "flyer": 11875, + "cay": 11876, + "moor": 11877, + "bake": 11878, + "splash": 11879, + "vad": 11880, + "kerala": 11881, + "proceeds": 11882, + "silly": 11883, + "reflection": 11884, + "distr": 11885, + "wid": 11886, + "suit": 11887, + "civic": 11888, + "yankees": 11889, + "byn": 11890, + "migration": 11891, + "distin": 11892, + "orch": 11893, + "femini": 11894, + "qualifying": 11895, + "turi": 11896, + "obe": 11897, + "hundred": 11898, + "crap": 11899, + "wang": 11900, + "mathemat": 11901, + "bure": 11902, + "exposure": 11903, + "ferguson": 11904, + "semester": 11905, + "reserv": 11906, + "plym": 11907, + "ahu": 11908, + "facial": 11909, + "wax": 11910, + "worried": 11911, + "cab": 11912, + "vio": 11913, + "asa": 11914, + "cod": 11915, + "topics": 11916, + "pcs": 11917, + "halo": 11918, + "rescued": 11919, + "horizon": 11920, + "ark": 11921, + "âļª": 11922, + "holly": 11923, + "elf": 11924, + "ulti": 11925, + "pup": 11926, + "qualified": 11927, + "attendance": 11928, + "atively": 11929, + "destroy": 11930, + "yc": 11931, + "forth": 11932, + "photooftheday": 11933, + "cents": 11934, + "iceland": 11935, + "measures": 11936, + "desk": 11937, + "portfolio": 11938, + "articles": 11939, + "directors": 11940, + "datab": 11941, + "ew": 11942, + "creepy": 11943, + "ounding": 11944, + "honoured": 11945, + "mist": 11946, + "jit": 11947, + "mentioned": 11948, + "portable": 11949, + "itic": 11950, + "dann": 11951, + "fridayfeeling": 11952, + "amid": 11953, + "tiger": 11954, + "scrip": 11955, + "helicopter": 11956, + "hardware": 11957, + "explor": 11958, + "workplace": 11959, + "austria": 11960, + "beatles": 11961, + "bernar": 11962, + "spider": 11963, + "disco": 11964, + "cult": 11965, + "limits": 11966, + "shortly": 11967, + "final": 11968, + "ninja": 11969, + "luke": 11970, + "lebron": 11971, + "walmart": 11972, + "oil": 11973, + "vanilla": 11974, + "shire": 11975, + "yeg": 11976, + "aky": 11977, + "cs": 11978, + "bler": 11979, + "collected": 11980, + "tg": 11981, + "rolled": 11982, + "specials": 11983, + "bff": 11984, + "pierre": 11985, + "shim": 11986, + "vier": 11987, + "flashback": 11988, + "restoration": 11989, + "individuals": 11990, + "prod": 11991, + "freaking": 11992, + "turer": 11993, + "oa": 11994, + "refre": 11995, + "moroc": 11996, + "greet": 11997, + "reyn": 11998, + "careful": 11999, + "ouring": 12000, + "ush": 12001, + "isd": 12002, + "gill": 12003, + "view": 12004, + "thunderstorm": 12005, + "bled": 12006, + "picnic": 12007, + "guardi": 12008, + "pig": 12009, + "ark": 12010, + "sylvania": 12011, + "banned": 12012, + "ucl": 12013, + "vijay": 12014, + "orium": 12015, + "avengers": 12016, + "believes": 12017, + "eur": 12018, + "monument": 12019, + "concerned": 12020, + "labs": 12021, + "berg": 12022, + "aap": 12023, + "vish": 12024, + "singles": 12025, + "cancel": 12026, + "zel": 12027, + "arab": 12028, + "ruth": 12029, + "tooth": 12030, + "arta": 12031, + "shaf": 12032, + "chairs": 12033, + "rack": 12034, + "diseases": 12035, + "crowd": 12036, + 
"cly": 12037, + "flex": 12038, + "christma": 12039, + "artificial": 12040, + "tomat": 12041, + "fine": 12042, + "draws": 12043, + "advocate": 12044, + "france": 12045, + "ÙĬ": 12046, + "ðŁĺ³": 12047, + "heavy": 12048, + "sour": 12049, + "comprehen": 12050, + "noble": 12051, + "aap": 12052, + "hindu": 12053, + "coral": 12054, + "gars": 12055, + "owen": 12056, + "nl": 12057, + "stall": 12058, + "yellow": 12059, + "marina": 12060, + "inver": 12061, + "support": 12062, + "tough": 12063, + "promises": 12064, + "pie": 12065, + "masterpiece": 12066, + "score": 12067, + "force": 12068, + "mortg": 12069, + "cryptocurrency": 12070, + "ox": 12071, + "rors": 12072, + "rockin": 12073, + "provin": 12074, + "hog": 12075, + "nostal": 12076, + "oakland": 12077, + "patrick": 12078, + "inclusion": 12079, + "traffic": 12080, + "ahmed": 12081, + "aha": 12082, + "luxury": 12083, + "consecu": 12084, + "demon": 12085, + "âĸº": 12086, + "blowing": 12087, + "stag": 12088, + ":\"": 12089, + "encourage": 12090, + "bene": 12091, + "skull": 12092, + "dodge": 12093, + "buster": 12094, + "kinson": 12095, + "witne": 12096, + "error": 12097, + "lowest": 12098, + "fellow": 12099, + "à°": 12100, + "shre": 12101, + "blur": 12102, + "virgin": 12103, + "composer": 12104, + "slip": 12105, + "mornings": 12106, + "gains": 12107, + "table": 12108, + "grain": 12109, + "arist": 12110, + "brazilian": 12111, + "wwe": 12112, + "tues": 12113, + "ribbon": 12114, + "anag": 12115, + "dist": 12116, + "sacrif": 12117, + "embrace": 12118, + "entrepreneur": 12119, + "affili": 12120, + "deo": 12121, + "tali": 12122, + "tourist": 12123, + "fatal": 12124, + "ìĬ": 12125, + "automatic": 12126, + "ðŁĩµ": 12127, + "weak": 12128, + "welfare": 12129, + "confirm": 12130, + "benjamin": 12131, + "fights": 12132, + "alleged": 12133, + "mead": 12134, + "struggling": 12135, + "prosecu": 12136, + "chef": 12137, + "è": 12138, + "proposal": 12139, + "ern": 12140, + "ðŁĺĦ": 12141, + "dyk": 12142, + "ongs": 12143, + "hong": 12144, + "mack": 12145, + "melon": 12146, + "onent": 12147, + "rush": 12148, + "dap": 12149, + "toler": 12150, + "propag": 12151, + "cze": 12152, + "translation": 12153, + "wallet": 12154, + "cottage": 12155, + "sail": 12156, + "constitution": 12157, + "ðŁĴĢ": 12158, + "munici": 12159, + "favor": 12160, + "stormhour": 12161, + "ih": 12162, + "ðŁĺĮ": 12163, + "approaching": 12164, + "pinned": 12165, + "jed": 12166, + "nigerian": 12167, + "nach": 12168, + "shat": 12169, + "particularly": 12170, + "mcdon": 12171, + "cameras": 12172, + "annie": 12173, + "administr": 12174, + "heat": 12175, + "electrical": 12176, + "charming": 12177, + "gibson": 12178, + "boutique": 12179, + "exposed": 12180, + "actor": 12181, + "pillow": 12182, + "beaches": 12183, + "genuine": 12184, + "margaret": 12185, + "bennett": 12186, + "louisi": 12187, + "positions": 12188, + "ely": 12189, + "shiny": 12190, + "tention": 12191, + "architect": 12192, + "rental": 12193, + "acqui": 12194, + "google": 12195, + "subway": 12196, + "moment": 12197, + "ðŁļ¨": 12198, + "rim": 12199, + "methods": 12200, + "cycli": 12201, + "norfolk": 12202, + "ÙĪ": 12203, + "overwhel": 12204, + "rapid": 12205, + "wear": 12206, + "happybirthday": 12207, + "progressive": 12208, + "ðŁĴ¥": 12209, + "cogn": 12210, + "papa": 12211, + "fool": 12212, + "philosophy": 12213, + "polar": 12214, + "jimmy": 12215, + "wig": 12216, + "ðŁĴĭ": 12217, + "operating": 12218, + "reduction": 12219, + "phi": 12220, + "flags": 12221, + "tothe": 12222, + "odi": 12223, + "ares": 12224, + "koo": 12225, + "kang": 12226, + 
"arkansas": 12227, + "ashton": 12228, + "wimbledon": 12229, + "scifi": 12230, + "attractive": 12231, + "mississippi": 12232, + "logists": 12233, + "ralph": 12234, + "label": 12235, + "graduates": 12236, + "maha": 12237, + "hometown": 12238, + "âľĮï¸ı": 12239, + "founded": 12240, + "onthe": 12241, + "liz": 12242, + "transl": 12243, + "minimum": 12244, + "presti": 12245, + "tam": 12246, + "generations": 12247, + "rebel": 12248, + "journalists": 12249, + "param": 12250, + "mcm": 12251, + "acrylic": 12252, + "deaths": 12253, + "tesla": 12254, + "wt": 12255, + "bryant": 12256, + "jerus": 12257, + "istanbul": 12258, + "muhammad": 12259, + "riley": 12260, + "kris": 12261, + "workshops": 12262, + "iso": 12263, + "counts": 12264, + "stret": 12265, + "protected": 12266, + "trinity": 12267, + "manual": 12268, + "rhin": 12269, + "ril": 12270, + "pleasant": 12271, + "lemon": 12272, + "nerd": 12273, + "harder": 12274, + "darren": 12275, + "bury": 12276, + "rah": 12277, + "basis": 12278, + "migu": 12279, + "occasion": 12280, + "lists": 12281, + "âĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ı": 12282, + "eb": 12283, + "decre": 12284, + "hampton": 12285, + "ìĿ´": 12286, + "travis": 12287, + "transform": 12288, + "puerto": 12289, + "nhl": 12290, + "avoc": 12291, + "trips": 12292, + "unexpected": 12293, + "vet": 12294, + "didyou": 12295, + "barber": 12296, + "stages": 12297, + "mson": 12298, + "represented": 12299, + "fort": 12300, + "lal": 12301, + "pple": 12302, + "nicely": 12303, + "ignore": 12304, + "quil": 12305, + "quinn": 12306, + "hk": 12307, + "carrier": 12308, + "reminded": 12309, + "among": 12310, + "passenger": 12311, + "ellen": 12312, + "guez": 12313, + "scape": 12314, + "mural": 12315, + "youngest": 12316, + "mash": 12317, + "dill": 12318, + "routine": 12319, + "stainless": 12320, + "jackson": 12321, + "gandhi": 12322, + "thal": 12323, + "oners": 12324, + "editorial": 12325, + "conversations": 12326, + "sdale": 12327, + "automation": 12328, + "ike": 12329, + "าà¸": 12330, + "ðŁĩª": 12331, + "haul": 12332, + "laying": 12333, + "mentions": 12334, + "amen": 12335, + "abortion": 12336, + "ibi": 12337, + "counties": 12338, + "catherine": 12339, + "mands": 12340, + "jame": 12341, + "roller": 12342, + "aut": 12343, + "nam": 12344, + "ological": 12345, + "ception": 12346, + "ranking": 12347, + "toxic": 12348, + "snacks": 12349, + "victorian": 12350, + "bangkok": 12351, + "psychology": 12352, + "reg": 12353, + "angela": 12354, + "respond": 12355, + "style": 12356, + "sophie": 12357, + "dakota": 12358, + "achieved": 12359, + "marked": 12360, + "imperial": 12361, + "inas": 12362, + "gloves": 12363, + "slim": 12364, + "confident": 12365, + "attacked": 12366, + "gger": 12367, + "lonely": 12368, + "valentinesday": 12369, + "reb": 12370, + "craftbeer": 12371, + "origin": 12372, + "zimbab": 12373, + "ceiling": 12374, + "teens": 12375, + "otherwise": 12376, + "wb": 12377, + "fers": 12378, + "daysof": 12379, + "advisor": 12380, + "yah": 12381, + "âĻª": 12382, + "ender": 12383, + "republicans": 12384, + "ava": 12385, + "skirt": 12386, + "pipel": 12387, + "chie": 12388, + "jane": 12389, + "jax": 12390, + "ðŁĺĭ": 12391, + "âľĬ": 12392, + "jays": 12393, + "brett": 12394, + "balo": 12395, + "crucial": 12396, + "dhar": 12397, + "asis": 12398, + "deau": 12399, + "lloyd": 12400, + "chatting": 12401, + "âĿĦï¸ı": 12402, + "relay": 12403, + "remarkable": 12404, + "ns": 12405, + "wet": 12406, + "brisbane": 12407, + "ðŁĶ´": 12408, + "tionally": 12409, + "fk": 12410, + "layer": 12411, + "household": 12412, + "consecutive": 12413, + "esis": 12414, + 
"pendant": 12415, + "stir": 12416, + "critic": 12417, + "sugar": 12418, + "photoshop": 12419, + "pares": 12420, + "artistic": 12421, + "dodgers": 12422, + "cun": 12423, + "crafted": 12424, + "amend": 12425, + "boat": 12426, + "âŃIJï¸ı": 12427, + "egyptian": 12428, + "saw": 12429, + "trage": 12430, + "smaller": 12431, + "oxy": 12432, + "paired": 12433, + "next": 12434, + "ires": 12435, + "taco": 12436, + "oy": 12437, + "uc": 12438, + "sti": 12439, + "aerial": 12440, + "://": 12441, + "dro": 12442, + "dotcom": 12443, + "ggins": 12444, + "rpg": 12445, + "aye": 12446, + "lean": 12447, + "striker": 12448, + "lobby": 12449, + "protests": 12450, + "priority": 12451, + "congress": 12452, + "amate": 12453, + "invit": 12454, + "rington": 12455, + "mommy": 12456, + "thus": 12457, + "allowing": 12458, + "pioneer": 12459, + "enforcement": 12460, + "gori": 12461, + "talk": 12462, + "drag": 12463, + "dumb": 12464, + "bullet": 12465, + "sange": 12466, + "ery": 12467, + "targets": 12468, + "ðŁĩ¦": 12469, + "heather": 12470, + "consider": 12471, + "seafood": 12472, + "vest": 12473, + "risks": 12474, + "%.": 12475, + "pg": 12476, + "sacred": 12477, + "heating": 12478, + "kicked": 12479, + "ttot": 12480, + ".-": 12481, + "chandi": 12482, + "coven": 12483, + "pool": 12484, + "pulse": 12485, + "ia": 12486, + "roster": 12487, + "shakespeare": 12488, + "esa": 12489, + "cargo": 12490, + "peanut": 12491, + "troop": 12492, + "action": 12493, + "tablet": 12494, + "homework": 12495, + "castle": 12496, + "struction": 12497, + "musicians": 12498, + "freezing": 12499, + "butt": 12500, + "justinbieber": 12501, + "jj": 12502, + "bahrain": 12503, + "anthem": 12504, + "audit": 12505, + "didyouknow": 12506, + "navig": 12507, + "guidance": 12508, + "âĸ¶": 12509, + "turf": 12510, + "nun": 12511, + "fications": 12512, + "yemen": 12513, + "charging": 12514, + "xc": 12515, + "broncos": 12516, + "subur": 12517, + "pale": 12518, + "boring": 12519, + "amongst": 12520, + "forthe": 12521, + "emper": 12522, + "omfg": 12523, + "pj": 12524, + "expecting": 12525, + "ðŁĴ«": 12526, + "stl": 12527, + "admin": 12528, + "expectations": 12529, + "swan": 12530, + "shoot": 12531, + "ooooo": 12532, + "minent": 12533, + "ãĢIJ": 12534, + "wallace": 12535, + "stang": 12536, + "saturday": 12537, + "adopted": 12538, + "doubles": 12539, + "homie": 12540, + "omez": 12541, + "dhan": 12542, + "venture": 12543, + "surrounding": 12544, + "file": 12545, + "mobility": 12546, + "dees": 12547, + "wski": 12548, + "brooke": 12549, + "embro": 12550, + "remembers": 12551, + "kara": 12552, + "testim": 12553, + "botan": 12554, + "mtv": 12555, + "sacrifice": 12556, + "jerusalem": 12557, + "dl": 12558, + "´": 12559, + "properly": 12560, + "ilion": 12561, + "asi": 12562, + "legit": 12563, + "cope": 12564, + "mcla": 12565, + "recycling": 12566, + "larger": 12567, + "ðŁĴĵ": 12568, + "patric": 12569, + "generous": 12570, + "jared": 12571, + "pf": 12572, + "molly": 12573, + "thomas": 12574, + "judges": 12575, + "hb": 12576, + "sorts": 12577, + "blvd": 12578, + "oven": 12579, + "entering": 12580, + "planes": 12581, + "beet": 12582, + "integration": 12583, + "booked": 12584, + "freed": 12585, + "vern": 12586, + "ashes": 12587, + "topped": 12588, + "depot": 12589, + "welcomed": 12590, + "rena": 12591, + "mick": 12592, + "dand": 12593, + "seeks": 12594, + "gamer": 12595, + "rankings": 12596, + "rene": 12597, + "mut": 12598, + "whisky": 12599, + "firefighters": 12600, + "gues": 12601, + "gather": 12602, + "tourney": 12603, + "demen": 12604, + "yang": 12605, + "newton": 12606, + 
"automotive": 12607, + "backyard": 12608, + "detailed": 12609, + "mist": 12610, + "tobac": 12611, + "fiber": 12612, + "unusual": 12613, + "gratitude": 12614, + "spare": 12615, + "neys": 12616, + ":*": 12617, + "peri": 12618, + "floating": 12619, + "finalist": 12620, + "donating": 12621, + "dress": 12622, + "broad": 12623, + "bethe": 12624, + "economics": 12625, + "taiwan": 12626, + "edwards": 12627, + "plug": 12628, + "prairi": 12629, + "valen": 12630, + "baba": 12631, + "fad": 12632, + "anas": 12633, + "harper": 12634, + "disorder": 12635, + "applied": 12636, + "patt": 12637, + "bikin": 12638, + "liver": 12639, + "curi": 12640, + "caroline": 12641, + "anner": 12642, + "julian": 12643, + "walking": 12644, + "malcol": 12645, + "screenshot": 12646, + "coding": 12647, + "skincare": 12648, + "activists": 12649, + "mysterious": 12650, + "exact": 12651, + "blocking": 12652, + "mercury": 12653, + "batter": 12654, + "dump": 12655, + "âľĮ": 12656, + "ense": 12657, + "lish": 12658, + "ridiculous": 12659, + "protesters": 12660, + "ðŁĻĪ": 12661, + "lust": 12662, + "sweat": 12663, + "ass": 12664, + "alike": 12665, + "cody": 12666, + "rements": 12667, + "winds": 12668, + "aspir": 12669, + "vienna": 12670, + "pray": 12671, + "...@": 12672, + "boi": 12673, + "candle": 12674, + "assists": 12675, + "tee": 12676, + "derson": 12677, + "pony": 12678, + "fence": 12679, + "conspir": 12680, + "âĺħâĺħ": 12681, + "ooth": 12682, + "epic": 12683, + "barely": 12684, + "aunt": 12685, + "bam": 12686, + "diamonds": 12687, + "endless": 12688, + "screens": 12689, + "cancer": 12690, + "gro": 12691, + "pst": 12692, + "prospec": 12693, + "mosque": 12694, + "helpful": 12695, + "ouri": 12696, + "brother": 12697, + "gujar": 12698, + "cristi": 12699, + "inez": 12700, + "towers": 12701, + "addresses": 12702, + "gray": 12703, + "burton": 12704, + "retweeted": 12705, + "ð٤Ķ": 12706, + "nity": 12707, + "duck": 12708, + "supervis": 12709, + "joan": 12710, + "kinder": 12711, + "sanctu": 12712, + "pied": 12713, + "âı°": 12714, + "łï¸ı": 12715, + "mati": 12716, + "revenge": 12717, + "cester": 12718, + "elife": 12719, + "designers": 12720, + "backed": 12721, + "boli": 12722, + "weight": 12723, + "couch": 12724, + "sures": 12725, + "sits": 12726, + "shrimp": 12727, + "lagos": 12728, + "authorities": 12729, + "osity": 12730, + "holly": 12731, + "computing": 12732, + "factors": 12733, + "abe": 12734, + "panels": 12735, + "ramad": 12736, + "sentence": 12737, + "mission": 12738, + "holm": 12739, + "rb": 12740, + "dads": 12741, + "shanghai": 12742, + "money": 12743, + "sheets": 12744, + "skate": 12745, + "threw": 12746, + "cupcakes": 12747, + "infinite": 12748, + "lis": 12749, + "practicing": 12750, + "essay": 12751, + "kai": 12752, + "asci": 12753, + "mob": 12754, + "ugh": 12755, + "holmes": 12756, + "regg": 12757, + "ikh": 12758, + "mock": 12759, + "collections": 12760, + "pep": 12761, + "ova": 12762, + "salt": 12763, + "nandez": 12764, + "coy": 12765, + "threats": 12766, + "texts": 12767, + "cinnam": 12768, + "pregnancy": 12769, + "pending": 12770, + "stamp": 12771, + "flower": 12772, + "gis": 12773, + "agreed": 12774, + "payne": 12775, + "rover": 12776, + "phra": 12777, + "soft": 12778, + "ffin": 12779, + "fathers": 12780, + "passengers": 12781, + "aways": 12782, + "ala": 12783, + "hes": 12784, + "livan": 12785, + "ins": 12786, + "samuel": 12787, + "ingui": 12788, + "hof": 12789, + "jj": 12790, + "chennai": 12791, + "catal": 12792, + "omic": 12793, + "heath": 12794, + "niece": 12795, + "pumped": 12796, + "integrated": 12797, + "arel": 
12798, + "nom": 12799, + "productivity": 12800, + "wanting": 12801, + "visa": 12802, + "diana": 12803, + "twil": 12804, + "itv": 12805, + "camps": 12806, + "rowing": 12807, + "dley": 12808, + "blackand": 12809, + "guards": 12810, + "bells": 12811, + "reverse": 12812, + "vibe": 12813, + "ricky": 12814, + "moss": 12815, + "nyt": 12816, + "âĺĢï¸ı": 12817, + "elle": 12818, + "troy": 12819, + "cudd": 12820, + "evan": 12821, + "womens": 12822, + "foto": 12823, + "mistakes": 12824, + "wicked": 12825, + "mil": 12826, + "cled": 12827, + "memes": 12828, + "cosmo": 12829, + "scholar": 12830, + "reno": 12831, + "ðŁĺĢ": 12832, + "vents": 12833, + "#â̦": 12834, + "terrorists": 12835, + "casey": 12836, + "cardinals": 12837, + "ðŁĺĬðŁĺĬ": 12838, + "venezuela": 12839, + "bola": 12840, + "literacy": 12841, + "tw": 12842, + "eno": 12843, + "contains": 12844, + "austin": 12845, + "financi": 12846, + "evan": 12847, + "harvard": 12848, + "originally": 12849, + "chevro": 12850, + "herald": 12851, + "nottingham": 12852, + "managers": 12853, + "âŀ¡": 12854, + "accepting": 12855, + "walsh": 12856, + "tutorial": 12857, + "entrepreneurship": 12858, + "yacht": 12859, + "requirements": 12860, + "glenn": 12861, + "pede": 12862, + "unfortunately": 12863, + "aching": 12864, + "daisy": 12865, + "gian": 12866, + "nightmare": 12867, + "âĿĹ": 12868, + "rina": 12869, + "bart": 12870, + "emails": 12871, + "opposite": 12872, + "whom": 12873, + "sake": 12874, + "puzzle": 12875, + "dashi": 12876, + "party": 12877, + "blanket": 12878, + "buses": 12879, + "lore": 12880, + "beauty": 12881, + "reason": 12882, + "punjab": 12883, + "windsor": 12884, + "functional": 12885, + "existing": 12886, + "hello": 12887, + "glimp": 12888, + "convin": 12889, + "lak": 12890, + "screaming": 12891, + "rebecca": 12892, + "bliss": 12893, + "northwest": 12894, + "infinity": 12895, + "cosmetics": 12896, + "pulling": 12897, + "coffee": 12898, + "pling": 12899, + "opho": 12900, + "colombia": 12901, + "interiordesign": 12902, + "(+": 12903, + "emotions": 12904, + "sac": 12905, + "sunglasses": 12906, + "saves": 12907, + "df": 12908, + "sixth": 12909, + "aly": 12910, + "ðŁĺ»": 12911, + "deen": 12912, + "devast": 12913, + "politicians": 12914, + "lacrosse": 12915, + "gu": 12916, + "pei": 12917, + "java": 12918, + "combine": 12919, + "coalition": 12920, + "erts": 12921, + "surviv": 12922, + "chad": 12923, + "strian": 12924, + "nn": 12925, + "devi": 12926, + "counc": 12927, + "concern": 12928, + "controller": 12929, + "breast": 12930, + "jury": 12931, + "tum": 12932, + "introduces": 12933, + "ladi": 12934, + "mobile": 12935, + "alz": 12936, + "steady": 12937, + "nurses": 12938, + "hacking": 12939, + "online": 12940, + "ocean": 12941, + "ðŁİĦ": 12942, + "aam": 12943, + "juven": 12944, + "icc": 12945, + "louisiana": 12946, + "arte": 12947, + "streetart": 12948, + "ison": 12949, + "wns": 12950, + "frm": 12951, + "panda": 12952, + "noir": 12953, + "maintain": 12954, + "delay": 12955, + "symptoms": 12956, + "thorn": 12957, + "geome": 12958, + "tern": 12959, + "carried": 12960, + "pru": 12961, + "panor": 12962, + "assy": 12963, + "peru": 12964, + "cloud": 12965, + "spra": 12966, + "pedi": 12967, + "este": 12968, + "tagged": 12969, + "ðŁĺĿ": 12970, + "shadows": 12971, + "nazi": 12972, + "اÙĦ": 12973, + "corri": 12974, + "âĻ¥âĻ¥": 12975, + "jad": 12976, + "ðŁĩ«": 12977, + "formal": 12978, + "spoken": 12979, + "ðŁĮŀ": 12980, + "enjoy": 12981, + "lopez": 12982, + "outlook": 12983, + "inho": 12984, + "wander": 12985, + "Ùħ": 12986, + "maya": 12987, + "pee": 12988, + "dine": 
12989, + "ãĢij": 12990, + "briefing": 12991, + "supporter": 12992, + "arily": 12993, + "ghters": 12994, + "naturally": 12995, + "doctorwho": 12996, + "jen": 12997, + "var": 12998, + "newyear": 12999, + "rese": 13000, + "simm": 13001, + "rex": 13002, + "consequ": 13003, + "tomatoes": 13004, + "burst": 13005, + "bravo": 13006, + "burgers": 13007, + "cracking": 13008, + "northeast": 13009, + "biom": 13010, + "mushroom": 13011, + "marque": 13012, + "double": 13013, + "nier": 13014, + "vag": 13015, + "twenty": 13016, + "keyboard": 13017, + "winni": 13018, + "jamaica": 13019, + "parish": 13020, + ":-": 13021, + "mentalhealth": 13022, + "alizing": 13023, + "render": 13024, + "waking": 13025, + "ðŁİĤ": 13026, + "gly": 13027, + "nathan": 13028, + "washing": 13029, + "melissa": 13030, + "jung": 13031, + "loyal": 13032, + "chili": 13033, + "songwriter": 13034, + "guitarist": 13035, + "bowie": 13036, + "neighbors": 13037, + "onymous": 13038, + "asset": 13039, + "tai": 13040, + "headquarters": 13041, + "ðŁĮĪ": 13042, + "ihear": 13043, + "cigare": 13044, + "surg": 13045, + ")\"": 13046, + "repl": 13047, + "darling": 13048, + "ðŁĻĦ": 13049, + "zak": 13050, + "sare": 13051, + "ãħĭ": 13052, + "mickey": 13053, + "warehouse": 13054, + "massage": 13055, + "inees": 13056, + "didnt": 13057, + "iw": 13058, + "hurts": 13059, + "engaging": 13060, + "magic": 13061, + "womenin": 13062, + "kitten": 13063, + "mors": 13064, + "cart": 13065, + "titans": 13066, + "colleague": 13067, + "competing": 13068, + "eran": 13069, + "khal": 13070, + "marble": 13071, + "demand": 13072, + "delight": 13073, + "etary": 13074, + "blizz": 13075, + "louise": 13076, + "mls": 13077, + "finishes": 13078, + "experiment": 13079, + "conducted": 13080, + "electronics": 13081, + "itters": 13082, + "caring": 13083, + "whats": 13084, + "symbol": 13085, + "jung": 13086, + "ecu": 13087, + "pix": 13088, + "context": 13089, + "charger": 13090, + "ðŁĺĩ": 13091, + "reig": 13092, + "frag": 13093, + "ëĭ": 13094, + "chad": 13095, + "true": 13096, + "kerry": 13097, + "defending": 13098, + "aint": 13099, + "auton": 13100, + "checkout": 13101, + "barnes": 13102, + "lessly": 13103, + "dt": 13104, + "mme": 13105, + "cloudy": 13106, + "secondary": 13107, + "arez": 13108, + "_:": 13109, + "appa": 13110, + "constant": 13111, + "\")": 13112, + "vets": 13113, + "job": 13114, + "ient": 13115, + "ðŁĺŃðŁĺŃðŁĺŃ": 13116, + "mj": 13117, + "french": 13118, + "diver": 13119, + "davies": 13120, + "hhhh": 13121, + "ebook": 13122, + "à¹ī": 13123, + "mariti": 13124, + "breeze": 13125, + "suspended": 13126, + "mato": 13127, + "viet": 13128, + "rahu": 13129, + "sei": 13130, + "bolt": 13131, + "enary": 13132, + "leis": 13133, + "karl": 13134, + "framed": 13135, + "explaining": 13136, + "abc": 13137, + "dealing": 13138, + "nato": 13139, + "jake": 13140, + "expand": 13141, + "leonard": 13142, + "established": 13143, + "dub": 13144, + "armen": 13145, + "elled": 13146, + "vocal": 13147, + "nicholas": 13148, + "orient": 13149, + "kyo": 13150, + "illustrated": 13151, + "ahh": 13152, + "dancers": 13153, + "million": 13154, + "geta": 13155, + "popp": 13156, + "asu": 13157, + "murdered": 13158, + "gible": 13159, + "stoked": 13160, + "griffin": 13161, + "maximum": 13162, + "adrian": 13163, + "encounter": 13164, + "thero": 13165, + "davidson": 13166, + "ðŁį»": 13167, + "holiday": 13168, + "evo": 13169, + "assets": 13170, + "carson": 13171, + "memorable": 13172, + "âļ½": 13173, + "obam": 13174, + "representative": 13175, + "cbd": 13176, + "tricks": 13177, + "vogue": 13178, + "voice": 13179, + 
"mmmm": 13180, + "sebastian": 13181, + "clif": 13182, + "athy": 13183, + "paralle": 13184, + "ðŁ¤·": 13185, + "pak": 13186, + "evacu": 13187, + "eats": 13188, + "اØ": 13189, + "touched": 13190, + "organised": 13191, + "spirits": 13192, + "canad": 13193, + "guided": 13194, + "framework": 13195, + "ðŁĮŁ": 13196, + "ped": 13197, + "natural": 13198, + "agar": 13199, + "replaced": 13200, + "anchor": 13201, + "tit": 13202, + "shah": 13203, + "organis": 13204, + "superior": 13205, + "rn": 13206, + "chro": 13207, + "erica": 13208, + "still": 13209, + "coron": 13210, + "chuck": 13211, + "locks": 13212, + "organ": 13213, + "rosen": 13214, + "scam": 13215, + "bened": 13216, + "/#": 13217, + "keen": 13218, + "trevor": 13219, + "vampire": 13220, + "sorted": 13221, + "!'": 13222, + "afford": 13223, + "intro": 13224, + "grace": 13225, + "ðŁĺľ": 13226, + "saur": 13227, + "kickstarter": 13228, + "influen": 13229, + "vu": 13230, + "yup": 13231, + "poc": 13232, + "ðŁİ¥": 13233, + "aar": 13234, + "sang": 13235, + "trek": 13236, + "etsy": 13237, + "tbh": 13238, + "scream": 13239, + "chevrolet": 13240, + "pixel": 13241, + "shepherd": 13242, + "anor": 13243, + "gabriel": 13244, + "twood": 13245, + "sdcc": 13246, + "meters": 13247, + "developers": 13248, + "closure": 13249, + "vw": 13250, + "twitch": 13251, + "ìĹ": 13252, + "seoul": 13253, + "price": 13254, + "hog": 13255, + "nish": 13256, + "hillary": 13257, + "scratch": 13258, + "incen": 13259, + "wagon": 13260, + "disability": 13261, + "panther": 13262, + "chats": 13263, + "gd": 13264, + "witz": 13265, + "sussex": 13266, + "late": 13267, + "denmark": 13268, + "gerald": 13269, + "cancelled": 13270, + "nette": 13271, + "ix": 13272, + "naval": 13273, + "baptist": 13274, + "tet": 13275, + "yad": 13276, + "math": 13277, + "hoy": 13278, + "randy": 13279, + "point": 13280, + "intellec": 13281, + "fruits": 13282, + "wool": 13283, + "guin": 13284, + "pron": 13285, + "theft": 13286, + "condem": 13287, + "marry": 13288, + "nola": 13289, + "architects": 13290, + "cincin": 13291, + "rockets": 13292, + "gentleman": 13293, + "explan": 13294, + "tate": 13295, + "doe": 13296, + "raises": 13297, + "wildlife": 13298, + "wl": 13299, + "insider": 13300, + "blanc": 13301, + "wp": 13302, + "forsale": 13303, + "nyc": 13304, + "powell": 13305, + "unbelievable": 13306, + "pens": 13307, + "goodies": 13308, + "mustang": 13309, + "pens": 13310, + "stays": 13311, + "squash": 13312, + "xoxo": 13313, + "nearby": 13314, + "everton": 13315, + "coco": 13316, + "leagu": 13317, + "khan": 13318, + "stud": 13319, + "southwest": 13320, + "construc": 13321, + "sworth": 13322, + "croatia": 13323, + "lea": 13324, + "sums": 13325, + "aims": 13326, + "ean": 13327, + "vaness": 13328, + "itious": 13329, + "pathy": 13330, + "arcade": 13331, + "bend": 13332, + "suggests": 13333, + "sacram": 13334, + "royals": 13335, + "rier": 13336, + "emir": 13337, + "incl": 13338, + "ank": 13339, + "clark": 13340, + "right": 13341, + "vacc": 13342, + "ा": 13343, + "tane": 13344, + "lib": 13345, + "usc": 13346, + "sales": 13347, + "huh": 13348, + "sally": 13349, + "vera": 13350, + "pga": 13351, + "grows": 13352, + "drum": 13353, + "tree": 13354, + "ethics": 13355, + "suggest": 13356, + "isab": 13357, + "sealed": 13358, + "previously": 13359, + "animated": 13360, + "abdu": 13361, + "rises": 13362, + "glob": 13363, + "predat": 13364, + "scarf": 13365, + "delic": 13366, + "omar": 13367, + "lli": 13368, + "sxsw": 13369, + "python": 13370, + "nebra": 13371, + "funk": 13372, + "reflect": 13373, + "pavilion": 13374, + "tically": 
13375, + "chasing": 13376, + "bakery": 13377, + "invasion": 13378, + "koh": 13379, + "believed": 13380, + "cohen": 13381, + "conqu": 13382, + "crafts": 13383, + "nati": 13384, + "clever": 13385, + "governance": 13386, + "samples": 13387, + "fails": 13388, + "âĶ": 13389, + "timo": 13390, + "ritu": 13391, + "striking": 13392, + "inclusive": 13393, + "shocking": 13394, + "cant": 13395, + "requires": 13396, + "drawings": 13397, + "à¸Ń": 13398, + "purchased": 13399, + "dum": 13400, + "zach": 13401, + "warner": 13402, + "console": 13403, + "mansion": 13404, + "fountain": 13405, + "circum": 13406, + "esh": 13407, + "island": 13408, + "milk": 13409, + "profits": 13410, + "halifax": 13411, + "rival": 13412, + "âľĪï¸ı": 13413, + "jenny": 13414, + "sandra": 13415, + "nye": 13416, + "kelly": 13417, + "yal": 13418, + "quad": 13419, + "nos": 13420, + "instein": 13421, + "finalists": 13422, + "midfielder": 13423, + "cue": 13424, + "exceptional": 13425, + "aan": 13426, + "sapp": 13427, + "gettin": 13428, + "saa": 13429, + "fati": 13430, + "slice": 13431, + "volk": 13432, + "swal": 13433, + "lasting": 13434, + "summary": 13435, + "itas": 13436, + "smo": 13437, + "sz": 13438, + "âĺĨ": 13439, + "ipl": 13440, + "flames": 13441, + "enews": 13442, + "hav": 13443, + "hoodie": 13444, + "pitcher": 13445, + "windy": 13446, + "revol": 13447, + "central": 13448, + "tonite": 13449, + "ðŁİīðŁİī": 13450, + "solved": 13451, + "milwau": 13452, + "organizations": 13453, + "weets": 13454, + "refin": 13455, + "sth": 13456, + "ãĥ¼": 13457, + "elin": 13458, + "tona": 13459, + "cinnamon": 13460, + "ðŁİ¨": 13461, + "ðŁİģ": 13462, + "ronaldo": 13463, + "peninsu": 13464, + "omega": 13465, + "elds": 13466, + "designing": 13467, + "eigh": 13468, + "bluet": 13469, + "benz": 13470, + "nug": 13471, + "asha": 13472, + "robots": 13473, + "sudan": 13474, + "choosing": 13475, + "endo": 13476, + "serge": 13477, + "closely": 13478, + "handy": 13479, + "finger": 13480, + "being": 13481, + "arte": 13482, + "survived": 13483, + "flame": 13484, + "milestone": 13485, + "gut": 13486, + "dwar": 13487, + "futures": 13488, + "ée": 13489, + "elo": 13490, + "fridge": 13491, + "elic": 13492, + "ouch": 13493, + "ub": 13494, + "pv": 13495, + "titan": 13496, + "collar": 13497, + "station": 13498, + "nevada": 13499, + "aurora": 13500, + "rd": 13501, + "duncan": 13502, + "âģł": 13503, + "brien": 13504, + "marsh": 13505, + "о": 13506, + "total": 13507, + "chry": 13508, + "sers": 13509, + "suffe": 13510, + "rachel": 13511, + "college": 13512, + "todays": 13513, + "courts": 13514, + "chit": 13515, + "reunited": 13516, + "gymna": 13517, + "genesis": 13518, + "beside": 13519, + "representation": 13520, + "chant": 13521, + "collector": 13522, + "rak": 13523, + "athens": 13524, + "nigh": 13525, + "munich": 13526, + "languages": 13527, + "flu": 13528, + "participation": 13529, + "___": 13530, + "cv": 13531, + "spectrum": 13532, + "soda": 13533, + "cover": 13534, + "referen": 13535, + "abbo": 13536, + "apa": 13537, + "publication": 13538, + "edm": 13539, + "monica": 13540, + "army": 13541, + "ðŁļĢ": 13542, + "divor": 13543, + "dry": 13544, + "streams": 13545, + "robotics": 13546, + "cider": 13547, + "bullying": 13548, + "approval": 13549, + "stoke": 13550, + "platforms": 13551, + "sierra": 13552, + "extin": 13553, + "ib": 13554, + "hayes": 13555, + "succeed": 13556, + "suffer": 13557, + "atically": 13558, + "dai": 13559, + "lynch": 13560, + "hound": 13561, + "delines": 13562, + "acknow": 13563, + "dated": 13564, + "exclusively": 13565, + "heres": 13566, + "facilit": 
13567, + "damaged": 13568, + "charter": 13569, + "lakers": 13570, + "falcon": 13571, + "unveiled": 13572, + "welove": 13573, + "ease": 13574, + "patience": 13575, + "lone": 13576, + "gentle": 13577, + "genetic": 13578, + "producing": 13579, + "gour": 13580, + "shannon": 13581, + "bilities": 13582, + "zimbabwe": 13583, + "pint": 13584, + "daughters": 13585, + "literary": 13586, + "belle": 13587, + "clam": 13588, + "surrounded": 13589, + "kany": 13590, + "neil": 13591, + "pirate": 13592, + "ranger": 13593, + "hbd": 13594, + "natalie": 13595, + "belong": 13596, + "olympi": 13597, + "embassy": 13598, + "scol": 13599, + "ener": 13600, + "akin": 13601, + "loren": 13602, + "bh": 13603, + ":/": 13604, + "diva": 13605, + "denim": 13606, + "hipp": 13607, + "ðŁĩµðŁĩ": 13608, + "arnold": 13609, + "?'": 13610, + "weren": 13611, + "empower": 13612, + "disabled": 13613, + "manor": 13614, + "raspberry": 13615, + "baf": 13616, + "awful": 13617, + "drummer": 13618, + "kardashi": 13619, + "nash": 13620, + "machinelearning": 13621, + "chu": 13622, + "rebels": 13623, + "timing": 13624, + "monroe": 13625, + "tongue": 13626, + "range": 13627, + "pupils": 13628, + "ress": 13629, + "amazon": 13630, + "bz": 13631, + "harley": 13632, + "palmer": 13633, + "balloon": 13634, + "sings": 13635, + "icec": 13636, + "jb": 13637, + "cers": 13638, + "gps": 13639, + "whist": 13640, + "rise": 13641, + "lt": 13642, + "oooo": 13643, + "cattle": 13644, + "shooter": 13645, + "vodka": 13646, + "ucl": 13647, + "mtg": 13648, + "lesli": 13649, + "jonas": 13650, + "dispo": 13651, + "atric": 13652, + "stein": 13653, + "vintage": 13654, + "firms": 13655, + "floyd": 13656, + "cowboy": 13657, + "soooo": 13658, + "isaac": 13659, + "warcraft": 13660, + "disneyland": 13661, + "beautiful": 13662, + "beam": 13663, + "franchise": 13664, + "bun": 13665, + "kag": 13666, + "anon": 13667, + "turbo": 13668, + "sweep": 13669, + "madein": 13670, + "karachi": 13671, + "detective": 13672, + "pennsylvania": 13673, + "controversi": 13674, + "vitamin": 13675, + "aside": 13676, + "chronic": 13677, + "describes": 13678, + "removal": 13679, + "hah": 13680, + "aper": 13681, + "tened": 13682, + "uto": 13683, + "badly": 13684, + "mirac": 13685, + "fry": 13686, + "yea": 13687, + "injec": 13688, + "thermal": 13689, + "compact": 13690, + "thor": 13691, + "teed": 13692, + "urgent": 13693, + "lite": 13694, + "gilli": 13695, + "sophom": 13696, + "ico": 13697, + "chem": 13698, + "pm": 13699, + "fork": 13700, + "freak": 13701, + "chak": 13702, + "recipient": 13703, + "iy": 13704, + "nik": 13705, + "modeling": 13706, + "cans": 13707, + "ðŁıĢ": 13708, + "delux": 13709, + "seam": 13710, + "survivors": 13711, + "radical": 13712, + "investigating": 13713, + "reliable": 13714, + "fm": 13715, + "turt": 13716, + "lighthouse": 13717, + "tool": 13718, + "gown": 13719, + "))": 13720, + "bots": 13721, + "autograph": 13722, + "aid": 13723, + "buffe": 13724, + "hmm": 13725, + "horrible": 13726, + "ssional": 13727, + "anni": 13728, + "à¹Ģ": 13729, + "kits": 13730, + "schi": 13731, + "eternal": 13732, + "huss": 13733, + "sensitive": 13734, + "ru": 13735, + "tastes": 13736, + "checks": 13737, + "imo": 13738, + "portion": 13739, + "skate": 13740, + "eden": 13741, + "halftime": 13742, + "fried": 13743, + "rihanna": 13744, + "tise": 13745, + "flick": 13746, + "cain": 13747, + "sgt": 13748, + "âľĶ": 13749, + "shau": 13750, + "stained": 13751, + "raffle": 13752, + "drove": 13753, + "salman": 13754, + "principles": 13755, + "sho": 13756, + "aru": 13757, + "jess": 13758, + "guine": 13759, + 
"garbage": 13760, + "myan": 13761, + "jelly": 13762, + "disru": 13763, + "zia": 13764, + "qld": 13765, + "entries": 13766, + "lav": 13767, + "flew": 13768, + "admit": 13769, + "objects": 13770, + "compare": 13771, + "nytimes": 13772, + "cannes": 13773, + "pn": 13774, + "suffol": 13775, + "roc": 13776, + "dana": 13777, + "egg": 13778, + "hist": 13779, + "counsel": 13780, + "'!": 13781, + "physi": 13782, + "imagination": 13783, + "adjust": 13784, + "explosion": 13785, + "plymouth": 13786, + "horror": 13787, + "elliott": 13788, + "bourne": 13789, + "dex": 13790, + "breed": 13791, + "audio": 13792, + "lobster": 13793, + "disappointed": 13794, + "nationwide": 13795, + "((": 13796, + "increases": 13797, + "australi": 13798, + "cedar": 13799, + "staring": 13800, + "racial": 13801, + "eis": 13802, + "gmt": 13803, + "visions": 13804, + "stayed": 13805, + "discussions": 13806, + "dean": 13807, + "curtis": 13808, + "maiden": 13809, + "stellar": 13810, + "happiest": 13811, + "hwy": 13812, + "preseason": 13813, + "carav": 13814, + "mondays": 13815, + "hospitals": 13816, + "glimpse": 13817, + "scholars": 13818, + "jai": 13819, + "terrace": 13820, + "anna": 13821, + "goose": 13822, + "graded": 13823, + "lotus": 13824, + "hung": 13825, + "grocery": 13826, + "stamps": 13827, + "emperor": 13828, + "scoop": 13829, + "inser": 13830, + "cas": 13831, + "existence": 13832, + "heal": 13833, + "falcons": 13834, + "marvel": 13835, + "reducing": 13836, + "terrific": 13837, + "magnetic": 13838, + "performs": 13839, + "barre": 13840, + "pus": 13841, + "treating": 13842, + "icon": 13843, + "wh": 13844, + "declared": 13845, + "trauma": 13846, + "dod": 13847, + "comedian": 13848, + "nikon": 13849, + "bugs": 13850, + "asm": 13851, + "montgom": 13852, + "ibiza": 13853, + "comprehensive": 13854, + "has": 13855, + "santi": 13856, + "fellowship": 13857, + "dash": 13858, + "psal": 13859, + "louisville": 13860, + "spy": 13861, + "fault": 13862, + "dthe": 13863, + "filed": 13864, + "vista": 13865, + "desc": 13866, + "fears": 13867, + "youtu": 13868, + "sps": 13869, + "esp": 13870, + "rig": 13871, + "crime": 13872, + "berger": 13873, + "wonderland": 13874, + "kent": 13875, + "informed": 13876, + "stevens": 13877, + "myth": 13878, + "aston": 13879, + "iri": 13880, + "visitor": 13881, + "atri": 13882, + "producers": 13883, + "alla": 13884, + "personally": 13885, + "separate": 13886, + "agencies": 13887, + "afri": 13888, + "ilan": 13889, + "spoke": 13890, + "nina": 13891, + "squad": 13892, + "dives": 13893, + "depend": 13894, + "liv": 13895, + "fierce": 13896, + "entertaining": 13897, + "chain": 13898, + "scat": 13899, + "borders": 13900, + "palette": 13901, + "spro": 13902, + "osis": 13903, + "derby": 13904, + "tobacco": 13905, + "zio": 13906, + "willie": 13907, + "juvent": 13908, + "zoom": 13909, + "holy": 13910, + "entirely": 13911, + "afe": 13912, + "martinez": 13913, + "beds": 13914, + "pea": 13915, + "bulldogs": 13916, + "ðŁĩªðŁĩ": 13917, + "ibm": 13918, + "neon": 13919, + "ethiopia": 13920, + "teammates": 13921, + "planting": 13922, + "twer": 13923, + "anytime": 13924, + "forbes": 13925, + "ón": 13926, + "runway": 13927, + "nervous": 13928, + "roger": 13929, + "pile": 13930, + "chanc": 13931, + "apocaly": 13932, + "uw": 13933, + "oi": 13934, + "drought": 13935, + "territory": 13936, + "brick": 13937, + "creatures": 13938, + "goin": 13939, + "waff": 13940, + "gren": 13941, + "southeast": 13942, + "jean": 13943, + "ambul": 13944, + "edited": 13945, + "strap": 13946, + "cv": 13947, + "aaron": 13948, + "ãĥ»ãĥ»": 13949, + "tsu": 
13950, + "description": 13951, + "kindly": 13952, + "clutch": 13953, + "immer": 13954, + "enor": 13955, + "womensday": 13956, + "orange": 13957, + "rag": 13958, + "obvious": 13959, + "hyder": 13960, + "channels": 13961, + "mango": 13962, + "meyer": 13963, + "raining": 13964, + "getty": 13965, + "pilgri": 13966, + "coordinator": 13967, + "upload": 13968, + "nintendo": 13969, + "donuts": 13970, + "sanchez": 13971, + "apparel": 13972, + "jr": 13973, + "zzi": 13974, + ",@": 13975, + "jefferson": 13976, + "accessible": 13977, + "greatly": 13978, + "eid": 13979, + "initial": 13980, + "buddha": 13981, + "paris": 13982, + "mascot": 13983, + "â¬ĩï¸ı": 13984, + "schwar": 13985, + "siri": 13986, + "spinning": 13987, + "mortgage": 13988, + "echo": 13989, + "endange": 13990, + "gedly": 13991, + "chloe": 13992, + "enhance": 13993, + "karnat": 13994, + "kry": 13995, + "explores": 13996, + "ðŁĴģ": 13997, + "affair": 13998, + "icals": 13999, + "alla": 14000, + "dart": 14001, + "dolphins": 14002, + "differences": 14003, + "squirrel": 14004, + "augh": 14005, + "drones": 14006, + "ellen": 14007, + "restore": 14008, + "paw": 14009, + "unfor": 14010, + "pike": 14011, + "hilton": 14012, + "collab": 14013, + "consumers": 14014, + "coinci": 14015, + "outcomes": 14016, + "ppp": 14017, + "aq": 14018, + "coupon": 14019, + "liest": 14020, + "sims": 14021, + "kho": 14022, + "aves": 14023, + "spoon": 14024, + "pudding": 14025, + "corbyn": 14026, + "haters": 14027, + "exams": 14028, + "slave": 14029, + ".!": 14030, + "psa": 14031, + "apples": 14032, + "tamil": 14033, + "sed": 14034, + "coke": 14035, + "zzo": 14036, + "losange": 14037, + "carbon": 14038, + "clair": 14039, + "...)": 14040, + "khu": 14041, + "craig": 14042, + "exploration": 14043, + "sanctuary": 14044, + "sue": 14045, + "alway": 14046, + "dementia": 14047, + "wonders": 14048, + "superhero": 14049, + "pakistani": 14050, + "browns": 14051, + "bluetooth": 14052, + "locker": 14053, + "marc": 14054, + "eventu": 14055, + "deluxe": 14056, + "rodriguez": 14057, + "âĿ¤âĿ¤": 14058, + "robb": 14059, + "ðŁĴ¦": 14060, + "linux": 14061, + "tens": 14062, + "intelligent": 14063, + "seed": 14064, + "voter": 14065, + "sler": 14066, + "peaks": 14067, + "intern": 14068, + "teenage": 14069, + "peninsula": 14070, + "handling": 14071, + "tie": 14072, + "cousins": 14073, + "wendy": 14074, + "mee": 14075, + "à¹Ģà¸": 14076, + "dino": 14077, + "ðŁĴ°": 14078, + "ðŁĺĥ": 14079, + "zee": 14080, + "sbury": 14081, + "tragedy": 14082, + "bk": 14083, + "bore": 14084, + "zin": 14085, + "warns": 14086, + "idiot": 14087, + "touching": 14088, + "continental": 14089, + "tacos": 14090, + "safari": 14091, + "washed": 14092, + "podium": 14093, + "morrison": 14094, + "forests": 14095, + "cbc": 14096, + "alon": 14097, + "particular": 14098, + "beads": 14099, + "invented": 14100, + "loch": 14101, + "lighter": 14102, + "wherever": 14103, + "ide": 14104, + "documents": 14105, + "awe": 14106, + "kr": 14107, + "nowhere": 14108, + "miner": 14109, + "stit": 14110, + "rox": 14111, + "contribute": 14112, + "hardy": 14113, + "clan": 14114, + "object": 14115, + "cait": 14116, + "ðŁĴķðŁĴķ": 14117, + "happier": 14118, + "vegetables": 14119, + "tart": 14120, + "gag": 14121, + "nominee": 14122, + "heavily": 14123, + "panic": 14124, + "jd": 14125, + "theresa": 14126, + "atm": 14127, + "uph": 14128, + "sfc": 14129, + "suri": 14130, + "drink": 14131, + "nal": 14132, + "revel": 14133, + "kl": 14134, + "avocado": 14135, + "nomination": 14136, + "madonna": 14137, + "sharon": 14138, + "malcolm": 14139, + "controlled": 
14140, + "shers": 14141, + "revival": 14142, + "legislation": 14143, + "shoots": 14144, + "nin": 14145, + "commentary": 14146, + "pros": 14147, + "humanrights": 14148, + "stranger": 14149, + "mitch": 14150, + "pipeline": 14151, + "legally": 14152, + "thu": 14153, + "gilbert": 14154, + "toll": 14155, + "granted": 14156, + "ghs": 14157, + "iranian": 14158, + "refreshing": 14159, + "duk": 14160, + "abi": 14161, + "prime": 14162, + "joseph": 14163, + "mosa": 14164, + "statistics": 14165, + "productions": 14166, + "merry": 14167, + "patel": 14168, + "sax": 14169, + "humanitarian": 14170, + "structures": 14171, + "emissions": 14172, + "towns": 14173, + "freel": 14174, + "stering": 14175, + "ratings": 14176, + "allegedly": 14177, + "cabin": 14178, + "stl": 14179, + "wade": 14180, + "flyers": 14181, + "trim": 14182, + "promising": 14183, + "zu": 14184, + "ballot": 14185, + "comparison": 14186, + "freeze": 14187, + "outer": 14188, + "greatness": 14189, + "assign": 14190, + "snowy": 14191, + "rale": 14192, + "tories": 14193, + "mediter": 14194, + "knock": 14195, + "consultant": 14196, + "cincinnati": 14197, + "analyst": 14198, + "scoo": 14199, + "jews": 14200, + "approxim": 14201, + "pure": 14202, + "portraits": 14203, + "cyrus": 14204, + "ational": 14205, + "loans": 14206, + "acquis": 14207, + "elu": 14208, + "acceptable": 14209, + "union": 14210, + "watercolor": 14211, + "rust": 14212, + "battles": 14213, + "perfu": 14214, + "seasonal": 14215, + "serial": 14216, + "mindset": 14217, + "riot": 14218, + "feld": 14219, + "ennial": 14220, + "closet": 14221, + "priest": 14222, + "tanks": 14223, + "intl": 14224, + "screw": 14225, + "bum": 14226, + "abdul": 14227, + "oux": 14228, + "explained": 14229, + "rica": 14230, + "imaging": 14231, + "lawyers": 14232, + "buried": 14233, + "ãĥ»ãĥ»ãĥ»": 14234, + "earl": 14235, + "âĢķ": 14236, + "lton": 14237, + "restored": 14238, + "stripes": 14239, + "foss": 14240, + "demands": 14241, + "stealing": 14242, + "alexis": 14243, + "mund": 14244, + "aker": 14245, + "urus": 14246, + "wardro": 14247, + "hugs": 14248, + "genre": 14249, + "ego": 14250, + "ÙĦ": 14251, + "participated": 14252, + "babes": 14253, + "banquet": 14254, + "tious": 14255, + "hemi": 14256, + "dsb": 14257, + "lost": 14258, + "milwaukee": 14259, + "jenner": 14260, + "gem": 14261, + "outra": 14262, + "loses": 14263, + "idi": 14264, + "reps": 14265, + "ðŁİ§": 14266, + "regulation": 14267, + "flaw": 14268, + "fang": 14269, + "vibrant": 14270, + "ramp": 14271, + "rains": 14272, + "wellbeing": 14273, + "soviet": 14274, + "viewers": 14275, + "depo": 14276, + "libraries": 14277, + "bigo": 14278, + "sery": 14279, + "gill": 14280, + "destruction": 14281, + "coz": 14282, + "cx": 14283, + "bridal": 14284, + "alds": 14285, + "planted": 14286, + "amateur": 14287, + "lud": 14288, + "cheering": 14289, + "showcas": 14290, + "profile": 14291, + "iu": 14292, + "vertical": 14293, + "packers": 14294, + "wizard": 14295, + "skip": 14296, + "slight": 14297, + "beau": 14298, + "airways": 14299, + "much": 14300, + "rera": 14301, + "ðŁĮĬ": 14302, + "absor": 14303, + "patio": 14304, + "packages": 14305, + "sells": 14306, + "mentally": 14307, + "ðŁĺ¢": 14308, + "reynolds": 14309, + "kare": 14310, + "tribun": 14311, + "walt": 14312, + "knit": 14313, + "taste": 14314, + "surrey": 14315, + "bounce": 14316, + "creature": 14317, + "bare": 14318, + "betting": 14319, + "sure": 14320, + "miley": 14321, + "laughs": 14322, + "alore": 14323, + "cyn": 14324, + "tl": 14325, + "artist": 14326, + "annah": 14327, + "warmer": 14328, + "dynamics": 
14329, + "lunchtime": 14330, + "maritime": 14331, + "vulnerable": 14332, + "ðŁĴĥ": 14333, + "wolver": 14334, + "durham": 14335, + "constantly": 14336, + "amin": 14337, + "sibl": 14338, + ":@": 14339, + "bullet": 14340, + "kach": 14341, + "angelo": 14342, + "wilder": 14343, + "doom": 14344, + "desktop": 14345, + "lawsuit": 14346, + "kca": 14347, + "henderson": 14348, + "inviting": 14349, + "betty": 14350, + "tawards": 14351, + "rafa": 14352, + "leaked": 14353, + "andi": 14354, + "gems": 14355, + "afl": 14356, + "velo": 14357, + "mediterran": 14358, + "probe": 14359, + "totten": 14360, + "stephanie": 14361, + "snation": 14362, + "combe": 14363, + "qs": 14364, + "overcome": 14365, + "assassin": 14366, + "rav": 14367, + "filip": 14368, + "winnipeg": 14369, + "shil": 14370, + "determined": 14371, + "kas": 14372, + "outre": 14373, + "regret": 14374, + "guides": 14375, + "aaa": 14376, + "ðŁĺĪ": 14377, + "wives": 14378, + "manife": 14379, + "erly": 14380, + "smy": 14381, + "shima": 14382, + "xing": 14383, + "pixel": 14384, + "jacob": 14385, + "accommod": 14386, + "toy": 14387, + "ono": 14388, + "poo": 14389, + "tier": 14390, + "answe": 14391, + "ðŁĴģ": 14392, + "rosa": 14393, + "lease": 14394, + "belongs": 14395, + "thar": 14396, + "eventually": 14397, + "neither": 14398, + "goa": 14399, + "skiing": 14400, + "atra": 14401, + "agh": 14402, + "broadcasting": 14403, + "fury": 14404, + "pyram": 14405, + "dice": 14406, + "volkswag": 14407, + "womens": 14408, + "provider": 14409, + "bombs": 14410, + "missile": 14411, + "whip": 14412, + "dick": 14413, + "norwe": 14414, + "backup": 14415, + "elder": 14416, + "mature": 14417, + "concerts": 14418, + "gious": 14419, + "squee": 14420, + "goodmorning": 14421, + "braves": 14422, + "^_": 14423, + "aussie": 14424, + "luna": 14425, + "males": 14426, + "heck": 14427, + "fortn": 14428, + "romeo": 14429, + "steelers": 14430, + "pn": 14431, + "peer": 14432, + "represents": 14433, + "«": 14434, + "katy": 14435, + "miguel": 14436, + "require": 14437, + "chains": 14438, + "lur": 14439, + "immediate": 14440, + "timber": 14441, + "âĸ¶ï¸ı": 14442, + "advocacy": 14443, + "export": 14444, + "anz": 14445, + "tiffany": 14446, + "author": 14447, + "ðŁİĪ": 14448, + "dudes": 14449, + "chilly": 14450, + "hid": 14451, + "harm": 14452, + "bug": 14453, + "monster": 14454, + "terrier": 14455, + "tuc": 14456, + "storytelling": 14457, + "tak": 14458, + "inti": 14459, + "immigrants": 14460, + "bis": 14461, + "reaches": 14462, + "compassion": 14463, + "johnny": 14464, + "contributions": 14465, + "ðŁIJ¶": 14466, + "mechanical": 14467, + "impression": 14468, + "ranks": 14469, + "kobe": 14470, + "menting": 14471, + "blossom": 14472, + "pablo": 14473, + "builder": 14474, + "bombing": 14475, + "twel": 14476, + "sullivan": 14477, + "omo": 14478, + "pete": 14479, + "demi": 14480, + "kudos": 14481, + "wbb": 14482, + "tgif": 14483, + "massach": 14484, + "neighbor": 14485, + "chefs": 14486, + "engines": 14487, + "pune": 14488, + "gained": 14489, + "phantom": 14490, + "sdays": 14491, + "extend": 14492, + "gran": 14493, + "centers": 14494, + "jacqu": 14495, + "datasci": 14496, + "sleepy": 14497, + "elvis": 14498, + "answered": 14499, + "slot": 14500, + "cony": 14501, + "flexible": 14502, + "tially": 14503, + "letics": 14504, + "%,": 14505, + "andrews": 14506, + "sible": 14507, + "momma": 14508, + "vino": 14509, + "dox": 14510, + "invitational": 14511, + "twilight": 14512, + "jade": 14513, + "illery": 14514, + "johns": 14515, + "fou": 14516, + "pv": 14517, + "--->": 14518, + "breakdown": 14519, + 
"billion": 14520, + "printer": 14521, + "mond": 14522, + "cbc": 14523, + "maggie": 14524, + "legion": 14525, + "dub": 14526, + "kurt": 14527, + "poor": 14528, + "parenting": 14529, + "regions": 14530, + "bikini": 14531, + "beware": 14532, + "sional": 14533, + "auburn": 14534, + "kidding": 14535, + "amples": 14536, + "span": 14537, + "contempor": 14538, + "cic": 14539, + "habits": 14540, + "ako": 14541, + "prefe": 14542, + "buddies": 14543, + "itz": 14544, + "emily": 14545, + "personnel": 14546, + "mountain": 14547, + "versus": 14548, + "ðŁĺ¬": 14549, + "earning": 14550, + "sink": 14551, + "dari": 14552, + "uu": 14553, + "swin": 14554, + "ister": 14555, + "brutal": 14556, + "nac": 14557, + "kata": 14558, + "cloth": 14559, + "amand": 14560, + "ðŁĶĹ": 14561, + "neo": 14562, + "alumin": 14563, + "weekends": 14564, + "nebraska": 14565, + "codes": 14566, + "delayed": 14567, + "bruno": 14568, + "proven": 14569, + "inc": 14570, + "ight": 14571, + "flan": 14572, + "oro": 14573, + "lambert": 14574, + "regulat": 14575, + "wf": 14576, + "massachuse": 14577, + "kardashian": 14578, + "bernard": 14579, + "fiesta": 14580, + "volcano": 14581, + "grandpa": 14582, + "anca": 14583, + "dre": 14584, + "stitu": 14585, + "meaning": 14586, + "foam": 14587, + "auck": 14588, + "ated": 14589, + "rl": 14590, + "hotel": 14591, + "persons": 14592, + "dynasty": 14593, + "ellor": 14594, + "mai": 14595, + "amne": 14596, + "styling": 14597, + "avier": 14598, + "eg": 14599, + "vegetarian": 14600, + ",â̦": 14601, + "founders": 14602, + "stain": 14603, + "gd": 14604, + "cycles": 14605, + "skyline": 14606, + "tractor": 14607, + "exists": 14608, + "tral": 14609, + "kidney": 14610, + "maril": 14611, + "instag": 14612, + "sette": 14613, + "addict": 14614, + "triangle": 14615, + "flashback": 14616, + "controversial": 14617, + "zon": 14618, + "pins": 14619, + "ias": 14620, + "tray": 14621, + "township": 14622, + "delegates": 14623, + "spam": 14624, + "hms": 14625, + "crane": 14626, + "peoples": 14627, + "olo": 14628, + "faction": 14629, + "butes": 14630, + "onica": 14631, + "delegation": 14632, + "newprofile": 14633, + "elier": 14634, + "mca": 14635, + "wand": 14636, + "gely": 14637, + "losangeles": 14638, + "berke": 14639, + "tive": 14640, + "disrup": 14641, + "zza": 14642, + "casa": 14643, + "jordan": 14644, + "fordshire": 14645, + "gathered": 14646, + "ichi": 14647, + "attendees": 14648, + "à¸Ńà¸": 14649, + "peppers": 14650, + "coin": 14651, + "bourbon": 14652, + "ernity": 14653, + "rotary": 14654, + "behaviour": 14655, + "jeremy": 14656, + "teamwork": 14657, + "compliance": 14658, + "tremend": 14659, + "ðŁĩ§": 14660, + "buhari": 14661, + "cambo": 14662, + "buyers": 14663, + "hagen": 14664, + "buds": 14665, + "bayern": 14666, + "monte": 14667, + "smells": 14668, + "anza": 14669, + "athlon": 14670, + "described": 14671, + "workforce": 14672, + "giving": 14673, + "api": 14674, + "investments": 14675, + "dail": 14676, + "selena": 14677, + "database": 14678, + "thum": 14679, + "mortal": 14680, + "student": 14681, + "buyer": 14682, + "dover": 14683, + "garten": 14684, + "attle": 14685, + "loyalty": 14686, + "genoci": 14687, + "holocau": 14688, + "theaters": 14689, + "ruling": 14690, + "venus": 14691, + "patent": 14692, + "chun": 14693, + "abby": 14694, + "awake": 14695, + "massacre": 14696, + "bangalore": 14697, + "breaking": 14698, + "simmons": 14699, + "justi": 14700, + "hale": 14701, + "edchat": 14702, + "ggles": 14703, + "hawk": 14704, + "marking": 14705, + "headlines": 14706, + "strom": 14707, + "cove": 14708, + "breathtaking": 
14709, + "medals": 14710, + "haircut": 14711, + "christine": 14712, + "telegraph": 14713, + "gujarat": 14714, + "jura": 14715, + "cane": 14716, + "shore": 14717, + "propaganda": 14718, + "mueller": 14719, + "........": 14720, + "savi": 14721, + "stomach": 14722, + "throws": 14723, + "tab": 14724, + "warm": 14725, + "jong": 14726, + "renowned": 14727, + "hir": 14728, + "rais": 14729, + "mushrooms": 14730, + "guaranteed": 14731, + "boa": 14732, + "mj": 14733, + "revolutionary": 14734, + "certification": 14735, + "bruins": 14736, + "join": 14737, + "wes": 14738, + "passport": 14739, + "cg": 14740, + "sexu": 14741, + "capable": 14742, + "wv": 14743, + "tones": 14744, + "jackets": 14745, + "accompan": 14746, + "spinach": 14747, + "forever": 14748, + "blair": 14749, + "watts": 14750, + "gl": 14751, + "couples": 14752, + "prairie": 14753, + "newprofilepic": 14754, + "logistics": 14755, + "massachusetts": 14756, + "jaguar": 14757, + "oid": 14758, + "weal": 14759, + "underwater": 14760, + "moz": 14761, + "yi": 14762, + "maths": 14763, + "myanmar": 14764, + "preps": 14765, + "suffered": 14766, + "trace": 14767, + "wali": 14768, + "ahhh": 14769, + "borg": 14770, + "stitch": 14771, + "culin": 14772, + "realise": 14773, + "infection": 14774, + "discrimination": 14775, + "shame": 14776, + "ankle": 14777, + "humid": 14778, + "yt": 14779, + "bracket": 14780, + "truck": 14781, + "triu": 14782, + "easter": 14783, + "community": 14784, + "postcard": 14785, + "involving": 14786, + "tyler": 14787, + "caramel": 14788, + "overview": 14789, + "examples": 14790, + "integrity": 14791, + "basement": 14792, + "instruments": 14793, + "anium": 14794, + "atus": 14795, + "gher": 14796, + "laundry": 14797, + "achieve": 14798, + "geneva": 14799, + "pricing": 14800, + "hyderabad": 14801, + "belief": 14802, + "meta": 14803, + "jaw": 14804, + "accounting": 14805, + "leader": 14806, + "cristiano": 14807, + "couture": 14808, + "cyp": 14809, + "vised": 14810, + ",,,": 14811, + "knu": 14812, + "hick": 14813, + "breaker": 14814, + "bram": 14815, + "rab": 14816, + "moor": 14817, + "hamas": 14818, + "graduating": 14819, + "puppies": 14820, + "akh": 14821, + "tah": 14822, + "aches": 14823, + "rie": 14824, + "opini": 14825, + "gta": 14826, + "reign": 14827, + "tragic": 14828, + "rever": 14829, + "pill": 14830, + "pineapple": 14831, + "touches": 14832, + "dare": 14833, + "leys": 14834, + "ilo": 14835, + "interiors": 14836, + "scouts": 14837, + "bart": 14838, + "enzie": 14839, + "dono": 14840, + "brock": 14841, + "christians": 14842, + "ensemble": 14843, + "·": 14844, + "cinemas": 14845, + "newport": 14846, + "airline": 14847, + "winston": 14848, + "leigh": 14849, + "contents": 14850, + "prescri": 14851, + "urge": 14852, + "trout": 14853, + "fically": 14854, + "ilia": 14855, + "subsi": 14856, + "arer": 14857, + "âļ¾ï¸ı": 14858, + "wounded": 14859, + "ðŁĻĤ": 14860, + "pepper": 14861, + "ðŁĴŀ": 14862, + "fitted": 14863, + "aff": 14864, + "resur": 14865, + "thursdaythoughts": 14866, + "zero": 14867, + "archaeology": 14868, + "div": 14869, + "jee": 14870, + "ion": 14871, + "awaiting": 14872, + "cozy": 14873, + "beauties": 14874, + "bald": 14875, + "data": 14876, + "grizz": 14877, + "stalk": 14878, + "kinds": 14879, + "cleared": 14880, + "jessic": 14881, + "regular": 14882, + "aliens": 14883, + "place": 14884, + "bos": 14885, + "bizar": 14886, + "thisis": 14887, + "ðŁĴĢ": 14888, + "tottenham": 14889, + "mafia": 14890, + "slam": 14891, + "ariana": 14892, + "carroll": 14893, + "backpack": 14894, + "carey": 14895, + "univ": 14896, + "rg": 
14897, + "pep": 14898, + "digit": 14899, + "tattoos": 14900, + "agon": 14901, + "volunteering": 14902, + "differen": 14903, + "consumption": 14904, + "kathr": 14905, + "headphones": 14906, + "tshirt": 14907, + "ob": 14908, + "element": 14909, + "retail": 14910, + "shru": 14911, + "algori": 14912, + "container": 14913, + "conscious": 14914, + "fil": 14915, + "coming": 14916, + "rash": 14917, + "urope": 14918, + "define": 14919, + "gior": 14920, + "feminist": 14921, + "flowing": 14922, + "routes": 14923, + "glaci": 14924, + "fert": 14925, + "somerset": 14926, + "antes": 14927, + "tweeps": 14928, + "$$": 14929, + "hour": 14930, + "endangered": 14931, + "yearsof": 14932, + "roh": 14933, + "popped": 14934, + "backing": 14935, + "basil": 14936, + "brake": 14937, + "monaco": 14938, + "lgbtq": 14939, + "prague": 14940, + "utility": 14941, + "cassi": 14942, + "gateway": 14943, + "haunted": 14944, + "schul": 14945, + "ðŁİµ": 14946, + "should": 14947, + "walkingdead": 14948, + "completing": 14949, + "danny": 14950, + "montgomery": 14951, + "penguin": 14952, + "ssi": 14953, + "merchandi": 14954, + "ðŁijij": 14955, + "church": 14956, + "hates": 14957, + "captain": 14958, + "breathing": 14959, + "cet": 14960, + "fairly": 14961, + "approaches": 14962, + "companion": 14963, + "surprising": 14964, + "kanye": 14965, + "pey": 14966, + "hindi": 14967, + "targeted": 14968, + "lords": 14969, + "deut": 14970, + "digging": 14971, + "german": 14972, + "rut": 14973, + "energy": 14974, + "closest": 14975, + "yun": 14976, + "apologi": 14977, + "ั": 14978, + "sack": 14979, + "rup": 14980, + "ddy": 14981, + "portal": 14982, + "dough": 14983, + "bats": 14984, + "ðŁĵ°": 14985, + "atur": 14986, + "grapher": 14987, + "pires": 14988, + "motors": 14989, + "ðŁĮ¹": 14990, + "jc": 14991, + "dang": 14992, + "tuk": 14993, + "clue": 14994, + "usc": 14995, + "page": 14996, + "dless": 14997, + "brows": 14998, + "jus": 14999, + "ading": 15000, + "remarks": 15001, + "oom": 15002, + "cardio": 15003, + "stefan": 15004, + "armstrong": 15005, + "âĢ¢âĢ¢": 15006, + "niest": 15007, + "belgian": 15008, + "biop": 15009, + "soy": 15010, + "lof": 15011, + "íĥ": 15012, + "qt": 15013, + "flashbackfriday": 15014, + "cee": 15015, + "ģà¸": 15016, + "wreck": 15017, + "marines": 15018, + "amendment": 15019, + "wardrobe": 15020, + "voy": 15021, + "burned": 15022, + "guitars": 15023, + "rainf": 15024, + "lifel": 15025, + "ssil": 15026, + "ounce": 15027, + "external": 15028, + "ckey": 15029, + "mesh": 15030, + "sheikh": 15031, + "invitation": 15032, + "suggesti": 15033, + "popcorn": 15034, + "phenomenal": 15035, + "anonymous": 15036, + "tuna": 15037, + "chicago": 15038, + "oval": 15039, + "dely": 15040, + "locals": 15041, + "(&": 15042, + "prof": 15043, + "novel": 15044, + "finder": 15045, + "sparks": 15046, + "laven": 15047, + "infu": 15048, + "nicks": 15049, + "quant": 15050, + "rae": 15051, + "exec": 15052, + "distingui": 15053, + "stances": 15054, + "mutual": 15055, + "shal": 15056, + "unveils": 15057, + "edmonton": 15058, + "zania": 15059, + "adio": 15060, + "viewer": 15061, + "bradford": 15062, + "auditorium": 15063, + "quis": 15064, + "react": 15065, + "http": 15066, + "lero": 15067, + "cheeky": 15068, + "impacts": 15069, + "tak": 15070, + "edt": 15071, + "desperate": 15072, + "tay": 15073, + "ìĦ": 15074, + "settle": 15075, + "bargain": 15076, + "resume": 15077, + "unite": 15078, + "thrown": 15079, + "kest": 15080, + "seys": 15081, + "marching": 15082, + "amit": 15083, + "decline": 15084, + "schar": 15085, + "metr": 15086, + "stanford": 15087, + 
"linke": 15088, + "berra": 15089, + "dolls": 15090, + "rugby": 15091, + "jami": 15092, + "bor": 15093, + "roadtrip": 15094, + "dinosaur": 15095, + "mik": 15096, + "sunder": 15097, + "rem": 15098, + "bk": 15099, + "overseas": 15100, + "naughty": 15101, + "implementation": 15102, + "iamsrk": 15103, + "luncheon": 15104, + "firing": 15105, + "miami": 15106, + "perez": 15107, + "thee": 15108, + "zon": 15109, + "gifted": 15110, + "conversion": 15111, + "ceramic": 15112, + "¡ï¸ı": 15113, + "pedro": 15114, + "ìĨ": 15115, + "vick": 15116, + "!@": 15117, + "heed": 15118, + "sid": 15119, + "bw": 15120, + "document": 15121, + "plun": 15122, + "grants": 15123, + "fantasy": 15124, + "predictions": 15125, + "valid": 15126, + "carved": 15127, + "graduated": 15128, + "ðŁijįðŁı»": 15129, + "nationally": 15130, + "chy": 15131, + "afl": 15132, + "resso": 15133, + "blank": 15134, + "rivals": 15135, + "jig": 15136, + "eties": 15137, + "omics": 15138, + "unemp": 15139, + "bound": 15140, + "sko": 15141, + "inspection": 15142, + "paral": 15143, + "highs": 15144, + "crisp": 15145, + "bans": 15146, + "oba": 15147, + "[@": 15148, + "cospla": 15149, + "costumes": 15150, + "recall": 15151, + "mouth": 15152, + "nigel": 15153, + "bts": 15154, + "tera": 15155, + "kov": 15156, + "docs": 15157, + "westminster": 15158, + "dict": 15159, + "gravity": 15160, + "kari": 15161, + "rogue": 15162, + "tted": 15163, + "wark": 15164, + "idaho": 15165, + "wend": 15166, + "awi": 15167, + "queensland": 15168, + "processes": 15169, + "cliffe": 15170, + "mick": 15171, + "compens": 15172, + "opol": 15173, + "they": 15174, + "clari": 15175, + "wikipedia": 15176, + "salmankhan": 15177, + "hazard": 15178, + "preston": 15179, + "sweetest": 15180, + "pdf": 15181, + "chees": 15182, + "trilo": 15183, + "southafrica": 15184, + "burnt": 15185, + "($": 15186, + "contain": 15187, + "tp": 15188, + "submitted": 15189, + "soundcloud": 15190, + "atu": 15191, + "rez": 15192, + "wordpress": 15193, + "corrupt": 15194, + "nf": 15195, + "maker": 15196, + "íķ": 15197, + "paras": 15198, + "advent": 15199, + "rial": 15200, + "cafe": 15201, + "fossil": 15202, + "!!!!!!!": 15203, + "cows": 15204, + "cj": 15205, + "spur": 15206, + "institutions": 15207, + "landmark": 15208, + "entit": 15209, + "reut": 15210, + "his": 15211, + "alzheim": 15212, + "wemb": 15213, + "reggae": 15214, + "mosqu": 15215, + "stat": 15216, + "identified": 15217, + "dealer": 15218, + "ream": 15219, + "reland": 15220, + "tension": 15221, + "ðŁĩ©": 15222, + "wrapping": 15223, + "deeper": 15224, + "frat": 15225, + "reddit": 15226, + "aris": 15227, + "morocco": 15228, + "..\"": 15229, + "blow": 15230, + "mapping": 15231, + "priorities": 15232, + "inga": 15233, + "swap": 15234, + "rewards": 15235, + "conspiracy": 15236, + "creative": 15237, + "cj": 15238, + "congressional": 15239, + "vault": 15240, + "plex": 15241, + "sophomore": 15242, + "shadow": 15243, + "eless": 15244, + "ðŁĺħ": 15245, + "darts": 15246, + "aldub": 15247, + "annoying": 15248, + "props": 15249, + "nas": 15250, + "aluminum": 15251, + "hbo": 15252, + "offense": 15253, + "jill": 15254, + "onions": 15255, + "laur": 15256, + "tae": 15257, + "hardest": 15258, + "shro": 15259, + "gaining": 15260, + "measure": 15261, + "edtech": 15262, + "cyprus": 15263, + "tara": 15264, + "angeli": 15265, + "carlo": 15266, + "goon": 15267, + "alli": 15268, + "implic": 15269, + "jupit": 15270, + "resilience": 15271, + "hail": 15272, + "balanced": 15273, + ")...": 15274, + "joyce": 15275, + "gra": 15276, + "theli": 15277, + "defined": 15278, + "shipped": 
15279, + "mainly": 15280, + "mina": 15281, + "lm": 15282, + "sacri": 15283, + "ober": 15284, + "pim": 15285, + "claiming": 15286, + "enters": 15287, + "corey": 15288, + "bok": 15289, + "cried": 15290, + "cooling": 15291, + "danielle": 15292, + "pharmacy": 15293, + "thorough": 15294, + "cake": 15295, + "klo": 15296, + "outreach": 15297, + "zens": 15298, + "digitalmarketing": 15299, + "valent": 15300, + "snp": 15301, + "herb": 15302, + "mrw": 15303, + "café": 15304, + "captures": 15305, + "notre": 15306, + "triumph": 15307, + "pancakes": 15308, + "cumber": 15309, + "spike": 15310, + "dation": 15311, + "bigg": 15312, + "sper": 15313, + "critical": 15314, + "amal": 15315, + "tooth": 15316, + "founding": 15317, + "astro": 15318, + "'#": 15319, + "quantum": 15320, + "thames": 15321, + "unc": 15322, + "pride": 15323, + "airbus": 15324, + "knocked": 15325, + "undefeated": 15326, + "mediterranean": 15327, + "calcu": 15328, + "clown": 15329, + "sensor": 15330, + "hammer": 15331, + "forgive": 15332, + "cushi": 15333, + "berry": 15334, + "majestic": 15335, + "elect": 15336, + "politan": 15337, + "gta": 15338, + "kari": 15339, + "burke": 15340, + "seahawks": 15341, + "volkswagen": 15342, + "rei": 15343, + "landscapes": 15344, + "casu": 15345, + "grandfather": 15346, + "listened": 15347, + "//": 15348, + "startrek": 15349, + "rainfall": 15350, + "furry": 15351, + "vier": 15352, + "stark": 15353, + "rifle": 15354, + "ffa": 15355, + "leges": 15356, + "hillaryclinton": 15357, + "minus": 15358, + "correctly": 15359, + "architectural": 15360, + "prece": 15361, + "upside": 15362, + "boxer": 15363, + "ðŁĻĮðŁı¼": 15364, + "isai": 15365, + "det": 15366, + "provo": 15367, + "tissue": 15368, + "spooky": 15369, + "veled": 15370, + "recon": 15371, + "prospects": 15372, + "quebec": 15373, + "âļ«": 15374, + "igno": 15375, + "anatomy": 15376, + "shapes": 15377, + "wp": 15378, + "pinterest": 15379, + "hore": 15380, + "anes": 15381, + "pickup": 15382, + "tip": 15383, + "pradesh": 15384, + "hugh": 15385, + "coe": 15386, + "pok": 15387, + "grammy": 15388, + "wellington": 15389, + "stigate": 15390, + "righ": 15391, + "leap": 15392, + "kingston": 15393, + "scenic": 15394, + "gosh": 15395, + "vani": 15396, + "aug": 15397, + "sary": 15398, + "zier": 15399, + "bureau": 15400, + "linson": 15401, + "conte": 15402, + "fragr": 15403, + "allan": 15404, + "gaw": 15405, + "lana": 15406, + "collision": 15407, + "surveill": 15408, + "renais": 15409, + "arrange": 15410, + "sali": 15411, + "doin": 15412, + "brance": 15413, + "brendan": 15414, + "ourse": 15415, + "incoming": 15416, + "suspension": 15417, + "à´": 15418, + "lla": 15419, + "educators": 15420, + "intri": 15421, + "dae": 15422, + "biography": 15423, + "bulgar": 15424, + "villain": 15425, + "gothic": 15426, + "rwanda": 15427, + "ew": 15428, + "mayor": 15429, + "meetup": 15430, + "democrat": 15431, + "morgan": 15432, + "sudden": 15433, + "tesco": 15434, + "carrot": 15435, + "bomber": 15436, + "mckin": 15437, + "rene": 15438, + "funday": 15439, + "agricultural": 15440, + "hahah": 15441, + "showtime": 15442, + "forming": 15443, + "cola": 15444, + "scorpi": 15445, + "quote": 15446, + "poppy": 15447, + "slife": 15448, + "daz": 15449, + "tub": 15450, + "nen": 15451, + "mot": 15452, + "ðŁĺ»": 15453, + "sore": 15454, + "elderly": 15455, + "ove": 15456, + "skinny": 15457, + "umi": 15458, + "anco": 15459, + "manship": 15460, + "were": 15461, + "gv": 15462, + "kah": 15463, + "folding": 15464, + "neat": 15465, + "samantha": 15466, + "danish": 15467, + "ukrain": 15468, + "humidity": 15469, + 
"nutri": 15470, + "jakarta": 15471, + "candles": 15472, + "oooooooo": 15473, + "atile": 15474, + "strength": 15475, + "ibra": 15476, + "bapti": 15477, + "charleston": 15478, + "frames": 15479, + "girls": 15480, + "clearing": 15481, + "gluten": 15482, + "##": 15483, + "supernatural": 15484, + "jubi": 15485, + "phone": 15486, + "hein": 15487, + "drun": 15488, + "leak": 15489, + "investor": 15490, + "yer": 15491, + "domain": 15492, + "ballroom": 15493, + "mish": 15494, + "appli": 15495, + "offshore": 15496, + "blaze": 15497, + "doro": 15498, + "âĺķï¸ı": 15499, + "winery": 15500, + "sharif": 15501, + "adore": 15502, + "nir": 15503, + "safer": 15504, + "sigh": 15505, + "ascri": 15506, + "strongly": 15507, + "tracy": 15508, + "cker": 15509, + "oll": 15510, + "faithful": 15511, + "eyed": 15512, + "delightful": 15513, + "vism": 15514, + "karnataka": 15515, + "titan": 15516, + "whar": 15517, + "jerseys": 15518, + "refur": 15519, + "heaven": 15520, + "grip": 15521, + "panama": 15522, + "preli": 15523, + "gluten": 15524, + "odd": 15525, + "content": 15526, + "ponti": 15527, + "tioning": 15528, + "ecommerce": 15529, + "federation": 15530, + "flawless": 15531, + "gear": 15532, + "tires": 15533, + "byr": 15534, + "police": 15535, + "cuban": 15536, + "tributes": 15537, + "ticul": 15538, + "churches": 15539, + "nursery": 15540, + "diaries": 15541, + "museums": 15542, + "snapped": 15543, + "ivan": 15544, + "wight": 15545, + "tourists": 15546, + "ramadan": 15547, + "trent": 15548, + "prophet": 15549, + "wondered": 15550, + "focusing": 15551, + "hid": 15552, + "icons": 15553, + "iq": 15554, + "ambulance": 15555, + "pist": 15556, + "funniest": 15557, + "timeless": 15558, + "srilan": 15559, + "buys": 15560, + "kids": 15561, + "colourful": 15562, + "ashi": 15563, + "chir": 15564, + "mum": 15565, + "ðŁĵļ": 15566, + "letter": 15567, + "xen": 15568, + "reuters": 15569, + "preserve": 15570, + "inting": 15571, + "step": 15572, + "fuji": 15573, + "univer": 15574, + "iu": 15575, + "showdown": 15576, + "poems": 15577, + "surveillance": 15578, + "suspected": 15579, + "tae": 15580, + "solving": 15581, + "tomb": 15582, + "mothersday": 15583, + "carpen": 15584, + "recruit": 15585, + "pilots": 15586, + "broc": 15587, + "mixing": 15588, + "fridays": 15589, + "tyr": 15590, + "representatives": 15591, + "trapped": 15592, + "abdul": 15593, + "freestyle": 15594, + "cluster": 15595, + "âļłï¸ı": 15596, + "kd": 15597, + "skill": 15598, + "pitt": 15599, + "exo": 15600, + "commerci": 15601, + "museum": 15602, + "locally": 15603, + "gina": 15604, + "nobel": 15605, + "immune": 15606, + "frac": 15607, + "capsu": 15608, + "mained": 15609, + "attempts": 15610, + "bulldog": 15611, + "bespoke": 15612, + "singers": 15613, + "spelling": 15614, + "segment": 15615, + "natures": 15616, + "tick": 15617, + "lipstick": 15618, + "cleaner": 15619, + "gettable": 15620, + "precision": 15621, + "â̼ï¸ı": 15622, + "thood": 15623, + "reef": 15624, + "nope": 15625, + "billy": 15626, + "digi": 15627, + "musi": 15628, + "rival": 15629, + "figured": 15630, + "tality": 15631, + "sunny": 15632, + "berk": 15633, + "awww": 15634, + "awaits": 15635, + "unreal": 15636, + "copen": 15637, + "asylum": 15638, + "exotic": 15639, + "buen": 15640, + "mock": 15641, + "enable": 15642, + "archy": 15643, + "fra": 15644, + "plastic": 15645, + "almond": 15646, + "ampli": 15647, + "displays": 15648, + "abbott": 15649, + "sme": 15650, + "xp": 15651, + "ðŁĻĥ": 15652, + "graphic": 15653, + "ived": 15654, + "mara": 15655, + "caution": 15656, + "leaks": 15657, + "enberg": 15658, + 
"ulu": 15659, + "unicorn": 15660, + "cannon": 15661, + "apprentic": 15662, + "ðŁĺĺðŁĺĺ": 15663, + "bball": 15664, + "willow": 15665, + "atics": 15666, + "amas": 15667, + "manufacturer": 15668, + "campaigns": 15669, + "porters": 15670, + "floors": 15671, + "lsu": 15672, + "type": 15673, + "kej": 15674, + "honorary": 15675, + "itim": 15676, + "tole": 15677, + "minecraft": 15678, + "dx": 15679, + "mash": 15680, + "rio": 15681, + "consequences": 15682, + "ronald": 15683, + "gossi": 15684, + "suffolk": 15685, + "muse": 15686, + "rbi": 15687, + "livemusic": 15688, + "ivan": 15689, + "ðŁİ¤": 15690, + "leu": 15691, + "patriot": 15692, + "manit": 15693, + "lanca": 15694, + "homedecor": 15695, + "dear": 15696, + "sigma": 15697, + "tide": 15698, + "strings": 15699, + "vita": 15700, + "sequel": 15701, + "tryna": 15702, + "investigate": 15703, + "boris": 15704, + "vegan": 15705, + "barrier": 15706, + "mindfulness": 15707, + "webb": 15708, + "hustle": 15709, + "inda": 15710, + "tanzania": 15711, + "stray": 15712, + "texas": 15713, + "cag": 15714, + "diagnosis": 15715, + "woman": 15716, + "gw": 15717, + "obsession": 15718, + "lative": 15719, + "nufc": 15720, + "flynn": 15721, + "momentum": 15722, + "sofa": 15723, + "wald": 15724, + "vegetable": 15725, + "tucker": 15726, + "supper": 15727, + "seab": 15728, + "arro": 15729, + "seag": 15730, + "venting": 15731, + "councill": 15732, + "splat": 15733, + "calcul": 15734, + "..#": 15735, + "comfy": 15736, + "odisha": 15737, + "stopp": 15738, + "warfare": 15739, + "caes": 15740, + "à¨": 15741, + "coy": 15742, + "priceless": 15743, + "insec": 15744, + "ðŁĺĽ": 15745, + "controls": 15746, + "empowerment": 15747, + "datascience": 15748, + "perpe": 15749, + "genic": 15750, + "eres": 15751, + "trudeau": 15752, + "mano": 15753, + "slavery": 15754, + "expanding": 15755, + "mahe": 15756, + "failing": 15757, + "saga": 15758, + "photographs": 15759, + "crest": 15760, + "reon": 15761, + "surfing": 15762, + "hie": 15763, + "ðŁįĢ": 15764, + "jae": 15765, + "fellows": 15766, + "southampton": 15767, + "solom": 15768, + "cester": 15769, + "tability": 15770, + "horn": 15771, + "sect": 15772, + "hee": 15773, + "coleman": 15774, + "atlas": 15775, + "explorer": 15776, + "consultation": 15777, + "copyright": 15778, + "organizing": 15779, + "denied": 15780, + "monkeys": 15781, + "noodles": 15782, + "bris": 15783, + "flor": 15784, + "dough": 15785, + "bonds": 15786, + "shocked": 15787, + "ecosystem": 15788, + "carefully": 15789, + "wm": 15790, + "apartments": 15791, + "curve": 15792, + "sandiego": 15793, + "mustard": 15794, + "commen": 15795, + "ceremon": 15796, + "ech": 15797, + "ruth": 15798, + "ðŁĻĮðŁı»": 15799, + "hawai": 15800, + "filmed": 15801, + "tear": 15802, + "asingly": 15803, + "cair": 15804, + "watt": 15805, + "instrument": 15806, + "outta": 15807, + "yeol": 15808, + "riverside": 15809, + "ë°": 15810, + ".:": 15811, + "norwich": 15812, + "alog": 15813, + "migrants": 15814, + "newman": 15815, + "ride": 15816, + "sprink": 15817, + "targeting": 15818, + "believe": 15819, + "torch": 15820, + "reflects": 15821, + "permission": 15822, + "ffman": 15823, + "enemies": 15824, + "basics": 15825, + "seized": 15826, + "sundays": 15827, + "lei": 15828, + "hassan": 15829, + "endo": 15830, + "hc": 15831, + "stad": 15832, + "lements": 15833, + "kkkk": 15834, + "nano": 15835, + "shark": 15836, + "mana": 15837, + "onic": 15838, + "treatments": 15839, + "early": 15840, + "collaborative": 15841, + "shuttle": 15842, + "branches": 15843, + "misses": 15844, + "mainedcm": 15845, + "apers": 15846, 
+ "kyle": 15847, + "carrie": 15848, + "leisure": 15849, + "shet": 15850, + "birding": 15851, + "advances": 15852, + "ðŁĵĿ": 15853, + "popular": 15854, + "diane": 15855, + "abe": 15856, + "rewar": 15857, + "neighbour": 15858, + "kpop": 15859, + "remembrance": 15860, + "playground": 15861, + "rub": 15862, + "krishna": 15863, + "ebola": 15864, + "inquiry": 15865, + "epa": 15866, + "lumin": 15867, + "organisation": 15868, + "abraham": 15869, + "normally": 15870, + "preten": 15871, + "janet": 15872, + "wt": 15873, + "ðŁĴİ": 15874, + "encouraging": 15875, + "astic": 15876, + "bump": 15877, + "sydney": 15878, + "sz": 15879, + "ssss": 15880, + "garrett": 15881, + "ðŁĵ»": 15882, + "consulting": 15883, + "romania": 15884, + "spotting": 15885, + "chancellor": 15886, + "arma": 15887, + "prestigious": 15888, + "ðĿIJ": 15889, + "tad": 15890, + "cryst": 15891, + "competit": 15892, + "ratio": 15893, + "cataly": 15894, + "brow": 15895, + "jur": 15896, + "viking": 15897, + "commute": 15898, + "yday": 15899, + "layers": 15900, + "dumb": 15901, + "escal": 15902, + "genocide": 15903, + "fill": 15904, + "gupta": 15905, + "stepping": 15906, + "sei": 15907, + "foto": 15908, + "wildcats": 15909, + "coli": 15910, + "project": 15911, + "earnings": 15912, + "str": 15913, + "geons": 15914, + "completion": 15915, + "bm": 15916, + "decorated": 15917, + "crawford": 15918, + "afghan": 15919, + "scare": 15920, + "visibility": 15921, + "hib": 15922, + "direction": 15923, + "stroll": 15924, + "christina": 15925, + "alternate": 15926, + "clare": 15927, + "stylist": 15928, + "behold": 15929, + "sance": 15930, + "leopard": 15931, + "acquired": 15932, + "narrative": 15933, + "ashi": 15934, + "thea": 15935, + "????": 15936, + "peas": 15937, + "atch": 15938, + "slides": 15939, + "leen": 15940, + "renewable": 15941, + "english": 15942, + "quir": 15943, + "coaster": 15944, + "rx": 15945, + "fools": 15946, + "matchday": 15947, + "mism": 15948, + "amazing": 15949, + "zig": 15950, + "keting": 15951, + "wont": 15952, + "towel": 15953, + "diab": 15954, + "stake": 15955, + "nm": 15956, + "melt": 15957, + "ethan": 15958, + "grape": 15959, + "politician": 15960, + "smen": 15961, + "íĺ": 15962, + "reo": 15963, + "weddings": 15964, + "catcher": 15965, + "oracle": 15966, + "memo": 15967, + "ðŁĮ´": 15968, + "eck": 15969, + "robbie": 15970, + "norwegian": 15971, + "operator": 15972, + "amor": 15973, + "sewing": 15974, + "jul": 15975, + "xie": 15976, + "uv": 15977, + "fifty": 15978, + "mega": 15979, + "tattoo": 15980, + "liberals": 15981, + "upri": 15982, + "trafficking": 15983, + "richardson": 15984, + "suv": 15985, + "kip": 15986, + "messy": 15987, + "tremendous": 15988, + "glou": 15989, + "courtney": 15990, + "lad": 15991, + "stereo": 15992, + "myers": 15993, + "idio": 15994, + "^_^": 15995, + "manning": 15996, + "dye": 15997, + "wd": 15998, + "throne": 15999, + "junk": 16000, + "asu": 16001, + "provincial": 16002, + "kook": 16003, + "wrc": 16004, + "fineart": 16005, + "hampshire": 16006, + "renaissance": 16007, + "bred": 16008, + "fallout": 16009, + "sj": 16010, + "snl": 16011, + "alam": 16012, + "torture": 16013, + "fyi": 16014, + "shines": 16015, + "paw": 16016, + "char": 16017, + "henry": 16018, + "crow": 16019, + "acious": 16020, + "dian": 16021, + "paige": 16022, + "bare": 16023, + "stockholm": 16024, + "scenery": 16025, + "ðŁĩ·": 16026, + "jeffrey": 16027, + "push": 16028, + "decoration": 16029, + "ned": 16030, + "cute": 16031, + "brigade": 16032, + "lavender": 16033, + "invites": 16034, + "esports": 16035, + "voir": 16036, + "dried": 
16037, + "transpl": 16038, + "surgeon": 16039, + "novels": 16040, + "pulls": 16041, + "sony": 16042, + "lunar": 16043, + "mane": 16044, + "ivy": 16045, + "frustr": 16046, + "dorset": 16047, + "sai": 16048, + "torres": 16049, + "ssion": 16050, + "shutdown": 16051, + "suggestions": 16052, + "writing": 16053, + "eo": 16054, + "battlefield": 16055, + "uga": 16056, + "ðŁIJ¾": 16057, + "vacu": 16058, + "splac": 16059, + "git": 16060, + "ug": 16061, + "highland": 16062, + "%)": 16063, + "mermaid": 16064, + "sacramento": 16065, + "tails": 16066, + "pw": 16067, + "kah": 16068, + "tell": 16069, + "enhanced": 16070, + "ìķ": 16071, + "auckland": 16072, + "cruel": 16073, + "ðŁ¤©": 16074, + "audre": 16075, + "sailor": 16076, + "grammar": 16077, + "glove": 16078, + "deon": 16079, + "inflam": 16080, + "freshly": 16081, + "kell": 16082, + "zip": 16083, + "christie": 16084, + "mild": 16085, + "dixon": 16086, + "instructor": 16087, + "gence": 16088, + "ãħł": 16089, + "subjec": 16090, + "constitutional": 16091, + "crowds": 16092, + "invisible": 16093, + "ruins": 16094, + "dak": 16095, + "sip": 16096, + "plaque": 16097, + "pouring": 16098, + "complex": 16099, + "zine": 16100, + "stead": 16101, + "flet": 16102, + "transmission": 16103, + "loway": 16104, + "arun": 16105, + "increasingly": 16106, + "aud": 16107, + "transparen": 16108, + "crowned": 16109, + "scoun": 16110, + "blizzard": 16111, + "luxu": 16112, + "fiers": 16113, + "achievements": 16114, + "hunters": 16115, + "rocked": 16116, + "basin": 16117, + "violet": 16118, + "proves": 16119, + "achieving": 16120, + "prosper": 16121, + "sega": 16122, + "float": 16123, + "vian": 16124, + "xiv": 16125, + "polic": 16126, + "tura": 16127, + "approximately": 16128, + "wanderlust": 16129, + "keepers": 16130, + "getaway": 16131, + "cod": 16132, + "polis": 16133, + "bryan": 16134, + "colts": 16135, + "talents": 16136, + "yogur": 16137, + "glutenfree": 16138, + "wrist": 16139, + "gry": 16140, + "czech": 16141, + "ðŁİĪ": 16142, + "eville": 16143, + "ðŁıĪ": 16144, + "tox": 16145, + "daniels": 16146, + "amer": 16147, + "bids": 16148, + "weareone": 16149, + "metab": 16150, + "gt": 16151, + "boyz": 16152, + "pdx": 16153, + "possession": 16154, + "pushed": 16155, + "shrine": 16156, + "realistic": 16157, + "trigger": 16158, + "navi": 16159, + "rumors": 16160, + "naf": 16161, + "jenkins": 16162, + "trun": 16163, + "communi": 16164, + "ÃĹ": 16165, + "gamers": 16166, + "armor": 16167, + "mohammed": 16168, + "balcony": 16169, + "yah": 16170, + "strongest": 16171, + "rhythm": 16172, + "unforgettable": 16173, + "kp": 16174, + "hobb": 16175, + "custody": 16176, + "gregor": 16177, + "rita": 16178, + "aesthetic": 16179, + "ilation": 16180, + "sponsoring": 16181, + "nay": 16182, + "kidnapp": 16183, + "shs": 16184, + "rajas": 16185, + "meg": 16186, + "significantly": 16187, + "buttons": 16188, + "lac": 16189, + "versions": 16190, + "essentials": 16191, + "opinions": 16192, + "kro": 16193, + "dprinting": 16194, + "widely": 16195, + "dk": 16196, + "uran": 16197, + "yal": 16198, + "requested": 16199, + "cn": 16200, + "curric": 16201, + "plum": 16202, + "grun": 16203, + "vm": 16204, + "devon": 16205, + "myo": 16206, + "relation": 16207, + "juventus": 16208, + "rouge": 16209, + "minority": 16210, + "mines": 16211, + "jupiter": 16212, + "nine": 16213, + "oxygen": 16214, + "frankie": 16215, + "unesco": 16216, + "fabric": 16217, + "disgusting": 16218, + "salman": 16219, + "detection": 16220, + "lanka": 16221, + "dac": 16222, + "ðŁĩ«ðŁĩ·": 16223, + "argument": 16224, + "shelves": 16225, + 
"celtics": 16226, + "roberto": 16227, + "pigs": 16228, + "hedge": 16229, + "faul": 16230, + "powering": 16231, + "butterflies": 16232, + "fir": 16233, + "remake": 16234, + "atti": 16235, + "como": 16236, + "empha": 16237, + "kendall": 16238, + "pokemon": 16239, + "seating": 16240, + "dans": 16241, + "baldwin": 16242, + "ðŁij»": 16243, + "leslie": 16244, + "onedirection": 16245, + "timber": 16246, + "iman": 16247, + "font": 16248, + "eder": 16249, + "dion": 16250, + "steph": 16251, + "format": 16252, + "gregory": 16253, + "prop": 16254, + "hex": 16255, + "ruin": 16256, + "sory": 16257, + "infer": 16258, + "naw": 16259, + "barak": 16260, + "sdgs": 16261, + "karao": 16262, + "lush": 16263, + "vander": 16264, + "endent": 16265, + "gis": 16266, + "afro": 16267, + "soccer": 16268, + "ayan": 16269, + "tuni": 16270, + "lung": 16271, + "dayof": 16272, + "alexa": 16273, + "marath": 16274, + "addicted": 16275, + "agile": 16276, + "hygi": 16277, + "lightweight": 16278, + "ì§": 16279, + "mandela": 16280, + "joey": 16281, + "ancy": 16282, + "hum": 16283, + "bir": 16284, + "memorial": 16285, + "jimin": 16286, + "ginger": 16287, + "vak": 16288, + "javascri": 16289, + "crops": 16290, + "origins": 16291, + "dari": 16292, + "piper": 16293, + "import": 16294, + "aggressive": 16295, + "prediction": 16296, + "repairs": 16297, + "cracker": 16298, + "voyage": 16299, + "nike": 16300, + "mummy": 16301, + "linkedin": 16302, + "countryside": 16303, + "border": 16304, + "glass": 16305, + "pert": 16306, + "sals": 16307, + "shoe": 16308, + "autographed": 16309, + "walnut": 16310, + "collegi": 16311, + "salary": 16312, + "pairing": 16313, + "ðŁĮ¸": 16314, + "cathol": 16315, + "sweethe": 16316, + "defeats": 16317, + "strengthen": 16318, + "rooftop": 16319, + "improvements": 16320, + "barriers": 16321, + "uru": 16322, + "tally": 16323, + "ruled": 16324, + "ðŁĨļ": 16325, + "naija": 16326, + "emoji": 16327, + "percent": 16328, + "gio": 16329, + "probs": 16330, + "once": 16331, + "admits": 16332, + "paths": 16333, + "liar": 16334, + "daytona": 16335, + "peters": 16336, + "cali": 16337, + "calli": 16338, + "mug": 16339, + "osa": 16340, + "aph": 16341, + "aby": 16342, + "hyde": 16343, + "ethnic": 16344, + "plains": 16345, + "olf": 16346, + "hahahahaha": 16347, + "holic": 16348, + "?!?!": 16349, + "subli": 16350, + "blacks": 16351, + "mot": 16352, + "ghton": 16353, + "lovin": 16354, + "brent": 16355, + "baru": 16356, + "lati": 16357, + "dew": 16358, + "ateau": 16359, + "qa": 16360, + "painful": 16361, + "busters": 16362, + "static": 16363, + "ðŁĩ¨ðŁĩ¦": 16364, + "notebook": 16365, + "outfits": 16366, + "sies": 16367, + "rf": 16368, + "floods": 16369, + "ÑĢ": 16370, + "throat": 16371, + "suici": 16372, + "rovers": 16373, + "bengal": 16374, + "prepares": 16375, + "blog": 16376, + "miniature": 16377, + "ب": 16378, + "amphi": 16379, + "comb": 16380, + "rsp": 16381, + "intimate": 16382, + "greene": 16383, + "Ìĩ": 16384, + "altar": 16385, + "surgical": 16386, + "vessel": 16387, + "...?": 16388, + "gavin": 16389, + "gator": 16390, + "threatened": 16391, + "zar": 16392, + "robbery": 16393, + "dier": 16394, + "promoted": 16395, + "yg": 16396, + "xs": 16397, + "subs": 16398, + "interviewing": 16399, + "threatening": 16400, + "dozen": 16401, + "meado": 16402, + "waterfall": 16403, + "nintendoswitch": 16404, + "calum": 16405, + "ministers": 16406, + "drop": 16407, + "universities": 16408, + "warned": 16409, + "tactics": 16410, + "ðŁĩ²": 16411, + "refuse": 16412, + "adju": 16413, + "vast": 16414, + "ðŁĺ´": 16415, + "mcfc": 16416, + "libya": 
16417, + "nofilter": 16418, + "distributed": 16419, + "reser": 16420, + "ronnie": 16421, + "deco": 16422, + "javascript": 16423, + "monk": 16424, + "interests": 16425, + "flex": 16426, + "martha": 16427, + "sties": 16428, + "ood": 16429, + "ðŁ¤£ðŁ¤£": 16430, + "eun": 16431, + "bali": 16432, + "gomez": 16433, + "stimul": 16434, + "moderate": 16435, + "dity": 16436, + "iris": 16437, + "straw": 16438, + "consistent": 16439, + "directions": 16440, + "adopt": 16441, + "salsa": 16442, + "croo": 16443, + "recovered": 16444, + "blackfriday": 16445, + "lancaster": 16446, + "accept": 16447, + "weareoneexo": 16448, + "builds": 16449, + "freeman": 16450, + "airplane": 16451, + "dition": 16452, + "belong": 16453, + "jamie": 16454, + "pitching": 16455, + "lif": 16456, + "omin": 16457, + "crispy": 16458, + "prepping": 16459, + "veg": 16460, + "chang": 16461, + "accomplished": 16462, + "gracias": 16463, + "dolphin": 16464, + "elector": 16465, + "culinary": 16466, + "superbowl": 16467, + "wala": 16468, + "pursuit": 16469, + "blackberry": 16470, + "bean": 16471, + "cardinal": 16472, + "proved": 16473, + "immigrant": 16474, + "strictly": 16475, + "holocaust": 16476, + "passage": 16477, + "haus": 16478, + "coup": 16479, + "purse": 16480, + "harass": 16481, + "<<": 16482, + "leed": 16483, + "adobe": 16484, + "stad": 16485, + "legislat": 16486, + "parked": 16487, + "priyan": 16488, + "silva": 16489, + "krist": 16490, + "sthe": 16491, + "funky": 16492, + "iga": 16493, + "settlement": 16494, + "phs": 16495, + "tmrw": 16496, + "stressed": 16497, + "hunt": 16498, + "hockey": 16499, + "treasures": 16500, + "chambers": 16501, + "olu": 16502, + "hut": 16503, + "marley": 16504, + "texture": 16505, + "wilderness": 16506, + "mming": 16507, + "potentially": 16508, + "omaha": 16509, + "judy": 16510, + "toes": 16511, + "spoiler": 16512, + "distinguished": 16513, + "felix": 16514, + "ahu": 16515, + "recommendations": 16516, + "zombies": 16517, + "hitler": 16518, + "triple": 16519, + "collapse": 16520, + "motivated": 16521, + "ultimat": 16522, + "ggling": 16523, + "soy": 16524, + "cigar": 16525, + "foren": 16526, + "vineyard": 16527, + "glitter": 16528, + "findings": 16529, + "colonial": 16530, + "hunter": 16531, + "erik": 16532, + "dens": 16533, + "beetle": 16534, + "lotte": 16535, + "subtle": 16536, + "smatter": 16537, + "trusted": 16538, + "experimental": 16539, + "naments": 16540, + "ðŁĺĨ": 16541, + "region": 16542, + "acquisition": 16543, + "breeding": 16544, + "quarterback": 16545, + "amreading": 16546, + "ootd": 16547, + "rude": 16548, + "initiatives": 16549, + "stout": 16550, + "hyung": 16551, + "outcome": 16552, + "alfred": 16553, + "mics": 16554, + "expertise": 16555, + "bacteria": 16556, + "penguins": 16557, + "jumper": 16558, + "valencia": 16559, + "bark": 16560, + "ingday": 16561, + "sellers": 16562, + "contracts": 16563, + "houston": 16564, + "commissioned": 16565, + "adaptation": 16566, + "swansea": 16567, + "santiago": 16568, + "commonwealth": 16569, + "judging": 16570, + "submission": 16571, + "scorer": 16572, + "tommy": 16573, + "ño": 16574, + "exquis": 16575, + "filing": 16576, + "explanation": 16577, + "allison": 16578, + "wembley": 16579, + "ridge": 16580, + "chevy": 16581, + "santos": 16582, + "ownership": 16583, + "cognitive": 16584, + "favourites": 16585, + "shed": 16586, + "philanthro": 16587, + "deleted": 16588, + "godd": 16589, + "snor": 16590, + "guidelines": 16591, + "ffing": 16592, + "jeep": 16593, + "clips": 16594, + "swamp": 16595, + "anor": 16596, + "guild": 16597, + "bolton": 16598, + 
"springfield": 16599, + "municipal": 16600, + "goalkeeper": 16601, + "yeon": 16602, + "ðŁĺįðŁĺįðŁĺįðŁĺį": 16603, + "ãħĭãħĭ": 16604, + "waterfront": 16605, + "grave": 16606, + "contemporary": 16607, + "arity": 16608, + "ÃŃa": 16609, + "sleeps": 16610, + "syrup": 16611, + "alam": 16612, + "pire": 16613, + "coyo": 16614, + "motogp": 16615, + "tyson": 16616, + "kejri": 16617, + "circul": 16618, + "singly": 16619, + "crunch": 16620, + "complicated": 16621, + "nostalgia": 16622, + "kop": 16623, + "move": 16624, + "kale": 16625, + "macro": 16626, + "midwest": 16627, + "hans": 16628, + "tribal": 16629, + "nude": 16630, + "à¯į": 16631, + "beyonce": 16632, + "congratulate": 16633, + "cater": 16634, + "league": 16635, + "ðŁĻĬ": 16636, + "ladder": 16637, + "crashed": 16638, + "technic": 16639, + "karaoke": 16640, + "harassment": 16641, + "rots": 16642, + "experiencing": 16643, + "kristen": 16644, + "ðŁĩ³": 16645, + "ð٤Ĺ": 16646, + "reflections": 16647, + "guinness": 16648, + "illustrator": 16649, + "ðŁĻıðŁı»": 16650, + "center": 16651, + "narrow": 16652, + "commons": 16653, + "regulations": 16654, + "ÙĨ": 16655, + "harm": 16656, + "croft": 16657, + "cussion": 16658, + "hongkong": 16659, + "stical": 16660, + "internship": 16661, + "zoe": 16662, + "chop": 16663, + "hoods": 16664, + "estimated": 16665, + "batteries": 16666, + "berkeley": 16667, + "smoothie": 16668, + "shaun": 16669, + "cros": 16670, + "~~": 16671, + "campe": 16672, + "hump": 16673, + "bg": 16674, + "prototype": 16675, + "click": 16676, + "shawn": 16677, + "reviewed": 16678, + "templ": 16679, + "pf": 16680, + "jedi": 16681, + "blogs": 16682, + "raymond": 16683, + "asth": 16684, + "bah": 16685, + "avail": 16686, + "scotch": 16687, + "leafs": 16688, + "nikki": 16689, + "tok": 16690, + "hollow": 16691, + "urges": 16692, + "oft": 16693, + "unlike": 16694, + "latin": 16695, + "ue": 16696, + "catering": 16697, + "mili": 16698, + "alternati": 16699, + "maver": 16700, + "и": 16701, + "agle": 16702, + "preorder": 16703, + "lux": 16704, + "cucu": 16705, + "ðŁijıðŁijı": 16706, + "tart": 16707, + "âĿ¤âĿ¤âĿ¤": 16708, + "arabic": 16709, + "rapidly": 16710, + "arrang": 16711, + "allen": 16712, + "traveltuesday": 16713, + "paws": 16714, + "flows": 16715, + "stability": 16716, + "fluid": 16717, + "capp": 16718, + "canberra": 16719, + "uuuu": 16720, + "spani": 16721, + "demonstration": 16722, + "mla": 16723, + "placement": 16724, + "mw": 16725, + "presidents": 16726, + "awesom": 16727, + "beverly": 16728, + "anist": 16729, + "neal": 16730, + "fathersday": 16731, + "referendum": 16732, + "lahore": 16733, + "oaks": 16734, + "debbie": 16735, + "halfway": 16736, + "ghosts": 16737, + "debor": 16738, + "matthews": 16739, + "fiat": 16740, + "tfw": 16741, + "presen": 16742, + "robi": 16743, + "ded": 16744, + "brock": 16745, + "laughed": 16746, + "amounts": 16747, + "bamboo": 16748, + "kindergarten": 16749, + "eaten": 16750, + "mtvhottest": 16751, + "breakout": 16752, + "usic": 16753, + "fraser": 16754, + "legislative": 16755, + "pang": 16756, + "module": 16757, + "sammy": 16758, + "gover": 16759, + "earns": 16760, + "expedition": 16761, + "garh": 16762, + "concepts": 16763, + "charlie": 16764, + "lava": 16765, + "bachelor": 16766, + "veggies": 16767, + "determine": 16768, + "ellie": 16769, + "unlocked": 16770, + "fruit": 16771, + "dalla": 16772, + "coupe": 16773, + "washington": 16774, + "deposit": 16775, + "ivory": 16776, + "paula": 16777, + "chicag": 16778, + "gucci": 16779, + "ðŁİĥ": 16780, + "cultiv": 16781, + "pierce": 16782, + "lifted": 16783, + "stumb": 
16784, + "recover": 16785, + "muscles": 16786, + "conducting": 16787, + "cbs": 16788, + "mclaren": 16789, + "sophia": 16790, + "cellu": 16791, + "oceans": 16792, + "uploaded": 16793, + "gameplay": 16794, + "maldives": 16795, + "kimber": 16796, + "avoi": 16797, + "racer": 16798, + "caine": 16799, + "cavs": 16800, + "hana": 16801, + "liga": 16802, + "raven": 16803, + "intervention": 16804, + "inauguration": 16805, + "ooh": 16806, + "attraction": 16807, + "merchandise": 16808, + "tunein": 16809, + "liking": 16810, + "juniors": 16811, + "intended": 16812, + "attacking": 16813, + "aquarium": 16814, + "iwd": 16815, + "components": 16816, + "suring": 16817, + "centu": 16818, + "yogurt": 16819, + "ðŁıĥ": 16820, + "showroom": 16821, + "optical": 16822, + "tyour": 16823, + "judge": 16824, + "yield": 16825, + "anto": 16826, + "plc": 16827, + "transparency": 16828, + "recycled": 16829, + "chief": 16830, + "arom": 16831, + "ambassadors": 16832, + "planet": 16833, + "âĿĦï¸ı": 16834, + "omed": 16835, + "vanessa": 16836, + "court": 16837, + "margar": 16838, + "haley": 16839, + "vr": 16840, + "regina": 16841, + "pdates": 16842, + "hispan": 16843, + "livestream": 16844, + "âģ£": 16845, + "yahoo": 16846, + "galla": 16847, + "secured": 16848, + "wir": 16849, + "beneath": 16850, + "offl": 16851, + "nil": 16852, + "amb": 16853, + "yeg": 16854, + "outlet": 16855, + "ute": 16856, + "peep": 16857, + "lindsay": 16858, + "bentley": 16859, + "...!": 16860, + "heel": 16861, + "trilogy": 16862, + "vos": 16863, + "tyre": 16864, + "therefore": 16865, + "toronto": 16866, + "abi": 16867, + "simpli": 16868, + "jae": 16869, + "extensive": 16870, + "elephants": 16871, + "sor": 16872, + "orientation": 16873, + "impeach": 16874, + "replay": 16875, + "constructed": 16876, + "peterson": 16877, + "pais": 16878, + "ported": 16879, + "customs": 16880, + "collap": 16881, + "adu": 16882, + "highlands": 16883, + "salem": 16884, + "shelby": 16885, + "kovic": 16886, + "strain": 16887, + "rosie": 16888, + "senators": 16889, + "snaps": 16890, + "bobb": 16891, + "suzuki": 16892, + "blades": 16893, + "kp": 16894, + "lolo": 16895, + "generate": 16896, + "sight": 16897, + "mae": 16898, + "structural": 16899, + "predict": 16900, + "jumped": 16901, + "ahmad": 16902, + "sung": 16903, + "justice": 16904, + "glam": 16905, + "volvo": 16906, + "jubilee": 16907, + "detention": 16908, + "losses": 16909, + "puri": 16910, + "everytime": 16911, + "а": 16912, + "rao": 16913, + "edge": 16914, + "limer": 16915, + "resemb": 16916, + "harold": 16917, + "retri": 16918, + "sacrific": 16919, + "surprises": 16920, + "amc": 16921, + "srilanka": 16922, + "barbie": 16923, + "mens": 16924, + "finn": 16925, + "ags": 16926, + "ukrainian": 16927, + "embrac": 16928, + "îIJ": 16929, + "flavors": 16930, + "homer": 16931, + "laure": 16932, + "outh": 16933, + "priced": 16934, + "verde": 16935, + "firm": 16936, + "ahs": 16937, + "cub": 16938, + "trey": 16939, + "paranor": 16940, + "profit": 16941, + "indv": 16942, + "whoa": 16943, + "harsh": 16944, + "alot": 16945, + "critics": 16946, + "hubby": 16947, + "figur": 16948, + "gira": 16949, + "castro": 16950, + "chanel": 16951, + "input": 16952, + "originals": 16953, + "tenant": 16954, + "yyyy": 16955, + "turers": 16956, + "lincoln": 16957, + "coon": 16958, + "learn": 16959, + "chou": 16960, + "acare": 16961, + "oles": 16962, + "diner": 16963, + "hyp": 16964, + "bizarre": 16965, + "mcr": 16966, + "letsgo": 16967, + "decorating": 16968, + "ðŁĮİ": 16969, + "alison": 16970, + "arvin": 16971, + "fd": 16972, + "rehab": 16973, + 
"mccarthy": 16974, + "lottery": 16975, + "dah": 16976, + "minneapolis": 16977, + "eligible": 16978, + "diagnosed": 16979, + "emerald": 16980, + "destinations": 16981, + "sans": 16982, + "ory": 16983, + "blazers": 16984, + "nv": 16985, + "bail": 16986, + "digitalart": 16987, + "noc": 16988, + "malta": 16989, + "solar": 16990, + "pipes": 16991, + "allegations": 16992, + "nock": 16993, + "pope": 16994, + "brid": 16995, + "premier": 16996, + "nx": 16997, + "presentations": 16998, + "efa": 16999, + "bows": 17000, + "valve": 17001, + "opponent": 17002, + "Įë": 17003, + "visual": 17004, + "ingle": 17005, + "categor": 17006, + "eter": 17007, + "pois": 17008, + "dani": 17009, + "attract": 17010, + "neutral": 17011, + "thene": 17012, + "crashes": 17013, + "freddie": 17014, + "utili": 17015, + "cst": 17016, + "awakening": 17017, + "sloven": 17018, + "qualify": 17019, + "proof": 17020, + "fairy": 17021, + "lev": 17022, + "freight": 17023, + "enjoys": 17024, + "cupcake": 17025, + "flavour": 17026, + "âķ": 17027, + "protective": 17028, + "ðŁijıðŁı»": 17029, + "isu": 17030, + "admir": 17031, + "hmmm": 17032, + "continuous": 17033, + "aires": 17034, + "raptors": 17035, + "showcasing": 17036, + "yuk": 17037, + "paste": 17038, + "follower": 17039, + "instructions": 17040, + "spru": 17041, + "@__": 17042, + "theo": 17043, + "debuts": 17044, + "vette": 17045, + "stow": 17046, + "esof": 17047, + "ached": 17048, + "sultan": 17049, + "sandwich": 17050, + "somalia": 17051, + "franco": 17052, + "carne": 17053, + "fluffy": 17054, + "alpine": 17055, + "jasmine": 17056, + "heated": 17057, + "violin": 17058, + "pless": 17059, + "divorce": 17060, + "performer": 17061, + "phies": 17062, + "portsm": 17063, + "dara": 17064, + "kirby": 17065, + "lop": 17066, + "chilli": 17067, + "forth": 17068, + "skype": 17069, + "ðŁĩ®ðŁĩ¹": 17070, + "celebrities": 17071, + "edy": 17072, + "vee": 17073, + "poison": 17074, + "eyel": 17075, + "grabs": 17076, + "ssic": 17077, + "uno": 17078, + "western": 17079, + "railroad": 17080, + "amer": 17081, + "numerous": 17082, + "sv": 17083, + "fow": 17084, + "fist": 17085, + "âĢĭ": 17086, + "requests": 17087, + "martial": 17088, + "emmy": 17089, + "acceptance": 17090, + "laura": 17091, + "ิ": 17092, + "erup": 17093, + "hyundai": 17094, + "outlander": 17095, + "utt": 17096, + "wrestle": 17097, + "espresso": 17098, + "demanding": 17099, + "gdp": 17100, + "geography": 17101, + "saskat": 17102, + "troll": 17103, + "confeder": 17104, + "sues": 17105, + "sem": 17106, + "bets": 17107, + "tful": 17108, + "tosh": 17109, + "teaches": 17110, + "coloured": 17111, + "galway": 17112, + "macy": 17113, + "disorders": 17114, + "bbcra": 17115, + "atem": 17116, + "fender": 17117, + "litter": 17118, + "esh": 17119, + "providers": 17120, + "renovation": 17121, + "nominate": 17122, + "psg": 17123, + "nominations": 17124, + "jenna": 17125, + "sharp": 17126, + "someday": 17127, + "zur": 17128, + "brains": 17129, + "cheshire": 17130, + "prey": 17131, + "hugo": 17132, + "¿": 17133, + "token": 17134, + "rv": 17135, + "carr": 17136, + "tactical": 17137, + "zelda": 17138, + "kayla": 17139, + "fernando": 17140, + "photographers": 17141, + "jour": 17142, + "umbrella": 17143, + "woody": 17144, + "congressman": 17145, + "dump": 17146, + "levy": 17147, + "juan": 17148, + "dazz": 17149, + "signals": 17150, + "lain": 17151, + "anu": 17152, + "michel": 17153, + "porch": 17154, + "alden": 17155, + "siblings": 17156, + "yale": 17157, + "peel": 17158, + "swick": 17159, + "ggin": 17160, + "llc": 17161, + "kale": 17162, + "scon": 17163, + 
"ild": 17164, + "patreon": 17165, + "reel": 17166, + "quin": 17167, + "witt": 17168, + "marty": 17169, + "moody": 17170, + "toni": 17171, + "dery": 17172, + "gators": 17173, + "specifically": 17174, + "ddin": 17175, + "lyon": 17176, + "trick": 17177, + "meadows": 17178, + "pj": 17179, + "borgh": 17180, + "vik": 17181, + "tur": 17182, + "bronx": 17183, + "puff": 17184, + "lantern": 17185, + "ðŁ¤¦": 17186, + "gently": 17187, + "bestie": 17188, + "fact": 17189, + "refused": 17190, + "fasci": 17191, + "mpy": 17192, + "ðŁĶµ": 17193, + "crossover": 17194, + "meadow": 17195, + "indianapolis": 17196, + "ducation": 17197, + "sley": 17198, + "loom": 17199, + "mixer": 17200, + "newmusic": 17201, + "filmmaker": 17202, + "prosperity": 17203, + "lim": 17204, + "weekend": 17205, + "creamy": 17206, + "neutr": 17207, + "luther": 17208, + "hv": 17209, + "northern": 17210, + "two": 17211, + "hra": 17212, + "catches": 17213, + "appearances": 17214, + "habit": 17215, + "kittens": 17216, + "nv": 17217, + "illac": 17218, + "infan": 17219, + "regardless": 17220, + "lizard": 17221, + "dunk": 17222, + "curtain": 17223, + "acom": 17224, + "intu": 17225, + "vez": 17226, + "emin": 17227, + "flats": 17228, + "calendars": 17229, + "empower": 17230, + "ruined": 17231, + "hungary": 17232, + "vid": 17233, + "wex": 17234, + "ulum": 17235, + "aberdeen": 17236, + "osa": 17237, + "kt": 17238, + "massi": 17239, + "seemed": 17240, + "sden": 17241, + "'?": 17242, + "telephone": 17243, + "defi": 17244, + "inspires": 17245, + "meow": 17246, + "zones": 17247, + "blind": 17248, + "ply": 17249, + "tucson": 17250, + "adventure": 17251, + "ged": 17252, + "oyster": 17253, + "ðŁijıðŁijıðŁijı": 17254, + "output": 17255, + "ttt": 17256, + "metallic": 17257, + "smash": 17258, + "ucla": 17259, + "scots": 17260, + "perfect": 17261, + "lucy": 17262, + "regularly": 17263, + "spic": 17264, + "relative": 17265, + "athers": 17266, + "mise": 17267, + "battling": 17268, + "decides": 17269, + "mata": 17270, + "occupied": 17271, + "randomly": 17272, + "catsoftwitter": 17273, + "gian": 17274, + "bally": 17275, + "alties": 17276, + "allies": 17277, + "immen": 17278, + "syrac": 17279, + "ðŁĴľðŁĴľ": 17280, + "llan": 17281, + "aur": 17282, + "kut": 17283, + "lamar": 17284, + "affects": 17285, + "nra": 17286, + "starwar": 17287, + "ð٤ĺ": 17288, + "scram": 17289, + "enchan": 17290, + "process": 17291, + "luxurious": 17292, + "array": 17293, + "sherlock": 17294, + "compati": 17295, + "dorf": 17296, + "stress": 17297, + "msu": 17298, + "swith": 17299, + "sala": 17300, + "sofinstagram": 17301, + "foil": 17302, + "understood": 17303, + "quay": 17304, + "rp": 17305, + "cade": 17306, + "jaw": 17307, + "enab": 17308, + "encoun": 17309, + "ðŁİī:": 17310, + "dock": 17311, + "saturn": 17312, + "mull": 17313, + "layout": 17314, + "rarely": 17315, + "happily": 17316, + "fixture": 17317, + "orph": 17318, + "overlooking": 17319, + "herbs": 17320, + "mitt": 17321, + "pillar": 17322, + "nolan": 17323, + "petty": 17324, + "stry": 17325, + "ui": 17326, + "muk": 17327, + "ores": 17328, + "overs": 17329, + "áµ": 17330, + "recreation": 17331, + "wesley": 17332, + "rit": 17333, + "kejriwal": 17334, + "stocking": 17335, + "gv": 17336, + "subscribers": 17337, + "moose": 17338, + "mae": 17339, + "bert": 17340, + "oppre": 17341, + "assignment": 17342, + "uro": 17343, + "highlighting": 17344, + "calvin": 17345, + "weigh": 17346, + "cambodia": 17347, + "avon": 17348, + "kem": 17349, + "disabilities": 17350, + "ready": 17351, + "chargers": 17352, + "pads": 17353, + "izing": 17354, + 
"illian": 17355, + "truste": 17356, + "colleges": 17357, + "associates": 17358, + "albany": 17359, + "milton": 17360, + "cron": 17361, + "bur": 17362, + "hardly": 17363, + "sights": 17364, + "antiques": 17365, + "echo": 17366, + "surprisingly": 17367, + "haiti": 17368, + "capt": 17369, + "php": 17370, + "opio": 17371, + "inequality": 17372, + "equal": 17373, + "keny": 17374, + "schmid": 17375, + "autographs": 17376, + "rent": 17377, + "quer": 17378, + "citrus": 17379, + "challenged": 17380, + "tec": 17381, + "epide": 17382, + "fest": 17383, + "zhou": 17384, + "lime": 17385, + "citizenship": 17386, + "crystal": 17387, + "convinced": 17388, + "messenger": 17389, + "copenhagen": 17390, + "âĿĹï¸ı": 17391, + "warran": 17392, + "developments": 17393, + "ï¸ıâĥ£": 17394, + "forex": 17395, + "hiro": 17396, + "sneakers": 17397, + "xide": 17398, + "viva": 17399, + "stereo": 17400, + "batting": 17401, + "ssel": 17402, + "host": 17403, + "bengal": 17404, + "criticism": 17405, + "qc": 17406, + "crun": 17407, + "attempted": 17408, + "rye": 17409, + "determination": 17410, + "creations": 17411, + "dread": 17412, + "labels": 17413, + "posse": 17414, + "ancer": 17415, + "johan": 17416, + "sister": 17417, + "partnerships": 17418, + "lesbian": 17419, + "kst": 17420, + "guarantee": 17421, + "baro": 17422, + "fixing": 17423, + "mason": 17424, + "mous": 17425, + "chemicals": 17426, + "tless": 17427, + "biodiversity": 17428, + "paro": 17429, + "bharat": 17430, + "acol": 17431, + "refuge": 17432, + "ente": 17433, + "titi": 17434, + "dyssey": 17435, + "responds": 17436, + "lefto": 17437, + "iner": 17438, + "sevel": 17439, + "rahul": 17440, + "oline": 17441, + "frankfur": 17442, + "choreo": 17443, + "enjoyable": 17444, + "cto": 17445, + "struggles": 17446, + "woodland": 17447, + "heavyweight": 17448, + "gens": 17449, + "recep": 17450, + "accred": 17451, + "ðŁĺ¡": 17452, + "transformed": 17453, + "listen": 17454, + "atop": 17455, + "nk": 17456, + "surge": 17457, + "bere": 17458, + "governor": 17459, + "prisoners": 17460, + "claude": 17461, + "till": 17462, + "mulator": 17463, + "emotion": 17464, + "waterloo": 17465, + "start": 17466, + "ðŁĩº": 17467, + "cleaned": 17468, + "grandmother": 17469, + "fearless": 17470, + "african": 17471, + "astronomy": 17472, + "ðŁıģ": 17473, + "à¸Ļ": 17474, + "theworld": 17475, + "suitable": 17476, + "anthony": 17477, + "kand": 17478, + "tten": 17479, + "meaningful": 17480, + "disclo": 17481, + "jacobs": 17482, + "ø": 17483, + "tomlinson": 17484, + "ghetti": 17485, + "typho": 17486, + "substan": 17487, + "asco": 17488, + "tek": 17489, + "nagar": 17490, + "mud": 17491, + "amon": 17492, + "vaccine": 17493, + "fty": 17494, + "flesh": 17495, + "noel": 17496, + "inflation": 17497, + "portugue": 17498, + "glamour": 17499, + "tram": 17500, + "vre": 17501, + "tequ": 17502, + "roundup": 17503, + "wyn": 17504, + "rejected": 17505, + "mosaic": 17506, + "sighting": 17507, + "calf": 17508, + "ota": 17509, + "composition": 17510, + "gopro": 17511, + "gonzale": 17512, + "eed": 17513, + "bard": 17514, + "tue": 17515, + "effectively": 17516, + "ween": 17517, + "alto": 17518, + "ribs": 17519, + "relate": 17520, + "thirsty": 17521, + "furious": 17522, + "dim": 17523, + "chard": 17524, + "perfume": 17525, + "sny": 17526, + "churchill": 17527, + "kof": 17528, + "masterclass": 17529, + "wave": 17530, + "ðŁĶµ": 17531, + "erin": 17532, + "owns": 17533, + "tobe": 17534, + "skilled": 17535, + "tem": 17536, + "gof": 17537, + "eni": 17538, + "tori": 17539, + "crazy": 17540, + "lick": 17541, + "resistant": 17542, + 
"icial": 17543, + "agar": 17544, + "!:": 17545, + "gali": 17546, + "delaware": 17547, + "blitz": 17548, + "kohli": 17549, + "puck": 17550, + "availability": 17551, + "himalay": 17552, + "influential": 17553, + "crochet": 17554, + "victori": 17555, + "reading": 17556, + "hobby": 17557, + "viet": 17558, + "jas": 17559, + "engra": 17560, + "skul": 17561, + "ðŁĩ²ðŁĩ": 17562, + "educate": 17563, + "techno": 17564, + "districts": 17565, + "blues": 17566, + "sett": 17567, + "seventh": 17568, + "learns": 17569, + "eeee": 17570, + "apocalypse": 17571, + "hangout": 17572, + "cruel": 17573, + "mutu": 17574, + "bruh": 17575, + "helen": 17576, + "sheer": 17577, + "ction": 17578, + "klein": 17579, + "texans": 17580, + "cereal": 17581, + "shine": 17582, + "nered": 17583, + "gras": 17584, + "ambro": 17585, + "fella": 17586, + "hindu": 17587, + "matthew": 17588, + "lima": 17589, + "miranda": 17590, + "jewel": 17591, + "soho": 17592, + "eurovision": 17593, + "neighbours": 17594, + "chandler": 17595, + "besides": 17596, + "ðŁ¥°": 17597, + "astros": 17598, + "thumbs": 17599, + "renault": 17600, + "rave": 17601, + "hired": 17602, + "ðŁĸ¤": 17603, + "itary": 17604, + "zor": 17605, + "blazer": 17606, + "kine": 17607, + "eau": 17608, + "katy": 17609, + "dccomics": 17610, + "pec": 17611, + "rodgers": 17612, + "waterproof": 17613, + "killers": 17614, + "superint": 17615, + "preserv": 17616, + "asso": 17617, + "brewers": 17618, + "promotional": 17619, + "scam": 17620, + "villages": 17621, + "sketches": 17622, + "juicy": 17623, + "forlife": 17624, + "audit": 17625, + "solo": 17626, + "fundamental": 17627, + "lene": 17628, + "philippine": 17629, + "tend": 17630, + "conservatives": 17631, + "sponsorship": 17632, + "ddle": 17633, + "aine": 17634, + "htc": 17635, + "osi": 17636, + "hulk": 17637, + "waf": 17638, + "à¸Ļ": 17639, + "evaluation": 17640, + "antine": 17641, + "slee": 17642, + "robertson": 17643, + "roosevel": 17644, + "agi": 17645, + "sophistic": 17646, + "employers": 17647, + "bubbles": 17648, + "kowski": 17649, + "interaction": 17650, + "shu": 17651, + "boule": 17652, + "ican": 17653, + "jare": 17654, + "hank": 17655, + "legitim": 17656, + "knicks": 17657, + "karma": 17658, + "receiver": 17659, + "perks": 17660, + "uh": 17661, + "stair": 17662, + "suni": 17663, + "laboratory": 17664, + "graves": 17665, + "vocals": 17666, + "oot": 17667, + "cture": 17668, + "thrive": 17669, + "tico": 17670, + "ãĥ³": 17671, + "bw": 17672, + "cartoons": 17673, + "mcdonalds": 17674, + "draw": 17675, + "yung": 17676, + "pler": 17677, + "lid": 17678, + "ethical": 17679, + "groove": 17680, + "enta": 17681, + "internationalwomensday": 17682, + "patron": 17683, + "worries": 17684, + "ðŁİħ": 17685, + "ðŁijĭ": 17686, + "katherine": 17687, + "diaz": 17688, + "tori": 17689, + "bachchan": 17690, + "trust": 17691, + "mineral": 17692, + "icom": 17693, + "builders": 17694, + "born": 17695, + "coloring": 17696, + "latte": 17697, + "case": 17698, + "revolution": 17699, + "trader": 17700, + "oxid": 17701, + "chipot": 17702, + "instantly": 17703, + "southern": 17704, + "sehun": 17705, + "prob": 17706, + "hernandez": 17707, + "lisbon": 17708, + "huawe": 17709, + "pong": 17710, + "mea": 17711, + "rooney": 17712, + "wheelchair": 17713, + "keen": 17714, + "bett": 17715, + "corin": 17716, + "regulatory": 17717, + "displac": 17718, + "karen": 17719, + "schem": 17720, + "sunsets": 17721, + "whales": 17722, + "reminis": 17723, + "hep": 17724, + "hide": 17725, + "marcel": 17726, + "pandora": 17727, + "doyle": 17728, + "thfc": 17729, + "otto": 17730, + 
"nokia": 17731, + "transgender": 17732, + "kov": 17733, + "hawaiian": 17734, + "shave": 17735, + "sovere": 17736, + "excer": 17737, + "nicki": 17738, + "pug": 17739, + "stor": 17740, + "roth": 17741, + "weet": 17742, + "legal": 17743, + "dignity": 17744, + "pow": 17745, + "homage": 17746, + "ðŁĩ³ðŁĩ": 17747, + "sre": 17748, + "canon": 17749, + "lax": 17750, + "woah": 17751, + "quartz": 17752, + "ña": 17753, + "greeting": 17754, + "flickr": 17755, + "nairobi": 17756, + "advocates": 17757, + "anc": 17758, + "vii": 17759, + "eugene": 17760, + "thra": 17761, + "cre": 17762, + "elan": 17763, + "pension": 17764, + "thletics": 17765, + "toni": 17766, + "reagan": 17767, + "xv": 17768, + "store": 17769, + "bench": 17770, + "harlem": 17771, + "toddler": 17772, + "sentenced": 17773, + "âĻ¥ï¸ı": 17774, + "globally": 17775, + "cheaper": 17776, + "uf": 17777, + "mam": 17778, + "nico": 17779, + "iku": 17780, + "thou": 17781, + "nist": 17782, + "dami": 17783, + "thala": 17784, + "rhodes": 17785, + "sale": 17786, + "bowls": 17787, + "âĪ": 17788, + "lasvegas": 17789, + "sanctions": 17790, + "admire": 17791, + "matched": 17792, + "unable": 17793, + "traveler": 17794, + "eleven": 17795, + "strawberries": 17796, + "âĢĶâĢĶâĢĶâĢĶ": 17797, + "studio": 17798, + "jacques": 17799, + "ims": 17800, + "valued": 17801, + "sno": 17802, + "cheesecake": 17803, + "nxt": 17804, + "eos": 17805, + "sx": 17806, + "fx": 17807, + "tonic": 17808, + "hatch": 17809, + "chicks": 17810, + "grads": 17811, + "handic": 17812, + "rory": 17813, + "asp": 17814, + "ripped": 17815, + "dentist": 17816, + "nen": 17817, + "lufc": 17818, + "âľĬ": 17819, + "dige": 17820, + "hopkins": 17821, + "sherman": 17822, + "fda": 17823, + "forall": 17824, + "ashley": 17825, + "strand": 17826, + "hy": 17827, + "liquor": 17828, + "buffet": 17829, + "essence": 17830, + "pharma": 17831, + "suriya": 17832, + "ðŁĴĻðŁĴĻ": 17833, + "festivals": 17834, + "zan": 17835, + "refresh": 17836, + "purple": 17837, + "uniforms": 17838, + "kenneth": 17839, + "=)": 17840, + "asan": 17841, + "helsin": 17842, + "transformers": 17843, + "kali": 17844, + "personalized": 17845, + "chalk": 17846, + "bobby": 17847, + "âĮ": 17848, + "themes": 17849, + "departure": 17850, + "print": 17851, + "illustrations": 17852, + "quiet": 17853, + "agrees": 17854, + "griff": 17855, + "س": 17856, + "miti": 17857, + "together": 17858, + "convenience": 17859, + "abar": 17860, + "carlo": 17861, + "turtles": 17862, + "infosec": 17863, + "somewhat": 17864, + "arlington": 17865, + "scholarships": 17866, + "emirates": 17867, + "mums": 17868, + "stella": 17869, + "autonom": 17870, + "feather": 17871, + "gore": 17872, + "nominees": 17873, + "fragrance": 17874, + "ÑĤ": 17875, + "wong": 17876, + "theastern": 17877, + "gre": 17878, + "zilla": 17879, + "isi": 17880, + "bumper": 17881, + "goo": 17882, + "dozens": 17883, + "abduc": 17884, + "âļªï¸ı": 17885, + "oils": 17886, + "donors": 17887, + "silicon": 17888, + "ipod": 17889, + "fortnite": 17890, + "ðŁĴ¨": 17891, + "toro": 17892, + "sparkling": 17893, + "consciousness": 17894, + "pala": 17895, + "num": 17896, + "mounted": 17897, + "ffins": 17898, + "thieves": 17899, + "teammate": 17900, + "prab": 17901, + "omer": 17902, + "tapes": 17903, + "bod": 17904, + "mitsu": 17905, + "stew": 17906, + "ere": 17907, + "pbs": 17908, + "tusc": 17909, + "lowe": 17910, + "rade": 17911, + "parliamentary": 17912, + "hm": 17913, + "edgar": 17914, + "ðŁijĩðŁijĩ": 17915, + "toa": 17916, + "agh": 17917, + "honi": 17918, + "slate": 17919, + "geek": 17920, + "apt": 17921, + "hardt": 
17922, + "tap": 17923, + "horizon": 17924, + "growth": 17925, + "makeover": 17926, + "hil": 17927, + "paperback": 17928, + "idan": 17929, + "rehabil": 17930, + "giu": 17931, + "possibilities": 17932, + "lettu": 17933, + "franco": 17934, + "boss": 17935, + "acher": 17936, + "doesnt": 17937, + "moe": 17938, + "taker": 17939, + "hussain": 17940, + "mlk": 17941, + "dil": 17942, + "thia": 17943, + "hama": 17944, + "realised": 17945, + "ravens": 17946, + "curriculum": 17947, + "mith": 17948, + "knight": 17949, + "tedx": 17950, + "rv": 17951, + "isaiah": 17952, + "cumbria": 17953, + "birthdays": 17954, + "fing": 17955, + "prez": 17956, + "mubarak": 17957, + "exquisite": 17958, + "clearance": 17959, + "yen": 17960, + "pari": 17961, + "evo": 17962, + "ú": 17963, + "modified": 17964, + "applying": 17965, + "implement": 17966, + "discovering": 17967, + "chapman": 17968, + "indiegame": 17969, + "disk": 17970, + "crowdfunding": 17971, + "machin": 17972, + "livel": 17973, + "styled": 17974, + "âĿĮ": 17975, + "making": 17976, + "rehearsals": 17977, + "nutriti": 17978, + "subscription": 17979, + "andro": 17980, + "creators": 17981, + "carries": 17982, + "kylie": 17983, + "camden": 17984, + "apprentice": 17985, + "taxpay": 17986, + "cca": 17987, + "tuesdaythoughts": 17988, + "pissed": 17989, + "erman": 17990, + "detec": 17991, + "freedom": 17992, + "meri": 17993, + "..!": 17994, + "psalm": 17995, + "sunlight": 17996, + "perspec": 17997, + "beings": 17998, + "bookstore": 17999, + "rockstar": 18000, + "functions": 18001, + "pence": 18002, + "faves": 18003, + "zn": 18004, + "obamacare": 18005, + "spill": 18006, + "coventry": 18007, + "pigeon": 18008, + "pivo": 18009, + "bait": 18010, + "kolkata": 18011, + "aval": 18012, + "donor": 18013, + "wah": 18014, + "privileg": 18015, + "traditions": 18016, + "rajasthan": 18017, + "teness": 18018, + "portuguese": 18019, + "ynes": 18020, + "tackles": 18021, + "defic": 18022, + "torn": 18023, + "polling": 18024, + "thorne": 18025, + "ina": 18026, + "benedict": 18027, + "barry": 18028, + "calories": 18029, + "verdict": 18030, + "savethe": 18031, + "norton": 18032, + "office": 18033, + "mainstream": 18034, + "improves": 18035, + "fron": 18036, + "responding": 18037, + "realtor": 18038, + "scottish": 18039, + "declar": 18040, + "rl": 18041, + "shiv": 18042, + "supplier": 18043, + "resting": 18044, + "sweets": 18045, + "qui": 18046, + ".â̦": 18047, + "whitney": 18048, + "startup": 18049, + "thankyou": 18050, + "teacher": 18051, + "halls": 18052, + "have": 18053, + "handmade": 18054, + "proving": 18055, + "quartet": 18056, + "rochester": 18057, + "lian": 18058, + "virtual": 18059, + "mendes": 18060, + "oficial": 18061, + "midlands": 18062, + "xbox": 18063, + "measuring": 18064, + "ovo": 18065, + "accommodation": 18066, + "brides": 18067, + "collegiate": 18068, + "intellectual": 18069, + "incar": 18070, + "niag": 18071, + "ðŁį·": 18072, + "sfw": 18073, + "cocoa": 18074, + "coats": 18075, + "civilians": 18076, + "presidency": 18077, + "matrix": 18078, + "sweetheart": 18079, + "triathlon": 18080, + "wagner": 18081, + "radic": 18082, + "planner": 18083, + "theo": 18084, + "execution": 18085, + "kum": 18086, + "thewalkingdead": 18087, + "scar": 18088, + "rotation": 18089, + "blogging": 18090, + "bomb": 18091, + "reson": 18092, + "bbles": 18093, + "stare": 18094, + "assisted": 18095, + "edo": 18096, + "branded": 18097, + "warnings": 18098, + "thorpe": 18099, + "acknowle": 18100, + "satisfied": 18101, + "shores": 18102, + "rid": 18103, + "dora": 18104, + "physically": 18105, + 
"bigh": 18106, + "approves": 18107, + "hah": 18108, + "rical": 18109, + "versatile": 18110, + "pretend": 18111, + "lum": 18112, + "abhi": 18113, + "yee": 18114, + "spit": 18115, + "ãĢĮ": 18116, + "djs": 18117, + "ashtra": 18118, + "jt": 18119, + "venues": 18120, + "grammys": 18121, + "cyclo": 18122, + "tracker": 18123, + "overwatch": 18124, + "replica": 18125, + "elyn": 18126, + "nrl": 18127, + "lindsey": 18128, + "homo": 18129, + "balloons": 18130, + "kitchen": 18131, + "sis": 18132, + "amos": 18133, + "endeav": 18134, + "ðŁĴ»": 18135, + "arec": 18136, + "thug": 18137, + "hooked": 18138, + "hrc": 18139, + "newyork": 18140, + "burgh": 18141, + "americas": 18142, + "patricia": 18143, + "ugu": 18144, + "apathy": 18145, + "hast": 18146, + "psychi": 18147, + "cork": 18148, + "petrol": 18149, + "ðŁİ¬": 18150, + "aku": 18151, + "popping": 18152, + "psychological": 18153, + "aux": 18154, + "gma": 18155, + "cadillac": 18156, + "waste": 18157, + "authent": 18158, + "bristol": 18159, + "name": 18160, + "queer": 18161, + "tober": 18162, + "jerry": 18163, + "comin": 18164, + "chant": 18165, + "privileged": 18166, + "opar": 18167, + "loser": 18168, + "text": 18169, + "marker": 18170, + "stries": 18171, + "equally": 18172, + "aki": 18173, + "christmas": 18174, + "gareth": 18175, + "blew": 18176, + "emma": 18177, + "imagin": 18178, + "seals": 18179, + "cheat": 18180, + "conditioning": 18181, + "jana": 18182, + "rens": 18183, + "daries": 18184, + "oasis": 18185, + "discounts": 18186, + "council": 18187, + "ika": 18188, + "shirley": 18189, + "voucher": 18190, + "alps": 18191, + "wx": 18192, + "qr": 18193, + "drift": 18194, + "attempting": 18195, + "utc": 18196, + "ت": 18197, + "gonzalez": 18198, + "mf": 18199, + "joker": 18200, + "parallel": 18201, + "pare": 18202, + "aspects": 18203, + "procedu": 18204, + "np": 18205, + "ama": 18206, + "raleigh": 18207, + "brighten": 18208, + "guire": 18209, + "radiation": 18210, + "crescent": 18211, + "hob": 18212, + "ille": 18213, + "strand": 18214, + "vore": 18215, + "nard": 18216, + "chest": 18217, + "diwali": 18218, + "avatar": 18219, + "alder": 18220, + "dling": 18221, + "pathetic": 18222, + "ðŁĴĺ": 18223, + "spirit": 18224, + "jorge": 18225, + "filmmaking": 18226, + "ðŁĻıðŁĻı": 18227, + "challenger": 18228, + "bj": 18229, + "downtown": 18230, + "html": 18231, + "adequ": 18232, + "twisted": 18233, + "inely": 18234, + "('": 18235, + "wraps": 18236, + "operational": 18237, + "yne": 18238, + "nus": 18239, + "magnet": 18240, + "marketplace": 18241, + "healthier": 18242, + "snapshot": 18243, + "damon": 18244, + "interven": 18245, + "federer": 18246, + "owls": 18247, + "biscuits": 18248, + "jp": 18249, + "rodeo": 18250, + "blueberry": 18251, + "lection": 18252, + "frontier": 18253, + "summers": 18254, + "reyes": 18255, + "pedestrian": 18256, + "gol": 18257, + "caffe": 18258, + "refurbi": 18259, + "boulder": 18260, + "meghan": 18261, + "specialty": 18262, + "lass": 18263, + "ei": 18264, + "suspects": 18265, + "approx": 18266, + "rrr": 18267, + "rath": 18268, + "stim": 18269, + "crushed": 18270, + "hed": 18271, + "whun": 18272, + "loaf": 18273, + "crore": 18274, + "rivera": 18275, + "genetics": 18276, + "sock": 18277, + "wasted": 18278, + "nypd": 18279, + "answering": 18280, + "dove": 18281, + "bella": 18282, + "olin": 18283, + "dun": 18284, + "fiji": 18285, + "pretty": 18286, + "sparkle": 18287, + "yun": 18288, + "jd": 18289, + "europa": 18290, + "lifts": 18291, + "amber": 18292, + "mur": 18293, + "tek": 18294, + "boyd": 18295, + "royalty": 18296, + "indo": 18297, + "rib": 
18298, + "gotham": 18299, + "tiest": 18300, + "installing": 18301, + "kemp": 18302, + "thephoto": 18303, + "cosmic": 18304, + ")))": 18305, + "wholesale": 18306, + "loyment": 18307, + "easy": 18308, + "suing": 18309, + "settled": 18310, + "afp": 18311, + "prover": 18312, + "supportive": 18313, + "rees": 18314, + "neath": 18315, + "deliber": 18316, + "cé": 18317, + "welcome": 18318, + "picoftheday": 18319, + "newborn": 18320, + "patty": 18321, + "suns": 18322, + "siest": 18323, + "flint": 18324, + "differently": 18325, + "spoilers": 18326, + "trooper": 18327, + "gins": 18328, + "cory": 18329, + "lookout": 18330, + "equipped": 18331, + "tape": 18332, + "toby": 18333, + "researcher": 18334, + "ush": 18335, + "keyes": 18336, + "alma": 18337, + "induction": 18338, + "kw": 18339, + "khar": 18340, + "slick": 18341, + "bride": 18342, + "eur": 18343, + "craving": 18344, + "bookings": 18345, + "ches": 18346, + "trunk": 18347, + "vernon": 18348, + "spher": 18349, + "crystals": 18350, + "relatively": 18351, + "pompe": 18352, + "unions": 18353, + "valley": 18354, + "para": 18355, + "want": 18356, + "okc": 18357, + "deaf": 18358, + "sergio": 18359, + "lennon": 18360, + "shay": 18361, + "cra": 18362, + "vat": 18363, + "hee": 18364, + "twe": 18365, + "liquid": 18366, + "poly": 18367, + "ðŁİģ": 18368, + "bent": 18369, + "bearing": 18370, + "motorsport": 18371, + "barbe": 18372, + "testi": 18373, + "hani": 18374, + "financing": 18375, + "astronaut": 18376, + "watercolour": 18377, + "rish": 18378, + "comiccon": 18379, + "gart": 18380, + "wrong": 18381, + "bern": 18382, + "itan": 18383, + "stepped": 18384, + "filters": 18385, + "clow": 18386, + "mex": 18387, + "demons": 18388, + "allo": 18389, + "expanded": 18390, + "command": 18391, + "eters": 18392, + "goats": 18393, + "siri": 18394, + "yr": 18395, + "pottery": 18396, + "marion": 18397, + "ile": 18398, + "elan": 18399, + "santo": 18400, + "persona": 18401, + "duke": 18402, + "homeless": 18403, + "lighted": 18404, + "wheeler": 18405, + "changer": 18406, + "cabbage": 18407, + "surreal": 18408, + "hamburg": 18409, + "smashed": 18410, + "stran": 18411, + "knot": 18412, + "iart": 18413, + "obi": 18414, + "bedro": 18415, + "dial": 18416, + "thick": 18417, + "bingo": 18418, + "fus": 18419, + "vacuum": 18420, + "conve": 18421, + "ative": 18422, + "accuracy": 18423, + "account": 18424, + "refer": 18425, + "riz": 18426, + "spiderman": 18427, + "bana": 18428, + "rite": 18429, + "ub": 18430, + "abs": 18431, + "medical": 18432, + "link": 18433, + "siem": 18434, + ">>>>": 18435, + "betra": 18436, + "glowing": 18437, + "reactions": 18438, + "puppet": 18439, + "spaghetti": 18440, + "angs": 18441, + "remedi": 18442, + "prayfor": 18443, + "royce": 18444, + "charlotte": 18445, + "£ï¸ı": 18446, + "ghet": 18447, + "affecting": 18448, + "rode": 18449, + "socialist": 18450, + "moses": 18451, + "azi": 18452, + "oit": 18453, + "reporters": 18454, + "cdt": 18455, + "aping": 18456, + "snat": 18457, + "minimal": 18458, + "waist": 18459, + "siege": 18460, + ">>>>": 18461, + "rig": 18462, + "schmidt": 18463, + "hare": 18464, + "eca": 18465, + "thorn": 18466, + "hemp": 18467, + "esthe": 18468, + "clyde": 18469, + "tha": 18470, + "donut": 18471, + "mohamed": 18472, + "lingerie": 18473, + "legg": 18474, + "carpenter": 18475, + "performers": 18476, + "dea": 18477, + "imagined": 18478, + "curse": 18479, + "lash": 18480, + "ctr": 18481, + "agua": 18482, + "roar": 18483, + "gri": 18484, + "role": 18485, + "jfk": 18486, + "resurrec": 18487, + "roosevelt": 18488, + "marilyn": 18489, + "smalle": 
18490, + "willis": 18491, + "waited": 18492, + "charities": 18493, + "theres": 18494, + "lik": 18495, + "original": 18496, + "cari": 18497, + "cough": 18498, + "cruci": 18499, + "lagun": 18500, + "contrast": 18501, + "kou": 18502, + "armour": 18503, + "removing": 18504, + "tent": 18505, + "mazda": 18506, + "brighter": 18507, + "thief": 18508, + "corner": 18509, + "tequila": 18510, + "buzzing": 18511, + "albi": 18512, + "pam": 18513, + "azure": 18514, + "discoun": 18515, + "pixelart": 18516, + "possibility": 18517, + "hamont": 18518, + "trades": 18519, + "buda": 18520, + "hive": 18521, + "versy": 18522, + "finch": 18523, + "transpa": 18524, + "emi": 18525, + "terrifying": 18526, + "inqui": 18527, + "gba": 18528, + "substitu": 18529, + "collecti": 18530, + "placing": 18531, + "cindy": 18532, + "kann": 18533, + "patho": 18534, + "diamond": 18535, + "mourinho": 18536, + "guinea": 18537, + "anthropo": 18538, + "airs": 18539, + "pumps": 18540, + "ìļ": 18541, + "paso": 18542, + "curling": 18543, + "anita": 18544, + "residency": 18545, + "newh": 18546, + "joon": 18547, + "cigarette": 18548, + "queue": 18549, + "extrac": 18550, + "games": 18551, + "splen": 18552, + "express": 18553, + "publicly": 18554, + "bonnie": 18555, + "tribune": 18556, + "baek": 18557, + "reasonable": 18558, + "cor": 18559, + "timothy": 18560, + "sheeran": 18561, + "ı": 18562, + "fdn": 18563, + "sutton": 18564, + "concentration": 18565, + "caravan": 18566, + "xavier": 18567, + "alger": 18568, + "cylin": 18569, + "frederick": 18570, + "nerve": 18571, + "peak": 18572, + "lettuce": 18573, + "jail": 18574, + "pregame": 18575, + "kavan": 18576, + "upgraded": 18577, + "ecology": 18578, + "squadron": 18579, + "grapes": 18580, + "goog": 18581, + "pastry": 18582, + "ðŁĹ£": 18583, + "ãĥ¼ãĥ": 18584, + "milano": 18585, + "awaz": 18586, + "presenter": 18587, + "ðŁĮ¿": 18588, + "herd": 18589, + "kings": 18590, + "template": 18591, + "flour": 18592, + "hv": 18593, + "kley": 18594, + "iya": 18595, + "spec": 18596, + "ater": 18597, + "frankfurt": 18598, + "coch": 18599, + "texting": 18600, + "deli": 18601, + "communist": 18602, + "regiment": 18603, + "eleanor": 18604, + "anticipated": 18605, + "ðŁijĮðŁı»": 18606, + "thephotohour": 18607, + "rano": 18608, + "surviving": 18609, + "simulation": 18610, + "dawson": 18611, + "arin": 18612, + "aqua": 18613, + "mor": 18614, + "â̦.": 18615, + "cino": 18616, + "iraqi": 18617, + "shaz": 18618, + "dundee": 18619, + "wes": 18620, + "drau": 18621, + "hannah": 18622, + "snews": 18623, + "occupation": 18624, + "steen": 18625, + "xm": 18626, + "angles": 18627, + "settings": 18628, + "guru": 18629, + "knox": 18630, + "orca": 18631, + "shaping": 18632, + "went": 18633, + "drilling": 18634, + "zzie": 18635, + "bri": 18636, + "kissing": 18637, + "find": 18638, + "maine": 18639, + "âŃIJï¸ıâŃIJï¸ı": 18640, + "ðŁĮį": 18641, + "larry": 18642, + "busted": 18643, + "tavern": 18644, + "actively": 18645, + "-\"": 18646, + "replacing": 18647, + "nod": 18648, + "unlock": 18649, + ".\"": 18650, + "âŀ¤": 18651, + "affiliate": 18652, + "tow": 18653, + "ln": 18654, + "happynewyear": 18655, + "dif": 18656, + "jm": 18657, + "greenwich": 18658, + "controversy": 18659, + "dawg": 18660, + "condol": 18661, + "savannah": 18662, + "compensation": 18663, + "touchdown": 18664, + "teo": 18665, + "ambitious": 18666, + "embroi": 18667, + "convicted": 18668, + "iartg": 18669, + "barack": 18670, + "trance": 18671, + "testimony": 18672, + "audition": 18673, + "thumb": 18674, + "myths": 18675, + "bex": 18676, + "quez": 18677, + "orchid": 
18678, + "deny": 18679, + "entitled": 18680, + "hood": 18681, + "grant": 18682, + "inbox": 18683, + "bluejays": 18684, + "rilla": 18685, + "smallest": 18686, + "burden": 18687, + "infamous": 18688, + "divided": 18689, + "boundaries": 18690, + "tter": 18691, + "elt": 18692, + "wyoming": 18693, + "beverage": 18694, + "mesm": 18695, + "onews": 18696, + "buddhist": 18697, + "yana": 18698, + "assad": 18699, + "isms": 18700, + "barrett": 18701, + "predicted": 18702, + "backto": 18703, + "twit": 18704, + "ethere": 18705, + "captains": 18706, + "escaped": 18707, + "ayo": 18708, + "lamborgh": 18709, + "gardner": 18710, + "laps": 18711, + "kal": 18712, + "advertisement": 18713, + "insects": 18714, + "napo": 18715, + "amen": 18716, + "acy": 18717, + "rand": 18718, + "gk": 18719, + "teh": 18720, + "kathle": 18721, + "tridge": 18722, + "pancake": 18723, + "atro": 18724, + "pyramid": 18725, + "bula": 18726, + "paralym": 18727, + "gauge": 18728, + "encies": 18729, + "tomy": 18730, + "biscuit": 18731, + "butcher": 18732, + "qualifier": 18733, + "county": 18734, + "kei": 18735, + "pools": 18736, + "darker": 18737, + "shoulders": 18738, + "ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸": 18739, + "spre": 18740, + "(\"": 18741, + "writers": 18742, + "gm": 18743, + "ðŁİĵ": 18744, + "knit": 18745, + "huff": 18746, + "mtb": 18747, + "phillies": 18748, + "ost": 18749, + "denis": 18750, + "gart": 18751, + "licensed": 18752, + "interface": 18753, + "excel": 18754, + "dwell": 18755, + "fromthe": 18756, + "cofficial": 18757, + "azzi": 18758, + "appearing": 18759, + "forest": 18760, + "nana": 18761, + "keith": 18762, + "manufacturers": 18763, + "beckham": 18764, + ")?": 18765, + "ese": 18766, + "colony": 18767, + "delicate": 18768, + "utter": 18769, + "mcin": 18770, + "transplant": 18771, + "preferred": 18772, + "pard": 18773, + "arie": 18774, + "hub": 18775, + "pods": 18776, + "perspectives": 18777, + "pict": 18778, + "delu": 18779, + "apper": 18780, + "bethan": 18781, + "pmo": 18782, + "criminals": 18783, + "feminism": 18784, + "shack": 18785, + "circumstances": 18786, + "fellas": 18787, + "protesting": 18788, + "wax": 18789, + "suggested": 18790, + "tator": 18791, + "drew": 18792, + "omni": 18793, + "fake": 18794, + "kathy": 18795, + "reb": 18796, + "deline": 18797, + "berni": 18798, + "misty": 18799, + "ðŁij©": 18800, + "erable": 18801, + "breakthrough": 18802, + "menswear": 18803, + "millennials": 18804, + "chanyeol": 18805, + "laz": 18806, + "insert": 18807, + "replies": 18808, + "phrase": 18809, + "nx": 18810, + "iheartawards": 18811, + "audrey": 18812, + "granite": 18813, + "racec": 18814, + "orie": 18815, + "terra": 18816, + "innovations": 18817, + "brittany": 18818, + "ateral": 18819, + "pear": 18820, + "biological": 18821, + "shments": 18822, + "institution": 18823, + "msn": 18824, + "frequency": 18825, + "dman": 18826, + "neglec": 18827, + "tf": 18828, + "stefan": 18829, + "foxnews": 18830, + "typo": 18831, + "comms": 18832, + "sequence": 18833, + "carmen": 18834, + "whites": 18835, + "economist": 18836, + "exeter": 18837, + "seum": 18838, + "resorts": 18839, + "casually": 18840, + "bunde": 18841, + "divide": 18842, + "ع": 18843, + "gag": 18844, + "creed": 18845, + "retire": 18846, + "caucus": 18847, + "rapids": 18848, + "wrestlemania": 18849, + "tulsa": 18850, + "sunderland": 18851, + "fundament": 18852, + "odi": 18853, + "yamaha": 18854, + "vary": 18855, + "intrigu": 18856, + "else": 18857, + "beacon": 18858, + "angie": 18859, + "traded": 18860, + "transm": 18861, + "gents": 18862, + "knitting": 18863, + "galac": 18864, + "ðĿĹ": 
18865, + "uto": 18866, + "seaside": 18867, + "holt": 18868, + "rers": 18869, + "fargo": 18870, + "trainers": 18871, + "monsoon": 18872, + "bale": 18873, + "sought": 18874, + "maddie": 18875, + "hw": 18876, + "coli": 18877, + "fran": 18878, + "favs": 18879, + "ðŁĴĶ": 18880, + "intent": 18881, + "rally": 18882, + "sbs": 18883, + "lemonade": 18884, + "barackobama": 18885, + "bread": 18886, + "sticky": 18887, + "explosive": 18888, + "chelten": 18889, + "tj": 18890, + "assoc": 18891, + "ramen": 18892, + "homies": 18893, + "vlog": 18894, + "mister": 18895, + "lord": 18896, + "âĢįâĻĢï¸ı": 18897, + "alyssa": 18898, + "sketchbook": 18899, + "rumble": 18900, + "catch": 18901, + "migrant": 18902, + "discipline": 18903, + "unlikely": 18904, + "chronicles": 18905, + "flora": 18906, + "slams": 18907, + "amid": 18908, + "sboro": 18909, + "coop": 18910, + "jumps": 18911, + "tranqu": 18912, + "melis": 18913, + "sofia": 18914, + "enri": 18915, + "gabe": 18916, + "syri": 18917, + "nicolas": 18918, + "chai": 18919, + "wv": 18920, + "becky": 18921, + "footy": 18922, + "tao": 18923, + "suppose": 18924, + "ðŁĺįðŁĺįðŁĺįðŁĺį": 18925, + "plush": 18926, + "rish": 18927, + "ð٤ĵ": 18928, + "kha": 18929, + "saturdays": 18930, + "accent": 18931, + "hec": 18932, + "limit": 18933, + "carlton": 18934, + "wired": 18935, + "taylorswift": 18936, + "ðŁĺij": 18937, + "sql": 18938, + "harro": 18939, + "recipients": 18940, + "gat": 18941, + "gop": 18942, + "thof": 18943, + "amazed": 18944, + "ghan": 18945, + "ðŁıĨðŁıĨ": 18946, + "porto": 18947, + "clare": 18948, + "distant": 18949, + "nac": 18950, + "ohio": 18951, + "ðŁĻıðŁı¼": 18952, + "mtn": 18953, + "antibio": 18954, + "dinosa": 18955, + "mesa": 18956, + "partial": 18957, + "bv": 18958, + "learnt": 18959, + "lovato": 18960, + "question": 18961, + "extract": 18962, + "gossip": 18963, + "gibb": 18964, + "niagara": 18965, + "ðŁij¨": 18966, + "displayed": 18967, + "sooner": 18968, + "stevie": 18969, + "nuggets": 18970, + "mln": 18971, + "brom": 18972, + "turb": 18973, + "giveaways": 18974, + "stupi": 18975, + "blink": 18976, + "cili": 18977, + "convenient": 18978, + "moh": 18979, + "vive": 18980, + "fric": 18981, + "cause": 18982, + "chamber": 18983, + "cules": 18984, + "nearest": 18985, + "isse": 18986, + "smallbiz": 18987, + "tj": 18988, + "canadians": 18989, + "smarter": 18990, + "brasil": 18991, + "rare": 18992, + "quette": 18993, + "wha": 18994, + "candle": 18995, + "atomic": 18996, + "ðŁijįðŁijį": 18997, + "warrior": 18998, + "relaxed": 18999, + "strips": 19000, + "neur": 19001, + "kka": 19002, + "rfc": 19003, + "jensen": 19004, + "recovering": 19005, + "responses": 19006, + "salam": 19007, + "orthodox": 19008, + "active": 19009, + "ellers": 19010, + "nit": 19011, + "âŃIJ": 19012, + "metropolitan": 19013, + "centuries": 19014, + "vida": 19015, + "grading": 19016, + "transparent": 19017, + "simple": 19018, + "dots": 19019, + "superintendent": 19020, + "elevator": 19021, + "automated": 19022, + "redskins": 19023, + "imam": 19024, + "summertime": 19025, + "jonathan": 19026, + "gearing": 19027, + "michelle": 19028, + "conflic": 19029, + "mice": 19030, + "tote": 19031, + "publish": 19032, + "pax": 19033, + ")-": 19034, + "nailed": 19035, + "á´": 19036, + "telescope": 19037, + "serbia": 19038, + "bab": 19039, + "apeu": 19040, + "stically": 19041, + "senti": 19042, + "rats": 19043, + "isolated": 19044, + "group": 19045, + "hatred": 19046, + "paranormal": 19047, + "stanley": 19048, + "alion": 19049, + "safety": 19050, + "ls": 19051, + "र": 19052, + "nexus": 19053, + "alexandra": 
19054, + "masks": 19055, + "++": 19056, + "tron": 19057, + "auk": 19058, + "brotherhood": 19059, + "browse": 19060, + "mixes": 19061, + "simone": 19062, + "musk": 19063, + "approve": 19064, + "lola": 19065, + "exp": 19066, + "perth": 19067, + "futuri": 19068, + "unseen": 19069, + "dm": 19070, + "chelse": 19071, + "scouting": 19072, + "owe": 19073, + "portsmouth": 19074, + "kram": 19075, + "mize": 19076, + "dispen": 19077, + "sup": 19078, + "dlc": 19079, + "advert": 19080, + "teresa": 19081, + "isle": 19082, + "cycle": 19083, + "metall": 19084, + "shields": 19085, + "mariners": 19086, + "raz": 19087, + "ingen": 19088, + "fund": 19089, + "ango": 19090, + "jones": 19091, + "oka": 19092, + "madden": 19093, + "broccoli": 19094, + "dominic": 19095, + "situations": 19096, + "mero": 19097, + "cricke": 19098, + "punishment": 19099, + "db": 19100, + "shaking": 19101, + "ðŁĺļ": 19102, + "mq": 19103, + "arians": 19104, + "leh": 19105, + "claw": 19106, + "weds": 19107, + "dure": 19108, + "niel": 19109, + "jelly": 19110, + "gourmet": 19111, + "traders": 19112, + "levi": 19113, + "wages": 19114, + "knees": 19115, + "wise": 19116, + "heavenly": 19117, + "avid": 19118, + "melody": 19119, + "zack": 19120, + "bananas": 19121, + "apprentice": 19122, + "prop": 19123, + "funny": 19124, + "ode": 19125, + "respected": 19126, + "megan": 19127, + "fewer": 19128, + "drafted": 19129, + "medit": 19130, + "grape": 19131, + "usarmy": 19132, + "crusad": 19133, + "vocali": 19134, + "preparations": 19135, + "nonsense": 19136, + "usage": 19137, + "thr": 19138, + "roth": 19139, + "wizards": 19140, + "inside": 19141, + "promotions": 19142, + "mona": 19143, + "redsox": 19144, + "sig": 19145, + "elegance": 19146, + "chia": 19147, + "universal": 19148, + "ãĢį": 19149, + "raja": 19150, + "unga": 19151, + "pollin": 19152, + "filipino": 19153, + "aka": 19154, + "tsun": 19155, + "ikon": 19156, + "biking": 19157, + "decorations": 19158, + "zac": 19159, + "cadets": 19160, + "humour": 19161, + "agm": 19162, + "reppin": 19163, + "vaccin": 19164, + "elove": 19165, + "uw": 19166, + "diabe": 19167, + "gallagher": 19168, + "azer": 19169, + "dol": 19170, + "awhile": 19171, + "prominent": 19172, + "welsh": 19173, + "tann": 19174, + "')": 19175, + "bien": 19176, + "wag": 19177, + "inal": 19178, + "cwc": 19179, + "wicket": 19180, + "urst": 19181, + "qanon": 19182, + "xe": 19183, + "outdoor": 19184, + "dunn": 19185, + "starr": 19186, + "cology": 19187, + "ricky": 19188, + "uefa": 19189, + "rebounds": 19190, + "smusic": 19191, + "infant": 19192, + "ðŁĻĭ": 19193, + "sop": 19194, + "umber": 19195, + "handing": 19196, + "begin": 19197, + "sorting": 19198, + "hash": 19199, + "spati": 19200, + "rek": 19201, + "budapest": 19202, + "blackhawks": 19203, + "delete": 19204, + "rom": 19205, + "candid": 19206, + "authori": 19207, + "debris": 19208, + "specul": 19209, + "intersection": 19210, + "marriott": 19211, + "imran": 19212, + "ðŁĺģðŁĺģ": 19213, + "cruises": 19214, + "ramsey": 19215, + "rafael": 19216, + "awareness": 19217, + "vascular": 19218, + "beyoncé": 19219, + "rug": 19220, + "ðŁĺĮ": 19221, + "festiv": 19222, + "aram": 19223, + "sable": 19224, + "basil": 19225, + "pill": 19226, + "flooring": 19227, + "unbeaten": 19228, + "implications": 19229, + "uf": 19230, + "wound": 19231, + "forge": 19232, + "pointing": 19233, + "pots": 19234, + "popularity": 19235, + "ðŁijıðŁı»": 19236, + "manipul": 19237, + "slots": 19238, + "debates": 19239, + "absence": 19240, + "vermont": 19241, + "neverforget": 19242, + "wrist": 19243, + "gloria": 19244, + "rence": 
19245, + "husk": 19246, + "melting": 19247, + "ðŁİŁ": 19248, + "braces": 19249, + "timely": 19250, + "transforming": 19251, + "amps": 19252, + "mak": 19253, + "poe": 19254, + "ahan": 19255, + "generally": 19256, + "ndp": 19257, + "aleppo": 19258, + "unicef": 19259, + "profs": 19260, + "nord": 19261, + "mask": 19262, + "jacksonville": 19263, + "vv": 19264, + "shells": 19265, + "blooming": 19266, + "operators": 19267, + "charcoal": 19268, + "neville": 19269, + "magi": 19270, + "chip": 19271, + "sama": 19272, + "iran": 19273, + "reforms": 19274, + "accumul": 19275, + "rue": 19276, + "æľ": 19277, + "websites": 19278, + "gaon": 19279, + "devastating": 19280, + "stos": 19281, + "glacier": 19282, + "rapp": 19283, + "chipotle": 19284, + "pra": 19285, + "orous": 19286, + "romney": 19287, + "season": 19288, + "decorative": 19289, + "cisco": 19290, + "ditch": 19291, + "complain": 19292, + "llo": 19293, + "assume": 19294, + "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 19295, + "nels": 19296, + "centric": 19297, + "ftw": 19298, + "carrots": 19299, + "tata": 19300, + "canter": 19301, + "perience": 19302, + "liers": 19303, + "demos": 19304, + "blunt": 19305, + "operate": 19306, + "reservations": 19307, + "leah": 19308, + "substance": 19309, + "dison": 19310, + "ante": 19311, + "election": 19312, + "vue": 19313, + "square": 19314, + "nonprofit": 19315, + "caa": 19316, + "fsu": 19317, + "yam": 19318, + "ãĤ¤": 19319, + "vladi": 19320, + "completes": 19321, + "mari": 19322, + "phillip": 19323, + "neill": 19324, + "eras": 19325, + "kait": 19326, + "mendo": 19327, + "maharashtra": 19328, + "gp": 19329, + "dane": 19330, + "providence": 19331, + "therapeu": 19332, + "juvenile": 19333, + "memo": 19334, + "incorpor": 19335, + "aaaa": 19336, + "seventeen": 19337, + "teenager": 19338, + "ã": 19339, + "orns": 19340, + "wide": 19341, + "cuteness": 19342, + "twd": 19343, + "ffles": 19344, + "bara": 19345, + "comedy": 19346, + "overtime": 19347, + "yaz": 19348, + "baron": 19349, + "unemployment": 19350, + "ðŁijĭ": 19351, + "exterior": 19352, + "dense": 19353, + "centres": 19354, + "matchup": 19355, + "historymonth": 19356, + "artificial": 19357, + "quit": 19358, + "esk": 19359, + "warn": 19360, + "critic": 19361, + "jaf": 19362, + "ðŁĵ²": 19363, + "informative": 19364, + "fuels": 19365, + "recycle": 19366, + "naming": 19367, + "stripe": 19368, + "solic": 19369, + "molecular": 19370, + "deepi": 19371, + "convo": 19372, + "ssel": 19373, + "nae": 19374, + "descent": 19375, + "tiz": 19376, + "accountability": 19377, + "terry": 19378, + "rito": 19379, + "slay": 19380, + "emo": 19381, + "demol": 19382, + "sensation": 19383, + "cov": 19384, + "tore": 19385, + "roundtable": 19386, + "yol": 19387, + "excuses": 19388, + "à¥į": 19389, + "turquo": 19390, + "hhhh": 19391, + "podcasts": 19392, + "celeb": 19393, + "messi": 19394, + "lio": 19395, + "mann": 19396, + "contributed": 19397, + "uz": 19398, + "generator": 19399, + "elets": 19400, + "veggie": 19401, + "indul": 19402, + "ensuring": 19403, + "detroit": 19404, + "punjab": 19405, + "transpor": 19406, + "instruction": 19407, + "add": 19408, + "porcel": 19409, + "paneli": 19410, + "circles": 19411, + "persist": 19412, + "clayton": 19413, + "spn": 19414, + "dogsoftwitter": 19415, + "isnt": 19416, + "spr": 19417, + "retailers": 19418, + "pw": 19419, + "hungar": 19420, + "elena": 19421, + "monaster": 19422, + "guatem": 19423, + "jessie": 19424, + "anz": 19425, + "rashi": 19426, + "flee": 19427, + "carving": 19428, + "faux": 19429, + "lal": 19430, + "henri": 19431, + "djo": 19432, + "dull": 19433, + 
"sana": 19434, + "lara": 19435, + "globe": 19436, + "crimson": 19437, + "compass": 19438, + "pause": 19439, + "nab": 19440, + "lionel": 19441, + "baths": 19442, + "ufo": 19443, + "inventory": 19444, + "singh": 19445, + "satan": 19446, + "ðŁĩ¸": 19447, + "cements": 19448, + "inform": 19449, + "generated": 19450, + "biden": 19451, + "avg": 19452, + "tasks": 19453, + "deer": 19454, + "sau": 19455, + "jailed": 19456, + "pastel": 19457, + "scc": 19458, + "nail": 19459, + "steele": 19460, + "peris": 19461, + "lamborghini": 19462, + "pursue": 19463, + "margin": 19464, + "uch": 19465, + "bosch": 19466, + "drain": 19467, + "clara": 19468, + "bom": 19469, + "latino": 19470, + "webster": 19471, + "rosemary": 19472, + "rha": 19473, + "soun": 19474, + "billionaire": 19475, + "notch": 19476, + "percentage": 19477, + "conor": 19478, + "'\"": 19479, + "homes": 19480, + "earthday": 19481, + "hort": 19482, + "biggest": 19483, + "disin": 19484, + "walton": 19485, + "editors": 19486, + "imma": 19487, + "omar": 19488, + "equivalent": 19489, + "pharmaceu": 19490, + "ahmed": 19491, + "cameo": 19492, + "hanni": 19493, + "underrated": 19494, + "gement": 19495, + "microbi": 19496, + "voo": 19497, + "honorable": 19498, + "obesity": 19499, + "âļ¡ï¸ı": 19500, + "limerick": 19501, + "involvement": 19502, + "stagram": 19503, + "boulevard": 19504, + "burg": 19505, + "blackandwhite": 19506, + "liberation": 19507, + "five": 19508, + "interim": 19509, + "smm": 19510, + "rivalry": 19511, + "capabilities": 19512, + "statements": 19513, + "thumb": 19514, + "ved": 19515, + "swans": 19516, + "barber": 19517, + "eque": 19518, + "serena": 19519, + "helm": 19520, + "noodle": 19521, + "sampling": 19522, + "nawaz": 19523, + "single": 19524, + "thunderstorms": 19525, + "shon": 19526, + "inev": 19527, + "ë¯": 19528, + "topp": 19529, + "orchard": 19530, + "bian": 19531, + "ðŁĺĶ": 19532, + "doorstep": 19533, + "salvation": 19534, + "marketing": 19535, + "rons": 19536, + "clemson": 19537, + "ravi": 19538, + "intake": 19539, + "standwith": 19540, + "sina": 19541, + "haiku": 19542, + "pley": 19543, + "electoral": 19544, + "philly": 19545, + "lays": 19546, + "electric": 19547, + "capturing": 19548, + "upp": 19549, + "ergy": 19550, + "believing": 19551, + "cultures": 19552, + "esday": 19553, + "invasive": 19554, + "eded": 19555, + "speech": 19556, + "endur": 19557, + "vietnam": 19558, + "boycott": 19559, + "pede": 19560, + "deliver": 19561, + "ðŁĴĸðŁĴĸ": 19562, + "merchant": 19563, + "stir": 19564, + "denies": 19565, + "pockets": 19566, + "oti": 19567, + "cuddle": 19568, + "roland": 19569, + "mmed": 19570, + "dened": 19571, + "learners": 19572, + "hoop": 19573, + "sourcing": 19574, + "hacked": 19575, + "dim": 19576, + "environments": 19577, + "benson": 19578, + "judicial": 19579, + "worcester": 19580, + "pearls": 19581, + "governments": 19582, + "arrivals": 19583, + "corners": 19584, + "tuning": 19585, + "labour": 19586, + "ym": 19587, + "ordering": 19588, + "lewi": 19589, + "ife": 19590, + "hygiene": 19591, + "thoughtful": 19592, + "indonesian": 19593, + "campaigning": 19594, + "principle": 19595, + "assaul": 19596, + "rubb": 19597, + "atv": 19598, + "willy": 19599, + "entre": 19600, + "ili": 19601, + "phon": 19602, + "duties": 19603, + "âĻ¥âĻ¥": 19604, + "snakes": 19605, + "loop": 19606, + "amar": 19607, + "convertible": 19608, + "bonding": 19609, + "mentoring": 19610, + "maxwell": 19611, + "ethereum": 19612, + "destroying": 19613, + "axis": 19614, + "cairo": 19615, + "finnish": 19616, + "shock": 19617, + "ðŁĺIJ": 19618, + "caleb": 19619, + 
"coma": 19620, + "pedal": 19621, + "core": 19622, + "continent": 19623, + "elson": 19624, + "tempo": 19625, + "helsinki": 19626, + "acp": 19627, + "tackling": 19628, + "stated": 19629, + "bla": 19630, + "doub": 19631, + "smashing": 19632, + "aja": 19633, + "cameron": 19634, + "disruption": 19635, + "warmth": 19636, + "beingsalmankhan": 19637, + "bulletin": 19638, + "ode": 19639, + "syracuse": 19640, + "aran": 19641, + "mcgregor": 19642, + "bulk": 19643, + "anton": 19644, + "confirmation": 19645, + "spine": 19646, + "imran": 19647, + "instruc": 19648, + "jacks": 19649, + "chio": 19650, + "palm": 19651, + "stre": 19652, + "embarrassing": 19653, + "unt": 19654, + "eliminate": 19655, + "toss": 19656, + "cise": 19657, + "aws": 19658, + "onists": 19659, + "shinee": 19660, + "jos": 19661, + "hose": 19662, + "lively": 19663, + "opponents": 19664, + "movements": 19665, + "recognizing": 19666, + "sandwiches": 19667, + "shakes": 19668, + "exercises": 19669, + "seat": 19670, + "profession": 19671, + "merrychristmas": 19672, + "lugg": 19673, + "adoptdont": 19674, + "marvin": 19675, + "byrne": 19676, + "unle": 19677, + "het": 19678, + "kuwait": 19679, + "rahman": 19680, + "aspect": 19681, + "humbled": 19682, + "genes": 19683, + "fand": 19684, + "longtime": 19685, + ");": 19686, + "campu": 19687, + "angus": 19688, + "ðŁijįðŁı¼": 19689, + "quran": 19690, + "sleeves": 19691, + "slic": 19692, + "¸ë": 19693, + "twelve": 19694, + "youre": 19695, + "ike": 19696, + "gogh": 19697, + "bst": 19698, + "dictionary": 19699, + "reflecting": 19700, + "toon": 19701, + "yarn": 19702, + "embed": 19703, + "ðŁı´": 19704, + "reserves": 19705, + "flooded": 19706, + "veriz": 19707, + "dusk": 19708, + "establish": 19709, + "proli": 19710, + "aud": 19711, + "ritual": 19712, + "orbit": 19713, + "declaration": 19714, + "recordings": 19715, + "camo": 19716, + "cassette": 19717, + "goodluck": 19718, + "cutter": 19719, + "bop": 19720, + "bho": 19721, + "cheating": 19722, + "pacific": 19723, + "mares": 19724, + "timer": 19725, + "colt": 19726, + "trous": 19727, + "tomorrow": 19728, + "hansen": 19729, + "cie": 19730, + "wang": 19731, + "bani": 19732, + "circular": 19733, + "acute": 19734, + "farmer": 19735, + "coys": 19736, + "pse": 19737, + "irving": 19738, + "wj": 19739, + "hawkins": 19740, + "bison": 19741, + "urday": 19742, + "cruising": 19743, + "ote": 19744, + "kath": 19745, + "whistle": 19746, + "yourselves": 19747, + "antis": 19748, + "slash": 19749, + "thoroughly": 19750, + "kesh": 19751, + "serie": 19752, + "exem": 19753, + "enig": 19754, + "guild": 19755, + "shred": 19756, + "hogan": 19757, + "apo": 19758, + "ä¸": 19759, + "puzz": 19760, + "netball": 19761, + "aussi": 19762, + "panorama": 19763, + "wsj": 19764, + "avis": 19765, + "arming": 19766, + "humph": 19767, + "browser": 19768, + "cries": 19769, + "foggy": 19770, + "matte": 19771, + "ðŁĮ»": 19772, + "iter": 19773, + "tallest": 19774, + "byron": 19775, + "captiv": 19776, + "jesu": 19777, + "anyways": 19778, + "flagship": 19779, + "pton": 19780, + "wey": 19781, + "fayette": 19782, + "financial": 19783, + "foul": 19784, + "solomon": 19785, + "jennifer": 19786, + "cucumber": 19787, + "argue": 19788, + "textile": 19789, + "wrestler": 19790, + "johnston": 19791, + "pastor": 19792, + "ðŁĺŃðŁĺŃðŁĺŃðŁĺŃ": 19793, + "cactus": 19794, + "edible": 19795, + "reserved": 19796, + "richie": 19797, + "metres": 19798, + "ingredient": 19799, + "hella": 19800, + "unto": 19801, + "chol": 19802, + "celebs": 19803, + "poets": 19804, + "graham": 19805, + "hayden": 19806, + "coincidence": 19807, 
+ "baw": 19808, + "communicate": 19809, + "fletcher": 19810, + "/-": 19811, + "toledo": 19812, + "ecuador": 19813, + "counsel": 19814, + "slaughter": 19815, + "linear": 19816, + "atp": 19817, + "osu": 19818, + "joel": 19819, + "eved": 19820, + "conquer": 19821, + "rustic": 19822, + "plicity": 19823, + "recognise": 19824, + "roommate": 19825, + "cracked": 19826, + "jasper": 19827, + "pher": 19828, + "ðŁĮº": 19829, + "woven": 19830, + "moist": 19831, + "ffc": 19832, + "steering": 19833, + "nish": 19834, + "standings": 19835, + "frequent": 19836, + "ardi": 19837, + "hazel": 19838, + "asmsg": 19839, + "baum": 19840, + "dart": 19841, + "sidd": 19842, + "nath": 19843, + "chero": 19844, + "cardboard": 19845, + "css": 19846, + "nsfw": 19847, + "pair": 19848, + "ðŁĺįðŁĺĺ": 19849, + "occurred": 19850, + "homelessness": 19851, + "malone": 19852, + "phe": 19853, + "xia": 19854, + "paddy": 19855, + "declare": 19856, + "theatre": 19857, + "bf": 19858, + "persian": 19859, + "tad": 19860, + "axe": 19861, + "suspicious": 19862, + "lamb": 19863, + "mucho": 19864, + "senior": 19865, + "stas": 19866, + "kite": 19867, + "sting": 19868, + "grad": 19869, + "kaf": 19870, + "watering": 19871, + "د": 19872, + "spiral": 19873, + "thms": 19874, + "educator": 19875, + "jerome": 19876, + "ofc": 19877, + "clock": 19878, + "sul": 19879, + "pemb": 19880, + ".........": 19881, + "parkway": 19882, + "deaux": 19883, + "restrictions": 19884, + "mons": 19885, + "needle": 19886, + "ej": 19887, + "leagues": 19888, + "watermelon": 19889, + "aman": 19890, + "plenary": 19891, + "maxim": 19892, + "wab": 19893, + "comingsoon": 19894, + "bryce": 19895, + "vigil": 19896, + "supermarket": 19897, + "fortunate": 19898, + "turquoise": 19899, + "president": 19900, + "liv": 19901, + "interns": 19902, + "feelin": 19903, + "fixtures": 19904, + "stunt": 19905, + "staged": 19906, + "premieres": 19907, + "lok": 19908, + "practiti": 19909, + "shortage": 19910, + "logne": 19911, + "vec": 19912, + "concor": 19913, + "rocke": 19914, + "lig": 19915, + "composed": 19916, + "synthetic": 19917, + "dip": 19918, + "camila": 19919, + "chis": 19920, + "jou": 19921, + "susan": 19922, + "eyebrows": 19923, + "supplement": 19924, + "satisfaction": 19925, + "mohammad": 19926, + "tibet": 19927, + "houseof": 19928, + "pun": 19929, + "assam": 19930, + "shadowhun": 19931, + "psyched": 19932, + "seduc": 19933, + "mandatory": 19934, + "herbert": 19935, + "scallo": 19936, + "streamers": 19937, + "protocol": 19938, + "blockbuster": 19939, + "produces": 19940, + "schnei": 19941, + "laurel": 19942, + "tribe": 19943, + "timehop": 19944, + "pla": 19945, + "modelling": 19946, + "tvtime": 19947, + "mtvstars": 19948, + "widow": 19949, + "metric": 19950, + "cham": 19951, + "condo": 19952, + "flowering": 19953, + "alec": 19954, + "dms": 19955, + "intensity": 19956, + "¨": 19957, + "mccartney": 19958, + "islamabad": 19959, + "kb": 19960, + "ffi": 19961, + "phal": 19962, + "analog": 19963, + "fond": 19964, + "hacks": 19965, + "positivity": 19966, + "treaty": 19967, + "submarine": 19968, + "connect": 19969, + "selen": 19970, + "categories": 19971, + "cub": 19972, + "organize": 19973, + "sik": 19974, + "quoteoftheday": 19975, + "reminding": 19976, + "amor": 19977, + "locking": 19978, + "ðŁijıðŁı¼": 19979, + "compound": 19980, + "ette": 19981, + "bout": 19982, + "recur": 19983, + "ference": 19984, + "mizz": 19985, + "trend": 19986, + "hipster": 19987, + "fortress": 19988, + "forthcoming": 19989, + "prelimin": 19990, + "odyssey": 19991, + "angp": 19992, + "delici": 19993, + "evenings": 
19994, + "ðŁĶ¹": 19995, + "iq": 19996, + "dw": 19997, + "dair": 19998, + "kathryn": 19999, + "christianity": 20000, + "moonlight": 20001, + "hab": 20002, + "whoo": 20003, + "fbf": 20004, + "seth": 20005, + "genuinely": 20006, + "pax": 20007, + "charity": 20008, + "deployed": 20009, + "bnb": 20010, + "bucs": 20011, + "judg": 20012, + "conge": 20013, + "plantation": 20014, + "impress": 20015, + "cara": 20016, + "sclub": 20017, + "scopy": 20018, + "landers": 20019, + "complaints": 20020, + "bama": 20021, + "rebuild": 20022, + "xy": 20023, + "realism": 20024, + "shour": 20025, + "lein": 20026, + "bracelets": 20027, + "mera": 20028, + "assassin": 20029, + "anchor": 20030, + "ðŁijĮðŁı¼": 20031, + "linen": 20032, + "confron": 20033, + "chronicle": 20034, + "comment": 20035, + "catalog": 20036, + "illes": 20037, + "gorge": 20038, + "metry": 20039, + "jungkook": 20040, + "lovemy": 20041, + "sentin": 20042, + "seem": 20043, + "fitness": 20044, + "allied": 20045, + "tsman": 20046, + "digitaltransformation": 20047, + "pran": 20048, + "loft": 20049, + "minton": 20050, + "aldenrichards": 20051, + "envel": 20052, + "cherish": 20053, + "certainty": 20054, + "zzz": 20055, + "rhino": 20056, + "perkins": 20057, + "enrich": 20058, + "capetown": 20059, + "ometer": 20060, + "sections": 20061, + "skeleton": 20062, + "defenders": 20063, + "ðŁĺĿ": 20064, + "penc": 20065, + "brit": 20066, + "jah": 20067, + "capitalism": 20068, + "ðŁ¥ĩ": 20069, + "bazaar": 20070, + "reme": 20071, + "ext": 20072, + "kkk": 20073, + "convert": 20074, + "stormy": 20075, + "bye": 20076, + "karan": 20077, + "chrysler": 20078, + "ados": 20079, + "pressed": 20080, + "sync": 20081, + "ationday": 20082, + "danger": 20083, + "badges": 20084, + "refuses": 20085, + "empowering": 20086, + "lym": 20087, + "exports": 20088, + "adoptdontshop": 20089, + "ðŁĩ¯": 20090, + "thc": 20091, + "awaited": 20092, + "focuses": 20093, + "fined": 20094, + "oat": 20095, + "hahahah": 20096, + "âģ©": 20097, + "nfamily": 20098, + "fiona": 20099, + "luckily": 20100, + "thrilling": 20101, + "typing": 20102, + "outbreak": 20103, + "dies": 20104, + "heu": 20105, + "crawl": 20106, + "nesses": 20107, + "oath": 20108, + "scripts": 20109, + "geeks": 20110, + "ðŁIJĿ": 20111, + "pb": 20112, + "mathematics": 20113, + "alis": 20114, + "________________": 20115, + "gymnastics": 20116, + "activism": 20117, + "recommendation": 20118, + "gren": 20119, + "wain": 20120, + "courty": 20121, + "napol": 20122, + "cauli": 20123, + "hornets": 20124, + "gals": 20125, + "jockey": 20126, + "dirty": 20127, + "atar": 20128, + "enormous": 20129, + "pest": 20130, + "gregation": 20131, + "anos": 20132, + "iiii": 20133, + "defends": 20134, + "blackhistorymonth": 20135, + "atx": 20136, + "mbc": 20137, + "luggage": 20138, + "witch": 20139, + "cob": 20140, + "lasts": 20141, + "cum": 20142, + "ggg": 20143, + "bathing": 20144, + "nar": 20145, + "cebu": 20146, + "ðŁįĥ": 20147, + "navigation": 20148, + "mine": 20149, + "rejo": 20150, + "ðŁİĢ": 20151, + "giftide": 20152, + "reta": 20153, + "useless": 20154, + "pull": 20155, + "deficit": 20156, + "allu": 20157, + "atime": 20158, + "itv": 20159, + "trillion": 20160, + "pue": 20161, + "acies": 20162, + "procedure": 20163, + "lori": 20164, + "jenny": 20165, + "cad": 20166, + "ulously": 20167, + "drac": 20168, + "promotes": 20169, + "ingthe": 20170, + "canu": 20171, + "woohoo": 20172, + "naomi": 20173, + "zardari": 20174, + "tsu": 20175, + "beir": 20176, + "sdg": 20177, + "lever": 20178, + "weber": 20179, + "abud": 20180, + "lund": 20181, + "crowded": 20182, + 
"deployment": 20183, + "terrain": 20184, + "kenny": 20185, + "hof": 20186, + "witnessed": 20187, + "loch": 20188, + "jk": 20189, + "bully": 20190, + "wren": 20191, + "poetry": 20192, + "doff": 20193, + "wwi": 20194, + "mored": 20195, + "dini": 20196, + "culture": 20197, + "prompt": 20198, + "Â¥": 20199, + "maurice": 20200, + "topps": 20201, + "rm": 20202, + "correspon": 20203, + "about": 20204, + "jewels": 20205, + "gibr": 20206, + "eagle": 20207, + "ðŁĺĺðŁĺĺðŁĺĺ": 20208, + "lending": 20209, + "souven": 20210, + "çĶ": 20211, + "contemporaryart": 20212, + "establishment": 20213, + "jong": 20214, + "â̦\"": 20215, + "gator": 20216, + "patriotic": 20217, + "mccoy": 20218, + "vape": 20219, + "humane": 20220, + "feliz": 20221, + "coachella": 20222, + "reposting": 20223, + "steals": 20224, + "fuller": 20225, + "nering": 20226, + "atra": 20227, + "(-": 20228, + "blake": 20229, + "heather": 20230, + "worms": 20231, + "disciplinary": 20232, + "redemption": 20233, + "yard": 20234, + "amin": 20235, + "\"@_": 20236, + "dnc": 20237, + "tds": 20238, + "kappa": 20239, + "newark": 20240, + "commits": 20241, + "spears": 20242, + "jams": 20243, + "tand": 20244, + "msnbc": 20245, + "intermedi": 20246, + "aimed": 20247, + "atic": 20248, + "teenth": 20249, + "observation": 20250, + "kashmir": 20251, + "kavanaugh": 20252, + "oul": 20253, + "sanfrancisco": 20254, + "reu": 20255, + "belated": 20256, + "chow": 20257, + "password": 20258, + "stills": 20259, + "detained": 20260, + "sari": 20261, + "dayton": 20262, + "darren": 20263, + "italian": 20264, + "arth": 20265, + "amusic": 20266, + "arbit": 20267, + "wm": 20268, + "vm": 20269, + "hem": 20270, + "doug": 20271, + "myr": 20272, + "asho": 20273, + "prev": 20274, + "vind": 20275, + "brah": 20276, + "stag": 20277, + "ี": 20278, + "previews": 20279, + "guk": 20280, + "containing": 20281, + "leonardo": 20282, + "saddle": 20283, + "rushing": 20284, + "stav": 20285, + "longh": 20286, + "gambling": 20287, + "vegas": 20288, + "reservation": 20289, + "endale": 20290, + "bala": 20291, + "fla": 20292, + "variant": 20293, + "hedge": 20294, + "bulgaria": 20295, + "natali": 20296, + "weaver": 20297, + "solst": 20298, + "encouraged": 20299, + "apc": 20300, + "asparag": 20301, + "nest": 20302, + "cyclists": 20303, + "fel": 20304, + "ìĬ¤": 20305, + "overwhelming": 20306, + "peyton": 20307, + "jit": 20308, + "apost": 20309, + "mble": 20310, + "bleeding": 20311, + "neighbourhood": 20312, + "avery": 20313, + "expressions": 20314, + "macdonald": 20315, + "gigs": 20316, + "monds": 20317, + "illusion": 20318, + "nct": 20319, + "camero": 20320, + "overhead": 20321, + "myth": 20322, + "oly": 20323, + "vio": 20324, + "etv": 20325, + "laurie": 20326, + "unveiling": 20327, + "prior": 20328, + "conn": 20329, + "ironman": 20330, + "diff": 20331, + "dayin": 20332, + "critici": 20333, + "congo": 20334, + "revision": 20335, + "wale": 20336, + "director": 20337, + "pines": 20338, + "blackpink": 20339, + "garner": 20340, + "curated": 20341, + "manitoba": 20342, + "hac": 20343, + "commonly": 20344, + "barton": 20345, + "....#": 20346, + "mortality": 20347, + "livesmatter": 20348, + "philosop": 20349, + "shorter": 20350, + "convince": 20351, + "freak": 20352, + "vendors": 20353, + "insightful": 20354, + "elly": 20355, + "sensors": 20356, + "eled": 20357, + "sberg": 20358, + "weightloss": 20359, + "ukip": 20360, + "spur": 20361, + "private": 20362, + "qua": 20363, + "ssc": 20364, + ",...": 20365, + "supervisor": 20366, + "adviser": 20367, + "amazingly": 20368, + "lesser": 20369, + "ates": 20370, + 
"mahon": 20371, + "oooooo": 20372, + "saras": 20373, + "pmoindia": 20374, + "waffle": 20375, + "unders": 20376, + "tolerance": 20377, + "sculptures": 20378, + "hersh": 20379, + "knocking": 20380, + "smoke": 20381, + "catholic": 20382, + "grim": 20383, + "traveled": 20384, + "flip": 20385, + "geoff": 20386, + "dinosaurs": 20387, + "slept": 20388, + "scarlet": 20389, + "oki": 20390, + "complaint": 20391, + "obsc": 20392, + "nami": 20393, + "lag": 20394, + "crossfit": 20395, + "ufc": 20396, + "mccain": 20397, + "referee": 20398, + "sadness": 20399, + "penny": 20400, + "lieu": 20401, + "mode": 20402, + "kier": 20403, + "vols": 20404, + "wis": 20405, + "elon": 20406, + "shea": 20407, + "bao": 20408, + "sonia": 20409, + "claire": 20410, + "emmanuel": 20411, + "moisture": 20412, + "digest": 20413, + "viii": 20414, + "teller": 20415, + "chon": 20416, + "accessory": 20417, + "nightclub": 20418, + "fossil": 20419, + "awan": 20420, + "husky": 20421, + "aboriginal": 20422, + "brandon": 20423, + "fficient": 20424, + "cougars": 20425, + "sted": 20426, + "admitted": 20427, + "ignored": 20428, + "contentmarketing": 20429, + "agas": 20430, + "vase": 20431, + "executed": 20432, + "negotiations": 20433, + "shead": 20434, + "nand": 20435, + "tablets": 20436, + "goth": 20437, + "tsal": 20438, + "dfw": 20439, + "onep": 20440, + "protector": 20441, + "spho": 20442, + "gazette": 20443, + "andreas": 20444, + "sser": 20445, + "compilation": 20446, + "hav": 20447, + "containers": 20448, + "broker": 20449, + "socal": 20450, + "porcelain": 20451, + "hyuk": 20452, + "airing": 20453, + "ðŁĴ°": 20454, + "publisher": 20455, + "scenario": 20456, + "spartans": 20457, + "reviewing": 20458, + "itudes": 20459, + "edel": 20460, + "pearson": 20461, + "bash": 20462, + "maui": 20463, + "aad": 20464, + "ðŁĮĬ": 20465, + "liu": 20466, + "ulate": 20467, + "programmes": 20468, + "favour": 20469, + "webdesign": 20470, + "realty": 20471, + "motivational": 20472, + "crosses": 20473, + "'...": 20474, + "busch": 20475, + "adjustable": 20476, + "arjun": 20477, + "mistak": 20478, + "dimension": 20479, + "pistol": 20480, + "weighs": 20481, + "eny": 20482, + "unveil": 20483, + "indycar": 20484, + "gordon": 20485, + "fade": 20486, + "franken": 20487, + "qualities": 20488, + "bett": 20489, + "locate": 20490, + "kerr": 20491, + "spc": 20492, + "confusion": 20493, + "nee": 20494, + "lucky": 20495, + "bases": 20496, + "depends": 20497, + "firefighter": 20498, + "ola": 20499, + "ret": 20500, + "maroon": 20501, + "ðŁĶĬ": 20502, + "wam": 20503, + "defining": 20504, + "wheat": 20505, + "bil": 20506, + "és": 20507, + "bhai": 20508, + "psych": 20509, + "tau": 20510, + "icans": 20511, + "thik": 20512, + "obile": 20513, + "inspector": 20514, + "ìĨĮë": 20515, + "illon": 20516, + "gos": 20517, + "evangel": 20518, + "fai": 20519, + "sist": 20520, + "vocation": 20521, + "burge": 20522, + "chistan": 20523, + "renewed": 20524, + "enthusiasm": 20525, + "enting": 20526, + "agri": 20527, + "ikea": 20528, + "msc": 20529, + "aerospace": 20530, + "sensiti": 20531, + "memoir": 20532, + "hospice": 20533, + "cocaine": 20534, + "derry": 20535, + "mechanics": 20536, + "Ħà¸": 20537, + "tino": 20538, + "reduces": 20539, + "collectors": 20540, + "injustice": 20541, + "suppre": 20542, + "vana": 20543, + "abun": 20544, + "napa": 20545, + "susa": 20546, + "oslo": 20547, + "eff": 20548, + "encore": 20549, + "licence": 20550, + "cheddar": 20551, + "zal": 20552, + "mount": 20553, + "ðŁĴIJ": 20554, + "threatens": 20555, + "!!\"": 20556, + "archie": 20557, + "futsal": 20558, + 
"scuba": 20559, + "jos": 20560, + "gnon": 20561, + "sexi": 20562, + "sofficial": 20563, + "comparing": 20564, + "dominant": 20565, + "toftheday": 20566, + "fait": 20567, + "proposals": 20568, + "gift": 20569, + "yas": 20570, + "cnc": 20571, + "lr": 20572, + "hab": 20573, + "reservoir": 20574, + "beliefs": 20575, + "general": 20576, + "marti": 20577, + "td": 20578, + "este": 20579, + "ìł": 20580, + "wil": 20581, + "ðŁij¯": 20582, + "ðŁĶ«": 20583, + "spx": 20584, + "etwork": 20585, + "excerpt": 20586, + "einstein": 20587, + "hiro": 20588, + "silhou": 20589, + "teamed": 20590, + "perception": 20591, + "corridor": 20592, + "mentalhealth": 20593, + "hints": 20594, + "benny": 20595, + "inducted": 20596, + "swx": 20597, + "widesp": 20598, + "speak": 20599, + "cheryl": 20600, + "drug": 20601, + "ðŁĺķ": 20602, + "hf": 20603, + "asparagus": 20604, + "mysteries": 20605, + "fitzgerald": 20606, + "offer": 20607, + "therapist": 20608, + "career": 20609, + "damaging": 20610, + "tsd": 20611, + "peru": 20612, + "weibo": 20613, + "yay": 20614, + "phoenix": 20615, + "discre": 20616, + "macbook": 20617, + "barker": 20618, + "stigma": 20619, + "spread": 20620, + "rockies": 20621, + "kangar": 20622, + "bridg": 20623, + "pai": 20624, + "bishop": 20625, + "tailed": 20626, + "capsule": 20627, + "ðŁĴĵ": 20628, + "geof": 20629, + "royale": 20630, + "shortlisted": 20631, + "oste": 20632, + "ashamed": 20633, + "chapp": 20634, + "keye": 20635, + "cla": 20636, + "screenshot": 20637, + "austrian": 20638, + "native": 20639, + "enight": 20640, + "juliet": 20641, + "michele": 20642, + "ðŁĮ´": 20643, + "travelers": 20644, + "pil": 20645, + "footballer": 20646, + "winchester": 20647, + "ðŁĻĦ": 20648, + "azerbai": 20649, + "goldeng": 20650, + "organisations": 20651, + "interpretation": 20652, + "predator": 20653, + "oftheweek": 20654, + "logan": 20655, + "poké": 20656, + "marie": 20657, + "calla": 20658, + "tnt": 20659, + "cinde": 20660, + "getic": 20661, + "fitfam": 20662, + "grav": 20663, + "owens": 20664, + "ðŁĮ±": 20665, + "shootout": 20666, + "salis": 20667, + "commissions": 20668, + "cohe": 20669, + "ptic": 20670, + "nixon": 20671, + "hia": 20672, + "ambition": 20673, + "marine": 20674, + "cruelty": 20675, + "tk": 20676, + "crude": 20677, + "salty": 20678, + "jima": 20679, + "mongo": 20680, + "irony": 20681, + "onwards": 20682, + "arrests": 20683, + "strangers": 20684, + "iger": 20685, + "cyclist": 20686, + "rag": 20687, + "extends": 20688, + "tradio": 20689, + "bourg": 20690, + "moi": 20691, + "ella": 20692, + "eable": 20693, + "lexus": 20694, + "aul": 20695, + "dera": 20696, + "historian": 20697, + "morton": 20698, + "tiff": 20699, + "manner": 20700, + "kot": 20701, + "dk": 20702, + "pointed": 20703, + "marqu": 20704, + "aan": 20705, + "eney": 20706, + "dublin": 20707, + "onpoli": 20708, + "emili": 20709, + "secret": 20710, + "flo": 20711, + "âļ¡": 20712, + "baj": 20713, + "steep": 20714, + "accompanied": 20715, + "rumours": 20716, + "devi": 20717, + "purchasing": 20718, + "fig": 20719, + "pub": 20720, + "schoo": 20721, + "autonomous": 20722, + "goalie": 20723, + "xia": 20724, + "automatically": 20725, + "revers": 20726, + "tero": 20727, + "fuku": 20728, + "titanic": 20729, + "shook": 20730, + "sandals": 20731, + "seekers": 20732, + "excav": 20733, + "nordic": 20734, + "bigolive": 20735, + "bake": 20736, + "ratt": 20737, + "zak": 20738, + "nep": 20739, + "ðŁĺ¤": 20740, + "candy": 20741, + "billions": 20742, + "bookworm": 20743, + "ppet": 20744, + "à³": 20745, + "surfaces": 20746, + "scars": 20747, + "philip": 20748, + 
"dogg": 20749, + "cigars": 20750, + "cote": 20751, + "translated": 20752, + "curator": 20753, + "sindh": 20754, + "hangover": 20755, + "brewer": 20756, + "ones": 20757, + "elton": 20758, + "ðŁĴªðŁı¼": 20759, + "marcu": 20760, + "elliot": 20761, + "righte": 20762, + "dioce": 20763, + "russ": 20764, + "railways": 20765, + "grandson": 20766, + "ascen": 20767, + "apology": 20768, + "await": 20769, + "mobili": 20770, + "respir": 20771, + "partisan": 20772, + "olivi": 20773, + "strike": 20774, + "yoo": 20775, + "whitehouse": 20776, + "expressed": 20777, + "pups": 20778, + "bedford": 20779, + "cultur": 20780, + "frogs": 20781, + "flying": 20782, + "cavali": 20783, + "cds": 20784, + "friger": 20785, + "streetphotography": 20786, + "resolve": 20787, + "taliban": 20788, + "kang": 20789, + "crushing": 20790, + "jum": 20791, + "ðŁĺĴ": 20792, + "williamson": 20793, + "tang": 20794, + "curly": 20795, + "tman": 20796, + "veteran": 20797, + "faire": 20798, + "artificialintelligence": 20799, + "unanim": 20800, + "pren": 20801, + "backdrop": 20802, + "frances": 20803, + "occer": 20804, + "dorothy": 20805, + "working": 20806, + "arthr": 20807, + "converted": 20808, + "daylight": 20809, + "servant": 20810, + "paddle": 20811, + "complaining": 20812, + "thirty": 20813, + "nadal": 20814, + "aku": 20815, + "ibrahim": 20816, + "addressed": 20817, + "piss": 20818, + "greenhouse": 20819, + "battalion": 20820, + "simulator": 20821, + "outlets": 20822, + "embroidery": 20823, + "ðŁĵ±": 20824, + "fiscal": 20825, + "gerard": 20826, + "sassy": 20827, + "ðŁİīðŁİīðŁİī": 20828, + "ventures": 20829, + "merit": 20830, + "publicity": 20831, + "ðŁijĪ": 20832, + "sophisticated": 20833, + "ctu": 20834, + "conventional": 20835, + "condolences": 20836, + "israel": 20837, + "tradition": 20838, + "aran": 20839, + "tess": 20840, + "glad": 20841, + "ðŁĺĬðŁĺĬ": 20842, + "correction": 20843, + "geon": 20844, + "amd": 20845, + "orship": 20846, + "beast": 20847, + "chment": 20848, + "ìŀ": 20849, + "nico": 20850, + "wknd": 20851, + "wels": 20852, + "cushion": 20853, + "belie": 20854, + "voc": 20855, + "idiots": 20856, + "underneath": 20857, + "puma": 20858, + "cornell": 20859, + "enation": 20860, + "lul": 20861, + "swach": 20862, + "abig": 20863, + "urer": 20864, + "mie": 20865, + "formerly": 20866, + "caf": 20867, + "ernal": 20868, + "chorus": 20869, + "julius": 20870, + "senator": 20871, + "âľį": 20872, + "whir": 20873, + "salvador": 20874, + "phd": 20875, + "unified": 20876, + "booster": 20877, + "graphical": 20878, + "wrec": 20879, + "sonny": 20880, + "miz": 20881, + "derers": 20882, + "sall": 20883, + "vens": 20884, + "tuscany": 20885, + "wid": 20886, + "yong": 20887, + "kurds": 20888, + "waz": 20889, + "trolls": 20890, + "macro": 20891, + "caturday": 20892, + "pressing": 20893, + "sasha": 20894, + "centennial": 20895, + "gusts": 20896, + "emc": 20897, + "before": 20898, + "denise": 20899, + "cust": 20900, + "ðŁĵ¢": 20901, + "looo": 20902, + "basel": 20903, + "england": 20904, + "yolo": 20905, + "ardu": 20906, + "manifesto": 20907, + "doha": 20908, + "ìľ": 20909, + "knives": 20910, + "bournemouth": 20911, + "bibl": 20912, + "barb": 20913, + "alicia": 20914, + "Ø©": 20915, + "comer": 20916, + "cyclone": 20917, + "git": 20918, + "anews": 20919, + "characteri": 20920, + "ventura": 20921, + "intra": 20922, + "sfgiants": 20923, + "hut": 20924, + "bea": 20925, + "darwin": 20926, + "eller": 20927, + "alv": 20928, + "reese": 20929, + "bly": 20930, + "karan": 20931, + "conclusion": 20932, + "manny": 20933, + "flakes": 20934, + "uniteblue": 
20935, + "nadu": 20936, + "copp": 20937, + "edges": 20938, + "lancashire": 20939, + "ials": 20940, + "otta": 20941, + "philippe": 20942, + "lent": 20943, + "chee": 20944, + "mentors": 20945, + "festival": 20946, + "anism": 20947, + "complimentary": 20948, + "rj": 20949, + "pug": 20950, + "dine": 20951, + "wei": 20952, + "cliffs": 20953, + "sarmy": 20954, + "tiveness": 20955, + "treasury": 20956, + "iland": 20957, + "aftermath": 20958, + "rabbi": 20959, + "oun": 20960, + "bouquet": 20961, + "heritage": 20962, + "zion": 20963, + "surrender": 20964, + "shenan": 20965, + "inks": 20966, + "karl": 20967, + "ghty": 20968, + "policing": 20969, + "examination": 20970, + "cey": 20971, + "persu": 20972, + "measurement": 20973, + "hydrogen": 20974, + "luhan": 20975, + "âłĢâłĢâłĢâłĢ": 20976, + "wari": 20977, + "оÐ": 20978, + "jy": 20979, + "fowler": 20980, + "mish": 20981, + "alfre": 20982, + "âĺij": 20983, + "bbnaija": 20984, + "catalogue": 20985, + "recognised": 20986, + "saver": 20987, + "huskies": 20988, + "colin": 20989, + "mundo": 20990, + "siva": 20991, + "png": 20992, + "discounted": 20993, + "manutd": 20994, + "fresno": 20995, + "devin": 20996, + "preliminary": 20997, + "trophies": 20998, + "plastics": 20999, + "dug": 21000, + "procu": 21001, + "indigo": 21002, + "gard": 21003, + "dylan": 21004, + "pitches": 21005, + "groundbreaking": 21006, + "inson": 21007, + "blac": 21008, + "anthology": 21009, + "fh": 21010, + "explic": 21011, + "rard": 21012, + "admiral": 21013, + "sochi": 21014, + "lashes": 21015, + "splendid": 21016, + "envy": 21017, + "adv": 21018, + "sexy": 21019, + "festivities": 21020, + "sticking": 21021, + "bib": 21022, + "thrill": 21023, + "opp": 21024, + "ariel": 21025, + "botanical": 21026, + "endurance": 21027, + "females": 21028, + "bricks": 21029, + "vatican": 21030, + "blackpool": 21031, + "bermu": 21032, + "brough": 21033, + "roller": 21034, + "bid": 21035, + "suede": 21036, + "slovenia": 21037, + "mming": 21038, + "mlb": 21039, + "medalist": 21040, + "dians": 21041, + "rehabilitation": 21042, + "neon": 21043, + "sgo": 21044, + "lithu": 21045, + "ramos": 21046, + "zed": 21047, + "pianist": 21048, + "intensive": 21049, + "broadband": 21050, + "study": 21051, + "petersburg": 21052, + "luca": 21053, + "ahhhh": 21054, + "physician": 21055, + "dillon": 21056, + "telecom": 21057, + "grief": 21058, + "mun": 21059, + "acro": 21060, + "sided": 21061, + "sly": 21062, + "blows": 21063, + "classiccars": 21064, + "trium": 21065, + "argy": 21066, + "?:": 21067, + "hri": 21068, + "marshmal": 21069, + "âĢĵ": 21070, + "topping": 21071, + "warsaw": 21072, + "transc": 21073, + "preservation": 21074, + "bav": 21075, + "refriger": 21076, + "experiments": 21077, + "äº": 21078, + "glit": 21079, + "sliga": 21080, + "gage": 21081, + "factor": 21082, + "flavours": 21083, + "brony": 21084, + "spo": 21085, + "cookbook": 21086, + "carriage": 21087, + "away": 21088, + "nyfw": 21089, + "onian": 21090, + "wg": 21091, + "simpsons": 21092, + "rolex": 21093, + "ðŁı¿": 21094, + "crosby": 21095, + "ãħ¤": 21096, + "credi": 21097, + "syndic": 21098, + "pubs": 21099, + "alife": 21100, + "poorly": 21101, + "maced": 21102, + "ðŁĺŀ": 21103, + "behindthe": 21104, + "wenger": 21105, + "nats": 21106, + "ðŁİŁ": 21107, + "rubbish": 21108, + "procedures": 21109, + "typhoon": 21110, + "ophobia": 21111, + "erdo": 21112, + "fuel": 21113, + "viera": 21114, + "bumps": 21115, + "millennium": 21116, + "newzealand": 21117, + "lectures": 21118, + "iton": 21119, + "milky": 21120, + "responded": 21121, + "ê°": 21122, + "landscape": 
21123, + "..@": 21124, + "bother": 21125, + "âĸ¶": 21126, + "zhang": 21127, + "huawei": 21128, + "tuition": 21129, + "sworn": 21130, + "inu": 21131, + "yor": 21132, + "paolo": 21133, + "auditions": 21134, + "abil": 21135, + "malaysian": 21136, + "hops": 21137, + "feathers": 21138, + "mple": 21139, + "auts": 21140, + "ão": 21141, + "bounty": 21142, + "iche": 21143, + "ìĺ": 21144, + "shq": 21145, + "pinot": 21146, + "gears": 21147, + "disappear": 21148, + "videogames": 21149, + "tna": 21150, + "alzheimer": 21151, + "ðŁĮŀ": 21152, + "aji": 21153, + "underwear": 21154, + "switching": 21155, + "signage": 21156, + "oscar": 21157, + "econ": 21158, + "drow": 21159, + "clint": 21160, + "plated": 21161, + "gundy": 21162, + "emblem": 21163, + "hoes": 21164, + "icist": 21165, + "nelly": 21166, + "junior": 21167, + "roadshow": 21168, + "minerals": 21169, + "atle": 21170, + "alexandria": 21171, + "acclaimed": 21172, + "vell": 21173, + "shiva": 21174, + "adhe": 21175, + "enne": 21176, + "amnesty": 21177, + "hounds": 21178, + "councillor": 21179, + "ðŁĴ¦": 21180, + "aesthe": 21181, + "partnering": 21182, + "influenced": 21183, + "magno": 21184, + "flare": 21185, + "extinction": 21186, + "civilian": 21187, + "majesty": 21188, + "vail": 21189, + "lawmakers": 21190, + "racks": 21191, + "mcc": 21192, + "orian": 21193, + "spices": 21194, + "errors": 21195, + "mayer": 21196, + "coca": 21197, + "pai": 21198, + "sooooo": 21199, + "retiring": 21200, + "bathro": 21201, + "ðŁĻĮðŁĻĮ": 21202, + "âĸª": 21203, + "suf": 21204, + "endorsement": 21205, + "building": 21206, + "brooch": 21207, + "palla": 21208, + "arvind": 21209, + "agent": 21210, + "karate": 21211, + "rhi": 21212, + "ctv": 21213, + "taine": 21214, + "umm": 21215, + "bax": 21216, + "reigns": 21217, + "uniof": 21218, + "enterprises": 21219, + "adele": 21220, + "flake": 21221, + "attire": 21222, + "bruce": 21223, + "bahamas": 21224, + "gravy": 21225, + "sain": 21226, + "cheek": 21227, + "trivi": 21228, + "lov": 21229, + "een": 21230, + "bblo": 21231, + "ladygaga": 21232, + "itta": 21233, + ".\"-": 21234, + "dustin": 21235, + "observatory": 21236, + "eighth": 21237, + "bloomberg": 21238, + "khs": 21239, + "fcc": 21240, + "gist": 21241, + "commemorate": 21242, + "veer": 21243, + "sexuality": 21244, + "edc": 21245, + "nicole": 21246, + "vacancy": 21247, + "user": 21248, + "sona": 21249, + ":'(": 21250, + "diploma": 21251, + "tend": 21252, + "upgrades": 21253, + "ÅŁ": 21254, + "jurassic": 21255, + "cardiac": 21256, + "drs": 21257, + "widespread": 21258, + "Ãł": 21259, + "dailies": 21260, + "vendor": 21261, + "simplicity": 21262, + "wider": 21263, + "lenses": 21264, + "supplements": 21265, + "depos": 21266, + "observed": 21267, + "vines": 21268, + "partially": 21269, + "renewal": 21270, + "collaborate": 21271, + "alig": 21272, + "finity": 21273, + "phu": 21274, + "zzy": 21275, + "petit": 21276, + "ðŁĵħ": 21277, + "zin": 21278, + "igu": 21279, + "smack": 21280, + "fallon": 21281, + "ðŁĵ£": 21282, + "backwards": 21283, + "component": 21284, + "oso": 21285, + "compatible": 21286, + "binding": 21287, + "zurich": 21288, + "thome": 21289, + "wounds": 21290, + "lyric": 21291, + "freshmen": 21292, + "sneaky": 21293, + "fibro": 21294, + "diet": 21295, + "employer": 21296, + "insect": 21297, + "hated": 21298, + "scher": 21299, + "razor": 21300, + "nsw": 21301, + "booker": 21302, + "californi": 21303, + "avfc": 21304, + "°": 21305, + "pretending": 21306, + "pepsi": 21307, + "alis": 21308, + "untitled": 21309, + "kart": 21310, + "grandparents": 21311, + "ethe": 21312, + "ock": 
21313, + "luxemb": 21314, + "visuals": 21315, + "smallbusiness": 21316, + "abdullah": 21317, + "minho": 21318, + "subaru": 21319, + "hra": 21320, + "revealing": 21321, + "heartbreaking": 21322, + "clarity": 21323, + "amg": 21324, + "slr": 21325, + "****": 21326, + "âŀĸ": 21327, + "record": 21328, + "iciary": 21329, + "minded": 21330, + "yeh": 21331, + "excessive": 21332, + "knuck": 21333, + "icecream": 21334, + "truth": 21335, + "evic": 21336, + "tastic": 21337, + "antarc": 21338, + "rendering": 21339, + ",,": 21340, + "mitt": 21341, + "lorenzo": 21342, + "stpatrick": 21343, + "boundary": 21344, + "zig": 21345, + "vocab": 21346, + "osaka": 21347, + "furn": 21348, + "tun": 21349, + "gul": 21350, + "sounding": 21351, + "blogger": 21352, + "utterly": 21353, + "gaf": 21354, + "advancing": 21355, + "lcd": 21356, + "margin": 21357, + "lifelong": 21358, + "solstice": 21359, + "shra": 21360, + "waits": 21361, + "plear": 21362, + "breach": 21363, + "enligh": 21364, + "ader": 21365, + "ittle": 21366, + "cation": 21367, + "hoon": 21368, + "studied": 21369, + "?????": 21370, + "kash": 21371, + "evangeli": 21372, + "psl": 21373, + "weights": 21374, + "metals": 21375, + "tyres": 21376, + "turno": 21377, + "wie": 21378, + "carb": 21379, + "gale": 21380, + "seal": 21381, + "sunite": 21382, + "amic": 21383, + "patterson": 21384, + "án": 21385, + "euph": 21386, + "upstairs": 21387, + "qualifiers": 21388, + "khalifa": 21389, + "applemusic": 21390, + "ìĨĮëħ": 21391, + "vaughan": 21392, + "alter": 21393, + "cruiser": 21394, + "mua": 21395, + "tana": 21396, + "katrina": 21397, + "idols": 21398, + "spoiled": 21399, + "secretly": 21400, + "fibre": 21401, + "partnered": 21402, + "umes": 21403, + "giov": 21404, + "comet": 21405, + "screenshotsaturday": 21406, + "keller": 21407, + "filtr": 21408, + "fet": 21409, + "conway": 21410, + "peu": 21411, + "badminton": 21412, + "gid": 21413, + "mound": 21414, + "donkey": 21415, + "buff": 21416, + "leather": 21417, + "largely": 21418, + "broch": 21419, + "intments": 21420, + "amuse": 21421, + "rk": 21422, + "stove": 21423, + "impacted": 21424, + "cont": 21425, + "cracks": 21426, + "prisoner": 21427, + "bari": 21428, + "contractor": 21429, + "orioles": 21430, + "dominate": 21431, + "polar": 21432, + "amelia": 21433, + "drc": 21434, + "ðŁijĮðŁijĮ": 21435, + "vist": 21436, + "suarez": 21437, + "injection": 21438, + "blooms": 21439, + "ðŁļ¨ðŁļ¨": 21440, + "stiff": 21441, + "paypal": 21442, + "snowing": 21443, + "thursdays": 21444, + "goose": 21445, + "wedge": 21446, + "educated": 21447, + "weakness": 21448, + "decker": 21449, + "abudha": 21450, + "breezy": 21451, + "ÛĮ": 21452, + "hopeful": 21453, + "obi": 21454, + "raider": 21455, + "gham": 21456, + "deu": 21457, + "seve": 21458, + "partly": 21459, + "fut": 21460, + "infused": 21461, + "merri": 21462, + "thane": 21463, + "sometime": 21464, + "hue": 21465, + "mein": 21466, + "credit": 21467, + "sliding": 21468, + "rande": 21469, + "cherry": 21470, + "deadpool": 21471, + "shol": 21472, + "aram": 21473, + "underwood": 21474, + "skye": 21475, + "disturbing": 21476, + "mnt": 21477, + "polished": 21478, + "guardians": 21479, + "hadn": 21480, + "picasso": 21481, + "arius": 21482, + "akshay": 21483, + "irri": 21484, + "jh": 21485, + "happen": 21486, + "lakh": 21487, + "dalton": 21488, + "atthe": 21489, + "swell": 21490, + "marsha": 21491, + "reh": 21492, + "cours": 21493, + "jkt": 21494, + "topus": 21495, + "service": 21496, + "rink": 21497, + "hackers": 21498, + "donovan": 21499, + "horo": 21500, + "tcm": 21501, + "mayhem": 21502, + 
"chase": 21503, + "devops": 21504, + "kensing": 21505, + "scup": 21506, + "shere": 21507, + "qualification": 21508, + "clive": 21509, + "tong": 21510, + "nancy": 21511, + "maris": 21512, + "derdale": 21513, + "berman": 21514, + "cinderella": 21515, + "jolly": 21516, + "cic": 21517, + "loot": 21518, + "collectibles": 21519, + "homicide": 21520, + "gge": 21521, + "epidemic": 21522, + "suites": 21523, + "muddy": 21524, + "gimme": 21525, + "erec": 21526, + "-*": 21527, + "talla": 21528, + "lisle": 21529, + "embroide": 21530, + "ðŁĩ©ðŁĩª": 21531, + "verizon": 21532, + "vector": 21533, + "beanie": 21534, + "artisan": 21535, + "gain": 21536, + "flores": 21537, + "vigil": 21538, + "uso": 21539, + "ðŁĻıðŁı½": 21540, + "grinding": 21541, + "gher": 21542, + "airports": 21543, + "responsive": 21544, + "shaft": 21545, + "cancel": 21546, + "ceremonies": 21547, + "eme": 21548, + "atari": 21549, + "brushes": 21550, + "eager": 21551, + "bohemi": 21552, + "childrens": 21553, + "yankee": 21554, + "maa": 21555, + "suspense": 21556, + "moran": 21557, + "macar": 21558, + "sunflower": 21559, + "crew": 21560, + "void": 21561, + "kear": 21562, + "fashioned": 21563, + "jennings": 21564, + "sundayfunday": 21565, + "submissions": 21566, + "mead": 21567, + "herman": 21568, + "wai": 21569, + "critically": 21570, + "leum": 21571, + "baekhyun": 21572, + "forcing": 21573, + "cobra": 21574, + "ãģ®": 21575, + "acquire": 21576, + "alk": 21577, + "geology": 21578, + "primar": 21579, + "importantly": 21580, + "irez": 21581, + "bundesliga": 21582, + "curiosity": 21583, + "sena": 21584, + "strict": 21585, + "consoli": 21586, + "winters": 21587, + "venom": 21588, + "cheltenham": 21589, + "ðŁįº": 21590, + "cena": 21591, + "tat": 21592, + "bain": 21593, + "glover": 21594, + "undercover": 21595, + "asses": 21596, + "carn": 21597, + "memorialday": 21598, + "ameli": 21599, + "irene": 21600, + "chon": 21601, + "synthesis": 21602, + "speedy": 21603, + "mitsubi": 21604, + "slayer": 21605, + "composite": 21606, + "understands": 21607, + "pew": 21608, + "interrup": 21609, + "henri": 21610, + "morrow": 21611, + "anom": 21612, + "thofjuly": 21613, + "glee": 21614, + "three": 21615, + "ðŁĺ®": 21616, + "andhi": 21617, + "chatt": 21618, + "renewables": 21619, + "yes": 21620, + "transfers": 21621, + "!!!!!!!!": 21622, + "babu": 21623, + "duter": 21624, + "loops": 21625, + "peers": 21626, + "oilers": 21627, + "paulo": 21628, + "ication": 21629, + "hmu": 21630, + "wara": 21631, + "mercer": 21632, + "homeland": 21633, + "fuji": 21634, + "aley": 21635, + "yearbook": 21636, + "rem": 21637, + "reen": 21638, + "absur": 21639, + "bois": 21640, + "]:": 21641, + "caesar": 21642, + "shotgun": 21643, + "kurdish": 21644, + "oren": 21645, + "rae": 21646, + "ancies": 21647, + "typic": 21648, + "fh": 21649, + "default": 21650, + "replic": 21651, + "luk": 21652, + "transactions": 21653, + "rys": 21654, + "infantry": 21655, + "ðŁį¾": 21656, + "chow": 21657, + "chickens": 21658, + "bagh": 21659, + "wyatt": 21660, + "aye": 21661, + "ggi": 21662, + "brews": 21663, + "editions": 21664, + "mira": 21665, + "commencement": 21666, + "presu": 21667, + "periscope": 21668, + "ichi": 21669, + "guatemala": 21670, + "zambia": 21671, + "paints": 21672, + "witches": 21673, + "wani": 21674, + "undere": 21675, + "croy": 21676, + "vows": 21677, + "usmc": 21678, + "hearted": 21679, + "theatres": 21680, + "shuffle": 21681, + "level": 21682, + "multic": 21683, + "squeeze": 21684, + "fern": 21685, + "appet": 21686, + "postal": 21687, + "malt": 21688, + "onboard": 21689, + "ldnt": 
21690, + "coo": 21691, + "ssc": 21692, + "kac": 21693, + "ðŁĺĩ": 21694, + "scrap": 21695, + "marcos": 21696, + "dealers": 21697, + "annu": 21698, + "miller": 21699, + "cove": 21700, + "ulary": 21701, + "vladimir": 21702, + "beef": 21703, + "thur": 21704, + "pickled": 21705, + "sesame": 21706, + "bengaluru": 21707, + "mott": 21708, + "kathleen": 21709, + "hist": 21710, + "notor": 21711, + "drank": 21712, + "duchess": 21713, + "snowfall": 21714, + "eff": 21715, + "tiny": 21716, + "jn": 21717, + "syour": 21718, + "specialists": 21719, + "scotus": 21720, + "baylor": 21721, + "everest": 21722, + "malibu": 21723, + "prem": 21724, + "harmful": 21725, + "lali": 21726, + "bates": 21727, + "gye": 21728, + "differenti": 21729, + "andra": 21730, + "geometry": 21731, + "elover": 21732, + "blackout": 21733, + "====": 21734, + "kota": 21735, + "interact": 21736, + "asian": 21737, + "layo": 21738, + "samurai": 21739, + "fidel": 21740, + "exhausted": 21741, + "gladi": 21742, + "pdt": 21743, + "spheric": 21744, + "antiqu": 21745, + "guitar": 21746, + "sturi": 21747, + "hopper": 21748, + "angle": 21749, + "fills": 21750, + "slap": 21751, + "mith": 21752, + "rodney": 21753, + "ongi": 21754, + "insom": 21755, + "preventing": 21756, + "cassidy": 21757, + "apho": 21758, + "oregon": 21759, + "loin": 21760, + "hammond": 21761, + "contributing": 21762, + "fn": 21763, + "garri": 21764, + "orion": 21765, + "compelling": 21766, + "escaping": 21767, + "aiming": 21768, + "plumb": 21769, + "bistro": 21770, + "beasts": 21771, + "concerning": 21772, + "boe": 21773, + "dopp": 21774, + "shoplocal": 21775, + "stumbled": 21776, + "âĤ¹": 21777, + "nazis": 21778, + "âĢįâĻĤï¸ı": 21779, + "gesture": 21780, + "warts": 21781, + "usopen": 21782, + "higgins": 21783, + "charli": 21784, + "hangs": 21785, + "bombers": 21786, + "°:": 21787, + "feeds": 21788, + "cch": 21789, + "stil": 21790, + "nicola": 21791, + "ðŁĵº": 21792, + "clamation": 21793, + "tropic": 21794, + "afro": 21795, + "ouk": 21796, + "expenses": 21797, + "derrick": 21798, + "aline": 21799, + "faw": 21800, + "regard": 21801, + "imer": 21802, + "satin": 21803, + "thium": 21804, + "ryder": 21805, + "pearl": 21806, + "tess": 21807, + "mmmmm": 21808, + "senses": 21809, + "ðŁĩ¹": 21810, + "positive": 21811, + "exhaust": 21812, + "occur": 21813, + "norris": 21814, + "lilly": 21815, + "isles": 21816, + "directing": 21817, + "yofficial": 21818, + "countless": 21819, + "samar": 21820, + "onstage": 21821, + "flock": 21822, + "mirrors": 21823, + "archer": 21824, + "moi": 21825, + "kd": 21826, + "viv": 21827, + "inos": 21828, + "sikh": 21829, + "lei": 21830, + "sensory": 21831, + "brits": 21832, + "knox": 21833, + "chestnut": 21834, + "opy": 21835, + "coliseum": 21836, + "zaf": 21837, + "divin": 21838, + "adapter": 21839, + ":)))": 21840, + "temple": 21841, + "kun": 21842, + "helmets": 21843, + "tdf": 21844, + "guide": 21845, + "mold": 21846, + "oids": 21847, + "luther": 21848, + "heis": 21849, + "monastery": 21850, + "spree": 21851, + "klu": 21852, + "britney": 21853, + "jaguars": 21854, + "greats": 21855, + "ccc": 21856, + "kyrie": 21857, + "machinery": 21858, + "cricket": 21859, + "rero": 21860, + "abo": 21861, + "aspiring": 21862, + "semifinals": 21863, + "aless": 21864, + "signatures": 21865, + "vard": 21866, + "meth": 21867, + "herbal": 21868, + "holden": 21869, + "kingdom": 21870, + "apor": 21871, + "reggie": 21872, + "oreo": 21873, + "palestinians": 21874, + "emmys": 21875, + "sectional": 21876, + "roi": 21877, + "neymar": 21878, + "quel": 21879, + "cull": 21880, + "lka": 
21881, + "hazel": 21882, + "estimate": 21883, + "ulties": 21884, + "gow": 21885, + "bea": 21886, + "purchases": 21887, + "belts": 21888, + "protects": 21889, + "mé": 21890, + "guessing": 21891, + "bbo": 21892, + "claudia": 21893, + "fracking": 21894, + "jonny": 21895, + "elk": 21896, + "celtic": 21897, + "almighty": 21898, + "raje": 21899, + "courtyard": 21900, + "igi": 21901, + "canes": 21902, + "ðŁĴªðŁı»": 21903, + "bankrup": 21904, + "lethal": 21905, + "âľĮï¸ı": 21906, + "graphicdesign": 21907, + "vader": 21908, + "pencils": 21909, + "roughly": 21910, + "dante": 21911, + "mfg": 21912, + "constell": 21913, + "camel": 21914, + "jb": 21915, + "blossoms": 21916, + "ento": 21917, + "balochistan": 21918, + "cinemato": 21919, + "illard": 21920, + "jersey": 21921, + "consent": 21922, + "dented": 21923, + "contempl": 21924, + "scher": 21925, + "holi": 21926, + "lough": 21927, + "stour": 21928, + "ayo": 21929, + "beginners": 21930, + "curb": 21931, + "vhs": 21932, + "ajax": 21933, + "duff": 21934, + "aveng": 21935, + "domest": 21936, + "committing": 21937, + "aired": 21938, + "chap": 21939, + "hedgehog": 21940, + "disappointing": 21941, + "freelance": 21942, + "inland": 21943, + "charms": 21944, + "ðŁĺįâĿ¤ï¸ı": 21945, + "aish": 21946, + "mx": 21947, + "buckle": 21948, + "tidal": 21949, + "permit": 21950, + "boating": 21951, + "racha": 21952, + "kendrick": 21953, + "bello": 21954, + "bhi": 21955, + "plea": 21956, + "estimates": 21957, + "lb": 21958, + "apologies": 21959, + "jaya": 21960, + "bbl": 21961, + "astoni": 21962, + "interstate": 21963, + "maintaining": 21964, + "elbow": 21965, + "mup": 21966, + "epit": 21967, + "ðŁĺ¡": 21968, + "violations": 21969, + "defend": 21970, + "beh": 21971, + "slc": 21972, + "amir": 21973, + "puri": 21974, + "tium": 21975, + "fifa": 21976, + "blurry": 21977, + "scrim": 21978, + "ðŁĻıðŁı¾": 21979, + "maple": 21980, + "relatives": 21981, + "âĺĿ": 21982, + "choc": 21983, + "connor": 21984, + "⾨⾨": 21985, + "whisp": 21986, + "listings": 21987, + "maze": 21988, + "thanking": 21989, + "ridd": 21990, + "grassroots": 21991, + "shifting": 21992, + "desperately": 21993, + "gorilla": 21994, + "deni": 21995, + "jules": 21996, + "strath": 21997, + "gley": 21998, + "jain": 21999, + "buick": 22000, + "tanner": 22001, + "ðŁĴĿ": 22002, + "gae": 22003, + "prim": 22004, + "itors": 22005, + "nano": 22006, + "separation": 22007, + "armenia": 22008, + "bordeaux": 22009, + "ðŁħ": 22010, + "pjnet": 22011, + "burial": 22012, + "ebon": 22013, + "gloss": 22014, + "renew": 22015, + "grier": 22016, + "speeds": 22017, + "comicbooks": 22018, + "symboli": 22019, + "purposes": 22020, + "ãħłãħł": 22021, + "spatial": 22022, + "notable": 22023, + "cion": 22024, + "nps": 22025, + "hoffman": 22026, + "norman": 22027, + "rtg": 22028, + "dusty": 22029, + "situated": 22030, + "tran": 22031, + "kfc": 22032, + "emen": 22033, + "nickel": 22034, + "hastings": 22035, + "settling": 22036, + "grit": 22037, + "lena": 22038, + "waw": 22039, + "arts": 22040, + "gum": 22041, + "caregi": 22042, + "lewis": 22043, + "sapphire": 22044, + "remember": 22045, + "embedded": 22046, + "tlc": 22047, + "blat": 22048, + "sergeant": 22049, + "elsa": 22050, + "bootcamp": 22051, + "bowman": 22052, + "photographic": 22053, + "pillars": 22054, + "directioners": 22055, + "classified": 22056, + "nois": 22057, + "veer": 22058, + "barrels": 22059, + "whoop": 22060, + "ðŁĺ±ðŁĺ±": 22061, + "female": 22062, + "petroleum": 22063, + "media": 22064, + "efc": 22065, + "pokémon": 22066, + "à¤ķ": 22067, + "enthusiastic": 22068, + "varun": 
22069, + "profiles": 22070, + "pediatric": 22071, + "accidents": 22072, + "conrad": 22073, + "jang": 22074, + "jojo": 22075, + "acor": 22076, + "observer": 22077, + "lf": 22078, + "livestock": 22079, + "forgi": 22080, + "fos": 22081, + "elm": 22082, + "anand": 22083, + "goe": 22084, + "cere": 22085, + "avoiding": 22086, + "grit": 22087, + "oman": 22088, + "thankfully": 22089, + "scattered": 22090, + "nicky": 22091, + "cylinder": 22092, + "cheesy": 22093, + "diver": 22094, + "mahesh": 22095, + "caves": 22096, + "earliest": 22097, + "quinte": 22098, + "subjects": 22099, + "bend": 22100, + "gulf": 22101, + "vocalist": 22102, + "glue": 22103, + "patches": 22104, + "unstopp": 22105, + "snyder": 22106, + "demonstrating": 22107, + "pio": 22108, + "horns": 22109, + "wickets": 22110, + "andthe": 22111, + "rama": 22112, + "yoon": 22113, + "straight": 22114, + "bedtime": 22115, + "orang": 22116, + "bullets": 22117, + "saurus": 22118, + "miners": 22119, + "incidents": 22120, + "!...": 22121, + "ðŁİ¸": 22122, + "agers": 22123, + "handles": 22124, + "states": 22125, + "inity": 22126, + "dons": 22127, + "incredible": 22128, + "eminem": 22129, + "aviv": 22130, + "rudy": 22131, + "mozart": 22132, + "folklore": 22133, + "appliances": 22134, + "mtl": 22135, + "frey": 22136, + "dias": 22137, + "hua": 22138, + "pageant": 22139, + "strive": 22140, + "imprison": 22141, + "bullish": 22142, + "rana": 22143, + "alerts": 22144, + "bbmas": 22145, + "hyper": 22146, + "derbyshire": 22147, + "recre": 22148, + "redd": 22149, + "deborah": 22150, + "cosmos": 22151, + "lawson": 22152, + "melanie": 22153, + "psycho": 22154, + "hoor": 22155, + "doodles": 22156, + "sniper": 22157, + "shady": 22158, + "mantle": 22159, + "canadian": 22160, + "newyear": 22161, + "interactions": 22162, + "separated": 22163, + "cords": 22164, + "spirituality": 22165, + "apu": 22166, + "ito": 22167, + "pct": 22168, + "pelosi": 22169, + "rebellion": 22170, + "seiz": 22171, + "worcester": 22172, + "sectors": 22173, + "uli": 22174, + "santa": 22175, + "е": 22176, + "ðŁĩªðŁĩ¸": 22177, + "biased": 22178, + "classical": 22179, + "gamma": 22180, + "deeplear": 22181, + "emerge": 22182, + "backer": 22183, + "surance": 22184, + "handcrafted": 22185, + "ðŁİ¥": 22186, + "francis": 22187, + "millan": 22188, + "ici": 22189, + "crown": 22190, + "wow": 22191, + "striped": 22192, + "unfair": 22193, + "relaxation": 22194, + "³ï¸ı": 22195, + "embracing": 22196, + "shealth": 22197, + "paleo": 22198, + "martini": 22199, + "distillery": 22200, + "wrink": 22201, + "ork": 22202, + "nath": 22203, + "hayley": 22204, + "courthouse": 22205, + "siber": 22206, + "sadi": 22207, + "quietly": 22208, + "melt": 22209, + "msm": 22210, + "meh": 22211, + "smartphones": 22212, + "relent": 22213, + "pping": 22214, + "warwick": 22215, + "cologne": 22216, + "glia": 22217, + "cotton": 22218, + "prog": 22219, + "lone": 22220, + "ipsw": 22221, + "starters": 22222, + "expands": 22223, + "ump": 22224, + "sued": 22225, + "skipper": 22226, + "infections": 22227, + "ingle": 22228, + "á": 22229, + "clerk": 22230, + "demonstrate": 22231, + "acar": 22232, + "ðŁĺĤðŁĺĤðŁĺĤ": 22233, + "tibet": 22234, + "buns": 22235, + "alom": 22236, + "demolition": 22237, + "ssia": 22238, + "gst": 22239, + "[]": 22240, + "soar": 22241, + "âĺĢ": 22242, + "ðŁĺª": 22243, + "ðŁĵĬ": 22244, + "deepest": 22245, + "beyond": 22246, + "aret": 22247, + "attends": 22248, + "activated": 22249, + "dimit": 22250, + "âļªï¸ı": 22251, + "highlighted": 22252, + "magazines": 22253, + "rumor": 22254, + "azza": 22255, + "stephens": 22256, + 
"dolph": 22257, + "shockey": 22258, + "mats": 22259, + "weav": 22260, + "melan": 22261, + "servers": 22262, + "traum": 22263, + "kush": 22264, + "æĹ": 22265, + "babys": 22266, + "paz": 22267, + "aal": 22268, + "lause": 22269, + "breakers": 22270, + "canterbury": 22271, + "ulture": 22272, + "miri": 22273, + "euros": 22274, + "taneous": 22275, + "impressions": 22276, + "dutch": 22277, + "ild": 22278, + "ghi": 22279, + "purdue": 22280, + "adequate": 22281, + "lp": 22282, + "syner": 22283, + "angler": 22284, + "durable": 22285, + "galore": 22286, + "rown": 22287, + "mgmt": 22288, + "ðŁĵĮ": 22289, + "lucia": 22290, + "âĺijï¸ı": 22291, + "zayn": 22292, + "borrow": 22293, + ".(": 22294, + "northumber": 22295, + "crush": 22296, + "enga": 22297, + "sush": 22298, + "extravag": 22299, + "tout": 22300, + "mahal": 22301, + "alistic": 22302, + "thermo": 22303, + "galleries": 22304, + "esse": 22305, + "chibi": 22306, + "attractions": 22307, + "lexington": 22308, + "legislature": 22309, + "documented": 22310, + "residen": 22311, + "brownies": 22312, + "wf": 22313, + "stool": 22314, + "planets": 22315, + "shoppers": 22316, + "conductor": 22317, + "msp": 22318, + "tricky": 22319, + "fruity": 22320, + "endra": 22321, + "feelthe": 22322, + "whipped": 22323, + "hairstyle": 22324, + "refer": 22325, + "ook": 22326, + "octopus": 22327, + "audiences": 22328, + "kumar": 22329, + "afterno": 22330, + "optim": 22331, + "cfl": 22332, + "nip": 22333, + "geni": 22334, + "alphabet": 22335, + "annab": 22336, + "lamin": 22337, + "accepts": 22338, + "lng": 22339, + "ðŁĺ«": 22340, + "tine": 22341, + "acom": 22342, + "cheerleaders": 22343, + "tk": 22344, + "gron": 22345, + "vg": 22346, + "kung": 22347, + "jax": 22348, + "dhabi": 22349, + "rss": 22350, + "mackenzie": 22351, + "beirut": 22352, + "cleanup": 22353, + "gypsy": 22354, + "stell": 22355, + "burger": 22356, + "hurricanes": 22357, + "education": 22358, + "stina": 22359, + "âĻ¡âĻ¡": 22360, + "unfortunate": 22361, + "jeremi": 22362, + "badger": 22363, + "aters": 22364, + ":â̦": 22365, + "terra": 22366, + "sublime": 22367, + "stud": 22368, + "ymca": 22369, + "mru": 22370, + "duterte": 22371, + "brennan": 22372, + "bulb": 22373, + "melo": 22374, + "ylon": 22375, + "hacker": 22376, + "cred": 22377, + "gud": 22378, + "asan": 22379, + "padilla": 22380, + "embroidered": 22381, + "vietnamese": 22382, + "pioneers": 22383, + "projection": 22384, + "reboot": 22385, + "idc": 22386, + "aney": 22387, + "primer": 22388, + "suffers": 22389, + "winding": 22390, + "pon": 22391, + "stoday": 22392, + "morn": 22393, + "uch": 22394, + "allin": 22395, + "adidas": 22396, + "elizabeth": 22397, + "tuck": 22398, + "ography": 22399, + "ðŁļĢ": 22400, + "beg": 22401, + "osborne": 22402, + "ghetto": 22403, + "rh": 22404, + "cnn": 22405, + "irma": 22406, + "makin": 22407, + "cables": 22408, + "murders": 22409, + "ocks": 22410, + "insta": 22411, + "alas": 22412, + "sik": 22413, + "cuff": 22414, + "lare": 22415, + "foodies": 22416, + "ovic": 22417, + "atom": 22418, + "geometric": 22419, + "empathy": 22420, + "ี": 22421, + "centenary": 22422, + "newspapers": 22423, + "administrative": 22424, + "ðŁİĬ": 22425, + "stive": 22426, + "contractors": 22427, + "lett": 22428, + "tasmania": 22429, + "awesomeness": 22430, + "density": 22431, + "veen": 22432, + "princeton": 22433, + "frequently": 22434, + "reject": 22435, + "ghi": 22436, + "modular": 22437, + "ceramics": 22438, + "shag": 22439, + "kiwi": 22440, + "canvas": 22441, + "sweatshirt": 22442, + "anj": 22443, + "timm": 22444, + "napoli": 22445, + "iler": 
22446, + "appeals": 22447, + "hamilton": 22448, + "mayo": 22449, + "weave": 22450, + "arranged": 22451, + "wharf": 22452, + "occupy": 22453, + "bvb": 22454, + "asaki": 22455, + "otter": 22456, + "norm": 22457, + "vies": 22458, + "detox": 22459, + "tional": 22460, + "derek": 22461, + "idad": 22462, + "admissions": 22463, + "constituency": 22464, + "upper": 22465, + "woot": 22466, + "alloy": 22467, + "seve": 22468, + "lub": 22469, + "uncomfortable": 22470, + "edwin": 22471, + "abre": 22472, + "dwight": 22473, + "arche": 22474, + "virtually": 22475, + "spol": 22476, + "prie": 22477, + "aii": 22478, + "err": 22479, + "switch": 22480, + "barack": 22481, + "seok": 22482, + "coul": 22483, + "wnt": 22484, + "poul": 22485, + "olive": 22486, + "caffeine": 22487, + "cardiff": 22488, + "notorious": 22489, + "demp": 22490, + "excess": 22491, + "barr": 22492, + "tford": 22493, + "ajay": 22494, + "bumped": 22495, + "mythology": 22496, + "shelley": 22497, + "falcon": 22498, + "shakespeare": 22499, + "mustangs": 22500, + "noted": 22501, + "bone": 22502, + "civilization": 22503, + "syd": 22504, + "parsons": 22505, + "unofficial": 22506, + "hyped": 22507, + "spends": 22508, + "opposed": 22509, + "vings": 22510, + "spacex": 22511, + "notification": 22512, + "deciding": 22513, + "biotech": 22514, + "outsi": 22515, + "salah": 22516, + "!.": 22517, + "fed": 22518, + "ssy": 22519, + "cms": 22520, + "badgers": 22521, + "cro": 22522, + "elaine": 22523, + "nba": 22524, + "dyour": 22525, + "nant": 22526, + "honeymoon": 22527, + "climbed": 22528, + "conomy": 22529, + "atha": 22530, + "mell": 22531, + "nebula": 22532, + "naturephotography": 22533, + "julie": 22534, + "bmx": 22535, + "invested": 22536, + "mono": 22537, + "lieutenant": 22538, + "watkins": 22539, + "technician": 22540, + "ose": 22541, + "kae": 22542, + "ìĽ": 22543, + "mcqueen": 22544, + "preach": 22545, + "traveller": 22546, + "flexibility": 22547, + "zebra": 22548, + "retailer": 22549, + "pant": 22550, + "bender": 22551, + "brandt": 22552, + "squid": 22553, + "warrant": 22554, + "verified": 22555, + "cass": 22556, + "piercing": 22557, + "honours": 22558, + "tying": 22559, + "morris": 22560, + "kissed": 22561, + "oprah": 22562, + "panoramic": 22563, + "mei": 22564, + "splatoon": 22565, + "wichita": 22566, + "arias": 22567, + "galli": 22568, + "indyref": 22569, + "goodtimes": 22570, + "atheist": 22571, + "confession": 22572, + "owski": 22573, + "repping": 22574, + "additions": 22575, + "mechanism": 22576, + "zim": 22577, + "jans": 22578, + "suf": 22579, + "chopped": 22580, + "beginnings": 22581, + "vitamins": 22582, + "ãħ¤ãħ¤": 22583, + "orth": 22584, + "poles": 22585, + "rub": 22586, + "antarctica": 22587, + "indiefilm": 22588, + "webcam": 22589, + "ketch": 22590, + "brett": 22591, + "clement": 22592, + "heron": 22593, + "defeating": 22594, + "hydro": 22595, + "bucket": 22596, + "wandering": 22597, + "sidney": 22598, + "futureof": 22599, + "binge": 22600, + "onies": 22601, + "knockout": 22602, + "administrator": 22603, + "synthe": 22604, + "lent": 22605, + "jani": 22606, + "barley": 22607, + "premierleague": 22608, + "nerds": 22609, + "crm": 22610, + "bras": 22611, + "botany": 22612, + "evolved": 22613, + "rotter": 22614, + "rowed": 22615, + "tumor": 22616, + "wealthy": 22617, + "ÂŃ": 22618, + "monarch": 22619, + "lished": 22620, + "dahl": 22621, + "ðŁİĥ": 22622, + "buch": 22623, + "kenyan": 22624, + "ا": 22625, + "redness": 22626, + "assembled": 22627, + "semit": 22628, + "hudder": 22629, + "shrop": 22630, + "rani": 22631, + "learning": 22632, + "mory": 
22633, + "itia": 22634, + "geographic": 22635, + "worldof": 22636, + "fb": 22637, + "phosp": 22638, + "boogie": 22639, + "amped": 22640, + "?...": 22641, + "chew": 22642, + "dwarf": 22643, + "arus": 22644, + "ssen": 22645, + "rusty": 22646, + "recruits": 22647, + "hk": 22648, + "garde": 22649, + "applause": 22650, + "volumes": 22651, + "involves": 22652, + "tac": 22653, + "handbag": 22654, + "translate": 22655, + "ffel": 22656, + "seym": 22657, + "aquatic": 22658, + "transfer": 22659, + "zodi": 22660, + "andr": 22661, + "academia": 22662, + "crater": 22663, + "tez": 22664, + "arse": 22665, + "adapt": 22666, + "coloni": 22667, + "snowman": 22668, + "mali": 22669, + "hangin": 22670, + "dischar": 22671, + "oysters": 22672, + "phoe": 22673, + "colonel": 22674, + "wba": 22675, + "hispanic": 22676, + "thriving": 22677, + "shy": 22678, + "agles": 22679, + "salesforce": 22680, + "creme": 22681, + "soles": 22682, + "lafayette": 22683, + "âī": 22684, + "teria": 22685, + "acha": 22686, + "sperson": 22687, + "gogo": 22688, + "carly": 22689, + "theore": 22690, + "amore": 22691, + "vox": 22692, + "aft": 22693, + "ãĤ¹": 22694, + "staple": 22695, + "muffin": 22696, + "diagram": 22697, + "inox": 22698, + "sustained": 22699, + "avent": 22700, + "meta": 22701, + "arbitr": 22702, + "decay": 22703, + "adole": 22704, + "н": 22705, + "ecol": 22706, + "pho": 22707, + "nk": 22708, + "ocu": 22709, + "granny": 22710, + "ça": 22711, + "luxembour": 22712, + "stadt": 22713, + "alberto": 22714, + "levit": 22715, + "amas": 22716, + "dx": 22717, + "orphan": 22718, + "cobb": 22719, + "asc": 22720, + "logy": 22721, + "immense": 22722, + "chants": 22723, + "offline": 22724, + "pent": 22725, + "brex": 22726, + "winger": 22727, + "plane": 22728, + "iel": 22729, + "nichols": 22730, + "cathy": 22731, + "naruto": 22732, + "lowed": 22733, + "///": 22734, + "ignorance": 22735, + "catastro": 22736, + "youts": 22737, + "schen": 22738, + "build": 22739, + "hazi": 22740, + "sine": 22741, + "criticalrole": 22742, + "dug": 22743, + "detect": 22744, + "logs": 22745, + "enamel": 22746, + "stpatricksday": 22747, + "eddie": 22748, + "copa": 22749, + "cigarettes": 22750, + "hoff": 22751, + "kaya": 22752, + "lagoon": 22753, + "rapha": 22754, + "airborne": 22755, + "choose": 22756, + "puertor": 22757, + "kev": 22758, + "guiding": 22759, + "frosty": 22760, + "borough": 22761, + "mira": 22762, + "ðŁİĬ": 22763, + "cadet": 22764, + "anush": 22765, + "yogi": 22766, + "eger": 22767, + "fling": 22768, + "slope": 22769, + "ninth": 22770, + "weston": 22771, + "footwear": 22772, + "fn": 22773, + "mayweather": 22774, + "aam": 22775, + "plain": 22776, + "staircase": 22777, + "witnesses": 22778, + "workouts": 22779, + "robust": 22780, + "dexter": 22781, + "cohort": 22782, + "ðŁļĹ": 22783, + "spell": 22784, + "haze": 22785, + "oom": 22786, + "organising": 22787, + "wildfire": 22788, + "contacts": 22789, + "avon": 22790, + "mino": 22791, + "updating": 22792, + "ðŁį»": 22793, + "lithium": 22794, + "ingual": 22795, + "kis": 22796, + "auga": 22797, + "locom": 22798, + "deduc": 22799, + "uda": 22800, + "thak": 22801, + "boyle": 22802, + "mper": 22803, + "hottie": 22804, + "erik": 22805, + "revised": 22806, + "isla": 22807, + "travelphotography": 22808, + "ooza": 22809, + "enqui": 22810, + "conferences": 22811, + "clover": 22812, + "groom": 22813, + "curves": 22814, + "liveon": 22815, + "perf": 22816, + "displaced": 22817, + "bolog": 22818, + "xxxx": 22819, + "ðŁĺ©ðŁĺ©": 22820, + "teal": 22821, + "vessels": 22822, + "rainforest": 22823, + "calci": 22824, + 
"panther": 22825, + "giraffe": 22826, + "tasted": 22827, + "imagery": 22828, + "padres": 22829, + "daytime": 22830, + "bass": 22831, + "ripe": 22832, + "opioid": 22833, + "nue": 22834, + "vinyl": 22835, + "inventor": 22836, + "sens": 22837, + "processor": 22838, + "mut": 22839, + "gadgets": 22840, + "biblical": 22841, + "shannon": 22842, + "jacqueline": 22843, + "cary": 22844, + "theresistance": 22845, + "alien": 22846, + "nvi": 22847, + "cosy": 22848, + "bihar": 22849, + "foley": 22850, + "rend": 22851, + "mugs": 22852, + "faken": 22853, + "clone": 22854, + "niallo": 22855, + "grabbed": 22856, + "chihu": 22857, + "powerhouse": 22858, + "ntt": 22859, + "cherokee": 22860, + "sponge": 22861, + "implementing": 22862, + "rhine": 22863, + "leone": 22864, + "ðŁįĢ": 22865, + "prettiest": 22866, + "infrared": 22867, + "improv": 22868, + "switched": 22869, + "tubes": 22870, + "contr": 22871, + "blk": 22872, + "projected": 22873, + "beaver": 22874, + "yot": 22875, + "bbcradio": 22876, + "thigh": 22877, + "persecu": 22878, + "apologize": 22879, + "wack": 22880, + "poster": 22881, + "oliver": 22882, + "aza": 22883, + "loud": 22884, + "(?)": 22885, + "fthe": 22886, + "womenshi": 22887, + "sparrow": 22888, + "blush": 22889, + "usable": 22890, + "scales": 22891, + "itative": 22892, + "peuge": 22893, + "needing": 22894, + "leggings": 22895, + "glamorous": 22896, + "matur": 22897, + "cz": 22898, + "watt": 22899, + "dab": 22900, + "tamar": 22901, + "etsym": 22902, + "bauer": 22903, + "heartfelt": 22904, + "hn": 22905, + "elsewhere": 22906, + "birch": 22907, + "alumini": 22908, + "huck": 22909, + "eme": 22910, + "jl": 22911, + "trafford": 22912, + "dz": 22913, + "portions": 22914, + "anasta": 22915, + "arthritis": 22916, + "espn": 22917, + "bergen": 22918, + "violation": 22919, + "yoshi": 22920, + "cz": 22921, + "northumberland": 22922, + "closures": 22923, + "ðŁĩ¯ðŁĩ": 22924, + "smiley": 22925, + "rw": 22926, + "telugu": 22927, + "intensi": 22928, + "gregg": 22929, + "vega": 22930, + "dungeon": 22931, + "southbound": 22932, + "bail": 22933, + "dominican": 22934, + "semifinal": 22935, + "chapters": 22936, + "hitch": 22937, + "vanity": 22938, + "transiti": 22939, + "recommends": 22940, + "satisf": 22941, + "barca": 22942, + "queens": 22943, + "((": 22944, + "destruc": 22945, + "strait": 22946, + "ravi": 22947, + "desserts": 22948, + "intru": 22949, + "haram": 22950, + "kos": 22951, + "foe": 22952, + "fatty": 22953, + "paisley": 22954, + "magnitude": 22955, + "dridge": 22956, + "comey": 22957, + "schemes": 22958, + "visionary": 22959, + "ourt": 22960, + "downloaded": 22961, + "ðŁĻĮðŁı½": 22962, + "gdpr": 22963, + "lani": 22964, + "pwc": 22965, + "guad": 22966, + "nicest": 22967, + "stakeholders": 22968, + "referred": 22969, + "georgetown": 22970, + "arvindkejriwal": 22971, + "schneider": 22972, + "indoors": 22973, + "allstar": 22974, + "stranded": 22975, + "gender": 22976, + "zepp": 22977, + "masses": 22978, + "ðŁIJ±": 22979, + "patiently": 22980, + "bldg": 22981, + "zab": 22982, + "wearab": 22983, + "vivid": 22984, + "heck": 22985, + "della": 22986, + "symb": 22987, + "jeopar": 22988, + "lager": 22989, + "àª": 22990, + "combines": 22991, + "nec": 22992, + "bray": 22993, + "flop": 22994, + "txwx": 22995, + "joys": 22996, + "pont": 22997, + "profound": 22998, + "surround": 22999, + "madhu": 23000, + "mable": 23001, + "ayr": 23002, + "teas": 23003, + "nsa": 23004, + "openly": 23005, + "ernest": 23006, + "ãĥ©": 23007, + "topo": 23008, + "gna": 23009, + "antioxid": 23010, + "tian": 23011, + "etr": 23012, + 
"cello": 23013, + "mathi": 23014, + "generosity": 23015, + "biting": 23016, + "manic": 23017, + "kelsey": 23018, + "cheeks": 23019, + "tender": 23020, + "wth": 23021, + "pronoun": 23022, + "ultimately": 23023, + "gusta": 23024, + "arianag": 23025, + "gerry": 23026, + "bleed": 23027, + "reddy": 23028, + "mich": 23029, + "mitsubishi": 23030, + "operated": 23031, + "sexually": 23032, + "mau": 23033, + "cllr": 23034, + "vids": 23035, + "coc": 23036, + "melted": 23037, + "ðŁĮĪ": 23038, + "qld": 23039, + "itech": 23040, + "instrumental": 23041, + "endgame": 23042, + "ðŁĵĸ": 23043, + "energi": 23044, + "brownie": 23045, + "tamil": 23046, + "atin": 23047, + "dominated": 23048, + "praises": 23049, + "fireplace": 23050, + "sensational": 23051, + "mena": 23052, + "karti": 23053, + "unprece": 23054, + "rupt": 23055, + "oriental": 23056, + "mccor": 23057, + "tournaments": 23058, + "scenter": 23059, + "reeves": 23060, + "prescription": 23061, + "same": 23062, + "frau": 23063, + "truffle": 23064, + "embo": 23065, + "romans": 23066, + "blasts": 23067, + "technological": 23068, + "prat": 23069, + "bsb": 23070, + "yar": 23071, + "trendy": 23072, + "acl": 23073, + "alad": 23074, + "ðŁįģ": 23075, + "ohh": 23076, + "bankrupt": 23077, + "thoven": 23078, + "regards": 23079, + "iser": 23080, + "warwick": 23081, + "vineyards": 23082, + "realm": 23083, + "niallofficial": 23084, + "dota": 23085, + "gemini": 23086, + "todo": 23087, + "vable": 23088, + "¨¨": 23089, + "lau": 23090, + "wreath": 23091, + "juve": 23092, + "natasha": 23093, + "lever": 23094, + "lori": 23095, + "horser": 23096, + "cctv": 23097, + "airbnb": 23098, + "esanders": 23099, + "sinclair": 23100, + "emabiggest": 23101, + "highschool": 23102, + "contest": 23103, + "optimistic": 23104, + "tte": 23105, + "ðŁĴķðŁĴķ": 23106, + "ssd": 23107, + "yee": 23108, + "helena": 23109, + "consen": 23110, + "ricks": 23111, + "jesse": 23112, + "anic": 23113, + "ðŁİ¯": 23114, + "reacts": 23115, + "robe": 23116, + "independence": 23117, + "voltage": 23118, + "mington": 23119, + "sant": 23120, + "à¸Ļà¸": 23121, + "----------------": 23122, + "sentinel": 23123, + "kett": 23124, + "rehearsing": 23125, + "aaaaaaaa": 23126, + "softhe": 23127, + "stirling": 23128, + "search": 23129, + "wigan": 23130, + "standout": 23131, + "snail": 23132, + "pentagon": 23133, + "Äģ": 23134, + "chlor": 23135, + "crust": 23136, + "netany": 23137, + "chemist": 23138, + "disappeared": 23139, + "ricardo": 23140, + "spiders": 23141, + "bose": 23142, + "warren": 23143, + "messing": 23144, + "banners": 23145, + "guel": 23146, + "parach": 23147, + "maid": 23148, + "counted": 23149, + "epile": 23150, + "bonfire": 23151, + "speechless": 23152, + "setter": 23153, + "measured": 23154, + "rejects": 23155, + "nikki": 23156, + "lester": 23157, + "forensic": 23158, + "fabrics": 23159, + "aloha": 23160, + "preserved": 23161, + "watford": 23162, + "detailing": 23163, + "darth": 23164, + "bou": 23165, + "carly": 23166, + "...'": 23167, + "tailgate": 23168, + "notifications": 23169, + "å¤": 23170, + "passive": 23171, + "trousers": 23172, + "baloch": 23173, + "rother": 23174, + "typically": 23175, + "Ã¥": 23176, + "spit": 23177, + "wiz": 23178, + "sicily": 23179, + "technically": 23180, + "expose": 23181, + "stage": 23182, + "hubb": 23183, + "cream": 23184, + "caps": 23185, + "poke": 23186, + "sleek": 23187, + "june": 23188, + "temporarily": 23189, + "dez": 23190, + "awakens": 23191, + "lame": 23192, + "_-": 23193, + "jiha": 23194, + "tuesdays": 23195, + "advised": 23196, + "advisors": 23197, + "existed": 23198, 
+ "disagree": 23199, + "newsroom": 23200, + "losers": 23201, + "worldtour": 23202, + "drying": 23203, + "aldi": 23204, + "harness": 23205, + "footprint": 23206, + "hobbit": 23207, + "pmln": 23208, + "iro": 23209, + "quered": 23210, + "assess": 23211, + "gaze": 23212, + "sab": 23213, + "thian": 23214, + "íĬ": 23215, + "tif": 23216, + "observe": 23217, + "evil": 23218, + "drawer": 23219, + "sweep": 23220, + "cory": 23221, + "cody": 23222, + "kyoto": 23223, + "callum": 23224, + "ninj": 23225, + "laurent": 23226, + "bei": 23227, + "sketching": 23228, + "customized": 23229, + "dur": 23230, + "regrets": 23231, + "knoxville": 23232, + "ìķĦ": 23233, + "messaging": 23234, + "gracie": 23235, + "abundance": 23236, + "bidding": 23237, + "brewed": 23238, + "flouri": 23239, + "therapeutic": 23240, + "altitude": 23241, + "hogs": 23242, + "burner": 23243, + "electro": 23244, + "wonderfully": 23245, + "heater": 23246, + "postpon": 23247, + "livery": 23248, + "rall": 23249, + "adas": 23250, + "aac": 23251, + "saul": 23252, + "brooklyn": 23253, + "playhouse": 23254, + "âĻ¥âĻ¥âĻ¥": 23255, + "charitable": 23256, + "iny": 23257, + "zah": 23258, + "competitions": 23259, + "beav": 23260, + "plugged": 23261, + "ois": 23262, + "doom": 23263, + "astronom": 23264, + "specialized": 23265, + "maxi": 23266, + "taps": 23267, + "cellular": 23268, + "depressed": 23269, + "folklorethursday": 23270, + "crib": 23271, + "emul": 23272, + "ë°©": 23273, + "figh": 23274, + "ruz": 23275, + "carlisle": 23276, + "spear": 23277, + "sidewalk": 23278, + "dei": 23279, + "dependent": 23280, + "laces": 23281, + "nhs": 23282, + "ðŁĮĻ": 23283, + "realizing": 23284, + "network": 23285, + "riche": 23286, + "regin": 23287, + "refresh": 23288, + "stral": 23289, + "pathology": 23290, + "plaid": 23291, + "psychedelic": 23292, + "hind": 23293, + "uka": 23294, + "algorithm": 23295, + "linking": 23296, + "progressi": 23297, + "fey": 23298, + "dade": 23299, + "hydrated": 23300, + "bant": 23301, + "famed": 23302, + "cotsw": 23303, + "boise": 23304, + "asc": 23305, + "racing": 23306, + "javier": 23307, + "wwen": 23308, + "marlins": 23309, + "poop": 23310, + "swept": 23311, + "tonights": 23312, + "wef": 23313, + "anime": 23314, + "slovak": 23315, + "âŀĸâŀĸ": 23316, + "claus": 23317, + "lemme": 23318, + "clippers": 23319, + "rels": 23320, + "arianagrande": 23321, + "rte": 23322, + "kot": 23323, + "thalapathy": 23324, + "hungarian": 23325, + "zuma": 23326, + "yvon": 23327, + "isu": 23328, + "journeys": 23329, + "clinics": 23330, + "bebe": 23331, + "wwf": 23332, + "nws": 23333, + "superheroes": 23334, + "erit": 23335, + "sleague": 23336, + "identification": 23337, + "motto": 23338, + "bai": 23339, + "sourced": 23340, + "iller": 23341, + "api": 23342, + "prise": 23343, + "unprecedented": 23344, + "damas": 23345, + "tunisia": 23346, + "drain": 23347, + "underestim": 23348, + "ether": 23349, + "quarterly": 23350, + "rewarding": 23351, + "alham": 23352, + "wolverine": 23353, + "cabine": 23354, + "hypno": 23355, + "nadine": 23356, + "havana": 23357, + "dae": 23358, + "ðŁĵĪ": 23359, + "dron": 23360, + "readings": 23361, + "bati": 23362, + "pico": 23363, + "merci": 23364, + "itian": 23365, + "walkers": 23366, + "elope": 23367, + "mikey": 23368, + "godzilla": 23369, + "burlington": 23370, + "abuja": 23371, + "socialism": 23372, + "atility": 23373, + "shell": 23374, + "harrypotter": 23375, + "gno": 23376, + "abur": 23377, + "releg": 23378, + "felici": 23379, + "rogen": 23380, + "neuroscience": 23381, + "instin": 23382, + "atham": 23383, + "vouchers": 23384, + 
"jarre": 23385, + "fuse": 23386, + "defici": 23387, + "monterey": 23388, + "deport": 23389, + "midday": 23390, + "ppard": 23391, + "freed": 23392, + "ameter": 23393, + "wilt": 23394, + "ningham": 23395, + "pratt": 23396, + "liberty": 23397, + "slogan": 23398, + "oto": 23399, + "pri": 23400, + "coated": 23401, + "cpd": 23402, + "nett": 23403, + "illas": 23404, + "malawi": 23405, + "evolve": 23406, + "accessibility": 23407, + "ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥": 23408, + "ornament": 23409, + "bp": 23410, + "elis": 23411, + "sonline": 23412, + "chiro": 23413, + "flick": 23414, + "ibm": 23415, + "arak": 23416, + "enables": 23417, + "garland": 23418, + "sane": 23419, + "cuties": 23420, + "trip": 23421, + "rotterdam": 23422, + "nys": 23423, + "lamps": 23424, + "lucas": 23425, + "bog": 23426, + "rails": 23427, + "travelled": 23428, + "hicks": 23429, + "enu": 23430, + "sabha": 23431, + "scrub": 23432, + "hier": 23433, + "hartford": 23434, + "foo": 23435, + "fernandez": 23436, + "trevor": 23437, + "mattress": 23438, + "appointments": 23439, + "alej": 23440, + "fei": 23441, + "ologist": 23442, + "safar": 23443, + "octa": 23444, + "src": 23445, + "shaun": 23446, + "ambient": 23447, + "dric": 23448, + "biker": 23449, + "shee": 23450, + "mustache": 23451, + "hta": 23452, + "boone": 23453, + "herty": 23454, + "cardio": 23455, + "brakes": 23456, + "recital": 23457, + "consists": 23458, + "overwhelmed": 23459, + "caul": 23460, + "robbins": 23461, + "imit": 23462, + "alth": 23463, + "url": 23464, + "bibli": 23465, + "onne": 23466, + "blacklivesmatter": 23467, + "difficulties": 23468, + "telang": 23469, + "taller": 23470, + "ðŁĵĨ": 23471, + "debating": 23472, + "burrito": 23473, + "movember": 23474, + "strengthening": 23475, + "boe": 23476, + "testam": 23477, + "miracles": 23478, + "baseball": 23479, + "renee": 23480, + "ðŁijīðŁı»": 23481, + "alfa": 23482, + "âĺĺ": 23483, + "unstoppable": 23484, + "ecs": 23485, + "gmo": 23486, + "giftideas": 23487, + "pathway": 23488, + "fencing": 23489, + "ðŁİ¤": 23490, + "bham": 23491, + "ras": 23492, + "sko": 23493, + "dled": 23494, + "thelast": 23495, + "magnum": 23496, + "binary": 23497, + "wilde": 23498, + "wilder": 23499, + "whati": 23500, + "barbecue": 23501, + "hism": 23502, + "canoe": 23503, + "kurdi": 23504, + "elive": 23505, + "advantages": 23506, + "madame": 23507, + "bier": 23508, + "missing": 23509, + "entertain": 23510, + "airforce": 23511, + "yama": 23512, + "cis": 23513, + "hashtags": 23514, + "jis": 23515, + "veil": 23516, + "dreamy": 23517, + "tense": 23518, + "mayward": 23519, + "chateau": 23520, + "huntington": 23521, + "âļĵ": 23522, + "vall": 23523, + "upon": 23524, + "blouse": 23525, + "dunes": 23526, + "ðŁĺ´": 23527, + "fertility": 23528, + "mole": 23529, + "currencies": 23530, + "stu": 23531, + "berlin": 23532, + "toasted": 23533, + "divas": 23534, + "walt": 23535, + "lark": 23536, + "pora": 23537, + "hitter": 23538, + "umer": 23539, + "chilled": 23540, + "balancing": 23541, + "fais": 23542, + "yin": 23543, + "ortiz": 23544, + "eastenders": 23545, + "hate": 23546, + "ural": 23547, + "april": 23548, + "timel": 23549, + "à±": 23550, + "pero": 23551, + "stocked": 23552, + "respects": 23553, + "tht": 23554, + "bestfriends": 23555, + "givingtuesday": 23556, + "bead": 23557, + "invent": 23558, + "imi": 23559, + "naples": 23560, + "combining": 23561, + "tokens": 23562, + "thirst": 23563, + "masc": 23564, + "parrot": 23565, + "spu": 23566, + "denton": 23567, + "*-*": 23568, + "tres": 23569, + "suburban": 23570, + "width": 23571, + "sive": 23572, + "contender": 23573, + 
"sirius": 23574, + "lok": 23575, + "troopers": 23576, + "outrage": 23577, + "turbo": 23578, + "fragile": 23579, + "messed": 23580, + "doh": 23581, + "discord": 23582, + "netanyahu": 23583, + "resign": 23584, + "forgiveness": 23585, + "mohan": 23586, + "munch": 23587, + "camou": 23588, + "identifying": 23589, + "enabling": 23590, + "hotter": 23591, + "thornton": 23592, + "jaipur": 23593, + "arya": 23594, + "ðŁı»âĢįâĻĢï¸ı": 23595, + "mustaf": 23596, + "majors": 23597, + "oke": 23598, + "duffy": 23599, + "rohing": 23600, + "tilt": 23601, + "ðŁĩ®ðŁĩ³": 23602, + "rockstar": 23603, + "sheep": 23604, + "hendrix": 23605, + "rav": 23606, + "invention": 23607, + "dou": 23608, + "laguna": 23609, + "grumpy": 23610, + "swis": 23611, + "impe": 23612, + ")'": 23613, + "youths": 23614, + "bunker": 23615, + "stache": 23616, + "oppose": 23617, + "indies": 23618, + "accelerate": 23619, + "mlp": 23620, + "eden": 23621, + "wann": 23622, + "kail": 23623, + "akshaykumar": 23624, + "supt": 23625, + "polym": 23626, + "middleton": 23627, + "extraordin": 23628, + "wilson": 23629, + "australian": 23630, + "aluminium": 23631, + "wayne": 23632, + "alumnus": 23633, + "matics": 23634, + "grim": 23635, + "ernie": 23636, + "oppa": 23637, + "competitors": 23638, + "randall": 23639, + "hence": 23640, + "declares": 23641, + "preaching": 23642, + "shahe": 23643, + "cane": 23644, + "sustainable": 23645, + "staples": 23646, + "ledge": 23647, + "adena": 23648, + "doctoral": 23649, + "burgundy": 23650, + "decorate": 23651, + "rendered": 23652, + "risen": 23653, + "prank": 23654, + "dior": 23655, + "beethoven": 23656, + "floor": 23657, + "accom": 23658, + "tot": 23659, + "hodg": 23660, + "tourism": 23661, + "sayin": 23662, + "objective": 23663, + "markers": 23664, + "premiership": 23665, + "enabled": 23666, + "camoufla": 23667, + "giant": 23668, + "Ñģ": 23669, + "smokey": 23670, + "ricket": 23671, + "pang": 23672, + "depending": 23673, + "sation": 23674, + "evolving": 23675, + "intercep": 23676, + "census": 23677, + "tofthe": 23678, + "reen": 23679, + "mendoza": 23680, + "trumpet": 23681, + "marketers": 23682, + "anit": 23683, + "ðŁĻĬ": 23684, + "northwestern": 23685, + "vla": 23686, + "fotogra": 23687, + "blackandwhite": 23688, + "chewan": 23689, + "wig": 23690, + "troom": 23691, + "gingerbread": 23692, + "kn": 23693, + "romero": 23694, + "nfc": 23695, + "orchi": 23696, + "funko": 23697, + "source": 23698, + "fs": 23699, + "raped": 23700, + "ost": 23701, + "tarot": 23702, + "annually": 23703, + "ðŁĺ¬": 23704, + "rill": 23705, + "delav": 23706, + "..!!": 23707, + "ses": 23708, + "cann": 23709, + "medicare": 23710, + "phel": 23711, + "apex": 23712, + "guardian": 23713, + "remained": 23714, + "rpm": 23715, + "añ": 23716, + "storymonth": 23717, + "instagood": 23718, + "neighbour": 23719, + "ping": 23720, + "semite": 23721, + "mystic": 23722, + "ascot": 23723, + "mater": 23724, + "handful": 23725, + "dangers": 23726, + "tid": 23727, + "anaheim": 23728, + "opoly": 23729, + "shallow": 23730, + "namibia": 23731, + "toria": 23732, + "procurement": 23733, + "bigbang": 23734, + "announcements": 23735, + "prosecutor": 23736, + "bengals": 23737, + "salle": 23738, + "enroll": 23739, + "gastro": 23740, + "suggestion": 23741, + "bak": 23742, + "haul": 23743, + "buddhism": 23744, + "berniesanders": 23745, + "flute": 23746, + "fatigue": 23747, + "cynthia": 23748, + "choi": 23749, + "irwin": 23750, + "gua": 23751, + "strous": 23752, + "hp": 23753, + "bap": 23754, + "satisfying": 23755, + "playa": 23756, + "ðŁİ¼": 23757, + "instap": 23758, + "alice": 
23759, + "tp": 23760, + "irrigation": 23761, + "ðŁĩ¬ðŁĩ§": 23762, + "intric": 23763, + "clues": 23764, + "plex": 23765, + "sax": 23766, + "hepat": 23767, + "dumped": 23768, + "significance": 23769, + "byu": 23770, + "medication": 23771, + "prov": 23772, + "toughest": 23773, + "cornish": 23774, + "âŀľ": 23775, + "kelley": 23776, + "uv": 23777, + "sizz": 23778, + "sibling": 23779, + "mest": 23780, + "distor": 23781, + "diplomatic": 23782, + "auntie": 23783, + "bhat": 23784, + "sonic": 23785, + "brenda": 23786, + "pumpkins": 23787, + "roch": 23788, + "blackburn": 23789, + "urged": 23790, + "shia": 23791, + "arrangements": 23792, + "flood": 23793, + "saunders": 23794, + "lecturer": 23795, + "nouri": 23796, + "populations": 23797, + "diplomacy": 23798, + "consistently": 23799, + "ð٤Ļ": 23800, + "tmund": 23801, + "cauliflower": 23802, + "lily": 23803, + "vocabulary": 23804, + "varieties": 23805, + "cooker": 23806, + "uptown": 23807, + "quent": 23808, + "mosa": 23809, + "reinde": 23810, + "velocity": 23811, + "spruce": 23812, + "socialmedi": 23813, + "iber": 23814, + "voluntary": 23815, + "processed": 23816, + "baltic": 23817, + "yang": 23818, + "lebanese": 23819, + "dp": 23820, + "dolly": 23821, + "arrangement": 23822, + "yuri": 23823, + "cranberry": 23824, + "kalyan": 23825, + "elevation": 23826, + "cliff": 23827, + "pushes": 23828, + "ìĬ¤": 23829, + "silic": 23830, + "cowx": 23831, + "eternity": 23832, + "slaves": 23833, + "vinegar": 23834, + "gloucester": 23835, + "contained": 23836, + "breakingnews": 23837, + "against": 23838, + "renovated": 23839, + "normandy": 23840, + "heroin": 23841, + "ysm": 23842, + "mods": 23843, + "greek": 23844, + "undi": 23845, + "trench": 23846, + "vh": 23847, + "encourages": 23848, + "headache": 23849, + "grange": 23850, + ":'": 23851, + "evergreen": 23852, + "ÙĬ": 23853, + "reckon": 23854, + "abused": 23855, + "thru": 23856, + "choice": 23857, + "tidy": 23858, + "colder": 23859, + "schoice": 23860, + "hain": 23861, + "brum": 23862, + "liars": 23863, + "breit": 23864, + "yorker": 23865, + "shack": 23866, + "heidi": 23867, + "michaels": 23868, + "scopic": 23869, + "fascist": 23870, + "playful": 23871, + "cac": 23872, + "yasss": 23873, + "shad": 23874, + "..?": 23875, + "quen": 23876, + "ramirez": 23877, + "clifton": 23878, + "prs": 23879, + "bestfan": 23880, + "âģł": 23881, + "generating": 23882, + "headset": 23883, + "disappointment": 23884, + "abstract": 23885, + "boiled": 23886, + "parenthood": 23887, + "azerbaijan": 23888, + "exhibiting": 23889, + "bombay": 23890, + "olivier": 23891, + "koso": 23892, + "unlea": 23893, + "maternity": 23894, + "izer": 23895, + "sives": 23896, + "rhu": 23897, + "coll": 23898, + "saskatchewan": 23899, + "freakin": 23900, + "dek": 23901, + "nag": 23902, + "stabili": 23903, + "ðŁįķ": 23904, + "organizer": 23905, + "bosses": 23906, + "aru": 23907, + "uva": 23908, + "atable": 23909, + "taun": 23910, + "afterwards": 23911, + "fertili": 23912, + "verge": 23913, + "azi": 23914, + "morph": 23915, + "à¹ģà¸": 23916, + "jerk": 23917, + "cosmetic": 23918, + "kow": 23919, + "strust": 23920, + "apache": 23921, + "postcards": 23922, + "formul": 23923, + "ìĭ": 23924, + "spinal": 23925, + "jackpot": 23926, + "electri": 23927, + "ÃŃ": 23928, + "loy": 23929, + "grader": 23930, + "diablo": 23931, + "ardi": 23932, + "hesit": 23933, + "fw": 23934, + "archery": 23935, + "pash": 23936, + "theories": 23937, + "repeal": 23938, + "relive": 23939, + "percy": 23940, + "âĺĨ": 23941, + "imin": 23942, + "synchron": 23943, + "shampoo": 23944, + "coupons": 23945, 
+ "oto": 23946, + "lai": 23947, + "thought": 23948, + "luxembourg": 23949, + "mov": 23950, + "ðŁĺ¥": 23951, + "gemma": 23952, + "seated": 23953, + "mga": 23954, + "stratford": 23955, + "uncertainty": 23956, + "shifts": 23957, + "esto": 23958, + "fool": 23959, + "firearms": 23960, + "corrie": 23961, + "kiki": 23962, + "apparent": 23963, + "pills": 23964, + "olympia": 23965, + "fid": 23966, + "elevated": 23967, + "decks": 23968, + "ignoring": 23969, + "avalan": 23970, + "rov": 23971, + "whistle": 23972, + "ptsd": 23973, + "militants": 23974, + "robotic": 23975, + "pacers": 23976, + "quilt": 23977, + "bankruptcy": 23978, + "lich": 23979, + "percussion": 23980, + "celebrity": 23981, + "als": 23982, + "(;": 23983, + "sut": 23984, + "pokemongo": 23985, + "hg": 23986, + "offs": 23987, + "gibraltar": 23988, + "screams": 23989, + "billie": 23990, + "genome": 23991, + "marin": 23992, + "beams": 23993, + "archbishop": 23994, + "emin": 23995, + "bedrooms": 23996, + "gated": 23997, + "olly": 23998, + "warranty": 23999, + "atown": 24000, + "cuddles": 24001, + "gunna": 24002, + "kic": 24003, + "vive": 24004, + "cymru": 24005, + "narrow": 24006, + "prob": 24007, + "leo": 24008, + "references": 24009, + "manufactured": 24010, + "chopper": 24011, + "brunswick": 24012, + "semis": 24013, + "donia": 24014, + "rye": 24015, + "mano": 24016, + "hurting": 24017, + "?#": 24018, + "holli": 24019, + "investigations": 24020, + "cels": 24021, + "ðŁĵŀ": 24022, + "lester": 24023, + "temples": 24024, + "storey": 24025, + "mcmahon": 24026, + "toilets": 24027, + "woof": 24028, + "ï¸İ": 24029, + "leverage": 24030, + "atom": 24031, + "nightmares": 24032, + "victorious": 24033, + "haunting": 24034, + "customer": 24035, + "agi": 24036, + "yoongi": 24037, + "monty": 24038, + "veronica": 24039, + "wur": 24040, + "intimid": 24041, + "blankets": 24042, + "volution": 24043, + "jm": 24044, + "âĺİ": 24045, + "amon": 24046, + "judith": 24047, + "ðŁĺİðŁĺİ": 24048, + "distracted": 24049, + "drip": 24050, + "hurricane": 24051, + "andes": 24052, + "revelation": 24053, + "troop": 24054, + "ableg": 24055, + "collin": 24056, + "tibetan": 24057, + "worrying": 24058, + "internationally": 24059, + "eater": 24060, + "cameroon": 24061, + "brador": 24062, + "yuk": 24063, + "ðŁĴĹðŁĴĹ": 24064, + "trak": 24065, + "slopes": 24066, + "cier": 24067, + "nea": 24068, + "oler": 24069, + "taka": 24070, + "albion": 24071, + "volcanic": 24072, + "amn": 24073, + "afi": 24074, + "obstac": 24075, + "facetime": 24076, + "gering": 24077, + "npr": 24078, + "metallica": 24079, + "organic": 24080, + "ðŁĴ¡": 24081, + "kidd": 24082, + "dances": 24083, + "pembro": 24084, + "washer": 24085, + "mits": 24086, + "omer": 24087, + "emotionally": 24088, + "tango": 24089, + "ipo": 24090, + "docks": 24091, + "scanning": 24092, + "specs": 24093, + "thom": 24094, + "theology": 24095, + "emergen": 24096, + "omi": 24097, + "gpa": 24098, + "selections": 24099, + "unnecessary": 24100, + "image": 24101, + "ters": 24102, + "induced": 24103, + "gigan": 24104, + "rentals": 24105, + "supplied": 24106, + "mfa": 24107, + "shankar": 24108, + "later": 24109, + "pajam": 24110, + "clave": 24111, + "Ùģ": 24112, + "mahin": 24113, + "carlson": 24114, + "avian": 24115, + "anova": 24116, + "katie": 24117, + "ajith": 24118, + "designated": 24119, + "chocolates": 24120, + "investigators": 24121, + "glazed": 24122, + "princess": 24123, + "erry": 24124, + "ragn": 24125, + "ourable": 24126, + "hru": 24127, + "sundance": 24128, + "peugeot": 24129, + "steampunk": 24130, + "ghlin": 24131, + "grease": 24132, + 
"hires": 24133, + "zap": 24134, + "perce": 24135, + "jill": 24136, + "tome": 24137, + "hehehe": 24138, + "joyful": 24139, + "maestro": 24140, + "nished": 24141, + "genealo": 24142, + "vich": 24143, + "pits": 24144, + "foxes": 24145, + "goodman": 24146, + "emerson": 24147, + "lobes": 24148, + "converse": 24149, + "oats": 24150, + "thomson": 24151, + "rahim": 24152, + "malware": 24153, + "ahi": 24154, + "mankind": 24155, + "resin": 24156, + "img": 24157, + "swood": 24158, + "kinder": 24159, + "scroll": 24160, + "ara": 24161, + "sakura": 24162, + "robbed": 24163, + "xion": 24164, + "nya": 24165, + "cism": 24166, + "cedar": 24167, + "bein": 24168, + "mourning": 24169, + "torto": 24170, + "heathrow": 24171, + "donegal": 24172, + "barb": 24173, + "hydration": 24174, + "kor": 24175, + "elimination": 24176, + "supdates": 24177, + "hills": 24178, + "appeti": 24179, + "starred": 24180, + "kom": 24181, + "gwen": 24182, + "ddd": 24183, + "cray": 24184, + "scanner": 24185, + "personalised": 24186, + "serenity": 24187, + "redesign": 24188, + "metaph": 24189, + "boxed": 24190, + "judgment": 24191, + "nose": 24192, + "ë¹": 24193, + "erad": 24194, + "acne": 24195, + "suppliers": 24196, + "energetic": 24197, + "vom": 24198, + "asap": 24199, + "ðŁĶ¸": 24200, + "irvine": 24201, + "hatch": 24202, + "lass": 24203, + "adren": 24204, + "waffles": 24205, + "accurately": 24206, + "icio": 24207, + "ittle": 24208, + "seun": 24209, + "occupy": 24210, + "webcam": 24211, + "thenew": 24212, + "entes": 24213, + "gai": 24214, + "jw": 24215, + "accountable": 24216, + "visor": 24217, + "irrit": 24218, + "licensing": 24219, + "huddersfield": 24220, + "genie": 24221, + "ðŁİ¾": 24222, + "atmospheric": 24223, + "tensions": 24224, + "spartan": 24225, + "clifford": 24226, + "olan": 24227, + "northbound": 24228, + "ameen": 24229, + "censor": 24230, + "uel": 24231, + "stery": 24232, + "$$": 24233, + "farrell": 24234, + "hyster": 24235, + "clt": 24236, + "sedan": 24237, + "replied": 24238, + "describing": 24239, + "microwave": 24240, + "slab": 24241, + "prosp": 24242, + "assisting": 24243, + "rubio": 24244, + "ethan": 24245, + "hhhhh": 24246, + "guay": 24247, + "zman": 24248, + "raise": 24249, + "rolling": 24250, + "oe": 24251, + "nile": 24252, + "ambrose": 24253, + "scarborough": 24254, + "heroic": 24255, + "cooks": 24256, + "mort": 24257, + "chopra": 24258, + "ðŁĮ·": 24259, + "tob": 24260, + "shaving": 24261, + "stacey": 24262, + "dorm": 24263, + "motorsports": 24264, + "wiki": 24265, + "folds": 24266, + "spiced": 24267, + "stressful": 24268, + "literal": 24269, + "fudge": 24270, + "peggy": 24271, + "waite": 24272, + "tresses": 24273, + "sesh": 24274, + "pric": 24275, + "ðŁİħ": 24276, + "fright": 24277, + "rva": 24278, + "mumbai": 24279, + "pom": 24280, + "ttv": 24281, + "cellar": 24282, + "tome": 24283, + "android": 24284, + "doris": 24285, + "tsunami": 24286, + "tinder": 24287, + "oec": 24288, + "mwc": 24289, + "dortmund": 24290, + "nothin": 24291, + "liti": 24292, + "sou": 24293, + "believein": 24294, + "atu": 24295, + "knocks": 24296, + "magni": 24297, + "sssss": 24298, + "rohit": 24299, + "inews": 24300, + "angi": 24301, + "mandy": 24302, + "kettle": 24303, + "intermediate": 24304, + "avant": 24305, + "curl": 24306, + "endorsed": 24307, + "orio": 24308, + "urt": 24309, + "consideration": 24310, + "wires": 24311, + "shelters": 24312, + "bino": 24313, + "vikram": 24314, + "implemented": 24315, + "lydia": 24316, + "buk": 24317, + "parody": 24318, + "cnews": 24319, + "undergraduate": 24320, + "canucks": 24321, + "sami": 24322, + 
"politically": 24323, + "rotten": 24324, + "ghz": 24325, + "textiles": 24326, + "overload": 24327, + "moderni": 24328, + "recreational": 24329, + "flir": 24330, + "baton": 24331, + "typography": 24332, + "ovation": 24333, + "intriguing": 24334, + "pilgrimage": 24335, + "alge": 24336, + "adays": 24337, + "tcmparty": 24338, + "spelled": 24339, + "curls": 24340, + "booze": 24341, + "stem": 24342, + "annes": 24343, + "irls": 24344, + "sponge": 24345, + "shopper": 24346, + "signation": 24347, + "brass": 24348, + "mistress": 24349, + "leah": 24350, + "beginner": 24351, + "lauderdale": 24352, + "august": 24353, + "preschool": 24354, + "taping": 24355, + "taipei": 24356, + "executives": 24357, + "bd": 24358, + "rhetor": 24359, + "escor": 24360, + "immuno": 24361, + "deeplearning": 24362, + "statues": 24363, + "itus": 24364, + "manuscript": 24365, + "lyric": 24366, + "corvette": 24367, + "molly": 24368, + "lage": 24369, + "dep": 24370, + "cnbc": 24371, + "lest": 24372, + "jessi": 24373, + "fife": 24374, + "griffith": 24375, + "opposing": 24376, + "rang": 24377, + "drills": 24378, + "respectful": 24379, + "pity": 24380, + "dell": 24381, + "harding": 24382, + "playboy": 24383, + "bloke": 24384, + "shutout": 24385, + "kili": 24386, + "osp": 24387, + "seattle": 24388, + "bcpoli": 24389, + "mises": 24390, + "journals": 24391, + "teaming": 24392, + "esther": 24393, + "freddy": 24394, + "Ķï¸ı": 24395, + "metrics": 24396, + "notre": 24397, + "garry": 24398, + "forty": 24399, + "navigate": 24400, + "periods": 24401, + "benedic": 24402, + "jid": 24403, + "daw": 24404, + "ancestors": 24405, + "restoring": 24406, + "cong": 24407, + "allergy": 24408, + "titanium": 24409, + "cence": 24410, + "leaning": 24411, + "abbas": 24412, + "vast": 24413, + "ucf": 24414, + "roofing": 24415, + "eman": 24416, + "severely": 24417, + "vogue": 24418, + "veau": 24419, + "inbound": 24420, + "dz": 24421, + "taneously": 24422, + "stretching": 24423, + "manchester": 24424, + "dryer": 24425, + "davis": 24426, + "kanth": 24427, + "thegame": 24428, + "itted": 24429, + "retain": 24430, + "elles": 24431, + "congestion": 24432, + "fraternity": 24433, + "ollie": 24434, + "loki": 24435, + "freely": 24436, + "choo": 24437, + "pony": 24438, + "scep": 24439, + "tably": 24440, + "balt": 24441, + "rockn": 24442, + "dime": 24443, + "logging": 24444, + "ðŁį·": 24445, + "adu": 24446, + "havoc": 24447, + "waterford": 24448, + "charis": 24449, + "sweetie": 24450, + "running": 24451, + "nerd": 24452, + "erdogan": 24453, + "zara": 24454, + "weighing": 24455, + "fifty": 24456, + "precise": 24457, + "lowell": 24458, + "kurdistan": 24459, + "ryo": 24460, + "orth": 24461, + "synth": 24462, + "liners": 24463, + "phenomenon": 24464, + "artillery": 24465, + "illegally": 24466, + "construct": 24467, + "nostalgic": 24468, + "garth": 24469, + "alta": 24470, + "shelton": 24471, + "asean": 24472, + "wander": 24473, + "durban": 24474, + "diversi": 24475, + "bono": 24476, + "clon": 24477, + "leman": 24478, + "shun": 24479, + "obstacles": 24480, + "appetite": 24481, + "feeder": 24482, + "respiratory": 24483, + "dixie": 24484, + "formula": 24485, + "anto": 24486, + "sober": 24487, + "extinct": 24488, + "auc": 24489, + "ingles": 24490, + "legitimate": 24491, + ";;": 24492, + "minnie": 24493, + "ipswich": 24494, + "dramatically": 24495, + "ðŁijıðŁı¼": 24496, + "ingham": 24497, + "military": 24498, + "monet": 24499, + "usnavy": 24500, + "fork": 24501, + "dunno": 24502, + "player": 24503, + "qotd": 24504, + "stoo": 24505, + "exor": 24506, + "ethiopian": 24507, + 
"filmfest": 24508, + "pered": 24509, + "cate": 24510, + "saudi": 24511, + "inner": 24512, + "sincere": 24513, + "tionality": 24514, + "alee": 24515, + "deeds": 24516, + "cooperative": 24517, + "ironic": 24518, + "crocod": 24519, + "brary": 24520, + "postseason": 24521, + "camper": 24522, + "canary": 24523, + "ein": 24524, + "extensions": 24525, + "nbd": 24526, + "sherwood": 24527, + "spokane": 24528, + "hump": 24529, + "jitsu": 24530, + "ê¹": 24531, + "daryl": 24532, + "psi": 24533, + "stabbed": 24534, + "offerings": 24535, + "expects": 24536, + "caval": 24537, + "bodybuilding": 24538, + "framing": 24539, + "fca": 24540, + "yearly": 24541, + "bombed": 24542, + "skil": 24543, + "researching": 24544, + "judiciary": 24545, + "greeted": 24546, + "tudor": 24547, + "milo": 24548, + "innovate": 24549, + "ðŁĺĽ": 24550, + "rhs": 24551, + "ruby": 24552, + "contributor": 24553, + "famer": 24554, + "socially": 24555, + "mlin": 24556, + "fiery": 24557, + "utter": 24558, + "beaut": 24559, + "itos": 24560, + "devoted": 24561, + "rainbow": 24562, + "barney": 24563, + "peren": 24564, + "arjun": 24565, + "rna": 24566, + "gabby": 24567, + "uti": 24568, + "hannity": 24569, + "pickle": 24570, + "serv": 24571, + "quakes": 24572, + "ppe": 24573, + "fem": 24574, + "whitec": 24575, + "jn": 24576, + "victories": 24577, + "ðŁ§¡": 24578, + "golfer": 24579, + "congratulates": 24580, + "resulting": 24581, + "mechanic": 24582, + "urve": 24583, + "centered": 24584, + "kiev": 24585, + "ans": 24586, + "incub": 24587, + "<<": 24588, + "cmo": 24589, + "bestfanarmy": 24590, + "daph": 24591, + "enham": 24592, + "oncology": 24593, + "kush": 24594, + "txt": 24595, + "oriented": 24596, + "fashionable": 24597, + "csr": 24598, + "sahara": 24599, + "rack": 24600, + "pdp": 24601, + "hanson": 24602, + "à¸ĩ": 24603, + "tiers": 24604, + "rar": 24605, + "panam": 24606, + "insky": 24607, + "sahi": 24608, + "testament": 24609, + "asthma": 24610, + "inher": 24611, + "fisheries": 24612, + "order": 24613, + "howe": 24614, + "gallon": 24615, + "epis": 24616, + "suzanne": 24617, + "drowning": 24618, + "panelists": 24619, + "ðŁĺ²": 24620, + "ë¦": 24621, + "alach": 24622, + "commemorative": 24623, + "attribu": 24624, + "ðŁij»": 24625, + "moo": 24626, + "visional": 24627, + "weeksary": 24628, + "gust": 24629, + "akin": 24630, + "pointe": 24631, + "eee": 24632, + "dispar": 24633, + "nipp": 24634, + "dental": 24635, + "stall": 24636, + "pian": 24637, + "bore": 24638, + "ulster": 24639, + "tick": 24640, + "irr": 24641, + "taehyung": 24642, + "microphone": 24643, + "bermuda": 24644, + "gaard": 24645, + "eler": 24646, + "plumbing": 24647, + "hugely": 24648, + "âļ«ï¸ı": 24649, + "raceway": 24650, + "cambridge": 24651, + "marcel": 24652, + "burnley": 24653, + "toast": 24654, + "hollywood": 24655, + "fasting": 24656, + "mered": 24657, + "hibition": 24658, + "capped": 24659, + "beneficial": 24660, + "owning": 24661, + "contamin": 24662, + "arabian": 24663, + "toon": 24664, + "capac": 24665, + "hulu": 24666, + "smir": 24667, + "nutrients": 24668, + "sein": 24669, + "graphs": 24670, + "conditional": 24671, + "ðŁijħ": 24672, + "orac": 24673, + "playin": 24674, + "northe": 24675, + "tornad": 24676, + "marian": 24677, + "jumbo": 24678, + "lexi": 24679, + "incredibleindia": 24680, + "roadto": 24681, + "ukone": 24682, + "confusing": 24683, + "sph": 24684, + "shank": 24685, + "pied": 24686, + "mqm": 24687, + "positively": 24688, + "sherry": 24689, + "pathways": 24690, + "considers": 24691, + "tofu": 24692, + "arguments": 24693, + "resilient": 24694, + "chett": 
24695, + "withdra": 24696, + "tero": 24697, + "atedly": 24698, + "swana": 24699, + "heb": 24700, + "flight": 24701, + "harley": 24702, + "decrease": 24703, + "kindle": 24704, + "bookshop": 24705, + "³ï¸ı": 24706, + "martyrs": 24707, + "smur": 24708, + "mccl": 24709, + "concerto": 24710, + "stime": 24711, + "rejoice": 24712, + "applau": 24713, + "clement": 24714, + "merkel": 24715, + "jaime": 24716, + "immortal": 24717, + "isleof": 24718, + "marco": 24719, + "youtuber": 24720, + "stalking": 24721, + "metoo": 24722, + "stack": 24723, + "spouse": 24724, + "ust": 24725, + "luv": 24726, + "âļ¾ï¸ı": 24727, + "equestrian": 24728, + "eving": 24729, + "flin": 24730, + "nickname": 24731, + "thebig": 24732, + "asar": 24733, + "stacks": 24734, + "walker": 24735, + "bora": 24736, + "kidnapped": 24737, + "hurling": 24738, + "humbold": 24739, + "recalls": 24740, + "copper": 24741, + "annis": 24742, + "seo": 24743, + "merger": 24744, + "muir": 24745, + "addy": 24746, + "ðŁĴªðŁĴª": 24747, + "bex": 24748, + "cracy": 24749, + "conan": 24750, + "congratulation": 24751, + "midst": 24752, + "âϬ": 24753, + "forbi": 24754, + "optic": 24755, + "crate": 24756, + "crocodile": 24757, + "madagas": 24758, + "securing": 24759, + "aston": 24760, + "ogue": 24761, + "savior": 24762, + "salisbury": 24763, + "loveit": 24764, + "fujifilm": 24765, + "castles": 24766, + "asst": 24767, + "arrows": 24768, + "spacious": 24769, + "trs": 24770, + "polyvore": 24771, + "progression": 24772, + "mri": 24773, + "nelson": 24774, + "bim": 24775, + "indicator": 24776, + "oda": 24777, + "pepe": 24778, + "resignation": 24779, + "gut": 24780, + "sneaker": 24781, + "logically": 24782, + "azy": 24783, + "arella": 24784, + "tearing": 24785, + "joshi": 24786, + "ssionism": 24787, + "qpr": 24788, + "mariah": 24789, + "px": 24790, + "bleed": 24791, + "mian": 24792, + "medley": 24793, + "weiss": 24794, + "kerry": 24795, + "gatory": 24796, + "atal": 24797, + "madison": 24798, + "avenger": 24799, + "naby": 24800, + "pland": 24801, + "giles": 24802, + "freshwater": 24803, + "dington": 24804, + "taj": 24805, + "demonstrates": 24806, + "ntv": 24807, + "bulbs": 24808, + "sundaymorning": 24809, + "peake": 24810, + "souvenir": 24811, + "wah": 24812, + "tonnes": 24813, + "mkt": 24814, + "complexity": 24815, + "conden": 24816, + "rossi": 24817, + "bing": 24818, + "yds": 24819, + "suk": 24820, + "ngo": 24821, + "midland": 24822, + "oly": 24823, + "lifeis": 24824, + "ripple": 24825, + "moreno": 24826, + "dders": 24827, + "tus": 24828, + "áĥ": 24829, + "boul": 24830, + "xa": 24831, + "holdings": 24832, + "wny": 24833, + "shadowhunters": 24834, + "kei": 24835, + "aspire": 24836, + "mous": 24837, + "owen": 24838, + "soak": 24839, + "skirts": 24840, + "mountaine": 24841, + "storming": 24842, + "chrome": 24843, + "riots": 24844, + "sarato": 24845, + "amaze": 24846, + "lessness": 24847, + "navar": 24848, + "criteria": 24849, + "rafa": 24850, + "indulge": 24851, + "ayer": 24852, + "porto": 24853, + "namo": 24854, + "................": 24855, + "yields": 24856, + "valle": 24857, + "jh": 24858, + "macron": 24859, + "sains": 24860, + "durant": 24861, + "trailers": 24862, + "wot": 24863, + "confederate": 24864, + "shrin": 24865, + "idol": 24866, + "formally": 24867, + "tene": 24868, + "motorcycles": 24869, + "thang": 24870, + "node": 24871, + "banger": 24872, + "daly": 24873, + "pats": 24874, + "enrollment": 24875, + "auctions": 24876, + "atal": 24877, + "arbor": 24878, + "logos": 24879, + "dearest": 24880, + "transaction": 24881, + "domingo": 24882, + "flea": 24883, + 
"sermon": 24884, + "deck": 24885, + "sincere": 24886, + "questioning": 24887, + "julio": 24888, + "wasp": 24889, + "pretz": 24890, + "armenian": 24891, + "kham": 24892, + "inflammation": 24893, + "picturesque": 24894, + "accidental": 24895, + "filmmakers": 24896, + "ðŁĺļ": 24897, + "ðŁĴį": 24898, + "casey": 24899, + "sob": 24900, + "yeezy": 24901, + "goodwill": 24902, + "paragra": 24903, + "ssly": 24904, + "feather": 24905, + "dyed": 24906, + "assassination": 24907, + "nade": 24908, + "bcs": 24909, + "applies": 24910, + "feminine": 24911, + "feu": 24912, + "extent": 24913, + "deputies": 24914, + "lack": 24915, + "psychic": 24916, + "goi": 24917, + "killings": 24918, + "pseu": 24919, + "ðŁ¤ª": 24920, + "unc": 24921, + "marl": 24922, + "tane": 24923, + "mckenna": 24924, + "surfer": 24925, + "influences": 24926, + "freeway": 24927, + "hackney": 24928, + "malaria": 24929, + "eland": 24930, + "teau": 24931, + "remastered": 24932, + "ر": 24933, + "razor": 24934, + "ggy": 24935, + "corro": 24936, + "laksh": 24937, + "flair": 24938, + "honesty": 24939, + "hooray": 24940, + "depp": 24941, + "amc": 24942, + "wednesdays": 24943, + "qa": 24944, + "edits": 24945, + "-$": 24946, + "sevilla": 24947, + "doubled": 24948, + "humanities": 24949, + "ccot": 24950, + "somos": 24951, + "rine": 24952, + "afa": 24953, + "sioux": 24954, + "reconstruction": 24955, + "welding": 24956, + "threads": 24957, + "amish": 24958, + "encouragement": 24959, + "poder": 24960, + "bock": 24961, + "balm": 24962, + "ptions": 24963, + "standup": 24964, + "accomplishments": 24965, + "guarding": 24966, + "conviction": 24967, + "acion": 24968, + "napoleon": 24969, + "depicting": 24970, + "attack": 24971, + "sui": 24972, + "wearable": 24973, + "âĸªï¸ı": 24974, + "potter": 24975, + "escort": 24976, + "vise": 24977, + "tots": 24978, + "boon": 24979, + "eventprofs": 24980, + "angular": 24981, + "womenshistorymonth": 24982, + "barrow": 24983, + "schi": 24984, + "accomp": 24985, + "tik": 24986, + "lend": 24987, + "kensington": 24988, + "wolfe": 24989, + "stacked": 24990, + "crashing": 24991, + "exhibit": 24992, + "winged": 24993, + "sabrina": 24994, + "masa": 24995, + "kms": 24996, + "always": 24997, + "ett": 24998, + "plasma": 24999, + "counseling": 25000, + "pickles": 25001, + "nfldraft": 25002, + "mrs": 25003, + "inevitable": 25004, + "courageous": 25005, + "stafford": 25006, + "writerslife": 25007, + "hos": 25008, + "ej": 25009, + "ghyun": 25010, + "trademark": 25011, + "adrian": 25012, + "influencer": 25013, + "coronation": 25014, + "raging": 25015, + "explored": 25016, + "usaf": 25017, + "exception": 25018, + "eux": 25019, + "tanker": 25020, + "swami": 25021, + "packet": 25022, + "ðŁij¨âĢį": 25023, + "fen": 25024, + "sheen": 25025, + "aero": 25026, + "jl": 25027, + "regal": 25028, + "nwt": 25029, + "auster": 25030, + "mehta": 25031, + "charge": 25032, + "aste": 25033, + "bate": 25034, + "infeld": 25035, + "racecourse": 25036, + "collapsed": 25037, + "fleece": 25038, + "zil": 25039, + "allie": 25040, + "alternatives": 25041, + "georges": 25042, + "ðŁĵį": 25043, + "quirky": 25044, + "fcb": 25045, + "natgeo": 25046, + "philanthropy": 25047, + "brai": 25048, + "everyday": 25049, + "ðŁIJ°": 25050, + "achers": 25051, + "jaan": 25052, + "fines": 25053, + "qi": 25054, + "fisherman": 25055, + "distinct": 25056, + "grimes": 25057, + "nationalist": 25058, + "commence": 25059, + "rown": 25060, + "â̳": 25061, + "zing": 25062, + "fter": 25063, + "hrw": 25064, + "baroque": 25065, + "blender": 25066, + "kitty": 25067, + "hooks": 25068, + "cited": 
25069, + "wanda": 25070, + "consensus": 25071, + "reindeer": 25072, + "anand": 25073, + "supply": 25074, + "meds": 25075, + "vn": 25076, + "olph": 25077, + "ratchet": 25078, + "sheldon": 25079, + "securities": 25080, + "ë°©íĥ": 25081, + "crom": 25082, + "mosquito": 25083, + "jeric": 25084, + "immac": 25085, + "dimensions": 25086, + "â¤": 25087, + "dissi": 25088, + "spongebob": 25089, + "damien": 25090, + "stevenson": 25091, + "joanne": 25092, + "delish": 25093, + "yikes": 25094, + "thanx": 25095, + "surveys": 25096, + "postponed": 25097, + "alcoholic": 25098, + "alised": 25099, + "ðŁĻıðŁı»": 25100, + "doch": 25101, + "sentim": 25102, + "meredith": 25103, + "compares": 25104, + "bago": 25105, + "happydays": 25106, + "moss": 25107, + "ãħĭ": 25108, + "nec": 25109, + "gnment": 25110, + "frustrated": 25111, + "combin": 25112, + "riv": 25113, + "eclec": 25114, + "collo": 25115, + "compliment": 25116, + "actorslife": 25117, + "ctto": 25118, + "nicar": 25119, + "ophon": 25120, + "aparthe": 25121, + "mant": 25122, + "jade": 25123, + "trolley": 25124, + "optimization": 25125, + "eyeon": 25126, + "ecological": 25127, + "quist": 25128, + "ephe": 25129, + "à¥ĩ": 25130, + "cinco": 25131, + "appoints": 25132, + "oldschool": 25133, + "cpr": 25134, + "behavioral": 25135, + "minaj": 25136, + ":-(": 25137, + "tagging": 25138, + "eval": 25139, + "joaqu": 25140, + "ðŁĺ«": 25141, + "hak": 25142, + "deme": 25143, + "jamaican": 25144, + "sos": 25145, + "hyatt": 25146, + "handbook": 25147, + "librarian": 25148, + "hannibal": 25149, + "pumping": 25150, + "chom": 25151, + "fman": 25152, + "gai": 25153, + "hull": 25154, + "responders": 25155, + "greenville": 25156, + "nus": 25157, + "vaugh": 25158, + "ðŁİīðŁİī": 25159, + "taxi": 25160, + "goldberg": 25161, + "mantra": 25162, + "tease": 25163, + "forbidden": 25164, + "methodist": 25165, + "ativity": 25166, + "****": 25167, + "ect": 25168, + "mcgr": 25169, + "Ħëĭ": 25170, + "seb": 25171, + "amidst": 25172, + "disappear": 25173, + "thyro": 25174, + "philips": 25175, + "erina": 25176, + "vicious": 25177, + "streamer": 25178, + "millionaire": 25179, + "map": 25180, + "strick": 25181, + "hackathon": 25182, + "gha": 25183, + "edic": 25184, + "mika": 25185, + "peck": 25186, + "illi": 25187, + "antoine": 25188, + "arca": 25189, + "optic": 25190, + "maure": 25191, + "ðŁĩ¦ðŁĩº": 25192, + "clashes": 25193, + "manly": 25194, + "âĺģ": 25195, + "alvar": 25196, + "andres": 25197, + "mei": 25198, + "elm": 25199, + "wwww": 25200, + "altered": 25201, + "lte": 25202, + "ê¹Ģ": 25203, + "mojo": 25204, + "forrest": 25205, + "thalai": 25206, + "nont": 25207, + "speeches": 25208, + "acknowledge": 25209, + "ignite": 25210, + "xfactor": 25211, + "ðŁ¥Ĥ": 25212, + "meadow": 25213, + "disrupt": 25214, + "debuted": 25215, + "scrimmage": 25216, + "pharmaceutical": 25217, + "fidd": 25218, + "foundations": 25219, + "philosopher": 25220, + "etal": 25221, + "publishers": 25222, + "boys": 25223, + "cke": 25224, + "rugged": 25225, + "optimism": 25226, + "rebe": 25227, + "philharmon": 25228, + "narcis": 25229, + "rallies": 25230, + "luis": 25231, + "goblue": 25232, + "folded": 25233, + "unacceptable": 25234, + "optimal": 25235, + "lisa": 25236, + "polaro": 25237, + "+.": 25238, + "enza": 25239, + "âĿ£ï¸ı": 25240, + "monopoly": 25241, + "graceful": 25242, + "dairy": 25243, + "dua": 25244, + "difficulty": 25245, + "judgement": 25246, + "osi": 25247, + "mersey": 25248, + "flux": 25249, + "newfound": 25250, + "terns": 25251, + "dimensional": 25252, + "invic": 25253, + "alba": 25254, + "amit": 25255, + 
"abudhabi": 25256, + "algeria": 25257, + "automobile": 25258, + "thead": 25259, + "lotion": 25260, + "accelerator": 25261, + "vacant": 25262, + "ition": 25263, + "luf": 25264, + "alic": 25265, + "pll": 25266, + "blazing": 25267, + "baz": 25268, + "sene": 25269, + "ðŁij¼": 25270, + "villains": 25271, + "directory": 25272, + "eisen": 25273, + "tock": 25274, + "brochure": 25275, + "ripp": 25276, + "hbd": 25277, + "zaynmalik": 25278, + "niche": 25279, + "lolol": 25280, + "certificates": 25281, + "morse": 25282, + "facup": 25283, + "xham": 25284, + "unwanted": 25285, + "imports": 25286, + "carnegie": 25287, + "fansign": 25288, + "mou": 25289, + "ralph": 25290, + "destroyer": 25291, + "swing": 25292, + "trekking": 25293, + "ciliation": 25294, + "pitbull": 25295, + "gaps": 25296, + "howell": 25297, + "definitive": 25298, + "mcle": 25299, + "fps": 25300, + "etz": 25301, + "bolly": 25302, + "lynn": 25303, + "gano": 25304, + "ature": 25305, + "fursuit": 25306, + "coil": 25307, + "nav": 25308, + "butts": 25309, + "trojans": 25310, + "eure": 25311, + "enko": 25312, + "schumer": 25313, + "horrific": 25314, + "installment": 25315, + "brb": 25316, + "suburbs": 25317, + "abel": 25318, + "vir": 25319, + "desh": 25320, + "cunningham": 25321, + "ðŁIJ»": 25322, + "spann": 25323, + "schwe": 25324, + "kemp": 25325, + "tru": 25326, + "stealth": 25327, + "ques": 25328, + "lew": 25329, + "delights": 25330, + "koch": 25331, + "humili": 25332, + "criti": 25333, + "ilt": 25334, + "spells": 25335, + "miley": 25336, + "caric": 25337, + "ðŁį´": 25338, + "lcfc": 25339, + "substitute": 25340, + "oung": 25341, + "?!!": 25342, + "affir": 25343, + "predictable": 25344, + "classof": 25345, + "err": 25346, + "cypress": 25347, + "chandra": 25348, + "ageing": 25349, + "____": 25350, + "therland": 25351, + "doncaster": 25352, + "elin": 25353, + "yoshi": 25354, + "sailors": 25355, + "harris": 25356, + "joanna": 25357, + "nigerians": 25358, + "hers": 25359, + "plague": 25360, + "procra": 25361, + "kno": 25362, + "canton": 25363, + "busines": 25364, + "unh": 25365, + "prakash": 25366, + "cin": 25367, + "bowen": 25368, + "coating": 25369, + "mals": 25370, + "begging": 25371, + "smithson": 25372, + "pontiac": 25373, + "spies": 25374, + "damian": 25375, + "pline": 25376, + "undant": 25377, + "alta": 25378, + "oness": 25379, + "shameless": 25380, + "daq": 25381, + "bbm": 25382, + "wales": 25383, + "stampede": 25384, + "serum": 25385, + "ÙĨ": 25386, + "catalyst": 25387, + "xn": 25388, + "absc": 25389, + "freezer": 25390, + "chun": 25391, + "arios": 25392, + "mccre": 25393, + "forehead": 25394, + "hears": 25395, + "damascus": 25396, + "tacoma": 25397, + "arduino": 25398, + "encounters": 25399, + "stanton": 25400, + "lgb": 25401, + "abas": 25402, + "\"..": 25403, + "kete": 25404, + "dracula": 25405, + "elem": 25406, + "gne": 25407, + "zeppelin": 25408, + "labrador": 25409, + "pulp": 25410, + "optional": 25411, + "orn": 25412, + "russians": 25413, + "sanitation": 25414, + "hilary": 25415, + "etsymntt": 25416, + "penalties": 25417, + "aust": 25418, + "igans": 25419, + "olympian": 25420, + "medicaid": 25421, + "versace": 25422, + "vape": 25423, + "restra": 25424, + "peep": 25425, + "sexiest": 25426, + "stalls": 25427, + "dile": 25428, + "thea": 25429, + "punjabi": 25430, + "puppy": 25431, + "tuesdaymotivation": 25432, + "ðŁĵļ": 25433, + "theflash": 25434, + "rocket": 25435, + "modest": 25436, + "chihuahu": 25437, + "onna": 25438, + "ksa": 25439, + "hurdles": 25440, + "cave": 25441, + "failures": 25442, + "split": 25443, + "boho": 25444, + 
"gurl": 25445, + "disappoint": 25446, + "howard": 25447, + "nugget": 25448, + "franz": 25449, + "stalert": 25450, + "kazakh": 25451, + "forgetting": 25452, + "schri": 25453, + "agate": 25454, + "amat": 25455, + "everett": 25456, + "duet": 25457, + "veterinary": 25458, + "julian": 25459, + "chills": 25460, + "brave": 25461, + "ghostbusters": 25462, + "lando": 25463, + "greets": 25464, + "profitable": 25465, + "dé": 25466, + "tir": 25467, + "zee": 25468, + "omen": 25469, + "pdx": 25470, + "grayson": 25471, + "hari": 25472, + "fixes": 25473, + "stabbing": 25474, + "swimmer": 25475, + "symbols": 25476, + "compliments": 25477, + "pose": 25478, + "functioning": 25479, + "thnx": 25480, + "gir": 25481, + "corporations": 25482, + "barlow": 25483, + "loe": 25484, + "offseason": 25485, + "distinctive": 25486, + "marvelous": 25487, + "nikon": 25488, + "enrique": 25489, + "kyu": 25490, + "jaws": 25491, + "amoto": 25492, + "lombar": 25493, + "travelblogger": 25494, + "fah": 25495, + "ourism": 25496, + "tristan": 25497, + "soe": 25498, + "cease": 25499, + "ðŁıħ": 25500, + "zac": 25501, + "mckenzie": 25502, + "taxpayers": 25503, + "swimsuit": 25504, + "blo": 25505, + "lesley": 25506, + "kansas": 25507, + "wks": 25508, + "kiel": 25509, + "provoking": 25510, + "myles": 25511, + "string": 25512, + "kangaroo": 25513, + "galactic": 25514, + "fifth": 25515, + "ske": 25516, + "weir": 25517, + "llis": 25518, + "matory": 25519, + "ðŁĩ¿": 25520, + "unci": 25521, + "reproductive": 25522, + "rooting": 25523, + "tides": 25524, + "gadget": 25525, + "..........": 25526, + "alexander": 25527, + "bowler": 25528, + "screw": 25529, + "apolog": 25530, + "erika": 25531, + "walters": 25532, + "shetty": 25533, + "lane": 25534, + "banter": 25535, + "asant": 25536, + "meso": 25537, + "vain": 25538, + "\"\"\"": 25539, + "usi": 25540, + "ferdin": 25541, + "accomplish": 25542, + "mansfield": 25543, + "bombar": 25544, + "collaborating": 25545, + "clap": 25546, + "iture": 25547, + "sda": 25548, + "smoky": 25549, + "nak": 25550, + "imperson": 25551, + "carla": 25552, + "comra": 25553, + "burgl": 25554, + "loco": 25555, + "ties": 25556, + "inhi": 25557, + "tracey": 25558, + "seis": 25559, + "disser": 25560, + "rrrr": 25561, + "dray": 25562, + "protect": 25563, + "corona": 25564, + "hunger": 25565, + "cken": 25566, + "celi": 25567, + "troubled": 25568, + "predators": 25569, + "fictional": 25570, + "shaved": 25571, + "richest": 25572, + "metaboli": 25573, + "fulham": 25574, + "grooming": 25575, + "monochrome": 25576, + "wasting": 25577, + "asco": 25578, + "aste": 25579, + "tista": 25580, + "remedies": 25581, + "ungsoo": 25582, + "southend": 25583, + "permanently": 25584, + "bumble": 25585, + "procrastin": 25586, + "identical": 25587, + "practically": 25588, + "mascul": 25589, + "suke": 25590, + "assured": 25591, + "valerie": 25592, + "deviant": 25593, + "grizzlies": 25594, + "thier": 25595, + "pura": 25596, + "nepal": 25597, + "notts": 25598, + "bilateral": 25599, + "spoil": 25600, + "carmel": 25601, + "cinematic": 25602, + "phl": 25603, + "nifty": 25604, + "mao": 25605, + "hypocri": 25606, + "laser": 25607, + "pantry": 25608, + "mathematical": 25609, + "elisa": 25610, + "coordination": 25611, + "belmont": 25612, + "ait": 25613, + "radiant": 25614, + "boiler": 25615, + "mang": 25616, + "fag": 25617, + "crc": 25618, + "hams": 25619, + "brin": 25620, + "â¬ĩï¸ı": 25621, + "familia": 25622, + "âĿ£": 25623, + "saber": 25624, + "rupert": 25625, + "ggan": 25626, + "ritz": 25627, + "mich": 25628, + "salford": 25629, + "levi": 25630, + "gral": 
25631, + "ðŁĴ¤": 25632, + "nino": 25633, + "ced": 25634, + "businessman": 25635, + "ultr": 25636, + "simply": 25637, + "compression": 25638, + "pains": 25639, + "halt": 25640, + "ë°©íĥĦ": 25641, + "landscaping": 25642, + "nf": 25643, + "crooked": 25644, + "erd": 25645, + "ittin": 25646, + "ddleston": 25647, + "surpassed": 25648, + "inoa": 25649, + "dag": 25650, + "blen": 25651, + "extending": 25652, + "ating": 25653, + "algae": 25654, + "baller": 25655, + "umar": 25656, + "snooker": 25657, + "collu": 25658, + "flown": 25659, + "thub": 25660, + "ridiculously": 25661, + "kish": 25662, + "ople": 25663, + "dire": 25664, + "asser": 25665, + "aristo": 25666, + "sciss": 25667, + "hating": 25668, + "trouble": 25669, + "sylvia": 25670, + "succul": 25671, + "plots": 25672, + "sincerely": 25673, + "aler": 25674, + "laureate": 25675, + "brack": 25676, + "attn": 25677, + "rifles": 25678, + "meto": 25679, + "collectible": 25680, + "cuomo": 25681, + "contestant": 25682, + "consistency": 25683, + "antz": 25684, + "ranges": 25685, + "abigail": 25686, + "deb": 25687, + "minister": 25688, + "growers": 25689, + "anoo": 25690, + "hoover": 25691, + "dreamer": 25692, + "nucle": 25693, + "research": 25694, + "miy": 25695, + "shahid": 25696, + "mav": 25697, + "dhoni": 25698, + "cini": 25699, + "doj": 25700, + "hindus": 25701, + "partying": 25702, + "dali": 25703, + "alonso": 25704, + "informal": 25705, + "clarkson": 25706, + "itton": 25707, + "kian": 25708, + "cityo": 25709, + "mori": 25710, + "lasted": 25711, + "aspen": 25712, + "library": 25713, + "suspici": 25714, + "quat": 25715, + "denial": 25716, + "folder": 25717, + "chori": 25718, + "sweeping": 25719, + "enix": 25720, + "ðŁįĤ": 25721, + "ØŃ": 25722, + "nascar": 25723, + "handmadehour": 25724, + "moul": 25725, + "heatwave": 25726, + "emer": 25727, + "examine": 25728, + "ibn": 25729, + "grind": 25730, + "pov": 25731, + "tionist": 25732, + "mbo": 25733, + "sheila": 25734, + "integrate": 25735, + "omes": 25736, + "takeaway": 25737, + "cerv": 25738, + "connie": 25739, + "ticket": 25740, + "celed": 25741, + "bien": 25742, + "visually": 25743, + "madagascar": 25744, + "sorry": 25745, + "gui": 25746, + "parkrun": 25747, + "traits": 25748, + "labe": 25749, + "poisoning": 25750, + "à¥Ģ": 25751, + "viable": 25752, + "bohemian": 25753, + "dentistry": 25754, + "bados": 25755, + "sprouts": 25756, + "masked": 25757, + "teddy": 25758, + "ðŁĺ·": 25759, + "saf": 25760, + "saas": 25761, + "jiang": 25762, + "tight": 25763, + "speaker": 25764, + "withdrawal": 25765, + "bcn": 25766, + "assigned": 25767, + "classrooms": 25768, + "fleming": 25769, + "ðŁĴ«": 25770, + "supergirl": 25771, + "totals": 25772, + "tabletop": 25773, + "ebooks": 25774, + "horizontal": 25775, + "craz": 25776, + "flush": 25777, + "jard": 25778, + "cdc": 25779, + "erson": 25780, + "ãħł": 25781, + "greenwood": 25782, + "nih": 25783, + "cox": 25784, + "ada": 25785, + "litre": 25786, + "going": 25787, + "vicky": 25788, + "curved": 25789, + "louie": 25790, + "grains": 25791, + "hye": 25792, + "longe": 25793, + "remedy": 25794, + "trainee": 25795, + "sanjay": 25796, + "superstars": 25797, + "maser": 25798, + "manu": 25799, + "sage": 25800, + "whl": 25801, + "ðŁĺĤðŁĺŃ": 25802, + "ðŁijįðŁı»": 25803, + "msd": 25804, + "enz": 25805, + "rabhu": 25806, + "joo": 25807, + "ghu": 25808, + "acer": 25809, + "epo": 25810, + "resurrection": 25811, + "justicefor": 25812, + "blended": 25813, + "moda": 25814, + "avalanche": 25815, + "francesco": 25816, + "respective": 25817, + "gs": 25818, + "yeast": 25819, + "welch": 25820, + 
"devotion": 25821, + "getin": 25822, + "atheism": 25823, + "amic": 25824, + "carolyn": 25825, + "loc": 25826, + "ldnont": 25827, + "avec": 25828, + "usda": 25829, + "legged": 25830, + "bravery": 25831, + "blower": 25832, + "cowboy": 25833, + "heh": 25834, + "stible": 25835, + "buffal": 25836, + "channel": 25837, + "runchat": 25838, + "âĺķï¸ı": 25839, + "ideology": 25840, + "bestseller": 25841, + "yoo": 25842, + "peanu": 25843, + "bonne": 25844, + "felic": 25845, + "edison": 25846, + "fractu": 25847, + "narendra": 25848, + "ppets": 25849, + "seymour": 25850, + "riviera": 25851, + "hector": 25852, + "necessarily": 25853, + "bianca": 25854, + "societies": 25855, + "thebest": 25856, + "wg": 25857, + "sentences": 25858, + "wink": 25859, + "vaccines": 25860, + "palooza": 25861, + "jamming": 25862, + "asf": 25863, + "mpus": 25864, + "agreements": 25865, + "eck": 25866, + "bac": 25867, + "honore": 25868, + "compul": 25869, + "wildcat": 25870, + "imposed": 25871, + "yoga": 25872, + "hudson": 25873, + "canceled": 25874, + "lich": 25875, + "fuzzy": 25876, + "esque": 25877, + "chuk": 25878, + "wvu": 25879, + "sek": 25880, + "flipping": 25881, + "rhon": 25882, + "wished": 25883, + "wha": 25884, + "capability": 25885, + "lenovo": 25886, + "ìĨĮëħĦëĭ": 25887, + "vivo": 25888, + "tvd": 25889, + "nora": 25890, + "silk": 25891, + "pasadena": 25892, + "yosemite": 25893, + "valuation": 25894, + "clocks": 25895, + "uber": 25896, + "mrc": 25897, + "darkest": 25898, + "aubre": 25899, + "sso": 25900, + "belly": 25901, + "wrestlers": 25902, + "killin": 25903, + "louder": 25904, + "buckley": 25905, + "geel": 25906, + "adon": 25907, + "uns": 25908, + "appealing": 25909, + "ðŁij¯": 25910, + "semitism": 25911, + "listens": 25912, + "fitz": 25913, + "ãĥ³ãĥ": 25914, + "nylon": 25915, + "arty": 25916, + "seemingly": 25917, + "hala": 25918, + "suited": 25919, + "ety": 25920, + "sheds": 25921, + "muffins": 25922, + "apric": 25923, + "uments": 25924, + "uta": 25925, + "jammu": 25926, + "chelseafc": 25927, + "starz": 25928, + "yoko": 25929, + "root": 25930, + "cleansing": 25931, + "diar": 25932, + "pioneering": 25933, + "iheartradio": 25934, + "digiti": 25935, + "findyour": 25936, + "cano": 25937, + "ðŁĴİ": 25938, + "zol": 25939, + "spacecraft": 25940, + "sixers": 25941, + "moisturi": 25942, + "bile": 25943, + "tists": 25944, + "horton": 25945, + "ranging": 25946, + "columbi": 25947, + "meteoro": 25948, + "sentiment": 25949, + "epl": 25950, + "footh": 25951, + "textbook": 25952, + "drainage": 25953, + "rly": 25954, + "scue": 25955, + "imrankhan": 25956, + "ðŁĴ¸": 25957, + "margarita": 25958, + "eddy": 25959, + "predicts": 25960, + "gamergate": 25961, + "advise": 25962, + "growthhacking": 25963, + "loveyou": 25964, + "ugand": 25965, + "vf": 25966, + "benghazi": 25967, + "slater": 25968, + "newor": 25969, + "chel": 25970, + "independenceday": 25971, + "pnp": 25972, + "cullen": 25973, + "hoodies": 25974, + "numbered": 25975, + "britt": 25976, + "tsa": 25977, + "kltu": 25978, + "sages": 25979, + "momo": 25980, + "oneplus": 25981, + "coll": 25982, + "guts": 25983, + "wta": 25984, + "mesmeri": 25985, + "enhancing": 25986, + "chiroprac": 25987, + "jis": 25988, + "teenagers": 25989, + "mone": 25990, + "constellation": 25991, + "sweepstakes": 25992, + "eze": 25993, + "slovakia": 25994, + "laye": 25995, + "pearce": 25996, + "waver": 25997, + "pogba": 25998, + "kron": 25999, + "surgeons": 26000, + "marx": 26001, + "tid": 26002, + "gga": 26003, + "descend": 26004, + "pours": 26005, + "uprising": 26006, + "walla": 26007, + "sabbath": 
26008, + "bachelore": 26009, + "mackin": 26010, + "kam": 26011, + "peterborough": 26012, + "hora": 26013, + "ðŁĮŁðŁĮŁ": 26014, + "thinkbig": 26015, + "rj": 26016, + "hydrau": 26017, + "spal": 26018, + "universit": 26019, + "ðŁıī": 26020, + "mailonline": 26021, + "leagueof": 26022, + "tenants": 26023, + "wally": 26024, + "lance": 26025, + "heavens": 26026, + "ddr": 26027, + "bolts": 26028, + "amir": 26029, + "iphone": 26030, + "cigar": 26031, + "endu": 26032, + "rei": 26033, + "elabor": 26034, + "ringing": 26035, + "johnson": 26036, + "characteristics": 26037, + "saloon": 26038, + "algorithms": 26039, + "talkin": 26040, + "mtn": 26041, + "dive": 26042, + "regionals": 26043, + "ffice": 26044, + "hati": 26045, + "deviantart": 26046, + "sotto": 26047, + "shiro": 26048, + "lama": 26049, + "kwe": 26050, + "faded": 26051, + "porting": 26052, + "tummy": 26053, + "estates": 26054, + "buenos": 26055, + "ð٦ģ": 26056, + "believer": 26057, + "penetr": 26058, + "darn": 26059, + "spite": 26060, + "canopy": 26061, + "fashioni": 26062, + "tilla": 26063, + "petals": 26064, + "elijah": 26065, + "brawl": 26066, + "martyr": 26067, + "ë°©íĥĦìĨĮëħĦëĭ": 26068, + "midtown": 26069, + "erich": 26070, + "dapper": 26071, + "smtown": 26072, + "megam": 26073, + "www": 26074, + "lele": 26075, + "ons": 26076, + "catfish": 26077, + "firth": 26078, + "fossilfriday": 26079, + "ballpark": 26080, + "thaw": 26081, + "potent": 26082, + "illie": 26083, + "creep": 26084, + "carp": 26085, + "soap": 26086, + "gundam": 26087, + "infec": 26088, + "yyyyy": 26089, + "न": 26090, + "zag": 26091, + "ritt": 26092, + "calculator": 26093, + "boca": 26094, + "oko": 26095, + "toad": 26096, + "threaten": 26097, + "refined": 26098, + "olympic": 26099, + "accomplishment": 26100, + "bacterial": 26101, + "aji": 26102, + "tatum": 26103, + "feliz": 26104, + "sheed": 26105, + "jat": 26106, + "thic": 26107, + "jamal": 26108, + "ðĿĺ": 26109, + "lina": 26110, + "ðŁIJ¯": 26111, + "joking": 26112, + "yotpo": 26113, + "pinch": 26114, + "akron": 26115, + "herb": 26116, + "motivation": 26117, + "lia": 26118, + "hostage": 26119, + "creek": 26120, + "gamble": 26121, + "russell": 26122, + "patti": 26123, + "fotos": 26124, + "cpc": 26125, + "broken": 26126, + "backthe": 26127, + "clays": 26128, + "umm": 26129, + "stockton": 26130, + "maternal": 26131, + "ür": 26132, + "lakel": 26133, + "century": 26134, + "bek": 26135, + "infected": 26136, + "ม": 26137, + "smackdown": 26138, + "manned": 26139, + "tahoe": 26140, + "smes": 26141, + "basa": 26142, + "sula": 26143, + "augusta": 26144, + ".*": 26145, + "rohingya": 26146, + "greed": 26147, + "counselor": 26148, + "silhouette": 26149, + "gravit": 26150, + "clause": 26151, + "'-": 26152, + "bobc": 26153, + "occasions": 26154, + "nowadays": 26155, + "dictat": 26156, + "beard": 26157, + "nally": 26158, + "brightest": 26159, + "kabul": 26160, + "incindia": 26161, + "dhanush": 26162, + "archaeological": 26163, + "cheape": 26164, + "mizzou": 26165, + "dhi": 26166, + "ovski": 26167, + "baxter": 26168, + "assemble": 26169, + "â": 26170, + "gigi": 26171, + "acam": 26172, + "wisely": 26173, + "hazard": 26174, + "northampton": 26175, + "âľĪï¸ı": 26176, + "meth": 26177, + "blasting": 26178, + "reunite": 26179, + "mulus": 26180, + "alizes": 26181, + "tread": 26182, + "mila": 26183, + "edward": 26184, + "kova": 26185, + "pesto": 26186, + "ðŁij¶": 26187, + "vitz": 26188, + "hydraulic": 26189, + "refurbished": 26190, + "motel": 26191, + "isabella": 26192, + "homme": 26193, + "severance": 26194, + "uphol": 26195, + "miserable": 26196, + 
"fari": 26197, + "latter": 26198, + "efer": 26199, + "crackers": 26200, + "esl": 26201, + "acio": 26202, + "yyj": 26203, + "inan": 26204, + "ecb": 26205, + "zind": 26206, + "panas": 26207, + "trucking": 26208, + "reed": 26209, + "shaker": 26210, + "burgess": 26211, + "empire": 26212, + "agnes": 26213, + "nington": 26214, + "artworks": 26215, + "frs": 26216, + "tile": 26217, + "biome": 26218, + "eun": 26219, + "chong": 26220, + "americana": 26221, + "godfather": 26222, + "goblin": 26223, + "ishi": 26224, + "!).": 26225, + "tempted": 26226, + "genomics": 26227, + "mandate": 26228, + "cky": 26229, + "ðŁĴĻðŁĴĽ": 26230, + "somali": 26231, + "brandy": 26232, + "inven": 26233, + "spokesperson": 26234, + "pcb": 26235, + "yuan": 26236, + "hg": 26237, + "faz": 26238, + "starwars": 26239, + "rowan": 26240, + "bluegrass": 26241, + "dong": 26242, + "dday": 26243, + "trinidad": 26244, + "erton": 26245, + "banning": 26246, + "retention": 26247, + "cured": 26248, + "toberfest": 26249, + "reset": 26250, + "weis": 26251, + "detached": 26252, + "behindthescenes": 26253, + "immunity": 26254, + "pha": 26255, + "bray": 26256, + "ðŁij½": 26257, + "rancho": 26258, + "ramsay": 26259, + "estonia": 26260, + "ndtv": 26261, + "].": 26262, + "cabaret": 26263, + "taro": 26264, + "dv": 26265, + "showcases": 26266, + "plum": 26267, + "ðŁij¸": 26268, + "sonoma": 26269, + "prepa": 26270, + "memorab": 26271, + "estu": 26272, + "driveway": 26273, + "ules": 26274, + "magnus": 26275, + "xr": 26276, + "nnn": 26277, + "muchas": 26278, + "enge": 26279, + "streamed": 26280, + "forestry": 26281, + "audiobook": 26282, + "troy": 26283, + "reckless": 26284, + "kilom": 26285, + "ruler": 26286, + "rak": 26287, + "procession": 26288, + "ions": 26289, + "poole": 26290, + "noctur": 26291, + "whs": 26292, + "farmhouse": 26293, + "pera": 26294, + "parme": 26295, + "hypocrisy": 26296, + "sics": 26297, + "vant": 26298, + "cask": 26299, + "holistic": 26300, + "aust": 26301, + "п": 26302, + "indo": 26303, + "ðŁij©âĢį": 26304, + "diso": 26305, + "dispatch": 26306, + "olsen": 26307, + "makeit": 26308, + "ennis": 26309, + "centre": 26310, + "arrange": 26311, + "ðŁĮ¼": 26312, + "salted": 26313, + "easiest": 26314, + "fate": 26315, + "regatta": 26316, + "mozz": 26317, + "acan": 26318, + "sini": 26319, + "gically": 26320, + "chops": 26321, + "chicken": 26322, + "workin": 26323, + "hagg": 26324, + "involve": 26325, + "weeds": 26326, + "bookday": 26327, + "wakeup": 26328, + "kyr": 26329, + "michelin": 26330, + "fuss": 26331, + "rejuven": 26332, + "vacancies": 26333, + "incarcer": 26334, + "mst": 26335, + "scents": 26336, + "sovereign": 26337, + "kicker": 26338, + "à§": 26339, + "bod": 26340, + "âĢĶ>": 26341, + "sah": 26342, + "mobil": 26343, + "shropshire": 26344, + "ophone": 26345, + "dresser": 26346, + "missuni": 26347, + "hepburn": 26348, + "imo": 26349, + "foliage": 26350, + "diagnostic": 26351, + "assan": 26352, + "cycling": 26353, + "guilt": 26354, + "csa": 26355, + "puertorico": 26356, + "winelover": 26357, + "wakefield": 26358, + "doggy": 26359, + "khe": 26360, + "papp": 26361, + "cog": 26362, + "allot": 26363, + "cuck": 26364, + "poetic": 26365, + "mio": 26366, + "revit": 26367, + "magician": 26368, + "ç¥": 26369, + "antenna": 26370, + "westwood": 26371, + "mberg": 26372, + "luxe": 26373, + "oatmeal": 26374, + "ج": 26375, + "teat": 26376, + "ffee": 26377, + "searches": 26378, + "lly": 26379, + "pluto": 26380, + "elon": 26381, + "lettering": 26382, + "innocence": 26383, + "fai": 26384, + "annon": 26385, + "telangana": 26386, + "mait": 26387, + 
"neural": 26388, + "canni": 26389, + "aroma": 26390, + "astor": 26391, + "fex": 26392, + "cocac": 26393, + "monetary": 26394, + "fent": 26395, + "unsure": 26396, + "'@": 26397, + "indirec": 26398, + "tehran": 26399, + "isolation": 26400, + "libs": 26401, + "makeup": 26402, + "mercedes": 26403, + "ffy": 26404, + "hetero": 26405, + "deo": 26406, + "scom": 26407, + "cursed": 26408, + "veteransday": 26409, + "frankenstein": 26410, + "shrews": 26411, + "deco": 26412, + "geese": 26413, + "leftover": 26414, + "hadid": 26415, + "variable": 26416, + "academics": 26417, + "carolin": 26418, + "undergoing": 26419, + "variation": 26420, + "nah": 26421, + "ssier": 26422, + "gamersunite": 26423, + "pursuing": 26424, + "emerged": 26425, + "llers": 26426, + "controlling": 26427, + "roaring": 26428, + "meteor": 26429, + "volt": 26430, + "dawgs": 26431, + "beaver": 26432, + "islife": 26433, + "bathrooms": 26434, + "acional": 26435, + "prevent": 26436, + "lakedistrict": 26437, + "inals": 26438, + "yani": 26439, + "grabbing": 26440, + "sacks": 26441, + "lez": 26442, + "sway": 26443, + "kool": 26444, + "times": 26445, + "klopp": 26446, + "lade": 26447, + "concord": 26448, + "resulted": 26449, + "revive": 26450, + "reconciliation": 26451, + "oland": 26452, + "azz": 26453, + "giro": 26454, + "mandarin": 26455, + "deen": 26456, + "nutritional": 26457, + "iscoming": 26458, + "vani": 26459, + "awwww": 26460, + "derived": 26461, + "loveyour": 26462, + "stopthe": 26463, + "shouting": 26464, + "novak": 26465, + "ðŁĻĮðŁı¾": 26466, + "loaf": 26467, + "displaying": 26468, + "sundaywith": 26469, + "maguire": 26470, + "cheri": 26471, + "ðŁıŁ": 26472, + "rematch": 26473, + "quic": 26474, + "Ú©": 26475, + "yin": 26476, + "ðŁĺ¹": 26477, + "ilive": 26478, + "zip": 26479, + "ourke": 26480, + "downloads": 26481, + "swat": 26482, + "mississ": 26483, + "carers": 26484, + "tment": 26485, + "property": 26486, + "hahahahahaha": 26487, + "gibbs": 26488, + "surrey": 26489, + "arise": 26490, + "ticism": 26491, + "stia": 26492, + "irling": 26493, + "frog": 26494, + "cose": 26495, + "bassist": 26496, + "foreig": 26497, + "leau": 26498, + "pillows": 26499, + "holla": 26500, + "elie": 26501, + "disclosure": 26502, + "peanuts": 26503, + "intech": 26504, + "wwc": 26505, + "plunge": 26506, + "triumph": 26507, + "cori": 26508, + "slippers": 26509, + "ðŁĻıðŁĻı": 26510, + "neutrality": 26511, + "mare": 26512, + "hairy": 26513, + "gangster": 26514, + "humming": 26515, + "custard": 26516, + "merlin": 26517, + "alea": 26518, + "sby": 26519, + "damp": 26520, + "mohan": 26521, + "verbal": 26522, + "jst": 26523, + "gutted": 26524, + "bjor": 26525, + "unfinished": 26526, + "ðŁĩ¯ðŁĩµ": 26527, + "unhappy": 26528, + "âļ«ï¸ı": 26529, + "bypass": 26530, + "atsu": 26531, + "fischer": 26532, + "sav": 26533, + "africans": 26534, + "reuse": 26535, + "midway": 26536, + "demolished": 26537, + "gerrard": 26538, + "hercules": 26539, + "ÄŁ": 26540, + "medicines": 26541, + "clicking": 26542, + "surround": 26543, + "joong": 26544, + "waving": 26545, + "tribes": 26546, + "wetlands": 26547, + "officiel": 26548, + "arguing": 26549, + "lle": 26550, + "dova": 26551, + "suzy": 26552, + "clubhouse": 26553, + "negro": 26554, + "obtain": 26555, + "gao": 26556, + "glance": 26557, + "assist": 26558, + "chos": 26559, + "ãĤ¢": 26560, + "âĺķ": 26561, + "adrid": 26562, + "occurs": 26563, + "stans": 26564, + "pardon": 26565, + "liveli": 26566, + "employed": 26567, + "revisit": 26568, + "ffxiv": 26569, + "bble": 26570, + "nearing": 26571, + "miner": 26572, + "ðŁĺ¹": 26573, + "giovanni": 
26574, + "upto": 26575, + "marvell": 26576, + "marse": 26577, + "towels": 26578, + "cbn": 26579, + "engineered": 26580, + "yelling": 26581, + "spartan": 26582, + "sians": 26583, + "ðŁĻĮðŁı¼": 26584, + "sev": 26585, + "coyote": 26586, + "stadi": 26587, + "tcm": 26588, + "appen": 26589, + "shenanigans": 26590, + "openaccess": 26591, + "soaked": 26592, + "masqu": 26593, + "levine": 26594, + "strokes": 26595, + "lk": 26596, + "apartheid": 26597, + "hiphop": 26598, + "chardon": 26599, + "maymay": 26600, + "haasan": 26601, + "stripped": 26602, + "fro": 26603, + "scription": 26604, + "fton": 26605, + "hf": 26606, + "prisons": 26607, + "marshal": 26608, + "ķãĤ": 26609, + "ancho": 26610, + "compromise": 26611, + "classification": 26612, + "buzzfeed": 26613, + "bbloggers": 26614, + "deserving": 26615, + ")/": 26616, + "sway": 26617, + "obo": 26618, + "campers": 26619, + "podernfamily": 26620, + "poured": 26621, + "brie": 26622, + "squirrels": 26623, + "seize": 26624, + ":#": 26625, + "lek": 26626, + "timb": 26627, + "stacy": 26628, + "nasdaq": 26629, + "repeatedly": 26630, + "brat": 26631, + "mighty": 26632, + "competitor": 26633, + "mahone": 26634, + "desi": 26635, + "oke": 26636, + "bmw": 26637, + "shie": 26638, + "fcb": 26639, + "cheapest": 26640, + "minimalist": 26641, + "paramount": 26642, + "nate": 26643, + "haras": 26644, + "insanity": 26645, + "lateral": 26646, + "mentality": 26647, + "mozam": 26648, + "tapped": 26649, + "yadav": 26650, + "usp": 26651, + "bway": 26652, + "theod": 26653, + "bilt": 26654, + "raids": 26655, + "empress": 26656, + "adapted": 26657, + "patron": 26658, + "nutshell": 26659, + "agra": 26660, + "beaded": 26661, + "sundaywithmarsha": 26662, + "viking": 26663, + "proceed": 26664, + "maintained": 26665, + "thinkbigsundaywithmarsha": 26666, + "snes": 26667, + "musica": 26668, + "tower": 26669, + "chab": 26670, + "bok": 26671, + "smt": 26672, + "insult": 26673, + "harvesting": 26674, + "window": 26675, + "ruther": 26676, + "beige": 26677, + "decal": 26678, + "indicate": 26679, + "mailing": 26680, + "rift": 26681, + "pole": 26682, + "anderson": 26683, + "choral": 26684, + "spride": 26685, + "lili": 26686, + "evelyn": 26687, + "imrankhanpti": 26688, + "....\"": 26689, + "kered": 26690, + "undp": 26691, + "waterfalls": 26692, + "sears": 26693, + "lemans": 26694, + "worldseries": 26695, + "riel": 26696, + "anie": 26697, + "appar": 26698, + "scorers": 26699, + "lamp": 26700, + "athan": 26701, + "physicians": 26702, + "quinoa": 26703, + "refusing": 26704, + "vuitton": 26705, + "unleash": 26706, + "sla": 26707, + "pati": 26708, + "shouts": 26709, + "intentions": 26710, + "foamed": 26711, + "european": 26712, + "neighborhoods": 26713, + "meer": 26714, + "manson": 26715, + "duh": 26716, + "brat": 26717, + "cones": 26718, + "bowl": 26719, + "kazakhstan": 26720, + "ि": 26721, + "inappropriate": 26722, + "delhi": 26723, + "ketchup": 26724, + "fulton": 26725, + "sys": 26726, + "consult": 26727, + "garfield": 26728, + "togo": 26729, + "fml": 26730, + "fled": 26731, + "bds": 26732, + "facilitate": 26733, + "reebok": 26734, + "selfie": 26735, + "elevate": 26736, + "activate": 26737, + "bible": 26738, + "cawx": 26739, + "bys": 26740, + "camille": 26741, + "syou": 26742, + "skool": 26743, + "hert": 26744, + "wbc": 26745, + "pledges": 26746, + "recorder": 26747, + "posh": 26748, + "acre": 26749, + "soaking": 26750, + "matil": 26751, + "vsco": 26752, + "shootings": 26753, + "plar": 26754, + "econ": 26755, + "ðŁĻĮðŁı»": 26756, + "rashid": 26757, + "ubi": 26758, + "ðŁ¤¤": 26759, + "swinging": 
26760, + "wipe": 26761, + "raptor": 26762, + "msu": 26763, + "musicvideo": 26764, + "durham": 26765, + "attic": 26766, + "aparty": 26767, + "fetus": 26768, + "activation": 26769, + "aaz": 26770, + "motivate": 26771, + "ðŁĴķðŁĴķðŁĴķ": 26772, + "jal": 26773, + "म": 26774, + "agon": 26775, + "scheer": 26776, + "stalker": 26777, + "foster": 26778, + "azzo": 26779, + "telegram": 26780, + "vigor": 26781, + "slaugh": 26782, + "screenshots": 26783, + "entrepreneu": 26784, + "kristin": 26785, + "intention": 26786, + "chilli": 26787, + "fraction": 26788, + "dona": 26789, + "gea": 26790, + "tcu": 26791, + "site": 26792, + "lak": 26793, + "emil": 26794, + "dnt": 26795, + "boro": 26796, + "wilkinson": 26797, + "recu": 26798, + "atoday": 26799, + "tanya": 26800, + "blanco": 26801, + "cdn": 26802, + "brilliantly": 26803, + "gcc": 26804, + "acc": 26805, + "evacuated": 26806, + "therine": 26807, + "denny": 26808, + "caitlin": 26809, + "shepard": 26810, + "pouch": 26811, + "handheld": 26812, + "southeastern": 26813, + "haa": 26814, + "ô": 26815, + "resolutions": 26816, + "ledger": 26817, + "srin": 26818, + "rar": 26819, + "shattered": 26820, + "chimney": 26821, + "imwith": 26822, + "meteor": 26823, + "handled": 26824, + "rake": 26825, + "townsend": 26826, + "enhan": 26827, + "shipy": 26828, + "duct": 26829, + "twx": 26830, + "inflammatory": 26831, + "warhammer": 26832, + "theatrical": 26833, + "gros": 26834, + "skar": 26835, + "scotty": 26836, + "niel": 26837, + "tito": 26838, + "tini": 26839, + "connection": 26840, + "_.": 26841, + "goldenglobes": 26842, + "shaq": 26843, + "ðŁı³ï¸ı": 26844, + "hallway": 26845, + "fronts": 26846, + "effectiveness": 26847, + "glaston": 26848, + "dhs": 26849, + "expi": 26850, + "toh": 26851, + "cpl": 26852, + "scs": 26853, + "reo": 26854, + "hag": 26855, + "resemblance": 26856, + "horan": 26857, + "abusive": 26858, + "quer": 26859, + "virtue": 26860, + "cholester": 26861, + "aq": 26862, + "shane": 26863, + "mce": 26864, + "carriers": 26865, + "distress": 26866, + "rewind": 26867, + "¡": 26868, + "voodoo": 26869, + "intact": 26870, + "anno": 26871, + "ðŁĺ¤": 26872, + "piled": 26873, + "adia": 26874, + "ãĥ³": 26875, + "enow": 26876, + "digs": 26877, + "lightly": 26878, + "goofy": 26879, + "turbine": 26880, + "governors": 26881, + "conte": 26882, + "reopen": 26883, + "pah": 26884, + "ive": 26885, + "crafting": 26886, + "sweeps": 26887, + "jodi": 26888, + "ande": 26889, + "zucker": 26890, + "kawaii": 26891, + "oko": 26892, + "vai": 26893, + "outline": 26894, + "kristi": 26895, + "tsn": 26896, + "inspo": 26897, + "quint": 26898, + "filthy": 26899, + "lynne": 26900, + "listeners": 26901, + "departing": 26902, + "ord": 26903, + "tweed": 26904, + ",&": 26905, + "alek": 26906, + "selfish": 26907, + "norther": 26908, + "recognizes": 26909, + "ips": 26910, + "bes": 26911, + "aed": 26912, + "wills": 26913, + "peat": 26914, + "surroundings": 26915, + "monuments": 26916, + "aisle": 26917, + "becker": 26918, + "lav": 26919, + "quantity": 26920, + "vah": 26921, + "helicopters": 26922, + "tucked": 26923, + "alvarez": 26924, + "shape": 26925, + "obey": 26926, + "additi": 26927, + "roadside": 26928, + "mite": 26929, + "blers": 26930, + "epage": 26931, + "jau": 26932, + "ignorant": 26933, + "bins": 26934, + "lulu": 26935, + "xo": 26936, + "cfo": 26937, + "eeeee": 26938, + "apprenticeship": 26939, + "sheffiel": 26940, + "toi": 26941, + "hok": 26942, + "fakenews": 26943, + "deploy": 26944, + "aidan": 26945, + "huskers": 26946, + "ãĢİ": 26947, + "westbrook": 26948, + "mister": 26949, + "configur": 
26950, + "carr": 26951, + "fica": 26952, + "proceedings": 26953, + "haw": 26954, + "steak": 26955, + "murderer": 26956, + "payday": 26957, + "ajo": 26958, + "pvc": 26959, + "donates": 26960, + "biaf": 26961, + "nomnom": 26962, + "beit": 26963, + "kali": 26964, + "xrp": 26965, + "ahmedabad": 26966, + "semic": 26967, + "chey": 26968, + "xtra": 26969, + "antwer": 26970, + "headlining": 26971, + "squares": 26972, + "rounded": 26973, + "fluore": 26974, + "bold": 26975, + "disasters": 26976, + "amoo": 26977, + "generic": 26978, + "cranes": 26979, + "briefly": 26980, + "gig": 26981, + "austerity": 26982, + "anticipation": 26983, + "forti": 26984, + "treasurer": 26985, + "canny": 26986, + "cecil": 26987, + "detected": 26988, + "checklist": 26989, + "ว": 26990, + "pamela": 26991, + "barbados": 26992, + "anfield": 26993, + "hearty": 26994, + "txlege": 26995, + "perenni": 26996, + "arrog": 26997, + "ingram": 26998, + "âĹı": 26999, + "tyne": 27000, + "spoon": 27001, + "ration": 27002, + "amba": 27003, + "mbe": 27004, + "camel": 27005, + "hhs": 27006, + "yorkshire": 27007, + "reflective": 27008, + "freaks": 27009, + "tok": 27010, + "judo": 27011, + "particles": 27012, + "dubs": 27013, + "banjo": 27014, + "accreditation": 27015, + "proverbs": 27016, + "overdose": 27017, + "integral": 27018, + "guang": 27019, + "mcs": 27020, + "supercar": 27021, + "afb": 27022, + "alvin": 27023, + "ails": 27024, + "xtre": 27025, + "staging": 27026, + "twent": 27027, + "rabbits": 27028, + "maro": 27029, + "instem": 27030, + "doll": 27031, + "cray": 27032, + "santana": 27033, + "bleach": 27034, + "minions": 27035, + "cheap": 27036, + "mant": 27037, + "divers": 27038, + "catalonia": 27039, + "lois": 27040, + "matri": 27041, + "cougar": 27042, + "kayak": 27043, + "egre": 27044, + "pso": 27045, + "aia": 27046, + "å®": 27047, + "charlton": 27048, + "tracked": 27049, + "scari": 27050, + "pett": 27051, + "fwd": 27052, + "xin": 27053, + "gravel": 27054, + "bric": 27055, + "biggboss": 27056, + "arden": 27057, + "hugging": 27058, + "palms": 27059, + "stv": 27060, + "limb": 27061, + "themovie": 27062, + "handicap": 27063, + "rime": 27064, + "zai": 27065, + "stub": 27066, + "india": 27067, + "lithuania": 27068, + "rhyth": 27069, + "pita": 27070, + "macedonia": 27071, + "highered": 27072, + "bridget": 27073, + "schwarz": 27074, + "skelet": 27075, + "hikes": 27076, + "antarctic": 27077, + "cps": 27078, + "mashup": 27079, + "а": 27080, + "nell": 27081, + "chandra": 27082, + "heir": 27083, + "anus": 27084, + "sheridan": 27085, + "mimi": 27086, + "museu": 27087, + "becca": 27088, + "anir": 27089, + "barrie": 27090, + "diocese": 27091, + "comparable": 27092, + "ðŁı³ï¸ıâĢį": 27093, + "yukon": 27094, + "mep": 27095, + "hormon": 27096, + "meric": 27097, + "alf": 27098, + "conquered": 27099, + "christchurch": 27100, + "ðŁĴĻðŁĴĻ": 27101, + "hazardous": 27102, + "pooh": 27103, + "conting": 27104, + "retrospective": 27105, + "parame": 27106, + "nair": 27107, + "consor": 27108, + "hotra": 27109, + "astonishing": 27110, + "caterpillar": 27111, + "uman": 27112, + "tism": 27113, + "tvs": 27114, + "servic": 27115, + "croydon": 27116, + "morales": 27117, + "cg": 27118, + "cum": 27119, + "teur": 27120, + "scanada": 27121, + "sall": 27122, + "magnolia": 27123, + "elise": 27124, + "thour": 27125, + "ி": 27126, + "agomez": 27127, + "phelps": 27128, + "ë°©íĥĦìĨĮëħĦëĭ¨": 27129, + "whos": 27130, + "weaving": 27131, + "sisd": 27132, + "proposes": 27133, + "crows": 27134, + "presale": 27135, + "economies": 27136, + "bernardo": 27137, + "shahid": 27138, + 
"airshow": 27139, + "mccann": 27140, + "horticul": 27141, + "nrl": 27142, + "duel": 27143, + "mongolia": 27144, + "toulou": 27145, + "requirement": 27146, + "structured": 27147, + "edi": 27148, + "olives": 27149, + "hea": 27150, + "cuter": 27151, + "к": 27152, + "enthusiast": 27153, + "harriet": 27154, + "dominion": 27155, + "submer": 27156, + "ðŁįĥ": 27157, + "saab": 27158, + "nesburg": 27159, + "moff": 27160, + "defended": 27161, + "burt": 27162, + "rewarded": 27163, + "goldman": 27164, + "optics": 27165, + "khalid": 27166, + "households": 27167, + "buckets": 27168, + "cecil": 27169, + "chess": 27170, + "substantial": 27171, + "efl": 27172, + "operation": 27173, + "evaluate": 27174, + "stn": 27175, + "recession": 27176, + "lll": 27177, + "tomas": 27178, + "truths": 27179, + "akbar": 27180, + "swords": 27181, + "pact": 27182, + "embarrass": 27183, + "hao": 27184, + "ayurve": 27185, + "scripture": 27186, + "nycc": 27187, + "opt": 27188, + "diameter": 27189, + "scented": 27190, + "organizers": 27191, + "relat": 27192, + "hae": 27193, + "dreamers": 27194, + "dese": 27195, + "ðŁĮ»": 27196, + "restricted": 27197, + "nale": 27198, + "rhp": 27199, + "dolan": 27200, + "munster": 27201, + "haired": 27202, + "consultants": 27203, + "joints": 27204, + "humil": 27205, + "dill": 27206, + "relentless": 27207, + "té": 27208, + "afil": 27209, + "utilities": 27210, + "japanese": 27211, + "condemn": 27212, + "petite": 27213, + "collide": 27214, + "qf": 27215, + "peaches": 27216, + "courier": 27217, + "lore": 27218, + "âĺİï¸ı": 27219, + "reliability": 27220, + "chuk": 27221, + "ðŁĻĥ": 27222, + "stures": 27223, + "gether": 27224, + "hostel": 27225, + "bier": 27226, + "-_-": 27227, + "âĩ": 27228, + "eze": 27229, + "tailo": 27230, + "dient": 27231, + "bluff": 27232, + "chuffed": 27233, + "pilip": 27234, + "monarch": 27235, + "eem": 27236, + "buchan": 27237, + "bick": 27238, + "opau": 27239, + "kups": 27240, + "ย": 27241, + "pistons": 27242, + "spins": 27243, + "mand": 27244, + "cest": 27245, + "burne": 27246, + "vile": 27247, + "cherries": 27248, + "beckett": 27249, + "needles": 27250, + "panch": 27251, + "ëĤ": 27252, + "hahah": 27253, + "troubles": 27254, + "insists": 27255, + "doyou": 27256, + "gmc": 27257, + "mortar": 27258, + "delegate": 27259, + "inn": 27260, + "ganda": 27261, + "sinatra": 27262, + "त": 27263, + "speeding": 27264, + "pupil": 27265, + "premises": 27266, + "alignment": 27267, + "pikach": 27268, + "asus": 27269, + "jalan": 27270, + "ص": 27271, + "limestone": 27272, + "folkl": 27273, + "parmesan": 27274, + "ceil": 27275, + "moy": 27276, + "shawnmendes": 27277, + "acup": 27278, + "hust": 27279, + "otes": 27280, + "medina": 27281, + "madi": 27282, + "gtav": 27283, + "censorship": 27284, + "arg": 27285, + "sweeney": 27286, + "sykes": 27287, + "colo": 27288, + "footsteps": 27289, + "canned": 27290, + "advance": 27291, + "gtaonline": 27292, + "healthyliving": 27293, + "ðŁį¾": 27294, + "aig": 27295, + "pality": 27296, + "ocs": 27297, + "hebrew": 27298, + "imminent": 27299, + "berkshire": 27300, + "jeremiah": 27301, + "outgoing": 27302, + "baker": 27303, + "entrata": 27304, + "maids": 27305, + "groves": 27306, + "boc": 27307, + "adel": 27308, + "mfw": 27309, + "conscience": 27310, + "armys": 27311, + "nutella": 27312, + "contestalert": 27313, + "novelist": 27314, + "lah": 27315, + "banker": 27316, + "marquez": 27317, + "ðŁı¡": 27318, + "toff": 27319, + "outage": 27320, + "grp": 27321, + "ðŁĺŃðŁĺŃðŁĺŃðŁĺŃ": 27322, + "muscle": 27323, + "dudley": 27324, + "nvidia": 27325, + "midi": 27326, + "muni": 
27327, + "essays": 27328, + "datac": 27329, + "carter": 27330, + "ร": 27331, + "tans": 27332, + "ives": 27333, + "publications": 27334, + "aler": 27335, + "okwx": 27336, + "ilu": 27337, + "cutt": 27338, + "harp": 27339, + "outlaw": 27340, + "lutheran": 27341, + "brill": 27342, + "bolic": 27343, + "dowell": 27344, + "greenland": 27345, + "besties": 27346, + "pathi": 27347, + "payton": 27348, + "guest": 27349, + "harden": 27350, + "ðŁ¤©": 27351, + "anned": 27352, + "evacuation": 27353, + "poised": 27354, + "mcder": 27355, + "bhan": 27356, + "oi": 27357, + "envelope": 27358, + "cid": 27359, + "cavi": 27360, + "tapas": 27361, + "bookreview": 27362, + "greyhound": 27363, + "âĻª": 27364, + "feud": 27365, + "lungs": 27366, + "forte": 27367, + "raider": 27368, + "ffer": 27369, + "onix": 27370, + "depend": 27371, + "ynwa": 27372, + "relating": 27373, + "devs": 27374, + "ðŁĴIJ": 27375, + "acquires": 27376, + "dha": 27377, + "jyo": 27378, + "privati": 27379, + "canine": 27380, + "kb": 27381, + "crab": 27382, + "sardin": 27383, + "imagining": 27384, + "kj": 27385, + "empor": 27386, + "downhill": 27387, + "nez": 27388, + "taeyeon": 27389, + "nickimin": 27390, + "gbp": 27391, + "àµ": 27392, + "wap": 27393, + "secco": 27394, + "mashed": 27395, + "ðŁĴ¥ðŁĴ¥": 27396, + "augustine": 27397, + "dissol": 27398, + "dictator": 27399, + "âĵ": 27400, + "viper": 27401, + "edfringe": 27402, + "vaux": 27403, + "hardwork": 27404, + "booklet": 27405, + "nox": 27406, + "chiff": 27407, + "ðŁĴ¨": 27408, + "observations": 27409, + "xboxone": 27410, + "usher": 27411, + "keer": 27412, + "lup": 27413, + "dallas": 27414, + "calgary": 27415, + "madra": 27416, + "dious": 27417, + "kbs": 27418, + "woodward": 27419, + "heroine": 27420, + "lumber": 27421, + "seaworld": 27422, + "ows": 27423, + "mcke": 27424, + "maverick": 27425, + "gula": 27426, + "crossroads": 27427, + "fang": 27428, + "sade": 27429, + "nikol": 27430, + "cheetah": 27431, + "mec": 27432, + "ppg": 27433, + "erick": 27434, + "ðŁİµ": 27435, + "toxic": 27436, + "bjj": 27437, + "viola": 27438, + "spire": 27439, + "chino": 27440, + "travis": 27441, + "institutional": 27442, + "haas": 27443, + "lowry": 27444, + "wac": 27445, + "eae": 27446, + "humid": 27447, + "mpton": 27448, + "ruck": 27449, + "jew": 27450, + "cine": 27451, + "zimmer": 27452, + "sef": 27453, + "bharat": 27454, + "frees": 27455, + "aamir": 27456, + "ðŁĴħ": 27457, + "zinc": 27458, + "wane": 27459, + "multiplayer": 27460, + "royalwedding": 27461, + "eel": 27462, + "precipit": 27463, + "query": 27464, + "kimberly": 27465, + "isabel": 27466, + "fulfill": 27467, + "igan": 27468, + "vaul": 27469, + "pane": 27470, + "scy": 27471, + "digit": 27472, + "gunn": 27473, + "utah": 27474, + "dogday": 27475, + "fion": 27476, + "xiaomi": 27477, + "dac": 27478, + "elast": 27479, + "chavez": 27480, + "roblo": 27481, + "gine": 27482, + "tenth": 27483, + "abh": 27484, + "keto": 27485, + "hurdle": 27486, + "nadia": 27487, + "memorabilia": 27488, + "habs": 27489, + "quan": 27490, + "hw": 27491, + "hvac": 27492, + "pixar": 27493, + "eccle": 27494, + "kramer": 27495, + "accuses": 27496, + "ðŁĴļðŁĴļ": 27497, + "perse": 27498, + "meantime": 27499, + "wahl": 27500, + "atletico": 27501, + "âĢ¢âĢ¢âĢ¢âĢ¢": 27502, + "ottoman": 27503, + "novo": 27504, + "kus": 27505, + "connected": 27506, + "trusts": 27507, + "dmv": 27508, + "spencer": 27509, + "rahulg": 27510, + "dove": 27511, + "stokes": 27512, + "bologna": 27513, + "enthusiasts": 27514, + "ê": 27515, + "rockstargames": 27516, + "tedcruz": 27517, + "duras": 27518, + "sacked": 27519, + 
"latex": 27520, + "immersive": 27521, + "cert": 27522, + "lucin": 27523, + "principals": 27524, + "fares": 27525, + "sails": 27526, + "farn": 27527, + "ament": 27528, + "saffron": 27529, + "quentin": 27530, + "checkpoint": 27531, + "ferris": 27532, + "excur": 27533, + "ðŁijīðŁı¼": 27534, + "bailey": 27535, + "seh": 27536, + "terre": 27537, + "madam": 27538, + "sband": 27539, + "wanderers": 27540, + "cumberbatch": 27541, + "yyc": 27542, + "digitally": 27543, + "blackandwhitephotography": 27544, + "rollin": 27545, + "moroccan": 27546, + "ðŁĮħ": 27547, + "dinner": 27548, + "dwell": 27549, + "toom": 27550, + "mye": 27551, + "ezra": 27552, + "cpfc": 27553, + "warhol": 27554, + "meer": 27555, + "jonah": 27556, + "noaa": 27557, + "sgate": 27558, + "soon": 27559, + "secular": 27560, + "gating": 27561, + "tio": 27562, + "driver": 27563, + "sissy": 27564, + "assange": 27565, + "tath": 27566, + "edmund": 27567, + "bobcats": 27568, + "raji": 27569, + "postage": 27570, + "studs": 27571, + "mgm": 27572, + "kato": 27573, + "edinburgh": 27574, + "meetthe": 27575, + "shirt": 27576, + "faa": 27577, + "mensfashion": 27578, + "spreads": 27579, + "wim": 27580, + "carts": 27581, + "phoebe": 27582, + "jars": 27583, + "botswana": 27584, + "ÙĤ": 27585, + "edwar": 27586, + "skar": 27587, + "rive": 27588, + "gusty": 27589, + "ctv": 27590, + "ferdinand": 27591, + "sutherland": 27592, + "nickiminaj": 27593, + "kv": 27594, + "sius": 27595, + "beech": 27596, + "rez": 27597, + "desires": 27598, + "onial": 27599, + "campo": 27600, + "quarry": 27601, + "lorraine": 27602, + "gilmore": 27603, + "iggy": 27604, + "µï¸ı": 27605, + "hopping": 27606, + "aviz": 27607, + "ðŁĮº": 27608, + "unisex": 27609, + "dedicate": 27610, + "attitudes": 27611, + "steer": 27612, + "junkie": 27613, + "railway": 27614, + "yb": 27615, + "whisper": 27616, + "keyan": 27617, + "kus": 27618, + "jug": 27619, + "dix": 27620, + "ains": 27621, + "summon": 27622, + "ovich": 27623, + "syed": 27624, + "herald": 27625, + "maison": 27626, + "meded": 27627, + "wildflower": 27628, + "mainland": 27629, + "risky": 27630, + "rukh": 27631, + "overlooked": 27632, + "kic": 27633, + "destroys": 27634, + "naman": 27635, + "kip": 27636, + "zano": 27637, + "championsleague": 27638, + "bandit": 27639, + "quincy": 27640, + "smile": 27641, + "calvin": 27642, + "openings": 27643, + "tapp": 27644, + "olulu": 27645, + "spectro": 27646, + "accredited": 27647, + "apk": 27648, + "praised": 27649, + "barnett": 27650, + "pollen": 27651, + "premiered": 27652, + "selenagomez": 27653, + "toured": 27654, + "screenings": 27655, + "uuu": 27656, + "miso": 27657, + "ense": 27658, + "adamlambert": 27659, + "guelph": 27660, + "haryana": 27661, + "hutto": 27662, + "lear": 27663, + "ltc": 27664, + "poached": 27665, + "brexit": 27666, + "æĿ": 27667, + "ttc": 27668, + "pavement": 27669, + "mongers": 27670, + "roe": 27671, + "aders": 27672, + "lington": 27673, + "participant": 27674, + "cared": 27675, + "gail": 27676, + "yates": 27677, + "lantic": 27678, + "dashboard": 27679, + "joo": 27680, + "felipe": 27681, + "ssionist": 27682, + "bum": 27683, + "send": 27684, + "aeri": 27685, + "thugs": 27686, + "lucifer": 27687, + "ahe": 27688, + "detector": 27689, + "filly": 27690, + "gasoline": 27691, + "hamper": 27692, + "humpday": 27693, + "theta": 27694, + "theband": 27695, + "forecasts": 27696, + "ohhh": 27697, + "lobb": 27698, + "holl": 27699, + "cpu": 27700, + "azu": 27701, + "adar": 27702, + "hailey": 27703, + "bub": 27704, + "cart": 27705, + "quoted": 27706, + "anarchy": 27707, + "pancre": 27708, + 
"twitart": 27709, + "alden": 27710, + "stash": 27711, + "theless": 27712, + "orni": 27713, + "beliebers": 27714, + "mormon": 27715, + "particle": 27716, + "aviation": 27717, + "â¬Ĩ": 27718, + "webcamtoy": 27719, + "saddened": 27720, + "cruis": 27721, + "hamlet": 27722, + "nct": 27723, + "rollins": 27724, + "marquee": 27725, + "sawyer": 27726, + "reliance": 27727, + "aura": 27728, + "diec": 27729, + "soothing": 27730, + "signings": 27731, + "akis": 27732, + "ó": 27733, + "atkins": 27734, + "aerop": 27735, + "ðŁĮ¿": 27736, + "yab": 27737, + "shari": 27738, + "connol": 27739, + "dubbed": 27740, + "manufacture": 27741, + "convincing": 27742, + "feelthebern": 27743, + "rau": 27744, + "pulit": 27745, + "onec": 27746, + "gemstone": 27747, + "urging": 27748, + "bagu": 27749, + "gah": 27750, + "acids": 27751, + "fianc": 27752, + "zodiac": 27753, + "snoop": 27754, + "herrera": 27755, + "initiated": 27756, + "venge": 27757, + "professors": 27758, + "prodi": 27759, + "stronger": 27760, + "emission": 27761, + "bba": 27762, + "halle": 27763, + "tapp": 27764, + "hawan": 27765, + "whim": 27766, + "competed": 27767, + "myrtle": 27768, + "irport": 27769, + "coldplay": 27770, + "ache": 27771, + "skep": 27772, + "mson": 27773, + "ssic": 27774, + "calligraphy": 27775, + "swimmers": 27776, + "mey": 27777, + "ppc": 27778, + "thrift": 27779, + "poc": 27780, + "replaces": 27781, + "commuter": 27782, + "âģ¦âģ¦@": 27783, + "goers": 27784, + "logue": 27785, + "paradig": 27786, + "baskets": 27787, + "sensitivity": 27788, + "johan": 27789, + "atlantis": 27790, + "&&": 27791, + "suitcase": 27792, + "anxious": 27793, + "lh": 27794, + "stri": 27795, + "galloway": 27796, + "stread": 27797, + "warden": 27798, + "grounded": 27799, + "fficiency": 27800, + "lifeat": 27801, + "relic": 27802, + "disguise": 27803, + "islanders": 27804, + "fcofficial": 27805, + "classicalmusic": 27806, + "bmc": 27807, + "enfield": 27808, + "bique": 27809, + "oakley": 27810, + "batman": 27811, + "slaying": 27812, + "nerves": 27813, + "multit": 27814, + "calcium": 27815, + "projector": 27816, + "scottsdale": 27817, + "antino": 27818, + "grips": 27819, + "kimmel": 27820, + "desmond": 27821, + "protestors": 27822, + "hiatus": 27823, + "metabolism": 27824, + "concluded": 27825, + "presser": 27826, + "tipping": 27827, + "slide": 27828, + "eto": 27829, + "hunting": 27830, + "ausopen": 27831, + "rik": 27832, + "ppery": 27833, + "innovators": 27834, + "pitchers": 27835, + "agger": 27836, + "fungi": 27837, + "zad": 27838, + "prolific": 27839, + "rocknroll": 27840, + "blames": 27841, + "ctar": 27842, + "stamford": 27843, + "qad": 27844, + "mozzarella": 27845, + "insanely": 27846, + "denver": 27847, + "phouse": 27848, + "nomad": 27849, + "ï¿": 27850, + "sris": 27851, + "produ": 27852, + "henley": 27853, + "pagan": 27854, + "amtrak": 27855, + "rubi": 27856, + "incl": 27857, + "tutor": 27858, + "scotia": 27859, + "woes": 27860, + "singapo": 27861, + "funnel": 27862, + "turnbull": 27863, + "knowledge": 27864, + "grimm": 27865, + "realmadrid": 27866, + "weare": 27867, + "missiles": 27868, + "consol": 27869, + "emojis": 27870, + "sneak": 27871, + "smiths": 27872, + "ruiz": 27873, + "brou": 27874, + "iel": 27875, + "haver": 27876, + "ðŁĮļ": 27877, + "kingof": 27878, + "basilica": 27879, + "circulation": 27880, + "printers": 27881, + "tapping": 27882, + "ridley": 27883, + "dragged": 27884, + "haj": 27885, + "writer": 27886, + "fundamentals": 27887, + "personalities": 27888, + "metre": 27889, + "stereotypes": 27890, + "burle": 27891, + "bestof": 27892, + "nffc": 
27893, + "hath": 27894, + "ministries": 27895, + "aali": 27896, + "tracing": 27897, + "paved": 27898, + "łï¸ı": 27899, + "gic": 27900, + "inspire": 27901, + "tug": 27902, + "hare": 27903, + "repeated": 27904, + "expon": 27905, + "lolli": 27906, + "rhode": 27907, + "precin": 27908, + "installations": 27909, + "instagram": 27910, + "azar": 27911, + "ies": 27912, + "solely": 27913, + "dukes": 27914, + "missionary": 27915, + "vanguard": 27916, + "fursuitfriday": 27917, + "ond": 27918, + "polari": 27919, + "mast": 27920, + "haran": 27921, + "josé": 27922, + "jacked": 27923, + "ecoun": 27924, + "alities": 27925, + "neph": 27926, + "ravel": 27927, + "moderated": 27928, + "scow": 27929, + "sfb": 27930, + "uruguay": 27931, + "aso": 27932, + "nig": 27933, + "audu": 27934, + "pints": 27935, + "latina": 27936, + "benz": 27937, + "mitting": 27938, + "charted": 27939, + "matology": 27940, + "citro": 27941, + "biopic": 27942, + "ðŁijŃ": 27943, + "djokovic": 27944, + "foxy": 27945, + "aguil": 27946, + "soto": 27947, + "anada": 27948, + "sinking": 27949, + "scrap": 27950, + "hairs": 27951, + "bethany": 27952, + "factfriday": 27953, + "ðŁIJIJ": 27954, + "unleashed": 27955, + ")(": 27956, + "contradic": 27957, + "ramon": 27958, + "coastline": 27959, + "yong": 27960, + "snsd": 27961, + "ligan": 27962, + "pome": 27963, + "mitage": 27964, + "gett": 27965, + "wati": 27966, + "risk": 27967, + "soaring": 27968, + "brush": 27969, + "fpl": 27970, + "avan": 27971, + "åĨ": 27972, + "larson": 27973, + "shear": 27974, + "multil": 27975, + "blur": 27976, + "multimedia": 27977, + "chunky": 27978, + "pari": 27979, + "nani": 27980, + "weird": 27981, + "cholesterol": 27982, + "charles": 27983, + "dreamed": 27984, + "tanning": 27985, + "puzzles": 27986, + "fram": 27987, + "handball": 27988, + "chag": 27989, + "belize": 27990, + "alu": 27991, + "bangs": 27992, + "ÑĦ": 27993, + "detectives": 27994, + "mcg": 27995, + "ishq": 27996, + "bothered": 27997, + "safc": 27998, + "mping": 27999, + "teneri": 28000, + "gays": 28001, + "sailor": 28002, + "angi": 28003, + "multicul": 28004, + "guessed": 28005, + "rosé": 28006, + "highways": 28007, + "broom": 28008, + "chattanoo": 28009, + "-'": 28010, + "seeker": 28011, + "oned": 28012, + "atf": 28013, + "luc": 28014, + "><": 28015, + "bari": 28016, + "percep": 28017, + "jewelry": 28018, + "asph": 28019, + "sorrow": 28020, + "sling": 28021, + "mammoth": 28022, + "jackie": 28023, + "ë§": 28024, + "wiltshire": 28025, + "sao": 28026, + "cancell": 28027, + "impaired": 28028, + "torial": 28029, + "breed": 28030, + "guyen": 28031, + "judice": 28032, + "title": 28033, + "prospective": 28034, + "applicants": 28035, + "ðŁįĬ": 28036, + "episcop": 28037, + "eid": 28038, + "byo": 28039, + "stockings": 28040, + "ðŁĴĥðŁĴĥ": 28041, + "llp": 28042, + "snag": 28043, + "keepit": 28044, + "lough": 28045, + "olson": 28046, + "maturity": 28047, + "!!!\"": 28048, + "copter": 28049, + "isha": 28050, + "bli": 28051, + "wilmington": 28052, + "tryouts": 28053, + "thai": 28054, + "ðŁ¥³": 28055, + "pebble": 28056, + "kraft": 28057, + "fp": 28058, + "º": 28059, + "ssively": 28060, + "livin": 28061, + "contestants": 28062, + "textures": 28063, + "joan": 28064, + "hdr": 28065, + "filmfestival": 28066, + "provence": 28067, + "wido": 28068, + "opend": 28069, + "csi": 28070, + "stown": 28071, + "croati": 28072, + "adjust": 28073, + "hostile": 28074, + "analysts": 28075, + "ilan": 28076, + "cuppa": 28077, + "brum": 28078, + "newfoundland": 28079, + "goodwin": 28080, + "mett": 28081, + "mallorca": 28082, + "plugs": 28083, + 
"buk": 28084, + "bbhutto": 28085, + "wrestle": 28086, + "saire": 28087, + "shopped": 28088, + "forza": 28089, + "lehead": 28090, + "vivo": 28091, + "bast": 28092, + "roxy": 28093, + "regis": 28094, + "hardworking": 28095, + "honolulu": 28096, + "despair": 28097, + "youngsters": 28098, + "nig": 28099, + "impromp": 28100, + "rolltide": 28101, + "deemed": 28102, + "treason": 28103, + "rushed": 28104, + "forged": 28105, + "fff": 28106, + "pikachu": 28107, + "briggs": 28108, + "doit": 28109, + "accent": 28110, + "laus": 28111, + "glaze": 28112, + "competent": 28113, + "aho": 28114, + "photog": 28115, + "midfield": 28116, + "lego": 28117, + "harvard": 28118, + "minorities": 28119, + "reilly": 28120, + "sliced": 28121, + "onceupon": 28122, + "initially": 28123, + "financially": 28124, + "landscapephotography": 28125, + "hardro": 28126, + "quo": 28127, + "mmers": 28128, + "parkinson": 28129, + "smugg": 28130, + "readiness": 28131, + "brutally": 28132, + "gloucester": 28133, + "mped": 28134, + "bbhuttozardari": 28135, + "murder": 28136, + "yed": 28137, + "dataviz": 28138, + "srt": 28139, + "downing": 28140, + "bians": 28141, + "mü": 28142, + "fleck": 28143, + "flipped": 28144, + "sly": 28145, + "brilliance": 28146, + "rim": 28147, + "kum": 28148, + "bubba": 28149, + "koi": 28150, + "knitted": 28151, + "sorg": 28152, + "mais": 28153, + "ðŁĮ²": 28154, + "tiss": 28155, + "sustain": 28156, + "sensu": 28157, + "akhan": 28158, + "ziest": 28159, + "examines": 28160, + "chardonnay": 28161, + "username": 28162, + "shortlist": 28163, + "rebs": 28164, + "ono": 28165, + "daring": 28166, + "hardwood": 28167, + "cheque": 28168, + "righteous": 28169, + "lightening": 28170, + "dirk": 28171, + "shradd": 28172, + "dura": 28173, + "downstairs": 28174, + "shal": 28175, + "amigos": 28176, + "ruff": 28177, + "slaw": 28178, + "ries": 28179, + "rednation": 28180, + "manus": 28181, + "ðŁĩ§ðŁĩ·": 28182, + "distinction": 28183, + "ubun": 28184, + "duran": 28185, + "migra": 28186, + "thians": 28187, + "laver": 28188, + "domestic": 28189, + "kx": 28190, + "jazzy": 28191, + "justify": 28192, + "belonging": 28193, + "insulation": 28194, + "colorstv": 28195, + "drunken": 28196, + "channeling": 28197, + "quand": 28198, + "xiii": 28199, + "enlighten": 28200, + "kano": 28201, + "fatima": 28202, + "teenchoice": 28203, + "terrified": 28204, + "pba": 28205, + "asley": 28206, + "metmuseum": 28207, + "dune": 28208, + "packer": 28209, + "kio": 28210, + "ðŁĴľðŁĴľ": 28211, + "boiler": 28212, + "fascism": 28213, + "armored": 28214, + "backgrounds": 28215, + "inmates": 28216, + "embarrassed": 28217, + "defines": 28218, + "thd": 28219, + "wego": 28220, + "silicone": 28221, + "loon": 28222, + "elding": 28223, + "borrowed": 28224, + "hemp": 28225, + "aksh": 28226, + "kawasaki": 28227, + "bry": 28228, + "deaf": 28229, + "killer": 28230, + "disposal": 28231, + "ðŁĩ°": 28232, + "glastonbury": 28233, + "uncovered": 28234, + "oxide": 28235, + "poff": 28236, + "dant": 28237, + "kj": 28238, + "kuro": 28239, + "drizzle": 28240, + "peoples": 28241, + "fee": 28242, + "propri": 28243, + "ddlovato": 28244, + "piggy": 28245, + "otis": 28246, + "allergies": 28247, + "ubis": 28248, + "penguin": 28249, + "sera": 28250, + "viz": 28251, + "prosperous": 28252, + "icides": 28253, + "tornadoes": 28254, + "senegal": 28255, + "webcast": 28256, + "stored": 28257, + "enchanted": 28258, + "bbcone": 28259, + "bayarea": 28260, + "entrepreneurial": 28261, + "rednationrising": 28262, + "experimenting": 28263, + "angan": 28264, + "lotto": 28265, + "theyre": 28266, + "pore": 
28267, + "erp": 28268, + "serene": 28269, + "eastwood": 28270, + "brokers": 28271, + "barge": 28272, + "stallion": 28273, + "timberlake": 28274, + "tailored": 28275, + "dystop": 28276, + "bate": 28277, + "lators": 28278, + "dixit": 28279, + "branson": 28280, + "dynamo": 28281, + "kylie": 28282, + "shameful": 28283, + "btwn": 28284, + "springtime": 28285, + "mixture": 28286, + "sounded": 28287, + "luton": 28288, + "dades": 28289, + "mala": 28290, + "opra": 28291, + "enic": 28292, + "rahulgandhi": 28293, + "sewer": 28294, + "~~~~": 28295, + "kyu": 28296, + "northeastern": 28297, + "caer": 28298, + "bcu": 28299, + "nirvana": 28300, + "kitchens": 28301, + "ousy": 28302, + "alm": 28303, + "riverdale": 28304, + "hidden": 28305, + "flint": 28306, + "spd": 28307, + "patrons": 28308, + "katyperry": 28309, + "augh": 28310, + "exhibitions": 28311, + "smc": 28312, + "shuts": 28313, + "atore": 28314, + "dain": 28315, + "something": 28316, + "berth": 28317, + "bog": 28318, + "porter": 28319, + "gento": 28320, + "concussion": 28321, + "anglic": 28322, + "rowe": 28323, + "grilling": 28324, + "scarlett": 28325, + "mastering": 28326, + "mornin": 28327, + "commented": 28328, + "sime": 28329, + "sizing": 28330, + "christy": 28331, + "ceos": 28332, + "stm": 28333, + "atry": 28334, + "tariffs": 28335, + "vacation": 28336, + "prejudice": 28337, + "psu": 28338, + "parental": 28339, + "farage": 28340, + "cana": 28341, + "capcom": 28342, + "kosovo": 28343, + "youre": 28344, + "menstru": 28345, + "stalin": 28346, + "grapefruit": 28347, + "bran": 28348, + "chesa": 28349, + "daven": 28350, + "excel": 28351, + "!!)": 28352, + "à¹Į": 28353, + "distributor": 28354, + "cea": 28355, + "bridesma": 28356, + "millennial": 28357, + "wain": 28358, + "observing": 28359, + "misery": 28360, + "planetary": 28361, + "exposing": 28362, + "braised": 28363, + "compton": 28364, + "dongha": 28365, + "ql": 28366, + "springsteen": 28367, + "thul": 28368, + "sylve": 28369, + "cabo": 28370, + "palad": 28371, + "nielsen": 28372, + "gazing": 28373, + "baja": 28374, + "roud": 28375, + "orchids": 28376, + "johannesburg": 28377, + "seman": 28378, + "dji": 28379, + "operative": 28380, + "affection": 28381, + "eclectic": 28382, + "atc": 28383, + "mutant": 28384, + "awx": 28385, + "nice": 28386, + "melbourne": 28387, + "indulg": 28388, + "tulip": 28389, + "diaspora": 28390, + "welp": 28391, + "biggie": 28392, + "mississauga": 28393, + "retriever": 28394, + "oran": 28395, + "tammy": 28396, + "cta": 28397, + "hippo": 28398, + "seasoned": 28399, + "germans": 28400, + "engv": 28401, + "marvellous": 28402, + "imf": 28403, + "relays": 28404, + "montan": 28405, + "mauriti": 28406, + "meister": 28407, + "assurance": 28408, + "reigning": 28409, + "sufficient": 28410, + "hane": 28411, + "nothing": 28412, + "posse": 28413, + "navy": 28414, + "inlove": 28415, + "brighton": 28416, + "enqu": 28417, + "chung": 28418, + "sweaty": 28419, + "esc": 28420, + "caled": 28421, + "mans": 28422, + "nicaragua": 28423, + "slices": 28424, + "mocha": 28425, + "washingtonpost": 28426, + "bbn": 28427, + "damned": 28428, + "growing": 28429, + "enburg": 28430, + "loan": 28431, + "mes": 28432, + "whoops": 28433, + "believers": 28434, + "spiel": 28435, + "vodaf": 28436, + "lat": 28437, + "sled": 28438, + "cricketer": 28439, + "browne": 28440, + "golfers": 28441, + "barra": 28442, + "watchers": 28443, + "luigi": 28444, + "swamy": 28445, + "moms": 28446, + "pitched": 28447, + "santor": 28448, + "crs": 28449, + "sire": 28450, + "scamp": 28451, + "bode": 28452, + "stewar": 28453, + 
"jonny": 28454, + "entity": 28455, + "pacqui": 28456, + "mindful": 28457, + "minindia": 28458, + "bearded": 28459, + "tempt": 28460, + "scorpion": 28461, + "eaton": 28462, + "authorized": 28463, + "arto": 28464, + "svp": 28465, + "opathy": 28466, + "cchini": 28467, + "housemusic": 28468, + "disneyworld": 28469, + "âĢĶ@": 28470, + "propose": 28471, + "diy": 28472, + "expense": 28473, + "teng": 28474, + "puppets": 28475, + "smel": 28476, + "daca": 28477, + "perry": 28478, + "finn": 28479, + "boosting": 28480, + "leftovers": 28481, + "cougs": 28482, + "satellites": 28483, + "many": 28484, + "aze": 28485, + "gong": 28486, + "fie": 28487, + "methodo": 28488, + "ferries": 28489, + "ð٤Ķð٤Ķ": 28490, + "explorers": 28491, + "loader": 28492, + "attracted": 28493, + "ilton": 28494, + "goddamn": 28495, + "piazza": 28496, + "doctr": 28497, + "saving": 28498, + "paragraph": 28499, + "visualization": 28500, + "mayors": 28501, + "workflow": 28502, + "ackles": 28503, + "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 28504, + "स": 28505, + "twerk": 28506, + "clut": 28507, + "lover": 28508, + "teases": 28509, + "sian": 28510, + "ote": 28511, + "deterior": 28512, + "accord": 28513, + "lfw": 28514, + "swarovski": 28515, + "natal": 28516, + "traps": 28517, + "kina": 28518, + "analyze": 28519, + "layered": 28520, + "beverages": 28521, + "unit": 28522, + "ransom": 28523, + "peshaw": 28524, + "destined": 28525, + "astrology": 28526, + "sipping": 28527, + "mileycyrus": 28528, + "camino": 28529, + "marshmallow": 28530, + "bliss": 28531, + "outback": 28532, + "faq": 28533, + "intoler": 28534, + "humility": 28535, + "poppin": 28536, + "halloween": 28537, + "montene": 28538, + "ophy": 28539, + "nun": 28540, + "tattooed": 28541, + "aas": 28542, + "ðŁĮ³": 28543, + "daley": 28544, + "quality": 28545, + "dusa": 28546, + "fishermen": 28547, + "swif": 28548, + "terrac": 28549, + "stau": 28550, + "lein": 28551, + "trolling": 28552, + "shipment": 28553, + "gardener": 28554, + "marchmadness": 28555, + "headband": 28556, + "grt": 28557, + "burnett": 28558, + "wand": 28559, + "!!!!!!!!!": 28560, + "ghe": 28561, + "dux": 28562, + "hud": 28563, + "warner": 28564, + "ðŁĩ¦": 28565, + "exile": 28566, + "rescue": 28567, + "rata": 28568, + "dhan": 28569, + "ducati": 28570, + "drown": 28571, + "blends": 28572, + "spie": 28573, + "alligator": 28574, + "simultaneously": 28575, + "brooke": 28576, + "uke": 28577, + "khar": 28578, + "communion": 28579, + "rika": 28580, + "fordfc": 28581, + "chinatown": 28582, + "yourown": 28583, + "mey": 28584, + "canal": 28585, + "systematic": 28586, + "depri": 28587, + "oxford": 28588, + "anil": 28589, + "wut": 28590, + "equation": 28591, + "bez": 28592, + "fleur": 28593, + "thegood": 28594, + "langley": 28595, + "adity": 28596, + "edith": 28597, + "alfie": 28598, + "оÑĤ": 28599, + "encry": 28600, + "brill": 28601, + "exemp": 28602, + "cesar": 28603, + "mbling": 28604, + "abri": 28605, + "scicom": 28606, + "jing": 28607, + "schooling": 28608, + "mika": 28609, + "mechanisms": 28610, + "impromptu": 28611, + "rhea": 28612, + "moore": 28613, + "crimea": 28614, + "besto": 28615, + "wright": 28616, + "elders": 28617, + "rods": 28618, + "kamal": 28619, + "folklore": 28620, + "beet": 28621, + "minion": 28622, + "relieve": 28623, + "thro": 28624, + "teamusa": 28625, + "pascal": 28626, + "madewith": 28627, + "bolivia": 28628, + "itti": 28629, + "freebies": 28630, + "desired": 28631, + "bestselling": 28632, + "liness": 28633, + "laden": 28634, + "keane": 28635, + "mists": 28636, + "hippie": 28637, + "attachment": 28638, + "@/": 
28639, + "sew": 28640, + "flanagan": 28641, + "âĿĹï¸ı": 28642, + "supremac": 28643, + "stlcards": 28644, + "sias": 28645, + "qu": 28646, + "rhys": 28647, + "steep": 28648, + "valleys": 28649, + "vw": 28650, + "paving": 28651, + "dispat": 28652, + "alison": 28653, + "porte": 28654, + "idu": 28655, + "newsc": 28656, + "socket": 28657, + "mos": 28658, + "costar": 28659, + "revo": 28660, + "proteins": 28661, + "stanleycup": 28662, + "mcal": 28663, + "earring": 28664, + "secs": 28665, + "mclean": 28666, + "capric": 28667, + "nickelo": 28668, + "aden": 28669, + "vc": 28670, + "shouse": 28671, + "adaptive": 28672, + "maximize": 28673, + "entertainer": 28674, + "prose": 28675, + "griffi": 28676, + "sixteen": 28677, + "lamar": 28678, + "mirage": 28679, + "saudiarabia": 28680, + "aweather": 28681, + "rust": 28682, + "infiltr": 28683, + "fashionweek": 28684, + "ðŁĺĬðŁĺĬðŁĺĬ": 28685, + "selective": 28686, + "bubble": 28687, + "aden": 28688, + "fennel": 28689, + "decisive": 28690, + "mta": 28691, + "mocking": 28692, + "mbles": 28693, + "stamp": 28694, + "mule": 28695, + "bernardo": 28696, + "grin": 28697, + "pott": 28698, + "jingle": 28699, + "vettel": 28700, + "colombian": 28701, + "camo": 28702, + "motivationmonday": 28703, + "bahan": 28704, + "ply": 28705, + "dhary": 28706, + "kami": 28707, + "xmen": 28708, + "sleeper": 28709, + "gara": 28710, + "mysti": 28711, + "confidential": 28712, + "conflicts": 28713, + "pneu": 28714, + "ces": 28715, + "insurtech": 28716, + "cleanse": 28717, + "merely": 28718, + "vais": 28719, + "tux": 28720, + "thegreat": 28721, + "sharon": 28722, + "maj": 28723, + "hola": 28724, + "ecosystems": 28725, + "ajay": 28726, + "aaj": 28727, + "hush": 28728, + "harmon": 28729, + "backtoschool": 28730, + "wikileaks": 28731, + "reflected": 28732, + "ðŁĺĵ": 28733, + "commemorating": 28734, + "acet": 28735, + "buckingham": 28736, + "messiah": 28737, + "tuous": 28738, + "hornet": 28739, + "tobe": 28740, + "dq": 28741, + "heine": 28742, + "mig": 28743, + "plate": 28744, + "nicholson": 28745, + "spie": 28746, + "cumberland": 28747, + "normal": 28748, + "phobia": 28749, + "happyhalloween": 28750, + "cityfc": 28751, + "mcel": 28752, + "gillian": 28753, + "keto": 28754, + "lude": 28755, + "demise": 28756, + "suga": 28757, + "strate": 28758, + "mcgrath": 28759, + "visitscotland": 28760, + "fooled": 28761, + "cbr": 28762, + "gcse": 28763, + "colori": 28764, + "potd": 28765, + "missuniverse": 28766, + "finances": 28767, + "mapoli": 28768, + "forks": 28769, + "Ø´": 28770, + "cannon": 28771, + "medicinal": 28772, + "ðŁĹĵ": 28773, + "kho": 28774, + "wreck": 28775, + "panto": 28776, + "bagel": 28777, + "gull": 28778, + "syndicate": 28779, + "icy": 28780, + "prc": 28781, + "kien": 28782, + "zika": 28783, + "tish": 28784, + "peta": 28785, + "cco": 28786, + "liza": 28787, + "chut": 28788, + "extraction": 28789, + "elg": 28790, + "gli": 28791, + "fueled": 28792, + "posit": 28793, + "respectively": 28794, + "leicester": 28795, + "brink": 28796, + "vulnerability": 28797, + "imported": 28798, + "esha": 28799, + "ð٦ħ": 28800, + "rural": 28801, + "rell": 28802, + "gaming": 28803, + "atlantic": 28804, + "abandon": 28805, + "noah": 28806, + "resolved": 28807, + "prostate": 28808, + "allergic": 28809, + "psd": 28810, + "âĺ¹": 28811, + "dungeon": 28812, + "fangirl": 28813, + "illuminated": 28814, + "mhs": 28815, + "whitesox": 28816, + "dently": 28817, + "cko": 28818, + "endorse": 28819, + "overly": 28820, + "dazzling": 28821, + "prioriti": 28822, + "nightlife": 28823, + "util": 28824, + "behave": 28825, + 
"flamen": 28826, + "eastbound": 28827, + "ðŁĴŁ": 28828, + "iloveyou": 28829, + "govuk": 28830, + "mozambique": 28831, + "allegi": 28832, + "dri": 28833, + "testimonial": 28834, + "aths": 28835, + "ì§Ģ": 28836, + "mmy": 28837, + "shabby": 28838, + "prosecco": 28839, + "friendships": 28840, + "calam": 28841, + "damages": 28842, + "offset": 28843, + "jurassic": 28844, + "juno": 28845, + "arrell": 28846, + "ðŁĴ©": 28847, + "interventions": 28848, + "daredevil": 28849, + "carver": 28850, + "runaway": 28851, + "rane": 28852, + "trustees": 28853, + "haute": 28854, + "depths": 28855, + "ðŁİŃ": 28856, + "mein": 28857, + "sacrifices": 28858, + "concier": 28859, + "nesting": 28860, + "izzy": 28861, + "metam": 28862, + "ilovemy": 28863, + "urine": 28864, + "dulu": 28865, + "malhotra": 28866, + "veins": 28867, + "nightly": 28868, + "coat": 28869, + "andi": 28870, + "hewitt": 28871, + "lonel": 28872, + "cible": 28873, + "write": 28874, + "jennie": 28875, + "santac": 28876, + "ĸï¸ı": 28877, + "strato": 28878, + "singapore": 28879, + "soprano": 28880, + "kristen": 28881, + "cheerful": 28882, + "fleetwood": 28883, + "fairi": 28884, + "meli": 28885, + "wast": 28886, + "turnt": 28887, + "sforsale": 28888, + "scrolling": 28889, + "angelina": 28890, + "rendition": 28891, + "jericho": 28892, + "nicky": 28893, + "orb": 28894, + "flavo": 28895, + "patriot": 28896, + "asheville": 28897, + "sickness": 28898, + "refund": 28899, + "aggression": 28900, + "bpl": 28901, + "ãĥĥ": 28902, + "elusive": 28903, + "thistory": 28904, + "hanger": 28905, + "buffs": 28906, + "villas": 28907, + "atkinson": 28908, + "sph": 28909, + "jait": 28910, + "declined": 28911, + "wok": 28912, + "supremacy": 28913, + "ootball": 28914, + "eyang": 28915, + "ðŁİĵ": 28916, + "sford": 28917, + "athi": 28918, + "consume": 28919, + "roadster": 28920, + "eso": 28921, + "upro": 28922, + "recipe": 28923, + "auf": 28924, + "uci": 28925, + "aron": 28926, + "oooh": 28927, + "csgo": 28928, + "reich": 28929, + "mcd": 28930, + "minute": 28931, + "ladies": 28932, + "punk": 28933, + "rutgers": 28934, + "meek": 28935, + "arizon": 28936, + "taj": 28937, + "landlord": 28938, + "degra": 28939, + "autumn": 28940, + "lynx": 28941, + "usf": 28942, + "bhi": 28943, + "fairytale": 28944, + "donghae": 28945, + "betsy": 28946, + "exploded": 28947, + "chennai": 28948, + "opa": 28949, + "protag": 28950, + "brant": 28951, + "ðŁĵ°:": 28952, + "gf": 28953, + "palli": 28954, + "ðŁı¼âĢįâĻĢï¸ı": 28955, + "sut": 28956, + "illini": 28957, + "columnist": 28958, + "shirtless": 28959, + "decentr": 28960, + "searched": 28961, + "ecor": 28962, + "buggy": 28963, + "sack": 28964, + "ðŁĺĤðŁĺŃ": 28965, + "det": 28966, + "theri": 28967, + "ornaments": 28968, + "bringback": 28969, + "tov": 28970, + "quarterfinals": 28971, + "iche": 28972, + "constra": 28973, + "gier": 28974, + "buchanan": 28975, + "vix": 28976, + "kayaking": 28977, + "mustread": 28978, + "swallow": 28979, + "melb": 28980, + "scaf": 28981, + "opal": 28982, + "mayoral": 28983, + "harat": 28984, + "ð٦ĭ": 28985, + "schedules": 28986, + "idf": 28987, + "hague": 28988, + "roz": 28989, + "aah": 28990, + "dmc": 28991, + "duplic": 28992, + "cache": 28993, + "orphan": 28994, + "fracture": 28995, + "recon": 28996, + "chav": 28997, + "bunnies": 28998, + "alain": 28999, + "mustafa": 29000, + "ðŁİĻ": 29001, + "vacations": 29002, + "dynamite": 29003, + "texted": 29004, + "broadcaster": 29005, + "ðŁĴ£": 29006, + "steamed": 29007, + "rocker": 29008, + "dietary": 29009, + "luxurytravel": 29010, + "inaugurated": 29011, + "sawards": 29012, + 
"vaughn": 29013, + "lincolnshire": 29014, + "clicked": 29015, + "kraja": 29016, + "fanc": 29017, + "removes": 29018, + "layoffs": 29019, + "mcfar": 29020, + "breeds": 29021, + "winnie": 29022, + "jonghyun": 29023, + "incentive": 29024, + "variations": 29025, + "patton": 29026, + "aturday": 29027, + "persistent": 29028, + "prun": 29029, + "piers": 29030, + "dales": 29031, + "æĸ": 29032, + "breastfeeding": 29033, + "rance": 29034, + "tawa": 29035, + "Ĥâĸ": 29036, + "murdoch": 29037, + "captive": 29038, + "thistle": 29039, + "nica": 29040, + "commodity": 29041, + "couldnt": 29042, + "boardwalk": 29043, + "gracious": 29044, + "practitioners": 29045, + "ngc": 29046, + "scrum": 29047, + "nero": 29048, + "camouflage": 29049, + "colon": 29050, + "hei": 29051, + "physicist": 29052, + "saturdaymorning": 29053, + "tener": 29054, + "siwon": 29055, + "columns": 29056, + "brune": 29057, + "yvr": 29058, + "bair": 29059, + "retires": 29060, + "halam": 29061, + "caber": 29062, + "shazam": 29063, + "minu": 29064, + "cascade": 29065, + "milkshake": 29066, + "grid": 29067, + "dren": 29068, + "vincent": 29069, + "sodium": 29070, + "platter": 29071, + "cheerleader": 29072, + "chenko": 29073, + "yak": 29074, + "eliminated": 29075, + "typo": 29076, + "yman": 29077, + "rethink": 29078, + "âĿĹ": 29079, + "tsville": 29080, + "bernardokath": 29081, + "extr": 29082, + "ðŁĺģðŁĺģðŁĺģ": 29083, + "tao": 29084, + "reper": 29085, + "moths": 29086, + "empowered": 29087, + "citing": 29088, + "transported": 29089, + "monks": 29090, + "sanat": 29091, + "clears": 29092, + "bachelorette": 29093, + "campbell": 29094, + "rachael": 29095, + "harle": 29096, + "handler": 29097, + "climbs": 29098, + "interference": 29099, + "release": 29100, + "shand": 29101, + "rbs": 29102, + "hrh": 29103, + "ãģª": 29104, + "valle": 29105, + "ré": 29106, + "slime": 29107, + "wakes": 29108, + "chubby": 29109, + "sloan": 29110, + "elves": 29111, + "athen": 29112, + "attorneys": 29113, + "microscope": 29114, + "stoner": 29115, + "scaling": 29116, + "obe": 29117, + "cout": 29118, + "seman": 29119, + "midweek": 29120, + "balsam": 29121, + "ðŁĺįâĿ¤": 29122, + "tiful": 29123, + "vish": 29124, + "lotta": 29125, + "ripping": 29126, + "remn": 29127, + "tire": 29128, + "leap": 29129, + "havent": 29130, + "laby": 29131, + "himach": 29132, + "whispers": 29133, + "wein": 29134, + "ðŁİ¸": 29135, + "wildflowers": 29136, + "sele": 29137, + "ucc": 29138, + "liability": 29139, + "azine": 29140, + "swings": 29141, + "kya": 29142, + "tair": 29143, + "remain": 29144, + "edo": 29145, + "flops": 29146, + "pocket": 29147, + "grandad": 29148, + "examiner": 29149, + "gris": 29150, + "ffect": 29151, + "ðŁijĬðŁı»": 29152, + "studded": 29153, + "heartbeat": 29154, + "deacon": 29155, + "firmly": 29156, + "infectious": 29157, + "stef": 29158, + "outlines": 29159, + "leasing": 29160, + "claws": 29161, + "sense": 29162, + "tabs": 29163, + "hoot": 29164, + "mosul": 29165, + "spawn": 29166, + "coa": 29167, + "hogwarts": 29168, + "vein": 29169, + "albania": 29170, + "manuel": 29171, + "bino": 29172, + "vauxhall": 29173, + "scotland": 29174, + "gobucks": 29175, + "matty": 29176, + "physio": 29177, + "torino": 29178, + "constable": 29179, + "investigated": 29180, + "slower": 29181, + "mistaken": 29182, + "bayer": 29183, + "wildfires": 29184, + "voic": 29185, + "xon": 29186, + "timeto": 29187, + "chassis": 29188, + "barric": 29189, + "pion": 29190, + "baldhead": 29191, + "wook": 29192, + "registr": 29193, + "drafts": 29194, + "bhs": 29195, + "ligue": 29196, + "lick": 29197, + 
"staffordshire": 29198, + "bafta": 29199, + "darry": 29200, + "jeanne": 29201, + "vending": 29202, + "corp": 29203, + "âĽ³ï¸ı": 29204, + "kiddos": 29205, + "fenway": 29206, + "cao": 29207, + "westbound": 29208, + "ðŁĺĻ": 29209, + "dvr": 29210, + "quicker": 29211, + "blah": 29212, + "goodie": 29213, + "ðŁĴĭðŁĴĭ": 29214, + "vox": 29215, + "esper": 29216, + "facade": 29217, + "correlation": 29218, + "redbull": 29219, + "roup": 29220, + "declining": 29221, + "chive": 29222, + "mcgee": 29223, + "turo": 29224, + "inder": 29225, + "feller": 29226, + "fug": 29227, + "ilysm": 29228, + "mardi": 29229, + "peshawar": 29230, + "kieran": 29231, + "inema": 29232, + "meatballs": 29233, + "peck": 29234, + "depressing": 29235, + "sensing": 29236, + "giz": 29237, + "ddington": 29238, + "springwatch": 29239, + "roaming": 29240, + "yellowstone": 29241, + "horseshoe": 29242, + "amman": 29243, + "weekday": 29244, + "olor": 29245, + "ðŁ¥°": 29246, + "boosts": 29247, + "sprint": 29248, + "scarves": 29249, + "jee": 29250, + "beetro": 29251, + "clan": 29252, + "allthe": 29253, + "ìĦ¸ë": 29254, + "enlightenment": 29255, + "adobe": 29256, + "regeneration": 29257, + "?@": 29258, + "contag": 29259, + "yachts": 29260, + "tou": 29261, + "mora": 29262, + "envoy": 29263, + "rani": 29264, + "goli": 29265, + "dhanushkraja": 29266, + "woodworking": 29267, + "strengths": 29268, + "sedi": 29269, + "discs": 29270, + "arina": 29271, + "scon": 29272, + "lite": 29273, + "another": 29274, + "ðŁ¥Ĭ": 29275, + "yemen": 29276, + "guern": 29277, + "savvy": 29278, + "loyed": 29279, + "biomed": 29280, + "heartbreak": 29281, + "comrades": 29282, + "millie": 29283, + "patch": 29284, + "unf": 29285, + "jarvis": 29286, + "blaming": 29287, + "commemoration": 29288, + "gey": 29289, + "å¥": 29290, + "cardiovascular": 29291, + "aligned": 29292, + "document": 29293, + ".?": 29294, + "aesthetics": 29295, + "emu": 29296, + "theirs": 29297, + "leh": 29298, + "psic": 29299, + "sif": 29300, + "plateau": 29301, + "expend": 29302, + "dominating": 29303, + "robes": 29304, + "mauritius": 29305, + "exceptionally": 29306, + "homer": 29307, + "discoveries": 29308, + "braun": 29309, + "tennant": 29310, + "insulin": 29311, + "ðŁİ®": 29312, + "carbs": 29313, + "teas": 29314, + "?!\"": 29315, + "zie": 29316, + "francois": 29317, + "browsing": 29318, + "thol": 29319, + "clarence": 29320, + "helper": 29321, + "obtained": 29322, + "cassie": 29323, + "lees": 29324, + "!,": 29325, + "pomegran": 29326, + "hubs": 29327, + "prestige": 29328, + "][": 29329, + "macher": 29330, + "bottled": 29331, + "punch": 29332, + "pipe": 29333, + "och": 29334, + "gallons": 29335, + "deliveries": 29336, + "ura": 29337, + "unday": 29338, + "monde": 29339, + "depicts": 29340, + "regency": 29341, + "outrageous": 29342, + "khaled": 29343, + "caro": 29344, + "hearti": 29345, + "zag": 29346, + "developmental": 29347, + "overcoming": 29348, + "statistical": 29349, + "flavored": 29350, + "fords": 29351, + "creatives": 29352, + "laurence": 29353, + "dias": 29354, + "sunscreen": 29355, + "inked": 29356, + "preacher": 29357, + "nul": 29358, + "impacting": 29359, + "autistic": 29360, + "âļĶï¸ı": 29361, + "oss": 29362, + "pelicans": 29363, + "celeste": 29364, + "vb": 29365, + "rump": 29366, + "mcgra": 29367, + "fairfax": 29368, + "humor": 29369, + "bbcnews": 29370, + "rowling": 29371, + "calder": 29372, + "seamless": 29373, + "agne": 29374, + "pti": 29375, + "mixed": 29376, + "tshirts": 29377, + "merci": 29378, + "btob": 29379, + "womeninstem": 29380, + "genealogy": 29381, + "preven": 29382, + "lour": 
29383, + "cradle": 29384, + "giuse": 29385, + "о": 29386, + "chrono": 29387, + "fairness": 29388, + "chocolate": 29389, + "tory": 29390, + "asda": 29391, + "prescott": 29392, + "stretched": 29393, + "alman": 29394, + "uil": 29395, + "recharge": 29396, + "intre": 29397, + "obst": 29398, + "hospital": 29399, + "hayward": 29400, + "tenerife": 29401, + "friedman": 29402, + "vaping": 29403, + "confessions": 29404, + "yeah": 29405, + "balli": 29406, + "lucknow": 29407, + "corpse": 29408, + "sculptor": 29409, + "ampton": 29410, + "tpp": 29411, + "indicates": 29412, + "surplus": 29413, + "truman": 29414, + "ðĿĻ": 29415, + "sinha": 29416, + "invo": 29417, + "sovereign": 29418, + "kev": 29419, + "establishing": 29420, + "engraved": 29421, + "assuming": 29422, + "ðŁıģ": 29423, + "souza": 29424, + "fabi": 29425, + "toned": 29426, + "ounge": 29427, + "deloit": 29428, + "downey": 29429, + "noble": 29430, + "omor": 29431, + "cartridge": 29432, + "ðŁıIJ": 29433, + "uhur": 29434, + "holloway": 29435, + "successes": 29436, + "rsa": 29437, + "âĦ¢": 29438, + "mazz": 29439, + "twd": 29440, + "discourse": 29441, + ".<": 29442, + "yat": 29443, + "satisfy": 29444, + "compri": 29445, + "ह": 29446, + "graphite": 29447, + "dissertation": 29448, + "arter": 29449, + "íĶ": 29450, + "bally": 29451, + "zombi": 29452, + "lyons": 29453, + "aic": 29454, + "ubc": 29455, + "prada": 29456, + "eil": 29457, + "dax": 29458, + "clai": 29459, + "granddaughter": 29460, + "extravaganza": 29461, + "challenge": 29462, + "ð٤ŀ": 29463, + "pover": 29464, + "primarily": 29465, + "daddy": 29466, + "mana": 29467, + "bikers": 29468, + "inquiries": 29469, + "daun": 29470, + "feline": 29471, + "generative": 29472, + "hef": 29473, + "benefiting": 29474, + "lindsey": 29475, + "polka": 29476, + "demonstrated": 29477, + "alle": 29478, + "randy": 29479, + "osu": 29480, + "lowkey": 29481, + "weirdest": 29482, + "redbull": 29483, + "oury": 29484, + "nous": 29485, + "woodstock": 29486, + "credenti": 29487, + "nicer": 29488, + "gado": 29489, + "alyss": 29490, + "aph": 29491, + "preparedness": 29492, + "stationary": 29493, + "incorporated": 29494, + "dyer": 29495, + "saratoga": 29496, + "celesti": 29497, + ":\"": 29498, + "antibiotics": 29499, + "orgs": 29500, + "indefin": 29501, + "apron": 29502, + "иÐ": 29503, + "fifteen": 29504, + "nof": 29505, + "ðŁĶĿ": 29506, + "phx": 29507, + "tega": 29508, + "mz": 29509, + "organizational": 29510, + "onair": 29511, + "bandung": 29512, + "pleasures": 29513, + "mori": 29514, + "secretari": 29515, + "raccoon": 29516, + "cashi": 29517, + "pilates": 29518, + "kon": 29519, + "geoffrey": 29520, + "lao": 29521, + "kamp": 29522, + "departments": 29523, + "backpacking": 29524, + "anam": 29525, + "ë": 29526, + "crackdown": 29527, + "aunty": 29528, + "ondo": 29529, + "lizzie": 29530, + "phers": 29531, + "cun": 29532, + "ðŁĩ±": 29533, + "kpop": 29534, + "put": 29535, + "intentional": 29536, + "connolly": 29537, + "barclays": 29538, + "hsfb": 29539, + "swindon": 29540, + "uku": 29541, + "sally": 29542, + "aint": 29543, + "âľħ": 29544, + "penang": 29545, + "uplifting": 29546, + "epilepsy": 29547, + "interro": 29548, + "bungal": 29549, + "goku": 29550, + "blueberries": 29551, + "द": 29552, + "ussia": 29553, + "silky": 29554, + "moured": 29555, + "istic": 29556, + "briefs": 29557, + "meats": 29558, + "gob": 29559, + "chaser": 29560, + "statewide": 29561, + "prasad": 29562, + "glitch": 29563, + "arin": 29564, + "banff": 29565, + "member": 29566, + "ðŁĺŃâĿ¤ï¸ı": 29567, + "loving": 29568, + "halla": 29569, + "ม": 29570, + "smokers": 
29571, + "yaku": 29572, + "scicomm": 29573, + "physio": 29574, + "swol": 29575, + "lemons": 29576, + "gelato": 29577, + "chool": 29578, + "capitals": 29579, + "kistan": 29580, + "tights": 29581, + "spikes": 29582, + "travellers": 29583, + "iklan": 29584, + "commissioning": 29585, + "arine": 29586, + "emabiggestfans": 29587, + "emphasis": 29588, + "frontline": 29589, + "paddock": 29590, + "destructive": 29591, + "baha": 29592, + "linger": 29593, + "jewish": 29594, + "shetland": 29595, + "mcgin": 29596, + "monkey": 29597, + "koz": 29598, + "sone": 29599, + "rajini": 29600, + "teh": 29601, + "yen": 29602, + "cvs": 29603, + "masquer": 29604, + "girly": 29605, + "wesle": 29606, + "wasnt": 29607, + "brody": 29608, + "terminator": 29609, + "gille": 29610, + "maggi": 29611, + "birdie": 29612, + "jeopardy": 29613, + "cubic": 29614, + "vmware": 29615, + "intricate": 29616, + "anup": 29617, + "topia": 29618, + "easton": 29619, + "sabres": 29620, + "investigates": 29621, + "busting": 29622, + "bilingual": 29623, + "valentino": 29624, + "informat": 29625, + "ferre": 29626, + "adventur": 29627, + "hydrate": 29628, + "forsy": 29629, + "aziz": 29630, + "santo": 29631, + "ede": 29632, + "whistler": 29633, + "continuously": 29634, + "dham": 29635, + "unused": 29636, + "jihad": 29637, + "addictive": 29638, + "vidy": 29639, + "dob": 29640, + "ido": 29641, + "fied": 29642, + "niversary": 29643, + "none": 29644, + "fuer": 29645, + "ðŁĺįðŁĺĺ": 29646, + "covenant": 29647, + "printable": 29648, + "immaculate": 29649, + "oem": 29650, + "clt": 29651, + "servants": 29652, + "consumed": 29653, + "unreleased": 29654, + "scum": 29655, + "packaged": 29656, + "mere": 29657, + "ìĦ¸ë¸": 29658, + "toby": 29659, + "taf": 29660, + "spoons": 29661, + "meal": 29662, + "fball": 29663, + "fairfield": 29664, + "janet": 29665, + "silverstone": 29666, + "dartmouth": 29667, + "followme": 29668, + "voyager": 29669, + "kombat": 29670, + "anniver": 29671, + "enew": 29672, + "magdal": 29673, + "hove": 29674, + "sath": 29675, + "grizzly": 29676, + "cardi": 29677, + "gartner": 29678, + "sandy": 29679, + "kanye": 29680, + "posture": 29681, + "poign": 29682, + "impulse": 29683, + "radiology": 29684, + "horizons": 29685, + "siam": 29686, + "aishwar": 29687, + "==>": 29688, + "noche": 29689, + "tris": 29690, + "elyn": 29691, + "comme": 29692, + "dui": 29693, + "cec": 29694, + "councillors": 29695, + "cuddling": 29696, + "creeping": 29697, + "locke": 29698, + "manages": 29699, + "transferred": 29700, + "necks": 29701, + "dier": 29702, + "dano": 29703, + "vick": 29704, + "lunches": 29705, + "dhe": 29706, + "ensures": 29707, + "criss": 29708, + "ulster": 29709, + "bannon": 29710, + "contenders": 29711, + "spam": 29712, + "sweetness": 29713, + "medal": 29714, + "honduras": 29715, + "arctic": 29716, + "ultrasound": 29717, + "infr": 29718, + "discovers": 29719, + "eiffel": 29720, + "casters": 29721, + "ruben": 29722, + "dust": 29723, + "aweed": 29724, + "atrium": 29725, + "lestwe": 29726, + "seared": 29727, + "ðŁĵº:": 29728, + "tyne": 29729, + "exchanges": 29730, + "littlemix": 29731, + "lle": 29732, + "astronauts": 29733, + "hershey": 29734, + "workday": 29735, + "knob": 29736, + "sov": 29737, + "resigns": 29738, + "todayshow": 29739, + "derman": 29740, + "anth": 29741, + "afc": 29742, + "taster": 29743, + "swoo": 29744, + "saeed": 29745, + "pering": 29746, + "narrowly": 29747, + "rnli": 29748, + "bestbuy": 29749, + "panasonic": 29750, + "obstacle": 29751, + "farmers": 29752, + "ðŁİĻ": 29753, + "pawan": 29754, + "kiest": 29755, + "angers": 29756, + 
"absurd": 29757, + "ohmy": 29758, + "sino": 29759, + "pistachi": 29760, + "spice": 29761, + "giuli": 29762, + "primetime": 29763, + "kow": 29764, + "kens": 29765, + "exagger": 29766, + "!?!": 29767, + "uba": 29768, + "middles": 29769, + "judd": 29770, + "ejec": 29771, + "slammed": 29772, + "pensions": 29773, + "ofa": 29774, + "recreate": 29775, + "bhp": 29776, + "xxl": 29777, + "liverpool": 29778, + "thresh": 29779, + "purity": 29780, + "nieu": 29781, + "holics": 29782, + "wrath": 29783, + "rado": 29784, + "glio": 29785, + "amma": 29786, + "dilemma": 29787, + "cru": 29788, + "letsgo": 29789, + "....@": 29790, + "âĿĵ": 29791, + "suggesting": 29792, + "trumps": 29793, + "horus": 29794, + "fv": 29795, + "icom": 29796, + "referring": 29797, + "predictive": 29798, + "tarts": 29799, + "gette": 29800, + "sock": 29801, + "glossy": 29802, + "pinky": 29803, + "alec": 29804, + "thyme": 29805, + "oura": 29806, + "theroad": 29807, + "petr": 29808, + "cram": 29809, + "pfi": 29810, + "dvn": 29811, + "meier": 29812, + "incentives": 29813, + "tunnels": 29814, + "mobil": 29815, + "recap": 29816, + "extras": 29817, + "upright": 29818, + "revamp": 29819, + "perseverance": 29820, + ",-": 29821, + "otp": 29822, + "mirror": 29823, + "arwx": 29824, + "gerry": 29825, + "maher": 29826, + "gor": 29827, + "homepage": 29828, + "amis": 29829, + "agra": 29830, + "madele": 29831, + "bestfriend": 29832, + "siriusxm": 29833, + "bundles": 29834, + "admiring": 29835, + "tdsb": 29836, + "ðŁįģ": 29837, + "chas": 29838, + "slowing": 29839, + "roh": 29840, + "wallpapers": 29841, + "â̦/": 29842, + "tekken": 29843, + "gangs": 29844, + "tala": 29845, + "lindsay": 29846, + "shoul": 29847, + "linebacker": 29848, + "toolkit": 29849, + "uranium": 29850, + "calyp": 29851, + "abrams": 29852, + "matthi": 29853, + "ðŁı¿": 29854, + "honourable": 29855, + "dayo": 29856, + "versail": 29857, + "tank": 29858, + "stc": 29859, + "fritz": 29860, + "splend": 29861, + "patag": 29862, + "annoyed": 29863, + "onday": 29864, + "devastated": 29865, + "chattanooga": 29866, + "nationalism": 29867, + "massey": 29868, + "jenn": 29869, + "tailor": 29870, + "devgn": 29871, + "organs": 29872, + "zucchini": 29873, + "onfox": 29874, + "satire": 29875, + "wexford": 29876, + "disgrace": 29877, + "noto": 29878, + "volta": 29879, + "âĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ı": 29880, + "à¶": 29881, + "homeowners": 29882, + "pointer": 29883, + "mcr": 29884, + "austen": 29885, + "daysto": 29886, + "moons": 29887, + "palma": 29888, + "grazing": 29889, + "eso": 29890, + "influencers": 29891, + "shahidkapoor": 29892, + "compliant": 29893, + "measurements": 29894, + "develops": 29895, + "yd": 29896, + "parl": 29897, + "pvt": 29898, + "randolph": 29899, + "tortured": 29900, + "gerald": 29901, + "elias": 29902, + "deepikap": 29903, + "warmup": 29904, + "hickory": 29905, + "gap": 29906, + "coffin": 29907, + "amour": 29908, + "reneg": 29909, + "mounting": 29910, + "sevens": 29911, + "igle": 29912, + "hier": 29913, + "decad": 29914, + "tright": 29915, + "escapes": 29916, + "werner": 29917, + "tfl": 29918, + "fulfilled": 29919, + "niger": 29920, + "sourdough": 29921, + "reaper": 29922, + "chooses": 29923, + "spinner": 29924, + "weeknd": 29925, + "filtered": 29926, + "shuk": 29927, + "kati": 29928, + "oldham": 29929, + "opensource": 29930, + "khanna": 29931, + "atelier": 29932, + "connec": 29933, + "ophobic": 29934, + "glas": 29935, + "complications": 29936, + "arson": 29937, + "councils": 29938, + "smol": 29939, + "assy": 29940, + "lurking": 29941, + "lingui": 29942, + "hanks": 29943, + "ein": 
29944, + "Ùħ": 29945, + "rugs": 29946, + "nguyen": 29947, + "nouveau": 29948, + "menace": 29949, + "lev": 29950, + "aladdin": 29951, + "ruining": 29952, + "roundabout": 29953, + "km": 29954, + "conor": 29955, + "shoops": 29956, + "mayday": 29957, + "traumatic": 29958, + "prabhas": 29959, + "kaiser": 29960, + "kita": 29961, + "router": 29962, + "pedro": 29963, + "retar": 29964, + "stunner": 29965, + "spanish": 29966, + "disturbed": 29967, + "academy": 29968, + "elearning": 29969, + "witty": 29970, + "seng": 29971, + "feral": 29972, + "avy": 29973, + "stab": 29974, + "keaton": 29975, + "urdu": 29976, + "koto": 29977, + "hui": 29978, + "cooke": 29979, + "arian": 29980, + "thepersonal": 29981, + "uma": 29982, + "seap": 29983, + "asting": 29984, + "rhetoric": 29985, + "handwriting": 29986, + "municipality": 29987, + "consortium": 29988, + "ðŁIJŁ": 29989, + "glasgow": 29990, + "raya": 29991, + "eliza": 29992, + "polymer": 29993, + "broth": 29994, + "practi": 29995, + "correspondent": 29996, + "addicts": 29997, + "gayle": 29998, + "ailing": 29999, + "ofe": 30000, + "pli": 30001, + "heartw": 30002, + "stitch": 30003, + "sightings": 30004, + "priests": 30005, + "samo": 30006, + "sloth": 30007, + "goodwood": 30008, + "rocco": 30009, + "sabc": 30010, + "summit": 30011, + "lace": 30012, + "presley": 30013, + "itten": 30014, + "cincy": 30015, + "thepersonalnetwork": 30016, + "sweek": 30017, + "pegas": 30018, + "afcon": 30019, + "registry": 30020, + "cim": 30021, + "leth": 30022, + "dicap": 30023, + "candice": 30024, + "fluent": 30025, + "smack": 30026, + "pedestri": 30027, + "aloud": 30028, + "carac": 30029, + "priyankach": 30030, + "pgh": 30031, + "irons": 30032, + "dolce": 30033, + "latvia": 30034, + "deceased": 30035, + "therock": 30036, + "clap": 30037, + "cene": 30038, + "foam": 30039, + "morrissey": 30040, + "gret": 30041, + "essentially": 30042, + "comcast": 30043, + "beagle": 30044, + "argues": 30045, + "inged": 30046, + "-â̦": 30047, + "sag": 30048, + "hasan": 30049, + "ðŁĻĨ": 30050, + "ðŁį°": 30051, + "nhra": 30052, + "kannada": 30053, + "indicators": 30054, + "oner": 30055, + "brixton": 30056, + "atas": 30057, + "screenplay": 30058, + "sorority": 30059, + "shaheed": 30060, + "heem": 30061, + "classmates": 30062, + "tainment": 30063, + "esi": 30064, + "breastcancer": 30065, + "zuckerberg": 30066, + "auror": 30067, + "encia": 30068, + "refers": 30069, + "kaeper": 30070, + "vortex": 30071, + "compart": 30072, + "lymph": 30073, + "photographing": 30074, + "steff": 30075, + "restling": 30076, + "parsley": 30077, + "momento": 30078, + "thman": 30079, + "lacking": 30080, + "dutt": 30081, + "oculus": 30082, + "fino": 30083, + "frenzy": 30084, + "rasc": 30085, + "dern": 30086, + "dismissed": 30087, + "nook": 30088, + "metgala": 30089, + "shill": 30090, + "raphael": 30091, + "mavericks": 30092, + "exhibits": 30093, + "eagerly": 30094, + "cpa": 30095, + "amenities": 30096, + ".âłĢ": 30097, + "exodus": 30098, + "ernst": 30099, + "lita": 30100, + "dealt": 30101, + "womensmarch": 30102, + "iain": 30103, + "scoreboard": 30104, + "campeones": 30105, + "cen": 30106, + "tiki": 30107, + "garrison": 30108, + "fidelity": 30109, + "brag": 30110, + "roadmap": 30111, + "psychop": 30112, + "loe": 30113, + "bleu": 30114, + "ðŁijĬðŁı¼": 30115, + "sauvi": 30116, + "springer": 30117, + "temptation": 30118, + "rudolph": 30119, + "acura": 30120, + "wicz": 30121, + "parachute": 30122, + "strol": 30123, + "lenny": 30124, + "zik": 30125, + "doms": 30126, + "nbaf": 30127, + "alpac": 30128, + "vivian": 30129, + "rove": 30130, 
+ "preet": 30131, + "perpetu": 30132, + "snake": 30133, + "airsoft": 30134, + "inflatable": 30135, + "princes": 30136, + "atie": 30137, + "ffey": 30138, + "patient": 30139, + "mire": 30140, + "chelle": 30141, + "slack": 30142, + "groovy": 30143, + "#:": 30144, + "uploading": 30145, + "!!!!!!!!!!!!!!!!": 30146, + "siemens": 30147, + "provision": 30148, + "vfx": 30149, + "needy": 30150, + "fats": 30151, + "topoli": 30152, + "bhutto": 30153, + "sathletics": 30154, + "alums": 30155, + "twinning": 30156, + "southwestern": 30157, + "adopting": 30158, + "lastnight": 30159, + "manne": 30160, + "laga": 30161, + "twell": 30162, + "acia": 30163, + "----": 30164, + "eyewear": 30165, + "hurley": 30166, + "flee": 30167, + "sach": 30168, + "pecker": 30169, + "costly": 30170, + "isk": 30171, + "crates": 30172, + "policy": 30173, + "erosion": 30174, + "ingo": 30175, + "werk": 30176, + "ðŁIJį": 30177, + "tortoise": 30178, + "therapies": 30179, + "internet": 30180, + "chihuahua": 30181, + "rips": 30182, + "frei": 30183, + "edor": 30184, + "taiji": 30185, + "tfc": 30186, + "dod": 30187, + "dempsey": 30188, + "christin": 30189, + "cheng": 30190, + "hips": 30191, + "graeme": 30192, + "compassionate": 30193, + "cavaliers": 30194, + "historic": 30195, + "soulful": 30196, + "criminal": 30197, + "jac": 30198, + "vinci": 30199, + "expired": 30200, + "surat": 30201, + "turismo": 30202, + "kona": 30203, + "seaweed": 30204, + "berts": 30205, + "leica": 30206, + "expressing": 30207, + "aal": 30208, + "wort": 30209, + "breakfast": 30210, + "herring": 30211, + "amused": 30212, + "rhubarb": 30213, + "martian": 30214, + "cosplayer": 30215, + "yash": 30216, + "strial": 30217, + "raul": 30218, + "referral": 30219, + "dwts": 30220, + "jw": 30221, + "adler": 30222, + "curtains": 30223, + "gur": 30224, + "valence": 30225, + "tyrone": 30226, + "swfc": 30227, + "coached": 30228, + "reborn": 30229, + "diabetic": 30230, + "choke": 30231, + "norfolk": 30232, + "investigative": 30233, + "ðŁĴ¯ðŁĴ¯": 30234, + "zid": 30235, + "vmas": 30236, + "phie": 30237, + "objectives": 30238, + "âľĭ": 30239, + "overdue": 30240, + "divers": 30241, + "matsu": 30242, + "ðŁİŁï¸ı": 30243, + "casualties": 30244, + "ว": 30245, + "alk": 30246, + "standardi": 30247, + "realist": 30248, + "artifacts": 30249, + "pandor": 30250, + "kex": 30251, + "invin": 30252, + "(!)": 30253, + "iney": 30254, + "paraly": 30255, + "mrt": 30256, + "faye": 30257, + "thevoice": 30258, + "onga": 30259, + "deed": 30260, + "skinner": 30261, + "azwx": 30262, + "specimen": 30263, + "priyankachopra": 30264, + "nuevo": 30265, + "barkley": 30266, + "toulouse": 30267, + "resumes": 30268, + "footballers": 30269, + "citi": 30270, + "fetch": 30271, + "ère": 30272, + "lestweforget": 30273, + "ðŁĻĭ": 30274, + "chunk": 30275, + "drifting": 30276, + "manipulation": 30277, + "equals": 30278, + "putt": 30279, + "kyungsoo": 30280, + "âĿ¤ï¸ı#": 30281, + "elastic": 30282, + "parano": 30283, + "foy": 30284, + "doping": 30285, + "cincy": 30286, + "ssler": 30287, + "interrupted": 30288, + "alay": 30289, + "adores": 30290, + "amethy": 30291, + "convoy": 30292, + "ãĢı": 30293, + "Ĭãģ": 30294, + "blacklist": 30295, + "generals": 30296, + "sachin": 30297, + "brushed": 30298, + "ounces": 30299, + "nonstop": 30300, + "illiams": 30301, + "btsarmy": 30302, + "uav": 30303, + "ruff": 30304, + "burma": 30305, + "bik": 30306, + "defence": 30307, + "schultz": 30308, + "boasts": 30309, + "loneliness": 30310, + "gore": 30311, + "transforms": 30312, + "alumna": 30313, + "@@": 30314, + "rappers": 30315, + "nehru": 30316, 
+ "caro": 30317, + "himalayan": 30318, + "wearables": 30319, + "geh": 30320, + "peppermint": 30321, + "redevelopment": 30322, + "flamingo": 30323, + "cosby": 30324, + "bigbaldhead": 30325, + "agri": 30326, + "barefoot": 30327, + "scopes": 30328, + "regram": 30329, + "ghana": 30330, + "ðŁİ«": 30331, + "iheart": 30332, + "sadie": 30333, + "carrie": 30334, + "microbial": 30335, + "kuala": 30336, + "skater": 30337, + "querque": 30338, + "âĻ©": 30339, + "genres": 30340, + "reasoning": 30341, + "chased": 30342, + "aso": 30343, + "slipped": 30344, + "encan": 30345, + "vamos": 30346, + "kers": 30347, + "adverse": 30348, + "moil": 30349, + "commodities": 30350, + "withyou": 30351, + "silent": 30352, + "hype": 30353, + "ande": 30354, + "amination": 30355, + "whispe": 30356, + "litz": 30357, + "âļ½ï¸ıâļ½ï¸ı": 30358, + "riff": 30359, + "ppy": 30360, + "lambs": 30361, + "ganesh": 30362, + "absent": 30363, + "regulator": 30364, + "marseille": 30365, + "enroll": 30366, + "parcel": 30367, + "wap": 30368, + "byrd": 30369, + "ðŁĩŃ": 30370, + "tuber": 30371, + "countrymusic": 30372, + "parl": 30373, + "controllers": 30374, + "responsibilities": 30375, + "wey": 30376, + "chate": 30377, + "montenegro": 30378, + "chico": 30379, + "milan": 30380, + "lms": 30381, + "trainees": 30382, + "appropriately": 30383, + "uncertain": 30384, + "poppies": 30385, + "edsheeran": 30386, + "nutritious": 30387, + "garo": 30388, + "deutsch": 30389, + "awesome": 30390, + "ãĥ¼": 30391, + "comfortably": 30392, + "landmarks": 30393, + "eti": 30394, + "reusable": 30395, + "danielle": 30396, + "rosal": 30397, + "coles": 30398, + "justic": 30399, + "ccs": 30400, + "fanny": 30401, + "nim": 30402, + "mcu": 30403, + "clinch": 30404, + "atene": 30405, + "merge": 30406, + "imdb": 30407, + "anglo": 30408, + "uccino": 30409, + "panini": 30410, + "annot": 30411, + "burberry": 30412, + "feature": 30413, + "predicting": 30414, + "fashionista": 30415, + "sask": 30416, + "imaginary": 30417, + "mmo": 30418, + "southsudan": 30419, + "spear": 30420, + "hubble": 30421, + "jointhe": 30422, + "coyotes": 30423, + "sligo": 30424, + "kodak": 30425, + "sitcom": 30426, + "polaroid": 30427, + "rooted": 30428, + "corrup": 30429, + "ðŁĻĮðŁĻĮ": 30430, + "brisban": 30431, + "atz": 30432, + "ahl": 30433, + "remy": 30434, + "talent": 30435, + "avalon": 30436, + "rada": 30437, + "pauline": 30438, + "locomotive": 30439, + "goons": 30440, + "nemo": 30441, + "maserati": 30442, + "icu": 30443, + "stutt": 30444, + "historically": 30445, + "smb": 30446, + "presby": 30447, + "avoid": 30448, + "sooners": 30449, + "rhinestone": 30450, + "wad": 30451, + "rising": 30452, + "trot": 30453, + "modes": 30454, + "regent": 30455, + "optimize": 30456, + "reece": 30457, + "smu": 30458, + "verti": 30459, + "newyorkcity": 30460, + "cortez": 30461, + "rac": 30462, + "incase": 30463, + "sinc": 30464, + "fielding": 30465, + "etta": 30466, + "tiffany": 30467, + "almonds": 30468, + "saddle": 30469, + "krat": 30470, + "matter": 30471, + "glow": 30472, + "starving": 30473, + "glo": 30474, + "crappy": 30475, + "slur": 30476, + "std": 30477, + "monitors": 30478, + "receipt": 30479, + "maymayentrata": 30480, + "mcil": 30481, + "unis": 30482, + "rainbows": 30483, + "caldwell": 30484, + "pacquiao": 30485, + "jop": 30486, + "afe": 30487, + "hook": 30488, + "essen": 30489, + "wizard": 30490, + "median": 30491, + "flaws": 30492, + "coms": 30493, + "âĿĦ": 30494, + "ingh": 30495, + "haynes": 30496, + "antonio": 30497, + "templates": 30498, + "outer": 30499, + "naw": 30500, + "cardigan": 30501, + 
"belgrade": 30502, + "ðŁĴī": 30503, + "homo": 30504, + "aise": 30505, + "ropes": 30506, + "nove": 30507, + "whatyou": 30508, + "trigge": 30509, + "conception": 30510, + "adukone": 30511, + "nadi": 30512, + "friars": 30513, + "swer": 30514, + "adjusted": 30515, + "hotline": 30516, + "sanity": 30517, + "kaur": 30518, + "downloading": 30519, + "cgi": 30520, + "tenor": 30521, + "ethnic": 30522, + "appalach": 30523, + "ุ": 30524, + "pag": 30525, + "golds": 30526, + "onset": 30527, + "investigator": 30528, + "cartel": 30529, + "peacefully": 30530, + "jarrett": 30531, + "catalan": 30532, + "polio": 30533, + "num": 30534, + "frustration": 30535, + "dharma": 30536, + "mylife": 30537, + "âľĮðŁı»": 30538, + "aberdeen": 30539, + "musa": 30540, + "binder": 30541, + "sparkly": 30542, + "fleeing": 30543, + "instinct": 30544, + "coping": 30545, + "dominance": 30546, + "illers": 30547, + "era": 30548, + "uconn": 30549, + "looms": 30550, + "livingston": 30551, + "gali": 30552, + "hes": 30553, + "cma": 30554, + "bela": 30555, + "seley": 30556, + "monk": 30557, + "lach": 30558, + "marx": 30559, + "´": 30560, + "merica": 30561, + "womanin": 30562, + "essex": 30563, + "raina": 30564, + "jimi": 30565, + "neptune": 30566, + "zack": 30567, + "chinese": 30568, + "martins": 30569, + "chandelier": 30570, + "hern": 30571, + "withus": 30572, + "earl": 30573, + "asphalt": 30574, + "modules": 30575, + "stp": 30576, + "ulla": 30577, + "psychiatric": 30578, + "mileage": 30579, + "captivating": 30580, + "sider": 30581, + "mento": 30582, + "mort": 30583, + "trance": 30584, + "talbot": 30585, + "abby": 30586, + "ìĥ": 30587, + "âľĮðŁı¼": 30588, + "jak": 30589, + "dawn": 30590, + "turnup": 30591, + "screwed": 30592, + "feds": 30593, + "blueprint": 30594, + "ðŁĴĸðŁĴĸ": 30595, + "harsh": 30596, + "eros": 30597, + "insomnia": 30598, + "bankers": 30599, + "taemin": 30600, + "misconduct": 30601, + "humber": 30602, + "gidi": 30603, + "eduardo": 30604, + "cona": 30605, + "muscular": 30606, + "consuming": 30607, + "rash": 30608, + "donnie": 30609, + "dipped": 30610, + "collie": 30611, + "samuel": 30612, + "meltdown": 30613, + "ðŁĺįðŁĺįðŁĺį": 30614, + "mez": 30615, + "examining": 30616, + "schwartz": 30617, + "pristine": 30618, + "ðŁIJĿ": 30619, + "veit": 30620, + "fulfilling": 30621, + "anesthe": 30622, + "guesses": 30623, + "draft": 30624, + "somme": 30625, + "solid": 30626, + "pational": 30627, + "hoped": 30628, + "evolutionary": 30629, + "aller": 30630, + "entertained": 30631, + "slips": 30632, + "ludwig": 30633, + "concludes": 30634, + "sensible": 30635, + "bonnet": 30636, + "craze": 30637, + "tras": 30638, + "hazards": 30639, + "constantine": 30640, + "edics": 30641, + "startrek": 30642, + "toc": 30643, + "occupational": 30644, + "incheon": 30645, + "deepikapadukone": 30646, + "pizzas": 30647, + "newcomer": 30648, + "depart": 30649, + "oppression": 30650, + "ebony": 30651, + "fossils": 30652, + "trojan": 30653, + "elen": 30654, + "steaks": 30655, + "khou": 30656, + "positioning": 30657, + "ugby": 30658, + "redcross": 30659, + "akh": 30660, + "dolce": 30661, + "usmnt": 30662, + "ppen": 30663, + "dilig": 30664, + "mavs": 30665, + "caller": 30666, + "costello": 30667, + "âĽĦ": 30668, + "dyn": 30669, + "things": 30670, + "rhinos": 30671, + "axi": 30672, + "sarkar": 30673, + "convocation": 30674, + "atters": 30675, + "ssss": 30676, + "fungus": 30677, + "eugen": 30678, + "russo": 30679, + "squat": 30680, + "wsb": 30681, + "elion": 30682, + "williamsburg": 30683, + "soff": 30684, + "deficiency": 30685, + "bearer": 30686, + "okin": 30687, 
+ "keystone": 30688, + "twain": 30689, + "calming": 30690, + "breakable": 30691, + "wares": 30692, + "horseracing": 30693, + "combs": 30694, + "bunting": 30695, + "uit": 30696, + "tland": 30697, + "ðŁĴĻðŁĴĻðŁĴĻ": 30698, + "gastron": 30699, + "sabot": 30700, + "ickers": 30701, + "commissioners": 30702, + "senate": 30703, + "iiot": 30704, + "athena": 30705, + "nitrogen": 30706, + "antony": 30707, + "erotic": 30708, + "dialo": 30709, + "missou": 30710, + "hypocr": 30711, + "âľĪ": 30712, + "kaepernick": 30713, + "canv": 30714, + "droo": 30715, + "cleveland": 30716, + "osh": 30717, + "monsta": 30718, + "stefano": 30719, + "^)": 30720, + "shul": 30721, + "poison": 30722, + "hae": 30723, + "commercials": 30724, + "maul": 30725, + "nitro": 30726, + "coworker": 30727, + "aloe": 30728, + "vapor": 30729, + "tents": 30730, + "russian": 30731, + "quid": 30732, + "questionable": 30733, + "midget": 30734, + "poker": 30735, + "girlfriends": 30736, + "sinthe": 30737, + "eritrea": 30738, + "tenure": 30739, + "deposits": 30740, + "buckeyes": 30741, + "spotter": 30742, + "theodore": 30743, + "trinity": 30744, + "joaquin": 30745, + "ucci": 30746, + "followthe": 30747, + "cafc": 30748, + "mpa": 30749, + "ðŁIJ»": 30750, + "plotting": 30751, + "domino": 30752, + "taek": 30753, + "sionally": 30754, + "dicaprio": 30755, + "pap": 30756, + "carmel": 30757, + "iger": 30758, + "btcc": 30759, + "bethle": 30760, + "wwwbigbaldhead": 30761, + "foodie": 30762, + "baghdad": 30763, + "masonry": 30764, + "offended": 30765, + "à·": 30766, + "à¸ģ": 30767, + "scro": 30768, + "verses": 30769, + "orient": 30770, + "arches": 30771, + "piyu": 30772, + "knowyour": 30773, + "gree": 30774, + "takers": 30775, + "guard": 30776, + "dishon": 30777, + "bucketlist": 30778, + "bhafc": 30779, + "wardly": 30780, + "ðŁİīðŁİĬ": 30781, + "leighton": 30782, + "pew": 30783, + "stray": 30784, + "assaulted": 30785, + "inhal": 30786, + "lyfe": 30787, + "amarketing": 30788, + "lx": 30789, + "katz": 30790, + "ubuntu": 30791, + "meo": 30792, + "cartoonist": 30793, + "turnover": 30794, + "miz": 30795, + "dislike": 30796, + "mullen": 30797, + "mof": 30798, + "bland": 30799, + "hides": 30800, + "emerges": 30801, + "chorizo": 30802, + "trustee": 30803, + "mahog": 30804, + "lansing": 30805, + "paralympic": 30806, + "faint": 30807, + "fauna": 30808, + "chal": 30809, + "snar": 30810, + "cath": 30811, + "benton": 30812, + "castillo": 30813, + "slippery": 30814, + "apricot": 30815, + "oecd": 30816, + "baro": 30817, + "lz": 30818, + "heming": 30819, + "clowns": 30820, + "coworkers": 30821, + "peruvian": 30822, + "commuters": 30823, + "yell": 30824, + "ðŁļ´": 30825, + "undering": 30826, + "vj": 30827, + "ttp": 30828, + "flipk": 30829, + "wana": 30830, + "socent": 30831, + "ĤâĸĤâĸ": 30832, + "à¤Ĥ": 30833, + "oosa": 30834, + "jagger": 30835, + "dism": 30836, + "eless": 30837, + "dham": 30838, + "calif": 30839, + "aofficial": 30840, + "eclip": 30841, + "harrogate": 30842, + "grapp": 30843, + "comrade": 30844, + "ntr": 30845, + "concentrate": 30846, + "thighs": 30847, + "bitcoin": 30848, + "belarus": 30849, + "ëĵ": 30850, + "enduring": 30851, + "nowwatching": 30852, + "industrial": 30853, + "pip": 30854, + "aron": 30855, + "arat": 30856, + "®": 30857, + "whitby": 30858, + "ooooooo": 30859, + "saree": 30860, + "ticals": 30861, + "misleading": 30862, + "yoon": 30863, + "years": 30864, + "sleigh": 30865, + "romanian": 30866, + "scissors": 30867, + "vampires": 30868, + "acup": 30869, + "abba": 30870, + "thweeksary": 30871, + "centri": 30872, + "flye": 30873, + "uo": 30874, + 
"cbi": 30875, + "buena": 30876, + "sind": 30877, + "marino": 30878, + "burr": 30879, + "rebuilding": 30880, + "ल": 30881, + "anniversaire": 30882, + "acca": 30883, + "ðŁĴĢðŁĴĢ": 30884, + "getting": 30885, + "tulips": 30886, + "wolfpack": 30887, + "âľįï¸ı": 30888, + "morethan": 30889, + "takin": 30890, + "ð٤ĺðŁı»": 30891, + "ube": 30892, + "monic": 30893, + "doubts": 30894, + "mower": 30895, + "cobalt": 30896, + "donne": 30897, + "speculation": 30898, + "arguably": 30899, + "kaku": 30900, + "https": 30901, + "prosecution": 30902, + "dinah": 30903, + "stamatic": 30904, + "disclosed": 30905, + "beverly": 30906, + "flwx": 30907, + "crabs": 30908, + "extraordinaire": 30909, + "warmest": 30910, + "imperi": 30911, + "ologists": 30912, + "traces": 30913, + "parc": 30914, + "lakeside": 30915, + "amr": 30916, + "teri": 30917, + "hourly": 30918, + "domination": 30919, + "arrow": 30920, + "shrewsbury": 30921, + "ancestry": 30922, + "wrangler": 30923, + "triggered": 30924, + "pensac": 30925, + "rooster": 30926, + "survives": 30927, + "aon": 30928, + "boko": 30929, + "valor": 30930, + "loveis": 30931, + "lag": 30932, + "pey": 30933, + "focal": 30934, + "outlaws": 30935, + "blanc": 30936, + "articho": 30937, + "wits": 30938, + "marshall": 30939, + "diego": 30940, + "supportsmall": 30941, + "uca": 30942, + "sah": 30943, + "jeet": 30944, + "synago": 30945, + "governing": 30946, + "ðŁĴ¬": 30947, + "salads": 30948, + "create": 30949, + "miriam": 30950, + "censored": 30951, + "amide": 30952, + "nou": 30953, + "zeta": 30954, + "allegiance": 30955, + "*)": 30956, + "blm": 30957, + "rican": 30958, + "pastors": 30959, + "olympus": 30960, + "bloc": 30961, + "whirl": 30962, + "starry": 30963, + "prone": 30964, + "yk": 30965, + "pne": 30966, + "congratulating": 30967, + "bev": 30968, + "sober": 30969, + "loveisland": 30970, + "sair": 30971, + "aning": 30972, + "tutorials": 30973, + "qe": 30974, + "lund": 30975, + "inist": 30976, + "clever": 30977, + "taxpayer": 30978, + "aliz": 30979, + "wrench": 30980, + "ddling": 30981, + "capri": 30982, + "hpa": 30983, + "ðŁı»âĢįâĻĤï¸ı": 30984, + "naj": 30985, + "oj": 30986, + "futuristic": 30987, + "jellyfish": 30988, + "ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥": 30989, + "celery": 30990, + "plank": 30991, + "fila": 30992, + "neme": 30993, + "unhealthy": 30994, + "lections": 30995, + "ðŁ§¡": 30996, + "ritchie": 30997, + "nws": 30998, + "mikha": 30999, + "wonderwoman": 31000, + "âĢİ": 31001, + "hipstamatic": 31002, + "kag": 31003, + "ðŁĴľðŁĴľðŁĴľ": 31004, + "poultry": 31005, + "mow": 31006, + "words": 31007, + "loff": 31008, + "ðŁ¤£ðŁ¤£": 31009, + "relatable": 31010, + "remixes": 31011, + "kenyatta": 31012, + "kem": 31013, + "resigned": 31014, + "fod": 31015, + "straigh": 31016, + "jlo": 31017, + "hutch": 31018, + "boxers": 31019, + "colleen": 31020, + "mags": 31021, + "instructional": 31022, + "kol": 31023, + "attracts": 31024, + "prag": 31025, + "accountant": 31026, + "goggles": 31027, + "bru": 31028, + "thole": 31029, + "marrow": 31030, + "leuke": 31031, + "octo": 31032, + "ponds": 31033, + "bubbly": 31034, + "heist": 31035, + "ìĹij": 31036, + "imp": 31037, + "ahar": 31038, + "haunt": 31039, + "hallmark": 31040, + "psych": 31041, + "kkkkkkkk": 31042, + "columb": 31043, + "jumpsuit": 31044, + "costco": 31045, + "sidelines": 31046, + "aggies": 31047, + "overturned": 31048, + "nib": 31049, + "keychain": 31050, + "fuk": 31051, + "faf": 31052, + "miam": 31053, + "assistants": 31054, + "cycled": 31055, + "rider": 31056, + "dammit": 31057, + "redwings": 31058, + "mages": 31059, + "kins": 31060, + "ìĤ": 
31061, + "hod": 31062, + "sont": 31063, + "caroline": 31064, + "\"'": 31065, + "cule": 31066, + "braid": 31067, + "felony": 31068, + "arities": 31069, + "rutherford": 31070, + "depiction": 31071, + "isabelle": 31072, + "roach": 31073, + "kday": 31074, + "fifthharmony": 31075, + "emy": 31076, + "ligam": 31077, + "barista": 31078, + "albuquerque": 31079, + "gross": 31080, + "ðŁįº": 31081, + "ooks": 31082, + "ðŁij¼": 31083, + "duncan": 31084, + "tryin": 31085, + "jags": 31086, + "gould": 31087, + "litho": 31088, + "âģ£": 31089, + "аÐ": 31090, + "sammy": 31091, + "tung": 31092, + "casser": 31093, + "apolo": 31094, + "aaaaa": 31095, + "mang": 31096, + "asics": 31097, + "shen": 31098, + "pye": 31099, + "turbul": 31100, + "ssp": 31101, + "saintsfc": 31102, + "onlin": 31103, + "nanny": 31104, + "hester": 31105, + "doz": 31106, + "à¸Ķ": 31107, + "thread": 31108, + "rents": 31109, + "khand": 31110, + "ðŁĴªðŁı½": 31111, + "unconditional": 31112, + "robson": 31113, + "carre": 31114, + "phon": 31115, + "sacrificed": 31116, + "£": 31117, + "autos": 31118, + "parker": 31119, + "oca": 31120, + "login": 31121, + "keegan": 31122, + "hardcover": 31123, + "doughnuts": 31124, + "ðŁĮİ": 31125, + "spitfire": 31126, + "refreshments": 31127, + "saskatoon": 31128, + "commodore": 31129, + "jf": 31130, + "rubber": 31131, + "halamadrid": 31132, + "childcare": 31133, + "strada": 31134, + "iom": 31135, + "rik": 31136, + "dakar": 31137, + "thermom": 31138, + "cropped": 31139, + "garu": 31140, + "alik": 31141, + "veni": 31142, + "ift": 31143, + "sika": 31144, + "rituals": 31145, + "zul": 31146, + "ech": 31147, + "©": 31148, + "sudan": 31149, + "lland": 31150, + "ime": 31151, + "docker": 31152, + "ì¤": 31153, + "feared": 31154, + "fao": 31155, + "walter": 31156, + "nog": 31157, + "mutuals": 31158, + "lh": 31159, + "align": 31160, + "monia": 31161, + "conceptart": 31162, + "ðŁĻıðŁı¼": 31163, + "scoe": 31164, + "competence": 31165, + "swine": 31166, + "lyme": 31167, + "launch": 31168, + "greener": 31169, + "abstractart": 31170, + "inquis": 31171, + "granada": 31172, + "gaelic": 31173, + "fluff": 31174, + "dbacks": 31175, + "graveyard": 31176, + "babe": 31177, + "academic": 31178, + "adventurous": 31179, + "johann": 31180, + "~!": 31181, + "bibi": 31182, + "|#": 31183, + "plings": 31184, + "getty": 31185, + "asb": 31186, + "âĿ¤ï¸ı@": 31187, + "staff": 31188, + "religions": 31189, + "bangor": 31190, + "worldbookday": 31191, + "megh": 31192, + "devin": 31193, + "ashore": 31194, + "meridian": 31195, + "github": 31196, + "quiz": 31197, + "allstars": 31198, + "bestest": 31199, + "irresi": 31200, + "acker": 31201, + "dote": 31202, + "warrington": 31203, + "polly": 31204, + "neworleans": 31205, + "crou": 31206, + "wigs": 31207, + "chey": 31208, + "smithsonian": 31209, + "lasag": 31210, + "detour": 31211, + "boris": 31212, + "straps": 31213, + "mariah": 31214, + "intentionally": 31215, + "koh": 31216, + "ðŁį¸": 31217, + "ssian": 31218, + "marissa": 31219, + "coral": 31220, + "episcopal": 31221, + "casualty": 31222, + "tomo": 31223, + "supplychain": 31224, + "samp": 31225, + "ongo": 31226, + "roo": 31227, + "caviar": 31228, + "pfw": 31229, + "claudio": 31230, + "buffalo": 31231, + "sations": 31232, + "matty": 31233, + "snapback": 31234, + "lds": 31235, + "alarms": 31236, + "matte": 31237, + "âĺĶï¸ı": 31238, + "conditioner": 31239, + "dors": 31240, + "hex": 31241, + "fizz": 31242, + "astri": 31243, + "sussex": 31244, + "security": 31245, + "qaeda": 31246, + "allstar": 31247, + "cocacola": 31248, + "asone": 31249, + "clicks": 31250, + 
"scans": 31251, + "mute": 31252, + "heavier": 31253, + "ðŁİ§": 31254, + "âĺŀ": 31255, + "lvl": 31256, + "bookboost": 31257, + "youtube": 31258, + "flashes": 31259, + "fjor": 31260, + "csu": 31261, + "explode": 31262, + "dodge": 31263, + "cairn": 31264, + "gonzales": 31265, + "thill": 31266, + "pelle": 31267, + "hartley": 31268, + "renewable": 31269, + "retin": 31270, + "estre": 31271, + "costarica": 31272, + "shipyard": 31273, + "ncfc": 31274, + "priya": 31275, + "aghan": 31276, + "anath": 31277, + "plugin": 31278, + "corey": 31279, + "rebound": 31280, + "oru": 31281, + "katrin": 31282, + "hormone": 31283, + "gim": 31284, + "mahindra": 31285, + "ssus": 31286, + "parkland": 31287, + "harper": 31288, + "fantastic": 31289, + "inferno": 31290, + "epilo": 31291, + "wrestling": 31292, + "fect": 31293, + "cit": 31294, + "acoun": 31295, + "tossed": 31296, + "monumental": 31297, + "chartered": 31298, + "bust": 31299, + "petra": 31300, + "âĮļ": 31301, + "wildflowerhour": 31302, + "sweaters": 31303, + "*.": 31304, + "bler": 31305, + "atech": 31306, + "gowan": 31307, + "demographic": 31308, + "bral": 31309, + "suicide": 31310, + "renovations": 31311, + "vuel": 31312, + "sinister": 31313, + "armani": 31314, + "misogy": 31315, + "pharrell": 31316, + "naps": 31317, + "uniting": 31318, + "crusaders": 31319, + "corgi": 31320, + "insured": 31321, + "thani": 31322, + "noor": 31323, + "gq": 31324, + "dada": 31325, + "bicycles": 31326, + "snuggle": 31327, + "schan": 31328, + "tenberg": 31329, + "ssal": 31330, + "femme": 31331, + "boil": 31332, + "½ï¸ı": 31333, + "reap": 31334, + "occurring": 31335, + "hussein": 31336, + "divid": 31337, + "stoke": 31338, + "shalom": 31339, + "naia": 31340, + "olic": 31341, + "frustrating": 31342, + "Ùĩ": 31343, + "igs": 31344, + "grover": 31345, + "scenarios": 31346, + "nds": 31347, + "brutality": 31348, + "medalli": 31349, + "buon": 31350, + "sass": 31351, + "skateboarding": 31352, + "onyx": 31353, + "lorry": 31354, + "nyu": 31355, + "gautam": 31356, + "mmings": 31357, + "gug": 31358, + "endi": 31359, + "lothian": 31360, + "commando": 31361, + "chalk": 31362, + "phora": 31363, + "assessing": 31364, + "tigh": 31365, + "crunchy": 31366, + "aday": 31367, + "isl": 31368, + "ciara": 31369, + "pilgrims": 31370, + "kamal": 31371, + "pto": 31372, + "britanni": 31373, + "tani": 31374, + "smc": 31375, + "lure": 31376, + "appstore": 31377, + "aby": 31378, + "golfing": 31379, + "clc": 31380, + "fau": 31381, + "anas": 31382, + "shutting": 31383, + "regulated": 31384, + "carnage": 31385, + "scowboys": 31386, + "allenge": 31387, + "cma": 31388, + "humboldt": 31389, + "relle": 31390, + "kumb": 31391, + "heri": 31392, + "refinery": 31393, + "soundcheck": 31394, + "dwayne": 31395, + "bosnia": 31396, + "isp": 31397, + "thealth": 31398, + "anniv": 31399, + "relevance": 31400, + "mya": 31401, + "baggage": 31402, + "dread": 31403, + "sbc": 31404, + "thed": 31405, + "buh": 31406, + "hijab": 31407, + "loid": 31408, + "kew": 31409, + "cte": 31410, + "respect": 31411, + "lovelies": 31412, + "cubes": 31413, + "celebrate": 31414, + "dirt": 31415, + "savers": 31416, + "_,": 31417, + "garment": 31418, + "pulitzer": 31419, + "masjid": 31420, + "beatport": 31421, + "alarts": 31422, + "encryption": 31423, + "sner": 31424, + "pleads": 31425, + "foundry": 31426, + "symmetry": 31427, + "rumi": 31428, + "birthplace": 31429, + "scallops": 31430, + "supple": 31431, + "pivotal": 31432, + "tati": 31433, + "node": 31434, + "sod": 31435, + "proxim": 31436, + "trics": 31437, + "coldest": 31438, + "brent": 31439, + 
"mandu": 31440, + "clair": 31441, + "each": 31442, + "andalu": 31443, + "hiddleston": 31444, + "ðŁIJº": 31445, + "melts": 31446, + "vance": 31447, + "pinn": 31448, + "sements": 31449, + "screened": 31450, + "sachs": 31451, + "obl": 31452, + "icha": 31453, + "âĺĺï¸ı": 31454, + "schoolers": 31455, + "healed": 31456, + "logged": 31457, + "ð٤ĺðŁı¼": 31458, + "icus": 31459, + "boredom": 31460, + "bish": 31461, + "bffs": 31462, + "talking": 31463, + "suresh": 31464, + "hookem": 31465, + "deon": 31466, + "defl": 31467, + "eileen": 31468, + "ðŁįķ": 31469, + "womenintech": 31470, + "risotto": 31471, + "ranger": 31472, + "advertise": 31473, + "à¸ģà¸": 31474, + "telly": 31475, + "lago": 31476, + "dartmoor": 31477, + "dong": 31478, + "skates": 31479, + "logo": 31480, + "unner": 31481, + "mailbox": 31482, + "masala": 31483, + "looooo": 31484, + "amethyst": 31485, + "chewing": 31486, + "cbb": 31487, + "australians": 31488, + "rcmp": 31489, + "gameart": 31490, + "#...": 31491, + "korn": 31492, + "extremism": 31493, + "fruitful": 31494, + "ancient": 31495, + "pubg": 31496, + "polite": 31497, + "whit": 31498, + "murals": 31499, + "mgr": 31500, + "lineman": 31501, + "davao": 31502, + "stems": 31503, + "tennis": 31504, + "avage": 31505, + "tupac": 31506, + "gigantic": 31507, + "hsbc": 31508, + "autobiography": 31509, + "upthe": 31510, + "ีà¹Ī": 31511, + "regal": 31512, + "figuring": 31513, + "kul": 31514, + "missy": 31515, + "hoop": 31516, + "gras": 31517, + "forums": 31518, + "backlash": 31519, + "abducted": 31520, + "pnw": 31521, + "minic": 31522, + "butt": 31523, + "bottoms": 31524, + "aton": 31525, + "veng": 31526, + "ðŁĮı": 31527, + "delaney": 31528, + "prabhu": 31529, + "fanclub": 31530, + "overhaul": 31531, + "healthye": 31532, + "syno": 31533, + "aaf": 31534, + "renamed": 31535, + "kimi": 31536, + "uncle": 31537, + "mancity": 31538, + "seu": 31539, + "quanti": 31540, + "esteem": 31541, + "umin": 31542, + "enzo": 31543, + "melvin": 31544, + "undergo": 31545, + "jhar": 31546, + "farah": 31547, + "coasters": 31548, + "humphrey": 31549, + "mhz": 31550, + "childrens": 31551, + "^.": 31552, + "dhi": 31553, + "disruptive": 31554, + "integrating": 31555, + "rnb": 31556, + "oversized": 31557, + "aide": 31558, + "neau": 31559, + "documentation": 31560, + "ðŁijĢðŁijĢ": 31561, + "palo": 31562, + "hearth": 31563, + "riyad": 31564, + "punctu": 31565, + "abcnews": 31566, + "secures": 31567, + "boyband": 31568, + "birch": 31569, + "juco": 31570, + "traff": 31571, + "legislators": 31572, + "baya": 31573, + "ãĤ¯": 31574, + "noises": 31575, + "collects": 31576, + "swarm": 31577, + "kner": 31578, + "bishops": 31579, + "sturgeon": 31580, + "snapping": 31581, + "mol": 31582, + "freaky": 31583, + "chairperson": 31584, + "trop": 31585, + "lynch": 31586, + "carcin": 31587, + "artsy": 31588, + "esto": 31589, + "chai": 31590, + "flur": 31591, + "invali": 31592, + "sausages": 31593, + "imel": 31594, + "jor": 31595, + "funfact": 31596, + "witter": 31597, + "punished": 31598, + "acons": 31599, + "hya": 31600, + "reversi": 31601, + "emc": 31602, + "diffu": 31603, + "zx": 31604, + "spaw": 31605, + "clad": 31606, + "dmit": 31607, + "holland": 31608, + "fresco": 31609, + "payroll": 31610, + "abundant": 31611, + "stuffing": 31612, + "moro": 31613, + "cny": 31614, + "boycott": 31615, + "wendy": 31616, + "eleven": 31617, + "provoc": 31618, + "pilot": 31619, + "trx": 31620, + "bead": 31621, + "climateaction": 31622, + "rion": 31623, + "assie": 31624, + "ìĸ": 31625, + "osm": 31626, + "islamic": 31627, + "hoar": 31628, + "goodreads": 
31629, + "alici": 31630, + "afternoons": 31631, + "spokesman": 31632, + "jolie": 31633, + "itas": 31634, + "mascara": 31635, + "âĻ©âĻ«": 31636, + "prevail": 31637, + "beetroot": 31638, + "lujah": 31639, + "kli": 31640, + "dodger": 31641, + "»": 31642, + "rule": 31643, + "ln": 31644, + "scream": 31645, + "hobart": 31646, + "colbert": 31647, + "rtc": 31648, + "erm": 31649, + "patro": 31650, + "quoting": 31651, + "slive": 31652, + "quest": 31653, + "nonfiction": 31654, + "seminary": 31655, + "prosecutors": 31656, + "vest": 31657, + "expressway": 31658, + "gge": 31659, + "nautical": 31660, + "etf": 31661, + "ðŁİīðŁİĬ": 31662, + "duration": 31663, + "chaired": 31664, + "thefilm": 31665, + "fabio": 31666, + "sheh": 31667, + "cano": 31668, + "ðŁĴªðŁı»": 31669, + "withdraw": 31670, + "!:)": 31671, + "corpus": 31672, + "phenom": 31673, + "yelp": 31674, + "lawn": 31675, + "entom": 31676, + "snapper": 31677, + "butte": 31678, + "pinball": 31679, + "proxy": 31680, + "libre": 31681, + "allevi": 31682, + "nada": 31683, + "gabriel": 31684, + "fowl": 31685, + "eureka": 31686, + "daphne": 31687, + "tunes": 31688, + "punched": 31689, + "whore": 31690, + "jog": 31691, + "rential": 31692, + "manners": 31693, + "ope": 31694, + "whufc": 31695, + "guth": 31696, + "revolt": 31697, + "sneaker": 31698, + "philharmonic": 31699, + "hoste": 31700, + "sovereignty": 31701, + "ðŁĻıðŁĻıðŁĻı": 31702, + "fishing": 31703, + "sciart": 31704, + "feta": 31705, + "ipp": 31706, + "dumping": 31707, + "kelown": 31708, + "giri": 31709, + "digits": 31710, + "salu": 31711, + "sanjay": 31712, + "tweeters": 31713, + "spas": 31714, + "colchester": 31715, + "scab": 31716, + "madd": 31717, + "à¹Ħà¸": 31718, + "Äĩ": 31719, + "geddon": 31720, + "marchfor": 31721, + "dop": 31722, + "maureen": 31723, + "unplugged": 31724, + "dido": 31725, + "fashionblogger": 31726, + "upa": 31727, + "mexic": 31728, + "tary": 31729, + "polye": 31730, + "jameson": 31731, + "vt": 31732, + "grinder": 31733, + "maddy": 31734, + "consultancy": 31735, + "¬ë": 31736, + "leagueoflegends": 31737, + "accents": 31738, + "umni": 31739, + "janeiro": 31740, + "tuss": 31741, + "hens": 31742, + "amplifier": 31743, + "toshi": 31744, + "prettier": 31745, + "prevents": 31746, + "newtown": 31747, + "redwood": 31748, + "vantage": 31749, + "ballard": 31750, + "artof": 31751, + "ashe": 31752, + "asion": 31753, + "lacey": 31754, + "apat": 31755, + "grove": 31756, + "à¸Ħ": 31757, + "rwand": 31758, + "realtors": 31759, + "traitor": 31760, + "bedding": 31761, + "ör": 31762, + "zion": 31763, + "flashing": 31764, + "campan": 31765, + "boomer": 31766, + "secretariat": 31767, + "abol": 31768, + "litigation": 31769, + "contamination": 31770, + "sedly": 31771, + "shredded": 31772, + "infor": 31773, + "doherty": 31774, + "benchmark": 31775, + "roche": 31776, + "skateboard": 31777, + "shovel": 31778, + "izz": 31779, + "topper": 31780, + "oster": 31781, + "labyrin": 31782, + "autum": 31783, + "kong": 31784, + "hummus": 31785, + "viz": 31786, + "technews": 31787, + "klaus": 31788, + "amusing": 31789, + "socialmediamarketing": 31790, + "ides": 31791, + "castell": 31792, + "stee": 31793, + "underestimate": 31794, + "calab": 31795, + "paign": 31796, + "billing": 31797, + "unanimously": 31798, + "gmb": 31799, + "flyfishing": 31800, + "hathaway": 31801, + "commercial": 31802, + "colouring": 31803, + "skulls": 31804, + "pivot": 31805, + "tep": 31806, + "tbc": 31807, + "motorway": 31808, + "xpress": 31809, + "constructive": 31810, + "puk": 31811, + "underlying": 31812, + "kirsten": 31813, + "maniac": 
31814, + "chao": 31815, + "sema": 31816, + "chiffon": 31817, + "ðŁijĮðŁı»": 31818, + "verona": 31819, + "komo": 31820, + "standoff": 31821, + "wiped": 31822, + "cated": 31823, + "blair": 31824, + "workin": 31825, + "msc": 31826, + "bethlehem": 31827, + "swipe": 31828, + "unexpec": 31829, + "pees": 31830, + "petri": 31831, + "origami": 31832, + "ðŁijħ": 31833, + "mexico": 31834, + "flavor": 31835, + "rudd": 31836, + "cannabis": 31837, + "maru": 31838, + "riddle": 31839, + "worshi": 31840, + "silon": 31841, + "schat": 31842, + "apse": 31843, + "tanger": 31844, + "bious": 31845, + "eer": 31846, + "questioned": 31847, + "ozar": 31848, + "dank": 31849, + "anglesey": 31850, + "charan": 31851, + "baku": 31852, + "competen": 31853, + "repri": 31854, + "batter": 31855, + "saxon": 31856, + "calves": 31857, + "lengths": 31858, + "$$$": 31859, + "âŀ¡ï¸ı": 31860, + "immersion": 31861, + "gaunt": 31862, + "carry": 31863, + "cyto": 31864, + "banda": 31865, + "shutt": 31866, + "experience": 31867, + "elgin": 31868, + "mousse": 31869, + "taz": 31870, + "êµ": 31871, + "incorrect": 31872, + "enz": 31873, + "bham": 31874, + "moron": 31875, + "sover": 31876, + "arun": 31877, + "tipped": 31878, + "lable": 31879, + "dearly": 31880, + "bautista": 31881, + "íĻ": 31882, + "mortal": 31883, + "woop": 31884, + "dtla": 31885, + "shocks": 31886, + "davos": 31887, + "ðŁĵĿ": 31888, + "swimwear": 31889, + "herman": 31890, + "ðŁijĩðŁijĩ": 31891, + "zir": 31892, + "neglected": 31893, + "graced": 31894, + "campuses": 31895, + "avs": 31896, + "arora": 31897, + "swachhb": 31898, + "livepd": 31899, + "accra": 31900, + "enquiries": 31901, + "shooters": 31902, + "kurt": 31903, + "vancouver": 31904, + "bradley": 31905, + "garda": 31906, + "gü": 31907, + "olla": 31908, + "attracting": 31909, + "upton": 31910, + "newin": 31911, + "lumia": 31912, + "furnace": 31913, + "evers": 31914, + "eon": 31915, + "swa": 31916, + "rookies": 31917, + "aoc": 31918, + "vss": 31919, + "brisket": 31920, + "torch": 31921, + "yoda": 31922, + "heartland": 31923, + "taco": 31924, + "phony": 31925, + "foodbank": 31926, + "abbey": 31927, + "babylon": 31928, + "uy": 31929, + "greate": 31930, + "expresses": 31931, + "dandy": 31932, + "scapes": 31933, + "survivor": 31934, + "rond": 31935, + "eci": 31936, + "havin": 31937, + "abel": 31938, + "childish": 31939, + "torque": 31940, + "wavy": 31941, + "urself": 31942, + "kanyewest": 31943, + "yearof": 31944, + "alestine": 31945, + "obrien": 31946, + "alfon": 31947, + "skag": 31948, + "korean": 31949, + "anchorage": 31950, + "valeri": 31951, + "dew": 31952, + "ðŁİ¨": 31953, + "landslide": 31954, + "carole": 31955, + "christen": 31956, + "gophers": 31957, + "afi": 31958, + "priyanka": 31959, + "qq": 31960, + "powerof": 31961, + "itte": 31962, + "pcso": 31963, + "twol": 31964, + "pry": 31965, + "intellectu": 31966, + "guerrero": 31967, + "piles": 31968, + "wishlist": 31969, + "wren": 31970, + "timetable": 31971, + "ëı": 31972, + "prodigy": 31973, + "gibbons": 31974, + "./": 31975, + "neur": 31976, + "anzac": 31977, + "murray": 31978, + "viest": 31979, + "plaster": 31980, + "lair": 31981, + "artgallery": 31982, + "intercontinental": 31983, + "gbr": 31984, + "bellator": 31985, + "namjoon": 31986, + "mammals": 31987, + "amel": 31988, + "yaw": 31989, + "sarasota": 31990, + "camar": 31991, + "budding": 31992, + "summari": 31993, + "acosta": 31994, + "lash": 31995, + "eyou": 31996, + "postgraduate": 31997, + "instructors": 31998, + "tig": 31999, + "constant": 32000, + "werewolf": 32001, + "icos": 32002, + "clas": 32003, + 
"glenn": 32004, + "budge": 32005, + "ðŁĻĤ": 32006, + "erta": 32007, + "stains": 32008, + "persecution": 32009, + "cumbri": 32010, + "och": 32011, + "synergy": 32012, + "huang": 32013, + "scandin": 32014, + "midterms": 32015, + "commentator": 32016, + "regarded": 32017, + "perpetual": 32018, + "boiling": 32019, + "alp": 32020, + "lange": 32021, + "schle": 32022, + "faceli": 32023, + "tweeta": 32024, + "ridden": 32025, + "oktoberfest": 32026, + "charlottesville": 32027, + "iklan": 32028, + "jou": 32029, + "chatham": 32030, + "bsc": 32031, + "ðŁį¦": 32032, + "strauss": 32033, + "mellow": 32034, + "xxxx": 32035, + "happyhour": 32036, + "reactor": 32037, + "wwer": 32038, + "distraction": 32039, + "atorial": 32040, + "ðŁĴªðŁı¼": 32041, + "twinpeaks": 32042, + "fayette": 32043, + "aor": 32044, + "kok": 32045, + "broom": 32046, + "syfy": 32047, + "ouse": 32048, + "amag": 32049, + "Ø·": 32050, + "ubisoft": 32051, + "lulu": 32052, + "hallmark": 32053, + "stuart": 32054, + "itya": 32055, + "sideline": 32056, + "vengeance": 32057, + "relu": 32058, + "sexism": 32059, + "bouncing": 32060, + "unites": 32061, + "gustav": 32062, + "tessa": 32063, + "stump": 32064, + "proclamation": 32065, + "imax": 32066, + "dividend": 32067, + "colby": 32068, + "ðŁįİ": 32069, + "playwright": 32070, + "unsafe": 32071, + "cosmo": 32072, + "ðŁĩ²ðŁĩ½": 32073, + "cupboard": 32074, + "constituents": 32075, + "anglia": 32076, + "rampage": 32077, + "ðŁĺįðŁĺįðŁĺįðŁĺįðŁĺį": 32078, + "thanked": 32079, + "takeaways": 32080, + "shroff": 32081, + "debat": 32082, + "khur": 32083, + "conducts": 32084, + "formats": 32085, + "à©": 32086, + "portage": 32087, + "graphers": 32088, + "uten": 32089, + "prem": 32090, + "moines": 32091, + "condemns": 32092, + "sous": 32093, + "lps": 32094, + "fcs": 32095, + "dealership": 32096, + "leukemia": 32097, + "bureau": 32098, + "skid": 32099, + "guardiola": 32100, + "caster": 32101, + "third": 32102, + "avoided": 32103, + "encyclo": 32104, + "csr": 32105, + "vixx": 32106, + "analyzing": 32107, + "shear": 32108, + "duluth": 32109, + "shapiro": 32110, + "chanting": 32111, + "stresses": 32112, + "asbe": 32113, + "militia": 32114, + "ãĥª": 32115, + "collin": 32116, + "arsene": 32117, + "suresh": 32118, + "teachings": 32119, + "yixing": 32120, + "shill": 32121, + "nudes": 32122, + "svu": 32123, + "clearwater": 32124, + "warped": 32125, + "prolife": 32126, + "artistson": 32127, + "itu": 32128, + "versailles": 32129, + "galaxy": 32130, + "axel": 32131, + "springst": 32132, + "cala": 32133, + "huhu": 32134, + "scu": 32135, + "commitments": 32136, + "exeter": 32137, + "poignant": 32138, + "motion": 32139, + "conservatory": 32140, + "rowdy": 32141, + "recalled": 32142, + "musk": 32143, + "embelli": 32144, + "sothe": 32145, + "âĺĢ": 32146, + "stopper": 32147, + "schild": 32148, + "tope": 32149, + "elmo": 32150, + "ziel": 32151, + "jom": 32152, + "barnsley": 32153, + "snowden": 32154, + "ontour": 32155, + "journey": 32156, + "hillsborough": 32157, + "parole": 32158, + "wts": 32159, + "moving": 32160, + "agility": 32161, + "tivo": 32162, + "ffers": 32163, + "kindleunlimited": 32164, + "gwen": 32165, + "annan": 32166, + "ahmad": 32167, + "textured": 32168, + "hepatitis": 32169, + "dram": 32170, + "insiders": 32171, + "tissues": 32172, + "ãĥĦ": 32173, + "fcbarcelona": 32174, + "cratic": 32175, + "naacp": 32176, + "pecan": 32177, + "fgm": 32178, + "customize": 32179, + "concert": 32180, + "gsm": 32181, + "peg": 32182, + "pone": 32183, + "justintrudeau": 32184, + "supercars": 32185, + "happyholidays": 32186, + "bular": 
32187, + "adox": 32188, + "laptops": 32189, + "digitalhealth": 32190, + "destination": 32191, + "gradually": 32192, + "áĥ¦": 32193, + "poppy": 32194, + "ssl": 32195, + "inhibit": 32196, + "starlight": 32197, + "offro": 32198, + "gloomy": 32199, + "xper": 32200, + "halder": 32201, + "implants": 32202, + "leto": 32203, + "hassel": 32204, + "aas": 32205, + "untold": 32206, + "enci": 32207, + "liberia": 32208, + "oran": 32209, + "contests": 32210, + "ilah": 32211, + "smag": 32212, + "scout": 32213, + "marianne": 32214, + "cryo": 32215, + "scheduling": 32216, + "los": 32217, + "kane": 32218, + "stuttgart": 32219, + "nese": 32220, + "lawrence": 32221, + "dain": 32222, + "photom": 32223, + "carou": 32224, + "ร": 32225, + "gwy": 32226, + "nationaldogday": 32227, + "roasting": 32228, + "bandcamp": 32229, + "kentucky": 32230, + "stretches": 32231, + "kerel": 32232, + "cashe": 32233, + "ãĤ¸": 32234, + "stax": 32235, + "transi": 32236, + "doggie": 32237, + "atric": 32238, + "halle": 32239, + "civic": 32240, + "browning": 32241, + "leinster": 32242, + "catday": 32243, + "highland": 32244, + "joyous": 32245, + "incumb": 32246, + "orlando": 32247, + "romo": 32248, + "colton": 32249, + "delta": 32250, + "carab": 32251, + "rotc": 32252, + "asteroid": 32253, + "goosebumps": 32254, + "mology": 32255, + "yoko": 32256, + "ands": 32257, + "tomorrows": 32258, + "redcarpet": 32259, + "smp": 32260, + "casio": 32261, + "ðŁ¤£ðŁ¤£ðŁ¤£": 32262, + "seau": 32263, + "rejection": 32264, + "rotating": 32265, + "bipartisan": 32266, + "thun": 32267, + "mati": 32268, + "boni": 32269, + "oll": 32270, + "energye": 32271, + "doit": 32272, + "lj": 32273, + "motherhood": 32274, + "louise": 32275, + "necklaces": 32276, + "elite": 32277, + "nix": 32278, + "lcs": 32279, + "env": 32280, + "glu": 32281, + "lesh": 32282, + "crank": 32283, + "susie": 32284, + "mclau": 32285, + "sotu": 32286, + "crowley": 32287, + "ratri": 32288, + "used": 32289, + "breton": 32290, + "alfredo": 32291, + "yeo": 32292, + "travelpics": 32293, + "tipp": 32294, + "ellison": 32295, + "saxophone": 32296, + "mered": 32297, + "heughan": 32298, + "taine": 32299, + "fes": 32300, + "viro": 32301, + "supposedly": 32302, + "ias": 32303, + "digestive": 32304, + "yle": 32305, + "lizzy": 32306, + "wildlifephotography": 32307, + "brianna": 32308, + "westfield": 32309, + "rained": 32310, + "amher": 32311, + "ðŁĺĦðŁĺĦ": 32312, + "distribute": 32313, + "bottom": 32314, + "preserving": 32315, + "oiland": 32316, + "crafty": 32317, + "descen": 32318, + "colling": 32319, + "shakespearesunday": 32320, + "rwc": 32321, + "angled": 32322, + "cian": 32323, + "tations": 32324, + "montage": 32325, + "meyers": 32326, + "francesca": 32327, + "ðŁĮ·": 32328, + "wiggins": 32329, + "sanford": 32330, + "volunteer": 32331, + "carra": 32332, + "bark": 32333, + "varied": 32334, + "plin": 32335, + "amu": 32336, + "kapil": 32337, + "rockers": 32338, + "quind": 32339, + "brane": 32340, + "inmate": 32341, + "ental": 32342, + "improvis": 32343, + "michigan": 32344, + "retweeting": 32345, + "progressing": 32346, + "mercedesbenz": 32347, + "smoker": 32348, + "physiology": 32349, + "dorado": 32350, + "wattpad": 32351, + "hwa": 32352, + "srbachchan": 32353, + "wga": 32354, + "volatility": 32355, + "hire": 32356, + "acap": 32357, + "wnba": 32358, + "heinz": 32359, + "stitches": 32360, + "kidnapping": 32361, + "burys": 32362, + "limb": 32363, + "fitters": 32364, + "thumbnail": 32365, + "tone": 32366, + "mirand": 32367, + "desirable": 32368, + "addison": 32369, + "taran": 32370, + "tamilnadu": 32371, + 
"spectator": 32372, + "sociology": 32373, + "amitshah": 32374, + "remotely": 32375, + "âϦ": 32376, + "hamid": 32377, + "rds": 32378, + "glee": 32379, + "smoothly": 32380, + "schro": 32381, + "erc": 32382, + "laliga": 32383, + "heals": 32384, + "usf": 32385, + "nishi": 32386, + "dhu": 32387, + "unil": 32388, + "hle": 32389, + "tromb": 32390, + "bhutan": 32391, + "pilipinas": 32392, + "seung": 32393, + "whitman": 32394, + "tey": 32395, + "mince": 32396, + "snowboarding": 32397, + "reau": 32398, + "kker": 32399, + "avo": 32400, + "zachary": 32401, + "ranveer": 32402, + "tik": 32403, + "govern": 32404, + "qual": 32405, + "becky": 32406, + "anthropology": 32407, + "atten": 32408, + "groceries": 32409, + "debit": 32410, + "warp": 32411, + "silicon": 32412, + "hawaii": 32413, + "ðŁĴħ": 32414, + "pomegranate": 32415, + "peer": 32416, + "oranges": 32417, + "peopleschoice": 32418, + "endure": 32419, + "ðŁĴĽðŁĴĽ": 32420, + "ãĤ¹ãĥ": 32421, + "acial": 32422, + "ahaha": 32423, + "stuk": 32424, + "imperial": 32425, + "blond": 32426, + "powder": 32427, + "knots": 32428, + "vince": 32429, + "woodlands": 32430, + "dena": 32431, + "watchin": 32432, + "matcha": 32433, + "mahat": 32434, + "galaxies": 32435, + "middlesbrough": 32436, + "kö": 32437, + "stree": 32438, + "rescues": 32439, + "waldo": 32440, + "leroy": 32441, + "despic": 32442, + "realities": 32443, + "tmnt": 32444, + "haq": 32445, + "uno": 32446, + "pec": 32447, + "bollywood": 32448, + "blinds": 32449, + "designthinking": 32450, + "hems": 32451, + "andhra": 32452, + "absen": 32453, + "fans": 32454, + "stech": 32455, + "shirehour": 32456, + "blaine": 32457, + "shakti": 32458, + "purely": 32459, + "ðŁıı": 32460, + "trafal": 32461, + "keynes": 32462, + "grate": 32463, + "tobias": 32464, + "spontaneous": 32465, + "saturated": 32466, + "cavalry": 32467, + "prisc": 32468, + "ðŁĺij": 32469, + "wht": 32470, + "passi": 32471, + "~~~": 32472, + "virat": 32473, + "pattinson": 32474, + "lao": 32475, + "weirdo": 32476, + "sympathy": 32477, + "juda": 32478, + "occasionally": 32479, + "credited": 32480, + "statu": 32481, + "esco": 32482, + "hilly": 32483, + "escape": 32484, + "discharge": 32485, + "seer": 32486, + "maynard": 32487, + "sudbury": 32488, + "zlat": 32489, + "oral": 32490, + "weer": 32491, + "encountered": 32492, + "smelling": 32493, + "oversight": 32494, + "ê¸": 32495, + "thatcher": 32496, + "mackay": 32497, + "youcan": 32498, + "freep": 32499, + "freedoms": 32500, + "prophecy": 32501, + "hoe": 32502, + "ishqba": 32503, + "drake": 32504, + "quits": 32505, + "pelled": 32506, + "turk": 32507, + "ovi": 32508, + "wesleyan": 32509, + "newmusic": 32510, + "legg": 32511, + "cheng": 32512, + "hilli": 32513, + "ayy": 32514, + "panties": 32515, + "adversity": 32516, + "adjac": 32517, + "vaccination": 32518, + "juke": 32519, + "gac": 32520, + "exceed": 32521, + "timesof": 32522, + "staining": 32523, + "epcot": 32524, + "vital": 32525, + "upward": 32526, + "bethesda": 32527, + "apark": 32528, + "mahi": 32529, + "campfire": 32530, + "enchanting": 32531, + "rhapso": 32532, + "hz": 32533, + "naver": 32534, + "fax": 32535, + "validation": 32536, + "acad": 32537, + "nyr": 32538, + "asym": 32539, + "coordinated": 32540, + "departed": 32541, + "allery": 32542, + "varies": 32543, + "sprite": 32544, + "chaplin": 32545, + "ssoccer": 32546, + "swat": 32547, + "bret": 32548, + "reluct": 32549, + "tunesapp": 32550, + "superstar": 32551, + "reminiscing": 32552, + "oco": 32553, + "homegrown": 32554, + "doughnut": 32555, + "uncanny": 32556, + "lapd": 32557, + "thyroid": 32558, 
+ "!âĿ¤ï¸ı": 32559, + "botanic": 32560, + "bres": 32561, + "spade": 32562, + "iste": 32563, + "echoes": 32564, + "dulil": 32565, + "bursting": 32566, + "quiero": 32567, + "ðŁijİ": 32568, + "loyola": 32569, + "amusement": 32570, + "hails": 32571, + "sleepy": 32572, + "burglary": 32573, + "âľı": 32574, + "rogue": 32575, + "cotland": 32576, + "moors": 32577, + "lower": 32578, + "wicked": 32579, + "ðŁĶĬ": 32580, + "competiti": 32581, + "argentine": 32582, + "yvonne": 32583, + "kartikeyan": 32584, + "iliary": 32585, + "gatsby": 32586, + "precinct": 32587, + "sixty": 32588, + "naji": 32589, + "cams": 32590, + "practitioner": 32591, + "ðŁĺ³ðŁĺ³": 32592, + "pune": 32593, + "negli": 32594, + "julien": 32595, + "invaded": 32596, + "calibr": 32597, + "clam": 32598, + "dubai": 32599, + "muk": 32600, + "lantic": 32601, + "product": 32602, + "fedex": 32603, + "ï¸ı:": 32604, + "eura": 32605, + "darius": 32606, + "sling": 32607, + "virtualreality": 32608, + "homestead": 32609, + "ðŁı³ï¸ıâĢįðŁĮĪ": 32610, + "paced": 32611, + "inha": 32612, + "pulmon": 32613, + "lazy": 32614, + "premiering": 32615, + "mastered": 32616, + "inhe": 32617, + "congregation": 32618, + "bajo": 32619, + "sporting": 32620, + "newjersey": 32621, + "horny": 32622, + "lmaoo": 32623, + "lengthy": 32624, + "dut": 32625, + "yogh": 32626, + "swearing": 32627, + "philosophical": 32628, + "papua": 32629, + "inski": 32630, + "knowles": 32631, + "dyke": 32632, + "â̲": 32633, + "token": 32634, + "mcguire": 32635, + "riot": 32636, + "probability": 32637, + "mccon": 32638, + "gros": 32639, + "sumat": 32640, + "cite": 32641, + "daa": 32642, + "onda": 32643, + "maddow": 32644, + "chew": 32645, + "boardgames": 32646, + "sparked": 32647, + "reclaimed": 32648, + "adhd": 32649, + "nyse": 32650, + "imwithher": 32651, + "equinox": 32652, + "booths": 32653, + "balsamic": 32654, + "hazy": 32655, + "dorchester": 32656, + "agos": 32657, + "seaw": 32658, + "moderator": 32659, + "seriea": 32660, + "andersen": 32661, + "pilgrim": 32662, + "âŃIJâŃIJ": 32663, + "itchen": 32664, + "halli": 32665, + "xton": 32666, + "nathaniel": 32667, + "munition": 32668, + "celestial": 32669, + "gaf": 32670, + "zoom": 32671, + "markle": 32672, + "penthouse": 32673, + "cale": 32674, + "sfa": 32675, + "barking": 32676, + "tucket": 32677, + "emery": 32678, + "calorie": 32679, + "lique": 32680, + "adar": 32681, + "mcnam": 32682, + "tortilla": 32683, + "woodpecker": 32684, + "motown": 32685, + "badger": 32686, + "ayrshire": 32687, + "scramble": 32688, + "dday": 32689, + "craziest": 32690, + "perrie": 32691, + "choco": 32692, + "caste": 32693, + "iot": 32694, + "wrecked": 32695, + "selecting": 32696, + "ussr": 32697, + "graft": 32698, + "punt": 32699, + "labou": 32700, + "irst": 32701, + "baek": 32702, + "ÛĮ": 32703, + "suki": 32704, + "queu": 32705, + "achat": 32706, + "tester": 32707, + "augmented": 32708, + "wcvb": 32709, + "sinks": 32710, + "ðŁĵ»": 32711, + "rake": 32712, + "interne": 32713, + "because": 32714, + "bellevue": 32715, + "unearth": 32716, + "lighten": 32717, + "ðŁĺ£": 32718, + "turnaround": 32719, + "labeled": 32720, + "unemployed": 32721, + "twitterkurds": 32722, + "leia": 32723, + "hye": 32724, + "greater": 32725, + "ðŁIJİ": 32726, + "timed": 32727, + "ired": 32728, + "ett": 32729, + "limitations": 32730, + "cabe": 32731, + "sout": 32732, + "beech": 32733, + "annihil": 32734, + "retrac": 32735, + "yoona": 32736, + "anger": 32737, + "dennis": 32738, + "supplying": 32739, + "diz": 32740, + "\"(": 32741, + "scur": 32742, + "gunman": 32743, + "suho": 32744, + "sauvignon": 
32745, + "ล": 32746, + "wiley": 32747, + "landon": 32748, + "choreography": 32749, + "prehistoric": 32750, + "ðŁıĥ": 32751, + "vargas": 32752, + "assessments": 32753, + "pinnacle": 32754, + "dii": 32755, + "chamberlain": 32756, + "ìĪ": 32757, + "vp": 32758, + "presenters": 32759, + "deutsche": 32760, + "sunshine": 32761, + "salutes": 32762, + "rone": 32763, + "busiest": 32764, + "-.-": 32765, + "motorists": 32766, + "hemisphere": 32767, + "alwx": 32768, + "psp": 32769, + "owa": 32770, + "denying": 32771, + "choc": 32772, + "gutier": 32773, + "hanuk": 32774, + "muskete": 32775, + "jaitley": 32776, + "sewage": 32777, + "tame": 32778, + "thinkers": 32779, + "shim": 32780, + "sequo": 32781, + "papar": 32782, + "middleeast": 32783, + "kwa": 32784, + "keg": 32785, + "patagonia": 32786, + "noy": 32787, + "barça": 32788, + "takeoff": 32789, + "hea": 32790, + "à¬": 32791, + "nsc": 32792, + "gdc": 32793, + "ðŁijĪ": 32794, + "moustache": 32795, + "melania": 32796, + "thra": 32797, + "â¬Ĩï¸ı": 32798, + "pierced": 32799, + "zeus": 32800, + "fonts": 32801, + "bera": 32802, + "itiner": 32803, + "qatar": 32804, + "contrary": 32805, + "ireland": 32806, + "ify": 32807, + "oulos": 32808, + "communal": 32809, + "fins": 32810, + "unpaid": 32811, + "paa": 32812, + "ðŁijĩðŁı»": 32813, + "rios": 32814, + "oup": 32815, + "filler": 32816, + "cafeteria": 32817, + "à¸Ń": 32818, + "kasi": 32819, + "caliber": 32820, + "zulu": 32821, + "vsco": 32822, + "tsford": 32823, + "dragonfly": 32824, + "smokin": 32825, + "pist": 32826, + "psychologist": 32827, + "diplomat": 32828, + "webs": 32829, + "buccane": 32830, + "ா": 32831, + "motivational": 32832, + "dune": 32833, + "bae": 32834, + "cfs": 32835, + "without": 32836, + "eron": 32837, + "iac": 32838, + "atee": 32839, + "pension": 32840, + "frazier": 32841, + "ensis": 32842, + "skis": 32843, + "parting": 32844, + "gery": 32845, + "territories": 32846, + "nachos": 32847, + "enight": 32848, + "everlasting": 32849, + "msdhoni": 32850, + "tele": 32851, + "spun": 32852, + "podi": 32853, + "sabah": 32854, + "environmentally": 32855, + "cease": 32856, + "beaumont": 32857, + "marta": 32858, + "kelvin": 32859, + "hoff": 32860, + "sunil": 32861, + "nda": 32862, + "cob": 32863, + "shale": 32864, + "reedus": 32865, + "unboxing": 32866, + "ubio": 32867, + "reopened": 32868, + "nall": 32869, + "capsules": 32870, + "marr": 32871, + "himalayas": 32872, + "sweeter": 32873, + "jaz": 32874, + "fmr": 32875, + "tweeter": 32876, + "dhaka": 32877, + "nau": 32878, + "demi": 32879, + "dfs": 32880, + "taurus": 32881, + "fading": 32882, + "itutes": 32883, + "cip": 32884, + "overflow": 32885, + "jeffrey": 32886, + "donny": 32887, + "cartunesapp": 32888, + "ðŁįij": 32889, + "prefecture": 32890, + "danced": 32891, + "cpt": 32892, + "pleasing": 32893, + "italk": 32894, + "earthquakes": 32895, + "ulation": 32896, + "hio": 32897, + "ãĢĭ": 32898, + "antan": 32899, + "nutrient": 32900, + "deere": 32901, + "selects": 32902, + "enrichment": 32903, + "riti": 32904, + "trampol": 32905, + "blamed": 32906, + "jia": 32907, + "contributors": 32908, + "chesapeake": 32909, + "pigeons": 32910, + "tribunal": 32911, + "maduro": 32912, + "wsu": 32913, + "ilove": 32914, + "efficiently": 32915, + "darcy": 32916, + "warms": 32917, + "arra": 32918, + "ecu": 32919, + "hower": 32920, + "struggled": 32921, + "rajinikanth": 32922, + "ðŁĺ¢ðŁĺ¢": 32923, + "housing": 32924, + "strat": 32925, + "elix": 32926, + "dispro": 32927, + "raffic": 32928, + "thierry": 32929, + "nasty": 32930, + "cfb": 32931, + "staffing": 32932, + "alma": 
32933, + "backers": 32934, + "henson": 32935, + "skywalker": 32936, + "realestate": 32937, + "roos": 32938, + "nessy": 32939, + "chance": 32940, + "cairns": 32941, + "cci": 32942, + "pedal": 32943, + "lyft": 32944, + "crossword": 32945, + "waiter": 32946, + "onlyin": 32947, + "kruger": 32948, + "kir": 32949, + "alejandro": 32950, + "cartier": 32951, + "carrera": 32952, + "repaired": 32953, + "ouat": 32954, + "unclear": 32955, + "unbreakable": 32956, + "todayin": 32957, + "queries": 32958, + "jody": 32959, + "genital": 32960, + "winner": 32961, + "tol": 32962, + "kelowna": 32963, + "fascinated": 32964, + "ãĥ¬": 32965, + "srisri": 32966, + "squared": 32967, + "sprung": 32968, + "negotiate": 32969, + "privately": 32970, + "aven": 32971, + ">>>>>": 32972, + "gical": 32973, + "gavin": 32974, + "chesterfield": 32975, + "zumba": 32976, + "orr": 32977, + "natalia": 32978, + "impeachment": 32979, + "mnl": 32980, + "carat": 32981, + "critique": 32982, + "credible": 32983, + "tracy": 32984, + "tani": 32985, + "musik": 32986, + "jigsaw": 32987, + "gambia": 32988, + "tolkien": 32989, + "feu": 32990, + "asper": 32991, + "savory": 32992, + "foxx": 32993, + "fitt": 32994, + "marlon": 32995, + "lrt": 32996, + "vell": 32997, + "pbr": 32998, + "imprisoned": 32999, + "iom": 33000, + "chul": 33001, + "windshield": 33002, + "kaye": 33003, + "baa": 33004, + "chord": 33005, + "sart": 33006, + "algon": 33007, + "ministerial": 33008, + "natgeo": 33009, + "lazio": 33010, + "norms": 33011, + "ðŁijįðŁijį": 33012, + "licking": 33013, + "futbol": 33014, + "unsung": 33015, + "dallascowboys": 33016, + "shred": 33017, + "disturb": 33018, + "devine": 33019, + "beards": 33020, + "chf": 33021, + "bday": 33022, + "rosso": 33023, + "igor": 33024, + "ayi": 33025, + "siren": 33026, + "kair": 33027, + "stiles": 33028, + "rof": 33029, + "magnets": 33030, + "uncover": 33031, + "mouse": 33032, + "banging": 33033, + "sighted": 33034, + "speople": 33035, + "impact": 33036, + "rowland": 33037, + "kira": 33038, + "environment": 33039, + "lovethe": 33040, + "psis": 33041, + "mishra": 33042, + "glendale": 33043, + "cajun": 33044, + "oche": 33045, + "deception": 33046, + "sexist": 33047, + "straws": 33048, + "sga": 33049, + "buffer": 33050, + "apostle": 33051, + "spl": 33052, + "popup": 33053, + "ðŁļĹ": 33054, + "rg": 33055, + "uper": 33056, + "ballin": 33057, + "idy": 33058, + "occasional": 33059, + "nationalpark": 33060, + "ðŁıĬ": 33061, + "uan": 33062, + "innovation": 33063, + "ห": 33064, + "teaparty": 33065, + "rette": 33066, + "counterfe": 33067, + "bha": 33068, + "recs": 33069, + "igen": 33070, + "ðŁĮIJ": 33071, + "hummingbird": 33072, + "cur": 33073, + "haven": 33074, + "lazar": 33075, + "pueblo": 33076, + "::": 33077, + "zionist": 33078, + "opath": 33079, + "inverness": 33080, + "promoter": 33081, + "cartoon": 33082, + "cabinets": 33083, + "mahogany": 33084, + "surveying": 33085, + "rational": 33086, + "feeling": 33087, + "testify": 33088, + "sow": 33089, + "ocon": 33090, + "ย": 33091, + "neel": 33092, + "maris": 33093, + "solitary": 33094, + "chemo": 33095, + "radcliffe": 33096, + "simons": 33097, + "rosary": 33098, + "newer": 33099, + "jodie": 33100, + "retali": 33101, + "prawn": 33102, + "paddy": 33103, + "henge": 33104, + "kala": 33105, + "implant": 33106, + "aty": 33107, + "brentwood": 33108, + "paradox": 33109, + "enez": 33110, + "redesigned": 33111, + "pour": 33112, + "wyd": 33113, + "alde": 33114, + "à¯ģ": 33115, + "sold": 33116, + "biomedical": 33117, + "à¹Ĥ": 33118, + "tttt": 33119, + "matteo": 33120, + "yser": 33121, + 
"newton": 33122, + "debun": 33123, + "nerdy": 33124, + "lool": 33125, + "woon": 33126, + "elisabeth": 33127, + "ecc": 33128, + "whi": 33129, + "acho": 33130, + "salvage": 33131, + "salaries": 33132, + "quity": 33133, + "navigating": 33134, + "ophthal": 33135, + "consoles": 33136, + "rebuilt": 33137, + "opec": 33138, + "asters": 33139, + "shored": 33140, + "setlist": 33141, + "kathryn": 33142, + "rhymes": 33143, + "revisiting": 33144, + "ashish": 33145, + "lift": 33146, + "repost": 33147, + "soleil": 33148, + "âı±": 33149, + "wealth": 33150, + "saat": 33151, + "wec": 33152, + "kingjames": 33153, + "flipkart": 33154, + "fieldwork": 33155, + "segu": 33156, + "modal": 33157, + "bub": 33158, + "arers": 33159, + "ðŁįĴ": 33160, + "clooney": 33161, + "paddington": 33162, + "necessity": 33163, + "guthrie": 33164, + "pente": 33165, + "limo": 33166, + "josie": 33167, + "artin": 33168, + "enc": 33169, + "lhs": 33170, + "betrayal": 33171, + "infographics": 33172, + "ier": 33173, + "moa": 33174, + "hearings": 33175, + "bonjour": 33176, + "symbolic": 33177, + "agro": 33178, + "wedges": 33179, + "kristina": 33180, + "wildflower": 33181, + "athletic": 33182, + "photography": 33183, + "pesh": 33184, + "cahill": 33185, + "chilean": 33186, + "goul": 33187, + "fioren": 33188, + "ðŁij¶": 33189, + "zil": 33190, + "skim": 33191, + "badoo": 33192, + "delia": 33193, + "treble": 33194, + "ncc": 33195, + "ðŁĩ¦ðŁĩ": 33196, + "ahouse": 33197, + "bullock": 33198, + "solitude": 33199, + "اÙĨ": 33200, + "cancers": 33201, + "futureofwork": 33202, + "hutch": 33203, + "watershed": 33204, + "warmongers": 33205, + "spilled": 33206, + "colombo": 33207, + "moth": 33208, + "associations": 33209, + "weighed": 33210, + "globalgoals": 33211, + "notjust": 33212, + "christi": 33213, + "torg": 33214, + "sweating": 33215, + "maneu": 33216, + "clusters": 33217, + "â̼ï¸ıâ̼ï¸ı": 33218, + "taped": 33219, + "uly": 33220, + "trusting": 33221, + "yusuf": 33222, + "tein": 33223, + "rab": 33224, + ",,,,": 33225, + "sinai": 33226, + "audible": 33227, + "explicit": 33228, + "crowns": 33229, + "schiz": 33230, + "atleast": 33231, + "ðŁĹ£": 33232, + "debra": 33233, + "jesuit": 33234, + "enegger": 33235, + "zhen": 33236, + "onesie": 33237, + "iit": 33238, + "ssf": 33239, + "gurgaon": 33240, + "chakra": 33241, + "bearcats": 33242, + "kran": 33243, + "kawa": 33244, + "requesting": 33245, + "hanover": 33246, + "gend": 33247, + "soros": 33248, + "mercy": 33249, + "lovely": 33250, + "doomed": 33251, + "timmy": 33252, + "kuz": 33253, + "ull": 33254, + "abram": 33255, + "saison": 33256, + "ãĥ«": 33257, + "cleaners": 33258, + "remo": 33259, + "circuits": 33260, + "barred": 33261, + "oth": 33262, + "moist": 33263, + "madeleine": 33264, + "gallo": 33265, + "uj": 33266, + "permits": 33267, + "heaviest": 33268, + "carols": 33269, + "azte": 33270, + "giorgio": 33271, + "floats": 33272, + "declaring": 33273, + "usrc": 33274, + "minat": 33275, + "crafts": 33276, + "prima": 33277, + "conveni": 33278, + "nickelodeon": 33279, + "dancing": 33280, + "ceremonial": 33281, + "blogg": 33282, + "twp": 33283, + "anglican": 33284, + "shek": 33285, + "knick": 33286, + "(((": 33287, + "hubbard": 33288, + "harvey": 33289, + "hitman": 33290, + "feng": 33291, + "wesome": 33292, + "forza": 33293, + "sword": 33294, + "opus": 33295, + "brom": 33296, + "gibility": 33297, + "zal": 33298, + "munch": 33299, + "dancehall": 33300, + "greedy": 33301, + "hdmi": 33302, + "rebirth": 33303, + "ðŁĺĭðŁĺĭ": 33304, + "sworld": 33305, + "figurine": 33306, + "compost": 33307, + "kf": 33308, + 
"engraving": 33309, + "giorno": 33310, + "stana": 33311, + "kman": 33312, + "hamster": 33313, + "composers": 33314, + "aje": 33315, + "functionality": 33316, + "polk": 33317, + "isons": 33318, + "airplanes": 33319, + "tese": 33320, + "horrors": 33321, + "muscat": 33322, + "given": 33323, + "spence": 33324, + "ðŁĩ¸ðŁĩ": 33325, + "eliot": 33326, + "achilles": 33327, + "freck": 33328, + "cryptocurrencies": 33329, + "souther": 33330, + "halo": 33331, + "borneo": 33332, + "politic": 33333, + "hahahahah": 33334, + "upstate": 33335, + "siena": 33336, + "obscure": 33337, + "hausen": 33338, + "lloyd": 33339, + "happyfriday": 33340, + "motorbike": 33341, + "bona": 33342, + "americas": 33343, + "hols": 33344, + "-(": 33345, + "sporty": 33346, + "unaware": 33347, + "revenues": 33348, + "christopher": 33349, + "banksy": 33350, + "avan": 33351, + "evapor": 33352, + "compress": 33353, + "eyeliner": 33354, + "todos": 33355, + "buffy": 33356, + "renewableenergy": 33357, + "lyrical": 33358, + "archan": 33359, + "rapist": 33360, + "fairtrade": 33361, + "lmaooo": 33362, + "beatz": 33363, + "proactive": 33364, + "lapse": 33365, + "irical": 33366, + "reversal": 33367, + "pode": 33368, + "mcintyre": 33369, + "macau": 33370, + "ãĥķãĤ": 33371, + "nashgrier": 33372, + "fsa": 33373, + "gall": 33374, + "çĶŁ": 33375, + "perpetr": 33376, + "ilya": 33377, + "configuration": 33378, + "%;": 33379, + "strange": 33380, + "raci": 33381, + "à¸ĩ": 33382, + "pickups": 33383, + "kovsky": 33384, + "mammal": 33385, + "wps": 33386, + "gable": 33387, + "comparative": 33388, + "zh": 33389, + "saveour": 33390, + "davey": 33391, + "onetsy": 33392, + "mussels": 33393, + "miser": 33394, + "cristina": 33395, + "electron": 33396, + "crave": 33397, + "loren": 33398, + "precipitation": 33399, + "mz": 33400, + "ðŁį«": 33401, + "vincen": 33402, + "snowboard": 33403, + "noida": 33404, + "ahn": 33405, + "marinated": 33406, + "gtr": 33407, + "townhall": 33408, + "minis": 33409, + "bethel": 33410, + "advan": 33411, + "sura": 33412, + "shiel": 33413, + "furry": 33414, + "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 33415, + "lynd": 33416, + "soil": 33417, + "scence": 33418, + "seneca": 33419, + "sharjah": 33420, + "dickens": 33421, + "credentials": 33422, + "avar": 33423, + "perk": 33424, + "requiring": 33425, + "prefer": 33426, + "jian": 33427, + "deca": 33428, + "rach": 33429, + "ingfor": 33430, + "dele": 33431, + "beep": 33432, + "ðŁĴ»": 33433, + "cisely": 33434, + "huddle": 33435, + "greensboro": 33436, + "hawking": 33437, + "hoax": 33438, + "hangar": 33439, + "çľ": 33440, + "miso": 33441, + "lovin": 33442, + "greta": 33443, + "abad": 33444, + "logie": 33445, + "atan": 33446, + "snowflake": 33447, + "mahesh": 33448, + "fearthe": 33449, + "alkal": 33450, + "bobblehead": 33451, + "bahn": 33452, + "judged": 33453, + "futu": 33454, + "felix": 33455, + "ðŁįĵ": 33456, + "pike": 33457, + "deriv": 33458, + "notices": 33459, + "auer": 33460, + "dissuper": 33461, + "orda": 33462, + "wipes": 33463, + "amino": 33464, + "strikers": 33465, + "footb": 33466, + "dramas": 33467, + "punching": 33468, + "scoreless": 33469, + "hemingway": 33470, + "bih": 33471, + "ballad": 33472, + "chatter": 33473, + "ammo": 33474, + "klein": 33475, + "fabrication": 33476, + "karim": 33477, + "zend": 33478, + "histo": 33479, + "volta": 33480, + "rocky": 33481, + "marketer": 33482, + "xtreme": 33483, + "sequencing": 33484, + "paradigm": 33485, + "cleats": 33486, + "booming": 33487, + "âģłâģł": 33488, + "blockade": 33489, + "prompts": 33490, + "yoghurt": 33491, + "purpose": 33492, + "nur": 33493, + 
"regulate": 33494, + "noisy": 33495, + "ingrid": 33496, + "birdwatching": 33497, + "bartender": 33498, + "Ùĥ": 33499, + "wordof": 33500, + "chaotic": 33501, + "shorty": 33502, + "eldest": 33503, + "zapp": 33504, + "onceuponatime": 33505, + "flyo": 33506, + "ritos": 33507, + "mikequind": 33508, + "ðŁIJ´": 33509, + "registering": 33510, + ".]": 33511, + "adol": 33512, + "gggg": 33513, + "purge": 33514, + "kidlit": 33515, + "arbor": 33516, + "valves": 33517, + "synagogue": 33518, + "oth": 33519, + "unanimous": 33520, + "verification": 33521, + "darrell": 33522, + "ãģĦ": 33523, + "vanderbilt": 33524, + "tapestry": 33525, + "prosper": 33526, + "diddy": 33527, + "drafting": 33528, + "decep": 33529, + "marquis": 33530, + "stint": 33531, + "michaeljackson": 33532, + "peeled": 33533, + "menus": 33534, + "bbb": 33535, + "scare": 33536, + "email": 33537, + "wrigley": 33538, + "itis": 33539, + "fell": 33540, + "somethin": 33541, + "barra": 33542, + "edgar": 33543, + "dipping": 33544, + "puddle": 33545, + "slade": 33546, + "learner": 33547, + "jalen": 33548, + "ð٧IJ": 33549, + "thedaily": 33550, + "mikequindazzi": 33551, + "jux": 33552, + "iqbal": 33553, + "mckinney": 33554, + "raiser": 33555, + "efan": 33556, + "drone": 33557, + "cato": 33558, + "picket": 33559, + "crowe": 33560, + "latt": 33561, + "uko": 33562, + "giuseppe": 33563, + "hini": 33564, + "synthesi": 33565, + "pontifex": 33566, + "songwriting": 33567, + "tod": 33568, + "switches": 33569, + "dinners": 33570, + "hq": 33571, + "gabrielle": 33572, + "pensacola": 33573, + "circle": 33574, + "exposes": 33575, + "evs": 33576, + "riyadh": 33577, + "promen": 33578, + "ock": 33579, + "saj": 33580, + "citation": 33581, + "brewco": 33582, + "josi": 33583, + "epaper": 33584, + "drif": 33585, + "pointless": 33586, + "tangled": 33587, + "cripp": 33588, + "lineups": 33589, + "fairies": 33590, + "daze": 33591, + "mourn": 33592, + "bladder": 33593, + "salz": 33594, + "burundi": 33595, + "bookmark": 33596, + "thepeople": 33597, + "subsequ": 33598, + "principal": 33599, + "sker": 33600, + "courtney": 33601, + "aoki": 33602, + "racers": 33603, + "adm": 33604, + "moma": 33605, + "criticalrole": 33606, + "houn": 33607, + "shedding": 33608, + "saka": 33609, + "aceous": 33610, + "mckay": 33611, + "husbands": 33612, + "½": 33613, + "meda": 33614, + "accusations": 33615, + "rosel": 33616, + "ncis": 33617, + "witnessing": 33618, + "orama": 33619, + "gods": 33620, + "hilton": 33621, + "elman": 33622, + "ÃŃn": 33623, + "megap": 33624, + "craven": 33625, + "announcer": 33626, + "criteri": 33627, + "sheffieldissuper": 33628, + "militant": 33629, + "consul": 33630, + "hooded": 33631, + "abyss": 33632, + "bx": 33633, + "madam": 33634, + "locu": 33635, + "maryam": 33636, + "manicure": 33637, + "gratis": 33638, + "actresses": 33639, + "rosario": 33640, + "thisdayin": 33641, + "kingly": 33642, + "gnome": 33643, + "celine": 33644, + "rous": 33645, + "heel": 33646, + "lilac": 33647, + "vishal": 33648, + "abh": 33649, + "thorns": 33650, + "sls": 33651, + "neal": 33652, + "constructing": 33653, + "beren": 33654, + "slang": 33655, + "mains": 33656, + "farra": 33657, + "sarko": 33658, + "paige": 33659, + "guiller": 33660, + "lala": 33661, + "iceberg": 33662, + "noun": 33663, + "planners": 33664, + "ummm": 33665, + "ouses": 33666, + "illary": 33667, + "maan": 33668, + "boxing": 33669, + "zipper": 33670, + "srinagar": 33671, + "miguel": 33672, + "ostr": 33673, + "mpo": 33674, + "responsibly": 33675, + "lanterns": 33676, + "appliance": 33677, + "xb": 33678, + "grenade": 33679, + 
"neglect": 33680, + "dysle": 33681, + "hammock": 33682, + "nectar": 33683, + "witcher": 33684, + "rgv": 33685, + "dience": 33686, + "serbian": 33687, + "seeded": 33688, + "cruz": 33689, + "bish": 33690, + "sphe": 33691, + "eq": 33692, + "skyrim": 33693, + "algebra": 33694, + "philately": 33695, + "bungalow": 33696, + "geoff": 33697, + "yves": 33698, + "demanded": 33699, + "considerations": 33700, + "thevamp": 33701, + "pawankalyan": 33702, + "coded": 33703, + "gritty": 33704, + "eruption": 33705, + "seinfeld": 33706, + "unidenti": 33707, + "ëĭĪ": 33708, + "worm": 33709, + "acus": 33710, + "seung": 33711, + "dung": 33712, + "roland": 33713, + "sud": 33714, + "divisions": 33715, + "ablanc": 33716, + "shortest": 33717, + "jf": 33718, + "poun": 33719, + "plantbased": 33720, + "beto": 33721, + "tougher": 33722, + "mco": 33723, + "donet": 33724, + "markus": 33725, + "vfl": 33726, + "ðŁıł": 33727, + "opening": 33728, + "coward": 33729, + "cabernet": 33730, + "oxi": 33731, + "burlesque": 33732, + "sandra": 33733, + "sumo": 33734, + "consist": 33735, + "thot": 33736, + "cayman": 33737, + "motorola": 33738, + "gutierrez": 33739, + "dslr": 33740, + "yw": 33741, + "nobel": 33742, + "novice": 33743, + "momsdemand": 33744, + "grunge": 33745, + "spor": 33746, + "dcc": 33747, + "presses": 33748, + "slist": 33749, + "allotment": 33750, + "vocational": 33751, + "ftc": 33752, + "puja": 33753, + "loven": 33754, + "uttarak": 33755, + "tandem": 33756, + "shep": 33757, + "comedians": 33758, + "anatom": 33759, + "cantwait": 33760, + "healthyeating": 33761, + "westside": 33762, + "margins": 33763, + "chiang": 33764, + "asbestos": 33765, + "stupidity": 33766, + "problematic": 33767, + "fitbit": 33768, + ":$": 33769, + "ceilings": 33770, + "shua": 33771, + "protections": 33772, + "biotic": 33773, + "bengali": 33774, + "rests": 33775, + "biennale": 33776, + "timo": 33777, + "culmin": 33778, + "eminent": 33779, + "affection": 33780, + "unbelievably": 33781, + "individually": 33782, + "canvassing": 33783, + "whitt": 33784, + "novasco": 33785, + "chinson": 33786, + "hpe": 33787, + "gow": 33788, + "gloucestershire": 33789, + "pao": 33790, + "threshold": 33791, + "chevron": 33792, + "sine": 33793, + "wether": 33794, + "ppie": 33795, + "aquino": 33796, + "antwerp": 33797, + "âĸ¬": 33798, + "poon": 33799, + "instaf": 33800, + "equine": 33801, + "cinematography": 33802, + "nbafinals": 33803, + "valiant": 33804, + "kilkenny": 33805, + "terence": 33806, + "systemic": 33807, + "srl": 33808, + "pound": 33809, + "madeira": 33810, + "plough": 33811, + "trecht": 33812, + "mated": 33813, + "mpd": 33814, + "ransomware": 33815, + "phin": 33816, + "liqui": 33817, + "bbce": 33818, + "boomer": 33819, + "istandwith": 33820, + "conju": 33821, + "rte": 33822, + "nara": 33823, + "foolish": 33824, + "dashing": 33825, + "viernes": 33826, + "brite": 33827, + "dau": 33828, + "juniper": 33829, + "aida": 33830, + "younow": 33831, + "razer": 33832, + "dei": 33833, + "repeating": 33834, + "comforting": 33835, + "adjacent": 33836, + "eto": 33837, + "casted": 33838, + "chatur": 33839, + "muer": 33840, + "synth": 33841, + "sanitary": 33842, + "macle": 33843, + "independent": 33844, + "lawful": 33845, + "eerie": 33846, + "hor": 33847, + "ðŁĴŃ": 33848, + "amrit": 33849, + "velo": 33850, + "stationery": 33851, + "muf": 33852, + "maymay": 33853, + "contemplating": 33854, + "elaborate": 33855, + "gregor": 33856, + "dries": 33857, + "accol": 33858, + "à¸ļ": 33859, + "schwarzenegger": 33860, + "illnesses": 33861, + "daybreak": 33862, + "followback": 33863, + 
"collusion": 33864, + "electronic": 33865, + "jovi": 33866, + "hiroshima": 33867, + "taw": 33868, + "homec": 33869, + "micah": 33870, + "quitting": 33871, + "frosting": 33872, + "benfica": 33873, + "heli": 33874, + "sical": 33875, + "piccad": 33876, + "corporate": 33877, + "mentorship": 33878, + "youare": 33879, + "singer": 33880, + "shiva": 33881, + "rune": 33882, + "inger": 33883, + "rium": 33884, + "playable": 33885, + "doop": 33886, + "willow": 33887, + "terre": 33888, + "nip": 33889, + "atd": 33890, + "warbler": 33891, + "professionally": 33892, + "erase": 33893, + "proceed": 33894, + "pedestrians": 33895, + "mischief": 33896, + "bending": 33897, + "alaskan": 33898, + "ckett": 33899, + "mop": 33900, + "ddles": 33901, + "shutter": 33902, + "geared": 33903, + "ateneo": 33904, + "madeline": 33905, + "gations": 33906, + "osha": 33907, + "derick": 33908, + "swild": 33909, + "angry": 33910, + "patents": 33911, + "hunk": 33912, + "decreased": 33913, + "fry": 33914, + "ðŁĴĸðŁĴĸðŁĴĸ": 33915, + "salon": 33916, + "quantities": 33917, + "dario": 33918, + "nigel": 33919, + "kuma": 33920, + "jenn": 33921, + "happye": 33922, + "xxx": 33923, + "rexperience": 33924, + "pros": 33925, + "ausch": 33926, + "relessly": 33927, + "hamburger": 33928, + "fukushima": 33929, + "erne": 33930, + "statec": 33931, + "rend": 33932, + "mayfield": 33933, + "jone": 33934, + "lefty": 33935, + "bernstein": 33936, + "smil": 33937, + "generates": 33938, + "forestation": 33939, + "bandits": 33940, + "tayo": 33941, + "rca": 33942, + "acci": 33943, + "rodrigo": 33944, + "knapp": 33945, + "elovers": 33946, + "vegetation": 33947, + "ural": 33948, + "left": 33949, + "ħï¸ı": 33950, + "worldre": 33951, + "suri": 33952, + "embark": 33953, + "wson": 33954, + "bayou": 33955, + "muller": 33956, + "movers": 33957, + "ðŁķº": 33958, + "presbyter": 33959, + "lf": 33960, + "cree": 33961, + "batb": 33962, + "salam": 33963, + "demonstrations": 33964, + "anec": 33965, + "npc": 33966, + "itics": 33967, + "tography": 33968, + "reinst": 33969, + "thurst": 33970, + "tale": 33971, + "offences": 33972, + "smartcity": 33973, + "brotha": 33974, + "oftheyear": 33975, + "invaluable": 33976, + "earn": 33977, + "ðŁijıðŁı½": 33978, + "kremlin": 33979, + "grady": 33980, + "townfc": 33981, + "guernsey": 33982, + "maha": 33983, + "contagious": 33984, + "drex": 33985, + "been": 33986, + "(£": 33987, + "nativity": 33988, + "ktm": 33989, + "somerhalder": 33990, + "compounds": 33991, + "íķĺ": 33992, + "\"â̦": 33993, + "afg": 33994, + "ottnews": 33995, + "hound": 33996, + "firefly": 33997, + "cilan": 33998, + "donetsk": 33999, + "volunteered": 34000, + "akira": 34001, + "èª": 34002, + "singul": 34003, + "sth": 34004, + "drowned": 34005, + "mando": 34006, + "heir": 34007, + "ðŁİīðŁİĪ": 34008, + "taxis": 34009, + "yuki": 34010, + "veld": 34011, + "kans": 34012, + "elk": 34013, + "rants": 34014, + "hashtag": 34015, + "teng": 34016, + "rog": 34017, + "aat": 34018, + "grub": 34019, + "eber": 34020, + "inindia": 34021, + "colossus": 34022, + "signi": 34023, + "soever": 34024, + "milestones": 34025, + "dero": 34026, + "differential": 34027, + "phuket": 34028, + "mastermind": 34029, + "angh": 34030, + "melani": 34031, + "broker": 34032, + "actorvijay": 34033, + "stunned": 34034, + "continuity": 34035, + "affl": 34036, + "vocal": 34037, + "perennial": 34038, + "fiancé": 34039, + "incomplete": 34040, + "hunts": 34041, + "reissue": 34042, + "dominates": 34043, + "turmeric": 34044, + "roam": 34045, + "rion": 34046, + "bagged": 34047, + "nassau": 34048, + "fut": 34049, + 
"xox": 34050, + "nationaltrust": 34051, + "joye": 34052, + "sano": 34053, + "hearthstone": 34054, + "disrespect": 34055, + "lees": 34056, + "hse": 34057, + "siberian": 34058, + "offee": 34059, + "restock": 34060, + "wolfgang": 34061, + "regan": 34062, + "plano": 34063, + "unwind": 34064, + "repar": 34065, + "mille": 34066, + "],": 34067, + "skull": 34068, + "fatally": 34069, + "conceptual": 34070, + "ðŁĮ²": 34071, + "fé": 34072, + "berto": 34073, + "bms": 34074, + "ua": 34075, + "magna": 34076, + "notredame": 34077, + "lete": 34078, + "laundering": 34079, + "heartwarming": 34080, + "buffett": 34081, + "goat": 34082, + "peabo": 34083, + "windmill": 34084, + "vac": 34085, + "continually": 34086, + "azalea": 34087, + "membrane": 34088, + "cancels": 34089, + "makeyourown": 34090, + "athered": 34091, + "pto": 34092, + "torpe": 34093, + "ðŁĺł": 34094, + "ðŁĴ§": 34095, + "scares": 34096, + "leaking": 34097, + "zet": 34098, + "pixels": 34099, + "aci": 34100, + "khil": 34101, + "marathi": 34102, + "ðŁĻıðŁı½": 34103, + "ula": 34104, + "tamu": 34105, + "chandigarh": 34106, + "zagre": 34107, + "aab": 34108, + "pronounced": 34109, + "aubrey": 34110, + "sander": 34111, + "punta": 34112, + "harlow": 34113, + "icelan": 34114, + "celebratory": 34115, + "sot": 34116, + "unciation": 34117, + "struly": 34118, + "mcdowell": 34119, + "deepika": 34120, + "reminders": 34121, + "mystical": 34122, + "ctc": 34123, + "chatted": 34124, + "sica": 34125, + "bargains": 34126, + "chhat": 34127, + "rubin": 34128, + "mnet": 34129, + "oilandgas": 34130, + "pelican": 34131, + "oat": 34132, + "morality": 34133, + "kour": 34134, + "ih": 34135, + "nuclear": 34136, + "gcu": 34137, + "richer": 34138, + "venezia": 34139, + "mma": 34140, + "leith": 34141, + "accompany": 34142, + "richmond": 34143, + "sportsnet": 34144, + "baahu": 34145, + "smuggling": 34146, + "mmi": 34147, + "ðŁĩ®ðŁĩª": 34148, + "twists": 34149, + "sahib": 34150, + ".....": 34151, + "ambitions": 34152, + "illo": 34153, + "historical": 34154, + "forec": 34155, + "showbiz": 34156, + "ponies": 34157, + "chasers": 34158, + "remodel": 34159, + "willing": 34160, + "princesses": 34161, + "ample": 34162, + "cushions": 34163, + "acles": 34164, + "lotr": 34165, + "dach": 34166, + "anthe": 34167, + "incorporate": 34168, + "newbury": 34169, + "kiri": 34170, + "friedrich": 34171, + "abv": 34172, + "ballers": 34173, + "albert": 34174, + "ðŁijŃ": 34175, + "leti": 34176, + "nanop": 34177, + "cide": 34178, + "analo": 34179, + "nsf": 34180, + "))))": 34181, + "griffiths": 34182, + "valenci": 34183, + "roano": 34184, + "funrun": 34185, + "babysitting": 34186, + "caday": 34187, + "entre": 34188, + "uck": 34189, + "slug": 34190, + "tical": 34191, + "thesims": 34192, + "roar": 34193, + "carney": 34194, + "gam": 34195, + "stowe": 34196, + "fid": 34197, + "bunny": 34198, + "shamrock": 34199, + "pecu": 34200, + "molina": 34201, + "gocougs": 34202, + "contributes": 34203, + "transformation": 34204, + "moy": 34205, + "vaj": 34206, + "severy": 34207, + "antioxidants": 34208, + "thirteen": 34209, + "sightseeing": 34210, + "lj": 34211, + "reversible": 34212, + "oddly": 34213, + "hookah": 34214, + "nouvel": 34215, + "halal": 34216, + "fei": 34217, + "stables": 34218, + "mult": 34219, + "hopped": 34220, + "braids": 34221, + "interchange": 34222, + "ghanaian": 34223, + "wwww": 34224, + "ethno": 34225, + "conjunction": 34226, + "agov": 34227, + "yeti": 34228, + "earthand": 34229, + "tsp": 34230, + "conserve": 34231, + "heirloom": 34232, + "metaphor": 34233, + "woof": 34234, + "torio": 34235, + 
"selfless": 34236, + "nwa": 34237, + "emilia": 34238, + "ylene": 34239, + "yxe": 34240, + "giar": 34241, + "moderating": 34242, + "probz": 34243, + "bfi": 34244, + "neer": 34245, + "dummy": 34246, + "hanukkah": 34247, + "webber": 34248, + "kv": 34249, + "eyebrow": 34250, + "dagger": 34251, + "sump": 34252, + "rages": 34253, + "orkney": 34254, + "tbo": 34255, + "halsey": 34256, + "assignments": 34257, + "tronic": 34258, + "scrib": 34259, + "coon": 34260, + "anwar": 34261, + "#âĢİ": 34262, + "jalape": 34263, + "florida": 34264, + "quaid": 34265, + "hawkeyes": 34266, + "âĻ¡âĻ¡": 34267, + "streetcar": 34268, + "rog": 34269, + "datlantic": 34270, + "granola": 34271, + "unchanged": 34272, + "expectation": 34273, + "Ùĩ": 34274, + "marlin": 34275, + "gummy": 34276, + "ðŁĻıðŁı¾": 34277, + "awarenessmonth": 34278, + "oilpainting": 34279, + "muth": 34280, + "perch": 34281, + "junto": 34282, + "villagers": 34283, + "morg": 34284, + "cheated": 34285, + "webcomic": 34286, + "thefuture": 34287, + "dps": 34288, + "lakings": 34289, + "mentioning": 34290, + "voor": 34291, + "identities": 34292, + "accord": 34293, + "mcgu": 34294, + "lpga": 34295, + "rumour": 34296, + "massively": 34297, + "mpls": 34298, + "healy": 34299, + "date": 34300, + "spoli": 34301, + "revisited": 34302, + "ont": 34303, + "aland": 34304, + "scrutiny": 34305, + "lakeland": 34306, + "blending": 34307, + "": 34308, + "ankara": 34309, + "jamiedor": 34310, + "metabolic": 34311, + "fences": 34312, + "anny": 34313, + "åħ": 34314, + "semicon": 34315, + "oott": 34316, + "spaceship": 34317, + "wacky": 34318, + "leta": 34319, + "apac": 34320, + "shee": 34321, + "inherit": 34322, + "dores": 34323, + "ðŁĩ¨ðŁĩ¦": 34324, + "gente": 34325, + "twick": 34326, + "rims": 34327, + "galve": 34328, + "deville": 34329, + "kingfisher": 34330, + "scorpio": 34331, + "owl": 34332, + "alar": 34333, + "varian": 34334, + "ðŁĹĵ": 34335, + "venetian": 34336, + "stardust": 34337, + "thenorth": 34338, + "qing": 34339, + "harrington": 34340, + "consulate": 34341, + "spectacle": 34342, + "hobbs": 34343, + "turks": 34344, + "greer": 34345, + "mating": 34346, + "ðŁİĢ": 34347, + "ðŁĮĢ": 34348, + "directs": 34349, + "íĭ": 34350, + "pompeo": 34351, + "voiced": 34352, + "laos": 34353, + "tzu": 34354, + "prome": 34355, + "prism": 34356, + "merc": 34357, + "fortunately": 34358, + "bcfc": 34359, + "mcdonnell": 34360, + "notsorry": 34361, + "smiled": 34362, + "tba": 34363, + "forwar": 34364, + "midterm": 34365, + "darby": 34366, + "weinstein": 34367, + "upgrading": 34368, + "wolff": 34369, + "bronco": 34370, + "cabello": 34371, + "ðŁ¥ĩ": 34372, + "fiable": 34373, + "sharpe": 34374, + "battered": 34375, + "sato": 34376, + "mythical": 34377, + "instapic": 34378, + "prepped": 34379, + "enium": 34380, + "espo": 34381, + "diaper": 34382, + "explanations": 34383, + "whopping": 34384, + "ragnar": 34385, + "peel": 34386, + "antibiotic": 34387, + "lacks": 34388, + "harrison": 34389, + "lism": 34390, + "aul": 34391, + "quail": 34392, + "martina": 34393, + "sentencing": 34394, + "scams": 34395, + "didi": 34396, + "tronics": 34397, + "ãħłãħł": 34398, + "goff": 34399, + "zain": 34400, + "paramore": 34401, + "chained": 34402, + "clinton": 34403, + "liff": 34404, + "cottages": 34405, + "emon": 34406, + "reverend": 34407, + "consumer": 34408, + "cean": 34409, + "tany": 34410, + "lumpur": 34411, + "ebay": 34412, + "stool": 34413, + "ðŁĺ»ðŁĺ»": 34414, + "tapro": 34415, + "hath": 34416, + "modernart": 34417, + "justine": 34418, + "proverb": 34419, + "appy": 34420, + "trax": 34421, + "manifest": 
34422, + "ambu": 34423, + "naik": 34424, + "pepp": 34425, + "rsd": 34426, + "merchants": 34427, + "kitchener": 34428, + "shifted": 34429, + "lizz": 34430, + "âĺħâĺħâĺħâĺħ": 34431, + "âĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶ": 34432, + "utopia": 34433, + "tomo": 34434, + "outed": 34435, + "comers": 34436, + "chiropractic": 34437, + "bookclub": 34438, + "cindy": 34439, + "prohibition": 34440, + "seuss": 34441, + "민": 34442, + "thinkin": 34443, + "rrrr": 34444, + "gofund": 34445, + "tack": 34446, + "omb": 34447, + "catastrophic": 34448, + "lingu": 34449, + "guildford": 34450, + "botd": 34451, + "à¥ĭ": 34452, + "planter": 34453, + "^^": 34454, + "wink": 34455, + "kathmandu": 34456, + "stoppers": 34457, + "smoothies": 34458, + "reefs": 34459, + "hind": 34460, + "bellamy": 34461, + "Ħë": 34462, + "wastewater": 34463, + "voor": 34464, + "natl": 34465, + "!]": 34466, + "reel": 34467, + "yap": 34468, + "scooby": 34469, + "workspace": 34470, + "corinthians": 34471, + "blun": 34472, + "obligation": 34473, + "gbbo": 34474, + "dyson": 34475, + "cravings": 34476, + "ellington": 34477, + "dapl": 34478, + "wrexham": 34479, + "earthandclouds": 34480, + "ukrunchat": 34481, + "positioned": 34482, + "kalb": 34483, + "foursquare": 34484, + "jock": 34485, + "impending": 34486, + "evening": 34487, + "athy": 34488, + "proclaimed": 34489, + "cites": 34490, + "annapolis": 34491, + "sani": 34492, + "marth": 34493, + "irl": 34494, + "accommo": 34495, + "kaa": 34496, + "fina": 34497, + "yaa": 34498, + "disper": 34499, + "ecar": 34500, + "bhak": 34501, + "willy": 34502, + "ðŁĺĢðŁĺĢ": 34503, + "mcdermott": 34504, + "moj": 34505, + "generational": 34506, + "usaid": 34507, + "training": 34508, + "lonely": 34509, + "lores": 34510, + "impecc": 34511, + "âĢIJ": 34512, + "beavers": 34513, + "maki": 34514, + "heb": 34515, + "aapl": 34516, + "åı": 34517, + "wolverhampton": 34518, + "leaderboard": 34519, + "meu": 34520, + "cfa": 34521, + "eastern": 34522, + "hur": 34523, + "civilwar": 34524, + "ourage": 34525, + "horned": 34526, + "lehigh": 34527, + "awards": 34528, + "evident": 34529, + "gigab": 34530, + "rous": 34531, + "madel": 34532, + "robyn": 34533, + "urgently": 34534, + "kors": 34535, + "enas": 34536, + "heisman": 34537, + "bambam": 34538, + "fabian": 34539, + "fom": 34540, + "evaluating": 34541, + "assembly": 34542, + "outsourcing": 34543, + "huntsville": 34544, + "ðŁĶª": 34545, + "justified": 34546, + "cashier": 34547, + "spaper": 34548, + "buckeye": 34549, + "analytical": 34550, + "illuminati": 34551, + "autho": 34552, + "oj": 34553, + "shade": 34554, + "geelong": 34555, + "whey": 34556, + "heaton": 34557, + "terribly": 34558, + "elek": 34559, + "uncharted": 34560, + "sdlive": 34561, + "motocross": 34562, + "hermes": 34563, + "darshan": 34564, + "darlington": 34565, + "cashmere": 34566, + "gripping": 34567, + "cilantro": 34568, + "punish": 34569, + "...:": 34570, + "ðŁĴĦ": 34571, + "instance": 34572, + "deri": 34573, + "lobal": 34574, + "mukher": 34575, + "spar": 34576, + "thinker": 34577, + "fremont": 34578, + "compiled": 34579, + "colorado": 34580, + "vigne": 34581, + "smd": 34582, + "whead": 34583, + "village": 34584, + "leek": 34585, + "formulae": 34586, + "tares": 34587, + "persistence": 34588, + "??????": 34589, + "pedago": 34590, + "hez": 34591, + "alzheimers": 34592, + "vulture": 34593, + "offence": 34594, + "isgreat": 34595, + "suffra": 34596, + "kickin": 34597, + "hmmmm": 34598, + "broadway": 34599, + "ï¸ı@": 34600, + "arti": 34601, + "allison": 34602, + "endorses": 34603, + "ryu": 34604, + "lollipop": 34605, + "soybean": 
34606, + "kendall": 34607, + "cera": 34608, + "invade": 34609, + "(ðŁĵ·:": 34610, + "converter": 34611, + "carpets": 34612, + "hobo": 34613, + "frit": 34614, + "peac": 34615, + "esqu": 34616, + "ernan": 34617, + "ouf": 34618, + "anil": 34619, + "differ": 34620, + "ching": 34621, + "brecht": 34622, + "spg": 34623, + "davenport": 34624, + "strava": 34625, + "severn": 34626, + "ngos": 34627, + "storians": 34628, + "fete": 34629, + "paramedic": 34630, + "jhb": 34631, + "alamo": 34632, + "sneaking": 34633, + "goldcoast": 34634, + "roofs": 34635, + "isil": 34636, + "depicted": 34637, + "projections": 34638, + "numb": 34639, + "oss": 34640, + "epi": 34641, + "glucose": 34642, + "zidane": 34643, + "infiniti": 34644, + "íĺĦ": 34645, + "ransom": 34646, + "tonics": 34647, + "falk": 34648, + "gler": 34649, + "outw": 34650, + "ress": 34651, + "weekly": 34652, + "theon": 34653, + "nole": 34654, + "ðŁĩªðŁĩº": 34655, + "volley": 34656, + "summar": 34657, + "negativity": 34658, + "samson": 34659, + "yew": 34660, + "ausvotes": 34661, + "jul": 34662, + "judy": 34663, + "fart": 34664, + "prayed": 34665, + "palate": 34666, + "multicultural": 34667, + "doubleheader": 34668, + "cyclones": 34669, + "pierre": 34670, + "ãģ¨": 34671, + "âĺłï¸ı": 34672, + "rtw": 34673, + "converting": 34674, + "wirral": 34675, + "lari": 34676, + "irrelevant": 34677, + "austinmahone": 34678, + "anche": 34679, + "yaan": 34680, + "sdf": 34681, + "$.": 34682, + "exploding": 34683, + "ultimate": 34684, + "profici": 34685, + "gofundme": 34686, + "cellence": 34687, + "epstein": 34688, + "bullied": 34689, + "septic": 34690, + "த": 34691, + "lumber": 34692, + "cuff": 34693, + "vscocam": 34694, + "plor": 34695, + "ล": 34696, + "seok": 34697, + "roto": 34698, + "venezuelan": 34699, + "sorta": 34700, + "spirited": 34701, + "danielpadilla": 34702, + "teamsisd": 34703, + "radioactive": 34704, + "icelandic": 34705, + "ðŁĴ¤": 34706, + "vere": 34707, + "accommodate": 34708, + "shipp": 34709, + "otter": 34710, + "olina": 34711, + "ego": 34712, + "sula": 34713, + "sanantonio": 34714, + "deas": 34715, + "similarities": 34716, + "âļ¾": 34717, + "yom": 34718, + "broward": 34719, + "å°": 34720, + "cancun": 34721, + "verify": 34722, + "onte": 34723, + "candlelight": 34724, + "ìłķ": 34725, + "infants": 34726, + "azam": 34727, + "ðŁĺ°": 34728, + "leven": 34729, + "unstable": 34730, + "bloomington": 34731, + "xford": 34732, + "contour": 34733, + "yp": 34734, + "innovator": 34735, + "histories": 34736, + "poy": 34737, + "lololol": 34738, + "expires": 34739, + "catalo": 34740, + "billboards": 34741, + "anab": 34742, + "elic": 34743, + "novascotia": 34744, + "faire": 34745, + "ìĿ´": 34746, + "rockwell": 34747, + "grille": 34748, + "aztec": 34749, + "johor": 34750, + "urstruly": 34751, + "firen": 34752, + "dunlop": 34753, + "idle": 34754, + "portman": 34755, + "joes": 34756, + "txhsfb": 34757, + "holm": 34758, + "chamele": 34759, + "underworld": 34760, + "loss": 34761, + "tiem": 34762, + "therapists": 34763, + "pasture": 34764, + "paste": 34765, + "ingnow": 34766, + "vulcan": 34767, + "ragon": 34768, + "larkin": 34769, + "oshi": 34770, + "hoco": 34771, + "childhood": 34772, + "umbrel": 34773, + "successor": 34774, + "kathy": 34775, + "izen": 34776, + "°ï¸ı": 34777, + "shareholders": 34778, + "olga": 34779, + "aib": 34780, + "heap": 34781, + "flaming": 34782, + "rou": 34783, + "airtel": 34784, + "ratt": 34785, + "zane": 34786, + "vow": 34787, + "thorough": 34788, + "snag": 34789, + "parth": 34790, + "unconscious": 34791, + "vey": 34792, + "newrelease": 34793, + 
"ghee": 34794, + "croatian": 34795, + "facilitating": 34796, + "swanson": 34797, + "astoria": 34798, + "tology": 34799, + "mastery": 34800, + "ð٤ij": 34801, + "bilbao": 34802, + "troupe": 34803, + "theori": 34804, + "cheyenne": 34805, + "rott": 34806, + "shoreline": 34807, + "grasso": 34808, + "masterchef": 34809, + "+)": 34810, + "vix": 34811, + "ellenshow": 34812, + "asg": 34813, + "anak": 34814, + "kuya": 34815, + "safarilive": 34816, + "debuting": 34817, + "blum": 34818, + "listener": 34819, + "vins": 34820, + "bookshelf": 34821, + "smartcities": 34822, + "makeyourownlane": 34823, + ";;": 34824, + "ðŁIJ¯": 34825, + "rizz": 34826, + "onward": 34827, + "bulldog": 34828, + "bearish": 34829, + "viruses": 34830, + "frigh": 34831, + "linden": 34832, + "weiser": 34833, + "snt": 34834, + "gona": 34835, + "dresden": 34836, + "flanders": 34837, + "cuk": 34838, + "wheeling": 34839, + "bau": 34840, + "atuesday": 34841, + "surfers": 34842, + "swift": 34843, + "mccall": 34844, + "arbitration": 34845, + "awd": 34846, + "monc": 34847, + "bine": 34848, + "atx": 34849, + "refr": 34850, + "miro": 34851, + "posey": 34852, + "nare": 34853, + "ritter": 34854, + "âģ¦": 34855, + "playbook": 34856, + "blowout": 34857, + "sportsmanship": 34858, + "soooooo": 34859, + "malayalam": 34860, + "grims": 34861, + "burbank": 34862, + "infinity": 34863, + "sargent": 34864, + "oitnb": 34865, + "josephine": 34866, + "skipping": 34867, + "parkin": 34868, + "excursion": 34869, + "seminars": 34870, + "johar": 34871, + "partridge": 34872, + "postgame": 34873, + "llll": 34874, + "blanche": 34875, + "tempting": 34876, + "mna": 34877, + "luka": 34878, + "isers": 34879, + "toffee": 34880, + "barron": 34881, + "hemmings": 34882, + "sae": 34883, + "gohawks": 34884, + "cupid": 34885, + "limbs": 34886, + "conse": 34887, + "uncommon": 34888, + "zada": 34889, + "headshot": 34890, + "soils": 34891, + "pioneer": 34892, + "mamma": 34893, + "semitic": 34894, + "pandey": 34895, + "jamiedornan": 34896, + "splits": 34897, + "vela": 34898, + "soni": 34899, + "raff": 34900, + "tmobile": 34901, + "âŀĸ": 34902, + "prawns": 34903, + "liter": 34904, + "enjoyment": 34905, + "eggplant": 34906, + "tub": 34907, + "cultural": 34908, + "usic": 34909, + "suspicion": 34910, + "sycam": 34911, + "summed": 34912, + "madu": 34913, + "hock": 34914, + "upwards": 34915, + "eyeing": 34916, + "rive": 34917, + "assassins": 34918, + "âĤ¬": 34919, + "outfy": 34920, + "chives": 34921, + "tner": 34922, + "lais": 34923, + "porridge": 34924, + "saddest": 34925, + "wcc": 34926, + "vicki": 34927, + "snails": 34928, + "bizitalk": 34929, + "millan": 34930, + "ðŁĮį": 34931, + "samoa": 34932, + "jing": 34933, + "mikey": 34934, + "guj": 34935, + "chelms": 34936, + "eligibility": 34937, + "armada": 34938, + "throp": 34939, + "surgeries": 34940, + "ãĤ¿": 34941, + "mohawk": 34942, + "exits": 34943, + "mem": 34944, + "islington": 34945, + "cme": 34946, + "landfill": 34947, + "kaitlyn": 34948, + "ðŁİ¼": 34949, + "combinations": 34950, + "tomorrowland": 34951, + "verb": 34952, + "cora": 34953, + "precisely": 34954, + "naom": 34955, + "ðŁĨķ": 34956, + "shrink": 34957, + "softly": 34958, + "mercede": 34959, + "mandel": 34960, + "poodle": 34961, + "ballerina": 34962, + "soph": 34963, + "juxta": 34964, + "yat": 34965, + "aryan": 34966, + "hesitate": 34967, + "lowered": 34968, + "gular": 34969, + "dungeonsand": 34970, + "ronan": 34971, + "myri": 34972, + "spf": 34973, + "menopau": 34974, + "grasp": 34975, + "pathi": 34976, + "feasi": 34977, + "flaw": 34978, + "shistory": 34979, + "steward": 
34980, + "ggle": 34981, + "fayre": 34982, + "clique": 34983, + "credibility": 34984, + "yog": 34985, + "section": 34986, + "musko": 34987, + "seville": 34988, + "nott": 34989, + "calm": 34990, + "mateo": 34991, + "indicted": 34992, + "fiba": 34993, + "byl": 34994, + "lino": 34995, + "ukin": 34996, + "!!#": 34997, + "enigma": 34998, + "sirius": 34999, + "busc": 35000, + "ðŁįĬ": 35001, + "mackerel": 35002, + "psalms": 35003, + "aat": 35004, + "tomorrowspaper": 35005, + "ðŁĺĸ": 35006, + "pfc": 35007, + "...........": 35008, + "shrek": 35009, + "mullet": 35010, + "osh": 35011, + "dangerously": 35012, + "immensely": 35013, + "amur": 35014, + "ðŁįĤ": 35015, + "propor": 35016, + "sya": 35017, + "londonmarathon": 35018, + "above": 35019, + "obligatory": 35020, + "prov": 35021, + "racha": 35022, + "alexis": 35023, + "primary": 35024, + "shh": 35025, + "ethernet": 35026, + "dstv": 35027, + "cougar": 35028, + "unlucky": 35029, + "nil": 35030, + "steakhouse": 35031, + "mela": 35032, + "fcbayern": 35033, + "causeway": 35034, + "catherine": 35035, + "fluorescent": 35036, + "nxt": 35037, + "tokyo": 35038, + "ausp": 35039, + "relegation": 35040, + "quizz": 35041, + "shoreditch": 35042, + "proudtobe": 35043, + "promos": 35044, + "interacting": 35045, + "homebrew": 35046, + "daesh": 35047, + "wpg": 35048, + "steadily": 35049, + "provinces": 35050, + "ballots": 35051, + "iah": 35052, + "alto": 35053, + "<<<": 35054, + "youu": 35055, + "riley": 35056, + "preference": 35057, + "traverse": 35058, + "incense": 35059, + "ammunition": 35060, + "hodges": 35061, + "#@": 35062, + "hailstate": 35063, + "tartan": 35064, + "witchcraft": 35065, + "ventilation": 35066, + "libertarian": 35067, + "!â̦": 35068, + "owes": 35069, + "%!": 35070, + "ongchang": 35071, + "brushing": 35072, + "leic": 35073, + "fiber": 35074, + "underattack": 35075, + "download": 35076, + "expir": 35077, + "hyo": 35078, + "pompey": 35079, + "mcbride": 35080, + "yag": 35081, + "stree": 35082, + "combat": 35083, + "tending": 35084, + "aira": 35085, + "guggen": 35086, + "abra": 35087, + "inna": 35088, + "flips": 35089, + "awal": 35090, + "mach": 35091, + "dollar": 35092, + "inspirations": 35093, + "zum": 35094, + "odu": 35095, + "itty": 35096, + "videogame": 35097, + "aquaman": 35098, + "haru": 35099, + "belfast": 35100, + "jeb": 35101, + "butch": 35102, + "usgs": 35103, + "calculus": 35104, + "goyal": 35105, + "morgen": 35106, + "xfinity": 35107, + "standup": 35108, + "contracep": 35109, + "sabre": 35110, + "nabe": 35111, + "insecure": 35112, + "generously": 35113, + "epitome": 35114, + "lw": 35115, + "tca": 35116, + "narratives": 35117, + "donnell": 35118, + "pandas": 35119, + "bergh": 35120, + "tut": 35121, + "keral": 35122, + "felicity": 35123, + "brampton": 35124, + "quintet": 35125, + "nomore": 35126, + "ðŁĶij": 35127, + "loi": 35128, + "alhamdulil": 35129, + "ðŁĶ¥ðŁĶĹ": 35130, + "stoner": 35131, + "shawl": 35132, + "clinical": 35133, + "brendan": 35134, + "gone": 35135, + "flawed": 35136, + "trippy": 35137, + "jg": 35138, + "allocation": 35139, + "poaching": 35140, + "vevo": 35141, + "mocks": 35142, + "leftist": 35143, + "bonuses": 35144, + "condemned": 35145, + "ability": 35146, + "stating": 35147, + "microbiome": 35148, + "biologist": 35149, + "foryou": 35150, + "wahlberg": 35151, + "ssor": 35152, + "iftar": 35153, + "wul": 35154, + "ÑĦоÑĤ": 35155, + "pomer": 35156, + "meme": 35157, + "verte": 35158, + "trell": 35159, + "trait": 35160, + "inlet": 35161, + "hormones": 35162, + "deliberately": 35163, + "villar": 35164, + "battleship": 35165, + 
"pbl": 35166, + "twenti": 35167, + "hokies": 35168, + "dalail": 35169, + "saya": 35170, + "mayfair": 35171, + "hans": 35172, + "diets": 35173, + "⾨⾨": 35174, + "odin": 35175, + "hotspur": 35176, + "papi": 35177, + "kana": 35178, + "kamp": 35179, + "finna": 35180, + "flotus": 35181, + "tians": 35182, + "unicorns": 35183, + "tribeca": 35184, + "changers": 35185, + "foreground": 35186, + "outa": 35187, + "invaders": 35188, + "gettys": 35189, + "tomorrowspaperstoday": 35190, + "macmillan": 35191, + "handwritten": 35192, + "wfp": 35193, + "ude": 35194, + "stateof": 35195, + "based": 35196, + "âĺģï¸ı": 35197, + "casm": 35198, + "psyched": 35199, + "historians": 35200, + "fold": 35201, + "dda": 35202, + "aggrav": 35203, + "pans": 35204, + "greenway": 35205, + "ausv": 35206, + "ðŁĺ¶": 35207, + "shraddha": 35208, + "index": 35209, + "besti": 35210, + "zimmer": 35211, + "tness": 35212, + "eyeshadow": 35213, + "otte": 35214, + "gots": 35215, + "distributing": 35216, + "promin": 35217, + "yol": 35218, + "acea": 35219, + "tramrahim": 35220, + "hooper": 35221, + "supreme": 35222, + "jammin": 35223, + "intuitive": 35224, + "qualifications": 35225, + "slim": 35226, + "siddi": 35227, + "jayne": 35228, + "tripping": 35229, + "gtx": 35230, + "puns": 35231, + "emanuel": 35232, + "omg": 35233, + "midsummer": 35234, + "into": 35235, + "succulent": 35236, + "rien": 35237, + "newmexico": 35238, + "oor": 35239, + "hooking": 35240, + "inf": 35241, + "ð٤Ŀ": 35242, + "flirting": 35243, + "nahi": 35244, + "gfriend": 35245, + "tps": 35246, + "helix": 35247, + "zs": 35248, + "onie": 35249, + "ctf": 35250, + "kris": 35251, + "irresistible": 35252, + "flap": 35253, + "ðŁijıðŁı»ðŁijıðŁı»": 35254, + "uswnt": 35255, + "rud": 35256, + "ramps": 35257, + "pinoy": 35258, + "otw": 35259, + "lolz": 35260, + "lowering": 35261, + "favorite": 35262, + "tmc": 35263, + "phrases": 35264, + "hermi": 35265, + "averaging": 35266, + "embr": 35267, + "beno": 35268, + "estuary": 35269, + "sleeve": 35270, + "ribbons": 35271, + "tash": 35272, + "ู": 35273, + "xf": 35274, + "awgs": 35275, + "sunited": 35276, + "breweries": 35277, + "anirud": 35278, + "punches": 35279, + "oldie": 35280, + "ipads": 35281, + "wifey": 35282, + "landlords": 35283, + "dji": 35284, + "gunner": 35285, + "íķ´": 35286, + "texan": 35287, + "exop": 35288, + "cassandra": 35289, + "soff": 35290, + "ðŁļ«": 35291, + "ighton": 35292, + "bakers": 35293, + "awarenessweek": 35294, + "vall": 35295, + "earp": 35296, + "btsbbmas": 35297, + "apologizes": 35298, + "âļĵï¸ı": 35299, + "wasps": 35300, + "statesman": 35301, + "snatch": 35302, + "watchdog": 35303, + "rafi": 35304, + "afterparty": 35305, + "spike": 35306, + "jer": 35307, + "periph": 35308, + "rnc": 35309, + "mull": 35310, + "leen": 35311, + "shies": 35312, + "lieu": 35313, + "urstrulymahesh": 35314, + "merton": 35315, + "desai": 35316, + "shif": 35317, + "ðŁĮ±": 35318, + "pedic": 35319, + "gosling": 35320, + "arranging": 35321, + "wwg": 35322, + "geny": 35323, + "youuu": 35324, + "netflix": 35325, + "ettes": 35326, + "kwi": 35327, + "bernardino": 35328, + "amiga": 35329, + "ب": 35330, + "kashmiri": 35331, + "tings": 35332, + "emeritus": 35333, + "decat": 35334, + "abdomin": 35335, + "dci": 35336, + "phases": 35337, + "djan": 35338, + "beam": 35339, + "opry": 35340, + "ished": 35341, + "theellenshow": 35342, + "thest": 35343, + "habitats": 35344, + "toons": 35345, + "mclaughlin": 35346, + "ripper": 35347, + "microbiology": 35348, + "talaga": 35349, + "clueless": 35350, + "ssu": 35351, + "croche": 35352, + "bromance": 35353, + 
"longevity": 35354, + "zagreb": 35355, + "prevented": 35356, + "trave": 35357, + "spoilt": 35358, + "darryl": 35359, + "migraine": 35360, + "alcat": 35361, + "dddd": 35362, + "viv": 35363, + "serpent": 35364, + "mattel": 35365, + "jama": 35366, + "conquest": 35367, + "îĦ": 35368, + "samsung": 35369, + "presbyterian": 35370, + "ketch": 35371, + "firefox": 35372, + "motif": 35373, + "lec": 35374, + "chopping": 35375, + "cherno": 35376, + "jann": 35377, + "ðŁIJ°": 35378, + "prolon": 35379, + "wakeup": 35380, + "convergence": 35381, + "merseyside": 35382, + "heartbroken": 35383, + "looming": 35384, + "hallucin": 35385, + "maize": 35386, + "communism": 35387, + "moh": 35388, + "twitterstorians": 35389, + "sergey": 35390, + "reseller": 35391, + "favorable": 35392, + "edgy": 35393, + "reiter": 35394, + "malaga": 35395, + "liveme": 35396, + "kahn": 35397, + "pulsion": 35398, + "bigg": 35399, + "kimkardashian": 35400, + "atio": 35401, + "tyranny": 35402, + "ruption": 35403, + "qant": 35404, + "proven": 35405, + "byz": 35406, + "pushaw": 35407, + "kristin": 35408, + "eer": 35409, + "tardis": 35410, + "riz": 35411, + "awaken": 35412, + "miko": 35413, + "undocumented": 35414, + "pathfinder": 35415, + "indirect": 35416, + "resembles": 35417, + "hler": 35418, + "concealed": 35419, + "scandal": 35420, + "reim": 35421, + "dnb": 35422, + "critters": 35423, + "attendant": 35424, + "apprenticeships": 35425, + "aau": 35426, + "screamed": 35427, + "lsu": 35428, + "fah": 35429, + "harbour": 35430, + "edd": 35431, + "batsman": 35432, + "liss": 35433, + "misha": 35434, + "spaniel": 35435, + "itf": 35436, + "advancement": 35437, + "fac": 35438, + "closeup": 35439, + "cecilia": 35440, + "medic": 35441, + "narcissi": 35442, + "lavish": 35443, + "giac": 35444, + "mays": 35445, + "leit": 35446, + "winewednesday": 35447, + "pushaward": 35448, + "letto": 35449, + "currents": 35450, + "bugatti": 35451, + "outine": 35452, + "wj": 35453, + "undo": 35454, + "lerosis": 35455, + "devotional": 35456, + "ðŁij«": 35457, + "onna": 35458, + "faisal": 35459, + "sauna": 35460, + "himachal": 35461, + "amii": 35462, + "à®®": 35463, + "dizzy": 35464, + "screenwriting": 35465, + "phx": 35466, + "spn": 35467, + "icki": 35468, + "agirl": 35469, + "fishes": 35470, + "wbz": 35471, + "pim": 35472, + "boar": 35473, + "acid": 35474, + "!..": 35475, + "rockefeller": 35476, + "nga": 35477, + "drastically": 35478, + "simplify": 35479, + "drumming": 35480, + "autumnal": 35481, + "gurmee": 35482, + "lorde": 35483, + "joann": 35484, + "giveup": 35485, + "bour": 35486, + "amura": 35487, + "derland": 35488, + "simpler": 35489, + "watson": 35490, + "trident": 35491, + "concordia": 35492, + "bellum": 35493, + "brek": 35494, + "dumplings": 35495, + "vion": 35496, + "dungeonsanddragons": 35497, + "spri": 35498, + "ascension": 35499, + "wildatlantic": 35500, + "ust": 35501, + "robins": 35502, + "legion": 35503, + "insist": 35504, + "jaro": 35505, + "guess": 35506, + "sob": 35507, + "bighit": 35508, + "poolside": 35509, + "negotiating": 35510, + "mcgill": 35511, + "bild": 35512, + "technicians": 35513, + "mitigation": 35514, + "ajaydevgn": 35515, + "bto": 35516, + "anten": 35517, + "cosmopolitan": 35518, + "ðŁĺĬðŁĺĬðŁĺĬðŁĺĬ": 35519, + "patrioti": 35520, + "temper": 35521, + "promenade": 35522, + "navajo": 35523, + "namm": 35524, + "wrinkles": 35525, + "dcfc": 35526, + "leach": 35527, + "brunette": 35528, + "rf": 35529, + "coutinho": 35530, + "alti": 35531, + "traditionally": 35532, + "optome": 35533, + "naz": 35534, + "accordingly": 35535, + "recard": 
35536, + "deets": 35537, + "swell": 35538, + "posure": 35539, + "whitening": 35540, + "stranger": 35541, + "illion": 35542, + "hereford": 35543, + "uwu": 35544, + "robber": 35545, + "cotswolds": 35546, + "clen": 35547, + "gorge": 35548, + "namaste": 35549, + "relish": 35550, + "griff": 35551, + "adrenaline": 35552, + "blasio": 35553, + "vale": 35554, + "ê²": 35555, + "tolerate": 35556, + "railminindia": 35557, + "jensen": 35558, + "hoven": 35559, + "ellu": 35560, + "obsole": 35561, + "eisenhower": 35562, + "unidentified": 35563, + "thanniversary": 35564, + "bodyguard": 35565, + "د": 35566, + "idge": 35567, + "schal": 35568, + "stockport": 35569, + "sni": 35570, + "retaining": 35571, + "popo": 35572, + "pixie": 35573, + "olithic": 35574, + "kier": 35575, + "hajj": 35576, + "saz": 35577, + "corbin": 35578, + "!!!!!!!!!!": 35579, + "vit": 35580, + "megat": 35581, + "deh": 35582, + "circuit": 35583, + "affleck": 35584, + "theoretical": 35585, + "hopeless": 35586, + "uab": 35587, + "slump": 35588, + "bice": 35589, + "jammed": 35590, + "letstalk": 35591, + "cani": 35592, + "sideways": 35593, + "labyrinth": 35594, + "refs": 35595, + "hahn": 35596, + "jared": 35597, + "ðŁį¹": 35598, + "jambo": 35599, + "phyl": 35600, + "enhancement": 35601, + "ctr": 35602, + "fullest": 35603, + "seye": 35604, + "doba": 35605, + "choic": 35606, + "yos": 35607, + "cbj": 35608, + "andré": 35609, + "rewatch": 35610, + "prima": 35611, + "doctrine": 35612, + "forgets": 35613, + "uhm": 35614, + "around": 35615, + "ule": 35616, + "artlovers": 35617, + "shiraz": 35618, + "harth": 35619, + "extor": 35620, + "Å¡": 35621, + "unexpectedly": 35622, + "elius": 35623, + "yx": 35624, + "emmy": 35625, + "seac": 35626, + "ðŁijĩðŁijĩðŁijĩ": 35627, + "corrected": 35628, + "combu": 35629, + "womanc": 35630, + "cough": 35631, + "whatson": 35632, + "publishes": 35633, + "diversity": 35634, + "backbone": 35635, + "lockdown": 35636, + "mesmerizing": 35637, + "norte": 35638, + "mab": 35639, + "designer": 35640, + "íģ": 35641, + "ragh": 35642, + "molecules": 35643, + "getoutside": 35644, + "thebeatles": 35645, + "semiconduc": 35646, + "nacho": 35647, + "lunes": 35648, + "hammers": 35649, + "sultan": 35650, + "oon": 35651, + "feren": 35652, + "attach": 35653, + "arqu": 35654, + "uttarakhand": 35655, + "sash": 35656, + ";-": 35657, + "tread": 35658, + "iko": 35659, + "arthur": 35660, + "scandinavian": 35661, + "ration": 35662, + "gael": 35663, + "chargeable": 35664, + "fishy": 35665, + "vma": 35666, + "handbags": 35667, + "chara": 35668, + "ayne": 35669, + "defam": 35670, + "settlers": 35671, + "qadri": 35672, + "palais": 35673, + "inwx": 35674, + "apocalyptic": 35675, + "pooja": 35676, + "aes": 35677, + "atories": 35678, + "proofing": 35679, + "nlp": 35680, + "tsla": 35681, + "vina": 35682, + "lido": 35683, + "deephouse": 35684, + "informatics": 35685, + "vv": 35686, + "ppings": 35687, + "diss": 35688, + "ï": 35689, + "uhuru": 35690, + "stony": 35691, + "betrayed": 35692, + "baff": 35693, + "myra": 35694, + "aspen": 35695, + "allowance": 35696, + "tamara": 35697, + "cif": 35698, + "corbett": 35699, + "serge": 35700, + "digo": 35701, + "ambigu": 35702, + "painters": 35703, + "pcr": 35704, + "pca": 35705, + "noms": 35706, + "loft": 35707, + "vee": 35708, + "opendata": 35709, + "ðŁIJ±": 35710, + "alexandre": 35711, + "identifies": 35712, + "fantasyfootball": 35713, + "reproduction": 35714, + "bromley": 35715, + "wareagle": 35716, + "mmer": 35717, + "pss": 35718, + "cues": 35719, + "ayat": 35720, + "hutchinson": 35721, + "sarac": 35722, + 
"jackman": 35723, + "irah": 35724, + "apink": 35725, + "cols": 35726, + "aussies": 35727, + "execs": 35728, + "dayton": 35729, + "ðŁĻĨ": 35730, + "imv": 35731, + "haram": 35732, + "chuckle": 35733, + "authenticity": 35734, + "ardo": 35735, + "incubator": 35736, + "ส": 35737, + "photoshopped": 35738, + "embraced": 35739, + "fightfor": 35740, + "gorman": 35741, + "zzzz": 35742, + "scholastic": 35743, + "crisps": 35744, + "teapo": 35745, + "midnight": 35746, + "gaine": 35747, + "collier": 35748, + "sate": 35749, + "dette": 35750, + "åŃ": 35751, + "imagine": 35752, + "iff": 35753, + "twili": 35754, + "ification": 35755, + "teatro": 35756, + "norma": 35757, + "esur": 35758, + "emergencies": 35759, + "riseup": 35760, + "ringer": 35761, + "hassle": 35762, + "caitlyn": 35763, + "tranquil": 35764, + "versa": 35765, + "seb": 35766, + "overlook": 35767, + "gini": 35768, + "bogo": 35769, + "sere": 35770, + "mayne": 35771, + "henrik": 35772, + "contaminated": 35773, + "rhapsody": 35774, + "proportion": 35775, + "wildatlanticway": 35776, + "âģ©.": 35777, + "organisers": 35778, + "trane": 35779, + "standard": 35780, + "sperm": 35781, + "launcher": 35782, + "ricci": 35783, + "herts": 35784, + "paperwork": 35785, + "showcased": 35786, + "meryl": 35787, + "pena": 35788, + "pimp": 35789, + "disastrous": 35790, + "^.^": 35791, + "phara": 35792, + "xis": 35793, + "frontal": 35794, + "swirl": 35795, + "spills": 35796, + "swagger": 35797, + "smartwatch": 35798, + "sizzling": 35799, + "saviour": 35800, + "catar": 35801, + "bbcr": 35802, + "refurbishment": 35803, + "dris": 35804, + "citroen": 35805, + "absorb": 35806, + "patriotism": 35807, + "illeg": 35808, + "chromo": 35809, + "freshers": 35810, + "rus": 35811, + "limiting": 35812, + "efish": 35813, + "downed": 35814, + "mandir": 35815, + "hazelnut": 35816, + "pall": 35817, + "macon": 35818, + "disappearing": 35819, + "qualifies": 35820, + "boon": 35821, + "barracks": 35822, + "amine": 35823, + "gendere": 35824, + "ðŁļĺ": 35825, + "jes": 35826, + "ãĥŃ": 35827, + "quito": 35828, + "middleweight": 35829, + "schau": 35830, + "quadru": 35831, + "aciones": 35832, + "limitless": 35833, + "ðŁijĮðŁı½": 35834, + "chman": 35835, + "arav": 35836, + "regulators": 35837, + "itup": 35838, + "battersea": 35839, + "milford": 35840, + "gz": 35841, + "ticking": 35842, + "ghou": 35843, + "crushes": 35844, + "tutu": 35845, + "dreadful": 35846, + "famine": 35847, + "forchange": 35848, + "dalailama": 35849, + "ðŁĴį": 35850, + "whitaker": 35851, + "hashmi": 35852, + "hus": 35853, + "vod": 35854, + "bette": 35855, + "aaah": 35856, + "isoo": 35857, + "ðŁ¥Ī": 35858, + "haar": 35859, + "laine": 35860, + "bv": 35861, + "allday": 35862, + "sprout": 35863, + "indiegames": 35864, + "freebie": 35865, + "greeks": 35866, + "butler": 35867, + "illin": 35868, + "haal": 35869, + "wareness": 35870, + "sima": 35871, + "publichealth": 35872, + "gama": 35873, + "waa": 35874, + "oung": 35875, + "goooo": 35876, + "okinawa": 35877, + "offenders": 35878, + "impose": 35879, + "hoc": 35880, + "youngster": 35881, + "storyteller": 35882, + "scap": 35883, + "fighter": 35884, + "+,": 35885, + "whites": 35886, + "musicmonday": 35887, + "reza": 35888, + "goducks": 35889, + "bria": 35890, + "mium": 35891, + "casper": 35892, + "crumbs": 35893, + "aad": 35894, + "martialarts": 35895, + "chp": 35896, + "rigged": 35897, + "tng": 35898, + "harvested": 35899, + "sak": 35900, + "dojo": 35901, + "millwall": 35902, + "bnw": 35903, + "ocd": 35904, + "historyof": 35905, + "tmr": 35906, + "sirens": 35907, + "fanci": 35908, + 
"caregivers": 35909, + "vira": 35910, + "soni": 35911, + "recurring": 35912, + "acknowledged": 35913, + "ðŁıŁ": 35914, + "ophile": 35915, + "bucky": 35916, + "stressing": 35917, + "rook": 35918, + "digger": 35919, + "vival": 35920, + "sando": 35921, + "fleet": 35922, + "siers": 35923, + "selcaday": 35924, + "refreshed": 35925, + "antifa": 35926, + "aque": 35927, + "polo": 35928, + "disappearance": 35929, + "demb": 35930, + "âĮļï¸ı": 35931, + "rented": 35932, + "berger": 35933, + "gmb": 35934, + "cula": 35935, + "ssal": 35936, + "goody": 35937, + "uhh": 35938, + "marcelo": 35939, + "wanna": 35940, + "software": 35941, + "shopsmall": 35942, + "turtle": 35943, + "tomas": 35944, + "frisco": 35945, + "ðŁĺįðŁĴķ": 35946, + "jimenez": 35947, + "csu": 35948, + "dayz": 35949, + "ando": 35950, + "wynne": 35951, + "choreographer": 35952, + "cervical": 35953, + "trailblazers": 35954, + "edg": 35955, + "zendaya": 35956, + "travelblog": 35957, + "els": 35958, + "wholesome": 35959, + "cog": 35960, + "labout": 35961, + "arney": 35962, + "delle": 35963, + "suisse": 35964, + "masi": 35965, + "inese": 35966, + "ombe": 35967, + "fiddle": 35968, + "reclaim": 35969, + "pau": 35970, + "watcher": 35971, + "slain": 35972, + "berty": 35973, + "optimum": 35974, + "elites": 35975, + "minis": 35976, + "turkey": 35977, + "patrols": 35978, + "gerard": 35979, + "aureli": 35980, + "wildly": 35981, + "waltz": 35982, + "brgy": 35983, + "wob": 35984, + "crest": 35985, + "+++": 35986, + "vez": 35987, + "frosted": 35988, + "davido": 35989, + "thex": 35990, + "paramedics": 35991, + "pinto": 35992, + "hank": 35993, + "dupont": 35994, + "urg": 35995, + "fostering": 35996, + "micropoetry": 35997, + "spectre": 35998, + "---->": 35999, + "neuro": 36000, + "frida": 36001, + "musical": 36002, + "galveston": 36003, + "effic": 36004, + "scape": 36005, + "palazzo": 36006, + "thall": 36007, + "provisional": 36008, + "pjs": 36009, + "aure": 36010, + "ðŁĶľ": 36011, + "mamamoo": 36012, + "kitties": 36013, + "cree": 36014, + "wak": 36015, + "loool": 36016, + "lupus": 36017, + "cnblue": 36018, + "ú": 36019, + "ðŁİ¬": 36020, + "raced": 36021, + "trose": 36022, + "omas": 36023, + "stride": 36024, + "coors": 36025, + "⤵ï¸ı": 36026, + "incomparable": 36027, + "cyril": 36028, + "broader": 36029, + "areclipse": 36030, + "ðŁįĶ": 36031, + "interval": 36032, + "tiru": 36033, + "coworking": 36034, + "waco": 36035, + "aham": 36036, + "abee": 36037, + "flourish": 36038, + "thetimes": 36039, + "olini": 36040, + "kickboxing": 36041, + "lucer": 36042, + "atla": 36043, + "asun": 36044, + "casserole": 36045, + "miaw": 36046, + "lobbying": 36047, + "janice": 36048, + "cirque": 36049, + "reflex": 36050, + "leary": 36051, + "sanatomy": 36052, + "tempest": 36053, + "semb": 36054, + "murdering": 36055, + "usav": 36056, + "robo": 36057, + "onet": 36058, + "pcc": 36059, + "natives": 36060, + "lifeof": 36061, + "saha": 36062, + "ruthless": 36063, + "relates": 36064, + "appetizer": 36065, + "pyeongchang": 36066, + "nord": 36067, + "eru": 36068, + "athing": 36069, + "ugly": 36070, + "plying": 36071, + "brance": 36072, + "organise": 36073, + "kendra": 36074, + "dato": 36075, + "cheeses": 36076, + "parma": 36077, + "burnout": 36078, + "astra": 36079, + "pretoria": 36080, + "adjustment": 36081, + "uku": 36082, + "slo": 36083, + "liken": 36084, + "favors": 36085, + "clive": 36086, + "beets": 36087, + "snowdonia": 36088, + "gotv": 36089, + "syn": 36090, + "openhouse": 36091, + "pani": 36092, + "portrayed": 36093, + "slated": 36094, + "mecca": 36095, + "renal": 36096, + 
"supportsmallstreamers": 36097, + "staffs": 36098, + "dao": 36099, + "biker": 36100, + "viktor": 36101, + "titus": 36102, + "admired": 36103, + "ðŁĵ±": 36104, + "hurrican": 36105, + "heats": 36106, + "glory": 36107, + "photogenic": 36108, + "meri": 36109, + "depor": 36110, + "burnham": 36111, + "orangu": 36112, + "djing": 36113, + "impressionism": 36114, + "ignition": 36115, + "cai": 36116, + "wynn": 36117, + "depe": 36118, + "coveted": 36119, + "collagen": 36120, + "saus": 36121, + "ornam": 36122, + "administrators": 36123, + "sson": 36124, + "nhpolitics": 36125, + "hahahahahahahaha": 36126, + "aspirations": 36127, + "rgb": 36128, + "swollen": 36129, + "sowe": 36130, + "scr": 36131, + "divergent": 36132, + "houghton": 36133, + "hanoi": 36134, + "dory": 36135, + "niki": 36136, + "landry": 36137, + "bcci": 36138, + "ðŁijĮðŁijĮ": 36139, + "ismail": 36140, + "tripod": 36141, + "herd": 36142, + "bhatt": 36143, + "dressage": 36144, + "tabby": 36145, + "inguish": 36146, + "huron": 36147, + "à³į": 36148, + "Ãł": 36149, + "todas": 36150, + "evangelical": 36151, + "chords": 36152, + "stjohn": 36153, + "sloppy": 36154, + "martyr": 36155, + "facebook": 36156, + "alight": 36157, + "sensei": 36158, + "kathniel": 36159, + "rites": 36160, + "zione": 36161, + "uo": 36162, + "revelations": 36163, + "weightlifting": 36164, + "pano": 36165, + "ncwx": 36166, + "acton": 36167, + "à®ķ": 36168, + "ز": 36169, + "soma": 36170, + "à¸Ĺ": 36171, + "respecting": 36172, + "marche": 36173, + "foreman": 36174, + "betty": 36175, + "kik": 36176, + "shibu": 36177, + "poon": 36178, + "argyle": 36179, + "kswx": 36180, + "etz": 36181, + "marbella": 36182, + "brackets": 36183, + "standby": 36184, + "fireside": 36185, + "defiance": 36186, + "vex": 36187, + "britannia": 36188, + "inhabit": 36189, + "appoint": 36190, + "piyush": 36191, + "leash": 36192, + "sciento": 36193, + "flask": 36194, + "senna": 36195, + ">:": 36196, + "atroc": 36197, + "sanderson": 36198, + "idlib": 36199, + "dhanush": 36200, + "ðŁĺĻ": 36201, + "enthr": 36202, + "hitch": 36203, + "dedly": 36204, + "alley": 36205, + "dork": 36206, + "mondo": 36207, + "cuddly": 36208, + "missin": 36209, + "yesss": 36210, + "nighting": 36211, + "jpn": 36212, + "wary": 36213, + "umpire": 36214, + "maz": 36215, + "ê³": 36216, + "babs": 36217, + "ĭãģ": 36218, + "stanford": 36219, + "possessed": 36220, + "exceeded": 36221, + "ðŁĶ¶": 36222, + "wallart": 36223, + "trap": 36224, + "jil": 36225, + "hibis": 36226, + "spying": 36227, + "scribe": 36228, + "khalil": 36229, + "translator": 36230, + "lumb": 36231, + "dized": 36232, + "chc": 36233, + "supervision": 36234, + "shutter": 36235, + "jag": 36236, + "_*": 36237, + "yesterdays": 36238, + "msf": 36239, + "hihi": 36240, + "gonzaga": 36241, + "gillespie": 36242, + "vivek": 36243, + "ecstatic": 36244, + "thismorning": 36245, + "chus": 36246, + "edes": 36247, + "stoned": 36248, + "bees": 36249, + "ðŁĩ¹ðŁĩ": 36250, + "turin": 36251, + "hover": 36252, + "atrics": 36253, + "stern": 36254, + "samheughan": 36255, + "autism": 36256, + "miya": 36257, + "eyewitness": 36258, + "writings": 36259, + "traveltips": 36260, + "chutney": 36261, + "pxrtg": 36262, + "kenyans": 36263, + "mystic": 36264, + "krit": 36265, + "/$": 36266, + "redhead": 36267, + "worldly": 36268, + "amus": 36269, + "opla": 36270, + "leve": 36271, + "gabbana": 36272, + "seen": 36273, + "oclock": 36274, + "ganga": 36275, + "keenan": 36276, + "scent": 36277, + "oldies": 36278, + "gogreen": 36279, + "cornerstone": 36280, + "comply": 36281, + "concours": 36282, + "ðŁİ¶ðŁİ¶": 36283, + 
"haan": 36284, + "confis": 36285, + "awson": 36286, + "cleop": 36287, + "îĢ": 36288, + "suzu": 36289, + "sauté": 36290, + "algar": 36291, + "subscriber": 36292, + "esteemed": 36293, + "ãĤ¤ãĥ": 36294, + "worthwhile": 36295, + "melrose": 36296, + "flock": 36297, + "brightly": 36298, + "violinist": 36299, + "pere": 36300, + "slipping": 36301, + "andco": 36302, + "sigh": 36303, + "havan": 36304, + "culo": 36305, + "msa": 36306, + "fibrosis": 36307, + "matilda": 36308, + "rafting": 36309, + "award": 36310, + "ëª": 36311, + "mmmm": 36312, + "geaux": 36313, + "steiner": 36314, + "sinn": 36315, + "helpers": 36316, + "beetles": 36317, + "aimee": 36318, + "taiwan": 36319, + "pistachio": 36320, + "macbeth": 36321, + "mzan": 36322, + "descendants": 36323, + "onsale": 36324, + "inr": 36325, + "ilm": 36326, + "grouse": 36327, + "saig": 36328, + "mow": 36329, + "bigre": 36330, + "adjustments": 36331, + "tula": 36332, + "mathew": 36333, + "translates": 36334, + "muh": 36335, + "bollah": 36336, + "ðŁĴĽðŁĴĻ": 36337, + "amores": 36338, + "abouts": 36339, + "bombshell": 36340, + "blaster": 36341, + "xavi": 36342, + "sns": 36343, + "kroger": 36344, + "gather": 36345, + "eradic": 36346, + "daft": 36347, + "chemo": 36348, + "benches": 36349, + "ðŁĩ©ðŁĩ": 36350, + "utv": 36351, + "oura": 36352, + "nko": 36353, + "gatorade": 36354, + "biafra": 36355, + "okstate": 36356, + "imdanielpadilla": 36357, + "domains": 36358, + "openingday": 36359, + "kiddo": 36360, + "doi": 36361, + "rice": 36362, + "daycare": 36363, + "macmillan": 36364, + "bathurst": 36365, + "cheerleading": 36366, + "ð٦ģ": 36367, + "cashback": 36368, + "kwon": 36369, + "hobbies": 36370, + "exempl": 36371, + "riesling": 36372, + "âļª": 36373, + "agles": 36374, + "nys": 36375, + "everything": 36376, + "navis": 36377, + "addi": 36378, + "magnesium": 36379, + "facelift": 36380, + "arkham": 36381, + "grandes": 36382, + "extremist": 36383, + "donat": 36384, + "vitality": 36385, + "pumpkin": 36386, + "betta": 36387, + "sltd": 36388, + "artisan": 36389, + "liby": 36390, + "peaked": 36391, + "ahhhhh": 36392, + "maryam": 36393, + "assim": 36394, + "unsc": 36395, + "mente": 36396, + "alaya": 36397, + "lowers": 36398, + "aras": 36399, + "griev": 36400, + "leip": 36401, + "grati": 36402, + "crises": 36403, + "sprints": 36404, + "execute": 36405, + "wto": 36406, + "msd": 36407, + "magical": 36408, + "reviewer": 36409, + "sparkles": 36410, + "jukebox": 36411, + "ðŁĺĤâĿ¤ï¸ı": 36412, + "payback": 36413, + "licenses": 36414, + "dunkin": 36415, + "belt": 36416, + "lakewood": 36417, + "hateful": 36418, + "budgets": 36419, + "revamped": 36420, + "pherson": 36421, + "kyiv": 36422, + "wentworth": 36423, + "rosen": 36424, + "cruise": 36425, + "giggle": 36426, + "defstar": 36427, + "assassinscre": 36428, + "ymouth": 36429, + "winkle": 36430, + "wfc": 36431, + "bandwagon": 36432, + "bkk": 36433, + "wiring": 36434, + "kearney": 36435, + "southside": 36436, + "petit": 36437, + "!ðŁĺį": 36438, + "nordic": 36439, + "mirza": 36440, + "mugabe": 36441, + "vl": 36442, + "scones": 36443, + "ktv": 36444, + "sandal": 36445, + "duc": 36446, + "malls": 36447, + "ðŁĴŀðŁĴŀ": 36448, + "itc": 36449, + "alay": 36450, + "impair": 36451, + "unrest": 36452, + "floss": 36453, + "cé": 36454, + "abou": 36455, + "varying": 36456, + "museo": 36457, + "server": 36458, + "diya": 36459, + "hibiscus": 36460, + "eroy": 36461, + "merritt": 36462, + "findom": 36463, + "fpp": 36464, + "unusually": 36465, + "gott": 36466, + "contingent": 36467, + "aliaa": 36468, + "ballon": 36469, + "jol": 36470, + "hiked": 
36471, + "zyme": 36472, + "ayr": 36473, + "agn": 36474, + "gaz": 36475, + "periodic": 36476, + "sparty": 36477, + "practising": 36478, + "linton": 36479, + "talis": 36480, + "cypri": 36481, + "womaninbiz": 36482, + "radiodisney": 36483, + "ðŁĮ¼": 36484, + "jumpers": 36485, + "endocr": 36486, + "ðŁļ¨ðŁļ¨": 36487, + "andon": 36488, + "sharapo": 36489, + "mier": 36490, + "masonic": 36491, + "factories": 36492, + "vien": 36493, + "bbers": 36494, + "ìĽIJ": 36495, + "hold": 36496, + "kebab": 36497, + "beak": 36498, + "approached": 36499, + "acmilan": 36500, + "munro": 36501, + "kosher": 36502, + "excellency": 36503, + "negotiation": 36504, + "waltdisneyworld": 36505, + "crouch": 36506, + "teasing": 36507, + "suppression": 36508, + "enya": 36509, + "bce": 36510, + "transformationtuesday": 36511, + "callie": 36512, + "viswas": 36513, + "pgat": 36514, + "icted": 36515, + "endings": 36516, + "escu": 36517, + "recruited": 36518, + "itfc": 36519, + "collaborations": 36520, + "gino": 36521, + "snuck": 36522, + "auschwitz": 36523, + "ifc": 36524, + "xii": 36525, + "kesha": 36526, + "gervais": 36527, + "cloak": 36528, + "xl": 36529, + "saad": 36530, + "probation": 36531, + "precau": 36532, + "macin": 36533, + "anastasi": 36534, + "lek": 36535, + "eazy": 36536, + "daysofcode": 36537, + "mariahcarey": 36538, + "yog": 36539, + "stitched": 36540, + "boyfriends": 36541, + "shar": 36542, + "phile": 36543, + "agu": 36544, + "twinkle": 36545, + "phishing": 36546, + "weekender": 36547, + "icton": 36548, + "gurmeetramrahim": 36549, + "alton": 36550, + "leness": 36551, + "allan": 36552, + "penultimate": 36553, + "krystal": 36554, + "gou": 36555, + "lande": 36556, + "dismant": 36557, + "abusing": 36558, + "norse": 36559, + "paterson": 36560, + "edmun": 36561, + "apan": 36562, + "xiumin": 36563, + "skel": 36564, + "catwalk": 36565, + "react": 36566, + "walled": 36567, + "tangle": 36568, + "bryn": 36569, + "veto": 36570, + "supermoon": 36571, + "casablanc": 36572, + "appreciates": 36573, + "skid": 36574, + "both": 36575, + "catalina": 36576, + "eleague": 36577, + "cybermonday": 36578, + "cautious": 36579, + "ð٤ĵ": 36580, + "novo": 36581, + "hampton": 36582, + "haye": 36583, + "josef": 36584, + "varan": 36585, + "lobos": 36586, + "roanoke": 36587, + "orphans": 36588, + "ttin": 36589, + "squads": 36590, + "ishqbaaaz": 36591, + "blackpanther": 36592, + "etu": 36593, + "ksh": 36594, + "crumble": 36595, + "cessna": 36596, + "relieved": 36597, + "scully": 36598, + "pollinators": 36599, + "explorecanada": 36600, + "kies": 36601, + "kamloops": 36602, + "kiran": 36603, + "primal": 36604, + "settlements": 36605, + "hotspot": 36606, + "brainstorming": 36607, + "cedric": 36608, + "biennial": 36609, + "shant": 36610, + "âĻ¡âĻ¡âĻ¡": 36611, + "doon": 36612, + "hearn": 36613, + "walkway": 36614, + "fem": 36615, + "veal": 36616, + "deportation": 36617, + "toxins": 36618, + "eliminating": 36619, + "descending": 36620, + "bythe": 36621, + "blasphe": 36622, + "hasta": 36623, + "complement": 36624, + "ascent": 36625, + "riga": 36626, + "provost": 36627, + "âĸª": 36628, + "weeping": 36629, + "antisemitism": 36630, + "employee": 36631, + "unearthed": 36632, + "pino": 36633, + "natalie": 36634, + "blad": 36635, + "angola": 36636, + "lockheed": 36637, + "inian": 36638, + "agr": 36639, + "nister": 36640, + "impala": 36641, + "mke": 36642, + "fanatic": 36643, + "âĺħâĺħ": 36644, + "ðŁij¸": 36645, + "luch": 36646, + "simplified": 36647, + "gallery": 36648, + "economic": 36649, + "cyborg": 36650, + "coni": 36651, + "selma": 36652, + "inception": 
36653, + "koala": 36654, + "dvds": 36655, + "crested": 36656, + "mmor": 36657, + "visible": 36658, + "nsd": 36659, + "ðŁĻĮðŁı½": 36660, + "wunder": 36661, + "refrigerator": 36662, + "reopening": 36663, + "eera": 36664, + "carousel": 36665, + "asp": 36666, + "ballistic": 36667, + "victory": 36668, + "motive": 36669, + "trey": 36670, + "sharapova": 36671, + "sii": 36672, + "monter": 36673, + "intend": 36674, + "westchester": 36675, + "spe": 36676, + "cymb": 36677, + "vidal": 36678, + "llama": 36679, + "univ": 36680, + "finer": 36681, + "craftsmanship": 36682, + "jazzfest": 36683, + "bch": 36684, + "aggio": 36685, + "ncc": 36686, + "lambda": 36687, + "tranquility": 36688, + "cisco": 36689, + "baden": 36690, + "sobbing": 36691, + "ofi": 36692, + "gota": 36693, + "rumored": 36694, + "warmed": 36695, + "orean": 36696, + "acton": 36697, + "marci": 36698, + "ghani": 36699, + "âľĵ": 36700, + "assorted": 36701, + "pembroke": 36702, + "penelope": 36703, + "daf": 36704, + "atty": 36705, + "aimo": 36706, + "pretzel": 36707, + "carnival": 36708, + "thanos": 36709, + "kochi": 36710, + "mersal": 36711, + "hamradio": 36712, + "artwit": 36713, + "casc": 36714, + "guerrilla": 36715, + "kushner": 36716, + "kapp": 36717, + "alise": 36718, + "toddlers": 36719, + "stewardship": 36720, + "otti": 36721, + "terri": 36722, + "tempe": 36723, + "restless": 36724, + "vito": 36725, + "zayed": 36726, + "rspb": 36727, + "pion": 36728, + "hippo": 36729, + "hawthorne": 36730, + "inas": 36731, + "amily": 36732, + "nutcracker": 36733, + "lop": 36734, + "dali": 36735, + "tropic": 36736, + "ðŁ¤ł": 36737, + "ulo": 36738, + "jaredle": 36739, + "pyrene": 36740, + "paleo": 36741, + "usair": 36742, + "mould": 36743, + "itated": 36744, + "genetically": 36745, + "biomass": 36746, + "ðŁĩ³ðŁĩ±": 36747, + "dodd": 36748, + "practiced": 36749, + "monarchs": 36750, + "unmanned": 36751, + "mbuhari": 36752, + "amal": 36753, + "photogra": 36754, + "kool": 36755, + "brendon": 36756, + "juices": 36757, + "cure": 36758, + "worldbank": 36759, + "pointers": 36760, + "ðŁĴĿ": 36761, + "turf": 36762, + "leds": 36763, + "borussia": 36764, + "baptism": 36765, + "warwickshire": 36766, + "mounts": 36767, + "gayo": 36768, + "begg": 36769, + "copied": 36770, + "asians": 36771, + "kg": 36772, + "modernist": 36773, + "gid": 36774, + "frontman": 36775, + "concentrated": 36776, + "yt": 36777, + "scavenger": 36778, + "ironically": 36779, + "adic": 36780, + "psn": 36781, + "ðŁ¥ī": 36782, + "culturally": 36783, + "yuv": 36784, + "macarthur": 36785, + "fertilizer": 36786, + "bewithyou": 36787, + "rigor": 36788, + "minors": 36789, + "zoning": 36790, + "âĸł": 36791, + "rir": 36792, + "adolescent": 36793, + "vinny": 36794, + "reng": 36795, + "sandstone": 36796, + "guet": 36797, + "westh": 36798, + "pledged": 36799, + "laced": 36800, + "spide": 36801, + "vai": 36802, + "tycoon": 36803, + "seizure": 36804, + "dup": 36805, + "appalachian": 36806, + "rok": 36807, + "catholics": 36808, + "seychel": 36809, + "possess": 36810, + "lager": 36811, + "jodi": 36812, + "champ": 36813, + "stras": 36814, + "dina": 36815, + "centuri": 36816, + "calder": 36817, + "bluray": 36818, + "ðŁĩ¨ðŁĩ³": 36819, + "modo": 36820, + "annette": 36821, + "youtubers": 36822, + "chaps": 36823, + "angling": 36824, + "labeling": 36825, + "aqui": 36826, + "pkwy": 36827, + "lyle": 36828, + "bisexual": 36829, + "litur": 36830, + "dugout": 36831, + "libby": 36832, + "greysanatomy": 36833, + "substances": 36834, + "augustus": 36835, + "rallying": 36836, + "fidel": 36837, + "ingue": 36838, + "人": 36839, + 
"hallmarkchannel": 36840, + "toothbrush": 36841, + "má": 36842, + "adirond": 36843, + "aggi": 36844, + "ðŁĵį:": 36845, + "crusade": 36846, + "taxation": 36847, + "kz": 36848, + "iver": 36849, + "doubling": 36850, + "roomie": 36851, + "wab": 36852, + "enrolled": 36853, + "azon": 36854, + "aju": 36855, + "grandchildren": 36856, + "asdf": 36857, + "ðŁ¥º": 36858, + "matic": 36859, + "oughton": 36860, + "utilize": 36861, + "ðŁĴ£": 36862, + "ponder": 36863, + "raisin": 36864, + "dysfunction": 36865, + "cobain": 36866, + "butternut": 36867, + "eman": 36868, + "sured": 36869, + "drian": 36870, + "andfriends": 36871, + "withthe": 36872, + "onomy": 36873, + "heineken": 36874, + "bridal": 36875, + "leadership": 36876, + "pyramids": 36877, + "deutschland": 36878, + "jocel": 36879, + "bowel": 36880, + "yqr": 36881, + "horsepower": 36882, + "beacon": 36883, + "ingeni": 36884, + "gradient": 36885, + "fermented": 36886, + "moom": 36887, + "thingy": 36888, + "potassi": 36889, + "wristband": 36890, + "bord": 36891, + "bodied": 36892, + "ðŁĺŃðŁĺį": 36893, + "mapp": 36894, + "kau": 36895, + "cyberpunk": 36896, + "phish": 36897, + "looking": 36898, + "coates": 36899, + "apur": 36900, + "amie": 36901, + "uklabour": 36902, + "atin": 36903, + "gla": 36904, + "adoptable": 36905, + "shelby": 36906, + "villi": 36907, + "riya": 36908, + "mingly": 36909, + "climber": 36910, + "bumblebee": 36911, + "ðŁĺ¸": 36912, + "csd": 36913, + "âĿ¥": 36914, + "hospitalized": 36915, + "cki": 36916, + "hater": 36917, + "chr": 36918, + "retina": 36919, + "ita": 36920, + "fanbase": 36921, + "beatrice": 36922, + "gwyne": 36923, + "goss": 36924, + "fos": 36925, + "favorited": 36926, + "swachhbharat": 36927, + "malade": 36928, + "monmouth": 36929, + "\"[": 36930, + "sivan": 36931, + "shhh": 36932, + "commanding": 36933, + "sainsburys": 36934, + "weed": 36935, + "gman": 36936, + "ssw": 36937, + "reptile": 36938, + "ivy": 36939, + "tropics": 36940, + "rollers": 36941, + "overcast": 36942, + "exposition": 36943, + "masquerade": 36944, + "mancrush": 36945, + "waist": 36946, + "sprinter": 36947, + "sleet": 36948, + "levin": 36949, + "jpg": 36950, + "_(": 36951, + "opel": 36952, + "exploit": 36953, + "apa": 36954, + "powe": 36955, + "wrecking": 36956, + "jongin": 36957, + "orb": 36958, + "erick": 36959, + "bosco": 36960, + "praising": 36961, + "bertr": 36962, + "towing": 36963, + "insecurity": 36964, + "kut": 36965, + "restocked": 36966, + "rrp": 36967, + "prescribed": 36968, + "trafalgar": 36969, + "pert": 36970, + "gases": 36971, + "apprais": 36972, + "ghar": 36973, + "musicals": 36974, + "âĸ¬âĸ¬": 36975, + "mcfad": 36976, + "agony": 36977, + "condition": 36978, + "equip": 36979, + "shik": 36980, + "atravel": 36981, + "ðŁĩ¿ðŁĩ¦": 36982, + "keh": 36983, + "abduction": 36984, + "peoria": 36985, + "wilkins": 36986, + "gms": 36987, + "asd": 36988, + "evi": 36989, + "ðŁĴĹðŁĴĹðŁĴĹ": 36990, + "uz": 36991, + "moc": 36992, + "hallelujah": 36993, + "guadalu": 36994, + "louvre": 36995, + "drawing": 36996, + "gove": 36997, + "phant": 36998, + "frie": 36999, + "webdev": 37000, + "programmer": 37001, + "zable": 37002, + "gamescom": 37003, + "clarify": 37004, + "lith": 37005, + "kinky": 37006, + "âĿ£": 37007, + "labourdoorstep": 37008, + "sonata": 37009, + "juris": 37010, + "maiden": 37011, + "viadu": 37012, + "bucharest": 37013, + "conditioned": 37014, + "capitalist": 37015, + "ude": 37016, + "psb": 37017, + "spca": 37018, + "lulla": 37019, + "foothills": 37020, + "kayo": 37021, + "bond": 37022, + "womb": 37023, + "rounder": 37024, + "cesar": 37025, + 
"bursts": 37026, + "apra": 37027, + "swoon": 37028, + "sabrin": 37029, + "fragrant": 37030, + "clearer": 37031, + "kubrick": 37032, + "climax": 37033, + "journo": 37034, + "agle": 37035, + "ðŁı½âĢįâĻĢï¸ı": 37036, + "pooch": 37037, + "hale": 37038, + "solit": 37039, + "salmon": 37040, + "organisms": 37041, + "bronson": 37042, + "arten": 37043, + "hodgson": 37044, + "alove": 37045, + "venture": 37046, + "bbi": 37047, + "aea": 37048, + "ðŁIJ¢": 37049, + "ldn": 37050, + "dnr": 37051, + "ozone": 37052, + "ellas": 37053, + "manny": 37054, + "azzur": 37055, + "unbeat": 37056, + "truffles": 37057, + "thong": 37058, + "mañ": 37059, + "lasers": 37060, + "leye": 37061, + "gettysburg": 37062, + "backpacks": 37063, + "oris": 37064, + "maison": 37065, + "crawling": 37066, + "labra": 37067, + "cling": 37068, + "dragging": 37069, + "steal": 37070, + "doubt": 37071, + "devan": 37072, + "ckers": 37073, + "agentsof": 37074, + "photobomb": 37075, + "elonmusk": 37076, + "aboy": 37077, + "distances": 37078, + "storyline": 37079, + "spi": 37080, + "northan": 37081, + "europeans": 37082, + "whale": 37083, + "serpent": 37084, + "ðŁļ²": 37085, + "fior": 37086, + "trit": 37087, + "oxo": 37088, + "awarding": 37089, + "classmate": 37090, + "sufc": 37091, + "smartest": 37092, + "riches": 37093, + "prk": 37094, + "bigfoot": 37095, + "armb": 37096, + "bipolar": 37097, + "dwelling": 37098, + "omars": 37099, + "kwan": 37100, + "grime": 37101, + "meng": 37102, + "frederick": 37103, + "navarro": 37104, + "sorrynotsorry": 37105, + "jaredleto": 37106, + "pave": 37107, + "slack": 37108, + "barnsley": 37109, + "attar": 37110, + "eviction": 37111, + "accumulation": 37112, + "oir": 37113, + "catchy": 37114, + "welter": 37115, + "vikas": 37116, + "hassee": 37117, + "nikita": 37118, + "moyes": 37119, + "mathews": 37120, + "shiv": 37121, + "gatwick": 37122, + "profiling": 37123, + "companions": 37124, + "marrake": 37125, + "antics": 37126, + "ðŁĻĮðŁĻĮðŁĻĮ": 37127, + "sese": 37128, + "boi": 37129, + "bartlett": 37130, + "poisonous": 37131, + "abuses": 37132, + "ymm": 37133, + "kampala": 37134, + "guggenheim": 37135, + "imvkohli": 37136, + "dolom": 37137, + "bree": 37138, + "throttle": 37139, + "gareth": 37140, + "fitzpatrick": 37141, + "unya": 37142, + "parad": 37143, + "margot": 37144, + "jnr": 37145, + "wea": 37146, + "potassium": 37147, + "pnc": 37148, + "disguised": 37149, + "crash": 37150, + "renergy": 37151, + "illic": 37152, + "coupled": 37153, + "niels": 37154, + "ciones": 37155, + "æĹ¥": 37156, + "iment": 37157, + "despicable": 37158, + "dye": 37159, + "whatcha": 37160, + "connections": 37161, + "paralympics": 37162, + "gauntlet": 37163, + "waitrose": 37164, + "suicidal": 37165, + "starship": 37166, + "vapor": 37167, + "stou": 37168, + "lawmaker": 37169, + "cooled": 37170, + "simo": 37171, + "theno": 37172, + "offroad": 37173, + "jaden": 37174, + "basque": 37175, + "vicky": 37176, + "lukaku": 37177, + "centro": 37178, + "trish": 37179, + "strategist": 37180, + "medications": 37181, + "horst": 37182, + "bfc": 37183, + "grail": 37184, + "sharply": 37185, + "aditya": 37186, + "tomb": 37187, + "kaufman": 37188, + "tripad": 37189, + "samba": 37190, + "pastoral": 37191, + "britney": 37192, + "sagan": 37193, + "hillside": 37194, + "masons": 37195, + "sara": 37196, + "zone": 37197, + "xu": 37198, + "totes": 37199, + "robbie": 37200, + "appen": 37201, + "montag": 37202, + "dero": 37203, + "shortfilm": 37204, + "charismatic": 37205, + "tators": 37206, + "kiba": 37207, + "andri": 37208, + "alarming": 37209, + "splitting": 37210, + 
"icar": 37211, + "thug": 37212, + "scariest": 37213, + "sylvester": 37214, + "anan": 37215, + "utrecht": 37216, + "adifference": 37217, + "meade": 37218, + "buster": 37219, + "airstrikes": 37220, + "cuffs": 37221, + "accountants": 37222, + "ðŁĺ¡ðŁĺ¡": 37223, + "newt": 37224, + "bott": 37225, + "issuing": 37226, + "clancy": 37227, + "wwenetwork": 37228, + "kyuhyun": 37229, + "resemble": 37230, + "pajamas": 37231, + "sink": 37232, + "kinney": 37233, + "sulph": 37234, + "ork": 37235, + "lies": 37236, + "lagh": 37237, + "orton": 37238, + "rahul": 37239, + "dsc": 37240, + "wewill": 37241, + "ream": 37242, + "colloqui": 37243, + "sharia": 37244, + "hectic": 37245, + "sarcasm": 37246, + "lander": 37247, + "tmz": 37248, + "endorf": 37249, + "roz": 37250, + "hammered": 37251, + "fris": 37252, + "wadi": 37253, + "popefrancis": 37254, + "heit": 37255, + "flashlight": 37256, + "unborn": 37257, + "opes": 37258, + "holiness": 37259, + "ðŁIJ¦": 37260, + "nacht": 37261, + "imsa": 37262, + "gracing": 37263, + "bjp": 37264, + "verts": 37265, + "csc": 37266, + "homeowner": 37267, + "aque": 37268, + "bigotry": 37269, + "annie": 37270, + "bagh": 37271, + "âĿ¤ï¸ıðŁĺį": 37272, + "cari": 37273, + "thomp": 37274, + "disposable": 37275, + "cardiology": 37276, + "patented": 37277, + "hhhhhh": 37278, + "ldr": 37279, + "stephenson": 37280, + "crores": 37281, + "fanning": 37282, + "climat": 37283, + "ðŁijįðŁijįðŁijį": 37284, + "ðŁijįðŁı¼": 37285, + "aeron": 37286, + "piccadilly": 37287, + "bankrupt": 37288, + "silvia": 37289, + "employ": 37290, + "donny": 37291, + "commenting": 37292, + "screenwriter": 37293, + "iota": 37294, + "cean": 37295, + "ancers": 37296, + "tuan": 37297, + "streetwear": 37298, + "य": 37299, + "skine": 37300, + "espa": 37301, + "asif": 37302, + "osce": 37303, + "sheppard": 37304, + "morecam": 37305, + "bottle": 37306, + "ders": 37307, + "oracle": 37308, + "googleplay": 37309, + "averaged": 37310, + "edmonton": 37311, + "stephan": 37312, + "sisterhood": 37313, + "crusted": 37314, + "staggering": 37315, + "methodology": 37316, + "congresswoman": 37317, + "cabo": 37318, + "triggers": 37319, + "milky": 37320, + "glide": 37321, + "toothpaste": 37322, + "roommates": 37323, + "nuff": 37324, + "guam": 37325, + "sprinkles": 37326, + "alternative": 37327, + "watfordfc": 37328, + "uoft": 37329, + "haley": 37330, + "contacted": 37331, + "bundy": 37332, + "prostitu": 37333, + "ghar": 37334, + "preston": 37335, + "onsite": 37336, + "hilar": 37337, + "gts": 37338, + "catt": 37339, + "hampstead": 37340, + "??!": 37341, + "ðŁĩ§ðŁĩ": 37342, + "bbcqt": 37343, + "alessandro": 37344, + "resist": 37345, + "maidan": 37346, + "tko": 37347, + "shading": 37348, + "pinup": 37349, + "gallo": 37350, + "sinu": 37351, + "atec": 37352, + "funk": 37353, + "aclu": 37354, + "strides": 37355, + "rhyme": 37356, + "wetland": 37357, + "bbcspringwatch": 37358, + "tins": 37359, + "wildcard": 37360, + "stour": 37361, + "flamenco": 37362, + "paula": 37363, + "ontology": 37364, + "gangsta": 37365, + "amade": 37366, + "ãĤ«": 37367, + "tbs": 37368, + "skeletal": 37369, + "runner": 37370, + "jardin": 37371, + "harrier": 37372, + "hunted": 37373, + "zhen": 37374, + "believeinfilm": 37375, + "demean": 37376, + "auditi": 37377, + "restart": 37378, + "chondri": 37379, + "âĿ¤ï¸ıðŁĴĻ": 37380, + "mclaren": 37381, + "gab": 37382, + "shum": 37383, + "ausa": 37384, + "lewisham": 37385, + "ypg": 37386, + "kjv": 37387, + "furnished": 37388, + "doro": 37389, + "bonded": 37390, + "morty": 37391, + "latitude": 37392, + "_)": 37393, + "lova": 37394, + 
"waterways": 37395, + "vinai": 37396, + "shorth": 37397, + "drunk": 37398, + "cay": 37399, + "ayana": 37400, + "kaplan": 37401, + "cappuccino": 37402, + "spro": 37403, + "lifeboat": 37404, + "hasbro": 37405, + "spolice": 37406, + "toron": 37407, + "doing": 37408, + "damn": 37409, + "shree": 37410, + "fountains": 37411, + "entation": 37412, + "maru": 37413, + "boarder": 37414, + "topless": 37415, + "jada": 37416, + "channing": 37417, + "ulls": 37418, + "enclosure": 37419, + "gibson": 37420, + "fractured": 37421, + "britton": 37422, + "ö": 37423, + "tous": 37424, + "porth": 37425, + "draf": 37426, + "trailing": 37427, + "margate": 37428, + "elife": 37429, + "downward": 37430, + "linn": 37431, + "glades": 37432, + "girlpower": 37433, + "akrish": 37434, + "uki": 37435, + "ronda": 37436, + "tsc": 37437, + "appreciationday": 37438, + "vising": 37439, + "loom": 37440, + "ðŁį³": 37441, + "mexican": 37442, + "argos": 37443, + "yya": 37444, + "jadine": 37445, + "southport": 37446, + "dend": 37447, + "sista": 37448, + "redeem": 37449, + "meng": 37450, + "braxton": 37451, + "antioxidant": 37452, + "skey": 37453, + "mpg": 37454, + "finding": 37455, + "vibration": 37456, + "ceu": 37457, + "khart": 37458, + "dimini": 37459, + "cline": 37460, + "shelly": 37461, + "hines": 37462, + "īï¸ı": 37463, + "topical": 37464, + "nover": 37465, + "maxx": 37466, + "primitive": 37467, + "illustrate": 37468, + "bounds": 37469, + "trenton": 37470, + "jointly": 37471, + "breeders": 37472, + "uchi": 37473, + "wakeupamerica": 37474, + "bada": 37475, + "ðŁĹ£ï¸ı": 37476, + "guacam": 37477, + "spheres": 37478, + "peregr": 37479, + "youthful": 37480, + "lolo": 37481, + "birmin": 37482, + "tly": 37483, + "jeremycorbyn": 37484, + "defects": 37485, + "cosm": 37486, + "arent": 37487, + "vaa": 37488, + "bagels": 37489, + "mediac": 37490, + "coriander": 37491, + "icago": 37492, + "ghaz": 37493, + "abbas": 37494, + "remodel": 37495, + "structuring": 37496, + "pum": 37497, + "outlaw": 37498, + "adani": 37499, + "rbc": 37500, + "gulls": 37501, + "nli": 37502, + "confuse": 37503, + "ðŁijĩðŁı¼": 37504, + "vila": 37505, + "mcnamara": 37506, + "corrections": 37507, + "mughal": 37508, + "seri": 37509, + "regain": 37510, + "ssb": 37511, + "leave": 37512, + "hahahah": 37513, + "grande": 37514, + "distressed": 37515, + "rechargeable": 37516, + "hoa": 37517, + "housed": 37518, + "stil": 37519, + "attributed": 37520, + "opathic": 37521, + "dips": 37522, + "prit": 37523, + "headphone": 37524, + "conclude": 37525, + "pilo": 37526, + "het": 37527, + "utsa": 37528, + "nitin": 37529, + "jem": 37530, + "snippet": 37531, + "tutoring": 37532, + "oper": 37533, + "sunk": 37534, + "ensla": 37535, + "chau": 37536, + "acorn": 37537, + "quintess": 37538, + "rankin": 37539, + "affiliated": 37540, + "ourlives": 37541, + "clint": 37542, + "seater": 37543, + "isaac": 37544, + "bashing": 37545, + "smear": 37546, + "nurse": 37547, + "doodling": 37548, + "\";": 37549, + "saku": 37550, + "atrocities": 37551, + "imam": 37552, + "gfs": 37553, + "violating": 37554, + "commend": 37555, + "bradshaw": 37556, + "erville": 37557, + "billed": 37558, + "bbe": 37559, + "thulhu": 37560, + "iphones": 37561, + "moose": 37562, + "dios": 37563, + "rew": 37564, + "methane": 37565, + "strangely": 37566, + "whisky": 37567, + "tightly": 37568, + "spielberg": 37569, + "radius": 37570, + "noticing": 37571, + "wif": 37572, + "ignati": 37573, + "ifa": 37574, + "apis": 37575, + "wali": 37576, + "haitian": 37577, + "bushes": 37578, + "yz": 37579, + "vl": 37580, + "exited": 37581, + "assel": 
37582, + "truec": 37583, + "domen": 37584, + "asher": 37585, + "inking": 37586, + "newyearseve": 37587, + "hendricks": 37588, + "bati": 37589, + "ìĿ´ì": 37590, + "richter": 37591, + "monsanto": 37592, + "conline": 37593, + "agreat": 37594, + "ðŁ¤¯": 37595, + "masterpieces": 37596, + "arn": 37597, + "roughs": 37598, + "cleve": 37599, + "sev": 37600, + "fashions": 37601, + "toya": 37602, + "shail": 37603, + "copeland": 37604, + "aquari": 37605, + "decals": 37606, + "areyou": 37607, + "yaya": 37608, + "astr": 37609, + "font": 37610, + "mlm": 37611, + "arca": 37612, + "ppor": 37613, + "pollock": 37614, + "xperia": 37615, + "conservation": 37616, + "chainsaw": 37617, + "aggie": 37618, + "?!?!?": 37619, + "sile": 37620, + "shon": 37621, + "ìĹIJ": 37622, + "notebooks": 37623, + "marquette": 37624, + "deus": 37625, + "bbled": 37626, + "spicer": 37627, + "mccabe": 37628, + "norwich": 37629, + "modification": 37630, + "boosted": 37631, + "strum": 37632, + "salesman": 37633, + "bangle": 37634, + "nissan": 37635, + "hezbollah": 37636, + "breasts": 37637, + "aaf": 37638, + "anthus": 37639, + "sker": 37640, + "owed": 37641, + "heros": 37642, + "gifs": 37643, + "fosters": 37644, + "eaters": 37645, + "dues": 37646, + "_/": 37647, + "lymphoma": 37648, + "sfam": 37649, + "megal": 37650, + "afridi": 37651, + "agic": 37652, + "pamp": 37653, + "jealousy": 37654, + "ðŁijĮðŁı¼": 37655, + "calculate": 37656, + "napping": 37657, + "gale": 37658, + "ð٦Ħ": 37659, + "lubbock": 37660, + "assumed": 37661, + "renting": 37662, + "íĥľ": 37663, + "suburb": 37664, + "ãĤ·": 37665, + "technic": 37666, + "ucla": 37667, + "infront": 37668, + "garnet": 37669, + "steroids": 37670, + "striving": 37671, + "howar": 37672, + "mover": 37673, + "leton": 37674, + "bulldo": 37675, + "isin": 37676, + "ciao": 37677, + "snz": 37678, + "forefront": 37679, + "dams": 37680, + "midwife": 37681, + "mawards": 37682, + "clapton": 37683, + "wein": 37684, + "subsidies": 37685, + "sproud": 37686, + "rotherham": 37687, + "phantom": 37688, + "arach": 37689, + "spiel": 37690, + "racket": 37691, + "selamat": 37692, + "noon": 37693, + "lbc": 37694, + "entially": 37695, + "ðŁĴ¸": 37696, + "silve": 37697, + "moud": 37698, + "kinetic": 37699, + "yasi": 37700, + "ðŁİ©": 37701, + "ool": 37702, + "miku": 37703, + "iza": 37704, + "fera": 37705, + "floren": 37706, + "barbershop": 37707, + "groot": 37708, + "zest": 37709, + "nears": 37710, + "stanis": 37711, + "zand": 37712, + "policeman": 37713, + "jurisdic": 37714, + "formations": 37715, + "apparatus": 37716, + "spd": 37717, + "artifact": 37718, + "tosc": 37719, + "motivating": 37720, + "womancrush": 37721, + "redro": 37722, + "diagnostics": 37723, + "raza": 37724, + "outfitters": 37725, + "elxn": 37726, + "dodgy": 37727, + "ryn": 37728, + "shd": 37729, + "orthodon": 37730, + "olde": 37731, + "jayanti": 37732, + "balances": 37733, + "quickest": 37734, + "canton": 37735, + "fridayreads": 37736, + "!*": 37737, + "naa": 37738, + "aak": 37739, + "ðŁĶ·": 37740, + "behaviors": 37741, + "raspberries": 37742, + "ä»": 37743, + "political": 37744, + "camil": 37745, + "åľ": 37746, + "dik": 37747, + "astounding": 37748, + "liebe": 37749, + "novelty": 37750, + "turmoil": 37751, + "sully": 37752, + "springbreak": 37753, + "honouring": 37754, + "ccg": 37755, + "ðŁıĴ": 37756, + "mylittle": 37757, + "kyc": 37758, + "proms": 37759, + "ðŁķĬ": 37760, + "è": 37761, + "bige": 37762, + "avril": 37763, + "ðŁĩµðŁĩ°": 37764, + "marion": 37765, + "asants": 37766, + "surya": 37767, + "octag": 37768, + "lufthan": 37769, + "acron": 37770, + 
"fayetteville": 37771, + "tique": 37772, + "loves": 37773, + "enca": 37774, + "dekalb": 37775, + "taver": 37776, + "devote": 37777, + "auxiliary": 37778, + "johannes": 37779, + "treadmill": 37780, + "ayan": 37781, + "qur": 37782, + "donaldson": 37783, + "cheryl": 37784, + "\"....": 37785, + "sven": 37786, + "kirsty": 37787, + "gunners": 37788, + "radish": 37789, + "oahu": 37790, + "vsky": 37791, + "ible": 37792, + "concourse": 37793, + "bps": 37794, + "eloqu": 37795, + "ashford": 37796, + "tebow": 37797, + "roblox": 37798, + "mada": 37799, + "driving": 37800, + "thday": 37801, + "sproject": 37802, + "mms": 37803, + "banded": 37804, + ".!!": 37805, + "librarians": 37806, + "flannel": 37807, + "intolerance": 37808, + "heral": 37809, + "çµ": 37810, + "nemesis": 37811, + "lista": 37812, + "tarak": 37813, + "crypt": 37814, + "starplus": 37815, + "vishnu": 37816, + "scale": 37817, + "cris": 37818, + "%),": 37819, + "jillian": 37820, + "reggae": 37821, + "pegasus": 37822, + "olin": 37823, + "ipment": 37824, + "manic": 37825, + "lfc": 37826, + "goddard": 37827, + "iteam": 37828, + "parlour": 37829, + "anchors": 37830, + "leeminho": 37831, + "tallahassee": 37832, + "antit": 37833, + "dho": 37834, + "kidney": 37835, + "yash": 37836, + "battled": 37837, + "azad": 37838, + "garis": 37839, + "faulkner": 37840, + "sniff": 37841, + "paparazzi": 37842, + "edm": 37843, + "phyllis": 37844, + "contested": 37845, + "aaay": 37846, + "seca": 37847, + "kton": 37848, + "velve": 37849, + "rainier": 37850, + "forum": 37851, + "tampab": 37852, + "hosp": 37853, + "tractors": 37854, + "oxfordshire": 37855, + "notion": 37856, + "guangzhou": 37857, + "ðŁĺ¯": 37858, + "refill": 37859, + "wednesdaymotivation": 37860, + "slider": 37861, + "mukherjee": 37862, + "pratt": 37863, + "fontaine": 37864, + "alphon": 37865, + "afar": 37866, + "tsi": 37867, + "pesticides": 37868, + "fiends": 37869, + "mocking": 37870, + "braw": 37871, + "transat": 37872, + "doses": 37873, + "cores": 37874, + "homophobia": 37875, + "documenting": 37876, + "zlatan": 37877, + "condoms": 37878, + "sé": 37879, + "sunset": 37880, + "kunst": 37881, + "tonga": 37882, + "ส": 37883, + "vation": 37884, + "spray": 37885, + "chowder": 37886, + "raps": 37887, + "palladium": 37888, + "norwood": 37889, + "musichistory": 37890, + "hooker": 37891, + "sisi": 37892, + "osprey": 37893, + "phys": 37894, + "conceded": 37895, + "bobcat": 37896, + "armad": 37897, + "zeit": 37898, + "ÙĦ": 37899, + "ðŁĺģðŁĺģ": 37900, + "meridi": 37901, + "ðŁĩ·ðŁĩº": 37902, + "cornwall": 37903, + "!),": 37904, + "touchdowns": 37905, + "zeit": 37906, + "chalet": 37907, + "mmm": 37908, + "alche": 37909, + "gorilla": 37910, + "foss": 37911, + "atiku": 37912, + "luminous": 37913, + "ivanka": 37914, + "beek": 37915, + "stares": 37916, + "swiss": 37917, + "âĿ¤âĿ¤âĿ¤âĿ¤": 37918, + "scrubs": 37919, + "meath": 37920, + "gustav": 37921, + "jogging": 37922, + "confetti": 37923, + "asos": 37924, + "ersfc": 37925, + "breitbart": 37926, + "applicable": 37927, + "authored": 37928, + "yaho": 37929, + "hin": 37930, + "displacement": 37931, + "jv": 37932, + "ðŁĮ¹ðŁĮ¹": 37933, + "otc": 37934, + "nonprofits": 37935, + "diecast": 37936, + "gusto": 37937, + "intestin": 37938, + "cages": 37939, + "meen": 37940, + "lukas": 37941, + "mooney": 37942, + "ðŁĺ·": 37943, + "veryday": 37944, + "torah": 37945, + "ission": 37946, + "wac": 37947, + "leveraging": 37948, + "ishable": 37949, + "cuse": 37950, + "lewood": 37951, + "mayan": 37952, + "turntable": 37953, + "juice": 37954, + "trusty": 37955, + "tup": 37956, + 
"etiquette": 37957, + "supervisors": 37958, + "stun": 37959, + "guzman": 37960, + "conferen": 37961, + "rico": 37962, + "feast": 37963, + "backward": 37964, + "polaris": 37965, + "miche": 37966, + "jog": 37967, + "hing": 37968, + "fieldhouse": 37969, + "veling": 37970, + "shocker": 37971, + "escence": 37972, + "ा": 37973, + "vibe": 37974, + "anastasia": 37975, + "marched": 37976, + "killing": 37977, + "Ķë": 37978, + "fett": 37979, + "exoplan": 37980, + "...(": 37981, + "snowday": 37982, + "loh": 37983, + "irani": 37984, + "lakhs": 37985, + "dela": 37986, + "pocaly": 37987, + "boomers": 37988, + "dictatorship": 37989, + "acer": 37990, + "turkeys": 37991, + "quarterfinal": 37992, + "musketeers": 37993, + "ðŁĴĽðŁĴļ": 37994, + "sfx": 37995, + "museumweek": 37996, + "scala": 37997, + "risis": 37998, + "(ðŁĵ·": 37999, + "ãĢĤ": 38000, + "zies": 38001, + "boeh": 38002, + "hues": 38003, + "lusci": 38004, + "dola": 38005, + "impeachtrump": 38006, + "rood": 38007, + "doncaster": 38008, + "torre": 38009, + "heroes": 38010, + "foyer": 38011, + "tari": 38012, + "blurred": 38013, + "kew": 38014, + "frankly": 38015, + "droid": 38016, + "apal": 38017, + "м": 38018, + "yaf": 38019, + "bret": 38020, + "paragu": 38021, + "cacao": 38022, + "ðŁĻĮðŁı¾": 38023, + "rue": 38024, + "headaches": 38025, + "shawty": 38026, + "charley": 38027, + "paler": 38028, + "gowns": 38029, + "correctional": 38030, + "ðŁĺ©ðŁĺ©": 38031, + "breakingbad": 38032, + "oling": 38033, + "dap": 38034, + "endeavour": 38035, + "citadel": 38036, + "trad": 38037, + "incumbent": 38038, + "meditate": 38039, + "footed": 38040, + "ðŁĴµ": 38041, + "shabbat": 38042, + "dayofthe": 38043, + "willem": 38044, + "galway": 38045, + "tored": 38046, + "marriage": 38047, + "fillion": 38048, + "sleeveless": 38049, + "auditor": 38050, + "jinyoung": 38051, + "invincible": 38052, + "kaduna": 38053, + "aand": 38054, + "volcanoes": 38055, + "moneti": 38056, + "indiegogo": 38057, + "buccaneers": 38058, + "ðŁijīðŁı½": 38059, + "ãĢĤ": 38060, + "layton": 38061, + "cuckoo": 38062, + "humber": 38063, + "buzzer": 38064, + "Ïī": 38065, + "tore": 38066, + "strains": 38067, + "stom": 38068, + "paine": 38069, + "swe": 38070, + "duff": 38071, + "zou": 38072, + "simi": 38073, + "lipp": 38074, + "urn": 38075, + "seagu": 38076, + "ðŁĶ®": 38077, + "sundae": 38078, + "hic": 38079, + "ðŁĺ¨": 38080, + "bullpen": 38081, + "uper": 38082, + "flyover": 38083, + "aldridge": 38084, + "globes": 38085, + "alies": 38086, + "kenzie": 38087, + "gees": 38088, + "ycle": 38089, + "splin": 38090, + "magenta": 38091, + "jha": 38092, + "balu": 38093, + "ghorn": 38094, + "tipper": 38095, + "wicker": 38096, + "tasteof": 38097, + "conclave": 38098, + "chale": 38099, + "invasi": 38100, + "cater": 38101, + "dioxide": 38102, + "megab": 38103, + "winn": 38104, + "atp": 38105, + "transformative": 38106, + "nestled": 38107, + "hig": 38108, + "bridging": 38109, + "lilies": 38110, + "cheered": 38111, + "baddest": 38112, + "scrolls": 38113, + "realis": 38114, + "diplo": 38115, + "ðŁĶ«": 38116, + "concession": 38117, + "preferences": 38118, + "explodes": 38119, + "ergon": 38120, + "introductory": 38121, + "ineau": 38122, + "chaf": 38123, + "somes": 38124, + "landrover": 38125, + "spiration": 38126, + "sexy": 38127, + "scorecard": 38128, + "illustrates": 38129, + "soulmate": 38130, + "wien": 38131, + "interdisciplinary": 38132, + "forecasting": 38133, + "entities": 38134, + "glued": 38135, + "enlar": 38136, + "curt": 38137, + "perceptions": 38138, + "bootleg": 38139, + "mire": 38140, + "ashok": 38141, + "vaz": 
38142, + "horne": 38143, + "calle": 38144, + "aculture": 38145, + "theroy": 38146, + "nighttime": 38147, + "ocal": 38148, + "characterdesign": 38149, + "armist": 38150, + "ðŁĺıðŁĺı": 38151, + "yahoo": 38152, + "aceae": 38153, + "tose": 38154, + "evento": 38155, + "sout": 38156, + "nayanth": 38157, + "whom": 38158, + "vare": 38159, + "rigging": 38160, + "genus": 38161, + "hive": 38162, + "commands": 38163, + "stie": 38164, + "daya": 38165, + "ethanol": 38166, + "enf": 38167, + "hifi": 38168, + "fluence": 38169, + "clemson": 38170, + "reinvent": 38171, + "thermometer": 38172, + "humorous": 38173, + "emerging": 38174, + "ación": 38175, + "ðŁĺĺðŁĺį": 38176, + "sity": 38177, + "hawke": 38178, + "accompanying": 38179, + "tility": 38180, + "ðŁĺª": 38181, + "recess": 38182, + "protagonist": 38183, + "lery": 38184, + "dundal": 38185, + "intl": 38186, + "brittany": 38187, + "qbs": 38188, + "offthe": 38189, + "marriages": 38190, + "howto": 38191, + "violated": 38192, + "adelaide": 38193, + "witt": 38194, + "lancer": 38195, + "pakv": 38196, + "hume": 38197, + "stade": 38198, + "bragging": 38199, + "outright": 38200, + "adc": 38201, + "superst": 38202, + "realtime": 38203, + "cures": 38204, + "gardeners": 38205, + "erock": 38206, + "dalejr": 38207, + "vero": 38208, + "bartol": 38209, + "moti": 38210, + "mcfly": 38211, + "vpn": 38212, + "stink": 38213, + "overrated": 38214, + "guerra": 38215, + "etis": 38216, + "athome": 38217, + "twdfamily": 38218, + "thab": 38219, + "tnx": 38220, + "rafael": 38221, + "familytravel": 38222, + "xley": 38223, + "satanic": 38224, + "equations": 38225, + "rudy": 38226, + "waldorf": 38227, + "stani": 38228, + "tube": 38229, + "measles": 38230, + "zimmerman": 38231, + "obligations": 38232, + "iously": 38233, + "bowser": 38234, + "transformer": 38235, + "shoppe": 38236, + "shaken": 38237, + "ghouse": 38238, + "tod": 38239, + "ketball": 38240, + "shareholder": 38241, + "marca": 38242, + "kpmg": 38243, + "akan": 38244, + "givenchy": 38245, + "coastal": 38246, + "auth": 38247, + "rollercoaster": 38248, + "marches": 38249, + "coordinate": 38250, + "cinema": 38251, + "apprentices": 38252, + "parlor": 38253, + "mito": 38254, + "menon": 38255, + "considerable": 38256, + "barre": 38257, + "gloss": 38258, + "enhances": 38259, + "jazeera": 38260, + "falmouth": 38261, + "thrash": 38262, + "staten": 38263, + "kzn": 38264, + "engel": 38265, + "samanthap": 38266, + "floppy": 38267, + "salom": 38268, + "ðŁıĨðŁıĨ": 38269, + "wack": 38270, + "deliberate": 38271, + "oscill": 38272, + "heritag": 38273, + "dusted": 38274, + "ornithology": 38275, + "paddle": 38276, + "ferns": 38277, + "barun": 38278, + "clans": 38279, + "anticipate": 38280, + "aay": 38281, + "matically": 38282, + "éĩ": 38283, + "tumble": 38284, + "postman": 38285, + "unicef": 38286, + "trotter": 38287, + "opd": 38288, + "leaflet": 38289, + "geist": 38290, + "ceasefire": 38291, + "screws": 38292, + "creation": 38293, + "walnuts": 38294, + "longhorns": 38295, + "understatement": 38296, + "abb": 38297, + "proximity": 38298, + "nax": 38299, + "unity": 38300, + "turnpike": 38301, + "ordained": 38302, + "dubstep": 38303, + "chakra": 38304, + "mech": 38305, + "loveher": 38306, + "lookalike": 38307, + "donnein": 38308, + "viron": 38309, + "ÙĪ": 38310, + "bangers": 38311, + "variants": 38312, + "outdated": 38313, + "inta": 38314, + "cristo": 38315, + "spelt": 38316, + "foodand": 38317, + "fon": 38318, + "stefani": 38319, + "marginal": 38320, + "hutton": 38321, + "tiara": 38322, + "telford": 38323, + "quen": 38324, + "fairgrounds": 38325, 
+ "quetta": 38326, + "mikhail": 38327, + "healer": 38328, + "vball": 38329, + "tyre": 38330, + "undergrad": 38331, + "glend": 38332, + "homers": 38333, + "scribed": 38334, + "maintains": 38335, + "poche": 38336, + "missal": 38337, + "marko": 38338, + "uas": 38339, + "án": 38340, + "shp": 38341, + "convey": 38342, + "padre": 38343, + "saba": 38344, + "puglia": 38345, + "madhuri": 38346, + "paxton": 38347, + "chaplain": 38348, + "nago": 38349, + "casi": 38350, + "...!!!": 38351, + "flirt": 38352, + "saleh": 38353, + "kare": 38354, + "dire": 38355, + "stamped": 38356, + "extreme": 38357, + "ðŁĺĥðŁĺĥ": 38358, + "hoppy": 38359, + "guadalupe": 38360, + "advantaged": 38361, + "euchar": 38362, + "plow": 38363, + "unn": 38364, + "macqu": 38365, + "portland": 38366, + "clash": 38367, + "pes": 38368, + "loubout": 38369, + "yp": 38370, + "keeping": 38371, + "arcadia": 38372, + "frankie": 38373, + "fiu": 38374, + "deth": 38375, + "encyclopedia": 38376, + "size": 38377, + "invests": 38378, + "ðŁį©": 38379, + "geological": 38380, + "franç": 38381, + "confront": 38382, + "ðŁĺ¥": 38383, + "dys": 38384, + "afm": 38385, + "texan": 38386, + "graphene": 38387, + "repostapp": 38388, + "acf": 38389, + "ursula": 38390, + "gaza": 38391, + "ddled": 38392, + "fum": 38393, + "wsbtv": 38394, + "mbe": 38395, + "frontiers": 38396, + "chronograph": 38397, + "kes": 38398, + "interfaith": 38399, + "taboo": 38400, + "sparta": 38401, + "wondo": 38402, + "florist": 38403, + "embraces": 38404, + "caw": 38405, + "noel": 38406, + "archers": 38407, + "ðŁIJ·": 38408, + "romano": 38409, + "banan": 38410, + "shakers": 38411, + "melodies": 38412, + "geothermal": 38413, + "sephora": 38414, + "ìļ°": 38415, + "од": 38416, + "proc": 38417, + "handshake": 38418, + "pande": 38419, + "populated": 38420, + "slowdown": 38421, + "hortons": 38422, + "registrations": 38423, + "undeni": 38424, + "lants": 38425, + "passover": 38426, + "thakur": 38427, + "lief": 38428, + "adhesive": 38429, + "petal": 38430, + "microscopy": 38431, + "memphis": 38432, + "confirming": 38433, + "airdrop": 38434, + "mesmer": 38435, + "perceived": 38436, + "mingle": 38437, + "lifeline": 38438, + "ghj": 38439, + "worcestershire": 38440, + "passions": 38441, + "acher": 38442, + "ellar": 38443, + "aho": 38444, + "firenze": 38445, + "barang": 38446, + "letterman": 38447, + "hatfield": 38448, + "lucha": 38449, + "jeter": 38450, + "eshop": 38451, + "williams": 38452, + "horoscope": 38453, + "prede": 38454, + "eastbourne": 38455, + "durga": 38456, + "diversion": 38457, + "altrin": 38458, + "seismic": 38459, + "premiosm": 38460, + "narco": 38461, + "tir": 38462, + "orig": 38463, + "orm": 38464, + "landfall": 38465, + "cious": 38466, + "lindo": 38467, + "maxine": 38468, + "xico": 38469, + "tray": 38470, + "oswald": 38471, + "cba": 38472, + "ricotta": 38473, + "ncr": 38474, + "marau": 38475, + "า": 38476, + "gladiator": 38477, + "chery": 38478, + "lung": 38479, + "ume": 38480, + "popsic": 38481, + "longing": 38482, + "canals": 38483, + "taya": 38484, + "decentralized": 38485, + "shopp": 38486, + "pressures": 38487, + "maharaj": 38488, + "etihad": 38489, + "walgreens": 38490, + "succession": 38491, + "signaling": 38492, + "lig": 38493, + "staffer": 38494, + "northkorea": 38495, + "defying": 38496, + "asma": 38497, + "deg": 38498, + "perimeter": 38499, + "oakville": 38500, + "msk": 38501, + "baltimore": 38502, + "receip": 38503, + "deple": 38504, + "ðŁĺŃðŁĺĤ": 38505, + "jamboree": 38506, + ">.<": 38507, + "rspb": 38508, + "punisher": 38509, + "considerably": 38510, + "intothe": 
38511, + "parisian": 38512, + "accelerated": 38513, + "polyester": 38514, + "lowes": 38515, + "frying": 38516, + "sautéed": 38517, + "mouths": 38518, + "seychelles": 38519, + "rax": 38520, + "godis": 38521, + "dakota": 38522, + "housewives": 38523, + "theme": 38524, + "matinee": 38525, + "blackbird": 38526, + "yesung": 38527, + "prefers": 38528, + "pellegr": 38529, + "inated": 38530, + "trunks": 38531, + "strongertogether": 38532, + "repet": 38533, + "repairing": 38534, + "pedals": 38535, + "tolerant": 38536, + "herr": 38537, + "dunne": 38538, + "indication": 38539, + "decatur": 38540, + "btv": 38541, + "exhibitors": 38542, + "ikon": 38543, + "fridaymotivation": 38544, + "bragg": 38545, + "livetweet": 38546, + "alves": 38547, + "womensart": 38548, + "foreigners": 38549, + "wallets": 38550, + "mindy": 38551, + "laney": 38552, + "bbin": 38553, + "tvmiaw": 38554, + "lifter": 38555, + "target": 38556, + "tame": 38557, + "drou": 38558, + "astrophotography": 38559, + "mpc": 38560, + "gpu": 38561, + "nordstrom": 38562, + "friction": 38563, + "runoff": 38564, + "lovable": 38565, + "spnfamily": 38566, + "extingui": 38567, + "bloody": 38568, + "schel": 38569, + "artistry": 38570, + "swish": 38571, + "scarce": 38572, + "phils": 38573, + "maxim": 38574, + "possum": 38575, + "compromised": 38576, + "styli": 38577, + "scfc": 38578, + "issa": 38579, + "birmingham": 38580, + "sketched": 38581, + "angelica": 38582, + "ordinance": 38583, + "jets": 38584, + "conquer": 38585, + "ðŁĺIJ": 38586, + "onlineshopping": 38587, + "sori": 38588, + "reasonably": 38589, + "nuestro": 38590, + "arturo": 38591, + "chl": 38592, + "benefici": 38593, + "sphoto": 38594, + "welt": 38595, + "nikk": 38596, + "ð٤ŀ": 38597, + "danao": 38598, + "formid": 38599, + "asse": 38600, + "afirst": 38601, + "âľĤ": 38602, + "gillette": 38603, + "assor": 38604, + "anonym": 38605, + "selca": 38606, + "femi": 38607, + "bearable": 38608, + "yand": 38609, + "armory": 38610, + "crepe": 38611, + "celticfc": 38612, + "bravo": 38613, + "inexpensive": 38614, + "delec": 38615, + "gecko": 38616, + "newmarket": 38617, + "snowflakes": 38618, + "kabir": 38619, + "contra": 38620, + "canning": 38621, + "morpho": 38622, + "garwal": 38623, + "ðŁĴĥðŁı»": 38624, + "fighting": 38625, + "mutation": 38626, + "woody": 38627, + "jugg": 38628, + "graces": 38629, + "premiosmtvmiaw": 38630, + "kennedy": 38631, + "gup": 38632, + "sae": 38633, + "opha": 38634, + "offspring": 38635, + "finisher": 38636, + "betts": 38637, + "spanning": 38638, + "marj": 38639, + "hone": 38640, + "shing": 38641, + "continents": 38642, + "samanthaprabhu": 38643, + "unrelated": 38644, + "lacy": 38645, + "explosions": 38646, + "benjamin": 38647, + "sophie": 38648, + "noting": 38649, + "microsoft": 38650, + "assen": 38651, + "ahoy": 38652, + "iker": 38653, + "hofer": 38654, + "moe": 38655, + "ahmadi": 38656, + "yann": 38657, + "anak": 38658, + "mahi": 38659, + "beu": 38660, + "ahah": 38661, + "creeper": 38662, + "baahubali": 38663, + "amat": 38664, + "priory": 38665, + "hawkeye": 38666, + "deloitte": 38667, + "skoda": 38668, + "printmaking": 38669, + "assembling": 38670, + "miraculous": 38671, + "noch": 38672, + "swo": 38673, + "lega": 38674, + "operates": 38675, + "borderlands": 38676, + "elie": 38677, + "strongh": 38678, + "reptiles": 38679, + "pirate": 38680, + "unfold": 38681, + "¯": 38682, + "qualcomm": 38683, + "unpredictable": 38684, + "otr": 38685, + "rosewood": 38686, + "directional": 38687, + "counselors": 38688, + "cornell": 38689, + "liberated": 38690, + "jad": 38691, + "irregular": 
38692, + "bulgarian": 38693, + "highness": 38694, + "vodafone": 38695, + "swild": 38696, + "minimize": 38697, + "grazie": 38698, + "à¹ĩ": 38699, + "rstats": 38700, + "streep": 38701, + "ometric": 38702, + "humble": 38703, + "lump": 38704, + "lille": 38705, + "bü": 38706, + "homedepot": 38707, + "tripadvisor": 38708, + "kiwan": 38709, + "avia": 38710, + "erz": 38711, + "exico": 38712, + "duf": 38713, + "blumen": 38714, + "mizing": 38715, + "arma": 38716, + "inim": 38717, + "constan": 38718, + "sora": 38719, + "jual": 38720, + "aun": 38721, + "twell": 38722, + "trenches": 38723, + "hera": 38724, + "rk": 38725, + "poplar": 38726, + "recipeoftheday": 38727, + "llan": 38728, + "bhuban": 38729, + "shortages": 38730, + "ingdon": 38731, + "bridgewater": 38732, + "ðŁIJĺ": 38733, + "fortnite": 38734, + "camden": 38735, + "uncture": 38736, + "prow": 38737, + "colonies": 38738, + "tks": 38739, + "ngo": 38740, + "bhm": 38741, + "livepd": 38742, + "splace": 38743, + "slike": 38744, + "happyeaster": 38745, + "terrence": 38746, + "revolver": 38747, + "jed": 38748, + "yyyy": 38749, + "officeof": 38750, + "mts": 38751, + "existential": 38752, + "rourke": 38753, + "explorebc": 38754, + "ssed": 38755, + "priest": 38756, + "vixen": 38757, + "siding": 38758, + "kpa": 38759, + "ahar": 38760, + "juic": 38761, + "obstruc": 38762, + "forensics": 38763, + "ukmfg": 38764, + "cancellation": 38765, + "weary": 38766, + "abq": 38767, + "elec": 38768, + "prized": 38769, + "debts": 38770, + "mezz": 38771, + "salvatore": 38772, + "mdc": 38773, + "grette": 38774, + "cgc": 38775, + "thon": 38776, + "snowstorm": 38777, + "tsch": 38778, + "cookery": 38779, + "å¹": 38780, + "waxing": 38781, + "nacional": 38782, + "murs": 38783, + "rave": 38784, + "capes": 38785, + "germain": 38786, + "dripping": 38787, + "submitting": 38788, + "omelette": 38789, + "iteration": 38790, + "ajes": 38791, + "shimmer": 38792, + "fueling": 38793, + "ðŁĩ§ðŁĩª": 38794, + "lipo": 38795, + "bobble": 38796, + "unfollow": 38797, + "islamist": 38798, + "hiber": 38799, + "cats": 38800, + "agentsofshield": 38801, + "sensi": 38802, + "_____": 38803, + "steria": 38804, + "instal": 38805, + "auspicious": 38806, + "harrow": 38807, + "overland": 38808, + "feminists": 38809, + "instant": 38810, + "chariot": 38811, + "blindness": 38812, + "sped": 38813, + "scarec": 38814, + "nuit": 38815, + "miniatures": 38816, + "hoseok": 38817, + "glock": 38818, + "fifaworldcup": 38819, + "ete": 38820, + "dism": 38821, + "weiner": 38822, + "exfoli": 38823, + "earts": 38824, + "à¸Ķ": 38825, + "myart": 38826, + "manil": 38827, + "issant": 38828, + "forma": 38829, + "incu": 38830, + "buffalob": 38831, + "intim": 38832, + "mccul": 38833, + "anjali": 38834, + "popo": 38835, + "undoub": 38836, + "hila": 38837, + "fungal": 38838, + "thankful": 38839, + "futur": 38840, + "endish": 38841, + "rends": 38842, + "thar": 38843, + "sheff": 38844, + "ringo": 38845, + "nicholls": 38846, + "iowa": 38847, + "potom": 38848, + "clams": 38849, + "ãģĦ": 38850, + "aconf": 38851, + "stadiums": 38852, + "dimp": 38853, + "dik": 38854, + "residences": 38855, + "dov": 38856, + "caricature": 38857, + "seagull": 38858, + "klm": 38859, + "confess": 38860, + "slapped": 38861, + "celeb": 38862, + "turbines": 38863, + "ppv": 38864, + "nurture": 38865, + "elab": 38866, + ".....#": 38867, + "tuff": 38868, + "depress": 38869, + "alfar": 38870, + "amiibo": 38871, + "dispon": 38872, + "ewing": 38873, + "queer": 38874, + "friends": 38875, + "forre": 38876, + "âĺ¼": 38877, + "swt": 38878, + "aquarius": 38879, + "headliner": 
38880, + "curd": 38881, + "figs": 38882, + "otters": 38883, + "lovefl": 38884, + "kareem": 38885, + "govegan": 38886, + "friyay": 38887, + "consolation": 38888, + "atri": 38889, + "ì§Ħ": 38890, + "âĺĿï¸ı": 38891, + "polyne": 38892, + "gued": 38893, + "oya": 38894, + "laus": 38895, + "intestinal": 38896, + "camilla": 38897, + "scalp": 38898, + "pir": 38899, + "leeds": 38900, + "horrifying": 38901, + "boretum": 38902, + "dandelion": 38903, + "ferrer": 38904, + "ellic": 38905, + "asx": 38906, + "soren": 38907, + "reloaded": 38908, + "aleague": 38909, + "navigator": 38910, + "inette": 38911, + "addams": 38912, + "alchemist": 38913, + "akshay": 38914, + "dystopian": 38915, + "awec": 38916, + "naya": 38917, + "alisa": 38918, + "ailed": 38919, + "agor": 38920, + "aviator": 38921, + "alizer": 38922, + "smobile": 38923, + "findyourpark": 38924, + "copying": 38925, + "toddy": 38926, + "shti": 38927, + "monger": 38928, + "calhoun": 38929, + "napkin": 38930, + "breakup": 38931, + "yatra": 38932, + "sethu": 38933, + "richi": 38934, + "erasmus": 38935, + "ferry": 38936, + "amore": 38937, + "practise": 38938, + "bobo": 38939, + "powerpoint": 38940, + "oose": 38941, + "liffe": 38942, + "china": 38943, + "shka": 38944, + "fadnavis": 38945, + "duane": 38946, + "waron": 38947, + "false": 38948, + "ðŁļĤ": 38949, + "washes": 38950, + "discip": 38951, + "========": 38952, + "gk": 38953, + "abb": 38954, + "stubborn": 38955, + "medieval": 38956, + "pci": 38957, + "ðŁįª": 38958, + "marilyn": 38959, + "hyo": 38960, + "mandi": 38961, + "cri": 38962, + "predecess": 38963, + "continuation": 38964, + "omusic": 38965, + "slat": 38966, + "whal": 38967, + "mallory": 38968, + "bonn": 38969, + "shenzhen": 38970, + "cai": 38971, + "âĺĥ": 38972, + "safest": 38973, + "forwards": 38974, + "drawers": 38975, + "blasted": 38976, + "slee": 38977, + "morphe": 38978, + "mbta": 38979, + "dumbass": 38980, + "ÑĦоÑĤо": 38981, + "alhamdulillah": 38982, + "eclub": 38983, + "albeit": 38984, + "healey": 38985, + "ayurveda": 38986, + "advertised": 38987, + "crocs": 38988, + "ittles": 38989, + "bryson": 38990, + "bei": 38991, + "njpw": 38992, + "honoree": 38993, + "fused": 38994, + "ðŁĶĺ": 38995, + "multin": 38996, + "naga": 38997, + "departs": 38998, + "kop": 38999, + "kino": 39000, + "jharkhand": 39001, + "edna": 39002, + "axle": 39003, + "milton": 39004, + "supremacist": 39005, + "marrakech": 39006, + "dominic": 39007, + "transcript": 39008, + "][#": 39009, + ":).": 39010, + "woc": 39011, + "surrounds": 39012, + "ogil": 39013, + "leaflets": 39014, + "cowell": 39015, + "whew": 39016, + "trude": 39017, + "prolifer": 39018, + "succes": 39019, + "sportsman": 39020, + "condom": 39021, + "poche": 39022, + "kup": 39023, + "imprisonment": 39024, + "{}": 39025, + "scrambled": 39026, + "åĽ": 39027, + "kaine": 39028, + "cellphone": 39029, + "metamor": 39030, + "coni": 39031, + "remnants": 39032, + "eez": 39033, + "downpour": 39034, + "afternoon": 39035, + "exercising": 39036, + "berser": 39037, + "architecture": 39038, + "wicklow": 39039, + "mns": 39040, + "isp": 39041, + "boc": 39042, + "niss": 39043, + "mnwild": 39044, + "stumble": 39045, + "rsi": 39046, + "luffy": 39047, + "silen": 39048, + "ddad": 39049, + "bullies": 39050, + "hawker": 39051, + "bbcc": 39052, + "scuba": 39053, + "epp": 39054, + "quets": 39055, + "foraging": 39056, + "pallet": 39057, + "hadi": 39058, + "cinematographer": 39059, + "catchers": 39060, + "toaster": 39061, + "khi": 39062, + "litecoin": 39063, + "kidlit": 39064, + "amherst": 39065, + "mauricio": 39066, + "ipad": 39067, + 
"marmalade": 39068, + "fey": 39069, + "donnelly": 39070, + "gto": 39071, + "estas": 39072, + "cerebral": 39073, + "antgrasso": 39074, + "zzled": 39075, + "virgil": 39076, + "swapped": 39077, + "ðŁĺħðŁĺħ": 39078, + "nodapl": 39079, + "greatest": 39080, + "nhlbruins": 39081, + "fraser": 39082, + "bmo": 39083, + "anew": 39084, + ".âĿ¤ï¸ı": 39085, + "segregation": 39086, + "remarkably": 39087, + "mccormick": 39088, + "logger": 39089, + "eras": 39090, + "contracting": 39091, + "âłĢâłĢ": 39092, + "yorks": 39093, + "ukulele": 39094, + "touchscreen": 39095, + "decked": 39096, + "benn": 39097, + "southwark": 39098, + "ravin": 39099, + "numis": 39100, + "ð٤Ļ": 39101, + "rut": 39102, + "greco": 39103, + "ethic": 39104, + "redneck": 39105, + "arr": 39106, + "tcs": 39107, + "ihri": 39108, + "ðŁĩ«ðŁĩ·": 39109, + "lk": 39110, + "inherited": 39111, + "zyk": 39112, + "viaduct": 39113, + "martyred": 39114, + "higu": 39115, + "ssn": 39116, + "bein": 39117, + "streetstyle": 39118, + "fergie": 39119, + "bankof": 39120, + "æĹ¥": 39121, + "stakeholder": 39122, + "exemplary": 39123, + "cress": 39124, + "essa": 39125, + "erotica": 39126, + "intrepid": 39127, + "gomes": 39128, + "braun": 39129, + "bethany": 39130, + "bangtan": 39131, + "pulmonary": 39132, + "milling": 39133, + "doctorate": 39134, + "trumprussia": 39135, + "र": 39136, + "sani": 39137, + "blatt": 39138, + "plau": 39139, + "deprived": 39140, + "tle": 39141, + "fully": 39142, + "bourn": 39143, + "stak": 39144, + "lufthansa": 39145, + "kiosk": 39146, + "faroo": 39147, + "defy": 39148, + "badan": 39149, + "ðŁĺĺâĿ¤ï¸ı": 39150, + "ritz": 39151, + "trisha": 39152, + "rands": 39153, + "middlesex": 39154, + "arabs": 39155, + "proj": 39156, + "sportscenter": 39157, + "repeats": 39158, + "ivf": 39159, + "bleedblue": 39160, + "assure": 39161, + "obs": 39162, + "territorial": 39163, + "elen": 39164, + "beverley": 39165, + "annah": 39166, + "âĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ı": 39167, + "zl": 39168, + "forgood": 39169, + "sciencefiction": 39170, + "glau": 39171, + "sonya": 39172, + "prith": 39173, + "stweets": 39174, + "mixers": 39175, + "mario": 39176, + "antelope": 39177, + "writingcommunity": 39178, + "wentz": 39179, + "denham": 39180, + "bedi": 39181, + "sfo": 39182, + "harleydavidson": 39183, + "lookbook": 39184, + "immunotherapy": 39185, + "orphe": 39186, + "esville": 39187, + "edged": 39188, + "task": 39189, + "sbball": 39190, + "corrosion": 39191, + "kilometers": 39192, + "costing": 39193, + "playback": 39194, + "keke": 39195, + "divisi": 39196, + "uter": 39197, + "relocation": 39198, + "yelled": 39199, + "peng": 39200, + "upbeat": 39201, + "serve": 39202, + "âļł": 39203, + "halen": 39204, + "stirring": 39205, + "rehman": 39206, + "env": 39207, + "schumacher": 39208, + "fragment": 39209, + "alkaline": 39210, + "sbk": 39211, + "resili": 39212, + "sharepoint": 39213, + "rollover": 39214, + "trash": 39215, + "counterpart": 39216, + "âĻ«": 39217, + "obitu": 39218, + "à½": 39219, + "ãĤ¹": 39220, + "mulberry": 39221, + "ðŁİĨ": 39222, + "autonomy": 39223, + "spraying": 39224, + "natl": 39225, + "loveyou": 39226, + "franki": 39227, + "nuk": 39228, + "escar": 39229, + "canteen": 39230, + "alibaba": 39231, + "deplor": 39232, + "molecule": 39233, + "pud": 39234, + "fortnight": 39235, + "blondie": 39236, + "sphin": 39237, + "portrayal": 39238, + "tache": 39239, + "bute": 39240, + "consisting": 39241, + "freepalestine": 39242, + "csp": 39243, + "immort": 39244, + "dns": 39245, + "ðŁĴ¥ðŁĴ¥": 39246, + "tourde": 39247, + "cooking": 39248, + "archival": 39249, + "gathers": 39250, + 
"bitt": 39251, + "banc": 39252, + "premature": 39253, + "snowball": 39254, + "poetryday": 39255, + "loudly": 39256, + "fugitive": 39257, + "eday": 39258, + "emra": 39259, + "ðŁĩ¸ðŁĩª": 39260, + "scien": 39261, + "nodejs": 39262, + "jurgen": 39263, + "jeong": 39264, + "bandana": 39265, + "unis": 39266, + "foxsports": 39267, + "vandy": 39268, + "provisions": 39269, + "weep": 39270, + "tuk": 39271, + "iko": 39272, + "houn": 39273, + "ziggy": 39274, + "zr": 39275, + "fillet": 39276, + "bata": 39277, + "tink": 39278, + "cone": 39279, + "wewant": 39280, + "kilo": 39281, + "horace": 39282, + "slt": 39283, + "sct": 39284, + "staytuned": 39285, + "victoria": 39286, + "umbria": 39287, + "attacker": 39288, + "inghamshire": 39289, + "frightening": 39290, + "noir": 39291, + "frat": 39292, + "contempt": 39293, + "liaison": 39294, + "hoi": 39295, + "brink": 39296, + "trill": 39297, + "niagar": 39298, + "kickass": 39299, + "dundas": 39300, + "notmy": 39301, + "rhode": 39302, + "bumble": 39303, + "noxi": 39304, + "fag": 39305, + "spectators": 39306, + "mancrushmonday": 39307, + "jinping": 39308, + "distract": 39309, + "daisy": 39310, + "walden": 39311, + "portrait": 39312, + "arthistory": 39313, + "voltron": 39314, + "evel": 39315, + "isc": 39316, + "acm": 39317, + "rite": 39318, + "nao": 39319, + "deported": 39320, + "sweats": 39321, + "rufus": 39322, + "lobo": 39323, + "laborday": 39324, + "gamo": 39325, + "ihrithik": 39326, + "blit": 39327, + "abdominal": 39328, + "ãħ¤ãħ¤ãħ¤ãħ¤": 39329, + "iit": 39330, + "eq": 39331, + "busy": 39332, + "alluarjun": 39333, + "undisclosed": 39334, + "deton": 39335, + "procreate": 39336, + "kil": 39337, + "ðŁİĤðŁİĤ": 39338, + "mitchell": 39339, + "kii": 39340, + "inheritance": 39341, + "alp": 39342, + "joburg": 39343, + "patrolling": 39344, + "compulsory": 39345, + "unsigned": 39346, + "niam": 39347, + "lga": 39348, + "eshopsuk": 39349, + "trilli": 39350, + "maw": 39351, + "appreciating": 39352, + "rockab": 39353, + "mañana": 39354, + "antal": 39355, + "malvern": 39356, + "royo": 39357, + "grandprix": 39358, + "sutton": 39359, + "goftheday": 39360, + "digi": 39361, + "ãħĭãħĭãħĭãħĭ": 39362, + "tles": 39363, + "varanasi": 39364, + "erected": 39365, + "disciples": 39366, + "contact": 39367, + "ðŁĺµ": 39368, + "lid": 39369, + "â¬ĩ": 39370, + "scentre": 39371, + "radiator": 39372, + "ingtips": 39373, + "transitions": 39374, + "thursdaymotivation": 39375, + "chemical": 39376, + "separati": 39377, + "salis": 39378, + "mim": 39379, + "geographical": 39380, + "bookfest": 39381, + "/.": 39382, + "âľĭ": 39383, + "vae": 39384, + "currie": 39385, + "aggarwal": 39386, + "acceleration": 39387, + "theses": 39388, + "lgm": 39389, + "umass": 39390, + "proportions": 39391, + "nata": 39392, + "anians": 39393, + "kuch": 39394, + "beacons": 39395, + "apr": 39396, + "@#": 39397, + "ðŁĴªðŁı¾": 39398, + "nuke": 39399, + "sheraton": 39400, + "kio": 39401, + "makati": 39402, + "politico": 39403, + "morale": 39404, + "ìĻ": 39405, + "economically": 39406, + "ggly": 39407, + "ssen": 39408, + "pastries": 39409, + "internships": 39410, + "vicente": 39411, + "fantaken": 39412, + "avengers": 39413, + "accuse": 39414, + "sleepover": 39415, + "indicated": 39416, + "thedream": 39417, + "sterone": 39418, + "renders": 39419, + "frost": 39420, + "oui": 39421, + "gregg": 39422, + "dore": 39423, + "⾨⾨⾨": 39424, + "pugs": 39425, + "saty": 39426, + "numb": 39427, + "hemsworth": 39428, + "tami": 39429, + "lassic": 39430, + "schiff": 39431, + "iglesias": 39432, + "agawa": 39433, + "]\"": 39434, + "reshi": 39435, + 
"gamestop": 39436, + "divorced": 39437, + "theater": 39438, + "claudi": 39439, + "unconventional": 39440, + "prophets": 39441, + "acin": 39442, + "twelf": 39443, + "towering": 39444, + "tml": 39445, + "sclerosis": 39446, + "kwan": 39447, + "gets": 39448, + "disturb": 39449, + "naira": 39450, + "energ": 39451, + "piracy": 39452, + "pruitt": 39453, + "notified": 39454, + "henna": 39455, + "bram": 39456, + "groundwater": 39457, + "bls": 39458, + "optimis": 39459, + "$)": 39460, + "lucie": 39461, + "bizhour": 39462, + "fangirling": 39463, + "grills": 39464, + "orl": 39465, + "verse": 39466, + "cina": 39467, + "lawless": 39468, + "artistsontwitter": 39469, + "televised": 39470, + "marshmallows": 39471, + "radiohead": 39472, + "barr": 39473, + "mfc": 39474, + "brevi": 39475, + "mmorpg": 39476, + "gaya": 39477, + "âĸ«": 39478, + "subtitles": 39479, + "jt": 39480, + "disneyland": 39481, + "tobago": 39482, + "nhm": 39483, + "groove": 39484, + "fiawec": 39485, + "\"/": 39486, + "bao": 39487, + "scrabble": 39488, + "omni": 39489, + "ffl": 39490, + "umc": 39491, + "simba": 39492, + "alier": 39493, + "terrell": 39494, + "plume": 39495, + "midi": 39496, + "dignit": 39497, + "coc": 39498, + "brut": 39499, + "adata": 39500, + "alchemy": 39501, + "dsm": 39502, + "ðŁĺĨðŁĺĨ": 39503, + "wintry": 39504, + "spares": 39505, + "cuer": 39506, + "conclusions": 39507, + "toys": 39508, + "odor": 39509, + "flann": 39510, + "garvey": 39511, + "scriptions": 39512, + "inspections": 39513, + "catap": 39514, + "anglo": 39515, + "stlouis": 39516, + "heimer": 39517, + "atay": 39518, + "trich": 39519, + "enyc": 39520, + "childs": 39521, + "ventil": 39522, + "montp": 39523, + "guillermo": 39524, + "circulare": 39525, + "zell": 39526, + "modeled": 39527, + "craftsman": 39528, + "alina": 39529, + "stimulation": 39530, + "cashew": 39531, + "judas": 39532, + "bestof": 39533, + "toire": 39534, + "suspends": 39535, + "scollege": 39536, + "realising": 39537, + "bytes": 39538, + "bloods": 39539, + "assi": 39540, + "ðŁĴ¿": 39541, + "ohs": 39542, + "ðŁįĭ": 39543, + "scallop": 39544, + "व": 39545, + "gifting": 39546, + "camogie": 39547, + "wilkes": 39548, + "ozzy": 39549, + "ðŁ¤¤": 39550, + "veronic": 39551, + "savoy": 39552, + "demetri": 39553, + "babygirl": 39554, + "ðŁĺįðŁĺŃ": 39555, + "sox": 39556, + "clyde": 39557, + "inductee": 39558, + "countdown": 39559, + "selfcare": 39560, + "à¤ľ": 39561, + "vika": 39562, + "torre": 39563, + "phdchat": 39564, + "pears": 39565, + "awh": 39566, + "suffrage": 39567, + "lesn": 39568, + "admiration": 39569, + "mpp": 39570, + "sharkweek": 39571, + "schulz": 39572, + "santorini": 39573, + "clover": 39574, + "(*": 39575, + "strasbourg": 39576, + "exiting": 39577, + "soyu": 39578, + "fingerprint": 39579, + "chea": 39580, + "ãĢľ": 39581, + "vindic": 39582, + "songwriters": 39583, + "soa": 39584, + "prouder": 39585, + "nama": 39586, + "=))": 39587, + "simplest": 39588, + "deliciously": 39589, + "gilles": 39590, + "uq": 39591, + "mnwx": 39592, + "epp": 39593, + "shun": 39594, + "kennel": 39595, + "fallon": 39596, + "ðŁIJ£": 39597, + "sind": 39598, + "tragically": 39599, + "outes": 39600, + "modernism": 39601, + "coke": 39602, + "gyn": 39603, + "spion": 39604, + "âĺ¹ï¸ı": 39605, + "leam": 39606, + "compressor": 39607, + "apologise": 39608, + "twentyon": 39609, + "fanatics": 39610, + "âĻ»": 39611, + "scotsman": 39612, + "sawa": 39613, + "kou": 39614, + "aser": 39615, + "à¸ļ": 39616, + "welterweight": 39617, + "phenom": 39618, + "twickenham": 39619, + "stria": 39620, + "pout": 39621, + "kaz": 39622, + "giam": 
39623, + "cdp": 39624, + "hoy": 39625, + "employ": 39626, + "redmond": 39627, + "à¸Ħà¸": 39628, + "smere": 39629, + "trancefamily": 39630, + "protocols": 39631, + "piece": 39632, + "luiz": 39633, + "iteracy": 39634, + "carls": 39635, + "unitedstates": 39636, + "harmed": 39637, + "phdlife": 39638, + "chaw": 39639, + "footprints": 39640, + "lé": 39641, + "choker": 39642, + "zana": 39643, + "slipper": 39644, + "ericsson": 39645, + "insulting": 39646, + "artichoke": 39647, + "advising": 39648, + "acquisitions": 39649, + "opor": 39650, + "mutations": 39651, + "rear": 39652, + "à¥ģ": 39653, + "podcast": 39654, + "wither": 39655, + "kung": 39656, + "íĺ¸": 39657, + "winslow": 39658, + "diapers": 39659, + "ðŁĵ¸@": 39660, + "ecker": 39661, + "collar": 39662, + "huey": 39663, + "giro": 39664, + "monogram": 39665, + "kasich": 39666, + "siveness": 39667, + "malaysi": 39668, + "aromatic": 39669, + "gres": 39670, + "galileo": 39671, + "uji": 39672, + "robb": 39673, + "drm": 39674, + "nonetheless": 39675, + "asa": 39676, + ":>": 39677, + "loa": 39678, + "lnp": 39679, + "atwork": 39680, + "agt": 39681, + "lakshmi": 39682, + "pipelines": 39683, + "idal": 39684, + "strel": 39685, + "reall": 39686, + "chainz": 39687, + "stonewall": 39688, + "sansk": 39689, + "ðŁı´": 39690, + "piedmont": 39691, + "hostess": 39692, + "ciu": 39693, + "té": 39694, + "analyses": 39695, + "wilhelm": 39696, + "scotty": 39697, + "rwby": 39698, + "mosquit": 39699, + "usemb": 39700, + "quins": 39701, + "ðŁijİ": 39702, + "tucker": 39703, + "sconf": 39704, + "specifications": 39705, + "psychiatry": 39706, + "brookes": 39707, + "sils": 39708, + "olaf": 39709, + "deto": 39710, + "codi": 39711, + "clip": 39712, + "filth": 39713, + "womancrushwednesday": 39714, + "goto": 39715, + "angerous": 39716, + "beale": 39717, + "wtc": 39718, + "panelist": 39719, + "nex": 39720, + "larsen": 39721, + "emilio": 39722, + "tableau": 39723, + "hitters": 39724, + "conceived": 39725, + "americani": 39726, + "ortega": 39727, + "mardi": 39728, + "Ñĥ": 39729, + "paintball": 39730, + "thirsty": 39731, + "newyorker": 39732, + "etisation": 39733, + "goss": 39734, + "weaker": 39735, + "ugh": 39736, + "troll": 39737, + "harga": 39738, + "dual": 39739, + "ghtning": 39740, + "atine": 39741, + "ðŁĺİðŁĺİðŁĺİ": 39742, + "cookout": 39743, + "pyrenees": 39744, + "poss": 39745, + "authentication": 39746, + "sportswear": 39747, + "yunho": 39748, + "kiro": 39749, + "archipel": 39750, + "shenko": 39751, + "render": 39752, + "novation": 39753, + "divinity": 39754, + "ðŁij£": 39755, + "sufi": 39756, + "humbling": 39757, + "geopol": 39758, + "devotees": 39759, + "waitress": 39760, + "trough": 39761, + "pyro": 39762, + "iba": 39763, + "bling": 39764, + "graf": 39765, + "epilots": 39766, + "btr": 39767, + "oftball": 39768, + "basking": 39769, + "dominos": 39770, + "soom": 39771, + "rath": 39772, + "sheryl": 39773, + "quel": 39774, + "astronomical": 39775, + "weld": 39776, + "tracklist": 39777, + "signee": 39778, + "sleepless": 39779, + "comman": 39780, + "chron": 39781, + "summon": 39782, + "puremichigan": 39783, + "crispr": 39784, + "slip": 39785, + "lagi": 39786, + "raq": 39787, + "umu": 39788, + "thalap": 39789, + "charmed": 39790, + "scrump": 39791, + "quadcopter": 39792, + "skip": 39793, + "petersen": 39794, + "muni": 39795, + "ðŁĮ¾": 39796, + "monaghan": 39797, + "trays": 39798, + "icked": 39799, + "canadaday": 39800, + "tegr": 39801, + "�": 39802, + "hotness": 39803, + "heavymetal": 39804, + "abar": 39805, + "gopdebate": 39806, + "azul": 39807, + "spiderman": 39808, + 
"sunflowers": 39809, + "ľë": 39810, + "webcomics": 39811, + "bard": 39812, + "в": 39813, + "nicholas": 39814, + "slush": 39815, + "raman": 39816, + "markham": 39817, + "fficial": 39818, + "ffler": 39819, + "íĬ¸": 39820, + "pless": 39821, + "anushka": 39822, + "toto": 39823, + "skaters": 39824, + "prowrestling": 39825, + "competes": 39826, + "ayala": 39827, + "mystery": 39828, + "thrills": 39829, + "mpg": 39830, + "independently": 39831, + "yul": 39832, + "imperative": 39833, + "formidable": 39834, + "tireless": 39835, + "stacking": 39836, + "tongues": 39837, + "maltese": 39838, + "potts": 39839, + "matti": 39840, + "charting": 39841, + "chillout": 39842, + "supernova": 39843, + "omeo": 39844, + "skysports": 39845, + "nutty": 39846, + "ðŁĹĵï¸ı": 39847, + "rohan": 39848, + "inspired": 39849, + "concierge": 39850, + "serra": 39851, + "makk": 39852, + "galat": 39853, + "chipp": 39854, + "yev": 39855, + "ì£": 39856, + "reimbur": 39857, + "opul": 39858, + "kimberley": 39859, + "ieee": 39860, + "bremen": 39861, + "chitec": 39862, + "orin": 39863, + "naku": 39864, + "bonkers": 39865, + "footy": 39866, + "emergence": 39867, + "ðŁĨĺ": 39868, + "stip": 39869, + "sergei": 39870, + "zoey": 39871, + "aime": 39872, + "would": 39873, + "dyes": 39874, + "destiny": 39875, + "vinaigrette": 39876, + "drier": 39877, + "circulareconomy": 39878, + "anarchi": 39879, + "ssr": 39880, + "schel": 39881, + "ciner": 39882, + "groom": 39883, + "determining": 39884, + "garmin": 39885, + "calais": 39886, + "incarceration": 39887, + "bukit": 39888, + "noi": 39889, + "chelmsford": 39890, + "mckinley": 39891, + "chipped": 39892, + "belonged": 39893, + "tumors": 39894, + "stroud": 39895, + "mii": 39896, + "influenza": 39897, + "wwenxt": 39898, + "tundra": 39899, + "telecommunications": 39900, + "catsofinstagram": 39901, + "tages": 39902, + "beatty": 39903, + "odu": 39904, + "mlkday": 39905, + "ooper": 39906, + "dangle": 39907, + "akley": 39908, + "crumb": 39909, + "antigua": 39910, + "timbers": 39911, + "rouhani": 39912, + "ðŁĴªðŁĴªðŁĴª": 39913, + "hafi": 39914, + "...!!": 39915, + "wcs": 39916, + "coop": 39917, + "snc": 39918, + "litres": 39919, + "ãĢĬ": 39920, + "haz": 39921, + "coz": 39922, + "kant": 39923, + "greenfield": 39924, + "curti": 39925, + "yale": 39926, + "flyeagles": 39927, + "whatsoever": 39928, + "worthing": 39929, + "roulette": 39930, + "flyeaglesfly": 39931, + "unda": 39932, + "ainted": 39933, + "standing": 39934, + "luscious": 39935, + "hpc": 39936, + "efficacy": 39937, + "ashland": 39938, + "meghan": 39939, + "kywx": 39940, + "npr": 39941, + "bathtub": 39942, + "acos": 39943, + "hani": 39944, + "marcor": 39945, + "mantis": 39946, + "daisi": 39947, + "boba": 39948, + "abbie": 39949, + "mutil": 39950, + "vial": 39951, + "spyder": 39952, + "poz": 39953, + "gti": 39954, + "elfie": 39955, + "nightw": 39956, + "metroid": 39957, + "antoni": 39958, + "maddie": 39959, + "dhry": 39960, + "darlings": 39961, + "tends": 39962, + "taekwondo": 39963, + "atlanta": 39964, + "meow": 39965, + "chloe": 39966, + "ãĥİ": 39967, + "ymes": 39968, + "siberia": 39969, + "kcon": 39970, + "gues": 39971, + "mariner": 39972, + "facil": 39973, + "azzle": 39974, + "[...": 39975, + "hannover": 39976, + "bavaria": 39977, + "virgo": 39978, + "teuk": 39979, + "usps": 39980, + ")#": 39981, + "walla": 39982, + "sampson": 39983, + "needless": 39984, + "verbally": 39985, + "hayley": 39986, + "bowled": 39987, + "pius": 39988, + "lampard": 39989, + "hamstring": 39990, + "volvo": 39991, + "roadsafety": 39992, + "choking": 39993, + "sorbet": 39994, 
+ "ahem": 39995, + "healthyfood": 39996, + "braided": 39997, + "horticulture": 39998, + "crative": 39999, + "cheek": 40000, + "addo": 40001, + "theforce": 40002, + "koko": 40003, + "schizoph": 40004, + "jie": 40005, + "wada": 40006, + "twentyonepilots": 40007, + "hbcu": 40008, + "proton": 40009, + "pauls": 40010, + "louisa": 40011, + "latam": 40012, + "kyrgy": 40013, + "compac": 40014, + "sdk": 40015, + "sapi": 40016, + "???": 40017, + "liberalism": 40018, + "epsilon": 40019, + "aiden": 40020, + "wusa": 40021, + "sprayed": 40022, + "basketball": 40023, + "kimono": 40024, + "bluewave": 40025, + "alias": 40026, + "ë§Ī": 40027, + "mugshot": 40028, + "cec": 40029, + "dogre": 40030, + "adora": 40031, + "ðŁĵ·@": 40032, + "krakow": 40033, + "intrigued": 40034, + "exhausting": 40035, + "astronomer": 40036, + "venison": 40037, + "ladybug": 40038, + "civ": 40039, + "brae": 40040, + "usm": 40041, + "bribe": 40042, + "acupuncture": 40043, + "pembroke": 40044, + "keating": 40045, + "chie": 40046, + "yad": 40047, + "tsi": 40048, + "smi": 40049, + "seeding": 40050, + "gateshead": 40051, + "lisboa": 40052, + "gyp": 40053, + "canvass": 40054, + "ðŁĶ´âļªï¸ı": 40055, + "opi": 40056, + "nir": 40057, + "societal": 40058, + "lyte": 40059, + "aties": 40060, + "csm": 40061, + "artery": 40062, + "alin": 40063, + "akapoor": 40064, + "abstracts": 40065, + "â̦â̦": 40066, + "teenwolf": 40067, + "newe": 40068, + "travelgram": 40069, + "sentimental": 40070, + "perched": 40071, + "handel": 40072, + "hoek": 40073, + "fay": 40074, + "coordinating": 40075, + "animate": 40076, + "manian": 40077, + "effort": 40078, + "jerky": 40079, + "fck": 40080, + "adrienne": 40081, + "mably": 40082, + "trading": 40083, + "myel": 40084, + "spiro": 40085, + "sola": 40086, + "storing": 40087, + "overdrive": 40088, + "mondaymorning": 40089, + "dreamteam": 40090, + "pulse": 40091, + "bondi": 40092, + "bernie": 40093, + "pgatour": 40094, + "tripoli": 40095, + "sonam": 40096, + "platt": 40097, + "âļ¡": 40098, + "agroup": 40099, + "îIJĴ": 40100, + "invading": 40101, + "vcu": 40102, + "kell": 40103, + "ños": 40104, + "undead": 40105, + "podcasting": 40106, + "mercedesam": 40107, + "manafort": 40108, + "cortex": 40109, + "queso": 40110, + "impeccable": 40111, + "palmer": 40112, + "wildoz": 40113, + "sportsc": 40114, + "guacamole": 40115, + "dispenser": 40116, + "categori": 40117, + "stunts": 40118, + "peril": 40119, + "invitations": 40120, + "dunedin": 40121, + "xie": 40122, + "achieves": 40123, + "safer": 40124, + "preds": 40125, + "phan": 40126, + "knuckles": 40127, + "kak": 40128, + "ignores": 40129, + "lovemyjob": 40130, + "aruba": 40131, + "oundation": 40132, + "datacenter": 40133, + "covert": 40134, + "gring": 40135, + "couple": 40136, + "ار": 40137, + "voli": 40138, + "mccle": 40139, + "artisans": 40140, + "ludo": 40141, + "kalam": 40142, + "aroma": 40143, + "undertaker": 40144, + "hula": 40145, + "wizkid": 40146, + "gumb": 40147, + "godfrey": 40148, + "bakersfield": 40149, + "kern": 40150, + "engineer": 40151, + "carve": 40152, + "palin": 40153, + "guarantees": 40154, + "pebbles": 40155, + "bays": 40156, + "zieg": 40157, + "fink": 40158, + "â¬ĩï¸ıâ¬ĩï¸ı": 40159, + "downpours": 40160, + "rochelle": 40161, + "raspberry": 40162, + "ðŁĺ®": 40163, + "graphies": 40164, + "stomp": 40165, + "cafes": 40166, + "arized": 40167, + "uttar": 40168, + "calvary": 40169, + "drie": 40170, + "crusader": 40171, + "busan": 40172, + "tuxedo": 40173, + "siu": 40174, + "seamus": 40175, + "cultured": 40176, + "blanchard": 40177, + "townhouse": 40178, + "gered": 
40179, + "buttermilk": 40180, + "fluctu": 40181, + "rogerfederer": 40182, + "heli": 40183, + "ð٦ĥ": 40184, + "uous": 40185, + "ramesh": 40186, + "muppets": 40187, + "emailmarketing": 40188, + "yess": 40189, + "brice": 40190, + "rizio": 40191, + "pelo": 40192, + "donneinarte": 40193, + "urable": 40194, + "investin": 40195, + "bumping": 40196, + "rajiv": 40197, + "sava": 40198, + "thrower": 40199, + "forex": 40200, + "ohhhh": 40201, + "thrust": 40202, + "pullman": 40203, + "rfid": 40204, + "sepsis": 40205, + "leed": 40206, + "fright": 40207, + "rounding": 40208, + "neb": 40209, + "phins": 40210, + "aisha": 40211, + "utilizing": 40212, + "squats": 40213, + "goldsmith": 40214, + "jic": 40215, + "boks": 40216, + "vaus": 40217, + "ipo": 40218, + "exclusion": 40219, + "tariff": 40220, + "pokes": 40221, + "minal": 40222, + "lands": 40223, + "enforce": 40224, + "washingtondc": 40225, + "orchar": 40226, + "gx": 40227, + "marys": 40228, + "eyour": 40229, + "aussie": 40230, + "bakers": 40231, + "unpopular": 40232, + "latinos": 40233, + "large": 40234, + "putnam": 40235, + "bolo": 40236, + "wade": 40237, + "pelo": 40238, + "dizz": 40239, + "obstruction": 40240, + "flappy": 40241, + "wearethe": 40242, + "dependence": 40243, + "pajama": 40244, + "ete": 40245, + "yann": 40246, + "ewan": 40247, + "discla": 40248, + "aay": 40249, + "karina": 40250, + "eic": 40251, + "antrim": 40252, + "wsoc": 40253, + "negatively": 40254, + "kaido": 40255, + "fotografia": 40256, + "dhru": 40257, + "colossal": 40258, + "mcleod": 40259, + "kwang": 40260, + "manipu": 40261, + "exhilar": 40262, + "usatoday": 40263, + "summerslam": 40264, + "coles": 40265, + "taproom": 40266, + "unbeatable": 40267, + "dema": 40268, + "ticks": 40269, + "kling": 40270, + "fils": 40271, + "campaigners": 40272, + "à¸ķ": 40273, + "brewster": 40274, + "audubon": 40275, + "quay": 40276, + "chs": 40277, + "kigali": 40278, + "dler": 40279, + "strengthens": 40280, + "somal": 40281, + "signingday": 40282, + "golds": 40283, + "pigment": 40284, + "orchestral": 40285, + "gq": 40286, + "linkin": 40287, + "ðŁıĩ": 40288, + "taw": 40289, + "algarve": 40290, + "hov": 40291, + "earle": 40292, + "goldfish": 40293, + "amig": 40294, + "exer": 40295, + "benin": 40296, + "druid": 40297, + "ðŁIJ¸": 40298, + "shem": 40299, + "quattro": 40300, + "mercen": 40301, + "mente": 40302, + "incorporating": 40303, + "bonanza": 40304, + "statefair": 40305, + "ende": 40306, + "conceptions": 40307, + "ees": 40308, + "âĻ¥ï¸ıâĻ¥ï¸ı": 40309, + "dson": 40310, + "firearm": 40311, + "orbital": 40312, + "weh": 40313, + "multip": 40314, + "fob": 40315, + "requiem": 40316, + "plight": 40317, + "thouse": 40318, + "said": 40319, + "ocre": 40320, + "remembrance": 40321, + "nold": 40322, + "chipping": 40323, + "bev": 40324, + "ert": 40325, + "cathy": 40326, + "sym": 40327, + "riggs": 40328, + "mley": 40329, + "dialogues": 40330, + "slender": 40331, + "howl": 40332, + "gauteng": 40333, + "wdw": 40334, + "tobi": 40335, + "smokes": 40336, + "implo": 40337, + "bpm": 40338, + "adn": 40339, + "mombasa": 40340, + "capsul": 40341, + "bloomfield": 40342, + "articul": 40343, + "cleo": 40344, + "googled": 40345, + "fluffy": 40346, + "lard": 40347, + "enzyme": 40348, + "vesti": 40349, + "ibrahi": 40350, + "flame": 40351, + "emea": 40352, + "outages": 40353, + "dispropor": 40354, + "bleak": 40355, + "ansel": 40356, + "icker": 40357, + "stlouis": 40358, + "stockmarket": 40359, + "goodfriday": 40360, + "sault": 40361, + "stalled": 40362, + "prom": 40363, + "epsom": 40364, + "bé": 40365, + "these": 40366, + 
"sauces": 40367, + "mew": 40368, + "litfest": 40369, + "pred": 40370, + "reu": 40371, + "karak": 40372, + "sienna": 40373, + "ellin": 40374, + "biotechnology": 40375, + "ï¸ıâĥ£-": 40376, + "tactic": 40377, + "sain": 40378, + "pork": 40379, + "monza": 40380, + "kaj": 40381, + "lush": 40382, + "compartment": 40383, + "changing": 40384, + "shraddhakapoor": 40385, + "foal": 40386, + "artem": 40387, + "cuando": 40388, + "canola": 40389, + "oriente": 40390, + "messe": 40391, + "dited": 40392, + "brc": 40393, + "boxer": 40394, + "bbctwo": 40395, + "sst": 40396, + "mentday": 40397, + "eming": 40398, + "dewey": 40399, + "kofi": 40400, + "âŀĸâŀĸâŀĸâŀĸ": 40401, + "realization": 40402, + "smol": 40403, + "twood": 40404, + "sanje": 40405, + "flagstaff": 40406, + "berwick": 40407, + "corset": 40408, + "canary": 40409, + "whistleblower": 40410, + "etched": 40411, + "composing": 40412, + "squeezed": 40413, + "bower": 40414, + "autodesk": 40415, + "neh": 40416, + "mathieu": 40417, + "baja": 40418, + "ÅĤ": 40419, + "hydra": 40420, + "daim": 40421, + "ameri": 40422, + "insisted": 40423, + "merlot": 40424, + "garros": 40425, + "heartnews": 40426, + "gainesville": 40427, + "cutler": 40428, + "bode": 40429, + "ðŁĺīðŁĺī": 40430, + "lewes": 40431, + "scountry": 40432, + "gsa": 40433, + "usu": 40434, + "ccm": 40435, + "godawgs": 40436, + "pharaoh": 40437, + "crae": 40438, + "morley": 40439, + "hypnoti": 40440, + "fades": 40441, + "neurons": 40442, + "fuzz": 40443, + "ingco": 40444, + "highlanders": 40445, + "stark": 40446, + "vigne": 40447, + "packets": 40448, + "amarillo": 40449, + "reuben": 40450, + "insults": 40451, + "basic": 40452, + "vector": 40453, + "nme": 40454, + "acruz": 40455, + "tros": 40456, + "transmitter": 40457, + "ðŁĺŀ": 40458, + "interpret": 40459, + "ðŁĺ²": 40460, + "prequel": 40461, + "mcgowan": 40462, + "dissemin": 40463, + "ðŁĴĺðŁĴĺ": 40464, + "masculinity": 40465, + "indiegamedev": 40466, + "alive": 40467, + "tet": 40468, + "petal": 40469, + "emailed": 40470, + "armed": 40471, + "koo": 40472, + "heer": 40473, + "baird": 40474, + "superjunior": 40475, + "metropolis": 40476, + "delavin": 40477, + "declines": 40478, + "stitutes": 40479, + "Ûģ": 40480, + "ptbo": 40481, + "glan": 40482, + "chores": 40483, + "ealing": 40484, + "chrissy": 40485, + "stemc": 40486, + "vian": 40487, + "assassinated": 40488, + "pronounce": 40489, + "illegals": 40490, + "discovery": 40491, + "cavill": 40492, + "frifotos": 40493, + "fal": 40494, + "soi": 40495, + "sabotage": 40496, + "tint": 40497, + "pdc": 40498, + "ðŁİīðŁİĪ": 40499, + "ãĤĬãģ": 40500, + "jio": 40501, + "endeavor": 40502, + "insig": 40503, + "committees": 40504, + "shearer": 40505, + "metz": 40506, + "marrying": 40507, + "hdd": 40508, + "gby": 40509, + "fret": 40510, + "trish": 40511, + "pul": 40512, + "scripted": 40513, + "saki": 40514, + "lw": 40515, + "keye": 40516, + "shimi": 40517, + "nanaimo": 40518, + "cah": 40519, + "ë": 40520, + "tempered": 40521, + "ician": 40522, + "dugg": 40523, + "dishwasher": 40524, + "airfield": 40525, + "srugby": 40526, + "grinch": 40527, + "yst": 40528, + "rms": 40529, + "mahatma": 40530, + "lankan": 40531, + "discar": 40532, + "digestion": 40533, + "nodes": 40534, + "lls": 40535, + "omic": 40536, + "gutter": 40537, + "tisgarh": 40538, + "federico": 40539, + "electionday": 40540, + "bohe": 40541, + "mastercard": 40542, + "fireball": 40543, + "âľĶï¸ı": 40544, + "oyster": 40545, + "pong": 40546, + "dok": 40547, + "enroute": 40548, + "mvc": 40549, + "beatthe": 40550, + "alistair": 40551, + "shub": 40552, + "shaming": 
40553, + "chernobyl": 40554, + "ghibli": 40555, + "thes": 40556, + "pinion": 40557, + "dbs": 40558, + "salts": 40559, + "iction": 40560, + "epiph": 40561, + "ncpol": 40562, + "inconvenience": 40563, + "whitley": 40564, + "inspecting": 40565, + "woodley": 40566, + "wiener": 40567, + "skillet": 40568, + "noles": 40569, + "mca": 40570, + "hina": 40571, + "asha": 40572, + "willingness": 40573, + "wellness": 40574, + "tamed": 40575, + "showtime": 40576, + "disadvantaged": 40577, + "bernat": 40578, + "usn": 40579, + "missionaries": 40580, + "counselling": 40581, + "arrogant": 40582, + "quantitative": 40583, + "legalization": 40584, + "hodge": 40585, + "energyefficiency": 40586, + "camerondallas": 40587, + "possessions": 40588, + "pbb": 40589, + "harrisburg": 40590, + "vg": 40591, + "hinduism": 40592, + "happythanksgiving": 40593, + "fib": 40594, + "reacting": 40595, + "tweetapicture": 40596, + "politi": 40597, + "muppet": 40598, + "hurrah": 40599, + "pace": 40600, + "coastguard": 40601, + "guarded": 40602, + "asam": 40603, + "parry": 40604, + "forevery": 40605, + "xq": 40606, + "oomf": 40607, + "keanu": 40608, + "jind": 40609, + "rist": 40610, + "customerservice": 40611, + "sacred": 40612, + "ðŁĺº": 40613, + "toner": 40614, + "occurrence": 40615, + "matu": 40616, + "valdez": 40617, + "redd": 40618, + "isak": 40619, + "powerrangers": 40620, + "peasant": 40621, + "rajini": 40622, + "abraham": 40623, + "emil": 40624, + "cardo": 40625, + "tril": 40626, + "hairstyles": 40627, + "obsolete": 40628, + "sampler": 40629, + "directive": 40630, + "delavinkisses": 40631, + "verton": 40632, + "glos": 40633, + "spay": 40634, + "palermo": 40635, + "comets": 40636, + "manziel": 40637, + "chicagof": 40638, + "skipped": 40639, + "pictorial": 40640, + "hant": 40641, + "bmi": 40642, + "aol": 40643, + "reopens": 40644, + "paddling": 40645, + "devos": 40646, + "fraud": 40647, + "baseline": 40648, + "queues": 40649, + "spired": 40650, + "snare": 40651, + "euve": 40652, + "descriptions": 40653, + "daisies": 40654, + "caching": 40655, + "galleria": 40656, + "trimmed": 40657, + "stino": 40658, + "recycla": 40659, + "icular": 40660, + "birken": 40661, + "rawlings": 40662, + "flix": 40663, + "chicas": 40664, + "bgt": 40665, + "likeli": 40666, + "argyll": 40667, + "thelove": 40668, + "gaston": 40669, + "blanca": 40670, + "hak": 40671, + "fone": 40672, + "sailormoon": 40673, + "haci": 40674, + "imac": 40675, + "flyn": 40676, + "decan": 40677, + "belles": 40678, + "apic": 40679, + "zog": 40680, + "taunton": 40681, + "constance": 40682, + "lasagna": 40683, + "kernel": 40684, + "inka": 40685, + "harbor": 40686, + "collectively": 40687, + "calculated": 40688, + "aville": 40689, + "shilpa": 40690, + "purdu": 40691, + "gimm": 40692, + "funer": 40693, + "aest": 40694, + "pembrokeshire": 40695, + "nightingale": 40696, + "nunes": 40697, + "hypertension": 40698, + "hubert": 40699, + "sliders": 40700, + "infertility": 40701, + "commended": 40702, + "transatlantic": 40703, + "metrical": 40704, + "!!@": 40705, + "ÅŁ": 40706, + "ssg": 40707, + "bacca": 40708, + "inverted": 40709, + "funfactfriday": 40710, + "itans": 40711, + "album": 40712, + "acquainted": 40713, + "rier": 40714, + "whelan": 40715, + "sarab": 40716, + "mue": 40717, + "snooze": 40718, + "piff": 40719, + "agreeing": 40720, + "spitting": 40721, + "jermaine": 40722, + "nye": 40723, + "âľıï¸ı": 40724, + "ambush": 40725, + "zeph": 40726, + "congreg": 40727, + "university": 40728, + "sapp": 40729, + "wannabe": 40730, + "patrice": 40731, + "ibd": 40732, + "doglo": 40733, + 
"fridges": 40734, + "sund": 40735, + "kingston": 40736, + "argon": 40737, + "kamen": 40738, + "hardrock": 40739, + "dsley": 40740, + "dolores": 40741, + "ì°": 40742, + "otaku": 40743, + "piping": 40744, + "behaving": 40745, + "âŃIJï¸ıâŃIJï¸ıâŃIJï¸ı": 40746, + "bluebird": 40747, + "ansari": 40748, + "teapot": 40749, + "firework": 40750, + "crop": 40751, + "logans": 40752, + "typed": 40753, + "thickness": 40754, + "igers": 40755, + "cfp": 40756, + "dysfunctional": 40757, + "contrasting": 40758, + "etty": 40759, + "astonmartin": 40760, + "txst": 40761, + "dragrace": 40762, + "attributes": 40763, + "marathon": 40764, + "manuscripts": 40765, + "johnstone": 40766, + "ðŁĺ±ðŁĺ±": 40767, + "boer": 40768, + "ayu": 40769, + "arugula": 40770, + "poorest": 40771, + "condu": 40772, + "assumption": 40773, + "anagh": 40774, + "noh": 40775, + "delavin": 40776, + "sitter": 40777, + "gö": 40778, + "morow": 40779, + "kickstart": 40780, + "comi": 40781, + "glacial": 40782, + "ghead": 40783, + "bain": 40784, + "kershaw": 40785, + "endof": 40786, + "freud": 40787, + "omat": 40788, + "iaf": 40789, + "hug": 40790, + "signup": 40791, + "eachother": 40792, + "definite": 40793, + "tubing": 40794, + "shakira": 40795, + "ðŁijıðŁı½": 40796, + "uuuu": 40797, + "swin": 40798, + "shambles": 40799, + "olas": 40800, + "skell": 40801, + "britain": 40802, + "knw": 40803, + "clutter": 40804, + "omy": 40805, + "jens": 40806, + "hanged": 40807, + "cityscape": 40808, + "scraps": 40809, + "unlocking": 40810, + "deadliest": 40811, + "erno": 40812, + "breastcancer": 40813, + "ait": 40814, + "inspect": 40815, + "furi": 40816, + "ðŁĴĮ": 40817, + "kud": 40818, + "jule": 40819, + "orah": 40820, + "mids": 40821, + "mdt": 40822, + "burgring": 40823, + "rattle": 40824, + "pusa": 40825, + "stalk": 40826, + "cleans": 40827, + "issance": 40828, + "zek": 40829, + "worthit": 40830, + "nameis": 40831, + "muskoka": 40832, + "councilman": 40833, + "urbanart": 40834, + "barrac": 40835, + "unsolved": 40836, + "tul": 40837, + "gita": 40838, + "whiteboard": 40839, + "soybeans": 40840, + "ement": 40841, + "conti": 40842, + "saturdaymotivation": 40843, + "conveniently": 40844, + "docking": 40845, + "tado": 40846, + "âı©": 40847, + "spino": 40848, + "puppylove": 40849, + "pof": 40850, + "fabricated": 40851, + "robbers": 40852, + "adopts": 40853, + "tified": 40854, + "kkr": 40855, + "indulgence": 40856, + "noticeable": 40857, + "macquarie": 40858, + "chapel": 40859, + "sensual": 40860, + "kiko": 40861, + "melanoma": 40862, + "loretta": 40863, + "liance": 40864, + "aben": 40865, + "splus": 40866, + "gaal": 40867, + "acele": 40868, + "libdems": 40869, + "comparisons": 40870, + "ðŁĮµ": 40871, + "rhythms": 40872, + "mery": 40873, + "encapsul": 40874, + "napier": 40875, + "ðŁijĮðŁijĮðŁijĮ": 40876, + "ðŁijIJ": 40877, + "platz": 40878, + "fresno": 40879, + "reformed": 40880, + "ranbir": 40881, + "elit": 40882, + "thebest": 40883, + "bhushan": 40884, + "vinnie": 40885, + "improvised": 40886, + "sittin": 40887, + "recreated": 40888, + "eba": 40889, + "ecker": 40890, + "acrob": 40891, + "ponte": 40892, + "cord": 40893, + "giddy": 40894, + "eurusd": 40895, + "fever": 40896, + "intuition": 40897, + "gari": 40898, + "dummies": 40899, + "budweiser": 40900, + "amendments": 40901, + "tetra": 40902, + "schnit": 40903, + "ayas": 40904, + "marys": 40905, + "cist": 40906, + "kani": 40907, + "kermit": 40908, + "ðŁĺ±ðŁĺ±ðŁĺ±": 40909, + "tinker": 40910, + "strolling": 40911, + "divisional": 40912, + "nigeri": 40913, + "ominous": 40914, + "menstrual": 40915, + "karab": 40916, + 
"khy": 40917, + "bwfc": 40918, + "panhandle": 40919, + "lilli": 40920, + "weller": 40921, + "strapped": 40922, + "sonthe": 40923, + "transferring": 40924, + "ethereal": 40925, + "sneaks": 40926, + "rudol": 40927, + "gables": 40928, + "jacking": 40929, + "cincode": 40930, + "fortune": 40931, + "canadiens": 40932, + "confor": 40933, + "abnormal": 40934, + "franklin": 40935, + "tita": 40936, + "mula": 40937, + "persist": 40938, + "cuties": 40939, + "kiel": 40940, + "ðŁĩ±ðŁĩ": 40941, + "hermann": 40942, + "awk": 40943, + "fiasco": 40944, + "koto": 40945, + "weta": 40946, + "hiker": 40947, + "buddy": 40948, + "preventive": 40949, + "mcgraw": 40950, + "gameboy": 40951, + "forsyth": 40952, + "topshop": 40953, + "siob": 40954, + "sadh": 40955, + "intram": 40956, + "followart": 40957, + "soaps": 40958, + "dragonball": 40959, + "oux": 40960, + "morrison": 40961, + "à¹ĥ": 40962, + "lubric": 40963, + "adulthood": 40964, + "morrisons": 40965, + "âļłï¸ı": 40966, + "hermo": 40967, + "taka": 40968, + "stallone": 40969, + "misuse": 40970, + "teamgb": 40971, + "ragha": 40972, + "confined": 40973, + "aty": 40974, + "homophobic": 40975, + "nwo": 40976, + "skynews": 40977, + "hoya": 40978, + "acrosse": 40979, + "wiiu": 40980, + "purée": 40981, + "jeddah": 40982, + "ðŁ¤§": 40983, + "advisers": 40984, + "phine": 40985, + "anis": 40986, + "scrumptious": 40987, + "ë°ķ": 40988, + "cke": 40989, + "viny": 40990, + "term": 40991, + "sdc": 40992, + "odo": 40993, + "homeschool": 40994, + "vasc": 40995, + "leopards": 40996, + "deborah": 40997, + "illicit": 40998, + "curran": 40999, + "asroma": 41000, + "naught": 41001, + "marig": 41002, + "brandi": 41003, + "emp": 41004, + "ðŁĺįðŁijĮ": 41005, + "îĮ": 41006, + "suspend": 41007, + "luz": 41008, + "initiation": 41009, + "schaft": 41010, + "jensenackles": 41011, + "crawler": 41012, + "postdoc": 41013, + "desks": 41014, + "trailblazer": 41015, + "denomin": 41016, + "trix": 41017, + "noise": 41018, + "poet": 41019, + "±ï¸ı": 41020, + "smug": 41021, + "volatile": 41022, + "proofs": 41023, + "pharmacist": 41024, + "sardinia": 41025, + "mashable": 41026, + "kimchi": 41027, + "coed": 41028, + "schalke": 41029, + "doodled": 41030, + "csw": 41031, + "shur": 41032, + "rox": 41033, + "dok": 41034, + "chrisbrown": 41035, + "mathematician": 41036, + "abound": 41037, + "angelic": 41038, + "rockford": 41039, + "dole": 41040, + "yorkers": 41041, + "msn": 41042, + "gman": 41043, + "xavier": 41044, + "borrowing": 41045, + "markings": 41046, + "longhorn": 41047, + "kja": 41048, + "diverted": 41049, + "mmit": 41050, + "euphoria": 41051, + "ayyy": 41052, + "tea": 41053, + "pah": 41054, + "cki": 41055, + "uncut": 41056, + "liven": 41057, + "kyung": 41058, + "fanart": 41059, + "mering": 41060, + "redding": 41061, + "amovie": 41062, + "gridi": 41063, + "cthulhu": 41064, + "scholarly": 41065, + "judah": 41066, + "thbewithyou": 41067, + "eucalyp": 41068, + "ðŁIJķ": 41069, + "hertfordshire": 41070, + "courtroom": 41071, + "byu": 41072, + "auctioned": 41073, + "please": 41074, + "marcia": 41075, + "ê°ĵ": 41076, + "succeeded": 41077, + "elas": 41078, + "arvind": 41079, + "tlot": 41080, + "saigon": 41081, + "rett": 41082, + "rakesh": 41083, + "fdny": 41084, + "asen": 41085, + "sebring": 41086, + "gladiators": 41087, + "youknow": 41088, + "vlad": 41089, + "gola": 41090, + "parap": 41091, + "ÑĢи": 41092, + "sabcnews": 41093, + "oneteam": 41094, + "ohl": 41095, + "sune": 41096, + "rij": 41097, + "cdc": 41098, + "stargate": 41099, + "rundown": 41100, + "plato": 41101, + "phc": 41102, + "chatter": 41103, + 
"raviol": 41104, + "mnf": 41105, + "mandala": 41106, + "liet": 41107, + "à¸ķ": 41108, + "maria": 41109, + "hungover": 41110, + "consolidation": 41111, + "ferrell": 41112, + "traditional": 41113, + "iloveart": 41114, + "galap": 41115, + "ðŁıĮ": 41116, + "quezon": 41117, + "españa": 41118, + "ðŁĩ¨ðŁĩŃ": 41119, + "hobby": 41120, + "steamboat": 41121, + "malign": 41122, + "guillau": 41123, + "prohi": 41124, + "itsme": 41125, + "íĥĢ": 41126, + "inscription": 41127, + "alz": 41128, + "marian": 41129, + "kade": 41130, + "mmon": 41131, + "adjusting": 41132, + "nests": 41133, + "internally": 41134, + "cir": 41135, + "vikram": 41136, + "malala": 41137, + "kph": 41138, + "felicia": 41139, + "thereal": 41140, + "captivity": 41141, + "atis": 41142, + "marcorubio": 41143, + "kaleido": 41144, + "chev": 41145, + "manoj": 41146, + "lemore": 41147, + "gentri": 41148, + "vips": 41149, + "trope": 41150, + "\"âĢĶ": 41151, + "pairings": 41152, + "malnutrition": 41153, + "fray": 41154, + "designation": 41155, + "brunomars": 41156, + "aze": 41157, + "torrential": 41158, + "panzer": 41159, + "gail": 41160, + "underthe": 41161, + "theological": 41162, + "schizophre": 41163, + "dazzle": 41164, + "frederic": 41165, + "mopar": 41166, + "adilla": 41167, + "soggy": 41168, + "raun": 41169, + "mediocre": 41170, + "colorec": 41171, + "ife": 41172, + "pinst": 41173, + "bluef": 41174, + "²": 41175, + "worldwater": 41176, + "giroud": 41177, + "clarinet": 41178, + "adolf": 41179, + "tarantino": 41180, + "receipts": 41181, + "assump": 41182, + "ðŁijŁ": 41183, + "coffees": 41184, + "âľĬðŁı¾": 41185, + "duplex": 41186, + "sof": 41187, + "rx": 41188, + "lino": 41189, + "timberwolves": 41190, + "pandit": 41191, + "motm": 41192, + "ega": 41193, + "ayama": 41194, + "achs": 41195, + "outsider": 41196, + "llen": 41197, + "coer": 41198, + "tilly": 41199, + "cheeseburger": 41200, + "mads": 41201, + "pledis": 41202, + "empty": 41203, + "nationalparks": 41204, + "aziz": 41205, + "pmi": 41206, + "junkies": 41207, + "fener": 41208, + "sqn": 41209, + "ès": 41210, + "generation": 41211, + "cleopatra": 41212, + "bhubanes": 41213, + "mosques": 41214, + "tyfree": 41215, + "poppins": 41216, + "twc": 41217, + "orwell": 41218, + "nage": 41219, + "kawhi": 41220, + "hollow": 41221, + "dalai": 41222, + "¨¨¨¨": 41223, + "ouro": 41224, + "mhealth": 41225, + "gion": 41226, + "azo": 41227, + "visas": 41228, + "renegade": 41229, + "reic": 41230, + "wsop": 41231, + "ðŁĴļðŁĴĽ": 41232, + "echel": 41233, + "toxicity": 41234, + "mün": 41235, + "bunk": 41236, + "stimulating": 41237, + "asthour": 41238, + "\\'": 41239, + "eph": 41240, + "endemic": 41241, + "cnbc": 41242, + "shrinking": 41243, + "peabody": 41244, + "michelangelo": 41245, + "canyon": 41246, + "wale": 41247, + "sumi": 41248, + "siders": 41249, + "inuit": 41250, + "?.": 41251, + "professionalism": 41252, + "dracing": 41253, + "platoon": 41254, + "pons": 41255, + "outbound": 41256, + "mapleleafs": 41257, + "desol": 41258, + "cency": 41259, + "athan": 41260, + "verma": 41261, + "rubbing": 41262, + "okan": 41263, + "ðŁijł": 41264, + "mullins": 41265, + "authentic": 41266, + "Åį": 41267, + "almanac": 41268, + "gaia": 41269, + "bbq": 41270, + "onimo": 41271, + "keh": 41272, + "tya": 41273, + "touts": 41274, + "yav": 41275, + "reposit": 41276, + ",.": 41277, + "wight": 41278, + "seeyou": 41279, + "callof": 41280, + "donesia": 41281, + "bargaining": 41282, + "granth": 41283, + "sdsu": 41284, + "amphitheater": 41285, + "psu": 41286, + "rewatching": 41287, + "winetasting": 41288, + "peakdistrict": 41289, + 
"detecting": 41290, + "thurman": 41291, + "phee": 41292, + "èªķ": 41293, + "umich": 41294, + "rer": 41295, + "sculpted": 41296, + "gole": 41297, + "namesake": 41298, + "ðŁĶģ": 41299, + "servicing": 41300, + "baugh": 41301, + "pugh": 41302, + "pencil": 41303, + "darth": 41304, + "munchkin": 41305, + "atorium": 41306, + "teners": 41307, + "suny": 41308, + "rollingstones": 41309, + "maging": 41310, + "starrer": 41311, + "idris": 41312, + "feinstein": 41313, + "agron": 41314, + "âĺºï¸ıâĺºï¸ı": 41315, + "supervised": 41316, + "chameleon": 41317, + "aggregate": 41318, + "successive": 41319, + "mogul": 41320, + "instyle": 41321, + "poldark": 41322, + "custome": 41323, + "ohiostate": 41324, + "haya": 41325, + "cides": 41326, + "brokerage": 41327, + "angelou": 41328, + "fifawwc": 41329, + "deforestation": 41330, + "alton": 41331, + "pamph": 41332, + "hugged": 41333, + "hobo": 41334, + "changeable": 41335, + "kuber": 41336, + "burroughs": 41337, + "demonetisation": 41338, + "capecod": 41339, + "versatility": 41340, + "orice": 41341, + "leila": 41342, + "womeninscience": 41343, + "tua": 41344, + "hedges": 41345, + "embarrassment": 41346, + "alife": 41347, + "soars": 41348, + "nighter": 41349, + "hymn": 41350, + "gipp": 41351, + "chasu": 41352, + "techs": 41353, + "niall": 41354, + "killa": 41355, + "hika": 41356, + "camels": 41357, + "value": 41358, + "¢": 41359, + "scoops": 41360, + "mahmoud": 41361, + "clusive": 41362, + "adriana": 41363, + "paco": 41364, + "ozil": 41365, + "unas": 41366, + "translations": 41367, + "whisperer": 41368, + "sbi": 41369, + "buxton": 41370, + "biotics": 41371, + "indiffe": 41372, + "kenney": 41373, + "klar": 41374, + "etching": 41375, + "barrabest": 41376, + "instability": 41377, + "seine": 41378, + "votel": 41379, + "blogged": 41380, + "whiskey": 41381, + "myspace": 41382, + "tant": 41383, + "landia": 41384, + "giveback": 41385, + "illus": 41386, + "awak": 41387, + "acab": 41388, + "fbloggers": 41389, + "cloudcomputing": 41390, + "blatant": 41391, + "syrians": 41392, + "bandra": 41393, + "styn": 41394, + "anem": 41395, + "keted": 41396, + "karthik": 41397, + "barunsob": 41398, + "pinot": 41399, + "gubernat": 41400, + "gaye": 41401, + "artiste": 41402, + "ified": 41403, + "conventions": 41404, + "huan": 41405, + "geniuses": 41406, + "eeeeee": 41407, + "folly": 41408, + "somerville": 41409, + "pridemonth": 41410, + "ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸": 41411, + "chemotherapy": 41412, + "pauls": 41413, + "bakar": 41414, + "ìĦ¸ë¸IJ": 41415, + "taiwanese": 41416, + "follo": 41417, + "css": 41418, + "reign": 41419, + "nnnn": 41420, + "flaun": 41421, + "catastrophe": 41422, + "ities": 41423, + "fragments": 41424, + "extremists": 41425, + "ymoun": 41426, + "carmen": 41427, + "ezekiel": 41428, + "connecting": 41429, + "seh": 41430, + "manta": 41431, + "remodeling": 41432, + "weymouth": 41433, + "atoms": 41434, + "cem": 41435, + "newell": 41436, + "lumi": 41437, + "theopen": 41438, + "moc": 41439, + "miliband": 41440, + "gland": 41441, + "zshq": 41442, + "maggie": 41443, + "maniacs": 41444, + "msp": 41445, + "ady": 41446, + "creams": 41447, + "leanne": 41448, + "esta": 41449, + "pyg": 41450, + "affinity": 41451, + "prayer": 41452, + "dunbar": 41453, + "lightroom": 41454, + "acadi": 41455, + "wynonna": 41456, + "romantic": 41457, + "statedept": 41458, + "sickle": 41459, + "whos": 41460, + "lamo": 41461, + "etour": 41462, + "finity": 41463, + "shrub": 41464, + "sharpen": 41465, + "pundit": 41466, + "edon": 41467, + "afore": 41468, + "mars": 41469, + "jeffery": 41470, + "terps": 41471, + 
"medallist": 41472, + "katharine": 41473, + "accusing": 41474, + "taz": 41475, + "royd": 41476, + "fromhome": 41477, + "confrontation": 41478, + "allegh": 41479, + "ðŁijīðŁijī": 41480, + "refresher": 41481, + "ranveer": 41482, + "neverland": 41483, + "jojo": 41484, + "lucrative": 41485, + "enam": 41486, + "caver": 41487, + "paedi": 41488, + "manjaro": 41489, + "fluids": 41490, + "thessal": 41491, + "oppressed": 41492, + "muss": 41493, + "johanna": 41494, + "Ø®": 41495, + "cng": 41496, + "buildthe": 41497, + "settles": 41498, + "sith": 41499, + "fuego": 41500, + "clamp": 41501, + "arag": 41502, + "payer": 41503, + "tedx": 41504, + "mandy": 41505, + "interstellar": 41506, + "frc": 41507, + "chand": 41508, + "bcc": 41509, + "molo": 41510, + "lentil": 41511, + "johansson": 41512, + "grimsby": 41513, + "naturelovers": 41514, + "ðŁļ¨ðŁļ¨ðŁļ¨": 41515, + "shinde": 41516, + "xin": 41517, + "internationaldayof": 41518, + "transitional": 41519, + "sata": 41520, + "caddy": 41521, + "wod": 41522, + "ifu": 41523, + "hays": 41524, + "hollyo": 41525, + "jang": 41526, + "irc": 41527, + "coim": 41528, + "gradable": 41529, + "\"\"": 41530, + "ðŁį´": 41531, + "া": 41532, + "ael": 41533, + "nyo": 41534, + "westlake": 41535, + "timeout": 41536, + "sofi": 41537, + "phenomena": 41538, + "cultivation": 41539, + "agno": 41540, + "unarmed": 41541, + "sot": 41542, + "conj": 41543, + "geno": 41544, + "royalnavy": 41545, + "nutrition": 41546, + "fairmont": 41547, + "tirelessly": 41548, + "sng": 41549, + "rety": 41550, + "mica": 41551, + "lucent": 41552, + "sloane": 41553, + "drool": 41554, + "rizal": 41555, + "odell": 41556, + "criticized": 41557, + ".'\"": 41558, + "laze": 41559, + "deserted": 41560, + "coder": 41561, + "pras": 41562, + "lillian": 41563, + "itinerary": 41564, + "davy": 41565, + "anap": 41566, + "whipping": 41567, + "hoboken": 41568, + "kareena": 41569, + "羣": 41570, + "vius": 41571, + "tern": 41572, + "nantucket": 41573, + "misunderstood": 41574, + "bulaga": 41575, + "stant": 41576, + "chinook": 41577, + "zam": 41578, + "relies": 41579, + "dss": 41580, + "edmond": 41581, + "sketchy": 41582, + "mell": 41583, + "fex": 41584, + "rector": 41585, + "distill": 41586, + "daydream": 41587, + "winemaker": 41588, + "ripley": 41589, + "billionaires": 41590, + "helene": 41591, + "atif": 41592, + "culprit": 41593, + "bertrand": 41594, + "wouldnt": 41595, + "mapped": 41596, + "vak": 41597, + "gladly": 41598, + "parliament": 41599, + "kidlitart": 41600, + "wareness": 41601, + "goliath": 41602, + "âĨĵ": 41603, + "viewpoint": 41604, + "tatted": 41605, + "fuls": 41606, + "dorsey": 41607, + "anglers": 41608, + "lids": 41609, + "kiya": 41610, + "bowles": 41611, + "beh": 41612, + "bite": 41613, + "compatibility": 41614, + "ancestral": 41615, + "prox": 41616, + "behaved": 41617, + "gubernatorial": 41618, + "chfield": 41619, + "saban": 41620, + "zh": 41621, + "teeny": 41622, + "shibuya": 41623, + "holliday": 41624, + "pancy": 41625, + "âĿĦï¸ıâĿĦï¸ı": 41626, + "seungri": 41627, + "?,": 41628, + "ðŁĩ¦ðŁĩ·": 41629, + "imitation": 41630, + "impactful": 41631, + "anyi": 41632, + "genevie": 41633, + "años": 41634, + "bateman": 41635, + "glider": 41636, + "afar": 41637, + "rasheed": 41638, + "effortless": 41639, + "shwar": 41640, + "dachsh": 41641, + "erun": 41642, + "atos": 41643, + "kini": 41644, + "chd": 41645, + "khaki": 41646, + "klin": 41647, + "felicidades": 41648, + "belo": 41649, + "asl": 41650, + "toppers": 41651, + "finley": 41652, + "stacey": 41653, + "rigorous": 41654, + "karting": 41655, + "leppard": 41656, + 
"carmichael": 41657, + "beret": 41658, + "cse": 41659, + "akhi": 41660, + "meringue": 41661, + "aban": 41662, + "hake": 41663, + "geri": 41664, + "erjee": 41665, + "resto": 41666, + "commanders": 41667, + "prit": 41668, + "flor": 41669, + "adven": 41670, + "extermin": 41671, + "remainder": 41672, + "åIJ": 41673, + "esg": 41674, + "martino": 41675, + "lullaby": 41676, + "|@": 41677, + "mign": 41678, + "instore": 41679, + "bigbang": 41680, + "cordi": 41681, + "cauley": 41682, + "antebellum": 41683, + "dgate": 41684, + "crock": 41685, + "spandex": 41686, + "scaffolding": 41687, + "oreos": 41688, + "ê°ĵìĦ¸ë¸IJ": 41689, + "pomona": 41690, + "mauro": 41691, + "universi": 41692, + "remi": 41693, + "afootball": 41694, + "tant": 41695, + "smalls": 41696, + "neh": 41697, + "worldo": 41698, + "tropical": 41699, + "morph": 41700, + "javelin": 41701, + "glar": 41702, + "arquitec": 41703, + "reminiscent": 41704, + "tubs": 41705, + "spidey": 41706, + "makeu": 41707, + "sylla": 41708, + "progressives": 41709, + "blot": 41710, + "shorten": 41711, + "keepin": 41712, + "chak": 41713, + "angst": 41714, + "superfood": 41715, + "decadent": 41716, + "stony": 41717, + "neurological": 41718, + "arboretum": 41719, + "annak": 41720, + "fema": 41721, + "percu": 41722, + "disrespectful": 41723, + "smallbiz": 41724, + "lox": 41725, + "coom": 41726, + "csc": 41727, + "bsbi": 41728, + "prevalence": 41729, + "himss": 41730, + "espan": 41731, + "moga": 41732, + "frampton": 41733, + "skymap": 41734, + "masse": 41735, + "leviathan": 41736, + "().": 41737, + "nocturnal": 41738, + "carameli": 41739, + "angor": 41740, + "amnesia": 41741, + "outsiders": 41742, + "shealth": 41743, + "rhino": 41744, + "antag": 41745, + "agio": 41746, + "ðŁĴ°ðŁĴ°": 41747, + "takeme": 41748, + "kabaddi": 41749, + "csi": 41750, + "msh": 41751, + "cochrane": 41752, + "thessaloni": 41753, + "sila": 41754, + "haus": 41755, + "dusting": 41756, + "obese": 41757, + "macklemore": 41758, + "manish": 41759, + "lenin": 41760, + "mdc": 41761, + "grown": 41762, + "sheffield": 41763, + "srs": 41764, + "kele": 41765, + "carson": 41766, + "chum": 41767, + "dahlia": 41768, + "cantore": 41769, + "oppo": 41770, + "howling": 41771, + "cybercrime": 41772, + "surrealism": 41773, + "scran": 41774, + "faiz": 41775, + "thren": 41776, + "racists": 41777, + "rout": 41778, + "pknot": 41779, + "semana": 41780, + "sini": 41781, + "mccull": 41782, + "machi": 41783, + "alfonso": 41784, + "yb": 41785, + "sardar": 41786, + "kendrick": 41787, + "deng": 41788, + "recipro": 41789, + "onf": 41790, + "doomsday": 41791, + "bribery": 41792, + "customiz": 41793, + "artis": 41794, + "cpi": 41795, + "ðŁĻĪðŁĻĪ": 41796, + "slava": 41797, + "lette": 41798, + "ens": 41799, + "âĿ¤ï¸ıðŁĺĺ": 41800, + "crayon": 41801, + "adan": 41802, + "trc": 41803, + "migrate": 41804, + "simpson": 41805, + "rowers": 41806, + "kingsley": 41807, + "farmersmarket": 41808, + "sheehan": 41809, + "nephe": 41810, + "bornon": 41811, + "carton": 41812, + "mickey": 41813, + "allure": 41814, + "ulu": 41815, + "slipknot": 41816, + "hebdo": 41817, + "guido": 41818, + "dogcelebration": 41819, + "onlinemarketing": 41820, + "accelerating": 41821, + ")..": 41822, + "originated": 41823, + "macaroni": 41824, + "edtech": 41825, + "outfield": 41826, + "mitz": 41827, + "discus": 41828, + "advertiser": 41829, + "manor": 41830, + "hashi": 41831, + "descrip": 41832, + "capita": 41833, + "fulbright": 41834, + "receptor": 41835, + "conn": 41836, + "coney": 41837, + "spionage": 41838, + "rattle": 41839, + "prest": 41840, + "uli": 41841, + 
"blogpost": 41842, + "ackeray": 41843, + ")â̦": 41844, + "redvelvet": 41845, + "matth": 41846, + "inspiring": 41847, + "bsd": 41848, + "kerri": 41849, + "pocon": 41850, + "millar": 41851, + "repur": 41852, + "accenture": 41853, + "ä¹": 41854, + "rambo": 41855, + "ragnarok": 41856, + "deleting": 41857, + "britishmuseum": 41858, + "patory": 41859, + "leipzig": 41860, + "florian": 41861, + "scifi": 41862, + "iners": 41863, + "brate": 41864, + "yoy": 41865, + "melissa": 41866, + "aber": 41867, + "masa": 41868, + "pote": 41869, + "mosquitoes": 41870, + "transplant": 41871, + "rpa": 41872, + ";))": 41873, + "bastille": 41874, + "ylan": 41875, + "joyeux": 41876, + "melodic": 41877, + "captions": 41878, + "atrist": 41879, + "rochdale": 41880, + "gotti": 41881, + "pewdie": 41882, + "cutiesaturday": 41883, + "whois": 41884, + "aquaculture": 41885, + "tiva": 41886, + "spel": 41887, + "hess": 41888, + "haji": 41889, + "freddie": 41890, + "coper": 41891, + "brando": 41892, + "vk": 41893, + "photobook": 41894, + "*,": 41895, + "mydayin": 41896, + "michaela": 41897, + "brunei": 41898, + "srini": 41899, + "inte": 41900, + "ı": 41901, + "deol": 41902, + "dfc": 41903, + "separately": 41904, + "bund": 41905, + "vests": 41906, + "toc": 41907, + "meck": 41908, + "reinforced": 41909, + "constraints": 41910, + "carroll": 41911, + "sqft": 41912, + "rever": 41913, + "camper": 41914, + "birdman": 41915, + "inaction": 41916, + "generators": 41917, + "triumphant": 41918, + "pests": 41919, + "ovo": 41920, + "gypt": 41921, + "alamo": 41922, + "scaled": 41923, + "sureshpp": 41924, + "sdn": 41925, + "ismo": 41926, + "gios": 41927, + ")@": 41928, + "justiceleague": 41929, + "restaurant": 41930, + "gabi": 41931, + "dengue": 41932, + "nextgen": 41933, + "exempli": 41934, + "apex": 41935, + "inspirational": 41936, + "downside": 41937, + "kidz": 41938, + "upl": 41939, + "etna": 41940, + "alvaro": 41941, + "feldman": 41942, + "barnet": 41943, + "mha": 41944, + "esch": 41945, + "blooded": 41946, + ">>>>>>>>": 41947, + "kani": 41948, + "hofficial": 41949, + "casablanca": 41950, + "birds": 41951, + "tyga": 41952, + "swamp": 41953, + "oday": 41954, + "newcastle": 41955, + "nbap": 41956, + "cision": 41957, + "chools": 41958, + "aflo": 41959, + "nep": 41960, + "monton": 41961, + "akb": 41962, + "supermodel": 41963, + "downtime": 41964, + "thos": 41965, + "scwx": 41966, + "snoopy": 41967, + "aggreg": 41968, + "yoke": 41969, + "norcal": 41970, + "wett": 41971, + "prolonged": 41972, + "metast": 41973, + "beater": 41974, + "fta": 41975, + "tlap": 41976, + "disgusted": 41977, + "yh": 41978, + "voiceover": 41979, + "itchy": 41980, + "ipc": 41981, + "ðŁİ¾": 41982, + "pheasant": 41983, + "straits": 41984, + "rampant": 41985, + "jg": 41986, + "fertil": 41987, + "assures": 41988, + "fortunes": 41989, + "salinas": 41990, + "lizards": 41991, + "kettle": 41992, + "ibs": 41993, + "cynthi": 41994, + "heg": 41995, + "mccr": 41996, + "socceroos": 41997, + "happenings": 41998, + "corden": 41999, + "ðŁĺĤðŁijĮ": 42000, + "tches": 42001, + "egret": 42002, + "wolverines": 42003, + "congratulated": 42004, + "hogg": 42005, + "bottling": 42006, + "wri": 42007, + "ferri": 42008, + "bosch": 42009, + "afire": 42010, + "ogden": 42011, + "sjo": 42012, + "jdm": 42013, + "svt": 42014, + "contex": 42015, + "tollywood": 42016, + "mink": 42017, + "mese": 42018, + "supersonic": 42019, + "opoulos": 42020, + "å¸": 42021, + "âĶģ": 42022, + "knuckle": 42023, + "guise": 42024, + "gami": 42025, + "chucky": 42026, + "zinger": 42027, + "radial": 42028, + "complained": 42029, 
+ "boda": 42030, + "fetal": 42031, + "disciplines": 42032, + "corro": 42033, + "ðŁĩ®ðŁĩ¹": 42034, + "opted": 42035, + "filtration": 42036, + "adnan": 42037, + "emcee": 42038, + "mistre": 42039, + "insomni": 42040, + "fergus": 42041, + "trajec": 42042, + "ondon": 42043, + "medtech": 42044, + "tangerine": 42045, + "madras": 42046, + "grue": 42047, + "cabs": 42048, + "zhu": 42049, + "sureshpprabhu": 42050, + "insulated": 42051, + "dayswild": 42052, + "ppm": 42053, + "bandai": 42054, + "vday": 42055, + "sff": 42056, + "squid": 42057, + "lothing": 42058, + "notdead": 42059, + "expressive": 42060, + "cull": 42061, + "alastair": 42062, + "xu": 42063, + "upfront": 42064, + "fishers": 42065, + "enes": 42066, + "umd": 42067, + "dismissal": 42068, + "stier": 42069, + "sels": 42070, + "lust": 42071, + "reactive": 42072, + "protester": 42073, + "eyelashes": 42074, + "alim": 42075, + "goode": 42076, + "greeng": 42077, + "dair": 42078, + "compen": 42079, + "anushka": 42080, + "prototyping": 42081, + "mapu": 42082, + "bearings": 42083, + "ðŁIJŁ": 42084, + "forme": 42085, + "bsbibotany": 42086, + "timothy": 42087, + "outskirts": 42088, + "ambed": 42089, + "aretha": 42090, + "wendell": 42091, + "streaks": 42092, + "nim": 42093, + "kpk": 42094, + "snee": 42095, + "fitter": 42096, + "quota": 42097, + "pate": 42098, + "winning": 42099, + "ðŁįŃ": 42100, + "shopping": 42101, + "mainst": 42102, + "culver": 42103, + "stevie": 42104, + "mcfadden": 42105, + "counterparts": 42106, + "grenfell": 42107, + "folsom": 42108, + "dorset": 42109, + "techcrunch": 42110, + "â¬ħï¸ı": 42111, + "tiptuesday": 42112, + "usl": 42113, + "trex": 42114, + "georgie": 42115, + "ranveerofficial": 42116, + "licks": 42117, + "sewn": 42118, + "kf": 42119, + "'â̦": 42120, + "japs": 42121, + "pate": 42122, + "orthop": 42123, + "festa": 42124, + "stras": 42125, + "montal": 42126, + "hammersmith": 42127, + "foremost": 42128, + "widows": 42129, + "madre": 42130, + "itez": 42131, + "mitochondri": 42132, + "ligans": 42133, + "zona": 42134, + "caribou": 42135, + "mss": 42136, + "andrei": 42137, + "weatherchannel": 42138, + "ghc": 42139, + ":...": 42140, + "taft": 42141, + "aweather": 42142, + "alisation": 42143, + "brutal": 42144, + "blissful": 42145, + "nikola": 42146, + "malicious": 42147, + "qm": 42148, + "mpgvip": 42149, + "brodie": 42150, + "blitz": 42151, + "applaud": 42152, + "dribb": 42153, + "vague": 42154, + "doggo": 42155, + "translating": 42156, + "interpreted": 42157, + "hatched": 42158, + "getyour": 42159, + "beneficiaries": 42160, + "sparring": 42161, + "caesars": 42162, + "awilliams": 42163, + "lahat": 42164, + "broke": 42165, + "timp": 42166, + "virtues": 42167, + "relying": 42168, + "pietro": 42169, + "ktn": 42170, + "icists": 42171, + "pablo": 42172, + "loui": 42173, + "aag": 42174, + "pnpp": 42175, + "chast": 42176, + "pulses": 42177, + "finish": 42178, + "usairforce": 42179, + "typewriter": 42180, + "thompson": 42181, + "dogs": 42182, + "utto": 42183, + "ãģį": 42184, + "sandal": 42185, + "newly": 42186, + "doge": 42187, + "zw": 42188, + "wankers": 42189, + "negr": 42190, + "mucha": 42191, + "determines": 42192, + "blackfish": 42193, + "skunk": 42194, + "mups": 42195, + "instrument": 42196, + "phyto": 42197, + "daystogo": 42198, + "skinned": 42199, + "haider": 42200, + "conten": 42201, + "ðŁIJ¾ðŁIJ¾": 42202, + "weiler": 42203, + "undoubtedly": 42204, + "chairing": 42205, + "wallis": 42206, + "shard": 42207, + "zindabad": 42208, + "adult": 42209, + "absorption": 42210, + "presto": 42211, + "deploying": 42212, + "drummond": 42213, 
+ "battlefront": 42214, + "seagulls": 42215, + "howdy": 42216, + "judaism": 42217, + "desde": 42218, + "partition": 42219, + "âľĿ": 42220, + "nology": 42221, + "nationalbestfriend": 42222, + "lesnar": 42223, + "filmfare": 42224, + "coasts": 42225, + "christensen": 42226, + "acan": 42227, + "mbu": 42228, + "copped": 42229, + "rubble": 42230, + "swc": 42231, + "funnier": 42232, + "farther": 42233, + "whereas": 42234, + "nanotechnology": 42235, + "withstand": 42236, + "pillow": 42237, + "bowers": 42238, + "tope": 42239, + "itly": 42240, + "confit": 42241, + "makar": 42242, + "comforts": 42243, + "bosh": 42244, + "clipper": 42245, + "balla": 42246, + "stik": 42247, + "milb": 42248, + "safeguard": 42249, + "musique": 42250, + "easport": 42251, + "yaz": 42252, + "padded": 42253, + "bader": 42254, + "foreign": 42255, + "chopin": 42256, + "archive": 42257, + "oka": 42258, + "transporting": 42259, + "tmltalk": 42260, + "ajit": 42261, + "consequence": 42262, + "scroo": 42263, + "ffo": 42264, + "collaborated": 42265, + "pugchat": 42266, + "yemi": 42267, + "javed": 42268, + "auburn": 42269, + "oof": 42270, + "maw": 42271, + "saucer": 42272, + "mitigate": 42273, + "iles": 42274, + "evangelist": 42275, + "terie": 42276, + "recl": 42277, + "indictment": 42278, + "cata": 42279, + "brightness": 42280, + "maythe": 42281, + "whimsical": 42282, + "unlv": 42283, + "keyword": 42284, + "cumin": 42285, + "medway": 42286, + "westworld": 42287, + "traw": 42288, + "imposing": 42289, + "formity": 42290, + "coulter": 42291, + "abz": 42292, + "nypd": 42293, + "grassi": 42294, + "kelsey": 42295, + "qldpol": 42296, + "clockwork": 42297, + "fdr": 42298, + "dianne": 42299, + "âĺij": 42300, + "adh": 42301, + "pann": 42302, + "bravely": 42303, + "aege": 42304, + "unlawful": 42305, + "verdi": 42306, + "pocalypse": 42307, + "pharo": 42308, + "karla": 42309, + "resonance": 42310, + "mastiff": 42311, + "ladak": 42312, + "buu": 42313, + "mailed": 42314, + "hii": 42315, + "crawley": 42316, + "torrent": 42317, + "machado": 42318, + "libyan": 42319, + "effortlessly": 42320, + "falsely": 42321, + "qvist": 42322, + "keef": 42323, + "crafthour": 42324, + "cherished": 42325, + "valkyrie": 42326, + "sari": 42327, + "kalamaz": 42328, + "behe": 42329, + "ðŁĮĻ": 42330, + "thim": 42331, + "roddy": 42332, + "coltrane": 42333, + "butchers": 42334, + "achim": 42335, + "wkend": 42336, + "awkward": 42337, + "cabrera": 42338, + ":))))": 42339, + "franc": 42340, + "declan": 42341, + "condos": 42342, + "aja": 42343, + "pandoramusic": 42344, + "charter": 42345, + "phill": 42346, + "montrose": 42347, + "hatchback": 42348, + "handicapp": 42349, + "greaves": 42350, + "eucalyptus": 42351, + "utmost": 42352, + "tson": 42353, + "burton": 42354, + "midwives": 42355, + "incur": 42356, + "ðŁĺį#": 42357, + "mood": 42358, + "compressed": 42359, + "toma": 42360, + "mustang": 42361, + "mog": 42362, + "asana": 42363, + "testic": 42364, + "shotel": 42365, + "insol": 42366, + "corsair": 42367, + "nhq": 42368, + "benny": 42369, + "smma": 42370, + "kapur": 42371, + "incon": 42372, + "jonas": 42373, + "energies": 42374, + "donal": 42375, + "asad": 42376, + "sez": 42377, + "npa": 42378, + "archived": 42379, + "stimulate": 42380, + "dop": 42381, + "hyd": 42382, + "grieving": 42383, + "ãĥĪ": 42384, + "rona": 42385, + "whyte": 42386, + "treehouse": 42387, + "ssell": 42388, + "sandro": 42389, + "kobo": 42390, + "thermost": 42391, + "seclu": 42392, + "hiya": 42393, + "geez": 42394, + "mamas": 42395, + "priscilla": 42396, + "flavoured": 42397, + "fass": 42398, + "wold": 
42399, + "makerspace": 42400, + "cosplay": 42401, + "ptv": 42402, + "happyvalentinesday": 42403, + "sequoia": 42404, + "lovecraft": 42405, + "guan": 42406, + "dtm": 42407, + "cii": 42408, + "yokohama": 42409, + "posthum": 42410, + "req": 42411, + "ðŁĶµâļªï¸ı": 42412, + "galatasar": 42413, + "dolby": 42414, + "hamptons": 42415, + "disturbance": 42416, + "stonehenge": 42417, + "okc": 42418, + "disrupting": 42419, + "monthsary": 42420, + "jungle": 42421, + "headlights": 42422, + "dustin": 42423, + "microsof": 42424, + "happymothersday": 42425, + "koko": 42426, + "grazi": 42427, + "testo": 42428, + "naidu": 42429, + "malay": 42430, + "arial": 42431, + "rumb": 42432, + "aboo": 42433, + "harman": 42434, + "trape": 42435, + "spoils": 42436, + "jeho": 42437, + "godly": 42438, + "lockscreen": 42439, + "zun": 42440, + "pious": 42441, + "magento": 42442, + "lenders": 42443, + "probable": 42444, + "corporal": 42445, + "mour": 42446, + "awal": 42447, + "sua": 42448, + "callme": 42449, + "tonne": 42450, + "govin": 42451, + "devastation": 42452, + "xj": 42453, + "gearbox": 42454, + "warlock": 42455, + "perme": 42456, + "itate": 42457, + "gazaunderattack": 42458, + "duval": 42459, + "parasite": 42460, + "clemente": 42461, + "leth": 42462, + "iva": 42463, + "frozen": 42464, + "tholes": 42465, + "tobin": 42466, + "cairn": 42467, + "sill": 42468, + "luckiest": 42469, + "converts": 42470, + "stale": 42471, + "pancra": 42472, + "europale": 42473, + "wisdom": 42474, + "schur": 42475, + "ì¶": 42476, + "vertigo": 42477, + "bij": 42478, + "ubc": 42479, + "nure": 42480, + "righteousness": 42481, + "mtc": 42482, + "factory": 42483, + "verst": 42484, + "reversed": 42485, + "huri": 42486, + "heechul": 42487, + "faber": 42488, + "arr": 42489, + "ulous": 42490, + "venom": 42491, + "phat": 42492, + "greenery": 42493, + "brady": 42494, + "æ": 42495, + ":((": 42496, + "nevergiveup": 42497, + "disha": 42498, + "mota": 42499, + "healthcare": 42500, + "dunham": 42501, + "dexpo": 42502, + "denzel": 42503, + "bbins": 42504, + "fics": 42505, + "wham": 42506, + "mcg": 42507, + "elian": 42508, + "wata": 42509, + "stralia": 42510, + "tellu": 42511, + "pesky": 42512, + "spinoff": 42513, + "armoured": 42514, + "reacted": 42515, + "dofficial": 42516, + "tedu": 42517, + "sagar": 42518, + "morally": 42519, + "paralleled": 42520, + "fios": 42521, + "downer": 42522, + "daugh": 42523, + "redo": 42524, + "worldcup": 42525, + "tariq": 42526, + "barne": 42527, + "glaciers": 42528, + "occult": 42529, + "barbarian": 42530, + "hermosa": 42531, + "!!!)": 42532, + "yur": 42533, + "internation": 42534, + "pss": 42535, + "situ": 42536, + "pint": 42537, + "americanair": 42538, + "swam": 42539, + "doppler": 42540, + "ðŁĴĻðŁĴľ": 42541, + "cincodemayo": 42542, + "levan": 42543, + "hellenic": 42544, + "mcne": 42545, + "judi": 42546, + "yuh": 42547, + "stx": 42548, + "quare": 42549, + "ðŁĺĤ.": 42550, + "stig": 42551, + "gels": 42552, + "motley": 42553, + "hardwork": 42554, + "eurozone": 42555, + "ead": 42556, + "ç¥Ń": 42557, + "seabir": 42558, + "cius": 42559, + "laid": 42560, + "alpaca": 42561, + "presumably": 42562, + "pewdiepie": 42563, + "booted": 42564, + "amari": 42565, + "tamine": 42566, + "solace": 42567, + "barrow": 42568, + "academies": 42569, + "xian": 42570, + "omination": 42571, + "dungeons": 42572, + "bma": 42573, + "deity": 42574, + "aik": 42575, + "stabil": 42576, + "hira": 42577, + "affectionate": 42578, + "vingne": 42579, + "newport": 42580, + "ãħĭãħĭ": 42581, + "thirds": 42582, + "retains": 42583, + "aromatherapy": 42584, + "skier": 
42585, + "nima": 42586, + "dope": 42587, + "cringe": 42588, + "condomin": 42589, + "toor": 42590, + "animator": 42591, + "saraj": 42592, + "seascape": 42593, + "minimalism": 42594, + "lakeshore": 42595, + "callaway": 42596, + "bergman": 42597, + "à¤Ĺ": 42598, + "whispering": 42599, + "stupid": 42600, + "rightful": 42601, + "requis": 42602, + "irn": 42603, + "seva": 42604, + "utpol": 42605, + "tuberculo": 42606, + "squish": 42607, + "debut": 42608, + "governmental": 42609, + "christine": 42610, + "allman": 42611, + "weapon": 42612, + "sito": 42613, + "buri": 42614, + "lolita": 42615, + "leafy": 42616, + "fuch": 42617, + "tinted": 42618, + "mcken": 42619, + "ahahaha": 42620, + "ðŁĩµðŁĩ¹": 42621, + "repeal": 42622, + "negan": 42623, + "ðŁķĬ": 42624, + "tailgating": 42625, + "gameinsight": 42626, + "ðŁıŁï¸ı": 42627, + "yakuza": 42628, + "zt": 42629, + "tiring": 42630, + "proposing": 42631, + "bowlers": 42632, + "traitors": 42633, + "akshi": 42634, + "clergy": 42635, + "cito": 42636, + "upsets": 42637, + "tuscal": 42638, + "symphonic": 42639, + "silently": 42640, + "shuff": 42641, + "blackwell": 42642, + "ðŁĺĤ)": 42643, + "kobe": 42644, + "roberto": 42645, + "ridg": 42646, + "dcu": 42647, + "merino": 42648, + "ftp": 42649, + "eastside": 42650, + ".~": 42651, + "nbl": 42652, + "mnleg": 42653, + "tsfor": 42654, + "fraudul": 42655, + "capping": 42656, + "inmy": 42657, + "gymnast": 42658, + "stones": 42659, + "ssin": 42660, + "tweaks": 42661, + "shaggy": 42662, + "oakland": 42663, + "demsin": 42664, + "sangria": 42665, + "mmva": 42666, + "hennessy": 42667, + "downton": 42668, + "rightly": 42669, + "init": 42670, + "agave": 42671, + "oblast": 42672, + "northeast": 42673, + "friendship": 42674, + "dala": 42675, + "trophy": 42676, + "ðŁij½": 42677, + "magin": 42678, + "margaritas": 42679, + "ê·": 42680, + "wwfc": 42681, + "fash": 42682, + "dike": 42683, + "cud": 42684, + "chart": 42685, + "ðŁij®": 42686, + "refugees": 42687, + "joplin": 42688, + "ncs": 42689, + "impy": 42690, + "firmware": 42691, + "pascu": 42692, + "flamin": 42693, + "healthtech": 42694, + "bellletstalk": 42695, + "waka": 42696, + "olls": 42697, + "lago": 42698, + "cowan": 42699, + "bombardier": 42700, + "shome": 42701, + "ðŁĻħ": 42702, + "mcmaster": 42703, + "nave": 42704, + "wells": 42705, + "uta": 42706, + "tellers": 42707, + "misfits": 42708, + "kapil": 42709, + "faceoff": 42710, + "affirm": 42711, + "apro": 42712, + "whitepaper": 42713, + "superyacht": 42714, + "specimens": 42715, + "allocated": 42716, + "...,": 42717, + "-__": 42718, + "kaw": 42719, + "dachshund": 42720, + "djoker": 42721, + "swork": 42722, + "quiere": 42723, + "orum": 42724, + "ðŁIJł": 42725, + "somm": 42726, + "cmt": 42727, + "inghour": 42728, + "skinny": 42729, + "lgbti": 42730, + "giggles": 42731, + "breakaway": 42732, + "researched": 42733, + "parity": 42734, + "myal": 42735, + "msl": 42736, + "retained": 42737, + "sivity": 42738, + "makeinindia": 42739, + "solves": 42740, + "defamation": 42741, + "waltham": 42742, + "sriracha": 42743, + "roadway": 42744, + "conceptu": 42745, + "alin": 42746, + "iwant": 42747, + "åĪ": 42748, + "delft": 42749, + "tenderloin": 42750, + "gains": 42751, + "faults": 42752, + "swire": 42753, + "stellen": 42754, + "pollo": 42755, + "dyne": 42756, + "bornonthisday": 42757, + "asdfghj": 42758, + "sql": 42759, + "salim": 42760, + "advises": 42761, + "voip": 42762, + "ìĹijìĨ": 42763, + "untouched": 42764, + "sheil": 42765, + "ontario": 42766, + "uphill": 42767, + "sobre": 42768, + "deshi": 42769, + "novella": 42770, + "dutton": 
42771, + "crawfish": 42772, + "اÙĨ": 42773, + "maa": 42774, + "twine": 42775, + "kalin": 42776, + "ðŁĩµðŁĩŃ": 42777, + "yess": 42778, + "brooks": 42779, + "hoosiers": 42780, + "tonka": 42781, + "umbrellas": 42782, + "ayers": 42783, + "ateam": 42784, + "acquiring": 42785, + "suction": 42786, + "än": 42787, + "wies": 42788, + "tarians": 42789, + "socio": 42790, + "mattb": 42791, + "shepherds": 42792, + "oso": 42793, + "charitytuesday": 42794, + "slogans": 42795, + "ninjas": 42796, + "albat": 42797, + "byte": 42798, + "bashir": 42799, + "trampoline": 42800, + "mydayinla": 42801, + "ija": 42802, + "basel": 42803, + "rory": 42804, + "goldie": 42805, + "firec": 42806, + "unnoticed": 42807, + "peculiar": 42808, + "scha": 42809, + "kerson": 42810, + "mourns": 42811, + "liquidity": 42812, + "quipment": 42813, + "hibs": 42814, + "ars": 42815, + "aeronau": 42816, + "slideshow": 42817, + "slabs": 42818, + "deliciousness": 42819, + "skitchen": 42820, + "htafc": 42821, + "fullerton": 42822, + "creighton": 42823, + "aerob": 42824, + "procrastination": 42825, + "azores": 42826, + "whitehall": 42827, + "ussoccer": 42828, + "mediation": 42829, + "djokernole": 42830, + "andme": 42831, + "umen": 42832, + "noxious": 42833, + "joss": 42834, + "ilife": 42835, + "annivers": 42836, + "sudanese": 42837, + "etres": 42838, + "undermine": 42839, + "wholefoods": 42840, + "disobe": 42841, + "kori": 42842, + "adele": 42843, + "eliz": 42844, + "canti": 42845, + "alon": 42846, + "gymnasium": 42847, + "sarkodie": 42848, + "meteorologist": 42849, + "ylde": 42850, + "steen": 42851, + "stampcollecting": 42852, + "nasal": 42853, + "lott": 42854, + "franks": 42855, + "exol": 42856, + "acki": 42857, + "goodyear": 42858, + "animalrights": 42859, + "yles": 42860, + "violets": 42861, + "mmes": 42862, + "sthel": 42863, + "rapping": 42864, + "tuscan": 42865, + "waiver": 42866, + "turner": 42867, + "eatlocal": 42868, + "northeasthour": 42869, + "animations": 42870, + "tommorow": 42871, + "tsh": 42872, + "ffame": 42873, + "brae": 42874, + "petron": 42875, + "glamour": 42876, + "bryn": 42877, + "dcs": 42878, + "bales": 42879, + "ðŁĶ¶": 42880, + "brov": 42881, + "brev": 42882, + "bons": 42883, + "physique": 42884, + "carne": 42885, + "xe": 42886, + "elixir": 42887, + "volved": 42888, + "loma": 42889, + "ìľł": 42890, + "æĺ": 42891, + "vanu": 42892, + "rigs": 42893, + "balance": 42894, + "vares": 42895, + "bonita": 42896, + "sprinkle": 42897, + "perfecto": 42898, + "dion": 42899, + "leak": 42900, + "calcutta": 42901, + "oba": 42902, + "dma": 42903, + "cmon": 42904, + "tuner": 42905, + "pneumonia": 42906, + "bogus": 42907, + "apologe": 42908, + "clough": 42909, + "borne": 42910, + "))))": 42911, + "revived": 42912, + "ovarian": 42913, + "nerf": 42914, + "clegg": 42915, + "fanfest": 42916, + "chou": 42917, + "realizes": 42918, + "mcn": 42919, + "ligu": 42920, + "legalize": 42921, + "justsaying": 42922, + "forster": 42923, + "bosni": 42924, + "khi": 42925, + "indom": 42926, + "heidel": 42927, + "encryp": 42928, + "siss": 42929, + "eddi": 42930, + "marbles": 42931, + "brisbane": 42932, + "ying": 42933, + "prepaid": 42934, + "walsall": 42935, + "cooperate": 42936, + "orchestr": 42937, + "marisa": 42938, + "howie": 42939, + "chewy": 42940, + "brenner": 42941, + "andromeda": 42942, + "egan": 42943, + "stocki": 42944, + "cavendish": 42945, + "agan": 42946, + "bano": 42947, + "deir": 42948, + "gog": 42949, + "blk": 42950, + "rethinking": 42951, + "chig": 42952, + "rheu": 42953, + "snip": 42954, + "peng": 42955, + "seminole": 42956, + "mswx": 42957, 
+ "annex": 42958, + "lynda": 42959, + "lewishamilton": 42960, + "cumul": 42961, + "tbl": 42962, + "dolphin": 42963, + "aguero": 42964, + "............": 42965, + "prelude": 42966, + "atour": 42967, + "granger": 42968, + "tooting": 42969, + "rotun": 42970, + "disar": 42971, + "homeitems": 42972, + "dares": 42973, + "********": 42974, + "ðŁijĨ": 42975, + "compreh": 42976, + "jinx": 42977, + "aswell": 42978, + "irie": 42979, + "circulating": 42980, + "ðŁIJ¥": 42981, + "overboard": 42982, + "cultivate": 42983, + "rhett": 42984, + "orienteering": 42985, + "cak": 42986, + "balkans": 42987, + "sitt": 42988, + "jasmin": 42989, + "britneyspears": 42990, + "rotor": 42991, + "sealing": 42992, + "gbc": 42993, + "occi": 42994, + "fas": 42995, + "emancip": 42996, + "comer": 42997, + "wartime": 42998, + "tickle": 42999, + "sonny": 43000, + "paces": 43001, + "logg": 43002, + "atrix": 43003, + "srp": 43004, + "gwin": 43005, + "dobbs": 43006, + "uzbe": 43007, + "thewanted": 43008, + "drush": 43009, + "extru": 43010, + "micky": 43011, + "honorees": 43012, + "darwin": 43013, + "redux": 43014, + "mmj": 43015, + "rami": 43016, + "jalapeño": 43017, + "ioc": 43018, + "dover": 43019, + "juju": 43020, + "whitney": 43021, + "seng": 43022, + "enly": 43023, + "auch": 43024, + "archipelago": 43025, + "vigilant": 43026, + "mangal": 43027, + "wildest": 43028, + "paranoid": 43029, + "hali": 43030, + "bbly": 43031, + "sanctioned": 43032, + "realms": 43033, + "conco": 43034, + "uddin": 43035, + "csk": 43036, + "playtime": 43037, + "libra": 43038, + "savag": 43039, + "octane": 43040, + "rectan": 43041, + "return": 43042, + "parrish": 43043, + "morrha": 43044, + "ccp": 43045, + "cmu": 43046, + "sailed": 43047, + "sevent": 43048, + "rosie": 43049, + "piling": 43050, + "hew": 43051, + "boarded": 43052, + "segments": 43053, + "nephro": 43054, + "(.": 43055, + "crats": 43056, + "bakes": 43057, + "ðŁį¸": 43058, + "backtothe": 43059, + "sibling": 43060, + "kirkland": 43061, + "keo": 43062, + "guwa": 43063, + "breads": 43064, + "ðŁĺľðŁĺľ": 43065, + "tq": 43066, + "harassed": 43067, + "gau": 43068, + "wilbur": 43069, + "jisoo": 43070, + "eper": 43071, + "lisam": 43072, + "trippin": 43073, + "shino": 43074, + "rukh": 43075, + "beastmode": 43076, + "choa": 43077, + "instaweather": 43078, + "richland": 43079, + "gari": 43080, + "fez": 43081, + "cowboysnation": 43082, + "fursuit": 43083, + "krun": 43084, + "aen": 43085, + "sycamore": 43086, + "segun": 43087, + "entennial": 43088, + "dih": 43089, + "oax": 43090, + "demsinphilly": 43091, + "ðŁĻĢ": 43092, + "snhl": 43093, + "pennies": 43094, + "passwords": 43095, + "makin": 43096, + "tye": 43097, + "deng": 43098, + "knigh": 43099, + "jeeplife": 43100, + "helpline": 43101, + "afor": 43102, + "zzzz": 43103, + "steamy": 43104, + "picker": 43105, + "iterate": 43106, + "happeningnow": 43107, + "kib": 43108, + "bloomberg": 43109, + "martyrdom": 43110, + "bully": 43111, + "assortment": 43112, + "ahora": 43113, + "zoe": 43114, + "noi": 43115, + "illustri": 43116, + "agarwal": 43117, + "psc": 43118, + "electronica": 43119, + "recruiter": 43120, + "gardiner": 43121, + "radha": 43122, + "nafta": 43123, + "dotnet": 43124, + "piero": 43125, + "georg": 43126, + "bels": 43127, + "ðŁĺĤðŁĺį": 43128, + "tuberculosis": 43129, + "runnin": 43130, + "moris": 43131, + "hauling": 43132, + "evoc": 43133, + "brethren": 43134, + "shair": 43135, + "frameworks": 43136, + "astu": 43137, + "rigid": 43138, + "kuma": 43139, + "kreme": 43140, + "jinnah": 43141, + "insurers": 43142, + "nyu": 43143, + "fere": 43144, + 
"nollywood": 43145, + "goodvibes": 43146, + "-...": 43147, + "toile": 43148, + "skril": 43149, + "instaweatherpro": 43150, + "czech": 43151, + "pavel": 43152, + "onepiece": 43153, + "nikeplus": 43154, + "filet": 43155, + "cavity": 43156, + "ðŁı½âĢįâĻĤï¸ı": 43157, + "ðŁİ£": 43158, + "drastic": 43159, + "dailys": 43160, + "siamese": 43161, + "rebu": 43162, + "osteo": 43163, + "lark": 43164, + "fre": 43165, + "shelling": 43166, + "pé": 43167, + "gladys": 43168, + "ðŁıĢðŁıĢ": 43169, + "gustave": 43170, + "submerged": 43171, + "grandstand": 43172, + "attu": 43173, + "wont": 43174, + "fpv": 43175, + "bley": 43176, + "joni": 43177, + "angames": 43178, + "weighted": 43179, + "alou": 43180, + "श": 43181, + "lesbians": 43182, + "fj": 43183, + "annies": 43184, + "aml": 43185, + "doria": 43186, + "davin": 43187, + "beta": 43188, + "canc": 43189, + "madewithunity": 43190, + "haj": 43191, + "badlands": 43192, + "mul": 43193, + "bluec": 43194, + "pawn": 43195, + "covington": 43196, + "neurology": 43197, + "httweets": 43198, + "dyslexia": 43199, + "thelove": 43200, + "neat": 43201, + "forklift": 43202, + "automate": 43203, + "uneven": 43204, + "montess": 43205, + "hein": 43206, + "hag": 43207, + "relics": 43208, + "competitiveness": 43209, + "canelo": 43210, + "martens": 43211, + "bulletproof": 43212, + "skittles": 43213, + "gya": 43214, + "primo": 43215, + "americafirst": 43216, + "wooo": 43217, + "abortions": 43218, + "??!!": 43219, + "mache": 43220, + "lders": 43221, + "rlly": 43222, + "prelims": 43223, + "direct": 43224, + "course": 43225, + "swain": 43226, + "supercell": 43227, + "eccentric": 43228, + "stingray": 43229, + "plets": 43230, + "wilcox": 43231, + "westin": 43232, + "okanagan": 43233, + "kiran": 43234, + "carbo": 43235, + "bombings": 43236, + "rarest": 43237, + "boh": 43238, + "gawd": 43239, + "digg": 43240, + "moana": 43241, + "entirety": 43242, + "enclosed": 43243, + "dodgeball": 43244, + "parton": 43245, + "milkyway": 43246, + "atr": 43247, + "thoroughbred": 43248, + "really": 43249, + "qantas": 43250, + "epiphany": 43251, + "inee": 43252, + "aerosmith": 43253, + "spieth": 43254, + "arthro": 43255, + "ellini": 43256, + "dubu": 43257, + "braving": 43258, + "âļ½âļ½": 43259, + "restructuring": 43260, + "illuminate": 43261, + "equili": 43262, + "mpi": 43263, + "ashton": 43264, + "ponytail": 43265, + "mascots": 43266, + "flattering": 43267, + "crum": 43268, + "asta": 43269, + "à®°": 43270, + "strangerthings": 43271, + "barnab": 43272, + "رÙĬ": 43273, + "makeshift": 43274, + "gotcha": 43275, + "willam": 43276, + "choirs": 43277, + "kilometres": 43278, + "ghosh": 43279, + "euthan": 43280, + "dolly": 43281, + "unning": 43282, + "thear": 43283, + "crewe": 43284, + "wsw": 43285, + "jace": 43286, + "dismiss": 43287, + "kean": 43288, + "hota": 43289, + "khat": 43290, + "~>": 43291, + "thiru": 43292, + "rendez": 43293, + "hartman": 43294, + "teessi": 43295, + "casca": 43296, + "zah": 43297, + "hydrange": 43298, + "fod": 43299, + "awp": 43300, + "mzansi": 43301, + "thicker": 43302, + "nagoya": 43303, + "neva": 43304, + "stique": 43305, + "castel": 43306, + "damian": 43307, + "thereby": 43308, + "jiang": 43309, + "alek": 43310, + "musicislife": 43311, + "raq": 43312, + "callahan": 43313, + "gouache": 43314, + "somaliland": 43315, + "seanhannity": 43316, + "raheem": 43317, + "lose": 43318, + "elove": 43319, + "wharton": 43320, + "rectangular": 43321, + "illustrating": 43322, + "harne": 43323, + "autisma": 43324, + "scrapped": 43325, + "elland": 43326, + "decree": 43327, + "nagpur": 43328, + "kipp": 
43329, + "sore": 43330, + "nmd": 43331, + "maas": 43332, + "guna": 43333, + "gartner": 43334, + "belli": 43335, + "thenight": 43336, + "jeon": 43337, + "genderequality": 43338, + "giver": 43339, + "ael": 43340, + "garments": 43341, + "neu": 43342, + "mardigras": 43343, + "marsden": 43344, + "rower": 43345, + "polluted": 43346, + "cameraman": 43347, + "vinod": 43348, + "beasley": 43349, + "croc": 43350, + "jiu": 43351, + "hollyoaks": 43352, + "anesthesia": 43353, + "alles": 43354, + "steward": 43355, + "latimes": 43356, + "ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸": 43357, + "tician": 43358, + "goria": 43359, + "comedic": 43360, + "ð٤Ķð٤Ķð٤Ķ": 43361, + "naive": 43362, + "slions": 43363, + "łĪ": 43364, + "burglar": 43365, + "ðŁĺŃðŁĺŃðŁĺŃðŁĺŃðŁĺŃ": 43366, + "yorkshi": 43367, + "señ": 43368, + "fanboy": 43369, + "laurel": 43370, + "incidence": 43371, + "potomac": 43372, + "roberta": 43373, + "presiden": 43374, + "pryor": 43375, + "osbourne": 43376, + "wku": 43377, + "teme": 43378, + "palae": 43379, + "ðŁ¥º": 43380, + "reboun": 43381, + "itude": 43382, + "reddish": 43383, + "khand": 43384, + "colonialism": 43385, + "northcarolina": 43386, + "ðĿĴ": 43387, + "mannequin": 43388, + "ladybird": 43389, + "tasty": 43390, + "knowledgeable": 43391, + "gshore": 43392, + "ðŁĮĮ": 43393, + "ன": 43394, + "quaker": 43395, + "salzburg": 43396, + "medalists": 43397, + "chyna": 43398, + "bridesmaid": 43399, + "maori": 43400, + "rop": 43401, + "outraged": 43402, + "inadequate": 43403, + "truckers": 43404, + "alana": 43405, + "ìĿ¼": 43406, + "rix": 43407, + "oooooooo": 43408, + "commandments": 43409, + "lambeth": 43410, + "aaj": 43411, + "ecofriendly": 43412, + "blaz": 43413, + "morecambe": 43414, + "bouncy": 43415, + "roux": 43416, + "raided": 43417, + "mized": 43418, + "shc": 43419, + "gawx": 43420, + "laboratories": 43421, + "rubs": 43422, + "restroom": 43423, + "consultations": 43424, + "cajun": 43425, + "virgini": 43426, + "soir": 43427, + "revue": 43428, + "plein": 43429, + "wager": 43430, + "ç¹": 43431, + "wedo": 43432, + "growingup": 43433, + "!ðŁĺĬ": 43434, + "faceted": 43435, + "sinners": 43436, + "hovering": 43437, + "tiene": 43438, + "seasoning": 43439, + "anja": 43440, + "leggo": 43441, + "ilis": 43442, + "flax": 43443, + "devo": 43444, + "ashram": 43445, + "matisse": 43446, + "keri": 43447, + "gower": 43448, + "botox": 43449, + "marshes": 43450, + "unhcr": 43451, + "tsm": 43452, + "optimus": 43453, + "duni": 43454, + "stuffs": 43455, + "sok": 43456, + "orderly": 43457, + "nbad": 43458, + "islamophobia": 43459, + "ravioli": 43460, + "faber": 43461, + "creds": 43462, + "wonka": 43463, + "infusion": 43464, + "overweight": 43465, + "dailynews": 43466, + "assimil": 43467, + "acollege": 43468, + "medallion": 43469, + "kilimanjaro": 43470, + "stiff": 43471, + "thames": 43472, + "sunken": 43473, + "thard": 43474, + "mydubai": 43475, + "hilariously": 43476, + "hannel": 43477, + "plumber": 43478, + "fairview": 43479, + "separating": 43480, + "rascal": 43481, + "quien": 43482, + "necessities": 43483, + "confederation": 43484, + "llll": 43485, + ":]": 43486, + "weaknesses": 43487, + "bronco": 43488, + "raffles": 43489, + "elot": 43490, + "ãĤ¸ãĥ": 43491, + "adventcalendar": 43492, + "ðŁİ¹": 43493, + "stravel": 43494, + "tunic": 43495, + "ksu": 43496, + "impeach": 43497, + "espionage": 43498, + "!-": 43499, + "diment": 43500, + "currant": 43501, + "biode": 43502, + "commuting": 43503, + "byron": 43504, + "ðŁĴĵðŁĴĵ": 43505, + "shaded": 43506, + "truro": 43507, + "crayons": 43508, + "arne": 43509, + "hsc": 43510, + "freaked": 43511, 
+ "dramati": 43512, + "fleek": 43513, + "ucd": 43514, + "marlborough": 43515, + "^-": 43516, + "crossings": 43517, + "malo": 43518, + "blackops": 43519, + "binance": 43520, + "choked": 43521, + "cheney": 43522, + "plo": 43523, + "gestures": 43524, + "valedic": 43525, + "ryanair": 43526, + "remington": 43527, + "vcs": 43528, + "mckee": 43529, + "ecz": 43530, + "begs": 43531, + "nailart": 43532, + "mayorof": 43533, + "happyfathersday": 43534, + "wart": 43535, + "petitions": 43536, + "ningly": 43537, + "cleanenergy": 43538, + "brox": 43539, + "slalom": 43540, + "existent": 43541, + "abay": 43542, + "ugliest": 43543, + "tomp": 43544, + "stoma": 43545, + "selby": 43546, + "goalscorer": 43547, + "benji": 43548, + "overwhelmingly": 43549, + "lans": 43550, + "semiconductor": 43551, + "southkorea": 43552, + "rescheduled": 43553, + "skyl": 43554, + "enlisted": 43555, + "dowski": 43556, + "sidel": 43557, + "rosenberg": 43558, + "nasser": 43559, + "whitehead": 43560, + "prius": 43561, + "harare": 43562, + "enn": 43563, + "ryder": 43564, + "íĤ": 43565, + "mong": 43566, + "clasico": 43567, + "transporter": 43568, + "potty": 43569, + "isme": 43570, + "*****": 43571, + "vice": 43572, + "skit": 43573, + "odessa": 43574, + "lmp": 43575, + "hern": 43576, + "racially": 43577, + "pinoy": 43578, + "paraguay": 43579, + "obituary": 43580, + "goes": 43581, + "bucha": 43582, + "sidewalks": 43583, + "angular": 43584, + "unconstitutional": 43585, + "transitioning": 43586, + "ibu": 43587, + "guys": 43588, + "unpacking": 43589, + "oooooo": 43590, + "blackgirl": 43591, + "bergs": 43592, + "¯": 43593, + "wordoftheday": 43594, + "trumptrain": 43595, + "thunderbolt": 43596, + "msi": 43597, + "fascists": 43598, + "ब": 43599, + "tsk": 43600, + "collapses": 43601, + "rajesh": 43602, + "loveislove": 43603, + "migrating": 43604, + "setback": 43605, + "ðŁĺĬâĿ¤ï¸ı": 43606, + "tels": 43607, + "safetyfirst": 43608, + "narrated": 43609, + "jaejoong": 43610, + "unanswered": 43611, + "liqueur": 43612, + "ennes": 43613, + "dalgo": 43614, + "billings": 43615, + "saltwater": 43616, + "mermaids": 43617, + "longs": 43618, + "clapham": 43619, + "wearec": 43620, + "piccollage": 43621, + "nach": 43622, + "hace": 43623, + "poisoned": 43624, + "loth": 43625, + "agna": 43626, + "adelrey": 43627, + "guardia": 43628, + "polishing": 43629, + "peacekeeping": 43630, + "dall": 43631, + "pisa": 43632, + "lapland": 43633, + "processors": 43634, + "deandre": 43635, + "sobs": 43636, + "ponce": 43637, + "drains": 43638, + "cbe": 43639, + "ðŁİ¥:": 43640, + "splash": 43641, + "meatball": 43642, + "fontana": 43643, + "worcestershirehour": 43644, + "nev": 43645, + "brisk": 43646, + "bint": 43647, + "acr": 43648, + "pox": 43649, + "cayenne": 43650, + "skrillex": 43651, + "jfc": 43652, + "hahahahahahaha": 43653, + "glas": 43654, + "engul": 43655, + "temporal": 43656, + "onized": 43657, + "concre": 43658, + "compose": 43659, + "vibrations": 43660, + "planters": 43661, + "fert": 43662, + "criticalrolefanart": 43663, + "tbli": 43664, + "schallenge": 43665, + "huckabee": 43666, + "municipal": 43667, + "iambic": 43668, + "radios": 43669, + "nevis": 43670, + "durability": 43671, + "mccla": 43672, + "horseback": 43673, + "institutes": 43674, + "fulfill": 43675, + "attach": 43676, + "ateur": 43677, + "akan": 43678, + "resisting": 43679, + "illumination": 43680, + "handle": 43681, + "haircare": 43682, + "oment": 43683, + "macleod": 43684, + "kaiser": 43685, + "gno": 43686, + "beardown": 43687, + "lyf": 43688, + "glomer": 43689, + "distortion": 43690, + "zm": 43691, + 
"sank": 43692, + "roosters": 43693, + "isnow": 43694, + "asports": 43695, + "agen": 43696, + "woken": 43697, + "stgeorge": 43698, + "romper": 43699, + "myle": 43700, + "economists": 43701, + "ruto": 43702, + "twill": 43703, + "healthand": 43704, + "dito": 43705, + "wsl": 43706, + "tairp": 43707, + "prakash": 43708, + "micheal": 43709, + "hts": 43710, + "wrights": 43711, + "katsu": 43712, + "fiorentina": 43713, + "defenseman": 43714, + "ditch": 43715, + "varsity": 43716, + "texanscheer": 43717, + "baham": 43718, + "scanned": 43719, + "weil": 43720, + "seductive": 43721, + "ðŁijįðŁı½": 43722, + "fue": 43723, + "erwin": 43724, + "davison": 43725, + "terran": 43726, + "moods": 43727, + "woolf": 43728, + "resource": 43729, + "@.": 43730, + "cush": 43731, + "ðŁį°": 43732, + "regression": 43733, + "curled": 43734, + "lazer": 43735, + "joanne": 43736, + "abbott": 43737, + "moz": 43738, + "downers": 43739, + "mmmmmm": 43740, + "valentina": 43741, + "khair": 43742, + "dreamt": 43743, + "crook": 43744, + "chek": 43745, + "steaming": 43746, + "nephews": 43747, + "cleric": 43748, + "asober": 43749, + "indefinitely": 43750, + "wye": 43751, + "usnews": 43752, + "joyce": 43753, + "flushing": 43754, + "wynonnaearp": 43755, + "rondo": 43756, + "kiss": 43757, + "hotdog": 43758, + "barns": 43759, + "saxophon": 43760, + "farley": 43761, + "gasp": 43762, + "decreasing": 43763, + "alway": 43764, + "pex": 43765, + "lsd": 43766, + "shift": 43767, + "poutine": 43768, + "razz": 43769, + "rescuing": 43770, + "niko": 43771, + "hoch": 43772, + "ccl": 43773, + "uaap": 43774, + "nts": 43775, + "mcar": 43776, + "ilwx": 43777, + "conquering": 43778, + "kettering": 43779, + "sturdy": 43780, + "delaying": 43781, + "stok": 43782, + "vanished": 43783, + "cathar": 43784, + "bingham": 43785, + "inv": 43786, + "ichiro": 43787, + "hemo": 43788, + "budgeting": 43789, + "[...]": 43790, + "bess": 43791, + "sebastian": 43792, + "slowed": 43793, + "ðĿij": 43794, + "muslim": 43795, + "stuns": 43796, + "actonclimate": 43797, + "vea": 43798, + "seton": 43799, + "rosetta": 43800, + "ount": 43801, + "hardin": 43802, + "fluid": 43803, + "caw": 43804, + "ðŁ¥Ĥ": 43805, + "yacht": 43806, + "unl": 43807, + "sphy": 43808, + "provocative": 43809, + "oric": 43810, + "isback": 43811, + "___": 43812, + "nicolas": 43813, + "gyan": 43814, + "loose": 43815, + "flin": 43816, + "rebate": 43817, + ":::": 43818, + "!\"@": 43819, + "comicon": 43820, + "sheff": 43821, + "downstream": 43822, + "chichester": 43823, + "beachlife": 43824, + "momlife": 43825, + "diabete": 43826, + "arra": 43827, + "vane": 43828, + "oku": 43829, + "yeo": 43830, + "mango": 43831, + "tryout": 43832, + "appell": 43833, + "heirs": 43834, + "arjuna": 43835, + "ddu": 43836, + "naveen": 43837, + "movic": 43838, + "socialists": 43839, + "sback": 43840, + "criterion": 43841, + "soyuz": 43842, + "kher": 43843, + "daz": 43844, + "yolanda": 43845, + "wineoclock": 43846, + "reina": 43847, + "onew": 43848, + "leonard": 43849, + "endez": 43850, + "ubs": 43851, + "supportlocal": 43852, + "facilitated": 43853, + "caramelized": 43854, + "bpa": 43855, + "vuelta": 43856, + "mytho": 43857, + "mami": 43858, + "speare": 43859, + "nbaplayoffs": 43860, + "fevre": 43861, + "nickjonas": 43862, + "imprint": 43863, + "cso": 43864, + "craigslist": 43865, + "lasalle": 43866, + "gideon": 43867, + "hadoop": 43868, + "disregard": 43869, + "wud": 43870, + "tuc": 43871, + "magee": 43872, + "acoustics": 43873, + "taa": 43874, + "quie": 43875, + "pola": 43876, + "crt": 43877, + "dwyer": 43878, + "dissec": 43879, + 
"capitol": 43880, + "mention": 43881, + "knoll": 43882, + "heigh": 43883, + "finders": 43884, + "placements": 43885, + "lse": 43886, + "indira": 43887, + "guri": 43888, + "madhuridixit": 43889, + "kingdoms": 43890, + "iambicpent": 43891, + "georgina": 43892, + "jeky": 43893, + "conflicting": 43894, + "bayan": 43895, + "agatha": 43896, + "uphold": 43897, + "dron": 43898, + "vicar": 43899, + "expat": 43900, + "peripheral": 43901, + "pessi": 43902, + "faf": 43903, + "ancestor": 43904, + "?..": 43905, + "widget": 43906, + "punc": 43907, + "commenced": 43908, + "beavs": 43909, + "airwaves": 43910, + "addis": 43911, + "poa": 43912, + "desses": 43913, + "coden": 43914, + "vue": 43915, + "rupee": 43916, + "karin": 43917, + "spock": 43918, + "msy": 43919, + "ะ": 43920, + "prick": 43921, + "fillmore": 43922, + "tification": 43923, + "thingsto": 43924, + "sarde": 43925, + "emile": 43926, + "pereira": 43927, + "nad": 43928, + "brightening": 43929, + "arresting": 43930, + "woking": 43931, + "uscg": 43932, + "spill": 43933, + "raspberrypi": 43934, + "hugo": 43935, + "itec": 43936, + "isma": 43937, + "cufflinks": 43938, + "optimized": 43939, + "occ": 43940, + "miwx": 43941, + "enka": 43942, + "elited": 43943, + "affordable": 43944, + "sakh": 43945, + "coronado": 43946, + "hoh": 43947, + "atul": 43948, + "aioli": 43949, + "jimcantore": 43950, + "accounted": 43951, + "vinay": 43952, + "hermit": 43953, + "grooves": 43954, + "ranch": 43955, + "rilla": 43956, + "wetter": 43957, + "outof": 43958, + "veterin": 43959, + "nikov": 43960, + "kian": 43961, + "fairbanks": 43962, + "ramapho": 43963, + "niti": 43964, + "kko": 43965, + "rusty": 43966, + "nestle": 43967, + "tvxq": 43968, + "shaheer": 43969, + "âĿ¤âĿ¤âĿ¤âĿ¤": 43970, + "pennant": 43971, + "gemstones": 43972, + "demdebate": 43973, + "ðŁIJĬ": 43974, + "autonews": 43975, + "supportindiefilm": 43976, + "macho": 43977, + "vex": 43978, + "newsat": 43979, + "neti": 43980, + "concessions": 43981, + "candied": 43982, + "yofthe": 43983, + "macau": 43984, + "dends": 43985, + "cricketers": 43986, + "saniti": 43987, + "mariano": 43988, + "ghat": 43989, + "artoftheday": 43990, + "¡ľ": 43991, + "egos": 43992, + "genoa": 43993, + "chatbots": 43994, + "brier": 43995, + "allabout": 43996, + "monty": 43997, + "spied": 43998, + "rtr": 43999, + "comfort": 44000, + "snippets": 44001, + "realtime": 44002, + "grain": 44003, + "examined": 44004, + "enlightening": 44005, + "ttu": 44006, + "godbless": 44007, + "releasethe": 44008, + "singular": 44009, + "kians": 44010, + "haka": 44011, + "sorren": 44012, + "defect": 44013, + "marg": 44014, + "equities": 44015, + "dorian": 44016, + "suka": 44017, + "perl": 44018, + "aishwarya": 44019, + "pullover": 44020, + "precision": 44021, + "fairway": 44022, + "neve": 44023, + "riveting": 44024, + "villanova": 44025, + "encom": 44026, + "ako": 44027, + "passionately": 44028, + "europaleague": 44029, + "siempre": 44030, + "xvi": 44031, + "enlightened": 44032, + "cfr": 44033, + "âĺħâĺħâĺħâĺħ": 44034, + "wasteland": 44035, + "isf": 44036, + "newcomers": 44037, + "emergency": 44038, + "amphitheatre": 44039, + "-.": 44040, + "textbooks": 44041, + "figurative": 44042, + "tremb": 44043, + "pesc": 44044, + "abhin": 44045, + "abbot": 44046, + "acacia": 44047, + "hards": 44048, + "porsche": 44049, + "kauai": 44050, + "elisa": 44051, + "carrick": 44052, + "abou": 44053, + "ellier": 44054, + "bech": 44055, + "neutron": 44056, + "galapagos": 44057, + "ruben": 44058, + "innis": 44059, + "howto": 44060, + "nuns": 44061, + "sabine": 44062, + "iac": 44063, + 
"clinched": 44064, + "notori": 44065, + "fives": 44066, + "cairngor": 44067, + "peri": 44068, + "grc": 44069, + "ðŁĴ¯ðŁĴ¯": 44070, + "malm": 44071, + "twelfth": 44072, + "diff": 44073, + "routines": 44074, + "martyn": 44075, + "linden": 44076, + "synthesizer": 44077, + "number": 44078, + "gamecube": 44079, + "falkirk": 44080, + "byzantine": 44081, + "queuing": 44082, + "grill": 44083, + "scalable": 44084, + "charred": 44085, + "routing": 44086, + "herbali": 44087, + "grizz": 44088, + "ðŁĺŃðŁĺŃðŁĺŃ": 44089, + "toll": 44090, + "terminals": 44091, + "lpc": 44092, + "abd": 44093, + "warmups": 44094, + "removable": 44095, + "¯\\": 44096, + "vigo": 44097, + "papaya": 44098, + "neve": 44099, + "lovingly": 44100, + "jokers": 44101, + "ibles": 44102, + "ssett": 44103, + "potenti": 44104, + "pele": 44105, + "gigi": 44106, + "sadiq": 44107, + "legacy": 44108, + "sono": 44109, + "rupees": 44110, + "retarded": 44111, + "elee": 44112, + "parr": 44113, + "fiance": 44114, + "eyre": 44115, + "sayers": 44116, + "pendants": 44117, + "maknae": 44118, + "albans": 44119, + "adapting": 44120, + "pff": 44121, + "puberty": 44122, + "jiu": 44123, + "ingrad": 44124, + "hypocrite": 44125, + "diplomats": 44126, + "physical": 44127, + "robby": 44128, + "bonsai": 44129, + "ãģ·": 44130, + "fatt": 44131, + "catalunya": 44132, + "âľĸï¸ı": 44133, + "roma": 44134, + "moreland": 44135, + "soe": 44136, + "conversions": 44137, + "stlblues": 44138, + "sholm": 44139, + "grassy": 44140, + "prado": 44141, + "onu": 44142, + "assaulting": 44143, + ">_": 44144, + "settes": 44145, + "disgraceful": 44146, + "aphra": 44147, + "âļ½ï¸ıâļ½ï¸ı": 44148, + "प": 44149, + "kiln": 44150, + "goaltender": 44151, + "sru": 44152, + "philanthropist": 44153, + "bals": 44154, + "thn": 44155, + "studen": 44156, + "sandoval": 44157, + "dogrescue": 44158, + "elions": 44159, + "assessed": 44160, + "largo": 44161, + "hectares": 44162, + "shrm": 44163, + "saif": 44164, + "cleavage": 44165, + "noches": 44166, + "nene": 44167, + "fatalities": 44168, + "curing": 44169, + "cleanser": 44170, + "ales": 44171, + "pvp": 44172, + "southbank": 44173, + "pizzeria": 44174, + "marshals": 44175, + "knife": 44176, + "andover": 44177, + "tblightning": 44178, + "srsly": 44179, + "oute": 44180, + "digimon": 44181, + "timesofindia": 44182, + "promethe": 44183, + "lebo": 44184, + "fsu": 44185, + "witz": 44186, + "revere": 44187, + "manas": 44188, + "mamba": 44189, + "chica": 44190, + "guan": 44191, + "exhibitor": 44192, + "csrracing": 44193, + "dere": 44194, + "xxxxx": 44195, + "gusta": 44196, + "storytime": 44197, + "stoney": 44198, + "organics": 44199, + "andu": 44200, + "seam": 44201, + "minogue": 44202, + "anushkasharma": 44203, + "aba": 44204, + "ðŁİĻï¸ı": 44205, + "ugandan": 44206, + "chromatic": 44207, + "assn": 44208, + "documentaries": 44209, + "sht": 44210, + "rupaul": 44211, + "loyd": 44212, + "kats": 44213, + "eus": 44214, + "itech": 44215, + "medusa": 44216, + "panty": 44217, + "kellogg": 44218, + "etto": 44219, + "tallade": 44220, + "shaa": 44221, + "dost": 44222, + "pms": 44223, + "mariana": 44224, + "jester": 44225, + "crooks": 44226, + "ðŁĶ¬": 44227, + "mindanao": 44228, + "indhoven": 44229, + "ðŁ¤ª": 44230, + "lexi": 44231, + "tvn": 44232, + "janis": 44233, + "cote": 44234, + "ãģĨ": 44235, + "serrano": 44236, + "iwm": 44237, + "ðŁIJ¬": 44238, + "kke": 44239, + "distributors": 44240, + "capu": 44241, + "counterfeit": 44242, + "campsite": 44243, + "aggie": 44244, + "ðŁĺ¼": 44245, + "chhattisgarh": 44246, + "~@": 44247, + "stateu": 44248, + "sandi": 44249, + 
"preventable": 44250, + "cls": 44251, + "canne": 44252, + "mmc": 44253, + "iver": 44254, + "saharan": 44255, + "palis": 44256, + "nightout": 44257, + "dos": 44258, + "apia": 44259, + "abscbn": 44260, + "managerial": 44261, + "arose": 44262, + "mowx": 44263, + "arosa": 44264, + "ðŁĮ³": 44265, + "underdog": 44266, + "remover": 44267, + "astronomers": 44268, + "lentils": 44269, + "suscep": 44270, + "smoother": 44271, + "pendleton": 44272, + "faucet": 44273, + "emory": 44274, + "dalmati": 44275, + "afcb": 44276, + "ticus": 44277, + "exempt": 44278, + "enrol": 44279, + "dheim": 44280, + "ðŁIJº": 44281, + "restriction": 44282, + "starfish": 44283, + "stow": 44284, + "snorkel": 44285, + "thunderbirds": 44286, + "shead": 44287, + "homosexual": 44288, + "dyn": 44289, + "asli": 44290, + "andretti": 44291, + "douche": 44292, + "domo": 44293, + "tarmac": 44294, + "slumber": 44295, + "pronto": 44296, + "firstdayof": 44297, + "miniature": 44298, + "mariachi": 44299, + "argus": 44300, + "recommending": 44301, + "mobiles": 44302, + "ince": 44303, + "illustrious": 44304, + "orc": 44305, + "adverts": 44306, + "grits": 44307, + "weasel": 44308, + "pagoda": 44309, + "overpass": 44310, + "greys": 44311, + "maximus": 44312, + "armagh": 44313, + "woodland": 44314, + "sunni": 44315, + "ðŁĴī": 44316, + "ëĿ": 44317, + "tione": 44318, + "socio": 44319, + "hos": 44320, + "ð٤Ĺð٤Ĺ": 44321, + "windsor": 44322, + "subsequent": 44323, + "munchies": 44324, + "idh": 44325, + "excluding": 44326, + "emi": 44327, + "cuth": 44328, + "zai": 44329, + "weekdays": 44330, + "lawsuits": 44331, + "barnard": 44332, + "ت": 44333, + "petting": 44334, + "netes": 44335, + "mulligan": 44336, + "pharmacists": 44337, + "raquel": 44338, + "eton": 44339, + "cranston": 44340, + "gilded": 44341, + "cleary": 44342, + "ceph": 44343, + "raa": 44344, + "pamper": 44345, + "lombardi": 44346, + "asin": 44347, + "sherry": 44348, + "prod": 44349, + "forte": 44350, + "arianism": 44351, + "buffalobills": 44352, + "æľ¬": 44353, + "ðŁĶ¥#": 44354, + "uuu": 44355, + "justices": 44356, + "carina": 44357, + "natin": 44358, + "maslow": 44359, + "drooling": 44360, + "cognac": 44361, + "camber": 44362, + "elong": 44363, + "rdr": 44364, + "inen": 44365, + "convictions": 44366, + "amuse": 44367, + "trock": 44368, + "harmless": 44369, + "visitation": 44370, + "genomic": 44371, + "bland": 44372, + "benoit": 44373, + "chimp": 44374, + "tuscaloosa": 44375, + "greasy": 44376, + "xpo": 44377, + "gilt": 44378, + "seq": 44379, + "permitted": 44380, + "christmaseve": 44381, + "books": 44382, + "mue": 44383, + "oldschool": 44384, + "humanright": 44385, + "beati": 44386, + "ðŁĶĿ": 44387, + "shat": 44388, + "sculpting": 44389, + "hwan": 44390, + "fernandes": 44391, + "sciutto": 44392, + "fuentes": 44393, + "endeavors": 44394, + "maidstone": 44395, + "unparalleled": 44396, + "shouted": 44397, + "queenof": 44398, + "merc": 44399, + "bandic": 44400, + "veda": 44401, + "selangor": 44402, + "pile": 44403, + "jahan": 44404, + "intimidating": 44405, + "disappears": 44406, + "clich": 44407, + "zaha": 44408, + "wurst": 44409, + "hiv": 44410, + "fodils": 44411, + "cordless": 44412, + "aaaaaa": 44413, + "hydra": 44414, + "belinda": 44415, + "eels": 44416, + "buf": 44417, + "sustaining": 44418, + "rugbyleague": 44419, + "noc": 44420, + "brigitte": 44421, + "(ðŁĵ¸:": 44422, + "trombone": 44423, + "soothe": 44424, + "smog": 44425, + "adp": 44426, + "stable": 44427, + "ingley": 44428, + "diagnose": 44429, + "msg": 44430, + "wess": 44431, + "ticketing": 44432, + "onee": 44433, + "nswpol": 44434, 
+ "eup": 44435, + "autopsy": 44436, + "adityanath": 44437, + "sundown": 44438, + "riverfront": 44439, + "siya": 44440, + "pis": 44441, + "hierarchy": 44442, + "durango": 44443, + "dijk": 44444, + "renshaw": 44445, + "heaps": 44446, + "epidemi": 44447, + "davidbowie": 44448, + "internetof": 44449, + "ddi": 44450, + "nationality": 44451, + "mbar": 44452, + "airy": 44453, + "winder": 44454, + "walia": 44455, + "elliott": 44456, + "cx": 44457, + "bavarian": 44458, + "platt": 44459, + "antw": 44460, + "wiwx": 44461, + "softer": 44462, + "neha": 44463, + "heller": 44464, + "thand": 44465, + "daniela": 44466, + "boast": 44467, + "degradation": 44468, + "ðŁĴ¦ðŁĴ¦": 44469, + "transforming": 44470, + "mane": 44471, + "avut": 44472, + "ðŁĺĪðŁĺĪ": 44473, + "voter": 44474, + "thee": 44475, + "tate": 44476, + "puff": 44477, + "indoor": 44478, + "soproud": 44479, + "boyce": 44480, + "borisjohnson": 44481, + "waitin": 44482, + "immunology": 44483, + "ðŁıĨðŁıĨðŁıĨ": 44484, + "âĿĮ": 44485, + "streetfood": 44486, + "lizasober": 44487, + "cavalier": 44488, + "celia": 44489, + "needle": 44490, + "motoring": 44491, + "gato": 44492, + ",)": 44493, + "rade": 44494, + "harvest": 44495, + "tms": 44496, + "jarpad": 44497, + "oney": 44498, + "airmen": 44499, + "vre": 44500, + "impairment": 44501, + "abhishek": 44502, + "snoop": 44503, + "lant": 44504, + "famously": 44505, + "blou": 44506, + "sze": 44507, + "gander": 44508, + "untouch": 44509, + "tuf": 44510, + "deejay": 44511, + "collateral": 44512, + "bind": 44513, + "ðŁļ©": 44514, + "pinning": 44515, + "icn": 44516, + "';": 44517, + "theeconomist": 44518, + "ultram": 44519, + "worldwaterday": 44520, + "tipoff": 44521, + "thei": 44522, + "feeders": 44523, + "campaign": 44524, + "scumb": 44525, + "dayweekend": 44526, + "yom": 44527, + "pedic": 44528, + "hough": 44529, + "psv": 44530, + "plin": 44531, + "onde": 44532, + "bostonmarathon": 44533, + "azzy": 44534, + "*_*": 44535, + "conley": 44536, + "thiago": 44537, + "hooo": 44538, + "galerie": 44539, + "lucid": 44540, + "jett": 44541, + "glitz": 44542, + "finalfantasy": 44543, + "achievers": 44544, + "yung": 44545, + "peregrine": 44546, + "ophi": 44547, + "dames": 44548, + "biomar": 44549, + "âĺĢï¸ıâĺĢï¸ı": 44550, + "skc": 44551, + "lics": 44552, + "flank": 44553, + "arrahman": 44554, + "hoof": 44555, + "upholstery": 44556, + "tats": 44557, + "woz": 44558, + "¿": 44559, + "snoring": 44560, + "raer": 44561, + "lju": 44562, + "apd": 44563, + "plating": 44564, + "kanu": 44565, + "imation": 44566, + "fragrances": 44567, + "mra": 44568, + "moray": 44569, + "mott": 44570, + "immuni": 44571, + "hearties": 44572, + "bhopal": 44573, + "timers": 44574, + "gata": 44575, + "colorway": 44576, + "carnation": 44577, + "winget": 44578, + "sighs": 44579, + "sville": 44580, + "optimist": 44581, + "chateau": 44582, + "olympians": 44583, + "cio": 44584, + "singersongwriter": 44585, + "nyo": 44586, + "fibers": 44587, + "burch": 44588, + "agro": 44589, + "milne": 44590, + "igbo": 44591, + "cramer": 44592, + "ationals": 44593, + "danube": 44594, + "padma": 44595, + "normani": 44596, + "enforced": 44597, + "breck": 44598, + "boehner": 44599, + "arden": 44600, + "surrendered": 44601, + "prosthetic": 44602, + "oma": 44603, + "hailed": 44604, + "calculations": 44605, + "wfa": 44606, + "bib": 44607, + "fcblive": 44608, + "fonda": 44609, + "westcoast": 44610, + "quests": 44611, + "friendly": 44612, + "towie": 44613, + "fitch": 44614, + "balot": 44615, + "stardom": 44616, + "scratching": 44617, + "hosa": 44618, + "thika": 44619, + "oven": 44620, + 
"stroke": 44621, + "outpost": 44622, + "pharmaceuticals": 44623, + "hikari": 44624, + "muy": 44625, + "afd": 44626, + "fallontonight": 44627, + "squat": 44628, + "oru": 44629, + "drained": 44630, + "chocolat": 44631, + "민": 44632, + "worths": 44633, + "rib": 44634, + "muj": 44635, + "thats": 44636, + "residente": 44637, + "itel": 44638, + "boost": 44639, + "migos": 44640, + "mulled": 44641, + "laa": 44642, + "etsyshop": 44643, + "donkeys": 44644, + "mek": 44645, + "ptc": 44646, + "flinders": 44647, + "ehs": 44648, + "rohit": 44649, + "muir": 44650, + "gad": 44651, + "compositions": 44652, + "åĨĻ": 44653, + "combustion": 44654, + "ikh": 44655, + "yemeni": 44656, + "waved": 44657, + "garci": 44658, + "akos": 44659, + "oods": 44660, + "fusion": 44661, + "seque": 44662, + "slan": 44663, + "plur": 44664, + "kicchasu": 44665, + "shenando": 44666, + "sams": 44667, + "worlden": 44668, + "horowitz": 44669, + "withme": 44670, + "microbes": 44671, + "kki": 44672, + "ðŁĴĶðŁĴĶ": 44673, + "wsu": 44674, + "patchwork": 44675, + "freer": 44676, + "yaki": 44677, + "theart": 44678, + "symbolism": 44679, + "miler": 44680, + "btn": 44681, + "mabu": 44682, + "sidekick": 44683, + "motivates": 44684, + "sagitt": 44685, + "naturals": 44686, + "serviced": 44687, + "psori": 44688, + "paola": 44689, + "quig": 44690, + "ibadan": 44691, + "giggs": 44692, + "ë³": 44693, + "scientology": 44694, + "sioux": 44695, + "salamat": 44696, + "dres": 44697, + "cadbury": 44698, + "dhawan": 44699, + "ción": 44700, + "_'": 44701, + "swapping": 44702, + "mariska": 44703, + "jamesbond": 44704, + "explosives": 44705, + "ayles": 44706, + "afer": 44707, + "sagu": 44708, + "censor": 44709, + "toma": 44710, + "jefferson": 44711, + "ringed": 44712, + "partist": 44713, + "irresponsible": 44714, + "aguilar": 44715, + "vacay": 44716, + "equitable": 44717, + "altrincham": 44718, + "acur": 44719, + "manish": 44720, + "germin": 44721, + "schooled": 44722, + "putter": 44723, + "edad": 44724, + "naval": 44725, + "toasty": 44726, + "solareclipse": 44727, + "dishu": 44728, + "coyne": 44729, + "acco": 44730, + "muck": 44731, + "maran": 44732, + "elos": 44733, + "lender": 44734, + "croix": 44735, + "worthless": 44736, + "haber": 44737, + "gunmen": 44738, + "ðŁįĵ": 44739, + "zenith": 44740, + "tenders": 44741, + "hurst": 44742, + "holtz": 44743, + "italians": 44744, + "carlow": 44745, + "ucd": 44746, + "characteristic": 44747, + "bung": 44748, + "avl": 44749, + "uth": 44750, + "sasia": 44751, + "rsl": 44752, + "redman": 44753, + "neighboring": 44754, + "greenpeace": 44755, + "stips": 44756, + "followparty": 44757, + "ygk": 44758, + "enos": 44759, + "omnibus": 44760, + "naissance": 44761, + "chrissy": 44762, + "secure": 44763, + "callback": 44764, + "jihoon": 44765, + "memory": 44766, + "blocker": 44767, + "lanta": 44768, + "daffodils": 44769, + "bilt": 44770, + "fferty": 44771, + "faust": 44772, + "iec": 44773, + "nipples": 44774, + "sog": 44775, + "mnd": 44776, + "jaguar": 44777, + "boldly": 44778, + "abpoli": 44779, + "proposition": 44780, + "gunsense": 44781, + "evansville": 44782, + "cutters": 44783, + "wego": 44784, + "doun": 44785, + "dox": 44786, + "stallions": 44787, + "kaj": 44788, + "shippers": 44789, + "jawa": 44790, + "volo": 44791, + "leven": 44792, + "paprika": 44793, + "kovich": 44794, + "jordi": 44795, + "inductees": 44796, + "appalling": 44797, + "dialysis": 44798, + "alleviate": 44799, + "âĢĶâĢĶ": 44800, + "pieter": 44801, + "midwi": 44802, + "qtr": 44803, + "juliette": 44804, + "intermission": 44805, + "hawks": 44806, + "actment": 
44807, + "oneill": 44808, + "klin": 44809, + "vamps": 44810, + "famous": 44811, + "could": 44812, + "automobi": 44813, + "daan": 44814, + "westend": 44815, + "ellip": 44816, + "nhc": 44817, + "melanch": 44818, + "webseries": 44819, + "tongue": 44820, + "snatched": 44821, + "smyth": 44822, + "tangible": 44823, + "sli": 44824, + "easing": 44825, + "barstool": 44826, + "overlay": 44827, + "affordability": 44828, + "tinged": 44829, + "teras": 44830, + "ayush": 44831, + "wannaone": 44832, + "rhine": 44833, + "dana": 44834, + "shana": 44835, + "kendal": 44836, + "fertile": 44837, + "wir": 44838, + "repleni": 44839, + "larvae": 44840, + "isro": 44841, + "convos": 44842, + "abbrevi": 44843, + "ucc": 44844, + "hungry": 44845, + "burrows": 44846, + "ager": 44847, + "navi": 44848, + "matin": 44849, + "duper": 44850, + "cern": 44851, + "madon": 44852, + "ķï¸ı": 44853, + "éģ": 44854, + "tups": 44855, + "hyatt": 44856, + "shep": 44857, + "fridaynight": 44858, + "wiser": 44859, + "heidi": 44860, + "hatton": 44861, + "pgh": 44862, + "fountain": 44863, + "wristbands": 44864, + "ahmadiyya": 44865, + "aerial": 44866, + "subscribed": 44867, + "solos": 44868, + "mace": 44869, + "slayed": 44870, + "forfe": 44871, + "dulce": 44872, + "christmass": 44873, + "arunjaitley": 44874, + "violate": 44875, + "obstru": 44876, + "nieces": 44877, + "wvu": 44878, + "idyl": 44879, + "faze": 44880, + "preserves": 44881, + "infringe": 44882, + "premiers": 44883, + "intervals": 44884, + "agency": 44885, + "(©": 44886, + "standalone": 44887, + "dimes": 44888, + "boer": 44889, + "parameters": 44890, + "getit": 44891, + "ðŁĺĺðŁĺĺðŁĺĺðŁĺĺ": 44892, + "tulane": 44893, + "forgiven": 44894, + "scoll": 44895, + "mbps": 44896, + "smashbros": 44897, + "robbi": 44898, + "primavera": 44899, + "alist": 44900, + "ghostly": 44901, + "ayat": 44902, + "yeats": 44903, + "impressionist": 44904, + "earphones": 44905, + "caulfield": 44906, + "waikiki": 44907, + "salute": 44908, + "scou": 44909, + "muay": 44910, + "louisvuitton": 44911, + "bakhta": 44912, + "adog": 44913, + "inventions": 44914, + "hurd": 44915, + "foreclo": 44916, + "streamline": 44917, + "thalaivar": 44918, + "chsnews": 44919, + "willard": 44920, + "tsn": 44921, + "europarl": 44922, + "crusher": 44923, + "mysore": 44924, + "grower": 44925, + "raping": 44926, + "patti": 44927, + "gden": 44928, + "smw": 44929, + "mufti": 44930, + "kidman": 44931, + "abr": 44932, + "sounders": 44933, + "skeptical": 44934, + "ðŁĶİ": 44935, + "sundar": 44936, + "ime": 44937, + "ferg": 44938, + "featherweight": 44939, + "arlington": 44940, + "pasqu": 44941, + "agazine": 44942, + "wearable": 44943, + "natic": 44944, + "mcclure": 44945, + "intermitt": 44946, + "horde": 44947, + "sixties": 44948, + "carte": 44949, + "bhav": 44950, + "zeal": 44951, + "experiential": 44952, + "adorned": 44953, + "sommer": 44954, + "enote": 44955, + "hypothesis": 44956, + "stinky": 44957, + "proto": 44958, + "deadlines": 44959, + "vogel": 44960, + "musings": 44961, + "moncton": 44962, + "guter": 44963, + "fle": 44964, + "acion": 44965, + "voiceof": 44966, + "tasha": 44967, + "inhabitants": 44968, + "typeface": 44969, + "sba": 44970, + "btsx": 44971, + "ðŁĶĴ": 44972, + "worx": 44973, + "uhc": 44974, + "joko": 44975, + "cellars": 44976, + "goro": 44977, + "continuum": 44978, + "...&": 44979, + "weathercee": 44980, + "hap": 44981, + "srk": 44982, + "risers": 44983, + "lonelyplanet": 44984, + "unnamed": 44985, + "coeur": 44986, + "ðŁįĮ": 44987, + "theworld": 44988, + "ilike": 44989, + "fasten": 44990, + "amigo": 44991, + "riba": 
44992, + "ramaphosa": 44993, + "staffers": 44994, + "hadley": 44995, + "??\"": 44996, + "fiore": 44997, + "salut": 44998, + "huff": 44999, + "bezos": 45000, + "Ñĭ": 45001, + "rader": 45002, + "kamala": 45003, + "inline": 45004, + "fillers": 45005, + "umatic": 45006, + "allin": 45007, + "shatter": 45008, + "rein": 45009, + "oku": 45010, + "chases": 45011, + "flagged": 45012, + "babymetal": 45013, + "waterstones": 45014, + "tsb": 45015, + "cutout": 45016, + "ophel": 45017, + "aama": 45018, + "rockabilly": 45019, + "stolic": 45020, + "jetblue": 45021, + "ichick": 45022, + "downton": 45023, + "uzbekistan": 45024, + "patna": 45025, + "laq": 45026, + "grange": 45027, + ")_/": 45028, + "subsidi": 45029, + "scp": 45030, + "newscast": 45031, + "itsa": 45032, + "tweetyour": 45033, + "emor": 45034, + "archaeologists": 45035, + "unification": 45036, + "porta": 45037, + "qx": 45038, + "protectors": 45039, + "prohib": 45040, + "charisma": 45041, + "cartag": 45042, + "renfre": 45043, + "sculpt": 45044, + "guwahati": 45045, + "dema": 45046, + "boop": 45047, + "unfpa": 45048, + "dexter": 45049, + "layla": 45050, + "alleges": 45051, + "soups": 45052, + "neveragain": 45053, + "lys": 45054, + "calc": 45055, + "baroness": 45056, + "visualize": 45057, + "gerber": 45058, + "absorbed": 45059, + "iers": 45060, + "ahan": 45061, + "fontein": 45062, + "detectors": 45063, + "verstappen": 45064, + "svc": 45065, + "formulated": 45066, + "acdc": 45067, + "lix": 45068, + "incompetent": 45069, + "bhk": 45070, + "lourdes": 45071, + "waterhouse": 45072, + "snowed": 45073, + "appreciative": 45074, + "sigma": 45075, + "lizasoberano": 45076, + "penned": 45077, + "paycheck": 45078, + "tallinn": 45079, + "fancafe": 45080, + "parisi": 45081, + "avalley": 45082, + "vig": 45083, + "rufc": 45084, + "hardship": 45085, + "socute": 45086, + "poise": 45087, + "ì¹": 45088, + "rothschild": 45089, + "kly": 45090, + "????????": 45091, + "lhp": 45092, + "ilay": 45093, + "fhs": 45094, + "amad": 45095, + "ideals": 45096, + "bradbury": 45097, + "balboa": 45098, + "nicot": 45099, + "kidnap": 45100, + "wolve": 45101, + "tasmanian": 45102, + "opt": 45103, + "matthias": 45104, + "ãĥ³ãĤ": 45105, + "supermarkets": 45106, + "mylittlepony": 45107, + "melee": 45108, + "lister": 45109, + "groun": 45110, + "fedora": 45111, + "kindness": 45112, + "enen": 45113, + "brahms": 45114, + "¯\\_(": 45115, + "roswell": 45116, + "marlene": 45117, + "icu": 45118, + "reformation": 45119, + "orail": 45120, + "hebrides": 45121, + "disparities": 45122, + "terracotta": 45123, + "swallows": 45124, + "reid": 45125, + "influencing": 45126, + "fluor": 45127, + "dene": 45128, + "tumour": 45129, + "blondes": 45130, + "thunderbird": 45131, + "sheva": 45132, + "mogadishu": 45133, + "kab": 45134, + "creeps": 45135, + "iving": 45136, + "eneed": 45137, + "annoy": 45138, + "âĶĢ": 45139, + "intrigue": 45140, + "enquiry": 45141, + "araj": 45142, + "tural": 45143, + "kubernetes": 45144, + "endlessly": 45145, + "dividends": 45146, + "tora": 45147, + "tish": 45148, + "commemorates": 45149, + "unra": 45150, + "trib": 45151, + "ponty": 45152, + "nem": 45153, + "dissent": 45154, + "brewingco": 45155, + "ðŁĺ½": 45156, + "normali": 45157, + "biof": 45158, + "(...": 45159, + "chillen": 45160, + "주": 45161, + "mellon": 45162, + "avis": 45163, + "mccormack": 45164, + "ingra": 45165, + "enriched": 45166, + "customerexperience": 45167, + "testosterone": 45168, + "snug": 45169, + "setti": 45170, + "geronimo": 45171, + "inquirer": 45172, + "breaches": 45173, + "verything": 45174, + "blooming": 45175, 
+ "mura": 45176, + "dispos": 45177, + "bide": 45178, + "deva": 45179, + "shadesof": 45180, + "intrin": 45181, + "shev": 45182, + "sven": 45183, + "nayanthara": 45184, + "ganesha": 45185, + "cws": 45186, + "berta": 45187, + "labelled": 45188, + "useum": 45189, + "nicknamed": 45190, + "mahan": 45191, + "caruso": 45192, + "apur": 45193, + "ðŁijĨ": 45194, + "wq": 45195, + "orphanage": 45196, + "discarded": 45197, + "magnu": 45198, + "lue": 45199, + "jeon": 45200, + "bridgeport": 45201, + "pacing": 45202, + "mercury": 45203, + "(ðŁĵ¸": 45204, + "marxist": 45205, + "amphibious": 45206, + "transplantation": 45207, + "stitching": 45208, + "thenburg": 45209, + "gradual": 45210, + "ãĤĮ": 45211, + "roft": 45212, + "mails": 45213, + "inec": 45214, + "guyana": 45215, + "doppelg": 45216, + "vero": 45217, + "rewrite": 45218, + "headless": 45219, + "harbaugh": 45220, + "gateway": 45221, + "carsforsale": 45222, + "swi": 45223, + "stis": 45224, + "macht": 45225, + "unde": 45226, + "surabaya": 45227, + "stapleton": 45228, + "nurturing": 45229, + "milner": 45230, + "yao": 45231, + "lmaoooo": 45232, + "kosh": 45233, + "arsenal": 45234, + "kame": 45235, + "erry": 45236, + "arroyo": 45237, + "dismisses": 45238, + "rubbed": 45239, + "rcb": 45240, + "lewd": 45241, + "dilu": 45242, + "andor": 45243, + "vide": 45244, + "urin": 45245, + "intersec": 45246, + "haar": 45247, + "alb": 45248, + "yearswith": 45249, + "appleton": 45250, + "éal": 45251, + "ullivan": 45252, + "succu": 45253, + "monterrey": 45254, + "dmx": 45255, + "artemis": 45256, + "ronnie": 45257, + "farmland": 45258, + "sfootball": 45259, + "grotto": 45260, + "anthi": 45261, + "ãĢģ": 45262, + "à®Ł": 45263, + "vidya": 45264, + "jimmyfallon": 45265, + "àµį": 45266, + "tzer": 45267, + "gravitational": 45268, + "wthr": 45269, + "uhhh": 45270, + "ehr": 45271, + "tinker": 45272, + "tijuana": 45273, + "scranton": 45274, + "ramcharan": 45275, + "barclay": 45276, + "revan": 45277, + "msi": 45278, + "kap": 45279, + "wrs": 45280, + "wethenorth": 45281, + "toral": 45282, + "satu": 45283, + "grom": 45284, + "facep": 45285, + "erickson": 45286, + "zyn": 45287, + "sedge": 45288, + "oodle": 45289, + "spursofficial": 45290, + "dsp": 45291, + "sicilian": 45292, + "solihull": 45293, + "receivers": 45294, + "ladakh": 45295, + "hendrick": 45296, + "theri": 45297, + "presiding": 45298, + "mcguinness": 45299, + "litters": 45300, + "gunnar": 45301, + "ghoul": 45302, + "wib": 45303, + "ntv": 45304, + "karo": 45305, + "frock": 45306, + "blau": 45307, + "amplify": 45308, + "allis": 45309, + "ullah": 45310, + "memoirs": 45311, + "khloe": 45312, + "interceptions": 45313, + "petday": 45314, + "looney": 45315, + "confin": 45316, + "chay": 45317, + "piyushgoyal": 45318, + "frequencies": 45319, + "utz": 45320, + "eventual": 45321, + "warmly": 45322, + "oblivion": 45323, + "anka": 45324, + "tait": 45325, + "âĿ¤ï¸ı.": 45326, + "directorial": 45327, + "rulers": 45328, + "princes": 45329, + "muck": 45330, + "sturridge": 45331, + "deuce": 45332, + "abridged": 45333, + "baguette": 45334, + "uncles": 45335, + "pendu": 45336, + "minding": 45337, + "forrester": 45338, + "avila": 45339, + "waller": 45340, + "wallstreet": 45341, + "mentor": 45342, + "hino": 45343, + "highway": 45344, + "cromwell": 45345, + "fanartfriday": 45346, + "mbi": 45347, + "coyle": 45348, + "ahi": 45349, + "trove": 45350, + "spiegel": 45351, + "paytm": 45352, + "mcintosh": 45353, + "jansen": 45354, + "niti": 45355, + "nashville": 45356, + "leno": 45357, + "leicestershire": 45358, + "legos": 45359, + "dict": 45360, + "ðŁĵ½": 
45361, + "spad": 45362, + "beverlyhills": 45363, + "syrah": 45364, + "separates": 45365, + "zain": 45366, + "unfit": 45367, + "drags": 45368, + "tania": 45369, + "overflowing": 45370, + "hrithik": 45371, + "hawthorn": 45372, + "zani": 45373, + "macfar": 45374, + "fide": 45375, + "totem": 45376, + "peds": 45377, + "fundamentally": 45378, + "calico": 45379, + "sinner": 45380, + "jä": 45381, + "hilde": 45382, + "dsd": 45383, + "tenay": 45384, + "tahit": 45385, + "milf": 45386, + "lieb": 45387, + "informing": 45388, + "uplift": 45389, + "rael": 45390, + "mortgages": 45391, + "lect": 45392, + "iiii": 45393, + "guillaume": 45394, + "composites": 45395, + "oldsmobile": 45396, + "lend": 45397, + "garth": 45398, + "commish": 45399, + "baptized": 45400, + "scorpions": 45401, + "rucker": 45402, + "bringbackour": 45403, + "alliance": 45404, + "thalapathy": 45405, + "tali": 45406, + "spans": 45407, + "eridge": 45408, + "witherspoon": 45409, + "linda": 45410, + "skylar": 45411, + "korn": 45412, + "homs": 45413, + "Äį": 45414, + "silenced": 45415, + "caffe": 45416, + "arty": 45417, + "distinguish": 45418, + "towed": 45419, + "pung": 45420, + "jessica": 45421, + "earnest": 45422, + "beaufort": 45423, + "tama": 45424, + "studyabroad": 45425, + "sikhs": 45426, + "newbie": 45427, + "navratri": 45428, + "marble": 45429, + "lounging": 45430, + "litter": 45431, + "dalit": 45432, + "sosa": 45433, + "izes": 45434, + "grade": 45435, + "compromising": 45436, + "triton": 45437, + "detta": 45438, + "vj": 45439, + "chauffe": 45440, + "spectral": 45441, + "powered": 45442, + "montessori": 45443, + "articulate": 45444, + "halton": 45445, + "alco": 45446, + "yey": 45447, + "mntwins": 45448, + "acounty": 45449, + "ðŁijıðŁı¾": 45450, + "âīĪ": 45451, + "madmen": 45452, + "kala": 45453, + "grum": 45454, + "chik": 45455, + "atis": 45456, + "sume": 45457, + "akhtar": 45458, + "jobsearch": 45459, + "highlighter": 45460, + "boath": 45461, + "âĦ¹": 45462, + "tarzan": 45463, + "lambo": 45464, + "âĽĦï¸ı": 45465, + "oxfam": 45466, + "dumpster": 45467, + "pretzels": 45468, + "macos": 45469, + "inclined": 45470, + "factual": 45471, + "advertisers": 45472, + "shui": 45473, + "puree": 45474, + "mlpfi": 45475, + "antidote": 45476, + "capo": 45477, + "pastr": 45478, + "mercado": 45479, + "button": 45480, + "armin": 45481, + "agg": 45482, + "lolla": 45483, + "horribly": 45484, + "errands": 45485, + "christophe": 45486, + "timesnow": 45487, + "mondaymotiv": 45488, + "liss": 45489, + "scandals": 45490, + "mci": 45491, + "disproportion": 45492, + "âĺİ": 45493, + "surpass": 45494, + "samaritan": 45495, + "sotho": 45496, + "purest": 45497, + "flatt": 45498, + "triviatuesday": 45499, + "delectable": 45500, + "leopold": 45501, + "hermione": 45502, + "choudhary": 45503, + "enrich": 45504, + "¡¡": 45505, + "subsidiary": 45506, + "inequalities": 45507, + "bachelor": 45508, + "autoimmune": 45509, + "lakota": 45510, + "ihop": 45511, + "adjec": 45512, + "thesimpsons": 45513, + "shes": 45514, + "sek": 45515, + "gretchen": 45516, + "upstream": 45517, + "hinakhan": 45518, + "copernic": 45519, + "xtina": 45520, + "lug": 45521, + "toughness": 45522, + "ead": 45523, + "clipped": 45524, + "bius": 45525, + "slv": 45526, + "fahren": 45527, + "deepak": 45528, + "cau": 45529, + "xan": 45530, + "immature": 45531, + "digni": 45532, + "bobs": 45533, + "shredding": 45534, + "buttery": 45535, + "accommodations": 45536, + "deven": 45537, + "chunks": 45538, + "superleague": 45539, + "skybet": 45540, + "kildare": 45541, + "jeet": 45542, + "ëį": 45543, + "cek": 45544, + 
"wrecks": 45545, + "propane": 45546, + "ohl": 45547, + "tbd": 45548, + "quoi": 45549, + "trumpp": 45550, + "mimo": 45551, + "reluctant": 45552, + "verne": 45553, + "oic": 45554, + "magh": 45555, + "arnau": 45556, + "sever": 45557, + "lidge": 45558, + "stairway": 45559, + "kicchasudeep": 45560, + "ðŁĶº": 45561, + "machining": 45562, + "aamaadmi": 45563, + "oti": 45564, + "cda": 45565, + "alit": 45566, + "pany": 45567, + "installs": 45568, + "acct": 45569, + "eshop": 45570, + "diem": 45571, + "hardwell": 45572, + "fulfillment": 45573, + "scafe": 45574, + "quack": 45575, + "extracts": 45576, + "sweetened": 45577, + "fighton": 45578, + "fdi": 45579, + "dinger": 45580, + "waltham": 45581, + "usur": 45582, + "referees": 45583, + "seokjin": 45584, + "grann": 45585, + "afrin": 45586, + "thn": 45587, + "schaf": 45588, + "parcels": 45589, + "betis": 45590, + "amarine": 45591, + "noman": 45592, + "khtar": 45593, + "moritz": 45594, + "coupling": 45595, + "barons": 45596, + "ðŁIJ¸": 45597, + "ø": 45598, + "slp": 45599, + "sadler": 45600, + "xander": 45601, + "triad": 45602, + "mcmillan": 45603, + "khz": 45604, + "dividing": 45605, + "ìĹijìĨĮ": 45606, + "daryl": 45607, + "zedd": 45608, + "leys": 45609, + "plaques": 45610, + "fluori": 45611, + "tipperary": 45612, + "onnell": 45613, + "didier": 45614, + "langford": 45615, + "imc": 45616, + "thesun": 45617, + "birdies": 45618, + "archa": 45619, + "yessss": 45620, + "tdi": 45621, + "daria": 45622, + "candace": 45623, + "altam": 45624, + "palaces": 45625, + "chit": 45626, + "santam": 45627, + "eventful": 45628, + "bookof": 45629, + "adb": 45630, + "monstax": 45631, + "creole": 45632, + "coel": 45633, + "âĸ½": 45634, + "wearen": 45635, + "stennis": 45636, + "sheath": 45637, + "atism": 45638, + "groningen": 45639, + "mlpfim": 45640, + "lepre": 45641, + "wrongly": 45642, + "rspca": 45643, + "rendezvous": 45644, + "acknowledging": 45645, + "pelvic": 45646, + "solicitor": 45647, + "slays": 45648, + "nuestra": 45649, + "lod": 45650, + "islander": 45651, + "feroci": 45652, + "fashionshow": 45653, + "rass": 45654, + "dgeon": 45655, + "adolescents": 45656, + "smashes": 45657, + "negligence": 45658, + "grateful": 45659, + "vedere": 45660, + "swoop": 45661, + "ingl": 45662, + "apolice": 45663, + "vandalism": 45664, + "gann": 45665, + "joao": 45666, + "disupdates": 45667, + "zimbabwe": 45668, + "underage": 45669, + "radiance": 45670, + "wof": 45671, + "bourgeo": 45672, + "plas": 45673, + "crani": 45674, + "ghue": 45675, + "wreckem": 45676, + "warrants": 45677, + "reform": 45678, + "jimmie": 45679, + "atwood": 45680, + "ysl": 45681, + "neilhimself": 45682, + "lbj": 45683, + "iman": 45684, + "tanto": 45685, + "noisse": 45686, + "verbs": 45687, + "equipo": 45688, + "altogether": 45689, + "mament": 45690, + "lice": 45691, + "douglass": 45692, + "tierney": 45693, + "primed": 45694, + "jhal": 45695, + "furnitu": 45696, + "brazili": 45697, + "vill": 45698, + "pastels": 45699, + "nison": 45700, + "uff": 45701, + "paralysis": 45702, + "jaye": 45703, + "impo": 45704, + "ðŁijģ": 45705, + "strategically": 45706, + "pakistanis": 45707, + "wassup": 45708, + "superbike": 45709, + "thanku": 45710, + "truelove": 45711, + "shaikh": 45712, + "israelis": 45713, + "vip": 45714, + "tog": 45715, + "lien": 45716, + "laker": 45717, + "greyhounds": 45718, + "culars": 45719, + "bianchi": 45720, + "balotelli": 45721, + "arran": 45722, + "loos": 45723, + "strates": 45724, + "hebron": 45725, + "arvo": 45726, + "sunderland": 45727, + "theal": 45728, + "tombstone": 45729, + "sandman": 45730, + "cpac": 
45731, + "thanksgiving": 45732, + "lovehim": 45733, + "latino": 45734, + "anin": 45735, + "akaif": 45736, + "ĭãĤ": 45737, + "torquay": 45738, + "diest": 45739, + "allianz": 45740, + "ðŁĺķ": 45741, + "golfclub": 45742, + "cllr": 45743, + "walcott": 45744, + "schnau": 45745, + "prompted": 45746, + "nominating": 45747, + "lennox": 45748, + "valet": 45749, + "monro": 45750, + "mayward": 45751, + "eph": 45752, + "ðŁĶĶ": 45753, + "interoper": 45754, + "rda": 45755, + "reflex": 45756, + "armchair": 45757, + "ê°ķ": 45758, + "stripper": 45759, + "porti": 45760, + "pharm": 45761, + "hamza": 45762, + "nireland": 45763, + "neue": 45764, + "hpv": 45765, + "portfoli": 45766, + "sunburn": 45767, + "frisbee": 45768, + "beal": 45769, + "baptiste": 45770, + "xh": 45771, + "tym": 45772, + "prati": 45773, + "overs": 45774, + "hazrat": 45775, + "desert": 45776, + "derry": 45777, + "usky": 45778, + "emmett": 45779, + "acharya": 45780, + ")_/¯": 45781, + "shud": 45782, + "maya": 45783, + "hamill": 45784, + "raim": 45785, + "nrc": 45786, + "fittings": 45787, + "curvy": 45788, + "ðŁıĩ": 45789, + "sterling": 45790, + "à¥Ģ": 45791, + "walkin": 45792, + "shortcuts": 45793, + "milly": 45794, + "astur": 45795, + "alphabe": 45796, + "pli": 45797, + "pez": 45798, + "missyou": 45799, + "radford": 45800, + "mlg": 45801, + "taeyang": 45802, + "notjustlakes": 45803, + "dumps": 45804, + "serendip": 45805, + "leur": 45806, + "raving": 45807, + "ester": 45808, + "depriv": 45809, + "abscbn": 45810, + "ðŁijĩðŁı»": 45811, + "scarcity": 45812, + "ocr": 45813, + "meanings": 45814, + "capt": 45815, + "dahl": 45816, + "fermentation": 45817, + "brioche": 45818, + "towin": 45819, + "outlander": 45820, + "massimo": 45821, + "encro": 45822, + "ðŁ¥³": 45823, + "built": 45824, + "potam": 45825, + "kiri": 45826, + "tmw": 45827, + "monitored": 45828, + "kites": 45829, + "peoplesvote": 45830, + "grayson": 45831, + "íģ¬": 45832, + "afrika": 45833, + "adies": 45834, + "ivote": 45835, + "gyne": 45836, + "gannon": 45837, + "dix": 45838, + "cmc": 45839, + "oural": 45840, + "foxandfriends": 45841, + "beli": 45842, + "igne": 45843, + "glan": 45844, + "katrinakaif": 45845, + "copolitics": 45846, + "qualitative": 45847, + "psi": 45848, + "lucci": 45849, + "discoura": 45850, + "âĺ®": 45851, + "kelli": 45852, + "gautam": 45853, + "caracas": 45854, + "realest": 45855, + "pula": 45856, + "inus": 45857, + "hilltop": 45858, + "makeaw": 45859, + "attenborough": 45860, + "twy": 45861, + "rarity": 45862, + "peckham": 45863, + "mahon": 45864, + "cornelius": 45865, + "clinicians": 45866, + "tonline": 45867, + "tbi": 45868, + "paradise": 45869, + "kasi": 45870, + "inevit": 45871, + "freshness": 45872, + "collingwood": 45873, + "lunatic": 45874, + "defense": 45875, + "copd": 45876, + "infra": 45877, + "wainwright": 45878, + "sainsbury": 45879, + "alabam": 45880, + "tema": 45881, + "laco": 45882, + "checker": 45883, + "relegated": 45884, + "trent": 45885, + "stalks": 45886, + "huffpost": 45887, + "bhubaneswar": 45888, + "astral": 45889, + "shareyour": 45890, + "primrose": 45891, + "hime": 45892, + "catan": 45893, + "endment": 45894, + "endow": 45895, + "clemens": 45896, + "maloney": 45897, + "hilary": 45898, + "gametime": 45899, + "denise": 45900, + "collaborators": 45901, + "bwo": 45902, + "radicals": 45903, + "guetta": 45904, + "icion": 45905, + "aua": 45906, + "snapmatic": 45907, + "satchel": 45908, + "excavation": 45909, + "baseman": 45910, + "são": 45911, + "gnation": 45912, + "feld": 45913, + "survey": 45914, + "shahzad": 45915, + "mast": 45916, + 
"anirudhofficial": 45917, + "trucker": 45918, + "otago": 45919, + "geograph": 45920, + "ethel": 45921, + "âļ¡ï¸ıâļ¡ï¸ı": 45922, + "sver": 45923, + "mutt": 45924, + "internetofthings": 45925, + "anchored": 45926, + "whouse": 45927, + "bangla": 45928, + "balmain": 45929, + "ç¹ĭãģ": 45930, + "breakfa": 45931, + "áĢ": 45932, + "twister": 45933, + "tetris": 45934, + "cav": 45935, + "stags": 45936, + "gz": 45937, + "aub": 45938, + "stormed": 45939, + "helens": 45940, + "yarmouth": 45941, + "stasy": 45942, + "gustavo": 45943, + "cosc": 45944, + "vinson": 45945, + "upp": 45946, + "scricket": 45947, + "assumptions": 45948, + "appe": 45949, + "nuh": 45950, + "uer": 45951, + "premise": 45952, + "naga": 45953, + "eamon": 45954, + "coronary": 45955, + "naf": 45956, + "northside": 45957, + "elmer": 45958, + "rotar": 45959, + "outlining": 45960, + "elf": 45961, + "resurg": 45962, + "katelyn": 45963, + "incan": 45964, + "hysteria": 45965, + "cee": 45966, + "ambani": 45967, + "prolly": 45968, + "ĮãĤĬãģ": 45969, + "axes": 45970, + "sanjose": 45971, + "rembrandt": 45972, + "magpie": 45973, + "evenly": 45974, + "scorsese": 45975, + "quaint": 45976, + "fg": 45977, + "bbuk": 45978, + "indianfootball": 45979, + "weareall": 45980, + "spdwy": 45981, + "pisces": 45982, + "ecg": 45983, + "âĺħâĺħâĺħâĺħâĺħ": 45984, + "preorders": 45985, + ":|": 45986, + "nipple": 45987, + "salazar": 45988, + "jume": 45989, + "jailbreak": 45990, + "minn": 45991, + "bassett": 45992, + "zetta": 45993, + "jeffree": 45994, + "adjun": 45995, + "ticon": 45996, + "sandiego": 45997, + "drinklocal": 45998, + "cholera": 45999, + "solicitors": 46000, + "obo": 46001, + "compost": 46002, + "nian": 46003, + "wra": 46004, + "treach": 46005, + "icic": 46006, + "professional": 46007, + "delve": 46008, + "legate": 46009, + "historia": 46010, + "croissant": 46011, + "connoisse": 46012, + "namo": 46013, + "palliative": 46014, + "chemtrails": 46015, + "iority": 46016, + "globalwarming": 46017, + "comicart": 46018, + "behavioural": 46019, + "rested": 46020, + "lias": 46021, + "climates": 46022, + "ŁãģĦ": 46023, + "rutland": 46024, + "nourish": 46025, + "menopause": 46026, + "hotties": 46027, + "dementi": 46028, + "vespa": 46029, + "melville": 46030, + "analogue": 46031, + "tzman": 46032, + "strung": 46033, + "imperfect": 46034, + "glare": 46035, + "circling": 46036, + "rosberg": 46037, + "reco": 46038, + "ocity": 46039, + "loire": 46040, + "embe": 46041, + "dossier": 46042, + "neel": 46043, + "nando": 46044, + "mea": 46045, + "galvani": 46046, + "finesse": 46047, + "agp": 46048, + "berkeley": 46049, + "asim": 46050, + "âĺºâĺº": 46051, + "quilted": 46052, + "ishere": 46053, + "unmatched": 46054, + "potion": 46055, + "forz": 46056, + "atre": 46057, + "selfies": 46058, + "juliana": 46059, + "ðŁļ¶": 46060, + "âĸº": 46061, + "melton": 46062, + "âłĢâłĢâłĢâłĢâłĢâłĢâłĢâłĢ": 46063, + "spinrilla": 46064, + "purcell": 46065, + "edp": 46066, + "atleti": 46067, + "tonyawards": 46068, + "raja": 46069, + "progno": 46070, + "molten": 46071, + "stuff": 46072, + "pally": 46073, + "nobelprize": 46074, + "âĻ»ï¸ı": 46075, + "spiritual": 46076, + "speake": 46077, + "sasha": 46078, + "brium": 46079, + "truss": 46080, + "criticize": 46081, + "assassinscreed": 46082, + "yoruba": 46083, + "ulo": 46084, + "fireman": 46085, + "workinprogress": 46086, + "efcc": 46087, + "flares": 46088, + "robot": 46089, + "hikers": 46090, + "cll": 46091, + "shadowing": 46092, + "patsy": 46093, + "lehman": 46094, + "cns": 46095, + "å±": 46096, + "guadal": 46097, + "à±į": 46098, + "rape": 46099, + 
"rhonda": 46100, + "parallels": 46101, + "sonja": 46102, + "language": 46103, + "landings": 46104, + "zola": 46105, + "cramps": 46106, + "burning": 46107, + "appraisal": 46108, + "jolla": 46109, + "hamm": 46110, + "kasa": 46111, + "gully": 46112, + "fgo": 46113, + "ulysses": 46114, + "ribe": 46115, + "ðŁĴĦ": 46116, + "ibu": 46117, + "etienne": 46118, + "briar": 46119, + "finely": 46120, + "combating": 46121, + "yql": 46122, + "gotham": 46123, + "wechat": 46124, + "topaz": 46125, + "primaries": 46126, + "lse": 46127, + "izz": 46128, + "hele": 46129, + "disponible": 46130, + "cystic": 46131, + "belichick": 46132, + "thrush": 46133, + "kansascity": 46134, + "geom": 46135, + "solidi": 46136, + "redbubble": 46137, + "bystand": 46138, + "cambridgeshire": 46139, + "parfait": 46140, + "astle": 46141, + "owo": 46142, + "indore": 46143, + "stomping": 46144, + "smelly": 46145, + "ð٤ĸ": 46146, + "locomo": 46147, + "admitting": 46148, + "holme": 46149, + "clockwise": 46150, + "minsk": 46151, + "mcco": 46152, + "forget": 46153, + "evp": 46154, + "camra": 46155, + "abella": 46156, + "yotes": 46157, + "universityof": 46158, + "méxico": 46159, + "silverado": 46160, + "ricket": 46161, + "crombie": 46162, + "puj": 46163, + "eradicate": 46164, + "delight": 46165, + "ygo": 46166, + "glamping": 46167, + "vica": 46168, + "duggan": 46169, + "counters": 46170, + "cfd": 46171, + "scour": 46172, + "reactjs": 46173, + "puram": 46174, + "parasites": 46175, + "inki": 46176, + "villen": 46177, + "stella": 46178, + "limbo": 46179, + "angas": 46180, + "kcr": 46181, + "ðŁĴļðŁĴļðŁĴļ": 46182, + "vapori": 46183, + "mumford": 46184, + "oligar": 46185, + "à¼": 46186, + "aloo": 46187, + "booties": 46188, + "adr": 46189, + "kelli": 46190, + "drummers": 46191, + "avici": 46192, + "natureuk": 46193, + "ronal": 46194, + "intrac": 46195, + "unsplash": 46196, + "leche": 46197, + "goma": 46198, + "eline": 46199, + "enviro": 46200, + "bionic": 46201, + "bueno": 46202, + "mik": 46203, + "avin": 46204, + "starling": 46205, + "empowers": 46206, + "cakeday": 46207, + "boycot": 46208, + "ðŁĴļðŁĴļ": 46209, + "ðŁĮ¸ðŁĮ¸": 46210, + "vach": 46211, + "mci": 46212, + "fractures": 46213, + "geri": 46214, + "sking": 46215, + "excluded": 46216, + "luce": 46217, + "jave": 46218, + "iggy": 46219, + "eviden": 46220, + "akistan": 46221, + "awn": 46222, + "morals": 46223, + "lucifer": 46224, + "haban": 46225, + "tumbling": 46226, + "sundaymotivation": 46227, + "mosley": 46228, + "captainamerica": 46229, + "schicago": 46230, + "theone": 46231, + "motd": 46232, + "dts": 46233, + "ðŁIJ¼": 46234, + "repell": 46235, + "iii": 46236, + "locust": 46237, + "geospatial": 46238, + "mersey": 46239, + "immerse": 46240, + "descend": 46241, + "bernade": 46242, + "js": 46243, + "boatsales": 46244, + "winder": 46245, + "crank": 46246, + "singleton": 46247, + "candidacy": 46248, + "bena": 46249, + "ðŁı»âĢį": 46250, + "highlander": 46251, + "olt": 46252, + "kprs": 46253, + "healthylifestyle": 46254, + "fourteen": 46255, + "endthe": 46256, + "ithaca": 46257, + "circulated": 46258, + "rans": 46259, + "prevalent": 46260, + "havas": 46261, + "splendor": 46262, + "rooster": 46263, + "kalamazoo": 46264, + "jewellers": 46265, + "ennedy": 46266, + "rousey": 46267, + "esy": 46268, + "cannons": 46269, + "ornamental": 46270, + "////": 46271, + "rendon": 46272, + "winne": 46273, + "molding": 46274, + "eidmubarak": 46275, + "countess": 46276, + "simona": 46277, + "hawa": 46278, + "foes": 46279, + "duster": 46280, + "sbu": 46281, + "portray": 46282, + "marries": 46283, + "goodday": 46284, 
+ "choco": 46285, + "achiever": 46286, + "ðŁĺ¹ðŁĺ¹": 46287, + "preneur": 46288, + "tramp": 46289, + "tomi": 46290, + "nbat": 46291, + "gardenchat": 46292, + "farrakhan": 46293, + "everglades": 46294, + "abru": 46295, + "sousa": 46296, + "sece": 46297, + "homeswee": 46298, + "terrestrial": 46299, + "barit": 46300, + "sridevi": 46301, + "olu": 46302, + "melinda": 46303, + "frick": 46304, + "candies": 46305, + "ðŁĺŃðŁĴķ": 46306, + "qureshi": 46307, + "familyfun": 46308, + "exorcist": 46309, + "cardinal": 46310, + "nyt": 46311, + "diesel": 46312, + "cumulus": 46313, + "capricorn": 46314, + "siology": 46315, + "lorna": 46316, + "dougie": 46317, + "andie": 46318, + "supersport": 46319, + "cfl": 46320, + "пÑĢи": 46321, + "sayang": 46322, + "peek": 46323, + "à¸Ĭ": 46324, + "lobe": 46325, + "jem": 46326, + "inglis": 46327, + "ggled": 46328, + "csn": 46329, + "amnesty": 46330, + "chups": 46331, + "baes": 46332, + "sauer": 46333, + "ðŁıIJ": 46334, + "mongolian": 46335, + "enet": 46336, + "backstreet": 46337, + "drilled": 46338, + "accessing": 46339, + "ceo": 46340, + "bse": 46341, + "aiken": 46342, + "purr": 46343, + "worsen": 46344, + "wheres": 46345, + "wark": 46346, + "testifying": 46347, + "buri": 46348, + "blast": 46349, + "awg": 46350, + "ðŁĵĭ": 46351, + "redefining": 46352, + "hearing": 46353, + "uci": 46354, + "cmp": 46355, + "boni": 46356, + "tailoring": 46357, + "taji": 46358, + "nocchi": 46359, + "emt": 46360, + "stephenking": 46361, + "neet": 46362, + "complains": 46363, + "campaigner": 46364, + "luciano": 46365, + "twilight": 46366, + "tiesto": 46367, + "passports": 46368, + "floyd": 46369, + "cathedr": 46370, + "naked": 46371, + "caregiver": 46372, + "bcoz": 46373, + "adecides": 46374, + "kuri": 46375, + "lyk": 46376, + "braries": 46377, + "drenched": 46378, + "disclose": 46379, + "ðŁĴªðŁı½": 46380, + "leblanc": 46381, + "jetty": 46382, + "garty": 46383, + "chipmun": 46384, + "bsu": 46385, + "rhythmic": 46386, + "icz": 46387, + "frid": 46388, + "annex": 46389, + "amex": 46390, + "soloist": 46391, + "lancers": 46392, + "arrowhead": 46393, + "specification": 46394, + "simulated": 46395, + "nais": 46396, + "inverte": 46397, + "bowing": 46398, + "worship": 46399, + "fz": 46400, + "aboss": 46401, + "shaq": 46402, + "ì¶ķ": 46403, + "challengers": 46404, + "anarch": 46405, + "aamaadmiparty": 46406, + "ãħĭãħĭãħĭ": 46407, + "suffolk": 46408, + "socorro": 46409, + "snell": 46410, + "cladding": 46411, + "absorbing": 46412, + "shawa": 46413, + "participates": 46414, + "ðŁįĶ": 46415, + "bookstores": 46416, + "baku": 46417, + "seaport": 46418, + "kojima": 46419, + "gaby": 46420, + "packard": 46421, + "electrician": 46422, + "letit": 46423, + "mowing": 46424, + "fawad": 46425, + "youngjae": 46426, + "hotmail": 46427, + "mening": 46428, + "urie": 46429, + "intimacy": 46430, + "conti": 46431, + ":\")": 46432, + "lifeisgood": 46433, + "inciner": 46434, + "idri": 46435, + "craziness": 46436, + "journos": 46437, + "franchi": 46438, + "bottlen": 46439, + "alda": 46440, + "ffes": 46441, + "kx": 46442, + "southwe": 46443, + "aira": 46444, + "clayton": 46445, + "scoti": 46446, + "fj": 46447, + "briga": 46448, + "ð٤ĺðŁı»": 46449, + "demonstrators": 46450, + "yz": 46451, + "stork": 46452, + "naq": 46453, + "cascades": 46454, + "travelchat": 46455, + "plata": 46456, + "padma": 46457, + "franci": 46458, + "attain": 46459, + "batgirl": 46460, + "lombard": 46461, + "hoos": 46462, + "ddos": 46463, + "neonatal": 46464, + "disclaimer": 46465, + "rss": 46466, + "rant": 46467, + "disen": 46468, + "texaste": 46469, + 
"socal": 46470, + "fractal": 46471, + "camry": 46472, + "strife": 46473, + "snacking": 46474, + "muh": 46475, + "santander": 46476, + "morons": 46477, + "graf": 46478, + "parades": 46479, + "huston": 46480, + "drupal": 46481, + "miento": 46482, + "kirstel": 46483, + "hyde": 46484, + "vomit": 46485, + "fortified": 46486, + "sphinx": 46487, + "dav": 46488, + "biryani": 46489, + "winnings": 46490, + "sbaseball": 46491, + "merged": 46492, + "lovelondon": 46493, + "lingering": 46494, + "dreambig": 46495, + "carleton": 46496, + "livelihood": 46497, + "django": 46498, + "astrid": 46499, + "grids": 46500, + "downe": 46501, + "bruised": 46502, + "sne": 46503, + "scarecrow": 46504, + "helium": 46505, + "fnc": 46506, + "biggs": 46507, + "anter": 46508, + "restorative": 46509, + "empires": 46510, + "abdel": 46511, + "lifestyle": 46512, + "kiwanis": 46513, + "colloquium": 46514, + "meen": 46515, + "prick": 46516, + "antique": 46517, + "zeb": 46518, + "mimic": 46519, + "edmonds": 46520, + "ðŁijĬðŁijĬ": 46521, + "qing": 46522, + "ppel": 46523, + "mcgill": 46524, + "interpreting": 46525, + "âŀķ": 46526, + "rashad": 46527, + "doka": 46528, + "narrator": 46529, + "electromagnetic": 46530, + "ashby": 46531, + "saura": 46532, + "irandeal": 46533, + "âģīï¸ı": 46534, + "krishnan": 46535, + "indi": 46536, + "ffen": 46537, + "brea": 46538, + "osman": 46539, + "multinational": 46540, + "chippe": 46541, + "recruiters": 46542, + "ausbiz": 46543, + "pounding": 46544, + "regen": 46545, + "cursor": 46546, + "refusal": 46547, + "macs": 46548, + "inak": 46549, + "axial": 46550, + "waifu": 46551, + "upcycled": 46552, + "hindustan": 46553, + "cassini": 46554, + "carlyle": 46555, + "scratches": 46556, + "reef": 46557, + "manatee": 46558, + "eatery": 46559, + "ðŁĵ¢": 46560, + "uncondition": 46561, + "senpai": 46562, + "onther": 46563, + "comicbook": 46564, + "prosciutto": 46565, + "demar": 46566, + "mise": 46567, + "mage": 46568, + "freec": 46569, + "ayesha": 46570, + "alder": 46571, + "androidgames": 46572, + "leyton": 46573, + "hock": 46574, + "doorway": 46575, + "chicagofire": 46576, + "aaliyah": 46577, + "swelling": 46578, + "bix": 46579, + ".ðŁĺĤ": 46580, + "evankirstel": 46581, + "torpedo": 46582, + "konstant": 46583, + "genevieve": 46584, + "maia": 46585, + "hauser": 46586, + "dotorg": 46587, + "hideous": 46588, + "fik": 46589, + "spraw": 46590, + "eek": 46591, + "zappa": 46592, + "wandered": 46593, + "''": 46594, + "rajan": 46595, + "bambi": 46596, + "($)": 46597, + "widening": 46598, + "toolbox": 46599, + "sair": 46600, + "illuminating": 46601, + "prays": 46602, + "outpatient": 46603, + "iw": 46604, + "dayo": 46605, + "lob": 46606, + "swfl": 46607, + "shades": 46608, + "gums": 46609, + "cookin": 46610, + "kodi": 46611, + "griffin": 46612, + "traumati": 46613, + "stea": 46614, + "slaughtered": 46615, + "godbless": 46616, + "airtime": 46617, + "pseudo": 46618, + "bsa": 46619, + "hauled": 46620, + "arif": 46621, + "à¸Ńà¸ĩ": 46622, + "lel": 46623, + "wcpo": 46624, + "militi": 46625, + "charters": 46626, + "worlda": 46627, + "ruk": 46628, + "kgs": 46629, + "digitalindia": 46630, + "isable": 46631, + "idyllic": 46632, + "espino": 46633, + "marietta": 46634, + "ebo": 46635, + "teamcanada": 46636, + "abour": 46637, + "wilton": 46638, + "rockstars": 46639, + "favored": 46640, + "physic": 46641, + "wrinkle": 46642, + "tbr": 46643, + "dprint": 46644, + "ballarat": 46645, + "adal": 46646, + "zey": 46647, + "ðŁĺįðŁĶ¥": 46648, + "tomlin": 46649, + "mtr": 46650, + "palsy": 46651, + "fenerbah": 46652, + "tighten": 46653, + 
"philia": 46654, + "ironing": 46655, + "ryu": 46656, + "bant": 46657, + "enquire": 46658, + "cair": 46659, + "aburger": 46660, + "trun": 46661, + "greenberg": 46662, + "chauhan": 46663, + "irina": 46664, + "shani": 46665, + "trendsetter": 46666, + "prett": 46667, + "zafar": 46668, + "alove": 46669, + "vici": 46670, + "panic": 46671, + "noo": 46672, + "lustre": 46673, + "disrupted": 46674, + "ballis": 46675, + "sonsof": 46676, + "monsi": 46677, + "instac": 46678, + "akest": 46679, + "ëĭ¤": 46680, + "kwame": 46681, + "horrormovies": 46682, + "district": 46683, + "saucy": 46684, + "mban": 46685, + "armies": 46686, + "withdrawn": 46687, + "medics": 46688, + "loftus": 46689, + "eroom": 46690, + "bekind": 46691, + "arns": 46692, + "allon": 46693, + "unison": 46694, + "davids": 46695, + "crat": 46696, + "nicotine": 46697, + "soor": 46698, + "smx": 46699, + "onco": 46700, + "cosplaying": 46701, + "zombies": 46702, + "harms": 46703, + "eger": 46704, + "rosy": 46705, + "moonshine": 46706, + "fein": 46707, + "cett": 46708, + "dubrov": 46709, + "regents": 46710, + "benitez": 46711, + "ðŁijıðŁı¼ðŁijıðŁı¼": 46712, + "stec": 46713, + "malia": 46714, + "prioritize": 46715, + "iceland": 46716, + "ftse": 46717, + "vamo": 46718, + "lamont": 46719, + "homosexuality": 46720, + "brees": 46721, + "regui": 46722, + "cbp": 46723, + "tej": 46724, + "skysports": 46725, + "detergent": 46726, + "shasta": 46727, + "derel": 46728, + "conservancy": 46729, + "colorized": 46730, + "accolades": 46731, + "viso": 46732, + "showyour": 46733, + "nanow": 46734, + "biceps": 46735, + "usability": 46736, + "bim": 46737, + "dailysketch": 46738, + "pearljam": 46739, + "strangest": 46740, + "megadeth": 46741, + "broadcasts": 46742, + "barren": 46743, + "arton": 46744, + "chriss": 46745, + "configu": 46746, + "lures": 46747, + "isthe": 46748, + "eul": 46749, + "railwayana": 46750, + "globalhealth": 46751, + "gianni": 46752, + "uaap": 46753, + "slum": 46754, + "consciously": 46755, + "abre": 46756, + "nup": 46757, + "budget": 46758, + "vada": 46759, + "esch": 46760, + "realness": 46761, + "erased": 46762, + "thunt": 46763, + "bez": 46764, + "armistice": 46765, + "ðŁij¹": 46766, + "shrun": 46767, + "oled": 46768, + "driverless": 46769, + "ðŁ¤·ðŁı»âĢįâĻĢï¸ı": 46770, + "wondr": 46771, + "skan": 46772, + "salaam": 46773, + "motherland": 46774, + "hwang": 46775, + "geno": 46776, + "gangnam": 46777, + "twright": 46778, + "endorsing": 46779, + "enic": 46780, + "adoration": 46781, + "paused": 46782, + "patricks": 46783, + "docked": 46784, + "platte": 46785, + "ffxv": 46786, + "ethnicity": 46787, + "autoshow": 46788, + "sideshow": 46789, + "afterlife": 46790, + "relocated": 46791, + "orphaned": 46792, + "foodnetwork": 46793, + "dareto": 46794, + "andra": 46795, + "slaps": 46796, + "vlive": 46797, + "swims": 46798, + "reimagined": 46799, + "mistle": 46800, + "revise": 46801, + "reality": 46802, + "bharti": 46803, + "ðŁĴĻðŁĴĽ": 46804, + "latest": 46805, + "proudest": 46806, + "grasses": 46807, + "lanyard": 46808, + "freshest": 46809, + "carcinoma": 46810, + "anomaly": 46811, + "ziegler": 46812, + "sumner": 46813, + "lyrix": 46814, + "gorg": 46815, + "isd": 46816, + "avel": 46817, + "swildlife": 46818, + "mesqu": 46819, + "johncena": 46820, + "euroleague": 46821, + "saber": 46822, + "masterful": 46823, + "yarra": 46824, + "cognition": 46825, + "jacobson": 46826, + "abolic": 46827, + "sirloin": 46828, + "shukla": 46829, + "mojito": 46830, + "supere": 46831, + "stweet": 46832, + "mez": 46833, + "esa": 46834, + "rudolf": 46835, + "gura": 46836, + 
"whereyou": 46837, + "ttm": 46838, + "wins": 46839, + "trustworthy": 46840, + "nyk": 46841, + "braden": 46842, + "tabletop": 46843, + "goodfood": 46844, + "eson": 46845, + "bek": 46846, + "linguistic": 46847, + "grays": 46848, + "chath": 46849, + "hcs": 46850, + "moni": 46851, + "deans": 46852, + "cussions": 46853, + "chell": 46854, + "slows": 46855, + "hemi": 46856, + "dapp": 46857, + "sharpie": 46858, + "boosters": 46859, + "aos": 46860, + "strack": 46861, + "sedona": 46862, + "mueller": 46863, + "hardwick": 46864, + "ornate": 46865, + "thora": 46866, + "salud": 46867, + "otwol": 46868, + "chum": 46869, + "miho": 46870, + "forage": 46871, + "thelittle": 46872, + "tearful": 46873, + "oneself": 46874, + "mindy": 46875, + "smg": 46876, + "gmbh": 46877, + "emerald": 46878, + "ðŁĶ´âļªï¸ı": 46879, + "tutti": 46880, + "receptions": 46881, + "revising": 46882, + "ibrox": 46883, + "topeka": 46884, + "salami": 46885, + "expanse": 46886, + "ibooks": 46887, + "dobson": 46888, + "clio": 46889, + "ats": 46890, + "ðŁļĮ": 46891, + "moha": 46892, + "isance": 46893, + "shutters": 46894, + "moot": 46895, + "janine": 46896, + "marvelcomics": 46897, + "jordani": 46898, + "poser": 46899, + "kenneth": 46900, + "hyung": 46901, + "deja": 46902, + "aseball": 46903, + "speciality": 46904, + "euston": 46905, + "classiccar": 46906, + "hadith": 46907, + "ðŁIJī": 46908, + "chasing": 46909, + "izo": 46910, + "grosven": 46911, + "aglia": 46912, + "thisdayinhistory": 46913, + "trow": 46914, + "omile": 46915, + "huar": 46916, + "byn": 46917, + "saline": 46918, + "divine": 46919, + "demonic": 46920, + "tyran": 46921, + "handover": 46922, + "revitalization": 46923, + "paella": 46924, + "cryptic": 46925, + "sedg": 46926, + "mend": 46927, + "dunkirk": 46928, + "bred": 46929, + "wald": 46930, + "sportscar": 46931, + "aard": 46932, + "wheaton": 46933, + "daener": 46934, + "klan": 46935, + "brt": 46936, + "bakhtawar": 46937, + "spires": 46938, + "schubert": 46939, + "roti": 46940, + "polish": 46941, + "ose": 46942, + "agame": 46943, + "wondercon": 46944, + "protestant": 46945, + "bosa": 46946, + "ðŁĺŁ": 46947, + "dü": 46948, + "joyride": 46949, + "gertrude": 46950, + "âĿĿ": 46951, + "gila": 46952, + "vh": 46953, + "twa": 46954, + "trav": 46955, + "swallowed": 46956, + "starve": 46957, + "lain": 46958, + "entren": 46959, + "reiki": 46960, + "sukh": 46961, + "craic": 46962, + "azu": 46963, + "webpage": 46964, + "keefe": 46965, + "hypothe": 46966, + "hirsch": 46967, + "helle": 46968, + "campground": 46969, + "wamy": 46970, + "travi": 46971, + "shahi": 46972, + "sandeep": 46973, + "rui": 46974, + "hanuman": 46975, + "dwp": 46976, + "repository": 46977, + "noor": 46978, + "noff": 46979, + "unreal": 46980, + "pell": 46981, + "blackhistory": 46982, + "harvick": 46983, + "mascar": 46984, + "payee": 46985, + "pasha": 46986, + "gastronomy": 46987, + "dÃŃ": 46988, + "aig": 46989, + "rosenthal": 46990, + "openday": 46991, + "embellished": 46992, + "ttip": 46993, + "sunbathing": 46994, + "gopack": 46995, + "endome": 46996, + "ï¸ı#": 46997, + "invalid": 46998, + "finalfour": 46999, + "stfu": 47000, + "squishy": 47001, + "rasta": 47002, + "mosch": 47003, + "jamesc": 47004, + "dietrich": 47005, + "sela": 47006, + "melb": 47007, + "elvi": 47008, + "tdp": 47009, + "suni": 47010, + "slit": 47011, + "jha": 47012, + "biza": 47013, + "spiked": 47014, + "lli": 47015, + "lillard": 47016, + "vampi": 47017, + "synopsis": 47018, + "azhar": 47019, + "kendricklamar": 47020, + "ĮãĤĬãģŁãģĦ": 47021, + "heartless": 47022, + "countryfile": 47023, + "airplay": 
47024, + "arrogance": 47025, + "pree": 47026, + "virtuoso": 47027, + "ãħłãħłãħłãħł": 47028, + "raju": 47029, + "lebu": 47030, + "forward": 47031, + "tug": 47032, + "dros": 47033, + "mondaymotivaton": 47034, + "concepcion": 47035, + "thelo": 47036, + "padi": 47037, + "looool": 47038, + "ÑĢод": 47039, + "itss": 47040, + "ethical": 47041, + "enduro": 47042, + "__:": 47043, + "expenditure": 47044, + "monste": 47045, + "masking": 47046, + "terriers": 47047, + "ibis": 47048, + "ember": 47049, + "cumple": 47050, + "punctuation": 47051, + "piper": 47052, + "irvin": 47053, + "adee": 47054, + "yyyyyy": 47055, + "flashbacks": 47056, + "celsius": 47057, + "donnie": 47058, + "bogota": 47059, + "benevol": 47060, + "thescript": 47061, + "shilpa": 47062, + "prose": 47063, + "findia": 47064, + "zeke": 47065, + "neko": 47066, + "doves": 47067, + "blueslyrix": 47068, + "frosh": 47069, + "soweto": 47070, + "mplo": 47071, + "alai": 47072, + "sabi": 47073, + "raqqa": 47074, + "wftv": 47075, + "stroller": 47076, + "iansomerhalder": 47077, + "ðŁĶª": 47078, + "anon": 47079, + "moseley": 47080, + "!?!?": 47081, + "staking": 47082, + "moly": 47083, + "cartri": 47084, + "csg": 47085, + "astor": 47086, + "transcend": 47087, + "maer": 47088, + "deux": 47089, + "cowgirl": 47090, + "sask": 47091, + "punter": 47092, + "maken": 47093, + "oates": 47094, + "lovett": 47095, + "growler": 47096, + "sagin": 47097, + "vn": 47098, + "ssible": 47099, + "officeofrg": 47100, + "ymc": 47101, + "sabar": 47102, + "faulty": 47103, + "apha": 47104, + "akon": 47105, + "ðŁij«": 47106, + "snowdon": 47107, + "aew": 47108, + "raisethe": 47109, + "ðĿĵ": 47110, + "gruesome": 47111, + "clementine": 47112, + "sping": 47113, + "lata": 47114, + "worldenviron": 47115, + "mimic": 47116, + "canaria": 47117, + "bakhtawarbz": 47118, + "aoa": 47119, + "fala": 47120, + "ãĤŃ": 47121, + "aviva": 47122, + "youuuu": 47123, + "thigh": 47124, + "ladders": 47125, + "gumbo": 47126, + "tzky": 47127, + "fuzz": 47128, + "plasticpollution": 47129, + "estate": 47130, + "strengthened": 47131, + "kant": 47132, + "drin": 47133, + "calvert": 47134, + "transformational": 47135, + "frightened": 47136, + "maclean": 47137, + "elitedangerous": 47138, + "earthy": 47139, + "tson": 47140, + "toda": 47141, + "jnu": 47142, + "..,": 47143, + "michal": 47144, + "iban": 47145, + "jeong": 47146, + "isreal": 47147, + "simcoe": 47148, + "exclusives": 47149, + "bluebells": 47150, + "bene": 47151, + "teu": 47152, + "pilsner": 47153, + "penske": 47154, + "atheists": 47155, + "mpu": 47156, + "cartagena": 47157, + "ðŁĴĹðŁĴĹ": 47158, + "millionaires": 47159, + "kkkk": 47160, + "itar": 47161, + "subscriptions": 47162, + "remote": 47163, + "mafi": 47164, + "hinton": 47165, + "wcc": 47166, + "hok": 47167, + "dsb": 47168, + "ableton": 47169, + "seventy": 47170, + "punks": 47171, + "eindhoven": 47172, + "shone": 47173, + "mcfarlane": 47174, + "limpopo": 47175, + "emphasi": 47176, + "ü": 47177, + "sinfo": 47178, + "petre": 47179, + "mangrove": 47180, + "chino": 47181, + "bertie": 47182, + "playlists": 47183, + "pushawards": 47184, + "paf": 47185, + "debbie": 47186, + "cdo": 47187, + "rino": 47188, + "ðŁı¾âĢįâĻĤï¸ı": 47189, + "folke": 47190, + "bonnar": 47191, + "thine": 47192, + "slan": 47193, + "halter": 47194, + "evie": 47195, + "awsome": 47196, + "vultures": 47197, + "sparky": 47198, + "seizures": 47199, + "âľĶ": 47200, + "ramone": 47201, + "ineffe": 47202, + "aln": 47203, + "proctor": 47204, + "astra": 47205, + "thevoice": 47206, + "grote": 47207, + "scion": 47208, + "deadline": 47209, + "amaya": 
47210, + "tainted": 47211, + "patterned": 47212, + "exceeding": 47213, + "crossfit": 47214, + "kaylee": 47215, + "dropbox": 47216, + "rushes": 47217, + "tackled": 47218, + "moby": 47219, + "retrogamer": 47220, + "ncbd": 47221, + "benefitting": 47222, + "shaykh": 47223, + "guildhall": 47224, + "gentry": 47225, + "dreamcast": 47226, + "dreaded": 47227, + "bundled": 47228, + "thaw": 47229, + "revolving": 47230, + "npt": 47231, + "kyliejenner": 47232, + "imaginative": 47233, + "roni": 47234, + "overcame": 47235, + "familytime": 47236, + "dsburg": 47237, + "carnaval": 47238, + "relationship": 47239, + "recognizable": 47240, + "coroner": 47241, + "hole": 47242, + "fanfic": 47243, + "emirates": 47244, + "burritos": 47245, + "analyse": 47246, + "thinner": 47247, + "nees": 47248, + "gallipoli": 47249, + "blr": 47250, + "catwoman": 47251, + "-->>": 47252, + "ault": 47253, + "adaily": 47254, + "naughty": 47255, + "ilio": 47256, + "solitaire": 47257, + "mtvbr": 47258, + "jocelyn": 47259, + "arunach": 47260, + "repent": 47261, + "southgate": 47262, + "hyacin": 47263, + "essential": 47264, + "fenton": 47265, + "andum": 47266, + "itor": 47267, + "gopal": 47268, + "slinger": 47269, + "posei": 47270, + "awil": 47271, + "wielding": 47272, + "raila": 47273, + "elias": 47274, + "asto": 47275, + "ä": 47276, + "tendency": 47277, + "strata": 47278, + "kert": 47279, + "<-": 47280, + "imacele": 47281, + "daes": 47282, + "stimulus": 47283, + "hanley": 47284, + "fitnes": 47285, + "ecstasy": 47286, + "limous": 47287, + "hailing": 47288, + "ð٤Ń": 47289, + "chiswick": 47290, + "taries": 47291, + "slav": 47292, + "puli": 47293, + "modernization": 47294, + "blackmail": 47295, + "bingham": 47296, + "hfx": 47297, + "++": 47298, + "ðŁĩ®ðŁĩ³": 47299, + "niv": 47300, + "wea": 47301, + "professor": 47302, + "koff": 47303, + "bolster": 47304, + "suave": 47305, + "sequences": 47306, + "pepperoni": 47307, + "notte": 47308, + "dren": 47309, + "ãģ¨ç¹ĭãģ": 47310, + "hsv": 47311, + "oga": 47312, + "aptly": 47313, + "zad": 47314, + "excelsi": 47315, + "rinka": 47316, + "moldova": 47317, + "minn": 47318, + "mabel": 47319, + "conferencing": 47320, + "basing": 47321, + "ofer": 47322, + "obsi": 47323, + "hamillhimself": 47324, + "careless": 47325, + "briefed": 47326, + "inherent": 47327, + "parish": 47328, + "dubnation": 47329, + "townsville": 47330, + "sarawak": 47331, + "geeky": 47332, + "doncasterisgreat": 47333, + "wasabi": 47334, + "gup": 47335, + "pheno": 47336, + "drainthe": 47337, + "carrieunderwood": 47338, + "bleeds": 47339, + "bbcworld": 47340, + "anew": 47341, + "altaf": 47342, + "dulwich": 47343, + "aniston": 47344, + "wti": 47345, + "sumatra": 47346, + "grafton": 47347, + "bln": 47348, + "mester": 47349, + "bodega": 47350, + "rego": 47351, + "esq": 47352, + "anjo": 47353, + "sumptuous": 47354, + "maisie": 47355, + "�": 47356, + "wilt": 47357, + "jakob": 47358, + "elvis": 47359, + "sepul": 47360, + "muster": 47361, + "airpollution": 47362, + "presidente": 47363, + "happymonday": 47364, + "extensively": 47365, + "flondon": 47366, + "tls": 47367, + "playing": 47368, + "peed": 47369, + "dinho": 47370, + "vardy": 47371, + "pika": 47372, + "niro": 47373, + "aucus": 47374, + "ðŁį¦": 47375, + "null": 47376, + "elondon": 47377, + "juventus": 47378, + "imagines": 47379, + "disab": 47380, + "lito": 47381, + "dura": 47382, + "workplaces": 47383, + "promote": 47384, + "mccaf": 47385, + "woodwork": 47386, + "wawx": 47387, + "ப": 47388, + "ttino": 47389, + "shari": 47390, + "semper": 47391, + "bettertogether": 47392, + "ðŁijĬðŁı»": 47393, 
+ "zebra": 47394, + "pondering": 47395, + "enchil": 47396, + "hom": 47397, + "cosmic": 47398, + "tanz": 47399, + "mocked": 47400, + "eccc": 47401, + "athed": 47402, + "abolish": 47403, + "propeller": 47404, + "parisagreement": 47405, + "assemblies": 47406, + "industry": 47407, + "fraudulent": 47408, + "pesa": 47409, + "changmin": 47410, + "axx": 47411, + "ðŁĴµ": 47412, + "irrational": 47413, + "cusa": 47414, + "ramadhan": 47415, + "octavia": 47416, + "onelove": 47417, + "jacki": 47418, + "barak": 47419, + "taxider": 47420, + "serious": 47421, + "nathanfillion": 47422, + "mcen": 47423, + "chk": 47424, + "popart": 47425, + "gravity": 47426, + "coppola": 47427, + "readingfc": 47428, + "illusions": 47429, + "jig": 47430, + "wwx": 47431, + "resh": 47432, + "exporting": 47433, + "buzzard": 47434, + "âϤ": 47435, + "pcm": 47436, + "lanapar": 47437, + "kos": 47438, + "aromas": 47439, + "antalya": 47440, + "wwdc": 47441, + "vena": 47442, + "phila": 47443, + "ballin": 47444, + "ðŁijĦ": 47445, + "quinta": 47446, + "mao": 47447, + "fery": 47448, + "eighty": 47449, + "sentiments": 47450, + "safeguarding": 47451, + "rwa": 47452, + "puffs": 47453, + "lucille": 47454, + "decath": 47455, + "slu": 47456, + "nugent": 47457, + "deter": 47458, + "brazil": 47459, + "zeiss": 47460, + "superbowl": 47461, + "subsidy": 47462, + "altern": 47463, + "hidalgo": 47464, + "enzymes": 47465, + "ä½": 47466, + "tagne": 47467, + "hairdresser": 47468, + "adrien": 47469, + "walkout": 47470, + "opposes": 47471, + "cantina": 47472, + "bedside": 47473, + "afan": 47474, + "ðŁĶĹ": 47475, + "prophetic": 47476, + "danes": 47477, + "unsuccessful": 47478, + "supercharged": 47479, + "pkk": 47480, + "exemption": 47481, + "hartle": 47482, + "secular": 47483, + "clipping": 47484, + "brs": 47485, + "unitedway": 47486, + "cnet": 47487, + "patchy": 47488, + "hagan": 47489, + "een": 47490, + "âļľ": 47491, + "vara": 47492, + "sympathi": 47493, + "nevertrump": 47494, + "affirmation": 47495, + "omf": 47496, + "nycfc": 47497, + "maja": 47498, + "surro": 47499, + "keerth": 47500, + "upscale": 47501, + "sandalwood": 47502, + "monarchy": 47503, + "knobs": 47504, + "åĭ": 47505, + "potholes": 47506, + "hungergames": 47507, + "terraces": 47508, + "nasir": 47509, + "counsell": 47510, + "welcometo": 47511, + "waq": 47512, + "seaman": 47513, + "mita": 47514, + "stunningly": 47515, + "ontheroad": 47516, + "inability": 47517, + ")!!": 47518, + "bongo": 47519, + "antv": 47520, + "sput": 47521, + "worldenvironmentday": 47522, + "resusc": 47523, + "ytd": 47524, + "fim": 47525, + "eunhyuk": 47526, + "sachin": 47527, + "roseanne": 47528, + "clermont": 47529, + "apec": 47530, + "amina": 47531, + "vening": 47532, + "nantes": 47533, + "almost": 47534, + "sinus": 47535, + "exas": 47536, + "tyl": 47537, + "tien": 47538, + "plead": 47539, + "lancs": 47540, + "burnaby": 47541, + "rek": 47542, + "joom": 47543, + "observers": 47544, + "discography": 47545, + "clg": 47546, + "âϦ": 47547, + "snack": 47548, + "rti": 47549, + "oily": 47550, + "crystalli": 47551, + "brute": 47552, + "webdevelopment": 47553, + "toppings": 47554, + "laf": 47555, + "anis": 47556, + "adder": 47557, + "reliving": 47558, + "carlin": 47559, + "battleof": 47560, + "weg": 47561, + "syrian": 47562, + "pont": 47563, + "ndc": 47564, + "laghate": 47565, + "yuma": 47566, + "spp": 47567, + "piti": 47568, + "robbing": 47569, + "marting": 47570, + "reykja": 47571, + "rajput": 47572, + "ncds": 47573, + "kiewicz": 47574, + "âĢ¢âĢ¢": 47575, + "vampire": 47576, + "substantially": 47577, + "opioids": 47578, + 
"nepali": 47579, + "kline": 47580, + "aroo": 47581, + "understand": 47582, + "litt": 47583, + "uit": 47584, + "thrombo": 47585, + "saries": 47586, + "quot": 47587, + "balling": 47588, + "ttr": 47589, + "sgh": 47590, + "philipp": 47591, + "brant": 47592, + "acl": 47593, + "mello": 47594, + "whittaker": 47595, + ".;": 47596, + "defiant": 47597, + "bgc": 47598, + "replying": 47599, + "mirren": 47600, + "metamorpho": 47601, + "schwab": 47602, + "bulge": 47603, + "utilized": 47604, + "pickering": 47605, + "pardon": 47606, + "dsa": 47607, + "à¸Ī": 47608, + "dooley": 47609, + "cumulative": 47610, + "л": 47611, + "urgency": 47612, + "emir": 47613, + "+/-": 47614, + "¦Ī": 47615, + "otas": 47616, + "âı³": 47617, + "stationed": 47618, + "grapevine": 47619, + "arac": 47620, + "karanjohar": 47621, + "fancy": 47622, + "saul": 47623, + "coogs": 47624, + "lgbtq": 47625, + "اÙħ": 47626, + "javi": 47627, + "ummer": 47628, + "pll": 47629, + "denis": 47630, + "daipur": 47631, + "puffin": 47632, + "lewisham": 47633, + "fandom": 47634, + "cope": 47635, + "vesmatter": 47636, + "sve": 47637, + "helpless": 47638, + "deodor": 47639, + "ostrich": 47640, + "kazan": 47641, + "fridaythe": 47642, + "condor": 47643, + "vx": 47644, + "sophomores": 47645, + "robles": 47646, + "cutt": 47647, + "climbers": 47648, + "리": 47649, + "sleg": 47650, + "snf": 47651, + "macys": 47652, + "hydrating": 47653, + "groupe": 47654, + "poyn": 47655, + "moulin": 47656, + "hgtv": 47657, + "lmfaooo": 47658, + "sulphur": 47659, + "asdfghjkl": 47660, + "annabelle": 47661, + "humpback": 47662, + "braved": 47663, + "viswasam": 47664, + "multipurpose": 47665, + "humidi": 47666, + "escorted": 47667, + "barbican": 47668, + "fad": 47669, + "corsa": 47670, + "ðŁ¤«": 47671, + "pippa": 47672, + "hereto": 47673, + "cany": 47674, + "sergi": 47675, + "orcas": 47676, + "ovie": 47677, + "edou": 47678, + "sany": 47679, + "globalization": 47680, + "mancini": 47681, + "foodtruck": 47682, + "fis": 47683, + "defibrill": 47684, + "schre": 47685, + "smafia": 47686, + "lovewins": 47687, + "laut": 47688, + "kaka": 47689, + "hollande": 47690, + "gameon": 47691, + "resurgence": 47692, + "outside": 47693, + "olympiad": 47694, + "intan": 47695, + "abstraction": 47696, + "rapid": 47697, + "palom": 47698, + "calle": 47699, + "jasmin": 47700, + "attackers": 47701, + "swagg": 47702, + "mitra": 47703, + "kylo": 47704, + "ல": 47705, + "hermitage": 47706, + "gordo": 47707, + "eira": 47708, + "sosfam": 47709, + "rollout": 47710, + "excite": 47711, + "synod": 47712, + "merrill": 47713, + "cals": 47714, + "assa": 47715, + "livelihoods": 47716, + "juve": 47717, + "theblack": 47718, + "gopackgo": 47719, + "antlers": 47720, + "albanian": 47721, + "woolly": 47722, + "quiche": 47723, + "purification": 47724, + "areth": 47725, + "smarthome": 47726, + "nek": 47727, + "allblacks": 47728, + "mexicans": 47729, + "ism": 47730, + "germs": 47731, + "complexion": 47732, + "marck": 47733, + "ushi": 47734, + "ðŁIJIJ": 47735, + "charl": 47736, + "castic": 47737, + "tillerson": 47738, + "giuliani": 47739, + "biodegradable": 47740, + "malbec": 47741, + "bois": 47742, + "jubil": 47743, + "imes": 47744, + "rame": 47745, + "genetic": 47746, + "espnu": 47747, + "chley": 47748, + "soho": 47749, + "gopher": 47750, + "gsc": 47751, + "buuren": 47752, + "cube": 47753, + "bridesmaids": 47754, + "webinars": 47755, + "toe": 47756, + "manipur": 47757, + "violently": 47758, + "noticias": 47759, + "exchanging": 47760, + "chiev": 47761, + "replaceable": 47762, + "muaythai": 47763, + "buss": 47764, + "spil": 47765, + 
"instalment": 47766, + "divya": 47767, + "caitlin": 47768, + "olim": 47769, + "filtering": 47770, + "whirlwind": 47771, + "stared": 47772, + "priorit": 47773, + "pram": 47774, + "pompeii": 47775, + "monologue": 47776, + "kite": 47777, + "buka": 47778, + "â̦..": 47779, + "vaccine": 47780, + "brero": 47781, + "wozni": 47782, + "solent": 47783, + "referr": 47784, + "myrt": 47785, + "gridiron": 47786, + "galatasaray": 47787, + "froze": 47788, + "claremont": 47789, + "ðŁ¥ĥ": 47790, + "victorias": 47791, + "sseldorf": 47792, + "pastures": 47793, + "netneutrality": 47794, + "chor": 47795, + "ðŁijģ": 47796, + "ಿ": 47797, + "weho": 47798, + "symptom": 47799, + "josel": 47800, + "inous": 47801, + "dragoncon": 47802, + "powerball": 47803, + "pte": 47804, + "fourthofjuly": 47805, + "ecla": 47806, + "earbuds": 47807, + "whereabouts": 47808, + "saltlife": 47809, + "deprivation": 47810, + "chter": 47811, + "wiggle": 47812, + "system": 47813, + "psst": 47814, + "chaz": 47815, + "dany": 47816, + "rimo": 47817, + "oaxaca": 47818, + "lanaparrilla": 47819, + "barcelon": 47820, + "melancholy": 47821, + "wayback": 47822, + "hotro": 47823, + "nsi": 47824, + "lilly": 47825, + "kuro": 47826, + "jahan": 47827, + "intellect": 47828, + "boardgame": 47829, + "ðŁıĬ": 47830, + "sneakpeek": 47831, + "kprc": 47832, + "jails": 47833, + "candel": 47834, + "zanzi": 47835, + "mortimer": 47836, + "starch": 47837, + "rags": 47838, + "pfa": 47839, + "longlive": 47840, + "kart": 47841, + "girona": 47842, + "crocker": 47843, + "christoph": 47844, + "precautions": 47845, + "warship": 47846, + "perm": 47847, + "parent": 47848, + "vangogh": 47849, + "gifford": 47850, + "allegheny": 47851, + "rayn": 47852, + "utm": 47853, + "stencil": 47854, + "recalling": 47855, + "penney": 47856, + "zazzle": 47857, + "ìĥĿ": 47858, + "hinds": 47859, + "arenas": 47860, + "nuev": 47861, + "lawler": 47862, + "guin": 47863, + "dothis": 47864, + "ðŁijķ": 47865, + "ì¶ķíķĺ": 47866, + "weg": 47867, + "tib": 47868, + "ridin": 47869, + "complexes": 47870, + "turbulent": 47871, + "pesos": 47872, + "demarcus": 47873, + "vallarta": 47874, + "samsun": 47875, + "kisses": 47876, + "heinrich": 47877, + "deportes": 47878, + "wilms": 47879, + "urd": 47880, + "thenext": 47881, + "inkigayo": 47882, + "howi": 47883, + "firsts": 47884, + "carriage": 47885, + "cleanliness": 47886, + "maswar": 47887, + "isch": 47888, + "axel": 47889, + "sizzle": 47890, + "roadhouse": 47891, + "frans": 47892, + "entourage": 47893, + "cobble": 47894, + "booth": 47895, + "benedict": 47896, + "talon": 47897, + "fcu": 47898, + "yearofthe": 47899, + "rayon": 47900, + "raidernation": 47901, + "foyle": 47902, + "koval": 47903, + "pianos": 47904, + "lpg": 47905, + "burmese": 47906, + "manure": 47907, + "geocaching": 47908, + "coscino": 47909, + "bnp": 47910, + "ferra": 47911, + "strophy": 47912, + "marais": 47913, + "cees": 47914, + "legendof": 47915, + "katniss": 47916, + "enoch": 47917, + "aved": 47918, + "youknow": 47919, + "dprk": 47920, + "ðŁĺ¢ðŁĺ¢": 47921, + "spun": 47922, + "prost": 47923, + "sorrows": 47924, + "centred": 47925, + "kea": 47926, + "galicia": 47927, + "?ð٤Ķ": 47928, + "ÑĢода": 47929, + "bouchard": 47930, + "ðŁĴĻðŁĴľ": 47931, + "yui": 47932, + "seedlings": 47933, + "jonah": 47934, + "recovers": 47935, + "nyrd": 47936, + "boardroom": 47937, + "suma": 47938, + "myjaps": 47939, + "tung": 47940, + "shai": 47941, + "irgc": 47942, + "elio": 47943, + "wagons": 47944, + "kashi": 47945, + "policemen": 47946, + "johnnie": 47947, + "alecoscino": 47948, + "shopify": 47949, + "dotted": 47950, 
+ "detri": 47951, + "vaw": 47952, + "tofficial": 47953, + "inyour": 47954, + "chalmers": 47955, + "traced": 47956, + "novi": 47957, + "byes": 47958, + "ariel": 47959, + "nippon": 47960, + "lapel": 47961, + "griez": 47962, + "bgs": 47963, + "fooling": 47964, + "dita": 47965, + "vijaysethu": 47966, + "nmwx": 47967, + "asot": 47968, + "kranti": 47969, + "helm": 47970, + "vedi": 47971, + "sickest": 47972, + "mochi": 47973, + "kabo": 47974, + "shrubs": 47975, + "hered": 47976, + "bsp": 47977, + "sqm": 47978, + "hamr": 47979, + "dulkar": 47980, + "antha": 47981, + "nrf": 47982, + "avoidance": 47983, + "aten": 47984, + "publix": 47985, + "bearers": 47986, + "nasi": 47987, + "hap": 47988, + "hells": 47989, + "ðŁĸ¥": 47990, + "ื": 47991, + "thelastjedi": 47992, + "ohwx": 47993, + "ðŁį«": 47994, + "wahoo": 47995, + "therese": 47996, + "recaps": 47997, + "ssnhq": 47998, + "birdphotography": 47999, + "vay": 48000, + "petti": 48001, + "paulo": 48002, + "belvedere": 48003, + "(*": 48004, + "grl": 48005, + "duvet": 48006, + "cpec": 48007, + "sait": 48008, + "porsch": 48009, + "measurable": 48010, + "aviators": 48011, + "fremantle": 48012, + "breen": 48013, + "onom": 48014, + "meand": 48015, + "lifesaving": 48016, + "euref": 48017, + "endon": 48018, + "embaras": 48019, + "airasia": 48020, + "elis": 48021, + "dunkin": 48022, + "starmagic": 48023, + "sill": 48024, + "portobello": 48025, + "kiefer": 48026, + "exe": 48027, + "muted": 48028, + "ãģ¦": 48029, + "wethepeople": 48030, + "logia": 48031, + "liberal": 48032, + "theforceawakens": 48033, + "mined": 48034, + "haunts": 48035, + "freckles": 48036, + "caretaker": 48037, + "sindia": 48038, + "âķIJ": 48039, + "devlin": 48040, + "liston": 48041, + "directioner": 48042, + "ohn": 48043, + "figaro": 48044, + "emmanuel": 48045, + "dubois": 48046, + "clones": 48047, + "bruise": 48048, + "ðŁİĪðŁİī": 48049, + "disinfe": 48050, + "dermatology": 48051, + "asr": 48052, + "swatch": 48053, + "discomfort": 48054, + "tamanna": 48055, + "piday": 48056, + "macken": 48057, + "katic": 48058, + "delusional": 48059, + "shawnee": 48060, + "gud": 48061, + "albino": 48062, + "pali": 48063, + "dingh": 48064, + "cucumbers": 48065, + "coffey": 48066, + "anticipating": 48067, + "treasured": 48068, + "websummit": 48069, + "sheltered": 48070, + "savor": 48071, + "pedagogy": 48072, + "mgs": 48073, + "shma": 48074, + "sbu": 48075, + "denali": 48076, + "campos": 48077, + "bubblegum": 48078, + "oir": 48079, + "leaps": 48080, + "yler": 48081, + "rone": 48082, + "sanskrit": 48083, + "mint": 48084, + "meatless": 48085, + "futurist": 48086, + "dude": 48087, + "avel": 48088, + "protested": 48089, + "squire": 48090, + "zaki": 48091, + "szn": 48092, + "harcourt": 48093, + "cyclone": 48094, + "bourdain": 48095, + "gatherings": 48096, + "dant": 48097, + "adventurer": 48098, + "paragon": 48099, + "altman": 48100, + "dding": 48101, + "banerjee": 48102, + "snorkeling": 48103, + "motherwell": 48104, + "missy": 48105, + "ender": 48106, + "glows": 48107, + "kiwis": 48108, + "chickpea": 48109, + "poro": 48110, + "efron": 48111, + "appt": 48112, + "uy": 48113, + "specified": 48114, + "gabby": 48115, + "estrada": 48116, + "combos": 48117, + "bourbon": 48118, + "vini": 48119, + "varun": 48120, + "stephani": 48121, + "keywords": 48122, + "carvings": 48123, + "amitabh": 48124, + "wrought": 48125, + "twal": 48126, + "reels": 48127, + "clubbing": 48128, + "ubiquit": 48129, + "crit": 48130, + "ambedkar": 48131, + "æĻ": 48132, + "pruning": 48133, + "vaccinated": 48134, + "boeing": 48135, + "sks": 48136, + "loona": 
48137, + "hypnosis": 48138, + "edelman": 48139, + "phol": 48140, + "hew": 48141, + "colosse": 48142, + "mckinsey": 48143, + "uon": 48144, + "tote": 48145, + "sacrificing": 48146, + "oxi": 48147, + "nang": 48148, + "emu": 48149, + "пÑĢиÑĢода": 48150, + "mth": 48151, + "kerswednesday": 48152, + "argued": 48153, + "timelapse": 48154, + "risking": 48155, + "regulating": 48156, + "nigh": 48157, + "likelihood": 48158, + "cubic": 48159, + "auction": 48160, + "reinfor": 48161, + "pistor": 48162, + "noses": 48163, + "yel": 48164, + "snuggles": 48165, + "pei": 48166, + "jeanette": 48167, + "taku": 48168, + "rith": 48169, + "guyz": 48170, + "à¸ŀ": 48171, + "yte": 48172, + "verted": 48173, + "paysoff": 48174, + "jauregui": 48175, + "hooligans": 48176, + "procedural": 48177, + "mib": 48178, + "hardy": 48179, + "eleng": 48180, + "checkers": 48181, + "alline": 48182, + "themet": 48183, + "proudof": 48184, + "keerthyofficial": 48185, + "collaborator": 48186, + "niu": 48187, + "inflicted": 48188, + "advani": 48189, + "retwee": 48190, + "memoriam": 48191, + "ficial": 48192, + "tighter": 48193, + "salem": 48194, + "reviewers": 48195, + "brics": 48196, + "bendigo": 48197, + "amell": 48198, + "turkish": 48199, + "sushmaswar": 48200, + "paulson": 48201, + "palawan": 48202, + "mollie": 48203, + "stitcher": 48204, + "sburgh": 48205, + "iru": 48206, + "haydn": 48207, + "eners": 48208, + "aroa": 48209, + "uzzi": 48210, + "sarajevo": 48211, + "hela": 48212, + "apollo": 48213, + "ninety": 48214, + "vaca": 48215, + "spon": 48216, + "ventu": 48217, + "jelena": 48218, + "heifer": 48219, + "avoids": 48220, + "spine": 48221, + "prize": 48222, + "marist": 48223, + "recreating": 48224, + "mede": 48225, + "wooden": 48226, + "findlay": 48227, + "rofl": 48228, + "ndi": 48229, + "comprehend": 48230, + "yugo": 48231, + "yü": 48232, + "towork": 48233, + "ufos": 48234, + "sonar": 48235, + "piston": 48236, + "recording": 48237, + "tentative": 48238, + "artforsale": 48239, + "pellets": 48240, + "fredo": 48241, + "ÙĪØ±": 48242, + "muses": 48243, + "customization": 48244, + "profound": 48245, + "isner": 48246, + "ideally": 48247, + "siam": 48248, + "plankton": 48249, + "cmdr": 48250, + "manger": 48251, + "franken": 48252, + "customizable": 48253, + "म": 48254, + "walkaway": 48255, + "swivel": 48256, + "vastly": 48257, + "noton": 48258, + "lexa": 48259, + "exmoor": 48260, + "zas": 48261, + "tante": 48262, + "reductions": 48263, + "lolly": 48264, + "hipsters": 48265, + "benefited": 48266, + "ë²": 48267, + "wwwww": 48268, + "masculine": 48269, + "fiji": 48270, + "drey": 48271, + "phill": 48272, + "aneous": 48273, + "nicol": 48274, + "mendez": 48275, + "disappro": 48276, + "chner": 48277, + "throughs": 48278, + "shenmue": 48279, + "eastman": 48280, + "ðŁIJİ": 48281, + "yuck": 48282, + "undertale": 48283, + "reys": 48284, + "gobeavs": 48285, + "engen": 48286, + "cna": 48287, + "merr": 48288, + "birk": 48289, + "ãģ¨ç¹ĭãģĮãĤĬãģŁãģĦ": 48290, + "âĥ£@": 48291, + "ynna": 48292, + "steed": 48293, + "offender": 48294, + "atum": 48295, + "vanishing": 48296, + "presidenti": 48297, + "lovethem": 48298, + "gnocchi": 48299, + "friggin": 48300, + "peril": 48301, + "madhya": 48302, + "agne": 48303, + "deejay": 48304, + "marnock": 48305, + "mtb": 48306, + "foldable": 48307, + "@___": 48308, + "standre": 48309, + "bronx": 48310, + "bowski": 48311, + "finite": 48312, + "crockett": 48313, + "bsf": 48314, + "getit": 48315, + "serenawilliams": 48316, + "miro": 48317, + "ignatius": 48318, + "slay": 48319, + "rinse": 48320, + "fondue": 48321, + "seldom": 48322, 
+ "smore": 48323, + "gani": 48324, + "dyce": 48325, + "dmitry": 48326, + "crumb": 48327, + "latepost": 48328, + "primark": 48329, + "ohana": 48330, + "florals": 48331, + "doa": 48332, + "remembranceday": 48333, + "dds": 48334, + "azione": 48335, + "toonami": 48336, + "airport": 48337, + "æĿ±": 48338, + "thad": 48339, + "fist": 48340, + "dinesh": 48341, + "drwho": 48342, + "adwords": 48343, + "admirer": 48344, + "proje": 48345, + "kyrgyz": 48346, + "à«": 48347, + "manifestation": 48348, + "lewan": 48349, + "jic": 48350, + "thibau": 48351, + "leased": 48352, + "vanity": 48353, + "nourished": 48354, + "nevertheless": 48355, + "augmente": 48356, + "fuelled": 48357, + "chead": 48358, + "wilshere": 48359, + "rudi": 48360, + "pz": 48361, + "myco": 48362, + "morro": 48363, + "herbalife": 48364, + "hardrock": 48365, + "deman": 48366, + "dreality": 48367, + "spades": 48368, + "cevic": 48369, + "bhai": 48370, + "baron": 48371, + "ultimatefan": 48372, + "hounews": 48373, + "tobi": 48374, + "strut": 48375, + "keel": 48376, + "affiliation": 48377, + "themasters": 48378, + "smal": 48379, + "hue": 48380, + "esteban": 48381, + "conv": 48382, + "omnic": 48383, + "databases": 48384, + "cov": 48385, + "terti": 48386, + "stg": 48387, + "snoopdogg": 48388, + "metabol": 48389, + "lethbridge": 48390, + "ðŁı»âĢįâĻĢï¸ı": 48391, + "yearling": 48392, + "residentevil": 48393, + "nwsl": 48394, + "iyaki": 48395, + "griezmann": 48396, + "cous": 48397, + "ðŁĵĿ:": 48398, + "torian": 48399, + "sami": 48400, + "ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥": 48401, + "gare": 48402, + "alliances": 48403, + "whitfield": 48404, + "wether": 48405, + "refining": 48406, + "coyi": 48407, + "kraken": 48408, + "ðŁĺĺâĿ¤": 48409, + "singularity": 48410, + "lili": 48411, + "hns": 48412, + "boldand": 48413, + "wawrinka": 48414, + "misogyny": 48415, + "lovers": 48416, + "cq": 48417, + "bdg": 48418, + "adona": 48419, + "garter": 48420, + "womenof": 48421, + "scd": 48422, + "recognising": 48423, + "muna": 48424, + "strou": 48425, + "signalling": 48426, + "laredo": 48427, + "hellboy": 48428, + "aleksand": 48429, + "unavailable": 48430, + "pediatric": 48431, + "asin": 48432, + "meria": 48433, + "rishi": 48434, + "futurism": 48435, + "wye": 48436, + "polarized": 48437, + "ewe": 48438, + "propel": 48439, + "informs": 48440, + "crease": 48441, + "~\"": 48442, + "artiston": 48443, + "likefor": 48444, + "heidelberg": 48445, + "erra": 48446, + "lifein": 48447, + "lenny": 48448, + "interrupt": 48449, + "coherent": 48450, + "caz": 48451, + "vickers": 48452, + "leveled": 48453, + "fbs": 48454, + "cabins": 48455, + "bummed": 48456, + "apostles": 48457, + "weh": 48458, + "tendon": 48459, + "souvenirs": 48460, + "infuri": 48461, + "pierce": 48462, + "asset": 48463, + "mlas": 48464, + "goth": 48465, + "diggin": 48466, + "annas": 48467, + "ylor": 48468, + "thwaite": 48469, + "swel": 48470, + "panera": 48471, + "murderers": 48472, + "crooked": 48473, + "bsgo": 48474, + "acu": 48475, + "aon": 48476, + "rean": 48477, + "oneof": 48478, + "kohl": 48479, + "bloodh": 48480, + "pesticide": 48481, + "lostdog": 48482, + "flexing": 48483, + "ëĤĺ": 48484, + "supra": 48485, + "eternally": 48486, + "ðŁļĻ": 48487, + "paolo": 48488, + "olan": 48489, + "momo": 48490, + "iselle": 48491, + "captainmarvel": 48492, + "slou": 48493, + "mistakenly": 48494, + "akhilesh": 48495, + "mert": 48496, + "ilinan": 48497, + "buon": 48498, + "balkan": 48499, + "mirro": 48500, + "millen": 48501, + "derail": 48502, + "damon": 48503, + "titi": 48504, + "bios": 48505, + "redon": 48506, + "picard": 48507, + "parte": 
48508, + "ðŁ¤Ł": 48509, + "غ": 48510, + "sonics": 48511, + "firsth": 48512, + "ddc": 48513, + "vegans": 48514, + "turban": 48515, + "nigan": 48516, + "lottie": 48517, + "lyndon": 48518, + "starbuck": 48519, + "pinkfloyd": 48520, + "lifestyles": 48521, + "amara": 48522, + "ashe": 48523, + "rsc": 48524, + "vala": 48525, + "smer": 48526, + "cwgc": 48527, + "client": 48528, + "buenas": 48529, + "jagan": 48530, + "coops": 48531, + "ðŁijijðŁijij": 48532, + "specializes": 48533, + "snagged": 48534, + "glar": 48535, + "bennet": 48536, + "wildlifewednesday": 48537, + "bowden": 48538, + "pik": 48539, + "artin": 48540, + "emporium": 48541, + "arl": 48542, + "reba": 48543, + "passer": 48544, + "disappoints": 48545, + "additive": 48546, + "âľĬðŁı½": 48547, + "bayer": 48548, + "missoula": 48549, + "haskell": 48550, + "commences": 48551, + "nix": 48552, + "neman": 48553, + "exploited": 48554, + "plasticsurgery": 48555, + "ccd": 48556, + "asocial": 48557, + "vot": 48558, + "siegel": 48559, + "froome": 48560, + "kapam": 48561, + "fara": 48562, + "eha": 48563, + "probes": 48564, + "mwf": 48565, + "meeting": 48566, + "pbb": 48567, + "akins": 48568, + "mistletoe": 48569, + "kingdomhearts": 48570, + "forkids": 48571, + "ecr": 48572, + "bale": 48573, + "escorts": 48574, + "adidasoriginals": 48575, + "kwa": 48576, + "kts": 48577, + "halloffame": 48578, + "ðŁĺį.": 48579, + "wags": 48580, + "potted": 48581, + "owing": 48582, + "honeycomb": 48583, + "hefty": 48584, + "urology": 48585, + "merle": 48586, + "bpd": 48587, + "stripping": 48588, + "reich": 48589, + "kstate": 48590, + "guay": 48591, + "yonge": 48592, + "shakti": 48593, + "gloom": 48594, + "batt": 48595, + "sonom": 48596, + "nery": 48597, + "elba": 48598, + "blanks": 48599, + "helle": 48600, + "triplets": 48601, + "bombay": 48602, + "akarta": 48603, + "abia": 48604, + "transmitted": 48605, + "rolf": 48606, + "jais": 48607, + "angularjs": 48608, + "fierc": 48609, + "mss": 48610, + "trace": 48611, + "à¥ĩ": 48612, + "tombs": 48613, + "oldman": 48614, + "kombucha": 48615, + "fol": 48616, + "ehealth": 48617, + "cereals": 48618, + "arelli": 48619, + "inari": 48620, + "ðŁĴ©": 48621, + "wol": 48622, + "liberties": 48623, + "fawn": 48624, + "affirm": 48625, + "nunavut": 48626, + "hysterical": 48627, + "kdrama": 48628, + "artes": 48629, + "âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢": 48630, + "valentin": 48631, + "manslaughter": 48632, + "gales": 48633, + "eoin": 48634, + "energized": 48635, + "dels": 48636, + "withdraws": 48637, + "stles": 48638, + "sarcastic": 48639, + "ramesh": 48640, + "incredibles": 48641, + "lockhart": 48642, + "yawn": 48643, + "ultimatefanlive": 48644, + "oooooooooooooooo": 48645, + "muen": 48646, + "gurudev": 48647, + "teer": 48648, + "peeling": 48649, + "newsnow": 48650, + "linguistics": 48651, + "directv": 48652, + "agend": 48653, + "unilever": 48654, + "ruger": 48655, + "handedly": 48656, + "erose": 48657, + "limel": 48658, + "thec": 48659, + "royalties": 48660, + "finishers": 48661, + "nrg": 48662, + "mgt": 48663, + "fidget": 48664, + "comps": 48665, + "bacon": 48666, + "aggressively": 48667, + "abit": 48668, + "châ": 48669, + "tarde": 48670, + "slugger": 48671, + "qanda": 48672, + "greening": 48673, + "dats": 48674, + "enslaved": 48675, + "spector": 48676, + "oye": 48677, + "freef": 48678, + "bhand": 48679, + "stopbrexit": 48680, + "misconceptions": 48681, + "cava": 48682, + "ðŁĺįðŁĺįðŁĺįðŁĺįðŁĺįðŁĺįðŁĺįðŁĺį": 48683, + "multitasking": 48684, + "housel": 48685, + "ferreira": 48686, + "centime": 48687, + "ankles": 48688, + "jodh": 48689, + "helly": 48690, 
+ "frome": 48691, + "outtuesday": 48692, + "narnia": 48693, + "balaji": 48694, + "lbloggers": 48695, + "jyoti": 48696, + "ðŁįĩ": 48697, + "lancia": 48698, + "capri": 48699, + "yap": 48700, + "natash": 48701, + "downfall": 48702, + ".\"âĢĶ": 48703, + "î": 48704, + "ligament": 48705, + "coatings": 48706, + "aided": 48707, + "hiko": 48708, + "falling": 48709, + "encrypted": 48710, + "yegfood": 48711, + "infringement": 48712, + "cudi": 48713, + "cep": 48714, + "ðŁĺįðŁĺĤ": 48715, + "trad": 48716, + "superrugby": 48717, + "edwin": 48718, + "whiche": 48719, + "vimeo": 48720, + "layne": 48721, + "invigor": 48722, + "hehe": 48723, + "dubrovnik": 48724, + "bieber": 48725, + "utr": 48726, + "shaman": 48727, + "opers": 48728, + "hamill": 48729, + "enig": 48730, + "dif": 48731, + "arum": 48732, + "scrapbook": 48733, + "minh": 48734, + "divergence": 48735, + "mckinnon": 48736, + "lifetime": 48737, + "guterres": 48738, + "wille": 48739, + "pleas": 48740, + "patty": 48741, + "micron": 48742, + "kz": 48743, + "domaine": 48744, + "rusher": 48745, + "mds": 48746, + "chesney": 48747, + "screwdriver": 48748, + "âģ©,": 48749, + "sledge": 48750, + "hauer": 48751, + "chana": 48752, + "stamina": 48753, + "sprinkler": 48754, + "pln": 48755, + "heff": 48756, + "bolton": 48757, + "omon": 48758, + "carrington": 48759, + "accordion": 48760, + "jorge": 48761, + "interception": 48762, + "inputs": 48763, + "gull": 48764, + "transcription": 48765, + "vanuatu": 48766, + "itical": 48767, + "ethos": 48768, + "tich": 48769, + "spacey": 48770, + "peeking": 48771, + "umi": 48772, + "hager": 48773, + "psychotic": 48774, + "illian": 48775, + "illia": 48776, + "bonnaroo": 48777, + "anese": 48778, + "puc": 48779, + "laghateparth": 48780, + "enhall": 48781, + "economical": 48782, + "dredge": 48783, + "%-": 48784, + "uwe": 48785, + "tubular": 48786, + "scouncil": 48787, + "peasants": 48788, + "fler": 48789, + "tumbler": 48790, + "hep": 48791, + "fordham": 48792, + "rowley": 48793, + "initials": 48794, + "evasion": 48795, + "ernation": 48796, + "plugins": 48797, + "cochran": 48798, + "cattle": 48799, + "acidity": 48800, + "ðŁİĬðŁİī": 48801, + "regrann": 48802, + "jumpman": 48803, + "eface": 48804, + "xma": 48805, + "patriarchy": 48806, + "escobar": 48807, + "cristian": 48808, + "tipton": 48809, + "nueva": 48810, + "hackney": 48811, + "backseat": 48812, + "killarney": 48813, + "aidan": 48814, + "stadion": 48815, + "simultaneous": 48816, + "idaho": 48817, + "aje": 48818, + "uth": 48819, + "figure": 48820, + "clos": 48821, + "burk": 48822, + "voluntar": 48823, + "recite": 48824, + "macfarlane": 48825, + "curfew": 48826, + "boudo": 48827, + "wgn": 48828, + "stix": 48829, + "slap": 48830, + "scratched": 48831, + "phillip": 48832, + "journe": 48833, + "expelled": 48834, + "waz": 48835, + "uke": 48836, + "tatiana": 48837, + "oue": 48838, + "hopp": 48839, + "dimitri": 48840, + "ðŁĵ£": 48841, + "matologist": 48842, + "electrifying": 48843, + "bluffs": 48844, + "billsmafia": 48845, + "azcardinals": 48846, + "yaa": 48847, + "xmas": 48848, + "shara": 48849, + "rith": 48850, + "gills": 48851, + "dres": 48852, + "barton": 48853, + "authorization": 48854, + "imperialism": 48855, + "homeof": 48856, + "todo": 48857, + "footpath": 48858, + "bandwidth": 48859, + "visitspain": 48860, + "mohsin": 48861, + "erupted": 48862, + "miki": 48863, + "insignia": 48864, + "mikel": 48865, + "ssh": 48866, + "gera": 48867, + "bankholiday": 48868, + "awan": 48869, + "tweak": 48870, + "starcraft": 48871, + "eal": 48872, + "construction": 48873, + "skeletons": 48874, + 
"leep": 48875, + "inem": 48876, + "barclay": 48877, + "shipwreck": 48878, + "monsieur": 48879, + "yoh": 48880, + "ront": 48881, + "formative": 48882, + "sero": 48883, + "lep": 48884, + "horseman": 48885, + "hoosier": 48886, + "hazmat": 48887, + "cylinders": 48888, + "centi": 48889, + "ðŁĴ¥ðŁĴ¥ðŁĴ¥": 48890, + "reem": 48891, + "naire": 48892, + "musically": 48893, + "grasshopper": 48894, + "estonian": 48895, + "terminology": 48896, + "romain": 48897, + "bloggerrt": 48898, + "toxin": 48899, + "stance": 48900, + "cultivated": 48901, + "anast": 48902, + "ðŁIJį": 48903, + "shimano": 48904, + "gopher": 48905, + "enei": 48906, + "recyclable": 48907, + "gamification": 48908, + "fightfor": 48909, + "cq": 48910, + "avocados": 48911, + "keys": 48912, + "elike": 48913, + "glycer": 48914, + "shakur": 48915, + "mobilization": 48916, + "galley": 48917, + "explain": 48918, + "exchanged": 48919, + "peth": 48920, + "obedience": 48921, + "illage": 48922, + "ennis": 48923, + "ãĥŀ": 48924, + "wiv": 48925, + "wallabies": 48926, + "maar": 48927, + "igers": 48928, + "fintech": 48929, + "finalized": 48930, + "woj": 48931, + "meaningless": 48932, + "infield": 48933, + "onnaise": 48934, + "eet": 48935, + "bronte": 48936, + "passages": 48937, + "ðŁij§": 48938, + "strickland": 48939, + "northernlights": 48940, + "lomond": 48941, + "htc": 48942, + "wray": 48943, + "shifter": 48944, + "dialog": 48945, + "ðŁįį": 48946, + ">>>>>>": 48947, + "teatime": 48948, + "stech": 48949, + "sichuan": 48950, + "quill": 48951, + "franca": 48952, + "complementary": 48953, + "barrington": 48954, + "marcus": 48955, + "malam": 48956, + "goooo": 48957, + "forsa": 48958, + "electra": 48959, + "afs": 48960, + "âĹĨ": 48961, + "trife": 48962, + "snazzy": 48963, + "folia": 48964, + "andolan": 48965, + "afterdark": 48966, + "woodson": 48967, + "strade": 48968, + "littlest": 48969, + "ogun": 48970, + "conwy": 48971, + "cowards": 48972, + "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 48973, + "íĬ¸": 48974, + "seul": 48975, + "murphy": 48976, + "dunks": 48977, + "kapilshar": 48978, + "joachim": 48979, + "womack": 48980, + "equality": 48981, + "averages": 48982, + "aine": 48983, + "ð٦Ī": 48984, + "tacular": 48985, + "disability": 48986, + "uked": 48987, + "midcentury": 48988, + "barthol": 48989, + "teasers": 48990, + "tabern": 48991, + "njcaa": 48992, + "spout": 48993, + "opi": 48994, + "kubball": 48995, + "blom": 48996, + "soar": 48997, + "populism": 48998, + "methyl": 48999, + "ðŁijĬðŁı¼": 49000, + "ospre": 49001, + "aloils": 49002, + "ðŁĵĸ": 49003, + "ðŁĮļ": 49004, + "xer": 49005, + "spilling": 49006, + "publica": 49007, + "cardam": 49008, + "adish": 49009, + "sacha": 49010, + "pkg": 49011, + "buda": 49012, + "lyricist": 49013, + "ibc": 49014, + "grump": 49015, + "hover": 49016, + "halep": 49017, + "antibody": 49018, + "anemone": 49019, + "âĻ¥âĻ¥âĻ¥âĻ¥": 49020, + "mcl": 49021, + "lithograph": 49022, + "ccu": 49023, + "sfest": 49024, + "pathic": 49025, + "callister": 49026, + "ottawa": 49027, + "gunsn": 49028, + "rutger": 49029, + "halibut": 49030, + "envision": 49031, + "differentiate": 49032, + "ðŁļĢðŁļĢ": 49033, + "piran": 49034, + "latel": 49035, + "ucn": 49036, + "troubad": 49037, + "raine": 49038, + "fiercely": 49039, + "learnenglish": 49040, + "lease": 49041, + "wexmondays": 49042, + "emit": 49043, + "drayton": 49044, + "burrell": 49045, + "scubadiving": 49046, + "holler": 49047, + "dru": 49048, + "clocked": 49049, + "wral": 49050, + "apro": 49051, + "translucent": 49052, + "wbo": 49053, + "patriarch": 49054, + "moja": 49055, + "lannister": 49056, + 
"fishery": 49057, + "nederland": 49058, + "mildly": 49059, + "mirai": 49060, + "mako": 49061, + "jap": 49062, + "ðŁĺ©ðŁĺ©ðŁĺ©": 49063, + "prostatec": 49064, + "panna": 49065, + "arama": 49066, + "undertaking": 49067, + "tompkins": 49068, + "neop": 49069, + "solids": 49070, + "savoury": 49071, + "eames": 49072, + "cutlery": 49073, + "woodbridge": 49074, + "steamer": 49075, + "rizzo": 49076, + "wildcat": 49077, + "ratna": 49078, + "laminated": 49079, + "kineni": 49080, + "jalap": 49081, + "aides": 49082, + "acknowledges": 49083, + "?!?!?!": 49084, + "!ðŁİī": 49085, + "wafc": 49086, + "maggio": 49087, + "haves": 49088, + "darje": 49089, + "ofi": 49090, + "gril": 49091, + "vasi": 49092, + "brux": 49093, + "mohd": 49094, + "fakespeare": 49095, + "arnold": 49096, + "rmb": 49097, + "forbe": 49098, + "walleye": 49099, + "rodi": 49100, + "therapeutics": 49101, + "strategi": 49102, + "obste": 49103, + "mudder": 49104, + "downloadable": 49105, + "ddings": 49106, + "dca": 49107, + "asiangames": 49108, + "campeon": 49109, + "appropriation": 49110, + "thcentury": 49111, + "ramatta": 49112, + "draped": 49113, + "bullion": 49114, + "muc": 49115, + "onex": 49116, + "segreg": 49117, + "ophelia": 49118, + "bodily": 49119, + "âĿ¤ðŁĺį": 49120, + "wizar": 49121, + "teased": 49122, + "ademy": 49123, + "toid": 49124, + "sura": 49125, + "lazarus": 49126, + "snickers": 49127, + "mase": 49128, + "loh": 49129, + "bowed": 49130, + "biblio": 49131, + "xchange": 49132, + "harlan": 49133, + "ghoshal": 49134, + "flavorful": 49135, + "bhagat": 49136, + "allez": 49137, + "whichever": 49138, + "tenstein": 49139, + "discer": 49140, + "organiser": 49141, + "mtg": 49142, + "dreamliner": 49143, + "tse": 49144, + "hokkaido": 49145, + "mok": 49146, + "indulgent": 49147, + "hickman": 49148, + "blinded": 49149, + "alyn": 49150, + "aaaah": 49151, + "spool": 49152, + "loughborough": 49153, + "interpret": 49154, + "etv": 49155, + "aristotle": 49156, + "optimizing": 49157, + "avicii": 49158, + "madurai": 49159, + "juli": 49160, + "nawaz": 49161, + "matchups": 49162, + "abide": 49163, + "painting": 49164, + "welling": 49165, + "veli": 49166, + "octagon": 49167, + "inscribed": 49168, + "poking": 49169, + "placer": 49170, + "lifecycle": 49171, + "kilig": 49172, + "gsp": 49173, + "elives": 49174, + "clements": 49175, + "nasheed": 49176, + "mesut": 49177, + "incarcerated": 49178, + "distilled": 49179, + "walang": 49180, + "delicacy": 49181, + "delgado": 49182, + "chez": 49183, + "chita": 49184, + "adero": 49185, + "tux": 49186, + "patil": 49187, + "odo": 49188, + "abhcosmetics": 49189, + "tvc": 49190, + "pbc": 49191, + "inaccurate": 49192, + "hardworkpaysoff": 49193, + "baller": 49194, + "quotation": 49195, + "merchandising": 49196, + "gastri": 49197, + "defenses": 49198, + "drogba": 49199, + "bexhill": 49200, + "bankno": 49201, + "winona": 49202, + "sieg": 49203, + "pgs": 49204, + "hahahha": 49205, + "aguchi": 49206, + "subram": 49207, + "miracle": 49208, + "desch": 49209, + "libre": 49210, + "bacher": 49211, + "entine": 49212, + "bbcradi": 49213, + "loudest": 49214, + "rps": 49215, + "pierc": 49216, + "fryer": 49217, + "stormtrooper": 49218, + "rafaelnadal": 49219, + "pasco": 49220, + "exhaustion": 49221, + "epiconetsy": 49222, + "rctid": 49223, + "kellie": 49224, + "gaines": 49225, + "dbz": 49226, + "smriti": 49227, + "sbridge": 49228, + "limited": 49229, + "claw": 49230, + "technical": 49231, + "biographical": 49232, + "adored": 49233, + "ะ": 49234, + "exclude": 49235, + "acadia": 49236, + "keyboards": 49237, + "furman": 49238, + "soca": 
49239, + "suru": 49240, + "nips": 49241, + "swaps": 49242, + "serverless": 49243, + "rune": 49244, + "puffy": 49245, + "northampton": 49246, + "nishings": 49247, + "hender": 49248, + "cartridges": 49249, + "gunshot": 49250, + "ðŁĵ¹": 49251, + "filament": 49252, + "respondents": 49253, + "peyton": 49254, + "mountaineer": 49255, + "merging": 49256, + "lifespan": 49257, + "intimidation": 49258, + "pafc": 49259, + "nlwx": 49260, + "expansive": 49261, + "purr": 49262, + "fck": 49263, + "cae": 49264, + "atti": 49265, + "telethon": 49266, + "sohn": 49267, + "mendel": 49268, + "lopes": 49269, + "dori": 49270, + "unbroken": 49271, + "tered": 49272, + "tastings": 49273, + "inactive": 49274, + "disintegr": 49275, + "tassel": 49276, + "sharethe": 49277, + "piano": 49278, + "islay": 49279, + "airspace": 49280, + "zawa": 49281, + "ricciardo": 49282, + "mington": 49283, + "fresher": 49284, + "curry": 49285, + "revs": 49286, + "pharoah": 49287, + "hmv": 49288, + "exhilarating": 49289, + "whoo": 49290, + "linkin": 49291, + "krispy": 49292, + "competency": 49293, + "stewards": 49294, + "nebu": 49295, + "katsu": 49296, + "admins": 49297, + "bazar": 49298, + "asar": 49299, + "givingback": 49300, + "ssummit": 49301, + "songz": 49302, + "linus": 49303, + "rajkumar": 49304, + "farmington": 49305, + "fantasia": 49306, + "ðŁĺ´ðŁĺ´": 49307, + "sobri": 49308, + "lisse": 49309, + "barrymore": 49310, + "prism": 49311, + "blob": 49312, + "senew": 49313, + "monoxide": 49314, + "expire": 49315, + "eighteen": 49316, + "dipper": 49317, + "xiao": 49318, + "kilt": 49319, + "hinch": 49320, + "bbcsport": 49321, + "bamboo": 49322, + "pter": 49323, + "exal": 49324, + "ð٦ĭ": 49325, + "hamlin": 49326, + "expeditions": 49327, + "stargazing": 49328, + "foodsecurity": 49329, + "wylie": 49330, + "ulf": 49331, + "stingly": 49332, + "onstorm": 49333, + "loeb": 49334, + "broome": 49335, + "bnha": 49336, + "pancreatic": 49337, + "elive": 49338, + "!!!!!!!!!!!": 49339, + "therapper": 49340, + "orthopedic": 49341, + "avengersendgame": 49342, + "antitrust": 49343, + "ìļ°": 49344, + "gote": 49345, + "omd": 49346, + "offside": 49347, + "gyllen": 49348, + "wineries": 49349, + "whitewater": 49350, + "adl": 49351, + "lupita": 49352, + "exceeds": 49353, + "consisted": 49354, + "chewbacca": 49355, + "ashleigh": 49356, + "nhljets": 49357, + "issan": 49358, + "shld": 49359, + "hayat": 49360, + "cranberries": 49361, + "ð٤ĺðŁı½": 49362, + "rockthe": 49363, + "springtraining": 49364, + "fallout": 49365, + "dairyfree": 49366, + "waj": 49367, + "undecided": 49368, + "sown": 49369, + "rcn": 49370, + "northwales": 49371, + "httr": 49372, + "fumble": 49373, + "dits": 49374, + "compelled": 49375, + "populist": 49376, + "minted": 49377, + "blanchett": 49378, + ".''": 49379, + "propulsion": 49380, + "milla": 49381, + "auberg": 49382, + "hertz": 49383, + "hta": 49384, + "udaipur": 49385, + "serendipity": 49386, + "aztecs": 49387, + "alsace": 49388, + "ðŁIJij": 49389, + "lun": 49390, + "shoes": 49391, + "charli": 49392, + "garza": 49393, + "ðŁĴŁ": 49394, + "probiotics": 49395, + "foxtv": 49396, + "olis": 49397, + "miff": 49398, + "localized": 49399, + "diffuser": 49400, + "sigue": 49401, + "funko": 49402, + "rendous": 49403, + "ðŁĴij": 49404, + "jekyll": 49405, + "<|startoftext|>": 49406, + "<|endoftext|>": 49407 + }, + "merges": [ + "i n", + "t h", + "a n", + "r e", + "a r", + "e r", + "th e", + "in g", + "o u", + "o n", + "s t", + "o r", + "e n", + "o n", + "a l", + "a t", + "e r", + "i t", + "i n", + "t o", + "r o", + "i s", + "l e", + "i c", + "a t", + "an 
d", + "e d", + "o f", + "c h", + "o r", + "e s", + "i l", + "e l", + "s t", + "a c", + "o m", + "a m", + "l o", + "a n", + "a y", + "s h", + "r i", + "l i", + "t i", + "f or", + "n e", + "ð Ł", + "r a", + "h a", + "d e", + "o l", + "v e", + "s i", + "u r", + "a l", + "s e", + "' s", + "u n", + "d i", + "b e", + "l a", + "w h", + "o o", + "d ay", + "e n", + "m a", + "n o", + "l e", + "t o", + "ou r", + "i r", + "g h", + "w it", + "i t", + "y o", + "a s", + "s p", + "th is", + "t s", + "at i", + "yo u", + "wit h", + "a d", + "i s", + "a b", + "l y", + "w e", + "th e", + "t e", + "a s", + "a g", + "v i", + "p p", + "s u", + "h o", + "m y", + ". .", + "b u", + "c om", + "s e", + "er s", + "m e", + "m e", + "al l", + "c on", + "m o", + "k e", + "g e", + "ou t", + "en t", + "c o", + "f e", + "v er", + "a r", + "f ro", + "a u", + "p o", + "c e", + "gh t", + "ar e", + "s s", + "fro m", + "c h", + "t r", + "ou n", + "on e", + "b y", + "d o", + "t h", + "w or", + "er e", + "k e", + "p ro", + "f or", + "d s", + "b o", + "t a", + "w e", + "g o", + "h e", + "t er", + "in g", + "d e", + "b e", + "ati on", + "m or", + "a y", + "e x", + "il l", + "p e", + "k s", + "s c", + "l u", + "f u", + "q u", + "v er", + "ðŁ ĺ", + "j u", + "m u", + "at e", + "an d", + "v e", + "k ing", + "m ar", + "o p", + "h i", + ".. .", + "p re", + "a d", + "r u", + "th at", + "j o", + "o f", + "c e", + "ne w", + "a m", + "a p", + "g re", + "s s", + "d u", + "no w", + "y e", + "t ing", + "y our", + "it y", + "n i", + "c i", + "p ar", + "g u", + "f i", + "a f", + "p er", + "t er", + "u p", + "s o", + "g i", + "on s", + "g r", + "g e", + "b r", + "p l", + "' t", + "m i", + "in e", + "we e", + "b i", + "u s", + "sh o", + "ha ve", + "to day", + "a v", + "m an", + "en t", + "ac k", + "ur e", + "ou r", + "â Ģ", + "c u", + "l d", + "lo o", + "i m", + "ic e", + "s om", + "f in", + "re d", + "re n", + "oo d", + "w as", + "ti on", + "p i", + "i r", + "th er", + "t y", + "p h", + "ar d", + "e c", + "! !", + "m on", + "mor e", + "w ill", + "t ra", + "c an", + "c ol", + "p u", + "t e", + "w n", + "m b", + "s o", + "it i", + "ju st", + "n ing", + "h ere", + "t u", + "p a", + "p r", + "bu t", + "wh at", + "al ly", + "f ir", + "m in", + "c a", + "an t", + "s a", + "t ed", + "e v", + "m ent", + "f a", + "ge t", + "am e", + "ab out", + "g ra", + "no t", + "ha pp", + "ay s", + "m an", + "h is", + "ti me", + "li ke", + "g h", + "ha s", + "th an", + "lo ve", + "ar t", + "st e", + "d ing", + "h e", + "c re", + "w s", + "w at", + "d er", + "it e", + "s er", + "ac e", + "ag e", + "en d", + "st r", + "a w", + "st or", + "r e", + "c ar", + "el l", + "al l", + "p s", + "f ri", + "p ho", + "p or", + "d o", + "a k", + "w i", + "f re", + "wh o", + "sh i", + "b oo", + "s on", + "el l", + "wh en", + "il l", + "ho w", + "gre at", + "w in", + "e l", + "b l", + "s si", + "al i", + "som e", + "ðŁ Ĵ", + "t on", + "d er", + "le s", + "p la", + "ï ¸", + "e d", + "s ch", + "h u", + "on g", + "d on", + "k i", + "s h", + "an n", + "c or", + ". 
.", + "oun d", + "a z", + "in e", + "ar y", + "fu l", + "st u", + "ou ld", + "st i", + "g o", + "se e", + "ab le", + "ar s", + "l l", + "m is", + "b er", + "c k", + "w a", + "en ts", + "n o", + "si g", + "f e", + "fir st", + "e t", + "sp e", + "ac k", + "i f", + "ou s", + "' m", + "st er", + "a pp", + "an g", + "an ce", + "an s", + "g ood", + "b re", + "e ver", + "the y", + "t ic", + "com e", + "of f", + "b ack", + "as e", + "ing s", + "ol d", + "i ght", + "f o", + "h er", + "happ y", + "p ic", + "it s", + "v ing", + "u s", + "m at", + "h om", + "d y", + "e m", + "s k", + "y ing", + "the ir", + "le d", + "r y", + "u l", + "h ar", + "c k", + "t on", + "on al", + "h el", + "r ic", + "b ir", + "vi e", + "w ay", + "t ri", + "d a", + "p le", + "b ro", + "st o", + "oo l", + "ni ght", + "tr u", + "b a", + "re ad", + "re s", + "ye ar", + "f r", + "t or", + "al s", + "c oun", + "c la", + "t ure", + "v el", + "at ed", + "le c", + "en d", + "th ing", + "v o", + "ic i", + "be st", + "c an", + "wor k", + "la st", + "af ter", + "en ce", + "p ri", + "p e", + "e s", + "i l", + "âĢ ¦", + "d re", + "y s", + "o ver", + "i es", + "ðŁ ij", + "com m", + "t w", + "in k", + "s un", + "c l", + "li fe", + "t t", + "a ch", + "l and", + "s y", + "t re", + "t al", + "p ol", + "s m", + "du c", + "s al", + "f t", + "' re", + "ch e", + "w ar", + "t ur", + "ati ons", + "ac h", + "m s", + "il e", + "p m", + "ou gh", + "at e", + "st ar", + "wee k", + "! !!", + "c lu", + "th ere", + "n er", + "t om", + "s el", + "ï¸ ı", + "wor ld", + "v es", + "c am", + "go t", + "in ter", + "of f", + "u m", + "ton ight", + "o ther", + "h ou", + "loo k", + "j e", + "i d", + "si on", + "be au", + "at t", + "el i", + "or t", + "re c", + "f f", + "st er", + "su pp", + "g en", + "be en", + "il y", + "te am", + "m m", + "i c", + "pe op", + "it t", + "at s", + "on ly", + "mb er", + "en g", + "b ri", + "m p", + "k now", + "b ur", + "b ar", + "in s", + "lo w", + "sh e", + "ro w", + "â Ŀ", + "t ro", + "peop le", + "vi a", + "lo w", + "ag a", + "be t", + "x t", + "f ac", + "ch ar", + "e ar", + "w al", + "s en", + "f am", + "b le", + "n ati", + "is h", + "n or", + "g ame", + "li ve", + "s co", + "le y", + "d on", + "ic k", + "b all", + "ver y", + "the se", + "p an", + "i a", + "at ing", + "c r", + "a re", + "g ir", + "ma ke", + "st re", + "sho w", + ". 
\"", + "f l", + "u p", + "d r", + "than ks", + "il li", + "w om", + "st s", + "i g", + "s ur", + "ever y", + "c ur", + "vie w", + "le t", + "in to", + "mo st", + "n a", + "in di", + "g ar", + "ha d", + "s ou", + "v ed", + "an t", + "iti on", + "ma de", + "f ol", + "un i", + "it ed", + "ðŁ ı", + "ic al", + "th r", + "read y", + "ch ec", + "d ra", + "k es", + "boo k", + "e p", + "si c", + "mor ning", + "ne ws", + "c au", + "c t", + "w ell", + "an c", + "pho to", + "th an", + "or s", + "bir th", + "g g", + "ou t", + "ne xt", + "som e", + "en ing", + "stor y", + "ch ri", + "do wn", + "hom e", + "f fe", + "fre e", + "d a", + "b or", + "f il", + "ci al", + "than k", + "si de", + "le ar", + "qu e", + "l ine", + "t en", + "at es", + "ye ars", + "m y", + "pho to", + "beau ti", + "ri ght", + "n u", + "for m", + "shi p", + "b an", + "th er", + "d ays", + "g am", + "as on", + "g y", + "ðŁ İ", + "birth day", + "se t", + "ic k", + "e t", + "st ill", + "com ing", + "ta ke", + "ðŁ ĩ", + "b b", + "s ol", + "s on", + "d en", + "e p", + "mu sic", + "the m", + "de n", + "wh y", + "f oo", + "c ra", + "am az", + "w n", + "h ol", + "t ting", + "w r", + "u e", + "ma g", + "c ro", + "l an", + "c lo", + "b ra", + "a k", + "s ing", + "c al", + "re ad", + "' ve", + "jo h", + "b ab", + "d ri", + "b lo", + "bi g", + "er ic", + "in t", + "t or", + "tr y", + "l a", + "le g", + "hou se", + "m ic", + "v al", + "beauti ful", + "l itt", + "chec k", + "ne w", + "ver s", + "s w", + "ar i", + "pla y", + "h er", + "âĢ ĵ", + "w in", + "m a", + "con gr", + "sch ool", + "f un", + ". @", + "he al", + "ic h", + "d el", + "wh ere", + "l on", + "ke t", + "tw o", + "mu ch", + "wat ch", + "v en", + "d ed", + "a st", + "k ed", + "b as", + "go ing", + "m p", + "e ver", + "w ays", + "ro o", + "de sig", + "l y", + "s ed", + "to p", + "l in", + "ch an", + "to o", + "it ing", + "d ent", + "gh ts", + "t y", + "sp o", + "ne ed", + "b lu", + "in st", + "be ing", + "âĿ ¤", + "w el", + "l s", + "hi m", + "m ay", + "st ing", + "n a", + "el y", + "litt le", + "g a", + "n at", + "tom or", + "m c", + "h on", + "w ant", + "a ir", + "pi c", + "am eric", + "p er", + "le ss", + "wee k", + "ve l", + "a h", + "c ap", + "ch am", + "g er", + "ti m", + "tomor row", + "ne ss", + "st ate", + "h al", + "ser v", + "z e", + "o s", + "p at", + "v is", + "ex c", + "s in", + "f f", + "c ity", + "c en", + "an y", + "b el", + "su mm", + "t in", + "w ould", + "loo king", + "k o", + "ce le", + "fam ily", + "m er", + "po w", + "hel p", + "bu s", + "c o", + "c le", + "sel f", + "en s", + "ic s", + "th o", + "an i", + "ch o", + "le ad", + "b s", + "t wee", + "th ink", + "for e", + "ch il", + "vi de", + "di d", + "al e", + "ch i", + "v il", + "en ds", + "w ing", + "p as", + "' ll", + "v ol", + "s a", + "g s", + "man y", + "j ec", + "be fore", + "gra ph", + "n y", + "ur ing", + "w il", + "d d", + "bu il", + "f av", + "st ed", + "tr an", + "l ing", + "ou d", + "d ge", + "fi el", + "nati onal", + "st a", + "c er", + "w ere", + "in a", + "se ason", + "c ou", + "n ed", + "amaz ing", + "ti ons", + "cele br", + "n s", + "a th", + "he ad", + "s day", + "d ar", + "lo c", + "v in", + "an other", + "g oo", + "s at", + "n y", + "jo in", + "pre s", + "s es", + "s ing", + "an a", + "in ing", + ".. 
..", + "c our", + "ï¸ ı", + "ac t", + "cau se", + "li ght", + "am s", + "t a", + "b al", + "f c", + "hi gh", + "off ici", + "t t", + "chri st", + "d ic", + "d ay", + "ra l", + "h or", + ": )", + "vi si", + "n am", + "o b", + "ma s", + "gh t", + "re ally", + "t un", + "fin d", + "thr ough", + "por t", + "u t", + "ti ve", + "st y", + "n e", + "or e", + "ðŁĺ Ĥ", + "supp ort", + "ne ver", + "ev en", + "ðŁ Ķ", + "h a", + "y a", + "l d", + "u k", + "r an", + "j am", + "wi th", + "me di", + "d es", + "ne y", + "ch ing", + "al e", + "h y", + "k in", + "! !", + "d y", + "pl ace", + "al so", + "b le", + "wh ich", + "bl ack", + "b li", + "s ay", + "par k", + "pl ay", + "ir e", + "vide o", + "week end", + "a il", + "ke y", + "p t", + "w ard", + "fri day", + "d in", + "ine ss", + "g ro", + "b en", + "al ways", + "t ball", + "ag o", + "m il", + "c y", + "pro duc", + "di sc", + "un der", + "ple ase", + "sp or", + "fu ll", + "e y", + "ðŁ Ļ", + "is e", + "iti es", + "c at", + "k no", + "u se", + "fo re", + "k er", + "ar t", + "hi gh", + "op en", + "s an", + "e f", + "our s", + "sh ed", + "st ri", + "d ro", + "aga in", + "i m", + "ðŁ ĵ", + "en jo", + "fu n", + "ge tting", + "p en", + "g er", + "c li", + "an y", + "ever y", + "e u", + "wom en", + "â ľ", + "e st", + "c ould", + "r y", + "\" @", + "th ou", + "sh a", + "comm un", + "b er", + "d ents", + "di s", + "wh ile", + "aw ay", + "di o", + "h am", + "g la", + "d ate", + "k a", + "mis s", + "un ch", + "w on", + "in f", + "roo m", + "g a", + "re al", + "ex per", + "di rec", + "sh ould", + "sp r", + "g ol", + "l ong", + "bet ter", + "or i", + "e y", + "i ence", + "il s", + "z z", + "h an", + "f ound", + "v s", + "â Ļ", + "po st", + "ti c", + "par t", + "m en", + "ren ce", + "ce ss", + "v ic", + "s il", + "sho p", + "ðŁĺ Ĥ", + "f ood", + "v al", + "sti c", + "y ou", + "s ays", + "e lec", + "st ar", + "o c", + "l and", + "i d", + "c tion", + "fiel d", + "s of", + "st art", + "wat er", + "fri ends", + "on es", + "ðŁ Į", + "f la", + "f ar", + "wh ite", + "par ty", + "in st", + "gr ou", + "t v", + "every one", + "m ent", + "j a", + "ch a", + "pr in", + "an ts", + "d uring", + "l at", + "l ar", + "we st", + "th en", + "k a", + "y oun", + "in sp", + "in te", + "we en", + "visi t", + "aga inst", + "re le", + "he ad", + "c es", + "to wn", + "loo ks", + "th re", + "re gi", + "ren t", + "pro jec", + "gir l", + "se ar", + "w o", + "m om", + "c ar", + "h un", + "pu bli", + "d i", + "p le", + "c all", + "c ri", + "u m", + "for d", + "per fe", + "fri end", + "h ard", + "ssi on", + "te st", + "pla ying", + "ar ound", + "be cause", + "ke ts", + "me et", + "sat ur", + "ar ti", + "wor k", + "j un", + "v en", + "r un", + "me mber", + "por t", + "su per", + "t wit", + "s am", + "el s", + "t ly", + "ad v", + "ati ve", + "at h", + "s ure", + "av ail", + "la r", + "s qu", + "ar ds", + "ev ent", + "m en", + "l l", + "o ver", + "lo gy", + "it al", + "tim es", + "m al", + "b ack", + "c oo", + "ma king", + "st ru", + "â ģ", + "it u", + "sh ar", + "g an", + "c as", + "s n", + "summ er", + "pic ture", + "f an", + "h in", + "christ mas", + "c y", + "pr oud", + "cham pi", + "desig n", + "pp ing", + "ho pe", + "c a", + "avail able", + "ma y", + "we d", + "photo graph", + "spe cial", + "sal e", + "sto p", + "er y", + "a we", + "al ity", + "hi story", + "am a", + "pre si", + "b ru", + "wor king", + "d one", + "d r", + "k en", + "fe at", + "w ood", + "ate st", + "sun day", + "mo vi", + "vel y", + "s le", + "f ace", + "sp ec", + "stu dents", + "b y", + "ha m", + "sp on", + "bus iness", + "d 
at", + "i e", + "i p", + "so ci", + "g lo", + "h and", + "re cor", + "r s", + "me e", + "ke ep", + "p ur", + "heal th", + "sh e", + "com ple", + "go d", + "da vi", + "col lec", + "li st", + "r a", + "clu b", + "t ers", + "in clu", + "th ings", + "pl an", + "â ĺ", + "joh n", + "sh ing", + "at ul", + "so on", + "blu e", + "g or", + "satur day", + "w on", + "congr atul", + "se e", + "âĿ¤ ï¸ı", + "tho se", + "ðŁĺ į", + "fin al", + "d ou", + "it h", + "o wn", + "ro ad", + "t our", + "a st", + "indi a", + "ti l", + "n d", + "f er", + "fav or", + "su l", + "lear n", + "fir e", + "ju st", + "grou p", + "a h", + "r ac", + "bo dy", + "u r", + "c are", + "à ¸", + "p lo", + "o h", + "po s", + "gi ve", + "te ch", + "su b", + "c ent", + "er ing", + "y m", + "il ity", + "f ic", + "lon don", + "v ir", + "gu ys", + "b a", + "ðŁ ¤", + "bab y", + "sc re", + "ðŁĺ į", + "tru mp", + "un der", + "chan ge", + "i an", + "col le", + "ss es", + "l er", + "ss ed", + "n ice", + "ann oun", + "pow er", + "s ar", + "a king", + "min i", + "s li", + "s wee", + "k ar", + "fu l", + "c ru", + "ac tion", + "a ther", + ") .", + "st and", + "de vel", + "a a", + "g an", + "le ft", + "lo l", + "re l", + "tran s", + "m ents", + "in t", + "e f", + "man ag", + "di g", + "gen er", + "do wn", + "p au", + "ti v", + "k u", + "th ur", + "k en", + "st on", + "f ans", + "tal k", + "twee t", + "t oo", + "sty le", + "pro te", + "se con", + "fr on", + "awe some", + "g l", + "p al", + "ne t", + "s or", + "la u", + "g on", + "sin ce", + "t ty", + "ser ies", + "me mor", + "b eli", + "fil m", + "di d", + "di es", + "o t", + "congratul ations", + "p ra", + "e ve", + "w oo", + "offici al", + "su c", + "in cre", + "b on", + "par t", + "pp ed", + "cla ss", + "si ve", + "bo y", + "cu l", + "perfe ct", + "t ou", + "d am", + "wel come", + "foo tball", + "h i", + "p ap", + "wa it", + "ad a", + "congr ats", + "youn g", + "exc ited", + "re ce", + "j an", + "v a", + "re d", + "st ra", + "medi a", + "' d", + "do es", + "le t", + "mu l", + "ill s", + "gre en", + "m el", + "to ge", + "fu ture", + "ye ster", + "vers ity", + "for m", + "ta in", + "i de", + "ch es", + "ki ds", + "qu i", + "ha ha", + "de ta", + "bi g", + "favor ite", + "gir ls", + "con tin", + "do m", + "sear ch", + "u al", + "a ir", + "d ers", + "mon th", + "c er", + "yester day", + "commun ity", + "ad e", + "do g", + "vil le", + "ic es", + "d eli", + "sy ste", + "ru n", + "is m", + "he art", + "c up", + "en ti", + "fe w", + "presi dent", + "e ds", + "un til", + "fe sti", + "o k", + "f lo", + "sa id", + "ol e", + "me d", + "tra vel", + " £", + "ph one", + "toge ther", + "fa st", + "lo t", + "gam es", + "sh ir", + "bet ween", + "y es", + "th ers", + "do ing", + "m ac", + "at or", + "b and", + "fol low", + "projec t", + "devel op", + "di ffe", + "con fe", + "spe ci", + "ca st", + "y s", + "bo ard", + "r d", + "i al", + "sh oo", + "r am", + "ha ving", + "sh are", + "fol low", + "on e", + "n ame", + "m r", + "pu t", + "disc u", + "or y", + "c ame", + "ou s", + "s ite", + "twit ter", + "t b", + "t it", + "fin ally", + "z ed", + "su per", + "com pan", + "us ing", + "all s", + "li st", + "r is", + "sho t", + "g al", + "t ar", + "de l", + "joh n", + "âĢ Ķ", + "some thing", + "ra m", + "inte re", + "wh e", + "b it", + "ðŁ į", + "stre et", + "oun d", + "a i", + "tic kets", + "movi e", + "re al", + "k y", + "ta king", + "o pp", + "c c", + "l am", + "m oun", + "in ve", + "bl ack", + "us ed", + "on line", + "y or", + "loc al", + "gu e", + "c ks", + "o w", + "ge st", + "bo ys", + "illi on", + "con t", + "re 
ci", + "in ed", + "eu ro", + "no w", + "se en", + "p h", + "te ach", + "de f", + "sou th", + "su ch", + "aw ard", + "mu st", + "is su", + "ca re", + "fe el", + "p lu", + "l atest", + "spor ts", + "we b", + "te x", + "e ment", + "s k", + "fi c", + "w an", + "te ch", + "o t", + "bo x", + "n er", + "fre e", + "t al", + "a sh", + "c ase", + "ho t", + "won der", + "mee ting", + "er a", + "ch all", + "ðŁ IJ", + "jo b", + "il i", + "c ool", + "j our", + "th s", + "m o", + "f el", + "di e", + "mic ha", + "e le", + "te am", + "serv ice", + "st and", + "ma kes", + "p ing", + "ear ly", + "com es", + "e k", + "ho li", + "v ers", + "ag ue", + "s au", + "thre e", + "mon day", + "fa shi", + "some one", + "th ro", + "se a", + "b ad", + "supp or", + "tur n", + "ur y", + "m ing", + "photograph y", + "n ic", + "mar k", + "pre tty", + "ss ing", + "wat ching", + "me mb", + "ar ri", + "coun ty", + "be ach", + "fr an", + "cen ter", + "pol ice", + "b at", + "publi c", + "t an", + "pre ss", + "s af", + "s y", + "ge ts", + "ro y", + "n ers", + "y our", + "bu y", + "st ers", + "sho w", + "as ed", + "chil dre", + "af ric", + "in es", + "sp ace", + "sc ri", + "h all", + "pa in", + "ar ing", + "hom e", + "m ur", + "heal th", + "ch ed", + "s and", + "rece i", + "gu y", + "e a", + "americ an", + "re si", + "childre n", + "- -", + "i ri", + "ing ton", + "coun try", + "ro ss", + "le n", + "ann a", + "boo ks", + "b c", + "e ce", + "d om", + "lo vely", + "k h", + "pe t", + "g y", + "g ri", + "st age", + "off ice", + "ro ck", + "m on", + "b ay", + "t able", + "su n", + "m ed", + "th in", + "l or", + "f low", + "( @", + "uni versity", + "stor e", + "fron t", + "goo d", + "z a", + "vo te", + "nor th", + "he y", + "an im", + "or der", + "mi d", + "with out", + "a de", + "re member", + "mar ket", + "? 
?", + "mu s", + "tra ining", + "e duc", + "bu t", + "co ver", + "st an", + "sc en", + "b la", + "bre ak", + "l ou", + "s ame", + "g old", + "a in", + "o s", + "bo th", + "l it", + "ver n", + "a i", + "al bu", + "p a", + "enjo y", + "be g", + "ell ing", + "thur sday", + "inf o", + "s an", + "americ a", + "ha ir", + "te l", + "mar ch", + "con cer", + "colle ge", + "confe rence", + "ap p", + "h our", + "ch ang", + "â ļ", + "s our", + "ol s", + "we ather", + "w ar", + "p hi", + "festi val", + "secon d", + "cu te", + "pr ac", + "en er", + "str y", + "le a", + "pol it", + "s av", + "se n", + "o w", + "m i", + "ne ar", + "ou ght", + "z e", + "co ffe", + "w illi", + "d an", + "se y", + "davi d", + "e se", + "f an", + "de ci", + "the at", + "no v", + "ati on", + "tr ac", + "sc i", + "re view", + "c el", + "e m", + "u n", + "ju ly", + "or ig", + "ti on", + "d ru", + "form er", + "st ay", + "af ter", + "in v", + "too k", + "dat a", + "b al", + "tu es", + "d an", + "ev ening", + "ðŁĺĤ ðŁĺĤ", + "d ol", + "u res", + "pro vi", + "t s", + "e st", + "sig n", + "j ac", + "u k", + "s ong", + "ye t", + "bo w", + "in du", + "j ap", + "h oo", + "po int", + "any one", + "z y", + "i st", + "h ur", + "it al", + "buil ding", + "wom an", + "ch ur", + "j er", + "per for", + "co ach", + "le ague", + "ce ss", + "ne t", + "i mag", + "nati on", + "br it", + "qu e", + "aw ards", + "ag es", + "wor ks", + "c ed", + "man ce", + "l ate", + "ig n", + "mon ey", + "tru e", + "i i", + "t ell", + "pl ac", + "p ac", + "as y", + "wor ld", + "be hin", + "im port", + "read ing", + "gra m", + "gi ving", + "me t", + "h it", + "for ward", + "st om", + "pres ent", + "jun e", + "so cial", + "no on", + "mar t", + "hal f", + "s we", + "go vern", + "k er", + "deta ils", + "li sh", + "_ _", + "ac y", + "si a", + "ber t", + "f all", + "! !!!", + ") ,", + "th i", + "d iti", + "sp ort", + "k ing", + "f it", + "st af", + "c at", + "mu se", + "cen tr", + "y er", + "con tro", + "b loo", + "wal k", + "ac tu", + "did n", + "li m", + "lear ning", + "re search", + "wed ne", + "au th", + "h ours", + "k y", + "f ar", + "h en", + ".. 
..", + "it ch", + "ri l", + "str ong", + "sk y", + "que sti", + "jam es", + "r on", + "d g", + "f ur", + "c in", + "do es", + "app ro", + "mar ke", + "tu res", + "ful ly", + "ch at", + "behin d", + "te m", + "fin i", + "mis sion", + "b att", + "fe el", + "he av", + "every thing", + "b ar", + "w ish", + "pre mi", + "i ma", + "exper ience", + "e ach", + "re port", + "swee t", + "tic s", + "spr ing", + "re spon", + "syste m", + "vic tor", + "l in", + "sa w", + "al ready", + "gh ter", + "f le", + "ã ĥ", + "br ing", + "albu m", + "- -", + "ell s", + "st an", + "to m", + "inter national", + "w ent", + "an ni", + "mat ch", + "pp er", + "st one", + "sm all", + "ra in", + "fashi on", + "are a", + "v an", + "ag ram", + "k o", + "thou ght", + "wor th", + "v an", + "m er", + "coffe e", + "it es", + "g n", + "arti st", + "c on", + "ar ch", + "c ir", + "se cre", + "gr ound", + "is o", + "h and", + "co m", + "bri dge", + "h s", + "x i", + "l ink", + "pu l", + "sp l", + "r ace", + "f li", + "ri ver", + "g as", + "di sco", + "d al", + "play er", + "f it", + "photo s", + "it y", + "o k", + "j or", + "tr a", + "ap ril", + "ad s", + "a di", + "sol u", + "beau ty", + "do or", + "me ss", + "up date", + "ali a", + "sch o", + "en ed", + "mom ent", + "sco t", + "sc ience", + "i or", + "ti es", + "ac ross", + "ous ly", + "sh es", + "does n", + "p age", + "wat er", + "m illion", + "cla ssi", + "l ic", + "ca st", + "form ation", + "micha el", + "ell o", + "s mo", + "in ts", + "vi sion", + "op ening", + "ld n", + "au str", + "tues day", + "win ner", + "po ssi", + "r ound", + "shir t", + "di t", + "b o", + "u es", + "il led", + "al ong", + "tri p", + "star ting", + "im pro", + "k an", + "per son", + "no t", + "re co", + "ne eds", + "c le", + "li e", + "re st", + "r ing", + "win ter", + "si mp", + "mo m", + "be er", + "fac e", + "tor s", + "us a", + "collec tion", + "ge or", + "se ssion", + "tr ying", + "la s", + "la ke", + "j en", + "orig in", + "stu dent", + "se cur", + "v in", + "pic s", + "ex pe", + "com p", + "gon na", + "e qu", + "b ad", + "le y", + "a u", + "memb ers", + "bre ak", + "w all", + "gi c", + "din ner", + "bu l", + "insp ir", + "r i", + "min d", + "ic a", + "win ning", + "tal king", + "t ren", + "s is", + "t en", + "wonder ful", + "s now", + "he ar", + "th om", + "no thing", + "gu i", + "st in", + "blo g", + "fe st", + "b un", + "le e", + "war ds", + "ch ance", + "dre ss", + "re n", + "pau l", + "p es", + "tech no", + "ru ssi", + "c ard", + "e ast", + "mar i", + "w ine", + "t i", + "la w", + "str ic", + "k i", + "ap e", + "au gu", + "pro fe", + "as h", + "cour se", + "ma il", + "ren tly", + "d un", + "m un", + "lo ve", + "is land", + "dri ve", + "s l", + "end ed", + "ma in", + "lo st", + "nat ure", + "âĿ¤ ï¸ı", + "ch ic", + "re por", + "p in", + "pr o", + "st ation", + "ce p", + "ta kes", + "compan y", + "go es", + "on d", + "ma ch", + "ra dio", + "d ad", + "ro ck", + "j a", + "p ay", + "champi on", + "e e", + "in de", + "tt a", + "ati c", + "t ab", + "beli eve", + "ener gy", + "z i", + "t at", + "wor d", + "on ce", + "re sul", + "y l", + "and re", + "an o", + "inst agram", + "clo se", + "t am", + "cu stom", + "w a", + "con om", + "sho ws", + "li fe", + "k in", + "ro b", + "t age", + "n ation", + "al most", + "list en", + "sa ve", + "re li", + "ac e", + "mar y", + "tre e", + "for get", + "j ack", + "wa iting", + "direc tor", + "h ill", + "bor n", + "te mp", + "f l", + "st e", + "on a", + "sing le", + "wedne sday", + "un ited", + "in o", + "@ _", + "ne l", + "celebr ate", + "en ding", + "de al", + "j 
i", + "can ada", + "hu ge", + "tr ack", + "âĢ ¢", + "f y", + "fan ta", + "an g", + "yor k", + "rele ase", + "p un", + "ep iso", + "wor ds", + "t our", + "p ack", + "i gh", + "classi c", + "perfor mance", + "ke t", + "after noon", + "recor d", + "win s", + "pro ble", + "âĿ ¤", + "f our", + "b ed", + "ban k", + "d ance", + "s la", + "cal led", + "mi ght", + "a p", + "pa st", + "ðŁ ļ", + "diffe rent", + "it e", + "gi ft", + "ssi ve", + "chur ch", + "c us", + "pro gram", + "ho tel", + "ic e", + "ma d", + "secur ity", + "en ge", + "d c", + "en ough", + "st a", + "e ty", + "de ad", + "g un", + "he ar", + "m ir", + "hu man", + "gre ss", + "oun ds", + "pi ece", + "bre aking", + "gar den", + "fi ght", + "vie ws", + "f ish", + "star ted", + "run ning", + "gre en", + "ser i", + "s m", + "as k", + "d or", + "de ath", + "e conom", + "er i", + "ir d", + "s er", + "l unch", + "âģ ¦", + "bo x", + "nat u", + "ba se", + "b an", + "f al", + "glo bal", + "wil d", + "wo w", + "out side", + "mo ve", + "le ad", + "an al", + "muse um", + "on g", + "ha w", + "pow er", + "than k", + "b ac", + "char ac", + "cam pa", + "dig ital", + "r o", + "op er", + "de v", + "w ol", + "p ati", + "f a", + "m ale", + "pap er", + "ill ing", + "c s", + "â ĥ", + "educ ation", + "ta ken", + "e ffe", + "m ou", + "s ad", + "\" .", + "bas ed", + "staf f", + "inclu ding", + "li ving", + "a c", + "ch ina", + "mo b", + "stor m", + "lu ck", + "ph il", + "o o", + "y n", + "tra vel", + "k el", + "ti al", + "pr ice", + "boo k", + "import ant", + "bi o", + "p ool", + "ny c", + "f ab", + "lo ad", + "? !", + "chall enge", + "cr y", + "ser ve", + "we ar", + "bu s", + "ta in", + "nu mber", + "ro r", + "k at", + "i z", + "th ough", + "ho sp", + "m m", + "fa ir", + "ut es", + "ho t", + "po p", + "fi ed", + "cam p", + "develop ment", + "li br", + "c ali", + "em s", + "âģ¦ @", + "b ol", + "is ed", + "stand ing", + "mo del", + "it a", + "g le", + "bro wn", + "ima ge", + "ve red", + "for ce", + "o il", + "par tic", + "sh u", + "da ily", + "la w", + "se c", + "cla ss", + "cam p", + "holi day", + "cl in", + "k ers", + "pres ent", + "gam e", + "incre di", + "er ship", + "inter view", + "b ill", + "du e", + "and y", + "ab o", + "in nov", + "ke y", + "ac ade", + "p il", + "mo der", + "st ars", + "br and", + "f er", + "wee ks", + "con si", + "pr e", + "sa fe", + "wr it", + "di um", + "la unch", + "marke ting", + "ann ual", + "as si", + "cour t", + "la dy", + "c ted", + "and a", + "in side", + "chil d", + "opp or", + "sm ith", + "centr e", + "gu e", + "âģ ©", + "f ren", + "st y", + "for t", + "ent ly", + "is n", + "ke ep", + "to ber", + "on y", + "bo y", + "al d", + "col la", + "de mo", + "le vel", + "com pet", + "ad o", + "b our", + "fanta stic", + "m ate", + "s u", + "sou th", + "oppor tun", + "vers ary", + "lat er", + "bu d", + "face book", + "la un", + "ster n", + "p it", + "! \"", + "ma j", + "gr am", + "tb t", + "fi re", + "happ y", + "a ks", + "wh ole", + "actu ally", + "ill er", + "ell a", + "lo ts", + "al ex", + "an ge", + "lan ds", + "ðŁĺ Ń", + "en ter", + "r ou", + "episo de", + "p ed", + "in ten", + "sh ire", + "wh o", + "pl an", + "h o", + "ca ke", + "we st", + "mag az", + "fre sh", + "c c", + "n ar", + "ch ris", + "wr iting", + "w er", + "n om", + "l o", + "mi dd", + "dre am", + "o l", + "ti onal", + "de b", + "> >", + "be come", + "s i", + "gr and", + "all ing", + "hi stor", + "ri de", + "i red", + "saf e", + "que en", + "ci l", + "in tro", + "vi l", + "d ani", + ".. 
.", + "ar tic", + "st at", + "sh ort", + "or ing", + "sel fi", + "mis si", + "do c", + "b it", + "g all", + "b om", + "i re", + "se lec", + "d ition", + "ðŁĶ ¥", + "fri end", + "be at", + "gh ting", + "ðŁĺ Ĭ", + "pe ace", + "ex hi", + "ant a", + "ab ility", + "il lu", + "j on", + "qu ality", + "tri bu", + "m es", + "play ers", + "fa ir", + "cu t", + "c ab", + "suc cess", + "b i", + "su s", + "pro mo", + "sch e", + "an ge", + "ic o", + "comm it", + "cat ch", + "ill a", + "kin d", + "feel ing", + "qu o", + "s ay", + "anni versary", + "spo t", + "mo ther", + "an e", + "p end", + "your self", + "op s", + "app le", + "min utes", + "p o", + "gr and", + "ri es", + "ha ha", + "care er", + "ed ition", + "de c", + "ric k", + "am i", + "concer t", + "iti ve", + "ge ous", + "d ly", + "t te", + "adv ent", + "i g", + "li ghts", + "ak er", + "sk y", + "âĥ £", + "r ay", + "fini shed", + "w ay", + "s d", + "ac coun", + "ðŁĴ ķ", + "ck y", + "ch el", + "lit er", + "pain ting", + "lo s", + "st un", + "techno logy", + "n as", + "ma r", + "b il", + "afric a", + "ki e", + "ey es", + "gol f", + "plu s", + "ni a", + "it ec", + "serv ices", + "wed ding", + "kno wn", + "te le", + ".. ...", + "star ts", + "pa ren", + "w ants", + "ati onal", + "mon ths", + "win do", + "fav our", + "er t", + "magaz ine", + "ex clu", + "re ve", + "b c", + "origin al", + "e ss", + "n al", + "an ti", + "st ro", + "t ice", + "stu dy", + "à ¤", + "v ac", + "nation al", + "fi ve", + "ra in", + "ve ment", + "u te", + "ver se", + "em er", + "ar my", + "possi ble", + "gue ss", + "val ley", + "ther n", + "cro w", + "m r", + "col or", + "on to", + "pic k", + "cle ar", + "dar k", + "t ac", + "wan ted", + "it ting", + "can cer", + "govern ment", + "di e", + "ri se", + "z ing", + "col d", + "f oun", + "stu dio", + "str ation", + "bro ther", + "a head", + "sh el", + "mic ro", + "ic ally", + "d au", + "sig ned", + "vi ol", + "a x", + "as se", + "i o", + "w re", + "spl ay", + "ch ick", + "augu st", + "pl at", + "ti ps", + "sp i", + "hu man", + "e asy", + "lo gi", + "mi ke", + "gro w", + "ag re", + "w w", + "sh ad", + "mo tiv", + "wi de", + "tur ns", + "om g", + "v ar", + "de fin", + "su g", + "j im", + "ðŁĶ ¥", + "t d", + "campa ign", + "nam ed", + "re tweet", + "co p", + "t v", + "le av", + "k is", + "dou ble", + "s mar", + "issu e", + "vil la", + "in formation", + "li es", + "sto ck", + "n t", + "di stric", + "sh or", + "mi x", + "er o", + "se p", + "me x", + "see ing", + "li ve", + "re min", + "co de", + "g ur", + "s c", + "wil d", + "l un", + "h ood", + "spo t", + "fa ther", + "fore ver", + "up d", + "tra f", + "f ly", + "ne ed", + "gra du", + "tra in", + "ma ke", + "s ab", + "be y", + "si ze", + "lead er", + "tal ks", + "e u", + "lo g", + "fo x", + "gor geous", + "le ss", + "le ts", + "sur pri", + "my self", + "no te", + "li ves", + "f ru", + "lo ved", + "se ver", + "de m", + "j i", + "so c", + "h old", + "do gs", + "n i", + "â ŀ", + "lea ve", + "air port", + "ben ef", + "ex pl", + "shi ps", + "comple te", + "ach i", + "gre at", + "vin tage", + "j ack", + "ro c", + "woo d", + "pri v", + "off er", + "ey e", + "ver sion", + "te a", + "co ach", + "off ic", + "w ell", + "g en", + "s at", + "h h", + "you th", + "o x", + "? 
\"", + "m t", + "mi x", + "g g", + "d le", + "natu ral", + "buil d", + "break fast", + "thin king", + "theat re", + "mo on", + "ber g", + "go als", + "geor ge", + "en e", + "exc ell", + "il ing", + "tun e", + "y ed", + "g ate", + "m it", + "net work", + "jo e", + "h ello", + "f b", + "tu be", + "we aring", + "ath le", + "stru c", + "har d", + "gla ss", + "g ers", + "thro w", + "g es", + "b t", + "indu stry", + "manag ement", + "ali st", + "go al", + "stre am", + "y el", + "a vi", + "ici ous", + "o thers", + "s ki", + "chri sti", + "bir d", + "e sc", + "m in", + "tr o", + "l t", + "j an", + "im p", + "ri ghts", + "sh a", + "or gan", + "cent ral", + "ar a", + "ro ll", + "favour ite", + "che ster", + "el se", + "p ay", + "car s", + "m ine", + "ste p", + "prac tice", + "maj or", + "h ang", + "ðŁĺ ĺ", + "n on", + "v ari", + "eng ine", + "vol un", + "di a", + "i led", + "arch itec", + "p ink", + "d s", + "th y", + "wa sh", + "web site", + "ba g", + "contro l", + "el li", + "f ra", + "an sw", + "d ence", + "y u", + "r on", + "ol a", + "g in", + "dr in", + "li c", + "cou ple", + "sp ar", + "g on", + "cre ate", + "c t", + "celebr ating", + "de ep", + "e at", + "te e", + "vo ice", + "dro p", + "vis it", + "at ors", + "sta dium", + "f t", + "w is", + "ro l", + "gra de", + "fam il", + "po ints", + "re pre", + "w as", + "traf fic", + "jap an", + "or g", + "hon or", + "tex as", + "man u", + "âĻ ¥", + "safe ty", + "re r", + "b ag", + "em plo", + "rele ased", + "re gu", + "ak a", + "n av", + "ro le", + "sen ior", + "spec t", + "cro ss", + "lin es", + "be st", + "p ack", + "s in", + "ti e", + "mis sing", + "sun set", + "li ber", + "is ing", + "j ay", + "sk i", + "champion ship", + "ac tiv", + "la dies", + "play ed", + "y y", + "pu bl", + "al o", + "pri de", + "s r", + "pa ki", + "lu x", + "sur vi", + "ck ed", + "e ts", + "cho col", + "austr alia", + "par is", + "mi les", + "h at", + "ment al", + "al a", + "me an", + "mob ile", + "en a", + "in si", + "f ound", + "chi ef", + "t ag", + "incredi ble", + "re turn", + "à ©", + "goo gle", + "fren ch", + "cre w", + "hal lo", + "ali an", + "j az", + "ch er", + "sil ver", + "nor th", + "eng lish", + "base ball", + "c af", + "lim ited", + "follow ing", + "app reci", + "ear th", + "k ir", + "ve mber", + "w ed", + "p tion", + "g ed", + "oc tober", + "fl ori", + "c r", + "en cy", + "ga ve", + "lor d", + "stu ff", + "ber ry", + "po st", + "sm ile", + "bro ad", + "st ate", + "gg er", + "me ans", + "ic y", + "gu n", + "y o", + "ma ster", + "bur g", + "han ds", + "ni e", + "/ /", + "uni on", + "brit ish", + "big gest", + "distric t", + "am ing", + "h il", + "o ce", + "per son", + "pas s", + "en vir", + "scho ols", + "arri ved", + "anc es", + "insp ired", + "ex pla", + "be n", + "libr ary", + "bo tt", + "am p", + "ste ph", + "cont act", + "b ang", + "m s", + "cali for", + "t old", + "batt le", + "b b", + "chic ago", + "âľ ¨", + "str ate", + "sh i", + "de ce", + "- )", + "ad d", + "la b", + "j ones", + "leg end", + "cast le", + "ing er", + "st ance", + "be l", + "ur a", + "re fu", + "lead ers", + "po t", + "se x", + "h ic", + "artic le", + "ki d", + "fr ance", + "x x", + "ex e", + "gui de", + "volun te", + "pr int", + "al i", + "ce o", + "twee ts", + "w x", + "scen e", + "vol u", + "ant i", + "h an", + "as soci", + "shar ing", + "ro se", + "mini ster", + "sh er", + "in ste", + "cle an", + "demo cr", + "po ster", + "sk in", + "p sy", + "pro per", + "cra zy", + "i am", + "o re", + "in i", + "any thing", + "po d", + "mo ving", + "cl ick", + "ex plo", + "com b", + "cra ft", + "f 
i", + "bloo d", + "is ra", + "publ ic", + "d ent", + "ol ym", + "eng land", + "a si", + "ch er", + "fac t", + "envir on", + "har ry", + "g one", + "me dic", + "enjo ying", + "just ice", + "j r", + "indi an", + "wi fe", + "s ound", + "t es", + "dra wing", + "p al", + "ide a", + "cr it", + "ju li", + "il er", + "war m", + "cl ar", + "thou ghts", + "def en", + "coun cil", + "intro duc", + "di ed", + "jan u", + "an i", + "s end", + "li er", + "m l", + "intere sting", + "tra de", + "win d", + "b ay", + "s ac", + "anc y", + "sour ce", + "b es", + "org ani", + "ar ly", + "lar ge", + "ff ici", + "ta g", + "u t", + "de sp", + "o es", + "tit le", + "sy m", + "pic tures", + "op en", + "wom en", + "sho wing", + "ri a", + "le ast", + "lead ership", + "cur rent", + "elec tr", + "val ent", + "list ening", + "c key", + "gener al", + "de ser", + "du ce", + "; )", + "c ent", + "ðŁĺį ðŁĺį", + "sco tt", + "po or", + "selfi e", + "ev ents", + "i on", + "wr ong", + "de v", + "h ill", + "sep te", + "cul ture", + "l ine", + "sor ry", + "s ent", + "si ster", + "ce pt", + "k ri", + "no vember", + "ar i", + "announ ce", + "z ation", + "br an", + "g ent", + "d u", + "l en", + "per s", + "f m", + "mart in", + "o p", + "e mb", + "om e", + "midd le", + "suc cess", + "pe ter", + "janu ary", + "f lu", + "rac ing", + "d av", + "bi ke", + "ðŁı »", + "pe t", + "shoo t", + "profe ssi", + "feat uring", + "septe mber", + "now playing", + "sta ur", + "z a", + "on ic", + "qu ick", + "bas ke", + "spe aking", + "mil it", + "z er", + "chick en", + "b ell", + "s ad", + "co ast", + "lo ving", + "y ers", + "d j", + "pan el", + "ver age", + "s wit", + "ic ks", + "b ou", + "califor nia", + "s am", + "paren ts", + "er o", + "k illed", + "ph ys", + "jo bs", + "mi gr", + "an th", + "e mo", + "hallo ween", + "and er", + "c m", + "compet ition", + "e ag", + "s ket", + "sp ir", + "may be", + "exclu sive", + "app e", + "jour ney", + "scre en", + "for d", + "i o", + "h ate", + "u g", + "sou l", + "her o", + "soci ety", + "sy n", + "gu it", + "n h", + "d j", + "as es", + "im pre", + "ti me", + "sal es", + "d d", + "f ts", + "summ it", + "stun ning", + "om s", + "tur ned", + "cle an", + "sof t", + "be at", + "re staur", + "de red", + "en ces", + "ma gic", + "di o", + "sh ine", + "gu est", + "health y", + "exhi b", + "stor ies", + "po pu", + "n is", + "el a", + "bel ow", + "fun ny", + "resul ts", + "s ne", + "cur rently", + "ar d", + "down load", + "f light", + "m al", + "f ine", + "p ad", + "ch u", + "ent ed", + "h at", + "ðŁij ı", + "ste ve", + "j o", + "mar k", + "r at", + "b all", + "p c", + "p on", + "b by", + "o li", + "ar ts", + "as ure", + "bow l", + "att ack", + "mi c", + "de ar", + "ran ge", + "en ter", + "chocol ate", + "br illi", + "ac cess", + ", \"", + "? 
??", + "ch ap", + "con st", + "t n", + "mat ter", + "blu e", + "gall ery", + "em p", + "work shop", + "lead ing", + "y ours", + "baske tball", + "w anna", + "th u", + "_ _", + "mar ri", + "sle ep", + "bi a", + "ch e", + "ma d", + "imp act", + "o wn", + "si r", + "chan nel", + "euro pe", + "e sp", + "k itch", + "hosp ital", + "w ra", + "roy al", + "f s", + "ne u", + "qu ar", + "ne y", + "ac ks", + "ch ase", + "pp y", + "st al", + "at ely", + "ti m", + "dece mber", + "r are", + "per form", + "cre am", + "we ight", + "ch oo", + "ni ght", + "ha ven", + "fr anc", + "kh an", + "buil t", + "hel ping", + "tru st", + "ty pe", + "gol den", + "ta x", + "s now", + "s wi", + "di sa", + "questi ons", + "ve y", + "li ght", + "c n", + "cl oud", + "thom as", + "ag ed", + "sh ou", + "te ams", + "gr an", + "re ason", + "a a", + "you tube", + "v p", + "pi zz", + "manag er", + "bur y", + "cre dit", + "tre at", + "ma x", + "i k", + "ma in", + "g ing", + "de ad", + "pro bab", + "ye ah", + "ã Ĥ", + "br and", + "so li", + "pl ant", + "ta yl", + "gir l", + "ðŁĺ Ń", + "nam ent", + "au to", + "mess age", + "ko re", + "n ur", + "ter r", + "ag u", + "ma p", + "sen ting", + "lo ves", + "gi ves", + "g ab", + "z en", + "ro bert", + "con fir", + "w ars", + "o m", + "sta in", + "cam era", + "and er", + "won der", + "a b", + "ca p", + "s old", + "su it", + "wal king", + "contin ue", + "effe c", + "dau ghter", + "d anc", + "cha in", + "mul ti", + "ki d", + "y an", + "champi on", + "v o", + "ta ins", + "ho st", + "min i", + "mis sed", + "re sc", + "ly n", + "fin ish", + "del icious", + "s as", + "tayl or", + "i b", + "pro mis", + "produc ts", + "moun tain", + "flori da", + "regi ster", + "tre at", + "rec ent", + "fe male", + "boo th", + "mat t", + "ve hic", + "s op", + "mo tor", + "suppor ting", + "phi c", + "ex tre", + "dr ink", + "lan e", + "th ird", + "p s", + "con stru", + "ce re", + "far m", + "ðŁİ ī", + "tu red", + "ðŁij ī", + "c ats", + "a j", + "gi e", + "shoo ting", + "as ked", + "paki stan", + "am e", + "m b", + "g il", + "leg al", + "squ are", + "in vol", + "dra w", + "oo oo", + "!! 
!!", + "opportun ity", + "p y", + "e i", + "b ts", + "teach er", + "charac ter", + "john son", + "br on", + "ly wood", + "ch ine", + "c ing", + "c ine", + "d ge", + "gam ing", + "russi a", + "ci a", + "quo te", + "ric h", + "go v", + "flow ers", + "sp iri", + "st in", + "grow th", + "ðŁı ¼", + "comm er", + "j uni", + "mu m", + "r an", + "s na", + "a ren", + "c b", + "ac tor", + "col or", + "si t", + "pa ir", + "ch i", + "bo w", + "acade my", + "hel d", + "r ang", + "me tal", + "y l", + "ac tive", + "probab ly", + "t ch", + "need ed", + "spe e", + "cho ice", + "ital y", + "ry an", + "ðŁĩ º", + "flow er", + "v it", + "m n", + "found ation", + "b ak", + "si ons", + "ne igh", + "f loo", + "he ard", + "re mo", + "fre sh", + "ing ing", + "re f", + "to wn", + "cl ou", + "je sus", + "spiri t", + "cou ldn", + "z es", + "ðŁĴ Ļ", + "willi ams", + "pro ce", + "moder n", + "pro cess", + "sho es", + "cre ated", + "tri c", + "issu es", + "ann e", + "att en", + "de but", + "h r", + "n it", + "sti g", + "a po", + "e ps", + "z u", + "ã Ģ", + "si x", + "car ds", + "lan gu", + "fam ous", + "tour nament", + "se l", + "e bay", + "y n", + "st on", + "k ick", + "announ ced", + "k am", + "vo c", + "brilli ant", + "hou se", + "che ese", + "war ri", + "mus ic", + "ho ckey", + "ðŁĺĤ ðŁĺĤ", + "sk ills", + "au tom", + "smar t", + "med ical", + "mon y", + "e x", + "gu ar", + "gi ve", + "pers onal", + "ven tion", + "al li", + "pre ss", + "flo or", + "m c", + "victor y", + "hi m", + "simp le", + "th or", + "ðŁĩº ðŁĩ", + "ta il", + "lu cky", + "ale x", + "qu ite", + "bo t", + "ssi ons", + "chall eng", + "c ann", + "amaz on", + "h ell", + "b ought", + ") :", + "ed y", + "secre t", + "produc tion", + "inde pend", + "de fe", + "ad ded", + "p r", + "p ag", + "be d", + "gre atest", + "with in", + "j ay", + "ðŁ ¥", + "ire land", + "re ly", + "s d", + "te xt", + "dri ving", + "pro gram", + "spe ed", + "col um", + "str on", + "à ©", + "fore st", + "â ĸ", + "mach ine", + "co in", + "sc ar", + "oun t", + "bi e", + "¡ ï¸ı", + "por tra", + "comm on", + "wre st", + "recei ved", + "kno w", + "inve st", + "pl ans", + "ac cor", + "ad op", + "ter y", + "re ali", + "p p", + "k al", + "art work", + "me an", + "go d", + "inste ad", + "an ci", + "motiv ation", + "as ing", + "inspir ation", + "up coming", + "polit ical", + "euro pe", + "m ers", + "heav y", + "ðŁij į", + "fe bru", + "scot land", + "ou gh", + "b t", + "bo ss", + "sche du", + "spe ak", + "n ick", + "u red", + "in o", + "e k", + "ri sk", + "tor y", + "pres ents", + "b on", + "ru g", + "st ates", + "exhib ition", + "il o", + "m ill", + "br ought", + ": -)", + "tou ri", + "com e", + "offici ally", + "champi ons", + "do ors", + "re p", + "po se", + "ex tra", + "k ings", + "soc cer", + "squ ad", + "app lic", + "at a", + "some times", + "t ari", + "excell ent", + "ðŁĺ ĺ", + "stra ight", + "car ol", + "ri p", + "âĢ į", + "gra phic", + "m ol", + "elec tion", + "febru ary", + "as ons", + "l i", + "di r", + "m t", + "n ick", + "u su", + "m rs", + "com ics", + "inst itu", + "cor por", + "v i", + "ðŁĻ ı", + "tu ral", + "di se", + "ac ci", + "we are", + "am ong", + "sho pping", + "t ill", + "wh at", + "cha ir", + "sp an", + "chine se", + "innov ation", + "jo y", + "k it", + "cent ury", + "ob ama", + "ph ili", + "f c", + "re ach", + "c iti", + "ul ous", + "n on", + "d ang", + "happ ening", + "bur n", + "p el", + "or ange", + "d v", + "k ick", + "cla im", + "ing ham", + "ph y", + "no v", + "pod cast", + "wh i", + "ni ghts", + "ear lier", + "be ar", + "la h", + "exc iting", + "or a", + "gi ven", 
+ "s lo", + "memor ies", + "contin ues", + "produc t", + "gh o", + "c d", + "kno ws", + "ðŁİ ī", + "publi shed", + "discu ss", + "y ard", + "i phone", + "tri es", + "w all", + "fe b", + "are n", + "tru th", + "win ners", + "tu re", + "diti onal", + "milit ary", + "proble m", + "m and", + "do g", + "lo ss", + "c ric", + "can adi", + "ve ter", + "villa ge", + "\" ,", + "y r", + "un g", + "don ald", + "ag ing", + "bir ds", + "sci enti", + "le s", + "th is", + "regi on", + "tic al", + "itt en", + "il a", + "ðŁĺ İ", + "d ad", + "di am", + "abo ve", + "st ren", + "li t", + "p ir", + "la b", + "fo cus", + "bus y", + "d ur", + "app ly", + "s ma", + "auth or", + "ac i", + "exe cu", + "dom in", + "re la", + "jack son", + "at o", + "wash ington", + "ðŁĻ Į", + "k ill", + "popu lar", + "ce ment", + "ro ad", + "e ating", + "loc ation", + "v ent", + "ar re", + "n an", + "cu sto", + "advent ure", + "or din", + "spor t", + "ul t", + "lo ck", + "questi on", + "dri ver", + "land sc", + "on i", + "k ins", + "p d", + "jor dan", + "te red", + "k k", + "a f", + "chil d", + "s p", + "just in", + "en i", + "s elling", + "z o", + "wh it", + "bo ston", + "partic ip", + "sig ning", + "happ ened", + "he at", + "m am", + "dre ams", + "lo ws", + "gra ph", + "the day", + "head ing", + "br o", + "ble ssed", + "vi c", + "ve gas", + "h d", + "in ning", + "ro man", + "and ro", + "den ti", + "u se", + "c it", + "pro gress", + "writ er", + "bo b", + "ff s", + "gro wing", + "b ly", + "aw are", + "ex am", + "sp ent", + "be t", + "sc ore", + "bey ond", + "do cu", + "ad el", + "s f", + "cou ra", + "colla bor", + "in c", + "priv ate", + "bo at", + "* *", + "z one", + "p ha", + "b ill", + "to tal", + "plan ning", + "to wards", + "plac es", + "pre view", + "cre ative", + "dam n", + "ide as", + "se ems", + "po ten", + "say ing", + "di splay", + "s w", + "a qu", + "lou is", + "by e", + "li l", + "e mail", + "we stern", + "ger many", + "ell er", + "re s", + "f ant", + "ment ary", + "de als", + "ric hard", + "jer sey", + "stren g", + "ra d", + "pizz a", + "mon d", + "w are", + "l ac", + "g i", + "ar chi", + "c d", + "yel low", + "rec ently", + "re ach", + "à ¹", + "kitch en", + "desig ned", + "tr y", + "g al", + "restaur ant", + "at ure", + "w w", + "j as", + "l ma", + "ðŁij Į", + "pa in", + "av o", + "min ute", + "sch ol", + "ther ap", + "tic ket", + "d ry", + "jap an", + "diti ons", + "ter ri", + "sel ves", + "happ en", + "t up", + "ma g", + "cop y", + "sh er", + "free dom", + "f ile", + "speci ally", + "tor onto", + "lo ad", + "g ary", + "re y", + "answ er", + "lo y", + "cau ght", + "pri ze", + "u ne", + "fic ation", + "ni ger", + "sy d", + "tou ch", + "feat ure", + "jaz z", + "recor ds", + "him self", + "di sh", + "ro ber", + "spot ted", + "ma ster", + "wa ve", + "fin als", + "bu ll", + "for um", + "al d", + "re comm", + "ch a", + "a e", + "d oo", + "inst ru", + "tru ly", + "l g", + "in k", + "bro thers", + "de st", + "j im", + "m it", + "clo sed", + "is on", + "tri ed", + "s anta", + "af fe", + "w an", + "hor se", + "g row", + "camp us", + "rel ation", + "nati ve", + "jour n", + "go v", + "o ct", + "k it", + "b ound", + "part ner", + "re ma", + "crow d", + "! 
)", + "c alls", + "ra il", + "qu ali", + "solu tion", + "con test", + "con vers", + "sn ap", + "b ase", + "in iti", + "ta x", + "y e", + "ent repre", + "it or", + "constru ction", + "foo d", + "present ed", + "n ings", + "cli mate", + "k m", + "mo del", + "b j", + "blo ck", + "present ation", + "dre am", + "fi x", + "c alling", + "bus ine", + "con gress", + "under stand", + "we b", + "val ue", + "ï¸ı âĥ£", + "mex ico", + "it ely", + "ki m", + "char ity", + "ref lec", + "bl an", + "fl ying", + "anal y", + "famil ies", + "b and", + "reci pe", + "celebr ation", + "ac cep", + "ar y", + "to t", + "g b", + "intere sted", + "cap tain", + "âĻ ¥", + "ti p", + "ab sol", + "bra z", + "inve stig", + "o logy", + "de c", + "tru ck", + "ver ing", + "c lear", + "don t", + "go tta", + "ad vis", + "beg ins", + "ma ss", + "de scri", + "blo ck", + "k im", + "davi d", + "son gs", + "memor ial", + "feat ures", + "su stain", + "' .", + "gra b", + "jo se", + "v a", + "con serv", + "se ts", + "man chester", + "fi ghting", + "de gre", + "ag a", + "in d", + "sle ep", + "pos ition", + "ha ir", + "sig ns", + "pol icy", + "it o", + "al ert", + "st am", + "sp end", + "w y", + "absol ut", + "d m", + "anim al", + "my ster", + "success ful", + "proble ms", + "ro bo", + "k ay", + "gar den", + "p d", + "may or", + "d ale", + "t ol", + "off ers", + "vis iting", + "friend ly", + "tre es", + "offic er", + "accoun t", + "ke vin", + "ðŁij į", + "gi ant", + "contin u", + "con su", + "tr act", + "n fl", + "ðŁĺ Ĭ", + "h q", + "b ility", + "a ar", + "dis ney", + "te en", + "on ed", + "wh ite", + "tra iler", + "de dic", + "al one", + "absolut ely", + "dig ital", + "willi am", + "in ation", + "s wa", + "e e", + "enti re", + "ger man", + "ro ll", + "h its", + "co st", + "st ay", + "th a", + "ali ve", + "accor ding", + "co t", + "liter ally", + "her it", + "re ti", + "haha ha", + "exper i", + "li kes", + "g t", + "ste el", + "__ __", + "ch air", + "christi an", + "to wer", + "diffe rence", + "m d", + "tre ss", + "mi d", + "prin ce", + "afric an", + "fe der", + "foo t", + "car ri", + "ser ved", + "r ice", + "sh all", + "feat ured", + "ck er", + "rec ru", + "po e", + "sen se", + "ni fic", + "com edy", + "cont ent", + "f at", + "po sted", + "con tribu", + "tim ate", + "li ver", + "mb le", + "inter net", + "ag e", + "europe an", + "cl ing", + "gla d", + "ff ic", + "sc o", + "ak es", + "el le", + "ter min", + "ton y", + "p ale", + "col our", + "seri ous", + "pat ri", + "movi es", + "b m", + "professi onal", + "ad o", + "al u", + "br inging", + "f alls", + "isra el", + "ter m", + "langu age", + "bro ok", + "man n", + "commun ic", + "can not", + "ac ti", + "p he", + "y an", + "entrepre ne", + "tur key", + "log ical", + "lon g", + "ar m", + "ur s", + "work ers", + "ing ly", + "gg s", + "ri c", + "tu al", + "recei ve", + "op ens", + "ge ar", + "soci al", + "fe et", + "c king", + "ad ver", + "fin an", + "fe els", + "sp la", + "h r", + "ea ster", + "bra in", + "ã ģ", + "fi g", + "le dge", + "ne arly", + "prote ct", + "ma ssive", + "e th", + "aw a", + "ðŁĺ ģ", + "y rs", + "aware ness", + "defin itely", + "k n", + "imag ine", + "k u", + "syste ms", + "ðŁij ı", + "f as", + "li k", + "provi de", + "am o", + "disco ver", + "inf lu", + "ma ker", + "g az", + "fit ness", + "stre et", + "er s", + "te d", + "w c", + "ys is", + "pos itive", + "hel ped", + "que st", + "andre w", + "bra d", + "b in", + "hang ing", + "l ing", + "bri ght", + "se ction", + "ma ss", + "ðŁĻ Į", + "follow ers", + "ho sting", + "tem por", + "fla g", + "a ve", + "let ter", + "k ur", + 
"re qui", + "of ten", + "cry p", + "su ff", + "âļ ½", + "russi an", + "treat ment", + "al le", + "ha y", + "l an", + "keep ing", + "hol y", + "power ful", + "pre dic", + "fun d", + "e specially", + "windo w", + "je wel", + "il y", + "ðŁĴ ľ", + "gener ation", + "app a", + "seri ously", + "o d", + "ðŁĺĤðŁĺĤ ðŁĺĤ", + "cer ti", + "iri sh", + "ðŁij Į", + "mi ami", + "be th", + "v ity", + "se cu", + "che f", + "cri me", + "graph y", + "ma x", + "arti sts", + "re volu", + "gu ard", + "spee ch", + "u c", + "upd ates", + "fac es", + "st ant", + "chang ed", + "repor ts", + "low er", + "pe ar", + "n c", + "k il", + "loo ked", + "spe aker", + "s f", + "re spect", + "ok ay", + "oce an", + "s itting", + "architec ture", + "tra il", + "se at", + "i ra", + "le g", + "japan ese", + "d am", + "u lar", + "sw im", + "polit ics", + "finan cial", + "ol d", + "mou th", + "at temp", + "de stin", + "fi shing", + "atten tion", + "me m", + "chang es", + "deci ded", + "reli gi", + "g in", + "c av", + "z z", + "ad am", + "ma c", + "wr ite", + "beg in", + "sc ul", + "al ter", + "is s", + "ath on", + "imag es", + "m oo", + "jo ined", + "ðŁĺ ī", + "âŀ ¡ï¸ı", + "pas sed", + "mu sli", + "h ir", + "lar gest", + "cam er", + "com ic", + "gh ted", + "rug by", + "bur gh", + "gg ing", + "te sting", + "pre par", + "lau gh", + "al ed", + "impro ve", + "beli ev", + "adv ice", + "sha res", + "he art", + "tur ning", + "s b", + "t el", + "caf e", + "n es", + "dani el", + "pat ter", + "t z", + "se tt", + "par k", + "c and", + "st ick", + "happ ens", + "bri an", + "ne west", + "e pic", + "ad or", + "ki es", + "war ning", + "anim als", + "custo m", + "ar c", + "di an", + "gol d", + "cor e", + "t f", + "c ity", + "pan ts", + "re ality", + "con fi", + "in ju", + "fo x", + "gu il", + "k new", + "âĺ º", + "cor rec", + "itu de", + "d den", + ". 
#", + "re duc", + "pas s", + "f on", + "y a", + "ow ner", + "re turns", + "n c", + "e ast", + "ap ol", + "in sur", + "th o", + "si m", + "juni or", + "be e", + "ang el", + "att le", + "elec tric", + "hor ror", + "cra sh", + "e ye", + "pat h", + "sou thern", + "emplo ye", + "ge o", + "t an", + "ha z", + "r ally", + "ðŁı »", + "proper ty", + "was n", + "enjo yed", + "gre y", + "g as", + "bre w", + "nor thern", + "hol ding", + "g p", + "ta ke", + "ch art", + "ly n", + "dr ama", + "z o", + "pa id", + "throw back", + "cu p", + "discu ssion", + "down town", + "w ill", + "le w", + "b is", + "t ary", + "bre ad", + "up on", + "r ate", + "teach ers", + "it ation", + "anc ed", + "cy cle", + "choo se", + "d c", + "ir an", + "co w", + "da ve", + "ra ise", + "prin cess", + "fa ith", + "- >", + "indu stri", + "sp ain", + "guit ar", + "fac ts", + "m n", + "sp en", + "cour te", + "go tt", + "projec ts", + "au di", + "o sc", + "pe ter", + "s and", + "intere st", + "happ iness", + "ven ue", + "sol di", + "surpri se", + "poten tial", + "per io", + "custom er", + "i i", + "g ni", + "manu fac", + "e co", + "bro ken", + "sing er", + "vel s", + "wal es", + "hu s", + "in j", + "f our", + "tal ent", + "d ying", + "mat the", + "fil m", + "jo ining", + "s ell", + "j ar", + "lma o", + "sur ger", + "bb c", + "sour ces", + "au stin", + "ni k", + "char les", + "f am", + "prin ci", + "ange l", + "cas h", + "lo t", + "o red", + "pla ys", + "pl ate", + "don e", + "memor y", + "br ings", + "n ba", + "solu tions", + "teach ing", + "gr ace", + "cir cu", + "hel ps", + "foun der", + "mar y", + "expl ore", + "de cor", + "par ts", + "ch o", + "inte gr", + "ha u", + "is es", + "pu tting", + "in er", + "r it", + "v y", + "mic hel", + "blu es", + "every day", + "for ms", + "bi o", + "ye ar", + "p in", + "t ter", + "spr ing", + ") )", + "po t", + "al ing", + "perform ing", + "sh an", + "plan et", + "mus ical", + "head s", + "it alian", + "stru gg", + "âĢį âĻ", + "w ings", + "pu mp", + "h h", + "tr ou", + "a id", + "pri me", + "ear th", + "pa int", + "mon t", + "am y", + "bb c", + "fab ulous", + "fru it", + "andro id", + "bour ne", + "cere mony", + "enti al", + "? ?", + "deb ate", + "on ing", + "dra ft", + "sol ar", + "t x", + "j am", + "cor n", + "!! !!!", + "bro o", + "mil k", + "po sed", + "o hi", + "mo vement", + "b ren", + "part ner", + "p g", + "et te", + "ar ies", + "sh out", + "n g", + "leav ing", + "t ells", + "sen s", + "ta ste", + "kel ly", + "wor l", + "gy m", + "ric h", + "e gy", + "pi d", + "ma s", + "â Ĥ", + "courte sy", + "fran k", + "incre ase", + "wr itten", + "pp ers", + "re l", + "ha i", + "s as", + "s ound", + "tt i", + "w ich", + "ri ver", + ".. .\"", + "a g", + "fel low", + "ro me", + "sm all", + "gen cy", + "ic an", + "lux ury", + "pro of", + "me t", + "wild life", + "mom ents", + "ra ther", + "cor ner", + "com pe", + "canadi an", + "lik ely", + "therap y", + "li am", + "econom ic", + "indi e", + "rou te", + "fi ght", + "ho pe", + "se tting", + "ant ly", + "cro ss", + "fant asy", + "de e", + "sket ch", + "comp li", + "ym i", + "ru les", + "engine ering", + "fig ure", + "ro w", + ". 
,", + "f w", + "syd ney", + "w ou", + "t ation", + "dre w", + "us es", + "the re", + "sp read", + "struc ture", + "pat rick", + "appa rently", + "ro s", + "h ills", + "w we", + "ann y", + "com mission", + "di v", + "f ying", + "con sul", + "anal ysis", + "ex i", + "ten nis", + "vehic le", + "ðŁĺŃ ðŁĺŃ", + "as s", + "high ly", + "op ened", + "b ann", + "ðŁĴ Ļ", + "mp h", + "wi shing", + "v or", + "fi f", + "give away", + "r r", + "ra y", + "je ss", + "g at", + "ic ymi", + "x it", + "high est", + "yor k", + "pi e", + "invol ved", + "high er", + "ri e", + "mal ay", + "int elli", + "desp ite", + "che e", + "sar ah", + "be an", + "reco gni", + "ar sen", + "tal ented", + "pas sion", + "ic h", + "ab c", + "lead s", + "dise ase", + "v is", + "se c", + "pre senting", + "m illi", + "hol e", + "sho ts", + "de part", + "surger y", + "gov t", + "b in", + "du al", + "e vi", + "lon ger", + "ev ol", + "scre en", + "portra it", + "et c", + "lo se", + "ch at", + "p en", + "p i", + "om a", + "s ick", + "er c", + "compan ies", + "en try", + "plan e", + "gr y", + "ven e", + "liver pool", + "premi ere", + "sha red", + "a red", + "fil ms", + "ir a", + "holi days", + "cric ket", + "ici an", + "v ing", + ". )", + "ul timate", + "di vision", + "con duc", + "se pt", + "for ces", + "mon t", + "s mart", + "disa pp", + "sun shine", + "in d", + "b less", + "ma de", + "col ors", + "fran k", + "ir on", + "bott le", + "s go", + "m ood", + "j ason", + "er ic", + "bir th", + "te en", + "respon se", + "tar get", + "state ment", + "fe ar", + "th el", + "al um", + "ar ab", + "bl in", + "direc tion", + "ste ps", + "er ial", + "wor ked", + "at l", + "ðŁĴ ķ", + "fel t", + "pol i", + "scen es", + "hom es", + "b ell", + "e at", + "ate ful", + "t in", + "l ace", + "fol ks", + "p se", + "an n", + "wis dom", + "fa v", + "but ter", + "s r", + "are as", + "sm oo", + "bi z", + "dg es", + "app o", + "mo re", + "the m", + "effe ct", + "windo ws", + "sun ny", + "cap ital", + "tot ally", + "c ities", + "gr ant", + "mb ers", + "s low", + "au tu", + "il ities", + "w ro", + "ri sing", + "st ics", + "viol ence", + "i gh", + "qu ot", + "h it", + "t c", + "herit age", + "bu ff", + "ne s", + "z ar", + "den tial", + "ex ac", + "ed ge", + "de ep", + "aren a", + "be came", + "benef its", + "mar ks", + "mb er", + "a z", + "am es", + "pre ci", + "dra gon", + "re g", + "d ings", + "do s", + "ðŁĴ ª", + "n el", + "s ity", + "me al", + "di st", + "leg end", + "pur chase", + "pic al", + "st ick", + "f at", + "du ba", + "profe ss", + "car to", + "pro f", + "coun tries", + "respon si", + "se qu", + "fa b", + "tribu te", + "hon ored", + "prac tic", + "pur ple", + "an ton", + "pa red", + "t ough", + "summ er", + "environ ment", + "s ons", + "ðŁĻ ı", + "m ps", + "gi es", + "her oes", + "t elling", + "hen ry", + "f en", + "know ledge", + "Ģ ï¸ı", + "f r", + "ne g", + "u re", + "ac king", + "hear ts", + "s oo", + "hol lywood", + "ju mp", + "sau ce", + "schedu le", + "tur n", + "yo ga", + "cre ating", + "c ket", + "cre ek", + "â Ń", + "custom ers", + "ma dri", + "gu l", + "asse mb", + "moun t", + "c ell", + "to p", + "st al", + "dav is", + "t wi", + "sig n", + "premi er", + "iti ons", + "he aring", + "un k", + "pati ents", + "app ear", + "heav en", + "al ty", + "doc tor", + "a e", + "plat form", + "je ff", + "ðŁĵ ·", + "regi onal", + "bi d", + "box ing", + "ex ten", + "or ity", + "a w", + "w ise", + "il le", + "sever al", + "bi e", + "s itu", + "sy ria", + "âľ ħ", + "remin der", + "enter tain", + "li on", + "part ners", + "in n", + "ph ar", + "f au", + "pl s", + 
"expe cted", + "sug ar", + "deci sion", + "s b", + "ch ron", + "associ ation", + "leav es", + "vis ited", + "sh ap", + "ðŁĴ ĸ", + "fur ther", + "h ann", + "w i", + "run s", + "l er", + "fun ding", + "fil led", + ".. ....", + "tin y", + "han g", + "or g", + "co ol", + "se min", + "ðŁı Ĩ", + "spon s", + "nav y", + "sa int", + "dru g", + "d al", + "r oun", + "co vered", + "tra ditional", + "invest ment", + "de te", + "al ism", + "f low", + "n is", + "sun rise", + "fe at", + "f ted", + "we ird", + "je re", + "ve gan", + "medic ine", + "an o", + "ac cu", + "deli very", + "temp le", + "chang ing", + "wil son", + "phili pp", + "re fe", + "n d", + "is er", + "g ay", + "r and", + "ati ves", + "t ely", + "p and", + "intelli g", + "g are", + "am bas", + "de mon", + "commit tee", + "strate gy", + "refu ge", + "bud get", + "prote c", + "pi er", + "ex press", + "nom in", + "econom y", + "al low", + "ic on", + "gal ax", + "o h", + "indi vi", + "dem and", + "vir gin", + "lu ke", + "ali sts", + "man i", + "s mi", + "ju dge", + "ent y", + "mic hi", + "resul t", + "am ed", + "spe aks", + "' ,", + "hou ston", + "sh in", + "b ing", + "fl y", + "ch em", + "au to", + "v as", + "ge t", + "ar m", + "thank s", + "d in", + "gan g", + "x x", + "si on", + "loc ated", + "p l", + "jo sh", + "in fo", + "jo ins", + "adver ti", + "ot d", + "el d", + "si e", + "re asons", + "v ent", + "ðŁĩºðŁĩ ¸", + "â ł", + "convers ation", + "stu di", + "ðŁĶ¥ ðŁĶ¥", + "go s", + "s ounds", + "un it", + "mu sc", + "ge l", + "ack ed", + "pac i", + "co s", + "de re", + "u u", + "a o", + "la m", + "inspir ing", + "ar ms", + "tw are", + "mat ters", + "ad dic", + "du de", + "ex t", + "cri sis", + "b ath", + "me et", + "sing h", + "expe ct", + "del hi", + "resc ue", + "wor st", + "au g", + "shi pping", + "ser ving", + "st o", + "dar k", + "ac es", + "histor ic", + "landsc ape", + "desig ner", + "b illion", + "gr ateful", + "wa ke", + "e ve", + "m iller", + "hou sing", + "dy nam", + "is co", + "be ha", + "sh op", + "pr ou", + "e as", + "a sia", + "e ding", + "k on", + "depart ment", + "aw ar", + "mar ine", + "in ci", + "photograph er", + "ta pe", + "lo go", + "r ings", + "d it", + "-- --", + "vin yl", + "w c", + "vo ting", + "se ven", + "ambas sad", + "dal las", + "t u", + "com ment", + "k ra", + "b les", + "w ag", + "u d", + "au dio", + "stri ke", + "offici al", + "o ts", + "me tho", + "to ols", + "ra di", + "al an", + "hun t", + "wat ched", + "a ke", + "fa ke", + "drin king", + "mer ry", + "m l", + "b day", + "ri o", + "ni ke", + "c ant", + "re pe", + "co stu", + "mur der", + "ak ers", + "ch ers", + "ou ts", + "beg inning", + "so s", + "ad es", + "n in", + "not es", + "wro te", + "sol o", + "c i", + "li ghting", + "ur ban", + "bre xit", + "att end", + "shir ts", + "pla yo", + "ac tress", + "pl ic", + "stand ard", + "quot es", + "par ade", + "anci ent", + " ©", + "tur ing", + "re e", + "pri mary", + "fla sh", + "citi z", + "mat es", + "ste in", + "z i", + "clin ton", + "sk in", + "gen e", + "hu m", + "g ar", + "t le", + "y i", + "fo cu", + "de an", + "pl ants", + "cy ber", + "b u", + "om e", + "ho p", + "ad dress", + "ti x", + "gi fts", + "relation ship", + "sub scri", + "fe ed", + "exac tly", + "haw ks", + "ex o", + "stre ss", + "s n", + "arre sted", + "an e", + "sof tware", + "z ero", + "the me", + "mu mb", + "im migr", + "mi a", + "make up", + "ple asure", + "uni vers", + "har b", + "eng ine", + "ap er", + "r in", + "br a", + "institu te", + "le ather", + "al th", + "sing ing", + "co s", + "gh ty", + "me as", + "st ic", + "si de", + "insur 
ance", + "co t", + "pit ch", + "moun tains", + "cri min", + "su pre", + "valent ine", + "at er", + "wou ldn", + "sc ale", + "rel ated", + "re gar", + "star tup", + "pack ed", + "mi ke", + "week ly", + "p ts", + "coun t", + "ha r", + "gott en", + "min d", + "ber lin", + "con ditions", + "swit ch", + "cor n", + "sa ve", + "g li", + "emer gency", + "tun ed", + "sto ck", + "discu ssing", + "every body", + "s day", + "whe ther", + "wrest ling", + "ec es", + "gen der", + "ch en", + "ðŁij Ģ", + "madri d", + "mar athon", + "e gg", + "i er", + "th x", + "as king", + "kore a", + "wol f", + "ay a", + "g m", + "g au", + "at ory", + "v r", + "gra ss", + "k illing", + "b ble", + "ur o", + "un i", + "e th", + "sh ore", + "th en", + "re ale", + "bot tom", + "ex erc", + "k ar", + "or ies", + "ad ri", + "san ds", + "se x", + ". '", + "volunte ers", + "per form", + "par liam", + "inclu de", + "deli ghted", + "execu tive", + "fu el", + "kis s", + "ã ħ", + "char ge", + "h u", + "ca kes", + "ve t", + "g lu", + "agre e", + "pr ices", + "n au", + "h l", + "g ru", + "ra j", + "streng th", + "b ic", + "sp ending", + "al es", + "av en", + "b last", + ": (", + "yo f", + "nor mal", + "si x", + "qu ick", + "se a", + "d aw", + "mee ts", + "lo vers", + "upd ated", + "po tat", + "comple ted", + "coo k", + "opportun ities", + "p ure", + "organ ic", + "tem per", + "c am", + "avo id", + "par king", + "duba i", + "and o", + "di stri", + "to y", + "comple tely", + "don ald", + "tri al", + "bas s", + "b oun", + "back ground", + "v as", + "mar vel", + "lu m", + "ru s", + "t ool", + "com missi", + "throw back", + "fin ding", + "is lam", + "! ?", + "st op", + "e vil", + "or al", + "resi dents", + "i denti", + "o ak", + "ðŁİ ¶", + "l il", + "span ish", + "chap ter", + "sto pped", + "direc t", + "ho sted", + "pic ked", + "lab our", + "lew is", + "defen se", + "à ®", + "health care", + "wh is", + "mat h", + "pe ak", + "ra ised", + "fi x", + "bu ll", + "th ir", + "chel sea", + "fol k", + "tr e", + "can di", + "pau l", + "ei ther", + "ad am", + "poe try", + "jewel ry", + "ðŁ ¦", + "pr ay", + "Ø §", + "g c", + "o z", + "wi shes", + "fore ign", + "sun g", + "lear ned", + "en e", + "n ing", + "micha el", + "illu stration", + "legend ary", + "w av", + "b au", + "ðŁļ ¨", + "cal end", + "stre ets", + "â Ĩ", + "mon ster", + "bu ck", + "g r", + "scho ol", + "ba th", + "wa ste", + "ne ck", + "ha wa", + "be ach", + "re plac", + "jec t", + "on er", + "fac tory", + "coun t", + "ðŁĵ ¸", + "mor gan", + "der ing", + "se an", + "steph en", + "de p", + "no vel", + "vide os", + "ic al", + "press ure", + "arsen al", + "ex pre", + "ir s", + "tren ding", + "ss a", + "fla sh", + "re sear", + "thr ough", + "profess or", + "scul p", + "to s", + "gg ed", + "mm a", + "be e", + "a pe", + "hun ter", + "am i", + "he i", + "pla stic", + "bu cks", + "uni verse", + "le gen", + "niger ia", + "ple ased", + "ri s", + "thin ks", + "autu mn", + "i ds", + "d is", + "anth ony", + "ðŁı ½", + "ak ed", + "gla sses", + "fin ance", + "z er", + "k as", + "con tract", + "nu mbers", + "sh aw", + "partner ship", + "t il", + "laun ched", + "s al", + "victor ia", + "theat er", + "usu al", + "nam es", + "perio d", + "eli za", + "i th", + "bar cel", + "ro cks", + "bag s", + "mat e", + "distri bu", + "j on", + "di ffic", + "ali zed", + "cur ren", + "sco red", + "b ha", + "du blin", + "ro se", + "in ted", + "soli d", + "beha vi", + "wal ker", + "simp ly", + "garden s", + "head ed", + "in i", + "ohi o", + "we ap", + "f o", + "gl en", + "e state", + "ran dom", + "th under", + "thr u", + "k 
ill", + "jac ket", + "it i", + "entertain ment", + "thanks giving", + "ent al", + "en coura", + "el o", + "a ther", + "tan k", + "high lights", + "f ting", + "ru le", + "model s", + "bor der", + "bj p", + "hus band", + "in done", + "ken ya", + "be ars", + "al o", + "n inten", + "pi x", + "str o", + "or ders", + "sal ad", + "ro ads", + "n or", + "l ation", + "sop hi", + "ðŁı ¼", + "pi eces", + "b one", + "min s", + "inclu des", + "nu tr", + "phi l", + "s ent", + "fun dra", + "ga in", + "bor ough", + "n ad", + "mon day", + "activ ity", + "it ems", + "be coming", + "ken ne", + "de tro", + "car di", + "gue sts", + "u x", + "world wide", + "sever e", + "new s", + "thank ful", + "fic tion", + "ve ge", + "m all", + "si an", + "er al", + "inj ury", + "le e", + "men u", + "danc ing", + "scot ti", + "exam ple", + "( #", + "na i", + "studi os", + "ba i", + "ðŁĴ Ľ", + "j av", + "diam ond", + "vin ce", + "ric k", + "prote ction", + "lin col", + "cham ps", + "appro ach", + "d ar", + "m ile", + "clou ds", + "je ff", + "in fin", + "l ers", + "p les", + "pe ace", + "go p", + "âĻ ¡", + "tech n", + "str a", + "a verage", + "ef fort", + "introduc ing", + "di versity", + "austr alian", + "am p", + "boo st", + "s ke", + "pati ent", + "appreci ate", + "ici ans", + "pu r", + "f ell", + "woo ds", + "illu str", + "ðŁ ĸ", + "ag ency", + "ac tions", + "brit ain", + "under way", + "se attle", + "el and", + "ag o", + "f ill", + "stre aming", + "pro test", + "challeng es", + "ky o", + "et sy", + "coo king", + "exper t", + "ru ss", + "rain bow", + "commer cial", + "sp in", + "be ats", + "c ry", + "val u", + "el i", + "th row", + "gr ams", + "le vels", + "michi gan", + "c ad", + "ador able", + "const itu", + "w s", + "pu b", + "mid night", + "th at", + "net fli", + "braz il", + "die go", + "regu lar", + "jo y", + "âĤ ¬", + "li qu", + "ea stern", + "k ni", + "fl at", + "n p", + "bro wn", + "w er", + "se y", + "tt ers", + "ac ting", + "v anc", + "cy cling", + "program me", + "ra w", + "comple x", + "tat too", + "throwback thursday", + "se ssions", + "ro oms", + "si ght", + "speci es", + "bom b", + "lau gh", + "ke eps", + "mo on", + "offic ers", + "con ver", + "t r", + "ha sh", + "t ack", + "ri ous", + "ad ap", + "a j", + "reco gn", + "ex po", + "sug ge", + "confir med", + "rol ling", + "dre ssing", + "ic t", + "fri day", + "ph ones", + "ri dge", + "con cept", + "ro y", + "ke ys", + "ef for", + "c ate", + "k ne", + "ev en", + "l ay", + "commun ities", + "mo d", + "n az", + "every where", + "al ab", + "bit coin", + "ban ks", + "out door", + "feder al", + "sto res", + "h p", + "c al", + "m ely", + "sig nific", + "be ar", + "re public", + "clo ser", + "al lah", + "pic k", + "x d", + "pal ace", + "ch ill", + "b am", + "er ous", + "un a", + "al len", + "out standing", + "olym pic", + "supp ly", + "fi gu", + "v au", + "l p", + "char lie", + "un es", + "> >>", + "legen ds", + "ici al", + "co ast", + "benef it", + "mul ti", + "f its", + "far mers", + "am ount", + "si sters", + "har ve", + "hon ey", + "que en", + "b ers", + "pl ann", + "âŃ IJ", + "m u", + "barcel ona", + "al ber", + "stat us", + "re main", + "ex tra", + "c andy", + "vi ous", + "âľ Į", + "o v", + "warri ors", + "-- >", + "ju mp", + "am ar", + "x mas", + "stu dies", + "i ors", + "k or", + "don ate", + "pre p", + "fi sh", + "im a", + "pain ted", + "ad mini", + "co splay", + "spor ts", + "dro ps", + "fi ghter", + "evi dence", + "ðŁĴ ª", + "la ke", + "ro b", + "cine ma", + "pro file", + "à ±", + "stan ds", + "leg acy", + "sh ape", + "ro of", + "ci vil", + "i ans", + "sy 
l", + "sh am", + "vo ted", + "re tail", + "ph illi", + "li sted", + "du ty", + "n b", + "th es", + "f are", + "au ction", + "ffici al", + "stor ms", + "d p", + "l oun", + "sh ops", + "al y", + "ani me", + "multi ple", + "ðŁĺį ðŁĺį", + "psy cho", + "je an", + "ap art", + "candi date", + "gg y", + "con f", + "jose ph", + "w ick", + "me at", + "fr ame", + "c l", + "for got", + "ph y", + "f ing", + "li ed", + "re p", + "se ed", + "f all", + "u fc", + "nu t", + "lin d", + "mo de", + "fiel ds", + "en ce", + "s ley", + "ðŁ¤ Ķ", + "ch ill", + "follow ed", + "announ ces", + "cor ru", + "tro phy", + "them selves", + "ac le", + "al du", + "k ong", + "l on", + "s v", + "bro ke", + "ander son", + "ta i", + "stor y", + "tempor ary", + "activ ities", + "k ati", + "ari z", + "cry stal", + "spo ke", + "extre mely", + "tra ding", + "ðŁĴ ļ", + "à ¼", + "in ch", + "ed in", + "out fit", + "equ ip", + "ma di", + "form ed", + "be ef", + "po p", + "ti ger", + "this day", + "ti red", + "neigh b", + "re tro", + "is a", + "un t", + "t as", + "kan sas", + "de st", + "secon ds", + "ta y", + "hur ric", + "o u", + "galax y", + "dad dy", + "bro w", + "bur ger", + "en ced", + "de sk", + "ac cur", + "secre tary", + "el ite", + "k ab", + "ch in", + "touri sm", + "bud dy", + "ici de", + "dre ssed", + "u d", + "vac ation", + "che ers", + "com for", + "charac ters", + "j et", + "bu ying", + "l ins", + "n ap", + "reale state", + "li e", + "af c", + "i ii", + "f ame", + "n r", + "b at", + "ag ent", + "ma kers", + "âĢ ¼", + "sec tor", + "op ti", + "le on", + "di et", + "pra yer", + "hi p", + "mi r", + "le x", + "br y", + "an a", + "pas sing", + "w en", + "reco very", + "ak i", + "po pul", + "res ort", + "mar ia", + "stu ck", + "read s", + "ti er", + "perfe c", + "netfli x", + "p oo", + "cham p", + "o c", + "re duce", + "we red", + "comm ents", + "cla im", + "acci dent", + "s ag", + "h ack", + "sal t", + "kin da", + "k iller", + "i os", + "z y", + "ex change", + "lec ture", + "eng er", + "ic king", + "t au", + "reve als", + "pri son", + "z om", + "gh an", + "u l", + "jour nal", + "i ot", + "tr in", + "jon a", + "govern or", + "cap e", + "quar ter", + "spec tive", + "impre ssive", + "bab ies", + "t x", + "m ill", + "o y", + "har ri", + "jo int", + "su e", + "collabor ation", + "tren d", + "revolu tion", + "re new", + "alum ni", + "ge tt", + "sh ell", + "sun day", + "ent u", + "ni c", + "donald trump", + "block chain", + "paci fic", + "expla ins", + "sp y", + "ad voc", + "par adi", + "to f", + "star ring", + "p av", + "fe ed", + "br ac", + "smo ke", + "ham p", + "y am", + "to kyo", + "si mon", + "d h", + "e ffici", + "phys ical", + "n j", + "ell i", + "s low", + "gradu ate", + "americ ans", + "ti fy", + "f red", + "ap ore", + "fin ds", + "rob in", + "we t", + "not ice", + "se mi", + "un ve", + "k om", + "pil ot", + "scre ening", + "da ily", + "ðŁĴ Ĺ", + "roy al", + "sp a", + "vo tes", + "n ag", + "wh ate", + "att ending", + "exper im", + "ad dition", + "k ate", + "sto l", + "m ali", + "foo t", + "chri st", + "ch an", + "de e", + "lic en", + "glo bal", + "mo ore", + "ti a", + "bri gh", + "myster y", + "y ay", + "âĿ¤ï¸ı âĿ¤ï¸ı", + "cre ati", + "me chan", + "clo ck", + "di c", + "âĢ Ķ", + "pp er", + "al ph", + "through out", + "al low", + "re sources", + "selec tion", + "ham il", + "bb q", + "aa aa", + "virgin ia", + "dis ney", + "en g", + "so red", + "drin ks", + "f ancy", + "consi der", + "end a", + "jan e", + "hand made", + "du l", + "on tari", + "i us", + "s ville", + "color ado", + "whate ver", + "whe el", + "promis e", + "ne 
ver", + "desig ns", + "ab ly", + "sex ual", + "vanc ou", + "at i", + "con vention", + "cul tural", + "sing apore", + "pro mo", + "load ed", + "gla sgo", + "pp l", + "n oo", + "ke e", + "ste m", + "men tion", + "i do", + "cru ise", + "ri ding", + "be comes", + "be y", + "âļ½ ï¸ı", + "tw in", + "dedic ated", + "na sh", + "de si", + "work out", + "jen ni", + "i v", + "grou ps", + "rela x", + "pho eni", + "li ft", + "mix ed", + "m ck", + "p c", + "mu st", + "me tro", + "ci es", + "y ar", + "a im", + "ang er", + "i e", + "rec y", + "marri ed", + "dro pped", + "eng ag", + "le st", + "ambassad or", + "op h", + "de s", + "w ick", + "assi stant", + "nat ur", + "fa il", + "l td", + "shor t", + "k ap", + "sha w", + "bi gger", + "rema ins", + "crit ical", + "sur vey", + "co verage", + "er son", + "win d", + "n b", + "bil ly", + "let es", + "ac ts", + "jim my", + "at lan", + "al and", + "t c", + "import ance", + "dam age", + "f g", + "stor age", + "tw t", + "bon d", + "bal ance", + "cr ying", + "pu ppy", + "vo te", + "pu sh", + "ðŁĴ ľ", + "pol y", + "me l", + "lon don", + "terr ori", + "effec tive", + "corpor ate", + "atl anta", + "jac o", + "nas a", + "gre ek", + "sen ate", + "i sh", + "ev a", + "intellig ence", + "effor ts", + "al co", + "k un", + "h all", + "di ag", + "claim s", + "fir st", + "h b", + "ba e", + "v ul", + "pu ll", + " °", + "se par", + "spe ed", + "vic ti", + "on thisday", + "audi ence", + "r ates", + "te ach", + "fil ming", + "bu sh", + "son g", + "y um", + "br un", + "ra ine", + "aw a", + "par ks", + "ð Ŀ", + "ra bb", + "ra ch", + "ra id", + "reach ed", + "ra il", + "mo ves", + "selec ted", + "fr i", + "ra ising", + "om y", + "st ones", + "su k", + "franc isco", + "cas es", + "cap it", + "con fu", + "w tf", + "po ke", + "equip ment", + "gre g", + "ess ential", + "off ering", + "ne x", + "pi es", + "be c", + "cre ation", + "chair man", + "cro wn", + "w al", + "john ny", + "shi ft", + "ne ck", + "ban g", + "bir d", + "ðŁĺ ı", + "du ck", + "re serve", + "de pu", + "ma sters", + "over all", + "no tic", + "ju ice", + "sne ak", + "che er", + "cla sses", + "eag les", + "n ca", + "car pet", + "ci vil", + "coach es", + "har ris", + "u ps", + "b alls", + "dec or", + "mar tin", + "ro s", + "v ice", + "announ cement", + "who se", + "ti gers", + "ste red", + "c ts", + "dr am", + "ste el", + "youn g", + "inst all", + "supp o", + "recor ding", + "de ck", + "se ats", + "l der", + "ang le", + "bo t", + "sty les", + "elec tions", + "for tun", + "n ab", + "but ter", + "ari an", + "ka sh", + "in ner", + "ou red", + "be ast", + "we i", + "ic onic", + "exper ts", + "ne cess", + "b eng", + "jam es", + "li a", + "gre ece", + "ðŁĵ ·", + "ðŁĺ ģ", + "good bye", + "m itch", + "tw ice", + "mumb ai", + "ste am", + "ru sh", + "med al", + "ne tt", + "fashi on", + "t ar", + "r s", + "sav ing", + "ric ul", + "l m", + "sleep ing", + "brook lyn", + "mis s", + "sen ding", + "disco vered", + "sp here", + "of theday", + "k icks", + "missi ons", + "w right", + "er n", + "ght ly", + "i ous", + "mel bourne", + "star tu", + "mo ved", + "car ry", + "d ak", + "ag ues", + "bel gi", + "e ma", + "way ne", + "do t", + "er ie", + "pe l", + "it unes", + "matthe w", + "no body", + "est ab", + "cal m", + "win ds", + "lu c", + "prep are", + "tren ds", + "exerc ise", + "adv ant", + "ðŁĴ ¯", + "athle tics", + "app s", + "c tions", + "adv ance", + "laun ches", + "litt le", + "real donaldtrump", + "eliza beth", + "carol ina", + "hu b", + "hi dden", + "n w", + "us er", + "pol l", + "great er", + "mo st", + "f ed", + "p at", + "life style", 
+ "s ati", + "sco res", + "marri age", + "l r", + "aven ue", + "de serve", + "ri f", + "ðŁ Ĺ", + "wat ch", + "champion ships", + "gr ay", + "en ni", + "cot ton", + "g om", + "whe re", + "pack age", + "su m", + "ab solu", + "new ly", + "foo ds", + "ty ler", + "assemb ly", + "musli m", + "ban k", + "re memb", + "op tions", + "produc er", + "land o", + "fun ds", + "u pper", + "shad ow", + "pro gre", + "co p", + "ing e", + "leg s", + "detro it", + "hill ary", + "jo se", + "gi ants", + "sou p", + "sustain able", + "t us", + "clo thes", + "roc king", + "n z", + "min ne", + "mat eri", + "bru ce", + "ear t", + "ca sting", + "independ ent", + "thou sands", + "ta h", + "de cl", + "veter ans", + "li ons", + "wra p", + "âĢ ¦", + "de ss", + "bl ing", + "st ine", + "e ggs", + "o on", + "clo sing", + "z ay", + "at t", + "bac on", + "fa il", + "ariz ona", + "de pre", + "gho st", + "new sp", + "w ers", + "vi p", + "li ked", + "id ent", + "volunte er", + "ad ult", + "pu pp", + "cir cle", + "mat erial", + "degre e", + "gro wn", + "boo m", + "calend ar", + "su r", + "vie wing", + "ath letes", + "ch and", + "re ll", + "asi an", + "en tr", + "vol ley", + "victi ms", + "bo dy", + "m ama", + "trans fer", + "ge ek", + "in dic", + "sav ed", + "ma i", + "g ent", + "it s", + "loun ge", + "k ol", + "the ory", + "situ ation", + "is lands", + "ar th", + "z oo", + "floo d", + "vi ously", + "show ed", + "parliam ent", + "ch ev", + "el ine", + "at trac", + "ab ad", + "ta il", + "h rs", + "lu s", + "por tu", + "gor y", + "provi des", + "to ys", + "de ath", + "in fe", + "an ce", + "g le", + "li am", + "lo ver", + "hu d", + "dv d", + "reve aled", + "g w", + "re ment", + "ca the", + "l ying", + "ra dio", + "der by", + "stor s", + "che mi", + "hosp it", + "âľ ¨", + "' :", + "ilo ve", + "le mon", + "re public", + "s ni", + "ne ss", + "do or", + "re action", + "pre gn", + "fla v", + "schol ar", + "spo tify", + "is ation", + "vis ual", + "aw are", + "spon sored", + "jo ke", + "less ons", + "leg is", + "lo ck", + "si mil", + "ðŁĺ ĭ", + "kin d", + "la y", + "ma h", + "ho ping", + "vancou ver", + "as er", + "clean ing", + "gal a", + "thre at", + "la p", + "ach e", + "ro mance", + "ex pen", + "re post", + "z am", + "e pi", + "mir ror", + "o ak", + "ad ul", + "bat man", + "s lu", + "l c", + "vie wed", + "re views", + "d ates", + "indone sia", + "acti vi", + "off en", + "lea f", + "i si", + "ag ricul", + "costu me", + "s ites", + "spir itu", + "appear ance", + "ir y", + "st air", + "applic ation", + "spec tac", + "ic ity", + "ski es", + "hand le", + "pun k", + "paradi se", + "t n", + "de al", + "provi ding", + "do c", + "recei ving", + "bre w", + "micro soft", + "à ¶", + "fer r", + "me tro", + "th ail", + "y um", + "car ter", + "à ¡", + "gent le", + "bre aks", + "coo per", + "show case", + "cu tting", + "egy pt", + "bab y", + "semin ar", + "gl ori", + "ss on", + "fa ve", + "re hear", + "lo tte", + "la dy", + "al as", + "pre p", + "deli vered", + "nu clear", + "ir o", + "engag ement", + "at ta", + "con ven", + "z an", + "gl ory", + "hol ds", + "busine sses", + "str ange", + "sch e", + "it self", + "gra d", + "mar kets", + "f alling", + "st ats", + "ge on", + "bu dd", + "li s", + "she et", + "thi si", + "co lo", + "deser t", + "regi stration", + "ig n", + "expla in", + "inter ior", + "la ws", + "writ ers", + "spr ings", + "k r", + "fri ed", + "blo om", + "inf ra", + "a o", + "cre d", + "pa st", + "line up", + "bo o", + "bre a", + "boo ts", + "celebr ity", + "att acks", + "bro ok", + "ev es", + "ex cu", + "cher ry", + "oo p", + "fas cin", 
+ "boy friend", + "se as", + "n ine", + "effec ts", + "po wered", + "k ha", + "ðŁĺ Ģ", + "sh out", + "con dition", + "i j", + "her o", + "enter pri", + "win ter", + "applic ations", + "sho e", + "g el", + "batt le", + "pro grams", + "w art", + "ðŁĴ ¥", + "ra p", + "ho l", + "dang erous", + "di a", + "coun ter", + "ric s", + "i or", + "k night", + "co at", + "emo tional", + "at ures", + "d as", + "whe el", + "fore cast", + "tran sport", + "glasgo w", + "king dom", + "prepar ing", + "im medi", + "ff in", + "awar ded", + "prin ting", + "ro man", + "fight ers", + "any more", + "bel t", + "p ine", + "win e", + "x i", + "employe es", + "logi es", + "al led", + "de mo", + "birth day", + "ange les", + "lo g", + "dri vers", + "neck lace", + "k ath", + "s it", + "athle te", + "ef s", + "s burg", + "pur pose", + "resi stance", + "rele ases", + "t is", + "vari ous", + "deli ver", + "ch al", + "s anc", + "opp o", + "cra w", + "neu ro", + "dr a", + "suppor ters", + "sna p", + "diffic ult", + "swe ar", + "logi st", + "pa th", + "attemp t", + "à ¥", + "swim ming", + "ste ve", + "hur t", + "inclu ded", + "b ap", + "wa re", + "ðŁĴ ĭ", + "end ers", + "ja ke", + "le eds", + "cli mb", + "l b", + "im ple", + "li sa", + "clo thing", + "ðŁĺ İ", + "d t", + "com pla", + "sw ing", + "stra w", + "v als", + "k le", + "us ers", + "stor m", + "cu ts", + "ontari o", + "p an", + "hand some", + "i ow", + "ar gu", + "chec king", + "scotti sh", + "Ķ ï¸ı", + "si er", + "em ma", + "po d", + "patter n", + "de sh", + "en h", + "ed ward", + "t ing", + "k h", + "hal f", + "lincol n", + "mo ther", + "al leg", + "r c", + "volley ball", + "d n", + "g ay", + "all y", + "le ton", + "gro ve", + "l oud", + "adv anced", + "re spec", + "cli ent", + "supre me", + "thail and", + "ho w", + "gi g", + "to i", + "do t", + "dol lar", + "ðŁij ĩ", + "p it", + "r b", + "h n", + "produc ed", + "gg ers", + "âĨ Ĵ", + "ml b", + "can vas", + "fin eart", + "us d", + "in the", + "p son", + "actu al", + "s l", + "t b", + "ip ad", + "en sure", + "u mb", + "w d", + "sk a", + "mar s", + "k end", + "f eli", + "th ing", + "count down", + "absolu te", + "r out", + "dra l", + "p y", + "inju red", + "min t", + "hun ting", + "mm er", + "s age", + "li gh", + "ac ity", + "ex pan", + "mur ray", + "ar o", + "sec ure", + "four th", + "eag le", + "reli ef", + "st akes", + "industri al", + "clar k", + "under standing", + "see m", + "pl enty", + "sil ver", + "cla u", + "thre at", + "sa il", + "pro duce", + "ab str", + "is is", + "b r", + "eng ers", + "wor ry", + "bie ber", + "s j", + "just in", + "reali ze", + "ky le", + "esp n", + "fil ter", + "s ch", + "ty pes", + "game dev", + "d ing", + "twit ter", + "soldi ers", + "p om", + "car bon", + "y ards", + "child hood", + "ri ed", + "ke l", + "ele ph", + "t ons", + "key note", + "qui et", + "wi re", + "po sting", + "is sa", + "repre senting", + "bac ks", + "alex ander", + "celebr ates", + "ta ining", + "| |", + "ch or", + "esc ape", + "pe ek", + "ti ves", + "fiel d", + "ssi e", + "im pac", + "spons or", + "r c", + "we dd", + "cann ab", + "si des", + "trac ks", + "com par", + "con trac", + "techn ical", + "bi ble", + "expl oring", + "sh are", + "tra v", + "n ate", + "ill o", + "sc ru", + "m ingham", + "gun s", + "of the", + "sh ame", + "se es", + "ca tho", + "ac cess", + "ce l", + "repor ted", + " »", + "mari o", + "p ad", + "hope fully", + "ou se", + "y on", + "disapp o", + "ol o", + "p itt", + "pa c", + "ga p", + "cru sh", + "s g", + "k le", + "ge m", + "emp ire", + "dir ty", + "a is", + "avi ation", + "ze aland", + "fac 
ing", + "high way", + "d anny", + "spi der", + "ot ta", + "ðŁĺ Ħ", + "w y", + "col ours", + "in fl", + "co sts", + "olym pics", + "au s", + "h m", + "ho ward", + "pas ses", + "lau ren", + "mu sh", + "op in", + "r ho", + "disc ount", + "oper ation", + "em ily", + "mm m", + "cham ber", + "d il", + "to yo", + "shi p", + "sam u", + "pic tured", + "un ic", + "po l", + "keep er", + "carto on", + "st en", + "ig nor", + "n ations", + "n l", + "ta sting", + "deta il", + "offici als", + "mo tor", + "franc is", + "ed itor", + "ðŁij ĩ", + "pe ts", + "rang ers", + "t g", + "r n", + "w ri", + "nic hol", + "i se", + "spo ts", + "ani e", + "chec k", + "tri ple", + "ku mar", + "spe akers", + "ic ing", + "pre pared", + "ab use", + "friend ship", + "mon th", + "swi m", + "air e", + "sc ent", + "hamil ton", + "indi an", + "j es", + "yum my", + "te ars", + "da wn", + "i zed", + "worl ds", + "ðŁ ķ", + "b illi", + "st one", + "n hs", + "ba sic", + "p or", + "st le", + "ir on", + "ol der", + "cle vel", + "e ing", + "ðŁĺįðŁĺį ðŁĺį", + "prin ts", + "fir m", + "air craft", + "fin est", + "devel op", + "aar on", + "t z", + "gra ham", + "own ers", + "fo li", + "less on", + "qu es", + "bab e", + "cra ft", + "ph en", + "ju n", + "bir mingham", + "v ine", + "ll er", + "i an", + "fineart america", + "evol u", + "st ab", + "im per", + "war d", + "com ic", + "wi z", + "inv ited", + "du ke", + "mat ch", + "por ts", + "ro ger", + "diag no", + "ke pt", + "te st", + "vis u", + "r hy", + "so c", + "to x", + "b aker", + "sur face", + "co vers", + "man s", + "b its", + "x box", + "ff le", + "n an", + "gar d", + "h art", + "wat ers", + "v illa", + "re tro", + "light ning", + "catho lic", + "democr acy", + "neigh bor", + "pen n", + "cr an", + "jona than", + "la ura", + "vi bes", + "su b", + "coach ing", + "clear ly", + "uk raine", + "bra ve", + "commit ment", + "t all", + "mar t", + "ra p", + "mo di", + "sco tt", + "bro s", + "show er", + "ðŁı ¾", + "âĺº ï¸ı", + "cou sin", + "appro ach", + "br e", + "com pos", + "hil ari", + "phil ly", + "g ad", + "quick ly", + "ri an", + "t m", + "vir tual", + "hou ses", + "k t", + "phoeni x", + "w ire", + "ff y", + "b unch", + "anc ing", + "tal e", + "snap chat", + "star ter", + "h t", + "k icking", + "ap art", + "th y", + ") !", + "blo gger", + "it z", + "com fort", + "ang els", + "w ash", + "\" :", + "ar gent", + "re quest", + "hon est", + "mi ghty", + "bo bby", + "k g", + "ro l", + "thou se", + "ex po", + "h c", + "tab les", + "mag ical", + "po sts", + "de m", + "n w", + "or lando", + "ab er", + "* **", + "ðŁĺ ľ", + "environ mental", + "trans formation", + "mi le", + "w ic", + "hir ing", + "ma ine", + "bo ar", + "r ying", + "ti s", + "nit ure", + "twee ted", + "anton io", + "opin ion", + "fin ale", + "di y", + "f is", + "th in", + "trou ble", + "le go", + "fi les", + "qu art", + "sp a", + "curren cy", + "cli mate", + "fan art", + "rail way", + "sp ace", + "ban ds", + "dani el", + "mo tion", + "l eng", + "hol der", + "oc cu", + "mar ie", + "cathe dral", + "bu zz", + "bi es", + "nas car", + "bm w", + "bat tery", + "char lotte", + "doc tor", + "zz le", + "se ven", + "in san", + "d dy", + "st en", + "lab or", + "thr illed", + "se ren", + "docu mentary", + "wav es", + "cer tain", + "can did", + "allow ed", + "ninten do", + "star wars", + "ta p", + "home made", + "d les", + "ther ing", + "bre e", + "emp ty", + "pi ano", + "pos iti", + "coun try", + "por k", + "pu ts", + "per ry", + "m atic", + "spot light", + "ti st", + "or ities", + "we alth", + "c p", + "bar bar", + "commit ted", + "as sau", + "pro 
fit", + "e ight", + "hu l", + "fini shing", + "run ner", + "ss o", + "insp ec", + "char ged", + "christ op", + "lo sing", + "co al", + "ho o", + "ele v", + "de le", + "mo ham", + "don ation", + "c able", + "clin ic", + "j in", + "manag ed", + "ter ing", + "â ¬", + "ur ban", + "depu ty", + "bb er", + "bur n", + "acade mic", + "o tt", + "sta ke", + "it er", + "sto wn", + "ack er", + "advent ures", + "ad ams", + "gre g", + "pro m", + "vo l", + "ac qu", + "con gre", + "pa int", + "citiz ens", + "c all", + "af ford", + "v c", + "as ks", + "the tic", + "independ ence", + "â Ľ", + "h itting", + "bl on", + "fu ture", + "â ı", + "in no", + "gen e", + "bo ards", + "di stance", + "se t", + "re mem", + "th al", + "pre vent", + "l ang", + "ob jec", + "su sp", + "mat t", + "in duc", + "bor o", + "pi one", + "re di", + "vir tu", + "prin ted", + "sco pe", + "shar k", + "suc ce", + "a stron", + "il legal", + "j ag", + "c ting", + "ine e", + "at o", + "rob in", + "nutr ition", + "b f", + "du tch", + "b n", + "fur niture", + "for gotten", + "at ar", + "ru p", + "hy per", + "bran ch", + "communic ation", + "degre es", + "on ia", + "un cle", + "promo te", + "or che", + "wi i", + "j s", + "but ton", + "ma jor", + "c bs", + "bri stol", + "premi um", + "ordin ary", + "e dit", + "m g", + "we ed", + "st even", + ": '", + "gu s", + "te s", + "cap tured", + "dru gs", + "do w", + "wr ites", + "bi shop", + "whe els", + "ali zation", + "disco very", + "w r", + "rach el", + "ne il", + "hy dr", + "cu test", + "entreprene ur", + "kore an", + "ore gon", + "ul ty", + "perfec tly", + "suppor ted", + "histor ical", + "t wins", + "ell y", + "we l", + "de vil", + "in come", + "scienti sts", + "de leg", + "h en", + "on i", + "ic ed", + "gi o", + "cur ry", + "reve al", + "e g", + "buff alo", + "n ol", + "op era", + "camer on", + "haha haha", + "j ab", + "gradu ation", + "cra ig", + "r al", + "i f", + "organi zation", + "le ge", + "g ang", + "su d", + "edin burgh", + "l ack", + "fli es", + "g ate", + "thr ones", + "q b", + "the real", + "e leg", + "pp in", + "c les", + "jam ie", + "tn am", + "cryp to", + "ou l", + "p ages", + "a se", + "roo ts", + "stu pid", + "a did", + "boo t", + "prote in", + "s ap", + "si um", + "su s", + "end or", + "fun ction", + "don t", + "en na", + "ch y", + "squ e", + "wor ker", + "m tv", + "e a", + "k an", + "ðŁĴ ļ", + "mu s", + "professi on", + "t to", + "oper ations", + "al lo", + "c tor", + "inv ite", + "sc and", + "ou th", + "z im", + "lin ks", + "cli ents", + "sam sung", + "discu sses", + "n ell", + "ul tra", + "some where", + "ste wart", + "ine t", + "de z", + "b out", + "fac tor", + "ti an", + "tr ans", + "jere my", + "d b", + "ðŁĩ ¬", + "or n", + "develop ing", + "spo l", + "coo per", + "ma u", + "rememb ering", + "tre k", + "famil y", + "sen iors", + "fo ster", + "att ended", + "w ing", + "trans form", + "ele mentary", + "hor iz", + "li sting", + "malay sia", + "it ch", + "warri or", + "philipp ines", + "russ ell", + "m end", + "initi ative", + "cre ep", + "to ps", + "br iti", + "a ur", + "shar p", + "adverti sing", + "ug ly", + "achi ev", + "materi als", + "bu g", + "dev ice", + "bon us", + "fac ility", + "col e", + "nh l", + "y as", + "plann ed", + "pol e", + "excell ence", + "tr ick", + "con fl", + "r p", + "achi eve", + "lo an", + "swa g", + "jess ica", + "ho we", + "p our", + "sc u", + "z oo", + "r ated", + "dre sses", + "re bel", + "mex ican", + "co ordin", + "me ss", + "atlan tic", + "t l", + "osc ar", + "wal ks", + "phar mac", + "investig ation", + "... 
#", + "cc i", + "eas ily", + "monday motivation", + "y ment", + "au ti", + "for ced", + "ar med", + "colle agues", + "pap ers", + "pro per", + "sha ke", + "bu c", + "le an", + "exhi bit", + "e vement", + "co tt", + "bi z", + "sp er", + "k ent", + "sw an", + "/ @", + "girl friend", + "haw k", + "âĺ Ģï¸ı", + "mon o", + "ðŁĴ Ľ", + "stat ue", + "ðŁĺ ³", + "ra s", + "te eth", + "preci ous", + "t ile", + "p am", + "swi ft", + "v ali", + "no se", + "dr unk", + "experi ences", + "come back", + "gen ius", + "wor se", + "sh ef", + "ra d", + "ed it", + "hon our", + "au spol", + "lar ry", + "h ire", + "gor don", + "achi evement", + ".... ....", + "su icide", + "alter native", + "su p", + "sur roun", + "sha ke", + "ke ith", + "pe pper", + "tur k", + "crimin al", + "be ck", + "su m", + "w alls", + "cn n", + "an tic", + "of fe", + "col li", + "win es", + "high light", + "hawa ii", + "emb ar", + "l fc", + "ðŁĩ ®", + "m v", + "> >", + "at mo", + "wor d", + "car l", + "shout out", + "bre wing", + "ì Ŀ", + "do f", + "s ic", + "hot test", + "col on", + "hh h", + "shu t", + "low ing", + "volu me", + "apart ment", + "agre ement", + "de stro", + "we e", + "religi ous", + "iow a", + "ro d", + "land ing", + "re present", + "ðŁĵ· :", + "la s", + "usu ally", + "h l", + "c ac", + "sal v", + "al ong", + "laugh ing", + "be ans", + "remin ds", + "pha se", + "some body", + "ma sk", + "ran ked", + "dest roy", + "sc i", + "â̼ ï¸ı", + "gab ri", + "le o", + "ro a", + "fa iled", + "si l", + "refuge es", + "re vi", + "r ing", + "ber ries", + "coo kies", + "y y", + "conserv ation", + "sh ab", + "human s", + "de termin", + "a in", + "ni all", + "as su", + "mb a", + "fro m", + "extre me", + "vic es", + "commer ce", + "ght ful", + "or dered", + "suppor ts", + "re cap", + "v or", + "dro pping", + "correc t", + "pay ing", + "mean ing", + "n j", + "qui z", + "\" #", + "busine ss", + "ðŁĩ® ðŁĩ", + "indi gen", + "du st", + "box es", + "bl ind", + "x xx", + "zz y", + "ðŁĩ¬ ðŁĩ", + "ss els", + "s ant", + "dd le", + "hilari ous", + "desig n", + "wonder ing", + "vehic les", + "k re", + "ju d", + "rece ption", + "par ker", + "à Ń", + "pri vi", + "hy dro", + "sof tball", + "pol lu", + "lo cked", + "ba h", + "e ar", + "scri pt", + "di vi", + "br ace", + "geor ge", + "the ast", + "bel o", + "j al", + "tion ary", + "dent al", + "roc ket", + "pur ch", + "sh ak", + "manufac turing", + "e z", + "it is", + "con cep", + "tb all", + "ch s", + "direc ted", + "pra yers", + "oo k", + "phil os", + "vari ety", + "che ss", + "ser ver", + "g and", + "bal ti", + "ðŁĵ ¸", + "sel y", + "cru z", + "spectac ular", + "bur ning", + "re present", + "i z", + "t one", + "mer ce", + "h ell", + "bed room", + "estab li", + "bo l", + "com mon", + "ãĥ »", + "ab or", + "kit ty", + "hei ghts", + "re pair", + "willi am", + "qu ake", + "alab ama", + "popul ation", + "re v", + "re tt", + "i sts", + "n ite", + "le m", + "a ha", + "clevel and", + "r m", + "po ver", + "ob se", + "mon tre", + "man ia", + " ®", + "con ne", + "car ni", + "sh ah", + "f y", + "u a", + "sc or", + "strugg le", + "bo b", + "' '", + "appro pri", + "deci de", + "ff ed", + "ca ster", + "s ort", + "hun gry", + "dra g", + "ا Ù", + "gr ounds", + "d w", + "sli ghtly", + "car din", + "dead line", + "bron ze", + "web in", + "bar ry", + "sil ence", + "e uro", + "op tion", + "ear n", + "ðŁĴ ĸ", + "howe ver", + "na ren", + "na ils", + "bath room", + "v ine", + "ph d", + "min ing", + "gar age", + "( )", + "shou lder", + "defe at", + "di r", + "o v", + "liber ty", + "ple as", + "x on", + "com pre", + "a v", + "j in", 
+ "ab les", + "sil ent", + "fam ili", + "vis its", + "di pl", + "ha bit", + "milli ons", + "regar ding", + "innov ative", + "sen ator", + "r ts", + "v on", + "k l", + "wh il", + "requi red", + "âĿ Ħ", + "lu v", + "presi dential", + "po cket", + "hun dre", + "sho wn", + "fro zen", + "to ward", + "fa st", + "confi dence", + "r ough", + "indivi dual", + "qu et", + "ðŁı ½", + "dom e", + "fi fa", + "engine er", + "z en", + "re mix", + "ðŁĺ ĥ", + "pl ant", + "min or", + "robin son", + "as y", + "pul led", + "cer tain", + "potat o", + "( :", + "pre s", + "oc ca", + "w it", + "it em", + "si e", + "d ating", + "thom pson", + "own ed", + "an u", + "vi e", + "te dly", + "good night", + "ex cept", + "ðŁĮ Ł", + "ira q", + "ki e", + "ren ces", + "li p", + "simil ar", + "sau di", + "vi g", + "arth ur", + "pic ks", + "mil an", + "hon da", + "ma xi", + "o g", + "ste st", + "ar ch", + "analy tics", + "ba sti", + "pear l", + "ter ry", + "hor se", + "ast ro", + "ac ce", + "laun ching", + "inter national", + "s no", + "ta sty", + "den ver", + "ir l", + "pe te", + "tor n", + "advant age", + "var sity", + "\" \"", + "sol e", + "g c", + "lan g", + "demon str", + "ol ds", + "un ity", + "ne ts", + "insp ire", + "cre te", + "nash ville", + "nel son", + "e ter", + "wal k", + "hy un", + "m ack", + "tre as", + "see king", + "ra ge", + "bru sh", + "ab and", + "whil st", + "co con", + "h ong", + "shel ter", + "i p", + "possi bly", + "so o", + "it ed", + "â Ħ", + "rac es", + "war ming", + "qu in", + "tele vision", + "mat ches", + "ra pi", + "ment al", + "pal m", + "jenni fer", + "rol ls", + "indi ana", + "b ars", + "cat ching", + "resc u", + "candid ates", + "fa re", + "âł Ģ", + "se o", + "vie tnam", + "alph a", + "michel le", + "visi ble", + "re gre", + "wn ed", + "app le", + "li p", + "f fe", + "li z", + "york shire", + "ha il", + "se asons", + "be gan", + "m d", + "k c", + "la p", + "fascin ating", + "hel p", + "ur y", + "u ms", + "nu ts", + "se m", + "along side", + "bri dge", + "ori al", + "o ve", + "world cup", + "briti sh", + "comfor table", + "i ve", + "hot els", + "fair s", + "hor ri", + "so x", + "d ining", + "stre am", + "bar ri", + "ss y", + "w im", + "ter ms", + "v u", + "pe re", + "l ens", + "wal ked", + "r or", + "l ars", + "shi eld", + "dou bt", + "pro to", + "cro ssing", + "me ant", + "medi um", + "ad ding", + "e b", + "che ap", + "fun c", + "pap er", + "bran ds", + "ry an", + "feed back", + "col lins", + "un known", + "tro pical", + "sand wich", + "fal len", + "for mu", + "selec t", + "lo ads", + "answ ers", + "or i", + "mag a", + "d or", + "du o", + "ali e", + "dru m", + "ur i", + "de er", + "sou l", + "sh ut", + "âĺ º", + "sto len", + "don ated", + "bu zz", + "patri ots", + "ha l", + "na sty", + "nomin ated", + "mon te", + "ki a", + "th ri", + "ing u", + "te sts", + "pe tro", + "ðŁij ij", + "ho sts", + "ne st", + "to pic", + "pat ch", + "m my", + "hu gh", + "ab ilities", + "ma the", + "s miles", + "g b", + "ag enda", + "insi ghts", + "chi p", + "ph an", + "fail ure", + "dg ers", + "ha i", + "signific ant", + "sho ck", + "ru ral", + "gl am", + "figu res", + "pot us", + "o ta", + "mini stry", + "appe ars", + "fe ar", + "r h", + "americ an", + "h att", + "son y", + "fi res", + "e di", + "n ou", + "e qui", + "wh en", + "univers al", + "mad ness", + "i x", + "sculp ture", + "b ach", + "t to", + "swe den", + "et a", + "en to", + "develop ed", + "month ly", + "ma ps", + "ra h", + "le d", + "del ta", + "sa ints", + "is lam", + "ben ch", + "fif th", + "v ard", + "so cks", + "wel coming", + "j e", + "tur ner", + 
"v b", + "ad i", + "nor way", + "ad y", + "hurric ane", + "por sche", + "tra dition", + "ex am", + "newsp aper", + "lu ci", + "a ver", + "ide al", + "d na", + "madi son", + "ðŁ §", + "wit ness", + "ac ou", + "insi ght", + "si mon", + "robo t", + "sna ke", + "n bc", + "ac o", + "ro ss", + "sh ment", + "religi on", + "ch ann", + "in su", + "camp bell", + "inst alled", + "we ather", + "hor ses", + "ol i", + "rober t", + "k az", + "ðŁı Ģ", + "veter an", + "th read", + "quar ter", + "ea sier", + "cap ture", + "hi pho", + "law rence", + "roman tic", + "pas sion", + "cl ay", + "ox ford", + "th ai", + "stu dying", + "fi a", + "elec ted", + "most ly", + "c b", + "tu mb", + "âĢįâĻ Ĥ", + "x l", + "sh an", + "fa ster", + "ev ans", + "sli de", + "sh ri", + "see k", + "mi es", + "chemi stry", + "pump kin", + "tu m", + ", ,", + "ro om", + "fi red", + "li ps", + "pres ence", + "af f", + "brew ery", + "arri ve", + "sw ag", + "photo graph", + "pen gu", + "chi ps", + "at tor", + "val ues", + "accur ate", + "con temporary", + "princi pal", + "cannab is", + "ari o", + "any where", + "gi a", + "democr ats", + "buil dings", + "li ved", + "ap s", + "neg ative", + "m are", + "bal lo", + "li on", + "diam on", + "loo k", + "re form", + "tom my", + "il la", + "tre ats", + "hundre ds", + "port land", + "wor thy", + "ex cep", + "ar ia", + "ido l", + "be er", + "cd n", + "y u", + "aw k", + "ðŁĩ ¨", + "c ells", + "à ³", + "ident ity", + "dra wn", + "de vil", + "f inger", + "th am", + "ðŁij Ĭ", + "ear ned", + "fin tech", + "dol ph", + "twee ting", + "evolu tion", + "ðŁĵ į", + "est im", + "m vp", + "n one", + "ðŁĩºðŁĩ ¸", + "toyo ta", + "au x", + "mar in", + "b old", + "l bs", + "ste ak", + "mur phy", + "it able", + "lou is", + "sol ve", + "pi a", + "sk ir", + "ill ino", + "webin ar", + "ban ana", + "lo v", + "th on", + "vo ters", + "afford able", + "defe ated", + "lm fa", + "air lines", + "super b", + "any way", + "deb t", + "bo red", + "ver si", + "me tal", + "responsi ble", + "m k", + "s se", + "f ay", + "cau sed", + "f p", + "recomm end", + "pla za", + "spor ting", + "alli ance", + "au stri", + "n n", + "t ours", + "surpri sed", + "arti f", + "th under", + "sur ve", + "wor e", + "bri ef", + "necess ary", + "z ie", + "ash ley", + "dra ke", + "r t", + "kni fe", + "im mun", + "char ges", + "a the", + "bri de", + "rep ly", + "g av", + "broad cast", + "pu er", + "brace let", + "cap acity", + "harve st", + "id k", + "perfor man", + "d ding", + "il ers", + "par a", + "jam a", + "pro vince", + "ch in", + "id ers", + "har i", + "te aser", + "ch en", + "re stor", + "r at", + "fl at", + "col om", + "ðŁĴ ŀ", + "ðŁĩ¨ ðŁĩ", + "smoo th", + "r t", + "p itch", + "stay ing", + "isra eli", + "t cot", + "per spective", + "do ck", + "open er", + "lo vel", + "x o", + "class room", + "l ington", + "go al", + "kenne dy", + "sh am", + "sp aces", + "mitch ell", + "home coming", + "uk i", + "claim ed", + "recru it", + "ing o", + "mu fc", + "mon it", + "g roo", + "resi dent", + "per cent", + "per man", + "otta wa", + "int ment", + "an xi", + "stand ards", + "wor ship", + "sche me", + "f x", + "pot ter", + "bi an", + "athle tic", + "af gh", + "s se", + "sat ell", + "par ties", + "âĿ¤ âĿ¤", + "infra structure", + "rela x", + "mo du", + "wor n", + "smo king", + "y ach", + "practic es", + "wc w", + "am b", + "dome stic", + "tay lor", + "k entu", + "provi ded", + "mo di", + "ve g", + "\" ...", + "ob serv", + "ðŁĺ ©", + "be ard", + "m our", + "an gry", + "ðŁĺ ±", + "startu ps", + "woo den", + "di ve", + "na il", + "anti que", + "ro ses", + "torn ado", 
+ "m at", + "^ ^", + "su spect", + "far m", + "de vices", + "me ga", + "tu l", + "scholar ship", + "ge e", + "disa ster", + "arri val", + "po in", + "mar c", + "kati e", + "bb ed", + "fal se", + "deser ves", + "ric hard", + "ju ana", + "fre y", + "tion ed", + "hy bri", + "r w", + "sar ah", + "ach i", + "c ure", + "o le", + "mor ris", + "ch ic", + "broad way", + "la bel", + "pa k", + "pover ty", + "gol f", + "e red", + "f u", + "er ies", + "be es", + "alo gue", + "st el", + "wire less", + "je wish", + "ti de", + "blo cked", + "life time", + "b har", + "sp lit", + "am ster", + "th i", + "jo shu", + "br unch", + "ha ps", + "s for", + "oo ps", + "ka poor", + "hi king", + "suppo sed", + "ro of", + "re as", + "tra in", + "ti ght", + "tru mp", + "bas ically", + "r r", + "ea red", + "see ds", + "entr ance", + "c p", + "wi e", + "son ic", + "vic tim", + "he re", + "e h", + "ear rings", + "sal mon", + "arc tic", + "an ne", + "dou gla", + "corru ption", + "hann ah", + "ha sn", + "vo ices", + "con ce", + "att a", + "fle et", + "clin ical", + "democr atic", + "ton y", + "st ood", + "le f", + "twit ch", + "a il", + "honest ly", + "incre ased", + "dro me", + "don na", + "accep ted", + "visit ors", + "ap ar", + "ad or", + "p ar", + "jer ry", + "ra i", + "brand on", + "ab u", + "!! !!!!", + "me me", + "in gh", + "glori ous", + "b hu", + "pu mp", + "j ol", + "li ke", + "fi sher", + "ma z", + "ag an", + "destin ation", + "play list", + "le tters", + "gen u", + "br ace", + "celebr ated", + "bann er", + "r he", + "dra gon", + "ðŁĺ ħ", + "sig nature", + "gre y", + "âľ Ķï¸ı", + "al ice", + "be red", + "ph er", + "ber n", + "ca th", + "ga thering", + "sc oring", + "influ ence", + "sm iling", + "de pt", + "lo cal", + "a x", + "ac u", + "reti rement", + "hon or", + "her self", + "chem ical", + "asse ss", + "y all", + "fre qu", + "appreci ation", + "ac a", + "cho ir", + "cu z", + "so il", + "c il", + "repor ting", + "u h", + "enterpri se", + "gr at", + "jaco b", + "ru m", + "fe e", + "j ak", + "sp in", + "bi kes", + "phi a", + "ste re", + "p is", + "bloo d", + "t att", + "ra ft", + "war ren", + "sh eri", + "back stage", + "mar sh", + "hash tag", + "ther ine", + "re in", + "game day", + "guar an", + "reci pes", + "min ds", + "stron ger", + "issu ed", + "bic y", + "n ak", + "ment ed", + "sc ary", + "u x", + "pre vious", + "tt le", + "th ats", + "ac tors", + "u ma", + "tin a", + "bun ny", + "promo tion", + "u ss", + "oli ver", + "montre al", + "what s", + "appreci ated", + "la kes", + "excu se", + "kno wing", + "pri zes", + "musc le", + "shad es", + "sco t", + "ing redi", + "electr onic", + "ju an", + "comb at", + "s ri", + "e h", + "turk ish", + "l om", + "stri kes", + "pri son", + "re e", + "po pe", + "vi d", + "ol dest", + "dol l", + "sw iss", + "certi fied", + "cli p", + "re turning", + "lat or", + "le igh", + "tt es", + "wat son", + "heal ing", + "el im", + "per haps", + "ha ss", + "k au", + "d der", + "mou se", + "new castle", + "indigen ous", + "wel comes", + "co le", + "tau ght", + "no ise", + "appe ar", + "jo e", + "can on", + "wedne sday", + "u tah", + "c tive", + "dri ven", + "i v", + "c ell", + "stri p", + "ac c", + "focu sed", + "ar rest", + "sto cks", + "wo o", + "â Ĺ", + "notic ed", + "shad o", + "di spla", + "ter ror", + "bor ne", + "secon d", + "que ens", + "wo ke", + "ja il", + "no tt", + "cam bridge", + "har t", + "se af", + "fa x", + "ac cept", + "âĺ ħ", + "goo ds", + "k at", + "t win", + "h s", + "thou sand", + "s ins", + "su ite", + "amp ton", + "ar n", + "rele v", + "ric har", + "hoo ps", + "n 
bc", + "class ic", + "p ab", + "soldi er", + "de plo", + "le ans", + "install ation", + "cla sh", + "le ban", + "ee e", + "ti re", + "belo ved", + "fu sion", + "travel ing", + "ne i", + "coo kie", + "glo be", + "phys ics", + "s q", + "co l", + "wol ves", + "d l", + "ex it", + "\" -", + "foo tball", + "le af", + "ster ling", + "hi de", + "minne so", + "fresh man", + "natu re", + "indi e", + "supp lies", + "bri s", + "iri sh", + "ink tober", + "doo dle", + "ic op", + "mess ages", + "adul ts", + "recor ded", + "fix ed", + "ar do", + "offe red", + "under ground", + "dr one", + "p ine", + "ma inten", + "and re", + "ham mer", + "s x", + "r ound", + "hi ke", + "bra d", + "ro me", + "fu ll", + "on ey", + "ro ws", + "colum bia", + "archi ves", + "appro ved", + "bat ch", + "illino is", + "recogn ition", + "shou ldn", + "fo g", + "nca a", + "ke vin", + "human ity", + "al though", + "pow ers", + "p ou", + "s ar", + "pe st", + "alco hol", + "con sci", + "phil adel", + "en o", + "t m", + "ok la", + "cate gory", + "particip ate", + "accu sed", + "bri ef", + "po em", + "clu bs", + "consul t", + "ja b", + "big data", + "amster dam", + "ac ing", + "certi fic", + "n u", + "d at", + "impro ved", + "and y", + "campa ig", + "pale stin", + "p ace", + "mo bi", + "feel ings", + "wol f", + "bra in", + "pro pos", + "inter active", + "prin ce", + "inde x", + "c is", + "cha e", + "peace ful", + "co vering", + "ac o", + "cour ses", + "mon key", + "re place", + "b l", + "bloo dy", + "tal es", + "brigh ton", + "neighbor hood", + "g ates", + "spiritu al", + "af raid", + "bre ast", + "b ones", + "ðŁij ī", + "vide o", + "w au", + "tou ch", + "inju ries", + "car l", + "ri x", + "une x", + "âĢ ¢", + "fre d", + "consi dered", + "thu si", + "an ch", + "on y", + "u sa", + "graph ics", + "ac re", + "ðŁĺ ©", + "com memor", + "com mod", + "go ti", + "guar dian", + "star bucks", + "pre vention", + "haha haha", + "admini stration", + "portu gal", + "fac ulty", + "bet a", + "ul a", + "al bert", + "bre ath", + "er i", + "le tting", + "tr ic", + "ment ation", + "incredi bly", + "ten nes", + "v d", + "ðŁĻ Ī", + "ed die", + "br ick", + "gr ill", + "bt w", + "wat ches", + "resear chers", + "t ney", + "ni e", + "p as", + "a ster", + "vi br", + "poke mon", + "ch rome", + "go at", + "pitt s", + "il ly", + "festi ve", + "y d", + "can al", + "ðŁ Ĩ", + "fi es", + "car los", + "re que", + "partic i", + "tra ins", + "sam ple", + "temper ature", + "sym ph", + "pic king", + "in door", + "z ers", + "playo ffs", + "____ ____", + "ap es", + "ly rics", + "islam ic", + "performan ces", + "d ick", + "spar k", + "se as", + "hom a", + "gr ound", + "disc i", + "employe e", + "com mu", + "alas ka", + "al an", + "fe ast", + "dg ing", + "ban king", + "manu el", + "slow ly", + "tru cks", + "mc car", + "oo o", + "sc rat", + "orche stra", + "indivi du", + "m x", + "bre ath", + "stair s", + "equ ality", + "bla ke", + "loc ations", + "cocon ut", + "balti more", + "aa a", + "l c", + "ðŁı Ĩ", + "har vey", + "resi st", + "immigr ation", + "adid as", + "fil i", + "re f", + "lg bt", + "mo s", + "pp i", + "ken ny", + "terr or", + "ban e", + "apol is", + "s g", + "social media", + "ka i", + "hon est", + "as sas", + "bol lywood", + "âĢįâĻ Ģï¸ı", + "ferr ari", + "hor n", + "cryp to", + "bo om", + "mainten ance", + "i di", + "s man", + "w l", + "ext ended", + "in sul", + "ve s", + "go sp", + "tr i", + "pi g", + "tar ge", + "cel er", + "st ati", + "sm h", + "ri dic", + "appe al", + "? 
)", + "con clu", + "cos me", + "she ep", + "christop her", + "en thusi", + "po lish", + "me ts", + "oun ded", + "sustain ability", + "creati vity", + "con crete", + "ra i", + "ali en", + "ble ss", + "te es", + "clu b", + "ro t", + "bo s", + "ex ist", + "perfe ction", + "lu ck", + "rock y", + "expen sive", + "mean while", + "happy birthday", + "pre t", + "thr iller", + "ca ve", + "playo ff", + "som er", + "l u", + "le x", + "def ence", + "am writing", + "home less", + "pro phe", + "ch et", + "past or", + "ðŁ¤ £", + "land er", + "ww w", + "Ģ ï¸ı", + "tic a", + "! #", + "o tic", + "rad ar", + "po sters", + "pow der", + "po li", + "ha un", + "tra p", + "bl in", + "assau lt", + "shor ts", + "re y", + "sh y", + "squ ir", + "rac ist", + "gar lic", + "fu r", + "remo te", + "sm ell", + "impre ssed", + "fing ers", + "âł Ģ", + "din o", + "le ment", + "s nu", + "promo ting", + "str ing", + "produc tive", + "b age", + "ma son", + "ra z", + "direc tly", + "j k", + "ev al", + "ðŁij Ĭ", + "doc tors", + "co w", + "ri der", + "st v", + "re move", + "w u", + "na than", + "ro d", + "n r", + "= >", + "affe cted", + "inve st", + "mp tion", + "g inger", + "o d", + "agricul ture", + "s que", + "mu g", + "coun ting", + "ke e", + "mag nific", + "coo k", + "ani stan", + "roo t", + "plac ed", + "sym po", + "gh ana", + "un d", + "che er", + "thro wing", + "secre ts", + "f illing", + "opti mi", + "butter fly", + "bu bb", + "ðŁĺ ī", + "terri ble", + "d g", + "sil k", + "obse ssed", + "lo u", + "ai de", + "sal ute", + "mon u", + "philadel phia", + "scienti fic", + "i st", + "u ae", + "dess ert", + "bott les", + "can yon", + "ðŁĺ Ī", + "car ib", + "o ther", + "w ich", + "re source", + "guil ty", + "un d", + "le on", + "e ss", + "kan e", + "el e", + "tra iner", + "he im", + "an te", + "man age", + "roo kie", + "tre ated", + "po ses", + "rs vp", + "cau ses", + "aw ak", + "je well", + "le tt", + "on ics", + "tit les", + "cardi ff", + "g aga", + "bu mp", + "use ful", + "? 
!", + "loo se", + "bb ing", + ": :", + "argent ina", + "de bu", + "cy cl", + "wh el", + "dis gu", + "j el", + "k ills", + "bio logy", + "ex ter", + "tra sh", + "bo dies", + "tr am", + "circu it", + "expe ct", + "la ds", + "w ells", + "sho t", + "ge e", + "naren dr", + "fa stest", + "b ent", + "b ills", + "mar shall", + "h ats", + "intro duce", + "citi zen", + "im possible", + "gi b", + "az z", + "net working", + "r ant", + "thin k", + "in dy", + "st ops", + "f theday", + "bri an", + "* *", + "amo di", + "dom e", + "coura ge", + "pac king", + "af fairs", + "g n", + "si zed", + "ent ary", + "pol and", + "swit zer", + "afgh anistan", + "w u", + "ten der", + "subscri be", + "mo sco", + "att end", + "republic an", + "hon ey", + "âĢ ĭ", + "si mul", + "we ster", + "foo die", + "or o", + "midd le", + "ab t", + "co pies", + "ma je", + "narendr amodi", + "ty pical", + "inspir ational", + "vit am", + "wis con", + "cu bs", + "tiv ity", + "h ali", + "e ars", + "k ay", + "d are", + "mari juana", + "cu rious", + "an ia", + "tom ato", + "re mind", + "ðŁĩ ·", + "sc ared", + "cou p", + "po et", + "land ed", + "ri d", + "wra pped", + "mor ri", + "climb ing", + "e ws", + "fe eding", + "con tra", + "tho logy", + "gri d", + "ti vely", + "read er", + "la ser", + "di ving", + "di g", + "lat in", + "ti ed", + "shake spe", + "o ci", + "ad m", + "show ers", + "chu ck", + "mar cus", + "oo s", + "kne e", + "o live", + "ow l", + "dy lan", + "an no", + "g ym", + "deci sions", + "well ness", + "arri ves", + "sati s", + "chri s", + "thur s", + "ðŁ¤ £", + "inter views", + "thank you", + "switzer land", + "over night", + "journ alist", + "ser ves", + "vol can", + ".... ...", + "plo t", + "nic ol", + "car rying", + "mag ne", + "tre asure", + "ex p", + "be ver", + "ðŁĺ ¢", + "mar ty", + "mo le", + "don ations", + "recogni zed", + "b h", + "du s", + "sh ann", + "al do", + "success fully", + "ent e", + "ðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤ", + "cab inet", + "cu is", + "tit led", + "d as", + "so l", + "strate gies", + "deli vering", + "ad ds", + "ani an", + "ne ther", + "ðŁĴ ĥ", + "con tain", + "su its", + "pa irs", + "to dd", + "rel la", + "ro pe", + "ci o", + "cro p", + "paint ings", + "su z", + "re jec", + "bu st", + "d h", + "fra ud", + "m h", + "contro l", + "je al", + "destroy ed", + "al lows", + "wo ol", + "minneso ta", + "om en", + "j u", + "sympo sium", + "d af", + "lim it", + "accoun ts", + "load ing", + "inter n", + "re solution", + "hol land", + "qu al", + "meet ings", + "gra ve", + "cam ping", + "v am", + "re nov", + "liber al", + "am ber", + "gre e", + "hu mb", + "fe ver", + "el ing", + "broo ks", + "à ²", + "be th", + "ad ed", + "al t", + "ro e", + "perform ed", + "jo sh", + "frank lin", + "nic ole", + "de ss", + "bb s", + "m g", + "net works", + "min im", + "al t", + "weap ons", + "gu y", + "jas on", + "g ha", + "harb our", + "at on", + "pra ise", + "kentu cky", + "bel fast", + "st icks", + "blo ss", + "ho pes", + "an thro", + "famili ar", + "wa it", + "ch ile", + "depre ssion", + "la x", + "je ts", + "le ice", + "recei ves", + "si er", + "an k", + "de x", + "inde ed", + "fle xi", + "fab ric", + "lam b", + "hel icop", + "am anda", + "âĢĶ âĢĶ", + "compe te", + "sn ack", + "techno logies", + "sy rian", + "mom s", + "mu ham", + "cho sen", + "an at", + "dev on", + "shar ks", + "re t", + "fundra iser", + "selfi es", + "st ations", + "communic ations", + "tennes see", + "tu tor", + "ro t", + "valu able", + "dynam ic", + "nur se", + "i ed", + "earth quake", + "deser ved", + "a ve", + "sar a", + "stre tch", + "dougla s", + "ne pal", + "à 
§", + "ob viously", + "d ame", + "ra pe", + "any body", + "k w", + "pat rol", + "hol ders", + "h anna", + "info graphic", + "ec o", + "be ating", + "stan ley", + "bo ats", + "ri bb", + "e z", + "wit ch", + "inv a", + "ac id", + "boar ding", + "- @", + "gi l", + "da ve", + "care ers", + "opp os", + "l loy", + "in ter", + "do pe", + "re su", + "j agu", + "sh ade", + "in dy", + "on ist", + "rel ations", + "ag en", + "ab le", + "inci dent", + "me ter", + "shar ma", + "id r", + "pro ve", + "immedi ately", + "tro ops", + "am an", + "g low", + "gaz a", + "blo cks", + "person al", + "chron ic", + "all er", + "si d", + "sh r", + "whats app", + "lu cy", + "ar chae", + "ho u", + "journ alism", + "our selves", + "go t", + "the med", + "shap ed", + "we ak", + "cas ual", + "leng th", + "sla m", + "ab bey", + "e v", + "coun ter", + "est a", + "reci pi", + "cha pel", + "expan sion", + "sel f", + "suff ering", + "sp ice", + "n z", + "sp art", + "desp er", + "boo king", + "quart ers", + "y on", + "ðŁĴ Ĺ", + "p k", + "continu ed", + "- #", + "man hatt", + "tal ked", + "sh en", + "com bo", + "hybri d", + "je ans", + "liqu id", + "se al", + "re tweets", + "ac celer", + "collec tive", + "t as", + ": ))", + "profession als", + "ra w", + "o tt", + "su san", + "ir ing", + "okla homa", + "re ven", + "survi val", + "cre ator", + "tran sit", + "st ac", + "sur f", + "i k", + "ed iting", + "ch illing", + "bai ley", + "ste al", + "ra ble", + "pa rent", + "hun ger", + "sn app", + "collec t", + "philos oph", + "dedic ation", + "c f", + "c m", + "le ep", + "repe at", + "re ha", + "un fortun", + "a er", + "a ero", + "abstr act", + "mon itor", + "ag ents", + "bu l", + "sci ence", + "harb or", + "drag ons", + "floo ding", + "ac compli", + "d ash", + "juli a", + "the red", + "tues day", + "cy ber", + "b low", + "ta ined", + "le m", + "refe rence", + "pp o", + "ne goti", + "char le", + "con nor", + "au lt", + "access ories", + "commissi oner", + "rain y", + "re ar", + "advis ory", + "luc as", + "ma id", + "co al", + "k av", + "pol o", + "ðŁı ¾", + "tran sport", + "mar gare", + "straw berry", + "bur ns", + "gre ens", + "ne v", + "partici pants", + "col in", + "belgi um", + "col our", + "in form", + "d ell", + "br on", + "cal y", + "kick off", + "strate gic", + "re union", + "hon ors", + "li b", + "egy p", + "âŃIJ ï¸ı", + "hy po", + "si zes", + "regi stered", + "bet es", + "relax ing", + "bloo m", + "inten se", + "valent ines", + "insan e", + "w wii", + "p x", + "tri o", + "bla de", + "wiscon sin", + "con e", + "plat in", + "ali ze", + "ra ven", + "incre asing", + "indi ans", + "il ian", + "bl u", + "rabb it", + "exten sion", + "je f", + "au di", + "fer ry", + "s ell", + "a day", + "us b", + "swe at", + "cham pag", + "metho d", + "mem ph", + "assi st", + "s by", + "ca pe", + "remo ved", + "mag n", + "v t", + "r ams", + "f bi", + "tack le", + "phe w", + "h on", + "motor cycle", + "su spec", + "eleph ant", + "sub ject", + "let te", + "da iry", + "whe at", + "awk ward", + "ac t", + "tro l", + "mit ted", + "zay n", + "sheri ff", + "ene my", + "con s", + "ke tt", + "bul ls", + "ev alu", + "bt c", + "satell ite", + "ho lo", + "por ter", + "dia betes", + "bet ter", + "rele asing", + "sur f", + ": -", + "se basti", + "collec ting", + "en cing", + "e thi", + "go ds", + "al ley", + "health y", + "m ills", + "sma sh", + "co pper", + "cr ack", + "read ers", + "sp ac", + "licen se", + "bas ket", + "bang la", + "en tic", + "om i", + "m ere", + "si vely", + "anim ation", + "lan es", + "dent ally", + "chill in", + "fi e", + "k aren", + "dep th", 
+ "li pse", + "n g", + "ri p", + "mel o", + "sand y", + "ðŁijı ðŁijı", + "vin cent", + "nu t", + "hu g", + "who le", + "cre ates", + "? ???", + "âĿ¤ï¸ı âĿ¤ï¸ı", + "bak ed", + "up grade", + "rober ts", + "har a", + "carib bean", + "auth entic", + "mb s", + "mosco w", + "attor ney", + "wi ki", + "ch lo", + "hu ll", + "cor k", + "\" !", + "sty lish", + "ðŁĵ¸ :", + "di ary", + "impro ving", + "ex pand", + "bri ght", + "pollu tion", + "k nights", + "person ality", + "chec ked", + "fac ilities", + "z el", + "bow ling", + "gu er", + "ðŁİ Ĥ", + "on going", + "un its", + "hoo k", + "be ck", + "confl ict", + "to dd", + "far ming", + "educ ational", + "k ak", + "cla y", + "stro ke", + "bel ly", + "explo re", + "mill enni", + "th m", + "loo p", + "sm s", + "consi st", + "cir ca", + "br yan", + "d ab", + "youn ger", + "soli dar", + "pp a", + "experi enced", + "b ella", + "bo ard", + "shef field", + "steph en", + "consu mer", + "sub mit", + "spon sor", + "t ang", + "ag gre", + "comb ined", + "trac king", + "sand ers", + "b az", + "survi ve", + "fer red", + "equ al", + "se p", + "re ed", + "str ong", + "priv acy", + "st ap", + "un g", + "ac ry", + "pa sta", + "pir ates", + "ag er", + "fair y", + "du p", + "introduc ed", + "wi p", + "let s", + "spr ay", + "ðŁĵ º", + "gre w", + "a sts", + "pitts burgh", + "new york", + "jo ey", + "lau ren", + "tra de", + "ch op", + "pi pe", + "cla ire", + "behavi or", + "v ap", + "cre ws", + "lap top", + "ðŁ¤ Ĺ", + "che ster", + "disci pl", + "d f", + "out doors", + "k s", + "go ver", + "super star", + "cas ino", + "far mer", + "; -)", + "re turned", + "ðŁı Ī", + "ma il", + "roa sted", + "co sta", + "v ill", + "pe z", + "gard ening", + "distribu tion", + "sh ining", + "inve stors", + "ra sp", + "dec ades", + "reali zed", + "bar n", + "p ti", + "st able", + "ut d", + "pan thers", + "m ens", + "b n", + "ca de", + "bu cket", + "yn n", + "when ever", + "wa ke", + "da is", + "ber nie", + "lo dge", + "ju lie", + "atmo sphere", + "ðŁĺĺ ðŁĺĺ", + "major ity", + "par ti", + "exc it", + "cu t", + "me h", + "musli ms", + "be gun", + "fli ghts", + "vene ss", + "ce me", + "po sing", + "so le", + "g ou", + "dark ness", + "pe ach", + "cel tic", + "auth ority", + "grand ma", + "ful ness", + "smi th", + "speci fic", + "gar cia", + "co ins", + "good ness", + "aldu b", + "recru iting", + "den nis", + "gar y", + "sle eve", + "weap on", + "pl z", + "disco ver", + "harri son", + "recruit ment", + "ja i", + "ch im", + "com pared", + "tom s", + "mo thers", + "am y", + "archi ve", + "t ask", + "ben jam", + "se g", + "law yer", + "al um", + "inve sting", + "mi e", + "che z", + "j p", + "a ke", + "fl am", + "wall paper", + "âĻ¥ ï¸ı", + "t ton", + "che st", + "favor ites", + "we igh", + "coo lest", + "r ating", + "relev ant", + "lo gan", + "ma ple", + "run ners", + "pri or", + "peop le", + "ma ur", + "terrori st", + "te sted", + "carni val", + "su spen", + "me asure", + "m v", + "cyber security", + "app ren", + "terror ism", + "o z", + "v ital", + "ni es", + "gon z", + "fun ded", + "twi st", + "assess ment", + "die sel", + "en for", + "colum n", + "ad dressing", + "ca sts", + "pay ment", + "x ton", + "fi er", + ", '", + "la st", + "ne e", + "un less", + "clo se", + "sk ill", + "cuis ine", + "fun eral", + "ti les", + "a un", + "k ru", + "relation ships", + "ðŁĴ ¯", + "ev ent", + "âĢįâĻĤ ï¸ı", + "kind ness", + "pro posed", + "acou stic", + "a es", + "defen der", + "dan ce", + "h tt", + "w at", + "vo y", + "ðŁ¤ ĺ", + "au s", + "cli ff", + "sear ching", + "beauti fully", + "in qu", + "at l", + "speci 
alist", + "ðŁIJ ¶", + "da i", + "tra ils", + "class ics", + "inst ant", + "v ous", + "re venue", + "mar ch", + "kir k", + "fr inge", + "fire works", + "tri via", + "âĺ ħ", + "tr action", + "wal ter", + "mo to", + "l ily", + "att itude", + "cli mb", + "sc an", + "sav ings", + "c w", + "fa ith", + "cred its", + "ab led", + "gra ff", + "auto graph", + "he he", + "ran ch", + "ha d", + "ro gers", + "ðŁĮ ¹", + "f in", + "re qu", + "fol k", + "ad ditional", + "lyn n", + "u ber", + "dol lars", + "lo gic", + "wor th", + "so m", + "the sis", + "p ound", + "bi c", + "st ur", + "cer am", + "spen cer", + "en tered", + "v amp", + "organi zed", + "âľ Ī", + "pp s", + "tr on", + "merce des", + "no ti", + "compet itive", + "do w", + "ous ness", + "vic tor", + "gr illed", + "na i", + "pu tin", + "ab ra", + "bl ame", + "alex and", + "anim al", + "dec ent", + "p ent", + "inter ior", + ":' )", + "but ler", + "bal let", + "ðŁĴ Ķ", + "albu ms", + "down s", + "la d", + "si r", + "pla in", + "p ers", + "blon de", + "dis c", + "paki stan", + "se ment", + "ga a", + "w age", + "ch as", + "man i", + "co ps", + "terr it", + "lo l", + "lau ghter", + "ri vers", + "magnific ent", + "lam p", + "w b", + "new sle", + "char ts", + "ble ssing", + "p unch", + "lon gest", + "fl oral", + "cu tie", + "fare well", + "sto pping", + "mb b", + "bu d", + "chee se", + "de cla", + "si m", + "mc donald", + "de ter", + "you th", + "t ch", + "fre der", + "kin dle", + "fer n", + "at or", + "as leep", + "p ond", + "spr int", + "p ounds", + "la zy", + "gh e", + "fundra ising", + "dead ly", + "gran de", + "dou g", + "he y", + "lin da", + "consi dering", + "i um", + "gol den", + "vi k", + "auth ors", + "di ss", + "u ally", + "appropri ate", + "mor ning", + "y le", + "hon oring", + "foli o", + "be c", + "re bec", + "fin land", + "formu la", + "corn wall", + "sh ay", + "cau sing", + "bl end", + "sig nal", + "t ent", + "kash mir", + "nation als", + "har mony", + "sc out", + "acce ssi", + "he ight", + "medi eval", + "impro vement", + "ke es", + "prac tical", + "car d", + "de par", + "hu n", + "om ing", + "cal gary", + "ste l", + "bu bble", + "gur u", + "ma h", + "unex pe", + "n h", + "ed a", + "me at", + "i ge", + "si o", + "god dess", + "in ches", + "tun es", + "br itt", + "sti on", + "ra j", + "âĻ «", + "mer cy", + "ðŁĴ ĺ", + "sen ds", + "i est", + "pol ici", + "val e", + "reduc ed", + "as ap", + "vi jay", + "defen sive", + "celebr ations", + "ri ders", + "med itation", + "har mon", + "g ing", + " ¡", + "program ming", + "in au", + "sud den", + "m h", + "replac ement", + "sk u", + "j ar", + "gra des", + "ta st", + "k itt", + "brand ing", + "k aw", + "boo t", + "f ought", + "p ays", + "g f", + "iz ation", + "ho p", + "k k", + "activi st", + "v end", + "coast al", + "cha os", + "ðŁĶ ´", + "se me", + "bill board", + "li fting", + "cu mb", + "sc al", + "ðŁĸ ¤", + "stru ck", + "l v", + "indie dev", + "beat en", + "jun gle", + "al right", + "destin y", + "m ing", + "k c", + "ch ances", + "om an", + "q atar", + "cra f", + "tra ined", + "pri x", + "char m", + "o tive", + "s mu", + "e c", + "and ers", + "hand ed", + "al ban", + "certain ly", + "arri ving", + "i ze", + "sa i", + "tr ack", + "pain ter", + "hu mble", + "appo intment", + "head line", + "manag ing", + "mo d", + "as pe", + "andre a", + "à ¤", + "ethi op", + "un ited", + "exi st", + "bal i", + "k ad", + "n t", + "d red", + "re x", + "recogni ze", + "tam pa", + "be ers", + "ati a", + "he els", + "no te", + "transport ation", + "tur tle", + "re de", + "hipho p", + "sp icy", + "sp urs", + "⬠ĩ", + 
"cor p", + "ther n", + "to ast", + "hur ry", + "proper ties", + "ma ge", + "mar co", + "ele ments", + "bou ti", + "syn drome", + "ms g", + "develop er", + "gra ders", + "he im", + "re sil", + "off ices", + "del ay", + "di men", + "vin tag", + "barbar a", + "ðŁĺ ±", + "vene zu", + "cu lar", + "fac ed", + "bar n", + "ðŁĺ Ĩ", + "survi vor", + "wor m", + "confu sed", + "passion ate", + "Ø ±", + "identi fy", + "electr icity", + "sou ls", + "brad ley", + "repor tedly", + "lun ch", + "shel f", + "eli a", + "swee t", + "smoo th", + "emplo yment", + "am el", + "manhatt an", + "ste am", + "oun ts", + "ye p", + "li ving", + "un e", + "descri be", + "ca res", + "man ila", + "sha wn", + "ac ted", + "bas h", + "st even", + "re st", + "pet ition", + "div ine", + "wel sh", + "rac e", + "platin um", + "ðŁĮ ¸", + "p b", + "extra ordinary", + "solidar ity", + "m all", + "on ion", + "schedu led", + "game of", + "fer gu", + "de ms", + "nor m", + "p k", + "tri als", + "polici es", + "publi shing", + "st ole", + "fron t", + "charac ter", + "van ia", + "ex ce", + "sti e", + "sc a", + "resi dential", + "sa iling", + "ðŁĶ¥ðŁĶ¥ ðŁĶ¥", + "spons ors", + "th ick", + "champag ne", + "she pher", + "continu ing", + "ven ice", + "per th", + "na p", + "a ster", + "y ak", + "un limited", + "cho ices", + "ne o", + "hi v", + "repor ter", + "bru ssels", + "f old", + "dy s", + "se mi", + "la wn", + "it alia", + "wi fi", + "as k", + "em ed", + "fr ame", + "monit oring", + "ste ad", + "i da", + "gr in", + "is a", + "fli p", + "re stric", + "offen sive", + "atta ched", + "di sh", + "wh y", + "philli ps", + "gre et", + "p als", + "mix tape", + "v ou", + "fiel der", + "spar k", + "alber ta", + "g len", + "ca sh", + "s ri", + "u ri", + "ro dri", + "entreprene urs", + "climate change", + "p sy", + "d le", + "em ents", + "lin ked", + "nether lands", + "acci dentally", + "oppos ition", + "vel vet", + "ra ys", + "c w", + "om o", + "m f", + "lmfa o", + "newsle tter", + ": )", + "toi let", + "liter ature", + "di sp", + "phili p", + "uni form", + "sudden ly", + "head er", + "cool er", + "-- -", + "prou d", + "bri g", + "nis san", + "scienti st", + "j ah", + "con centr", + "pac ks", + "appo inted", + "so ap", + "eng age", + "cho se", + "âĻ ¡", + "se tup", + "jeal ous", + "har ry", + "g ation", + "tun nel", + "te mp", + "osc ars", + "dec ade", + "recomm ended", + "child ren", + "ab a", + "anxi ety", + "ve ments", + "sal on", + "pho too", + "organi z", + "mach ines", + "ab s", + "vil le", + "hy pe", + "ti ff", + "emer ging", + "av geek", + "[ #", + "contribu tion", + "bra dy", + "re sto", + "g mail", + "fit z", + "photo shoot", + "hel met", + "h t", + "eleg ant", + "ug anda", + "nur sing", + "or leans", + "pen n", + "na h", + "foo tage", + "em a", + "w o", + "w ad", + "concer ns", + "ve re", + "re mark", + "who ever", + "str ang", + "p t", + "qu it", + "sh ang", + "histor y", + "s ick", + "perman ent", + "ill ness", + "col d", + "visi on", + "he m", + "ar row", + "con vic", + "pin k", + "oc cup", + "bal d", + "ex hau", + "u of", + "am o", + "on t", + "ãĥ »", + "adop t", + "la id", + "smo ked", + "inter pre", + "ess enti", + "associ ated", + "b d", + "bb y", + "fi er", + "inst all", + "dipl om", + "con diti", + "c f", + "w ak", + "any a", + "gr aci", + "fi sher", + "s ss", + "ap r", + "il it", + "mus ician", + "symph ony", + "cor d", + "h ack", + "le gi", + "l v", + "bless ings", + "hum or", + "sc ra", + "e ti", + "min ster", + "trav elling", + "bu sh", + "jewell ery", + "li me", + "!! 
!", + "pregn ant", + "pe e", + "lo b", + "cap ital", + "ip a", + "pen cil", + "la bor", + "duc ks", + "prou dly", + "wedd ing", + "dere k", + "m w", + "pe g", + "valent ine", + "an gu", + "re treat", + "pro spect", + "dang er", + "vul ner", + "up set", + ", #", + "sr k", + "x im", + "thur sday", + "n fl", + "kis ses", + "re ds", + "cr ack", + "re ward", + "c u", + "ko k", + "me te", + "aband oned", + "it t", + "me als", + "sp ell", + "stan bul", + "del ays", + "ru m", + "le op", + "gu m", + "no va", + "super man", + "ch ick", + "m is", + "dram atic", + "inno cent", + "r ounds", + "re c", + "auti sm", + "bangla desh", + "mor al", + "mo vie", + "sp oo", + "k la", + "âĥ £", + "ou ting", + "mess i", + "ab road", + "loo kin", + "a im", + "q i", + "st ack", + "colla ge", + "à ¯", + "hud son", + "sc an", + "ho e", + "ch au", + "oc cur", + "comm ander", + "ho les", + "ðŁİ Ħ", + "bi as", + "v on", + "stick er", + "ma k", + "responsi bility", + "colum bus", + "sa int", + "ed mon", + "rac ism", + "far ms", + "w en", + "gul f", + "may o", + "!!!! !!!!", + "corpor ation", + "ba chel", + "el a", + "inter nal", + "je ep", + "fol lows", + "di alogue", + "de rer", + "smart phone", + "he len", + "rich mond", + "equ ity", + "s land", + "b g", + "ne ar", + "av i", + "memph is", + "we ir", + "discu ssed", + "bad ge", + "p up", + "mi stake", + "phen omen", + "un ite", + "ðŁ Ľ", + "de pic", + "ri des", + "in augu", + "n at", + "sof twitter", + "comb ination", + "gosp el", + "âļ ¾", + "ad mission", + "retro gaming", + "ðŁIJ ¾", + "sch u", + "mb o", + "jun ction", + "al arm", + "à ¦", + "gr ac", + "kh ali", + "k ul", + "m ale", + "cap tion", + "wi sh", + "te re", + "cor ps", + "ru bber", + "play station", + "er in", + "effici ent", + "l or", + "jo kes", + "in ary", + "nor man", + "lu is", + "inaugu ral", + "ch ed", + "âļ½ ï¸ı", + "di p", + "to e", + "str at", + "aa c", + "am u", + "pi er", + "co tt", + "comm and", + "tt en", + "sn oo", + "cu be", + "clo ses", + "class ical", + "s word", + "expre ssion", + "reach ing", + "n app", + "co st", + "affe ct", + "ric o", + "gi f", + "brea the", + "tri be", + "or tho", + "h ay", + "l g", + "fri es", + "n m", + "hi ding", + "richar ds", + "en de", + "mic ro", + "capit ol", + "cop y", + "ro m", + "regi me", + "mary land", + "tax i", + "di al", + "embar ra", + "un believ", + "ch t", + "v s", + "elim in", + "o dd", + "pen ny", + "sound track", + "l ings", + "trans ition", + "rema ining", + "a is", + "mali k", + "? 
!?", + "rand om", + "def end", + "ul tra", + "tru m", + "danc er", + "st ol", + "dri ve", + "a ver", + "ro ast", + "defin ition", + "se an", + "excit ement", + "partic ul", + "su rely", + "sh av", + "ber y", + "di shes", + "com m", + "is ol", + "i am", + "ob li", + "gho st", + "hugh es", + "chi efs", + "b as", + "conserv ative", + "speci al", + "fe min", + "sh ri", + "n ancy", + "inte l", + "tu ne", + "ðŁĩ ª", + "jo el", + "gg le", + "mo to", + "ðŁĺ Ķ", + "bu ck", + "d ag", + "antic ip", + "mont ana", + "gu id", + "fro g", + "ec raft", + "op e", + "dri ves", + "nu mer", + "x y", + "color ful", + "wednesday wisdom", + "illu min", + "bey on", + "inau gur", + "deep ly", + "pre fer", + "for tune", + "coo ked", + "ti ble", + "âĺ ķ", + "swe ater", + "it ter", + "tt y", + "u i", + "gi e", + "com plic", + "~ ~", + "tax es", + "cu ps", + "di verse", + "sam anth", + "âłĢ âłĢ", + "ba king", + "sy mp", + "wa i", + "be half", + "mer cur", + "travel s", + "ðŁİī ðŁİ", + "or ia", + "eng aged", + "jump ing", + "reti red", + "n aked", + "p uni", + "speed way", + "sci ences", + "rehear sal", + "on ym", + "dy ou", + "pl ates", + "r ati", + "kri sh", + "jaz z", + "car ol", + "ra f", + "pen alty", + "tim eline", + "ru by", + "engine ers", + "ra f", + "bel le", + "do se", + "che on", + "esc ap", + "me g", + "ran k", + "or d", + "me gan", + "mer ch", + "ec lipse", + "âĺº ï¸ı", + "ple dge", + "kir k", + "per si", + "leice ster", + "sa k", + "w k", + "saf ely", + "yy y", + "je t", + "promis ed", + "j c", + "en ne", + "no ah", + "re no", + "re a", + "ðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤ", + "tra il", + "ðŁij Ģ", + "f d", + "soo o", + "ri min", + "w k", + "ภ²", + "i al", + "x ox", + "bis cu", + "d ale", + "fan dom", + "particip ating", + "fla g", + "privi lege", + "pe ach", + "mach ine", + "bo ston", + "gro ss", + "o g", + "mir acle", + "adop tion", + "u ss", + "mon sters", + "be ij", + "clar ke", + "pu shing", + "pra ying", + "ar o", + "d n", + "ell is", + "apol lo", + "od ds", + "refuge e", + "to w", + "b p", + "ðŁĩ¬ðŁĩ §", + "h end", + "app eared", + "memb ership", + "pe an", + "du m", + "viol ent", + "v y", + "potat oes", + "aw w", + "greet ings", + "t ts", + "ac on", + "sh ane", + "photograph ed", + "cra b", + "temper atures", + "cu ba", + "c fc", + "wel com", + "he l", + "in nings", + "m k", + "co de", + "kno ck", + "gra ss", + "swe dish", + "p ta", + "ick y", + "v at", + "lin ing", + "s q", + "sa p", + "ar c", + "announ cing", + "sk ins", + "cit yof", + "br ing", + "co x", + "gam er", + "it arian", + "i da", + "h d", + "ros se", + "sad ly", + "ge o", + "âļ ¡ï¸ı", + "tag s", + "fa ther", + "chan ge", + "l ance", + "whis key", + "adel aide", + "te c", + "stick ers", + "marke t", + "class y", + "bad ass", + "flo rence", + "lin er", + "fro st", + "k ate", + "ac on", + "scand al", + "es sex", + "ðŁĺ ı", + "vi vi", + "dr ill", + "blo ggers", + "recomm end", + "d ha", + "ac res", + "ro ma", + "bu y", + "gro cer", + "er ia", + "ma har", + "ff er", + "patter ns", + "ver i", + "com pu", + "st ev", + "ang a", + "ment or", + "do o", + "it ali", + "cdn poli", + "on ly", + "conduc t", + "elec tro", + "de f", + "wh ale", + "prepar ation", + "bicy cle", + "vi ral", + "turn out", + "bra ss", + "qu ad", + "hospit ality", + "pack aging", + "den cy", + "ceme tery", + "abo ard", + "dre aming", + "pic ture", + "t all", + "inv ent", + "ad mi", + "o e", + "tem ps", + "qu an", + "fun dam", + "pro mp", + "resi dence", + "mu d", + "sour i", + "âĦ ¢", + "graff iti", + "gi f", + "d nd", + "com p", + "s war", + "pe eps", + "pale stine", + "devil s", + "san 
g", + "assi stance", + "bi ke", + "missi ssi", + "inter viewed", + "ne phew", + "dru ms", + "v and", + "gentle men", + "n sw", + "inst a", + "leban on", + "ee ee", + "oli via", + "ver y", + "rou gh", + "industri es", + "m ation", + "ðŁĺ Ĵ", + "bar rel", + "n ay", + "po ps", + "moder n", + "ill y", + "are st", + "on ents", + "protec ting", + "v ans", + "e o", + "vi kings", + "restaur ants", + "re ck", + "jac kie", + "andre w", + "w illing", + "he ath", + "citiz en", + "disc rimin", + "๠Ī", + "stu art", + "m ys", + "hi p", + "tran sp", + "\" ?", + "te x", + "su shi", + "ke d", + "cro ssed", + "dist ur", + "pe dia", + "f ate", + "some how", + "mo th", + "proce ssing", + "is s", + "r in", + "u ts", + "yy c", + "ver t", + "lg bt", + "re id", + "on to", + "arab ia", + "habit at", + "= =", + "stre ak", + "simp son", + "addic tion", + "wim ble", + "deli vers", + "challeng ing", + "ðŁİ ¶", + "fran ch", + "e du", + "s me", + "ai ds", + "hur st", + "th am", + "tari an", + "remem bered", + "palestin ian", + "fe es", + "tru m", + "sket ch", + "ur u", + "fit ting", + "jes se", + "ðŁĶ¥ ðŁĶ¥", + "---- ----", + "ba ch", + "ici a", + "colo red", + "da h", + "associ ate", + "int el", + "s eller", + "p u", + "stu ffed", + "ac s", + "b s", + "sh in", + "cooper ation", + "certific ate", + "ab u", + "ingredi ents", + "re v", + "in ge", + "el der", + "christi an", + "bun dle", + "th ic", + "dir t", + "beij ing", + "comm it", + "ted dy", + "ed u", + "to day", + "s field", + "w yn", + "confir ms", + "lo o", + "j v", + "ene ss", + "al pha", + "vir us", + "ari um", + "gr ind", + "bri dges", + "introduc tion", + "pol ls", + "bac ter", + "z ach", + "termin al", + "ra iders", + "fla vor", + "zom bie", + "vo d", + "sp reading", + "gameof thrones", + "effici ency", + "lat ely", + "ale m", + "twee t", + "cri mes", + "cl er", + "de y", + "dg ed", + "hy un", + "pay ments", + "cir cus", + "ðŁĺŃ ðŁĺŃ", + "mis souri", + "lu b", + "episo des", + "c age", + "po s", + "mat ching", + "tumb lr", + "lin ed", + "ge st", + "am bi", + "nar r", + "ing ton", + "regu l", + "blo wn", + "is le", + "co co", + "on don", + "joshu a", + "tour ing", + "sm a", + "sau sage", + "best friend", + "bo eing", + "desi re", + "sav age", + "ra pper", + "de vo", + "te ar", + "take over", + "cow boys", + "po ker", + "par ag", + "pp e", + "h int", + "we ars", + "se th", + "ro les", + "l anc", + "man ga", + "form at", + "fl yer", + "c ay", + "mo or", + "ba ke", + "spla sh", + "v ad", + "ker ala", + "proce eds", + "sil ly", + "reflec tion", + "di str", + "wi d", + "su it", + "ci vic", + "yan kees", + "by n", + "migr ation", + "di stin", + "or ch", + "fe mini", + "quali fying", + "tu ri", + "o be", + "hun dred", + "cra p", + "wan g", + "mathe mat", + "bu re", + "expo sure", + "fergu son", + "seme ster", + "re serv", + "pl ym", + "a hu", + "fac ial", + "wa x", + "wor ried", + "ca b", + "vi o", + "as a", + "co d", + "to pics", + "p cs", + "hal o", + "rescu ed", + "horiz on", + "ar k", + "âļ ª", + "hol ly", + "el f", + "ul ti", + "pu p", + "quali fied", + "attend ance", + "ati vely", + "destro y", + "y c", + "for th", + "photoo ftheday", + "c ents", + "ic eland", + "meas ures", + "de sk", + "port folio", + "artic les", + "direc tors", + "dat ab", + "e w", + "creep y", + "oun ding", + "hon oured", + "mi st", + "j it", + "men tioned", + "port able", + "iti c", + "d ann", + "friday feeling", + "am id", + "ti ger", + "scri p", + "helicop ter", + "hard ware", + "expl or", + "work place", + "austri a", + "beat les", + "ber nar", + "spi der", + "disc o", + "cul t", + "lim 
its", + "shor tly", + "fin al", + "nin ja", + "lu ke", + "le bron", + "wal mart", + "o il", + "van illa", + "shi re", + "ye g", + "ak y", + "c s", + "bl er", + "collec ted", + "t g", + "rol led", + "speci als", + "b ff", + "pier re", + "sh im", + "vi er", + "flash back", + "restor ation", + "individu als", + "pro d", + "fre aking", + "tu rer", + "o a", + "re fre", + "mor oc", + "gre et", + "re yn", + "care ful", + "our ing", + "u sh", + "is d", + "g ill", + "vie w", + "thunder storm", + "b led", + "pic nic", + "guar di", + "pi g", + "ar k", + "syl vania", + "bann ed", + "u cl", + "vi jay", + "ori um", + "av engers", + "believ es", + "eu r", + "monu ment", + "concer ned", + "la bs", + "ber g", + "a ap", + "vi sh", + "sing les", + "can cel", + "z el", + "ar ab", + "ru th", + "too th", + "ar ta", + "sh af", + "chair s", + "r ack", + "dise ases", + "crow d", + "cl y", + "fle x", + "christ ma", + "artif icial", + "tom at", + "fin e", + "dra ws", + "advoc ate", + "fran ce", + "Ù Ĭ", + "ðŁĺ ³", + "heav y", + "s our", + "compre hen", + "no ble", + "aa p", + "hin du", + "cor al", + "g ars", + "ow en", + "n l", + "st all", + "yel low", + "mar ina", + "in ver", + "suppor t", + "tou gh", + "promis es", + "pi e", + "master piece", + "sco re", + "for ce", + "mor tg", + "crypto currency", + "o x", + "r ors", + "rock in", + "pro vin", + "ho g", + "no stal", + "oak land", + "pat rick", + "inclu sion", + "tra ffic", + "ah med", + "a ha", + "lux ury", + "con secu", + "de mon", + "âĸ º", + "b lowing", + "st ag", + ": \"", + "encoura ge", + "ben e", + "sku ll", + "do dge", + "bu ster", + "kin son", + "wit ne", + "er ror", + "lo west", + "fel low", + "à °", + "sh re", + "bl ur", + "vir gin", + "compos er", + "sli p", + "mor nings", + "ga ins", + "tab le", + "gra in", + "ari st", + "braz ilian", + "w we", + "tu es", + "ribb on", + "an ag", + "di st", + "sac rif", + "em brace", + "entreprene ur", + "af fili", + "de o", + "t ali", + "touri st", + "fat al", + "ì Ĭ", + "autom atic", + "ðŁĩ µ", + "we ak", + "wel fare", + "confir m", + "benjam in", + "fi ghts", + "alleg ed", + "me ad", + "strugg ling", + "pro secu", + "che f", + "à ¨", + "propos al", + "er n", + "ðŁĺ Ħ", + "dy k", + "on gs", + "hon g", + "m ack", + "mel on", + "on ent", + "ru sh", + "d ap", + "tol er", + "pro pag", + "c ze", + "trans lation", + "wal let", + "cott age", + "sa il", + "constitu tion", + "ðŁĴ Ģ", + "mun ici", + "fav or", + "storm hour", + "i h", + "ðŁĺ Į", + "approach ing", + "pin ned", + "j ed", + "niger ian", + "n ach", + "sh at", + "particul arly", + "mc don", + "camer as", + "anni e", + "admini str", + "he at", + "electr ical", + "char ming", + "gib son", + "bouti que", + "ex posed", + "ac tor", + "pil low", + "beach es", + "genu ine", + "margare t", + "ben nett", + "lou isi", + "pos itions", + "el y", + "shin y", + "ten tion", + "architec t", + "ren tal", + "ac qui", + "goo gle", + "sub way", + "mom ent", + "ðŁļ ¨", + "ri m", + "metho ds", + "cy cli", + "nor folk", + "Ù Ī", + "over whel", + "ra pid", + "we ar", + "happy birthday", + "progre ssive", + "ðŁĴ ¥", + "co gn", + "pap a", + "f ool", + "philosoph y", + "pol ar", + "jim my", + "wi g", + "ðŁĴ ĭ", + "oper ating", + "reduc tion", + "ph i", + "fla gs", + "to the", + "o di", + "a res", + "k oo", + "k ang", + "ar kansas", + "ash ton", + "wimble don", + "sci fi", + "attrac tive", + "mississi ppi", + "logi sts", + "ral ph", + "la bel", + "gradu ates", + "ma ha", + "home town", + "âľĮ ï¸ı", + "foun ded", + "on the", + "li z", + "trans l", + "mini mum", + "pre sti", + "ta m", + "gener 
ations", + "re bel", + "journ alists", + "par am", + "mc m", + "acry lic", + "death s", + "tes la", + "w t", + "bry ant", + "jer us", + "i stanbul", + "muham mad", + "ri ley", + "k ris", + "work shops", + "is o", + "coun ts", + "stre t", + "prote cted", + "trin ity", + "man ual", + "r hin", + "r il", + "pleas ant", + "le mon", + "ner d", + "har der", + "dar ren", + "bur y", + "ra h", + "bas is", + "mi gu", + "occa sion", + "li sts", + "âĿ¤ï¸ıâĿ¤ï¸ı âĿ¤ï¸ı", + "e b", + "de cre", + "hamp ton", + "ìĿ ´", + "tra vis", + "trans form", + "puer to", + "nh l", + "av oc", + "tri ps", + "unexpe cted", + "ve t", + "di dyou", + "bar ber", + "st ages", + "m son", + "re presented", + "for t", + "l al", + "pp le", + "nic ely", + "ignor e", + "qu il", + "qu inn", + "h k", + "carri er", + "remin ded", + "am ong", + "pass enger", + "el len", + "gue z", + "sc ape", + "mu ral", + "youn gest", + "ma sh", + "d ill", + "rout ine", + "stain less", + "jack son", + "gand hi", + "th al", + "on ers", + "edit orial", + "convers ations", + "sd ale", + "autom ation", + "i ke", + "า à¸", + "ðŁĩ ª", + "hau l", + "la ying", + "men tions", + "am en", + "abor tion", + "i bi", + "coun ties", + "ca therine", + "man ds", + "jam e", + "roll er", + "au t", + "n am", + "o logical", + "cep tion", + "ran king", + "tox ic", + "sn acks", + "victor ian", + "bang kok", + "psycho logy", + "re g", + "ang ela", + "respon d", + "sty le", + "sophi e", + "dak ota", + "achiev ed", + "mar ked", + "imper ial", + "in as", + "glo ves", + "sli m", + "confi dent", + "att acked", + "gg er", + "lon ely", + "valentine sday", + "re b", + "craft beer", + "orig in", + "zim bab", + "ce iling", + "te ens", + "other wise", + "w b", + "f ers", + "day sof", + "advis or", + "y ah", + "âĻ ª", + "en der", + "republic ans", + "av a", + "skir t", + "pi pel", + "chi e", + "jan e", + "ja x", + "ðŁĺ ĭ", + "âľ Ĭ", + "j ays", + "bre tt", + "bal o", + "cru cial", + "d har", + "as is", + "de au", + "lloy d", + "chat ting", + "âĿĦ ï¸ı", + "rel ay", + "remark able", + "n s", + "we t", + "bris bane", + "ðŁĶ ´", + "tion ally", + "f k", + "la yer", + "house hold", + "consecu tive", + "es is", + "pend ant", + "st ir", + "crit ic", + "su gar", + "photo shop", + "pa res", + "arti stic", + "do dgers", + "c un", + "cra fted", + "am end", + "bo at", + "âŃIJ ï¸ı", + "egyp tian", + "sa w", + "tra ge", + "small er", + "ox y", + "pa ired", + "nex t", + "i res", + "tac o", + "o y", + "u c", + "st i", + "a erial", + ": //", + "dr o", + "dot com", + "gg ins", + "r pg", + "ay e", + "le an", + "stri ker", + "lo bby", + "prote sts", + "pri ority", + "congre ss", + "am ate", + "inv it", + "r ington", + "mom my", + "th us", + "allow ing", + "pione er", + "enfor cement", + "g ori", + "tal k", + "dra g", + "du mb", + "bul let", + "san ge", + "er y", + "tar gets", + "ðŁĩ ¦", + "he ather", + "consi der", + "seaf ood", + "ve st", + "ris ks", + "% .", + "p g", + "sac red", + "he ating", + "kick ed", + "tto t", + ". 
-", + "chan di", + "co ven", + "po ol", + "pul se", + "i a", + "ro ster", + "shakespe are", + "es a", + "car go", + "pean ut", + "tro op", + "ac tion", + "tab let", + "home work", + "cast le", + "stru ction", + "mus icians", + "free zing", + "bu tt", + "justin bieber", + "j j", + "bah rain", + "an them", + "au dit", + "didyou know", + "na vig", + "guid ance", + "âĸ ¶", + "tur f", + "n un", + "fic ations", + "ye men", + "char ging", + "x c", + "bron cos", + "su bur", + "p ale", + "bor ing", + "among st", + "for the", + "em per", + "om fg", + "p j", + "expe cting", + "ðŁĴ «", + "st l", + "ad min", + "expect ations", + "sw an", + "shoo t", + "oooo o", + "min ent", + "ãĢ IJ", + "wall ace", + "stan g", + "satur day", + "adop ted", + "dou bles", + "hom ie", + "ome z", + "d han", + "vent ure", + "surroun ding", + "fi le", + "mob ility", + "de es", + "w ski", + "broo ke", + "emb ro", + "re members", + "kar a", + "test im", + "bo tan", + "m tv", + "sacrif ice", + "jerus alem", + "d l", + " ´", + "proper ly", + "ili on", + "as i", + "leg it", + "co pe", + "m cla", + "recy cling", + "lar ger", + "ðŁĴ ĵ", + "pat ric", + "gener ous", + "ja red", + "p f", + "mol ly", + "thom as", + "ju dges", + "h b", + "sor ts", + "bl vd", + "o ven", + "enter ing", + "plan es", + "be et", + "integr ation", + "boo ked", + "fre ed", + "ver n", + "ash es", + "to pped", + "de pot", + "welcom ed", + "ren a", + "m ick", + "d and", + "see ks", + "gam er", + "ran kings", + "ren e", + "mu t", + "whis ky", + "fire fighters", + "gu es", + "ga ther", + "tour ney", + "de men", + "y ang", + "new ton", + "autom otive", + "back yard", + "deta iled", + "mi st", + "to bac", + "fi ber", + "un usual", + "grat itude", + "sp are", + "ne ys", + ": *", + "per i", + "flo ating", + "fin alist", + "don ating", + "dre ss", + "bro ad", + "be the", + "econom ics", + "tai wan", + "ed wards", + "plu g", + "pra iri", + "val en", + "bab a", + "f ad", + "an as", + "har per", + "dis order", + "app lied", + "p att", + "bi kin", + "li ver", + "cu ri", + "carol ine", + "ann er", + "juli an", + "wal king", + "mal col", + "screen shot", + "co ding", + "skin care", + "activi sts", + "myster ious", + "ex act", + "blo cking", + "mercur y", + "bat ter", + "du mp", + "âľ Į", + "en se", + "li sh", + "ridic ulous", + "prote sters", + "ðŁĻ Ī", + "lu st", + "swe at", + "as s", + "ali ke", + "co dy", + "re ments", + "win ds", + "as pir", + "vi enna", + "pra y", + ".. 
.@", + "bo i", + "cand le", + "assi sts", + "te e", + "der son", + "p ony", + "f ence", + "con spir", + "âĺħ âĺħ", + "oo th", + "e pic", + "ba rely", + "a unt", + "b am", + "diamon ds", + "end less", + "scre ens", + "can cer", + "gr o", + "p st", + "pro spec", + "mo sque", + "help ful", + "ou ri", + "bro ther", + "gu jar", + "cri sti", + "ine z", + "to wers", + "ad dresses", + "gra y", + "bur ton", + "re tweeted", + "ðŁ¤ Ķ", + "n ity", + "du ck", + "super vis", + "jo an", + "kin der", + "sanc tu", + "pi ed", + "âı °", + "ł ï¸ı", + "m ati", + "reven ge", + "ce ster", + "eli fe", + "desig ners", + "back ed", + "bo li", + "wei ght", + "cou ch", + "su res", + "s its", + "shri mp", + "la gos", + "auth orities", + "os ity", + "hol ly", + "compu ting", + "fac tors", + "ab e", + "pan els", + "ram ad", + "sent ence", + "missi on", + "hol m", + "r b", + "d ads", + "shang hai", + "mon ey", + "she ets", + "sk ate", + "thre w", + "cup cakes", + "infin ite", + "l is", + "practic ing", + "ess ay", + "ka i", + "as ci", + "mo b", + "u gh", + "hol mes", + "re gg", + "ik h", + "mo ck", + "collec tions", + "pe p", + "o va", + "sal t", + "nan dez", + "co y", + "thre ats", + "tex ts", + "cin nam", + "pregn ancy", + "pen ding", + "stam p", + "flow er", + "g is", + "agre ed", + "pay ne", + "ro ver", + "ph ra", + "sof t", + "f fin", + "fa thers", + "pass engers", + "aw ays", + "al a", + "h es", + "li van", + "in s", + "samu el", + "ingu i", + "h of", + "j j", + "chen nai", + "cat al", + "om ic", + "he ath", + "ni ece", + "pump ed", + "integr ated", + "are l", + "no m", + "produc tivity", + "wan ting", + "vis a", + "di ana", + "tw il", + "it v", + "cam ps", + "ro wing", + "d ley", + "black and", + "gu ards", + "b ells", + "re verse", + "vi be", + "ric ky", + "mo ss", + "ny t", + "âĺ Ģï¸ı", + "el le", + "tro y", + "cu dd", + "ev an", + "women s", + "fo to", + "mi stakes", + "wick ed", + "mi l", + "c led", + "me mes", + "co smo", + "schol ar", + "ren o", + "ðŁĺ Ģ", + "v ents", + "# â̦", + "terrori sts", + "ca sey", + "cardin als", + "ðŁĺĬ ðŁĺĬ", + "venezu ela", + "bol a", + "liter acy", + "t w", + "en o", + "con tains", + "au stin", + "fin anci", + "ev an", + "har vard", + "origin ally", + "chev ro", + "her ald", + "nott ingham", + "manag ers", + "âŀ ¡", + "accep ting", + "wal sh", + "tutor ial", + "entrepreneur ship", + "yach t", + "requi rements", + "glen n", + "pe de", + "unfortun ately", + "ach ing", + "dais y", + "gi an", + "night mare", + "âĿ Ĺ", + "r ina", + "b art", + "ema ils", + "oppo site", + "who m", + "sa ke", + "pu zzle", + "da shi", + "par ty", + "blan ket", + "bus es", + "lo re", + "beau ty", + "reas on", + "pun jab", + "winds or", + "func tional", + "exi sting", + "hel lo", + "gli mp", + "con vin", + "la k", + "scre aming", + "rebec ca", + "bli ss", + "north west", + "infin ity", + "cosme tics", + "pul ling", + "coffe e", + "pl ing", + "op ho", + "colom bia", + "interior design", + "( +", + "emo tions", + "sa c", + "sun glasses", + "sav es", + "d f", + "six th", + "al y", + "ðŁĺ »", + "de en", + "dev ast", + "polit icians", + "lac rosse", + "g u", + "pe i", + "jav a", + "comb ine", + "coal ition", + "er ts", + "survi v", + "ch ad", + "stri an", + "n n", + "de vi", + "coun c", + "concer n", + "contro ller", + "bre ast", + "j ury", + "tu m", + "introduc es", + "la di", + "mobi le", + "al z", + "ste ady", + "nur ses", + "h acking", + "on line", + "oce an", + "ðŁİ Ħ", + "a am", + "ju ven", + "ic c", + "louisi ana", + "ar te", + "street art", + "is on", + "wn s", + "fr m", + "p anda", + "no ir", + "main 
tain", + "del ay", + "symp toms", + "thor n", + "ge ome", + "ter n", + "carri ed", + "p ru", + "pan or", + "as sy", + "per u", + "clou d", + "sp ra", + "pe di", + "e ste", + "tag ged", + "ðŁĺ Ŀ", + "shado ws", + "naz i", + "ا٠Ħ", + "cor ri", + "âĻ¥ âĻ¥", + "j ad", + "ðŁĩ «", + "form al", + "spo ken", + "ðŁĮ ŀ", + "enjo y", + "lo pez", + "out look", + "in ho", + "w ander", + "Ù ħ", + "ma ya", + "pe e", + "d ine", + "ãĢ ij", + "brief ing", + "suppor ter", + "ar ily", + "ght ers", + "natur ally", + "doctor who", + "j en", + "v ar", + "new year", + "re se", + "si mm", + "re x", + "con sequ", + "tomat oes", + "bur st", + "bra vo", + "bur gers", + "cr acking", + "nor theast", + "bi om", + "mush room", + "mar que", + "dou ble", + "ni er", + "v ag", + "tw enty", + "key board", + "win ni", + "jama ica", + "par ish", + ": -", + "mental health", + "ali zing", + "ren der", + "wa king", + "ðŁİ Ĥ", + "g ly", + "na than", + "wa shing", + "mel issa", + "jun g", + "loy al", + "chil i", + "song writer", + "guit arist", + "bo wie", + "neighb ors", + "onym ous", + "as set", + "ta i", + "head quarters", + "ðŁĮ Ī", + "i hear", + "ci gare", + "sur g", + ") \"", + "re pl", + "dar ling", + "ðŁĻ Ħ", + "z ak", + "sa re", + "ãħ ĭ", + "mic key", + "ware house", + "mass age", + "ine es", + "did nt", + "i w", + "hur ts", + "eng aging", + "mag ic", + "women in", + "k itten", + "mor s", + "c art", + "tit ans", + "colle ague", + "compe ting", + "er an", + "k hal", + "mar ble", + "dem and", + "del ight", + "et ary", + "bli zz", + "lou ise", + "m ls", + "fini shes", + "experim ent", + "conduc ted", + "electr onics", + "itt ers", + "car ing", + "wh ats", + "sym bol", + "jun g", + "e cu", + "pi x", + "con text", + "char ger", + "ðŁĺ ĩ", + "re ig", + "fra g", + "ë ĭ", + "ch ad", + "tru e", + "ker ry", + "def ending", + "a int", + "au ton", + "check out", + "bar nes", + "less ly", + "d t", + "m me", + "clou dy", + "second ary", + "are z", + "_ :", + "app a", + "const ant", + "\" )", + "ve ts", + "jo b", + "i ent", + "ðŁĺŃðŁĺŃ ðŁĺŃ", + "m j", + "fren ch", + "di ver", + "davi es", + "hh hh", + "e book", + "๠ī", + "mar iti", + "bree ze", + "susp ended", + "mat o", + "vi et", + "ra hu", + "se i", + "bol t", + "en ary", + "le is", + "kar l", + "fr amed", + "expla ining", + "ab c", + "de aling", + "nat o", + "ja ke", + "exp and", + "leon ard", + "establi shed", + "du b", + "ar men", + "el led", + "voc al", + "nichol as", + "ori ent", + "k yo", + "illustr ated", + "ah h", + "danc ers", + "milli on", + "ge ta", + "po pp", + "as u", + "mur dered", + "gi ble", + "sto ked", + "gri ffin", + "maxi mum", + "adri an", + "en counter", + "ther o", + "david son", + "ðŁį »", + "holi day", + "ev o", + "asse ts", + "car son", + "memor able", + "âļ ½", + "ob am", + "represent ative", + "cb d", + "tr icks", + "vo gue", + "vo ice", + "mm mm", + "sebasti an", + "cli f", + "ath y", + "par alle", + "ðŁ¤ ·", + "pa k", + "ev acu", + "e ats", + "ا Ø", + "tou ched", + "organ ised", + "spir its", + "can ad", + "gui ded", + "frame work", + "ðŁĮ Ł", + "pe d", + "natur al", + "ag ar", + "replac ed", + "anch or", + "ti t", + "sha h", + "organ is", + "super ior", + "r n", + "ch ro", + "eric a", + "st ill", + "cor on", + "chu ck", + "loc ks", + "or gan", + "ro sen", + "sc am", + "ben ed", + "/ #", + "ke en", + "tre vor", + "vamp ire", + "sor ted", + "! 
'", + "af ford", + "in tro", + "gr ace", + "ðŁĺ ľ", + "sau r", + "kick starter", + "influ en", + "v u", + "y up", + "po c", + "ðŁİ ¥", + "a ar", + "s ang", + "tre k", + "et sy", + "tb h", + "scre am", + "chevro let", + "pix el", + "shepher d", + "an or", + "gabri el", + "tw ood", + "sd cc", + "me ters", + "develop ers", + "clo sure", + "v w", + "twit ch", + "ì Ĺ", + "se oul", + "pr ice", + "ho g", + "n ish", + "hill ary", + "scrat ch", + "in cen", + "wag on", + "dis ability", + "pan ther", + "ch ats", + "g d", + "wit z", + "sus sex", + "l ate", + "den mark", + "ger ald", + "cancel led", + "net te", + "i x", + "nav al", + "bap tist", + "te t", + "y ad", + "ma th", + "ho y", + "r andy", + "po int", + "intel lec", + "fru its", + "w ool", + "gu in", + "pr on", + "the ft", + "con dem", + "mar ry", + "n ola", + "architec ts", + "cin cin", + "roc kets", + "gentle man", + "ex plan", + "t ate", + "do e", + "ra ises", + "wild life", + "w l", + "insi der", + "blan c", + "w p", + "for sale", + "ny c", + "po well", + "unbeliev able", + "pen s", + "goo dies", + "mu stang", + "p ens", + "st ays", + "squ ash", + "xox o", + "near by", + "ever ton", + "co co", + "le agu", + "k han", + "stu d", + "south west", + "con struc", + "s worth", + "cro atia", + "le a", + "su ms", + "aim s", + "e an", + "van ess", + "iti ous", + "pa thy", + "arc ade", + "b end", + "sugge sts", + "sac ram", + "roy als", + "ri er", + "em ir", + "in cl", + "an k", + "clar k", + "ri ght", + "vac c", + "ठ¾", + "tan e", + "li b", + "u sc", + "sal es", + "hu h", + "s ally", + "ver a", + "p ga", + "gro ws", + "dru m", + "tre e", + "eth ics", + "sug gest", + "is ab", + "se aled", + "pre viously", + "anim ated", + "ab du", + "ri ses", + "glo b", + "pre dat", + "scar f", + "del ic", + "om ar", + "ll i", + "sx sw", + "py thon", + "ne bra", + "fun k", + "reflec t", + "pav ilion", + "tic ally", + "ch asing", + "bak ery", + "inva sion", + "ko h", + "believ ed", + "co hen", + "con qu", + "cra fts", + "nat i", + "cle ver", + "govern ance", + "sam ples", + "fa ils", + "â Ķ", + "ti mo", + "r itu", + "stri king", + "inclu sive", + "sho cking", + "can t", + "requi res", + "dra wings", + "ภŃ", + "purch ased", + "du m", + "z ach", + "war ner", + "con sole", + "man sion", + "foun tain", + "circu m", + "e sh", + "is land", + "mil k", + "pro fits", + "hali fax", + "ri val", + "âľĪ ï¸ı", + "jen ny", + "sand ra", + "ny e", + "k elly", + "y al", + "qu ad", + "no s", + "inste in", + "fin alists", + "mid fielder", + "cu e", + "excep tional", + "a an", + "sa pp", + "gett in", + "sa a", + "f ati", + "sl ice", + "vol k", + "s wal", + "la sting", + "sum mary", + "it as", + "sm o", + "s z", + "âĺ Ĩ", + "ip l", + "fl ames", + "ene ws", + "ha v", + "hoo die", + "pitch er", + "win dy", + "re vol", + "centr al", + "ton ite", + "ðŁİī ðŁİī", + "sol ved", + "mil wau", + "organiz ations", + "wee ts", + "re fin", + "s th", + "ãĥ ¼", + "el in", + "ton a", + "cinnam on", + "ðŁİ ¨", + "ðŁİ ģ", + "ron aldo", + "pen insu", + "ome ga", + "el ds", + "desig ning", + "e igh", + "blu et", + "ben z", + "nu g", + "ash a", + "robo ts", + "su dan", + "choo sing", + "en do", + "ser ge", + "clo sely", + "hand y", + "fing er", + "be ing", + "ar te", + "survi ved", + "fl ame", + "mile stone", + "gu t", + "d war", + "fu tures", + "é e", + "el o", + "fri dge", + "eli c", + "ou ch", + "u b", + "p v", + "tit an", + "col lar", + "st ation", + "nev ada", + "aur ora", + "r d", + "dun can", + "âģ ł", + "bri en", + "mar sh", + "Ð ¾", + "to tal", + "ch ry", + "s ers", + "su ffe", + "ra chel", + "colle 
ge", + "to days", + "cour ts", + "ch it", + "re united", + "gym na", + "gen esis", + "be side", + "re presentation", + "ch ant", + "collec tor", + "ra k", + "ath ens", + "ni gh", + "mun ich", + "langu ages", + "fl u", + "particip ation", + "__ _", + "c v", + "spec trum", + "so da", + "co ver", + "refe ren", + "ab bo", + "ap a", + "public ation", + "ed m", + "mon ica", + "ar my", + "ðŁļ Ģ", + "div or", + "dr y", + "stre ams", + "robo tics", + "ci der", + "bull ying", + "appro val", + "sto ke", + "plat forms", + "sier ra", + "ex tin", + "i b", + "ha yes", + "succe ed", + "suff er", + "at ically", + "da i", + "lyn ch", + "h ound", + "del ines", + "ack now", + "d ated", + "exclu sively", + "he res", + "fac ilit", + "dam aged", + "char ter", + "la kers", + "fal con", + "unve iled", + "wel ove", + "e ase", + "pati ence", + "l one", + "gent le", + "gene tic", + "produc ing", + "g our", + "shann on", + "bil ities", + "zimbab we", + "p int", + "dau ghters", + "liter ary", + "bel le", + "cl am", + "surroun ded", + "k any", + "ne il", + "pir ate", + "rang er", + "hb d", + "nat alie", + "bel ong", + "olym pi", + "emb assy", + "sc ol", + "en er", + "ak in", + "lo ren", + "b h", + ": /", + "di va", + "den im", + "hi pp", + "ðŁĩµ ðŁĩ", + "arn old", + "? '", + "we ren", + "em power", + "dis abled", + "man or", + "rasp berry", + "b af", + "aw ful", + "dru mmer", + "kar dashi", + "n ash", + "machine learning", + "ch u", + "rebel s", + "tim ing", + "mon roe", + "ton gue", + "ran ge", + "pup ils", + "re ss", + "amaz on", + "b z", + "har ley", + "pal mer", + "ballo on", + "s ings", + "ic ec", + "j b", + "c ers", + "g ps", + "whi st", + "ri se", + "l t", + "oo oo", + "c attle", + "shoo ter", + "vod ka", + "uc l", + "mt g", + "le sli", + "jon as", + "di spo", + "at ric", + "ste in", + "vintag e", + "fir ms", + "flo yd", + "cow boy", + "soo oo", + "is aac", + "war craft", + "disney land", + "beauti ful", + "be am", + "franch ise", + "bu n", + "k ag", + "an on", + "tur bo", + "swee p", + "made in", + "kar achi", + "dete ctive", + "penn sylvania", + "contro versi", + "vitam in", + "a side", + "chron ic", + "descri bes", + "remo val", + "ha h", + "ap er", + "ten ed", + "u to", + "bad ly", + "mir ac", + "f ry", + "ye a", + "in jec", + "ther mal", + "comp act", + "th or", + "te ed", + "ur gent", + "l ite", + "g illi", + "sop hom", + "ic o", + "che m", + "p m", + "for k", + "fre ak", + "ch ak", + "recipi ent", + "i y", + "ni k", + "model ing", + "c ans", + "ðŁı Ģ", + "del ux", + "se am", + "surviv ors", + "rad ical", + "investig ating", + "reli able", + "f m", + "tur t", + "ligh thouse", + "to ol", + "go wn", + ") )", + "bo ts", + "auto graph", + "a id", + "bu ffe", + "h mm", + "horri ble", + "ssi onal", + "ann i", + "๠Ģ", + "k its", + "sch i", + "eter nal", + "hu ss", + "sens itive", + "r u", + "tast es", + "chec ks", + "im o", + "por tion", + "sk ate", + "e den", + "half time", + "fri ed", + "ri hanna", + "ti se", + "fl ick", + "ca in", + "s gt", + "âľ Ķ", + "sh au", + "sta ined", + "ra ffle", + "dro ve", + "sal man", + "princi ples", + "sh o", + "ar u", + "je ss", + "gu ine", + "gar bage", + "my an", + "jel ly", + "dis ru", + "z ia", + "q ld", + "ent ries", + "la v", + "fle w", + "ad mit", + "objec ts", + "comp are", + "ny times", + "cann es", + "p n", + "suff ol", + "ro c", + "d ana", + "e gg", + "hi st", + "coun sel", + "' !", + "phy si", + "imag ination", + "ad just", + "explo sion", + "plym outh", + "hor ror", + "elli ott", + "bour ne", + "de x", + "bre ed", + "au dio", + "lob ster", + "disappo inted", + "nation 
wide", + "( (", + "incre ases", + "austr ali", + "ce dar", + "star ing", + "rac ial", + "e is", + "g mt", + "visi ons", + "stay ed", + "discu ssions", + "de an", + "cur tis", + "mai den", + "stel lar", + "happ iest", + "h wy", + "pre season", + "car av", + "mon days", + "hospit als", + "glimp se", + "schol ars", + "ja i", + "ter race", + "ann a", + "goo se", + "gra ded", + "lot us", + "hun g", + "grocer y", + "stam ps", + "emper or", + "sc oop", + "in ser", + "c as", + "exist ence", + "he al", + "fal cons", + "mar vel", + "reduc ing", + "terri fic", + "magne tic", + "perfor ms", + "bar re", + "p us", + "tre ating", + "ic on", + "w h", + "decla red", + "tra uma", + "do d", + "come dian", + "nik on", + "bu gs", + "as m", + "mont gom", + "ibi za", + "comprehen sive", + "ha s", + "san ti", + "fellow ship", + "da sh", + "p sal", + "louis ville", + "sp y", + "fau lt", + "d the", + "fi led", + "vi sta", + "de sc", + "fe ars", + "you tu", + "sp s", + "es p", + "ri g", + "cri me", + "ber ger", + "wonder land", + "k ent", + "in formed", + "stev ens", + "my th", + "ast on", + "ir i", + "visit or", + "at ri", + "produc ers", + "al la", + "person ally", + "separ ate", + "agen cies", + "af ri", + "il an", + "spo ke", + "n ina", + "squ ad", + "di ves", + "de pend", + "li v", + "fier ce", + "enter taining", + "cha in", + "sc at", + "bor ders", + "pal ette", + "sp ro", + "os is", + "der by", + "tobac co", + "zi o", + "willi e", + "ju vent", + "zoo m", + "hol y", + "enti rely", + "af e", + "mart inez", + "be ds", + "pe a", + "bull dogs", + "ðŁĩª ðŁĩ", + "ib m", + "ne on", + "ethiop ia", + "team mates", + "plan ting", + "tw er", + "any time", + "for bes", + "ó n", + "run way", + "ner vous", + "ro ger", + "p ile", + "ch anc", + "apo caly", + "u w", + "o i", + "dr ought", + "territ ory", + "br ick", + "cre atures", + "go in", + "w aff", + "gre n", + "sou theast", + "je an", + "am bul", + "ed ited", + "stra p", + "c v", + "aar on", + "ãĥ» ãĥ»", + "t su", + "descri ption", + "kin dly", + "clu tch", + "im mer", + "en or", + "women sday", + "or ange", + "ra g", + "ob vious", + "hy der", + "chann els", + "man go", + "me yer", + "ra ining", + "ge tty", + "pil gri", + "coordin ator", + "up load", + "ninten do", + "don uts", + "san chez", + "app arel", + "j r", + "zz i", + ", @", + "jeff erson", + "accessi ble", + "great ly", + "e id", + "initi al", + "budd ha", + "par is", + "ma scot", + "â¬ĩ ï¸ı", + "sch war", + "si ri", + "sp inning", + "mortg age", + "e cho", + "end ange", + "ge dly", + "chlo e", + "enh ance", + "kar nat", + "k ry", + "explo res", + "ðŁĴ ģ", + "af fair", + "ic als", + "all a", + "dar t", + "dolph ins", + "diffe rences", + "squir rel", + "au gh", + "dr ones", + "ell en", + "re store", + "pa w", + "un for", + "pi ke", + "hil ton", + "colla b", + "consu mers", + "co inci", + "out comes", + "pp p", + "a q", + "coup on", + "li est", + "si ms", + "k ho", + "av es", + "spo on", + "pu dding", + "cor byn", + "hat ers", + "ex ams", + "sla ve", + ". !", + "p sa", + "app les", + "tam il", + "se d", + "co ke", + "zz o", + "lo sange", + "car bon", + "cla ir", + "... 
)", + "k hu", + "cra ig", + "explor ation", + "sanctu ary", + "su e", + "al way", + "demen tia", + "won ders", + "super hero", + "pakistan i", + "brown s", + "bluet ooth", + "lo cker", + "mar c", + "ev entu", + "delux e", + "rodri guez", + "âĿ¤ âĿ¤", + "ro bb", + "ðŁĴ ¦", + "lin ux", + "ten s", + "intellig ent", + "se ed", + "vo ter", + "s ler", + "pe aks", + "inter n", + "teen age", + "peninsu la", + "hand ling", + "ti e", + "cou sins", + "wen dy", + "me e", + "à¹Ģ à¸", + "din o", + "ðŁĴ °", + "ðŁĺ ĥ", + "ze e", + "s bury", + "trage dy", + "b k", + "bo re", + "z in", + "war ns", + "idi ot", + "tou ching", + "contin ental", + "tac os", + "saf ari", + "wa shed", + "po dium", + "morri son", + "fore sts", + "c bc", + "al on", + "partic ular", + "be ads", + "inv ented", + "lo ch", + "li ghter", + "where ver", + "i de", + "docu ments", + "a we", + "k r", + "no where", + "min er", + "st it", + "ro x", + "contribu te", + "har dy", + "cl an", + "ob ject", + "ca it", + "ðŁĴķ ðŁĴķ", + "happ ier", + "vege tables", + "t art", + "g ag", + "nom inee", + "heav ily", + "pan ic", + "j d", + "there sa", + "at m", + "u ph", + "s fc", + "su ri", + "drin k", + "n al", + "re vel", + "k l", + "avoc ado", + "nom ination", + "ma donna", + "shar on", + "malcol m", + "control led", + "sh ers", + "revi val", + "legis lation", + "shoo ts", + "n in", + "comm entary", + "pro s", + "human rights", + "str anger", + "mit ch", + "pipel ine", + "leg ally", + "th u", + "gil bert", + "tol l", + "gran ted", + "gh s", + "ir anian", + "refre shing", + "du k", + "ab i", + "pri me", + "jose ph", + "mo sa", + "stati stics", + "produc tions", + "mer ry", + "pat el", + "sa x", + "human itarian", + "struc tures", + "e missions", + "town s", + "fre el", + "ster ing", + "rat ings", + "alle gedly", + "cab in", + "st l", + "w ade", + "fl yers", + "tri m", + "promis ing", + "z u", + "bal lot", + "compar ison", + "free ze", + "ou ter", + "great ness", + "as sign", + "snow y", + "r ale", + "tor ies", + "med iter", + "kno ck", + "consult ant", + "cincin nati", + "analy st", + "sc oo", + "je ws", + "appro xim", + "pu re", + "portra its", + "cy rus", + "ation al", + "lo ans", + "acqu is", + "el u", + "accep table", + "uni on", + "water color", + "ru st", + "batt les", + "per fu", + "seas onal", + "ser ial", + "mind set", + "ri ot", + "fel d", + "enni al", + "clo set", + "pri est", + "tan ks", + "int l", + "scre w", + "bu m", + "ab dul", + "ou x", + "expla ined", + "ric a", + "imag ing", + "law yers", + "bu ried", + "ãĥ»ãĥ» ãĥ»", + "ear l", + "âĢ ķ", + "l ton", + "resto red", + "stri pes", + "fo ss", + "de mands", + "ste aling", + "alex is", + "mun d", + "ak er", + "ur us", + "war dro", + "hu gs", + "gen re", + "e go", + "Ù Ħ", + "particip ated", + "bab es", + "ban quet", + "ti ous", + "he mi", + "ds b", + "lo st", + "milwau kee", + "jen ner", + "ge m", + "ou tra", + "lo ses", + "id i", + "re ps", + "ðŁİ §", + "regu lation", + "fla w", + "f ang", + "vibr ant", + "ram p", + "ra ins", + "well being", + "so viet", + "vie wers", + "de po", + "libr aries", + "bi go", + "ser y", + "g ill", + "de struction", + "co z", + "c x", + "bri dal", + "al ds", + "plan ted", + "amate ur", + "lu d", + "che ering", + "show cas", + "pro file", + "i u", + "ver tical", + "pack ers", + "wiz ard", + "ski p", + "s light", + "be au", + "air ways", + "mu ch", + "re ra", + "ðŁĮ Ĭ", + "ab sor", + "pati o", + "pack ages", + "s ells", + "ment ally", + "ðŁĺ ¢", + "reyn olds", + "k are", + "tri bun", + "wal t", + "kn it", + "ta ste", + "sur rey", + "boun ce", + "cre ature", + "b 
are", + "bet ting", + "su re", + "mi ley", + "laugh s", + "al ore", + "cy n", + "t l", + "arti st", + "ann ah", + "war mer", + "dynam ics", + "lunch time", + "mariti me", + "vulner able", + "ðŁĴ ĥ", + "wol ver", + "dur ham", + "const antly", + "am in", + "si bl", + ": @", + "bul let", + "k ach", + "angel o", + "wil der", + "doo m", + "desk top", + "law suit", + "k ca", + "hen derson", + "inv iting", + "bet ty", + "ta wards", + "ra fa", + "le aked", + "and i", + "ge ms", + "af l", + "vel o", + "mediter ran", + "pro be", + "to tten", + "steph anie", + "sn ation", + "com be", + "q s", + "over come", + "assas sin", + "ra v", + "fil ip", + "winni peg", + "sh il", + "determin ed", + "k as", + "ou tre", + "regre t", + "gui des", + "aa a", + "ðŁĺ Ī", + "wi ves", + "mani fe", + "er ly", + "sm y", + "sh ima", + "x ing", + "pix el", + "jac ob", + "ac commod", + "to y", + "on o", + "po o", + "ti er", + "an swe", + "ðŁĴ ģ", + "ro sa", + "le ase", + "bel ongs", + "th ar", + "eventu ally", + "nei ther", + "go a", + "ski ing", + "at ra", + "ag h", + "broad casting", + "f ury", + "py ram", + "d ice", + "volk swag", + "wom ens", + "provi der", + "bom bs", + "miss ile", + "whi p", + "d ick", + "nor we", + "back up", + "el der", + "mat ure", + "concer ts", + "gi ous", + "sque e", + "good morning", + "bra ves", + "^ _", + "au ssie", + "lun a", + "mal es", + "he ck", + "for tn", + "rome o", + "steel ers", + "p n", + "pe er", + "re presents", + " «", + "kat y", + "migu el", + "requ ire", + "cha ins", + "l ur", + "immedi ate", + "ti mber", + "âĸ¶ ï¸ı", + "advoc acy", + "ex port", + "an z", + "tiff any", + "auth or", + "ðŁİ Ī", + "du des", + "chil ly", + "hi d", + "har m", + "bu g", + "mon ster", + "terri er", + "tu c", + "story telling", + "ta k", + "in ti", + "immigr ants", + "b is", + "reach es", + "com passion", + "john ny", + "contribu tions", + "ðŁIJ ¶", + "mechan ical", + "impre ssion", + "ran ks", + "ko be", + "men ting", + "bloss om", + "pab lo", + "buil der", + "bom bing", + "tw el", + "sul livan", + "om o", + "pe te", + "de mi", + "ku dos", + "w bb", + "t gif", + "mass ach", + "neighb or", + "che fs", + "eng ines", + "pun e", + "ga ined", + "phan tom", + "s days", + "ext end", + "gr an", + "cent ers", + "jac qu", + "dat asci", + "sleep y", + "el vis", + "answe red", + "s lot", + "con y", + "flexi ble", + "ti ally", + "le tics", + "% ,", + "andre ws", + "si ble", + "mom ma", + "vin o", + "do x", + "invit ational", + "twil ight", + "j ade", + "ill ery", + "joh ns", + "f ou", + "p v", + "-- ->", + "break down", + "billi on", + "prin ter", + "mon d", + "c bc", + "mag gie", + "legi on", + "du b", + "kur t", + "po or", + "paren ting", + "regi ons", + "bikin i", + "be ware", + "si onal", + "au burn", + "kid ding", + "amp les", + "sp an", + "con tempor", + "c ic", + "ha bits", + "ak o", + "pre fe", + "bud dies", + "it z", + "em ily", + "person nel", + "moun tain", + "ver sus", + "ðŁĺ ¬", + "ear ning", + "s ink", + "dar i", + "u u", + "s win", + "i ster", + "bru tal", + "n ac", + "kat a", + "clo th", + "am and", + "ðŁĶ Ĺ", + "ne o", + "alu min", + "week ends", + "nebra ska", + "co des", + "delay ed", + "brun o", + "pro ven", + "in c", + "i ght", + "fl an", + "or o", + "lam bert", + "regu lat", + "w f", + "massach use", + "kardashi an", + "bern ard", + "fi esta", + "volcan o", + "grand pa", + "anc a", + "d re", + "st itu", + "mean ing", + "fo am", + "au ck", + "at ed", + "r l", + "hot el", + "pers ons", + "dy nasty", + "ell or", + "ma i", + "am ne", + "sty ling", + "avi er", + "e g", + "vege tarian", + ", â̦", + 
"foun ders", + "sta in", + "g d", + "cy cles", + "sky line", + "trac tor", + "exi sts", + "tra l", + "kid ney", + "mar il", + "inst ag", + "se tte", + "addic t", + "tri angle", + "flash back", + "controversi al", + "z on", + "p ins", + "i as", + "tr ay", + "town ship", + "deleg ates", + "sp am", + "h ms", + "cr ane", + "peop les", + "o lo", + "fac tion", + "but es", + "on ica", + "deleg ation", + "new profile", + "eli er", + "mc a", + "w and", + "g ely", + "losange les", + "ber ke", + "ti ve", + "dis rup", + "zz a", + "cas a", + "jor dan", + "ford shire", + "ga thered", + "ic hi", + "atten dees", + "à¸Ń à¸", + "pe ppers", + "co in", + "bour bon", + "ern ity", + "ro tary", + "behavi our", + "jere my", + "team work", + "compli ance", + "tre mend", + "ðŁĩ §", + "bu hari", + "cam bo", + "bu yers", + "ha gen", + "bu ds", + "bay ern", + "mon te", + "sm ells", + "an za", + "ath lon", + "descri bed", + "work force", + "gi ving", + "ap i", + "invest ments", + "da il", + "sel ena", + "datab ase", + "th um", + "mor tal", + "stu dent", + "bu yer", + "do ver", + "gar ten", + "att le", + "loy alty", + "gen oci", + "holo cau", + "theat ers", + "ru ling", + "ven us", + "pat ent", + "ch un", + "ab by", + "awa ke", + "mass acre", + "bang alore", + "break ing", + "simm ons", + "ju sti", + "hal e", + "ed chat", + "gg les", + "haw k", + "mar king", + "head lines", + "stro m", + "co ve", + "breath taking", + "med als", + "hair cut", + "christ ine", + "tele graph", + "gujar at", + "ju ra", + "can e", + "sho re", + "propag anda", + "mu eller", + ".... ....", + "sa vi", + "stom ach", + "thro ws", + "ta b", + "war m", + "j ong", + "reno wned", + "hi r", + "ra is", + "mush rooms", + "guaran teed", + "bo a", + "m j", + "revolu tionary", + "certi fication", + "bru ins", + "jo in", + "w es", + "pas sport", + "c g", + "sex u", + "cap able", + "w v", + "ton es", + "jac kets", + "ac compan", + "spin ach", + "fore ver", + "bla ir", + "wat ts", + "g l", + "cou ples", + "prairi e", + "newprofile pic", + "logi stics", + "massachuse tts", + "jagu ar", + "o id", + "we al", + "under water", + "mo z", + "y i", + "ma ths", + "myan mar", + "pre ps", + "suffe red", + "tr ace", + "wal i", + "ah hh", + "bor g", + "st itch", + "cu lin", + "real ise", + "infe ction", + "discrimin ation", + "sh ame", + "an kle", + "hu mid", + "y t", + "brac ket", + "tru ck", + "tri u", + "ea ster", + "commun ity", + "post card", + "invol ving", + "ty ler", + "car amel", + "over view", + "ex amples", + "integr ity", + "base ment", + "instru ments", + "ani um", + "at us", + "gh er", + "laun dry", + "achi eve", + "gen eva", + "pr icing", + "hyder abad", + "beli ef", + "me ta", + "j aw", + "accoun ting", + "lead er", + "cristi ano", + "cou ture", + "cy p", + "vis ed", + ", ,,", + "k nu", + "h ick", + "break er", + "br am", + "ra b", + "mo or", + "ham as", + "gradu ating", + "pupp ies", + "ak h", + "ta h", + "ach es", + "ri e", + "op ini", + "g ta", + "re ign", + "tra gic", + "re ver", + "p ill", + "pine apple", + "tou ches", + "da re", + "le ys", + "il o", + "inter iors", + "sc outs", + "bar t", + "en zie", + "don o", + "bro ck", + "christi ans", + "ense mble", + " ·", + "cine mas", + "new port", + "air line", + "win ston", + "le igh", + "cont ents", + "pre scri", + "ur ge", + "tr out", + "fic ally", + "il ia", + "sub si", + "are r", + "âļ¾ ï¸ı", + "w ounded", + "ðŁĻ Ĥ", + "pe pper", + "ðŁĴ ŀ", + "fit ted", + "af f", + "re sur", + "thursday thoughts", + "z ero", + "archae ology", + "di v", + "je e", + "i on", + "awa iting", + "co zy", + "beauti es", + "bal 
d", + "dat a", + "gri zz", + "stal k", + "kin ds", + "cle ared", + "jess ic", + "regu lar", + "ali ens", + "plac e", + "bo s", + "bi zar", + "thisi s", + "ðŁĴ Ģ", + "totten ham", + "ma fia", + "s lam", + "ari ana", + "car roll", + "back pack", + "care y", + "uni v", + "r g", + "pe p", + "dig it", + "tatt oos", + "ag on", + "volunte ering", + "diffe ren", + "consu mption", + "ka thr", + "head phones", + "t shirt", + "o b", + "ele ment", + "re tail", + "sh ru", + "al gori", + "contain er", + "consci ous", + "fi l", + "com ing", + "ra sh", + "u rope", + "def ine", + "gi or", + "femini st", + "flow ing", + "rout es", + "gl aci", + "fer t", + "somer set", + "ant es", + "twee ps", + "$ $", + "h our", + "endange red", + "year sof", + "ro h", + "po pped", + "bac king", + "ba sil", + "bra ke", + "mon aco", + "lgbt q", + "pra gue", + "ut ility", + "cas si", + "gate way", + "haun ted", + "sch ul", + "ðŁİ µ", + "shou ld", + "walking dead", + "comple ting", + "dann y", + "montgom ery", + "pengu in", + "ss i", + "mer chandi", + "ðŁij ij", + "chur ch", + "h ates", + "cap tain", + "brea thing", + "ce t", + "fair ly", + "approach es", + "compan ion", + "surpri sing", + "kany e", + "pe y", + "hin di", + "targe ted", + "lor ds", + "de ut", + "di gging", + "ger man", + "ru t", + "ener gy", + "close st", + "y un", + "apo logi", + "ภ±", + "s ack", + "ru p", + "dd y", + "port al", + "d ough", + "b ats", + "ðŁĵ °", + "at ur", + "graph er", + "pi res", + "mo tors", + "ðŁĮ ¹", + "j c", + "dan g", + "tu k", + "clu e", + "us c", + "pag e", + "d less", + "bro ws", + "ju s", + "ad ing", + "re marks", + "oo m", + "car dio", + "ste fan", + "arm strong", + "âĢ¢ âĢ¢", + "ni est", + "belgi an", + "bi op", + "so y", + "lo f", + "í ĥ", + "q t", + "flashback friday", + "ce e", + "ģ à¸", + "wre ck", + "mar ines", + "amend ment", + "wardro be", + "vo y", + "bur ned", + "guit ars", + "ra inf", + "li fel", + "ssi l", + "oun ce", + "exter nal", + "c key", + "me sh", + "she ikh", + "inv itation", + "sugge sti", + "pop corn", + "phenomen al", + "an onymous", + "tun a", + "chic ago", + "o val", + "del y", + "loc als", + "( &", + "pro f", + "no vel", + "fin der", + "spar ks", + "la ven", + "in fu", + "nic ks", + "qu ant", + "ra e", + "exe c", + "dist ingui", + "st ances", + "mu tual", + "sh al", + "unve ils", + "edmon ton", + "zan ia", + "a dio", + "vie wer", + "brad ford", + "audit orium", + "qu is", + "re act", + "htt p", + "l ero", + "chee ky", + "impac ts", + "ta k", + "ed t", + "desper ate", + "t ay", + "ì Ħ", + "sett le", + "bar gain", + "resu me", + "un ite", + "thro wn", + "ke st", + "se ys", + "mar ching", + "am it", + "decl ine", + "sch ar", + "me tr", + "stan ford", + "lin ke", + "ber ra", + "dol ls", + "rug by", + "jam i", + "b or", + "road trip", + "dino saur", + "mi k", + "sun der", + "re m", + "b k", + "over seas", + "nau ghty", + "imple mentation", + "iam srk", + "lun cheon", + "fir ing", + "mi ami", + "pere z", + "the e", + "z on", + "gi fted", + "con version", + "ceram ic", + "¡ ï¸ı", + "pe dro", + "ì Ĩ", + "v ick", + "! 
@", + "he ed", + "si d", + "b w", + "docu ment", + "pl un", + "gr ants", + "fant asy", + "predic tions", + "vali d", + "car ved", + "gradu ated", + "ðŁijį ðŁı»", + "nation ally", + "ch y", + "af l", + "re sso", + "blan k", + "ri vals", + "j ig", + "e ties", + "om ics", + "une mp", + "b ound", + "sk o", + "inspec tion", + "par al", + "high s", + "cri sp", + "b ans", + "ob a", + "[ @", + "co spla", + "costu mes", + "rec all", + "mou th", + "ni gel", + "b ts", + "ter a", + "ko v", + "do cs", + "west minster", + "dic t", + "gra vity", + "kar i", + "ro gue", + "t ted", + "war k", + "ida ho", + "w end", + "aw i", + "queen sland", + "proce sses", + "cli ffe", + "m ick", + "com pens", + "op ol", + "the y", + "cl ari", + "wiki pedia", + "salman khan", + "haz ard", + "pre ston", + "swee test", + "pd f", + "che es", + "tr ilo", + "south africa", + "bur nt", + "( $", + "con tain", + "t p", + "sub mitted", + "sound cloud", + "at u", + "re z", + "word press", + "corru pt", + "n f", + "ma ker", + "í ķ", + "par as", + "adv ent", + "ri al", + "ca fe", + "fo ssil", + "!!!! !!!", + "co ws", + "c j", + "sp ur", + "institu tions", + "land mark", + "ent it", + "re ut", + "h is", + "alz heim", + "we mb", + "regg ae", + "mo squ", + "st at", + "identi fied", + "deal er", + "re am", + "re land", + "ten sion", + "ðŁĩ ©", + "wra pping", + "deep er", + "fr at", + "red dit", + "ar is", + "moroc co", + ".. \"", + "b low", + "ma pping", + "pri orities", + "ing a", + "swa p", + "re wards", + "conspir acy", + "creati ve", + "c j", + "congre ssional", + "vau lt", + "ple x", + "sophom ore", + "shad ow", + "ele ss", + "ðŁĺ ħ", + "dar ts", + "aldu b", + "anno ying", + "pro ps", + "n as", + "alumin um", + "h bo", + "offen se", + "j ill", + "oni ons", + "la ur", + "ta e", + "har dest", + "sh ro", + "ga ining", + "meas ure", + "ed tech", + "cyp rus", + "tar a", + "ang eli", + "car lo", + "go on", + "all i", + "im plic", + "ju pit", + "resil ience", + "ha il", + "bal anced", + ") ...", + "joy ce", + "gr a", + "th eli", + "defin ed", + "shi pped", + "main ly", + "min a", + "l m", + "sac ri", + "o ber", + "p im", + "claim ing", + "ent ers", + "co rey", + "bo k", + "cri ed", + "cool ing", + "dani elle", + "pharmac y", + "thor ough", + "ca ke", + "k lo", + "outre ach", + "z ens", + "digital marketing", + "val ent", + "sn p", + "her b", + "mr w", + "caf é", + "cap tures", + "no tre", + "triu mph", + "pan cakes", + "cu mber", + "spi ke", + "d ation", + "bi gg", + "sp er", + "crit ical", + "am al", + "too th", + "foun ding", + "a stro", + "' #", + "quan tum", + "th ames", + "un c", + "pri de", + "air bus", + "kno cked", + "un defeated", + "mediterran ean", + "cal cu", + "clo wn", + "sens or", + "ham mer", + "for give", + "cu shi", + "ber ry", + "maje stic", + "elec t", + "polit an", + "g ta", + "k ari", + "bur ke", + "sea hawks", + "volkswag en", + "re i", + "landsc apes", + "cas u", + "grand father", + "list ened", + "/ /", + "star trek", + "rainf all", + "fur ry", + "vi er", + "star k", + "rif le", + "ff a", + "leg es", + "hillary clinton", + "min us", + "correc tly", + "architec tural", + "pre ce", + "up side", + "box er", + "ðŁĻĮ ðŁı¼", + "is ai", + "de t", + "pro vo", + "tis sue", + "spoo ky", + "ve led", + "re con", + "prospec ts", + "que bec", + "âļ «", + "ig no", + "anat omy", + "shap es", + "w p", + "p interest", + "hor e", + "an es", + "pick up", + "ti p", + "pra desh", + "hu gh", + "co e", + "po k", + "gram my", + "well ington", + "sti gate", + "ri gh", + "lea p", + "king ston", + "scen ic", + "go sh", + "v ani", + "au g", + 
"s ary", + "zi er", + "bure au", + "lin son", + "con te", + "fra gr", + "all an", + "g aw", + "lan a", + "colli sion", + "surve ill", + "ren ais", + "ar range", + "s ali", + "do in", + "br ance", + "bren dan", + "our se", + "in coming", + "suspen sion", + "à ´", + "l la", + "educ ators", + "in tri", + "da e", + "bio graphy", + "bul gar", + "villa in", + "go thic", + "rw anda", + "e w", + "may or", + "meet up", + "democr at", + "mor gan", + "su dden", + "te sco", + "car rot", + "bom ber", + "mck in", + "re ne", + "fun day", + "agricul tural", + "haha h", + "show time", + "form ing", + "col a", + "scor pi", + "quo te", + "po ppy", + "s life", + "d az", + "tu b", + "ne n", + "mo t", + "ðŁĺ »", + "s ore", + "elder ly", + "o ve", + "skin ny", + "um i", + "anc o", + "man ship", + "we re", + "g v", + "k ah", + "fol ding", + "ne at", + "samanth a", + "dan ish", + "uk rain", + "humid ity", + "nu tri", + "jak arta", + "cand les", + "oooo oooo", + "at ile", + "streng th", + "i bra", + "bap ti", + "charle ston", + "fr ames", + "girl s", + "clear ing", + "glu ten", + "# #", + "super natural", + "ju bi", + "ph one", + "he in", + "dr un", + "le ak", + "invest or", + "y er", + "dom ain", + "ball room", + "mi sh", + "app li", + "off shore", + "bla ze", + "dor o", + "âĺķ ï¸ı", + "win ery", + "shar if", + "ad ore", + "n ir", + "saf er", + "si gh", + "as cri", + "strong ly", + "trac y", + "ck er", + "ol l", + "faith ful", + "ey ed", + "deli ghtful", + "vis m", + "karnat aka", + "tit an", + "wh ar", + "jer seys", + "re fur", + "heav en", + "gri p", + "pan ama", + "pre li", + "glu ten", + "o dd", + "cont ent", + "pon ti", + "tion ing", + "e commerce", + "feder ation", + "flaw less", + "ge ar", + "ti res", + "by r", + "pol ice", + "cu ban", + "tri butes", + "tic ul", + "chur ches", + "nur sery", + "di aries", + "muse ums", + "snapp ed", + "i van", + "wi ght", + "touri sts", + "ramad an", + "t rent", + "prophe t", + "won dered", + "focu sing", + "hi d", + "ic ons", + "i q", + "ambul ance", + "pi st", + "fun niest", + "time less", + "sr ilan", + "bu ys", + "ki ds", + "colour ful", + "a shi", + "ch ir", + "mu m", + "ðŁĵ ļ", + "let ter", + "x en", + "reut ers", + "pre serve", + "in ting", + "ste p", + "fu ji", + "uni ver", + "i u", + "show down", + "po ems", + "surveill ance", + "suspec ted", + "ta e", + "sol ving", + "tom b", + "mother sday", + "car pen", + "recru it", + "pil ots", + "bro c", + "mix ing", + "fri days", + "ty r", + "represent atives", + "tra pped", + "abdu l", + "free style", + "clu ster", + "âļ łï¸ı", + "k d", + "sk ill", + "pit t", + "ex o", + "commer ci", + "muse um", + "loc ally", + "g ina", + "no bel", + "immun e", + "fr ac", + "cap su", + "main ed", + "attemp ts", + "bull dog", + "be spoke", + "sing ers", + "sp elling", + "seg ment", + "nat ures", + "tic k", + "lip stick", + "clean er", + "gett able", + "preci sion", + "â̼ ï¸ı", + "th ood", + "re ef", + "no pe", + "bill y", + "di gi", + "mu si", + "ri val", + "figu red", + "tal ity", + "sun ny", + "ber k", + "aw ww", + "awa its", + "un real", + "co pen", + "asy lum", + "ex otic", + "bu en", + "mo ck", + "en able", + "arch y", + "fr a", + "pla stic", + "al mond", + "amp li", + "displa ys", + "abbo tt", + "s me", + "x p", + "ðŁĻ ĥ", + "graph ic", + "i ved", + "mar a", + "cau tion", + "lea ks", + "en berg", + "ul u", + "unic orn", + "cann on", + "appren tic", + "ðŁĺĺ ðŁĺĺ", + "b ball", + "wil low", + "at ics", + "am as", + "manufac turer", + "campaig ns", + "port ers", + "flo ors", + "l su", + "ty pe", + "ke j", + "honor ary", + "it im", + "to 
le", + "min ecraft", + "d x", + "ma sh", + "ri o", + "consequ ences", + "ron ald", + "go ssi", + "suffol k", + "mu se", + "r bi", + "live music", + "i van", + "ðŁİ ¤", + "le u", + "patri ot", + "man it", + "lan ca", + "home decor", + "de ar", + "sig ma", + "ti de", + "str ings", + "v ita", + "sequ el", + "try na", + "inve stigate", + "bor is", + "ve gan", + "barri er", + "mind fulness", + "web b", + "hu stle", + "in da", + "tan zania", + "str ay", + "tex as", + "c ag", + "diagno sis", + "wom an", + "g w", + "ob session", + "l ative", + "nu fc", + "fl ynn", + "moment um", + "sof a", + "wal d", + "vege table", + "tu cker", + "supp er", + "se ab", + "ar ro", + "se ag", + "ven ting", + "counc ill", + "sp lat", + "cal cul", + ".. #", + "com fy", + "odi sha", + "sto pp", + "war fare", + "ca es", + "à ¨", + "co y", + "price less", + "in sec", + "ðŁĺ Ľ", + "contro ls", + "empower ment", + "datasci ence", + "per pe", + "gen ic", + "e res", + "tru deau", + "man o", + "sla very", + "expand ing", + "ma he", + "fa iling", + "s aga", + "photograph s", + "cre st", + "re on", + "surf ing", + "hi e", + "ðŁį Ģ", + "ja e", + "fel lows", + "south ampton", + "sol om", + "ce ster", + "tab ility", + "hor n", + "se ct", + "he e", + "cole man", + "at las", + "explo rer", + "consul tation", + "copy right", + "organi zing", + "den ied", + "mon keys", + "noo dles", + "br is", + "fl or", + "dou gh", + "bon ds", + "sho cked", + "eco system", + "care fully", + "w m", + "apart ments", + "cur ve", + "san diego", + "must ard", + "comm en", + "cere mon", + "e ch", + "ru th", + "ðŁĻĮ ðŁı»", + "hawa i", + "fil med", + "te ar", + "as ingly", + "ca ir", + "wat t", + "instru ment", + "ou tta", + "ye ol", + "river side", + "ë °", + ". :", + "nor wich", + "alo g", + "migr ants", + "new man", + "ri de", + "spr ink", + "targe ting", + "beli eve", + "tor ch", + "reflec ts", + "per mission", + "ff man", + "ene mies", + "bas ics", + "se ized", + "sun days", + "le i", + "hass an", + "en do", + "h c", + "st ad", + "le ments", + "kk kk", + "nan o", + "shar k", + "man a", + "on ic", + "treat ments", + "ear ly", + "collabor ative", + "shu ttle", + "bran ches", + "mis ses", + "mained cm", + "ap ers", + "ky le", + "carri e", + "leis ure", + "sh et", + "bir ding", + "adv ances", + "ðŁĵ Ŀ", + "popu lar", + "di ane", + "a be", + "re war", + "neigh bour", + "k pop", + "remem brance", + "play ground", + "ru b", + "krish na", + "e bola", + "inqu iry", + "ep a", + "lu min", + "organ isation", + "abra ham", + "norm ally", + "pre ten", + "jan et", + "w t", + "ðŁĴ İ", + "encoura ging", + "a stic", + "bu mp", + "syd ney", + "s z", + "ss ss", + "gar rett", + "ðŁĵ »", + "consul ting", + "roman ia", + "spo tting", + "chanc ellor", + "ar ma", + "presti gious", + "ðĿ IJ", + "t ad", + "cry st", + "compe tit", + "rati o", + "cat aly", + "bro w", + "j ur", + "vi king", + "commu te", + "y day", + "la yers", + "du mb", + "esc al", + "genoci de", + "f ill", + "gu pta", + "ste pping", + "se i", + "fo to", + "wild cats", + "col i", + "projec t", + "ear nings", + "st r", + "ge ons", + "comple tion", + "b m", + "decor ated", + "craw ford", + "af ghan", + "sc are", + "visi bility", + "hi b", + "direc tion", + "stro ll", + "christ ina", + "alter nate", + "cl are", + "sty list", + "be hold", + "s ance", + "leop ard", + "acqui red", + "narr ative", + "ash i", + "the a", + "?? 
??", + "pe as", + "at ch", + "sli des", + "le en", + "renew able", + "eng lish", + "qu ir", + "co aster", + "r x", + "fo ols", + "match day", + "mis m", + "amaz ing", + "z ig", + "ke ting", + "won t", + "to wel", + "di ab", + "sta ke", + "n m", + "mel t", + "e than", + "gra pe", + "polit ician", + "sm en", + "í ĺ", + "re o", + "wedd ings", + "cat cher", + "or acle", + "me mo", + "ðŁĮ ´", + "ec k", + "rob bie", + "norwe gian", + "oper ator", + "am or", + "se wing", + "ju l", + "x ie", + "u v", + "fif ty", + "me ga", + "tatt oo", + "liber als", + "u pri", + "traffic king", + "richard son", + "su v", + "ki p", + "mess y", + "tremend ous", + "gl ou", + "cour tney", + "la d", + "stere o", + "my ers", + "i dio", + "^_ ^", + "man ning", + "dy e", + "w d", + "thr one", + "jun k", + "as u", + "provin cial", + "k ook", + "wr c", + "fine art", + "hamp shire", + "renais sance", + "b red", + "fall out", + "s j", + "sn l", + "al am", + "tor ture", + "fy i", + "sh ines", + "pa w", + "ch ar", + "hen ry", + "c row", + "aci ous", + "di an", + "pa ige", + "ba re", + "stock holm", + "scen ery", + "ðŁĩ ·", + "jef frey", + "pu sh", + "decor ation", + "ne d", + "cu te", + "brig ade", + "laven der", + "inv ites", + "e sports", + "vo ir", + "dri ed", + "tran spl", + "sur geon", + "no vels", + "pul ls", + "son y", + "lun ar", + "man e", + "i vy", + "fru str", + "dor set", + "sa i", + "tor res", + "ssi on", + "shut down", + "suggesti ons", + "writ ing", + "e o", + "battle field", + "u ga", + "ðŁIJ ¾", + "vac u", + "spl ac", + "g it", + "u g", + "high land", + "% )", + "mer maid", + "sacram ento", + "ta ils", + "p w", + "ka h", + "t ell", + "enh anced", + "ì ķ", + "auck land", + "cru el", + "ðŁ¤ ©", + "au dre", + "sail or", + "gram mar", + "g love", + "de on", + "infl am", + "fresh ly", + "k ell", + "zi p", + "christi e", + "mil d", + "di xon", + "instru ctor", + "g ence", + "ãħ ł", + "sub jec", + "constitu tional", + "crow ds", + "in visible", + "ru ins", + "da k", + "si p", + "pla que", + "p ouring", + "comple x", + "z ine", + "ste ad", + "f let", + "trans mission", + "lo way", + "ar un", + "incre asingly", + "au d", + "transp aren", + "cro wned", + "sc oun", + "blizz ard", + "lux u", + "fi ers", + "achieve ments", + "hun ters", + "rock ed", + "bas in", + "vio let", + "pro ves", + "achiev ing", + "pro sper", + "se ga", + "flo at", + "vi an", + "xi v", + "pol ic", + "tur a", + "approxim ately", + "wander lust", + "keep ers", + "geta way", + "co d", + "pol is", + "br yan", + "col ts", + "tal ents", + "yo gur", + "gluten free", + "wri st", + "gr y", + "cze ch", + "ðŁİ Ī", + "ev ille", + "ðŁı Ī", + "to x", + "dani els", + "am er", + "bi ds", + "weare one", + "me tab", + "g t", + "boy z", + "pd x", + "pos session", + "pu shed", + "shr ine", + "reali stic", + "tri gger", + "na vi", + "ru mors", + "n af", + "jen kins", + "tr un", + "comm uni", + "à Ĺ", + "gam ers", + "arm or", + "moham med", + "bal cony", + "y ah", + "stron gest", + "rhy thm", + "unfor gettable", + "k p", + "ho bb", + "custo dy", + "greg or", + "r ita", + "aes thetic", + "il ation", + "sponsor ing", + "n ay", + "kid napp", + "sh s", + "ra jas", + "me g", + "signific antly", + "butt ons", + "la c", + "ver sions", + "essenti als", + "opini ons", + "k ro", + "d printing", + "wi dely", + "d k", + "ur an", + "y al", + "reque sted", + "c n", + "cur ric", + "plu m", + "gr un", + "v m", + "dev on", + "m yo", + "rel ation", + "juvent us", + "rou ge", + "min ority", + "min es", + "jupit er", + "n ine", + "oxy gen", + "fran kie", + "une sco", + "fab ric", + "disgu 
sting", + "sal man", + "dete ction", + "lan ka", + "d ac", + "ðŁĩ« ðŁĩ·", + "argu ment", + "shel ves", + "cel tics", + "rober to", + "pi gs", + "he dge", + "fau l", + "pow ering", + "butter flies", + "fi r", + "re make", + "att i", + "com o", + "emp ha", + "kend all", + "poke mon", + "se ating", + "d ans", + "bald win", + "ðŁij »", + "lesli e", + "one direction", + "ti mber", + "im an", + "fon t", + "e der", + "di on", + "ste ph", + "for mat", + "gre gory", + "pro p", + "he x", + "ru in", + "sor y", + "inf er", + "n aw", + "bar ak", + "sd gs", + "kar ao", + "lu sh", + "v ander", + "end ent", + "g is", + "a fro", + "soc cer", + "ay an", + "t uni", + "lun g", + "da yof", + "alex a", + "mar ath", + "addic ted", + "ag ile", + "hy gi", + "light weight", + "ì §", + "mand ela", + "jo ey", + "anc y", + "hu m", + "bi r", + "memor ial", + "jim in", + "ging er", + "v ak", + "jav ascri", + "cro ps", + "orig ins", + "d ari", + "pi per", + "im port", + "aggre ssive", + "predic tion", + "re pairs", + "cr acker", + "voy age", + "ni ke", + "mu mmy", + "linke din", + "country side", + "bor der", + "gla ss", + "per t", + "s als", + "sho e", + "autograph ed", + "wal nut", + "colle gi", + "sal ary", + "pa iring", + "ðŁĮ ¸", + "cath ol", + "swee the", + "defe ats", + "streng then", + "roof top", + "impro vements", + "barri ers", + "ur u", + "t ally", + "ru led", + "ðŁĨ ļ", + "nai ja", + "emo ji", + "per cent", + "gi o", + "pro bs", + "on ce", + "adm its", + "pa ths", + "li ar", + "day tona", + "pe ters", + "cal i", + "cal li", + "mu g", + "o sa", + "ap h", + "ab y", + "hy de", + "eth nic", + "pla ins", + "ol f", + "haha hahaha", + "holi c", + "?! ?!", + "su bli", + "bl acks", + "mo t", + "gh ton", + "lo vin", + "b rent", + "bar u", + "l ati", + "de w", + "ate au", + "q a", + "pain ful", + "bu sters", + "st atic", + "ðŁĩ¨ðŁĩ ¦", + "note book", + "out fits", + "si es", + "r f", + "floo ds", + "Ñ Ģ", + "thro at", + "su ici", + "ro vers", + "beng al", + "pre pares", + "blo g", + "mini ature", + "Ø ¨", + "am phi", + "com b", + "r sp", + "in timate", + "green e", + "Ì ĩ", + "al tar", + "surg ical", + "ves sel", + "... 
?", + "gav in", + "g ator", + "threat ened", + "z ar", + "rob bery", + "di er", + "promo ted", + "y g", + "x s", + "su bs", + "inter viewing", + "threat ening", + "do zen", + "me ado", + "water fall", + "nintendo switch", + "cal um", + "mini sters", + "dro p", + "univers ities", + "war ned", + "tac tics", + "ðŁĩ ²", + "refu se", + "ad ju", + "v ast", + "ðŁĺ ´", + "mc fc", + "lib ya", + "no filter", + "distribu ted", + "re ser", + "ron nie", + "de co", + "javascri pt", + "mon k", + "intere sts", + "fle x", + "mar tha", + "sti es", + "oo d", + "ðŁ¤£ ðŁ¤£", + "e un", + "b ali", + "g omez", + "sti mul", + "moder ate", + "d ity", + "ir is", + "stra w", + "consist ent", + "direc tions", + "adop t", + "sal sa", + "cro o", + "reco vered", + "black friday", + "lan caster", + "accep t", + "weareone exo", + "buil ds", + "free man", + "air plane", + "diti on", + "bel ong", + "jam ie", + "pit ching", + "li f", + "om in", + "cri spy", + "pre pping", + "ve g", + "chan g", + "accompli shed", + "graci as", + "dolph in", + "elec tor", + "culin ary", + "super bowl", + "wal a", + "pur suit", + "black berry", + "be an", + "cardin al", + "pro ved", + "immigr ant", + "stric tly", + "holocau st", + "pass age", + "ha us", + "cou p", + "pur se", + "har ass", + "< <", + "le ed", + "ado be", + "st ad", + "legis lat", + "par ked", + "pri yan", + "sil va", + "kri st", + "s the", + "fun ky", + "ig a", + "sett lement", + "ph s", + "t mrw", + "stre ssed", + "hun t", + "ho ckey", + "treas ures", + "cham bers", + "ol u", + "hu t", + "mar ley", + "tex ture", + "wilder ness", + "mm ing", + "poten tially", + "om aha", + "ju dy", + "to es", + "spo iler", + "distingui shed", + "feli x", + "ah u", + "recommend ations", + "zom bies", + "hit ler", + "tri ple", + "colla pse", + "motiv ated", + "ulti mat", + "gg ling", + "so y", + "ci gar", + "fo ren", + "vine yard", + "gl itter", + "fin dings", + "colon ial", + "hun ter", + "eri k", + "den s", + "beet le", + "lot te", + "sub tle", + "s matter", + "tru sted", + "experim ental", + "nam ents", + "ðŁĺ Ĩ", + "regi on", + "acquis ition", + "bre eding", + "quarter back", + "am reading", + "oo td", + "ru de", + "initi atives", + "st out", + "hy ung", + "out come", + "al fred", + "mic s", + "exper tise", + "bacter ia", + "pengu ins", + "jump er", + "valen cia", + "bar k", + "ing day", + "sell ers", + "contrac ts", + "hou ston", + "commissi oned", + "adap tation", + "swan sea", + "santi ago", + "common wealth", + "ju dging", + "sub mission", + "sco rer", + "tom my", + "ñ o", + "ex quis", + "fil ing", + "explan ation", + "alli son", + "wemb ley", + "ri dge", + "chev y", + "san tos", + "own ership", + "cogn itive", + "favour ites", + "sh ed", + "phil anthro", + "dele ted", + "go dd", + "s nor", + "gui delines", + "ff ing", + "je ep", + "cli ps", + "sw amp", + "an or", + "guil d", + "bol ton", + "spring field", + "munici pal", + "goal keeper", + "ye on", + "ðŁĺįðŁĺį ðŁĺįðŁĺį", + "ãħĭ ãħĭ", + "water front", + "gra ve", + "contempor ary", + "ar ity", + "ÃŃ a", + "sle eps", + "sy rup", + "al am", + "pi re", + "co yo", + "moto gp", + "ty son", + "kej ri", + "cir cul", + "sing ly", + "cr unch", + "complic ated", + "nostal gia", + "k op", + "mo ve", + "k ale", + "mac ro", + "mid west", + "h ans", + "tri bal", + "nu de", + "௠į", + "bey once", + "congratul ate", + "cat er", + "leagu e", + "ðŁĻ Ĭ", + "la dder", + "cra shed", + "tech nic", + "karao ke", + "harass ment", + "ro ts", + "experi encing", + "kri sten", + "ðŁĩ ³", + "ðŁ¤ Ĺ", + "reflec tions", + "guin ness", + "illustr ator", + "ðŁĻı ðŁı»", + 
"cen ter", + "nar row", + "comm ons", + "regul ations", + "Ù Ĩ", + "har m", + "cro ft", + "cu ssion", + "hong kong", + "st ical", + "intern ship", + "zo e", + "cho p", + "hoo ds", + "estim ated", + "batter ies", + "berke ley", + "smooth ie", + "shau n", + "cro s", + "~ ~", + "cam pe", + "hu mp", + "b g", + "proto type", + "cl ick", + "shaw n", + "re viewed", + "tem pl", + "p f", + "jed i", + "blo gs", + "ray mond", + "as th", + "ba h", + "av ail", + "scot ch", + "leaf s", + "nik ki", + "to k", + "hol low", + "ur ges", + "of t", + "un like", + "lat in", + "u e", + "cat ering", + "mil i", + "alter nati", + "ma ver", + "Ð ¸", + "ag le", + "pre order", + "lu x", + "cu cu", + "ðŁijı ðŁijı", + "t art", + "âĿ¤âĿ¤ âĿ¤", + "arab ic", + "rapi dly", + "ar rang", + "all en", + "travel tuesday", + "pa ws", + "flo ws", + "st ability", + "flu id", + "ca pp", + "can berra", + "uu uu", + "sp ani", + "demon stration", + "m la", + "plac ement", + "m w", + "presi dents", + "awe som", + "bever ly", + "ani st", + "ne al", + "father sday", + "referen dum", + "la hore", + "o aks", + "deb bie", + "half way", + "gho sts", + "de bor", + "matthe ws", + "fi at", + "t fw", + "pre sen", + "rob i", + "de d", + "bro ck", + "laugh ed", + "am ounts", + "bam boo", + "kinder garten", + "eat en", + "mtv hottest", + "break out", + "u sic", + "fra ser", + "legis lative", + "p ang", + "modu le", + "sam my", + "go ver", + "ear ns", + "expe dition", + "gar h", + "concep ts", + "char lie", + "la va", + "bachel or", + "veg gies", + "deter mine", + "el lie", + "un locked", + "fru it", + "dal la", + "cou pe", + "wash ington", + "depo sit", + "iv ory", + "pau la", + "chic ag", + "gu cci", + "ðŁİ ĥ", + "cul tiv", + "pier ce", + "li fted", + "stu mb", + "re cover", + "musc les", + "conduc ting", + "cb s", + "mcla ren", + "sophi a", + "cel lu", + "oce ans", + "up loaded", + "game play", + "mal dives", + "kim ber", + "avo i", + "rac er", + "ca ine", + "cav s", + "h ana", + "li ga", + "ra ven", + "inter vention", + "inaugur ation", + "oo h", + "at traction", + "merchandi se", + "tune in", + "li king", + "juni ors", + "int ended", + "att acking", + "aqu arium", + "i wd", + "comp onents", + "sur ing", + "cent u", + "yogur t", + "ðŁı ĥ", + "show room", + "op tical", + "ty our", + "ju dge", + "yi eld", + "an to", + "pl c", + "transparen cy", + "recy cled", + "chi ef", + "ar om", + "ambassad ors", + "plan et", + "âĿĦ ï¸ı", + "om ed", + "vaness a", + "cour t", + "mar gar", + "hal ey", + "v r", + "reg ina", + "pd ates", + "hi span", + "live stream", + "âģ £", + "ya hoo", + "gal la", + "secu red", + "w ir", + "bene ath", + "off l", + "n il", + "am b", + "ye g", + "out let", + "u te", + "pe ep", + "lind say", + "bent ley", + "... 
!", + "he el", + "trilo gy", + "vo s", + "ty re", + "there fore", + "tor onto", + "ab i", + "simp li", + "ja e", + "exten sive", + "eleph ants", + "s or", + "orient ation", + "im peach", + "re play", + "constru cted", + "peter son", + "pa is", + "por ted", + "custom s", + "colla p", + "ad u", + "high lands", + "sal em", + "shel by", + "ko vic", + "stra in", + "ro sie", + "sen ators", + "snap s", + "bo bb", + "suz uki", + "bla des", + "k p", + "lo lo", + "gener ate", + "si ght", + "ma e", + "struc tural", + "predic t", + "jump ed", + "ah mad", + "sun g", + "just ice", + "gla m", + "vol vo", + "jubi lee", + "de tention", + "lo sses", + "pu ri", + "every time", + "Ð °", + "ra o", + "ed ge", + "li mer", + "rese mb", + "har old", + "re tri", + "sacri fic", + "surpri ses", + "am c", + "srilan ka", + "bar bie", + "men s", + "fin n", + "ag s", + "ukrain ian", + "em brac", + "î IJ", + "flav ors", + "hom er", + "lau re", + "ou th", + "pr iced", + "ver de", + "fir m", + "ah s", + "cu b", + "tre y", + "par anor", + "pro fit", + "in dv", + "who a", + "har sh", + "al ot", + "crit ics", + "hu bby", + "fi gur", + "gi ra", + "ca stro", + "chan el", + "in put", + "origin als", + "ten ant", + "yy yy", + "ture rs", + "lincol n", + "co on", + "lear n", + "ch ou", + "ac are", + "o les", + "din er", + "hy p", + "bizar re", + "mc r", + "let sgo", + "decor ating", + "ðŁĮ İ", + "al ison", + "ar vin", + "f d", + "reha b", + "mccar thy", + "lot tery", + "da h", + "minne apolis", + "eli gible", + "diagno sed", + "emer ald", + "destin ations", + "s ans", + "or y", + "bla zers", + "n v", + "ba il", + "digital art", + "no c", + "mal ta", + "sol ar", + "pi pes", + "alleg ations", + "no ck", + "po pe", + "bri d", + "premi er", + "n x", + "present ations", + "ef a", + "bo ws", + "val ve", + "opp onent", + "Į ë", + "visu al", + "ing le", + "cate gor", + "e ter", + "po is", + "dan i", + "at tract", + "neu tral", + "th ene", + "cra shes", + "fred die", + "ut ili", + "c st", + "awak ening", + "slo ven", + "quali fy", + "pro of", + "fair y", + "le v", + "fre ight", + "enjo ys", + "cup cake", + "flav our", + "â ķ", + "protec tive", + "ðŁijı ðŁı»", + "is u", + "ad mir", + "h mmm", + "continu ous", + "ai res", + "rap tors", + "showcas ing", + "y uk", + "pa ste", + "follow er", + "instru ctions", + "sp ru", + "@ __", + "the o", + "debu ts", + "ve tte", + "sto w", + "es of", + "ach ed", + "sul tan", + "sand wich", + "som alia", + "franc o", + "car ne", + "flu ffy", + "al pine", + "jas mine", + "he ated", + "viol in", + "ple ss", + "divor ce", + "per former", + "phi es", + "port sm", + "dar a", + "kir by", + "lo p", + "chill i", + "for th", + "sky pe", + "ðŁĩ®ðŁĩ ¹", + "celebr ities", + "ed y", + "ve e", + "po ison", + "ey el", + "gra bs", + "ssi c", + "un o", + "wester n", + "rail road", + "am er", + "numer ous", + "s v", + "fo w", + "fi st", + "âĢ ĭ", + "reque sts", + "mar tial", + "em my", + "accept ance", + "lau ra", + "ภ´", + "er up", + "hyun dai", + "out lander", + "u tt", + "wrest le", + "esp resso", + "demand ing", + "g dp", + "geo graphy", + "sas kat", + "tro ll", + "confe der", + "su es", + "se m", + "be ts", + "t ful", + "to sh", + "teach es", + "col oured", + "gal way", + "mac y", + "dis orders", + "bb cra", + "at em", + "fen der", + "lit ter", + "e sh", + "provi ders", + "renov ation", + "nomin ate", + "ps g", + "nomin ations", + "jen na", + "shar p", + "some day", + "z ur", + "bra ins", + "che shire", + "pre y", + "hu go", + " ¿", + "to ken", + "r v", + "car r", + "tac tical", + "zel da", + "kay la", + "fern ando", + 
"photograph ers", + "j our", + "umb rella", + "woo dy", + "congress man", + "du mp", + "le vy", + "ju an", + "d azz", + "sign als", + "la in", + "an u", + "mic hel", + "por ch", + "al den", + "sibl ings", + "y ale", + "pe el", + "sw ick", + "gg in", + "ll c", + "k ale", + "s con", + "il d", + "pat reon", + "re el", + "qu in", + "wit t", + "mar ty", + "moo dy", + "ton i", + "der y", + "g ators", + "speci fically", + "dd in", + "ly on", + "tr ick", + "meado ws", + "p j", + "bor gh", + "vi k", + "tu r", + "bron x", + "pu ff", + "lan tern", + "ðŁ¤ ¦", + "g ently", + "be stie", + "fac t", + "refu sed", + "fas ci", + "mp y", + "ðŁĶ µ", + "cross over", + "mead ow", + "indian apolis", + "duc ation", + "sle y", + "loo m", + "mix er", + "new music", + "film maker", + "prosper ity", + "li m", + "week end", + "cre amy", + "neu tr", + "lu ther", + "h v", + "nor thern", + "tw o", + "h ra", + "cat ches", + "appear ances", + "ha bit", + "kitt ens", + "n v", + "illa c", + "inf an", + "regar dless", + "liz ard", + "dun k", + "cur tain", + "ac om", + "in tu", + "ve z", + "e min", + "fl ats", + "calend ars", + "em power", + "ru ined", + "hun gary", + "vi d", + "we x", + "u lum", + "aber deen", + "o sa", + "k t", + "ma ssi", + "se emed", + "s den", + "' ?", + "tele phone", + "de fi", + "insp ires", + "me ow", + "z ones", + "bl ind", + "pl y", + "tuc son", + "advent ure", + "ge d", + "oy ster", + "ðŁijıðŁijı ðŁijı", + "out put", + "tt t", + "metal lic", + "sma sh", + "ucl a", + "sco ts", + "perfe ct", + "lu cy", + "regular ly", + "sp ic", + "rel ative", + "ath ers", + "mis e", + "batt ling", + "deci des", + "mat a", + "occu pied", + "random ly", + "cat softwitter", + "gi an", + "ball y", + "al ties", + "al lies", + "im men", + "sy rac", + "ðŁĴľ ðŁĴľ", + "l lan", + "au r", + "k ut", + "lam ar", + "affe cts", + "n ra", + "star war", + "ðŁ¤ ĺ", + "sc ram", + "en chan", + "pro cess", + "luxu rious", + "ar ray", + "sher lock", + "comp ati", + "dor f", + "stre ss", + "m su", + "s with", + "sal a", + "sof instagram", + "fo il", + "under stood", + "qu ay", + "r p", + "c ade", + "ja w", + "en ab", + "en coun", + "ðŁİī :", + "do ck", + "satur n", + "mu ll", + "lay out", + "ra rely", + "happ ily", + "fix ture", + "or ph", + "over looking", + "her bs", + "m itt", + "pil lar", + "nol an", + "pe tty", + "str y", + "u i", + "mu k", + "o res", + "o vers", + "á µ", + "re creation", + "we sley", + "ri t", + "kejri wal", + "sto cking", + "g v", + "subscri bers", + "moo se", + "ma e", + "ber t", + "opp re", + "assign ment", + "u ro", + "high lighting", + "cal vin", + "we igh", + "cambo dia", + "av on", + "ke m", + "dis abilities", + "read y", + "char gers", + "p ads", + "iz ing", + "illi an", + "tru ste", + "col leges", + "associ ates", + "alban y", + "mil ton", + "cr on", + "bu r", + "har dly", + "si ghts", + "anti ques", + "e cho", + "surpri singly", + "ha iti", + "cap t", + "ph p", + "op io", + "ine quality", + "equ al", + "ken y", + "sch mid", + "autograph s", + "ren t", + "qu er", + "cit rus", + "challeng ed", + "te c", + "epi de", + "fe st", + "z hou", + "li me", + "citizen ship", + "cry stal", + "convin ced", + "mess enger", + "copen hagen", + "âĿĹ ï¸ı", + "war ran", + "develop ments", + "ï¸ı âĥ£", + "fore x", + "hi ro", + "sne akers", + "xi de", + "vi va", + "stere o", + "bat ting", + "ss el", + "ho st", + "beng al", + "critic ism", + "q c", + "cr un", + "attemp ted", + "ry e", + "determin ation", + "cre ations", + "d read", + "label s", + "pos se", + "anc er", + "joh an", + "si ster", + "partner ships", + "les bian", + "k 
st", + "guaran tee", + "bar o", + "fix ing", + "ma son", + "m ous", + "chem icals", + "t less", + "bio diversity", + "par o", + "bhar at", + "ac ol", + "refu ge", + "en te", + "t iti", + "dys sey", + "respon ds", + "lef to", + "in er", + "se vel", + "rahu l", + "ol ine", + "frank fur", + "cho reo", + "enjoy able", + "c to", + "strugg les", + "wood land", + "heavy weight", + "gen s", + "rece p", + "ac cred", + "ðŁĺ ¡", + "trans formed", + "list en", + "at op", + "n k", + "sur ge", + "be re", + "gover nor", + "prison ers", + "clau de", + "t ill", + "mu lator", + "emo tion", + "water loo", + "star t", + "ðŁĩ º", + "clean ed", + "grand mother", + "fear less", + "afric an", + "astron omy", + "ðŁı ģ", + "ภĻ", + "the world", + "su itable", + "anth ony", + "k and", + "tt en", + "meaning ful", + "disc lo", + "jaco bs", + "à ¸", + "tom linson", + "ghe tti", + "ty pho", + "sub stan", + "as co", + "te k", + "nag ar", + "mu d", + "am on", + "vacc ine", + "f ty", + "fle sh", + "no el", + "infl ation", + "portu gue", + "glam our", + "tra m", + "v re", + "te qu", + "roun dup", + "w yn", + "rejec ted", + "mosa ic", + "si ghting", + "cal f", + "o ta", + "com position", + "go pro", + "gonz ale", + "e ed", + "b ard", + "tu e", + "effec tively", + "we en", + "al to", + "ri bs", + "rel ate", + "thir sty", + "fu rious", + "di m", + "ch ard", + "perfu me", + "s ny", + "chur chill", + "k of", + "master class", + "wa ve", + "ðŁĶ µ", + "er in", + "own s", + "to be", + "sk illed", + "te m", + "go f", + "en i", + "tor i", + "cra zy", + "l ick", + "resi stant", + "ici al", + "ag ar", + "! :", + "g ali", + "del aware", + "bl itz", + "koh li", + "pu ck", + "avail ability", + "hi malay", + "influ ential", + "cro chet", + "victor i", + "read ing", + "ho bby", + "vie t", + "j as", + "en gra", + "sk ul", + "ðŁĩ² ðŁĩ", + "educ ate", + "tech no", + "distric ts", + "blu es", + "se tt", + "seven th", + "lear ns", + "ee ee", + "apocaly pse", + "hang out", + "cru el", + "mu tu", + "bru h", + "hel en", + "she er", + "c tion", + "kle in", + "tex ans", + "ce real", + "sh ine", + "ne red", + "gra s", + "am bro", + "f ella", + "hin du", + "matthe w", + "li ma", + "mir anda", + "je wel", + "so ho", + "euro vision", + "neighb ours", + "chand ler", + "be sides", + "ðŁ¥ °", + "ast ros", + "thu mbs", + "ren ault", + "ra ve", + "hi red", + "ðŁĸ ¤", + "it ary", + "z or", + "bla zer", + "k ine", + "ea u", + "kat y", + "dc comics", + "pe c", + "ro dgers", + "water proof", + "kill ers", + "super int", + "pre serv", + "as so", + "brew ers", + "promo tional", + "sc am", + "villa ges", + "sket ches", + "ju icy", + "for life", + "au dit", + "so lo", + "fundam ental", + "len e", + "philipp ine", + "t end", + "conserv atives", + "sponsor ship", + "dd le", + "a ine", + "h tc", + "os i", + "hul k", + "w af", + "ภĻ", + "evalu ation", + "ant ine", + "sle e", + "robert son", + "roo sevel", + "ag i", + "sophi stic", + "emplo yers", + "bubb les", + "ko wski", + "inter action", + "sh u", + "bou le", + "ic an", + "j are", + "han k", + "leg itim", + "k nicks", + "kar ma", + "recei ver", + "per ks", + "u h", + "sta ir", + "sun i", + "labor atory", + "gra ves", + "voc als", + "oo t", + "c ture", + "thri ve", + "tic o", + "ãĥ ³", + "b w", + "carto ons", + "mcdon alds", + "dra w", + "y ung", + "pl er", + "li d", + "eth ical", + "groo ve", + "ent a", + "international womensday", + "pat ron", + "wor ries", + "ðŁİ ħ", + "ðŁij ĭ", + "ka therine", + "di az", + "tor i", + "bach chan", + "tru st", + "min eral", + "ic om", + "buil ders", + "bor n", + "col oring", + "lat 
te", + "ca se", + "revolu tion", + "tra der", + "ox id", + "chi pot", + "inst antly", + "sou thern", + "se hun", + "pro b", + "her nandez", + "lis bon", + "hu awe", + "p ong", + "me a", + "ro oney", + "wheel chair", + "ke en", + "be tt", + "cor in", + "regulat ory", + "di splac", + "ka ren", + "sch em", + "sun sets", + "wh ales", + "remin is", + "he p", + "hi de", + "mar cel", + "pand ora", + "do yle", + "th fc", + "ot to", + "no kia", + "trans gender", + "ko v", + "hawai ian", + "sha ve", + "so vere", + "exc er", + "nick i", + "pu g", + "st or", + "ro th", + "wee t", + "leg al", + "dig nity", + "po w", + "hom age", + "ðŁĩ³ ðŁĩ", + "s re", + "can on", + "la x", + "wo ah", + "quart z", + "ñ a", + "gree ting", + "flick r", + "nai robi", + "advoc ates", + "an c", + "vi i", + "eu gene", + "th ra", + "c re", + "el an", + "pen sion", + "th letics", + "ton i", + "re agan", + "x v", + "sto re", + "ben ch", + "har lem", + "todd ler", + "sent enced", + "âĻ¥ ï¸ı", + "glob ally", + "che aper", + "u f", + "ma m", + "nic o", + "ik u", + "tho u", + "ni st", + "dam i", + "th ala", + "rho des", + "sal e", + "bow ls", + "â Ī", + "las vegas", + "sanc tions", + "adm ire", + "mat ched", + "un able", + "travel er", + "ele ven", + "straw berries", + "âĢĶâĢĶ âĢĶâĢĶ", + "stu dio", + "jac ques", + "im s", + "valu ed", + "s no", + "cheese cake", + "n xt", + "e os", + "s x", + "f x", + "ton ic", + "hat ch", + "chic ks", + "gra ds", + "hand ic", + "r ory", + "as p", + "ri pped", + "denti st", + "n en", + "lu fc", + "âľ Ĭ", + "di ge", + "hop kins", + "sher man", + "f da", + "for all", + "ash ley", + "str and", + "h y", + "liqu or", + "buffe t", + "ess ence", + "phar ma", + "suri ya", + "ðŁĴĻ ðŁĴĻ", + "festi vals", + "z an", + "re fresh", + "pur ple", + "uni forms", + "kenne th", + "= )", + "as an", + "hel sin", + "transform ers", + "k ali", + "person alized", + "chal k", + "bo bby", + "â Į", + "the mes", + "depar ture", + "prin t", + "illustr ations", + "qui et", + "agre es", + "gri ff", + "Ø ³", + "m iti", + "toge ther", + "conven ience", + "ab ar", + "car lo", + "turt les", + "info sec", + "some what", + "ar lington", + "scholar ships", + "emir ates", + "mu ms", + "st ella", + "auton om", + "fe ather", + "g ore", + "nom inees", + "fragr ance", + "Ñ Ĥ", + "w ong", + "thea stern", + "gr e", + "z illa", + "is i", + "bump er", + "go o", + "do zens", + "ab duc", + "âļª ï¸ı", + "o ils", + "don ors", + "sil icon", + "i pod", + "fortn ite", + "ðŁĴ ¨", + "tor o", + "spark ling", + "consci ousness", + "pal a", + "nu m", + "moun ted", + "ffin s", + "thi eves", + "team mate", + "pra b", + "om er", + "ta pes", + "bo d", + "mit su", + "ste w", + "e re", + "p bs", + "tu sc", + "lo we", + "ra de", + "parliam entary", + "h m", + "ed gar", + "ðŁijĩ ðŁijĩ", + "to a", + "a gh", + "hon i", + "s late", + "ge ek", + "ap t", + "hard t", + "ta p", + "horiz on", + "grow th", + "make over", + "hi l", + "paper back", + "id an", + "reha bil", + "gi u", + "possi bilities", + "let tu", + "fran co", + "bo ss", + "ach er", + "does nt", + "mo e", + "ta ker", + "huss ain", + "ml k", + "di l", + "th ia", + "ham a", + "real ised", + "raven s", + "curric ulum", + "m ith", + "k night", + "ted x", + "r v", + "isai ah", + "cumb ria", + "birth days", + "f ing", + "pre z", + "mu barak", + "exquis ite", + "clear ance", + "y en", + "par i", + "ev o", + "à º", + "modi fied", + "app lying", + "imple ment", + "disco vering", + "chap man", + "indie game", + "dis k", + "crowd funding", + "mach in", + "li vel", + "sty led", + "âĿ Į", + "ma king", + "rehear sals", + 
"nutr iti", + "subscri ption", + "and ro", + "cre ators", + "car ries", + "ky lie", + "cam den", + "appren tice", + "tax pay", + "c ca", + "tuesday thoughts", + "pis sed", + "er man", + "dete c", + "freed om", + "mer i", + ".. !", + "psal m", + "sun light", + "per spec", + "be ings", + "book store", + "rock star", + "fun ctions", + "p ence", + "fav es", + "z n", + "obam acare", + "sp ill", + "coven try", + "pi geon", + "pi vo", + "ba it", + "kol kata", + "av al", + "don or", + "wa h", + "privi leg", + "tra ditions", + "rajas than", + "ten ess", + "portugue se", + "yn es", + "tack les", + "de fic", + "tor n", + "pol ling", + "thor ne", + "in a", + "bened ict", + "bar ry", + "cal ories", + "ver dict", + "save the", + "nor ton", + "off ice", + "main stream", + "impro ves", + "fr on", + "respon ding", + "real tor", + "scotti sh", + "de clar", + "r l", + "shi v", + "supp lier", + "re sting", + "swee ts", + "qu i", + ". â̦", + "whit ney", + "startu p", + "thank you", + "teach er", + "h alls", + "ha ve", + "hand made", + "pro ving", + "quar tet", + "ro chester", + "li an", + "virtu al", + "mend es", + "of icial", + "mid lands", + "x box", + "meas uring", + "o vo", + "accommod ation", + "bri des", + "collegi ate", + "intellec tual", + "in car", + "ni ag", + "ðŁį ·", + "sf w", + "coco a", + "co ats", + "civil ians", + "presi dency", + "mat rix", + "sweethe art", + "tri athlon", + "wag ner", + "ra dic", + "plann er", + "the o", + "execu tion", + "k um", + "the walkingdead", + "sc ar", + "ro tation", + "blo gging", + "bom b", + "re son", + "bb les", + "st are", + "assi sted", + "e do", + "brand ed", + "war nings", + "thor pe", + "acknow le", + "satis fied", + "sho res", + "ri d", + "dor a", + "phys ically", + "bi gh", + "appro ves", + "ha h", + "ric al", + "vers atile", + "pret end", + "lu m", + "ab hi", + "ye e", + "sp it", + "ãĢ Į", + "dj s", + "ash tra", + "j t", + "ven ues", + "gram mys", + "cy clo", + "tr acker", + "over watch", + "repl ica", + "el yn", + "nr l", + "lind sey", + "hom o", + "ballo ons", + "kitch en", + "si s", + "am os", + "ende av", + "ðŁĴ »", + "a rec", + "thu g", + "hoo ked", + "hr c", + "new york", + "bur gh", + "americ as", + "patric ia", + "ug u", + "ap athy", + "ha st", + "psy chi", + "cor k", + "petro l", + "ðŁİ ¬", + "ak u", + "po pping", + "psycho logical", + "au x", + "g ma", + "cad illac", + "wa ste", + "auth ent", + "bri stol", + "nam e", + "que er", + "to ber", + "jer ry", + "com in", + "ch ant", + "privileg ed", + "op ar", + "lo ser", + "tex t", + "mar ker", + "stri es", + "equ ally", + "ak i", + "christ mas", + "gare th", + "ble w", + "em ma", + "imag in", + "se als", + "che at", + "conditi oning", + "j ana", + "ren s", + "dar ies", + "o asis", + "disc ounts", + "coun cil", + "i ka", + "shir ley", + "vou cher", + "al ps", + "w x", + "q r", + "dri ft", + "attemp ting", + "ut c", + "Ø ª", + "gonzale z", + "m f", + "jo ker", + "paralle l", + "pa re", + "aspe cts", + "proce du", + "n p", + "am a", + "rale igh", + "bright en", + "gu ire", + "radi ation", + "cre scent", + "ho b", + "il le", + "str and", + "v ore", + "n ard", + "che st", + "di wali", + "av atar", + "al der", + "d ling", + "pa thetic", + "ðŁĴ ĺ", + "spir it", + "jor ge", + "film making", + "ðŁĻı ðŁĻı", + "challeng er", + "b j", + "down town", + "ht ml", + "ade qu", + "twi sted", + "in ely", + "( '", + "wra ps", + "oper ational", + "y ne", + "n us", + "mag net", + "market place", + "health ier", + "snap shot", + "dam on", + "inter ven", + "fe derer", + "ow ls", + "biscu its", + "j p", + "ro deo", + "blue 
berry", + "lec tion", + "fron tier", + "summ ers", + "re yes", + "pede strian", + "go l", + "caf fe", + "refur bi", + "bou lder", + "me ghan", + "speci alty", + "la ss", + "e i", + "suspec ts", + "appro x", + "rr r", + "ra th", + "st im", + "cru shed", + "he d", + "wh un", + "lo af", + "cr ore", + "river a", + "gene tics", + "so ck", + "wa sted", + "ny pd", + "answ ering", + "do ve", + "bel la", + "ol in", + "du n", + "fi ji", + "pre tty", + "spar kle", + "y un", + "j d", + "euro pa", + "li fts", + "am ber", + "mu r", + "te k", + "boy d", + "roy alty", + "in do", + "ri b", + "go tham", + "ti est", + "inst alling", + "ke mp", + "the photo", + "cos mic", + ") ))", + "whole sale", + "loy ment", + "eas y", + "su ing", + "sett led", + "af p", + "pro ver", + "suppor tive", + "re es", + "ne ath", + "deli ber", + "c é", + "wel come", + "pic oftheday", + "new born", + "pat ty", + "sun s", + "si est", + "fl int", + "diffe rently", + "spo ilers", + "troop er", + "g ins", + "cor y", + "look out", + "equi pped", + "ta pe", + "to by", + "resear cher", + "u sh", + "ke yes", + "al ma", + "induc tion", + "k w", + "k har", + "sl ick", + "bri de", + "e ur", + "cra ving", + "book ings", + "ch es", + "tr unk", + "vern on", + "sp her", + "cryst als", + "rel atively", + "pom pe", + "uni ons", + "val ley", + "par a", + "w ant", + "ok c", + "de af", + "ser gio", + "len non", + "sh ay", + "cr a", + "v at", + "he e", + "t we", + "liqu id", + "pol y", + "ðŁİ ģ", + "b ent", + "be aring", + "motor sport", + "bar be", + "te sti", + "han i", + "fin ancing", + "astron aut", + "water colour", + "ri sh", + "comic con", + "gar t", + "wr ong", + "ber n", + "it an", + "ste pped", + "fil ters", + "c low", + "me x", + "dem ons", + "all o", + "expand ed", + "comm and", + "et ers", + "go ats", + "si ri", + "y r", + "pot tery", + "mari on", + "i le", + "el an", + "san to", + "person a", + "du ke", + "hom eless", + "li ghted", + "wheel er", + "chang er", + "cab bage", + "sur real", + "ham burg", + "sma shed", + "str an", + "k not", + "i art", + "ob i", + "be dro", + "di al", + "th ick", + "b ingo", + "fu s", + "vacu um", + "con ve", + "ati ve", + "accur acy", + "accoun t", + "re fer", + "ri z", + "spider man", + "ban a", + "r ite", + "u b", + "ab s", + "medic al", + "lin k", + "si em", + "> >>>", + "be tra", + "g lowing", + "re actions", + "pupp et", + "spa ghetti", + "ang s", + "re medi", + "pray for", + "roy ce", + "char lotte", + "£ ï¸ı", + "gh et", + "affe cting", + "ro de", + "soci alist", + "mo ses", + "az i", + "o it", + "re porters", + "cd t", + "ap ing", + "s nat", + "minim al", + "wa ist", + "sie ge", + ">> >>", + "ri g", + "schmid t", + "h are", + "ec a", + "thor n", + "he mp", + "es the", + "cly de", + "th a", + "don ut", + "moham ed", + "ling erie", + "le gg", + "carpen ter", + "perform ers", + "de a", + "imag ined", + "cur se", + "la sh", + "ct r", + "agu a", + "ro ar", + "gr i", + "ro le", + "j fk", + "resur rec", + "roosevel t", + "maril yn", + "sm alle", + "will is", + "wa ited", + "char ities", + "the res", + "li k", + "origin al", + "car i", + "c ough", + "cru ci", + "la gun", + "contra st", + "k ou", + "arm our", + "re moving", + "t ent", + "maz da", + "bri ghter", + "thi ef", + "cor ner", + "tequ ila", + "buzz ing", + "al bi", + "p am", + "az ure", + "disc oun", + "pixel art", + "possi bility", + "ham ont", + "tra des", + "bu da", + "hi ve", + "vers y", + "fin ch", + "tran spa", + "em i", + "terri fying", + "in qui", + "g ba", + "sub stitu", + "collec ti", + "plac ing", + "cin dy", + "k ann", + "pa tho", + 
"diamon d", + "mour inho", + "guine a", + "anthro po", + "air s", + "pu mps", + "ì ļ", + "pas o", + "cur ling", + "an ita", + "resi dency", + "ne wh", + "jo on", + "cigare tte", + "que ue", + "ex trac", + "gam es", + "spl en", + "ex press", + "public ly", + "bon nie", + "tribun e", + "ba ek", + "reason able", + "c or", + "timo thy", + "she eran", + "Ä ±", + "f dn", + "su tton", + "concentr ation", + "carav an", + "x avier", + "al ger", + "cy lin", + "freder ick", + "ner ve", + "pe ak", + "lettu ce", + "j ail", + "pre game", + "kav an", + "up graded", + "eco logy", + "squad ron", + "gra pes", + "goo g", + "pa stry", + "ðŁĹ £", + "ãĥ¼ ãĥ", + "mil ano", + "awa z", + "presen ter", + "ðŁĮ ¿", + "her d", + "king s", + "tem plate", + "fl our", + "h v", + "k ley", + "i ya", + "spe c", + "at er", + "frankfur t", + "co ch", + "tex ting", + "del i", + "communi st", + "regi ment", + "ele anor", + "anticip ated", + "ðŁijĮ ðŁı»", + "thephoto hour", + "ran o", + "survi ving", + "simul ation", + "daw son", + "ar in", + "aqu a", + "m or", + "â̦ .", + "cin o", + "ira qi", + "sh az", + "dun dee", + "we s", + "dra u", + "hann ah", + "s news", + "occup ation", + "ste en", + "x m", + "ang les", + "sett ings", + "gur u", + "kno x", + "or ca", + "shap ing", + "w ent", + "dr illing", + "zz ie", + "br i", + "kis sing", + "fin d", + "ma ine", + "âŃIJï¸ı âŃIJï¸ı", + "ðŁĮ į", + "lar ry", + "bu sted", + "ta vern", + "acti vely", + "- \"", + "replac ing", + "no d", + "un lock", + ". \"", + "âŀ ¤", + "affili ate", + "to w", + "l n", + "happy newyear", + "di f", + "j m", + "green wich", + "contro versy", + "daw g", + "con dol", + "sav annah", + "compens ation", + "touch down", + "te o", + "amb itious", + "embro i", + "convic ted", + "iart g", + "bar ack", + "tr ance", + "testim ony", + "au dition", + "thum b", + "my ths", + "be x", + "que z", + "orch id", + "den y", + "entit led", + "hoo d", + "gr ant", + "in box", + "blue jays", + "r illa", + "smalle st", + "bur den", + "in famous", + "divi ded", + "boun daries", + "t ter", + "el t", + "wy oming", + "be verage", + "me sm", + "one ws", + "budd hist", + "y ana", + "as sad", + "is ms", + "bar rett", + "predic ted", + "back to", + "tw it", + "e there", + "cap tains", + "escap ed", + "ay o", + "lam borgh", + "gard ner", + "la ps", + "k al", + "adverti sement", + "insec ts", + "na po", + "am en", + "ac y", + "r and", + "g k", + "te h", + "k athle", + "tri dge", + "pan cake", + "at ro", + "pyram id", + "bu la", + "paral ym", + "gau ge", + "en cies", + "tom y", + "biscu it", + "but cher", + "quali fier", + "coun ty", + "ke i", + "po ols", + "dar ker", + "should ers", + "ðŁĩºðŁĩ¸ ðŁĩºðŁĩ¸", + "sp re", + "( \"", + "writ ers", + "g m", + "ðŁİ ĵ", + "k nit", + "hu ff", + "mt b", + "philli es", + "o st", + "den is", + "g art", + "licen sed", + "inter face", + "ex cel", + "d well", + "from the", + "co fficial", + "az zi", + "appear ing", + "fore st", + "n ana", + "ke ith", + "manufac turers", + "beck ham", + ") ?", + "e se", + "col ony", + "delic ate", + "ut ter", + "mc in", + "transpl ant", + "pre ferred", + "par d", + "ari e", + "hu b", + "po ds", + "perspec tives", + "pic t", + "del u", + "app er", + "be than", + "p mo", + "crimin als", + "femin ism", + "sh ack", + "circum stances", + "fel las", + "prote sting", + "wa x", + "sugge sted", + "t ator", + "dre w", + "om ni", + "fa ke", + "kath y", + "re b", + "del ine", + "ber ni", + "mi sty", + "ðŁij ©", + "er able", + "break through", + "men swear", + "millenni als", + "chan yeol", + "la z", + "inser t", + "rep lies", + "phra se", + 
"n x", + "ihear tawards", + "audre y", + "gran ite", + "rac ec", + "ori e", + "ter ra", + "innov ations", + "britt any", + "at eral", + "pe ar", + "bio logical", + "sh ments", + "institu tion", + "m sn", + "frequ ency", + "d man", + "neg lec", + "t f", + "ste fan", + "fox news", + "ty po", + "comm s", + "sequ ence", + "car men", + "wh ites", + "econom ist", + "exe ter", + "se um", + "re sorts", + "cas ually", + "bun de", + "divi de", + "Ø ¹", + "ga g", + "cre ed", + "reti re", + "cau cus", + "rapi ds", + "wrestle mania", + "tul sa", + "sunder land", + "fundam ent", + "o di", + "yam aha", + "v ary", + "intri gu", + "el se", + "be acon", + "an gie", + "tra ded", + "tran sm", + "g ents", + "kn itting", + "gal ac", + "ðĿ Ĺ", + "u to", + "sea side", + "hol t", + "re rs", + "far go", + "train ers", + "mon soon", + "b ale", + "sou ght", + "mad die", + "h w", + "co li", + "fr an", + "fav s", + "ðŁĴ Ķ", + "int ent", + "r ally", + "s bs", + "lemon ade", + "barack obama", + "bre ad", + "stick y", + "explo sive", + "chel ten", + "t j", + "as soc", + "ram en", + "hom ies", + "v log", + "mi ster", + "lor d", + "âĢįâĻ Ģï¸ı", + "aly ssa", + "sketch book", + "ru mble", + "cat ch", + "migr ant", + "discipl ine", + "un likely", + "chronic les", + "fl ora", + "sl ams", + "am id", + "s boro", + "coo p", + "ju mps", + "tran qu", + "mel is", + "sof ia", + "en ri", + "gab e", + "sy ri", + "nicol as", + "cha i", + "w v", + "be cky", + "foo ty", + "ta o", + "suppo se", + "ðŁĺįðŁĺį ðŁĺįðŁĺį", + "plu sh", + "ri sh", + "ðŁ¤ ĵ", + "k ha", + "satur days", + "ac cent", + "he c", + "lim it", + "carl ton", + "wi red", + "taylor swift", + "ðŁĺ ij", + "sq l", + "har ro", + "recipi ents", + "g at", + "go p", + "th of", + "amaz ed", + "gh an", + "ðŁıĨ ðŁıĨ", + "por to", + "cla re", + "di stant", + "na c", + "ohi o", + "ðŁĻı ðŁı¼", + "mt n", + "anti bio", + "dino sa", + "me sa", + "par tial", + "b v", + "lear nt", + "lov ato", + "questi on", + "ex tract", + "gossi p", + "gi bb", + "niag ara", + "ðŁij ¨", + "displa yed", + "so oner", + "ste vie", + "nug gets", + "ml n", + "bro m", + "tur b", + "give aways", + "stu pi", + "bl ink", + "c ili", + "conven ient", + "mo h", + "vi ve", + "f ric", + "cau se", + "cham ber", + "cu les", + "ne arest", + "is se", + "small biz", + "t j", + "canadi ans", + "smar ter", + "bra sil", + "ra re", + "que tte", + "w ha", + "cand le", + "at omic", + "ðŁijį ðŁijį", + "warri or", + "relax ed", + "stri ps", + "ne ur", + "k ka", + "r fc", + "jen sen", + "reco vering", + "respon ses", + "sal am", + "ortho dox", + "acti ve", + "ell ers", + "n it", + "âŃ IJ", + "metro politan", + "centu ries", + "vi da", + "gra ding", + "transpa rent", + "sim ple", + "do ts", + "superint endent", + "elev ator", + "autom ated", + "red skins", + "ima m", + "summer time", + "jona than", + "ge aring", + "michel le", + "confl ic", + "m ice", + "to te", + "publi sh", + "pa x", + ") -", + "na iled", + "á ´", + "tele scope", + "ser bia", + "ba b", + "ape u", + "st ically", + "sen ti", + "r ats", + "isol ated", + "grou p", + "hat red", + "paranor mal", + "stan ley", + "ali on", + "safe ty", + "l s", + "ठ°", + "nex us", + "alexand ra", + "mas ks", + "+ +", + "tr on", + "au k", + "brother hood", + "brow se", + "mix es", + "sim one", + "mu sk", + "appro ve", + "lo la", + "ex p", + "per th", + "fu turi", + "un seen", + "d m", + "chel se", + "sc outing", + "o we", + "portsm outh", + "k ram", + "mi ze", + "di spen", + "su p", + "d lc", + "adver t", + "tere sa", + "is le", + "cy cle", + "met all", + "shi elds", + "marin ers", + "ra z", + 
"ing en", + "fun d", + "an go", + "jon es", + "o ka", + "mad den", + "broc coli", + "domin ic", + "situ ations", + "mer o", + "cric ke", + "puni shment", + "d b", + "sha king", + "ðŁĺ ļ", + "m q", + "ari ans", + "le h", + "cla w", + "we ds", + "d ure", + "ni el", + "j elly", + "gour met", + "tra ders", + "le vi", + "w ages", + "kne es", + "wi se", + "heaven ly", + "avi d", + "melo dy", + "z ack", + "ban anas", + "apprentic e", + "pro p", + "fun ny", + "o de", + "respec ted", + "me gan", + "fe wer", + "dra fted", + "med it", + "gra pe", + "us army", + "cru sad", + "vo cali", + "prepar ations", + "non sense", + "us age", + "th r", + "ro th", + "wiz ards", + "insi de", + "promo tions", + "mon a", + "red sox", + "si g", + "eleg ance", + "ch ia", + "univer sal", + "ãĢ į", + "ra ja", + "un ga", + "pol lin", + "filip ino", + "ak a", + "t sun", + "ik on", + "bi king", + "decor ations", + "z ac", + "cade ts", + "hum our", + "ag m", + "re ppin", + "vac cin", + "elo ve", + "u w", + "dia be", + "galla gher", + "az er", + "do l", + "a while", + "pro minent", + "wel sh", + "t ann", + "' )", + "bi en", + "wa g", + "in al", + "c wc", + "wic ket", + "ur st", + "q anon", + "x e", + "out door", + "dun n", + "star r", + "co logy", + "ric ky", + "u efa", + "reb ounds", + "s music", + "inf ant", + "ðŁĻ ĭ", + "so p", + "u mber", + "hand ing", + "beg in", + "sor ting", + "ha sh", + "sp ati", + "re k", + "buda pest", + "black hawks", + "dele te", + "ro m", + "can did", + "auth ori", + "de bris", + "spe cul", + "inter section", + "marri ott", + "im ran", + "ðŁĺģ ðŁĺģ", + "cru ises", + "ram sey", + "rafa el", + "aware ness", + "vas cular", + "beyon cé", + "ru g", + "ðŁĺ Į", + "festi v", + "ar am", + "s able", + "bas il", + "p ill", + "flo oring", + "un beaten", + "implic ations", + "u f", + "w ound", + "for ge", + "poin ting", + "po ts", + "popular ity", + "ðŁijı ðŁı»", + "mani pul", + "s lots", + "deb ates", + "abs ence", + "ver mont", + "never forget", + "wri st", + "gl oria", + "ren ce", + "hu sk", + "mel ting", + "ðŁİ Ł", + "br aces", + "tim ely", + "transform ing", + "am ps", + "ma k", + "po e", + "ah an", + "gener ally", + "nd p", + "ale ppo", + "unic ef", + "pro fs", + "nor d", + "ma sk", + "jackson ville", + "v v", + "sh ells", + "bloom ing", + "oper ators", + "char coal", + "ne ville", + "ma gi", + "chi p", + "sam a", + "ir an", + "re forms", + "accu mul", + "ru e", + "æ ľ", + "web sites", + "ga on", + "devast ating", + "sto s", + "glaci er", + "ra pp", + "chipot le", + "pr a", + "or ous", + "rom ney", + "seas on", + "decor ative", + "c isco", + "dit ch", + "compla in", + "ll o", + "assu me", + "ðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤðŁĺĤ", + "n els", + "cent ric", + "ft w", + "car rots", + "tat a", + "can ter", + "per ience", + "li ers", + "demo s", + "bl unt", + "oper ate", + "reserv ations", + "le ah", + "sub stance", + "di son", + "an te", + "elec tion", + "v ue", + "squ are", + "non profit", + "ca a", + "f su", + "y am", + "ãĤ ¤", + "v ladi", + "comple tes", + "mar i", + "philli p", + "ne ill", + "er as", + "ka it", + "men do", + "mahar ashtra", + "g p", + "dan e", + "provi dence", + "ther apeu", + "juven ile", + "me mo", + "in corpor", + "aa aa", + "seven teen", + "teen ager", + "à £", + "or ns", + "wi de", + "cu teness", + "tw d", + "ff les", + "bar a", + "com edy", + "over time", + "y az", + "bar on", + "unemp loyment", + "ðŁij ĭ", + "exter ior", + "den se", + "cent res", + "match up", + "history month", + "artif icial", + "qu it", + "e sk", + "war n", + "cr itic", + "j af", + "ðŁĵ ²", + "inform ative", + "fu els", + 
"recy cle", + "nam ing", + "stri pe", + "sol ic", + "mole cular", + "dee pi", + "con vo", + "s sel", + "na e", + "de scent", + "ti z", + "accoun tability", + "ter ry", + "r ito", + "sl ay", + "em o", + "dem ol", + "sens ation", + "co v", + "tor e", + "round table", + "y ol", + "excu ses", + "ॠį", + "tur quo", + "hh hh", + "pod casts", + "cele b", + "me ssi", + "li o", + "man n", + "contribu ted", + "u z", + "gener ator", + "ele ts", + "veg gie", + "indu l", + "en suring", + "detro it", + "pun jab", + "tran spor", + "instru ction", + "ad d", + "por cel", + "pan eli", + "cir cles", + "persi st", + "clay ton", + "sp n", + "dog softwitter", + "is nt", + "sp r", + "retail ers", + "p w", + "hun gar", + "el ena", + "mon aster", + "gu atem", + "je ssie", + "an z", + "ra shi", + "fle e", + "car ving", + "fau x", + "l al", + "hen ri", + "d jo", + "du ll", + "s ana", + "lar a", + "glo be", + "cri mson", + "com pass", + "pau se", + "na b", + "lion el", + "ba ths", + "u fo", + "invent ory", + "sin gh", + "sat an", + "ðŁĩ ¸", + "ce ments", + "in form", + "gener ated", + "bi den", + "av g", + "tas ks", + "de er", + "sa u", + "ja iled", + "pa stel", + "sc c", + "na il", + "steel e", + "per is", + "lamborgh ini", + "pur sue", + "mar gin", + "u ch", + "bo sch", + "dra in", + "cl ara", + "bo m", + "lat ino", + "web ster", + "rose mary", + "r ha", + "s oun", + "billion aire", + "not ch", + "percent age", + "con or", + "' \"", + "hom es", + "earth day", + "h ort", + "big gest", + "di sin", + "wal ton", + "edit ors", + "im ma", + "om ar", + "equi valent", + "pharmac eu", + "ah med", + "cam eo", + "han ni", + "under rated", + "ge ment", + "micro bi", + "v oo", + "honor able", + "obe sity", + "âļ ¡ï¸ı", + "limer ick", + "invol vement", + "st agram", + "boule vard", + "bur g", + "blackand white", + "liber ation", + "fi ve", + "inter im", + "sm m", + "rival ry", + "cap abilities", + "stat ements", + "thu mb", + "ve d", + "sw ans", + "bar ber", + "e que", + "seren a", + "hel m", + "noo dle", + "sam pling", + "n awaz", + "sing le", + "thunder storms", + "sh on", + "in ev", + "ë ¯", + "to pp", + "orch ard", + "bi an", + "ðŁĺ Ķ", + "door step", + "salv ation", + "marke ting", + "r ons", + "cle mson", + "ra vi", + "in take", + "stand with", + "sin a", + "ha iku", + "ple y", + "elector al", + "ph illy", + "la ys", + "electr ic", + "cap turing", + "u pp", + "er gy", + "believ ing", + "cul tures", + "es day", + "inva sive", + "ed ed", + "spee ch", + "end ur", + "viet nam", + "boy cott", + "pe de", + "deli ver", + "ðŁĴĸ ðŁĴĸ", + "mer chant", + "st ir", + "den ies", + "poc kets", + "o ti", + "cu ddle", + "ro land", + "mm ed", + "den ed", + "lear ners", + "hoo p", + "sour cing", + "h acked", + "di m", + "environ ments", + "ben son", + "jud icial", + "wor cester", + "pear ls", + "govern ments", + "arri vals", + "cor ners", + "tun ing", + "la bour", + "y m", + "or dering", + "le wi", + "i fe", + "hygi ene", + "thou ghtful", + "indone sian", + "campaig ning", + "princi ple", + "assau l", + "ru bb", + "at v", + "wil ly", + "en tre", + "il i", + "ph on", + "du ties", + "âĻ¥ âĻ¥", + "sn akes", + "lo op", + "am ar", + "conver tible", + "bon ding", + "ment oring", + "max well", + "ethere um", + "destro ying", + "ax is", + "ca iro", + "fin nish", + "sho ck", + "ðŁĺ IJ", + "cal eb", + "com a", + "pe dal", + "co re", + "contin ent", + "el son", + "temp o", + "helsin ki", + "ac p", + "tack ling", + "st ated", + "bl a", + "dou b", + "sma shing", + "a ja", + "camer on", + "disru ption", + "warm th", + "being salmankhan", + "bullet in", + 
"o de", + "syrac use", + "ar an", + "mc gregor", + "bul k", + "an ton", + "confir mation", + "sp ine", + "im ran", + "instru c", + "jac ks", + "chi o", + "pal m", + "str e", + "embarra ssing", + "un t", + "elimin ate", + "to ss", + "c ise", + "a ws", + "oni sts", + "sh inee", + "jo s", + "ho se", + "li vely", + "opp onents", + "mo vements", + "recogni zing", + "sandwich es", + "sh akes", + "exerc ises", + "se at", + "profe ssion", + "merry christmas", + "lu gg", + "adopt dont", + "mar vin", + "byr ne", + "un le", + "he t", + "ku wait", + "rah man", + "aspe ct", + "humb led", + "gen es", + "f and", + "long time", + ") ;", + "cam pu", + "an gus", + "ðŁijį ðŁı¼", + "q uran", + "sle eves", + "s lic", + "¸ ë", + "twel ve", + "your e", + "i ke", + "go gh", + "b st", + "dic tionary", + "reflec ting", + "to on", + "yar n", + "em bed", + "ðŁı ´", + "re serves", + "floo ded", + "ver iz", + "du sk", + "estab lish", + "pro li", + "au d", + "ritu al", + "or bit", + "declar ation", + "recor dings", + "cam o", + "cas sette", + "good luck", + "cu tter", + "bo p", + "b ho", + "che ating", + "paci fic", + "ma res", + "tim er", + "col t", + "tr ous", + "tomor row", + "han sen", + "ci e", + "w ang", + "ban i", + "circu lar", + "ac ute", + "far mer", + "co ys", + "p se", + "ir ving", + "w j", + "haw kins", + "b ison", + "ur day", + "cru ising", + "o te", + "k ath", + "whi stle", + "your selves", + "ant is", + "sla sh", + "thorough ly", + "ke sh", + "ser ie", + "ex em", + "en ig", + "guil d", + "sh red", + "ho gan", + "ap o", + "ä ¸", + "pu zz", + "ne tball", + "au ssi", + "panor ama", + "ws j", + "av is", + "ar ming", + "hum ph", + "brow ser", + "cri es", + "fo ggy", + "mat te", + "ðŁĮ »", + "it er", + "tal lest", + "by ron", + "cap tiv", + "je su", + "any ways", + "flag ship", + "p ton", + "we y", + "fay ette", + "financi al", + "f oul", + "solom on", + "jenni fer", + "cucu mber", + "ar gue", + "tex tile", + "wrest ler", + "john ston", + "pa stor", + "ðŁĺŃðŁĺŃ ðŁĺŃðŁĺŃ", + "cac tus", + "edi ble", + "re served", + "ric hie", + "met res", + "ingredi ent", + "h ella", + "un to", + "ch ol", + "cele bs", + "po ets", + "gra ham", + "hay den", + "coinci dence", + "b aw", + "communic ate", + "flet cher", + "/ -", + "tole do", + "ecu ador", + "coun sel", + "s laughter", + "line ar", + "at p", + "os u", + "jo el", + "ev ed", + "conqu er", + "ru stic", + "plic ity", + "recogn ise", + "room mate", + "cr acked", + "jas per", + "ph er", + "ðŁĮ º", + "wo ven", + "mo ist", + "ff c", + "ste ering", + "ni sh", + "stand ings", + "frequ ent", + "ar di", + "haz el", + "as msg", + "bau m", + "d art", + "si dd", + "nat h", + "ch ero", + "card board", + "c ss", + "n sfw", + "pa ir", + "ðŁĺį ðŁĺĺ", + "occur red", + "homeless ness", + "mal one", + "ph e", + "xi a", + "pad dy", + "decl are", + "theat re", + "b f", + "per sian", + "ta d", + "ax e", + "susp icious", + "lam b", + "mu cho", + "sen ior", + "st as", + "k ite", + "st ing", + "gra d", + "k af", + "wat ering", + "Ø ¯", + "spi ral", + "th ms", + "educ ator", + "jer ome", + "of c", + "clo ck", + "su l", + "pe mb", + ".... 
.....", + "park way", + "de aux", + "restric tions", + "m ons", + "need le", + "e j", + "le agues", + "water melon", + "am an", + "pl enary", + "max im", + "w ab", + "coming soon", + "bry ce", + "vi gil", + "super market", + "fortun ate", + "turquo ise", + "presi dent", + "li v", + "inter ns", + "feel in", + "fix tures", + "stun t", + "st aged", + "premi eres", + "lo k", + "prac titi", + "shor tage", + "log ne", + "ve c", + "con cor", + "roc ke", + "li g", + "com posed", + "syn thetic", + "di p", + "cam ila", + "ch is", + "j ou", + "su san", + "eye brows", + "supp lement", + "satis faction", + "moham mad", + "ti bet", + "house of", + "pu n", + "as sam", + "shado whun", + "psy ched", + "se duc", + "mand atory", + "her bert", + "sc allo", + "stream ers", + "proto col", + "block buster", + "produc es", + "sch nei", + "lau rel", + "tri be", + "time hop", + "pl a", + "mod elling", + "tv time", + "mtv stars", + "wi dow", + "me tric", + "ch am", + "con do", + "flow ering", + "ale c", + "d ms", + "inten sity", + " ¨", + "mccar tney", + "islam abad", + "k b", + "f fi", + "ph al", + "anal og", + "f ond", + "h acks", + "positi vity", + "treat y", + "sub marine", + "conne ct", + "sel en", + "categor ies", + "cu b", + "organi ze", + "si k", + "quote oftheday", + "remin ding", + "am or", + "loc king", + "ðŁijı ðŁı¼", + "comp ound", + "et te", + "b out", + "rec ur", + "fe rence", + "mi zz", + "tren d", + "hip ster", + "for tress", + "forth coming", + "preli min", + "o dyssey", + "ang p", + "del ici", + "even ings", + "ðŁĶ ¹", + "i q", + "d w", + "da ir", + "kathr yn", + "christian ity", + "moon light", + "ha b", + "wh oo", + "f bf", + "se th", + "genu inely", + "pa x", + "char ity", + "deplo yed", + "b nb", + "bu cs", + "ju dg", + "con ge", + "plant ation", + "im press", + "car a", + "sc lub", + "sco py", + "land ers", + "compla ints", + "b ama", + "re build", + "x y", + "real ism", + "sh our", + "le in", + "brac elets", + "mer a", + "assas sin", + "an chor", + "ðŁijĮ ðŁı¼", + "lin en", + "con fron", + "chronic le", + "comm ent", + "cat alog", + "il les", + "gor ge", + "me try", + "jung kook", + "love my", + "sent in", + "se em", + "fit ness", + "alli ed", + "ts man", + "digital transformation", + "pr an", + "lo ft", + "min ton", + "alden richards", + "en vel", + "cher ish", + "certain ty", + "zz z", + "rhin o", + "per kins", + "en rich", + "cape town", + "ome ter", + "sec tions", + "ske leton", + "def enders", + "ðŁĺ Ŀ", + "pen c", + "bri t", + "ja h", + "capital ism", + "ðŁ¥ ĩ", + "baz aar", + "re me", + "ex t", + "kk k", + "conver t", + "stor my", + "b ye", + "kar an", + "chry sler", + "ad os", + "pre ssed", + "syn c", + "ation day", + "dang er", + "bad ges", + "refu ses", + "em powering", + "ly m", + "ex ports", + "adoptdont shop", + "ðŁĩ ¯", + "th c", + "awa ited", + "focu ses", + "fin ed", + "o at", + "haha hah", + "âģ ©", + "n family", + "fi ona", + "luck ily", + "thr illing", + "ty ping", + "out break", + "di es", + "he u", + "craw l", + "ne sses", + "o ath", + "scri pts", + "gee ks", + "ðŁIJ Ŀ", + "p b", + "mathemat ics", + "al is", + "________ ________", + "gymna stics", + "acti vism", + "recommend ation", + "gre n", + "wa in", + "cour ty", + "n apol", + "cau li", + "hor nets", + "g als", + "jo ckey", + "dir ty", + "at ar", + "enor mous", + "pe st", + "greg ation", + "an os", + "ii ii", + "def ends", + "black historymonth", + "at x", + "mb c", + "lugg age", + "wit ch", + "co b", + "la sts", + "cu m", + "gg g", + "ba thing", + "n ar", + "ce bu", + "ðŁį ĥ", + "navig ation", + "min e", + "re jo", 
+ "ðŁİ Ģ", + "gif tide", + "re ta", + "use less", + "pu ll", + "defic it", + "al lu", + "ati me", + "it v", + "tr illion", + "pu e", + "ac ies", + "proce dure", + "l ori", + "jen ny", + "c ad", + "ul ously", + "dr ac", + "promo tes", + "ing the", + "can u", + "woo hoo", + "na omi", + "zar dari", + "ts u", + "be ir", + "sd g", + "le ver", + "we ber", + "ab ud", + "lun d", + "crow ded", + "deplo yment", + "ter rain", + "ken ny", + "ho f", + "witne ssed", + "lo ch", + "j k", + "bul ly", + "w ren", + "poe try", + "do ff", + "ww i", + "mo red", + "din i", + "cul ture", + "promp t", + " ¥", + "maur ice", + "to pps", + "r m", + "cor respon", + "ab out", + "jewel s", + "gi br", + "eag le", + "ðŁĺĺ ðŁĺĺðŁĺĺ", + "l ending", + "sou ven", + "ç Ķ", + "contemporary art", + "establi shment", + "j ong", + "â̦ \"", + "gat or", + "patri otic", + "mc coy", + "v ape", + "human e", + "feli z", + "coach ella", + "re posting", + "ste als", + "fu ller", + "n ering", + "at ra", + "( -", + "bla ke", + "he ather", + "wor ms", + "discipl inary", + "rede mption", + "y ard", + "am in", + "\" @_", + "d nc", + "t ds", + "k appa", + "ne wark", + "comm its", + "spe ars", + "j ams", + "t and", + "msn bc", + "inter medi", + "aim ed", + "at ic", + "teen th", + "observ ation", + "kash mir", + "kavan augh", + "ou l", + "san francisco", + "re u", + "bel ated", + "cho w", + "pass word", + "st ills", + "deta ined", + "sar i", + "day ton", + "dar ren", + "itali an", + "ar th", + "amu sic", + "ar bit", + "w m", + "v m", + "he m", + "dou g", + "my r", + "a sho", + "pre v", + "vin d", + "bra h", + "sta g", + "ภµ", + "pre views", + "gu k", + "con taining", + "leon ardo", + "sad dle", + "ru shing", + "st av", + "lon gh", + "gam bling", + "ve gas", + "reserv ation", + "end ale", + "bal a", + "fl a", + "vari ant", + "he dge", + "bulgar ia", + "nat ali", + "we aver", + "sol st", + "encoura ged", + "ap c", + "as parag", + "ne st", + "cycli sts", + "fe l", + "ìĬ ¤", + "overwhel ming", + "pey ton", + "j it", + "a post", + "mb le", + "ble eding", + "neighbour hood", + "a very", + "expre ssions", + "mac donald", + "gi gs", + "mon ds", + "illu sion", + "n ct", + "cam ero", + "over head", + "my th", + "ol y", + "vi o", + "et v", + "lau rie", + "unve iling", + "pri or", + "con n", + "iron man", + "di ff", + "day in", + "crit ici", + "con go", + "re vision", + "wal e", + "direc tor", + "p ines", + "black pink", + "gar ner", + "cur ated", + "manit oba", + "h ac", + "common ly", + "bar ton", + ".... 
#", + "mor tality", + "live smatter", + "philos op", + "shor ter", + "con vince", + "fre ak", + "vend ors", + "insi ghtful", + "el ly", + "sens ors", + "e led", + "s berg", + "weight loss", + "u kip", + "sp ur", + "priv ate", + "qu a", + "ss c", + ", ...", + "supervis or", + "advis er", + "amaz ingly", + "less er", + "at es", + "mah on", + "oooo oo", + "sar as", + "pmo india", + "waff le", + "un ders", + "toler ance", + "sculp tures", + "her sh", + "kno cking", + "smo ke", + "cathol ic", + "gri m", + "tra veled", + "fli p", + "ge off", + "dinosa urs", + "sle pt", + "scar let", + "ok i", + "compla int", + "ob sc", + "nam i", + "la g", + "cross fit", + "u fc", + "mc cain", + "refe ree", + "sad ness", + "pen ny", + "li eu", + "mo de", + "ki er", + "vol s", + "w is", + "el on", + "she a", + "ba o", + "son ia", + "cla ire", + "em manuel", + "moist ure", + "di gest", + "vi ii", + "t eller", + "ch on", + "access ory", + "night club", + "foss il", + "aw an", + "hu sky", + "ab original", + "brand on", + "ffici ent", + "cou gars", + "ste d", + "ad mitted", + "igno red", + "content marketing", + "ag as", + "v ase", + "execu ted", + "negoti ations", + "she ad", + "n and", + "tab lets", + "go th", + "ts al", + "d fw", + "on ep", + "protec tor", + "sp ho", + "gaz ette", + "andre as", + "ss er", + "comp ilation", + "ha v", + "contain ers", + "bro ker", + "soc al", + "porcel ain", + "hy uk", + "air ing", + "ðŁĴ °", + "publi sher", + "scen ario", + "spart ans", + "re viewing", + "itu des", + "ed el", + "pear son", + "ba sh", + "mau i", + "a ad", + "ðŁĮ Ĭ", + "li u", + "ul ate", + "program mes", + "fav our", + "web design", + "real ty", + "motiv ational", + "cro sses", + "' ...", + "bus ch", + "adjust able", + "ar jun", + "mist ak", + "dimen sion", + "pi stol", + "weigh s", + "en y", + "unve il", + "indy car", + "gor don", + "f ade", + "fran ken", + "qual ities", + "bet t", + "loc ate", + "ker r", + "sp c", + "confu sion", + "ne e", + "luck y", + "bas es", + "dep ends", + "fire fighter", + "ol a", + "re t", + "mar oon", + "ðŁĶ Ĭ", + "w am", + "defin ing", + "whe at", + "bi l", + "é s", + "b hai", + "psy ch", + "ta u", + "ic ans", + "thi k", + "ob ile", + "inspec tor", + "ìĨ Įë", + "ill on", + "go s", + "ev angel", + "fa i", + "si st", + "voc ation", + "bur ge", + "chi stan", + "renew ed", + "enthusi asm", + "en ting", + "ag ri", + "ike a", + "m sc", + "aero space", + "sens iti", + "memo ir", + "hosp ice", + "co caine", + "der ry", + "mechan ics", + "Ħ à¸", + "tin o", + "reduc es", + "collec tors", + "in justice", + "supp re", + "v ana", + "ab un", + "nap a", + "su sa", + "os lo", + "e ff", + "en core", + "lic ence", + "ched dar", + "z al", + "moun t", + "ðŁĴ IJ", + "threat ens", + "!! 
\"", + "archi e", + "fu tsal", + "scu ba", + "jo s", + "gn on", + "se xi", + "s official", + "compar ing", + "domin ant", + "tof theday", + "fa it", + "propos als", + "gi ft", + "y as", + "cn c", + "l r", + "ha b", + "reser voir", + "beli efs", + "gener al", + "mar ti", + "t d", + "est e", + "ì ł", + "wi l", + "ðŁij ¯", + "ðŁĶ «", + "sp x", + "et work", + "excer pt", + "e instein", + "hir o", + "sil hou", + "team ed", + "per ception", + "corri dor", + "mental health", + "hin ts", + "ben ny", + "induc ted", + "sw x", + "wi desp", + "spe ak", + "cher yl", + "dru g", + "ðŁĺ ķ", + "h f", + "asparag us", + "myster ies", + "fitz gerald", + "off er", + "therap ist", + "care er", + "dam aging", + "ts d", + "per u", + "wei bo", + "y ay", + "phoeni x", + "disc re", + "mac book", + "bar ker", + "stig ma", + "sp read", + "roc kies", + "kang ar", + "bri dg", + "pa i", + "bi shop", + "ta iled", + "capsu le", + "ðŁĴ ĵ", + "ge of", + "roy ale", + "short listed", + "o ste", + "ash amed", + "ch app", + "key e", + "cl a", + "screen shot", + "austri an", + "nati ve", + "en ight", + "juli et", + "michel e", + "ðŁĮ ´", + "travel ers", + "pi l", + "football er", + "win chester", + "ðŁĻ Ħ", + "azer bai", + "gold eng", + "organis ations", + "interpre tation", + "predat or", + "ofthe week", + "lo gan", + "pok é", + "mari e", + "cal la", + "t nt", + "cin de", + "ge tic", + "fit fam", + "gra v", + "ow ens", + "ðŁĮ ±", + "shoot out", + "sal is", + "commissi ons", + "co he", + "p tic", + "ni xon", + "hi a", + "amb ition", + "mar ine", + "cruel ty", + "t k", + "cru de", + "sal ty", + "jim a", + "mon go", + "ir ony", + "on wards", + "arre sts", + "strang ers", + "ig er", + "cycli st", + "ra g", + "exten ds", + "tra dio", + "bour g", + "mo i", + "el la", + "e able", + "lex us", + "au l", + "der a", + "histor ian", + "mor ton", + "ti ff", + "man ner", + "ko t", + "d k", + "po inted", + "mar qu", + "a an", + "en ey", + "du blin", + "on poli", + "em ili", + "secre t", + "fl o", + "âļ ¡", + "ba j", + "ste ep", + "accompan ied", + "rum ours", + "dev i", + "purch asing", + "fi g", + "pu b", + "sch oo", + "autonom ous", + "go alie", + "x ia", + "autom atically", + "re vers", + "ter o", + "fu ku", + "titan ic", + "shoo k", + "sand als", + "see kers", + "exc av", + "nor dic", + "bigo live", + "ba ke", + "r att", + "z ak", + "ne p", + "ðŁĺ ¤", + "cand y", + "billi ons", + "book worm", + "pp et", + "à ³", + "sur faces", + "sc ars", + "phil ip", + "do gg", + "ci gars", + "co te", + "transl ated", + "cur ator", + "sin dh", + "han gover", + "bre wer", + "on es", + "el ton", + "ðŁĴª ðŁı¼", + "mar cu", + "elli ot", + "righ te", + "di oce", + "ru ss", + "rail ways", + "grand son", + "as cen", + "apo logy", + "awa it", + "mob ili", + "re spir", + "parti san", + "oli vi", + "stri ke", + "yo o", + "white house", + "expre ssed", + "pu ps", + "bed ford", + "cul tur", + "fro gs", + "fly ing", + "cav ali", + "c ds", + "fri ger", + "street photography", + "re solve", + "tali ban", + "kan g", + "cru shing", + "ju m", + "ðŁĺ Ĵ", + "william son", + "tan g", + "cur ly", + "t man", + "veter an", + "fa ire", + "artificial intelligence", + "un anim", + "pre n", + "back drop", + "fr ances", + "oc cer", + "doro thy", + "work ing", + "ar thr", + "conver ted", + "day light", + "serv ant", + "pad dle", + "compla ining", + "thir ty", + "nad al", + "ak u", + "ibra him", + "ad dressed", + "p iss", + "green house", + "batt alion", + "si mulator", + "out lets", + "embroi dery", + "ðŁĵ ±", + "fis cal", + "ger ard", + "sas sy", + "ðŁİī ðŁİīðŁİī", + "vent ures", + 
"mer it", + "public ity", + "ðŁij Ī", + "sophistic ated", + "c tu", + "conven tional", + "condol ences", + "isra el", + "tra dition", + "ar an", + "te ss", + "gla d", + "ðŁĺĬ ðŁĺĬ", + "correc tion", + "ge on", + "am d", + "or ship", + "be ast", + "ch ment", + "ì ŀ", + "nic o", + "wk nd", + "wel s", + "cushi on", + "beli e", + "vo c", + "idio ts", + "under neath", + "pu ma", + "corn ell", + "en ation", + "lu l", + "swa ch", + "ab ig", + "u rer", + "mi e", + "form erly", + "ca f", + "er nal", + "chor us", + "juli us", + "sen ator", + "âľ į", + "wh ir", + "salv ador", + "ph d", + "uni fied", + "boo ster", + "graph ical", + "w rec", + "son ny", + "mi z", + "dere rs", + "s all", + "ven s", + "tusc any", + "wi d", + "y ong", + "kur ds", + "w az", + "trol ls", + "mac ro", + "cat urday", + "pre ssing", + "sa sha", + "cent ennial", + "gu sts", + "em c", + "be fore", + "den ise", + "cu st", + "ðŁĵ ¢", + "lo oo", + "base l", + "eng land", + "y olo", + "ar du", + "manife sto", + "do ha", + "ì ľ", + "kni ves", + "bourne mouth", + "bi bl", + "bar b", + "al icia", + "Ø ©", + "com er", + "cycl one", + "g it", + "ane ws", + "character i", + "vent ura", + "in tra", + "sf giants", + "hu t", + "be a", + "dar win", + "ell er", + "al v", + "re ese", + "bl y", + "kar an", + "conclu sion", + "man ny", + "fla kes", + "unite blue", + "nad u", + "co pp", + "ed ges", + "lanca shire", + "i als", + "o tta", + "philipp e", + "l ent", + "che e", + "ment ors", + "festi val", + "an ism", + "compli mentary", + "r j", + "pu g", + "d ine", + "we i", + "cli ffs", + "sar my", + "ti veness", + "treas ury", + "il and", + "after math", + "rabb i", + "ou n", + "bou quet", + "herit age", + "zi on", + "sur render", + "shen an", + "in ks", + "kar l", + "gh ty", + "pol icing", + "exam ination", + "ce y", + "per su", + "measure ment", + "hydro gen", + "lu han", + "âłĢâłĢ âłĢâłĢ", + "war i", + "о Ð", + "j y", + "fow ler", + "mis h", + "al fre", + "âĺ ij", + "bb naija", + "cat alogue", + "recogn ised", + "sa ver", + "hu skies", + "col in", + "mun do", + "si va", + "p ng", + "discoun ted", + "man utd", + "fre sno", + "de vin", + "prelimin ary", + "tro phies", + "pla stics", + "du g", + "pro cu", + "indi go", + "g ard", + "dy lan", + "pit ches", + "ground breaking", + "in son", + "bl ac", + "an thology", + "f h", + "expl ic", + "r ard", + "admi ral", + "so chi", + "la shes", + "splen did", + "en vy", + "ad v", + "sex y", + "festiv ities", + "stic king", + "bi b", + "thr ill", + "op p", + "ari el", + "botan ical", + "endur ance", + "fe males", + "br icks", + "vat ican", + "black pool", + "ber mu", + "br ough", + "roll er", + "bi d", + "sue de", + "sloven ia", + "mm ing", + "ml b", + "med alist", + "di ans", + "rehabil itation", + "ne on", + "s go", + "li thu", + "ram os", + "z ed", + "pi anist", + "inten sive", + "broad band", + "stu dy", + "peter sburg", + "lu ca", + "ah hhh", + "phys ician", + "dill on", + "tele com", + "gri ef", + "mu n", + "ac ro", + "si ded", + "s ly", + "blo ws", + "classic cars", + "tri um", + "ar gy", + "? 
:", + "h ri", + "marsh mal", + "âĢ ĵ", + "to pping", + "war saw", + "tran sc", + "preserv ation", + "b av", + "re friger", + "experim ents", + "ä º", + "gl it", + "sli ga", + "g age", + "fac tor", + "flav ours", + "br ony", + "sp o", + "cook book", + "carri age", + "aw ay", + "ny fw", + "on ian", + "w g", + "simp sons", + "ro lex", + "ðŁı ¿", + "cro sby", + "ãħ ¤", + "cre di", + "syn dic", + "pu bs", + "ali fe", + "poor ly", + "mac ed", + "ðŁĺ ŀ", + "behin dthe", + "w enger", + "n ats", + "ðŁİ Ł", + "rubb ish", + "procedu res", + "typho on", + "opho bia", + "er do", + "fu el", + "vi era", + "bu mps", + "millenni um", + "new zealand", + "lec tures", + "it on", + "mil ky", + "respon ded", + "ê °", + "landsc ape", + ".. @", + "bo ther", + "âĸ ¶", + "z hang", + "huawe i", + "tu ition", + "s worn", + "in u", + "y or", + "pa olo", + "au ditions", + "ab il", + "malay sian", + "ho ps", + "fe athers", + "mp le", + "au ts", + "ã o", + "boun ty", + "ic he", + "ì ĺ", + "sh q", + "pin ot", + "ge ars", + "disapp ear", + "video games", + "t na", + "alzheim er", + "ðŁĮ ŀ", + "a ji", + "under wear", + "swit ching", + "sign age", + "o scar", + "ec on", + "dro w", + "cl int", + "pl ated", + "gun dy", + "emb lem", + "ho es", + "ici st", + "nel ly", + "juni or", + "road show", + "miner als", + "at le", + "alexand ria", + "ac claimed", + "v ell", + "shi va", + "ad he", + "en ne", + "amne sty", + "h ounds", + "councill or", + "ðŁĴ ¦", + "aes the", + "part nering", + "influ enced", + "mag no", + "fl are", + "extin ction", + "civil ian", + "maje sty", + "va il", + "law makers", + "rac ks", + "mc c", + "ori an", + "sp ices", + "er rors", + "may er", + "co ca", + "pa i", + "s ooooo", + "reti ring", + "ba thro", + "ðŁĻĮ ðŁĻĮ", + "âĸ ª", + "su f", + "endor sement", + "buil ding", + "broo ch", + "pal la", + "arvin d", + "ag ent", + "kar ate", + "r hi", + "c tv", + "ta ine", + "um m", + "ba x", + "reig ns", + "uni of", + "enterpri ses", + "adel e", + "fla ke", + "at tire", + "bru ce", + "ba hamas", + "gra vy", + "sa in", + "che ek", + "tri vi", + "lo v", + "e en", + "bb lo", + "lady gaga", + "itt a", + ". 
\"-", + "du stin", + "observ atory", + "eigh th", + "bloom berg", + "kh s", + "f cc", + "gi st", + "commemor ate", + "ve er", + "sexu ality", + "ed c", + "nic ole", + "vac ancy", + "u ser", + "son a", + ":' (", + "dipl oma", + "t end", + "up grades", + "Å Ł", + "jura ssic", + "cardi ac", + "dr s", + "widesp read", + "à ł", + "dail ies", + "vend or", + "sim plicity", + "wi der", + "len ses", + "supp lements", + "de pos", + "ob served", + "vin es", + "parti ally", + "renew al", + "collabor ate", + "ali g", + "fin ity", + "ph u", + "zz y", + "pe tit", + "ðŁĵ ħ", + "z in", + "i gu", + "sm ack", + "fall on", + "ðŁĵ £", + "back wards", + "comp onent", + "o so", + "compati ble", + "bin ding", + "zur ich", + "thom e", + "w ounds", + "ly ric", + "fresh men", + "sne aky", + "fi bro", + "di et", + "emplo yer", + "in sect", + "h ated", + "sch er", + "raz or", + "n sw", + "boo ker", + "califor ni", + "av fc", + " °", + "preten ding", + "pep si", + "al is", + "un titled", + "k art", + "grand parents", + "e the", + "o ck", + "lux emb", + "visu als", + "small business", + "abdul lah", + "min ho", + "su baru", + "h ra", + "reve aling", + "heart breaking", + "clar ity", + "am g", + "sl r", + "** **", + "âŀ ĸ", + "recor d", + "ici ary", + "min ded", + "ye h", + "exce ssive", + "knu ck", + "icec ream", + "tru th", + "ev ic", + "ta stic", + "ant arc", + "ren dering", + ", ,", + "mit t", + "loren zo", + "st patrick", + "bound ary", + "zi g", + "vo cab", + "osa ka", + "fur n", + "tu n", + "gu l", + "s ounding", + "blo gger", + "utter ly", + "g af", + "adv ancing", + "l cd", + "mar gin", + "lifel ong", + "solst ice", + "sh ra", + "wa its", + "ple ar", + "bre ach", + "en ligh", + "ad er", + "itt le", + "c ation", + "ho on", + "stu died", + "?? ???", + "k ash", + "ev angeli", + "ps l", + "wei ghts", + "met als", + "ty res", + "tur no", + "wi e", + "car b", + "g ale", + "se al", + "sun ite", + "am ic", + "patter son", + "á n", + "eu ph", + "up stairs", + "quali fiers", + "khali fa", + "apple music", + "ìĨĮë ħ", + "vau ghan", + "al ter", + "cru iser", + "mu a", + "t ana", + "kat rina", + "id ols", + "spo iled", + "secre tly", + "fi bre", + "part nered", + "um es", + "gi ov", + "com et", + "screenshot saturday", + "k eller", + "fil tr", + "fe t", + "con way", + "pe u", + "bad minton", + "gi d", + "m ound", + "don key", + "bu ff", + "lea ther", + "lar gely", + "bro ch", + "int ments", + "am use", + "r k", + "sto ve", + "impac ted", + "con t", + "cr acks", + "prison er", + "bar i", + "contrac tor", + "ori oles", + "domin ate", + "pol ar", + "am elia", + "dr c", + "ðŁijĮ ðŁijĮ", + "vi st", + "su arez", + "injec tion", + "blo oms", + "ðŁļ¨ ðŁļ¨", + "sti ff", + "pay pal", + "sno wing", + "thur sdays", + "goo se", + "we dge", + "educ ated", + "weak ness", + "de cker", + "abud ha", + "bree zy", + "Û Į", + "hope ful", + "o bi", + "rai der", + "gh am", + "de u", + "se ve", + "par tly", + "fu t", + "infu sed", + "mer ri", + "than e", + "some time", + "hu e", + "me in", + "cre dit", + "sli ding", + "ran de", + "cher ry", + "dead pool", + "sh ol", + "ar am", + "under wood", + "sky e", + "distur bing", + "m nt", + "poli shed", + "guardi ans", + "ha dn", + "pic asso", + "ari us", + "ak shay", + "ir ri", + "j h", + "happ en", + "la kh", + "dal ton", + "at the", + "s well", + "mar sha", + "re h", + "cour s", + "j kt", + "top us", + "serv ice", + "r ink", + "hack ers", + "dono van", + "hor o", + "tc m", + "may hem", + "cha se", + "dev ops", + "ken sing", + "sc up", + "sh ere", + "quali fication", + "c live", + "ton g", + "n ancy", + 
"mar is", + "der dale", + "ber man", + "cinde rella", + "jol ly", + "ci c", + "loo t", + "collecti bles", + "hom icide", + "g ge", + "epide mic", + "su ites", + "mu ddy", + "gi mme", + "e rec", + "- *", + "tal la", + "lis le", + "embro ide", + "ðŁĩ© ðŁĩª", + "veriz on", + "ve ctor", + "be anie", + "arti san", + "ga in", + "flo res", + "vi gil", + "u so", + "ðŁĻı ðŁı½", + "grin ding", + "gh er", + "air ports", + "respon sive", + "shaf t", + "can cel", + "ceremon ies", + "e me", + "at ari", + "bru shes", + "eag er", + "bo hemi", + "children s", + "yan kee", + "ma a", + "suspen se", + "mor an", + "mac ar", + "sun flower", + "cre w", + "vo id", + "ke ar", + "fashi oned", + "jen nings", + "sunday funday", + "sub missions", + "me ad", + "her man", + "wa i", + "crit ically", + "le um", + "baek hyun", + "for cing", + "co bra", + "ãģ ®", + "acqu ire", + "al k", + "ge ology", + "pri mar", + "import antly", + "ire z", + "bunde sliga", + "curi osity", + "sen a", + "stric t", + "con soli", + "win ters", + "ven om", + "chelten ham", + "ðŁį º", + "cen a", + "t at", + "ba in", + "glo ver", + "under cover", + "as ses", + "car n", + "memorial day", + "am eli", + "i rene", + "ch on", + "syn thesis", + "spe edy", + "mitsu bi", + "sla yer", + "compos ite", + "under stands", + "pe w", + "inter rup", + "hen ri", + "mor row", + "an om", + "thof july", + "g lee", + "thre e", + "ðŁĺ ®", + "and hi", + "ch att", + "renew ables", + "ye s", + "trans fers", + "!!!! !!!!", + "bab u", + "du ter", + "lo ops", + "pe ers", + "o ilers", + "pau lo", + "ic ation", + "h mu", + "war a", + "mer cer", + "hom eland", + "fu ji", + "ale y", + "year book", + "re m", + "re en", + "ab sur", + "bo is", + "] :", + "caes ar", + "shot gun", + "kur dish", + "o ren", + "ra e", + "anci es", + "ty pic", + "f h", + "def ault", + "re plic", + "lu k", + "trans actions", + "r ys", + "infan try", + "ðŁį ¾", + "cho w", + "chick ens", + "ba gh", + "wy att", + "ay e", + "gg i", + "bre ws", + "ed itions", + "mi ra", + "commen cement", + "pre su", + "peris cope", + "ic hi", + "guatem ala", + "zam bia", + "pain ts", + "wit ches", + "wan i", + "un dere", + "cro y", + "vo ws", + "us mc", + "hear ted", + "theat res", + "shu ffle", + "le vel", + "mul tic", + "squee ze", + "fer n", + "app et", + "post al", + "mal t", + "on board", + "ld nt", + "co o", + "s sc", + "k ac", + "ðŁĺ ĩ", + "sc rap", + "mar cos", + "deal ers", + "ann u", + "mill er", + "co ve", + "ul ary", + "vladi mir", + "be ef", + "th ur", + "pick led", + "se same", + "bengal uru", + "mo tt", + "kathle en", + "hi st", + "no tor", + "dr ank", + "du chess", + "snow fall", + "e ff", + "tin y", + "j n", + "sy our", + "speci alists", + "scot us", + "bay lor", + "eve rest", + "mali bu", + "pre m", + "harm ful", + "l ali", + "b ates", + "g ye", + "differen ti", + "and ra", + "geome try", + "el over", + "black out", + "== ==", + "ko ta", + "inter act", + "asi an", + "la yo", + "samu rai", + "fi del", + "exhau sted", + "gla di", + "pd t", + "spher ic", + "anti qu", + "guit ar", + "stu ri", + "ho pper", + "ang le", + "f ills", + "sla p", + "mi th", + "rod ney", + "ong i", + "in som", + "pre venting", + "cassi dy", + "ap ho", + "ore gon", + "lo in", + "ham mond", + "contribu ting", + "f n", + "gar ri", + "ori on", + "comp elling", + "escap ing", + "aim ing", + "plu mb", + "bi stro", + "be asts", + "concer ning", + "bo e", + "do pp", + "shop local", + "stumb led", + "âĤ ¹", + "naz is", + "âĢįâĻĤ ï¸ı", + "gest ure", + "war ts", + "us open", + "hi ggins", + "char li", + "hang s", + "bom bers", + "° :", + "fe 
eds", + "c ch", + "st il", + "nic ola", + "ðŁĵ º", + "clam ation", + "tro pic", + "af ro", + "ou k", + "expen ses", + "der rick", + "al ine", + "fa w", + "reg ard", + "im er", + "sat in", + "thi um", + "ry der", + "pear l", + "te ss", + "mm mmm", + "sen ses", + "ðŁĩ ¹", + "positi ve", + "exhau st", + "occu r", + "nor ris", + "lil ly", + "is les", + "direc ting", + "yo fficial", + "count less", + "sam ar", + "on stage", + "flo ck", + "mir rors", + "arch er", + "mo i", + "k d", + "vi v", + "in os", + "si kh", + "le i", + "sen sory", + "br its", + "kno x", + "chest nut", + "op y", + "coli seum", + "z af", + "di vin", + "adap ter", + ":) ))", + "tem ple", + "ku n", + "hel mets", + "t df", + "gu ide", + "m old", + "o ids", + "lu ther", + "he is", + "monaster y", + "sp ree", + "k lu", + "brit ney", + "jagu ars", + "gre ats", + "c cc", + "ky rie", + "machin ery", + "cric ket", + "re ro", + "ab o", + "aspir ing", + "semi finals", + "ale ss", + "sig natures", + "var d", + "me th", + "her bal", + "hol den", + "king dom", + "ap or", + "reg gie", + "ore o", + "palestin ians", + "em mys", + "sec tional", + "ro i", + "ney mar", + "qu el", + "cu ll", + "l ka", + "haz el", + "estim ate", + "ul ties", + "go w", + "be a", + "purch ases", + "bel ts", + "protec ts", + "m é", + "gue ssing", + "bb o", + "clau dia", + "fr acking", + "jon ny", + "el k", + "cel tic", + "al mighty", + "ra je", + "courty ard", + "ig i", + "can es", + "ðŁĴª ðŁı»", + "bank rup", + "le thal", + "âľĮ ï¸ı", + "graphic design", + "vad er", + "penc ils", + "rough ly", + "dan te", + "m fg", + "const ell", + "cam el", + "j b", + "bloss oms", + "en to", + "balo chistan", + "cine mato", + "ill ard", + "jer sey", + "con sent", + "dent ed", + "con templ", + "sch er", + "hol i", + "lou gh", + "st our", + "a yo", + "begin ners", + "cur b", + "v hs", + "a jax", + "du ff", + "av eng", + "dom est", + "commit ting", + "ai red", + "cha p", + "hedge hog", + "disappo inting", + "freel ance", + "in land", + "char ms", + "ðŁĺį âĿ¤ï¸ı", + "ai sh", + "m x", + "buck le", + "ti dal", + "per mit", + "bo ating", + "ra cha", + "kend rick", + "b ello", + "b hi", + "ple a", + "estim ates", + "l b", + "apo logies", + "jay a", + "bb l", + "ast oni", + "inter state", + "main taining", + "el bow", + "mu p", + "ep it", + "ðŁĺ ¡", + "viol ations", + "def end", + "be h", + "sl c", + "am ir", + "pur i", + "ti um", + "fi fa", + "blur ry", + "scri m", + "ðŁĻı ðŁı¾", + "ma ple", + "rel atives", + "âĺ Ŀ", + "cho c", + "con nor", + "⾨ ⾨", + "whi sp", + "list ings", + "ma ze", + "than king", + "ri dd", + "grass roots", + "shi fting", + "desper ately", + "gor illa", + "den i", + "ju les", + "stra th", + "g ley", + "ja in", + "bu ick", + "t anner", + "ðŁĴ Ŀ", + "ga e", + "pri m", + "it ors", + "n ano", + "separ ation", + "armen ia", + "bor deaux", + "ðŁ ħ", + "pj net", + "bu rial", + "e bon", + "glo ss", + "re new", + "gri er", + "spe eds", + "comic books", + "sym boli", + "pur poses", + "ãħł ãħł", + "spati al", + "no table", + "ci on", + "n ps", + "ho ffman", + "nor man", + "rt g", + "du sty", + "situ ated", + "tr an", + "k fc", + "em en", + "nic kel", + "hast ings", + "sett ling", + "gr it", + "l ena", + "w aw", + "art s", + "gu m", + "ca regi", + "le wis", + "sapp hire", + "rememb er", + "embed ded", + "t lc", + "bl at", + "serge ant", + "el sa", + "boot camp", + "bow man", + "photo graphic", + "pill ars", + "direction ers", + "classi fied", + "no is", + "ve er", + "barre ls", + "wh oop", + "ðŁĺ± ðŁĺ±", + "fe male", + "petro leum", + "medi a", + "e fc", + "poké mon", + "ठķ", + 
"enthusi astic", + "var un", + "pro files", + "pedi atric", + "acci dents", + "con rad", + "jan g", + "jo jo", + "ac or", + "ob server", + "l f", + "live stock", + "for gi", + "fo s", + "el m", + "an and", + "go e", + "c ere", + "avoi ding", + "gri t", + "om an", + "thank fully", + "scat tered", + "nick y", + "cylin der", + "chees y", + "di ver", + "mahe sh", + "cav es", + "ear liest", + "qu inte", + "subjec ts", + "b end", + "gul f", + "vocali st", + "glu e", + "pat ches", + "un stopp", + "sny der", + "demonstr ating", + "pi o", + "hor ns", + "wic kets", + "and the", + "r ama", + "yo on", + "stra ight", + "bed time", + "or ang", + "bul lets", + "sa urus", + "min ers", + "inci dents", + "! ...", + "ðŁİ ¸", + "ag ers", + "hand les", + "stat es", + "in ity", + "d ons", + "incredi ble", + "emin em", + "avi v", + "ru dy", + "moz art", + "folk lore", + "appli ances", + "mt l", + "fre y", + "di as", + "hu a", + "page ant", + "stri ve", + "im prison", + "bul lish", + "r ana", + "al erts", + "bb mas", + "hy per", + "derby shire", + "re cre", + "re dd", + "debor ah", + "cosmo s", + "law son", + "mel anie", + "psy cho", + "ho or", + "doo dles", + "sni per", + "shad y", + "man tle", + "canadi an", + "new year", + "inter actions", + "separ ated", + "cor ds", + "spiritu ality", + "ap u", + "it o", + "p ct", + "pel osi", + "rebel lion", + "se iz", + "wor cester", + "sec tors", + "ul i", + "san ta", + "Ð µ", + "ðŁĩªðŁĩ ¸", + "bi ased", + "class ical", + "gam ma", + "dee plear", + "emer ge", + "back er", + "sur ance", + "hand crafted", + "ðŁİ ¥", + "franc is", + "mill an", + "ic i", + "cro wn", + "wo w", + "stri ped", + "un fair", + "relax ation", + "³ ï¸ı", + "embrac ing", + "she alth", + "pale o", + "martin i", + "dist illery", + "wr ink", + "or k", + "na th", + "hay ley", + "cour thouse", + "si ber", + "sa di", + "quiet ly", + "mel t", + "m sm", + "me h", + "smart phones", + "rel ent", + "pp ing", + "war wick", + "co logne", + "gli a", + "cot ton", + "pro g", + "lon e", + "ip sw", + "star ters", + "expan ds", + "u mp", + "su ed", + "ski pper", + "infe ctions", + "ing le", + "à ¡", + "cler k", + "demonstr ate", + "ac ar", + "ðŁĺĤðŁĺĤ ðŁĺĤ", + "ti bet", + "bun s", + "alo m", + "demol ition", + "ssi a", + "g st", + "[ ]", + "so ar", + "âĺ Ģ", + "ðŁĺ ª", + "ðŁĵ Ĭ", + "dee pest", + "beyon d", + "are t", + "att ends", + "activ ated", + "di mit", + "âļª ï¸ı", + "high lighted", + "magaz ines", + "rum or", + "az za", + "steph ens", + "dol ph", + "sho ckey", + "mat s", + "we av", + "mel an", + "serv ers", + "tra um", + "ku sh", + "æ Ĺ", + "bab ys", + "pa z", + "a al", + "la use", + "break ers", + "canter bury", + "ul ture", + "mi ri", + "euro s", + "tane ous", + "impre ssions", + "du tch", + "il d", + "gh i", + "pur due", + "adequ ate", + "l p", + "sy ner", + "ang ler", + "du rable", + "gal ore", + "ro wn", + "mg mt", + "ðŁĵ Į", + "lu cia", + "âĺij ï¸ı", + "zay n", + "bor row", + ". 
(", + "north umber", + "cru sh", + "eng a", + "su sh", + "extra vag", + "t out", + "ma hal", + "ali stic", + "ther mo", + "gall eries", + "es se", + "chi bi", + "attrac tions", + "lex ington", + "legislat ure", + "docu mented", + "resi den", + "brow nies", + "w f", + "st ool", + "plan ets", + "sho ppers", + "conduc tor", + "ms p", + "tr icky", + "fru ity", + "end ra", + "feel the", + "whi pped", + "hair style", + "re fer", + "oo k", + "oc topus", + "audi ences", + "ku mar", + "after no", + "op tim", + "c fl", + "ni p", + "gen i", + "alpha bet", + "ann ab", + "lam in", + "accep ts", + "l ng", + "ðŁĺ «", + "t ine", + "ac om", + "cheer leaders", + "t k", + "gr on", + "v g", + "k ung", + "ja x", + "dha bi", + "r ss", + "mack enzie", + "beir ut", + "clean up", + "gy psy", + "st ell", + "bur ger", + "hurric anes", + "educ ation", + "st ina", + "âĻ¡ âĻ¡", + "unfortun ate", + "jere mi", + "bad ger", + "at ers", + ": â̦", + "ter ra", + "subli me", + "stu d", + "y mca", + "mr u", + "duter te", + "bren nan", + "bul b", + "mel o", + "yl on", + "hack er", + "c red", + "gu d", + "as an", + "pad illa", + "embroide red", + "vietnam ese", + "pione ers", + "projec tion", + "re boot", + "id c", + "an ey", + "pri mer", + "suff ers", + "win ding", + "p on", + "sto day", + "mor n", + "u ch", + "all in", + "adid as", + "eliza beth", + "tu ck", + "o graphy", + "ðŁļ Ģ", + "be g", + "os borne", + "ghet to", + "r h", + "cn n", + "ir ma", + "ma kin", + "cab les", + "mur ders", + "oc ks", + "inst a", + "al as", + "si k", + "cu ff", + "la re", + "foo dies", + "o vic", + "at om", + "geome tric", + "em pathy", + "ภµ", + "cent enary", + "newsp apers", + "administr ative", + "ðŁİ Ĭ", + "sti ve", + "contrac tors", + "le tt", + "tas mania", + "awesom eness", + "den sity", + "ve en", + "prince ton", + "frequ ently", + "re ject", + "gh i", + "modu lar", + "ceram ics", + "sh ag", + "ki wi", + "can vas", + "sweat shirt", + "an j", + "ti mm", + "napol i", + "il er", + "appe als", + "hamil ton", + "ma yo", + "we ave", + "arrang ed", + "whar f", + "occu py", + "b vb", + "as aki", + "ot ter", + "nor m", + "vi es", + "de tox", + "tion al", + "dere k", + "id ad", + "ad missions", + "constitu ency", + "u pper", + "woo t", + "allo y", + "se ve", + "lu b", + "un comfortable", + "ed win", + "ab re", + "d wight", + "ar che", + "virtu ally", + "sp ol", + "pri e", + "ai i", + "er r", + "swit ch", + "bar ack", + "se ok", + "cou l", + "wn t", + "pou l", + "o live", + "caffe ine", + "cardi ff", + "notor ious", + "de mp", + "ex cess", + "bar r", + "t ford", + "a jay", + "bump ed", + "my thology", + "shel ley", + "fal con", + "shakespe are", + "must angs", + "no ted", + "bon e", + "civil ization", + "sy d", + "par sons", + "un official", + "hy ped", + "sp ends", + "oppo sed", + "v ings", + "space x", + "noti fication", + "deci ding", + "bio tech", + "out si", + "sal ah", + "! 
.", + "fe d", + "ss y", + "c ms", + "bad gers", + "cr o", + "ela ine", + "n ba", + "dy our", + "n ant", + "honey moon", + "climb ed", + "conom y", + "ath a", + "m ell", + "ne bula", + "nature photography", + "juli e", + "bm x", + "inve sted", + "mon o", + "lieu tenant", + "wat kins", + "techn ician", + "o se", + "ka e", + "ì Ľ", + "mc queen", + "pre ach", + "trav eller", + "flexi bility", + "ze bra", + "reta iler", + "p ant", + "ben der", + "brand t", + "squ id", + "war rant", + "veri fied", + "cas s", + "pier cing", + "hon ours", + "t ying", + "mor ris", + "kis sed", + "op rah", + "panor amic", + "me i", + "splat oon", + "wich ita", + "ari as", + "gal li", + "indy ref", + "good times", + "athe ist", + "confe ssion", + "ow ski", + "re pping", + "ad ditions", + "mechan ism", + "z im", + "j ans", + "su f", + "cho pped", + "beg innings", + "vitam ins", + "ãħ¤ ãħ¤", + "or th", + "po les", + "ru b", + "antarc tica", + "indie film", + "web cam", + "ket ch", + "bre tt", + "cle ment", + "her on", + "defe ating", + "hydr o", + "buc ket", + "wand ering", + "sid ney", + "future of", + "b inge", + "on ies", + "knock out", + "administr ator", + "syn the", + "l ent", + "jan i", + "bar ley", + "premier league", + "ner ds", + "cr m", + "bra s", + "bot any", + "evol ved", + "rot ter", + "ro wed", + "tum or", + "weal thy", + " Ń", + "mon arch", + "li shed", + "da hl", + "ðŁİ ĥ", + "bu ch", + "ken yan", + "Ø §", + "red ness", + "assemb led", + "se mit", + "hud der", + "shro p", + "ran i", + "lear ning", + "mor y", + "iti a", + "geo graphic", + "worl dof", + "f b", + "pho sp", + "boo gie", + "am ped", + "? ...", + "che w", + "dwar f", + "ar us", + "s sen", + "ru sty", + "recru its", + "h k", + "gar de", + "app lause", + "vol umes", + "invol ves", + "ta c", + "hand bag", + "trans late", + "ffe l", + "se ym", + "aqu atic", + "trans fer", + "zo di", + "and r", + "acade mia", + "cr ater", + "te z", + "ar se", + "adap t", + "col oni", + "snow man", + "mal i", + "hang in", + "di schar", + "oy sters", + "pho e", + "colon el", + "w ba", + "hispan ic", + "thri ving", + "sh y", + "ag les", + "sales force", + "cre me", + "so les", + "la fayette", + "â ī", + "ter ia", + "ach a", + "sp erson", + "go go", + "car ly", + "the ore", + "am ore", + "vo x", + "af t", + "ãĤ ¹", + "stap le", + "mu ffin", + "di agram", + "ino x", + "su stained", + "av ent", + "me ta", + "arbit r", + "dec ay", + "ado le", + "Ð ½", + "ec ol", + "ph o", + "n k", + "o cu", + "gr anny", + "ç a", + "luxemb our", + "stad t", + "alber to", + "le vit", + "am as", + "d x", + "or phan", + "co bb", + "as c", + "lo gy", + "immen se", + "chan ts", + "off line", + "p ent", + "bre x", + "w inger", + "plan e", + "i el", + "nichol s", + "ca thy", + "nar uto", + "low ed", + "/ //", + "ignor ance", + "cat astro", + "you ts", + "sch en", + "buil d", + "haz i", + "s ine", + "critical role", + "du g", + "dete ct", + "lo gs", + "en amel", + "stpatrick sday", + "ed die", + "co pa", + "cigare ttes", + "ho ff", + "kay a", + "la goon", + "ra pha", + "air borne", + "choo se", + "puer tor", + "ke v", + "gui ding", + "fro sty", + "bor ough", + "mir a", + "ðŁİ Ĭ", + "cade t", + "anu sh", + "yo gi", + "e ger", + "fl ing", + "slo pe", + "nin th", + "we ston", + "foot wear", + "f n", + "may weather", + "a am", + "pla in", + "stair case", + "witne sses", + "work outs", + "ro bust", + "dex ter", + "co hort", + "ðŁļ Ĺ", + "sp ell", + "ha ze", + "o om", + "organ ising", + "wild fire", + "cont acts", + "av on", + "min o", + "upd ating", + "ðŁį »", + "li thium", + "ing ual", + "k is", + 
"au ga", + "lo com", + "de duc", + "u da", + "th ak", + "boy le", + "mp er", + "hot tie", + "eri k", + "re vised", + "is la", + "travel photography", + "oo za", + "en qui", + "confe rences", + "clo ver", + "g room", + "cur ves", + "live on", + "per f", + "displac ed", + "bo log", + "xx xx", + "ðŁĺ© ðŁĺ©", + "te al", + "ve ssels", + "rain forest", + "cal ci", + "pan ther", + "gira ffe", + "ta sted", + "imag ery", + "pad res", + "day time", + "bas s", + "ri pe", + "opio id", + "nu e", + "vin yl", + "invent or", + "sen s", + "process or", + "mu t", + "gad gets", + "bibl ical", + "shann on", + "jacqu eline", + "car y", + "the resistance", + "ali en", + "n vi", + "co sy", + "bi har", + "fo ley", + "ren d", + "mu gs", + "fa ken", + "cl one", + "ni allo", + "gra bbed", + "chi hu", + "power house", + "n tt", + "chero kee", + "spon ge", + "imple menting", + "rh ine", + "le one", + "ðŁį Ģ", + "pret tiest", + "infra red", + "impro v", + "swit ched", + "tu bes", + "con tr", + "bl k", + "projec ted", + "be aver", + "yo t", + "bbcra dio", + "thi gh", + "per secu", + "apologi ze", + "w ack", + "po ster", + "oli ver", + "az a", + "lou d", + "( ?)", + "f the", + "women shi", + "spar row", + "blu sh", + "us able", + "sc ales", + "it ative", + "peu ge", + "ne eding", + "legg ings", + "glam orous", + "mat ur", + "c z", + "wat t", + "da b", + "tam ar", + "et sym", + "bau er", + "heart felt", + "h n", + "else where", + "bir ch", + "alu mini", + "hu ck", + "e me", + "j l", + "traf ford", + "d z", + "por tions", + "ana sta", + "arthr itis", + "esp n", + "ber gen", + "viol ation", + "yo shi", + "c z", + "northumber land", + "clo sures", + "ðŁĩ¯ ðŁĩ", + "smi ley", + "r w", + "tel ugu", + "inten si", + "gre gg", + "ve ga", + "dun geon", + "south bound", + "ba il", + "domin ican", + "semi final", + "chap ters", + "h itch", + "van ity", + "trans iti", + "recomm ends", + "sati sf", + "bar ca", + "queen s", + "( (", + "de struc", + "stra it", + "ra vi", + "dess erts", + "in tru", + "har am", + "k os", + "fo e", + "fat ty", + "pais ley", + "magn itude", + "dri dge", + "com ey", + "schem es", + "vision ary", + "our t", + "down loaded", + "ðŁĻĮ ðŁı½", + "gd pr", + "lan i", + "p wc", + "gu ad", + "nic est", + "stake holders", + "re ferred", + "george town", + "arvind kejriwal", + "schnei der", + "in doors", + "all star", + "strand ed", + "gen der", + "ze pp", + "ma sses", + "ðŁIJ ±", + "pati ently", + "bl dg", + "z ab", + "we arab", + "vi vid", + "he ck", + "d ella", + "sy mb", + "je opar", + "la ger", + "à ª", + "comb ines", + "ne c", + "br ay", + "flo p", + "tx wx", + "jo ys", + "pon t", + "pro found", + "sur round", + "mad hu", + "ma ble", + "ay r", + "te as", + "n sa", + "open ly", + "er nest", + "ãĥ ©", + "to po", + "g na", + "anti oxid", + "ti an", + "e tr", + "c ello", + "ma thi", + "gener osity", + "b iting", + "man ic", + "kel sey", + "chee ks", + "ten der", + "w th", + "pron oun", + "ultimat ely", + "gu sta", + "ari anag", + "ger ry", + "ble ed", + "red dy", + "mic h", + "mitsubi shi", + "oper ated", + "sex ually", + "ma u", + "cl lr", + "vi ds", + "co c", + "mel ted", + "ðŁĮ Ī", + "q ld", + "ite ch", + "instru mental", + "end game", + "ðŁĵ ĸ", + "ener gi", + "brow nie", + "tam il", + "at in", + "domin ated", + "pra ises", + "fire place", + "sens ational", + "men a", + "k arti", + "un prece", + "ru pt", + "ori ental", + "mc cor", + "tour naments", + "scen ter", + "re eves", + "prescri ption", + "sam e", + "fra u", + "tru ffle", + "em bo", + "roman s", + "bla sts", + "techno logical", + "pr at", + "b sb", + "y ar", 
+ "tren dy", + "ac l", + "al ad", + "ðŁį ģ", + "o hh", + "bankrup t", + "tho ven", + "regar ds", + "is er", + "war wick", + "vine yards", + "real m", + "niallo fficial", + "do ta", + "ge mini", + "to do", + "v able", + "¨ ¨", + "la u", + "wre ath", + "ju ve", + "nat asha", + "le ver", + "lor i", + "hor ser", + "cc tv", + "air bnb", + "es anders", + "sin clair", + "ema biggest", + "high school", + "con test", + "optimi stic", + "t te", + "ðŁĴķ ðŁĴķ", + "ss d", + "ye e", + "hel ena", + "con sen", + "ric ks", + "jes se", + "an ic", + "ðŁİ ¯", + "re acts", + "ro be", + "independ ence", + "vol tage", + "m ington", + "s ant", + "à¸Ļ à¸", + "-------- --------", + "sentin el", + "ke tt", + "rehear sing", + "aaaa aaaa", + "sof the", + "stir ling", + "sear ch", + "wi gan", + "stand out", + "sna il", + "pent agon", + "Ä ģ", + "ch lor", + "cru st", + "net any", + "chemi st", + "disapp eared", + "ric ardo", + "sp iders", + "bo se", + "war ren", + "me ssing", + "bann ers", + "gu el", + "par ach", + "ma id", + "coun ted", + "epi le", + "bon fire", + "speech less", + "se tter", + "meas ured", + "rejec ts", + "nik ki", + "le ster", + "foren sic", + "fab rics", + "alo ha", + "pre served", + "wat ford", + "deta iling", + "dar th", + "bo u", + "car ly", + "... '", + "tail gate", + "noti fications", + "å ¤", + "pas sive", + "trous ers", + "balo ch", + "ro ther", + "typic ally", + "à ¥", + "sp it", + "wi z", + "sic ily", + "technic ally", + "ex pose", + "st age", + "hu bb", + "cre am", + "cap s", + "po ke", + "sle ek", + "ju ne", + "tempor arily", + "de z", + "awak ens", + "l ame", + "_ -", + "ji ha", + "tues days", + "advis ed", + "advis ors", + "exi sted", + "dis agree", + "news room", + "lo sers", + "world tour", + "dr ying", + "al di", + "har ness", + "foot print", + "hobb it", + "p mln", + "i ro", + "que red", + "asse ss", + "gaz e", + "sa b", + "th ian", + "í Ĭ", + "ti f", + "ob serve", + "ev il", + "dra wer", + "swee p", + "cor y", + "co dy", + "kyo to", + "cal lum", + "n inj", + "lau rent", + "be i", + "sket ching", + "custom ized", + "du r", + "regre ts", + "knox ville", + "ìķ Ħ", + "mess aging", + "grac ie", + "abun dance", + "bi dding", + "bre wed", + "fl ouri", + "therapeu tic", + "alt itude", + "ho gs", + "bur ner", + "elec tro", + "wonder fully", + "he ater", + "post pon", + "li very", + "r all", + "ad as", + "a ac", + "sau l", + "brook lyn", + "play house", + "âĻ¥âĻ¥ âĻ¥", + "char itable", + "in y", + "z ah", + "compet itions", + "be av", + "plu gged", + "o is", + "do om", + "astron om", + "speci alized", + "max i", + "ta ps", + "cellu lar", + "depre ssed", + "folklore thursday", + "cri b", + "e mul", + "ë° ©", + "fi gh", + "ru z", + "car lisle", + "spe ar", + "side walk", + "de i", + "depend ent", + "lac es", + "nh s", + "ðŁĮ Ļ", + "reali zing", + "net work", + "ric he", + "re gin", + "re fresh", + "st ral", + "pa thology", + "pla id", + "psyched elic", + "hin d", + "u ka", + "algori thm", + "lin king", + "progre ssi", + "fe y", + "d ade", + "hydr ated", + "b ant", + "fam ed", + "cot sw", + "bo ise", + "as c", + "rac ing", + "ja vier", + "ww en", + "mar lins", + "poo p", + "swe pt", + "toni ghts", + "we f", + "ani me", + "slo vak", + "âŀĸ âŀĸ", + "cla us", + "lem me", + "cli ppers", + "re ls", + "arianag rande", + "r te", + "ko t", + "thal apathy", + "hungar ian", + "zu ma", + "y von", + "is u", + "jour neys", + "clin ics", + "be be", + "ww f", + "n ws", + "super heroes", + "er it", + "sle ague", + "identi fication", + "mo tto", + "ba i", + "sour ced", + "ill er", + "ap i", + "pri se", + 
"unprece dented", + "dam as", + "tuni sia", + "dra in", + "undere stim", + "e ther", + "quarter ly", + "rewar ding", + "al ham", + "wolver ine", + "cab ine", + "hyp no", + "nad ine", + "hav ana", + "da e", + "ðŁĵ Ī", + "dr on", + "read ings", + "b ati", + "pic o", + "mer ci", + "iti an", + "wal kers", + "el ope", + "mi key", + "god zilla", + "bur lington", + "abu ja", + "social ism", + "at ility", + "sh ell", + "harry potter", + "g no", + "ab ur", + "re leg", + "fel ici", + "ro gen", + "neuro science", + "inst in", + "ath am", + "vou chers", + "j arre", + "fu se", + "def ici", + "monte rey", + "de port", + "mid day", + "pp ard", + "fre ed", + "ame ter", + "wil t", + "n ingham", + "pr att", + "liber ty", + "slo gan", + "o to", + "pr i", + "co ated", + "c pd", + "ne tt", + "il las", + "mal awi", + "evol ve", + "accessi bility", + "ðŁĶ¥ðŁĶ¥ ðŁĶ¥ðŁĶ¥", + "or nament", + "b p", + "el is", + "son line", + "chi ro", + "fl ick", + "ib m", + "ar ak", + "en ables", + "gar land", + "san e", + "cu ties", + "tri p", + "rotter dam", + "n ys", + "lam ps", + "lu cas", + "bo g", + "ra ils", + "travel led", + "hic ks", + "en u", + "sab ha", + "scru b", + "hi er", + "hart ford", + "fo o", + "fer nandez", + "tre vor", + "mat tress", + "appo intments", + "ale j", + "fe i", + "o logist", + "saf ar", + "oc ta", + "sr c", + "sha un", + "ambi ent", + "dri c", + "bi ker", + "she e", + "must ache", + "h ta", + "bo one", + "her ty", + "car dio", + "bra kes", + "rec ital", + "consi sts", + "overwhel med", + "cau l", + "robb ins", + "im it", + "al th", + "ur l", + "bi bli", + "on ne", + "black livesmatter", + "diffic ulties", + "tel ang", + "tall er", + "ðŁĵ Ĩ", + "deb ating", + "bur rito", + "mo vember", + "strength ening", + "bo e", + "te stam", + "mirac les", + "base ball", + "re nee", + "ðŁijī ðŁı»", + "al fa", + "âĺ ĺ", + "unstopp able", + "ec s", + "g mo", + "giftide as", + "path way", + "fen cing", + "ðŁİ ¤", + "b ham", + "ra s", + "sk o", + "d led", + "thel ast", + "magn um", + "bin ary", + "wil de", + "wil der", + "wh ati", + "barbe cue", + "h ism", + "can oe", + "kur di", + "eli ve", + "advant ages", + "mad ame", + "bi er", + "mis sing", + "enter tain", + "air force", + "y ama", + "c is", + "hash tags", + "j is", + "ve il", + "dream y", + "ten se", + "may ward", + "ch ateau", + "hunt ington", + "âļ ĵ", + "v all", + "up on", + "bl ouse", + "dun es", + "ðŁĺ ´", + "fert ility", + "m ole", + "curren cies", + "st u", + "ber lin", + "toa sted", + "div as", + "wal t", + "lar k", + "por a", + "hit ter", + "um er", + "chil led", + "bal ancing", + "fa is", + "y in", + "or tiz", + "east enders", + "h ate", + "ur al", + "ap ril", + "tim el", + "à ±", + "per o", + "sto cked", + "respec ts", + "th t", + "best friends", + "giving tuesday", + "be ad", + "inv ent", + "im i", + "nap les", + "comb ining", + "tok ens", + "thir st", + "ma sc", + "par rot", + "sp u", + "dent on", + "* -*", + "t res", + "subur ban", + "wid th", + "si ve", + "con tender", + "siri us", + "lo k", + "troop ers", + "outra ge", + "tur bo", + "frag ile", + "me ssed", + "do h", + "disc ord", + "netany ahu", + "re sign", + "forgi veness", + "mo han", + "mun ch", + "cam ou", + "identi fying", + "enab ling", + "hot ter", + "thorn ton", + "jai pur", + "ar ya", + "ðŁı» âĢįâĻĢï¸ı", + "mu staf", + "maj ors", + "o ke", + "du ffy", + "roh ing", + "til t", + "ðŁĩ®ðŁĩ ³", + "rock star", + "she ep", + "hend rix", + "ra v", + "in vention", + "do u", + "lagun a", + "gru mpy", + "sw is", + "im pe", + ") '", + "you ths", + "bun ker", + "st ache", + "oppo se", + "indi es", 
+ "acceler ate", + "ml p", + "ed en", + "w ann", + "k ail", + "akshay kumar", + "su pt", + "pol ym", + "midd leton", + "extra ordin", + "wil son", + "australi an", + "alumini um", + "way ne", + "alum nus", + "mat ics", + "gri m", + "er nie", + "opp a", + "competit ors", + "rand all", + "h ence", + "decla res", + "pre aching", + "sha he", + "can e", + "sustain able", + "stap les", + "le dge", + "ad ena", + "doctor al", + "bur gundy", + "decor ate", + "ren dered", + "ri sen", + "pr ank", + "di or", + "bee thoven", + "flo or", + "ac com", + "to t", + "ho dg", + "touri sm", + "say in", + "objec tive", + "mar kers", + "premi ership", + "en abled", + "camou fla", + "gi ant", + "Ñ ģ", + "smo key", + "ric ket", + "pan g", + "de pending", + "s ation", + "evol ving", + "inter cep", + "cen sus", + "tof the", + "re en", + "mendo za", + "trum pet", + "marke ters", + "an it", + "ðŁĻ Ĭ", + "north western", + "v la", + "foto gra", + "blackand white", + "che wan", + "wi g", + "tro om", + "ginger bread", + "k n", + "ro mero", + "n fc", + "or chi", + "fun ko", + "sour ce", + "f s", + "ra ped", + "o st", + "tar ot", + "ann ually", + "ðŁĺ ¬", + "r ill", + "del av", + ".. !!", + "se s", + "can n", + "medic are", + "ph el", + "ape x", + "guardi an", + "rema ined", + "r pm", + "a ñ", + "story month", + "instag ood", + "neighb our", + "p ing", + "sem ite", + "my stic", + "as cot", + "mat er", + "hand ful", + "dang ers", + "ti d", + "ana heim", + "opol y", + "sh allow", + "nami bia", + "tor ia", + "procu rement", + "big bang", + "announ cements", + "prosecu tor", + "beng als", + "sal le", + "en roll", + "ga stro", + "sugge stion", + "ba k", + "ha ul", + "budd hism", + "berni esanders", + "flu te", + "fati gue", + "cyn thia", + "cho i", + "ir win", + "gu a", + "str ous", + "h p", + "ba p", + "satisf ying", + "play a", + "ðŁİ ¼", + "inst ap", + "al ice", + "t p", + "irri gation", + "ðŁĩ¬ðŁĩ §", + "in tric", + "clu es", + "ple x", + "sa x", + "he pat", + "dump ed", + "signific ance", + "by u", + "medic ation", + "pro v", + "tough est", + "corn ish", + "âŀ ľ", + "kel ley", + "u v", + "si zz", + "si bling", + "me st", + "di stor", + "diplom atic", + "aun tie", + "b hat", + "son ic", + "bren da", + "pump kins", + "ro ch", + "black burn", + "ur ged", + "shi a", + "arrange ments", + "floo d", + "sa unders", + "lec turer", + "nou ri", + "popul ations", + "diplom acy", + "consist ently", + "ðŁ¤ Ļ", + "t mund", + "cauli flower", + "l ily", + "vocab ulary", + "vari eties", + "coo ker", + "up town", + "qu ent", + "mo sa", + "re inde", + "velo city", + "spru ce", + "social medi", + "i ber", + "volun tary", + "proce ssed", + "bal tic", + "y ang", + "leban ese", + "d p", + "dol ly", + "arrange ment", + "y uri", + "cran berry", + "kal yan", + "elev ation", + "cli ff", + "pu shes", + "ìĬ ¤", + "sil ic", + "co wx", + "eter nity", + "sla ves", + "vine gar", + "glou cester", + "con tained", + "breaking news", + "aga inst", + "renov ated", + "norm andy", + "hero in", + "ys m", + "mo ds", + "gre ek", + "un di", + "tren ch", + "v h", + "encoura ges", + "head ache", + "gr ange", + ": '", + "ever green", + "Ù Ĭ", + "reck on", + "ab used", + "th ru", + "cho ice", + "ti dy", + "col der", + "scho ice", + "ha in", + "bru m", + "li ars", + "bre it", + "yor ker", + "sh ack", + "he idi", + "micha els", + "sco pic", + "fasci st", + "play ful", + "ca c", + "yas ss", + "sh ad", + ".. 
?", + "qu en", + "ram irez", + "clif ton", + "pr s", + "best fan", + "âģ ł", + "gener ating", + "head set", + "disappo intment", + "abstr act", + "bo iled", + "paren thood", + "azerbai jan", + "exhib iting", + "bom bay", + "oli vier", + "ko so", + "un lea", + "mat ernity", + "iz er", + "si ves", + "r hu", + "col l", + "saskat chewan", + "fre akin", + "de k", + "na g", + "stab ili", + "ðŁį ķ", + "organi zer", + "bo sses", + "ar u", + "u va", + "at able", + "ta un", + "after wards", + "fert ili", + "ver ge", + "az i", + "mor ph", + "๠ģà¸", + "jer k", + "cosme tic", + "ko w", + "stru st", + "ap ache", + "post cards", + "for mul", + "ì ĭ", + "spin al", + "jack pot", + "elec tri", + "à Ń", + "lo y", + "gra der", + "diab lo", + "ar di", + "he sit", + "f w", + "arch ery", + "pa sh", + "the ories", + "repe al", + "re live", + "per cy", + "âĺ Ĩ", + "im in", + "syn chron", + "sham poo", + "coup ons", + "o to", + "la i", + "thou ght", + "luxembour g", + "mo v", + "ðŁĺ ¥", + "ge mma", + "se ated", + "m ga", + "strat ford", + "un certainty", + "shi fts", + "est o", + "fo ol", + "fire arms", + "cor rie", + "ki ki", + "appa rent", + "p ills", + "olym pia", + "fi d", + "elev ated", + "de cks", + "ignor ing", + "av alan", + "ro v", + "whist le", + "p tsd", + "milit ants", + "robo tic", + "pac ers", + "quil t", + "bankrupt cy", + "lic h", + "per cussion", + "celebr ity", + "al s", + "( ;", + "su t", + "pokemon go", + "h g", + "off s", + "gibr altar", + "scre ams", + "billi e", + "gen ome", + "mar in", + "be ams", + "arch bishop", + "em in", + "bedro oms", + "g ated", + "ol ly", + "warran ty", + "at own", + "cudd les", + "gun na", + "k ic", + "vi ve", + "cy mru", + "nar row", + "pro b", + "le o", + "refe rences", + "manufac tured", + "cho pper", + "brun swick", + "sem is", + "don ia", + "r ye", + "man o", + "hur ting", + "? 
#", + "hol li", + "investig ations", + "c els", + "ðŁĵ ŀ", + "le ster", + "temp les", + "sto rey", + "mc mahon", + "toi lets", + "wo of", + "ï¸ İ", + "le verage", + "at om", + "night mares", + "victor ious", + "haun ting", + "custom er", + "ag i", + "yo ongi", + "mon ty", + "ver onica", + "w ur", + "inti mid", + "blan kets", + "volu tion", + "j m", + "âĺ İ", + "am on", + "jud ith", + "ðŁĺİ ðŁĺİ", + "distr acted", + "dri p", + "hurric ane", + "and es", + "revel ation", + "tro op", + "ab leg", + "col lin", + "tibet an", + "wor rying", + "inter nationally", + "eat er", + "camero on", + "brad or", + "y uk", + "ðŁĴĹ ðŁĴĹ", + "tra k", + "slo pes", + "ci er", + "ne a", + "ol er", + "ta ka", + "albi on", + "volcan ic", + "am n", + "a fi", + "ob stac", + "face time", + "ger ing", + "n pr", + "metall ica", + "organ ic", + "ðŁĴ ¡", + "ki dd", + "d ances", + "pemb ro", + "wash er", + "m its", + "om er", + "emo tionally", + "tan go", + "ip o", + "do cks", + "scan ning", + "spec s", + "tho m", + "the ology", + "emer gen", + "om i", + "g pa", + "selec tions", + "un necessary", + "ima ge", + "ter s", + "induc ed", + "gi gan", + "rent als", + "supp lied", + "m fa", + "shan kar", + "lat er", + "pa jam", + "cla ve", + "Ù ģ", + "ma hin", + "carl son", + "avi an", + "ano va", + "kati e", + "aj ith", + "design ated", + "chocol ates", + "investig ators", + "gla zed", + "prin cess", + "er ry", + "ra gn", + "ou rable", + "hr u", + "sun dance", + "peuge ot", + "steam punk", + "gh lin", + "gre ase", + "hi res", + "z ap", + "per ce", + "j ill", + "tom e", + "he hehe", + "joy ful", + "mae stro", + "ni shed", + "gene alo", + "v ich", + "p its", + "fox es", + "good man", + "emer son", + "lo bes", + "con verse", + "o ats", + "thom son", + "ra him", + "mal ware", + "ah i", + "man kind", + "re sin", + "im g", + "sw ood", + "kin der", + "sc roll", + "ar a", + "sak ura", + "ro bbed", + "xi on", + "ny a", + "c ism", + "ce dar", + "be in", + "mour ning", + "tor to", + "heath row", + "done gal", + "bar b", + "hydr ation", + "k or", + "elim ination", + "su pdates", + "hill s", + "appe ti", + "star red", + "ko m", + "gw en", + "dd d", + "cra y", + "sc anner", + "personal ised", + "seren ity", + "re design", + "meta ph", + "box ed", + "judg ment", + "no se", + "ë ¹", + "er ad", + "ac ne", + "supp liers", + "ener getic", + "v om", + "as ap", + "ðŁĶ ¸", + "ir vine", + "hat ch", + "la ss", + "ad ren", + "waff les", + "accur ately", + "ici o", + "itt le", + "se un", + "occup y", + "web cam", + "thene w", + "ent es", + "ga i", + "j w", + "accoun table", + "vis or", + "ir rit", + "licen sing", + "hudder sfield", + "gen ie", + "ðŁİ ¾", + "atmo spheric", + "ten sions", + "spart an", + "clif ford", + "ol an", + "north bound", + "ame en", + "cen sor", + "u el", + "ster y", + "$ $", + "far rell", + "hy ster", + "cl t", + "se dan", + "rep lied", + "descri bing", + "micro wave", + "sla b", + "pro sp", + "assi sting", + "ru bio", + "e than", + "hh hhh", + "gu ay", + "z man", + "ra ise", + "roll ing", + "o e", + "n ile", + "ambro se", + "scar borough", + "hero ic", + "coo ks", + "mor t", + "chop ra", + "ðŁĮ ·", + "to b", + "shav ing", + "stac ey", + "dor m", + "motor sports", + "wi ki", + "fol ds", + "sp iced", + "stress ful", + "liter al", + "fu dge", + "pe ggy", + "wa ite", + "tre sses", + "se sh", + "pr ic", + "ðŁİ ħ", + "fri ght", + "r va", + "mumb ai", + "po m", + "tt v", + "cel lar", + "tom e", + "andro id", + "dor is", + "tsun ami", + "tin der", + "o ec", + "m wc", + "dor tmund", + "no thin", + "l iti", + "so u", + "believe in", + "at u", 
+ "kno cks", + "mag ni", + "ss sss", + "ro hit", + "ine ws", + "ang i", + "m andy", + "ke ttle", + "intermedi ate", + "av ant", + "cur l", + "endor sed", + "ori o", + "ur t", + "consider ation", + "wi res", + "shel ters", + "b ino", + "vik ram", + "imple mented", + "ly dia", + "bu k", + "paro dy", + "c news", + "under graduate", + "canu cks", + "sam i", + "polit ically", + "ro tten", + "gh z", + "tex tiles", + "over load", + "moder ni", + "recre ational", + "fli r", + "bat on", + "typo graphy", + "ov ation", + "intrigu ing", + "pilgri mage", + "al ge", + "ad ays", + "tcm party", + "sp elled", + "cur ls", + "boo ze", + "ste m", + "ann es", + "ir ls", + "spon ge", + "sho pper", + "sig nation", + "bra ss", + "mi stress", + "le ah", + "beg inner", + "lau derdale", + "augu st", + "pre school", + "ta ping", + "tai pei", + "execu tives", + "b d", + "rhe tor", + "esc or", + "immun o", + "deeplear ning", + "stat ues", + "it us", + "manu script", + "ly ric", + "cor vette", + "mol ly", + "la ge", + "de p", + "cn bc", + "le st", + "je ssi", + "fi fe", + "griff ith", + "oppo sing", + "ran g", + "dr ills", + "respec tful", + "p ity", + "d ell", + "har ding", + "play boy", + "blo ke", + "shut out", + "k ili", + "o sp", + "se attle", + "bc poli", + "mis es", + "journ als", + "team ing", + "es ther", + "fre ddy", + "Ķ ï¸ı", + "metr ics", + "no tre", + "gar ry", + "for ty", + "navi gate", + "perio ds", + "bened ic", + "j id", + "da w", + "ance stors", + "restor ing", + "con g", + "aller gy", + "tit anium", + "c ence", + "lean ing", + "ab bas", + "v ast", + "uc f", + "roof ing", + "e man", + "seve rely", + "vo gue", + "ve au", + "in bound", + "d z", + "tane ously", + "stret ching", + "man chester", + "dr yer", + "dav is", + "kan th", + "the game", + "it ted", + "re tain", + "el les", + "conge stion", + "frat ernity", + "ol lie", + "lo ki", + "fre ely", + "cho o", + "pon y", + "sc ep", + "tab ly", + "bal t", + "rock n", + "di me", + "lo gging", + "ðŁį ·", + "ad u", + "ha voc", + "water ford", + "char is", + "swee tie", + "run ning", + "ner d", + "erdo gan", + "z ara", + "weigh ing", + "fif ty", + "pre cise", + "low ell", + "kurdi stan", + "r yo", + "or th", + "syn th", + "lin ers", + "phenomen on", + "art illery", + "il legally", + "constru ct", + "nostal gic", + "gar th", + "al ta", + "shel ton", + "a sean", + "w ander", + "dur ban", + "di versi", + "bon o", + "cl on", + "le man", + "sh un", + "obstac les", + "appet ite", + "fe eder", + "respir atory", + "di xie", + "formu la", + "an to", + "so ber", + "extin ct", + "au c", + "ing les", + "legitim ate", + "; ;", + "min nie", + "ipsw ich", + "dram atically", + "ðŁijı ðŁı¼", + "ingh am", + "milit ary", + "mon et", + "us navy", + "for k", + "dun no", + "play er", + "q otd", + "st oo", + "ex or", + "ethiop ian", + "film fest", + "pe red", + "c ate", + "sau di", + "in ner", + "sin cere", + "tion ality", + "ale e", + "de eds", + "cooper ative", + "ir onic", + "cro cod", + "br ary", + "post season", + "cam per", + "can ary", + "e in", + "exten sions", + "nb d", + "sher wood", + "spo kane", + "hu mp", + "jit su", + "ê ¹", + "dar yl", + "p si", + "stab bed", + "offer ings", + "expe cts", + "cav al", + "body building", + "fr aming", + "f ca", + "ye arly", + "bom bed", + "sk il", + "resear ching", + "jud iciary", + "gree ted", + "tu dor", + "mil o", + "innov ate", + "ðŁĺ Ľ", + "r hs", + "ru by", + "contribu tor", + "fam er", + "soci ally", + "m lin", + "fi ery", + "ut ter", + "beau t", + "it os", + "de voted", + "rain bow", + "bar ney", + "pe ren", + "ar jun", + "r 
na", + "gab by", + "ut i", + "hann ity", + "pick le", + "ser v", + "qu akes", + "pp e", + "fe m", + "wh itec", + "j n", + "victor ies", + "ðŁ§ ¡", + "gol fer", + "congratul ates", + "resul ting", + "mechan ic", + "ur ve", + "cen tered", + "kie v", + "an s", + "in cub", + "< <", + "c mo", + "bestfan army", + "dap h", + "en ham", + "on cology", + "ku sh", + "t xt", + "ori ented", + "fashion able", + "c sr", + "sa hara", + "r ack", + "pd p", + "han son", + "ภĩ", + "ti ers", + "ra r", + "pan am", + "in sky", + "sa hi", + "testam ent", + "asth ma", + "in her", + "fisher ies", + "or der", + "ho we", + "gall on", + "ep is", + "suz anne", + "drow ning", + "paneli sts", + "ðŁĺ ²", + "ë ¦", + "al ach", + "commemor ative", + "at tribu", + "ðŁij »", + "mo o", + "visi onal", + "week sary", + "gu st", + "ak in", + "poin te", + "ee e", + "di spar", + "ni pp", + "dent al", + "st all", + "pi an", + "bor e", + "ul ster", + "tic k", + "ir r", + "tae hyung", + "micro phone", + "bermu da", + "ga ard", + "el er", + "plumb ing", + "hu gely", + "âļ« ï¸ı", + "race way", + "cam bridge", + "mar cel", + "burn ley", + "to ast", + "holly wood", + "fa sting", + "me red", + "hib ition", + "ca pped", + "benef icial", + "ow ning", + "cont amin", + "arab ian", + "to on", + "cap ac", + "hul u", + "sm ir", + "nutri ents", + "se in", + "graph s", + "con ditional", + "ðŁij ħ", + "or ac", + "play in", + "nor the", + "tor nad", + "mar ian", + "ju mbo", + "lex i", + "incredible india", + "road to", + "uk one", + "confu sing", + "sp h", + "shan k", + "pi ed", + "mq m", + "positi vely", + "sher ry", + "path ways", + "consi ders", + "tof u", + "argu ments", + "resil ient", + "che tt", + "with dra", + "ter o", + "ated ly", + "sw ana", + "he b", + "fli ght", + "har ley", + "decre ase", + "kind le", + "book shop", + "³ ï¸ı", + "marty rs", + "sm ur", + "mc cl", + "concer to", + "sti me", + "rejo ice", + "app lau", + "cle ment", + "mer kel", + "jai me", + "im mortal", + "isle of", + "mar co", + "youtu ber", + "stal king", + "me too", + "st ack", + "sp ouse", + "u st", + "lu v", + "âļ¾ ï¸ı", + "eque strian", + "ev ing", + "fl in", + "nick name", + "the big", + "as ar", + "st acks", + "wal ker", + "bor a", + "kidnapp ed", + "hur ling", + "humb old", + "rec alls", + "co pper", + "ann is", + "se o", + "mer ger", + "mu ir", + "ad dy", + "ðŁĴª ðŁĴª", + "be x", + "cr acy", + "con an", + "congratul ation", + "mid st", + "âĻ ¬", + "for bi", + "op tic", + "cr ate", + "crocod ile", + "mad agas", + "secur ing", + "ast on", + "o gue", + "savi or", + "salis bury", + "love it", + "fuji film", + "cast les", + "as st", + "ar rows", + "sp acious", + "tr s", + "poly vore", + "progre ssion", + "m ri", + "nel son", + "bi m", + "indic ator", + "o da", + "pe pe", + "re signation", + "gu t", + "sne aker", + "log ically", + "az y", + "are lla", + "te aring", + "jo shi", + "ssion ism", + "q pr", + "mari ah", + "p x", + "ble ed", + "mi an", + "med ley", + "we iss", + "ker ry", + "gat ory", + "at al", + "madi son", + "av enger", + "nab y", + "pl and", + "gi les", + "fresh water", + "d ington", + "ta j", + "demonstr ates", + "n tv", + "bul bs", + "sunday morning", + "pe ake", + "souven ir", + "wa h", + "ton nes", + "m kt", + "complex ity", + "con den", + "ross i", + "b ing", + "y ds", + "su k", + "n go", + "mid land", + "ol y", + "life is", + "ri pple", + "mo reno", + "dd ers", + "tu s", + "á ĥ", + "bou l", + "x a", + "hol dings", + "wn y", + "shadowhun ters", + "ke i", + "asp ire", + "m ous", + "ow en", + "so ak", + "skir ts", + "moun taine", + "stor ming", + "ch 
rome", + "ri ots", + "sar ato", + "amaz e", + "less ness", + "nav ar", + "crit eria", + "ra fa", + "indul ge", + "ay er", + "por to", + "nam o", + "........ ........", + "yi elds", + "val le", + "j h", + "mac ron", + "sa ins", + "dur ant", + "tra ilers", + "wo t", + "confeder ate", + "sh rin", + "id ol", + "form ally", + "ten e", + "motor cycles", + "than g", + "no de", + "bang er", + "dal y", + "p ats", + "enroll ment", + "au ctions", + "at al", + "ar bor", + "lo gos", + "de arest", + "trans action", + "dom ingo", + "fle a", + "ser mon", + "de ck", + "sin cere", + "questi oning", + "juli o", + "was p", + "pre tz", + "armen ian", + "k ham", + "inflam mation", + "picture sque", + "acci dental", + "film makers", + "ðŁĺ ļ", + "ðŁĴ į", + "ca sey", + "so b", + "yee zy", + "good will", + "parag ra", + "ss ly", + "fe ather", + "dy ed", + "assassin ation", + "na de", + "b cs", + "app lies", + "femin ine", + "fe u", + "ext ent", + "depu ties", + "l ack", + "psy chic", + "go i", + "kill ings", + "pse u", + "ðŁ¤ ª", + "un c", + "mar l", + "tan e", + "mck enna", + "sur fer", + "influ ences", + "free way", + "hack ney", + "mal aria", + "el and", + "te au", + "rema stered", + "Ø ±", + "raz or", + "gg y", + "cor ro", + "lak sh", + "fla ir", + "honest y", + "hoor ay", + "de pp", + "am c", + "wedne sdays", + "q a", + "ed its", + "- $", + "se villa", + "dou bled", + "human ities", + "c cot", + "som os", + "r ine", + "af a", + "si oux", + "re construction", + "wel ding", + "th reads", + "am ish", + "encoura gement", + "po der", + "bo ck", + "bal m", + "p tions", + "stand up", + "accompli shments", + "guar ding", + "convic tion", + "ac ion", + "napo leon", + "depic ting", + "att ack", + "su i", + "wear able", + "âĸª ï¸ı", + "pot ter", + "esc ort", + "vis e", + "to ts", + "bo on", + "event profs", + "angu lar", + "womenshi storymonth", + "bar row", + "sch i", + "ac comp", + "ti k", + "l end", + "kensing ton", + "wol fe", + "st acked", + "cra shing", + "exhi bit", + "wing ed", + "sab rina", + "ma sa", + "k ms", + "alway s", + "et t", + "pla sma", + "counsel ing", + "pick les", + "nfl draft", + "mr s", + "inev itable", + "coura geous", + "staf ford", + "writers life", + "ho s", + "e j", + "gh yun", + "trade mark", + "adri an", + "influen cer", + "coron ation", + "ra ging", + "explo red", + "usa f", + "excep tion", + "eu x", + "tan ker", + "sw ami", + "pac ket", + "ðŁij¨ âĢį", + "f en", + "she en", + "a ero", + "j l", + "re gal", + "nw t", + "au ster", + "meh ta", + "char ge", + "a ste", + "b ate", + "inf eld", + "racec ourse", + "collap sed", + "fle ece", + "z il", + "al lie", + "alternati ves", + "geor ges", + "ðŁĵ į", + "quir ky", + "fc b", + "nat geo", + "philanthro py", + "bra i", + "every day", + "ðŁIJ °", + "ach ers", + "ja an", + "fin es", + "q i", + "fisher man", + "distin ct", + "gri mes", + "nation alist", + "comm ence", + "ro wn", + "âĢ ³", + "z ing", + "f ter", + "hr w", + "baro que", + "bl ender", + "kitt y", + "hoo ks", + "c ited", + "w anda", + "consen sus", + "reinde er", + "an and", + "supp ly", + "me ds", + "v n", + "ol ph", + "rat chet", + "shel don", + "secur ities", + "ë°© íĥ", + "cro m", + "mosqu ito", + "j eric", + "im mac", + "dimen sions", + "â ¤", + "di ssi", + "sponge bob", + "dami en", + "steven son", + "jo anne", + "del ish", + "yi kes", + "than x", + "surve ys", + "postpon ed", + "alco holic", + "al ised", + "ðŁĻı ðŁı»", + "do ch", + "sen tim", + "mered ith", + "com pares", + "b ago", + "happy days", + "mo ss", + "ãħ ĭ", + "ne c", + "gn ment", + "frustr ated", + "comb in", + "ri v", 
+ "ec lec", + "col lo", + "compli ment", + "actor slife", + "ct to", + "nic ar", + "op hon", + "apar the", + "man t", + "ja de", + "trol ley", + "optimi zation", + "eye on", + "eco logical", + "qui st", + "ep he", + "ॠĩ", + "cin co", + "appo ints", + "old school", + "c pr", + "behavi oral", + "min aj", + ":- (", + "tag ging", + "ev al", + "jo aqu", + "ðŁĺ «", + "ha k", + "de me", + "jama ican", + "so s", + "hy att", + "hand book", + "libr arian", + "hanni bal", + "pump ing", + "ch om", + "f man", + "ga i", + "hu ll", + "respon ders", + "green ville", + "n us", + "vau gh", + "ðŁİī ðŁİī", + "ta xi", + "gold berg", + "man tra", + "te ase", + "forbi dden", + "metho dist", + "ati vity", + "* ***", + "ec t", + "mc gr", + "Ħ ëĭ", + "se b", + "amid st", + "disapp ear", + "thy ro", + "phili ps", + "er ina", + "v icious", + "stream er", + "million aire", + "ma p", + "str ick", + "hack athon", + "gh a", + "ed ic", + "mi ka", + "pe ck", + "ill i", + "anto ine", + "ar ca", + "op tic", + "ma ure", + "ðŁĩ¦ ðŁĩº", + "cla shes", + "man ly", + "âĺ ģ", + "al var", + "and res", + "me i", + "el m", + "ww ww", + "al tered", + "l te", + "ê¹ Ģ", + "mo jo", + "for rest", + "thal ai", + "non t", + "spee ches", + "acknow ledge", + "ign ite", + "x factor", + "ðŁ¥ Ĥ", + "mead ow", + "disru pt", + "debu ted", + "scrim mage", + "pharmaceu tical", + "fi dd", + "found ations", + "philosop her", + "et al", + "publi shers", + "bo ys", + "c ke", + "ru gged", + "opti mism", + "re be", + "phil harmon", + "nar cis", + "ral lies", + "lu is", + "go blue", + "fol ded", + "un acceptable", + "optim al", + "li sa", + "pol aro", + "+ .", + "en za", + "âĿ £ï¸ı", + "mon opoly", + "grace ful", + "dair y", + "du a", + "diffic ulty", + "judge ment", + "o si", + "mer sey", + "flu x", + "new found", + "ter ns", + "dimen sional", + "in vic", + "al ba", + "am it", + "abudha bi", + "alger ia", + "autom obile", + "the ad", + "lo tion", + "acceler ator", + "vac ant", + "iti on", + "lu f", + "al ic", + "pl l", + "bla zing", + "ba z", + "sen e", + "ðŁij ¼", + "villa ins", + "direc tory", + "eis en", + "to ck", + "broch ure", + "ri pp", + "hb d", + "zayn malik", + "nic he", + "lo lol", + "certific ates", + "mor se", + "fac up", + "x ham", + "un wanted", + "im ports", + "carne gie", + "fan sign", + "mo u", + "r alph", + "destroy er", + "sw ing", + "trek king", + "cili ation", + "pit bull", + "g aps", + "ho well", + "defin itive", + "mc le", + "f ps", + "et z", + "bol ly", + "lyn n", + "gan o", + "at ure", + "fur suit", + "co il", + "na v", + "but ts", + "tro jans", + "eu re", + "en ko", + "sch umer", + "horri fic", + "install ment", + "br b", + "subur bs", + "a bel", + "vi r", + "de sh", + "cun ningham", + "ðŁIJ »", + "span n", + "sch we", + "ke mp", + "tr u", + "ste alth", + "qu es", + "le w", + "deli ghts", + "ko ch", + "hu mili", + "cr iti", + "il t", + "sp ells", + "mi ley", + "car ic", + "ðŁį ´", + "lc fc", + "substitu te", + "oun g", + "? 
!!", + "af fir", + "predic table", + "class of", + "er r", + "cy press", + "chand ra", + "age ing", + "__ __", + "ther land", + "don caster", + "el in", + "yo shi", + "sail ors", + "har ris", + "jo anna", + "niger ians", + "h ers", + "pla gue", + "pro cra", + "k no", + "can ton", + "busine s", + "un h", + "pra kash", + "c in", + "bow en", + "co ating", + "m als", + "be gging", + "smith son", + "ponti ac", + "sp ies", + "dam ian", + "pl ine", + "und ant", + "al ta", + "one ss", + "shame less", + "da q", + "bb m", + "wal es", + "stam pede", + "ser um", + "Ù Ĩ", + "cataly st", + "x n", + "ab sc", + "free zer", + "ch un", + "ari os", + "mc cre", + "fore head", + "he ars", + "damas cus", + "tac oma", + "ardu ino", + "encoun ters", + "stan ton", + "lg b", + "ab as", + "\" ..", + "ke te", + "drac ula", + "ele m", + "g ne", + "zepp elin", + "la brador", + "pul p", + "op tional", + "or n", + "russi ans", + "san itation", + "hil ary", + "etsym ntt", + "pen alties", + "au st", + "ig ans", + "olympi an", + "medic aid", + "vers ace", + "va pe", + "re stra", + "pe ep", + "sexi est", + "st alls", + "di le", + "the a", + "punjab i", + "pupp y", + "tuesday motivation", + "ðŁĵ ļ", + "the flash", + "roc ket", + "mo dest", + "chihu ahu", + "on na", + "k sa", + "hur dles", + "ca ve", + "fail ures", + "sp lit", + "bo ho", + "gur l", + "disappo int", + "ho ward", + "nug get", + "fran z", + "stal ert", + "kaz akh", + "for getting", + "sch ri", + "ag ate", + "am at", + "eve rett", + "du et", + "veter inary", + "juli an", + "ch ills", + "bra ve", + "ghost busters", + "lan do", + "gre ets", + "profit able", + "d é", + "ti r", + "ze e", + "om en", + "pd x", + "gray son", + "har i", + "fix es", + "stab bing", + "swim mer", + "symb ols", + "compli ments", + "po se", + "func tioning", + "th nx", + "gi r", + "corpor ations", + "bar low", + "lo e", + "off season", + "distin ctive", + "marvel ous", + "nik on", + "enri que", + "ky u", + "ja ws", + "amo to", + "lom bar", + "travel blogger", + "fa h", + "ouri sm", + "tri stan", + "so e", + "ce ase", + "ðŁı ħ", + "z ac", + "mck enzie", + "taxpay ers", + "swim suit", + "bl o", + "les ley", + "kan sas", + "w ks", + "ki el", + "provo king", + "my les", + "str ing", + "kangar oo", + "galac tic", + "fif th", + "s ke", + "we ir", + "ll is", + "mat ory", + "ðŁĩ ¿", + "un ci", + "re productive", + "roo ting", + "ti des", + "gad get", + ".... 
......", + "alex ander", + "bow ler", + "scre w", + "apo log", + "eri ka", + "wal ters", + "shet ty", + "lan e", + "ban ter", + "as ant", + "me so", + "v ain", + "\" \"\"", + "us i", + "fer din", + "accomp lish", + "man sfield", + "bom bar", + "collabor ating", + "cla p", + "it ure", + "s da", + "smo ky", + "na k", + "im person", + "car la", + "com ra", + "bur gl", + "lo co", + "ti es", + "in hi", + "trac ey", + "se is", + "diss er", + "rr rr", + "dra y", + "prote ct", + "cor ona", + "hun ger", + "ck en", + "c eli", + "trou bled", + "predat ors", + "fic tional", + "shav ed", + "riche st", + "metab oli", + "ful ham", + "gro oming", + "mono chrome", + "wa sting", + "as co", + "ast e", + "ti sta", + "remedi es", + "ung soo", + "south end", + "perman ently", + "bu mble", + "procra stin", + "ident ical", + "practic ally", + "ma scul", + "su ke", + "assu red", + "val erie", + "devi ant", + "grizz lies", + "thi er", + "pur a", + "ne pal", + "not ts", + "bil ateral", + "spo il", + "car mel", + "cine matic", + "ph l", + "ni fty", + "ma o", + "hypo cri", + "la ser", + "pan try", + "mathemat ical", + "el isa", + "coordin ation", + "bel mont", + "a it", + "radi ant", + "bo iler", + "man g", + "f ag", + "cr c", + "h ams", + "br in", + "â¬ĩ ï¸ı", + "famil ia", + "âĿ £", + "sab er", + "ru pert", + "gg an", + "rit z", + "mic h", + "sal ford", + "le vi", + "gra l", + "ðŁĴ ¤", + "n ino", + "ce d", + "business man", + "ul tr", + "sim ply", + "compre ssion", + "pa ins", + "hal t", + "ë°©íĥ Ħ", + "landsc aping", + "n f", + "croo ked", + "er d", + "itt in", + "ddle ston", + "sur passed", + "ino a", + "da g", + "bl en", + "exten ding", + "at ing", + "al gae", + "ball er", + "u mar", + "snoo ker", + "col lu", + "flo wn", + "thu b", + "ridic ulously", + "ki sh", + "op le", + "di re", + "as ser", + "ari sto", + "sc iss", + "h ating", + "trou ble", + "syl via", + "suc cul", + "plo ts", + "sincere ly", + "al er", + "laure ate", + "br ack", + "att n", + "rif les", + "me to", + "collec tible", + "cu omo", + "conte stant", + "consist ency", + "ant z", + "rang es", + "abig ail", + "de b", + "mini ster", + "grow ers", + "an oo", + "hoo ver", + "dream er", + "nu cle", + "resear ch", + "mi y", + "sha hid", + "ma v", + "d honi", + "cin i", + "do j", + "hin dus", + "part ying", + "dal i", + "alon so", + "inform al", + "clark son", + "it ton", + "ki an", + "cit yo", + "mor i", + "la sted", + "as pen", + "libr ary", + "susp ici", + "qu at", + "den ial", + "fol der", + "ch ori", + "swee ping", + "eni x", + "ðŁį Ĥ", + "Ø Ń", + "nas car", + "handmade hour", + "mou l", + "heat wave", + "em er", + "exam ine", + "ib n", + "gr ind", + "po v", + "tion ist", + "m bo", + "she ila", + "integr ate", + "om es", + "take away", + "cer v", + "con nie", + "tic ket", + "ce led", + "bi en", + "visu ally", + "madagas car", + "sor ry", + "gu i", + "park run", + "tra its", + "la be", + "pois oning", + "ॠĢ", + "vi able", + "bohemi an", + "denti stry", + "bad os", + "spr outs", + "mask ed", + "te ddy", + "ðŁĺ ·", + "sa f", + "sa as", + "ji ang", + "ti ght", + "spe aker", + "withdra wal", + "bc n", + "as signed", + "class rooms", + "fle ming", + "ðŁĴ «", + "super girl", + "tot als", + "table top", + "e books", + "horizon tal", + "cra z", + "flu sh", + "j ard", + "c dc", + "er son", + "ãħ ł", + "green wood", + "ni h", + "co x", + "ad a", + "lit re", + "go ing", + "v icky", + "cur ved", + "lou ie", + "gra ins", + "hy e", + "lon ge", + "reme dy", + "tra inee", + "san jay", + "super stars", + "ma ser", + "man u", + "s age", + "wh l", + "ðŁĺĤ ðŁĺŃ", + 
"ðŁijį ðŁı»", + "m sd", + "en z", + "rab hu", + "j oo", + "gh u", + "ac er", + "e po", + "resurrec tion", + "justice for", + "bl ended", + "mo da", + "avalan che", + "france sco", + "re spective", + "g s", + "ye ast", + "wel ch", + "devo tion", + "ge tin", + "athe ism", + "am ic", + "carol yn", + "lo c", + "ld nont", + "ave c", + "us da", + "le gged", + "bra very", + "b lower", + "cow boy", + "he h", + "sti ble", + "buff al", + "chann el", + "run chat", + "âĺķ ï¸ı", + "ide ology", + "best seller", + "y oo", + "pe anu", + "bon ne", + "fel ic", + "edi son", + "fr actu", + "naren dra", + "pp ets", + "seym our", + "ri viera", + "he ctor", + "necess arily", + "bi anca", + "soci eties", + "the best", + "w g", + "sent ences", + "win k", + "vacc ines", + "pal ooza", + "jam ming", + "as f", + "mp us", + "agre ements", + "ec k", + "ba c", + "hon ore", + "com pul", + "wild cat", + "im posed", + "yo ga", + "hud son", + "can celed", + "l ich", + "fu zzy", + "es que", + "ch uk", + "w vu", + "se k", + "fli pping", + "r hon", + "wi shed", + "wh a", + "cap ability", + "len ovo", + "ìĨĮëħ Ħëĭ", + "vi vo", + "tv d", + "nor a", + "sil k", + "pas adena", + "yo semite", + "valu ation", + "clo cks", + "u ber", + "mr c", + "dar kest", + "au bre", + "ss o", + "bell y", + "wrest lers", + "kill in", + "lou der", + "buck ley", + "ge el", + "ad on", + "un s", + "appe aling", + "ðŁij ¯", + "semit ism", + "list ens", + "fit z", + "ãĥ³ ãĥ", + "ny lon", + "ar ty", + "seem ingly", + "hal a", + "su ited", + "et y", + "she ds", + "mu ffins", + "ap ric", + "um ents", + "u ta", + "jam mu", + "chelse afc", + "star z", + "yo ko", + "roo t", + "clean sing", + "di ar", + "pione ering", + "ihear tradio", + "dig iti", + "fin dyour", + "can o", + "ðŁĴ İ", + "z ol", + "spac ecraft", + "six ers", + "moi sturi", + "b ile", + "ti sts", + "hor ton", + "rang ing", + "colum bi", + "mete oro", + "senti ment", + "ep l", + "foo th", + "text book", + "drain age", + "r ly", + "sc ue", + "imran khan", + "ðŁĴ ¸", + "margar ita", + "ed dy", + "predic ts", + "gamer gate", + "advis e", + "growth hacking", + "love you", + "ug and", + "v f", + "beng hazi", + "s later", + "ne wor", + "ch el", + "independence day", + "p np", + "cul len", + "hoo dies", + "num bered", + "brit t", + "t sa", + "kl tu", + "s ages", + "mom o", + "onep lus", + "col l", + "gu ts", + "w ta", + "mesm eri", + "enh ancing", + "chiro prac", + "j is", + "teen agers", + "m one", + "constell ation", + "sweep stakes", + "e ze", + "slovak ia", + "la ye", + "pear ce", + "wa ver", + "po gba", + "k ron", + "sur geons", + "mar x", + "ti d", + "gg a", + "desc end", + "p ours", + "upri sing", + "wal la", + "sab bath", + "bachel ore", + "mack in", + "k am", + "peter borough", + "hor a", + "ðŁĮŁ ðŁĮŁ", + "think big", + "r j", + "hy drau", + "sp al", + "univers it", + "ðŁı ī", + "mail online", + "league of", + "ten ants", + "w ally", + "lan ce", + "heav ens", + "dd r", + "bol ts", + "am ir", + "i phone", + "ci gar", + "en du", + "re i", + "el abor", + "r inging", + "john son", + "characteri stics", + "sal oon", + "algori thms", + "tal kin", + "m tn", + "di ve", + "region als", + "ff ice", + "hat i", + "deviant art", + "so tto", + "shir o", + "l ama", + "k we", + "f aded", + "por ting", + "tu mmy", + "est ates", + "buen os", + "ðŁ¦ ģ", + "beli ever", + "pen etr", + "dar n", + "sp ite", + "can opy", + "fashi oni", + "t illa", + "pet als", + "eli jah", + "bra wl", + "marty r", + "ë°©íĥĦ ìĨĮëħĦëĭ", + "mid town", + "eric h", + "d apper", + "sm town", + "me gam", + "ww w", + "le le", + "on s", + "cat 
fish", + "fir th", + "fossil friday", + "ball park", + "th aw", + "pot ent", + "illi e", + "cre ep", + "car p", + "so ap", + "gun dam", + "infe c", + "yy yyy", + "ठ¨", + "z ag", + "rit t", + "calcu lator", + "bo ca", + "ok o", + "to ad", + "threat en", + "refin ed", + "olym pic", + "accompli shment", + "bacter ial", + "a ji", + "tat um", + "feli z", + "she ed", + "j at", + "th ic", + "jam al", + "ðĿ ĺ", + "lin a", + "ðŁIJ ¯", + "jo king", + "yot po", + "pin ch", + "ak ron", + "her b", + "motiv ation", + "li a", + "ho stage", + "cre ek", + "gam ble", + "russ ell", + "patt i", + "fo tos", + "c pc", + "bro ken", + "back the", + "cla ys", + "u mm", + "stock ton", + "mat ernal", + "ü r", + "la kel", + "cent ury", + "be k", + "infe cted", + "ภ¡", + "smack down", + "man ned", + "ta hoe", + "sm es", + "bas a", + "su la", + "augu sta", + ". *", + "rohing ya", + "gre ed", + "counsel or", + "silhou ette", + "gra vit", + "cla use", + "' -", + "bo bc", + "occa sions", + "now adays", + "dic tat", + "be ard", + "n ally", + "brigh test", + "kab ul", + "inc india", + "dhan ush", + "archae ological", + "che ape", + "mizz ou", + "d hi", + "ov ski", + "bax ter", + "asse mble", + "à ¢", + "gi gi", + "ac am", + "wis ely", + "haz ard", + "north ampton", + "âľĪ ï¸ı", + "me th", + "bla sting", + "re unite", + "mu lus", + "ali zes", + "t read", + "mil a", + "ed ward", + "ko va", + "pe sto", + "ðŁij ¶", + "vit z", + "hydrau lic", + "refurbi shed", + "mo tel", + "isab ella", + "hom me", + "sever ance", + "uph ol", + "mis erable", + "f ari", + "lat ter", + "ef er", + "crack ers", + "es l", + "ac io", + "yy j", + "in an", + "ec b", + "z ind", + "pan as", + "tru cking", + "re ed", + "sh aker", + "burge ss", + "em pire", + "ag nes", + "n ington", + "art works", + "fr s", + "ti le", + "bi ome", + "eu n", + "ch ong", + "americ ana", + "god father", + "go blin", + "i shi", + "! 
).", + "temp ted", + "gen omics", + "mand ate", + "ck y", + "ðŁĴĻ ðŁĴĽ", + "som ali", + "br andy", + "in ven", + "spoke sperson", + "pc b", + "yu an", + "h g", + "fa z", + "starwar s", + "ro wan", + "blue grass", + "don g", + "d day", + "trin idad", + "er ton", + "ban ning", + "re tention", + "cu red", + "tober fest", + "re set", + "we is", + "deta ched", + "behindthe scenes", + "immun ity", + "ph a", + "bra y", + "ðŁij ½", + "ran cho", + "ram say", + "est onia", + "nd tv", + "] .", + "cab aret", + "tar o", + "d v", + "show cases", + "plu m", + "ðŁij ¸", + "son oma", + "pre pa", + "memor ab", + "e stu", + "drive way", + "u les", + "magn us", + "x r", + "nn n", + "much as", + "en ge", + "stre amed", + "fore stry", + "audio book", + "tro y", + "reck less", + "kil om", + "ru ler", + "ra k", + "proce ssion", + "i ons", + "po ole", + "noc tur", + "wh s", + "farm house", + "per a", + "par me", + "hypocri sy", + "s ics", + "v ant", + "cas k", + "holi stic", + "au st", + "Ð ¿", + "in do", + "ðŁij© âĢį", + "di so", + "disp atch", + "ol sen", + "make it", + "en nis", + "cent re", + "ar range", + "ðŁĮ ¼", + "sal ted", + "ea siest", + "f ate", + "reg atta", + "mo zz", + "ac an", + "sin i", + "g ically", + "ch ops", + "chick en", + "work in", + "ha gg", + "invol ve", + "wee ds", + "book day", + "wake up", + "ky r", + "michel in", + "fu ss", + "re juven", + "vac ancies", + "incar cer", + "m st", + "sc ents", + "sovere ign", + "kick er", + "à §", + "bo d", + "âĢĶ >", + "sa h", + "mob il", + "shrop shire", + "oph one", + "dress er", + "mis suni", + "hep burn", + "i mo", + "foli age", + "diagno stic", + "as san", + "cycl ing", + "guil t", + "c sa", + "puertor ico", + "win elover", + "wake field", + "do ggy", + "k he", + "pa pp", + "co g", + "al lot", + "cu ck", + "poe tic", + "mi o", + "re vit", + "mag ician", + "ç ¥", + "ant enna", + "west wood", + "mber g", + "lux e", + "oat meal", + "Ø ¬", + "te at", + "ffe e", + "sear ches", + "l ly", + "plu to", + "el on", + "let tering", + "inno cence", + "fa i", + "ann on", + "telang ana", + "ma it", + "neu ral", + "can ni", + "ar oma", + "a stor", + "fe x", + "co cac", + "mon etary", + "f ent", + "un sure", + "' @", + "indi rec", + "teh ran", + "isol ation", + "li bs", + "make up", + "merce des", + "ff y", + "he tero", + "de o", + "sco m", + "cur sed", + "veteran sday", + "franken stein", + "shre ws", + "de co", + "ge ese", + "lefto ver", + "ha did", + "vari able", + "acade mics", + "carol in", + "under going", + "vari ation", + "na h", + "ssi er", + "gamer sunite", + "pur suing", + "emer ged", + "ll ers", + "control ling", + "ro aring", + "mete or", + "vol t", + "daw gs", + "be aver", + "is life", + "bathro oms", + "aci onal", + "pre vent", + "lake district", + "in als", + "y ani", + "gra bbing", + "sac ks", + "le z", + "sw ay", + "k ool", + "time s", + "klo pp", + "la de", + "con cord", + "resul ted", + "revi ve", + "recon ciliation", + "ol and", + "az z", + "gir o", + "mand arin", + "de en", + "nutriti onal", + "is coming", + "van i", + "aw www", + "der ived", + "love your", + "stop the", + "shou ting", + "nov ak", + "ðŁĻĮ ðŁı¾", + "lo af", + "displa ying", + "sunday with", + "ma guire", + "ch eri", + "ðŁı Ł", + "re match", + "qu ic", + "Ú ©", + "y in", + "ðŁĺ ¹", + "ili ve", + "z ip", + "our ke", + "down loads", + "sw at", + "missi ss", + "care rs", + "t ment", + "proper ty", + "hahahaha haha", + "gi bbs", + "sur rey", + "ar ise", + "tic ism", + "sti a", + "ir ling", + "fro g", + "co se", + "bas sist", + "fore ig", + "lea u", + "pil lows", + "hol la", + "eli 
e", + "disclo sure", + "peanu ts", + "inte ch", + "ww c", + "plun ge", + "trium ph", + "cor i", + "sli ppers", + "ðŁĻı ðŁĻı", + "neutr ality", + "ma re", + "hair y", + "gang ster", + "hu mming", + "cust ard", + "mer lin", + "ale a", + "s by", + "dam p", + "mo han", + "ver bal", + "j st", + "gu tted", + "b jor", + "un finished", + "ðŁĩ¯ðŁĩ µ", + "un happy", + "âļ« ï¸ı", + "by pass", + "at su", + "fis cher", + "sa v", + "afric ans", + "re use", + "mid way", + "demo lished", + "ger rard", + "her cules", + "Ä Ł", + "medic ines", + "cl icking", + "sur round", + "jo ong", + "wav ing", + "tri bes", + "wet lands", + "offici el", + "argu ing", + "l le", + "do va", + "su zy", + "club house", + "ne gro", + "ob tain", + "ga o", + "gl ance", + "assi st", + "ch os", + "ãĤ ¢", + "âĺ ķ", + "adri d", + "occur s", + "st ans", + "par don", + "livel i", + "emplo yed", + "re visit", + "ff xiv", + "bb le", + "ne aring", + "min er", + "ðŁĺ ¹", + "giov anni", + "up to", + "mar vell", + "mar se", + "to wels", + "cb n", + "engine ered", + "y elling", + "spart an", + "si ans", + "ðŁĻĮ ðŁı¼", + "se v", + "coyo te", + "sta di", + "t cm", + "app en", + "shenan igans", + "open access", + "so aked", + "ma squ", + "le vine", + "stro kes", + "l k", + "aparthe id", + "hipho p", + "char don", + "may may", + "ha asan", + "stri pped", + "fr o", + "scri ption", + "f ton", + "h f", + "pri sons", + "marsh al", + "ķ ãĤ", + "an cho", + "com promise", + "classi fication", + "buzz feed", + "bblo ggers", + "deser ving", + ") /", + "s way", + "ob o", + "camp ers", + "poder nfamily", + "p oured", + "bri e", + "squir rels", + "se ize", + ": #", + "le k", + "ti mb", + "st acy", + "nas daq", + "repe atedly", + "br at", + "mi ghty", + "competit or", + "mah one", + "de si", + "o ke", + "bm w", + "shi e", + "f cb", + "cheape st", + "minim alist", + "par amount", + "n ate", + "har as", + "insan ity", + "lat eral", + "ment ality", + "mo zam", + "ta pped", + "yad av", + "u sp", + "b way", + "the od", + "bil t", + "ra ids", + "em press", + "adap ted", + "pat ron", + "nut shell", + "ag ra", + "be aded", + "sundaywith marsha", + "vi king", + "proce ed", + "main tained", + "thinkbig sundaywithmarsha", + "sn es", + "mus ica", + "to wer", + "ch ab", + "bo k", + "sm t", + "insul t", + "harve sting", + "windo w", + "ru ther", + "be ige", + "dec al", + "indic ate", + "ma iling", + "ri ft", + "po le", + "ander son", + "ch oral", + "sp ride", + "l ili", + "ev elyn", + "imrankhan pti", + ".... 
\"", + "ke red", + "un dp", + "water falls", + "se ars", + "le mans", + "world series", + "ri el", + "ani e", + "app ar", + "score rs", + "lam p", + "a than", + "phys icians", + "qu inoa", + "refu sing", + "vu itton", + "unle ash", + "s la", + "pat i", + "shou ts", + "inten tions", + "fo amed", + "europe an", + "neighbor hoods", + "me er", + "man son", + "du h", + "br at", + "con es", + "bow l", + "kazakh stan", + "ठ¿", + "in appropriate", + "del hi", + "ketch up", + "ful ton", + "s ys", + "consul t", + "gar field", + "to go", + "f ml", + "f led", + "b ds", + "facilit ate", + "ree bok", + "selfi e", + "elev ate", + "activ ate", + "bi ble", + "ca wx", + "b ys", + "cam ille", + "sy ou", + "sk ool", + "her t", + "w bc", + "ple dges", + "recor der", + "po sh", + "ac re", + "so aking", + "mat il", + "v sco", + "shoot ings", + "pla r", + "e con", + "ðŁĻĮ ðŁı»", + "rashi d", + "u bi", + "ðŁ¤ ¤", + "sw inging", + "wi pe", + "rap tor", + "m su", + "music video", + "dur ham", + "at tic", + "apar ty", + "fe tus", + "activ ation", + "aa z", + "motiv ate", + "ðŁĴķ ðŁĴķðŁĴķ", + "j al", + "ठ®", + "ag on", + "sche er", + "stal ker", + "fo ster", + "az zo", + "tele gram", + "vi gor", + "s laugh", + "screen shots", + "entrepre neu", + "kri stin", + "inten tion", + "ch illi", + "fr action", + "don a", + "ge a", + "tc u", + "s ite", + "la k", + "em il", + "d nt", + "bor o", + "wil kinson", + "re cu", + "ato day", + "t anya", + "bl anco", + "cd n", + "brilli antly", + "g cc", + "ac c", + "evacu ated", + "ther ine", + "den ny", + "cait lin", + "she pard", + "pou ch", + "hand held", + "sou theastern", + "ha a", + "à ´", + "re solutions", + "led ger", + "sr in", + "r ar", + "shat tered", + "chim ney", + "im with", + "mete or", + "hand led", + "ra ke", + "town send", + "en han", + "shi py", + "duc t", + "tw x", + "inflam matory", + "war hammer", + "theat rical", + "gro s", + "sk ar", + "sco tty", + "ni el", + "tit o", + "tin i", + "conne ction", + "_ .", + "goldeng lobes", + "sha q", + "ðŁı ³ï¸ı", + "hall way", + "fron ts", + "effec tiveness", + "gla ston", + "d hs", + "ex pi", + "to h", + "c pl", + "sc s", + "re o", + "ha g", + "resemb lance", + "hor an", + "abu sive", + "qu er", + "virtu e", + "cho lester", + "a q", + "shan e", + "m ce", + "carri ers", + "di stress", + "re wind", + " ¡", + "voo doo", + "int act", + "ann o", + "ðŁĺ ¤", + "pi led", + "adi a", + "ãĥ ³", + "en ow", + "di gs", + "light ly", + "goo fy", + "turb ine", + "governor s", + "con te", + "re open", + "pa h", + "i ve", + "cra fting", + "swee ps", + "jo di", + "an de", + "zu cker", + "kaw aii", + "o ko", + "v ai", + "out line", + "kri sti", + "ts n", + "insp o", + "qu int", + "fil thy", + "lyn ne", + "listen ers", + "depar ting", + "or d", + "t weed", + ", &", + "ale k", + "sel fish", + "nor ther", + "recogni zes", + "i ps", + "be s", + "a ed", + "w ills", + "pe at", + "surround ings", + "mon uments", + "ais le", + "be cker", + "la v", + "quant ity", + "v ah", + "helicop ters", + "tu cked", + "alv arez", + "sha pe", + "o bey", + "ad diti", + "road side", + "m ite", + "bl ers", + "ep age", + "j au", + "ignor ant", + "b ins", + "lu lu", + "x o", + "c fo", + "ee eee", + "apprentice ship", + "shef fiel", + "to i", + "ho k", + "faken ews", + "deplo y", + "aid an", + "husk ers", + "ãĢ İ", + "west brook", + "mi ster", + "confi gur", + "car r", + "fic a", + "proceed ings", + "ha w", + "ste ak", + "mur derer", + "pay day", + "a jo", + "p vc", + "don ates", + "bi af", + "nom nom", + "be it", + "k ali", + "x rp", + "ahmed abad", + "se mic", + "che y", + 
"x tra", + "an twer", + "head lining", + "squ ares", + "roun ded", + "flu ore", + "bol d", + "disa sters", + "am oo", + "gener ic", + "cran es", + "brief ly", + "gi g", + "auster ity", + "anticip ation", + "for ti", + "treas urer", + "cann y", + "ce cil", + "dete cted", + "check list", + "ภ§", + "pam ela", + "bar bados", + "an field", + "hear ty", + "tx lege", + "peren ni", + "arro g", + "ing ram", + "âĹ ı", + "ty ne", + "spo on", + "r ation", + "am ba", + "m be", + "cam el", + "h hs", + "york shire", + "reflec tive", + "fre aks", + "to k", + "ju do", + "partic les", + "du bs", + "ban jo", + "accred itation", + "prover bs", + "over dose", + "inte gral", + "gu ang", + "mc s", + "super car", + "af b", + "al vin", + "ail s", + "x tre", + "st aging", + "tw ent", + "rabb its", + "mar o", + "inste m", + "dol l", + "cr ay", + "sant ana", + "ble ach", + "mini ons", + "che ap", + "man t", + "di vers", + "catal onia", + "lo is", + "mat ri", + "cou gar", + "kay ak", + "e gre", + "p so", + "a ia", + "å ®", + "char lton", + "tr acked", + "sc ari", + "pe tt", + "f wd", + "x in", + "gra vel", + "br ic", + "bigg boss", + "ar den", + "hu gging", + "pal ms", + "st v", + "li mb", + "the movie", + "handic ap", + "ri me", + "z ai", + "stu b", + "indi a", + "lithu ania", + "rhy th", + "p ita", + "maced onia", + "high ered", + "brid get", + "schwar z", + "ske let", + "hi kes", + "ant arctic", + "c ps", + "mash up", + "Ð °", + "n ell", + "chand ra", + "he ir", + "an us", + "sher idan", + "mi mi", + "muse u", + "bec ca", + "an ir", + "bar rie", + "dioce se", + "compar able", + "ðŁı³ï¸ı âĢį", + "yuk on", + "me p", + "hor mon", + "mer ic", + "al f", + "con quered", + "christ church", + "ðŁĴĻ ðŁĴĻ", + "hazard ous", + "poo h", + "cont ing", + "retro spective", + "par ame", + "na ir", + "con sor", + "ho tra", + "astoni shing", + "cater pillar", + "u man", + "ti sm", + "t vs", + "serv ic", + "croy don", + "mor ales", + "c g", + "cu m", + "te ur", + "scan ada", + "s all", + "magno lia", + "el ise", + "th our", + "à® ¿", + "ag omez", + "phel ps", + "ë°©íĥĦìĨĮëħĦëĭ ¨", + "wh os", + "weav ing", + "si sd", + "pro poses", + "cro ws", + "pre sale", + "econom ies", + "bernar do", + "sha hid", + "air show", + "mc cann", + "hor ticul", + "nr l", + "du el", + "mongo lia", + "tou lou", + "requi rement", + "struc tured", + "ed i", + "o lives", + "he a", + "cu ter", + "Ð º", + "enthusi ast", + "harri et", + "domin ion", + "sub mer", + "ðŁį ĥ", + "sa ab", + "nes burg", + "mo ff", + "def ended", + "bur t", + "rewar ded", + "gold man", + "op tics", + "khali d", + "house holds", + "buc kets", + "ce cil", + "che ss", + "substan tial", + "ef l", + "oper ation", + "evalu ate", + "st n", + "rece ssion", + "l ll", + "tom as", + "tru ths", + "ak bar", + "s words", + "p act", + "embarra ss", + "ha o", + "ay urve", + "scrip ture", + "ny cc", + "op t", + "di ameter", + "sc ented", + "organi zers", + "re lat", + "ha e", + "dream ers", + "de se", + "ðŁĮ »", + "restric ted", + "n ale", + "r hp", + "dol an", + "mun ster", + "ha ired", + "consult ants", + "jo ints", + "hu mil", + "d ill", + "relent less", + "t é", + "af il", + "ut ilities", + "japan ese", + "condem n", + "pet ite", + "colli de", + "q f", + "peach es", + "cou rier", + "l ore", + "âĺİ ï¸ı", + "reli ability", + "ch uk", + "ðŁĻ ĥ", + "stu res", + "ge ther", + "ho stel", + "bi er", + "- _-", + "â ĩ", + "e ze", + "ta ilo", + "di ent", + "blu ff", + "chu ffed", + "pil ip", + "mon arch", + "e em", + "bu chan", + "b ick", + "op au", + "ku ps", + "ภ¢", + "pist ons", + "sp ins", + "m and", + "ce 
st", + "bur ne", + "v ile", + "cher ries", + "bec kett", + "need les", + "pan ch", + "ë Ĥ", + "haha h", + "trou bles", + "insi sts", + "do you", + "g mc", + "mor tar", + "deleg ate", + "in n", + "g anda", + "sin atra", + "ठ¤", + "spee ding", + "pu pil", + "pre mises", + "ali gnment", + "pi kach", + "as us", + "j alan", + "Ø µ", + "lime stone", + "fol kl", + "parme san", + "ce il", + "mo y", + "shawn mendes", + "ac up", + "hu st", + "ot es", + "med ina", + "ma di", + "gta v", + "censor ship", + "ar g", + "swe eney", + "sy kes", + "col o", + "foot steps", + "cann ed", + "adv ance", + "gta online", + "healthy living", + "ðŁį ¾", + "a ig", + "p ality", + "oc s", + "he brew", + "im minent", + "berk shire", + "jeremi ah", + "out going", + "bak er", + "entr ata", + "ma ids", + "gro ves", + "bo c", + "a del", + "m fw", + "con science", + "arm ys", + "nut ella", + "conte stalert", + "novel ist", + "la h", + "ban ker", + "marque z", + "ðŁı ¡", + "to ff", + "out age", + "gr p", + "ðŁĺŃðŁĺŃ ðŁĺŃðŁĺŃ", + "musc le", + "du dley", + "nvi dia", + "mi di", + "m uni", + "ess ays", + "dat ac", + "car ter", + "ภ£", + "t ans", + "i ves", + "public ations", + "al er", + "ok wx", + "il u", + "cu tt", + "har p", + "out law", + "luther an", + "br ill", + "bo lic", + "do well", + "green land", + "be sties", + "path i", + "pay ton", + "gue st", + "har den", + "ðŁ¤ ©", + "ann ed", + "evacu ation", + "po ised", + "mc der", + "b han", + "o i", + "envel ope", + "ci d", + "ca vi", + "ta pas", + "book review", + "grey hound", + "âĻ ª", + "fe ud", + "lun gs", + "for te", + "rai der", + "ff er", + "oni x", + "dep end", + "yn wa", + "rel ating", + "de vs", + "ðŁĴ IJ", + "acqui res", + "d ha", + "j yo", + "priv ati", + "can ine", + "k b", + "cra b", + "sar din", + "imag ining", + "k j", + "em por", + "down hill", + "ne z", + "ta eyeon", + "nick imin", + "gb p", + "à µ", + "w ap", + "sec co", + "ma shed", + "ðŁĴ¥ ðŁĴ¥", + "augu stine", + "diss ol", + "dic tator", + "â ĵ", + "vi per", + "ed fringe", + "vau x", + "hard work", + "book let", + "no x", + "chi ff", + "ðŁĴ ¨", + "observ ations", + "xbox one", + "u sher", + "ke er", + "lu p", + "dal las", + "cal gary", + "ma dra", + "di ous", + "k bs", + "wood ward", + "hero ine", + "lu mber", + "sea world", + "o ws", + "mc ke", + "maver ick", + "gu la", + "cross roads", + "fan g", + "s ade", + "nik ol", + "chee tah", + "me c", + "pp g", + "er ick", + "ðŁİ µ", + "tox ic", + "bj j", + "viol a", + "sp ire", + "ch ino", + "tra vis", + "institu tional", + "ha as", + "low ry", + "w ac", + "ea e", + "hu mid", + "mp ton", + "ru ck", + "je w", + "c ine", + "zim mer", + "se f", + "bhar at", + "fre es", + "aam ir", + "ðŁĴ ħ", + "z inc", + "wan e", + "multi player", + "royal wedding", + "e el", + "preci pit", + "qu ery", + "kimber ly", + "isa bel", + "ful fill", + "ig an", + "vau l", + "pan e", + "sc y", + "dig it", + "gun n", + "u tah", + "dog day", + "fi on", + "xia omi", + "da c", + "el ast", + "cha vez", + "ro blo", + "g ine", + "ten th", + "ab h", + "ke to", + "hur dle", + "na dia", + "memorab ilia", + "ha bs", + "qu an", + "h w", + "hv ac", + "pix ar", + "ec cle", + "kram er", + "accu ses", + "ðŁĴļ ðŁĴļ", + "per se", + "mean time", + "wa hl", + "atle tico", + "âĢ¢âĢ¢ âĢ¢âĢ¢", + "ott oman", + "no vo", + "k us", + "conne cted", + "tru sts", + "d mv", + "spen cer", + "rahu lg", + "do ve", + "sto kes", + "bolog na", + "enthusi asts", + "à ª", + "rockstar games", + "ted cruz", + "du ras", + "s acked", + "late x", + "immer sive", + "cer t", + "lu cin", + "princi pals", + "fa res", + "sa ils", + 
"far n", + "am ent", + "saf fron", + "quent in", + "check point", + "fer ris", + "ex cur", + "ðŁijī ðŁı¼", + "bai ley", + "se h", + "ter re", + "mad am", + "s band", + "wan derers", + "cumber batch", + "yy c", + "digit ally", + "blackandwhite photography", + "roll in", + "moroc can", + "ðŁĮ ħ", + "din ner", + "d well", + "to om", + "m ye", + "ez ra", + "cp fc", + "war hol", + "me er", + "jon ah", + "no aa", + "s gate", + "so on", + "secu lar", + "g ating", + "ti o", + "dri ver", + "si ssy", + "assan ge", + "ta th", + "ed mund", + "bobc ats", + "ra ji", + "po stage", + "stu ds", + "m gm", + "kat o", + "edin burgh", + "meet the", + "shir t", + "fa a", + "mens fashion", + "sp reads", + "wi m", + "car ts", + "phoe be", + "j ars", + "bot swana", + "Ù Ĥ", + "ed war", + "sk ar", + "ri ve", + "gu sty", + "c tv", + "ferdin and", + "su therland", + "nickimin aj", + "k v", + "si us", + "bee ch", + "re z", + "desi res", + "on ial", + "camp o", + "quar ry", + "lor raine", + "gil more", + "ig gy", + "µ ï¸ı", + "ho pping", + "avi z", + "ðŁĮ º", + "uni sex", + "dedic ate", + "att itudes", + "ste er", + "jun kie", + "rail way", + "y b", + "whi sper", + "key an", + "k us", + "ju g", + "di x", + "a ins", + "sum mon", + "ov ich", + "sy ed", + "her ald", + "ma ison", + "me ded", + "wild flower", + "main land", + "ri sky", + "ru kh", + "over looked", + "ki c", + "destro ys", + "nam an", + "ki p", + "z ano", + "champion sleague", + "ban dit", + "quin cy", + "smi le", + "cal vin", + "open ings", + "ta pp", + "ol ulu", + "spec tro", + "accred ited", + "ap k", + "pra ised", + "bar nett", + "pol len", + "premi ered", + "selen agomez", + "tou red", + "screen ings", + "uu u", + "mis o", + "en se", + "adam lambert", + "guel ph", + "har yana", + "hu tto", + "le ar", + "l tc", + "po ached", + "brex it", + "æ Ŀ", + "tt c", + "pa vement", + "mon gers", + "ro e", + "ad ers", + "ling ton", + "particip ant", + "ca red", + "ga il", + "y ates", + "lan tic", + "dash board", + "jo o", + "feli pe", + "ssi onist", + "bu m", + "s end", + "a eri", + "thu gs", + "luci fer", + "a he", + "dete ctor", + "fil ly", + "gas oline", + "ham per", + "hump day", + "the ta", + "the band", + "fore casts", + "o hhh", + "lo bb", + "hol l", + "cp u", + "az u", + "ad ar", + "hai ley", + "bu b", + "car t", + "quo ted", + "an archy", + "pan cre", + "twit art", + "al den", + "st ash", + "the less", + "or ni", + "belie bers", + "mor mon", + "partic le", + "avi ation", + "⬠Ĩ", + "webcam toy", + "sad dened", + "cru is", + "ham let", + "n ct", + "roll ins", + "marque e", + "saw yer", + "reli ance", + "a ura", + "di ec", + "soo thing", + "sig nings", + "ak is", + "à ³", + "at kins", + "aer op", + "ðŁĮ ¿", + "y ab", + "sh ari", + "con nol", + "du bbed", + "manufac ture", + "convin cing", + "feelthe bern", + "ra u", + "pu lit", + "on ec", + "gem stone", + "ur ging", + "bag u", + "ga h", + "aci ds", + "fi anc", + "zodi ac", + "sn oop", + "her rera", + "initi ated", + "ven ge", + "profess ors", + "pro di", + "stron ger", + "e mission", + "bb a", + "hal le", + "ta pp", + "haw an", + "wh im", + "compe ted", + "myr tle", + "ir port", + "cold play", + "ach e", + "ske p", + "m son", + "ss ic", + "calli graphy", + "swim mers", + "me y", + "pp c", + "thri ft", + "po c", + "re places", + "commu ter", + "âģ¦ âģ¦@", + "go ers", + "lo gue", + "para dig", + "bas kets", + "sensiti vity", + "joh an", + "atl antis", + "& &", + "suit case", + "anxi ous", + "l h", + "str i", + "gal loway", + "stre ad", + "war den", + "gr ounded", + "ffici ency", + "li feat", + "reli c", + "disgu 
ise", + "island ers", + "f cofficial", + "classical music", + "b mc", + "en field", + "bi que", + "oak ley", + "bat man", + "sla ying", + "ner ves", + "mul tit", + "calci um", + "projec tor", + "scott sdale", + "ant ino", + "gri ps", + "kim mel", + "des mond", + "prote stors", + "hi atus", + "metaboli sm", + "conclu ded", + "press er", + "ti pping", + "sli de", + "e to", + "hun ting", + "aus open", + "ri k", + "pp ery", + "innov ators", + "pitch ers", + "ag ger", + "fun gi", + "z ad", + "proli fic", + "rockn roll", + "bl ames", + "ct ar", + "stam ford", + "q ad", + "mozz arella", + "insan ely", + "den ver", + "ph ouse", + "nom ad", + "ï ¿", + "s ris", + "pro du", + "hen ley", + "pag an", + "am trak", + "ru bi", + "in cl", + "tu tor", + "sco tia", + "wo es", + "sing apo", + "fun nel", + "turn bull", + "know ledge", + "gri mm", + "real madrid", + "we are", + "missi les", + "con sol", + "emo jis", + "sne ak", + "smi ths", + "ru iz", + "br ou", + "i el", + "ha ver", + "ðŁĮ ļ", + "kin gof", + "basil ica", + "circul ation", + "prin ters", + "ta pping", + "ri dley", + "dra gged", + "ha j", + "writ er", + "fundament als", + "personal ities", + "me tre", + "stereo types", + "bur le", + "best of", + "n ffc", + "ha th", + "mini stries", + "a ali", + "trac ing", + "pav ed", + "ł ï¸ı", + "g ic", + "insp ire", + "tu g", + "ha re", + "repe ated", + "ex pon", + "lol li", + "rho de", + "pre cin", + "install ations", + "instag ram", + "az ar", + "i es", + "sole ly", + "du kes", + "mission ary", + "van guard", + "fursuit friday", + "on d", + "pol ari", + "ma st", + "har an", + "jos é", + "jack ed", + "ec oun", + "al ities", + "ne ph", + "ra vel", + "moder ated", + "sco w", + "s fb", + "uru guay", + "as o", + "ni g", + "au du", + "p ints", + "lat ina", + "ben z", + "m itting", + "char ted", + "mat ology", + "cit ro", + "biop ic", + "ðŁij Ń", + "djo kovic", + "fox y", + "agu il", + "so to", + "an ada", + "sin king", + "sc rap", + "hair s", + "bethan y", + "fact friday", + "ðŁIJ IJ", + "unlea shed", + ") (", + "contra dic", + "ram on", + "coast line", + "y ong", + "sn sd", + "li gan", + "p ome", + "mit age", + "ge tt", + "wat i", + "ri sk", + "so aring", + "bru sh", + "f pl", + "av an", + "å Ĩ", + "lar son", + "sh ear", + "mul til", + "blu r", + "multi media", + "chun ky", + "par i", + "n ani", + "weir d", + "cholester ol", + "char les", + "dream ed", + "tan ning", + "puzz les", + "fr am", + "hand ball", + "ch ag", + "beli ze", + "al u", + "bang s", + "Ñ Ħ", + "detec tives", + "mc g", + "ish q", + "bo thered", + "saf c", + "mp ing", + "ten eri", + "g ays", + "sail or", + "an gi", + "mul ticul", + "gue ssed", + "ros é", + "high ways", + "bro om", + "chatt anoo", + "- '", + "see ker", + "on ed", + "at f", + "lu c", + "> <", + "bar i", + "per cep", + "jewel ry", + "as ph", + "sor row", + "sl ing", + "mam moth", + "jac kie", + "ë §", + "wilt shire", + "sa o", + "can cell", + "im paired", + "tor ial", + "bre ed", + "guy en", + "jud ice", + "tit le", + "pro spective", + "applic ants", + "ðŁį Ĭ", + "epis cop", + "e id", + "b yo", + "stock ings", + "ðŁĴĥ ðŁĴĥ", + "ll p", + "sna g", + "keep it", + "l ough", + "ol son", + "matur ity", + "!! 
!\"", + "cop ter", + "i sha", + "bl i", + "wil mington", + "tr youts", + "th ai", + "ðŁ¥ ³", + "pe bble", + "kra ft", + "f p", + " º", + "ssi vely", + "li vin", + "contest ants", + "tex tures", + "jo an", + "h dr", + "film festival", + "prov ence", + "wi do", + "op end", + "c si", + "sto wn", + "cro ati", + "ad just", + "host ile", + "analy sts", + "il an", + "cu ppa", + "bru m", + "newfound land", + "good win", + "me tt", + "mall orca", + "plu gs", + "bu k", + "bb hutto", + "wrest le", + "sa ire", + "sho pped", + "for za", + "le head", + "vi vo", + "ba st", + "ro xy", + "reg is", + "hard working", + "hon olulu", + "desp air", + "young sters", + "ni g", + "impro mp", + "roll tide", + "de emed", + "tre ason", + "ru shed", + "for ged", + "ff f", + "pikach u", + "bri ggs", + "do it", + "ac cent", + "la us", + "gla ze", + "compet ent", + "a ho", + "photo g", + "mid field", + "le go", + "har vard", + "min orities", + "re illy", + "slic ed", + "once upon", + "initi ally", + "financi ally", + "landscape photography", + "har dro", + "qu o", + "mm ers", + "par kinson", + "smu gg", + "read iness", + "bru tally", + "glou cester", + "mp ed", + "bbhutto zardari", + "mur der", + "ye d", + "dat aviz", + "sr t", + "dow ning", + "bi ans", + "m ü", + "fle ck", + "fli pped", + "s ly", + "brilli ance", + "ri m", + "k um", + "bubb a", + "ko i", + "knit ted", + "sor g", + "ma is", + "ðŁĮ ²", + "ti ss", + "su stain", + "sen su", + "ak han", + "zi est", + "exam ines", + "chardon nay", + "user name", + "short list", + "re bs", + "on o", + "dar ing", + "hard wood", + "che que", + "righte ous", + "light ening", + "dir k", + "shra dd", + "du ra", + "down stairs", + "sh al", + "ami gos", + "ru ff", + "s law", + "ri es", + "red nation", + "man us", + "ðŁĩ§ ðŁĩ·", + "distin ction", + "u bun", + "dur an", + "mi gra", + "thi ans", + "la ver", + "domest ic", + "k x", + "jaz zy", + "justi fy", + "belong ing", + "insul ation", + "color stv", + "drun ken", + "chann eling", + "qu and", + "xi ii", + "enligh ten", + "kan o", + "fati ma", + "teen choice", + "terri fied", + "p ba", + "as ley", + "met museum", + "dun e", + "pack er", + "ki o", + "ðŁĴľ ðŁĴľ", + "bo iler", + "fas cism", + "ar mored", + "back grounds", + "in mates", + "embarra ssed", + "defin es", + "th d", + "we go", + "silic one", + "lo on", + "el ding", + "bor rowed", + "he mp", + "ak sh", + "kaw asaki", + "br y", + "de af", + "kill er", + "dispo sal", + "ðŁĩ °", + "glaston bury", + "un covered", + "o xide", + "po ff", + "d ant", + "k j", + "ku ro", + "dri zzle", + "peop les", + "fe e", + "pro pri", + "dd lovato", + "pi ggy", + "ot is", + "aller gies", + "u bis", + "pengu in", + "ser a", + "vi z", + "prosp erous", + "ici des", + "tornad oes", + "sene gal", + "web cast", + "sto red", + "enchan ted", + "bb cone", + "bay area", + "entrepreneu rial", + "rednation rising", + "experim enting", + "ang an", + "lot to", + "they re", + "por e", + "er p", + "seren e", + "east wood", + "bro kers", + "bar ge", + "stal lion", + "timber lake", + "tailo red", + "dy stop", + "b ate", + "lat ors", + "di xit", + "bran son", + "dynam o", + "ky lie", + "shame ful", + "bt wn", + "spring time", + "mix ture", + "s ounded", + "lu ton", + "dad es", + "mal a", + "op ra", + "en ic", + "rahulg andhi", + "se wer", + "~~ ~~", + "ky u", + "nor theastern", + "ca er", + "bc u", + "nir vana", + "kitch ens", + "ous y", + "al m", + "river dale", + "hid den", + "fl int", + "sp d", + "pat rons", + "katy perry", + "au gh", + "exhib itions", + "sm c", + "shu ts", + "at ore", + "da in", + "some thing", + 
"ber th", + "bo g", + "por ter", + "gen to", + "con cussion", + "ang lic", + "ro we", + "gr illing", + "scar lett", + "master ing", + "mor nin", + "comm ented", + "si me", + "si zing", + "christ y", + "ce os", + "st m", + "at ry", + "tari ffs", + "vac ation", + "pre judice", + "p su", + "paren tal", + "far age", + "can a", + "cap com", + "koso vo", + "you re", + "men stru", + "stal in", + "grape fruit", + "br an", + "che sa", + "dav en", + "exc el", + "!! )", + "๠Į", + "distribu tor", + "ce a", + "bride sma", + "millenni al", + "wa in", + "ob serving", + "mis ery", + "plan etary", + "expo sing", + "bra ised", + "comp ton", + "don gha", + "q l", + "spring steen", + "th ul", + "syl ve", + "cab o", + "pal ad", + "niel sen", + "gaz ing", + "ba ja", + "r oud", + "orchi ds", + "johan nesburg", + "se man", + "d ji", + "oper ative", + "affe ction", + "eclec tic", + "at c", + "mut ant", + "aw x", + "nic e", + "mel bourne", + "indu lg", + "tu lip", + "dias pora", + "wel p", + "big gie", + "mississ auga", + "retri ever", + "or an", + "tam my", + "c ta", + "hipp o", + "seas oned", + "ger mans", + "eng v", + "marvell ous", + "im f", + "rela ys", + "mon tan", + "maur iti", + "me ister", + "as surance", + "reig ning", + "su fficient", + "han e", + "no thing", + "pos se", + "nav y", + "in love", + "brigh ton", + "en qu", + "ch ung", + "sweat y", + "es c", + "cal ed", + "man s", + "nicar agua", + "sl ices", + "mo cha", + "washington post", + "bb n", + "dam ned", + "grow ing", + "en burg", + "lo an", + "me s", + "wh oops", + "believ ers", + "spi el", + "vo daf", + "l at", + "s led", + "cricke ter", + "brown e", + "golf ers", + "bar ra", + "wat chers", + "lu igi", + "sw amy", + "mom s", + "pit ched", + "san tor", + "cr s", + "si re", + "sc amp", + "bo de", + "ste war", + "jon ny", + "ent ity", + "pac qui", + "mind ful", + "min india", + "bear ded", + "temp t", + "scorpi on", + "eat on", + "authori zed", + "ar to", + "s vp", + "op athy", + "cch ini", + "house music", + "disney world", + "âĢĶ @", + "pro pose", + "di y", + "expen se", + "ten g", + "pupp ets", + "sm el", + "d aca", + "per ry", + "fin n", + "boo sting", + "lefto vers", + "cou gs", + "satell ites", + "man y", + "az e", + "g ong", + "fi e", + "metho do", + "fer ries", + "ðŁ¤Ķ ð٤Ķ", + "explore rs", + "load er", + "attrac ted", + "il ton", + "godd amn", + "pi azza", + "doc tr", + "sav ing", + "paragra ph", + "visu alization", + "may ors", + "work flow", + "ack les", + "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ", + "ठ¸", + "twer k", + "clu t", + "lo ver", + "te ases", + "si an", + "o te", + "deter ior", + "accor d", + "l fw", + "swar ovski", + "nat al", + "tra ps", + "k ina", + "analy ze", + "laye red", + "bever ages", + "un it", + "ran som", + "pe shaw", + "dest ined", + "astro logy", + "si pping", + "miley cyrus", + "cam ino", + "marshmal low", + "bli ss", + "out back", + "fa q", + "int oler", + "humil ity", + "po ppin", + "hallo ween", + "mon tene", + "op hy", + "nu n", + "tattoo ed", + "a as", + "ðŁĮ ³", + "dale y", + "qual ity", + "du sa", + "fisher men", + "swi f", + "ter rac", + "st au", + "le in", + "trol ling", + "ship ment", + "garden er", + "march madness", + "head band", + "gr t", + "bur nett", + "w and", + "!!!! 
!!!!!", + "gh e", + "du x", + "hu d", + "war ner", + "ðŁĩ ¦", + "ex ile", + "rescu e", + "rat a", + "d han", + "duc ati", + "dro wn", + "bl ends", + "spi e", + "alli gator", + "simul taneously", + "broo ke", + "u ke", + "k har", + "comm union", + "ri ka", + "ford fc", + "chin atown", + "you rown", + "me y", + "can al", + "syste matic", + "de pri", + "ox ford", + "an il", + "w ut", + "equ ation", + "be z", + "fle ur", + "the good", + "lang ley", + "ad ity", + "ed ith", + "al fie", + "о ÑĤ", + "en cry", + "br ill", + "ex emp", + "ce sar", + "mb ling", + "ab ri", + "sc icom", + "j ing", + "school ing", + "mi ka", + "mechan isms", + "impromp tu", + "rhe a", + "moo re", + "crime a", + "be sto", + "wri ght", + "el ders", + "ro ds", + "kam al", + "folkl ore", + "be et", + "mini on", + "reli eve", + "thr o", + "team usa", + "pas cal", + "made with", + "boli via", + "itt i", + "free bies", + "desi red", + "best selling", + "l iness", + "la den", + "ke ane", + "mi sts", + "hipp ie", + "atta chment", + "@ /", + "se w", + "flan agan", + "âĿĹ ï¸ı", + "supre mac", + "stl cards", + "si as", + "q u", + "rh ys", + "ste ep", + "val leys", + "v w", + "pav ing", + "disp at", + "al ison", + "por te", + "id u", + "new sc", + "soc ket", + "mo s", + "co star", + "re vo", + "prote ins", + "stanley cup", + "m cal", + "ear ring", + "se cs", + "mc lean", + "cap ric", + "nick elo", + "ad en", + "v c", + "shou se", + "adap tive", + "maxi mize", + "entertain er", + "pro se", + "gri ffi", + "six teen", + "lam ar", + "mi rage", + "saudi arabia", + "awe ather", + "ru st", + "in filtr", + "fashion week", + "ðŁĺĬðŁĺĬ ðŁĺĬ", + "selec tive", + "bubb le", + "a den", + "fen nel", + "deci sive", + "m ta", + "mock ing", + "mb les", + "st amp", + "mu le", + "bernar do", + "gr in", + "po tt", + "j ingle", + "vet tel", + "colom bian", + "cam o", + "motivation monday", + "ba han", + "p ly", + "dh ary", + "k ami", + "x men", + "sleep er", + "gar a", + "my sti", + "confi dential", + "conflic ts", + "p neu", + "ce s", + "insur tech", + "clean se", + "me rely", + "va is", + "tu x", + "the great", + "shar on", + "ma j", + "hol a", + "eco systems", + "aj ay", + "aa j", + "hu sh", + "har mon", + "backto school", + "wiki leaks", + "reflec ted", + "ðŁĺ ĵ", + "commemor ating", + "ac et", + "buck ingham", + "messi ah", + "tu ous", + "hor net", + "to be", + "d q", + "he ine", + "mi g", + "pl ate", + "nichol son", + "sp ie", + "cumber land", + "nor mal", + "pho bia", + "happy halloween", + "city fc", + "mc el", + "gilli an", + "ke to", + "lu de", + "de mise", + "su ga", + "str ate", + "mcgr ath", + "visit scotland", + "foo led", + "cb r", + "gc se", + "col ori", + "po td", + "missuni verse", + "fin ances", + "ma poli", + "for ks", + "Ø ´", + "cann on", + "medic inal", + "ðŁĹ ĵ", + "kh o", + "wre ck", + "pan to", + "bag el", + "gu ll", + "syndic ate", + "ic y", + "pr c", + "ki en", + "zi ka", + "ti sh", + "pe ta", + "c co", + "li za", + "ch ut", + "ex traction", + "el g", + "gl i", + "fu eled", + "pos it", + "respec tively", + "leice ster", + "br ink", + "vulner ability", + "im ported", + "e sha", + "ðŁ¦ ħ", + "r ural", + "re ll", + "gam ing", + "atlan tic", + "aband on", + "no ah", + "re solved", + "pro state", + "aller gic", + "ps d", + "âĺ ¹", + "dun geon", + "fang irl", + "illumin ated", + "m hs", + "white sox", + "d ently", + "ck o", + "endor se", + "over ly", + "dazz ling", + "prior iti", + "night life", + "ut il", + "be have", + "flam en", + "east bound", + "ðŁĴ Ł", + "ilove you", + "gov uk", + "mozam bique", + "alle gi", + "dr i", + "testim 
onial", + "ath s", + "ì§ Ģ", + "mm y", + "shab by", + "pro secco", + "friend ships", + "cal am", + "dam ages", + "off set", + "jura ssic", + "jun o", + "arre ll", + "ðŁĴ ©", + "interven tions", + "dare devil", + "car ver", + "run away", + "ran e", + "truste es", + "ha ute", + "dep ths", + "ðŁİ Ń", + "me in", + "sacrific es", + "con cier", + "ne sting", + "i zzy", + "me tam", + "ilove my", + "ur ine", + "du lu", + "mal hotra", + "ve ins", + "night ly", + "co at", + "an di", + "he witt", + "lon el", + "ci ble", + "wr ite", + "jen nie", + "sant ac", + "ĸ ï¸ı", + "str ato", + "singapo re", + "sop rano", + "kri sten", + "cheer ful", + "flee twood", + "fa iri", + "m eli", + "wa st", + "tur nt", + "sfor sale", + "sc rolling", + "angel ina", + "ren dition", + "jeric ho", + "nick y", + "or b", + "fla vo", + "patri ot", + "ash eville", + "sick ness", + "re fund", + "aggre ssion", + "b pl", + "ãĥ ĥ", + "elu sive", + "thi story", + "hang er", + "bu ffs", + "vil las", + "at kinson", + "sp h", + "ja it", + "decl ined", + "wo k", + "supre macy", + "oo tball", + "ey ang", + "ðŁİ ĵ", + "s ford", + "ath i", + "consu me", + "road ster", + "e so", + "u pro", + "reci pe", + "au f", + "uc i", + "ar on", + "oo oh", + "cs go", + "re ich", + "mc d", + "min ute", + "ladi es", + "pun k", + "rut gers", + "mee k", + "ariz on", + "ta j", + "land lord", + "de gra", + "autu mn", + "lyn x", + "us f", + "b hi", + "fairy tale", + "dongha e", + "bet sy", + "explo ded", + "chen nai", + "op a", + "pro tag", + "br ant", + "ðŁĵ °:", + "g f", + "pal li", + "ðŁı¼ âĢįâĻĢï¸ı", + "su t", + "ill ini", + "colum nist", + "shir tless", + "de centr", + "sear ched", + "ec or", + "bu ggy", + "s ack", + "ðŁĺĤ ðŁĺŃ", + "de t", + "ther i", + "or naments", + "bring back", + "to v", + "quarter finals", + "ic he", + "con stra", + "gi er", + "buchan an", + "vi x", + "kay aking", + "mu stread", + "swal low", + "mel b", + "sc af", + "op al", + "may oral", + "har at", + "ðŁ¦ ĭ", + "schedu les", + "id f", + "ha gue", + "ro z", + "a ah", + "d mc", + "du plic", + "ca che", + "orph an", + "frac ture", + "rec on", + "ch av", + "bun nies", + "al ain", + "mustaf a", + "ðŁİ Ļ", + "vac ations", + "dynam ite", + "tex ted", + "broad caster", + "ðŁĴ £", + "ste amed", + "rock er", + "di etary", + "luxury travel", + "inaugur ated", + "sa wards", + "vaugh n", + "lincoln shire", + "click ed", + "kra ja", + "f anc", + "remo ves", + "layo ffs", + "mc far", + "bre eds", + "win nie", + "jon ghyun", + "incen tive", + "vari ations", + "pat ton", + "atur day", + "persist ent", + "pr un", + "pi ers", + "dal es", + "æ ĸ", + "breast feeding", + "r ance", + "ta wa", + "Ĥ âĸ", + "mur doch", + "cap tive", + "thi stle", + "nic a", + "commod ity", + "cou ldnt", + "board walk", + "graci ous", + "practiti oners", + "n gc", + "scru m", + "ner o", + "camoufla ge", + "col on", + "he i", + "phys icist", + "saturday morning", + "ten er", + "si won", + "colum ns", + "bru ne", + "y vr", + "ba ir", + "reti res", + "hal am", + "cab er", + "shaz am", + "min u", + "cas cade", + "milk shake", + "gri d", + "d ren", + "vin cent", + "so dium", + "plat ter", + "cheer leader", + "chen ko", + "y ak", + "elimin ated", + "ty po", + "y man", + "re think", + "âĿ Ĺ", + "ts ville", + "bernardo kath", + "ex tr", + "ðŁĺģ ðŁĺģðŁĺģ", + "ta o", + "re per", + "mo ths", + "em powered", + "c iting", + "transpor ted", + "mon ks", + "san at", + "cle ars", + "bachelore tte", + "camp bell", + "racha el", + "har le", + "hand ler", + "climb s", + "inter ference", + "rele ase", + "sh and", + "r bs", + "hr h", + "ãģ ª", + 
"val le", + "r é", + "sli me", + "w akes", + "chu bby", + "slo an", + "el ves", + "ath en", + "attor neys", + "micro scope", + "ston er", + "sc aling", + "o be", + "c out", + "se man", + "mid week", + "bal sam", + "ðŁĺį âĿ¤", + "ti ful", + "v ish", + "lo tta", + "ri pping", + "re mn", + "ti re", + "le ap", + "ha vent", + "la by", + "hi mach", + "whisp ers", + "we in", + "ðŁİ ¸", + "wild flowers", + "se le", + "u cc", + "li ability", + "az ine", + "sw ings", + "k ya", + "ta ir", + "re main", + "e do", + "flo ps", + "poc ket", + "grand ad", + "exam iner", + "gr is", + "ffe ct", + "ðŁijĬ ðŁı»", + "stud ded", + "heart beat", + "de acon", + "firm ly", + "infec tious", + "ste f", + "out lines", + "le asing", + "cla ws", + "sen se", + "tab s", + "hoo t", + "mo sul", + "spa wn", + "co a", + "hog warts", + "ve in", + "alban ia", + "manu el", + "b ino", + "vaux hall", + "scot land", + "go bucks", + "mat ty", + "phy sio", + "tor ino", + "const able", + "investig ated", + "s lower", + "mistak en", + "bay er", + "wild fires", + "vo ic", + "x on", + "time to", + "chas sis", + "bar ric", + "pi on", + "bald head", + "woo k", + "regi str", + "dra fts", + "b hs", + "li gue", + "l ick", + "staf fordshire", + "baf ta", + "dar ry", + "je anne", + "ven ding", + "cor p", + "⼠³ï¸ı", + "kid dos", + "fen way", + "ca o", + "west bound", + "ðŁĺ Ļ", + "dv r", + "quick er", + "bla h", + "goo die", + "ðŁĴĭ ðŁĴĭ", + "vo x", + "esp er", + "fac ade", + "cor relation", + "red bull", + "rou p", + "decl ining", + "chi ve", + "mc gee", + "tur o", + "in der", + "f eller", + "fu g", + "il ysm", + "mar di", + "peshaw ar", + "ki eran", + "ine ma", + "meat balls", + "pe ck", + "depre ssing", + "sen sing", + "gi z", + "dd ington", + "spring watch", + "ro aming", + "yellow stone", + "horse shoe", + "am man", + "week day", + "ol or", + "ðŁ¥ °", + "boo sts", + "spr int", + "scar ves", + "je e", + "bee tro", + "cl an", + "all the", + "ìĦ ¸ë", + "enlighten ment", + "ado be", + "re generation", + "? @", + "cont ag", + "yach ts", + "to u", + "mor a", + "en voy", + "r ani", + "go li", + "dhanush kraja", + "wood working", + "streng ths", + "se di", + "disc s", + "ar ina", + "sc on", + "lit e", + "ano ther", + "ðŁ¥ Ĭ", + "ye men", + "gu ern", + "sav vy", + "lo yed", + "biom ed", + "heart break", + "comra des", + "milli e", + "pat ch", + "un f", + "jar vis", + "bl aming", + "commemor ation", + "ge y", + "å ¥", + "cardio vascular", + "alig ned", + "docu ment", + ". ?", + "aesthe tics", + "em u", + "the irs", + "le h", + "ps ic", + "si f", + "pl ateau", + "ex pend", + "domin ating", + "rob es", + "mauriti us", + "excep tionally", + "hom er", + "discover ies", + "bra un", + "ten nant", + "insul in", + "ðŁİ ®", + "car bs", + "te as", + "? !\"", + "zi e", + "franco is", + "brow sing", + "th ol", + "cla rence", + "hel per", + "ob tained", + "cas sie", + "le es", + "! 
,", + "pome gran", + "hu bs", + "presti ge", + "] [", + "mach er", + "bott led", + "pun ch", + "pi pe", + "o ch", + "gall ons", + "deliver ies", + "u ra", + "un day", + "mon de", + "depic ts", + "re gency", + "outra geous", + "khal ed", + "car o", + "he arti", + "za g", + "develop mental", + "over coming", + "stati stical", + "flavo red", + "for ds", + "cre atives", + "lau rence", + "di as", + "sun screen", + "in ked", + "pre acher", + "n ul", + "impac ting", + "auti stic", + "âļ Ķï¸ı", + "o ss", + "pel icans", + "cele ste", + "v b", + "ru mp", + "mc gra", + "fair fax", + "hu mor", + "bbc news", + "row ling", + "cal der", + "seam less", + "ag ne", + "p ti", + "mix ed", + "t shirts", + "mer ci", + "b tob", + "women instem", + "genealo gy", + "pre ven", + "l our", + "cra dle", + "gi use", + "Ð ¾", + "chron o", + "fair ness", + "chocol ate", + "tor y", + "as da", + "pre scott", + "stret ched", + "al man", + "u il", + "re charge", + "in tre", + "ob st", + "hosp ital", + "hay ward", + "teneri fe", + "fried man", + "vap ing", + "confe ssions", + "ye ah", + "bal li", + "luck now", + "cor pse", + "sculp tor", + "amp ton", + "t pp", + "indic ates", + "sur plus", + "tru man", + "ðĿ Ļ", + "sin ha", + "in vo", + "sovere ign", + "ke v", + "establi shing", + "engra ved", + "assu ming", + "ðŁı ģ", + "sou za", + "fab i", + "ton ed", + "oun ge", + "del oit", + "dow ney", + "no ble", + "om or", + "car tridge", + "ðŁı IJ", + "u hur", + "hol loway", + "succe sses", + "r sa", + "âĦ ¢", + "ma zz", + "tw d", + "disc ourse", + ". <", + "y at", + "satis fy", + "com pri", + "ठ¹", + "graph ite", + "disser tation", + "ar ter", + "í Ķ", + "b ally", + "zom bi", + "ly ons", + "a ic", + "u bc", + "pra da", + "e il", + "da x", + "cla i", + "grand daughter", + "extravag anza", + "chall enge", + "ðŁ¤ ŀ", + "po ver", + "primar ily", + "dad dy", + "man a", + "bi kers", + "inqui ries", + "da un", + "fel ine", + "gener ative", + "he f", + "benef iting", + "lind sey", + "pol ka", + "demonstr ated", + "al le", + "rand y", + "o su", + "low key", + "weir dest", + "red bull", + "our y", + "n ous", + "wood stock", + "cre denti", + "nic er", + "g ado", + "aly ss", + "ap h", + "prepa redness", + "station ary", + "incorpor ated", + "dy er", + "sarato ga", + "cele sti", + ": \"", + "antibio tics", + "or gs", + "inde fin", + "ap ron", + "и Ð", + "fif teen", + "no f", + "ðŁĶ Ŀ", + "ph x", + "te ga", + "m z", + "organiz ational", + "on air", + "band ung", + "pleas ures", + "mor i", + "secre tari", + "rac coon", + "ca shi", + "pil ates", + "k on", + "geof frey", + "la o", + "kam p", + "depart ments", + "back packing", + "an am", + "à «", + "crack down", + "aun ty", + "on do", + "li zzie", + "ph ers", + "cu n", + "ðŁĩ ±", + "k pop", + "pu t", + "inten tional", + "connol ly", + "bar clays", + "hs fb", + "swin don", + "u ku", + "s ally", + "a int", + "âľ ħ", + "pen ang", + "up lifting", + "epile psy", + "inter ro", + "bun gal", + "go ku", + "blue berries", + "ठ¦", + "u ssia", + "sil ky", + "mou red", + "i stic", + "bri efs", + "me ats", + "go b", + "ch aser", + "state wide", + "pra sad", + "gl itch", + "ar in", + "ban ff", + "memb er", + "ðŁĺŃ âĿ¤ï¸ı", + "lo ving", + "hall a", + "ภ¡", + "smo kers", + "yak u", + "scicom m", + "physi o", + "sw ol", + "lem ons", + "gel ato", + "ch ool", + "capit als", + "ki stan", + "ti ghts", + "spi kes", + "trav ellers", + "ik lan", + "commissi oning", + "ar ine", + "emabiggest fans", + "empha sis", + "front line", + "pad dock", + "destruc tive", + "ba ha", + "l inger", + "je wish", + "shet land", + "mc gin", + 
"mon key", + "ko z", + "s one", + "raj ini", + "te h", + "y en", + "c vs", + "masqu er", + "gir ly", + "we sle", + "was nt", + "bro dy", + "termin ator", + "gil le", + "mag gi", + "bir die", + "jeopar dy", + "cu bic", + "vm ware", + "intric ate", + "an up", + "to pia", + "east on", + "sab res", + "investig ates", + "bu sting", + "bil ingual", + "valent ino", + "in format", + "fer re", + "advent ur", + "hydr ate", + "for sy", + "az iz", + "san to", + "e de", + "whist ler", + "continu ously", + "d ham", + "un used", + "ji had", + "addic tive", + "vi dy", + "do b", + "i do", + "fi ed", + "ni versary", + "n one", + "fu er", + "ðŁĺį ðŁĺĺ", + "coven ant", + "prin table", + "immac ulate", + "o em", + "cl t", + "serv ants", + "consu med", + "un released", + "sc um", + "pack aged", + "me re", + "ìĦ¸ë ¸", + "to by", + "ta f", + "spo ons", + "me al", + "f ball", + "fair field", + "jan et", + "silver stone", + "dart mouth", + "follow me", + "voy ager", + "kom bat", + "anni ver", + "ene w", + "mag dal", + "ho ve", + "sa th", + "grizz ly", + "car di", + "gart ner", + "sand y", + "kan ye", + "post ure", + "po ign", + "im pulse", + "radio logy", + "horiz ons", + "si am", + "aish war", + "= =>", + "no che", + "tr is", + "el yn", + "com me", + "du i", + "ce c", + "councill ors", + "cudd ling", + "creep ing", + "loc ke", + "manag es", + "trans ferred", + "ne cks", + "di er", + "dan o", + "v ick", + "lun ches", + "d he", + "en sures", + "cri ss", + "ul ster", + "bann on", + "cont enders", + "sp am", + "sweet ness", + "med al", + "hon duras", + "arc tic", + "ultra sound", + "in fr", + "disco vers", + "ei ffel", + "ca sters", + "ru ben", + "du st", + "awe ed", + "atri um", + "lest we", + "se ared", + "ðŁĵº :", + "ty ne", + "ex changes", + "little mix", + "l le", + "astron auts", + "hersh ey", + "work day", + "kno b", + "so v", + "re signs", + "today show", + "der man", + "an th", + "af c", + "ta ster", + "sw oo", + "sa eed", + "per ing", + "narrow ly", + "rn li", + "best buy", + "panas onic", + "obst acle", + "farmer s", + "ðŁİ Ļ", + "pa wan", + "ki est", + "ang ers", + "absur d", + "oh my", + "sin o", + "pist achi", + "sp ice", + "giu li", + "prime time", + "ko w", + "k ens", + "ex agger", + "! ?!", + "u ba", + "midd les", + "ju dd", + "e jec", + "slam med", + "pen sions", + "of a", + "re create", + "b hp", + "xx l", + "liver pool", + "thre sh", + "pur ity", + "ni eu", + "hol ics", + "wr ath", + "ra do", + "gli o", + "am ma", + "dile mma", + "cr u", + "lets go", + ".... 
@", + "âĿ ĵ", + "sugge sting", + "tru mps", + "hor us", + "f v", + "ic om", + "refer ring", + "predic tive", + "tar ts", + "ge tte", + "so ck", + "glo ssy", + "pin ky", + "al ec", + "thy me", + "ou ra", + "thero ad", + "pe tr", + "cr am", + "p fi", + "dv n", + "me ier", + "incen tives", + "tun nels", + "mobi l", + "rec ap", + "extra s", + "upri ght", + "rev amp", + "per severance", + ", -", + "ot p", + "mir ror", + "ar wx", + "ger ry", + "ma her", + "g or", + "hom epage", + "am is", + "ag ra", + "made le", + "best friend", + "sirius xm", + "bun dles", + "admir ing", + "t dsb", + "ðŁį ģ", + "ch as", + "slow ing", + "ro h", + "wall papers", + "â̦ /", + "tek ken", + "gang s", + "tal a", + "lind say", + "shou l", + "line backer", + "tool kit", + "ur anium", + "caly p", + "ab rams", + "mat thi", + "ðŁı ¿", + "hon ourable", + "da yo", + "ver sail", + "tan k", + "st c", + "fr itz", + "spl end", + "pat ag", + "anno yed", + "on day", + "devast ated", + "chattanoo ga", + "national ism", + "mas sey", + "jen n", + "tail or", + "dev gn", + "org ans", + "zu cchini", + "on fox", + "sat ire", + "wex ford", + "dis grace", + "no to", + "vol ta", + "âĿ¤ï¸ıâĿ¤ï¸ı âĿ¤ï¸ıâĿ¤ï¸ı", + "à ¶", + "home owners", + "poin ter", + "m cr", + "au sten", + "day sto", + "mo ons", + "pal ma", + "gra zing", + "e so", + "influen cers", + "shahid kapoor", + "compli ant", + "measure ments", + "develop s", + "y d", + "par l", + "p vt", + "rand olph", + "tor tured", + "ger ald", + "eli as", + "deepi kap", + "war mup", + "hick ory", + "g ap", + "co ffin", + "am our", + "re neg", + "moun ting", + "seven s", + "ig le", + "hi er", + "dec ad", + "tri ght", + "esc apes", + "wer ner", + "t fl", + "ful filled", + "ni ger", + "sour dough", + "re aper", + "choo ses", + "spin ner", + "week nd", + "fil tered", + "sh uk", + "kat i", + "old ham", + "open source", + "kh anna", + "at elier", + "conne c", + "opho bic", + "gla s", + "complic ations", + "ar son", + "counc ils", + "sm ol", + "as sy", + "lur king", + "ling ui", + "han ks", + "e in", + "Ù ħ", + "ru gs", + "n guyen", + "nou veau", + "men ace", + "le v", + "alad din", + "ru ining", + "round about", + "k m", + "con or", + "shoo ps", + "may day", + "traum atic", + "prab has", + "ka iser", + "k ita", + "rou ter", + "pe dro", + "re tar", + "stun ner", + "spani sh", + "distur bed", + "acade my", + "e learning", + "wit ty", + "sen g", + "fer al", + "av y", + "sta b", + "ke aton", + "ur du", + "ko to", + "hu i", + "coo ke", + "ari an", + "the personal", + "u ma", + "se ap", + "a sting", + "rhetor ic", + "hand writing", + "munici pality", + "consor tium", + "ðŁIJ Ł", + "glasgo w", + "ra ya", + "eli za", + "polym er", + "bro th", + "prac ti", + "correspon dent", + "addic ts", + "gay le", + "ail ing", + "o fe", + "p li", + "hear tw", + "st itch", + "sight ings", + "prie sts", + "sam o", + "slo th", + "good wood", + "roc co", + "sab c", + "summ it", + "l ace", + "pres ley", + "itt en", + "cin cy", + "thepersonal network", + "s week", + "pe gas", + "af con", + "regi stry", + "ci m", + "le th", + "dic ap", + "cand ice", + "flu ent", + "sm ack", + "pede stri", + "al oud", + "car ac", + "priyan kach", + "p gh", + "ir ons", + "dol ce", + "lat via", + "dece ased", + "thero ck", + "cla p", + "cen e", + "fo am", + "morris sey", + "gre t", + "essenti ally", + "com cast", + "be agle", + "argu es", + "ing ed", + "- â̦", + "sa g", + "ha san", + "ðŁĻ Ĩ", + "ðŁį °", + "nh ra", + "kann ada", + "indic ators", + "on er", + "bri xton", + "at as", + "screen play", + "sor ority", + "sha heed", + "he em", + "class 
mates", + "tain ment", + "es i", + "breast cancer", + "zucker berg", + "aur or", + "en cia", + "ref ers", + "kae per", + "vor tex", + "com part", + "lym ph", + "photograph ing", + "ste ff", + "rest ling", + "par sley", + "mom ento", + "th man", + "lac king", + "du tt", + "ocu lus", + "fin o", + "fren zy", + "ra sc", + "der n", + "dis missed", + "noo k", + "met gala", + "sh ill", + "rapha el", + "maver icks", + "exhib its", + "eag erly", + "c pa", + "amen ities", + ". âłĢ", + "exo dus", + "ern st", + "lit a", + "deal t", + "womens march", + "i ain", + "score board", + "campe ones", + "c en", + "ti ki", + "garri son", + "fidel ity", + "bra g", + "road map", + "psy chop", + "lo e", + "ble u", + "ðŁijĬ ðŁı¼", + "sau vi", + "spr inger", + "temp tation", + "ru dolph", + "ac ura", + "wic z", + "parach ute", + "stro l", + "len ny", + "zi k", + "dom s", + "nb af", + "al pac", + "vivi an", + "ro ve", + "pre et", + "perpe tu", + "sna ke", + "air soft", + "infl atable", + "prin ces", + "ati e", + "ffe y", + "pati ent", + "m ire", + "chel le", + "sl ack", + "groo vy", + "# :", + "up loading", + "!!!!!!!! !!!!!!!!", + "siem ens", + "provi sion", + "v fx", + "need y", + "f ats", + "to poli", + "bhu tto", + "sa thletics", + "alu ms", + "t winning", + "south western", + "adop ting", + "last night", + "man ne", + "la ga", + "tw ell", + "ac ia", + "-- --", + "eye wear", + "hur ley", + "fle e", + "sa ch", + "pe cker", + "cost ly", + "is k", + "cr ates", + "polic y", + "ero sion", + "in go", + "wer k", + "ðŁIJ į", + "torto ise", + "therap ies", + "inter net", + "chihuahu a", + "ri ps", + "fre i", + "ed or", + "tai ji", + "t fc", + "do d", + "demp sey", + "christ in", + "chen g", + "hi ps", + "gra eme", + "com passionate", + "cavali ers", + "histor ic", + "soul ful", + "crimin al", + "ja c", + "vin ci", + "expi red", + "sur at", + "turi smo", + "k ona", + "se aweed", + "ber ts", + "le ica", + "expre ssing", + "a al", + "wor t", + "break fast", + "her ring", + "am used", + "rhu barb", + "mar tian", + "cospla yer", + "y ash", + "stri al", + "ra ul", + "refer ral", + "dw ts", + "j w", + "ad ler", + "cur tains", + "gu r", + "val ence", + "tyr one", + "sw fc", + "coach ed", + "re born", + "diabe tic", + "cho ke", + "nor folk", + "investig ative", + "ðŁĴ¯ ðŁĴ¯", + "z id", + "v mas", + "phi e", + "objec tives", + "âľ ĭ", + "over due", + "di vers", + "mat su", + "ðŁİŁ ï¸ı", + "casu alties", + "ภ§", + "al k", + "stand ardi", + "re alist", + "arti facts", + "pand or", + "ke x", + "in vin", + "( !)", + "ine y", + "par aly", + "mr t", + "fay e", + "the voice", + "on ga", + "de ed", + "skin ner", + "az wx", + "speci men", + "priyankach opra", + "nu evo", + "bar kley", + "toulou se", + "resu mes", + "football ers", + "cit i", + "fe tch", + "è re", + "lestwe forget", + "ðŁĻ ĭ", + "ch unk", + "dri fting", + "manipul ation", + "equ als", + "pu tt", + "ky ungsoo", + "âĿ¤ï¸ı #", + "ela stic", + "par ano", + "fo y", + "do ping", + "cin cy", + "ss ler", + "interrup ted", + "al ay", + "ado res", + "ame thy", + "con voy", + "ãĢ ı", + "Ĭ ãģ", + "black list", + "gener als", + "sa chin", + "bru shed", + "oun ces", + "non stop", + "illi ams", + "bt sarmy", + "u av", + "ru ff", + "bur ma", + "bi k", + "defen ce", + "schul tz", + "bo asts", + "lonel iness", + "go re", + "trans forms", + "alum na", + "@ @", + "ra ppers", + "ne hru", + "car o", + "himalay an", + "wearab les", + "ge h", + "pepper mint", + "re development", + "flam ingo", + "cos by", + "big baldhead", + "ag ri", + "bare foot", + "sco pes", + "re gram", + "gh ana", + "ðŁİ «", + 
"i heart", + "sa die", + "carri e", + "microbi al", + "ku ala", + "sk ater", + "quer que", + "âĻ ©", + "gen res", + "reas oning", + "ch ased", + "as o", + "sli pped", + "en can", + "vam os", + "ker s", + "ad verse", + "mo il", + "commod ities", + "with you", + "sil ent", + "hy pe", + "an de", + "am ination", + "whi spe", + "lit z", + "âļ½ï¸ı âļ½ï¸ı", + "ri ff", + "pp y", + "lam bs", + "gan esh", + "ab sent", + "regu lator", + "marse ille", + "en roll", + "par cel", + "wa p", + "by rd", + "ðŁĩ Ń", + "tu ber", + "country music", + "par l", + "contro llers", + "responsi bilities", + "we y", + "ch ate", + "montene gro", + "chic o", + "mil an", + "l ms", + "tra inees", + "appropri ately", + "un certain", + "popp ies", + "ed sheeran", + "nutr itious", + "gar o", + "deut sch", + "awe some", + "ãĥ ¼", + "comfor tably", + "land marks", + "et i", + "re usable", + "daniel le", + "ro sal", + "co les", + "just ic", + "c cs", + "f anny", + "ni m", + "mc u", + "clin ch", + "at ene", + "mer ge", + "im db", + "ang lo", + "uc cino", + "pan ini", + "an not", + "bur berry", + "feat ure", + "predic ting", + "fashioni sta", + "s ask", + "imag inary", + "mm o", + "south sudan", + "spe ar", + "hu bble", + "jo inthe", + "coyo tes", + "sli go", + "ko dak", + "sit com", + "polaro id", + "roo ted", + "corru p", + "ðŁĻĮ ðŁĻĮ", + "bris ban", + "at z", + "ah l", + "re my", + "tal ent", + "aval on", + "ra da", + "pau line", + "locom otive", + "go ons", + "ne mo", + "maser ati", + "ic u", + "stu tt", + "histor ically", + "sm b", + "pres by", + "avo id", + "so oners", + "rhine stone", + "w ad", + "ri sing", + "tro t", + "mo des", + "reg ent", + "optimi ze", + "re ece", + "sm u", + "ver ti", + "newyork city", + "cor tez", + "ra c", + "in case", + "sin c", + "fiel ding", + "e tta", + "tiff any", + "al monds", + "sad dle", + "k rat", + "mat ter", + "g low", + "star ving", + "gl o", + "cra ppy", + "sl ur", + "st d", + "monit ors", + "recei pt", + "maymay entrata", + "mc il", + "un is", + "rain bows", + "cal dwell", + "pacqui ao", + "j op", + "a fe", + "hoo k", + "es sen", + "wiz ard", + "medi an", + "fla ws", + "com s", + "âĿ Ħ", + "ing h", + "ha ynes", + "anton io", + "tem plates", + "ou ter", + "na w", + "cardi gan", + "bel grade", + "ðŁĴ ī", + "hom o", + "a ise", + "ro pes", + "no ve", + "what you", + "tri gge", + "concep tion", + "ad ukone", + "na di", + "fri ars", + "sw er", + "adju sted", + "hot line", + "san ity", + "kau r", + "down loading", + "c gi", + "ten or", + "eth nic", + "app alach", + "ภ¸", + "pa g", + "gol ds", + "on set", + "investig ator", + "car tel", + "peace fully", + "jarre tt", + "cat alan", + "poli o", + "n um", + "fru stration", + "dhar ma", + "my life", + "âľĮ ðŁı»", + "aber deen", + "mu sa", + "bin der", + "spark ly", + "fle eing", + "instin ct", + "co ping", + "domin ance", + "ill ers", + "er a", + "u conn", + "lo oms", + "living ston", + "gal i", + "he s", + "c ma", + "bel a", + "se ley", + "mon k", + "la ch", + "mar x", + " ´", + "m erica", + "woman in", + "es sex", + "ra ina", + "jim i", + "nep tune", + "z ack", + "chine se", + "mart ins", + "chand elier", + "her n", + "with us", + "ear l", + "asph alt", + "modu les", + "st p", + "ul la", + "psychi atric", + "mile age", + "captiv ating", + "si der", + "men to", + "mor t", + "tran ce", + "tal bot", + "ab by", + "ì ĥ", + "âľĮ ðŁı¼", + "j ak", + "daw n", + "turn up", + "scre wed", + "fe ds", + "blue print", + "ðŁĴĸ ðŁĴĸ", + "har sh", + "er os", + "insom nia", + "ban kers", + "ta emin", + "mis conduct", + "hu mber", + "gi di", + "edu ardo", + "con 
a", + "musc ular", + "consu ming", + "ra sh", + "don nie", + "di pped", + "col lie", + "samu el", + "melt down", + "ðŁĺįðŁĺį ðŁĺį", + "me z", + "exam ining", + "schwar tz", + "pri stine", + "ðŁIJ Ŀ", + "ve it", + "ful filling", + "an esthe", + "gue sses", + "dra ft", + "som me", + "soli d", + "pati onal", + "ho ped", + "evolu tionary", + "all er", + "enter tained", + "sli ps", + "lud wig", + "conclu des", + "sen sible", + "bon net", + "cra ze", + "tra s", + "haz ards", + "const antine", + "ed ics", + "star trek", + "to c", + "occu pational", + "in cheon", + "deepikap adukone", + "pizz as", + "new comer", + "de part", + "oppre ssion", + "ebon y", + "foss ils", + "tro jan", + "el en", + "ste aks", + "k hou", + "positi oning", + "ug by", + "red cross", + "ak h", + "dol ce", + "us mnt", + "pp en", + "dil ig", + "ma vs", + "call er", + "cost ello", + "⼠Ħ", + "dy n", + "thing s", + "rhin os", + "a xi", + "sar kar", + "con vocation", + "att ers", + "ss ss", + "fun gus", + "eu gen", + "russ o", + "squ at", + "w sb", + "eli on", + "william sburg", + "s off", + "defici ency", + "be arer", + "o kin", + "key stone", + "t wain", + "cal ming", + "break able", + "wa res", + "horser acing", + "com bs", + "bun ting", + "u it", + "t land", + "ðŁĴĻðŁĴĻ ðŁĴĻ", + "ga stron", + "sab ot", + "ick ers", + "commissi oners", + "sen ate", + "ii ot", + "ath ena", + "nit rogen", + "an tony", + "ero tic", + "di alo", + "mis sou", + "hypo cr", + "âľ Ī", + "kaeper nick", + "can v", + "d roo", + "clevel and", + "o sh", + "mon sta", + "stefan o", + "^ )", + "sh ul", + "po ison", + "ha e", + "commerci als", + "ma ul", + "nit ro", + "co worker", + "alo e", + "vap or", + "t ents", + "russi an", + "qu id", + "question able", + "mid get", + "po ker", + "girl friends", + "sin the", + "erit rea", + "ten ure", + "depos its", + "buc keyes", + "spot ter", + "theod ore", + "trin ity", + "joaqu in", + "u cci", + "follow the", + "caf c", + "mp a", + "ðŁIJ »", + "plo tting", + "dom ino", + "ta ek", + "sion ally", + "dicap rio", + "pa p", + "car mel", + "ig er", + "bt cc", + "beth le", + "www bigbaldhead", + "foo die", + "bagh dad", + "mason ry", + "off ended", + "à ·", + "ภģ", + "sc ro", + "vers es", + "ori ent", + "ar ches", + "pi yu", + "know your", + "gre e", + "ta kers", + "gu ard", + "dish on", + "bucket list", + "bha fc", + "war dly", + "ðŁİīðŁİ Ĭ", + "leigh ton", + "pe w", + "stra y", + "assaul ted", + "in hal", + "ly fe", + "amar keting", + "l x", + "kat z", + "ubun tu", + "me o", + "carto onist", + "turno ver", + "mi z", + "dis like", + "mul len", + "mo f", + "bl and", + "hi des", + "emer ges", + "chori zo", + "truste e", + "ma hog", + "lan sing", + "paralym pic", + "fa int", + "fa una", + "ch al", + "sn ar", + "cat h", + "bent on", + "cast illo", + "sli ppery", + "apric ot", + "oec d", + "bar o", + "l z", + "he ming", + "clow ns", + "co workers", + "peru vian", + "commu ters", + "y ell", + "ðŁļ ´", + "under ing", + "v j", + "tt p", + "fli pk", + "w ana", + "soc ent", + "Ĥâĸ Ĥâĸ", + "ठĤ", + "oo sa", + "jag ger", + "di sm", + "e less", + "d ham", + "cali f", + "a official", + "ec lip", + "harro gate", + "gra pp", + "com rade", + "n tr", + "concentr ate", + "thi ghs", + "bit coin", + "bel arus", + "ë ĵ", + "end uring", + "now watching", + "industri al", + "pi p", + "ar on", + "ar at", + " ®", + "whit by", + "oooo ooo", + "sa ree", + "tic als", + "mis leading", + "yo on", + "year s", + "sle igh", + "roman ian", + "sciss ors", + "vam pires", + "ac up", + "ab ba", + "th weeksary", + "cent ri", + "fl ye", + "u o", + "c bi", + "bu 
ena", + "sin d", + "mar ino", + "bur r", + "re building", + "ठ²", + "anniver saire", + "ac ca", + "ðŁĴĢ ðŁĴĢ", + "gett ing", + "tu lips", + "wolf pack", + "âľį ï¸ı", + "more than", + "ta kin", + "ð٤ĺ ðŁı»", + "u be", + "mon ic", + "dou bts", + "mo wer", + "co balt", + "don ne", + "specul ation", + "argu ably", + "kak u", + "htt ps", + "prosecu tion", + "din ah", + "stam atic", + "disclo sed", + "bever ly", + "fl wx", + "cra bs", + "extraordin aire", + "war mest", + "imper i", + "o logists", + "trac es", + "par c", + "lake side", + "am r", + "ter i", + "hour ly", + "domin ation", + "ar row", + "shrews bury", + "ance stry", + "wr angler", + "trigge red", + "pen sac", + "roo ster", + "survi ves", + "a on", + "bo ko", + "val or", + "love is", + "la g", + "pe y", + "fo cal", + "out laws", + "bl anc", + "artic ho", + "wit s", + "marsh all", + "die go", + "support small", + "u ca", + "sa h", + "je et", + "syn ago", + "gover ning", + "ðŁĴ ¬", + "sal ads", + "cre ate", + "miri am", + "cen sored", + "ami de", + "no u", + "z eta", + "allegi ance", + "* )", + "bl m", + "ric an", + "pa stors", + "oly mpus", + "blo c", + "whir l", + "star ry", + "pr one", + "y k", + "p ne", + "congratul ating", + "be v", + "so ber", + "love island", + "sa ir", + "an ing", + "tutor ials", + "q e", + "lun d", + "in ist", + "cle ver", + "taxpay er", + "ali z", + "wren ch", + "dd ling", + "cap ri", + "h pa", + "ðŁı» âĢįâĻĤï¸ı", + "na j", + "o j", + "futuri stic", + "jelly fish", + "ðŁĶ¥ðŁĶ¥ ðŁĶ¥ðŁĶ¥", + "cel ery", + "plan k", + "fil a", + "ne me", + "un healthy", + "lec tions", + "ðŁ§ ¡", + "rit chie", + "n ws", + "mi kha", + "wonder woman", + "âĢ İ", + "hip stamatic", + "ka g", + "ðŁĴľðŁĴľ ðŁĴľ", + "poul try", + "mo w", + "wor ds", + "lo ff", + "ðŁ¤£ ðŁ¤£", + "relat able", + "re mixes", + "keny atta", + "ke m", + "re signed", + "fo d", + "stra igh", + "j lo", + "hu tch", + "box ers", + "colle en", + "mag s", + "instruc tional", + "ko l", + "attrac ts", + "pra g", + "account ant", + "go ggles", + "br u", + "th ole", + "mar row", + "leu ke", + "oc to", + "pon ds", + "bubb ly", + "he ist", + "ìĹ ij", + "im p", + "a har", + "ha unt", + "hall mark", + "psy ch", + "kkkk kkkk", + "col umb", + "jump suit", + "cost co", + "si delines", + "ag gies", + "over turned", + "ni b", + "key chain", + "fu k", + "f af", + "mi am", + "assist ants", + "cy cled", + "ri der", + "dam mit", + "red wings", + "mag es", + "kin s", + "ì Ĥ", + "ho d", + "son t", + "carol ine", + "\" '", + "cu le", + "bra id", + "fel ony", + "ar ities", + "ruther ford", + "depic tion", + "isab elle", + "ro ach", + "k day", + "fifth harmony", + "em y", + "li gam", + "bari sta", + "albu querque", + "gro ss", + "ðŁį º", + "oo ks", + "ðŁij ¼", + "dun can", + "try in", + "jag s", + "g ould", + "li tho", + "âģ £", + "а Ð", + "sam my", + "tun g", + "cas ser", + "apo lo", + "aaaa a", + "man g", + "as ics", + "sh en", + "p ye", + "tur bul", + "ss p", + "saint sfc", + "on lin", + "n anny", + "he ster", + "do z", + "ภĶ", + "th read", + "ren ts", + "kh and", + "ðŁĴª ðŁı½", + "un conditional", + "rob son", + "car re", + "ph on", + "sacrific ed", + " £", + "auto s", + "par ker", + "oc a", + "log in", + "kee gan", + "hard cover", + "dough nuts", + "ðŁĮ İ", + "spit fire", + "refresh ments", + "saskat oon", + "commod ore", + "j f", + "rub ber", + "halam adrid", + "child care", + "stra da", + "io m", + "ri k", + "dak ar", + "ther mom", + "cro pped", + "gar u", + "ali k", + "ven i", + "i ft", + "si ka", + "ritu als", + "z ul", + "e ch", + " ©", + "su dan", + "l land", + "i me", + "do 
cker", + "ì ¤", + "fe ared", + "fa o", + "wal ter", + "no g", + "mutu als", + "l h", + "ali gn", + "mon ia", + "concep tart", + "ðŁĻı ðŁı¼", + "sco e", + "compet ence", + "sw ine", + "ly me", + "laun ch", + "green er", + "abstract art", + "inqu is", + "gran ada", + "ga elic", + "flu ff", + "d backs", + "grave yard", + "ba be", + "acade mic", + "adventur ous", + "joh ann", + "~ !", + "bi bi", + "| #", + "pl ings", + "gett y", + "as b", + "âĿ¤ï¸ı @", + "staf f", + "religi ons", + "bang or", + "world bookday", + "me gh", + "de vin", + "ash ore", + "meri dian", + "gi thub", + "qui z", + "all stars", + "be stest", + "ir resi", + "ack er", + "do te", + "war rington", + "pol ly", + "newor leans", + "cr ou", + "wi gs", + "che y", + "smithson ian", + "la sag", + "de tour", + "bor is", + "stra ps", + "mari ah", + "inten tionally", + "ko h", + "ðŁį ¸", + "ssi an", + "mar issa", + "cor al", + "episcop al", + "casu alty", + "tom o", + "supply chain", + "sam p", + "on go", + "ro o", + "cavi ar", + "p fw", + "clau dio", + "buff alo", + "s ations", + "mat ty", + "snap back", + "l ds", + "al arms", + "mat te", + "âĺ Ķï¸ı", + "conditi oner", + "d ors", + "he x", + "fi zz", + "a stri", + "sus sex", + "secur ity", + "qa eda", + "all star", + "cocac ola", + "as one", + "cl icks", + "sc ans", + "mu te", + "he avier", + "ðŁİ §", + "âĺ ŀ", + "lv l", + "book boost", + "youtu be", + "fla shes", + "f jor", + "c su", + "explo de", + "do dge", + "cair n", + "gonz ales", + "th ill", + "pel le", + "hart ley", + "renew able", + "re tin", + "e stre", + "costar ica", + "shipy ard", + "nc fc", + "pri ya", + "a ghan", + "an ath", + "plu gin", + "co rey", + "re bound", + "or u", + "kat rin", + "hor mone", + "gi m", + "mahin dra", + "s sus", + "park land", + "har per", + "fanta stic", + "infer no", + "ep ilo", + "wrest ling", + "fe ct", + "c it", + "ac oun", + "to ssed", + "monu mental", + "char tered", + "bu st", + "pe tra", + "âĮ ļ", + "wildflower hour", + "sweat ers", + "* .", + "bl er", + "ate ch", + "go wan", + "demo graphic", + "bra l", + "suici de", + "renov ations", + "vu el", + "sin ister", + "ar mani", + "miso gy", + "ph arrell", + "nap s", + "un iting", + "crusad ers", + "cor gi", + "insu red", + "than i", + "no or", + "g q", + "d ada", + "bicy cles", + "snu ggle", + "sch an", + "ten berg", + "ss al", + "fe mme", + "bo il", + "½ ï¸ı", + "re ap", + "occur ring", + "hus sein", + "divi d", + "sto ke", + "sh alom", + "na ia", + "o lic", + "frustr ating", + "Ù ĩ", + "ig s", + "gro ver", + "scen arios", + "n ds", + "bru tality", + "med alli", + "bu on", + "sas s", + "skate boarding", + "ony x", + "lor ry", + "ny u", + "gau tam", + "mm ings", + "gu g", + "end i", + "lo thian", + "comm ando", + "chal k", + "ph ora", + "asse ssing", + "ti gh", + "crun chy", + "ad ay", + "is l", + "ci ara", + "pilgri ms", + "kam al", + "p to", + "brit anni", + "t ani", + "sm c", + "l ure", + "app store", + "ab y", + "golf ing", + "cl c", + "fa u", + "an as", + "shu tting", + "regul ated", + "carn age", + "scow boys", + "all enge", + "c ma", + "humbold t", + "rel le", + "ku mb", + "her i", + "refin ery", + "sound check", + "d wayne", + "bos nia", + "i sp", + "the alth", + "anni v", + "relev ance", + "my a", + "bag gage", + "dre ad", + "s bc", + "th ed", + "bu h", + "hi jab", + "lo id", + "ke w", + "c te", + "respec t", + "lovel ies", + "cu bes", + "celebr ate", + "dir t", + "sav ers", + "_ ,", + "gar ment", + "pulit zer", + "mas jid", + "beat port", + "al arts", + "encry ption", + "s ner", + "ple ads", + "found ry", + "sym metry", + "ru mi", + 
"birth place", + "scallo ps", + "supp le", + "pivo tal", + "t ati", + "no de", + "so d", + "pro xim", + "tr ics", + "col dest", + "bren t", + "mand u", + "cla ir", + "e ach", + "and alu", + "hi ddleston", + "ðŁIJ º", + "mel ts", + "v ance", + "pin n", + "se ments", + "scre ened", + "sa chs", + "o bl", + "ic ha", + "âĺĺ ï¸ı", + "school ers", + "heal ed", + "lo gged", + "ð٤ĺ ðŁı¼", + "ic us", + "bore dom", + "b ish", + "b ffs", + "tal king", + "sure sh", + "hoo kem", + "de on", + "de fl", + "ei leen", + "ðŁį ķ", + "women intech", + "ri sotto", + "rang er", + "adverti se", + "ภģà¸", + "tel ly", + "la go", + "dart moor", + "d ong", + "sk ates", + "lo go", + "un ner", + "mail box", + "ma sala", + "lo oooo", + "amethy st", + "che wing", + "c bb", + "australi ans", + "rc mp", + "game art", + "# ...", + "kor n", + "extre mism", + "fruit ful", + "anci ent", + "pu bg", + "pol ite", + "wh it", + "mur als", + "m gr", + "line man", + "dav ao", + "ste ms", + "ten nis", + "av age", + "tu pac", + "gigan tic", + "hs bc", + "auto biography", + "up the", + "ี à¹Ī", + "re gal", + "fig uring", + "ku l", + "mis sy", + "hoo p", + "gra s", + "for ums", + "back lash", + "abduc ted", + "p nw", + "min ic", + "bu tt", + "bott oms", + "at on", + "ven g", + "ðŁĮ ı", + "del aney", + "prab hu", + "fan club", + "over haul", + "health ye", + "sy no", + "aa f", + "ren amed", + "kim i", + "un cle", + "man city", + "se u", + "qu anti", + "este em", + "um in", + "en zo", + "mel vin", + "under go", + "j har", + "far ah", + "coast ers", + "humph rey", + "mh z", + "children s", + "^ .", + "d hi", + "disrup tive", + "integr ating", + "r nb", + "over sized", + "a ide", + "ne au", + "docu mentation", + "ðŁijĢ ðŁijĢ", + "pal o", + "hear th", + "ri yad", + "pun ctu", + "abc news", + "secu res", + "boy band", + "bir ch", + "ju co", + "tra ff", + "legislat ors", + "bay a", + "ãĤ ¯", + "no ises", + "collec ts", + "s warm", + "k ner", + "bi shops", + "stur geon", + "snapp ing", + "mo l", + "fre aky", + "chair person", + "tro p", + "lyn ch", + "car cin", + "art sy", + "e sto", + "cha i", + "fl ur", + "inv ali", + "sau sages", + "im el", + "j or", + "fun fact", + "wit ter", + "puni shed", + "ac ons", + "h ya", + "re versi", + "em c", + "dif fu", + "z x", + "sp aw", + "cla d", + "d mit", + "hol land", + "fre sco", + "pay roll", + "ab undant", + "stu ffing", + "mor o", + "c ny", + "boy cott", + "wend y", + "ele ven", + "pro voc", + "pil ot", + "tr x", + "be ad", + "climate action", + "ri on", + "assi e", + "ì ĸ", + "o sm", + "islam ic", + "ho ar", + "good reads", + "al ici", + "afterno ons", + "spoke sman", + "jo lie", + "it as", + "masc ara", + "âĻ© âĻ«", + "pre vail", + "beetro ot", + "lu jah", + "k li", + "dod ger", + " »", + "ru le", + "l n", + "scre am", + "ho bart", + "col bert", + "r tc", + "er m", + "pat ro", + "quo ting", + "s live", + "que st", + "non fiction", + "semin ary", + "prosecu tors", + "ve st", + "express way", + "g ge", + "nau tical", + "et f", + "ðŁİīðŁİ Ĭ", + "dur ation", + "cha ired", + "the film", + "fab io", + "she h", + "can o", + "ðŁĴª ðŁı»", + "with draw", + "! 
:)", + "cor pus", + "phen om", + "yel p", + "la wn", + "ent om", + "snapp er", + "but te", + "pin ball", + "pro xy", + "libr e", + "alle vi", + "n ada", + "gabri el", + "fo wl", + "eure ka", + "daph ne", + "tu nes", + "pun ched", + "wh ore", + "jo g", + "ren tial", + "man ners", + "o pe", + "wh ufc", + "gu th", + "revol t", + "sne aker", + "philharmon ic", + "ho ste", + "sovereign ty", + "ðŁĻıðŁĻı ðŁĻı", + "fish ing", + "sci art", + "fe ta", + "i pp", + "dump ing", + "kel own", + "gir i", + "dig its", + "sal u", + "san jay", + "twee ters", + "sp as", + "col chester", + "sc ab", + "ma dd", + "๠Ħà¸", + "Ä ĩ", + "ged don", + "march for", + "do p", + "maure en", + "un plugged", + "di do", + "fashion blogger", + "up a", + "mex ic", + "tar y", + "pol ye", + "jame son", + "v t", + "grin der", + "mad dy", + "consult ancy", + "¬ ë", + "leagueof legends", + "ac cents", + "um ni", + "jane iro", + "tu ss", + "h ens", + "ampli fier", + "to shi", + "pret tier", + "pre vents", + "new town", + "red wood", + "vant age", + "ball ard", + "ar tof", + "a she", + "a sion", + "lac ey", + "ap at", + "gro ve", + "ภĦ", + "rw and", + "real tors", + "tra itor", + "bed ding", + "ö r", + "zi on", + "fla shing", + "cam pan", + "boom er", + "secretari at", + "ab ol", + "liti gation", + "cont amination", + "se dly", + "shred ded", + "in for", + "do herty", + "bench mark", + "ro che", + "skate board", + "sho vel", + "i zz", + "to pper", + "o ster", + "laby rin", + "autu m", + "k ong", + "hum mus", + "vi z", + "tech news", + "kla us", + "am using", + "socialmedi amarketing", + "i des", + "cast ell", + "ste e", + "underestim ate", + "cal ab", + "pa ign", + "b illing", + "unanim ously", + "g mb", + "fly fishing", + "hath away", + "commerci al", + "colour ing", + "skul ls", + "pivo t", + "te p", + "tb c", + "motor way", + "x press", + "construc tive", + "pu k", + "under lying", + "kir sten", + "mani ac", + "cha o", + "se ma", + "chiff on", + "ðŁijĮ ðŁı»", + "ver ona", + "kom o", + "stan doff", + "wi ped", + "c ated", + "bla ir", + "wor kin", + "m sc", + "bethle hem", + "swi pe", + "unexpe c", + "pe es", + "pe tri", + "orig ami", + "ðŁij ħ", + "mex ico", + "flav or", + "ru dd", + "cannab is", + "mar u", + "ri ddle", + "wor shi", + "sil on", + "sch at", + "ap se", + "tang er", + "bi ous", + "e er", + "questi oned", + "o zar", + "dan k", + "angle sey", + "char an", + "bak u", + "compe ten", + "re pri", + "bat ter", + "sa xon", + "cal ves", + "leng ths", + "$ $$", + "âŀ ¡ï¸ı", + "immer sion", + "ga unt", + "car ry", + "cy to", + "b anda", + "shu tt", + "experi ence", + "el gin", + "mous se", + "ta z", + "ê µ", + "in correct", + "en z", + "b ham", + "mor on", + "so ver", + "ar un", + "ti pped", + "la ble", + "de arly", + "bau tista", + "í Ļ", + "mor tal", + "woo p", + "dt la", + "sho cks", + "dav os", + "ðŁĵ Ŀ", + "swim wear", + "her man", + "ðŁijĩ ðŁijĩ", + "z ir", + "neglec ted", + "grac ed", + "campu ses", + "av s", + "ar ora", + "swach hb", + "live pd", + "ac cra", + "enqui ries", + "shoo ters", + "kur t", + "vancou ver", + "brad ley", + "gar da", + "g ü", + "ol la", + "attrac ting", + "up ton", + "ne win", + "lu mia", + "furn ace", + "ev ers", + "e on", + "sw a", + "roo kies", + "a oc", + "v ss", + "bris ket", + "tor ch", + "yo da", + "heart land", + "tac o", + "ph ony", + "food bank", + "ab bey", + "bab ylon", + "u y", + "gre ate", + "expre sses", + "d andy", + "sc apes", + "survi vor", + "ron d", + "e ci", + "ha vin", + "ab el", + "chil dish", + "tor que", + "wav y", + "ur self", + "kanye west", + "year of", + "ale stine", + 
"o brien", + "al fon", + "sk ag", + "kore an", + "anchor age", + "val eri", + "de w", + "ðŁİ ¨", + "land slide", + "car ole", + "christ en", + "go phers", + "af i", + "priyan ka", + "q q", + "power of", + "it te", + "pc so", + "tw ol", + "pr y", + "intellec tu", + "guer rero", + "pi les", + "wish list", + "w ren", + "time table", + "ë ı", + "prodi gy", + "gibb ons", + ". /", + "ne ur", + "anz ac", + "mur ray", + "vie st", + "pla ster", + "la ir", + "art gallery", + "inter continental", + "g br", + "bell ator", + "nam joon", + "mam mals", + "am el", + "y aw", + "saras ota", + "cam ar", + "bud ding", + "sum mari", + "aco sta", + "la sh", + "ey ou", + "post graduate", + "instruc tors", + "ti g", + "const ant", + "were wolf", + "ic os", + "cla s", + "glen n", + "bud ge", + "ðŁĻ Ĥ", + "er ta", + "sta ins", + "persecu tion", + "cumb ri", + "o ch", + "syner gy", + "hu ang", + "scand in", + "mid terms", + "comment ator", + "regar ded", + "perpe tual", + "bo iling", + "al p", + "lan ge", + "sch le", + "fac eli", + "twee ta", + "ri dden", + "ok toberfest", + "charlotte sville", + "ik lan", + "jo u", + "ch atham", + "b sc", + "ðŁį ¦", + "stra uss", + "mel low", + "xx xx", + "happy hour", + "re actor", + "ww er", + "distr action", + "at orial", + "ðŁĴª ðŁı¼", + "twin peaks", + "fay ette", + "a or", + "ko k", + "bro om", + "sy fy", + "ou se", + "am ag", + "Ø ·", + "ubis oft", + "lu lu", + "hall mark", + "stu art", + "it ya", + "si deline", + "venge ance", + "re lu", + "sex ism", + "boun cing", + "un ites", + "gu stav", + "te ssa", + "stu mp", + "pro clamation", + "ima x", + "divid end", + "col by", + "ðŁį İ", + "play wright", + "un safe", + "co smo", + "ðŁĩ²ðŁĩ ½", + "cup board", + "constitu ents", + "ang lia", + "ram page", + "ðŁĺįðŁĺį ðŁĺįðŁĺįðŁĺį", + "than ked", + "take aways", + "shro ff", + "de bat", + "kh ur", + "conduc ts", + "format s", + "à ©", + "port age", + "graph ers", + "u ten", + "pre m", + "mo ines", + "condem ns", + "s ous", + "l ps", + "f cs", + "deal ership", + "leuke mia", + "bure au", + "ski d", + "guardi ola", + "ca ster", + "thir d", + "avoi ded", + "en cyclo", + "c sr", + "vi xx", + "analy zing", + "she ar", + "dulu th", + "shap iro", + "chan ting", + "stre sses", + "as be", + "mil itia", + "ãĥ ª", + "col lin", + "arsen e", + "sure sh", + "teach ings", + "yi xing", + "sh ill", + "nu des", + "sv u", + "clear water", + "war ped", + "pro life", + "artist son", + "it u", + "versail les", + "galax y", + "ax el", + "spring st", + "cal a", + "hu hu", + "sc u", + "commit ments", + "exe ter", + "poign ant", + "mo tion", + "conserv atory", + "row dy", + "rec alled", + "mu sk", + "emb elli", + "so the", + "âĺ Ģ", + "sto pper", + "sch ild", + "to pe", + "el mo", + "zi el", + "j om", + "barn sley", + "snow den", + "on tour", + "jour ney", + "hills borough", + "par ole", + "w ts", + "mo ving", + "ag ility", + "tiv o", + "ff ers", + "kindle unlimited", + "g wen", + "ann an", + "ah mad", + "tex tured", + "hepat itis", + "dra m", + "insi ders", + "tis sues", + "ãĥ Ħ", + "fc barcelona", + "cr atic", + "na acp", + "pe can", + "f gm", + "custom ize", + "concer t", + "g sm", + "pe g", + "p one", + "justin trudeau", + "super cars", + "happy holidays", + "bu lar", + "ado x", + "lap tops", + "digital health", + "destin ation", + "gradu ally", + "áĥ ¦", + "popp y", + "ss l", + "inhi bit", + "star light", + "of fro", + "glo omy", + "x per", + "hal der", + "im plants", + "le to", + "hass el", + "a as", + "un told", + "en ci", + "liber ia", + "or an", + "con tests", + "il ah", + "sma g", + "sc out", + 
"mari anne", + "cr yo", + "schedu ling", + "lo s", + "kan e", + "stutt gart", + "ne se", + "law rence", + "da in", + "pho tom", + "car ou", + "ภ£", + "g wy", + "national dogday", + "roa sting", + "band camp", + "kentu cky", + "stret ches", + "ke rel", + "ca she", + "ãĤ ¸", + "sta x", + "tran si", + "dog gie", + "at ric", + "hal le", + "ci vic", + "brow ning", + "lein ster", + "cat day", + "high land", + "joy ous", + "in cumb", + "or lando", + "ro mo", + "col ton", + "del ta", + "car ab", + "ro tc", + "aster oid", + "goose bumps", + "mo logy", + "yo ko", + "an ds", + "tomor rows", + "red carpet", + "sm p", + "ca sio", + "ðŁ¤£ðŁ¤£ ðŁ¤£", + "se au", + "rejec tion", + "rot ating", + "bi partisan", + "th un", + "mat i", + "bon i", + "ol l", + "ener gye", + "do it", + "l j", + "mother hood", + "lou ise", + "neck laces", + "el ite", + "ni x", + "l cs", + "en v", + "gl u", + "le sh", + "cran k", + "su sie", + "m clau", + "so tu", + "crow ley", + "rat ri", + "use d", + "bre ton", + "alfre do", + "ye o", + "travel pics", + "ti pp", + "elli son", + "sax ophone", + "me red", + "heu ghan", + "ta ine", + "f es", + "vi ro", + "suppo sedly", + "i as", + "dige stive", + "y le", + "li zzy", + "wildlife photography", + "bri anna", + "west field", + "ra ined", + "am her", + "ðŁĺĦ ðŁĺĦ", + "distribu te", + "bott om", + "pre serving", + "oil and", + "craf ty", + "de scen", + "col ling", + "shakespeare sunday", + "r wc", + "ang led", + "ci an", + "t ations", + "mon tage", + "me yers", + "france sca", + "ðŁĮ ·", + "wi ggins", + "san ford", + "volunte er", + "car ra", + "bar k", + "vari ed", + "pl in", + "am u", + "kap il", + "rock ers", + "qu ind", + "br ane", + "in mate", + "ent al", + "impro vis", + "michi gan", + "re tweeting", + "progre ssing", + "mercedes benz", + "smo ker", + "physi ology", + "dor ado", + "watt pad", + "h wa", + "sr bachchan", + "w ga", + "vol atility", + "hi re", + "ac ap", + "wn ba", + "hein z", + "stit ches", + "kidnapp ing", + "bur ys", + "lim b", + "f itters", + "thumb nail", + "ton e", + "mir and", + "desi rable", + "ad dison", + "tar an", + "tamil nadu", + "spec tator", + "soci ology", + "amit shah", + "remo tely", + "âĻ ¦", + "ham id", + "r ds", + "g lee", + "smooth ly", + "sch ro", + "er c", + "lali ga", + "he als", + "us f", + "ni shi", + "d hu", + "un il", + "h le", + "tro mb", + "bhu tan", + "pilip inas", + "se ung", + "whit man", + "te y", + "min ce", + "snow boarding", + "re au", + "k ker", + "av o", + "zach ary", + "ran veer", + "ti k", + "gover n", + "qu al", + "beck y", + "anthropo logy", + "att en", + "grocer ies", + "de bit", + "war p", + "sil icon", + "hawa ii", + "ðŁĴ ħ", + "pomegran ate", + "pe er", + "orang es", + "people schoice", + "end ure", + "ðŁĴĽ ðŁĴĽ", + "ãĤ¹ ãĥ", + "ac ial", + "a haha", + "stu k", + "imper ial", + "bl ond", + "pow der", + "kno ts", + "vin ce", + "wood lands", + "den a", + "watch in", + "mat cha", + "ma hat", + "galax ies", + "middles brough", + "k ö", + "stre e", + "resc ues", + "wal do", + "lero y", + "desp ic", + "real ities", + "tm nt", + "ha q", + "un o", + "pe c", + "bolly wood", + "blin ds", + "design thinking", + "he ms", + "and hra", + "ab sen", + "fan s", + "ste ch", + "shire hour", + "bla ine", + "shak ti", + "pu rely", + "ðŁı ı", + "tra fal", + "ke ynes", + "gr ate", + "to bias", + "spon taneous", + "satur ated", + "caval ry", + "pri sc", + "ðŁĺ ij", + "wh t", + "pas si", + "~~ ~", + "vir at", + "patt inson", + "la o", + "weir do", + "sym pathy", + "ju da", + "occa sionally", + "cred ited", + "stat u", + "es co", + "hil ly", + "esc 
ape", + "dischar ge", + "se er", + "may nard", + "sud bury", + "z lat", + "or al", + "we er", + "encoun tered", + "sm elling", + "over sight", + "ê ¸", + "that cher", + "mack ay", + "you can", + "fre ep", + "freed oms", + "prophe cy", + "ho e", + "ishq ba", + "dra ke", + "qu its", + "pel led", + "tur k", + "o vi", + "wesle yan", + "new music", + "leg g", + "ch eng", + "h illi", + "ay y", + "pan ties", + "ad versity", + "ad jac", + "vaccin ation", + "ju ke", + "ga c", + "exce ed", + "time sof", + "sta ining", + "ep cot", + "v ital", + "up ward", + "bethe sda", + "apar k", + "ma hi", + "camp fire", + "enchan ting", + "rha pso", + "h z", + "na ver", + "fa x", + "vali dation", + "ac ad", + "ny r", + "as ym", + "coordin ated", + "depar ted", + "all ery", + "var ies", + "spr ite", + "chap lin", + "ss occer", + "s wat", + "bre t", + "relu ct", + "tunes app", + "super star", + "reminis cing", + "o co", + "home grown", + "dough nut", + "un canny", + "la pd", + "thyro id", + "! âĿ¤ï¸ı", + "botan ic", + "bre s", + "sp ade", + "i ste", + "echo es", + "du lil", + "bur sting", + "qui ero", + "ðŁij İ", + "loy ola", + "amuse ment", + "ha ils", + "sleep y", + "burgl ary", + "âľ ı", + "ro gue", + "cot land", + "mo ors", + "low er", + "wic ked", + "ðŁĶ Ĭ", + "compet iti", + "argent ine", + "yvon ne", + "karti keyan", + "ili ary", + "gat sby", + "precin ct", + "six ty", + "na ji", + "cam s", + "practiti oner", + "ðŁĺ³ ðŁĺ³", + "pu ne", + "neg li", + "juli en", + "inv aded", + "cali br", + "cla m", + "duba i", + "mu k", + "lan tic", + "produc t", + "fe dex", + "ï¸ı :", + "eu ra", + "dari us", + "s ling", + "virtual reality", + "home stead", + "ðŁı³ï¸ıâĢį ðŁĮĪ", + "pac ed", + "in ha", + "pul mon", + "la zy", + "premi ering", + "ma stered", + "in he", + "con gregation", + "ba jo", + "sport ing", + "new jersey", + "hor ny", + "lma oo", + "leng thy", + "du t", + "yo gh", + "swe aring", + "philosoph ical", + "pap ua", + "in ski", + "know les", + "dy ke", + "âĢ ²", + "to ken", + "mc guire", + "ri ot", + "probab ility", + "mc con", + "gro s", + "su mat", + "c ite", + "da a", + "on da", + "mad dow", + "che w", + "board games", + "spar ked", + "re claimed", + "ad hd", + "ny se", + "imwith her", + "equ inox", + "boo ths", + "balsam ic", + "ha zy", + "dor chester", + "ag os", + "se aw", + "moder ator", + "seri ea", + "ander sen", + "pilgri m", + "âŃIJ âŃIJ", + "itch en", + "hal li", + "x ton", + "nathan iel", + "mun ition", + "celesti al", + "ga f", + "zo om", + "mark le", + "pen thouse", + "cal e", + "s fa", + "bar king", + "tu cket", + "em ery", + "cal orie", + "li que", + "ad ar", + "mc nam", + "tor tilla", + "wood pecker", + "mo town", + "bad ger", + "ayr shire", + "scram ble", + "dd ay", + "cra ziest", + "per rie", + "cho co", + "cast e", + "i ot", + "wre cked", + "selec ting", + "uss r", + "gra ft", + "pun t", + "lab ou", + "ir st", + "ba ek", + "Û Į", + "su ki", + "que u", + "ach at", + "te ster", + "aug mented", + "wc vb", + "sin ks", + "ðŁĵ »", + "ra ke", + "inter ne", + "be cause", + "belle vue", + "une arth", + "light en", + "ðŁĺ £", + "turn around", + "labe led", + "unemp loyed", + "twitter kurds", + "le ia", + "h ye", + "great er", + "ðŁIJ İ", + "tim ed", + "i red", + "e tt", + "limit ations", + "cab e", + "s out", + "bee ch", + "anni hil", + "re trac", + "yo ona", + "ang er", + "den nis", + "supp lying", + "di z", + "\" (", + "sc ur", + "gun man", + "su ho", + "sauvi gnon", + "ภ¥", + "wi ley", + "land on", + "choreo graphy", + "pre historic", + "ðŁı ĥ", + "var gas", + "assess ments", + "pinn acle", + "di 
i", + "chamber lain", + "ì Ī", + "v p", + "present ers", + "deut sche", + "sun shine", + "sal utes", + "r one", + "bu siest", + "- .-", + "motor ists", + "hemi sphere", + "al wx", + "ps p", + "ow a", + "den ying", + "cho c", + "gu tier", + "han uk", + "mus kete", + "jait ley", + "se wage", + "t ame", + "thin kers", + "shi m", + "se quo", + "pap ar", + "middle east", + "k wa", + "ke g", + "patag onia", + "no y", + "bar ça", + "take off", + "he a", + "à ¬", + "n sc", + "g dc", + "ðŁij Ī", + "mou stache", + "mel ania", + "thr a", + "â¬Ĩ ï¸ı", + "pier ced", + "ze us", + "fon ts", + "ber a", + "it iner", + "q atar", + "contr ary", + "ire land", + "i fy", + "ou los", + "commun al", + "fin s", + "un paid", + "pa a", + "ðŁijĩ ðŁı»", + "ri os", + "ou p", + "f iller", + "cafe teria", + "ภŃ", + "kas i", + "cali ber", + "z ulu", + "v sco", + "ts ford", + "dragon fly", + "smo kin", + "pi st", + "psycho logist", + "diplom at", + "we bs", + "buc cane", + "à® ¾", + "motiv ational", + "du ne", + "ba e", + "c fs", + "with out", + "er on", + "i ac", + "ate e", + "pen sion", + "fra zier", + "en sis", + "sk is", + "par ting", + "ger y", + "territ ories", + "nach os", + "eni ght", + "ever lasting", + "msd honi", + "tel e", + "sp un", + "po di", + "sab ah", + "environ mentally", + "ce ase", + "beau mont", + "mar ta", + "kel vin", + "ho ff", + "sun il", + "n da", + "co b", + "sh ale", + "ree dus", + "un boxing", + "u bio", + "re opened", + "n all", + "capsu les", + "mar r", + "himalay as", + "swee ter", + "ja z", + "f mr", + "twee ter", + "dha ka", + "na u", + "de mi", + "d fs", + "ta urus", + "fad ing", + "it utes", + "ci p", + "over flow", + "jef frey", + "don ny", + "car tunesapp", + "ðŁį ij", + "prefe cture", + "danc ed", + "c pt", + "ple asing", + "ital k", + "earth quakes", + "ul ation", + "hi o", + "ãĢ ĭ", + "ant an", + "nutri ent", + "de ere", + "selec ts", + "enrich ment", + "r iti", + "tram pol", + "bl amed", + "j ia", + "contribu tors", + "chesa peake", + "pi geons", + "tribun al", + "mad uro", + "w su", + "ilo ve", + "effici ently", + "dar cy", + "war ms", + "ar ra", + "ec u", + "ho wer", + "strugg led", + "rajini kanth", + "ðŁĺ¢ ðŁĺ¢", + "hou sing", + "str at", + "eli x", + "disp ro", + "raf fic", + "thi erry", + "na sty", + "c fb", + "staf fing", + "al ma", + "back ers", + "hen son", + "sky walker", + "reale state", + "roo s", + "ness y", + "chan ce", + "cair ns", + "c ci", + "pe dal", + "ly ft", + "cross word", + "wait er", + "only in", + "kru ger", + "k ir", + "alej andro", + "car tier", + "car rera", + "re paired", + "ou at", + "un clear", + "un breakable", + "today in", + "qu eries", + "jo dy", + "gen ital", + "win ner", + "to l", + "kelown a", + "fascin ated", + "ãĥ ¬", + "sris ri", + "squ ared", + "spr ung", + "negoti ate", + "priv ately", + "av en", + ">> >>>", + "g ical", + "gav in", + "chester field", + "zu mba", + "or r", + "nat alia", + "impeach ment", + "mn l", + "car at", + "criti que", + "credi ble", + "trac y", + "tan i", + "musi k", + "jig saw", + "gam bia", + "tol kien", + "fe u", + "as per", + "sav ory", + "fo xx", + "f itt", + "mar lon", + "l rt", + "v ell", + "p br", + "imprison ed", + "i om", + "chu l", + "wind shield", + "kay e", + "ba a", + "chor d", + "s art", + "al gon", + "minister ial", + "nat geo", + "la zio", + "nor ms", + "ðŁijį ðŁijį", + "lic king", + "fut bol", + "un sung", + "dalla scowboys", + "sh red", + "distur b", + "dev ine", + "be ards", + "ch f", + "b day", + "ro sso", + "ig or", + "ay i", + "si ren", + "k air", + "sti les", + "ro f", + "mag nets", + "un 
cover", + "mou se", + "bang ing", + "si ghted", + "spe ople", + "impac t", + "row land", + "kir a", + "environ ment", + "love the", + "p sis", + "mish ra", + "gl endale", + "ca jun", + "o che", + "de ception", + "sex ist", + "stra ws", + "s ga", + "buff er", + "apost le", + "sp l", + "pop up", + "ðŁļ Ĺ", + "r g", + "up er", + "ball in", + "i dy", + "occa sional", + "national park", + "ðŁı Ĭ", + "u an", + "innov ation", + "ภ«", + "te aparty", + "re tte", + "counter fe", + "b ha", + "rec s", + "ig en", + "ðŁĮ IJ", + "humming bird", + "cu r", + "ha ven", + "la zar", + "pue blo", + ": :", + "zi onist", + "op ath", + "inver ness", + "promo ter", + "carto on", + "cabine ts", + "mahog any", + "surve ying", + "r ational", + "feel ing", + "testi fy", + "so w", + "oc on", + "ภ¢", + "ne el", + "mar is", + "sol itary", + "che mo", + "rad cliffe", + "sim ons", + "ros ary", + "new er", + "jo die", + "re tali", + "pra wn", + "pad dy", + "hen ge", + "k ala", + "im plant", + "at y", + "bren twood", + "par adox", + "ene z", + "re designed", + "p our", + "wy d", + "al de", + "௠ģ", + "sol d", + "biomed ical", + "๠Ĥ", + "tt tt", + "mat teo", + "ys er", + "new ton", + "de bun", + "ner dy", + "loo l", + "wo on", + "elisa beth", + "ec c", + "wh i", + "ach o", + "salv age", + "sal aries", + "qu ity", + "navig ating", + "oph thal", + "con soles", + "re built", + "o pec", + "ast ers", + "sho red", + "set list", + "kathr yn", + "rhy mes", + "re visiting", + "ash ish", + "li ft", + "re post", + "sole il", + "âı ±", + "weal th", + "sa at", + "we c", + "king james", + "flipk art", + "field work", + "se gu", + "mo dal", + "bu b", + "are rs", + "ðŁį Ĵ", + "clo oney", + "pad dington", + "necess ity", + "guth rie", + "pen te", + "li mo", + "jo sie", + "ar tin", + "en c", + "l hs", + "betra yal", + "info graphics", + "i er", + "mo a", + "hear ings", + "bon jour", + "sym bolic", + "ag ro", + "wed ges", + "krist ina", + "wild flower", + "athle tic", + "photograph y", + "pe sh", + "ca hill", + "chi lean", + "gou l", + "fi oren", + "ðŁij ¶", + "z il", + "sk im", + "bad oo", + "deli a", + "tre ble", + "n cc", + "ðŁĩ¦ ðŁĩ", + "a house", + "bul lock", + "sol itude", + "ا٠Ĩ", + "can cers", + "futureof work", + "hu tch", + "water shed", + "war mongers", + "sp illed", + "colom bo", + "mo th", + "associ ations", + "weigh ed", + "global goals", + "not just", + "christ i", + "tor g", + "swe ating", + "man eu", + "clu sters", + "â̼ï¸ı â̼ï¸ı", + "ta ped", + "ul y", + "tru sting", + "yu suf", + "te in", + "ra b", + ", ,,,", + "sin ai", + "audi ble", + "explic it", + "cro wns", + "sch iz", + "at least", + "ðŁĹ £", + "de bra", + "je suit", + "ene gger", + "z hen", + "one sie", + "i it", + "ss f", + "gur gaon", + "chak ra", + "bear cats", + "k ran", + "k awa", + "reque sting", + "han over", + "g end", + "sor os", + "mer cy", + "lovel y", + "do omed", + "tim my", + "ku z", + "ul l", + "ab ram", + "sa ison", + "ãĥ «", + "clean ers", + "re mo", + "circu its", + "bar red", + "o th", + "mo ist", + "madele ine", + "gall o", + "u j", + "per mits", + "hea viest", + "car ols", + "az te", + "gior gio", + "flo ats", + "decl aring", + "us rc", + "min at", + "craf ts", + "pri ma", + "conven i", + "nickelo deon", + "danc ing", + "ceremon ial", + "blo gg", + "tw p", + "anglic an", + "she k", + "k nick", + "( ((", + "hubb ard", + "harve y", + "hit man", + "fen g", + "we some", + "for za", + "s word", + "op us", + "bro m", + "gi bility", + "z al", + "m unch", + "dance hall", + "gre edy", + "hd mi", + "re birth", + "ðŁĺĭ ðŁĺĭ", + "s world", + "figur ine", + 
"com post", + "k f", + "engra ving", + "gior no", + "st ana", + "k man", + "ham ster", + "compos ers", + "aj e", + "func tionality", + "pol k", + "is ons", + "air planes", + "te se", + "hor rors", + "musc at", + "gi ven", + "sp ence", + "ðŁĩ¸ ðŁĩ", + "eli ot", + "ach illes", + "fre ck", + "crypto currencies", + "sou ther", + "hal o", + "bor neo", + "polit ic", + "hahahaha h", + "up state", + "si ena", + "obsc ure", + "hau sen", + "lloy d", + "happy friday", + "motor bike", + "bon a", + "americ as", + "hol s", + "- (", + "spor ty", + "un aware", + "reven ues", + "christop her", + "bank sy", + "av an", + "ev apor", + "com press", + "eyel iner", + "to dos", + "buff y", + "renewable energy", + "ly rical", + "ar chan", + "rapi st", + "fair trade", + "lma ooo", + "beat z", + "pro active", + "la pse", + "ir ical", + "revers al", + "po de", + "mcin tyre", + "mac au", + "ãĥ ķãĤ", + "nash grier", + "f sa", + "g all", + "çĶ Ł", + "perpe tr", + "il ya", + "configur ation", + "% ;", + "str ange", + "rac i", + "ภĩ", + "pic kups", + "kov sky", + "mam mal", + "w ps", + "g able", + "compar ative", + "z h", + "save our", + "da vey", + "on etsy", + "mu ssels", + "mis er", + "cri stina", + "electr on", + "cra ve", + "lo ren", + "precipit ation", + "m z", + "ðŁį «", + "vin cen", + "snow board", + "no ida", + "ah n", + "marin ated", + "g tr", + "town hall", + "min is", + "bethe l", + "adv an", + "su ra", + "shi el", + "fur ry", + "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤ", + "lyn d", + "so il", + "sc ence", + "sen eca", + "shar jah", + "dick ens", + "credenti als", + "av ar", + "per k", + "requ iring", + "pre fer", + "j ian", + "de ca", + "r ach", + "ing for", + "del e", + "be ep", + "ðŁĴ »", + "cis ely", + "hu ddle", + "green sboro", + "haw king", + "ho ax", + "hang ar", + "ç ľ", + "mis o", + "lo vin", + "gre ta", + "ab ad", + "logi e", + "at an", + "snow flake", + "mahe sh", + "fear the", + "al kal", + "bobb lehead", + "ba hn", + "ju dged", + "fu tu", + "feli x", + "ðŁį ĵ", + "pi ke", + "der iv", + "notic es", + "au er", + "dis super", + "or da", + "wi pes", + "am ino", + "stri kers", + "foo tb", + "dram as", + "pun ching", + "score less", + "heming way", + "bi h", + "bal lad", + "chat ter", + "am mo", + "kle in", + "fabric ation", + "kari m", + "z end", + "hi sto", + "vol ta", + "rock y", + "marke ter", + "xtre me", + "sequ encing", + "paradig m", + "cle ats", + "boom ing", + "âģł âģł", + "block ade", + "promp ts", + "yogh urt", + "pur pose", + "nu r", + "regu late", + "nois y", + "ing rid", + "bird watching", + "bar tender", + "Ù ĥ", + "wor dof", + "cha otic", + "shor ty", + "el dest", + "z app", + "onceupon atime", + "fl yo", + "rit os", + "mike quind", + "ðŁIJ ´", + "regi stering", + ". 
]", + "ad ol", + "gg gg", + "pur ge", + "kid lit", + "ar bor", + "val ves", + "synago gue", + "o th", + "unanim ous", + "veri fication", + "dar rell", + "ãģ Ħ", + "vander bilt", + "tape stry", + "pro sper", + "did dy", + "dra fting", + "de cep", + "marqu is", + "st int", + "michael jackson", + "pee led", + "men us", + "bb b", + "sc are", + "ema il", + "wri gley", + "it is", + "f ell", + "some thin", + "bar ra", + "ed gar", + "di pping", + "pu ddle", + "sla de", + "lear ner", + "jal en", + "ðŁ§ IJ", + "the daily", + "mikequind azzi", + "ju x", + "iq bal", + "mckin ney", + "ra iser", + "ef an", + "dr one", + "cat o", + "pic ket", + "cro we", + "l att", + "uk o", + "giuse ppe", + "hin i", + "synthe si", + "ponti fex", + "song writing", + "to d", + "swit ches", + "din ners", + "h q", + "gabri elle", + "pensac ola", + "cir cle", + "expo ses", + "ev s", + "riyad h", + "pro men", + "o ck", + "sa j", + "cit ation", + "brew co", + "jo si", + "ep aper", + "dri f", + "point less", + "tang led", + "cri pp", + "line ups", + "fairi es", + "daz e", + "mour n", + "bla dder", + "sal z", + "bur undi", + "book mark", + "the people", + "sub sequ", + "princi pal", + "sk er", + "court ney", + "a oki", + "rac ers", + "ad m", + "mom a", + "critical role", + "hou n", + "shed ding", + "sa ka", + "ace ous", + "mck ay", + "hus bands", + " ½", + "me da", + "accu sations", + "ro sel", + "nc is", + "witne ssing", + "or ama", + "go ds", + "hil ton", + "el man", + "ÃŃ n", + "meg ap", + "cra ven", + "announ cer", + "crit eri", + "sheffiel dissuper", + "milit ant", + "consu l", + "hoo ded", + "aby ss", + "b x", + "ma dam", + "lo cu", + "mary am", + "manic ure", + "grat is", + "ac tresses", + "ros ario", + "this dayin", + "king ly", + "gn ome", + "cel ine", + "r ous", + "he el", + "lil ac", + "vish al", + "ab h", + "thor ns", + "s ls", + "ne al", + "construc ting", + "be ren", + "s lang", + "ma ins", + "far ra", + "sar ko", + "pai ge", + "gu iller", + "l ala", + "ice berg", + "nou n", + "plann ers", + "u mmm", + "ou ses", + "ill ary", + "ma an", + "box ing", + "zi pper", + "srin agar", + "migu el", + "o str", + "mp o", + "responsi bly", + "lan terns", + "appli ance", + "x b", + "gren ade", + "neglec t", + "dy sle", + "ham mock", + "ne ctar", + "wit cher", + "r gv", + "di ence", + "ser bian", + "seed ed", + "cru z", + "bi sh", + "sp he", + "e q", + "sky rim", + "alge bra", + "phil ately", + "bungal ow", + "ge off", + "y ves", + "demand ed", + "consider ations", + "the vamp", + "pawan kalyan", + "co ded", + "grit ty", + "erup tion", + "se infeld", + "uni denti", + "ëĭ Ī", + "wor m", + "ac us", + "se ung", + "dun g", + "ro land", + "su d", + "di visions", + "ab lanc", + "shor test", + "j f", + "p oun", + "plant based", + "be to", + "tough er", + "mc o", + "don et", + "mark us", + "v fl", + "ðŁı ł", + "open ing", + "co ward", + "caber net", + "o xi", + "burle sque", + "sand ra", + "su mo", + "consi st", + "tho t", + "cay man", + "motor ola", + "gutier rez", + "d slr", + "y w", + "no bel", + "nov ice", + "moms demand", + "grun ge", + "sp or", + "d cc", + "pre sses", + "sli st", + "allot ment", + "voc ational", + "ft c", + "pu ja", + "lo ven", + "utt arak", + "tan dem", + "sh ep", + "come dians", + "anat om", + "cant wait", + "healthye ating", + "west side", + "mar gins", + "chi ang", + "asbe stos", + "stupi dity", + "proble matic", + "fit bit", + ": $", + "ceil ings", + "shu a", + "protec tions", + "bio tic", + "beng ali", + "re sts", + "bien nale", + "tim o", + "cul min", + "e minent", + "affe ction", + "unbeliev ably", + 
"individu ally", + "canvas sing", + "wh itt", + "nov asco", + "chin son", + "h pe", + "go w", + "gloucester shire", + "pa o", + "thresh old", + "chev ron", + "s ine", + "we ther", + "pp ie", + "aqu ino", + "antwer p", + "âĸ ¬", + "po on", + "inst af", + "equ ine", + "cinemato graphy", + "nbaf inals", + "vali ant", + "kil kenny", + "te rence", + "syste mic", + "sr l", + "p ound", + "made ira", + "pl ough", + "tre cht", + "mat ed", + "mp d", + "ransom ware", + "ph in", + "li qui", + "bb ce", + "boom er", + "i standwith", + "con ju", + "r te", + "nar a", + "foo lish", + "da shing", + "vier nes", + "br ite", + "da u", + "juni per", + "ai da", + "you now", + "ra zer", + "de i", + "repe ating", + "comfor ting", + "adjac ent", + "e to", + "ca sted", + "chat ur", + "mu er", + "syn th", + "san itary", + "mac le", + "independ ent", + "law ful", + "e erie", + "h or", + "ðŁĴ Ń", + "am rit", + "vel o", + "station ery", + "mu f", + "may may", + "contempl ating", + "elabor ate", + "gre gor", + "dri es", + "ac col", + "ภļ", + "schwarz enegger", + "ill nesses", + "day break", + "follow back", + "collu sion", + "electr onic", + "jo vi", + "hiro shima", + "ta w", + "hom ec", + "mic ah", + "qu itting", + "fro sting", + "ben fica", + "hel i", + "s ical", + "pic cad", + "corpor ate", + "ment orship", + "you are", + "sing er", + "shi va", + "ru ne", + "ing er", + "ri um", + "play able", + "doo p", + "wil low", + "ter re", + "ni p", + "at d", + "war bler", + "profession ally", + "er ase", + "proce ed", + "pedestri ans", + "mis chief", + "ben ding", + "alas kan", + "c kett", + "mo p", + "dd les", + "shut ter", + "ge ared", + "atene o", + "ma deline", + "g ations", + "o sha", + "der ick", + "sw ild", + "an gry", + "pat ents", + "hun k", + "decre ased", + "fr y", + "ðŁĴĸðŁĴĸ ðŁĴĸ", + "sal on", + "quant ities", + "d ario", + "ni gel", + "ku ma", + "jen n", + "happ ye", + "xx x", + "rex perience", + "pro s", + "au sch", + "rele ssly", + "ham burger", + "fuku shima", + "er ne", + "stat ec", + "ren d", + "may field", + "j one", + "lef ty", + "bern stein", + "sm il", + "gener ates", + "fore station", + "band its", + "ta yo", + "r ca", + "ac ci", + "rodri go", + "kn app", + "elo vers", + "vege tation", + "u ral", + "le ft", + "ħ ï¸ı", + "worl dre", + "sur i", + "embar k", + "w son", + "ba you", + "mu ller", + "mo vers", + "ðŁķ º", + "presby ter", + "l f", + "cre e", + "bat b", + "sal am", + "demonstr ations", + "an ec", + "n pc", + "it ics", + "to graphy", + "re inst", + "thur st", + "tal e", + "off ences", + "smart city", + "bro tha", + "ofthe year", + "in valuable", + "ear n", + "ðŁijı ðŁı½", + "kre mlin", + "gra dy", + "town fc", + "guern sey", + "ma ha", + "contag ious", + "dre x", + "be en", + "( £", + "nati vity", + "k tm", + "somer halder", + "comp ounds", + "íķ ĺ", + "\" â̦", + "af g", + "ott news", + "h ound", + "fire fly", + "cil an", + "donet sk", + "volunte ered", + "ak ira", + "è ª", + "sing ul", + "st h", + "dro wned", + "mand o", + "he ir", + "ðŁİīðŁİ Ī", + "tax is", + "y uki", + "vel d", + "k ans", + "el k", + "ran ts", + "hash tag", + "t eng", + "ro g", + "a at", + "gru b", + "e ber", + "in india", + "colo ssus", + "sig ni", + "so ever", + "mile stones", + "der o", + "differen tial", + "phu ket", + "master mind", + "an gh", + "mel ani", + "bro ker", + "actor vijay", + "stun ned", + "continu ity", + "af fl", + "vo cal", + "perenni al", + "fianc é", + "in complete", + "hun ts", + "re issue", + "domin ates", + "tur meric", + "ro am", + "ri on", + "bag ged", + "nas sau", + "fu t", + "x ox", + "national 
trust", + "jo ye", + "san o", + "hearth stone", + "dis respect", + "le es", + "h se", + "siber ian", + "offe e", + "re stock", + "wolf gang", + "re gan", + "plan o", + "un wind", + "re par", + "mil le", + "] ,", + "skul l", + "fat ally", + "concep tual", + "ðŁĮ ²", + "f é", + "ber to", + "b ms", + "u a", + "mag na", + "notre dame", + "le te", + "la undering", + "heartw arming", + "buffe tt", + "go at", + "pe abo", + "wind mill", + "v ac", + "continu ally", + "az alea", + "mem brane", + "can cels", + "make yourown", + "athe red", + "p to", + "tor pe", + "ðŁĺ ł", + "ðŁĴ §", + "sc ares", + "le aking", + "z et", + "pix els", + "ac i", + "kh il", + "marath i", + "ðŁĻı ðŁı½", + "u la", + "tam u", + "chandi garh", + "z agre", + "aa b", + "pronoun ced", + "aubre y", + "sand er", + "pun ta", + "har low", + "ic elan", + "celebr atory", + "so t", + "unci ation", + "stru ly", + "mc dowell", + "deepi ka", + "remin ders", + "my stical", + "ct c", + "chat ted", + "s ica", + "bar gains", + "ch hat", + "ru bin", + "m net", + "oiland gas", + "pel ican", + "o at", + "mor ality", + "k our", + "i h", + "nu clear", + "gc u", + "ric her", + "vene zia", + "m ma", + "le ith", + "ac company", + "rich mond", + "sports net", + "ba ahu", + "smu ggling", + "mm i", + "ðŁĩ®ðŁĩ ª", + "twi sts", + "sahi b", + ".... .", + "amb itions", + "il lo", + "histor ical", + "fo rec", + "show biz", + "pon ies", + "chas ers", + "remo del", + "will ing", + "prince sses", + "am ple", + "cushi ons", + "ac les", + "lot r", + "da ch", + "an the", + "in corporate", + "new bury", + "ki ri", + "fried rich", + "ab v", + "ball ers", + "alber t", + "ðŁij Ń", + "let i", + "nan op", + "ci de", + "anal o", + "n sf", + ")) ))", + "griffi ths", + "valen ci", + "ro ano", + "fun run", + "babys itting", + "ca day", + "ent re", + "u ck", + "slu g", + "tic al", + "the sims", + "ro ar", + "car ney", + "g am", + "sto we", + "fi d", + "bun ny", + "sham rock", + "pe cu", + "mol ina", + "go cougs", + "con tributes", + "transform ation", + "mo y", + "v aj", + "sever y", + "antioxid ants", + "thir teen", + "sight seeing", + "l j", + "reversi ble", + "odd ly", + "hoo kah", + "nou vel", + "hal al", + "fe i", + "stab les", + "mul t", + "ho pped", + "bra ids", + "inter change", + "ghana ian", + "ww ww", + "eth no", + "con junction", + "ago v", + "ye ti", + "earth and", + "ts p", + "con serve", + "heir loom", + "metaph or", + "woo f", + "tor io", + "self less", + "n wa", + "em ilia", + "yl ene", + "y xe", + "gi ar", + "moder ating", + "pro bz", + "b fi", + "ne er", + "du mmy", + "hanuk kah", + "we bber", + "k v", + "eye brow", + "dag ger", + "su mp", + "ra ges", + "ork ney", + "tb o", + "hal sey", + "assign ments", + "tr onic", + "scri b", + "co on", + "an war", + "# âĢİ", + "jal ape", + "flori da", + "qu aid", + "haw keyes", + "âĻ¡ âĻ¡", + "street car", + "ro g", + "dat lantic", + "gran ola", + "un changed", + "expect ation", + "Ù ĩ", + "mar lin", + "gu mmy", + "ðŁĻı ðŁı¾", + "awareness month", + "oil painting", + "mu th", + "per ch", + "jun to", + "villa gers", + "mor g", + "che ated", + "web comic", + "the future", + "d ps", + "la kings", + "men tioning", + "vo or", + "ident ities", + "accor d", + "mc gu", + "l pga", + "rum our", + "massi vely", + "m pls", + "heal y", + "d ate", + "sp oli", + "re visited", + "on t", + "al and", + "scru tiny", + "lakel and", + "bl ending", + "< /", + "an kara", + "jami edor", + "metab olic", + "f ences", + "ann y", + "å ħ", + "semic on", + "oo tt", + "space ship", + "wack y", + "le ta", + "ap ac", + "she e", + "in herit", + "do 
res", + "ðŁĩ¨ðŁĩ ¦", + "gent e", + "tw ick", + "ri ms", + "gal ve", + "de ville", + "king fisher", + "scorpi o", + "ow l", + "al ar", + "vari an", + "ðŁĹ ĵ", + "vene tian", + "star dust", + "then orth", + "q ing", + "har rington", + "consul ate", + "spectac le", + "ho bbs", + "tur ks", + "gre er", + "mat ing", + "ðŁİ Ģ", + "ðŁĮ Ģ", + "direc ts", + "í ĭ", + "pompe o", + "vo iced", + "la os", + "tz u", + "pro me", + "pri sm", + "mer c", + "fortun ately", + "bc fc", + "mcdon nell", + "not sorry", + "smi led", + "t ba", + "for war", + "mid term", + "dar by", + "we instein", + "up grading", + "wol ff", + "bron co", + "cab ello", + "ðŁ¥ ĩ", + "fi able", + "shar pe", + "bat tered", + "sat o", + "myth ical", + "instap ic", + "pre pped", + "eni um", + "e spo", + "di aper", + "explan ations", + "who pping", + "ragn ar", + "pe el", + "antibio tic", + "l acks", + "harri son", + "li sm", + "au l", + "qu ail", + "martin a", + "sent encing", + "sc ams", + "di di", + "tr onics", + "ãħł ãħł", + "go ff", + "za in", + "param ore", + "cha ined", + "clin ton", + "li ff", + "cott ages", + "em on", + "reve rend", + "consu mer", + "ce an", + "t any", + "lum pur", + "e bay", + "sto ol", + "ðŁĺ» ðŁĺ»", + "ta pro", + "h ath", + "modern art", + "just ine", + "prover b", + "app y", + "tra x", + "mani fest", + "am bu", + "nai k", + "pe pp", + "r sd", + "mer chants", + "kitch ener", + "shi fted", + "li zz", + "âĺħâĺħ âĺħâĺħ", + "âĢĶâĢĶâĢĶâĢĶ âĢĶâĢĶâĢĶâĢĶ", + "uto pia", + "tom o", + "ou ted", + "com ers", + "chiroprac tic", + "book club", + "cin dy", + "pro hibition", + "se uss", + "ë¯ ¼", + "thin kin", + "rr rr", + "go fund", + "t ack", + "om b", + "catastro phic", + "ling u", + "guild ford", + "bo td", + "ॠĭ", + "plan ter", + "^ ^", + "win k", + "kath mandu", + "sto ppers", + "smooth ies", + "re efs", + "hin d", + "bell amy", + "Ħ ë", + "waste water", + "vo or", + "nat l", + "! ]", + "re el", + "y ap", + "scoo by", + "work space", + "corin thians", + "bl un", + "obli gation", + "g bbo", + "dy son", + "cra vings", + "ell ington", + "dap l", + "wre xham", + "earthand clouds", + "uk runchat", + "positi oned", + "kal b", + "four square", + "jo ck", + "im pending", + "even ing", + "ath y", + "pro claimed", + "c ites", + "ann apolis", + "san i", + "mar th", + "ir l", + "accom mo", + "ka a", + "fin a", + "y aa", + "di sper", + "ec ar", + "bha k", + "will y", + "ðŁĺĢ ðŁĺĢ", + "mcder mott", + "mo j", + "gener ational", + "u said", + "train ing", + "lon ely", + "lo res", + "impe cc", + "âĢ IJ", + "beav ers", + "ma ki", + "he b", + "aap l", + "å ı", + "wolver hampton", + "leader board", + "me u", + "c fa", + "easter n", + "hu r", + "civil war", + "ou rage", + "hor ned", + "le high", + "awar ds", + "evi dent", + "gi gab", + "r ous", + "ma del", + "ro byn", + "ur gently", + "k ors", + "en as", + "heis man", + "bam bam", + "fab ian", + "f om", + "evalu ating", + "assemb ly", + "out sourcing", + "hun tsville", + "ðŁĶ ª", + "justi fied", + "cashi er", + "sp aper", + "buc keye", + "analy tical", + "illumin ati", + "au tho", + "o j", + "sha de", + "geel ong", + "wh ey", + "he aton", + "terri bly", + "ele k", + "un charted", + "sd live", + "moto cross", + "her mes", + "dar shan", + "dar lington", + "cash mere", + "gri pping", + "cilan tro", + "pun ish", + "... :", + "ðŁĴ Ħ", + "inst ance", + "der i", + "lo bal", + "muk her", + "sp ar", + "thin ker", + "fre mont", + "com piled", + "color ado", + "vig ne", + "sm d", + "whe ad", + "villa ge", + "le ek", + "formula e", + "ta res", + "persist ence", + "?? 
????", + "ped ago", + "he z", + "alzheim ers", + "vul ture", + "off ence", + "is great", + "suff ra", + "kick in", + "h mmmm", + "broad way", + "ï¸ı @", + "art i", + "alli son", + "endor ses", + "ry u", + "lolli pop", + "soy bean", + "kend all", + "cer a", + "inv ade", + "( ðŁĵ·:", + "conver ter", + "car pets", + "ho bo", + "fr it", + "pe ac", + "es qu", + "ern an", + "ou f", + "an il", + "di ffer", + "ch ing", + "bre cht", + "sp g", + "daven port", + "stra va", + "sever n", + "n gos", + "stor ians", + "fe te", + "parame dic", + "j hb", + "al amo", + "sne aking", + "gold coast", + "roof s", + "isi l", + "depic ted", + "projec tions", + "nu mb", + "o ss", + "ep i", + "glu cose", + "zid ane", + "infin iti", + "íĺ Ħ", + "ran som", + "ton ics", + "fal k", + "g ler", + "ou tw", + "re ss", + "week ly", + "the on", + "n ole", + "ðŁĩªðŁĩ º", + "vol ley", + "sum mar", + "neg ativity", + "sam son", + "ye w", + "aus votes", + "ju l", + "ju dy", + "f art", + "pra yed", + "pal ate", + "multicul tural", + "double header", + "cycl ones", + "pier re", + "ãģ ¨", + "âĺ łï¸ı", + "rt w", + "conver ting", + "wir ral", + "l ari", + "ir relevant", + "austin mahone", + "an che", + "ya an", + "sd f", + "$ .", + "explo ding", + "ulti mate", + "prof ici", + "gofund me", + "cell ence", + "ep stein", + "bul lied", + "sep tic", + "à® ¤", + "lu mber", + "cu ff", + "vsco cam", + "pl or", + "ภ¥", + "se ok", + "ro to", + "venezu elan", + "sor ta", + "spir ited", + "daniel padilla", + "team sisd", + "radio active", + "icelan dic", + "ðŁĴ ¤", + "ver e", + "accommo date", + "shi pp", + "ot ter", + "ol ina", + "e go", + "su la", + "san antonio", + "de as", + "simil arities", + "âļ ¾", + "y om", + "bro ward", + "å °", + "can cun", + "veri fy", + "on te", + "candle light", + "ìł ķ", + "inf ants", + "az am", + "ðŁĺ °", + "le ven", + "un stable", + "bloom ington", + "x ford", + "con tour", + "y p", + "innov ator", + "histor ies", + "po y", + "lolo lol", + "ex pires", + "cat alo", + "bill boards", + "an ab", + "el ic", + "novasco tia", + "fa ire", + "ìĿ ´", + "rock well", + "gr ille", + "az tec", + "joh or", + "ur struly", + "fi ren", + "dun lop", + "id le", + "port man", + "jo es", + "tx hsfb", + "hol m", + "cham ele", + "under world", + "lo ss", + "ti em", + "therap ists", + "past ure", + "pa ste", + "ing now", + "vul can", + "ra gon", + "lar kin", + "o shi", + "ho co", + "child hood", + "umb rel", + "success or", + "kath y", + "iz en", + "° ï¸ı", + "share holders", + "ol ga", + "ai b", + "he ap", + "fl aming", + "ro u", + "air tel", + "rat t", + "z ane", + "vo w", + "thor ough", + "sn ag", + "par th", + "un conscious", + "ve y", + "new release", + "gh ee", + "croati an", + "facilit ating", + "swan son", + "astor ia", + "to logy", + "master y", + "ðŁ¤ ij", + "bil bao", + "trou pe", + "the ori", + "chey enne", + "ro tt", + "shore line", + "gra sso", + "master chef", + "+ )", + "vi x", + "ellen show", + "as g", + "an ak", + "ku ya", + "safar ilive", + "debu ting", + "blu m", + "list ener", + "v ins", + "book shelf", + "smart cities", + "makeyourown lane", + "; ;", + "ðŁIJ ¯", + "ri zz", + "on ward", + "bull dog", + "bear ish", + "vir uses", + "fri gh", + "lin den", + "we iser", + "sn t", + "gon a", + "dre sden", + "fl anders", + "cu k", + "wheel ing", + "ba u", + "atu esday", + "surf ers", + "swi ft", + "mc call", + "arbitr ation", + "aw d", + "mon c", + "b ine", + "at x", + "re fr", + "mi ro", + "po sey", + "n are", + "rit ter", + "âģ ¦", + "play book", + "blow out", + "sports manship", + "s oooooo", + "malay alam", + "gri ms", + 
"bur bank", + "infin ity", + "sar gent", + "oit nb", + "joseph ine", + "ski pping", + "par kin", + "excur sion", + "semin ars", + "jo har", + "par tridge", + "post game", + "ll ll", + "blan che", + "temp ting", + "m na", + "lu ka", + "is ers", + "to ffee", + "bar ron", + "he mmings", + "sa e", + "go hawks", + "cu pid", + "li mbs", + "con se", + "un common", + "z ada", + "head shot", + "so ils", + "pione er", + "mam ma", + "sem itic", + "pan dey", + "jamiedor nan", + "spl its", + "vel a", + "son i", + "ra ff", + "t mobile", + "âŀ ĸ", + "pra wns", + "lit er", + "enjo yment", + "egg plant", + "tu b", + "cultur al", + "us ic", + "suspici on", + "sy cam", + "summ ed", + "ma du", + "ho ck", + "up wards", + "eye ing", + "ri ve", + "assas sins", + "âĤ ¬", + "out fy", + "chi ves", + "t ner", + "la is", + "por ridge", + "sad dest", + "w cc", + "vick i", + "sna ils", + "biz italk", + "mill an", + "ðŁĮ į", + "sam oa", + "j ing", + "mi key", + "gu j", + "chel ms", + "eli gibility", + "arma da", + "thro p", + "surger ies", + "ãĤ ¿", + "mo hawk", + "ex its", + "me m", + "is lington", + "c me", + "land fill", + "kait lyn", + "ðŁİ ¼", + "combin ations", + "tomorrow land", + "ver b", + "cor a", + "pre cisely", + "na om", + "ðŁĨ ķ", + "shr ink", + "sof tly", + "merce de", + "mand el", + "poo dle", + "ball erina", + "sop h", + "jux ta", + "y at", + "ary an", + "hesit ate", + "lo wered", + "gu lar", + "dungeon sand", + "ron an", + "my ri", + "sp f", + "men opau", + "gra sp", + "pa thi", + "fe asi", + "fla w", + "shi story", + "ste ward", + "gg le", + "fay re", + "cli que", + "credi bility", + "yo g", + "sec tion", + "mu sko", + "se ville", + "no tt", + "cal m", + "mate o", + "indic ted", + "fi ba", + "by l", + "lin o", + "u kin", + "!! #", + "enig ma", + "siri us", + "bu sc", + "ðŁį Ĭ", + "mac kerel", + "psal ms", + "a at", + "tomorrow spaper", + "ðŁĺ ĸ", + "p fc", + "........ ...", + "shre k", + "mul let", + "o sh", + "danger ously", + "immen sely", + "am ur", + "ðŁį Ĥ", + "pro por", + "sy a", + "london marathon", + "abo ve", + "obli gatory", + "pro v", + "ra cha", + "alex is", + "pri mary", + "sh h", + "ether net", + "d stv", + "cou gar", + "un lucky", + "ni l", + "steak house", + "mel a", + "fc bayern", + "cause way", + "ca therine", + "fluore scent", + "nx t", + "to kyo", + "au sp", + "releg ation", + "qui zz", + "shored itch", + "proud tobe", + "promo s", + "inter acting", + "home brew", + "da esh", + "w pg", + "stead ily", + "provin ces", + "bal lots", + "i ah", + "al to", + "< <<", + "you u", + "ri ley", + "prefe rence", + "tra verse", + "incen se", + "am munition", + "ho dges", + "# @", + "hail state", + "tart an", + "witch craft", + "vent ilation", + "liber tarian", + "! 
â̦", + "ow es", + "% !", + "ong chang", + "bru shing", + "le ic", + "fi ber", + "under attack", + "down load", + "ex pir", + "hy o", + "pompe y", + "mc bride", + "y ag", + "stre e", + "com bat", + "ten ding", + "ai ra", + "gug gen", + "ab ra", + "in na", + "fli ps", + "aw al", + "m ach", + "dol lar", + "inspir ations", + "z um", + "o du", + "it ty", + "video game", + "aqu aman", + "har u", + "bel fast", + "je b", + "but ch", + "us gs", + "calcu lus", + "go yal", + "mor gen", + "x finity", + "stand up", + "contrac ep", + "sab re", + "na be", + "in secure", + "gener ously", + "epit ome", + "l w", + "t ca", + "narr atives", + "don nell", + "pand as", + "ber gh", + "tu t", + "ker al", + "fel icity", + "br ampton", + "quinte t", + "nom ore", + "ðŁĶ ij", + "lo i", + "alham dulil", + "ðŁĶ¥ ðŁĶĹ", + "ston er", + "shaw l", + "clin ical", + "bren dan", + "gon e", + "fla wed", + "tri ppy", + "j g", + "al location", + "po aching", + "ve vo", + "mo cks", + "lef tist", + "bon uses", + "condem ned", + "abil ity", + "st ating", + "microbi ome", + "bio logist", + "for you", + "wahl berg", + "ss or", + "ift ar", + "w ul", + "ÑĦ оÑĤ", + "pom er", + "me me", + "ver te", + "tre ll", + "tra it", + "in let", + "hormon es", + "deliber ately", + "vill ar", + "battle ship", + "p bl", + "tw enti", + "ho kies", + "dal ail", + "say a", + "may fair", + "han s", + "die ts", + "⾨ ⾨", + "od in", + "hot spur", + "pap i", + "k ana", + "k amp", + "fin na", + "flo tus", + "ti ans", + "unic orns", + "tribe ca", + "chang ers", + "fore ground", + "out a", + "inv aders", + "gett ys", + "tomorrowspaper stoday", + "mac millan", + "hand written", + "w fp", + "u de", + "state of", + "base d", + "âĺģ ï¸ı", + "cas m", + "psy ched", + "histor ians", + "fol d", + "d da", + "ag grav", + "p ans", + "green way", + "au sv", + "ðŁĺ ¶", + "shradd ha", + "inde x", + "be sti", + "zim mer", + "t ness", + "eye shadow", + "ot te", + "go ts", + "distribu ting", + "pro min", + "yo l", + "ace a", + "tram rahim", + "hoo per", + "supre me", + "jam min", + "intu itive", + "quali fications", + "sli m", + "sid di", + "jay ne", + "tri pping", + "g tx", + "pun s", + "e manuel", + "om g", + "mid summer", + "in to", + "succul ent", + "ri en", + "new mexico", + "o or", + "hoo king", + "in f", + "ðŁ¤ Ŀ", + "flir ting", + "na hi", + "g friend", + "t ps", + "hel ix", + "z s", + "on ie", + "ct f", + "kri s", + "irresi stible", + "fla p", + "ðŁijıðŁı» ðŁijıðŁı»", + "us wnt", + "ru d", + "ram ps", + "pin oy", + "ot w", + "lol z", + "low ering", + "favor ite", + "t mc", + "phra ses", + "her mi", + "aver aging", + "em br", + "ben o", + "estu ary", + "sle eve", + "ribb ons", + "ta sh", + "ภ¹", + "x f", + "aw gs", + "sun ited", + "brew eries", + "anir ud", + "pun ches", + "ol die", + "ip ads", + "wi fey", + "land lords", + "d ji", + "gun ner", + "íķ ´", + "tex an", + "ex op", + "cas sandra", + "s off", + "ðŁļ «", + "igh ton", + "bak ers", + "awareness week", + "v all", + "ear p", + "bts bbmas", + "apologi zes", + "âļĵ ï¸ı", + "was ps", + "states man", + "snat ch", + "watch dog", + "ra fi", + "after party", + "spi ke", + "j er", + "peri ph", + "r nc", + "mu ll", + "le en", + "shi es", + "li eu", + "urstruly mahesh", + "mer ton", + "de sai", + "shi f", + "ðŁĮ ±", + "pe dic", + "gos ling", + "arrang ing", + "ww g", + "gen y", + "you uu", + "netfli x", + "e ttes", + "k wi", + "bernar dino", + "am iga", + "Ø ¨", + "kashmir i", + "t ings", + "emer itus", + "de cat", + "ab domin", + "dc i", + "pha ses", + "d jan", + "be am", + "op ry", + "i shed", + "the ellenshow", + "the st", 
+ "habit ats", + "to ons", + "mclau ghlin", + "ri pper", + "micro biology", + "tal aga", + "clu eless", + "ss u", + "cro che", + "bro mance", + "longe vity", + "zagre b", + "prev ented", + "tra ve", + "spo ilt", + "darry l", + "migra ine", + "al cat", + "dd dd", + "vi v", + "ser pent", + "mat tel", + "jam a", + "con quest", + "î Ħ", + "sam sung", + "presbyter ian", + "ket ch", + "fire fox", + "mo tif", + "le c", + "cho pping", + "cher no", + "j ann", + "ðŁIJ °", + "pro lon", + "wake up", + "conver gence", + "mersey side", + "heart broken", + "lo oming", + "hal lucin", + "mai ze", + "commun ism", + "mo h", + "twitter storians", + "serge y", + "res eller", + "favor able", + "ed gy", + "re iter", + "mal aga", + "live me", + "ka hn", + "pul sion", + "big g", + "kim kardashian", + "ati o", + "tyr anny", + "ru ption", + "q ant", + "pro ven", + "by z", + "pu shaw", + "kri stin", + "e er", + "tar dis", + "ri z", + "awak en", + "mi ko", + "un documented", + "path finder", + "indirec t", + "resemb les", + "h ler", + "conce aled", + "scand al", + "re im", + "d nb", + "cr itters", + "attend ant", + "apprentice ships", + "aa u", + "scre amed", + "l su", + "fa h", + "har bour", + "ed d", + "bat sman", + "li ss", + "mi sha", + "spani el", + "it f", + "advan cement", + "fa c", + "close up", + "cecil ia", + "medi c", + "narcis si", + "lav ish", + "gi ac", + "ma ys", + "le it", + "wine wednesday", + "pushaw ard", + "let to", + "curren ts", + "bug atti", + "out ine", + "w j", + "un do", + "ler osis", + "devo tional", + "ðŁij «", + "on na", + "fais al", + "sa una", + "himach al", + "am ii", + "à® ®", + "di zzy", + "screen writing", + "ph x", + "sp n", + "ick i", + "ag irl", + "fi shes", + "wb z", + "pi m", + "bo ar", + "ac id", + "! ..", + "rocke feller", + "n ga", + "dra stically", + "simpli fy", + "dru mming", + "autum nal", + "gur mee", + "lor de", + "jo ann", + "give up", + "b our", + "am ura", + "der land", + "sim pler", + "wat son", + "tri dent", + "concor dia", + "bel lum", + "bre k", + "dum plings", + "vi on", + "dungeonsand dragons", + "sp ri", + "ascen sion", + "wil datlantic", + "u st", + "rob ins", + "legi on", + "insi st", + "jar o", + "gue ss", + "so b", + "bigh it", + "pool side", + "negoti ating", + "mc gill", + "bil d", + "techn icians", + "miti gation", + "ajay devgn", + "b to", + "ant en", + "cosmo politan", + "ðŁĺĬðŁĺĬ ðŁĺĬðŁĺĬ", + "patri oti", + "temp er", + "promen ade", + "nav ajo", + "nam m", + "wrink les", + "dc fc", + "le ach", + "bru nette", + "r f", + "cout inho", + "al ti", + "tradition ally", + "op tome", + "na z", + "accord ingly", + "rec ard", + "de ets", + "sw ell", + "po sure", + "whit ening", + "strang er", + "illi on", + "here ford", + "u wu", + "ro bber", + "cotsw olds", + "cl en", + "gor ge", + "nam aste", + "re lish", + "gri ff", + "adren aline", + "bla sio", + "val e", + "ê ²", + "toler ate", + "rail minindia", + "jen sen", + "ho ven", + "el lu", + "ob sole", + "eisen hower", + "unidenti fied", + "than niversary", + "body guard", + "Ø ¯", + "i dge", + "sch al", + "stock port", + "sn i", + "re taining", + "po po", + "pix ie", + "oli thic", + "ki er", + "ha jj", + "sa z", + "cor bin", + "!!!! 
!!!!!!", + "v it", + "me gat", + "de h", + "circu it", + "af fleck", + "theore tical", + "hope less", + "u ab", + "slu mp", + "b ice", + "jam med", + "let stalk", + "can i", + "side ways", + "labyrin th", + "re fs", + "ha hn", + "jare d", + "ðŁį ¹", + "jam bo", + "ph yl", + "enhan cement", + "c tr", + "ful lest", + "se ye", + "do ba", + "cho ic", + "yo s", + "cb j", + "andr é", + "re watch", + "pri ma", + "doctr ine", + "for gets", + "u hm", + "ar ound", + "u le", + "art lovers", + "shi raz", + "har th", + "ex tor", + "Å ¡", + "unexpec tedly", + "eli us", + "y x", + "em my", + "se ac", + "ðŁijĩðŁijĩ ðŁijĩ", + "correc ted", + "com bu", + "wom anc", + "cou gh", + "what son", + "publi shes", + "divers ity", + "back bone", + "lock down", + "mesmeri zing", + "nor te", + "ma b", + "desig ner", + "í ģ", + "ra gh", + "mole cules", + "get outside", + "the beatles", + "semicon duc", + "nach o", + "lun es", + "ham mers", + "sul tan", + "o on", + "fe ren", + "att ach", + "ar qu", + "uttarak hand", + "s ash", + "; -", + "tre ad", + "i ko", + "ar thur", + "scandin avian", + "r ation", + "ga el", + "charge able", + "fish y", + "v ma", + "hand bags", + "char a", + "ay ne", + "de fam", + "sett lers", + "qad ri", + "pal ais", + "in wx", + "apocaly ptic", + "poo ja", + "a es", + "at ories", + "proof ing", + "n lp", + "ts la", + "v ina", + "li do", + "dee phouse", + "informat ics", + "v v", + "pp ings", + "di ss", + "à ¯", + "uhur u", + "st ony", + "betra yed", + "b aff", + "my ra", + "as pen", + "allow ance", + "tam ara", + "ci f", + "cor bett", + "ser ge", + "di go", + "ambi gu", + "pain ters", + "p cr", + "p ca", + "nom s", + "lo ft", + "ve e", + "opend ata", + "ðŁIJ ±", + "alex andre", + "identi fies", + "fantasy football", + "re production", + "brom ley", + "ware agle", + "mm er", + "p ss", + "cu es", + "ay at", + "hut chinson", + "sar ac", + "jack man", + "ira h", + "ap ink", + "col s", + "aussi es", + "ex ecs", + "day ton", + "ðŁĻ Ĩ", + "im v", + "har am", + "chuck le", + "authent icity", + "ar do", + "incub ator", + "ภª", + "photo shopped", + "embrac ed", + "fight for", + "gor man", + "zz zz", + "schol astic", + "cri sps", + "te apo", + "mid night", + "ga ine", + "col lier", + "s ate", + "de tte", + "å Ń", + "imag ine", + "i ff", + "tw ili", + "i fication", + "teat ro", + "nor ma", + "es ur", + "emergen cies", + "rise up", + "r inger", + "hass le", + "cait lyn", + "tranqu il", + "vers a", + "se b", + "over look", + "gin i", + "bo go", + "se re", + "may ne", + "henri k", + "contamin ated", + "rhapso dy", + "pro portion", + "wildatlantic way", + "âģ© .", + "organis ers", + "tran e", + "stand ard", + "sper m", + "laun cher", + "ric ci", + "her ts", + "paper work", + "showcas ed", + "mer yl", + "pen a", + "p imp", + "disa strous", + "^. 
^", + "phar a", + "x is", + "fron tal", + "sw irl", + "sp ills", + "swag ger", + "smart watch", + "sizz ling", + "savi our", + "cat ar", + "bb cr", + "refurbi shment", + "dr is", + "citro en", + "absor b", + "patrioti sm", + "il leg", + "chro mo", + "fresh ers", + "ru s", + "lim iting", + "ef ish", + "down ed", + "man dir", + "hazel nut", + "p all", + "mac on", + "disappear ing", + "quali fies", + "bo on", + "bar racks", + "am ine", + "gen dere", + "ðŁļ ĺ", + "j es", + "ãĥ Ń", + "qu ito", + "middle weight", + "sch au", + "quad ru", + "aci ones", + "limit less", + "ðŁijĮ ðŁı½", + "ch man", + "ar av", + "regulat ors", + "it up", + "batter sea", + "mil ford", + "g z", + "tic king", + "gh ou", + "cru shes", + "tu tu", + "dread ful", + "fam ine", + "for change", + "dalail ama", + "ðŁĴ į", + "whit aker", + "hash mi", + "h us", + "vo d", + "bet te", + "aa ah", + "iso o", + "ðŁ¥ Ī", + "ha ar", + "la ine", + "b v", + "all day", + "spr out", + "indie games", + "free bie", + "gree ks", + "but ler", + "ill in", + "ha al", + "ware ness", + "si ma", + "public health", + "gam a", + "wa a", + "oun g", + "goo oo", + "okin awa", + "off enders", + "im pose", + "ho c", + "young ster", + "story teller", + "sc ap", + "figh ter", + "+ ,", + "whit es", + "music monday", + "re za", + "go ducks", + "bri a", + "mi um", + "cas per", + "cru mbs", + "a ad", + "marti alarts", + "ch p", + "ri gged", + "tn g", + "harve sted", + "sa k", + "do jo", + "mill wall", + "b nw", + "oc d", + "histor yof", + "t mr", + "si rens", + "fan ci", + "caregi vers", + "vir a", + "son i", + "recur ring", + "acknowle dged", + "ðŁı Ł", + "oph ile", + "bu cky", + "stre ssing", + "roo k", + "di gger", + "vi val", + "san do", + "fle et", + "si ers", + "sel caday", + "refre shed", + "anti fa", + "a que", + "po lo", + "disappear ance", + "de mb", + "âĮļ ï¸ı", + "ren ted", + "ber ger", + "g mb", + "cu la", + "ss al", + "goo dy", + "u hh", + "marcel o", + "w anna", + "soft ware", + "shop small", + "turt le", + "tom as", + "fri sco", + "ðŁĺį ðŁĴķ", + "jim enez", + "c su", + "day z", + "an do", + "wyn ne", + "choreo grapher", + "cerv ical", + "trail blazers", + "ed g", + "zend aya", + "travel blog", + "el s", + "whole some", + "co g", + "lab out", + "ar ney", + "del le", + "su isse", + "ma si", + "ine se", + "om be", + "fi ddle", + "re claim", + "pa u", + "wat cher", + "sla in", + "ber ty", + "opti mum", + "el ites", + "min is", + "tur key", + "patro ls", + "ger ard", + "au reli", + "wild ly", + "wal tz", + "br gy", + "w ob", + "cre st", + "+ ++", + "ve z", + "fro sted", + "davi do", + "the x", + "param edics", + "p into", + "han k", + "du pont", + "ur g", + "fo stering", + "micro poetry", + "spec tre", + "---- >", + "ne uro", + "fri da", + "music al", + "galve ston", + "e ffic", + "sc ape", + "pal azzo", + "th all", + "pro visional", + "p js", + "au re", + "ðŁĶ ľ", + "mam amoo", + "kit ties", + "cre e", + "wa k", + "lo ool", + "lu pus", + "cn blue", + "à º", + "ðŁİ ¬", + "rac ed", + "tro se", + "om as", + "stri de", + "co ors", + "⤠µï¸ı", + "in comparable", + "cy ril", + "broad er", + "arec lipse", + "ðŁį Ķ", + "inter val", + "ti ru", + "co working", + "w aco", + "a ham", + "a bee", + "flouri sh", + "the times", + "ol ini", + "kick boxing", + "lu cer", + "at la", + "as un", + "casser ole", + "mi aw", + "lobb ying", + "jan ice", + "cir que", + "re flex", + "le ary", + "sanat omy", + "tem pest", + "se mb", + "mur dering", + "us av", + "ro bo", + "on et", + "p cc", + "nati ves", + "life of", + "sa ha", + "ruth less", + "rel ates", + "appeti zer", + "pye 
ongchang", + "nor d", + "er u", + "a thing", + "ug ly", + "pl ying", + "bran ce", + "organ ise", + "kend ra", + "dat o", + "chees es", + "par ma", + "burn out", + "a stra", + "pre toria", + "adjust ment", + "uk u", + "sl o", + "li ken", + "fav ors", + "cli ve", + "be ets", + "snow donia", + "go tv", + "sy n", + "open house", + "pan i", + "portra yed", + "sl ated", + "me cca", + "ren al", + "supportsmall streamers", + "staf fs", + "da o", + "bi ker", + "vik tor", + "tit us", + "admi red", + "ðŁĵ ±", + "hurric an", + "he ats", + "gl ory", + "photo genic", + "mer i", + "de por", + "burn ham", + "or angu", + "dj ing", + "impre ssionism", + "ign ition", + "ca i", + "w ynn", + "de pe", + "cove ted", + "colla gen", + "sau s", + "or nam", + "administr ators", + "ss on", + "nh politics", + "hahahaha hahahaha", + "aspir ations", + "r gb", + "swol len", + "so we", + "sc r", + "diver gent", + "hou ghton", + "han oi", + "d ory", + "ni ki", + "land ry", + "b cci", + "ðŁijĮ ðŁijĮ", + "is mail", + "tri pod", + "her d", + "bhat t", + "dress age", + "tab by", + "ingu ish", + "hur on", + "à³ į", + "à ł", + "to das", + "evangel ical", + "chor ds", + "st john", + "slo ppy", + "marty r", + "face book", + "ali ght", + "sen sei", + "kath niel", + "r ites", + "zi one", + "u o", + "revel ations", + "weight lifting", + "pan o", + "nc wx", + "ac ton", + "à® ķ", + "Ø ²", + "som a", + "ภĹ", + "respec ting", + "mar che", + "fore man", + "be tty", + "ki k", + "shi bu", + "po on", + "argy le", + "k swx", + "et z", + "mar bella", + "brac kets", + "stand by", + "fire side", + "defi ance", + "v ex", + "britanni a", + "in habit", + "appo int", + "piyu sh", + "le ash", + "sci ento", + "fla sk", + "sen na", + "> :", + "at roc", + "sand erson", + "id lib", + "dhan ush", + "ðŁĺ Ļ", + "en thr", + "hit ch", + "de dly", + "al ley", + "dor k", + "mon do", + "cudd ly", + "mis sin", + "ye sss", + "night ing", + "j pn", + "w ary", + "ump ire", + "ma z", + "ê ³", + "bab s", + "ĭ ãģ", + "stan ford", + "posse ssed", + "exce eded", + "ðŁĶ ¶", + "wall art", + "tra p", + "j il", + "hi bis", + "sp ying", + "scri be", + "khali l", + "trans lator", + "lu mb", + "di zed", + "ch c", + "super vision", + "shut ter", + "ja g", + "_ *", + "yester days", + "ms f", + "hi hi", + "gonz aga", + "gille spie", + "vive k", + "ec static", + "this morning", + "ch us", + "ed es", + "ston ed", + "be es", + "ðŁĩ¹ ðŁĩ", + "tur in", + "ho ver", + "at rics", + "ster n", + "sam heughan", + "auti sm", + "mi ya", + "eye witness", + "writ ings", + "travel tips", + "chut ney", + "px rtg", + "keny ans", + "my stic", + "k rit", + "/ $", + "red head", + "world ly", + "am us", + "op la", + "le ve", + "gab bana", + "se en", + "o clock", + "gang a", + "keen an", + "sc ent", + "ol dies", + "go green", + "corner stone", + "comp ly", + "con cours", + "ðŁİ¶ ðŁİ¶", + "ha an", + "con fis", + "aw son", + "cle op", + "î Ģ", + "su zu", + "sau té", + "al gar", + "subscri ber", + "este emed", + "ãĤ¤ ãĥ", + "worth while", + "mel rose", + "flo ck", + "bri ghtly", + "viol inist", + "p ere", + "sli pping", + "and co", + "si gh", + "ha van", + "cu lo", + "m sa", + "fibro sis", + "matil da", + "ra fting", + "aw ard", + "ë ª", + "mm mm", + "ge aux", + "ste iner", + "sin n", + "help ers", + "beet les", + "ai mee", + "tai wan", + "pistachi o", + "mac beth", + "m zan", + "descend ants", + "on sale", + "in r", + "il m", + "grou se", + "sa ig", + "mo w", + "bi gre", + "adjust ments", + "tu la", + "mathe w", + "transl ates", + "mu h", + "bol lah", + "ðŁĴĽ ðŁĴĻ", + "amo res", + "ab outs", + "bomb 
shell", + "bla ster", + "x avi", + "s ns", + "k roger", + "ga ther", + "erad ic", + "daf t", + "chem o", + "ben ches", + "ðŁĩ© ðŁĩ", + "ut v", + "our a", + "n ko", + "gator ade", + "biaf ra", + "ok state", + "im danielpadilla", + "dom ains", + "open ingday", + "kid do", + "do i", + "ric e", + "day care", + "mac millan", + "ba thurst", + "cheer leading", + "ðŁ¦ ģ", + "cash back", + "k won", + "hob bies", + "exem pl", + "ries ling", + "âļ ª", + "ag les", + "ny s", + "every thing", + "nav is", + "ad di", + "magne sium", + "faceli ft", + "ark ham", + "grand es", + "extre mist", + "don at", + "vit ality", + "pump kin", + "be tta", + "sl td", + "arti san", + "li by", + "pe aked", + "ah hhhh", + "mary am", + "assi m", + "un sc", + "ment e", + "al aya", + "low ers", + "ar as", + "gri ev", + "le ip", + "gr ati", + "cri ses", + "spr ints", + "exe cute", + "w to", + "ms d", + "mag ical", + "re viewer", + "spark les", + "juke box", + "ðŁĺĤ âĿ¤ï¸ı", + "pay back", + "licen ses", + "dun kin", + "bel t", + "lake wood", + "h ateful", + "bud gets", + "rev amped", + "ph erson", + "ky iv", + "went worth", + "ro sen", + "cru ise", + "gi ggle", + "def star", + "assassin scre", + "ym outh", + "win kle", + "w fc", + "band wagon", + "b kk", + "w iring", + "kear ney", + "south side", + "pe tit", + "! ðŁĺį", + "nor dic", + "mir za", + "mu gabe", + "v l", + "scon es", + "k tv", + "sand al", + "du c", + "m alls", + "ðŁĴŀ ðŁĴŀ", + "it c", + "al ay", + "im pair", + "un rest", + "flo ss", + "c é", + "ab ou", + "var ying", + "muse o", + "ser ver", + "di ya", + "hibis cus", + "ero y", + "mer ritt", + "fin dom", + "f pp", + "un usually", + "go tt", + "conting ent", + "ali aa", + "ball on", + "jo l", + "hi ked", + "zy me", + "ay r", + "ag n", + "ga z", + "perio dic", + "spar ty", + "practi sing", + "lin ton", + "tal is", + "cy pri", + "womanin biz", + "radio disney", + "ðŁĮ ¼", + "jump ers", + "endo cr", + "ðŁļ¨ ðŁļ¨", + "and on", + "shar apo", + "mi er", + "ma sonic", + "fac tories", + "vi en", + "bb ers", + "ìĽ IJ", + "hol d", + "ke bab", + "be ak", + "approach ed", + "ac milan", + "mun ro", + "ko sher", + "excell ency", + "negoti ation", + "walt disneyworld", + "cr ouch", + "te asing", + "suppre ssion", + "en ya", + "b ce", + "transformation tuesday", + "cal lie", + "vis was", + "p gat", + "ic ted", + "end ings", + "esc u", + "recru ited", + "it fc", + "collabor ations", + "g ino", + "snu ck", + "ausch witz", + "i fc", + "x ii", + "ke sha", + "ger vais", + "clo ak", + "x l", + "sa ad", + "prob ation", + "pre cau", + "mac in", + "anasta si", + "le k", + "e azy", + "daysof code", + "mariah carey", + "yo g", + "stit ched", + "boy friends", + "sh ar", + "ph ile", + "ag u", + "twin kle", + "phi shing", + "week ender", + "ic ton", + "gurmee tramrahim", + "al ton", + "l eness", + "all an", + "pen ultimate", + "kry stal", + "go u", + "lan de", + "dis mant", + "ab using", + "nor se", + "pat erson", + "ed mun", + "ap an", + "xi umin", + "sk el", + "cat walk", + "re act", + "wal led", + "t angle", + "br yn", + "ve to", + "super moon", + "cas ablanc", + "appreci ates", + "ski d", + "bo th", + "catal ina", + "ele ague", + "cyber monday", + "cau tious", + "ðŁ¤ ĵ", + "nov o", + "hamp ton", + "ha ye", + "jose f", + "var an", + "lo bos", + "roano ke", + "orph ans", + "tt in", + "squ ads", + "ishqba aaz", + "black panther", + "e tu", + "k sh", + "cru mble", + "cess na", + "reli eved", + "scul ly", + "pollin ators", + "explore canada", + "ki es", + "kam loops", + "kir an", + "pri mal", + "sett lements", + "hot spot", + "brain storming", + 
"ce dric", + "bi ennial", + "sh ant", + "âĻ¡âĻ¡ âĻ¡", + "do on", + "hear n", + "walk way", + "fe m", + "ve al", + "deport ation", + "tox ins", + "elimin ating", + "descen ding", + "by the", + "bla sphe", + "ha sta", + "comple ment", + "as cent", + "ri ga", + "provo st", + "âĸ ª", + "wee ping", + "anti semitism", + "employe e", + "unearth ed", + "pin o", + "natali e", + "bla d", + "ang ola", + "lock heed", + "in ian", + "ag r", + "ni ster", + "im pala", + "m ke", + "fan atic", + "âĺħ âĺħ", + "ðŁij ¸", + "lu ch", + "simpli fied", + "gall ery", + "econom ic", + "cy borg", + "con i", + "sel ma", + "in ception", + "ko ala", + "dv ds", + "cre sted", + "m mor", + "visi ble", + "n sd", + "ðŁĻĮ ðŁı½", + "w under", + "refriger ator", + "re opening", + "e era", + "carou sel", + "as p", + "balli stic", + "victor y", + "mo tive", + "tre y", + "sharapo va", + "si i", + "mon ter", + "int end", + "west chester", + "sp e", + "cy mb", + "vi dal", + "ll ama", + "uni v", + "fin er", + "crafts manship", + "jazz fest", + "b ch", + "ag gio", + "n cc", + "lamb da", + "tranqu ility", + "cis co", + "ba den", + "so bbing", + "of i", + "go ta", + "ru mored", + "war med", + "ore an", + "ac ton", + "mar ci", + "gh ani", + "âľ ĵ", + "as sorted", + "pembro ke", + "pen elope", + "da f", + "at ty", + "aim o", + "pretz el", + "carni val", + "than os", + "ko chi", + "mer sal", + "ham radio", + "ar twit", + "cas c", + "guer rilla", + "kush ner", + "k app", + "al ise", + "todd lers", + "steward ship", + "o tti", + "ter ri", + "tem pe", + "rest less", + "vit o", + "zay ed", + "rsp b", + "pi on", + "hi ppo", + "haw thorne", + "in as", + "am ily", + "nut cracker", + "lo p", + "d ali", + "tro pic", + "ðŁ¤ ł", + "ul o", + "jare dle", + "py rene", + "pale o", + "usa ir", + "m ould", + "it ated", + "gene tically", + "biom ass", + "ðŁĩ³ðŁĩ ±", + "do dd", + "practic ed", + "monarch s", + "un manned", + "m buhari", + "am al", + "photo gra", + "ko ol", + "bren don", + "ju ices", + "cu re", + "world bank", + "poin ters", + "ðŁĴ Ŀ", + "tur f", + "le ds", + "bor ussia", + "bapti sm", + "warwick shire", + "moun ts", + "gay o", + "be gg", + "co pied", + "asi ans", + "k g", + "moder nist", + "gi d", + "front man", + "concentr ated", + "y t", + "sc avenger", + "iron ically", + "adi c", + "ps n", + "ðŁ¥ ī", + "cultur ally", + "yu v", + "mac arthur", + "fertili zer", + "be withyou", + "ri gor", + "min ors", + "z oning", + "âĸ ł", + "ri r", + "adole scent", + "vin ny", + "ren g", + "sand stone", + "gu et", + "we sth", + "ple dged", + "lac ed", + "sp ide", + "v ai", + "ty coon", + "seiz ure", + "du p", + "appalach ian", + "ro k", + "cathol ics", + "sey chel", + "posse ss", + "la ger", + "jo di", + "cham p", + "stra s", + "d ina", + "cent uri", + "cal der", + "blur ay", + "ðŁĩ¨ðŁĩ ³", + "mo do", + "an nette", + "youtu bers", + "chap s", + "ang ling", + "label ing", + "a qui", + "pk wy", + "ly le", + "bi sexual", + "lit ur", + "dug out", + "li bby", + "grey sanatomy", + "sub stances", + "august us", + "rall ying", + "fi del", + "ing ue", + "äº º", + "hallmark channel", + "tooth brush", + "m á", + "adi rond", + "ag gi", + "ðŁĵį :", + "cru sade", + "tax ation", + "k z", + "i ver", + "dou bling", + "room ie", + "wa b", + "en rolled", + "az on", + "a ju", + "grand children", + "as df", + "ðŁ¥ º", + "mat ic", + "ough ton", + "utili ze", + "ðŁĴ £", + "pon der", + "rais in", + "dys function", + "co bain", + "butter nut", + "e man", + "su red", + "dri an", + "and friends", + "with the", + "on omy", + "heine ken", + "bri dal", + "leader ship", + "pyram ids", 
+ "deutsch land", + "jo cel", + "bo wel", + "y qr", + "horse power", + "be acon", + "ing eni", + "gra dient", + "fer mented", + "mo om", + "thing y", + "pot assi", + "wrist band", + "bor d", + "bo died", + "ðŁĺŃ ðŁĺį", + "ma pp", + "ka u", + "cyber punk", + "ph ish", + "loo king", + "co ates", + "ap ur", + "am ie", + "uk labour", + "at in", + "g la", + "adop table", + "shel by", + "v illi", + "ri ya", + "m ingly", + "cli mber", + "bumble bee", + "ðŁĺ ¸", + "c sd", + "âĿ ¥", + "hospit alized", + "c ki", + "hat er", + "ch r", + "re tina", + "it a", + "fan base", + "beat rice", + "gwy ne", + "go ss", + "fo s", + "favor ited", + "swachhb harat", + "mal ade", + "mon mouth", + "\" [", + "si van", + "sh hh", + "command ing", + "sains burys", + "wee d", + "g man", + "ss w", + "rep tile", + "iv y", + "tro pics", + "roll ers", + "over cast", + "ex position", + "masquer ade", + "man crush", + "wa ist", + "spr inter", + "sle et", + "le vin", + "j pg", + "_ (", + "o pel", + "explo it", + "ap a", + "po we", + "wrec king", + "jong in", + "or b", + "er ick", + "bo sco", + "pra ising", + "ber tr", + "to wing", + "in security", + "ku t", + "resto cked", + "rr p", + "prescri bed", + "trafal gar", + "per t", + "g ases", + "app rais", + "g har", + "music als", + "âĸ¬ âĸ¬", + "mc fad", + "ag ony", + "conditi on", + "equi p", + "shi k", + "atra vel", + "ðŁĩ¿ ðŁĩ¦", + "ke h", + "abduc tion", + "pe oria", + "wil kins", + "g ms", + "as d", + "ev i", + "ðŁĴĹ ðŁĴĹðŁĴĹ", + "u z", + "mo c", + "halle lujah", + "guad alu", + "lou vre", + "dra wing", + "go ve", + "ph ant", + "fri e", + "web dev", + "program mer", + "z able", + "games com", + "clari fy", + "li th", + "kin ky", + "âĿ £", + "labour doorstep", + "son ata", + "ju ris", + "mai den", + "vi adu", + "buch arest", + "conditi oned", + "capit alist", + "u de", + "ps b", + "sp ca", + "lul la", + "footh ills", + "kay o", + "bon d", + "wom b", + "roun der", + "ce sar", + "bur sts", + "ap ra", + "sw oon", + "sab rin", + "fra grant", + "cle arer", + "ku brick", + "cli max", + "jour no", + "ag le", + "ðŁı½ âĢįâĻĢï¸ı", + "poo ch", + "hal e", + "sol it", + "sal mon", + "organis ms", + "bron son", + "art en", + "hodg son", + "alo ve", + "vent ure", + "bb i", + "ae a", + "ðŁIJ ¢", + "ld n", + "d nr", + "o zone", + "el las", + "man ny", + "azz ur", + "un beat", + "tru ffles", + "th ong", + "ma ñ", + "las ers", + "ley e", + "gettys burg", + "back packs", + "or is", + "ma ison", + "craw ling", + "la bra", + "cl ing", + "dra gging", + "ste al", + "dou bt", + "de van", + "ck ers", + "agent sof", + "photo bomb", + "elon musk", + "abo y", + "dist ances", + "story line", + "sp i", + "nor than", + "europe ans", + "wh ale", + "ser pent", + "ðŁļ ²", + "fi or", + "tr it", + "ox o", + "awar ding", + "class mate", + "su fc", + "smar test", + "rich es", + "pr k", + "big foot", + "ar mb", + "bi polar", + "dw elling", + "om ars", + "k wan", + "gri me", + "m eng", + "freder ick", + "navar ro", + "sorry notsorry", + "jaredle to", + "pa ve", + "sl ack", + "barn sley", + "att ar", + "evic tion", + "accumul ation", + "o ir", + "cat chy", + "wel ter", + "vik as", + "has see", + "nik ita", + "mo yes", + "mathe ws", + "shi v", + "gat wick", + "pro filing", + "compan ions", + "mar rake", + "an tics", + "ðŁĻĮðŁĻĮ ðŁĻĮ", + "se se", + "bo i", + "bart lett", + "poison ous", + "ab uses", + "ym m", + "kam pala", + "guggen heim", + "imv kohli", + "dol om", + "bre e", + "thro ttle", + "gare th", + "fitz patrick", + "un ya", + "par ad", + "mar got", + "j nr", + "we a", + "potassi um", + "p nc", + "disgu ised", 
+ "cra sh", + "ren ergy", + "ill ic", + "coup led", + "ni els", + "ci ones", + "æĹ ¥", + "im ent", + "despic able", + "d ye", + "what cha", + "conne ctions", + "paralym pics", + "gaunt let", + "wait rose", + "suici dal", + "star ship", + "vap or", + "st ou", + "law maker", + "coo led", + "si mo", + "then o", + "offro ad", + "ja den", + "bas que", + "vick y", + "lu kaku", + "centr o", + "tri sh", + "strate gist", + "medic ations", + "hor st", + "b fc", + "gra il", + "sharp ly", + "ad itya", + "tom b", + "kau fman", + "tri pad", + "sam ba", + "pastor al", + "brit ney", + "sag an", + "hill side", + "mas ons", + "sar a", + "z one", + "x u", + "to tes", + "rob bie", + "app en", + "mon tag", + "der o", + "short film", + "charis matic", + "tat ors", + "ki ba", + "and ri", + "al arming", + "split ting", + "ic ar", + "th ug", + "scari est", + "sylve ster", + "an an", + "u trecht", + "a difference", + "me ade", + "bu ster", + "air strikes", + "cu ffs", + "account ants", + "ðŁĺ¡ ðŁĺ¡", + "new t", + "bo tt", + "issu ing", + "cl ancy", + "wwen etwork", + "kyu hyun", + "rese mble", + "pajam as", + "sin k", + "kin ney", + "sul ph", + "or k", + "li es", + "la gh", + "or ton", + "ra hul", + "d sc", + "we will", + "re am", + "collo qui", + "shar ia", + "hec tic", + "sar casm", + "land er", + "tm z", + "endor f", + "ro z", + "ham mered", + "fri s", + "w adi", + "pope francis", + "he it", + "flash light", + "un born", + "op es", + "hol iness", + "ðŁIJ ¦", + "nach t", + "im sa", + "gr acing", + "bj p", + "ver ts", + "c sc", + "home owner", + "a que", + "bigo try", + "anni e", + "bag h", + "âĿ¤ï¸ı ðŁĺį", + "car i", + "thom p", + "dispo sable", + "cardio logy", + "pat ented", + "hh hhhh", + "ld r", + "stephen son", + "cro res", + "fan ning", + "cli mat", + "ðŁijį ðŁijįðŁijį", + "ðŁijį ðŁı¼", + "aer on", + "piccad illy", + "bank rupt", + "sil via", + "emplo y", + "don ny", + "commen ting", + "screen writer", + "io ta", + "ce an", + "anc ers", + "tu an", + "street wear", + "ठ¯", + "sk ine", + "esp a", + "asi f", + "os ce", + "she ppard", + "more cam", + "bott le", + "der s", + "orac le", + "google play", + "aver aged", + "edmon ton", + "steph an", + "sister hood", + "cru sted", + "stag gering", + "methodo logy", + "congress woman", + "c abo", + "tri ggers", + "mil ky", + "gli de", + "tooth paste", + "room mates", + "nu ff", + "gu am", + "sprink les", + "alternati ve", + "wat fordfc", + "uof t", + "hal ey", + "cont acted", + "bun dy", + "pro stitu", + "gh ar", + "pre ston", + "on site", + "hil ar", + "g ts", + "c att", + "hamp stead", + "? 
?!", + "ðŁĩ§ ðŁĩ", + "bbc qt", + "aless andro", + "resi st", + "ma idan", + "t ko", + "shad ing", + "pin up", + "gal lo", + "sin u", + "at ec", + "fun k", + "ac lu", + "stri des", + "rhy me", + "wet land", + "bbc springwatch", + "t ins", + "wild card", + "st our", + "flamen co", + "pau la", + "onto logy", + "gang sta", + "am ade", + "ãĤ «", + "t bs", + "skelet al", + "run ner", + "jard in", + "harri er", + "hun ted", + "z hen", + "believein film", + "de mean", + "au diti", + "re start", + "chon dri", + "âĿ¤ï¸ı ðŁĴĻ", + "mcla ren", + "ga b", + "sh um", + "au sa", + "lewi sham", + "y pg", + "k jv", + "fur nished", + "dor o", + "bon ded", + "mor ty", + "lat itude", + "_ )", + "lo va", + "water ways", + "vin ai", + "shor th", + "drun k", + "c ay", + "ay ana", + "kap lan", + "capp uccino", + "spr o", + "life boat", + "has bro", + "spol ice", + "tor on", + "do ing", + "dam n", + "sh ree", + "foun tains", + "ent ation", + "mar u", + "boar der", + "to pless", + "j ada", + "chan ning", + "ul ls", + "en closure", + "gib son", + "fractu red", + "brit ton", + "à ¶", + "t ous", + "por th", + "dra f", + "tra iling", + "mar gate", + "eli fe", + "down ward", + "lin n", + "gla des", + "girl power", + "ak rish", + "u ki", + "ron da", + "ts c", + "appreci ationday", + "vis ing", + "lo om", + "ðŁį ³", + "mex ican", + "ar gos", + "y ya", + "jad ine", + "south port", + "d end", + "si sta", + "rede em", + "men g", + "bra xton", + "antioxid ant", + "s key", + "mp g", + "fin ding", + "vibr ation", + "ce u", + "kh art", + "di mini", + "cl ine", + "shel ly", + "hin es", + "ī ï¸ı", + "to pical", + "no ver", + "ma xx", + "prim itive", + "illustr ate", + "b ounds", + "tren ton", + "join tly", + "breed ers", + "u chi", + "wakeup america", + "b ada", + "ðŁĹ £ï¸ı", + "gu acam", + "sp heres", + "pere gr", + "youth ful", + "lo lo", + "bir min", + "t ly", + "jeremy corbyn", + "defe cts", + "co sm", + "a rent", + "v aa", + "bag els", + "medi ac", + "cori ander", + "ic ago", + "g haz", + "ab bas", + "re model", + "struc turing", + "pu m", + "out law", + "ad ani", + "r bc", + "gul ls", + "n li", + "confu se", + "ðŁijĩ ðŁı¼", + "vil a", + "mcnam ara", + "correc tions", + "mug hal", + "ser i", + "re gain", + "ss b", + "lea ve", + "haha hah", + "gran de", + "di stressed", + "re chargeable", + "ho a", + "hou sed", + "sti l", + "attribu ted", + "opath ic", + "di ps", + "pri t", + "head phone", + "conclu de", + "pil o", + "he t", + "ut sa", + "nit in", + "je m", + "sni ppet", + "tutor ing", + "op er", + "sun k", + "en sla", + "cha u", + "ac orn", + "quinte ss", + "ran kin", + "affili ated", + "our lives", + "cl int", + "se ater", + "isa ac", + "ba shing", + "sme ar", + "nur se", + "doo dling", + "\" ;", + "sa ku", + "atroc ities", + "im am", + "g fs", + "viol ating", + "comm end", + "brad shaw", + "er ville", + "b illed", + "b be", + "thul hu", + "i phones", + "moo se", + "di os", + "re w", + "me thane", + "strang ely", + "whis ky", + "ti ghtly", + "spiel berg", + "radi us", + "notic ing", + "wi f", + "ig nati", + "i fa", + "ap is", + "w ali", + "ha itian", + "bu shes", + "y z", + "v l", + "ex ited", + "asse l", + "tru ec", + "dom en", + "ash er", + "in king", + "newyear seve", + "hend ricks", + "bat i", + "ìĿ´ ì", + "rich ter", + "mon santo", + "con line", + "agre at", + "ðŁ¤ ¯", + "master pieces", + "ar n", + "rough s", + "cle ve", + "se v", + "fashi ons", + "to ya", + "sh ail", + "cop eland", + "aqu ari", + "dec als", + "are you", + "y aya", + "a str", + "fon t", + "ml m", + "ar ca", + "pp or", + "pol lock", + "xper ia", + 
"conserv ation", + "chain saw", + "ag gie", + "?! ?!?", + "si le", + "sh on", + "ìĹ IJ", + "note books", + "marque tte", + "de us", + "bb led", + "spic er", + "mc cabe", + "nor wich", + "modi fication", + "boo sted", + "stru m", + "sales man", + "bang le", + "nis san", + "hez bollah", + "brea sts", + "a af", + "anth us", + "sk er", + "ow ed", + "her os", + "gi fs", + "fo sters", + "eat ers", + "du es", + "_ /", + "lymph oma", + "sf am", + "me gal", + "afri di", + "ag ic", + "p amp", + "jeal ousy", + "ðŁijĮ ðŁı¼", + "calcul ate", + "napp ing", + "g ale", + "ðŁ¦ Ħ", + "lub bock", + "assu med", + "ren ting", + "íĥ ľ", + "subur b", + "ãĤ ·", + "tech nic", + "u cla", + "in front", + "gar net", + "ster oids", + "stri ving", + "ho war", + "mo ver", + "le ton", + "bull do", + "is in", + "ci ao", + "sn z", + "fore front", + "d ams", + "mid wife", + "ma wards", + "cla pton", + "we in", + "subsi dies", + "spr oud", + "rother ham", + "phan tom", + "ar ach", + "spi el", + "rac ket", + "sel amat", + "no on", + "l bc", + "enti ally", + "ðŁĴ ¸", + "sil ve", + "m oud", + "kine tic", + "y asi", + "ðŁİ ©", + "o ol", + "mi ku", + "i za", + "fer a", + "flo ren", + "barber shop", + "groo t", + "z est", + "ne ars", + "stan is", + "z and", + "police man", + "juris dic", + "form ations", + "appar atus", + "sp d", + "arti fact", + "to sc", + "motiv ating", + "womanc rush", + "re dro", + "diagno stics", + "ra za", + "out fitters", + "el xn", + "dod gy", + "ry n", + "sh d", + "ortho don", + "ol de", + "jay anti", + "bal ances", + "quic kest", + "can ton", + "friday reads", + "! *", + "na a", + "a ak", + "ðŁĶ ·", + "behavi ors", + "rasp berries", + "ä »", + "polit ical", + "cam il", + "å ľ", + "di k", + "ast ounding", + "lie be", + "novel ty", + "tur moil", + "sul ly", + "spring break", + "hon ouring", + "cc g", + "ðŁı Ĵ", + "my little", + "ky c", + "pro ms", + "ðŁķ Ĭ", + "à ¨", + "bi ge", + "av ril", + "ðŁĩµðŁĩ °", + "mari on", + "as ants", + "sur ya", + "oc tag", + "luf than", + "ac ron", + "fayette ville", + "ti que", + "love s", + "en ca", + "de kalb", + "ta ver", + "de vote", + "aux iliary", + "joh annes", + "tread mill", + "ay an", + "qu r", + "donald son", + "cher yl", + "\" ....", + "s ven", + "kir sty", + "gun ners", + "ra dish", + "o ahu", + "v sky", + "i ble", + "con course", + "b ps", + "elo qu", + "ash ford", + "te bow", + "roblo x", + "ma da", + "dri ving", + "th day", + "spro ject", + "m ms", + "band ed", + ". 
!!", + "libr arians", + "flan nel", + "intoler ance", + "her al", + "ç µ", + "neme sis", + "list a", + "tar ak", + "cry pt", + "star plus", + "vish nu", + "sc ale", + "cr is", + "% ),", + "j illian", + "regg ae", + "pegas us", + "ol in", + "ip ment", + "man ic", + "l fc", + "godd ard", + "ite am", + "parl our", + "anch ors", + "lee minho", + "talla hassee", + "ant it", + "d ho", + "kid ney", + "y ash", + "batt led", + "az ad", + "gar is", + "faul kner", + "sni ff", + "papar azzi", + "ed m", + "phy llis", + "con tested", + "aa ay", + "se ca", + "k ton", + "vel ve", + "rain ier", + "for um", + "tam pab", + "ho sp", + "trac tors", + "ox fordshire", + "no tion", + "guang zhou", + "ðŁĺ ¯", + "ref ill", + "wednesday motivation", + "sli der", + "mukher jee", + "pr att", + "fon taine", + "alph on", + "af ar", + "ts i", + "pest icides", + "fi ends", + "mo cking", + "bra w", + "tran sat", + "do ses", + "co res", + "hom ophobia", + "docu menting", + "zlat an", + "con doms", + "s é", + "sun set", + "kun st", + "ton ga", + "ภª", + "v ation", + "sp ray", + "chow der", + "ra ps", + "palla dium", + "nor wood", + "music history", + "hoo ker", + "si si", + "osp rey", + "ph ys", + "conce ded", + "bob cat", + "ar mad", + "ze it", + "Ù Ħ", + "ðŁĺģ ðŁĺģ", + "mer idi", + "ðŁĩ· ðŁĩº", + "corn wall", + "! ),", + "touch downs", + "ze it", + "chal et", + "mm m", + "al che", + "gor illa", + "fo ss", + "ati ku", + "lumin ous", + "ivan ka", + "be ek", + "sta res", + "sw iss", + "âĿ¤âĿ¤ âĿ¤âĿ¤", + "scru bs", + "me ath", + "gusta v", + "jo gging", + "confe tti", + "as os", + "ers fc", + "breit bart", + "applic able", + "autho red", + "ya ho", + "h in", + "displac ement", + "j v", + "ðŁĮ¹ ðŁĮ¹", + "ot c", + "non profits", + "diec ast", + "gu sto", + "inte stin", + "c ages", + "me en", + "lu kas", + "moon ey", + "ðŁĺ ·", + "very day", + "tor ah", + "is sion", + "wa c", + "lever aging", + "ish able", + "cu se", + "le wood", + "may an", + "turn table", + "ju ice", + "tru sty", + "tu p", + "eti quette", + "supervis ors", + "stu n", + "gu zman", + "confe ren", + "ric o", + "fe ast", + "back ward", + "pol aris", + "mic he", + "jo g", + "h ing", + "field house", + "vel ing", + "sho cker", + "esc ence", + "ठ¾", + "vi be", + "anasta sia", + "mar ched", + "kill ing", + "Ķ ë", + "fe tt", + "exop lan", + "... 
(", + "snow day", + "lo h", + "ir ani", + "la khs", + "del a", + "po caly", + "boom ers", + "dictat orship", + "ac er", + "tur keys", + "quarter final", + "muskete ers", + "ðŁĴĽ ðŁĴļ", + "sf x", + "museum week", + "sc ala", + "ri sis", + "( ðŁĵ·", + "ãĢ Ĥ", + "z ies", + "bo eh", + "hu es", + "lu sci", + "dol a", + "impeach trump", + "roo d", + "don caster", + "tor re", + "hero es", + "fo yer", + "tar i", + "blur red", + "ke w", + "frank ly", + "dro id", + "ap al", + "Ð ¼", + "y af", + "bre t", + "par agu", + "cac ao", + "ðŁĻĮ ðŁı¾", + "ru e", + "head aches", + "shaw ty", + "char ley", + "pal er", + "go wns", + "correc tional", + "ðŁĺ© ðŁĺ©", + "breaking bad", + "ol ing", + "da p", + "endeav our", + "cit adel", + "tra d", + "incumb ent", + "medit ate", + "foo ted", + "ðŁĴ µ", + "shab bat", + "dayof the", + "wil lem", + "gal way", + "to red", + "marri age", + "f illion", + "sleeve less", + "aud itor", + "jin young", + "invin cible", + "kad una", + "a and", + "volcan oes", + "mon eti", + "indie gogo", + "buccane ers", + "ðŁijī ðŁı½", + "ãĢ Ĥ", + "lay ton", + "cuck oo", + "hu mber", + "buzz er", + "Ï ī", + "to re", + "stra ins", + "sto m", + "pa ine", + "s we", + "du ff", + "z ou", + "si mi", + "li pp", + "ur n", + "se agu", + "ðŁĶ ®", + "sun dae", + "hi c", + "ðŁĺ ¨", + "bull pen", + "u per", + "flyo ver", + "al dridge", + "glo bes", + "ali es", + "ken zie", + "ge es", + "y cle", + "sp lin", + "mag enta", + "j ha", + "bal u", + "gh orn", + "ti pper", + "wick er", + "taste of", + "con clave", + "ch ale", + "inv asi", + "cat er", + "dio xide", + "me gab", + "win n", + "at p", + "transform ative", + "nest led", + "hi g", + "bri dging", + "lil ies", + "chee red", + "bad dest", + "sc rolls", + "real is", + "dipl o", + "ðŁĶ «", + "conce ssion", + "prefe rences", + "explo des", + "er gon", + "introduc tory", + "ine au", + "ch af", + "som es", + "land rover", + "spir ation", + "sex y", + "sco recard", + "illustr ates", + "soul mate", + "wi en", + "inter disciplinary", + "fore casting", + "ent ities", + "glu ed", + "en lar", + "cur t", + "percep tions", + "boot leg", + "mi re", + "asho k", + "v az", + "hor ne", + "cal le", + "ac ulture", + "ther oy", + "night time", + "oc al", + "character design", + "ar mist", + "ðŁĺı ðŁĺı", + "yah oo", + "ac eae", + "to se", + "even to", + "sou t", + "nay anth", + "wh om", + "v are", + "ri gging", + "gen us", + "hi ve", + "com mands", + "sti e", + "day a", + "ethan ol", + "en f", + "hi fi", + "flu ence", + "cle mson", + "re invent", + "thermom eter", + "humor ous", + "emer ging", + "aci ón", + "ðŁĺĺ ðŁĺį", + "s ity", + "haw ke", + "accompan ying", + "t ility", + "ðŁĺ ª", + "re cess", + "protag onist", + "l ery", + "dun dal", + "int l", + "britt any", + "q bs", + "off the", + "marri ages", + "how to", + "viol ated", + "adel aide", + "wit t", + "lanc er", + "pak v", + "hu me", + "st ade", + "bra gging", + "ou tright", + "ad c", + "super st", + "real time", + "cu res", + "garden ers", + "ero ck", + "dale jr", + "ver o", + "bar tol", + "mo ti", + "mc fly", + "v pn", + "st ink", + "over rated", + "guer ra", + "e tis", + "ath ome", + "twd family", + "th ab", + "tn x", + "rafa el", + "family travel", + "x ley", + "sat anic", + "equ ations", + "ru dy", + "wal dorf", + "stan i", + "tu be", + "meas les", + "zimmer man", + "obli gations", + "i ously", + "bow ser", + "trans former", + "sho ppe", + "shak en", + "gh ouse", + "to d", + "ke tball", + "share holder", + "mar ca", + "kp mg", + "ak an", + "given chy", + "coast al", + "au th", + "roller coaster", + "mar ches", + 
"coordin ate", + "cine ma", + "apprentic es", + "par lor", + "mit o", + "men on", + "consider able", + "bar re", + "glo ss", + "enh ances", + "jaz eera", + "fal mouth", + "thra sh", + "stat en", + "k zn", + "eng el", + "samanth ap", + "flo ppy", + "sal om", + "ðŁıĨ ðŁıĨ", + "w ack", + "deliber ate", + "osc ill", + "herit ag", + "du sted", + "orni thology", + "pad dle", + "fer ns", + "bar un", + "cl ans", + "anticip ate", + "a ay", + "mat ically", + "é ĩ", + "tu mble", + "post man", + "unic ef", + "tro tter", + "op d", + "leaf let", + "ge ist", + "cease fire", + "scre ws", + "cre ation", + "wal nuts", + "longh orns", + "under statement", + "ab b", + "proxim ity", + "na x", + "un ity", + "turn pike", + "orda ined", + "dub step", + "chak ra", + "me ch", + "love her", + "look alike", + "donne in", + "vir on", + "Ù Ī", + "bang ers", + "vari ants", + "out dated", + "in ta", + "cri sto", + "sp elt", + "food and", + "f on", + "stefan i", + "margin al", + "hu tton", + "ti ara", + "tel ford", + "qu en", + "fair grounds", + "que tta", + "mikha il", + "heal er", + "v ball", + "ty re", + "under grad", + "gl end", + "hom ers", + "scri bed", + "main tains", + "po che", + "mis sal", + "mar ko", + "u as", + "á n", + "sh p", + "con vey", + "pad re", + "sab a", + "pu glia", + "madhu ri", + "pa xton", + "chap lain", + "n ago", + "ca si", + "... !!!", + "fli rt", + "sal eh", + "k are", + "di re", + "stam ped", + "extre me", + "ðŁĺĥ ðŁĺĥ", + "ho ppy", + "guadalu pe", + "advant aged", + "eu char", + "p low", + "un n", + "mac qu", + "port land", + "cla sh", + "pe s", + "lou bout", + "y p", + "keep ing", + "arca dia", + "fran kie", + "fi u", + "de th", + "encyclo pedia", + "si ze", + "inve sts", + "ðŁį ©", + "geo logical", + "fran ç", + "con front", + "ðŁĺ ¥", + "d ys", + "af m", + "tex an", + "graph ene", + "repost app", + "ac f", + "ur sula", + "gaz a", + "dd led", + "fu m", + "wsb tv", + "m be", + "fron tiers", + "chrono graph", + "ke s", + "inter faith", + "tab oo", + "spar ta", + "won do", + "flori st", + "em braces", + "ca w", + "no el", + "arch ers", + "ðŁIJ ·", + "roman o", + "ban an", + "sh akers", + "melo dies", + "geo thermal", + "se phora", + "ìļ °", + "оР´", + "pro c", + "hand shake", + "pan de", + "popul ated", + "slow down", + "hor tons", + "registr ations", + "un deni", + "lan ts", + "pas sover", + "thak ur", + "li ef", + "adhe sive", + "pe tal", + "micro scopy", + "memph is", + "confir ming", + "air drop", + "mesm er", + "perce ived", + "ming le", + "lifel ine", + "gh j", + "worcester shire", + "pas sions", + "ach er", + "el lar", + "ah o", + "firen ze", + "bar ang", + "letter man", + "hat field", + "lu cha", + "je ter", + "e shop", + "william s", + "horo scope", + "pre de", + "east bourne", + "dur ga", + "di version", + "al trin", + "seis mic", + "premi osm", + "nar co", + "ti r", + "ori g", + "or m", + "land fall", + "ci ous", + "lin do", + "max ine", + "x ico", + "tra y", + "os wald", + "c ba", + "ric otta", + "n cr", + "mar au", + "ภ²", + "gladi ator", + "ch ery", + "lun g", + "u me", + "po psic", + "lon ging", + "can als", + "ta ya", + "decentr alized", + "sho pp", + "pres sures", + "mahar aj", + "eti had", + "wal greens", + "succe ssion", + "sign aling", + "li g", + "staf fer", + "north korea", + "def ying", + "as ma", + "de g", + "peri meter", + "oak ville", + "m sk", + "balti more", + "rece ip", + "de ple", + "ðŁĺŃ ðŁĺĤ", + "jambo ree", + "> .<", + "rsp b", + "puni sher", + "consider ably", + "in tothe", + "pari sian", + "acceler ated", + "polye ster", + "low es", + "fr ying", + "sauté 
ed", + "mou ths", + "seychel les", + "ra x", + "go dis", + "dak ota", + "house wives", + "the me", + "mat inee", + "black bird", + "ye sung", + "pre fers", + "pelle gr", + "in ated", + "trun ks", + "stronger together", + "re pet", + "re pairing", + "ped als", + "toler ant", + "her r", + "dun ne", + "indic ation", + "decat ur", + "b tv", + "exhibit ors", + "ik on", + "friday motivation", + "bra gg", + "live tweet", + "al ves", + "womens art", + "foreig ners", + "wal lets", + "min dy", + "lan ey", + "bb in", + "tv miaw", + "lif ter", + "tar get", + "tam e", + "dr ou", + "astro photography", + "mp c", + "g pu", + "nord strom", + "fric tion", + "run off", + "lov able", + "sp nfamily", + "ext ingui", + "bloo dy", + "sch el", + "arti stry", + "sw ish", + "scar ce", + "ph ils", + "max im", + "pos sum", + "com promised", + "sty li", + "sc fc", + "is sa", + "birmin gham", + "sket ched", + "angel ica", + "ordin ance", + "je ts", + "conqu er", + "ðŁĺ IJ", + "online shopping", + "s ori", + "reason ably", + "nue stro", + "ar turo", + "ch l", + "benef ici", + "spho to", + "wel t", + "ni kk", + "ðŁ¤ ŀ", + "dan ao", + "for mid", + "as se", + "af irst", + "âľ Ĥ", + "gil lette", + "as sor", + "an onym", + "sel ca", + "fe mi", + "bear able", + "y and", + "ar mory", + "cre pe", + "celtic fc", + "bra vo", + "in expensive", + "de lec", + "ge cko", + "new market", + "snow flakes", + "kab ir", + "con tra", + "can ning", + "mor pho", + "gar wal", + "ðŁĴĥ ðŁı»", + "fight ing", + "mu tation", + "woo dy", + "ju gg", + "gr aces", + "premiosm tvmiaw", + "kenne dy", + "gu p", + "sa e", + "op ha", + "off spring", + "fini sher", + "bet ts", + "span ning", + "mar j", + "h one", + "sh ing", + "contin ents", + "samanthap rabhu", + "un related", + "l acy", + "explo sions", + "benjam in", + "sophi e", + "no ting", + "micro soft", + "as sen", + "a hoy", + "i ker", + "ho fer", + "mo e", + "ah madi", + "yan n", + "an ak", + "ma hi", + "be u", + "aha h", + "creep er", + "baahu bali", + "am at", + "pri ory", + "haw keye", + "deloit te", + "sko da", + "print making", + "assemb ling", + "mirac ulous", + "no ch", + "sw o", + "leg a", + "oper ates", + "border lands", + "eli e", + "stron gh", + "rep tiles", + "pir ate", + "un fold", + " ¯", + "qual comm", + "un predictable", + "ot r", + "rose wood", + "direc tional", + "counsel ors", + "corn ell", + "liber ated", + "j ad", + "ir regular", + "bulgar ian", + "high ness", + "vodaf one", + "sw ild", + "mini mize", + "gra zie", + "๠ĩ", + "r stats", + "stre ep", + "ome tric", + "humb le", + "lu mp", + "l ille", + "b ü", + "home depot", + "tripad visor", + "ki wan", + "a via", + "er z", + "ex ico", + "du f", + "blu men", + "mi zing", + "ar ma", + "in im", + "con stan", + "sor a", + "ju al", + "au n", + "tw ell", + "tren ches", + "her a", + "r k", + "po plar", + "recipe oftheday", + "ll an", + "bhu ban", + "short ages", + "ing don", + "bridge water", + "ðŁIJ ĺ", + "fortn ite", + "cam den", + "un cture", + "pro w", + "colon ies", + "t ks", + "n go", + "b hm", + "live pd", + "spl ace", + "sli ke", + "happye aster", + "ter rence", + "revol ver", + "j ed", + "yy yy", + "office of", + "m ts", + "exist ential", + "r ourke", + "explore bc", + "sse d", + "pri est", + "vix en", + "si ding", + "k pa", + "a har", + "ju ic", + "ob struc", + "foren sics", + "uk mfg", + "cancell ation", + "we ary", + "ab q", + "ele c", + "pri zed", + "deb ts", + "me zz", + "salv atore", + "m dc", + "gre tte", + "c gc", + "th on", + "snow storm", + "ts ch", + "cook ery", + "å ¹", + "wa xing", + "n acional", + "mur s", + "ra 
ve", + "cap es", + "ger main", + "dri pping", + "sub mitting", + "ome lette", + "iter ation", + "aj es", + "shim mer", + "fu eling", + "ðŁĩ§ ðŁĩª", + "li po", + "bo bble", + "un follow", + "islam ist", + "hi ber", + "cat s", + "agentsof shield", + "sen si", + "____ _", + "ster ia", + "inst al", + "ausp icious", + "har row", + "over land", + "femini sts", + "inst ant", + "char iot", + "blind ness", + "sp ed", + "sc arec", + "nu it", + "mini atures", + "ho seok", + "glo ck", + "fifa worldcup", + "e te", + "dis m", + "we iner", + "ex foli", + "ear ts", + "ภĶ", + "my art", + "man il", + "iss ant", + "form a", + "in cu", + "buffal ob", + "in tim", + "mc cul", + "anj ali", + "po po", + "un doub", + "hil a", + "fun gal", + "thank ful", + "fu tur", + "en dish", + "ren ds", + "th ar", + "she ff", + "ring o", + "nichol ls", + "io wa", + "po tom", + "cl ams", + "ãģ Ħ", + "acon f", + "stadi ums", + "di mp", + "di k", + "residen ces", + "do v", + "caric ature", + "seagu ll", + "kl m", + "confe ss", + "sla pped", + "cele b", + "turb ines", + "pp v", + "nur ture", + "el ab", + ".... .#", + "tu ff", + "de press", + "al far", + "amii bo", + "di spon", + "e wing", + "que er", + "friend s", + "for re", + "âĺ ¼", + "sw t", + "aqu arius", + "head liner", + "cur d", + "fi gs", + "o tters", + "love fl", + "kare em", + "go vegan", + "fri yay", + "consol ation", + "at ri", + "ì§ Ħ", + "âĺĿ ï¸ı", + "poly ne", + "gu ed", + "o ya", + "la us", + "intestin al", + "cam illa", + "scal p", + "pi r", + "leed s", + "horri fying", + "bore tum", + "dand elion", + "fer rer", + "ell ic", + "as x", + "so ren", + "re loaded", + "ale ague", + "navig ator", + "ine tte", + "add ams", + "al chemist", + "ak shay", + "dystop ian", + "awe c", + "n aya", + "al isa", + "ai led", + "ag or", + "avi ator", + "ali zer", + "smo bile", + "findyour park", + "cop ying", + "to ddy", + "sh ti", + "mon ger", + "cal houn", + "nap kin", + "break up", + "y atra", + "se thu", + "ric hi", + "eras mus", + "fer ry", + "am ore", + "prac tise", + "bo bo", + "power point", + "oo se", + "li ffe", + "chin a", + "sh ka", + "fad navis", + "du ane", + "war on", + "fal se", + "ðŁļ Ĥ", + "wa shes", + "disc ip", + "==== ====", + "g k", + "ab b", + "stub born", + "medi eval", + "p ci", + "ðŁį ª", + "maril yn", + "h yo", + "man di", + "cr i", + "prede cess", + "continu ation", + "om usic", + "s lat", + "wh al", + "mall ory", + "bon n", + "shen zhen", + "ca i", + "âĺ ĥ", + "sa fest", + "for wards", + "dra wers", + "bla sted", + "sle e", + "mor phe", + "mb ta", + "dumb ass", + "ÑĦоÑĤ о", + "alhamdulil lah", + "ec lub", + "al beit", + "heal ey", + "ayurve da", + "adverti sed", + "cro cs", + "itt les", + "bry son", + "be i", + "nj pw", + "honore e", + "fu sed", + "ðŁĶ ĺ", + "mul tin", + "n aga", + "de parts", + "ko p", + "kin o", + "jhar khand", + "ed na", + "ax le", + "mil ton", + "supremac ist", + "marrake ch", + "domin ic", + "tran script", + "] [#", + ": ).", + "wo c", + "sur rounds", + "o gil", + "leaf lets", + "co well", + "whe w", + "tru de", + "proli fer", + "succe s", + "sports man", + "con dom", + "po che", + "k up", + "imprison ment", + "{ }", + "scram bled", + "å Ľ", + "ka ine", + "cell phone", + "metam or", + "con i", + "remn ants", + "ee z", + "down pour", + "afterno on", + "exerc ising", + "ber ser", + "architec ture", + "wick low", + "m ns", + "is p", + "bo c", + "n iss", + "mn wild", + "stu mble", + "r si", + "lu ffy", + "sil en", + "dd ad", + "bul lies", + "haw ker", + "bb cc", + "scu ba", + "e pp", + "que ts", + "for aging", + "pal let", + "ha di", + 
"cinemato grapher", + "cat chers", + "to aster", + "k hi", + "lite coin", + "kid lit", + "amher st", + "maur icio", + "ip ad", + "mar malade", + "fe y", + "don nelly", + "g to", + "est as", + "cere bral", + "ant grasso", + "zz led", + "vir gil", + "swa pped", + "ðŁĺħ ðŁĺħ", + "no dapl", + "greate st", + "nhl bruins", + "fra ser", + "b mo", + "ane w", + ". âĿ¤ï¸ı", + "se gregation", + "remark ably", + "mccor mick", + "lo gger", + "er as", + "contrac ting", + "âłĢ âłĢ", + "yor ks", + "uku lele", + "touch screen", + "de cked", + "ben n", + "south wark", + "ra vin", + "nu mis", + "ðŁ¤ Ļ", + "ru t", + "gre co", + "eth ic", + "red neck", + "ar r", + "t cs", + "ih ri", + "ðŁĩ« ðŁĩ·", + "l k", + "inher ited", + "zy k", + "viadu ct", + "marty red", + "hi gu", + "ss n", + "be in", + "street style", + "fer gie", + "bank of", + "æĹ ¥", + "stake holder", + "exempl ary", + "cre ss", + "ess a", + "ero tica", + "intre pid", + "gom es", + "bra un", + "bethan y", + "bang tan", + "pulmon ary", + "m illing", + "doctor ate", + "trump russia", + "ठ°", + "s ani", + "bl att", + "pla u", + "depri ved", + "t le", + "ful ly", + "bour n", + "st ak", + "lufthan sa", + "kio sk", + "far oo", + "def y", + "bad an", + "ðŁĺĺ âĿ¤ï¸ı", + "rit z", + "tri sha", + "ran ds", + "middle sex", + "arab s", + "pro j", + "sport scenter", + "repe ats", + "iv f", + "bleed blue", + "as sure", + "o bs", + "territ orial", + "ele n", + "bever ley", + "ann ah", + "âĿ¤ï¸ıâĿ¤ï¸ı âĿ¤ï¸ıâĿ¤ï¸ı", + "z l", + "for good", + "science fiction", + "gla u", + "son ya", + "pri th", + "st weets", + "mix ers", + "mari o", + "ant elope", + "writing community", + "went z", + "den ham", + "be di", + "sf o", + "harley davidson", + "look book", + "immuno therapy", + "or phe", + "es ville", + "ed ged", + "tas k", + "sb ball", + "corro sion", + "kilom eters", + "co sting", + "play back", + "ke ke", + "di visi", + "u ter", + "re location", + "yel led", + "pen g", + "up beat", + "ser ve", + "âļ ł", + "hal en", + "stir ring", + "reh man", + "en v", + "schu macher", + "frag ment", + "alkal ine", + "sb k", + "resil i", + "share point", + "rol lover", + "tra sh", + "counter part", + "âĻ «", + "ob itu", + "à ½", + "ãĤ ¹", + "mul berry", + "ðŁİ Ĩ", + "auton omy", + "spra ying", + "nat l", + "love you", + "fran ki", + "nu k", + "esc ar", + "can teen", + "ali baba", + "de plor", + "mole cule", + "pu d", + "fort night", + "blon die", + "sp hin", + "portra yal", + "ta che", + "bu te", + "consi sting", + "freep alestine", + "c sp", + "im mort", + "d ns", + "ðŁĴ¥ ðŁĴ¥", + "tour de", + "coo king", + "archi val", + "ga thers", + "bit t", + "b anc", + "pre mature", + "snow ball", + "poetry day", + "lou dly", + "fug itive", + "ed ay", + "em ra", + "ðŁĩ¸ ðŁĩª", + "sci en", + "node js", + "jur gen", + "je ong", + "band ana", + "un is", + "fox sports", + "v andy", + "pro visions", + "wee p", + "tu k", + "i ko", + "h oun", + "zig gy", + "z r", + "fil let", + "bat a", + "tin k", + "con e", + "we want", + "k ilo", + "hor ace", + "sl t", + "sc t", + "stay tuned", + "victor ia", + "umb ria", + "att acker", + "ingham shire", + "fright ening", + "no ir", + "fr at", + "con tempt", + "lia ison", + "ho i", + "br ink", + "tr ill", + "ni agar", + "kick ass", + "dun das", + "not my", + "rho de", + "bu mble", + "no xi", + "fa g", + "spec tators", + "mancrush monday", + "jin ping", + "distr act", + "dais y", + "wal den", + "portra it", + "ar thistory", + "vol tron", + "ev el", + "is c", + "ac m", + "r ite", + "na o", + "de ported", + "swe ats", + "ru fus", + "lo bo", + "labor day", + "gam o", + 
"ihri thik", + "bl it", + "abdomin al", + "ãħ¤ãħ¤ ãħ¤ãħ¤", + "i it", + "e q", + "bu sy", + "allu arjun", + "un disclosed", + "de ton", + "pro create", + "ki l", + "ðŁİĤ ðŁİĤ", + "mitch ell", + "ki i", + "inherit ance", + "al p", + "jo burg", + "pat rolling", + "compul sory", + "un signed", + "ni am", + "l ga", + "eshop suk", + "tr illi", + "ma w", + "appreci ating", + "rock ab", + "mañ ana", + "an tal", + "mal vern", + "roy o", + "grand prix", + "sut ton", + "go ftheday", + "dig i", + "ãħĭãħĭ ãħĭãħĭ", + "t les", + "varan asi", + "erec ted", + "discip les", + "cont act", + "ðŁĺ µ", + "li d", + "⬠ĩ", + "scen tre", + "radi ator", + "ing tips", + "trans itions", + "thursday motivation", + "chem ical", + "separ ati", + "sal is", + "mi m", + "geo graphical", + "book fest", + "/ .", + "âľ ĭ", + "v ae", + "cur rie", + "ag garwal", + "acceler ation", + "the ses", + "lg m", + "u mass", + "pro portions", + "nat a", + "ani ans", + "ku ch", + "be acons", + "ap r", + "@ #", + "ðŁĴª ðŁı¾", + "nu ke", + "sher aton", + "ki o", + "ma kati", + "polit ico", + "mor ale", + "ì Ļ", + "econom ically", + "gg ly", + "ss en", + "pa stries", + "intern ships", + "vic ente", + "fanta ken", + "aveng ers", + "accu se", + "slee pover", + "indic ated", + "the dream", + "ster one", + "ren ders", + "fro st", + "ou i", + "gre gg", + "d ore", + "⾨ ⾨⾨", + "pu gs", + "sat y", + "nu mb", + "hems worth", + "tam i", + "la ssic", + "schi ff", + "igle sias", + "ag awa", + "] \"", + "re shi", + "game stop", + "divor ced", + "theat er", + "clau di", + "un conventional", + "prophe ts", + "ac in", + "twel f", + "tow ering", + "t ml", + "sc lerosis", + "k wan", + "ge ts", + "distur b", + "na ira", + "ener g", + "pir acy", + "pru itt", + "noti fied", + "hen na", + "bra m", + "ground water", + "bl s", + "opti mis", + "$ )", + "luci e", + "biz hour", + "fang irling", + "gr ills", + "or l", + "ver se", + "c ina", + "law less", + "artistson twitter", + "tele vised", + "marshmal lows", + "radio head", + "bar r", + "m fc", + "bre vi", + "mmor pg", + "g aya", + "âĸ «", + "sub titles", + "j t", + "disney land", + "to bago", + "nh m", + "groo ve", + "fi awec", + "\" /", + "ba o", + "scra bble", + "om ni", + "ff l", + "um c", + "si mba", + "ali er", + "ter rell", + "plu me", + "mi di", + "dig nit", + "co c", + "bru t", + "ad ata", + "alche my", + "d sm", + "ðŁĺĨ ðŁĺĨ", + "win try", + "spa res", + "cu er", + "conclu sions", + "to ys", + "od or", + "fl ann", + "gar vey", + "scrip tions", + "inspec tions", + "cat ap", + "ang lo", + "st louis", + "heim er", + "at ay", + "tr ich", + "en yc", + "chil ds", + "vent il", + "mont p", + "guiller mo", + "circu lare", + "z ell", + "mode led", + "craf tsman", + "al ina", + "stimul ation", + "cashe w", + "ju das", + "best of", + "to ire", + "susp ends", + "scol lege", + "real ising", + "by tes", + "bloo ds", + "as si", + "ðŁĴ ¿", + "o hs", + "ðŁį ĭ", + "scallo p", + "ठµ", + "gi fting", + "camo gie", + "wil kes", + "o zzy", + "ðŁ¤ ¤", + "ver onic", + "sav oy", + "deme tri", + "baby girl", + "ðŁĺį ðŁĺŃ", + "so x", + "cly de", + "induc tee", + "count down", + "self care", + "ठľ", + "vi ka", + "tor re", + "phd chat", + "pe ars", + "aw h", + "suff rage", + "le sn", + "admir ation", + "mp p", + "shark week", + "schul z", + "santor ini", + "clo ver", + "( *", + "stras bourg", + "ex iting", + "so yu", + "finger print", + "che a", + "ãĢ ľ", + "vin dic", + "song writers", + "so a", + "prou der", + "nam a", + "= ))", + "simple st", + "delici ously", + "gil les", + "u q", + "mn wx", + "ep p", + "sh un", + "ken nel", + "fall 
on", + "ðŁIJ £", + "sin d", + "tra gically", + "out es", + "modern ism", + "co ke", + "gy n", + "spi on", + "âĺ¹ ï¸ı", + "le am", + "compress or", + "apolog ise", + "twent yon", + "fan atics", + "âĻ »", + "sco tsman", + "sa wa", + "ko u", + "as er", + "ภļ", + "welter weight", + "phen om", + "twick enham", + "stri a", + "p out", + "ka z", + "gi am", + "cd p", + "ho y", + "emplo y", + "red mond", + "ภĦà¸", + "sm ere", + "trance family", + "proto cols", + "pie ce", + "lu iz", + "iter acy", + "carl s", + "united states", + "har med", + "phd life", + "ch aw", + "foot prints", + "l é", + "cho ker", + "z ana", + "sli pper", + "eric sson", + "insul ting", + "articho ke", + "advis ing", + "acquis itions", + "op or", + "mut ations", + "re ar", + "ॠģ", + "pod cast", + "wi ther", + "kun g", + "íĺ ¸", + "win slow", + "di apers", + "ðŁĵ¸ @", + "ec ker", + "col lar", + "hu ey", + "gi ro", + "mono gram", + "kas ich", + "si veness", + "malay si", + "arom atic", + "gre s", + "gali leo", + "u ji", + "rob b", + "dr m", + "none theless", + "as a", + ": >", + "lo a", + "l np", + "at work", + "ag t", + "laksh mi", + "pipel ines", + "id al", + "stre l", + "re all", + "chain z", + "stone wall", + "san sk", + "ðŁı ´", + "pied mont", + "hoste ss", + "ci u", + "t é", + "analy ses", + "wil helm", + "scott y", + "rw by", + "mosqu it", + "use mb", + "qu ins", + "ðŁij İ", + "tu cker", + "s conf", + "speci fications", + "psychi atry", + "broo kes", + "s ils", + "ol af", + "de to", + "co di", + "cli p", + "fil th", + "womancrush wednesday", + "go to", + "ang erous", + "be ale", + "w tc", + "paneli st", + "ne x", + "lar sen", + "emili o", + "tab leau", + "h itters", + "conce ived", + "americ ani", + "or tega", + "mar di", + "Ñ ĥ", + "pain tball", + "thir sty", + "new yorker", + "etis ation", + "go ss", + "we aker", + "u gh", + "tro ll", + "har ga", + "du al", + "ght ning", + "at ine", + "ðŁĺİ ðŁĺİðŁĺİ", + "cook out", + "pyrene es", + "po ss", + "authent ication", + "sports wear", + "yun ho", + "kir o", + "archi pel", + "shen ko", + "ren der", + "nov ation", + "divin ity", + "ðŁij £", + "su fi", + "humb ling", + "ge opol", + "devote es", + "wait ress", + "tr ough", + "py ro", + "i ba", + "bl ing", + "gra f", + "epilo ts", + "bt r", + "of tball", + "bas king", + "domin os", + "so om", + "r ath", + "sher yl", + "qu el", + "astronom ical", + "wel d", + "track list", + "sig nee", + "slee pless", + "com man", + "ch ron", + "summ on", + "pure michigan", + "cri spr", + "sli p", + "la gi", + "ra q", + "um u", + "thal ap", + "char med", + "scru mp", + "quad copter", + "ski p", + "peter sen", + "mun i", + "ðŁĮ ¾", + "mon aghan", + "tra ys", + "ick ed", + "canad aday", + "te gr", + "ï¿ ½", + "hot ness", + "heavy metal", + "ab ar", + "gop debate", + "az ul", + "spider man", + "sun flowers", + "ľ ë", + "web comics", + "bar d", + "Ð ²", + "nichol as", + "slu sh", + "ram an", + "mark ham", + "ffici al", + "ff ler", + "íĬ ¸", + "ple ss", + "anush ka", + "to to", + "sk aters", + "pro wrestling", + "compet es", + "ay ala", + "myster y", + "thr ills", + "mp g", + "independ ently", + "y ul", + "imper ative", + "formid able", + "tire less", + "st acking", + "ton gues", + "mal tese", + "pot ts", + "mat ti", + "char ting", + "chill out", + "super nova", + "ome o", + "sky sports", + "nu tty", + "ðŁĹĵ ï¸ı", + "ro han", + "insp ired", + "concier ge", + "ser ra", + "ma kk", + "gal at", + "chi pp", + "ye v", + "ì £", + "reim bur", + "op ul", + "kimber ley", + "i eee", + "bre men", + "ch itec", + "or in", + "nak u", + "bon kers", + "foo ty", + "emer 
gence", + "ðŁĨ ĺ", + "sti p", + "serge i", + "zo ey", + "ai me", + "wou ld", + "dy es", + "destin y", + "vinai grette", + "dri er", + "circulare conomy", + "an archi", + "ss r", + "sch el", + "cin er", + "gro om", + "determin ing", + "gar min", + "cal ais", + "incarcer ation", + "bu kit", + "no i", + "chelms ford", + "mckin ley", + "chi pped", + "belong ed", + "tu mors", + "str oud", + "mi i", + "influen za", + "wwen xt", + "tun dra", + "tele communications", + "cat sofinstagram", + "t ages", + "beat ty", + "o du", + "ml kday", + "oo per", + "dang le", + "ak ley", + "cru mb", + "anti gua", + "ti mbers", + "rou hani", + "ðŁĴª ðŁĴªðŁĴª", + "ha fi", + "... !!", + "w cs", + "coo p", + "sn c", + "lit res", + "ãĢ Ĭ", + "ha z", + "co z", + "k ant", + "green field", + "cur ti", + "y ale", + "flye agles", + "what soever", + "wor thing", + "rou lette", + "flyeagles fly", + "un da", + "a inted", + "stand ing", + "lusci ous", + "h pc", + "effic acy", + "ash land", + "me ghan", + "ky wx", + "n pr", + "bath tub", + "ac os", + "h ani", + "mar cor", + "man tis", + "da isi", + "bo ba", + "ab bie", + "mu til", + "vi al", + "spy der", + "po z", + "g ti", + "el fie", + "nigh tw", + "metro id", + "anton i", + "mad die", + "dh ry", + "dar lings", + "ten ds", + "taek wondo", + "atlan ta", + "me ow", + "chlo e", + "ãĥ İ", + "ym es", + "siber ia", + "k con", + "gu es", + "mar iner", + "fac il", + "azz le", + "[ ...", + "han nover", + "bav aria", + "vir go", + "te uk", + "u sps", + ") #", + "wall a", + "sam pson", + "need less", + "ver bally", + "hay ley", + "bow led", + "pi us", + "lam pard", + "ham string", + "vol vo", + "road safety", + "cho king", + "sor bet", + "a hem", + "healthy food", + "brai ded", + "horticul ture", + "cr ative", + "che ek", + "ad do", + "the force", + "ko ko", + "schiz oph", + "j ie", + "w ada", + "twentyon epilots", + "h bcu", + "pro ton", + "pau ls", + "lou isa", + "lat am", + "kyr gy", + "com pac", + "sd k", + "sap i", + "?? 
?", + "liber alism", + "ep silon", + "ai den", + "w usa", + "spra yed", + "baske tball", + "kim ono", + "blue wave", + "ali as", + "ë§ Ī", + "mug shot", + "ce c", + "do gre", + "ad ora", + "ðŁĵ· @", + "kra kow", + "intrigu ed", + "exhau sting", + "astron omer", + "ven ison", + "lady bug", + "ci v", + "bra e", + "us m", + "bri be", + "acup uncture", + "pembro ke", + "ke ating", + "chi e", + "y ad", + "t si", + "sm i", + "see ding", + "gate shead", + "lis boa", + "gy p", + "canv ass", + "ðŁĶ´ âļªï¸ı", + "op i", + "ni r", + "soci etal", + "ly te", + "ati es", + "c sm", + "ar tery", + "al in", + "aka poor", + "abstr acts", + "â̦ â̦", + "teen wolf", + "ne we", + "travel gram", + "sentim ental", + "per ched", + "han del", + "ho ek", + "f ay", + "coordin ating", + "anim ate", + "man ian", + "effor t", + "jer ky", + "f ck", + "adri enne", + "ma bly", + "tra ding", + "my el", + "spi ro", + "sol a", + "stor ing", + "over drive", + "monday morning", + "dream team", + "pul se", + "bon di", + "ber nie", + "pgat our", + "tri poli", + "son am", + "plat t", + "âļ ¡", + "ag roup", + "îIJ Ĵ", + "inv ading", + "v cu", + "k ell", + "ñ os", + "un dead", + "pod casting", + "mercede sam", + "mana fort", + "cor tex", + "que so", + "impecc able", + "pal mer", + "wil doz", + "sport sc", + "guacam ole", + "dispen ser", + "cate gori", + "stun ts", + "per il", + "invit ations", + "dune din", + "xi e", + "achi eves", + "saf er", + "pre ds", + "ph an", + "knuck les", + "k ak", + "igno res", + "lovemy job", + "aru ba", + "ound ation", + "datac enter", + "co vert", + "gr ing", + "cou ple", + "ا ر", + "vol i", + "mc cle", + "arti sans", + "lu do", + "kal am", + "arom a", + "under taker", + "hu la", + "wiz kid", + "gu mb", + "god frey", + "bakers field", + "ker n", + "engine er", + "car ve", + "pal in", + "guaran tees", + "pe bbles", + "b ays", + "zi eg", + "fin k", + "â¬ĩï¸ı â¬ĩï¸ı", + "down pours", + "ro chelle", + "rasp berry", + "ðŁĺ ®", + "gra phies", + "stom p", + "caf es", + "ari zed", + "utt ar", + "cal vary", + "dri e", + "crusad er", + "bus an", + "tux edo", + "si u", + "seam us", + "cul tured", + "blan chard", + "town house", + "ge red", + "butter milk", + "flu ctu", + "roger federer", + "hel i", + "ðŁ¦ ĥ", + "u ous", + "ram esh", + "mu ppets", + "email marketing", + "ye ss", + "br ice", + "ri zio", + "pel o", + "donnein arte", + "u rable", + "inve stin", + "bump ing", + "raji v", + "sav a", + "thro wer", + "fore x", + "o hhhh", + "th rust", + "pull man", + "r fid", + "sep sis", + "le ed", + "fri ght", + "roun ding", + "ne b", + "ph ins", + "ai sha", + "utili zing", + "squ ats", + "gold smith", + "j ic", + "bo ks", + "vau s", + "i po", + "exclu sion", + "tari ff", + "po kes", + "min al", + "land s", + "en force", + "washington dc", + "or char", + "g x", + "mar ys", + "ey our", + "aussi e", + "bak ers", + "un popular", + "latin os", + "lar ge", + "pu tnam", + "bol o", + "wa de", + "pel o", + "di zz", + "ob struction", + "fla ppy", + "weare the", + "depend ence", + "pajam a", + "e te", + "y ann", + "e wan", + "disc la", + "a ay", + "kar ina", + "e ic", + "an trim", + "w soc", + "neg atively", + "kai do", + "fotogra fia", + "dh ru", + "colo ssal", + "mcle od", + "k wang", + "mani pu", + "ex hilar", + "us atoday", + "summer slam", + "co les", + "tapro om", + "unbeat able", + "de ma", + "tic ks", + "k ling", + "fil s", + "campaig ners", + "ภķ", + "brew ster", + "audu bon", + "qu ay", + "ch s", + "ki gali", + "d ler", + "strength ens", + "som al", + "sign ingday", + "gol ds", + "pig ment", + "orche stral", + "g q", + 
"lin kin", + "ðŁı ĩ", + "ta w", + "algar ve", + "ho v", + "ear le", + "gold fish", + "am ig", + "ex er", + "ben in", + "dru id", + "ðŁIJ ¸", + "she m", + "quat tro", + "mer cen", + "men te", + "incorpor ating", + "bon anza", + "state fair", + "en de", + "concep tions", + "e es", + "âĻ¥ï¸ı âĻ¥ï¸ı", + "d son", + "fire arm", + "orb ital", + "we h", + "multi p", + "fo b", + "requi em", + "p light", + "thou se", + "sa id", + "oc re", + "remem brance", + "n old", + "chi pping", + "be v", + "er t", + "ca thy", + "sy m", + "ri ggs", + "m ley", + "dialo gues", + "sl ender", + "how l", + "gau teng", + "wd w", + "to bi", + "smo kes", + "im plo", + "b pm", + "ad n", + "mom basa", + "cap sul", + "bloom field", + "artic ul", + "cle o", + "goog led", + "flu ffy", + "l ard", + "en zyme", + "ve sti", + "ibra hi", + "fl ame", + "e mea", + "out ages", + "dispro por", + "ble ak", + "an sel", + "ick er", + "st louis", + "stock market", + "good friday", + "sau lt", + "stal led", + "pro m", + "ep som", + "b é", + "the se", + "sau ces", + "me w", + "lit fest", + "pre d", + "re u", + "kar ak", + "si enna", + "ell in", + "bio technology", + "ï¸ıâĥ£ -", + "tac tic", + "sa in", + "por k", + "mon za", + "ka j", + "lu sh", + "compart ment", + "chang ing", + "shraddha kapoor", + "fo al", + "ar tem", + "cu ando", + "can ola", + "ori ente", + "me sse", + "d ited", + "br c", + "box er", + "bbc two", + "s st", + "ment day", + "em ing", + "de wey", + "kof i", + "âŀĸâŀĸ âŀĸâŀĸ", + "reali zation", + "smo l", + "tw ood", + "san je", + "flag staff", + "ber wick", + "cor set", + "can ary", + "whistle blower", + "et ched", + "com posing", + "squee zed", + "bow er", + "auto desk", + "ne h", + "mathi eu", + "ba ja", + "Å Ĥ", + "hy dra", + "da im", + "am eri", + "insi sted", + "mer lot", + "gar ros", + "heart news", + "gaine sville", + "cut ler", + "bo de", + "ðŁĺī ðŁĺī", + "lew es", + "scoun try", + "g sa", + "us u", + "cc m", + "god awgs", + "phara oh", + "cra e", + "mor ley", + "hyp noti", + "f ades", + "neur ons", + "fu zz", + "ing co", + "high landers", + "star k", + "vig ne", + "pac kets", + "amar illo", + "reu ben", + "insul ts", + "bas ic", + "vec tor", + "n me", + "ac ruz", + "tro s", + "transm itter", + "ðŁĺ ŀ", + "interpre t", + "ðŁĺ ²", + "pre quel", + "mc gowan", + "dis semin", + "ðŁĴĺ ðŁĴĺ", + "mascul inity", + "indie gamedev", + "ali ve", + "te t", + "pe tal", + "ema iled", + "ar med", + "ko o", + "he er", + "ba ird", + "super junior", + "metro polis", + "delav in", + "decl ines", + "stit utes", + "Û ģ", + "p tbo", + "g lan", + "cho res", + "e aling", + "chri ssy", + "ste mc", + "vi an", + "assassin ated", + "pron ounce", + "illeg als", + "discover y", + "cav ill", + "fri fotos", + "f al", + "so i", + "sabot age", + "t int", + "p dc", + "ðŁİīðŁİ Ī", + "ãĤ Ĭãģ", + "ji o", + "endeav or", + "in sig", + "commit tees", + "she arer", + "me tz", + "mar rying", + "h dd", + "g by", + "fre t", + "tri sh", + "pu l", + "scrip ted", + "sa ki", + "l w", + "ke ye", + "shim i", + "nan aimo", + "ca h", + "à «", + "tem pered", + "ici an", + "du gg", + "dish washer", + "air field", + "s rugby", + "gr inch", + "y st", + "r ms", + "mahat ma", + "lan kan", + "disc ar", + "dige stion", + "no des", + "l ls", + "om ic", + "gu tter", + "tis garh", + "feder ico", + "election day", + "bo he", + "master card", + "fire ball", + "âľ Ķï¸ı", + "oy ster", + "p ong", + "do k", + "en route", + "m vc", + "beat the", + "ali stair", + "shu b", + "sh aming", + "cherno byl", + "ghi bli", + "the s", + "pin ion", + "d bs", + "sal ts", + "ic tion", + "epi ph", + 
"nc pol", + "in convenience", + "whit ley", + "inspec ting", + "wood ley", + "wi ener", + "skil let", + "no les", + "m ca", + "h ina", + "a sha", + "willing ness", + "well ness", + "tam ed", + "show time", + "dis advantaged", + "ber nat", + "us n", + "mission aries", + "coun selling", + "arrog ant", + "quant itative", + "leg alization", + "ho dge", + "energye fficiency", + "cameron dallas", + "pos sessions", + "p bb", + "harris burg", + "v g", + "hindu ism", + "happy thanksgiving", + "fi b", + "re acting", + "tweeta picture", + "pol iti", + "mu ppet", + "hur rah", + "pac e", + "coast guard", + "guar ded", + "as am", + "par ry", + "fore very", + "x q", + "oom f", + "ke anu", + "j ind", + "ri st", + "customer service", + "sac red", + "ðŁĺ º", + "ton er", + "occur rence", + "mat u", + "val dez", + "red d", + "is ak", + "power rangers", + "pe asant", + "raj ini", + "abra ham", + "e mil", + "car do", + "tr il", + "hair styles", + "obsole te", + "sam pler", + "direc tive", + "delavin kisses", + "ver ton", + "glo s", + "sp ay", + "paler mo", + "com ets", + "man ziel", + "chicag of", + "ski pped", + "pic torial", + "h ant", + "b mi", + "a ol", + "re opens", + "pad dling", + "devo s", + "fra ud", + "bas eline", + "que ues", + "sp ired", + "sn are", + "eu ve", + "descri ptions", + "daisi es", + "ca ching", + "gall eria", + "tri mmed", + "stin o", + "recy cla", + "ic ular", + "bir ken", + "raw lings", + "fli x", + "chic as", + "b gt", + "lik eli", + "argy ll", + "thel ove", + "ga ston", + "bl anca", + "ha k", + "f one", + "sailor moon", + "h aci", + "ima c", + "fl yn", + "de can", + "bel les", + "ap ic", + "zo g", + "taun ton", + "con stance", + "lasag na", + "ker nel", + "in ka", + "har bor", + "collec tively", + "calcul ated", + "av ille", + "shil pa", + "pur du", + "gi mm", + "fun er", + "a est", + "pembroke shire", + "nighting ale", + "n unes", + "hyper tension", + "hu bert", + "sli ders", + "infer tility", + "comm ended", + "transat lantic", + "metr ical", + "!! 
@", + "Å Ł", + "ss g", + "bac ca", + "inver ted", + "fun factfriday", + "it ans", + "albu m", + "acqu ainted", + "ri er", + "whel an", + "sar ab", + "mu e", + "snoo ze", + "pi ff", + "agre eing", + "sp itting", + "jer maine", + "n ye", + "âľı ï¸ı", + "am bush", + "ze ph", + "con greg", + "univers ity", + "s app", + "wann abe", + "pat rice", + "ib d", + "do glo", + "fri dges", + "sun d", + "king ston", + "ar gon", + "kam en", + "hardro ck", + "ds ley", + "do lores", + "ì °", + "ota ku", + "pi ping", + "be having", + "âŃIJï¸ıâŃIJï¸ı âŃIJï¸ı", + "blue bird", + "an sari", + "teapo t", + "fire work", + "cro p", + "log ans", + "ty ped", + "thick ness", + "ig ers", + "c fp", + "dys functional", + "contra sting", + "et ty", + "aston martin", + "tx st", + "dra grace", + "at tributes", + "marath on", + "manu scripts", + "john stone", + "ðŁĺ± ðŁĺ±", + "bo er", + "ay u", + "aru gula", + "poo rest", + "con du", + "assu mption", + "anag h", + "no h", + "delav in", + "sit ter", + "g ö", + "mor ow", + "kick start", + "com i", + "gl acial", + "ghe ad", + "ba in", + "ker shaw", + "en dof", + "fre ud", + "om at", + "i af", + "hu g", + "sign up", + "each other", + "defin ite", + "tu bing", + "shak ira", + "ðŁijı ðŁı½", + "uu uu", + "sw in", + "sham bles", + "ol as", + "sk ell", + "brit ain", + "kn w", + "clu tter", + "om y", + "j ens", + "hang ed", + "city scape", + "scra ps", + "un locking", + "dead liest", + "er no", + "breast cancer", + "a it", + "inspec t", + "fu ri", + "ðŁĴ Į", + "ku d", + "ju le", + "or ah", + "mi ds", + "m dt", + "bur gring", + "r attle", + "pu sa", + "stal k", + "cle ans", + "iss ance", + "z ek", + "worth it", + "nam eis", + "musko ka", + "council man", + "urban art", + "bar rac", + "un solved", + "tu l", + "g ita", + "white board", + "soy beans", + "em ent", + "cont i", + "saturday motivation", + "conveni ently", + "doc king", + "t ado", + "âı ©", + "sp ino", + "puppy love", + "po f", + "fabric ated", + "robb ers", + "adop ts", + "ti fied", + "kk r", + "indulg ence", + "notic eable", + "macqu arie", + "chap el", + "sensu al", + "ki ko", + "melan oma", + "lore tta", + "li ance", + "ab en", + "sp lus", + "ga al", + "ac ele", + "lib dems", + "compar isons", + "ðŁĮ µ", + "rhy thms", + "mer y", + "en capsul", + "nap ier", + "ðŁijĮ ðŁijĮðŁijĮ", + "ðŁij IJ", + "plat z", + "fre sno", + "re formed", + "ran bir", + "el it", + "the best", + "bhu shan", + "vin nie", + "impro vised", + "s ittin", + "re created", + "e ba", + "ec ker", + "ac rob", + "pon te", + "cor d", + "gi ddy", + "eur usd", + "fe ver", + "intu ition", + "gar i", + "dum mies", + "bud weiser", + "amend ments", + "te tra", + "sch nit", + "ay as", + "mar ys", + "ci st", + "k ani", + "ker mit", + "ðŁĺ±ðŁĺ± ðŁĺ±", + "tin ker", + "strol ling", + "di visional", + "niger i", + "omin ous", + "menstru al", + "kar ab", + "k hy", + "bw fc", + "pan handle", + "l illi", + "well er", + "stra pped", + "son the", + "transfer ring", + "ethe real", + "sne aks", + "ru dol", + "gab les", + "jac king", + "cin code", + "for tune", + "canadi ens", + "con for", + "ab normal", + "frank lin", + "tit a", + "mu la", + "persi st", + "cu ties", + "ki el", + "ðŁĩ± ðŁĩ", + "her mann", + "aw k", + "fi asco", + "ko to", + "we ta", + "hi ker", + "budd y", + "preven tive", + "mcgra w", + "game boy", + "forsy th", + "top shop", + "si ob", + "sad h", + "in tram", + "follow art", + "so aps", + "dragon ball", + "ou x", + "morri son", + "๠ĥ", + "lu bric", + "adul thood", + "morri sons", + "âļ łï¸ı", + "her mo", + "ta ka", + "stall one", + "mis use", + "team gb", + "ra 
gha", + "con fined", + "at y", + "hom ophobic", + "nw o", + "sky news", + "ho ya", + "ac rosse", + "wi iu", + "pur ée", + "jed dah", + "ðŁ¤ §", + "advis ers", + "ph ine", + "an is", + "scrump tious", + "ë° ķ", + "c ke", + "vin y", + "ter m", + "s dc", + "o do", + "home school", + "vas c", + "leop ards", + "debor ah", + "illic it", + "cur ran", + "as roma", + "nau ght", + "mar ig", + "brand i", + "em p", + "ðŁĺį ðŁijĮ", + "î Į", + "su spend", + "lu z", + "initi ation", + "sch aft", + "jensen ackles", + "craw ler", + "post doc", + "des ks", + "trail blazer", + "den omin", + "tri x", + "no ise", + "po et", + "± ï¸ı", + "s mug", + "vol atile", + "proof s", + "pharmac ist", + "sardin ia", + "mash able", + "kim chi", + "co ed", + "schal ke", + "doo dled", + "c sw", + "sh ur", + "ro x", + "do k", + "chris brown", + "mathemat ician", + "ab ound", + "ang elic", + "rock ford", + "d ole", + "yor kers", + "ms n", + "g man", + "xavi er", + "bor rowing", + "mark ings", + "longh orn", + "k ja", + "diver ted", + "mm it", + "euph oria", + "ay yy", + "te a", + "pa h", + "ck i", + "un cut", + "li ven", + "ky ung", + "fan art", + "mer ing", + "red ding", + "amo vie", + "gri di", + "c thulhu", + "schol arly", + "ju dah", + "th bewithyou", + "eu calyp", + "ðŁIJ ķ", + "hert fordshire", + "cour troom", + "by u", + "auc tioned", + "ple ase", + "mar cia", + "ê° ĵ", + "succe eded", + "el as", + "arvin d", + "t lot", + "saig on", + "re tt", + "ra kesh", + "fd ny", + "as en", + "se bring", + "gladi ators", + "you know", + "v lad", + "gol a", + "par ap", + "ÑĢ Ð¸", + "sab cnews", + "one team", + "oh l", + "sun e", + "ri j", + "cd c", + "star gate", + "run down", + "plat o", + "ph c", + "chat ter", + "ra viol", + "mn f", + "mand ala", + "li et", + "ภķ", + "mari a", + "hun gover", + "consoli dation", + "fer rell", + "tradition al", + "ilove art", + "gal ap", + "ðŁı Į", + "que zon", + "espa ña", + "ðŁĩ¨ðŁĩ Ń", + "ho bby", + "steam boat", + "mali gn", + "guil lau", + "pro hi", + "its me", + "íĥ Ģ", + "in scription", + "al z", + "mari an", + "k ade", + "mm on", + "adju sting", + "ne sts", + "intern ally", + "ci r", + "vik ram", + "mal ala", + "k ph", + "fel icia", + "the real", + "cap tivity", + "at is", + "marcor ubio", + "kale ido", + "che v", + "mano j", + "le more", + "gent ri", + "vi ps", + "tro pe", + "\" âĢĶ", + "pair ings", + "mal nutrition", + "fr ay", + "desig nation", + "brun omars", + "az e", + "tor rential", + "pan zer", + "ga il", + "under the", + "the ological", + "schizoph re", + "dazz le", + "freder ic", + "mo par", + "ad illa", + "so ggy", + "ra un", + "medi ocre", + "colo rec", + "i fe", + "p inst", + "blu ef", + " ²", + "world water", + "gir oud", + "clar inet", + "ad olf", + "tar antino", + "receip ts", + "assu mp", + "ðŁij Ł", + "coffe es", + "âľĬ ðŁı¾", + "du plex", + "s of", + "r x", + "lin o", + "timber wolves", + "pan dit", + "mo tm", + "e ga", + "ay ama", + "ach s", + "outsi der", + "ll en", + "co er", + "til ly", + "cheese burger", + "ma ds", + "ple dis", + "emp ty", + "national parks", + "az iz", + "p mi", + "jun kies", + "f ener", + "sq n", + "è s", + "gener ation", + "cleop atra", + "bhuban es", + "mosqu es", + "ty free", + "popp ins", + "tw c", + "or well", + "n age", + "ka whi", + "hol low", + "dal ai", + "¨¨ ¨¨", + "ou ro", + "m health", + "gi on", + "az o", + "vis as", + "reneg ade", + "re ic", + "w sop", + "ðŁĴļ ðŁĴĽ", + "e chel", + "tox icity", + "mü n", + "bun k", + "stimul ating", + "asth our", + "\\ '", + "ep h", + "ende mic", + "cn bc", + "shrin king", + "peabo dy", + "michel 
angelo", + "can yon", + "wal e", + "su mi", + "si ders", + "inu it", + "? .", + "profession alism", + "dr acing", + "plat oon", + "p ons", + "out bound", + "maple leafs", + "de sol", + "cen cy", + "a than", + "ver ma", + "ru bbing", + "ok an", + "ðŁij ł", + "mull ins", + "authent ic", + "Å į", + "alman ac", + "ga ia", + "bb q", + "on imo", + "ke h", + "ty a", + "tou ts", + "y av", + "re posit", + ", .", + "wi ght", + "se eyou", + "cal lof", + "done sia", + "bar gaining", + "gr anth", + "sd su", + "amphi theater", + "p su", + "re watching", + "wine tasting", + "peak district", + "dete cting", + "thur man", + "phe e", + "èª ķ", + "u mich", + "re r", + "sculp ted", + "go le", + "name sake", + "ðŁĶ ģ", + "serv icing", + "bau gh", + "pu gh", + "pen cil", + "dar th", + "munch kin", + "at orium", + "ten ers", + "sun y", + "rolling stones", + "mag ing", + "star rer", + "i dris", + "fe instein", + "ag ron", + "âĺºï¸ı âĺºï¸ı", + "supervis ed", + "chamele on", + "aggre gate", + "succe ssive", + "mo gul", + "inst yle", + "pol dark", + "custom e", + "ohio state", + "ha ya", + "ci des", + "broker age", + "angel ou", + "fifa wwc", + "de forestation", + "al ton", + "pam ph", + "hu gged", + "ho bo", + "change able", + "ku ber", + "bur roughs", + "demon etisation", + "cape cod", + "vers atility", + "or ice", + "le ila", + "womenin science", + "tu a", + "he dges", + "embarrass ment", + "ali fe", + "so ars", + "ni ghter", + "hy mn", + "gi pp", + "chas u", + "tech s", + "ni all", + "k illa", + "hi ka", + "cam els", + "valu e", + " ¢", + "sc oops", + "mah moud", + "clu sive", + "adri ana", + "pac o", + "oz il", + "un as", + "transl ations", + "whispe rer", + "s bi", + "bu xton", + "bio tics", + "indi ffe", + "ken ney", + "k lar", + "et ching", + "barra best", + "inst ability", + "se ine", + "vo tel", + "blo gged", + "whis key", + "my space", + "t ant", + "lan dia", + "give back", + "illu s", + "aw ak", + "ac ab", + "f bloggers", + "cloud computing", + "blat ant", + "syri ans", + "band ra", + "sty n", + "an em", + "ke ted", + "kar thik", + "barun sob", + "pin ot", + "gu bernat", + "gay e", + "arti ste", + "i fied", + "conven tions", + "hu an", + "geni uses", + "eeee ee", + "fol ly", + "somer ville", + "pride month", + "ðŁĩºðŁĩ¸ ðŁĩºðŁĩ¸", + "chemo therapy", + "paul s", + "bak ar", + "ìĦ¸ë¸ IJ", + "taiwan ese", + "fol lo", + "c ss", + "re ign", + "nn nn", + "fla un", + "catastro phe", + "iti es", + "frag ments", + "extre mists", + "ym oun", + "car men", + "eze kiel", + "conne cting", + "se h", + "man ta", + "remodel ing", + "we ymouth", + "at oms", + "ce m", + "ne well", + "lu mi", + "the open", + "mo c", + "mili band", + "g land", + "z shq", + "mag gie", + "mani acs", + "m sp", + "ad y", + "cre ams", + "le anne", + "e sta", + "py g", + "af finity", + "pray er", + "dun bar", + "ligh troom", + "ac adi", + "wyn onna", + "roman tic", + "state dept", + "sick le", + "wh os", + "lam o", + "et our", + "fin ity", + "shru b", + "shar pen", + "pun dit", + "ed on", + "af ore", + "mar s", + "jeff ery", + "ter ps", + "medal list", + "kath arine", + "accu sing", + "ta z", + "roy d", + "from home", + "confron tation", + "alle gh", + "ðŁijī ðŁijī", + "refresh er", + "ran veer", + "never land", + "jo jo", + "lu crative", + "en am", + "ca ver", + "pa edi", + "man jaro", + "flu ids", + "the ssal", + "oppre ssed", + "mu ss", + "joh anna", + "Ø ®", + "cn g", + "buil dthe", + "sett les", + "s ith", + "fu ego", + "cl amp", + "ar ag", + "pay er", + "ted x", + "mand y", + "inter stellar", + "fr c", + "ch and", + "b cc", + "mo lo", + "len 
til", + "johan sson", + "grims by", + "nature lovers", + "ðŁļ¨ ðŁļ¨ðŁļ¨", + "shin de", + "x in", + "international dayof", + "transiti onal", + "sat a", + "cad dy", + "wo d", + "if u", + "ha ys", + "holl yo", + "j ang", + "ir c", + "co im", + "grad able", + "\" \"", + "ðŁį ´", + "ঠ¾", + "a el", + "n yo", + "west lake", + "time out", + "sof i", + "phenom ena", + "cultiv ation", + "ag no", + "un armed", + "so t", + "con j", + "gen o", + "royal navy", + "nutriti on", + "fair mont", + "ti relessly", + "sn g", + "re ty", + "mic a", + "lu cent", + "slo ane", + "droo l", + "riz al", + "od ell", + "critici zed", + ". '\"", + "la ze", + "deser ted", + "co der", + "pra s", + "l illian", + "itiner ary", + "dav y", + "an ap", + "whi pping", + "hobo ken", + "kare ena", + "çľ Ł", + "vi us", + "ter n", + "nan tucket", + "mis understood", + "bu laga", + "st ant", + "chin ook", + "z am", + "reli es", + "d ss", + "ed mond", + "sket chy", + "m ell", + "fe x", + "rec tor", + "dist ill", + "day dream", + "wine maker", + "ri pley", + "billion aires", + "hel ene", + "ati f", + "cul prit", + "bertr and", + "wou ldnt", + "ma pped", + "v ak", + "gla dly", + "parliam ent", + "kidlit art", + "ware ness", + "goli ath", + "âĨ ĵ", + "view point", + "tat ted", + "fu ls", + "dor sey", + "ang lers", + "li ds", + "ki ya", + "bow les", + "be h", + "b ite", + "compati bility", + "ance stral", + "pro x", + "beha ved", + "gubernat orial", + "ch field", + "sab an", + "z h", + "teen y", + "shibu ya", + "holli day", + "pan cy", + "âĿĦï¸ı âĿĦï¸ı", + "seun gri", + "? ,", + "ðŁĩ¦ ðŁĩ·", + "im itation", + "impac tful", + "any i", + "gene vie", + "añ os", + "bate man", + "gli der", + "af ar", + "ra sheed", + "effor tless", + "sh war", + "dach sh", + "er un", + "at os", + "kin i", + "ch d", + "kha ki", + "k lin", + "felici dades", + "bel o", + "as l", + "to ppers", + "fin ley", + "stac ey", + "rigor ous", + "kar ting", + "le ppard", + "car michael", + "be ret", + "c se", + "ak hi", + "mer ingue", + "ab an", + "ha ke", + "ger i", + "er jee", + "re sto", + "comm anders", + "pr it", + "fl or", + "ad ven", + "ex termin", + "remain der", + "å IJ", + "es g", + "martin o", + "lulla by", + "| @", + "mi gn", + "in store", + "big bang", + "cor di", + "cau ley", + "ante bellum", + "dg ate", + "cro ck", + "span dex", + "scaf folding", + "ore os", + "ê°ĵ ìĦ¸ë¸IJ", + "pom ona", + "ma uro", + "uni versi", + "re mi", + "af ootball", + "t ant", + "sm alls", + "ne h", + "worl do", + "tropic al", + "mor ph", + "jav elin", + "gla r", + "arqu itec", + "reminis cent", + "tu bs", + "spide y", + "make u", + "syl la", + "progressi ves", + "blo t", + "shor ten", + "keep in", + "ch ak", + "ang st", + "super food", + "decad ent", + "ston y", + "neuro logical", + "ar boretum", + "ann ak", + "fe ma", + "per cu", + "dis respectful", + "small biz", + "lo x", + "co om", + "c sc", + "bs bi", + "pre valence", + "him ss", + "esp an", + "mo ga", + "fr ampton", + "sky map", + "mas se", + "levi athan", + "( ).", + "noctur nal", + "car ameli", + "ang or", + "amne sia", + "outsi ders", + "she alth", + "rhin o", + "ant ag", + "ag io", + "ðŁĴ° ðŁĴ°", + "take me", + "kab addi", + "c si", + "m sh", + "coch rane", + "thessal oni", + "sil a", + "ha us", + "du sting", + "obe se", + "mack lemore", + "mani sh", + "len in", + "m dc", + "gro wn", + "shef field", + "s rs", + "ke le", + "car son", + "ch um", + "dah lia", + "can tore", + "opp o", + "how ling", + "cyber crime", + "sur realism", + "sc ran", + "fa iz", + "thre n", + "rac ists", + "r out", + "pk not", + "se mana", + "sin i", + 
"mc cull", + "ma chi", + "alfon so", + "y b", + "sar dar", + "kend rick", + "den g", + "reci pro", + "on f", + "doom sday", + "bri bery", + "custom iz", + "art is", + "c pi", + "ðŁĻĪ ðŁĻĪ", + "sla va", + "let te", + "en s", + "âĿ¤ï¸ı ðŁĺĺ", + "cra yon", + "ad an", + "tr c", + "migr ate", + "simp son", + "row ers", + "king sley", + "farmers market", + "shee han", + "ne phe", + "bor non", + "car ton", + "mic key", + "all ure", + "u lu", + "sli pknot", + "heb do", + "gui do", + "dog celebration", + "online marketing", + "acceler ating", + ") ..", + "origin ated", + "macar oni", + "ed tech", + "out field", + "mit z", + "disc us", + "adverti ser", + "man or", + "ha shi", + "descri p", + "cap ita", + "ful bright", + "recep tor", + "con n", + "con ey", + "spion age", + "r attle", + "pre st", + "u li", + "blog post", + "acker ay", + ") â̦", + "red velvet", + "mat th", + "inspir ing", + "b sd", + "ker ri", + "po con", + "mil lar", + "re pur", + "accent ure", + "ä ¹", + "ram bo", + "ragnar ok", + "dele ting", + "british museum", + "pat ory", + "leip zig", + "flori an", + "sci fi", + "in ers", + "br ate", + "yo y", + "melis sa", + "ab er", + "ma sa", + "po te", + "mosquit oes", + "transpl ant", + "r pa", + "; ))", + "bast ille", + "yl an", + "joye ux", + "melo dic", + "cap tions", + "atri st", + "roch dale", + "gott i", + "pew die", + "cuties aturday", + "who is", + "aqu aculture", + "tiv a", + "sp el", + "he ss", + "ha ji", + "fred die", + "co per", + "brand o", + "v k", + "photo book", + "* ,", + "my dayin", + "micha ela", + "brune i", + "sr ini", + "in te", + "Ä ±", + "de ol", + "d fc", + "separ ately", + "bun d", + "ve sts", + "to c", + "me ck", + "rein forced", + "constra ints", + "car roll", + "sq ft", + "re ver", + "cam per", + "bird man", + "in action", + "gener ators", + "triumph ant", + "pe sts", + "o vo", + "gy pt", + "al amo", + "sc aled", + "suresh pp", + "sd n", + "is mo", + "gi os", + ") @", + "justic eleague", + "restaur ant", + "gab i", + "den gue", + "next gen", + "exemp li", + "ap ex", + "inspir ational", + "down side", + "kid z", + "u pl", + "et na", + "alvar o", + "fel dman", + "bar net", + "m ha", + "es ch", + "bloo ded", + ">>>> >>>>", + "kan i", + "ho fficial", + "casablanc a", + "bir ds", + "ty ga", + "sw amp", + "o day", + "new castle", + "nb ap", + "ci sion", + "cho ols", + "af lo", + "ne p", + "mon ton", + "ak b", + "super model", + "down time", + "th os", + "sc wx", + "snoo py", + "ag greg", + "yo ke", + "nor cal", + "we tt", + "prolon ged", + "me tast", + "beat er", + "f ta", + "t lap", + "disgu sted", + "y h", + "voice over", + "itch y", + "ip c", + "ðŁİ ¾", + "phe asant", + "stra its", + "ram pant", + "j g", + "fer til", + "assu res", + "fortun es", + "sal inas", + "liz ards", + "kett le", + "i bs", + "cyn thi", + "he g", + "mc cr", + "soccer oos", + "happen ings", + "cor den", + "ðŁĺĤ ðŁijĮ", + "t ches", + "egre t", + "wolver ines", + "congratul ated", + "ho gg", + "bott ling", + "wr i", + "fer ri", + "bo sch", + "af ire", + "og den", + "s jo", + "j dm", + "sv t", + "con tex", + "tol lywood", + "min k", + "me se", + "super sonic", + "op oulos", + "å ¸", + "âĶ ģ", + "knuck le", + "gu ise", + "gam i", + "chu cky", + "z inger", + "radi al", + "compla ined", + "bo da", + "fe tal", + "discipl ines", + "cor ro", + "ðŁĩ®ðŁĩ ¹", + "op ted", + "filtr ation", + "ad nan", + "em cee", + "mi stre", + "insom ni", + "fer gus", + "tra jec", + "on don", + "med tech", + "tanger ine", + "madra s", + "gru e", + "cab s", + "z hu", + "sureshpp rabhu", + "insul ated", + "day swild", + "pp 
m", + "band ai", + "v day", + "s ff", + "squ id", + "lo thing", + "not dead", + "expre ssive", + "cu ll", + "ala stair", + "x u", + "up front", + "fish ers", + "en es", + "um d", + "dis missal", + "sti er", + "sel s", + "lu st", + "re active", + "prote ster", + "eyel ashes", + "al im", + "goo de", + "gre eng", + "da ir", + "com pen", + "anush ka", + "proto typing", + "ma pu", + "bear ings", + "ðŁIJ Ł", + "for me", + "bsbi botany", + "timo thy", + "out skirts", + "am bed", + "are tha", + "wend ell", + "stre aks", + "ni m", + "k pk", + "sne e", + "fit ter", + "quo ta", + "p ate", + "win ning", + "ðŁį Ń", + "sho pping", + "ma inst", + "cul ver", + "ste vie", + "mcfad den", + "counter parts", + "gren fell", + "fol som", + "dor set", + "tech crunch", + "⬠ħï¸ı", + "tip tuesday", + "us l", + "tre x", + "geor gie", + "ranveer official", + "lic ks", + "se wn", + "k f", + "' â̦", + "jap s", + "p ate", + "orth op", + "fe sta", + "stra s", + "mon tal", + "hammer smith", + "fore most", + "wido ws", + "mad re", + "ite z", + "mito chondri", + "lig ans", + "z ona", + "cari bou", + "m ss", + "andre i", + "weather channel", + "gh c", + ": ...", + "ta ft", + "awe ather", + "al isation", + "bru tal", + "bliss ful", + "nik ola", + "mal icious", + "q m", + "mpg vip", + "bro die", + "bl itz", + "applau d", + "dri bb", + "v ague", + "dog go", + "transl ating", + "interpre ted", + "hat ched", + "ge tyour", + "benefici aries", + "spar ring", + "caes ars", + "aw illiams", + "la hat", + "bro ke", + "ti mp", + "virtu es", + "rel ying", + "pie tro", + "k tn", + "ici sts", + "pab lo", + "lou i", + "a ag", + "pn pp", + "cha st", + "pul ses", + "fini sh", + "usair force", + "type writer", + "thomp son", + "dog s", + "ut to", + "ãģ į", + "sand al", + "new ly", + "do ge", + "z w", + "wan kers", + "ne gr", + "mu cha", + "determin es", + "black fish", + "sk unk", + "mu ps", + "instru ment", + "phy to", + "daysto go", + "skin ned", + "hai der", + "con ten", + "ðŁIJ¾ ðŁIJ¾", + "we iler", + "undoub tedly", + "chair ing", + "wall is", + "sh ard", + "zind abad", + "adul t", + "absor ption", + "pre sto", + "deplo ying", + "drum mond", + "battle front", + "seag ulls", + "how dy", + "juda ism", + "des de", + "part ition", + "âľ Ŀ", + "no logy", + "national bestfriend", + "lesn ar", + "film fare", + "co asts", + "christen sen", + "ac an", + "mb u", + "co pped", + "ru bble", + "sw c", + "fun nier", + "far ther", + "where as", + "nano technology", + "with stand", + "pil low", + "bow ers", + "to pe", + "it ly", + "con fit", + "ma kar", + "comfor ts", + "bo sh", + "cli pper", + "bal la", + "sti k", + "mil b", + "safe guard", + "musi que", + "eas port", + "ya z", + "pad ded", + "bad er", + "fore ign", + "chop in", + "archi ve", + "o ka", + "tran sporting", + "tml talk", + "aj it", + "consequ ence", + "sc roo", + "ff o", + "collabor ated", + "pug chat", + "ye mi", + "jav ed", + "au burn", + "o of", + "ma w", + "sau cer", + "miti gate", + "i les", + "evangeli st", + "ter ie", + "re cl", + "indic tment", + "cat a", + "bright ness", + "may the", + "whim sical", + "un lv", + "key word", + "cu min", + "med way", + "west world", + "tra w", + "im posing", + "form ity", + "coul ter", + "ab z", + "ny pd", + "grass i", + "kel sey", + "qld pol", + "clock work", + "f dr", + "di anne", + "âĺ ij", + "ad h", + "p ann", + "bra vely", + "ae ge", + "un lawful", + "ver di", + "pocaly pse", + "phar o", + "kar la", + "reson ance", + "ma stiff", + "la dak", + "bu u", + "ma iled", + "hi i", + "craw ley", + "tor rent", + "mach ado", + "liby an", + "effort lessly", 
+ "fal sely", + "q vist", + "ke ef", + "craf thour", + "cheri shed", + "val kyrie", + "s ari", + "kal amaz", + "be he", + "ðŁĮ Ļ", + "th im", + "ro ddy", + "col trane", + "but chers", + "ach im", + "wk end", + "awk ward", + "cab rera", + ":) )))", + "fran c", + "decl an", + "con dos", + "a ja", + "pandor amusic", + "char ter", + "ph ill", + "mon trose", + "hatch back", + "handic app", + "gre aves", + "eucalyp tus", + "ut most", + "t son", + "bur ton", + "mid wives", + "in cur", + "ðŁĺį #", + "moo d", + "compre ssed", + "tom a", + "must ang", + "mo g", + "as ana", + "te stic", + "sho tel", + "in sol", + "cor sair", + "nh q", + "ben ny", + "sm ma", + "kap ur", + "in con", + "jon as", + "ener gies", + "don al", + "as ad", + "se z", + "n pa", + "archi ved", + "stimul ate", + "do p", + "hy d", + "gri eving", + "ãĥ Ī", + "ron a", + "why te", + "tree house", + "ss ell", + "sand ro", + "ko bo", + "ther most", + "se clu", + "hi ya", + "ge ez", + "mam as", + "prisc illa", + "flav oured", + "fas s", + "w old", + "maker space", + "cospla y", + "p tv", + "happy valentinesday", + "sequo ia", + "love craft", + "gu an", + "d tm", + "ci i", + "yoko hama", + "pos thum", + "re q", + "ðŁĶµ âļªï¸ı", + "galat asar", + "dol by", + "hamp tons", + "disturb ance", + "stone henge", + "ok c", + "disrup ting", + "month sary", + "jun gle", + "head lights", + "du stin", + "micro sof", + "happy mothersday", + "ko ko", + "gra zi", + "te sto", + "na idu", + "mal ay", + "ari al", + "ru mb", + "ab oo", + "har man", + "tra pe", + "spo ils", + "je ho", + "go dly", + "lock screen", + "z un", + "pi ous", + "ma gento", + "l enders", + "prob able", + "corpor al", + "m our", + "aw al", + "su a", + "call me", + "ton ne", + "go vin", + "devast ation", + "x j", + "gear box", + "war lock", + "per me", + "it ate", + "gaza underattack", + "du val", + "paras ite", + "clement e", + "le th", + "i va", + "fro zen", + "tho les", + "to bin", + "cair n", + "s ill", + "luc kiest", + "conver ts", + "st ale", + "pan cra", + "euro pale", + "wis dom", + "sch ur", + "ì ¶", + "verti go", + "bi j", + "u bc", + "nu re", + "righte ousness", + "mt c", + "factor y", + "ver st", + "revers ed", + "hur i", + "hee chul", + "fab er", + "ar r", + "ul ous", + "ven om", + "ph at", + "green ery", + "bra dy", + "à ¦", + ": ((", + "never giveup", + "di sha", + "mo ta", + "health care", + "dun ham", + "dex po", + "den zel", + "bb ins", + "f ics", + "wh am", + "mc g", + "eli an", + "wat a", + "str alia", + "tel lu", + "pe sky", + "spin off", + "ar moured", + "re acted", + "do fficial", + "te du", + "sag ar", + "mor ally", + "paralle led", + "fi os", + "dow ner", + "dau gh", + "re do", + "world cup", + "tari q", + "bar ne", + "glaci ers", + "oc cult", + "barbar ian", + "her mosa", + "!! 
!)", + "y ur", + "inter nation", + "p ss", + "sit u", + "p int", + "american air", + "sw am", + "dopp ler", + "ðŁĴĻ ðŁĴľ", + "cincode mayo", + "le van", + "hell enic", + "mc ne", + "ju di", + "yu h", + "st x", + "qu are", + "ðŁĺĤ .", + "sti g", + "g els", + "mot ley", + "hard work", + "euro zone", + "e ad", + "ç¥ Ń", + "seab ir", + "ci us", + "la id", + "alpac a", + "presu mably", + "pewdie pie", + "boo ted", + "am ari", + "tam ine", + "sol ace", + "bar row", + "acade mies", + "x ian", + "om ination", + "dun geons", + "b ma", + "de ity", + "ai k", + "stab il", + "hir a", + "affection ate", + "ving ne", + "new port", + "ãħĭ ãħĭ", + "thir ds", + "re tains", + "aroma therapy", + "ski er", + "ni ma", + "do pe", + "cr inge", + "con domin", + "to or", + "anim ator", + "sar aj", + "seas cape", + "minim alism", + "lake shore", + "calla way", + "berg man", + "ठĹ", + "whisp ering", + "stupi d", + "ri ghtful", + "requ is", + "ir n", + "se va", + "ut pol", + "tuber culo", + "squ ish", + "de but", + "govern mental", + "christ ine", + "all man", + "weap on", + "s ito", + "bur i", + "lo lita", + "leaf y", + "fu ch", + "tin ted", + "mck en", + "a hahaha", + "ðŁĩµðŁĩ ¹", + "repe al", + "ne gan", + "ðŁķ Ĭ", + "tail gating", + "game insight", + "ðŁıŁ ï¸ı", + "yaku za", + "z t", + "ti ring", + "pro posing", + "bow lers", + "tra itors", + "ak shi", + "cler gy", + "cit o", + "up sets", + "tu scal", + "symph onic", + "sil ently", + "shu ff", + "black well", + "ðŁĺĤ )", + "ko be", + "rober to", + "ri dg", + "dc u", + "mer ino", + "ft p", + "east side", + ". ~", + "nb l", + "mn leg", + "ts for", + "frau dul", + "ca pping", + "in my", + "gymna st", + "ston es", + "ss in", + "twe aks", + "shag gy", + "oak land", + "dem sin", + "sang ria", + "mm va", + "hen nessy", + "down ton", + "ri ghtly", + "in it", + "aga ve", + "ob last", + "northe ast", + "friend ship", + "dal a", + "tro phy", + "ðŁij ½", + "mag in", + "margar itas", + "ê ·", + "ww fc", + "fa sh", + "di ke", + "cu d", + "char t", + "ðŁij ®", + "refuge es", + "jop lin", + "n cs", + "imp y", + "firm ware", + "pas cu", + "flam in", + "health tech", + "bell letstalk", + "w aka", + "ol ls", + "la go", + "co wan", + "bombar dier", + "sh ome", + "ðŁĻ ħ", + "mc master", + "na ve", + "well s", + "u ta", + "tell ers", + "mis fits", + "kap il", + "face off", + "af firm", + "a pro", + "whit epaper", + "super yacht", + "speci mens", + "al located", + "... 
,", + "- __", + "ka w", + "dachsh und", + "djo ker", + "s work", + "qui ere", + "or um", + "ðŁIJ ł", + "som m", + "c mt", + "ingh our", + "skin ny", + "lgb ti", + "gi ggles", + "break away", + "resear ched", + "par ity", + "my al", + "ms l", + "re tained", + "si vity", + "make inindia", + "sol ves", + "defam ation", + "wal tham", + "sri racha", + "road way", + "concep tu", + "al in", + "iw ant", + "å Ī", + "del ft", + "tender loin", + "ga ins", + "faul ts", + "sw ire", + "st ellen", + "pol lo", + "dy ne", + "bornon thisday", + "asdf ghj", + "sq l", + "sali m", + "advis es", + "vo ip", + "ìĹij ìĨ", + "un touched", + "she il", + "ontari o", + "uph ill", + "so bre", + "de shi", + "nov ella", + "du tton", + "craw fish", + "ا٠Ĩ", + "ma a", + "tw ine", + "kal in", + "ðŁĩµðŁĩ Ń", + "ye ss", + "brook s", + "hoo siers", + "ton ka", + "umbrel las", + "ay ers", + "ate am", + "acqu iring", + "su ction", + "ä n", + "wi es", + "tari ans", + "soci o", + "mat tb", + "shepher ds", + "o so", + "charity tuesday", + "s logans", + "ninj as", + "al bat", + "by te", + "bash ir", + "trampol ine", + "mydayin la", + "i ja", + "bas el", + "ror y", + "gol die", + "fi rec", + "un noticed", + "pecu liar", + "sch a", + "ker son", + "mour ns", + "liquid ity", + "qu ipment", + "hi bs", + "ar s", + "aeron au", + "slide show", + "sla bs", + "delici ousness", + "sk itchen", + "hta fc", + "full erton", + "cre ighton", + "aer ob", + "procrastin ation", + "az ores", + "white hall", + "uss occer", + "medi ation", + "djoker nole", + "and me", + "um en", + "noxi ous", + "jo ss", + "ili fe", + "anni vers", + "sudan ese", + "et res", + "under mine", + "whole foods", + "diso be", + "kor i", + "ade le", + "eli z", + "can ti", + "al on", + "gymna sium", + "sarko die", + "meteoro logist", + "yl de", + "ste en", + "stamp collecting", + "nas al", + "lo tt", + "fran ks", + "ex ol", + "ack i", + "good year", + "animal rights", + "y les", + "vio lets", + "mm es", + "s thel", + "ra pping", + "tu scan", + "wai ver", + "tur ner", + "eat local", + "northe asthour", + "anim ations", + "tom morow", + "t sh", + "ff ame", + "bra e", + "pe tron", + "glam our", + "br yn", + "d cs", + "bal es", + "ðŁĶ ¶", + "bro v", + "bre v", + "b ons", + "physi que", + "car ne", + "x e", + "elix ir", + "vol ved", + "l oma", + "ìľ ł", + "æ ĺ", + "van u", + "ri gs", + "bal ance", + "va res", + "bon ita", + "sprink le", + "perfec to", + "di on", + "le ak", + "calcu tta", + "o ba", + "d ma", + "c mon", + "tun er", + "pneu monia", + "bo gus", + "apolo ge", + "cl ough", + "bor ne", + ")) ))", + "revi ved", + "o varian", + "ner f", + "c legg", + "fan fest", + "cho u", + "reali zes", + "mc n", + "li gu", + "leg alize", + "just saying", + "for ster", + "bo sni", + "k hi", + "in dom", + "hei del", + "en cryp", + "si ss", + "ed di", + "mar bles", + "brisban e", + "y ing", + "pre paid", + "wal sall", + "cooper ate", + "orche str", + "mar isa", + "ho wie", + "che wy", + "bren ner", + "andro meda", + "e gan", + "sto cki", + "cav endish", + "ag an", + "ban o", + "de ir", + "go g", + "bl k", + "re thinking", + "ch ig", + "rhe u", + "sni p", + "p eng", + "semin ole", + "m swx", + "an nex", + "lyn da", + "lewisham ilton", + "cu mul", + "tb l", + "dolph in", + "agu ero", + "........ 
....", + "pre lude", + "at our", + "gr anger", + "too ting", + "ro tun", + "dis ar", + "home items", + "da res", + "**** ****", + "ðŁij Ĩ", + "compre h", + "jin x", + "as well", + "iri e", + "circul ating", + "ðŁIJ ¥", + "over board", + "cultiv ate", + "rhe tt", + "oriente ering", + "ca k", + "bal kans", + "s itt", + "jas min", + "britney spears", + "ro tor", + "se aling", + "g bc", + "oc ci", + "f as", + "eman cip", + "com er", + "war time", + "tic kle", + "son ny", + "pac es", + "log g", + "at rix", + "sr p", + "g win", + "do bbs", + "uz be", + "the wanted", + "dru sh", + "ex tru", + "m icky", + "honore es", + "dar win", + "re dux", + "mm j", + "ram i", + "jalape ño", + "io c", + "do ver", + "ju ju", + "whit ney", + "s eng", + "en ly", + "au ch", + "archipel ago", + "vigil ant", + "man gal", + "wil dest", + "parano id", + "hal i", + "bb ly", + "sanc tioned", + "real ms", + "con co", + "u ddin", + "c sk", + "play time", + "libr a", + "sav ag", + "oc tane", + "rec tan", + "re turn", + "par rish", + "mor rha", + "cc p", + "c mu", + "sa iled", + "se vent", + "ro sie", + "pil ing", + "he w", + "boar ded", + "seg ments", + "neph ro", + "( .", + "cr ats", + "bak es", + "ðŁį ¸", + "back tothe", + "sibl ing", + "kirk land", + "ke o", + "gu wa", + "bre ads", + "ðŁĺľ ðŁĺľ", + "t q", + "haras sed", + "ga u", + "wil bur", + "j isoo", + "ep er", + "li sam", + "tri ppin", + "sh ino", + "ru kh", + "beast mode", + "cho a", + "inst aweather", + "rich land", + "gar i", + "fe z", + "cowboy snation", + "fur suit", + "k run", + "a en", + "sycam ore", + "se gun", + "ent ennial", + "di h", + "o ax", + "demsin philly", + "ðŁĻ Ģ", + "sn hl", + "pen nies", + "pass words", + "ma kin", + "ty e", + "d eng", + "kni gh", + "jeep life", + "hel pline", + "a for", + "zz zz", + "ste amy", + "pic ker", + "iter ate", + "happen ingnow", + "ki b", + "bloom berg", + "martyr dom", + "bul ly", + "assor tment", + "a hora", + "zo e", + "no i", + "illu stri", + "agar wal", + "p sc", + "electr onica", + "recruit er", + "gar diner", + "rad ha", + "naf ta", + "dot net", + "pi ero", + "geor g", + "bel s", + "ðŁĺĤ ðŁĺį", + "tuberculo sis", + "run nin", + "mor is", + "haul ing", + "ev oc", + "bre thren", + "sha ir", + "frame works", + "a stu", + "ri gid", + "ku ma", + "kre me", + "jin nah", + "insu rers", + "ny u", + "f ere", + "nol lywood", + "good vibes", + "- ...", + "toi le", + "sk ril", + "instaweather pro", + "cze ch", + "pa vel", + "one piece", + "nike plus", + "fi let", + "cav ity", + "ðŁı½ âĢįâĻĤï¸ı", + "ðŁİ £", + "dra stic", + "dail ys", + "siam ese", + "re bu", + "oste o", + "lar k", + "f re", + "sh elling", + "p é", + "glad ys", + "ðŁıĢ ðŁıĢ", + "gusta ve", + "submer ged", + "grand stand", + "att u", + "won t", + "f pv", + "b ley", + "jon i", + "ang ames", + "weigh ted", + "al ou", + "ठ¶", + "les bians", + "f j", + "anni es", + "am l", + "dor ia", + "dav in", + "be ta", + "can c", + "madewith unity", + "ha j", + "bad lands", + "mu l", + "blu ec", + "pa wn", + "cov ington", + "neuro logy", + "htt weets", + "dysle xia", + "thel ove", + "ne at", + "fork lift", + "autom ate", + "une ven", + "monte ss", + "he in", + "ha g", + "rel ics", + "competiti veness", + "can elo", + "mar tens", + "bullet proof", + "sk ittles", + "g ya", + "pri mo", + "americ afirst", + "woo o", + "abor tions", + "?? 
!!", + "ma che", + "ld ers", + "rl ly", + "preli ms", + "direc t", + "cour se", + "swa in", + "super cell", + "ec centric", + "sting ray", + "ple ts", + "wil cox", + "west in", + "okan agan", + "kir an", + "car bo", + "bomb ings", + "ra rest", + "bo h", + "gaw d", + "di gg", + "mo ana", + "enti rety", + "en closed", + "dodge ball", + "par ton", + "milky way", + "at r", + "thorough bred", + "re ally", + "qant as", + "epiph any", + "ine e", + "aero smith", + "spi eth", + "ar thro", + "ell ini", + "du bu", + "bra ving", + "âļ½ âļ½", + "re structuring", + "illumin ate", + "equ ili", + "mp i", + "ash ton", + "pony tail", + "ma scots", + "flat tering", + "cru m", + "ast a", + "à® °", + "stranger things", + "bar nab", + "ر ÙĬ", + "make shift", + "got cha", + "will am", + "cho irs", + "kilom etres", + "gho sh", + "eu than", + "dol ly", + "un ning", + "the ar", + "cre we", + "w sw", + "j ace", + "dis miss", + "ke an", + "ho ta", + "kh at", + "~ >", + "thir u", + "ren dez", + "hart man", + "tee ssi", + "cas ca", + "z ah", + "hydr ange", + "fo d", + "aw p", + "mzan si", + "thick er", + "nago ya", + "ne va", + "sti que", + "cast el", + "dam ian", + "there by", + "ji ang", + "ale k", + "music islife", + "ra q", + "calla han", + "gou ache", + "somal iland", + "sean hannity", + "ra heem", + "lo se", + "elo ve", + "whar ton", + "rectan gular", + "illustr ating", + "har ne", + "auti sma", + "scra pped", + "ell and", + "decre e", + "nag pur", + "ki pp", + "so re", + "n md", + "ma as", + "gun a", + "gart ner", + "bel li", + "then ight", + "je on", + "gendere quality", + "gi ver", + "a el", + "gar ments", + "ne u", + "mardi gras", + "mar sden", + "ro wer", + "pollu ted", + "camer aman", + "vin od", + "be asley", + "cro c", + "ji u", + "hollyo aks", + "anesthe sia", + "al les", + "ste ward", + "lati mes", + "ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸ ðŁĩºðŁĩ¸", + "tic ian", + "gor ia", + "come dic", + "ðŁ¤Ķ ð٤Ķð٤Ķ", + "nai ve", + "sli ons", + "ł Ī", + "bur glar", + "ðŁĺŃðŁĺŃ ðŁĺŃðŁĺŃðŁĺŃ", + "york shi", + "se ñ", + "fan boy", + "lau rel", + "inci dence", + "potom ac", + "rober ta", + "presi den", + "pr yor", + "os bourne", + "w ku", + "te me", + "pal ae", + "ðŁ¥ º", + "re boun", + "itu de", + "red dish", + "k hand", + "coloni alism", + "north carolina", + "ðĿ Ĵ", + "manne quin", + "lady bird", + "ta sty", + "knowledge able", + "g shore", + "ðŁĮ Į", + "à® ©", + "qu aker", + "salz burg", + "med alists", + "chy na", + "bridesma id", + "ma ori", + "ro p", + "outra ged", + "in adequate", + "truck ers", + "al ana", + "ìĿ ¼", + "ri x", + "oooo oooo", + "command ments", + "lam beth", + "aa j", + "eco friendly", + "bla z", + "morecam be", + "boun cy", + "rou x", + "rai ded", + "mi zed", + "sh c", + "gaw x", + "labor atories", + "ru bs", + "rest room", + "consult ations", + "ca jun", + "virgin i", + "so ir", + "rev ue", + "ple in", + "wag er", + "ç ¹", + "we do", + "growing up", + "! 
ðŁĺĬ", + "face ted", + "sin ners", + "ho vering", + "ti ene", + "seas oning", + "an ja", + "leg go", + "il is", + "fla x", + "dev o", + "ash ram", + "mati sse", + "ker i", + "go wer", + "bo tox", + "mar shes", + "unh cr", + "ts m", + "opti mus", + "dun i", + "stu ffs", + "so k", + "order ly", + "n bad", + "islam ophobia", + "raviol i", + "fab er", + "cre ds", + "won ka", + "in fusion", + "over weight", + "daily news", + "assi mil", + "acol lege", + "medalli on", + "kili manjaro", + "sti ff", + "tham es", + "sun ken", + "th ard", + "my dubai", + "hilari ously", + "han nel", + "plu mber", + "fair view", + "separ ating", + "rasc al", + "qui en", + "necess ities", + "confeder ation", + "ll ll", + ": ]", + "weak nesses", + "bron co", + "ra ffles", + "el ot", + "ãĤ¸ ãĥ", + "advent calendar", + "ðŁİ ¹", + "stra vel", + "tun ic", + "k su", + "im peach", + "e spionage", + "! -", + "di ment", + "cur rant", + "bio de", + "commu ting", + "by ron", + "ðŁĴĵ ðŁĴĵ", + "shad ed", + "tr uro", + "cray ons", + "ar ne", + "h sc", + "fre aked", + "dram ati", + "fle ek", + "u cd", + "marl borough", + "^ -", + "cross ings", + "mal o", + "black ops", + "bin ance", + "cho ked", + "chen ey", + "pl o", + "ge stures", + "val edic", + "ryan air", + "rem ington", + "v cs", + "mc kee", + "ec z", + "be gs", + "nail art", + "mayor of", + "happy fathersday", + "war t", + "pet itions", + "n ingly", + "clean energy", + "bro x", + "sl alom", + "exist ent", + "ab ay", + "ug liest", + "tom p", + "stom a", + "sel by", + "goal scorer", + "ben ji", + "overwhel mingly", + "lan s", + "semiconduc tor", + "south korea", + "re scheduled", + "sk yl", + "en listed", + "dow ski", + "si del", + "rosen berg", + "nas ser", + "white head", + "pri us", + "har are", + "en n", + "ry der", + "í Ĥ", + "mon g", + "clas ico", + "transpor ter", + "po tty", + "is me", + "** ***", + "vic e", + "sk it", + "ode ssa", + "l mp", + "her n", + "raci ally", + "pin oy", + "paragu ay", + "obitu ary", + "go es", + "bu cha", + "side walks", + "angu lar", + "un constitutional", + "transiti oning", + "i bu", + "gu ys", + "un packing", + "oooo oo", + "black girl", + "ber gs", + " ¯", + "wordof theday", + "trump train", + "thunder bolt", + "m si", + "fasci sts", + "ठ¬", + "t sk", + "collap ses", + "raje sh", + "loveis love", + "migr ating", + "set back", + "ðŁĺĬ âĿ¤ï¸ı", + "t els", + "safety first", + "nar rated", + "jae joong", + "un answered", + "lique ur", + "en nes", + "dal go", + "bill ings", + "salt water", + "mer maids", + "lon gs", + "clap ham", + "we arec", + "pic collage", + "n ach", + "h ace", + "pois oned", + "lo th", + "ag na", + "adel rey", + "guar dia", + "poli shing", + "peace keeping", + "d all", + "p isa", + "la pland", + "process ors", + "de andre", + "so bs", + "p once", + "dra ins", + "c be", + "ðŁİ¥ :", + "spla sh", + "meat ball", + "fon tana", + "worcester shirehour", + "ne v", + "bri sk", + "b int", + "ac r", + "po x", + "cay enne", + "skril lex", + "j fc", + "hahahaha hahaha", + "gla s", + "en gul", + "tempor al", + "oni zed", + "con cre", + "com pose", + "vibr ations", + "plant ers", + "fer t", + "criticalrole fanart", + "t bli", + "sch allenge", + "huck abee", + "munici pal", + "iam bic", + "radi os", + "ne vis", + "dura bility", + "mc cla", + "horse back", + "inst itutes", + "ful fill", + "atta ch", + "ate ur", + "ak an", + "resi sting", + "illumin ation", + "hand le", + "hair care", + "om ent", + "macle od", + "ka iser", + "g no", + "bear down", + "ly f", + "gl omer", + "distor tion", + "z m", + "san k", + "roo sters", + "is now", + "as 
ports", + "ag en", + "wo ken", + "st george", + "ro mper", + "my le", + "econom ists", + "ru to", + "t will", + "health and", + "d ito", + "ws l", + "tair p", + "pra kash", + "mic heal", + "h ts", + "w rights", + "kat su", + "fioren tina", + "defen seman", + "d itch", + "var sity", + "texan scheer", + "ba ham", + "sc anned", + "we il", + "seduc tive", + "ðŁijį ðŁı½", + "fu e", + "er win", + "dav ison", + "ter ran", + "moo ds", + "wool f", + "re source", + "@ .", + "cu sh", + "ðŁį °", + "regre ssion", + "cur led", + "la zer", + "jo anne", + "ab bott", + "mo z", + "down ers", + "mm mmmm", + "valent ina", + "k hair", + "dream t", + "cro ok", + "che k", + "ste aming", + "nephe ws", + "cl eric", + "as ober", + "indefin itely", + "w ye", + "us news", + "joy ce", + "flu shing", + "wynonna earp", + "ron do", + "kis s", + "hot dog", + "bar ns", + "sax ophon", + "far ley", + "gas p", + "decre asing", + "al way", + "pe x", + "l sd", + "shi ft", + "p outine", + "ra zz", + "rescu ing", + "ni ko", + "ho ch", + "cc l", + "u aap", + "n ts", + "m car", + "il wx", + "conqu ering", + "ket tering", + "stur dy", + "delay ing", + "sto k", + "vani shed", + "cath ar", + "bin gham", + "in v", + "ic hiro", + "he mo", + "budge ting", + "[... ]", + "be ss", + "sebasti an", + "slow ed", + "ðĿ ij", + "musli m", + "stun s", + "acton climate", + "ve a", + "se ton", + "rose tta", + "oun t", + "hard in", + "flu id", + "ca w", + "ðŁ¥ Ĥ", + "yach t", + "un l", + "sp hy", + "provoc ative", + "or ic", + "is back", + "__ _", + "nicol as", + "gy an", + "loo se", + "fl in", + "reb ate", + ": ::", + "! \"@", + "com icon", + "she ff", + "down stream", + "chic hester", + "beach life", + "mom life", + "diabe te", + "ar ra", + "van e", + "ok u", + "ye o", + "man go", + "try out", + "app ell", + "he irs", + "arjun a", + "dd u", + "na veen", + "movi c", + "soci alists", + "s back", + "criteri on", + "soyu z", + "k her", + "da z", + "yol anda", + "wine oclock", + "re ina", + "one w", + "leon ard", + "en dez", + "u bs", + "support local", + "facilit ated", + "carameli zed", + "b pa", + "vuel ta", + "my tho", + "m ami", + "spe are", + "nbap layoffs", + "fe vre", + "nick jonas", + "im print", + "c so", + "craig slist", + "la salle", + "gi deon", + "ha doop", + "dis regard", + "w ud", + "tu c", + "ma gee", + "acou stics", + "ta a", + "qui e", + "pol a", + "cr t", + "dw yer", + "dis sec", + "capit ol", + "men tion", + "kn oll", + "he igh", + "fin ders", + "plac ements", + "l se", + "indi ra", + "gur i", + "madhuri dixit", + "kingdom s", + "iambic pent", + "geor gina", + "je ky", + "conflic ting", + "bay an", + "aga tha", + "uph old", + "dr on", + "vic ar", + "ex pat", + "periph eral", + "pe ssi", + "fa f", + "ance stor", + "? 
..", + "wid get", + "pun c", + "comm enced", + "beav s", + "air waves", + "ad dis", + "po a", + "de sses", + "co den", + "vu e", + "ru pee", + "kar in", + "spo ck", + "m sy", + "ภ°", + "pr ick", + "fill more", + "ti fication", + "thing sto", + "sar de", + "em ile", + "pere ira", + "n ad", + "bright ening", + "arre sting", + "wo king", + "usc g", + "sp ill", + "raspberry pi", + "hu go", + "ite c", + "is ma", + "cuff links", + "optimi zed", + "oc c", + "mi wx", + "en ka", + "el ited", + "afford able", + "sa kh", + "coron ado", + "ho h", + "at ul", + "ai oli", + "jim cantore", + "accoun ted", + "vin ay", + "her mit", + "groo ves", + "ran ch", + "r illa", + "we tter", + "ou tof", + "veter in", + "ni kov", + "ki an", + "fair banks", + "ram apho", + "n iti", + "k ko", + "ru sty", + "ne stle", + "tv xq", + "shahe er", + "âĿ¤âĿ¤ âĿ¤âĿ¤", + "penn ant", + "gem stones", + "dem debate", + "ðŁIJ Ĭ", + "auton ews", + "support indiefilm", + "mach o", + "ve x", + "new sat", + "ne ti", + "conce ssions", + "can died", + "yof the", + "mac au", + "den ds", + "cricke ters", + "san iti", + "mari ano", + "gh at", + "ar toftheday", + "¡ ľ", + "e gos", + "gen oa", + "chat bots", + "bri er", + "al labout", + "mon ty", + "spi ed", + "r tr", + "comfor t", + "sni ppets", + "real time", + "gra in", + "exam ined", + "en lightening", + "tt u", + "god bless", + "release the", + "sing ular", + "ki ans", + "ha ka", + "sor ren", + "defe ct", + "mar g", + "equ ities", + "d orian", + "su ka", + "per l", + "aishwar ya", + "pul lover", + "preci sion", + "fair way", + "ne ve", + "rive ting", + "vill anova", + "en com", + "ak o", + "passion ately", + "europale ague", + "siem pre", + "x vi", + "enligh tened", + "c fr", + "âĺħâĺħ âĺħâĺħ", + "wast eland", + "is f", + "new comers", + "emergen cy", + "amphi theatre", + "- .", + "text books", + "figur ative", + "tre mb", + "pe sc", + "ab hin", + "ab bot", + "ac acia", + "har ds", + "por sche", + "kau ai", + "el isa", + "car rick", + "abo u", + "elli er", + "be ch", + "neu tron", + "galap agos", + "ru ben", + "in nis", + "how to", + "nun s", + "sab ine", + "i ac", + "clin ched", + "no tori", + "fi ves", + "cairn gor", + "per i", + "gr c", + "ðŁĴ¯ ðŁĴ¯", + "mal m", + "twelf th", + "di ff", + "rout ines", + "marty n", + "lin den", + "synthesi zer", + "nu mber", + "game cube", + "fal kirk", + "byz antine", + "queu ing", + "gr ill", + "scal able", + "char red", + "rou ting", + "her bali", + "gri zz", + "ðŁĺŃðŁĺŃ ðŁĺŃ", + "tol l", + "termin als", + "l pc", + "ab d", + "war mups", + "remo vable", + "¯ \\", + "vi go", + "pap aya", + "ne ve", + "lov ingly", + "jo kers", + "ib les", + "sse tt", + "poten ti", + "pel e", + "gi gi", + "sadi q", + "leg acy", + "son o", + "ru pees", + "retar ded", + "ele e", + "par r", + "fi ance", + "ey re", + "say ers", + "pend ants", + "mak nae", + "al bans", + "adap ting", + "p ff", + "pu berty", + "ji u", + "ing rad", + "hypocr ite", + "diplom ats", + "phys ical", + "rob by", + "bon sai", + "ãģ ·", + "f att", + "catal unya", + "âľ ĸï¸ı", + "ro ma", + "more land", + "so e", + "conver sions", + "stl blues", + "shol m", + "gra ssy", + "pra do", + "on u", + "assaul ting", + "> _", + "sett es", + "dis graceful", + "aph ra", + "âļ½ï¸ı âļ½ï¸ı", + "ठª", + "kil n", + "goal tender", + "s ru", + "philanthro pist", + "b als", + "th n", + "stu den", + "sando val", + "dogre scue", + "eli ons", + "asse ssed", + "lar go", + "hec tares", + "sh rm", + "sa if", + "cle avage", + "no ches", + "n ene", + "fat alities", + "cur ing", + "clean ser", + "al es", + "p vp", + "south bank", + 
"pizz eria", + "marsh als", + "kni fe", + "an dover", + "tbli ghtning", + "sr sly", + "ou te", + "digi mon", + "timesof india", + "prome the", + "le bo", + "f su", + "wit z", + "rever e", + "man as", + "mam ba", + "ch ica", + "gu an", + "exhibit or", + "csr racing", + "d ere", + "xx xxx", + "gu sta", + "story time", + "ston ey", + "organ ics", + "and u", + "se am", + "min ogue", + "anushka sharma", + "ab a", + "ðŁİĻ ï¸ı", + "ugand an", + "chro matic", + "as sn", + "document aries", + "sh t", + "ru paul", + "loy d", + "k ats", + "e us", + "ite ch", + "me dusa", + "pan ty", + "kel logg", + "et to", + "talla de", + "sha a", + "do st", + "p ms", + "mari ana", + "je ster", + "croo ks", + "ðŁĶ ¬", + "min danao", + "ind hoven", + "ðŁ¤ ª", + "le xi", + "tv n", + "jan is", + "co te", + "ãģ Ĩ", + "ser rano", + "iw m", + "ðŁIJ ¬", + "k ke", + "distribu tors", + "cap u", + "counterfe it", + "camp site", + "ag gie", + "ðŁĺ ¼", + "chhat tisgarh", + "~ @", + "state u", + "san di", + "prevent able", + "cl s", + "can ne", + "mm c", + "i ver", + "sa haran", + "pal is", + "night out", + "do s", + "ap ia", + "absc bn", + "manag erial", + "aro se", + "mo wx", + "aro sa", + "ðŁĮ ³", + "under dog", + "remo ver", + "astronom ers", + "lent ils", + "su scep", + "smoo ther", + "pend leton", + "fau cet", + "e mory", + "dal mati", + "af cb", + "tic us", + "exem pt", + "en rol", + "d heim", + "ðŁIJ º", + "restric tion", + "star fish", + "sto w", + "snor kel", + "thunder birds", + "she ad", + "homo sexual", + "dy n", + "as li", + "andre tti", + "dou che", + "dom o", + "tar mac", + "slu mber", + "pr onto", + "first dayof", + "mini ature", + "mari achi", + "argu s", + "recomm ending", + "mobi les", + "in ce", + "illustri ous", + "or c", + "adver ts", + "gr its", + "wea sel", + "pag oda", + "over pass", + "gre ys", + "maxi mus", + "arma gh", + "wood land", + "sun ni", + "ðŁĴ ī", + "ë Ŀ", + "ti one", + "soci o", + "ho s", + "ðŁ¤Ĺ ð٤Ĺ", + "wind sor", + "subsequ ent", + "munch ies", + "id h", + "exclu ding", + "e mi", + "cu th", + "z ai", + "week days", + "law suits", + "barn ard", + "Ø ª", + "pe tting", + "net es", + "mul ligan", + "pharmac ists", + "ra quel", + "e ton", + "cran ston", + "gil ded", + "cle ary", + "ce ph", + "ra a", + "pam per", + "lombar di", + "as in", + "sher ry", + "pro d", + "for te", + "ari anism", + "buffalob ills", + "æľ ¬", + "ðŁĶ¥ #", + "uu u", + "just ices", + "car ina", + "nat in", + "mas low", + "dro oling", + "cog nac", + "cam ber", + "el ong", + "r dr", + "in en", + "convic tions", + "am use", + "tro ck", + "harm less", + "visit ation", + "gen omic", + "bl and", + "beno it", + "chim p", + "tuscal oosa", + "gre asy", + "x po", + "gil t", + "se q", + "per mitted", + "christma seve", + "book s", + "mu e", + "old school", + "human right", + "be ati", + "ðŁĶ Ŀ", + "sh at", + "sculp ting", + "h wan", + "fern andes", + "sci utto", + "fu entes", + "endeav ors", + "maid stone", + "un paralleled", + "shou ted", + "queen of", + "mer c", + "band ic", + "ve da", + "sel angor", + "pi le", + "ja han", + "intimid ating", + "disapp ears", + "cl ich", + "za ha", + "w urst", + "hi v", + "fod ils", + "cor dless", + "aaaa aa", + "hy dra", + "bel inda", + "e els", + "bu f", + "su staining", + "rugby league", + "no c", + "brig itte", + "( ðŁĵ¸:", + "tromb one", + "soo the", + "smo g", + "ad p", + "stab le", + "ing ley", + "diagno se", + "ms g", + "we ss", + "tic keting", + "one e", + "nsw pol", + "e up", + "auto psy", + "adity anath", + "sun down", + "river front", + "si ya", + "p is", + "hier archy", + "dur ango", + 
"di jk", + "ren shaw", + "he aps", + "epide mi", + "david bowie", + "interne tof", + "dd i", + "nation ality", + "mb ar", + "air y", + "win der", + "w alia", + "elli ott", + "c x", + "bav arian", + "pl att", + "an tw", + "wi wx", + "sof ter", + "ne ha", + "h eller", + "th and", + "dani ela", + "bo ast", + "degra dation", + "ðŁĴ¦ ðŁĴ¦", + "transform ing", + "man e", + "av ut", + "ðŁĺĪ ðŁĺĪ", + "vo ter", + "the e", + "t ate", + "pu ff", + "in door", + "sop roud", + "boy ce", + "boris johnson", + "wait in", + "immun ology", + "ðŁıĨðŁıĨ ðŁıĨ", + "âĿ Į", + "street food", + "liz asober", + "cavali er", + "c elia", + "need le", + "motor ing", + "g ato", + ", )", + "ra de", + "harve st", + "t ms", + "jar pad", + "on ey", + "air men", + "v re", + "impair ment", + "abhi shek", + "snoo p", + "l ant", + "fam ously", + "bl ou", + "s ze", + "g ander", + "un touch", + "tu f", + "dee jay", + "col lateral", + "b ind", + "ðŁļ ©", + "pin ning", + "ic n", + "' ;", + "the economist", + "ul tram", + "worldwater day", + "ti poff", + "the i", + "feed ers", + "campa ign", + "sc umb", + "day weekend", + "yo m", + "pe dic", + "h ough", + "ps v", + "pl in", + "on de", + "boston marathon", + "az zy", + "* _*", + "con ley", + "thi ago", + "hoo o", + "gal erie", + "luci d", + "je tt", + "gl itz", + "final fantasy", + "achiev ers", + "y ung", + "peregr ine", + "op hi", + "dam es", + "biom ar", + "âĺĢï¸ı âĺĢï¸ı", + "sk c", + "l ics", + "fl ank", + "ar rahman", + "ho of", + "uphol stery", + "t ats", + "wo z", + " ¿", + "snor ing", + "ra er", + "l ju", + "ap d", + "pl ating", + "kan u", + "im ation", + "fragr ances", + "m ra", + "mor ay", + "mo tt", + "im muni", + "hearti es", + "bho pal", + "tim ers", + "g ata", + "color way", + "car nation", + "win get", + "si ghs", + "s ville", + "optimi st", + "chate au", + "olympi ans", + "ci o", + "singer songwriter", + "ny o", + "fi bers", + "bur ch", + "ag ro", + "mil ne", + "ig bo", + "cr amer", + "ation als", + "dan ube", + "pad ma", + "nor mani", + "en forced", + "bre ck", + "boeh ner", + "ar den", + "sur rendered", + "pros thetic", + "om a", + "ha iled", + "calcul ations", + "w fa", + "bi b", + "fcb live", + "fon da", + "west coast", + "que sts", + "friend ly", + "to wie", + "fit ch", + "bal ot", + "star dom", + "scrat ching", + "ho sa", + "thi ka", + "o ven", + "stro ke", + "out post", + "pharmaceu ticals", + "hi kari", + "mu y", + "af d", + "fallon tonight", + "squ at", + "or u", + "dra ined", + "chocol at", + "ë¯ ¼", + "wor ths", + "ri b", + "mu j", + "that s", + "residen te", + "it el", + "boo st", + "mi gos", + "mul led", + "la a", + "etsy shop", + "don keys", + "me k", + "p tc", + "flin ders", + "e hs", + "ro hit", + "mu ir", + "g ad", + "compos itions", + "åĨ Ļ", + "combu stion", + "i kh", + "yemen i", + "wav ed", + "gar ci", + "ak os", + "oo ds", + "fu sion", + "se que", + "s lan", + "pl ur", + "kic chasu", + "shenan do", + "s ams", + "worl den", + "horo witz", + "with me", + "mic robes", + "k ki", + "ðŁĴĶ ðŁĴĶ", + "w su", + "patch work", + "fre er", + "y aki", + "the art", + "symboli sm", + "mil er", + "bt n", + "ma bu", + "side kick", + "motiv ates", + "sag itt", + "natur als", + "serv iced", + "ps ori", + "pa ola", + "qu ig", + "i badan", + "gi ggs", + "ë ³", + "sciento logy", + "si oux", + "salam at", + "d res", + "cad bury", + "d hawan", + "ci ón", + "_ '", + "swa pping", + "maris ka", + "james bond", + "explo sives", + "ay les", + "af er", + "s agu", + "cen sor", + "tom a", + "jeff erson", + "ring ed", + "par tist", + "ir responsible", + "aguil ar", + "vac ay", + 
"equ itable", + "altrin cham", + "ac ur", + "man ish", + "ger min", + "schoo led", + "pu tter", + "ed ad", + "nav al", + "toast y", + "sol areclipse", + "dish u", + "coy ne", + "ac co", + "mu ck", + "mar an", + "el os", + "len der", + "cro ix", + "worth less", + "ha ber", + "gun men", + "ðŁį ĵ", + "zen ith", + "t enders", + "hur st", + "hol tz", + "itali ans", + "car low", + "u cd", + "characteri stic", + "bun g", + "av l", + "u th", + "sa sia", + "rs l", + "red man", + "neighbor ing", + "green peace", + "sti ps", + "follow party", + "y gk", + "en os", + "omni bus", + "na issance", + "chri ssy", + "secu re", + "call back", + "ji hoon", + "memor y", + "block er", + "l anta", + "daf fodils", + "bil t", + "ffer ty", + "fau st", + "ie c", + "nipp les", + "so g", + "m nd", + "jagu ar", + "bol dly", + "ab poli", + "pro position", + "gun sense", + "evan sville", + "cu tters", + "we go", + "dou n", + "do x", + "stal lions", + "ka j", + "shi ppers", + "j awa", + "vol o", + "le ven", + "pap rika", + "kov ich", + "jor di", + "induc tees", + "app alling", + "dial ysis", + "allevi ate", + "âĢĶ âĢĶ", + "pie ter", + "mid wi", + "q tr", + "juli ette", + "inter mission", + "haw ks", + "act ment", + "one ill", + "k lin", + "vam ps", + "fam ous", + "cou ld", + "autom obi", + "da an", + "west end", + "elli p", + "nh c", + "mel anch", + "web series", + "ton gue", + "snat ched", + "smy th", + "tan gible", + "sl i", + "e asing", + "bar stool", + "over lay", + "afford ability", + "ting ed", + "ter as", + "ay ush", + "wanna one", + "rh ine", + "dan a", + "sh ana", + "kend al", + "fer tile", + "w ir", + "repl eni", + "lar vae", + "is ro", + "con vos", + "ab brevi", + "u cc", + "hun gry", + "bur rows", + "ag er", + "nav i", + "mat in", + "du per", + "cer n", + "ma don", + "ķ ï¸ı", + "é ģ", + "tu ps", + "hy att", + "sh ep", + "friday night", + "wis er", + "hei di", + "hat ton", + "p gh", + "foun tain", + "wrist bands", + "ahmadi yya", + "aeri al", + "subscri bed", + "so los", + "m ace", + "sla yed", + "for fe", + "dul ce", + "christ mass", + "arun jaitley", + "viol ate", + "ob stru", + "ni eces", + "w vu", + "idy l", + "fa ze", + "pre serves", + "infr inge", + "premi ers", + "inter vals", + "agen cy", + "( ©", + "stand alone", + "di mes", + "bo er", + "param eters", + "ge tit", + "ðŁĺĺðŁĺĺ ðŁĺĺðŁĺĺ", + "tu lane", + "for given", + "scol l", + "mb ps", + "smash bros", + "rob bi", + "prima vera", + "ali st", + "ghost ly", + "ay at", + "ye ats", + "impre ssionist", + "ear phones", + "caul field", + "wai kiki", + "sal ute", + "sc ou", + "mu ay", + "louis vuitton", + "bak hta", + "ado g", + "inven tions", + "hur d", + "forec lo", + "stream line", + "thalai var", + "ch snews", + "will ard", + "t sn", + "euro parl", + "cru sher", + "my sore", + "gro wer", + "ra ping", + "pat ti", + "g den", + "sm w", + "muf ti", + "kid man", + "ab r", + "soun ders", + "skep tical", + "ðŁĶ İ", + "sun dar", + "i me", + "fer g", + "feather weight", + "ar lington", + "pas qu", + "ag azine", + "wearab le", + "nati c", + "mccl ure", + "inter mitt", + "hor de", + "six ties", + "car te", + "bha v", + "ze al", + "experi ential", + "ador ned", + "som mer", + "eno te", + "hypo thesis", + "stin ky", + "pro to", + "dead lines", + "vo gel", + "mus ings", + "monc ton", + "gu ter", + "f le", + "aci on", + "voice of", + "ta sha", + "inhabit ants", + "type face", + "s ba", + "bts x", + "ðŁĶ Ĵ", + "wor x", + "u hc", + "jo ko", + "cell ars", + "gor o", + "continu um", + "... 
&", + "weather cee", + "ha p", + "sr k", + "ris ers", + "lonely planet", + "un named", + "co eur", + "ðŁį Į", + "the world", + "ili ke", + "fa sten", + "ami go", + "ri ba", + "ramapho sa", + "staf fers", + "had ley", + "? ?\"", + "fi ore", + "sal ut", + "hu ff", + "bez os", + "Ñ ĭ", + "ra der", + "kam ala", + "in line", + "fill ers", + "um atic", + "all in", + "shat ter", + "re in", + "o ku", + "ch ases", + "fla gged", + "baby metal", + "water stones", + "ts b", + "cut out", + "op hel", + "aam a", + "rockab illy", + "sto lic", + "jet blue", + "ich ick", + "down ton", + "uzbe kistan", + "pat na", + "la q", + "gr ange", + ") _/", + "subsi di", + "sc p", + "newsc ast", + "it sa", + "twee tyour", + "e mor", + "archae ologists", + "uni fication", + "por ta", + "q x", + "protec tors", + "pro hib", + "charis ma", + "car tag", + "ren fre", + "scul pt", + "guwa hati", + "de ma", + "boo p", + "unf pa", + "dex ter", + "lay la", + "alleg es", + "sou ps", + "never again", + "l ys", + "cal c", + "bar oness", + "visu alize", + "ger ber", + "absor bed", + "i ers", + "a han", + "fon tein", + "detec tors", + "verst appen", + "sv c", + "formul ated", + "ac dc", + "li x", + "in competent", + "bh k", + "lour des", + "water house", + "snow ed", + "appreci ative", + "sig ma", + "lizasober ano", + "pen ned", + "pay check", + "tall inn", + "fanc afe", + "par isi", + "av alley", + "vi g", + "ru fc", + "hard ship", + "so cute", + "po ise", + "ì ¹", + "roth schild", + "k ly", + "???? ????", + "l hp", + "il ay", + "f hs", + "am ad", + "ide als", + "brad bury", + "bal boa", + "nic ot", + "kid nap", + "wol ve", + "tas manian", + "op t", + "matthi as", + "ãĥ³ ãĤ", + "super markets", + "mylittle pony", + "me lee", + "li ster", + "gr oun", + "fe dora", + "kind ness", + "en en", + "bra hms", + "¯\\ _(", + "ros well", + "mar lene", + "ic u", + "re formation", + "or ail", + "he brides", + "dispar ities", + "terrac otta", + "swal lows", + "re id", + "influ encing", + "flu or", + "den e", + "tum our", + "blon des", + "thunder bird", + "sh eva", + "moga dishu", + "ka b", + "cre eps", + "i ving", + "ene ed", + "anno y", + "âĶ Ģ", + "intri gue", + "enqu iry", + "ar aj", + "tur al", + "kuber netes", + "end lessly", + "divi dends", + "tor a", + "ti sh", + "commemor ates", + "un ra", + "tri b", + "pon ty", + "ne m", + "diss ent", + "brew ingco", + "ðŁĺ ½", + "nor mali", + "bi of", + "( ...", + "chil len", + "ì£ ¼", + "mell on", + "av is", + "mccor mack", + "ing ra", + "enrich ed", + "custome rexperience", + "testo sterone", + "snu g", + "sett i", + "ger onimo", + "inqui rer", + "bre aches", + "very thing", + "bloom ing", + "mu ra", + "dispo s", + "bi de", + "de va", + "shade sof", + "in trin", + "sh ev", + "s ven", + "nayanth ara", + "gan esha", + "c ws", + "ber ta", + "label led", + "use um", + "nick named", + "ma han", + "car uso", + "ap ur", + "ðŁij Ĩ", + "w q", + "orphan age", + "discar ded", + "mag nu", + "lu e", + "je on", + "bridge port", + "pac ing", + "mercur y", + "( ðŁĵ¸", + "marx ist", + "amphi bious", + "transplant ation", + "stit ching", + "then burg", + "gradu al", + "ãĤ Į", + "ro ft", + "ma ils", + "ine c", + "guy ana", + "dopp elg", + "ver o", + "re write", + "head less", + "harb augh", + "gate way", + "car sforsale", + "sw i", + "st is", + "mach t", + "un de", + "sura baya", + "stap leton", + "nur turing", + "mil ner", + "ya o", + "lma oooo", + "ko sh", + "arsen al", + "k ame", + "er ry", + "ar royo", + "dis misses", + "ru bbed", + "rc b", + "lew d", + "dil u", + "and or", + "vi de", + "ur in", + "inter sec", + "ha 
ar", + "al b", + "year swith", + "app leton", + "é al", + "ul livan", + "suc cu", + "monter rey", + "d mx", + "artem is", + "ron nie", + "farm land", + "s football", + "gro tto", + "anth i", + "ãĢ ģ", + "à® Ł", + "vid ya", + "jimmy fallon", + "ൠį", + "t zer", + "gravit ational", + "w thr", + "u hhh", + "e hr", + "tin ker", + "ti juana", + "scran ton", + "ram charan", + "bar clay", + "re van", + "m si", + "ka p", + "wr s", + "we thenorth", + "tor al", + "sat u", + "gro m", + "fac ep", + "erick son", + "z yn", + "se dge", + "oo dle", + "spur sofficial", + "ds p", + "sic ilian", + "soli hull", + "recei vers", + "ladak h", + "hend rick", + "ther i", + "presi ding", + "mc guinness", + "litt ers", + "gun nar", + "gh oul", + "wi b", + "n tv", + "kar o", + "fro ck", + "b lau", + "ampli fy", + "all is", + "ul lah", + "memo irs", + "kh loe", + "intercep tions", + "pet day", + "lo oney", + "con fin", + "ch ay", + "piyush goyal", + "frequ encies", + "ut z", + "event ual", + "warm ly", + "obli vion", + "an ka", + "ta it", + "âĿ¤ï¸ı .", + "director ial", + "ru lers", + "prince s", + "mu ck", + "stur ridge", + "deu ce", + "abri dged", + "bagu ette", + "un cles", + "pen du", + "min ding", + "forre ster", + "av ila", + "wall er", + "wall street", + "ment or", + "hin o", + "high way", + "crom well", + "fanart friday", + "mb i", + "co yle", + "a hi", + "tro ve", + "spie gel", + "pay tm", + "mcin tosh", + "jan sen", + "nit i", + "nash ville", + "len o", + "leicester shire", + "le gos", + "dic t", + "ðŁĵ ½", + "sp ad", + "beverly hills", + "sy rah", + "separ ates", + "z ain", + "un fit", + "dra gs", + "tan ia", + "over flowing", + "hri thik", + "haw thorn", + "z ani", + "mac far", + "fi de", + "to tem", + "pe ds", + "fundament ally", + "cal ico", + "sin ner", + "j ä", + "hil de", + "ds d", + "ten ay", + "ta hit", + "mil f", + "lie b", + "inform ing", + "up lift", + "ra el", + "mortg ages", + "lec t", + "ii ii", + "guillau me", + "compos ites", + "old smobile", + "l end", + "gar th", + "com mish", + "bapti zed", + "scorpi ons", + "ru cker", + "bringback our", + "alli ance", + "thalap athy", + "tal i", + "sp ans", + "eri dge", + "wither spoon", + "lin da", + "sky lar", + "kor n", + "hom s", + "Ä į", + "sil enced", + "caf fe", + "ar ty", + "dist inguish", + "to wed", + "pun g", + "jessic a", + "ear nest", + "beau fort", + "t ama", + "study abroad", + "si khs", + "new bie", + "nav ratri", + "mar ble", + "loun ging", + "lit ter", + "dal it", + "so sa", + "iz es", + "gra de", + "com promising", + "tr iton", + "de tta", + "v j", + "chau ffe", + "spec tral", + "powe red", + "montess ori", + "artic ulate", + "hal ton", + "al co", + "ye y", + "mn twins", + "acoun ty", + "ðŁijı ðŁı¾", + "âī Ī", + "mad men", + "kal a", + "gru m", + "chi k", + "ati s", + "su me", + "akh tar", + "job search", + "high lighter", + "bo ath", + "âĦ ¹", + "tar zan", + "lam bo", + "âĽĦ ï¸ı", + "ox fam", + "dump ster", + "pretz els", + "mac os", + "incl ined", + "fac tual", + "adverti sers", + "shu i", + "pu ree", + "ml pfi", + "anti dote", + "cap o", + "pa str", + "merc ado", + "but ton", + "ar min", + "ag g", + "lol la", + "horri bly", + "er rands", + "christop he", + "time snow", + "monday motiv", + "li ss", + "scand als", + "mc i", + "dispropor tion", + "âĺ İ", + "sur pass", + "samar itan", + "so tho", + "pu rest", + "fl att", + "trivi atuesday", + "delec table", + "leop old", + "hermi one", + "chou dhary", + "en rich", + "¡ ¡", + "subsi diary", + "ine qualities", + "bachel or", + "auto immune", + "la kota", + "i hop", + "ad jec", + "the 
simpsons", + "sh es", + "se k", + "gret chen", + "up stream", + "hin akhan", + "coper nic", + "x tina", + "lu g", + "tough ness", + "e ad", + "cli pped", + "bi us", + "sl v", + "fah ren", + "dee pak", + "ca u", + "x an", + "im mature", + "dig ni", + "bo bs", + "shred ding", + "but tery", + "accommod ations", + "de ven", + "chun ks", + "super league", + "sky bet", + "kil dare", + "je et", + "ë į", + "ce k", + "wrec ks", + "pro pane", + "oh l", + "tb d", + "quo i", + "trum pp", + "mi mo", + "reluct ant", + "ver ne", + "o ic", + "ma gh", + "ar nau", + "se ver", + "li dge", + "stair way", + "kicchasu deep", + "ðŁĶ º", + "mach ining", + "aama admi", + "ot i", + "c da", + "al it", + "pan y", + "inst alls", + "ac ct", + "e shop", + "di em", + "hard well", + "fulfill ment", + "sc afe", + "qu ack", + "extrac ts", + "swee tened", + "fi ghton", + "f di", + "d inger", + "wal tham", + "us ur", + "refe rees", + "seok jin", + "gran n", + "af rin", + "th n", + "sch af", + "par cels", + "bet is", + "amar ine", + "nom an", + "kh tar", + "mor itz", + "cou pling", + "bar ons", + "ðŁIJ ¸", + "à ¸", + "sl p", + "sad ler", + "x ander", + "tri ad", + "mc millan", + "kh z", + "divi ding", + "ìĹijìĨ Į", + "dar yl", + "zed d", + "le ys", + "pla ques", + "flu ori", + "tipper ary", + "on nell", + "di dier", + "lang ford", + "im c", + "the sun", + "bir dies", + "ar cha", + "ye ssss", + "t di", + "dar ia", + "cand ace", + "al tam", + "pal aces", + "ch it", + "sant am", + "event ful", + "book of", + "ad b", + "mon stax", + "cre ole", + "co el", + "âĸ ½", + "we aren", + "sten nis", + "she ath", + "ati sm", + "gron ingen", + "mlpfi m", + "le pre", + "wrong ly", + "rsp ca", + "rendez vous", + "acknowle dging", + "pel vic", + "solic itor", + "sla ys", + "nue stra", + "lo d", + "is lander", + "fer oci", + "fashion show", + "ra ss", + "dge on", + "adole scents", + "sma shes", + "negli gence", + "grate ful", + "ved ere", + "sw oop", + "ing l", + "apol ice", + "vand alism", + "gan n", + "jo ao", + "di supdates", + "zimbab we", + "under age", + "radi ance", + "w of", + "bour geo", + "pla s", + "cr ani", + "gh ue", + "wrec kem", + "warran ts", + "re form", + "jim mie", + "at wood", + "ys l", + "neil himself", + "l bj", + "i man", + "tan to", + "nois se", + "ver bs", + "equip o", + "al together", + "mam ent", + "l ice", + "dou glass", + "tier ney", + "pri med", + "j hal", + "furn itu", + "braz ili", + "v ill", + "past els", + "n ison", + "u ff", + "paral ysis", + "jay e", + "im po", + "ðŁij ģ", + "strate gically", + "pakistan is", + "was sup", + "super bike", + "thank u", + "tru elove", + "sha ikh", + "israel is", + "vi p", + "to g", + "li en", + "la ker", + "grey hounds", + "cul ars", + "bian chi", + "balot elli", + "ar ran", + "loo s", + "str ates", + "he bron", + "ar vo", + "sunder land", + "the al", + "tomb stone", + "sand man", + "c pac", + "thanks giving", + "love him", + "lat ino", + "an in", + "aka if", + "ĭ ãĤ", + "tor quay", + "di est", + "alli anz", + "ðŁĺ ķ", + "golf club", + "cl lr", + "wal cott", + "sch nau", + "promp ted", + "nomin ating", + "len nox", + "val et", + "mon ro", + "may ward", + "e ph", + "ðŁĶ Ķ", + "inter oper", + "r da", + "re flex", + "arm chair", + "ê° ķ", + "stri pper", + "por ti", + "ph arm", + "ham za", + "ni reland", + "ne ue", + "h pv", + "port foli", + "sun burn", + "fris bee", + "be al", + "bapti ste", + "x h", + "ty m", + "pr ati", + "o vers", + "haz rat", + "deser t", + "der ry", + "us ky", + "em mett", + "ach arya", + ")_/ ¯", + "shu d", + "may a", + "ham ill", + "ra im", + "nr c", + "fitt 
ings", + "cur vy", + "ðŁı ĩ", + "ster ling", + "ॠĢ", + "wal kin", + "short cuts", + "mil ly", + "ast ur", + "alpha be", + "pl i", + "pe z", + "miss you", + "rad ford", + "ml g", + "ta eyang", + "notjust lakes", + "du mps", + "seren dip", + "le ur", + "ra ving", + "e ster", + "de priv", + "absc bn", + "ðŁijĩ ðŁı»", + "scar city", + "o cr", + "mean ings", + "cap t", + "da hl", + "fer mentation", + "bri oche", + "to win", + "out lander", + "massi mo", + "en cro", + "ðŁ¥ ³", + "buil t", + "po tam", + "kir i", + "tm w", + "monit ored", + "k ites", + "peoples vote", + "gray son", + "íģ ¬", + "afri ka", + "a dies", + "i vote", + "gy ne", + "g annon", + "di x", + "c mc", + "ou ral", + "fox andfriends", + "bel i", + "ig ne", + "gl an", + "katrin akaif", + "co politics", + "qual itative", + "p si", + "lu cci", + "disc oura", + "âĺ ®", + "kel li", + "gau tam", + "carac as", + "reale st", + "pu la", + "in us", + "hill top", + "make aw", + "atten borough", + "tw y", + "r arity", + "peck ham", + "ma hon", + "corn elius", + "clin icians", + "ton line", + "tb i", + "paradi se", + "ka si", + "inev it", + "fresh ness", + "colling wood", + "lun atic", + "defen se", + "cop d", + "in fra", + "wain wright", + "sains bury", + "alab am", + "te ma", + "lac o", + "chec ker", + "releg ated", + "tren t", + "stal ks", + "huff post", + "bhubanes war", + "ast ral", + "share your", + "prim rose", + "hi me", + "cat an", + "end ment", + "en dow", + "cle mens", + "mal oney", + "hil ary", + "game time", + "den ise", + "collabor ators", + "b wo", + "radic als", + "gue tta", + "ici on", + "au a", + "snap matic", + "sat chel", + "excav ation", + "base man", + "s ão", + "gn ation", + "fel d", + "surve y", + "shah zad", + "ma st", + "anirud hofficial", + "tru cker", + "ot ago", + "geo graph", + "ethe l", + "âļ¡ï¸ı âļ¡ï¸ı", + "s ver", + "mu tt", + "internetof things", + "ancho red", + "wh ouse", + "bang la", + "bal main", + "ç¹ ĭãģ", + "break fa", + "á Ģ", + "twi ster", + "te tris", + "ca v", + "stag s", + "g z", + "au b", + "stor med", + "hel ens", + "yar mouth", + "st asy", + "gustav o", + "co sc", + "vin son", + "up p", + "sc ricket", + "assump tions", + "app e", + "nu h", + "u er", + "pre mise", + "n aga", + "e amon", + "coron ary", + "na f", + "north side", + "el mer", + "ro tar", + "out lining", + "el f", + "re surg", + "kat elyn", + "in can", + "hyster ia", + "ce e", + "am bani", + "pro lly", + "Į ãĤĬãģ", + "ax es", + "san jose", + "rem brandt", + "mag pie", + "even ly", + "scor sese", + "qu aint", + "f g", + "b buk", + "indian football", + "weare all", + "spd wy", + "pis ces", + "ec g", + "âĺħâĺħâĺħâĺħ âĺħ", + "pre orders", + ": |", + "ni pple", + "sal azar", + "ju me", + "jail break", + "min n", + "bas sett", + "ze tta", + "jef free", + "ad jun", + "tic on", + "san diego", + "drink local", + "chol era", + "solic itors", + "o bo", + "com post", + "ni an", + "wr a", + "tre ach", + "ic ic", + "profession al", + "del ve", + "leg ate", + "histor ia", + "cro issant", + "con noisse", + "nam o", + "palli ative", + "chem trails", + "i ority", + "global warming", + "comic art", + "behavi oural", + "re sted", + "li as", + "cli mates", + "Ł ãģĦ", + "rut land", + "nou rish", + "menopau se", + "hot ties", + "demen ti", + "ve spa", + "mel ville", + "anal ogue", + "tz man", + "str ung", + "im perfect", + "gl are", + "cir cling", + "ros berg", + "rec o", + "oc ity", + "lo ire", + "em be", + "do ssier", + "ne el", + "nan do", + "me a", + "gal vani", + "fin esse", + "ag p", + "berke ley", + "asi m", + "âĺº âĺº", + "quil ted", + "ish ere", + 
"un matched", + "po tion", + "for z", + "at re", + "selfi es", + "juli ana", + "ðŁļ ¶", + "âĸ º", + "mel ton", + "âłĢâłĢâłĢâłĢ âłĢâłĢâłĢâłĢ", + "spin rilla", + "pur cell", + "ed p", + "at leti", + "tony awards", + "ra ja", + "pro gno", + "mol ten", + "stu ff", + "p ally", + "nobel prize", + "âĻ» ï¸ı", + "spiritu al", + "spe ake", + "sa sha", + "bri um", + "tru ss", + "critici ze", + "assassinscre ed", + "yor uba", + "u lo", + "fire man", + "workin progress", + "ef cc", + "fla res", + "ro bot", + "hi kers", + "cl l", + "shado wing", + "pat sy", + "leh man", + "c ns", + "å ±", + "guad al", + "à± į", + "ra pe", + "r honda", + "paralle ls", + "son ja", + "langu age", + "land ings", + "z ola", + "cr amps", + "bur ning", + "apprais al", + "jol la", + "ham m", + "kas a", + "gul ly", + "f go", + "uly sses", + "ri be", + "ðŁĴ Ħ", + "ib u", + "eti enne", + "bri ar", + "fin ely", + "comb ating", + "y ql", + "go tham", + "we chat", + "to paz", + "primar ies", + "l se", + "iz z", + "hel e", + "dispon ible", + "cy stic", + "bel ichick", + "th rush", + "kansas city", + "ge om", + "soli di", + "red bubble", + "by stand", + "cambridge shire", + "par fait", + "ast le", + "ow o", + "ind ore", + "stom ping", + "sm elly", + "ðŁ¤ ĸ", + "locom o", + "adm itting", + "hol me", + "clock wise", + "min sk", + "mc co", + "for get", + "ev p", + "cam ra", + "ab ella", + "yo tes", + "universit yof", + "mé xico", + "silver ado", + "ric ket", + "crom bie", + "pu j", + "eradic ate", + "deli ght", + "y go", + "glam ping", + "vic a", + "du ggan", + "coun ters", + "cf d", + "sc our", + "react js", + "pu ram", + "paras ites", + "in ki", + "vill en", + "stel la", + "li mbo", + "ang as", + "k cr", + "ðŁĴļðŁĴļ ðŁĴļ", + "vap ori", + "mum ford", + "oli gar", + "à ¼", + "al oo", + "boo ties", + "ad r", + "k elli", + "dru mmers", + "av ici", + "nature uk", + "ron al", + "in trac", + "un splash", + "le che", + "g oma", + "el ine", + "envir o", + "bi onic", + "bu eno", + "mi k", + "av in", + "star ling", + "em powers", + "cake day", + "boy cot", + "ðŁĴļ ðŁĴļ", + "ðŁĮ¸ ðŁĮ¸", + "v ach", + "m ci", + "fractu res", + "ger i", + "sk ing", + "exclu ded", + "lu ce", + "ja ve", + "ig gy", + "evi den", + "aki stan", + "a wn", + "mor als", + "luci fer", + "ha ban", + "tumb ling", + "sunday motivation", + "mo sley", + "captain america", + "sch icago", + "the one", + "mo td", + "d ts", + "ðŁIJ ¼", + "rep ell", + "ii i", + "locu st", + "geo spatial", + "mer sey", + "immer se", + "desc end", + "ber nade", + "j s", + "boat sales", + "win der", + "cran k", + "sing leton", + "candid acy", + "ben a", + "ðŁı» âĢį", + "high lander", + "ol t", + "k prs", + "healthy lifestyle", + "four teen", + "end the", + "ith aca", + "circul ated", + "r ans", + "pre valent", + "ha vas", + "splend or", + "roo ster", + "kalamaz oo", + "jewell ers", + "enne dy", + "rou sey", + "es y", + "cann ons", + "ornam ental", + "// //", + "ren don", + "win ne", + "mol ding", + "eid mubarak", + "coun tess", + "simon a", + "ha wa", + "fo es", + "du ster", + "sb u", + "por tray", + "mar ries", + "goo dday", + "cho co", + "achi ever", + "ðŁĺ¹ ðŁĺ¹", + "pre neur", + "tr amp", + "tom i", + "n bat", + "garden chat", + "farra khan", + "ever glades", + "ab ru", + "sou sa", + "se ce", + "homes wee", + "terre strial", + "bar it", + "sri devi", + "ol u", + "mel inda", + "f rick", + "can dies", + "ðŁĺŃ ðŁĴķ", + "qu reshi", + "family fun", + "exor cist", + "cardin al", + "ny t", + "dies el", + "cu mulus", + "capric orn", + "si ology", + "lor na", + "dou gie", + "an die", + "super sport", + "c fl", + 
"п ÑĢи", + "say ang", + "pe ek", + "ภĬ", + "lo be", + "j em", + "ing lis", + "gg led", + "c sn", + "amne sty", + "chu ps", + "ba es", + "sau er", + "ðŁı IJ", + "mongo lian", + "en et", + "back street", + "dr illed", + "acce ssing", + "ce o", + "b se", + "ai ken", + "pur r", + "wor sen", + "whe res", + "war k", + "testi fying", + "bu ri", + "bla st", + "aw g", + "ðŁĵ ĭ", + "re defining", + "hear ing", + "u ci", + "c mp", + "bon i", + "tail oring", + "ta ji", + "noc chi", + "em t", + "stephen king", + "ne et", + "compla ins", + "campaig ner", + "luci ano", + "twili ght", + "ti esto", + "pas sports", + "flo yd", + "cathe dr", + "na ked", + "caregi ver", + "b coz", + "ade cides", + "ku ri", + "ly k", + "br aries", + "dren ched", + "disc lose", + "ðŁĴª ðŁı½", + "le blanc", + "je tty", + "gar ty", + "chip mun", + "b su", + "rhyth mic", + "ic z", + "fri d", + "anne x", + "ame x", + "solo ist", + "lanc ers", + "arro whead", + "speci fication", + "simul ated", + "na is", + "inver te", + "bo wing", + "wor ship", + "f z", + "abo ss", + "sha q", + "ì¶ ķ", + "challeng ers", + "an arch", + "aamaadmi party", + "ãħĭãħĭ ãħĭ", + "suffol k", + "so corro", + "sn ell", + "cla dding", + "absor bing", + "shaw a", + "particip ates", + "ðŁį Ķ", + "book stores", + "bak u", + "seap ort", + "ko jima", + "gab y", + "pack ard", + "electr ician", + "let it", + "mo wing", + "fa wad", + "young jae", + "hot mail", + "men ing", + "u rie", + "intim acy", + "con ti", + ": \")", + "lifeis good", + "in ciner", + "i dri", + "craz iness", + "jour nos", + "fran chi", + "bott len", + "al da", + "ff es", + "k x", + "south we", + "air a", + "clay ton", + "sco ti", + "f j", + "bri ga", + "ð٤ĺ ðŁı»", + "demonstr ators", + "y z", + "stor k", + "na q", + "casc ades", + "travel chat", + "plat a", + "pad ma", + "fran ci", + "at tain", + "bat girl", + "lom bard", + "hoo s", + "d dos", + "neon atal", + "discla imer", + "r ss", + "r ant", + "di sen", + "tex aste", + "so cal", + "frac tal", + "cam ry", + "stri fe", + "sn acking", + "mu h", + "sant ander", + "mor ons", + "gra f", + "par ades", + "hu ston", + "dru pal", + "mi ento", + "kir stel", + "hy de", + "vom it", + "forti fied", + "sphin x", + "da v", + "bir yani", + "win nings", + "s baseball", + "mer ged", + "lovel ondon", + "ling ering", + "dream big", + "car leton", + "liveli hood", + "djan go", + "astri d", + "gri ds", + "down e", + "bru ised", + "s ne", + "scarec row", + "hel ium", + "f nc", + "bi ggs", + "an ter", + "restor ative", + "em pires", + "ab del", + "life style", + "kiwan is", + "colloqui um", + "me en", + "pr ick", + "anti que", + "ze b", + "mi mic", + "edmon ds", + "ðŁijĬ ðŁijĬ", + "q ing", + "pp el", + "mc gill", + "interpre ting", + "âŀ ķ", + "rash ad", + "do ka", + "narr ator", + "electro magnetic", + "ash by", + "sau ra", + "iran deal", + "âģ īï¸ı", + "krish nan", + "in di", + "ff en", + "bre a", + "os man", + "multin ational", + "chi ppe", + "recruit ers", + "aus biz", + "p ounding", + "re gen", + "cur sor", + "refu sal", + "mac s", + "in ak", + "ax ial", + "wa ifu", + "up cycled", + "hindu stan", + "cas sini", + "carly le", + "scrat ches", + "re ef", + "man atee", + "eat ery", + "ðŁĵ ¢", + "un condition", + "sen pai", + "on ther", + "comic book", + "pro sciutto", + "de mar", + "mi se", + "ma ge", + "fre ec", + "aye sha", + "al der", + "android games", + "ley ton", + "ho ck", + "door way", + "chicagof ire", + "aali yah", + "sw elling", + "bi x", + ". 
ðŁĺĤ", + "evan kirstel", + "torpe do", + "kon stant", + "genevie ve", + "ma ia", + "ha user", + "do torg", + "hide ous", + "fi k", + "sp raw", + "e ek", + "z appa", + "wan dered", + "' '", + "ra jan", + "bam bi", + "( $)", + "wid ening", + "tool box", + "sa ir", + "illumin ating", + "pra ys", + "out patient", + "i w", + "day o", + "lo b", + "sw fl", + "sha des", + "gu ms", + "coo kin", + "ko di", + "gri ffin", + "traum ati", + "ste a", + "slaugh tered", + "god bless", + "air time", + "pseu do", + "b sa", + "hau led", + "ar if", + "à¸Ńภĩ", + "le l", + "wc po", + "mil iti", + "char ters", + "worl da", + "ru k", + "k gs", + "digital india", + "is able", + "idyl lic", + "esp ino", + "marie tta", + "e bo", + "team canada", + "ab our", + "wil ton", + "rock stars", + "fav ored", + "phys ic", + "wrink le", + "tb r", + "d print", + "ball arat", + "ad al", + "z ey", + "ðŁĺį ðŁĶ¥", + "tom lin", + "mt r", + "pal sy", + "fener bah", + "tight en", + "phil ia", + "ir oning", + "ry u", + "b ant", + "enqu ire", + "ca ir", + "abur ger", + "tru n", + "green berg", + "chau han", + "ir ina", + "sh ani", + "trend setter", + "pre tt", + "zaf ar", + "alo ve", + "v ici", + "pan ic", + "no o", + "lu stre", + "disrup ted", + "bal lis", + "son sof", + "mon si", + "inst ac", + "ake st", + "ëĭ ¤", + "kw ame", + "horror movies", + "distric t", + "sau cy", + "mb an", + "ar mies", + "with drawn", + "med ics", + "loft us", + "er oom", + "be kind", + "ar ns", + "all on", + "un ison", + "davi ds", + "cr at", + "nicot ine", + "so or", + "sm x", + "on co", + "cospla ying", + "zombi es", + "har ms", + "e ger", + "ro sy", + "moon shine", + "fe in", + "ce tt", + "du brov", + "reg ents", + "ben itez", + "ðŁijıðŁı¼ ðŁijıðŁı¼", + "ste c", + "m alia", + "prioriti ze", + "ic eland", + "ft se", + "v amo", + "lam ont", + "homo sexuality", + "bre es", + "regu i", + "cb p", + "te j", + "sky sports", + "deter gent", + "sha sta", + "de rel", + "conserv ancy", + "colori zed", + "accol ades", + "vis o", + "show your", + "nan ow", + "bice ps", + "us ability", + "bi m", + "dailys ketch", + "pearl jam", + "stran gest", + "mega deth", + "broad casts", + "bar ren", + "ar ton", + "chri ss", + "confi gu", + "lu res", + "is the", + "e ul", + "railway ana", + "global health", + "gi anni", + "u aap", + "s lum", + "consci ously", + "ab re", + "n up", + "bud get", + "v ada", + "e sch", + "real ness", + "er ased", + "th unt", + "be z", + "armist ice", + "ðŁij ¹", + "sh run", + "o led", + "driver less", + "ðŁ¤· ðŁı»âĢįâĻĢï¸ı", + "won dr", + "sk an", + "sal aam", + "mother land", + "h wang", + "gen o", + "gang nam", + "tw right", + "endor sing", + "en ic", + "ador ation", + "pau sed", + "patric ks", + "do cked", + "plat te", + "ff xv", + "ethnic ity", + "auto show", + "side show", + "after life", + "re located", + "orphan ed", + "food network", + "dare to", + "and ra", + "sla ps", + "v live", + "swim s", + "re imagined", + "mist le", + "re vise", + "real ity", + "bhar ti", + "ðŁĴĻ ðŁĴĽ", + "late st", + "prou dest", + "gra sses", + "lan yard", + "fresh est", + "carcin oma", + "anom aly", + "zieg ler", + "sum ner", + "ly rix", + "gor g", + "is d", + "av el", + "swild life", + "me squ", + "john cena", + "euro league", + "sab er", + "master ful", + "yar ra", + "cogn ition", + "jacob son", + "abo lic", + "sir loin", + "shuk la", + "moj ito", + "su pere", + "st weet", + "me z", + "e sa", + "rudol f", + "gur a", + "where you", + "tt m", + "win s", + "trust worthy", + "ny k", + "bra den", + "table top", + "good food", + "es on", + "be k", + "lingui stic", + "gra 
ys", + "ch ath", + "h cs", + "mon i", + "de ans", + "cu ssions", + "ch ell", + "slo ws", + "he mi", + "d app", + "shar pie", + "boo sters", + "a os", + "str ack", + "se dona", + "mu eller", + "hard wick", + "or nate", + "thor a", + "sal ud", + "o twol", + "ch um", + "mi ho", + "for age", + "thel ittle", + "tear ful", + "ones elf", + "min dy", + "sm g", + "gmb h", + "emer ald", + "ðŁĶ´ âļªï¸ı", + "tu tti", + "recep tions", + "re vising", + "i brox", + "tope ka", + "sal ami", + "expan se", + "i books", + "dob son", + "cli o", + "at s", + "ðŁļ Į", + "mo ha", + "is ance", + "shu tters", + "moo t", + "jan ine", + "marvel comics", + "jor dani", + "pos er", + "kenne th", + "hy ung", + "de ja", + "ase ball", + "speci ality", + "eu ston", + "classic car", + "had ith", + "ðŁIJ ī", + "chas ing", + "iz o", + "gros ven", + "ag lia", + "thisdayin history", + "t row", + "om ile", + "hu ar", + "by n", + "sal ine", + "div ine", + "demon ic", + "ty ran", + "han dover", + "revit alization", + "pa ella", + "cryp tic", + "se dg", + "m end", + "dun kirk", + "bre d", + "wal d", + "sport scar", + "a ard", + "whe aton", + "da ener", + "k lan", + "br t", + "bakhta war", + "spi res", + "schu bert", + "ro ti", + "poli sh", + "o se", + "ag ame", + "wonder con", + "prote stant", + "bo sa", + "ðŁĺ Ł", + "d ü", + "joy ride", + "ger trude", + "âĿ Ŀ", + "gil a", + "v h", + "tw a", + "tra v", + "swal lowed", + "star ve", + "la in", + "ent ren", + "rei ki", + "su kh", + "cra ic", + "az u", + "web page", + "kee fe", + "hypo the", + "hir sch", + "hel le", + "camp ground", + "w amy", + "tra vi", + "sha hi", + "san deep", + "ru i", + "han uman", + "dw p", + "reposit ory", + "no or", + "no ff", + "un real", + "p ell", + "black history", + "har vick", + "ma scar", + "pay ee", + "pa sha", + "gastron omy", + "d ÃŃ", + "ai g", + "rosen thal", + "open day", + "embelli shed", + "t tip", + "sun bathing", + "go pack", + "end ome", + "ï¸ı #", + "invali d", + "final four", + "st fu", + "squish y", + "ra sta", + "mo sch", + "jam esc", + "die trich", + "sel a", + "mel b", + "el vi", + "t dp", + "sun i", + "sli t", + "j ha", + "bi za", + "spi ked", + "l li", + "l illard", + "vam pi", + "syno psis", + "az har", + "kendrick lamar", + "ĮãĤĬãģ ŁãģĦ", + "heart less", + "country file", + "air play", + "arrog ance", + "pre e", + "virtu oso", + "ãħłãħł ãħłãħł", + "raj u", + "le bu", + "for ward", + "tu g", + "dro s", + "mondaymotiv aton", + "concep cion", + "thel o", + "pad i", + "looo ol", + "ÑĢ Ð¾Ð´", + "it ss", + "eth ical", + "end uro", + "__ :", + "expend iture", + "mon ste", + "mas king", + "terri ers", + "ib is", + "e mber", + "cu mple", + "punctu ation", + "pi per", + "ir vin", + "ade e", + "yy yyyy", + "flash backs", + "cel sius", + "don nie", + "bo gota", + "ben evol", + "the script", + "shil pa", + "pro se", + "fin dia", + "ze ke", + "ne ko", + "do ves", + "blues lyrix", + "fro sh", + "sowe to", + "mp lo", + "al ai", + "sab i", + "raq qa", + "wf tv", + "stro ller", + "ian somerhalder", + "ðŁĶ ª", + "an on", + "mo seley", + "! 
?!?", + "sta king", + "mol y", + "car tri", + "c sg", + "ast or", + "transc end", + "ma er", + "de ux", + "cow girl", + "sas k", + "pun ter", + "ma ken", + "o ates", + "love tt", + "grow ler", + "sag in", + "v n", + "ssi ble", + "officeof rg", + "y mc", + "sab ar", + "faul ty", + "ap ha", + "ak on", + "ðŁij «", + "snow don", + "ae w", + "raise the", + "ðĿ ĵ", + "grue some", + "clement ine", + "sp ing", + "lat a", + "worlden viron", + "mi mic", + "can aria", + "bakhtawar bz", + "ao a", + "fal a", + "ãĤ Ń", + "avi va", + "you uuu", + "thi gh", + "la dders", + "gu mbo", + "tz ky", + "fu zz", + "plastic pollution", + "est ate", + "strength ened", + "k ant", + "dr in", + "cal vert", + "transform ational", + "frigh tened", + "mac lean", + "elited angerous", + "ear thy", + "t son", + "to da", + "j nu", + ".. ,", + "mic hal", + "i ban", + "je ong", + "is real", + "sim coe", + "exclu sives", + "blue bells", + "ben e", + "te u", + "pil sner", + "pens ke", + "athe ists", + "m pu", + "cartag ena", + "ðŁĴĹ ðŁĴĹ", + "million aires", + "kk kk", + "it ar", + "subscri ptions", + "remo te", + "ma fi", + "hin ton", + "w cc", + "ho k", + "ds b", + "ab leton", + "sevent y", + "pun ks", + "e indhoven", + "sh one", + "mcfar lane", + "lim popo", + "empha si", + "à ¼", + "sin fo", + "pe tre", + "man grove", + "ch ino", + "ber tie", + "play lists", + "push awards", + "p af", + "deb bie", + "c do", + "r ino", + "ðŁı¾ âĢįâĻĤï¸ı", + "fol ke", + "bon nar", + "th ine", + "sl an", + "hal ter", + "evi e", + "aw some", + "vul tures", + "spar ky", + "seiz ures", + "âľ Ķ", + "ram one", + "ine ffe", + "al n", + "pro ctor", + "ast ra", + "the voice", + "gro te", + "sci on", + "dead line", + "am aya", + "tain ted", + "patter ned", + "exce eding", + "cross fit", + "kay lee", + "drop box", + "ru shes", + "tack led", + "mo by", + "retro gamer", + "n cbd", + "benef itting", + "shay kh", + "guild hall", + "gen try", + "dream cast", + "dread ed", + "bun dled", + "th aw", + "revol ving", + "n pt", + "kylie jenner", + "imagin ative", + "ron i", + "over came", + "family time", + "ds burg", + "car naval", + "relation ship", + "recogni zable", + "cor oner", + "ho le", + "fan fic", + "emir ates", + "bur ritos", + "analy se", + "thin ner", + "ne es", + "galli poli", + "bl r", + "cat woman", + "-- >>", + "au lt", + "ada ily", + "nau ghty", + "ili o", + "solit aire", + "mtv br", + "jocel yn", + "arun ach", + "rep ent", + "south gate", + "hy acin", + "essenti al", + "fent on", + "and um", + "it or", + "go pal", + "sl inger", + "po sei", + "aw il", + "wi elding", + "ra ila", + "eli as", + "a sto", + "à ¤", + "tend ency", + "str ata", + "ker t", + "< -", + "im acele", + "da es", + "sti mulus", + "han ley", + "fit nes", + "ec stasy", + "lim ous", + "ha iling", + "ðŁ¤ Ń", + "chis wick", + "tar ies", + "sla v", + "pul i", + "moderni zation", + "black mail", + "b ingham", + "h fx", + "+ +", + "ðŁĩ®ðŁĩ ³", + "ni v", + "we a", + "profess or", + "k off", + "bol ster", + "su ave", + "sequ ences", + "pepper oni", + "not te", + "dre n", + "ãģ¨ ç¹ĭãģ", + "hs v", + "o ga", + "ap tly", + "z ad", + "excel si", + "rin ka", + "mol dova", + "min n", + "ma bel", + "conferen cing", + "bas ing", + "of er", + "ob si", + "hamill himself", + "care less", + "brief ed", + "inhe rent", + "par ish", + "dub nation", + "town sville", + "sar awak", + "gee ky", + "doncaster isgreat", + "was abi", + "gu p", + "phen o", + "dra inthe", + "carrie underwood", + "ble eds", + "bbc world", + "ane w", + "alta f", + "dul wich", + "ani ston", + "w ti", + "sumat ra", + "gra fton", + "bl 
n", + "me ster", + "bode ga", + "re go", + "es q", + "an jo", + "sump tuous", + "mai sie", + "ï¿ ½", + "wil t", + "jak ob", + "el vis", + "se pul", + "mu ster", + "air pollution", + "president e", + "happy monday", + "exten sively", + "fl ondon", + "t ls", + "play ing", + "pe ed", + "din ho", + "var dy", + "pi ka", + "n iro", + "au cus", + "ðŁį ¦", + "nu ll", + "el ondon", + "juvent us", + "imag ines", + "dis ab", + "lit o", + "d ura", + "work places", + "promo te", + "mc caf", + "wood work", + "waw x", + "à® ª", + "tt ino", + "shar i", + "sem per", + "better together", + "ðŁijĬ ðŁı»", + "ze bra", + "pon dering", + "en chil", + "ho m", + "cosm ic", + "tan z", + "mo cked", + "ec cc", + "ath ed", + "abo lish", + "prop eller", + "paris agreement", + "assemb lies", + "indu stry", + "fraudul ent", + "pe sa", + "chang min", + "ax x", + "ðŁĴ µ", + "irr ational", + "cu sa", + "ramad han", + "octa via", + "on elove", + "jac ki", + "bar ak", + "taxi der", + "seri ous", + "nathan fillion", + "mc en", + "ch k", + "po part", + "grav ity", + "copp ola", + "reading fc", + "illu sions", + "j ig", + "ww x", + "re sh", + "ex porting", + "buzz ard", + "âĻ ¤", + "p cm", + "lan apar", + "ko s", + "arom as", + "antal ya", + "ww dc", + "ven a", + "phil a", + "ball in", + "ðŁij Ħ", + "quin ta", + "ma o", + "f ery", + "eigh ty", + "sentim ents", + "safe guarding", + "r wa", + "pu ffs", + "luc ille", + "de cath", + "sl u", + "nu gent", + "de ter", + "braz il", + "ze iss", + "super bowl", + "subsi dy", + "alter n", + "hi dalgo", + "enz ymes", + "ä ½", + "tag ne", + "hair dresser", + "adri en", + "walk out", + "oppo ses", + "can tina", + "bed side", + "af an", + "ðŁĶ Ĺ", + "prophe tic", + "dan es", + "un successful", + "super charged", + "pk k", + "exem ption", + "hart le", + "secu lar", + "cli pping", + "br s", + "united way", + "c net", + "pat chy", + "ha gan", + "e en", + "âļ ľ", + "var a", + "sym pathi", + "never trump", + "affir mation", + "om f", + "ny cfc", + "ma ja", + "sur ro", + "keer th", + "up scale", + "sandal wood", + "mon archy", + "kno bs", + "å ĭ", + "po tholes", + "hunger games", + "ter races", + "na sir", + "coun sell", + "welcome to", + "wa q", + "se aman", + "m ita", + "stun ningly", + "on theroad", + "in ability", + ") !!", + "bon go", + "ant v", + "sp ut", + "worldenviron mentday", + "resu sc", + "y td", + "fi m", + "eun hyuk", + "sa chin", + "rose anne", + "cler mont", + "ape c", + "am ina", + "v ening", + "n antes", + "al most", + "sin us", + "ex as", + "ty l", + "ti en", + "ple ad", + "lanc s", + "bur naby", + "re k", + "jo om", + "observ ers", + "disco graphy", + "cl g", + "âĻ ¦", + "sn ack", + "r ti", + "o ily", + "crystal li", + "bru te", + "web development", + "topp ings", + "la f", + "an is", + "ad der", + "reli ving", + "car lin", + "battle of", + "we g", + "syri an", + "pon t", + "n dc", + "lagh ate", + "yu ma", + "sp p", + "p iti", + "ro bbing", + "mart ing", + "rey kja", + "raj put", + "nc ds", + "kie wicz", + "âĢ¢ âĢ¢", + "vam pire", + "substan tially", + "opio ids", + "nepal i", + "k line", + "ar oo", + "under stand", + "lit t", + "u it", + "thro mbo", + "sar ies", + "qu ot", + "b alling", + "t tr", + "s gh", + "philip p", + "br ant", + "ac l", + "m ello", + "whit taker", + ". 
;", + "defi ant", + "b gc", + "repl ying", + "mir ren", + "metamor pho", + "sch wab", + "bul ge", + "utili zed", + "pick ering", + "par don", + "d sa", + "ภĪ", + "doo ley", + "cumul ative", + "Ð »", + "ur gency", + "e mir", + "+ /-", + "¦ Ī", + "ot as", + "âı ³", + "station ed", + "grape vine", + "ar ac", + "karan johar", + "f ancy", + "sau l", + "coo gs", + "lgbt q", + "ا٠ħ", + "jav i", + "u mmer", + "pl l", + "den is", + "dai pur", + "pu ffin", + "lewi sham", + "fand om", + "co pe", + "ves matter", + "s ve", + "hel pless", + "deo dor", + "ostr ich", + "kaz an", + "friday the", + "con dor", + "v x", + "sophom ores", + "rob les", + "cu tt", + "cli mbers", + "ë¦ ¬", + "sle g", + "sn f", + "mac ys", + "hydr ating", + "grou pe", + "po yn", + "mou lin", + "hg tv", + "lmfa ooo", + "sulph ur", + "asdfghj kl", + "annab elle", + "hump back", + "bra ved", + "viswas am", + "multi purpose", + "hu midi", + "escor ted", + "barb ican", + "f ad", + "cor sa", + "ðŁ¤ «", + "pi ppa", + "here to", + "can y", + "ser gi", + "or cas", + "o vie", + "ed ou", + "s any", + "glob alization", + "man cini", + "food truck", + "f is", + "defi brill", + "sch re", + "sma fia", + "love wins", + "la ut", + "k aka", + "hol lande", + "game on", + "resurg ence", + "out side", + "olympi ad", + "int an", + "abstr action", + "rapi d", + "pal om", + "cal le", + "jas min", + "attack ers", + "swag g", + "mit ra", + "ky lo", + "à® ²", + "her mitage", + "gor do", + "e ira", + "so sfam", + "roll out", + "exc ite", + "sy nod", + "mer rill", + "c als", + "as sa", + "liveli hoods", + "ju ve", + "the black", + "gopack go", + "ant lers", + "alban ian", + "wool ly", + "qu iche", + "puri fication", + "are th", + "smar thome", + "ne k", + "all blacks", + "mex icans", + "is m", + "ger ms", + "comple xion", + "mar ck", + "u shi", + "ðŁIJ IJ", + "char l", + "ca stic", + "till erson", + "giuli ani", + "biode gradable", + "mal bec", + "bo is", + "ju bil", + "im es", + "r ame", + "gene tic", + "esp nu", + "ch ley", + "so ho", + "go pher", + "g sc", + "buu ren", + "cu be", + "bridesma ids", + "webin ars", + "to e", + "mani pur", + "viol ently", + "notic ias", + "ex changing", + "chi ev", + "replac eable", + "muay thai", + "bu ss", + "sp il", + "instal ment", + "div ya", + "cait lin", + "o lim", + "fil tering", + "whirl wind", + "sta red", + "prior it", + "pr am", + "pompe ii", + "mono logue", + "k ite", + "bu ka", + "â̦ ..", + "vac cine", + "bre ro", + "woz ni", + "sol ent", + "re ferr", + "my rt", + "gridi ron", + "galatasar ay", + "fro ze", + "clare mont", + "ðŁ¥ ĥ", + "victori as", + "ssel dorf", + "pa stures", + "net neutrality", + "ch or", + "ðŁij ģ", + "ಠ¿", + "we ho", + "symp tom", + "jo sel", + "in ous", + "dragon con", + "power ball", + "p te", + "four thofjuly", + "ec la", + "ear buds", + "where abouts", + "salt life", + "depriv ation", + "ch ter", + "wi ggle", + "syste m", + "ps st", + "ch az", + "d any", + "ri mo", + "oax aca", + "lanapar rilla", + "barcel on", + "melanch oly", + "way back", + "ho tro", + "n si", + "l illy", + "kur o", + "ja han", + "intellec t", + "board game", + "ðŁı Ĭ", + "sneak peek", + "k prc", + "jail s", + "cand el", + "zan zi", + "mor timer", + "star ch", + "ra gs", + "p fa", + "long live", + "k art", + "gir ona", + "cro cker", + "christop h", + "precau tions", + "war ship", + "per m", + "paren t", + "van gogh", + "gif ford", + "allegh eny", + "ra yn", + "ut m", + "sten cil", + "rec alling", + "pen ney", + "z azzle", + "ìĥ Ŀ", + "hin ds", + "aren as", + "nu ev", + "law ler", + "gu in", + "do this", + "ðŁij ķ", + 
"ì¶ķ íķĺ", + "we g", + "ti b", + "ri din", + "complex es", + "turbul ent", + "pe sos", + "de marcus", + "vall arta", + "sam sun", + "kis ses", + "hein rich", + "deport es", + "wil ms", + "ur d", + "then ext", + "inki gayo", + "ho wi", + "fir sts", + "carri age", + "clean liness", + "mas war", + "is ch", + "ax el", + "si zzle", + "road house", + "fr ans", + "ent ourage", + "co bble", + "boo th", + "benedic t", + "tal on", + "fc u", + "year ofthe", + "ray on", + "raider nation", + "fo yle", + "ko val", + "pi anos", + "l pg", + "bur mese", + "man ure", + "geo caching", + "cosc ino", + "b np", + "fer ra", + "stro phy", + "mar ais", + "ce es", + "legen dof", + "kat niss", + "eno ch", + "av ed", + "you know", + "d prk", + "ðŁĺ¢ ðŁĺ¢", + "sp un", + "pro st", + "sor rows", + "cent red", + "ke a", + "gal icia", + "? ð٤Ķ", + "ÑĢод а", + "bou chard", + "ðŁĴĻ ðŁĴľ", + "yu i", + "seed lings", + "jon ah", + "reco vers", + "ny rd", + "board room", + "su ma", + "my japs", + "tun g", + "sha i", + "ir gc", + "eli o", + "wag ons", + "ka shi", + "polic emen", + "john nie", + "ale coscino", + "shop ify", + "dot ted", + "de tri", + "va w", + "to fficial", + "in your", + "chal mers", + "trac ed", + "no vi", + "by es", + "ari el", + "nipp on", + "la pel", + "gri ez", + "b gs", + "fool ing", + "d ita", + "vijay sethu", + "nm wx", + "as ot", + "kr anti", + "hel m", + "ve di", + "sic kest", + "mo chi", + "k abo", + "shru bs", + "he red", + "b sp", + "sq m", + "ham r", + "dul kar", + "anth a", + "nr f", + "avoid ance", + "at en", + "publi x", + "be arers", + "nas i", + "ha p", + "h ells", + "ðŁĸ ¥", + "ภ·", + "thelast jedi", + "oh wx", + "ðŁį «", + "wa hoo", + "there se", + "rec aps", + "ss nhq", + "bird photography", + "v ay", + "pet ti", + "pau lo", + "bel vedere", + "( *", + "gr l", + "du vet", + "c pec", + "sa it", + "por sch", + "meas urable", + "avi ators", + "fre mantle", + "bre en", + "on om", + "me and", + "life saving", + "eu ref", + "en don", + "embar as", + "aira sia", + "el is", + "dun kin", + "star magic", + "s ill", + "porto bello", + "ki efer", + "ex e", + "mu ted", + "ãģ ¦", + "we thepeople", + "logi a", + "liber al", + "theforce awakens", + "min ed", + "haun ts", + "freck les", + "care taker", + "s india", + "âķ IJ", + "dev lin", + "list on", + "direction er", + "oh n", + "fi garo", + "em manuel", + "du bois", + "cl ones", + "bru ise", + "ðŁİĪ ðŁİī", + "disin fe", + "der matology", + "as r", + "s watch", + "dis comfort", + "tam anna", + "pi day", + "mack en", + "k atic", + "delu sional", + "shaw nee", + "gu d", + "al bino", + "p ali", + "din gh", + "cucu mbers", + "coffe y", + "anticip ating", + "treas ured", + "web summit", + "shel tered", + "sav or", + "pedago gy", + "m gs", + "sh ma", + "s bu", + "den ali", + "cam pos", + "bubble gum", + "o ir", + "le aps", + "y ler", + "r one", + "sansk rit", + "min t", + "meat less", + "futuri st", + "du de", + "a vel", + "prote sted", + "squ ire", + "z aki", + "sz n", + "har court", + "cycl one", + "bour dain", + "gather ings", + "d ant", + "advent urer", + "parag on", + "alt man", + "dd ing", + "ban erjee", + "snorkel ing", + "mother well", + "mis sy", + "en der", + "glo ws", + "ki wis", + "chick pea", + "por o", + "e fron", + "app t", + "u y", + "speci fied", + "gab by", + "e strada", + "com bos", + "bour bon", + "vin i", + "var un", + "steph ani", + "key words", + "car vings", + "amit abh", + "wr ought", + "tw al", + "re els", + "clu bbing", + "ubi quit", + "cri t", + "ambed kar", + "æ Ļ", + "prun ing", + "vaccin ated", + "boe ing", + "s ks", + "lo ona", + 
"hypno sis", + "edel man", + "pho l", + "he w", + "colo sse", + "mckin sey", + "u on", + "to te", + "sacrific ing", + "ox i", + "n ang", + "e mu", + "пÑĢи ÑĢода", + "m th", + "kers wednesday", + "argu ed", + "timel apse", + "ris king", + "regul ating", + "ni gh", + "likeli hood", + "cu bic", + "au ction", + "rein for", + "pi stor", + "no ses", + "ye l", + "snu ggles", + "pe i", + "jean ette", + "ta ku", + "ri th", + "guy z", + "ภŀ", + "y te", + "ver ted", + "pay soff", + "jau regui", + "hoo ligans", + "procedu ral", + "mi b", + "har dy", + "el eng", + "chec kers", + "all ine", + "the met", + "prou dof", + "keerth yofficial", + "collabor ator", + "ni u", + "infl icted", + "adv ani", + "re twee", + "memor iam", + "f icial", + "ti ghter", + "sal em", + "re viewers", + "br ics", + "ben digo", + "am ell", + "tur kish", + "sush maswar", + "paul son", + "pal awan", + "mol lie", + "stitch er", + "s burgh", + "ir u", + "hay dn", + "en ers", + "aro a", + "u zzi", + "saraj evo", + "hel a", + "apol lo", + "nine ty", + "vac a", + "sp on", + "vent u", + "jel ena", + "hei fer", + "avo ids", + "sp ine", + "pri ze", + "mar ist", + "re creating", + "me de", + "woo den", + "find lay", + "ro fl", + "n di", + "compreh end", + "yu go", + "y ü", + "to work", + "u fos", + "son ar", + "pi ston", + "recor ding", + "tent ative", + "art forsale", + "pel lets", + "fre do", + "ÙĪ Ø±", + "mu ses", + "custom ization", + "pro found", + "is ner", + "ide ally", + "si am", + "plan kton", + "cm dr", + "man ger", + "fran ken", + "customiz able", + "ठ®", + "walk away", + "swi vel", + "vast ly", + "no ton", + "lex a", + "ex moor", + "z as", + "tan te", + "reduc tions", + "lol ly", + "hip sters", + "benef ited", + "ë ²", + "ww www", + "mascul ine", + "fi ji", + "dre y", + "ph ill", + "ane ous", + "nic ol", + "men dez", + "disapp ro", + "ch ner", + "through s", + "shen mue", + "east man", + "ðŁIJ İ", + "yu ck", + "under tale", + "re ys", + "go beavs", + "eng en", + "c na", + "mer r", + "bir k", + "ãģ¨ç¹ĭãģ ĮãĤĬãģŁãģĦ", + "âĥ£ @", + "yn na", + "ste ed", + "offen der", + "at um", + "vani shing", + "presi denti", + "love them", + "g nocchi", + "fri ggin", + "per il", + "mad hya", + "ag ne", + "dee jay", + "mar nock", + "m tb", + "fold able", + "@ ___", + "stand re", + "bron x", + "bow ski", + "fin ite", + "cro ckett", + "b sf", + "ge tit", + "seren awilliams", + "mir o", + "ignati us", + "sla y", + "rin se", + "fon due", + "sel dom", + "s more", + "gan i", + "dy ce", + "dmit ry", + "cru mb", + "late post", + "pri mark", + "oh ana", + "flor als", + "do a", + "remembrance day", + "d ds", + "azi one", + "toon ami", + "air port", + "æĿ ±", + "th ad", + "fi st", + "dine sh", + "dr who", + "ad words", + "admi rer", + "pro je", + "kyrgy z", + "à «", + "manife station", + "le wan", + "j ic", + "thi bau", + "le ased", + "van ity", + "nouri shed", + "never theless", + "aug mente", + "fu elled", + "che ad", + "wil shere", + "ru di", + "p z", + "my co", + "mor ro", + "herbali fe", + "hardro ck", + "de man", + "dre ality", + "sp ades", + "ce vic", + "bha i", + "bar on", + "ultimat efan", + "hou news", + "to bi", + "stru t", + "ke el", + "affili ation", + "the masters", + "sm al", + "hu e", + "este ban", + "con v", + "om nic", + "datab ases", + "co v", + "ter ti", + "st g", + "snoop dogg", + "metab ol", + "leth bridge", + "ðŁı» âĢįâĻĢï¸ı", + "year ling", + "residente vil", + "nws l", + "iy aki", + "griez mann", + "c ous", + "ðŁĵĿ :", + "tor ian", + "sam i", + "ðŁĶ¥ðŁĶ¥ ðŁĶ¥ðŁĶ¥ðŁĶ¥", + "g are", + "alli ances", + "whit field", + "we ther", + "refin 
ing", + "coy i", + "kra ken", + "ðŁĺĺ âĿ¤", + "singul arity", + "lil i", + "h ns", + "bol dand", + "waw rinka", + "misogy ny", + "lo vers", + "c q", + "b dg", + "ad ona", + "gar ter", + "women of", + "sc d", + "recogn ising", + "mun a", + "str ou", + "sign alling", + "lare do", + "hell boy", + "alek sand", + "un available", + "pedi atric", + "as in", + "mer ia", + "ri shi", + "futuri sm", + "w ye", + "polari zed", + "e we", + "pro pel", + "in forms", + "cre ase", + "~ \"", + "arti ston", + "like for", + "heidel berg", + "er ra", + "life in", + "len ny", + "inter rupt", + "cohe rent", + "ca z", + "vick ers", + "le veled", + "f bs", + "cab ins", + "bu mmed", + "apost les", + "we h", + "ten don", + "souven irs", + "infu ri", + "pier ce", + "asse t", + "m las", + "go th", + "di ggin", + "ann as", + "yl or", + "th waite", + "sw el", + "pan era", + "mur derers", + "croo ked", + "bs go", + "ac u", + "a on", + "re an", + "one of", + "ko hl", + "bloo dh", + "pest icide", + "lost dog", + "fle xing", + "ëĤ ĺ", + "su pra", + "eter nally", + "ðŁļ Ļ", + "pa olo", + "ol an", + "mom o", + "is elle", + "captain marvel", + "s lou", + "mistak enly", + "akhi lesh", + "mer t", + "il inan", + "bu on", + "bal kan", + "mir ro", + "mill en", + "der ail", + "dam on", + "tit i", + "bi os", + "re don", + "pic ard", + "par te", + "ðŁ¤ Ł", + "Ø º", + "son ics", + "fir sth", + "dd c", + "veg ans", + "tur ban", + "ni gan", + "lot tie", + "lyn don", + "star buck", + "pink floyd", + "life styles", + "am ara", + "a she", + "r sc", + "val a", + "sm er", + "cw gc", + "cli ent", + "buen as", + "jag an", + "coo ps", + "ðŁijij ðŁijij", + "speci alizes", + "snag ged", + "g lar", + "ben net", + "wildlife wednesday", + "bow den", + "pi k", + "art in", + "empor ium", + "ar l", + "re ba", + "pas ser", + "disappo ints", + "additi ve", + "âľĬ ðŁı½", + "bay er", + "missou la", + "ha skell", + "comm ences", + "ni x", + "ne man", + "explo ited", + "plastic surgery", + "cc d", + "aso cial", + "vo t", + "sie gel", + "fro ome", + "kap am", + "far a", + "e ha", + "pro bes", + "mw f", + "meet ing", + "p bb", + "ak ins", + "mistle toe", + "kingdom hearts", + "for kids", + "ec r", + "bal e", + "escor ts", + "adidas originals", + "k wa", + "k ts", + "hallo ffame", + "ðŁĺį .", + "wag s", + "pot ted", + "o wing", + "honey comb", + "he fty", + "uro logy", + "mer le", + "b pd", + "stri pping", + "re ich", + "k state", + "gu ay", + "yon ge", + "shak ti", + "g loom", + "bat t", + "son om", + "n ery", + "el ba", + "blan ks", + "hel le", + "triple ts", + "bom bay", + "ak arta", + "ab ia", + "transm itted", + "rol f", + "ja is", + "angular js", + "fi erc", + "m ss", + "trac e", + "ॠĩ", + "tom bs", + "old man", + "kom bucha", + "fo l", + "e health", + "cere als", + "are lli", + "in ari", + "ðŁĴ ©", + "wo l", + "liber ties", + "fa wn", + "af firm", + "nun avut", + "hyster ical", + "k drama", + "art es", + "âĢ¢âĢ¢âĢ¢âĢ¢ âĢ¢âĢ¢âĢ¢âĢ¢", + "valent in", + "man slaughter", + "gal es", + "eo in", + "energi zed", + "del s", + "with draws", + "st les", + "sar castic", + "ram esh", + "incredi bles", + "lock hart", + "ya wn", + "ultimatefan live", + "oooooooo oooooooo", + "mu en", + "guru dev", + "te er", + "pe eling", + "new snow", + "lingui stics", + "direc tv", + "ag end", + "uni lever", + "ru ger", + "han dedly", + "ero se", + "li mel", + "the c", + "royal ties", + "fini shers", + "nr g", + "m gt", + "fid get", + "com ps", + "bac on", + "aggre ssively", + "ab it", + "ch â", + "tar de", + "slu gger", + "q anda", + "gre ening", + "d ats", + "ensla ved", + "spec 
tor", + "o ye", + "fre ef", + "b hand", + "stop brexit", + "mis conceptions", + "cav a", + "ðŁĺįðŁĺįðŁĺįðŁĺį ðŁĺįðŁĺįðŁĺįðŁĺį", + "multit asking", + "hou sel", + "ferre ira", + "cen time", + "ank les", + "jo dh", + "hel ly", + "fro me", + "out tuesday", + "nar nia", + "bal aji", + "l bloggers", + "jyo ti", + "ðŁį ĩ", + "lan cia", + "cap ri", + "y ap", + "nat ash", + "down fall", + ".\" âĢĶ", + "à ®", + "ligam ent", + "coat ings", + "ai ded", + "hi ko", + "fall ing", + "encryp ted", + "yeg food", + "infringe ment", + "cu di", + "ce p", + "ðŁĺį ðŁĺĤ", + "tra d", + "super rugby", + "ed win", + "wh iche", + "vi meo", + "lay ne", + "in vigor", + "he he", + "dubrov nik", + "bie ber", + "u tr", + "sham an", + "op ers", + "ham ill", + "en ig", + "di f", + "ar um", + "scrap book", + "min h", + "diver gence", + "mckin non", + "life time", + "guter res", + "wil le", + "ple as", + "patt y", + "mic ron", + "k z", + "dom aine", + "ru sher", + "m ds", + "ches ney", + "screw driver", + "âģ© ,", + "sle dge", + "hau er", + "chan a", + "stam ina", + "sprink ler", + "pl n", + "he ff", + "bol ton", + "om on", + "car rington", + "accor dion", + "jor ge", + "inter ception", + "in puts", + "gu ll", + "tran scription", + "vanu atu", + "it ical", + "eth os", + "tic h", + "spac ey", + "pee king", + "u mi", + "ha ger", + "psycho tic", + "illi an", + "illi a", + "bonnar oo", + "an ese", + "pu c", + "laghate parth", + "en hall", + "econom ical", + "dre dge", + "% -", + "u we", + "tu bular", + "scoun cil", + "pe asants", + "fl er", + "tumb ler", + "he p", + "ford ham", + "row ley", + "initi als", + "ev asion", + "er nation", + "plu gins", + "coch ran", + "c attle", + "acid ity", + "ðŁİĬ ðŁİī", + "re grann", + "jump man", + "ef ace", + "x ma", + "patri archy", + "esco bar", + "cristi an", + "tip ton", + "nu eva", + "hack ney", + "back seat", + "kill arney", + "aid an", + "sta dion", + "simul taneous", + "ida ho", + "a je", + "u th", + "figu re", + "clo s", + "bur k", + "volun tar", + "rec ite", + "macfar lane", + "cur few", + "bou do", + "w gn", + "sti x", + "sla p", + "scrat ched", + "philli p", + "jour ne", + "ex pelled", + "wa z", + "u ke", + "tati ana", + "ou e", + "ho pp", + "dimit ri", + "ðŁĵ £", + "mato logist", + "electri fying", + "blu ffs", + "bill smafia", + "az cardinals", + "y aa", + "x mas", + "shar a", + "r ith", + "g ills", + "dre s", + "bar ton", + "authori zation", + "imperi alism", + "home of", + "to do", + "foot path", + "band width", + "visit spain", + "moh sin", + "erup ted", + "mi ki", + "insig nia", + "mike l", + "ss h", + "ger a", + "bank holiday", + "aw an", + "t weak", + "star craft", + "e al", + "construc tion", + "skelet ons", + "le ep", + "ine m", + "bar clay", + "ship wreck", + "monsi eur", + "yo h", + "ron t", + "form ative", + "ser o", + "le p", + "horse man", + "hoo sier", + "haz mat", + "cylin ders", + "cen ti", + "ðŁĴ¥ðŁĴ¥ ðŁĴ¥", + "re em", + "na ire", + "mus ically", + "gras shopper", + "est onian", + "termin ology", + "ro main", + "blogger rt", + "tox in", + "stan ce", + "cultiv ated", + "an ast", + "ðŁIJ į", + "shi mano", + "go pher", + "ene i", + "recycla ble", + "gam ification", + "fight for", + "c q", + "avoc ados", + "ke ys", + "eli ke", + "gly cer", + "shak ur", + "mobili zation", + "gal ley", + "expla in", + "ex changed", + "pe th", + "obe dience", + "illa ge", + "en nis", + "ãĥ ŀ", + "wi v", + "walla bies", + "ma ar", + "ig ers", + "fin tech", + "fin alized", + "wo j", + "meaning less", + "in field", + "onna ise", + "e et", + "bron te", + "pass ages", + "ðŁij §", + "strick 
land", + "northern lights", + "lom ond", + "h tc", + "wr ay", + "shi fter", + "di alog", + "ðŁį į", + ">> >>>>", + "te atime", + "ste ch", + "sic huan", + "qu ill", + "fran ca", + "comple mentary", + "bar rington", + "marcu s", + "mal am", + "goo oo", + "for sa", + "elec tra", + "af s", + "âĹ Ĩ", + "tri fe", + "sn azzy", + "fo lia", + "and olan", + "after dark", + "wood son", + "stra de", + "litt lest", + "o gun", + "con wy", + "co wards", + "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤðŁĺĤ", + "íĬ ¸", + "se ul", + "mur phy", + "dun ks", + "kapil shar", + "jo achim", + "wom ack", + "equal ity", + "aver ages", + "a ine", + "ðŁ¦ Ī", + "tac ular", + "dis ability", + "u ked", + "mid century", + "bar thol", + "teas ers", + "tab ern", + "nj caa", + "sp out", + "op i", + "ku bball", + "bl om", + "so ar", + "popu lism", + "meth yl", + "ðŁijĬ ðŁı¼", + "o spre", + "alo ils", + "ðŁĵ ĸ", + "ðŁĮ ļ", + "x er", + "sp illing", + "publ ica", + "car dam", + "adi sh", + "sa cha", + "p kg", + "bu da", + "lyric ist", + "i bc", + "gru mp", + "ho ver", + "hal ep", + "anti body", + "anem one", + "âĻ¥âĻ¥ âĻ¥âĻ¥", + "m cl", + "litho graph", + "cc u", + "s fest", + "path ic", + "calli ster", + "otta wa", + "gun sn", + "rut ger", + "hali but", + "en vision", + "differenti ate", + "ðŁļĢ ðŁļĢ", + "pir an", + "lat el", + "uc n", + "trou bad", + "ra ine", + "fierc ely", + "learn english", + "lea se", + "wex mondays", + "em it", + "dray ton", + "bur rell", + "scuba diving", + "hol ler", + "dr u", + "clo cked", + "w ral", + "ap ro", + "trans lucent", + "w bo", + "patri arch", + "mo ja", + "lan nister", + "fish ery", + "ne derland", + "mil dly", + "mi rai", + "ma ko", + "ja p", + "ðŁĺ©ðŁĺ© ðŁĺ©", + "pro statec", + "p anna", + "ar ama", + "under taking", + "tomp kins", + "ne op", + "soli ds", + "sav oury", + "e ames", + "cut lery", + "wood bridge", + "steam er", + "ri zzo", + "wild cat", + "rat na", + "lamin ated", + "kin eni", + "jal ap", + "ai des", + "acknowle dges", + "?! ?!?!", + "! 
ðŁİī", + "w afc", + "mag gio", + "ha ves", + "dar je", + "of i", + "gr il", + "v asi", + "bru x", + "mo hd", + "fake speare", + "arn old", + "r mb", + "for be", + "wal leye", + "ro di", + "therapeu tics", + "strate gi", + "ob ste", + "mu dder", + "download able", + "dd ings", + "d ca", + "asi angames", + "campe on", + "appropri ation", + "th century", + "ram atta", + "dra ped", + "bul lion", + "mu c", + "one x", + "se greg", + "ophel ia", + "bod ily", + "âĿ¤ ðŁĺį", + "wi zar", + "te ased", + "ade my", + "to id", + "sur a", + "lazar us", + "sn ickers", + "ma se", + "lo h", + "bow ed", + "bibli o", + "x change", + "har lan", + "gho shal", + "flavor ful", + "bha gat", + "alle z", + "whiche ver", + "ten stein", + "disc er", + "organ iser", + "mt g", + "dream liner", + "t se", + "hok kaido", + "mo k", + "indulg ent", + "hick man", + "blin ded", + "al yn", + "aaa ah", + "sp ool", + "lough borough", + "inter pret", + "et v", + "aristo tle", + "optimi zing", + "avici i", + "madu rai", + "ju li", + "naw az", + "mat chups", + "ab ide", + "paint ing", + "w elling", + "vel i", + "octag on", + "in scribed", + "po king", + "plac er", + "life cycle", + "kili g", + "g sp", + "eli ves", + "cle ments", + "na sheed", + "me sut", + "incarcer ated", + "dist illed", + "wal ang", + "delic acy", + "del gado", + "che z", + "ch ita", + "ad ero", + "tu x", + "pati l", + "o do", + "abh cosmetics", + "tv c", + "p bc", + "in accurate", + "hardwork paysoff", + "ball er", + "quot ation", + "merchandi sing", + "ga stri", + "defen ses", + "dro gba", + "bex hill", + "ban kno", + "win ona", + "si eg", + "p gs", + "hahah ha", + "agu chi", + "su bram", + "mirac le", + "de sch", + "li bre", + "ba cher", + "ent ine", + "bbcra di", + "lou dest", + "r ps", + "pi erc", + "fr yer", + "storm trooper", + "rafael nadal", + "pas co", + "exhau stion", + "epic onetsy", + "rc tid", + "kel lie", + "ga ines", + "d bz", + "sm riti", + "s bridge", + "lim ited", + "cla w", + "technic al", + "bio graphical", + "ado red", + "ภ°", + "exclu de", + "ac adia", + "key boards", + "fur man", + "so ca", + "sur u", + "ni ps", + "sw aps", + "server less", + "run e", + "pu ffy", + "north ampton", + "nish ings", + "hen der", + "cartri dges", + "gun shot", + "ðŁĵ ¹", + "fil ament", + "respon dents", + "pey ton", + "mountaine er", + "mer ging", + "life span", + "intimid ation", + "p afc", + "nl wx", + "expan sive", + "pur r", + "f ck", + "ca e", + "at ti", + "tele thon", + "so hn", + "mend el", + "lo pes", + "dor i", + "un broken", + "te red", + "tast ings", + "in active", + "disin tegr", + "t assel", + "share the", + "pi ano", + "is lay", + "air space", + "z awa", + "ricci ardo", + "ming ton", + "fresh er", + "cur ry", + "re vs", + "pharo ah", + "h mv", + "exhilar ating", + "wh oo", + "lin kin", + "kri spy", + "competen cy", + "ste wards", + "ne bu", + "kat su", + "ad mins", + "baz ar", + "as ar", + "giving back", + "s summit", + "song z", + "lin us", + "raj kumar", + "farm ington", + "fanta sia", + "ðŁĺ´ ðŁĺ´", + "so bri", + "lis se", + "barry more", + "pri sm", + "blo b", + "sen ew", + "mono xide", + "exp ire", + "eigh teen", + "di pper", + "xi ao", + "kil t", + "hin ch", + "bbc sport", + "bam boo", + "p ter", + "ex al", + "ðŁ¦ ĭ", + "ham lin", + "expe ditions", + "star gazing", + "food security", + "wy lie", + "ul f", + "st ingly", + "on storm", + "lo eb", + "bro ome", + "bn ha", + "pancre atic", + "eli ve", + "!!!!!!!! 
!!!", + "ther apper", + "ortho pedic", + "avengers endgame", + "antit rust", + "ìļ °", + "go te", + "om d", + "off side", + "gy llen", + "win eries", + "white water", + "ad l", + "lu pita", + "exce eds", + "consi sted", + "chew bacca", + "ash leigh", + "nhl jets", + "is san", + "sh ld", + "hay at", + "cran berries", + "ð٤ĺ ðŁı½", + "rock the", + "spring training", + "fall out", + "dairy free", + "wa j", + "un decided", + "so wn", + "rc n", + "north wales", + "htt r", + "fu mble", + "d its", + "comp elled", + "popu list", + "min ted", + "blan chett", + ". ''", + "pro pulsion", + "m illa", + "au berg", + "her tz", + "h ta", + "u daipur", + "serendip ity", + "azte cs", + "als ace", + "ðŁIJ ij", + "lu n", + "sho es", + "char li", + "gar za", + "ðŁĴ Ł", + "pro biotics", + "fox tv", + "ol is", + "mi ff", + "loc alized", + "diffu ser", + "si gue", + "fun ko", + "rend ous", + "ðŁĴ ij", + "jeky ll" + ] + } +} \ No newline at end of file diff --git a/ComfyUI-SUPIR/configs/tokenizer/tokenizer_config.json b/ComfyUI-SUPIR/configs/tokenizer/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..702bb12920b291cade3706cf215c1604d2255d93 --- /dev/null +++ b/ComfyUI-SUPIR/configs/tokenizer/tokenizer_config.json @@ -0,0 +1,34 @@ +{ + "unk_token": { + "content": "<|endoftext|>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": true, + "__type": "AddedToken" + }, + "bos_token": { + "content": "<|startoftext|>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": true, + "__type": "AddedToken" + }, + "eos_token": { + "content": "<|endoftext|>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": true, + "__type": "AddedToken" + }, + "pad_token": "<|endoftext|>", + "add_prefix_space": false, + "errors": "replace", + "do_lower_case": true, + "name_or_path": "openai/clip-vit-base-patch32", + "model_max_length": 77, + "special_tokens_map_file": "./special_tokens_map.json", + "tokenizer_class": "CLIPTokenizer" +} diff --git a/ComfyUI-SUPIR/configs/tokenizer/vocab.json b/ComfyUI-SUPIR/configs/tokenizer/vocab.json new file mode 100644 index 0000000000000000000000000000000000000000..4297ea6a8d2bae1fea8f48b45e257814dcb11f69 --- /dev/null +++ b/ComfyUI-SUPIR/configs/tokenizer/vocab.json @@ -0,0 +1 @@ +{"!": 0, "\"": 1, "#": 2, "$": 3, "%": 4, "&": 5, "'": 6, "(": 7, ")": 8, "*": 9, "+": 10, ",": 11, "-": 12, ".": 13, "/": 14, "0": 15, "1": 16, "2": 17, "3": 18, "4": 19, "5": 20, "6": 21, "7": 22, "8": 23, "9": 24, ":": 25, ";": 26, "<": 27, "=": 28, ">": 29, "?": 30, "@": 31, "A": 32, "B": 33, "C": 34, "D": 35, "E": 36, "F": 37, "G": 38, "H": 39, "I": 40, "J": 41, "K": 42, "L": 43, "M": 44, "N": 45, "O": 46, "P": 47, "Q": 48, "R": 49, "S": 50, "T": 51, "U": 52, "V": 53, "W": 54, "X": 55, "Y": 56, "Z": 57, "[": 58, "\\": 59, "]": 60, "^": 61, "_": 62, "`": 63, "a": 64, "b": 65, "c": 66, "d": 67, "e": 68, "f": 69, "g": 70, "h": 71, "i": 72, "j": 73, "k": 74, "l": 75, "m": 76, "n": 77, "o": 78, "p": 79, "q": 80, "r": 81, "s": 82, "t": 83, "u": 84, "v": 85, "w": 86, "x": 87, "y": 88, "z": 89, "{": 90, "|": 91, "}": 92, "~": 93, "¡": 94, "¢": 95, "£": 96, "¤": 97, "¥": 98, "¦": 99, "§": 100, "¨": 101, "©": 102, "ª": 103, "«": 104, "¬": 105, "®": 106, "¯": 107, "°": 108, "±": 109, "²": 110, "³": 111, "´": 112, "µ": 113, "¶": 114, "·": 115, "¸": 116, "¹": 117, "º": 118, "»": 119, "¼": 120, "½": 121, "¾": 122, "¿": 123, "À": 124, "Á": 125, "Â": 126, "Ã": 127, "Ä": 128, "Å": 129, "Æ": 130, "Ç": 131, "È": 132, "É": 
133, "Ê": 134, "Ë": 135, "Ì": 136, "Í": 137, "Î": 138, "Ï": 139, "Ð": 140, "Ñ": 141, "Ò": 142, "Ó": 143, "Ô": 144, "Õ": 145, "Ö": 146, "×": 147, "Ø": 148, "Ù": 149, "Ú": 150, "Û": 151, "Ü": 152, "Ý": 153, "Þ": 154, "ß": 155, "à": 156, "á": 157, "â": 158, "ã": 159, "ä": 160, "å": 161, "æ": 162, "ç": 163, "è": 164, "é": 165, "ê": 166, "ë": 167, "ì": 168, "í": 169, "î": 170, "ï": 171, "ð": 172, "ñ": 173, "ò": 174, "ó": 175, "ô": 176, "õ": 177, "ö": 178, "÷": 179, "ø": 180, "ù": 181, "ú": 182, "û": 183, "ü": 184, "ý": 185, "þ": 186, "ÿ": 187, "Ā": 188, "ā": 189, "Ă": 190, "ă": 191, "Ą": 192, "ą": 193, "Ć": 194, "ć": 195, "Ĉ": 196, "ĉ": 197, "Ċ": 198, "ċ": 199, "Č": 200, "č": 201, "Ď": 202, "ď": 203, "Đ": 204, "đ": 205, "Ē": 206, "ē": 207, "Ĕ": 208, "ĕ": 209, "Ė": 210, "ė": 211, "Ę": 212, "ę": 213, "Ě": 214, "ě": 215, "Ĝ": 216, "ĝ": 217, "Ğ": 218, "ğ": 219, "Ġ": 220, "ġ": 221, "Ģ": 222, "ģ": 223, "Ĥ": 224, "ĥ": 225, "Ħ": 226, "ħ": 227, "Ĩ": 228, "ĩ": 229, "Ī": 230, "ī": 231, "Ĭ": 232, "ĭ": 233, "Į": 234, "į": 235, "İ": 236, "ı": 237, "IJ": 238, "ij": 239, "Ĵ": 240, "ĵ": 241, "Ķ": 242, "ķ": 243, "ĸ": 244, "Ĺ": 245, "ĺ": 246, "Ļ": 247, "ļ": 248, "Ľ": 249, "ľ": 250, "Ŀ": 251, "ŀ": 252, "Ł": 253, "ł": 254, "Ń": 255, "!": 256, "\"": 257, "#": 258, "$": 259, "%": 260, "&": 261, "'": 262, "(": 263, ")": 264, "*": 265, "+": 266, ",": 267, "-": 268, ".": 269, "/": 270, "0": 271, "1": 272, "2": 273, "3": 274, "4": 275, "5": 276, "6": 277, "7": 278, "8": 279, "9": 280, ":": 281, ";": 282, "<": 283, "=": 284, ">": 285, "?": 286, "@": 287, "A": 288, "B": 289, "C": 290, "D": 291, "E": 292, "F": 293, "G": 294, "H": 295, "I": 296, "J": 297, "K": 298, "L": 299, "M": 300, "N": 301, "O": 302, "P": 303, "Q": 304, "R": 305, "S": 306, "T": 307, "U": 308, "V": 309, "W": 310, "X": 311, "Y": 312, "Z": 313, "[": 314, "\\": 315, "]": 316, "^": 317, "_": 318, "`": 319, "a": 320, "b": 321, "c": 322, "d": 323, "e": 324, "f": 325, "g": 326, "h": 327, "i": 328, "j": 329, "k": 330, "l": 331, "m": 332, "n": 333, "o": 334, "p": 335, "q": 336, "r": 337, "s": 338, "t": 339, "u": 340, "v": 341, "w": 342, "x": 343, "y": 344, "z": 345, "{": 346, "|": 347, "}": 348, "~": 349, "¡": 350, "¢": 351, "£": 352, "¤": 353, "¥": 354, "¦": 355, "§": 356, "¨": 357, "©": 358, "ª": 359, "«": 360, "¬": 361, "®": 362, "¯": 363, "°": 364, "±": 365, "²": 366, "³": 367, "´": 368, "µ": 369, "¶": 370, "·": 371, "¸": 372, "¹": 373, "º": 374, "»": 375, "¼": 376, "½": 377, "¾": 378, "¿": 379, "À": 380, "Á": 381, "Â": 382, "Ã": 383, "Ä": 384, "Å": 385, "Æ": 386, "Ç": 387, "È": 388, "É": 389, "Ê": 390, "Ë": 391, "Ì": 392, "Í": 393, "Î": 394, "Ï": 395, "Ð": 396, "Ñ": 397, "Ò": 398, "Ó": 399, "Ô": 400, "Õ": 401, "Ö": 402, "×": 403, "Ø": 404, "Ù": 405, "Ú": 406, "Û": 407, "Ü": 408, "Ý": 409, "Þ": 410, "ß": 411, "à": 412, "á": 413, "â": 414, "ã": 415, "ä": 416, "å": 417, "æ": 418, "ç": 419, "è": 420, "é": 421, "ê": 422, "ë": 423, "ì": 424, "í": 425, "î": 426, "ï": 427, "ð": 428, "ñ": 429, "ò": 430, "ó": 431, "ô": 432, "õ": 433, "ö": 434, "÷": 435, "ø": 436, "ù": 437, "ú": 438, "û": 439, "ü": 440, "ý": 441, "þ": 442, "ÿ": 443, "Ā": 444, "ā": 445, "Ă": 446, "ă": 447, "Ą": 448, "ą": 449, "Ć": 450, "ć": 451, "Ĉ": 452, "ĉ": 453, "Ċ": 454, "ċ": 455, "Č": 456, "č": 457, "Ď": 458, "ď": 459, "Đ": 460, "đ": 461, "Ē": 462, "ē": 463, "Ĕ": 464, "ĕ": 465, "Ė": 466, "ė": 467, "Ę": 468, "ę": 469, "Ě": 470, "ě": 471, "Ĝ": 472, "ĝ": 473, "Ğ": 474, "ğ": 475, "Ġ": 476, "ġ": 477, "Ģ": 478, "ģ": 479, "Ĥ": 480, "ĥ": 481, "Ħ": 482, "ħ": 483, "Ĩ": 484, "ĩ": 485, "Ī": 486, "ī": 487, "Ĭ": 
488, "ĭ": 489, "Į": 490, "į": 491, "İ": 492, "ı": 493, "IJ": 494, "ij": 495, "Ĵ": 496, "ĵ": 497, "Ķ": 498, "ķ": 499, "ĸ": 500, "Ĺ": 501, "ĺ": 502, "Ļ": 503, "ļ": 504, "Ľ": 505, "ľ": 506, "Ŀ": 507, "ŀ": 508, "Ł": 509, "ł": 510, "Ń": 511, "in": 512, "th": 513, "an": 514, "re": 515, "ar": 516, "er": 517, "the": 518, "ing": 519, "ou": 520, "on": 521, "st": 522, "or": 523, "en": 524, "on": 525, "al": 526, "at": 527, "er": 528, "it": 529, "in": 530, "to": 531, "ro": 532, "is": 533, "le": 534, "ic": 535, "at": 536, "and": 537, "ed": 538, "of": 539, "ch": 540, "or": 541, "es": 542, "il": 543, "el": 544, "st": 545, "ac": 546, "om": 547, "am": 548, "lo": 549, "an": 550, "ay": 551, "sh": 552, "ri": 553, "li": 554, "ti": 555, "for": 556, "ne": 557, "ðŁ": 558, "ra": 559, "ha": 560, "de": 561, "ol": 562, "ve": 563, "si": 564, "ur": 565, "al": 566, "se": 567, "'s": 568, "un": 569, "di": 570, "be": 571, "la": 572, "wh": 573, "oo": 574, "day": 575, "en": 576, "ma": 577, "no": 578, "le": 579, "to": 580, "our": 581, "ir": 582, "gh": 583, "wit": 584, "it": 585, "yo": 586, "as": 587, "sp": 588, "this": 589, "ts": 590, "ati": 591, "you": 592, "with": 593, "ad": 594, "is": 595, "ab": 596, "ly": 597, "we": 598, "the": 599, "te": 600, "as": 601, "ag": 602, "vi": 603, "pp": 604, "su": 605, "ho": 606, "my": 607, "..": 608, "bu": 609, "com": 610, "se": 611, "ers": 612, "me": 613, "me": 614, "all": 615, "con": 616, "mo": 617, "ke": 618, "ge": 619, "out": 620, "ent": 621, "co": 622, "fe": 623, "ver": 624, "ar": 625, "fro": 626, "au": 627, "po": 628, "ce": 629, "ght": 630, "are": 631, "ss": 632, "from": 633, "ch": 634, "tr": 635, "oun": 636, "one": 637, "by": 638, "do": 639, "th": 640, "wor": 641, "ere": 642, "ke": 643, "pro": 644, "for": 645, "ds": 646, "bo": 647, "ta": 648, "we": 649, "go": 650, "he": 651, "ter": 652, "ing": 653, "de": 654, "be": 655, "ation": 656, "mor": 657, "ay": 658, "ex": 659, "ill": 660, "pe": 661, "ks": 662, "sc": 663, "lu": 664, "fu": 665, "qu": 666, "ver": 667, "ðŁĺ": 668, "ju": 669, "mu": 670, "ate": 671, "and": 672, "ve": 673, "king": 674, "mar": 675, "op": 676, "hi": 677, "...": 678, "pre": 679, "ad": 680, "ru": 681, "that": 682, "jo": 683, "of": 684, "ce": 685, "new": 686, "am": 687, "ap": 688, "gre": 689, "ss": 690, "du": 691, "now": 692, "ye": 693, "ting": 694, "your": 695, "ity": 696, "ni": 697, "ci": 698, "par": 699, "gu": 700, "fi": 701, "af": 702, "per": 703, "ter": 704, "up": 705, "so": 706, "gi": 707, "ons": 708, "gr": 709, "ge": 710, "br": 711, "pl": 712, "'t": 713, "mi": 714, "ine": 715, "wee": 716, "bi": 717, "us": 718, "sho": 719, "have": 720, "today": 721, "av": 722, "man": 723, "ent": 724, "ack": 725, "ure": 726, "our": 727, "âĢ": 728, "cu": 729, "ld": 730, "loo": 731, "im": 732, "ice": 733, "som": 734, "fin": 735, "red": 736, "ren": 737, "ood": 738, "was": 739, "tion": 740, "pi": 741, "ir": 742, "ther": 743, "ty": 744, "ph": 745, "ard": 746, "ec": 747, "!!": 748, "mon": 749, "more": 750, "will": 751, "tra": 752, "can": 753, "col": 754, "pu": 755, "te": 756, "wn": 757, "mb": 758, "so": 759, "iti": 760, "just": 761, "ning": 762, "here": 763, "tu": 764, "pa": 765, "pr": 766, "but": 767, "what": 768, "ally": 769, "fir": 770, "min": 771, "ca": 772, "ant": 773, "sa": 774, "ted": 775, "ev": 776, "ment": 777, "fa": 778, "get": 779, "ame": 780, "about": 781, "gra": 782, "not": 783, "happ": 784, "ays": 785, "man": 786, "his": 787, "time": 788, "like": 789, "gh": 790, "has": 791, "than": 792, "love": 793, "art": 794, "ste": 795, "ding": 796, "he": 797, "cre": 798, "ws": 799, "wat": 
800, "der": 801, "ite": 802, "ser": 803, "ace": 804, "age": 805, "end": 806, "str": 807, "aw": 808, "stor": 809, "re": 810, "car": 811, "ell": 812, "all": 813, "ps": 814, "fri": 815, "pho": 816, "por": 817, "do": 818, "ak": 819, "wi": 820, "fre": 821, "who": 822, "shi": 823, "boo": 824, "son": 825, "ell": 826, "when": 827, "ill": 828, "how": 829, "great": 830, "win": 831, "el": 832, "bl": 833, "ssi": 834, "ali": 835, "some": 836, "ðŁĴ": 837, "ton": 838, "der": 839, "les": 840, "pla": 841, "ï¸": 842, "ed": 843, "sch": 844, "hu": 845, "ong": 846, "don": 847, "ki": 848, "sh": 849, "ann": 850, "cor": 851, "..": 852, "ound": 853, "az": 854, "ine": 855, "ary": 856, "ful": 857, "stu": 858, "ould": 859, "sti": 860, "go": 861, "see": 862, "able": 863, "ars": 864, "ll": 865, "mis": 866, "ber": 867, "ck": 868, "wa": 869, "ents": 870, "no": 871, "sig": 872, "fe": 873, "first": 874, "et": 875, "spe": 876, "ack": 877, "if": 878, "ous": 879, "'m": 880, "ster": 881, "app": 882, "ang": 883, "ance": 884, "ans": 885, "good": 886, "bre": 887, "ever": 888, "they": 889, "tic": 890, "come": 891, "off": 892, "back": 893, "ase": 894, "ings": 895, "old": 896, "ight": 897, "fo": 898, "her": 899, "happy": 900, "pic": 901, "its": 902, "ving": 903, "us": 904, "mat": 905, "hom": 906, "dy": 907, "em": 908, "sk": 909, "ying": 910, "their": 911, "led": 912, "ry": 913, "ul": 914, "har": 915, "ck": 916, "ton": 917, "onal": 918, "hel": 919, "ric": 920, "bir": 921, "vie": 922, "way": 923, "tri": 924, "da": 925, "ple": 926, "bro": 927, "sto": 928, "ool": 929, "night": 930, "tru": 931, "ba": 932, "read": 933, "res": 934, "year": 935, "fr": 936, "tor": 937, "als": 938, "coun": 939, "cla": 940, "ture": 941, "vel": 942, "ated": 943, "lec": 944, "end": 945, "thing": 946, "vo": 947, "ici": 948, "best": 949, "can": 950, "work": 951, "last": 952, "after": 953, "ence": 954, "pri": 955, "pe": 956, "es": 957, "il": 958, "â̦": 959, "dre": 960, "ys": 961, "over": 962, "ies": 963, "ðŁij": 964, "comm": 965, "tw": 966, "ink": 967, "sun": 968, "cl": 969, "life": 970, "tt": 971, "ach": 972, "land": 973, "sy": 974, "tre": 975, "tal": 976, "pol": 977, "sm": 978, "duc": 979, "sal": 980, "ft": 981, "'re": 982, "che": 983, "war": 984, "tur": 985, "ations": 986, "ach": 987, "ms": 988, "ile": 989, "pm": 990, "ough": 991, "ate": 992, "star": 993, "week": 994, "!!!": 995, "clu": 996, "there": 997, "ner": 998, "tom": 999, "sel": 1000, "ï¸ı": 1001, "world": 1002, "ves": 1003, "cam": 1004, "got": 1005, "inter": 1006, "off": 1007, "um": 1008, "tonight": 1009, "other": 1010, "hou": 1011, "look": 1012, "je": 1013, "id": 1014, "sion": 1015, "beau": 1016, "att": 1017, "eli": 1018, "ort": 1019, "rec": 1020, "ff": 1021, "ster": 1022, "supp": 1023, "gen": 1024, "been": 1025, "ily": 1026, "team": 1027, "mm": 1028, "ic": 1029, "peop": 1030, "itt": 1031, "ats": 1032, "only": 1033, "mber": 1034, "eng": 1035, "bri": 1036, "mp": 1037, "know": 1038, "bur": 1039, "bar": 1040, "ins": 1041, "low": 1042, "she": 1043, "row": 1044, "âĿ": 1045, "tro": 1046, "people": 1047, "via": 1048, "low": 1049, "aga": 1050, "bet": 1051, "xt": 1052, "fac": 1053, "char": 1054, "ear": 1055, "wal": 1056, "sen": 1057, "fam": 1058, "ble": 1059, "nati": 1060, "ish": 1061, "nor": 1062, "game": 1063, "live": 1064, "sco": 1065, "ley": 1066, "don": 1067, "ick": 1068, "ball": 1069, "very": 1070, "these": 1071, "pan": 1072, "ia": 1073, "ating": 1074, "cr": 1075, "are": 1076, "gir": 1077, "make": 1078, "stre": 1079, "show": 1080, ".\"": 1081, "fl": 1082, "up": 1083, "dr": 1084, "thanks": 1085, "illi": 
1086, "wom": 1087, "sts": 1088, "ig": 1089, "sur": 1090, "every": 1091, "cur": 1092, "view": 1093, "let": 1094, "into": 1095, "most": 1096, "na": 1097, "indi": 1098, "gar": 1099, "had": 1100, "sou": 1101, "ved": 1102, "ant": 1103, "ition": 1104, "made": 1105, "fol": 1106, "uni": 1107, "ited": 1108, "ðŁı": 1109, "ical": 1110, "thr": 1111, "ready": 1112, "chec": 1113, "dra": 1114, "kes": 1115, "book": 1116, "ep": 1117, "sic": 1118, "morning": 1119, "news": 1120, "cau": 1121, "ct": 1122, "well": 1123, "anc": 1124, "photo": 1125, "than": 1126, "ors": 1127, "birth": 1128, "gg": 1129, "out": 1130, "next": 1131, "some": 1132, "ening": 1133, "story": 1134, "chri": 1135, "down": 1136, "home": 1137, "ffe": 1138, "free": 1139, "da": 1140, "bor": 1141, "fil": 1142, "cial": 1143, "thank": 1144, "side": 1145, "lear": 1146, "que": 1147, "line": 1148, "ten": 1149, "ates": 1150, "years": 1151, "my": 1152, "photo": 1153, "beauti": 1154, "right": 1155, "nu": 1156, "form": 1157, "ship": 1158, "ban": 1159, "ther": 1160, "days": 1161, "gam": 1162, "ason": 1163, "gy": 1164, "ðŁİ": 1165, "birthday": 1166, "set": 1167, "ick": 1168, "et": 1169, "still": 1170, "coming": 1171, "take": 1172, "ðŁĩ": 1173, "bb": 1174, "sol": 1175, "son": 1176, "den": 1177, "ep": 1178, "music": 1179, "them": 1180, "den": 1181, "why": 1182, "foo": 1183, "cra": 1184, "amaz": 1185, "wn": 1186, "hol": 1187, "tting": 1188, "wr": 1189, "ue": 1190, "mag": 1191, "cro": 1192, "lan": 1193, "clo": 1194, "bra": 1195, "ak": 1196, "sing": 1197, "cal": 1198, "read": 1199, "'ve": 1200, "joh": 1201, "bab": 1202, "dri": 1203, "blo": 1204, "big": 1205, "eric": 1206, "int": 1207, "tor": 1208, "try": 1209, "la": 1210, "leg": 1211, "house": 1212, "mic": 1213, "val": 1214, "beautiful": 1215, "litt": 1216, "check": 1217, "new": 1218, "vers": 1219, "sw": 1220, "ari": 1221, "play": 1222, "her": 1223, "âĢĵ": 1224, "win": 1225, "ma": 1226, "congr": 1227, "school": 1228, "fun": 1229, ".@": 1230, "heal": 1231, "ich": 1232, "del": 1233, "where": 1234, "lon": 1235, "ket": 1236, "two": 1237, "much": 1238, "watch": 1239, "ven": 1240, "ded": 1241, "ast": 1242, "ked": 1243, "bas": 1244, "going": 1245, "mp": 1246, "ever": 1247, "ways": 1248, "roo": 1249, "desig": 1250, "ly": 1251, "sed": 1252, "top": 1253, "lin": 1254, "chan": 1255, "too": 1256, "iting": 1257, "dent": 1258, "ghts": 1259, "ty": 1260, "spo": 1261, "need": 1262, "blu": 1263, "inst": 1264, "being": 1265, "âĿ¤": 1266, "wel": 1267, "ls": 1268, "him": 1269, "may": 1270, "sting": 1271, "na": 1272, "ely": 1273, "little": 1274, "ga": 1275, "nat": 1276, "tomor": 1277, "mc": 1278, "hon": 1279, "want": 1280, "air": 1281, "pic": 1282, "americ": 1283, "per": 1284, "less": 1285, "week": 1286, "vel": 1287, "ah": 1288, "cap": 1289, "cham": 1290, "ger": 1291, "tim": 1292, "tomorrow": 1293, "ness": 1294, "state": 1295, "hal": 1296, "serv": 1297, "ze": 1298, "os": 1299, "pat": 1300, "vis": 1301, "exc": 1302, "sin": 1303, "ff": 1304, "city": 1305, "cen": 1306, "any": 1307, "bel": 1308, "summ": 1309, "tin": 1310, "would": 1311, "looking": 1312, "ko": 1313, "cele": 1314, "family": 1315, "mer": 1316, "pow": 1317, "help": 1318, "bus": 1319, "co": 1320, "cle": 1321, "self": 1322, "ens": 1323, "ics": 1324, "tho": 1325, "ani": 1326, "cho": 1327, "lead": 1328, "bs": 1329, "twee": 1330, "think": 1331, "fore": 1332, "chil": 1333, "vide": 1334, "did": 1335, "ale": 1336, "chi": 1337, "vil": 1338, "ends": 1339, "wing": 1340, "pas": 1341, "'ll": 1342, "vol": 1343, "sa": 1344, "gs": 1345, "many": 1346, "jec": 1347, "before": 1348, "graph": 
1349, "ny": 1350, "uring": 1351, "wil": 1352, "dd": 1353, "buil": 1354, "fav": 1355, "sted": 1356, "tran": 1357, "ling": 1358, "oud": 1359, "dge": 1360, "fiel": 1361, "national": 1362, "sta": 1363, "cer": 1364, "were": 1365, "ina": 1366, "season": 1367, "cou": 1368, "ned": 1369, "amazing": 1370, "tions": 1371, "celebr": 1372, "ns": 1373, "ath": 1374, "head": 1375, "sday": 1376, "dar": 1377, "loc": 1378, "vin": 1379, "another": 1380, "goo": 1381, "sat": 1382, "ny": 1383, "join": 1384, "pres": 1385, "ses": 1386, "sing": 1387, "ana": 1388, "ining": 1389, "....": 1390, "cour": 1391, "ï¸ı": 1392, "act": 1393, "cause": 1394, "light": 1395, "ams": 1396, "ta": 1397, "bal": 1398, "fc": 1399, "high": 1400, "offici": 1401, "tt": 1402, "christ": 1403, "dic": 1404, "day": 1405, "ral": 1406, "hor": 1407, ":)": 1408, "visi": 1409, "nam": 1410, "ob": 1411, "mas": 1412, "ght": 1413, "really": 1414, "tun": 1415, "find": 1416, "through": 1417, "port": 1418, "ut": 1419, "tive": 1420, "sty": 1421, "ne": 1422, "ore": 1423, "ðŁĺĤ": 1424, "support": 1425, "never": 1426, "even": 1427, "ðŁĶ": 1428, "ha": 1429, "ya": 1430, "ld": 1431, "uk": 1432, "ran": 1433, "jam": 1434, "with": 1435, "medi": 1436, "des": 1437, "ney": 1438, "ching": 1439, "ale": 1440, "hy": 1441, "kin": 1442, "!!": 1443, "dy": 1444, "place": 1445, "also": 1446, "ble": 1447, "which": 1448, "black": 1449, "bli": 1450, "say": 1451, "park": 1452, "play": 1453, "ire": 1454, "video": 1455, "weekend": 1456, "ail": 1457, "key": 1458, "pt": 1459, "ward": 1460, "friday": 1461, "din": 1462, "iness": 1463, "gro": 1464, "ben": 1465, "always": 1466, "tball": 1467, "ago": 1468, "mil": 1469, "cy": 1470, "produc": 1471, "disc": 1472, "under": 1473, "please": 1474, "spor": 1475, "full": 1476, "ey": 1477, "ðŁĻ": 1478, "ise": 1479, "ities": 1480, "cat": 1481, "kno": 1482, "use": 1483, "fore": 1484, "ker": 1485, "art": 1486, "high": 1487, "open": 1488, "san": 1489, "ef": 1490, "ours": 1491, "shed": 1492, "stri": 1493, "dro": 1494, "again": 1495, "im": 1496, "ðŁĵ": 1497, "enjo": 1498, "fun": 1499, "getting": 1500, "pen": 1501, "ger": 1502, "cli": 1503, "any": 1504, "every": 1505, "eu": 1506, "women": 1507, "âľ": 1508, "est": 1509, "could": 1510, "ry": 1511, "\"@": 1512, "thou": 1513, "sha": 1514, "commun": 1515, "ber": 1516, "dents": 1517, "dis": 1518, "while": 1519, "away": 1520, "dio": 1521, "ham": 1522, "gla": 1523, "date": 1524, "ka": 1525, "miss": 1526, "unch": 1527, "won": 1528, "inf": 1529, "room": 1530, "ga": 1531, "real": 1532, "exper": 1533, "direc": 1534, "should": 1535, "spr": 1536, "gol": 1537, "long": 1538, "better": 1539, "ori": 1540, "ey": 1541, "ience": 1542, "ils": 1543, "zz": 1544, "han": 1545, "found": 1546, "vs": 1547, "âĻ": 1548, "post": 1549, "tic": 1550, "part": 1551, "men": 1552, "rence": 1553, "cess": 1554, "vic": 1555, "sil": 1556, "shop": 1557, "ðŁĺĤ": 1558, "food": 1559, "val": 1560, "stic": 1561, "you": 1562, "says": 1563, "elec": 1564, "star": 1565, "oc": 1566, "land": 1567, "id": 1568, "ction": 1569, "field": 1570, "sof": 1571, "start": 1572, "water": 1573, "friends": 1574, "ones": 1575, "ðŁĮ": 1576, "fla": 1577, "far": 1578, "white": 1579, "party": 1580, "inst": 1581, "grou": 1582, "tv": 1583, "everyone": 1584, "ment": 1585, "ja": 1586, "cha": 1587, "prin": 1588, "ants": 1589, "during": 1590, "lat": 1591, "lar": 1592, "west": 1593, "then": 1594, "ka": 1595, "youn": 1596, "insp": 1597, "inte": 1598, "ween": 1599, "visit": 1600, "against": 1601, "rele": 1602, "head": 1603, "ces": 1604, "town": 1605, "looks": 1606, "thre": 1607, "regi": 
1608, "rent": 1609, "projec": 1610, "girl": 1611, "sear": 1612, "wo": 1613, "mom": 1614, "car": 1615, "hun": 1616, "publi": 1617, "di": 1618, "ple": 1619, "call": 1620, "cri": 1621, "um": 1622, "ford": 1623, "perfe": 1624, "friend": 1625, "hard": 1626, "ssion": 1627, "test": 1628, "playing": 1629, "around": 1630, "because": 1631, "kets": 1632, "meet": 1633, "satur": 1634, "arti": 1635, "work": 1636, "jun": 1637, "ven": 1638, "run": 1639, "member": 1640, "port": 1641, "super": 1642, "twit": 1643, "sam": 1644, "els": 1645, "tly": 1646, "adv": 1647, "ative": 1648, "ath": 1649, "sure": 1650, "avail": 1651, "lar": 1652, "squ": 1653, "ards": 1654, "event": 1655, "men": 1656, "ll": 1657, "over": 1658, "logy": 1659, "ital": 1660, "times": 1661, "mal": 1662, "back": 1663, "coo": 1664, "making": 1665, "stru": 1666, "âģ": 1667, "itu": 1668, "shar": 1669, "gan": 1670, "cas": 1671, "sn": 1672, "summer": 1673, "picture": 1674, "fan": 1675, "hin": 1676, "christmas": 1677, "cy": 1678, "proud": 1679, "champi": 1680, "design": 1681, "pping": 1682, "hope": 1683, "ca": 1684, "available": 1685, "may": 1686, "wed": 1687, "photograph": 1688, "special": 1689, "sale": 1690, "stop": 1691, "ery": 1692, "awe": 1693, "ality": 1694, "history": 1695, "ama": 1696, "presi": 1697, "bru": 1698, "working": 1699, "done": 1700, "dr": 1701, "ken": 1702, "feat": 1703, "wood": 1704, "atest": 1705, "sunday": 1706, "movi": 1707, "vely": 1708, "sle": 1709, "face": 1710, "spec": 1711, "students": 1712, "by": 1713, "ham": 1714, "spon": 1715, "business": 1716, "dat": 1717, "ie": 1718, "ip": 1719, "soci": 1720, "glo": 1721, "hand": 1722, "recor": 1723, "rs": 1724, "mee": 1725, "keep": 1726, "pur": 1727, "health": 1728, "she": 1729, "comple": 1730, "god": 1731, "davi": 1732, "collec": 1733, "list": 1734, "ra": 1735, "club": 1736, "ters": 1737, "inclu": 1738, "things": 1739, "plan": 1740, "âĺ": 1741, "john": 1742, "shing": 1743, "atul": 1744, "soon": 1745, "blue": 1746, "gor": 1747, "saturday": 1748, "won": 1749, "congratul": 1750, "see": 1751, "âĿ¤ï¸ı": 1752, "those": 1753, "ðŁĺį": 1754, "final": 1755, "dou": 1756, "ith": 1757, "own": 1758, "road": 1759, "tour": 1760, "ast": 1761, "india": 1762, "til": 1763, "nd": 1764, "fer": 1765, "favor": 1766, "sul": 1767, "learn": 1768, "fire": 1769, "just": 1770, "group": 1771, "ah": 1772, "rac": 1773, "body": 1774, "ur": 1775, "care": 1776, "à¸": 1777, "plo": 1778, "oh": 1779, "pos": 1780, "give": 1781, "tech": 1782, "sub": 1783, "cent": 1784, "ering": 1785, "ym": 1786, "ility": 1787, "fic": 1788, "london": 1789, "vir": 1790, "guys": 1791, "ba": 1792, "ð٤": 1793, "baby": 1794, "scre": 1795, "ðŁĺį": 1796, "trump": 1797, "under": 1798, "change": 1799, "ian": 1800, "colle": 1801, "sses": 1802, "ler": 1803, "ssed": 1804, "nice": 1805, "announ": 1806, "power": 1807, "sar": 1808, "aking": 1809, "mini": 1810, "sli": 1811, "swee": 1812, "kar": 1813, "ful": 1814, "cru": 1815, "action": 1816, "ather": 1817, ").": 1818, "stand": 1819, "devel": 1820, "aa": 1821, "gan": 1822, "left": 1823, "lol": 1824, "rel": 1825, "trans": 1826, "ments": 1827, "int": 1828, "ef": 1829, "manag": 1830, "dig": 1831, "gener": 1832, "down": 1833, "pau": 1834, "tiv": 1835, "ku": 1836, "thur": 1837, "ken": 1838, "ston": 1839, "fans": 1840, "talk": 1841, "tweet": 1842, "too": 1843, "style": 1844, "prote": 1845, "secon": 1846, "fron": 1847, "awesome": 1848, "gl": 1849, "pal": 1850, "net": 1851, "sor": 1852, "lau": 1853, "gon": 1854, "since": 1855, "tty": 1856, "series": 1857, "memor": 1858, "beli": 1859, "film": 1860, "did": 1861, 
"dies": 1862, "ot": 1863, "congratulations": 1864, "pra": 1865, "eve": 1866, "woo": 1867, "official": 1868, "suc": 1869, "incre": 1870, "bon": 1871, "part": 1872, "pped": 1873, "class": 1874, "sive": 1875, "boy": 1876, "cul": 1877, "perfect": 1878, "tou": 1879, "dam": 1880, "welcome": 1881, "football": 1882, "hi": 1883, "pap": 1884, "wait": 1885, "ada": 1886, "congrats": 1887, "young": 1888, "excited": 1889, "rece": 1890, "jan": 1891, "va": 1892, "red": 1893, "stra": 1894, "media": 1895, "'d": 1896, "does": 1897, "let": 1898, "mul": 1899, "ills": 1900, "green": 1901, "mel": 1902, "toge": 1903, "future": 1904, "yester": 1905, "versity": 1906, "form": 1907, "tain": 1908, "ide": 1909, "ches": 1910, "kids": 1911, "qui": 1912, "haha": 1913, "deta": 1914, "big": 1915, "favorite": 1916, "girls": 1917, "contin": 1918, "dom": 1919, "search": 1920, "ual": 1921, "air": 1922, "ders": 1923, "month": 1924, "cer": 1925, "yesterday": 1926, "community": 1927, "ade": 1928, "dog": 1929, "ville": 1930, "ices": 1931, "deli": 1932, "syste": 1933, "run": 1934, "ism": 1935, "heart": 1936, "cup": 1937, "enti": 1938, "few": 1939, "president": 1940, "eds": 1941, "until": 1942, "festi": 1943, "ok": 1944, "flo": 1945, "said": 1946, "ole": 1947, "med": 1948, "travel": 1949, "£": 1950, "phone": 1951, "together": 1952, "fast": 1953, "lot": 1954, "games": 1955, "shir": 1956, "between": 1957, "yes": 1958, "thers": 1959, "doing": 1960, "mac": 1961, "ator": 1962, "band": 1963, "follow": 1964, "project": 1965, "develop": 1966, "diffe": 1967, "confe": 1968, "speci": 1969, "cast": 1970, "ys": 1971, "board": 1972, "rd": 1973, "ial": 1974, "shoo": 1975, "ram": 1976, "having": 1977, "share": 1978, "follow": 1979, "one": 1980, "name": 1981, "mr": 1982, "put": 1983, "discu": 1984, "ory": 1985, "came": 1986, "ous": 1987, "site": 1988, "twitter": 1989, "tb": 1990, "tit": 1991, "finally": 1992, "zed": 1993, "super": 1994, "compan": 1995, "using": 1996, "alls": 1997, "list": 1998, "ris": 1999, "shot": 2000, "gal": 2001, "tar": 2002, "del": 2003, "john": 2004, "âĢĶ": 2005, "something": 2006, "ram": 2007, "intere": 2008, "whe": 2009, "bit": 2010, "ðŁį": 2011, "street": 2012, "ound": 2013, "ai": 2014, "tickets": 2015, "movie": 2016, "real": 2017, "ky": 2018, "taking": 2019, "opp": 2020, "cc": 2021, "lam": 2022, "moun": 2023, "inve": 2024, "black": 2025, "used": 2026, "online": 2027, "yor": 2028, "local": 2029, "gue": 2030, "cks": 2031, "ow": 2032, "gest": 2033, "boys": 2034, "illion": 2035, "cont": 2036, "reci": 2037, "ined": 2038, "euro": 2039, "now": 2040, "seen": 2041, "ph": 2042, "teach": 2043, "def": 2044, "south": 2045, "such": 2046, "award": 2047, "must": 2048, "issu": 2049, "care": 2050, "feel": 2051, "plu": 2052, "latest": 2053, "sports": 2054, "web": 2055, "tex": 2056, "ement": 2057, "sk": 2058, "fic": 2059, "wan": 2060, "tech": 2061, "ot": 2062, "box": 2063, "ner": 2064, "free": 2065, "tal": 2066, "ash": 2067, "case": 2068, "hot": 2069, "wonder": 2070, "meeting": 2071, "era": 2072, "chall": 2073, "ðŁIJ": 2074, "job": 2075, "ili": 2076, "cool": 2077, "jour": 2078, "ths": 2079, "mo": 2080, "fel": 2081, "die": 2082, "micha": 2083, "ele": 2084, "team": 2085, "service": 2086, "stand": 2087, "makes": 2088, "ping": 2089, "early": 2090, "comes": 2091, "ek": 2092, "holi": 2093, "vers": 2094, "ague": 2095, "sau": 2096, "three": 2097, "monday": 2098, "fashi": 2099, "someone": 2100, "thro": 2101, "sea": 2102, "bad": 2103, "suppor": 2104, "turn": 2105, "ury": 2106, "ming": 2107, "photography": 2108, "nic": 2109, "mark": 2110, "pretty": 2111, 
"ssing": 2112, "watching": 2113, "memb": 2114, "arri": 2115, "county": 2116, "beach": 2117, "fran": 2118, "center": 2119, "police": 2120, "bat": 2121, "public": 2122, "tan": 2123, "press": 2124, "saf": 2125, "sy": 2126, "gets": 2127, "roy": 2128, "ners": 2129, "your": 2130, "buy": 2131, "sters": 2132, "show": 2133, "ased": 2134, "childre": 2135, "afric": 2136, "ines": 2137, "space": 2138, "scri": 2139, "hall": 2140, "pain": 2141, "aring": 2142, "home": 2143, "mur": 2144, "health": 2145, "ched": 2146, "sand": 2147, "recei": 2148, "guy": 2149, "ea": 2150, "american": 2151, "resi": 2152, "children": 2153, "--": 2154, "iri": 2155, "ington": 2156, "country": 2157, "ross": 2158, "len": 2159, "anna": 2160, "books": 2161, "bc": 2162, "ece": 2163, "dom": 2164, "lovely": 2165, "kh": 2166, "pet": 2167, "gy": 2168, "gri": 2169, "stage": 2170, "office": 2171, "rock": 2172, "mon": 2173, "bay": 2174, "table": 2175, "sun": 2176, "med": 2177, "thin": 2178, "lor": 2179, "flow": 2180, "(@": 2181, "university": 2182, "store": 2183, "front": 2184, "good": 2185, "za": 2186, "vote": 2187, "north": 2188, "hey": 2189, "anim": 2190, "order": 2191, "mid": 2192, "without": 2193, "ade": 2194, "remember": 2195, "market": 2196, "??": 2197, "mus": 2198, "training": 2199, "educ": 2200, "but": 2201, "cover": 2202, "stan": 2203, "scen": 2204, "bla": 2205, "break": 2206, "lou": 2207, "same": 2208, "gold": 2209, "ain": 2210, "os": 2211, "both": 2212, "lit": 2213, "vern": 2214, "ai": 2215, "albu": 2216, "pa": 2217, "enjoy": 2218, "beg": 2219, "elling": 2220, "thursday": 2221, "info": 2222, "san": 2223, "america": 2224, "hair": 2225, "tel": 2226, "march": 2227, "concer": 2228, "college": 2229, "conference": 2230, "app": 2231, "hour": 2232, "chang": 2233, "âļ": 2234, "sour": 2235, "ols": 2236, "weather": 2237, "war": 2238, "phi": 2239, "festival": 2240, "second": 2241, "cute": 2242, "prac": 2243, "ener": 2244, "stry": 2245, "lea": 2246, "polit": 2247, "sav": 2248, "sen": 2249, "ow": 2250, "mi": 2251, "near": 2252, "ought": 2253, "ze": 2254, "coffe": 2255, "willi": 2256, "dan": 2257, "sey": 2258, "david": 2259, "ese": 2260, "fan": 2261, "deci": 2262, "theat": 2263, "nov": 2264, "ation": 2265, "trac": 2266, "sci": 2267, "review": 2268, "cel": 2269, "em": 2270, "un": 2271, "july": 2272, "orig": 2273, "tion": 2274, "dru": 2275, "former": 2276, "stay": 2277, "after": 2278, "inv": 2279, "took": 2280, "data": 2281, "bal": 2282, "tues": 2283, "dan": 2284, "evening": 2285, "ðŁĺĤðŁĺĤ": 2286, "dol": 2287, "ures": 2288, "provi": 2289, "ts": 2290, "est": 2291, "sign": 2292, "jac": 2293, "uk": 2294, "song": 2295, "yet": 2296, "bow": 2297, "indu": 2298, "jap": 2299, "hoo": 2300, "point": 2301, "anyone": 2302, "zy": 2303, "ist": 2304, "hur": 2305, "ital": 2306, "building": 2307, "woman": 2308, "chur": 2309, "jer": 2310, "perfor": 2311, "coach": 2312, "league": 2313, "cess": 2314, "net": 2315, "imag": 2316, "nation": 2317, "brit": 2318, "que": 2319, "awards": 2320, "ages": 2321, "works": 2322, "ced": 2323, "mance": 2324, "late": 2325, "ign": 2326, "money": 2327, "true": 2328, "ii": 2329, "tell": 2330, "plac": 2331, "pac": 2332, "asy": 2333, "world": 2334, "behin": 2335, "import": 2336, "reading": 2337, "gram": 2338, "giving": 2339, "met": 2340, "hit": 2341, "forward": 2342, "stom": 2343, "present": 2344, "june": 2345, "social": 2346, "noon": 2347, "mart": 2348, "half": 2349, "swe": 2350, "govern": 2351, "ker": 2352, "details": 2353, "lish": 2354, "__": 2355, "acy": 2356, "sia": 2357, "bert": 2358, "fall": 2359, "!!!!": 2360, "),": 2361, "thi": 
2362, "diti": 2363, "sport": 2364, "king": 2365, "fit": 2366, "staf": 2367, "cat": 2368, "muse": 2369, "centr": 2370, "yer": 2371, "contro": 2372, "bloo": 2373, "walk": 2374, "actu": 2375, "didn": 2376, "lim": 2377, "learning": 2378, "research": 2379, "wedne": 2380, "auth": 2381, "hours": 2382, "ky": 2383, "far": 2384, "hen": 2385, "....": 2386, "itch": 2387, "ril": 2388, "strong": 2389, "sky": 2390, "questi": 2391, "james": 2392, "ron": 2393, "dg": 2394, "fur": 2395, "cin": 2396, "does": 2397, "appro": 2398, "marke": 2399, "tures": 2400, "fully": 2401, "chat": 2402, "behind": 2403, "tem": 2404, "fini": 2405, "mission": 2406, "batt": 2407, "feel": 2408, "heav": 2409, "everything": 2410, "bar": 2411, "wish": 2412, "premi": 2413, "ima": 2414, "experience": 2415, "each": 2416, "report": 2417, "sweet": 2418, "tics": 2419, "spring": 2420, "respon": 2421, "system": 2422, "victor": 2423, "lin": 2424, "saw": 2425, "already": 2426, "ghter": 2427, "fle": 2428, "ãĥ": 2429, "bring": 2430, "album": 2431, "--": 2432, "ells": 2433, "stan": 2434, "tom": 2435, "international": 2436, "went": 2437, "anni": 2438, "match": 2439, "pper": 2440, "stone": 2441, "small": 2442, "rain": 2443, "fashion": 2444, "area": 2445, "van": 2446, "agram": 2447, "ko": 2448, "thought": 2449, "worth": 2450, "van": 2451, "mer": 2452, "coffee": 2453, "ites": 2454, "gn": 2455, "artist": 2456, "con": 2457, "arch": 2458, "cir": 2459, "secre": 2460, "ground": 2461, "iso": 2462, "hand": 2463, "com": 2464, "bridge": 2465, "hs": 2466, "xi": 2467, "link": 2468, "pul": 2469, "spl": 2470, "race": 2471, "fli": 2472, "river": 2473, "gas": 2474, "disco": 2475, "dal": 2476, "player": 2477, "fit": 2478, "photos": 2479, "ity": 2480, "ok": 2481, "jor": 2482, "tra": 2483, "april": 2484, "ads": 2485, "adi": 2486, "solu": 2487, "beauty": 2488, "door": 2489, "mess": 2490, "update": 2491, "alia": 2492, "scho": 2493, "ened": 2494, "moment": 2495, "scot": 2496, "science": 2497, "ior": 2498, "ties": 2499, "across": 2500, "ously": 2501, "shes": 2502, "doesn": 2503, "page": 2504, "water": 2505, "million": 2506, "classi": 2507, "lic": 2508, "cast": 2509, "formation": 2510, "michael": 2511, "ello": 2512, "smo": 2513, "ints": 2514, "vision": 2515, "opening": 2516, "ldn": 2517, "austr": 2518, "tuesday": 2519, "winner": 2520, "possi": 2521, "round": 2522, "shirt": 2523, "dit": 2524, "bo": 2525, "ues": 2526, "illed": 2527, "along": 2528, "trip": 2529, "starting": 2530, "impro": 2531, "kan": 2532, "person": 2533, "not": 2534, "reco": 2535, "needs": 2536, "cle": 2537, "lie": 2538, "rest": 2539, "ring": 2540, "winter": 2541, "simp": 2542, "mom": 2543, "beer": 2544, "face": 2545, "tors": 2546, "usa": 2547, "collection": 2548, "geor": 2549, "session": 2550, "trying": 2551, "las": 2552, "lake": 2553, "jen": 2554, "origin": 2555, "student": 2556, "secur": 2557, "vin": 2558, "pics": 2559, "expe": 2560, "comp": 2561, "gonna": 2562, "equ": 2563, "bad": 2564, "ley": 2565, "au": 2566, "members": 2567, "break": 2568, "wall": 2569, "gic": 2570, "dinner": 2571, "bul": 2572, "inspir": 2573, "ri": 2574, "mind": 2575, "ica": 2576, "winning": 2577, "talking": 2578, "tren": 2579, "sis": 2580, "ten": 2581, "wonderful": 2582, "snow": 2583, "hear": 2584, "thom": 2585, "nothing": 2586, "gui": 2587, "stin": 2588, "blog": 2589, "fest": 2590, "bun": 2591, "lee": 2592, "wards": 2593, "chance": 2594, "dress": 2595, "ren": 2596, "paul": 2597, "pes": 2598, "techno": 2599, "russi": 2600, "card": 2601, "east": 2602, "mari": 2603, "wine": 2604, "ti": 2605, "law": 2606, "stric": 2607, "ki": 2608, 
"ape": 2609, "augu": 2610, "profe": 2611, "ash": 2612, "course": 2613, "mail": 2614, "rently": 2615, "dun": 2616, "mun": 2617, "love": 2618, "island": 2619, "drive": 2620, "sl": 2621, "ended": 2622, "main": 2623, "lost": 2624, "nature": 2625, "âĿ¤ï¸ı": 2626, "chic": 2627, "repor": 2628, "pin": 2629, "pro": 2630, "station": 2631, "cep": 2632, "takes": 2633, "company": 2634, "goes": 2635, "ond": 2636, "mach": 2637, "radio": 2638, "dad": 2639, "rock": 2640, "ja": 2641, "pay": 2642, "champion": 2643, "ee": 2644, "inde": 2645, "tta": 2646, "atic": 2647, "tab": 2648, "believe": 2649, "energy": 2650, "zi": 2651, "tat": 2652, "word": 2653, "once": 2654, "resul": 2655, "yl": 2656, "andre": 2657, "ano": 2658, "instagram": 2659, "close": 2660, "tam": 2661, "custom": 2662, "wa": 2663, "conom": 2664, "shows": 2665, "life": 2666, "kin": 2667, "rob": 2668, "tage": 2669, "nation": 2670, "almost": 2671, "listen": 2672, "save": 2673, "reli": 2674, "ace": 2675, "mary": 2676, "tree": 2677, "forget": 2678, "jack": 2679, "waiting": 2680, "director": 2681, "hill": 2682, "born": 2683, "temp": 2684, "fl": 2685, "ste": 2686, "ona": 2687, "single": 2688, "wednesday": 2689, "united": 2690, "ino": 2691, "@_": 2692, "nel": 2693, "celebrate": 2694, "ending": 2695, "deal": 2696, "ji": 2697, "canada": 2698, "huge": 2699, "track": 2700, "âĢ¢": 2701, "fy": 2702, "fanta": 2703, "ang": 2704, "york": 2705, "release": 2706, "pun": 2707, "episo": 2708, "words": 2709, "tour": 2710, "pack": 2711, "igh": 2712, "classic": 2713, "performance": 2714, "ket": 2715, "afternoon": 2716, "record": 2717, "wins": 2718, "proble": 2719, "âĿ¤": 2720, "four": 2721, "bed": 2722, "bank": 2723, "dance": 2724, "sla": 2725, "called": 2726, "might": 2727, "ap": 2728, "past": 2729, "ðŁļ": 2730, "different": 2731, "ite": 2732, "gift": 2733, "ssive": 2734, "church": 2735, "cus": 2736, "program": 2737, "hotel": 2738, "ice": 2739, "mad": 2740, "security": 2741, "enge": 2742, "dc": 2743, "enough": 2744, "sta": 2745, "ety": 2746, "dead": 2747, "gun": 2748, "hear": 2749, "mir": 2750, "human": 2751, "gress": 2752, "ounds": 2753, "piece": 2754, "breaking": 2755, "garden": 2756, "fight": 2757, "views": 2758, "fish": 2759, "started": 2760, "running": 2761, "green": 2762, "seri": 2763, "sm": 2764, "ask": 2765, "dor": 2766, "death": 2767, "econom": 2768, "eri": 2769, "ird": 2770, "ser": 2771, "lunch": 2772, "âģ¦": 2773, "box": 2774, "natu": 2775, "base": 2776, "ban": 2777, "fal": 2778, "global": 2779, "wild": 2780, "wow": 2781, "outside": 2782, "move": 2783, "lead": 2784, "anal": 2785, "museum": 2786, "ong": 2787, "haw": 2788, "power": 2789, "thank": 2790, "bac": 2791, "charac": 2792, "campa": 2793, "digital": 2794, "ro": 2795, "oper": 2796, "dev": 2797, "wol": 2798, "pati": 2799, "fa": 2800, "male": 2801, "paper": 2802, "illing": 2803, "cs": 2804, "âĥ": 2805, "education": 2806, "taken": 2807, "effe": 2808, "mou": 2809, "sad": 2810, "\".": 2811, "based": 2812, "staff": 2813, "including": 2814, "living": 2815, "ac": 2816, "china": 2817, "mob": 2818, "storm": 2819, "luck": 2820, "phil": 2821, "oo": 2822, "yn": 2823, "travel": 2824, "kel": 2825, "tial": 2826, "price": 2827, "book": 2828, "important": 2829, "bio": 2830, "pool": 2831, "nyc": 2832, "fab": 2833, "load": 2834, "?!": 2835, "challenge": 2836, "cry": 2837, "serve": 2838, "wear": 2839, "bus": 2840, "tain": 2841, "number": 2842, "ror": 2843, "kat": 2844, "iz": 2845, "though": 2846, "hosp": 2847, "mm": 2848, "fair": 2849, "utes": 2850, "hot": 2851, "pop": 2852, "fied": 2853, "camp": 2854, "development": 2855, 
"libr": 2856, "cali": 2857, "ems": 2858, "âģ¦@": 2859, "bol": 2860, "ised": 2861, "standing": 2862, "model": 2863, "ita": 2864, "gle": 2865, "brown": 2866, "image": 2867, "vered": 2868, "force": 2869, "oil": 2870, "partic": 2871, "shu": 2872, "daily": 2873, "law": 2874, "sec": 2875, "class": 2876, "camp": 2877, "holiday": 2878, "clin": 2879, "kers": 2880, "present": 2881, "game": 2882, "incredi": 2883, "ership": 2884, "interview": 2885, "bill": 2886, "due": 2887, "andy": 2888, "abo": 2889, "innov": 2890, "key": 2891, "acade": 2892, "pil": 2893, "moder": 2894, "stars": 2895, "brand": 2896, "fer": 2897, "weeks": 2898, "consi": 2899, "pre": 2900, "safe": 2901, "writ": 2902, "dium": 2903, "launch": 2904, "marketing": 2905, "annual": 2906, "assi": 2907, "court": 2908, "lady": 2909, "cted": 2910, "anda": 2911, "inside": 2912, "child": 2913, "oppor": 2914, "smith": 2915, "centre": 2916, "gue": 2917, "âģ©": 2918, "fren": 2919, "sty": 2920, "fort": 2921, "ently": 2922, "isn": 2923, "keep": 2924, "tober": 2925, "ony": 2926, "boy": 2927, "ald": 2928, "colla": 2929, "demo": 2930, "level": 2931, "compet": 2932, "ado": 2933, "bour": 2934, "fantastic": 2935, "mate": 2936, "su": 2937, "south": 2938, "opportun": 2939, "versary": 2940, "later": 2941, "bud": 2942, "facebook": 2943, "laun": 2944, "stern": 2945, "pit": 2946, "!\"": 2947, "maj": 2948, "gram": 2949, "tbt": 2950, "fire": 2951, "happy": 2952, "aks": 2953, "whole": 2954, "actually": 2955, "iller": 2956, "ella": 2957, "lots": 2958, "alex": 2959, "ange": 2960, "lands": 2961, "ðŁĺŃ": 2962, "enter": 2963, "rou": 2964, "episode": 2965, "ped": 2966, "inten": 2967, "shire": 2968, "who": 2969, "plan": 2970, "ho": 2971, "cake": 2972, "west": 2973, "magaz": 2974, "fresh": 2975, "cc": 2976, "nar": 2977, "chris": 2978, "writing": 2979, "wer": 2980, "nom": 2981, "lo": 2982, "midd": 2983, "dream": 2984, "ol": 2985, "tional": 2986, "deb": 2987, ">>": 2988, "become": 2989, "si": 2990, "grand": 2991, "alling": 2992, "histor": 2993, "ride": 2994, "ired": 2995, "safe": 2996, "queen": 2997, "cil": 2998, "intro": 2999, "vil": 3000, "dani": 3001, "...": 3002, "artic": 3003, "stat": 3004, "short": 3005, "oring": 3006, "selfi": 3007, "missi": 3008, "doc": 3009, "bit": 3010, "gall": 3011, "bom": 3012, "ire": 3013, "selec": 3014, "dition": 3015, "ðŁĶ¥": 3016, "friend": 3017, "beat": 3018, "ghting": 3019, "ðŁĺĬ": 3020, "peace": 3021, "exhi": 3022, "anta": 3023, "ability": 3024, "illu": 3025, "jon": 3026, "quality": 3027, "tribu": 3028, "mes": 3029, "players": 3030, "fair": 3031, "cut": 3032, "cab": 3033, "success": 3034, "bi": 3035, "sus": 3036, "promo": 3037, "sche": 3038, "ange": 3039, "ico": 3040, "commit": 3041, "catch": 3042, "illa": 3043, "kind": 3044, "feeling": 3045, "quo": 3046, "say": 3047, "anniversary": 3048, "spot": 3049, "mother": 3050, "ane": 3051, "pend": 3052, "yourself": 3053, "ops": 3054, "apple": 3055, "minutes": 3056, "po": 3057, "grand": 3058, "ries": 3059, "haha": 3060, "career": 3061, "edition": 3062, "dec": 3063, "rick": 3064, "ami": 3065, "concert": 3066, "itive": 3067, "geous": 3068, "dly": 3069, "tte": 3070, "advent": 3071, "ig": 3072, "lights": 3073, "aker": 3074, "sky": 3075, "âĥ£": 3076, "ray": 3077, "finished": 3078, "way": 3079, "sd": 3080, "accoun": 3081, "ðŁĴķ": 3082, "cky": 3083, "chel": 3084, "liter": 3085, "painting": 3086, "los": 3087, "stun": 3088, "technology": 3089, "nas": 3090, "mar": 3091, "bil": 3092, "africa": 3093, "kie": 3094, "eyes": 3095, "golf": 3096, "plus": 3097, "nia": 3098, "itec": 3099, "services": 3100, "wedding": 
3101, "known": 3102, "tele": 3103, ".....": 3104, "starts": 3105, "paren": 3106, "wants": 3107, "ational": 3108, "months": 3109, "windo": 3110, "favour": 3111, "ert": 3112, "magazine": 3113, "exclu": 3114, "reve": 3115, "bc": 3116, "original": 3117, "ess": 3118, "nal": 3119, "anti": 3120, "stro": 3121, "tice": 3122, "study": 3123, "à¤": 3124, "vac": 3125, "national": 3126, "five": 3127, "rain": 3128, "vement": 3129, "ute": 3130, "verse": 3131, "emer": 3132, "army": 3133, "possible": 3134, "guess": 3135, "valley": 3136, "thern": 3137, "crow": 3138, "mr": 3139, "color": 3140, "onto": 3141, "pick": 3142, "clear": 3143, "dark": 3144, "tac": 3145, "wanted": 3146, "itting": 3147, "cancer": 3148, "government": 3149, "die": 3150, "rise": 3151, "zing": 3152, "cold": 3153, "foun": 3154, "studio": 3155, "stration": 3156, "brother": 3157, "ahead": 3158, "shel": 3159, "micro": 3160, "ically": 3161, "dau": 3162, "signed": 3163, "viol": 3164, "ax": 3165, "asse": 3166, "io": 3167, "wre": 3168, "splay": 3169, "chick": 3170, "august": 3171, "plat": 3172, "tips": 3173, "spi": 3174, "human": 3175, "easy": 3176, "logi": 3177, "mike": 3178, "grow": 3179, "agre": 3180, "ww": 3181, "shad": 3182, "motiv": 3183, "wide": 3184, "turns": 3185, "omg": 3186, "var": 3187, "defin": 3188, "sug": 3189, "jim": 3190, "ðŁĶ¥": 3191, "td": 3192, "campaign": 3193, "named": 3194, "retweet": 3195, "cop": 3196, "tv": 3197, "leav": 3198, "kis": 3199, "double": 3200, "smar": 3201, "issue": 3202, "villa": 3203, "information": 3204, "lies": 3205, "stock": 3206, "nt": 3207, "distric": 3208, "shor": 3209, "mix": 3210, "ero": 3211, "sep": 3212, "mex": 3213, "seeing": 3214, "live": 3215, "remin": 3216, "code": 3217, "gur": 3218, "sc": 3219, "wild": 3220, "lun": 3221, "hood": 3222, "spot": 3223, "father": 3224, "forever": 3225, "upd": 3226, "traf": 3227, "fly": 3228, "need": 3229, "gradu": 3230, "train": 3231, "make": 3232, "sab": 3233, "bey": 3234, "size": 3235, "leader": 3236, "talks": 3237, "eu": 3238, "log": 3239, "fox": 3240, "gorgeous": 3241, "less": 3242, "lets": 3243, "surpri": 3244, "myself": 3245, "note": 3246, "lives": 3247, "fru": 3248, "loved": 3249, "sever": 3250, "dem": 3251, "ji": 3252, "soc": 3253, "hold": 3254, "dogs": 3255, "ni": 3256, "âŀ": 3257, "leave": 3258, "airport": 3259, "benef": 3260, "expl": 3261, "ships": 3262, "complete": 3263, "achi": 3264, "great": 3265, "vintage": 3266, "jack": 3267, "roc": 3268, "wood": 3269, "priv": 3270, "offer": 3271, "eye": 3272, "version": 3273, "tea": 3274, "coach": 3275, "offic": 3276, "well": 3277, "gen": 3278, "sat": 3279, "hh": 3280, "youth": 3281, "ox": 3282, "?\"": 3283, "mt": 3284, "mix": 3285, "gg": 3286, "dle": 3287, "natural": 3288, "build": 3289, "breakfast": 3290, "thinking": 3291, "theatre": 3292, "moon": 3293, "berg": 3294, "goals": 3295, "george": 3296, "ene": 3297, "excell": 3298, "iling": 3299, "tune": 3300, "yed": 3301, "gate": 3302, "mit": 3303, "network": 3304, "joe": 3305, "hello": 3306, "fb": 3307, "tube": 3308, "wearing": 3309, "athle": 3310, "struc": 3311, "hard": 3312, "glass": 3313, "gers": 3314, "throw": 3315, "ges": 3316, "bt": 3317, "industry": 3318, "management": 3319, "alist": 3320, "goal": 3321, "stream": 3322, "yel": 3323, "avi": 3324, "icious": 3325, "others": 3326, "ski": 3327, "christi": 3328, "bird": 3329, "esc": 3330, "min": 3331, "tro": 3332, "lt": 3333, "jan": 3334, "imp": 3335, "rights": 3336, "sha": 3337, "organ": 3338, "central": 3339, "ara": 3340, "roll": 3341, "favourite": 3342, "chester": 3343, "else": 3344, "pay": 3345, "cars": 3346, 
"mine": 3347, "step": 3348, "practice": 3349, "major": 3350, "hang": 3351, "ðŁĺĺ": 3352, "non": 3353, "vari": 3354, "engine": 3355, "volun": 3356, "dia": 3357, "iled": 3358, "architec": 3359, "pink": 3360, "ds": 3361, "thy": 3362, "wash": 3363, "website": 3364, "bag": 3365, "control": 3366, "elli": 3367, "fra": 3368, "answ": 3369, "dence": 3370, "yu": 3371, "ron": 3372, "ola": 3373, "gin": 3374, "drin": 3375, "lic": 3376, "couple": 3377, "spar": 3378, "gon": 3379, "create": 3380, "ct": 3381, "celebrating": 3382, "deep": 3383, "eat": 3384, "tee": 3385, "voice": 3386, "drop": 3387, "visit": 3388, "ators": 3389, "stadium": 3390, "ft": 3391, "wis": 3392, "rol": 3393, "grade": 3394, "famil": 3395, "points": 3396, "repre": 3397, "was": 3398, "traffic": 3399, "japan": 3400, "org": 3401, "honor": 3402, "texas": 3403, "manu": 3404, "âĻ¥": 3405, "safety": 3406, "rer": 3407, "bag": 3408, "emplo": 3409, "released": 3410, "regu": 3411, "aka": 3412, "nav": 3413, "role": 3414, "senior": 3415, "spect": 3416, "cross": 3417, "lines": 3418, "best": 3419, "pack": 3420, "sin": 3421, "tie": 3422, "missing": 3423, "sunset": 3424, "liber": 3425, "ising": 3426, "jay": 3427, "ski": 3428, "championship": 3429, "activ": 3430, "ladies": 3431, "played": 3432, "yy": 3433, "publ": 3434, "alo": 3435, "pride": 3436, "sr": 3437, "paki": 3438, "lux": 3439, "survi": 3440, "cked": 3441, "ets": 3442, "chocol": 3443, "australia": 3444, "paris": 3445, "miles": 3446, "hat": 3447, "mental": 3448, "ala": 3449, "mean": 3450, "mobile": 3451, "ena": 3452, "insi": 3453, "found": 3454, "chief": 3455, "tag": 3456, "incredible": 3457, "return": 3458, "é": 3459, "google": 3460, "french": 3461, "crew": 3462, "hallo": 3463, "alian": 3464, "jaz": 3465, "cher": 3466, "silver": 3467, "north": 3468, "english": 3469, "baseball": 3470, "caf": 3471, "limited": 3472, "following": 3473, "appreci": 3474, "earth": 3475, "kir": 3476, "vember": 3477, "wed": 3478, "ption": 3479, "ged": 3480, "october": 3481, "flori": 3482, "cr": 3483, "ency": 3484, "gave": 3485, "lord": 3486, "stuff": 3487, "berry": 3488, "post": 3489, "smile": 3490, "broad": 3491, "state": 3492, "gger": 3493, "means": 3494, "icy": 3495, "gun": 3496, "yo": 3497, "master": 3498, "burg": 3499, "hands": 3500, "nie": 3501, "//": 3502, "union": 3503, "british": 3504, "biggest": 3505, "district": 3506, "aming": 3507, "hil": 3508, "oce": 3509, "person": 3510, "pass": 3511, "envir": 3512, "schools": 3513, "arrived": 3514, "ances": 3515, "inspired": 3516, "expla": 3517, "ben": 3518, "library": 3519, "bott": 3520, "amp": 3521, "steph": 3522, "contact": 3523, "bang": 3524, "ms": 3525, "califor": 3526, "told": 3527, "battle": 3528, "bb": 3529, "chicago": 3530, "⾨": 3531, "strate": 3532, "shi": 3533, "dece": 3534, "-)": 3535, "add": 3536, "lab": 3537, "jones": 3538, "legend": 3539, "castle": 3540, "inger": 3541, "stance": 3542, "bel": 3543, "ura": 3544, "refu": 3545, "leaders": 3546, "pot": 3547, "sex": 3548, "hic": 3549, "article": 3550, "kid": 3551, "france": 3552, "xx": 3553, "exe": 3554, "guide": 3555, "volunte": 3556, "print": 3557, "ali": 3558, "ceo": 3559, "tweets": 3560, "wx": 3561, "scene": 3562, "volu": 3563, "anti": 3564, "han": 3565, "associ": 3566, "sharing": 3567, "rose": 3568, "minister": 3569, "sher": 3570, "inste": 3571, "clean": 3572, "democr": 3573, "poster": 3574, "skin": 3575, "psy": 3576, "proper": 3577, "crazy": 3578, "iam": 3579, "ore": 3580, "ini": 3581, "anything": 3582, "pod": 3583, "moving": 3584, "click": 3585, "explo": 3586, "comb": 3587, "craft": 3588, "fi": 3589, 
"blood": 3590, "isra": 3591, "public": 3592, "dent": 3593, "olym": 3594, "england": 3595, "asi": 3596, "cher": 3597, "fact": 3598, "environ": 3599, "harry": 3600, "gone": 3601, "medic": 3602, "enjoying": 3603, "justice": 3604, "jr": 3605, "indian": 3606, "wife": 3607, "sound": 3608, "tes": 3609, "drawing": 3610, "pal": 3611, "idea": 3612, "crit": 3613, "juli": 3614, "iler": 3615, "warm": 3616, "clar": 3617, "thoughts": 3618, "defen": 3619, "council": 3620, "introduc": 3621, "died": 3622, "janu": 3623, "ani": 3624, "send": 3625, "lier": 3626, "ml": 3627, "interesting": 3628, "trade": 3629, "wind": 3630, "bay": 3631, "sac": 3632, "ancy": 3633, "source": 3634, "bes": 3635, "organi": 3636, "arly": 3637, "large": 3638, "ffici": 3639, "tag": 3640, "ut": 3641, "desp": 3642, "oes": 3643, "title": 3644, "sym": 3645, "pictures": 3646, "open": 3647, "women": 3648, "showing": 3649, "ria": 3650, "least": 3651, "leadership": 3652, "current": 3653, "electr": 3654, "valent": 3655, "listening": 3656, "ckey": 3657, "general": 3658, "deser": 3659, "duce": 3660, ";)": 3661, "cent": 3662, "ðŁĺįðŁĺį": 3663, "scott": 3664, "poor": 3665, "selfie": 3666, "events": 3667, "ion": 3668, "wrong": 3669, "dev": 3670, "hill": 3671, "septe": 3672, "culture": 3673, "line": 3674, "sorry": 3675, "sent": 3676, "sister": 3677, "cept": 3678, "kri": 3679, "november": 3680, "ari": 3681, "announce": 3682, "zation": 3683, "bran": 3684, "gent": 3685, "du": 3686, "len": 3687, "pers": 3688, "fm": 3689, "martin": 3690, "op": 3691, "emb": 3692, "ome": 3693, "middle": 3694, "success": 3695, "peter": 3696, "january": 3697, "flu": 3698, "racing": 3699, "dav": 3700, "bike": 3701, "ðŁı»": 3702, "pet": 3703, "shoot": 3704, "professi": 3705, "featuring": 3706, "september": 3707, "nowplaying": 3708, "staur": 3709, "za": 3710, "onic": 3711, "quick": 3712, "baske": 3713, "speaking": 3714, "milit": 3715, "zer": 3716, "chicken": 3717, "bell": 3718, "sad": 3719, "coast": 3720, "loving": 3721, "yers": 3722, "dj": 3723, "panel": 3724, "verage": 3725, "swit": 3726, "icks": 3727, "bou": 3728, "california": 3729, "sam": 3730, "parents": 3731, "ero": 3732, "killed": 3733, "phys": 3734, "jobs": 3735, "migr": 3736, "anth": 3737, "emo": 3738, "halloween": 3739, "ander": 3740, "cm": 3741, "competition": 3742, "eag": 3743, "sket": 3744, "spir": 3745, "maybe": 3746, "exclusive": 3747, "appe": 3748, "journey": 3749, "screen": 3750, "ford": 3751, "io": 3752, "hate": 3753, "ug": 3754, "soul": 3755, "hero": 3756, "society": 3757, "syn": 3758, "guit": 3759, "nh": 3760, "dj": 3761, "ases": 3762, "impre": 3763, "time": 3764, "sales": 3765, "dd": 3766, "fts": 3767, "summit": 3768, "stunning": 3769, "oms": 3770, "turned": 3771, "clean": 3772, "soft": 3773, "beat": 3774, "restaur": 3775, "dered": 3776, "ences": 3777, "magic": 3778, "dio": 3779, "shine": 3780, "guest": 3781, "healthy": 3782, "exhib": 3783, "stories": 3784, "popu": 3785, "nis": 3786, "ela": 3787, "below": 3788, "funny": 3789, "results": 3790, "sne": 3791, "currently": 3792, "ard": 3793, "download": 3794, "flight": 3795, "mal": 3796, "fine": 3797, "pad": 3798, "chu": 3799, "ented": 3800, "hat": 3801, "ðŁijı": 3802, "steve": 3803, "jo": 3804, "mark": 3805, "rat": 3806, "ball": 3807, "pc": 3808, "pon": 3809, "bby": 3810, "oli": 3811, "arts": 3812, "asure": 3813, "bowl": 3814, "attack": 3815, "mic": 3816, "dear": 3817, "range": 3818, "enter": 3819, "chocolate": 3820, "brilli": 3821, "access": 3822, ",\"": 3823, "???": 3824, "chap": 3825, "const": 3826, "tn": 3827, "matter": 3828, "blue": 3829, "gallery": 3830, 
"emp": 3831, "workshop": 3832, "leading": 3833, "yours": 3834, "basketball": 3835, "wanna": 3836, "thu": 3837, "__": 3838, "marri": 3839, "sleep": 3840, "bia": 3841, "che": 3842, "mad": 3843, "impact": 3844, "own": 3845, "sir": 3846, "channel": 3847, "europe": 3848, "esp": 3849, "kitch": 3850, "hospital": 3851, "wra": 3852, "royal": 3853, "fs": 3854, "neu": 3855, "quar": 3856, "ney": 3857, "acks": 3858, "chase": 3859, "ppy": 3860, "stal": 3861, "ately": 3862, "tim": 3863, "december": 3864, "rare": 3865, "perform": 3866, "cream": 3867, "weight": 3868, "choo": 3869, "night": 3870, "haven": 3871, "franc": 3872, "khan": 3873, "built": 3874, "helping": 3875, "trust": 3876, "type": 3877, "golden": 3878, "tax": 3879, "snow": 3880, "swi": 3881, "disa": 3882, "questions": 3883, "vey": 3884, "light": 3885, "cn": 3886, "cloud": 3887, "thomas": 3888, "aged": 3889, "shou": 3890, "teams": 3891, "gran": 3892, "reason": 3893, "aa": 3894, "youtube": 3895, "vp": 3896, "pizz": 3897, "manager": 3898, "bury": 3899, "credit": 3900, "treat": 3901, "max": 3902, "ik": 3903, "main": 3904, "ging": 3905, "dead": 3906, "probab": 3907, "yeah": 3908, "ãĤ": 3909, "brand": 3910, "soli": 3911, "plant": 3912, "tayl": 3913, "girl": 3914, "ðŁĺŃ": 3915, "nament": 3916, "auto": 3917, "message": 3918, "kore": 3919, "nur": 3920, "terr": 3921, "agu": 3922, "map": 3923, "senting": 3924, "loves": 3925, "gives": 3926, "gab": 3927, "zen": 3928, "robert": 3929, "confir": 3930, "wars": 3931, "om": 3932, "stain": 3933, "camera": 3934, "ander": 3935, "wonder": 3936, "ab": 3937, "cap": 3938, "sold": 3939, "suit": 3940, "walking": 3941, "continue": 3942, "effec": 3943, "daughter": 3944, "danc": 3945, "chain": 3946, "multi": 3947, "kid": 3948, "yan": 3949, "champion": 3950, "vo": 3951, "tains": 3952, "host": 3953, "mini": 3954, "missed": 3955, "resc": 3956, "lyn": 3957, "finish": 3958, "delicious": 3959, "sas": 3960, "taylor": 3961, "ib": 3962, "promis": 3963, "products": 3964, "mountain": 3965, "florida": 3966, "register": 3967, "treat": 3968, "recent": 3969, "female": 3970, "booth": 3971, "matt": 3972, "vehic": 3973, "sop": 3974, "motor": 3975, "supporting": 3976, "phic": 3977, "extre": 3978, "drink": 3979, "lane": 3980, "third": 3981, "ps": 3982, "constru": 3983, "cere": 3984, "farm": 3985, "ðŁİī": 3986, "tured": 3987, "ðŁijī": 3988, "cats": 3989, "aj": 3990, "gie": 3991, "shooting": 3992, "asked": 3993, "pakistan": 3994, "ame": 3995, "mb": 3996, "gil": 3997, "legal": 3998, "square": 3999, "invol": 4000, "draw": 4001, "oooo": 4002, "!!!!": 4003, "opportunity": 4004, "py": 4005, "ei": 4006, "bts": 4007, "teacher": 4008, "character": 4009, "johnson": 4010, "bron": 4011, "lywood": 4012, "chine": 4013, "cing": 4014, "cine": 4015, "dge": 4016, "gaming": 4017, "russia": 4018, "cia": 4019, "quote": 4020, "rich": 4021, "gov": 4022, "flowers": 4023, "spiri": 4024, "stin": 4025, "growth": 4026, "ðŁı¼": 4027, "commer": 4028, "juni": 4029, "mum": 4030, "ran": 4031, "sna": 4032, "aren": 4033, "cb": 4034, "actor": 4035, "color": 4036, "sit": 4037, "pair": 4038, "chi": 4039, "bow": 4040, "academy": 4041, "held": 4042, "rang": 4043, "metal": 4044, "yl": 4045, "active": 4046, "probably": 4047, "tch": 4048, "needed": 4049, "spee": 4050, "choice": 4051, "italy": 4052, "ryan": 4053, "ðŁĩº": 4054, "flower": 4055, "vit": 4056, "mn": 4057, "foundation": 4058, "bak": 4059, "sions": 4060, "neigh": 4061, "floo": 4062, "heard": 4063, "remo": 4064, "fresh": 4065, "inging": 4066, "ref": 4067, "town": 4068, "clou": 4069, "jesus": 4070, "spirit": 4071, "couldn": 4072, 
"zes": 4073, "ðŁĴĻ": 4074, "williams": 4075, "proce": 4076, "modern": 4077, "process": 4078, "shoes": 4079, "created": 4080, "tric": 4081, "issues": 4082, "anne": 4083, "atten": 4084, "debut": 4085, "hr": 4086, "nit": 4087, "stig": 4088, "apo": 4089, "eps": 4090, "zu": 4091, "ãĢ": 4092, "six": 4093, "cards": 4094, "langu": 4095, "famous": 4096, "tournament": 4097, "sel": 4098, "ebay": 4099, "yn": 4100, "ston": 4101, "kick": 4102, "announced": 4103, "kam": 4104, "voc": 4105, "brilliant": 4106, "house": 4107, "cheese": 4108, "warri": 4109, "music": 4110, "hockey": 4111, "ðŁĺĤðŁĺĤ": 4112, "skills": 4113, "autom": 4114, "smart": 4115, "medical": 4116, "mony": 4117, "ex": 4118, "guar": 4119, "give": 4120, "personal": 4121, "vention": 4122, "alli": 4123, "press": 4124, "floor": 4125, "mc": 4126, "victory": 4127, "him": 4128, "simple": 4129, "thor": 4130, "ðŁĩºðŁĩ": 4131, "tail": 4132, "lucky": 4133, "alex": 4134, "quite": 4135, "bot": 4136, "ssions": 4137, "challeng": 4138, "cann": 4139, "amazon": 4140, "hell": 4141, "bought": 4142, "):": 4143, "edy": 4144, "secret": 4145, "production": 4146, "independ": 4147, "defe": 4148, "added": 4149, "pr": 4150, "pag": 4151, "bed": 4152, "greatest": 4153, "within": 4154, "jay": 4155, "ðŁ¥": 4156, "ireland": 4157, "rely": 4158, "sd": 4159, "text": 4160, "driving": 4161, "program": 4162, "speed": 4163, "colum": 4164, "stron": 4165, "é": 4166, "forest": 4167, "âĸ": 4168, "machine": 4169, "coin": 4170, "scar": 4171, "ount": 4172, "bie": 4173, "¡ï¸ı": 4174, "portra": 4175, "common": 4176, "wrest": 4177, "received": 4178, "know": 4179, "invest": 4180, "plans": 4181, "accor": 4182, "adop": 4183, "tery": 4184, "reali": 4185, "pp": 4186, "kal": 4187, "artwork": 4188, "mean": 4189, "god": 4190, "instead": 4191, "anci": 4192, "motivation": 4193, "asing": 4194, "inspiration": 4195, "upcoming": 4196, "political": 4197, "europe": 4198, "mers": 4199, "heavy": 4200, "ðŁijį": 4201, "febru": 4202, "scotland": 4203, "ough": 4204, "bt": 4205, "boss": 4206, "schedu": 4207, "speak": 4208, "nick": 4209, "ured": 4210, "ino": 4211, "ek": 4212, "risk": 4213, "tory": 4214, "presents": 4215, "bon": 4216, "rug": 4217, "states": 4218, "exhibition": 4219, "ilo": 4220, "mill": 4221, "brought": 4222, ":-)": 4223, "touri": 4224, "come": 4225, "officially": 4226, "champions": 4227, "doors": 4228, "rep": 4229, "pose": 4230, "extra": 4231, "kings": 4232, "soccer": 4233, "squad": 4234, "applic": 4235, "ata": 4236, "sometimes": 4237, "tari": 4238, "excellent": 4239, "ðŁĺĺ": 4240, "straight": 4241, "carol": 4242, "rip": 4243, "âĢį": 4244, "graphic": 4245, "mol": 4246, "election": 4247, "february": 4248, "asons": 4249, "li": 4250, "dir": 4251, "mt": 4252, "nick": 4253, "usu": 4254, "mrs": 4255, "comics": 4256, "institu": 4257, "corpor": 4258, "vi": 4259, "ðŁĻı": 4260, "tural": 4261, "dise": 4262, "acci": 4263, "weare": 4264, "among": 4265, "shopping": 4266, "till": 4267, "what": 4268, "chair": 4269, "span": 4270, "chinese": 4271, "innovation": 4272, "joy": 4273, "kit": 4274, "century": 4275, "obama": 4276, "phili": 4277, "fc": 4278, "reach": 4279, "citi": 4280, "ulous": 4281, "non": 4282, "dang": 4283, "happening": 4284, "burn": 4285, "pel": 4286, "orange": 4287, "dv": 4288, "kick": 4289, "claim": 4290, "ingham": 4291, "phy": 4292, "nov": 4293, "podcast": 4294, "whi": 4295, "nights": 4296, "earlier": 4297, "bear": 4298, "lah": 4299, "exciting": 4300, "ora": 4301, "given": 4302, "slo": 4303, "memories": 4304, "continues": 4305, "product": 4306, "gho": 4307, "cd": 4308, "knows": 4309, "ðŁİī": 4310, 
"published": 4311, "discuss": 4312, "yard": 4313, "iphone": 4314, "tries": 4315, "wall": 4316, "feb": 4317, "aren": 4318, "truth": 4319, "winners": 4320, "ture": 4321, "ditional": 4322, "military": 4323, "problem": 4324, "mand": 4325, "dog": 4326, "loss": 4327, "cric": 4328, "canadi": 4329, "veter": 4330, "village": 4331, "\",": 4332, "yr": 4333, "ung": 4334, "donald": 4335, "aging": 4336, "birds": 4337, "scienti": 4338, "les": 4339, "this": 4340, "region": 4341, "tical": 4342, "itten": 4343, "ila": 4344, "ðŁĺİ": 4345, "dad": 4346, "diam": 4347, "above": 4348, "stren": 4349, "lit": 4350, "pir": 4351, "lab": 4352, "focus": 4353, "busy": 4354, "dur": 4355, "apply": 4356, "sma": 4357, "author": 4358, "aci": 4359, "execu": 4360, "domin": 4361, "rela": 4362, "jackson": 4363, "ato": 4364, "washington": 4365, "ðŁĻĮ": 4366, "kill": 4367, "popular": 4368, "cement": 4369, "road": 4370, "eating": 4371, "location": 4372, "vent": 4373, "arre": 4374, "nan": 4375, "custo": 4376, "adventure": 4377, "ordin": 4378, "sport": 4379, "ult": 4380, "lock": 4381, "question": 4382, "driver": 4383, "landsc": 4384, "oni": 4385, "kins": 4386, "pd": 4387, "jordan": 4388, "tered": 4389, "kk": 4390, "af": 4391, "child": 4392, "sp": 4393, "justin": 4394, "eni": 4395, "selling": 4396, "zo": 4397, "whit": 4398, "boston": 4399, "particip": 4400, "signing": 4401, "happened": 4402, "heat": 4403, "mam": 4404, "dreams": 4405, "lows": 4406, "graph": 4407, "theday": 4408, "heading": 4409, "bro": 4410, "blessed": 4411, "vic": 4412, "vegas": 4413, "hd": 4414, "inning": 4415, "roman": 4416, "andro": 4417, "denti": 4418, "use": 4419, "cit": 4420, "progress": 4421, "writer": 4422, "bob": 4423, "ffs": 4424, "growing": 4425, "bly": 4426, "aware": 4427, "exam": 4428, "spent": 4429, "bet": 4430, "score": 4431, "beyond": 4432, "docu": 4433, "adel": 4434, "sf": 4435, "coura": 4436, "collabor": 4437, "inc": 4438, "private": 4439, "boat": 4440, "**": 4441, "zone": 4442, "pha": 4443, "bill": 4444, "total": 4445, "planning": 4446, "towards": 4447, "places": 4448, "preview": 4449, "creative": 4450, "damn": 4451, "ideas": 4452, "seems": 4453, "poten": 4454, "saying": 4455, "display": 4456, "sw": 4457, "aqu": 4458, "louis": 4459, "bye": 4460, "lil": 4461, "email": 4462, "western": 4463, "germany": 4464, "eller": 4465, "res": 4466, "fant": 4467, "mentary": 4468, "deals": 4469, "richard": 4470, "jersey": 4471, "streng": 4472, "rad": 4473, "pizza": 4474, "mond": 4475, "ware": 4476, "lac": 4477, "gi": 4478, "archi": 4479, "cd": 4480, "yellow": 4481, "recently": 4482, "reach": 4483, "à¹": 4484, "kitchen": 4485, "designed": 4486, "try": 4487, "gal": 4488, "restaurant": 4489, "ature": 4490, "ww": 4491, "jas": 4492, "lma": 4493, "ðŁijĮ": 4494, "pain": 4495, "avo": 4496, "minute": 4497, "schol": 4498, "therap": 4499, "ticket": 4500, "dry": 4501, "japan": 4502, "ditions": 4503, "terri": 4504, "selves": 4505, "happen": 4506, "tup": 4507, "mag": 4508, "copy": 4509, "sher": 4510, "freedom": 4511, "file": 4512, "specially": 4513, "toronto": 4514, "load": 4515, "gary": 4516, "rey": 4517, "answer": 4518, "loy": 4519, "caught": 4520, "prize": 4521, "une": 4522, "fication": 4523, "niger": 4524, "syd": 4525, "touch": 4526, "feature": 4527, "jazz": 4528, "records": 4529, "himself": 4530, "dish": 4531, "rober": 4532, "spotted": 4533, "master": 4534, "wave": 4535, "finals": 4536, "bull": 4537, "forum": 4538, "ald": 4539, "recomm": 4540, "cha": 4541, "ae": 4542, "doo": 4543, "instru": 4544, "truly": 4545, "lg": 4546, "ink": 4547, "brothers": 4548, "dest": 4549, "jim": 
4550, "mit": 4551, "closed": 4552, "ison": 4553, "tried": 4554, "santa": 4555, "affe": 4556, "wan": 4557, "horse": 4558, "grow": 4559, "campus": 4560, "relation": 4561, "native": 4562, "journ": 4563, "gov": 4564, "oct": 4565, "kit": 4566, "bound": 4567, "partner": 4568, "rema": 4569, "crowd": 4570, "!)": 4571, "calls": 4572, "rail": 4573, "quali": 4574, "solution": 4575, "contest": 4576, "convers": 4577, "snap": 4578, "base": 4579, "initi": 4580, "tax": 4581, "ye": 4582, "entrepre": 4583, "itor": 4584, "construction": 4585, "food": 4586, "presented": 4587, "nings": 4588, "climate": 4589, "km": 4590, "model": 4591, "bj": 4592, "block": 4593, "presentation": 4594, "dream": 4595, "fix": 4596, "calling": 4597, "busine": 4598, "congress": 4599, "understand": 4600, "web": 4601, "value": 4602, "ï¸ıâĥ£": 4603, "mexico": 4604, "itely": 4605, "kim": 4606, "charity": 4607, "reflec": 4608, "blan": 4609, "flying": 4610, "analy": 4611, "families": 4612, "band": 4613, "recipe": 4614, "celebration": 4615, "accep": 4616, "ary": 4617, "tot": 4618, "gb": 4619, "interested": 4620, "captain": 4621, "âĻ¥": 4622, "tip": 4623, "absol": 4624, "braz": 4625, "investig": 4626, "ology": 4627, "dec": 4628, "truck": 4629, "vering": 4630, "clear": 4631, "dont": 4632, "gotta": 4633, "advis": 4634, "begins": 4635, "mass": 4636, "descri": 4637, "block": 4638, "kim": 4639, "david": 4640, "songs": 4641, "memorial": 4642, "features": 4643, "sustain": 4644, "'.": 4645, "grab": 4646, "jose": 4647, "va": 4648, "conserv": 4649, "sets": 4650, "manchester": 4651, "fighting": 4652, "degre": 4653, "aga": 4654, "ind": 4655, "sleep": 4656, "position": 4657, "hair": 4658, "signs": 4659, "policy": 4660, "ito": 4661, "alert": 4662, "stam": 4663, "spend": 4664, "wy": 4665, "absolut": 4666, "dm": 4667, "animal": 4668, "myster": 4669, "successful": 4670, "problems": 4671, "robo": 4672, "kay": 4673, "garden": 4674, "pd": 4675, "mayor": 4676, "dale": 4677, "tol": 4678, "offers": 4679, "visiting": 4680, "friendly": 4681, "trees": 4682, "officer": 4683, "account": 4684, "kevin": 4685, "ðŁijį": 4686, "giant": 4687, "continu": 4688, "consu": 4689, "tract": 4690, "nfl": 4691, "ðŁĺĬ": 4692, "hq": 4693, "bility": 4694, "aar": 4695, "disney": 4696, "teen": 4697, "oned": 4698, "white": 4699, "trailer": 4700, "dedic": 4701, "alone": 4702, "absolutely": 4703, "digital": 4704, "william": 4705, "ination": 4706, "swa": 4707, "ee": 4708, "entire": 4709, "german": 4710, "roll": 4711, "hits": 4712, "cost": 4713, "stay": 4714, "tha": 4715, "alive": 4716, "according": 4717, "cot": 4718, "literally": 4719, "herit": 4720, "reti": 4721, "hahaha": 4722, "experi": 4723, "likes": 4724, "gt": 4725, "steel": 4726, "____": 4727, "chair": 4728, "christian": 4729, "tower": 4730, "difference": 4731, "md": 4732, "tress": 4733, "mid": 4734, "prince": 4735, "african": 4736, "feder": 4737, "foot": 4738, "carri": 4739, "served": 4740, "rice": 4741, "shall": 4742, "featured": 4743, "cker": 4744, "recru": 4745, "poe": 4746, "sense": 4747, "nific": 4748, "comedy": 4749, "content": 4750, "fat": 4751, "posted": 4752, "contribu": 4753, "timate": 4754, "liver": 4755, "mble": 4756, "internet": 4757, "age": 4758, "european": 4759, "cling": 4760, "glad": 4761, "ffic": 4762, "sco": 4763, "akes": 4764, "elle": 4765, "termin": 4766, "tony": 4767, "pale": 4768, "colour": 4769, "serious": 4770, "patri": 4771, "movies": 4772, "bm": 4773, "professional": 4774, "ado": 4775, "alu": 4776, "bringing": 4777, "falls": 4778, "israel": 4779, "term": 4780, "language": 4781, "brook": 4782, "mann": 4783, 
"communic": 4784, "cannot": 4785, "acti": 4786, "phe": 4787, "yan": 4788, "entreprene": 4789, "turkey": 4790, "logical": 4791, "long": 4792, "arm": 4793, "urs": 4794, "workers": 4795, "ingly": 4796, "ggs": 4797, "ric": 4798, "tual": 4799, "receive": 4800, "opens": 4801, "gear": 4802, "social": 4803, "feet": 4804, "cking": 4805, "adver": 4806, "finan": 4807, "feels": 4808, "spla": 4809, "hr": 4810, "easter": 4811, "brain": 4812, "ãģ": 4813, "fig": 4814, "ledge": 4815, "nearly": 4816, "protect": 4817, "massive": 4818, "eth": 4819, "awa": 4820, "ðŁĺģ": 4821, "yrs": 4822, "awareness": 4823, "definitely": 4824, "kn": 4825, "imagine": 4826, "ku": 4827, "systems": 4828, "ðŁijı": 4829, "fas": 4830, "lik": 4831, "provide": 4832, "amo": 4833, "discover": 4834, "influ": 4835, "maker": 4836, "gaz": 4837, "fitness": 4838, "street": 4839, "ers": 4840, "ted": 4841, "wc": 4842, "ysis": 4843, "positive": 4844, "helped": 4845, "quest": 4846, "andrew": 4847, "brad": 4848, "bin": 4849, "hanging": 4850, "ling": 4851, "bright": 4852, "section": 4853, "mass": 4854, "ðŁĻĮ": 4855, "followers": 4856, "hosting": 4857, "tempor": 4858, "flag": 4859, "ave": 4860, "letter": 4861, "kur": 4862, "requi": 4863, "often": 4864, "cryp": 4865, "suff": 4866, "âļ½": 4867, "russian": 4868, "treatment": 4869, "alle": 4870, "hay": 4871, "lan": 4872, "keeping": 4873, "holy": 4874, "powerful": 4875, "predic": 4876, "fund": 4877, "especially": 4878, "window": 4879, "jewel": 4880, "ily": 4881, "ðŁĴľ": 4882, "generation": 4883, "appa": 4884, "seriously": 4885, "od": 4886, "ðŁĺĤðŁĺĤðŁĺĤ": 4887, "certi": 4888, "irish": 4889, "ðŁijĮ": 4890, "miami": 4891, "beth": 4892, "vity": 4893, "secu": 4894, "chef": 4895, "crime": 4896, "graphy": 4897, "max": 4898, "artists": 4899, "revolu": 4900, "guard": 4901, "speech": 4902, "uc": 4903, "updates": 4904, "faces": 4905, "stant": 4906, "changed": 4907, "reports": 4908, "lower": 4909, "pear": 4910, "nc": 4911, "kil": 4912, "looked": 4913, "speaker": 4914, "sf": 4915, "respect": 4916, "okay": 4917, "ocean": 4918, "sitting": 4919, "architecture": 4920, "trail": 4921, "seat": 4922, "ira": 4923, "leg": 4924, "japanese": 4925, "dam": 4926, "ular": 4927, "swim": 4928, "politics": 4929, "financial": 4930, "old": 4931, "mouth": 4932, "attemp": 4933, "destin": 4934, "fishing": 4935, "attention": 4936, "mem": 4937, "changes": 4938, "decided": 4939, "religi": 4940, "gin": 4941, "cav": 4942, "zz": 4943, "adam": 4944, "mac": 4945, "write": 4946, "begin": 4947, "scul": 4948, "alter": 4949, "iss": 4950, "athon": 4951, "images": 4952, "moo": 4953, "joined": 4954, "ðŁĺī": 4955, "âŀ¡ï¸ı": 4956, "passed": 4957, "musli": 4958, "hir": 4959, "largest": 4960, "camer": 4961, "comic": 4962, "ghted": 4963, "rugby": 4964, "burgh": 4965, "gging": 4966, "testing": 4967, "prepar": 4968, "laugh": 4969, "aled": 4970, "improve": 4971, "believ": 4972, "advice": 4973, "shares": 4974, "heart": 4975, "turning": 4976, "sb": 4977, "tel": 4978, "cafe": 4979, "nes": 4980, "daniel": 4981, "patter": 4982, "tz": 4983, "sett": 4984, "park": 4985, "cand": 4986, "stick": 4987, "happens": 4988, "brian": 4989, "newest": 4990, "epic": 4991, "ador": 4992, "kies": 4993, "warning": 4994, "animals": 4995, "custom": 4996, "arc": 4997, "dian": 4998, "gold": 4999, "core": 5000, "tf": 5001, "city": 5002, "pants": 5003, "reality": 5004, "confi": 5005, "inju": 5006, "fox": 5007, "guil": 5008, "knew": 5009, "âĺº": 5010, "correc": 5011, "itude": 5012, "dden": 5013, ".#": 5014, "reduc": 5015, "pass": 5016, "fon": 5017, "ya": 5018, "owner": 5019, "returns": 5020, 
"nc": 5021, "east": 5022, "apol": 5023, "insur": 5024, "tho": 5025, "sim": 5026, "junior": 5027, "bee": 5028, "angel": 5029, "attle": 5030, "electric": 5031, "horror": 5032, "crash": 5033, "eye": 5034, "path": 5035, "southern": 5036, "employe": 5037, "geo": 5038, "tan": 5039, "haz": 5040, "rally": 5041, "ðŁı»": 5042, "property": 5043, "wasn": 5044, "enjoyed": 5045, "grey": 5046, "gas": 5047, "brew": 5048, "northern": 5049, "holding": 5050, "gp": 5051, "take": 5052, "chart": 5053, "lyn": 5054, "drama": 5055, "zo": 5056, "paid": 5057, "throwback": 5058, "cup": 5059, "discussion": 5060, "downtown": 5061, "will": 5062, "lew": 5063, "bis": 5064, "tary": 5065, "bread": 5066, "upon": 5067, "rate": 5068, "teachers": 5069, "itation": 5070, "anced": 5071, "cycle": 5072, "choose": 5073, "dc": 5074, "iran": 5075, "cow": 5076, "dave": 5077, "raise": 5078, "princess": 5079, "faith": 5080, "->": 5081, "industri": 5082, "spain": 5083, "guitar": 5084, "facts": 5085, "mn": 5086, "spen": 5087, "courte": 5088, "gott": 5089, "projects": 5090, "audi": 5091, "osc": 5092, "peter": 5093, "sand": 5094, "interest": 5095, "happiness": 5096, "venue": 5097, "soldi": 5098, "surprise": 5099, "potential": 5100, "perio": 5101, "customer": 5102, "ii": 5103, "gni": 5104, "manufac": 5105, "eco": 5106, "broken": 5107, "singer": 5108, "vels": 5109, "wales": 5110, "hus": 5111, "inj": 5112, "four": 5113, "talent": 5114, "dying": 5115, "matthe": 5116, "film": 5117, "joining": 5118, "sell": 5119, "jar": 5120, "lmao": 5121, "surger": 5122, "bbc": 5123, "sources": 5124, "austin": 5125, "nik": 5126, "charles": 5127, "fam": 5128, "princi": 5129, "angel": 5130, "cash": 5131, "lot": 5132, "ored": 5133, "plays": 5134, "plate": 5135, "done": 5136, "memory": 5137, "brings": 5138, "nba": 5139, "solutions": 5140, "teaching": 5141, "grace": 5142, "circu": 5143, "helps": 5144, "founder": 5145, "mary": 5146, "explore": 5147, "decor": 5148, "parts": 5149, "cho": 5150, "integr": 5151, "hau": 5152, "ises": 5153, "putting": 5154, "iner": 5155, "rit": 5156, "vy": 5157, "michel": 5158, "blues": 5159, "everyday": 5160, "forms": 5161, "bio": 5162, "year": 5163, "pin": 5164, "tter": 5165, "spring": 5166, "))": 5167, "pot": 5168, "aling": 5169, "performing": 5170, "shan": 5171, "planet": 5172, "musical": 5173, "heads": 5174, "italian": 5175, "strugg": 5176, "âĢįâĻ": 5177, "wings": 5178, "pump": 5179, "hh": 5180, "trou": 5181, "aid": 5182, "prime": 5183, "earth": 5184, "paint": 5185, "mont": 5186, "amy": 5187, "bbc": 5188, "fabulous": 5189, "fruit": 5190, "android": 5191, "bourne": 5192, "ceremony": 5193, "ential": 5194, "??": 5195, "debate": 5196, "oning": 5197, "draft": 5198, "solar": 5199, "tx": 5200, "jam": 5201, "corn": 5202, "!!!!!": 5203, "broo": 5204, "milk": 5205, "posed": 5206, "ohi": 5207, "movement": 5208, "bren": 5209, "partner": 5210, "pg": 5211, "ette": 5212, "aries": 5213, "shout": 5214, "ng": 5215, "leaving": 5216, "tells": 5217, "sens": 5218, "taste": 5219, "kelly": 5220, "worl": 5221, "gym": 5222, "rich": 5223, "egy": 5224, "pid": 5225, "mas": 5226, "âĤ": 5227, "courtesy": 5228, "frank": 5229, "increase": 5230, "written": 5231, "ppers": 5232, "rel": 5233, "hai": 5234, "sas": 5235, "sound": 5236, "tti": 5237, "wich": 5238, "river": 5239, "...\"": 5240, "ag": 5241, "fellow": 5242, "rome": 5243, "small": 5244, "gency": 5245, "ican": 5246, "luxury": 5247, "proof": 5248, "met": 5249, "wildlife": 5250, "moments": 5251, "rather": 5252, "corner": 5253, "compe": 5254, "canadian": 5255, "likely": 5256, "therapy": 5257, "liam": 5258, "economic": 
5259, "indie": 5260, "route": 5261, "fight": 5262, "hope": 5263, "setting": 5264, "antly": 5265, "cross": 5266, "fantasy": 5267, "dee": 5268, "sketch": 5269, "compli": 5270, "ymi": 5271, "rules": 5272, "engineering": 5273, "figure": 5274, "row": 5275, ".,": 5276, "fw": 5277, "sydney": 5278, "wou": 5279, "tation": 5280, "drew": 5281, "uses": 5282, "there": 5283, "spread": 5284, "structure": 5285, "patrick": 5286, "apparently": 5287, "ros": 5288, "hills": 5289, "wwe": 5290, "anny": 5291, "commission": 5292, "div": 5293, "fying": 5294, "consul": 5295, "analysis": 5296, "exi": 5297, "tennis": 5298, "vehicle": 5299, "ðŁĺŃðŁĺŃ": 5300, "ass": 5301, "highly": 5302, "opened": 5303, "bann": 5304, "ðŁĴĻ": 5305, "mph": 5306, "wishing": 5307, "vor": 5308, "fif": 5309, "giveaway": 5310, "rr": 5311, "ray": 5312, "jess": 5313, "gat": 5314, "icymi": 5315, "xit": 5316, "highest": 5317, "york": 5318, "pie": 5319, "involved": 5320, "higher": 5321, "rie": 5322, "malay": 5323, "intelli": 5324, "despite": 5325, "chee": 5326, "sarah": 5327, "bean": 5328, "recogni": 5329, "arsen": 5330, "talented": 5331, "passion": 5332, "ich": 5333, "abc": 5334, "leads": 5335, "disease": 5336, "vis": 5337, "sec": 5338, "presenting": 5339, "milli": 5340, "hole": 5341, "shots": 5342, "depart": 5343, "surgery": 5344, "govt": 5345, "bin": 5346, "dual": 5347, "evi": 5348, "longer": 5349, "evol": 5350, "screen": 5351, "portrait": 5352, "etc": 5353, "lose": 5354, "chat": 5355, "pen": 5356, "pi": 5357, "oma": 5358, "sick": 5359, "erc": 5360, "companies": 5361, "entry": 5362, "plane": 5363, "gry": 5364, "vene": 5365, "liverpool": 5366, "premiere": 5367, "shared": 5368, "ared": 5369, "films": 5370, "ira": 5371, "holidays": 5372, "cricket": 5373, "ician": 5374, "ving": 5375, ".)": 5376, "ultimate": 5377, "division": 5378, "conduc": 5379, "sept": 5380, "forces": 5381, "mont": 5382, "smart": 5383, "disapp": 5384, "sunshine": 5385, "ind": 5386, "bless": 5387, "made": 5388, "colors": 5389, "frank": 5390, "iron": 5391, "bottle": 5392, "sgo": 5393, "mood": 5394, "jason": 5395, "eric": 5396, "birth": 5397, "teen": 5398, "response": 5399, "target": 5400, "statement": 5401, "fear": 5402, "thel": 5403, "alum": 5404, "arab": 5405, "blin": 5406, "direction": 5407, "steps": 5408, "erial": 5409, "worked": 5410, "atl": 5411, "ðŁĴķ": 5412, "felt": 5413, "poli": 5414, "scenes": 5415, "homes": 5416, "bell": 5417, "eat": 5418, "ateful": 5419, "tin": 5420, "lace": 5421, "folks": 5422, "pse": 5423, "ann": 5424, "wisdom": 5425, "fav": 5426, "butter": 5427, "sr": 5428, "areas": 5429, "smoo": 5430, "biz": 5431, "dges": 5432, "appo": 5433, "more": 5434, "them": 5435, "effect": 5436, "windows": 5437, "sunny": 5438, "capital": 5439, "totally": 5440, "cities": 5441, "grant": 5442, "mbers": 5443, "slow": 5444, "autu": 5445, "ilities": 5446, "wro": 5447, "rising": 5448, "stics": 5449, "violence": 5450, "igh": 5451, "quot": 5452, "hit": 5453, "tc": 5454, "heritage": 5455, "buff": 5456, "nes": 5457, "zar": 5458, "dential": 5459, "exac": 5460, "edge": 5461, "deep": 5462, "arena": 5463, "became": 5464, "benefits": 5465, "marks": 5466, "mber": 5467, "az": 5468, "ames": 5469, "preci": 5470, "dragon": 5471, "reg": 5472, "dings": 5473, "dos": 5474, "ðŁĴª": 5475, "nel": 5476, "sity": 5477, "meal": 5478, "dist": 5479, "legend": 5480, "purchase": 5481, "pical": 5482, "stick": 5483, "fat": 5484, "duba": 5485, "profess": 5486, "carto": 5487, "prof": 5488, "countries": 5489, "responsi": 5490, "sequ": 5491, "fab": 5492, "tribute": 5493, "honored": 5494, "practic": 5495, "purple": 
5496, "anton": 5497, "pared": 5498, "tough": 5499, "summer": 5500, "environment": 5501, "sons": 5502, "ðŁĻı": 5503, "mps": 5504, "gies": 5505, "heroes": 5506, "telling": 5507, "henry": 5508, "fen": 5509, "knowledge": 5510, "Ģï¸ı": 5511, "fr": 5512, "neg": 5513, "ure": 5514, "acking": 5515, "hearts": 5516, "soo": 5517, "hollywood": 5518, "jump": 5519, "sauce": 5520, "schedule": 5521, "turn": 5522, "yoga": 5523, "creating": 5524, "cket": 5525, "creek": 5526, "âŃ": 5527, "customers": 5528, "madri": 5529, "gul": 5530, "assemb": 5531, "mount": 5532, "cell": 5533, "top": 5534, "stal": 5535, "davis": 5536, "twi": 5537, "sign": 5538, "premier": 5539, "itions": 5540, "hearing": 5541, "unk": 5542, "patients": 5543, "appear": 5544, "heaven": 5545, "alty": 5546, "doctor": 5547, "ae": 5548, "platform": 5549, "jeff": 5550, "ðŁĵ·": 5551, "regional": 5552, "bid": 5553, "boxing": 5554, "exten": 5555, "ority": 5556, "aw": 5557, "wise": 5558, "ille": 5559, "several": 5560, "bie": 5561, "situ": 5562, "syria": 5563, "âľħ": 5564, "reminder": 5565, "entertain": 5566, "lion": 5567, "partners": 5568, "inn": 5569, "phar": 5570, "fau": 5571, "pls": 5572, "expected": 5573, "sugar": 5574, "decision": 5575, "sb": 5576, "chron": 5577, "association": 5578, "leaves": 5579, "visited": 5580, "shap": 5581, "ðŁĴĸ": 5582, "further": 5583, "hann": 5584, "wi": 5585, "runs": 5586, "ler": 5587, "funding": 5588, "filled": 5589, "......": 5590, "tiny": 5591, "hang": 5592, "org": 5593, "cool": 5594, "semin": 5595, "ðŁıĨ": 5596, "spons": 5597, "navy": 5598, "saint": 5599, "drug": 5600, "dal": 5601, "roun": 5602, "covered": 5603, "traditional": 5604, "investment": 5605, "dete": 5606, "alism": 5607, "flow": 5608, "nis": 5609, "sunrise": 5610, "feat": 5611, "fted": 5612, "weird": 5613, "jere": 5614, "vegan": 5615, "medicine": 5616, "ano": 5617, "accu": 5618, "delivery": 5619, "temple": 5620, "changing": 5621, "wilson": 5622, "philipp": 5623, "refe": 5624, "nd": 5625, "iser": 5626, "gay": 5627, "rand": 5628, "atives": 5629, "tely": 5630, "pand": 5631, "intellig": 5632, "gare": 5633, "ambas": 5634, "demon": 5635, "committee": 5636, "strategy": 5637, "refuge": 5638, "budget": 5639, "protec": 5640, "pier": 5641, "express": 5642, "nomin": 5643, "economy": 5644, "allow": 5645, "icon": 5646, "galax": 5647, "oh": 5648, "indivi": 5649, "demand": 5650, "virgin": 5651, "luke": 5652, "alists": 5653, "mani": 5654, "smi": 5655, "judge": 5656, "enty": 5657, "michi": 5658, "result": 5659, "amed": 5660, "speaks": 5661, "',": 5662, "houston": 5663, "shin": 5664, "bing": 5665, "fly": 5666, "chem": 5667, "auto": 5668, "vas": 5669, "get": 5670, "arm": 5671, "thanks": 5672, "din": 5673, "gang": 5674, "xx": 5675, "sion": 5676, "located": 5677, "pl": 5678, "josh": 5679, "info": 5680, "joins": 5681, "adverti": 5682, "otd": 5683, "eld": 5684, "sie": 5685, "reasons": 5686, "vent": 5687, "ðŁĩºðŁĩ¸": 5688, "âł": 5689, "conversation": 5690, "studi": 5691, "ðŁĶ¥ðŁĶ¥": 5692, "gos": 5693, "sounds": 5694, "unit": 5695, "musc": 5696, "gel": 5697, "acked": 5698, "paci": 5699, "cos": 5700, "dere": 5701, "uu": 5702, "ao": 5703, "lam": 5704, "inspiring": 5705, "arms": 5706, "tware": 5707, "matters": 5708, "addic": 5709, "dude": 5710, "ext": 5711, "crisis": 5712, "bath": 5713, "meet": 5714, "singh": 5715, "expect": 5716, "delhi": 5717, "rescue": 5718, "worst": 5719, "aug": 5720, "shipping": 5721, "serving": 5722, "sto": 5723, "dark": 5724, "aces": 5725, "historic": 5726, "landscape": 5727, "designer": 5728, "billion": 5729, "grateful": 5730, "wake": 5731, "eve": 5732, 
"miller": 5733, "housing": 5734, "dynam": 5735, "isco": 5736, "beha": 5737, "shop": 5738, "prou": 5739, "eas": 5740, "asia": 5741, "eding": 5742, "kon": 5743, "department": 5744, "awar": 5745, "marine": 5746, "inci": 5747, "photographer": 5748, "tape": 5749, "logo": 5750, "rings": 5751, "dit": 5752, "----": 5753, "vinyl": 5754, "wc": 5755, "voting": 5756, "seven": 5757, "ambassad": 5758, "dallas": 5759, "tu": 5760, "comment": 5761, "kra": 5762, "bles": 5763, "wag": 5764, "ud": 5765, "audio": 5766, "strike": 5767, "official": 5768, "ots": 5769, "metho": 5770, "tools": 5771, "radi": 5772, "alan": 5773, "hunt": 5774, "watched": 5775, "ake": 5776, "fake": 5777, "drinking": 5778, "merry": 5779, "ml": 5780, "bday": 5781, "rio": 5782, "nike": 5783, "cant": 5784, "repe": 5785, "costu": 5786, "murder": 5787, "akers": 5788, "chers": 5789, "outs": 5790, "beginning": 5791, "sos": 5792, "ades": 5793, "nin": 5794, "notes": 5795, "wrote": 5796, "solo": 5797, "ci": 5798, "lighting": 5799, "urban": 5800, "brexit": 5801, "attend": 5802, "shirts": 5803, "playo": 5804, "actress": 5805, "plic": 5806, "standard": 5807, "quotes": 5808, "parade": 5809, "ancient": 5810, "©": 5811, "turing": 5812, "ree": 5813, "primary": 5814, "flash": 5815, "citiz": 5816, "mates": 5817, "stein": 5818, "zi": 5819, "clinton": 5820, "skin": 5821, "gene": 5822, "hum": 5823, "gar": 5824, "tle": 5825, "yi": 5826, "focu": 5827, "dean": 5828, "plants": 5829, "cyber": 5830, "bu": 5831, "ome": 5832, "hop": 5833, "address": 5834, "tix": 5835, "gifts": 5836, "relationship": 5837, "subscri": 5838, "feed": 5839, "exactly": 5840, "hawks": 5841, "exo": 5842, "stress": 5843, "sn": 5844, "arrested": 5845, "ane": 5846, "software": 5847, "zero": 5848, "theme": 5849, "mumb": 5850, "immigr": 5851, "mia": 5852, "makeup": 5853, "pleasure": 5854, "univers": 5855, "harb": 5856, "engine": 5857, "aper": 5858, "rin": 5859, "bra": 5860, "institute": 5861, "leather": 5862, "alth": 5863, "singing": 5864, "cos": 5865, "ghty": 5866, "meas": 5867, "stic": 5868, "side": 5869, "insurance": 5870, "cot": 5871, "pitch": 5872, "mountains": 5873, "crimin": 5874, "supre": 5875, "valentine": 5876, "ater": 5877, "wouldn": 5878, "scale": 5879, "related": 5880, "regar": 5881, "startup": 5882, "packed": 5883, "mike": 5884, "weekly": 5885, "pts": 5886, "count": 5887, "har": 5888, "gotten": 5889, "mind": 5890, "berlin": 5891, "conditions": 5892, "switch": 5893, "corn": 5894, "save": 5895, "gli": 5896, "emergency": 5897, "tuned": 5898, "stock": 5899, "discussing": 5900, "everybody": 5901, "sday": 5902, "whether": 5903, "wrestling": 5904, "eces": 5905, "gender": 5906, "chen": 5907, "ðŁijĢ": 5908, "madrid": 5909, "marathon": 5910, "egg": 5911, "ier": 5912, "thx": 5913, "asking": 5914, "korea": 5915, "wolf": 5916, "aya": 5917, "gm": 5918, "gau": 5919, "atory": 5920, "vr": 5921, "grass": 5922, "killing": 5923, "bble": 5924, "uro": 5925, "uni": 5926, "eth": 5927, "shore": 5928, "then": 5929, "reale": 5930, "bottom": 5931, "exerc": 5932, "kar": 5933, "ories": 5934, "adri": 5935, "sands": 5936, "sex": 5937, ".'": 5938, "volunteers": 5939, "perform": 5940, "parliam": 5941, "include": 5942, "delighted": 5943, "executive": 5944, "fuel": 5945, "kiss": 5946, "ãħ": 5947, "charge": 5948, "hu": 5949, "cakes": 5950, "vet": 5951, "glu": 5952, "agree": 5953, "prices": 5954, "nau": 5955, "hl": 5956, "gru": 5957, "raj": 5958, "strength": 5959, "bic": 5960, "spending": 5961, "ales": 5962, "aven": 5963, "blast": 5964, ":(": 5965, "yof": 5966, "normal": 5967, "six": 5968, "quick": 5969, "sea": 5970, 
"daw": 5971, "meets": 5972, "lovers": 5973, "updated": 5974, "potat": 5975, "completed": 5976, "cook": 5977, "opportunities": 5978, "pure": 5979, "organic": 5980, "temper": 5981, "cam": 5982, "avoid": 5983, "parking": 5984, "dubai": 5985, "ando": 5986, "distri": 5987, "toy": 5988, "completely": 5989, "donald": 5990, "trial": 5991, "bass": 5992, "boun": 5993, "background": 5994, "vas": 5995, "marvel": 5996, "lum": 5997, "rus": 5998, "tool": 5999, "commissi": 6000, "throwback": 6001, "finding": 6002, "islam": 6003, "!?": 6004, "stop": 6005, "evil": 6006, "oral": 6007, "residents": 6008, "identi": 6009, "oak": 6010, "ðŁİ¶": 6011, "lil": 6012, "spanish": 6013, "chapter": 6014, "stopped": 6015, "direct": 6016, "hosted": 6017, "picked": 6018, "labour": 6019, "lewis": 6020, "defense": 6021, "à®": 6022, "healthcare": 6023, "whis": 6024, "math": 6025, "peak": 6026, "raised": 6027, "fix": 6028, "bull": 6029, "thir": 6030, "chelsea": 6031, "folk": 6032, "tre": 6033, "candi": 6034, "paul": 6035, "either": 6036, "adam": 6037, "poetry": 6038, "jewelry": 6039, "ð٦": 6040, "pray": 6041, "ا": 6042, "gc": 6043, "oz": 6044, "wishes": 6045, "foreign": 6046, "sung": 6047, "learned": 6048, "ene": 6049, "ning": 6050, "michael": 6051, "illustration": 6052, "legendary": 6053, "wav": 6054, "bau": 6055, "ðŁļ¨": 6056, "calend": 6057, "streets": 6058, "âĨ": 6059, "monster": 6060, "buck": 6061, "gr": 6062, "school": 6063, "bath": 6064, "waste": 6065, "neck": 6066, "hawa": 6067, "beach": 6068, "replac": 6069, "ject": 6070, "oner": 6071, "factory": 6072, "count": 6073, "ðŁĵ¸": 6074, "morgan": 6075, "dering": 6076, "sean": 6077, "stephen": 6078, "dep": 6079, "novel": 6080, "videos": 6081, "ical": 6082, "pressure": 6083, "arsenal": 6084, "expre": 6085, "irs": 6086, "trending": 6087, "ssa": 6088, "flash": 6089, "resear": 6090, "through": 6091, "professor": 6092, "sculp": 6093, "tos": 6094, "gged": 6095, "mma": 6096, "bee": 6097, "ape": 6098, "hunter": 6099, "ami": 6100, "hei": 6101, "plastic": 6102, "bucks": 6103, "universe": 6104, "legen": 6105, "nigeria": 6106, "pleased": 6107, "ris": 6108, "thinks": 6109, "autumn": 6110, "ids": 6111, "dis": 6112, "anthony": 6113, "ðŁı½": 6114, "aked": 6115, "glasses": 6116, "finance": 6117, "zer": 6118, "kas": 6119, "contract": 6120, "numbers": 6121, "shaw": 6122, "partnership": 6123, "til": 6124, "launched": 6125, "sal": 6126, "victoria": 6127, "theater": 6128, "usual": 6129, "names": 6130, "period": 6131, "eliza": 6132, "ith": 6133, "barcel": 6134, "rocks": 6135, "bags": 6136, "mate": 6137, "distribu": 6138, "jon": 6139, "diffic": 6140, "alized": 6141, "curren": 6142, "scored": 6143, "bha": 6144, "dublin": 6145, "rose": 6146, "inted": 6147, "solid": 6148, "behavi": 6149, "walker": 6150, "simply": 6151, "gardens": 6152, "headed": 6153, "ini": 6154, "ohio": 6155, "weap": 6156, "fo": 6157, "glen": 6158, "estate": 6159, "random": 6160, "thunder": 6161, "thru": 6162, "kill": 6163, "jacket": 6164, "iti": 6165, "entertainment": 6166, "thanksgiving": 6167, "ental": 6168, "encoura": 6169, "elo": 6170, "ather": 6171, "tank": 6172, "highlights": 6173, "fting": 6174, "rule": 6175, "models": 6176, "border": 6177, "bjp": 6178, "husband": 6179, "indone": 6180, "kenya": 6181, "bears": 6182, "alo": 6183, "ninten": 6184, "pix": 6185, "stro": 6186, "orders": 6187, "salad": 6188, "roads": 6189, "nor": 6190, "lation": 6191, "sophi": 6192, "ðŁı¼": 6193, "pieces": 6194, "bone": 6195, "mins": 6196, "includes": 6197, "nutr": 6198, "phil": 6199, "sent": 6200, "fundra": 6201, "gain": 6202, "borough": 6203, 
"nad": 6204, "monday": 6205, "activity": 6206, "items": 6207, "becoming": 6208, "kenne": 6209, "detro": 6210, "cardi": 6211, "guests": 6212, "ux": 6213, "worldwide": 6214, "severe": 6215, "news": 6216, "thankful": 6217, "fiction": 6218, "vege": 6219, "mall": 6220, "sian": 6221, "eral": 6222, "injury": 6223, "lee": 6224, "menu": 6225, "dancing": 6226, "scotti": 6227, "example": 6228, "(#": 6229, "nai": 6230, "studios": 6231, "bai": 6232, "ðŁĴĽ": 6233, "jav": 6234, "diamond": 6235, "vince": 6236, "rick": 6237, "protection": 6238, "lincol": 6239, "champs": 6240, "approach": 6241, "dar": 6242, "mile": 6243, "clouds": 6244, "jeff": 6245, "infin": 6246, "lers": 6247, "ples": 6248, "peace": 6249, "gop": 6250, "âĻ¡": 6251, "techn": 6252, "stra": 6253, "average": 6254, "effort": 6255, "introducing": 6256, "diversity": 6257, "australian": 6258, "amp": 6259, "boost": 6260, "ske": 6261, "patient": 6262, "appreciate": 6263, "icians": 6264, "pur": 6265, "fell": 6266, "woods": 6267, "illustr": 6268, "ðŁĸ": 6269, "agency": 6270, "actions": 6271, "britain": 6272, "underway": 6273, "seattle": 6274, "eland": 6275, "ago": 6276, "fill": 6277, "streaming": 6278, "protest": 6279, "challenges": 6280, "kyo": 6281, "etsy": 6282, "cooking": 6283, "expert": 6284, "russ": 6285, "rainbow": 6286, "commercial": 6287, "spin": 6288, "beats": 6289, "cry": 6290, "valu": 6291, "eli": 6292, "throw": 6293, "grams": 6294, "levels": 6295, "michigan": 6296, "cad": 6297, "adorable": 6298, "constitu": 6299, "ws": 6300, "pub": 6301, "midnight": 6302, "that": 6303, "netfli": 6304, "brazil": 6305, "diego": 6306, "regular": 6307, "joy": 6308, "âĤ¬": 6309, "liqu": 6310, "eastern": 6311, "kni": 6312, "flat": 6313, "np": 6314, "brown": 6315, "wer": 6316, "sey": 6317, "tters": 6318, "acting": 6319, "vanc": 6320, "cycling": 6321, "programme": 6322, "raw": 6323, "complex": 6324, "tattoo": 6325, "throwbackthursday": 6326, "sessions": 6327, "rooms": 6328, "sight": 6329, "species": 6330, "bomb": 6331, "laugh": 6332, "keeps": 6333, "moon": 6334, "officers": 6335, "conver": 6336, "tr": 6337, "hash": 6338, "tack": 6339, "rious": 6340, "adap": 6341, "aj": 6342, "recogn": 6343, "expo": 6344, "sugge": 6345, "confirmed": 6346, "rolling": 6347, "dressing": 6348, "ict": 6349, "friday": 6350, "phones": 6351, "ridge": 6352, "concept": 6353, "roy": 6354, "keys": 6355, "effor": 6356, "cate": 6357, "kne": 6358, "even": 6359, "lay": 6360, "communities": 6361, "mod": 6362, "naz": 6363, "everywhere": 6364, "alab": 6365, "bitcoin": 6366, "banks": 6367, "outdoor": 6368, "federal": 6369, "stores": 6370, "hp": 6371, "cal": 6372, "mely": 6373, "signific": 6374, "bear": 6375, "republic": 6376, "closer": 6377, "allah": 6378, "pick": 6379, "xd": 6380, "palace": 6381, "chill": 6382, "bam": 6383, "erous": 6384, "una": 6385, "allen": 6386, "outstanding": 6387, "olympic": 6388, "supply": 6389, "figu": 6390, "vau": 6391, "lp": 6392, "charlie": 6393, "unes": 6394, ">>>": 6395, "legends": 6396, "icial": 6397, "coast": 6398, "benefit": 6399, "multi": 6400, "fits": 6401, "farmers": 6402, "amount": 6403, "sisters": 6404, "harve": 6405, "honey": 6406, "queen": 6407, "bers": 6408, "plann": 6409, "âŃIJ": 6410, "mu": 6411, "barcelona": 6412, "alber": 6413, "status": 6414, "remain": 6415, "extra": 6416, "candy": 6417, "vious": 6418, "âľĮ": 6419, "ov": 6420, "warriors": 6421, "-->": 6422, "jump": 6423, "amar": 6424, "xmas": 6425, "studies": 6426, "iors": 6427, "kor": 6428, "donate": 6429, "prep": 6430, "fish": 6431, "ima": 6432, "painted": 6433, "admini": 6434, "cosplay": 6435, 
"sports": 6436, "drops": 6437, "fighter": 6438, "evidence": 6439, "ðŁĴª": 6440, "lake": 6441, "rob": 6442, "cinema": 6443, "profile": 6444, "ñ": 6445, "stands": 6446, "legacy": 6447, "shape": 6448, "roof": 6449, "civil": 6450, "ians": 6451, "syl": 6452, "sham": 6453, "voted": 6454, "retail": 6455, "philli": 6456, "listed": 6457, "duty": 6458, "nb": 6459, "thes": 6460, "fare": 6461, "auction": 6462, "fficial": 6463, "storms": 6464, "dp": 6465, "loun": 6466, "shops": 6467, "aly": 6468, "anime": 6469, "multiple": 6470, "ðŁĺįðŁĺį": 6471, "psycho": 6472, "jean": 6473, "apart": 6474, "candidate": 6475, "ggy": 6476, "conf": 6477, "joseph": 6478, "wick": 6479, "meat": 6480, "frame": 6481, "cl": 6482, "forgot": 6483, "phy": 6484, "fing": 6485, "lied": 6486, "rep": 6487, "seed": 6488, "fall": 6489, "ufc": 6490, "nut": 6491, "lind": 6492, "mode": 6493, "fields": 6494, "ence": 6495, "sley": 6496, "ð٤Ķ": 6497, "chill": 6498, "followed": 6499, "announces": 6500, "corru": 6501, "trophy": 6502, "themselves": 6503, "acle": 6504, "aldu": 6505, "kong": 6506, "lon": 6507, "sv": 6508, "broke": 6509, "anderson": 6510, "tai": 6511, "story": 6512, "temporary": 6513, "activities": 6514, "kati": 6515, "ariz": 6516, "crystal": 6517, "spoke": 6518, "extremely": 6519, "trading": 6520, "ðŁĴļ": 6521, "ü": 6522, "inch": 6523, "edin": 6524, "outfit": 6525, "equip": 6526, "madi": 6527, "formed": 6528, "beef": 6529, "pop": 6530, "tiger": 6531, "thisday": 6532, "tired": 6533, "neighb": 6534, "retro": 6535, "isa": 6536, "unt": 6537, "tas": 6538, "kansas": 6539, "dest": 6540, "seconds": 6541, "tay": 6542, "hurric": 6543, "ou": 6544, "galaxy": 6545, "daddy": 6546, "brow": 6547, "burger": 6548, "enced": 6549, "desk": 6550, "accur": 6551, "secretary": 6552, "elite": 6553, "kab": 6554, "chin": 6555, "tourism": 6556, "buddy": 6557, "icide": 6558, "dressed": 6559, "ud": 6560, "vacation": 6561, "cheers": 6562, "comfor": 6563, "characters": 6564, "jet": 6565, "buying": 6566, "lins": 6567, "nap": 6568, "realestate": 6569, "lie": 6570, "afc": 6571, "iii": 6572, "fame": 6573, "nr": 6574, "bat": 6575, "agent": 6576, "makers": 6577, "â̼": 6578, "sector": 6579, "opti": 6580, "leon": 6581, "diet": 6582, "prayer": 6583, "hip": 6584, "mir": 6585, "lex": 6586, "bry": 6587, "ana": 6588, "passing": 6589, "wen": 6590, "recovery": 6591, "aki": 6592, "popul": 6593, "resort": 6594, "maria": 6595, "stuck": 6596, "reads": 6597, "tier": 6598, "perfec": 6599, "netflix": 6600, "poo": 6601, "champ": 6602, "oc": 6603, "reduce": 6604, "wered": 6605, "comments": 6606, "claim": 6607, "accident": 6608, "sag": 6609, "hack": 6610, "salt": 6611, "kinda": 6612, "killer": 6613, "ios": 6614, "zy": 6615, "exchange": 6616, "lecture": 6617, "enger": 6618, "icking": 6619, "tau": 6620, "reveals": 6621, "prison": 6622, "zom": 6623, "ghan": 6624, "ul": 6625, "journal": 6626, "iot": 6627, "trin": 6628, "jona": 6629, "governor": 6630, "cape": 6631, "quarter": 6632, "spective": 6633, "impressive": 6634, "babies": 6635, "tx": 6636, "mill": 6637, "oy": 6638, "harri": 6639, "joint": 6640, "sue": 6641, "collaboration": 6642, "trend": 6643, "revolution": 6644, "renew": 6645, "alumni": 6646, "gett": 6647, "shell": 6648, "sunday": 6649, "entu": 6650, "nic": 6651, "donaldtrump": 6652, "blockchain": 6653, "pacific": 6654, "explains": 6655, "spy": 6656, "advoc": 6657, "paradi": 6658, "tof": 6659, "starring": 6660, "pav": 6661, "feed": 6662, "brac": 6663, "smoke": 6664, "hamp": 6665, "yam": 6666, "tokyo": 6667, "simon": 6668, "dh": 6669, "effici": 6670, "physical": 6671, "nj": 6672, 
"elli": 6673, "slow": 6674, "graduate": 6675, "americans": 6676, "tify": 6677, "fred": 6678, "apore": 6679, "finds": 6680, "robin": 6681, "wet": 6682, "notice": 6683, "semi": 6684, "unve": 6685, "kom": 6686, "pilot": 6687, "screening": 6688, "daily": 6689, "ðŁĴĹ": 6690, "royal": 6691, "spa": 6692, "votes": 6693, "nag": 6694, "whate": 6695, "attending": 6696, "experim": 6697, "addition": 6698, "kate": 6699, "stol": 6700, "mali": 6701, "foot": 6702, "christ": 6703, "chan": 6704, "dee": 6705, "licen": 6706, "global": 6707, "moore": 6708, "tia": 6709, "brigh": 6710, "mystery": 6711, "yay": 6712, "âĿ¤ï¸ıâĿ¤ï¸ı": 6713, "creati": 6714, "mechan": 6715, "clock": 6716, "dic": 6717, "âĢĶ": 6718, "pper": 6719, "alph": 6720, "throughout": 6721, "allow": 6722, "resources": 6723, "selection": 6724, "hamil": 6725, "bbq": 6726, "aaaa": 6727, "virginia": 6728, "disney": 6729, "eng": 6730, "sored": 6731, "drinks": 6732, "fancy": 6733, "consider": 6734, "enda": 6735, "jane": 6736, "handmade": 6737, "dul": 6738, "ontari": 6739, "ius": 6740, "sville": 6741, "colorado": 6742, "whatever": 6743, "wheel": 6744, "promise": 6745, "never": 6746, "designs": 6747, "ably": 6748, "sexual": 6749, "vancou": 6750, "ati": 6751, "convention": 6752, "cultural": 6753, "singapore": 6754, "promo": 6755, "loaded": 6756, "glasgo": 6757, "ppl": 6758, "noo": 6759, "kee": 6760, "stem": 6761, "mention": 6762, "ido": 6763, "cruise": 6764, "riding": 6765, "becomes": 6766, "bey": 6767, "âļ½ï¸ı": 6768, "twin": 6769, "dedicated": 6770, "nash": 6771, "desi": 6772, "workout": 6773, "jenni": 6774, "iv": 6775, "groups": 6776, "relax": 6777, "phoeni": 6778, "lift": 6779, "mixed": 6780, "mck": 6781, "pc": 6782, "must": 6783, "metro": 6784, "cies": 6785, "yar": 6786, "aim": 6787, "anger": 6788, "ie": 6789, "recy": 6790, "married": 6791, "dropped": 6792, "engag": 6793, "lest": 6794, "ambassador": 6795, "oph": 6796, "des": 6797, "wick": 6798, "assistant": 6799, "natur": 6800, "fail": 6801, "ltd": 6802, "short": 6803, "kap": 6804, "shaw": 6805, "bigger": 6806, "remains": 6807, "critical": 6808, "survey": 6809, "coverage": 6810, "erson": 6811, "wind": 6812, "nb": 6813, "billy": 6814, "letes": 6815, "acts": 6816, "jimmy": 6817, "atlan": 6818, "aland": 6819, "tc": 6820, "importance": 6821, "damage": 6822, "fg": 6823, "storage": 6824, "twt": 6825, "bond": 6826, "balance": 6827, "crying": 6828, "puppy": 6829, "vote": 6830, "push": 6831, "ðŁĴľ": 6832, "poly": 6833, "mel": 6834, "london": 6835, "terrori": 6836, "effective": 6837, "corporate": 6838, "atlanta": 6839, "jaco": 6840, "nasa": 6841, "greek": 6842, "senate": 6843, "ish": 6844, "eva": 6845, "intelligence": 6846, "efforts": 6847, "alco": 6848, "kun": 6849, "hall": 6850, "diag": 6851, "claims": 6852, "first": 6853, "hb": 6854, "bae": 6855, "vul": 6856, "pull": 6857, "°": 6858, "separ": 6859, "speed": 6860, "victi": 6861, "onthisday": 6862, "audience": 6863, "rates": 6864, "teach": 6865, "filming": 6866, "bush": 6867, "song": 6868, "yum": 6869, "brun": 6870, "raine": 6871, "awa": 6872, "parks": 6873, "ðĿ": 6874, "rabb": 6875, "rach": 6876, "raid": 6877, "reached": 6878, "rail": 6879, "moves": 6880, "selected": 6881, "fri": 6882, "raising": 6883, "omy": 6884, "stones": 6885, "suk": 6886, "francisco": 6887, "cases": 6888, "capit": 6889, "confu": 6890, "wtf": 6891, "poke": 6892, "equipment": 6893, "greg": 6894, "essential": 6895, "offering": 6896, "nex": 6897, "pies": 6898, "bec": 6899, "creation": 6900, "chairman": 6901, "crown": 6902, "wal": 6903, "johnny": 6904, "shift": 6905, "neck": 6906, "bang": 
6907, "bird": 6908, "ðŁĺı": 6909, "duck": 6910, "reserve": 6911, "depu": 6912, "masters": 6913, "overall": 6914, "notic": 6915, "juice": 6916, "sneak": 6917, "cheer": 6918, "classes": 6919, "eagles": 6920, "nca": 6921, "carpet": 6922, "civil": 6923, "coaches": 6924, "harris": 6925, "ups": 6926, "balls": 6927, "decor": 6928, "martin": 6929, "ros": 6930, "vice": 6931, "announcement": 6932, "whose": 6933, "tigers": 6934, "stered": 6935, "cts": 6936, "dram": 6937, "steel": 6938, "young": 6939, "install": 6940, "suppo": 6941, "recording": 6942, "deck": 6943, "seats": 6944, "lder": 6945, "angle": 6946, "bot": 6947, "styles": 6948, "elections": 6949, "fortun": 6950, "nab": 6951, "butter": 6952, "arian": 6953, "kash": 6954, "inner": 6955, "oured": 6956, "beast": 6957, "wei": 6958, "iconic": 6959, "experts": 6960, "necess": 6961, "beng": 6962, "james": 6963, "lia": 6964, "greece": 6965, "ðŁĵ·": 6966, "ðŁĺģ": 6967, "goodbye": 6968, "mitch": 6969, "twice": 6970, "mumbai": 6971, "steam": 6972, "rush": 6973, "medal": 6974, "nett": 6975, "fashion": 6976, "tar": 6977, "rs": 6978, "saving": 6979, "ricul": 6980, "lm": 6981, "sleeping": 6982, "brooklyn": 6983, "miss": 6984, "sending": 6985, "discovered": 6986, "sphere": 6987, "oftheday": 6988, "kicks": 6989, "missions": 6990, "wright": 6991, "ern": 6992, "ghtly": 6993, "ious": 6994, "melbourne": 6995, "startu": 6996, "moved": 6997, "carry": 6998, "dak": 6999, "agues": 7000, "belgi": 7001, "ema": 7002, "wayne": 7003, "dot": 7004, "erie": 7005, "pel": 7006, "itunes": 7007, "matthew": 7008, "nobody": 7009, "estab": 7010, "calm": 7011, "winds": 7012, "luc": 7013, "prepare": 7014, "trends": 7015, "exercise": 7016, "advant": 7017, "ðŁĴ¯": 7018, "athletics": 7019, "apps": 7020, "ctions": 7021, "advance": 7022, "launches": 7023, "little": 7024, "realdonaldtrump": 7025, "elizabeth": 7026, "carolina": 7027, "hub": 7028, "hidden": 7029, "nw": 7030, "user": 7031, "poll": 7032, "greater": 7033, "most": 7034, "fed": 7035, "pat": 7036, "lifestyle": 7037, "sati": 7038, "scores": 7039, "marriage": 7040, "lr": 7041, "avenue": 7042, "deserve": 7043, "rif": 7044, "ðŁĹ": 7045, "watch": 7046, "championships": 7047, "gray": 7048, "enni": 7049, "cotton": 7050, "gom": 7051, "where": 7052, "package": 7053, "sum": 7054, "absolu": 7055, "newly": 7056, "foods": 7057, "tyler": 7058, "assembly": 7059, "muslim": 7060, "bank": 7061, "rememb": 7062, "options": 7063, "producer": 7064, "lando": 7065, "funds": 7066, "upper": 7067, "shadow": 7068, "progre": 7069, "cop": 7070, "inge": 7071, "legs": 7072, "detroit": 7073, "hillary": 7074, "jose": 7075, "giants": 7076, "soup": 7077, "sustainable": 7078, "tus": 7079, "clothes": 7080, "rocking": 7081, "nz": 7082, "minne": 7083, "materi": 7084, "bruce": 7085, "eart": 7086, "casting": 7087, "independent": 7088, "thousands": 7089, "tah": 7090, "decl": 7091, "veterans": 7092, "lions": 7093, "wrap": 7094, "â̦": 7095, "dess": 7096, "bling": 7097, "stine": 7098, "eggs": 7099, "oon": 7100, "closing": 7101, "zay": 7102, "att": 7103, "bacon": 7104, "fail": 7105, "arizona": 7106, "depre": 7107, "ghost": 7108, "newsp": 7109, "wers": 7110, "vip": 7111, "liked": 7112, "ident": 7113, "volunteer": 7114, "adult": 7115, "pupp": 7116, "circle": 7117, "material": 7118, "degree": 7119, "grown": 7120, "boom": 7121, "calendar": 7122, "sur": 7123, "viewing": 7124, "athletes": 7125, "chand": 7126, "rell": 7127, "asian": 7128, "entr": 7129, "volley": 7130, "victims": 7131, "body": 7132, "mama": 7133, "transfer": 7134, "geek": 7135, "indic": 7136, "saved": 7137, "mai": 7138, 
"gent": 7139, "its": 7140, "lounge": 7141, "kol": 7142, "theory": 7143, "situation": 7144, "islands": 7145, "arth": 7146, "zoo": 7147, "flood": 7148, "viously": 7149, "showed": 7150, "parliament": 7151, "chev": 7152, "eline": 7153, "attrac": 7154, "abad": 7155, "tail": 7156, "hrs": 7157, "lus": 7158, "portu": 7159, "gory": 7160, "provides": 7161, "toys": 7162, "death": 7163, "infe": 7164, "ance": 7165, "gle": 7166, "liam": 7167, "lover": 7168, "hud": 7169, "dvd": 7170, "revealed": 7171, "gw": 7172, "rement": 7173, "cathe": 7174, "lying": 7175, "radio": 7176, "derby": 7177, "stors": 7178, "chemi": 7179, "hospit": 7180, "⾨": 7181, "':": 7182, "ilove": 7183, "lemon": 7184, "republic": 7185, "sni": 7186, "ness": 7187, "door": 7188, "reaction": 7189, "pregn": 7190, "flav": 7191, "scholar": 7192, "spotify": 7193, "isation": 7194, "visual": 7195, "aware": 7196, "sponsored": 7197, "joke": 7198, "lessons": 7199, "legis": 7200, "lock": 7201, "simil": 7202, "ðŁĺĭ": 7203, "kind": 7204, "lay": 7205, "mah": 7206, "hoping": 7207, "vancouver": 7208, "aser": 7209, "cleaning": 7210, "gala": 7211, "threat": 7212, "lap": 7213, "ache": 7214, "romance": 7215, "expen": 7216, "repost": 7217, "zam": 7218, "epi": 7219, "mirror": 7220, "oak": 7221, "adul": 7222, "batman": 7223, "slu": 7224, "lc": 7225, "viewed": 7226, "reviews": 7227, "dates": 7228, "indonesia": 7229, "activi": 7230, "offen": 7231, "leaf": 7232, "isi": 7233, "agricul": 7234, "costume": 7235, "sites": 7236, "spiritu": 7237, "appearance": 7238, "iry": 7239, "stair": 7240, "application": 7241, "spectac": 7242, "icity": 7243, "skies": 7244, "handle": 7245, "punk": 7246, "paradise": 7247, "tn": 7248, "deal": 7249, "providing": 7250, "doc": 7251, "receiving": 7252, "brew": 7253, "microsoft": 7254, "ö": 7255, "ferr": 7256, "metro": 7257, "thail": 7258, "yum": 7259, "carter": 7260, "á": 7261, "gentle": 7262, "breaks": 7263, "cooper": 7264, "showcase": 7265, "cutting": 7266, "egypt": 7267, "baby": 7268, "seminar": 7269, "glori": 7270, "sson": 7271, "fave": 7272, "rehear": 7273, "lotte": 7274, "lady": 7275, "alas": 7276, "prep": 7277, "delivered": 7278, "nuclear": 7279, "iro": 7280, "engagement": 7281, "atta": 7282, "conven": 7283, "zan": 7284, "glory": 7285, "holds": 7286, "businesses": 7287, "strange": 7288, "sche": 7289, "itself": 7290, "grad": 7291, "markets": 7292, "falling": 7293, "stats": 7294, "geon": 7295, "budd": 7296, "lis": 7297, "sheet": 7298, "thisi": 7299, "colo": 7300, "desert": 7301, "registration": 7302, "ign": 7303, "explain": 7304, "interior": 7305, "laws": 7306, "writers": 7307, "springs": 7308, "kr": 7309, "fried": 7310, "bloom": 7311, "infra": 7312, "ao": 7313, "cred": 7314, "past": 7315, "lineup": 7316, "boo": 7317, "brea": 7318, "boots": 7319, "celebrity": 7320, "attacks": 7321, "brook": 7322, "eves": 7323, "excu": 7324, "cherry": 7325, "oop": 7326, "fascin": 7327, "boyfriend": 7328, "seas": 7329, "nine": 7330, "effects": 7331, "powered": 7332, "kha": 7333, "ðŁĺĢ": 7334, "shout": 7335, "condition": 7336, "ij": 7337, "hero": 7338, "enterpri": 7339, "winter": 7340, "applications": 7341, "shoe": 7342, "gel": 7343, "battle": 7344, "programs": 7345, "wart": 7346, "ðŁĴ¥": 7347, "rap": 7348, "hol": 7349, "dangerous": 7350, "dia": 7351, "counter": 7352, "rics": 7353, "ior": 7354, "knight": 7355, "coat": 7356, "emotional": 7357, "atures": 7358, "das": 7359, "wheel": 7360, "forecast": 7361, "transport": 7362, "glasgow": 7363, "kingdom": 7364, "preparing": 7365, "immedi": 7366, "ffin": 7367, "awarded": 7368, "printing": 7369, "roman": 7370, 
"fighters": 7371, "anymore": 7372, "belt": 7373, "pine": 7374, "wine": 7375, "xi": 7376, "employees": 7377, "logies": 7378, "alled": 7379, "demo": 7380, "birthday": 7381, "angeles": 7382, "log": 7383, "drivers": 7384, "necklace": 7385, "kath": 7386, "sit": 7387, "athlete": 7388, "efs": 7389, "sburg": 7390, "purpose": 7391, "resistance": 7392, "releases": 7393, "tis": 7394, "various": 7395, "deliver": 7396, "chal": 7397, "sanc": 7398, "oppo": 7399, "craw": 7400, "neuro": 7401, "dra": 7402, "supporters": 7403, "snap": 7404, "difficult": 7405, "swear": 7406, "logist": 7407, "path": 7408, "attempt": 7409, "à¥": 7410, "swimming": 7411, "steve": 7412, "hurt": 7413, "included": 7414, "bap": 7415, "ware": 7416, "ðŁĴĭ": 7417, "enders": 7418, "jake": 7419, "leeds": 7420, "climb": 7421, "lb": 7422, "imple": 7423, "lisa": 7424, "clothing": 7425, "ðŁĺİ": 7426, "dt": 7427, "compla": 7428, "swing": 7429, "straw": 7430, "vals": 7431, "kle": 7432, "users": 7433, "storm": 7434, "cuts": 7435, "ontario": 7436, "pan": 7437, "handsome": 7438, "iow": 7439, "argu": 7440, "checking": 7441, "scottish": 7442, "Ķï¸ı": 7443, "sier": 7444, "emma": 7445, "pod": 7446, "pattern": 7447, "desh": 7448, "enh": 7449, "edward": 7450, "ting": 7451, "kh": 7452, "half": 7453, "lincoln": 7454, "mother": 7455, "alleg": 7456, "rc": 7457, "volleyball": 7458, "dn": 7459, "gay": 7460, "ally": 7461, "leton": 7462, "grove": 7463, "loud": 7464, "advanced": 7465, "respec": 7466, "client": 7467, "supreme": 7468, "thailand": 7469, "how": 7470, "gig": 7471, "toi": 7472, "dot": 7473, "dollar": 7474, "ðŁijĩ": 7475, "pit": 7476, "rb": 7477, "hn": 7478, "produced": 7479, "ggers": 7480, "âĨĴ": 7481, "mlb": 7482, "canvas": 7483, "fineart": 7484, "usd": 7485, "inthe": 7486, "pson": 7487, "actual": 7488, "sl": 7489, "tb": 7490, "ipad": 7491, "ensure": 7492, "umb": 7493, "wd": 7494, "ska": 7495, "mars": 7496, "kend": 7497, "feli": 7498, "thing": 7499, "countdown": 7500, "absolute": 7501, "rout": 7502, "dral": 7503, "py": 7504, "injured": 7505, "mint": 7506, "hunting": 7507, "mmer": 7508, "sage": 7509, "ligh": 7510, "acity": 7511, "expan": 7512, "murray": 7513, "aro": 7514, "secure": 7515, "fourth": 7516, "eagle": 7517, "relief": 7518, "stakes": 7519, "industrial": 7520, "clark": 7521, "understanding": 7522, "seem": 7523, "plenty": 7524, "silver": 7525, "clau": 7526, "threat": 7527, "sail": 7528, "produce": 7529, "abstr": 7530, "isis": 7531, "br": 7532, "engers": 7533, "worry": 7534, "bieber": 7535, "sj": 7536, "justin": 7537, "realize": 7538, "kyle": 7539, "espn": 7540, "filter": 7541, "sch": 7542, "types": 7543, "gamedev": 7544, "ding": 7545, "twitter": 7546, "soldiers": 7547, "pom": 7548, "carbon": 7549, "yards": 7550, "childhood": 7551, "ried": 7552, "kel": 7553, "eleph": 7554, "tons": 7555, "keynote": 7556, "quiet": 7557, "wire": 7558, "posting": 7559, "issa": 7560, "representing": 7561, "backs": 7562, "alexander": 7563, "celebrates": 7564, "taining": 7565, "||": 7566, "chor": 7567, "escape": 7568, "peek": 7569, "tives": 7570, "field": 7571, "ssie": 7572, "impac": 7573, "sponsor": 7574, "rc": 7575, "wedd": 7576, "cannab": 7577, "sides": 7578, "tracks": 7579, "compar": 7580, "contrac": 7581, "technical": 7582, "bible": 7583, "exploring": 7584, "share": 7585, "trav": 7586, "nate": 7587, "illo": 7588, "scru": 7589, "mingham": 7590, "guns": 7591, "ofthe": 7592, "shame": 7593, "sees": 7594, "catho": 7595, "access": 7596, "cel": 7597, "reported": 7598, "»": 7599, "mario": 7600, "pad": 7601, "hopefully": 7602, "ouse": 7603, "yon": 7604, "disappo": 7605, 
"olo": 7606, "pitt": 7607, "pac": 7608, "gap": 7609, "crush": 7610, "sg": 7611, "kle": 7612, "gem": 7613, "empire": 7614, "dirty": 7615, "ais": 7616, "aviation": 7617, "zealand": 7618, "facing": 7619, "highway": 7620, "danny": 7621, "spider": 7622, "otta": 7623, "ðŁĺĦ": 7624, "wy": 7625, "colours": 7626, "infl": 7627, "costs": 7628, "olympics": 7629, "aus": 7630, "hm": 7631, "howard": 7632, "passes": 7633, "lauren": 7634, "mush": 7635, "opin": 7636, "rho": 7637, "discount": 7638, "operation": 7639, "emily": 7640, "mmm": 7641, "chamber": 7642, "dil": 7643, "toyo": 7644, "ship": 7645, "samu": 7646, "pictured": 7647, "unic": 7648, "pol": 7649, "keeper": 7650, "cartoon": 7651, "sten": 7652, "ignor": 7653, "nations": 7654, "nl": 7655, "tasting": 7656, "detail": 7657, "officials": 7658, "motor": 7659, "francis": 7660, "editor": 7661, "ðŁijĩ": 7662, "pets": 7663, "rangers": 7664, "tg": 7665, "rn": 7666, "wri": 7667, "nichol": 7668, "ise": 7669, "spots": 7670, "anie": 7671, "check": 7672, "triple": 7673, "kumar": 7674, "speakers": 7675, "icing": 7676, "prepared": 7677, "abuse": 7678, "friendship": 7679, "month": 7680, "swim": 7681, "aire": 7682, "scent": 7683, "hamilton": 7684, "indian": 7685, "jes": 7686, "yummy": 7687, "tears": 7688, "dawn": 7689, "ized": 7690, "worlds": 7691, "ðŁķ": 7692, "billi": 7693, "stone": 7694, "nhs": 7695, "basic": 7696, "por": 7697, "stle": 7698, "iron": 7699, "older": 7700, "clevel": 7701, "eing": 7702, "ðŁĺįðŁĺįðŁĺį": 7703, "prints": 7704, "firm": 7705, "aircraft": 7706, "finest": 7707, "develop": 7708, "aaron": 7709, "tz": 7710, "graham": 7711, "owners": 7712, "foli": 7713, "lesson": 7714, "ques": 7715, "babe": 7716, "craft": 7717, "phen": 7718, "jun": 7719, "birmingham": 7720, "vine": 7721, "ller": 7722, "ian": 7723, "fineartamerica": 7724, "evolu": 7725, "stab": 7726, "imper": 7727, "ward": 7728, "comic": 7729, "wiz": 7730, "invited": 7731, "duke": 7732, "match": 7733, "ports": 7734, "roger": 7735, "diagno": 7736, "kept": 7737, "test": 7738, "visu": 7739, "rhy": 7740, "soc": 7741, "tox": 7742, "baker": 7743, "surface": 7744, "covers": 7745, "mans": 7746, "bits": 7747, "xbox": 7748, "ffle": 7749, "nan": 7750, "gard": 7751, "hart": 7752, "waters": 7753, "villa": 7754, "retro": 7755, "lightning": 7756, "catholic": 7757, "democracy": 7758, "neighbor": 7759, "penn": 7760, "cran": 7761, "jonathan": 7762, "laura": 7763, "vibes": 7764, "sub": 7765, "coaching": 7766, "clearly": 7767, "ukraine": 7768, "brave": 7769, "commitment": 7770, "tall": 7771, "mart": 7772, "rap": 7773, "modi": 7774, "scott": 7775, "bros": 7776, "shower": 7777, "ðŁı¾": 7778, "âĺºï¸ı": 7779, "cousin": 7780, "approach": 7781, "bre": 7782, "compos": 7783, "hilari": 7784, "philly": 7785, "gad": 7786, "quickly": 7787, "rian": 7788, "tm": 7789, "virtual": 7790, "houses": 7791, "kt": 7792, "phoenix": 7793, "wire": 7794, "ffy": 7795, "bunch": 7796, "ancing": 7797, "tale": 7798, "snapchat": 7799, "starter": 7800, "ht": 7801, "kicking": 7802, "apart": 7803, "thy": 7804, ")!": 7805, "blogger": 7806, "itz": 7807, "comfort": 7808, "angels": 7809, "wash": 7810, "\":": 7811, "argent": 7812, "request": 7813, "honest": 7814, "mighty": 7815, "bobby": 7816, "kg": 7817, "rol": 7818, "thouse": 7819, "expo": 7820, "hc": 7821, "tables": 7822, "magical": 7823, "posts": 7824, "dem": 7825, "nw": 7826, "orlando": 7827, "aber": 7828, "***": 7829, "ðŁĺľ": 7830, "environmental": 7831, "transformation": 7832, "mile": 7833, "wic": 7834, "hiring": 7835, "maine": 7836, "boar": 7837, "rying": 7838, "tis": 7839, "niture": 7840, 
"tweeted": 7841, "antonio": 7842, "opinion": 7843, "finale": 7844, "diy": 7845, "fis": 7846, "thin": 7847, "trouble": 7848, "lego": 7849, "files": 7850, "quart": 7851, "spa": 7852, "currency": 7853, "climate": 7854, "fanart": 7855, "railway": 7856, "space": 7857, "bands": 7858, "daniel": 7859, "motion": 7860, "leng": 7861, "holder": 7862, "occu": 7863, "marie": 7864, "cathedral": 7865, "buzz": 7866, "bies": 7867, "nascar": 7868, "bmw": 7869, "battery": 7870, "charlotte": 7871, "doctor": 7872, "zzle": 7873, "seven": 7874, "insan": 7875, "ddy": 7876, "sten": 7877, "labor": 7878, "thrilled": 7879, "seren": 7880, "documentary": 7881, "waves": 7882, "certain": 7883, "candid": 7884, "allowed": 7885, "nintendo": 7886, "starwars": 7887, "tap": 7888, "homemade": 7889, "dles": 7890, "thering": 7891, "bree": 7892, "empty": 7893, "piano": 7894, "positi": 7895, "country": 7896, "pork": 7897, "puts": 7898, "perry": 7899, "matic": 7900, "spotlight": 7901, "tist": 7902, "orities": 7903, "wealth": 7904, "cp": 7905, "barbar": 7906, "committed": 7907, "assau": 7908, "profit": 7909, "eight": 7910, "hul": 7911, "finishing": 7912, "runner": 7913, "sso": 7914, "inspec": 7915, "charged": 7916, "christop": 7917, "losing": 7918, "coal": 7919, "hoo": 7920, "elev": 7921, "dele": 7922, "moham": 7923, "donation": 7924, "cable": 7925, "clinic": 7926, "jin": 7927, "managed": 7928, "tering": 7929, "â¬": 7930, "urban": 7931, "deputy": 7932, "bber": 7933, "burn": 7934, "academic": 7935, "ott": 7936, "stake": 7937, "iter": 7938, "stown": 7939, "acker": 7940, "adventures": 7941, "adams": 7942, "greg": 7943, "prom": 7944, "vol": 7945, "acqu": 7946, "congre": 7947, "paint": 7948, "citizens": 7949, "call": 7950, "afford": 7951, "vc": 7952, "asks": 7953, "thetic": 7954, "independence": 7955, "âĽ": 7956, "hitting": 7957, "blon": 7958, "future": 7959, "âı": 7960, "inno": 7961, "gene": 7962, "boards": 7963, "distance": 7964, "set": 7965, "remem": 7966, "thal": 7967, "prevent": 7968, "lang": 7969, "objec": 7970, "susp": 7971, "matt": 7972, "induc": 7973, "boro": 7974, "pione": 7975, "redi": 7976, "virtu": 7977, "printed": 7978, "scope": 7979, "shark": 7980, "succe": 7981, "astron": 7982, "illegal": 7983, "jag": 7984, "cting": 7985, "inee": 7986, "ato": 7987, "robin": 7988, "nutrition": 7989, "bf": 7990, "dutch": 7991, "bn": 7992, "furniture": 7993, "forgotten": 7994, "atar": 7995, "rup": 7996, "hyper": 7997, "branch": 7998, "communication": 7999, "degrees": 8000, "onia": 8001, "uncle": 8002, "promote": 8003, "orche": 8004, "wii": 8005, "js": 8006, "button": 8007, "major": 8008, "cbs": 8009, "bristol": 8010, "premium": 8011, "ordinary": 8012, "edit": 8013, "mg": 8014, "weed": 8015, "steven": 8016, ":'": 8017, "gus": 8018, "tes": 8019, "captured": 8020, "drugs": 8021, "dow": 8022, "writes": 8023, "bishop": 8024, "wheels": 8025, "alization": 8026, "discovery": 8027, "wr": 8028, "rachel": 8029, "neil": 8030, "hydr": 8031, "cutest": 8032, "entrepreneur": 8033, "korean": 8034, "oregon": 8035, "ulty": 8036, "perfectly": 8037, "supported": 8038, "historical": 8039, "twins": 8040, "elly": 8041, "wel": 8042, "devil": 8043, "income": 8044, "scientists": 8045, "deleg": 8046, "hen": 8047, "oni": 8048, "iced": 8049, "gio": 8050, "curry": 8051, "reveal": 8052, "eg": 8053, "buffalo": 8054, "nol": 8055, "opera": 8056, "cameron": 8057, "hahahaha": 8058, "jab": 8059, "graduation": 8060, "craig": 8061, "ral": 8062, "if": 8063, "organization": 8064, "lege": 8065, "gang": 8066, "sud": 8067, "edinburgh": 8068, "lack": 8069, "flies": 8070, "gate": 8071, 
"thrones": 8072, "qb": 8073, "thereal": 8074, "eleg": 8075, "ppin": 8076, "cles": 8077, "jamie": 8078, "tnam": 8079, "crypto": 8080, "oul": 8081, "pages": 8082, "ase": 8083, "roots": 8084, "stupid": 8085, "adid": 8086, "boot": 8087, "protein": 8088, "sap": 8089, "sium": 8090, "sus": 8091, "endor": 8092, "function": 8093, "dont": 8094, "enna": 8095, "chy": 8096, "sque": 8097, "worker": 8098, "mtv": 8099, "ea": 8100, "kan": 8101, "ðŁĴļ": 8102, "mus": 8103, "profession": 8104, "tto": 8105, "operations": 8106, "allo": 8107, "ctor": 8108, "invite": 8109, "scand": 8110, "outh": 8111, "zim": 8112, "links": 8113, "clients": 8114, "samsung": 8115, "discusses": 8116, "nell": 8117, "ultra": 8118, "somewhere": 8119, "stewart": 8120, "inet": 8121, "dez": 8122, "bout": 8123, "factor": 8124, "tian": 8125, "trans": 8126, "jeremy": 8127, "db": 8128, "ðŁĩ¬": 8129, "orn": 8130, "developing": 8131, "spol": 8132, "cooper": 8133, "mau": 8134, "remembering": 8135, "trek": 8136, "family": 8137, "seniors": 8138, "foster": 8139, "attended": 8140, "wing": 8141, "transform": 8142, "elementary": 8143, "horiz": 8144, "listing": 8145, "malaysia": 8146, "itch": 8147, "warrior": 8148, "philippines": 8149, "russell": 8150, "mend": 8151, "initiative": 8152, "creep": 8153, "tops": 8154, "briti": 8155, "aur": 8156, "sharp": 8157, "advertising": 8158, "ugly": 8159, "achiev": 8160, "materials": 8161, "bug": 8162, "device": 8163, "bonus": 8164, "facility": 8165, "cole": 8166, "nhl": 8167, "yas": 8168, "planned": 8169, "pole": 8170, "excellence": 8171, "trick": 8172, "confl": 8173, "rp": 8174, "achieve": 8175, "loan": 8176, "swag": 8177, "jessica": 8178, "howe": 8179, "pour": 8180, "scu": 8181, "zoo": 8182, "rated": 8183, "dresses": 8184, "rebel": 8185, "mexican": 8186, "coordin": 8187, "mess": 8188, "atlantic": 8189, "tl": 8190, "oscar": 8191, "walks": 8192, "pharmac": 8193, "investigation": 8194, "...#": 8195, "cci": 8196, "easily": 8197, "mondaymotivation": 8198, "yment": 8199, "auti": 8200, "forced": 8201, "armed": 8202, "colleagues": 8203, "papers": 8204, "proper": 8205, "shake": 8206, "buc": 8207, "lean": 8208, "exhibit": 8209, "evement": 8210, "cott": 8211, "biz": 8212, "sper": 8213, "kent": 8214, "swan": 8215, "/@": 8216, "girlfriend": 8217, "hawk": 8218, "âĺĢï¸ı": 8219, "mono": 8220, "ðŁĴĽ": 8221, "statue": 8222, "ðŁĺ³": 8223, "ras": 8224, "teeth": 8225, "precious": 8226, "tile": 8227, "pam": 8228, "swift": 8229, "vali": 8230, "nose": 8231, "drunk": 8232, "experiences": 8233, "comeback": 8234, "genius": 8235, "worse": 8236, "shef": 8237, "rad": 8238, "edit": 8239, "honour": 8240, "auspol": 8241, "larry": 8242, "hire": 8243, "gordon": 8244, "achievement": 8245, "........": 8246, "suicide": 8247, "alternative": 8248, "sup": 8249, "surroun": 8250, "shake": 8251, "keith": 8252, "pepper": 8253, "turk": 8254, "criminal": 8255, "beck": 8256, "sum": 8257, "walls": 8258, "cnn": 8259, "antic": 8260, "offe": 8261, "colli": 8262, "wines": 8263, "highlight": 8264, "hawaii": 8265, "embar": 8266, "lfc": 8267, "ðŁĩ®": 8268, "mv": 8269, ">>": 8270, "atmo": 8271, "word": 8272, "carl": 8273, "shoutout": 8274, "brewing": 8275, "ìĿ": 8276, "dof": 8277, "sic": 8278, "hottest": 8279, "colon": 8280, "hhh": 8281, "shut": 8282, "lowing": 8283, "volume": 8284, "apartment": 8285, "agreement": 8286, "destro": 8287, "wee": 8288, "religious": 8289, "iowa": 8290, "rod": 8291, "landing": 8292, "represent": 8293, "ðŁĵ·:": 8294, "las": 8295, "usually": 8296, "hl": 8297, "cac": 8298, "salv": 8299, "along": 8300, "laughing": 8301, "beans": 8302, "reminds": 
8303, "phase": 8304, "somebody": 8305, "mask": 8306, "ranked": 8307, "destroy": 8308, "sci": 8309, "â̼ï¸ı": 8310, "gabri": 8311, "leo": 8312, "roa": 8313, "failed": 8314, "sil": 8315, "refugees": 8316, "revi": 8317, "ring": 8318, "berries": 8319, "cookies": 8320, "yy": 8321, "conservation": 8322, "shab": 8323, "humans": 8324, "determin": 8325, "ain": 8326, "niall": 8327, "assu": 8328, "mba": 8329, "from": 8330, "extreme": 8331, "vices": 8332, "commerce": 8333, "ghtful": 8334, "ordered": 8335, "supports": 8336, "recap": 8337, "vor": 8338, "dropping": 8339, "correct": 8340, "paying": 8341, "meaning": 8342, "nj": 8343, "quiz": 8344, "\"#": 8345, "business": 8346, "ðŁĩ®ðŁĩ": 8347, "indigen": 8348, "dust": 8349, "boxes": 8350, "blind": 8351, "xxx": 8352, "zzy": 8353, "ðŁĩ¬ðŁĩ": 8354, "ssels": 8355, "sant": 8356, "ddle": 8357, "hilarious": 8358, "design": 8359, "wondering": 8360, "vehicles": 8361, "kre": 8362, "jud": 8363, "reception": 8364, "parker": 8365, "ÃŃ": 8366, "privi": 8367, "hydro": 8368, "softball": 8369, "pollu": 8370, "locked": 8371, "bah": 8372, "ear": 8373, "script": 8374, "divi": 8375, "brace": 8376, "george": 8377, "theast": 8378, "belo": 8379, "jal": 8380, "tionary": 8381, "dental": 8382, "rocket": 8383, "purch": 8384, "shak": 8385, "manufacturing": 8386, "ez": 8387, "itis": 8388, "concep": 8389, "tball": 8390, "chs": 8391, "directed": 8392, "prayers": 8393, "ook": 8394, "philos": 8395, "variety": 8396, "chess": 8397, "server": 8398, "gand": 8399, "balti": 8400, "ðŁĵ¸": 8401, "sely": 8402, "cruz": 8403, "spectacular": 8404, "burning": 8405, "represent": 8406, "iz": 8407, "tone": 8408, "merce": 8409, "hell": 8410, "bedroom": 8411, "establi": 8412, "bol": 8413, "common": 8414, "ãĥ»": 8415, "abor": 8416, "kitty": 8417, "heights": 8418, "repair": 8419, "william": 8420, "quake": 8421, "alabama": 8422, "population": 8423, "rev": 8424, "rett": 8425, "ists": 8426, "nite": 8427, "lem": 8428, "aha": 8429, "cleveland": 8430, "rm": 8431, "pover": 8432, "obse": 8433, "montre": 8434, "mania": 8435, "®": 8436, "conne": 8437, "carni": 8438, "shah": 8439, "fy": 8440, "ua": 8441, "scor": 8442, "struggle": 8443, "bob": 8444, "''": 8445, "appropri": 8446, "decide": 8447, "ffed": 8448, "caster": 8449, "sort": 8450, "hungry": 8451, "drag": 8452, "اÙ": 8453, "grounds": 8454, "dw": 8455, "slightly": 8456, "cardin": 8457, "deadline": 8458, "bronze": 8459, "webin": 8460, "barry": 8461, "silence": 8462, "euro": 8463, "option": 8464, "earn": 8465, "ðŁĴĸ": 8466, "however": 8467, "naren": 8468, "nails": 8469, "bathroom": 8470, "vine": 8471, "phd": 8472, "mining": 8473, "garage": 8474, "()": 8475, "shoulder": 8476, "defeat": 8477, "dir": 8478, "ov": 8479, "liberty": 8480, "pleas": 8481, "xon": 8482, "compre": 8483, "av": 8484, "jin": 8485, "ables": 8486, "silent": 8487, "famili": 8488, "visits": 8489, "dipl": 8490, "habit": 8491, "millions": 8492, "regarding": 8493, "innovative": 8494, "senator": 8495, "rts": 8496, "von": 8497, "kl": 8498, "whil": 8499, "required": 8500, "âĿĦ": 8501, "luv": 8502, "presidential": 8503, "pocket": 8504, "hundre": 8505, "shown": 8506, "frozen": 8507, "toward": 8508, "fast": 8509, "confidence": 8510, "rough": 8511, "individual": 8512, "quet": 8513, "ðŁı½": 8514, "dome": 8515, "fifa": 8516, "engineer": 8517, "zen": 8518, "remix": 8519, "ðŁĺĥ": 8520, "plant": 8521, "minor": 8522, "robinson": 8523, "asy": 8524, "pulled": 8525, "certain": 8526, "potato": 8527, "(:": 8528, "pres": 8529, "occa": 8530, "wit": 8531, "item": 8532, "sie": 8533, "dating": 8534, "thompson": 8535, "owned": 
8536, "anu": 8537, "vie": 8538, "tedly": 8539, "goodnight": 8540, "except": 8541, "ðŁĮŁ": 8542, "iraq": 8543, "kie": 8544, "rences": 8545, "lip": 8546, "similar": 8547, "saudi": 8548, "vig": 8549, "arthur": 8550, "picks": 8551, "milan": 8552, "honda": 8553, "maxi": 8554, "og": 8555, "stest": 8556, "arch": 8557, "analytics": 8558, "basti": 8559, "pearl": 8560, "terry": 8561, "horse": 8562, "astro": 8563, "acce": 8564, "launching": 8565, "international": 8566, "sno": 8567, "tasty": 8568, "denver": 8569, "irl": 8570, "pete": 8571, "torn": 8572, "advantage": 8573, "varsity": 8574, "\"\"": 8575, "sole": 8576, "gc": 8577, "lang": 8578, "demonstr": 8579, "olds": 8580, "unity": 8581, "nets": 8582, "inspire": 8583, "crete": 8584, "nashville": 8585, "nelson": 8586, "eter": 8587, "walk": 8588, "hyun": 8589, "mack": 8590, "treas": 8591, "seeking": 8592, "rage": 8593, "brush": 8594, "aband": 8595, "whilst": 8596, "cocon": 8597, "hong": 8598, "shelter": 8599, "ip": 8600, "possibly": 8601, "soo": 8602, "ited": 8603, "âĦ": 8604, "races": 8605, "warming": 8606, "quin": 8607, "television": 8608, "matches": 8609, "rapi": 8610, "mental": 8611, "palm": 8612, "jennifer": 8613, "rolls": 8614, "indiana": 8615, "bars": 8616, "catching": 8617, "rescu": 8618, "candidates": 8619, "fare": 8620, "âłĢ": 8621, "seo": 8622, "vietnam": 8623, "alpha": 8624, "michelle": 8625, "visible": 8626, "regre": 8627, "wned": 8628, "apple": 8629, "lip": 8630, "ffe": 8631, "liz": 8632, "yorkshire": 8633, "hail": 8634, "seasons": 8635, "began": 8636, "md": 8637, "kc": 8638, "lap": 8639, "fascinating": 8640, "help": 8641, "ury": 8642, "ums": 8643, "nuts": 8644, "sem": 8645, "alongside": 8646, "bridge": 8647, "orial": 8648, "ove": 8649, "worldcup": 8650, "british": 8651, "comfortable": 8652, "ive": 8653, "hotels": 8654, "fairs": 8655, "horri": 8656, "sox": 8657, "dining": 8658, "stream": 8659, "barri": 8660, "ssy": 8661, "wim": 8662, "terms": 8663, "vu": 8664, "pere": 8665, "lens": 8666, "walked": 8667, "ror": 8668, "lars": 8669, "shield": 8670, "doubt": 8671, "proto": 8672, "crossing": 8673, "meant": 8674, "medium": 8675, "adding": 8676, "eb": 8677, "cheap": 8678, "func": 8679, "paper": 8680, "brands": 8681, "ryan": 8682, "feedback": 8683, "collins": 8684, "unknown": 8685, "tropical": 8686, "sandwich": 8687, "fallen": 8688, "formu": 8689, "select": 8690, "loads": 8691, "answers": 8692, "ori": 8693, "maga": 8694, "dor": 8695, "duo": 8696, "alie": 8697, "drum": 8698, "uri": 8699, "deer": 8700, "soul": 8701, "shut": 8702, "âĺº": 8703, "stolen": 8704, "donated": 8705, "buzz": 8706, "patriots": 8707, "hal": 8708, "nasty": 8709, "nominated": 8710, "monte": 8711, "kia": 8712, "thri": 8713, "ingu": 8714, "tests": 8715, "petro": 8716, "ðŁijij": 8717, "hosts": 8718, "nest": 8719, "topic": 8720, "patch": 8721, "mmy": 8722, "hugh": 8723, "abilities": 8724, "mathe": 8725, "smiles": 8726, "gb": 8727, "agenda": 8728, "insights": 8729, "chip": 8730, "phan": 8731, "failure": 8732, "dgers": 8733, "hai": 8734, "significant": 8735, "shock": 8736, "rural": 8737, "glam": 8738, "figures": 8739, "potus": 8740, "ota": 8741, "ministry": 8742, "appears": 8743, "fear": 8744, "rh": 8745, "american": 8746, "hatt": 8747, "sony": 8748, "fires": 8749, "edi": 8750, "nou": 8751, "equi": 8752, "when": 8753, "universal": 8754, "madness": 8755, "ix": 8756, "sculpture": 8757, "bach": 8758, "tto": 8759, "sweden": 8760, "eta": 8761, "ento": 8762, "developed": 8763, "monthly": 8764, "maps": 8765, "rah": 8766, "led": 8767, "delta": 8768, "saints": 8769, "islam": 8770, "bench": 
8771, "fifth": 8772, "vard": 8773, "socks": 8774, "welcoming": 8775, "je": 8776, "turner": 8777, "vb": 8778, "adi": 8779, "norway": 8780, "ady": 8781, "hurricane": 8782, "porsche": 8783, "tradition": 8784, "exam": 8785, "newspaper": 8786, "luci": 8787, "aver": 8788, "ideal": 8789, "dna": 8790, "madison": 8791, "ð٧": 8792, "witness": 8793, "acou": 8794, "insight": 8795, "simon": 8796, "robot": 8797, "snake": 8798, "nbc": 8799, "aco": 8800, "ross": 8801, "shment": 8802, "religion": 8803, "chann": 8804, "insu": 8805, "campbell": 8806, "installed": 8807, "weather": 8808, "horses": 8809, "oli": 8810, "robert": 8811, "kaz": 8812, "ðŁıĢ": 8813, "veteran": 8814, "thread": 8815, "quarter": 8816, "easier": 8817, "capture": 8818, "hipho": 8819, "lawrence": 8820, "romantic": 8821, "passion": 8822, "clay": 8823, "oxford": 8824, "thai": 8825, "studying": 8826, "fia": 8827, "elected": 8828, "mostly": 8829, "cb": 8830, "tumb": 8831, "âĢįâĻĤ": 8832, "xl": 8833, "shan": 8834, "faster": 8835, "evans": 8836, "slide": 8837, "shri": 8838, "seek": 8839, "mies": 8840, "chemistry": 8841, "pumpkin": 8842, "tum": 8843, ",,": 8844, "room": 8845, "fired": 8846, "lips": 8847, "presence": 8848, "aff": 8849, "brewery": 8850, "arrive": 8851, "swag": 8852, "photograph": 8853, "pengu": 8854, "chips": 8855, "attor": 8856, "values": 8857, "accurate": 8858, "contemporary": 8859, "principal": 8860, "cannabis": 8861, "ario": 8862, "anywhere": 8863, "gia": 8864, "democrats": 8865, "buildings": 8866, "lived": 8867, "aps": 8868, "negative": 8869, "mare": 8870, "ballo": 8871, "lion": 8872, "diamon": 8873, "look": 8874, "reform": 8875, "tommy": 8876, "illa": 8877, "treats": 8878, "hundreds": 8879, "portland": 8880, "worthy": 8881, "excep": 8882, "aria": 8883, "idol": 8884, "beer": 8885, "cdn": 8886, "yu": 8887, "awk": 8888, "ðŁĩ¨": 8889, "cells": 8890, "ó": 8891, "identity": 8892, "drawn": 8893, "devil": 8894, "finger": 8895, "tham": 8896, "ðŁijĬ": 8897, "earned": 8898, "fintech": 8899, "dolph": 8900, "tweeting": 8901, "evolution": 8902, "ðŁĵį": 8903, "estim": 8904, "mvp": 8905, "none": 8906, "ðŁĩºðŁĩ¸": 8907, "toyota": 8908, "aux": 8909, "marin": 8910, "bold": 8911, "lbs": 8912, "steak": 8913, "murphy": 8914, "itable": 8915, "louis": 8916, "solve": 8917, "pia": 8918, "skir": 8919, "illino": 8920, "webinar": 8921, "banana": 8922, "lov": 8923, "thon": 8924, "voters": 8925, "affordable": 8926, "defeated": 8927, "lmfa": 8928, "airlines": 8929, "superb": 8930, "anyway": 8931, "debt": 8932, "bored": 8933, "versi": 8934, "metal": 8935, "responsible": 8936, "mk": 8937, "sse": 8938, "fay": 8939, "caused": 8940, "fp": 8941, "recommend": 8942, "plaza": 8943, "sporting": 8944, "alliance": 8945, "austri": 8946, "nn": 8947, "tours": 8948, "surprised": 8949, "artif": 8950, "thunder": 8951, "surve": 8952, "wore": 8953, "brief": 8954, "necessary": 8955, "zie": 8956, "ashley": 8957, "drake": 8958, "rt": 8959, "knife": 8960, "immun": 8961, "charges": 8962, "athe": 8963, "bride": 8964, "reply": 8965, "gav": 8966, "broadcast": 8967, "puer": 8968, "bracelet": 8969, "capacity": 8970, "harvest": 8971, "idk": 8972, "performan": 8973, "dding": 8974, "ilers": 8975, "para": 8976, "jama": 8977, "province": 8978, "chin": 8979, "iders": 8980, "hari": 8981, "teaser": 8982, "chen": 8983, "restor": 8984, "rat": 8985, "flat": 8986, "colom": 8987, "ðŁĴŀ": 8988, "ðŁĩ¨ðŁĩ": 8989, "smooth": 8990, "rt": 8991, "pitch": 8992, "staying": 8993, "israeli": 8994, "tcot": 8995, "perspective": 8996, "dock": 8997, "opener": 8998, "lovel": 8999, "xo": 9000, "classroom": 9001, 
"lington": 9002, "goal": 9003, "kennedy": 9004, "sham": 9005, "spaces": 9006, "mitchell": 9007, "homecoming": 9008, "uki": 9009, "claimed": 9010, "recruit": 9011, "ingo": 9012, "mufc": 9013, "monit": 9014, "groo": 9015, "resident": 9016, "percent": 9017, "perman": 9018, "ottawa": 9019, "intment": 9020, "anxi": 9021, "standards": 9022, "worship": 9023, "scheme": 9024, "fx": 9025, "potter": 9026, "bian": 9027, "athletic": 9028, "afgh": 9029, "sse": 9030, "satell": 9031, "parties": 9032, "âĿ¤âĿ¤": 9033, "infrastructure": 9034, "relax": 9035, "modu": 9036, "worn": 9037, "smoking": 9038, "yach": 9039, "practices": 9040, "wcw": 9041, "amb": 9042, "domestic": 9043, "taylor": 9044, "kentu": 9045, "provided": 9046, "modi": 9047, "veg": 9048, "\"...": 9049, "observ": 9050, "ðŁĺ©": 9051, "beard": 9052, "mour": 9053, "angry": 9054, "ðŁĺ±": 9055, "startups": 9056, "wooden": 9057, "dive": 9058, "nail": 9059, "antique": 9060, "roses": 9061, "tornado": 9062, "mat": 9063, "^^": 9064, "suspect": 9065, "farm": 9066, "devices": 9067, "mega": 9068, "tul": 9069, "scholarship": 9070, "gee": 9071, "disaster": 9072, "arrival": 9073, "poin": 9074, "marc": 9075, "katie": 9076, "bbed": 9077, "false": 9078, "deserves": 9079, "richard": 9080, "juana": 9081, "frey": 9082, "tioned": 9083, "hybri": 9084, "rw": 9085, "sarah": 9086, "achi": 9087, "cure": 9088, "ole": 9089, "morris": 9090, "chic": 9091, "broadway": 9092, "label": 9093, "pak": 9094, "poverty": 9095, "golf": 9096, "ered": 9097, "fu": 9098, "eries": 9099, "bees": 9100, "alogue": 9101, "stel": 9102, "wireless": 9103, "jewish": 9104, "tide": 9105, "blocked": 9106, "lifetime": 9107, "bhar": 9108, "split": 9109, "amster": 9110, "thi": 9111, "joshu": 9112, "brunch": 9113, "haps": 9114, "sfor": 9115, "oops": 9116, "kapoor": 9117, "hiking": 9118, "supposed": 9119, "roof": 9120, "reas": 9121, "train": 9122, "tight": 9123, "trump": 9124, "basically": 9125, "rr": 9126, "eared": 9127, "seeds": 9128, "entrance": 9129, "cp": 9130, "wie": 9131, "sonic": 9132, "victim": 9133, "here": 9134, "eh": 9135, "earrings": 9136, "salmon": 9137, "arctic": 9138, "anne": 9139, "dougla": 9140, "corruption": 9141, "hannah": 9142, "hasn": 9143, "voices": 9144, "conce": 9145, "atta": 9146, "fleet": 9147, "clinical": 9148, "democratic": 9149, "tony": 9150, "stood": 9151, "lef": 9152, "twitch": 9153, "ail": 9154, "honestly": 9155, "increased": 9156, "drome": 9157, "donna": 9158, "accepted": 9159, "visitors": 9160, "apar": 9161, "ador": 9162, "par": 9163, "jerry": 9164, "rai": 9165, "brandon": 9166, "abu": 9167, "!!!!!!": 9168, "meme": 9169, "ingh": 9170, "glorious": 9171, "bhu": 9172, "pump": 9173, "jol": 9174, "like": 9175, "fisher": 9176, "maz": 9177, "agan": 9178, "destination": 9179, "playlist": 9180, "letters": 9181, "genu": 9182, "brace": 9183, "celebrated": 9184, "banner": 9185, "rhe": 9186, "dragon": 9187, "ðŁĺħ": 9188, "signature": 9189, "grey": 9190, "âľĶï¸ı": 9191, "alice": 9192, "bered": 9193, "pher": 9194, "bern": 9195, "cath": 9196, "gathering": 9197, "scoring": 9198, "influence": 9199, "smiling": 9200, "dept": 9201, "local": 9202, "ax": 9203, "acu": 9204, "retirement": 9205, "honor": 9206, "herself": 9207, "chemical": 9208, "assess": 9209, "yall": 9210, "frequ": 9211, "appreciation": 9212, "aca": 9213, "choir": 9214, "cuz": 9215, "soil": 9216, "cil": 9217, "reporting": 9218, "uh": 9219, "enterprise": 9220, "grat": 9221, "jacob": 9222, "rum": 9223, "fee": 9224, "jak": 9225, "spin": 9226, "bikes": 9227, "phia": 9228, "stere": 9229, "pis": 9230, "blood": 9231, "tatt": 9232, "raft": 
9233, "warren": 9234, "sheri": 9235, "backstage": 9236, "marsh": 9237, "hashtag": 9238, "therine": 9239, "rein": 9240, "gameday": 9241, "guaran": 9242, "recipes": 9243, "minds": 9244, "stronger": 9245, "issued": 9246, "bicy": 9247, "nak": 9248, "mented": 9249, "scary": 9250, "ux": 9251, "previous": 9252, "ttle": 9253, "thats": 9254, "actors": 9255, "uma": 9256, "tina": 9257, "bunny": 9258, "promotion": 9259, "uss": 9260, "oliver": 9261, "montreal": 9262, "whats": 9263, "appreciated": 9264, "lakes": 9265, "excuse": 9266, "knowing": 9267, "prizes": 9268, "muscle": 9269, "shades": 9270, "scot": 9271, "ingredi": 9272, "electronic": 9273, "juan": 9274, "combat": 9275, "sri": 9276, "eh": 9277, "turkish": 9278, "lom": 9279, "strikes": 9280, "prison": 9281, "ree": 9282, "pope": 9283, "vid": 9284, "oldest": 9285, "doll": 9286, "swiss": 9287, "certified": 9288, "clip": 9289, "returning": 9290, "lator": 9291, "leigh": 9292, "ttes": 9293, "watson": 9294, "healing": 9295, "elim": 9296, "perhaps": 9297, "hass": 9298, "kau": 9299, "dder": 9300, "mouse": 9301, "newcastle": 9302, "indigenous": 9303, "welcomes": 9304, "cole": 9305, "taught": 9306, "noise": 9307, "appear": 9308, "joe": 9309, "canon": 9310, "wednesday": 9311, "utah": 9312, "ctive": 9313, "driven": 9314, "iv": 9315, "cell": 9316, "strip": 9317, "acc": 9318, "focused": 9319, "arrest": 9320, "stocks": 9321, "woo": 9322, "âĹ": 9323, "noticed": 9324, "shado": 9325, "displa": 9326, "terror": 9327, "borne": 9328, "second": 9329, "queens": 9330, "woke": 9331, "jail": 9332, "nott": 9333, "cambridge": 9334, "hart": 9335, "seaf": 9336, "fax": 9337, "accept": 9338, "âĺħ": 9339, "goods": 9340, "kat": 9341, "twin": 9342, "hs": 9343, "thousand": 9344, "sins": 9345, "suite": 9346, "ampton": 9347, "arn": 9348, "relev": 9349, "richar": 9350, "hoops": 9351, "nbc": 9352, "classic": 9353, "pab": 9354, "soldier": 9355, "deplo": 9356, "leans": 9357, "installation": 9358, "clash": 9359, "leban": 9360, "eee": 9361, "tire": 9362, "beloved": 9363, "fusion": 9364, "traveling": 9365, "nei": 9366, "cookie": 9367, "globe": 9368, "physics": 9369, "sq": 9370, "col": 9371, "wolves": 9372, "dl": 9373, "exit": 9374, "\"-": 9375, "football": 9376, "leaf": 9377, "sterling": 9378, "hide": 9379, "minneso": 9380, "freshman": 9381, "nature": 9382, "indie": 9383, "supplies": 9384, "bris": 9385, "irish": 9386, "inktober": 9387, "doodle": 9388, "icop": 9389, "messages": 9390, "adults": 9391, "recorded": 9392, "fixed": 9393, "ardo": 9394, "offered": 9395, "underground": 9396, "drone": 9397, "pine": 9398, "mainten": 9399, "andre": 9400, "hammer": 9401, "sx": 9402, "round": 9403, "hike": 9404, "brad": 9405, "rome": 9406, "full": 9407, "oney": 9408, "rows": 9409, "columbia": 9410, "archives": 9411, "approved": 9412, "batch": 9413, "illinois": 9414, "recognition": 9415, "shouldn": 9416, "fog": 9417, "ncaa": 9418, "kevin": 9419, "humanity": 9420, "although": 9421, "powers": 9422, "pou": 9423, "sar": 9424, "pest": 9425, "alcohol": 9426, "consci": 9427, "philadel": 9428, "eno": 9429, "tm": 9430, "okla": 9431, "category": 9432, "participate": 9433, "accused": 9434, "brief": 9435, "poem": 9436, "clubs": 9437, "consult": 9438, "jab": 9439, "bigdata": 9440, "amsterdam": 9441, "acing": 9442, "certific": 9443, "nu": 9444, "dat": 9445, "improved": 9446, "andy": 9447, "campaig": 9448, "palestin": 9449, "pace": 9450, "mobi": 9451, "feelings": 9452, "wolf": 9453, "brain": 9454, "propos": 9455, "interactive": 9456, "prince": 9457, "index": 9458, "cis": 9459, "chae": 9460, "peaceful": 9461, "covering": 
9462, "aco": 9463, "courses": 9464, "monkey": 9465, "replace": 9466, "bl": 9467, "bloody": 9468, "tales": 9469, "brighton": 9470, "neighborhood": 9471, "gates": 9472, "spiritual": 9473, "afraid": 9474, "breast": 9475, "bones": 9476, "ðŁijī": 9477, "video": 9478, "wau": 9479, "touch": 9480, "injuries": 9481, "carl": 9482, "rix": 9483, "unex": 9484, "âĢ¢": 9485, "fred": 9486, "considered": 9487, "thusi": 9488, "anch": 9489, "ony": 9490, "usa": 9491, "graphics": 9492, "acre": 9493, "ðŁĺ©": 9494, "commemor": 9495, "commod": 9496, "goti": 9497, "guardian": 9498, "starbucks": 9499, "prevention": 9500, "hahahaha": 9501, "administration": 9502, "portugal": 9503, "faculty": 9504, "beta": 9505, "ula": 9506, "albert": 9507, "breath": 9508, "eri": 9509, "letting": 9510, "tric": 9511, "mentation": 9512, "incredibly": 9513, "tennes": 9514, "vd": 9515, "ðŁĻĪ": 9516, "eddie": 9517, "brick": 9518, "grill": 9519, "btw": 9520, "watches": 9521, "researchers": 9522, "tney": 9523, "nie": 9524, "pas": 9525, "aster": 9526, "vibr": 9527, "pokemon": 9528, "chrome": 9529, "goat": 9530, "pitts": 9531, "illy": 9532, "festive": 9533, "yd": 9534, "canal": 9535, "ðŁĨ": 9536, "fies": 9537, "carlos": 9538, "reque": 9539, "partici": 9540, "trains": 9541, "sample": 9542, "temperature": 9543, "symph": 9544, "picking": 9545, "indoor": 9546, "zers": 9547, "playoffs": 9548, "________": 9549, "apes": 9550, "lyrics": 9551, "islamic": 9552, "performances": 9553, "dick": 9554, "spark": 9555, "seas": 9556, "homa": 9557, "ground": 9558, "disci": 9559, "employee": 9560, "commu": 9561, "alaska": 9562, "alan": 9563, "feast": 9564, "dging": 9565, "banking": 9566, "manuel": 9567, "slowly": 9568, "trucks": 9569, "mccar": 9570, "ooo": 9571, "scrat": 9572, "orchestra": 9573, "individu": 9574, "mx": 9575, "breath": 9576, "stairs": 9577, "equality": 9578, "blake": 9579, "locations": 9580, "coconut": 9581, "baltimore": 9582, "aaa": 9583, "lc": 9584, "ðŁıĨ": 9585, "harvey": 9586, "resist": 9587, "immigration": 9588, "adidas": 9589, "fili": 9590, "ref": 9591, "lgbt": 9592, "mos": 9593, "ppi": 9594, "kenny": 9595, "terror": 9596, "bane": 9597, "apolis": 9598, "sg": 9599, "socialmedia": 9600, "kai": 9601, "honest": 9602, "assas": 9603, "bollywood": 9604, "âĢįâĻĢï¸ı": 9605, "ferrari": 9606, "horn": 9607, "crypto": 9608, "boom": 9609, "maintenance": 9610, "idi": 9611, "sman": 9612, "wl": 9613, "extended": 9614, "insul": 9615, "ves": 9616, "gosp": 9617, "tri": 9618, "pig": 9619, "targe": 9620, "celer": 9621, "stati": 9622, "smh": 9623, "ridic": 9624, "appeal": 9625, "?)": 9626, "conclu": 9627, "cosme": 9628, "sheep": 9629, "christopher": 9630, "enthusi": 9631, "polish": 9632, "mets": 9633, "ounded": 9634, "sustainability": 9635, "creativity": 9636, "concrete": 9637, "rai": 9638, "alien": 9639, "bless": 9640, "tees": 9641, "club": 9642, "rot": 9643, "bos": 9644, "exist": 9645, "perfection": 9646, "luck": 9647, "rocky": 9648, "expensive": 9649, "meanwhile": 9650, "happybirthday": 9651, "pret": 9652, "thriller": 9653, "cave": 9654, "playoff": 9655, "somer": 9656, "lu": 9657, "lex": 9658, "defence": 9659, "amwriting": 9660, "homeless": 9661, "prophe": 9662, "chet": 9663, "pastor": 9664, "ðŁ¤£": 9665, "lander": 9666, "www": 9667, "Ģï¸ı": 9668, "tica": 9669, "!#": 9670, "otic": 9671, "radar": 9672, "posters": 9673, "powder": 9674, "poli": 9675, "haun": 9676, "trap": 9677, "blin": 9678, "assault": 9679, "shorts": 9680, "rey": 9681, "shy": 9682, "squir": 9683, "racist": 9684, "garlic": 9685, "fur": 9686, "remote": 9687, "smell": 9688, "impressed": 9689, 
"fingers": 9690, "âłĢ": 9691, "dino": 9692, "lement": 9693, "snu": 9694, "promoting": 9695, "string": 9696, "productive": 9697, "bage": 9698, "mason": 9699, "raz": 9700, "directly": 9701, "jk": 9702, "eval": 9703, "ðŁijĬ": 9704, "doctors": 9705, "cow": 9706, "rider": 9707, "stv": 9708, "remove": 9709, "wu": 9710, "nathan": 9711, "rod": 9712, "nr": 9713, "=>": 9714, "affected": 9715, "invest": 9716, "mption": 9717, "ginger": 9718, "od": 9719, "agriculture": 9720, "sque": 9721, "mug": 9722, "counting": 9723, "kee": 9724, "magnific": 9725, "cook": 9726, "anistan": 9727, "root": 9728, "placed": 9729, "sympo": 9730, "ghana": 9731, "und": 9732, "cheer": 9733, "throwing": 9734, "secrets": 9735, "filling": 9736, "optimi": 9737, "butterfly": 9738, "bubb": 9739, "ðŁĺī": 9740, "terrible": 9741, "dg": 9742, "silk": 9743, "obsessed": 9744, "lou": 9745, "aide": 9746, "salute": 9747, "monu": 9748, "philadelphia": 9749, "scientific": 9750, "ist": 9751, "uae": 9752, "dessert": 9753, "bottles": 9754, "canyon": 9755, "ðŁĺĪ": 9756, "carib": 9757, "other": 9758, "wich": 9759, "resource": 9760, "guilty": 9761, "und": 9762, "leon": 9763, "ess": 9764, "kane": 9765, "ele": 9766, "trainer": 9767, "heim": 9768, "ante": 9769, "manage": 9770, "rookie": 9771, "treated": 9772, "poses": 9773, "rsvp": 9774, "causes": 9775, "awak": 9776, "jewell": 9777, "lett": 9778, "onics": 9779, "titles": 9780, "cardiff": 9781, "gaga": 9782, "bump": 9783, "useful": 9784, "?!": 9785, "loose": 9786, "bbing": 9787, "::": 9788, "argentina": 9789, "debu": 9790, "cycl": 9791, "whel": 9792, "disgu": 9793, "jel": 9794, "kills": 9795, "biology": 9796, "exter": 9797, "trash": 9798, "bodies": 9799, "tram": 9800, "circuit": 9801, "expect": 9802, "lads": 9803, "wells": 9804, "shot": 9805, "gee": 9806, "narendr": 9807, "fastest": 9808, "bent": 9809, "bills": 9810, "marshall": 9811, "hats": 9812, "introduce": 9813, "citizen": 9814, "impossible": 9815, "gib": 9816, "azz": 9817, "networking": 9818, "rant": 9819, "think": 9820, "indy": 9821, "stops": 9822, "ftheday": 9823, "brian": 9824, "**": 9825, "amodi": 9826, "dome": 9827, "courage": 9828, "packing": 9829, "affairs": 9830, "gn": 9831, "sized": 9832, "entary": 9833, "poland": 9834, "switzer": 9835, "afghanistan": 9836, "wu": 9837, "tender": 9838, "subscribe": 9839, "mosco": 9840, "attend": 9841, "republican": 9842, "honey": 9843, "âĢĭ": 9844, "simul": 9845, "wester": 9846, "foodie": 9847, "oro": 9848, "middle": 9849, "abt": 9850, "copies": 9851, "maje": 9852, "narendramodi": 9853, "typical": 9854, "inspirational": 9855, "vitam": 9856, "wiscon": 9857, "cubs": 9858, "tivity": 9859, "hali": 9860, "ears": 9861, "kay": 9862, "dare": 9863, "marijuana": 9864, "curious": 9865, "ania": 9866, "tomato": 9867, "remind": 9868, "ðŁĩ·": 9869, "scared": 9870, "coup": 9871, "poet": 9872, "landed": 9873, "rid": 9874, "wrapped": 9875, "morri": 9876, "climbing": 9877, "ews": 9878, "feeding": 9879, "contra": 9880, "thology": 9881, "grid": 9882, "tively": 9883, "reader": 9884, "laser": 9885, "diving": 9886, "dig": 9887, "latin": 9888, "tied": 9889, "shakespe": 9890, "oci": 9891, "adm": 9892, "showers": 9893, "chuck": 9894, "marcus": 9895, "oos": 9896, "knee": 9897, "olive": 9898, "owl": 9899, "dylan": 9900, "anno": 9901, "gym": 9902, "decisions": 9903, "wellness": 9904, "arrives": 9905, "satis": 9906, "chris": 9907, "thurs": 9908, "ðŁ¤£": 9909, "interviews": 9910, "thankyou": 9911, "switzerland": 9912, "overnight": 9913, "journalist": 9914, "serves": 9915, "volcan": 9916, ".......": 9917, "plot": 9918, "nicol": 9919, 
"carrying": 9920, "magne": 9921, "treasure": 9922, "exp": 9923, "bever": 9924, "ðŁĺ¢": 9925, "marty": 9926, "mole": 9927, "donations": 9928, "recognized": 9929, "bh": 9930, "dus": 9931, "shann": 9932, "aldo": 9933, "successfully": 9934, "ente": 9935, "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 9936, "cabinet": 9937, "cuis": 9938, "titled": 9939, "das": 9940, "sol": 9941, "strategies": 9942, "delivering": 9943, "adds": 9944, "anian": 9945, "nether": 9946, "ðŁĴĥ": 9947, "contain": 9948, "suits": 9949, "pairs": 9950, "todd": 9951, "rella": 9952, "rope": 9953, "cio": 9954, "crop": 9955, "paintings": 9956, "suz": 9957, "rejec": 9958, "bust": 9959, "dh": 9960, "fraud": 9961, "mh": 9962, "control": 9963, "jeal": 9964, "destroyed": 9965, "allows": 9966, "wool": 9967, "minnesota": 9968, "omen": 9969, "ju": 9970, "symposium": 9971, "daf": 9972, "limit": 9973, "accounts": 9974, "loading": 9975, "intern": 9976, "resolution": 9977, "holland": 9978, "qual": 9979, "meetings": 9980, "grave": 9981, "camping": 9982, "vam": 9983, "renov": 9984, "liberal": 9985, "amber": 9986, "gree": 9987, "humb": 9988, "fever": 9989, "eling": 9990, "brooks": 9991, "à²": 9992, "beth": 9993, "aded": 9994, "alt": 9995, "roe": 9996, "performed": 9997, "josh": 9998, "franklin": 9999, "nicole": 10000, "dess": 10001, "bbs": 10002, "mg": 10003, "networks": 10004, "minim": 10005, "alt": 10006, "weapons": 10007, "guy": 10008, "jason": 10009, "gha": 10010, "harbour": 10011, "aton": 10012, "praise": 10013, "kentucky": 10014, "belfast": 10015, "sticks": 10016, "bloss": 10017, "hopes": 10018, "anthro": 10019, "familiar": 10020, "wait": 10021, "chile": 10022, "depression": 10023, "lax": 10024, "jets": 10025, "leice": 10026, "receives": 10027, "sier": 10028, "ank": 10029, "dex": 10030, "indeed": 10031, "flexi": 10032, "fabric": 10033, "lamb": 10034, "helicop": 10035, "amanda": 10036, "âĢĶâĢĶ": 10037, "compete": 10038, "snack": 10039, "technologies": 10040, "syrian": 10041, "moms": 10042, "muham": 10043, "chosen": 10044, "anat": 10045, "devon": 10046, "sharks": 10047, "ret": 10048, "fundraiser": 10049, "selfies": 10050, "stations": 10051, "communications": 10052, "tennessee": 10053, "tutor": 10054, "rot": 10055, "valuable": 10056, "dynamic": 10057, "nurse": 10058, "ied": 10059, "earthquake": 10060, "deserved": 10061, "ave": 10062, "sara": 10063, "stretch": 10064, "douglas": 10065, "nepal": 10066, "ç": 10067, "obviously": 10068, "dame": 10069, "rape": 10070, "anybody": 10071, "kw": 10072, "patrol": 10073, "holders": 10074, "hanna": 10075, "infographic": 10076, "eco": 10077, "beating": 10078, "stanley": 10079, "boats": 10080, "ribb": 10081, "ez": 10082, "witch": 10083, "inva": 10084, "acid": 10085, "boarding": 10086, "-@": 10087, "gil": 10088, "dave": 10089, "careers": 10090, "oppos": 10091, "lloy": 10092, "inter": 10093, "dope": 10094, "resu": 10095, "jagu": 10096, "shade": 10097, "indy": 10098, "onist": 10099, "relations": 10100, "agen": 10101, "able": 10102, "incident": 10103, "meter": 10104, "sharma": 10105, "idr": 10106, "prove": 10107, "immediately": 10108, "troops": 10109, "aman": 10110, "glow": 10111, "gaza": 10112, "blocks": 10113, "personal": 10114, "chronic": 10115, "aller": 10116, "sid": 10117, "shr": 10118, "whatsapp": 10119, "lucy": 10120, "archae": 10121, "hou": 10122, "journalism": 10123, "ourselves": 10124, "got": 10125, "themed": 10126, "shaped": 10127, "weak": 10128, "casual": 10129, "length": 10130, "slam": 10131, "abbey": 10132, "ev": 10133, "counter": 10134, "esta": 10135, "recipi": 10136, "chapel": 10137, "expansion": 10138, "self": 10139, 
"suffering": 10140, "spice": 10141, "nz": 10142, "spart": 10143, "desper": 10144, "booking": 10145, "quarters": 10146, "yon": 10147, "ðŁĴĹ": 10148, "pk": 10149, "continued": 10150, "-#": 10151, "manhatt": 10152, "talked": 10153, "shen": 10154, "combo": 10155, "hybrid": 10156, "jeans": 10157, "liquid": 10158, "seal": 10159, "retweets": 10160, "acceler": 10161, "collective": 10162, "tas": 10163, ":))": 10164, "professionals": 10165, "raw": 10166, "ott": 10167, "susan": 10168, "iring": 10169, "oklahoma": 10170, "reven": 10171, "survival": 10172, "creator": 10173, "transit": 10174, "stac": 10175, "surf": 10176, "ik": 10177, "editing": 10178, "chilling": 10179, "bailey": 10180, "steal": 10181, "rable": 10182, "parent": 10183, "hunger": 10184, "snapp": 10185, "collect": 10186, "philosoph": 10187, "dedication": 10188, "cf": 10189, "cm": 10190, "leep": 10191, "repeat": 10192, "reha": 10193, "unfortun": 10194, "aer": 10195, "aero": 10196, "abstract": 10197, "monitor": 10198, "agents": 10199, "bul": 10200, "science": 10201, "harbor": 10202, "dragons": 10203, "flooding": 10204, "accompli": 10205, "dash": 10206, "julia": 10207, "thered": 10208, "tuesday": 10209, "cyber": 10210, "blow": 10211, "tained": 10212, "lem": 10213, "reference": 10214, "ppo": 10215, "negoti": 10216, "charle": 10217, "connor": 10218, "ault": 10219, "accessories": 10220, "commissioner": 10221, "rainy": 10222, "rear": 10223, "advisory": 10224, "lucas": 10225, "maid": 10226, "coal": 10227, "kav": 10228, "polo": 10229, "ðŁı¾": 10230, "transport": 10231, "margare": 10232, "strawberry": 10233, "burns": 10234, "greens": 10235, "nev": 10236, "participants": 10237, "colin": 10238, "belgium": 10239, "colour": 10240, "inform": 10241, "dell": 10242, "bron": 10243, "caly": 10244, "kickoff": 10245, "strategic": 10246, "reunion": 10247, "honors": 10248, "lib": 10249, "egyp": 10250, "âŃIJï¸ı": 10251, "hypo": 10252, "sizes": 10253, "registered": 10254, "betes": 10255, "relaxing": 10256, "bloom": 10257, "intense": 10258, "valentines": 10259, "insane": 10260, "wwii": 10261, "px": 10262, "trio": 10263, "blade": 10264, "wisconsin": 10265, "cone": 10266, "platin": 10267, "alize": 10268, "raven": 10269, "increasing": 10270, "indians": 10271, "ilian": 10272, "blu": 10273, "rabbit": 10274, "extension": 10275, "jef": 10276, "audi": 10277, "ferry": 10278, "sell": 10279, "aday": 10280, "usb": 10281, "sweat": 10282, "champag": 10283, "method": 10284, "memph": 10285, "assist": 10286, "sby": 10287, "cape": 10288, "removed": 10289, "magn": 10290, "vt": 10291, "rams": 10292, "fbi": 10293, "tackle": 10294, "phew": 10295, "hon": 10296, "motorcycle": 10297, "suspec": 10298, "elephant": 10299, "subject": 10300, "lette": 10301, "dairy": 10302, "wheat": 10303, "awkward": 10304, "act": 10305, "trol": 10306, "mitted": 10307, "zayn": 10308, "sheriff": 10309, "enemy": 10310, "cons": 10311, "kett": 10312, "bulls": 10313, "evalu": 10314, "btc": 10315, "satellite": 10316, "holo": 10317, "porter": 10318, "diabetes": 10319, "better": 10320, "releasing": 10321, "surf": 10322, ":-": 10323, "sebasti": 10324, "collecting": 10325, "encing": 10326, "ethi": 10327, "gods": 10328, "alley": 10329, "healthy": 10330, "mills": 10331, "smash": 10332, "copper": 10333, "crack": 10334, "readers": 10335, "spac": 10336, "license": 10337, "basket": 10338, "bangla": 10339, "entic": 10340, "omi": 10341, "mere": 10342, "sively": 10343, "animation": 10344, "lanes": 10345, "dentally": 10346, "chillin": 10347, "fie": 10348, "karen": 10349, "depth": 10350, "lipse": 10351, "ng": 10352, "rip": 10353, 
"melo": 10354, "sandy": 10355, "ðŁijıðŁijı": 10356, "vincent": 10357, "nut": 10358, "hug": 10359, "whole": 10360, "creates": 10361, "????": 10362, "âĿ¤ï¸ıâĿ¤ï¸ı": 10363, "baked": 10364, "upgrade": 10365, "roberts": 10366, "hara": 10367, "caribbean": 10368, "authentic": 10369, "mbs": 10370, "moscow": 10371, "attorney": 10372, "wiki": 10373, "chlo": 10374, "hull": 10375, "cork": 10376, "\"!": 10377, "stylish": 10378, "ðŁĵ¸:": 10379, "diary": 10380, "improving": 10381, "expand": 10382, "bright": 10383, "pollution": 10384, "knights": 10385, "personality": 10386, "checked": 10387, "facilities": 10388, "zel": 10389, "bowling": 10390, "guer": 10391, "ðŁİĤ": 10392, "ongoing": 10393, "units": 10394, "hook": 10395, "beck": 10396, "conflict": 10397, "todd": 10398, "farming": 10399, "educational": 10400, "kak": 10401, "clay": 10402, "stroke": 10403, "belly": 10404, "explore": 10405, "millenni": 10406, "thm": 10407, "loop": 10408, "sms": 10409, "consist": 10410, "circa": 10411, "bryan": 10412, "dab": 10413, "younger": 10414, "solidar": 10415, "ppa": 10416, "experienced": 10417, "bella": 10418, "board": 10419, "sheffield": 10420, "stephen": 10421, "consumer": 10422, "submit": 10423, "sponsor": 10424, "tang": 10425, "aggre": 10426, "combined": 10427, "tracking": 10428, "sanders": 10429, "baz": 10430, "survive": 10431, "ferred": 10432, "equal": 10433, "sep": 10434, "reed": 10435, "strong": 10436, "privacy": 10437, "stap": 10438, "ung": 10439, "acry": 10440, "pasta": 10441, "pirates": 10442, "ager": 10443, "fairy": 10444, "dup": 10445, "introduced": 10446, "wip": 10447, "lets": 10448, "spray": 10449, "ðŁĵº": 10450, "grew": 10451, "asts": 10452, "pittsburgh": 10453, "newyork": 10454, "joey": 10455, "lauren": 10456, "trade": 10457, "chop": 10458, "pipe": 10459, "claire": 10460, "behavior": 10461, "vap": 10462, "crews": 10463, "laptop": 10464, "ð٤Ĺ": 10465, "chester": 10466, "discipl": 10467, "df": 10468, "outdoors": 10469, "ks": 10470, "gover": 10471, "superstar": 10472, "casino": 10473, "farmer": 10474, ";-)": 10475, "returned": 10476, "ðŁıĪ": 10477, "mail": 10478, "roasted": 10479, "costa": 10480, "vill": 10481, "pez": 10482, "gardening": 10483, "distribution": 10484, "shining": 10485, "investors": 10486, "rasp": 10487, "decades": 10488, "realized": 10489, "barn": 10490, "pti": 10491, "stable": 10492, "utd": 10493, "panthers": 10494, "mens": 10495, "bn": 10496, "cade": 10497, "bucket": 10498, "ynn": 10499, "whenever": 10500, "wake": 10501, "dais": 10502, "bernie": 10503, "lodge": 10504, "julie": 10505, "atmosphere": 10506, "ðŁĺĺðŁĺĺ": 10507, "majority": 10508, "parti": 10509, "excit": 10510, "cut": 10511, "meh": 10512, "muslims": 10513, "begun": 10514, "flights": 10515, "veness": 10516, "ceme": 10517, "posing": 10518, "sole": 10519, "gou": 10520, "darkness": 10521, "peach": 10522, "celtic": 10523, "authority": 10524, "grandma": 10525, "fulness": 10526, "smith": 10527, "specific": 10528, "garcia": 10529, "coins": 10530, "goodness": 10531, "aldub": 10532, "recruiting": 10533, "dennis": 10534, "gary": 10535, "sleeve": 10536, "weapon": 10537, "plz": 10538, "discover": 10539, "harrison": 10540, "recruitment": 10541, "jai": 10542, "chim": 10543, "compared": 10544, "toms": 10545, "mothers": 10546, "amy": 10547, "archive": 10548, "task": 10549, "benjam": 10550, "seg": 10551, "lawyer": 10552, "alum": 10553, "investing": 10554, "mie": 10555, "chez": 10556, "jp": 10557, "ake": 10558, "flam": 10559, "wallpaper": 10560, "âĻ¥ï¸ı": 10561, "tton": 10562, "chest": 10563, "favorites": 10564, "weigh": 10565, "coolest": 
10566, "rating": 10567, "relevant": 10568, "logan": 10569, "maple": 10570, "runners": 10571, "prior": 10572, "people": 10573, "maur": 10574, "terrorist": 10575, "tested": 10576, "carnival": 10577, "suspen": 10578, "measure": 10579, "mv": 10580, "cybersecurity": 10581, "appren": 10582, "terrorism": 10583, "oz": 10584, "vital": 10585, "nies": 10586, "gonz": 10587, "funded": 10588, "twist": 10589, "assessment": 10590, "diesel": 10591, "enfor": 10592, "column": 10593, "addressing": 10594, "casts": 10595, "payment": 10596, "xton": 10597, "fier": 10598, ",'": 10599, "last": 10600, "nee": 10601, "unless": 10602, "close": 10603, "skill": 10604, "cuisine": 10605, "funeral": 10606, "tiles": 10607, "aun": 10608, "kru": 10609, "relationships": 10610, "ðŁĴ¯": 10611, "event": 10612, "âĢįâĻĤï¸ı": 10613, "kindness": 10614, "proposed": 10615, "acoustic": 10616, "aes": 10617, "defender": 10618, "dance": 10619, "htt": 10620, "wat": 10621, "voy": 10622, "ð٤ĺ": 10623, "aus": 10624, "cliff": 10625, "searching": 10626, "beautifully": 10627, "inqu": 10628, "atl": 10629, "specialist": 10630, "ðŁIJ¶": 10631, "dai": 10632, "trails": 10633, "classics": 10634, "instant": 10635, "vous": 10636, "revenue": 10637, "march": 10638, "kirk": 10639, "fringe": 10640, "fireworks": 10641, "trivia": 10642, "âĺħ": 10643, "traction": 10644, "walter": 10645, "moto": 10646, "lily": 10647, "attitude": 10648, "climb": 10649, "scan": 10650, "savings": 10651, "cw": 10652, "faith": 10653, "credits": 10654, "abled": 10655, "graff": 10656, "autograph": 10657, "hehe": 10658, "ranch": 10659, "had": 10660, "rogers": 10661, "ðŁĮ¹": 10662, "fin": 10663, "requ": 10664, "folk": 10665, "additional": 10666, "lynn": 10667, "uber": 10668, "dollars": 10669, "logic": 10670, "worth": 10671, "som": 10672, "thesis": 10673, "pound": 10674, "bic": 10675, "stur": 10676, "ceram": 10677, "spencer": 10678, "entered": 10679, "vamp": 10680, "organized": 10681, "âľĪ": 10682, "pps": 10683, "tron": 10684, "mercedes": 10685, "noti": 10686, "competitive": 10687, "dow": 10688, "ousness": 10689, "victor": 10690, "grilled": 10691, "nai": 10692, "putin": 10693, "abra": 10694, "blame": 10695, "alexand": 10696, "animal": 10697, "decent": 10698, "pent": 10699, "interior": 10700, ":')": 10701, "butler": 10702, "ballet": 10703, "ðŁĴĶ": 10704, "albums": 10705, "downs": 10706, "lad": 10707, "sir": 10708, "plain": 10709, "pers": 10710, "blonde": 10711, "disc": 10712, "pakistan": 10713, "sement": 10714, "gaa": 10715, "wage": 10716, "chas": 10717, "mani": 10718, "cops": 10719, "territ": 10720, "lol": 10721, "laughter": 10722, "rivers": 10723, "magnificent": 10724, "lamp": 10725, "wb": 10726, "newsle": 10727, "charts": 10728, "blessing": 10729, "punch": 10730, "longest": 10731, "floral": 10732, "cutie": 10733, "farewell": 10734, "stopping": 10735, "mbb": 10736, "bud": 10737, "cheese": 10738, "decla": 10739, "sim": 10740, "mcdonald": 10741, "deter": 10742, "youth": 10743, "tch": 10744, "freder": 10745, "kindle": 10746, "fern": 10747, "ator": 10748, "asleep": 10749, "pond": 10750, "sprint": 10751, "pounds": 10752, "lazy": 10753, "ghe": 10754, "fundraising": 10755, "deadly": 10756, "grande": 10757, "doug": 10758, "hey": 10759, "linda": 10760, "considering": 10761, "ium": 10762, "golden": 10763, "vik": 10764, "authors": 10765, "diss": 10766, "ually": 10767, "appropriate": 10768, "morning": 10769, "yle": 10770, "honoring": 10771, "folio": 10772, "bec": 10773, "rebec": 10774, "finland": 10775, "formula": 10776, "cornwall": 10777, "shay": 10778, "causing": 10779, "blend": 10780, "signal": 
10781, "tent": 10782, "kashmir": 10783, "nationals": 10784, "harmony": 10785, "scout": 10786, "accessi": 10787, "height": 10788, "medieval": 10789, "improvement": 10790, "kees": 10791, "practical": 10792, "card": 10793, "depar": 10794, "hun": 10795, "oming": 10796, "calgary": 10797, "stel": 10798, "bubble": 10799, "guru": 10800, "mah": 10801, "unexpe": 10802, "nh": 10803, "eda": 10804, "meat": 10805, "ige": 10806, "sio": 10807, "goddess": 10808, "inches": 10809, "tunes": 10810, "britt": 10811, "stion": 10812, "raj": 10813, "âĻ«": 10814, "mercy": 10815, "ðŁĴĺ": 10816, "sends": 10817, "iest": 10818, "polici": 10819, "vale": 10820, "reduced": 10821, "asap": 10822, "vijay": 10823, "defensive": 10824, "celebrations": 10825, "riders": 10826, "meditation": 10827, "harmon": 10828, "ging": 10829, "¡": 10830, "programming": 10831, "inau": 10832, "sudden": 10833, "mh": 10834, "replacement": 10835, "sku": 10836, "jar": 10837, "grades": 10838, "tast": 10839, "kitt": 10840, "branding": 10841, "kaw": 10842, "boot": 10843, "fought": 10844, "pays": 10845, "gf": 10846, "ization": 10847, "hop": 10848, "kk": 10849, "activist": 10850, "vend": 10851, "coastal": 10852, "chaos": 10853, "ðŁĶ´": 10854, "seme": 10855, "billboard": 10856, "lifting": 10857, "cumb": 10858, "scal": 10859, "ðŁĸ¤": 10860, "struck": 10861, "lv": 10862, "indiedev": 10863, "beaten": 10864, "jungle": 10865, "alright": 10866, "destiny": 10867, "ming": 10868, "kc": 10869, "chances": 10870, "oman": 10871, "qatar": 10872, "craf": 10873, "trained": 10874, "prix": 10875, "charm": 10876, "otive": 10877, "smu": 10878, "ec": 10879, "anders": 10880, "handed": 10881, "alban": 10882, "certainly": 10883, "arriving": 10884, "ize": 10885, "sai": 10886, "track": 10887, "painter": 10888, "humble": 10889, "appointment": 10890, "headline": 10891, "managing": 10892, "mod": 10893, "aspe": 10894, "andrea": 10895, "ä": 10896, "ethiop": 10897, "united": 10898, "exist": 10899, "bali": 10900, "kad": 10901, "nt": 10902, "dred": 10903, "rex": 10904, "recognize": 10905, "tampa": 10906, "beers": 10907, "atia": 10908, "heels": 10909, "note": 10910, "transportation": 10911, "turtle": 10912, "rede": 10913, "hiphop": 10914, "spicy": 10915, "spurs": 10916, "â¬ĩ": 10917, "corp": 10918, "thern": 10919, "toast": 10920, "hurry": 10921, "properties": 10922, "mage": 10923, "marco": 10924, "elements": 10925, "bouti": 10926, "syndrome": 10927, "msg": 10928, "developer": 10929, "graders": 10930, "heim": 10931, "resil": 10932, "offices": 10933, "delay": 10934, "dimen": 10935, "vintag": 10936, "barbara": 10937, "ðŁĺ±": 10938, "venezu": 10939, "cular": 10940, "faced": 10941, "barn": 10942, "ðŁĺĨ": 10943, "survivor": 10944, "worm": 10945, "confused": 10946, "passionate": 10947, "ر": 10948, "identify": 10949, "electricity": 10950, "souls": 10951, "bradley": 10952, "reportedly": 10953, "lunch": 10954, "shelf": 10955, "elia": 10956, "sweet": 10957, "smooth": 10958, "employment": 10959, "amel": 10960, "manhattan": 10961, "steam": 10962, "ounts": 10963, "yep": 10964, "living": 10965, "une": 10966, "describe": 10967, "cares": 10968, "manila": 10969, "shawn": 10970, "acted": 10971, "bash": 10972, "steven": 10973, "rest": 10974, "petition": 10975, "divine": 10976, "welsh": 10977, "race": 10978, "platinum": 10979, "ðŁĮ¸": 10980, "pb": 10981, "extraordinary": 10982, "solidarity": 10983, "mall": 10984, "onion": 10985, "scheduled": 10986, "gameof": 10987, "fergu": 10988, "dems": 10989, "norm": 10990, "pk": 10991, "trials": 10992, "policies": 10993, "publishing": 10994, "stole": 10995, "front": 10996, 
"character": 10997, "vania": 10998, "exce": 10999, "stie": 11000, "sca": 11001, "residential": 11002, "sailing": 11003, "ðŁĶ¥ðŁĶ¥ðŁĶ¥": 11004, "sponsors": 11005, "thick": 11006, "champagne": 11007, "shepher": 11008, "continuing": 11009, "venice": 11010, "perth": 11011, "nap": 11012, "aster": 11013, "yak": 11014, "unlimited": 11015, "choices": 11016, "neo": 11017, "hiv": 11018, "reporter": 11019, "brussels": 11020, "fold": 11021, "dys": 11022, "semi": 11023, "lawn": 11024, "italia": 11025, "wifi": 11026, "ask": 11027, "emed": 11028, "frame": 11029, "monitoring": 11030, "stead": 11031, "ida": 11032, "grin": 11033, "isa": 11034, "flip": 11035, "restric": 11036, "offensive": 11037, "attached": 11038, "dish": 11039, "why": 11040, "phillips": 11041, "greet": 11042, "pals": 11043, "mixtape": 11044, "vou": 11045, "fielder": 11046, "spark": 11047, "alberta": 11048, "glen": 11049, "cash": 11050, "sri": 11051, "uri": 11052, "rodri": 11053, "entrepreneurs": 11054, "climatechange": 11055, "psy": 11056, "dle": 11057, "ements": 11058, "linked": 11059, "netherlands": 11060, "accidentally": 11061, "opposition": 11062, "velvet": 11063, "rays": 11064, "cw": 11065, "omo": 11066, "mf": 11067, "lmfao": 11068, "newsletter": 11069, ":)": 11070, "toilet": 11071, "literature": 11072, "disp": 11073, "philip": 11074, "uniform": 11075, "suddenly": 11076, "header": 11077, "cooler": 11078, "---": 11079, "proud": 11080, "brig": 11081, "nissan": 11082, "scientist": 11083, "jah": 11084, "concentr": 11085, "packs": 11086, "appointed": 11087, "soap": 11088, "engage": 11089, "chose": 11090, "âĻ¡": 11091, "setup": 11092, "jealous": 11093, "harry": 11094, "gation": 11095, "tunnel": 11096, "temp": 11097, "oscars": 11098, "decade": 11099, "recommended": 11100, "children": 11101, "aba": 11102, "anxiety": 11103, "vements": 11104, "salon": 11105, "photoo": 11106, "organiz": 11107, "machines": 11108, "abs": 11109, "ville": 11110, "hype": 11111, "tiff": 11112, "emerging": 11113, "avgeek": 11114, "[#": 11115, "contribution": 11116, "brady": 11117, "resto": 11118, "gmail": 11119, "fitz": 11120, "photoshoot": 11121, "helmet": 11122, "ht": 11123, "elegant": 11124, "uganda": 11125, "nursing": 11126, "orleans": 11127, "penn": 11128, "nah": 11129, "footage": 11130, "ema": 11131, "wo": 11132, "wad": 11133, "concerns": 11134, "vere": 11135, "remark": 11136, "whoever": 11137, "strang": 11138, "pt": 11139, "quit": 11140, "shang": 11141, "history": 11142, "sick": 11143, "permanent": 11144, "illness": 11145, "cold": 11146, "vision": 11147, "hem": 11148, "arrow": 11149, "convic": 11150, "pink": 11151, "occup": 11152, "bald": 11153, "exhau": 11154, "uof": 11155, "amo": 11156, "ont": 11157, "ãĥ»": 11158, "adopt": 11159, "laid": 11160, "smoked": 11161, "interpre": 11162, "essenti": 11163, "associated": 11164, "bd": 11165, "bby": 11166, "fier": 11167, "install": 11168, "diplom": 11169, "conditi": 11170, "cf": 11171, "wak": 11172, "anya": 11173, "graci": 11174, "fisher": 11175, "sss": 11176, "apr": 11177, "ilit": 11178, "musician": 11179, "symphony": 11180, "cord": 11181, "hack": 11182, "legi": 11183, "lv": 11184, "blessings": 11185, "humor": 11186, "scra": 11187, "eti": 11188, "minster": 11189, "travelling": 11190, "bush": 11191, "jewellery": 11192, "lime": 11193, "!!!": 11194, "pregnant": 11195, "pee": 11196, "lob": 11197, "capital": 11198, "ipa": 11199, "pencil": 11200, "labor": 11201, "ducks": 11202, "proudly": 11203, "wedding": 11204, "derek": 11205, "mw": 11206, "peg": 11207, "valentine": 11208, "angu": 11209, "retreat": 11210, "prospect": 11211, 
"danger": 11212, "vulner": 11213, "upset": 11214, ",#": 11215, "srk": 11216, "xim": 11217, "thursday": 11218, "nfl": 11219, "kisses": 11220, "reds": 11221, "crack": 11222, "reward": 11223, "cu": 11224, "kok": 11225, "mete": 11226, "abandoned": 11227, "itt": 11228, "meals": 11229, "spell": 11230, "stanbul": 11231, "delays": 11232, "rum": 11233, "leop": 11234, "gum": 11235, "nova": 11236, "superman": 11237, "chick": 11238, "mis": 11239, "dramatic": 11240, "innocent": 11241, "rounds": 11242, "rec": 11243, "autism": 11244, "bangladesh": 11245, "moral": 11246, "movie": 11247, "spoo": 11248, "kla": 11249, "âĥ£": 11250, "outing": 11251, "messi": 11252, "abroad": 11253, "lookin": 11254, "aim": 11255, "qi": 11256, "stack": 11257, "collage": 11258, "à¯": 11259, "hudson": 11260, "scan": 11261, "hoe": 11262, "chau": 11263, "occur": 11264, "commander": 11265, "holes": 11266, "ðŁİĦ": 11267, "bias": 11268, "von": 11269, "sticker": 11270, "mak": 11271, "responsibility": 11272, "columbus": 11273, "saint": 11274, "edmon": 11275, "racism": 11276, "farms": 11277, "wen": 11278, "gulf": 11279, "mayo": 11280, "!!!!!!!!": 11281, "corporation": 11282, "bachel": 11283, "ela": 11284, "internal": 11285, "jeep": 11286, "follows": 11287, "dialogue": 11288, "derer": 11289, "smartphone": 11290, "helen": 11291, "richmond": 11292, "equity": 11293, "sland": 11294, "bg": 11295, "near": 11296, "avi": 11297, "memphis": 11298, "weir": 11299, "discussed": 11300, "badge": 11301, "pup": 11302, "mistake": 11303, "phenomen": 11304, "unite": 11305, "ðŁĽ": 11306, "depic": 11307, "rides": 11308, "inaugu": 11309, "nat": 11310, "softwitter": 11311, "combination": 11312, "gospel": 11313, "âļ¾": 11314, "admission": 11315, "retrogaming": 11316, "ðŁIJ¾": 11317, "schu": 11318, "mbo": 11319, "junction": 11320, "alarm": 11321, "à¦": 11322, "grac": 11323, "khali": 11324, "kul": 11325, "male": 11326, "caption": 11327, "wish": 11328, "tere": 11329, "corps": 11330, "rubber": 11331, "playstation": 11332, "erin": 11333, "efficient": 11334, "lor": 11335, "jokes": 11336, "inary": 11337, "norman": 11338, "luis": 11339, "inaugural": 11340, "ched": 11341, "âļ½ï¸ı": 11342, "dip": 11343, "toe": 11344, "strat": 11345, "aac": 11346, "amu": 11347, "pier": 11348, "cott": 11349, "command": 11350, "tten": 11351, "snoo": 11352, "cube": 11353, "closes": 11354, "classical": 11355, "sword": 11356, "expression": 11357, "reaching": 11358, "napp": 11359, "cost": 11360, "affect": 11361, "rico": 11362, "gif": 11363, "breathe": 11364, "tribe": 11365, "ortho": 11366, "hay": 11367, "lg": 11368, "fries": 11369, "nm": 11370, "hiding": 11371, "richards": 11372, "ende": 11373, "micro": 11374, "capitol": 11375, "copy": 11376, "rom": 11377, "regime": 11378, "maryland": 11379, "taxi": 11380, "dial": 11381, "embarra": 11382, "unbeliev": 11383, "cht": 11384, "vs": 11385, "elimin": 11386, "odd": 11387, "penny": 11388, "soundtrack": 11389, "lings": 11390, "transition": 11391, "remaining": 11392, "ais": 11393, "malik": 11394, "?!?": 11395, "random": 11396, "defend": 11397, "ultra": 11398, "trum": 11399, "dancer": 11400, "stol": 11401, "drive": 11402, "aver": 11403, "roast": 11404, "definition": 11405, "sean": 11406, "excitement": 11407, "particul": 11408, "surely": 11409, "shav": 11410, "bery": 11411, "dishes": 11412, "comm": 11413, "isol": 11414, "iam": 11415, "obli": 11416, "ghost": 11417, "hughes": 11418, "chiefs": 11419, "bas": 11420, "conservative": 11421, "special": 11422, "femin": 11423, "shri": 11424, "nancy": 11425, "intel": 11426, "tune": 11427, "ðŁĩª": 11428, "joel": 11429, 
"ggle": 11430, "moto": 11431, "ðŁĺĶ": 11432, "buck": 11433, "dag": 11434, "anticip": 11435, "montana": 11436, "guid": 11437, "frog": 11438, "ecraft": 11439, "ope": 11440, "drives": 11441, "numer": 11442, "xy": 11443, "colorful": 11444, "wednesdaywisdom": 11445, "illumin": 11446, "beyon": 11447, "inaugur": 11448, "deeply": 11449, "prefer": 11450, "fortune": 11451, "cooked": 11452, "tible": 11453, "âĺķ": 11454, "sweater": 11455, "itter": 11456, "tty": 11457, "ui": 11458, "gie": 11459, "complic": 11460, "~~": 11461, "taxes": 11462, "cups": 11463, "diverse": 11464, "samanth": 11465, "âłĢâłĢ": 11466, "baking": 11467, "symp": 11468, "wai": 11469, "behalf": 11470, "mercur": 11471, "travels": 11472, "ðŁİīðŁİ": 11473, "oria": 11474, "engaged": 11475, "jumping": 11476, "retired": 11477, "naked": 11478, "puni": 11479, "speedway": 11480, "sciences": 11481, "rehearsal": 11482, "onym": 11483, "dyou": 11484, "plates": 11485, "rati": 11486, "krish": 11487, "jazz": 11488, "carol": 11489, "raf": 11490, "penalty": 11491, "timeline": 11492, "ruby": 11493, "engineers": 11494, "raf": 11495, "belle": 11496, "dose": 11497, "cheon": 11498, "escap": 11499, "meg": 11500, "rank": 11501, "ord": 11502, "megan": 11503, "merch": 11504, "eclipse": 11505, "âĺºï¸ı": 11506, "pledge": 11507, "kirk": 11508, "persi": 11509, "leicester": 11510, "sak": 11511, "wk": 11512, "safely": 11513, "yyy": 11514, "jet": 11515, "promised": 11516, "jc": 11517, "enne": 11518, "noah": 11519, "reno": 11520, "rea": 11521, "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 11522, "trail": 11523, "ðŁijĢ": 11524, "fd": 11525, "sooo": 11526, "rimin": 11527, "wk": 11528, "า": 11529, "ial": 11530, "xox": 11531, "biscu": 11532, "dale": 11533, "fandom": 11534, "participating": 11535, "flag": 11536, "privilege": 11537, "peach": 11538, "machine": 11539, "boston": 11540, "gross": 11541, "og": 11542, "miracle": 11543, "adoption": 11544, "uss": 11545, "monsters": 11546, "beij": 11547, "clarke": 11548, "pushing": 11549, "praying": 11550, "aro": 11551, "dn": 11552, "ellis": 11553, "apollo": 11554, "odds": 11555, "refugee": 11556, "tow": 11557, "bp": 11558, "ðŁĩ¬ðŁĩ§": 11559, "hend": 11560, "appeared": 11561, "membership": 11562, "pean": 11563, "dum": 11564, "violent": 11565, "vy": 11566, "potatoes": 11567, "aww": 11568, "greetings": 11569, "tts": 11570, "acon": 11571, "shane": 11572, "photographed": 11573, "crab": 11574, "temperatures": 11575, "cuba": 11576, "cfc": 11577, "welcom": 11578, "hel": 11579, "innings": 11580, "mk": 11581, "code": 11582, "knock": 11583, "grass": 11584, "swedish": 11585, "pta": 11586, "icky": 11587, "vat": 11588, "lining": 11589, "sq": 11590, "sap": 11591, "arc": 11592, "announcing": 11593, "skins": 11594, "cityof": 11595, "bring": 11596, "cox": 11597, "gamer": 11598, "itarian": 11599, "ida": 11600, "hd": 11601, "rosse": 11602, "sadly": 11603, "geo": 11604, "âļ¡ï¸ı": 11605, "tags": 11606, "father": 11607, "change": 11608, "lance": 11609, "whiskey": 11610, "adelaide": 11611, "tec": 11612, "stickers": 11613, "market": 11614, "classy": 11615, "badass": 11616, "florence": 11617, "liner": 11618, "frost": 11619, "kate": 11620, "acon": 11621, "scandal": 11622, "essex": 11623, "ðŁĺı": 11624, "vivi": 11625, "drill": 11626, "bloggers": 11627, "recommend": 11628, "dha": 11629, "acres": 11630, "roma": 11631, "buy": 11632, "grocer": 11633, "eria": 11634, "mahar": 11635, "ffer": 11636, "patterns": 11637, "veri": 11638, "compu": 11639, "stev": 11640, "anga": 11641, "mentor": 11642, "doo": 11643, "itali": 11644, "cdnpoli": 11645, "only": 11646, "conduct": 11647, "electro": 11648, 
"def": 11649, "whale": 11650, "preparation": 11651, "bicycle": 11652, "viral": 11653, "turnout": 11654, "brass": 11655, "quad": 11656, "hospitality": 11657, "packaging": 11658, "dency": 11659, "cemetery": 11660, "aboard": 11661, "dreaming": 11662, "picture": 11663, "tall": 11664, "invent": 11665, "admi": 11666, "oe": 11667, "temps": 11668, "quan": 11669, "fundam": 11670, "promp": 11671, "residence": 11672, "mud": 11673, "souri": 11674, "âĦ¢": 11675, "graffiti": 11676, "gif": 11677, "dnd": 11678, "comp": 11679, "swar": 11680, "peeps": 11681, "palestine": 11682, "devils": 11683, "sang": 11684, "assistance": 11685, "bike": 11686, "mississi": 11687, "interviewed": 11688, "nephew": 11689, "drums": 11690, "vand": 11691, "gentlemen": 11692, "nsw": 11693, "insta": 11694, "lebanon": 11695, "eeee": 11696, "olivia": 11697, "very": 11698, "rough": 11699, "industries": 11700, "mation": 11701, "ðŁĺĴ": 11702, "barrel": 11703, "nay": 11704, "pops": 11705, "modern": 11706, "illy": 11707, "arest": 11708, "onents": 11709, "protecting": 11710, "vans": 11711, "eo": 11712, "vikings": 11713, "restaurants": 11714, "reck": 11715, "jackie": 11716, "andrew": 11717, "willing": 11718, "heath": 11719, "citizen": 11720, "discrimin": 11721, "à¹Ī": 11722, "stuart": 11723, "mys": 11724, "hip": 11725, "transp": 11726, "\"?": 11727, "tex": 11728, "sushi": 11729, "ked": 11730, "crossed": 11731, "distur": 11732, "pedia": 11733, "fate": 11734, "somehow": 11735, "moth": 11736, "processing": 11737, "iss": 11738, "rin": 11739, "uts": 11740, "yyc": 11741, "vert": 11742, "lgbt": 11743, "reid": 11744, "onto": 11745, "arabia": 11746, "habitat": 11747, "==": 11748, "streak": 11749, "simpson": 11750, "addiction": 11751, "wimble": 11752, "delivers": 11753, "challenging": 11754, "ðŁİ¶": 11755, "franch": 11756, "edu": 11757, "sme": 11758, "aids": 11759, "hurst": 11760, "tham": 11761, "tarian": 11762, "remembered": 11763, "palestinian": 11764, "fees": 11765, "trum": 11766, "sketch": 11767, "uru": 11768, "fitting": 11769, "jesse": 11770, "ðŁĶ¥ðŁĶ¥": 11771, "--------": 11772, "bach": 11773, "icia": 11774, "colored": 11775, "dah": 11776, "associate": 11777, "intel": 11778, "seller": 11779, "pu": 11780, "stuffed": 11781, "acs": 11782, "bs": 11783, "shin": 11784, "cooperation": 11785, "certificate": 11786, "abu": 11787, "ingredients": 11788, "rev": 11789, "inge": 11790, "elder": 11791, "christian": 11792, "bundle": 11793, "thic": 11794, "dirt": 11795, "beijing": 11796, "commit": 11797, "teddy": 11798, "edu": 11799, "today": 11800, "sfield": 11801, "wyn": 11802, "confirms": 11803, "loo": 11804, "jv": 11805, "eness": 11806, "alpha": 11807, "virus": 11808, "arium": 11809, "grind": 11810, "bridges": 11811, "introduction": 11812, "polls": 11813, "bacter": 11814, "zach": 11815, "terminal": 11816, "raiders": 11817, "flavor": 11818, "zombie": 11819, "vod": 11820, "spreading": 11821, "gameofthrones": 11822, "efficiency": 11823, "lately": 11824, "alem": 11825, "tweet": 11826, "crimes": 11827, "cler": 11828, "dey": 11829, "dged": 11830, "hyun": 11831, "payments": 11832, "circus": 11833, "ðŁĺŃðŁĺŃ": 11834, "missouri": 11835, "lub": 11836, "episodes": 11837, "cage": 11838, "pos": 11839, "matching": 11840, "tumblr": 11841, "lined": 11842, "gest": 11843, "ambi": 11844, "narr": 11845, "ington": 11846, "regul": 11847, "blown": 11848, "isle": 11849, "coco": 11850, "ondon": 11851, "joshua": 11852, "touring": 11853, "sma": 11854, "sausage": 11855, "bestfriend": 11856, "boeing": 11857, "desire": 11858, "savage": 11859, "rapper": 11860, "devo": 11861, "tear": 11862, 
"takeover": 11863, "cowboys": 11864, "poker": 11865, "parag": 11866, "ppe": 11867, "hint": 11868, "wears": 11869, "seth": 11870, "roles": 11871, "lanc": 11872, "manga": 11873, "format": 11874, "flyer": 11875, "cay": 11876, "moor": 11877, "bake": 11878, "splash": 11879, "vad": 11880, "kerala": 11881, "proceeds": 11882, "silly": 11883, "reflection": 11884, "distr": 11885, "wid": 11886, "suit": 11887, "civic": 11888, "yankees": 11889, "byn": 11890, "migration": 11891, "distin": 11892, "orch": 11893, "femini": 11894, "qualifying": 11895, "turi": 11896, "obe": 11897, "hundred": 11898, "crap": 11899, "wang": 11900, "mathemat": 11901, "bure": 11902, "exposure": 11903, "ferguson": 11904, "semester": 11905, "reserv": 11906, "plym": 11907, "ahu": 11908, "facial": 11909, "wax": 11910, "worried": 11911, "cab": 11912, "vio": 11913, "asa": 11914, "cod": 11915, "topics": 11916, "pcs": 11917, "halo": 11918, "rescued": 11919, "horizon": 11920, "ark": 11921, "âļª": 11922, "holly": 11923, "elf": 11924, "ulti": 11925, "pup": 11926, "qualified": 11927, "attendance": 11928, "atively": 11929, "destroy": 11930, "yc": 11931, "forth": 11932, "photooftheday": 11933, "cents": 11934, "iceland": 11935, "measures": 11936, "desk": 11937, "portfolio": 11938, "articles": 11939, "directors": 11940, "datab": 11941, "ew": 11942, "creepy": 11943, "ounding": 11944, "honoured": 11945, "mist": 11946, "jit": 11947, "mentioned": 11948, "portable": 11949, "itic": 11950, "dann": 11951, "fridayfeeling": 11952, "amid": 11953, "tiger": 11954, "scrip": 11955, "helicopter": 11956, "hardware": 11957, "explor": 11958, "workplace": 11959, "austria": 11960, "beatles": 11961, "bernar": 11962, "spider": 11963, "disco": 11964, "cult": 11965, "limits": 11966, "shortly": 11967, "final": 11968, "ninja": 11969, "luke": 11970, "lebron": 11971, "walmart": 11972, "oil": 11973, "vanilla": 11974, "shire": 11975, "yeg": 11976, "aky": 11977, "cs": 11978, "bler": 11979, "collected": 11980, "tg": 11981, "rolled": 11982, "specials": 11983, "bff": 11984, "pierre": 11985, "shim": 11986, "vier": 11987, "flashback": 11988, "restoration": 11989, "individuals": 11990, "prod": 11991, "freaking": 11992, "turer": 11993, "oa": 11994, "refre": 11995, "moroc": 11996, "greet": 11997, "reyn": 11998, "careful": 11999, "ouring": 12000, "ush": 12001, "isd": 12002, "gill": 12003, "view": 12004, "thunderstorm": 12005, "bled": 12006, "picnic": 12007, "guardi": 12008, "pig": 12009, "ark": 12010, "sylvania": 12011, "banned": 12012, "ucl": 12013, "vijay": 12014, "orium": 12015, "avengers": 12016, "believes": 12017, "eur": 12018, "monument": 12019, "concerned": 12020, "labs": 12021, "berg": 12022, "aap": 12023, "vish": 12024, "singles": 12025, "cancel": 12026, "zel": 12027, "arab": 12028, "ruth": 12029, "tooth": 12030, "arta": 12031, "shaf": 12032, "chairs": 12033, "rack": 12034, "diseases": 12035, "crowd": 12036, "cly": 12037, "flex": 12038, "christma": 12039, "artificial": 12040, "tomat": 12041, "fine": 12042, "draws": 12043, "advocate": 12044, "france": 12045, "ÙĬ": 12046, "ðŁĺ³": 12047, "heavy": 12048, "sour": 12049, "comprehen": 12050, "noble": 12051, "aap": 12052, "hindu": 12053, "coral": 12054, "gars": 12055, "owen": 12056, "nl": 12057, "stall": 12058, "yellow": 12059, "marina": 12060, "inver": 12061, "support": 12062, "tough": 12063, "promises": 12064, "pie": 12065, "masterpiece": 12066, "score": 12067, "force": 12068, "mortg": 12069, "cryptocurrency": 12070, "ox": 12071, "rors": 12072, "rockin": 12073, "provin": 12074, "hog": 12075, "nostal": 12076, "oakland": 12077, 
"patrick": 12078, "inclusion": 12079, "traffic": 12080, "ahmed": 12081, "aha": 12082, "luxury": 12083, "consecu": 12084, "demon": 12085, "âĸº": 12086, "blowing": 12087, "stag": 12088, ":\"": 12089, "encourage": 12090, "bene": 12091, "skull": 12092, "dodge": 12093, "buster": 12094, "kinson": 12095, "witne": 12096, "error": 12097, "lowest": 12098, "fellow": 12099, "à°": 12100, "shre": 12101, "blur": 12102, "virgin": 12103, "composer": 12104, "slip": 12105, "mornings": 12106, "gains": 12107, "table": 12108, "grain": 12109, "arist": 12110, "brazilian": 12111, "wwe": 12112, "tues": 12113, "ribbon": 12114, "anag": 12115, "dist": 12116, "sacrif": 12117, "embrace": 12118, "entrepreneur": 12119, "affili": 12120, "deo": 12121, "tali": 12122, "tourist": 12123, "fatal": 12124, "ìĬ": 12125, "automatic": 12126, "ðŁĩµ": 12127, "weak": 12128, "welfare": 12129, "confirm": 12130, "benjamin": 12131, "fights": 12132, "alleged": 12133, "mead": 12134, "struggling": 12135, "prosecu": 12136, "chef": 12137, "è": 12138, "proposal": 12139, "ern": 12140, "ðŁĺĦ": 12141, "dyk": 12142, "ongs": 12143, "hong": 12144, "mack": 12145, "melon": 12146, "onent": 12147, "rush": 12148, "dap": 12149, "toler": 12150, "propag": 12151, "cze": 12152, "translation": 12153, "wallet": 12154, "cottage": 12155, "sail": 12156, "constitution": 12157, "ðŁĴĢ": 12158, "munici": 12159, "favor": 12160, "stormhour": 12161, "ih": 12162, "ðŁĺĮ": 12163, "approaching": 12164, "pinned": 12165, "jed": 12166, "nigerian": 12167, "nach": 12168, "shat": 12169, "particularly": 12170, "mcdon": 12171, "cameras": 12172, "annie": 12173, "administr": 12174, "heat": 12175, "electrical": 12176, "charming": 12177, "gibson": 12178, "boutique": 12179, "exposed": 12180, "actor": 12181, "pillow": 12182, "beaches": 12183, "genuine": 12184, "margaret": 12185, "bennett": 12186, "louisi": 12187, "positions": 12188, "ely": 12189, "shiny": 12190, "tention": 12191, "architect": 12192, "rental": 12193, "acqui": 12194, "google": 12195, "subway": 12196, "moment": 12197, "ðŁļ¨": 12198, "rim": 12199, "methods": 12200, "cycli": 12201, "norfolk": 12202, "ÙĪ": 12203, "overwhel": 12204, "rapid": 12205, "wear": 12206, "happybirthday": 12207, "progressive": 12208, "ðŁĴ¥": 12209, "cogn": 12210, "papa": 12211, "fool": 12212, "philosophy": 12213, "polar": 12214, "jimmy": 12215, "wig": 12216, "ðŁĴĭ": 12217, "operating": 12218, "reduction": 12219, "phi": 12220, "flags": 12221, "tothe": 12222, "odi": 12223, "ares": 12224, "koo": 12225, "kang": 12226, "arkansas": 12227, "ashton": 12228, "wimbledon": 12229, "scifi": 12230, "attractive": 12231, "mississippi": 12232, "logists": 12233, "ralph": 12234, "label": 12235, "graduates": 12236, "maha": 12237, "hometown": 12238, "âľĮï¸ı": 12239, "founded": 12240, "onthe": 12241, "liz": 12242, "transl": 12243, "minimum": 12244, "presti": 12245, "tam": 12246, "generations": 12247, "rebel": 12248, "journalists": 12249, "param": 12250, "mcm": 12251, "acrylic": 12252, "deaths": 12253, "tesla": 12254, "wt": 12255, "bryant": 12256, "jerus": 12257, "istanbul": 12258, "muhammad": 12259, "riley": 12260, "kris": 12261, "workshops": 12262, "iso": 12263, "counts": 12264, "stret": 12265, "protected": 12266, "trinity": 12267, "manual": 12268, "rhin": 12269, "ril": 12270, "pleasant": 12271, "lemon": 12272, "nerd": 12273, "harder": 12274, "darren": 12275, "bury": 12276, "rah": 12277, "basis": 12278, "migu": 12279, "occasion": 12280, "lists": 12281, "âĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ı": 12282, "eb": 12283, "decre": 12284, "hampton": 12285, "ìĿ´": 12286, "travis": 12287, "transform": 12288, 
"puerto": 12289, "nhl": 12290, "avoc": 12291, "trips": 12292, "unexpected": 12293, "vet": 12294, "didyou": 12295, "barber": 12296, "stages": 12297, "mson": 12298, "represented": 12299, "fort": 12300, "lal": 12301, "pple": 12302, "nicely": 12303, "ignore": 12304, "quil": 12305, "quinn": 12306, "hk": 12307, "carrier": 12308, "reminded": 12309, "among": 12310, "passenger": 12311, "ellen": 12312, "guez": 12313, "scape": 12314, "mural": 12315, "youngest": 12316, "mash": 12317, "dill": 12318, "routine": 12319, "stainless": 12320, "jackson": 12321, "gandhi": 12322, "thal": 12323, "oners": 12324, "editorial": 12325, "conversations": 12326, "sdale": 12327, "automation": 12328, "ike": 12329, "าà¸": 12330, "ðŁĩª": 12331, "haul": 12332, "laying": 12333, "mentions": 12334, "amen": 12335, "abortion": 12336, "ibi": 12337, "counties": 12338, "catherine": 12339, "mands": 12340, "jame": 12341, "roller": 12342, "aut": 12343, "nam": 12344, "ological": 12345, "ception": 12346, "ranking": 12347, "toxic": 12348, "snacks": 12349, "victorian": 12350, "bangkok": 12351, "psychology": 12352, "reg": 12353, "angela": 12354, "respond": 12355, "style": 12356, "sophie": 12357, "dakota": 12358, "achieved": 12359, "marked": 12360, "imperial": 12361, "inas": 12362, "gloves": 12363, "slim": 12364, "confident": 12365, "attacked": 12366, "gger": 12367, "lonely": 12368, "valentinesday": 12369, "reb": 12370, "craftbeer": 12371, "origin": 12372, "zimbab": 12373, "ceiling": 12374, "teens": 12375, "otherwise": 12376, "wb": 12377, "fers": 12378, "daysof": 12379, "advisor": 12380, "yah": 12381, "âĻª": 12382, "ender": 12383, "republicans": 12384, "ava": 12385, "skirt": 12386, "pipel": 12387, "chie": 12388, "jane": 12389, "jax": 12390, "ðŁĺĭ": 12391, "âľĬ": 12392, "jays": 12393, "brett": 12394, "balo": 12395, "crucial": 12396, "dhar": 12397, "asis": 12398, "deau": 12399, "lloyd": 12400, "chatting": 12401, "âĿĦï¸ı": 12402, "relay": 12403, "remarkable": 12404, "ns": 12405, "wet": 12406, "brisbane": 12407, "ðŁĶ´": 12408, "tionally": 12409, "fk": 12410, "layer": 12411, "household": 12412, "consecutive": 12413, "esis": 12414, "pendant": 12415, "stir": 12416, "critic": 12417, "sugar": 12418, "photoshop": 12419, "pares": 12420, "artistic": 12421, "dodgers": 12422, "cun": 12423, "crafted": 12424, "amend": 12425, "boat": 12426, "âŃIJï¸ı": 12427, "egyptian": 12428, "saw": 12429, "trage": 12430, "smaller": 12431, "oxy": 12432, "paired": 12433, "next": 12434, "ires": 12435, "taco": 12436, "oy": 12437, "uc": 12438, "sti": 12439, "aerial": 12440, "://": 12441, "dro": 12442, "dotcom": 12443, "ggins": 12444, "rpg": 12445, "aye": 12446, "lean": 12447, "striker": 12448, "lobby": 12449, "protests": 12450, "priority": 12451, "congress": 12452, "amate": 12453, "invit": 12454, "rington": 12455, "mommy": 12456, "thus": 12457, "allowing": 12458, "pioneer": 12459, "enforcement": 12460, "gori": 12461, "talk": 12462, "drag": 12463, "dumb": 12464, "bullet": 12465, "sange": 12466, "ery": 12467, "targets": 12468, "ðŁĩ¦": 12469, "heather": 12470, "consider": 12471, "seafood": 12472, "vest": 12473, "risks": 12474, "%.": 12475, "pg": 12476, "sacred": 12477, "heating": 12478, "kicked": 12479, "ttot": 12480, ".-": 12481, "chandi": 12482, "coven": 12483, "pool": 12484, "pulse": 12485, "ia": 12486, "roster": 12487, "shakespeare": 12488, "esa": 12489, "cargo": 12490, "peanut": 12491, "troop": 12492, "action": 12493, "tablet": 12494, "homework": 12495, "castle": 12496, "struction": 12497, "musicians": 12498, "freezing": 12499, "butt": 12500, "justinbieber": 12501, "jj": 
12502, "bahrain": 12503, "anthem": 12504, "audit": 12505, "didyouknow": 12506, "navig": 12507, "guidance": 12508, "âĸ¶": 12509, "turf": 12510, "nun": 12511, "fications": 12512, "yemen": 12513, "charging": 12514, "xc": 12515, "broncos": 12516, "subur": 12517, "pale": 12518, "boring": 12519, "amongst": 12520, "forthe": 12521, "emper": 12522, "omfg": 12523, "pj": 12524, "expecting": 12525, "ðŁĴ«": 12526, "stl": 12527, "admin": 12528, "expectations": 12529, "swan": 12530, "shoot": 12531, "ooooo": 12532, "minent": 12533, "ãĢIJ": 12534, "wallace": 12535, "stang": 12536, "saturday": 12537, "adopted": 12538, "doubles": 12539, "homie": 12540, "omez": 12541, "dhan": 12542, "venture": 12543, "surrounding": 12544, "file": 12545, "mobility": 12546, "dees": 12547, "wski": 12548, "brooke": 12549, "embro": 12550, "remembers": 12551, "kara": 12552, "testim": 12553, "botan": 12554, "mtv": 12555, "sacrifice": 12556, "jerusalem": 12557, "dl": 12558, "´": 12559, "properly": 12560, "ilion": 12561, "asi": 12562, "legit": 12563, "cope": 12564, "mcla": 12565, "recycling": 12566, "larger": 12567, "ðŁĴĵ": 12568, "patric": 12569, "generous": 12570, "jared": 12571, "pf": 12572, "molly": 12573, "thomas": 12574, "judges": 12575, "hb": 12576, "sorts": 12577, "blvd": 12578, "oven": 12579, "entering": 12580, "planes": 12581, "beet": 12582, "integration": 12583, "booked": 12584, "freed": 12585, "vern": 12586, "ashes": 12587, "topped": 12588, "depot": 12589, "welcomed": 12590, "rena": 12591, "mick": 12592, "dand": 12593, "seeks": 12594, "gamer": 12595, "rankings": 12596, "rene": 12597, "mut": 12598, "whisky": 12599, "firefighters": 12600, "gues": 12601, "gather": 12602, "tourney": 12603, "demen": 12604, "yang": 12605, "newton": 12606, "automotive": 12607, "backyard": 12608, "detailed": 12609, "mist": 12610, "tobac": 12611, "fiber": 12612, "unusual": 12613, "gratitude": 12614, "spare": 12615, "neys": 12616, ":*": 12617, "peri": 12618, "floating": 12619, "finalist": 12620, "donating": 12621, "dress": 12622, "broad": 12623, "bethe": 12624, "economics": 12625, "taiwan": 12626, "edwards": 12627, "plug": 12628, "prairi": 12629, "valen": 12630, "baba": 12631, "fad": 12632, "anas": 12633, "harper": 12634, "disorder": 12635, "applied": 12636, "patt": 12637, "bikin": 12638, "liver": 12639, "curi": 12640, "caroline": 12641, "anner": 12642, "julian": 12643, "walking": 12644, "malcol": 12645, "screenshot": 12646, "coding": 12647, "skincare": 12648, "activists": 12649, "mysterious": 12650, "exact": 12651, "blocking": 12652, "mercury": 12653, "batter": 12654, "dump": 12655, "âľĮ": 12656, "ense": 12657, "lish": 12658, "ridiculous": 12659, "protesters": 12660, "ðŁĻĪ": 12661, "lust": 12662, "sweat": 12663, "ass": 12664, "alike": 12665, "cody": 12666, "rements": 12667, "winds": 12668, "aspir": 12669, "vienna": 12670, "pray": 12671, "...@": 12672, "boi": 12673, "candle": 12674, "assists": 12675, "tee": 12676, "derson": 12677, "pony": 12678, "fence": 12679, "conspir": 12680, "âĺħâĺħ": 12681, "ooth": 12682, "epic": 12683, "barely": 12684, "aunt": 12685, "bam": 12686, "diamonds": 12687, "endless": 12688, "screens": 12689, "cancer": 12690, "gro": 12691, "pst": 12692, "prospec": 12693, "mosque": 12694, "helpful": 12695, "ouri": 12696, "brother": 12697, "gujar": 12698, "cristi": 12699, "inez": 12700, "towers": 12701, "addresses": 12702, "gray": 12703, "burton": 12704, "retweeted": 12705, "ð٤Ķ": 12706, "nity": 12707, "duck": 12708, "supervis": 12709, "joan": 12710, "kinder": 12711, "sanctu": 12712, "pied": 12713, "âı°": 12714, "łï¸ı": 12715, "mati": 
12716, "revenge": 12717, "cester": 12718, "elife": 12719, "designers": 12720, "backed": 12721, "boli": 12722, "weight": 12723, "couch": 12724, "sures": 12725, "sits": 12726, "shrimp": 12727, "lagos": 12728, "authorities": 12729, "osity": 12730, "holly": 12731, "computing": 12732, "factors": 12733, "abe": 12734, "panels": 12735, "ramad": 12736, "sentence": 12737, "mission": 12738, "holm": 12739, "rb": 12740, "dads": 12741, "shanghai": 12742, "money": 12743, "sheets": 12744, "skate": 12745, "threw": 12746, "cupcakes": 12747, "infinite": 12748, "lis": 12749, "practicing": 12750, "essay": 12751, "kai": 12752, "asci": 12753, "mob": 12754, "ugh": 12755, "holmes": 12756, "regg": 12757, "ikh": 12758, "mock": 12759, "collections": 12760, "pep": 12761, "ova": 12762, "salt": 12763, "nandez": 12764, "coy": 12765, "threats": 12766, "texts": 12767, "cinnam": 12768, "pregnancy": 12769, "pending": 12770, "stamp": 12771, "flower": 12772, "gis": 12773, "agreed": 12774, "payne": 12775, "rover": 12776, "phra": 12777, "soft": 12778, "ffin": 12779, "fathers": 12780, "passengers": 12781, "aways": 12782, "ala": 12783, "hes": 12784, "livan": 12785, "ins": 12786, "samuel": 12787, "ingui": 12788, "hof": 12789, "jj": 12790, "chennai": 12791, "catal": 12792, "omic": 12793, "heath": 12794, "niece": 12795, "pumped": 12796, "integrated": 12797, "arel": 12798, "nom": 12799, "productivity": 12800, "wanting": 12801, "visa": 12802, "diana": 12803, "twil": 12804, "itv": 12805, "camps": 12806, "rowing": 12807, "dley": 12808, "blackand": 12809, "guards": 12810, "bells": 12811, "reverse": 12812, "vibe": 12813, "ricky": 12814, "moss": 12815, "nyt": 12816, "âĺĢï¸ı": 12817, "elle": 12818, "troy": 12819, "cudd": 12820, "evan": 12821, "womens": 12822, "foto": 12823, "mistakes": 12824, "wicked": 12825, "mil": 12826, "cled": 12827, "memes": 12828, "cosmo": 12829, "scholar": 12830, "reno": 12831, "ðŁĺĢ": 12832, "vents": 12833, "#â̦": 12834, "terrorists": 12835, "casey": 12836, "cardinals": 12837, "ðŁĺĬðŁĺĬ": 12838, "venezuela": 12839, "bola": 12840, "literacy": 12841, "tw": 12842, "eno": 12843, "contains": 12844, "austin": 12845, "financi": 12846, "evan": 12847, "harvard": 12848, "originally": 12849, "chevro": 12850, "herald": 12851, "nottingham": 12852, "managers": 12853, "âŀ¡": 12854, "accepting": 12855, "walsh": 12856, "tutorial": 12857, "entrepreneurship": 12858, "yacht": 12859, "requirements": 12860, "glenn": 12861, "pede": 12862, "unfortunately": 12863, "aching": 12864, "daisy": 12865, "gian": 12866, "nightmare": 12867, "âĿĹ": 12868, "rina": 12869, "bart": 12870, "emails": 12871, "opposite": 12872, "whom": 12873, "sake": 12874, "puzzle": 12875, "dashi": 12876, "party": 12877, "blanket": 12878, "buses": 12879, "lore": 12880, "beauty": 12881, "reason": 12882, "punjab": 12883, "windsor": 12884, "functional": 12885, "existing": 12886, "hello": 12887, "glimp": 12888, "convin": 12889, "lak": 12890, "screaming": 12891, "rebecca": 12892, "bliss": 12893, "northwest": 12894, "infinity": 12895, "cosmetics": 12896, "pulling": 12897, "coffee": 12898, "pling": 12899, "opho": 12900, "colombia": 12901, "interiordesign": 12902, "(+": 12903, "emotions": 12904, "sac": 12905, "sunglasses": 12906, "saves": 12907, "df": 12908, "sixth": 12909, "aly": 12910, "ðŁĺ»": 12911, "deen": 12912, "devast": 12913, "politicians": 12914, "lacrosse": 12915, "gu": 12916, "pei": 12917, "java": 12918, "combine": 12919, "coalition": 12920, "erts": 12921, "surviv": 12922, "chad": 12923, "strian": 12924, "nn": 12925, "devi": 12926, "counc": 12927, "concern": 12928, 
"controller": 12929, "breast": 12930, "jury": 12931, "tum": 12932, "introduces": 12933, "ladi": 12934, "mobile": 12935, "alz": 12936, "steady": 12937, "nurses": 12938, "hacking": 12939, "online": 12940, "ocean": 12941, "ðŁİĦ": 12942, "aam": 12943, "juven": 12944, "icc": 12945, "louisiana": 12946, "arte": 12947, "streetart": 12948, "ison": 12949, "wns": 12950, "frm": 12951, "panda": 12952, "noir": 12953, "maintain": 12954, "delay": 12955, "symptoms": 12956, "thorn": 12957, "geome": 12958, "tern": 12959, "carried": 12960, "pru": 12961, "panor": 12962, "assy": 12963, "peru": 12964, "cloud": 12965, "spra": 12966, "pedi": 12967, "este": 12968, "tagged": 12969, "ðŁĺĿ": 12970, "shadows": 12971, "nazi": 12972, "اÙĦ": 12973, "corri": 12974, "âĻ¥âĻ¥": 12975, "jad": 12976, "ðŁĩ«": 12977, "formal": 12978, "spoken": 12979, "ðŁĮŀ": 12980, "enjoy": 12981, "lopez": 12982, "outlook": 12983, "inho": 12984, "wander": 12985, "Ùħ": 12986, "maya": 12987, "pee": 12988, "dine": 12989, "ãĢij": 12990, "briefing": 12991, "supporter": 12992, "arily": 12993, "ghters": 12994, "naturally": 12995, "doctorwho": 12996, "jen": 12997, "var": 12998, "newyear": 12999, "rese": 13000, "simm": 13001, "rex": 13002, "consequ": 13003, "tomatoes": 13004, "burst": 13005, "bravo": 13006, "burgers": 13007, "cracking": 13008, "northeast": 13009, "biom": 13010, "mushroom": 13011, "marque": 13012, "double": 13013, "nier": 13014, "vag": 13015, "twenty": 13016, "keyboard": 13017, "winni": 13018, "jamaica": 13019, "parish": 13020, ":-": 13021, "mentalhealth": 13022, "alizing": 13023, "render": 13024, "waking": 13025, "ðŁİĤ": 13026, "gly": 13027, "nathan": 13028, "washing": 13029, "melissa": 13030, "jung": 13031, "loyal": 13032, "chili": 13033, "songwriter": 13034, "guitarist": 13035, "bowie": 13036, "neighbors": 13037, "onymous": 13038, "asset": 13039, "tai": 13040, "headquarters": 13041, "ðŁĮĪ": 13042, "ihear": 13043, "cigare": 13044, "surg": 13045, ")\"": 13046, "repl": 13047, "darling": 13048, "ðŁĻĦ": 13049, "zak": 13050, "sare": 13051, "ãħĭ": 13052, "mickey": 13053, "warehouse": 13054, "massage": 13055, "inees": 13056, "didnt": 13057, "iw": 13058, "hurts": 13059, "engaging": 13060, "magic": 13061, "womenin": 13062, "kitten": 13063, "mors": 13064, "cart": 13065, "titans": 13066, "colleague": 13067, "competing": 13068, "eran": 13069, "khal": 13070, "marble": 13071, "demand": 13072, "delight": 13073, "etary": 13074, "blizz": 13075, "louise": 13076, "mls": 13077, "finishes": 13078, "experiment": 13079, "conducted": 13080, "electronics": 13081, "itters": 13082, "caring": 13083, "whats": 13084, "symbol": 13085, "jung": 13086, "ecu": 13087, "pix": 13088, "context": 13089, "charger": 13090, "ðŁĺĩ": 13091, "reig": 13092, "frag": 13093, "ëĭ": 13094, "chad": 13095, "true": 13096, "kerry": 13097, "defending": 13098, "aint": 13099, "auton": 13100, "checkout": 13101, "barnes": 13102, "lessly": 13103, "dt": 13104, "mme": 13105, "cloudy": 13106, "secondary": 13107, "arez": 13108, "_:": 13109, "appa": 13110, "constant": 13111, "\")": 13112, "vets": 13113, "job": 13114, "ient": 13115, "ðŁĺŃðŁĺŃðŁĺŃ": 13116, "mj": 13117, "french": 13118, "diver": 13119, "davies": 13120, "hhhh": 13121, "ebook": 13122, "à¹ī": 13123, "mariti": 13124, "breeze": 13125, "suspended": 13126, "mato": 13127, "viet": 13128, "rahu": 13129, "sei": 13130, "bolt": 13131, "enary": 13132, "leis": 13133, "karl": 13134, "framed": 13135, "explaining": 13136, "abc": 13137, "dealing": 13138, "nato": 13139, "jake": 13140, "expand": 13141, "leonard": 13142, "established": 13143, "dub": 13144, 
"armen": 13145, "elled": 13146, "vocal": 13147, "nicholas": 13148, "orient": 13149, "kyo": 13150, "illustrated": 13151, "ahh": 13152, "dancers": 13153, "million": 13154, "geta": 13155, "popp": 13156, "asu": 13157, "murdered": 13158, "gible": 13159, "stoked": 13160, "griffin": 13161, "maximum": 13162, "adrian": 13163, "encounter": 13164, "thero": 13165, "davidson": 13166, "ðŁį»": 13167, "holiday": 13168, "evo": 13169, "assets": 13170, "carson": 13171, "memorable": 13172, "âļ½": 13173, "obam": 13174, "representative": 13175, "cbd": 13176, "tricks": 13177, "vogue": 13178, "voice": 13179, "mmmm": 13180, "sebastian": 13181, "clif": 13182, "athy": 13183, "paralle": 13184, "ðŁ¤·": 13185, "pak": 13186, "evacu": 13187, "eats": 13188, "اØ": 13189, "touched": 13190, "organised": 13191, "spirits": 13192, "canad": 13193, "guided": 13194, "framework": 13195, "ðŁĮŁ": 13196, "ped": 13197, "natural": 13198, "agar": 13199, "replaced": 13200, "anchor": 13201, "tit": 13202, "shah": 13203, "organis": 13204, "superior": 13205, "rn": 13206, "chro": 13207, "erica": 13208, "still": 13209, "coron": 13210, "chuck": 13211, "locks": 13212, "organ": 13213, "rosen": 13214, "scam": 13215, "bened": 13216, "/#": 13217, "keen": 13218, "trevor": 13219, "vampire": 13220, "sorted": 13221, "!'": 13222, "afford": 13223, "intro": 13224, "grace": 13225, "ðŁĺľ": 13226, "saur": 13227, "kickstarter": 13228, "influen": 13229, "vu": 13230, "yup": 13231, "poc": 13232, "ðŁİ¥": 13233, "aar": 13234, "sang": 13235, "trek": 13236, "etsy": 13237, "tbh": 13238, "scream": 13239, "chevrolet": 13240, "pixel": 13241, "shepherd": 13242, "anor": 13243, "gabriel": 13244, "twood": 13245, "sdcc": 13246, "meters": 13247, "developers": 13248, "closure": 13249, "vw": 13250, "twitch": 13251, "ìĹ": 13252, "seoul": 13253, "price": 13254, "hog": 13255, "nish": 13256, "hillary": 13257, "scratch": 13258, "incen": 13259, "wagon": 13260, "disability": 13261, "panther": 13262, "chats": 13263, "gd": 13264, "witz": 13265, "sussex": 13266, "late": 13267, "denmark": 13268, "gerald": 13269, "cancelled": 13270, "nette": 13271, "ix": 13272, "naval": 13273, "baptist": 13274, "tet": 13275, "yad": 13276, "math": 13277, "hoy": 13278, "randy": 13279, "point": 13280, "intellec": 13281, "fruits": 13282, "wool": 13283, "guin": 13284, "pron": 13285, "theft": 13286, "condem": 13287, "marry": 13288, "nola": 13289, "architects": 13290, "cincin": 13291, "rockets": 13292, "gentleman": 13293, "explan": 13294, "tate": 13295, "doe": 13296, "raises": 13297, "wildlife": 13298, "wl": 13299, "insider": 13300, "blanc": 13301, "wp": 13302, "forsale": 13303, "nyc": 13304, "powell": 13305, "unbelievable": 13306, "pens": 13307, "goodies": 13308, "mustang": 13309, "pens": 13310, "stays": 13311, "squash": 13312, "xoxo": 13313, "nearby": 13314, "everton": 13315, "coco": 13316, "leagu": 13317, "khan": 13318, "stud": 13319, "southwest": 13320, "construc": 13321, "sworth": 13322, "croatia": 13323, "lea": 13324, "sums": 13325, "aims": 13326, "ean": 13327, "vaness": 13328, "itious": 13329, "pathy": 13330, "arcade": 13331, "bend": 13332, "suggests": 13333, "sacram": 13334, "royals": 13335, "rier": 13336, "emir": 13337, "incl": 13338, "ank": 13339, "clark": 13340, "right": 13341, "vacc": 13342, "ा": 13343, "tane": 13344, "lib": 13345, "usc": 13346, "sales": 13347, "huh": 13348, "sally": 13349, "vera": 13350, "pga": 13351, "grows": 13352, "drum": 13353, "tree": 13354, "ethics": 13355, "suggest": 13356, "isab": 13357, "sealed": 13358, "previously": 13359, "animated": 13360, "abdu": 13361, "rises": 13362, 
"glob": 13363, "predat": 13364, "scarf": 13365, "delic": 13366, "omar": 13367, "lli": 13368, "sxsw": 13369, "python": 13370, "nebra": 13371, "funk": 13372, "reflect": 13373, "pavilion": 13374, "tically": 13375, "chasing": 13376, "bakery": 13377, "invasion": 13378, "koh": 13379, "believed": 13380, "cohen": 13381, "conqu": 13382, "crafts": 13383, "nati": 13384, "clever": 13385, "governance": 13386, "samples": 13387, "fails": 13388, "âĶ": 13389, "timo": 13390, "ritu": 13391, "striking": 13392, "inclusive": 13393, "shocking": 13394, "cant": 13395, "requires": 13396, "drawings": 13397, "à¸Ń": 13398, "purchased": 13399, "dum": 13400, "zach": 13401, "warner": 13402, "console": 13403, "mansion": 13404, "fountain": 13405, "circum": 13406, "esh": 13407, "island": 13408, "milk": 13409, "profits": 13410, "halifax": 13411, "rival": 13412, "âľĪï¸ı": 13413, "jenny": 13414, "sandra": 13415, "nye": 13416, "kelly": 13417, "yal": 13418, "quad": 13419, "nos": 13420, "instein": 13421, "finalists": 13422, "midfielder": 13423, "cue": 13424, "exceptional": 13425, "aan": 13426, "sapp": 13427, "gettin": 13428, "saa": 13429, "fati": 13430, "slice": 13431, "volk": 13432, "swal": 13433, "lasting": 13434, "summary": 13435, "itas": 13436, "smo": 13437, "sz": 13438, "âĺĨ": 13439, "ipl": 13440, "flames": 13441, "enews": 13442, "hav": 13443, "hoodie": 13444, "pitcher": 13445, "windy": 13446, "revol": 13447, "central": 13448, "tonite": 13449, "ðŁİīðŁİī": 13450, "solved": 13451, "milwau": 13452, "organizations": 13453, "weets": 13454, "refin": 13455, "sth": 13456, "ãĥ¼": 13457, "elin": 13458, "tona": 13459, "cinnamon": 13460, "ðŁİ¨": 13461, "ðŁİģ": 13462, "ronaldo": 13463, "peninsu": 13464, "omega": 13465, "elds": 13466, "designing": 13467, "eigh": 13468, "bluet": 13469, "benz": 13470, "nug": 13471, "asha": 13472, "robots": 13473, "sudan": 13474, "choosing": 13475, "endo": 13476, "serge": 13477, "closely": 13478, "handy": 13479, "finger": 13480, "being": 13481, "arte": 13482, "survived": 13483, "flame": 13484, "milestone": 13485, "gut": 13486, "dwar": 13487, "futures": 13488, "ée": 13489, "elo": 13490, "fridge": 13491, "elic": 13492, "ouch": 13493, "ub": 13494, "pv": 13495, "titan": 13496, "collar": 13497, "station": 13498, "nevada": 13499, "aurora": 13500, "rd": 13501, "duncan": 13502, "âģł": 13503, "brien": 13504, "marsh": 13505, "о": 13506, "total": 13507, "chry": 13508, "sers": 13509, "suffe": 13510, "rachel": 13511, "college": 13512, "todays": 13513, "courts": 13514, "chit": 13515, "reunited": 13516, "gymna": 13517, "genesis": 13518, "beside": 13519, "representation": 13520, "chant": 13521, "collector": 13522, "rak": 13523, "athens": 13524, "nigh": 13525, "munich": 13526, "languages": 13527, "flu": 13528, "participation": 13529, "___": 13530, "cv": 13531, "spectrum": 13532, "soda": 13533, "cover": 13534, "referen": 13535, "abbo": 13536, "apa": 13537, "publication": 13538, "edm": 13539, "monica": 13540, "army": 13541, "ðŁļĢ": 13542, "divor": 13543, "dry": 13544, "streams": 13545, "robotics": 13546, "cider": 13547, "bullying": 13548, "approval": 13549, "stoke": 13550, "platforms": 13551, "sierra": 13552, "extin": 13553, "ib": 13554, "hayes": 13555, "succeed": 13556, "suffer": 13557, "atically": 13558, "dai": 13559, "lynch": 13560, "hound": 13561, "delines": 13562, "acknow": 13563, "dated": 13564, "exclusively": 13565, "heres": 13566, "facilit": 13567, "damaged": 13568, "charter": 13569, "lakers": 13570, "falcon": 13571, "unveiled": 13572, "welove": 13573, "ease": 13574, "patience": 13575, "lone": 13576, "gentle": 13577, 
"genetic": 13578, "producing": 13579, "gour": 13580, "shannon": 13581, "bilities": 13582, "zimbabwe": 13583, "pint": 13584, "daughters": 13585, "literary": 13586, "belle": 13587, "clam": 13588, "surrounded": 13589, "kany": 13590, "neil": 13591, "pirate": 13592, "ranger": 13593, "hbd": 13594, "natalie": 13595, "belong": 13596, "olympi": 13597, "embassy": 13598, "scol": 13599, "ener": 13600, "akin": 13601, "loren": 13602, "bh": 13603, ":/": 13604, "diva": 13605, "denim": 13606, "hipp": 13607, "ðŁĩµðŁĩ": 13608, "arnold": 13609, "?'": 13610, "weren": 13611, "empower": 13612, "disabled": 13613, "manor": 13614, "raspberry": 13615, "baf": 13616, "awful": 13617, "drummer": 13618, "kardashi": 13619, "nash": 13620, "machinelearning": 13621, "chu": 13622, "rebels": 13623, "timing": 13624, "monroe": 13625, "tongue": 13626, "range": 13627, "pupils": 13628, "ress": 13629, "amazon": 13630, "bz": 13631, "harley": 13632, "palmer": 13633, "balloon": 13634, "sings": 13635, "icec": 13636, "jb": 13637, "cers": 13638, "gps": 13639, "whist": 13640, "rise": 13641, "lt": 13642, "oooo": 13643, "cattle": 13644, "shooter": 13645, "vodka": 13646, "ucl": 13647, "mtg": 13648, "lesli": 13649, "jonas": 13650, "dispo": 13651, "atric": 13652, "stein": 13653, "vintage": 13654, "firms": 13655, "floyd": 13656, "cowboy": 13657, "soooo": 13658, "isaac": 13659, "warcraft": 13660, "disneyland": 13661, "beautiful": 13662, "beam": 13663, "franchise": 13664, "bun": 13665, "kag": 13666, "anon": 13667, "turbo": 13668, "sweep": 13669, "madein": 13670, "karachi": 13671, "detective": 13672, "pennsylvania": 13673, "controversi": 13674, "vitamin": 13675, "aside": 13676, "chronic": 13677, "describes": 13678, "removal": 13679, "hah": 13680, "aper": 13681, "tened": 13682, "uto": 13683, "badly": 13684, "mirac": 13685, "fry": 13686, "yea": 13687, "injec": 13688, "thermal": 13689, "compact": 13690, "thor": 13691, "teed": 13692, "urgent": 13693, "lite": 13694, "gilli": 13695, "sophom": 13696, "ico": 13697, "chem": 13698, "pm": 13699, "fork": 13700, "freak": 13701, "chak": 13702, "recipient": 13703, "iy": 13704, "nik": 13705, "modeling": 13706, "cans": 13707, "ðŁıĢ": 13708, "delux": 13709, "seam": 13710, "survivors": 13711, "radical": 13712, "investigating": 13713, "reliable": 13714, "fm": 13715, "turt": 13716, "lighthouse": 13717, "tool": 13718, "gown": 13719, "))": 13720, "bots": 13721, "autograph": 13722, "aid": 13723, "buffe": 13724, "hmm": 13725, "horrible": 13726, "ssional": 13727, "anni": 13728, "à¹Ģ": 13729, "kits": 13730, "schi": 13731, "eternal": 13732, "huss": 13733, "sensitive": 13734, "ru": 13735, "tastes": 13736, "checks": 13737, "imo": 13738, "portion": 13739, "skate": 13740, "eden": 13741, "halftime": 13742, "fried": 13743, "rihanna": 13744, "tise": 13745, "flick": 13746, "cain": 13747, "sgt": 13748, "âľĶ": 13749, "shau": 13750, "stained": 13751, "raffle": 13752, "drove": 13753, "salman": 13754, "principles": 13755, "sho": 13756, "aru": 13757, "jess": 13758, "guine": 13759, "garbage": 13760, "myan": 13761, "jelly": 13762, "disru": 13763, "zia": 13764, "qld": 13765, "entries": 13766, "lav": 13767, "flew": 13768, "admit": 13769, "objects": 13770, "compare": 13771, "nytimes": 13772, "cannes": 13773, "pn": 13774, "suffol": 13775, "roc": 13776, "dana": 13777, "egg": 13778, "hist": 13779, "counsel": 13780, "'!": 13781, "physi": 13782, "imagination": 13783, "adjust": 13784, "explosion": 13785, "plymouth": 13786, "horror": 13787, "elliott": 13788, "bourne": 13789, "dex": 13790, "breed": 13791, "audio": 13792, "lobster": 13793, 
"disappointed": 13794, "nationwide": 13795, "((": 13796, "increases": 13797, "australi": 13798, "cedar": 13799, "staring": 13800, "racial": 13801, "eis": 13802, "gmt": 13803, "visions": 13804, "stayed": 13805, "discussions": 13806, "dean": 13807, "curtis": 13808, "maiden": 13809, "stellar": 13810, "happiest": 13811, "hwy": 13812, "preseason": 13813, "carav": 13814, "mondays": 13815, "hospitals": 13816, "glimpse": 13817, "scholars": 13818, "jai": 13819, "terrace": 13820, "anna": 13821, "goose": 13822, "graded": 13823, "lotus": 13824, "hung": 13825, "grocery": 13826, "stamps": 13827, "emperor": 13828, "scoop": 13829, "inser": 13830, "cas": 13831, "existence": 13832, "heal": 13833, "falcons": 13834, "marvel": 13835, "reducing": 13836, "terrific": 13837, "magnetic": 13838, "performs": 13839, "barre": 13840, "pus": 13841, "treating": 13842, "icon": 13843, "wh": 13844, "declared": 13845, "trauma": 13846, "dod": 13847, "comedian": 13848, "nikon": 13849, "bugs": 13850, "asm": 13851, "montgom": 13852, "ibiza": 13853, "comprehensive": 13854, "has": 13855, "santi": 13856, "fellowship": 13857, "dash": 13858, "psal": 13859, "louisville": 13860, "spy": 13861, "fault": 13862, "dthe": 13863, "filed": 13864, "vista": 13865, "desc": 13866, "fears": 13867, "youtu": 13868, "sps": 13869, "esp": 13870, "rig": 13871, "crime": 13872, "berger": 13873, "wonderland": 13874, "kent": 13875, "informed": 13876, "stevens": 13877, "myth": 13878, "aston": 13879, "iri": 13880, "visitor": 13881, "atri": 13882, "producers": 13883, "alla": 13884, "personally": 13885, "separate": 13886, "agencies": 13887, "afri": 13888, "ilan": 13889, "spoke": 13890, "nina": 13891, "squad": 13892, "dives": 13893, "depend": 13894, "liv": 13895, "fierce": 13896, "entertaining": 13897, "chain": 13898, "scat": 13899, "borders": 13900, "palette": 13901, "spro": 13902, "osis": 13903, "derby": 13904, "tobacco": 13905, "zio": 13906, "willie": 13907, "juvent": 13908, "zoom": 13909, "holy": 13910, "entirely": 13911, "afe": 13912, "martinez": 13913, "beds": 13914, "pea": 13915, "bulldogs": 13916, "ðŁĩªðŁĩ": 13917, "ibm": 13918, "neon": 13919, "ethiopia": 13920, "teammates": 13921, "planting": 13922, "twer": 13923, "anytime": 13924, "forbes": 13925, "ón": 13926, "runway": 13927, "nervous": 13928, "roger": 13929, "pile": 13930, "chanc": 13931, "apocaly": 13932, "uw": 13933, "oi": 13934, "drought": 13935, "territory": 13936, "brick": 13937, "creatures": 13938, "goin": 13939, "waff": 13940, "gren": 13941, "southeast": 13942, "jean": 13943, "ambul": 13944, "edited": 13945, "strap": 13946, "cv": 13947, "aaron": 13948, "ãĥ»ãĥ»": 13949, "tsu": 13950, "description": 13951, "kindly": 13952, "clutch": 13953, "immer": 13954, "enor": 13955, "womensday": 13956, "orange": 13957, "rag": 13958, "obvious": 13959, "hyder": 13960, "channels": 13961, "mango": 13962, "meyer": 13963, "raining": 13964, "getty": 13965, "pilgri": 13966, "coordinator": 13967, "upload": 13968, "nintendo": 13969, "donuts": 13970, "sanchez": 13971, "apparel": 13972, "jr": 13973, "zzi": 13974, ",@": 13975, "jefferson": 13976, "accessible": 13977, "greatly": 13978, "eid": 13979, "initial": 13980, "buddha": 13981, "paris": 13982, "mascot": 13983, "â¬ĩï¸ı": 13984, "schwar": 13985, "siri": 13986, "spinning": 13987, "mortgage": 13988, "echo": 13989, "endange": 13990, "gedly": 13991, "chloe": 13992, "enhance": 13993, "karnat": 13994, "kry": 13995, "explores": 13996, "ðŁĴģ": 13997, "affair": 13998, "icals": 13999, "alla": 14000, "dart": 14001, "dolphins": 14002, "differences": 14003, "squirrel": 14004, 
"augh": 14005, "drones": 14006, "ellen": 14007, "restore": 14008, "paw": 14009, "unfor": 14010, "pike": 14011, "hilton": 14012, "collab": 14013, "consumers": 14014, "coinci": 14015, "outcomes": 14016, "ppp": 14017, "aq": 14018, "coupon": 14019, "liest": 14020, "sims": 14021, "kho": 14022, "aves": 14023, "spoon": 14024, "pudding": 14025, "corbyn": 14026, "haters": 14027, "exams": 14028, "slave": 14029, ".!": 14030, "psa": 14031, "apples": 14032, "tamil": 14033, "sed": 14034, "coke": 14035, "zzo": 14036, "losange": 14037, "carbon": 14038, "clair": 14039, "...)": 14040, "khu": 14041, "craig": 14042, "exploration": 14043, "sanctuary": 14044, "sue": 14045, "alway": 14046, "dementia": 14047, "wonders": 14048, "superhero": 14049, "pakistani": 14050, "browns": 14051, "bluetooth": 14052, "locker": 14053, "marc": 14054, "eventu": 14055, "deluxe": 14056, "rodriguez": 14057, "âĿ¤âĿ¤": 14058, "robb": 14059, "ðŁĴ¦": 14060, "linux": 14061, "tens": 14062, "intelligent": 14063, "seed": 14064, "voter": 14065, "sler": 14066, "peaks": 14067, "intern": 14068, "teenage": 14069, "peninsula": 14070, "handling": 14071, "tie": 14072, "cousins": 14073, "wendy": 14074, "mee": 14075, "à¹Ģà¸": 14076, "dino": 14077, "ðŁĴ°": 14078, "ðŁĺĥ": 14079, "zee": 14080, "sbury": 14081, "tragedy": 14082, "bk": 14083, "bore": 14084, "zin": 14085, "warns": 14086, "idiot": 14087, "touching": 14088, "continental": 14089, "tacos": 14090, "safari": 14091, "washed": 14092, "podium": 14093, "morrison": 14094, "forests": 14095, "cbc": 14096, "alon": 14097, "particular": 14098, "beads": 14099, "invented": 14100, "loch": 14101, "lighter": 14102, "wherever": 14103, "ide": 14104, "documents": 14105, "awe": 14106, "kr": 14107, "nowhere": 14108, "miner": 14109, "stit": 14110, "rox": 14111, "contribute": 14112, "hardy": 14113, "clan": 14114, "object": 14115, "cait": 14116, "ðŁĴķðŁĴķ": 14117, "happier": 14118, "vegetables": 14119, "tart": 14120, "gag": 14121, "nominee": 14122, "heavily": 14123, "panic": 14124, "jd": 14125, "theresa": 14126, "atm": 14127, "uph": 14128, "sfc": 14129, "suri": 14130, "drink": 14131, "nal": 14132, "revel": 14133, "kl": 14134, "avocado": 14135, "nomination": 14136, "madonna": 14137, "sharon": 14138, "malcolm": 14139, "controlled": 14140, "shers": 14141, "revival": 14142, "legislation": 14143, "shoots": 14144, "nin": 14145, "commentary": 14146, "pros": 14147, "humanrights": 14148, "stranger": 14149, "mitch": 14150, "pipeline": 14151, "legally": 14152, "thu": 14153, "gilbert": 14154, "toll": 14155, "granted": 14156, "ghs": 14157, "iranian": 14158, "refreshing": 14159, "duk": 14160, "abi": 14161, "prime": 14162, "joseph": 14163, "mosa": 14164, "statistics": 14165, "productions": 14166, "merry": 14167, "patel": 14168, "sax": 14169, "humanitarian": 14170, "structures": 14171, "emissions": 14172, "towns": 14173, "freel": 14174, "stering": 14175, "ratings": 14176, "allegedly": 14177, "cabin": 14178, "stl": 14179, "wade": 14180, "flyers": 14181, "trim": 14182, "promising": 14183, "zu": 14184, "ballot": 14185, "comparison": 14186, "freeze": 14187, "outer": 14188, "greatness": 14189, "assign": 14190, "snowy": 14191, "rale": 14192, "tories": 14193, "mediter": 14194, "knock": 14195, "consultant": 14196, "cincinnati": 14197, "analyst": 14198, "scoo": 14199, "jews": 14200, "approxim": 14201, "pure": 14202, "portraits": 14203, "cyrus": 14204, "ational": 14205, "loans": 14206, "acquis": 14207, "elu": 14208, "acceptable": 14209, "union": 14210, "watercolor": 14211, "rust": 14212, "battles": 14213, "perfu": 14214, "seasonal": 14215, 
"serial": 14216, "mindset": 14217, "riot": 14218, "feld": 14219, "ennial": 14220, "closet": 14221, "priest": 14222, "tanks": 14223, "intl": 14224, "screw": 14225, "bum": 14226, "abdul": 14227, "oux": 14228, "explained": 14229, "rica": 14230, "imaging": 14231, "lawyers": 14232, "buried": 14233, "ãĥ»ãĥ»ãĥ»": 14234, "earl": 14235, "âĢķ": 14236, "lton": 14237, "restored": 14238, "stripes": 14239, "foss": 14240, "demands": 14241, "stealing": 14242, "alexis": 14243, "mund": 14244, "aker": 14245, "urus": 14246, "wardro": 14247, "hugs": 14248, "genre": 14249, "ego": 14250, "ÙĦ": 14251, "participated": 14252, "babes": 14253, "banquet": 14254, "tious": 14255, "hemi": 14256, "dsb": 14257, "lost": 14258, "milwaukee": 14259, "jenner": 14260, "gem": 14261, "outra": 14262, "loses": 14263, "idi": 14264, "reps": 14265, "ðŁİ§": 14266, "regulation": 14267, "flaw": 14268, "fang": 14269, "vibrant": 14270, "ramp": 14271, "rains": 14272, "wellbeing": 14273, "soviet": 14274, "viewers": 14275, "depo": 14276, "libraries": 14277, "bigo": 14278, "sery": 14279, "gill": 14280, "destruction": 14281, "coz": 14282, "cx": 14283, "bridal": 14284, "alds": 14285, "planted": 14286, "amateur": 14287, "lud": 14288, "cheering": 14289, "showcas": 14290, "profile": 14291, "iu": 14292, "vertical": 14293, "packers": 14294, "wizard": 14295, "skip": 14296, "slight": 14297, "beau": 14298, "airways": 14299, "much": 14300, "rera": 14301, "ðŁĮĬ": 14302, "absor": 14303, "patio": 14304, "packages": 14305, "sells": 14306, "mentally": 14307, "ðŁĺ¢": 14308, "reynolds": 14309, "kare": 14310, "tribun": 14311, "walt": 14312, "knit": 14313, "taste": 14314, "surrey": 14315, "bounce": 14316, "creature": 14317, "bare": 14318, "betting": 14319, "sure": 14320, "miley": 14321, "laughs": 14322, "alore": 14323, "cyn": 14324, "tl": 14325, "artist": 14326, "annah": 14327, "warmer": 14328, "dynamics": 14329, "lunchtime": 14330, "maritime": 14331, "vulnerable": 14332, "ðŁĴĥ": 14333, "wolver": 14334, "durham": 14335, "constantly": 14336, "amin": 14337, "sibl": 14338, ":@": 14339, "bullet": 14340, "kach": 14341, "angelo": 14342, "wilder": 14343, "doom": 14344, "desktop": 14345, "lawsuit": 14346, "kca": 14347, "henderson": 14348, "inviting": 14349, "betty": 14350, "tawards": 14351, "rafa": 14352, "leaked": 14353, "andi": 14354, "gems": 14355, "afl": 14356, "velo": 14357, "mediterran": 14358, "probe": 14359, "totten": 14360, "stephanie": 14361, "snation": 14362, "combe": 14363, "qs": 14364, "overcome": 14365, "assassin": 14366, "rav": 14367, "filip": 14368, "winnipeg": 14369, "shil": 14370, "determined": 14371, "kas": 14372, "outre": 14373, "regret": 14374, "guides": 14375, "aaa": 14376, "ðŁĺĪ": 14377, "wives": 14378, "manife": 14379, "erly": 14380, "smy": 14381, "shima": 14382, "xing": 14383, "pixel": 14384, "jacob": 14385, "accommod": 14386, "toy": 14387, "ono": 14388, "poo": 14389, "tier": 14390, "answe": 14391, "ðŁĴģ": 14392, "rosa": 14393, "lease": 14394, "belongs": 14395, "thar": 14396, "eventually": 14397, "neither": 14398, "goa": 14399, "skiing": 14400, "atra": 14401, "agh": 14402, "broadcasting": 14403, "fury": 14404, "pyram": 14405, "dice": 14406, "volkswag": 14407, "womens": 14408, "provider": 14409, "bombs": 14410, "missile": 14411, "whip": 14412, "dick": 14413, "norwe": 14414, "backup": 14415, "elder": 14416, "mature": 14417, "concerts": 14418, "gious": 14419, "squee": 14420, "goodmorning": 14421, "braves": 14422, "^_": 14423, "aussie": 14424, "luna": 14425, "males": 14426, "heck": 14427, "fortn": 14428, "romeo": 14429, "steelers": 14430, "pn": 14431, 
"peer": 14432, "represents": 14433, "«": 14434, "katy": 14435, "miguel": 14436, "require": 14437, "chains": 14438, "lur": 14439, "immediate": 14440, "timber": 14441, "âĸ¶ï¸ı": 14442, "advocacy": 14443, "export": 14444, "anz": 14445, "tiffany": 14446, "author": 14447, "ðŁİĪ": 14448, "dudes": 14449, "chilly": 14450, "hid": 14451, "harm": 14452, "bug": 14453, "monster": 14454, "terrier": 14455, "tuc": 14456, "storytelling": 14457, "tak": 14458, "inti": 14459, "immigrants": 14460, "bis": 14461, "reaches": 14462, "compassion": 14463, "johnny": 14464, "contributions": 14465, "ðŁIJ¶": 14466, "mechanical": 14467, "impression": 14468, "ranks": 14469, "kobe": 14470, "menting": 14471, "blossom": 14472, "pablo": 14473, "builder": 14474, "bombing": 14475, "twel": 14476, "sullivan": 14477, "omo": 14478, "pete": 14479, "demi": 14480, "kudos": 14481, "wbb": 14482, "tgif": 14483, "massach": 14484, "neighbor": 14485, "chefs": 14486, "engines": 14487, "pune": 14488, "gained": 14489, "phantom": 14490, "sdays": 14491, "extend": 14492, "gran": 14493, "centers": 14494, "jacqu": 14495, "datasci": 14496, "sleepy": 14497, "elvis": 14498, "answered": 14499, "slot": 14500, "cony": 14501, "flexible": 14502, "tially": 14503, "letics": 14504, "%,": 14505, "andrews": 14506, "sible": 14507, "momma": 14508, "vino": 14509, "dox": 14510, "invitational": 14511, "twilight": 14512, "jade": 14513, "illery": 14514, "johns": 14515, "fou": 14516, "pv": 14517, "--->": 14518, "breakdown": 14519, "billion": 14520, "printer": 14521, "mond": 14522, "cbc": 14523, "maggie": 14524, "legion": 14525, "dub": 14526, "kurt": 14527, "poor": 14528, "parenting": 14529, "regions": 14530, "bikini": 14531, "beware": 14532, "sional": 14533, "auburn": 14534, "kidding": 14535, "amples": 14536, "span": 14537, "contempor": 14538, "cic": 14539, "habits": 14540, "ako": 14541, "prefe": 14542, "buddies": 14543, "itz": 14544, "emily": 14545, "personnel": 14546, "mountain": 14547, "versus": 14548, "ðŁĺ¬": 14549, "earning": 14550, "sink": 14551, "dari": 14552, "uu": 14553, "swin": 14554, "ister": 14555, "brutal": 14556, "nac": 14557, "kata": 14558, "cloth": 14559, "amand": 14560, "ðŁĶĹ": 14561, "neo": 14562, "alumin": 14563, "weekends": 14564, "nebraska": 14565, "codes": 14566, "delayed": 14567, "bruno": 14568, "proven": 14569, "inc": 14570, "ight": 14571, "flan": 14572, "oro": 14573, "lambert": 14574, "regulat": 14575, "wf": 14576, "massachuse": 14577, "kardashian": 14578, "bernard": 14579, "fiesta": 14580, "volcano": 14581, "grandpa": 14582, "anca": 14583, "dre": 14584, "stitu": 14585, "meaning": 14586, "foam": 14587, "auck": 14588, "ated": 14589, "rl": 14590, "hotel": 14591, "persons": 14592, "dynasty": 14593, "ellor": 14594, "mai": 14595, "amne": 14596, "styling": 14597, "avier": 14598, "eg": 14599, "vegetarian": 14600, ",â̦": 14601, "founders": 14602, "stain": 14603, "gd": 14604, "cycles": 14605, "skyline": 14606, "tractor": 14607, "exists": 14608, "tral": 14609, "kidney": 14610, "maril": 14611, "instag": 14612, "sette": 14613, "addict": 14614, "triangle": 14615, "flashback": 14616, "controversial": 14617, "zon": 14618, "pins": 14619, "ias": 14620, "tray": 14621, "township": 14622, "delegates": 14623, "spam": 14624, "hms": 14625, "crane": 14626, "peoples": 14627, "olo": 14628, "faction": 14629, "butes": 14630, "onica": 14631, "delegation": 14632, "newprofile": 14633, "elier": 14634, "mca": 14635, "wand": 14636, "gely": 14637, "losangeles": 14638, "berke": 14639, "tive": 14640, "disrup": 14641, "zza": 14642, "casa": 14643, "jordan": 14644, "fordshire": 
14645, "gathered": 14646, "ichi": 14647, "attendees": 14648, "à¸Ńà¸": 14649, "peppers": 14650, "coin": 14651, "bourbon": 14652, "ernity": 14653, "rotary": 14654, "behaviour": 14655, "jeremy": 14656, "teamwork": 14657, "compliance": 14658, "tremend": 14659, "ðŁĩ§": 14660, "buhari": 14661, "cambo": 14662, "buyers": 14663, "hagen": 14664, "buds": 14665, "bayern": 14666, "monte": 14667, "smells": 14668, "anza": 14669, "athlon": 14670, "described": 14671, "workforce": 14672, "giving": 14673, "api": 14674, "investments": 14675, "dail": 14676, "selena": 14677, "database": 14678, "thum": 14679, "mortal": 14680, "student": 14681, "buyer": 14682, "dover": 14683, "garten": 14684, "attle": 14685, "loyalty": 14686, "genoci": 14687, "holocau": 14688, "theaters": 14689, "ruling": 14690, "venus": 14691, "patent": 14692, "chun": 14693, "abby": 14694, "awake": 14695, "massacre": 14696, "bangalore": 14697, "breaking": 14698, "simmons": 14699, "justi": 14700, "hale": 14701, "edchat": 14702, "ggles": 14703, "hawk": 14704, "marking": 14705, "headlines": 14706, "strom": 14707, "cove": 14708, "breathtaking": 14709, "medals": 14710, "haircut": 14711, "christine": 14712, "telegraph": 14713, "gujarat": 14714, "jura": 14715, "cane": 14716, "shore": 14717, "propaganda": 14718, "mueller": 14719, "........": 14720, "savi": 14721, "stomach": 14722, "throws": 14723, "tab": 14724, "warm": 14725, "jong": 14726, "renowned": 14727, "hir": 14728, "rais": 14729, "mushrooms": 14730, "guaranteed": 14731, "boa": 14732, "mj": 14733, "revolutionary": 14734, "certification": 14735, "bruins": 14736, "join": 14737, "wes": 14738, "passport": 14739, "cg": 14740, "sexu": 14741, "capable": 14742, "wv": 14743, "tones": 14744, "jackets": 14745, "accompan": 14746, "spinach": 14747, "forever": 14748, "blair": 14749, "watts": 14750, "gl": 14751, "couples": 14752, "prairie": 14753, "newprofilepic": 14754, "logistics": 14755, "massachusetts": 14756, "jaguar": 14757, "oid": 14758, "weal": 14759, "underwater": 14760, "moz": 14761, "yi": 14762, "maths": 14763, "myanmar": 14764, "preps": 14765, "suffered": 14766, "trace": 14767, "wali": 14768, "ahhh": 14769, "borg": 14770, "stitch": 14771, "culin": 14772, "realise": 14773, "infection": 14774, "discrimination": 14775, "shame": 14776, "ankle": 14777, "humid": 14778, "yt": 14779, "bracket": 14780, "truck": 14781, "triu": 14782, "easter": 14783, "community": 14784, "postcard": 14785, "involving": 14786, "tyler": 14787, "caramel": 14788, "overview": 14789, "examples": 14790, "integrity": 14791, "basement": 14792, "instruments": 14793, "anium": 14794, "atus": 14795, "gher": 14796, "laundry": 14797, "achieve": 14798, "geneva": 14799, "pricing": 14800, "hyderabad": 14801, "belief": 14802, "meta": 14803, "jaw": 14804, "accounting": 14805, "leader": 14806, "cristiano": 14807, "couture": 14808, "cyp": 14809, "vised": 14810, ",,,": 14811, "knu": 14812, "hick": 14813, "breaker": 14814, "bram": 14815, "rab": 14816, "moor": 14817, "hamas": 14818, "graduating": 14819, "puppies": 14820, "akh": 14821, "tah": 14822, "aches": 14823, "rie": 14824, "opini": 14825, "gta": 14826, "reign": 14827, "tragic": 14828, "rever": 14829, "pill": 14830, "pineapple": 14831, "touches": 14832, "dare": 14833, "leys": 14834, "ilo": 14835, "interiors": 14836, "scouts": 14837, "bart": 14838, "enzie": 14839, "dono": 14840, "brock": 14841, "christians": 14842, "ensemble": 14843, "·": 14844, "cinemas": 14845, "newport": 14846, "airline": 14847, "winston": 14848, "leigh": 14849, "contents": 14850, "prescri": 14851, "urge": 14852, "trout": 14853, 
"fically": 14854, "ilia": 14855, "subsi": 14856, "arer": 14857, "âļ¾ï¸ı": 14858, "wounded": 14859, "ðŁĻĤ": 14860, "pepper": 14861, "ðŁĴŀ": 14862, "fitted": 14863, "aff": 14864, "resur": 14865, "thursdaythoughts": 14866, "zero": 14867, "archaeology": 14868, "div": 14869, "jee": 14870, "ion": 14871, "awaiting": 14872, "cozy": 14873, "beauties": 14874, "bald": 14875, "data": 14876, "grizz": 14877, "stalk": 14878, "kinds": 14879, "cleared": 14880, "jessic": 14881, "regular": 14882, "aliens": 14883, "place": 14884, "bos": 14885, "bizar": 14886, "thisis": 14887, "ðŁĴĢ": 14888, "tottenham": 14889, "mafia": 14890, "slam": 14891, "ariana": 14892, "carroll": 14893, "backpack": 14894, "carey": 14895, "univ": 14896, "rg": 14897, "pep": 14898, "digit": 14899, "tattoos": 14900, "agon": 14901, "volunteering": 14902, "differen": 14903, "consumption": 14904, "kathr": 14905, "headphones": 14906, "tshirt": 14907, "ob": 14908, "element": 14909, "retail": 14910, "shru": 14911, "algori": 14912, "container": 14913, "conscious": 14914, "fil": 14915, "coming": 14916, "rash": 14917, "urope": 14918, "define": 14919, "gior": 14920, "feminist": 14921, "flowing": 14922, "routes": 14923, "glaci": 14924, "fert": 14925, "somerset": 14926, "antes": 14927, "tweeps": 14928, "$$": 14929, "hour": 14930, "endangered": 14931, "yearsof": 14932, "roh": 14933, "popped": 14934, "backing": 14935, "basil": 14936, "brake": 14937, "monaco": 14938, "lgbtq": 14939, "prague": 14940, "utility": 14941, "cassi": 14942, "gateway": 14943, "haunted": 14944, "schul": 14945, "ðŁİµ": 14946, "should": 14947, "walkingdead": 14948, "completing": 14949, "danny": 14950, "montgomery": 14951, "penguin": 14952, "ssi": 14953, "merchandi": 14954, "ðŁijij": 14955, "church": 14956, "hates": 14957, "captain": 14958, "breathing": 14959, "cet": 14960, "fairly": 14961, "approaches": 14962, "companion": 14963, "surprising": 14964, "kanye": 14965, "pey": 14966, "hindi": 14967, "targeted": 14968, "lords": 14969, "deut": 14970, "digging": 14971, "german": 14972, "rut": 14973, "energy": 14974, "closest": 14975, "yun": 14976, "apologi": 14977, "ั": 14978, "sack": 14979, "rup": 14980, "ddy": 14981, "portal": 14982, "dough": 14983, "bats": 14984, "ðŁĵ°": 14985, "atur": 14986, "grapher": 14987, "pires": 14988, "motors": 14989, "ðŁĮ¹": 14990, "jc": 14991, "dang": 14992, "tuk": 14993, "clue": 14994, "usc": 14995, "page": 14996, "dless": 14997, "brows": 14998, "jus": 14999, "ading": 15000, "remarks": 15001, "oom": 15002, "cardio": 15003, "stefan": 15004, "armstrong": 15005, "âĢ¢âĢ¢": 15006, "niest": 15007, "belgian": 15008, "biop": 15009, "soy": 15010, "lof": 15011, "íĥ": 15012, "qt": 15013, "flashbackfriday": 15014, "cee": 15015, "ģà¸": 15016, "wreck": 15017, "marines": 15018, "amendment": 15019, "wardrobe": 15020, "voy": 15021, "burned": 15022, "guitars": 15023, "rainf": 15024, "lifel": 15025, "ssil": 15026, "ounce": 15027, "external": 15028, "ckey": 15029, "mesh": 15030, "sheikh": 15031, "invitation": 15032, "suggesti": 15033, "popcorn": 15034, "phenomenal": 15035, "anonymous": 15036, "tuna": 15037, "chicago": 15038, "oval": 15039, "dely": 15040, "locals": 15041, "(&": 15042, "prof": 15043, "novel": 15044, "finder": 15045, "sparks": 15046, "laven": 15047, "infu": 15048, "nicks": 15049, "quant": 15050, "rae": 15051, "exec": 15052, "distingui": 15053, "stances": 15054, "mutual": 15055, "shal": 15056, "unveils": 15057, "edmonton": 15058, "zania": 15059, "adio": 15060, "viewer": 15061, "bradford": 15062, "auditorium": 15063, "quis": 15064, "react": 15065, "http": 15066, 
"lero": 15067, "cheeky": 15068, "impacts": 15069, "tak": 15070, "edt": 15071, "desperate": 15072, "tay": 15073, "ìĦ": 15074, "settle": 15075, "bargain": 15076, "resume": 15077, "unite": 15078, "thrown": 15079, "kest": 15080, "seys": 15081, "marching": 15082, "amit": 15083, "decline": 15084, "schar": 15085, "metr": 15086, "stanford": 15087, "linke": 15088, "berra": 15089, "dolls": 15090, "rugby": 15091, "jami": 15092, "bor": 15093, "roadtrip": 15094, "dinosaur": 15095, "mik": 15096, "sunder": 15097, "rem": 15098, "bk": 15099, "overseas": 15100, "naughty": 15101, "implementation": 15102, "iamsrk": 15103, "luncheon": 15104, "firing": 15105, "miami": 15106, "perez": 15107, "thee": 15108, "zon": 15109, "gifted": 15110, "conversion": 15111, "ceramic": 15112, "¡ï¸ı": 15113, "pedro": 15114, "ìĨ": 15115, "vick": 15116, "!@": 15117, "heed": 15118, "sid": 15119, "bw": 15120, "document": 15121, "plun": 15122, "grants": 15123, "fantasy": 15124, "predictions": 15125, "valid": 15126, "carved": 15127, "graduated": 15128, "ðŁijįðŁı»": 15129, "nationally": 15130, "chy": 15131, "afl": 15132, "resso": 15133, "blank": 15134, "rivals": 15135, "jig": 15136, "eties": 15137, "omics": 15138, "unemp": 15139, "bound": 15140, "sko": 15141, "inspection": 15142, "paral": 15143, "highs": 15144, "crisp": 15145, "bans": 15146, "oba": 15147, "[@": 15148, "cospla": 15149, "costumes": 15150, "recall": 15151, "mouth": 15152, "nigel": 15153, "bts": 15154, "tera": 15155, "kov": 15156, "docs": 15157, "westminster": 15158, "dict": 15159, "gravity": 15160, "kari": 15161, "rogue": 15162, "tted": 15163, "wark": 15164, "idaho": 15165, "wend": 15166, "awi": 15167, "queensland": 15168, "processes": 15169, "cliffe": 15170, "mick": 15171, "compens": 15172, "opol": 15173, "they": 15174, "clari": 15175, "wikipedia": 15176, "salmankhan": 15177, "hazard": 15178, "preston": 15179, "sweetest": 15180, "pdf": 15181, "chees": 15182, "trilo": 15183, "southafrica": 15184, "burnt": 15185, "($": 15186, "contain": 15187, "tp": 15188, "submitted": 15189, "soundcloud": 15190, "atu": 15191, "rez": 15192, "wordpress": 15193, "corrupt": 15194, "nf": 15195, "maker": 15196, "íķ": 15197, "paras": 15198, "advent": 15199, "rial": 15200, "cafe": 15201, "fossil": 15202, "!!!!!!!": 15203, "cows": 15204, "cj": 15205, "spur": 15206, "institutions": 15207, "landmark": 15208, "entit": 15209, "reut": 15210, "his": 15211, "alzheim": 15212, "wemb": 15213, "reggae": 15214, "mosqu": 15215, "stat": 15216, "identified": 15217, "dealer": 15218, "ream": 15219, "reland": 15220, "tension": 15221, "ðŁĩ©": 15222, "wrapping": 15223, "deeper": 15224, "frat": 15225, "reddit": 15226, "aris": 15227, "morocco": 15228, "..\"": 15229, "blow": 15230, "mapping": 15231, "priorities": 15232, "inga": 15233, "swap": 15234, "rewards": 15235, "conspiracy": 15236, "creative": 15237, "cj": 15238, "congressional": 15239, "vault": 15240, "plex": 15241, "sophomore": 15242, "shadow": 15243, "eless": 15244, "ðŁĺħ": 15245, "darts": 15246, "aldub": 15247, "annoying": 15248, "props": 15249, "nas": 15250, "aluminum": 15251, "hbo": 15252, "offense": 15253, "jill": 15254, "onions": 15255, "laur": 15256, "tae": 15257, "hardest": 15258, "shro": 15259, "gaining": 15260, "measure": 15261, "edtech": 15262, "cyprus": 15263, "tara": 15264, "angeli": 15265, "carlo": 15266, "goon": 15267, "alli": 15268, "implic": 15269, "jupit": 15270, "resilience": 15271, "hail": 15272, "balanced": 15273, ")...": 15274, "joyce": 15275, "gra": 15276, "theli": 15277, "defined": 15278, "shipped": 15279, "mainly": 15280, "mina": 15281, 
"lm": 15282, "sacri": 15283, "ober": 15284, "pim": 15285, "claiming": 15286, "enters": 15287, "corey": 15288, "bok": 15289, "cried": 15290, "cooling": 15291, "danielle": 15292, "pharmacy": 15293, "thorough": 15294, "cake": 15295, "klo": 15296, "outreach": 15297, "zens": 15298, "digitalmarketing": 15299, "valent": 15300, "snp": 15301, "herb": 15302, "mrw": 15303, "café": 15304, "captures": 15305, "notre": 15306, "triumph": 15307, "pancakes": 15308, "cumber": 15309, "spike": 15310, "dation": 15311, "bigg": 15312, "sper": 15313, "critical": 15314, "amal": 15315, "tooth": 15316, "founding": 15317, "astro": 15318, "'#": 15319, "quantum": 15320, "thames": 15321, "unc": 15322, "pride": 15323, "airbus": 15324, "knocked": 15325, "undefeated": 15326, "mediterranean": 15327, "calcu": 15328, "clown": 15329, "sensor": 15330, "hammer": 15331, "forgive": 15332, "cushi": 15333, "berry": 15334, "majestic": 15335, "elect": 15336, "politan": 15337, "gta": 15338, "kari": 15339, "burke": 15340, "seahawks": 15341, "volkswagen": 15342, "rei": 15343, "landscapes": 15344, "casu": 15345, "grandfather": 15346, "listened": 15347, "//": 15348, "startrek": 15349, "rainfall": 15350, "furry": 15351, "vier": 15352, "stark": 15353, "rifle": 15354, "ffa": 15355, "leges": 15356, "hillaryclinton": 15357, "minus": 15358, "correctly": 15359, "architectural": 15360, "prece": 15361, "upside": 15362, "boxer": 15363, "ðŁĻĮðŁı¼": 15364, "isai": 15365, "det": 15366, "provo": 15367, "tissue": 15368, "spooky": 15369, "veled": 15370, "recon": 15371, "prospects": 15372, "quebec": 15373, "âļ«": 15374, "igno": 15375, "anatomy": 15376, "shapes": 15377, "wp": 15378, "pinterest": 15379, "hore": 15380, "anes": 15381, "pickup": 15382, "tip": 15383, "pradesh": 15384, "hugh": 15385, "coe": 15386, "pok": 15387, "grammy": 15388, "wellington": 15389, "stigate": 15390, "righ": 15391, "leap": 15392, "kingston": 15393, "scenic": 15394, "gosh": 15395, "vani": 15396, "aug": 15397, "sary": 15398, "zier": 15399, "bureau": 15400, "linson": 15401, "conte": 15402, "fragr": 15403, "allan": 15404, "gaw": 15405, "lana": 15406, "collision": 15407, "surveill": 15408, "renais": 15409, "arrange": 15410, "sali": 15411, "doin": 15412, "brance": 15413, "brendan": 15414, "ourse": 15415, "incoming": 15416, "suspension": 15417, "à´": 15418, "lla": 15419, "educators": 15420, "intri": 15421, "dae": 15422, "biography": 15423, "bulgar": 15424, "villain": 15425, "gothic": 15426, "rwanda": 15427, "ew": 15428, "mayor": 15429, "meetup": 15430, "democrat": 15431, "morgan": 15432, "sudden": 15433, "tesco": 15434, "carrot": 15435, "bomber": 15436, "mckin": 15437, "rene": 15438, "funday": 15439, "agricultural": 15440, "hahah": 15441, "showtime": 15442, "forming": 15443, "cola": 15444, "scorpi": 15445, "quote": 15446, "poppy": 15447, "slife": 15448, "daz": 15449, "tub": 15450, "nen": 15451, "mot": 15452, "ðŁĺ»": 15453, "sore": 15454, "elderly": 15455, "ove": 15456, "skinny": 15457, "umi": 15458, "anco": 15459, "manship": 15460, "were": 15461, "gv": 15462, "kah": 15463, "folding": 15464, "neat": 15465, "samantha": 15466, "danish": 15467, "ukrain": 15468, "humidity": 15469, "nutri": 15470, "jakarta": 15471, "candles": 15472, "oooooooo": 15473, "atile": 15474, "strength": 15475, "ibra": 15476, "bapti": 15477, "charleston": 15478, "frames": 15479, "girls": 15480, "clearing": 15481, "gluten": 15482, "##": 15483, "supernatural": 15484, "jubi": 15485, "phone": 15486, "hein": 15487, "drun": 15488, "leak": 15489, "investor": 15490, "yer": 15491, "domain": 15492, "ballroom": 15493, "mish": 
15494, "appli": 15495, "offshore": 15496, "blaze": 15497, "doro": 15498, "âĺķï¸ı": 15499, "winery": 15500, "sharif": 15501, "adore": 15502, "nir": 15503, "safer": 15504, "sigh": 15505, "ascri": 15506, "strongly": 15507, "tracy": 15508, "cker": 15509, "oll": 15510, "faithful": 15511, "eyed": 15512, "delightful": 15513, "vism": 15514, "karnataka": 15515, "titan": 15516, "whar": 15517, "jerseys": 15518, "refur": 15519, "heaven": 15520, "grip": 15521, "panama": 15522, "preli": 15523, "gluten": 15524, "odd": 15525, "content": 15526, "ponti": 15527, "tioning": 15528, "ecommerce": 15529, "federation": 15530, "flawless": 15531, "gear": 15532, "tires": 15533, "byr": 15534, "police": 15535, "cuban": 15536, "tributes": 15537, "ticul": 15538, "churches": 15539, "nursery": 15540, "diaries": 15541, "museums": 15542, "snapped": 15543, "ivan": 15544, "wight": 15545, "tourists": 15546, "ramadan": 15547, "trent": 15548, "prophet": 15549, "wondered": 15550, "focusing": 15551, "hid": 15552, "icons": 15553, "iq": 15554, "ambulance": 15555, "pist": 15556, "funniest": 15557, "timeless": 15558, "srilan": 15559, "buys": 15560, "kids": 15561, "colourful": 15562, "ashi": 15563, "chir": 15564, "mum": 15565, "ðŁĵļ": 15566, "letter": 15567, "xen": 15568, "reuters": 15569, "preserve": 15570, "inting": 15571, "step": 15572, "fuji": 15573, "univer": 15574, "iu": 15575, "showdown": 15576, "poems": 15577, "surveillance": 15578, "suspected": 15579, "tae": 15580, "solving": 15581, "tomb": 15582, "mothersday": 15583, "carpen": 15584, "recruit": 15585, "pilots": 15586, "broc": 15587, "mixing": 15588, "fridays": 15589, "tyr": 15590, "representatives": 15591, "trapped": 15592, "abdul": 15593, "freestyle": 15594, "cluster": 15595, "âļłï¸ı": 15596, "kd": 15597, "skill": 15598, "pitt": 15599, "exo": 15600, "commerci": 15601, "museum": 15602, "locally": 15603, "gina": 15604, "nobel": 15605, "immune": 15606, "frac": 15607, "capsu": 15608, "mained": 15609, "attempts": 15610, "bulldog": 15611, "bespoke": 15612, "singers": 15613, "spelling": 15614, "segment": 15615, "natures": 15616, "tick": 15617, "lipstick": 15618, "cleaner": 15619, "gettable": 15620, "precision": 15621, "â̼ï¸ı": 15622, "thood": 15623, "reef": 15624, "nope": 15625, "billy": 15626, "digi": 15627, "musi": 15628, "rival": 15629, "figured": 15630, "tality": 15631, "sunny": 15632, "berk": 15633, "awww": 15634, "awaits": 15635, "unreal": 15636, "copen": 15637, "asylum": 15638, "exotic": 15639, "buen": 15640, "mock": 15641, "enable": 15642, "archy": 15643, "fra": 15644, "plastic": 15645, "almond": 15646, "ampli": 15647, "displays": 15648, "abbott": 15649, "sme": 15650, "xp": 15651, "ðŁĻĥ": 15652, "graphic": 15653, "ived": 15654, "mara": 15655, "caution": 15656, "leaks": 15657, "enberg": 15658, "ulu": 15659, "unicorn": 15660, "cannon": 15661, "apprentic": 15662, "ðŁĺĺðŁĺĺ": 15663, "bball": 15664, "willow": 15665, "atics": 15666, "amas": 15667, "manufacturer": 15668, "campaigns": 15669, "porters": 15670, "floors": 15671, "lsu": 15672, "type": 15673, "kej": 15674, "honorary": 15675, "itim": 15676, "tole": 15677, "minecraft": 15678, "dx": 15679, "mash": 15680, "rio": 15681, "consequences": 15682, "ronald": 15683, "gossi": 15684, "suffolk": 15685, "muse": 15686, "rbi": 15687, "livemusic": 15688, "ivan": 15689, "ðŁİ¤": 15690, "leu": 15691, "patriot": 15692, "manit": 15693, "lanca": 15694, "homedecor": 15695, "dear": 15696, "sigma": 15697, "tide": 15698, "strings": 15699, "vita": 15700, "sequel": 15701, "tryna": 15702, "investigate": 15703, "boris": 15704, "vegan": 15705, "barrier": 
15706, "mindfulness": 15707, "webb": 15708, "hustle": 15709, "inda": 15710, "tanzania": 15711, "stray": 15712, "texas": 15713, "cag": 15714, "diagnosis": 15715, "woman": 15716, "gw": 15717, "obsession": 15718, "lative": 15719, "nufc": 15720, "flynn": 15721, "momentum": 15722, "sofa": 15723, "wald": 15724, "vegetable": 15725, "tucker": 15726, "supper": 15727, "seab": 15728, "arro": 15729, "seag": 15730, "venting": 15731, "councill": 15732, "splat": 15733, "calcul": 15734, "..#": 15735, "comfy": 15736, "odisha": 15737, "stopp": 15738, "warfare": 15739, "caes": 15740, "à¨": 15741, "coy": 15742, "priceless": 15743, "insec": 15744, "ðŁĺĽ": 15745, "controls": 15746, "empowerment": 15747, "datascience": 15748, "perpe": 15749, "genic": 15750, "eres": 15751, "trudeau": 15752, "mano": 15753, "slavery": 15754, "expanding": 15755, "mahe": 15756, "failing": 15757, "saga": 15758, "photographs": 15759, "crest": 15760, "reon": 15761, "surfing": 15762, "hie": 15763, "ðŁįĢ": 15764, "jae": 15765, "fellows": 15766, "southampton": 15767, "solom": 15768, "cester": 15769, "tability": 15770, "horn": 15771, "sect": 15772, "hee": 15773, "coleman": 15774, "atlas": 15775, "explorer": 15776, "consultation": 15777, "copyright": 15778, "organizing": 15779, "denied": 15780, "monkeys": 15781, "noodles": 15782, "bris": 15783, "flor": 15784, "dough": 15785, "bonds": 15786, "shocked": 15787, "ecosystem": 15788, "carefully": 15789, "wm": 15790, "apartments": 15791, "curve": 15792, "sandiego": 15793, "mustard": 15794, "commen": 15795, "ceremon": 15796, "ech": 15797, "ruth": 15798, "ðŁĻĮðŁı»": 15799, "hawai": 15800, "filmed": 15801, "tear": 15802, "asingly": 15803, "cair": 15804, "watt": 15805, "instrument": 15806, "outta": 15807, "yeol": 15808, "riverside": 15809, "ë°": 15810, ".:": 15811, "norwich": 15812, "alog": 15813, "migrants": 15814, "newman": 15815, "ride": 15816, "sprink": 15817, "targeting": 15818, "believe": 15819, "torch": 15820, "reflects": 15821, "permission": 15822, "ffman": 15823, "enemies": 15824, "basics": 15825, "seized": 15826, "sundays": 15827, "lei": 15828, "hassan": 15829, "endo": 15830, "hc": 15831, "stad": 15832, "lements": 15833, "kkkk": 15834, "nano": 15835, "shark": 15836, "mana": 15837, "onic": 15838, "treatments": 15839, "early": 15840, "collaborative": 15841, "shuttle": 15842, "branches": 15843, "misses": 15844, "mainedcm": 15845, "apers": 15846, "kyle": 15847, "carrie": 15848, "leisure": 15849, "shet": 15850, "birding": 15851, "advances": 15852, "ðŁĵĿ": 15853, "popular": 15854, "diane": 15855, "abe": 15856, "rewar": 15857, "neighbour": 15858, "kpop": 15859, "remembrance": 15860, "playground": 15861, "rub": 15862, "krishna": 15863, "ebola": 15864, "inquiry": 15865, "epa": 15866, "lumin": 15867, "organisation": 15868, "abraham": 15869, "normally": 15870, "preten": 15871, "janet": 15872, "wt": 15873, "ðŁĴİ": 15874, "encouraging": 15875, "astic": 15876, "bump": 15877, "sydney": 15878, "sz": 15879, "ssss": 15880, "garrett": 15881, "ðŁĵ»": 15882, "consulting": 15883, "romania": 15884, "spotting": 15885, "chancellor": 15886, "arma": 15887, "prestigious": 15888, "ðĿIJ": 15889, "tad": 15890, "cryst": 15891, "competit": 15892, "ratio": 15893, "cataly": 15894, "brow": 15895, "jur": 15896, "viking": 15897, "commute": 15898, "yday": 15899, "layers": 15900, "dumb": 15901, "escal": 15902, "genocide": 15903, "fill": 15904, "gupta": 15905, "stepping": 15906, "sei": 15907, "foto": 15908, "wildcats": 15909, "coli": 15910, "project": 15911, "earnings": 15912, "str": 15913, "geons": 15914, "completion": 15915, "bm": 
15916, "decorated": 15917, "crawford": 15918, "afghan": 15919, "scare": 15920, "visibility": 15921, "hib": 15922, "direction": 15923, "stroll": 15924, "christina": 15925, "alternate": 15926, "clare": 15927, "stylist": 15928, "behold": 15929, "sance": 15930, "leopard": 15931, "acquired": 15932, "narrative": 15933, "ashi": 15934, "thea": 15935, "????": 15936, "peas": 15937, "atch": 15938, "slides": 15939, "leen": 15940, "renewable": 15941, "english": 15942, "quir": 15943, "coaster": 15944, "rx": 15945, "fools": 15946, "matchday": 15947, "mism": 15948, "amazing": 15949, "zig": 15950, "keting": 15951, "wont": 15952, "towel": 15953, "diab": 15954, "stake": 15955, "nm": 15956, "melt": 15957, "ethan": 15958, "grape": 15959, "politician": 15960, "smen": 15961, "íĺ": 15962, "reo": 15963, "weddings": 15964, "catcher": 15965, "oracle": 15966, "memo": 15967, "ðŁĮ´": 15968, "eck": 15969, "robbie": 15970, "norwegian": 15971, "operator": 15972, "amor": 15973, "sewing": 15974, "jul": 15975, "xie": 15976, "uv": 15977, "fifty": 15978, "mega": 15979, "tattoo": 15980, "liberals": 15981, "upri": 15982, "trafficking": 15983, "richardson": 15984, "suv": 15985, "kip": 15986, "messy": 15987, "tremendous": 15988, "glou": 15989, "courtney": 15990, "lad": 15991, "stereo": 15992, "myers": 15993, "idio": 15994, "^_^": 15995, "manning": 15996, "dye": 15997, "wd": 15998, "throne": 15999, "junk": 16000, "asu": 16001, "provincial": 16002, "kook": 16003, "wrc": 16004, "fineart": 16005, "hampshire": 16006, "renaissance": 16007, "bred": 16008, "fallout": 16009, "sj": 16010, "snl": 16011, "alam": 16012, "torture": 16013, "fyi": 16014, "shines": 16015, "paw": 16016, "char": 16017, "henry": 16018, "crow": 16019, "acious": 16020, "dian": 16021, "paige": 16022, "bare": 16023, "stockholm": 16024, "scenery": 16025, "ðŁĩ·": 16026, "jeffrey": 16027, "push": 16028, "decoration": 16029, "ned": 16030, "cute": 16031, "brigade": 16032, "lavender": 16033, "invites": 16034, "esports": 16035, "voir": 16036, "dried": 16037, "transpl": 16038, "surgeon": 16039, "novels": 16040, "pulls": 16041, "sony": 16042, "lunar": 16043, "mane": 16044, "ivy": 16045, "frustr": 16046, "dorset": 16047, "sai": 16048, "torres": 16049, "ssion": 16050, "shutdown": 16051, "suggestions": 16052, "writing": 16053, "eo": 16054, "battlefield": 16055, "uga": 16056, "ðŁIJ¾": 16057, "vacu": 16058, "splac": 16059, "git": 16060, "ug": 16061, "highland": 16062, "%)": 16063, "mermaid": 16064, "sacramento": 16065, "tails": 16066, "pw": 16067, "kah": 16068, "tell": 16069, "enhanced": 16070, "ìķ": 16071, "auckland": 16072, "cruel": 16073, "ðŁ¤©": 16074, "audre": 16075, "sailor": 16076, "grammar": 16077, "glove": 16078, "deon": 16079, "inflam": 16080, "freshly": 16081, "kell": 16082, "zip": 16083, "christie": 16084, "mild": 16085, "dixon": 16086, "instructor": 16087, "gence": 16088, "ãħł": 16089, "subjec": 16090, "constitutional": 16091, "crowds": 16092, "invisible": 16093, "ruins": 16094, "dak": 16095, "sip": 16096, "plaque": 16097, "pouring": 16098, "complex": 16099, "zine": 16100, "stead": 16101, "flet": 16102, "transmission": 16103, "loway": 16104, "arun": 16105, "increasingly": 16106, "aud": 16107, "transparen": 16108, "crowned": 16109, "scoun": 16110, "blizzard": 16111, "luxu": 16112, "fiers": 16113, "achievements": 16114, "hunters": 16115, "rocked": 16116, "basin": 16117, "violet": 16118, "proves": 16119, "achieving": 16120, "prosper": 16121, "sega": 16122, "float": 16123, "vian": 16124, "xiv": 16125, "polic": 16126, "tura": 16127, "approximately": 16128, "wanderlust": 16129, 
"keepers": 16130, "getaway": 16131, "cod": 16132, "polis": 16133, "bryan": 16134, "colts": 16135, "talents": 16136, "yogur": 16137, "glutenfree": 16138, "wrist": 16139, "gry": 16140, "czech": 16141, "ðŁİĪ": 16142, "eville": 16143, "ðŁıĪ": 16144, "tox": 16145, "daniels": 16146, "amer": 16147, "bids": 16148, "weareone": 16149, "metab": 16150, "gt": 16151, "boyz": 16152, "pdx": 16153, "possession": 16154, "pushed": 16155, "shrine": 16156, "realistic": 16157, "trigger": 16158, "navi": 16159, "rumors": 16160, "naf": 16161, "jenkins": 16162, "trun": 16163, "communi": 16164, "ÃĹ": 16165, "gamers": 16166, "armor": 16167, "mohammed": 16168, "balcony": 16169, "yah": 16170, "strongest": 16171, "rhythm": 16172, "unforgettable": 16173, "kp": 16174, "hobb": 16175, "custody": 16176, "gregor": 16177, "rita": 16178, "aesthetic": 16179, "ilation": 16180, "sponsoring": 16181, "nay": 16182, "kidnapp": 16183, "shs": 16184, "rajas": 16185, "meg": 16186, "significantly": 16187, "buttons": 16188, "lac": 16189, "versions": 16190, "essentials": 16191, "opinions": 16192, "kro": 16193, "dprinting": 16194, "widely": 16195, "dk": 16196, "uran": 16197, "yal": 16198, "requested": 16199, "cn": 16200, "curric": 16201, "plum": 16202, "grun": 16203, "vm": 16204, "devon": 16205, "myo": 16206, "relation": 16207, "juventus": 16208, "rouge": 16209, "minority": 16210, "mines": 16211, "jupiter": 16212, "nine": 16213, "oxygen": 16214, "frankie": 16215, "unesco": 16216, "fabric": 16217, "disgusting": 16218, "salman": 16219, "detection": 16220, "lanka": 16221, "dac": 16222, "ðŁĩ«ðŁĩ·": 16223, "argument": 16224, "shelves": 16225, "celtics": 16226, "roberto": 16227, "pigs": 16228, "hedge": 16229, "faul": 16230, "powering": 16231, "butterflies": 16232, "fir": 16233, "remake": 16234, "atti": 16235, "como": 16236, "empha": 16237, "kendall": 16238, "pokemon": 16239, "seating": 16240, "dans": 16241, "baldwin": 16242, "ðŁij»": 16243, "leslie": 16244, "onedirection": 16245, "timber": 16246, "iman": 16247, "font": 16248, "eder": 16249, "dion": 16250, "steph": 16251, "format": 16252, "gregory": 16253, "prop": 16254, "hex": 16255, "ruin": 16256, "sory": 16257, "infer": 16258, "naw": 16259, "barak": 16260, "sdgs": 16261, "karao": 16262, "lush": 16263, "vander": 16264, "endent": 16265, "gis": 16266, "afro": 16267, "soccer": 16268, "ayan": 16269, "tuni": 16270, "lung": 16271, "dayof": 16272, "alexa": 16273, "marath": 16274, "addicted": 16275, "agile": 16276, "hygi": 16277, "lightweight": 16278, "ì§": 16279, "mandela": 16280, "joey": 16281, "ancy": 16282, "hum": 16283, "bir": 16284, "memorial": 16285, "jimin": 16286, "ginger": 16287, "vak": 16288, "javascri": 16289, "crops": 16290, "origins": 16291, "dari": 16292, "piper": 16293, "import": 16294, "aggressive": 16295, "prediction": 16296, "repairs": 16297, "cracker": 16298, "voyage": 16299, "nike": 16300, "mummy": 16301, "linkedin": 16302, "countryside": 16303, "border": 16304, "glass": 16305, "pert": 16306, "sals": 16307, "shoe": 16308, "autographed": 16309, "walnut": 16310, "collegi": 16311, "salary": 16312, "pairing": 16313, "ðŁĮ¸": 16314, "cathol": 16315, "sweethe": 16316, "defeats": 16317, "strengthen": 16318, "rooftop": 16319, "improvements": 16320, "barriers": 16321, "uru": 16322, "tally": 16323, "ruled": 16324, "ðŁĨļ": 16325, "naija": 16326, "emoji": 16327, "percent": 16328, "gio": 16329, "probs": 16330, "once": 16331, "admits": 16332, "paths": 16333, "liar": 16334, "daytona": 16335, "peters": 16336, "cali": 16337, "calli": 16338, "mug": 16339, "osa": 16340, "aph": 16341, "aby": 16342, 
"hyde": 16343, "ethnic": 16344, "plains": 16345, "olf": 16346, "hahahahaha": 16347, "holic": 16348, "?!?!": 16349, "subli": 16350, "blacks": 16351, "mot": 16352, "ghton": 16353, "lovin": 16354, "brent": 16355, "baru": 16356, "lati": 16357, "dew": 16358, "ateau": 16359, "qa": 16360, "painful": 16361, "busters": 16362, "static": 16363, "ðŁĩ¨ðŁĩ¦": 16364, "notebook": 16365, "outfits": 16366, "sies": 16367, "rf": 16368, "floods": 16369, "ÑĢ": 16370, "throat": 16371, "suici": 16372, "rovers": 16373, "bengal": 16374, "prepares": 16375, "blog": 16376, "miniature": 16377, "ب": 16378, "amphi": 16379, "comb": 16380, "rsp": 16381, "intimate": 16382, "greene": 16383, "Ìĩ": 16384, "altar": 16385, "surgical": 16386, "vessel": 16387, "...?": 16388, "gavin": 16389, "gator": 16390, "threatened": 16391, "zar": 16392, "robbery": 16393, "dier": 16394, "promoted": 16395, "yg": 16396, "xs": 16397, "subs": 16398, "interviewing": 16399, "threatening": 16400, "dozen": 16401, "meado": 16402, "waterfall": 16403, "nintendoswitch": 16404, "calum": 16405, "ministers": 16406, "drop": 16407, "universities": 16408, "warned": 16409, "tactics": 16410, "ðŁĩ²": 16411, "refuse": 16412, "adju": 16413, "vast": 16414, "ðŁĺ´": 16415, "mcfc": 16416, "libya": 16417, "nofilter": 16418, "distributed": 16419, "reser": 16420, "ronnie": 16421, "deco": 16422, "javascript": 16423, "monk": 16424, "interests": 16425, "flex": 16426, "martha": 16427, "sties": 16428, "ood": 16429, "ðŁ¤£ðŁ¤£": 16430, "eun": 16431, "bali": 16432, "gomez": 16433, "stimul": 16434, "moderate": 16435, "dity": 16436, "iris": 16437, "straw": 16438, "consistent": 16439, "directions": 16440, "adopt": 16441, "salsa": 16442, "croo": 16443, "recovered": 16444, "blackfriday": 16445, "lancaster": 16446, "accept": 16447, "weareoneexo": 16448, "builds": 16449, "freeman": 16450, "airplane": 16451, "dition": 16452, "belong": 16453, "jamie": 16454, "pitching": 16455, "lif": 16456, "omin": 16457, "crispy": 16458, "prepping": 16459, "veg": 16460, "chang": 16461, "accomplished": 16462, "gracias": 16463, "dolphin": 16464, "elector": 16465, "culinary": 16466, "superbowl": 16467, "wala": 16468, "pursuit": 16469, "blackberry": 16470, "bean": 16471, "cardinal": 16472, "proved": 16473, "immigrant": 16474, "strictly": 16475, "holocaust": 16476, "passage": 16477, "haus": 16478, "coup": 16479, "purse": 16480, "harass": 16481, "<<": 16482, "leed": 16483, "adobe": 16484, "stad": 16485, "legislat": 16486, "parked": 16487, "priyan": 16488, "silva": 16489, "krist": 16490, "sthe": 16491, "funky": 16492, "iga": 16493, "settlement": 16494, "phs": 16495, "tmrw": 16496, "stressed": 16497, "hunt": 16498, "hockey": 16499, "treasures": 16500, "chambers": 16501, "olu": 16502, "hut": 16503, "marley": 16504, "texture": 16505, "wilderness": 16506, "mming": 16507, "potentially": 16508, "omaha": 16509, "judy": 16510, "toes": 16511, "spoiler": 16512, "distinguished": 16513, "felix": 16514, "ahu": 16515, "recommendations": 16516, "zombies": 16517, "hitler": 16518, "triple": 16519, "collapse": 16520, "motivated": 16521, "ultimat": 16522, "ggling": 16523, "soy": 16524, "cigar": 16525, "foren": 16526, "vineyard": 16527, "glitter": 16528, "findings": 16529, "colonial": 16530, "hunter": 16531, "erik": 16532, "dens": 16533, "beetle": 16534, "lotte": 16535, "subtle": 16536, "smatter": 16537, "trusted": 16538, "experimental": 16539, "naments": 16540, "ðŁĺĨ": 16541, "region": 16542, "acquisition": 16543, "breeding": 16544, "quarterback": 16545, "amreading": 16546, "ootd": 16547, "rude": 16548, "initiatives": 16549, 
"stout": 16550, "hyung": 16551, "outcome": 16552, "alfred": 16553, "mics": 16554, "expertise": 16555, "bacteria": 16556, "penguins": 16557, "jumper": 16558, "valencia": 16559, "bark": 16560, "ingday": 16561, "sellers": 16562, "contracts": 16563, "houston": 16564, "commissioned": 16565, "adaptation": 16566, "swansea": 16567, "santiago": 16568, "commonwealth": 16569, "judging": 16570, "submission": 16571, "scorer": 16572, "tommy": 16573, "ño": 16574, "exquis": 16575, "filing": 16576, "explanation": 16577, "allison": 16578, "wembley": 16579, "ridge": 16580, "chevy": 16581, "santos": 16582, "ownership": 16583, "cognitive": 16584, "favourites": 16585, "shed": 16586, "philanthro": 16587, "deleted": 16588, "godd": 16589, "snor": 16590, "guidelines": 16591, "ffing": 16592, "jeep": 16593, "clips": 16594, "swamp": 16595, "anor": 16596, "guild": 16597, "bolton": 16598, "springfield": 16599, "municipal": 16600, "goalkeeper": 16601, "yeon": 16602, "ðŁĺįðŁĺįðŁĺįðŁĺį": 16603, "ãħĭãħĭ": 16604, "waterfront": 16605, "grave": 16606, "contemporary": 16607, "arity": 16608, "ÃŃa": 16609, "sleeps": 16610, "syrup": 16611, "alam": 16612, "pire": 16613, "coyo": 16614, "motogp": 16615, "tyson": 16616, "kejri": 16617, "circul": 16618, "singly": 16619, "crunch": 16620, "complicated": 16621, "nostalgia": 16622, "kop": 16623, "move": 16624, "kale": 16625, "macro": 16626, "midwest": 16627, "hans": 16628, "tribal": 16629, "nude": 16630, "à¯į": 16631, "beyonce": 16632, "congratulate": 16633, "cater": 16634, "league": 16635, "ðŁĻĬ": 16636, "ladder": 16637, "crashed": 16638, "technic": 16639, "karaoke": 16640, "harassment": 16641, "rots": 16642, "experiencing": 16643, "kristen": 16644, "ðŁĩ³": 16645, "ð٤Ĺ": 16646, "reflections": 16647, "guinness": 16648, "illustrator": 16649, "ðŁĻıðŁı»": 16650, "center": 16651, "narrow": 16652, "commons": 16653, "regulations": 16654, "ÙĨ": 16655, "harm": 16656, "croft": 16657, "cussion": 16658, "hongkong": 16659, "stical": 16660, "internship": 16661, "zoe": 16662, "chop": 16663, "hoods": 16664, "estimated": 16665, "batteries": 16666, "berkeley": 16667, "smoothie": 16668, "shaun": 16669, "cros": 16670, "~~": 16671, "campe": 16672, "hump": 16673, "bg": 16674, "prototype": 16675, "click": 16676, "shawn": 16677, "reviewed": 16678, "templ": 16679, "pf": 16680, "jedi": 16681, "blogs": 16682, "raymond": 16683, "asth": 16684, "bah": 16685, "avail": 16686, "scotch": 16687, "leafs": 16688, "nikki": 16689, "tok": 16690, "hollow": 16691, "urges": 16692, "oft": 16693, "unlike": 16694, "latin": 16695, "ue": 16696, "catering": 16697, "mili": 16698, "alternati": 16699, "maver": 16700, "и": 16701, "agle": 16702, "preorder": 16703, "lux": 16704, "cucu": 16705, "ðŁijıðŁijı": 16706, "tart": 16707, "âĿ¤âĿ¤âĿ¤": 16708, "arabic": 16709, "rapidly": 16710, "arrang": 16711, "allen": 16712, "traveltuesday": 16713, "paws": 16714, "flows": 16715, "stability": 16716, "fluid": 16717, "capp": 16718, "canberra": 16719, "uuuu": 16720, "spani": 16721, "demonstration": 16722, "mla": 16723, "placement": 16724, "mw": 16725, "presidents": 16726, "awesom": 16727, "beverly": 16728, "anist": 16729, "neal": 16730, "fathersday": 16731, "referendum": 16732, "lahore": 16733, "oaks": 16734, "debbie": 16735, "halfway": 16736, "ghosts": 16737, "debor": 16738, "matthews": 16739, "fiat": 16740, "tfw": 16741, "presen": 16742, "robi": 16743, "ded": 16744, "brock": 16745, "laughed": 16746, "amounts": 16747, "bamboo": 16748, "kindergarten": 16749, "eaten": 16750, "mtvhottest": 16751, "breakout": 16752, "usic": 16753, "fraser": 16754, 
"legislative": 16755, "pang": 16756, "module": 16757, "sammy": 16758, "gover": 16759, "earns": 16760, "expedition": 16761, "garh": 16762, "concepts": 16763, "charlie": 16764, "lava": 16765, "bachelor": 16766, "veggies": 16767, "determine": 16768, "ellie": 16769, "unlocked": 16770, "fruit": 16771, "dalla": 16772, "coupe": 16773, "washington": 16774, "deposit": 16775, "ivory": 16776, "paula": 16777, "chicag": 16778, "gucci": 16779, "ðŁİĥ": 16780, "cultiv": 16781, "pierce": 16782, "lifted": 16783, "stumb": 16784, "recover": 16785, "muscles": 16786, "conducting": 16787, "cbs": 16788, "mclaren": 16789, "sophia": 16790, "cellu": 16791, "oceans": 16792, "uploaded": 16793, "gameplay": 16794, "maldives": 16795, "kimber": 16796, "avoi": 16797, "racer": 16798, "caine": 16799, "cavs": 16800, "hana": 16801, "liga": 16802, "raven": 16803, "intervention": 16804, "inauguration": 16805, "ooh": 16806, "attraction": 16807, "merchandise": 16808, "tunein": 16809, "liking": 16810, "juniors": 16811, "intended": 16812, "attacking": 16813, "aquarium": 16814, "iwd": 16815, "components": 16816, "suring": 16817, "centu": 16818, "yogurt": 16819, "ðŁıĥ": 16820, "showroom": 16821, "optical": 16822, "tyour": 16823, "judge": 16824, "yield": 16825, "anto": 16826, "plc": 16827, "transparency": 16828, "recycled": 16829, "chief": 16830, "arom": 16831, "ambassadors": 16832, "planet": 16833, "âĿĦï¸ı": 16834, "omed": 16835, "vanessa": 16836, "court": 16837, "margar": 16838, "haley": 16839, "vr": 16840, "regina": 16841, "pdates": 16842, "hispan": 16843, "livestream": 16844, "âģ£": 16845, "yahoo": 16846, "galla": 16847, "secured": 16848, "wir": 16849, "beneath": 16850, "offl": 16851, "nil": 16852, "amb": 16853, "yeg": 16854, "outlet": 16855, "ute": 16856, "peep": 16857, "lindsay": 16858, "bentley": 16859, "...!": 16860, "heel": 16861, "trilogy": 16862, "vos": 16863, "tyre": 16864, "therefore": 16865, "toronto": 16866, "abi": 16867, "simpli": 16868, "jae": 16869, "extensive": 16870, "elephants": 16871, "sor": 16872, "orientation": 16873, "impeach": 16874, "replay": 16875, "constructed": 16876, "peterson": 16877, "pais": 16878, "ported": 16879, "customs": 16880, "collap": 16881, "adu": 16882, "highlands": 16883, "salem": 16884, "shelby": 16885, "kovic": 16886, "strain": 16887, "rosie": 16888, "senators": 16889, "snaps": 16890, "bobb": 16891, "suzuki": 16892, "blades": 16893, "kp": 16894, "lolo": 16895, "generate": 16896, "sight": 16897, "mae": 16898, "structural": 16899, "predict": 16900, "jumped": 16901, "ahmad": 16902, "sung": 16903, "justice": 16904, "glam": 16905, "volvo": 16906, "jubilee": 16907, "detention": 16908, "losses": 16909, "puri": 16910, "everytime": 16911, "а": 16912, "rao": 16913, "edge": 16914, "limer": 16915, "resemb": 16916, "harold": 16917, "retri": 16918, "sacrific": 16919, "surprises": 16920, "amc": 16921, "srilanka": 16922, "barbie": 16923, "mens": 16924, "finn": 16925, "ags": 16926, "ukrainian": 16927, "embrac": 16928, "îIJ": 16929, "flavors": 16930, "homer": 16931, "laure": 16932, "outh": 16933, "priced": 16934, "verde": 16935, "firm": 16936, "ahs": 16937, "cub": 16938, "trey": 16939, "paranor": 16940, "profit": 16941, "indv": 16942, "whoa": 16943, "harsh": 16944, "alot": 16945, "critics": 16946, "hubby": 16947, "figur": 16948, "gira": 16949, "castro": 16950, "chanel": 16951, "input": 16952, "originals": 16953, "tenant": 16954, "yyyy": 16955, "turers": 16956, "lincoln": 16957, "coon": 16958, "learn": 16959, "chou": 16960, "acare": 16961, "oles": 16962, "diner": 16963, "hyp": 16964, "bizarre": 16965, "mcr": 
16966, "letsgo": 16967, "decorating": 16968, "ðŁĮİ": 16969, "alison": 16970, "arvin": 16971, "fd": 16972, "rehab": 16973, "mccarthy": 16974, "lottery": 16975, "dah": 16976, "minneapolis": 16977, "eligible": 16978, "diagnosed": 16979, "emerald": 16980, "destinations": 16981, "sans": 16982, "ory": 16983, "blazers": 16984, "nv": 16985, "bail": 16986, "digitalart": 16987, "noc": 16988, "malta": 16989, "solar": 16990, "pipes": 16991, "allegations": 16992, "nock": 16993, "pope": 16994, "brid": 16995, "premier": 16996, "nx": 16997, "presentations": 16998, "efa": 16999, "bows": 17000, "valve": 17001, "opponent": 17002, "Įë": 17003, "visual": 17004, "ingle": 17005, "categor": 17006, "eter": 17007, "pois": 17008, "dani": 17009, "attract": 17010, "neutral": 17011, "thene": 17012, "crashes": 17013, "freddie": 17014, "utili": 17015, "cst": 17016, "awakening": 17017, "sloven": 17018, "qualify": 17019, "proof": 17020, "fairy": 17021, "lev": 17022, "freight": 17023, "enjoys": 17024, "cupcake": 17025, "flavour": 17026, "âķ": 17027, "protective": 17028, "ðŁijıðŁı»": 17029, "isu": 17030, "admir": 17031, "hmmm": 17032, "continuous": 17033, "aires": 17034, "raptors": 17035, "showcasing": 17036, "yuk": 17037, "paste": 17038, "follower": 17039, "instructions": 17040, "spru": 17041, "@__": 17042, "theo": 17043, "debuts": 17044, "vette": 17045, "stow": 17046, "esof": 17047, "ached": 17048, "sultan": 17049, "sandwich": 17050, "somalia": 17051, "franco": 17052, "carne": 17053, "fluffy": 17054, "alpine": 17055, "jasmine": 17056, "heated": 17057, "violin": 17058, "pless": 17059, "divorce": 17060, "performer": 17061, "phies": 17062, "portsm": 17063, "dara": 17064, "kirby": 17065, "lop": 17066, "chilli": 17067, "forth": 17068, "skype": 17069, "ðŁĩ®ðŁĩ¹": 17070, "celebrities": 17071, "edy": 17072, "vee": 17073, "poison": 17074, "eyel": 17075, "grabs": 17076, "ssic": 17077, "uno": 17078, "western": 17079, "railroad": 17080, "amer": 17081, "numerous": 17082, "sv": 17083, "fow": 17084, "fist": 17085, "âĢĭ": 17086, "requests": 17087, "martial": 17088, "emmy": 17089, "acceptance": 17090, "laura": 17091, "ิ": 17092, "erup": 17093, "hyundai": 17094, "outlander": 17095, "utt": 17096, "wrestle": 17097, "espresso": 17098, "demanding": 17099, "gdp": 17100, "geography": 17101, "saskat": 17102, "troll": 17103, "confeder": 17104, "sues": 17105, "sem": 17106, "bets": 17107, "tful": 17108, "tosh": 17109, "teaches": 17110, "coloured": 17111, "galway": 17112, "macy": 17113, "disorders": 17114, "bbcra": 17115, "atem": 17116, "fender": 17117, "litter": 17118, "esh": 17119, "providers": 17120, "renovation": 17121, "nominate": 17122, "psg": 17123, "nominations": 17124, "jenna": 17125, "sharp": 17126, "someday": 17127, "zur": 17128, "brains": 17129, "cheshire": 17130, "prey": 17131, "hugo": 17132, "¿": 17133, "token": 17134, "rv": 17135, "carr": 17136, "tactical": 17137, "zelda": 17138, "kayla": 17139, "fernando": 17140, "photographers": 17141, "jour": 17142, "umbrella": 17143, "woody": 17144, "congressman": 17145, "dump": 17146, "levy": 17147, "juan": 17148, "dazz": 17149, "signals": 17150, "lain": 17151, "anu": 17152, "michel": 17153, "porch": 17154, "alden": 17155, "siblings": 17156, "yale": 17157, "peel": 17158, "swick": 17159, "ggin": 17160, "llc": 17161, "kale": 17162, "scon": 17163, "ild": 17164, "patreon": 17165, "reel": 17166, "quin": 17167, "witt": 17168, "marty": 17169, "moody": 17170, "toni": 17171, "dery": 17172, "gators": 17173, "specifically": 17174, "ddin": 17175, "lyon": 17176, "trick": 17177, "meadows": 17178, "pj": 17179, 
"borgh": 17180, "vik": 17181, "tur": 17182, "bronx": 17183, "puff": 17184, "lantern": 17185, "ðŁ¤¦": 17186, "gently": 17187, "bestie": 17188, "fact": 17189, "refused": 17190, "fasci": 17191, "mpy": 17192, "ðŁĶµ": 17193, "crossover": 17194, "meadow": 17195, "indianapolis": 17196, "ducation": 17197, "sley": 17198, "loom": 17199, "mixer": 17200, "newmusic": 17201, "filmmaker": 17202, "prosperity": 17203, "lim": 17204, "weekend": 17205, "creamy": 17206, "neutr": 17207, "luther": 17208, "hv": 17209, "northern": 17210, "two": 17211, "hra": 17212, "catches": 17213, "appearances": 17214, "habit": 17215, "kittens": 17216, "nv": 17217, "illac": 17218, "infan": 17219, "regardless": 17220, "lizard": 17221, "dunk": 17222, "curtain": 17223, "acom": 17224, "intu": 17225, "vez": 17226, "emin": 17227, "flats": 17228, "calendars": 17229, "empower": 17230, "ruined": 17231, "hungary": 17232, "vid": 17233, "wex": 17234, "ulum": 17235, "aberdeen": 17236, "osa": 17237, "kt": 17238, "massi": 17239, "seemed": 17240, "sden": 17241, "'?": 17242, "telephone": 17243, "defi": 17244, "inspires": 17245, "meow": 17246, "zones": 17247, "blind": 17248, "ply": 17249, "tucson": 17250, "adventure": 17251, "ged": 17252, "oyster": 17253, "ðŁijıðŁijıðŁijı": 17254, "output": 17255, "ttt": 17256, "metallic": 17257, "smash": 17258, "ucla": 17259, "scots": 17260, "perfect": 17261, "lucy": 17262, "regularly": 17263, "spic": 17264, "relative": 17265, "athers": 17266, "mise": 17267, "battling": 17268, "decides": 17269, "mata": 17270, "occupied": 17271, "randomly": 17272, "catsoftwitter": 17273, "gian": 17274, "bally": 17275, "alties": 17276, "allies": 17277, "immen": 17278, "syrac": 17279, "ðŁĴľðŁĴľ": 17280, "llan": 17281, "aur": 17282, "kut": 17283, "lamar": 17284, "affects": 17285, "nra": 17286, "starwar": 17287, "ð٤ĺ": 17288, "scram": 17289, "enchan": 17290, "process": 17291, "luxurious": 17292, "array": 17293, "sherlock": 17294, "compati": 17295, "dorf": 17296, "stress": 17297, "msu": 17298, "swith": 17299, "sala": 17300, "sofinstagram": 17301, "foil": 17302, "understood": 17303, "quay": 17304, "rp": 17305, "cade": 17306, "jaw": 17307, "enab": 17308, "encoun": 17309, "ðŁİī:": 17310, "dock": 17311, "saturn": 17312, "mull": 17313, "layout": 17314, "rarely": 17315, "happily": 17316, "fixture": 17317, "orph": 17318, "overlooking": 17319, "herbs": 17320, "mitt": 17321, "pillar": 17322, "nolan": 17323, "petty": 17324, "stry": 17325, "ui": 17326, "muk": 17327, "ores": 17328, "overs": 17329, "áµ": 17330, "recreation": 17331, "wesley": 17332, "rit": 17333, "kejriwal": 17334, "stocking": 17335, "gv": 17336, "subscribers": 17337, "moose": 17338, "mae": 17339, "bert": 17340, "oppre": 17341, "assignment": 17342, "uro": 17343, "highlighting": 17344, "calvin": 17345, "weigh": 17346, "cambodia": 17347, "avon": 17348, "kem": 17349, "disabilities": 17350, "ready": 17351, "chargers": 17352, "pads": 17353, "izing": 17354, "illian": 17355, "truste": 17356, "colleges": 17357, "associates": 17358, "albany": 17359, "milton": 17360, "cron": 17361, "bur": 17362, "hardly": 17363, "sights": 17364, "antiques": 17365, "echo": 17366, "surprisingly": 17367, "haiti": 17368, "capt": 17369, "php": 17370, "opio": 17371, "inequality": 17372, "equal": 17373, "keny": 17374, "schmid": 17375, "autographs": 17376, "rent": 17377, "quer": 17378, "citrus": 17379, "challenged": 17380, "tec": 17381, "epide": 17382, "fest": 17383, "zhou": 17384, "lime": 17385, "citizenship": 17386, "crystal": 17387, "convinced": 17388, "messenger": 17389, "copenhagen": 17390, "âĿĹï¸ı": 17391, 
"warran": 17392, "developments": 17393, "ï¸ıâĥ£": 17394, "forex": 17395, "hiro": 17396, "sneakers": 17397, "xide": 17398, "viva": 17399, "stereo": 17400, "batting": 17401, "ssel": 17402, "host": 17403, "bengal": 17404, "criticism": 17405, "qc": 17406, "crun": 17407, "attempted": 17408, "rye": 17409, "determination": 17410, "creations": 17411, "dread": 17412, "labels": 17413, "posse": 17414, "ancer": 17415, "johan": 17416, "sister": 17417, "partnerships": 17418, "lesbian": 17419, "kst": 17420, "guarantee": 17421, "baro": 17422, "fixing": 17423, "mason": 17424, "mous": 17425, "chemicals": 17426, "tless": 17427, "biodiversity": 17428, "paro": 17429, "bharat": 17430, "acol": 17431, "refuge": 17432, "ente": 17433, "titi": 17434, "dyssey": 17435, "responds": 17436, "lefto": 17437, "iner": 17438, "sevel": 17439, "rahul": 17440, "oline": 17441, "frankfur": 17442, "choreo": 17443, "enjoyable": 17444, "cto": 17445, "struggles": 17446, "woodland": 17447, "heavyweight": 17448, "gens": 17449, "recep": 17450, "accred": 17451, "ðŁĺ¡": 17452, "transformed": 17453, "listen": 17454, "atop": 17455, "nk": 17456, "surge": 17457, "bere": 17458, "governor": 17459, "prisoners": 17460, "claude": 17461, "till": 17462, "mulator": 17463, "emotion": 17464, "waterloo": 17465, "start": 17466, "ðŁĩº": 17467, "cleaned": 17468, "grandmother": 17469, "fearless": 17470, "african": 17471, "astronomy": 17472, "ðŁıģ": 17473, "à¸Ļ": 17474, "theworld": 17475, "suitable": 17476, "anthony": 17477, "kand": 17478, "tten": 17479, "meaningful": 17480, "disclo": 17481, "jacobs": 17482, "ø": 17483, "tomlinson": 17484, "ghetti": 17485, "typho": 17486, "substan": 17487, "asco": 17488, "tek": 17489, "nagar": 17490, "mud": 17491, "amon": 17492, "vaccine": 17493, "fty": 17494, "flesh": 17495, "noel": 17496, "inflation": 17497, "portugue": 17498, "glamour": 17499, "tram": 17500, "vre": 17501, "tequ": 17502, "roundup": 17503, "wyn": 17504, "rejected": 17505, "mosaic": 17506, "sighting": 17507, "calf": 17508, "ota": 17509, "composition": 17510, "gopro": 17511, "gonzale": 17512, "eed": 17513, "bard": 17514, "tue": 17515, "effectively": 17516, "ween": 17517, "alto": 17518, "ribs": 17519, "relate": 17520, "thirsty": 17521, "furious": 17522, "dim": 17523, "chard": 17524, "perfume": 17525, "sny": 17526, "churchill": 17527, "kof": 17528, "masterclass": 17529, "wave": 17530, "ðŁĶµ": 17531, "erin": 17532, "owns": 17533, "tobe": 17534, "skilled": 17535, "tem": 17536, "gof": 17537, "eni": 17538, "tori": 17539, "crazy": 17540, "lick": 17541, "resistant": 17542, "icial": 17543, "agar": 17544, "!:": 17545, "gali": 17546, "delaware": 17547, "blitz": 17548, "kohli": 17549, "puck": 17550, "availability": 17551, "himalay": 17552, "influential": 17553, "crochet": 17554, "victori": 17555, "reading": 17556, "hobby": 17557, "viet": 17558, "jas": 17559, "engra": 17560, "skul": 17561, "ðŁĩ²ðŁĩ": 17562, "educate": 17563, "techno": 17564, "districts": 17565, "blues": 17566, "sett": 17567, "seventh": 17568, "learns": 17569, "eeee": 17570, "apocalypse": 17571, "hangout": 17572, "cruel": 17573, "mutu": 17574, "bruh": 17575, "helen": 17576, "sheer": 17577, "ction": 17578, "klein": 17579, "texans": 17580, "cereal": 17581, "shine": 17582, "nered": 17583, "gras": 17584, "ambro": 17585, "fella": 17586, "hindu": 17587, "matthew": 17588, "lima": 17589, "miranda": 17590, "jewel": 17591, "soho": 17592, "eurovision": 17593, "neighbours": 17594, "chandler": 17595, "besides": 17596, "ðŁ¥°": 17597, "astros": 17598, "thumbs": 17599, "renault": 17600, "rave": 17601, "hired": 17602, 
"ðŁĸ¤": 17603, "itary": 17604, "zor": 17605, "blazer": 17606, "kine": 17607, "eau": 17608, "katy": 17609, "dccomics": 17610, "pec": 17611, "rodgers": 17612, "waterproof": 17613, "killers": 17614, "superint": 17615, "preserv": 17616, "asso": 17617, "brewers": 17618, "promotional": 17619, "scam": 17620, "villages": 17621, "sketches": 17622, "juicy": 17623, "forlife": 17624, "audit": 17625, "solo": 17626, "fundamental": 17627, "lene": 17628, "philippine": 17629, "tend": 17630, "conservatives": 17631, "sponsorship": 17632, "ddle": 17633, "aine": 17634, "htc": 17635, "osi": 17636, "hulk": 17637, "waf": 17638, "à¸Ļ": 17639, "evaluation": 17640, "antine": 17641, "slee": 17642, "robertson": 17643, "roosevel": 17644, "agi": 17645, "sophistic": 17646, "employers": 17647, "bubbles": 17648, "kowski": 17649, "interaction": 17650, "shu": 17651, "boule": 17652, "ican": 17653, "jare": 17654, "hank": 17655, "legitim": 17656, "knicks": 17657, "karma": 17658, "receiver": 17659, "perks": 17660, "uh": 17661, "stair": 17662, "suni": 17663, "laboratory": 17664, "graves": 17665, "vocals": 17666, "oot": 17667, "cture": 17668, "thrive": 17669, "tico": 17670, "ãĥ³": 17671, "bw": 17672, "cartoons": 17673, "mcdonalds": 17674, "draw": 17675, "yung": 17676, "pler": 17677, "lid": 17678, "ethical": 17679, "groove": 17680, "enta": 17681, "internationalwomensday": 17682, "patron": 17683, "worries": 17684, "ðŁİħ": 17685, "ðŁijĭ": 17686, "katherine": 17687, "diaz": 17688, "tori": 17689, "bachchan": 17690, "trust": 17691, "mineral": 17692, "icom": 17693, "builders": 17694, "born": 17695, "coloring": 17696, "latte": 17697, "case": 17698, "revolution": 17699, "trader": 17700, "oxid": 17701, "chipot": 17702, "instantly": 17703, "southern": 17704, "sehun": 17705, "prob": 17706, "hernandez": 17707, "lisbon": 17708, "huawe": 17709, "pong": 17710, "mea": 17711, "rooney": 17712, "wheelchair": 17713, "keen": 17714, "bett": 17715, "corin": 17716, "regulatory": 17717, "displac": 17718, "karen": 17719, "schem": 17720, "sunsets": 17721, "whales": 17722, "reminis": 17723, "hep": 17724, "hide": 17725, "marcel": 17726, "pandora": 17727, "doyle": 17728, "thfc": 17729, "otto": 17730, "nokia": 17731, "transgender": 17732, "kov": 17733, "hawaiian": 17734, "shave": 17735, "sovere": 17736, "excer": 17737, "nicki": 17738, "pug": 17739, "stor": 17740, "roth": 17741, "weet": 17742, "legal": 17743, "dignity": 17744, "pow": 17745, "homage": 17746, "ðŁĩ³ðŁĩ": 17747, "sre": 17748, "canon": 17749, "lax": 17750, "woah": 17751, "quartz": 17752, "ña": 17753, "greeting": 17754, "flickr": 17755, "nairobi": 17756, "advocates": 17757, "anc": 17758, "vii": 17759, "eugene": 17760, "thra": 17761, "cre": 17762, "elan": 17763, "pension": 17764, "thletics": 17765, "toni": 17766, "reagan": 17767, "xv": 17768, "store": 17769, "bench": 17770, "harlem": 17771, "toddler": 17772, "sentenced": 17773, "âĻ¥ï¸ı": 17774, "globally": 17775, "cheaper": 17776, "uf": 17777, "mam": 17778, "nico": 17779, "iku": 17780, "thou": 17781, "nist": 17782, "dami": 17783, "thala": 17784, "rhodes": 17785, "sale": 17786, "bowls": 17787, "âĪ": 17788, "lasvegas": 17789, "sanctions": 17790, "admire": 17791, "matched": 17792, "unable": 17793, "traveler": 17794, "eleven": 17795, "strawberries": 17796, "âĢĶâĢĶâĢĶâĢĶ": 17797, "studio": 17798, "jacques": 17799, "ims": 17800, "valued": 17801, "sno": 17802, "cheesecake": 17803, "nxt": 17804, "eos": 17805, "sx": 17806, "fx": 17807, "tonic": 17808, "hatch": 17809, "chicks": 17810, "grads": 17811, "handic": 17812, "rory": 17813, "asp": 17814, "ripped": 17815, 
"dentist": 17816, "nen": 17817, "lufc": 17818, "âľĬ": 17819, "dige": 17820, "hopkins": 17821, "sherman": 17822, "fda": 17823, "forall": 17824, "ashley": 17825, "strand": 17826, "hy": 17827, "liquor": 17828, "buffet": 17829, "essence": 17830, "pharma": 17831, "suriya": 17832, "ðŁĴĻðŁĴĻ": 17833, "festivals": 17834, "zan": 17835, "refresh": 17836, "purple": 17837, "uniforms": 17838, "kenneth": 17839, "=)": 17840, "asan": 17841, "helsin": 17842, "transformers": 17843, "kali": 17844, "personalized": 17845, "chalk": 17846, "bobby": 17847, "âĮ": 17848, "themes": 17849, "departure": 17850, "print": 17851, "illustrations": 17852, "quiet": 17853, "agrees": 17854, "griff": 17855, "س": 17856, "miti": 17857, "together": 17858, "convenience": 17859, "abar": 17860, "carlo": 17861, "turtles": 17862, "infosec": 17863, "somewhat": 17864, "arlington": 17865, "scholarships": 17866, "emirates": 17867, "mums": 17868, "stella": 17869, "autonom": 17870, "feather": 17871, "gore": 17872, "nominees": 17873, "fragrance": 17874, "ÑĤ": 17875, "wong": 17876, "theastern": 17877, "gre": 17878, "zilla": 17879, "isi": 17880, "bumper": 17881, "goo": 17882, "dozens": 17883, "abduc": 17884, "âļªï¸ı": 17885, "oils": 17886, "donors": 17887, "silicon": 17888, "ipod": 17889, "fortnite": 17890, "ðŁĴ¨": 17891, "toro": 17892, "sparkling": 17893, "consciousness": 17894, "pala": 17895, "num": 17896, "mounted": 17897, "ffins": 17898, "thieves": 17899, "teammate": 17900, "prab": 17901, "omer": 17902, "tapes": 17903, "bod": 17904, "mitsu": 17905, "stew": 17906, "ere": 17907, "pbs": 17908, "tusc": 17909, "lowe": 17910, "rade": 17911, "parliamentary": 17912, "hm": 17913, "edgar": 17914, "ðŁijĩðŁijĩ": 17915, "toa": 17916, "agh": 17917, "honi": 17918, "slate": 17919, "geek": 17920, "apt": 17921, "hardt": 17922, "tap": 17923, "horizon": 17924, "growth": 17925, "makeover": 17926, "hil": 17927, "paperback": 17928, "idan": 17929, "rehabil": 17930, "giu": 17931, "possibilities": 17932, "lettu": 17933, "franco": 17934, "boss": 17935, "acher": 17936, "doesnt": 17937, "moe": 17938, "taker": 17939, "hussain": 17940, "mlk": 17941, "dil": 17942, "thia": 17943, "hama": 17944, "realised": 17945, "ravens": 17946, "curriculum": 17947, "mith": 17948, "knight": 17949, "tedx": 17950, "rv": 17951, "isaiah": 17952, "cumbria": 17953, "birthdays": 17954, "fing": 17955, "prez": 17956, "mubarak": 17957, "exquisite": 17958, "clearance": 17959, "yen": 17960, "pari": 17961, "evo": 17962, "ú": 17963, "modified": 17964, "applying": 17965, "implement": 17966, "discovering": 17967, "chapman": 17968, "indiegame": 17969, "disk": 17970, "crowdfunding": 17971, "machin": 17972, "livel": 17973, "styled": 17974, "âĿĮ": 17975, "making": 17976, "rehearsals": 17977, "nutriti": 17978, "subscription": 17979, "andro": 17980, "creators": 17981, "carries": 17982, "kylie": 17983, "camden": 17984, "apprentice": 17985, "taxpay": 17986, "cca": 17987, "tuesdaythoughts": 17988, "pissed": 17989, "erman": 17990, "detec": 17991, "freedom": 17992, "meri": 17993, "..!": 17994, "psalm": 17995, "sunlight": 17996, "perspec": 17997, "beings": 17998, "bookstore": 17999, "rockstar": 18000, "functions": 18001, "pence": 18002, "faves": 18003, "zn": 18004, "obamacare": 18005, "spill": 18006, "coventry": 18007, "pigeon": 18008, "pivo": 18009, "bait": 18010, "kolkata": 18011, "aval": 18012, "donor": 18013, "wah": 18014, "privileg": 18015, "traditions": 18016, "rajasthan": 18017, "teness": 18018, "portuguese": 18019, "ynes": 18020, "tackles": 18021, "defic": 18022, "torn": 18023, "polling": 18024, "thorne": 
18025, "ina": 18026, "benedict": 18027, "barry": 18028, "calories": 18029, "verdict": 18030, "savethe": 18031, "norton": 18032, "office": 18033, "mainstream": 18034, "improves": 18035, "fron": 18036, "responding": 18037, "realtor": 18038, "scottish": 18039, "declar": 18040, "rl": 18041, "shiv": 18042, "supplier": 18043, "resting": 18044, "sweets": 18045, "qui": 18046, ".â̦": 18047, "whitney": 18048, "startup": 18049, "thankyou": 18050, "teacher": 18051, "halls": 18052, "have": 18053, "handmade": 18054, "proving": 18055, "quartet": 18056, "rochester": 18057, "lian": 18058, "virtual": 18059, "mendes": 18060, "oficial": 18061, "midlands": 18062, "xbox": 18063, "measuring": 18064, "ovo": 18065, "accommodation": 18066, "brides": 18067, "collegiate": 18068, "intellectual": 18069, "incar": 18070, "niag": 18071, "ðŁį·": 18072, "sfw": 18073, "cocoa": 18074, "coats": 18075, "civilians": 18076, "presidency": 18077, "matrix": 18078, "sweetheart": 18079, "triathlon": 18080, "wagner": 18081, "radic": 18082, "planner": 18083, "theo": 18084, "execution": 18085, "kum": 18086, "thewalkingdead": 18087, "scar": 18088, "rotation": 18089, "blogging": 18090, "bomb": 18091, "reson": 18092, "bbles": 18093, "stare": 18094, "assisted": 18095, "edo": 18096, "branded": 18097, "warnings": 18098, "thorpe": 18099, "acknowle": 18100, "satisfied": 18101, "shores": 18102, "rid": 18103, "dora": 18104, "physically": 18105, "bigh": 18106, "approves": 18107, "hah": 18108, "rical": 18109, "versatile": 18110, "pretend": 18111, "lum": 18112, "abhi": 18113, "yee": 18114, "spit": 18115, "ãĢĮ": 18116, "djs": 18117, "ashtra": 18118, "jt": 18119, "venues": 18120, "grammys": 18121, "cyclo": 18122, "tracker": 18123, "overwatch": 18124, "replica": 18125, "elyn": 18126, "nrl": 18127, "lindsey": 18128, "homo": 18129, "balloons": 18130, "kitchen": 18131, "sis": 18132, "amos": 18133, "endeav": 18134, "ðŁĴ»": 18135, "arec": 18136, "thug": 18137, "hooked": 18138, "hrc": 18139, "newyork": 18140, "burgh": 18141, "americas": 18142, "patricia": 18143, "ugu": 18144, "apathy": 18145, "hast": 18146, "psychi": 18147, "cork": 18148, "petrol": 18149, "ðŁİ¬": 18150, "aku": 18151, "popping": 18152, "psychological": 18153, "aux": 18154, "gma": 18155, "cadillac": 18156, "waste": 18157, "authent": 18158, "bristol": 18159, "name": 18160, "queer": 18161, "tober": 18162, "jerry": 18163, "comin": 18164, "chant": 18165, "privileged": 18166, "opar": 18167, "loser": 18168, "text": 18169, "marker": 18170, "stries": 18171, "equally": 18172, "aki": 18173, "christmas": 18174, "gareth": 18175, "blew": 18176, "emma": 18177, "imagin": 18178, "seals": 18179, "cheat": 18180, "conditioning": 18181, "jana": 18182, "rens": 18183, "daries": 18184, "oasis": 18185, "discounts": 18186, "council": 18187, "ika": 18188, "shirley": 18189, "voucher": 18190, "alps": 18191, "wx": 18192, "qr": 18193, "drift": 18194, "attempting": 18195, "utc": 18196, "ت": 18197, "gonzalez": 18198, "mf": 18199, "joker": 18200, "parallel": 18201, "pare": 18202, "aspects": 18203, "procedu": 18204, "np": 18205, "ama": 18206, "raleigh": 18207, "brighten": 18208, "guire": 18209, "radiation": 18210, "crescent": 18211, "hob": 18212, "ille": 18213, "strand": 18214, "vore": 18215, "nard": 18216, "chest": 18217, "diwali": 18218, "avatar": 18219, "alder": 18220, "dling": 18221, "pathetic": 18222, "ðŁĴĺ": 18223, "spirit": 18224, "jorge": 18225, "filmmaking": 18226, "ðŁĻıðŁĻı": 18227, "challenger": 18228, "bj": 18229, "downtown": 18230, "html": 18231, "adequ": 18232, "twisted": 18233, "inely": 18234, "('": 18235, 
"wraps": 18236, "operational": 18237, "yne": 18238, "nus": 18239, "magnet": 18240, "marketplace": 18241, "healthier": 18242, "snapshot": 18243, "damon": 18244, "interven": 18245, "federer": 18246, "owls": 18247, "biscuits": 18248, "jp": 18249, "rodeo": 18250, "blueberry": 18251, "lection": 18252, "frontier": 18253, "summers": 18254, "reyes": 18255, "pedestrian": 18256, "gol": 18257, "caffe": 18258, "refurbi": 18259, "boulder": 18260, "meghan": 18261, "specialty": 18262, "lass": 18263, "ei": 18264, "suspects": 18265, "approx": 18266, "rrr": 18267, "rath": 18268, "stim": 18269, "crushed": 18270, "hed": 18271, "whun": 18272, "loaf": 18273, "crore": 18274, "rivera": 18275, "genetics": 18276, "sock": 18277, "wasted": 18278, "nypd": 18279, "answering": 18280, "dove": 18281, "bella": 18282, "olin": 18283, "dun": 18284, "fiji": 18285, "pretty": 18286, "sparkle": 18287, "yun": 18288, "jd": 18289, "europa": 18290, "lifts": 18291, "amber": 18292, "mur": 18293, "tek": 18294, "boyd": 18295, "royalty": 18296, "indo": 18297, "rib": 18298, "gotham": 18299, "tiest": 18300, "installing": 18301, "kemp": 18302, "thephoto": 18303, "cosmic": 18304, ")))": 18305, "wholesale": 18306, "loyment": 18307, "easy": 18308, "suing": 18309, "settled": 18310, "afp": 18311, "prover": 18312, "supportive": 18313, "rees": 18314, "neath": 18315, "deliber": 18316, "cé": 18317, "welcome": 18318, "picoftheday": 18319, "newborn": 18320, "patty": 18321, "suns": 18322, "siest": 18323, "flint": 18324, "differently": 18325, "spoilers": 18326, "trooper": 18327, "gins": 18328, "cory": 18329, "lookout": 18330, "equipped": 18331, "tape": 18332, "toby": 18333, "researcher": 18334, "ush": 18335, "keyes": 18336, "alma": 18337, "induction": 18338, "kw": 18339, "khar": 18340, "slick": 18341, "bride": 18342, "eur": 18343, "craving": 18344, "bookings": 18345, "ches": 18346, "trunk": 18347, "vernon": 18348, "spher": 18349, "crystals": 18350, "relatively": 18351, "pompe": 18352, "unions": 18353, "valley": 18354, "para": 18355, "want": 18356, "okc": 18357, "deaf": 18358, "sergio": 18359, "lennon": 18360, "shay": 18361, "cra": 18362, "vat": 18363, "hee": 18364, "twe": 18365, "liquid": 18366, "poly": 18367, "ðŁİģ": 18368, "bent": 18369, "bearing": 18370, "motorsport": 18371, "barbe": 18372, "testi": 18373, "hani": 18374, "financing": 18375, "astronaut": 18376, "watercolour": 18377, "rish": 18378, "comiccon": 18379, "gart": 18380, "wrong": 18381, "bern": 18382, "itan": 18383, "stepped": 18384, "filters": 18385, "clow": 18386, "mex": 18387, "demons": 18388, "allo": 18389, "expanded": 18390, "command": 18391, "eters": 18392, "goats": 18393, "siri": 18394, "yr": 18395, "pottery": 18396, "marion": 18397, "ile": 18398, "elan": 18399, "santo": 18400, "persona": 18401, "duke": 18402, "homeless": 18403, "lighted": 18404, "wheeler": 18405, "changer": 18406, "cabbage": 18407, "surreal": 18408, "hamburg": 18409, "smashed": 18410, "stran": 18411, "knot": 18412, "iart": 18413, "obi": 18414, "bedro": 18415, "dial": 18416, "thick": 18417, "bingo": 18418, "fus": 18419, "vacuum": 18420, "conve": 18421, "ative": 18422, "accuracy": 18423, "account": 18424, "refer": 18425, "riz": 18426, "spiderman": 18427, "bana": 18428, "rite": 18429, "ub": 18430, "abs": 18431, "medical": 18432, "link": 18433, "siem": 18434, ">>>>": 18435, "betra": 18436, "glowing": 18437, "reactions": 18438, "puppet": 18439, "spaghetti": 18440, "angs": 18441, "remedi": 18442, "prayfor": 18443, "royce": 18444, "charlotte": 18445, "£ï¸ı": 18446, "ghet": 18447, "affecting": 18448, "rode": 18449, 
"socialist": 18450, "moses": 18451, "azi": 18452, "oit": 18453, "reporters": 18454, "cdt": 18455, "aping": 18456, "snat": 18457, "minimal": 18458, "waist": 18459, "siege": 18460, ">>>>": 18461, "rig": 18462, "schmidt": 18463, "hare": 18464, "eca": 18465, "thorn": 18466, "hemp": 18467, "esthe": 18468, "clyde": 18469, "tha": 18470, "donut": 18471, "mohamed": 18472, "lingerie": 18473, "legg": 18474, "carpenter": 18475, "performers": 18476, "dea": 18477, "imagined": 18478, "curse": 18479, "lash": 18480, "ctr": 18481, "agua": 18482, "roar": 18483, "gri": 18484, "role": 18485, "jfk": 18486, "resurrec": 18487, "roosevelt": 18488, "marilyn": 18489, "smalle": 18490, "willis": 18491, "waited": 18492, "charities": 18493, "theres": 18494, "lik": 18495, "original": 18496, "cari": 18497, "cough": 18498, "cruci": 18499, "lagun": 18500, "contrast": 18501, "kou": 18502, "armour": 18503, "removing": 18504, "tent": 18505, "mazda": 18506, "brighter": 18507, "thief": 18508, "corner": 18509, "tequila": 18510, "buzzing": 18511, "albi": 18512, "pam": 18513, "azure": 18514, "discoun": 18515, "pixelart": 18516, "possibility": 18517, "hamont": 18518, "trades": 18519, "buda": 18520, "hive": 18521, "versy": 18522, "finch": 18523, "transpa": 18524, "emi": 18525, "terrifying": 18526, "inqui": 18527, "gba": 18528, "substitu": 18529, "collecti": 18530, "placing": 18531, "cindy": 18532, "kann": 18533, "patho": 18534, "diamond": 18535, "mourinho": 18536, "guinea": 18537, "anthropo": 18538, "airs": 18539, "pumps": 18540, "ìļ": 18541, "paso": 18542, "curling": 18543, "anita": 18544, "residency": 18545, "newh": 18546, "joon": 18547, "cigarette": 18548, "queue": 18549, "extrac": 18550, "games": 18551, "splen": 18552, "express": 18553, "publicly": 18554, "bonnie": 18555, "tribune": 18556, "baek": 18557, "reasonable": 18558, "cor": 18559, "timothy": 18560, "sheeran": 18561, "ı": 18562, "fdn": 18563, "sutton": 18564, "concentration": 18565, "caravan": 18566, "xavier": 18567, "alger": 18568, "cylin": 18569, "frederick": 18570, "nerve": 18571, "peak": 18572, "lettuce": 18573, "jail": 18574, "pregame": 18575, "kavan": 18576, "upgraded": 18577, "ecology": 18578, "squadron": 18579, "grapes": 18580, "goog": 18581, "pastry": 18582, "ðŁĹ£": 18583, "ãĥ¼ãĥ": 18584, "milano": 18585, "awaz": 18586, "presenter": 18587, "ðŁĮ¿": 18588, "herd": 18589, "kings": 18590, "template": 18591, "flour": 18592, "hv": 18593, "kley": 18594, "iya": 18595, "spec": 18596, "ater": 18597, "frankfurt": 18598, "coch": 18599, "texting": 18600, "deli": 18601, "communist": 18602, "regiment": 18603, "eleanor": 18604, "anticipated": 18605, "ðŁijĮðŁı»": 18606, "thephotohour": 18607, "rano": 18608, "surviving": 18609, "simulation": 18610, "dawson": 18611, "arin": 18612, "aqua": 18613, "mor": 18614, "â̦.": 18615, "cino": 18616, "iraqi": 18617, "shaz": 18618, "dundee": 18619, "wes": 18620, "drau": 18621, "hannah": 18622, "snews": 18623, "occupation": 18624, "steen": 18625, "xm": 18626, "angles": 18627, "settings": 18628, "guru": 18629, "knox": 18630, "orca": 18631, "shaping": 18632, "went": 18633, "drilling": 18634, "zzie": 18635, "bri": 18636, "kissing": 18637, "find": 18638, "maine": 18639, "âŃIJï¸ıâŃIJï¸ı": 18640, "ðŁĮį": 18641, "larry": 18642, "busted": 18643, "tavern": 18644, "actively": 18645, "-\"": 18646, "replacing": 18647, "nod": 18648, "unlock": 18649, ".\"": 18650, "âŀ¤": 18651, "affiliate": 18652, "tow": 18653, "ln": 18654, "happynewyear": 18655, "dif": 18656, "jm": 18657, "greenwich": 18658, "controversy": 18659, "dawg": 18660, "condol": 18661, "savannah": 
18662, "compensation": 18663, "touchdown": 18664, "teo": 18665, "ambitious": 18666, "embroi": 18667, "convicted": 18668, "iartg": 18669, "barack": 18670, "trance": 18671, "testimony": 18672, "audition": 18673, "thumb": 18674, "myths": 18675, "bex": 18676, "quez": 18677, "orchid": 18678, "deny": 18679, "entitled": 18680, "hood": 18681, "grant": 18682, "inbox": 18683, "bluejays": 18684, "rilla": 18685, "smallest": 18686, "burden": 18687, "infamous": 18688, "divided": 18689, "boundaries": 18690, "tter": 18691, "elt": 18692, "wyoming": 18693, "beverage": 18694, "mesm": 18695, "onews": 18696, "buddhist": 18697, "yana": 18698, "assad": 18699, "isms": 18700, "barrett": 18701, "predicted": 18702, "backto": 18703, "twit": 18704, "ethere": 18705, "captains": 18706, "escaped": 18707, "ayo": 18708, "lamborgh": 18709, "gardner": 18710, "laps": 18711, "kal": 18712, "advertisement": 18713, "insects": 18714, "napo": 18715, "amen": 18716, "acy": 18717, "rand": 18718, "gk": 18719, "teh": 18720, "kathle": 18721, "tridge": 18722, "pancake": 18723, "atro": 18724, "pyramid": 18725, "bula": 18726, "paralym": 18727, "gauge": 18728, "encies": 18729, "tomy": 18730, "biscuit": 18731, "butcher": 18732, "qualifier": 18733, "county": 18734, "kei": 18735, "pools": 18736, "darker": 18737, "shoulders": 18738, "ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸": 18739, "spre": 18740, "(\"": 18741, "writers": 18742, "gm": 18743, "ðŁİĵ": 18744, "knit": 18745, "huff": 18746, "mtb": 18747, "phillies": 18748, "ost": 18749, "denis": 18750, "gart": 18751, "licensed": 18752, "interface": 18753, "excel": 18754, "dwell": 18755, "fromthe": 18756, "cofficial": 18757, "azzi": 18758, "appearing": 18759, "forest": 18760, "nana": 18761, "keith": 18762, "manufacturers": 18763, "beckham": 18764, ")?": 18765, "ese": 18766, "colony": 18767, "delicate": 18768, "utter": 18769, "mcin": 18770, "transplant": 18771, "preferred": 18772, "pard": 18773, "arie": 18774, "hub": 18775, "pods": 18776, "perspectives": 18777, "pict": 18778, "delu": 18779, "apper": 18780, "bethan": 18781, "pmo": 18782, "criminals": 18783, "feminism": 18784, "shack": 18785, "circumstances": 18786, "fellas": 18787, "protesting": 18788, "wax": 18789, "suggested": 18790, "tator": 18791, "drew": 18792, "omni": 18793, "fake": 18794, "kathy": 18795, "reb": 18796, "deline": 18797, "berni": 18798, "misty": 18799, "ðŁij©": 18800, "erable": 18801, "breakthrough": 18802, "menswear": 18803, "millennials": 18804, "chanyeol": 18805, "laz": 18806, "insert": 18807, "replies": 18808, "phrase": 18809, "nx": 18810, "iheartawards": 18811, "audrey": 18812, "granite": 18813, "racec": 18814, "orie": 18815, "terra": 18816, "innovations": 18817, "brittany": 18818, "ateral": 18819, "pear": 18820, "biological": 18821, "shments": 18822, "institution": 18823, "msn": 18824, "frequency": 18825, "dman": 18826, "neglec": 18827, "tf": 18828, "stefan": 18829, "foxnews": 18830, "typo": 18831, "comms": 18832, "sequence": 18833, "carmen": 18834, "whites": 18835, "economist": 18836, "exeter": 18837, "seum": 18838, "resorts": 18839, "casually": 18840, "bunde": 18841, "divide": 18842, "ع": 18843, "gag": 18844, "creed": 18845, "retire": 18846, "caucus": 18847, "rapids": 18848, "wrestlemania": 18849, "tulsa": 18850, "sunderland": 18851, "fundament": 18852, "odi": 18853, "yamaha": 18854, "vary": 18855, "intrigu": 18856, "else": 18857, "beacon": 18858, "angie": 18859, "traded": 18860, "transm": 18861, "gents": 18862, "knitting": 18863, "galac": 18864, "ðĿĹ": 18865, "uto": 18866, "seaside": 18867, "holt": 18868, "rers": 18869, "fargo": 18870, "trainers": 
18871, "monsoon": 18872, "bale": 18873, "sought": 18874, "maddie": 18875, "hw": 18876, "coli": 18877, "fran": 18878, "favs": 18879, "ðŁĴĶ": 18880, "intent": 18881, "rally": 18882, "sbs": 18883, "lemonade": 18884, "barackobama": 18885, "bread": 18886, "sticky": 18887, "explosive": 18888, "chelten": 18889, "tj": 18890, "assoc": 18891, "ramen": 18892, "homies": 18893, "vlog": 18894, "mister": 18895, "lord": 18896, "âĢįâĻĢï¸ı": 18897, "alyssa": 18898, "sketchbook": 18899, "rumble": 18900, "catch": 18901, "migrant": 18902, "discipline": 18903, "unlikely": 18904, "chronicles": 18905, "flora": 18906, "slams": 18907, "amid": 18908, "sboro": 18909, "coop": 18910, "jumps": 18911, "tranqu": 18912, "melis": 18913, "sofia": 18914, "enri": 18915, "gabe": 18916, "syri": 18917, "nicolas": 18918, "chai": 18919, "wv": 18920, "becky": 18921, "footy": 18922, "tao": 18923, "suppose": 18924, "ðŁĺįðŁĺįðŁĺįðŁĺį": 18925, "plush": 18926, "rish": 18927, "ð٤ĵ": 18928, "kha": 18929, "saturdays": 18930, "accent": 18931, "hec": 18932, "limit": 18933, "carlton": 18934, "wired": 18935, "taylorswift": 18936, "ðŁĺij": 18937, "sql": 18938, "harro": 18939, "recipients": 18940, "gat": 18941, "gop": 18942, "thof": 18943, "amazed": 18944, "ghan": 18945, "ðŁıĨðŁıĨ": 18946, "porto": 18947, "clare": 18948, "distant": 18949, "nac": 18950, "ohio": 18951, "ðŁĻıðŁı¼": 18952, "mtn": 18953, "antibio": 18954, "dinosa": 18955, "mesa": 18956, "partial": 18957, "bv": 18958, "learnt": 18959, "lovato": 18960, "question": 18961, "extract": 18962, "gossip": 18963, "gibb": 18964, "niagara": 18965, "ðŁij¨": 18966, "displayed": 18967, "sooner": 18968, "stevie": 18969, "nuggets": 18970, "mln": 18971, "brom": 18972, "turb": 18973, "giveaways": 18974, "stupi": 18975, "blink": 18976, "cili": 18977, "convenient": 18978, "moh": 18979, "vive": 18980, "fric": 18981, "cause": 18982, "chamber": 18983, "cules": 18984, "nearest": 18985, "isse": 18986, "smallbiz": 18987, "tj": 18988, "canadians": 18989, "smarter": 18990, "brasil": 18991, "rare": 18992, "quette": 18993, "wha": 18994, "candle": 18995, "atomic": 18996, "ðŁijįðŁijį": 18997, "warrior": 18998, "relaxed": 18999, "strips": 19000, "neur": 19001, "kka": 19002, "rfc": 19003, "jensen": 19004, "recovering": 19005, "responses": 19006, "salam": 19007, "orthodox": 19008, "active": 19009, "ellers": 19010, "nit": 19011, "âŃIJ": 19012, "metropolitan": 19013, "centuries": 19014, "vida": 19015, "grading": 19016, "transparent": 19017, "simple": 19018, "dots": 19019, "superintendent": 19020, "elevator": 19021, "automated": 19022, "redskins": 19023, "imam": 19024, "summertime": 19025, "jonathan": 19026, "gearing": 19027, "michelle": 19028, "conflic": 19029, "mice": 19030, "tote": 19031, "publish": 19032, "pax": 19033, ")-": 19034, "nailed": 19035, "á´": 19036, "telescope": 19037, "serbia": 19038, "bab": 19039, "apeu": 19040, "stically": 19041, "senti": 19042, "rats": 19043, "isolated": 19044, "group": 19045, "hatred": 19046, "paranormal": 19047, "stanley": 19048, "alion": 19049, "safety": 19050, "ls": 19051, "र": 19052, "nexus": 19053, "alexandra": 19054, "masks": 19055, "++": 19056, "tron": 19057, "auk": 19058, "brotherhood": 19059, "browse": 19060, "mixes": 19061, "simone": 19062, "musk": 19063, "approve": 19064, "lola": 19065, "exp": 19066, "perth": 19067, "futuri": 19068, "unseen": 19069, "dm": 19070, "chelse": 19071, "scouting": 19072, "owe": 19073, "portsmouth": 19074, "kram": 19075, "mize": 19076, "dispen": 19077, "sup": 19078, "dlc": 19079, "advert": 19080, "teresa": 19081, "isle": 19082, "cycle": 19083, 
"metall": 19084, "shields": 19085, "mariners": 19086, "raz": 19087, "ingen": 19088, "fund": 19089, "ango": 19090, "jones": 19091, "oka": 19092, "madden": 19093, "broccoli": 19094, "dominic": 19095, "situations": 19096, "mero": 19097, "cricke": 19098, "punishment": 19099, "db": 19100, "shaking": 19101, "ðŁĺļ": 19102, "mq": 19103, "arians": 19104, "leh": 19105, "claw": 19106, "weds": 19107, "dure": 19108, "niel": 19109, "jelly": 19110, "gourmet": 19111, "traders": 19112, "levi": 19113, "wages": 19114, "knees": 19115, "wise": 19116, "heavenly": 19117, "avid": 19118, "melody": 19119, "zack": 19120, "bananas": 19121, "apprentice": 19122, "prop": 19123, "funny": 19124, "ode": 19125, "respected": 19126, "megan": 19127, "fewer": 19128, "drafted": 19129, "medit": 19130, "grape": 19131, "usarmy": 19132, "crusad": 19133, "vocali": 19134, "preparations": 19135, "nonsense": 19136, "usage": 19137, "thr": 19138, "roth": 19139, "wizards": 19140, "inside": 19141, "promotions": 19142, "mona": 19143, "redsox": 19144, "sig": 19145, "elegance": 19146, "chia": 19147, "universal": 19148, "ãĢį": 19149, "raja": 19150, "unga": 19151, "pollin": 19152, "filipino": 19153, "aka": 19154, "tsun": 19155, "ikon": 19156, "biking": 19157, "decorations": 19158, "zac": 19159, "cadets": 19160, "humour": 19161, "agm": 19162, "reppin": 19163, "vaccin": 19164, "elove": 19165, "uw": 19166, "diabe": 19167, "gallagher": 19168, "azer": 19169, "dol": 19170, "awhile": 19171, "prominent": 19172, "welsh": 19173, "tann": 19174, "')": 19175, "bien": 19176, "wag": 19177, "inal": 19178, "cwc": 19179, "wicket": 19180, "urst": 19181, "qanon": 19182, "xe": 19183, "outdoor": 19184, "dunn": 19185, "starr": 19186, "cology": 19187, "ricky": 19188, "uefa": 19189, "rebounds": 19190, "smusic": 19191, "infant": 19192, "ðŁĻĭ": 19193, "sop": 19194, "umber": 19195, "handing": 19196, "begin": 19197, "sorting": 19198, "hash": 19199, "spati": 19200, "rek": 19201, "budapest": 19202, "blackhawks": 19203, "delete": 19204, "rom": 19205, "candid": 19206, "authori": 19207, "debris": 19208, "specul": 19209, "intersection": 19210, "marriott": 19211, "imran": 19212, "ðŁĺģðŁĺģ": 19213, "cruises": 19214, "ramsey": 19215, "rafael": 19216, "awareness": 19217, "vascular": 19218, "beyoncé": 19219, "rug": 19220, "ðŁĺĮ": 19221, "festiv": 19222, "aram": 19223, "sable": 19224, "basil": 19225, "pill": 19226, "flooring": 19227, "unbeaten": 19228, "implications": 19229, "uf": 19230, "wound": 19231, "forge": 19232, "pointing": 19233, "pots": 19234, "popularity": 19235, "ðŁijıðŁı»": 19236, "manipul": 19237, "slots": 19238, "debates": 19239, "absence": 19240, "vermont": 19241, "neverforget": 19242, "wrist": 19243, "gloria": 19244, "rence": 19245, "husk": 19246, "melting": 19247, "ðŁİŁ": 19248, "braces": 19249, "timely": 19250, "transforming": 19251, "amps": 19252, "mak": 19253, "poe": 19254, "ahan": 19255, "generally": 19256, "ndp": 19257, "aleppo": 19258, "unicef": 19259, "profs": 19260, "nord": 19261, "mask": 19262, "jacksonville": 19263, "vv": 19264, "shells": 19265, "blooming": 19266, "operators": 19267, "charcoal": 19268, "neville": 19269, "magi": 19270, "chip": 19271, "sama": 19272, "iran": 19273, "reforms": 19274, "accumul": 19275, "rue": 19276, "æľ": 19277, "websites": 19278, "gaon": 19279, "devastating": 19280, "stos": 19281, "glacier": 19282, "rapp": 19283, "chipotle": 19284, "pra": 19285, "orous": 19286, "romney": 19287, "season": 19288, "decorative": 19289, "cisco": 19290, "ditch": 19291, "complain": 19292, "llo": 19293, "assume": 19294, "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 19295, 
"nels": 19296, "centric": 19297, "ftw": 19298, "carrots": 19299, "tata": 19300, "canter": 19301, "perience": 19302, "liers": 19303, "demos": 19304, "blunt": 19305, "operate": 19306, "reservations": 19307, "leah": 19308, "substance": 19309, "dison": 19310, "ante": 19311, "election": 19312, "vue": 19313, "square": 19314, "nonprofit": 19315, "caa": 19316, "fsu": 19317, "yam": 19318, "ãĤ¤": 19319, "vladi": 19320, "completes": 19321, "mari": 19322, "phillip": 19323, "neill": 19324, "eras": 19325, "kait": 19326, "mendo": 19327, "maharashtra": 19328, "gp": 19329, "dane": 19330, "providence": 19331, "therapeu": 19332, "juvenile": 19333, "memo": 19334, "incorpor": 19335, "aaaa": 19336, "seventeen": 19337, "teenager": 19338, "ã": 19339, "orns": 19340, "wide": 19341, "cuteness": 19342, "twd": 19343, "ffles": 19344, "bara": 19345, "comedy": 19346, "overtime": 19347, "yaz": 19348, "baron": 19349, "unemployment": 19350, "ðŁijĭ": 19351, "exterior": 19352, "dense": 19353, "centres": 19354, "matchup": 19355, "historymonth": 19356, "artificial": 19357, "quit": 19358, "esk": 19359, "warn": 19360, "critic": 19361, "jaf": 19362, "ðŁĵ²": 19363, "informative": 19364, "fuels": 19365, "recycle": 19366, "naming": 19367, "stripe": 19368, "solic": 19369, "molecular": 19370, "deepi": 19371, "convo": 19372, "ssel": 19373, "nae": 19374, "descent": 19375, "tiz": 19376, "accountability": 19377, "terry": 19378, "rito": 19379, "slay": 19380, "emo": 19381, "demol": 19382, "sensation": 19383, "cov": 19384, "tore": 19385, "roundtable": 19386, "yol": 19387, "excuses": 19388, "à¥į": 19389, "turquo": 19390, "hhhh": 19391, "podcasts": 19392, "celeb": 19393, "messi": 19394, "lio": 19395, "mann": 19396, "contributed": 19397, "uz": 19398, "generator": 19399, "elets": 19400, "veggie": 19401, "indul": 19402, "ensuring": 19403, "detroit": 19404, "punjab": 19405, "transpor": 19406, "instruction": 19407, "add": 19408, "porcel": 19409, "paneli": 19410, "circles": 19411, "persist": 19412, "clayton": 19413, "spn": 19414, "dogsoftwitter": 19415, "isnt": 19416, "spr": 19417, "retailers": 19418, "pw": 19419, "hungar": 19420, "elena": 19421, "monaster": 19422, "guatem": 19423, "jessie": 19424, "anz": 19425, "rashi": 19426, "flee": 19427, "carving": 19428, "faux": 19429, "lal": 19430, "henri": 19431, "djo": 19432, "dull": 19433, "sana": 19434, "lara": 19435, "globe": 19436, "crimson": 19437, "compass": 19438, "pause": 19439, "nab": 19440, "lionel": 19441, "baths": 19442, "ufo": 19443, "inventory": 19444, "singh": 19445, "satan": 19446, "ðŁĩ¸": 19447, "cements": 19448, "inform": 19449, "generated": 19450, "biden": 19451, "avg": 19452, "tasks": 19453, "deer": 19454, "sau": 19455, "jailed": 19456, "pastel": 19457, "scc": 19458, "nail": 19459, "steele": 19460, "peris": 19461, "lamborghini": 19462, "pursue": 19463, "margin": 19464, "uch": 19465, "bosch": 19466, "drain": 19467, "clara": 19468, "bom": 19469, "latino": 19470, "webster": 19471, "rosemary": 19472, "rha": 19473, "soun": 19474, "billionaire": 19475, "notch": 19476, "percentage": 19477, "conor": 19478, "'\"": 19479, "homes": 19480, "earthday": 19481, "hort": 19482, "biggest": 19483, "disin": 19484, "walton": 19485, "editors": 19486, "imma": 19487, "omar": 19488, "equivalent": 19489, "pharmaceu": 19490, "ahmed": 19491, "cameo": 19492, "hanni": 19493, "underrated": 19494, "gement": 19495, "microbi": 19496, "voo": 19497, "honorable": 19498, "obesity": 19499, "âļ¡ï¸ı": 19500, "limerick": 19501, "involvement": 19502, "stagram": 19503, "boulevard": 19504, "burg": 19505, "blackandwhite": 19506, 
"liberation": 19507, "five": 19508, "interim": 19509, "smm": 19510, "rivalry": 19511, "capabilities": 19512, "statements": 19513, "thumb": 19514, "ved": 19515, "swans": 19516, "barber": 19517, "eque": 19518, "serena": 19519, "helm": 19520, "noodle": 19521, "sampling": 19522, "nawaz": 19523, "single": 19524, "thunderstorms": 19525, "shon": 19526, "inev": 19527, "ë¯": 19528, "topp": 19529, "orchard": 19530, "bian": 19531, "ðŁĺĶ": 19532, "doorstep": 19533, "salvation": 19534, "marketing": 19535, "rons": 19536, "clemson": 19537, "ravi": 19538, "intake": 19539, "standwith": 19540, "sina": 19541, "haiku": 19542, "pley": 19543, "electoral": 19544, "philly": 19545, "lays": 19546, "electric": 19547, "capturing": 19548, "upp": 19549, "ergy": 19550, "believing": 19551, "cultures": 19552, "esday": 19553, "invasive": 19554, "eded": 19555, "speech": 19556, "endur": 19557, "vietnam": 19558, "boycott": 19559, "pede": 19560, "deliver": 19561, "ðŁĴĸðŁĴĸ": 19562, "merchant": 19563, "stir": 19564, "denies": 19565, "pockets": 19566, "oti": 19567, "cuddle": 19568, "roland": 19569, "mmed": 19570, "dened": 19571, "learners": 19572, "hoop": 19573, "sourcing": 19574, "hacked": 19575, "dim": 19576, "environments": 19577, "benson": 19578, "judicial": 19579, "worcester": 19580, "pearls": 19581, "governments": 19582, "arrivals": 19583, "corners": 19584, "tuning": 19585, "labour": 19586, "ym": 19587, "ordering": 19588, "lewi": 19589, "ife": 19590, "hygiene": 19591, "thoughtful": 19592, "indonesian": 19593, "campaigning": 19594, "principle": 19595, "assaul": 19596, "rubb": 19597, "atv": 19598, "willy": 19599, "entre": 19600, "ili": 19601, "phon": 19602, "duties": 19603, "âĻ¥âĻ¥": 19604, "snakes": 19605, "loop": 19606, "amar": 19607, "convertible": 19608, "bonding": 19609, "mentoring": 19610, "maxwell": 19611, "ethereum": 19612, "destroying": 19613, "axis": 19614, "cairo": 19615, "finnish": 19616, "shock": 19617, "ðŁĺIJ": 19618, "caleb": 19619, "coma": 19620, "pedal": 19621, "core": 19622, "continent": 19623, "elson": 19624, "tempo": 19625, "helsinki": 19626, "acp": 19627, "tackling": 19628, "stated": 19629, "bla": 19630, "doub": 19631, "smashing": 19632, "aja": 19633, "cameron": 19634, "disruption": 19635, "warmth": 19636, "beingsalmankhan": 19637, "bulletin": 19638, "ode": 19639, "syracuse": 19640, "aran": 19641, "mcgregor": 19642, "bulk": 19643, "anton": 19644, "confirmation": 19645, "spine": 19646, "imran": 19647, "instruc": 19648, "jacks": 19649, "chio": 19650, "palm": 19651, "stre": 19652, "embarrassing": 19653, "unt": 19654, "eliminate": 19655, "toss": 19656, "cise": 19657, "aws": 19658, "onists": 19659, "shinee": 19660, "jos": 19661, "hose": 19662, "lively": 19663, "opponents": 19664, "movements": 19665, "recognizing": 19666, "sandwiches": 19667, "shakes": 19668, "exercises": 19669, "seat": 19670, "profession": 19671, "merrychristmas": 19672, "lugg": 19673, "adoptdont": 19674, "marvin": 19675, "byrne": 19676, "unle": 19677, "het": 19678, "kuwait": 19679, "rahman": 19680, "aspect": 19681, "humbled": 19682, "genes": 19683, "fand": 19684, "longtime": 19685, ");": 19686, "campu": 19687, "angus": 19688, "ðŁijįðŁı¼": 19689, "quran": 19690, "sleeves": 19691, "slic": 19692, "¸ë": 19693, "twelve": 19694, "youre": 19695, "ike": 19696, "gogh": 19697, "bst": 19698, "dictionary": 19699, "reflecting": 19700, "toon": 19701, "yarn": 19702, "embed": 19703, "ðŁı´": 19704, "reserves": 19705, "flooded": 19706, "veriz": 19707, "dusk": 19708, "establish": 19709, "proli": 19710, "aud": 19711, "ritual": 19712, "orbit": 19713, 
"declaration": 19714, "recordings": 19715, "camo": 19716, "cassette": 19717, "goodluck": 19718, "cutter": 19719, "bop": 19720, "bho": 19721, "cheating": 19722, "pacific": 19723, "mares": 19724, "timer": 19725, "colt": 19726, "trous": 19727, "tomorrow": 19728, "hansen": 19729, "cie": 19730, "wang": 19731, "bani": 19732, "circular": 19733, "acute": 19734, "farmer": 19735, "coys": 19736, "pse": 19737, "irving": 19738, "wj": 19739, "hawkins": 19740, "bison": 19741, "urday": 19742, "cruising": 19743, "ote": 19744, "kath": 19745, "whistle": 19746, "yourselves": 19747, "antis": 19748, "slash": 19749, "thoroughly": 19750, "kesh": 19751, "serie": 19752, "exem": 19753, "enig": 19754, "guild": 19755, "shred": 19756, "hogan": 19757, "apo": 19758, "ä¸": 19759, "puzz": 19760, "netball": 19761, "aussi": 19762, "panorama": 19763, "wsj": 19764, "avis": 19765, "arming": 19766, "humph": 19767, "browser": 19768, "cries": 19769, "foggy": 19770, "matte": 19771, "ðŁĮ»": 19772, "iter": 19773, "tallest": 19774, "byron": 19775, "captiv": 19776, "jesu": 19777, "anyways": 19778, "flagship": 19779, "pton": 19780, "wey": 19781, "fayette": 19782, "financial": 19783, "foul": 19784, "solomon": 19785, "jennifer": 19786, "cucumber": 19787, "argue": 19788, "textile": 19789, "wrestler": 19790, "johnston": 19791, "pastor": 19792, "ðŁĺŃðŁĺŃðŁĺŃðŁĺŃ": 19793, "cactus": 19794, "edible": 19795, "reserved": 19796, "richie": 19797, "metres": 19798, "ingredient": 19799, "hella": 19800, "unto": 19801, "chol": 19802, "celebs": 19803, "poets": 19804, "graham": 19805, "hayden": 19806, "coincidence": 19807, "baw": 19808, "communicate": 19809, "fletcher": 19810, "/-": 19811, "toledo": 19812, "ecuador": 19813, "counsel": 19814, "slaughter": 19815, "linear": 19816, "atp": 19817, "osu": 19818, "joel": 19819, "eved": 19820, "conquer": 19821, "rustic": 19822, "plicity": 19823, "recognise": 19824, "roommate": 19825, "cracked": 19826, "jasper": 19827, "pher": 19828, "ðŁĮº": 19829, "woven": 19830, "moist": 19831, "ffc": 19832, "steering": 19833, "nish": 19834, "standings": 19835, "frequent": 19836, "ardi": 19837, "hazel": 19838, "asmsg": 19839, "baum": 19840, "dart": 19841, "sidd": 19842, "nath": 19843, "chero": 19844, "cardboard": 19845, "css": 19846, "nsfw": 19847, "pair": 19848, "ðŁĺįðŁĺĺ": 19849, "occurred": 19850, "homelessness": 19851, "malone": 19852, "phe": 19853, "xia": 19854, "paddy": 19855, "declare": 19856, "theatre": 19857, "bf": 19858, "persian": 19859, "tad": 19860, "axe": 19861, "suspicious": 19862, "lamb": 19863, "mucho": 19864, "senior": 19865, "stas": 19866, "kite": 19867, "sting": 19868, "grad": 19869, "kaf": 19870, "watering": 19871, "د": 19872, "spiral": 19873, "thms": 19874, "educator": 19875, "jerome": 19876, "ofc": 19877, "clock": 19878, "sul": 19879, "pemb": 19880, ".........": 19881, "parkway": 19882, "deaux": 19883, "restrictions": 19884, "mons": 19885, "needle": 19886, "ej": 19887, "leagues": 19888, "watermelon": 19889, "aman": 19890, "plenary": 19891, "maxim": 19892, "wab": 19893, "comingsoon": 19894, "bryce": 19895, "vigil": 19896, "supermarket": 19897, "fortunate": 19898, "turquoise": 19899, "president": 19900, "liv": 19901, "interns": 19902, "feelin": 19903, "fixtures": 19904, "stunt": 19905, "staged": 19906, "premieres": 19907, "lok": 19908, "practiti": 19909, "shortage": 19910, "logne": 19911, "vec": 19912, "concor": 19913, "rocke": 19914, "lig": 19915, "composed": 19916, "synthetic": 19917, "dip": 19918, "camila": 19919, "chis": 19920, "jou": 19921, "susan": 19922, "eyebrows": 19923, "supplement": 19924, 
"satisfaction": 19925, "mohammad": 19926, "tibet": 19927, "houseof": 19928, "pun": 19929, "assam": 19930, "shadowhun": 19931, "psyched": 19932, "seduc": 19933, "mandatory": 19934, "herbert": 19935, "scallo": 19936, "streamers": 19937, "protocol": 19938, "blockbuster": 19939, "produces": 19940, "schnei": 19941, "laurel": 19942, "tribe": 19943, "timehop": 19944, "pla": 19945, "modelling": 19946, "tvtime": 19947, "mtvstars": 19948, "widow": 19949, "metric": 19950, "cham": 19951, "condo": 19952, "flowering": 19953, "alec": 19954, "dms": 19955, "intensity": 19956, "¨": 19957, "mccartney": 19958, "islamabad": 19959, "kb": 19960, "ffi": 19961, "phal": 19962, "analog": 19963, "fond": 19964, "hacks": 19965, "positivity": 19966, "treaty": 19967, "submarine": 19968, "connect": 19969, "selen": 19970, "categories": 19971, "cub": 19972, "organize": 19973, "sik": 19974, "quoteoftheday": 19975, "reminding": 19976, "amor": 19977, "locking": 19978, "ðŁijıðŁı¼": 19979, "compound": 19980, "ette": 19981, "bout": 19982, "recur": 19983, "ference": 19984, "mizz": 19985, "trend": 19986, "hipster": 19987, "fortress": 19988, "forthcoming": 19989, "prelimin": 19990, "odyssey": 19991, "angp": 19992, "delici": 19993, "evenings": 19994, "ðŁĶ¹": 19995, "iq": 19996, "dw": 19997, "dair": 19998, "kathryn": 19999, "christianity": 20000, "moonlight": 20001, "hab": 20002, "whoo": 20003, "fbf": 20004, "seth": 20005, "genuinely": 20006, "pax": 20007, "charity": 20008, "deployed": 20009, "bnb": 20010, "bucs": 20011, "judg": 20012, "conge": 20013, "plantation": 20014, "impress": 20015, "cara": 20016, "sclub": 20017, "scopy": 20018, "landers": 20019, "complaints": 20020, "bama": 20021, "rebuild": 20022, "xy": 20023, "realism": 20024, "shour": 20025, "lein": 20026, "bracelets": 20027, "mera": 20028, "assassin": 20029, "anchor": 20030, "ðŁijĮðŁı¼": 20031, "linen": 20032, "confron": 20033, "chronicle": 20034, "comment": 20035, "catalog": 20036, "illes": 20037, "gorge": 20038, "metry": 20039, "jungkook": 20040, "lovemy": 20041, "sentin": 20042, "seem": 20043, "fitness": 20044, "allied": 20045, "tsman": 20046, "digitaltransformation": 20047, "pran": 20048, "loft": 20049, "minton": 20050, "aldenrichards": 20051, "envel": 20052, "cherish": 20053, "certainty": 20054, "zzz": 20055, "rhino": 20056, "perkins": 20057, "enrich": 20058, "capetown": 20059, "ometer": 20060, "sections": 20061, "skeleton": 20062, "defenders": 20063, "ðŁĺĿ": 20064, "penc": 20065, "brit": 20066, "jah": 20067, "capitalism": 20068, "ðŁ¥ĩ": 20069, "bazaar": 20070, "reme": 20071, "ext": 20072, "kkk": 20073, "convert": 20074, "stormy": 20075, "bye": 20076, "karan": 20077, "chrysler": 20078, "ados": 20079, "pressed": 20080, "sync": 20081, "ationday": 20082, "danger": 20083, "badges": 20084, "refuses": 20085, "empowering": 20086, "lym": 20087, "exports": 20088, "adoptdontshop": 20089, "ðŁĩ¯": 20090, "thc": 20091, "awaited": 20092, "focuses": 20093, "fined": 20094, "oat": 20095, "hahahah": 20096, "âģ©": 20097, "nfamily": 20098, "fiona": 20099, "luckily": 20100, "thrilling": 20101, "typing": 20102, "outbreak": 20103, "dies": 20104, "heu": 20105, "crawl": 20106, "nesses": 20107, "oath": 20108, "scripts": 20109, "geeks": 20110, "ðŁIJĿ": 20111, "pb": 20112, "mathematics": 20113, "alis": 20114, "________________": 20115, "gymnastics": 20116, "activism": 20117, "recommendation": 20118, "gren": 20119, "wain": 20120, "courty": 20121, "napol": 20122, "cauli": 20123, "hornets": 20124, "gals": 20125, "jockey": 20126, "dirty": 20127, "atar": 20128, "enormous": 20129, "pest": 20130, 
"gregation": 20131, "anos": 20132, "iiii": 20133, "defends": 20134, "blackhistorymonth": 20135, "atx": 20136, "mbc": 20137, "luggage": 20138, "witch": 20139, "cob": 20140, "lasts": 20141, "cum": 20142, "ggg": 20143, "bathing": 20144, "nar": 20145, "cebu": 20146, "ðŁįĥ": 20147, "navigation": 20148, "mine": 20149, "rejo": 20150, "ðŁİĢ": 20151, "giftide": 20152, "reta": 20153, "useless": 20154, "pull": 20155, "deficit": 20156, "allu": 20157, "atime": 20158, "itv": 20159, "trillion": 20160, "pue": 20161, "acies": 20162, "procedure": 20163, "lori": 20164, "jenny": 20165, "cad": 20166, "ulously": 20167, "drac": 20168, "promotes": 20169, "ingthe": 20170, "canu": 20171, "woohoo": 20172, "naomi": 20173, "zardari": 20174, "tsu": 20175, "beir": 20176, "sdg": 20177, "lever": 20178, "weber": 20179, "abud": 20180, "lund": 20181, "crowded": 20182, "deployment": 20183, "terrain": 20184, "kenny": 20185, "hof": 20186, "witnessed": 20187, "loch": 20188, "jk": 20189, "bully": 20190, "wren": 20191, "poetry": 20192, "doff": 20193, "wwi": 20194, "mored": 20195, "dini": 20196, "culture": 20197, "prompt": 20198, "Â¥": 20199, "maurice": 20200, "topps": 20201, "rm": 20202, "correspon": 20203, "about": 20204, "jewels": 20205, "gibr": 20206, "eagle": 20207, "ðŁĺĺðŁĺĺðŁĺĺ": 20208, "lending": 20209, "souven": 20210, "çĶ": 20211, "contemporaryart": 20212, "establishment": 20213, "jong": 20214, "â̦\"": 20215, "gator": 20216, "patriotic": 20217, "mccoy": 20218, "vape": 20219, "humane": 20220, "feliz": 20221, "coachella": 20222, "reposting": 20223, "steals": 20224, "fuller": 20225, "nering": 20226, "atra": 20227, "(-": 20228, "blake": 20229, "heather": 20230, "worms": 20231, "disciplinary": 20232, "redemption": 20233, "yard": 20234, "amin": 20235, "\"@_": 20236, "dnc": 20237, "tds": 20238, "kappa": 20239, "newark": 20240, "commits": 20241, "spears": 20242, "jams": 20243, "tand": 20244, "msnbc": 20245, "intermedi": 20246, "aimed": 20247, "atic": 20248, "teenth": 20249, "observation": 20250, "kashmir": 20251, "kavanaugh": 20252, "oul": 20253, "sanfrancisco": 20254, "reu": 20255, "belated": 20256, "chow": 20257, "password": 20258, "stills": 20259, "detained": 20260, "sari": 20261, "dayton": 20262, "darren": 20263, "italian": 20264, "arth": 20265, "amusic": 20266, "arbit": 20267, "wm": 20268, "vm": 20269, "hem": 20270, "doug": 20271, "myr": 20272, "asho": 20273, "prev": 20274, "vind": 20275, "brah": 20276, "stag": 20277, "ี": 20278, "previews": 20279, "guk": 20280, "containing": 20281, "leonardo": 20282, "saddle": 20283, "rushing": 20284, "stav": 20285, "longh": 20286, "gambling": 20287, "vegas": 20288, "reservation": 20289, "endale": 20290, "bala": 20291, "fla": 20292, "variant": 20293, "hedge": 20294, "bulgaria": 20295, "natali": 20296, "weaver": 20297, "solst": 20298, "encouraged": 20299, "apc": 20300, "asparag": 20301, "nest": 20302, "cyclists": 20303, "fel": 20304, "ìĬ¤": 20305, "overwhelming": 20306, "peyton": 20307, "jit": 20308, "apost": 20309, "mble": 20310, "bleeding": 20311, "neighbourhood": 20312, "avery": 20313, "expressions": 20314, "macdonald": 20315, "gigs": 20316, "monds": 20317, "illusion": 20318, "nct": 20319, "camero": 20320, "overhead": 20321, "myth": 20322, "oly": 20323, "vio": 20324, "etv": 20325, "laurie": 20326, "unveiling": 20327, "prior": 20328, "conn": 20329, "ironman": 20330, "diff": 20331, "dayin": 20332, "critici": 20333, "congo": 20334, "revision": 20335, "wale": 20336, "director": 20337, "pines": 20338, "blackpink": 20339, "garner": 20340, "curated": 20341, "manitoba": 20342, "hac": 20343, 
"commonly": 20344, "barton": 20345, "....#": 20346, "mortality": 20347, "livesmatter": 20348, "philosop": 20349, "shorter": 20350, "convince": 20351, "freak": 20352, "vendors": 20353, "insightful": 20354, "elly": 20355, "sensors": 20356, "eled": 20357, "sberg": 20358, "weightloss": 20359, "ukip": 20360, "spur": 20361, "private": 20362, "qua": 20363, "ssc": 20364, ",...": 20365, "supervisor": 20366, "adviser": 20367, "amazingly": 20368, "lesser": 20369, "ates": 20370, "mahon": 20371, "oooooo": 20372, "saras": 20373, "pmoindia": 20374, "waffle": 20375, "unders": 20376, "tolerance": 20377, "sculptures": 20378, "hersh": 20379, "knocking": 20380, "smoke": 20381, "catholic": 20382, "grim": 20383, "traveled": 20384, "flip": 20385, "geoff": 20386, "dinosaurs": 20387, "slept": 20388, "scarlet": 20389, "oki": 20390, "complaint": 20391, "obsc": 20392, "nami": 20393, "lag": 20394, "crossfit": 20395, "ufc": 20396, "mccain": 20397, "referee": 20398, "sadness": 20399, "penny": 20400, "lieu": 20401, "mode": 20402, "kier": 20403, "vols": 20404, "wis": 20405, "elon": 20406, "shea": 20407, "bao": 20408, "sonia": 20409, "claire": 20410, "emmanuel": 20411, "moisture": 20412, "digest": 20413, "viii": 20414, "teller": 20415, "chon": 20416, "accessory": 20417, "nightclub": 20418, "fossil": 20419, "awan": 20420, "husky": 20421, "aboriginal": 20422, "brandon": 20423, "fficient": 20424, "cougars": 20425, "sted": 20426, "admitted": 20427, "ignored": 20428, "contentmarketing": 20429, "agas": 20430, "vase": 20431, "executed": 20432, "negotiations": 20433, "shead": 20434, "nand": 20435, "tablets": 20436, "goth": 20437, "tsal": 20438, "dfw": 20439, "onep": 20440, "protector": 20441, "spho": 20442, "gazette": 20443, "andreas": 20444, "sser": 20445, "compilation": 20446, "hav": 20447, "containers": 20448, "broker": 20449, "socal": 20450, "porcelain": 20451, "hyuk": 20452, "airing": 20453, "ðŁĴ°": 20454, "publisher": 20455, "scenario": 20456, "spartans": 20457, "reviewing": 20458, "itudes": 20459, "edel": 20460, "pearson": 20461, "bash": 20462, "maui": 20463, "aad": 20464, "ðŁĮĬ": 20465, "liu": 20466, "ulate": 20467, "programmes": 20468, "favour": 20469, "webdesign": 20470, "realty": 20471, "motivational": 20472, "crosses": 20473, "'...": 20474, "busch": 20475, "adjustable": 20476, "arjun": 20477, "mistak": 20478, "dimension": 20479, "pistol": 20480, "weighs": 20481, "eny": 20482, "unveil": 20483, "indycar": 20484, "gordon": 20485, "fade": 20486, "franken": 20487, "qualities": 20488, "bett": 20489, "locate": 20490, "kerr": 20491, "spc": 20492, "confusion": 20493, "nee": 20494, "lucky": 20495, "bases": 20496, "depends": 20497, "firefighter": 20498, "ola": 20499, "ret": 20500, "maroon": 20501, "ðŁĶĬ": 20502, "wam": 20503, "defining": 20504, "wheat": 20505, "bil": 20506, "és": 20507, "bhai": 20508, "psych": 20509, "tau": 20510, "icans": 20511, "thik": 20512, "obile": 20513, "inspector": 20514, "ìĨĮë": 20515, "illon": 20516, "gos": 20517, "evangel": 20518, "fai": 20519, "sist": 20520, "vocation": 20521, "burge": 20522, "chistan": 20523, "renewed": 20524, "enthusiasm": 20525, "enting": 20526, "agri": 20527, "ikea": 20528, "msc": 20529, "aerospace": 20530, "sensiti": 20531, "memoir": 20532, "hospice": 20533, "cocaine": 20534, "derry": 20535, "mechanics": 20536, "Ħà¸": 20537, "tino": 20538, "reduces": 20539, "collectors": 20540, "injustice": 20541, "suppre": 20542, "vana": 20543, "abun": 20544, "napa": 20545, "susa": 20546, "oslo": 20547, "eff": 20548, "encore": 20549, "licence": 20550, "cheddar": 20551, "zal": 20552, "mount": 
20553, "ðŁĴIJ": 20554, "threatens": 20555, "!!\"": 20556, "archie": 20557, "futsal": 20558, "scuba": 20559, "jos": 20560, "gnon": 20561, "sexi": 20562, "sofficial": 20563, "comparing": 20564, "dominant": 20565, "toftheday": 20566, "fait": 20567, "proposals": 20568, "gift": 20569, "yas": 20570, "cnc": 20571, "lr": 20572, "hab": 20573, "reservoir": 20574, "beliefs": 20575, "general": 20576, "marti": 20577, "td": 20578, "este": 20579, "ìł": 20580, "wil": 20581, "ðŁij¯": 20582, "ðŁĶ«": 20583, "spx": 20584, "etwork": 20585, "excerpt": 20586, "einstein": 20587, "hiro": 20588, "silhou": 20589, "teamed": 20590, "perception": 20591, "corridor": 20592, "mentalhealth": 20593, "hints": 20594, "benny": 20595, "inducted": 20596, "swx": 20597, "widesp": 20598, "speak": 20599, "cheryl": 20600, "drug": 20601, "ðŁĺķ": 20602, "hf": 20603, "asparagus": 20604, "mysteries": 20605, "fitzgerald": 20606, "offer": 20607, "therapist": 20608, "career": 20609, "damaging": 20610, "tsd": 20611, "peru": 20612, "weibo": 20613, "yay": 20614, "phoenix": 20615, "discre": 20616, "macbook": 20617, "barker": 20618, "stigma": 20619, "spread": 20620, "rockies": 20621, "kangar": 20622, "bridg": 20623, "pai": 20624, "bishop": 20625, "tailed": 20626, "capsule": 20627, "ðŁĴĵ": 20628, "geof": 20629, "royale": 20630, "shortlisted": 20631, "oste": 20632, "ashamed": 20633, "chapp": 20634, "keye": 20635, "cla": 20636, "screenshot": 20637, "austrian": 20638, "native": 20639, "enight": 20640, "juliet": 20641, "michele": 20642, "ðŁĮ´": 20643, "travelers": 20644, "pil": 20645, "footballer": 20646, "winchester": 20647, "ðŁĻĦ": 20648, "azerbai": 20649, "goldeng": 20650, "organisations": 20651, "interpretation": 20652, "predator": 20653, "oftheweek": 20654, "logan": 20655, "poké": 20656, "marie": 20657, "calla": 20658, "tnt": 20659, "cinde": 20660, "getic": 20661, "fitfam": 20662, "grav": 20663, "owens": 20664, "ðŁĮ±": 20665, "shootout": 20666, "salis": 20667, "commissions": 20668, "cohe": 20669, "ptic": 20670, "nixon": 20671, "hia": 20672, "ambition": 20673, "marine": 20674, "cruelty": 20675, "tk": 20676, "crude": 20677, "salty": 20678, "jima": 20679, "mongo": 20680, "irony": 20681, "onwards": 20682, "arrests": 20683, "strangers": 20684, "iger": 20685, "cyclist": 20686, "rag": 20687, "extends": 20688, "tradio": 20689, "bourg": 20690, "moi": 20691, "ella": 20692, "eable": 20693, "lexus": 20694, "aul": 20695, "dera": 20696, "historian": 20697, "morton": 20698, "tiff": 20699, "manner": 20700, "kot": 20701, "dk": 20702, "pointed": 20703, "marqu": 20704, "aan": 20705, "eney": 20706, "dublin": 20707, "onpoli": 20708, "emili": 20709, "secret": 20710, "flo": 20711, "âļ¡": 20712, "baj": 20713, "steep": 20714, "accompanied": 20715, "rumours": 20716, "devi": 20717, "purchasing": 20718, "fig": 20719, "pub": 20720, "schoo": 20721, "autonomous": 20722, "goalie": 20723, "xia": 20724, "automatically": 20725, "revers": 20726, "tero": 20727, "fuku": 20728, "titanic": 20729, "shook": 20730, "sandals": 20731, "seekers": 20732, "excav": 20733, "nordic": 20734, "bigolive": 20735, "bake": 20736, "ratt": 20737, "zak": 20738, "nep": 20739, "ðŁĺ¤": 20740, "candy": 20741, "billions": 20742, "bookworm": 20743, "ppet": 20744, "à³": 20745, "surfaces": 20746, "scars": 20747, "philip": 20748, "dogg": 20749, "cigars": 20750, "cote": 20751, "translated": 20752, "curator": 20753, "sindh": 20754, "hangover": 20755, "brewer": 20756, "ones": 20757, "elton": 20758, "ðŁĴªðŁı¼": 20759, "marcu": 20760, "elliot": 20761, "righte": 20762, "dioce": 20763, "russ": 20764, "railways": 20765, 
"grandson": 20766, "ascen": 20767, "apology": 20768, "await": 20769, "mobili": 20770, "respir": 20771, "partisan": 20772, "olivi": 20773, "strike": 20774, "yoo": 20775, "whitehouse": 20776, "expressed": 20777, "pups": 20778, "bedford": 20779, "cultur": 20780, "frogs": 20781, "flying": 20782, "cavali": 20783, "cds": 20784, "friger": 20785, "streetphotography": 20786, "resolve": 20787, "taliban": 20788, "kang": 20789, "crushing": 20790, "jum": 20791, "ðŁĺĴ": 20792, "williamson": 20793, "tang": 20794, "curly": 20795, "tman": 20796, "veteran": 20797, "faire": 20798, "artificialintelligence": 20799, "unanim": 20800, "pren": 20801, "backdrop": 20802, "frances": 20803, "occer": 20804, "dorothy": 20805, "working": 20806, "arthr": 20807, "converted": 20808, "daylight": 20809, "servant": 20810, "paddle": 20811, "complaining": 20812, "thirty": 20813, "nadal": 20814, "aku": 20815, "ibrahim": 20816, "addressed": 20817, "piss": 20818, "greenhouse": 20819, "battalion": 20820, "simulator": 20821, "outlets": 20822, "embroidery": 20823, "ðŁĵ±": 20824, "fiscal": 20825, "gerard": 20826, "sassy": 20827, "ðŁİīðŁİīðŁİī": 20828, "ventures": 20829, "merit": 20830, "publicity": 20831, "ðŁijĪ": 20832, "sophisticated": 20833, "ctu": 20834, "conventional": 20835, "condolences": 20836, "israel": 20837, "tradition": 20838, "aran": 20839, "tess": 20840, "glad": 20841, "ðŁĺĬðŁĺĬ": 20842, "correction": 20843, "geon": 20844, "amd": 20845, "orship": 20846, "beast": 20847, "chment": 20848, "ìŀ": 20849, "nico": 20850, "wknd": 20851, "wels": 20852, "cushion": 20853, "belie": 20854, "voc": 20855, "idiots": 20856, "underneath": 20857, "puma": 20858, "cornell": 20859, "enation": 20860, "lul": 20861, "swach": 20862, "abig": 20863, "urer": 20864, "mie": 20865, "formerly": 20866, "caf": 20867, "ernal": 20868, "chorus": 20869, "julius": 20870, "senator": 20871, "âľį": 20872, "whir": 20873, "salvador": 20874, "phd": 20875, "unified": 20876, "booster": 20877, "graphical": 20878, "wrec": 20879, "sonny": 20880, "miz": 20881, "derers": 20882, "sall": 20883, "vens": 20884, "tuscany": 20885, "wid": 20886, "yong": 20887, "kurds": 20888, "waz": 20889, "trolls": 20890, "macro": 20891, "caturday": 20892, "pressing": 20893, "sasha": 20894, "centennial": 20895, "gusts": 20896, "emc": 20897, "before": 20898, "denise": 20899, "cust": 20900, "ðŁĵ¢": 20901, "looo": 20902, "basel": 20903, "england": 20904, "yolo": 20905, "ardu": 20906, "manifesto": 20907, "doha": 20908, "ìľ": 20909, "knives": 20910, "bournemouth": 20911, "bibl": 20912, "barb": 20913, "alicia": 20914, "Ø©": 20915, "comer": 20916, "cyclone": 20917, "git": 20918, "anews": 20919, "characteri": 20920, "ventura": 20921, "intra": 20922, "sfgiants": 20923, "hut": 20924, "bea": 20925, "darwin": 20926, "eller": 20927, "alv": 20928, "reese": 20929, "bly": 20930, "karan": 20931, "conclusion": 20932, "manny": 20933, "flakes": 20934, "uniteblue": 20935, "nadu": 20936, "copp": 20937, "edges": 20938, "lancashire": 20939, "ials": 20940, "otta": 20941, "philippe": 20942, "lent": 20943, "chee": 20944, "mentors": 20945, "festival": 20946, "anism": 20947, "complimentary": 20948, "rj": 20949, "pug": 20950, "dine": 20951, "wei": 20952, "cliffs": 20953, "sarmy": 20954, "tiveness": 20955, "treasury": 20956, "iland": 20957, "aftermath": 20958, "rabbi": 20959, "oun": 20960, "bouquet": 20961, "heritage": 20962, "zion": 20963, "surrender": 20964, "shenan": 20965, "inks": 20966, "karl": 20967, "ghty": 20968, "policing": 20969, "examination": 20970, "cey": 20971, "persu": 20972, "measurement": 20973, "hydrogen": 
20974, "luhan": 20975, "âłĢâłĢâłĢâłĢ": 20976, "wari": 20977, "оÐ": 20978, "jy": 20979, "fowler": 20980, "mish": 20981, "alfre": 20982, "âĺij": 20983, "bbnaija": 20984, "catalogue": 20985, "recognised": 20986, "saver": 20987, "huskies": 20988, "colin": 20989, "mundo": 20990, "siva": 20991, "png": 20992, "discounted": 20993, "manutd": 20994, "fresno": 20995, "devin": 20996, "preliminary": 20997, "trophies": 20998, "plastics": 20999, "dug": 21000, "procu": 21001, "indigo": 21002, "gard": 21003, "dylan": 21004, "pitches": 21005, "groundbreaking": 21006, "inson": 21007, "blac": 21008, "anthology": 21009, "fh": 21010, "explic": 21011, "rard": 21012, "admiral": 21013, "sochi": 21014, "lashes": 21015, "splendid": 21016, "envy": 21017, "adv": 21018, "sexy": 21019, "festivities": 21020, "sticking": 21021, "bib": 21022, "thrill": 21023, "opp": 21024, "ariel": 21025, "botanical": 21026, "endurance": 21027, "females": 21028, "bricks": 21029, "vatican": 21030, "blackpool": 21031, "bermu": 21032, "brough": 21033, "roller": 21034, "bid": 21035, "suede": 21036, "slovenia": 21037, "mming": 21038, "mlb": 21039, "medalist": 21040, "dians": 21041, "rehabilitation": 21042, "neon": 21043, "sgo": 21044, "lithu": 21045, "ramos": 21046, "zed": 21047, "pianist": 21048, "intensive": 21049, "broadband": 21050, "study": 21051, "petersburg": 21052, "luca": 21053, "ahhhh": 21054, "physician": 21055, "dillon": 21056, "telecom": 21057, "grief": 21058, "mun": 21059, "acro": 21060, "sided": 21061, "sly": 21062, "blows": 21063, "classiccars": 21064, "trium": 21065, "argy": 21066, "?:": 21067, "hri": 21068, "marshmal": 21069, "âĢĵ": 21070, "topping": 21071, "warsaw": 21072, "transc": 21073, "preservation": 21074, "bav": 21075, "refriger": 21076, "experiments": 21077, "äº": 21078, "glit": 21079, "sliga": 21080, "gage": 21081, "factor": 21082, "flavours": 21083, "brony": 21084, "spo": 21085, "cookbook": 21086, "carriage": 21087, "away": 21088, "nyfw": 21089, "onian": 21090, "wg": 21091, "simpsons": 21092, "rolex": 21093, "ðŁı¿": 21094, "crosby": 21095, "ãħ¤": 21096, "credi": 21097, "syndic": 21098, "pubs": 21099, "alife": 21100, "poorly": 21101, "maced": 21102, "ðŁĺŀ": 21103, "behindthe": 21104, "wenger": 21105, "nats": 21106, "ðŁİŁ": 21107, "rubbish": 21108, "procedures": 21109, "typhoon": 21110, "ophobia": 21111, "erdo": 21112, "fuel": 21113, "viera": 21114, "bumps": 21115, "millennium": 21116, "newzealand": 21117, "lectures": 21118, "iton": 21119, "milky": 21120, "responded": 21121, "ê°": 21122, "landscape": 21123, "..@": 21124, "bother": 21125, "âĸ¶": 21126, "zhang": 21127, "huawei": 21128, "tuition": 21129, "sworn": 21130, "inu": 21131, "yor": 21132, "paolo": 21133, "auditions": 21134, "abil": 21135, "malaysian": 21136, "hops": 21137, "feathers": 21138, "mple": 21139, "auts": 21140, "ão": 21141, "bounty": 21142, "iche": 21143, "ìĺ": 21144, "shq": 21145, "pinot": 21146, "gears": 21147, "disappear": 21148, "videogames": 21149, "tna": 21150, "alzheimer": 21151, "ðŁĮŀ": 21152, "aji": 21153, "underwear": 21154, "switching": 21155, "signage": 21156, "oscar": 21157, "econ": 21158, "drow": 21159, "clint": 21160, "plated": 21161, "gundy": 21162, "emblem": 21163, "hoes": 21164, "icist": 21165, "nelly": 21166, "junior": 21167, "roadshow": 21168, "minerals": 21169, "atle": 21170, "alexandria": 21171, "acclaimed": 21172, "vell": 21173, "shiva": 21174, "adhe": 21175, "enne": 21176, "amnesty": 21177, "hounds": 21178, "councillor": 21179, "ðŁĴ¦": 21180, "aesthe": 21181, "partnering": 21182, "influenced": 21183, "magno": 21184, "flare": 
21185, "extinction": 21186, "civilian": 21187, "majesty": 21188, "vail": 21189, "lawmakers": 21190, "racks": 21191, "mcc": 21192, "orian": 21193, "spices": 21194, "errors": 21195, "mayer": 21196, "coca": 21197, "pai": 21198, "sooooo": 21199, "retiring": 21200, "bathro": 21201, "ðŁĻĮðŁĻĮ": 21202, "âĸª": 21203, "suf": 21204, "endorsement": 21205, "building": 21206, "brooch": 21207, "palla": 21208, "arvind": 21209, "agent": 21210, "karate": 21211, "rhi": 21212, "ctv": 21213, "taine": 21214, "umm": 21215, "bax": 21216, "reigns": 21217, "uniof": 21218, "enterprises": 21219, "adele": 21220, "flake": 21221, "attire": 21222, "bruce": 21223, "bahamas": 21224, "gravy": 21225, "sain": 21226, "cheek": 21227, "trivi": 21228, "lov": 21229, "een": 21230, "bblo": 21231, "ladygaga": 21232, "itta": 21233, ".\"-": 21234, "dustin": 21235, "observatory": 21236, "eighth": 21237, "bloomberg": 21238, "khs": 21239, "fcc": 21240, "gist": 21241, "commemorate": 21242, "veer": 21243, "sexuality": 21244, "edc": 21245, "nicole": 21246, "vacancy": 21247, "user": 21248, "sona": 21249, ":'(": 21250, "diploma": 21251, "tend": 21252, "upgrades": 21253, "ÅŁ": 21254, "jurassic": 21255, "cardiac": 21256, "drs": 21257, "widespread": 21258, "Ãł": 21259, "dailies": 21260, "vendor": 21261, "simplicity": 21262, "wider": 21263, "lenses": 21264, "supplements": 21265, "depos": 21266, "observed": 21267, "vines": 21268, "partially": 21269, "renewal": 21270, "collaborate": 21271, "alig": 21272, "finity": 21273, "phu": 21274, "zzy": 21275, "petit": 21276, "ðŁĵħ": 21277, "zin": 21278, "igu": 21279, "smack": 21280, "fallon": 21281, "ðŁĵ£": 21282, "backwards": 21283, "component": 21284, "oso": 21285, "compatible": 21286, "binding": 21287, "zurich": 21288, "thome": 21289, "wounds": 21290, "lyric": 21291, "freshmen": 21292, "sneaky": 21293, "fibro": 21294, "diet": 21295, "employer": 21296, "insect": 21297, "hated": 21298, "scher": 21299, "razor": 21300, "nsw": 21301, "booker": 21302, "californi": 21303, "avfc": 21304, "°": 21305, "pretending": 21306, "pepsi": 21307, "alis": 21308, "untitled": 21309, "kart": 21310, "grandparents": 21311, "ethe": 21312, "ock": 21313, "luxemb": 21314, "visuals": 21315, "smallbusiness": 21316, "abdullah": 21317, "minho": 21318, "subaru": 21319, "hra": 21320, "revealing": 21321, "heartbreaking": 21322, "clarity": 21323, "amg": 21324, "slr": 21325, "****": 21326, "âŀĸ": 21327, "record": 21328, "iciary": 21329, "minded": 21330, "yeh": 21331, "excessive": 21332, "knuck": 21333, "icecream": 21334, "truth": 21335, "evic": 21336, "tastic": 21337, "antarc": 21338, "rendering": 21339, ",,": 21340, "mitt": 21341, "lorenzo": 21342, "stpatrick": 21343, "boundary": 21344, "zig": 21345, "vocab": 21346, "osaka": 21347, "furn": 21348, "tun": 21349, "gul": 21350, "sounding": 21351, "blogger": 21352, "utterly": 21353, "gaf": 21354, "advancing": 21355, "lcd": 21356, "margin": 21357, "lifelong": 21358, "solstice": 21359, "shra": 21360, "waits": 21361, "plear": 21362, "breach": 21363, "enligh": 21364, "ader": 21365, "ittle": 21366, "cation": 21367, "hoon": 21368, "studied": 21369, "?????": 21370, "kash": 21371, "evangeli": 21372, "psl": 21373, "weights": 21374, "metals": 21375, "tyres": 21376, "turno": 21377, "wie": 21378, "carb": 21379, "gale": 21380, "seal": 21381, "sunite": 21382, "amic": 21383, "patterson": 21384, "án": 21385, "euph": 21386, "upstairs": 21387, "qualifiers": 21388, "khalifa": 21389, "applemusic": 21390, "ìĨĮëħ": 21391, "vaughan": 21392, "alter": 21393, "cruiser": 21394, "mua": 21395, "tana": 21396, "katrina": 21397, 
"idols": 21398, "spoiled": 21399, "secretly": 21400, "fibre": 21401, "partnered": 21402, "umes": 21403, "giov": 21404, "comet": 21405, "screenshotsaturday": 21406, "keller": 21407, "filtr": 21408, "fet": 21409, "conway": 21410, "peu": 21411, "badminton": 21412, "gid": 21413, "mound": 21414, "donkey": 21415, "buff": 21416, "leather": 21417, "largely": 21418, "broch": 21419, "intments": 21420, "amuse": 21421, "rk": 21422, "stove": 21423, "impacted": 21424, "cont": 21425, "cracks": 21426, "prisoner": 21427, "bari": 21428, "contractor": 21429, "orioles": 21430, "dominate": 21431, "polar": 21432, "amelia": 21433, "drc": 21434, "ðŁijĮðŁijĮ": 21435, "vist": 21436, "suarez": 21437, "injection": 21438, "blooms": 21439, "ðŁļ¨ðŁļ¨": 21440, "stiff": 21441, "paypal": 21442, "snowing": 21443, "thursdays": 21444, "goose": 21445, "wedge": 21446, "educated": 21447, "weakness": 21448, "decker": 21449, "abudha": 21450, "breezy": 21451, "ÛĮ": 21452, "hopeful": 21453, "obi": 21454, "raider": 21455, "gham": 21456, "deu": 21457, "seve": 21458, "partly": 21459, "fut": 21460, "infused": 21461, "merri": 21462, "thane": 21463, "sometime": 21464, "hue": 21465, "mein": 21466, "credit": 21467, "sliding": 21468, "rande": 21469, "cherry": 21470, "deadpool": 21471, "shol": 21472, "aram": 21473, "underwood": 21474, "skye": 21475, "disturbing": 21476, "mnt": 21477, "polished": 21478, "guardians": 21479, "hadn": 21480, "picasso": 21481, "arius": 21482, "akshay": 21483, "irri": 21484, "jh": 21485, "happen": 21486, "lakh": 21487, "dalton": 21488, "atthe": 21489, "swell": 21490, "marsha": 21491, "reh": 21492, "cours": 21493, "jkt": 21494, "topus": 21495, "service": 21496, "rink": 21497, "hackers": 21498, "donovan": 21499, "horo": 21500, "tcm": 21501, "mayhem": 21502, "chase": 21503, "devops": 21504, "kensing": 21505, "scup": 21506, "shere": 21507, "qualification": 21508, "clive": 21509, "tong": 21510, "nancy": 21511, "maris": 21512, "derdale": 21513, "berman": 21514, "cinderella": 21515, "jolly": 21516, "cic": 21517, "loot": 21518, "collectibles": 21519, "homicide": 21520, "gge": 21521, "epidemic": 21522, "suites": 21523, "muddy": 21524, "gimme": 21525, "erec": 21526, "-*": 21527, "talla": 21528, "lisle": 21529, "embroide": 21530, "ðŁĩ©ðŁĩª": 21531, "verizon": 21532, "vector": 21533, "beanie": 21534, "artisan": 21535, "gain": 21536, "flores": 21537, "vigil": 21538, "uso": 21539, "ðŁĻıðŁı½": 21540, "grinding": 21541, "gher": 21542, "airports": 21543, "responsive": 21544, "shaft": 21545, "cancel": 21546, "ceremonies": 21547, "eme": 21548, "atari": 21549, "brushes": 21550, "eager": 21551, "bohemi": 21552, "childrens": 21553, "yankee": 21554, "maa": 21555, "suspense": 21556, "moran": 21557, "macar": 21558, "sunflower": 21559, "crew": 21560, "void": 21561, "kear": 21562, "fashioned": 21563, "jennings": 21564, "sundayfunday": 21565, "submissions": 21566, "mead": 21567, "herman": 21568, "wai": 21569, "critically": 21570, "leum": 21571, "baekhyun": 21572, "forcing": 21573, "cobra": 21574, "ãģ®": 21575, "acquire": 21576, "alk": 21577, "geology": 21578, "primar": 21579, "importantly": 21580, "irez": 21581, "bundesliga": 21582, "curiosity": 21583, "sena": 21584, "strict": 21585, "consoli": 21586, "winters": 21587, "venom": 21588, "cheltenham": 21589, "ðŁįº": 21590, "cena": 21591, "tat": 21592, "bain": 21593, "glover": 21594, "undercover": 21595, "asses": 21596, "carn": 21597, "memorialday": 21598, "ameli": 21599, "irene": 21600, "chon": 21601, "synthesis": 21602, "speedy": 21603, "mitsubi": 21604, "slayer": 21605, "composite": 21606, 
"understands": 21607, "pew": 21608, "interrup": 21609, "henri": 21610, "morrow": 21611, "anom": 21612, "thofjuly": 21613, "glee": 21614, "three": 21615, "ðŁĺ®": 21616, "andhi": 21617, "chatt": 21618, "renewables": 21619, "yes": 21620, "transfers": 21621, "!!!!!!!!": 21622, "babu": 21623, "duter": 21624, "loops": 21625, "peers": 21626, "oilers": 21627, "paulo": 21628, "ication": 21629, "hmu": 21630, "wara": 21631, "mercer": 21632, "homeland": 21633, "fuji": 21634, "aley": 21635, "yearbook": 21636, "rem": 21637, "reen": 21638, "absur": 21639, "bois": 21640, "]:": 21641, "caesar": 21642, "shotgun": 21643, "kurdish": 21644, "oren": 21645, "rae": 21646, "ancies": 21647, "typic": 21648, "fh": 21649, "default": 21650, "replic": 21651, "luk": 21652, "transactions": 21653, "rys": 21654, "infantry": 21655, "ðŁį¾": 21656, "chow": 21657, "chickens": 21658, "bagh": 21659, "wyatt": 21660, "aye": 21661, "ggi": 21662, "brews": 21663, "editions": 21664, "mira": 21665, "commencement": 21666, "presu": 21667, "periscope": 21668, "ichi": 21669, "guatemala": 21670, "zambia": 21671, "paints": 21672, "witches": 21673, "wani": 21674, "undere": 21675, "croy": 21676, "vows": 21677, "usmc": 21678, "hearted": 21679, "theatres": 21680, "shuffle": 21681, "level": 21682, "multic": 21683, "squeeze": 21684, "fern": 21685, "appet": 21686, "postal": 21687, "malt": 21688, "onboard": 21689, "ldnt": 21690, "coo": 21691, "ssc": 21692, "kac": 21693, "ðŁĺĩ": 21694, "scrap": 21695, "marcos": 21696, "dealers": 21697, "annu": 21698, "miller": 21699, "cove": 21700, "ulary": 21701, "vladimir": 21702, "beef": 21703, "thur": 21704, "pickled": 21705, "sesame": 21706, "bengaluru": 21707, "mott": 21708, "kathleen": 21709, "hist": 21710, "notor": 21711, "drank": 21712, "duchess": 21713, "snowfall": 21714, "eff": 21715, "tiny": 21716, "jn": 21717, "syour": 21718, "specialists": 21719, "scotus": 21720, "baylor": 21721, "everest": 21722, "malibu": 21723, "prem": 21724, "harmful": 21725, "lali": 21726, "bates": 21727, "gye": 21728, "differenti": 21729, "andra": 21730, "geometry": 21731, "elover": 21732, "blackout": 21733, "====": 21734, "kota": 21735, "interact": 21736, "asian": 21737, "layo": 21738, "samurai": 21739, "fidel": 21740, "exhausted": 21741, "gladi": 21742, "pdt": 21743, "spheric": 21744, "antiqu": 21745, "guitar": 21746, "sturi": 21747, "hopper": 21748, "angle": 21749, "fills": 21750, "slap": 21751, "mith": 21752, "rodney": 21753, "ongi": 21754, "insom": 21755, "preventing": 21756, "cassidy": 21757, "apho": 21758, "oregon": 21759, "loin": 21760, "hammond": 21761, "contributing": 21762, "fn": 21763, "garri": 21764, "orion": 21765, "compelling": 21766, "escaping": 21767, "aiming": 21768, "plumb": 21769, "bistro": 21770, "beasts": 21771, "concerning": 21772, "boe": 21773, "dopp": 21774, "shoplocal": 21775, "stumbled": 21776, "âĤ¹": 21777, "nazis": 21778, "âĢįâĻĤï¸ı": 21779, "gesture": 21780, "warts": 21781, "usopen": 21782, "higgins": 21783, "charli": 21784, "hangs": 21785, "bombers": 21786, "°:": 21787, "feeds": 21788, "cch": 21789, "stil": 21790, "nicola": 21791, "ðŁĵº": 21792, "clamation": 21793, "tropic": 21794, "afro": 21795, "ouk": 21796, "expenses": 21797, "derrick": 21798, "aline": 21799, "faw": 21800, "regard": 21801, "imer": 21802, "satin": 21803, "thium": 21804, "ryder": 21805, "pearl": 21806, "tess": 21807, "mmmmm": 21808, "senses": 21809, "ðŁĩ¹": 21810, "positive": 21811, "exhaust": 21812, "occur": 21813, "norris": 21814, "lilly": 21815, "isles": 21816, "directing": 21817, "yofficial": 21818, "countless": 21819, "samar": 
21820, "onstage": 21821, "flock": 21822, "mirrors": 21823, "archer": 21824, "moi": 21825, "kd": 21826, "viv": 21827, "inos": 21828, "sikh": 21829, "lei": 21830, "sensory": 21831, "brits": 21832, "knox": 21833, "chestnut": 21834, "opy": 21835, "coliseum": 21836, "zaf": 21837, "divin": 21838, "adapter": 21839, ":)))": 21840, "temple": 21841, "kun": 21842, "helmets": 21843, "tdf": 21844, "guide": 21845, "mold": 21846, "oids": 21847, "luther": 21848, "heis": 21849, "monastery": 21850, "spree": 21851, "klu": 21852, "britney": 21853, "jaguars": 21854, "greats": 21855, "ccc": 21856, "kyrie": 21857, "machinery": 21858, "cricket": 21859, "rero": 21860, "abo": 21861, "aspiring": 21862, "semifinals": 21863, "aless": 21864, "signatures": 21865, "vard": 21866, "meth": 21867, "herbal": 21868, "holden": 21869, "kingdom": 21870, "apor": 21871, "reggie": 21872, "oreo": 21873, "palestinians": 21874, "emmys": 21875, "sectional": 21876, "roi": 21877, "neymar": 21878, "quel": 21879, "cull": 21880, "lka": 21881, "hazel": 21882, "estimate": 21883, "ulties": 21884, "gow": 21885, "bea": 21886, "purchases": 21887, "belts": 21888, "protects": 21889, "mé": 21890, "guessing": 21891, "bbo": 21892, "claudia": 21893, "fracking": 21894, "jonny": 21895, "elk": 21896, "celtic": 21897, "almighty": 21898, "raje": 21899, "courtyard": 21900, "igi": 21901, "canes": 21902, "ðŁĴªðŁı»": 21903, "bankrup": 21904, "lethal": 21905, "âľĮï¸ı": 21906, "graphicdesign": 21907, "vader": 21908, "pencils": 21909, "roughly": 21910, "dante": 21911, "mfg": 21912, "constell": 21913, "camel": 21914, "jb": 21915, "blossoms": 21916, "ento": 21917, "balochistan": 21918, "cinemato": 21919, "illard": 21920, "jersey": 21921, "consent": 21922, "dented": 21923, "contempl": 21924, "scher": 21925, "holi": 21926, "lough": 21927, "stour": 21928, "ayo": 21929, "beginners": 21930, "curb": 21931, "vhs": 21932, "ajax": 21933, "duff": 21934, "aveng": 21935, "domest": 21936, "committing": 21937, "aired": 21938, "chap": 21939, "hedgehog": 21940, "disappointing": 21941, "freelance": 21942, "inland": 21943, "charms": 21944, "ðŁĺįâĿ¤ï¸ı": 21945, "aish": 21946, "mx": 21947, "buckle": 21948, "tidal": 21949, "permit": 21950, "boating": 21951, "racha": 21952, "kendrick": 21953, "bello": 21954, "bhi": 21955, "plea": 21956, "estimates": 21957, "lb": 21958, "apologies": 21959, "jaya": 21960, "bbl": 21961, "astoni": 21962, "interstate": 21963, "maintaining": 21964, "elbow": 21965, "mup": 21966, "epit": 21967, "ðŁĺ¡": 21968, "violations": 21969, "defend": 21970, "beh": 21971, "slc": 21972, "amir": 21973, "puri": 21974, "tium": 21975, "fifa": 21976, "blurry": 21977, "scrim": 21978, "ðŁĻıðŁı¾": 21979, "maple": 21980, "relatives": 21981, "âĺĿ": 21982, "choc": 21983, "connor": 21984, "⾨⾨": 21985, "whisp": 21986, "listings": 21987, "maze": 21988, "thanking": 21989, "ridd": 21990, "grassroots": 21991, "shifting": 21992, "desperately": 21993, "gorilla": 21994, "deni": 21995, "jules": 21996, "strath": 21997, "gley": 21998, "jain": 21999, "buick": 22000, "tanner": 22001, "ðŁĴĿ": 22002, "gae": 22003, "prim": 22004, "itors": 22005, "nano": 22006, "separation": 22007, "armenia": 22008, "bordeaux": 22009, "ðŁħ": 22010, "pjnet": 22011, "burial": 22012, "ebon": 22013, "gloss": 22014, "renew": 22015, "grier": 22016, "speeds": 22017, "comicbooks": 22018, "symboli": 22019, "purposes": 22020, "ãħłãħł": 22021, "spatial": 22022, "notable": 22023, "cion": 22024, "nps": 22025, "hoffman": 22026, "norman": 22027, "rtg": 22028, "dusty": 22029, "situated": 22030, "tran": 22031, "kfc": 22032, "emen": 
22033, "nickel": 22034, "hastings": 22035, "settling": 22036, "grit": 22037, "lena": 22038, "waw": 22039, "arts": 22040, "gum": 22041, "caregi": 22042, "lewis": 22043, "sapphire": 22044, "remember": 22045, "embedded": 22046, "tlc": 22047, "blat": 22048, "sergeant": 22049, "elsa": 22050, "bootcamp": 22051, "bowman": 22052, "photographic": 22053, "pillars": 22054, "directioners": 22055, "classified": 22056, "nois": 22057, "veer": 22058, "barrels": 22059, "whoop": 22060, "ðŁĺ±ðŁĺ±": 22061, "female": 22062, "petroleum": 22063, "media": 22064, "efc": 22065, "pokémon": 22066, "à¤ķ": 22067, "enthusiastic": 22068, "varun": 22069, "profiles": 22070, "pediatric": 22071, "accidents": 22072, "conrad": 22073, "jang": 22074, "jojo": 22075, "acor": 22076, "observer": 22077, "lf": 22078, "livestock": 22079, "forgi": 22080, "fos": 22081, "elm": 22082, "anand": 22083, "goe": 22084, "cere": 22085, "avoiding": 22086, "grit": 22087, "oman": 22088, "thankfully": 22089, "scattered": 22090, "nicky": 22091, "cylinder": 22092, "cheesy": 22093, "diver": 22094, "mahesh": 22095, "caves": 22096, "earliest": 22097, "quinte": 22098, "subjects": 22099, "bend": 22100, "gulf": 22101, "vocalist": 22102, "glue": 22103, "patches": 22104, "unstopp": 22105, "snyder": 22106, "demonstrating": 22107, "pio": 22108, "horns": 22109, "wickets": 22110, "andthe": 22111, "rama": 22112, "yoon": 22113, "straight": 22114, "bedtime": 22115, "orang": 22116, "bullets": 22117, "saurus": 22118, "miners": 22119, "incidents": 22120, "!...": 22121, "ðŁİ¸": 22122, "agers": 22123, "handles": 22124, "states": 22125, "inity": 22126, "dons": 22127, "incredible": 22128, "eminem": 22129, "aviv": 22130, "rudy": 22131, "mozart": 22132, "folklore": 22133, "appliances": 22134, "mtl": 22135, "frey": 22136, "dias": 22137, "hua": 22138, "pageant": 22139, "strive": 22140, "imprison": 22141, "bullish": 22142, "rana": 22143, "alerts": 22144, "bbmas": 22145, "hyper": 22146, "derbyshire": 22147, "recre": 22148, "redd": 22149, "deborah": 22150, "cosmos": 22151, "lawson": 22152, "melanie": 22153, "psycho": 22154, "hoor": 22155, "doodles": 22156, "sniper": 22157, "shady": 22158, "mantle": 22159, "canadian": 22160, "newyear": 22161, "interactions": 22162, "separated": 22163, "cords": 22164, "spirituality": 22165, "apu": 22166, "ito": 22167, "pct": 22168, "pelosi": 22169, "rebellion": 22170, "seiz": 22171, "worcester": 22172, "sectors": 22173, "uli": 22174, "santa": 22175, "е": 22176, "ðŁĩªðŁĩ¸": 22177, "biased": 22178, "classical": 22179, "gamma": 22180, "deeplear": 22181, "emerge": 22182, "backer": 22183, "surance": 22184, "handcrafted": 22185, "ðŁİ¥": 22186, "francis": 22187, "millan": 22188, "ici": 22189, "crown": 22190, "wow": 22191, "striped": 22192, "unfair": 22193, "relaxation": 22194, "³ï¸ı": 22195, "embracing": 22196, "shealth": 22197, "paleo": 22198, "martini": 22199, "distillery": 22200, "wrink": 22201, "ork": 22202, "nath": 22203, "hayley": 22204, "courthouse": 22205, "siber": 22206, "sadi": 22207, "quietly": 22208, "melt": 22209, "msm": 22210, "meh": 22211, "smartphones": 22212, "relent": 22213, "pping": 22214, "warwick": 22215, "cologne": 22216, "glia": 22217, "cotton": 22218, "prog": 22219, "lone": 22220, "ipsw": 22221, "starters": 22222, "expands": 22223, "ump": 22224, "sued": 22225, "skipper": 22226, "infections": 22227, "ingle": 22228, "á": 22229, "clerk": 22230, "demonstrate": 22231, "acar": 22232, "ðŁĺĤðŁĺĤðŁĺĤ": 22233, "tibet": 22234, "buns": 22235, "alom": 22236, "demolition": 22237, "ssia": 22238, "gst": 22239, "[]": 22240, "soar": 22241, "âĺĢ": 
22242, "ðŁĺª": 22243, "ðŁĵĬ": 22244, "deepest": 22245, "beyond": 22246, "aret": 22247, "attends": 22248, "activated": 22249, "dimit": 22250, "âļªï¸ı": 22251, "highlighted": 22252, "magazines": 22253, "rumor": 22254, "azza": 22255, "stephens": 22256, "dolph": 22257, "shockey": 22258, "mats": 22259, "weav": 22260, "melan": 22261, "servers": 22262, "traum": 22263, "kush": 22264, "æĹ": 22265, "babys": 22266, "paz": 22267, "aal": 22268, "lause": 22269, "breakers": 22270, "canterbury": 22271, "ulture": 22272, "miri": 22273, "euros": 22274, "taneous": 22275, "impressions": 22276, "dutch": 22277, "ild": 22278, "ghi": 22279, "purdue": 22280, "adequate": 22281, "lp": 22282, "syner": 22283, "angler": 22284, "durable": 22285, "galore": 22286, "rown": 22287, "mgmt": 22288, "ðŁĵĮ": 22289, "lucia": 22290, "âĺijï¸ı": 22291, "zayn": 22292, "borrow": 22293, ".(": 22294, "northumber": 22295, "crush": 22296, "enga": 22297, "sush": 22298, "extravag": 22299, "tout": 22300, "mahal": 22301, "alistic": 22302, "thermo": 22303, "galleries": 22304, "esse": 22305, "chibi": 22306, "attractions": 22307, "lexington": 22308, "legislature": 22309, "documented": 22310, "residen": 22311, "brownies": 22312, "wf": 22313, "stool": 22314, "planets": 22315, "shoppers": 22316, "conductor": 22317, "msp": 22318, "tricky": 22319, "fruity": 22320, "endra": 22321, "feelthe": 22322, "whipped": 22323, "hairstyle": 22324, "refer": 22325, "ook": 22326, "octopus": 22327, "audiences": 22328, "kumar": 22329, "afterno": 22330, "optim": 22331, "cfl": 22332, "nip": 22333, "geni": 22334, "alphabet": 22335, "annab": 22336, "lamin": 22337, "accepts": 22338, "lng": 22339, "ðŁĺ«": 22340, "tine": 22341, "acom": 22342, "cheerleaders": 22343, "tk": 22344, "gron": 22345, "vg": 22346, "kung": 22347, "jax": 22348, "dhabi": 22349, "rss": 22350, "mackenzie": 22351, "beirut": 22352, "cleanup": 22353, "gypsy": 22354, "stell": 22355, "burger": 22356, "hurricanes": 22357, "education": 22358, "stina": 22359, "âĻ¡âĻ¡": 22360, "unfortunate": 22361, "jeremi": 22362, "badger": 22363, "aters": 22364, ":â̦": 22365, "terra": 22366, "sublime": 22367, "stud": 22368, "ymca": 22369, "mru": 22370, "duterte": 22371, "brennan": 22372, "bulb": 22373, "melo": 22374, "ylon": 22375, "hacker": 22376, "cred": 22377, "gud": 22378, "asan": 22379, "padilla": 22380, "embroidered": 22381, "vietnamese": 22382, "pioneers": 22383, "projection": 22384, "reboot": 22385, "idc": 22386, "aney": 22387, "primer": 22388, "suffers": 22389, "winding": 22390, "pon": 22391, "stoday": 22392, "morn": 22393, "uch": 22394, "allin": 22395, "adidas": 22396, "elizabeth": 22397, "tuck": 22398, "ography": 22399, "ðŁļĢ": 22400, "beg": 22401, "osborne": 22402, "ghetto": 22403, "rh": 22404, "cnn": 22405, "irma": 22406, "makin": 22407, "cables": 22408, "murders": 22409, "ocks": 22410, "insta": 22411, "alas": 22412, "sik": 22413, "cuff": 22414, "lare": 22415, "foodies": 22416, "ovic": 22417, "atom": 22418, "geometric": 22419, "empathy": 22420, "ี": 22421, "centenary": 22422, "newspapers": 22423, "administrative": 22424, "ðŁİĬ": 22425, "stive": 22426, "contractors": 22427, "lett": 22428, "tasmania": 22429, "awesomeness": 22430, "density": 22431, "veen": 22432, "princeton": 22433, "frequently": 22434, "reject": 22435, "ghi": 22436, "modular": 22437, "ceramics": 22438, "shag": 22439, "kiwi": 22440, "canvas": 22441, "sweatshirt": 22442, "anj": 22443, "timm": 22444, "napoli": 22445, "iler": 22446, "appeals": 22447, "hamilton": 22448, "mayo": 22449, "weave": 22450, "arranged": 22451, "wharf": 22452, "occupy": 22453, 
"bvb": 22454, "asaki": 22455, "otter": 22456, "norm": 22457, "vies": 22458, "detox": 22459, "tional": 22460, "derek": 22461, "idad": 22462, "admissions": 22463, "constituency": 22464, "upper": 22465, "woot": 22466, "alloy": 22467, "seve": 22468, "lub": 22469, "uncomfortable": 22470, "edwin": 22471, "abre": 22472, "dwight": 22473, "arche": 22474, "virtually": 22475, "spol": 22476, "prie": 22477, "aii": 22478, "err": 22479, "switch": 22480, "barack": 22481, "seok": 22482, "coul": 22483, "wnt": 22484, "poul": 22485, "olive": 22486, "caffeine": 22487, "cardiff": 22488, "notorious": 22489, "demp": 22490, "excess": 22491, "barr": 22492, "tford": 22493, "ajay": 22494, "bumped": 22495, "mythology": 22496, "shelley": 22497, "falcon": 22498, "shakespeare": 22499, "mustangs": 22500, "noted": 22501, "bone": 22502, "civilization": 22503, "syd": 22504, "parsons": 22505, "unofficial": 22506, "hyped": 22507, "spends": 22508, "opposed": 22509, "vings": 22510, "spacex": 22511, "notification": 22512, "deciding": 22513, "biotech": 22514, "outsi": 22515, "salah": 22516, "!.": 22517, "fed": 22518, "ssy": 22519, "cms": 22520, "badgers": 22521, "cro": 22522, "elaine": 22523, "nba": 22524, "dyour": 22525, "nant": 22526, "honeymoon": 22527, "climbed": 22528, "conomy": 22529, "atha": 22530, "mell": 22531, "nebula": 22532, "naturephotography": 22533, "julie": 22534, "bmx": 22535, "invested": 22536, "mono": 22537, "lieutenant": 22538, "watkins": 22539, "technician": 22540, "ose": 22541, "kae": 22542, "ìĽ": 22543, "mcqueen": 22544, "preach": 22545, "traveller": 22546, "flexibility": 22547, "zebra": 22548, "retailer": 22549, "pant": 22550, "bender": 22551, "brandt": 22552, "squid": 22553, "warrant": 22554, "verified": 22555, "cass": 22556, "piercing": 22557, "honours": 22558, "tying": 22559, "morris": 22560, "kissed": 22561, "oprah": 22562, "panoramic": 22563, "mei": 22564, "splatoon": 22565, "wichita": 22566, "arias": 22567, "galli": 22568, "indyref": 22569, "goodtimes": 22570, "atheist": 22571, "confession": 22572, "owski": 22573, "repping": 22574, "additions": 22575, "mechanism": 22576, "zim": 22577, "jans": 22578, "suf": 22579, "chopped": 22580, "beginnings": 22581, "vitamins": 22582, "ãħ¤ãħ¤": 22583, "orth": 22584, "poles": 22585, "rub": 22586, "antarctica": 22587, "indiefilm": 22588, "webcam": 22589, "ketch": 22590, "brett": 22591, "clement": 22592, "heron": 22593, "defeating": 22594, "hydro": 22595, "bucket": 22596, "wandering": 22597, "sidney": 22598, "futureof": 22599, "binge": 22600, "onies": 22601, "knockout": 22602, "administrator": 22603, "synthe": 22604, "lent": 22605, "jani": 22606, "barley": 22607, "premierleague": 22608, "nerds": 22609, "crm": 22610, "bras": 22611, "botany": 22612, "evolved": 22613, "rotter": 22614, "rowed": 22615, "tumor": 22616, "wealthy": 22617, "ÂŃ": 22618, "monarch": 22619, "lished": 22620, "dahl": 22621, "ðŁİĥ": 22622, "buch": 22623, "kenyan": 22624, "ا": 22625, "redness": 22626, "assembled": 22627, "semit": 22628, "hudder": 22629, "shrop": 22630, "rani": 22631, "learning": 22632, "mory": 22633, "itia": 22634, "geographic": 22635, "worldof": 22636, "fb": 22637, "phosp": 22638, "boogie": 22639, "amped": 22640, "?...": 22641, "chew": 22642, "dwarf": 22643, "arus": 22644, "ssen": 22645, "rusty": 22646, "recruits": 22647, "hk": 22648, "garde": 22649, "applause": 22650, "volumes": 22651, "involves": 22652, "tac": 22653, "handbag": 22654, "translate": 22655, "ffel": 22656, "seym": 22657, "aquatic": 22658, "transfer": 22659, "zodi": 22660, "andr": 22661, "academia": 22662, "crater": 
22663, "tez": 22664, "arse": 22665, "adapt": 22666, "coloni": 22667, "snowman": 22668, "mali": 22669, "hangin": 22670, "dischar": 22671, "oysters": 22672, "phoe": 22673, "colonel": 22674, "wba": 22675, "hispanic": 22676, "thriving": 22677, "shy": 22678, "agles": 22679, "salesforce": 22680, "creme": 22681, "soles": 22682, "lafayette": 22683, "âī": 22684, "teria": 22685, "acha": 22686, "sperson": 22687, "gogo": 22688, "carly": 22689, "theore": 22690, "amore": 22691, "vox": 22692, "aft": 22693, "ãĤ¹": 22694, "staple": 22695, "muffin": 22696, "diagram": 22697, "inox": 22698, "sustained": 22699, "avent": 22700, "meta": 22701, "arbitr": 22702, "decay": 22703, "adole": 22704, "н": 22705, "ecol": 22706, "pho": 22707, "nk": 22708, "ocu": 22709, "granny": 22710, "ça": 22711, "luxembour": 22712, "stadt": 22713, "alberto": 22714, "levit": 22715, "amas": 22716, "dx": 22717, "orphan": 22718, "cobb": 22719, "asc": 22720, "logy": 22721, "immense": 22722, "chants": 22723, "offline": 22724, "pent": 22725, "brex": 22726, "winger": 22727, "plane": 22728, "iel": 22729, "nichols": 22730, "cathy": 22731, "naruto": 22732, "lowed": 22733, "///": 22734, "ignorance": 22735, "catastro": 22736, "youts": 22737, "schen": 22738, "build": 22739, "hazi": 22740, "sine": 22741, "criticalrole": 22742, "dug": 22743, "detect": 22744, "logs": 22745, "enamel": 22746, "stpatricksday": 22747, "eddie": 22748, "copa": 22749, "cigarettes": 22750, "hoff": 22751, "kaya": 22752, "lagoon": 22753, "rapha": 22754, "airborne": 22755, "choose": 22756, "puertor": 22757, "kev": 22758, "guiding": 22759, "frosty": 22760, "borough": 22761, "mira": 22762, "ðŁİĬ": 22763, "cadet": 22764, "anush": 22765, "yogi": 22766, "eger": 22767, "fling": 22768, "slope": 22769, "ninth": 22770, "weston": 22771, "footwear": 22772, "fn": 22773, "mayweather": 22774, "aam": 22775, "plain": 22776, "staircase": 22777, "witnesses": 22778, "workouts": 22779, "robust": 22780, "dexter": 22781, "cohort": 22782, "ðŁļĹ": 22783, "spell": 22784, "haze": 22785, "oom": 22786, "organising": 22787, "wildfire": 22788, "contacts": 22789, "avon": 22790, "mino": 22791, "updating": 22792, "ðŁį»": 22793, "lithium": 22794, "ingual": 22795, "kis": 22796, "auga": 22797, "locom": 22798, "deduc": 22799, "uda": 22800, "thak": 22801, "boyle": 22802, "mper": 22803, "hottie": 22804, "erik": 22805, "revised": 22806, "isla": 22807, "travelphotography": 22808, "ooza": 22809, "enqui": 22810, "conferences": 22811, "clover": 22812, "groom": 22813, "curves": 22814, "liveon": 22815, "perf": 22816, "displaced": 22817, "bolog": 22818, "xxxx": 22819, "ðŁĺ©ðŁĺ©": 22820, "teal": 22821, "vessels": 22822, "rainforest": 22823, "calci": 22824, "panther": 22825, "giraffe": 22826, "tasted": 22827, "imagery": 22828, "padres": 22829, "daytime": 22830, "bass": 22831, "ripe": 22832, "opioid": 22833, "nue": 22834, "vinyl": 22835, "inventor": 22836, "sens": 22837, "processor": 22838, "mut": 22839, "gadgets": 22840, "biblical": 22841, "shannon": 22842, "jacqueline": 22843, "cary": 22844, "theresistance": 22845, "alien": 22846, "nvi": 22847, "cosy": 22848, "bihar": 22849, "foley": 22850, "rend": 22851, "mugs": 22852, "faken": 22853, "clone": 22854, "niallo": 22855, "grabbed": 22856, "chihu": 22857, "powerhouse": 22858, "ntt": 22859, "cherokee": 22860, "sponge": 22861, "implementing": 22862, "rhine": 22863, "leone": 22864, "ðŁįĢ": 22865, "prettiest": 22866, "infrared": 22867, "improv": 22868, "switched": 22869, "tubes": 22870, "contr": 22871, "blk": 22872, "projected": 22873, "beaver": 22874, "yot": 22875, "bbcradio": 22876, 
"thigh": 22877, "persecu": 22878, "apologize": 22879, "wack": 22880, "poster": 22881, "oliver": 22882, "aza": 22883, "loud": 22884, "(?)": 22885, "fthe": 22886, "womenshi": 22887, "sparrow": 22888, "blush": 22889, "usable": 22890, "scales": 22891, "itative": 22892, "peuge": 22893, "needing": 22894, "leggings": 22895, "glamorous": 22896, "matur": 22897, "cz": 22898, "watt": 22899, "dab": 22900, "tamar": 22901, "etsym": 22902, "bauer": 22903, "heartfelt": 22904, "hn": 22905, "elsewhere": 22906, "birch": 22907, "alumini": 22908, "huck": 22909, "eme": 22910, "jl": 22911, "trafford": 22912, "dz": 22913, "portions": 22914, "anasta": 22915, "arthritis": 22916, "espn": 22917, "bergen": 22918, "violation": 22919, "yoshi": 22920, "cz": 22921, "northumberland": 22922, "closures": 22923, "ðŁĩ¯ðŁĩ": 22924, "smiley": 22925, "rw": 22926, "telugu": 22927, "intensi": 22928, "gregg": 22929, "vega": 22930, "dungeon": 22931, "southbound": 22932, "bail": 22933, "dominican": 22934, "semifinal": 22935, "chapters": 22936, "hitch": 22937, "vanity": 22938, "transiti": 22939, "recommends": 22940, "satisf": 22941, "barca": 22942, "queens": 22943, "((": 22944, "destruc": 22945, "strait": 22946, "ravi": 22947, "desserts": 22948, "intru": 22949, "haram": 22950, "kos": 22951, "foe": 22952, "fatty": 22953, "paisley": 22954, "magnitude": 22955, "dridge": 22956, "comey": 22957, "schemes": 22958, "visionary": 22959, "ourt": 22960, "downloaded": 22961, "ðŁĻĮðŁı½": 22962, "gdpr": 22963, "lani": 22964, "pwc": 22965, "guad": 22966, "nicest": 22967, "stakeholders": 22968, "referred": 22969, "georgetown": 22970, "arvindkejriwal": 22971, "schneider": 22972, "indoors": 22973, "allstar": 22974, "stranded": 22975, "gender": 22976, "zepp": 22977, "masses": 22978, "ðŁIJ±": 22979, "patiently": 22980, "bldg": 22981, "zab": 22982, "wearab": 22983, "vivid": 22984, "heck": 22985, "della": 22986, "symb": 22987, "jeopar": 22988, "lager": 22989, "àª": 22990, "combines": 22991, "nec": 22992, "bray": 22993, "flop": 22994, "txwx": 22995, "joys": 22996, "pont": 22997, "profound": 22998, "surround": 22999, "madhu": 23000, "mable": 23001, "ayr": 23002, "teas": 23003, "nsa": 23004, "openly": 23005, "ernest": 23006, "ãĥ©": 23007, "topo": 23008, "gna": 23009, "antioxid": 23010, "tian": 23011, "etr": 23012, "cello": 23013, "mathi": 23014, "generosity": 23015, "biting": 23016, "manic": 23017, "kelsey": 23018, "cheeks": 23019, "tender": 23020, "wth": 23021, "pronoun": 23022, "ultimately": 23023, "gusta": 23024, "arianag": 23025, "gerry": 23026, "bleed": 23027, "reddy": 23028, "mich": 23029, "mitsubishi": 23030, "operated": 23031, "sexually": 23032, "mau": 23033, "cllr": 23034, "vids": 23035, "coc": 23036, "melted": 23037, "ðŁĮĪ": 23038, "qld": 23039, "itech": 23040, "instrumental": 23041, "endgame": 23042, "ðŁĵĸ": 23043, "energi": 23044, "brownie": 23045, "tamil": 23046, "atin": 23047, "dominated": 23048, "praises": 23049, "fireplace": 23050, "sensational": 23051, "mena": 23052, "karti": 23053, "unprece": 23054, "rupt": 23055, "oriental": 23056, "mccor": 23057, "tournaments": 23058, "scenter": 23059, "reeves": 23060, "prescription": 23061, "same": 23062, "frau": 23063, "truffle": 23064, "embo": 23065, "romans": 23066, "blasts": 23067, "technological": 23068, "prat": 23069, "bsb": 23070, "yar": 23071, "trendy": 23072, "acl": 23073, "alad": 23074, "ðŁįģ": 23075, "ohh": 23076, "bankrupt": 23077, "thoven": 23078, "regards": 23079, "iser": 23080, "warwick": 23081, "vineyards": 23082, "realm": 23083, "niallofficial": 23084, "dota": 23085, "gemini": 23086, 
"todo": 23087, "vable": 23088, "¨¨": 23089, "lau": 23090, "wreath": 23091, "juve": 23092, "natasha": 23093, "lever": 23094, "lori": 23095, "horser": 23096, "cctv": 23097, "airbnb": 23098, "esanders": 23099, "sinclair": 23100, "emabiggest": 23101, "highschool": 23102, "contest": 23103, "optimistic": 23104, "tte": 23105, "ðŁĴķðŁĴķ": 23106, "ssd": 23107, "yee": 23108, "helena": 23109, "consen": 23110, "ricks": 23111, "jesse": 23112, "anic": 23113, "ðŁİ¯": 23114, "reacts": 23115, "robe": 23116, "independence": 23117, "voltage": 23118, "mington": 23119, "sant": 23120, "à¸Ļà¸": 23121, "----------------": 23122, "sentinel": 23123, "kett": 23124, "rehearsing": 23125, "aaaaaaaa": 23126, "softhe": 23127, "stirling": 23128, "search": 23129, "wigan": 23130, "standout": 23131, "snail": 23132, "pentagon": 23133, "Äģ": 23134, "chlor": 23135, "crust": 23136, "netany": 23137, "chemist": 23138, "disappeared": 23139, "ricardo": 23140, "spiders": 23141, "bose": 23142, "warren": 23143, "messing": 23144, "banners": 23145, "guel": 23146, "parach": 23147, "maid": 23148, "counted": 23149, "epile": 23150, "bonfire": 23151, "speechless": 23152, "setter": 23153, "measured": 23154, "rejects": 23155, "nikki": 23156, "lester": 23157, "forensic": 23158, "fabrics": 23159, "aloha": 23160, "preserved": 23161, "watford": 23162, "detailing": 23163, "darth": 23164, "bou": 23165, "carly": 23166, "...'": 23167, "tailgate": 23168, "notifications": 23169, "å¤": 23170, "passive": 23171, "trousers": 23172, "baloch": 23173, "rother": 23174, "typically": 23175, "Ã¥": 23176, "spit": 23177, "wiz": 23178, "sicily": 23179, "technically": 23180, "expose": 23181, "stage": 23182, "hubb": 23183, "cream": 23184, "caps": 23185, "poke": 23186, "sleek": 23187, "june": 23188, "temporarily": 23189, "dez": 23190, "awakens": 23191, "lame": 23192, "_-": 23193, "jiha": 23194, "tuesdays": 23195, "advised": 23196, "advisors": 23197, "existed": 23198, "disagree": 23199, "newsroom": 23200, "losers": 23201, "worldtour": 23202, "drying": 23203, "aldi": 23204, "harness": 23205, "footprint": 23206, "hobbit": 23207, "pmln": 23208, "iro": 23209, "quered": 23210, "assess": 23211, "gaze": 23212, "sab": 23213, "thian": 23214, "íĬ": 23215, "tif": 23216, "observe": 23217, "evil": 23218, "drawer": 23219, "sweep": 23220, "cory": 23221, "cody": 23222, "kyoto": 23223, "callum": 23224, "ninj": 23225, "laurent": 23226, "bei": 23227, "sketching": 23228, "customized": 23229, "dur": 23230, "regrets": 23231, "knoxville": 23232, "ìķĦ": 23233, "messaging": 23234, "gracie": 23235, "abundance": 23236, "bidding": 23237, "brewed": 23238, "flouri": 23239, "therapeutic": 23240, "altitude": 23241, "hogs": 23242, "burner": 23243, "electro": 23244, "wonderfully": 23245, "heater": 23246, "postpon": 23247, "livery": 23248, "rall": 23249, "adas": 23250, "aac": 23251, "saul": 23252, "brooklyn": 23253, "playhouse": 23254, "âĻ¥âĻ¥âĻ¥": 23255, "charitable": 23256, "iny": 23257, "zah": 23258, "competitions": 23259, "beav": 23260, "plugged": 23261, "ois": 23262, "doom": 23263, "astronom": 23264, "specialized": 23265, "maxi": 23266, "taps": 23267, "cellular": 23268, "depressed": 23269, "folklorethursday": 23270, "crib": 23271, "emul": 23272, "ë°©": 23273, "figh": 23274, "ruz": 23275, "carlisle": 23276, "spear": 23277, "sidewalk": 23278, "dei": 23279, "dependent": 23280, "laces": 23281, "nhs": 23282, "ðŁĮĻ": 23283, "realizing": 23284, "network": 23285, "riche": 23286, "regin": 23287, "refresh": 23288, "stral": 23289, "pathology": 23290, "plaid": 23291, "psychedelic": 23292, "hind": 23293, "uka": 
23294, "algorithm": 23295, "linking": 23296, "progressi": 23297, "fey": 23298, "dade": 23299, "hydrated": 23300, "bant": 23301, "famed": 23302, "cotsw": 23303, "boise": 23304, "asc": 23305, "racing": 23306, "javier": 23307, "wwen": 23308, "marlins": 23309, "poop": 23310, "swept": 23311, "tonights": 23312, "wef": 23313, "anime": 23314, "slovak": 23315, "âŀĸâŀĸ": 23316, "claus": 23317, "lemme": 23318, "clippers": 23319, "rels": 23320, "arianagrande": 23321, "rte": 23322, "kot": 23323, "thalapathy": 23324, "hungarian": 23325, "zuma": 23326, "yvon": 23327, "isu": 23328, "journeys": 23329, "clinics": 23330, "bebe": 23331, "wwf": 23332, "nws": 23333, "superheroes": 23334, "erit": 23335, "sleague": 23336, "identification": 23337, "motto": 23338, "bai": 23339, "sourced": 23340, "iller": 23341, "api": 23342, "prise": 23343, "unprecedented": 23344, "damas": 23345, "tunisia": 23346, "drain": 23347, "underestim": 23348, "ether": 23349, "quarterly": 23350, "rewarding": 23351, "alham": 23352, "wolverine": 23353, "cabine": 23354, "hypno": 23355, "nadine": 23356, "havana": 23357, "dae": 23358, "ðŁĵĪ": 23359, "dron": 23360, "readings": 23361, "bati": 23362, "pico": 23363, "merci": 23364, "itian": 23365, "walkers": 23366, "elope": 23367, "mikey": 23368, "godzilla": 23369, "burlington": 23370, "abuja": 23371, "socialism": 23372, "atility": 23373, "shell": 23374, "harrypotter": 23375, "gno": 23376, "abur": 23377, "releg": 23378, "felici": 23379, "rogen": 23380, "neuroscience": 23381, "instin": 23382, "atham": 23383, "vouchers": 23384, "jarre": 23385, "fuse": 23386, "defici": 23387, "monterey": 23388, "deport": 23389, "midday": 23390, "ppard": 23391, "freed": 23392, "ameter": 23393, "wilt": 23394, "ningham": 23395, "pratt": 23396, "liberty": 23397, "slogan": 23398, "oto": 23399, "pri": 23400, "coated": 23401, "cpd": 23402, "nett": 23403, "illas": 23404, "malawi": 23405, "evolve": 23406, "accessibility": 23407, "ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥": 23408, "ornament": 23409, "bp": 23410, "elis": 23411, "sonline": 23412, "chiro": 23413, "flick": 23414, "ibm": 23415, "arak": 23416, "enables": 23417, "garland": 23418, "sane": 23419, "cuties": 23420, "trip": 23421, "rotterdam": 23422, "nys": 23423, "lamps": 23424, "lucas": 23425, "bog": 23426, "rails": 23427, "travelled": 23428, "hicks": 23429, "enu": 23430, "sabha": 23431, "scrub": 23432, "hier": 23433, "hartford": 23434, "foo": 23435, "fernandez": 23436, "trevor": 23437, "mattress": 23438, "appointments": 23439, "alej": 23440, "fei": 23441, "ologist": 23442, "safar": 23443, "octa": 23444, "src": 23445, "shaun": 23446, "ambient": 23447, "dric": 23448, "biker": 23449, "shee": 23450, "mustache": 23451, "hta": 23452, "boone": 23453, "herty": 23454, "cardio": 23455, "brakes": 23456, "recital": 23457, "consists": 23458, "overwhelmed": 23459, "caul": 23460, "robbins": 23461, "imit": 23462, "alth": 23463, "url": 23464, "bibli": 23465, "onne": 23466, "blacklivesmatter": 23467, "difficulties": 23468, "telang": 23469, "taller": 23470, "ðŁĵĨ": 23471, "debating": 23472, "burrito": 23473, "movember": 23474, "strengthening": 23475, "boe": 23476, "testam": 23477, "miracles": 23478, "baseball": 23479, "renee": 23480, "ðŁijīðŁı»": 23481, "alfa": 23482, "âĺĺ": 23483, "unstoppable": 23484, "ecs": 23485, "gmo": 23486, "giftideas": 23487, "pathway": 23488, "fencing": 23489, "ðŁİ¤": 23490, "bham": 23491, "ras": 23492, "sko": 23493, "dled": 23494, "thelast": 23495, "magnum": 23496, "binary": 23497, "wilde": 23498, "wilder": 23499, "whati": 23500, "barbecue": 23501, "hism": 23502, "canoe": 23503, "kurdi": 
23504, "elive": 23505, "advantages": 23506, "madame": 23507, "bier": 23508, "missing": 23509, "entertain": 23510, "airforce": 23511, "yama": 23512, "cis": 23513, "hashtags": 23514, "jis": 23515, "veil": 23516, "dreamy": 23517, "tense": 23518, "mayward": 23519, "chateau": 23520, "huntington": 23521, "âļĵ": 23522, "vall": 23523, "upon": 23524, "blouse": 23525, "dunes": 23526, "ðŁĺ´": 23527, "fertility": 23528, "mole": 23529, "currencies": 23530, "stu": 23531, "berlin": 23532, "toasted": 23533, "divas": 23534, "walt": 23535, "lark": 23536, "pora": 23537, "hitter": 23538, "umer": 23539, "chilled": 23540, "balancing": 23541, "fais": 23542, "yin": 23543, "ortiz": 23544, "eastenders": 23545, "hate": 23546, "ural": 23547, "april": 23548, "timel": 23549, "à±": 23550, "pero": 23551, "stocked": 23552, "respects": 23553, "tht": 23554, "bestfriends": 23555, "givingtuesday": 23556, "bead": 23557, "invent": 23558, "imi": 23559, "naples": 23560, "combining": 23561, "tokens": 23562, "thirst": 23563, "masc": 23564, "parrot": 23565, "spu": 23566, "denton": 23567, "*-*": 23568, "tres": 23569, "suburban": 23570, "width": 23571, "sive": 23572, "contender": 23573, "sirius": 23574, "lok": 23575, "troopers": 23576, "outrage": 23577, "turbo": 23578, "fragile": 23579, "messed": 23580, "doh": 23581, "discord": 23582, "netanyahu": 23583, "resign": 23584, "forgiveness": 23585, "mohan": 23586, "munch": 23587, "camou": 23588, "identifying": 23589, "enabling": 23590, "hotter": 23591, "thornton": 23592, "jaipur": 23593, "arya": 23594, "ðŁı»âĢįâĻĢï¸ı": 23595, "mustaf": 23596, "majors": 23597, "oke": 23598, "duffy": 23599, "rohing": 23600, "tilt": 23601, "ðŁĩ®ðŁĩ³": 23602, "rockstar": 23603, "sheep": 23604, "hendrix": 23605, "rav": 23606, "invention": 23607, "dou": 23608, "laguna": 23609, "grumpy": 23610, "swis": 23611, "impe": 23612, ")'": 23613, "youths": 23614, "bunker": 23615, "stache": 23616, "oppose": 23617, "indies": 23618, "accelerate": 23619, "mlp": 23620, "eden": 23621, "wann": 23622, "kail": 23623, "akshaykumar": 23624, "supt": 23625, "polym": 23626, "middleton": 23627, "extraordin": 23628, "wilson": 23629, "australian": 23630, "aluminium": 23631, "wayne": 23632, "alumnus": 23633, "matics": 23634, "grim": 23635, "ernie": 23636, "oppa": 23637, "competitors": 23638, "randall": 23639, "hence": 23640, "declares": 23641, "preaching": 23642, "shahe": 23643, "cane": 23644, "sustainable": 23645, "staples": 23646, "ledge": 23647, "adena": 23648, "doctoral": 23649, "burgundy": 23650, "decorate": 23651, "rendered": 23652, "risen": 23653, "prank": 23654, "dior": 23655, "beethoven": 23656, "floor": 23657, "accom": 23658, "tot": 23659, "hodg": 23660, "tourism": 23661, "sayin": 23662, "objective": 23663, "markers": 23664, "premiership": 23665, "enabled": 23666, "camoufla": 23667, "giant": 23668, "Ñģ": 23669, "smokey": 23670, "ricket": 23671, "pang": 23672, "depending": 23673, "sation": 23674, "evolving": 23675, "intercep": 23676, "census": 23677, "tofthe": 23678, "reen": 23679, "mendoza": 23680, "trumpet": 23681, "marketers": 23682, "anit": 23683, "ðŁĻĬ": 23684, "northwestern": 23685, "vla": 23686, "fotogra": 23687, "blackandwhite": 23688, "chewan": 23689, "wig": 23690, "troom": 23691, "gingerbread": 23692, "kn": 23693, "romero": 23694, "nfc": 23695, "orchi": 23696, "funko": 23697, "source": 23698, "fs": 23699, "raped": 23700, "ost": 23701, "tarot": 23702, "annually": 23703, "ðŁĺ¬": 23704, "rill": 23705, "delav": 23706, "..!!": 23707, "ses": 23708, "cann": 23709, "medicare": 23710, "phel": 23711, "apex": 23712, "guardian": 
23713, "remained": 23714, "rpm": 23715, "añ": 23716, "storymonth": 23717, "instagood": 23718, "neighbour": 23719, "ping": 23720, "semite": 23721, "mystic": 23722, "ascot": 23723, "mater": 23724, "handful": 23725, "dangers": 23726, "tid": 23727, "anaheim": 23728, "opoly": 23729, "shallow": 23730, "namibia": 23731, "toria": 23732, "procurement": 23733, "bigbang": 23734, "announcements": 23735, "prosecutor": 23736, "bengals": 23737, "salle": 23738, "enroll": 23739, "gastro": 23740, "suggestion": 23741, "bak": 23742, "haul": 23743, "buddhism": 23744, "berniesanders": 23745, "flute": 23746, "fatigue": 23747, "cynthia": 23748, "choi": 23749, "irwin": 23750, "gua": 23751, "strous": 23752, "hp": 23753, "bap": 23754, "satisfying": 23755, "playa": 23756, "ðŁİ¼": 23757, "instap": 23758, "alice": 23759, "tp": 23760, "irrigation": 23761, "ðŁĩ¬ðŁĩ§": 23762, "intric": 23763, "clues": 23764, "plex": 23765, "sax": 23766, "hepat": 23767, "dumped": 23768, "significance": 23769, "byu": 23770, "medication": 23771, "prov": 23772, "toughest": 23773, "cornish": 23774, "âŀľ": 23775, "kelley": 23776, "uv": 23777, "sizz": 23778, "sibling": 23779, "mest": 23780, "distor": 23781, "diplomatic": 23782, "auntie": 23783, "bhat": 23784, "sonic": 23785, "brenda": 23786, "pumpkins": 23787, "roch": 23788, "blackburn": 23789, "urged": 23790, "shia": 23791, "arrangements": 23792, "flood": 23793, "saunders": 23794, "lecturer": 23795, "nouri": 23796, "populations": 23797, "diplomacy": 23798, "consistently": 23799, "ð٤Ļ": 23800, "tmund": 23801, "cauliflower": 23802, "lily": 23803, "vocabulary": 23804, "varieties": 23805, "cooker": 23806, "uptown": 23807, "quent": 23808, "mosa": 23809, "reinde": 23810, "velocity": 23811, "spruce": 23812, "socialmedi": 23813, "iber": 23814, "voluntary": 23815, "processed": 23816, "baltic": 23817, "yang": 23818, "lebanese": 23819, "dp": 23820, "dolly": 23821, "arrangement": 23822, "yuri": 23823, "cranberry": 23824, "kalyan": 23825, "elevation": 23826, "cliff": 23827, "pushes": 23828, "ìĬ¤": 23829, "silic": 23830, "cowx": 23831, "eternity": 23832, "slaves": 23833, "vinegar": 23834, "gloucester": 23835, "contained": 23836, "breakingnews": 23837, "against": 23838, "renovated": 23839, "normandy": 23840, "heroin": 23841, "ysm": 23842, "mods": 23843, "greek": 23844, "undi": 23845, "trench": 23846, "vh": 23847, "encourages": 23848, "headache": 23849, "grange": 23850, ":'": 23851, "evergreen": 23852, "ÙĬ": 23853, "reckon": 23854, "abused": 23855, "thru": 23856, "choice": 23857, "tidy": 23858, "colder": 23859, "schoice": 23860, "hain": 23861, "brum": 23862, "liars": 23863, "breit": 23864, "yorker": 23865, "shack": 23866, "heidi": 23867, "michaels": 23868, "scopic": 23869, "fascist": 23870, "playful": 23871, "cac": 23872, "yasss": 23873, "shad": 23874, "..?": 23875, "quen": 23876, "ramirez": 23877, "clifton": 23878, "prs": 23879, "bestfan": 23880, "âģł": 23881, "generating": 23882, "headset": 23883, "disappointment": 23884, "abstract": 23885, "boiled": 23886, "parenthood": 23887, "azerbaijan": 23888, "exhibiting": 23889, "bombay": 23890, "olivier": 23891, "koso": 23892, "unlea": 23893, "maternity": 23894, "izer": 23895, "sives": 23896, "rhu": 23897, "coll": 23898, "saskatchewan": 23899, "freakin": 23900, "dek": 23901, "nag": 23902, "stabili": 23903, "ðŁįķ": 23904, "organizer": 23905, "bosses": 23906, "aru": 23907, "uva": 23908, "atable": 23909, "taun": 23910, "afterwards": 23911, "fertili": 23912, "verge": 23913, "azi": 23914, "morph": 23915, "à¹ģà¸": 23916, "jerk": 23917, "cosmetic": 23918, "kow": 23919, 
"strust": 23920, "apache": 23921, "postcards": 23922, "formul": 23923, "ìĭ": 23924, "spinal": 23925, "jackpot": 23926, "electri": 23927, "ÃŃ": 23928, "loy": 23929, "grader": 23930, "diablo": 23931, "ardi": 23932, "hesit": 23933, "fw": 23934, "archery": 23935, "pash": 23936, "theories": 23937, "repeal": 23938, "relive": 23939, "percy": 23940, "âĺĨ": 23941, "imin": 23942, "synchron": 23943, "shampoo": 23944, "coupons": 23945, "oto": 23946, "lai": 23947, "thought": 23948, "luxembourg": 23949, "mov": 23950, "ðŁĺ¥": 23951, "gemma": 23952, "seated": 23953, "mga": 23954, "stratford": 23955, "uncertainty": 23956, "shifts": 23957, "esto": 23958, "fool": 23959, "firearms": 23960, "corrie": 23961, "kiki": 23962, "apparent": 23963, "pills": 23964, "olympia": 23965, "fid": 23966, "elevated": 23967, "decks": 23968, "ignoring": 23969, "avalan": 23970, "rov": 23971, "whistle": 23972, "ptsd": 23973, "militants": 23974, "robotic": 23975, "pacers": 23976, "quilt": 23977, "bankruptcy": 23978, "lich": 23979, "percussion": 23980, "celebrity": 23981, "als": 23982, "(;": 23983, "sut": 23984, "pokemongo": 23985, "hg": 23986, "offs": 23987, "gibraltar": 23988, "screams": 23989, "billie": 23990, "genome": 23991, "marin": 23992, "beams": 23993, "archbishop": 23994, "emin": 23995, "bedrooms": 23996, "gated": 23997, "olly": 23998, "warranty": 23999, "atown": 24000, "cuddles": 24001, "gunna": 24002, "kic": 24003, "vive": 24004, "cymru": 24005, "narrow": 24006, "prob": 24007, "leo": 24008, "references": 24009, "manufactured": 24010, "chopper": 24011, "brunswick": 24012, "semis": 24013, "donia": 24014, "rye": 24015, "mano": 24016, "hurting": 24017, "?#": 24018, "holli": 24019, "investigations": 24020, "cels": 24021, "ðŁĵŀ": 24022, "lester": 24023, "temples": 24024, "storey": 24025, "mcmahon": 24026, "toilets": 24027, "woof": 24028, "ï¸İ": 24029, "leverage": 24030, "atom": 24031, "nightmares": 24032, "victorious": 24033, "haunting": 24034, "customer": 24035, "agi": 24036, "yoongi": 24037, "monty": 24038, "veronica": 24039, "wur": 24040, "intimid": 24041, "blankets": 24042, "volution": 24043, "jm": 24044, "âĺİ": 24045, "amon": 24046, "judith": 24047, "ðŁĺİðŁĺİ": 24048, "distracted": 24049, "drip": 24050, "hurricane": 24051, "andes": 24052, "revelation": 24053, "troop": 24054, "ableg": 24055, "collin": 24056, "tibetan": 24057, "worrying": 24058, "internationally": 24059, "eater": 24060, "cameroon": 24061, "brador": 24062, "yuk": 24063, "ðŁĴĹðŁĴĹ": 24064, "trak": 24065, "slopes": 24066, "cier": 24067, "nea": 24068, "oler": 24069, "taka": 24070, "albion": 24071, "volcanic": 24072, "amn": 24073, "afi": 24074, "obstac": 24075, "facetime": 24076, "gering": 24077, "npr": 24078, "metallica": 24079, "organic": 24080, "ðŁĴ¡": 24081, "kidd": 24082, "dances": 24083, "pembro": 24084, "washer": 24085, "mits": 24086, "omer": 24087, "emotionally": 24088, "tango": 24089, "ipo": 24090, "docks": 24091, "scanning": 24092, "specs": 24093, "thom": 24094, "theology": 24095, "emergen": 24096, "omi": 24097, "gpa": 24098, "selections": 24099, "unnecessary": 24100, "image": 24101, "ters": 24102, "induced": 24103, "gigan": 24104, "rentals": 24105, "supplied": 24106, "mfa": 24107, "shankar": 24108, "later": 24109, "pajam": 24110, "clave": 24111, "Ùģ": 24112, "mahin": 24113, "carlson": 24114, "avian": 24115, "anova": 24116, "katie": 24117, "ajith": 24118, "designated": 24119, "chocolates": 24120, "investigators": 24121, "glazed": 24122, "princess": 24123, "erry": 24124, "ragn": 24125, "ourable": 24126, "hru": 24127, "sundance": 24128, "peugeot": 24129, 
"steampunk": 24130, "ghlin": 24131, "grease": 24132, "hires": 24133, "zap": 24134, "perce": 24135, "jill": 24136, "tome": 24137, "hehehe": 24138, "joyful": 24139, "maestro": 24140, "nished": 24141, "genealo": 24142, "vich": 24143, "pits": 24144, "foxes": 24145, "goodman": 24146, "emerson": 24147, "lobes": 24148, "converse": 24149, "oats": 24150, "thomson": 24151, "rahim": 24152, "malware": 24153, "ahi": 24154, "mankind": 24155, "resin": 24156, "img": 24157, "swood": 24158, "kinder": 24159, "scroll": 24160, "ara": 24161, "sakura": 24162, "robbed": 24163, "xion": 24164, "nya": 24165, "cism": 24166, "cedar": 24167, "bein": 24168, "mourning": 24169, "torto": 24170, "heathrow": 24171, "donegal": 24172, "barb": 24173, "hydration": 24174, "kor": 24175, "elimination": 24176, "supdates": 24177, "hills": 24178, "appeti": 24179, "starred": 24180, "kom": 24181, "gwen": 24182, "ddd": 24183, "cray": 24184, "scanner": 24185, "personalised": 24186, "serenity": 24187, "redesign": 24188, "metaph": 24189, "boxed": 24190, "judgment": 24191, "nose": 24192, "ë¹": 24193, "erad": 24194, "acne": 24195, "suppliers": 24196, "energetic": 24197, "vom": 24198, "asap": 24199, "ðŁĶ¸": 24200, "irvine": 24201, "hatch": 24202, "lass": 24203, "adren": 24204, "waffles": 24205, "accurately": 24206, "icio": 24207, "ittle": 24208, "seun": 24209, "occupy": 24210, "webcam": 24211, "thenew": 24212, "entes": 24213, "gai": 24214, "jw": 24215, "accountable": 24216, "visor": 24217, "irrit": 24218, "licensing": 24219, "huddersfield": 24220, "genie": 24221, "ðŁİ¾": 24222, "atmospheric": 24223, "tensions": 24224, "spartan": 24225, "clifford": 24226, "olan": 24227, "northbound": 24228, "ameen": 24229, "censor": 24230, "uel": 24231, "stery": 24232, "$$": 24233, "farrell": 24234, "hyster": 24235, "clt": 24236, "sedan": 24237, "replied": 24238, "describing": 24239, "microwave": 24240, "slab": 24241, "prosp": 24242, "assisting": 24243, "rubio": 24244, "ethan": 24245, "hhhhh": 24246, "guay": 24247, "zman": 24248, "raise": 24249, "rolling": 24250, "oe": 24251, "nile": 24252, "ambrose": 24253, "scarborough": 24254, "heroic": 24255, "cooks": 24256, "mort": 24257, "chopra": 24258, "ðŁĮ·": 24259, "tob": 24260, "shaving": 24261, "stacey": 24262, "dorm": 24263, "motorsports": 24264, "wiki": 24265, "folds": 24266, "spiced": 24267, "stressful": 24268, "literal": 24269, "fudge": 24270, "peggy": 24271, "waite": 24272, "tresses": 24273, "sesh": 24274, "pric": 24275, "ðŁİħ": 24276, "fright": 24277, "rva": 24278, "mumbai": 24279, "pom": 24280, "ttv": 24281, "cellar": 24282, "tome": 24283, "android": 24284, "doris": 24285, "tsunami": 24286, "tinder": 24287, "oec": 24288, "mwc": 24289, "dortmund": 24290, "nothin": 24291, "liti": 24292, "sou": 24293, "believein": 24294, "atu": 24295, "knocks": 24296, "magni": 24297, "sssss": 24298, "rohit": 24299, "inews": 24300, "angi": 24301, "mandy": 24302, "kettle": 24303, "intermediate": 24304, "avant": 24305, "curl": 24306, "endorsed": 24307, "orio": 24308, "urt": 24309, "consideration": 24310, "wires": 24311, "shelters": 24312, "bino": 24313, "vikram": 24314, "implemented": 24315, "lydia": 24316, "buk": 24317, "parody": 24318, "cnews": 24319, "undergraduate": 24320, "canucks": 24321, "sami": 24322, "politically": 24323, "rotten": 24324, "ghz": 24325, "textiles": 24326, "overload": 24327, "moderni": 24328, "recreational": 24329, "flir": 24330, "baton": 24331, "typography": 24332, "ovation": 24333, "intriguing": 24334, "pilgrimage": 24335, "alge": 24336, "adays": 24337, "tcmparty": 24338, "spelled": 24339, "curls": 24340, 
"booze": 24341, "stem": 24342, "annes": 24343, "irls": 24344, "sponge": 24345, "shopper": 24346, "signation": 24347, "brass": 24348, "mistress": 24349, "leah": 24350, "beginner": 24351, "lauderdale": 24352, "august": 24353, "preschool": 24354, "taping": 24355, "taipei": 24356, "executives": 24357, "bd": 24358, "rhetor": 24359, "escor": 24360, "immuno": 24361, "deeplearning": 24362, "statues": 24363, "itus": 24364, "manuscript": 24365, "lyric": 24366, "corvette": 24367, "molly": 24368, "lage": 24369, "dep": 24370, "cnbc": 24371, "lest": 24372, "jessi": 24373, "fife": 24374, "griffith": 24375, "opposing": 24376, "rang": 24377, "drills": 24378, "respectful": 24379, "pity": 24380, "dell": 24381, "harding": 24382, "playboy": 24383, "bloke": 24384, "shutout": 24385, "kili": 24386, "osp": 24387, "seattle": 24388, "bcpoli": 24389, "mises": 24390, "journals": 24391, "teaming": 24392, "esther": 24393, "freddy": 24394, "Ķï¸ı": 24395, "metrics": 24396, "notre": 24397, "garry": 24398, "forty": 24399, "navigate": 24400, "periods": 24401, "benedic": 24402, "jid": 24403, "daw": 24404, "ancestors": 24405, "restoring": 24406, "cong": 24407, "allergy": 24408, "titanium": 24409, "cence": 24410, "leaning": 24411, "abbas": 24412, "vast": 24413, "ucf": 24414, "roofing": 24415, "eman": 24416, "severely": 24417, "vogue": 24418, "veau": 24419, "inbound": 24420, "dz": 24421, "taneously": 24422, "stretching": 24423, "manchester": 24424, "dryer": 24425, "davis": 24426, "kanth": 24427, "thegame": 24428, "itted": 24429, "retain": 24430, "elles": 24431, "congestion": 24432, "fraternity": 24433, "ollie": 24434, "loki": 24435, "freely": 24436, "choo": 24437, "pony": 24438, "scep": 24439, "tably": 24440, "balt": 24441, "rockn": 24442, "dime": 24443, "logging": 24444, "ðŁį·": 24445, "adu": 24446, "havoc": 24447, "waterford": 24448, "charis": 24449, "sweetie": 24450, "running": 24451, "nerd": 24452, "erdogan": 24453, "zara": 24454, "weighing": 24455, "fifty": 24456, "precise": 24457, "lowell": 24458, "kurdistan": 24459, "ryo": 24460, "orth": 24461, "synth": 24462, "liners": 24463, "phenomenon": 24464, "artillery": 24465, "illegally": 24466, "construct": 24467, "nostalgic": 24468, "garth": 24469, "alta": 24470, "shelton": 24471, "asean": 24472, "wander": 24473, "durban": 24474, "diversi": 24475, "bono": 24476, "clon": 24477, "leman": 24478, "shun": 24479, "obstacles": 24480, "appetite": 24481, "feeder": 24482, "respiratory": 24483, "dixie": 24484, "formula": 24485, "anto": 24486, "sober": 24487, "extinct": 24488, "auc": 24489, "ingles": 24490, "legitimate": 24491, ";;": 24492, "minnie": 24493, "ipswich": 24494, "dramatically": 24495, "ðŁijıðŁı¼": 24496, "ingham": 24497, "military": 24498, "monet": 24499, "usnavy": 24500, "fork": 24501, "dunno": 24502, "player": 24503, "qotd": 24504, "stoo": 24505, "exor": 24506, "ethiopian": 24507, "filmfest": 24508, "pered": 24509, "cate": 24510, "saudi": 24511, "inner": 24512, "sincere": 24513, "tionality": 24514, "alee": 24515, "deeds": 24516, "cooperative": 24517, "ironic": 24518, "crocod": 24519, "brary": 24520, "postseason": 24521, "camper": 24522, "canary": 24523, "ein": 24524, "extensions": 24525, "nbd": 24526, "sherwood": 24527, "spokane": 24528, "hump": 24529, "jitsu": 24530, "ê¹": 24531, "daryl": 24532, "psi": 24533, "stabbed": 24534, "offerings": 24535, "expects": 24536, "caval": 24537, "bodybuilding": 24538, "framing": 24539, "fca": 24540, "yearly": 24541, "bombed": 24542, "skil": 24543, "researching": 24544, "judiciary": 24545, "greeted": 24546, "tudor": 24547, "milo": 24548, 
"innovate": 24549, "ðŁĺĽ": 24550, "rhs": 24551, "ruby": 24552, "contributor": 24553, "famer": 24554, "socially": 24555, "mlin": 24556, "fiery": 24557, "utter": 24558, "beaut": 24559, "itos": 24560, "devoted": 24561, "rainbow": 24562, "barney": 24563, "peren": 24564, "arjun": 24565, "rna": 24566, "gabby": 24567, "uti": 24568, "hannity": 24569, "pickle": 24570, "serv": 24571, "quakes": 24572, "ppe": 24573, "fem": 24574, "whitec": 24575, "jn": 24576, "victories": 24577, "ðŁ§¡": 24578, "golfer": 24579, "congratulates": 24580, "resulting": 24581, "mechanic": 24582, "urve": 24583, "centered": 24584, "kiev": 24585, "ans": 24586, "incub": 24587, "<<": 24588, "cmo": 24589, "bestfanarmy": 24590, "daph": 24591, "enham": 24592, "oncology": 24593, "kush": 24594, "txt": 24595, "oriented": 24596, "fashionable": 24597, "csr": 24598, "sahara": 24599, "rack": 24600, "pdp": 24601, "hanson": 24602, "à¸ĩ": 24603, "tiers": 24604, "rar": 24605, "panam": 24606, "insky": 24607, "sahi": 24608, "testament": 24609, "asthma": 24610, "inher": 24611, "fisheries": 24612, "order": 24613, "howe": 24614, "gallon": 24615, "epis": 24616, "suzanne": 24617, "drowning": 24618, "panelists": 24619, "ðŁĺ²": 24620, "ë¦": 24621, "alach": 24622, "commemorative": 24623, "attribu": 24624, "ðŁij»": 24625, "moo": 24626, "visional": 24627, "weeksary": 24628, "gust": 24629, "akin": 24630, "pointe": 24631, "eee": 24632, "dispar": 24633, "nipp": 24634, "dental": 24635, "stall": 24636, "pian": 24637, "bore": 24638, "ulster": 24639, "tick": 24640, "irr": 24641, "taehyung": 24642, "microphone": 24643, "bermuda": 24644, "gaard": 24645, "eler": 24646, "plumbing": 24647, "hugely": 24648, "âļ«ï¸ı": 24649, "raceway": 24650, "cambridge": 24651, "marcel": 24652, "burnley": 24653, "toast": 24654, "hollywood": 24655, "fasting": 24656, "mered": 24657, "hibition": 24658, "capped": 24659, "beneficial": 24660, "owning": 24661, "contamin": 24662, "arabian": 24663, "toon": 24664, "capac": 24665, "hulu": 24666, "smir": 24667, "nutrients": 24668, "sein": 24669, "graphs": 24670, "conditional": 24671, "ðŁijħ": 24672, "orac": 24673, "playin": 24674, "northe": 24675, "tornad": 24676, "marian": 24677, "jumbo": 24678, "lexi": 24679, "incredibleindia": 24680, "roadto": 24681, "ukone": 24682, "confusing": 24683, "sph": 24684, "shank": 24685, "pied": 24686, "mqm": 24687, "positively": 24688, "sherry": 24689, "pathways": 24690, "considers": 24691, "tofu": 24692, "arguments": 24693, "resilient": 24694, "chett": 24695, "withdra": 24696, "tero": 24697, "atedly": 24698, "swana": 24699, "heb": 24700, "flight": 24701, "harley": 24702, "decrease": 24703, "kindle": 24704, "bookshop": 24705, "³ï¸ı": 24706, "martyrs": 24707, "smur": 24708, "mccl": 24709, "concerto": 24710, "stime": 24711, "rejoice": 24712, "applau": 24713, "clement": 24714, "merkel": 24715, "jaime": 24716, "immortal": 24717, "isleof": 24718, "marco": 24719, "youtuber": 24720, "stalking": 24721, "metoo": 24722, "stack": 24723, "spouse": 24724, "ust": 24725, "luv": 24726, "âļ¾ï¸ı": 24727, "equestrian": 24728, "eving": 24729, "flin": 24730, "nickname": 24731, "thebig": 24732, "asar": 24733, "stacks": 24734, "walker": 24735, "bora": 24736, "kidnapped": 24737, "hurling": 24738, "humbold": 24739, "recalls": 24740, "copper": 24741, "annis": 24742, "seo": 24743, "merger": 24744, "muir": 24745, "addy": 24746, "ðŁĴªðŁĴª": 24747, "bex": 24748, "cracy": 24749, "conan": 24750, "congratulation": 24751, "midst": 24752, "âϬ": 24753, "forbi": 24754, "optic": 24755, "crate": 24756, "crocodile": 24757, "madagas": 24758, "securing": 
24759, "aston": 24760, "ogue": 24761, "savior": 24762, "salisbury": 24763, "loveit": 24764, "fujifilm": 24765, "castles": 24766, "asst": 24767, "arrows": 24768, "spacious": 24769, "trs": 24770, "polyvore": 24771, "progression": 24772, "mri": 24773, "nelson": 24774, "bim": 24775, "indicator": 24776, "oda": 24777, "pepe": 24778, "resignation": 24779, "gut": 24780, "sneaker": 24781, "logically": 24782, "azy": 24783, "arella": 24784, "tearing": 24785, "joshi": 24786, "ssionism": 24787, "qpr": 24788, "mariah": 24789, "px": 24790, "bleed": 24791, "mian": 24792, "medley": 24793, "weiss": 24794, "kerry": 24795, "gatory": 24796, "atal": 24797, "madison": 24798, "avenger": 24799, "naby": 24800, "pland": 24801, "giles": 24802, "freshwater": 24803, "dington": 24804, "taj": 24805, "demonstrates": 24806, "ntv": 24807, "bulbs": 24808, "sundaymorning": 24809, "peake": 24810, "souvenir": 24811, "wah": 24812, "tonnes": 24813, "mkt": 24814, "complexity": 24815, "conden": 24816, "rossi": 24817, "bing": 24818, "yds": 24819, "suk": 24820, "ngo": 24821, "midland": 24822, "oly": 24823, "lifeis": 24824, "ripple": 24825, "moreno": 24826, "dders": 24827, "tus": 24828, "áĥ": 24829, "boul": 24830, "xa": 24831, "holdings": 24832, "wny": 24833, "shadowhunters": 24834, "kei": 24835, "aspire": 24836, "mous": 24837, "owen": 24838, "soak": 24839, "skirts": 24840, "mountaine": 24841, "storming": 24842, "chrome": 24843, "riots": 24844, "sarato": 24845, "amaze": 24846, "lessness": 24847, "navar": 24848, "criteria": 24849, "rafa": 24850, "indulge": 24851, "ayer": 24852, "porto": 24853, "namo": 24854, "................": 24855, "yields": 24856, "valle": 24857, "jh": 24858, "macron": 24859, "sains": 24860, "durant": 24861, "trailers": 24862, "wot": 24863, "confederate": 24864, "shrin": 24865, "idol": 24866, "formally": 24867, "tene": 24868, "motorcycles": 24869, "thang": 24870, "node": 24871, "banger": 24872, "daly": 24873, "pats": 24874, "enrollment": 24875, "auctions": 24876, "atal": 24877, "arbor": 24878, "logos": 24879, "dearest": 24880, "transaction": 24881, "domingo": 24882, "flea": 24883, "sermon": 24884, "deck": 24885, "sincere": 24886, "questioning": 24887, "julio": 24888, "wasp": 24889, "pretz": 24890, "armenian": 24891, "kham": 24892, "inflammation": 24893, "picturesque": 24894, "accidental": 24895, "filmmakers": 24896, "ðŁĺļ": 24897, "ðŁĴį": 24898, "casey": 24899, "sob": 24900, "yeezy": 24901, "goodwill": 24902, "paragra": 24903, "ssly": 24904, "feather": 24905, "dyed": 24906, "assassination": 24907, "nade": 24908, "bcs": 24909, "applies": 24910, "feminine": 24911, "feu": 24912, "extent": 24913, "deputies": 24914, "lack": 24915, "psychic": 24916, "goi": 24917, "killings": 24918, "pseu": 24919, "ðŁ¤ª": 24920, "unc": 24921, "marl": 24922, "tane": 24923, "mckenna": 24924, "surfer": 24925, "influences": 24926, "freeway": 24927, "hackney": 24928, "malaria": 24929, "eland": 24930, "teau": 24931, "remastered": 24932, "ر": 24933, "razor": 24934, "ggy": 24935, "corro": 24936, "laksh": 24937, "flair": 24938, "honesty": 24939, "hooray": 24940, "depp": 24941, "amc": 24942, "wednesdays": 24943, "qa": 24944, "edits": 24945, "-$": 24946, "sevilla": 24947, "doubled": 24948, "humanities": 24949, "ccot": 24950, "somos": 24951, "rine": 24952, "afa": 24953, "sioux": 24954, "reconstruction": 24955, "welding": 24956, "threads": 24957, "amish": 24958, "encouragement": 24959, "poder": 24960, "bock": 24961, "balm": 24962, "ptions": 24963, "standup": 24964, "accomplishments": 24965, "guarding": 24966, "conviction": 24967, "acion": 24968, 
"napoleon": 24969, "depicting": 24970, "attack": 24971, "sui": 24972, "wearable": 24973, "âĸªï¸ı": 24974, "potter": 24975, "escort": 24976, "vise": 24977, "tots": 24978, "boon": 24979, "eventprofs": 24980, "angular": 24981, "womenshistorymonth": 24982, "barrow": 24983, "schi": 24984, "accomp": 24985, "tik": 24986, "lend": 24987, "kensington": 24988, "wolfe": 24989, "stacked": 24990, "crashing": 24991, "exhibit": 24992, "winged": 24993, "sabrina": 24994, "masa": 24995, "kms": 24996, "always": 24997, "ett": 24998, "plasma": 24999, "counseling": 25000, "pickles": 25001, "nfldraft": 25002, "mrs": 25003, "inevitable": 25004, "courageous": 25005, "stafford": 25006, "writerslife": 25007, "hos": 25008, "ej": 25009, "ghyun": 25010, "trademark": 25011, "adrian": 25012, "influencer": 25013, "coronation": 25014, "raging": 25015, "explored": 25016, "usaf": 25017, "exception": 25018, "eux": 25019, "tanker": 25020, "swami": 25021, "packet": 25022, "ðŁij¨âĢį": 25023, "fen": 25024, "sheen": 25025, "aero": 25026, "jl": 25027, "regal": 25028, "nwt": 25029, "auster": 25030, "mehta": 25031, "charge": 25032, "aste": 25033, "bate": 25034, "infeld": 25035, "racecourse": 25036, "collapsed": 25037, "fleece": 25038, "zil": 25039, "allie": 25040, "alternatives": 25041, "georges": 25042, "ðŁĵį": 25043, "quirky": 25044, "fcb": 25045, "natgeo": 25046, "philanthropy": 25047, "brai": 25048, "everyday": 25049, "ðŁIJ°": 25050, "achers": 25051, "jaan": 25052, "fines": 25053, "qi": 25054, "fisherman": 25055, "distinct": 25056, "grimes": 25057, "nationalist": 25058, "commence": 25059, "rown": 25060, "â̳": 25061, "zing": 25062, "fter": 25063, "hrw": 25064, "baroque": 25065, "blender": 25066, "kitty": 25067, "hooks": 25068, "cited": 25069, "wanda": 25070, "consensus": 25071, "reindeer": 25072, "anand": 25073, "supply": 25074, "meds": 25075, "vn": 25076, "olph": 25077, "ratchet": 25078, "sheldon": 25079, "securities": 25080, "ë°©íĥ": 25081, "crom": 25082, "mosquito": 25083, "jeric": 25084, "immac": 25085, "dimensions": 25086, "â¤": 25087, "dissi": 25088, "spongebob": 25089, "damien": 25090, "stevenson": 25091, "joanne": 25092, "delish": 25093, "yikes": 25094, "thanx": 25095, "surveys": 25096, "postponed": 25097, "alcoholic": 25098, "alised": 25099, "ðŁĻıðŁı»": 25100, "doch": 25101, "sentim": 25102, "meredith": 25103, "compares": 25104, "bago": 25105, "happydays": 25106, "moss": 25107, "ãħĭ": 25108, "nec": 25109, "gnment": 25110, "frustrated": 25111, "combin": 25112, "riv": 25113, "eclec": 25114, "collo": 25115, "compliment": 25116, "actorslife": 25117, "ctto": 25118, "nicar": 25119, "ophon": 25120, "aparthe": 25121, "mant": 25122, "jade": 25123, "trolley": 25124, "optimization": 25125, "eyeon": 25126, "ecological": 25127, "quist": 25128, "ephe": 25129, "à¥ĩ": 25130, "cinco": 25131, "appoints": 25132, "oldschool": 25133, "cpr": 25134, "behavioral": 25135, "minaj": 25136, ":-(": 25137, "tagging": 25138, "eval": 25139, "joaqu": 25140, "ðŁĺ«": 25141, "hak": 25142, "deme": 25143, "jamaican": 25144, "sos": 25145, "hyatt": 25146, "handbook": 25147, "librarian": 25148, "hannibal": 25149, "pumping": 25150, "chom": 25151, "fman": 25152, "gai": 25153, "hull": 25154, "responders": 25155, "greenville": 25156, "nus": 25157, "vaugh": 25158, "ðŁİīðŁİī": 25159, "taxi": 25160, "goldberg": 25161, "mantra": 25162, "tease": 25163, "forbidden": 25164, "methodist": 25165, "ativity": 25166, "****": 25167, "ect": 25168, "mcgr": 25169, "Ħëĭ": 25170, "seb": 25171, "amidst": 25172, "disappear": 25173, "thyro": 25174, "philips": 25175, "erina": 25176, 
"vicious": 25177, "streamer": 25178, "millionaire": 25179, "map": 25180, "strick": 25181, "hackathon": 25182, "gha": 25183, "edic": 25184, "mika": 25185, "peck": 25186, "illi": 25187, "antoine": 25188, "arca": 25189, "optic": 25190, "maure": 25191, "ðŁĩ¦ðŁĩº": 25192, "clashes": 25193, "manly": 25194, "âĺģ": 25195, "alvar": 25196, "andres": 25197, "mei": 25198, "elm": 25199, "wwww": 25200, "altered": 25201, "lte": 25202, "ê¹Ģ": 25203, "mojo": 25204, "forrest": 25205, "thalai": 25206, "nont": 25207, "speeches": 25208, "acknowledge": 25209, "ignite": 25210, "xfactor": 25211, "ðŁ¥Ĥ": 25212, "meadow": 25213, "disrupt": 25214, "debuted": 25215, "scrimmage": 25216, "pharmaceutical": 25217, "fidd": 25218, "foundations": 25219, "philosopher": 25220, "etal": 25221, "publishers": 25222, "boys": 25223, "cke": 25224, "rugged": 25225, "optimism": 25226, "rebe": 25227, "philharmon": 25228, "narcis": 25229, "rallies": 25230, "luis": 25231, "goblue": 25232, "folded": 25233, "unacceptable": 25234, "optimal": 25235, "lisa": 25236, "polaro": 25237, "+.": 25238, "enza": 25239, "âĿ£ï¸ı": 25240, "monopoly": 25241, "graceful": 25242, "dairy": 25243, "dua": 25244, "difficulty": 25245, "judgement": 25246, "osi": 25247, "mersey": 25248, "flux": 25249, "newfound": 25250, "terns": 25251, "dimensional": 25252, "invic": 25253, "alba": 25254, "amit": 25255, "abudhabi": 25256, "algeria": 25257, "automobile": 25258, "thead": 25259, "lotion": 25260, "accelerator": 25261, "vacant": 25262, "ition": 25263, "luf": 25264, "alic": 25265, "pll": 25266, "blazing": 25267, "baz": 25268, "sene": 25269, "ðŁij¼": 25270, "villains": 25271, "directory": 25272, "eisen": 25273, "tock": 25274, "brochure": 25275, "ripp": 25276, "hbd": 25277, "zaynmalik": 25278, "niche": 25279, "lolol": 25280, "certificates": 25281, "morse": 25282, "facup": 25283, "xham": 25284, "unwanted": 25285, "imports": 25286, "carnegie": 25287, "fansign": 25288, "mou": 25289, "ralph": 25290, "destroyer": 25291, "swing": 25292, "trekking": 25293, "ciliation": 25294, "pitbull": 25295, "gaps": 25296, "howell": 25297, "definitive": 25298, "mcle": 25299, "fps": 25300, "etz": 25301, "bolly": 25302, "lynn": 25303, "gano": 25304, "ature": 25305, "fursuit": 25306, "coil": 25307, "nav": 25308, "butts": 25309, "trojans": 25310, "eure": 25311, "enko": 25312, "schumer": 25313, "horrific": 25314, "installment": 25315, "brb": 25316, "suburbs": 25317, "abel": 25318, "vir": 25319, "desh": 25320, "cunningham": 25321, "ðŁIJ»": 25322, "spann": 25323, "schwe": 25324, "kemp": 25325, "tru": 25326, "stealth": 25327, "ques": 25328, "lew": 25329, "delights": 25330, "koch": 25331, "humili": 25332, "criti": 25333, "ilt": 25334, "spells": 25335, "miley": 25336, "caric": 25337, "ðŁį´": 25338, "lcfc": 25339, "substitute": 25340, "oung": 25341, "?!!": 25342, "affir": 25343, "predictable": 25344, "classof": 25345, "err": 25346, "cypress": 25347, "chandra": 25348, "ageing": 25349, "____": 25350, "therland": 25351, "doncaster": 25352, "elin": 25353, "yoshi": 25354, "sailors": 25355, "harris": 25356, "joanna": 25357, "nigerians": 25358, "hers": 25359, "plague": 25360, "procra": 25361, "kno": 25362, "canton": 25363, "busines": 25364, "unh": 25365, "prakash": 25366, "cin": 25367, "bowen": 25368, "coating": 25369, "mals": 25370, "begging": 25371, "smithson": 25372, "pontiac": 25373, "spies": 25374, "damian": 25375, "pline": 25376, "undant": 25377, "alta": 25378, "oness": 25379, "shameless": 25380, "daq": 25381, "bbm": 25382, "wales": 25383, "stampede": 25384, "serum": 25385, "ÙĨ": 25386, "catalyst": 25387, 
"xn": 25388, "absc": 25389, "freezer": 25390, "chun": 25391, "arios": 25392, "mccre": 25393, "forehead": 25394, "hears": 25395, "damascus": 25396, "tacoma": 25397, "arduino": 25398, "encounters": 25399, "stanton": 25400, "lgb": 25401, "abas": 25402, "\"..": 25403, "kete": 25404, "dracula": 25405, "elem": 25406, "gne": 25407, "zeppelin": 25408, "labrador": 25409, "pulp": 25410, "optional": 25411, "orn": 25412, "russians": 25413, "sanitation": 25414, "hilary": 25415, "etsymntt": 25416, "penalties": 25417, "aust": 25418, "igans": 25419, "olympian": 25420, "medicaid": 25421, "versace": 25422, "vape": 25423, "restra": 25424, "peep": 25425, "sexiest": 25426, "stalls": 25427, "dile": 25428, "thea": 25429, "punjabi": 25430, "puppy": 25431, "tuesdaymotivation": 25432, "ðŁĵļ": 25433, "theflash": 25434, "rocket": 25435, "modest": 25436, "chihuahu": 25437, "onna": 25438, "ksa": 25439, "hurdles": 25440, "cave": 25441, "failures": 25442, "split": 25443, "boho": 25444, "gurl": 25445, "disappoint": 25446, "howard": 25447, "nugget": 25448, "franz": 25449, "stalert": 25450, "kazakh": 25451, "forgetting": 25452, "schri": 25453, "agate": 25454, "amat": 25455, "everett": 25456, "duet": 25457, "veterinary": 25458, "julian": 25459, "chills": 25460, "brave": 25461, "ghostbusters": 25462, "lando": 25463, "greets": 25464, "profitable": 25465, "dé": 25466, "tir": 25467, "zee": 25468, "omen": 25469, "pdx": 25470, "grayson": 25471, "hari": 25472, "fixes": 25473, "stabbing": 25474, "swimmer": 25475, "symbols": 25476, "compliments": 25477, "pose": 25478, "functioning": 25479, "thnx": 25480, "gir": 25481, "corporations": 25482, "barlow": 25483, "loe": 25484, "offseason": 25485, "distinctive": 25486, "marvelous": 25487, "nikon": 25488, "enrique": 25489, "kyu": 25490, "jaws": 25491, "amoto": 25492, "lombar": 25493, "travelblogger": 25494, "fah": 25495, "ourism": 25496, "tristan": 25497, "soe": 25498, "cease": 25499, "ðŁıħ": 25500, "zac": 25501, "mckenzie": 25502, "taxpayers": 25503, "swimsuit": 25504, "blo": 25505, "lesley": 25506, "kansas": 25507, "wks": 25508, "kiel": 25509, "provoking": 25510, "myles": 25511, "string": 25512, "kangaroo": 25513, "galactic": 25514, "fifth": 25515, "ske": 25516, "weir": 25517, "llis": 25518, "matory": 25519, "ðŁĩ¿": 25520, "unci": 25521, "reproductive": 25522, "rooting": 25523, "tides": 25524, "gadget": 25525, "..........": 25526, "alexander": 25527, "bowler": 25528, "screw": 25529, "apolog": 25530, "erika": 25531, "walters": 25532, "shetty": 25533, "lane": 25534, "banter": 25535, "asant": 25536, "meso": 25537, "vain": 25538, "\"\"\"": 25539, "usi": 25540, "ferdin": 25541, "accomplish": 25542, "mansfield": 25543, "bombar": 25544, "collaborating": 25545, "clap": 25546, "iture": 25547, "sda": 25548, "smoky": 25549, "nak": 25550, "imperson": 25551, "carla": 25552, "comra": 25553, "burgl": 25554, "loco": 25555, "ties": 25556, "inhi": 25557, "tracey": 25558, "seis": 25559, "disser": 25560, "rrrr": 25561, "dray": 25562, "protect": 25563, "corona": 25564, "hunger": 25565, "cken": 25566, "celi": 25567, "troubled": 25568, "predators": 25569, "fictional": 25570, "shaved": 25571, "richest": 25572, "metaboli": 25573, "fulham": 25574, "grooming": 25575, "monochrome": 25576, "wasting": 25577, "asco": 25578, "aste": 25579, "tista": 25580, "remedies": 25581, "ungsoo": 25582, "southend": 25583, "permanently": 25584, "bumble": 25585, "procrastin": 25586, "identical": 25587, "practically": 25588, "mascul": 25589, "suke": 25590, "assured": 25591, "valerie": 25592, "deviant": 25593, "grizzlies": 25594, 
"thier": 25595, "pura": 25596, "nepal": 25597, "notts": 25598, "bilateral": 25599, "spoil": 25600, "carmel": 25601, "cinematic": 25602, "phl": 25603, "nifty": 25604, "mao": 25605, "hypocri": 25606, "laser": 25607, "pantry": 25608, "mathematical": 25609, "elisa": 25610, "coordination": 25611, "belmont": 25612, "ait": 25613, "radiant": 25614, "boiler": 25615, "mang": 25616, "fag": 25617, "crc": 25618, "hams": 25619, "brin": 25620, "â¬ĩï¸ı": 25621, "familia": 25622, "âĿ£": 25623, "saber": 25624, "rupert": 25625, "ggan": 25626, "ritz": 25627, "mich": 25628, "salford": 25629, "levi": 25630, "gral": 25631, "ðŁĴ¤": 25632, "nino": 25633, "ced": 25634, "businessman": 25635, "ultr": 25636, "simply": 25637, "compression": 25638, "pains": 25639, "halt": 25640, "ë°©íĥĦ": 25641, "landscaping": 25642, "nf": 25643, "crooked": 25644, "erd": 25645, "ittin": 25646, "ddleston": 25647, "surpassed": 25648, "inoa": 25649, "dag": 25650, "blen": 25651, "extending": 25652, "ating": 25653, "algae": 25654, "baller": 25655, "umar": 25656, "snooker": 25657, "collu": 25658, "flown": 25659, "thub": 25660, "ridiculously": 25661, "kish": 25662, "ople": 25663, "dire": 25664, "asser": 25665, "aristo": 25666, "sciss": 25667, "hating": 25668, "trouble": 25669, "sylvia": 25670, "succul": 25671, "plots": 25672, "sincerely": 25673, "aler": 25674, "laureate": 25675, "brack": 25676, "attn": 25677, "rifles": 25678, "meto": 25679, "collectible": 25680, "cuomo": 25681, "contestant": 25682, "consistency": 25683, "antz": 25684, "ranges": 25685, "abigail": 25686, "deb": 25687, "minister": 25688, "growers": 25689, "anoo": 25690, "hoover": 25691, "dreamer": 25692, "nucle": 25693, "research": 25694, "miy": 25695, "shahid": 25696, "mav": 25697, "dhoni": 25698, "cini": 25699, "doj": 25700, "hindus": 25701, "partying": 25702, "dali": 25703, "alonso": 25704, "informal": 25705, "clarkson": 25706, "itton": 25707, "kian": 25708, "cityo": 25709, "mori": 25710, "lasted": 25711, "aspen": 25712, "library": 25713, "suspici": 25714, "quat": 25715, "denial": 25716, "folder": 25717, "chori": 25718, "sweeping": 25719, "enix": 25720, "ðŁįĤ": 25721, "ØŃ": 25722, "nascar": 25723, "handmadehour": 25724, "moul": 25725, "heatwave": 25726, "emer": 25727, "examine": 25728, "ibn": 25729, "grind": 25730, "pov": 25731, "tionist": 25732, "mbo": 25733, "sheila": 25734, "integrate": 25735, "omes": 25736, "takeaway": 25737, "cerv": 25738, "connie": 25739, "ticket": 25740, "celed": 25741, "bien": 25742, "visually": 25743, "madagascar": 25744, "sorry": 25745, "gui": 25746, "parkrun": 25747, "traits": 25748, "labe": 25749, "poisoning": 25750, "à¥Ģ": 25751, "viable": 25752, "bohemian": 25753, "dentistry": 25754, "bados": 25755, "sprouts": 25756, "masked": 25757, "teddy": 25758, "ðŁĺ·": 25759, "saf": 25760, "saas": 25761, "jiang": 25762, "tight": 25763, "speaker": 25764, "withdrawal": 25765, "bcn": 25766, "assigned": 25767, "classrooms": 25768, "fleming": 25769, "ðŁĴ«": 25770, "supergirl": 25771, "totals": 25772, "tabletop": 25773, "ebooks": 25774, "horizontal": 25775, "craz": 25776, "flush": 25777, "jard": 25778, "cdc": 25779, "erson": 25780, "ãħł": 25781, "greenwood": 25782, "nih": 25783, "cox": 25784, "ada": 25785, "litre": 25786, "going": 25787, "vicky": 25788, "curved": 25789, "louie": 25790, "grains": 25791, "hye": 25792, "longe": 25793, "remedy": 25794, "trainee": 25795, "sanjay": 25796, "superstars": 25797, "maser": 25798, "manu": 25799, "sage": 25800, "whl": 25801, "ðŁĺĤðŁĺŃ": 25802, "ðŁijįðŁı»": 25803, "msd": 25804, "enz": 25805, "rabhu": 25806, "joo": 25807, "ghu": 
25808, "acer": 25809, "epo": 25810, "resurrection": 25811, "justicefor": 25812, "blended": 25813, "moda": 25814, "avalanche": 25815, "francesco": 25816, "respective": 25817, "gs": 25818, "yeast": 25819, "welch": 25820, "devotion": 25821, "getin": 25822, "atheism": 25823, "amic": 25824, "carolyn": 25825, "loc": 25826, "ldnont": 25827, "avec": 25828, "usda": 25829, "legged": 25830, "bravery": 25831, "blower": 25832, "cowboy": 25833, "heh": 25834, "stible": 25835, "buffal": 25836, "channel": 25837, "runchat": 25838, "âĺķï¸ı": 25839, "ideology": 25840, "bestseller": 25841, "yoo": 25842, "peanu": 25843, "bonne": 25844, "felic": 25845, "edison": 25846, "fractu": 25847, "narendra": 25848, "ppets": 25849, "seymour": 25850, "riviera": 25851, "hector": 25852, "necessarily": 25853, "bianca": 25854, "societies": 25855, "thebest": 25856, "wg": 25857, "sentences": 25858, "wink": 25859, "vaccines": 25860, "palooza": 25861, "jamming": 25862, "asf": 25863, "mpus": 25864, "agreements": 25865, "eck": 25866, "bac": 25867, "honore": 25868, "compul": 25869, "wildcat": 25870, "imposed": 25871, "yoga": 25872, "hudson": 25873, "canceled": 25874, "lich": 25875, "fuzzy": 25876, "esque": 25877, "chuk": 25878, "wvu": 25879, "sek": 25880, "flipping": 25881, "rhon": 25882, "wished": 25883, "wha": 25884, "capability": 25885, "lenovo": 25886, "ìĨĮëħĦëĭ": 25887, "vivo": 25888, "tvd": 25889, "nora": 25890, "silk": 25891, "pasadena": 25892, "yosemite": 25893, "valuation": 25894, "clocks": 25895, "uber": 25896, "mrc": 25897, "darkest": 25898, "aubre": 25899, "sso": 25900, "belly": 25901, "wrestlers": 25902, "killin": 25903, "louder": 25904, "buckley": 25905, "geel": 25906, "adon": 25907, "uns": 25908, "appealing": 25909, "ðŁij¯": 25910, "semitism": 25911, "listens": 25912, "fitz": 25913, "ãĥ³ãĥ": 25914, "nylon": 25915, "arty": 25916, "seemingly": 25917, "hala": 25918, "suited": 25919, "ety": 25920, "sheds": 25921, "muffins": 25922, "apric": 25923, "uments": 25924, "uta": 25925, "jammu": 25926, "chelseafc": 25927, "starz": 25928, "yoko": 25929, "root": 25930, "cleansing": 25931, "diar": 25932, "pioneering": 25933, "iheartradio": 25934, "digiti": 25935, "findyour": 25936, "cano": 25937, "ðŁĴİ": 25938, "zol": 25939, "spacecraft": 25940, "sixers": 25941, "moisturi": 25942, "bile": 25943, "tists": 25944, "horton": 25945, "ranging": 25946, "columbi": 25947, "meteoro": 25948, "sentiment": 25949, "epl": 25950, "footh": 25951, "textbook": 25952, "drainage": 25953, "rly": 25954, "scue": 25955, "imrankhan": 25956, "ðŁĴ¸": 25957, "margarita": 25958, "eddy": 25959, "predicts": 25960, "gamergate": 25961, "advise": 25962, "growthhacking": 25963, "loveyou": 25964, "ugand": 25965, "vf": 25966, "benghazi": 25967, "slater": 25968, "newor": 25969, "chel": 25970, "independenceday": 25971, "pnp": 25972, "cullen": 25973, "hoodies": 25974, "numbered": 25975, "britt": 25976, "tsa": 25977, "kltu": 25978, "sages": 25979, "momo": 25980, "oneplus": 25981, "coll": 25982, "guts": 25983, "wta": 25984, "mesmeri": 25985, "enhancing": 25986, "chiroprac": 25987, "jis": 25988, "teenagers": 25989, "mone": 25990, "constellation": 25991, "sweepstakes": 25992, "eze": 25993, "slovakia": 25994, "laye": 25995, "pearce": 25996, "waver": 25997, "pogba": 25998, "kron": 25999, "surgeons": 26000, "marx": 26001, "tid": 26002, "gga": 26003, "descend": 26004, "pours": 26005, "uprising": 26006, "walla": 26007, "sabbath": 26008, "bachelore": 26009, "mackin": 26010, "kam": 26011, "peterborough": 26012, "hora": 26013, "ðŁĮŁðŁĮŁ": 26014, "thinkbig": 26015, "rj": 26016, "hydrau": 
26017, "spal": 26018, "universit": 26019, "ðŁıī": 26020, "mailonline": 26021, "leagueof": 26022, "tenants": 26023, "wally": 26024, "lance": 26025, "heavens": 26026, "ddr": 26027, "bolts": 26028, "amir": 26029, "iphone": 26030, "cigar": 26031, "endu": 26032, "rei": 26033, "elabor": 26034, "ringing": 26035, "johnson": 26036, "characteristics": 26037, "saloon": 26038, "algorithms": 26039, "talkin": 26040, "mtn": 26041, "dive": 26042, "regionals": 26043, "ffice": 26044, "hati": 26045, "deviantart": 26046, "sotto": 26047, "shiro": 26048, "lama": 26049, "kwe": 26050, "faded": 26051, "porting": 26052, "tummy": 26053, "estates": 26054, "buenos": 26055, "ð٦ģ": 26056, "believer": 26057, "penetr": 26058, "darn": 26059, "spite": 26060, "canopy": 26061, "fashioni": 26062, "tilla": 26063, "petals": 26064, "elijah": 26065, "brawl": 26066, "martyr": 26067, "ë°©íĥĦìĨĮëħĦëĭ": 26068, "midtown": 26069, "erich": 26070, "dapper": 26071, "smtown": 26072, "megam": 26073, "www": 26074, "lele": 26075, "ons": 26076, "catfish": 26077, "firth": 26078, "fossilfriday": 26079, "ballpark": 26080, "thaw": 26081, "potent": 26082, "illie": 26083, "creep": 26084, "carp": 26085, "soap": 26086, "gundam": 26087, "infec": 26088, "yyyyy": 26089, "न": 26090, "zag": 26091, "ritt": 26092, "calculator": 26093, "boca": 26094, "oko": 26095, "toad": 26096, "threaten": 26097, "refined": 26098, "olympic": 26099, "accomplishment": 26100, "bacterial": 26101, "aji": 26102, "tatum": 26103, "feliz": 26104, "sheed": 26105, "jat": 26106, "thic": 26107, "jamal": 26108, "ðĿĺ": 26109, "lina": 26110, "ðŁIJ¯": 26111, "joking": 26112, "yotpo": 26113, "pinch": 26114, "akron": 26115, "herb": 26116, "motivation": 26117, "lia": 26118, "hostage": 26119, "creek": 26120, "gamble": 26121, "russell": 26122, "patti": 26123, "fotos": 26124, "cpc": 26125, "broken": 26126, "backthe": 26127, "clays": 26128, "umm": 26129, "stockton": 26130, "maternal": 26131, "ür": 26132, "lakel": 26133, "century": 26134, "bek": 26135, "infected": 26136, "ม": 26137, "smackdown": 26138, "manned": 26139, "tahoe": 26140, "smes": 26141, "basa": 26142, "sula": 26143, "augusta": 26144, ".*": 26145, "rohingya": 26146, "greed": 26147, "counselor": 26148, "silhouette": 26149, "gravit": 26150, "clause": 26151, "'-": 26152, "bobc": 26153, "occasions": 26154, "nowadays": 26155, "dictat": 26156, "beard": 26157, "nally": 26158, "brightest": 26159, "kabul": 26160, "incindia": 26161, "dhanush": 26162, "archaeological": 26163, "cheape": 26164, "mizzou": 26165, "dhi": 26166, "ovski": 26167, "baxter": 26168, "assemble": 26169, "â": 26170, "gigi": 26171, "acam": 26172, "wisely": 26173, "hazard": 26174, "northampton": 26175, "âľĪï¸ı": 26176, "meth": 26177, "blasting": 26178, "reunite": 26179, "mulus": 26180, "alizes": 26181, "tread": 26182, "mila": 26183, "edward": 26184, "kova": 26185, "pesto": 26186, "ðŁij¶": 26187, "vitz": 26188, "hydraulic": 26189, "refurbished": 26190, "motel": 26191, "isabella": 26192, "homme": 26193, "severance": 26194, "uphol": 26195, "miserable": 26196, "fari": 26197, "latter": 26198, "efer": 26199, "crackers": 26200, "esl": 26201, "acio": 26202, "yyj": 26203, "inan": 26204, "ecb": 26205, "zind": 26206, "panas": 26207, "trucking": 26208, "reed": 26209, "shaker": 26210, "burgess": 26211, "empire": 26212, "agnes": 26213, "nington": 26214, "artworks": 26215, "frs": 26216, "tile": 26217, "biome": 26218, "eun": 26219, "chong": 26220, "americana": 26221, "godfather": 26222, "goblin": 26223, "ishi": 26224, "!).": 26225, "tempted": 26226, "genomics": 26227, "mandate": 26228, "cky": 
26229, "ðŁĴĻðŁĴĽ": 26230, "somali": 26231, "brandy": 26232, "inven": 26233, "spokesperson": 26234, "pcb": 26235, "yuan": 26236, "hg": 26237, "faz": 26238, "starwars": 26239, "rowan": 26240, "bluegrass": 26241, "dong": 26242, "dday": 26243, "trinidad": 26244, "erton": 26245, "banning": 26246, "retention": 26247, "cured": 26248, "toberfest": 26249, "reset": 26250, "weis": 26251, "detached": 26252, "behindthescenes": 26253, "immunity": 26254, "pha": 26255, "bray": 26256, "ðŁij½": 26257, "rancho": 26258, "ramsay": 26259, "estonia": 26260, "ndtv": 26261, "].": 26262, "cabaret": 26263, "taro": 26264, "dv": 26265, "showcases": 26266, "plum": 26267, "ðŁij¸": 26268, "sonoma": 26269, "prepa": 26270, "memorab": 26271, "estu": 26272, "driveway": 26273, "ules": 26274, "magnus": 26275, "xr": 26276, "nnn": 26277, "muchas": 26278, "enge": 26279, "streamed": 26280, "forestry": 26281, "audiobook": 26282, "troy": 26283, "reckless": 26284, "kilom": 26285, "ruler": 26286, "rak": 26287, "procession": 26288, "ions": 26289, "poole": 26290, "noctur": 26291, "whs": 26292, "farmhouse": 26293, "pera": 26294, "parme": 26295, "hypocrisy": 26296, "sics": 26297, "vant": 26298, "cask": 26299, "holistic": 26300, "aust": 26301, "п": 26302, "indo": 26303, "ðŁij©âĢį": 26304, "diso": 26305, "dispatch": 26306, "olsen": 26307, "makeit": 26308, "ennis": 26309, "centre": 26310, "arrange": 26311, "ðŁĮ¼": 26312, "salted": 26313, "easiest": 26314, "fate": 26315, "regatta": 26316, "mozz": 26317, "acan": 26318, "sini": 26319, "gically": 26320, "chops": 26321, "chicken": 26322, "workin": 26323, "hagg": 26324, "involve": 26325, "weeds": 26326, "bookday": 26327, "wakeup": 26328, "kyr": 26329, "michelin": 26330, "fuss": 26331, "rejuven": 26332, "vacancies": 26333, "incarcer": 26334, "mst": 26335, "scents": 26336, "sovereign": 26337, "kicker": 26338, "à§": 26339, "bod": 26340, "âĢĶ>": 26341, "sah": 26342, "mobil": 26343, "shropshire": 26344, "ophone": 26345, "dresser": 26346, "missuni": 26347, "hepburn": 26348, "imo": 26349, "foliage": 26350, "diagnostic": 26351, "assan": 26352, "cycling": 26353, "guilt": 26354, "csa": 26355, "puertorico": 26356, "winelover": 26357, "wakefield": 26358, "doggy": 26359, "khe": 26360, "papp": 26361, "cog": 26362, "allot": 26363, "cuck": 26364, "poetic": 26365, "mio": 26366, "revit": 26367, "magician": 26368, "ç¥": 26369, "antenna": 26370, "westwood": 26371, "mberg": 26372, "luxe": 26373, "oatmeal": 26374, "ج": 26375, "teat": 26376, "ffee": 26377, "searches": 26378, "lly": 26379, "pluto": 26380, "elon": 26381, "lettering": 26382, "innocence": 26383, "fai": 26384, "annon": 26385, "telangana": 26386, "mait": 26387, "neural": 26388, "canni": 26389, "aroma": 26390, "astor": 26391, "fex": 26392, "cocac": 26393, "monetary": 26394, "fent": 26395, "unsure": 26396, "'@": 26397, "indirec": 26398, "tehran": 26399, "isolation": 26400, "libs": 26401, "makeup": 26402, "mercedes": 26403, "ffy": 26404, "hetero": 26405, "deo": 26406, "scom": 26407, "cursed": 26408, "veteransday": 26409, "frankenstein": 26410, "shrews": 26411, "deco": 26412, "geese": 26413, "leftover": 26414, "hadid": 26415, "variable": 26416, "academics": 26417, "carolin": 26418, "undergoing": 26419, "variation": 26420, "nah": 26421, "ssier": 26422, "gamersunite": 26423, "pursuing": 26424, "emerged": 26425, "llers": 26426, "controlling": 26427, "roaring": 26428, "meteor": 26429, "volt": 26430, "dawgs": 26431, "beaver": 26432, "islife": 26433, "bathrooms": 26434, "acional": 26435, "prevent": 26436, "lakedistrict": 26437, "inals": 26438, "yani": 26439, 
"grabbing": 26440, "sacks": 26441, "lez": 26442, "sway": 26443, "kool": 26444, "times": 26445, "klopp": 26446, "lade": 26447, "concord": 26448, "resulted": 26449, "revive": 26450, "reconciliation": 26451, "oland": 26452, "azz": 26453, "giro": 26454, "mandarin": 26455, "deen": 26456, "nutritional": 26457, "iscoming": 26458, "vani": 26459, "awwww": 26460, "derived": 26461, "loveyour": 26462, "stopthe": 26463, "shouting": 26464, "novak": 26465, "ðŁĻĮðŁı¾": 26466, "loaf": 26467, "displaying": 26468, "sundaywith": 26469, "maguire": 26470, "cheri": 26471, "ðŁıŁ": 26472, "rematch": 26473, "quic": 26474, "Ú©": 26475, "yin": 26476, "ðŁĺ¹": 26477, "ilive": 26478, "zip": 26479, "ourke": 26480, "downloads": 26481, "swat": 26482, "mississ": 26483, "carers": 26484, "tment": 26485, "property": 26486, "hahahahahaha": 26487, "gibbs": 26488, "surrey": 26489, "arise": 26490, "ticism": 26491, "stia": 26492, "irling": 26493, "frog": 26494, "cose": 26495, "bassist": 26496, "foreig": 26497, "leau": 26498, "pillows": 26499, "holla": 26500, "elie": 26501, "disclosure": 26502, "peanuts": 26503, "intech": 26504, "wwc": 26505, "plunge": 26506, "triumph": 26507, "cori": 26508, "slippers": 26509, "ðŁĻıðŁĻı": 26510, "neutrality": 26511, "mare": 26512, "hairy": 26513, "gangster": 26514, "humming": 26515, "custard": 26516, "merlin": 26517, "alea": 26518, "sby": 26519, "damp": 26520, "mohan": 26521, "verbal": 26522, "jst": 26523, "gutted": 26524, "bjor": 26525, "unfinished": 26526, "ðŁĩ¯ðŁĩµ": 26527, "unhappy": 26528, "âļ«ï¸ı": 26529, "bypass": 26530, "atsu": 26531, "fischer": 26532, "sav": 26533, "africans": 26534, "reuse": 26535, "midway": 26536, "demolished": 26537, "gerrard": 26538, "hercules": 26539, "ÄŁ": 26540, "medicines": 26541, "clicking": 26542, "surround": 26543, "joong": 26544, "waving": 26545, "tribes": 26546, "wetlands": 26547, "officiel": 26548, "arguing": 26549, "lle": 26550, "dova": 26551, "suzy": 26552, "clubhouse": 26553, "negro": 26554, "obtain": 26555, "gao": 26556, "glance": 26557, "assist": 26558, "chos": 26559, "ãĤ¢": 26560, "âĺķ": 26561, "adrid": 26562, "occurs": 26563, "stans": 26564, "pardon": 26565, "liveli": 26566, "employed": 26567, "revisit": 26568, "ffxiv": 26569, "bble": 26570, "nearing": 26571, "miner": 26572, "ðŁĺ¹": 26573, "giovanni": 26574, "upto": 26575, "marvell": 26576, "marse": 26577, "towels": 26578, "cbn": 26579, "engineered": 26580, "yelling": 26581, "spartan": 26582, "sians": 26583, "ðŁĻĮðŁı¼": 26584, "sev": 26585, "coyote": 26586, "stadi": 26587, "tcm": 26588, "appen": 26589, "shenanigans": 26590, "openaccess": 26591, "soaked": 26592, "masqu": 26593, "levine": 26594, "strokes": 26595, "lk": 26596, "apartheid": 26597, "hiphop": 26598, "chardon": 26599, "maymay": 26600, "haasan": 26601, "stripped": 26602, "fro": 26603, "scription": 26604, "fton": 26605, "hf": 26606, "prisons": 26607, "marshal": 26608, "ķãĤ": 26609, "ancho": 26610, "compromise": 26611, "classification": 26612, "buzzfeed": 26613, "bbloggers": 26614, "deserving": 26615, ")/": 26616, "sway": 26617, "obo": 26618, "campers": 26619, "podernfamily": 26620, "poured": 26621, "brie": 26622, "squirrels": 26623, "seize": 26624, ":#": 26625, "lek": 26626, "timb": 26627, "stacy": 26628, "nasdaq": 26629, "repeatedly": 26630, "brat": 26631, "mighty": 26632, "competitor": 26633, "mahone": 26634, "desi": 26635, "oke": 26636, "bmw": 26637, "shie": 26638, "fcb": 26639, "cheapest": 26640, "minimalist": 26641, "paramount": 26642, "nate": 26643, "haras": 26644, "insanity": 26645, "lateral": 26646, "mentality": 26647, "mozam": 26648, 
"tapped": 26649, "yadav": 26650, "usp": 26651, "bway": 26652, "theod": 26653, "bilt": 26654, "raids": 26655, "empress": 26656, "adapted": 26657, "patron": 26658, "nutshell": 26659, "agra": 26660, "beaded": 26661, "sundaywithmarsha": 26662, "viking": 26663, "proceed": 26664, "maintained": 26665, "thinkbigsundaywithmarsha": 26666, "snes": 26667, "musica": 26668, "tower": 26669, "chab": 26670, "bok": 26671, "smt": 26672, "insult": 26673, "harvesting": 26674, "window": 26675, "ruther": 26676, "beige": 26677, "decal": 26678, "indicate": 26679, "mailing": 26680, "rift": 26681, "pole": 26682, "anderson": 26683, "choral": 26684, "spride": 26685, "lili": 26686, "evelyn": 26687, "imrankhanpti": 26688, "....\"": 26689, "kered": 26690, "undp": 26691, "waterfalls": 26692, "sears": 26693, "lemans": 26694, "worldseries": 26695, "riel": 26696, "anie": 26697, "appar": 26698, "scorers": 26699, "lamp": 26700, "athan": 26701, "physicians": 26702, "quinoa": 26703, "refusing": 26704, "vuitton": 26705, "unleash": 26706, "sla": 26707, "pati": 26708, "shouts": 26709, "intentions": 26710, "foamed": 26711, "european": 26712, "neighborhoods": 26713, "meer": 26714, "manson": 26715, "duh": 26716, "brat": 26717, "cones": 26718, "bowl": 26719, "kazakhstan": 26720, "ि": 26721, "inappropriate": 26722, "delhi": 26723, "ketchup": 26724, "fulton": 26725, "sys": 26726, "consult": 26727, "garfield": 26728, "togo": 26729, "fml": 26730, "fled": 26731, "bds": 26732, "facilitate": 26733, "reebok": 26734, "selfie": 26735, "elevate": 26736, "activate": 26737, "bible": 26738, "cawx": 26739, "bys": 26740, "camille": 26741, "syou": 26742, "skool": 26743, "hert": 26744, "wbc": 26745, "pledges": 26746, "recorder": 26747, "posh": 26748, "acre": 26749, "soaking": 26750, "matil": 26751, "vsco": 26752, "shootings": 26753, "plar": 26754, "econ": 26755, "ðŁĻĮðŁı»": 26756, "rashid": 26757, "ubi": 26758, "ðŁ¤¤": 26759, "swinging": 26760, "wipe": 26761, "raptor": 26762, "msu": 26763, "musicvideo": 26764, "durham": 26765, "attic": 26766, "aparty": 26767, "fetus": 26768, "activation": 26769, "aaz": 26770, "motivate": 26771, "ðŁĴķðŁĴķðŁĴķ": 26772, "jal": 26773, "म": 26774, "agon": 26775, "scheer": 26776, "stalker": 26777, "foster": 26778, "azzo": 26779, "telegram": 26780, "vigor": 26781, "slaugh": 26782, "screenshots": 26783, "entrepreneu": 26784, "kristin": 26785, "intention": 26786, "chilli": 26787, "fraction": 26788, "dona": 26789, "gea": 26790, "tcu": 26791, "site": 26792, "lak": 26793, "emil": 26794, "dnt": 26795, "boro": 26796, "wilkinson": 26797, "recu": 26798, "atoday": 26799, "tanya": 26800, "blanco": 26801, "cdn": 26802, "brilliantly": 26803, "gcc": 26804, "acc": 26805, "evacuated": 26806, "therine": 26807, "denny": 26808, "caitlin": 26809, "shepard": 26810, "pouch": 26811, "handheld": 26812, "southeastern": 26813, "haa": 26814, "ô": 26815, "resolutions": 26816, "ledger": 26817, "srin": 26818, "rar": 26819, "shattered": 26820, "chimney": 26821, "imwith": 26822, "meteor": 26823, "handled": 26824, "rake": 26825, "townsend": 26826, "enhan": 26827, "shipy": 26828, "duct": 26829, "twx": 26830, "inflammatory": 26831, "warhammer": 26832, "theatrical": 26833, "gros": 26834, "skar": 26835, "scotty": 26836, "niel": 26837, "tito": 26838, "tini": 26839, "connection": 26840, "_.": 26841, "goldenglobes": 26842, "shaq": 26843, "ðŁı³ï¸ı": 26844, "hallway": 26845, "fronts": 26846, "effectiveness": 26847, "glaston": 26848, "dhs": 26849, "expi": 26850, "toh": 26851, "cpl": 26852, "scs": 26853, "reo": 26854, "hag": 26855, "resemblance": 26856, "horan": 26857, 
"abusive": 26858, "quer": 26859, "virtue": 26860, "cholester": 26861, "aq": 26862, "shane": 26863, "mce": 26864, "carriers": 26865, "distress": 26866, "rewind": 26867, "¡": 26868, "voodoo": 26869, "intact": 26870, "anno": 26871, "ðŁĺ¤": 26872, "piled": 26873, "adia": 26874, "ãĥ³": 26875, "enow": 26876, "digs": 26877, "lightly": 26878, "goofy": 26879, "turbine": 26880, "governors": 26881, "conte": 26882, "reopen": 26883, "pah": 26884, "ive": 26885, "crafting": 26886, "sweeps": 26887, "jodi": 26888, "ande": 26889, "zucker": 26890, "kawaii": 26891, "oko": 26892, "vai": 26893, "outline": 26894, "kristi": 26895, "tsn": 26896, "inspo": 26897, "quint": 26898, "filthy": 26899, "lynne": 26900, "listeners": 26901, "departing": 26902, "ord": 26903, "tweed": 26904, ",&": 26905, "alek": 26906, "selfish": 26907, "norther": 26908, "recognizes": 26909, "ips": 26910, "bes": 26911, "aed": 26912, "wills": 26913, "peat": 26914, "surroundings": 26915, "monuments": 26916, "aisle": 26917, "becker": 26918, "lav": 26919, "quantity": 26920, "vah": 26921, "helicopters": 26922, "tucked": 26923, "alvarez": 26924, "shape": 26925, "obey": 26926, "additi": 26927, "roadside": 26928, "mite": 26929, "blers": 26930, "epage": 26931, "jau": 26932, "ignorant": 26933, "bins": 26934, "lulu": 26935, "xo": 26936, "cfo": 26937, "eeeee": 26938, "apprenticeship": 26939, "sheffiel": 26940, "toi": 26941, "hok": 26942, "fakenews": 26943, "deploy": 26944, "aidan": 26945, "huskers": 26946, "ãĢİ": 26947, "westbrook": 26948, "mister": 26949, "configur": 26950, "carr": 26951, "fica": 26952, "proceedings": 26953, "haw": 26954, "steak": 26955, "murderer": 26956, "payday": 26957, "ajo": 26958, "pvc": 26959, "donates": 26960, "biaf": 26961, "nomnom": 26962, "beit": 26963, "kali": 26964, "xrp": 26965, "ahmedabad": 26966, "semic": 26967, "chey": 26968, "xtra": 26969, "antwer": 26970, "headlining": 26971, "squares": 26972, "rounded": 26973, "fluore": 26974, "bold": 26975, "disasters": 26976, "amoo": 26977, "generic": 26978, "cranes": 26979, "briefly": 26980, "gig": 26981, "austerity": 26982, "anticipation": 26983, "forti": 26984, "treasurer": 26985, "canny": 26986, "cecil": 26987, "detected": 26988, "checklist": 26989, "ว": 26990, "pamela": 26991, "barbados": 26992, "anfield": 26993, "hearty": 26994, "txlege": 26995, "perenni": 26996, "arrog": 26997, "ingram": 26998, "âĹı": 26999, "tyne": 27000, "spoon": 27001, "ration": 27002, "amba": 27003, "mbe": 27004, "camel": 27005, "hhs": 27006, "yorkshire": 27007, "reflective": 27008, "freaks": 27009, "tok": 27010, "judo": 27011, "particles": 27012, "dubs": 27013, "banjo": 27014, "accreditation": 27015, "proverbs": 27016, "overdose": 27017, "integral": 27018, "guang": 27019, "mcs": 27020, "supercar": 27021, "afb": 27022, "alvin": 27023, "ails": 27024, "xtre": 27025, "staging": 27026, "twent": 27027, "rabbits": 27028, "maro": 27029, "instem": 27030, "doll": 27031, "cray": 27032, "santana": 27033, "bleach": 27034, "minions": 27035, "cheap": 27036, "mant": 27037, "divers": 27038, "catalonia": 27039, "lois": 27040, "matri": 27041, "cougar": 27042, "kayak": 27043, "egre": 27044, "pso": 27045, "aia": 27046, "å®": 27047, "charlton": 27048, "tracked": 27049, "scari": 27050, "pett": 27051, "fwd": 27052, "xin": 27053, "gravel": 27054, "bric": 27055, "biggboss": 27056, "arden": 27057, "hugging": 27058, "palms": 27059, "stv": 27060, "limb": 27061, "themovie": 27062, "handicap": 27063, "rime": 27064, "zai": 27065, "stub": 27066, "india": 27067, "lithuania": 27068, "rhyth": 27069, "pita": 27070, "macedonia": 27071, 
"highered": 27072, "bridget": 27073, "schwarz": 27074, "skelet": 27075, "hikes": 27076, "antarctic": 27077, "cps": 27078, "mashup": 27079, "а": 27080, "nell": 27081, "chandra": 27082, "heir": 27083, "anus": 27084, "sheridan": 27085, "mimi": 27086, "museu": 27087, "becca": 27088, "anir": 27089, "barrie": 27090, "diocese": 27091, "comparable": 27092, "ðŁı³ï¸ıâĢį": 27093, "yukon": 27094, "mep": 27095, "hormon": 27096, "meric": 27097, "alf": 27098, "conquered": 27099, "christchurch": 27100, "ðŁĴĻðŁĴĻ": 27101, "hazardous": 27102, "pooh": 27103, "conting": 27104, "retrospective": 27105, "parame": 27106, "nair": 27107, "consor": 27108, "hotra": 27109, "astonishing": 27110, "caterpillar": 27111, "uman": 27112, "tism": 27113, "tvs": 27114, "servic": 27115, "croydon": 27116, "morales": 27117, "cg": 27118, "cum": 27119, "teur": 27120, "scanada": 27121, "sall": 27122, "magnolia": 27123, "elise": 27124, "thour": 27125, "ி": 27126, "agomez": 27127, "phelps": 27128, "ë°©íĥĦìĨĮëħĦëĭ¨": 27129, "whos": 27130, "weaving": 27131, "sisd": 27132, "proposes": 27133, "crows": 27134, "presale": 27135, "economies": 27136, "bernardo": 27137, "shahid": 27138, "airshow": 27139, "mccann": 27140, "horticul": 27141, "nrl": 27142, "duel": 27143, "mongolia": 27144, "toulou": 27145, "requirement": 27146, "structured": 27147, "edi": 27148, "olives": 27149, "hea": 27150, "cuter": 27151, "к": 27152, "enthusiast": 27153, "harriet": 27154, "dominion": 27155, "submer": 27156, "ðŁįĥ": 27157, "saab": 27158, "nesburg": 27159, "moff": 27160, "defended": 27161, "burt": 27162, "rewarded": 27163, "goldman": 27164, "optics": 27165, "khalid": 27166, "households": 27167, "buckets": 27168, "cecil": 27169, "chess": 27170, "substantial": 27171, "efl": 27172, "operation": 27173, "evaluate": 27174, "stn": 27175, "recession": 27176, "lll": 27177, "tomas": 27178, "truths": 27179, "akbar": 27180, "swords": 27181, "pact": 27182, "embarrass": 27183, "hao": 27184, "ayurve": 27185, "scripture": 27186, "nycc": 27187, "opt": 27188, "diameter": 27189, "scented": 27190, "organizers": 27191, "relat": 27192, "hae": 27193, "dreamers": 27194, "dese": 27195, "ðŁĮ»": 27196, "restricted": 27197, "nale": 27198, "rhp": 27199, "dolan": 27200, "munster": 27201, "haired": 27202, "consultants": 27203, "joints": 27204, "humil": 27205, "dill": 27206, "relentless": 27207, "té": 27208, "afil": 27209, "utilities": 27210, "japanese": 27211, "condemn": 27212, "petite": 27213, "collide": 27214, "qf": 27215, "peaches": 27216, "courier": 27217, "lore": 27218, "âĺİï¸ı": 27219, "reliability": 27220, "chuk": 27221, "ðŁĻĥ": 27222, "stures": 27223, "gether": 27224, "hostel": 27225, "bier": 27226, "-_-": 27227, "âĩ": 27228, "eze": 27229, "tailo": 27230, "dient": 27231, "bluff": 27232, "chuffed": 27233, "pilip": 27234, "monarch": 27235, "eem": 27236, "buchan": 27237, "bick": 27238, "opau": 27239, "kups": 27240, "ย": 27241, "pistons": 27242, "spins": 27243, "mand": 27244, "cest": 27245, "burne": 27246, "vile": 27247, "cherries": 27248, "beckett": 27249, "needles": 27250, "panch": 27251, "ëĤ": 27252, "hahah": 27253, "troubles": 27254, "insists": 27255, "doyou": 27256, "gmc": 27257, "mortar": 27258, "delegate": 27259, "inn": 27260, "ganda": 27261, "sinatra": 27262, "त": 27263, "speeding": 27264, "pupil": 27265, "premises": 27266, "alignment": 27267, "pikach": 27268, "asus": 27269, "jalan": 27270, "ص": 27271, "limestone": 27272, "folkl": 27273, "parmesan": 27274, "ceil": 27275, "moy": 27276, "shawnmendes": 27277, "acup": 27278, "hust": 27279, "otes": 27280, "medina": 27281, "madi": 27282, 
"gtav": 27283, "censorship": 27284, "arg": 27285, "sweeney": 27286, "sykes": 27287, "colo": 27288, "footsteps": 27289, "canned": 27290, "advance": 27291, "gtaonline": 27292, "healthyliving": 27293, "ðŁį¾": 27294, "aig": 27295, "pality": 27296, "ocs": 27297, "hebrew": 27298, "imminent": 27299, "berkshire": 27300, "jeremiah": 27301, "outgoing": 27302, "baker": 27303, "entrata": 27304, "maids": 27305, "groves": 27306, "boc": 27307, "adel": 27308, "mfw": 27309, "conscience": 27310, "armys": 27311, "nutella": 27312, "contestalert": 27313, "novelist": 27314, "lah": 27315, "banker": 27316, "marquez": 27317, "ðŁı¡": 27318, "toff": 27319, "outage": 27320, "grp": 27321, "ðŁĺŃðŁĺŃðŁĺŃðŁĺŃ": 27322, "muscle": 27323, "dudley": 27324, "nvidia": 27325, "midi": 27326, "muni": 27327, "essays": 27328, "datac": 27329, "carter": 27330, "ร": 27331, "tans": 27332, "ives": 27333, "publications": 27334, "aler": 27335, "okwx": 27336, "ilu": 27337, "cutt": 27338, "harp": 27339, "outlaw": 27340, "lutheran": 27341, "brill": 27342, "bolic": 27343, "dowell": 27344, "greenland": 27345, "besties": 27346, "pathi": 27347, "payton": 27348, "guest": 27349, "harden": 27350, "ðŁ¤©": 27351, "anned": 27352, "evacuation": 27353, "poised": 27354, "mcder": 27355, "bhan": 27356, "oi": 27357, "envelope": 27358, "cid": 27359, "cavi": 27360, "tapas": 27361, "bookreview": 27362, "greyhound": 27363, "âĻª": 27364, "feud": 27365, "lungs": 27366, "forte": 27367, "raider": 27368, "ffer": 27369, "onix": 27370, "depend": 27371, "ynwa": 27372, "relating": 27373, "devs": 27374, "ðŁĴIJ": 27375, "acquires": 27376, "dha": 27377, "jyo": 27378, "privati": 27379, "canine": 27380, "kb": 27381, "crab": 27382, "sardin": 27383, "imagining": 27384, "kj": 27385, "empor": 27386, "downhill": 27387, "nez": 27388, "taeyeon": 27389, "nickimin": 27390, "gbp": 27391, "àµ": 27392, "wap": 27393, "secco": 27394, "mashed": 27395, "ðŁĴ¥ðŁĴ¥": 27396, "augustine": 27397, "dissol": 27398, "dictator": 27399, "âĵ": 27400, "viper": 27401, "edfringe": 27402, "vaux": 27403, "hardwork": 27404, "booklet": 27405, "nox": 27406, "chiff": 27407, "ðŁĴ¨": 27408, "observations": 27409, "xboxone": 27410, "usher": 27411, "keer": 27412, "lup": 27413, "dallas": 27414, "calgary": 27415, "madra": 27416, "dious": 27417, "kbs": 27418, "woodward": 27419, "heroine": 27420, "lumber": 27421, "seaworld": 27422, "ows": 27423, "mcke": 27424, "maverick": 27425, "gula": 27426, "crossroads": 27427, "fang": 27428, "sade": 27429, "nikol": 27430, "cheetah": 27431, "mec": 27432, "ppg": 27433, "erick": 27434, "ðŁİµ": 27435, "toxic": 27436, "bjj": 27437, "viola": 27438, "spire": 27439, "chino": 27440, "travis": 27441, "institutional": 27442, "haas": 27443, "lowry": 27444, "wac": 27445, "eae": 27446, "humid": 27447, "mpton": 27448, "ruck": 27449, "jew": 27450, "cine": 27451, "zimmer": 27452, "sef": 27453, "bharat": 27454, "frees": 27455, "aamir": 27456, "ðŁĴħ": 27457, "zinc": 27458, "wane": 27459, "multiplayer": 27460, "royalwedding": 27461, "eel": 27462, "precipit": 27463, "query": 27464, "kimberly": 27465, "isabel": 27466, "fulfill": 27467, "igan": 27468, "vaul": 27469, "pane": 27470, "scy": 27471, "digit": 27472, "gunn": 27473, "utah": 27474, "dogday": 27475, "fion": 27476, "xiaomi": 27477, "dac": 27478, "elast": 27479, "chavez": 27480, "roblo": 27481, "gine": 27482, "tenth": 27483, "abh": 27484, "keto": 27485, "hurdle": 27486, "nadia": 27487, "memorabilia": 27488, "habs": 27489, "quan": 27490, "hw": 27491, "hvac": 27492, "pixar": 27493, "eccle": 27494, "kramer": 27495, "accuses": 27496, "ðŁĴļðŁĴļ": 27497, 
"perse": 27498, "meantime": 27499, "wahl": 27500, "atletico": 27501, "âĢ¢âĢ¢âĢ¢âĢ¢": 27502, "ottoman": 27503, "novo": 27504, "kus": 27505, "connected": 27506, "trusts": 27507, "dmv": 27508, "spencer": 27509, "rahulg": 27510, "dove": 27511, "stokes": 27512, "bologna": 27513, "enthusiasts": 27514, "ê": 27515, "rockstargames": 27516, "tedcruz": 27517, "duras": 27518, "sacked": 27519, "latex": 27520, "immersive": 27521, "cert": 27522, "lucin": 27523, "principals": 27524, "fares": 27525, "sails": 27526, "farn": 27527, "ament": 27528, "saffron": 27529, "quentin": 27530, "checkpoint": 27531, "ferris": 27532, "excur": 27533, "ðŁijīðŁı¼": 27534, "bailey": 27535, "seh": 27536, "terre": 27537, "madam": 27538, "sband": 27539, "wanderers": 27540, "cumberbatch": 27541, "yyc": 27542, "digitally": 27543, "blackandwhitephotography": 27544, "rollin": 27545, "moroccan": 27546, "ðŁĮħ": 27547, "dinner": 27548, "dwell": 27549, "toom": 27550, "mye": 27551, "ezra": 27552, "cpfc": 27553, "warhol": 27554, "meer": 27555, "jonah": 27556, "noaa": 27557, "sgate": 27558, "soon": 27559, "secular": 27560, "gating": 27561, "tio": 27562, "driver": 27563, "sissy": 27564, "assange": 27565, "tath": 27566, "edmund": 27567, "bobcats": 27568, "raji": 27569, "postage": 27570, "studs": 27571, "mgm": 27572, "kato": 27573, "edinburgh": 27574, "meetthe": 27575, "shirt": 27576, "faa": 27577, "mensfashion": 27578, "spreads": 27579, "wim": 27580, "carts": 27581, "phoebe": 27582, "jars": 27583, "botswana": 27584, "ÙĤ": 27585, "edwar": 27586, "skar": 27587, "rive": 27588, "gusty": 27589, "ctv": 27590, "ferdinand": 27591, "sutherland": 27592, "nickiminaj": 27593, "kv": 27594, "sius": 27595, "beech": 27596, "rez": 27597, "desires": 27598, "onial": 27599, "campo": 27600, "quarry": 27601, "lorraine": 27602, "gilmore": 27603, "iggy": 27604, "µï¸ı": 27605, "hopping": 27606, "aviz": 27607, "ðŁĮº": 27608, "unisex": 27609, "dedicate": 27610, "attitudes": 27611, "steer": 27612, "junkie": 27613, "railway": 27614, "yb": 27615, "whisper": 27616, "keyan": 27617, "kus": 27618, "jug": 27619, "dix": 27620, "ains": 27621, "summon": 27622, "ovich": 27623, "syed": 27624, "herald": 27625, "maison": 27626, "meded": 27627, "wildflower": 27628, "mainland": 27629, "risky": 27630, "rukh": 27631, "overlooked": 27632, "kic": 27633, "destroys": 27634, "naman": 27635, "kip": 27636, "zano": 27637, "championsleague": 27638, "bandit": 27639, "quincy": 27640, "smile": 27641, "calvin": 27642, "openings": 27643, "tapp": 27644, "olulu": 27645, "spectro": 27646, "accredited": 27647, "apk": 27648, "praised": 27649, "barnett": 27650, "pollen": 27651, "premiered": 27652, "selenagomez": 27653, "toured": 27654, "screenings": 27655, "uuu": 27656, "miso": 27657, "ense": 27658, "adamlambert": 27659, "guelph": 27660, "haryana": 27661, "hutto": 27662, "lear": 27663, "ltc": 27664, "poached": 27665, "brexit": 27666, "æĿ": 27667, "ttc": 27668, "pavement": 27669, "mongers": 27670, "roe": 27671, "aders": 27672, "lington": 27673, "participant": 27674, "cared": 27675, "gail": 27676, "yates": 27677, "lantic": 27678, "dashboard": 27679, "joo": 27680, "felipe": 27681, "ssionist": 27682, "bum": 27683, "send": 27684, "aeri": 27685, "thugs": 27686, "lucifer": 27687, "ahe": 27688, "detector": 27689, "filly": 27690, "gasoline": 27691, "hamper": 27692, "humpday": 27693, "theta": 27694, "theband": 27695, "forecasts": 27696, "ohhh": 27697, "lobb": 27698, "holl": 27699, "cpu": 27700, "azu": 27701, "adar": 27702, "hailey": 27703, "bub": 27704, "cart": 27705, "quoted": 27706, "anarchy": 27707, "pancre": 
27708, "twitart": 27709, "alden": 27710, "stash": 27711, "theless": 27712, "orni": 27713, "beliebers": 27714, "mormon": 27715, "particle": 27716, "aviation": 27717, "â¬Ĩ": 27718, "webcamtoy": 27719, "saddened": 27720, "cruis": 27721, "hamlet": 27722, "nct": 27723, "rollins": 27724, "marquee": 27725, "sawyer": 27726, "reliance": 27727, "aura": 27728, "diec": 27729, "soothing": 27730, "signings": 27731, "akis": 27732, "ó": 27733, "atkins": 27734, "aerop": 27735, "ðŁĮ¿": 27736, "yab": 27737, "shari": 27738, "connol": 27739, "dubbed": 27740, "manufacture": 27741, "convincing": 27742, "feelthebern": 27743, "rau": 27744, "pulit": 27745, "onec": 27746, "gemstone": 27747, "urging": 27748, "bagu": 27749, "gah": 27750, "acids": 27751, "fianc": 27752, "zodiac": 27753, "snoop": 27754, "herrera": 27755, "initiated": 27756, "venge": 27757, "professors": 27758, "prodi": 27759, "stronger": 27760, "emission": 27761, "bba": 27762, "halle": 27763, "tapp": 27764, "hawan": 27765, "whim": 27766, "competed": 27767, "myrtle": 27768, "irport": 27769, "coldplay": 27770, "ache": 27771, "skep": 27772, "mson": 27773, "ssic": 27774, "calligraphy": 27775, "swimmers": 27776, "mey": 27777, "ppc": 27778, "thrift": 27779, "poc": 27780, "replaces": 27781, "commuter": 27782, "âģ¦âģ¦@": 27783, "goers": 27784, "logue": 27785, "paradig": 27786, "baskets": 27787, "sensitivity": 27788, "johan": 27789, "atlantis": 27790, "&&": 27791, "suitcase": 27792, "anxious": 27793, "lh": 27794, "stri": 27795, "galloway": 27796, "stread": 27797, "warden": 27798, "grounded": 27799, "fficiency": 27800, "lifeat": 27801, "relic": 27802, "disguise": 27803, "islanders": 27804, "fcofficial": 27805, "classicalmusic": 27806, "bmc": 27807, "enfield": 27808, "bique": 27809, "oakley": 27810, "batman": 27811, "slaying": 27812, "nerves": 27813, "multit": 27814, "calcium": 27815, "projector": 27816, "scottsdale": 27817, "antino": 27818, "grips": 27819, "kimmel": 27820, "desmond": 27821, "protestors": 27822, "hiatus": 27823, "metabolism": 27824, "concluded": 27825, "presser": 27826, "tipping": 27827, "slide": 27828, "eto": 27829, "hunting": 27830, "ausopen": 27831, "rik": 27832, "ppery": 27833, "innovators": 27834, "pitchers": 27835, "agger": 27836, "fungi": 27837, "zad": 27838, "prolific": 27839, "rocknroll": 27840, "blames": 27841, "ctar": 27842, "stamford": 27843, "qad": 27844, "mozzarella": 27845, "insanely": 27846, "denver": 27847, "phouse": 27848, "nomad": 27849, "ï¿": 27850, "sris": 27851, "produ": 27852, "henley": 27853, "pagan": 27854, "amtrak": 27855, "rubi": 27856, "incl": 27857, "tutor": 27858, "scotia": 27859, "woes": 27860, "singapo": 27861, "funnel": 27862, "turnbull": 27863, "knowledge": 27864, "grimm": 27865, "realmadrid": 27866, "weare": 27867, "missiles": 27868, "consol": 27869, "emojis": 27870, "sneak": 27871, "smiths": 27872, "ruiz": 27873, "brou": 27874, "iel": 27875, "haver": 27876, "ðŁĮļ": 27877, "kingof": 27878, "basilica": 27879, "circulation": 27880, "printers": 27881, "tapping": 27882, "ridley": 27883, "dragged": 27884, "haj": 27885, "writer": 27886, "fundamentals": 27887, "personalities": 27888, "metre": 27889, "stereotypes": 27890, "burle": 27891, "bestof": 27892, "nffc": 27893, "hath": 27894, "ministries": 27895, "aali": 27896, "tracing": 27897, "paved": 27898, "łï¸ı": 27899, "gic": 27900, "inspire": 27901, "tug": 27902, "hare": 27903, "repeated": 27904, "expon": 27905, "lolli": 27906, "rhode": 27907, "precin": 27908, "installations": 27909, "instagram": 27910, "azar": 27911, "ies": 27912, "solely": 27913, "dukes": 27914, 
"missionary": 27915, "vanguard": 27916, "fursuitfriday": 27917, "ond": 27918, "polari": 27919, "mast": 27920, "haran": 27921, "josé": 27922, "jacked": 27923, "ecoun": 27924, "alities": 27925, "neph": 27926, "ravel": 27927, "moderated": 27928, "scow": 27929, "sfb": 27930, "uruguay": 27931, "aso": 27932, "nig": 27933, "audu": 27934, "pints": 27935, "latina": 27936, "benz": 27937, "mitting": 27938, "charted": 27939, "matology": 27940, "citro": 27941, "biopic": 27942, "ðŁijŃ": 27943, "djokovic": 27944, "foxy": 27945, "aguil": 27946, "soto": 27947, "anada": 27948, "sinking": 27949, "scrap": 27950, "hairs": 27951, "bethany": 27952, "factfriday": 27953, "ðŁIJIJ": 27954, "unleashed": 27955, ")(": 27956, "contradic": 27957, "ramon": 27958, "coastline": 27959, "yong": 27960, "snsd": 27961, "ligan": 27962, "pome": 27963, "mitage": 27964, "gett": 27965, "wati": 27966, "risk": 27967, "soaring": 27968, "brush": 27969, "fpl": 27970, "avan": 27971, "åĨ": 27972, "larson": 27973, "shear": 27974, "multil": 27975, "blur": 27976, "multimedia": 27977, "chunky": 27978, "pari": 27979, "nani": 27980, "weird": 27981, "cholesterol": 27982, "charles": 27983, "dreamed": 27984, "tanning": 27985, "puzzles": 27986, "fram": 27987, "handball": 27988, "chag": 27989, "belize": 27990, "alu": 27991, "bangs": 27992, "ÑĦ": 27993, "detectives": 27994, "mcg": 27995, "ishq": 27996, "bothered": 27997, "safc": 27998, "mping": 27999, "teneri": 28000, "gays": 28001, "sailor": 28002, "angi": 28003, "multicul": 28004, "guessed": 28005, "rosé": 28006, "highways": 28007, "broom": 28008, "chattanoo": 28009, "-'": 28010, "seeker": 28011, "oned": 28012, "atf": 28013, "luc": 28014, "><": 28015, "bari": 28016, "percep": 28017, "jewelry": 28018, "asph": 28019, "sorrow": 28020, "sling": 28021, "mammoth": 28022, "jackie": 28023, "ë§": 28024, "wiltshire": 28025, "sao": 28026, "cancell": 28027, "impaired": 28028, "torial": 28029, "breed": 28030, "guyen": 28031, "judice": 28032, "title": 28033, "prospective": 28034, "applicants": 28035, "ðŁįĬ": 28036, "episcop": 28037, "eid": 28038, "byo": 28039, "stockings": 28040, "ðŁĴĥðŁĴĥ": 28041, "llp": 28042, "snag": 28043, "keepit": 28044, "lough": 28045, "olson": 28046, "maturity": 28047, "!!!\"": 28048, "copter": 28049, "isha": 28050, "bli": 28051, "wilmington": 28052, "tryouts": 28053, "thai": 28054, "ðŁ¥³": 28055, "pebble": 28056, "kraft": 28057, "fp": 28058, "º": 28059, "ssively": 28060, "livin": 28061, "contestants": 28062, "textures": 28063, "joan": 28064, "hdr": 28065, "filmfestival": 28066, "provence": 28067, "wido": 28068, "opend": 28069, "csi": 28070, "stown": 28071, "croati": 28072, "adjust": 28073, "hostile": 28074, "analysts": 28075, "ilan": 28076, "cuppa": 28077, "brum": 28078, "newfoundland": 28079, "goodwin": 28080, "mett": 28081, "mallorca": 28082, "plugs": 28083, "buk": 28084, "bbhutto": 28085, "wrestle": 28086, "saire": 28087, "shopped": 28088, "forza": 28089, "lehead": 28090, "vivo": 28091, "bast": 28092, "roxy": 28093, "regis": 28094, "hardworking": 28095, "honolulu": 28096, "despair": 28097, "youngsters": 28098, "nig": 28099, "impromp": 28100, "rolltide": 28101, "deemed": 28102, "treason": 28103, "rushed": 28104, "forged": 28105, "fff": 28106, "pikachu": 28107, "briggs": 28108, "doit": 28109, "accent": 28110, "laus": 28111, "glaze": 28112, "competent": 28113, "aho": 28114, "photog": 28115, "midfield": 28116, "lego": 28117, "harvard": 28118, "minorities": 28119, "reilly": 28120, "sliced": 28121, "onceupon": 28122, "initially": 28123, "financially": 28124, "landscapephotography": 28125, 
"hardro": 28126, "quo": 28127, "mmers": 28128, "parkinson": 28129, "smugg": 28130, "readiness": 28131, "brutally": 28132, "gloucester": 28133, "mped": 28134, "bbhuttozardari": 28135, "murder": 28136, "yed": 28137, "dataviz": 28138, "srt": 28139, "downing": 28140, "bians": 28141, "mü": 28142, "fleck": 28143, "flipped": 28144, "sly": 28145, "brilliance": 28146, "rim": 28147, "kum": 28148, "bubba": 28149, "koi": 28150, "knitted": 28151, "sorg": 28152, "mais": 28153, "ðŁĮ²": 28154, "tiss": 28155, "sustain": 28156, "sensu": 28157, "akhan": 28158, "ziest": 28159, "examines": 28160, "chardonnay": 28161, "username": 28162, "shortlist": 28163, "rebs": 28164, "ono": 28165, "daring": 28166, "hardwood": 28167, "cheque": 28168, "righteous": 28169, "lightening": 28170, "dirk": 28171, "shradd": 28172, "dura": 28173, "downstairs": 28174, "shal": 28175, "amigos": 28176, "ruff": 28177, "slaw": 28178, "ries": 28179, "rednation": 28180, "manus": 28181, "ðŁĩ§ðŁĩ·": 28182, "distinction": 28183, "ubun": 28184, "duran": 28185, "migra": 28186, "thians": 28187, "laver": 28188, "domestic": 28189, "kx": 28190, "jazzy": 28191, "justify": 28192, "belonging": 28193, "insulation": 28194, "colorstv": 28195, "drunken": 28196, "channeling": 28197, "quand": 28198, "xiii": 28199, "enlighten": 28200, "kano": 28201, "fatima": 28202, "teenchoice": 28203, "terrified": 28204, "pba": 28205, "asley": 28206, "metmuseum": 28207, "dune": 28208, "packer": 28209, "kio": 28210, "ðŁĴľðŁĴľ": 28211, "boiler": 28212, "fascism": 28213, "armored": 28214, "backgrounds": 28215, "inmates": 28216, "embarrassed": 28217, "defines": 28218, "thd": 28219, "wego": 28220, "silicone": 28221, "loon": 28222, "elding": 28223, "borrowed": 28224, "hemp": 28225, "aksh": 28226, "kawasaki": 28227, "bry": 28228, "deaf": 28229, "killer": 28230, "disposal": 28231, "ðŁĩ°": 28232, "glastonbury": 28233, "uncovered": 28234, "oxide": 28235, "poff": 28236, "dant": 28237, "kj": 28238, "kuro": 28239, "drizzle": 28240, "peoples": 28241, "fee": 28242, "propri": 28243, "ddlovato": 28244, "piggy": 28245, "otis": 28246, "allergies": 28247, "ubis": 28248, "penguin": 28249, "sera": 28250, "viz": 28251, "prosperous": 28252, "icides": 28253, "tornadoes": 28254, "senegal": 28255, "webcast": 28256, "stored": 28257, "enchanted": 28258, "bbcone": 28259, "bayarea": 28260, "entrepreneurial": 28261, "rednationrising": 28262, "experimenting": 28263, "angan": 28264, "lotto": 28265, "theyre": 28266, "pore": 28267, "erp": 28268, "serene": 28269, "eastwood": 28270, "brokers": 28271, "barge": 28272, "stallion": 28273, "timberlake": 28274, "tailored": 28275, "dystop": 28276, "bate": 28277, "lators": 28278, "dixit": 28279, "branson": 28280, "dynamo": 28281, "kylie": 28282, "shameful": 28283, "btwn": 28284, "springtime": 28285, "mixture": 28286, "sounded": 28287, "luton": 28288, "dades": 28289, "mala": 28290, "opra": 28291, "enic": 28292, "rahulgandhi": 28293, "sewer": 28294, "~~~~": 28295, "kyu": 28296, "northeastern": 28297, "caer": 28298, "bcu": 28299, "nirvana": 28300, "kitchens": 28301, "ousy": 28302, "alm": 28303, "riverdale": 28304, "hidden": 28305, "flint": 28306, "spd": 28307, "patrons": 28308, "katyperry": 28309, "augh": 28310, "exhibitions": 28311, "smc": 28312, "shuts": 28313, "atore": 28314, "dain": 28315, "something": 28316, "berth": 28317, "bog": 28318, "porter": 28319, "gento": 28320, "concussion": 28321, "anglic": 28322, "rowe": 28323, "grilling": 28324, "scarlett": 28325, "mastering": 28326, "mornin": 28327, "commented": 28328, "sime": 28329, "sizing": 28330, "christy": 28331, 
"ceos": 28332, "stm": 28333, "atry": 28334, "tariffs": 28335, "vacation": 28336, "prejudice": 28337, "psu": 28338, "parental": 28339, "farage": 28340, "cana": 28341, "capcom": 28342, "kosovo": 28343, "youre": 28344, "menstru": 28345, "stalin": 28346, "grapefruit": 28347, "bran": 28348, "chesa": 28349, "daven": 28350, "excel": 28351, "!!)": 28352, "à¹Į": 28353, "distributor": 28354, "cea": 28355, "bridesma": 28356, "millennial": 28357, "wain": 28358, "observing": 28359, "misery": 28360, "planetary": 28361, "exposing": 28362, "braised": 28363, "compton": 28364, "dongha": 28365, "ql": 28366, "springsteen": 28367, "thul": 28368, "sylve": 28369, "cabo": 28370, "palad": 28371, "nielsen": 28372, "gazing": 28373, "baja": 28374, "roud": 28375, "orchids": 28376, "johannesburg": 28377, "seman": 28378, "dji": 28379, "operative": 28380, "affection": 28381, "eclectic": 28382, "atc": 28383, "mutant": 28384, "awx": 28385, "nice": 28386, "melbourne": 28387, "indulg": 28388, "tulip": 28389, "diaspora": 28390, "welp": 28391, "biggie": 28392, "mississauga": 28393, "retriever": 28394, "oran": 28395, "tammy": 28396, "cta": 28397, "hippo": 28398, "seasoned": 28399, "germans": 28400, "engv": 28401, "marvellous": 28402, "imf": 28403, "relays": 28404, "montan": 28405, "mauriti": 28406, "meister": 28407, "assurance": 28408, "reigning": 28409, "sufficient": 28410, "hane": 28411, "nothing": 28412, "posse": 28413, "navy": 28414, "inlove": 28415, "brighton": 28416, "enqu": 28417, "chung": 28418, "sweaty": 28419, "esc": 28420, "caled": 28421, "mans": 28422, "nicaragua": 28423, "slices": 28424, "mocha": 28425, "washingtonpost": 28426, "bbn": 28427, "damned": 28428, "growing": 28429, "enburg": 28430, "loan": 28431, "mes": 28432, "whoops": 28433, "believers": 28434, "spiel": 28435, "vodaf": 28436, "lat": 28437, "sled": 28438, "cricketer": 28439, "browne": 28440, "golfers": 28441, "barra": 28442, "watchers": 28443, "luigi": 28444, "swamy": 28445, "moms": 28446, "pitched": 28447, "santor": 28448, "crs": 28449, "sire": 28450, "scamp": 28451, "bode": 28452, "stewar": 28453, "jonny": 28454, "entity": 28455, "pacqui": 28456, "mindful": 28457, "minindia": 28458, "bearded": 28459, "tempt": 28460, "scorpion": 28461, "eaton": 28462, "authorized": 28463, "arto": 28464, "svp": 28465, "opathy": 28466, "cchini": 28467, "housemusic": 28468, "disneyworld": 28469, "âĢĶ@": 28470, "propose": 28471, "diy": 28472, "expense": 28473, "teng": 28474, "puppets": 28475, "smel": 28476, "daca": 28477, "perry": 28478, "finn": 28479, "boosting": 28480, "leftovers": 28481, "cougs": 28482, "satellites": 28483, "many": 28484, "aze": 28485, "gong": 28486, "fie": 28487, "methodo": 28488, "ferries": 28489, "ð٤Ķð٤Ķ": 28490, "explorers": 28491, "loader": 28492, "attracted": 28493, "ilton": 28494, "goddamn": 28495, "piazza": 28496, "doctr": 28497, "saving": 28498, "paragraph": 28499, "visualization": 28500, "mayors": 28501, "workflow": 28502, "ackles": 28503, "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 28504, "स": 28505, "twerk": 28506, "clut": 28507, "lover": 28508, "teases": 28509, "sian": 28510, "ote": 28511, "deterior": 28512, "accord": 28513, "lfw": 28514, "swarovski": 28515, "natal": 28516, "traps": 28517, "kina": 28518, "analyze": 28519, "layered": 28520, "beverages": 28521, "unit": 28522, "ransom": 28523, "peshaw": 28524, "destined": 28525, "astrology": 28526, "sipping": 28527, "mileycyrus": 28528, "camino": 28529, "marshmallow": 28530, "bliss": 28531, "outback": 28532, "faq": 28533, "intoler": 28534, "humility": 28535, "poppin": 28536, "halloween": 28537, 
"montene": 28538, "ophy": 28539, "nun": 28540, "tattooed": 28541, "aas": 28542, "ðŁĮ³": 28543, "daley": 28544, "quality": 28545, "dusa": 28546, "fishermen": 28547, "swif": 28548, "terrac": 28549, "stau": 28550, "lein": 28551, "trolling": 28552, "shipment": 28553, "gardener": 28554, "marchmadness": 28555, "headband": 28556, "grt": 28557, "burnett": 28558, "wand": 28559, "!!!!!!!!!": 28560, "ghe": 28561, "dux": 28562, "hud": 28563, "warner": 28564, "ðŁĩ¦": 28565, "exile": 28566, "rescue": 28567, "rata": 28568, "dhan": 28569, "ducati": 28570, "drown": 28571, "blends": 28572, "spie": 28573, "alligator": 28574, "simultaneously": 28575, "brooke": 28576, "uke": 28577, "khar": 28578, "communion": 28579, "rika": 28580, "fordfc": 28581, "chinatown": 28582, "yourown": 28583, "mey": 28584, "canal": 28585, "systematic": 28586, "depri": 28587, "oxford": 28588, "anil": 28589, "wut": 28590, "equation": 28591, "bez": 28592, "fleur": 28593, "thegood": 28594, "langley": 28595, "adity": 28596, "edith": 28597, "alfie": 28598, "оÑĤ": 28599, "encry": 28600, "brill": 28601, "exemp": 28602, "cesar": 28603, "mbling": 28604, "abri": 28605, "scicom": 28606, "jing": 28607, "schooling": 28608, "mika": 28609, "mechanisms": 28610, "impromptu": 28611, "rhea": 28612, "moore": 28613, "crimea": 28614, "besto": 28615, "wright": 28616, "elders": 28617, "rods": 28618, "kamal": 28619, "folklore": 28620, "beet": 28621, "minion": 28622, "relieve": 28623, "thro": 28624, "teamusa": 28625, "pascal": 28626, "madewith": 28627, "bolivia": 28628, "itti": 28629, "freebies": 28630, "desired": 28631, "bestselling": 28632, "liness": 28633, "laden": 28634, "keane": 28635, "mists": 28636, "hippie": 28637, "attachment": 28638, "@/": 28639, "sew": 28640, "flanagan": 28641, "âĿĹï¸ı": 28642, "supremac": 28643, "stlcards": 28644, "sias": 28645, "qu": 28646, "rhys": 28647, "steep": 28648, "valleys": 28649, "vw": 28650, "paving": 28651, "dispat": 28652, "alison": 28653, "porte": 28654, "idu": 28655, "newsc": 28656, "socket": 28657, "mos": 28658, "costar": 28659, "revo": 28660, "proteins": 28661, "stanleycup": 28662, "mcal": 28663, "earring": 28664, "secs": 28665, "mclean": 28666, "capric": 28667, "nickelo": 28668, "aden": 28669, "vc": 28670, "shouse": 28671, "adaptive": 28672, "maximize": 28673, "entertainer": 28674, "prose": 28675, "griffi": 28676, "sixteen": 28677, "lamar": 28678, "mirage": 28679, "saudiarabia": 28680, "aweather": 28681, "rust": 28682, "infiltr": 28683, "fashionweek": 28684, "ðŁĺĬðŁĺĬðŁĺĬ": 28685, "selective": 28686, "bubble": 28687, "aden": 28688, "fennel": 28689, "decisive": 28690, "mta": 28691, "mocking": 28692, "mbles": 28693, "stamp": 28694, "mule": 28695, "bernardo": 28696, "grin": 28697, "pott": 28698, "jingle": 28699, "vettel": 28700, "colombian": 28701, "camo": 28702, "motivationmonday": 28703, "bahan": 28704, "ply": 28705, "dhary": 28706, "kami": 28707, "xmen": 28708, "sleeper": 28709, "gara": 28710, "mysti": 28711, "confidential": 28712, "conflicts": 28713, "pneu": 28714, "ces": 28715, "insurtech": 28716, "cleanse": 28717, "merely": 28718, "vais": 28719, "tux": 28720, "thegreat": 28721, "sharon": 28722, "maj": 28723, "hola": 28724, "ecosystems": 28725, "ajay": 28726, "aaj": 28727, "hush": 28728, "harmon": 28729, "backtoschool": 28730, "wikileaks": 28731, "reflected": 28732, "ðŁĺĵ": 28733, "commemorating": 28734, "acet": 28735, "buckingham": 28736, "messiah": 28737, "tuous": 28738, "hornet": 28739, "tobe": 28740, "dq": 28741, "heine": 28742, "mig": 28743, "plate": 28744, "nicholson": 28745, "spie": 28746, "cumberland": 
28747, "normal": 28748, "phobia": 28749, "happyhalloween": 28750, "cityfc": 28751, "mcel": 28752, "gillian": 28753, "keto": 28754, "lude": 28755, "demise": 28756, "suga": 28757, "strate": 28758, "mcgrath": 28759, "visitscotland": 28760, "fooled": 28761, "cbr": 28762, "gcse": 28763, "colori": 28764, "potd": 28765, "missuniverse": 28766, "finances": 28767, "mapoli": 28768, "forks": 28769, "Ø´": 28770, "cannon": 28771, "medicinal": 28772, "ðŁĹĵ": 28773, "kho": 28774, "wreck": 28775, "panto": 28776, "bagel": 28777, "gull": 28778, "syndicate": 28779, "icy": 28780, "prc": 28781, "kien": 28782, "zika": 28783, "tish": 28784, "peta": 28785, "cco": 28786, "liza": 28787, "chut": 28788, "extraction": 28789, "elg": 28790, "gli": 28791, "fueled": 28792, "posit": 28793, "respectively": 28794, "leicester": 28795, "brink": 28796, "vulnerability": 28797, "imported": 28798, "esha": 28799, "ð٦ħ": 28800, "rural": 28801, "rell": 28802, "gaming": 28803, "atlantic": 28804, "abandon": 28805, "noah": 28806, "resolved": 28807, "prostate": 28808, "allergic": 28809, "psd": 28810, "âĺ¹": 28811, "dungeon": 28812, "fangirl": 28813, "illuminated": 28814, "mhs": 28815, "whitesox": 28816, "dently": 28817, "cko": 28818, "endorse": 28819, "overly": 28820, "dazzling": 28821, "prioriti": 28822, "nightlife": 28823, "util": 28824, "behave": 28825, "flamen": 28826, "eastbound": 28827, "ðŁĴŁ": 28828, "iloveyou": 28829, "govuk": 28830, "mozambique": 28831, "allegi": 28832, "dri": 28833, "testimonial": 28834, "aths": 28835, "ì§Ģ": 28836, "mmy": 28837, "shabby": 28838, "prosecco": 28839, "friendships": 28840, "calam": 28841, "damages": 28842, "offset": 28843, "jurassic": 28844, "juno": 28845, "arrell": 28846, "ðŁĴ©": 28847, "interventions": 28848, "daredevil": 28849, "carver": 28850, "runaway": 28851, "rane": 28852, "trustees": 28853, "haute": 28854, "depths": 28855, "ðŁİŃ": 28856, "mein": 28857, "sacrifices": 28858, "concier": 28859, "nesting": 28860, "izzy": 28861, "metam": 28862, "ilovemy": 28863, "urine": 28864, "dulu": 28865, "malhotra": 28866, "veins": 28867, "nightly": 28868, "coat": 28869, "andi": 28870, "hewitt": 28871, "lonel": 28872, "cible": 28873, "write": 28874, "jennie": 28875, "santac": 28876, "ĸï¸ı": 28877, "strato": 28878, "singapore": 28879, "soprano": 28880, "kristen": 28881, "cheerful": 28882, "fleetwood": 28883, "fairi": 28884, "meli": 28885, "wast": 28886, "turnt": 28887, "sforsale": 28888, "scrolling": 28889, "angelina": 28890, "rendition": 28891, "jericho": 28892, "nicky": 28893, "orb": 28894, "flavo": 28895, "patriot": 28896, "asheville": 28897, "sickness": 28898, "refund": 28899, "aggression": 28900, "bpl": 28901, "ãĥĥ": 28902, "elusive": 28903, "thistory": 28904, "hanger": 28905, "buffs": 28906, "villas": 28907, "atkinson": 28908, "sph": 28909, "jait": 28910, "declined": 28911, "wok": 28912, "supremacy": 28913, "ootball": 28914, "eyang": 28915, "ðŁİĵ": 28916, "sford": 28917, "athi": 28918, "consume": 28919, "roadster": 28920, "eso": 28921, "upro": 28922, "recipe": 28923, "auf": 28924, "uci": 28925, "aron": 28926, "oooh": 28927, "csgo": 28928, "reich": 28929, "mcd": 28930, "minute": 28931, "ladies": 28932, "punk": 28933, "rutgers": 28934, "meek": 28935, "arizon": 28936, "taj": 28937, "landlord": 28938, "degra": 28939, "autumn": 28940, "lynx": 28941, "usf": 28942, "bhi": 28943, "fairytale": 28944, "donghae": 28945, "betsy": 28946, "exploded": 28947, "chennai": 28948, "opa": 28949, "protag": 28950, "brant": 28951, "ðŁĵ°:": 28952, "gf": 28953, "palli": 28954, "ðŁı¼âĢįâĻĢï¸ı": 28955, "sut": 28956, "illini": 
28957, "columnist": 28958, "shirtless": 28959, "decentr": 28960, "searched": 28961, "ecor": 28962, "buggy": 28963, "sack": 28964, "ðŁĺĤðŁĺŃ": 28965, "det": 28966, "theri": 28967, "ornaments": 28968, "bringback": 28969, "tov": 28970, "quarterfinals": 28971, "iche": 28972, "constra": 28973, "gier": 28974, "buchanan": 28975, "vix": 28976, "kayaking": 28977, "mustread": 28978, "swallow": 28979, "melb": 28980, "scaf": 28981, "opal": 28982, "mayoral": 28983, "harat": 28984, "ð٦ĭ": 28985, "schedules": 28986, "idf": 28987, "hague": 28988, "roz": 28989, "aah": 28990, "dmc": 28991, "duplic": 28992, "cache": 28993, "orphan": 28994, "fracture": 28995, "recon": 28996, "chav": 28997, "bunnies": 28998, "alain": 28999, "mustafa": 29000, "ðŁİĻ": 29001, "vacations": 29002, "dynamite": 29003, "texted": 29004, "broadcaster": 29005, "ðŁĴ£": 29006, "steamed": 29007, "rocker": 29008, "dietary": 29009, "luxurytravel": 29010, "inaugurated": 29011, "sawards": 29012, "vaughn": 29013, "lincolnshire": 29014, "clicked": 29015, "kraja": 29016, "fanc": 29017, "removes": 29018, "layoffs": 29019, "mcfar": 29020, "breeds": 29021, "winnie": 29022, "jonghyun": 29023, "incentive": 29024, "variations": 29025, "patton": 29026, "aturday": 29027, "persistent": 29028, "prun": 29029, "piers": 29030, "dales": 29031, "æĸ": 29032, "breastfeeding": 29033, "rance": 29034, "tawa": 29035, "Ĥâĸ": 29036, "murdoch": 29037, "captive": 29038, "thistle": 29039, "nica": 29040, "commodity": 29041, "couldnt": 29042, "boardwalk": 29043, "gracious": 29044, "practitioners": 29045, "ngc": 29046, "scrum": 29047, "nero": 29048, "camouflage": 29049, "colon": 29050, "hei": 29051, "physicist": 29052, "saturdaymorning": 29053, "tener": 29054, "siwon": 29055, "columns": 29056, "brune": 29057, "yvr": 29058, "bair": 29059, "retires": 29060, "halam": 29061, "caber": 29062, "shazam": 29063, "minu": 29064, "cascade": 29065, "milkshake": 29066, "grid": 29067, "dren": 29068, "vincent": 29069, "sodium": 29070, "platter": 29071, "cheerleader": 29072, "chenko": 29073, "yak": 29074, "eliminated": 29075, "typo": 29076, "yman": 29077, "rethink": 29078, "âĿĹ": 29079, "tsville": 29080, "bernardokath": 29081, "extr": 29082, "ðŁĺģðŁĺģðŁĺģ": 29083, "tao": 29084, "reper": 29085, "moths": 29086, "empowered": 29087, "citing": 29088, "transported": 29089, "monks": 29090, "sanat": 29091, "clears": 29092, "bachelorette": 29093, "campbell": 29094, "rachael": 29095, "harle": 29096, "handler": 29097, "climbs": 29098, "interference": 29099, "release": 29100, "shand": 29101, "rbs": 29102, "hrh": 29103, "ãģª": 29104, "valle": 29105, "ré": 29106, "slime": 29107, "wakes": 29108, "chubby": 29109, "sloan": 29110, "elves": 29111, "athen": 29112, "attorneys": 29113, "microscope": 29114, "stoner": 29115, "scaling": 29116, "obe": 29117, "cout": 29118, "seman": 29119, "midweek": 29120, "balsam": 29121, "ðŁĺįâĿ¤": 29122, "tiful": 29123, "vish": 29124, "lotta": 29125, "ripping": 29126, "remn": 29127, "tire": 29128, "leap": 29129, "havent": 29130, "laby": 29131, "himach": 29132, "whispers": 29133, "wein": 29134, "ðŁİ¸": 29135, "wildflowers": 29136, "sele": 29137, "ucc": 29138, "liability": 29139, "azine": 29140, "swings": 29141, "kya": 29142, "tair": 29143, "remain": 29144, "edo": 29145, "flops": 29146, "pocket": 29147, "grandad": 29148, "examiner": 29149, "gris": 29150, "ffect": 29151, "ðŁijĬðŁı»": 29152, "studded": 29153, "heartbeat": 29154, "deacon": 29155, "firmly": 29156, "infectious": 29157, "stef": 29158, "outlines": 29159, "leasing": 29160, "claws": 29161, "sense": 29162, "tabs": 29163, 
"hoot": 29164, "mosul": 29165, "spawn": 29166, "coa": 29167, "hogwarts": 29168, "vein": 29169, "albania": 29170, "manuel": 29171, "bino": 29172, "vauxhall": 29173, "scotland": 29174, "gobucks": 29175, "matty": 29176, "physio": 29177, "torino": 29178, "constable": 29179, "investigated": 29180, "slower": 29181, "mistaken": 29182, "bayer": 29183, "wildfires": 29184, "voic": 29185, "xon": 29186, "timeto": 29187, "chassis": 29188, "barric": 29189, "pion": 29190, "baldhead": 29191, "wook": 29192, "registr": 29193, "drafts": 29194, "bhs": 29195, "ligue": 29196, "lick": 29197, "staffordshire": 29198, "bafta": 29199, "darry": 29200, "jeanne": 29201, "vending": 29202, "corp": 29203, "âĽ³ï¸ı": 29204, "kiddos": 29205, "fenway": 29206, "cao": 29207, "westbound": 29208, "ðŁĺĻ": 29209, "dvr": 29210, "quicker": 29211, "blah": 29212, "goodie": 29213, "ðŁĴĭðŁĴĭ": 29214, "vox": 29215, "esper": 29216, "facade": 29217, "correlation": 29218, "redbull": 29219, "roup": 29220, "declining": 29221, "chive": 29222, "mcgee": 29223, "turo": 29224, "inder": 29225, "feller": 29226, "fug": 29227, "ilysm": 29228, "mardi": 29229, "peshawar": 29230, "kieran": 29231, "inema": 29232, "meatballs": 29233, "peck": 29234, "depressing": 29235, "sensing": 29236, "giz": 29237, "ddington": 29238, "springwatch": 29239, "roaming": 29240, "yellowstone": 29241, "horseshoe": 29242, "amman": 29243, "weekday": 29244, "olor": 29245, "ðŁ¥°": 29246, "boosts": 29247, "sprint": 29248, "scarves": 29249, "jee": 29250, "beetro": 29251, "clan": 29252, "allthe": 29253, "ìĦ¸ë": 29254, "enlightenment": 29255, "adobe": 29256, "regeneration": 29257, "?@": 29258, "contag": 29259, "yachts": 29260, "tou": 29261, "mora": 29262, "envoy": 29263, "rani": 29264, "goli": 29265, "dhanushkraja": 29266, "woodworking": 29267, "strengths": 29268, "sedi": 29269, "discs": 29270, "arina": 29271, "scon": 29272, "lite": 29273, "another": 29274, "ðŁ¥Ĭ": 29275, "yemen": 29276, "guern": 29277, "savvy": 29278, "loyed": 29279, "biomed": 29280, "heartbreak": 29281, "comrades": 29282, "millie": 29283, "patch": 29284, "unf": 29285, "jarvis": 29286, "blaming": 29287, "commemoration": 29288, "gey": 29289, "å¥": 29290, "cardiovascular": 29291, "aligned": 29292, "document": 29293, ".?": 29294, "aesthetics": 29295, "emu": 29296, "theirs": 29297, "leh": 29298, "psic": 29299, "sif": 29300, "plateau": 29301, "expend": 29302, "dominating": 29303, "robes": 29304, "mauritius": 29305, "exceptionally": 29306, "homer": 29307, "discoveries": 29308, "braun": 29309, "tennant": 29310, "insulin": 29311, "ðŁİ®": 29312, "carbs": 29313, "teas": 29314, "?!\"": 29315, "zie": 29316, "francois": 29317, "browsing": 29318, "thol": 29319, "clarence": 29320, "helper": 29321, "obtained": 29322, "cassie": 29323, "lees": 29324, "!,": 29325, "pomegran": 29326, "hubs": 29327, "prestige": 29328, "][": 29329, "macher": 29330, "bottled": 29331, "punch": 29332, "pipe": 29333, "och": 29334, "gallons": 29335, "deliveries": 29336, "ura": 29337, "unday": 29338, "monde": 29339, "depicts": 29340, "regency": 29341, "outrageous": 29342, "khaled": 29343, "caro": 29344, "hearti": 29345, "zag": 29346, "developmental": 29347, "overcoming": 29348, "statistical": 29349, "flavored": 29350, "fords": 29351, "creatives": 29352, "laurence": 29353, "dias": 29354, "sunscreen": 29355, "inked": 29356, "preacher": 29357, "nul": 29358, "impacting": 29359, "autistic": 29360, "âļĶï¸ı": 29361, "oss": 29362, "pelicans": 29363, "celeste": 29364, "vb": 29365, "rump": 29366, "mcgra": 29367, "fairfax": 29368, "humor": 29369, "bbcnews": 29370, 
"rowling": 29371, "calder": 29372, "seamless": 29373, "agne": 29374, "pti": 29375, "mixed": 29376, "tshirts": 29377, "merci": 29378, "btob": 29379, "womeninstem": 29380, "genealogy": 29381, "preven": 29382, "lour": 29383, "cradle": 29384, "giuse": 29385, "о": 29386, "chrono": 29387, "fairness": 29388, "chocolate": 29389, "tory": 29390, "asda": 29391, "prescott": 29392, "stretched": 29393, "alman": 29394, "uil": 29395, "recharge": 29396, "intre": 29397, "obst": 29398, "hospital": 29399, "hayward": 29400, "tenerife": 29401, "friedman": 29402, "vaping": 29403, "confessions": 29404, "yeah": 29405, "balli": 29406, "lucknow": 29407, "corpse": 29408, "sculptor": 29409, "ampton": 29410, "tpp": 29411, "indicates": 29412, "surplus": 29413, "truman": 29414, "ðĿĻ": 29415, "sinha": 29416, "invo": 29417, "sovereign": 29418, "kev": 29419, "establishing": 29420, "engraved": 29421, "assuming": 29422, "ðŁıģ": 29423, "souza": 29424, "fabi": 29425, "toned": 29426, "ounge": 29427, "deloit": 29428, "downey": 29429, "noble": 29430, "omor": 29431, "cartridge": 29432, "ðŁıIJ": 29433, "uhur": 29434, "holloway": 29435, "successes": 29436, "rsa": 29437, "âĦ¢": 29438, "mazz": 29439, "twd": 29440, "discourse": 29441, ".<": 29442, "yat": 29443, "satisfy": 29444, "compri": 29445, "ह": 29446, "graphite": 29447, "dissertation": 29448, "arter": 29449, "íĶ": 29450, "bally": 29451, "zombi": 29452, "lyons": 29453, "aic": 29454, "ubc": 29455, "prada": 29456, "eil": 29457, "dax": 29458, "clai": 29459, "granddaughter": 29460, "extravaganza": 29461, "challenge": 29462, "ð٤ŀ": 29463, "pover": 29464, "primarily": 29465, "daddy": 29466, "mana": 29467, "bikers": 29468, "inquiries": 29469, "daun": 29470, "feline": 29471, "generative": 29472, "hef": 29473, "benefiting": 29474, "lindsey": 29475, "polka": 29476, "demonstrated": 29477, "alle": 29478, "randy": 29479, "osu": 29480, "lowkey": 29481, "weirdest": 29482, "redbull": 29483, "oury": 29484, "nous": 29485, "woodstock": 29486, "credenti": 29487, "nicer": 29488, "gado": 29489, "alyss": 29490, "aph": 29491, "preparedness": 29492, "stationary": 29493, "incorporated": 29494, "dyer": 29495, "saratoga": 29496, "celesti": 29497, ":\"": 29498, "antibiotics": 29499, "orgs": 29500, "indefin": 29501, "apron": 29502, "иÐ": 29503, "fifteen": 29504, "nof": 29505, "ðŁĶĿ": 29506, "phx": 29507, "tega": 29508, "mz": 29509, "organizational": 29510, "onair": 29511, "bandung": 29512, "pleasures": 29513, "mori": 29514, "secretari": 29515, "raccoon": 29516, "cashi": 29517, "pilates": 29518, "kon": 29519, "geoffrey": 29520, "lao": 29521, "kamp": 29522, "departments": 29523, "backpacking": 29524, "anam": 29525, "ë": 29526, "crackdown": 29527, "aunty": 29528, "ondo": 29529, "lizzie": 29530, "phers": 29531, "cun": 29532, "ðŁĩ±": 29533, "kpop": 29534, "put": 29535, "intentional": 29536, "connolly": 29537, "barclays": 29538, "hsfb": 29539, "swindon": 29540, "uku": 29541, "sally": 29542, "aint": 29543, "âľħ": 29544, "penang": 29545, "uplifting": 29546, "epilepsy": 29547, "interro": 29548, "bungal": 29549, "goku": 29550, "blueberries": 29551, "द": 29552, "ussia": 29553, "silky": 29554, "moured": 29555, "istic": 29556, "briefs": 29557, "meats": 29558, "gob": 29559, "chaser": 29560, "statewide": 29561, "prasad": 29562, "glitch": 29563, "arin": 29564, "banff": 29565, "member": 29566, "ðŁĺŃâĿ¤ï¸ı": 29567, "loving": 29568, "halla": 29569, "ม": 29570, "smokers": 29571, "yaku": 29572, "scicomm": 29573, "physio": 29574, "swol": 29575, "lemons": 29576, "gelato": 29577, "chool": 29578, "capitals": 29579, "kistan": 29580, 
"tights": 29581, "spikes": 29582, "travellers": 29583, "iklan": 29584, "commissioning": 29585, "arine": 29586, "emabiggestfans": 29587, "emphasis": 29588, "frontline": 29589, "paddock": 29590, "destructive": 29591, "baha": 29592, "linger": 29593, "jewish": 29594, "shetland": 29595, "mcgin": 29596, "monkey": 29597, "koz": 29598, "sone": 29599, "rajini": 29600, "teh": 29601, "yen": 29602, "cvs": 29603, "masquer": 29604, "girly": 29605, "wesle": 29606, "wasnt": 29607, "brody": 29608, "terminator": 29609, "gille": 29610, "maggi": 29611, "birdie": 29612, "jeopardy": 29613, "cubic": 29614, "vmware": 29615, "intricate": 29616, "anup": 29617, "topia": 29618, "easton": 29619, "sabres": 29620, "investigates": 29621, "busting": 29622, "bilingual": 29623, "valentino": 29624, "informat": 29625, "ferre": 29626, "adventur": 29627, "hydrate": 29628, "forsy": 29629, "aziz": 29630, "santo": 29631, "ede": 29632, "whistler": 29633, "continuously": 29634, "dham": 29635, "unused": 29636, "jihad": 29637, "addictive": 29638, "vidy": 29639, "dob": 29640, "ido": 29641, "fied": 29642, "niversary": 29643, "none": 29644, "fuer": 29645, "ðŁĺįðŁĺĺ": 29646, "covenant": 29647, "printable": 29648, "immaculate": 29649, "oem": 29650, "clt": 29651, "servants": 29652, "consumed": 29653, "unreleased": 29654, "scum": 29655, "packaged": 29656, "mere": 29657, "ìĦ¸ë¸": 29658, "toby": 29659, "taf": 29660, "spoons": 29661, "meal": 29662, "fball": 29663, "fairfield": 29664, "janet": 29665, "silverstone": 29666, "dartmouth": 29667, "followme": 29668, "voyager": 29669, "kombat": 29670, "anniver": 29671, "enew": 29672, "magdal": 29673, "hove": 29674, "sath": 29675, "grizzly": 29676, "cardi": 29677, "gartner": 29678, "sandy": 29679, "kanye": 29680, "posture": 29681, "poign": 29682, "impulse": 29683, "radiology": 29684, "horizons": 29685, "siam": 29686, "aishwar": 29687, "==>": 29688, "noche": 29689, "tris": 29690, "elyn": 29691, "comme": 29692, "dui": 29693, "cec": 29694, "councillors": 29695, "cuddling": 29696, "creeping": 29697, "locke": 29698, "manages": 29699, "transferred": 29700, "necks": 29701, "dier": 29702, "dano": 29703, "vick": 29704, "lunches": 29705, "dhe": 29706, "ensures": 29707, "criss": 29708, "ulster": 29709, "bannon": 29710, "contenders": 29711, "spam": 29712, "sweetness": 29713, "medal": 29714, "honduras": 29715, "arctic": 29716, "ultrasound": 29717, "infr": 29718, "discovers": 29719, "eiffel": 29720, "casters": 29721, "ruben": 29722, "dust": 29723, "aweed": 29724, "atrium": 29725, "lestwe": 29726, "seared": 29727, "ðŁĵº:": 29728, "tyne": 29729, "exchanges": 29730, "littlemix": 29731, "lle": 29732, "astronauts": 29733, "hershey": 29734, "workday": 29735, "knob": 29736, "sov": 29737, "resigns": 29738, "todayshow": 29739, "derman": 29740, "anth": 29741, "afc": 29742, "taster": 29743, "swoo": 29744, "saeed": 29745, "pering": 29746, "narrowly": 29747, "rnli": 29748, "bestbuy": 29749, "panasonic": 29750, "obstacle": 29751, "farmers": 29752, "ðŁİĻ": 29753, "pawan": 29754, "kiest": 29755, "angers": 29756, "absurd": 29757, "ohmy": 29758, "sino": 29759, "pistachi": 29760, "spice": 29761, "giuli": 29762, "primetime": 29763, "kow": 29764, "kens": 29765, "exagger": 29766, "!?!": 29767, "uba": 29768, "middles": 29769, "judd": 29770, "ejec": 29771, "slammed": 29772, "pensions": 29773, "ofa": 29774, "recreate": 29775, "bhp": 29776, "xxl": 29777, "liverpool": 29778, "thresh": 29779, "purity": 29780, "nieu": 29781, "holics": 29782, "wrath": 29783, "rado": 29784, "glio": 29785, "amma": 29786, "dilemma": 29787, "cru": 29788, "letsgo": 
29789, "....@": 29790, "âĿĵ": 29791, "suggesting": 29792, "trumps": 29793, "horus": 29794, "fv": 29795, "icom": 29796, "referring": 29797, "predictive": 29798, "tarts": 29799, "gette": 29800, "sock": 29801, "glossy": 29802, "pinky": 29803, "alec": 29804, "thyme": 29805, "oura": 29806, "theroad": 29807, "petr": 29808, "cram": 29809, "pfi": 29810, "dvn": 29811, "meier": 29812, "incentives": 29813, "tunnels": 29814, "mobil": 29815, "recap": 29816, "extras": 29817, "upright": 29818, "revamp": 29819, "perseverance": 29820, ",-": 29821, "otp": 29822, "mirror": 29823, "arwx": 29824, "gerry": 29825, "maher": 29826, "gor": 29827, "homepage": 29828, "amis": 29829, "agra": 29830, "madele": 29831, "bestfriend": 29832, "siriusxm": 29833, "bundles": 29834, "admiring": 29835, "tdsb": 29836, "ðŁįģ": 29837, "chas": 29838, "slowing": 29839, "roh": 29840, "wallpapers": 29841, "â̦/": 29842, "tekken": 29843, "gangs": 29844, "tala": 29845, "lindsay": 29846, "shoul": 29847, "linebacker": 29848, "toolkit": 29849, "uranium": 29850, "calyp": 29851, "abrams": 29852, "matthi": 29853, "ðŁı¿": 29854, "honourable": 29855, "dayo": 29856, "versail": 29857, "tank": 29858, "stc": 29859, "fritz": 29860, "splend": 29861, "patag": 29862, "annoyed": 29863, "onday": 29864, "devastated": 29865, "chattanooga": 29866, "nationalism": 29867, "massey": 29868, "jenn": 29869, "tailor": 29870, "devgn": 29871, "organs": 29872, "zucchini": 29873, "onfox": 29874, "satire": 29875, "wexford": 29876, "disgrace": 29877, "noto": 29878, "volta": 29879, "âĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ı": 29880, "à¶": 29881, "homeowners": 29882, "pointer": 29883, "mcr": 29884, "austen": 29885, "daysto": 29886, "moons": 29887, "palma": 29888, "grazing": 29889, "eso": 29890, "influencers": 29891, "shahidkapoor": 29892, "compliant": 29893, "measurements": 29894, "develops": 29895, "yd": 29896, "parl": 29897, "pvt": 29898, "randolph": 29899, "tortured": 29900, "gerald": 29901, "elias": 29902, "deepikap": 29903, "warmup": 29904, "hickory": 29905, "gap": 29906, "coffin": 29907, "amour": 29908, "reneg": 29909, "mounting": 29910, "sevens": 29911, "igle": 29912, "hier": 29913, "decad": 29914, "tright": 29915, "escapes": 29916, "werner": 29917, "tfl": 29918, "fulfilled": 29919, "niger": 29920, "sourdough": 29921, "reaper": 29922, "chooses": 29923, "spinner": 29924, "weeknd": 29925, "filtered": 29926, "shuk": 29927, "kati": 29928, "oldham": 29929, "opensource": 29930, "khanna": 29931, "atelier": 29932, "connec": 29933, "ophobic": 29934, "glas": 29935, "complications": 29936, "arson": 29937, "councils": 29938, "smol": 29939, "assy": 29940, "lurking": 29941, "lingui": 29942, "hanks": 29943, "ein": 29944, "Ùħ": 29945, "rugs": 29946, "nguyen": 29947, "nouveau": 29948, "menace": 29949, "lev": 29950, "aladdin": 29951, "ruining": 29952, "roundabout": 29953, "km": 29954, "conor": 29955, "shoops": 29956, "mayday": 29957, "traumatic": 29958, "prabhas": 29959, "kaiser": 29960, "kita": 29961, "router": 29962, "pedro": 29963, "retar": 29964, "stunner": 29965, "spanish": 29966, "disturbed": 29967, "academy": 29968, "elearning": 29969, "witty": 29970, "seng": 29971, "feral": 29972, "avy": 29973, "stab": 29974, "keaton": 29975, "urdu": 29976, "koto": 29977, "hui": 29978, "cooke": 29979, "arian": 29980, "thepersonal": 29981, "uma": 29982, "seap": 29983, "asting": 29984, "rhetoric": 29985, "handwriting": 29986, "municipality": 29987, "consortium": 29988, "ðŁIJŁ": 29989, "glasgow": 29990, "raya": 29991, "eliza": 29992, "polymer": 29993, "broth": 29994, "practi": 29995, "correspondent": 29996, 
"addicts": 29997, "gayle": 29998, "ailing": 29999, "ofe": 30000, "pli": 30001, "heartw": 30002, "stitch": 30003, "sightings": 30004, "priests": 30005, "samo": 30006, "sloth": 30007, "goodwood": 30008, "rocco": 30009, "sabc": 30010, "summit": 30011, "lace": 30012, "presley": 30013, "itten": 30014, "cincy": 30015, "thepersonalnetwork": 30016, "sweek": 30017, "pegas": 30018, "afcon": 30019, "registry": 30020, "cim": 30021, "leth": 30022, "dicap": 30023, "candice": 30024, "fluent": 30025, "smack": 30026, "pedestri": 30027, "aloud": 30028, "carac": 30029, "priyankach": 30030, "pgh": 30031, "irons": 30032, "dolce": 30033, "latvia": 30034, "deceased": 30035, "therock": 30036, "clap": 30037, "cene": 30038, "foam": 30039, "morrissey": 30040, "gret": 30041, "essentially": 30042, "comcast": 30043, "beagle": 30044, "argues": 30045, "inged": 30046, "-â̦": 30047, "sag": 30048, "hasan": 30049, "ðŁĻĨ": 30050, "ðŁį°": 30051, "nhra": 30052, "kannada": 30053, "indicators": 30054, "oner": 30055, "brixton": 30056, "atas": 30057, "screenplay": 30058, "sorority": 30059, "shaheed": 30060, "heem": 30061, "classmates": 30062, "tainment": 30063, "esi": 30064, "breastcancer": 30065, "zuckerberg": 30066, "auror": 30067, "encia": 30068, "refers": 30069, "kaeper": 30070, "vortex": 30071, "compart": 30072, "lymph": 30073, "photographing": 30074, "steff": 30075, "restling": 30076, "parsley": 30077, "momento": 30078, "thman": 30079, "lacking": 30080, "dutt": 30081, "oculus": 30082, "fino": 30083, "frenzy": 30084, "rasc": 30085, "dern": 30086, "dismissed": 30087, "nook": 30088, "metgala": 30089, "shill": 30090, "raphael": 30091, "mavericks": 30092, "exhibits": 30093, "eagerly": 30094, "cpa": 30095, "amenities": 30096, ".âłĢ": 30097, "exodus": 30098, "ernst": 30099, "lita": 30100, "dealt": 30101, "womensmarch": 30102, "iain": 30103, "scoreboard": 30104, "campeones": 30105, "cen": 30106, "tiki": 30107, "garrison": 30108, "fidelity": 30109, "brag": 30110, "roadmap": 30111, "psychop": 30112, "loe": 30113, "bleu": 30114, "ðŁijĬðŁı¼": 30115, "sauvi": 30116, "springer": 30117, "temptation": 30118, "rudolph": 30119, "acura": 30120, "wicz": 30121, "parachute": 30122, "strol": 30123, "lenny": 30124, "zik": 30125, "doms": 30126, "nbaf": 30127, "alpac": 30128, "vivian": 30129, "rove": 30130, "preet": 30131, "perpetu": 30132, "snake": 30133, "airsoft": 30134, "inflatable": 30135, "princes": 30136, "atie": 30137, "ffey": 30138, "patient": 30139, "mire": 30140, "chelle": 30141, "slack": 30142, "groovy": 30143, "#:": 30144, "uploading": 30145, "!!!!!!!!!!!!!!!!": 30146, "siemens": 30147, "provision": 30148, "vfx": 30149, "needy": 30150, "fats": 30151, "topoli": 30152, "bhutto": 30153, "sathletics": 30154, "alums": 30155, "twinning": 30156, "southwestern": 30157, "adopting": 30158, "lastnight": 30159, "manne": 30160, "laga": 30161, "twell": 30162, "acia": 30163, "----": 30164, "eyewear": 30165, "hurley": 30166, "flee": 30167, "sach": 30168, "pecker": 30169, "costly": 30170, "isk": 30171, "crates": 30172, "policy": 30173, "erosion": 30174, "ingo": 30175, "werk": 30176, "ðŁIJį": 30177, "tortoise": 30178, "therapies": 30179, "internet": 30180, "chihuahua": 30181, "rips": 30182, "frei": 30183, "edor": 30184, "taiji": 30185, "tfc": 30186, "dod": 30187, "dempsey": 30188, "christin": 30189, "cheng": 30190, "hips": 30191, "graeme": 30192, "compassionate": 30193, "cavaliers": 30194, "historic": 30195, "soulful": 30196, "criminal": 30197, "jac": 30198, "vinci": 30199, "expired": 30200, "surat": 30201, "turismo": 30202, "kona": 30203, "seaweed": 
30204, "berts": 30205, "leica": 30206, "expressing": 30207, "aal": 30208, "wort": 30209, "breakfast": 30210, "herring": 30211, "amused": 30212, "rhubarb": 30213, "martian": 30214, "cosplayer": 30215, "yash": 30216, "strial": 30217, "raul": 30218, "referral": 30219, "dwts": 30220, "jw": 30221, "adler": 30222, "curtains": 30223, "gur": 30224, "valence": 30225, "tyrone": 30226, "swfc": 30227, "coached": 30228, "reborn": 30229, "diabetic": 30230, "choke": 30231, "norfolk": 30232, "investigative": 30233, "ðŁĴ¯ðŁĴ¯": 30234, "zid": 30235, "vmas": 30236, "phie": 30237, "objectives": 30238, "âľĭ": 30239, "overdue": 30240, "divers": 30241, "matsu": 30242, "ðŁİŁï¸ı": 30243, "casualties": 30244, "ว": 30245, "alk": 30246, "standardi": 30247, "realist": 30248, "artifacts": 30249, "pandor": 30250, "kex": 30251, "invin": 30252, "(!)": 30253, "iney": 30254, "paraly": 30255, "mrt": 30256, "faye": 30257, "thevoice": 30258, "onga": 30259, "deed": 30260, "skinner": 30261, "azwx": 30262, "specimen": 30263, "priyankachopra": 30264, "nuevo": 30265, "barkley": 30266, "toulouse": 30267, "resumes": 30268, "footballers": 30269, "citi": 30270, "fetch": 30271, "ère": 30272, "lestweforget": 30273, "ðŁĻĭ": 30274, "chunk": 30275, "drifting": 30276, "manipulation": 30277, "equals": 30278, "putt": 30279, "kyungsoo": 30280, "âĿ¤ï¸ı#": 30281, "elastic": 30282, "parano": 30283, "foy": 30284, "doping": 30285, "cincy": 30286, "ssler": 30287, "interrupted": 30288, "alay": 30289, "adores": 30290, "amethy": 30291, "convoy": 30292, "ãĢı": 30293, "Ĭãģ": 30294, "blacklist": 30295, "generals": 30296, "sachin": 30297, "brushed": 30298, "ounces": 30299, "nonstop": 30300, "illiams": 30301, "btsarmy": 30302, "uav": 30303, "ruff": 30304, "burma": 30305, "bik": 30306, "defence": 30307, "schultz": 30308, "boasts": 30309, "loneliness": 30310, "gore": 30311, "transforms": 30312, "alumna": 30313, "@@": 30314, "rappers": 30315, "nehru": 30316, "caro": 30317, "himalayan": 30318, "wearables": 30319, "geh": 30320, "peppermint": 30321, "redevelopment": 30322, "flamingo": 30323, "cosby": 30324, "bigbaldhead": 30325, "agri": 30326, "barefoot": 30327, "scopes": 30328, "regram": 30329, "ghana": 30330, "ðŁİ«": 30331, "iheart": 30332, "sadie": 30333, "carrie": 30334, "microbial": 30335, "kuala": 30336, "skater": 30337, "querque": 30338, "âĻ©": 30339, "genres": 30340, "reasoning": 30341, "chased": 30342, "aso": 30343, "slipped": 30344, "encan": 30345, "vamos": 30346, "kers": 30347, "adverse": 30348, "moil": 30349, "commodities": 30350, "withyou": 30351, "silent": 30352, "hype": 30353, "ande": 30354, "amination": 30355, "whispe": 30356, "litz": 30357, "âļ½ï¸ıâļ½ï¸ı": 30358, "riff": 30359, "ppy": 30360, "lambs": 30361, "ganesh": 30362, "absent": 30363, "regulator": 30364, "marseille": 30365, "enroll": 30366, "parcel": 30367, "wap": 30368, "byrd": 30369, "ðŁĩŃ": 30370, "tuber": 30371, "countrymusic": 30372, "parl": 30373, "controllers": 30374, "responsibilities": 30375, "wey": 30376, "chate": 30377, "montenegro": 30378, "chico": 30379, "milan": 30380, "lms": 30381, "trainees": 30382, "appropriately": 30383, "uncertain": 30384, "poppies": 30385, "edsheeran": 30386, "nutritious": 30387, "garo": 30388, "deutsch": 30389, "awesome": 30390, "ãĥ¼": 30391, "comfortably": 30392, "landmarks": 30393, "eti": 30394, "reusable": 30395, "danielle": 30396, "rosal": 30397, "coles": 30398, "justic": 30399, "ccs": 30400, "fanny": 30401, "nim": 30402, "mcu": 30403, "clinch": 30404, "atene": 30405, "merge": 30406, "imdb": 30407, "anglo": 30408, "uccino": 30409, "panini": 30410, 
"annot": 30411, "burberry": 30412, "feature": 30413, "predicting": 30414, "fashionista": 30415, "sask": 30416, "imaginary": 30417, "mmo": 30418, "southsudan": 30419, "spear": 30420, "hubble": 30421, "jointhe": 30422, "coyotes": 30423, "sligo": 30424, "kodak": 30425, "sitcom": 30426, "polaroid": 30427, "rooted": 30428, "corrup": 30429, "ðŁĻĮðŁĻĮ": 30430, "brisban": 30431, "atz": 30432, "ahl": 30433, "remy": 30434, "talent": 30435, "avalon": 30436, "rada": 30437, "pauline": 30438, "locomotive": 30439, "goons": 30440, "nemo": 30441, "maserati": 30442, "icu": 30443, "stutt": 30444, "historically": 30445, "smb": 30446, "presby": 30447, "avoid": 30448, "sooners": 30449, "rhinestone": 30450, "wad": 30451, "rising": 30452, "trot": 30453, "modes": 30454, "regent": 30455, "optimize": 30456, "reece": 30457, "smu": 30458, "verti": 30459, "newyorkcity": 30460, "cortez": 30461, "rac": 30462, "incase": 30463, "sinc": 30464, "fielding": 30465, "etta": 30466, "tiffany": 30467, "almonds": 30468, "saddle": 30469, "krat": 30470, "matter": 30471, "glow": 30472, "starving": 30473, "glo": 30474, "crappy": 30475, "slur": 30476, "std": 30477, "monitors": 30478, "receipt": 30479, "maymayentrata": 30480, "mcil": 30481, "unis": 30482, "rainbows": 30483, "caldwell": 30484, "pacquiao": 30485, "jop": 30486, "afe": 30487, "hook": 30488, "essen": 30489, "wizard": 30490, "median": 30491, "flaws": 30492, "coms": 30493, "âĿĦ": 30494, "ingh": 30495, "haynes": 30496, "antonio": 30497, "templates": 30498, "outer": 30499, "naw": 30500, "cardigan": 30501, "belgrade": 30502, "ðŁĴī": 30503, "homo": 30504, "aise": 30505, "ropes": 30506, "nove": 30507, "whatyou": 30508, "trigge": 30509, "conception": 30510, "adukone": 30511, "nadi": 30512, "friars": 30513, "swer": 30514, "adjusted": 30515, "hotline": 30516, "sanity": 30517, "kaur": 30518, "downloading": 30519, "cgi": 30520, "tenor": 30521, "ethnic": 30522, "appalach": 30523, "ุ": 30524, "pag": 30525, "golds": 30526, "onset": 30527, "investigator": 30528, "cartel": 30529, "peacefully": 30530, "jarrett": 30531, "catalan": 30532, "polio": 30533, "num": 30534, "frustration": 30535, "dharma": 30536, "mylife": 30537, "âľĮðŁı»": 30538, "aberdeen": 30539, "musa": 30540, "binder": 30541, "sparkly": 30542, "fleeing": 30543, "instinct": 30544, "coping": 30545, "dominance": 30546, "illers": 30547, "era": 30548, "uconn": 30549, "looms": 30550, "livingston": 30551, "gali": 30552, "hes": 30553, "cma": 30554, "bela": 30555, "seley": 30556, "monk": 30557, "lach": 30558, "marx": 30559, "´": 30560, "merica": 30561, "womanin": 30562, "essex": 30563, "raina": 30564, "jimi": 30565, "neptune": 30566, "zack": 30567, "chinese": 30568, "martins": 30569, "chandelier": 30570, "hern": 30571, "withus": 30572, "earl": 30573, "asphalt": 30574, "modules": 30575, "stp": 30576, "ulla": 30577, "psychiatric": 30578, "mileage": 30579, "captivating": 30580, "sider": 30581, "mento": 30582, "mort": 30583, "trance": 30584, "talbot": 30585, "abby": 30586, "ìĥ": 30587, "âľĮðŁı¼": 30588, "jak": 30589, "dawn": 30590, "turnup": 30591, "screwed": 30592, "feds": 30593, "blueprint": 30594, "ðŁĴĸðŁĴĸ": 30595, "harsh": 30596, "eros": 30597, "insomnia": 30598, "bankers": 30599, "taemin": 30600, "misconduct": 30601, "humber": 30602, "gidi": 30603, "eduardo": 30604, "cona": 30605, "muscular": 30606, "consuming": 30607, "rash": 30608, "donnie": 30609, "dipped": 30610, "collie": 30611, "samuel": 30612, "meltdown": 30613, "ðŁĺįðŁĺįðŁĺį": 30614, "mez": 30615, "examining": 30616, "schwartz": 30617, "pristine": 30618, "ðŁIJĿ": 30619, "veit": 
30620, "fulfilling": 30621, "anesthe": 30622, "guesses": 30623, "draft": 30624, "somme": 30625, "solid": 30626, "pational": 30627, "hoped": 30628, "evolutionary": 30629, "aller": 30630, "entertained": 30631, "slips": 30632, "ludwig": 30633, "concludes": 30634, "sensible": 30635, "bonnet": 30636, "craze": 30637, "tras": 30638, "hazards": 30639, "constantine": 30640, "edics": 30641, "startrek": 30642, "toc": 30643, "occupational": 30644, "incheon": 30645, "deepikapadukone": 30646, "pizzas": 30647, "newcomer": 30648, "depart": 30649, "oppression": 30650, "ebony": 30651, "fossils": 30652, "trojan": 30653, "elen": 30654, "steaks": 30655, "khou": 30656, "positioning": 30657, "ugby": 30658, "redcross": 30659, "akh": 30660, "dolce": 30661, "usmnt": 30662, "ppen": 30663, "dilig": 30664, "mavs": 30665, "caller": 30666, "costello": 30667, "âĽĦ": 30668, "dyn": 30669, "things": 30670, "rhinos": 30671, "axi": 30672, "sarkar": 30673, "convocation": 30674, "atters": 30675, "ssss": 30676, "fungus": 30677, "eugen": 30678, "russo": 30679, "squat": 30680, "wsb": 30681, "elion": 30682, "williamsburg": 30683, "soff": 30684, "deficiency": 30685, "bearer": 30686, "okin": 30687, "keystone": 30688, "twain": 30689, "calming": 30690, "breakable": 30691, "wares": 30692, "horseracing": 30693, "combs": 30694, "bunting": 30695, "uit": 30696, "tland": 30697, "ðŁĴĻðŁĴĻðŁĴĻ": 30698, "gastron": 30699, "sabot": 30700, "ickers": 30701, "commissioners": 30702, "senate": 30703, "iiot": 30704, "athena": 30705, "nitrogen": 30706, "antony": 30707, "erotic": 30708, "dialo": 30709, "missou": 30710, "hypocr": 30711, "âľĪ": 30712, "kaepernick": 30713, "canv": 30714, "droo": 30715, "cleveland": 30716, "osh": 30717, "monsta": 30718, "stefano": 30719, "^)": 30720, "shul": 30721, "poison": 30722, "hae": 30723, "commercials": 30724, "maul": 30725, "nitro": 30726, "coworker": 30727, "aloe": 30728, "vapor": 30729, "tents": 30730, "russian": 30731, "quid": 30732, "questionable": 30733, "midget": 30734, "poker": 30735, "girlfriends": 30736, "sinthe": 30737, "eritrea": 30738, "tenure": 30739, "deposits": 30740, "buckeyes": 30741, "spotter": 30742, "theodore": 30743, "trinity": 30744, "joaquin": 30745, "ucci": 30746, "followthe": 30747, "cafc": 30748, "mpa": 30749, "ðŁIJ»": 30750, "plotting": 30751, "domino": 30752, "taek": 30753, "sionally": 30754, "dicaprio": 30755, "pap": 30756, "carmel": 30757, "iger": 30758, "btcc": 30759, "bethle": 30760, "wwwbigbaldhead": 30761, "foodie": 30762, "baghdad": 30763, "masonry": 30764, "offended": 30765, "à·": 30766, "à¸ģ": 30767, "scro": 30768, "verses": 30769, "orient": 30770, "arches": 30771, "piyu": 30772, "knowyour": 30773, "gree": 30774, "takers": 30775, "guard": 30776, "dishon": 30777, "bucketlist": 30778, "bhafc": 30779, "wardly": 30780, "ðŁİīðŁİĬ": 30781, "leighton": 30782, "pew": 30783, "stray": 30784, "assaulted": 30785, "inhal": 30786, "lyfe": 30787, "amarketing": 30788, "lx": 30789, "katz": 30790, "ubuntu": 30791, "meo": 30792, "cartoonist": 30793, "turnover": 30794, "miz": 30795, "dislike": 30796, "mullen": 30797, "mof": 30798, "bland": 30799, "hides": 30800, "emerges": 30801, "chorizo": 30802, "trustee": 30803, "mahog": 30804, "lansing": 30805, "paralympic": 30806, "faint": 30807, "fauna": 30808, "chal": 30809, "snar": 30810, "cath": 30811, "benton": 30812, "castillo": 30813, "slippery": 30814, "apricot": 30815, "oecd": 30816, "baro": 30817, "lz": 30818, "heming": 30819, "clowns": 30820, "coworkers": 30821, "peruvian": 30822, "commuters": 30823, "yell": 30824, "ðŁļ´": 30825, "undering": 30826, 
"vj": 30827, "ttp": 30828, "flipk": 30829, "wana": 30830, "socent": 30831, "ĤâĸĤâĸ": 30832, "à¤Ĥ": 30833, "oosa": 30834, "jagger": 30835, "dism": 30836, "eless": 30837, "dham": 30838, "calif": 30839, "aofficial": 30840, "eclip": 30841, "harrogate": 30842, "grapp": 30843, "comrade": 30844, "ntr": 30845, "concentrate": 30846, "thighs": 30847, "bitcoin": 30848, "belarus": 30849, "ëĵ": 30850, "enduring": 30851, "nowwatching": 30852, "industrial": 30853, "pip": 30854, "aron": 30855, "arat": 30856, "®": 30857, "whitby": 30858, "ooooooo": 30859, "saree": 30860, "ticals": 30861, "misleading": 30862, "yoon": 30863, "years": 30864, "sleigh": 30865, "romanian": 30866, "scissors": 30867, "vampires": 30868, "acup": 30869, "abba": 30870, "thweeksary": 30871, "centri": 30872, "flye": 30873, "uo": 30874, "cbi": 30875, "buena": 30876, "sind": 30877, "marino": 30878, "burr": 30879, "rebuilding": 30880, "ल": 30881, "anniversaire": 30882, "acca": 30883, "ðŁĴĢðŁĴĢ": 30884, "getting": 30885, "tulips": 30886, "wolfpack": 30887, "âľįï¸ı": 30888, "morethan": 30889, "takin": 30890, "ð٤ĺðŁı»": 30891, "ube": 30892, "monic": 30893, "doubts": 30894, "mower": 30895, "cobalt": 30896, "donne": 30897, "speculation": 30898, "arguably": 30899, "kaku": 30900, "https": 30901, "prosecution": 30902, "dinah": 30903, "stamatic": 30904, "disclosed": 30905, "beverly": 30906, "flwx": 30907, "crabs": 30908, "extraordinaire": 30909, "warmest": 30910, "imperi": 30911, "ologists": 30912, "traces": 30913, "parc": 30914, "lakeside": 30915, "amr": 30916, "teri": 30917, "hourly": 30918, "domination": 30919, "arrow": 30920, "shrewsbury": 30921, "ancestry": 30922, "wrangler": 30923, "triggered": 30924, "pensac": 30925, "rooster": 30926, "survives": 30927, "aon": 30928, "boko": 30929, "valor": 30930, "loveis": 30931, "lag": 30932, "pey": 30933, "focal": 30934, "outlaws": 30935, "blanc": 30936, "articho": 30937, "wits": 30938, "marshall": 30939, "diego": 30940, "supportsmall": 30941, "uca": 30942, "sah": 30943, "jeet": 30944, "synago": 30945, "governing": 30946, "ðŁĴ¬": 30947, "salads": 30948, "create": 30949, "miriam": 30950, "censored": 30951, "amide": 30952, "nou": 30953, "zeta": 30954, "allegiance": 30955, "*)": 30956, "blm": 30957, "rican": 30958, "pastors": 30959, "olympus": 30960, "bloc": 30961, "whirl": 30962, "starry": 30963, "prone": 30964, "yk": 30965, "pne": 30966, "congratulating": 30967, "bev": 30968, "sober": 30969, "loveisland": 30970, "sair": 30971, "aning": 30972, "tutorials": 30973, "qe": 30974, "lund": 30975, "inist": 30976, "clever": 30977, "taxpayer": 30978, "aliz": 30979, "wrench": 30980, "ddling": 30981, "capri": 30982, "hpa": 30983, "ðŁı»âĢįâĻĤï¸ı": 30984, "naj": 30985, "oj": 30986, "futuristic": 30987, "jellyfish": 30988, "ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥": 30989, "celery": 30990, "plank": 30991, "fila": 30992, "neme": 30993, "unhealthy": 30994, "lections": 30995, "ðŁ§¡": 30996, "ritchie": 30997, "nws": 30998, "mikha": 30999, "wonderwoman": 31000, "âĢİ": 31001, "hipstamatic": 31002, "kag": 31003, "ðŁĴľðŁĴľðŁĴľ": 31004, "poultry": 31005, "mow": 31006, "words": 31007, "loff": 31008, "ðŁ¤£ðŁ¤£": 31009, "relatable": 31010, "remixes": 31011, "kenyatta": 31012, "kem": 31013, "resigned": 31014, "fod": 31015, "straigh": 31016, "jlo": 31017, "hutch": 31018, "boxers": 31019, "colleen": 31020, "mags": 31021, "instructional": 31022, "kol": 31023, "attracts": 31024, "prag": 31025, "accountant": 31026, "goggles": 31027, "bru": 31028, "thole": 31029, "marrow": 31030, "leuke": 31031, "octo": 31032, "ponds": 31033, "bubbly": 31034, "heist": 31035, 
"ìĹij": 31036, "imp": 31037, "ahar": 31038, "haunt": 31039, "hallmark": 31040, "psych": 31041, "kkkkkkkk": 31042, "columb": 31043, "jumpsuit": 31044, "costco": 31045, "sidelines": 31046, "aggies": 31047, "overturned": 31048, "nib": 31049, "keychain": 31050, "fuk": 31051, "faf": 31052, "miam": 31053, "assistants": 31054, "cycled": 31055, "rider": 31056, "dammit": 31057, "redwings": 31058, "mages": 31059, "kins": 31060, "ìĤ": 31061, "hod": 31062, "sont": 31063, "caroline": 31064, "\"'": 31065, "cule": 31066, "braid": 31067, "felony": 31068, "arities": 31069, "rutherford": 31070, "depiction": 31071, "isabelle": 31072, "roach": 31073, "kday": 31074, "fifthharmony": 31075, "emy": 31076, "ligam": 31077, "barista": 31078, "albuquerque": 31079, "gross": 31080, "ðŁįº": 31081, "ooks": 31082, "ðŁij¼": 31083, "duncan": 31084, "tryin": 31085, "jags": 31086, "gould": 31087, "litho": 31088, "âģ£": 31089, "аÐ": 31090, "sammy": 31091, "tung": 31092, "casser": 31093, "apolo": 31094, "aaaaa": 31095, "mang": 31096, "asics": 31097, "shen": 31098, "pye": 31099, "turbul": 31100, "ssp": 31101, "saintsfc": 31102, "onlin": 31103, "nanny": 31104, "hester": 31105, "doz": 31106, "à¸Ķ": 31107, "thread": 31108, "rents": 31109, "khand": 31110, "ðŁĴªðŁı½": 31111, "unconditional": 31112, "robson": 31113, "carre": 31114, "phon": 31115, "sacrificed": 31116, "£": 31117, "autos": 31118, "parker": 31119, "oca": 31120, "login": 31121, "keegan": 31122, "hardcover": 31123, "doughnuts": 31124, "ðŁĮİ": 31125, "spitfire": 31126, "refreshments": 31127, "saskatoon": 31128, "commodore": 31129, "jf": 31130, "rubber": 31131, "halamadrid": 31132, "childcare": 31133, "strada": 31134, "iom": 31135, "rik": 31136, "dakar": 31137, "thermom": 31138, "cropped": 31139, "garu": 31140, "alik": 31141, "veni": 31142, "ift": 31143, "sika": 31144, "rituals": 31145, "zul": 31146, "ech": 31147, "©": 31148, "sudan": 31149, "lland": 31150, "ime": 31151, "docker": 31152, "ì¤": 31153, "feared": 31154, "fao": 31155, "walter": 31156, "nog": 31157, "mutuals": 31158, "lh": 31159, "align": 31160, "monia": 31161, "conceptart": 31162, "ðŁĻıðŁı¼": 31163, "scoe": 31164, "competence": 31165, "swine": 31166, "lyme": 31167, "launch": 31168, "greener": 31169, "abstractart": 31170, "inquis": 31171, "granada": 31172, "gaelic": 31173, "fluff": 31174, "dbacks": 31175, "graveyard": 31176, "babe": 31177, "academic": 31178, "adventurous": 31179, "johann": 31180, "~!": 31181, "bibi": 31182, "|#": 31183, "plings": 31184, "getty": 31185, "asb": 31186, "âĿ¤ï¸ı@": 31187, "staff": 31188, "religions": 31189, "bangor": 31190, "worldbookday": 31191, "megh": 31192, "devin": 31193, "ashore": 31194, "meridian": 31195, "github": 31196, "quiz": 31197, "allstars": 31198, "bestest": 31199, "irresi": 31200, "acker": 31201, "dote": 31202, "warrington": 31203, "polly": 31204, "neworleans": 31205, "crou": 31206, "wigs": 31207, "chey": 31208, "smithsonian": 31209, "lasag": 31210, "detour": 31211, "boris": 31212, "straps": 31213, "mariah": 31214, "intentionally": 31215, "koh": 31216, "ðŁį¸": 31217, "ssian": 31218, "marissa": 31219, "coral": 31220, "episcopal": 31221, "casualty": 31222, "tomo": 31223, "supplychain": 31224, "samp": 31225, "ongo": 31226, "roo": 31227, "caviar": 31228, "pfw": 31229, "claudio": 31230, "buffalo": 31231, "sations": 31232, "matty": 31233, "snapback": 31234, "lds": 31235, "alarms": 31236, "matte": 31237, "âĺĶï¸ı": 31238, "conditioner": 31239, "dors": 31240, "hex": 31241, "fizz": 31242, "astri": 31243, "sussex": 31244, "security": 31245, "qaeda": 31246, "allstar": 31247, 
"cocacola": 31248, "asone": 31249, "clicks": 31250, "scans": 31251, "mute": 31252, "heavier": 31253, "ðŁİ§": 31254, "âĺŀ": 31255, "lvl": 31256, "bookboost": 31257, "youtube": 31258, "flashes": 31259, "fjor": 31260, "csu": 31261, "explode": 31262, "dodge": 31263, "cairn": 31264, "gonzales": 31265, "thill": 31266, "pelle": 31267, "hartley": 31268, "renewable": 31269, "retin": 31270, "estre": 31271, "costarica": 31272, "shipyard": 31273, "ncfc": 31274, "priya": 31275, "aghan": 31276, "anath": 31277, "plugin": 31278, "corey": 31279, "rebound": 31280, "oru": 31281, "katrin": 31282, "hormone": 31283, "gim": 31284, "mahindra": 31285, "ssus": 31286, "parkland": 31287, "harper": 31288, "fantastic": 31289, "inferno": 31290, "epilo": 31291, "wrestling": 31292, "fect": 31293, "cit": 31294, "acoun": 31295, "tossed": 31296, "monumental": 31297, "chartered": 31298, "bust": 31299, "petra": 31300, "âĮļ": 31301, "wildflowerhour": 31302, "sweaters": 31303, "*.": 31304, "bler": 31305, "atech": 31306, "gowan": 31307, "demographic": 31308, "bral": 31309, "suicide": 31310, "renovations": 31311, "vuel": 31312, "sinister": 31313, "armani": 31314, "misogy": 31315, "pharrell": 31316, "naps": 31317, "uniting": 31318, "crusaders": 31319, "corgi": 31320, "insured": 31321, "thani": 31322, "noor": 31323, "gq": 31324, "dada": 31325, "bicycles": 31326, "snuggle": 31327, "schan": 31328, "tenberg": 31329, "ssal": 31330, "femme": 31331, "boil": 31332, "½ï¸ı": 31333, "reap": 31334, "occurring": 31335, "hussein": 31336, "divid": 31337, "stoke": 31338, "shalom": 31339, "naia": 31340, "olic": 31341, "frustrating": 31342, "Ùĩ": 31343, "igs": 31344, "grover": 31345, "scenarios": 31346, "nds": 31347, "brutality": 31348, "medalli": 31349, "buon": 31350, "sass": 31351, "skateboarding": 31352, "onyx": 31353, "lorry": 31354, "nyu": 31355, "gautam": 31356, "mmings": 31357, "gug": 31358, "endi": 31359, "lothian": 31360, "commando": 31361, "chalk": 31362, "phora": 31363, "assessing": 31364, "tigh": 31365, "crunchy": 31366, "aday": 31367, "isl": 31368, "ciara": 31369, "pilgrims": 31370, "kamal": 31371, "pto": 31372, "britanni": 31373, "tani": 31374, "smc": 31375, "lure": 31376, "appstore": 31377, "aby": 31378, "golfing": 31379, "clc": 31380, "fau": 31381, "anas": 31382, "shutting": 31383, "regulated": 31384, "carnage": 31385, "scowboys": 31386, "allenge": 31387, "cma": 31388, "humboldt": 31389, "relle": 31390, "kumb": 31391, "heri": 31392, "refinery": 31393, "soundcheck": 31394, "dwayne": 31395, "bosnia": 31396, "isp": 31397, "thealth": 31398, "anniv": 31399, "relevance": 31400, "mya": 31401, "baggage": 31402, "dread": 31403, "sbc": 31404, "thed": 31405, "buh": 31406, "hijab": 31407, "loid": 31408, "kew": 31409, "cte": 31410, "respect": 31411, "lovelies": 31412, "cubes": 31413, "celebrate": 31414, "dirt": 31415, "savers": 31416, "_,": 31417, "garment": 31418, "pulitzer": 31419, "masjid": 31420, "beatport": 31421, "alarts": 31422, "encryption": 31423, "sner": 31424, "pleads": 31425, "foundry": 31426, "symmetry": 31427, "rumi": 31428, "birthplace": 31429, "scallops": 31430, "supple": 31431, "pivotal": 31432, "tati": 31433, "node": 31434, "sod": 31435, "proxim": 31436, "trics": 31437, "coldest": 31438, "brent": 31439, "mandu": 31440, "clair": 31441, "each": 31442, "andalu": 31443, "hiddleston": 31444, "ðŁIJº": 31445, "melts": 31446, "vance": 31447, "pinn": 31448, "sements": 31449, "screened": 31450, "sachs": 31451, "obl": 31452, "icha": 31453, "âĺĺï¸ı": 31454, "schoolers": 31455, "healed": 31456, "logged": 31457, "ð٤ĺðŁı¼": 31458, "icus": 
31459, "boredom": 31460, "bish": 31461, "bffs": 31462, "talking": 31463, "suresh": 31464, "hookem": 31465, "deon": 31466, "defl": 31467, "eileen": 31468, "ðŁįķ": 31469, "womenintech": 31470, "risotto": 31471, "ranger": 31472, "advertise": 31473, "à¸ģà¸": 31474, "telly": 31475, "lago": 31476, "dartmoor": 31477, "dong": 31478, "skates": 31479, "logo": 31480, "unner": 31481, "mailbox": 31482, "masala": 31483, "looooo": 31484, "amethyst": 31485, "chewing": 31486, "cbb": 31487, "australians": 31488, "rcmp": 31489, "gameart": 31490, "#...": 31491, "korn": 31492, "extremism": 31493, "fruitful": 31494, "ancient": 31495, "pubg": 31496, "polite": 31497, "whit": 31498, "murals": 31499, "mgr": 31500, "lineman": 31501, "davao": 31502, "stems": 31503, "tennis": 31504, "avage": 31505, "tupac": 31506, "gigantic": 31507, "hsbc": 31508, "autobiography": 31509, "upthe": 31510, "ีà¹Ī": 31511, "regal": 31512, "figuring": 31513, "kul": 31514, "missy": 31515, "hoop": 31516, "gras": 31517, "forums": 31518, "backlash": 31519, "abducted": 31520, "pnw": 31521, "minic": 31522, "butt": 31523, "bottoms": 31524, "aton": 31525, "veng": 31526, "ðŁĮı": 31527, "delaney": 31528, "prabhu": 31529, "fanclub": 31530, "overhaul": 31531, "healthye": 31532, "syno": 31533, "aaf": 31534, "renamed": 31535, "kimi": 31536, "uncle": 31537, "mancity": 31538, "seu": 31539, "quanti": 31540, "esteem": 31541, "umin": 31542, "enzo": 31543, "melvin": 31544, "undergo": 31545, "jhar": 31546, "farah": 31547, "coasters": 31548, "humphrey": 31549, "mhz": 31550, "childrens": 31551, "^.": 31552, "dhi": 31553, "disruptive": 31554, "integrating": 31555, "rnb": 31556, "oversized": 31557, "aide": 31558, "neau": 31559, "documentation": 31560, "ðŁijĢðŁijĢ": 31561, "palo": 31562, "hearth": 31563, "riyad": 31564, "punctu": 31565, "abcnews": 31566, "secures": 31567, "boyband": 31568, "birch": 31569, "juco": 31570, "traff": 31571, "legislators": 31572, "baya": 31573, "ãĤ¯": 31574, "noises": 31575, "collects": 31576, "swarm": 31577, "kner": 31578, "bishops": 31579, "sturgeon": 31580, "snapping": 31581, "mol": 31582, "freaky": 31583, "chairperson": 31584, "trop": 31585, "lynch": 31586, "carcin": 31587, "artsy": 31588, "esto": 31589, "chai": 31590, "flur": 31591, "invali": 31592, "sausages": 31593, "imel": 31594, "jor": 31595, "funfact": 31596, "witter": 31597, "punished": 31598, "acons": 31599, "hya": 31600, "reversi": 31601, "emc": 31602, "diffu": 31603, "zx": 31604, "spaw": 31605, "clad": 31606, "dmit": 31607, "holland": 31608, "fresco": 31609, "payroll": 31610, "abundant": 31611, "stuffing": 31612, "moro": 31613, "cny": 31614, "boycott": 31615, "wendy": 31616, "eleven": 31617, "provoc": 31618, "pilot": 31619, "trx": 31620, "bead": 31621, "climateaction": 31622, "rion": 31623, "assie": 31624, "ìĸ": 31625, "osm": 31626, "islamic": 31627, "hoar": 31628, "goodreads": 31629, "alici": 31630, "afternoons": 31631, "spokesman": 31632, "jolie": 31633, "itas": 31634, "mascara": 31635, "âĻ©âĻ«": 31636, "prevail": 31637, "beetroot": 31638, "lujah": 31639, "kli": 31640, "dodger": 31641, "»": 31642, "rule": 31643, "ln": 31644, "scream": 31645, "hobart": 31646, "colbert": 31647, "rtc": 31648, "erm": 31649, "patro": 31650, "quoting": 31651, "slive": 31652, "quest": 31653, "nonfiction": 31654, "seminary": 31655, "prosecutors": 31656, "vest": 31657, "expressway": 31658, "gge": 31659, "nautical": 31660, "etf": 31661, "ðŁİīðŁİĬ": 31662, "duration": 31663, "chaired": 31664, "thefilm": 31665, "fabio": 31666, "sheh": 31667, "cano": 31668, "ðŁĴªðŁı»": 31669, "withdraw": 31670, "!:)": 
31671, "corpus": 31672, "phenom": 31673, "yelp": 31674, "lawn": 31675, "entom": 31676, "snapper": 31677, "butte": 31678, "pinball": 31679, "proxy": 31680, "libre": 31681, "allevi": 31682, "nada": 31683, "gabriel": 31684, "fowl": 31685, "eureka": 31686, "daphne": 31687, "tunes": 31688, "punched": 31689, "whore": 31690, "jog": 31691, "rential": 31692, "manners": 31693, "ope": 31694, "whufc": 31695, "guth": 31696, "revolt": 31697, "sneaker": 31698, "philharmonic": 31699, "hoste": 31700, "sovereignty": 31701, "ðŁĻıðŁĻıðŁĻı": 31702, "fishing": 31703, "sciart": 31704, "feta": 31705, "ipp": 31706, "dumping": 31707, "kelown": 31708, "giri": 31709, "digits": 31710, "salu": 31711, "sanjay": 31712, "tweeters": 31713, "spas": 31714, "colchester": 31715, "scab": 31716, "madd": 31717, "à¹Ħà¸": 31718, "Äĩ": 31719, "geddon": 31720, "marchfor": 31721, "dop": 31722, "maureen": 31723, "unplugged": 31724, "dido": 31725, "fashionblogger": 31726, "upa": 31727, "mexic": 31728, "tary": 31729, "polye": 31730, "jameson": 31731, "vt": 31732, "grinder": 31733, "maddy": 31734, "consultancy": 31735, "¬ë": 31736, "leagueoflegends": 31737, "accents": 31738, "umni": 31739, "janeiro": 31740, "tuss": 31741, "hens": 31742, "amplifier": 31743, "toshi": 31744, "prettier": 31745, "prevents": 31746, "newtown": 31747, "redwood": 31748, "vantage": 31749, "ballard": 31750, "artof": 31751, "ashe": 31752, "asion": 31753, "lacey": 31754, "apat": 31755, "grove": 31756, "à¸Ħ": 31757, "rwand": 31758, "realtors": 31759, "traitor": 31760, "bedding": 31761, "ör": 31762, "zion": 31763, "flashing": 31764, "campan": 31765, "boomer": 31766, "secretariat": 31767, "abol": 31768, "litigation": 31769, "contamination": 31770, "sedly": 31771, "shredded": 31772, "infor": 31773, "doherty": 31774, "benchmark": 31775, "roche": 31776, "skateboard": 31777, "shovel": 31778, "izz": 31779, "topper": 31780, "oster": 31781, "labyrin": 31782, "autum": 31783, "kong": 31784, "hummus": 31785, "viz": 31786, "technews": 31787, "klaus": 31788, "amusing": 31789, "socialmediamarketing": 31790, "ides": 31791, "castell": 31792, "stee": 31793, "underestimate": 31794, "calab": 31795, "paign": 31796, "billing": 31797, "unanimously": 31798, "gmb": 31799, "flyfishing": 31800, "hathaway": 31801, "commercial": 31802, "colouring": 31803, "skulls": 31804, "pivot": 31805, "tep": 31806, "tbc": 31807, "motorway": 31808, "xpress": 31809, "constructive": 31810, "puk": 31811, "underlying": 31812, "kirsten": 31813, "maniac": 31814, "chao": 31815, "sema": 31816, "chiffon": 31817, "ðŁijĮðŁı»": 31818, "verona": 31819, "komo": 31820, "standoff": 31821, "wiped": 31822, "cated": 31823, "blair": 31824, "workin": 31825, "msc": 31826, "bethlehem": 31827, "swipe": 31828, "unexpec": 31829, "pees": 31830, "petri": 31831, "origami": 31832, "ðŁijħ": 31833, "mexico": 31834, "flavor": 31835, "rudd": 31836, "cannabis": 31837, "maru": 31838, "riddle": 31839, "worshi": 31840, "silon": 31841, "schat": 31842, "apse": 31843, "tanger": 31844, "bious": 31845, "eer": 31846, "questioned": 31847, "ozar": 31848, "dank": 31849, "anglesey": 31850, "charan": 31851, "baku": 31852, "competen": 31853, "repri": 31854, "batter": 31855, "saxon": 31856, "calves": 31857, "lengths": 31858, "$$$": 31859, "âŀ¡ï¸ı": 31860, "immersion": 31861, "gaunt": 31862, "carry": 31863, "cyto": 31864, "banda": 31865, "shutt": 31866, "experience": 31867, "elgin": 31868, "mousse": 31869, "taz": 31870, "êµ": 31871, "incorrect": 31872, "enz": 31873, "bham": 31874, "moron": 31875, "sover": 31876, "arun": 31877, "tipped": 31878, "lable": 31879, 
"dearly": 31880, "bautista": 31881, "íĻ": 31882, "mortal": 31883, "woop": 31884, "dtla": 31885, "shocks": 31886, "davos": 31887, "ðŁĵĿ": 31888, "swimwear": 31889, "herman": 31890, "ðŁijĩðŁijĩ": 31891, "zir": 31892, "neglected": 31893, "graced": 31894, "campuses": 31895, "avs": 31896, "arora": 31897, "swachhb": 31898, "livepd": 31899, "accra": 31900, "enquiries": 31901, "shooters": 31902, "kurt": 31903, "vancouver": 31904, "bradley": 31905, "garda": 31906, "gü": 31907, "olla": 31908, "attracting": 31909, "upton": 31910, "newin": 31911, "lumia": 31912, "furnace": 31913, "evers": 31914, "eon": 31915, "swa": 31916, "rookies": 31917, "aoc": 31918, "vss": 31919, "brisket": 31920, "torch": 31921, "yoda": 31922, "heartland": 31923, "taco": 31924, "phony": 31925, "foodbank": 31926, "abbey": 31927, "babylon": 31928, "uy": 31929, "greate": 31930, "expresses": 31931, "dandy": 31932, "scapes": 31933, "survivor": 31934, "rond": 31935, "eci": 31936, "havin": 31937, "abel": 31938, "childish": 31939, "torque": 31940, "wavy": 31941, "urself": 31942, "kanyewest": 31943, "yearof": 31944, "alestine": 31945, "obrien": 31946, "alfon": 31947, "skag": 31948, "korean": 31949, "anchorage": 31950, "valeri": 31951, "dew": 31952, "ðŁİ¨": 31953, "landslide": 31954, "carole": 31955, "christen": 31956, "gophers": 31957, "afi": 31958, "priyanka": 31959, "qq": 31960, "powerof": 31961, "itte": 31962, "pcso": 31963, "twol": 31964, "pry": 31965, "intellectu": 31966, "guerrero": 31967, "piles": 31968, "wishlist": 31969, "wren": 31970, "timetable": 31971, "ëı": 31972, "prodigy": 31973, "gibbons": 31974, "./": 31975, "neur": 31976, "anzac": 31977, "murray": 31978, "viest": 31979, "plaster": 31980, "lair": 31981, "artgallery": 31982, "intercontinental": 31983, "gbr": 31984, "bellator": 31985, "namjoon": 31986, "mammals": 31987, "amel": 31988, "yaw": 31989, "sarasota": 31990, "camar": 31991, "budding": 31992, "summari": 31993, "acosta": 31994, "lash": 31995, "eyou": 31996, "postgraduate": 31997, "instructors": 31998, "tig": 31999, "constant": 32000, "werewolf": 32001, "icos": 32002, "clas": 32003, "glenn": 32004, "budge": 32005, "ðŁĻĤ": 32006, "erta": 32007, "stains": 32008, "persecution": 32009, "cumbri": 32010, "och": 32011, "synergy": 32012, "huang": 32013, "scandin": 32014, "midterms": 32015, "commentator": 32016, "regarded": 32017, "perpetual": 32018, "boiling": 32019, "alp": 32020, "lange": 32021, "schle": 32022, "faceli": 32023, "tweeta": 32024, "ridden": 32025, "oktoberfest": 32026, "charlottesville": 32027, "iklan": 32028, "jou": 32029, "chatham": 32030, "bsc": 32031, "ðŁį¦": 32032, "strauss": 32033, "mellow": 32034, "xxxx": 32035, "happyhour": 32036, "reactor": 32037, "wwer": 32038, "distraction": 32039, "atorial": 32040, "ðŁĴªðŁı¼": 32041, "twinpeaks": 32042, "fayette": 32043, "aor": 32044, "kok": 32045, "broom": 32046, "syfy": 32047, "ouse": 32048, "amag": 32049, "Ø·": 32050, "ubisoft": 32051, "lulu": 32052, "hallmark": 32053, "stuart": 32054, "itya": 32055, "sideline": 32056, "vengeance": 32057, "relu": 32058, "sexism": 32059, "bouncing": 32060, "unites": 32061, "gustav": 32062, "tessa": 32063, "stump": 32064, "proclamation": 32065, "imax": 32066, "dividend": 32067, "colby": 32068, "ðŁįİ": 32069, "playwright": 32070, "unsafe": 32071, "cosmo": 32072, "ðŁĩ²ðŁĩ½": 32073, "cupboard": 32074, "constituents": 32075, "anglia": 32076, "rampage": 32077, "ðŁĺįðŁĺįðŁĺįðŁĺįðŁĺį": 32078, "thanked": 32079, "takeaways": 32080, "shroff": 32081, "debat": 32082, "khur": 32083, "conducts": 32084, "formats": 32085, "à©": 32086, "portage": 
32087, "graphers": 32088, "uten": 32089, "prem": 32090, "moines": 32091, "condemns": 32092, "sous": 32093, "lps": 32094, "fcs": 32095, "dealership": 32096, "leukemia": 32097, "bureau": 32098, "skid": 32099, "guardiola": 32100, "caster": 32101, "third": 32102, "avoided": 32103, "encyclo": 32104, "csr": 32105, "vixx": 32106, "analyzing": 32107, "shear": 32108, "duluth": 32109, "shapiro": 32110, "chanting": 32111, "stresses": 32112, "asbe": 32113, "militia": 32114, "ãĥª": 32115, "collin": 32116, "arsene": 32117, "suresh": 32118, "teachings": 32119, "yixing": 32120, "shill": 32121, "nudes": 32122, "svu": 32123, "clearwater": 32124, "warped": 32125, "prolife": 32126, "artistson": 32127, "itu": 32128, "versailles": 32129, "galaxy": 32130, "axel": 32131, "springst": 32132, "cala": 32133, "huhu": 32134, "scu": 32135, "commitments": 32136, "exeter": 32137, "poignant": 32138, "motion": 32139, "conservatory": 32140, "rowdy": 32141, "recalled": 32142, "musk": 32143, "embelli": 32144, "sothe": 32145, "âĺĢ": 32146, "stopper": 32147, "schild": 32148, "tope": 32149, "elmo": 32150, "ziel": 32151, "jom": 32152, "barnsley": 32153, "snowden": 32154, "ontour": 32155, "journey": 32156, "hillsborough": 32157, "parole": 32158, "wts": 32159, "moving": 32160, "agility": 32161, "tivo": 32162, "ffers": 32163, "kindleunlimited": 32164, "gwen": 32165, "annan": 32166, "ahmad": 32167, "textured": 32168, "hepatitis": 32169, "dram": 32170, "insiders": 32171, "tissues": 32172, "ãĥĦ": 32173, "fcbarcelona": 32174, "cratic": 32175, "naacp": 32176, "pecan": 32177, "fgm": 32178, "customize": 32179, "concert": 32180, "gsm": 32181, "peg": 32182, "pone": 32183, "justintrudeau": 32184, "supercars": 32185, "happyholidays": 32186, "bular": 32187, "adox": 32188, "laptops": 32189, "digitalhealth": 32190, "destination": 32191, "gradually": 32192, "áĥ¦": 32193, "poppy": 32194, "ssl": 32195, "inhibit": 32196, "starlight": 32197, "offro": 32198, "gloomy": 32199, "xper": 32200, "halder": 32201, "implants": 32202, "leto": 32203, "hassel": 32204, "aas": 32205, "untold": 32206, "enci": 32207, "liberia": 32208, "oran": 32209, "contests": 32210, "ilah": 32211, "smag": 32212, "scout": 32213, "marianne": 32214, "cryo": 32215, "scheduling": 32216, "los": 32217, "kane": 32218, "stuttgart": 32219, "nese": 32220, "lawrence": 32221, "dain": 32222, "photom": 32223, "carou": 32224, "ร": 32225, "gwy": 32226, "nationaldogday": 32227, "roasting": 32228, "bandcamp": 32229, "kentucky": 32230, "stretches": 32231, "kerel": 32232, "cashe": 32233, "ãĤ¸": 32234, "stax": 32235, "transi": 32236, "doggie": 32237, "atric": 32238, "halle": 32239, "civic": 32240, "browning": 32241, "leinster": 32242, "catday": 32243, "highland": 32244, "joyous": 32245, "incumb": 32246, "orlando": 32247, "romo": 32248, "colton": 32249, "delta": 32250, "carab": 32251, "rotc": 32252, "asteroid": 32253, "goosebumps": 32254, "mology": 32255, "yoko": 32256, "ands": 32257, "tomorrows": 32258, "redcarpet": 32259, "smp": 32260, "casio": 32261, "ðŁ¤£ðŁ¤£ðŁ¤£": 32262, "seau": 32263, "rejection": 32264, "rotating": 32265, "bipartisan": 32266, "thun": 32267, "mati": 32268, "boni": 32269, "oll": 32270, "energye": 32271, "doit": 32272, "lj": 32273, "motherhood": 32274, "louise": 32275, "necklaces": 32276, "elite": 32277, "nix": 32278, "lcs": 32279, "env": 32280, "glu": 32281, "lesh": 32282, "crank": 32283, "susie": 32284, "mclau": 32285, "sotu": 32286, "crowley": 32287, "ratri": 32288, "used": 32289, "breton": 32290, "alfredo": 32291, "yeo": 32292, "travelpics": 32293, "tipp": 32294, "ellison": 32295, 
"saxophone": 32296, "mered": 32297, "heughan": 32298, "taine": 32299, "fes": 32300, "viro": 32301, "supposedly": 32302, "ias": 32303, "digestive": 32304, "yle": 32305, "lizzy": 32306, "wildlifephotography": 32307, "brianna": 32308, "westfield": 32309, "rained": 32310, "amher": 32311, "ðŁĺĦðŁĺĦ": 32312, "distribute": 32313, "bottom": 32314, "preserving": 32315, "oiland": 32316, "crafty": 32317, "descen": 32318, "colling": 32319, "shakespearesunday": 32320, "rwc": 32321, "angled": 32322, "cian": 32323, "tations": 32324, "montage": 32325, "meyers": 32326, "francesca": 32327, "ðŁĮ·": 32328, "wiggins": 32329, "sanford": 32330, "volunteer": 32331, "carra": 32332, "bark": 32333, "varied": 32334, "plin": 32335, "amu": 32336, "kapil": 32337, "rockers": 32338, "quind": 32339, "brane": 32340, "inmate": 32341, "ental": 32342, "improvis": 32343, "michigan": 32344, "retweeting": 32345, "progressing": 32346, "mercedesbenz": 32347, "smoker": 32348, "physiology": 32349, "dorado": 32350, "wattpad": 32351, "hwa": 32352, "srbachchan": 32353, "wga": 32354, "volatility": 32355, "hire": 32356, "acap": 32357, "wnba": 32358, "heinz": 32359, "stitches": 32360, "kidnapping": 32361, "burys": 32362, "limb": 32363, "fitters": 32364, "thumbnail": 32365, "tone": 32366, "mirand": 32367, "desirable": 32368, "addison": 32369, "taran": 32370, "tamilnadu": 32371, "spectator": 32372, "sociology": 32373, "amitshah": 32374, "remotely": 32375, "âϦ": 32376, "hamid": 32377, "rds": 32378, "glee": 32379, "smoothly": 32380, "schro": 32381, "erc": 32382, "laliga": 32383, "heals": 32384, "usf": 32385, "nishi": 32386, "dhu": 32387, "unil": 32388, "hle": 32389, "tromb": 32390, "bhutan": 32391, "pilipinas": 32392, "seung": 32393, "whitman": 32394, "tey": 32395, "mince": 32396, "snowboarding": 32397, "reau": 32398, "kker": 32399, "avo": 32400, "zachary": 32401, "ranveer": 32402, "tik": 32403, "govern": 32404, "qual": 32405, "becky": 32406, "anthropology": 32407, "atten": 32408, "groceries": 32409, "debit": 32410, "warp": 32411, "silicon": 32412, "hawaii": 32413, "ðŁĴħ": 32414, "pomegranate": 32415, "peer": 32416, "oranges": 32417, "peopleschoice": 32418, "endure": 32419, "ðŁĴĽðŁĴĽ": 32420, "ãĤ¹ãĥ": 32421, "acial": 32422, "ahaha": 32423, "stuk": 32424, "imperial": 32425, "blond": 32426, "powder": 32427, "knots": 32428, "vince": 32429, "woodlands": 32430, "dena": 32431, "watchin": 32432, "matcha": 32433, "mahat": 32434, "galaxies": 32435, "middlesbrough": 32436, "kö": 32437, "stree": 32438, "rescues": 32439, "waldo": 32440, "leroy": 32441, "despic": 32442, "realities": 32443, "tmnt": 32444, "haq": 32445, "uno": 32446, "pec": 32447, "bollywood": 32448, "blinds": 32449, "designthinking": 32450, "hems": 32451, "andhra": 32452, "absen": 32453, "fans": 32454, "stech": 32455, "shirehour": 32456, "blaine": 32457, "shakti": 32458, "purely": 32459, "ðŁıı": 32460, "trafal": 32461, "keynes": 32462, "grate": 32463, "tobias": 32464, "spontaneous": 32465, "saturated": 32466, "cavalry": 32467, "prisc": 32468, "ðŁĺij": 32469, "wht": 32470, "passi": 32471, "~~~": 32472, "virat": 32473, "pattinson": 32474, "lao": 32475, "weirdo": 32476, "sympathy": 32477, "juda": 32478, "occasionally": 32479, "credited": 32480, "statu": 32481, "esco": 32482, "hilly": 32483, "escape": 32484, "discharge": 32485, "seer": 32486, "maynard": 32487, "sudbury": 32488, "zlat": 32489, "oral": 32490, "weer": 32491, "encountered": 32492, "smelling": 32493, "oversight": 32494, "ê¸": 32495, "thatcher": 32496, "mackay": 32497, "youcan": 32498, "freep": 32499, "freedoms": 32500, "prophecy": 
32501, "hoe": 32502, "ishqba": 32503, "drake": 32504, "quits": 32505, "pelled": 32506, "turk": 32507, "ovi": 32508, "wesleyan": 32509, "newmusic": 32510, "legg": 32511, "cheng": 32512, "hilli": 32513, "ayy": 32514, "panties": 32515, "adversity": 32516, "adjac": 32517, "vaccination": 32518, "juke": 32519, "gac": 32520, "exceed": 32521, "timesof": 32522, "staining": 32523, "epcot": 32524, "vital": 32525, "upward": 32526, "bethesda": 32527, "apark": 32528, "mahi": 32529, "campfire": 32530, "enchanting": 32531, "rhapso": 32532, "hz": 32533, "naver": 32534, "fax": 32535, "validation": 32536, "acad": 32537, "nyr": 32538, "asym": 32539, "coordinated": 32540, "departed": 32541, "allery": 32542, "varies": 32543, "sprite": 32544, "chaplin": 32545, "ssoccer": 32546, "swat": 32547, "bret": 32548, "reluct": 32549, "tunesapp": 32550, "superstar": 32551, "reminiscing": 32552, "oco": 32553, "homegrown": 32554, "doughnut": 32555, "uncanny": 32556, "lapd": 32557, "thyroid": 32558, "!âĿ¤ï¸ı": 32559, "botanic": 32560, "bres": 32561, "spade": 32562, "iste": 32563, "echoes": 32564, "dulil": 32565, "bursting": 32566, "quiero": 32567, "ðŁijİ": 32568, "loyola": 32569, "amusement": 32570, "hails": 32571, "sleepy": 32572, "burglary": 32573, "âľı": 32574, "rogue": 32575, "cotland": 32576, "moors": 32577, "lower": 32578, "wicked": 32579, "ðŁĶĬ": 32580, "competiti": 32581, "argentine": 32582, "yvonne": 32583, "kartikeyan": 32584, "iliary": 32585, "gatsby": 32586, "precinct": 32587, "sixty": 32588, "naji": 32589, "cams": 32590, "practitioner": 32591, "ðŁĺ³ðŁĺ³": 32592, "pune": 32593, "negli": 32594, "julien": 32595, "invaded": 32596, "calibr": 32597, "clam": 32598, "dubai": 32599, "muk": 32600, "lantic": 32601, "product": 32602, "fedex": 32603, "ï¸ı:": 32604, "eura": 32605, "darius": 32606, "sling": 32607, "virtualreality": 32608, "homestead": 32609, "ðŁı³ï¸ıâĢįðŁĮĪ": 32610, "paced": 32611, "inha": 32612, "pulmon": 32613, "lazy": 32614, "premiering": 32615, "mastered": 32616, "inhe": 32617, "congregation": 32618, "bajo": 32619, "sporting": 32620, "newjersey": 32621, "horny": 32622, "lmaoo": 32623, "lengthy": 32624, "dut": 32625, "yogh": 32626, "swearing": 32627, "philosophical": 32628, "papua": 32629, "inski": 32630, "knowles": 32631, "dyke": 32632, "â̲": 32633, "token": 32634, "mcguire": 32635, "riot": 32636, "probability": 32637, "mccon": 32638, "gros": 32639, "sumat": 32640, "cite": 32641, "daa": 32642, "onda": 32643, "maddow": 32644, "chew": 32645, "boardgames": 32646, "sparked": 32647, "reclaimed": 32648, "adhd": 32649, "nyse": 32650, "imwithher": 32651, "equinox": 32652, "booths": 32653, "balsamic": 32654, "hazy": 32655, "dorchester": 32656, "agos": 32657, "seaw": 32658, "moderator": 32659, "seriea": 32660, "andersen": 32661, "pilgrim": 32662, "âŃIJâŃIJ": 32663, "itchen": 32664, "halli": 32665, "xton": 32666, "nathaniel": 32667, "munition": 32668, "celestial": 32669, "gaf": 32670, "zoom": 32671, "markle": 32672, "penthouse": 32673, "cale": 32674, "sfa": 32675, "barking": 32676, "tucket": 32677, "emery": 32678, "calorie": 32679, "lique": 32680, "adar": 32681, "mcnam": 32682, "tortilla": 32683, "woodpecker": 32684, "motown": 32685, "badger": 32686, "ayrshire": 32687, "scramble": 32688, "dday": 32689, "craziest": 32690, "perrie": 32691, "choco": 32692, "caste": 32693, "iot": 32694, "wrecked": 32695, "selecting": 32696, "ussr": 32697, "graft": 32698, "punt": 32699, "labou": 32700, "irst": 32701, "baek": 32702, "ÛĮ": 32703, "suki": 32704, "queu": 32705, "achat": 32706, "tester": 32707, "augmented": 32708, "wcvb": 
32709, "sinks": 32710, "ðŁĵ»": 32711, "rake": 32712, "interne": 32713, "because": 32714, "bellevue": 32715, "unearth": 32716, "lighten": 32717, "ðŁĺ£": 32718, "turnaround": 32719, "labeled": 32720, "unemployed": 32721, "twitterkurds": 32722, "leia": 32723, "hye": 32724, "greater": 32725, "ðŁIJİ": 32726, "timed": 32727, "ired": 32728, "ett": 32729, "limitations": 32730, "cabe": 32731, "sout": 32732, "beech": 32733, "annihil": 32734, "retrac": 32735, "yoona": 32736, "anger": 32737, "dennis": 32738, "supplying": 32739, "diz": 32740, "\"(": 32741, "scur": 32742, "gunman": 32743, "suho": 32744, "sauvignon": 32745, "ล": 32746, "wiley": 32747, "landon": 32748, "choreography": 32749, "prehistoric": 32750, "ðŁıĥ": 32751, "vargas": 32752, "assessments": 32753, "pinnacle": 32754, "dii": 32755, "chamberlain": 32756, "ìĪ": 32757, "vp": 32758, "presenters": 32759, "deutsche": 32760, "sunshine": 32761, "salutes": 32762, "rone": 32763, "busiest": 32764, "-.-": 32765, "motorists": 32766, "hemisphere": 32767, "alwx": 32768, "psp": 32769, "owa": 32770, "denying": 32771, "choc": 32772, "gutier": 32773, "hanuk": 32774, "muskete": 32775, "jaitley": 32776, "sewage": 32777, "tame": 32778, "thinkers": 32779, "shim": 32780, "sequo": 32781, "papar": 32782, "middleeast": 32783, "kwa": 32784, "keg": 32785, "patagonia": 32786, "noy": 32787, "barça": 32788, "takeoff": 32789, "hea": 32790, "à¬": 32791, "nsc": 32792, "gdc": 32793, "ðŁijĪ": 32794, "moustache": 32795, "melania": 32796, "thra": 32797, "â¬Ĩï¸ı": 32798, "pierced": 32799, "zeus": 32800, "fonts": 32801, "bera": 32802, "itiner": 32803, "qatar": 32804, "contrary": 32805, "ireland": 32806, "ify": 32807, "oulos": 32808, "communal": 32809, "fins": 32810, "unpaid": 32811, "paa": 32812, "ðŁijĩðŁı»": 32813, "rios": 32814, "oup": 32815, "filler": 32816, "cafeteria": 32817, "à¸Ń": 32818, "kasi": 32819, "caliber": 32820, "zulu": 32821, "vsco": 32822, "tsford": 32823, "dragonfly": 32824, "smokin": 32825, "pist": 32826, "psychologist": 32827, "diplomat": 32828, "webs": 32829, "buccane": 32830, "ா": 32831, "motivational": 32832, "dune": 32833, "bae": 32834, "cfs": 32835, "without": 32836, "eron": 32837, "iac": 32838, "atee": 32839, "pension": 32840, "frazier": 32841, "ensis": 32842, "skis": 32843, "parting": 32844, "gery": 32845, "territories": 32846, "nachos": 32847, "enight": 32848, "everlasting": 32849, "msdhoni": 32850, "tele": 32851, "spun": 32852, "podi": 32853, "sabah": 32854, "environmentally": 32855, "cease": 32856, "beaumont": 32857, "marta": 32858, "kelvin": 32859, "hoff": 32860, "sunil": 32861, "nda": 32862, "cob": 32863, "shale": 32864, "reedus": 32865, "unboxing": 32866, "ubio": 32867, "reopened": 32868, "nall": 32869, "capsules": 32870, "marr": 32871, "himalayas": 32872, "sweeter": 32873, "jaz": 32874, "fmr": 32875, "tweeter": 32876, "dhaka": 32877, "nau": 32878, "demi": 32879, "dfs": 32880, "taurus": 32881, "fading": 32882, "itutes": 32883, "cip": 32884, "overflow": 32885, "jeffrey": 32886, "donny": 32887, "cartunesapp": 32888, "ðŁįij": 32889, "prefecture": 32890, "danced": 32891, "cpt": 32892, "pleasing": 32893, "italk": 32894, "earthquakes": 32895, "ulation": 32896, "hio": 32897, "ãĢĭ": 32898, "antan": 32899, "nutrient": 32900, "deere": 32901, "selects": 32902, "enrichment": 32903, "riti": 32904, "trampol": 32905, "blamed": 32906, "jia": 32907, "contributors": 32908, "chesapeake": 32909, "pigeons": 32910, "tribunal": 32911, "maduro": 32912, "wsu": 32913, "ilove": 32914, "efficiently": 32915, "darcy": 32916, "warms": 32917, "arra": 32918, "ecu": 32919, 
"hower": 32920, "struggled": 32921, "rajinikanth": 32922, "ðŁĺ¢ðŁĺ¢": 32923, "housing": 32924, "strat": 32925, "elix": 32926, "dispro": 32927, "raffic": 32928, "thierry": 32929, "nasty": 32930, "cfb": 32931, "staffing": 32932, "alma": 32933, "backers": 32934, "henson": 32935, "skywalker": 32936, "realestate": 32937, "roos": 32938, "nessy": 32939, "chance": 32940, "cairns": 32941, "cci": 32942, "pedal": 32943, "lyft": 32944, "crossword": 32945, "waiter": 32946, "onlyin": 32947, "kruger": 32948, "kir": 32949, "alejandro": 32950, "cartier": 32951, "carrera": 32952, "repaired": 32953, "ouat": 32954, "unclear": 32955, "unbreakable": 32956, "todayin": 32957, "queries": 32958, "jody": 32959, "genital": 32960, "winner": 32961, "tol": 32962, "kelowna": 32963, "fascinated": 32964, "ãĥ¬": 32965, "srisri": 32966, "squared": 32967, "sprung": 32968, "negotiate": 32969, "privately": 32970, "aven": 32971, ">>>>>": 32972, "gical": 32973, "gavin": 32974, "chesterfield": 32975, "zumba": 32976, "orr": 32977, "natalia": 32978, "impeachment": 32979, "mnl": 32980, "carat": 32981, "critique": 32982, "credible": 32983, "tracy": 32984, "tani": 32985, "musik": 32986, "jigsaw": 32987, "gambia": 32988, "tolkien": 32989, "feu": 32990, "asper": 32991, "savory": 32992, "foxx": 32993, "fitt": 32994, "marlon": 32995, "lrt": 32996, "vell": 32997, "pbr": 32998, "imprisoned": 32999, "iom": 33000, "chul": 33001, "windshield": 33002, "kaye": 33003, "baa": 33004, "chord": 33005, "sart": 33006, "algon": 33007, "ministerial": 33008, "natgeo": 33009, "lazio": 33010, "norms": 33011, "ðŁijįðŁijį": 33012, "licking": 33013, "futbol": 33014, "unsung": 33015, "dallascowboys": 33016, "shred": 33017, "disturb": 33018, "devine": 33019, "beards": 33020, "chf": 33021, "bday": 33022, "rosso": 33023, "igor": 33024, "ayi": 33025, "siren": 33026, "kair": 33027, "stiles": 33028, "rof": 33029, "magnets": 33030, "uncover": 33031, "mouse": 33032, "banging": 33033, "sighted": 33034, "speople": 33035, "impact": 33036, "rowland": 33037, "kira": 33038, "environment": 33039, "lovethe": 33040, "psis": 33041, "mishra": 33042, "glendale": 33043, "cajun": 33044, "oche": 33045, "deception": 33046, "sexist": 33047, "straws": 33048, "sga": 33049, "buffer": 33050, "apostle": 33051, "spl": 33052, "popup": 33053, "ðŁļĹ": 33054, "rg": 33055, "uper": 33056, "ballin": 33057, "idy": 33058, "occasional": 33059, "nationalpark": 33060, "ðŁıĬ": 33061, "uan": 33062, "innovation": 33063, "ห": 33064, "teaparty": 33065, "rette": 33066, "counterfe": 33067, "bha": 33068, "recs": 33069, "igen": 33070, "ðŁĮIJ": 33071, "hummingbird": 33072, "cur": 33073, "haven": 33074, "lazar": 33075, "pueblo": 33076, "::": 33077, "zionist": 33078, "opath": 33079, "inverness": 33080, "promoter": 33081, "cartoon": 33082, "cabinets": 33083, "mahogany": 33084, "surveying": 33085, "rational": 33086, "feeling": 33087, "testify": 33088, "sow": 33089, "ocon": 33090, "ย": 33091, "neel": 33092, "maris": 33093, "solitary": 33094, "chemo": 33095, "radcliffe": 33096, "simons": 33097, "rosary": 33098, "newer": 33099, "jodie": 33100, "retali": 33101, "prawn": 33102, "paddy": 33103, "henge": 33104, "kala": 33105, "implant": 33106, "aty": 33107, "brentwood": 33108, "paradox": 33109, "enez": 33110, "redesigned": 33111, "pour": 33112, "wyd": 33113, "alde": 33114, "à¯ģ": 33115, "sold": 33116, "biomedical": 33117, "à¹Ĥ": 33118, "tttt": 33119, "matteo": 33120, "yser": 33121, "newton": 33122, "debun": 33123, "nerdy": 33124, "lool": 33125, "woon": 33126, "elisabeth": 33127, "ecc": 33128, "whi": 33129, "acho": 33130, 
"salvage": 33131, "salaries": 33132, "quity": 33133, "navigating": 33134, "ophthal": 33135, "consoles": 33136, "rebuilt": 33137, "opec": 33138, "asters": 33139, "shored": 33140, "setlist": 33141, "kathryn": 33142, "rhymes": 33143, "revisiting": 33144, "ashish": 33145, "lift": 33146, "repost": 33147, "soleil": 33148, "âı±": 33149, "wealth": 33150, "saat": 33151, "wec": 33152, "kingjames": 33153, "flipkart": 33154, "fieldwork": 33155, "segu": 33156, "modal": 33157, "bub": 33158, "arers": 33159, "ðŁįĴ": 33160, "clooney": 33161, "paddington": 33162, "necessity": 33163, "guthrie": 33164, "pente": 33165, "limo": 33166, "josie": 33167, "artin": 33168, "enc": 33169, "lhs": 33170, "betrayal": 33171, "infographics": 33172, "ier": 33173, "moa": 33174, "hearings": 33175, "bonjour": 33176, "symbolic": 33177, "agro": 33178, "wedges": 33179, "kristina": 33180, "wildflower": 33181, "athletic": 33182, "photography": 33183, "pesh": 33184, "cahill": 33185, "chilean": 33186, "goul": 33187, "fioren": 33188, "ðŁij¶": 33189, "zil": 33190, "skim": 33191, "badoo": 33192, "delia": 33193, "treble": 33194, "ncc": 33195, "ðŁĩ¦ðŁĩ": 33196, "ahouse": 33197, "bullock": 33198, "solitude": 33199, "اÙĨ": 33200, "cancers": 33201, "futureofwork": 33202, "hutch": 33203, "watershed": 33204, "warmongers": 33205, "spilled": 33206, "colombo": 33207, "moth": 33208, "associations": 33209, "weighed": 33210, "globalgoals": 33211, "notjust": 33212, "christi": 33213, "torg": 33214, "sweating": 33215, "maneu": 33216, "clusters": 33217, "â̼ï¸ıâ̼ï¸ı": 33218, "taped": 33219, "uly": 33220, "trusting": 33221, "yusuf": 33222, "tein": 33223, "rab": 33224, ",,,,": 33225, "sinai": 33226, "audible": 33227, "explicit": 33228, "crowns": 33229, "schiz": 33230, "atleast": 33231, "ðŁĹ£": 33232, "debra": 33233, "jesuit": 33234, "enegger": 33235, "zhen": 33236, "onesie": 33237, "iit": 33238, "ssf": 33239, "gurgaon": 33240, "chakra": 33241, "bearcats": 33242, "kran": 33243, "kawa": 33244, "requesting": 33245, "hanover": 33246, "gend": 33247, "soros": 33248, "mercy": 33249, "lovely": 33250, "doomed": 33251, "timmy": 33252, "kuz": 33253, "ull": 33254, "abram": 33255, "saison": 33256, "ãĥ«": 33257, "cleaners": 33258, "remo": 33259, "circuits": 33260, "barred": 33261, "oth": 33262, "moist": 33263, "madeleine": 33264, "gallo": 33265, "uj": 33266, "permits": 33267, "heaviest": 33268, "carols": 33269, "azte": 33270, "giorgio": 33271, "floats": 33272, "declaring": 33273, "usrc": 33274, "minat": 33275, "crafts": 33276, "prima": 33277, "conveni": 33278, "nickelodeon": 33279, "dancing": 33280, "ceremonial": 33281, "blogg": 33282, "twp": 33283, "anglican": 33284, "shek": 33285, "knick": 33286, "(((": 33287, "hubbard": 33288, "harvey": 33289, "hitman": 33290, "feng": 33291, "wesome": 33292, "forza": 33293, "sword": 33294, "opus": 33295, "brom": 33296, "gibility": 33297, "zal": 33298, "munch": 33299, "dancehall": 33300, "greedy": 33301, "hdmi": 33302, "rebirth": 33303, "ðŁĺĭðŁĺĭ": 33304, "sworld": 33305, "figurine": 33306, "compost": 33307, "kf": 33308, "engraving": 33309, "giorno": 33310, "stana": 33311, "kman": 33312, "hamster": 33313, "composers": 33314, "aje": 33315, "functionality": 33316, "polk": 33317, "isons": 33318, "airplanes": 33319, "tese": 33320, "horrors": 33321, "muscat": 33322, "given": 33323, "spence": 33324, "ðŁĩ¸ðŁĩ": 33325, "eliot": 33326, "achilles": 33327, "freck": 33328, "cryptocurrencies": 33329, "souther": 33330, "halo": 33331, "borneo": 33332, "politic": 33333, "hahahahah": 33334, "upstate": 33335, "siena": 33336, "obscure": 33337, "hausen": 
33338, "lloyd": 33339, "happyfriday": 33340, "motorbike": 33341, "bona": 33342, "americas": 33343, "hols": 33344, "-(": 33345, "sporty": 33346, "unaware": 33347, "revenues": 33348, "christopher": 33349, "banksy": 33350, "avan": 33351, "evapor": 33352, "compress": 33353, "eyeliner": 33354, "todos": 33355, "buffy": 33356, "renewableenergy": 33357, "lyrical": 33358, "archan": 33359, "rapist": 33360, "fairtrade": 33361, "lmaooo": 33362, "beatz": 33363, "proactive": 33364, "lapse": 33365, "irical": 33366, "reversal": 33367, "pode": 33368, "mcintyre": 33369, "macau": 33370, "ãĥķãĤ": 33371, "nashgrier": 33372, "fsa": 33373, "gall": 33374, "çĶŁ": 33375, "perpetr": 33376, "ilya": 33377, "configuration": 33378, "%;": 33379, "strange": 33380, "raci": 33381, "à¸ĩ": 33382, "pickups": 33383, "kovsky": 33384, "mammal": 33385, "wps": 33386, "gable": 33387, "comparative": 33388, "zh": 33389, "saveour": 33390, "davey": 33391, "onetsy": 33392, "mussels": 33393, "miser": 33394, "cristina": 33395, "electron": 33396, "crave": 33397, "loren": 33398, "precipitation": 33399, "mz": 33400, "ðŁį«": 33401, "vincen": 33402, "snowboard": 33403, "noida": 33404, "ahn": 33405, "marinated": 33406, "gtr": 33407, "townhall": 33408, "minis": 33409, "bethel": 33410, "advan": 33411, "sura": 33412, "shiel": 33413, "furry": 33414, "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 33415, "lynd": 33416, "soil": 33417, "scence": 33418, "seneca": 33419, "sharjah": 33420, "dickens": 33421, "credentials": 33422, "avar": 33423, "perk": 33424, "requiring": 33425, "prefer": 33426, "jian": 33427, "deca": 33428, "rach": 33429, "ingfor": 33430, "dele": 33431, "beep": 33432, "ðŁĴ»": 33433, "cisely": 33434, "huddle": 33435, "greensboro": 33436, "hawking": 33437, "hoax": 33438, "hangar": 33439, "çľ": 33440, "miso": 33441, "lovin": 33442, "greta": 33443, "abad": 33444, "logie": 33445, "atan": 33446, "snowflake": 33447, "mahesh": 33448, "fearthe": 33449, "alkal": 33450, "bobblehead": 33451, "bahn": 33452, "judged": 33453, "futu": 33454, "felix": 33455, "ðŁįĵ": 33456, "pike": 33457, "deriv": 33458, "notices": 33459, "auer": 33460, "dissuper": 33461, "orda": 33462, "wipes": 33463, "amino": 33464, "strikers": 33465, "footb": 33466, "dramas": 33467, "punching": 33468, "scoreless": 33469, "hemingway": 33470, "bih": 33471, "ballad": 33472, "chatter": 33473, "ammo": 33474, "klein": 33475, "fabrication": 33476, "karim": 33477, "zend": 33478, "histo": 33479, "volta": 33480, "rocky": 33481, "marketer": 33482, "xtreme": 33483, "sequencing": 33484, "paradigm": 33485, "cleats": 33486, "booming": 33487, "âģłâģł": 33488, "blockade": 33489, "prompts": 33490, "yoghurt": 33491, "purpose": 33492, "nur": 33493, "regulate": 33494, "noisy": 33495, "ingrid": 33496, "birdwatching": 33497, "bartender": 33498, "Ùĥ": 33499, "wordof": 33500, "chaotic": 33501, "shorty": 33502, "eldest": 33503, "zapp": 33504, "onceuponatime": 33505, "flyo": 33506, "ritos": 33507, "mikequind": 33508, "ðŁIJ´": 33509, "registering": 33510, ".]": 33511, "adol": 33512, "gggg": 33513, "purge": 33514, "kidlit": 33515, "arbor": 33516, "valves": 33517, "synagogue": 33518, "oth": 33519, "unanimous": 33520, "verification": 33521, "darrell": 33522, "ãģĦ": 33523, "vanderbilt": 33524, "tapestry": 33525, "prosper": 33526, "diddy": 33527, "drafting": 33528, "decep": 33529, "marquis": 33530, "stint": 33531, "michaeljackson": 33532, "peeled": 33533, "menus": 33534, "bbb": 33535, "scare": 33536, "email": 33537, "wrigley": 33538, "itis": 33539, "fell": 33540, "somethin": 33541, "barra": 33542, "edgar": 33543, "dipping": 33544, 
"puddle": 33545, "slade": 33546, "learner": 33547, "jalen": 33548, "ð٧IJ": 33549, "thedaily": 33550, "mikequindazzi": 33551, "jux": 33552, "iqbal": 33553, "mckinney": 33554, "raiser": 33555, "efan": 33556, "drone": 33557, "cato": 33558, "picket": 33559, "crowe": 33560, "latt": 33561, "uko": 33562, "giuseppe": 33563, "hini": 33564, "synthesi": 33565, "pontifex": 33566, "songwriting": 33567, "tod": 33568, "switches": 33569, "dinners": 33570, "hq": 33571, "gabrielle": 33572, "pensacola": 33573, "circle": 33574, "exposes": 33575, "evs": 33576, "riyadh": 33577, "promen": 33578, "ock": 33579, "saj": 33580, "citation": 33581, "brewco": 33582, "josi": 33583, "epaper": 33584, "drif": 33585, "pointless": 33586, "tangled": 33587, "cripp": 33588, "lineups": 33589, "fairies": 33590, "daze": 33591, "mourn": 33592, "bladder": 33593, "salz": 33594, "burundi": 33595, "bookmark": 33596, "thepeople": 33597, "subsequ": 33598, "principal": 33599, "sker": 33600, "courtney": 33601, "aoki": 33602, "racers": 33603, "adm": 33604, "moma": 33605, "criticalrole": 33606, "houn": 33607, "shedding": 33608, "saka": 33609, "aceous": 33610, "mckay": 33611, "husbands": 33612, "½": 33613, "meda": 33614, "accusations": 33615, "rosel": 33616, "ncis": 33617, "witnessing": 33618, "orama": 33619, "gods": 33620, "hilton": 33621, "elman": 33622, "ÃŃn": 33623, "megap": 33624, "craven": 33625, "announcer": 33626, "criteri": 33627, "sheffieldissuper": 33628, "militant": 33629, "consul": 33630, "hooded": 33631, "abyss": 33632, "bx": 33633, "madam": 33634, "locu": 33635, "maryam": 33636, "manicure": 33637, "gratis": 33638, "actresses": 33639, "rosario": 33640, "thisdayin": 33641, "kingly": 33642, "gnome": 33643, "celine": 33644, "rous": 33645, "heel": 33646, "lilac": 33647, "vishal": 33648, "abh": 33649, "thorns": 33650, "sls": 33651, "neal": 33652, "constructing": 33653, "beren": 33654, "slang": 33655, "mains": 33656, "farra": 33657, "sarko": 33658, "paige": 33659, "guiller": 33660, "lala": 33661, "iceberg": 33662, "noun": 33663, "planners": 33664, "ummm": 33665, "ouses": 33666, "illary": 33667, "maan": 33668, "boxing": 33669, "zipper": 33670, "srinagar": 33671, "miguel": 33672, "ostr": 33673, "mpo": 33674, "responsibly": 33675, "lanterns": 33676, "appliance": 33677, "xb": 33678, "grenade": 33679, "neglect": 33680, "dysle": 33681, "hammock": 33682, "nectar": 33683, "witcher": 33684, "rgv": 33685, "dience": 33686, "serbian": 33687, "seeded": 33688, "cruz": 33689, "bish": 33690, "sphe": 33691, "eq": 33692, "skyrim": 33693, "algebra": 33694, "philately": 33695, "bungalow": 33696, "geoff": 33697, "yves": 33698, "demanded": 33699, "considerations": 33700, "thevamp": 33701, "pawankalyan": 33702, "coded": 33703, "gritty": 33704, "eruption": 33705, "seinfeld": 33706, "unidenti": 33707, "ëĭĪ": 33708, "worm": 33709, "acus": 33710, "seung": 33711, "dung": 33712, "roland": 33713, "sud": 33714, "divisions": 33715, "ablanc": 33716, "shortest": 33717, "jf": 33718, "poun": 33719, "plantbased": 33720, "beto": 33721, "tougher": 33722, "mco": 33723, "donet": 33724, "markus": 33725, "vfl": 33726, "ðŁıł": 33727, "opening": 33728, "coward": 33729, "cabernet": 33730, "oxi": 33731, "burlesque": 33732, "sandra": 33733, "sumo": 33734, "consist": 33735, "thot": 33736, "cayman": 33737, "motorola": 33738, "gutierrez": 33739, "dslr": 33740, "yw": 33741, "nobel": 33742, "novice": 33743, "momsdemand": 33744, "grunge": 33745, "spor": 33746, "dcc": 33747, "presses": 33748, "slist": 33749, "allotment": 33750, "vocational": 33751, "ftc": 33752, "puja": 33753, "loven": 
33754, "uttarak": 33755, "tandem": 33756, "shep": 33757, "comedians": 33758, "anatom": 33759, "cantwait": 33760, "healthyeating": 33761, "westside": 33762, "margins": 33763, "chiang": 33764, "asbestos": 33765, "stupidity": 33766, "problematic": 33767, "fitbit": 33768, ":$": 33769, "ceilings": 33770, "shua": 33771, "protections": 33772, "biotic": 33773, "bengali": 33774, "rests": 33775, "biennale": 33776, "timo": 33777, "culmin": 33778, "eminent": 33779, "affection": 33780, "unbelievably": 33781, "individually": 33782, "canvassing": 33783, "whitt": 33784, "novasco": 33785, "chinson": 33786, "hpe": 33787, "gow": 33788, "gloucestershire": 33789, "pao": 33790, "threshold": 33791, "chevron": 33792, "sine": 33793, "wether": 33794, "ppie": 33795, "aquino": 33796, "antwerp": 33797, "âĸ¬": 33798, "poon": 33799, "instaf": 33800, "equine": 33801, "cinematography": 33802, "nbafinals": 33803, "valiant": 33804, "kilkenny": 33805, "terence": 33806, "systemic": 33807, "srl": 33808, "pound": 33809, "madeira": 33810, "plough": 33811, "trecht": 33812, "mated": 33813, "mpd": 33814, "ransomware": 33815, "phin": 33816, "liqui": 33817, "bbce": 33818, "boomer": 33819, "istandwith": 33820, "conju": 33821, "rte": 33822, "nara": 33823, "foolish": 33824, "dashing": 33825, "viernes": 33826, "brite": 33827, "dau": 33828, "juniper": 33829, "aida": 33830, "younow": 33831, "razer": 33832, "dei": 33833, "repeating": 33834, "comforting": 33835, "adjacent": 33836, "eto": 33837, "casted": 33838, "chatur": 33839, "muer": 33840, "synth": 33841, "sanitary": 33842, "macle": 33843, "independent": 33844, "lawful": 33845, "eerie": 33846, "hor": 33847, "ðŁĴŃ": 33848, "amrit": 33849, "velo": 33850, "stationery": 33851, "muf": 33852, "maymay": 33853, "contemplating": 33854, "elaborate": 33855, "gregor": 33856, "dries": 33857, "accol": 33858, "à¸ļ": 33859, "schwarzenegger": 33860, "illnesses": 33861, "daybreak": 33862, "followback": 33863, "collusion": 33864, "electronic": 33865, "jovi": 33866, "hiroshima": 33867, "taw": 33868, "homec": 33869, "micah": 33870, "quitting": 33871, "frosting": 33872, "benfica": 33873, "heli": 33874, "sical": 33875, "piccad": 33876, "corporate": 33877, "mentorship": 33878, "youare": 33879, "singer": 33880, "shiva": 33881, "rune": 33882, "inger": 33883, "rium": 33884, "playable": 33885, "doop": 33886, "willow": 33887, "terre": 33888, "nip": 33889, "atd": 33890, "warbler": 33891, "professionally": 33892, "erase": 33893, "proceed": 33894, "pedestrians": 33895, "mischief": 33896, "bending": 33897, "alaskan": 33898, "ckett": 33899, "mop": 33900, "ddles": 33901, "shutter": 33902, "geared": 33903, "ateneo": 33904, "madeline": 33905, "gations": 33906, "osha": 33907, "derick": 33908, "swild": 33909, "angry": 33910, "patents": 33911, "hunk": 33912, "decreased": 33913, "fry": 33914, "ðŁĴĸðŁĴĸðŁĴĸ": 33915, "salon": 33916, "quantities": 33917, "dario": 33918, "nigel": 33919, "kuma": 33920, "jenn": 33921, "happye": 33922, "xxx": 33923, "rexperience": 33924, "pros": 33925, "ausch": 33926, "relessly": 33927, "hamburger": 33928, "fukushima": 33929, "erne": 33930, "statec": 33931, "rend": 33932, "mayfield": 33933, "jone": 33934, "lefty": 33935, "bernstein": 33936, "smil": 33937, "generates": 33938, "forestation": 33939, "bandits": 33940, "tayo": 33941, "rca": 33942, "acci": 33943, "rodrigo": 33944, "knapp": 33945, "elovers": 33946, "vegetation": 33947, "ural": 33948, "left": 33949, "ħï¸ı": 33950, "worldre": 33951, "suri": 33952, "embark": 33953, "wson": 33954, "bayou": 33955, "muller": 33956, "movers": 33957, "ðŁķº": 33958, 
"presbyter": 33959, "lf": 33960, "cree": 33961, "batb": 33962, "salam": 33963, "demonstrations": 33964, "anec": 33965, "npc": 33966, "itics": 33967, "tography": 33968, "reinst": 33969, "thurst": 33970, "tale": 33971, "offences": 33972, "smartcity": 33973, "brotha": 33974, "oftheyear": 33975, "invaluable": 33976, "earn": 33977, "ðŁijıðŁı½": 33978, "kremlin": 33979, "grady": 33980, "townfc": 33981, "guernsey": 33982, "maha": 33983, "contagious": 33984, "drex": 33985, "been": 33986, "(£": 33987, "nativity": 33988, "ktm": 33989, "somerhalder": 33990, "compounds": 33991, "íķĺ": 33992, "\"â̦": 33993, "afg": 33994, "ottnews": 33995, "hound": 33996, "firefly": 33997, "cilan": 33998, "donetsk": 33999, "volunteered": 34000, "akira": 34001, "èª": 34002, "singul": 34003, "sth": 34004, "drowned": 34005, "mando": 34006, "heir": 34007, "ðŁİīðŁİĪ": 34008, "taxis": 34009, "yuki": 34010, "veld": 34011, "kans": 34012, "elk": 34013, "rants": 34014, "hashtag": 34015, "teng": 34016, "rog": 34017, "aat": 34018, "grub": 34019, "eber": 34020, "inindia": 34021, "colossus": 34022, "signi": 34023, "soever": 34024, "milestones": 34025, "dero": 34026, "differential": 34027, "phuket": 34028, "mastermind": 34029, "angh": 34030, "melani": 34031, "broker": 34032, "actorvijay": 34033, "stunned": 34034, "continuity": 34035, "affl": 34036, "vocal": 34037, "perennial": 34038, "fiancé": 34039, "incomplete": 34040, "hunts": 34041, "reissue": 34042, "dominates": 34043, "turmeric": 34044, "roam": 34045, "rion": 34046, "bagged": 34047, "nassau": 34048, "fut": 34049, "xox": 34050, "nationaltrust": 34051, "joye": 34052, "sano": 34053, "hearthstone": 34054, "disrespect": 34055, "lees": 34056, "hse": 34057, "siberian": 34058, "offee": 34059, "restock": 34060, "wolfgang": 34061, "regan": 34062, "plano": 34063, "unwind": 34064, "repar": 34065, "mille": 34066, "],": 34067, "skull": 34068, "fatally": 34069, "conceptual": 34070, "ðŁĮ²": 34071, "fé": 34072, "berto": 34073, "bms": 34074, "ua": 34075, "magna": 34076, "notredame": 34077, "lete": 34078, "laundering": 34079, "heartwarming": 34080, "buffett": 34081, "goat": 34082, "peabo": 34083, "windmill": 34084, "vac": 34085, "continually": 34086, "azalea": 34087, "membrane": 34088, "cancels": 34089, "makeyourown": 34090, "athered": 34091, "pto": 34092, "torpe": 34093, "ðŁĺł": 34094, "ðŁĴ§": 34095, "scares": 34096, "leaking": 34097, "zet": 34098, "pixels": 34099, "aci": 34100, "khil": 34101, "marathi": 34102, "ðŁĻıðŁı½": 34103, "ula": 34104, "tamu": 34105, "chandigarh": 34106, "zagre": 34107, "aab": 34108, "pronounced": 34109, "aubrey": 34110, "sander": 34111, "punta": 34112, "harlow": 34113, "icelan": 34114, "celebratory": 34115, "sot": 34116, "unciation": 34117, "struly": 34118, "mcdowell": 34119, "deepika": 34120, "reminders": 34121, "mystical": 34122, "ctc": 34123, "chatted": 34124, "sica": 34125, "bargains": 34126, "chhat": 34127, "rubin": 34128, "mnet": 34129, "oilandgas": 34130, "pelican": 34131, "oat": 34132, "morality": 34133, "kour": 34134, "ih": 34135, "nuclear": 34136, "gcu": 34137, "richer": 34138, "venezia": 34139, "mma": 34140, "leith": 34141, "accompany": 34142, "richmond": 34143, "sportsnet": 34144, "baahu": 34145, "smuggling": 34146, "mmi": 34147, "ðŁĩ®ðŁĩª": 34148, "twists": 34149, "sahib": 34150, ".....": 34151, "ambitions": 34152, "illo": 34153, "historical": 34154, "forec": 34155, "showbiz": 34156, "ponies": 34157, "chasers": 34158, "remodel": 34159, "willing": 34160, "princesses": 34161, "ample": 34162, "cushions": 34163, "acles": 34164, "lotr": 34165, "dach": 34166, 
"anthe": 34167, "incorporate": 34168, "newbury": 34169, "kiri": 34170, "friedrich": 34171, "abv": 34172, "ballers": 34173, "albert": 34174, "ðŁijŃ": 34175, "leti": 34176, "nanop": 34177, "cide": 34178, "analo": 34179, "nsf": 34180, "))))": 34181, "griffiths": 34182, "valenci": 34183, "roano": 34184, "funrun": 34185, "babysitting": 34186, "caday": 34187, "entre": 34188, "uck": 34189, "slug": 34190, "tical": 34191, "thesims": 34192, "roar": 34193, "carney": 34194, "gam": 34195, "stowe": 34196, "fid": 34197, "bunny": 34198, "shamrock": 34199, "pecu": 34200, "molina": 34201, "gocougs": 34202, "contributes": 34203, "transformation": 34204, "moy": 34205, "vaj": 34206, "severy": 34207, "antioxidants": 34208, "thirteen": 34209, "sightseeing": 34210, "lj": 34211, "reversible": 34212, "oddly": 34213, "hookah": 34214, "nouvel": 34215, "halal": 34216, "fei": 34217, "stables": 34218, "mult": 34219, "hopped": 34220, "braids": 34221, "interchange": 34222, "ghanaian": 34223, "wwww": 34224, "ethno": 34225, "conjunction": 34226, "agov": 34227, "yeti": 34228, "earthand": 34229, "tsp": 34230, "conserve": 34231, "heirloom": 34232, "metaphor": 34233, "woof": 34234, "torio": 34235, "selfless": 34236, "nwa": 34237, "emilia": 34238, "ylene": 34239, "yxe": 34240, "giar": 34241, "moderating": 34242, "probz": 34243, "bfi": 34244, "neer": 34245, "dummy": 34246, "hanukkah": 34247, "webber": 34248, "kv": 34249, "eyebrow": 34250, "dagger": 34251, "sump": 34252, "rages": 34253, "orkney": 34254, "tbo": 34255, "halsey": 34256, "assignments": 34257, "tronic": 34258, "scrib": 34259, "coon": 34260, "anwar": 34261, "#âĢİ": 34262, "jalape": 34263, "florida": 34264, "quaid": 34265, "hawkeyes": 34266, "âĻ¡âĻ¡": 34267, "streetcar": 34268, "rog": 34269, "datlantic": 34270, "granola": 34271, "unchanged": 34272, "expectation": 34273, "Ùĩ": 34274, "marlin": 34275, "gummy": 34276, "ðŁĻıðŁı¾": 34277, "awarenessmonth": 34278, "oilpainting": 34279, "muth": 34280, "perch": 34281, "junto": 34282, "villagers": 34283, "morg": 34284, "cheated": 34285, "webcomic": 34286, "thefuture": 34287, "dps": 34288, "lakings": 34289, "mentioning": 34290, "voor": 34291, "identities": 34292, "accord": 34293, "mcgu": 34294, "lpga": 34295, "rumour": 34296, "massively": 34297, "mpls": 34298, "healy": 34299, "date": 34300, "spoli": 34301, "revisited": 34302, "ont": 34303, "aland": 34304, "scrutiny": 34305, "lakeland": 34306, "blending": 34307, "": 34308, "ankara": 34309, "jamiedor": 34310, "metabolic": 34311, "fences": 34312, "anny": 34313, "åħ": 34314, "semicon": 34315, "oott": 34316, "spaceship": 34317, "wacky": 34318, "leta": 34319, "apac": 34320, "shee": 34321, "inherit": 34322, "dores": 34323, "ðŁĩ¨ðŁĩ¦": 34324, "gente": 34325, "twick": 34326, "rims": 34327, "galve": 34328, "deville": 34329, "kingfisher": 34330, "scorpio": 34331, "owl": 34332, "alar": 34333, "varian": 34334, "ðŁĹĵ": 34335, "venetian": 34336, "stardust": 34337, "thenorth": 34338, "qing": 34339, "harrington": 34340, "consulate": 34341, "spectacle": 34342, "hobbs": 34343, "turks": 34344, "greer": 34345, "mating": 34346, "ðŁİĢ": 34347, "ðŁĮĢ": 34348, "directs": 34349, "íĭ": 34350, "pompeo": 34351, "voiced": 34352, "laos": 34353, "tzu": 34354, "prome": 34355, "prism": 34356, "merc": 34357, "fortunately": 34358, "bcfc": 34359, "mcdonnell": 34360, "notsorry": 34361, "smiled": 34362, "tba": 34363, "forwar": 34364, "midterm": 34365, "darby": 34366, "weinstein": 34367, "upgrading": 34368, "wolff": 34369, "bronco": 34370, "cabello": 34371, "ðŁ¥ĩ": 34372, "fiable": 34373, "sharpe": 34374, "battered": 
34375, "sato": 34376, "mythical": 34377, "instapic": 34378, "prepped": 34379, "enium": 34380, "espo": 34381, "diaper": 34382, "explanations": 34383, "whopping": 34384, "ragnar": 34385, "peel": 34386, "antibiotic": 34387, "lacks": 34388, "harrison": 34389, "lism": 34390, "aul": 34391, "quail": 34392, "martina": 34393, "sentencing": 34394, "scams": 34395, "didi": 34396, "tronics": 34397, "ãħłãħł": 34398, "goff": 34399, "zain": 34400, "paramore": 34401, "chained": 34402, "clinton": 34403, "liff": 34404, "cottages": 34405, "emon": 34406, "reverend": 34407, "consumer": 34408, "cean": 34409, "tany": 34410, "lumpur": 34411, "ebay": 34412, "stool": 34413, "ðŁĺ»ðŁĺ»": 34414, "tapro": 34415, "hath": 34416, "modernart": 34417, "justine": 34418, "proverb": 34419, "appy": 34420, "trax": 34421, "manifest": 34422, "ambu": 34423, "naik": 34424, "pepp": 34425, "rsd": 34426, "merchants": 34427, "kitchener": 34428, "shifted": 34429, "lizz": 34430, "âĺħâĺħâĺħâĺħ": 34431, "âĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶ": 34432, "utopia": 34433, "tomo": 34434, "outed": 34435, "comers": 34436, "chiropractic": 34437, "bookclub": 34438, "cindy": 34439, "prohibition": 34440, "seuss": 34441, "민": 34442, "thinkin": 34443, "rrrr": 34444, "gofund": 34445, "tack": 34446, "omb": 34447, "catastrophic": 34448, "lingu": 34449, "guildford": 34450, "botd": 34451, "à¥ĭ": 34452, "planter": 34453, "^^": 34454, "wink": 34455, "kathmandu": 34456, "stoppers": 34457, "smoothies": 34458, "reefs": 34459, "hind": 34460, "bellamy": 34461, "Ħë": 34462, "wastewater": 34463, "voor": 34464, "natl": 34465, "!]": 34466, "reel": 34467, "yap": 34468, "scooby": 34469, "workspace": 34470, "corinthians": 34471, "blun": 34472, "obligation": 34473, "gbbo": 34474, "dyson": 34475, "cravings": 34476, "ellington": 34477, "dapl": 34478, "wrexham": 34479, "earthandclouds": 34480, "ukrunchat": 34481, "positioned": 34482, "kalb": 34483, "foursquare": 34484, "jock": 34485, "impending": 34486, "evening": 34487, "athy": 34488, "proclaimed": 34489, "cites": 34490, "annapolis": 34491, "sani": 34492, "marth": 34493, "irl": 34494, "accommo": 34495, "kaa": 34496, "fina": 34497, "yaa": 34498, "disper": 34499, "ecar": 34500, "bhak": 34501, "willy": 34502, "ðŁĺĢðŁĺĢ": 34503, "mcdermott": 34504, "moj": 34505, "generational": 34506, "usaid": 34507, "training": 34508, "lonely": 34509, "lores": 34510, "impecc": 34511, "âĢIJ": 34512, "beavers": 34513, "maki": 34514, "heb": 34515, "aapl": 34516, "åı": 34517, "wolverhampton": 34518, "leaderboard": 34519, "meu": 34520, "cfa": 34521, "eastern": 34522, "hur": 34523, "civilwar": 34524, "ourage": 34525, "horned": 34526, "lehigh": 34527, "awards": 34528, "evident": 34529, "gigab": 34530, "rous": 34531, "madel": 34532, "robyn": 34533, "urgently": 34534, "kors": 34535, "enas": 34536, "heisman": 34537, "bambam": 34538, "fabian": 34539, "fom": 34540, "evaluating": 34541, "assembly": 34542, "outsourcing": 34543, "huntsville": 34544, "ðŁĶª": 34545, "justified": 34546, "cashier": 34547, "spaper": 34548, "buckeye": 34549, "analytical": 34550, "illuminati": 34551, "autho": 34552, "oj": 34553, "shade": 34554, "geelong": 34555, "whey": 34556, "heaton": 34557, "terribly": 34558, "elek": 34559, "uncharted": 34560, "sdlive": 34561, "motocross": 34562, "hermes": 34563, "darshan": 34564, "darlington": 34565, "cashmere": 34566, "gripping": 34567, "cilantro": 34568, "punish": 34569, "...:": 34570, "ðŁĴĦ": 34571, "instance": 34572, "deri": 34573, "lobal": 34574, "mukher": 34575, "spar": 34576, "thinker": 34577, "fremont": 34578, "compiled": 34579, "colorado": 34580, 
"vigne": 34581, "smd": 34582, "whead": 34583, "village": 34584, "leek": 34585, "formulae": 34586, "tares": 34587, "persistence": 34588, "??????": 34589, "pedago": 34590, "hez": 34591, "alzheimers": 34592, "vulture": 34593, "offence": 34594, "isgreat": 34595, "suffra": 34596, "kickin": 34597, "hmmmm": 34598, "broadway": 34599, "ï¸ı@": 34600, "arti": 34601, "allison": 34602, "endorses": 34603, "ryu": 34604, "lollipop": 34605, "soybean": 34606, "kendall": 34607, "cera": 34608, "invade": 34609, "(ðŁĵ·:": 34610, "converter": 34611, "carpets": 34612, "hobo": 34613, "frit": 34614, "peac": 34615, "esqu": 34616, "ernan": 34617, "ouf": 34618, "anil": 34619, "differ": 34620, "ching": 34621, "brecht": 34622, "spg": 34623, "davenport": 34624, "strava": 34625, "severn": 34626, "ngos": 34627, "storians": 34628, "fete": 34629, "paramedic": 34630, "jhb": 34631, "alamo": 34632, "sneaking": 34633, "goldcoast": 34634, "roofs": 34635, "isil": 34636, "depicted": 34637, "projections": 34638, "numb": 34639, "oss": 34640, "epi": 34641, "glucose": 34642, "zidane": 34643, "infiniti": 34644, "íĺĦ": 34645, "ransom": 34646, "tonics": 34647, "falk": 34648, "gler": 34649, "outw": 34650, "ress": 34651, "weekly": 34652, "theon": 34653, "nole": 34654, "ðŁĩªðŁĩº": 34655, "volley": 34656, "summar": 34657, "negativity": 34658, "samson": 34659, "yew": 34660, "ausvotes": 34661, "jul": 34662, "judy": 34663, "fart": 34664, "prayed": 34665, "palate": 34666, "multicultural": 34667, "doubleheader": 34668, "cyclones": 34669, "pierre": 34670, "ãģ¨": 34671, "âĺłï¸ı": 34672, "rtw": 34673, "converting": 34674, "wirral": 34675, "lari": 34676, "irrelevant": 34677, "austinmahone": 34678, "anche": 34679, "yaan": 34680, "sdf": 34681, "$.": 34682, "exploding": 34683, "ultimate": 34684, "profici": 34685, "gofundme": 34686, "cellence": 34687, "epstein": 34688, "bullied": 34689, "septic": 34690, "த": 34691, "lumber": 34692, "cuff": 34693, "vscocam": 34694, "plor": 34695, "ล": 34696, "seok": 34697, "roto": 34698, "venezuelan": 34699, "sorta": 34700, "spirited": 34701, "danielpadilla": 34702, "teamsisd": 34703, "radioactive": 34704, "icelandic": 34705, "ðŁĴ¤": 34706, "vere": 34707, "accommodate": 34708, "shipp": 34709, "otter": 34710, "olina": 34711, "ego": 34712, "sula": 34713, "sanantonio": 34714, "deas": 34715, "similarities": 34716, "âļ¾": 34717, "yom": 34718, "broward": 34719, "å°": 34720, "cancun": 34721, "verify": 34722, "onte": 34723, "candlelight": 34724, "ìłķ": 34725, "infants": 34726, "azam": 34727, "ðŁĺ°": 34728, "leven": 34729, "unstable": 34730, "bloomington": 34731, "xford": 34732, "contour": 34733, "yp": 34734, "innovator": 34735, "histories": 34736, "poy": 34737, "lololol": 34738, "expires": 34739, "catalo": 34740, "billboards": 34741, "anab": 34742, "elic": 34743, "novascotia": 34744, "faire": 34745, "ìĿ´": 34746, "rockwell": 34747, "grille": 34748, "aztec": 34749, "johor": 34750, "urstruly": 34751, "firen": 34752, "dunlop": 34753, "idle": 34754, "portman": 34755, "joes": 34756, "txhsfb": 34757, "holm": 34758, "chamele": 34759, "underworld": 34760, "loss": 34761, "tiem": 34762, "therapists": 34763, "pasture": 34764, "paste": 34765, "ingnow": 34766, "vulcan": 34767, "ragon": 34768, "larkin": 34769, "oshi": 34770, "hoco": 34771, "childhood": 34772, "umbrel": 34773, "successor": 34774, "kathy": 34775, "izen": 34776, "°ï¸ı": 34777, "shareholders": 34778, "olga": 34779, "aib": 34780, "heap": 34781, "flaming": 34782, "rou": 34783, "airtel": 34784, "ratt": 34785, "zane": 34786, "vow": 34787, "thorough": 34788, "snag": 34789, "parth": 
34790, "unconscious": 34791, "vey": 34792, "newrelease": 34793, "ghee": 34794, "croatian": 34795, "facilitating": 34796, "swanson": 34797, "astoria": 34798, "tology": 34799, "mastery": 34800, "ð٤ij": 34801, "bilbao": 34802, "troupe": 34803, "theori": 34804, "cheyenne": 34805, "rott": 34806, "shoreline": 34807, "grasso": 34808, "masterchef": 34809, "+)": 34810, "vix": 34811, "ellenshow": 34812, "asg": 34813, "anak": 34814, "kuya": 34815, "safarilive": 34816, "debuting": 34817, "blum": 34818, "listener": 34819, "vins": 34820, "bookshelf": 34821, "smartcities": 34822, "makeyourownlane": 34823, ";;": 34824, "ðŁIJ¯": 34825, "rizz": 34826, "onward": 34827, "bulldog": 34828, "bearish": 34829, "viruses": 34830, "frigh": 34831, "linden": 34832, "weiser": 34833, "snt": 34834, "gona": 34835, "dresden": 34836, "flanders": 34837, "cuk": 34838, "wheeling": 34839, "bau": 34840, "atuesday": 34841, "surfers": 34842, "swift": 34843, "mccall": 34844, "arbitration": 34845, "awd": 34846, "monc": 34847, "bine": 34848, "atx": 34849, "refr": 34850, "miro": 34851, "posey": 34852, "nare": 34853, "ritter": 34854, "âģ¦": 34855, "playbook": 34856, "blowout": 34857, "sportsmanship": 34858, "soooooo": 34859, "malayalam": 34860, "grims": 34861, "burbank": 34862, "infinity": 34863, "sargent": 34864, "oitnb": 34865, "josephine": 34866, "skipping": 34867, "parkin": 34868, "excursion": 34869, "seminars": 34870, "johar": 34871, "partridge": 34872, "postgame": 34873, "llll": 34874, "blanche": 34875, "tempting": 34876, "mna": 34877, "luka": 34878, "isers": 34879, "toffee": 34880, "barron": 34881, "hemmings": 34882, "sae": 34883, "gohawks": 34884, "cupid": 34885, "limbs": 34886, "conse": 34887, "uncommon": 34888, "zada": 34889, "headshot": 34890, "soils": 34891, "pioneer": 34892, "mamma": 34893, "semitic": 34894, "pandey": 34895, "jamiedornan": 34896, "splits": 34897, "vela": 34898, "soni": 34899, "raff": 34900, "tmobile": 34901, "âŀĸ": 34902, "prawns": 34903, "liter": 34904, "enjoyment": 34905, "eggplant": 34906, "tub": 34907, "cultural": 34908, "usic": 34909, "suspicion": 34910, "sycam": 34911, "summed": 34912, "madu": 34913, "hock": 34914, "upwards": 34915, "eyeing": 34916, "rive": 34917, "assassins": 34918, "âĤ¬": 34919, "outfy": 34920, "chives": 34921, "tner": 34922, "lais": 34923, "porridge": 34924, "saddest": 34925, "wcc": 34926, "vicki": 34927, "snails": 34928, "bizitalk": 34929, "millan": 34930, "ðŁĮį": 34931, "samoa": 34932, "jing": 34933, "mikey": 34934, "guj": 34935, "chelms": 34936, "eligibility": 34937, "armada": 34938, "throp": 34939, "surgeries": 34940, "ãĤ¿": 34941, "mohawk": 34942, "exits": 34943, "mem": 34944, "islington": 34945, "cme": 34946, "landfill": 34947, "kaitlyn": 34948, "ðŁİ¼": 34949, "combinations": 34950, "tomorrowland": 34951, "verb": 34952, "cora": 34953, "precisely": 34954, "naom": 34955, "ðŁĨķ": 34956, "shrink": 34957, "softly": 34958, "mercede": 34959, "mandel": 34960, "poodle": 34961, "ballerina": 34962, "soph": 34963, "juxta": 34964, "yat": 34965, "aryan": 34966, "hesitate": 34967, "lowered": 34968, "gular": 34969, "dungeonsand": 34970, "ronan": 34971, "myri": 34972, "spf": 34973, "menopau": 34974, "grasp": 34975, "pathi": 34976, "feasi": 34977, "flaw": 34978, "shistory": 34979, "steward": 34980, "ggle": 34981, "fayre": 34982, "clique": 34983, "credibility": 34984, "yog": 34985, "section": 34986, "musko": 34987, "seville": 34988, "nott": 34989, "calm": 34990, "mateo": 34991, "indicted": 34992, "fiba": 34993, "byl": 34994, "lino": 34995, "ukin": 34996, "!!#": 34997, "enigma": 34998, "sirius": 
34999, "busc": 35000, "ðŁįĬ": 35001, "mackerel": 35002, "psalms": 35003, "aat": 35004, "tomorrowspaper": 35005, "ðŁĺĸ": 35006, "pfc": 35007, "...........": 35008, "shrek": 35009, "mullet": 35010, "osh": 35011, "dangerously": 35012, "immensely": 35013, "amur": 35014, "ðŁįĤ": 35015, "propor": 35016, "sya": 35017, "londonmarathon": 35018, "above": 35019, "obligatory": 35020, "prov": 35021, "racha": 35022, "alexis": 35023, "primary": 35024, "shh": 35025, "ethernet": 35026, "dstv": 35027, "cougar": 35028, "unlucky": 35029, "nil": 35030, "steakhouse": 35031, "mela": 35032, "fcbayern": 35033, "causeway": 35034, "catherine": 35035, "fluorescent": 35036, "nxt": 35037, "tokyo": 35038, "ausp": 35039, "relegation": 35040, "quizz": 35041, "shoreditch": 35042, "proudtobe": 35043, "promos": 35044, "interacting": 35045, "homebrew": 35046, "daesh": 35047, "wpg": 35048, "steadily": 35049, "provinces": 35050, "ballots": 35051, "iah": 35052, "alto": 35053, "<<<": 35054, "youu": 35055, "riley": 35056, "preference": 35057, "traverse": 35058, "incense": 35059, "ammunition": 35060, "hodges": 35061, "#@": 35062, "hailstate": 35063, "tartan": 35064, "witchcraft": 35065, "ventilation": 35066, "libertarian": 35067, "!â̦": 35068, "owes": 35069, "%!": 35070, "ongchang": 35071, "brushing": 35072, "leic": 35073, "fiber": 35074, "underattack": 35075, "download": 35076, "expir": 35077, "hyo": 35078, "pompey": 35079, "mcbride": 35080, "yag": 35081, "stree": 35082, "combat": 35083, "tending": 35084, "aira": 35085, "guggen": 35086, "abra": 35087, "inna": 35088, "flips": 35089, "awal": 35090, "mach": 35091, "dollar": 35092, "inspirations": 35093, "zum": 35094, "odu": 35095, "itty": 35096, "videogame": 35097, "aquaman": 35098, "haru": 35099, "belfast": 35100, "jeb": 35101, "butch": 35102, "usgs": 35103, "calculus": 35104, "goyal": 35105, "morgen": 35106, "xfinity": 35107, "standup": 35108, "contracep": 35109, "sabre": 35110, "nabe": 35111, "insecure": 35112, "generously": 35113, "epitome": 35114, "lw": 35115, "tca": 35116, "narratives": 35117, "donnell": 35118, "pandas": 35119, "bergh": 35120, "tut": 35121, "keral": 35122, "felicity": 35123, "brampton": 35124, "quintet": 35125, "nomore": 35126, "ðŁĶij": 35127, "loi": 35128, "alhamdulil": 35129, "ðŁĶ¥ðŁĶĹ": 35130, "stoner": 35131, "shawl": 35132, "clinical": 35133, "brendan": 35134, "gone": 35135, "flawed": 35136, "trippy": 35137, "jg": 35138, "allocation": 35139, "poaching": 35140, "vevo": 35141, "mocks": 35142, "leftist": 35143, "bonuses": 35144, "condemned": 35145, "ability": 35146, "stating": 35147, "microbiome": 35148, "biologist": 35149, "foryou": 35150, "wahlberg": 35151, "ssor": 35152, "iftar": 35153, "wul": 35154, "ÑĦоÑĤ": 35155, "pomer": 35156, "meme": 35157, "verte": 35158, "trell": 35159, "trait": 35160, "inlet": 35161, "hormones": 35162, "deliberately": 35163, "villar": 35164, "battleship": 35165, "pbl": 35166, "twenti": 35167, "hokies": 35168, "dalail": 35169, "saya": 35170, "mayfair": 35171, "hans": 35172, "diets": 35173, "⾨⾨": 35174, "odin": 35175, "hotspur": 35176, "papi": 35177, "kana": 35178, "kamp": 35179, "finna": 35180, "flotus": 35181, "tians": 35182, "unicorns": 35183, "tribeca": 35184, "changers": 35185, "foreground": 35186, "outa": 35187, "invaders": 35188, "gettys": 35189, "tomorrowspaperstoday": 35190, "macmillan": 35191, "handwritten": 35192, "wfp": 35193, "ude": 35194, "stateof": 35195, "based": 35196, "âĺģï¸ı": 35197, "casm": 35198, "psyched": 35199, "historians": 35200, "fold": 35201, "dda": 35202, "aggrav": 35203, "pans": 35204, "greenway": 
35205, "ausv": 35206, "ðŁĺ¶": 35207, "shraddha": 35208, "index": 35209, "besti": 35210, "zimmer": 35211, "tness": 35212, "eyeshadow": 35213, "otte": 35214, "gots": 35215, "distributing": 35216, "promin": 35217, "yol": 35218, "acea": 35219, "tramrahim": 35220, "hooper": 35221, "supreme": 35222, "jammin": 35223, "intuitive": 35224, "qualifications": 35225, "slim": 35226, "siddi": 35227, "jayne": 35228, "tripping": 35229, "gtx": 35230, "puns": 35231, "emanuel": 35232, "omg": 35233, "midsummer": 35234, "into": 35235, "succulent": 35236, "rien": 35237, "newmexico": 35238, "oor": 35239, "hooking": 35240, "inf": 35241, "ð٤Ŀ": 35242, "flirting": 35243, "nahi": 35244, "gfriend": 35245, "tps": 35246, "helix": 35247, "zs": 35248, "onie": 35249, "ctf": 35250, "kris": 35251, "irresistible": 35252, "flap": 35253, "ðŁijıðŁı»ðŁijıðŁı»": 35254, "uswnt": 35255, "rud": 35256, "ramps": 35257, "pinoy": 35258, "otw": 35259, "lolz": 35260, "lowering": 35261, "favorite": 35262, "tmc": 35263, "phrases": 35264, "hermi": 35265, "averaging": 35266, "embr": 35267, "beno": 35268, "estuary": 35269, "sleeve": 35270, "ribbons": 35271, "tash": 35272, "ู": 35273, "xf": 35274, "awgs": 35275, "sunited": 35276, "breweries": 35277, "anirud": 35278, "punches": 35279, "oldie": 35280, "ipads": 35281, "wifey": 35282, "landlords": 35283, "dji": 35284, "gunner": 35285, "íķ´": 35286, "texan": 35287, "exop": 35288, "cassandra": 35289, "soff": 35290, "ðŁļ«": 35291, "ighton": 35292, "bakers": 35293, "awarenessweek": 35294, "vall": 35295, "earp": 35296, "btsbbmas": 35297, "apologizes": 35298, "âļĵï¸ı": 35299, "wasps": 35300, "statesman": 35301, "snatch": 35302, "watchdog": 35303, "rafi": 35304, "afterparty": 35305, "spike": 35306, "jer": 35307, "periph": 35308, "rnc": 35309, "mull": 35310, "leen": 35311, "shies": 35312, "lieu": 35313, "urstrulymahesh": 35314, "merton": 35315, "desai": 35316, "shif": 35317, "ðŁĮ±": 35318, "pedic": 35319, "gosling": 35320, "arranging": 35321, "wwg": 35322, "geny": 35323, "youuu": 35324, "netflix": 35325, "ettes": 35326, "kwi": 35327, "bernardino": 35328, "amiga": 35329, "ب": 35330, "kashmiri": 35331, "tings": 35332, "emeritus": 35333, "decat": 35334, "abdomin": 35335, "dci": 35336, "phases": 35337, "djan": 35338, "beam": 35339, "opry": 35340, "ished": 35341, "theellenshow": 35342, "thest": 35343, "habitats": 35344, "toons": 35345, "mclaughlin": 35346, "ripper": 35347, "microbiology": 35348, "talaga": 35349, "clueless": 35350, "ssu": 35351, "croche": 35352, "bromance": 35353, "longevity": 35354, "zagreb": 35355, "prevented": 35356, "trave": 35357, "spoilt": 35358, "darryl": 35359, "migraine": 35360, "alcat": 35361, "dddd": 35362, "viv": 35363, "serpent": 35364, "mattel": 35365, "jama": 35366, "conquest": 35367, "îĦ": 35368, "samsung": 35369, "presbyterian": 35370, "ketch": 35371, "firefox": 35372, "motif": 35373, "lec": 35374, "chopping": 35375, "cherno": 35376, "jann": 35377, "ðŁIJ°": 35378, "prolon": 35379, "wakeup": 35380, "convergence": 35381, "merseyside": 35382, "heartbroken": 35383, "looming": 35384, "hallucin": 35385, "maize": 35386, "communism": 35387, "moh": 35388, "twitterstorians": 35389, "sergey": 35390, "reseller": 35391, "favorable": 35392, "edgy": 35393, "reiter": 35394, "malaga": 35395, "liveme": 35396, "kahn": 35397, "pulsion": 35398, "bigg": 35399, "kimkardashian": 35400, "atio": 35401, "tyranny": 35402, "ruption": 35403, "qant": 35404, "proven": 35405, "byz": 35406, "pushaw": 35407, "kristin": 35408, "eer": 35409, "tardis": 35410, "riz": 35411, "awaken": 35412, "miko": 35413, 
"undocumented": 35414, "pathfinder": 35415, "indirect": 35416, "resembles": 35417, "hler": 35418, "concealed": 35419, "scandal": 35420, "reim": 35421, "dnb": 35422, "critters": 35423, "attendant": 35424, "apprenticeships": 35425, "aau": 35426, "screamed": 35427, "lsu": 35428, "fah": 35429, "harbour": 35430, "edd": 35431, "batsman": 35432, "liss": 35433, "misha": 35434, "spaniel": 35435, "itf": 35436, "advancement": 35437, "fac": 35438, "closeup": 35439, "cecilia": 35440, "medic": 35441, "narcissi": 35442, "lavish": 35443, "giac": 35444, "mays": 35445, "leit": 35446, "winewednesday": 35447, "pushaward": 35448, "letto": 35449, "currents": 35450, "bugatti": 35451, "outine": 35452, "wj": 35453, "undo": 35454, "lerosis": 35455, "devotional": 35456, "ðŁij«": 35457, "onna": 35458, "faisal": 35459, "sauna": 35460, "himachal": 35461, "amii": 35462, "à®®": 35463, "dizzy": 35464, "screenwriting": 35465, "phx": 35466, "spn": 35467, "icki": 35468, "agirl": 35469, "fishes": 35470, "wbz": 35471, "pim": 35472, "boar": 35473, "acid": 35474, "!..": 35475, "rockefeller": 35476, "nga": 35477, "drastically": 35478, "simplify": 35479, "drumming": 35480, "autumnal": 35481, "gurmee": 35482, "lorde": 35483, "joann": 35484, "giveup": 35485, "bour": 35486, "amura": 35487, "derland": 35488, "simpler": 35489, "watson": 35490, "trident": 35491, "concordia": 35492, "bellum": 35493, "brek": 35494, "dumplings": 35495, "vion": 35496, "dungeonsanddragons": 35497, "spri": 35498, "ascension": 35499, "wildatlantic": 35500, "ust": 35501, "robins": 35502, "legion": 35503, "insist": 35504, "jaro": 35505, "guess": 35506, "sob": 35507, "bighit": 35508, "poolside": 35509, "negotiating": 35510, "mcgill": 35511, "bild": 35512, "technicians": 35513, "mitigation": 35514, "ajaydevgn": 35515, "bto": 35516, "anten": 35517, "cosmopolitan": 35518, "ðŁĺĬðŁĺĬðŁĺĬðŁĺĬ": 35519, "patrioti": 35520, "temper": 35521, "promenade": 35522, "navajo": 35523, "namm": 35524, "wrinkles": 35525, "dcfc": 35526, "leach": 35527, "brunette": 35528, "rf": 35529, "coutinho": 35530, "alti": 35531, "traditionally": 35532, "optome": 35533, "naz": 35534, "accordingly": 35535, "recard": 35536, "deets": 35537, "swell": 35538, "posure": 35539, "whitening": 35540, "stranger": 35541, "illion": 35542, "hereford": 35543, "uwu": 35544, "robber": 35545, "cotswolds": 35546, "clen": 35547, "gorge": 35548, "namaste": 35549, "relish": 35550, "griff": 35551, "adrenaline": 35552, "blasio": 35553, "vale": 35554, "ê²": 35555, "tolerate": 35556, "railminindia": 35557, "jensen": 35558, "hoven": 35559, "ellu": 35560, "obsole": 35561, "eisenhower": 35562, "unidentified": 35563, "thanniversary": 35564, "bodyguard": 35565, "د": 35566, "idge": 35567, "schal": 35568, "stockport": 35569, "sni": 35570, "retaining": 35571, "popo": 35572, "pixie": 35573, "olithic": 35574, "kier": 35575, "hajj": 35576, "saz": 35577, "corbin": 35578, "!!!!!!!!!!": 35579, "vit": 35580, "megat": 35581, "deh": 35582, "circuit": 35583, "affleck": 35584, "theoretical": 35585, "hopeless": 35586, "uab": 35587, "slump": 35588, "bice": 35589, "jammed": 35590, "letstalk": 35591, "cani": 35592, "sideways": 35593, "labyrinth": 35594, "refs": 35595, "hahn": 35596, "jared": 35597, "ðŁį¹": 35598, "jambo": 35599, "phyl": 35600, "enhancement": 35601, "ctr": 35602, "fullest": 35603, "seye": 35604, "doba": 35605, "choic": 35606, "yos": 35607, "cbj": 35608, "andré": 35609, "rewatch": 35610, "prima": 35611, "doctrine": 35612, "forgets": 35613, "uhm": 35614, "around": 35615, "ule": 35616, "artlovers": 35617, "shiraz": 35618, "harth": 
35619, "extor": 35620, "Å¡": 35621, "unexpectedly": 35622, "elius": 35623, "yx": 35624, "emmy": 35625, "seac": 35626, "ðŁijĩðŁijĩðŁijĩ": 35627, "corrected": 35628, "combu": 35629, "womanc": 35630, "cough": 35631, "whatson": 35632, "publishes": 35633, "diversity": 35634, "backbone": 35635, "lockdown": 35636, "mesmerizing": 35637, "norte": 35638, "mab": 35639, "designer": 35640, "íģ": 35641, "ragh": 35642, "molecules": 35643, "getoutside": 35644, "thebeatles": 35645, "semiconduc": 35646, "nacho": 35647, "lunes": 35648, "hammers": 35649, "sultan": 35650, "oon": 35651, "feren": 35652, "attach": 35653, "arqu": 35654, "uttarakhand": 35655, "sash": 35656, ";-": 35657, "tread": 35658, "iko": 35659, "arthur": 35660, "scandinavian": 35661, "ration": 35662, "gael": 35663, "chargeable": 35664, "fishy": 35665, "vma": 35666, "handbags": 35667, "chara": 35668, "ayne": 35669, "defam": 35670, "settlers": 35671, "qadri": 35672, "palais": 35673, "inwx": 35674, "apocalyptic": 35675, "pooja": 35676, "aes": 35677, "atories": 35678, "proofing": 35679, "nlp": 35680, "tsla": 35681, "vina": 35682, "lido": 35683, "deephouse": 35684, "informatics": 35685, "vv": 35686, "ppings": 35687, "diss": 35688, "ï": 35689, "uhuru": 35690, "stony": 35691, "betrayed": 35692, "baff": 35693, "myra": 35694, "aspen": 35695, "allowance": 35696, "tamara": 35697, "cif": 35698, "corbett": 35699, "serge": 35700, "digo": 35701, "ambigu": 35702, "painters": 35703, "pcr": 35704, "pca": 35705, "noms": 35706, "loft": 35707, "vee": 35708, "opendata": 35709, "ðŁIJ±": 35710, "alexandre": 35711, "identifies": 35712, "fantasyfootball": 35713, "reproduction": 35714, "bromley": 35715, "wareagle": 35716, "mmer": 35717, "pss": 35718, "cues": 35719, "ayat": 35720, "hutchinson": 35721, "sarac": 35722, "jackman": 35723, "irah": 35724, "apink": 35725, "cols": 35726, "aussies": 35727, "execs": 35728, "dayton": 35729, "ðŁĻĨ": 35730, "imv": 35731, "haram": 35732, "chuckle": 35733, "authenticity": 35734, "ardo": 35735, "incubator": 35736, "ส": 35737, "photoshopped": 35738, "embraced": 35739, "fightfor": 35740, "gorman": 35741, "zzzz": 35742, "scholastic": 35743, "crisps": 35744, "teapo": 35745, "midnight": 35746, "gaine": 35747, "collier": 35748, "sate": 35749, "dette": 35750, "åŃ": 35751, "imagine": 35752, "iff": 35753, "twili": 35754, "ification": 35755, "teatro": 35756, "norma": 35757, "esur": 35758, "emergencies": 35759, "riseup": 35760, "ringer": 35761, "hassle": 35762, "caitlyn": 35763, "tranquil": 35764, "versa": 35765, "seb": 35766, "overlook": 35767, "gini": 35768, "bogo": 35769, "sere": 35770, "mayne": 35771, "henrik": 35772, "contaminated": 35773, "rhapsody": 35774, "proportion": 35775, "wildatlanticway": 35776, "âģ©.": 35777, "organisers": 35778, "trane": 35779, "standard": 35780, "sperm": 35781, "launcher": 35782, "ricci": 35783, "herts": 35784, "paperwork": 35785, "showcased": 35786, "meryl": 35787, "pena": 35788, "pimp": 35789, "disastrous": 35790, "^.^": 35791, "phara": 35792, "xis": 35793, "frontal": 35794, "swirl": 35795, "spills": 35796, "swagger": 35797, "smartwatch": 35798, "sizzling": 35799, "saviour": 35800, "catar": 35801, "bbcr": 35802, "refurbishment": 35803, "dris": 35804, "citroen": 35805, "absorb": 35806, "patriotism": 35807, "illeg": 35808, "chromo": 35809, "freshers": 35810, "rus": 35811, "limiting": 35812, "efish": 35813, "downed": 35814, "mandir": 35815, "hazelnut": 35816, "pall": 35817, "macon": 35818, "disappearing": 35819, "qualifies": 35820, "boon": 35821, "barracks": 35822, "amine": 35823, "gendere": 35824, "ðŁļĺ": 35825, 
"jes": 35826, "ãĥŃ": 35827, "quito": 35828, "middleweight": 35829, "schau": 35830, "quadru": 35831, "aciones": 35832, "limitless": 35833, "ðŁijĮðŁı½": 35834, "chman": 35835, "arav": 35836, "regulators": 35837, "itup": 35838, "battersea": 35839, "milford": 35840, "gz": 35841, "ticking": 35842, "ghou": 35843, "crushes": 35844, "tutu": 35845, "dreadful": 35846, "famine": 35847, "forchange": 35848, "dalailama": 35849, "ðŁĴį": 35850, "whitaker": 35851, "hashmi": 35852, "hus": 35853, "vod": 35854, "bette": 35855, "aaah": 35856, "isoo": 35857, "ðŁ¥Ī": 35858, "haar": 35859, "laine": 35860, "bv": 35861, "allday": 35862, "sprout": 35863, "indiegames": 35864, "freebie": 35865, "greeks": 35866, "butler": 35867, "illin": 35868, "haal": 35869, "wareness": 35870, "sima": 35871, "publichealth": 35872, "gama": 35873, "waa": 35874, "oung": 35875, "goooo": 35876, "okinawa": 35877, "offenders": 35878, "impose": 35879, "hoc": 35880, "youngster": 35881, "storyteller": 35882, "scap": 35883, "fighter": 35884, "+,": 35885, "whites": 35886, "musicmonday": 35887, "reza": 35888, "goducks": 35889, "bria": 35890, "mium": 35891, "casper": 35892, "crumbs": 35893, "aad": 35894, "martialarts": 35895, "chp": 35896, "rigged": 35897, "tng": 35898, "harvested": 35899, "sak": 35900, "dojo": 35901, "millwall": 35902, "bnw": 35903, "ocd": 35904, "historyof": 35905, "tmr": 35906, "sirens": 35907, "fanci": 35908, "caregivers": 35909, "vira": 35910, "soni": 35911, "recurring": 35912, "acknowledged": 35913, "ðŁıŁ": 35914, "ophile": 35915, "bucky": 35916, "stressing": 35917, "rook": 35918, "digger": 35919, "vival": 35920, "sando": 35921, "fleet": 35922, "siers": 35923, "selcaday": 35924, "refreshed": 35925, "antifa": 35926, "aque": 35927, "polo": 35928, "disappearance": 35929, "demb": 35930, "âĮļï¸ı": 35931, "rented": 35932, "berger": 35933, "gmb": 35934, "cula": 35935, "ssal": 35936, "goody": 35937, "uhh": 35938, "marcelo": 35939, "wanna": 35940, "software": 35941, "shopsmall": 35942, "turtle": 35943, "tomas": 35944, "frisco": 35945, "ðŁĺįðŁĴķ": 35946, "jimenez": 35947, "csu": 35948, "dayz": 35949, "ando": 35950, "wynne": 35951, "choreographer": 35952, "cervical": 35953, "trailblazers": 35954, "edg": 35955, "zendaya": 35956, "travelblog": 35957, "els": 35958, "wholesome": 35959, "cog": 35960, "labout": 35961, "arney": 35962, "delle": 35963, "suisse": 35964, "masi": 35965, "inese": 35966, "ombe": 35967, "fiddle": 35968, "reclaim": 35969, "pau": 35970, "watcher": 35971, "slain": 35972, "berty": 35973, "optimum": 35974, "elites": 35975, "minis": 35976, "turkey": 35977, "patrols": 35978, "gerard": 35979, "aureli": 35980, "wildly": 35981, "waltz": 35982, "brgy": 35983, "wob": 35984, "crest": 35985, "+++": 35986, "vez": 35987, "frosted": 35988, "davido": 35989, "thex": 35990, "paramedics": 35991, "pinto": 35992, "hank": 35993, "dupont": 35994, "urg": 35995, "fostering": 35996, "micropoetry": 35997, "spectre": 35998, "---->": 35999, "neuro": 36000, "frida": 36001, "musical": 36002, "galveston": 36003, "effic": 36004, "scape": 36005, "palazzo": 36006, "thall": 36007, "provisional": 36008, "pjs": 36009, "aure": 36010, "ðŁĶľ": 36011, "mamamoo": 36012, "kitties": 36013, "cree": 36014, "wak": 36015, "loool": 36016, "lupus": 36017, "cnblue": 36018, "ú": 36019, "ðŁİ¬": 36020, "raced": 36021, "trose": 36022, "omas": 36023, "stride": 36024, "coors": 36025, "⤵ï¸ı": 36026, "incomparable": 36027, "cyril": 36028, "broader": 36029, "areclipse": 36030, "ðŁįĶ": 36031, "interval": 36032, "tiru": 36033, "coworking": 36034, "waco": 36035, "aham": 36036, 
"abee": 36037, "flourish": 36038, "thetimes": 36039, "olini": 36040, "kickboxing": 36041, "lucer": 36042, "atla": 36043, "asun": 36044, "casserole": 36045, "miaw": 36046, "lobbying": 36047, "janice": 36048, "cirque": 36049, "reflex": 36050, "leary": 36051, "sanatomy": 36052, "tempest": 36053, "semb": 36054, "murdering": 36055, "usav": 36056, "robo": 36057, "onet": 36058, "pcc": 36059, "natives": 36060, "lifeof": 36061, "saha": 36062, "ruthless": 36063, "relates": 36064, "appetizer": 36065, "pyeongchang": 36066, "nord": 36067, "eru": 36068, "athing": 36069, "ugly": 36070, "plying": 36071, "brance": 36072, "organise": 36073, "kendra": 36074, "dato": 36075, "cheeses": 36076, "parma": 36077, "burnout": 36078, "astra": 36079, "pretoria": 36080, "adjustment": 36081, "uku": 36082, "slo": 36083, "liken": 36084, "favors": 36085, "clive": 36086, "beets": 36087, "snowdonia": 36088, "gotv": 36089, "syn": 36090, "openhouse": 36091, "pani": 36092, "portrayed": 36093, "slated": 36094, "mecca": 36095, "renal": 36096, "supportsmallstreamers": 36097, "staffs": 36098, "dao": 36099, "biker": 36100, "viktor": 36101, "titus": 36102, "admired": 36103, "ðŁĵ±": 36104, "hurrican": 36105, "heats": 36106, "glory": 36107, "photogenic": 36108, "meri": 36109, "depor": 36110, "burnham": 36111, "orangu": 36112, "djing": 36113, "impressionism": 36114, "ignition": 36115, "cai": 36116, "wynn": 36117, "depe": 36118, "coveted": 36119, "collagen": 36120, "saus": 36121, "ornam": 36122, "administrators": 36123, "sson": 36124, "nhpolitics": 36125, "hahahahahahahaha": 36126, "aspirations": 36127, "rgb": 36128, "swollen": 36129, "sowe": 36130, "scr": 36131, "divergent": 36132, "houghton": 36133, "hanoi": 36134, "dory": 36135, "niki": 36136, "landry": 36137, "bcci": 36138, "ðŁijĮðŁijĮ": 36139, "ismail": 36140, "tripod": 36141, "herd": 36142, "bhatt": 36143, "dressage": 36144, "tabby": 36145, "inguish": 36146, "huron": 36147, "à³į": 36148, "Ãł": 36149, "todas": 36150, "evangelical": 36151, "chords": 36152, "stjohn": 36153, "sloppy": 36154, "martyr": 36155, "facebook": 36156, "alight": 36157, "sensei": 36158, "kathniel": 36159, "rites": 36160, "zione": 36161, "uo": 36162, "revelations": 36163, "weightlifting": 36164, "pano": 36165, "ncwx": 36166, "acton": 36167, "à®ķ": 36168, "ز": 36169, "soma": 36170, "à¸Ĺ": 36171, "respecting": 36172, "marche": 36173, "foreman": 36174, "betty": 36175, "kik": 36176, "shibu": 36177, "poon": 36178, "argyle": 36179, "kswx": 36180, "etz": 36181, "marbella": 36182, "brackets": 36183, "standby": 36184, "fireside": 36185, "defiance": 36186, "vex": 36187, "britannia": 36188, "inhabit": 36189, "appoint": 36190, "piyush": 36191, "leash": 36192, "sciento": 36193, "flask": 36194, "senna": 36195, ">:": 36196, "atroc": 36197, "sanderson": 36198, "idlib": 36199, "dhanush": 36200, "ðŁĺĻ": 36201, "enthr": 36202, "hitch": 36203, "dedly": 36204, "alley": 36205, "dork": 36206, "mondo": 36207, "cuddly": 36208, "missin": 36209, "yesss": 36210, "nighting": 36211, "jpn": 36212, "wary": 36213, "umpire": 36214, "maz": 36215, "ê³": 36216, "babs": 36217, "ĭãģ": 36218, "stanford": 36219, "possessed": 36220, "exceeded": 36221, "ðŁĶ¶": 36222, "wallart": 36223, "trap": 36224, "jil": 36225, "hibis": 36226, "spying": 36227, "scribe": 36228, "khalil": 36229, "translator": 36230, "lumb": 36231, "dized": 36232, "chc": 36233, "supervision": 36234, "shutter": 36235, "jag": 36236, "_*": 36237, "yesterdays": 36238, "msf": 36239, "hihi": 36240, "gonzaga": 36241, "gillespie": 36242, "vivek": 36243, "ecstatic": 36244, "thismorning": 36245, 
"chus": 36246, "edes": 36247, "stoned": 36248, "bees": 36249, "ðŁĩ¹ðŁĩ": 36250, "turin": 36251, "hover": 36252, "atrics": 36253, "stern": 36254, "samheughan": 36255, "autism": 36256, "miya": 36257, "eyewitness": 36258, "writings": 36259, "traveltips": 36260, "chutney": 36261, "pxrtg": 36262, "kenyans": 36263, "mystic": 36264, "krit": 36265, "/$": 36266, "redhead": 36267, "worldly": 36268, "amus": 36269, "opla": 36270, "leve": 36271, "gabbana": 36272, "seen": 36273, "oclock": 36274, "ganga": 36275, "keenan": 36276, "scent": 36277, "oldies": 36278, "gogreen": 36279, "cornerstone": 36280, "comply": 36281, "concours": 36282, "ðŁİ¶ðŁİ¶": 36283, "haan": 36284, "confis": 36285, "awson": 36286, "cleop": 36287, "îĢ": 36288, "suzu": 36289, "sauté": 36290, "algar": 36291, "subscriber": 36292, "esteemed": 36293, "ãĤ¤ãĥ": 36294, "worthwhile": 36295, "melrose": 36296, "flock": 36297, "brightly": 36298, "violinist": 36299, "pere": 36300, "slipping": 36301, "andco": 36302, "sigh": 36303, "havan": 36304, "culo": 36305, "msa": 36306, "fibrosis": 36307, "matilda": 36308, "rafting": 36309, "award": 36310, "ëª": 36311, "mmmm": 36312, "geaux": 36313, "steiner": 36314, "sinn": 36315, "helpers": 36316, "beetles": 36317, "aimee": 36318, "taiwan": 36319, "pistachio": 36320, "macbeth": 36321, "mzan": 36322, "descendants": 36323, "onsale": 36324, "inr": 36325, "ilm": 36326, "grouse": 36327, "saig": 36328, "mow": 36329, "bigre": 36330, "adjustments": 36331, "tula": 36332, "mathew": 36333, "translates": 36334, "muh": 36335, "bollah": 36336, "ðŁĴĽðŁĴĻ": 36337, "amores": 36338, "abouts": 36339, "bombshell": 36340, "blaster": 36341, "xavi": 36342, "sns": 36343, "kroger": 36344, "gather": 36345, "eradic": 36346, "daft": 36347, "chemo": 36348, "benches": 36349, "ðŁĩ©ðŁĩ": 36350, "utv": 36351, "oura": 36352, "nko": 36353, "gatorade": 36354, "biafra": 36355, "okstate": 36356, "imdanielpadilla": 36357, "domains": 36358, "openingday": 36359, "kiddo": 36360, "doi": 36361, "rice": 36362, "daycare": 36363, "macmillan": 36364, "bathurst": 36365, "cheerleading": 36366, "ð٦ģ": 36367, "cashback": 36368, "kwon": 36369, "hobbies": 36370, "exempl": 36371, "riesling": 36372, "âļª": 36373, "agles": 36374, "nys": 36375, "everything": 36376, "navis": 36377, "addi": 36378, "magnesium": 36379, "facelift": 36380, "arkham": 36381, "grandes": 36382, "extremist": 36383, "donat": 36384, "vitality": 36385, "pumpkin": 36386, "betta": 36387, "sltd": 36388, "artisan": 36389, "liby": 36390, "peaked": 36391, "ahhhhh": 36392, "maryam": 36393, "assim": 36394, "unsc": 36395, "mente": 36396, "alaya": 36397, "lowers": 36398, "aras": 36399, "griev": 36400, "leip": 36401, "grati": 36402, "crises": 36403, "sprints": 36404, "execute": 36405, "wto": 36406, "msd": 36407, "magical": 36408, "reviewer": 36409, "sparkles": 36410, "jukebox": 36411, "ðŁĺĤâĿ¤ï¸ı": 36412, "payback": 36413, "licenses": 36414, "dunkin": 36415, "belt": 36416, "lakewood": 36417, "hateful": 36418, "budgets": 36419, "revamped": 36420, "pherson": 36421, "kyiv": 36422, "wentworth": 36423, "rosen": 36424, "cruise": 36425, "giggle": 36426, "defstar": 36427, "assassinscre": 36428, "ymouth": 36429, "winkle": 36430, "wfc": 36431, "bandwagon": 36432, "bkk": 36433, "wiring": 36434, "kearney": 36435, "southside": 36436, "petit": 36437, "!ðŁĺį": 36438, "nordic": 36439, "mirza": 36440, "mugabe": 36441, "vl": 36442, "scones": 36443, "ktv": 36444, "sandal": 36445, "duc": 36446, "malls": 36447, "ðŁĴŀðŁĴŀ": 36448, "itc": 36449, "alay": 36450, "impair": 36451, "unrest": 36452, "floss": 36453, "cé": 36454, 
"abou": 36455, "varying": 36456, "museo": 36457, "server": 36458, "diya": 36459, "hibiscus": 36460, "eroy": 36461, "merritt": 36462, "findom": 36463, "fpp": 36464, "unusually": 36465, "gott": 36466, "contingent": 36467, "aliaa": 36468, "ballon": 36469, "jol": 36470, "hiked": 36471, "zyme": 36472, "ayr": 36473, "agn": 36474, "gaz": 36475, "periodic": 36476, "sparty": 36477, "practising": 36478, "linton": 36479, "talis": 36480, "cypri": 36481, "womaninbiz": 36482, "radiodisney": 36483, "ðŁĮ¼": 36484, "jumpers": 36485, "endocr": 36486, "ðŁļ¨ðŁļ¨": 36487, "andon": 36488, "sharapo": 36489, "mier": 36490, "masonic": 36491, "factories": 36492, "vien": 36493, "bbers": 36494, "ìĽIJ": 36495, "hold": 36496, "kebab": 36497, "beak": 36498, "approached": 36499, "acmilan": 36500, "munro": 36501, "kosher": 36502, "excellency": 36503, "negotiation": 36504, "waltdisneyworld": 36505, "crouch": 36506, "teasing": 36507, "suppression": 36508, "enya": 36509, "bce": 36510, "transformationtuesday": 36511, "callie": 36512, "viswas": 36513, "pgat": 36514, "icted": 36515, "endings": 36516, "escu": 36517, "recruited": 36518, "itfc": 36519, "collaborations": 36520, "gino": 36521, "snuck": 36522, "auschwitz": 36523, "ifc": 36524, "xii": 36525, "kesha": 36526, "gervais": 36527, "cloak": 36528, "xl": 36529, "saad": 36530, "probation": 36531, "precau": 36532, "macin": 36533, "anastasi": 36534, "lek": 36535, "eazy": 36536, "daysofcode": 36537, "mariahcarey": 36538, "yog": 36539, "stitched": 36540, "boyfriends": 36541, "shar": 36542, "phile": 36543, "agu": 36544, "twinkle": 36545, "phishing": 36546, "weekender": 36547, "icton": 36548, "gurmeetramrahim": 36549, "alton": 36550, "leness": 36551, "allan": 36552, "penultimate": 36553, "krystal": 36554, "gou": 36555, "lande": 36556, "dismant": 36557, "abusing": 36558, "norse": 36559, "paterson": 36560, "edmun": 36561, "apan": 36562, "xiumin": 36563, "skel": 36564, "catwalk": 36565, "react": 36566, "walled": 36567, "tangle": 36568, "bryn": 36569, "veto": 36570, "supermoon": 36571, "casablanc": 36572, "appreciates": 36573, "skid": 36574, "both": 36575, "catalina": 36576, "eleague": 36577, "cybermonday": 36578, "cautious": 36579, "ð٤ĵ": 36580, "novo": 36581, "hampton": 36582, "haye": 36583, "josef": 36584, "varan": 36585, "lobos": 36586, "roanoke": 36587, "orphans": 36588, "ttin": 36589, "squads": 36590, "ishqbaaaz": 36591, "blackpanther": 36592, "etu": 36593, "ksh": 36594, "crumble": 36595, "cessna": 36596, "relieved": 36597, "scully": 36598, "pollinators": 36599, "explorecanada": 36600, "kies": 36601, "kamloops": 36602, "kiran": 36603, "primal": 36604, "settlements": 36605, "hotspot": 36606, "brainstorming": 36607, "cedric": 36608, "biennial": 36609, "shant": 36610, "âĻ¡âĻ¡âĻ¡": 36611, "doon": 36612, "hearn": 36613, "walkway": 36614, "fem": 36615, "veal": 36616, "deportation": 36617, "toxins": 36618, "eliminating": 36619, "descending": 36620, "bythe": 36621, "blasphe": 36622, "hasta": 36623, "complement": 36624, "ascent": 36625, "riga": 36626, "provost": 36627, "âĸª": 36628, "weeping": 36629, "antisemitism": 36630, "employee": 36631, "unearthed": 36632, "pino": 36633, "natalie": 36634, "blad": 36635, "angola": 36636, "lockheed": 36637, "inian": 36638, "agr": 36639, "nister": 36640, "impala": 36641, "mke": 36642, "fanatic": 36643, "âĺħâĺħ": 36644, "ðŁij¸": 36645, "luch": 36646, "simplified": 36647, "gallery": 36648, "economic": 36649, "cyborg": 36650, "coni": 36651, "selma": 36652, "inception": 36653, "koala": 36654, "dvds": 36655, "crested": 36656, "mmor": 36657, "visible": 36658, 
"nsd": 36659, "ðŁĻĮðŁı½": 36660, "wunder": 36661, "refrigerator": 36662, "reopening": 36663, "eera": 36664, "carousel": 36665, "asp": 36666, "ballistic": 36667, "victory": 36668, "motive": 36669, "trey": 36670, "sharapova": 36671, "sii": 36672, "monter": 36673, "intend": 36674, "westchester": 36675, "spe": 36676, "cymb": 36677, "vidal": 36678, "llama": 36679, "univ": 36680, "finer": 36681, "craftsmanship": 36682, "jazzfest": 36683, "bch": 36684, "aggio": 36685, "ncc": 36686, "lambda": 36687, "tranquility": 36688, "cisco": 36689, "baden": 36690, "sobbing": 36691, "ofi": 36692, "gota": 36693, "rumored": 36694, "warmed": 36695, "orean": 36696, "acton": 36697, "marci": 36698, "ghani": 36699, "âľĵ": 36700, "assorted": 36701, "pembroke": 36702, "penelope": 36703, "daf": 36704, "atty": 36705, "aimo": 36706, "pretzel": 36707, "carnival": 36708, "thanos": 36709, "kochi": 36710, "mersal": 36711, "hamradio": 36712, "artwit": 36713, "casc": 36714, "guerrilla": 36715, "kushner": 36716, "kapp": 36717, "alise": 36718, "toddlers": 36719, "stewardship": 36720, "otti": 36721, "terri": 36722, "tempe": 36723, "restless": 36724, "vito": 36725, "zayed": 36726, "rspb": 36727, "pion": 36728, "hippo": 36729, "hawthorne": 36730, "inas": 36731, "amily": 36732, "nutcracker": 36733, "lop": 36734, "dali": 36735, "tropic": 36736, "ðŁ¤ł": 36737, "ulo": 36738, "jaredle": 36739, "pyrene": 36740, "paleo": 36741, "usair": 36742, "mould": 36743, "itated": 36744, "genetically": 36745, "biomass": 36746, "ðŁĩ³ðŁĩ±": 36747, "dodd": 36748, "practiced": 36749, "monarchs": 36750, "unmanned": 36751, "mbuhari": 36752, "amal": 36753, "photogra": 36754, "kool": 36755, "brendon": 36756, "juices": 36757, "cure": 36758, "worldbank": 36759, "pointers": 36760, "ðŁĴĿ": 36761, "turf": 36762, "leds": 36763, "borussia": 36764, "baptism": 36765, "warwickshire": 36766, "mounts": 36767, "gayo": 36768, "begg": 36769, "copied": 36770, "asians": 36771, "kg": 36772, "modernist": 36773, "gid": 36774, "frontman": 36775, "concentrated": 36776, "yt": 36777, "scavenger": 36778, "ironically": 36779, "adic": 36780, "psn": 36781, "ðŁ¥ī": 36782, "culturally": 36783, "yuv": 36784, "macarthur": 36785, "fertilizer": 36786, "bewithyou": 36787, "rigor": 36788, "minors": 36789, "zoning": 36790, "âĸł": 36791, "rir": 36792, "adolescent": 36793, "vinny": 36794, "reng": 36795, "sandstone": 36796, "guet": 36797, "westh": 36798, "pledged": 36799, "laced": 36800, "spide": 36801, "vai": 36802, "tycoon": 36803, "seizure": 36804, "dup": 36805, "appalachian": 36806, "rok": 36807, "catholics": 36808, "seychel": 36809, "possess": 36810, "lager": 36811, "jodi": 36812, "champ": 36813, "stras": 36814, "dina": 36815, "centuri": 36816, "calder": 36817, "bluray": 36818, "ðŁĩ¨ðŁĩ³": 36819, "modo": 36820, "annette": 36821, "youtubers": 36822, "chaps": 36823, "angling": 36824, "labeling": 36825, "aqui": 36826, "pkwy": 36827, "lyle": 36828, "bisexual": 36829, "litur": 36830, "dugout": 36831, "libby": 36832, "greysanatomy": 36833, "substances": 36834, "augustus": 36835, "rallying": 36836, "fidel": 36837, "ingue": 36838, "人": 36839, "hallmarkchannel": 36840, "toothbrush": 36841, "má": 36842, "adirond": 36843, "aggi": 36844, "ðŁĵį:": 36845, "crusade": 36846, "taxation": 36847, "kz": 36848, "iver": 36849, "doubling": 36850, "roomie": 36851, "wab": 36852, "enrolled": 36853, "azon": 36854, "aju": 36855, "grandchildren": 36856, "asdf": 36857, "ðŁ¥º": 36858, "matic": 36859, "oughton": 36860, "utilize": 36861, "ðŁĴ£": 36862, "ponder": 36863, "raisin": 36864, "dysfunction": 36865, "cobain": 36866, 
"butternut": 36867, "eman": 36868, "sured": 36869, "drian": 36870, "andfriends": 36871, "withthe": 36872, "onomy": 36873, "heineken": 36874, "bridal": 36875, "leadership": 36876, "pyramids": 36877, "deutschland": 36878, "jocel": 36879, "bowel": 36880, "yqr": 36881, "horsepower": 36882, "beacon": 36883, "ingeni": 36884, "gradient": 36885, "fermented": 36886, "moom": 36887, "thingy": 36888, "potassi": 36889, "wristband": 36890, "bord": 36891, "bodied": 36892, "ðŁĺŃðŁĺį": 36893, "mapp": 36894, "kau": 36895, "cyberpunk": 36896, "phish": 36897, "looking": 36898, "coates": 36899, "apur": 36900, "amie": 36901, "uklabour": 36902, "atin": 36903, "gla": 36904, "adoptable": 36905, "shelby": 36906, "villi": 36907, "riya": 36908, "mingly": 36909, "climber": 36910, "bumblebee": 36911, "ðŁĺ¸": 36912, "csd": 36913, "âĿ¥": 36914, "hospitalized": 36915, "cki": 36916, "hater": 36917, "chr": 36918, "retina": 36919, "ita": 36920, "fanbase": 36921, "beatrice": 36922, "gwyne": 36923, "goss": 36924, "fos": 36925, "favorited": 36926, "swachhbharat": 36927, "malade": 36928, "monmouth": 36929, "\"[": 36930, "sivan": 36931, "shhh": 36932, "commanding": 36933, "sainsburys": 36934, "weed": 36935, "gman": 36936, "ssw": 36937, "reptile": 36938, "ivy": 36939, "tropics": 36940, "rollers": 36941, "overcast": 36942, "exposition": 36943, "masquerade": 36944, "mancrush": 36945, "waist": 36946, "sprinter": 36947, "sleet": 36948, "levin": 36949, "jpg": 36950, "_(": 36951, "opel": 36952, "exploit": 36953, "apa": 36954, "powe": 36955, "wrecking": 36956, "jongin": 36957, "orb": 36958, "erick": 36959, "bosco": 36960, "praising": 36961, "bertr": 36962, "towing": 36963, "insecurity": 36964, "kut": 36965, "restocked": 36966, "rrp": 36967, "prescribed": 36968, "trafalgar": 36969, "pert": 36970, "gases": 36971, "apprais": 36972, "ghar": 36973, "musicals": 36974, "âĸ¬âĸ¬": 36975, "mcfad": 36976, "agony": 36977, "condition": 36978, "equip": 36979, "shik": 36980, "atravel": 36981, "ðŁĩ¿ðŁĩ¦": 36982, "keh": 36983, "abduction": 36984, "peoria": 36985, "wilkins": 36986, "gms": 36987, "asd": 36988, "evi": 36989, "ðŁĴĹðŁĴĹðŁĴĹ": 36990, "uz": 36991, "moc": 36992, "hallelujah": 36993, "guadalu": 36994, "louvre": 36995, "drawing": 36996, "gove": 36997, "phant": 36998, "frie": 36999, "webdev": 37000, "programmer": 37001, "zable": 37002, "gamescom": 37003, "clarify": 37004, "lith": 37005, "kinky": 37006, "âĿ£": 37007, "labourdoorstep": 37008, "sonata": 37009, "juris": 37010, "maiden": 37011, "viadu": 37012, "bucharest": 37013, "conditioned": 37014, "capitalist": 37015, "ude": 37016, "psb": 37017, "spca": 37018, "lulla": 37019, "foothills": 37020, "kayo": 37021, "bond": 37022, "womb": 37023, "rounder": 37024, "cesar": 37025, "bursts": 37026, "apra": 37027, "swoon": 37028, "sabrin": 37029, "fragrant": 37030, "clearer": 37031, "kubrick": 37032, "climax": 37033, "journo": 37034, "agle": 37035, "ðŁı½âĢįâĻĢï¸ı": 37036, "pooch": 37037, "hale": 37038, "solit": 37039, "salmon": 37040, "organisms": 37041, "bronson": 37042, "arten": 37043, "hodgson": 37044, "alove": 37045, "venture": 37046, "bbi": 37047, "aea": 37048, "ðŁIJ¢": 37049, "ldn": 37050, "dnr": 37051, "ozone": 37052, "ellas": 37053, "manny": 37054, "azzur": 37055, "unbeat": 37056, "truffles": 37057, "thong": 37058, "mañ": 37059, "lasers": 37060, "leye": 37061, "gettysburg": 37062, "backpacks": 37063, "oris": 37064, "maison": 37065, "crawling": 37066, "labra": 37067, "cling": 37068, "dragging": 37069, "steal": 37070, "doubt": 37071, "devan": 37072, "ckers": 37073, "agentsof": 37074, "photobomb": 37075, 
"elonmusk": 37076, "aboy": 37077, "distances": 37078, "storyline": 37079, "spi": 37080, "northan": 37081, "europeans": 37082, "whale": 37083, "serpent": 37084, "ðŁļ²": 37085, "fior": 37086, "trit": 37087, "oxo": 37088, "awarding": 37089, "classmate": 37090, "sufc": 37091, "smartest": 37092, "riches": 37093, "prk": 37094, "bigfoot": 37095, "armb": 37096, "bipolar": 37097, "dwelling": 37098, "omars": 37099, "kwan": 37100, "grime": 37101, "meng": 37102, "frederick": 37103, "navarro": 37104, "sorrynotsorry": 37105, "jaredleto": 37106, "pave": 37107, "slack": 37108, "barnsley": 37109, "attar": 37110, "eviction": 37111, "accumulation": 37112, "oir": 37113, "catchy": 37114, "welter": 37115, "vikas": 37116, "hassee": 37117, "nikita": 37118, "moyes": 37119, "mathews": 37120, "shiv": 37121, "gatwick": 37122, "profiling": 37123, "companions": 37124, "marrake": 37125, "antics": 37126, "ðŁĻĮðŁĻĮðŁĻĮ": 37127, "sese": 37128, "boi": 37129, "bartlett": 37130, "poisonous": 37131, "abuses": 37132, "ymm": 37133, "kampala": 37134, "guggenheim": 37135, "imvkohli": 37136, "dolom": 37137, "bree": 37138, "throttle": 37139, "gareth": 37140, "fitzpatrick": 37141, "unya": 37142, "parad": 37143, "margot": 37144, "jnr": 37145, "wea": 37146, "potassium": 37147, "pnc": 37148, "disguised": 37149, "crash": 37150, "renergy": 37151, "illic": 37152, "coupled": 37153, "niels": 37154, "ciones": 37155, "æĹ¥": 37156, "iment": 37157, "despicable": 37158, "dye": 37159, "whatcha": 37160, "connections": 37161, "paralympics": 37162, "gauntlet": 37163, "waitrose": 37164, "suicidal": 37165, "starship": 37166, "vapor": 37167, "stou": 37168, "lawmaker": 37169, "cooled": 37170, "simo": 37171, "theno": 37172, "offroad": 37173, "jaden": 37174, "basque": 37175, "vicky": 37176, "lukaku": 37177, "centro": 37178, "trish": 37179, "strategist": 37180, "medications": 37181, "horst": 37182, "bfc": 37183, "grail": 37184, "sharply": 37185, "aditya": 37186, "tomb": 37187, "kaufman": 37188, "tripad": 37189, "samba": 37190, "pastoral": 37191, "britney": 37192, "sagan": 37193, "hillside": 37194, "masons": 37195, "sara": 37196, "zone": 37197, "xu": 37198, "totes": 37199, "robbie": 37200, "appen": 37201, "montag": 37202, "dero": 37203, "shortfilm": 37204, "charismatic": 37205, "tators": 37206, "kiba": 37207, "andri": 37208, "alarming": 37209, "splitting": 37210, "icar": 37211, "thug": 37212, "scariest": 37213, "sylvester": 37214, "anan": 37215, "utrecht": 37216, "adifference": 37217, "meade": 37218, "buster": 37219, "airstrikes": 37220, "cuffs": 37221, "accountants": 37222, "ðŁĺ¡ðŁĺ¡": 37223, "newt": 37224, "bott": 37225, "issuing": 37226, "clancy": 37227, "wwenetwork": 37228, "kyuhyun": 37229, "resemble": 37230, "pajamas": 37231, "sink": 37232, "kinney": 37233, "sulph": 37234, "ork": 37235, "lies": 37236, "lagh": 37237, "orton": 37238, "rahul": 37239, "dsc": 37240, "wewill": 37241, "ream": 37242, "colloqui": 37243, "sharia": 37244, "hectic": 37245, "sarcasm": 37246, "lander": 37247, "tmz": 37248, "endorf": 37249, "roz": 37250, "hammered": 37251, "fris": 37252, "wadi": 37253, "popefrancis": 37254, "heit": 37255, "flashlight": 37256, "unborn": 37257, "opes": 37258, "holiness": 37259, "ðŁIJ¦": 37260, "nacht": 37261, "imsa": 37262, "gracing": 37263, "bjp": 37264, "verts": 37265, "csc": 37266, "homeowner": 37267, "aque": 37268, "bigotry": 37269, "annie": 37270, "bagh": 37271, "âĿ¤ï¸ıðŁĺį": 37272, "cari": 37273, "thomp": 37274, "disposable": 37275, "cardiology": 37276, "patented": 37277, "hhhhhh": 37278, "ldr": 37279, "stephenson": 37280, "crores": 37281, 
"fanning": 37282, "climat": 37283, "ðŁijįðŁijįðŁijį": 37284, "ðŁijįðŁı¼": 37285, "aeron": 37286, "piccadilly": 37287, "bankrupt": 37288, "silvia": 37289, "employ": 37290, "donny": 37291, "commenting": 37292, "screenwriter": 37293, "iota": 37294, "cean": 37295, "ancers": 37296, "tuan": 37297, "streetwear": 37298, "य": 37299, "skine": 37300, "espa": 37301, "asif": 37302, "osce": 37303, "sheppard": 37304, "morecam": 37305, "bottle": 37306, "ders": 37307, "oracle": 37308, "googleplay": 37309, "averaged": 37310, "edmonton": 37311, "stephan": 37312, "sisterhood": 37313, "crusted": 37314, "staggering": 37315, "methodology": 37316, "congresswoman": 37317, "cabo": 37318, "triggers": 37319, "milky": 37320, "glide": 37321, "toothpaste": 37322, "roommates": 37323, "nuff": 37324, "guam": 37325, "sprinkles": 37326, "alternative": 37327, "watfordfc": 37328, "uoft": 37329, "haley": 37330, "contacted": 37331, "bundy": 37332, "prostitu": 37333, "ghar": 37334, "preston": 37335, "onsite": 37336, "hilar": 37337, "gts": 37338, "catt": 37339, "hampstead": 37340, "??!": 37341, "ðŁĩ§ðŁĩ": 37342, "bbcqt": 37343, "alessandro": 37344, "resist": 37345, "maidan": 37346, "tko": 37347, "shading": 37348, "pinup": 37349, "gallo": 37350, "sinu": 37351, "atec": 37352, "funk": 37353, "aclu": 37354, "strides": 37355, "rhyme": 37356, "wetland": 37357, "bbcspringwatch": 37358, "tins": 37359, "wildcard": 37360, "stour": 37361, "flamenco": 37362, "paula": 37363, "ontology": 37364, "gangsta": 37365, "amade": 37366, "ãĤ«": 37367, "tbs": 37368, "skeletal": 37369, "runner": 37370, "jardin": 37371, "harrier": 37372, "hunted": 37373, "zhen": 37374, "believeinfilm": 37375, "demean": 37376, "auditi": 37377, "restart": 37378, "chondri": 37379, "âĿ¤ï¸ıðŁĴĻ": 37380, "mclaren": 37381, "gab": 37382, "shum": 37383, "ausa": 37384, "lewisham": 37385, "ypg": 37386, "kjv": 37387, "furnished": 37388, "doro": 37389, "bonded": 37390, "morty": 37391, "latitude": 37392, "_)": 37393, "lova": 37394, "waterways": 37395, "vinai": 37396, "shorth": 37397, "drunk": 37398, "cay": 37399, "ayana": 37400, "kaplan": 37401, "cappuccino": 37402, "spro": 37403, "lifeboat": 37404, "hasbro": 37405, "spolice": 37406, "toron": 37407, "doing": 37408, "damn": 37409, "shree": 37410, "fountains": 37411, "entation": 37412, "maru": 37413, "boarder": 37414, "topless": 37415, "jada": 37416, "channing": 37417, "ulls": 37418, "enclosure": 37419, "gibson": 37420, "fractured": 37421, "britton": 37422, "ö": 37423, "tous": 37424, "porth": 37425, "draf": 37426, "trailing": 37427, "margate": 37428, "elife": 37429, "downward": 37430, "linn": 37431, "glades": 37432, "girlpower": 37433, "akrish": 37434, "uki": 37435, "ronda": 37436, "tsc": 37437, "appreciationday": 37438, "vising": 37439, "loom": 37440, "ðŁį³": 37441, "mexican": 37442, "argos": 37443, "yya": 37444, "jadine": 37445, "southport": 37446, "dend": 37447, "sista": 37448, "redeem": 37449, "meng": 37450, "braxton": 37451, "antioxidant": 37452, "skey": 37453, "mpg": 37454, "finding": 37455, "vibration": 37456, "ceu": 37457, "khart": 37458, "dimini": 37459, "cline": 37460, "shelly": 37461, "hines": 37462, "īï¸ı": 37463, "topical": 37464, "nover": 37465, "maxx": 37466, "primitive": 37467, "illustrate": 37468, "bounds": 37469, "trenton": 37470, "jointly": 37471, "breeders": 37472, "uchi": 37473, "wakeupamerica": 37474, "bada": 37475, "ðŁĹ£ï¸ı": 37476, "guacam": 37477, "spheres": 37478, "peregr": 37479, "youthful": 37480, "lolo": 37481, "birmin": 37482, "tly": 37483, "jeremycorbyn": 37484, "defects": 37485, "cosm": 37486, "arent": 
37487, "vaa": 37488, "bagels": 37489, "mediac": 37490, "coriander": 37491, "icago": 37492, "ghaz": 37493, "abbas": 37494, "remodel": 37495, "structuring": 37496, "pum": 37497, "outlaw": 37498, "adani": 37499, "rbc": 37500, "gulls": 37501, "nli": 37502, "confuse": 37503, "ðŁijĩðŁı¼": 37504, "vila": 37505, "mcnamara": 37506, "corrections": 37507, "mughal": 37508, "seri": 37509, "regain": 37510, "ssb": 37511, "leave": 37512, "hahahah": 37513, "grande": 37514, "distressed": 37515, "rechargeable": 37516, "hoa": 37517, "housed": 37518, "stil": 37519, "attributed": 37520, "opathic": 37521, "dips": 37522, "prit": 37523, "headphone": 37524, "conclude": 37525, "pilo": 37526, "het": 37527, "utsa": 37528, "nitin": 37529, "jem": 37530, "snippet": 37531, "tutoring": 37532, "oper": 37533, "sunk": 37534, "ensla": 37535, "chau": 37536, "acorn": 37537, "quintess": 37538, "rankin": 37539, "affiliated": 37540, "ourlives": 37541, "clint": 37542, "seater": 37543, "isaac": 37544, "bashing": 37545, "smear": 37546, "nurse": 37547, "doodling": 37548, "\";": 37549, "saku": 37550, "atrocities": 37551, "imam": 37552, "gfs": 37553, "violating": 37554, "commend": 37555, "bradshaw": 37556, "erville": 37557, "billed": 37558, "bbe": 37559, "thulhu": 37560, "iphones": 37561, "moose": 37562, "dios": 37563, "rew": 37564, "methane": 37565, "strangely": 37566, "whisky": 37567, "tightly": 37568, "spielberg": 37569, "radius": 37570, "noticing": 37571, "wif": 37572, "ignati": 37573, "ifa": 37574, "apis": 37575, "wali": 37576, "haitian": 37577, "bushes": 37578, "yz": 37579, "vl": 37580, "exited": 37581, "assel": 37582, "truec": 37583, "domen": 37584, "asher": 37585, "inking": 37586, "newyearseve": 37587, "hendricks": 37588, "bati": 37589, "ìĿ´ì": 37590, "richter": 37591, "monsanto": 37592, "conline": 37593, "agreat": 37594, "ðŁ¤¯": 37595, "masterpieces": 37596, "arn": 37597, "roughs": 37598, "cleve": 37599, "sev": 37600, "fashions": 37601, "toya": 37602, "shail": 37603, "copeland": 37604, "aquari": 37605, "decals": 37606, "areyou": 37607, "yaya": 37608, "astr": 37609, "font": 37610, "mlm": 37611, "arca": 37612, "ppor": 37613, "pollock": 37614, "xperia": 37615, "conservation": 37616, "chainsaw": 37617, "aggie": 37618, "?!?!?": 37619, "sile": 37620, "shon": 37621, "ìĹIJ": 37622, "notebooks": 37623, "marquette": 37624, "deus": 37625, "bbled": 37626, "spicer": 37627, "mccabe": 37628, "norwich": 37629, "modification": 37630, "boosted": 37631, "strum": 37632, "salesman": 37633, "bangle": 37634, "nissan": 37635, "hezbollah": 37636, "breasts": 37637, "aaf": 37638, "anthus": 37639, "sker": 37640, "owed": 37641, "heros": 37642, "gifs": 37643, "fosters": 37644, "eaters": 37645, "dues": 37646, "_/": 37647, "lymphoma": 37648, "sfam": 37649, "megal": 37650, "afridi": 37651, "agic": 37652, "pamp": 37653, "jealousy": 37654, "ðŁijĮðŁı¼": 37655, "calculate": 37656, "napping": 37657, "gale": 37658, "ð٦Ħ": 37659, "lubbock": 37660, "assumed": 37661, "renting": 37662, "íĥľ": 37663, "suburb": 37664, "ãĤ·": 37665, "technic": 37666, "ucla": 37667, "infront": 37668, "garnet": 37669, "steroids": 37670, "striving": 37671, "howar": 37672, "mover": 37673, "leton": 37674, "bulldo": 37675, "isin": 37676, "ciao": 37677, "snz": 37678, "forefront": 37679, "dams": 37680, "midwife": 37681, "mawards": 37682, "clapton": 37683, "wein": 37684, "subsidies": 37685, "sproud": 37686, "rotherham": 37687, "phantom": 37688, "arach": 37689, "spiel": 37690, "racket": 37691, "selamat": 37692, "noon": 37693, "lbc": 37694, "entially": 37695, "ðŁĴ¸": 37696, "silve": 37697, "moud": 
37698, "kinetic": 37699, "yasi": 37700, "ðŁİ©": 37701, "ool": 37702, "miku": 37703, "iza": 37704, "fera": 37705, "floren": 37706, "barbershop": 37707, "groot": 37708, "zest": 37709, "nears": 37710, "stanis": 37711, "zand": 37712, "policeman": 37713, "jurisdic": 37714, "formations": 37715, "apparatus": 37716, "spd": 37717, "artifact": 37718, "tosc": 37719, "motivating": 37720, "womancrush": 37721, "redro": 37722, "diagnostics": 37723, "raza": 37724, "outfitters": 37725, "elxn": 37726, "dodgy": 37727, "ryn": 37728, "shd": 37729, "orthodon": 37730, "olde": 37731, "jayanti": 37732, "balances": 37733, "quickest": 37734, "canton": 37735, "fridayreads": 37736, "!*": 37737, "naa": 37738, "aak": 37739, "ðŁĶ·": 37740, "behaviors": 37741, "raspberries": 37742, "ä»": 37743, "political": 37744, "camil": 37745, "åľ": 37746, "dik": 37747, "astounding": 37748, "liebe": 37749, "novelty": 37750, "turmoil": 37751, "sully": 37752, "springbreak": 37753, "honouring": 37754, "ccg": 37755, "ðŁıĴ": 37756, "mylittle": 37757, "kyc": 37758, "proms": 37759, "ðŁķĬ": 37760, "è": 37761, "bige": 37762, "avril": 37763, "ðŁĩµðŁĩ°": 37764, "marion": 37765, "asants": 37766, "surya": 37767, "octag": 37768, "lufthan": 37769, "acron": 37770, "fayetteville": 37771, "tique": 37772, "loves": 37773, "enca": 37774, "dekalb": 37775, "taver": 37776, "devote": 37777, "auxiliary": 37778, "johannes": 37779, "treadmill": 37780, "ayan": 37781, "qur": 37782, "donaldson": 37783, "cheryl": 37784, "\"....": 37785, "sven": 37786, "kirsty": 37787, "gunners": 37788, "radish": 37789, "oahu": 37790, "vsky": 37791, "ible": 37792, "concourse": 37793, "bps": 37794, "eloqu": 37795, "ashford": 37796, "tebow": 37797, "roblox": 37798, "mada": 37799, "driving": 37800, "thday": 37801, "sproject": 37802, "mms": 37803, "banded": 37804, ".!!": 37805, "librarians": 37806, "flannel": 37807, "intolerance": 37808, "heral": 37809, "çµ": 37810, "nemesis": 37811, "lista": 37812, "tarak": 37813, "crypt": 37814, "starplus": 37815, "vishnu": 37816, "scale": 37817, "cris": 37818, "%),": 37819, "jillian": 37820, "reggae": 37821, "pegasus": 37822, "olin": 37823, "ipment": 37824, "manic": 37825, "lfc": 37826, "goddard": 37827, "iteam": 37828, "parlour": 37829, "anchors": 37830, "leeminho": 37831, "tallahassee": 37832, "antit": 37833, "dho": 37834, "kidney": 37835, "yash": 37836, "battled": 37837, "azad": 37838, "garis": 37839, "faulkner": 37840, "sniff": 37841, "paparazzi": 37842, "edm": 37843, "phyllis": 37844, "contested": 37845, "aaay": 37846, "seca": 37847, "kton": 37848, "velve": 37849, "rainier": 37850, "forum": 37851, "tampab": 37852, "hosp": 37853, "tractors": 37854, "oxfordshire": 37855, "notion": 37856, "guangzhou": 37857, "ðŁĺ¯": 37858, "refill": 37859, "wednesdaymotivation": 37860, "slider": 37861, "mukherjee": 37862, "pratt": 37863, "fontaine": 37864, "alphon": 37865, "afar": 37866, "tsi": 37867, "pesticides": 37868, "fiends": 37869, "mocking": 37870, "braw": 37871, "transat": 37872, "doses": 37873, "cores": 37874, "homophobia": 37875, "documenting": 37876, "zlatan": 37877, "condoms": 37878, "sé": 37879, "sunset": 37880, "kunst": 37881, "tonga": 37882, "ส": 37883, "vation": 37884, "spray": 37885, "chowder": 37886, "raps": 37887, "palladium": 37888, "norwood": 37889, "musichistory": 37890, "hooker": 37891, "sisi": 37892, "osprey": 37893, "phys": 37894, "conceded": 37895, "bobcat": 37896, "armad": 37897, "zeit": 37898, "ÙĦ": 37899, "ðŁĺģðŁĺģ": 37900, "meridi": 37901, "ðŁĩ·ðŁĩº": 37902, "cornwall": 37903, "!),": 37904, "touchdowns": 37905, "zeit": 37906, "chalet": 
37907, "mmm": 37908, "alche": 37909, "gorilla": 37910, "foss": 37911, "atiku": 37912, "luminous": 37913, "ivanka": 37914, "beek": 37915, "stares": 37916, "swiss": 37917, "âĿ¤âĿ¤âĿ¤âĿ¤": 37918, "scrubs": 37919, "meath": 37920, "gustav": 37921, "jogging": 37922, "confetti": 37923, "asos": 37924, "ersfc": 37925, "breitbart": 37926, "applicable": 37927, "authored": 37928, "yaho": 37929, "hin": 37930, "displacement": 37931, "jv": 37932, "ðŁĮ¹ðŁĮ¹": 37933, "otc": 37934, "nonprofits": 37935, "diecast": 37936, "gusto": 37937, "intestin": 37938, "cages": 37939, "meen": 37940, "lukas": 37941, "mooney": 37942, "ðŁĺ·": 37943, "veryday": 37944, "torah": 37945, "ission": 37946, "wac": 37947, "leveraging": 37948, "ishable": 37949, "cuse": 37950, "lewood": 37951, "mayan": 37952, "turntable": 37953, "juice": 37954, "trusty": 37955, "tup": 37956, "etiquette": 37957, "supervisors": 37958, "stun": 37959, "guzman": 37960, "conferen": 37961, "rico": 37962, "feast": 37963, "backward": 37964, "polaris": 37965, "miche": 37966, "jog": 37967, "hing": 37968, "fieldhouse": 37969, "veling": 37970, "shocker": 37971, "escence": 37972, "ा": 37973, "vibe": 37974, "anastasia": 37975, "marched": 37976, "killing": 37977, "Ķë": 37978, "fett": 37979, "exoplan": 37980, "...(": 37981, "snowday": 37982, "loh": 37983, "irani": 37984, "lakhs": 37985, "dela": 37986, "pocaly": 37987, "boomers": 37988, "dictatorship": 37989, "acer": 37990, "turkeys": 37991, "quarterfinal": 37992, "musketeers": 37993, "ðŁĴĽðŁĴļ": 37994, "sfx": 37995, "museumweek": 37996, "scala": 37997, "risis": 37998, "(ðŁĵ·": 37999, "ãĢĤ": 38000, "zies": 38001, "boeh": 38002, "hues": 38003, "lusci": 38004, "dola": 38005, "impeachtrump": 38006, "rood": 38007, "doncaster": 38008, "torre": 38009, "heroes": 38010, "foyer": 38011, "tari": 38012, "blurred": 38013, "kew": 38014, "frankly": 38015, "droid": 38016, "apal": 38017, "м": 38018, "yaf": 38019, "bret": 38020, "paragu": 38021, "cacao": 38022, "ðŁĻĮðŁı¾": 38023, "rue": 38024, "headaches": 38025, "shawty": 38026, "charley": 38027, "paler": 38028, "gowns": 38029, "correctional": 38030, "ðŁĺ©ðŁĺ©": 38031, "breakingbad": 38032, "oling": 38033, "dap": 38034, "endeavour": 38035, "citadel": 38036, "trad": 38037, "incumbent": 38038, "meditate": 38039, "footed": 38040, "ðŁĴµ": 38041, "shabbat": 38042, "dayofthe": 38043, "willem": 38044, "galway": 38045, "tored": 38046, "marriage": 38047, "fillion": 38048, "sleeveless": 38049, "auditor": 38050, "jinyoung": 38051, "invincible": 38052, "kaduna": 38053, "aand": 38054, "volcanoes": 38055, "moneti": 38056, "indiegogo": 38057, "buccaneers": 38058, "ðŁijīðŁı½": 38059, "ãĢĤ": 38060, "layton": 38061, "cuckoo": 38062, "humber": 38063, "buzzer": 38064, "Ïī": 38065, "tore": 38066, "strains": 38067, "stom": 38068, "paine": 38069, "swe": 38070, "duff": 38071, "zou": 38072, "simi": 38073, "lipp": 38074, "urn": 38075, "seagu": 38076, "ðŁĶ®": 38077, "sundae": 38078, "hic": 38079, "ðŁĺ¨": 38080, "bullpen": 38081, "uper": 38082, "flyover": 38083, "aldridge": 38084, "globes": 38085, "alies": 38086, "kenzie": 38087, "gees": 38088, "ycle": 38089, "splin": 38090, "magenta": 38091, "jha": 38092, "balu": 38093, "ghorn": 38094, "tipper": 38095, "wicker": 38096, "tasteof": 38097, "conclave": 38098, "chale": 38099, "invasi": 38100, "cater": 38101, "dioxide": 38102, "megab": 38103, "winn": 38104, "atp": 38105, "transformative": 38106, "nestled": 38107, "hig": 38108, "bridging": 38109, "lilies": 38110, "cheered": 38111, "baddest": 38112, "scrolls": 38113, "realis": 38114, "diplo": 38115, "ðŁĶ«": 38116, 
"concession": 38117, "preferences": 38118, "explodes": 38119, "ergon": 38120, "introductory": 38121, "ineau": 38122, "chaf": 38123, "somes": 38124, "landrover": 38125, "spiration": 38126, "sexy": 38127, "scorecard": 38128, "illustrates": 38129, "soulmate": 38130, "wien": 38131, "interdisciplinary": 38132, "forecasting": 38133, "entities": 38134, "glued": 38135, "enlar": 38136, "curt": 38137, "perceptions": 38138, "bootleg": 38139, "mire": 38140, "ashok": 38141, "vaz": 38142, "horne": 38143, "calle": 38144, "aculture": 38145, "theroy": 38146, "nighttime": 38147, "ocal": 38148, "characterdesign": 38149, "armist": 38150, "ðŁĺıðŁĺı": 38151, "yahoo": 38152, "aceae": 38153, "tose": 38154, "evento": 38155, "sout": 38156, "nayanth": 38157, "whom": 38158, "vare": 38159, "rigging": 38160, "genus": 38161, "hive": 38162, "commands": 38163, "stie": 38164, "daya": 38165, "ethanol": 38166, "enf": 38167, "hifi": 38168, "fluence": 38169, "clemson": 38170, "reinvent": 38171, "thermometer": 38172, "humorous": 38173, "emerging": 38174, "ación": 38175, "ðŁĺĺðŁĺį": 38176, "sity": 38177, "hawke": 38178, "accompanying": 38179, "tility": 38180, "ðŁĺª": 38181, "recess": 38182, "protagonist": 38183, "lery": 38184, "dundal": 38185, "intl": 38186, "brittany": 38187, "qbs": 38188, "offthe": 38189, "marriages": 38190, "howto": 38191, "violated": 38192, "adelaide": 38193, "witt": 38194, "lancer": 38195, "pakv": 38196, "hume": 38197, "stade": 38198, "bragging": 38199, "outright": 38200, "adc": 38201, "superst": 38202, "realtime": 38203, "cures": 38204, "gardeners": 38205, "erock": 38206, "dalejr": 38207, "vero": 38208, "bartol": 38209, "moti": 38210, "mcfly": 38211, "vpn": 38212, "stink": 38213, "overrated": 38214, "guerra": 38215, "etis": 38216, "athome": 38217, "twdfamily": 38218, "thab": 38219, "tnx": 38220, "rafael": 38221, "familytravel": 38222, "xley": 38223, "satanic": 38224, "equations": 38225, "rudy": 38226, "waldorf": 38227, "stani": 38228, "tube": 38229, "measles": 38230, "zimmerman": 38231, "obligations": 38232, "iously": 38233, "bowser": 38234, "transformer": 38235, "shoppe": 38236, "shaken": 38237, "ghouse": 38238, "tod": 38239, "ketball": 38240, "shareholder": 38241, "marca": 38242, "kpmg": 38243, "akan": 38244, "givenchy": 38245, "coastal": 38246, "auth": 38247, "rollercoaster": 38248, "marches": 38249, "coordinate": 38250, "cinema": 38251, "apprentices": 38252, "parlor": 38253, "mito": 38254, "menon": 38255, "considerable": 38256, "barre": 38257, "gloss": 38258, "enhances": 38259, "jazeera": 38260, "falmouth": 38261, "thrash": 38262, "staten": 38263, "kzn": 38264, "engel": 38265, "samanthap": 38266, "floppy": 38267, "salom": 38268, "ðŁıĨðŁıĨ": 38269, "wack": 38270, "deliberate": 38271, "oscill": 38272, "heritag": 38273, "dusted": 38274, "ornithology": 38275, "paddle": 38276, "ferns": 38277, "barun": 38278, "clans": 38279, "anticipate": 38280, "aay": 38281, "matically": 38282, "éĩ": 38283, "tumble": 38284, "postman": 38285, "unicef": 38286, "trotter": 38287, "opd": 38288, "leaflet": 38289, "geist": 38290, "ceasefire": 38291, "screws": 38292, "creation": 38293, "walnuts": 38294, "longhorns": 38295, "understatement": 38296, "abb": 38297, "proximity": 38298, "nax": 38299, "unity": 38300, "turnpike": 38301, "ordained": 38302, "dubstep": 38303, "chakra": 38304, "mech": 38305, "loveher": 38306, "lookalike": 38307, "donnein": 38308, "viron": 38309, "ÙĪ": 38310, "bangers": 38311, "variants": 38312, "outdated": 38313, "inta": 38314, "cristo": 38315, "spelt": 38316, "foodand": 38317, "fon": 38318, "stefani": 38319, 
"marginal": 38320, "hutton": 38321, "tiara": 38322, "telford": 38323, "quen": 38324, "fairgrounds": 38325, "quetta": 38326, "mikhail": 38327, "healer": 38328, "vball": 38329, "tyre": 38330, "undergrad": 38331, "glend": 38332, "homers": 38333, "scribed": 38334, "maintains": 38335, "poche": 38336, "missal": 38337, "marko": 38338, "uas": 38339, "án": 38340, "shp": 38341, "convey": 38342, "padre": 38343, "saba": 38344, "puglia": 38345, "madhuri": 38346, "paxton": 38347, "chaplain": 38348, "nago": 38349, "casi": 38350, "...!!!": 38351, "flirt": 38352, "saleh": 38353, "kare": 38354, "dire": 38355, "stamped": 38356, "extreme": 38357, "ðŁĺĥðŁĺĥ": 38358, "hoppy": 38359, "guadalupe": 38360, "advantaged": 38361, "euchar": 38362, "plow": 38363, "unn": 38364, "macqu": 38365, "portland": 38366, "clash": 38367, "pes": 38368, "loubout": 38369, "yp": 38370, "keeping": 38371, "arcadia": 38372, "frankie": 38373, "fiu": 38374, "deth": 38375, "encyclopedia": 38376, "size": 38377, "invests": 38378, "ðŁį©": 38379, "geological": 38380, "franç": 38381, "confront": 38382, "ðŁĺ¥": 38383, "dys": 38384, "afm": 38385, "texan": 38386, "graphene": 38387, "repostapp": 38388, "acf": 38389, "ursula": 38390, "gaza": 38391, "ddled": 38392, "fum": 38393, "wsbtv": 38394, "mbe": 38395, "frontiers": 38396, "chronograph": 38397, "kes": 38398, "interfaith": 38399, "taboo": 38400, "sparta": 38401, "wondo": 38402, "florist": 38403, "embraces": 38404, "caw": 38405, "noel": 38406, "archers": 38407, "ðŁIJ·": 38408, "romano": 38409, "banan": 38410, "shakers": 38411, "melodies": 38412, "geothermal": 38413, "sephora": 38414, "ìļ°": 38415, "од": 38416, "proc": 38417, "handshake": 38418, "pande": 38419, "populated": 38420, "slowdown": 38421, "hortons": 38422, "registrations": 38423, "undeni": 38424, "lants": 38425, "passover": 38426, "thakur": 38427, "lief": 38428, "adhesive": 38429, "petal": 38430, "microscopy": 38431, "memphis": 38432, "confirming": 38433, "airdrop": 38434, "mesmer": 38435, "perceived": 38436, "mingle": 38437, "lifeline": 38438, "ghj": 38439, "worcestershire": 38440, "passions": 38441, "acher": 38442, "ellar": 38443, "aho": 38444, "firenze": 38445, "barang": 38446, "letterman": 38447, "hatfield": 38448, "lucha": 38449, "jeter": 38450, "eshop": 38451, "williams": 38452, "horoscope": 38453, "prede": 38454, "eastbourne": 38455, "durga": 38456, "diversion": 38457, "altrin": 38458, "seismic": 38459, "premiosm": 38460, "narco": 38461, "tir": 38462, "orig": 38463, "orm": 38464, "landfall": 38465, "cious": 38466, "lindo": 38467, "maxine": 38468, "xico": 38469, "tray": 38470, "oswald": 38471, "cba": 38472, "ricotta": 38473, "ncr": 38474, "marau": 38475, "า": 38476, "gladiator": 38477, "chery": 38478, "lung": 38479, "ume": 38480, "popsic": 38481, "longing": 38482, "canals": 38483, "taya": 38484, "decentralized": 38485, "shopp": 38486, "pressures": 38487, "maharaj": 38488, "etihad": 38489, "walgreens": 38490, "succession": 38491, "signaling": 38492, "lig": 38493, "staffer": 38494, "northkorea": 38495, "defying": 38496, "asma": 38497, "deg": 38498, "perimeter": 38499, "oakville": 38500, "msk": 38501, "baltimore": 38502, "receip": 38503, "deple": 38504, "ðŁĺŃðŁĺĤ": 38505, "jamboree": 38506, ">.<": 38507, "rspb": 38508, "punisher": 38509, "considerably": 38510, "intothe": 38511, "parisian": 38512, "accelerated": 38513, "polyester": 38514, "lowes": 38515, "frying": 38516, "sautéed": 38517, "mouths": 38518, "seychelles": 38519, "rax": 38520, "godis": 38521, "dakota": 38522, "housewives": 38523, "theme": 38524, "matinee": 38525, 
"blackbird": 38526, "yesung": 38527, "prefers": 38528, "pellegr": 38529, "inated": 38530, "trunks": 38531, "strongertogether": 38532, "repet": 38533, "repairing": 38534, "pedals": 38535, "tolerant": 38536, "herr": 38537, "dunne": 38538, "indication": 38539, "decatur": 38540, "btv": 38541, "exhibitors": 38542, "ikon": 38543, "fridaymotivation": 38544, "bragg": 38545, "livetweet": 38546, "alves": 38547, "womensart": 38548, "foreigners": 38549, "wallets": 38550, "mindy": 38551, "laney": 38552, "bbin": 38553, "tvmiaw": 38554, "lifter": 38555, "target": 38556, "tame": 38557, "drou": 38558, "astrophotography": 38559, "mpc": 38560, "gpu": 38561, "nordstrom": 38562, "friction": 38563, "runoff": 38564, "lovable": 38565, "spnfamily": 38566, "extingui": 38567, "bloody": 38568, "schel": 38569, "artistry": 38570, "swish": 38571, "scarce": 38572, "phils": 38573, "maxim": 38574, "possum": 38575, "compromised": 38576, "styli": 38577, "scfc": 38578, "issa": 38579, "birmingham": 38580, "sketched": 38581, "angelica": 38582, "ordinance": 38583, "jets": 38584, "conquer": 38585, "ðŁĺIJ": 38586, "onlineshopping": 38587, "sori": 38588, "reasonably": 38589, "nuestro": 38590, "arturo": 38591, "chl": 38592, "benefici": 38593, "sphoto": 38594, "welt": 38595, "nikk": 38596, "ð٤ŀ": 38597, "danao": 38598, "formid": 38599, "asse": 38600, "afirst": 38601, "âľĤ": 38602, "gillette": 38603, "assor": 38604, "anonym": 38605, "selca": 38606, "femi": 38607, "bearable": 38608, "yand": 38609, "armory": 38610, "crepe": 38611, "celticfc": 38612, "bravo": 38613, "inexpensive": 38614, "delec": 38615, "gecko": 38616, "newmarket": 38617, "snowflakes": 38618, "kabir": 38619, "contra": 38620, "canning": 38621, "morpho": 38622, "garwal": 38623, "ðŁĴĥðŁı»": 38624, "fighting": 38625, "mutation": 38626, "woody": 38627, "jugg": 38628, "graces": 38629, "premiosmtvmiaw": 38630, "kennedy": 38631, "gup": 38632, "sae": 38633, "opha": 38634, "offspring": 38635, "finisher": 38636, "betts": 38637, "spanning": 38638, "marj": 38639, "hone": 38640, "shing": 38641, "continents": 38642, "samanthaprabhu": 38643, "unrelated": 38644, "lacy": 38645, "explosions": 38646, "benjamin": 38647, "sophie": 38648, "noting": 38649, "microsoft": 38650, "assen": 38651, "ahoy": 38652, "iker": 38653, "hofer": 38654, "moe": 38655, "ahmadi": 38656, "yann": 38657, "anak": 38658, "mahi": 38659, "beu": 38660, "ahah": 38661, "creeper": 38662, "baahubali": 38663, "amat": 38664, "priory": 38665, "hawkeye": 38666, "deloitte": 38667, "skoda": 38668, "printmaking": 38669, "assembling": 38670, "miraculous": 38671, "noch": 38672, "swo": 38673, "lega": 38674, "operates": 38675, "borderlands": 38676, "elie": 38677, "strongh": 38678, "reptiles": 38679, "pirate": 38680, "unfold": 38681, "¯": 38682, "qualcomm": 38683, "unpredictable": 38684, "otr": 38685, "rosewood": 38686, "directional": 38687, "counselors": 38688, "cornell": 38689, "liberated": 38690, "jad": 38691, "irregular": 38692, "bulgarian": 38693, "highness": 38694, "vodafone": 38695, "swild": 38696, "minimize": 38697, "grazie": 38698, "à¹ĩ": 38699, "rstats": 38700, "streep": 38701, "ometric": 38702, "humble": 38703, "lump": 38704, "lille": 38705, "bü": 38706, "homedepot": 38707, "tripadvisor": 38708, "kiwan": 38709, "avia": 38710, "erz": 38711, "exico": 38712, "duf": 38713, "blumen": 38714, "mizing": 38715, "arma": 38716, "inim": 38717, "constan": 38718, "sora": 38719, "jual": 38720, "aun": 38721, "twell": 38722, "trenches": 38723, "hera": 38724, "rk": 38725, "poplar": 38726, "recipeoftheday": 38727, "llan": 38728, "bhuban": 38729, 
"shortages": 38730, "ingdon": 38731, "bridgewater": 38732, "ðŁIJĺ": 38733, "fortnite": 38734, "camden": 38735, "uncture": 38736, "prow": 38737, "colonies": 38738, "tks": 38739, "ngo": 38740, "bhm": 38741, "livepd": 38742, "splace": 38743, "slike": 38744, "happyeaster": 38745, "terrence": 38746, "revolver": 38747, "jed": 38748, "yyyy": 38749, "officeof": 38750, "mts": 38751, "existential": 38752, "rourke": 38753, "explorebc": 38754, "ssed": 38755, "priest": 38756, "vixen": 38757, "siding": 38758, "kpa": 38759, "ahar": 38760, "juic": 38761, "obstruc": 38762, "forensics": 38763, "ukmfg": 38764, "cancellation": 38765, "weary": 38766, "abq": 38767, "elec": 38768, "prized": 38769, "debts": 38770, "mezz": 38771, "salvatore": 38772, "mdc": 38773, "grette": 38774, "cgc": 38775, "thon": 38776, "snowstorm": 38777, "tsch": 38778, "cookery": 38779, "å¹": 38780, "waxing": 38781, "nacional": 38782, "murs": 38783, "rave": 38784, "capes": 38785, "germain": 38786, "dripping": 38787, "submitting": 38788, "omelette": 38789, "iteration": 38790, "ajes": 38791, "shimmer": 38792, "fueling": 38793, "ðŁĩ§ðŁĩª": 38794, "lipo": 38795, "bobble": 38796, "unfollow": 38797, "islamist": 38798, "hiber": 38799, "cats": 38800, "agentsofshield": 38801, "sensi": 38802, "_____": 38803, "steria": 38804, "instal": 38805, "auspicious": 38806, "harrow": 38807, "overland": 38808, "feminists": 38809, "instant": 38810, "chariot": 38811, "blindness": 38812, "sped": 38813, "scarec": 38814, "nuit": 38815, "miniatures": 38816, "hoseok": 38817, "glock": 38818, "fifaworldcup": 38819, "ete": 38820, "dism": 38821, "weiner": 38822, "exfoli": 38823, "earts": 38824, "à¸Ķ": 38825, "myart": 38826, "manil": 38827, "issant": 38828, "forma": 38829, "incu": 38830, "buffalob": 38831, "intim": 38832, "mccul": 38833, "anjali": 38834, "popo": 38835, "undoub": 38836, "hila": 38837, "fungal": 38838, "thankful": 38839, "futur": 38840, "endish": 38841, "rends": 38842, "thar": 38843, "sheff": 38844, "ringo": 38845, "nicholls": 38846, "iowa": 38847, "potom": 38848, "clams": 38849, "ãģĦ": 38850, "aconf": 38851, "stadiums": 38852, "dimp": 38853, "dik": 38854, "residences": 38855, "dov": 38856, "caricature": 38857, "seagull": 38858, "klm": 38859, "confess": 38860, "slapped": 38861, "celeb": 38862, "turbines": 38863, "ppv": 38864, "nurture": 38865, "elab": 38866, ".....#": 38867, "tuff": 38868, "depress": 38869, "alfar": 38870, "amiibo": 38871, "dispon": 38872, "ewing": 38873, "queer": 38874, "friends": 38875, "forre": 38876, "âĺ¼": 38877, "swt": 38878, "aquarius": 38879, "headliner": 38880, "curd": 38881, "figs": 38882, "otters": 38883, "lovefl": 38884, "kareem": 38885, "govegan": 38886, "friyay": 38887, "consolation": 38888, "atri": 38889, "ì§Ħ": 38890, "âĺĿï¸ı": 38891, "polyne": 38892, "gued": 38893, "oya": 38894, "laus": 38895, "intestinal": 38896, "camilla": 38897, "scalp": 38898, "pir": 38899, "leeds": 38900, "horrifying": 38901, "boretum": 38902, "dandelion": 38903, "ferrer": 38904, "ellic": 38905, "asx": 38906, "soren": 38907, "reloaded": 38908, "aleague": 38909, "navigator": 38910, "inette": 38911, "addams": 38912, "alchemist": 38913, "akshay": 38914, "dystopian": 38915, "awec": 38916, "naya": 38917, "alisa": 38918, "ailed": 38919, "agor": 38920, "aviator": 38921, "alizer": 38922, "smobile": 38923, "findyourpark": 38924, "copying": 38925, "toddy": 38926, "shti": 38927, "monger": 38928, "calhoun": 38929, "napkin": 38930, "breakup": 38931, "yatra": 38932, "sethu": 38933, "richi": 38934, "erasmus": 38935, "ferry": 38936, "amore": 38937, "practise": 38938, 
"bobo": 38939, "powerpoint": 38940, "oose": 38941, "liffe": 38942, "china": 38943, "shka": 38944, "fadnavis": 38945, "duane": 38946, "waron": 38947, "false": 38948, "ðŁļĤ": 38949, "washes": 38950, "discip": 38951, "========": 38952, "gk": 38953, "abb": 38954, "stubborn": 38955, "medieval": 38956, "pci": 38957, "ðŁįª": 38958, "marilyn": 38959, "hyo": 38960, "mandi": 38961, "cri": 38962, "predecess": 38963, "continuation": 38964, "omusic": 38965, "slat": 38966, "whal": 38967, "mallory": 38968, "bonn": 38969, "shenzhen": 38970, "cai": 38971, "âĺĥ": 38972, "safest": 38973, "forwards": 38974, "drawers": 38975, "blasted": 38976, "slee": 38977, "morphe": 38978, "mbta": 38979, "dumbass": 38980, "ÑĦоÑĤо": 38981, "alhamdulillah": 38982, "eclub": 38983, "albeit": 38984, "healey": 38985, "ayurveda": 38986, "advertised": 38987, "crocs": 38988, "ittles": 38989, "bryson": 38990, "bei": 38991, "njpw": 38992, "honoree": 38993, "fused": 38994, "ðŁĶĺ": 38995, "multin": 38996, "naga": 38997, "departs": 38998, "kop": 38999, "kino": 39000, "jharkhand": 39001, "edna": 39002, "axle": 39003, "milton": 39004, "supremacist": 39005, "marrakech": 39006, "dominic": 39007, "transcript": 39008, "][#": 39009, ":).": 39010, "woc": 39011, "surrounds": 39012, "ogil": 39013, "leaflets": 39014, "cowell": 39015, "whew": 39016, "trude": 39017, "prolifer": 39018, "succes": 39019, "sportsman": 39020, "condom": 39021, "poche": 39022, "kup": 39023, "imprisonment": 39024, "{}": 39025, "scrambled": 39026, "åĽ": 39027, "kaine": 39028, "cellphone": 39029, "metamor": 39030, "coni": 39031, "remnants": 39032, "eez": 39033, "downpour": 39034, "afternoon": 39035, "exercising": 39036, "berser": 39037, "architecture": 39038, "wicklow": 39039, "mns": 39040, "isp": 39041, "boc": 39042, "niss": 39043, "mnwild": 39044, "stumble": 39045, "rsi": 39046, "luffy": 39047, "silen": 39048, "ddad": 39049, "bullies": 39050, "hawker": 39051, "bbcc": 39052, "scuba": 39053, "epp": 39054, "quets": 39055, "foraging": 39056, "pallet": 39057, "hadi": 39058, "cinematographer": 39059, "catchers": 39060, "toaster": 39061, "khi": 39062, "litecoin": 39063, "kidlit": 39064, "amherst": 39065, "mauricio": 39066, "ipad": 39067, "marmalade": 39068, "fey": 39069, "donnelly": 39070, "gto": 39071, "estas": 39072, "cerebral": 39073, "antgrasso": 39074, "zzled": 39075, "virgil": 39076, "swapped": 39077, "ðŁĺħðŁĺħ": 39078, "nodapl": 39079, "greatest": 39080, "nhlbruins": 39081, "fraser": 39082, "bmo": 39083, "anew": 39084, ".âĿ¤ï¸ı": 39085, "segregation": 39086, "remarkably": 39087, "mccormick": 39088, "logger": 39089, "eras": 39090, "contracting": 39091, "âłĢâłĢ": 39092, "yorks": 39093, "ukulele": 39094, "touchscreen": 39095, "decked": 39096, "benn": 39097, "southwark": 39098, "ravin": 39099, "numis": 39100, "ð٤Ļ": 39101, "rut": 39102, "greco": 39103, "ethic": 39104, "redneck": 39105, "arr": 39106, "tcs": 39107, "ihri": 39108, "ðŁĩ«ðŁĩ·": 39109, "lk": 39110, "inherited": 39111, "zyk": 39112, "viaduct": 39113, "martyred": 39114, "higu": 39115, "ssn": 39116, "bein": 39117, "streetstyle": 39118, "fergie": 39119, "bankof": 39120, "æĹ¥": 39121, "stakeholder": 39122, "exemplary": 39123, "cress": 39124, "essa": 39125, "erotica": 39126, "intrepid": 39127, "gomes": 39128, "braun": 39129, "bethany": 39130, "bangtan": 39131, "pulmonary": 39132, "milling": 39133, "doctorate": 39134, "trumprussia": 39135, "र": 39136, "sani": 39137, "blatt": 39138, "plau": 39139, "deprived": 39140, "tle": 39141, "fully": 39142, "bourn": 39143, "stak": 39144, "lufthansa": 39145, "kiosk": 39146, "faroo": 39147, 
"defy": 39148, "badan": 39149, "ðŁĺĺâĿ¤ï¸ı": 39150, "ritz": 39151, "trisha": 39152, "rands": 39153, "middlesex": 39154, "arabs": 39155, "proj": 39156, "sportscenter": 39157, "repeats": 39158, "ivf": 39159, "bleedblue": 39160, "assure": 39161, "obs": 39162, "territorial": 39163, "elen": 39164, "beverley": 39165, "annah": 39166, "âĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ı": 39167, "zl": 39168, "forgood": 39169, "sciencefiction": 39170, "glau": 39171, "sonya": 39172, "prith": 39173, "stweets": 39174, "mixers": 39175, "mario": 39176, "antelope": 39177, "writingcommunity": 39178, "wentz": 39179, "denham": 39180, "bedi": 39181, "sfo": 39182, "harleydavidson": 39183, "lookbook": 39184, "immunotherapy": 39185, "orphe": 39186, "esville": 39187, "edged": 39188, "task": 39189, "sbball": 39190, "corrosion": 39191, "kilometers": 39192, "costing": 39193, "playback": 39194, "keke": 39195, "divisi": 39196, "uter": 39197, "relocation": 39198, "yelled": 39199, "peng": 39200, "upbeat": 39201, "serve": 39202, "âļł": 39203, "halen": 39204, "stirring": 39205, "rehman": 39206, "env": 39207, "schumacher": 39208, "fragment": 39209, "alkaline": 39210, "sbk": 39211, "resili": 39212, "sharepoint": 39213, "rollover": 39214, "trash": 39215, "counterpart": 39216, "âĻ«": 39217, "obitu": 39218, "à½": 39219, "ãĤ¹": 39220, "mulberry": 39221, "ðŁİĨ": 39222, "autonomy": 39223, "spraying": 39224, "natl": 39225, "loveyou": 39226, "franki": 39227, "nuk": 39228, "escar": 39229, "canteen": 39230, "alibaba": 39231, "deplor": 39232, "molecule": 39233, "pud": 39234, "fortnight": 39235, "blondie": 39236, "sphin": 39237, "portrayal": 39238, "tache": 39239, "bute": 39240, "consisting": 39241, "freepalestine": 39242, "csp": 39243, "immort": 39244, "dns": 39245, "ðŁĴ¥ðŁĴ¥": 39246, "tourde": 39247, "cooking": 39248, "archival": 39249, "gathers": 39250, "bitt": 39251, "banc": 39252, "premature": 39253, "snowball": 39254, "poetryday": 39255, "loudly": 39256, "fugitive": 39257, "eday": 39258, "emra": 39259, "ðŁĩ¸ðŁĩª": 39260, "scien": 39261, "nodejs": 39262, "jurgen": 39263, "jeong": 39264, "bandana": 39265, "unis": 39266, "foxsports": 39267, "vandy": 39268, "provisions": 39269, "weep": 39270, "tuk": 39271, "iko": 39272, "houn": 39273, "ziggy": 39274, "zr": 39275, "fillet": 39276, "bata": 39277, "tink": 39278, "cone": 39279, "wewant": 39280, "kilo": 39281, "horace": 39282, "slt": 39283, "sct": 39284, "staytuned": 39285, "victoria": 39286, "umbria": 39287, "attacker": 39288, "inghamshire": 39289, "frightening": 39290, "noir": 39291, "frat": 39292, "contempt": 39293, "liaison": 39294, "hoi": 39295, "brink": 39296, "trill": 39297, "niagar": 39298, "kickass": 39299, "dundas": 39300, "notmy": 39301, "rhode": 39302, "bumble": 39303, "noxi": 39304, "fag": 39305, "spectators": 39306, "mancrushmonday": 39307, "jinping": 39308, "distract": 39309, "daisy": 39310, "walden": 39311, "portrait": 39312, "arthistory": 39313, "voltron": 39314, "evel": 39315, "isc": 39316, "acm": 39317, "rite": 39318, "nao": 39319, "deported": 39320, "sweats": 39321, "rufus": 39322, "lobo": 39323, "laborday": 39324, "gamo": 39325, "ihrithik": 39326, "blit": 39327, "abdominal": 39328, "ãħ¤ãħ¤ãħ¤ãħ¤": 39329, "iit": 39330, "eq": 39331, "busy": 39332, "alluarjun": 39333, "undisclosed": 39334, "deton": 39335, "procreate": 39336, "kil": 39337, "ðŁİĤðŁİĤ": 39338, "mitchell": 39339, "kii": 39340, "inheritance": 39341, "alp": 39342, "joburg": 39343, "patrolling": 39344, "compulsory": 39345, "unsigned": 39346, "niam": 39347, "lga": 39348, "eshopsuk": 39349, "trilli": 39350, "maw": 39351, "appreciating": 
39352, "rockab": 39353, "mañana": 39354, "antal": 39355, "malvern": 39356, "royo": 39357, "grandprix": 39358, "sutton": 39359, "goftheday": 39360, "digi": 39361, "ãħĭãħĭãħĭãħĭ": 39362, "tles": 39363, "varanasi": 39364, "erected": 39365, "disciples": 39366, "contact": 39367, "ðŁĺµ": 39368, "lid": 39369, "â¬ĩ": 39370, "scentre": 39371, "radiator": 39372, "ingtips": 39373, "transitions": 39374, "thursdaymotivation": 39375, "chemical": 39376, "separati": 39377, "salis": 39378, "mim": 39379, "geographical": 39380, "bookfest": 39381, "/.": 39382, "âľĭ": 39383, "vae": 39384, "currie": 39385, "aggarwal": 39386, "acceleration": 39387, "theses": 39388, "lgm": 39389, "umass": 39390, "proportions": 39391, "nata": 39392, "anians": 39393, "kuch": 39394, "beacons": 39395, "apr": 39396, "@#": 39397, "ðŁĴªðŁı¾": 39398, "nuke": 39399, "sheraton": 39400, "kio": 39401, "makati": 39402, "politico": 39403, "morale": 39404, "ìĻ": 39405, "economically": 39406, "ggly": 39407, "ssen": 39408, "pastries": 39409, "internships": 39410, "vicente": 39411, "fantaken": 39412, "avengers": 39413, "accuse": 39414, "sleepover": 39415, "indicated": 39416, "thedream": 39417, "sterone": 39418, "renders": 39419, "frost": 39420, "oui": 39421, "gregg": 39422, "dore": 39423, "⾨⾨⾨": 39424, "pugs": 39425, "saty": 39426, "numb": 39427, "hemsworth": 39428, "tami": 39429, "lassic": 39430, "schiff": 39431, "iglesias": 39432, "agawa": 39433, "]\"": 39434, "reshi": 39435, "gamestop": 39436, "divorced": 39437, "theater": 39438, "claudi": 39439, "unconventional": 39440, "prophets": 39441, "acin": 39442, "twelf": 39443, "towering": 39444, "tml": 39445, "sclerosis": 39446, "kwan": 39447, "gets": 39448, "disturb": 39449, "naira": 39450, "energ": 39451, "piracy": 39452, "pruitt": 39453, "notified": 39454, "henna": 39455, "bram": 39456, "groundwater": 39457, "bls": 39458, "optimis": 39459, "$)": 39460, "lucie": 39461, "bizhour": 39462, "fangirling": 39463, "grills": 39464, "orl": 39465, "verse": 39466, "cina": 39467, "lawless": 39468, "artistsontwitter": 39469, "televised": 39470, "marshmallows": 39471, "radiohead": 39472, "barr": 39473, "mfc": 39474, "brevi": 39475, "mmorpg": 39476, "gaya": 39477, "âĸ«": 39478, "subtitles": 39479, "jt": 39480, "disneyland": 39481, "tobago": 39482, "nhm": 39483, "groove": 39484, "fiawec": 39485, "\"/": 39486, "bao": 39487, "scrabble": 39488, "omni": 39489, "ffl": 39490, "umc": 39491, "simba": 39492, "alier": 39493, "terrell": 39494, "plume": 39495, "midi": 39496, "dignit": 39497, "coc": 39498, "brut": 39499, "adata": 39500, "alchemy": 39501, "dsm": 39502, "ðŁĺĨðŁĺĨ": 39503, "wintry": 39504, "spares": 39505, "cuer": 39506, "conclusions": 39507, "toys": 39508, "odor": 39509, "flann": 39510, "garvey": 39511, "scriptions": 39512, "inspections": 39513, "catap": 39514, "anglo": 39515, "stlouis": 39516, "heimer": 39517, "atay": 39518, "trich": 39519, "enyc": 39520, "childs": 39521, "ventil": 39522, "montp": 39523, "guillermo": 39524, "circulare": 39525, "zell": 39526, "modeled": 39527, "craftsman": 39528, "alina": 39529, "stimulation": 39530, "cashew": 39531, "judas": 39532, "bestof": 39533, "toire": 39534, "suspends": 39535, "scollege": 39536, "realising": 39537, "bytes": 39538, "bloods": 39539, "assi": 39540, "ðŁĴ¿": 39541, "ohs": 39542, "ðŁįĭ": 39543, "scallop": 39544, "व": 39545, "gifting": 39546, "camogie": 39547, "wilkes": 39548, "ozzy": 39549, "ðŁ¤¤": 39550, "veronic": 39551, "savoy": 39552, "demetri": 39553, "babygirl": 39554, "ðŁĺįðŁĺŃ": 39555, "sox": 39556, "clyde": 39557, "inductee": 39558, "countdown": 39559, 
"selfcare": 39560, "à¤ľ": 39561, "vika": 39562, "torre": 39563, "phdchat": 39564, "pears": 39565, "awh": 39566, "suffrage": 39567, "lesn": 39568, "admiration": 39569, "mpp": 39570, "sharkweek": 39571, "schulz": 39572, "santorini": 39573, "clover": 39574, "(*": 39575, "strasbourg": 39576, "exiting": 39577, "soyu": 39578, "fingerprint": 39579, "chea": 39580, "ãĢľ": 39581, "vindic": 39582, "songwriters": 39583, "soa": 39584, "prouder": 39585, "nama": 39586, "=))": 39587, "simplest": 39588, "deliciously": 39589, "gilles": 39590, "uq": 39591, "mnwx": 39592, "epp": 39593, "shun": 39594, "kennel": 39595, "fallon": 39596, "ðŁIJ£": 39597, "sind": 39598, "tragically": 39599, "outes": 39600, "modernism": 39601, "coke": 39602, "gyn": 39603, "spion": 39604, "âĺ¹ï¸ı": 39605, "leam": 39606, "compressor": 39607, "apologise": 39608, "twentyon": 39609, "fanatics": 39610, "âĻ»": 39611, "scotsman": 39612, "sawa": 39613, "kou": 39614, "aser": 39615, "à¸ļ": 39616, "welterweight": 39617, "phenom": 39618, "twickenham": 39619, "stria": 39620, "pout": 39621, "kaz": 39622, "giam": 39623, "cdp": 39624, "hoy": 39625, "employ": 39626, "redmond": 39627, "à¸Ħà¸": 39628, "smere": 39629, "trancefamily": 39630, "protocols": 39631, "piece": 39632, "luiz": 39633, "iteracy": 39634, "carls": 39635, "unitedstates": 39636, "harmed": 39637, "phdlife": 39638, "chaw": 39639, "footprints": 39640, "lé": 39641, "choker": 39642, "zana": 39643, "slipper": 39644, "ericsson": 39645, "insulting": 39646, "artichoke": 39647, "advising": 39648, "acquisitions": 39649, "opor": 39650, "mutations": 39651, "rear": 39652, "à¥ģ": 39653, "podcast": 39654, "wither": 39655, "kung": 39656, "íĺ¸": 39657, "winslow": 39658, "diapers": 39659, "ðŁĵ¸@": 39660, "ecker": 39661, "collar": 39662, "huey": 39663, "giro": 39664, "monogram": 39665, "kasich": 39666, "siveness": 39667, "malaysi": 39668, "aromatic": 39669, "gres": 39670, "galileo": 39671, "uji": 39672, "robb": 39673, "drm": 39674, "nonetheless": 39675, "asa": 39676, ":>": 39677, "loa": 39678, "lnp": 39679, "atwork": 39680, "agt": 39681, "lakshmi": 39682, "pipelines": 39683, "idal": 39684, "strel": 39685, "reall": 39686, "chainz": 39687, "stonewall": 39688, "sansk": 39689, "ðŁı´": 39690, "piedmont": 39691, "hostess": 39692, "ciu": 39693, "té": 39694, "analyses": 39695, "wilhelm": 39696, "scotty": 39697, "rwby": 39698, "mosquit": 39699, "usemb": 39700, "quins": 39701, "ðŁijİ": 39702, "tucker": 39703, "sconf": 39704, "specifications": 39705, "psychiatry": 39706, "brookes": 39707, "sils": 39708, "olaf": 39709, "deto": 39710, "codi": 39711, "clip": 39712, "filth": 39713, "womancrushwednesday": 39714, "goto": 39715, "angerous": 39716, "beale": 39717, "wtc": 39718, "panelist": 39719, "nex": 39720, "larsen": 39721, "emilio": 39722, "tableau": 39723, "hitters": 39724, "conceived": 39725, "americani": 39726, "ortega": 39727, "mardi": 39728, "Ñĥ": 39729, "paintball": 39730, "thirsty": 39731, "newyorker": 39732, "etisation": 39733, "goss": 39734, "weaker": 39735, "ugh": 39736, "troll": 39737, "harga": 39738, "dual": 39739, "ghtning": 39740, "atine": 39741, "ðŁĺİðŁĺİðŁĺİ": 39742, "cookout": 39743, "pyrenees": 39744, "poss": 39745, "authentication": 39746, "sportswear": 39747, "yunho": 39748, "kiro": 39749, "archipel": 39750, "shenko": 39751, "render": 39752, "novation": 39753, "divinity": 39754, "ðŁij£": 39755, "sufi": 39756, "humbling": 39757, "geopol": 39758, "devotees": 39759, "waitress": 39760, "trough": 39761, "pyro": 39762, "iba": 39763, "bling": 39764, "graf": 39765, "epilots": 39766, "btr": 39767, "oftball": 
39768, "basking": 39769, "dominos": 39770, "soom": 39771, "rath": 39772, "sheryl": 39773, "quel": 39774, "astronomical": 39775, "weld": 39776, "tracklist": 39777, "signee": 39778, "sleepless": 39779, "comman": 39780, "chron": 39781, "summon": 39782, "puremichigan": 39783, "crispr": 39784, "slip": 39785, "lagi": 39786, "raq": 39787, "umu": 39788, "thalap": 39789, "charmed": 39790, "scrump": 39791, "quadcopter": 39792, "skip": 39793, "petersen": 39794, "muni": 39795, "ðŁĮ¾": 39796, "monaghan": 39797, "trays": 39798, "icked": 39799, "canadaday": 39800, "tegr": 39801, "�": 39802, "hotness": 39803, "heavymetal": 39804, "abar": 39805, "gopdebate": 39806, "azul": 39807, "spiderman": 39808, "sunflowers": 39809, "ľë": 39810, "webcomics": 39811, "bard": 39812, "в": 39813, "nicholas": 39814, "slush": 39815, "raman": 39816, "markham": 39817, "fficial": 39818, "ffler": 39819, "íĬ¸": 39820, "pless": 39821, "anushka": 39822, "toto": 39823, "skaters": 39824, "prowrestling": 39825, "competes": 39826, "ayala": 39827, "mystery": 39828, "thrills": 39829, "mpg": 39830, "independently": 39831, "yul": 39832, "imperative": 39833, "formidable": 39834, "tireless": 39835, "stacking": 39836, "tongues": 39837, "maltese": 39838, "potts": 39839, "matti": 39840, "charting": 39841, "chillout": 39842, "supernova": 39843, "omeo": 39844, "skysports": 39845, "nutty": 39846, "ðŁĹĵï¸ı": 39847, "rohan": 39848, "inspired": 39849, "concierge": 39850, "serra": 39851, "makk": 39852, "galat": 39853, "chipp": 39854, "yev": 39855, "ì£": 39856, "reimbur": 39857, "opul": 39858, "kimberley": 39859, "ieee": 39860, "bremen": 39861, "chitec": 39862, "orin": 39863, "naku": 39864, "bonkers": 39865, "footy": 39866, "emergence": 39867, "ðŁĨĺ": 39868, "stip": 39869, "sergei": 39870, "zoey": 39871, "aime": 39872, "would": 39873, "dyes": 39874, "destiny": 39875, "vinaigrette": 39876, "drier": 39877, "circulareconomy": 39878, "anarchi": 39879, "ssr": 39880, "schel": 39881, "ciner": 39882, "groom": 39883, "determining": 39884, "garmin": 39885, "calais": 39886, "incarceration": 39887, "bukit": 39888, "noi": 39889, "chelmsford": 39890, "mckinley": 39891, "chipped": 39892, "belonged": 39893, "tumors": 39894, "stroud": 39895, "mii": 39896, "influenza": 39897, "wwenxt": 39898, "tundra": 39899, "telecommunications": 39900, "catsofinstagram": 39901, "tages": 39902, "beatty": 39903, "odu": 39904, "mlkday": 39905, "ooper": 39906, "dangle": 39907, "akley": 39908, "crumb": 39909, "antigua": 39910, "timbers": 39911, "rouhani": 39912, "ðŁĴªðŁĴªðŁĴª": 39913, "hafi": 39914, "...!!": 39915, "wcs": 39916, "coop": 39917, "snc": 39918, "litres": 39919, "ãĢĬ": 39920, "haz": 39921, "coz": 39922, "kant": 39923, "greenfield": 39924, "curti": 39925, "yale": 39926, "flyeagles": 39927, "whatsoever": 39928, "worthing": 39929, "roulette": 39930, "flyeaglesfly": 39931, "unda": 39932, "ainted": 39933, "standing": 39934, "luscious": 39935, "hpc": 39936, "efficacy": 39937, "ashland": 39938, "meghan": 39939, "kywx": 39940, "npr": 39941, "bathtub": 39942, "acos": 39943, "hani": 39944, "marcor": 39945, "mantis": 39946, "daisi": 39947, "boba": 39948, "abbie": 39949, "mutil": 39950, "vial": 39951, "spyder": 39952, "poz": 39953, "gti": 39954, "elfie": 39955, "nightw": 39956, "metroid": 39957, "antoni": 39958, "maddie": 39959, "dhry": 39960, "darlings": 39961, "tends": 39962, "taekwondo": 39963, "atlanta": 39964, "meow": 39965, "chloe": 39966, "ãĥİ": 39967, "ymes": 39968, "siberia": 39969, "kcon": 39970, "gues": 39971, "mariner": 39972, "facil": 39973, "azzle": 39974, "[...": 39975, 
"hannover": 39976, "bavaria": 39977, "virgo": 39978, "teuk": 39979, "usps": 39980, ")#": 39981, "walla": 39982, "sampson": 39983, "needless": 39984, "verbally": 39985, "hayley": 39986, "bowled": 39987, "pius": 39988, "lampard": 39989, "hamstring": 39990, "volvo": 39991, "roadsafety": 39992, "choking": 39993, "sorbet": 39994, "ahem": 39995, "healthyfood": 39996, "braided": 39997, "horticulture": 39998, "crative": 39999, "cheek": 40000, "addo": 40001, "theforce": 40002, "koko": 40003, "schizoph": 40004, "jie": 40005, "wada": 40006, "twentyonepilots": 40007, "hbcu": 40008, "proton": 40009, "pauls": 40010, "louisa": 40011, "latam": 40012, "kyrgy": 40013, "compac": 40014, "sdk": 40015, "sapi": 40016, "???": 40017, "liberalism": 40018, "epsilon": 40019, "aiden": 40020, "wusa": 40021, "sprayed": 40022, "basketball": 40023, "kimono": 40024, "bluewave": 40025, "alias": 40026, "ë§Ī": 40027, "mugshot": 40028, "cec": 40029, "dogre": 40030, "adora": 40031, "ðŁĵ·@": 40032, "krakow": 40033, "intrigued": 40034, "exhausting": 40035, "astronomer": 40036, "venison": 40037, "ladybug": 40038, "civ": 40039, "brae": 40040, "usm": 40041, "bribe": 40042, "acupuncture": 40043, "pembroke": 40044, "keating": 40045, "chie": 40046, "yad": 40047, "tsi": 40048, "smi": 40049, "seeding": 40050, "gateshead": 40051, "lisboa": 40052, "gyp": 40053, "canvass": 40054, "ðŁĶ´âļªï¸ı": 40055, "opi": 40056, "nir": 40057, "societal": 40058, "lyte": 40059, "aties": 40060, "csm": 40061, "artery": 40062, "alin": 40063, "akapoor": 40064, "abstracts": 40065, "â̦â̦": 40066, "teenwolf": 40067, "newe": 40068, "travelgram": 40069, "sentimental": 40070, "perched": 40071, "handel": 40072, "hoek": 40073, "fay": 40074, "coordinating": 40075, "animate": 40076, "manian": 40077, "effort": 40078, "jerky": 40079, "fck": 40080, "adrienne": 40081, "mably": 40082, "trading": 40083, "myel": 40084, "spiro": 40085, "sola": 40086, "storing": 40087, "overdrive": 40088, "mondaymorning": 40089, "dreamteam": 40090, "pulse": 40091, "bondi": 40092, "bernie": 40093, "pgatour": 40094, "tripoli": 40095, "sonam": 40096, "platt": 40097, "âļ¡": 40098, "agroup": 40099, "îIJĴ": 40100, "invading": 40101, "vcu": 40102, "kell": 40103, "ños": 40104, "undead": 40105, "podcasting": 40106, "mercedesam": 40107, "manafort": 40108, "cortex": 40109, "queso": 40110, "impeccable": 40111, "palmer": 40112, "wildoz": 40113, "sportsc": 40114, "guacamole": 40115, "dispenser": 40116, "categori": 40117, "stunts": 40118, "peril": 40119, "invitations": 40120, "dunedin": 40121, "xie": 40122, "achieves": 40123, "safer": 40124, "preds": 40125, "phan": 40126, "knuckles": 40127, "kak": 40128, "ignores": 40129, "lovemyjob": 40130, "aruba": 40131, "oundation": 40132, "datacenter": 40133, "covert": 40134, "gring": 40135, "couple": 40136, "ار": 40137, "voli": 40138, "mccle": 40139, "artisans": 40140, "ludo": 40141, "kalam": 40142, "aroma": 40143, "undertaker": 40144, "hula": 40145, "wizkid": 40146, "gumb": 40147, "godfrey": 40148, "bakersfield": 40149, "kern": 40150, "engineer": 40151, "carve": 40152, "palin": 40153, "guarantees": 40154, "pebbles": 40155, "bays": 40156, "zieg": 40157, "fink": 40158, "â¬ĩï¸ıâ¬ĩï¸ı": 40159, "downpours": 40160, "rochelle": 40161, "raspberry": 40162, "ðŁĺ®": 40163, "graphies": 40164, "stomp": 40165, "cafes": 40166, "arized": 40167, "uttar": 40168, "calvary": 40169, "drie": 40170, "crusader": 40171, "busan": 40172, "tuxedo": 40173, "siu": 40174, "seamus": 40175, "cultured": 40176, "blanchard": 40177, "townhouse": 40178, "gered": 40179, "buttermilk": 40180, "fluctu": 40181, 
"rogerfederer": 40182, "heli": 40183, "ð٦ĥ": 40184, "uous": 40185, "ramesh": 40186, "muppets": 40187, "emailmarketing": 40188, "yess": 40189, "brice": 40190, "rizio": 40191, "pelo": 40192, "donneinarte": 40193, "urable": 40194, "investin": 40195, "bumping": 40196, "rajiv": 40197, "sava": 40198, "thrower": 40199, "forex": 40200, "ohhhh": 40201, "thrust": 40202, "pullman": 40203, "rfid": 40204, "sepsis": 40205, "leed": 40206, "fright": 40207, "rounding": 40208, "neb": 40209, "phins": 40210, "aisha": 40211, "utilizing": 40212, "squats": 40213, "goldsmith": 40214, "jic": 40215, "boks": 40216, "vaus": 40217, "ipo": 40218, "exclusion": 40219, "tariff": 40220, "pokes": 40221, "minal": 40222, "lands": 40223, "enforce": 40224, "washingtondc": 40225, "orchar": 40226, "gx": 40227, "marys": 40228, "eyour": 40229, "aussie": 40230, "bakers": 40231, "unpopular": 40232, "latinos": 40233, "large": 40234, "putnam": 40235, "bolo": 40236, "wade": 40237, "pelo": 40238, "dizz": 40239, "obstruction": 40240, "flappy": 40241, "wearethe": 40242, "dependence": 40243, "pajama": 40244, "ete": 40245, "yann": 40246, "ewan": 40247, "discla": 40248, "aay": 40249, "karina": 40250, "eic": 40251, "antrim": 40252, "wsoc": 40253, "negatively": 40254, "kaido": 40255, "fotografia": 40256, "dhru": 40257, "colossal": 40258, "mcleod": 40259, "kwang": 40260, "manipu": 40261, "exhilar": 40262, "usatoday": 40263, "summerslam": 40264, "coles": 40265, "taproom": 40266, "unbeatable": 40267, "dema": 40268, "ticks": 40269, "kling": 40270, "fils": 40271, "campaigners": 40272, "à¸ķ": 40273, "brewster": 40274, "audubon": 40275, "quay": 40276, "chs": 40277, "kigali": 40278, "dler": 40279, "strengthens": 40280, "somal": 40281, "signingday": 40282, "golds": 40283, "pigment": 40284, "orchestral": 40285, "gq": 40286, "linkin": 40287, "ðŁıĩ": 40288, "taw": 40289, "algarve": 40290, "hov": 40291, "earle": 40292, "goldfish": 40293, "amig": 40294, "exer": 40295, "benin": 40296, "druid": 40297, "ðŁIJ¸": 40298, "shem": 40299, "quattro": 40300, "mercen": 40301, "mente": 40302, "incorporating": 40303, "bonanza": 40304, "statefair": 40305, "ende": 40306, "conceptions": 40307, "ees": 40308, "âĻ¥ï¸ıâĻ¥ï¸ı": 40309, "dson": 40310, "firearm": 40311, "orbital": 40312, "weh": 40313, "multip": 40314, "fob": 40315, "requiem": 40316, "plight": 40317, "thouse": 40318, "said": 40319, "ocre": 40320, "remembrance": 40321, "nold": 40322, "chipping": 40323, "bev": 40324, "ert": 40325, "cathy": 40326, "sym": 40327, "riggs": 40328, "mley": 40329, "dialogues": 40330, "slender": 40331, "howl": 40332, "gauteng": 40333, "wdw": 40334, "tobi": 40335, "smokes": 40336, "implo": 40337, "bpm": 40338, "adn": 40339, "mombasa": 40340, "capsul": 40341, "bloomfield": 40342, "articul": 40343, "cleo": 40344, "googled": 40345, "fluffy": 40346, "lard": 40347, "enzyme": 40348, "vesti": 40349, "ibrahi": 40350, "flame": 40351, "emea": 40352, "outages": 40353, "dispropor": 40354, "bleak": 40355, "ansel": 40356, "icker": 40357, "stlouis": 40358, "stockmarket": 40359, "goodfriday": 40360, "sault": 40361, "stalled": 40362, "prom": 40363, "epsom": 40364, "bé": 40365, "these": 40366, "sauces": 40367, "mew": 40368, "litfest": 40369, "pred": 40370, "reu": 40371, "karak": 40372, "sienna": 40373, "ellin": 40374, "biotechnology": 40375, "ï¸ıâĥ£-": 40376, "tactic": 40377, "sain": 40378, "pork": 40379, "monza": 40380, "kaj": 40381, "lush": 40382, "compartment": 40383, "changing": 40384, "shraddhakapoor": 40385, "foal": 40386, "artem": 40387, "cuando": 40388, "canola": 40389, "oriente": 40390, "messe": 40391, 
"dited": 40392, "brc": 40393, "boxer": 40394, "bbctwo": 40395, "sst": 40396, "mentday": 40397, "eming": 40398, "dewey": 40399, "kofi": 40400, "âŀĸâŀĸâŀĸâŀĸ": 40401, "realization": 40402, "smol": 40403, "twood": 40404, "sanje": 40405, "flagstaff": 40406, "berwick": 40407, "corset": 40408, "canary": 40409, "whistleblower": 40410, "etched": 40411, "composing": 40412, "squeezed": 40413, "bower": 40414, "autodesk": 40415, "neh": 40416, "mathieu": 40417, "baja": 40418, "ÅĤ": 40419, "hydra": 40420, "daim": 40421, "ameri": 40422, "insisted": 40423, "merlot": 40424, "garros": 40425, "heartnews": 40426, "gainesville": 40427, "cutler": 40428, "bode": 40429, "ðŁĺīðŁĺī": 40430, "lewes": 40431, "scountry": 40432, "gsa": 40433, "usu": 40434, "ccm": 40435, "godawgs": 40436, "pharaoh": 40437, "crae": 40438, "morley": 40439, "hypnoti": 40440, "fades": 40441, "neurons": 40442, "fuzz": 40443, "ingco": 40444, "highlanders": 40445, "stark": 40446, "vigne": 40447, "packets": 40448, "amarillo": 40449, "reuben": 40450, "insults": 40451, "basic": 40452, "vector": 40453, "nme": 40454, "acruz": 40455, "tros": 40456, "transmitter": 40457, "ðŁĺŀ": 40458, "interpret": 40459, "ðŁĺ²": 40460, "prequel": 40461, "mcgowan": 40462, "dissemin": 40463, "ðŁĴĺðŁĴĺ": 40464, "masculinity": 40465, "indiegamedev": 40466, "alive": 40467, "tet": 40468, "petal": 40469, "emailed": 40470, "armed": 40471, "koo": 40472, "heer": 40473, "baird": 40474, "superjunior": 40475, "metropolis": 40476, "delavin": 40477, "declines": 40478, "stitutes": 40479, "Ûģ": 40480, "ptbo": 40481, "glan": 40482, "chores": 40483, "ealing": 40484, "chrissy": 40485, "stemc": 40486, "vian": 40487, "assassinated": 40488, "pronounce": 40489, "illegals": 40490, "discovery": 40491, "cavill": 40492, "frifotos": 40493, "fal": 40494, "soi": 40495, "sabotage": 40496, "tint": 40497, "pdc": 40498, "ðŁİīðŁİĪ": 40499, "ãĤĬãģ": 40500, "jio": 40501, "endeavor": 40502, "insig": 40503, "committees": 40504, "shearer": 40505, "metz": 40506, "marrying": 40507, "hdd": 40508, "gby": 40509, "fret": 40510, "trish": 40511, "pul": 40512, "scripted": 40513, "saki": 40514, "lw": 40515, "keye": 40516, "shimi": 40517, "nanaimo": 40518, "cah": 40519, "ë": 40520, "tempered": 40521, "ician": 40522, "dugg": 40523, "dishwasher": 40524, "airfield": 40525, "srugby": 40526, "grinch": 40527, "yst": 40528, "rms": 40529, "mahatma": 40530, "lankan": 40531, "discar": 40532, "digestion": 40533, "nodes": 40534, "lls": 40535, "omic": 40536, "gutter": 40537, "tisgarh": 40538, "federico": 40539, "electionday": 40540, "bohe": 40541, "mastercard": 40542, "fireball": 40543, "âľĶï¸ı": 40544, "oyster": 40545, "pong": 40546, "dok": 40547, "enroute": 40548, "mvc": 40549, "beatthe": 40550, "alistair": 40551, "shub": 40552, "shaming": 40553, "chernobyl": 40554, "ghibli": 40555, "thes": 40556, "pinion": 40557, "dbs": 40558, "salts": 40559, "iction": 40560, "epiph": 40561, "ncpol": 40562, "inconvenience": 40563, "whitley": 40564, "inspecting": 40565, "woodley": 40566, "wiener": 40567, "skillet": 40568, "noles": 40569, "mca": 40570, "hina": 40571, "asha": 40572, "willingness": 40573, "wellness": 40574, "tamed": 40575, "showtime": 40576, "disadvantaged": 40577, "bernat": 40578, "usn": 40579, "missionaries": 40580, "counselling": 40581, "arrogant": 40582, "quantitative": 40583, "legalization": 40584, "hodge": 40585, "energyefficiency": 40586, "camerondallas": 40587, "possessions": 40588, "pbb": 40589, "harrisburg": 40590, "vg": 40591, "hinduism": 40592, "happythanksgiving": 40593, "fib": 40594, "reacting": 40595, 
"tweetapicture": 40596, "politi": 40597, "muppet": 40598, "hurrah": 40599, "pace": 40600, "coastguard": 40601, "guarded": 40602, "asam": 40603, "parry": 40604, "forevery": 40605, "xq": 40606, "oomf": 40607, "keanu": 40608, "jind": 40609, "rist": 40610, "customerservice": 40611, "sacred": 40612, "ðŁĺº": 40613, "toner": 40614, "occurrence": 40615, "matu": 40616, "valdez": 40617, "redd": 40618, "isak": 40619, "powerrangers": 40620, "peasant": 40621, "rajini": 40622, "abraham": 40623, "emil": 40624, "cardo": 40625, "tril": 40626, "hairstyles": 40627, "obsolete": 40628, "sampler": 40629, "directive": 40630, "delavinkisses": 40631, "verton": 40632, "glos": 40633, "spay": 40634, "palermo": 40635, "comets": 40636, "manziel": 40637, "chicagof": 40638, "skipped": 40639, "pictorial": 40640, "hant": 40641, "bmi": 40642, "aol": 40643, "reopens": 40644, "paddling": 40645, "devos": 40646, "fraud": 40647, "baseline": 40648, "queues": 40649, "spired": 40650, "snare": 40651, "euve": 40652, "descriptions": 40653, "daisies": 40654, "caching": 40655, "galleria": 40656, "trimmed": 40657, "stino": 40658, "recycla": 40659, "icular": 40660, "birken": 40661, "rawlings": 40662, "flix": 40663, "chicas": 40664, "bgt": 40665, "likeli": 40666, "argyll": 40667, "thelove": 40668, "gaston": 40669, "blanca": 40670, "hak": 40671, "fone": 40672, "sailormoon": 40673, "haci": 40674, "imac": 40675, "flyn": 40676, "decan": 40677, "belles": 40678, "apic": 40679, "zog": 40680, "taunton": 40681, "constance": 40682, "lasagna": 40683, "kernel": 40684, "inka": 40685, "harbor": 40686, "collectively": 40687, "calculated": 40688, "aville": 40689, "shilpa": 40690, "purdu": 40691, "gimm": 40692, "funer": 40693, "aest": 40694, "pembrokeshire": 40695, "nightingale": 40696, "nunes": 40697, "hypertension": 40698, "hubert": 40699, "sliders": 40700, "infertility": 40701, "commended": 40702, "transatlantic": 40703, "metrical": 40704, "!!@": 40705, "ÅŁ": 40706, "ssg": 40707, "bacca": 40708, "inverted": 40709, "funfactfriday": 40710, "itans": 40711, "album": 40712, "acquainted": 40713, "rier": 40714, "whelan": 40715, "sarab": 40716, "mue": 40717, "snooze": 40718, "piff": 40719, "agreeing": 40720, "spitting": 40721, "jermaine": 40722, "nye": 40723, "âľıï¸ı": 40724, "ambush": 40725, "zeph": 40726, "congreg": 40727, "university": 40728, "sapp": 40729, "wannabe": 40730, "patrice": 40731, "ibd": 40732, "doglo": 40733, "fridges": 40734, "sund": 40735, "kingston": 40736, "argon": 40737, "kamen": 40738, "hardrock": 40739, "dsley": 40740, "dolores": 40741, "ì°": 40742, "otaku": 40743, "piping": 40744, "behaving": 40745, "âŃIJï¸ıâŃIJï¸ıâŃIJï¸ı": 40746, "bluebird": 40747, "ansari": 40748, "teapot": 40749, "firework": 40750, "crop": 40751, "logans": 40752, "typed": 40753, "thickness": 40754, "igers": 40755, "cfp": 40756, "dysfunctional": 40757, "contrasting": 40758, "etty": 40759, "astonmartin": 40760, "txst": 40761, "dragrace": 40762, "attributes": 40763, "marathon": 40764, "manuscripts": 40765, "johnstone": 40766, "ðŁĺ±ðŁĺ±": 40767, "boer": 40768, "ayu": 40769, "arugula": 40770, "poorest": 40771, "condu": 40772, "assumption": 40773, "anagh": 40774, "noh": 40775, "delavin": 40776, "sitter": 40777, "gö": 40778, "morow": 40779, "kickstart": 40780, "comi": 40781, "glacial": 40782, "ghead": 40783, "bain": 40784, "kershaw": 40785, "endof": 40786, "freud": 40787, "omat": 40788, "iaf": 40789, "hug": 40790, "signup": 40791, "eachother": 40792, "definite": 40793, "tubing": 40794, "shakira": 40795, "ðŁijıðŁı½": 40796, "uuuu": 40797, "swin": 40798, "shambles": 40799, 
"olas": 40800, "skell": 40801, "britain": 40802, "knw": 40803, "clutter": 40804, "omy": 40805, "jens": 40806, "hanged": 40807, "cityscape": 40808, "scraps": 40809, "unlocking": 40810, "deadliest": 40811, "erno": 40812, "breastcancer": 40813, "ait": 40814, "inspect": 40815, "furi": 40816, "ðŁĴĮ": 40817, "kud": 40818, "jule": 40819, "orah": 40820, "mids": 40821, "mdt": 40822, "burgring": 40823, "rattle": 40824, "pusa": 40825, "stalk": 40826, "cleans": 40827, "issance": 40828, "zek": 40829, "worthit": 40830, "nameis": 40831, "muskoka": 40832, "councilman": 40833, "urbanart": 40834, "barrac": 40835, "unsolved": 40836, "tul": 40837, "gita": 40838, "whiteboard": 40839, "soybeans": 40840, "ement": 40841, "conti": 40842, "saturdaymotivation": 40843, "conveniently": 40844, "docking": 40845, "tado": 40846, "âı©": 40847, "spino": 40848, "puppylove": 40849, "pof": 40850, "fabricated": 40851, "robbers": 40852, "adopts": 40853, "tified": 40854, "kkr": 40855, "indulgence": 40856, "noticeable": 40857, "macquarie": 40858, "chapel": 40859, "sensual": 40860, "kiko": 40861, "melanoma": 40862, "loretta": 40863, "liance": 40864, "aben": 40865, "splus": 40866, "gaal": 40867, "acele": 40868, "libdems": 40869, "comparisons": 40870, "ðŁĮµ": 40871, "rhythms": 40872, "mery": 40873, "encapsul": 40874, "napier": 40875, "ðŁijĮðŁijĮðŁijĮ": 40876, "ðŁijIJ": 40877, "platz": 40878, "fresno": 40879, "reformed": 40880, "ranbir": 40881, "elit": 40882, "thebest": 40883, "bhushan": 40884, "vinnie": 40885, "improvised": 40886, "sittin": 40887, "recreated": 40888, "eba": 40889, "ecker": 40890, "acrob": 40891, "ponte": 40892, "cord": 40893, "giddy": 40894, "eurusd": 40895, "fever": 40896, "intuition": 40897, "gari": 40898, "dummies": 40899, "budweiser": 40900, "amendments": 40901, "tetra": 40902, "schnit": 40903, "ayas": 40904, "marys": 40905, "cist": 40906, "kani": 40907, "kermit": 40908, "ðŁĺ±ðŁĺ±ðŁĺ±": 40909, "tinker": 40910, "strolling": 40911, "divisional": 40912, "nigeri": 40913, "ominous": 40914, "menstrual": 40915, "karab": 40916, "khy": 40917, "bwfc": 40918, "panhandle": 40919, "lilli": 40920, "weller": 40921, "strapped": 40922, "sonthe": 40923, "transferring": 40924, "ethereal": 40925, "sneaks": 40926, "rudol": 40927, "gables": 40928, "jacking": 40929, "cincode": 40930, "fortune": 40931, "canadiens": 40932, "confor": 40933, "abnormal": 40934, "franklin": 40935, "tita": 40936, "mula": 40937, "persist": 40938, "cuties": 40939, "kiel": 40940, "ðŁĩ±ðŁĩ": 40941, "hermann": 40942, "awk": 40943, "fiasco": 40944, "koto": 40945, "weta": 40946, "hiker": 40947, "buddy": 40948, "preventive": 40949, "mcgraw": 40950, "gameboy": 40951, "forsyth": 40952, "topshop": 40953, "siob": 40954, "sadh": 40955, "intram": 40956, "followart": 40957, "soaps": 40958, "dragonball": 40959, "oux": 40960, "morrison": 40961, "à¹ĥ": 40962, "lubric": 40963, "adulthood": 40964, "morrisons": 40965, "âļłï¸ı": 40966, "hermo": 40967, "taka": 40968, "stallone": 40969, "misuse": 40970, "teamgb": 40971, "ragha": 40972, "confined": 40973, "aty": 40974, "homophobic": 40975, "nwo": 40976, "skynews": 40977, "hoya": 40978, "acrosse": 40979, "wiiu": 40980, "purée": 40981, "jeddah": 40982, "ðŁ¤§": 40983, "advisers": 40984, "phine": 40985, "anis": 40986, "scrumptious": 40987, "ë°ķ": 40988, "cke": 40989, "viny": 40990, "term": 40991, "sdc": 40992, "odo": 40993, "homeschool": 40994, "vasc": 40995, "leopards": 40996, "deborah": 40997, "illicit": 40998, "curran": 40999, "asroma": 41000, "naught": 41001, "marig": 41002, "brandi": 41003, "emp": 41004, "ðŁĺįðŁijĮ": 41005, "îĮ": 
41006, "suspend": 41007, "luz": 41008, "initiation": 41009, "schaft": 41010, "jensenackles": 41011, "crawler": 41012, "postdoc": 41013, "desks": 41014, "trailblazer": 41015, "denomin": 41016, "trix": 41017, "noise": 41018, "poet": 41019, "±ï¸ı": 41020, "smug": 41021, "volatile": 41022, "proofs": 41023, "pharmacist": 41024, "sardinia": 41025, "mashable": 41026, "kimchi": 41027, "coed": 41028, "schalke": 41029, "doodled": 41030, "csw": 41031, "shur": 41032, "rox": 41033, "dok": 41034, "chrisbrown": 41035, "mathematician": 41036, "abound": 41037, "angelic": 41038, "rockford": 41039, "dole": 41040, "yorkers": 41041, "msn": 41042, "gman": 41043, "xavier": 41044, "borrowing": 41045, "markings": 41046, "longhorn": 41047, "kja": 41048, "diverted": 41049, "mmit": 41050, "euphoria": 41051, "ayyy": 41052, "tea": 41053, "pah": 41054, "cki": 41055, "uncut": 41056, "liven": 41057, "kyung": 41058, "fanart": 41059, "mering": 41060, "redding": 41061, "amovie": 41062, "gridi": 41063, "cthulhu": 41064, "scholarly": 41065, "judah": 41066, "thbewithyou": 41067, "eucalyp": 41068, "ðŁIJķ": 41069, "hertfordshire": 41070, "courtroom": 41071, "byu": 41072, "auctioned": 41073, "please": 41074, "marcia": 41075, "ê°ĵ": 41076, "succeeded": 41077, "elas": 41078, "arvind": 41079, "tlot": 41080, "saigon": 41081, "rett": 41082, "rakesh": 41083, "fdny": 41084, "asen": 41085, "sebring": 41086, "gladiators": 41087, "youknow": 41088, "vlad": 41089, "gola": 41090, "parap": 41091, "ÑĢи": 41092, "sabcnews": 41093, "oneteam": 41094, "ohl": 41095, "sune": 41096, "rij": 41097, "cdc": 41098, "stargate": 41099, "rundown": 41100, "plato": 41101, "phc": 41102, "chatter": 41103, "raviol": 41104, "mnf": 41105, "mandala": 41106, "liet": 41107, "à¸ķ": 41108, "maria": 41109, "hungover": 41110, "consolidation": 41111, "ferrell": 41112, "traditional": 41113, "iloveart": 41114, "galap": 41115, "ðŁıĮ": 41116, "quezon": 41117, "españa": 41118, "ðŁĩ¨ðŁĩŃ": 41119, "hobby": 41120, "steamboat": 41121, "malign": 41122, "guillau": 41123, "prohi": 41124, "itsme": 41125, "íĥĢ": 41126, "inscription": 41127, "alz": 41128, "marian": 41129, "kade": 41130, "mmon": 41131, "adjusting": 41132, "nests": 41133, "internally": 41134, "cir": 41135, "vikram": 41136, "malala": 41137, "kph": 41138, "felicia": 41139, "thereal": 41140, "captivity": 41141, "atis": 41142, "marcorubio": 41143, "kaleido": 41144, "chev": 41145, "manoj": 41146, "lemore": 41147, "gentri": 41148, "vips": 41149, "trope": 41150, "\"âĢĶ": 41151, "pairings": 41152, "malnutrition": 41153, "fray": 41154, "designation": 41155, "brunomars": 41156, "aze": 41157, "torrential": 41158, "panzer": 41159, "gail": 41160, "underthe": 41161, "theological": 41162, "schizophre": 41163, "dazzle": 41164, "frederic": 41165, "mopar": 41166, "adilla": 41167, "soggy": 41168, "raun": 41169, "mediocre": 41170, "colorec": 41171, "ife": 41172, "pinst": 41173, "bluef": 41174, "²": 41175, "worldwater": 41176, "giroud": 41177, "clarinet": 41178, "adolf": 41179, "tarantino": 41180, "receipts": 41181, "assump": 41182, "ðŁijŁ": 41183, "coffees": 41184, "âľĬðŁı¾": 41185, "duplex": 41186, "sof": 41187, "rx": 41188, "lino": 41189, "timberwolves": 41190, "pandit": 41191, "motm": 41192, "ega": 41193, "ayama": 41194, "achs": 41195, "outsider": 41196, "llen": 41197, "coer": 41198, "tilly": 41199, "cheeseburger": 41200, "mads": 41201, "pledis": 41202, "empty": 41203, "nationalparks": 41204, "aziz": 41205, "pmi": 41206, "junkies": 41207, "fener": 41208, "sqn": 41209, "ès": 41210, "generation": 41211, "cleopatra": 41212, "bhubanes": 41213, 
"mosques": 41214, "tyfree": 41215, "poppins": 41216, "twc": 41217, "orwell": 41218, "nage": 41219, "kawhi": 41220, "hollow": 41221, "dalai": 41222, "¨¨¨¨": 41223, "ouro": 41224, "mhealth": 41225, "gion": 41226, "azo": 41227, "visas": 41228, "renegade": 41229, "reic": 41230, "wsop": 41231, "ðŁĴļðŁĴĽ": 41232, "echel": 41233, "toxicity": 41234, "mün": 41235, "bunk": 41236, "stimulating": 41237, "asthour": 41238, "\\'": 41239, "eph": 41240, "endemic": 41241, "cnbc": 41242, "shrinking": 41243, "peabody": 41244, "michelangelo": 41245, "canyon": 41246, "wale": 41247, "sumi": 41248, "siders": 41249, "inuit": 41250, "?.": 41251, "professionalism": 41252, "dracing": 41253, "platoon": 41254, "pons": 41255, "outbound": 41256, "mapleleafs": 41257, "desol": 41258, "cency": 41259, "athan": 41260, "verma": 41261, "rubbing": 41262, "okan": 41263, "ðŁijł": 41264, "mullins": 41265, "authentic": 41266, "Åį": 41267, "almanac": 41268, "gaia": 41269, "bbq": 41270, "onimo": 41271, "keh": 41272, "tya": 41273, "touts": 41274, "yav": 41275, "reposit": 41276, ",.": 41277, "wight": 41278, "seeyou": 41279, "callof": 41280, "donesia": 41281, "bargaining": 41282, "granth": 41283, "sdsu": 41284, "amphitheater": 41285, "psu": 41286, "rewatching": 41287, "winetasting": 41288, "peakdistrict": 41289, "detecting": 41290, "thurman": 41291, "phee": 41292, "èªķ": 41293, "umich": 41294, "rer": 41295, "sculpted": 41296, "gole": 41297, "namesake": 41298, "ðŁĶģ": 41299, "servicing": 41300, "baugh": 41301, "pugh": 41302, "pencil": 41303, "darth": 41304, "munchkin": 41305, "atorium": 41306, "teners": 41307, "suny": 41308, "rollingstones": 41309, "maging": 41310, "starrer": 41311, "idris": 41312, "feinstein": 41313, "agron": 41314, "âĺºï¸ıâĺºï¸ı": 41315, "supervised": 41316, "chameleon": 41317, "aggregate": 41318, "successive": 41319, "mogul": 41320, "instyle": 41321, "poldark": 41322, "custome": 41323, "ohiostate": 41324, "haya": 41325, "cides": 41326, "brokerage": 41327, "angelou": 41328, "fifawwc": 41329, "deforestation": 41330, "alton": 41331, "pamph": 41332, "hugged": 41333, "hobo": 41334, "changeable": 41335, "kuber": 41336, "burroughs": 41337, "demonetisation": 41338, "capecod": 41339, "versatility": 41340, "orice": 41341, "leila": 41342, "womeninscience": 41343, "tua": 41344, "hedges": 41345, "embarrassment": 41346, "alife": 41347, "soars": 41348, "nighter": 41349, "hymn": 41350, "gipp": 41351, "chasu": 41352, "techs": 41353, "niall": 41354, "killa": 41355, "hika": 41356, "camels": 41357, "value": 41358, "¢": 41359, "scoops": 41360, "mahmoud": 41361, "clusive": 41362, "adriana": 41363, "paco": 41364, "ozil": 41365, "unas": 41366, "translations": 41367, "whisperer": 41368, "sbi": 41369, "buxton": 41370, "biotics": 41371, "indiffe": 41372, "kenney": 41373, "klar": 41374, "etching": 41375, "barrabest": 41376, "instability": 41377, "seine": 41378, "votel": 41379, "blogged": 41380, "whiskey": 41381, "myspace": 41382, "tant": 41383, "landia": 41384, "giveback": 41385, "illus": 41386, "awak": 41387, "acab": 41388, "fbloggers": 41389, "cloudcomputing": 41390, "blatant": 41391, "syrians": 41392, "bandra": 41393, "styn": 41394, "anem": 41395, "keted": 41396, "karthik": 41397, "barunsob": 41398, "pinot": 41399, "gubernat": 41400, "gaye": 41401, "artiste": 41402, "ified": 41403, "conventions": 41404, "huan": 41405, "geniuses": 41406, "eeeeee": 41407, "folly": 41408, "somerville": 41409, "pridemonth": 41410, "ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸": 41411, "chemotherapy": 41412, "pauls": 41413, "bakar": 41414, "ìĦ¸ë¸IJ": 41415, "taiwanese": 41416, "follo": 41417, 
"css": 41418, "reign": 41419, "nnnn": 41420, "flaun": 41421, "catastrophe": 41422, "ities": 41423, "fragments": 41424, "extremists": 41425, "ymoun": 41426, "carmen": 41427, "ezekiel": 41428, "connecting": 41429, "seh": 41430, "manta": 41431, "remodeling": 41432, "weymouth": 41433, "atoms": 41434, "cem": 41435, "newell": 41436, "lumi": 41437, "theopen": 41438, "moc": 41439, "miliband": 41440, "gland": 41441, "zshq": 41442, "maggie": 41443, "maniacs": 41444, "msp": 41445, "ady": 41446, "creams": 41447, "leanne": 41448, "esta": 41449, "pyg": 41450, "affinity": 41451, "prayer": 41452, "dunbar": 41453, "lightroom": 41454, "acadi": 41455, "wynonna": 41456, "romantic": 41457, "statedept": 41458, "sickle": 41459, "whos": 41460, "lamo": 41461, "etour": 41462, "finity": 41463, "shrub": 41464, "sharpen": 41465, "pundit": 41466, "edon": 41467, "afore": 41468, "mars": 41469, "jeffery": 41470, "terps": 41471, "medallist": 41472, "katharine": 41473, "accusing": 41474, "taz": 41475, "royd": 41476, "fromhome": 41477, "confrontation": 41478, "allegh": 41479, "ðŁijīðŁijī": 41480, "refresher": 41481, "ranveer": 41482, "neverland": 41483, "jojo": 41484, "lucrative": 41485, "enam": 41486, "caver": 41487, "paedi": 41488, "manjaro": 41489, "fluids": 41490, "thessal": 41491, "oppressed": 41492, "muss": 41493, "johanna": 41494, "Ø®": 41495, "cng": 41496, "buildthe": 41497, "settles": 41498, "sith": 41499, "fuego": 41500, "clamp": 41501, "arag": 41502, "payer": 41503, "tedx": 41504, "mandy": 41505, "interstellar": 41506, "frc": 41507, "chand": 41508, "bcc": 41509, "molo": 41510, "lentil": 41511, "johansson": 41512, "grimsby": 41513, "naturelovers": 41514, "ðŁļ¨ðŁļ¨ðŁļ¨": 41515, "shinde": 41516, "xin": 41517, "internationaldayof": 41518, "transitional": 41519, "sata": 41520, "caddy": 41521, "wod": 41522, "ifu": 41523, "hays": 41524, "hollyo": 41525, "jang": 41526, "irc": 41527, "coim": 41528, "gradable": 41529, "\"\"": 41530, "ðŁį´": 41531, "া": 41532, "ael": 41533, "nyo": 41534, "westlake": 41535, "timeout": 41536, "sofi": 41537, "phenomena": 41538, "cultivation": 41539, "agno": 41540, "unarmed": 41541, "sot": 41542, "conj": 41543, "geno": 41544, "royalnavy": 41545, "nutrition": 41546, "fairmont": 41547, "tirelessly": 41548, "sng": 41549, "rety": 41550, "mica": 41551, "lucent": 41552, "sloane": 41553, "drool": 41554, "rizal": 41555, "odell": 41556, "criticized": 41557, ".'\"": 41558, "laze": 41559, "deserted": 41560, "coder": 41561, "pras": 41562, "lillian": 41563, "itinerary": 41564, "davy": 41565, "anap": 41566, "whipping": 41567, "hoboken": 41568, "kareena": 41569, "羣": 41570, "vius": 41571, "tern": 41572, "nantucket": 41573, "misunderstood": 41574, "bulaga": 41575, "stant": 41576, "chinook": 41577, "zam": 41578, "relies": 41579, "dss": 41580, "edmond": 41581, "sketchy": 41582, "mell": 41583, "fex": 41584, "rector": 41585, "distill": 41586, "daydream": 41587, "winemaker": 41588, "ripley": 41589, "billionaires": 41590, "helene": 41591, "atif": 41592, "culprit": 41593, "bertrand": 41594, "wouldnt": 41595, "mapped": 41596, "vak": 41597, "gladly": 41598, "parliament": 41599, "kidlitart": 41600, "wareness": 41601, "goliath": 41602, "âĨĵ": 41603, "viewpoint": 41604, "tatted": 41605, "fuls": 41606, "dorsey": 41607, "anglers": 41608, "lids": 41609, "kiya": 41610, "bowles": 41611, "beh": 41612, "bite": 41613, "compatibility": 41614, "ancestral": 41615, "prox": 41616, "behaved": 41617, "gubernatorial": 41618, "chfield": 41619, "saban": 41620, "zh": 41621, "teeny": 41622, "shibuya": 41623, "holliday": 41624, "pancy": 41625, 
"âĿĦï¸ıâĿĦï¸ı": 41626, "seungri": 41627, "?,": 41628, "ðŁĩ¦ðŁĩ·": 41629, "imitation": 41630, "impactful": 41631, "anyi": 41632, "genevie": 41633, "años": 41634, "bateman": 41635, "glider": 41636, "afar": 41637, "rasheed": 41638, "effortless": 41639, "shwar": 41640, "dachsh": 41641, "erun": 41642, "atos": 41643, "kini": 41644, "chd": 41645, "khaki": 41646, "klin": 41647, "felicidades": 41648, "belo": 41649, "asl": 41650, "toppers": 41651, "finley": 41652, "stacey": 41653, "rigorous": 41654, "karting": 41655, "leppard": 41656, "carmichael": 41657, "beret": 41658, "cse": 41659, "akhi": 41660, "meringue": 41661, "aban": 41662, "hake": 41663, "geri": 41664, "erjee": 41665, "resto": 41666, "commanders": 41667, "prit": 41668, "flor": 41669, "adven": 41670, "extermin": 41671, "remainder": 41672, "åIJ": 41673, "esg": 41674, "martino": 41675, "lullaby": 41676, "|@": 41677, "mign": 41678, "instore": 41679, "bigbang": 41680, "cordi": 41681, "cauley": 41682, "antebellum": 41683, "dgate": 41684, "crock": 41685, "spandex": 41686, "scaffolding": 41687, "oreos": 41688, "ê°ĵìĦ¸ë¸IJ": 41689, "pomona": 41690, "mauro": 41691, "universi": 41692, "remi": 41693, "afootball": 41694, "tant": 41695, "smalls": 41696, "neh": 41697, "worldo": 41698, "tropical": 41699, "morph": 41700, "javelin": 41701, "glar": 41702, "arquitec": 41703, "reminiscent": 41704, "tubs": 41705, "spidey": 41706, "makeu": 41707, "sylla": 41708, "progressives": 41709, "blot": 41710, "shorten": 41711, "keepin": 41712, "chak": 41713, "angst": 41714, "superfood": 41715, "decadent": 41716, "stony": 41717, "neurological": 41718, "arboretum": 41719, "annak": 41720, "fema": 41721, "percu": 41722, "disrespectful": 41723, "smallbiz": 41724, "lox": 41725, "coom": 41726, "csc": 41727, "bsbi": 41728, "prevalence": 41729, "himss": 41730, "espan": 41731, "moga": 41732, "frampton": 41733, "skymap": 41734, "masse": 41735, "leviathan": 41736, "().": 41737, "nocturnal": 41738, "carameli": 41739, "angor": 41740, "amnesia": 41741, "outsiders": 41742, "shealth": 41743, "rhino": 41744, "antag": 41745, "agio": 41746, "ðŁĴ°ðŁĴ°": 41747, "takeme": 41748, "kabaddi": 41749, "csi": 41750, "msh": 41751, "cochrane": 41752, "thessaloni": 41753, "sila": 41754, "haus": 41755, "dusting": 41756, "obese": 41757, "macklemore": 41758, "manish": 41759, "lenin": 41760, "mdc": 41761, "grown": 41762, "sheffield": 41763, "srs": 41764, "kele": 41765, "carson": 41766, "chum": 41767, "dahlia": 41768, "cantore": 41769, "oppo": 41770, "howling": 41771, "cybercrime": 41772, "surrealism": 41773, "scran": 41774, "faiz": 41775, "thren": 41776, "racists": 41777, "rout": 41778, "pknot": 41779, "semana": 41780, "sini": 41781, "mccull": 41782, "machi": 41783, "alfonso": 41784, "yb": 41785, "sardar": 41786, "kendrick": 41787, "deng": 41788, "recipro": 41789, "onf": 41790, "doomsday": 41791, "bribery": 41792, "customiz": 41793, "artis": 41794, "cpi": 41795, "ðŁĻĪðŁĻĪ": 41796, "slava": 41797, "lette": 41798, "ens": 41799, "âĿ¤ï¸ıðŁĺĺ": 41800, "crayon": 41801, "adan": 41802, "trc": 41803, "migrate": 41804, "simpson": 41805, "rowers": 41806, "kingsley": 41807, "farmersmarket": 41808, "sheehan": 41809, "nephe": 41810, "bornon": 41811, "carton": 41812, "mickey": 41813, "allure": 41814, "ulu": 41815, "slipknot": 41816, "hebdo": 41817, "guido": 41818, "dogcelebration": 41819, "onlinemarketing": 41820, "accelerating": 41821, ")..": 41822, "originated": 41823, "macaroni": 41824, "edtech": 41825, "outfield": 41826, "mitz": 41827, "discus": 41828, "advertiser": 41829, "manor": 41830, "hashi": 41831, "descrip": 
41832, "capita": 41833, "fulbright": 41834, "receptor": 41835, "conn": 41836, "coney": 41837, "spionage": 41838, "rattle": 41839, "prest": 41840, "uli": 41841, "blogpost": 41842, "ackeray": 41843, ")â̦": 41844, "redvelvet": 41845, "matth": 41846, "inspiring": 41847, "bsd": 41848, "kerri": 41849, "pocon": 41850, "millar": 41851, "repur": 41852, "accenture": 41853, "ä¹": 41854, "rambo": 41855, "ragnarok": 41856, "deleting": 41857, "britishmuseum": 41858, "patory": 41859, "leipzig": 41860, "florian": 41861, "scifi": 41862, "iners": 41863, "brate": 41864, "yoy": 41865, "melissa": 41866, "aber": 41867, "masa": 41868, "pote": 41869, "mosquitoes": 41870, "transplant": 41871, "rpa": 41872, ";))": 41873, "bastille": 41874, "ylan": 41875, "joyeux": 41876, "melodic": 41877, "captions": 41878, "atrist": 41879, "rochdale": 41880, "gotti": 41881, "pewdie": 41882, "cutiesaturday": 41883, "whois": 41884, "aquaculture": 41885, "tiva": 41886, "spel": 41887, "hess": 41888, "haji": 41889, "freddie": 41890, "coper": 41891, "brando": 41892, "vk": 41893, "photobook": 41894, "*,": 41895, "mydayin": 41896, "michaela": 41897, "brunei": 41898, "srini": 41899, "inte": 41900, "ı": 41901, "deol": 41902, "dfc": 41903, "separately": 41904, "bund": 41905, "vests": 41906, "toc": 41907, "meck": 41908, "reinforced": 41909, "constraints": 41910, "carroll": 41911, "sqft": 41912, "rever": 41913, "camper": 41914, "birdman": 41915, "inaction": 41916, "generators": 41917, "triumphant": 41918, "pests": 41919, "ovo": 41920, "gypt": 41921, "alamo": 41922, "scaled": 41923, "sureshpp": 41924, "sdn": 41925, "ismo": 41926, "gios": 41927, ")@": 41928, "justiceleague": 41929, "restaurant": 41930, "gabi": 41931, "dengue": 41932, "nextgen": 41933, "exempli": 41934, "apex": 41935, "inspirational": 41936, "downside": 41937, "kidz": 41938, "upl": 41939, "etna": 41940, "alvaro": 41941, "feldman": 41942, "barnet": 41943, "mha": 41944, "esch": 41945, "blooded": 41946, ">>>>>>>>": 41947, "kani": 41948, "hofficial": 41949, "casablanca": 41950, "birds": 41951, "tyga": 41952, "swamp": 41953, "oday": 41954, "newcastle": 41955, "nbap": 41956, "cision": 41957, "chools": 41958, "aflo": 41959, "nep": 41960, "monton": 41961, "akb": 41962, "supermodel": 41963, "downtime": 41964, "thos": 41965, "scwx": 41966, "snoopy": 41967, "aggreg": 41968, "yoke": 41969, "norcal": 41970, "wett": 41971, "prolonged": 41972, "metast": 41973, "beater": 41974, "fta": 41975, "tlap": 41976, "disgusted": 41977, "yh": 41978, "voiceover": 41979, "itchy": 41980, "ipc": 41981, "ðŁİ¾": 41982, "pheasant": 41983, "straits": 41984, "rampant": 41985, "jg": 41986, "fertil": 41987, "assures": 41988, "fortunes": 41989, "salinas": 41990, "lizards": 41991, "kettle": 41992, "ibs": 41993, "cynthi": 41994, "heg": 41995, "mccr": 41996, "socceroos": 41997, "happenings": 41998, "corden": 41999, "ðŁĺĤðŁijĮ": 42000, "tches": 42001, "egret": 42002, "wolverines": 42003, "congratulated": 42004, "hogg": 42005, "bottling": 42006, "wri": 42007, "ferri": 42008, "bosch": 42009, "afire": 42010, "ogden": 42011, "sjo": 42012, "jdm": 42013, "svt": 42014, "contex": 42015, "tollywood": 42016, "mink": 42017, "mese": 42018, "supersonic": 42019, "opoulos": 42020, "å¸": 42021, "âĶģ": 42022, "knuckle": 42023, "guise": 42024, "gami": 42025, "chucky": 42026, "zinger": 42027, "radial": 42028, "complained": 42029, "boda": 42030, "fetal": 42031, "disciplines": 42032, "corro": 42033, "ðŁĩ®ðŁĩ¹": 42034, "opted": 42035, "filtration": 42036, "adnan": 42037, "emcee": 42038, "mistre": 42039, "insomni": 42040, "fergus": 42041, 
"trajec": 42042, "ondon": 42043, "medtech": 42044, "tangerine": 42045, "madras": 42046, "grue": 42047, "cabs": 42048, "zhu": 42049, "sureshpprabhu": 42050, "insulated": 42051, "dayswild": 42052, "ppm": 42053, "bandai": 42054, "vday": 42055, "sff": 42056, "squid": 42057, "lothing": 42058, "notdead": 42059, "expressive": 42060, "cull": 42061, "alastair": 42062, "xu": 42063, "upfront": 42064, "fishers": 42065, "enes": 42066, "umd": 42067, "dismissal": 42068, "stier": 42069, "sels": 42070, "lust": 42071, "reactive": 42072, "protester": 42073, "eyelashes": 42074, "alim": 42075, "goode": 42076, "greeng": 42077, "dair": 42078, "compen": 42079, "anushka": 42080, "prototyping": 42081, "mapu": 42082, "bearings": 42083, "ðŁIJŁ": 42084, "forme": 42085, "bsbibotany": 42086, "timothy": 42087, "outskirts": 42088, "ambed": 42089, "aretha": 42090, "wendell": 42091, "streaks": 42092, "nim": 42093, "kpk": 42094, "snee": 42095, "fitter": 42096, "quota": 42097, "pate": 42098, "winning": 42099, "ðŁįŃ": 42100, "shopping": 42101, "mainst": 42102, "culver": 42103, "stevie": 42104, "mcfadden": 42105, "counterparts": 42106, "grenfell": 42107, "folsom": 42108, "dorset": 42109, "techcrunch": 42110, "â¬ħï¸ı": 42111, "tiptuesday": 42112, "usl": 42113, "trex": 42114, "georgie": 42115, "ranveerofficial": 42116, "licks": 42117, "sewn": 42118, "kf": 42119, "'â̦": 42120, "japs": 42121, "pate": 42122, "orthop": 42123, "festa": 42124, "stras": 42125, "montal": 42126, "hammersmith": 42127, "foremost": 42128, "widows": 42129, "madre": 42130, "itez": 42131, "mitochondri": 42132, "ligans": 42133, "zona": 42134, "caribou": 42135, "mss": 42136, "andrei": 42137, "weatherchannel": 42138, "ghc": 42139, ":...": 42140, "taft": 42141, "aweather": 42142, "alisation": 42143, "brutal": 42144, "blissful": 42145, "nikola": 42146, "malicious": 42147, "qm": 42148, "mpgvip": 42149, "brodie": 42150, "blitz": 42151, "applaud": 42152, "dribb": 42153, "vague": 42154, "doggo": 42155, "translating": 42156, "interpreted": 42157, "hatched": 42158, "getyour": 42159, "beneficiaries": 42160, "sparring": 42161, "caesars": 42162, "awilliams": 42163, "lahat": 42164, "broke": 42165, "timp": 42166, "virtues": 42167, "relying": 42168, "pietro": 42169, "ktn": 42170, "icists": 42171, "pablo": 42172, "loui": 42173, "aag": 42174, "pnpp": 42175, "chast": 42176, "pulses": 42177, "finish": 42178, "usairforce": 42179, "typewriter": 42180, "thompson": 42181, "dogs": 42182, "utto": 42183, "ãģį": 42184, "sandal": 42185, "newly": 42186, "doge": 42187, "zw": 42188, "wankers": 42189, "negr": 42190, "mucha": 42191, "determines": 42192, "blackfish": 42193, "skunk": 42194, "mups": 42195, "instrument": 42196, "phyto": 42197, "daystogo": 42198, "skinned": 42199, "haider": 42200, "conten": 42201, "ðŁIJ¾ðŁIJ¾": 42202, "weiler": 42203, "undoubtedly": 42204, "chairing": 42205, "wallis": 42206, "shard": 42207, "zindabad": 42208, "adult": 42209, "absorption": 42210, "presto": 42211, "deploying": 42212, "drummond": 42213, "battlefront": 42214, "seagulls": 42215, "howdy": 42216, "judaism": 42217, "desde": 42218, "partition": 42219, "âľĿ": 42220, "nology": 42221, "nationalbestfriend": 42222, "lesnar": 42223, "filmfare": 42224, "coasts": 42225, "christensen": 42226, "acan": 42227, "mbu": 42228, "copped": 42229, "rubble": 42230, "swc": 42231, "funnier": 42232, "farther": 42233, "whereas": 42234, "nanotechnology": 42235, "withstand": 42236, "pillow": 42237, "bowers": 42238, "tope": 42239, "itly": 42240, "confit": 42241, "makar": 42242, "comforts": 42243, "bosh": 42244, "clipper": 42245, 
"balla": 42246, "stik": 42247, "milb": 42248, "safeguard": 42249, "musique": 42250, "easport": 42251, "yaz": 42252, "padded": 42253, "bader": 42254, "foreign": 42255, "chopin": 42256, "archive": 42257, "oka": 42258, "transporting": 42259, "tmltalk": 42260, "ajit": 42261, "consequence": 42262, "scroo": 42263, "ffo": 42264, "collaborated": 42265, "pugchat": 42266, "yemi": 42267, "javed": 42268, "auburn": 42269, "oof": 42270, "maw": 42271, "saucer": 42272, "mitigate": 42273, "iles": 42274, "evangelist": 42275, "terie": 42276, "recl": 42277, "indictment": 42278, "cata": 42279, "brightness": 42280, "maythe": 42281, "whimsical": 42282, "unlv": 42283, "keyword": 42284, "cumin": 42285, "medway": 42286, "westworld": 42287, "traw": 42288, "imposing": 42289, "formity": 42290, "coulter": 42291, "abz": 42292, "nypd": 42293, "grassi": 42294, "kelsey": 42295, "qldpol": 42296, "clockwork": 42297, "fdr": 42298, "dianne": 42299, "âĺij": 42300, "adh": 42301, "pann": 42302, "bravely": 42303, "aege": 42304, "unlawful": 42305, "verdi": 42306, "pocalypse": 42307, "pharo": 42308, "karla": 42309, "resonance": 42310, "mastiff": 42311, "ladak": 42312, "buu": 42313, "mailed": 42314, "hii": 42315, "crawley": 42316, "torrent": 42317, "machado": 42318, "libyan": 42319, "effortlessly": 42320, "falsely": 42321, "qvist": 42322, "keef": 42323, "crafthour": 42324, "cherished": 42325, "valkyrie": 42326, "sari": 42327, "kalamaz": 42328, "behe": 42329, "ðŁĮĻ": 42330, "thim": 42331, "roddy": 42332, "coltrane": 42333, "butchers": 42334, "achim": 42335, "wkend": 42336, "awkward": 42337, "cabrera": 42338, ":))))": 42339, "franc": 42340, "declan": 42341, "condos": 42342, "aja": 42343, "pandoramusic": 42344, "charter": 42345, "phill": 42346, "montrose": 42347, "hatchback": 42348, "handicapp": 42349, "greaves": 42350, "eucalyptus": 42351, "utmost": 42352, "tson": 42353, "burton": 42354, "midwives": 42355, "incur": 42356, "ðŁĺį#": 42357, "mood": 42358, "compressed": 42359, "toma": 42360, "mustang": 42361, "mog": 42362, "asana": 42363, "testic": 42364, "shotel": 42365, "insol": 42366, "corsair": 42367, "nhq": 42368, "benny": 42369, "smma": 42370, "kapur": 42371, "incon": 42372, "jonas": 42373, "energies": 42374, "donal": 42375, "asad": 42376, "sez": 42377, "npa": 42378, "archived": 42379, "stimulate": 42380, "dop": 42381, "hyd": 42382, "grieving": 42383, "ãĥĪ": 42384, "rona": 42385, "whyte": 42386, "treehouse": 42387, "ssell": 42388, "sandro": 42389, "kobo": 42390, "thermost": 42391, "seclu": 42392, "hiya": 42393, "geez": 42394, "mamas": 42395, "priscilla": 42396, "flavoured": 42397, "fass": 42398, "wold": 42399, "makerspace": 42400, "cosplay": 42401, "ptv": 42402, "happyvalentinesday": 42403, "sequoia": 42404, "lovecraft": 42405, "guan": 42406, "dtm": 42407, "cii": 42408, "yokohama": 42409, "posthum": 42410, "req": 42411, "ðŁĶµâļªï¸ı": 42412, "galatasar": 42413, "dolby": 42414, "hamptons": 42415, "disturbance": 42416, "stonehenge": 42417, "okc": 42418, "disrupting": 42419, "monthsary": 42420, "jungle": 42421, "headlights": 42422, "dustin": 42423, "microsof": 42424, "happymothersday": 42425, "koko": 42426, "grazi": 42427, "testo": 42428, "naidu": 42429, "malay": 42430, "arial": 42431, "rumb": 42432, "aboo": 42433, "harman": 42434, "trape": 42435, "spoils": 42436, "jeho": 42437, "godly": 42438, "lockscreen": 42439, "zun": 42440, "pious": 42441, "magento": 42442, "lenders": 42443, "probable": 42444, "corporal": 42445, "mour": 42446, "awal": 42447, "sua": 42448, "callme": 42449, "tonne": 42450, "govin": 42451, "devastation": 42452, "xj": 
42453, "gearbox": 42454, "warlock": 42455, "perme": 42456, "itate": 42457, "gazaunderattack": 42458, "duval": 42459, "parasite": 42460, "clemente": 42461, "leth": 42462, "iva": 42463, "frozen": 42464, "tholes": 42465, "tobin": 42466, "cairn": 42467, "sill": 42468, "luckiest": 42469, "converts": 42470, "stale": 42471, "pancra": 42472, "europale": 42473, "wisdom": 42474, "schur": 42475, "ì¶": 42476, "vertigo": 42477, "bij": 42478, "ubc": 42479, "nure": 42480, "righteousness": 42481, "mtc": 42482, "factory": 42483, "verst": 42484, "reversed": 42485, "huri": 42486, "heechul": 42487, "faber": 42488, "arr": 42489, "ulous": 42490, "venom": 42491, "phat": 42492, "greenery": 42493, "brady": 42494, "æ": 42495, ":((": 42496, "nevergiveup": 42497, "disha": 42498, "mota": 42499, "healthcare": 42500, "dunham": 42501, "dexpo": 42502, "denzel": 42503, "bbins": 42504, "fics": 42505, "wham": 42506, "mcg": 42507, "elian": 42508, "wata": 42509, "stralia": 42510, "tellu": 42511, "pesky": 42512, "spinoff": 42513, "armoured": 42514, "reacted": 42515, "dofficial": 42516, "tedu": 42517, "sagar": 42518, "morally": 42519, "paralleled": 42520, "fios": 42521, "downer": 42522, "daugh": 42523, "redo": 42524, "worldcup": 42525, "tariq": 42526, "barne": 42527, "glaciers": 42528, "occult": 42529, "barbarian": 42530, "hermosa": 42531, "!!!)": 42532, "yur": 42533, "internation": 42534, "pss": 42535, "situ": 42536, "pint": 42537, "americanair": 42538, "swam": 42539, "doppler": 42540, "ðŁĴĻðŁĴľ": 42541, "cincodemayo": 42542, "levan": 42543, "hellenic": 42544, "mcne": 42545, "judi": 42546, "yuh": 42547, "stx": 42548, "quare": 42549, "ðŁĺĤ.": 42550, "stig": 42551, "gels": 42552, "motley": 42553, "hardwork": 42554, "eurozone": 42555, "ead": 42556, "ç¥Ń": 42557, "seabir": 42558, "cius": 42559, "laid": 42560, "alpaca": 42561, "presumably": 42562, "pewdiepie": 42563, "booted": 42564, "amari": 42565, "tamine": 42566, "solace": 42567, "barrow": 42568, "academies": 42569, "xian": 42570, "omination": 42571, "dungeons": 42572, "bma": 42573, "deity": 42574, "aik": 42575, "stabil": 42576, "hira": 42577, "affectionate": 42578, "vingne": 42579, "newport": 42580, "ãħĭãħĭ": 42581, "thirds": 42582, "retains": 42583, "aromatherapy": 42584, "skier": 42585, "nima": 42586, "dope": 42587, "cringe": 42588, "condomin": 42589, "toor": 42590, "animator": 42591, "saraj": 42592, "seascape": 42593, "minimalism": 42594, "lakeshore": 42595, "callaway": 42596, "bergman": 42597, "à¤Ĺ": 42598, "whispering": 42599, "stupid": 42600, "rightful": 42601, "requis": 42602, "irn": 42603, "seva": 42604, "utpol": 42605, "tuberculo": 42606, "squish": 42607, "debut": 42608, "governmental": 42609, "christine": 42610, "allman": 42611, "weapon": 42612, "sito": 42613, "buri": 42614, "lolita": 42615, "leafy": 42616, "fuch": 42617, "tinted": 42618, "mcken": 42619, "ahahaha": 42620, "ðŁĩµðŁĩ¹": 42621, "repeal": 42622, "negan": 42623, "ðŁķĬ": 42624, "tailgating": 42625, "gameinsight": 42626, "ðŁıŁï¸ı": 42627, "yakuza": 42628, "zt": 42629, "tiring": 42630, "proposing": 42631, "bowlers": 42632, "traitors": 42633, "akshi": 42634, "clergy": 42635, "cito": 42636, "upsets": 42637, "tuscal": 42638, "symphonic": 42639, "silently": 42640, "shuff": 42641, "blackwell": 42642, "ðŁĺĤ)": 42643, "kobe": 42644, "roberto": 42645, "ridg": 42646, "dcu": 42647, "merino": 42648, "ftp": 42649, "eastside": 42650, ".~": 42651, "nbl": 42652, "mnleg": 42653, "tsfor": 42654, "fraudul": 42655, "capping": 42656, "inmy": 42657, "gymnast": 42658, "stones": 42659, "ssin": 42660, "tweaks": 42661, "shaggy": 42662, 
"oakland": 42663, "demsin": 42664, "sangria": 42665, "mmva": 42666, "hennessy": 42667, "downton": 42668, "rightly": 42669, "init": 42670, "agave": 42671, "oblast": 42672, "northeast": 42673, "friendship": 42674, "dala": 42675, "trophy": 42676, "ðŁij½": 42677, "magin": 42678, "margaritas": 42679, "ê·": 42680, "wwfc": 42681, "fash": 42682, "dike": 42683, "cud": 42684, "chart": 42685, "ðŁij®": 42686, "refugees": 42687, "joplin": 42688, "ncs": 42689, "impy": 42690, "firmware": 42691, "pascu": 42692, "flamin": 42693, "healthtech": 42694, "bellletstalk": 42695, "waka": 42696, "olls": 42697, "lago": 42698, "cowan": 42699, "bombardier": 42700, "shome": 42701, "ðŁĻħ": 42702, "mcmaster": 42703, "nave": 42704, "wells": 42705, "uta": 42706, "tellers": 42707, "misfits": 42708, "kapil": 42709, "faceoff": 42710, "affirm": 42711, "apro": 42712, "whitepaper": 42713, "superyacht": 42714, "specimens": 42715, "allocated": 42716, "...,": 42717, "-__": 42718, "kaw": 42719, "dachshund": 42720, "djoker": 42721, "swork": 42722, "quiere": 42723, "orum": 42724, "ðŁIJł": 42725, "somm": 42726, "cmt": 42727, "inghour": 42728, "skinny": 42729, "lgbti": 42730, "giggles": 42731, "breakaway": 42732, "researched": 42733, "parity": 42734, "myal": 42735, "msl": 42736, "retained": 42737, "sivity": 42738, "makeinindia": 42739, "solves": 42740, "defamation": 42741, "waltham": 42742, "sriracha": 42743, "roadway": 42744, "conceptu": 42745, "alin": 42746, "iwant": 42747, "åĪ": 42748, "delft": 42749, "tenderloin": 42750, "gains": 42751, "faults": 42752, "swire": 42753, "stellen": 42754, "pollo": 42755, "dyne": 42756, "bornonthisday": 42757, "asdfghj": 42758, "sql": 42759, "salim": 42760, "advises": 42761, "voip": 42762, "ìĹijìĨ": 42763, "untouched": 42764, "sheil": 42765, "ontario": 42766, "uphill": 42767, "sobre": 42768, "deshi": 42769, "novella": 42770, "dutton": 42771, "crawfish": 42772, "اÙĨ": 42773, "maa": 42774, "twine": 42775, "kalin": 42776, "ðŁĩµðŁĩŃ": 42777, "yess": 42778, "brooks": 42779, "hoosiers": 42780, "tonka": 42781, "umbrellas": 42782, "ayers": 42783, "ateam": 42784, "acquiring": 42785, "suction": 42786, "än": 42787, "wies": 42788, "tarians": 42789, "socio": 42790, "mattb": 42791, "shepherds": 42792, "oso": 42793, "charitytuesday": 42794, "slogans": 42795, "ninjas": 42796, "albat": 42797, "byte": 42798, "bashir": 42799, "trampoline": 42800, "mydayinla": 42801, "ija": 42802, "basel": 42803, "rory": 42804, "goldie": 42805, "firec": 42806, "unnoticed": 42807, "peculiar": 42808, "scha": 42809, "kerson": 42810, "mourns": 42811, "liquidity": 42812, "quipment": 42813, "hibs": 42814, "ars": 42815, "aeronau": 42816, "slideshow": 42817, "slabs": 42818, "deliciousness": 42819, "skitchen": 42820, "htafc": 42821, "fullerton": 42822, "creighton": 42823, "aerob": 42824, "procrastination": 42825, "azores": 42826, "whitehall": 42827, "ussoccer": 42828, "mediation": 42829, "djokernole": 42830, "andme": 42831, "umen": 42832, "noxious": 42833, "joss": 42834, "ilife": 42835, "annivers": 42836, "sudanese": 42837, "etres": 42838, "undermine": 42839, "wholefoods": 42840, "disobe": 42841, "kori": 42842, "adele": 42843, "eliz": 42844, "canti": 42845, "alon": 42846, "gymnasium": 42847, "sarkodie": 42848, "meteorologist": 42849, "ylde": 42850, "steen": 42851, "stampcollecting": 42852, "nasal": 42853, "lott": 42854, "franks": 42855, "exol": 42856, "acki": 42857, "goodyear": 42858, "animalrights": 42859, "yles": 42860, "violets": 42861, "mmes": 42862, "sthel": 42863, "rapping": 42864, "tuscan": 42865, "waiver": 42866, "turner": 42867, 
"eatlocal": 42868, "northeasthour": 42869, "animations": 42870, "tommorow": 42871, "tsh": 42872, "ffame": 42873, "brae": 42874, "petron": 42875, "glamour": 42876, "bryn": 42877, "dcs": 42878, "bales": 42879, "ðŁĶ¶": 42880, "brov": 42881, "brev": 42882, "bons": 42883, "physique": 42884, "carne": 42885, "xe": 42886, "elixir": 42887, "volved": 42888, "loma": 42889, "ìľł": 42890, "æĺ": 42891, "vanu": 42892, "rigs": 42893, "balance": 42894, "vares": 42895, "bonita": 42896, "sprinkle": 42897, "perfecto": 42898, "dion": 42899, "leak": 42900, "calcutta": 42901, "oba": 42902, "dma": 42903, "cmon": 42904, "tuner": 42905, "pneumonia": 42906, "bogus": 42907, "apologe": 42908, "clough": 42909, "borne": 42910, "))))": 42911, "revived": 42912, "ovarian": 42913, "nerf": 42914, "clegg": 42915, "fanfest": 42916, "chou": 42917, "realizes": 42918, "mcn": 42919, "ligu": 42920, "legalize": 42921, "justsaying": 42922, "forster": 42923, "bosni": 42924, "khi": 42925, "indom": 42926, "heidel": 42927, "encryp": 42928, "siss": 42929, "eddi": 42930, "marbles": 42931, "brisbane": 42932, "ying": 42933, "prepaid": 42934, "walsall": 42935, "cooperate": 42936, "orchestr": 42937, "marisa": 42938, "howie": 42939, "chewy": 42940, "brenner": 42941, "andromeda": 42942, "egan": 42943, "stocki": 42944, "cavendish": 42945, "agan": 42946, "bano": 42947, "deir": 42948, "gog": 42949, "blk": 42950, "rethinking": 42951, "chig": 42952, "rheu": 42953, "snip": 42954, "peng": 42955, "seminole": 42956, "mswx": 42957, "annex": 42958, "lynda": 42959, "lewishamilton": 42960, "cumul": 42961, "tbl": 42962, "dolphin": 42963, "aguero": 42964, "............": 42965, "prelude": 42966, "atour": 42967, "granger": 42968, "tooting": 42969, "rotun": 42970, "disar": 42971, "homeitems": 42972, "dares": 42973, "********": 42974, "ðŁijĨ": 42975, "compreh": 42976, "jinx": 42977, "aswell": 42978, "irie": 42979, "circulating": 42980, "ðŁIJ¥": 42981, "overboard": 42982, "cultivate": 42983, "rhett": 42984, "orienteering": 42985, "cak": 42986, "balkans": 42987, "sitt": 42988, "jasmin": 42989, "britneyspears": 42990, "rotor": 42991, "sealing": 42992, "gbc": 42993, "occi": 42994, "fas": 42995, "emancip": 42996, "comer": 42997, "wartime": 42998, "tickle": 42999, "sonny": 43000, "paces": 43001, "logg": 43002, "atrix": 43003, "srp": 43004, "gwin": 43005, "dobbs": 43006, "uzbe": 43007, "thewanted": 43008, "drush": 43009, "extru": 43010, "micky": 43011, "honorees": 43012, "darwin": 43013, "redux": 43014, "mmj": 43015, "rami": 43016, "jalapeño": 43017, "ioc": 43018, "dover": 43019, "juju": 43020, "whitney": 43021, "seng": 43022, "enly": 43023, "auch": 43024, "archipelago": 43025, "vigilant": 43026, "mangal": 43027, "wildest": 43028, "paranoid": 43029, "hali": 43030, "bbly": 43031, "sanctioned": 43032, "realms": 43033, "conco": 43034, "uddin": 43035, "csk": 43036, "playtime": 43037, "libra": 43038, "savag": 43039, "octane": 43040, "rectan": 43041, "return": 43042, "parrish": 43043, "morrha": 43044, "ccp": 43045, "cmu": 43046, "sailed": 43047, "sevent": 43048, "rosie": 43049, "piling": 43050, "hew": 43051, "boarded": 43052, "segments": 43053, "nephro": 43054, "(.": 43055, "crats": 43056, "bakes": 43057, "ðŁį¸": 43058, "backtothe": 43059, "sibling": 43060, "kirkland": 43061, "keo": 43062, "guwa": 43063, "breads": 43064, "ðŁĺľðŁĺľ": 43065, "tq": 43066, "harassed": 43067, "gau": 43068, "wilbur": 43069, "jisoo": 43070, "eper": 43071, "lisam": 43072, "trippin": 43073, "shino": 43074, "rukh": 43075, "beastmode": 43076, "choa": 43077, "instaweather": 43078, "richland": 43079, 
"gari": 43080, "fez": 43081, "cowboysnation": 43082, "fursuit": 43083, "krun": 43084, "aen": 43085, "sycamore": 43086, "segun": 43087, "entennial": 43088, "dih": 43089, "oax": 43090, "demsinphilly": 43091, "ðŁĻĢ": 43092, "snhl": 43093, "pennies": 43094, "passwords": 43095, "makin": 43096, "tye": 43097, "deng": 43098, "knigh": 43099, "jeeplife": 43100, "helpline": 43101, "afor": 43102, "zzzz": 43103, "steamy": 43104, "picker": 43105, "iterate": 43106, "happeningnow": 43107, "kib": 43108, "bloomberg": 43109, "martyrdom": 43110, "bully": 43111, "assortment": 43112, "ahora": 43113, "zoe": 43114, "noi": 43115, "illustri": 43116, "agarwal": 43117, "psc": 43118, "electronica": 43119, "recruiter": 43120, "gardiner": 43121, "radha": 43122, "nafta": 43123, "dotnet": 43124, "piero": 43125, "georg": 43126, "bels": 43127, "ðŁĺĤðŁĺį": 43128, "tuberculosis": 43129, "runnin": 43130, "moris": 43131, "hauling": 43132, "evoc": 43133, "brethren": 43134, "shair": 43135, "frameworks": 43136, "astu": 43137, "rigid": 43138, "kuma": 43139, "kreme": 43140, "jinnah": 43141, "insurers": 43142, "nyu": 43143, "fere": 43144, "nollywood": 43145, "goodvibes": 43146, "-...": 43147, "toile": 43148, "skril": 43149, "instaweatherpro": 43150, "czech": 43151, "pavel": 43152, "onepiece": 43153, "nikeplus": 43154, "filet": 43155, "cavity": 43156, "ðŁı½âĢįâĻĤï¸ı": 43157, "ðŁİ£": 43158, "drastic": 43159, "dailys": 43160, "siamese": 43161, "rebu": 43162, "osteo": 43163, "lark": 43164, "fre": 43165, "shelling": 43166, "pé": 43167, "gladys": 43168, "ðŁıĢðŁıĢ": 43169, "gustave": 43170, "submerged": 43171, "grandstand": 43172, "attu": 43173, "wont": 43174, "fpv": 43175, "bley": 43176, "joni": 43177, "angames": 43178, "weighted": 43179, "alou": 43180, "श": 43181, "lesbians": 43182, "fj": 43183, "annies": 43184, "aml": 43185, "doria": 43186, "davin": 43187, "beta": 43188, "canc": 43189, "madewithunity": 43190, "haj": 43191, "badlands": 43192, "mul": 43193, "bluec": 43194, "pawn": 43195, "covington": 43196, "neurology": 43197, "httweets": 43198, "dyslexia": 43199, "thelove": 43200, "neat": 43201, "forklift": 43202, "automate": 43203, "uneven": 43204, "montess": 43205, "hein": 43206, "hag": 43207, "relics": 43208, "competitiveness": 43209, "canelo": 43210, "martens": 43211, "bulletproof": 43212, "skittles": 43213, "gya": 43214, "primo": 43215, "americafirst": 43216, "wooo": 43217, "abortions": 43218, "??!!": 43219, "mache": 43220, "lders": 43221, "rlly": 43222, "prelims": 43223, "direct": 43224, "course": 43225, "swain": 43226, "supercell": 43227, "eccentric": 43228, "stingray": 43229, "plets": 43230, "wilcox": 43231, "westin": 43232, "okanagan": 43233, "kiran": 43234, "carbo": 43235, "bombings": 43236, "rarest": 43237, "boh": 43238, "gawd": 43239, "digg": 43240, "moana": 43241, "entirety": 43242, "enclosed": 43243, "dodgeball": 43244, "parton": 43245, "milkyway": 43246, "atr": 43247, "thoroughbred": 43248, "really": 43249, "qantas": 43250, "epiphany": 43251, "inee": 43252, "aerosmith": 43253, "spieth": 43254, "arthro": 43255, "ellini": 43256, "dubu": 43257, "braving": 43258, "âļ½âļ½": 43259, "restructuring": 43260, "illuminate": 43261, "equili": 43262, "mpi": 43263, "ashton": 43264, "ponytail": 43265, "mascots": 43266, "flattering": 43267, "crum": 43268, "asta": 43269, "à®°": 43270, "strangerthings": 43271, "barnab": 43272, "رÙĬ": 43273, "makeshift": 43274, "gotcha": 43275, "willam": 43276, "choirs": 43277, "kilometres": 43278, "ghosh": 43279, "euthan": 43280, "dolly": 43281, "unning": 43282, "thear": 43283, "crewe": 43284, "wsw": 43285, 
"jace": 43286, "dismiss": 43287, "kean": 43288, "hota": 43289, "khat": 43290, "~>": 43291, "thiru": 43292, "rendez": 43293, "hartman": 43294, "teessi": 43295, "casca": 43296, "zah": 43297, "hydrange": 43298, "fod": 43299, "awp": 43300, "mzansi": 43301, "thicker": 43302, "nagoya": 43303, "neva": 43304, "stique": 43305, "castel": 43306, "damian": 43307, "thereby": 43308, "jiang": 43309, "alek": 43310, "musicislife": 43311, "raq": 43312, "callahan": 43313, "gouache": 43314, "somaliland": 43315, "seanhannity": 43316, "raheem": 43317, "lose": 43318, "elove": 43319, "wharton": 43320, "rectangular": 43321, "illustrating": 43322, "harne": 43323, "autisma": 43324, "scrapped": 43325, "elland": 43326, "decree": 43327, "nagpur": 43328, "kipp": 43329, "sore": 43330, "nmd": 43331, "maas": 43332, "guna": 43333, "gartner": 43334, "belli": 43335, "thenight": 43336, "jeon": 43337, "genderequality": 43338, "giver": 43339, "ael": 43340, "garments": 43341, "neu": 43342, "mardigras": 43343, "marsden": 43344, "rower": 43345, "polluted": 43346, "cameraman": 43347, "vinod": 43348, "beasley": 43349, "croc": 43350, "jiu": 43351, "hollyoaks": 43352, "anesthesia": 43353, "alles": 43354, "steward": 43355, "latimes": 43356, "ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸": 43357, "tician": 43358, "goria": 43359, "comedic": 43360, "ð٤Ķð٤Ķð٤Ķ": 43361, "naive": 43362, "slions": 43363, "łĪ": 43364, "burglar": 43365, "ðŁĺŃðŁĺŃðŁĺŃðŁĺŃðŁĺŃ": 43366, "yorkshi": 43367, "señ": 43368, "fanboy": 43369, "laurel": 43370, "incidence": 43371, "potomac": 43372, "roberta": 43373, "presiden": 43374, "pryor": 43375, "osbourne": 43376, "wku": 43377, "teme": 43378, "palae": 43379, "ðŁ¥º": 43380, "reboun": 43381, "itude": 43382, "reddish": 43383, "khand": 43384, "colonialism": 43385, "northcarolina": 43386, "ðĿĴ": 43387, "mannequin": 43388, "ladybird": 43389, "tasty": 43390, "knowledgeable": 43391, "gshore": 43392, "ðŁĮĮ": 43393, "ன": 43394, "quaker": 43395, "salzburg": 43396, "medalists": 43397, "chyna": 43398, "bridesmaid": 43399, "maori": 43400, "rop": 43401, "outraged": 43402, "inadequate": 43403, "truckers": 43404, "alana": 43405, "ìĿ¼": 43406, "rix": 43407, "oooooooo": 43408, "commandments": 43409, "lambeth": 43410, "aaj": 43411, "ecofriendly": 43412, "blaz": 43413, "morecambe": 43414, "bouncy": 43415, "roux": 43416, "raided": 43417, "mized": 43418, "shc": 43419, "gawx": 43420, "laboratories": 43421, "rubs": 43422, "restroom": 43423, "consultations": 43424, "cajun": 43425, "virgini": 43426, "soir": 43427, "revue": 43428, "plein": 43429, "wager": 43430, "ç¹": 43431, "wedo": 43432, "growingup": 43433, "!ðŁĺĬ": 43434, "faceted": 43435, "sinners": 43436, "hovering": 43437, "tiene": 43438, "seasoning": 43439, "anja": 43440, "leggo": 43441, "ilis": 43442, "flax": 43443, "devo": 43444, "ashram": 43445, "matisse": 43446, "keri": 43447, "gower": 43448, "botox": 43449, "marshes": 43450, "unhcr": 43451, "tsm": 43452, "optimus": 43453, "duni": 43454, "stuffs": 43455, "sok": 43456, "orderly": 43457, "nbad": 43458, "islamophobia": 43459, "ravioli": 43460, "faber": 43461, "creds": 43462, "wonka": 43463, "infusion": 43464, "overweight": 43465, "dailynews": 43466, "assimil": 43467, "acollege": 43468, "medallion": 43469, "kilimanjaro": 43470, "stiff": 43471, "thames": 43472, "sunken": 43473, "thard": 43474, "mydubai": 43475, "hilariously": 43476, "hannel": 43477, "plumber": 43478, "fairview": 43479, "separating": 43480, "rascal": 43481, "quien": 43482, "necessities": 43483, "confederation": 43484, "llll": 43485, ":]": 43486, "weaknesses": 43487, "bronco": 43488, "raffles": 
43489, "elot": 43490, "ãĤ¸ãĥ": 43491, "adventcalendar": 43492, "ðŁİ¹": 43493, "stravel": 43494, "tunic": 43495, "ksu": 43496, "impeach": 43497, "espionage": 43498, "!-": 43499, "diment": 43500, "currant": 43501, "biode": 43502, "commuting": 43503, "byron": 43504, "ðŁĴĵðŁĴĵ": 43505, "shaded": 43506, "truro": 43507, "crayons": 43508, "arne": 43509, "hsc": 43510, "freaked": 43511, "dramati": 43512, "fleek": 43513, "ucd": 43514, "marlborough": 43515, "^-": 43516, "crossings": 43517, "malo": 43518, "blackops": 43519, "binance": 43520, "choked": 43521, "cheney": 43522, "plo": 43523, "gestures": 43524, "valedic": 43525, "ryanair": 43526, "remington": 43527, "vcs": 43528, "mckee": 43529, "ecz": 43530, "begs": 43531, "nailart": 43532, "mayorof": 43533, "happyfathersday": 43534, "wart": 43535, "petitions": 43536, "ningly": 43537, "cleanenergy": 43538, "brox": 43539, "slalom": 43540, "existent": 43541, "abay": 43542, "ugliest": 43543, "tomp": 43544, "stoma": 43545, "selby": 43546, "goalscorer": 43547, "benji": 43548, "overwhelmingly": 43549, "lans": 43550, "semiconductor": 43551, "southkorea": 43552, "rescheduled": 43553, "skyl": 43554, "enlisted": 43555, "dowski": 43556, "sidel": 43557, "rosenberg": 43558, "nasser": 43559, "whitehead": 43560, "prius": 43561, "harare": 43562, "enn": 43563, "ryder": 43564, "íĤ": 43565, "mong": 43566, "clasico": 43567, "transporter": 43568, "potty": 43569, "isme": 43570, "*****": 43571, "vice": 43572, "skit": 43573, "odessa": 43574, "lmp": 43575, "hern": 43576, "racially": 43577, "pinoy": 43578, "paraguay": 43579, "obituary": 43580, "goes": 43581, "bucha": 43582, "sidewalks": 43583, "angular": 43584, "unconstitutional": 43585, "transitioning": 43586, "ibu": 43587, "guys": 43588, "unpacking": 43589, "oooooo": 43590, "blackgirl": 43591, "bergs": 43592, "¯": 43593, "wordoftheday": 43594, "trumptrain": 43595, "thunderbolt": 43596, "msi": 43597, "fascists": 43598, "ब": 43599, "tsk": 43600, "collapses": 43601, "rajesh": 43602, "loveislove": 43603, "migrating": 43604, "setback": 43605, "ðŁĺĬâĿ¤ï¸ı": 43606, "tels": 43607, "safetyfirst": 43608, "narrated": 43609, "jaejoong": 43610, "unanswered": 43611, "liqueur": 43612, "ennes": 43613, "dalgo": 43614, "billings": 43615, "saltwater": 43616, "mermaids": 43617, "longs": 43618, "clapham": 43619, "wearec": 43620, "piccollage": 43621, "nach": 43622, "hace": 43623, "poisoned": 43624, "loth": 43625, "agna": 43626, "adelrey": 43627, "guardia": 43628, "polishing": 43629, "peacekeeping": 43630, "dall": 43631, "pisa": 43632, "lapland": 43633, "processors": 43634, "deandre": 43635, "sobs": 43636, "ponce": 43637, "drains": 43638, "cbe": 43639, "ðŁİ¥:": 43640, "splash": 43641, "meatball": 43642, "fontana": 43643, "worcestershirehour": 43644, "nev": 43645, "brisk": 43646, "bint": 43647, "acr": 43648, "pox": 43649, "cayenne": 43650, "skrillex": 43651, "jfc": 43652, "hahahahahahaha": 43653, "glas": 43654, "engul": 43655, "temporal": 43656, "onized": 43657, "concre": 43658, "compose": 43659, "vibrations": 43660, "planters": 43661, "fert": 43662, "criticalrolefanart": 43663, "tbli": 43664, "schallenge": 43665, "huckabee": 43666, "municipal": 43667, "iambic": 43668, "radios": 43669, "nevis": 43670, "durability": 43671, "mccla": 43672, "horseback": 43673, "institutes": 43674, "fulfill": 43675, "attach": 43676, "ateur": 43677, "akan": 43678, "resisting": 43679, "illumination": 43680, "handle": 43681, "haircare": 43682, "oment": 43683, "macleod": 43684, "kaiser": 43685, "gno": 43686, "beardown": 43687, "lyf": 43688, "glomer": 43689, "distortion": 
43690, "zm": 43691, "sank": 43692, "roosters": 43693, "isnow": 43694, "asports": 43695, "agen": 43696, "woken": 43697, "stgeorge": 43698, "romper": 43699, "myle": 43700, "economists": 43701, "ruto": 43702, "twill": 43703, "healthand": 43704, "dito": 43705, "wsl": 43706, "tairp": 43707, "prakash": 43708, "micheal": 43709, "hts": 43710, "wrights": 43711, "katsu": 43712, "fiorentina": 43713, "defenseman": 43714, "ditch": 43715, "varsity": 43716, "texanscheer": 43717, "baham": 43718, "scanned": 43719, "weil": 43720, "seductive": 43721, "ðŁijįðŁı½": 43722, "fue": 43723, "erwin": 43724, "davison": 43725, "terran": 43726, "moods": 43727, "woolf": 43728, "resource": 43729, "@.": 43730, "cush": 43731, "ðŁį°": 43732, "regression": 43733, "curled": 43734, "lazer": 43735, "joanne": 43736, "abbott": 43737, "moz": 43738, "downers": 43739, "mmmmmm": 43740, "valentina": 43741, "khair": 43742, "dreamt": 43743, "crook": 43744, "chek": 43745, "steaming": 43746, "nephews": 43747, "cleric": 43748, "asober": 43749, "indefinitely": 43750, "wye": 43751, "usnews": 43752, "joyce": 43753, "flushing": 43754, "wynonnaearp": 43755, "rondo": 43756, "kiss": 43757, "hotdog": 43758, "barns": 43759, "saxophon": 43760, "farley": 43761, "gasp": 43762, "decreasing": 43763, "alway": 43764, "pex": 43765, "lsd": 43766, "shift": 43767, "poutine": 43768, "razz": 43769, "rescuing": 43770, "niko": 43771, "hoch": 43772, "ccl": 43773, "uaap": 43774, "nts": 43775, "mcar": 43776, "ilwx": 43777, "conquering": 43778, "kettering": 43779, "sturdy": 43780, "delaying": 43781, "stok": 43782, "vanished": 43783, "cathar": 43784, "bingham": 43785, "inv": 43786, "ichiro": 43787, "hemo": 43788, "budgeting": 43789, "[...]": 43790, "bess": 43791, "sebastian": 43792, "slowed": 43793, "ðĿij": 43794, "muslim": 43795, "stuns": 43796, "actonclimate": 43797, "vea": 43798, "seton": 43799, "rosetta": 43800, "ount": 43801, "hardin": 43802, "fluid": 43803, "caw": 43804, "ðŁ¥Ĥ": 43805, "yacht": 43806, "unl": 43807, "sphy": 43808, "provocative": 43809, "oric": 43810, "isback": 43811, "___": 43812, "nicolas": 43813, "gyan": 43814, "loose": 43815, "flin": 43816, "rebate": 43817, ":::": 43818, "!\"@": 43819, "comicon": 43820, "sheff": 43821, "downstream": 43822, "chichester": 43823, "beachlife": 43824, "momlife": 43825, "diabete": 43826, "arra": 43827, "vane": 43828, "oku": 43829, "yeo": 43830, "mango": 43831, "tryout": 43832, "appell": 43833, "heirs": 43834, "arjuna": 43835, "ddu": 43836, "naveen": 43837, "movic": 43838, "socialists": 43839, "sback": 43840, "criterion": 43841, "soyuz": 43842, "kher": 43843, "daz": 43844, "yolanda": 43845, "wineoclock": 43846, "reina": 43847, "onew": 43848, "leonard": 43849, "endez": 43850, "ubs": 43851, "supportlocal": 43852, "facilitated": 43853, "caramelized": 43854, "bpa": 43855, "vuelta": 43856, "mytho": 43857, "mami": 43858, "speare": 43859, "nbaplayoffs": 43860, "fevre": 43861, "nickjonas": 43862, "imprint": 43863, "cso": 43864, "craigslist": 43865, "lasalle": 43866, "gideon": 43867, "hadoop": 43868, "disregard": 43869, "wud": 43870, "tuc": 43871, "magee": 43872, "acoustics": 43873, "taa": 43874, "quie": 43875, "pola": 43876, "crt": 43877, "dwyer": 43878, "dissec": 43879, "capitol": 43880, "mention": 43881, "knoll": 43882, "heigh": 43883, "finders": 43884, "placements": 43885, "lse": 43886, "indira": 43887, "guri": 43888, "madhuridixit": 43889, "kingdoms": 43890, "iambicpent": 43891, "georgina": 43892, "jeky": 43893, "conflicting": 43894, "bayan": 43895, "agatha": 43896, "uphold": 43897, "dron": 43898, "vicar": 43899, 
"expat": 43900, "peripheral": 43901, "pessi": 43902, "faf": 43903, "ancestor": 43904, "?..": 43905, "widget": 43906, "punc": 43907, "commenced": 43908, "beavs": 43909, "airwaves": 43910, "addis": 43911, "poa": 43912, "desses": 43913, "coden": 43914, "vue": 43915, "rupee": 43916, "karin": 43917, "spock": 43918, "msy": 43919, "ะ": 43920, "prick": 43921, "fillmore": 43922, "tification": 43923, "thingsto": 43924, "sarde": 43925, "emile": 43926, "pereira": 43927, "nad": 43928, "brightening": 43929, "arresting": 43930, "woking": 43931, "uscg": 43932, "spill": 43933, "raspberrypi": 43934, "hugo": 43935, "itec": 43936, "isma": 43937, "cufflinks": 43938, "optimized": 43939, "occ": 43940, "miwx": 43941, "enka": 43942, "elited": 43943, "affordable": 43944, "sakh": 43945, "coronado": 43946, "hoh": 43947, "atul": 43948, "aioli": 43949, "jimcantore": 43950, "accounted": 43951, "vinay": 43952, "hermit": 43953, "grooves": 43954, "ranch": 43955, "rilla": 43956, "wetter": 43957, "outof": 43958, "veterin": 43959, "nikov": 43960, "kian": 43961, "fairbanks": 43962, "ramapho": 43963, "niti": 43964, "kko": 43965, "rusty": 43966, "nestle": 43967, "tvxq": 43968, "shaheer": 43969, "âĿ¤âĿ¤âĿ¤âĿ¤": 43970, "pennant": 43971, "gemstones": 43972, "demdebate": 43973, "ðŁIJĬ": 43974, "autonews": 43975, "supportindiefilm": 43976, "macho": 43977, "vex": 43978, "newsat": 43979, "neti": 43980, "concessions": 43981, "candied": 43982, "yofthe": 43983, "macau": 43984, "dends": 43985, "cricketers": 43986, "saniti": 43987, "mariano": 43988, "ghat": 43989, "artoftheday": 43990, "¡ľ": 43991, "egos": 43992, "genoa": 43993, "chatbots": 43994, "brier": 43995, "allabout": 43996, "monty": 43997, "spied": 43998, "rtr": 43999, "comfort": 44000, "snippets": 44001, "realtime": 44002, "grain": 44003, "examined": 44004, "enlightening": 44005, "ttu": 44006, "godbless": 44007, "releasethe": 44008, "singular": 44009, "kians": 44010, "haka": 44011, "sorren": 44012, "defect": 44013, "marg": 44014, "equities": 44015, "dorian": 44016, "suka": 44017, "perl": 44018, "aishwarya": 44019, "pullover": 44020, "precision": 44021, "fairway": 44022, "neve": 44023, "riveting": 44024, "villanova": 44025, "encom": 44026, "ako": 44027, "passionately": 44028, "europaleague": 44029, "siempre": 44030, "xvi": 44031, "enlightened": 44032, "cfr": 44033, "âĺħâĺħâĺħâĺħ": 44034, "wasteland": 44035, "isf": 44036, "newcomers": 44037, "emergency": 44038, "amphitheatre": 44039, "-.": 44040, "textbooks": 44041, "figurative": 44042, "tremb": 44043, "pesc": 44044, "abhin": 44045, "abbot": 44046, "acacia": 44047, "hards": 44048, "porsche": 44049, "kauai": 44050, "elisa": 44051, "carrick": 44052, "abou": 44053, "ellier": 44054, "bech": 44055, "neutron": 44056, "galapagos": 44057, "ruben": 44058, "innis": 44059, "howto": 44060, "nuns": 44061, "sabine": 44062, "iac": 44063, "clinched": 44064, "notori": 44065, "fives": 44066, "cairngor": 44067, "peri": 44068, "grc": 44069, "ðŁĴ¯ðŁĴ¯": 44070, "malm": 44071, "twelfth": 44072, "diff": 44073, "routines": 44074, "martyn": 44075, "linden": 44076, "synthesizer": 44077, "number": 44078, "gamecube": 44079, "falkirk": 44080, "byzantine": 44081, "queuing": 44082, "grill": 44083, "scalable": 44084, "charred": 44085, "routing": 44086, "herbali": 44087, "grizz": 44088, "ðŁĺŃðŁĺŃðŁĺŃ": 44089, "toll": 44090, "terminals": 44091, "lpc": 44092, "abd": 44093, "warmups": 44094, "removable": 44095, "¯\\": 44096, "vigo": 44097, "papaya": 44098, "neve": 44099, "lovingly": 44100, "jokers": 44101, "ibles": 44102, "ssett": 44103, "potenti": 44104, "pele": 44105, 
"gigi": 44106, "sadiq": 44107, "legacy": 44108, "sono": 44109, "rupees": 44110, "retarded": 44111, "elee": 44112, "parr": 44113, "fiance": 44114, "eyre": 44115, "sayers": 44116, "pendants": 44117, "maknae": 44118, "albans": 44119, "adapting": 44120, "pff": 44121, "puberty": 44122, "jiu": 44123, "ingrad": 44124, "hypocrite": 44125, "diplomats": 44126, "physical": 44127, "robby": 44128, "bonsai": 44129, "ãģ·": 44130, "fatt": 44131, "catalunya": 44132, "âľĸï¸ı": 44133, "roma": 44134, "moreland": 44135, "soe": 44136, "conversions": 44137, "stlblues": 44138, "sholm": 44139, "grassy": 44140, "prado": 44141, "onu": 44142, "assaulting": 44143, ">_": 44144, "settes": 44145, "disgraceful": 44146, "aphra": 44147, "âļ½ï¸ıâļ½ï¸ı": 44148, "प": 44149, "kiln": 44150, "goaltender": 44151, "sru": 44152, "philanthropist": 44153, "bals": 44154, "thn": 44155, "studen": 44156, "sandoval": 44157, "dogrescue": 44158, "elions": 44159, "assessed": 44160, "largo": 44161, "hectares": 44162, "shrm": 44163, "saif": 44164, "cleavage": 44165, "noches": 44166, "nene": 44167, "fatalities": 44168, "curing": 44169, "cleanser": 44170, "ales": 44171, "pvp": 44172, "southbank": 44173, "pizzeria": 44174, "marshals": 44175, "knife": 44176, "andover": 44177, "tblightning": 44178, "srsly": 44179, "oute": 44180, "digimon": 44181, "timesofindia": 44182, "promethe": 44183, "lebo": 44184, "fsu": 44185, "witz": 44186, "revere": 44187, "manas": 44188, "mamba": 44189, "chica": 44190, "guan": 44191, "exhibitor": 44192, "csrracing": 44193, "dere": 44194, "xxxxx": 44195, "gusta": 44196, "storytime": 44197, "stoney": 44198, "organics": 44199, "andu": 44200, "seam": 44201, "minogue": 44202, "anushkasharma": 44203, "aba": 44204, "ðŁİĻï¸ı": 44205, "ugandan": 44206, "chromatic": 44207, "assn": 44208, "documentaries": 44209, "sht": 44210, "rupaul": 44211, "loyd": 44212, "kats": 44213, "eus": 44214, "itech": 44215, "medusa": 44216, "panty": 44217, "kellogg": 44218, "etto": 44219, "tallade": 44220, "shaa": 44221, "dost": 44222, "pms": 44223, "mariana": 44224, "jester": 44225, "crooks": 44226, "ðŁĶ¬": 44227, "mindanao": 44228, "indhoven": 44229, "ðŁ¤ª": 44230, "lexi": 44231, "tvn": 44232, "janis": 44233, "cote": 44234, "ãģĨ": 44235, "serrano": 44236, "iwm": 44237, "ðŁIJ¬": 44238, "kke": 44239, "distributors": 44240, "capu": 44241, "counterfeit": 44242, "campsite": 44243, "aggie": 44244, "ðŁĺ¼": 44245, "chhattisgarh": 44246, "~@": 44247, "stateu": 44248, "sandi": 44249, "preventable": 44250, "cls": 44251, "canne": 44252, "mmc": 44253, "iver": 44254, "saharan": 44255, "palis": 44256, "nightout": 44257, "dos": 44258, "apia": 44259, "abscbn": 44260, "managerial": 44261, "arose": 44262, "mowx": 44263, "arosa": 44264, "ðŁĮ³": 44265, "underdog": 44266, "remover": 44267, "astronomers": 44268, "lentils": 44269, "suscep": 44270, "smoother": 44271, "pendleton": 44272, "faucet": 44273, "emory": 44274, "dalmati": 44275, "afcb": 44276, "ticus": 44277, "exempt": 44278, "enrol": 44279, "dheim": 44280, "ðŁIJº": 44281, "restriction": 44282, "starfish": 44283, "stow": 44284, "snorkel": 44285, "thunderbirds": 44286, "shead": 44287, "homosexual": 44288, "dyn": 44289, "asli": 44290, "andretti": 44291, "douche": 44292, "domo": 44293, "tarmac": 44294, "slumber": 44295, "pronto": 44296, "firstdayof": 44297, "miniature": 44298, "mariachi": 44299, "argus": 44300, "recommending": 44301, "mobiles": 44302, "ince": 44303, "illustrious": 44304, "orc": 44305, "adverts": 44306, "grits": 44307, "weasel": 44308, "pagoda": 44309, "overpass": 44310, "greys": 44311, "maximus": 44312, 
"armagh": 44313, "woodland": 44314, "sunni": 44315, "ðŁĴī": 44316, "ëĿ": 44317, "tione": 44318, "socio": 44319, "hos": 44320, "ð٤Ĺð٤Ĺ": 44321, "windsor": 44322, "subsequent": 44323, "munchies": 44324, "idh": 44325, "excluding": 44326, "emi": 44327, "cuth": 44328, "zai": 44329, "weekdays": 44330, "lawsuits": 44331, "barnard": 44332, "ت": 44333, "petting": 44334, "netes": 44335, "mulligan": 44336, "pharmacists": 44337, "raquel": 44338, "eton": 44339, "cranston": 44340, "gilded": 44341, "cleary": 44342, "ceph": 44343, "raa": 44344, "pamper": 44345, "lombardi": 44346, "asin": 44347, "sherry": 44348, "prod": 44349, "forte": 44350, "arianism": 44351, "buffalobills": 44352, "æľ¬": 44353, "ðŁĶ¥#": 44354, "uuu": 44355, "justices": 44356, "carina": 44357, "natin": 44358, "maslow": 44359, "drooling": 44360, "cognac": 44361, "camber": 44362, "elong": 44363, "rdr": 44364, "inen": 44365, "convictions": 44366, "amuse": 44367, "trock": 44368, "harmless": 44369, "visitation": 44370, "genomic": 44371, "bland": 44372, "benoit": 44373, "chimp": 44374, "tuscaloosa": 44375, "greasy": 44376, "xpo": 44377, "gilt": 44378, "seq": 44379, "permitted": 44380, "christmaseve": 44381, "books": 44382, "mue": 44383, "oldschool": 44384, "humanright": 44385, "beati": 44386, "ðŁĶĿ": 44387, "shat": 44388, "sculpting": 44389, "hwan": 44390, "fernandes": 44391, "sciutto": 44392, "fuentes": 44393, "endeavors": 44394, "maidstone": 44395, "unparalleled": 44396, "shouted": 44397, "queenof": 44398, "merc": 44399, "bandic": 44400, "veda": 44401, "selangor": 44402, "pile": 44403, "jahan": 44404, "intimidating": 44405, "disappears": 44406, "clich": 44407, "zaha": 44408, "wurst": 44409, "hiv": 44410, "fodils": 44411, "cordless": 44412, "aaaaaa": 44413, "hydra": 44414, "belinda": 44415, "eels": 44416, "buf": 44417, "sustaining": 44418, "rugbyleague": 44419, "noc": 44420, "brigitte": 44421, "(ðŁĵ¸:": 44422, "trombone": 44423, "soothe": 44424, "smog": 44425, "adp": 44426, "stable": 44427, "ingley": 44428, "diagnose": 44429, "msg": 44430, "wess": 44431, "ticketing": 44432, "onee": 44433, "nswpol": 44434, "eup": 44435, "autopsy": 44436, "adityanath": 44437, "sundown": 44438, "riverfront": 44439, "siya": 44440, "pis": 44441, "hierarchy": 44442, "durango": 44443, "dijk": 44444, "renshaw": 44445, "heaps": 44446, "epidemi": 44447, "davidbowie": 44448, "internetof": 44449, "ddi": 44450, "nationality": 44451, "mbar": 44452, "airy": 44453, "winder": 44454, "walia": 44455, "elliott": 44456, "cx": 44457, "bavarian": 44458, "platt": 44459, "antw": 44460, "wiwx": 44461, "softer": 44462, "neha": 44463, "heller": 44464, "thand": 44465, "daniela": 44466, "boast": 44467, "degradation": 44468, "ðŁĴ¦ðŁĴ¦": 44469, "transforming": 44470, "mane": 44471, "avut": 44472, "ðŁĺĪðŁĺĪ": 44473, "voter": 44474, "thee": 44475, "tate": 44476, "puff": 44477, "indoor": 44478, "soproud": 44479, "boyce": 44480, "borisjohnson": 44481, "waitin": 44482, "immunology": 44483, "ðŁıĨðŁıĨðŁıĨ": 44484, "âĿĮ": 44485, "streetfood": 44486, "lizasober": 44487, "cavalier": 44488, "celia": 44489, "needle": 44490, "motoring": 44491, "gato": 44492, ",)": 44493, "rade": 44494, "harvest": 44495, "tms": 44496, "jarpad": 44497, "oney": 44498, "airmen": 44499, "vre": 44500, "impairment": 44501, "abhishek": 44502, "snoop": 44503, "lant": 44504, "famously": 44505, "blou": 44506, "sze": 44507, "gander": 44508, "untouch": 44509, "tuf": 44510, "deejay": 44511, "collateral": 44512, "bind": 44513, "ðŁļ©": 44514, "pinning": 44515, "icn": 44516, "';": 44517, "theeconomist": 44518, "ultram": 44519, 
"worldwaterday": 44520, "tipoff": 44521, "thei": 44522, "feeders": 44523, "campaign": 44524, "scumb": 44525, "dayweekend": 44526, "yom": 44527, "pedic": 44528, "hough": 44529, "psv": 44530, "plin": 44531, "onde": 44532, "bostonmarathon": 44533, "azzy": 44534, "*_*": 44535, "conley": 44536, "thiago": 44537, "hooo": 44538, "galerie": 44539, "lucid": 44540, "jett": 44541, "glitz": 44542, "finalfantasy": 44543, "achievers": 44544, "yung": 44545, "peregrine": 44546, "ophi": 44547, "dames": 44548, "biomar": 44549, "âĺĢï¸ıâĺĢï¸ı": 44550, "skc": 44551, "lics": 44552, "flank": 44553, "arrahman": 44554, "hoof": 44555, "upholstery": 44556, "tats": 44557, "woz": 44558, "¿": 44559, "snoring": 44560, "raer": 44561, "lju": 44562, "apd": 44563, "plating": 44564, "kanu": 44565, "imation": 44566, "fragrances": 44567, "mra": 44568, "moray": 44569, "mott": 44570, "immuni": 44571, "hearties": 44572, "bhopal": 44573, "timers": 44574, "gata": 44575, "colorway": 44576, "carnation": 44577, "winget": 44578, "sighs": 44579, "sville": 44580, "optimist": 44581, "chateau": 44582, "olympians": 44583, "cio": 44584, "singersongwriter": 44585, "nyo": 44586, "fibers": 44587, "burch": 44588, "agro": 44589, "milne": 44590, "igbo": 44591, "cramer": 44592, "ationals": 44593, "danube": 44594, "padma": 44595, "normani": 44596, "enforced": 44597, "breck": 44598, "boehner": 44599, "arden": 44600, "surrendered": 44601, "prosthetic": 44602, "oma": 44603, "hailed": 44604, "calculations": 44605, "wfa": 44606, "bib": 44607, "fcblive": 44608, "fonda": 44609, "westcoast": 44610, "quests": 44611, "friendly": 44612, "towie": 44613, "fitch": 44614, "balot": 44615, "stardom": 44616, "scratching": 44617, "hosa": 44618, "thika": 44619, "oven": 44620, "stroke": 44621, "outpost": 44622, "pharmaceuticals": 44623, "hikari": 44624, "muy": 44625, "afd": 44626, "fallontonight": 44627, "squat": 44628, "oru": 44629, "drained": 44630, "chocolat": 44631, "민": 44632, "worths": 44633, "rib": 44634, "muj": 44635, "thats": 44636, "residente": 44637, "itel": 44638, "boost": 44639, "migos": 44640, "mulled": 44641, "laa": 44642, "etsyshop": 44643, "donkeys": 44644, "mek": 44645, "ptc": 44646, "flinders": 44647, "ehs": 44648, "rohit": 44649, "muir": 44650, "gad": 44651, "compositions": 44652, "åĨĻ": 44653, "combustion": 44654, "ikh": 44655, "yemeni": 44656, "waved": 44657, "garci": 44658, "akos": 44659, "oods": 44660, "fusion": 44661, "seque": 44662, "slan": 44663, "plur": 44664, "kicchasu": 44665, "shenando": 44666, "sams": 44667, "worlden": 44668, "horowitz": 44669, "withme": 44670, "microbes": 44671, "kki": 44672, "ðŁĴĶðŁĴĶ": 44673, "wsu": 44674, "patchwork": 44675, "freer": 44676, "yaki": 44677, "theart": 44678, "symbolism": 44679, "miler": 44680, "btn": 44681, "mabu": 44682, "sidekick": 44683, "motivates": 44684, "sagitt": 44685, "naturals": 44686, "serviced": 44687, "psori": 44688, "paola": 44689, "quig": 44690, "ibadan": 44691, "giggs": 44692, "ë³": 44693, "scientology": 44694, "sioux": 44695, "salamat": 44696, "dres": 44697, "cadbury": 44698, "dhawan": 44699, "ción": 44700, "_'": 44701, "swapping": 44702, "mariska": 44703, "jamesbond": 44704, "explosives": 44705, "ayles": 44706, "afer": 44707, "sagu": 44708, "censor": 44709, "toma": 44710, "jefferson": 44711, "ringed": 44712, "partist": 44713, "irresponsible": 44714, "aguilar": 44715, "vacay": 44716, "equitable": 44717, "altrincham": 44718, "acur": 44719, "manish": 44720, "germin": 44721, "schooled": 44722, "putter": 44723, "edad": 44724, "naval": 44725, "toasty": 44726, "solareclipse": 44727, "dishu": 
44728, "coyne": 44729, "acco": 44730, "muck": 44731, "maran": 44732, "elos": 44733, "lender": 44734, "croix": 44735, "worthless": 44736, "haber": 44737, "gunmen": 44738, "ðŁįĵ": 44739, "zenith": 44740, "tenders": 44741, "hurst": 44742, "holtz": 44743, "italians": 44744, "carlow": 44745, "ucd": 44746, "characteristic": 44747, "bung": 44748, "avl": 44749, "uth": 44750, "sasia": 44751, "rsl": 44752, "redman": 44753, "neighboring": 44754, "greenpeace": 44755, "stips": 44756, "followparty": 44757, "ygk": 44758, "enos": 44759, "omnibus": 44760, "naissance": 44761, "chrissy": 44762, "secure": 44763, "callback": 44764, "jihoon": 44765, "memory": 44766, "blocker": 44767, "lanta": 44768, "daffodils": 44769, "bilt": 44770, "fferty": 44771, "faust": 44772, "iec": 44773, "nipples": 44774, "sog": 44775, "mnd": 44776, "jaguar": 44777, "boldly": 44778, "abpoli": 44779, "proposition": 44780, "gunsense": 44781, "evansville": 44782, "cutters": 44783, "wego": 44784, "doun": 44785, "dox": 44786, "stallions": 44787, "kaj": 44788, "shippers": 44789, "jawa": 44790, "volo": 44791, "leven": 44792, "paprika": 44793, "kovich": 44794, "jordi": 44795, "inductees": 44796, "appalling": 44797, "dialysis": 44798, "alleviate": 44799, "âĢĶâĢĶ": 44800, "pieter": 44801, "midwi": 44802, "qtr": 44803, "juliette": 44804, "intermission": 44805, "hawks": 44806, "actment": 44807, "oneill": 44808, "klin": 44809, "vamps": 44810, "famous": 44811, "could": 44812, "automobi": 44813, "daan": 44814, "westend": 44815, "ellip": 44816, "nhc": 44817, "melanch": 44818, "webseries": 44819, "tongue": 44820, "snatched": 44821, "smyth": 44822, "tangible": 44823, "sli": 44824, "easing": 44825, "barstool": 44826, "overlay": 44827, "affordability": 44828, "tinged": 44829, "teras": 44830, "ayush": 44831, "wannaone": 44832, "rhine": 44833, "dana": 44834, "shana": 44835, "kendal": 44836, "fertile": 44837, "wir": 44838, "repleni": 44839, "larvae": 44840, "isro": 44841, "convos": 44842, "abbrevi": 44843, "ucc": 44844, "hungry": 44845, "burrows": 44846, "ager": 44847, "navi": 44848, "matin": 44849, "duper": 44850, "cern": 44851, "madon": 44852, "ķï¸ı": 44853, "éģ": 44854, "tups": 44855, "hyatt": 44856, "shep": 44857, "fridaynight": 44858, "wiser": 44859, "heidi": 44860, "hatton": 44861, "pgh": 44862, "fountain": 44863, "wristbands": 44864, "ahmadiyya": 44865, "aerial": 44866, "subscribed": 44867, "solos": 44868, "mace": 44869, "slayed": 44870, "forfe": 44871, "dulce": 44872, "christmass": 44873, "arunjaitley": 44874, "violate": 44875, "obstru": 44876, "nieces": 44877, "wvu": 44878, "idyl": 44879, "faze": 44880, "preserves": 44881, "infringe": 44882, "premiers": 44883, "intervals": 44884, "agency": 44885, "(©": 44886, "standalone": 44887, "dimes": 44888, "boer": 44889, "parameters": 44890, "getit": 44891, "ðŁĺĺðŁĺĺðŁĺĺðŁĺĺ": 44892, "tulane": 44893, "forgiven": 44894, "scoll": 44895, "mbps": 44896, "smashbros": 44897, "robbi": 44898, "primavera": 44899, "alist": 44900, "ghostly": 44901, "ayat": 44902, "yeats": 44903, "impressionist": 44904, "earphones": 44905, "caulfield": 44906, "waikiki": 44907, "salute": 44908, "scou": 44909, "muay": 44910, "louisvuitton": 44911, "bakhta": 44912, "adog": 44913, "inventions": 44914, "hurd": 44915, "foreclo": 44916, "streamline": 44917, "thalaivar": 44918, "chsnews": 44919, "willard": 44920, "tsn": 44921, "europarl": 44922, "crusher": 44923, "mysore": 44924, "grower": 44925, "raping": 44926, "patti": 44927, "gden": 44928, "smw": 44929, "mufti": 44930, "kidman": 44931, "abr": 44932, "sounders": 44933, "skeptical": 44934, 
"ðŁĶİ": 44935, "sundar": 44936, "ime": 44937, "ferg": 44938, "featherweight": 44939, "arlington": 44940, "pasqu": 44941, "agazine": 44942, "wearable": 44943, "natic": 44944, "mcclure": 44945, "intermitt": 44946, "horde": 44947, "sixties": 44948, "carte": 44949, "bhav": 44950, "zeal": 44951, "experiential": 44952, "adorned": 44953, "sommer": 44954, "enote": 44955, "hypothesis": 44956, "stinky": 44957, "proto": 44958, "deadlines": 44959, "vogel": 44960, "musings": 44961, "moncton": 44962, "guter": 44963, "fle": 44964, "acion": 44965, "voiceof": 44966, "tasha": 44967, "inhabitants": 44968, "typeface": 44969, "sba": 44970, "btsx": 44971, "ðŁĶĴ": 44972, "worx": 44973, "uhc": 44974, "joko": 44975, "cellars": 44976, "goro": 44977, "continuum": 44978, "...&": 44979, "weathercee": 44980, "hap": 44981, "srk": 44982, "risers": 44983, "lonelyplanet": 44984, "unnamed": 44985, "coeur": 44986, "ðŁįĮ": 44987, "theworld": 44988, "ilike": 44989, "fasten": 44990, "amigo": 44991, "riba": 44992, "ramaphosa": 44993, "staffers": 44994, "hadley": 44995, "??\"": 44996, "fiore": 44997, "salut": 44998, "huff": 44999, "bezos": 45000, "Ñĭ": 45001, "rader": 45002, "kamala": 45003, "inline": 45004, "fillers": 45005, "umatic": 45006, "allin": 45007, "shatter": 45008, "rein": 45009, "oku": 45010, "chases": 45011, "flagged": 45012, "babymetal": 45013, "waterstones": 45014, "tsb": 45015, "cutout": 45016, "ophel": 45017, "aama": 45018, "rockabilly": 45019, "stolic": 45020, "jetblue": 45021, "ichick": 45022, "downton": 45023, "uzbekistan": 45024, "patna": 45025, "laq": 45026, "grange": 45027, ")_/": 45028, "subsidi": 45029, "scp": 45030, "newscast": 45031, "itsa": 45032, "tweetyour": 45033, "emor": 45034, "archaeologists": 45035, "unification": 45036, "porta": 45037, "qx": 45038, "protectors": 45039, "prohib": 45040, "charisma": 45041, "cartag": 45042, "renfre": 45043, "sculpt": 45044, "guwahati": 45045, "dema": 45046, "boop": 45047, "unfpa": 45048, "dexter": 45049, "layla": 45050, "alleges": 45051, "soups": 45052, "neveragain": 45053, "lys": 45054, "calc": 45055, "baroness": 45056, "visualize": 45057, "gerber": 45058, "absorbed": 45059, "iers": 45060, "ahan": 45061, "fontein": 45062, "detectors": 45063, "verstappen": 45064, "svc": 45065, "formulated": 45066, "acdc": 45067, "lix": 45068, "incompetent": 45069, "bhk": 45070, "lourdes": 45071, "waterhouse": 45072, "snowed": 45073, "appreciative": 45074, "sigma": 45075, "lizasoberano": 45076, "penned": 45077, "paycheck": 45078, "tallinn": 45079, "fancafe": 45080, "parisi": 45081, "avalley": 45082, "vig": 45083, "rufc": 45084, "hardship": 45085, "socute": 45086, "poise": 45087, "ì¹": 45088, "rothschild": 45089, "kly": 45090, "????????": 45091, "lhp": 45092, "ilay": 45093, "fhs": 45094, "amad": 45095, "ideals": 45096, "bradbury": 45097, "balboa": 45098, "nicot": 45099, "kidnap": 45100, "wolve": 45101, "tasmanian": 45102, "opt": 45103, "matthias": 45104, "ãĥ³ãĤ": 45105, "supermarkets": 45106, "mylittlepony": 45107, "melee": 45108, "lister": 45109, "groun": 45110, "fedora": 45111, "kindness": 45112, "enen": 45113, "brahms": 45114, "¯\\_(": 45115, "roswell": 45116, "marlene": 45117, "icu": 45118, "reformation": 45119, "orail": 45120, "hebrides": 45121, "disparities": 45122, "terracotta": 45123, "swallows": 45124, "reid": 45125, "influencing": 45126, "fluor": 45127, "dene": 45128, "tumour": 45129, "blondes": 45130, "thunderbird": 45131, "sheva": 45132, "mogadishu": 45133, "kab": 45134, "creeps": 45135, "iving": 45136, "eneed": 45137, "annoy": 45138, "âĶĢ": 45139, "intrigue": 45140, 
"enquiry": 45141, "araj": 45142, "tural": 45143, "kubernetes": 45144, "endlessly": 45145, "dividends": 45146, "tora": 45147, "tish": 45148, "commemorates": 45149, "unra": 45150, "trib": 45151, "ponty": 45152, "nem": 45153, "dissent": 45154, "brewingco": 45155, "ðŁĺ½": 45156, "normali": 45157, "biof": 45158, "(...": 45159, "chillen": 45160, "주": 45161, "mellon": 45162, "avis": 45163, "mccormack": 45164, "ingra": 45165, "enriched": 45166, "customerexperience": 45167, "testosterone": 45168, "snug": 45169, "setti": 45170, "geronimo": 45171, "inquirer": 45172, "breaches": 45173, "verything": 45174, "blooming": 45175, "mura": 45176, "dispos": 45177, "bide": 45178, "deva": 45179, "shadesof": 45180, "intrin": 45181, "shev": 45182, "sven": 45183, "nayanthara": 45184, "ganesha": 45185, "cws": 45186, "berta": 45187, "labelled": 45188, "useum": 45189, "nicknamed": 45190, "mahan": 45191, "caruso": 45192, "apur": 45193, "ðŁijĨ": 45194, "wq": 45195, "orphanage": 45196, "discarded": 45197, "magnu": 45198, "lue": 45199, "jeon": 45200, "bridgeport": 45201, "pacing": 45202, "mercury": 45203, "(ðŁĵ¸": 45204, "marxist": 45205, "amphibious": 45206, "transplantation": 45207, "stitching": 45208, "thenburg": 45209, "gradual": 45210, "ãĤĮ": 45211, "roft": 45212, "mails": 45213, "inec": 45214, "guyana": 45215, "doppelg": 45216, "vero": 45217, "rewrite": 45218, "headless": 45219, "harbaugh": 45220, "gateway": 45221, "carsforsale": 45222, "swi": 45223, "stis": 45224, "macht": 45225, "unde": 45226, "surabaya": 45227, "stapleton": 45228, "nurturing": 45229, "milner": 45230, "yao": 45231, "lmaoooo": 45232, "kosh": 45233, "arsenal": 45234, "kame": 45235, "erry": 45236, "arroyo": 45237, "dismisses": 45238, "rubbed": 45239, "rcb": 45240, "lewd": 45241, "dilu": 45242, "andor": 45243, "vide": 45244, "urin": 45245, "intersec": 45246, "haar": 45247, "alb": 45248, "yearswith": 45249, "appleton": 45250, "éal": 45251, "ullivan": 45252, "succu": 45253, "monterrey": 45254, "dmx": 45255, "artemis": 45256, "ronnie": 45257, "farmland": 45258, "sfootball": 45259, "grotto": 45260, "anthi": 45261, "ãĢģ": 45262, "à®Ł": 45263, "vidya": 45264, "jimmyfallon": 45265, "àµį": 45266, "tzer": 45267, "gravitational": 45268, "wthr": 45269, "uhhh": 45270, "ehr": 45271, "tinker": 45272, "tijuana": 45273, "scranton": 45274, "ramcharan": 45275, "barclay": 45276, "revan": 45277, "msi": 45278, "kap": 45279, "wrs": 45280, "wethenorth": 45281, "toral": 45282, "satu": 45283, "grom": 45284, "facep": 45285, "erickson": 45286, "zyn": 45287, "sedge": 45288, "oodle": 45289, "spursofficial": 45290, "dsp": 45291, "sicilian": 45292, "solihull": 45293, "receivers": 45294, "ladakh": 45295, "hendrick": 45296, "theri": 45297, "presiding": 45298, "mcguinness": 45299, "litters": 45300, "gunnar": 45301, "ghoul": 45302, "wib": 45303, "ntv": 45304, "karo": 45305, "frock": 45306, "blau": 45307, "amplify": 45308, "allis": 45309, "ullah": 45310, "memoirs": 45311, "khloe": 45312, "interceptions": 45313, "petday": 45314, "looney": 45315, "confin": 45316, "chay": 45317, "piyushgoyal": 45318, "frequencies": 45319, "utz": 45320, "eventual": 45321, "warmly": 45322, "oblivion": 45323, "anka": 45324, "tait": 45325, "âĿ¤ï¸ı.": 45326, "directorial": 45327, "rulers": 45328, "princes": 45329, "muck": 45330, "sturridge": 45331, "deuce": 45332, "abridged": 45333, "baguette": 45334, "uncles": 45335, "pendu": 45336, "minding": 45337, "forrester": 45338, "avila": 45339, "waller": 45340, "wallstreet": 45341, "mentor": 45342, "hino": 45343, "highway": 45344, "cromwell": 45345, "fanartfriday": 
45346, "mbi": 45347, "coyle": 45348, "ahi": 45349, "trove": 45350, "spiegel": 45351, "paytm": 45352, "mcintosh": 45353, "jansen": 45354, "niti": 45355, "nashville": 45356, "leno": 45357, "leicestershire": 45358, "legos": 45359, "dict": 45360, "ðŁĵ½": 45361, "spad": 45362, "beverlyhills": 45363, "syrah": 45364, "separates": 45365, "zain": 45366, "unfit": 45367, "drags": 45368, "tania": 45369, "overflowing": 45370, "hrithik": 45371, "hawthorn": 45372, "zani": 45373, "macfar": 45374, "fide": 45375, "totem": 45376, "peds": 45377, "fundamentally": 45378, "calico": 45379, "sinner": 45380, "jä": 45381, "hilde": 45382, "dsd": 45383, "tenay": 45384, "tahit": 45385, "milf": 45386, "lieb": 45387, "informing": 45388, "uplift": 45389, "rael": 45390, "mortgages": 45391, "lect": 45392, "iiii": 45393, "guillaume": 45394, "composites": 45395, "oldsmobile": 45396, "lend": 45397, "garth": 45398, "commish": 45399, "baptized": 45400, "scorpions": 45401, "rucker": 45402, "bringbackour": 45403, "alliance": 45404, "thalapathy": 45405, "tali": 45406, "spans": 45407, "eridge": 45408, "witherspoon": 45409, "linda": 45410, "skylar": 45411, "korn": 45412, "homs": 45413, "Äį": 45414, "silenced": 45415, "caffe": 45416, "arty": 45417, "distinguish": 45418, "towed": 45419, "pung": 45420, "jessica": 45421, "earnest": 45422, "beaufort": 45423, "tama": 45424, "studyabroad": 45425, "sikhs": 45426, "newbie": 45427, "navratri": 45428, "marble": 45429, "lounging": 45430, "litter": 45431, "dalit": 45432, "sosa": 45433, "izes": 45434, "grade": 45435, "compromising": 45436, "triton": 45437, "detta": 45438, "vj": 45439, "chauffe": 45440, "spectral": 45441, "powered": 45442, "montessori": 45443, "articulate": 45444, "halton": 45445, "alco": 45446, "yey": 45447, "mntwins": 45448, "acounty": 45449, "ðŁijıðŁı¾": 45450, "âīĪ": 45451, "madmen": 45452, "kala": 45453, "grum": 45454, "chik": 45455, "atis": 45456, "sume": 45457, "akhtar": 45458, "jobsearch": 45459, "highlighter": 45460, "boath": 45461, "âĦ¹": 45462, "tarzan": 45463, "lambo": 45464, "âĽĦï¸ı": 45465, "oxfam": 45466, "dumpster": 45467, "pretzels": 45468, "macos": 45469, "inclined": 45470, "factual": 45471, "advertisers": 45472, "shui": 45473, "puree": 45474, "mlpfi": 45475, "antidote": 45476, "capo": 45477, "pastr": 45478, "mercado": 45479, "button": 45480, "armin": 45481, "agg": 45482, "lolla": 45483, "horribly": 45484, "errands": 45485, "christophe": 45486, "timesnow": 45487, "mondaymotiv": 45488, "liss": 45489, "scandals": 45490, "mci": 45491, "disproportion": 45492, "âĺİ": 45493, "surpass": 45494, "samaritan": 45495, "sotho": 45496, "purest": 45497, "flatt": 45498, "triviatuesday": 45499, "delectable": 45500, "leopold": 45501, "hermione": 45502, "choudhary": 45503, "enrich": 45504, "¡¡": 45505, "subsidiary": 45506, "inequalities": 45507, "bachelor": 45508, "autoimmune": 45509, "lakota": 45510, "ihop": 45511, "adjec": 45512, "thesimpsons": 45513, "shes": 45514, "sek": 45515, "gretchen": 45516, "upstream": 45517, "hinakhan": 45518, "copernic": 45519, "xtina": 45520, "lug": 45521, "toughness": 45522, "ead": 45523, "clipped": 45524, "bius": 45525, "slv": 45526, "fahren": 45527, "deepak": 45528, "cau": 45529, "xan": 45530, "immature": 45531, "digni": 45532, "bobs": 45533, "shredding": 45534, "buttery": 45535, "accommodations": 45536, "deven": 45537, "chunks": 45538, "superleague": 45539, "skybet": 45540, "kildare": 45541, "jeet": 45542, "ëį": 45543, "cek": 45544, "wrecks": 45545, "propane": 45546, "ohl": 45547, "tbd": 45548, "quoi": 45549, "trumpp": 45550, "mimo": 45551, 
"reluctant": 45552, "verne": 45553, "oic": 45554, "magh": 45555, "arnau": 45556, "sever": 45557, "lidge": 45558, "stairway": 45559, "kicchasudeep": 45560, "ðŁĶº": 45561, "machining": 45562, "aamaadmi": 45563, "oti": 45564, "cda": 45565, "alit": 45566, "pany": 45567, "installs": 45568, "acct": 45569, "eshop": 45570, "diem": 45571, "hardwell": 45572, "fulfillment": 45573, "scafe": 45574, "quack": 45575, "extracts": 45576, "sweetened": 45577, "fighton": 45578, "fdi": 45579, "dinger": 45580, "waltham": 45581, "usur": 45582, "referees": 45583, "seokjin": 45584, "grann": 45585, "afrin": 45586, "thn": 45587, "schaf": 45588, "parcels": 45589, "betis": 45590, "amarine": 45591, "noman": 45592, "khtar": 45593, "moritz": 45594, "coupling": 45595, "barons": 45596, "ðŁIJ¸": 45597, "ø": 45598, "slp": 45599, "sadler": 45600, "xander": 45601, "triad": 45602, "mcmillan": 45603, "khz": 45604, "dividing": 45605, "ìĹijìĨĮ": 45606, "daryl": 45607, "zedd": 45608, "leys": 45609, "plaques": 45610, "fluori": 45611, "tipperary": 45612, "onnell": 45613, "didier": 45614, "langford": 45615, "imc": 45616, "thesun": 45617, "birdies": 45618, "archa": 45619, "yessss": 45620, "tdi": 45621, "daria": 45622, "candace": 45623, "altam": 45624, "palaces": 45625, "chit": 45626, "santam": 45627, "eventful": 45628, "bookof": 45629, "adb": 45630, "monstax": 45631, "creole": 45632, "coel": 45633, "âĸ½": 45634, "wearen": 45635, "stennis": 45636, "sheath": 45637, "atism": 45638, "groningen": 45639, "mlpfim": 45640, "lepre": 45641, "wrongly": 45642, "rspca": 45643, "rendezvous": 45644, "acknowledging": 45645, "pelvic": 45646, "solicitor": 45647, "slays": 45648, "nuestra": 45649, "lod": 45650, "islander": 45651, "feroci": 45652, "fashionshow": 45653, "rass": 45654, "dgeon": 45655, "adolescents": 45656, "smashes": 45657, "negligence": 45658, "grateful": 45659, "vedere": 45660, "swoop": 45661, "ingl": 45662, "apolice": 45663, "vandalism": 45664, "gann": 45665, "joao": 45666, "disupdates": 45667, "zimbabwe": 45668, "underage": 45669, "radiance": 45670, "wof": 45671, "bourgeo": 45672, "plas": 45673, "crani": 45674, "ghue": 45675, "wreckem": 45676, "warrants": 45677, "reform": 45678, "jimmie": 45679, "atwood": 45680, "ysl": 45681, "neilhimself": 45682, "lbj": 45683, "iman": 45684, "tanto": 45685, "noisse": 45686, "verbs": 45687, "equipo": 45688, "altogether": 45689, "mament": 45690, "lice": 45691, "douglass": 45692, "tierney": 45693, "primed": 45694, "jhal": 45695, "furnitu": 45696, "brazili": 45697, "vill": 45698, "pastels": 45699, "nison": 45700, "uff": 45701, "paralysis": 45702, "jaye": 45703, "impo": 45704, "ðŁijģ": 45705, "strategically": 45706, "pakistanis": 45707, "wassup": 45708, "superbike": 45709, "thanku": 45710, "truelove": 45711, "shaikh": 45712, "israelis": 45713, "vip": 45714, "tog": 45715, "lien": 45716, "laker": 45717, "greyhounds": 45718, "culars": 45719, "bianchi": 45720, "balotelli": 45721, "arran": 45722, "loos": 45723, "strates": 45724, "hebron": 45725, "arvo": 45726, "sunderland": 45727, "theal": 45728, "tombstone": 45729, "sandman": 45730, "cpac": 45731, "thanksgiving": 45732, "lovehim": 45733, "latino": 45734, "anin": 45735, "akaif": 45736, "ĭãĤ": 45737, "torquay": 45738, "diest": 45739, "allianz": 45740, "ðŁĺķ": 45741, "golfclub": 45742, "cllr": 45743, "walcott": 45744, "schnau": 45745, "prompted": 45746, "nominating": 45747, "lennox": 45748, "valet": 45749, "monro": 45750, "mayward": 45751, "eph": 45752, "ðŁĶĶ": 45753, "interoper": 45754, "rda": 45755, "reflex": 45756, "armchair": 45757, "ê°ķ": 45758, "stripper": 
45759, "porti": 45760, "pharm": 45761, "hamza": 45762, "nireland": 45763, "neue": 45764, "hpv": 45765, "portfoli": 45766, "sunburn": 45767, "frisbee": 45768, "beal": 45769, "baptiste": 45770, "xh": 45771, "tym": 45772, "prati": 45773, "overs": 45774, "hazrat": 45775, "desert": 45776, "derry": 45777, "usky": 45778, "emmett": 45779, "acharya": 45780, ")_/¯": 45781, "shud": 45782, "maya": 45783, "hamill": 45784, "raim": 45785, "nrc": 45786, "fittings": 45787, "curvy": 45788, "ðŁıĩ": 45789, "sterling": 45790, "à¥Ģ": 45791, "walkin": 45792, "shortcuts": 45793, "milly": 45794, "astur": 45795, "alphabe": 45796, "pli": 45797, "pez": 45798, "missyou": 45799, "radford": 45800, "mlg": 45801, "taeyang": 45802, "notjustlakes": 45803, "dumps": 45804, "serendip": 45805, "leur": 45806, "raving": 45807, "ester": 45808, "depriv": 45809, "abscbn": 45810, "ðŁijĩðŁı»": 45811, "scarcity": 45812, "ocr": 45813, "meanings": 45814, "capt": 45815, "dahl": 45816, "fermentation": 45817, "brioche": 45818, "towin": 45819, "outlander": 45820, "massimo": 45821, "encro": 45822, "ðŁ¥³": 45823, "built": 45824, "potam": 45825, "kiri": 45826, "tmw": 45827, "monitored": 45828, "kites": 45829, "peoplesvote": 45830, "grayson": 45831, "íģ¬": 45832, "afrika": 45833, "adies": 45834, "ivote": 45835, "gyne": 45836, "gannon": 45837, "dix": 45838, "cmc": 45839, "oural": 45840, "foxandfriends": 45841, "beli": 45842, "igne": 45843, "glan": 45844, "katrinakaif": 45845, "copolitics": 45846, "qualitative": 45847, "psi": 45848, "lucci": 45849, "discoura": 45850, "âĺ®": 45851, "kelli": 45852, "gautam": 45853, "caracas": 45854, "realest": 45855, "pula": 45856, "inus": 45857, "hilltop": 45858, "makeaw": 45859, "attenborough": 45860, "twy": 45861, "rarity": 45862, "peckham": 45863, "mahon": 45864, "cornelius": 45865, "clinicians": 45866, "tonline": 45867, "tbi": 45868, "paradise": 45869, "kasi": 45870, "inevit": 45871, "freshness": 45872, "collingwood": 45873, "lunatic": 45874, "defense": 45875, "copd": 45876, "infra": 45877, "wainwright": 45878, "sainsbury": 45879, "alabam": 45880, "tema": 45881, "laco": 45882, "checker": 45883, "relegated": 45884, "trent": 45885, "stalks": 45886, "huffpost": 45887, "bhubaneswar": 45888, "astral": 45889, "shareyour": 45890, "primrose": 45891, "hime": 45892, "catan": 45893, "endment": 45894, "endow": 45895, "clemens": 45896, "maloney": 45897, "hilary": 45898, "gametime": 45899, "denise": 45900, "collaborators": 45901, "bwo": 45902, "radicals": 45903, "guetta": 45904, "icion": 45905, "aua": 45906, "snapmatic": 45907, "satchel": 45908, "excavation": 45909, "baseman": 45910, "são": 45911, "gnation": 45912, "feld": 45913, "survey": 45914, "shahzad": 45915, "mast": 45916, "anirudhofficial": 45917, "trucker": 45918, "otago": 45919, "geograph": 45920, "ethel": 45921, "âļ¡ï¸ıâļ¡ï¸ı": 45922, "sver": 45923, "mutt": 45924, "internetofthings": 45925, "anchored": 45926, "whouse": 45927, "bangla": 45928, "balmain": 45929, "ç¹ĭãģ": 45930, "breakfa": 45931, "áĢ": 45932, "twister": 45933, "tetris": 45934, "cav": 45935, "stags": 45936, "gz": 45937, "aub": 45938, "stormed": 45939, "helens": 45940, "yarmouth": 45941, "stasy": 45942, "gustavo": 45943, "cosc": 45944, "vinson": 45945, "upp": 45946, "scricket": 45947, "assumptions": 45948, "appe": 45949, "nuh": 45950, "uer": 45951, "premise": 45952, "naga": 45953, "eamon": 45954, "coronary": 45955, "naf": 45956, "northside": 45957, "elmer": 45958, "rotar": 45959, "outlining": 45960, "elf": 45961, "resurg": 45962, "katelyn": 45963, "incan": 45964, "hysteria": 45965, "cee": 45966, 
"ambani": 45967, "prolly": 45968, "ĮãĤĬãģ": 45969, "axes": 45970, "sanjose": 45971, "rembrandt": 45972, "magpie": 45973, "evenly": 45974, "scorsese": 45975, "quaint": 45976, "fg": 45977, "bbuk": 45978, "indianfootball": 45979, "weareall": 45980, "spdwy": 45981, "pisces": 45982, "ecg": 45983, "âĺħâĺħâĺħâĺħâĺħ": 45984, "preorders": 45985, ":|": 45986, "nipple": 45987, "salazar": 45988, "jume": 45989, "jailbreak": 45990, "minn": 45991, "bassett": 45992, "zetta": 45993, "jeffree": 45994, "adjun": 45995, "ticon": 45996, "sandiego": 45997, "drinklocal": 45998, "cholera": 45999, "solicitors": 46000, "obo": 46001, "compost": 46002, "nian": 46003, "wra": 46004, "treach": 46005, "icic": 46006, "professional": 46007, "delve": 46008, "legate": 46009, "historia": 46010, "croissant": 46011, "connoisse": 46012, "namo": 46013, "palliative": 46014, "chemtrails": 46015, "iority": 46016, "globalwarming": 46017, "comicart": 46018, "behavioural": 46019, "rested": 46020, "lias": 46021, "climates": 46022, "ŁãģĦ": 46023, "rutland": 46024, "nourish": 46025, "menopause": 46026, "hotties": 46027, "dementi": 46028, "vespa": 46029, "melville": 46030, "analogue": 46031, "tzman": 46032, "strung": 46033, "imperfect": 46034, "glare": 46035, "circling": 46036, "rosberg": 46037, "reco": 46038, "ocity": 46039, "loire": 46040, "embe": 46041, "dossier": 46042, "neel": 46043, "nando": 46044, "mea": 46045, "galvani": 46046, "finesse": 46047, "agp": 46048, "berkeley": 46049, "asim": 46050, "âĺºâĺº": 46051, "quilted": 46052, "ishere": 46053, "unmatched": 46054, "potion": 46055, "forz": 46056, "atre": 46057, "selfies": 46058, "juliana": 46059, "ðŁļ¶": 46060, "âĸº": 46061, "melton": 46062, "âłĢâłĢâłĢâłĢâłĢâłĢâłĢâłĢ": 46063, "spinrilla": 46064, "purcell": 46065, "edp": 46066, "atleti": 46067, "tonyawards": 46068, "raja": 46069, "progno": 46070, "molten": 46071, "stuff": 46072, "pally": 46073, "nobelprize": 46074, "âĻ»ï¸ı": 46075, "spiritual": 46076, "speake": 46077, "sasha": 46078, "brium": 46079, "truss": 46080, "criticize": 46081, "assassinscreed": 46082, "yoruba": 46083, "ulo": 46084, "fireman": 46085, "workinprogress": 46086, "efcc": 46087, "flares": 46088, "robot": 46089, "hikers": 46090, "cll": 46091, "shadowing": 46092, "patsy": 46093, "lehman": 46094, "cns": 46095, "å±": 46096, "guadal": 46097, "à±į": 46098, "rape": 46099, "rhonda": 46100, "parallels": 46101, "sonja": 46102, "language": 46103, "landings": 46104, "zola": 46105, "cramps": 46106, "burning": 46107, "appraisal": 46108, "jolla": 46109, "hamm": 46110, "kasa": 46111, "gully": 46112, "fgo": 46113, "ulysses": 46114, "ribe": 46115, "ðŁĴĦ": 46116, "ibu": 46117, "etienne": 46118, "briar": 46119, "finely": 46120, "combating": 46121, "yql": 46122, "gotham": 46123, "wechat": 46124, "topaz": 46125, "primaries": 46126, "lse": 46127, "izz": 46128, "hele": 46129, "disponible": 46130, "cystic": 46131, "belichick": 46132, "thrush": 46133, "kansascity": 46134, "geom": 46135, "solidi": 46136, "redbubble": 46137, "bystand": 46138, "cambridgeshire": 46139, "parfait": 46140, "astle": 46141, "owo": 46142, "indore": 46143, "stomping": 46144, "smelly": 46145, "ð٤ĸ": 46146, "locomo": 46147, "admitting": 46148, "holme": 46149, "clockwise": 46150, "minsk": 46151, "mcco": 46152, "forget": 46153, "evp": 46154, "camra": 46155, "abella": 46156, "yotes": 46157, "universityof": 46158, "méxico": 46159, "silverado": 46160, "ricket": 46161, "crombie": 46162, "puj": 46163, "eradicate": 46164, "delight": 46165, "ygo": 46166, "glamping": 46167, "vica": 46168, "duggan": 46169, "counters": 46170, "cfd": 
46171, "scour": 46172, "reactjs": 46173, "puram": 46174, "parasites": 46175, "inki": 46176, "villen": 46177, "stella": 46178, "limbo": 46179, "angas": 46180, "kcr": 46181, "ðŁĴļðŁĴļðŁĴļ": 46182, "vapori": 46183, "mumford": 46184, "oligar": 46185, "à¼": 46186, "aloo": 46187, "booties": 46188, "adr": 46189, "kelli": 46190, "drummers": 46191, "avici": 46192, "natureuk": 46193, "ronal": 46194, "intrac": 46195, "unsplash": 46196, "leche": 46197, "goma": 46198, "eline": 46199, "enviro": 46200, "bionic": 46201, "bueno": 46202, "mik": 46203, "avin": 46204, "starling": 46205, "empowers": 46206, "cakeday": 46207, "boycot": 46208, "ðŁĴļðŁĴļ": 46209, "ðŁĮ¸ðŁĮ¸": 46210, "vach": 46211, "mci": 46212, "fractures": 46213, "geri": 46214, "sking": 46215, "excluded": 46216, "luce": 46217, "jave": 46218, "iggy": 46219, "eviden": 46220, "akistan": 46221, "awn": 46222, "morals": 46223, "lucifer": 46224, "haban": 46225, "tumbling": 46226, "sundaymotivation": 46227, "mosley": 46228, "captainamerica": 46229, "schicago": 46230, "theone": 46231, "motd": 46232, "dts": 46233, "ðŁIJ¼": 46234, "repell": 46235, "iii": 46236, "locust": 46237, "geospatial": 46238, "mersey": 46239, "immerse": 46240, "descend": 46241, "bernade": 46242, "js": 46243, "boatsales": 46244, "winder": 46245, "crank": 46246, "singleton": 46247, "candidacy": 46248, "bena": 46249, "ðŁı»âĢį": 46250, "highlander": 46251, "olt": 46252, "kprs": 46253, "healthylifestyle": 46254, "fourteen": 46255, "endthe": 46256, "ithaca": 46257, "circulated": 46258, "rans": 46259, "prevalent": 46260, "havas": 46261, "splendor": 46262, "rooster": 46263, "kalamazoo": 46264, "jewellers": 46265, "ennedy": 46266, "rousey": 46267, "esy": 46268, "cannons": 46269, "ornamental": 46270, "////": 46271, "rendon": 46272, "winne": 46273, "molding": 46274, "eidmubarak": 46275, "countess": 46276, "simona": 46277, "hawa": 46278, "foes": 46279, "duster": 46280, "sbu": 46281, "portray": 46282, "marries": 46283, "goodday": 46284, "choco": 46285, "achiever": 46286, "ðŁĺ¹ðŁĺ¹": 46287, "preneur": 46288, "tramp": 46289, "tomi": 46290, "nbat": 46291, "gardenchat": 46292, "farrakhan": 46293, "everglades": 46294, "abru": 46295, "sousa": 46296, "sece": 46297, "homeswee": 46298, "terrestrial": 46299, "barit": 46300, "sridevi": 46301, "olu": 46302, "melinda": 46303, "frick": 46304, "candies": 46305, "ðŁĺŃðŁĴķ": 46306, "qureshi": 46307, "familyfun": 46308, "exorcist": 46309, "cardinal": 46310, "nyt": 46311, "diesel": 46312, "cumulus": 46313, "capricorn": 46314, "siology": 46315, "lorna": 46316, "dougie": 46317, "andie": 46318, "supersport": 46319, "cfl": 46320, "пÑĢи": 46321, "sayang": 46322, "peek": 46323, "à¸Ĭ": 46324, "lobe": 46325, "jem": 46326, "inglis": 46327, "ggled": 46328, "csn": 46329, "amnesty": 46330, "chups": 46331, "baes": 46332, "sauer": 46333, "ðŁıIJ": 46334, "mongolian": 46335, "enet": 46336, "backstreet": 46337, "drilled": 46338, "accessing": 46339, "ceo": 46340, "bse": 46341, "aiken": 46342, "purr": 46343, "worsen": 46344, "wheres": 46345, "wark": 46346, "testifying": 46347, "buri": 46348, "blast": 46349, "awg": 46350, "ðŁĵĭ": 46351, "redefining": 46352, "hearing": 46353, "uci": 46354, "cmp": 46355, "boni": 46356, "tailoring": 46357, "taji": 46358, "nocchi": 46359, "emt": 46360, "stephenking": 46361, "neet": 46362, "complains": 46363, "campaigner": 46364, "luciano": 46365, "twilight": 46366, "tiesto": 46367, "passports": 46368, "floyd": 46369, "cathedr": 46370, "naked": 46371, "caregiver": 46372, "bcoz": 46373, "adecides": 46374, "kuri": 46375, "lyk": 46376, "braries": 46377, 
"drenched": 46378, "disclose": 46379, "ðŁĴªðŁı½": 46380, "leblanc": 46381, "jetty": 46382, "garty": 46383, "chipmun": 46384, "bsu": 46385, "rhythmic": 46386, "icz": 46387, "frid": 46388, "annex": 46389, "amex": 46390, "soloist": 46391, "lancers": 46392, "arrowhead": 46393, "specification": 46394, "simulated": 46395, "nais": 46396, "inverte": 46397, "bowing": 46398, "worship": 46399, "fz": 46400, "aboss": 46401, "shaq": 46402, "ì¶ķ": 46403, "challengers": 46404, "anarch": 46405, "aamaadmiparty": 46406, "ãħĭãħĭãħĭ": 46407, "suffolk": 46408, "socorro": 46409, "snell": 46410, "cladding": 46411, "absorbing": 46412, "shawa": 46413, "participates": 46414, "ðŁįĶ": 46415, "bookstores": 46416, "baku": 46417, "seaport": 46418, "kojima": 46419, "gaby": 46420, "packard": 46421, "electrician": 46422, "letit": 46423, "mowing": 46424, "fawad": 46425, "youngjae": 46426, "hotmail": 46427, "mening": 46428, "urie": 46429, "intimacy": 46430, "conti": 46431, ":\")": 46432, "lifeisgood": 46433, "inciner": 46434, "idri": 46435, "craziness": 46436, "journos": 46437, "franchi": 46438, "bottlen": 46439, "alda": 46440, "ffes": 46441, "kx": 46442, "southwe": 46443, "aira": 46444, "clayton": 46445, "scoti": 46446, "fj": 46447, "briga": 46448, "ð٤ĺðŁı»": 46449, "demonstrators": 46450, "yz": 46451, "stork": 46452, "naq": 46453, "cascades": 46454, "travelchat": 46455, "plata": 46456, "padma": 46457, "franci": 46458, "attain": 46459, "batgirl": 46460, "lombard": 46461, "hoos": 46462, "ddos": 46463, "neonatal": 46464, "disclaimer": 46465, "rss": 46466, "rant": 46467, "disen": 46468, "texaste": 46469, "socal": 46470, "fractal": 46471, "camry": 46472, "strife": 46473, "snacking": 46474, "muh": 46475, "santander": 46476, "morons": 46477, "graf": 46478, "parades": 46479, "huston": 46480, "drupal": 46481, "miento": 46482, "kirstel": 46483, "hyde": 46484, "vomit": 46485, "fortified": 46486, "sphinx": 46487, "dav": 46488, "biryani": 46489, "winnings": 46490, "sbaseball": 46491, "merged": 46492, "lovelondon": 46493, "lingering": 46494, "dreambig": 46495, "carleton": 46496, "livelihood": 46497, "django": 46498, "astrid": 46499, "grids": 46500, "downe": 46501, "bruised": 46502, "sne": 46503, "scarecrow": 46504, "helium": 46505, "fnc": 46506, "biggs": 46507, "anter": 46508, "restorative": 46509, "empires": 46510, "abdel": 46511, "lifestyle": 46512, "kiwanis": 46513, "colloquium": 46514, "meen": 46515, "prick": 46516, "antique": 46517, "zeb": 46518, "mimic": 46519, "edmonds": 46520, "ðŁijĬðŁijĬ": 46521, "qing": 46522, "ppel": 46523, "mcgill": 46524, "interpreting": 46525, "âŀķ": 46526, "rashad": 46527, "doka": 46528, "narrator": 46529, "electromagnetic": 46530, "ashby": 46531, "saura": 46532, "irandeal": 46533, "âģīï¸ı": 46534, "krishnan": 46535, "indi": 46536, "ffen": 46537, "brea": 46538, "osman": 46539, "multinational": 46540, "chippe": 46541, "recruiters": 46542, "ausbiz": 46543, "pounding": 46544, "regen": 46545, "cursor": 46546, "refusal": 46547, "macs": 46548, "inak": 46549, "axial": 46550, "waifu": 46551, "upcycled": 46552, "hindustan": 46553, "cassini": 46554, "carlyle": 46555, "scratches": 46556, "reef": 46557, "manatee": 46558, "eatery": 46559, "ðŁĵ¢": 46560, "uncondition": 46561, "senpai": 46562, "onther": 46563, "comicbook": 46564, "prosciutto": 46565, "demar": 46566, "mise": 46567, "mage": 46568, "freec": 46569, "ayesha": 46570, "alder": 46571, "androidgames": 46572, "leyton": 46573, "hock": 46574, "doorway": 46575, "chicagofire": 46576, "aaliyah": 46577, "swelling": 46578, "bix": 46579, ".ðŁĺĤ": 46580, "evankirstel": 
46581, "torpedo": 46582, "konstant": 46583, "genevieve": 46584, "maia": 46585, "hauser": 46586, "dotorg": 46587, "hideous": 46588, "fik": 46589, "spraw": 46590, "eek": 46591, "zappa": 46592, "wandered": 46593, "''": 46594, "rajan": 46595, "bambi": 46596, "($)": 46597, "widening": 46598, "toolbox": 46599, "sair": 46600, "illuminating": 46601, "prays": 46602, "outpatient": 46603, "iw": 46604, "dayo": 46605, "lob": 46606, "swfl": 46607, "shades": 46608, "gums": 46609, "cookin": 46610, "kodi": 46611, "griffin": 46612, "traumati": 46613, "stea": 46614, "slaughtered": 46615, "godbless": 46616, "airtime": 46617, "pseudo": 46618, "bsa": 46619, "hauled": 46620, "arif": 46621, "à¸Ńà¸ĩ": 46622, "lel": 46623, "wcpo": 46624, "militi": 46625, "charters": 46626, "worlda": 46627, "ruk": 46628, "kgs": 46629, "digitalindia": 46630, "isable": 46631, "idyllic": 46632, "espino": 46633, "marietta": 46634, "ebo": 46635, "teamcanada": 46636, "abour": 46637, "wilton": 46638, "rockstars": 46639, "favored": 46640, "physic": 46641, "wrinkle": 46642, "tbr": 46643, "dprint": 46644, "ballarat": 46645, "adal": 46646, "zey": 46647, "ðŁĺįðŁĶ¥": 46648, "tomlin": 46649, "mtr": 46650, "palsy": 46651, "fenerbah": 46652, "tighten": 46653, "philia": 46654, "ironing": 46655, "ryu": 46656, "bant": 46657, "enquire": 46658, "cair": 46659, "aburger": 46660, "trun": 46661, "greenberg": 46662, "chauhan": 46663, "irina": 46664, "shani": 46665, "trendsetter": 46666, "prett": 46667, "zafar": 46668, "alove": 46669, "vici": 46670, "panic": 46671, "noo": 46672, "lustre": 46673, "disrupted": 46674, "ballis": 46675, "sonsof": 46676, "monsi": 46677, "instac": 46678, "akest": 46679, "ëĭ¤": 46680, "kwame": 46681, "horrormovies": 46682, "district": 46683, "saucy": 46684, "mban": 46685, "armies": 46686, "withdrawn": 46687, "medics": 46688, "loftus": 46689, "eroom": 46690, "bekind": 46691, "arns": 46692, "allon": 46693, "unison": 46694, "davids": 46695, "crat": 46696, "nicotine": 46697, "soor": 46698, "smx": 46699, "onco": 46700, "cosplaying": 46701, "zombies": 46702, "harms": 46703, "eger": 46704, "rosy": 46705, "moonshine": 46706, "fein": 46707, "cett": 46708, "dubrov": 46709, "regents": 46710, "benitez": 46711, "ðŁijıðŁı¼ðŁijıðŁı¼": 46712, "stec": 46713, "malia": 46714, "prioritize": 46715, "iceland": 46716, "ftse": 46717, "vamo": 46718, "lamont": 46719, "homosexuality": 46720, "brees": 46721, "regui": 46722, "cbp": 46723, "tej": 46724, "skysports": 46725, "detergent": 46726, "shasta": 46727, "derel": 46728, "conservancy": 46729, "colorized": 46730, "accolades": 46731, "viso": 46732, "showyour": 46733, "nanow": 46734, "biceps": 46735, "usability": 46736, "bim": 46737, "dailysketch": 46738, "pearljam": 46739, "strangest": 46740, "megadeth": 46741, "broadcasts": 46742, "barren": 46743, "arton": 46744, "chriss": 46745, "configu": 46746, "lures": 46747, "isthe": 46748, "eul": 46749, "railwayana": 46750, "globalhealth": 46751, "gianni": 46752, "uaap": 46753, "slum": 46754, "consciously": 46755, "abre": 46756, "nup": 46757, "budget": 46758, "vada": 46759, "esch": 46760, "realness": 46761, "erased": 46762, "thunt": 46763, "bez": 46764, "armistice": 46765, "ðŁij¹": 46766, "shrun": 46767, "oled": 46768, "driverless": 46769, "ðŁ¤·ðŁı»âĢįâĻĢï¸ı": 46770, "wondr": 46771, "skan": 46772, "salaam": 46773, "motherland": 46774, "hwang": 46775, "geno": 46776, "gangnam": 46777, "twright": 46778, "endorsing": 46779, "enic": 46780, "adoration": 46781, "paused": 46782, "patricks": 46783, "docked": 46784, "platte": 46785, "ffxv": 46786, "ethnicity": 46787, "autoshow": 
46788, "sideshow": 46789, "afterlife": 46790, "relocated": 46791, "orphaned": 46792, "foodnetwork": 46793, "dareto": 46794, "andra": 46795, "slaps": 46796, "vlive": 46797, "swims": 46798, "reimagined": 46799, "mistle": 46800, "revise": 46801, "reality": 46802, "bharti": 46803, "ðŁĴĻðŁĴĽ": 46804, "latest": 46805, "proudest": 46806, "grasses": 46807, "lanyard": 46808, "freshest": 46809, "carcinoma": 46810, "anomaly": 46811, "ziegler": 46812, "sumner": 46813, "lyrix": 46814, "gorg": 46815, "isd": 46816, "avel": 46817, "swildlife": 46818, "mesqu": 46819, "johncena": 46820, "euroleague": 46821, "saber": 46822, "masterful": 46823, "yarra": 46824, "cognition": 46825, "jacobson": 46826, "abolic": 46827, "sirloin": 46828, "shukla": 46829, "mojito": 46830, "supere": 46831, "stweet": 46832, "mez": 46833, "esa": 46834, "rudolf": 46835, "gura": 46836, "whereyou": 46837, "ttm": 46838, "wins": 46839, "trustworthy": 46840, "nyk": 46841, "braden": 46842, "tabletop": 46843, "goodfood": 46844, "eson": 46845, "bek": 46846, "linguistic": 46847, "grays": 46848, "chath": 46849, "hcs": 46850, "moni": 46851, "deans": 46852, "cussions": 46853, "chell": 46854, "slows": 46855, "hemi": 46856, "dapp": 46857, "sharpie": 46858, "boosters": 46859, "aos": 46860, "strack": 46861, "sedona": 46862, "mueller": 46863, "hardwick": 46864, "ornate": 46865, "thora": 46866, "salud": 46867, "otwol": 46868, "chum": 46869, "miho": 46870, "forage": 46871, "thelittle": 46872, "tearful": 46873, "oneself": 46874, "mindy": 46875, "smg": 46876, "gmbh": 46877, "emerald": 46878, "ðŁĶ´âļªï¸ı": 46879, "tutti": 46880, "receptions": 46881, "revising": 46882, "ibrox": 46883, "topeka": 46884, "salami": 46885, "expanse": 46886, "ibooks": 46887, "dobson": 46888, "clio": 46889, "ats": 46890, "ðŁļĮ": 46891, "moha": 46892, "isance": 46893, "shutters": 46894, "moot": 46895, "janine": 46896, "marvelcomics": 46897, "jordani": 46898, "poser": 46899, "kenneth": 46900, "hyung": 46901, "deja": 46902, "aseball": 46903, "speciality": 46904, "euston": 46905, "classiccar": 46906, "hadith": 46907, "ðŁIJī": 46908, "chasing": 46909, "izo": 46910, "grosven": 46911, "aglia": 46912, "thisdayinhistory": 46913, "trow": 46914, "omile": 46915, "huar": 46916, "byn": 46917, "saline": 46918, "divine": 46919, "demonic": 46920, "tyran": 46921, "handover": 46922, "revitalization": 46923, "paella": 46924, "cryptic": 46925, "sedg": 46926, "mend": 46927, "dunkirk": 46928, "bred": 46929, "wald": 46930, "sportscar": 46931, "aard": 46932, "wheaton": 46933, "daener": 46934, "klan": 46935, "brt": 46936, "bakhtawar": 46937, "spires": 46938, "schubert": 46939, "roti": 46940, "polish": 46941, "ose": 46942, "agame": 46943, "wondercon": 46944, "protestant": 46945, "bosa": 46946, "ðŁĺŁ": 46947, "dü": 46948, "joyride": 46949, "gertrude": 46950, "âĿĿ": 46951, "gila": 46952, "vh": 46953, "twa": 46954, "trav": 46955, "swallowed": 46956, "starve": 46957, "lain": 46958, "entren": 46959, "reiki": 46960, "sukh": 46961, "craic": 46962, "azu": 46963, "webpage": 46964, "keefe": 46965, "hypothe": 46966, "hirsch": 46967, "helle": 46968, "campground": 46969, "wamy": 46970, "travi": 46971, "shahi": 46972, "sandeep": 46973, "rui": 46974, "hanuman": 46975, "dwp": 46976, "repository": 46977, "noor": 46978, "noff": 46979, "unreal": 46980, "pell": 46981, "blackhistory": 46982, "harvick": 46983, "mascar": 46984, "payee": 46985, "pasha": 46986, "gastronomy": 46987, "dÃŃ": 46988, "aig": 46989, "rosenthal": 46990, "openday": 46991, "embellished": 46992, "ttip": 46993, "sunbathing": 46994, "gopack": 46995, "endome": 
46996, "ï¸ı#": 46997, "invalid": 46998, "finalfour": 46999, "stfu": 47000, "squishy": 47001, "rasta": 47002, "mosch": 47003, "jamesc": 47004, "dietrich": 47005, "sela": 47006, "melb": 47007, "elvi": 47008, "tdp": 47009, "suni": 47010, "slit": 47011, "jha": 47012, "biza": 47013, "spiked": 47014, "lli": 47015, "lillard": 47016, "vampi": 47017, "synopsis": 47018, "azhar": 47019, "kendricklamar": 47020, "ĮãĤĬãģŁãģĦ": 47021, "heartless": 47022, "countryfile": 47023, "airplay": 47024, "arrogance": 47025, "pree": 47026, "virtuoso": 47027, "ãħłãħłãħłãħł": 47028, "raju": 47029, "lebu": 47030, "forward": 47031, "tug": 47032, "dros": 47033, "mondaymotivaton": 47034, "concepcion": 47035, "thelo": 47036, "padi": 47037, "looool": 47038, "ÑĢод": 47039, "itss": 47040, "ethical": 47041, "enduro": 47042, "__:": 47043, "expenditure": 47044, "monste": 47045, "masking": 47046, "terriers": 47047, "ibis": 47048, "ember": 47049, "cumple": 47050, "punctuation": 47051, "piper": 47052, "irvin": 47053, "adee": 47054, "yyyyyy": 47055, "flashbacks": 47056, "celsius": 47057, "donnie": 47058, "bogota": 47059, "benevol": 47060, "thescript": 47061, "shilpa": 47062, "prose": 47063, "findia": 47064, "zeke": 47065, "neko": 47066, "doves": 47067, "blueslyrix": 47068, "frosh": 47069, "soweto": 47070, "mplo": 47071, "alai": 47072, "sabi": 47073, "raqqa": 47074, "wftv": 47075, "stroller": 47076, "iansomerhalder": 47077, "ðŁĶª": 47078, "anon": 47079, "moseley": 47080, "!?!?": 47081, "staking": 47082, "moly": 47083, "cartri": 47084, "csg": 47085, "astor": 47086, "transcend": 47087, "maer": 47088, "deux": 47089, "cowgirl": 47090, "sask": 47091, "punter": 47092, "maken": 47093, "oates": 47094, "lovett": 47095, "growler": 47096, "sagin": 47097, "vn": 47098, "ssible": 47099, "officeofrg": 47100, "ymc": 47101, "sabar": 47102, "faulty": 47103, "apha": 47104, "akon": 47105, "ðŁij«": 47106, "snowdon": 47107, "aew": 47108, "raisethe": 47109, "ðĿĵ": 47110, "gruesome": 47111, "clementine": 47112, "sping": 47113, "lata": 47114, "worldenviron": 47115, "mimic": 47116, "canaria": 47117, "bakhtawarbz": 47118, "aoa": 47119, "fala": 47120, "ãĤŃ": 47121, "aviva": 47122, "youuuu": 47123, "thigh": 47124, "ladders": 47125, "gumbo": 47126, "tzky": 47127, "fuzz": 47128, "plasticpollution": 47129, "estate": 47130, "strengthened": 47131, "kant": 47132, "drin": 47133, "calvert": 47134, "transformational": 47135, "frightened": 47136, "maclean": 47137, "elitedangerous": 47138, "earthy": 47139, "tson": 47140, "toda": 47141, "jnu": 47142, "..,": 47143, "michal": 47144, "iban": 47145, "jeong": 47146, "isreal": 47147, "simcoe": 47148, "exclusives": 47149, "bluebells": 47150, "bene": 47151, "teu": 47152, "pilsner": 47153, "penske": 47154, "atheists": 47155, "mpu": 47156, "cartagena": 47157, "ðŁĴĹðŁĴĹ": 47158, "millionaires": 47159, "kkkk": 47160, "itar": 47161, "subscriptions": 47162, "remote": 47163, "mafi": 47164, "hinton": 47165, "wcc": 47166, "hok": 47167, "dsb": 47168, "ableton": 47169, "seventy": 47170, "punks": 47171, "eindhoven": 47172, "shone": 47173, "mcfarlane": 47174, "limpopo": 47175, "emphasi": 47176, "ü": 47177, "sinfo": 47178, "petre": 47179, "mangrove": 47180, "chino": 47181, "bertie": 47182, "playlists": 47183, "pushawards": 47184, "paf": 47185, "debbie": 47186, "cdo": 47187, "rino": 47188, "ðŁı¾âĢįâĻĤï¸ı": 47189, "folke": 47190, "bonnar": 47191, "thine": 47192, "slan": 47193, "halter": 47194, "evie": 47195, "awsome": 47196, "vultures": 47197, "sparky": 47198, "seizures": 47199, "âľĶ": 47200, "ramone": 47201, "ineffe": 47202, "aln": 47203, 
"proctor": 47204, "astra": 47205, "thevoice": 47206, "grote": 47207, "scion": 47208, "deadline": 47209, "amaya": 47210, "tainted": 47211, "patterned": 47212, "exceeding": 47213, "crossfit": 47214, "kaylee": 47215, "dropbox": 47216, "rushes": 47217, "tackled": 47218, "moby": 47219, "retrogamer": 47220, "ncbd": 47221, "benefitting": 47222, "shaykh": 47223, "guildhall": 47224, "gentry": 47225, "dreamcast": 47226, "dreaded": 47227, "bundled": 47228, "thaw": 47229, "revolving": 47230, "npt": 47231, "kyliejenner": 47232, "imaginative": 47233, "roni": 47234, "overcame": 47235, "familytime": 47236, "dsburg": 47237, "carnaval": 47238, "relationship": 47239, "recognizable": 47240, "coroner": 47241, "hole": 47242, "fanfic": 47243, "emirates": 47244, "burritos": 47245, "analyse": 47246, "thinner": 47247, "nees": 47248, "gallipoli": 47249, "blr": 47250, "catwoman": 47251, "-->>": 47252, "ault": 47253, "adaily": 47254, "naughty": 47255, "ilio": 47256, "solitaire": 47257, "mtvbr": 47258, "jocelyn": 47259, "arunach": 47260, "repent": 47261, "southgate": 47262, "hyacin": 47263, "essential": 47264, "fenton": 47265, "andum": 47266, "itor": 47267, "gopal": 47268, "slinger": 47269, "posei": 47270, "awil": 47271, "wielding": 47272, "raila": 47273, "elias": 47274, "asto": 47275, "ä": 47276, "tendency": 47277, "strata": 47278, "kert": 47279, "<-": 47280, "imacele": 47281, "daes": 47282, "stimulus": 47283, "hanley": 47284, "fitnes": 47285, "ecstasy": 47286, "limous": 47287, "hailing": 47288, "ð٤Ń": 47289, "chiswick": 47290, "taries": 47291, "slav": 47292, "puli": 47293, "modernization": 47294, "blackmail": 47295, "bingham": 47296, "hfx": 47297, "++": 47298, "ðŁĩ®ðŁĩ³": 47299, "niv": 47300, "wea": 47301, "professor": 47302, "koff": 47303, "bolster": 47304, "suave": 47305, "sequences": 47306, "pepperoni": 47307, "notte": 47308, "dren": 47309, "ãģ¨ç¹ĭãģ": 47310, "hsv": 47311, "oga": 47312, "aptly": 47313, "zad": 47314, "excelsi": 47315, "rinka": 47316, "moldova": 47317, "minn": 47318, "mabel": 47319, "conferencing": 47320, "basing": 47321, "ofer": 47322, "obsi": 47323, "hamillhimself": 47324, "careless": 47325, "briefed": 47326, "inherent": 47327, "parish": 47328, "dubnation": 47329, "townsville": 47330, "sarawak": 47331, "geeky": 47332, "doncasterisgreat": 47333, "wasabi": 47334, "gup": 47335, "pheno": 47336, "drainthe": 47337, "carrieunderwood": 47338, "bleeds": 47339, "bbcworld": 47340, "anew": 47341, "altaf": 47342, "dulwich": 47343, "aniston": 47344, "wti": 47345, "sumatra": 47346, "grafton": 47347, "bln": 47348, "mester": 47349, "bodega": 47350, "rego": 47351, "esq": 47352, "anjo": 47353, "sumptuous": 47354, "maisie": 47355, "�": 47356, "wilt": 47357, "jakob": 47358, "elvis": 47359, "sepul": 47360, "muster": 47361, "airpollution": 47362, "presidente": 47363, "happymonday": 47364, "extensively": 47365, "flondon": 47366, "tls": 47367, "playing": 47368, "peed": 47369, "dinho": 47370, "vardy": 47371, "pika": 47372, "niro": 47373, "aucus": 47374, "ðŁį¦": 47375, "null": 47376, "elondon": 47377, "juventus": 47378, "imagines": 47379, "disab": 47380, "lito": 47381, "dura": 47382, "workplaces": 47383, "promote": 47384, "mccaf": 47385, "woodwork": 47386, "wawx": 47387, "ப": 47388, "ttino": 47389, "shari": 47390, "semper": 47391, "bettertogether": 47392, "ðŁijĬðŁı»": 47393, "zebra": 47394, "pondering": 47395, "enchil": 47396, "hom": 47397, "cosmic": 47398, "tanz": 47399, "mocked": 47400, "eccc": 47401, "athed": 47402, "abolish": 47403, "propeller": 47404, "parisagreement": 47405, "assemblies": 47406, "industry": 47407, 
"fraudulent": 47408, "pesa": 47409, "changmin": 47410, "axx": 47411, "ðŁĴµ": 47412, "irrational": 47413, "cusa": 47414, "ramadhan": 47415, "octavia": 47416, "onelove": 47417, "jacki": 47418, "barak": 47419, "taxider": 47420, "serious": 47421, "nathanfillion": 47422, "mcen": 47423, "chk": 47424, "popart": 47425, "gravity": 47426, "coppola": 47427, "readingfc": 47428, "illusions": 47429, "jig": 47430, "wwx": 47431, "resh": 47432, "exporting": 47433, "buzzard": 47434, "âϤ": 47435, "pcm": 47436, "lanapar": 47437, "kos": 47438, "aromas": 47439, "antalya": 47440, "wwdc": 47441, "vena": 47442, "phila": 47443, "ballin": 47444, "ðŁijĦ": 47445, "quinta": 47446, "mao": 47447, "fery": 47448, "eighty": 47449, "sentiments": 47450, "safeguarding": 47451, "rwa": 47452, "puffs": 47453, "lucille": 47454, "decath": 47455, "slu": 47456, "nugent": 47457, "deter": 47458, "brazil": 47459, "zeiss": 47460, "superbowl": 47461, "subsidy": 47462, "altern": 47463, "hidalgo": 47464, "enzymes": 47465, "ä½": 47466, "tagne": 47467, "hairdresser": 47468, "adrien": 47469, "walkout": 47470, "opposes": 47471, "cantina": 47472, "bedside": 47473, "afan": 47474, "ðŁĶĹ": 47475, "prophetic": 47476, "danes": 47477, "unsuccessful": 47478, "supercharged": 47479, "pkk": 47480, "exemption": 47481, "hartle": 47482, "secular": 47483, "clipping": 47484, "brs": 47485, "unitedway": 47486, "cnet": 47487, "patchy": 47488, "hagan": 47489, "een": 47490, "âļľ": 47491, "vara": 47492, "sympathi": 47493, "nevertrump": 47494, "affirmation": 47495, "omf": 47496, "nycfc": 47497, "maja": 47498, "surro": 47499, "keerth": 47500, "upscale": 47501, "sandalwood": 47502, "monarchy": 47503, "knobs": 47504, "åĭ": 47505, "potholes": 47506, "hungergames": 47507, "terraces": 47508, "nasir": 47509, "counsell": 47510, "welcometo": 47511, "waq": 47512, "seaman": 47513, "mita": 47514, "stunningly": 47515, "ontheroad": 47516, "inability": 47517, ")!!": 47518, "bongo": 47519, "antv": 47520, "sput": 47521, "worldenvironmentday": 47522, "resusc": 47523, "ytd": 47524, "fim": 47525, "eunhyuk": 47526, "sachin": 47527, "roseanne": 47528, "clermont": 47529, "apec": 47530, "amina": 47531, "vening": 47532, "nantes": 47533, "almost": 47534, "sinus": 47535, "exas": 47536, "tyl": 47537, "tien": 47538, "plead": 47539, "lancs": 47540, "burnaby": 47541, "rek": 47542, "joom": 47543, "observers": 47544, "discography": 47545, "clg": 47546, "âϦ": 47547, "snack": 47548, "rti": 47549, "oily": 47550, "crystalli": 47551, "brute": 47552, "webdevelopment": 47553, "toppings": 47554, "laf": 47555, "anis": 47556, "adder": 47557, "reliving": 47558, "carlin": 47559, "battleof": 47560, "weg": 47561, "syrian": 47562, "pont": 47563, "ndc": 47564, "laghate": 47565, "yuma": 47566, "spp": 47567, "piti": 47568, "robbing": 47569, "marting": 47570, "reykja": 47571, "rajput": 47572, "ncds": 47573, "kiewicz": 47574, "âĢ¢âĢ¢": 47575, "vampire": 47576, "substantially": 47577, "opioids": 47578, "nepali": 47579, "kline": 47580, "aroo": 47581, "understand": 47582, "litt": 47583, "uit": 47584, "thrombo": 47585, "saries": 47586, "quot": 47587, "balling": 47588, "ttr": 47589, "sgh": 47590, "philipp": 47591, "brant": 47592, "acl": 47593, "mello": 47594, "whittaker": 47595, ".;": 47596, "defiant": 47597, "bgc": 47598, "replying": 47599, "mirren": 47600, "metamorpho": 47601, "schwab": 47602, "bulge": 47603, "utilized": 47604, "pickering": 47605, "pardon": 47606, "dsa": 47607, "à¸Ī": 47608, "dooley": 47609, "cumulative": 47610, "л": 47611, "urgency": 47612, "emir": 47613, "+/-": 47614, "¦Ī": 47615, "otas": 47616, "âı³": 
47617, "stationed": 47618, "grapevine": 47619, "arac": 47620, "karanjohar": 47621, "fancy": 47622, "saul": 47623, "coogs": 47624, "lgbtq": 47625, "اÙħ": 47626, "javi": 47627, "ummer": 47628, "pll": 47629, "denis": 47630, "daipur": 47631, "puffin": 47632, "lewisham": 47633, "fandom": 47634, "cope": 47635, "vesmatter": 47636, "sve": 47637, "helpless": 47638, "deodor": 47639, "ostrich": 47640, "kazan": 47641, "fridaythe": 47642, "condor": 47643, "vx": 47644, "sophomores": 47645, "robles": 47646, "cutt": 47647, "climbers": 47648, "리": 47649, "sleg": 47650, "snf": 47651, "macys": 47652, "hydrating": 47653, "groupe": 47654, "poyn": 47655, "moulin": 47656, "hgtv": 47657, "lmfaooo": 47658, "sulphur": 47659, "asdfghjkl": 47660, "annabelle": 47661, "humpback": 47662, "braved": 47663, "viswasam": 47664, "multipurpose": 47665, "humidi": 47666, "escorted": 47667, "barbican": 47668, "fad": 47669, "corsa": 47670, "ðŁ¤«": 47671, "pippa": 47672, "hereto": 47673, "cany": 47674, "sergi": 47675, "orcas": 47676, "ovie": 47677, "edou": 47678, "sany": 47679, "globalization": 47680, "mancini": 47681, "foodtruck": 47682, "fis": 47683, "defibrill": 47684, "schre": 47685, "smafia": 47686, "lovewins": 47687, "laut": 47688, "kaka": 47689, "hollande": 47690, "gameon": 47691, "resurgence": 47692, "outside": 47693, "olympiad": 47694, "intan": 47695, "abstraction": 47696, "rapid": 47697, "palom": 47698, "calle": 47699, "jasmin": 47700, "attackers": 47701, "swagg": 47702, "mitra": 47703, "kylo": 47704, "ல": 47705, "hermitage": 47706, "gordo": 47707, "eira": 47708, "sosfam": 47709, "rollout": 47710, "excite": 47711, "synod": 47712, "merrill": 47713, "cals": 47714, "assa": 47715, "livelihoods": 47716, "juve": 47717, "theblack": 47718, "gopackgo": 47719, "antlers": 47720, "albanian": 47721, "woolly": 47722, "quiche": 47723, "purification": 47724, "areth": 47725, "smarthome": 47726, "nek": 47727, "allblacks": 47728, "mexicans": 47729, "ism": 47730, "germs": 47731, "complexion": 47732, "marck": 47733, "ushi": 47734, "ðŁIJIJ": 47735, "charl": 47736, "castic": 47737, "tillerson": 47738, "giuliani": 47739, "biodegradable": 47740, "malbec": 47741, "bois": 47742, "jubil": 47743, "imes": 47744, "rame": 47745, "genetic": 47746, "espnu": 47747, "chley": 47748, "soho": 47749, "gopher": 47750, "gsc": 47751, "buuren": 47752, "cube": 47753, "bridesmaids": 47754, "webinars": 47755, "toe": 47756, "manipur": 47757, "violently": 47758, "noticias": 47759, "exchanging": 47760, "chiev": 47761, "replaceable": 47762, "muaythai": 47763, "buss": 47764, "spil": 47765, "instalment": 47766, "divya": 47767, "caitlin": 47768, "olim": 47769, "filtering": 47770, "whirlwind": 47771, "stared": 47772, "priorit": 47773, "pram": 47774, "pompeii": 47775, "monologue": 47776, "kite": 47777, "buka": 47778, "â̦..": 47779, "vaccine": 47780, "brero": 47781, "wozni": 47782, "solent": 47783, "referr": 47784, "myrt": 47785, "gridiron": 47786, "galatasaray": 47787, "froze": 47788, "claremont": 47789, "ðŁ¥ĥ": 47790, "victorias": 47791, "sseldorf": 47792, "pastures": 47793, "netneutrality": 47794, "chor": 47795, "ðŁijģ": 47796, "ಿ": 47797, "weho": 47798, "symptom": 47799, "josel": 47800, "inous": 47801, "dragoncon": 47802, "powerball": 47803, "pte": 47804, "fourthofjuly": 47805, "ecla": 47806, "earbuds": 47807, "whereabouts": 47808, "saltlife": 47809, "deprivation": 47810, "chter": 47811, "wiggle": 47812, "system": 47813, "psst": 47814, "chaz": 47815, "dany": 47816, "rimo": 47817, "oaxaca": 47818, "lanaparrilla": 47819, "barcelon": 47820, "melancholy": 47821, "wayback": 
47822, "hotro": 47823, "nsi": 47824, "lilly": 47825, "kuro": 47826, "jahan": 47827, "intellect": 47828, "boardgame": 47829, "ðŁıĬ": 47830, "sneakpeek": 47831, "kprc": 47832, "jails": 47833, "candel": 47834, "zanzi": 47835, "mortimer": 47836, "starch": 47837, "rags": 47838, "pfa": 47839, "longlive": 47840, "kart": 47841, "girona": 47842, "crocker": 47843, "christoph": 47844, "precautions": 47845, "warship": 47846, "perm": 47847, "parent": 47848, "vangogh": 47849, "gifford": 47850, "allegheny": 47851, "rayn": 47852, "utm": 47853, "stencil": 47854, "recalling": 47855, "penney": 47856, "zazzle": 47857, "ìĥĿ": 47858, "hinds": 47859, "arenas": 47860, "nuev": 47861, "lawler": 47862, "guin": 47863, "dothis": 47864, "ðŁijķ": 47865, "ì¶ķíķĺ": 47866, "weg": 47867, "tib": 47868, "ridin": 47869, "complexes": 47870, "turbulent": 47871, "pesos": 47872, "demarcus": 47873, "vallarta": 47874, "samsun": 47875, "kisses": 47876, "heinrich": 47877, "deportes": 47878, "wilms": 47879, "urd": 47880, "thenext": 47881, "inkigayo": 47882, "howi": 47883, "firsts": 47884, "carriage": 47885, "cleanliness": 47886, "maswar": 47887, "isch": 47888, "axel": 47889, "sizzle": 47890, "roadhouse": 47891, "frans": 47892, "entourage": 47893, "cobble": 47894, "booth": 47895, "benedict": 47896, "talon": 47897, "fcu": 47898, "yearofthe": 47899, "rayon": 47900, "raidernation": 47901, "foyle": 47902, "koval": 47903, "pianos": 47904, "lpg": 47905, "burmese": 47906, "manure": 47907, "geocaching": 47908, "coscino": 47909, "bnp": 47910, "ferra": 47911, "strophy": 47912, "marais": 47913, "cees": 47914, "legendof": 47915, "katniss": 47916, "enoch": 47917, "aved": 47918, "youknow": 47919, "dprk": 47920, "ðŁĺ¢ðŁĺ¢": 47921, "spun": 47922, "prost": 47923, "sorrows": 47924, "centred": 47925, "kea": 47926, "galicia": 47927, "?ð٤Ķ": 47928, "ÑĢода": 47929, "bouchard": 47930, "ðŁĴĻðŁĴľ": 47931, "yui": 47932, "seedlings": 47933, "jonah": 47934, "recovers": 47935, "nyrd": 47936, "boardroom": 47937, "suma": 47938, "myjaps": 47939, "tung": 47940, "shai": 47941, "irgc": 47942, "elio": 47943, "wagons": 47944, "kashi": 47945, "policemen": 47946, "johnnie": 47947, "alecoscino": 47948, "shopify": 47949, "dotted": 47950, "detri": 47951, "vaw": 47952, "tofficial": 47953, "inyour": 47954, "chalmers": 47955, "traced": 47956, "novi": 47957, "byes": 47958, "ariel": 47959, "nippon": 47960, "lapel": 47961, "griez": 47962, "bgs": 47963, "fooling": 47964, "dita": 47965, "vijaysethu": 47966, "nmwx": 47967, "asot": 47968, "kranti": 47969, "helm": 47970, "vedi": 47971, "sickest": 47972, "mochi": 47973, "kabo": 47974, "shrubs": 47975, "hered": 47976, "bsp": 47977, "sqm": 47978, "hamr": 47979, "dulkar": 47980, "antha": 47981, "nrf": 47982, "avoidance": 47983, "aten": 47984, "publix": 47985, "bearers": 47986, "nasi": 47987, "hap": 47988, "hells": 47989, "ðŁĸ¥": 47990, "ื": 47991, "thelastjedi": 47992, "ohwx": 47993, "ðŁį«": 47994, "wahoo": 47995, "therese": 47996, "recaps": 47997, "ssnhq": 47998, "birdphotography": 47999, "vay": 48000, "petti": 48001, "paulo": 48002, "belvedere": 48003, "(*": 48004, "grl": 48005, "duvet": 48006, "cpec": 48007, "sait": 48008, "porsch": 48009, "measurable": 48010, "aviators": 48011, "fremantle": 48012, "breen": 48013, "onom": 48014, "meand": 48015, "lifesaving": 48016, "euref": 48017, "endon": 48018, "embaras": 48019, "airasia": 48020, "elis": 48021, "dunkin": 48022, "starmagic": 48023, "sill": 48024, "portobello": 48025, "kiefer": 48026, "exe": 48027, "muted": 48028, "ãģ¦": 48029, "wethepeople": 48030, "logia": 48031, "liberal": 48032, 
"theforceawakens": 48033, "mined": 48034, "haunts": 48035, "freckles": 48036, "caretaker": 48037, "sindia": 48038, "âķIJ": 48039, "devlin": 48040, "liston": 48041, "directioner": 48042, "ohn": 48043, "figaro": 48044, "emmanuel": 48045, "dubois": 48046, "clones": 48047, "bruise": 48048, "ðŁİĪðŁİī": 48049, "disinfe": 48050, "dermatology": 48051, "asr": 48052, "swatch": 48053, "discomfort": 48054, "tamanna": 48055, "piday": 48056, "macken": 48057, "katic": 48058, "delusional": 48059, "shawnee": 48060, "gud": 48061, "albino": 48062, "pali": 48063, "dingh": 48064, "cucumbers": 48065, "coffey": 48066, "anticipating": 48067, "treasured": 48068, "websummit": 48069, "sheltered": 48070, "savor": 48071, "pedagogy": 48072, "mgs": 48073, "shma": 48074, "sbu": 48075, "denali": 48076, "campos": 48077, "bubblegum": 48078, "oir": 48079, "leaps": 48080, "yler": 48081, "rone": 48082, "sanskrit": 48083, "mint": 48084, "meatless": 48085, "futurist": 48086, "dude": 48087, "avel": 48088, "protested": 48089, "squire": 48090, "zaki": 48091, "szn": 48092, "harcourt": 48093, "cyclone": 48094, "bourdain": 48095, "gatherings": 48096, "dant": 48097, "adventurer": 48098, "paragon": 48099, "altman": 48100, "dding": 48101, "banerjee": 48102, "snorkeling": 48103, "motherwell": 48104, "missy": 48105, "ender": 48106, "glows": 48107, "kiwis": 48108, "chickpea": 48109, "poro": 48110, "efron": 48111, "appt": 48112, "uy": 48113, "specified": 48114, "gabby": 48115, "estrada": 48116, "combos": 48117, "bourbon": 48118, "vini": 48119, "varun": 48120, "stephani": 48121, "keywords": 48122, "carvings": 48123, "amitabh": 48124, "wrought": 48125, "twal": 48126, "reels": 48127, "clubbing": 48128, "ubiquit": 48129, "crit": 48130, "ambedkar": 48131, "æĻ": 48132, "pruning": 48133, "vaccinated": 48134, "boeing": 48135, "sks": 48136, "loona": 48137, "hypnosis": 48138, "edelman": 48139, "phol": 48140, "hew": 48141, "colosse": 48142, "mckinsey": 48143, "uon": 48144, "tote": 48145, "sacrificing": 48146, "oxi": 48147, "nang": 48148, "emu": 48149, "пÑĢиÑĢода": 48150, "mth": 48151, "kerswednesday": 48152, "argued": 48153, "timelapse": 48154, "risking": 48155, "regulating": 48156, "nigh": 48157, "likelihood": 48158, "cubic": 48159, "auction": 48160, "reinfor": 48161, "pistor": 48162, "noses": 48163, "yel": 48164, "snuggles": 48165, "pei": 48166, "jeanette": 48167, "taku": 48168, "rith": 48169, "guyz": 48170, "à¸ŀ": 48171, "yte": 48172, "verted": 48173, "paysoff": 48174, "jauregui": 48175, "hooligans": 48176, "procedural": 48177, "mib": 48178, "hardy": 48179, "eleng": 48180, "checkers": 48181, "alline": 48182, "themet": 48183, "proudof": 48184, "keerthyofficial": 48185, "collaborator": 48186, "niu": 48187, "inflicted": 48188, "advani": 48189, "retwee": 48190, "memoriam": 48191, "ficial": 48192, "tighter": 48193, "salem": 48194, "reviewers": 48195, "brics": 48196, "bendigo": 48197, "amell": 48198, "turkish": 48199, "sushmaswar": 48200, "paulson": 48201, "palawan": 48202, "mollie": 48203, "stitcher": 48204, "sburgh": 48205, "iru": 48206, "haydn": 48207, "eners": 48208, "aroa": 48209, "uzzi": 48210, "sarajevo": 48211, "hela": 48212, "apollo": 48213, "ninety": 48214, "vaca": 48215, "spon": 48216, "ventu": 48217, "jelena": 48218, "heifer": 48219, "avoids": 48220, "spine": 48221, "prize": 48222, "marist": 48223, "recreating": 48224, "mede": 48225, "wooden": 48226, "findlay": 48227, "rofl": 48228, "ndi": 48229, "comprehend": 48230, "yugo": 48231, "yü": 48232, "towork": 48233, "ufos": 48234, "sonar": 48235, "piston": 48236, "recording": 48237, "tentative": 
48238, "artforsale": 48239, "pellets": 48240, "fredo": 48241, "ÙĪØ±": 48242, "muses": 48243, "customization": 48244, "profound": 48245, "isner": 48246, "ideally": 48247, "siam": 48248, "plankton": 48249, "cmdr": 48250, "manger": 48251, "franken": 48252, "customizable": 48253, "म": 48254, "walkaway": 48255, "swivel": 48256, "vastly": 48257, "noton": 48258, "lexa": 48259, "exmoor": 48260, "zas": 48261, "tante": 48262, "reductions": 48263, "lolly": 48264, "hipsters": 48265, "benefited": 48266, "ë²": 48267, "wwwww": 48268, "masculine": 48269, "fiji": 48270, "drey": 48271, "phill": 48272, "aneous": 48273, "nicol": 48274, "mendez": 48275, "disappro": 48276, "chner": 48277, "throughs": 48278, "shenmue": 48279, "eastman": 48280, "ðŁIJİ": 48281, "yuck": 48282, "undertale": 48283, "reys": 48284, "gobeavs": 48285, "engen": 48286, "cna": 48287, "merr": 48288, "birk": 48289, "ãģ¨ç¹ĭãģĮãĤĬãģŁãģĦ": 48290, "âĥ£@": 48291, "ynna": 48292, "steed": 48293, "offender": 48294, "atum": 48295, "vanishing": 48296, "presidenti": 48297, "lovethem": 48298, "gnocchi": 48299, "friggin": 48300, "peril": 48301, "madhya": 48302, "agne": 48303, "deejay": 48304, "marnock": 48305, "mtb": 48306, "foldable": 48307, "@___": 48308, "standre": 48309, "bronx": 48310, "bowski": 48311, "finite": 48312, "crockett": 48313, "bsf": 48314, "getit": 48315, "serenawilliams": 48316, "miro": 48317, "ignatius": 48318, "slay": 48319, "rinse": 48320, "fondue": 48321, "seldom": 48322, "smore": 48323, "gani": 48324, "dyce": 48325, "dmitry": 48326, "crumb": 48327, "latepost": 48328, "primark": 48329, "ohana": 48330, "florals": 48331, "doa": 48332, "remembranceday": 48333, "dds": 48334, "azione": 48335, "toonami": 48336, "airport": 48337, "æĿ±": 48338, "thad": 48339, "fist": 48340, "dinesh": 48341, "drwho": 48342, "adwords": 48343, "admirer": 48344, "proje": 48345, "kyrgyz": 48346, "à«": 48347, "manifestation": 48348, "lewan": 48349, "jic": 48350, "thibau": 48351, "leased": 48352, "vanity": 48353, "nourished": 48354, "nevertheless": 48355, "augmente": 48356, "fuelled": 48357, "chead": 48358, "wilshere": 48359, "rudi": 48360, "pz": 48361, "myco": 48362, "morro": 48363, "herbalife": 48364, "hardrock": 48365, "deman": 48366, "dreality": 48367, "spades": 48368, "cevic": 48369, "bhai": 48370, "baron": 48371, "ultimatefan": 48372, "hounews": 48373, "tobi": 48374, "strut": 48375, "keel": 48376, "affiliation": 48377, "themasters": 48378, "smal": 48379, "hue": 48380, "esteban": 48381, "conv": 48382, "omnic": 48383, "databases": 48384, "cov": 48385, "terti": 48386, "stg": 48387, "snoopdogg": 48388, "metabol": 48389, "lethbridge": 48390, "ðŁı»âĢįâĻĢï¸ı": 48391, "yearling": 48392, "residentevil": 48393, "nwsl": 48394, "iyaki": 48395, "griezmann": 48396, "cous": 48397, "ðŁĵĿ:": 48398, "torian": 48399, "sami": 48400, "ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥": 48401, "gare": 48402, "alliances": 48403, "whitfield": 48404, "wether": 48405, "refining": 48406, "coyi": 48407, "kraken": 48408, "ðŁĺĺâĿ¤": 48409, "singularity": 48410, "lili": 48411, "hns": 48412, "boldand": 48413, "wawrinka": 48414, "misogyny": 48415, "lovers": 48416, "cq": 48417, "bdg": 48418, "adona": 48419, "garter": 48420, "womenof": 48421, "scd": 48422, "recognising": 48423, "muna": 48424, "strou": 48425, "signalling": 48426, "laredo": 48427, "hellboy": 48428, "aleksand": 48429, "unavailable": 48430, "pediatric": 48431, "asin": 48432, "meria": 48433, "rishi": 48434, "futurism": 48435, "wye": 48436, "polarized": 48437, "ewe": 48438, "propel": 48439, "informs": 48440, "crease": 48441, "~\"": 48442, "artiston": 48443, 
"likefor": 48444, "heidelberg": 48445, "erra": 48446, "lifein": 48447, "lenny": 48448, "interrupt": 48449, "coherent": 48450, "caz": 48451, "vickers": 48452, "leveled": 48453, "fbs": 48454, "cabins": 48455, "bummed": 48456, "apostles": 48457, "weh": 48458, "tendon": 48459, "souvenirs": 48460, "infuri": 48461, "pierce": 48462, "asset": 48463, "mlas": 48464, "goth": 48465, "diggin": 48466, "annas": 48467, "ylor": 48468, "thwaite": 48469, "swel": 48470, "panera": 48471, "murderers": 48472, "crooked": 48473, "bsgo": 48474, "acu": 48475, "aon": 48476, "rean": 48477, "oneof": 48478, "kohl": 48479, "bloodh": 48480, "pesticide": 48481, "lostdog": 48482, "flexing": 48483, "ëĤĺ": 48484, "supra": 48485, "eternally": 48486, "ðŁļĻ": 48487, "paolo": 48488, "olan": 48489, "momo": 48490, "iselle": 48491, "captainmarvel": 48492, "slou": 48493, "mistakenly": 48494, "akhilesh": 48495, "mert": 48496, "ilinan": 48497, "buon": 48498, "balkan": 48499, "mirro": 48500, "millen": 48501, "derail": 48502, "damon": 48503, "titi": 48504, "bios": 48505, "redon": 48506, "picard": 48507, "parte": 48508, "ðŁ¤Ł": 48509, "غ": 48510, "sonics": 48511, "firsth": 48512, "ddc": 48513, "vegans": 48514, "turban": 48515, "nigan": 48516, "lottie": 48517, "lyndon": 48518, "starbuck": 48519, "pinkfloyd": 48520, "lifestyles": 48521, "amara": 48522, "ashe": 48523, "rsc": 48524, "vala": 48525, "smer": 48526, "cwgc": 48527, "client": 48528, "buenas": 48529, "jagan": 48530, "coops": 48531, "ðŁijijðŁijij": 48532, "specializes": 48533, "snagged": 48534, "glar": 48535, "bennet": 48536, "wildlifewednesday": 48537, "bowden": 48538, "pik": 48539, "artin": 48540, "emporium": 48541, "arl": 48542, "reba": 48543, "passer": 48544, "disappoints": 48545, "additive": 48546, "âľĬðŁı½": 48547, "bayer": 48548, "missoula": 48549, "haskell": 48550, "commences": 48551, "nix": 48552, "neman": 48553, "exploited": 48554, "plasticsurgery": 48555, "ccd": 48556, "asocial": 48557, "vot": 48558, "siegel": 48559, "froome": 48560, "kapam": 48561, "fara": 48562, "eha": 48563, "probes": 48564, "mwf": 48565, "meeting": 48566, "pbb": 48567, "akins": 48568, "mistletoe": 48569, "kingdomhearts": 48570, "forkids": 48571, "ecr": 48572, "bale": 48573, "escorts": 48574, "adidasoriginals": 48575, "kwa": 48576, "kts": 48577, "halloffame": 48578, "ðŁĺį.": 48579, "wags": 48580, "potted": 48581, "owing": 48582, "honeycomb": 48583, "hefty": 48584, "urology": 48585, "merle": 48586, "bpd": 48587, "stripping": 48588, "reich": 48589, "kstate": 48590, "guay": 48591, "yonge": 48592, "shakti": 48593, "gloom": 48594, "batt": 48595, "sonom": 48596, "nery": 48597, "elba": 48598, "blanks": 48599, "helle": 48600, "triplets": 48601, "bombay": 48602, "akarta": 48603, "abia": 48604, "transmitted": 48605, "rolf": 48606, "jais": 48607, "angularjs": 48608, "fierc": 48609, "mss": 48610, "trace": 48611, "à¥ĩ": 48612, "tombs": 48613, "oldman": 48614, "kombucha": 48615, "fol": 48616, "ehealth": 48617, "cereals": 48618, "arelli": 48619, "inari": 48620, "ðŁĴ©": 48621, "wol": 48622, "liberties": 48623, "fawn": 48624, "affirm": 48625, "nunavut": 48626, "hysterical": 48627, "kdrama": 48628, "artes": 48629, "âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢": 48630, "valentin": 48631, "manslaughter": 48632, "gales": 48633, "eoin": 48634, "energized": 48635, "dels": 48636, "withdraws": 48637, "stles": 48638, "sarcastic": 48639, "ramesh": 48640, "incredibles": 48641, "lockhart": 48642, "yawn": 48643, "ultimatefanlive": 48644, "oooooooooooooooo": 48645, "muen": 48646, "gurudev": 48647, "teer": 48648, "peeling": 48649, "newsnow": 48650, 
"linguistics": 48651, "directv": 48652, "agend": 48653, "unilever": 48654, "ruger": 48655, "handedly": 48656, "erose": 48657, "limel": 48658, "thec": 48659, "royalties": 48660, "finishers": 48661, "nrg": 48662, "mgt": 48663, "fidget": 48664, "comps": 48665, "bacon": 48666, "aggressively": 48667, "abit": 48668, "châ": 48669, "tarde": 48670, "slugger": 48671, "qanda": 48672, "greening": 48673, "dats": 48674, "enslaved": 48675, "spector": 48676, "oye": 48677, "freef": 48678, "bhand": 48679, "stopbrexit": 48680, "misconceptions": 48681, "cava": 48682, "ðŁĺįðŁĺįðŁĺįðŁĺįðŁĺįðŁĺįðŁĺįðŁĺį": 48683, "multitasking": 48684, "housel": 48685, "ferreira": 48686, "centime": 48687, "ankles": 48688, "jodh": 48689, "helly": 48690, "frome": 48691, "outtuesday": 48692, "narnia": 48693, "balaji": 48694, "lbloggers": 48695, "jyoti": 48696, "ðŁįĩ": 48697, "lancia": 48698, "capri": 48699, "yap": 48700, "natash": 48701, "downfall": 48702, ".\"âĢĶ": 48703, "î": 48704, "ligament": 48705, "coatings": 48706, "aided": 48707, "hiko": 48708, "falling": 48709, "encrypted": 48710, "yegfood": 48711, "infringement": 48712, "cudi": 48713, "cep": 48714, "ðŁĺįðŁĺĤ": 48715, "trad": 48716, "superrugby": 48717, "edwin": 48718, "whiche": 48719, "vimeo": 48720, "layne": 48721, "invigor": 48722, "hehe": 48723, "dubrovnik": 48724, "bieber": 48725, "utr": 48726, "shaman": 48727, "opers": 48728, "hamill": 48729, "enig": 48730, "dif": 48731, "arum": 48732, "scrapbook": 48733, "minh": 48734, "divergence": 48735, "mckinnon": 48736, "lifetime": 48737, "guterres": 48738, "wille": 48739, "pleas": 48740, "patty": 48741, "micron": 48742, "kz": 48743, "domaine": 48744, "rusher": 48745, "mds": 48746, "chesney": 48747, "screwdriver": 48748, "âģ©,": 48749, "sledge": 48750, "hauer": 48751, "chana": 48752, "stamina": 48753, "sprinkler": 48754, "pln": 48755, "heff": 48756, "bolton": 48757, "omon": 48758, "carrington": 48759, "accordion": 48760, "jorge": 48761, "interception": 48762, "inputs": 48763, "gull": 48764, "transcription": 48765, "vanuatu": 48766, "itical": 48767, "ethos": 48768, "tich": 48769, "spacey": 48770, "peeking": 48771, "umi": 48772, "hager": 48773, "psychotic": 48774, "illian": 48775, "illia": 48776, "bonnaroo": 48777, "anese": 48778, "puc": 48779, "laghateparth": 48780, "enhall": 48781, "economical": 48782, "dredge": 48783, "%-": 48784, "uwe": 48785, "tubular": 48786, "scouncil": 48787, "peasants": 48788, "fler": 48789, "tumbler": 48790, "hep": 48791, "fordham": 48792, "rowley": 48793, "initials": 48794, "evasion": 48795, "ernation": 48796, "plugins": 48797, "cochran": 48798, "cattle": 48799, "acidity": 48800, "ðŁİĬðŁİī": 48801, "regrann": 48802, "jumpman": 48803, "eface": 48804, "xma": 48805, "patriarchy": 48806, "escobar": 48807, "cristian": 48808, "tipton": 48809, "nueva": 48810, "hackney": 48811, "backseat": 48812, "killarney": 48813, "aidan": 48814, "stadion": 48815, "simultaneous": 48816, "idaho": 48817, "aje": 48818, "uth": 48819, "figure": 48820, "clos": 48821, "burk": 48822, "voluntar": 48823, "recite": 48824, "macfarlane": 48825, "curfew": 48826, "boudo": 48827, "wgn": 48828, "stix": 48829, "slap": 48830, "scratched": 48831, "phillip": 48832, "journe": 48833, "expelled": 48834, "waz": 48835, "uke": 48836, "tatiana": 48837, "oue": 48838, "hopp": 48839, "dimitri": 48840, "ðŁĵ£": 48841, "matologist": 48842, "electrifying": 48843, "bluffs": 48844, "billsmafia": 48845, "azcardinals": 48846, "yaa": 48847, "xmas": 48848, "shara": 48849, "rith": 48850, "gills": 48851, "dres": 48852, "barton": 48853, "authorization": 48854, 
"imperialism": 48855, "homeof": 48856, "todo": 48857, "footpath": 48858, "bandwidth": 48859, "visitspain": 48860, "mohsin": 48861, "erupted": 48862, "miki": 48863, "insignia": 48864, "mikel": 48865, "ssh": 48866, "gera": 48867, "bankholiday": 48868, "awan": 48869, "tweak": 48870, "starcraft": 48871, "eal": 48872, "construction": 48873, "skeletons": 48874, "leep": 48875, "inem": 48876, "barclay": 48877, "shipwreck": 48878, "monsieur": 48879, "yoh": 48880, "ront": 48881, "formative": 48882, "sero": 48883, "lep": 48884, "horseman": 48885, "hoosier": 48886, "hazmat": 48887, "cylinders": 48888, "centi": 48889, "ðŁĴ¥ðŁĴ¥ðŁĴ¥": 48890, "reem": 48891, "naire": 48892, "musically": 48893, "grasshopper": 48894, "estonian": 48895, "terminology": 48896, "romain": 48897, "bloggerrt": 48898, "toxin": 48899, "stance": 48900, "cultivated": 48901, "anast": 48902, "ðŁIJį": 48903, "shimano": 48904, "gopher": 48905, "enei": 48906, "recyclable": 48907, "gamification": 48908, "fightfor": 48909, "cq": 48910, "avocados": 48911, "keys": 48912, "elike": 48913, "glycer": 48914, "shakur": 48915, "mobilization": 48916, "galley": 48917, "explain": 48918, "exchanged": 48919, "peth": 48920, "obedience": 48921, "illage": 48922, "ennis": 48923, "ãĥŀ": 48924, "wiv": 48925, "wallabies": 48926, "maar": 48927, "igers": 48928, "fintech": 48929, "finalized": 48930, "woj": 48931, "meaningless": 48932, "infield": 48933, "onnaise": 48934, "eet": 48935, "bronte": 48936, "passages": 48937, "ðŁij§": 48938, "strickland": 48939, "northernlights": 48940, "lomond": 48941, "htc": 48942, "wray": 48943, "shifter": 48944, "dialog": 48945, "ðŁįį": 48946, ">>>>>>": 48947, "teatime": 48948, "stech": 48949, "sichuan": 48950, "quill": 48951, "franca": 48952, "complementary": 48953, "barrington": 48954, "marcus": 48955, "malam": 48956, "goooo": 48957, "forsa": 48958, "electra": 48959, "afs": 48960, "âĹĨ": 48961, "trife": 48962, "snazzy": 48963, "folia": 48964, "andolan": 48965, "afterdark": 48966, "woodson": 48967, "strade": 48968, "littlest": 48969, "ogun": 48970, "conwy": 48971, "cowards": 48972, "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 48973, "íĬ¸": 48974, "seul": 48975, "murphy": 48976, "dunks": 48977, "kapilshar": 48978, "joachim": 48979, "womack": 48980, "equality": 48981, "averages": 48982, "aine": 48983, "ð٦Ī": 48984, "tacular": 48985, "disability": 48986, "uked": 48987, "midcentury": 48988, "barthol": 48989, "teasers": 48990, "tabern": 48991, "njcaa": 48992, "spout": 48993, "opi": 48994, "kubball": 48995, "blom": 48996, "soar": 48997, "populism": 48998, "methyl": 48999, "ðŁijĬðŁı¼": 49000, "ospre": 49001, "aloils": 49002, "ðŁĵĸ": 49003, "ðŁĮļ": 49004, "xer": 49005, "spilling": 49006, "publica": 49007, "cardam": 49008, "adish": 49009, "sacha": 49010, "pkg": 49011, "buda": 49012, "lyricist": 49013, "ibc": 49014, "grump": 49015, "hover": 49016, "halep": 49017, "antibody": 49018, "anemone": 49019, "âĻ¥âĻ¥âĻ¥âĻ¥": 49020, "mcl": 49021, "lithograph": 49022, "ccu": 49023, "sfest": 49024, "pathic": 49025, "callister": 49026, "ottawa": 49027, "gunsn": 49028, "rutger": 49029, "halibut": 49030, "envision": 49031, "differentiate": 49032, "ðŁļĢðŁļĢ": 49033, "piran": 49034, "latel": 49035, "ucn": 49036, "troubad": 49037, "raine": 49038, "fiercely": 49039, "learnenglish": 49040, "lease": 49041, "wexmondays": 49042, "emit": 49043, "drayton": 49044, "burrell": 49045, "scubadiving": 49046, "holler": 49047, "dru": 49048, "clocked": 49049, "wral": 49050, "apro": 49051, "translucent": 49052, "wbo": 49053, "patriarch": 49054, "moja": 49055, "lannister": 49056, "fishery": 
49057, "nederland": 49058, "mildly": 49059, "mirai": 49060, "mako": 49061, "jap": 49062, "ðŁĺ©ðŁĺ©ðŁĺ©": 49063, "prostatec": 49064, "panna": 49065, "arama": 49066, "undertaking": 49067, "tompkins": 49068, "neop": 49069, "solids": 49070, "savoury": 49071, "eames": 49072, "cutlery": 49073, "woodbridge": 49074, "steamer": 49075, "rizzo": 49076, "wildcat": 49077, "ratna": 49078, "laminated": 49079, "kineni": 49080, "jalap": 49081, "aides": 49082, "acknowledges": 49083, "?!?!?!": 49084, "!ðŁİī": 49085, "wafc": 49086, "maggio": 49087, "haves": 49088, "darje": 49089, "ofi": 49090, "gril": 49091, "vasi": 49092, "brux": 49093, "mohd": 49094, "fakespeare": 49095, "arnold": 49096, "rmb": 49097, "forbe": 49098, "walleye": 49099, "rodi": 49100, "therapeutics": 49101, "strategi": 49102, "obste": 49103, "mudder": 49104, "downloadable": 49105, "ddings": 49106, "dca": 49107, "asiangames": 49108, "campeon": 49109, "appropriation": 49110, "thcentury": 49111, "ramatta": 49112, "draped": 49113, "bullion": 49114, "muc": 49115, "onex": 49116, "segreg": 49117, "ophelia": 49118, "bodily": 49119, "âĿ¤ðŁĺį": 49120, "wizar": 49121, "teased": 49122, "ademy": 49123, "toid": 49124, "sura": 49125, "lazarus": 49126, "snickers": 49127, "mase": 49128, "loh": 49129, "bowed": 49130, "biblio": 49131, "xchange": 49132, "harlan": 49133, "ghoshal": 49134, "flavorful": 49135, "bhagat": 49136, "allez": 49137, "whichever": 49138, "tenstein": 49139, "discer": 49140, "organiser": 49141, "mtg": 49142, "dreamliner": 49143, "tse": 49144, "hokkaido": 49145, "mok": 49146, "indulgent": 49147, "hickman": 49148, "blinded": 49149, "alyn": 49150, "aaaah": 49151, "spool": 49152, "loughborough": 49153, "interpret": 49154, "etv": 49155, "aristotle": 49156, "optimizing": 49157, "avicii": 49158, "madurai": 49159, "juli": 49160, "nawaz": 49161, "matchups": 49162, "abide": 49163, "painting": 49164, "welling": 49165, "veli": 49166, "octagon": 49167, "inscribed": 49168, "poking": 49169, "placer": 49170, "lifecycle": 49171, "kilig": 49172, "gsp": 49173, "elives": 49174, "clements": 49175, "nasheed": 49176, "mesut": 49177, "incarcerated": 49178, "distilled": 49179, "walang": 49180, "delicacy": 49181, "delgado": 49182, "chez": 49183, "chita": 49184, "adero": 49185, "tux": 49186, "patil": 49187, "odo": 49188, "abhcosmetics": 49189, "tvc": 49190, "pbc": 49191, "inaccurate": 49192, "hardworkpaysoff": 49193, "baller": 49194, "quotation": 49195, "merchandising": 49196, "gastri": 49197, "defenses": 49198, "drogba": 49199, "bexhill": 49200, "bankno": 49201, "winona": 49202, "sieg": 49203, "pgs": 49204, "hahahha": 49205, "aguchi": 49206, "subram": 49207, "miracle": 49208, "desch": 49209, "libre": 49210, "bacher": 49211, "entine": 49212, "bbcradi": 49213, "loudest": 49214, "rps": 49215, "pierc": 49216, "fryer": 49217, "stormtrooper": 49218, "rafaelnadal": 49219, "pasco": 49220, "exhaustion": 49221, "epiconetsy": 49222, "rctid": 49223, "kellie": 49224, "gaines": 49225, "dbz": 49226, "smriti": 49227, "sbridge": 49228, "limited": 49229, "claw": 49230, "technical": 49231, "biographical": 49232, "adored": 49233, "ะ": 49234, "exclude": 49235, "acadia": 49236, "keyboards": 49237, "furman": 49238, "soca": 49239, "suru": 49240, "nips": 49241, "swaps": 49242, "serverless": 49243, "rune": 49244, "puffy": 49245, "northampton": 49246, "nishings": 49247, "hender": 49248, "cartridges": 49249, "gunshot": 49250, "ðŁĵ¹": 49251, "filament": 49252, "respondents": 49253, "peyton": 49254, "mountaineer": 49255, "merging": 49256, "lifespan": 49257, "intimidation": 49258, "pafc": 49259, 
"nlwx": 49260, "expansive": 49261, "purr": 49262, "fck": 49263, "cae": 49264, "atti": 49265, "telethon": 49266, "sohn": 49267, "mendel": 49268, "lopes": 49269, "dori": 49270, "unbroken": 49271, "tered": 49272, "tastings": 49273, "inactive": 49274, "disintegr": 49275, "tassel": 49276, "sharethe": 49277, "piano": 49278, "islay": 49279, "airspace": 49280, "zawa": 49281, "ricciardo": 49282, "mington": 49283, "fresher": 49284, "curry": 49285, "revs": 49286, "pharoah": 49287, "hmv": 49288, "exhilarating": 49289, "whoo": 49290, "linkin": 49291, "krispy": 49292, "competency": 49293, "stewards": 49294, "nebu": 49295, "katsu": 49296, "admins": 49297, "bazar": 49298, "asar": 49299, "givingback": 49300, "ssummit": 49301, "songz": 49302, "linus": 49303, "rajkumar": 49304, "farmington": 49305, "fantasia": 49306, "ðŁĺ´ðŁĺ´": 49307, "sobri": 49308, "lisse": 49309, "barrymore": 49310, "prism": 49311, "blob": 49312, "senew": 49313, "monoxide": 49314, "expire": 49315, "eighteen": 49316, "dipper": 49317, "xiao": 49318, "kilt": 49319, "hinch": 49320, "bbcsport": 49321, "bamboo": 49322, "pter": 49323, "exal": 49324, "ð٦ĭ": 49325, "hamlin": 49326, "expeditions": 49327, "stargazing": 49328, "foodsecurity": 49329, "wylie": 49330, "ulf": 49331, "stingly": 49332, "onstorm": 49333, "loeb": 49334, "broome": 49335, "bnha": 49336, "pancreatic": 49337, "elive": 49338, "!!!!!!!!!!!": 49339, "therapper": 49340, "orthopedic": 49341, "avengersendgame": 49342, "antitrust": 49343, "ìļ°": 49344, "gote": 49345, "omd": 49346, "offside": 49347, "gyllen": 49348, "wineries": 49349, "whitewater": 49350, "adl": 49351, "lupita": 49352, "exceeds": 49353, "consisted": 49354, "chewbacca": 49355, "ashleigh": 49356, "nhljets": 49357, "issan": 49358, "shld": 49359, "hayat": 49360, "cranberries": 49361, "ð٤ĺðŁı½": 49362, "rockthe": 49363, "springtraining": 49364, "fallout": 49365, "dairyfree": 49366, "waj": 49367, "undecided": 49368, "sown": 49369, "rcn": 49370, "northwales": 49371, "httr": 49372, "fumble": 49373, "dits": 49374, "compelled": 49375, "populist": 49376, "minted": 49377, "blanchett": 49378, ".''": 49379, "propulsion": 49380, "milla": 49381, "auberg": 49382, "hertz": 49383, "hta": 49384, "udaipur": 49385, "serendipity": 49386, "aztecs": 49387, "alsace": 49388, "ðŁIJij": 49389, "lun": 49390, "shoes": 49391, "charli": 49392, "garza": 49393, "ðŁĴŁ": 49394, "probiotics": 49395, "foxtv": 49396, "olis": 49397, "miff": 49398, "localized": 49399, "diffuser": 49400, "sigue": 49401, "funko": 49402, "rendous": 49403, "ðŁĴij": 49404, "jekyll": 49405, "<|startoftext|>": 49406, "<|endoftext|>": 49407} \ No newline at end of file diff --git a/ComfyUI-SUPIR/nodes.py b/ComfyUI-SUPIR/nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..083bb6f74a764cfd7dd70bf739bbefc58964f4ba --- /dev/null +++ b/ComfyUI-SUPIR/nodes.py @@ -0,0 +1,368 @@ +import os +import torch +from torch.nn import functional as F +from omegaconf import OmegaConf +import comfy.utils +import comfy.model_management as mm +import folder_paths +from nodes import ImageScaleBy +from nodes import ImageScale +import torch.cuda +from .sgm.util import instantiate_from_config +from .SUPIR.util import convert_dtype, load_state_dict +import open_clip +from contextlib import contextmanager + +from transformers import ( + CLIPTextModel, + CLIPTokenizer, + CLIPTextConfig, + +) +script_directory = os.path.dirname(os.path.abspath(__file__)) + +def dummy_build_vision_tower(*args, **kwargs): + # Monkey patch the CLIP class before you create an instance. 
+ return None + +@contextmanager +def patch_build_vision_tower(): + original_build_vision_tower = open_clip.model._build_vision_tower + open_clip.model._build_vision_tower = dummy_build_vision_tower + + try: + yield + finally: + open_clip.model._build_vision_tower = original_build_vision_tower + +def build_text_model_from_openai_state_dict( + state_dict: dict, + cast_dtype=torch.float16, + ): + + embed_dim = state_dict["text_projection"].shape[1] + context_length = state_dict["positional_embedding"].shape[0] + vocab_size = state_dict["token_embedding.weight"].shape[0] + transformer_width = state_dict["ln_final.weight"].shape[0] + transformer_heads = transformer_width // 64 + transformer_layers = len(set(k.split(".")[2] for k in state_dict if k.startswith(f"transformer.resblocks"))) + + vision_cfg = None + text_cfg = open_clip.CLIPTextCfg( + context_length=context_length, + vocab_size=vocab_size, + width=transformer_width, + heads=transformer_heads, + layers=transformer_layers, + ) + + with patch_build_vision_tower(): + model = open_clip.CLIP( + embed_dim, + vision_cfg=vision_cfg, + text_cfg=text_cfg, + quick_gelu=True, + cast_dtype=cast_dtype, + ) + + model.load_state_dict(state_dict, strict=False) + model = model.eval() + for param in model.parameters(): + param.requires_grad = False + return model + +class SUPIR_Upscale: + upscale_methods = ["nearest-exact", "bilinear", "area", "bicubic", "lanczos"] + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "supir_model": (folder_paths.get_filename_list("checkpoints"),), + "sdxl_model": (folder_paths.get_filename_list("checkpoints"),), + "image": ("IMAGE",), + "seed": ("INT", {"default": 123, "min": 0, "max": 0xffffffffffffffff, "step": 1}), + "resize_method": (s.upscale_methods, {"default": "lanczos"}), + "scale_by": ("FLOAT", {"default": 1.0, "min": 0.01, "max": 20.0, "step": 0.01}), + "steps": ("INT", {"default": 45, "min": 3, "max": 4096, "step": 1}), + "restoration_scale": ("FLOAT", {"default": -1.0, "min": -1.0, "max": 6.0, "step": 1.0}), + "cfg_scale": ("FLOAT", {"default": 4.0, "min": 0, "max": 100, "step": 0.01}), + "a_prompt": ("STRING", {"multiline": True, "default": "high quality, detailed", }), + "n_prompt": ("STRING", {"multiline": True, "default": "bad quality, blurry, messy", }), + "s_churn": ("INT", {"default": 5, "min": 0, "max": 40, "step": 1}), + "s_noise": ("FLOAT", {"default": 1.003, "min": 1.0, "max": 1.1, "step": 0.001}), + "control_scale": ("FLOAT", {"default": 1.0, "min": 0, "max": 10.0, "step": 0.05}), + "cfg_scale_start": ("FLOAT", {"default": 4.0, "min": 0.0, "max": 100.0, "step": 0.05}), + "control_scale_start": ("FLOAT", {"default": 0.0, "min": 0, "max": 1.0, "step": 0.05}), + "color_fix_type": ( + [ + 'None', + 'AdaIn', + 'Wavelet', + ], { + "default": 'Wavelet' + }), + "keep_model_loaded": ("BOOLEAN", {"default": True}), + "use_tiled_vae": ("BOOLEAN", {"default": True}), + "encoder_tile_size_pixels": ("INT", {"default": 512, "min": 64, "max": 8192, "step": 64}), + "decoder_tile_size_latent": ("INT", {"default": 64, "min": 32, "max": 8192, "step": 64}), + }, + "optional": { + "captions": ("STRING", {"forceInput": True, "multiline": False, "default": "", }), + "diffusion_dtype": ( + [ + 'fp16', + 'bf16', + 'fp32', + 'auto' + ], { + "default": 'auto' + }), + "encoder_dtype": ( + [ + 'bf16', + 'fp32', + 'auto' + ], { + "default": 'auto' + }), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 128, "step": 1}), + "use_tiled_sampling": ("BOOLEAN", {"default": False}), + "sampler_tile_size": ("INT", 
{"default": 1024, "min": 64, "max": 4096, "step": 32}), + "sampler_tile_stride": ("INT", {"default": 512, "min": 32, "max": 2048, "step": 32}), + "fp8_unet": ("BOOLEAN", {"default": False}), + "fp8_vae": ("BOOLEAN", {"default": False}), + "sampler": ( + [ + 'RestoreDPMPP2MSampler', + 'RestoreEDMSampler', + ], { + "default": 'RestoreEDMSampler' + }), + } + } + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("upscaled_image",) + FUNCTION = "process" + + CATEGORY = "SUPIR" + + def process(self, steps, image, color_fix_type, seed, scale_by, cfg_scale, resize_method, s_churn, s_noise, + encoder_tile_size_pixels, decoder_tile_size_latent, + control_scale, cfg_scale_start, control_scale_start, restoration_scale, keep_model_loaded, + a_prompt, n_prompt, sdxl_model, supir_model, use_tiled_vae, use_tiled_sampling=False, sampler_tile_size=128, sampler_tile_stride=64, captions="", diffusion_dtype="auto", + encoder_dtype="auto", batch_size=1, fp8_unet=False, fp8_vae=False, sampler="RestoreEDMSampler"): + device = mm.get_torch_device() + mm.unload_all_models() + + SUPIR_MODEL_PATH = folder_paths.get_full_path("checkpoints", supir_model) + SDXL_MODEL_PATH = folder_paths.get_full_path("checkpoints", sdxl_model) + + config_path = os.path.join(script_directory, "options/SUPIR_v0.yaml") + config_path_tiled = os.path.join(script_directory, "options/SUPIR_v0_tiled.yaml") + clip_config_path = os.path.join(script_directory, "configs/clip_vit_config.json") + tokenizer_path = os.path.join(script_directory, "configs/tokenizer") + + custom_config = { + 'sdxl_model': sdxl_model, + 'diffusion_dtype': diffusion_dtype, + 'encoder_dtype': encoder_dtype, + 'use_tiled_vae': use_tiled_vae, + 'supir_model': supir_model, + 'use_tiled_sampling': use_tiled_sampling, + 'fp8_unet': fp8_unet, + 'fp8_vae': fp8_vae, + 'sampler': sampler + } + + if diffusion_dtype == 'auto': + try: + if mm.should_use_fp16(): + print("Diffusion using fp16") + dtype = torch.float16 + model_dtype = 'fp16' + if mm.should_use_bf16(): + print("Diffusion using bf16") + dtype = torch.bfloat16 + model_dtype = 'bf16' + else: + print("Diffusion using using fp32") + dtype = torch.float32 + model_dtype = 'fp32' + except: + raise AttributeError("ComfyUI too old, can't autodecet properly. Set your dtypes manually.") + else: + print(f"Diffusion using using {diffusion_dtype}") + dtype = convert_dtype(diffusion_dtype) + model_dtype = diffusion_dtype + + if encoder_dtype == 'auto': + try: + if mm.should_use_bf16(): + print("Encoder using bf16") + vae_dtype = 'bf16' + else: + print("Encoder using using fp32") + vae_dtype = 'fp32' + except: + raise AttributeError("ComfyUI too old, can't autodetect properly. 
Set your dtypes manually.") + else: + vae_dtype = encoder_dtype + print(f"Encoder using using {vae_dtype}") + + if not hasattr(self, "model") or self.model is None or self.current_config != custom_config: + self.current_config = custom_config + self.model = None + + mm.soft_empty_cache() + + if use_tiled_sampling: + config = OmegaConf.load(config_path_tiled) + config.model.params.sampler_config.params.tile_size = sampler_tile_size // 8 + config.model.params.sampler_config.params.tile_stride = sampler_tile_stride // 8 + config.model.params.sampler_config.target = f".sgm.modules.diffusionmodules.sampling.Tiled{sampler}" + print("Using tiled sampling") + else: + config = OmegaConf.load(config_path) + config.model.params.sampler_config.target = f".sgm.modules.diffusionmodules.sampling.{sampler}" + print("Using non-tiled sampling") + + if mm.XFORMERS_IS_AVAILABLE: + config.model.params.control_stage_config.params.spatial_transformer_attn_type = "softmax-xformers" + config.model.params.network_config.params.spatial_transformer_attn_type = "softmax-xformers" + config.model.params.first_stage_config.params.ddconfig.attn_type = "vanilla-xformers" + + config.model.params.ae_dtype = vae_dtype + config.model.params.diffusion_dtype = model_dtype + + self.model = instantiate_from_config(config.model).cpu() + + try: + print(f'Attempting to load SUPIR model: [{SUPIR_MODEL_PATH}]') + supir_state_dict = load_state_dict(SUPIR_MODEL_PATH) + + except: + raise Exception("Failed to load SUPIR model") + try: + print(f"Attempting to load SDXL model: [{SDXL_MODEL_PATH}]") + sdxl_state_dict = load_state_dict(SDXL_MODEL_PATH) + except: + raise Exception("Failed to load SDXL model") + self.model.load_state_dict(supir_state_dict, strict=False) + self.model.load_state_dict(sdxl_state_dict, strict=False) + + del supir_state_dict + + #first clip model from SDXL checkpoint + try: + print("Loading first clip model from SDXL checkpoint") + + replace_prefix = {} + replace_prefix["conditioner.embedders.0.transformer."] = "" + + sd = comfy.utils.state_dict_prefix_replace(sdxl_state_dict, replace_prefix, filter_keys=False) + clip_text_config = CLIPTextConfig.from_pretrained(clip_config_path) + self.model.conditioner.embedders[0].tokenizer = CLIPTokenizer.from_pretrained(tokenizer_path) + self.model.conditioner.embedders[0].transformer = CLIPTextModel(clip_text_config) + self.model.conditioner.embedders[0].transformer.load_state_dict(sd, strict=False) + self.model.conditioner.embedders[0].eval() + for param in self.model.conditioner.embedders[0].parameters(): + param.requires_grad = False + except: + raise Exception("Failed to load first clip model from SDXL checkpoint") + + del sdxl_state_dict + + #second clip model from SDXL checkpoint + try: + print("Loading second clip model from SDXL checkpoint") + replace_prefix2 = {} + replace_prefix2["conditioner.embedders.1.model."] = "" + sd = comfy.utils.state_dict_prefix_replace(sd, replace_prefix2, filter_keys=True) + clip_g = build_text_model_from_openai_state_dict(sd, cast_dtype=dtype) + self.model.conditioner.embedders[1].model = clip_g + except: + raise Exception("Failed to load second clip model from SDXL checkpoint") + + del sd, clip_g + mm.soft_empty_cache() + + self.model.to(dtype) + + #only unets and/or vae to fp8 + if fp8_unet: + self.model.model.to(torch.float8_e4m3fn) + if fp8_vae: + self.model.first_stage_model.to(torch.float8_e4m3fn) + + if use_tiled_vae: + self.model.init_tile_vae(encoder_tile_size=encoder_tile_size_pixels, decoder_tile_size=decoder_tile_size_latent) + 
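+ # The input is first resized by scale_by, then height/width are rounded up to the next multiple of 64 with a bicubic resize (the network expects dimensions divisible by 64); the final output is scaled back to the pre-padding size at the end.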
+ upscaled_image, = ImageScaleBy.upscale(self, image, resize_method, scale_by) + B, H, W, C = upscaled_image.shape + new_height = H if H % 64 == 0 else ((H // 64) + 1) * 64 + new_width = W if W % 64 == 0 else ((W // 64) + 1) * 64 + upscaled_image = upscaled_image.permute(0, 3, 1, 2) + resized_image = F.interpolate(upscaled_image, size=(new_height, new_width), mode='bicubic', align_corners=False) + resized_image = resized_image.to(device) + + captions_list = [] + captions_list.append(captions) + print("captions: ", captions_list) + + use_linear_CFG = cfg_scale_start > 0 + use_linear_control_scale = control_scale_start > 0 + out = [] + pbar = comfy.utils.ProgressBar(B) + + batched_images = [resized_image[i:i + batch_size] for i in + range(0, len(resized_image), batch_size)] + captions_list = captions_list * resized_image.shape[0] + batched_captions = [captions_list[i:i + batch_size] for i in range(0, len(captions_list), batch_size)] + + mm.soft_empty_cache() + i = 1 + for imgs, caps in zip(batched_images, batched_captions): + try: + samples = self.model.batchify_sample(imgs, caps, num_steps=steps, + restoration_scale=restoration_scale, s_churn=s_churn, + s_noise=s_noise, cfg_scale=cfg_scale, control_scale=control_scale, + seed=seed, + num_samples=1, p_p=a_prompt, n_p=n_prompt, + color_fix_type=color_fix_type, + use_linear_CFG=use_linear_CFG, + use_linear_control_scale=use_linear_control_scale, + cfg_scale_start=cfg_scale_start, + control_scale_start=control_scale_start) + except torch.cuda.OutOfMemoryError as e: + mm.free_memory(mm.get_total_memory(mm.get_torch_device()), mm.get_torch_device()) + self.model = None + mm.soft_empty_cache() + print("It's likely that too large of an image or batch_size for SUPIR was used," + " and it has devoured all of the memory it had reserved, you may need to restart ComfyUI. 
Make sure you are using tiled_vae, " + " you can also try using fp8 for reduced memory usage if your system supports it.") + raise e + + out.append(samples.squeeze(0).cpu()) + print("Sampled ", i * len(imgs), " out of ", B) + i = i + 1 + pbar.update(1) + if not keep_model_loaded: + self.model = None + mm.soft_empty_cache() + + if len(out[0].shape) == 4: + out_stacked = torch.cat(out, dim=0).cpu().to(torch.float32).permute(0, 2, 3, 1) + else: + out_stacked = torch.stack(out, dim=0).cpu().to(torch.float32).permute(0, 2, 3, 1) + + final_image, = ImageScale.upscale(self, out_stacked, resize_method, W, H, crop="disabled") + + return (final_image,) + +NODE_CLASS_MAPPINGS = { + "SUPIR_Upscale": SUPIR_Upscale +} +NODE_DISPLAY_NAME_MAPPINGS = { + "SUPIR_Upscale": "SUPIR_Upscale" +} \ No newline at end of file diff --git a/ComfyUI-SUPIR/nodes_v2.py b/ComfyUI-SUPIR/nodes_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..d90ef950e817853c3ecddcf675f4076ecc04773f --- /dev/null +++ b/ComfyUI-SUPIR/nodes_v2.py @@ -0,0 +1,1227 @@ +import os +import torch +from omegaconf import OmegaConf +import comfy.utils +import comfy.model_management as mm +import folder_paths +import torch.cuda +import torch.nn.functional as F +from .sgm.util import instantiate_from_config +from .SUPIR.util import convert_dtype, load_state_dict +from .sgm.modules.distributions.distributions import DiagonalGaussianDistribution +import open_clip +from contextlib import contextmanager, nullcontext +import gc + +from contextlib import nullcontext +try: + from accelerate import init_empty_weights + from accelerate.utils import set_module_tensor_to_device + is_accelerate_available = True +except: + pass + +from transformers import ( + CLIPTextModel, + CLIPTokenizer, + CLIPTextConfig, + +) +script_directory = os.path.dirname(os.path.abspath(__file__)) + +def dummy_build_vision_tower(*args, **kwargs): + # Monkey patch the CLIP class before you create an instance. 
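+ # Same stub as in nodes.py: patch_build_vision_tower() swaps this in temporarily so open_clip.CLIP is constructed without a visual tower, then restores the original builder.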
+ return None + +@contextmanager +def patch_build_vision_tower(): + original_build_vision_tower = open_clip.model._build_vision_tower + open_clip.model._build_vision_tower = dummy_build_vision_tower + + try: + yield + finally: + open_clip.model._build_vision_tower = original_build_vision_tower + +def build_text_model_from_openai_state_dict( + state_dict: dict, + device, + cast_dtype=torch.float16, + ): + + embed_dim = state_dict["text_projection"].shape[1] + context_length = state_dict["positional_embedding"].shape[0] + vocab_size = state_dict["token_embedding.weight"].shape[0] + transformer_width = state_dict["ln_final.weight"].shape[0] + transformer_heads = transformer_width // 64 + transformer_layers = len(set(k.split(".")[2] for k in state_dict if k.startswith(f"transformer.resblocks"))) + + vision_cfg = None + text_cfg = open_clip.CLIPTextCfg( + context_length=context_length, + vocab_size=vocab_size, + width=transformer_width, + heads=transformer_heads, + layers=transformer_layers, + ) + + with patch_build_vision_tower(): + with (init_empty_weights() if is_accelerate_available else nullcontext()): + model = open_clip.CLIP( + embed_dim, + vision_cfg=vision_cfg, + text_cfg=text_cfg, + quick_gelu=True, + cast_dtype=cast_dtype, + ) + if is_accelerate_available: + for key in state_dict: + set_module_tensor_to_device(model, key, device=device, value=state_dict[key]) + else: + model.load_state_dict(state_dict, strict=False) + model = model.eval() + for param in model.parameters(): + param.requires_grad = False + return model + +class SUPIR_encode: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "SUPIR_VAE": ("SUPIRVAE",), + "image": ("IMAGE",), + "use_tiled_vae": ("BOOLEAN", {"default": True}), + "encoder_tile_size": ("INT", {"default": 512, "min": 64, "max": 8192, "step": 64}), + "encoder_dtype": ( + [ + 'bf16', + 'fp32', + 'auto' + ], { + "default": 'auto' + }), + } + } + + RETURN_TYPES = ("LATENT",) + RETURN_NAMES = ("latent",) + FUNCTION = "encode" + CATEGORY = "SUPIR" + + def encode(self, SUPIR_VAE, image, encoder_dtype, use_tiled_vae, encoder_tile_size): + device = mm.get_torch_device() + mm.unload_all_models() + if encoder_dtype == 'auto': + try: + if mm.should_use_bf16(): + print("Encoder using bf16") + vae_dtype = 'bf16' + else: + print("Encoder using fp32") + vae_dtype = 'fp32' + except: + raise AttributeError("ComfyUI version too old, can't autodetect properly. 
Set your dtypes manually.") + else: + vae_dtype = encoder_dtype + print(f"Encoder using {vae_dtype}") + + dtype = convert_dtype(vae_dtype) + + image = image.permute(0, 3, 1, 2) + B, C, H, W = image.shape + downscale_ratio = 32 + orig_H, orig_W = H, W + if W % downscale_ratio != 0: + W = W - (W % downscale_ratio) + if H % downscale_ratio != 0: + H = H - (H % downscale_ratio) + if orig_H % downscale_ratio != 0 or orig_W % downscale_ratio != 0: + image = F.interpolate(image, size=(H, W), mode="bicubic") + resized_image = image.to(device) + + if use_tiled_vae: + from .SUPIR.utils.tilevae import VAEHook + # Store the `original_forward` only if it hasn't been stored already + if not hasattr(SUPIR_VAE.encoder, 'original_forward'): + SUPIR_VAE.encoder.original_forward = SUPIR_VAE.encoder.forward + SUPIR_VAE.encoder.forward = VAEHook( + SUPIR_VAE.encoder, encoder_tile_size, is_decoder=False, fast_decoder=False, + fast_encoder=False, color_fix=False, to_gpu=True) + else: + # Only assign `original_forward` back if it exists + if hasattr(SUPIR_VAE.encoder, 'original_forward'): + SUPIR_VAE.encoder.forward = SUPIR_VAE.encoder.original_forward + + pbar = comfy.utils.ProgressBar(B) + out = [] + for img in resized_image: + + SUPIR_VAE.to(dtype).to(device) + + autocast_condition = (dtype != torch.float32) and not comfy.model_management.is_device_mps(device) + with torch.autocast(comfy.model_management.get_autocast_device(device), dtype=dtype) if autocast_condition else nullcontext(): + + z = SUPIR_VAE.encode(img.unsqueeze(0)) + z = z * 0.13025 + out.append(z) + pbar.update(1) + + if len(out[0].shape) == 4: + samples_out_stacked = torch.cat(out, dim=0) + else: + samples_out_stacked = torch.stack(out, dim=0) + return ({"samples":samples_out_stacked, "original_size": [orig_H, orig_W]},) + +class SUPIR_decode: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "SUPIR_VAE": ("SUPIRVAE",), + "latents": ("LATENT",), + "use_tiled_vae": ("BOOLEAN", {"default": True}), + "decoder_tile_size": ("INT", {"default": 512, "min": 64, "max": 8192, "step": 64}), + } + } + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("image",) + FUNCTION = "decode" + CATEGORY = "SUPIR" + + def decode(self, SUPIR_VAE, latents, use_tiled_vae, decoder_tile_size): + device = mm.get_torch_device() + mm.unload_all_models() + samples = latents["samples"] + + B, H, W, C = samples.shape + + pbar = comfy.utils.ProgressBar(B) + + if mm.should_use_bf16(): + print("Decoder using bf16") + dtype = torch.bfloat16 + else: + print("Decoder using fp32") + dtype = torch.float32 + print("SUPIR decoder using", dtype) + + SUPIR_VAE.to(dtype).to(device) + samples = samples.to(device) + + if use_tiled_vae: + from .SUPIR.utils.tilevae import VAEHook + # Store the `original_forward` only if it hasn't been stored already + if not hasattr(SUPIR_VAE.decoder, 'original_forward'): + SUPIR_VAE.decoder.original_forward = SUPIR_VAE.decoder.forward + SUPIR_VAE.decoder.forward = VAEHook( + SUPIR_VAE.decoder, decoder_tile_size // 8, is_decoder=True, fast_decoder=False, + fast_encoder=False, color_fix=False, to_gpu=True) + else: + # Only assign `original_forward` back if it exists + if hasattr(SUPIR_VAE.decoder, 'original_forward'): + SUPIR_VAE.decoder.forward = SUPIR_VAE.decoder.original_forward + + out = [] + for sample in samples: + autocast_condition = (dtype != torch.float32) and not comfy.model_management.is_device_mps(device) + with torch.autocast(comfy.model_management.get_autocast_device(device), dtype=dtype) if autocast_condition else nullcontext(): + sample 
= 1.0 / 0.13025 * sample + decoded_image = SUPIR_VAE.decode(sample.unsqueeze(0)) + out.append(decoded_image) + pbar.update(1) + + decoded_out= torch.cat(out, dim=0).float() + + if "original_size" in latents and latents["original_size"] is not None: + orig_H, orig_W = latents["original_size"] + if decoded_out.shape[2] != orig_H or decoded_out.shape[3] != orig_W: + print("Restoring original dimensions: ", orig_W,"x",orig_H) + decoded_out = F.interpolate(decoded_out, size=(orig_H, orig_W), mode="bicubic") + + decoded_out = torch.clip(decoded_out, 0, 1) + decoded_out = decoded_out.cpu().to(torch.float32).permute(0, 2, 3, 1) + + + return (decoded_out,) + +class SUPIR_first_stage: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "SUPIR_VAE": ("SUPIRVAE",), + "image": ("IMAGE",), + "use_tiled_vae": ("BOOLEAN", {"default": True}), + "encoder_tile_size": ("INT", {"default": 512, "min": 64, "max": 8192, "step": 64}), + "decoder_tile_size": ("INT", {"default": 512, "min": 64, "max": 8192, "step": 64}), + "encoder_dtype": ( + [ + 'bf16', + 'fp32', + 'auto' + ], { + "default": 'auto' + }), + } + } + + RETURN_TYPES = ("SUPIRVAE", "IMAGE", "LATENT",) + RETURN_NAMES = ("SUPIR_VAE", "denoised_image", "denoised_latents",) + FUNCTION = "process" + CATEGORY = "SUPIR" + DESCRIPTION = """ +SUPIR "first stage" processing. +Encodes and decodes the image using SUPIR's "denoise_encoder", purpose +is to fix compression artifacts and such, ends up blurring the image often +which is expected. Can be replaced with any other denoiser/blur or not used at all. +""" + + def process(self, SUPIR_VAE, image, encoder_dtype, use_tiled_vae, encoder_tile_size, decoder_tile_size): + device = mm.get_torch_device() + mm.unload_all_models() + if encoder_dtype == 'auto': + try: + + if mm.should_use_bf16(): + print("Encoder using bf16") + vae_dtype = 'bf16' + else: + print("Encoder using fp32") + vae_dtype = 'fp32' + except: + raise AttributeError("ComfyUI version too old, can't autodetect properly. 
Set your dtypes manually.") + else: + vae_dtype = encoder_dtype + print(f"Encoder using {vae_dtype}") + + dtype = convert_dtype(vae_dtype) + + if use_tiled_vae: + from .SUPIR.utils.tilevae import VAEHook + # Store the `original_forward` only if it hasn't been stored already + if not hasattr(SUPIR_VAE.encoder, 'original_forward'): + SUPIR_VAE.denoise_encoder.original_forward = SUPIR_VAE.denoise_encoder.forward + SUPIR_VAE.decoder.original_forward = SUPIR_VAE.decoder.forward + + SUPIR_VAE.denoise_encoder.forward = VAEHook( + SUPIR_VAE.denoise_encoder, encoder_tile_size, is_decoder=False, fast_decoder=False, + fast_encoder=False, color_fix=False, to_gpu=True) + + SUPIR_VAE.decoder.forward = VAEHook( + SUPIR_VAE.decoder, decoder_tile_size // 8, is_decoder=True, fast_decoder=False, + fast_encoder=False, color_fix=False, to_gpu=True) + else: + # Only assign `original_forward` back if it exists + if hasattr(SUPIR_VAE.denoise_encoder, 'original_forward'): + SUPIR_VAE.denoise_encoder.forward = SUPIR_VAE.denoise_encoder.original_forward + SUPIR_VAE.decoder.forward = SUPIR_VAE.decoder.original_forward + + image = image.permute(0, 3, 1, 2) + B, C, H, W = image.shape + downscale_ratio = 32 + orig_H, orig_W = H, W + if W % downscale_ratio != 0: + W = W - (W % downscale_ratio) + if H % downscale_ratio != 0: + H = H - (H % downscale_ratio) + if orig_H % downscale_ratio != 0 or orig_W % downscale_ratio != 0: + image = F.interpolate(image, size=(H, W), mode="bicubic") + resized_image = image.to(device) + + pbar = comfy.utils.ProgressBar(B) + out = [] + out_samples = [] + for img in resized_image: + + SUPIR_VAE.to(dtype).to(device) + + autocast_condition = (dtype != torch.float32) and not comfy.model_management.is_device_mps(device) + with torch.autocast(comfy.model_management.get_autocast_device(device), dtype=dtype) if autocast_condition else nullcontext(): + + h = SUPIR_VAE.denoise_encoder(img.unsqueeze(0)) + moments = SUPIR_VAE.quant_conv(h) + posterior = DiagonalGaussianDistribution(moments) + sample = posterior.sample() + decoded_images = SUPIR_VAE.decode(sample).float() + + out.append(decoded_images.cpu()) + out_samples.append(sample.cpu() * 0.13025) + pbar.update(1) + + + out_stacked = torch.cat(out, dim=0).to(torch.float32).permute(0, 2, 3, 1) + out_samples_stacked = torch.cat(out_samples, dim=0) + original_size = [orig_H, orig_W] + return (SUPIR_VAE, out_stacked, {"samples": out_samples_stacked, "original_size": original_size},) + +class SUPIR_sample: + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "SUPIR_model": ("SUPIRMODEL",), + "latents": ("LATENT",), + "positive": ("SUPIR_cond_pos",), + "negative": ("SUPIR_cond_neg",), + "seed": ("INT", {"default": 123, "min": 0, "max": 0xffffffffffffffff, "step": 1}), + "steps": ("INT", {"default": 45, "min": 3, "max": 4096, "step": 1}), + "cfg_scale_start": ("FLOAT", {"default": 4.0, "min": 0.0, "max": 100.0, "step": 0.01}), + "cfg_scale_end": ("FLOAT", {"default": 4.0, "min": 0, "max": 100.0, "step": 0.01}), + "EDM_s_churn": ("INT", {"default": 5, "min": 0, "max": 40, "step": 1}), + "s_noise": ("FLOAT", {"default": 1.003, "min": 1.0, "max": 1.1, "step": 0.001}), + "DPMPP_eta": ("FLOAT", {"default": 1.0, "min": 0, "max": 10.0, "step": 0.01}), + "control_scale_start": ("FLOAT", {"default": 1.0, "min": 0, "max": 10.0, "step": 0.01}), + "control_scale_end": ("FLOAT", {"default": 1.0, "min": 0, "max": 10.0, "step": 0.01}), + "restore_cfg": ("FLOAT", {"default": -1.0, "min": -1.0, "max": 20.0, "step": 0.01}), + "keep_model_loaded": ("BOOLEAN", 
{"default": False}), + "sampler": ( + [ + 'RestoreDPMPP2MSampler', + 'RestoreEDMSampler', + 'TiledRestoreDPMPP2MSampler', + 'TiledRestoreEDMSampler', + ], { + "default": 'RestoreEDMSampler' + }), + }, + "optional": { + "sampler_tile_size": ("INT", {"default": 1024, "min": 64, "max": 4096, "step": 32}), + "sampler_tile_stride": ("INT", {"default": 512, "min": 32, "max": 2048, "step": 32}), + } + } + + RETURN_TYPES = ("LATENT",) + RETURN_NAMES = ("latent",) + FUNCTION = "sample" + CATEGORY = "SUPIR" + DESCRIPTION = """ +- **latent:** +Latent to sample from, when using SUPIR latent this is just for the noise shape, +it's actually not used otherwise here. Identical to feeding this comfy empty latent. +If fed anything else it's used as it is, no noise is added. +- **cfg:** +Linearly scaled CFG is always used, first step will use the cfg_scale_start value, +and that is interpolated to the cfg_scale_end value at last step. +To disable scaling set these values to be the same. +- **EDM_s_churn:** +controls the rate of adaptation of the diffusion process to changes in noise levels +over time. Has no effect with DPMPP samplers. +- **s_noise:** +This parameter directly controls the amount of noise added to the image at each +step of the diffusion process. +- **DPMPP_eta:** +Scaling factor that influences the diffusion process by adjusting how the denoising +process adapts to changes in noise levels over time. +No effect with EDM samplers. +- **control_scale:** +The strenght of the SUPIR control model, scales linearly from start to end. +Lower values allow more freedom from the input image. +- **restore_cfg:** +Controls the degree of restoration towards the original image during the diffusion +process. It allows for dome fine-tuning of the process. +- **samplers:** +EDM samplers need lots of steps but generally have better quality. +DPMPP samplers work well with lower steps, good for lightning models. +Tiled samplers enable tiled diffusion process, this is very slow but allows higher +resolutions to be used by saving VRAM. Tile size should be chosen so the image +is evenly tiled. Tile stride affects the overlap of the tiles. Check the +SUPIR Tiles -node for preview to understand how the image is tiled. 
+ +""" + + def sample(self, SUPIR_model, latents, steps, seed, cfg_scale_end, EDM_s_churn, s_noise, positive, negative, + cfg_scale_start, control_scale_start, control_scale_end, restore_cfg, keep_model_loaded, DPMPP_eta, + sampler, sampler_tile_size=1024, sampler_tile_stride=512): + + torch.manual_seed(seed) + device = mm.get_torch_device() + mm.unload_all_models() + mm.soft_empty_cache() + + self.sampler_config = { + 'target': f'.sgm.modules.diffusionmodules.sampling.{sampler}', + 'params': { + 'num_steps': steps, + 'restore_cfg': restore_cfg, + 's_churn': EDM_s_churn, + 's_noise': s_noise, + 'discretization_config': { + 'target': '.sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization' + }, + 'guider_config': { + 'target': '.sgm.modules.diffusionmodules.guiders.LinearCFG', + 'params': { + 'scale': cfg_scale_start, + 'scale_min': cfg_scale_end + } + } + } + } + if 'Tiled' in sampler: + self.sampler_config['params']['tile_size'] = sampler_tile_size // 8 + self.sampler_config['params']['tile_stride'] = sampler_tile_stride // 8 + if 'DPMPP' in sampler: + self.sampler_config['params']['eta'] = DPMPP_eta + self.sampler_config['params']['restore_cfg'] = -1 + if not hasattr (self,'sampler') or self.sampler_config != self.current_sampler_config: + self.sampler = instantiate_from_config(self.sampler_config) + self.current_sampler_config = self.sampler_config + + print("sampler_config: ", self.sampler_config) + + SUPIR_model.denoiser.to(device) + SUPIR_model.model.diffusion_model.to(device) + SUPIR_model.model.control_model.to(device) + + use_linear_control_scale = control_scale_start != control_scale_end + + denoiser = lambda input, sigma, c, control_scale: SUPIR_model.denoiser(SUPIR_model.model, input, sigma, c, control_scale) + + original_size = positive['original_size'] + positive = positive['cond'] + negative = negative['uncond'] + samples = latents["samples"] + samples = samples.to(device) + #print("positives: ", len(positive)) + #print("negatives: ", len(negative)) + out = [] + pbar = comfy.utils.ProgressBar(samples.shape[0]) + for i, sample in enumerate(samples): + try: + if 'original_size' in latents: + print("Using random noise") + noised_z = torch.randn_like(sample.unsqueeze(0), device=samples.device) + else: + print("Using latent from input") + noised_z = torch.randn_like(sample.unsqueeze(0), device=samples.device) + noised_z += sample.unsqueeze(0) + if len(positive) != len(samples): + print("Tiled sampling") + _samples = self.sampler(denoiser, noised_z, cond=positive, uc=negative, x_center=sample.unsqueeze(0), control_scale=control_scale_end, + use_linear_control_scale=use_linear_control_scale, control_scale_start=control_scale_start) + else: + #print("positives[i]: ", len(positive[i])) + #print("negatives[i]: ", len(negative[i])) + _samples = self.sampler(denoiser, noised_z, cond=positive[i], uc=negative[i], x_center=sample.unsqueeze(0), control_scale=control_scale_end, + use_linear_control_scale=use_linear_control_scale, control_scale_start=control_scale_start) + + + except torch.cuda.OutOfMemoryError as e: + mm.free_memory(mm.get_total_memory(mm.get_torch_device()), mm.get_torch_device()) + SUPIR_model = None + mm.soft_empty_cache() + print("It's likely that too large of an image or batch_size for SUPIR was used," + " and it has devoured all of the memory it had reserved, you may need to restart ComfyUI. 
Make sure you are using tiled_vae, " + " you can also try using fp8 for reduced memory usage if your system supports it.") + raise e + out.append(_samples) + print("Sampled ", i+1, " of ", samples.shape[0]) + pbar.update(1) + + if not keep_model_loaded: + SUPIR_model.denoiser.to('cpu') + SUPIR_model.model.diffusion_model.to('cpu') + SUPIR_model.model.control_model.to('cpu') + mm.soft_empty_cache() + + if len(out[0].shape) == 4: + samples_out_stacked = torch.cat(out, dim=0) + else: + samples_out_stacked = torch.stack(out, dim=0) + + if original_size is None: + samples_out_stacked = samples_out_stacked / 0.13025 + + return ({"samples":samples_out_stacked, "original_size": original_size},) + +class SUPIR_conditioner: + # @classmethod + # def IS_CHANGED(s): + # return "" + @classmethod + def INPUT_TYPES(s): + return {"required": { + "SUPIR_model": ("SUPIRMODEL",), + "latents": ("LATENT",), + "positive_prompt": ("STRING", {"multiline": True, "default": "high quality, detailed", }), + "negative_prompt": ("STRING", {"multiline": True, "default": "bad quality, blurry, messy", }), + }, + "optional": { + "captions": ("STRING", {"forceInput": True, "multiline": False, "default": "", }), + } + } + + RETURN_TYPES = ("SUPIR_cond_pos", "SUPIR_cond_neg",) + RETURN_NAMES = ("positive", "negative",) + FUNCTION = "condition" + CATEGORY = "SUPIR" + DESCRIPTION = """ +Creates the conditioning for the sampler. +Caption input is optional; when it receives a single caption, it is added to the positive prompt. + +If a list of captions is given for a single input image, the captions need to match the number of tiles; +refer to the SUPIR Tiles node. + +If a list of captions is given and it matches the incoming image batch, each image uses the corresponding caption. +""" + + def condition(self, SUPIR_model, latents, positive_prompt, negative_prompt, captions=""): + + device = mm.get_torch_device() + mm.soft_empty_cache() + + if "original_size" in latents: + original_size = latents["original_size"] + samples = latents["samples"] + else: + original_size = None + samples = latents["samples"] * 0.13025 + + N, H, W, C = samples.shape + import copy + + if not isinstance(captions, list): + captions_list = [] + captions_list.append([captions]) + captions_list = captions_list * N + else: + captions_list = captions + + print("captions: ", captions_list) + + SUPIR_model.conditioner.to(device) + samples = samples.to(device) + + uc = [] + pbar = comfy.utils.ProgressBar(N) + autocast_condition = (SUPIR_model.model.dtype != torch.float32) and not comfy.model_management.is_device_mps(device) + with torch.autocast(comfy.model_management.get_autocast_device(device), dtype=SUPIR_model.model.dtype) if autocast_condition else nullcontext(): + if N != len(captions_list): #Tiled captioning + print("Tiled captioning") + c = [] + uc = [] + for i, caption in enumerate(captions_list): + cond = {} + cond['original_size_as_tuple'] = torch.tensor([[1024, 1024]]).to(device) + cond['crop_coords_top_left'] = torch.tensor([[0, 0]]).to(device) + cond['target_size_as_tuple'] = torch.tensor([[1024, 1024]]).to(device) + cond['aesthetic_score'] = torch.tensor([[9.0]]).to(device) + cond['control'] = samples[0].unsqueeze(0) + + uncond = copy.deepcopy(cond) + uncond['txt'] = [negative_prompt] + + cond['txt'] = [''.join([caption[0], positive_prompt])] + if i == 0: + _c, uc = SUPIR_model.conditioner.get_unconditional_conditioning(cond, uncond) + else: + _c, _ = SUPIR_model.conditioner.get_unconditional_conditioning(cond, None) + + c.append(_c) + pbar.update(1) + else:
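+ # This branch runs when the caption list length matches the latent batch size: + # each image is paired with its own caption and gets its own cond/uncond pair.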
#batch captioning + print("Batch captioning") + c = [] + uc = [] + for i, sample in enumerate(samples): + + cond = {} + cond['original_size_as_tuple'] = torch.tensor([[1024, 1024]]).to(device) + cond['crop_coords_top_left'] = torch.tensor([[0, 0]]).to(device) + cond['target_size_as_tuple'] = torch.tensor([[1024, 1024]]).to(device) + cond['aesthetic_score'] = torch.tensor([[9.0]]).to(device) + cond['control'] = sample.unsqueeze(0) + + uncond = copy.deepcopy(cond) + uncond['txt'] = [negative_prompt] + cond['txt'] = [''.join([captions_list[i][0], positive_prompt])] + _c, _uc = SUPIR_model.conditioner.get_unconditional_conditioning(cond, uncond) + c.append(_c) + uc.append(_uc) + + pbar.update(1) + + + SUPIR_model.conditioner.to('cpu') + + if "original_size" in latents: + original_size = latents["original_size"] + else: + original_size = None + + return ({"cond": c, "original_size":original_size}, {"uncond": uc},) + +class SUPIR_model_loader: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "supir_model": (folder_paths.get_filename_list("checkpoints"),), + "sdxl_model": (folder_paths.get_filename_list("checkpoints"),), + "fp8_unet": ("BOOLEAN", {"default": False}), + "diffusion_dtype": ( + [ + 'fp16', + 'bf16', + 'fp32', + 'auto' + ], { + "default": 'auto' + }), + }, + } + + RETURN_TYPES = ("SUPIRMODEL", "SUPIRVAE") + RETURN_NAMES = ("SUPIR_model","SUPIR_VAE",) + FUNCTION = "process" + CATEGORY = "SUPIR" + DESCRIPTION = """ +Old loader, not recommended to be used. +Loads the SUPIR model and the selected SDXL model and merges them. +""" + + def process(self, supir_model, sdxl_model, diffusion_dtype, fp8_unet): + device = mm.get_torch_device() + mm.unload_all_models() + + SUPIR_MODEL_PATH = folder_paths.get_full_path("checkpoints", supir_model) + SDXL_MODEL_PATH = folder_paths.get_full_path("checkpoints", sdxl_model) + + config_path = os.path.join(script_directory, "options/SUPIR_v0.yaml") + clip_config_path = os.path.join(script_directory, "configs/clip_vit_config.json") + tokenizer_path = os.path.join(script_directory, "configs/tokenizer") + + custom_config = { + 'sdxl_model': sdxl_model, + 'diffusion_dtype': diffusion_dtype, + 'supir_model': supir_model, + 'fp8_unet': fp8_unet, + } + + if diffusion_dtype == 'auto': + try: + if mm.should_use_fp16(): + print("Diffusion using fp16") + dtype = torch.float16 + model_dtype = 'fp16' + elif mm.should_use_bf16(): + print("Diffusion using bf16") + dtype = torch.bfloat16 + model_dtype = 'bf16' + else: + print("Diffusion using fp32") + dtype = torch.float32 + model_dtype = 'fp32' + except: + raise AttributeError("ComfyUI version too old, can't autodetect properly. 
Set your dtypes manually.") + else: + print(f"Diffusion using {diffusion_dtype}") + dtype = convert_dtype(diffusion_dtype) + model_dtype = diffusion_dtype + + if not hasattr(self, "model") or self.model is None or self.current_config != custom_config: + self.current_config = custom_config + self.model = None + + mm.soft_empty_cache() + + config = OmegaConf.load(config_path) + + if mm.XFORMERS_IS_AVAILABLE: + print("Using XFORMERS") + config.model.params.control_stage_config.params.spatial_transformer_attn_type = "softmax-xformers" + config.model.params.network_config.params.spatial_transformer_attn_type = "softmax-xformers" + config.model.params.first_stage_config.params.ddconfig.attn_type = "vanilla-xformers" + + config.model.params.diffusion_dtype = model_dtype + config.model.target = ".SUPIR.models.SUPIR_model_v2.SUPIRModel" + pbar = comfy.utils.ProgressBar(5) + + self.model = instantiate_from_config(config.model).cpu() + self.model.model.dtype = dtype + pbar.update(1) + try: + print(f"Attempting to load SDXL model: [{SDXL_MODEL_PATH}]") + sdxl_state_dict = load_state_dict(SDXL_MODEL_PATH) + self.model.load_state_dict(sdxl_state_dict, strict=False) + if fp8_unet: + self.model.model.to(torch.float8_e4m3fn) + else: + self.model.model.to(dtype) + pbar.update(1) + except: + raise Exception("Failed to load SDXL model") + + #first clip model from SDXL checkpoint + try: + print("Loading first clip model from SDXL checkpoint") + + replace_prefix = {} + replace_prefix["conditioner.embedders.0.transformer."] = "" + + sd = comfy.utils.state_dict_prefix_replace(sdxl_state_dict, replace_prefix, filter_keys=False) + clip_text_config = CLIPTextConfig.from_pretrained(clip_config_path) + self.model.conditioner.embedders[0].tokenizer = CLIPTokenizer.from_pretrained(tokenizer_path) + self.model.conditioner.embedders[0].transformer = CLIPTextModel(clip_text_config) + self.model.conditioner.embedders[0].transformer.load_state_dict(sd, strict=False) + self.model.conditioner.embedders[0].eval() + self.model.conditioner.embedders[0].to(dtype) + for param in self.model.conditioner.embedders[0].parameters(): + param.requires_grad = False + pbar.update(1) + except: + raise Exception("Failed to load first clip model from SDXL checkpoint") + + del sdxl_state_dict + + #second clip model from SDXL checkpoint + try: + print("Loading second clip model from SDXL checkpoint") + replace_prefix2 = {} + replace_prefix2["conditioner.embedders.1.model."] = "" + sd = comfy.utils.state_dict_prefix_replace(sd, replace_prefix2, filter_keys=True) + clip_g = build_text_model_from_openai_state_dict(sd, device, cast_dtype=dtype) + self.model.conditioner.embedders[1].model = clip_g + self.model.conditioner.embedders[1].to(dtype) + pbar.update(1) + except: + raise Exception("Failed to load second clip model from SDXL checkpoint") + + del sd, clip_g + + try: + print(f'Attempting to load SUPIR model: [{SUPIR_MODEL_PATH}]') + supir_state_dict = load_state_dict(SUPIR_MODEL_PATH) + self.model.load_state_dict(supir_state_dict, strict=False) + if fp8_unet: + self.model.model.to(torch.float8_e4m3fn) + else: + self.model.model.to(dtype) + del supir_state_dict + pbar.update(1) + except: + raise Exception("Failed to load SUPIR model") + mm.soft_empty_cache() + + return (self.model, self.model.first_stage_model,) + +class SUPIR_model_loader_v2: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "model" :("MODEL",), + "clip": ("CLIP",), + "vae": ("VAE",), + "supir_model": (folder_paths.get_filename_list("checkpoints"),), + "fp8_unet": 
("BOOLEAN", {"default": False}), + "diffusion_dtype": ( + [ + 'fp16', + 'bf16', + 'fp32', + 'auto' + ], { + "default": 'auto' + }), + }, + "optional": { + "high_vram": ("BOOLEAN", {"default": False}), + } + } + + RETURN_TYPES = ("SUPIRMODEL", "SUPIRVAE") + RETURN_NAMES = ("SUPIR_model","SUPIR_VAE",) + FUNCTION = "process" + CATEGORY = "SUPIR" + DESCRIPTION = """ +Loads the SUPIR model and merges it with the SDXL model. + +Diffusion type should be kept on auto, unless you have issues loading the model. +fp8_unet casts the unet weights to torch.float8_e4m3fn, which saves a lot of VRAM but has slight quality impact. +high_vram: uses Accelerate to load weights to GPU, slightly faster model loading. +""" + + def process(self, supir_model, diffusion_dtype, fp8_unet, model, clip, vae, high_vram=False): + if high_vram: + device = mm.get_torch_device() + else: + device = mm.unet_offload_device() + print("Loading weights to: ", device) + mm.unload_all_models() + + SUPIR_MODEL_PATH = folder_paths.get_full_path("checkpoints", supir_model) + + config_path = os.path.join(script_directory, "options/SUPIR_v0.yaml") + clip_config_path = os.path.join(script_directory, "configs/clip_vit_config.json") + tokenizer_path = os.path.join(script_directory, "configs/tokenizer") + + custom_config = { + 'diffusion_dtype': diffusion_dtype, + 'supir_model': supir_model, + 'fp8_unet': fp8_unet, + 'model': model, + "clip": clip, + "vae": vae + } + + if diffusion_dtype == 'auto': + try: + if mm.should_use_fp16(): + print("Diffusion using fp16") + dtype = torch.float16 + elif mm.should_use_bf16(): + print("Diffusion using bf16") + dtype = torch.bfloat16 + else: + print("Diffusion using fp32") + dtype = torch.float32 + except: + raise AttributeError("ComfyUI version too old, can't autodecet properly. 
Set your dtypes manually.") + else: + print(f"Diffusion using {diffusion_dtype}") + dtype = convert_dtype(diffusion_dtype) + + if not hasattr(self, "model") or self.model is None or self.current_config != custom_config: + self.current_config = custom_config + self.model = None + + mm.soft_empty_cache() + + config = OmegaConf.load(config_path) + if mm.XFORMERS_IS_AVAILABLE: + print("Using XFORMERS") + config.model.params.control_stage_config.params.spatial_transformer_attn_type = "softmax-xformers" + config.model.params.network_config.params.spatial_transformer_attn_type = "softmax-xformers" + config.model.params.first_stage_config.params.ddconfig.attn_type = "vanilla-xformers" + + config.model.target = ".SUPIR.models.SUPIR_model_v2.SUPIRModel" + pbar = comfy.utils.ProgressBar(5) + + #with (init_empty_weights() if is_accelerate_available else nullcontext()): + self.model = instantiate_from_config(config.model).cpu() + self.model.model.dtype = dtype + pbar.update(1) + try: + print(f"Attempting to load SDXL model from node inputs") + mm.load_model_gpu(model) + sdxl_state_dict = model.model.state_dict_for_saving(None, vae.get_sd(), None) + if is_accelerate_available: + for key in sdxl_state_dict: + set_module_tensor_to_device(self.model, key, device=device, dtype=dtype, value=sdxl_state_dict[key]) + else: + self.model.load_state_dict(sdxl_state_dict, strict=False) + if fp8_unet: + self.model.model.to(torch.float8_e4m3fn) + else: + self.model.model.to(dtype) + del sdxl_state_dict + pbar.update(1) + except: + raise Exception("Failed to load SDXL model") + gc.collect() + mm.soft_empty_cache() + #first clip model from SDXL checkpoint + try: + print("Loading first clip model from SDXL checkpoint") + clip_sd = None + clip_model = clip.load_model() + mm.load_model_gpu(clip_model) + clip_sd = clip.get_sd() + clip_sd = model.model.model_config.process_clip_state_dict_for_saving(clip_sd) + + replace_prefix = {} + replace_prefix["conditioner.embedders.0.transformer."] = "" + + clip_l_sd = comfy.utils.state_dict_prefix_replace(clip_sd, replace_prefix, filter_keys=True) + clip_text_config = CLIPTextConfig.from_pretrained(clip_config_path) + self.model.conditioner.embedders[0].tokenizer = CLIPTokenizer.from_pretrained(tokenizer_path) + with (init_empty_weights() if is_accelerate_available else nullcontext()): + self.model.conditioner.embedders[0].transformer = CLIPTextModel(clip_text_config) + if is_accelerate_available: + for key in clip_l_sd: + set_module_tensor_to_device(self.model.conditioner.embedders[0].transformer, key, device=device, dtype=dtype, value=clip_l_sd[key]) + else: + self.model.conditioner.embedders[0].transformer.load_state_dict(clip_l_sd, strict=False) + self.model.conditioner.embedders[0].eval() + for param in self.model.conditioner.embedders[0].parameters(): + param.requires_grad = False + self.model.conditioner.embedders[0].to(dtype) + del clip_l_sd + pbar.update(1) + except: + raise Exception("Failed to load first clip model from SDXL checkpoint") + gc.collect() + mm.soft_empty_cache() + #second clip model from SDXL checkpoint + try: + print("Loading second clip model from SDXL checkpoint") + replace_prefix2 = {} + replace_prefix2["conditioner.embedders.1.model."] = "" + clip_g_sd = comfy.utils.state_dict_prefix_replace(clip_sd, replace_prefix2, filter_keys=True) + clip_g = build_text_model_from_openai_state_dict(clip_g_sd, device, cast_dtype=dtype) + self.model.conditioner.embedders[1].model = clip_g + self.model.conditioner.embedders[1].model.to(dtype) + del clip_g_sd + 
pbar.update(1) + except: + raise Exception("Failed to load second clip model from SDXL checkpoint") + + try: + print(f'Attempting to load SUPIR model: [{SUPIR_MODEL_PATH}]') + supir_state_dict = load_state_dict(SUPIR_MODEL_PATH) + if "Q" not in supir_model or not is_accelerate_available: #I don't know why this doesn't work with the Q model. + for key in supir_state_dict: + set_module_tensor_to_device(self.model, key, device=device, dtype=dtype, value=supir_state_dict[key]) + else: + self.model.load_state_dict(supir_state_dict, strict=False) + if fp8_unet: + self.model.model.to(torch.float8_e4m3fn) + else: + self.model.model.to(dtype) + del supir_state_dict + pbar.update(1) + except: + raise Exception("Failed to load SUPIR model") + mm.soft_empty_cache() + + return (self.model, self.model.first_stage_model,) + +class SUPIR_model_loader_v2_clip: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "model" :("MODEL",), + "clip_l": ("CLIP",), + "clip_g": ("CLIP",), + "vae": ("VAE",), + "supir_model": (folder_paths.get_filename_list("checkpoints"),), + "fp8_unet": ("BOOLEAN", {"default": False}), + "diffusion_dtype": ( + [ + 'fp16', + 'bf16', + 'fp32', + 'auto' + ], { + "default": 'auto' + }), + }, + "optional": { + "high_vram": ("BOOLEAN", {"default": False}), + } + } + + RETURN_TYPES = ("SUPIRMODEL", "SUPIRVAE") + RETURN_NAMES = ("SUPIR_model","SUPIR_VAE",) + FUNCTION = "process" + CATEGORY = "SUPIR" + DESCRIPTION = """ +Loads the SUPIR model and merges it with the SDXL model. + +Diffusion dtype should be kept on auto unless you have issues loading the model. +fp8_unet casts the unet weights to torch.float8_e4m3fn, which saves a lot of VRAM but has a slight quality impact. +high_vram: uses Accelerate to load weights directly to the GPU for slightly faster model loading. +""" + + def process(self, supir_model, diffusion_dtype, fp8_unet, model, clip_l, clip_g, vae, high_vram=False): + if high_vram: + device = mm.get_torch_device() + else: + device = mm.unet_offload_device() + print("Loading weights to: ", device) + mm.unload_all_models() + + SUPIR_MODEL_PATH = folder_paths.get_full_path("checkpoints", supir_model) + + config_path = os.path.join(script_directory, "options/SUPIR_v0.yaml") + clip_config_path = os.path.join(script_directory, "configs/clip_vit_config.json") + tokenizer_path = os.path.join(script_directory, "configs/tokenizer") + + custom_config = { + 'diffusion_dtype': diffusion_dtype, + 'supir_model': supir_model, + 'fp8_unet': fp8_unet, + 'model': model, + "clip": clip_l, + "clip_g": clip_g, + "vae": vae + } + + if diffusion_dtype == 'auto': + try: + if mm.should_use_fp16(): + print("Diffusion using fp16") + dtype = torch.float16 + elif mm.should_use_bf16(): + print("Diffusion using bf16") + dtype = torch.bfloat16 + else: + print("Diffusion using fp32") + dtype = torch.float32 + except: + raise AttributeError("ComfyUI version too old, can't autodetect properly.
Set your dtypes manually.") + else: + print(f"Diffusion using {diffusion_dtype}") + dtype = convert_dtype(diffusion_dtype) + + if not hasattr(self, "model") or self.model is None or self.current_config != custom_config: + self.current_config = custom_config + self.model = None + + mm.soft_empty_cache() + + config = OmegaConf.load(config_path) + if mm.XFORMERS_IS_AVAILABLE: + print("Using XFORMERS") + config.model.params.control_stage_config.params.spatial_transformer_attn_type = "softmax-xformers" + config.model.params.network_config.params.spatial_transformer_attn_type = "softmax-xformers" + config.model.params.first_stage_config.params.ddconfig.attn_type = "vanilla-xformers" + + config.model.target = ".SUPIR.models.SUPIR_model_v2.SUPIRModel" + pbar = comfy.utils.ProgressBar(5) + + #with (init_empty_weights() if is_accelerate_available else nullcontext()): + self.model = instantiate_from_config(config.model).cpu() + self.model.model.dtype = dtype + pbar.update(1) + try: + print(f"Attempting to load SDXL model from node inputs") + mm.load_model_gpu(model) + sdxl_state_dict = model.model.state_dict_for_saving(None, vae.get_sd(), None) + if is_accelerate_available: + for key in sdxl_state_dict: + set_module_tensor_to_device(self.model, key, device=device, dtype=dtype, value=sdxl_state_dict[key]) + else: + self.model.load_state_dict(sdxl_state_dict, strict=False) + if fp8_unet: + self.model.model.to(torch.float8_e4m3fn) + else: + self.model.model.to(dtype) + del sdxl_state_dict + pbar.update(1) + except: + raise Exception("Failed to load SDXL model") + gc.collect() + mm.soft_empty_cache() + #first clip model from SDXL checkpoint + try: + print("Loading first clip model from SDXL checkpoint") + clip_l_sd = None + clip_l_model = clip_l.load_model() + mm.load_model_gpu(clip_l_model) + clip_l_sd = clip_l.get_sd() + clip_l_sd = model.model.model_config.process_clip_state_dict_for_saving(clip_l_sd) + + replace_prefix = {} + replace_prefix["conditioner.embedders.0.transformer."] = "" + + clip_l_sd = comfy.utils.state_dict_prefix_replace(clip_l_sd, replace_prefix, filter_keys=True) + clip_text_config = CLIPTextConfig.from_pretrained(clip_config_path) + self.model.conditioner.embedders[0].tokenizer = CLIPTokenizer.from_pretrained(tokenizer_path) + with (init_empty_weights() if is_accelerate_available else nullcontext()): + self.model.conditioner.embedders[0].transformer = CLIPTextModel(clip_text_config) + if is_accelerate_available: + for key in clip_l_sd: + set_module_tensor_to_device(self.model.conditioner.embedders[0].transformer, key, device=device, dtype=dtype, value=clip_l_sd[key]) + else: + self.model.conditioner.embedders[0].transformer.load_state_dict(clip_l_sd, strict=False) + self.model.conditioner.embedders[0].eval() + for param in self.model.conditioner.embedders[0].parameters(): + param.requires_grad = False + self.model.conditioner.embedders[0].to(dtype) + del clip_l_sd + pbar.update(1) + except: + raise Exception("Failed to load first clip model from SDXL checkpoint") + gc.collect() + mm.soft_empty_cache() + #second clip model from SDXL checkpoint + try: + print("Loading second clip model from SDXL checkpoint") + clip_g_sd = None + clip_g_model = clip_g.load_model() + mm.load_model_gpu(clip_g_model) + clip_g_sd = clip_g.get_sd() + clip_g_sd = model.model.model_config.process_clip_state_dict_for_saving(clip_g_sd) + + replace_prefix2 = {} + replace_prefix2["conditioner.embedders.1.model."] = "" + clip_g_sd = comfy.utils.state_dict_prefix_replace(clip_g_sd, replace_prefix2, 
filter_keys=True) + clip_g = build_text_model_from_openai_state_dict(clip_g_sd, device, cast_dtype=dtype) + self.model.conditioner.embedders[1].model = clip_g + self.model.conditioner.embedders[1].model.to(dtype) + del clip_g_sd + pbar.update(1) + except: + raise Exception("Failed to load second clip model from SDXL checkpoint") + + try: + print(f'Attempting to load SUPIR model: [{SUPIR_MODEL_PATH}]') + supir_state_dict = load_state_dict(SUPIR_MODEL_PATH) + if "Q" not in supir_model or not is_accelerate_available: #I don't know why this doesn't work with the Q model. + for key in supir_state_dict: + set_module_tensor_to_device(self.model, key, device=device, dtype=dtype, value=supir_state_dict[key]) + else: + self.model.load_state_dict(supir_state_dict, strict=False) + if fp8_unet: + self.model.model.to(torch.float8_e4m3fn) + else: + self.model.model.to(dtype) + del supir_state_dict + pbar.update(1) + except: + raise Exception("Failed to load SUPIR model") + mm.soft_empty_cache() + + return (self.model, self.model.first_stage_model,) + +class SUPIR_tiles: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image": ("IMAGE",), + "tile_size": ("INT", {"default": 512, "min": 64, "max": 8192, "step": 64}), + "tile_stride": ("INT", {"default": 256, "min": 64, "max": 8192, "step": 64}), + + } + } + + RETURN_TYPES = ("IMAGE", "INT", "INT",) + RETURN_NAMES = ("image_tiles", "tile_size", "tile_stride",) + FUNCTION = "tile" + CATEGORY = "SUPIR" + DESCRIPTION = """ +Tiles the image with same function as the Tiled samplers use. +Useful for previewing the tiling and generating captions per tile (WIP feature) +""" + + def tile(self, image, tile_size, tile_stride): + + def _sliding_windows(h: int, w: int, tile_size: int, tile_stride: int): + hi_list = list(range(0, h - tile_size + 1, tile_stride)) + if (h - tile_size) % tile_stride != 0: + hi_list.append(h - tile_size) + + wi_list = list(range(0, w - tile_size + 1, tile_stride)) + if (w - tile_size) % tile_stride != 0: + wi_list.append(w - tile_size) + + coords = [] + for hi in hi_list: + for wi in wi_list: + coords.append((hi, hi + tile_size, wi, wi + tile_size)) + return coords + + image = image.permute(0, 3, 1, 2) + _, _, h, w = image.shape + + tiles_iterator = _sliding_windows(h, w, tile_size, tile_stride) + + tiles = [] + for hi, hi_end, wi, wi_end in tiles_iterator: + tile = image[:, :, hi:hi_end, wi:wi_end] + + tiles.append(tile) + out = torch.cat(tiles, dim=0).to(torch.float32).permute(0, 2, 3, 1) + print(out.shape) + print("len(tiles): ", len(tiles)) + + return (out, tile_size, tile_stride,) diff --git a/ComfyUI-SUPIR/options/SUPIR_v0.yaml b/ComfyUI-SUPIR/options/SUPIR_v0.yaml new file mode 100644 index 0000000000000000000000000000000000000000..3171a2b39527fd38e461c260840f36fa235bd998 --- /dev/null +++ b/ComfyUI-SUPIR/options/SUPIR_v0.yaml @@ -0,0 +1,156 @@ +model: + target: .SUPIR.models.SUPIR_model.SUPIRModel + params: + ae_dtype: bf16 + diffusion_dtype: fp16 + scale_factor: 0.13025 + disable_first_stage_autocast: True + network_wrapper: .sgm.modules.diffusionmodules.wrappers.ControlWrapper + + denoiser_config: + target: .sgm.modules.diffusionmodules.denoiser.DiscreteDenoiserWithControl + params: + num_idx: 1000 + weighting_config: + target: .sgm.modules.diffusionmodules.denoiser_weighting.EpsWeighting + scaling_config: + target: .sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling + discretization_config: + target: .sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization + + control_stage_config: + target: 
.SUPIR.modules.SUPIR_v0.GLVControl + params: + adm_in_channels: 2816 + num_classes: sequential + use_checkpoint: True + in_channels: 4 + out_channels: 4 + model_channels: 320 + attention_resolutions: [4, 2] + num_res_blocks: 2 + channel_mult: [1, 2, 4] + num_head_channels: 64 + use_spatial_transformer: True + use_linear_in_transformer: True + transformer_depth: [1, 2, 10] # note: the first is unused (due to attn_res starting at 2) 32, 16, 8 --> 64, 32, 16 +# transformer_depth: [1, 1, 4] + context_dim: 2048 + spatial_transformer_attn_type: softmax + legacy: False + input_upscale: 1 + + network_config: + target: .SUPIR.modules.SUPIR_v0.LightGLVUNet + params: + mode: XL-base + project_type: ZeroSFT + project_channel_scale: 2 + adm_in_channels: 2816 + num_classes: sequential + use_checkpoint: True + in_channels: 4 + out_channels: 4 + model_channels: 320 + attention_resolutions: [4, 2] + num_res_blocks: 2 + channel_mult: [1, 2, 4] + num_head_channels: 64 + use_spatial_transformer: True + use_linear_in_transformer: True + transformer_depth: [1, 2, 10] # note: the first is unused (due to attn_res starting at 2) 32, 16, 8 --> 64, 32, 16 + context_dim: 2048 + spatial_transformer_attn_type: softmax + legacy: False + + conditioner_config: + target: .sgm.modules.GeneralConditionerWithControl + params: + emb_models: + # crossattn cond + - is_trainable: False + input_key: txt + target: .sgm.modules.encoders.modules.FrozenCLIPEmbedder + params: + layer: hidden + layer_idx: 11 + # crossattn and vector cond + - is_trainable: False + input_key: txt + target: .sgm.modules.encoders.modules.FrozenOpenCLIPEmbedder2 + params: + arch: ViT-bigG-14 + version: laion2b_s39b_b160k + freeze: True + layer: penultimate + always_return_pooled: True + legacy: False + # vector cond + - is_trainable: False + input_key: original_size_as_tuple + target: .sgm.modules.encoders.modules.ConcatTimestepEmbedderND + params: + outdim: 256 # multiplied by two + # vector cond + - is_trainable: False + input_key: crop_coords_top_left + target: .sgm.modules.encoders.modules.ConcatTimestepEmbedderND + params: + outdim: 256 # multiplied by two + # vector cond + - is_trainable: False + input_key: target_size_as_tuple + target: .sgm.modules.encoders.modules.ConcatTimestepEmbedderND + params: + outdim: 256 # multiplied by two + + first_stage_config: + target: .sgm.models.autoencoder.AutoencoderKLInferenceWrapper + params: + ckpt_path: ~ + embed_dim: 4 + monitor: val/rec_loss + ddconfig: + attn_type: vanilla + double_z: true + z_channels: 4 + resolution: 256 + in_channels: 3 + out_ch: 3 + ch: 128 + ch_mult: [ 1, 2, 4, 4 ] + num_res_blocks: 2 + attn_resolutions: [ ] + dropout: 0.0 + lossconfig: + target: torch.nn.Identity + + sampler_config: + target: .sgm.modules.diffusionmodules.sampling.RestoreEDMSampler + params: + num_steps: 100 + restore_cfg: 4.0 + s_churn: 0 + s_noise: 1.003 + discretization_config: + target: .sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization + guider_config: + target: .sgm.modules.diffusionmodules.guiders.LinearCFG + params: + scale: 7.5 + scale_min: 4.0 + + p_p: + 'Cinematic, High Contrast, highly detailed, taken using a Canon EOS R camera, + hyper detailed photo - realistic maximum detail, 32k, Color Grading, ultra HD, extreme meticulous detailing, + skin pore detailing, hyper sharpness, perfect without deformations.' 
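+ # p_p above is SUPIR's default positive (quality) prompt; n_p below is the matching default negative prompt.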
+ n_p: + 'painting, oil painting, illustration, drawing, art, sketch, oil painting, cartoon, CG Style, 3D render, + unreal engine, blurring, dirty, messy, worst quality, low quality, frames, watermark, signature, + jpeg artifacts, deformed, lowres, over-smooth' + +SDXL_CKPT: /opt/data/private/AIGC_pretrain/SDXL_cache/sd_xl_base_1.0_0.9vae.safetensors +SUPIR_CKPT_F: /opt/data/private/AIGC_pretrain/SUPIR_cache/SUPIR-v0F.ckpt +SUPIR_CKPT_Q: /opt/data/private/AIGC_pretrain/SUPIR_cache/SUPIR-v0Q.ckpt +SUPIR_CKPT: ~ + diff --git a/ComfyUI-SUPIR/options/SUPIR_v0_tiled.yaml b/ComfyUI-SUPIR/options/SUPIR_v0_tiled.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d0c4b6c4af8d228804b4330c3dc060f9740eff20 --- /dev/null +++ b/ComfyUI-SUPIR/options/SUPIR_v0_tiled.yaml @@ -0,0 +1,158 @@ +model: + target: .SUPIR.models.SUPIR_model.SUPIRModel + params: + ae_dtype: bf16 + diffusion_dtype: fp16 + scale_factor: 0.13025 + disable_first_stage_autocast: True + network_wrapper: .sgm.modules.diffusionmodules.wrappers.ControlWrapper + + denoiser_config: + target: .sgm.modules.diffusionmodules.denoiser.DiscreteDenoiserWithControl + params: + num_idx: 1000 + weighting_config: + target: .sgm.modules.diffusionmodules.denoiser_weighting.EpsWeighting + scaling_config: + target: .sgm.modules.diffusionmodules.denoiser_scaling.EpsScaling + discretization_config: + target: .sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization + + control_stage_config: + target: .SUPIR.modules.SUPIR_v0.GLVControl + params: + adm_in_channels: 2816 + num_classes: sequential + use_checkpoint: True + in_channels: 4 + out_channels: 4 + model_channels: 320 + attention_resolutions: [4, 2] + num_res_blocks: 2 + channel_mult: [1, 2, 4] + num_head_channels: 64 + use_spatial_transformer: True + use_linear_in_transformer: True + transformer_depth: [1, 2, 10] # note: the first is unused (due to attn_res starting at 2) 32, 16, 8 --> 64, 32, 16 +# transformer_depth: [1, 1, 4] + context_dim: 2048 + spatial_transformer_attn_type: softmax + legacy: False + input_upscale: 1 + + network_config: + target: .SUPIR.modules.SUPIR_v0.LightGLVUNet + params: + mode: XL-base + project_type: ZeroSFT + project_channel_scale: 2 + adm_in_channels: 2816 + num_classes: sequential + use_checkpoint: True + in_channels: 4 + out_channels: 4 + model_channels: 320 + attention_resolutions: [4, 2] + num_res_blocks: 2 + channel_mult: [1, 2, 4] + num_head_channels: 64 + use_spatial_transformer: True + use_linear_in_transformer: True + transformer_depth: [1, 2, 10] # note: the first is unused (due to attn_res starting at 2) 32, 16, 8 --> 64, 32, 16 + context_dim: 2048 + spatial_transformer_attn_type: softmax + legacy: False + + conditioner_config: + target: .sgm.modules.GeneralConditionerWithControl + params: + emb_models: + # crossattn cond + - is_trainable: False + input_key: txt + target: .sgm.modules.encoders.modules.FrozenCLIPEmbedder + params: + layer: hidden + layer_idx: 11 + # crossattn and vector cond + - is_trainable: False + input_key: txt + target: .sgm.modules.encoders.modules.FrozenOpenCLIPEmbedder2 + params: + arch: ViT-bigG-14 + version: laion2b_s39b_b160k + freeze: True + layer: penultimate + always_return_pooled: True + legacy: False + # vector cond + - is_trainable: False + input_key: original_size_as_tuple + target: .sgm.modules.encoders.modules.ConcatTimestepEmbedderND + params: + outdim: 256 # multiplied by two + # vector cond + - is_trainable: False + input_key: crop_coords_top_left + target: 
.sgm.modules.encoders.modules.ConcatTimestepEmbedderND + params: + outdim: 256 # multiplied by two + # vector cond + - is_trainable: False + input_key: target_size_as_tuple + target: .sgm.modules.encoders.modules.ConcatTimestepEmbedderND + params: + outdim: 256 # multiplied by two + + first_stage_config: + target: .sgm.models.autoencoder.AutoencoderKLInferenceWrapper + params: + ckpt_path: ~ + embed_dim: 4 + monitor: val/rec_loss + ddconfig: + attn_type: vanilla + double_z: true + z_channels: 4 + resolution: 256 + in_channels: 3 + out_ch: 3 + ch: 128 + ch_mult: [ 1, 2, 4, 4 ] + num_res_blocks: 2 + attn_resolutions: [ ] + dropout: 0.0 + lossconfig: + target: torch.nn.Identity + + sampler_config: + target: .sgm.modules.diffusionmodules.sampling.TiledRestoreEDMSampler + params: + num_steps: 100 + restore_cfg: 4.0 + s_churn: 0 + s_noise: 1.003 + tile_size: 128 + tile_stride: 64 + discretization_config: + target: .sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization + guider_config: + target: .sgm.modules.diffusionmodules.guiders.LinearCFG + params: + scale: 7.5 + scale_min: 4.0 + + p_p: + 'Cinematic, High Contrast, highly detailed, taken using a Canon EOS R camera, + hyper detailed photo - realistic maximum detail, 32k, Color Grading, ultra HD, extreme meticulous detailing, + skin pore detailing, hyper sharpness, perfect without deformations.' + n_p: + 'painting, oil painting, illustration, drawing, art, sketch, oil painting, cartoon, CG Style, 3D render, + unreal engine, blurring, dirty, messy, worst quality, low quality, frames, watermark, signature, + jpeg artifacts, deformed, lowres, over-smooth' + +SDXL_CKPT: /opt/data/private/AIGC_pretrain/SDXL_cache/sd_xl_base_1.0_0.9vae.safetensors +SUPIR_CKPT_F: /opt/data/private/AIGC_pretrain/SUPIR_cache/SUPIR-v0F.ckpt +SUPIR_CKPT_Q: /opt/data/private/AIGC_pretrain/SUPIR_cache/SUPIR-v0Q.ckpt +SUPIR_CKPT: ~ + diff --git a/ComfyUI-SUPIR/pyproject.toml b/ComfyUI-SUPIR/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..a3ce15caa740954fe26ac38c7f94f5af4430d05c --- /dev/null +++ b/ComfyUI-SUPIR/pyproject.toml @@ -0,0 +1,14 @@ +[project] +name = "comfyui-supir" +description = "Wrapper nodes to use SUPIR upscaling process in ComfyUI" +version = "1.0.1" +license = "LICENSE" +dependencies = ["transformers>=4.28.1", "fsspec>=2023.4.0", "kornia>=0.6.9", "open-clip-torch>=2.24.0", "Pillow>=9.4.0", "pytorch-lightning>=2.2.1", "omegaconf", "accelerate"] + +[project.urls] +Repository = "https://github.com/kijai/ComfyUI-SUPIR" + +[tool.comfy] +PublisherId = "kijai" +DisplayName = "ComfyUI-SUPIR" +Icon = "" diff --git a/ComfyUI-SUPIR/requirements.txt b/ComfyUI-SUPIR/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..d6977ccf809a7e55ca3ffd36b53847e17ce035fd --- /dev/null +++ b/ComfyUI-SUPIR/requirements.txt @@ -0,0 +1,6 @@ +transformers>=4.28.1 +open-clip-torch>=2.24.0 +Pillow>=9.4.0 +pytorch-lightning>=2.2.1 +omegaconf +accelerate \ No newline at end of file diff --git a/ComfyUI-SUPIR/sgm/__init__.py b/ComfyUI-SUPIR/sgm/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..24bc84af8b1041de34b9816e0507cb1ac207bd13 --- /dev/null +++ b/ComfyUI-SUPIR/sgm/__init__.py @@ -0,0 +1,4 @@ +from .models import AutoencodingEngine, DiffusionEngine +from .util import get_configs_path, instantiate_from_config + +__version__ = "0.1.0" diff --git a/ComfyUI-SUPIR/sgm/lr_scheduler.py b/ComfyUI-SUPIR/sgm/lr_scheduler.py new file mode 100644 index 
0000000000000000000000000000000000000000..b2f4d384c1fcaff0df13e0564450d3fa972ace42 --- /dev/null +++ b/ComfyUI-SUPIR/sgm/lr_scheduler.py @@ -0,0 +1,135 @@ +import numpy as np + + +class LambdaWarmUpCosineScheduler: + """ + note: use with a base_lr of 1.0 + """ + + def __init__( + self, + warm_up_steps, + lr_min, + lr_max, + lr_start, + max_decay_steps, + verbosity_interval=0, + ): + self.lr_warm_up_steps = warm_up_steps + self.lr_start = lr_start + self.lr_min = lr_min + self.lr_max = lr_max + self.lr_max_decay_steps = max_decay_steps + self.last_lr = 0.0 + self.verbosity_interval = verbosity_interval + + def schedule(self, n, **kwargs): + if self.verbosity_interval > 0: + if n % self.verbosity_interval == 0: + print(f"current step: {n}, recent lr-multiplier: {self.last_lr}") + if n < self.lr_warm_up_steps: + lr = ( + self.lr_max - self.lr_start + ) / self.lr_warm_up_steps * n + self.lr_start + self.last_lr = lr + return lr + else: + t = (n - self.lr_warm_up_steps) / ( + self.lr_max_decay_steps - self.lr_warm_up_steps + ) + t = min(t, 1.0) + lr = self.lr_min + 0.5 * (self.lr_max - self.lr_min) * ( + 1 + np.cos(t * np.pi) + ) + self.last_lr = lr + return lr + + def __call__(self, n, **kwargs): + return self.schedule(n, **kwargs) + + +class LambdaWarmUpCosineScheduler2: + """ + supports repeated iterations, configurable via lists + note: use with a base_lr of 1.0. + """ + + def __init__( + self, warm_up_steps, f_min, f_max, f_start, cycle_lengths, verbosity_interval=0 + ): + assert ( + len(warm_up_steps) + == len(f_min) + == len(f_max) + == len(f_start) + == len(cycle_lengths) + ) + self.lr_warm_up_steps = warm_up_steps + self.f_start = f_start + self.f_min = f_min + self.f_max = f_max + self.cycle_lengths = cycle_lengths + self.cum_cycles = np.cumsum([0] + list(self.cycle_lengths)) + self.last_f = 0.0 + self.verbosity_interval = verbosity_interval + + def find_in_interval(self, n): + interval = 0 + for cl in self.cum_cycles[1:]: + if n <= cl: + return interval + interval += 1 + + def schedule(self, n, **kwargs): + cycle = self.find_in_interval(n) + n = n - self.cum_cycles[cycle] + if self.verbosity_interval > 0: + if n % self.verbosity_interval == 0: + print( + f"current step: {n}, recent lr-multiplier: {self.last_f}, " + f"current cycle {cycle}" + ) + if n < self.lr_warm_up_steps[cycle]: + f = (self.f_max[cycle] - self.f_start[cycle]) / self.lr_warm_up_steps[ + cycle + ] * n + self.f_start[cycle] + self.last_f = f + return f + else: + t = (n - self.lr_warm_up_steps[cycle]) / ( + self.cycle_lengths[cycle] - self.lr_warm_up_steps[cycle] + ) + t = min(t, 1.0) + f = self.f_min[cycle] + 0.5 * (self.f_max[cycle] - self.f_min[cycle]) * ( + 1 + np.cos(t * np.pi) + ) + self.last_f = f + return f + + def __call__(self, n, **kwargs): + return self.schedule(n, **kwargs) + + +class LambdaLinearScheduler(LambdaWarmUpCosineScheduler2): + def schedule(self, n, **kwargs): + cycle = self.find_in_interval(n) + n = n - self.cum_cycles[cycle] + if self.verbosity_interval > 0: + if n % self.verbosity_interval == 0: + print( + f"current step: {n}, recent lr-multiplier: {self.last_f}, " + f"current cycle {cycle}" + ) + + if n < self.lr_warm_up_steps[cycle]: + f = (self.f_max[cycle] - self.f_start[cycle]) / self.lr_warm_up_steps[ + cycle + ] * n + self.f_start[cycle] + self.last_f = f + return f + else: + f = self.f_min[cycle] + (self.f_max[cycle] - self.f_min[cycle]) * ( + self.cycle_lengths[cycle] - n + ) / (self.cycle_lengths[cycle]) + self.last_f = f + return f diff --git a/ComfyUI-SUPIR/sgm/models/__init__.py 
b/ComfyUI-SUPIR/sgm/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..c410b3747afc208e4204c8f140170e0a7808eace --- /dev/null +++ b/ComfyUI-SUPIR/sgm/models/__init__.py @@ -0,0 +1,2 @@ +from .autoencoder import AutoencodingEngine +from .diffusion import DiffusionEngine diff --git a/ComfyUI-SUPIR/sgm/models/autoencoder.py b/ComfyUI-SUPIR/sgm/models/autoencoder.py new file mode 100644 index 0000000000000000000000000000000000000000..eaa2df77df7e865459f02716fab1880288039149 --- /dev/null +++ b/ComfyUI-SUPIR/sgm/models/autoencoder.py @@ -0,0 +1,337 @@ +import re +from abc import abstractmethod +from contextlib import contextmanager +from typing import Any, Dict, Tuple, Union + +import pytorch_lightning as pl +import torch +from omegaconf import ListConfig +from packaging import version +from safetensors.torch import load_file as load_safetensors + +from ..modules.diffusionmodules.model import Decoder, Encoder +from ..modules.distributions.distributions import DiagonalGaussianDistribution +from ..modules.ema import LitEma +from ..util import default, get_obj_from_str, instantiate_from_config + +import comfy.ops +ops = comfy.ops.manual_cast + +class AbstractAutoencoder(pl.LightningModule): + """ + This is the base class for all autoencoders, including image autoencoders, image autoencoders with discriminators, + unCLIP models, etc. Hence, it is fairly general, and specific features + (e.g. discriminator training, encoding, decoding) must be implemented in subclasses. + """ + + def __init__( + self, + ema_decay: Union[None, float] = None, + monitor: Union[None, str] = None, + input_key: str = "jpg", + ckpt_path: Union[None, str] = None, + ignore_keys: Union[Tuple, list, ListConfig] = (), + ): + super().__init__() + self.input_key = input_key + self.use_ema = ema_decay is not None + if monitor is not None: + self.monitor = monitor + + if self.use_ema: + self.model_ema = LitEma(self, decay=ema_decay) + print(f"Keeping EMAs of {len(list(self.model_ema.buffers()))}.") + + if ckpt_path is not None: + self.init_from_ckpt(ckpt_path, ignore_keys=ignore_keys) + + if version.parse(torch.__version__) >= version.parse("2.0.0"): + self.automatic_optimization = False + + def init_from_ckpt( + self, path: str, ignore_keys: Union[Tuple, list, ListConfig] = tuple() + ) -> None: + if path.endswith("ckpt"): + sd = torch.load(path, map_location="cpu")["state_dict"] + elif path.endswith("safetensors"): + sd = load_safetensors(path) + else: + raise NotImplementedError + + keys = list(sd.keys()) + for k in keys: + for ik in ignore_keys: + if re.match(ik, k): + print("Deleting key {} from state_dict.".format(k)) + del sd[k] + missing, unexpected = self.load_state_dict(sd, strict=False) + print( + f"Restored from {path} with {len(missing)} missing and {len(unexpected)} unexpected keys" + ) + if len(missing) > 0: + print(f"Missing Keys: {missing}") + if len(unexpected) > 0: + print(f"Unexpected Keys: {unexpected}") + + @abstractmethod + def get_input(self, batch) -> Any: + raise NotImplementedError() + + def on_train_batch_end(self, *args, **kwargs): + # for EMA computation + if self.use_ema: + self.model_ema(self) + + @contextmanager + def ema_scope(self, context=None): + if self.use_ema: + self.model_ema.store(self.parameters()) + self.model_ema.copy_to(self) + if context is not None: + print(f"{context}: Switched to EMA weights") + try: + yield None + finally: + if self.use_ema: + self.model_ema.restore(self.parameters()) + if context is not None: + print(f"{context}: Restored 
training weights") + + @abstractmethod + def encode(self, *args, **kwargs) -> torch.Tensor: + raise NotImplementedError("encode()-method of abstract base class called") + + @abstractmethod + def decode(self, *args, **kwargs) -> torch.Tensor: + raise NotImplementedError("decode()-method of abstract base class called") + + def instantiate_optimizer_from_config(self, params, lr, cfg): + print(f"loading >>> {cfg['target']} <<< optimizer from config") + return get_obj_from_str(cfg["target"])( + params, lr=lr, **cfg.get("params", dict()) + ) + + def configure_optimizers(self) -> Any: + raise NotImplementedError() + + +class AutoencodingEngine(AbstractAutoencoder): + """ + Base class for all image autoencoders that we train, like VQGAN or AutoencoderKL + (we also restore them explicitly as special cases for legacy reasons). + Regularizations such as KL or VQ are moved to the regularizer class. + """ + + def __init__( + self, + *args, + encoder_config: Dict, + decoder_config: Dict, + loss_config: Dict, + regularizer_config: Dict, + optimizer_config: Union[Dict, None] = None, + lr_g_factor: float = 1.0, + **kwargs, + ): + super().__init__(*args, **kwargs) + # todo: add options to freeze encoder/decoder + self.encoder = instantiate_from_config(encoder_config) + self.decoder = instantiate_from_config(decoder_config) + self.loss = instantiate_from_config(loss_config) + self.regularization = instantiate_from_config(regularizer_config) + self.optimizer_config = default( + optimizer_config, {"target": "torch.optim.Adam"} + ) + self.lr_g_factor = lr_g_factor + + def get_input(self, batch: Dict) -> torch.Tensor: + # assuming unified data format, dataloader returns a dict. + # image tensors should be scaled to -1 ... 1 and in channels-first format (e.g., bchw instead if bhwc) + return batch[self.input_key] + + def get_autoencoder_params(self) -> list: + params = ( + list(self.encoder.parameters()) + + list(self.decoder.parameters()) + + list(self.regularization.get_trainable_parameters()) + + list(self.loss.get_trainable_autoencoder_parameters()) + ) + return params + + def get_discriminator_params(self) -> list: + params = list(self.loss.get_trainable_parameters()) # e.g., discriminator + return params + + def get_last_layer(self): + return self.decoder.get_last_layer() + + def encode(self, x: Any, return_reg_log: bool = False) -> Any: + z = self.encoder(x) + z, reg_log = self.regularization(z) + if return_reg_log: + return z, reg_log + return z + + def decode(self, z: Any) -> torch.Tensor: + x = self.decoder(z) + return x + + def forward(self, x: Any) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: + z, reg_log = self.encode(x, return_reg_log=True) + dec = self.decode(z) + return z, dec, reg_log + + def training_step(self, batch, batch_idx, optimizer_idx) -> Any: + x = self.get_input(batch) + z, xrec, regularization_log = self(x) + + if optimizer_idx == 0: + # autoencode + aeloss, log_dict_ae = self.loss( + regularization_log, + x, + xrec, + optimizer_idx, + self.global_step, + last_layer=self.get_last_layer(), + split="train", + ) + + self.log_dict( + log_dict_ae, prog_bar=False, logger=True, on_step=True, on_epoch=True + ) + return aeloss + + if optimizer_idx == 1: + # discriminator + discloss, log_dict_disc = self.loss( + regularization_log, + x, + xrec, + optimizer_idx, + self.global_step, + last_layer=self.get_last_layer(), + split="train", + ) + self.log_dict( + log_dict_disc, prog_bar=False, logger=True, on_step=True, on_epoch=True + ) + return discloss + + def validation_step(self, batch, 
batch_idx) -> Dict: + log_dict = self._validation_step(batch, batch_idx) + with self.ema_scope(): + log_dict_ema = self._validation_step(batch, batch_idx, postfix="_ema") + log_dict.update(log_dict_ema) + return log_dict + + def _validation_step(self, batch, batch_idx, postfix="") -> Dict: + x = self.get_input(batch) + + z, xrec, regularization_log = self(x) + aeloss, log_dict_ae = self.loss( + regularization_log, + x, + xrec, + 0, + self.global_step, + last_layer=self.get_last_layer(), + split="val" + postfix, + ) + + discloss, log_dict_disc = self.loss( + regularization_log, + x, + xrec, + 1, + self.global_step, + last_layer=self.get_last_layer(), + split="val" + postfix, + ) + self.log(f"val{postfix}/rec_loss", log_dict_ae[f"val{postfix}/rec_loss"]) + log_dict_ae.update(log_dict_disc) + self.log_dict(log_dict_ae) + return log_dict_ae + + def configure_optimizers(self) -> Any: + ae_params = self.get_autoencoder_params() + disc_params = self.get_discriminator_params() + + opt_ae = self.instantiate_optimizer_from_config( + ae_params, + default(self.lr_g_factor, 1.0) * self.learning_rate, + self.optimizer_config, + ) + opt_disc = self.instantiate_optimizer_from_config( + disc_params, self.learning_rate, self.optimizer_config + ) + + return [opt_ae, opt_disc], [] + + @torch.no_grad() + def log_images(self, batch: Dict, **kwargs) -> Dict: + log = dict() + x = self.get_input(batch) + _, xrec, _ = self(x) + log["inputs"] = x + log["reconstructions"] = xrec + with self.ema_scope(): + _, xrec_ema, _ = self(x) + log["reconstructions_ema"] = xrec_ema + return log + + +class AutoencoderKL(AutoencodingEngine): + def __init__(self, embed_dim: int, **kwargs): + ddconfig = kwargs.pop("ddconfig") + ckpt_path = kwargs.pop("ckpt_path", None) + ignore_keys = kwargs.pop("ignore_keys", ()) + super().__init__( + encoder_config={"target": "torch.nn.Identity"}, + decoder_config={"target": "torch.nn.Identity"}, + regularizer_config={"target": "torch.nn.Identity"}, + loss_config=kwargs.pop("lossconfig"), + **kwargs, + ) + assert ddconfig["double_z"] + self.encoder = Encoder(**ddconfig) + self.decoder = Decoder(**ddconfig) + self.quant_conv = ops.Conv2d(2 * ddconfig["z_channels"], 2 * embed_dim, 1) + self.post_quant_conv = ops.Conv2d(embed_dim, ddconfig["z_channels"], 1) + self.embed_dim = embed_dim + + if ckpt_path is not None: + self.init_from_ckpt(ckpt_path, ignore_keys=ignore_keys) + + def encode(self, x): + assert ( + not self.training + ), f"{self.__class__.__name__} only supports inference currently" + h = self.encoder(x) + moments = self.quant_conv(h) + posterior = DiagonalGaussianDistribution(moments) + return posterior + + def decode(self, z, **decoder_kwargs): + z = self.post_quant_conv(z) + dec = self.decoder(z, **decoder_kwargs) + return dec + + +class AutoencoderKLInferenceWrapper(AutoencoderKL): + def encode(self, x): + return super().encode(x).sample() + + +class IdentityFirstStage(AbstractAutoencoder): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def get_input(self, x: Any) -> Any: + return x + + def encode(self, x: Any, *args, **kwargs) -> Any: + return x + + def decode(self, x: Any, *args, **kwargs) -> Any: + return x diff --git a/ComfyUI-SUPIR/sgm/models/diffusion.py b/ComfyUI-SUPIR/sgm/models/diffusion.py new file mode 100644 index 0000000000000000000000000000000000000000..2e23d36618d824a04560c4b40de3805c94f309fd --- /dev/null +++ b/ComfyUI-SUPIR/sgm/models/diffusion.py @@ -0,0 +1,322 @@ +from contextlib import contextmanager +from typing import Any, Dict, 
List, Tuple, Union + +import pytorch_lightning as pl +import torch +from omegaconf import ListConfig, OmegaConf +from safetensors.torch import load_file as load_safetensors +from torch.optim.lr_scheduler import LambdaLR + +from ..modules import UNCONDITIONAL_CONFIG +from ..modules.diffusionmodules.wrappers import OPENAIUNETWRAPPER +from ..modules.ema import LitEma +from ..util import ( + default, + disabled_train, + get_obj_from_str, + instantiate_from_config, + log_txt_as_img, +) +import comfy.model_management + +device = comfy.model_management.get_torch_device() + +class DiffusionEngine(pl.LightningModule): + def __init__( + self, + network_config, + denoiser_config, + first_stage_config, + conditioner_config: Union[None, Dict, ListConfig, OmegaConf] = None, + sampler_config: Union[None, Dict, ListConfig, OmegaConf] = None, + optimizer_config: Union[None, Dict, ListConfig, OmegaConf] = None, + scheduler_config: Union[None, Dict, ListConfig, OmegaConf] = None, + loss_fn_config: Union[None, Dict, ListConfig, OmegaConf] = None, + network_wrapper: Union[None, str] = None, + ckpt_path: Union[None, str] = None, + use_ema: bool = False, + ema_decay_rate: float = 0.9999, + scale_factor: float = 1.0, + disable_first_stage_autocast=False, + input_key: str = "jpg", + log_keys: Union[List, None] = None, + no_cond_log: bool = False, + compile_model: bool = False, + ): + super().__init__() + self.log_keys = log_keys + self.input_key = input_key + self.optimizer_config = default( + optimizer_config, {"target": "torch.optim.AdamW"} + ) + model = instantiate_from_config(network_config) + self.model = get_obj_from_str(default(network_wrapper, OPENAIUNETWRAPPER))( + model, compile_model=compile_model + ) + + self.denoiser = instantiate_from_config(denoiser_config) + self.sampler = ( + instantiate_from_config(sampler_config) + if sampler_config is not None + else None + ) + self.conditioner = instantiate_from_config( + default(conditioner_config, UNCONDITIONAL_CONFIG) + ) + self.scheduler_config = scheduler_config + self._init_first_stage(first_stage_config) + + self.loss_fn = ( + instantiate_from_config(loss_fn_config) + if loss_fn_config is not None + else None + ) + + self.use_ema = use_ema + if self.use_ema: + self.model_ema = LitEma(self.model, decay=ema_decay_rate) + print(f"Keeping EMAs of {len(list(self.model_ema.buffers()))}.") + + self.scale_factor = scale_factor + self.disable_first_stage_autocast = disable_first_stage_autocast + self.no_cond_log = no_cond_log + + if ckpt_path is not None: + self.init_from_ckpt(ckpt_path) + + def init_from_ckpt( + self, + path: str, + ) -> None: + if path.endswith("ckpt"): + sd = torch.load(path, map_location="cpu")["state_dict"] + elif path.endswith("safetensors"): + sd = load_safetensors(path) + else: + raise NotImplementedError + + missing, unexpected = self.load_state_dict(sd, strict=False) + print( + f"Restored from {path} with {len(missing)} missing and {len(unexpected)} unexpected keys" + ) + if len(missing) > 0: + print(f"Missing Keys: {missing}") + if len(unexpected) > 0: + print(f"Unexpected Keys: {unexpected}") + + def _init_first_stage(self, config): + model = instantiate_from_config(config).eval() + model.train = disabled_train + for param in model.parameters(): + param.requires_grad = False + self.first_stage_model = model + + def get_input(self, batch): + # assuming unified data format, dataloader returns a dict. + # image tensors should be scaled to -1 ... 
1 and in bchw format + return batch[self.input_key] + + @torch.no_grad() + def decode_first_stage(self, z): + z = 1.0 / self.scale_factor * z + with torch.autocast(device, enabled=not self.disable_first_stage_autocast): + out = self.first_stage_model.decode(z) + return out + + @torch.no_grad() + def encode_first_stage(self, x): + with torch.autocast(device, enabled=not self.disable_first_stage_autocast): + z = self.first_stage_model.encode(x) + z = self.scale_factor * z + return z + + def forward(self, x, batch): + loss = self.loss_fn(self.model, self.denoiser, self.conditioner, x, batch) + loss_mean = loss.mean() + loss_dict = {"loss": loss_mean} + return loss_mean, loss_dict + + def shared_step(self, batch: Dict) -> Any: + x = self.get_input(batch) + x = self.encode_first_stage(x) + batch["global_step"] = self.global_step + loss, loss_dict = self(x, batch) + return loss, loss_dict + + def training_step(self, batch, batch_idx): + loss, loss_dict = self.shared_step(batch) + + self.log_dict( + loss_dict, prog_bar=True, logger=True, on_step=True, on_epoch=False + ) + + self.log( + "global_step", + self.global_step, + prog_bar=True, + logger=True, + on_step=True, + on_epoch=False, + ) + + # if self.scheduler_config is not None: + lr = self.optimizers().param_groups[0]["lr"] + self.log( + "lr_abs", lr, prog_bar=True, logger=True, on_step=True, on_epoch=False + ) + + return loss + + def on_train_start(self, *args, **kwargs): + if self.sampler is None or self.loss_fn is None: + raise ValueError("Sampler and loss function need to be set for training.") + + def on_train_batch_end(self, *args, **kwargs): + if self.use_ema: + self.model_ema(self.model) + + @contextmanager + def ema_scope(self, context=None): + if self.use_ema: + self.model_ema.store(self.model.parameters()) + self.model_ema.copy_to(self.model) + if context is not None: + print(f"{context}: Switched to EMA weights") + try: + yield None + finally: + if self.use_ema: + self.model_ema.restore(self.model.parameters()) + if context is not None: + print(f"{context}: Restored training weights") + + def instantiate_optimizer_from_config(self, params, lr, cfg): + return get_obj_from_str(cfg["target"])( + params, lr=lr, **cfg.get("params", dict()) + ) + + def configure_optimizers(self): + lr = self.learning_rate + params = list(self.model.parameters()) + for embedder in self.conditioner.embedders: + if embedder.is_trainable: + params = params + list(embedder.parameters()) + opt = self.instantiate_optimizer_from_config(params, lr, self.optimizer_config) + if self.scheduler_config is not None: + scheduler = instantiate_from_config(self.scheduler_config) + print("Setting up LambdaLR scheduler...") + scheduler = [ + { + "scheduler": LambdaLR(opt, lr_lambda=scheduler.schedule), + "interval": "step", + "frequency": 1, + } + ] + return [opt], scheduler + return opt + + @torch.no_grad() + def sample( + self, + cond: Dict, + uc: Union[Dict, None] = None, + batch_size: int = 16, + shape: Union[None, Tuple, List] = None, + **kwargs, + ): + randn = torch.randn(batch_size, *shape).to(self.device) + + denoiser = lambda input, sigma, c: self.denoiser( + self.model, input, sigma, c, **kwargs + ) + samples = self.sampler(denoiser, randn, cond, uc=uc) + return samples + + @torch.no_grad() + def log_conditionings(self, batch: Dict, n: int) -> Dict: + """ + Defines heuristics to log different conditionings. + These can be lists of strings (text-to-image), tensors, ints, ... 
+ """ + image_h, image_w = batch[self.input_key].shape[2:] + log = dict() + + for embedder in self.conditioner.embedders: + if ( + (self.log_keys is None) or (embedder.input_key in self.log_keys) + ) and not self.no_cond_log: + x = batch[embedder.input_key][:n] + if isinstance(x, torch.Tensor): + if x.dim() == 1: + # class-conditional, convert integer to string + x = [str(x[i].item()) for i in range(x.shape[0])] + xc = log_txt_as_img((image_h, image_w), x, size=image_h // 4) + elif x.dim() == 2: + # size and crop cond and the like + x = [ + "x".join([str(xx) for xx in x[i].tolist()]) + for i in range(x.shape[0]) + ] + xc = log_txt_as_img((image_h, image_w), x, size=image_h // 20) + else: + raise NotImplementedError() + elif isinstance(x, (List, ListConfig)): + if isinstance(x[0], str): + # strings + xc = log_txt_as_img((image_h, image_w), x, size=image_h // 20) + else: + raise NotImplementedError() + else: + raise NotImplementedError() + log[embedder.input_key] = xc + return log + + @torch.no_grad() + def log_images( + self, + batch: Dict, + N: int = 8, + sample: bool = True, + ucg_keys: List[str] = None, + **kwargs, + ) -> Dict: + conditioner_input_keys = [e.input_key for e in self.conditioner.embedders] + if ucg_keys: + assert all(map(lambda x: x in conditioner_input_keys, ucg_keys)), ( + "Each defined ucg key for sampling must be in the provided conditioner input keys," + f"but we have {ucg_keys} vs. {conditioner_input_keys}" + ) + else: + ucg_keys = conditioner_input_keys + log = dict() + + x = self.get_input(batch) + + c, uc = self.conditioner.get_unconditional_conditioning( + batch, + force_uc_zero_embeddings=ucg_keys + if len(self.conditioner.embedders) > 0 + else [], + ) + + sampling_kwargs = {} + + N = min(x.shape[0], N) + x = x.to(self.device)[:N] + log["inputs"] = x + z = self.encode_first_stage(x) + log["reconstructions"] = self.decode_first_stage(z) + log.update(self.log_conditionings(batch, N)) + + for k in c: + if isinstance(c[k], torch.Tensor): + c[k], uc[k] = map(lambda y: y[k][:N].to(self.device), (c, uc)) + + if sample: + with self.ema_scope("Plotting"): + samples = self.sample( + c, shape=z.shape[1:], uc=uc, batch_size=N, **sampling_kwargs + ) + samples = self.decode_first_stage(samples) + log["samples"] = samples + return log diff --git a/ComfyUI-SUPIR/sgm/modules/__init__.py b/ComfyUI-SUPIR/sgm/modules/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..56508b459cd911dc3d5ac0b216635f626abfa8a4 --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/__init__.py @@ -0,0 +1,8 @@ +from .encoders.modules import GeneralConditioner +from .encoders.modules import GeneralConditionerWithControl +from .encoders.modules import PreparedConditioner + +UNCONDITIONAL_CONFIG = { + "target": ".sgm.modules.GeneralConditioner", + "params": {"emb_models": []}, +} diff --git a/ComfyUI-SUPIR/sgm/modules/attention.py b/ComfyUI-SUPIR/sgm/modules/attention.py new file mode 100644 index 0000000000000000000000000000000000000000..bb846287e6395e50cbdd6ab653715795059cd34b --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/attention.py @@ -0,0 +1,638 @@ +import math +from inspect import isfunction +from typing import Any, Optional + +import torch +import torch.nn.functional as F +# from einops._torch_specific import allow_ops_in_compiled_graph +# allow_ops_in_compiled_graph() +from einops import rearrange, repeat +from packaging import version +from torch import nn + +import comfy.ops +ops = comfy.ops.manual_cast + +if version.parse(torch.__version__) >= version.parse("2.0.0"): + 
SDP_IS_AVAILABLE = True + from torch.backends.cuda import SDPBackend, sdp_kernel + + BACKEND_MAP = { + SDPBackend.MATH: { + "enable_math": True, + "enable_flash": False, + "enable_mem_efficient": False, + }, + SDPBackend.FLASH_ATTENTION: { + "enable_math": False, + "enable_flash": True, + "enable_mem_efficient": False, + }, + SDPBackend.EFFICIENT_ATTENTION: { + "enable_math": False, + "enable_flash": False, + "enable_mem_efficient": True, + }, + None: {"enable_math": True, "enable_flash": True, "enable_mem_efficient": True}, + } +else: + from contextlib import nullcontext + + SDP_IS_AVAILABLE = False + sdp_kernel = nullcontext + BACKEND_MAP = {} + print( + f"No SDP backend available, likely because you are running in pytorch versions < 2.0. In fact, " + f"you are using PyTorch {torch.__version__}. You might want to consider upgrading." + ) + +try: + import xformers + import xformers.ops + + XFORMERS_IS_AVAILABLE = True +except: + XFORMERS_IS_AVAILABLE = False + print("no module 'xformers'. Processing without...") + +from .diffusionmodules.util import checkpoint + + +def exists(val): + return val is not None + + +def uniq(arr): + return {el: True for el in arr}.keys() + + +def default(val, d): + if exists(val): + return val + return d() if isfunction(d) else d + + +def max_neg_value(t): + return -torch.finfo(t.dtype).max + + +def init_(tensor): + dim = tensor.shape[-1] + std = 1 / math.sqrt(dim) + tensor.uniform_(-std, std) + return tensor + + +# feedforward +class GEGLU(nn.Module): + def __init__(self, dim_in, dim_out): + super().__init__() + self.proj = ops.Linear(dim_in, dim_out * 2) + + def forward(self, x): + x, gate = self.proj(x).chunk(2, dim=-1) + return x * F.gelu(gate) + + +class FeedForward(nn.Module): + def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0.0): + super().__init__() + inner_dim = int(dim * mult) + dim_out = default(dim_out, dim) + project_in = ( + nn.Sequential(ops.Linear(dim, inner_dim), nn.GELU()) + if not glu + else GEGLU(dim, inner_dim) + ) + + self.net = nn.Sequential( + project_in, nn.Dropout(dropout), ops.Linear(inner_dim, dim_out) + ) + + def forward(self, x): + return self.net(x) + + +def zero_module(module): + """ + Zero out the parameters of a module and return it. 
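+ Used here for output projections (e.g. proj_out in SpatialTransformer) so that a freshly
+ initialised block contributes nothing at first and the residual connection acts as an identity.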
+ """ + for p in module.parameters(): + p.detach().zero_() + return module + + +def Normalize(in_channels): + return ops.GroupNorm( + num_groups=32, num_channels=in_channels, eps=1e-6, affine=True + ) + + +class LinearAttention(nn.Module): + def __init__(self, dim, heads=4, dim_head=32): + super().__init__() + self.heads = heads + hidden_dim = dim_head * heads + self.to_qkv = ops.Conv2d(dim, hidden_dim * 3, 1, bias=False) + self.to_out = ops.Conv2d(hidden_dim, dim, 1) + + def forward(self, x): + b, c, h, w = x.shape + qkv = self.to_qkv(x) + q, k, v = rearrange( + qkv, "b (qkv heads c) h w -> qkv b heads c (h w)", heads=self.heads, qkv=3 + ) + k = k.softmax(dim=-1) + context = torch.einsum("bhdn,bhen->bhde", k, v) + out = torch.einsum("bhde,bhdn->bhen", context, q) + out = rearrange( + out, "b heads c (h w) -> b (heads c) h w", heads=self.heads, h=h, w=w + ) + return self.to_out(out) + + +class SpatialSelfAttention(nn.Module): + def __init__(self, in_channels): + super().__init__() + self.in_channels = in_channels + + self.norm = Normalize(in_channels) + self.q = ops.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.k = ops.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.v = ops.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.proj_out = ops.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + + def forward(self, x): + h_ = x + h_ = self.norm(h_) + q = self.q(h_) + k = self.k(h_) + v = self.v(h_) + + # compute attention + b, c, h, w = q.shape + q = rearrange(q, "b c h w -> b (h w) c") + k = rearrange(k, "b c h w -> b c (h w)") + w_ = torch.einsum("bij,bjk->bik", q, k) + + w_ = w_ * (int(c) ** (-0.5)) + w_ = torch.nn.functional.softmax(w_, dim=2) + + # attend to values + v = rearrange(v, "b c h w -> b c (h w)") + w_ = rearrange(w_, "b i j -> b j i") + h_ = torch.einsum("bij,bjk->bik", v, w_) + h_ = rearrange(h_, "b c (h w) -> b c h w", h=h) + h_ = self.proj_out(h_) + + return x + h_ + + +class CrossAttention(nn.Module): + def __init__( + self, + query_dim, + context_dim=None, + heads=8, + dim_head=64, + dropout=0.0, + backend=None, + ): + super().__init__() + inner_dim = dim_head * heads + context_dim = default(context_dim, query_dim) + + self.scale = dim_head**-0.5 + self.heads = heads + + self.to_q = ops.Linear(query_dim, inner_dim, bias=False) + self.to_k = ops.Linear(context_dim, inner_dim, bias=False) + self.to_v = ops.Linear(context_dim, inner_dim, bias=False) + + self.to_out = nn.Sequential( + ops.Linear(inner_dim, query_dim), nn.Dropout(dropout) + ) + self.backend = backend + + def forward( + self, + x, + context=None, + mask=None, + additional_tokens=None, + n_times_crossframe_attn_in_self=0, + ): + h = self.heads + + if additional_tokens is not None: + # get the number of masked tokens at the beginning of the output sequence + n_tokens_to_mask = additional_tokens.shape[1] + # add additional token + x = torch.cat([additional_tokens, x], dim=1) + + q = self.to_q(x) + context = default(context, x) + k = self.to_k(context) + v = self.to_v(context) + + if n_times_crossframe_attn_in_self: + # reprogramming cross-frame attention as in https://arxiv.org/abs/2303.13439 + assert x.shape[0] % n_times_crossframe_attn_in_self == 0 + n_cp = x.shape[0] // n_times_crossframe_attn_in_self + k = repeat( + k[::n_times_crossframe_attn_in_self], "b ... -> (b n) ...", n=n_cp + ) + v = repeat( + v[::n_times_crossframe_attn_in_self], "b ... 
-> (b n) ...", n=n_cp + ) + + q, k, v = map(lambda t: rearrange(t, "b n (h d) -> b h n d", h=h), (q, k, v)) + + ## old + """ + sim = einsum('b i d, b j d -> b i j', q, k) * self.scale + del q, k + + if exists(mask): + mask = rearrange(mask, 'b ... -> b (...)') + max_neg_value = -torch.finfo(sim.dtype).max + mask = repeat(mask, 'b j -> (b h) () j', h=h) + sim.masked_fill_(~mask, max_neg_value) + + # attention, what we cannot get enough of + sim = sim.softmax(dim=-1) + + out = einsum('b i j, b j d -> b i d', sim, v) + """ + ## new + with sdp_kernel(**BACKEND_MAP[self.backend]): + # print("dispatching into backend", self.backend, "q/k/v shape: ", q.shape, k.shape, v.shape) + out = F.scaled_dot_product_attention( + q, k, v, attn_mask=mask + ) # scale is dim_head ** -0.5 per default + + del q, k, v + out = rearrange(out, "b h n d -> b n (h d)", h=h) + + if additional_tokens is not None: + # remove additional token + out = out[:, n_tokens_to_mask:] + return self.to_out(out) + + +class MemoryEfficientCrossAttention(nn.Module): + # https://github.com/MatthieuTPHR/diffusers/blob/d80b531ff8060ec1ea982b65a1b8df70f73aa67c/src/diffusers/models/attention.py#L223 + def __init__( + self, query_dim, context_dim=None, heads=8, dim_head=64, dropout=0.0, **kwargs + ): + super().__init__() + #print( + # f"Setting up {self.__class__.__name__}. Query dim is {query_dim}, context_dim is {context_dim} and using " + # f"{heads} heads with a dimension of {dim_head}." + #) + inner_dim = dim_head * heads + context_dim = default(context_dim, query_dim) + + self.heads = heads + self.dim_head = dim_head + + self.to_q = ops.Linear(query_dim, inner_dim, bias=False) + self.to_k = ops.Linear(context_dim, inner_dim, bias=False) + self.to_v = ops.Linear(context_dim, inner_dim, bias=False) + + self.to_out = nn.Sequential( + ops.Linear(inner_dim, query_dim), nn.Dropout(dropout) + ) + self.attention_op: Optional[Any] = None + + def forward( + self, + x, + context=None, + mask=None, + additional_tokens=None, + n_times_crossframe_attn_in_self=0, + ): + if additional_tokens is not None: + # get the number of masked tokens at the beginning of the output sequence + n_tokens_to_mask = additional_tokens.shape[1] + # add additional token + x = torch.cat([additional_tokens, x], dim=1) + q = self.to_q(x) + context = default(context, x) + k = self.to_k(context) + v = self.to_v(context) + + if n_times_crossframe_attn_in_self: + # reprogramming cross-frame attention as in https://arxiv.org/abs/2303.13439 + assert x.shape[0] % n_times_crossframe_attn_in_self == 0 + # n_cp = x.shape[0]//n_times_crossframe_attn_in_self + k = repeat( + k[::n_times_crossframe_attn_in_self], + "b ... -> (b n) ...", + n=n_times_crossframe_attn_in_self, + ) + v = repeat( + v[::n_times_crossframe_attn_in_self], + "b ... 
-> (b n) ...", + n=n_times_crossframe_attn_in_self, + ) + + b, _, _ = q.shape + q, k, v = map( + lambda t: t.unsqueeze(3) + .reshape(b, t.shape[1], self.heads, self.dim_head) + .permute(0, 2, 1, 3) + .reshape(b * self.heads, t.shape[1], self.dim_head) + .contiguous(), + (q, k, v), + ) + + # actually compute the attention, what we cannot get enough of + out = xformers.ops.memory_efficient_attention( + q, k, v, attn_bias=None, op=self.attention_op + ) + + # TODO: Use this directly in the attention operation, as a bias + if exists(mask): + raise NotImplementedError + out = ( + out.unsqueeze(0) + .reshape(b, self.heads, out.shape[1], self.dim_head) + .permute(0, 2, 1, 3) + .reshape(b, out.shape[1], self.heads * self.dim_head) + ) + if additional_tokens is not None: + # remove additional token + out = out[:, n_tokens_to_mask:] + return self.to_out(out) + + +class BasicTransformerBlock(nn.Module): + ATTENTION_MODES = { + "softmax": CrossAttention, # vanilla attention + "softmax-xformers": MemoryEfficientCrossAttention, # ampere + } + + def __init__( + self, + dim, + n_heads, + d_head, + dropout=0.0, + context_dim=None, + gated_ff=True, + checkpoint=True, + disable_self_attn=False, + attn_mode="softmax", + sdp_backend=None, + ): + super().__init__() + assert attn_mode in self.ATTENTION_MODES + if attn_mode != "softmax" and not XFORMERS_IS_AVAILABLE: + print( + f"Attention mode '{attn_mode}' is not available. Falling back to native attention. " + f"This is not a problem in Pytorch >= 2.0. FYI, you are running with PyTorch version {torch.__version__}" + ) + attn_mode = "softmax" + elif attn_mode == "softmax" and not SDP_IS_AVAILABLE: + print( + "We do not support vanilla attention anymore, as it is too expensive. Sorry." + ) + if not XFORMERS_IS_AVAILABLE: + assert ( + False + ), "Please install xformers via e.g. 
'pip install xformers==0.0.16'" + else: + print("Falling back to xformers efficient attention.") + attn_mode = "softmax-xformers" + attn_cls = self.ATTENTION_MODES[attn_mode] + if version.parse(torch.__version__) >= version.parse("2.0.0"): + assert sdp_backend is None or isinstance(sdp_backend, SDPBackend) + else: + assert sdp_backend is None + self.disable_self_attn = disable_self_attn + self.attn1 = attn_cls( + query_dim=dim, + heads=n_heads, + dim_head=d_head, + dropout=dropout, + context_dim=context_dim if self.disable_self_attn else None, + backend=sdp_backend, + ) # is a self-attention if not self.disable_self_attn + self.ff = FeedForward(dim, dropout=dropout, glu=gated_ff) + self.attn2 = attn_cls( + query_dim=dim, + context_dim=context_dim, + heads=n_heads, + dim_head=d_head, + dropout=dropout, + backend=sdp_backend, + ) # is self-attn if context is none + self.norm1 = ops.LayerNorm(dim) + self.norm2 = ops.LayerNorm(dim) + self.norm3 = ops.LayerNorm(dim) + self.checkpoint = checkpoint + #if self.checkpoint: + #print(f"{self.__class__.__name__} is using checkpointing") + + def forward( + self, x, context=None, additional_tokens=None, n_times_crossframe_attn_in_self=0 + ): + kwargs = {"x": x} + + if context is not None: + kwargs.update({"context": context}) + + if additional_tokens is not None: + kwargs.update({"additional_tokens": additional_tokens}) + + if n_times_crossframe_attn_in_self: + kwargs.update( + {"n_times_crossframe_attn_in_self": n_times_crossframe_attn_in_self} + ) + + # return mixed_checkpoint(self._forward, kwargs, self.parameters(), self.checkpoint) + return checkpoint( + self._forward, (x, context), self.parameters(), self.checkpoint + ) + + def _forward( + self, x, context=None, additional_tokens=None, n_times_crossframe_attn_in_self=0 + ): + x = ( + self.attn1( + self.norm1(x), + context=context if self.disable_self_attn else None, + additional_tokens=additional_tokens, + n_times_crossframe_attn_in_self=n_times_crossframe_attn_in_self + if not self.disable_self_attn + else 0, + ) + + x + ) + x = ( + self.attn2( + self.norm2(x), context=context, additional_tokens=additional_tokens + ) + + x + ) + x = self.ff(self.norm3(x)) + x + return x + + +class BasicTransformerSingleLayerBlock(nn.Module): + ATTENTION_MODES = { + "softmax": CrossAttention, # vanilla attention + "softmax-xformers": MemoryEfficientCrossAttention # on the A100s not quite as fast as the above version + # (todo might depend on head_dim, check, falls back to semi-optimized kernels for dim!=[16,32,64,128]) + } + + def __init__( + self, + dim, + n_heads, + d_head, + dropout=0.0, + context_dim=None, + gated_ff=True, + checkpoint=True, + attn_mode="softmax", + ): + super().__init__() + assert attn_mode in self.ATTENTION_MODES + attn_cls = self.ATTENTION_MODES[attn_mode] + self.attn1 = attn_cls( + query_dim=dim, + heads=n_heads, + dim_head=d_head, + dropout=dropout, + context_dim=context_dim, + ) + self.ff = FeedForward(dim, dropout=dropout, glu=gated_ff) + self.norm1 = ops.LayerNorm(dim) + self.norm2 = ops.LayerNorm(dim) + self.checkpoint = checkpoint + + def forward(self, x, context=None): + return checkpoint( + self._forward, (x, context), self.parameters(), self.checkpoint + ) + + def _forward(self, x, context=None): + x = self.attn1(self.norm1(x), context=context) + x + x = self.ff(self.norm2(x)) + x + return x + + +class SpatialTransformer(nn.Module): + """ + Transformer block for image-like data. + First, project the input (aka embedding) + and reshape to b, t, d. 
+ Then apply standard transformer action. + Finally, reshape to image + NEW: use_linear for more efficiency instead of the 1x1 convs + """ + + def __init__( + self, + in_channels, + n_heads, + d_head, + depth=1, + dropout=0.0, + context_dim=None, + disable_self_attn=False, + use_linear=False, + attn_type="softmax", + use_checkpoint=True, + # sdp_backend=SDPBackend.FLASH_ATTENTION + sdp_backend=None, + ): + super().__init__() + # print( + # f"constructing {self.__class__.__name__} of depth {depth} w/ {in_channels} channels and {n_heads} heads" + # ) + from omegaconf import ListConfig + + if exists(context_dim) and not isinstance(context_dim, (list, ListConfig)): + context_dim = [context_dim] + if exists(context_dim) and isinstance(context_dim, list): + if depth != len(context_dim): + #print( + # f"WARNING: {self.__class__.__name__}: Found context dims {context_dim} of depth {len(context_dim)}, " + # f"which does not match the specified 'depth' of {depth}. Setting context_dim to {depth * [context_dim[0]]} now." + # ) + # depth does not match context dims. + assert all( + map(lambda x: x == context_dim[0], context_dim) + ), "need homogenous context_dim to match depth automatically" + context_dim = depth * [context_dim[0]] + elif context_dim is None: + context_dim = [None] * depth + self.in_channels = in_channels + inner_dim = n_heads * d_head + self.norm = Normalize(in_channels) + if not use_linear: + self.proj_in = ops.Conv2d( + in_channels, inner_dim, kernel_size=1, stride=1, padding=0 + ) + else: + self.proj_in = ops.Linear(in_channels, inner_dim) + + self.transformer_blocks = nn.ModuleList( + [ + BasicTransformerBlock( + inner_dim, + n_heads, + d_head, + dropout=dropout, + context_dim=context_dim[d], + disable_self_attn=disable_self_attn, + attn_mode=attn_type, + checkpoint=use_checkpoint, + sdp_backend=sdp_backend, + ) + for d in range(depth) + ] + ) + if not use_linear: + self.proj_out = zero_module( + ops.Conv2d(inner_dim, in_channels, kernel_size=1, stride=1, padding=0) + ) + else: + # self.proj_out = zero_module(Linear(in_channels, inner_dim)) + self.proj_out = zero_module(ops.Linear(inner_dim, in_channels)) + self.use_linear = use_linear + + def forward(self, x, context=None): + # note: if no context is given, cross-attention defaults to self-attention + if not isinstance(context, list): + context = [context] + b, c, h, w = x.shape + x_in = x + x = self.norm(x) + if not self.use_linear: + x = self.proj_in(x) + x = rearrange(x, "b c h w -> b (h w) c").contiguous() + if self.use_linear: + x = self.proj_in(x) + for i, block in enumerate(self.transformer_blocks): + if i > 0 and len(context) == 1: + i = 0 # use same context for each block + x = block(x, context=context[i]) + if self.use_linear: + x = self.proj_out(x) + x = rearrange(x, "b (h w) c -> b c h w", h=h, w=w).contiguous() + if not self.use_linear: + x = self.proj_out(x) + return x + x_in diff --git a/ComfyUI-SUPIR/sgm/modules/autoencoding/__init__.py b/ComfyUI-SUPIR/sgm/modules/autoencoding/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI-SUPIR/sgm/modules/autoencoding/losses/__init__.py b/ComfyUI-SUPIR/sgm/modules/autoencoding/losses/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..cc1bdb49120761b0f1303d6c42597cbfb1b3f997 --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/autoencoding/losses/__init__.py @@ -0,0 +1,246 @@ +from typing import Any, Union + +import torch +import torch.nn as nn +from einops 
import rearrange + +from ....util import default, instantiate_from_config +from ..lpips.loss.lpips import LPIPS +from ..lpips.model.model import NLayerDiscriminator, weights_init +from ..lpips.vqperceptual import hinge_d_loss, vanilla_d_loss + + +def adopt_weight(weight, global_step, threshold=0, value=0.0): + if global_step < threshold: + weight = value + return weight + + +class LatentLPIPS(nn.Module): + def __init__( + self, + decoder_config, + perceptual_weight=1.0, + latent_weight=1.0, + scale_input_to_tgt_size=False, + scale_tgt_to_input_size=False, + perceptual_weight_on_inputs=0.0, + ): + super().__init__() + self.scale_input_to_tgt_size = scale_input_to_tgt_size + self.scale_tgt_to_input_size = scale_tgt_to_input_size + self.init_decoder(decoder_config) + self.perceptual_loss = LPIPS().eval() + self.perceptual_weight = perceptual_weight + self.latent_weight = latent_weight + self.perceptual_weight_on_inputs = perceptual_weight_on_inputs + + def init_decoder(self, config): + self.decoder = instantiate_from_config(config) + if hasattr(self.decoder, "encoder"): + del self.decoder.encoder + + def forward(self, latent_inputs, latent_predictions, image_inputs, split="train"): + log = dict() + loss = (latent_inputs - latent_predictions) ** 2 + log[f"{split}/latent_l2_loss"] = loss.mean().detach() + image_reconstructions = None + if self.perceptual_weight > 0.0: + image_reconstructions = self.decoder.decode(latent_predictions) + image_targets = self.decoder.decode(latent_inputs) + perceptual_loss = self.perceptual_loss( + image_targets.contiguous(), image_reconstructions.contiguous() + ) + loss = ( + self.latent_weight * loss.mean() + + self.perceptual_weight * perceptual_loss.mean() + ) + log[f"{split}/perceptual_loss"] = perceptual_loss.mean().detach() + + if self.perceptual_weight_on_inputs > 0.0: + image_reconstructions = default( + image_reconstructions, self.decoder.decode(latent_predictions) + ) + if self.scale_input_to_tgt_size: + image_inputs = torch.nn.functional.interpolate( + image_inputs, + image_reconstructions.shape[2:], + mode="bicubic", + antialias=True, + ) + elif self.scale_tgt_to_input_size: + image_reconstructions = torch.nn.functional.interpolate( + image_reconstructions, + image_inputs.shape[2:], + mode="bicubic", + antialias=True, + ) + + perceptual_loss2 = self.perceptual_loss( + image_inputs.contiguous(), image_reconstructions.contiguous() + ) + loss = loss + self.perceptual_weight_on_inputs * perceptual_loss2.mean() + log[f"{split}/perceptual_loss_on_inputs"] = perceptual_loss2.mean().detach() + return loss, log + + +class GeneralLPIPSWithDiscriminator(nn.Module): + def __init__( + self, + disc_start: int, + logvar_init: float = 0.0, + pixelloss_weight=1.0, + disc_num_layers: int = 3, + disc_in_channels: int = 3, + disc_factor: float = 1.0, + disc_weight: float = 1.0, + perceptual_weight: float = 1.0, + disc_loss: str = "hinge", + scale_input_to_tgt_size: bool = False, + dims: int = 2, + learn_logvar: bool = False, + regularization_weights: Union[None, dict] = None, + ): + super().__init__() + self.dims = dims + if self.dims > 2: + print( + f"running with dims={dims}. This means that for perceptual loss calculation, " + f"the LPIPS loss will be applied to each frame independently. 
" + ) + self.scale_input_to_tgt_size = scale_input_to_tgt_size + assert disc_loss in ["hinge", "vanilla"] + self.pixel_weight = pixelloss_weight + self.perceptual_loss = LPIPS().eval() + self.perceptual_weight = perceptual_weight + # output log variance + self.logvar = nn.Parameter(torch.ones(size=()) * logvar_init) + self.learn_logvar = learn_logvar + + self.discriminator = NLayerDiscriminator( + input_nc=disc_in_channels, n_layers=disc_num_layers, use_actnorm=False + ).apply(weights_init) + self.discriminator_iter_start = disc_start + self.disc_loss = hinge_d_loss if disc_loss == "hinge" else vanilla_d_loss + self.disc_factor = disc_factor + self.discriminator_weight = disc_weight + self.regularization_weights = default(regularization_weights, {}) + + def get_trainable_parameters(self) -> Any: + return self.discriminator.parameters() + + def get_trainable_autoencoder_parameters(self) -> Any: + if self.learn_logvar: + yield self.logvar + yield from () + + def calculate_adaptive_weight(self, nll_loss, g_loss, last_layer=None): + if last_layer is not None: + nll_grads = torch.autograd.grad(nll_loss, last_layer, retain_graph=True)[0] + g_grads = torch.autograd.grad(g_loss, last_layer, retain_graph=True)[0] + else: + nll_grads = torch.autograd.grad( + nll_loss, self.last_layer[0], retain_graph=True + )[0] + g_grads = torch.autograd.grad( + g_loss, self.last_layer[0], retain_graph=True + )[0] + + d_weight = torch.norm(nll_grads) / (torch.norm(g_grads) + 1e-4) + d_weight = torch.clamp(d_weight, 0.0, 1e4).detach() + d_weight = d_weight * self.discriminator_weight + return d_weight + + def forward( + self, + regularization_log, + inputs, + reconstructions, + optimizer_idx, + global_step, + last_layer=None, + split="train", + weights=None, + ): + if self.scale_input_to_tgt_size: + inputs = torch.nn.functional.interpolate( + inputs, reconstructions.shape[2:], mode="bicubic", antialias=True + ) + + if self.dims > 2: + inputs, reconstructions = map( + lambda x: rearrange(x, "b c t h w -> (b t) c h w"), + (inputs, reconstructions), + ) + + rec_loss = torch.abs(inputs.contiguous() - reconstructions.contiguous()) + if self.perceptual_weight > 0: + p_loss = self.perceptual_loss( + inputs.contiguous(), reconstructions.contiguous() + ) + rec_loss = rec_loss + self.perceptual_weight * p_loss + + nll_loss = rec_loss / torch.exp(self.logvar) + self.logvar + weighted_nll_loss = nll_loss + if weights is not None: + weighted_nll_loss = weights * nll_loss + weighted_nll_loss = torch.sum(weighted_nll_loss) / weighted_nll_loss.shape[0] + nll_loss = torch.sum(nll_loss) / nll_loss.shape[0] + + # now the GAN part + if optimizer_idx == 0: + # generator update + logits_fake = self.discriminator(reconstructions.contiguous()) + g_loss = -torch.mean(logits_fake) + + if self.disc_factor > 0.0: + try: + d_weight = self.calculate_adaptive_weight( + nll_loss, g_loss, last_layer=last_layer + ) + except RuntimeError: + assert not self.training + d_weight = torch.tensor(0.0) + else: + d_weight = torch.tensor(0.0) + + disc_factor = adopt_weight( + self.disc_factor, global_step, threshold=self.discriminator_iter_start + ) + loss = weighted_nll_loss + d_weight * disc_factor * g_loss + log = dict() + for k in regularization_log: + if k in self.regularization_weights: + loss = loss + self.regularization_weights[k] * regularization_log[k] + log[f"{split}/{k}"] = regularization_log[k].detach().mean() + + log.update( + { + "{}/total_loss".format(split): loss.clone().detach().mean(), + "{}/logvar".format(split): self.logvar.detach(), + 
"{}/nll_loss".format(split): nll_loss.detach().mean(), + "{}/rec_loss".format(split): rec_loss.detach().mean(), + "{}/d_weight".format(split): d_weight.detach(), + "{}/disc_factor".format(split): torch.tensor(disc_factor), + "{}/g_loss".format(split): g_loss.detach().mean(), + } + ) + + return loss, log + + if optimizer_idx == 1: + # second pass for discriminator update + logits_real = self.discriminator(inputs.contiguous().detach()) + logits_fake = self.discriminator(reconstructions.contiguous().detach()) + + disc_factor = adopt_weight( + self.disc_factor, global_step, threshold=self.discriminator_iter_start + ) + d_loss = disc_factor * self.disc_loss(logits_real, logits_fake) + + log = { + "{}/disc_loss".format(split): d_loss.clone().detach().mean(), + "{}/logits_real".format(split): logits_real.detach().mean(), + "{}/logits_fake".format(split): logits_fake.detach().mean(), + } + return d_loss, log diff --git a/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/__init__.py b/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/loss/LICENSE b/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/loss/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..924cfc85b8d63ef538f5676f830a2a8497932108 --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/loss/LICENSE @@ -0,0 +1,23 @@ +Copyright (c) 2018, Richard Zhang, Phillip Isola, Alexei A. Efros, Eli Shechtman, Oliver Wang +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
\ No newline at end of file diff --git a/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/loss/__init__.py b/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/loss/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/loss/lpips.py b/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/loss/lpips.py new file mode 100644 index 0000000000000000000000000000000000000000..d145eacc7173901ea19758818f104a0001ba47ad --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/loss/lpips.py @@ -0,0 +1,149 @@ +"""Stripped version of https://github.com/richzhang/PerceptualSimilarity/tree/master/models""" + +from collections import namedtuple + +import torch +import torch.nn as nn +from torchvision import models + +from ..util import get_ckpt_path + +import comfy.ops +ops = comfy.ops.manual_cast + +class LPIPS(nn.Module): + # Learned perceptual metric + def __init__(self, use_dropout=True): + super().__init__() + self.scaling_layer = ScalingLayer() + self.chns = [64, 128, 256, 512, 512] # vg16 features + self.net = vgg16(pretrained=True, requires_grad=False) + self.lin0 = NetLinLayer(self.chns[0], use_dropout=use_dropout) + self.lin1 = NetLinLayer(self.chns[1], use_dropout=use_dropout) + self.lin2 = NetLinLayer(self.chns[2], use_dropout=use_dropout) + self.lin3 = NetLinLayer(self.chns[3], use_dropout=use_dropout) + self.lin4 = NetLinLayer(self.chns[4], use_dropout=use_dropout) + self.load_from_pretrained() + for param in self.parameters(): + param.requires_grad = False + + def load_from_pretrained(self, name="vgg_lpips"): + ckpt = get_ckpt_path(name, "sgm/modules/autoencoding/lpips/loss") + self.load_state_dict( + torch.load(ckpt, map_location=torch.device("cpu")), strict=False + ) + print("loaded pretrained LPIPS loss from {}".format(ckpt)) + + @classmethod + def from_pretrained(cls, name="vgg_lpips"): + if name != "vgg_lpips": + raise NotImplementedError + model = cls() + ckpt = get_ckpt_path(name) + model.load_state_dict( + torch.load(ckpt, map_location=torch.device("cpu")), strict=False + ) + return model + + def forward(self, input, target): + in0_input, in1_input = (self.scaling_layer(input), self.scaling_layer(target)) + outs0, outs1 = self.net(in0_input), self.net(in1_input) + feats0, feats1, diffs = {}, {}, {} + lins = [self.lin0, self.lin1, self.lin2, self.lin3, self.lin4] + for kk in range(len(self.chns)): + feats0[kk], feats1[kk] = normalize_tensor(outs0[kk]), normalize_tensor( + outs1[kk] + ) + diffs[kk] = (feats0[kk] - feats1[kk]) ** 2 + + res = [ + spatial_average(lins[kk].model(diffs[kk]), keepdim=True) + for kk in range(len(self.chns)) + ] + val = res[0] + for l in range(1, len(self.chns)): + val += res[l] + return val + + +class ScalingLayer(nn.Module): + def __init__(self): + super(ScalingLayer, self).__init__() + self.register_buffer( + "shift", torch.Tensor([-0.030, -0.088, -0.188])[None, :, None, None] + ) + self.register_buffer( + "scale", torch.Tensor([0.458, 0.448, 0.450])[None, :, None, None] + ) + + def forward(self, inp): + return (inp - self.shift) / self.scale + + +class NetLinLayer(nn.Module): + """A single linear layer which does a 1x1 conv""" + + def __init__(self, chn_in, chn_out=1, use_dropout=False): + super(NetLinLayer, self).__init__() + layers = ( + [ + nn.Dropout(), + ] + if (use_dropout) + else [] + ) + layers += [ + ops.Conv2d(chn_in, chn_out, 1, stride=1, padding=0, bias=False), + ] + self.model = nn.Sequential(*layers) + + +class 
vgg16(torch.nn.Module): + def __init__(self, requires_grad=False, pretrained=True): + super(vgg16, self).__init__() + vgg_pretrained_features = models.vgg16(pretrained=pretrained).features + self.slice1 = torch.nn.Sequential() + self.slice2 = torch.nn.Sequential() + self.slice3 = torch.nn.Sequential() + self.slice4 = torch.nn.Sequential() + self.slice5 = torch.nn.Sequential() + self.N_slices = 5 + for x in range(4): + self.slice1.add_module(str(x), vgg_pretrained_features[x]) + for x in range(4, 9): + self.slice2.add_module(str(x), vgg_pretrained_features[x]) + for x in range(9, 16): + self.slice3.add_module(str(x), vgg_pretrained_features[x]) + for x in range(16, 23): + self.slice4.add_module(str(x), vgg_pretrained_features[x]) + for x in range(23, 30): + self.slice5.add_module(str(x), vgg_pretrained_features[x]) + if not requires_grad: + for param in self.parameters(): + param.requires_grad = False + + def forward(self, X): + h = self.slice1(X) + h_relu1_2 = h + h = self.slice2(h) + h_relu2_2 = h + h = self.slice3(h) + h_relu3_3 = h + h = self.slice4(h) + h_relu4_3 = h + h = self.slice5(h) + h_relu5_3 = h + vgg_outputs = namedtuple( + "VggOutputs", ["relu1_2", "relu2_2", "relu3_3", "relu4_3", "relu5_3"] + ) + out = vgg_outputs(h_relu1_2, h_relu2_2, h_relu3_3, h_relu4_3, h_relu5_3) + return out + + +def normalize_tensor(x, eps=1e-10): + norm_factor = torch.sqrt(torch.sum(x**2, dim=1, keepdim=True)) + return x / (norm_factor + eps) + + +def spatial_average(x, keepdim=True): + return x.mean([2, 3], keepdim=keepdim) diff --git a/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/model/LICENSE b/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/model/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..4b356e66b5aa689b339f1a80a9f1b5ba378003bb --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/model/LICENSE @@ -0,0 +1,58 @@ +Copyright (c) 2017, Jun-Yan Zhu and Taesung Park +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +--------------------------- LICENSE FOR pix2pix -------------------------------- +BSD License + +For pix2pix software +Copyright (c) 2016, Phillip Isola and Jun-Yan Zhu +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +----------------------------- LICENSE FOR DCGAN -------------------------------- +BSD License + +For dcgan.torch software + +Copyright (c) 2015, Facebook, Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +Neither the name Facebook nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
\ No newline at end of file diff --git a/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/model/__init__.py b/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/model/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/model/model.py b/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/model/model.py new file mode 100644 index 0000000000000000000000000000000000000000..1c201516035986db9bf0793eef50c64e4e9cc734 --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/model/model.py @@ -0,0 +1,89 @@ +import functools + +import torch.nn as nn + +from ..util import ActNorm +import comfy.ops +ops = comfy.ops.manual_cast + +def weights_init(m): + classname = m.__class__.__name__ + if classname.find("Conv") != -1: + nn.init.normal_(m.weight.data, 0.0, 0.02) + elif classname.find("BatchNorm") != -1: + nn.init.normal_(m.weight.data, 1.0, 0.02) + nn.init.constant_(m.bias.data, 0) + + +class NLayerDiscriminator(nn.Module): + """Defines a PatchGAN discriminator as in Pix2Pix + --> see https://github.com/junyanz/pytorch-CycleGAN-and-pix2pix/blob/master/models/networks.py + """ + + def __init__(self, input_nc=3, ndf=64, n_layers=3, use_actnorm=False): + """Construct a PatchGAN discriminator + Parameters: + input_nc (int) -- the number of channels in input images + ndf (int) -- the number of filters in the last conv layer + n_layers (int) -- the number of conv layers in the discriminator + norm_layer -- normalization layer + """ + super(NLayerDiscriminator, self).__init__() + if not use_actnorm: + norm_layer = nn.BatchNorm2d + else: + norm_layer = ActNorm + if ( + type(norm_layer) == functools.partial + ): # no need to use bias as BatchNorm2d has affine parameters + use_bias = norm_layer.func != nn.BatchNorm2d + else: + use_bias = norm_layer != nn.BatchNorm2d + + kw = 4 + padw = 1 + sequence = [ + ops.Conv2d(input_nc, ndf, kernel_size=kw, stride=2, padding=padw), + nn.LeakyReLU(0.2, True), + ] + nf_mult = 1 + nf_mult_prev = 1 + for n in range(1, n_layers): # gradually increase the number of filters + nf_mult_prev = nf_mult + nf_mult = min(2**n, 8) + sequence += [ + ops.Conv2d( + ndf * nf_mult_prev, + ndf * nf_mult, + kernel_size=kw, + stride=2, + padding=padw, + bias=use_bias, + ), + norm_layer(ndf * nf_mult), + nn.LeakyReLU(0.2, True), + ] + + nf_mult_prev = nf_mult + nf_mult = min(2**n_layers, 8) + sequence += [ + ops.Conv2d( + ndf * nf_mult_prev, + ndf * nf_mult, + kernel_size=kw, + stride=1, + padding=padw, + bias=use_bias, + ), + norm_layer(ndf * nf_mult), + nn.LeakyReLU(0.2, True), + ] + + sequence += [ + ops.Conv2d(ndf * nf_mult, 1, kernel_size=kw, stride=1, padding=padw) + ] # output 1 channel prediction map + self.main = nn.Sequential(*sequence) + + def forward(self, input): + """Standard forward.""" + return self.main(input) diff --git a/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/util.py b/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/util.py new file mode 100644 index 0000000000000000000000000000000000000000..49c76e370bf16888ab61f42844b3c9f14ad9014c --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/util.py @@ -0,0 +1,128 @@ +import hashlib +import os + +import requests +import torch +import torch.nn as nn +from tqdm import tqdm + +URL_MAP = {"vgg_lpips": "https://heibox.uni-heidelberg.de/f/607503859c864bc1b30b/?dl=1"} + +CKPT_MAP = {"vgg_lpips": "vgg.pth"} + +MD5_MAP = {"vgg_lpips": "d507d7349b931f0638a25a48a722f98a"} + + +def download(url, local_path, 
chunk_size=1024): + os.makedirs(os.path.split(local_path)[0], exist_ok=True) + with requests.get(url, stream=True) as r: + total_size = int(r.headers.get("content-length", 0)) + with tqdm(total=total_size, unit="B", unit_scale=True) as pbar: + with open(local_path, "wb") as f: + for data in r.iter_content(chunk_size=chunk_size): + if data: + f.write(data) + pbar.update(chunk_size) + + +def md5_hash(path): + with open(path, "rb") as f: + content = f.read() + return hashlib.md5(content).hexdigest() + + +def get_ckpt_path(name, root, check=False): + assert name in URL_MAP + path = os.path.join(root, CKPT_MAP[name]) + if not os.path.exists(path) or (check and not md5_hash(path) == MD5_MAP[name]): + print("Downloading {} model from {} to {}".format(name, URL_MAP[name], path)) + download(URL_MAP[name], path) + md5 = md5_hash(path) + assert md5 == MD5_MAP[name], md5 + return path + + +class ActNorm(nn.Module): + def __init__( + self, num_features, logdet=False, affine=True, allow_reverse_init=False + ): + assert affine + super().__init__() + self.logdet = logdet + self.loc = nn.Parameter(torch.zeros(1, num_features, 1, 1)) + self.scale = nn.Parameter(torch.ones(1, num_features, 1, 1)) + self.allow_reverse_init = allow_reverse_init + + self.register_buffer("initialized", torch.tensor(0, dtype=torch.uint8)) + + def initialize(self, input): + with torch.no_grad(): + flatten = input.permute(1, 0, 2, 3).contiguous().view(input.shape[1], -1) + mean = ( + flatten.mean(1) + .unsqueeze(1) + .unsqueeze(2) + .unsqueeze(3) + .permute(1, 0, 2, 3) + ) + std = ( + flatten.std(1) + .unsqueeze(1) + .unsqueeze(2) + .unsqueeze(3) + .permute(1, 0, 2, 3) + ) + + self.loc.data.copy_(-mean) + self.scale.data.copy_(1 / (std + 1e-6)) + + def forward(self, input, reverse=False): + if reverse: + return self.reverse(input) + if len(input.shape) == 2: + input = input[:, :, None, None] + squeeze = True + else: + squeeze = False + + _, _, height, width = input.shape + + if self.training and self.initialized.item() == 0: + self.initialize(input) + self.initialized.fill_(1) + + h = self.scale * (input + self.loc) + + if squeeze: + h = h.squeeze(-1).squeeze(-1) + + if self.logdet: + log_abs = torch.log(torch.abs(self.scale)) + logdet = height * width * torch.sum(log_abs) + logdet = logdet * torch.ones(input.shape[0]).to(input) + return h, logdet + + return h + + def reverse(self, output): + if self.training and self.initialized.item() == 0: + if not self.allow_reverse_init: + raise RuntimeError( + "Initializing ActNorm in reverse direction is " + "disabled by default. Use allow_reverse_init=True to enable." 
+ ) + else: + self.initialize(output) + self.initialized.fill_(1) + + if len(output.shape) == 2: + output = output[:, :, None, None] + squeeze = True + else: + squeeze = False + + h = output / self.scale - self.loc + + if squeeze: + h = h.squeeze(-1).squeeze(-1) + return h diff --git a/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/vqperceptual.py b/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/vqperceptual.py new file mode 100644 index 0000000000000000000000000000000000000000..6195f0a6ed7ee6fd32c1bccea071e6075e95ee43 --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/autoencoding/lpips/vqperceptual.py @@ -0,0 +1,17 @@ +import torch +import torch.nn.functional as F + + +def hinge_d_loss(logits_real, logits_fake): + loss_real = torch.mean(F.relu(1.0 - logits_real)) + loss_fake = torch.mean(F.relu(1.0 + logits_fake)) + d_loss = 0.5 * (loss_real + loss_fake) + return d_loss + + +def vanilla_d_loss(logits_real, logits_fake): + d_loss = 0.5 * ( + torch.mean(torch.nn.functional.softplus(-logits_real)) + + torch.mean(torch.nn.functional.softplus(logits_fake)) + ) + return d_loss diff --git a/ComfyUI-SUPIR/sgm/modules/autoencoding/regularizers/__init__.py b/ComfyUI-SUPIR/sgm/modules/autoencoding/regularizers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8de3212d3be4f58e621e8caa6e31dd8dc32b6929 --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/autoencoding/regularizers/__init__.py @@ -0,0 +1,53 @@ +from abc import abstractmethod +from typing import Any, Tuple + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from ....modules.distributions.distributions import DiagonalGaussianDistribution + + +class AbstractRegularizer(nn.Module): + def __init__(self): + super().__init__() + + def forward(self, z: torch.Tensor) -> Tuple[torch.Tensor, dict]: + raise NotImplementedError() + + @abstractmethod + def get_trainable_parameters(self) -> Any: + raise NotImplementedError() + + +class DiagonalGaussianRegularizer(AbstractRegularizer): + def __init__(self, sample: bool = True): + super().__init__() + self.sample = sample + + def get_trainable_parameters(self) -> Any: + yield from () + + def forward(self, z: torch.Tensor) -> Tuple[torch.Tensor, dict]: + log = dict() + posterior = DiagonalGaussianDistribution(z) + if self.sample: + z = posterior.sample() + else: + z = posterior.mode() + kl_loss = posterior.kl() + kl_loss = torch.sum(kl_loss) / kl_loss.shape[0] + log["kl_loss"] = kl_loss + return z, log + + +def measure_perplexity(predicted_indices, num_centroids): + # src: https://github.com/karpathy/deep-vector-quantization/blob/main/model.py + # eval cluster perplexity. 
when perplexity == num_embeddings then all clusters are used exactly equally + encodings = ( + F.one_hot(predicted_indices, num_centroids).float().reshape(-1, num_centroids) + ) + avg_probs = encodings.mean(0) + perplexity = (-(avg_probs * torch.log(avg_probs + 1e-10)).sum()).exp() + cluster_use = torch.sum(avg_probs > 0) + return perplexity, cluster_use diff --git a/ComfyUI-SUPIR/sgm/modules/diffusionmodules/__init__.py b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..867b69e89fb3f2969985e3458a50eab3f77d0891 --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/__init__.py @@ -0,0 +1,7 @@ +from .denoiser import Denoiser +from .discretizer import Discretization +from .loss import StandardDiffusionLoss +from .model import Decoder, Encoder, Model +from .openaimodel import UNetModel +from .sampling import BaseDiffusionSampler +from .wrappers import OpenAIWrapper diff --git a/ComfyUI-SUPIR/sgm/modules/diffusionmodules/denoiser.py b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/denoiser.py new file mode 100644 index 0000000000000000000000000000000000000000..a88b54069974720107d7740807dd797616d38282 --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/denoiser.py @@ -0,0 +1,73 @@ +import torch.nn as nn + +from ...util import append_dims, instantiate_from_config + + +class Denoiser(nn.Module): + def __init__(self, weighting_config, scaling_config): + super().__init__() + + self.weighting = instantiate_from_config(weighting_config) + self.scaling = instantiate_from_config(scaling_config) + + def possibly_quantize_sigma(self, sigma): + return sigma + + def possibly_quantize_c_noise(self, c_noise): + return c_noise + + def w(self, sigma): + return self.weighting(sigma) + + def __call__(self, network, input, sigma, cond): + sigma = self.possibly_quantize_sigma(sigma) + sigma_shape = sigma.shape + sigma = append_dims(sigma, input.ndim) + c_skip, c_out, c_in, c_noise = self.scaling(sigma) + c_noise = self.possibly_quantize_c_noise(c_noise.reshape(sigma_shape)) + return network(input * c_in, c_noise, cond) * c_out + input * c_skip + + +class DiscreteDenoiser(Denoiser): + def __init__( + self, + weighting_config, + scaling_config, + num_idx, + discretization_config, + do_append_zero=False, + quantize_c_noise=True, + flip=True, + ): + super().__init__(weighting_config, scaling_config) + sigmas = instantiate_from_config(discretization_config)( + num_idx, do_append_zero=do_append_zero, flip=flip + ) + self.register_buffer("sigmas", sigmas) + self.quantize_c_noise = quantize_c_noise + + def sigma_to_idx(self, sigma): + dists = sigma - self.sigmas[:, None] + return dists.abs().argmin(dim=0).view(sigma.shape) + + def idx_to_sigma(self, idx): + return self.sigmas[idx] + + def possibly_quantize_sigma(self, sigma): + return self.idx_to_sigma(self.sigma_to_idx(sigma)) + + def possibly_quantize_c_noise(self, c_noise): + if self.quantize_c_noise: + return self.sigma_to_idx(c_noise) + else: + return c_noise + + +class DiscreteDenoiserWithControl(DiscreteDenoiser): + def __call__(self, network, input, sigma, cond, control_scale): + sigma = self.possibly_quantize_sigma(sigma) + sigma_shape = sigma.shape + sigma = append_dims(sigma, input.ndim) + c_skip, c_out, c_in, c_noise = self.scaling(sigma) + c_noise = self.possibly_quantize_c_noise(c_noise.reshape(sigma_shape)) + return network(input * c_in, c_noise, cond, control_scale) * c_out + input * c_skip diff --git a/ComfyUI-SUPIR/sgm/modules/diffusionmodules/denoiser_scaling.py 
b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/denoiser_scaling.py new file mode 100644 index 0000000000000000000000000000000000000000..f8a2ac6732ea78f1030b21bebd14063d52ac2a82 --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/denoiser_scaling.py @@ -0,0 +1,31 @@ +import torch + + +class EDMScaling: + def __init__(self, sigma_data=0.5): + self.sigma_data = sigma_data + + def __call__(self, sigma): + c_skip = self.sigma_data**2 / (sigma**2 + self.sigma_data**2) + c_out = sigma * self.sigma_data / (sigma**2 + self.sigma_data**2) ** 0.5 + c_in = 1 / (sigma**2 + self.sigma_data**2) ** 0.5 + c_noise = 0.25 * sigma.log() + return c_skip, c_out, c_in, c_noise + + +class EpsScaling: + def __call__(self, sigma): + c_skip = torch.ones_like(sigma, device=sigma.device) + c_out = -sigma + c_in = 1 / (sigma**2 + 1.0) ** 0.5 + c_noise = sigma.clone() + return c_skip, c_out, c_in, c_noise + + +class VScaling: + def __call__(self, sigma): + c_skip = 1.0 / (sigma**2 + 1.0) + c_out = -sigma / (sigma**2 + 1.0) ** 0.5 + c_in = 1.0 / (sigma**2 + 1.0) ** 0.5 + c_noise = sigma.clone() + return c_skip, c_out, c_in, c_noise diff --git a/ComfyUI-SUPIR/sgm/modules/diffusionmodules/denoiser_weighting.py b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/denoiser_weighting.py new file mode 100644 index 0000000000000000000000000000000000000000..4cd6e87f4ea96dbefe1dcf5dea80e527f0937719 --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/denoiser_weighting.py @@ -0,0 +1,24 @@ +import torch + +class UnitWeighting: + def __call__(self, sigma): + return torch.ones_like(sigma, device=sigma.device) + + +class EDMWeighting: + def __init__(self, sigma_data=0.5): + self.sigma_data = sigma_data + + def __call__(self, sigma): + return (sigma**2 + self.sigma_data**2) / (sigma * self.sigma_data) ** 2 + + +class VWeighting(EDMWeighting): + def __init__(self): + super().__init__(sigma_data=1.0) + + +class EpsWeighting: + def __call__(self, sigma): + return sigma**-2 + diff --git a/ComfyUI-SUPIR/sgm/modules/diffusionmodules/discretizer.py b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/discretizer.py new file mode 100644 index 0000000000000000000000000000000000000000..4135ac99b86655845b00dbe92702c217b995adfe --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/discretizer.py @@ -0,0 +1,69 @@ +from abc import abstractmethod +from functools import partial + +import numpy as np +import torch + +from ...modules.diffusionmodules.util import make_beta_schedule +from ...util import append_zero + + +def generate_roughly_equally_spaced_steps( + num_substeps: int, max_step: int +) -> np.ndarray: + return np.linspace(max_step - 1, 0, num_substeps, endpoint=False).astype(int)[::-1] + + +class Discretization: + def __call__(self, n, do_append_zero=True, device="cpu", flip=False): + sigmas = self.get_sigmas(n, device=device) + sigmas = append_zero(sigmas) if do_append_zero else sigmas + return sigmas if not flip else torch.flip(sigmas, (0,)) + + @abstractmethod + def get_sigmas(self, n, device): + pass + + +class EDMDiscretization(Discretization): + def __init__(self, sigma_min=0.02, sigma_max=80.0, rho=7.0): + self.sigma_min = sigma_min + self.sigma_max = sigma_max + self.rho = rho + + def get_sigmas(self, n, device="cpu"): + ramp = torch.linspace(0, 1, n, device=device) + min_inv_rho = self.sigma_min ** (1 / self.rho) + max_inv_rho = self.sigma_max ** (1 / self.rho) + sigmas = (max_inv_rho + ramp * (min_inv_rho - max_inv_rho)) ** self.rho + return sigmas + + +class LegacyDDPMDiscretization(Discretization): + def __init__( + 
self, + linear_start=0.00085, + linear_end=0.0120, + num_timesteps=1000, + ): + super().__init__() + self.num_timesteps = num_timesteps + betas = make_beta_schedule( + "linear", num_timesteps, linear_start=linear_start, linear_end=linear_end + ) + alphas = 1.0 - betas + self.alphas_cumprod = np.cumprod(alphas, axis=0) + self.to_torch = partial(torch.tensor, dtype=torch.float32) + + def get_sigmas(self, n, device="cpu"): + if n < self.num_timesteps: + timesteps = generate_roughly_equally_spaced_steps(n, self.num_timesteps) + alphas_cumprod = self.alphas_cumprod[timesteps] + elif n == self.num_timesteps: + alphas_cumprod = self.alphas_cumprod + else: + raise ValueError + + to_torch = partial(torch.tensor, dtype=torch.float32, device=device) + sigmas = to_torch((1 - alphas_cumprod) / alphas_cumprod) ** 0.5 + return torch.flip(sigmas, (0,)) diff --git a/ComfyUI-SUPIR/sgm/modules/diffusionmodules/guiders.py b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/guiders.py new file mode 100644 index 0000000000000000000000000000000000000000..dbee4fe68780fc6c62be6c6a8278ebc4b008b003 --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/guiders.py @@ -0,0 +1,88 @@ +from functools import partial + +import torch + +from ...util import default, instantiate_from_config + + +class VanillaCFG: + """ + implements parallelized CFG + """ + + def __init__(self, scale, dyn_thresh_config=None): + scale_schedule = lambda scale, sigma: scale # independent of step + self.scale_schedule = partial(scale_schedule, scale) + self.dyn_thresh = instantiate_from_config( + default( + dyn_thresh_config, + { + "target": ".sgm.modules.diffusionmodules.sampling_utils.NoDynamicThresholding" + }, + ) + ) + + def __call__(self, x, sigma): + x_u, x_c = x.chunk(2) + scale_value = self.scale_schedule(sigma) + x_pred = self.dyn_thresh(x_u, x_c, scale_value) + return x_pred + + def prepare_inputs(self, x, s, c, uc): + c_out = dict() + + for k in c: + if k in ["vector", "crossattn", "concat", "control", 'control_vector', 'mask_x']: + c_out[k] = torch.cat((uc[k], c[k]), 0) + else: + assert c[k] == uc[k] + c_out[k] = c[k] + return torch.cat([x] * 2), torch.cat([s] * 2), c_out + + + +class LinearCFG: + def __init__(self, scale, scale_min=None, dyn_thresh_config=None): + if scale_min is None: + scale_min = scale + scale_schedule = lambda scale, scale_min, sigma: (scale - scale_min) * sigma / 14.6146 + scale_min + self.scale_schedule = partial(scale_schedule, scale, scale_min) + self.dyn_thresh = instantiate_from_config( + default( + dyn_thresh_config, + { + "target": ".sgm.modules.diffusionmodules.sampling_utils.NoDynamicThresholding" + }, + ) + ) + + def __call__(self, x, sigma): + x_u, x_c = x.chunk(2) + scale_value = self.scale_schedule(sigma) + x_pred = self.dyn_thresh(x_u, x_c, scale_value) + return x_pred + + def prepare_inputs(self, x, s, c, uc): + c_out = dict() + + for k in c: + if k in ["vector", "crossattn", "concat", "control", 'control_vector', 'mask_x']: + c_out[k] = torch.cat((uc[k], c[k]), 0) + else: + assert c[k] == uc[k] + c_out[k] = c[k] + return torch.cat([x] * 2), torch.cat([s] * 2), c_out + + + +class IdentityGuider: + def __call__(self, x, sigma): + return x + + def prepare_inputs(self, x, s, c, uc): + c_out = dict() + + for k in c: + c_out[k] = c[k] + + return x, s, c_out diff --git a/ComfyUI-SUPIR/sgm/modules/diffusionmodules/loss.py b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/loss.py new file mode 100644 index 0000000000000000000000000000000000000000..508230c9f828a0e54b330a1788623dcbb1efa0db --- /dev/null +++ 
b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/loss.py @@ -0,0 +1,69 @@ +from typing import List, Optional, Union + +import torch +import torch.nn as nn +from omegaconf import ListConfig + +from ...util import append_dims, instantiate_from_config +from ...modules.autoencoding.lpips.loss.lpips import LPIPS + + +class StandardDiffusionLoss(nn.Module): + def __init__( + self, + sigma_sampler_config, + type="l2", + offset_noise_level=0.0, + batch2model_keys: Optional[Union[str, List[str], ListConfig]] = None, + ): + super().__init__() + + assert type in ["l2", "l1", "lpips"] + + self.sigma_sampler = instantiate_from_config(sigma_sampler_config) + + self.type = type + self.offset_noise_level = offset_noise_level + + if type == "lpips": + self.lpips = LPIPS().eval() + + if not batch2model_keys: + batch2model_keys = [] + + if isinstance(batch2model_keys, str): + batch2model_keys = [batch2model_keys] + + self.batch2model_keys = set(batch2model_keys) + + def __call__(self, network, denoiser, conditioner, input, batch): + cond = conditioner(batch) + additional_model_inputs = { + key: batch[key] for key in self.batch2model_keys.intersection(batch) + } + + sigmas = self.sigma_sampler(input.shape[0]).to(input.device) + noise = torch.randn_like(input) + if self.offset_noise_level > 0.0: + noise = noise + self.offset_noise_level * append_dims( + torch.randn(input.shape[0], device=input.device), input.ndim + ) + noised_input = input + noise * append_dims(sigmas, input.ndim) + model_output = denoiser( + network, noised_input, sigmas, cond, **additional_model_inputs + ) + w = append_dims(denoiser.w(sigmas), input.ndim) + return self.get_loss(model_output, input, w) + + def get_loss(self, model_output, target, w): + if self.type == "l2": + return torch.mean( + (w * (model_output - target) ** 2).reshape(target.shape[0], -1), 1 + ) + elif self.type == "l1": + return torch.mean( + (w * (model_output - target).abs()).reshape(target.shape[0], -1), 1 + ) + elif self.type == "lpips": + loss = self.lpips(model_output, target).reshape(-1) + return loss diff --git a/ComfyUI-SUPIR/sgm/modules/diffusionmodules/model.py b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/model.py new file mode 100644 index 0000000000000000000000000000000000000000..c78a7c8c8227231912f09426c4c00f3a38bb4ba9 --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/model.py @@ -0,0 +1,755 @@ +# pytorch_diffusion + derived encoder decoder +import math +from typing import Any, Callable, Optional + +import numpy as np +import torch +import torch.nn as nn +from einops import rearrange +from packaging import version + +try: + import xformers + import xformers.ops + + XFORMERS_IS_AVAILABLE = True +except: + XFORMERS_IS_AVAILABLE = False + print("no module 'xformers'. Processing without...") + +from ...modules.attention import LinearAttention, MemoryEfficientCrossAttention + +import comfy.ops +ops = comfy.ops.manual_cast + +def get_timestep_embedding(timesteps, embedding_dim): + """ + This matches the implementation in Denoising Diffusion Probabilistic Models: + From Fairseq. + Build sinusoidal embeddings. + This matches the implementation in tensor2tensor, but differs slightly + from the description in Section 3.5 of "Attention Is All You Need". 
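To make the frequency layout concrete, here is a minimal standalone sketch of the same sinusoidal embedding; the helper name is hypothetical and only mirrors the formula implemented in get_timestep_embedding below:

import math
import torch

def sinusoidal_embedding_sketch(timesteps: torch.Tensor, embedding_dim: int) -> torch.Tensor:
    # frequency w_j = exp(-log(10000) * j / (half_dim - 1)); embedding is [sin(t * w_j), cos(t * w_j)]
    half_dim = embedding_dim // 2
    freqs = torch.exp(
        -math.log(10000) * torch.arange(half_dim, dtype=torch.float32) / (half_dim - 1)
    )
    args = timesteps.float()[:, None] * freqs[None, :]
    emb = torch.cat([torch.sin(args), torch.cos(args)], dim=1)
    if embedding_dim % 2 == 1:  # zero-pad odd dimensions, as the function below does
        emb = torch.nn.functional.pad(emb, (0, 1, 0, 0))
    return emb

# sinusoidal_embedding_sketch(torch.tensor([0, 10, 999]), 320).shape -> torch.Size([3, 320])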
+ """ + assert len(timesteps.shape) == 1 + + half_dim = embedding_dim // 2 + emb = math.log(10000) / (half_dim - 1) + emb = torch.exp(torch.arange(half_dim, dtype=torch.float32) * -emb) + emb = emb.to(device=timesteps.device) + emb = timesteps.float()[:, None] * emb[None, :] + emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1) + if embedding_dim % 2 == 1: # zero pad + emb = torch.nn.functional.pad(emb, (0, 1, 0, 0)) + return emb + + +def nonlinearity(x): + # swish + return x * torch.sigmoid(x) + + +def Normalize(in_channels, num_groups=32): + return ops.GroupNorm( + num_groups=num_groups, num_channels=in_channels, eps=1e-6, affine=True + ) + + +class Upsample(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + self.conv = ops.Conv2d( + in_channels, in_channels, kernel_size=3, stride=1, padding=1 + ) + + def forward(self, x): + try: + x = torch.nn.functional.interpolate(x, scale_factor=2.0, mode="nearest") + except: #operation not implemented for bf16 + b, c, h, w = x.shape + out = torch.empty((b, c, h*2, w*2), dtype=x.dtype, layout=x.layout, device=x.device) + split = 8 + l = out.shape[1] // split + for i in range(0, out.shape[1], l): + out[:,i:i+l] = torch.nn.functional.interpolate(x[:,i:i+l].to(torch.float32), scale_factor=2.0, mode="nearest").to(x.dtype) + del x + x = out + if self.with_conv: + x = self.conv(x) + return x + + +class Downsample(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + # no asymmetric padding in torch conv, must do it ourselves + self.conv = ops.Conv2d( + in_channels, in_channels, kernel_size=3, stride=2, padding=0 + ) + + def forward(self, x): + if self.with_conv: + pad = (0, 1, 0, 1) + x = torch.nn.functional.pad(x, pad, mode="constant", value=0) + x = self.conv(x) + else: + x = torch.nn.functional.avg_pool2d(x, kernel_size=2, stride=2) + return x + + +class ResnetBlock(nn.Module): + def __init__( + self, + *, + in_channels, + out_channels=None, + conv_shortcut=False, + dropout, + temb_channels=512, + ): + super().__init__() + self.in_channels = in_channels + out_channels = in_channels if out_channels is None else out_channels + self.out_channels = out_channels + self.use_conv_shortcut = conv_shortcut + + self.norm1 = Normalize(in_channels) + self.conv1 = ops.Conv2d( + in_channels, out_channels, kernel_size=3, stride=1, padding=1 + ) + if temb_channels > 0: + self.temb_proj = ops.Linear(temb_channels, out_channels) + self.norm2 = Normalize(out_channels) + self.dropout = torch.nn.Dropout(dropout) + self.conv2 = ops.Conv2d( + out_channels, out_channels, kernel_size=3, stride=1, padding=1 + ) + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + self.conv_shortcut = ops.Conv2d( + in_channels, out_channels, kernel_size=3, stride=1, padding=1 + ) + else: + self.nin_shortcut = ops.Conv2d( + in_channels, out_channels, kernel_size=1, stride=1, padding=0 + ) + + def forward(self, x, temb): + h = x + h = self.norm1(h) + h = nonlinearity(h) + h = self.conv1(h) + + if temb is not None: + h = h + self.temb_proj(nonlinearity(temb))[:, :, None, None] + + h = self.norm2(h) + h = nonlinearity(h) + h = self.dropout(h) + h = self.conv2(h) + + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + x = self.conv_shortcut(x) + else: + x = self.nin_shortcut(x) + + return x + h + + +class LinAttnBlock(LinearAttention): + """to match AttnBlock usage""" + + def __init__(self, 
in_channels): + super().__init__(dim=in_channels, heads=1, dim_head=in_channels) + + +class AttnBlock(nn.Module): + def __init__(self, in_channels): + super().__init__() + self.in_channels = in_channels + + self.norm = Normalize(in_channels) + self.q = ops.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.k = ops.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.v = ops.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.proj_out = ops.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + + def attention(self, h_: torch.Tensor) -> torch.Tensor: + h_ = self.norm(h_) + q = self.q(h_) + k = self.k(h_) + v = self.v(h_) + + b, c, h, w = q.shape + q, k, v = map( + lambda x: rearrange(x, "b c h w -> b 1 (h w) c").contiguous(), (q, k, v) + ) + h_ = torch.nn.functional.scaled_dot_product_attention( + q, k, v + ) # scale is dim ** -0.5 per default + # compute attention + + return rearrange(h_, "b 1 (h w) c -> b c h w", h=h, w=w, c=c, b=b) + + def forward(self, x, **kwargs): + h_ = x + h_ = self.attention(h_) + h_ = self.proj_out(h_) + return x + h_ + + +class MemoryEfficientAttnBlock(nn.Module): + """ + Uses xformers efficient implementation, + see https://github.com/MatthieuTPHR/diffusers/blob/d80b531ff8060ec1ea982b65a1b8df70f73aa67c/src/diffusers/models/attention.py#L223 + Note: this is a single-head self-attention operation + """ + + # + def __init__(self, in_channels): + super().__init__() + self.in_channels = in_channels + + self.norm = Normalize(in_channels) + self.q = ops.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.k = ops.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.v = ops.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.proj_out = ops.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.attention_op: Optional[Any] = None + + def attention(self, h_: torch.Tensor) -> torch.Tensor: + h_ = self.norm(h_) + q = self.q(h_) + k = self.k(h_) + v = self.v(h_) + + # compute attention + B, C, H, W = q.shape + q, k, v = map(lambda x: rearrange(x, "b c h w -> b (h w) c"), (q, k, v)) + + q, k, v = map( + lambda t: t.unsqueeze(3) + .reshape(B, t.shape[1], 1, C) + .permute(0, 2, 1, 3) + .reshape(B * 1, t.shape[1], C) + .contiguous(), + (q, k, v), + ) + out = xformers.ops.memory_efficient_attention( + q, k, v, attn_bias=None, op=self.attention_op + ) + + out = ( + out.unsqueeze(0) + .reshape(B, 1, out.shape[1], C) + .permute(0, 2, 1, 3) + .reshape(B, out.shape[1], C) + ) + return rearrange(out, "b (h w) c -> b c h w", b=B, h=H, w=W, c=C) + + def forward(self, x, **kwargs): + h_ = x + h_ = self.attention(h_) + h_ = self.proj_out(h_) + return x + h_ + + +class MemoryEfficientCrossAttentionWrapper(MemoryEfficientCrossAttention): + def forward(self, x, context=None, mask=None, **unused_kwargs): + b, c, h, w = x.shape + x = rearrange(x, "b c h w -> b (h w) c") + out = super().forward(x, context=context, mask=mask) + out = rearrange(out, "b (h w) c -> b c h w", h=h, w=w, c=c) + return x + out + + +def make_attn(in_channels, attn_type="vanilla", attn_kwargs=None): + assert attn_type in [ + "vanilla", + "vanilla-xformers", + "memory-efficient-cross-attn", + "linear", + "none", + ], f"attn_type {attn_type} unknown" + if ( + version.parse(torch.__version__) < version.parse("2.0.0") + and attn_type != "none" + ): + assert XFORMERS_IS_AVAILABLE, ( + f"We do not support 
vanilla attention in {torch.__version__} anymore, " + f"as it is too expensive. Please install xformers via e.g. 'pip install xformers==0.0.16'" + ) + attn_type = "vanilla-xformers" + print(f"making attention of type '{attn_type}' with {in_channels} in_channels") + if attn_type == "vanilla": + assert attn_kwargs is None + return AttnBlock(in_channels) + elif attn_type == "vanilla-xformers": + print(f"building MemoryEfficientAttnBlock with {in_channels} in_channels...") + return MemoryEfficientAttnBlock(in_channels) + elif type == "memory-efficient-cross-attn": + attn_kwargs["query_dim"] = in_channels + return MemoryEfficientCrossAttentionWrapper(**attn_kwargs) + elif attn_type == "none": + return nn.Identity(in_channels) + else: + return LinAttnBlock(in_channels) + + +class Model(nn.Module): + def __init__( + self, + *, + ch, + out_ch, + ch_mult=(1, 2, 4, 8), + num_res_blocks, + attn_resolutions, + dropout=0.0, + resamp_with_conv=True, + in_channels, + resolution, + use_timestep=True, + use_linear_attn=False, + attn_type="vanilla", + ): + super().__init__() + if use_linear_attn: + attn_type = "linear" + self.ch = ch + self.temb_ch = self.ch * 4 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + + self.use_timestep = use_timestep + if self.use_timestep: + # timestep embedding + self.temb = nn.Module() + self.temb.dense = nn.ModuleList( + [ + ops.Linear(self.ch, self.temb_ch), + ops.Linear(self.temb_ch, self.temb_ch), + ] + ) + + # downsampling + self.conv_in = ops.Conv2d( + in_channels, self.ch, kernel_size=3, stride=1, padding=1 + ) + + curr_res = resolution + in_ch_mult = (1,) + tuple(ch_mult) + self.down = nn.ModuleList() + for i_level in range(self.num_resolutions): + block = nn.ModuleList() + attn = nn.ModuleList() + block_in = ch * in_ch_mult[i_level] + block_out = ch * ch_mult[i_level] + for i_block in range(self.num_res_blocks): + block.append( + ResnetBlock( + in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + ) + ) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + down = nn.Module() + down.block = block + down.attn = attn + if i_level != self.num_resolutions - 1: + down.downsample = Downsample(block_in, resamp_with_conv) + curr_res = curr_res // 2 + self.down.append(down) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type) + self.mid.block_2 = ResnetBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch * ch_mult[i_level] + skip_in = ch * ch_mult[i_level] + for i_block in range(self.num_res_blocks + 1): + if i_block == self.num_res_blocks: + skip_in = ch * in_ch_mult[i_level] + block.append( + ResnetBlock( + in_channels=block_in + skip_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + ) + ) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + up.upsample = Upsample(block_in, resamp_with_conv) + curr_res = curr_res * 
2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in) + self.conv_out = ops.Conv2d( + block_in, out_ch, kernel_size=3, stride=1, padding=1 + ) + + def forward(self, x, t=None, context=None): + # assert x.shape[2] == x.shape[3] == self.resolution + if context is not None: + # assume aligned context, cat along channel axis + x = torch.cat((x, context), dim=1) + if self.use_timestep: + # timestep embedding + assert t is not None + temb = get_timestep_embedding(t, self.ch) + temb = self.temb.dense[0](temb) + temb = nonlinearity(temb) + temb = self.temb.dense[1](temb) + else: + temb = None + + # downsampling + hs = [self.conv_in(x)] + for i_level in range(self.num_resolutions): + for i_block in range(self.num_res_blocks): + h = self.down[i_level].block[i_block](hs[-1], temb) + if len(self.down[i_level].attn) > 0: + h = self.down[i_level].attn[i_block](h) + hs.append(h) + if i_level != self.num_resolutions - 1: + hs.append(self.down[i_level].downsample(hs[-1])) + + # middle + h = hs[-1] + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks + 1): + h = self.up[i_level].block[i_block]( + torch.cat([h, hs.pop()], dim=1), temb + ) + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block](h) + if i_level != 0: + h = self.up[i_level].upsample(h) + + # end + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + def get_last_layer(self): + return self.conv_out.weight + + +class Encoder(nn.Module): + def __init__( + self, + *, + ch, + out_ch, + ch_mult=(1, 2, 4, 8), + num_res_blocks, + attn_resolutions, + dropout=0.0, + resamp_with_conv=True, + in_channels, + resolution, + z_channels, + double_z=True, + use_linear_attn=False, + attn_type="vanilla", + **ignore_kwargs, + ): + super().__init__() + if use_linear_attn: + attn_type = "linear" + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + + # downsampling + self.conv_in = ops.Conv2d( + in_channels, self.ch, kernel_size=3, stride=1, padding=1 + ) + + curr_res = resolution + in_ch_mult = (1,) + tuple(ch_mult) + self.in_ch_mult = in_ch_mult + self.down = nn.ModuleList() + for i_level in range(self.num_resolutions): + block = nn.ModuleList() + attn = nn.ModuleList() + block_in = ch * in_ch_mult[i_level] + block_out = ch * ch_mult[i_level] + for i_block in range(self.num_res_blocks): + block.append( + ResnetBlock( + in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + ) + ) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + down = nn.Module() + down.block = block + down.attn = attn + if i_level != self.num_resolutions - 1: + down.downsample = Downsample(block_in, resamp_with_conv) + curr_res = curr_res // 2 + self.down.append(down) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type) + self.mid.block_2 = ResnetBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + + # end + self.norm_out = Normalize(block_in) + self.conv_out = ops.Conv2d( + block_in, + 
2 * z_channels if double_z else z_channels, + kernel_size=3, + stride=1, + padding=1, + ) + + def forward(self, x): + # timestep embedding + temb = None + + # downsampling + hs = [self.conv_in(x)] + for i_level in range(self.num_resolutions): + for i_block in range(self.num_res_blocks): + h = self.down[i_level].block[i_block](hs[-1], temb) + if len(self.down[i_level].attn) > 0: + h = self.down[i_level].attn[i_block](h) + hs.append(h) + if i_level != self.num_resolutions - 1: + hs.append(self.down[i_level].downsample(hs[-1])) + + # middle + h = hs[-1] + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # end + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + +class Decoder(nn.Module): + def __init__( + self, + *, + ch, + out_ch, + ch_mult=(1, 2, 4, 8), + num_res_blocks, + attn_resolutions, + dropout=0.0, + resamp_with_conv=True, + in_channels, + resolution, + z_channels, + give_pre_end=False, + tanh_out=False, + use_linear_attn=False, + attn_type="vanilla", + **ignorekwargs, + ): + super().__init__() + if use_linear_attn: + attn_type = "linear" + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + self.give_pre_end = give_pre_end + self.tanh_out = tanh_out + + # compute in_ch_mult, block_in and curr_res at lowest res + in_ch_mult = (1,) + tuple(ch_mult) + block_in = ch * ch_mult[self.num_resolutions - 1] + curr_res = resolution // 2 ** (self.num_resolutions - 1) + self.z_shape = (1, z_channels, curr_res, curr_res) + print( + "Working with z of shape {} = {} dimensions.".format( + self.z_shape, np.prod(self.z_shape) + ) + ) + + make_attn_cls = self._make_attn() + make_resblock_cls = self._make_resblock() + make_conv_cls = self._make_conv() + # z to block_in + self.conv_in = ops.Conv2d( + z_channels, block_in, kernel_size=3, stride=1, padding=1 + ) + + # middle + self.mid = nn.Module() + self.mid.block_1 = make_resblock_cls( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + self.mid.attn_1 = make_attn_cls(block_in, attn_type=attn_type) + self.mid.block_2 = make_resblock_cls( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch * ch_mult[i_level] + for i_block in range(self.num_res_blocks + 1): + block.append( + make_resblock_cls( + in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + ) + ) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn_cls(block_in, attn_type=attn_type)) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + up.upsample = Upsample(block_in, resamp_with_conv) + curr_res = curr_res * 2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in) + self.conv_out = make_conv_cls( + block_in, out_ch, kernel_size=3, stride=1, padding=1 + ) + + def _make_attn(self) -> Callable: + return make_attn + + def _make_resblock(self) -> Callable: + return ResnetBlock + + def _make_conv(self) -> Callable: + return ops.Conv2d + + def get_last_layer(self, **kwargs): + return self.conv_out.weight + + def forward(self, z, **kwargs): + # assert z.shape[1:] == self.z_shape[1:] + 
self.last_z_shape = z.shape + + # timestep embedding + temb = None + + # z to block_in + h = self.conv_in(z) + + # middle + h = self.mid.block_1(h, temb, **kwargs) + h = self.mid.attn_1(h, **kwargs) + h = self.mid.block_2(h, temb, **kwargs) + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks + 1): + h = self.up[i_level].block[i_block](h, temb, **kwargs) + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block](h, **kwargs) + if i_level != 0: + h = self.up[i_level].upsample(h) + + # end + if self.give_pre_end: + return h + + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h, **kwargs) + if self.tanh_out: + h = torch.tanh(h) + return h diff --git a/ComfyUI-SUPIR/sgm/modules/diffusionmodules/openaimodel.py b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/openaimodel.py new file mode 100644 index 0000000000000000000000000000000000000000..b52bbf26b727e77d446cd22e51ac203271ce6dc6 --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/openaimodel.py @@ -0,0 +1,1272 @@ +import math +from abc import abstractmethod +from functools import partial +from typing import Iterable + +import numpy as np +import torch as th +import torch.nn as nn +import torch.nn.functional as F +# from einops._torch_specific import allow_ops_in_compiled_graph +# allow_ops_in_compiled_graph() +from einops import rearrange + +from ...modules.attention import SpatialTransformer +from ...modules.diffusionmodules.util import ( + avg_pool_nd, + checkpoint, + conv_nd, + linear, + normalization, + timestep_embedding, + zero_module, +) +from ...util import default, exists + + +# dummy replace +def convert_module_to_f16(x): + pass + + +def convert_module_to_f32(x): + pass + + +## go +class AttentionPool2d(nn.Module): + """ + Adapted from CLIP: https://github.com/openai/CLIP/blob/main/clip/model.py + """ + + def __init__( + self, + spacial_dim: int, + embed_dim: int, + num_heads_channels: int, + output_dim: int = None, + ): + super().__init__() + self.positional_embedding = nn.Parameter( + th.randn(embed_dim, spacial_dim**2 + 1) / embed_dim**0.5 + ) + self.qkv_proj = conv_nd(1, embed_dim, 3 * embed_dim, 1) + self.c_proj = conv_nd(1, embed_dim, output_dim or embed_dim, 1) + self.num_heads = embed_dim // num_heads_channels + self.attention = QKVAttention(self.num_heads) + + def forward(self, x): + b, c, *_spatial = x.shape + x = x.reshape(b, c, -1) # NC(HW) + x = th.cat([x.mean(dim=-1, keepdim=True), x], dim=-1) # NC(HW+1) + x = x + self.positional_embedding[None, :, :].to(x.dtype) # NC(HW+1) + x = self.qkv_proj(x) + x = self.attention(x) + x = self.c_proj(x) + return x[:, :, 0] + + +class TimestepBlock(nn.Module): + """ + Any module where forward() takes timestep embeddings as a second argument. + """ + + @abstractmethod + def forward(self, x, emb): + """ + Apply the module to `x` given `emb` timestep embeddings. + """ + + +class TimestepEmbedSequential(nn.Sequential, TimestepBlock): + """ + A sequential module that passes timestep embeddings to the children that + support it as an extra input. 
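The dispatch in the forward pass below hands the timestep embedding only to TimestepBlock children and the cross-attention context only to SpatialTransformer children; every other layer sees just the activations. A small sketch of that pattern with toy stand-in modules (the toy classes are illustrative, not part of this file):

import torch
import torch.nn as nn

class ToyTimestepBlock(nn.Module):
    def forward(self, x, emb):
        return x + emb.mean()        # consumes the timestep embedding

class ToyPlainBlock(nn.Module):
    def forward(self, x):
        return 2.0 * x               # sees only the activations

def run_sequential(layers, x, emb):
    for layer in layers:
        x = layer(x, emb) if isinstance(layer, ToyTimestepBlock) else layer(x)
    return x

# run_sequential([ToyTimestepBlock(), ToyPlainBlock()], torch.zeros(1, 4), torch.ones(1, 4))
# -> tensor([[2., 2., 2., 2.]])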
+ """ + + def forward( + self, + x, + emb, + context=None, + skip_time_mix=False, + time_context=None, + num_video_frames=None, + time_context_cat=None, + use_crossframe_attention_in_spatial_layers=False, + ): + for layer in self: + if isinstance(layer, TimestepBlock): + x = layer(x, emb) + elif isinstance(layer, SpatialTransformer): + x = layer(x, context) + else: + x = layer(x) + return x + + +class Upsample(nn.Module): + """ + An upsampling layer with an optional convolution. + :param channels: channels in the inputs and outputs. + :param use_conv: a bool determining if a convolution is applied. + :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then + upsampling occurs in the inner-two dimensions. + """ + + def __init__( + self, channels, use_conv, dims=2, out_channels=None, padding=1, third_up=False + ): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.dims = dims + self.third_up = third_up + if use_conv: + self.conv = conv_nd( + dims, self.channels, self.out_channels, 3, padding=padding + ) + + def forward(self, x): + # support fp32 only + _dtype = x.dtype + x = x.to(th.float32) + + assert x.shape[1] == self.channels + if self.dims == 3: + t_factor = 1 if not self.third_up else 2 + x = F.interpolate( + x, + (t_factor * x.shape[2], x.shape[3] * 2, x.shape[4] * 2), + mode="nearest", + ) + else: + x = F.interpolate(x, scale_factor=2, mode="nearest") + + x = x.to(_dtype) # support fp32 only + + if self.use_conv: + x = self.conv(x) + return x + + +class TransposedUpsample(nn.Module): + "Learned 2x upsampling without padding" + + def __init__(self, channels, out_channels=None, ks=5): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + + self.up = nn.ConvTranspose2d( + self.channels, self.out_channels, kernel_size=ks, stride=2 + ) + + def forward(self, x): + return self.up(x) + + +class Downsample(nn.Module): + """ + A downsampling layer with an optional convolution. + :param channels: channels in the inputs and outputs. + :param use_conv: a bool determining if a convolution is applied. + :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then + downsampling occurs in the inner-two dimensions. + """ + + def __init__( + self, channels, use_conv, dims=2, out_channels=None, padding=1, third_down=False + ): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.dims = dims + stride = 2 if dims != 3 else ((1, 2, 2) if not third_down else (2, 2, 2)) + if use_conv: + # print(f"Building a Downsample layer with {dims} dims.") + # print( + # f" --> settings are: \n in-chn: {self.channels}, out-chn: {self.out_channels}, " + # f"kernel-size: 3, stride: {stride}, padding: {padding}" + # ) + # if dims == 3: + # print(f" --> Downsampling third axis (time): {third_down}") + self.op = conv_nd( + dims, + self.channels, + self.out_channels, + 3, + stride=stride, + padding=padding, + ) + else: + assert self.channels == self.out_channels + self.op = avg_pool_nd(dims, kernel_size=stride, stride=stride) + + def forward(self, x): + assert x.shape[1] == self.channels + return self.op(x) + + +class ResBlock(TimestepBlock): + """ + A residual block that can optionally change the number of channels. + :param channels: the number of input channels. + :param emb_channels: the number of timestep embedding channels. + :param dropout: the rate of dropout. 
+ :param out_channels: if specified, the number of out channels. + :param use_conv: if True and out_channels is specified, use a spatial + convolution instead of a smaller 1x1 convolution to change the + channels in the skip connection. + :param dims: determines if the signal is 1D, 2D, or 3D. + :param use_checkpoint: if True, use gradient checkpointing on this module. + :param up: if True, use this block for upsampling. + :param down: if True, use this block for downsampling. + """ + + def __init__( + self, + channels, + emb_channels, + dropout, + out_channels=None, + use_conv=False, + use_scale_shift_norm=False, + dims=2, + use_checkpoint=False, + up=False, + down=False, + kernel_size=3, + exchange_temb_dims=False, + skip_t_emb=False, + ): + super().__init__() + self.channels = channels + self.emb_channels = emb_channels + self.dropout = dropout + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.use_checkpoint = use_checkpoint + self.use_scale_shift_norm = use_scale_shift_norm + self.exchange_temb_dims = exchange_temb_dims + + if isinstance(kernel_size, Iterable): + padding = [k // 2 for k in kernel_size] + else: + padding = kernel_size // 2 + + self.in_layers = nn.Sequential( + normalization(channels), + nn.SiLU(), + conv_nd(dims, channels, self.out_channels, kernel_size, padding=padding), + ) + + self.updown = up or down + + if up: + self.h_upd = Upsample(channels, False, dims) + self.x_upd = Upsample(channels, False, dims) + elif down: + self.h_upd = Downsample(channels, False, dims) + self.x_upd = Downsample(channels, False, dims) + else: + self.h_upd = self.x_upd = nn.Identity() + + self.skip_t_emb = skip_t_emb + self.emb_out_channels = ( + 2 * self.out_channels if use_scale_shift_norm else self.out_channels + ) + if self.skip_t_emb: + print(f"Skipping timestep embedding in {self.__class__.__name__}") + assert not self.use_scale_shift_norm + self.emb_layers = None + self.exchange_temb_dims = False + else: + self.emb_layers = nn.Sequential( + nn.SiLU(), + linear( + emb_channels, + self.emb_out_channels, + ), + ) + + self.out_layers = nn.Sequential( + normalization(self.out_channels), + nn.SiLU(), + nn.Dropout(p=dropout), + zero_module( + conv_nd( + dims, + self.out_channels, + self.out_channels, + kernel_size, + padding=padding, + ) + ), + ) + + if self.out_channels == channels: + self.skip_connection = nn.Identity() + elif use_conv: + self.skip_connection = conv_nd( + dims, channels, self.out_channels, kernel_size, padding=padding + ) + else: + self.skip_connection = conv_nd(dims, channels, self.out_channels, 1) + + def forward(self, x, emb): + """ + Apply the block to a Tensor, conditioned on a timestep embedding. + :param x: an [N x C x ...] Tensor of features. + :param emb: an [N x emb_channels] Tensor of timestep embeddings. + :return: an [N x C x ...] Tensor of outputs. 
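When use_scale_shift_norm is enabled, _forward below applies the projected embedding as a FiLM-style scale and shift after normalization. A minimal sketch of just that step, assuming shapes [N, C, H, W] for the features and [N, 2*C] for the embedding projection (normalization omitted for brevity):

import torch

def scale_shift_sketch(h: torch.Tensor, emb_out: torch.Tensor) -> torch.Tensor:
    # h: [N, C, H, W]; emb_out: [N, 2*C] projection of the timestep embedding
    emb_out = emb_out[:, :, None, None]             # broadcast over the spatial dims
    scale, shift = torch.chunk(emb_out, 2, dim=1)   # two [N, C, 1, 1] tensors
    return h * (1 + scale) + shift                  # the real block applies out_norm(h) first

# scale_shift_sketch(torch.randn(2, 8, 4, 4), torch.zeros(2, 16)) returns h unchanged.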
+ """ + return checkpoint( + self._forward, (x, emb), self.parameters(), self.use_checkpoint + ) + + def _forward(self, x, emb): + if self.updown: + in_rest, in_conv = self.in_layers[:-1], self.in_layers[-1] + h = in_rest(x) + h = self.h_upd(h) + x = self.x_upd(x) + h = in_conv(h) + else: + h = self.in_layers(x) + + if self.skip_t_emb: + emb_out = th.zeros_like(h) + else: + emb_out = self.emb_layers(emb).type(h.dtype) + while len(emb_out.shape) < len(h.shape): + emb_out = emb_out[..., None] + if self.use_scale_shift_norm: + out_norm, out_rest = self.out_layers[0], self.out_layers[1:] + scale, shift = th.chunk(emb_out, 2, dim=1) + h = out_norm(h) * (1 + scale) + shift + h = out_rest(h) + else: + if self.exchange_temb_dims: + emb_out = rearrange(emb_out, "b t c ... -> b c t ...") + h = h + emb_out + h = self.out_layers(h) + return self.skip_connection(x) + h + + +class AttentionBlock(nn.Module): + """ + An attention block that allows spatial positions to attend to each other. + Originally ported from here, but adapted to the N-d case. + https://github.com/hojonathanho/diffusion/blob/1e0dceb3b3495bbe19116a5e1b3596cd0706c543/diffusion_tf/models/unet.py#L66. + """ + + def __init__( + self, + channels, + num_heads=1, + num_head_channels=-1, + use_checkpoint=False, + use_new_attention_order=False, + ): + super().__init__() + self.channels = channels + if num_head_channels == -1: + self.num_heads = num_heads + else: + assert ( + channels % num_head_channels == 0 + ), f"q,k,v channels {channels} is not divisible by num_head_channels {num_head_channels}" + self.num_heads = channels // num_head_channels + self.use_checkpoint = use_checkpoint + self.norm = normalization(channels) + self.qkv = conv_nd(1, channels, channels * 3, 1) + if use_new_attention_order: + # split qkv before split heads + self.attention = QKVAttention(self.num_heads) + else: + # split heads before split qkv + self.attention = QKVAttentionLegacy(self.num_heads) + + self.proj_out = zero_module(conv_nd(1, channels, channels, 1)) + + def forward(self, x, **kwargs): + # TODO add crossframe attention and use mixed checkpoint + return checkpoint( + self._forward, (x,), self.parameters(), True + ) # TODO: check checkpoint usage, is True # TODO: fix the .half call!!! + # return pt_checkpoint(self._forward, x) # pytorch + + def _forward(self, x): + b, c, *spatial = x.shape + x = x.reshape(b, c, -1) + qkv = self.qkv(self.norm(x)) + h = self.attention(qkv) + h = self.proj_out(h) + return (x + h).reshape(b, c, *spatial) + + +def count_flops_attn(model, _x, y): + """ + A counter for the `thop` package to count the operations in an + attention operation. + Meant to be used like: + macs, params = thop.profile( + model, + inputs=(inputs, timestamps), + custom_ops={QKVAttention: QKVAttention.count_flops}, + ) + """ + b, c, *spatial = y[0].shape + num_spatial = int(np.prod(spatial)) + # We perform two matmuls with the same number of ops. + # The first computes the weight matrix, the second computes + # the combination of the value vectors. + matmul_ops = 2 * b * (num_spatial**2) * c + model.total_ops += th.DoubleTensor([matmul_ops]) + + +class QKVAttentionLegacy(nn.Module): + """ + A module which performs QKV attention. Matches legacy QKVAttention + input/ouput heads shaping + """ + + def __init__(self, n_heads): + super().__init__() + self.n_heads = n_heads + + def forward(self, qkv): + """ + Apply QKV attention. + :param qkv: an [N x (H * 3 * C) x T] tensor of Qs, Ks, and Vs. + :return: an [N x (H * C) x T] tensor after attention. 
+ """ + bs, width, length = qkv.shape + assert width % (3 * self.n_heads) == 0 + ch = width // (3 * self.n_heads) + q, k, v = qkv.reshape(bs * self.n_heads, ch * 3, length).split(ch, dim=1) + scale = 1 / math.sqrt(math.sqrt(ch)) + weight = th.einsum( + "bct,bcs->bts", q * scale, k * scale + ) # More stable with f16 than dividing afterwards + weight = th.softmax(weight.float(), dim=-1).type(weight.dtype) + a = th.einsum("bts,bcs->bct", weight, v) + return a.reshape(bs, -1, length) + + @staticmethod + def count_flops(model, _x, y): + return count_flops_attn(model, _x, y) + + +class QKVAttention(nn.Module): + """ + A module which performs QKV attention and splits in a different order. + """ + + def __init__(self, n_heads): + super().__init__() + self.n_heads = n_heads + + def forward(self, qkv): + """ + Apply QKV attention. + :param qkv: an [N x (3 * H * C) x T] tensor of Qs, Ks, and Vs. + :return: an [N x (H * C) x T] tensor after attention. + """ + bs, width, length = qkv.shape + assert width % (3 * self.n_heads) == 0 + ch = width // (3 * self.n_heads) + q, k, v = qkv.chunk(3, dim=1) + scale = 1 / math.sqrt(math.sqrt(ch)) + weight = th.einsum( + "bct,bcs->bts", + (q * scale).view(bs * self.n_heads, ch, length), + (k * scale).view(bs * self.n_heads, ch, length), + ) # More stable with f16 than dividing afterwards + weight = th.softmax(weight.float(), dim=-1).type(weight.dtype) + a = th.einsum("bts,bcs->bct", weight, v.reshape(bs * self.n_heads, ch, length)) + return a.reshape(bs, -1, length) + + @staticmethod + def count_flops(model, _x, y): + return count_flops_attn(model, _x, y) + + +class Timestep(nn.Module): + def __init__(self, dim): + super().__init__() + self.dim = dim + + def forward(self, t): + return timestep_embedding(t, self.dim) + + +class UNetModel(nn.Module): + """ + The full UNet model with attention and timestep embedding. + :param in_channels: channels in the input Tensor. + :param model_channels: base channel count for the model. + :param out_channels: channels in the output Tensor. + :param num_res_blocks: number of residual blocks per downsample. + :param attention_resolutions: a collection of downsample rates at which + attention will take place. May be a set, list, or tuple. + For example, if this contains 4, then at 4x downsampling, attention + will be used. + :param dropout: the dropout probability. + :param channel_mult: channel multiplier for each level of the UNet. + :param conv_resample: if True, use learned convolutions for upsampling and + downsampling. + :param dims: determines if the signal is 1D, 2D, or 3D. + :param num_classes: if specified (as an int), then this model will be + class-conditional with `num_classes` classes. + :param use_checkpoint: use gradient checkpointing to reduce memory usage. + :param num_heads: the number of attention heads in each attention layer. + :param num_heads_channels: if specified, ignore num_heads and instead use + a fixed channel width per attention head. + :param num_heads_upsample: works with num_heads to set a different number + of heads for upsampling. Deprecated. + :param use_scale_shift_norm: use a FiLM-like conditioning mechanism. + :param resblock_updown: use residual blocks for up/downsampling. + :param use_new_attention_order: use a different attention pattern for potentially + increased efficiency. 
+ """ + + def __init__( + self, + in_channels, + model_channels, + out_channels, + num_res_blocks, + attention_resolutions, + dropout=0, + channel_mult=(1, 2, 4, 8), + conv_resample=True, + dims=2, + num_classes=None, + use_checkpoint=False, + use_fp16=False, + num_heads=-1, + num_head_channels=-1, + num_heads_upsample=-1, + use_scale_shift_norm=False, + resblock_updown=False, + use_new_attention_order=False, + use_spatial_transformer=False, # custom transformer support + transformer_depth=1, # custom transformer support + context_dim=None, # custom transformer support + n_embed=None, # custom support for prediction of discrete ids into codebook of first stage vq model + legacy=True, + disable_self_attentions=None, + num_attention_blocks=None, + disable_middle_self_attn=False, + use_linear_in_transformer=False, + spatial_transformer_attn_type="softmax", + adm_in_channels=None, + use_fairscale_checkpoint=False, + offload_to_cpu=False, + transformer_depth_middle=None, + ): + super().__init__() + from omegaconf.listconfig import ListConfig + + if use_spatial_transformer: + assert ( + context_dim is not None + ), "Fool!! You forgot to include the dimension of your cross-attention conditioning..." + + if context_dim is not None: + assert ( + use_spatial_transformer + ), "Fool!! You forgot to use the spatial transformer for your cross-attention conditioning..." + if type(context_dim) == ListConfig: + context_dim = list(context_dim) + + if num_heads_upsample == -1: + num_heads_upsample = num_heads + + if num_heads == -1: + assert ( + num_head_channels != -1 + ), "Either num_heads or num_head_channels has to be set" + + if num_head_channels == -1: + assert ( + num_heads != -1 + ), "Either num_heads or num_head_channels has to be set" + + self.in_channels = in_channels + self.model_channels = model_channels + self.out_channels = out_channels + if isinstance(transformer_depth, int): + transformer_depth = len(channel_mult) * [transformer_depth] + elif isinstance(transformer_depth, ListConfig): + transformer_depth = list(transformer_depth) + transformer_depth_middle = default( + transformer_depth_middle, transformer_depth[-1] + ) + + if isinstance(num_res_blocks, int): + self.num_res_blocks = len(channel_mult) * [num_res_blocks] + else: + if len(num_res_blocks) != len(channel_mult): + raise ValueError( + "provide num_res_blocks either as an int (globally constant) or " + "as a list/tuple (per-level) with the same length as channel_mult" + ) + self.num_res_blocks = num_res_blocks + # self.num_res_blocks = num_res_blocks + if disable_self_attentions is not None: + # should be a list of booleans, indicating whether to disable self-attention in TransformerBlocks or not + assert len(disable_self_attentions) == len(channel_mult) + if num_attention_blocks is not None: + assert len(num_attention_blocks) == len(self.num_res_blocks) + assert all( + map( + lambda i: self.num_res_blocks[i] >= num_attention_blocks[i], + range(len(num_attention_blocks)), + ) + ) + print( + f"Constructor of UNetModel received num_attention_blocks={num_attention_blocks}. " + f"This option has LESS priority than attention_resolutions {attention_resolutions}, " + f"i.e., in cases where num_attention_blocks[i] > 0 but 2**i not in attention_resolutions, " + f"attention will still not be set." 
+ ) # todo: convert to warning + + self.attention_resolutions = attention_resolutions + self.dropout = dropout + self.channel_mult = channel_mult + self.conv_resample = conv_resample + self.num_classes = num_classes + self.use_checkpoint = use_checkpoint + if use_fp16: + print("WARNING: use_fp16 was dropped and has no effect anymore.") + # self.dtype = th.float16 if use_fp16 else th.float32 + self.num_heads = num_heads + self.num_head_channels = num_head_channels + self.num_heads_upsample = num_heads_upsample + self.predict_codebook_ids = n_embed is not None + + assert use_fairscale_checkpoint != use_checkpoint or not ( + use_checkpoint or use_fairscale_checkpoint + ) + + self.use_fairscale_checkpoint = False + checkpoint_wrapper_fn = ( + partial(checkpoint_wrapper, offload_to_cpu=offload_to_cpu) + if self.use_fairscale_checkpoint + else lambda x: x + ) + + time_embed_dim = model_channels * 4 + self.time_embed = checkpoint_wrapper_fn( + nn.Sequential( + linear(model_channels, time_embed_dim), + nn.SiLU(), + linear(time_embed_dim, time_embed_dim), + ) + ) + + if self.num_classes is not None: + if isinstance(self.num_classes, int): + self.label_emb = nn.Embedding(num_classes, time_embed_dim) + elif self.num_classes == "continuous": + print("setting up linear c_adm embedding layer") + self.label_emb = nn.Linear(1, time_embed_dim) + elif self.num_classes == "timestep": + self.label_emb = checkpoint_wrapper_fn( + nn.Sequential( + Timestep(model_channels), + nn.Sequential( + linear(model_channels, time_embed_dim), + nn.SiLU(), + linear(time_embed_dim, time_embed_dim), + ), + ) + ) + elif self.num_classes == "sequential": + assert adm_in_channels is not None + self.label_emb = nn.Sequential( + nn.Sequential( + linear(adm_in_channels, time_embed_dim), + nn.SiLU(), + linear(time_embed_dim, time_embed_dim), + ) + ) + else: + raise ValueError() + + self.input_blocks = nn.ModuleList( + [ + TimestepEmbedSequential( + conv_nd(dims, in_channels, model_channels, 3, padding=1) + ) + ] + ) + self._feature_size = model_channels + input_block_chans = [model_channels] + ch = model_channels + ds = 1 + for level, mult in enumerate(channel_mult): + for nr in range(self.num_res_blocks[level]): + layers = [ + checkpoint_wrapper_fn( + ResBlock( + ch, + time_embed_dim, + dropout, + out_channels=mult * model_channels, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ) + ) + ] + ch = mult * model_channels + if ds in attention_resolutions: + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + # num_heads = 1 + dim_head = ( + ch // num_heads + if use_spatial_transformer + else num_head_channels + ) + if exists(disable_self_attentions): + disabled_sa = disable_self_attentions[level] + else: + disabled_sa = False + + if ( + not exists(num_attention_blocks) + or nr < num_attention_blocks[level] + ): + layers.append( + checkpoint_wrapper_fn( + AttentionBlock( + ch, + use_checkpoint=use_checkpoint, + num_heads=num_heads, + num_head_channels=dim_head, + use_new_attention_order=use_new_attention_order, + ) + ) + if not use_spatial_transformer + else checkpoint_wrapper_fn( + SpatialTransformer( + ch, + num_heads, + dim_head, + depth=transformer_depth[level], + context_dim=context_dim, + disable_self_attn=disabled_sa, + use_linear=use_linear_in_transformer, + attn_type=spatial_transformer_attn_type, + use_checkpoint=use_checkpoint, + ) + ) + ) + 
self.input_blocks.append(TimestepEmbedSequential(*layers)) + self._feature_size += ch + input_block_chans.append(ch) + if level != len(channel_mult) - 1: + out_ch = ch + self.input_blocks.append( + TimestepEmbedSequential( + checkpoint_wrapper_fn( + ResBlock( + ch, + time_embed_dim, + dropout, + out_channels=out_ch, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + down=True, + ) + ) + if resblock_updown + else Downsample( + ch, conv_resample, dims=dims, out_channels=out_ch + ) + ) + ) + ch = out_ch + input_block_chans.append(ch) + ds *= 2 + self._feature_size += ch + + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + # num_heads = 1 + dim_head = ch // num_heads if use_spatial_transformer else num_head_channels + self.middle_block = TimestepEmbedSequential( + checkpoint_wrapper_fn( + ResBlock( + ch, + time_embed_dim, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ) + ), + checkpoint_wrapper_fn( + AttentionBlock( + ch, + use_checkpoint=use_checkpoint, + num_heads=num_heads, + num_head_channels=dim_head, + use_new_attention_order=use_new_attention_order, + ) + ) + if not use_spatial_transformer + else checkpoint_wrapper_fn( + SpatialTransformer( # always uses a self-attn + ch, + num_heads, + dim_head, + depth=transformer_depth_middle, + context_dim=context_dim, + disable_self_attn=disable_middle_self_attn, + use_linear=use_linear_in_transformer, + attn_type=spatial_transformer_attn_type, + use_checkpoint=use_checkpoint, + ) + ), + checkpoint_wrapper_fn( + ResBlock( + ch, + time_embed_dim, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ) + ), + ) + self._feature_size += ch + + self.output_blocks = nn.ModuleList([]) + for level, mult in list(enumerate(channel_mult))[::-1]: + for i in range(self.num_res_blocks[level] + 1): + ich = input_block_chans.pop() + layers = [ + checkpoint_wrapper_fn( + ResBlock( + ch + ich, + time_embed_dim, + dropout, + out_channels=model_channels * mult, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ) + ) + ] + ch = model_channels * mult + if ds in attention_resolutions: + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + # num_heads = 1 + dim_head = ( + ch // num_heads + if use_spatial_transformer + else num_head_channels + ) + if exists(disable_self_attentions): + disabled_sa = disable_self_attentions[level] + else: + disabled_sa = False + + if ( + not exists(num_attention_blocks) + or i < num_attention_blocks[level] + ): + layers.append( + checkpoint_wrapper_fn( + AttentionBlock( + ch, + use_checkpoint=use_checkpoint, + num_heads=num_heads_upsample, + num_head_channels=dim_head, + use_new_attention_order=use_new_attention_order, + ) + ) + if not use_spatial_transformer + else checkpoint_wrapper_fn( + SpatialTransformer( + ch, + num_heads, + dim_head, + depth=transformer_depth[level], + context_dim=context_dim, + disable_self_attn=disabled_sa, + use_linear=use_linear_in_transformer, + attn_type=spatial_transformer_attn_type, + use_checkpoint=use_checkpoint, + ) + ) + ) + if level and i == self.num_res_blocks[level]: + out_ch = ch + layers.append( + checkpoint_wrapper_fn( + ResBlock( + ch, + time_embed_dim, + dropout, + out_channels=out_ch, + dims=dims, + 
use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + up=True, + ) + ) + if resblock_updown + else Upsample(ch, conv_resample, dims=dims, out_channels=out_ch) + ) + ds //= 2 + self.output_blocks.append(TimestepEmbedSequential(*layers)) + self._feature_size += ch + + self.out = checkpoint_wrapper_fn( + nn.Sequential( + normalization(ch), + nn.SiLU(), + zero_module(conv_nd(dims, model_channels, out_channels, 3, padding=1)), + ) + ) + if self.predict_codebook_ids: + self.id_predictor = checkpoint_wrapper_fn( + nn.Sequential( + normalization(ch), + conv_nd(dims, model_channels, n_embed, 1), + # nn.LogSoftmax(dim=1) # change to cross_entropy and produce non-normalized logits + ) + ) + + def convert_to_fp16(self): + """ + Convert the torso of the model to float16. + """ + self.input_blocks.apply(convert_module_to_f16) + self.middle_block.apply(convert_module_to_f16) + self.output_blocks.apply(convert_module_to_f16) + + def convert_to_fp32(self): + """ + Convert the torso of the model to float32. + """ + self.input_blocks.apply(convert_module_to_f32) + self.middle_block.apply(convert_module_to_f32) + self.output_blocks.apply(convert_module_to_f32) + + def forward(self, x, timesteps=None, context=None, y=None, **kwargs): + """ + Apply the model to an input batch. + :param x: an [N x C x ...] Tensor of inputs. + :param timesteps: a 1-D batch of timesteps. + :param context: conditioning plugged in via crossattn + :param y: an [N] Tensor of labels, if class-conditional. + :return: an [N x C x ...] Tensor of outputs. + """ + assert (y is not None) == ( + self.num_classes is not None + ), "must specify y if and only if the model is class-conditional" + hs = [] + + t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False).to(x.dtype) + emb = self.time_embed(t_emb) + + if self.num_classes is not None: + assert y.shape[0] == x.shape[0] + emb = emb + self.label_emb(y) + + # h = x.type(self.dtype) + h = x + for module in self.input_blocks: + h = module(h, emb, context) + hs.append(h) + h = self.middle_block(h, emb, context) + for module in self.output_blocks: + h = th.cat([h, hs.pop()], dim=1) + h = module(h, emb, context) + h = h.type(x.dtype) + if self.predict_codebook_ids: + assert False, "not supported anymore. what the f*** are you doing?" + else: + return self.out(h) + + +class NoTimeUNetModel(UNetModel): + def forward(self, x, timesteps=None, context=None, y=None, **kwargs): + timesteps = th.zeros_like(timesteps) + return super().forward(x, timesteps, context, y, **kwargs) + + +class EncoderUNetModel(nn.Module): + """ + The half UNet model with attention and timestep embedding. + For usage, see UNet. 
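The encoder-only variant keeps the downsampling half and the middle block, then replaces the decoder with a pooled head selected by pool. A sketch of what the pool="adaptive" head constructed below boils down to (GroupNorm stands in for normalization(), and the real head zero-initializes its final convolution):

import torch
import torch.nn as nn

ch, out_channels = 512, 1000
adaptive_head = nn.Sequential(
    nn.GroupNorm(32, ch),
    nn.SiLU(),
    nn.AdaptiveAvgPool2d((1, 1)),    # collapse H x W to 1 x 1
    nn.Conv2d(ch, out_channels, 1),
    nn.Flatten(),
)

feats = torch.randn(2, ch, 8, 8)     # features coming out of the middle block
assert adaptive_head(feats).shape == (2, out_channels)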
+ """ + + def __init__( + self, + image_size, + in_channels, + model_channels, + out_channels, + num_res_blocks, + attention_resolutions, + dropout=0, + channel_mult=(1, 2, 4, 8), + conv_resample=True, + dims=2, + use_checkpoint=False, + use_fp16=False, + num_heads=1, + num_head_channels=-1, + num_heads_upsample=-1, + use_scale_shift_norm=False, + resblock_updown=False, + use_new_attention_order=False, + pool="adaptive", + *args, + **kwargs, + ): + super().__init__() + + if num_heads_upsample == -1: + num_heads_upsample = num_heads + + self.in_channels = in_channels + self.model_channels = model_channels + self.out_channels = out_channels + self.num_res_blocks = num_res_blocks + self.attention_resolutions = attention_resolutions + self.dropout = dropout + self.channel_mult = channel_mult + self.conv_resample = conv_resample + self.use_checkpoint = use_checkpoint + self.dtype = th.float16 if use_fp16 else th.float32 + self.num_heads = num_heads + self.num_head_channels = num_head_channels + self.num_heads_upsample = num_heads_upsample + + time_embed_dim = model_channels * 4 + self.time_embed = nn.Sequential( + linear(model_channels, time_embed_dim), + nn.SiLU(), + linear(time_embed_dim, time_embed_dim), + ) + + self.input_blocks = nn.ModuleList( + [ + TimestepEmbedSequential( + conv_nd(dims, in_channels, model_channels, 3, padding=1) + ) + ] + ) + self._feature_size = model_channels + input_block_chans = [model_channels] + ch = model_channels + ds = 1 + for level, mult in enumerate(channel_mult): + for _ in range(num_res_blocks): + layers = [ + ResBlock( + ch, + time_embed_dim, + dropout, + out_channels=mult * model_channels, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ) + ] + ch = mult * model_channels + if ds in attention_resolutions: + layers.append( + AttentionBlock( + ch, + use_checkpoint=use_checkpoint, + num_heads=num_heads, + num_head_channels=num_head_channels, + use_new_attention_order=use_new_attention_order, + ) + ) + self.input_blocks.append(TimestepEmbedSequential(*layers)) + self._feature_size += ch + input_block_chans.append(ch) + if level != len(channel_mult) - 1: + out_ch = ch + self.input_blocks.append( + TimestepEmbedSequential( + ResBlock( + ch, + time_embed_dim, + dropout, + out_channels=out_ch, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + down=True, + ) + if resblock_updown + else Downsample( + ch, conv_resample, dims=dims, out_channels=out_ch + ) + ) + ) + ch = out_ch + input_block_chans.append(ch) + ds *= 2 + self._feature_size += ch + + self.middle_block = TimestepEmbedSequential( + ResBlock( + ch, + time_embed_dim, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ), + AttentionBlock( + ch, + use_checkpoint=use_checkpoint, + num_heads=num_heads, + num_head_channels=num_head_channels, + use_new_attention_order=use_new_attention_order, + ), + ResBlock( + ch, + time_embed_dim, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ), + ) + self._feature_size += ch + self.pool = pool + if pool == "adaptive": + self.out = nn.Sequential( + normalization(ch), + nn.SiLU(), + nn.AdaptiveAvgPool2d((1, 1)), + zero_module(conv_nd(dims, ch, out_channels, 1)), + nn.Flatten(), + ) + elif pool == "attention": + assert num_head_channels != -1 + self.out = nn.Sequential( + normalization(ch), + nn.SiLU(), + AttentionPool2d( + (image_size // ds), ch, num_head_channels, out_channels + ), 
+ ) + elif pool == "spatial": + self.out = nn.Sequential( + nn.Linear(self._feature_size, 2048), + nn.ReLU(), + nn.Linear(2048, self.out_channels), + ) + elif pool == "spatial_v2": + self.out = nn.Sequential( + nn.Linear(self._feature_size, 2048), + normalization(2048), + nn.SiLU(), + nn.Linear(2048, self.out_channels), + ) + else: + raise NotImplementedError(f"Unexpected {pool} pooling") + + def convert_to_fp16(self): + """ + Convert the torso of the model to float16. + """ + self.input_blocks.apply(convert_module_to_f16) + self.middle_block.apply(convert_module_to_f16) + + def convert_to_fp32(self): + """ + Convert the torso of the model to float32. + """ + self.input_blocks.apply(convert_module_to_f32) + self.middle_block.apply(convert_module_to_f32) + + def forward(self, x, timesteps): + """ + Apply the model to an input batch. + :param x: an [N x C x ...] Tensor of inputs. + :param timesteps: a 1-D batch of timesteps. + :return: an [N x K] Tensor of outputs. + """ + emb = self.time_embed(timestep_embedding(timesteps, self.model_channels)) + + results = [] + # h = x.type(self.dtype) + h = x + for module in self.input_blocks: + h = module(h, emb) + if self.pool.startswith("spatial"): + results.append(h.type(x.dtype).mean(dim=(2, 3))) + h = self.middle_block(h, emb) + if self.pool.startswith("spatial"): + results.append(h.type(x.dtype).mean(dim=(2, 3))) + h = th.cat(results, axis=-1) + return self.out(h) + else: + h = h.type(x.dtype) + return self.out(h) + + +if __name__ == "__main__": + + class Dummy(nn.Module): + def __init__(self, in_channels=3, model_channels=64): + super().__init__() + self.input_blocks = nn.ModuleList( + [ + TimestepEmbedSequential( + conv_nd(2, in_channels, model_channels, 3, padding=1) + ) + ] + ) + + model = UNetModel( + use_checkpoint=True, + image_size=64, + in_channels=4, + out_channels=4, + model_channels=128, + attention_resolutions=[4, 2], + num_res_blocks=2, + channel_mult=[1, 2, 4], + num_head_channels=64, + use_spatial_transformer=False, + use_linear_in_transformer=True, + transformer_depth=1, + legacy=False, + ).cuda() + x = th.randn(11, 4, 64, 64).cuda() + t = th.randint(low=0, high=10, size=(11,), device="cuda") + o = model(x, t) + print("done.") diff --git a/ComfyUI-SUPIR/sgm/modules/diffusionmodules/sampling.py b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/sampling.py new file mode 100644 index 0000000000000000000000000000000000000000..9a4f9f805a23e2554f937a2499752db6ddcc1400 --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/sampling.py @@ -0,0 +1,751 @@ +""" + Partially ported from https://github.com/crowsonkb/k-diffusion/blob/master/k_diffusion/sampling.py +""" + + +from typing import Dict, Union + +import torch +from omegaconf import ListConfig, OmegaConf +from tqdm import tqdm +from comfy.k_diffusion.sampling import BrownianTreeNoiseSampler, get_sigmas_karras +from comfy.k_diffusion.sampling import BrownianTreeNoiseSampler, get_sigmas_karras +from ...modules.diffusionmodules.sampling_utils import ( + get_ancestral_step, + linear_multistep_coeff, + to_d, + to_neg_log_sigma, + to_sigma, +) +from ...util import append_dims, default, instantiate_from_config +import copy + +DEFAULT_GUIDER = {"target": ".sgm.modules.diffusionmodules.guiders.IdentityGuider"} + +import comfy.model_management +device = comfy.model_management.get_torch_device() + +class BaseDiffusionSampler: + def __init__( + self, + discretization_config: Union[Dict, ListConfig, OmegaConf], + num_steps: Union[int, None] = None, + guider_config: Union[Dict, ListConfig, 
OmegaConf, None] = None, + verbose: bool = False, + device: str = "cuda", + ): + self.num_steps = num_steps + self.discretization = instantiate_from_config(discretization_config) + self.guider = instantiate_from_config( + default( + guider_config, + DEFAULT_GUIDER, + ) + ) + self.verbose = verbose + self.device = comfy.model_management.get_torch_device() + + def prepare_sampling_loop(self, x, cond, uc=None, num_steps=None): + sigmas = self.discretization( + self.num_steps if num_steps is None else num_steps, device=self.device + ) + uc = default(uc, cond) + + x *= torch.sqrt(1.0 + sigmas[0] ** 2.0) + num_sigmas = len(sigmas) + + s_in = x.new_ones([x.shape[0]]) + + return x, s_in, sigmas, num_sigmas, cond, uc + + def denoise(self, x, denoiser, sigma, cond, uc): + denoised = denoiser(*self.guider.prepare_inputs(x, sigma, cond, uc)) + denoised = self.guider(denoised, sigma) + return denoised + + def get_sigma_gen(self, num_sigmas): + sigma_generator = range(num_sigmas - 1) + if self.verbose: + print("#" * 30, " Sampling setting ", "#" * 30) + print(f"Sampler: {self.__class__.__name__}") + print(f"Discretization: {self.discretization.__class__.__name__}") + print(f"Guider: {self.guider.__class__.__name__}") + sigma_generator = tqdm( + sigma_generator, + total=num_sigmas, + desc=f"Sampling with {self.__class__.__name__} for {num_sigmas} steps", + ) + return sigma_generator + + +class SingleStepDiffusionSampler(BaseDiffusionSampler): + def sampler_step(self, sigma, next_sigma, denoiser, x, cond, uc, *args, **kwargs): + raise NotImplementedError + + def euler_step(self, x, d, dt): + return x + dt * d + + +class EDMSampler(SingleStepDiffusionSampler): + def __init__( + self, s_churn=0.0, s_tmin=0.0, s_tmax=float("inf"), s_noise=1.0, *args, **kwargs + ): + super().__init__(*args, **kwargs) + + self.s_churn = s_churn + self.s_tmin = s_tmin + self.s_tmax = s_tmax + self.s_noise = s_noise + + def sampler_step(self, sigma, next_sigma, denoiser, x, cond, uc=None, gamma=0.0): + sigma_hat = sigma * (gamma + 1.0) + if gamma > 0: + eps = torch.randn_like(x) * self.s_noise + x = x + eps * append_dims(sigma_hat**2 - sigma**2, x.ndim) ** 0.5 + + denoised = self.denoise(x, denoiser, sigma_hat, cond, uc) + # print('denoised', denoised.mean(axis=[0, 2, 3])) + d = to_d(x, sigma_hat, denoised) + dt = append_dims(next_sigma - sigma_hat, x.ndim) + + euler_step = self.euler_step(x, d, dt) + x = self.possible_correction_step( + euler_step, x, d, dt, next_sigma, denoiser, cond, uc + ) + return x + + def __call__(self, denoiser, x, cond, uc=None, num_steps=None): + x, s_in, sigmas, num_sigmas, cond, uc = self.prepare_sampling_loop( + x, cond, uc, num_steps + ) + + for i in self.get_sigma_gen(num_sigmas): + gamma = ( + min(self.s_churn / (num_sigmas - 1), 2**0.5 - 1) + if self.s_tmin <= sigmas[i] <= self.s_tmax + else 0.0 + ) + x = self.sampler_step( + s_in * sigmas[i], + s_in * sigmas[i + 1], + denoiser, + x, + cond, + uc, + gamma, + ) + + return x + + +class AncestralSampler(SingleStepDiffusionSampler): + def __init__(self, eta=1.0, s_noise=1.0, *args, **kwargs): + super().__init__(*args, **kwargs) + + self.eta = eta + self.s_noise = s_noise + self.noise_sampler = lambda x: torch.randn_like(x) + + def ancestral_euler_step(self, x, denoised, sigma, sigma_down): + d = to_d(x, sigma, denoised) + dt = append_dims(sigma_down - sigma, x.ndim) + + return self.euler_step(x, d, dt) + + def ancestral_step(self, x, sigma, next_sigma, sigma_up): + x = torch.where( + append_dims(next_sigma, x.ndim) > 0.0, + x + self.noise_sampler(x) 
* self.s_noise * append_dims(sigma_up, x.ndim), + x, + ) + return x + + def __call__(self, denoiser, x, cond, uc=None, num_steps=None): + x, s_in, sigmas, num_sigmas, cond, uc = self.prepare_sampling_loop( + x, cond, uc, num_steps + ) + + for i in self.get_sigma_gen(num_sigmas): + x = self.sampler_step( + s_in * sigmas[i], + s_in * sigmas[i + 1], + denoiser, + x, + cond, + uc, + ) + + return x + + +class LinearMultistepSampler(BaseDiffusionSampler): + def __init__( + self, + order=4, + *args, + **kwargs, + ): + super().__init__(*args, **kwargs) + + self.order = order + + def __call__(self, denoiser, x, cond, uc=None, num_steps=None, **kwargs): + x, s_in, sigmas, num_sigmas, cond, uc = self.prepare_sampling_loop( + x, cond, uc, num_steps + ) + + ds = [] + sigmas_cpu = sigmas.detach().cpu().numpy() + for i in self.get_sigma_gen(num_sigmas): + sigma = s_in * sigmas[i] + denoised = denoiser( + *self.guider.prepare_inputs(x, sigma, cond, uc), **kwargs + ) + denoised = self.guider(denoised, sigma) + d = to_d(x, sigma, denoised) + ds.append(d) + if len(ds) > self.order: + ds.pop(0) + cur_order = min(i + 1, self.order) + coeffs = [ + linear_multistep_coeff(cur_order, sigmas_cpu, i, j) + for j in range(cur_order) + ] + x = x + sum(coeff * d for coeff, d in zip(coeffs, reversed(ds))) + + return x + + +class EulerEDMSampler(EDMSampler): + def possible_correction_step( + self, euler_step, x, d, dt, next_sigma, denoiser, cond, uc + ): + # print("euler_step: ", euler_step.mean(axis=[0, 2, 3])) + return euler_step + + +class HeunEDMSampler(EDMSampler): + def possible_correction_step( + self, euler_step, x, d, dt, next_sigma, denoiser, cond, uc + ): + if torch.sum(next_sigma) < 1e-14: + # Save a network evaluation if all noise levels are 0 + return euler_step + else: + denoised = self.denoise(euler_step, denoiser, next_sigma, cond, uc) + d_new = to_d(euler_step, next_sigma, denoised) + d_prime = (d + d_new) / 2.0 + + # apply correction if noise level is not 0 + x = torch.where( + append_dims(next_sigma, x.ndim) > 0.0, x + d_prime * dt, euler_step + ) + return x + + +class EulerAncestralSampler(AncestralSampler): + def sampler_step(self, sigma, next_sigma, denoiser, x, cond, uc): + sigma_down, sigma_up = get_ancestral_step(sigma, next_sigma, eta=self.eta) + denoised = self.denoise(x, denoiser, sigma, cond, uc) + x = self.ancestral_euler_step(x, denoised, sigma, sigma_down) + x = self.ancestral_step(x, sigma, next_sigma, sigma_up) + + return x + + +class DPMPP2SAncestralSampler(AncestralSampler): + def get_variables(self, sigma, sigma_down): + t, t_next = [to_neg_log_sigma(s) for s in (sigma, sigma_down)] + h = t_next - t + s = t + 0.5 * h + return h, s, t, t_next + + def get_mult(self, h, s, t, t_next): + mult1 = to_sigma(s) / to_sigma(t) + mult2 = (-0.5 * h).expm1() + mult3 = to_sigma(t_next) / to_sigma(t) + mult4 = (-h).expm1() + + return mult1, mult2, mult3, mult4 + + def sampler_step(self, sigma, next_sigma, denoiser, x, cond, uc=None, **kwargs): + sigma_down, sigma_up = get_ancestral_step(sigma, next_sigma, eta=self.eta) + denoised = self.denoise(x, denoiser, sigma, cond, uc) + x_euler = self.ancestral_euler_step(x, denoised, sigma, sigma_down) + + if torch.sum(sigma_down) < 1e-14: + # Save a network evaluation if all noise levels are 0 + x = x_euler + else: + h, s, t, t_next = self.get_variables(sigma, sigma_down) + mult = [ + append_dims(mult, x.ndim) for mult in self.get_mult(h, s, t, t_next) + ] + + x2 = mult[0] * x - mult[1] * denoised + denoised2 = self.denoise(x2, denoiser, to_sigma(s), cond, 
uc) + x_dpmpp2s = mult[2] * x - mult[3] * denoised2 + + # apply correction if noise level is not 0 + x = torch.where(append_dims(sigma_down, x.ndim) > 0.0, x_dpmpp2s, x_euler) + + x = self.ancestral_step(x, sigma, next_sigma, sigma_up) + return x + + +class DPMPP2MSampler(BaseDiffusionSampler): + def get_variables(self, sigma, next_sigma, previous_sigma=None): + t, t_next = [to_neg_log_sigma(s) for s in (sigma, next_sigma)] + h = t_next - t + + if previous_sigma is not None: + h_last = t - to_neg_log_sigma(previous_sigma) + r = h_last / h + return h, r, t, t_next + else: + return h, None, t, t_next + + def get_mult(self, h, r, t, t_next, previous_sigma): + mult1 = to_sigma(t_next) / to_sigma(t) + mult2 = (-h).expm1() + + if previous_sigma is not None: + mult3 = 1 + 1 / (2 * r) + mult4 = 1 / (2 * r) + return mult1, mult2, mult3, mult4 + else: + return mult1, mult2 + + def sampler_step( + self, + old_denoised, + previous_sigma, + sigma, + next_sigma, + denoiser, + x, + cond, + uc=None, + ): + denoised = self.denoise(x, denoiser, sigma, cond, uc) + + h, r, t, t_next = self.get_variables(sigma, next_sigma, previous_sigma) + mult = [ + append_dims(mult, x.ndim) + for mult in self.get_mult(h, r, t, t_next, previous_sigma) + ] + + x_standard = mult[0] * x - mult[1] * denoised + if old_denoised is None or torch.sum(next_sigma) < 1e-14: + # Save a network evaluation if all noise levels are 0 or on the first step + return x_standard, denoised + else: + denoised_d = mult[2] * denoised - mult[3] * old_denoised + x_advanced = mult[0] * x - mult[1] * denoised_d + + # apply correction if noise level is not 0 and not first step + x = torch.where( + append_dims(next_sigma, x.ndim) > 0.0, x_advanced, x_standard + ) + + return x, denoised + + def __call__(self, denoiser, x, cond, uc=None, num_steps=None, **kwargs): + x, s_in, sigmas, num_sigmas, cond, uc = self.prepare_sampling_loop( + x, cond, uc, num_steps + ) + + old_denoised = None + for i in self.get_sigma_gen(num_sigmas): + x, old_denoised = self.sampler_step( + old_denoised, + None if i == 0 else s_in * sigmas[i - 1], + s_in * sigmas[i], + s_in * sigmas[i + 1], + denoiser, + x, + cond, + uc=uc, + ) + + return x + +def to_d_center(denoised, x_center, x): + b = denoised.shape[0] + v_center = (denoised - x_center).view(b, -1) + v_denoise = (x - denoised).view(b, -1) + d_center = v_center - v_denoise * (v_center * v_denoise).sum(dim=1).view(b, 1) / \ + (v_denoise * v_denoise).sum(dim=1).view(b, 1) + d_center = d_center / d_center.view(x.shape[0], -1).norm(dim=1).view(-1, 1) + return d_center.view(denoised.shape) + +import comfy.utils + +class RestoreEDMSampler(SingleStepDiffusionSampler): + def __init__( + self, s_churn=0.0, s_tmin=0.0, s_tmax=float("inf"), s_noise=1.0, restore_cfg=4.0, + restore_cfg_s_tmin=0.05, *args, **kwargs + ): + super().__init__(*args, **kwargs) + + self.s_churn = s_churn + self.s_tmin = s_tmin + self.s_tmax = s_tmax + self.s_noise = s_noise + self.restore_cfg = restore_cfg + self.restore_cfg_s_tmin = restore_cfg_s_tmin + self.sigma_max = 14.6146 + + def denoise(self, x, denoiser, sigma, cond, uc, control_scale=1.0): + denoised = denoiser(*self.guider.prepare_inputs(x, sigma, cond, uc), control_scale) + denoised = self.guider(denoised, sigma) + return denoised + + + def sampler_step(self, sigma, next_sigma, denoiser, x, cond, uc=None, gamma=0.0, x_center=None, eps_noise=None, + control_scale=1.0, use_linear_control_scale=False, control_scale_start=0.0): + sigma_hat = sigma * (gamma + 1.0) + if gamma > 0: + if eps_noise is not None: 
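+                # Reuse the caller-supplied noise (e.g. the single noise field that the
+                # tiled sampler shares across all tiles) instead of drawing fresh noise.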
+ eps = eps_noise * self.s_noise + else: + eps = torch.randn_like(x) * self.s_noise + x = x + eps * append_dims(sigma_hat**2 - sigma**2, x.ndim) ** 0.5 + + if use_linear_control_scale: + control_scale = (sigma[0].item() / self.sigma_max) * (control_scale_start - control_scale) + control_scale + + denoised = self.denoise(x, denoiser, sigma_hat, cond, uc, control_scale=control_scale) + + if (next_sigma[0] > self.restore_cfg_s_tmin) and (self.restore_cfg > 0): + d_center = (denoised - x_center) + denoised = denoised - d_center * ((sigma.view(-1, 1, 1, 1) / self.sigma_max) ** self.restore_cfg) + + d = to_d(x, sigma_hat, denoised) + dt = append_dims(next_sigma - sigma_hat, x.ndim) + x = self.euler_step(x, d, dt) + return x + + def __call__(self, denoiser, x, cond, uc=None, num_steps=None, x_center=None, control_scale=1.0, + use_linear_control_scale=False, control_scale_start=0.0): + x, s_in, sigmas, num_sigmas, cond, uc = self.prepare_sampling_loop( + x, cond, uc, num_steps + ) + pbar_comfy = comfy.utils.ProgressBar(num_sigmas) + for _idx, i in enumerate(self.get_sigma_gen(num_sigmas)): + gamma = ( + min(self.s_churn / (num_sigmas - 1), 2**0.5 - 1) + if self.s_tmin <= sigmas[i] <= self.s_tmax + else 0.0 + ) + x = self.sampler_step( + s_in * sigmas[i], + s_in * sigmas[i + 1], + denoiser, + x, + cond, + uc, + gamma, + x_center, + control_scale=control_scale, + use_linear_control_scale=use_linear_control_scale, + control_scale_start=control_scale_start, + ) + pbar_comfy.update(1) + return x + +class TiledRestoreEDMSampler(RestoreEDMSampler): + def __init__(self, tile_size=128, tile_stride=64, *args, **kwargs): + super().__init__(*args, **kwargs) + self.tile_size = tile_size + self.tile_stride = tile_stride + self.tile_weights = gaussian_weights(self.tile_size, self.tile_size, 1) + + def __call__(self, denoiser, x, cond, uc=None, num_steps=None, x_center=None, control_scale=1.0, + use_linear_control_scale=False, control_scale_start=0.0): + cond_copy = copy.deepcopy(cond) + uc_copy = copy.deepcopy(uc) + use_local_prompt = isinstance(cond_copy, list) + b, _, h, w = x.shape + latent_tiles_iterator = _sliding_windows(h, w, self.tile_size, self.tile_stride) + tile_weights = self.tile_weights.repeat(b, 1, 1, 1) + if not use_local_prompt: + LQ_latent = cond_copy['control'] + else: + assert len(cond_copy) == len(latent_tiles_iterator), "Number of local prompts should be equal to number of tiles" + LQ_latent = cond_copy[0]['control'] + clean_LQ_latent = x_center + x, s_in, sigmas, num_sigmas, cond_copy, uc_copy = self.prepare_sampling_loop( + x, cond_copy, uc_copy, num_steps + ) + pbar_comfy = comfy.utils.ProgressBar(num_sigmas) + for _idx, i in enumerate(self.get_sigma_gen(num_sigmas)): + gamma = ( + min(self.s_churn / (num_sigmas - 1), 2**0.5 - 1) + if self.s_tmin <= sigmas[i] <= self.s_tmax + else 0.0 + ) + x_next = torch.zeros_like(x) + count = torch.zeros_like(x) + eps_noise = torch.randn_like(x) + for j, (hi, hi_end, wi, wi_end) in enumerate(latent_tiles_iterator): + x_tile = x[:, :, hi:hi_end, wi:wi_end] + _eps_noise = eps_noise[:, :, hi:hi_end, wi:wi_end] + x_center_tile = clean_LQ_latent[:, :, hi:hi_end, wi:wi_end] + if use_local_prompt: + _cond = cond_copy[j] + else: + _cond = cond_copy + _cond['control'] = LQ_latent[:, :, hi:hi_end, wi:wi_end] + uc_copy['control'] = LQ_latent[:, :, hi:hi_end, wi:wi_end] + _x = self.sampler_step( + s_in * sigmas[i], + s_in * sigmas[i + 1], + denoiser, + x_tile, + _cond, + uc_copy, + gamma, + x_center_tile, + eps_noise=_eps_noise, + control_scale=control_scale, + 
use_linear_control_scale=use_linear_control_scale, + control_scale_start=control_scale_start, + ) + x_next[:, :, hi:hi_end, wi:wi_end] += _x * tile_weights + count[:, :, hi:hi_end, wi:wi_end] += tile_weights + x_next /= count + x = x_next + pbar_comfy.update(1) + return x + + +def gaussian_weights(tile_width, tile_height, nbatches): + """Generates a gaussian mask of weights for tile contributions""" + from numpy import pi, exp, sqrt + import numpy as np + + latent_width = tile_width + latent_height = tile_height + + var = 0.01 + midpoint = (latent_width - 1) / 2 # -1 because index goes from 0 to latent_width - 1 + x_probs = [exp(-(x - midpoint) * (x - midpoint) / (latent_width * latent_width) / (2 * var)) / sqrt(2 * pi * var) + for x in range(latent_width)] + midpoint = latent_height / 2 + y_probs = [exp(-(y - midpoint) * (y - midpoint) / (latent_height * latent_height) / (2 * var)) / sqrt(2 * pi * var) + for y in range(latent_height)] + + if comfy.model_management.is_device_mps(device): + weights = np.outer(y_probs, x_probs).astype(np.float32) + else: + weights = np.outer(y_probs, x_probs) + + return torch.tile(torch.tensor(weights, device=device), (nbatches, 4, 1, 1)) + + +def _sliding_windows(h: int, w: int, tile_size: int, tile_stride: int): + hi_list = list(range(0, h - tile_size + 1, tile_stride)) + if (h - tile_size) % tile_stride != 0: + hi_list.append(h - tile_size) + + wi_list = list(range(0, w - tile_size + 1, tile_stride)) + if (w - tile_size) % tile_stride != 0: + wi_list.append(w - tile_size) + + coords = [] + for hi in hi_list: + for wi in wi_list: + coords.append((hi, hi + tile_size, wi, wi + tile_size)) + return coords + +class RestoreDPMPP2MSampler(DPMPP2MSampler): + def __init__(self, s_churn=0.0, s_tmin=0.0, s_tmax=float("inf"), s_noise=1.0, restore_cfg=4.0, + restore_cfg_s_tmin=0.05, eta=1., *args, **kwargs): + self.s_noise = s_noise + self.eta = eta + self.restore_cfg = restore_cfg + self.restore_cfg_s_tmin = restore_cfg_s_tmin + self.sigma_max = 14.6146 + super().__init__(*args, **kwargs) + + def denoise(self, x, denoiser, sigma, cond, uc, control_scale=1.0): + denoised = denoiser(*self.guider.prepare_inputs(x, sigma, cond, uc), control_scale) + denoised = self.guider(denoised, sigma) + return denoised + + def get_mult(self, h, r, t, t_next, previous_sigma): + eta_h = self.eta * h + mult1 = to_sigma(t_next) / to_sigma(t) * (-eta_h).exp() + mult2 = (-h -eta_h).expm1() + + if previous_sigma is not None: + mult3 = 1 + 1 / (2 * r) + mult4 = 1 / (2 * r) + return mult1, mult2, mult3, mult4 + else: + return mult1, mult2 + + + def sampler_step( + self, + old_denoised, + previous_sigma, + sigma, + next_sigma, + denoiser, + x, + cond, + uc=None, + eps_noise=None, + x_center=None, + control_scale=1.0, + use_linear_control_scale=False, + control_scale_start=0.0 + ): + if use_linear_control_scale: + control_scale = (sigma[0].item() / self.sigma_max) * (control_scale_start - control_scale) + control_scale + + denoised = self.denoise(x, denoiser, sigma, cond, uc, control_scale=control_scale) + + if (next_sigma[0] > self.restore_cfg_s_tmin) and (self.restore_cfg > 0): + d_center = (denoised - x_center) + denoised = denoised - d_center * ((sigma.view(-1, 1, 1, 1) / self.sigma_max) ** self.restore_cfg) + + h, r, t, t_next = self.get_variables(sigma, next_sigma, previous_sigma) + eta_h = self.eta * h + mult = [ + append_dims(mult, x.ndim) + for mult in self.get_mult(h, r, t, t_next, previous_sigma) + ] + + x_standard = mult[0] * x - mult[1] * denoised + if old_denoised is None or 
torch.sum(next_sigma) < 1e-14: + # Save a network evaluation if all noise levels are 0 or on the first step + return x_standard, denoised + else: + denoised_d = mult[2] * denoised - mult[3] * old_denoised + x_advanced = mult[0] * x - mult[1] * denoised_d + + # apply correction if noise level is not 0 and not first step + x = torch.where( + append_dims(next_sigma, x.ndim) > 0.0, x_advanced, x_standard + ) + if self.eta: + x = x + eps_noise * next_sigma * (-2 * eta_h).expm1().neg().sqrt() * self.s_noise + + return x, denoised + + def __call__(self, denoiser, x, cond, uc=None, num_steps=None, x_center=None, control_scale=1.0, + use_linear_control_scale=False, control_scale_start=0.0, **kwargs): + x, s_in, sigmas, num_sigmas, cond, uc = self.prepare_sampling_loop( + x, cond, uc, num_steps + ) + sigmas_min, sigmas_max = sigmas[-2].cpu(), sigmas[0].cpu() + sigmas_new = get_sigmas_karras(self.num_steps, sigmas_min, sigmas_max, device=x.device) + sigmas = sigmas_new + + noise_sampler = BrownianTreeNoiseSampler(x, sigmas_min, sigmas_max) + + old_denoised = None + pbar_comfy = comfy.utils.ProgressBar(num_sigmas) + for i in self.get_sigma_gen(num_sigmas): + if i > 0 and torch.sum(s_in * sigmas[i + 1]) > 1e-14: + eps_noise = noise_sampler(s_in * sigmas[i], s_in * sigmas[i + 1]) + else: + eps_noise = None + x, old_denoised = self.sampler_step( + old_denoised, + None if i == 0 else s_in * sigmas[i - 1], + s_in * sigmas[i], + s_in * sigmas[i + 1], + denoiser, + x, + cond, + uc=uc, + eps_noise=eps_noise, + control_scale=control_scale, + x_center=x_center, + use_linear_control_scale=use_linear_control_scale, + control_scale_start=control_scale_start, + ) + pbar_comfy.update(1) + + return x + +class TiledRestoreDPMPP2MSampler(RestoreDPMPP2MSampler): + def __init__(self, tile_size=128, tile_stride=64, *args, **kwargs): + + super().__init__(*args, **kwargs) + self.tile_size = tile_size + self.tile_stride = tile_stride + self.tile_weights = gaussian_weights(self.tile_size, self.tile_size, 1) + + def __call__(self, denoiser, x, cond, uc=None, num_steps=None, control_scale=1.0, **kwargs): + use_local_prompt = isinstance(cond, list) + b, _, h, w = x.shape + latent_tiles_iterator = _sliding_windows(h, w, self.tile_size, self.tile_stride) + print(f"Image divided into {len(latent_tiles_iterator)} tiles") + print("Conds received: ", len(cond)) + cond_copy = copy.deepcopy(cond) + uc_copy = copy.deepcopy(uc) + tile_weights = self.tile_weights.repeat(b, 1, 1, 1) + if not use_local_prompt: + LQ_latent = cond['control'] + else: + assert len(cond_copy) == len(latent_tiles_iterator), "Number of local prompts should be equal to number of tiles" + LQ_latent = cond_copy[0]['control'] + print("LQ_latent shape: ",LQ_latent.shape) + x, s_in, sigmas, num_sigmas, cond_copy, uc_copy = self.prepare_sampling_loop( + x, cond_copy, uc_copy, num_steps + ) + sigmas_min, sigmas_max = sigmas[-2].cpu(), sigmas[0].cpu() + sigmas_new = get_sigmas_karras(self.num_steps, sigmas_min, sigmas_max, device=x.device) + sigmas = sigmas_new + + noise_sampler = BrownianTreeNoiseSampler(x, sigmas_min, sigmas_max) + + old_denoised = None + pbar_comfy = comfy.utils.ProgressBar(num_sigmas) + for _idx, i in enumerate(self.get_sigma_gen(num_sigmas)): + if i > 0 and torch.sum(s_in * sigmas[i + 1]) > 1e-14: + eps_noise = noise_sampler(s_in * sigmas[i], s_in * sigmas[i + 1]) + else: + eps_noise = torch.zeros_like(x) + x_next = torch.zeros_like(x) + old_denoised_next = torch.zeros_like(x) + count = torch.zeros_like(x) + for j, (hi, hi_end, wi, wi_end) in 
enumerate(latent_tiles_iterator): + x_tile = x[:, :, hi:hi_end, wi:wi_end] + _eps_noise = eps_noise[:, :, hi:hi_end, wi:wi_end] + if old_denoised is not None: + old_denoised_tile = old_denoised[:, :, hi:hi_end, wi:wi_end] + else: + old_denoised_tile = None + if use_local_prompt: + _cond = cond_copy[j] + else: + _cond = cond_copy + _cond['control'] = LQ_latent[:, :, hi:hi_end, wi:wi_end] + uc_copy['control'] = LQ_latent[:, :, hi:hi_end, wi:wi_end] + _x, _old_denoised = self.sampler_step( + old_denoised_tile, + None if i == 0 else s_in * sigmas[i - 1], + s_in * sigmas[i], + s_in * sigmas[i + 1], + denoiser, + x_tile, + _cond, + uc=uc_copy, + eps_noise=_eps_noise, + control_scale=control_scale, + ) + x_next[:, :, hi:hi_end, wi:wi_end] += _x * tile_weights + old_denoised_next[:, :, hi:hi_end, wi:wi_end] += _old_denoised * tile_weights + count[:, :, hi:hi_end, wi:wi_end] += tile_weights + old_denoised_next /= count + x_next /= count + x = x_next + old_denoised = old_denoised_next + pbar_comfy.update(1) + return x diff --git a/ComfyUI-SUPIR/sgm/modules/diffusionmodules/sampling_utils.py b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/sampling_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..82bc5faa91fe12cacdf89e5c730b436801463eeb --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/sampling_utils.py @@ -0,0 +1,48 @@ +import torch +from scipy import integrate + +from ...util import append_dims + + +class NoDynamicThresholding: + def __call__(self, uncond, cond, scale): + return uncond + scale.view(-1, 1, 1, 1) * (cond - uncond) + + +def linear_multistep_coeff(order, t, i, j, epsrel=1e-4): + if order - 1 > i: + raise ValueError(f"Order {order} too high for step {i}") + + def fn(tau): + prod = 1.0 + for k in range(order): + if j == k: + continue + prod *= (tau - t[i - k]) / (t[i - j] - t[i - k]) + return prod + + return integrate.quad(fn, t[i], t[i + 1], epsrel=epsrel)[0] + + +def get_ancestral_step(sigma_from, sigma_to, eta=1.0): + if not eta: + return sigma_to, 0.0 + sigma_up = torch.minimum( + sigma_to, + eta + * (sigma_to**2 * (sigma_from**2 - sigma_to**2) / sigma_from**2) ** 0.5, + ) + sigma_down = (sigma_to**2 - sigma_up**2) ** 0.5 + return sigma_down, sigma_up + + +def to_d(x, sigma, denoised): + return (x - denoised) / append_dims(sigma, x.ndim) + + +def to_neg_log_sigma(sigma): + return sigma.log().neg() + + +def to_sigma(neg_log_sigma): + return neg_log_sigma.neg().exp() diff --git a/ComfyUI-SUPIR/sgm/modules/diffusionmodules/sigma_sampling.py b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/sigma_sampling.py new file mode 100644 index 0000000000000000000000000000000000000000..369135f8a1daa63e1141aa604b759eb0b2ab6203 --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/sigma_sampling.py @@ -0,0 +1,40 @@ +import torch + +from ...util import default, instantiate_from_config + + +class EDMSampling: + def __init__(self, p_mean=-1.2, p_std=1.2): + self.p_mean = p_mean + self.p_std = p_std + + def __call__(self, n_samples, rand=None): + log_sigma = self.p_mean + self.p_std * default(rand, torch.randn((n_samples,))) + return log_sigma.exp() + + +class DiscreteSampling: + def __init__(self, discretization_config, num_idx, do_append_zero=False, flip=True, idx_range=None): + self.num_idx = num_idx + self.sigmas = instantiate_from_config(discretization_config)( + num_idx, do_append_zero=do_append_zero, flip=flip + ) + self.idx_range = idx_range + + def idx_to_sigma(self, idx): + # print(self.sigmas[idx]) + return self.sigmas[idx] + + def __call__(self, 
n_samples, rand=None): + if self.idx_range is None: + idx = default( + rand, + torch.randint(0, self.num_idx, (n_samples,)), + ) + else: + idx = default( + rand, + torch.randint(self.idx_range[0], self.idx_range[1], (n_samples,)), + ) + return self.idx_to_sigma(idx) + diff --git a/ComfyUI-SUPIR/sgm/modules/diffusionmodules/util.py b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/util.py new file mode 100644 index 0000000000000000000000000000000000000000..bc357571dcc6372319ceb075cf7402bf0d737953 --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/util.py @@ -0,0 +1,315 @@ +""" +adopted from +https://github.com/openai/improved-diffusion/blob/main/improved_diffusion/gaussian_diffusion.py +and +https://github.com/lucidrains/denoising-diffusion-pytorch/blob/7706bdfc6f527f58d33f84b7b522e61e6e3164b3/denoising_diffusion_pytorch/denoising_diffusion_pytorch.py +and +https://github.com/openai/guided-diffusion/blob/0ba878e517b276c45d1195eb29f6f5f72659a05b/guided_diffusion/nn.py + +thanks! +""" + +import math + +import torch +import torch.nn as nn +from einops import repeat + +import comfy.model_management +device = comfy.model_management.get_torch_device() +from contextlib import nullcontext + +import comfy.ops +ops = comfy.ops.manual_cast + +def make_beta_schedule( + schedule, + n_timestep, + linear_start=1e-4, + linear_end=2e-2, +): + if schedule == "linear": + betas = ( + torch.linspace( + linear_start**0.5, linear_end**0.5, n_timestep, dtype=torch.float64 + ) + ** 2 + ) + return betas.numpy() + + +def extract_into_tensor(a, t, x_shape): + b, *_ = t.shape + out = a.gather(-1, t) + return out.reshape(b, *((1,) * (len(x_shape) - 1))) + + +def mixed_checkpoint(func, inputs: dict, params, flag): + """ + Evaluate a function without caching intermediate activations, allowing for + reduced memory at the expense of extra compute in the backward pass. This differs from the original checkpoint function + borrowed from https://github.com/openai/guided-diffusion/blob/0ba878e517b276c45d1195eb29f6f5f72659a05b/guided_diffusion/nn.py in that + it also works with non-tensor inputs + :param func: the function to evaluate. + :param inputs: the argument dictionary to pass to `func`. + :param params: a sequence of parameters `func` depends on but does not + explicitly take as arguments. + :param flag: if False, disable gradient checkpointing. 
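+    :return: the output of func(**inputs); when flag is True, intermediate
+        activations are recomputed in the backward pass instead of being stored.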
+ """ + if flag: + tensor_keys = [key for key in inputs if isinstance(inputs[key], torch.Tensor)] + tensor_inputs = [ + inputs[key] for key in inputs if isinstance(inputs[key], torch.Tensor) + ] + non_tensor_keys = [ + key for key in inputs if not isinstance(inputs[key], torch.Tensor) + ] + non_tensor_inputs = [ + inputs[key] for key in inputs if not isinstance(inputs[key], torch.Tensor) + ] + args = tuple(tensor_inputs) + tuple(non_tensor_inputs) + tuple(params) + return MixedCheckpointFunction.apply( + func, + len(tensor_inputs), + len(non_tensor_inputs), + tensor_keys, + non_tensor_keys, + *args, + ) + else: + return func(**inputs) + + +class MixedCheckpointFunction(torch.autograd.Function): + @staticmethod + def forward( + ctx, + run_function, + length_tensors, + length_non_tensors, + tensor_keys, + non_tensor_keys, + *args, + ): + ctx.end_tensors = length_tensors + ctx.end_non_tensors = length_tensors + length_non_tensors + ctx.gpu_autocast_kwargs = { + "enabled": torch.is_autocast_enabled(), + "dtype": torch.get_autocast_gpu_dtype(), + "cache_enabled": torch.is_autocast_cache_enabled(), + } + assert ( + len(tensor_keys) == length_tensors + and len(non_tensor_keys) == length_non_tensors + ) + + ctx.input_tensors = { + key: val for (key, val) in zip(tensor_keys, list(args[: ctx.end_tensors])) + } + ctx.input_non_tensors = { + key: val + for (key, val) in zip( + non_tensor_keys, list(args[ctx.end_tensors : ctx.end_non_tensors]) + ) + } + ctx.run_function = run_function + ctx.input_params = list(args[ctx.end_non_tensors :]) + + with torch.no_grad(): + output_tensors = ctx.run_function( + **ctx.input_tensors, **ctx.input_non_tensors + ) + return output_tensors + + @staticmethod + def backward(ctx, *output_grads): + # additional_args = {key: ctx.input_tensors[key] for key in ctx.input_tensors if not isinstance(ctx.input_tensors[key],torch.Tensor)} + ctx.input_tensors = { + key: ctx.input_tensors[key].detach().requires_grad_(True) + for key in ctx.input_tensors + } + + with torch.enable_grad(), torch.cuda.amp.autocast(**ctx.gpu_autocast_kwargs): + # Fixes a bug where the first op in run_function modifies the + # Tensor storage in place, which is not allowed for detach()'d + # Tensors. + shallow_copies = { + key: ctx.input_tensors[key].view_as(ctx.input_tensors[key]) + for key in ctx.input_tensors + } + # shallow_copies.update(additional_args) + output_tensors = ctx.run_function(**shallow_copies, **ctx.input_non_tensors) + input_grads = torch.autograd.grad( + output_tensors, + list(ctx.input_tensors.values()) + ctx.input_params, + output_grads, + allow_unused=True, + ) + del ctx.input_tensors + del ctx.input_params + del output_tensors + return ( + (None, None, None, None, None) + + input_grads[: ctx.end_tensors] + + (None,) * (ctx.end_non_tensors - ctx.end_tensors) + + input_grads[ctx.end_tensors :] + ) + + +def checkpoint(func, inputs, params, flag): + """ + Evaluate a function without caching intermediate activations, allowing for + reduced memory at the expense of extra compute in the backward pass. + :param func: the function to evaluate. + :param inputs: the argument sequence to pass to `func`. + :param params: a sequence of parameters `func` depends on but does not + explicitly take as arguments. + :param flag: if False, disable gradient checkpointing. 
+ """ + if flag: + args = tuple(inputs) + tuple(params) + return CheckpointFunction.apply(func, len(inputs), *args) + else: + return func(*inputs) + + +class CheckpointFunction(torch.autograd.Function): + @staticmethod + def forward(ctx, run_function, length, *args): + ctx.run_function = run_function + ctx.input_tensors = list(args[:length]) + ctx.input_params = list(args[length:]) + ctx.gpu_autocast_kwargs = { + "enabled": torch.is_autocast_enabled(), + "dtype": torch.get_autocast_gpu_dtype(), + "cache_enabled": torch.is_autocast_cache_enabled(), + } + with torch.no_grad(): + output_tensors = ctx.run_function(*ctx.input_tensors) + return output_tensors + + @staticmethod + def backward(ctx, *output_grads): + ctx.input_tensors = [x.detach().requires_grad_(True) for x in ctx.input_tensors] + autocast_condition = (ctx.input_tensors.dtype == torch.float16 or ctx.input_tensors.dtype == torch.bfloat16) and not comfy.model_management.is_device_mps(device) + with torch.autocast(comfy.model_management.get_autocast_device(device), dtype=ctx.input_tensors.dtype) if autocast_condition else nullcontext(): + # Fixes a bug where the first op in run_function modifies the + # Tensor storage in place, which is not allowed for detach()'d + # Tensors. + shallow_copies = [x.view_as(x) for x in ctx.input_tensors] + output_tensors = ctx.run_function(*shallow_copies) + input_grads = torch.autograd.grad( + output_tensors, + ctx.input_tensors + ctx.input_params, + output_grads, + allow_unused=True, + ) + del ctx.input_tensors + del ctx.input_params + del output_tensors + return (None, None) + input_grads + + +def timestep_embedding(timesteps, dim, max_period=10000, repeat_only=False): + """ + Create sinusoidal timestep embeddings. + :param timesteps: a 1-D Tensor of N indices, one per batch element. + These may be fractional. + :param dim: the dimension of the output. + :param max_period: controls the minimum frequency of the embeddings. + :return: an [N x dim] Tensor of positional embeddings. + """ + if not repeat_only: + half = dim // 2 + freqs = torch.exp( + -math.log(max_period) + * torch.arange(start=0, end=half, dtype=torch.float32) + / half + ).to(device=timesteps.device) + args = timesteps[:, None].float() * freqs[None] + embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) + if dim % 2: + embedding = torch.cat( + [embedding, torch.zeros_like(embedding[:, :1])], dim=-1 + ) + else: + embedding = repeat(timesteps, "b -> b d", d=dim) + return embedding + + +def zero_module(module): + """ + Zero out the parameters of a module and return it. + """ + for p in module.parameters(): + p.detach().zero_() + return module + + +def scale_module(module, scale): + """ + Scale the parameters of a module and return it. + """ + for p in module.parameters(): + p.detach().mul_(scale) + return module + + +def mean_flat(tensor): + """ + Take the mean over all non-batch dimensions. + """ + return tensor.mean(dim=list(range(1, len(tensor.shape)))) + + +def normalization(channels): + """ + Make a standard normalization layer. + :param channels: number of input channels. + :return: an nn.Module for normalization. + """ + return GroupNorm32(32, channels) + + +# PyTorch 1.7 has SiLU, but we support PyTorch 1.5. +class SiLU(nn.Module): + def forward(self, x): + return x * torch.sigmoid(x) + + +class GroupNorm32(nn.GroupNorm): + def forward(self, x): + # return super().forward(x.float()).type(x.dtype) + return super().forward(x) + +def conv_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D convolution module. 
+ """ + if dims == 1: + return ops.Conv1d(*args, **kwargs) + elif dims == 2: + return ops.Conv2d(*args, **kwargs) + elif dims == 3: + return ops.Conv3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +def linear(*args, **kwargs): + """ + Create a linear module. + """ + return ops.Linear(*args, **kwargs) + + +def avg_pool_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D average pooling module. + """ + if dims == 1: + return nn.AvgPool1d(*args, **kwargs) + elif dims == 2: + return nn.AvgPool2d(*args, **kwargs) + elif dims == 3: + return nn.AvgPool3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") diff --git a/ComfyUI-SUPIR/sgm/modules/diffusionmodules/wrappers.py b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/wrappers.py new file mode 100644 index 0000000000000000000000000000000000000000..a899e8e92bd335851b2aa4fa67860cfc5edc2940 --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/diffusionmodules/wrappers.py @@ -0,0 +1,106 @@ +import torch +import torch.nn as nn +from packaging import version +# import torch._dynamo +# torch._dynamo.config.suppress_errors = True +# torch._dynamo.config.cache_size_limit = 512 + +OPENAIUNETWRAPPER = ".sgm.modules.diffusionmodules.wrappers.OpenAIWrapper" +import comfy.model_management +from contextlib import nullcontext +device = comfy.model_management.get_torch_device() + +class IdentityWrapper(nn.Module): + def __init__(self, diffusion_model, compile_model: bool = False): + super().__init__() + compile = ( + torch.compile + if (version.parse(torch.__version__) >= version.parse("2.0.0")) + and compile_model + else lambda x: x + ) + self.diffusion_model = compile(diffusion_model) + + def forward(self, *args, **kwargs): + return self.diffusion_model(*args, **kwargs) + + +class OpenAIWrapper(IdentityWrapper): + def forward( + self, x: torch.Tensor, t: torch.Tensor, c: dict, **kwargs + ) -> torch.Tensor: + x = torch.cat((x, c.get("concat", torch.Tensor([]).type_as(x))), dim=1) + return self.diffusion_model( + x, + timesteps=t, + context=c.get("crossattn", None), + y=c.get("vector", None), + **kwargs, + ) + + +class OpenAIHalfWrapper(IdentityWrapper): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.diffusion_model = self.diffusion_model.half() + + def forward( + self, x: torch.Tensor, t: torch.Tensor, c: dict, **kwargs + ) -> torch.Tensor: + x = torch.cat((x, c.get("concat", torch.Tensor([]).type_as(x))), dim=1) + _context = c.get("crossattn", None) + _y = c.get("vector", None) + if _context is not None: + _context = _context.half() + if _y is not None: + _y = _y.half() + x = x.half() + t = t.half() + + out = self.diffusion_model( + x, + timesteps=t, + context=_context, + y=_y, + **kwargs, + ) + return out.float() + + +class ControlWrapper(nn.Module): + def __init__(self, diffusion_model, compile_model: bool = False, dtype=torch.float32): + super().__init__() + self.compile = ( + torch.compile + if (version.parse(torch.__version__) >= version.parse("2.0.0")) + and compile_model + else lambda x: x + ) + self.diffusion_model = self.compile(diffusion_model) + self.control_model = None + self.dtype = dtype + + def load_control_model(self, control_model): + self.control_model = self.compile(control_model) + + def forward( + self, x: torch.Tensor, t: torch.Tensor, c: dict, control_scale=1, **kwargs + ) -> torch.Tensor: + autocast_condition = (self.dtype == torch.float16 or self.dtype == torch.bfloat16) and not comfy.model_management.is_device_mps(device) + with 
torch.autocast(comfy.model_management.get_autocast_device(device), dtype=self.dtype) if autocast_condition else nullcontext(): + control = self.control_model(x=c.get("control", None), timesteps=t, xt=x, + control_vector=c.get("control_vector", None), + mask_x=c.get("mask_x", None), + context=c.get("crossattn", None), + y=c.get("vector", None)) + out = self.diffusion_model( + x, + timesteps=t, + context=c.get("crossattn", None), + y=c.get("vector", None), + control=control, + control_scale=control_scale, + **kwargs, + ) + return out.float() + diff --git a/ComfyUI-SUPIR/sgm/modules/distributions/__init__.py b/ComfyUI-SUPIR/sgm/modules/distributions/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI-SUPIR/sgm/modules/distributions/distributions.py b/ComfyUI-SUPIR/sgm/modules/distributions/distributions.py new file mode 100644 index 0000000000000000000000000000000000000000..016be35523187ea366db9ade391fe8ee276db60b --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/distributions/distributions.py @@ -0,0 +1,102 @@ +import numpy as np +import torch + + +class AbstractDistribution: + def sample(self): + raise NotImplementedError() + + def mode(self): + raise NotImplementedError() + + +class DiracDistribution(AbstractDistribution): + def __init__(self, value): + self.value = value + + def sample(self): + return self.value + + def mode(self): + return self.value + + +class DiagonalGaussianDistribution(object): + def __init__(self, parameters, deterministic=False): + self.parameters = parameters + self.mean, self.logvar = torch.chunk(parameters, 2, dim=1) + self.logvar = torch.clamp(self.logvar, -30.0, 20.0) + self.deterministic = deterministic + self.std = torch.exp(0.5 * self.logvar) + self.var = torch.exp(self.logvar) + if self.deterministic: + self.var = self.std = torch.zeros_like(self.mean).to( + device=self.parameters.device + ) + + def sample(self): + x = self.mean + self.std * torch.randn(self.mean.shape).to( + device=self.parameters.device + ) + return x + + def kl(self, other=None): + if self.deterministic: + return torch.Tensor([0.0]) + else: + if other is None: + return 0.5 * torch.sum( + torch.pow(self.mean, 2) + self.var - 1.0 - self.logvar, + dim=[1, 2, 3], + ) + else: + return 0.5 * torch.sum( + torch.pow(self.mean - other.mean, 2) / other.var + + self.var / other.var + - 1.0 + - self.logvar + + other.logvar, + dim=[1, 2, 3], + ) + + def nll(self, sample, dims=[1, 2, 3]): + if self.deterministic: + return torch.Tensor([0.0]) + logtwopi = np.log(2.0 * np.pi) + return 0.5 * torch.sum( + logtwopi + self.logvar + torch.pow(sample - self.mean, 2) / self.var, + dim=dims, + ) + + def mode(self): + return self.mean + + +def normal_kl(mean1, logvar1, mean2, logvar2): + """ + source: https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/losses.py#L12 + Compute the KL divergence between two gaussians. + Shapes are automatically broadcasted, so batches can be compared to + scalars, among other use cases. + """ + tensor = None + for obj in (mean1, logvar1, mean2, logvar2): + if isinstance(obj, torch.Tensor): + tensor = obj + break + assert tensor is not None, "at least one argument must be a Tensor" + + # Force variances to be Tensors. Broadcasting helps convert scalars to + # Tensors, but it does not work for torch.exp(). 
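+    # With everything as tensors, the value returned below is the closed-form
+    # elementwise KL divergence KL(N(mean1, exp(logvar1)) || N(mean2, exp(logvar2))).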
+ logvar1, logvar2 = [ + x if isinstance(x, torch.Tensor) else torch.tensor(x).to(tensor) + for x in (logvar1, logvar2) + ] + + return 0.5 * ( + -1.0 + + logvar2 + - logvar1 + + torch.exp(logvar1 - logvar2) + + ((mean1 - mean2) ** 2) * torch.exp(-logvar2) + ) diff --git a/ComfyUI-SUPIR/sgm/modules/ema.py b/ComfyUI-SUPIR/sgm/modules/ema.py new file mode 100644 index 0000000000000000000000000000000000000000..97b5ae2b230f89b4dba57e44c4f851478ad86f68 --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/ema.py @@ -0,0 +1,86 @@ +import torch +from torch import nn + + +class LitEma(nn.Module): + def __init__(self, model, decay=0.9999, use_num_upates=True): + super().__init__() + if decay < 0.0 or decay > 1.0: + raise ValueError("Decay must be between 0 and 1") + + self.m_name2s_name = {} + self.register_buffer("decay", torch.tensor(decay, dtype=torch.float32)) + self.register_buffer( + "num_updates", + torch.tensor(0, dtype=torch.int) + if use_num_upates + else torch.tensor(-1, dtype=torch.int), + ) + + for name, p in model.named_parameters(): + if p.requires_grad: + # remove as '.'-character is not allowed in buffers + s_name = name.replace(".", "") + self.m_name2s_name.update({name: s_name}) + self.register_buffer(s_name, p.clone().detach().data) + + self.collected_params = [] + + def reset_num_updates(self): + del self.num_updates + self.register_buffer("num_updates", torch.tensor(0, dtype=torch.int)) + + def forward(self, model): + decay = self.decay + + if self.num_updates >= 0: + self.num_updates += 1 + decay = min(self.decay, (1 + self.num_updates) / (10 + self.num_updates)) + + one_minus_decay = 1.0 - decay + + with torch.no_grad(): + m_param = dict(model.named_parameters()) + shadow_params = dict(self.named_buffers()) + + for key in m_param: + if m_param[key].requires_grad: + sname = self.m_name2s_name[key] + shadow_params[sname] = shadow_params[sname].type_as(m_param[key]) + shadow_params[sname].sub_( + one_minus_decay * (shadow_params[sname] - m_param[key]) + ) + else: + assert not key in self.m_name2s_name + + def copy_to(self, model): + m_param = dict(model.named_parameters()) + shadow_params = dict(self.named_buffers()) + for key in m_param: + if m_param[key].requires_grad: + m_param[key].data.copy_(shadow_params[self.m_name2s_name[key]].data) + else: + assert not key in self.m_name2s_name + + def store(self, parameters): + """ + Save the current parameters for restoring later. + Args: + parameters: Iterable of `torch.nn.Parameter`; the parameters to be + temporarily stored. + """ + self.collected_params = [param.clone() for param in parameters] + + def restore(self, parameters): + """ + Restore the parameters stored with the `store` method. + Useful to validate the model with EMA parameters without affecting the + original optimization process. Store the parameters before the + `copy_to` method. After validation (or model saving), use this to + restore the former parameters. + Args: + parameters: Iterable of `torch.nn.Parameter`; the parameters to be + updated with the stored parameters. 
+ """ + for c_param, param in zip(self.collected_params, parameters): + param.data.copy_(c_param.data) diff --git a/ComfyUI-SUPIR/sgm/modules/encoders/__init__.py b/ComfyUI-SUPIR/sgm/modules/encoders/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI-SUPIR/sgm/modules/encoders/modules.py b/ComfyUI-SUPIR/sgm/modules/encoders/modules.py new file mode 100644 index 0000000000000000000000000000000000000000..a17e8e93e9549c5f48d3b9d0fe993ab131dc8011 --- /dev/null +++ b/ComfyUI-SUPIR/sgm/modules/encoders/modules.py @@ -0,0 +1,1092 @@ +from contextlib import nullcontext +from functools import partial +from typing import Dict, List, Optional, Tuple, Union + +#import kornia +import numpy as np +import open_clip +import torch +import torch.nn as nn +from einops import rearrange, repeat +from omegaconf import ListConfig +from torch.utils.checkpoint import checkpoint +from transformers import ( + ByT5Tokenizer, + CLIPTextModel, + CLIPTokenizer, + T5EncoderModel, + T5Tokenizer, +) + +from ...modules.autoencoding.regularizers import DiagonalGaussianRegularizer +from ...modules.diffusionmodules.model import Encoder +from ...modules.diffusionmodules.openaimodel import Timestep +from ...modules.diffusionmodules.util import extract_into_tensor, make_beta_schedule +from ...modules.distributions.distributions import DiagonalGaussianDistribution +from ...util import ( + autocast, + count_params, + default, + disabled_train, + expand_dims_like, + instantiate_from_config, +) + +from ....CKPT_PTH import SDXL_CLIP1_PATH, SDXL_CLIP2_CKPT_PTH +import comfy.model_management +device = comfy.model_management.get_torch_device() + +import comfy.ops +ops = comfy.ops.manual_cast + +class AbstractEmbModel(nn.Module): + def __init__(self): + super().__init__() + self._is_trainable = None + self._ucg_rate = None + self._input_key = None + + @property + def is_trainable(self) -> bool: + return self._is_trainable + + @property + def ucg_rate(self) -> Union[float, torch.Tensor]: + return self._ucg_rate + + @property + def input_key(self) -> str: + return self._input_key + + @is_trainable.setter + def is_trainable(self, value: bool): + self._is_trainable = value + + @ucg_rate.setter + def ucg_rate(self, value: Union[float, torch.Tensor]): + self._ucg_rate = value + + @input_key.setter + def input_key(self, value: str): + self._input_key = value + + @is_trainable.deleter + def is_trainable(self): + del self._is_trainable + + @ucg_rate.deleter + def ucg_rate(self): + del self._ucg_rate + + @input_key.deleter + def input_key(self): + del self._input_key + + +class GeneralConditioner(nn.Module): + OUTPUT_DIM2KEYS = {2: "vector", 3: "crossattn", 4: "concat", 5: "concat"} + KEY2CATDIM = {"vector": 1, "crossattn": 2, "concat": 1, 'control_vector': 1} + + def __init__(self, emb_models: Union[List, ListConfig]): + super().__init__() + embedders = [] + for n, embconfig in enumerate(emb_models): + embedder = instantiate_from_config(embconfig) + assert isinstance( + embedder, AbstractEmbModel + ), f"embedder model {embedder.__class__.__name__} has to inherit from AbstractEmbModel" + embedder.is_trainable = embconfig.get("is_trainable", False) + embedder.ucg_rate = embconfig.get("ucg_rate", 0.0) + if not embedder.is_trainable: + embedder.train = disabled_train + for param in embedder.parameters(): + param.requires_grad = False + embedder.eval() + # print( + # f"Initialized embedder #{n}: {embedder.__class__.__name__} " + # f"with {count_params(embedder, 
False)} params. Trainable: {embedder.is_trainable}" + # ) + + if "input_key" in embconfig: + embedder.input_key = embconfig["input_key"] + elif "input_keys" in embconfig: + embedder.input_keys = embconfig["input_keys"] + else: + raise KeyError( + f"need either 'input_key' or 'input_keys' for embedder {embedder.__class__.__name__}" + ) + + embedder.legacy_ucg_val = embconfig.get("legacy_ucg_value", None) + if embedder.legacy_ucg_val is not None: + embedder.ucg_prng = np.random.RandomState() + + embedders.append(embedder) + self.embedders = nn.ModuleList(embedders) + + def possibly_get_ucg_val(self, embedder: AbstractEmbModel, batch: Dict) -> Dict: + assert embedder.legacy_ucg_val is not None + p = embedder.ucg_rate + val = embedder.legacy_ucg_val + for i in range(len(batch[embedder.input_key])): + if embedder.ucg_prng.choice(2, p=[1 - p, p]): + batch[embedder.input_key][i] = val + return batch + + def forward( + self, batch: Dict, force_zero_embeddings: Optional[List] = None + ) -> Dict: + output = dict() + if force_zero_embeddings is None: + force_zero_embeddings = [] + for embedder in self.embedders: + embedding_context = nullcontext if embedder.is_trainable else torch.no_grad + with embedding_context(): + if hasattr(embedder, "input_key") and (embedder.input_key is not None): + if embedder.legacy_ucg_val is not None: + batch = self.possibly_get_ucg_val(embedder, batch) + emb_out = embedder(batch[embedder.input_key]) + elif hasattr(embedder, "input_keys"): + emb_out = embedder(*[batch[k] for k in embedder.input_keys]) + assert isinstance( + emb_out, (torch.Tensor, list, tuple) + ), f"encoder outputs must be tensors or a sequence, but got {type(emb_out)}" + if not isinstance(emb_out, (list, tuple)): + emb_out = [emb_out] + for emb in emb_out: + out_key = self.OUTPUT_DIM2KEYS[emb.dim()] + if embedder.ucg_rate > 0.0 and embedder.legacy_ucg_val is None: + emb = ( + expand_dims_like( + torch.bernoulli( + (1.0 - embedder.ucg_rate) + * torch.ones(emb.shape[0], device=emb.device) + ), + emb, + ) + * emb + ) + if ( + hasattr(embedder, "input_key") + and embedder.input_key in force_zero_embeddings + ): + emb = torch.zeros_like(emb) + if out_key in output: + output[out_key] = torch.cat( + (output[out_key], emb), self.KEY2CATDIM[out_key] + ) + else: + output[out_key] = emb + return output + + def get_unconditional_conditioning( + self, batch_c, batch_uc=None, force_uc_zero_embeddings=None + ): + if force_uc_zero_embeddings is None: + force_uc_zero_embeddings = [] + ucg_rates = list() + for embedder in self.embedders: + ucg_rates.append(embedder.ucg_rate) + embedder.ucg_rate = 0.0 + c = self(batch_c) + uc = self(batch_c if batch_uc is None else batch_uc, force_uc_zero_embeddings) + + for embedder, rate in zip(self.embedders, ucg_rates): + embedder.ucg_rate = rate + return c, uc + + +class GeneralConditionerWithControl(GeneralConditioner): + def forward( + self, batch: Dict, force_zero_embeddings: Optional[List] = None + ) -> Dict: + output = dict() + if force_zero_embeddings is None: + force_zero_embeddings = [] + for embedder in self.embedders: + embedding_context = nullcontext if embedder.is_trainable else torch.no_grad + with embedding_context(): + if hasattr(embedder, "input_key") and (embedder.input_key is not None): + if embedder.legacy_ucg_val is not None: + batch = self.possibly_get_ucg_val(embedder, batch) + emb_out = embedder(batch[embedder.input_key]) + elif hasattr(embedder, "input_keys"): + emb_out = embedder(*[batch[k] for k in embedder.input_keys]) + assert isinstance( + emb_out, 
(torch.Tensor, list, tuple) + ), f"encoder outputs must be tensors or a sequence, but got {type(emb_out)}" + if not isinstance(emb_out, (list, tuple)): + emb_out = [emb_out] + for emb in emb_out: + if 'control_vector' in embedder.input_key: + out_key = 'control_vector' + else: + out_key = self.OUTPUT_DIM2KEYS[emb.dim()] + if embedder.ucg_rate > 0.0 and embedder.legacy_ucg_val is None: + emb = ( + expand_dims_like( + torch.bernoulli( + (1.0 - embedder.ucg_rate) + * torch.ones(emb.shape[0], device=emb.device) + ), + emb, + ) + * emb + ) + if ( + hasattr(embedder, "input_key") + and embedder.input_key in force_zero_embeddings + ): + emb = torch.zeros_like(emb) + if out_key in output: + output[out_key] = torch.cat( + (output[out_key], emb), self.KEY2CATDIM[out_key] + ) + else: + output[out_key] = emb + + output["control"] = batch["control"] + return output + + +class PreparedConditioner(nn.Module): + def __init__(self, cond_pth, un_cond_pth=None): + super().__init__() + conditions = torch.load(cond_pth) + for k, v in conditions.items(): + self.register_buffer(k, v) + self.un_cond_pth = un_cond_pth + if un_cond_pth is not None: + un_conditions = torch.load(un_cond_pth) + for k, v in un_conditions.items(): + self.register_buffer(k+'_uc', v) + + + @torch.no_grad() + def forward( + self, batch: Dict, return_uc=False + ) -> Dict: + output = dict() + for k, v in self.state_dict().items(): + if not return_uc: + if k.endswith("_uc"): + continue + else: + output[k] = v.detach().clone().repeat(batch['control'].shape[0], *[1 for _ in range(v.ndim - 1)]) + else: + if k.endswith("_uc"): + output[k[:-3]] = v.detach().clone().repeat(batch['control'].shape[0], *[1 for _ in range(v.ndim - 1)]) + else: + continue + output["control"] = batch["control"] + + for k, v in output.items(): + if isinstance(v, torch.Tensor): + assert (torch.isnan(v).any()) is not None + return output + + def get_unconditional_conditioning( + self, batch_c, batch_uc=None, force_uc_zero_embeddings=None + ): + c = self(batch_c) + if self.un_cond_pth is not None: + uc = self(batch_c, return_uc=True) + else: + uc = None + return c, uc + + + +class InceptionV3(nn.Module): + """Wrapper around the https://github.com/mseitzer/pytorch-fid inception + port with an additional squeeze at the end""" + + def __init__(self, normalize_input=False, **kwargs): + super().__init__() + from pytorch_fid import inception + + kwargs["resize_input"] = True + self.model = inception.InceptionV3(normalize_input=normalize_input, **kwargs) + + def forward(self, inp): + # inp = kornia.geometry.resize(inp, (299, 299), + # interpolation='bicubic', + # align_corners=False, + # antialias=True) + # inp = inp.clamp(min=-1, max=1) + + outp = self.model(inp) + + if len(outp) == 1: + return outp[0].squeeze() + + return outp + + +class IdentityEncoder(AbstractEmbModel): + def encode(self, x): + return x + + def forward(self, x): + return x + + +class ClassEmbedder(AbstractEmbModel): + def __init__(self, embed_dim, n_classes=1000, add_sequence_dim=False): + super().__init__() + self.embedding = nn.Embedding(n_classes, embed_dim) + self.n_classes = n_classes + self.add_sequence_dim = add_sequence_dim + + def forward(self, c): + c = self.embedding(c) + if self.add_sequence_dim: + c = c[:, None, :] + return c + + def get_unconditional_conditioning(self, bs, device=device): + uc_class = ( + self.n_classes - 1 + ) # 1000 classes --> 0 ... 
999, one extra class for ucg (class 1000) + uc = torch.ones((bs,), device=device) * uc_class + uc = {self.key: uc.long()} + return uc + + +class ClassEmbedderForMultiCond(ClassEmbedder): + def forward(self, batch, key=None, disable_dropout=False): + out = batch + key = default(key, self.key) + islist = isinstance(batch[key], list) + if islist: + batch[key] = batch[key][0] + c_out = super().forward(batch, key, disable_dropout) + out[key] = [c_out] if islist else c_out + return out + + +class FrozenT5Embedder(AbstractEmbModel): + """Uses the T5 transformer encoder for text""" + + def __init__( + self, version="google/t5-v1_1-xxl", device=device, max_length=77, freeze=True + ): # others are google/t5-v1_1-xl and google/t5-v1_1-xxl + super().__init__() + self.tokenizer = T5Tokenizer.from_pretrained(version) + self.transformer = T5EncoderModel.from_pretrained(version) + self.device = device + self.max_length = max_length + if freeze: + self.freeze() + + def freeze(self): + self.transformer = self.transformer.eval() + + for param in self.parameters(): + param.requires_grad = False + + # @autocast + def forward(self, text): + batch_encoding = self.tokenizer( + text, + truncation=True, + max_length=self.max_length, + return_length=True, + return_overflowing_tokens=False, + padding="max_length", + return_tensors="pt", + ) + tokens = batch_encoding["input_ids"].to(self.device) + with torch.autocast(device, enabled=False): + outputs = self.transformer(input_ids=tokens) + z = outputs.last_hidden_state + return z + + def encode(self, text): + return self(text) + + +class FrozenByT5Embedder(AbstractEmbModel): + """ + Uses the ByT5 transformer encoder for text. Is character-aware. + """ + + def __init__( + self, version="google/byt5-base", device=device, max_length=77, freeze=True + ): # others are google/t5-v1_1-xl and google/t5-v1_1-xxl + super().__init__() + self.tokenizer = ByT5Tokenizer.from_pretrained(version) + self.transformer = T5EncoderModel.from_pretrained(version) + self.device = device + self.max_length = max_length + if freeze: + self.freeze() + + def freeze(self): + self.transformer = self.transformer.eval() + + for param in self.parameters(): + param.requires_grad = False + + def forward(self, text): + batch_encoding = self.tokenizer( + text, + truncation=True, + max_length=self.max_length, + return_length=True, + return_overflowing_tokens=False, + padding="max_length", + return_tensors="pt", + ) + tokens = batch_encoding["input_ids"].to(self.device) + with torch.autocast(device, enabled=False): + outputs = self.transformer(input_ids=tokens) + z = outputs.last_hidden_state + return z + + def encode(self, text): + return self(text) + + +class FrozenCLIPEmbedder(AbstractEmbModel): + """Uses the CLIP transformer encoder for text (from huggingface)""" + + LAYERS = ["last", "pooled", "hidden"] + + def __init__( + self, + version="openai/clip-vit-large-patch14", + device=device, + max_length=77, + freeze=True, + layer="last", + layer_idx=None, + always_return_pooled=False, + ): # clip-vit-base-patch32 + super().__init__() + assert layer in self.LAYERS + #self.tokenizer = CLIPTokenizer.from_pretrained(version if SDXL_CLIP1_PATH is None else SDXL_CLIP1_PATH) + #self.transformer = CLIPTextModel.from_pretrained(version if SDXL_CLIP1_PATH is None else SDXL_CLIP1_PATH) + #self.clip_text_config = CLIPTextConfig.from_pretrained("openai/clip-vit-large-patch14") + #self.tokenizer = CLIPTokenizer.from_pretrained(version) + self.tokenizer = None + #self.transformer = CLIPTextModel(self.clip_text_config) + 
self.transformer = None + self.device = device + self.max_length = max_length + #if freeze: + # self.freeze() + self.layer = layer + self.layer_idx = layer_idx + self.return_pooled = always_return_pooled + if layer == "hidden": + assert layer_idx is not None + assert 0 <= abs(layer_idx) <= 12 + + def freeze(self): + self.transformer = self.transformer.eval() + + for param in self.parameters(): + param.requires_grad = False + + #@autocast + def forward(self, text): + batch_encoding = self.tokenizer( + text, + truncation=True, + max_length=self.max_length, + return_length=True, + return_overflowing_tokens=False, + padding="max_length", + return_tensors="pt", + ) + tokens = batch_encoding["input_ids"].to(self.device) + outputs = self.transformer( + input_ids=tokens, output_hidden_states=self.layer == "hidden" + ) + if self.layer == "last": + z = outputs.last_hidden_state + elif self.layer == "pooled": + z = outputs.pooler_output[:, None, :] + else: + z = outputs.hidden_states[self.layer_idx] + if self.return_pooled: + return z, outputs.pooler_output + return z + + def encode(self, text): + return self(text) + + +class FrozenOpenCLIPEmbedder2(AbstractEmbModel): + """ + Uses the OpenCLIP transformer encoder for text + """ + + LAYERS = ["pooled", "last", "penultimate"] + + def __init__( + self, + arch="ViT-H-14", + version="laion2b_s32b_b79k", + device=device, + max_length=77, + freeze=True, + layer="last", + always_return_pooled=False, + legacy=True, + ): + super().__init__() + assert layer in self.LAYERS + # model, _, _ = open_clip.create_model_and_transforms( + # arch, + # device=torch.device("cpu"), + # pretrained=version if SDXL_CLIP2_CKPT_PTH is None else SDXL_CLIP2_CKPT_PTH, + # ) + + # del model.visual + # self.model = model + self.model = None + self.device = device + self.max_length = max_length + self.return_pooled = always_return_pooled + #if freeze: + # self.freeze() + self.layer = layer + if self.layer == "last": + self.layer_idx = 0 + elif self.layer == "penultimate": + self.layer_idx = 1 + else: + raise NotImplementedError() + self.legacy = legacy + + def freeze(self): + self.model = self.model.eval() + for param in self.parameters(): + param.requires_grad = False + + #@autocast + def forward(self, text): + tokens = open_clip.tokenize(text) + z = self.encode_with_transformer(tokens.to(self.device)) + if not self.return_pooled and self.legacy: + return z + if self.return_pooled: + assert not self.legacy + return z[self.layer], z["pooled"] + return z[self.layer] + + def encode_with_transformer(self, text): + x = self.model.token_embedding(text) # [batch_size, n_ctx, d_model] + x = x + self.model.positional_embedding + x = x.permute(1, 0, 2) # NLD -> LND + try: + x = self.text_transformer_forward(x, attn_mask=self.model.attn_mask) + except: + x = self.text_transformer_forward_batch_first(x, attn_mask=self.model.attn_mask) + if self.legacy: + x = x[self.layer] + x = self.model.ln_final(x) + return x + else: + # x is a dict and will stay a dict + o = x["last"] + o = self.model.ln_final(o) + pooled = self.pool(o, text) + x["pooled"] = pooled + return x + + def pool(self, x, text): + # take features from the eot embedding (eot_token is the highest number in each sequence) + x = ( + x[torch.arange(x.shape[0]), text.argmax(dim=-1)] + @ self.model.text_projection + ) + return x + + def text_transformer_forward(self, x: torch.Tensor, attn_mask=None): + outputs = {} + for i, r in enumerate(self.model.transformer.resblocks): + if i == len(self.model.transformer.resblocks) - 1: + 
outputs["penultimate"] = x.permute(1, 0, 2) # LND -> NLD + if ( + self.model.transformer.grad_checkpointing + and not torch.jit.is_scripting() + ): + x = checkpoint(r, x, attn_mask) + else: + x = r(x, attn_mask=attn_mask) + outputs["last"] = x.permute(1, 0, 2) # LND -> NLD + return outputs + + def text_transformer_forward_batch_first(self, x: torch.Tensor, attn_mask=None): + x = x.permute(1, 0, 2) # LND -> NLD + outputs = {} + for i, r in enumerate(self.model.transformer.resblocks): + if i == len(self.model.transformer.resblocks) - 1: + outputs["penultimate"] = x + if ( + self.model.transformer.grad_checkpointing + and not torch.jit.is_scripting() + ): + x = checkpoint(r, x, attn_mask) + else: + x = r(x, attn_mask=attn_mask) + outputs["last"] = x + return outputs + + def encode(self, text): + return self(text) + + +class FrozenOpenCLIPEmbedder(AbstractEmbModel): + LAYERS = [ + # "pooled", + "last", + "penultimate", + ] + + def __init__( + self, + arch="ViT-H-14", + version="laion2b_s32b_b79k", + device=device, + max_length=77, + freeze=True, + layer="last", + ): + super().__init__() + assert layer in self.LAYERS + model, _, _ = open_clip.create_model_and_transforms( + arch, device=torch.device("cpu"), pretrained=version + ) + del model.visual + self.model = model + + self.device = device + self.max_length = max_length + if freeze: + self.freeze() + self.layer = layer + if self.layer == "last": + self.layer_idx = 0 + elif self.layer == "penultimate": + self.layer_idx = 1 + else: + raise NotImplementedError() + + def freeze(self): + self.model = self.model.eval() + for param in self.parameters(): + param.requires_grad = False + + def forward(self, text): + tokens = open_clip.tokenize(text) + z = self.encode_with_transformer(tokens.to(self.device)) + return z + + def encode_with_transformer(self, text): + x = self.model.token_embedding(text) # [batch_size, n_ctx, d_model] + x = x + self.model.positional_embedding + x = x.permute(1, 0, 2) # NLD -> LND + x = self.text_transformer_forward(x, attn_mask=self.model.attn_mask) + x = x.permute(1, 0, 2) # LND -> NLD + x = self.model.ln_final(x) + return x + + def text_transformer_forward(self, x: torch.Tensor, attn_mask=None): + for i, r in enumerate(self.model.transformer.resblocks): + if i == len(self.model.transformer.resblocks) - self.layer_idx: + break + if ( + self.model.transformer.grad_checkpointing + and not torch.jit.is_scripting() + ): + x = checkpoint(r, x, attn_mask) + else: + x = r(x, attn_mask=attn_mask) + return x + + def encode(self, text): + return self(text) + + +class FrozenOpenCLIPImageEmbedder(AbstractEmbModel): + """ + Uses the OpenCLIP vision transformer encoder for images + """ + + def __init__( + self, + arch="ViT-H-14", + version="laion2b_s32b_b79k", + device=device, + max_length=77, + freeze=True, + antialias=True, + ucg_rate=0.0, + unsqueeze_dim=False, + repeat_to_max_len=False, + num_image_crops=0, + output_tokens=False, + ): + super().__init__() + model, _, _ = open_clip.create_model_and_transforms( + arch, + device=torch.device("cpu"), + pretrained=version, + ) + del model.transformer + self.model = model + self.max_crops = num_image_crops + self.pad_to_max_len = self.max_crops > 0 + self.repeat_to_max_len = repeat_to_max_len and (not self.pad_to_max_len) + self.device = device + self.max_length = max_length + if freeze: + self.freeze() + + self.antialias = antialias + + self.register_buffer( + "mean", torch.Tensor([0.48145466, 0.4578275, 0.40821073]), persistent=False + ) + self.register_buffer( + "std", 
torch.Tensor([0.26862954, 0.26130258, 0.27577711]), persistent=False + ) + self.ucg_rate = ucg_rate + self.unsqueeze_dim = unsqueeze_dim + self.stored_batch = None + self.model.visual.output_tokens = output_tokens + self.output_tokens = output_tokens + + def preprocess(self, x): + # normalize to [0,1] + x = kornia.geometry.resize( + x, + (224, 224), + interpolation="bicubic", + align_corners=True, + antialias=self.antialias, + ) + x = (x + 1.0) / 2.0 + # renormalize according to clip + x = kornia.enhance.normalize(x, self.mean, self.std) + return x + + def freeze(self): + self.model = self.model.eval() + for param in self.parameters(): + param.requires_grad = False + + # @autocast + def forward(self, image, no_dropout=False): + z = self.encode_with_vision_transformer(image) + tokens = None + if self.output_tokens: + z, tokens = z[0], z[1] + z = z.to(image.dtype) + if self.ucg_rate > 0.0 and not no_dropout and not (self.max_crops > 0): + z = ( + torch.bernoulli( + (1.0 - self.ucg_rate) * torch.ones(z.shape[0], device=z.device) + )[:, None] + * z + ) + if tokens is not None: + tokens = ( + expand_dims_like( + torch.bernoulli( + (1.0 - self.ucg_rate) + * torch.ones(tokens.shape[0], device=tokens.device) + ), + tokens, + ) + * tokens + ) + if self.unsqueeze_dim: + z = z[:, None, :] + if self.output_tokens: + assert not self.repeat_to_max_len + assert not self.pad_to_max_len + return tokens, z + if self.repeat_to_max_len: + if z.dim() == 2: + z_ = z[:, None, :] + else: + z_ = z + return repeat(z_, "b 1 d -> b n d", n=self.max_length), z + elif self.pad_to_max_len: + assert z.dim() == 3 + z_pad = torch.cat( + ( + z, + torch.zeros( + z.shape[0], + self.max_length - z.shape[1], + z.shape[2], + device=z.device, + ), + ), + 1, + ) + return z_pad, z_pad[:, 0, ...] + return z + + def encode_with_vision_transformer(self, img): + # if self.max_crops > 0: + # img = self.preprocess_by_cropping(img) + if img.dim() == 5: + assert self.max_crops == img.shape[1] + img = rearrange(img, "b n c h w -> (b n) c h w") + img = self.preprocess(img) + if not self.output_tokens: + assert not self.model.visual.output_tokens + x = self.model.visual(img) + tokens = None + else: + assert self.model.visual.output_tokens + x, tokens = self.model.visual(img) + if self.max_crops > 0: + x = rearrange(x, "(b n) d -> b n d", n=self.max_crops) + # drop out between 0 and all along the sequence axis + x = ( + torch.bernoulli( + (1.0 - self.ucg_rate) + * torch.ones(x.shape[0], x.shape[1], 1, device=x.device) + ) + * x + ) + if tokens is not None: + tokens = rearrange(tokens, "(b n) t d -> b t (n d)", n=self.max_crops) + print( + f"You are running very experimental token-concat in {self.__class__.__name__}. " + f"Check what you are doing, and then remove this message." + ) + if self.output_tokens: + return x, tokens + return x + + def encode(self, text): + return self(text) + + +class FrozenCLIPT5Encoder(AbstractEmbModel): + def __init__( + self, + clip_version="openai/clip-vit-large-patch14", + t5_version="google/t5-v1_1-xl", + device=device, + clip_max_length=77, + t5_max_length=77, + ): + super().__init__() + self.clip_encoder = FrozenCLIPEmbedder( + clip_version, device, max_length=clip_max_length + ) + self.t5_encoder = FrozenT5Embedder(t5_version, device, max_length=t5_max_length) + print( + f"{self.clip_encoder.__class__.__name__} has {count_params(self.clip_encoder) * 1.e-6:.2f} M parameters, " + f"{self.t5_encoder.__class__.__name__} comes with {count_params(self.t5_encoder) * 1.e-6:.2f} M params." 
+ ) + + def encode(self, text): + return self(text) + + def forward(self, text): + clip_z = self.clip_encoder.encode(text) + t5_z = self.t5_encoder.encode(text) + return [clip_z, t5_z] + + +class SpatialRescaler(nn.Module): + def __init__( + self, + n_stages=1, + method="bilinear", + multiplier=0.5, + in_channels=3, + out_channels=None, + bias=False, + wrap_video=False, + kernel_size=1, + remap_output=False, + ): + super().__init__() + self.n_stages = n_stages + assert self.n_stages >= 0 + assert method in [ + "nearest", + "linear", + "bilinear", + "trilinear", + "bicubic", + "area", + ] + self.multiplier = multiplier + self.interpolator = partial(torch.nn.functional.interpolate, mode=method) + self.remap_output = out_channels is not None or remap_output + if self.remap_output: + print( + f"Spatial Rescaler mapping from {in_channels} to {out_channels} channels after resizing." + ) + self.channel_mapper = ops.Conv2d( + in_channels, + out_channels, + kernel_size=kernel_size, + bias=bias, + padding=kernel_size // 2, + ) + self.wrap_video = wrap_video + + def forward(self, x): + if self.wrap_video and x.ndim == 5: + B, C, T, H, W = x.shape + x = rearrange(x, "b c t h w -> b t c h w") + x = rearrange(x, "b t c h w -> (b t) c h w") + + for stage in range(self.n_stages): + x = self.interpolator(x, scale_factor=self.multiplier) + + if self.wrap_video: + x = rearrange(x, "(b t) c h w -> b t c h w", b=B, t=T, c=C) + x = rearrange(x, "b t c h w -> b c t h w") + if self.remap_output: + x = self.channel_mapper(x) + return x + + def encode(self, x): + return self(x) + + +class LowScaleEncoder(nn.Module): + def __init__( + self, + model_config, + linear_start, + linear_end, + timesteps=1000, + max_noise_level=250, + output_size=64, + scale_factor=1.0, + ): + super().__init__() + self.max_noise_level = max_noise_level + self.model = instantiate_from_config(model_config) + self.augmentation_schedule = self.register_schedule( + timesteps=timesteps, linear_start=linear_start, linear_end=linear_end + ) + self.out_size = output_size + self.scale_factor = scale_factor + + def register_schedule( + self, + beta_schedule="linear", + timesteps=1000, + linear_start=1e-4, + linear_end=2e-2, + cosine_s=8e-3, + ): + betas = make_beta_schedule( + beta_schedule, + timesteps, + linear_start=linear_start, + linear_end=linear_end, + cosine_s=cosine_s, + ) + alphas = 1.0 - betas + alphas_cumprod = np.cumprod(alphas, axis=0) + alphas_cumprod_prev = np.append(1.0, alphas_cumprod[:-1]) + + (timesteps,) = betas.shape + self.num_timesteps = int(timesteps) + self.linear_start = linear_start + self.linear_end = linear_end + assert ( + alphas_cumprod.shape[0] == self.num_timesteps + ), "alphas have to be defined for each timestep" + + to_torch = partial(torch.tensor, dtype=torch.float32) + + self.register_buffer("betas", to_torch(betas)) + self.register_buffer("alphas_cumprod", to_torch(alphas_cumprod)) + self.register_buffer("alphas_cumprod_prev", to_torch(alphas_cumprod_prev)) + + # calculations for diffusion q(x_t | x_{t-1}) and others + self.register_buffer("sqrt_alphas_cumprod", to_torch(np.sqrt(alphas_cumprod))) + self.register_buffer( + "sqrt_one_minus_alphas_cumprod", to_torch(np.sqrt(1.0 - alphas_cumprod)) + ) + self.register_buffer( + "log_one_minus_alphas_cumprod", to_torch(np.log(1.0 - alphas_cumprod)) + ) + self.register_buffer( + "sqrt_recip_alphas_cumprod", to_torch(np.sqrt(1.0 / alphas_cumprod)) + ) + self.register_buffer( + "sqrt_recipm1_alphas_cumprod", to_torch(np.sqrt(1.0 / alphas_cumprod - 1)) + ) + + def 
q_sample(self, x_start, t, noise=None): + noise = default(noise, lambda: torch.randn_like(x_start)) + return ( + extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start + + extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape) + * noise + ) + + def forward(self, x): + z = self.model.encode(x) + if isinstance(z, DiagonalGaussianDistribution): + z = z.sample() + z = z * self.scale_factor + noise_level = torch.randint( + 0, self.max_noise_level, (x.shape[0],), device=x.device + ).long() + z = self.q_sample(z, noise_level) + if self.out_size is not None: + z = torch.nn.functional.interpolate(z, size=self.out_size, mode="nearest") + # z = z.repeat_interleave(2, -2).repeat_interleave(2, -1) + return z, noise_level + + def decode(self, z): + z = z / self.scale_factor + return self.model.decode(z) + + +class ConcatTimestepEmbedderND(AbstractEmbModel): + """embeds each dimension independently and concatenates them""" + + def __init__(self, outdim): + super().__init__() + self.timestep = Timestep(outdim) + self.outdim = outdim + + def forward(self, x): + if x.ndim == 1: + x = x[:, None] + assert len(x.shape) == 2 + b, dims = x.shape[0], x.shape[1] + x = rearrange(x, "b d -> (b d)") + emb = self.timestep(x) + emb = rearrange(emb, "(b d) d2 -> b (d d2)", b=b, d=dims, d2=self.outdim) + return emb + + +class GaussianEncoder(Encoder, AbstractEmbModel): + def __init__( + self, weight: float = 1.0, flatten_output: bool = True, *args, **kwargs + ): + super().__init__(*args, **kwargs) + self.posterior = DiagonalGaussianRegularizer() + self.weight = weight + self.flatten_output = flatten_output + + def forward(self, x) -> Tuple[Dict, torch.Tensor]: + z = super().forward(x) + z, log = self.posterior(z) + log["loss"] = log["kl_loss"] + log["weight"] = self.weight + if self.flatten_output: + z = rearrange(z, "b c h w -> b (h w ) c") + return log, z diff --git a/ComfyUI-SUPIR/sgm/util.py b/ComfyUI-SUPIR/sgm/util.py new file mode 100644 index 0000000000000000000000000000000000000000..e18f93dd0e1e25c2292d0388e6e15a78a535c68b --- /dev/null +++ b/ComfyUI-SUPIR/sgm/util.py @@ -0,0 +1,250 @@ +import functools +import importlib +import os +from functools import partial +from inspect import isfunction + +#import fsspec +import numpy as np +import torch +from PIL import Image, ImageDraw, ImageFont +from safetensors.torch import load_file as load_safetensors + + +def disabled_train(self, mode=True): + """Overwrite model.train with this function to make sure train/eval mode + does not change anymore.""" + return self + + +def get_string_from_tuple(s): + try: + # Check if the string starts and ends with parentheses + if s[0] == "(" and s[-1] == ")": + # Convert the string to a tuple + t = eval(s) + # Check if the type of t is tuple + if type(t) == tuple: + return t[0] + else: + pass + except: + pass + return s + + +def is_power_of_two(n): + """ + chat.openai.com/chat + Return True if n is a power of 2, otherwise return False. + + The function is_power_of_two takes an integer n as input and returns True if n is a power of 2, otherwise it returns False. + The function works by first checking if n is less than or equal to 0. If n is less than or equal to 0, it can't be a power of 2, so the function returns False. + If n is greater than 0, the function checks whether n is a power of 2 by using a bitwise AND operation between n and n-1. If n is a power of 2, then it will have only one bit set to 1 in its binary representation. 
When we subtract 1 from a power of 2, all the bits to the right of that bit become 1, and the bit itself becomes 0. So, when we perform a bitwise AND between n and n-1, we get 0 if n is a power of 2, and a non-zero value otherwise. + Thus, if the result of the bitwise AND operation is 0, then n is a power of 2 and the function returns True. Otherwise, the function returns False. + + """ + if n <= 0: + return False + return (n & (n - 1)) == 0 + + +def autocast(f, enabled=True): + def do_autocast(*args, **kwargs): + with torch.cuda.amp.autocast( + enabled=enabled, + dtype=torch.get_autocast_gpu_dtype(), + cache_enabled=torch.is_autocast_cache_enabled(), + ): + return f(*args, **kwargs) + + return do_autocast + + +def load_partial_from_config(config): + return partial(get_obj_from_str(config["target"]), **config.get("params", dict())) + + +def log_txt_as_img(wh, xc, size=10): + # wh a tuple of (width, height) + # xc a list of captions to plot + b = len(xc) + txts = list() + for bi in range(b): + txt = Image.new("RGB", wh, color="white") + draw = ImageDraw.Draw(txt) + font = ImageFont.truetype("data/DejaVuSans.ttf", size=size) + nc = int(40 * (wh[0] / 256)) + if isinstance(xc[bi], list): + text_seq = xc[bi][0] + else: + text_seq = xc[bi] + lines = "\n".join( + text_seq[start : start + nc] for start in range(0, len(text_seq), nc) + ) + + try: + draw.text((0, 0), lines, fill="black", font=font) + except UnicodeEncodeError: + print("Cant encode string for logging. Skipping.") + + txt = np.array(txt).transpose(2, 0, 1) / 127.5 - 1.0 + txts.append(txt) + txts = np.stack(txts) + txts = torch.tensor(txts) + return txts + + +def partialclass(cls, *args, **kwargs): + class NewCls(cls): + __init__ = functools.partialmethod(cls.__init__, *args, **kwargs) + + return NewCls + + +def make_path_absolute(path): + fs, p = fsspec.core.url_to_fs(path) + if fs.protocol == "file": + return os.path.abspath(p) + return path + + +def ismap(x): + if not isinstance(x, torch.Tensor): + return False + return (len(x.shape) == 4) and (x.shape[1] > 3) + + +def isimage(x): + if not isinstance(x, torch.Tensor): + return False + return (len(x.shape) == 4) and (x.shape[1] == 3 or x.shape[1] == 1) + + +def isheatmap(x): + if not isinstance(x, torch.Tensor): + return False + + return x.ndim == 2 + + +def isneighbors(x): + if not isinstance(x, torch.Tensor): + return False + return x.ndim == 5 and (x.shape[2] == 3 or x.shape[2] == 1) + + +def exists(x): + return x is not None + + +def expand_dims_like(x, y): + while x.dim() != y.dim(): + x = x.unsqueeze(-1) + return x + + +def default(val, d): + if exists(val): + return val + return d() if isfunction(d) else d + + +def mean_flat(tensor): + """ + https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/nn.py#L86 + Take the mean over all non-batch dimensions. 
+ """ + return tensor.mean(dim=list(range(1, len(tensor.shape)))) + + +def count_params(model, verbose=False): + total_params = sum(p.numel() for p in model.parameters()) + if verbose: + print(f"{model.__class__.__name__} has {total_params * 1.e-6:.2f} M params.") + return total_params + + +def instantiate_from_config(config): + if not "target" in config: + if config == "__is_first_stage__": + return None + elif config == "__is_unconditional__": + return None + raise KeyError("Expected key `target` to instantiate.") + return get_obj_from_str(config["target"])(**config.get("params", dict())) + + + +def get_obj_from_str(string, reload=False, invalidate_cache=True): + package_directory_name = os.path.basename(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + module, cls = string.rsplit(".", 1) + if invalidate_cache: + importlib.invalidate_caches() + if reload: + module_imp = importlib.import_module(module) + importlib.reload(module_imp) + return getattr(importlib.import_module(module, package=package_directory_name), cls) + + +def append_zero(x): + return torch.cat([x, x.new_zeros([1])]) + + +def append_dims(x, target_dims): + """Appends dimensions to the end of a tensor until it has target_dims dimensions.""" + dims_to_append = target_dims - x.ndim + if dims_to_append < 0: + raise ValueError( + f"input has {x.ndim} dims but target_dims is {target_dims}, which is less" + ) + return x[(...,) + (None,) * dims_to_append] + + +def load_model_from_config(config, ckpt, verbose=True, freeze=True): + print(f"Loading model from {ckpt}") + if ckpt.endswith("ckpt"): + pl_sd = torch.load(ckpt, map_location="cpu") + if "global_step" in pl_sd: + print(f"Global Step: {pl_sd['global_step']}") + sd = pl_sd["state_dict"] + elif ckpt.endswith("safetensors"): + sd = load_safetensors(ckpt) + else: + raise NotImplementedError + + model = instantiate_from_config(config.model) + + m, u = model.load_state_dict(sd, strict=False) + + if len(m) > 0 and verbose: + print("missing keys:") + print(m) + if len(u) > 0 and verbose: + print("unexpected keys:") + print(u) + + if freeze: + for param in model.parameters(): + param.requires_grad = False + + model.eval() + return model + + +def get_configs_path() -> str: + """ + Get the `configs` directory. + For a working copy, this is the one in the root of the repository, + but for an installed copy, it's in the `sgm` package (see pyproject.toml). + """ + this_dir = os.path.dirname(__file__) + candidates = ( + os.path.join(this_dir, "configs"), + os.path.join(this_dir, "..", "configs"), + ) + for candidate in candidates: + candidate = os.path.abspath(candidate) + if os.path.isdir(candidate): + return candidate + raise FileNotFoundError(f"Could not find SGM configs in {candidates}") diff --git a/ComfyUI-layerdiffuse/LICENSE b/ComfyUI-layerdiffuse/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..989e2c59e973a05cfbfe9de678b7f2af777b0713 --- /dev/null +++ b/ComfyUI-layerdiffuse/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file
diff --git a/ComfyUI-layerdiffuse/README.md b/ComfyUI-layerdiffuse/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..2e496ff14a256666c9ca367176feca3b2343b24b
--- /dev/null
+++ b/ComfyUI-layerdiffuse/README.md
@@ -0,0 +1,65 @@
+# ComfyUI-layerdiffuse
+ComfyUI implementation of https://github.com/layerdiffusion/LayerDiffuse.
+
+## Installation
+Download the repository and unpack it into the custom_nodes folder in the ComfyUI installation directory.
+
+Or clone via git, starting from the ComfyUI installation directory:
+```bash
+cd custom_nodes
+git clone git@github.com:huchenlei/ComfyUI-layerdiffuse.git
+```
+
+Run `pip install -r requirements.txt` to install the Python dependencies. You might run into a version conflict on diffusers if other extensions depend on a different diffusers version; in that case, it is recommended to set up a separate Python venv.
+
+## Workflows
+### [Generate foreground](https://github.com/huchenlei/ComfyUI-layerdiffuse/blob/main/examples/layer_diffusion_fg_example_rgba.json)
+![rgba](https://github.com/huchenlei/ComfyUI-layerdiffuse/assets/20929282/5e6085e5-d997-4a0a-b589-257d65eb1eb2)
+
+### [Generate foreground (RGB + alpha)](https://github.com/huchenlei/ComfyUI-layerdiffuse/blob/main/examples/layer_diffusion_fg_example.json)
+If you want more control, this workflow lets you get the RGB image and the alpha channel mask separately.
+![readme1](https://github.com/huchenlei/ComfyUI-layerdiffuse/assets/20929282/4825b81c-7089-4806-bce7-777229421707)
+
+### [Blending (FG/BG)](https://github.com/huchenlei/ComfyUI-layerdiffuse/blob/main/examples/layer_diffusion_cond_example.json)
+Blending given FG
+![fg_cond](https://github.com/huchenlei/ComfyUI-layerdiffuse/assets/20929282/7f7dee80-6e57-4570-b304-d1f7e5dc3aad)
+
+Blending given BG
+![bg_cond](https://github.com/huchenlei/ComfyUI-layerdiffuse/assets/20929282/e3a79218-6123-453b-a54b-2f338db1c12d)
+
+### [Extract FG from Blended + BG](https://github.com/huchenlei/ComfyUI-layerdiffuse/blob/main/examples/layer_diffusion_diff_fg.json)
+![diff_bg](https://github.com/huchenlei/ComfyUI-layerdiffuse/assets/20929282/45c7207d-72ff-4fb0-9c91-687040781837)
+
+### [Extract BG from Blended + FG](https://github.com/huchenlei/ComfyUI-layerdiffuse/blob/main/examples/layer_diffusion_diff_bg.json)
+[Forge impl's sanity check](https://github.com/layerdiffuse/sd-forge-layerdiffuse#sanity-check) sets `Stop at` to 0.5 to get a better-quality BG.
+This workflow may be inferior to other object-removal workflows.
+![diff_fg](https://github.com/huchenlei/ComfyUI-layerdiffuse/assets/20929282/05a10add-68b0-473a-acee-5853e4720322)
+
+### [Extract BG from Blended + FG (Stop at 0.5)](https://github.com/huchenlei/ComfyUI-layerdiffuse/blob/main/examples/layer_diffusion_diff_bg_stop_at.json)
+In the [SD Forge impl](https://github.com/layerdiffuse/sd-forge-layerdiffuse), there is a `stop at` param that determines when
+layer diffuse should stop in the denoising process. Under the hood, this param unapplies the LoRA and the c_concat cond after a certain step
+threshold. This is hard and risky to implement directly in ComfyUI, as it requires manually loading a model that has every change except the layer diffusion
+change applied. A workaround in ComfyUI is to run another img2img pass on the layer diffuse result to simulate the effect of the `stop at` param, as sketched below.
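+
+For intuition, the snippet below is a minimal, self-contained sketch of what `stop at` effectively does; all names in it (`denoise_step`, `sample_with_stop_at`) are hypothetical placeholders for illustration, not the ComfyUI or sd-forge-layerdiffuse API. The layer-diffuse patch is active only for the first `stop_at` fraction of the steps, and the remaining steps run on the unpatched model, which is roughly what the extra img2img pass simulates.
+
+```python
+# Conceptual sketch only -- hypothetical names, not a real ComfyUI API.
+import torch
+
+def denoise_step(latent: torch.Tensor, patched: bool) -> torch.Tensor:
+    # Stand-in for one sampler step; a "patched" model would additionally see
+    # the layer-diffuse LoRA weights and the extra c_concat conditioning.
+    return latent * (0.9 if patched else 0.95)
+
+def sample_with_stop_at(latent: torch.Tensor, steps: int = 20, stop_at: float = 0.5) -> torch.Tensor:
+    threshold = int(steps * stop_at)
+    for i in range(steps):
+        # The layer-diffuse patch applies only before the threshold step ("stop at");
+        # after that, the plain model continues, like the second img2img pass.
+        latent = denoise_step(latent, patched=(i < threshold))
+    return latent
+
+out = sample_with_stop_at(torch.randn(1, 4, 128, 128))
+```
+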
+![diff_fg_stop_at](https://github.com/huchenlei/ComfyUI-layerdiffuse/assets/20929282/e383c9d3-2d47-40c2-b764-b0bd48243ee8)
+
+
+### [Generate FG from BG combined](https://github.com/huchenlei/ComfyUI-layerdiffuse/blob/main/examples/layer_diffusion_cond_fg_all.json)
+Combines the previous workflows to generate the blended image and the FG given a BG. We found some color variation in the extracted FG; we still need to confirm
+with the layer diffusion authors whether this is expected.
+![fg_all](https://github.com/huchenlei/ComfyUI-layerdiffuse/assets/20929282/f4c18585-961a-473a-a616-aa3776bacd41)
+
+### [2024-3-9] [Generate FG + Blended given BG](https://github.com/huchenlei/ComfyUI-layerdiffuse/blob/main/examples/layer_diffusion_cond_joint_bg.json)
+Requires a batch size of 2N. Currently SD15 only.
+![sd15_cond_joint_bg](https://github.com/huchenlei/ComfyUI-layerdiffuse/assets/20929282/9bbfe5c1-14a0-421d-bf06-85e301bf8065)
+
+### [2024-3-9] [Generate BG + Blended given FG](https://github.com/huchenlei/ComfyUI-layerdiffuse/blob/main/examples/layer_diffusion_cond_joint_fg.json)
+Requires a batch size of 2N. Currently SD15 only.
+![sd15_cond_joint_fg](https://github.com/huchenlei/ComfyUI-layerdiffuse/assets/20929282/65af8b38-cf4c-4667-b76f-3013a0be0a48)
+
+### [2024-3-9] [Generate BG + FG + Blended together](https://github.com/huchenlei/ComfyUI-layerdiffuse/blob/main/examples/layer_diffusion_joint.json)
+Requires a batch size of 3N. Currently SD15 only.
+![sd15_joint](https://github.com/huchenlei/ComfyUI-layerdiffuse/assets/20929282/e5545809-e3fb-4683-acf5-8728195cb2bc)
+
+## Note
+- Currently only SDXL and SD15 are supported. See https://github.com/layerdiffuse/sd-forge-layerdiffuse#model-notes for more details.
+- To decode the RGBA result, the generation dimensions must be multiples of 64.
Otherwise, you will get decode error: ![image](https://github.com/huchenlei/ComfyUI-layerdiffuse/assets/20929282/ff055f99-9297-4ff1-9a33-065aaadcf98e) diff --git a/ComfyUI-layerdiffuse/__init__.py b/ComfyUI-layerdiffuse/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..0dc10696ecee25358c4480a7b665a14d90401755 --- /dev/null +++ b/ComfyUI-layerdiffuse/__init__.py @@ -0,0 +1,3 @@ +from .layered_diffusion import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS + +__all__ = ['NODE_CLASS_MAPPINGS', 'NODE_DISPLAY_NAME_MAPPINGS'] \ No newline at end of file diff --git a/ComfyUI-layerdiffuse/examples/layer_diffusion_cond_example.json b/ComfyUI-layerdiffuse/examples/layer_diffusion_cond_example.json new file mode 100644 index 0000000000000000000000000000000000000000..09028c4565711cc4f6cb142be36957cba76721b0 --- /dev/null +++ b/ComfyUI-layerdiffuse/examples/layer_diffusion_cond_example.json @@ -0,0 +1,668 @@ +{ + "last_node_id": 35, + "last_link_id": 52, + "nodes": [ + { + "id": 4, + "type": "CheckpointLoaderSimple", + "pos": [ + 5, + 479 + ], + "size": { + "0": 315, + "1": 98 + }, + "flags": {}, + "order": 0, + "mode": 0, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 38 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 3, + 5 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 22, + 49 + ], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "juggernautXL_v8Rundiffusion.safetensors" + ] + }, + { + "id": 29, + "type": "VAEEncode", + "pos": [ + 212, + -22 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 6, + "mode": 0, + "inputs": [ + { + "name": "pixels", + "type": "IMAGE", + "link": 51 + }, + { + "name": "vae", + "type": "VAE", + "link": 49, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 47 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEEncode" + } + }, + { + "id": 30, + "type": "LoadImage", + "pos": [ + -363, + 209 + ], + "size": { + "0": 315, + "1": 314 + }, + "flags": {}, + "order": 1, + "mode": 0, + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 50 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "MASK", + "type": "MASK", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "LoadImage" + }, + "widgets_values": [ + "309219693-e7e2d80e-ffbe-4724-812a-5139a88027e3.png", + "image" + ] + }, + { + "id": 20, + "type": "PreviewImage", + "pos": [ + 1556, + 138 + ], + "size": { + "0": 611.2340087890625, + "1": 633.9354858398438 + }, + "flags": {}, + "order": 11, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 29 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 6, + "type": "CLIPTextEncode", + "pos": [ + 415, + 186 + ], + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "flags": {}, + "order": 3, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 3 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 39 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "old man sitting, high quality\n\n" + ] + }, + { + "id": 7, + "type": "CLIPTextEncode", + "pos": [ + 413, + 389 + ], + "size": { + "0": 425.27801513671875, + "1": 
180.6060791015625 + }, + "flags": {}, + "order": 4, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 5 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 40 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "text, watermark" + ] + }, + { + "id": 34, + "type": "PreviewImage", + "pos": [ + 213, + -346 + ], + "size": { + "0": 210, + "1": 246 + }, + "flags": {}, + "order": 7, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 52 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 14, + "type": "VAEDecode", + "pos": [ + 1275, + 198 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 10, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 21 + }, + { + "name": "vae", + "type": "VAE", + "link": 22, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 29 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 3, + "type": "KSampler", + "pos": [ + 913, + 181 + ], + "size": { + "0": 315, + "1": 262 + }, + "flags": {}, + "order": 9, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 41 + }, + { + "name": "positive", + "type": "CONDITIONING", + "link": 46 + }, + { + "name": "negative", + "type": "CONDITIONING", + "link": 45 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 2 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 21 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 100676796092754, + "randomize", + 20, + 8, + "euler", + "normal", + 1 + ] + }, + { + "id": 5, + "type": "EmptyLatentImage", + "pos": [ + 475, + 666 + ], + "size": { + "0": 315, + "1": 106 + }, + "flags": {}, + "order": 2, + "mode": 0, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 2 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 1024, + 1024, + 1 + ] + }, + { + "id": 33, + "type": "ImageResize+", + "pos": [ + -146, + -16 + ], + "size": { + "0": 315, + "1": 170 + }, + "flags": {}, + "order": 5, + "mode": 0, + "inputs": [ + { + "name": "image", + "type": "IMAGE", + "link": 50 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 51, + 52 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "width", + "type": "INT", + "links": null, + "shape": 3 + }, + { + "name": "height", + "type": "INT", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "ImageResize+" + }, + "widgets_values": [ + 1024, + 1024, + "nearest", + false + ] + }, + { + "id": 28, + "type": "LayeredDiffusionCondApply", + "pos": [ + 465, + -26 + ], + "size": { + "0": 315, + "1": 142 + }, + "flags": {}, + "order": 8, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 38 + }, + { + "name": "cond", + "type": "CONDITIONING", + "link": 39 + }, + { + "name": "uncond", + "type": "CONDITIONING", + "link": 40 + }, + { + "name": "latent", + "type": "LATENT", + "link": 47 + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 41 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 46 + ], + "shape": 3, + "slot_index": 1 + 
}, + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 45 + ], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "LayeredDiffusionCondApply" + }, + "widgets_values": [ + "SDXL, Background", + 1 + ], + "color": "#232", + "bgcolor": "#353" + } + ], + "links": [ + [ + 2, + 5, + 0, + 3, + 3, + "LATENT" + ], + [ + 3, + 4, + 1, + 6, + 0, + "CLIP" + ], + [ + 5, + 4, + 1, + 7, + 0, + "CLIP" + ], + [ + 21, + 3, + 0, + 14, + 0, + "LATENT" + ], + [ + 22, + 4, + 2, + 14, + 1, + "VAE" + ], + [ + 29, + 14, + 0, + 20, + 0, + "IMAGE" + ], + [ + 38, + 4, + 0, + 28, + 0, + "MODEL" + ], + [ + 39, + 6, + 0, + 28, + 1, + "CONDITIONING" + ], + [ + 40, + 7, + 0, + 28, + 2, + "CONDITIONING" + ], + [ + 41, + 28, + 0, + 3, + 0, + "MODEL" + ], + [ + 45, + 28, + 2, + 3, + 2, + "CONDITIONING" + ], + [ + 46, + 28, + 1, + 3, + 1, + "CONDITIONING" + ], + [ + 47, + 29, + 0, + 28, + 3, + "LATENT" + ], + [ + 49, + 4, + 2, + 29, + 1, + "VAE" + ], + [ + 50, + 30, + 0, + 33, + 0, + "IMAGE" + ], + [ + 51, + 33, + 0, + 29, + 0, + "IMAGE" + ], + [ + 52, + 33, + 0, + 34, + 0, + "IMAGE" + ] + ], + "groups": [], + "config": {}, + "extra": {}, + "version": 0.4 +} \ No newline at end of file diff --git a/ComfyUI-layerdiffuse/examples/layer_diffusion_cond_fg_all.json b/ComfyUI-layerdiffuse/examples/layer_diffusion_cond_fg_all.json new file mode 100644 index 0000000000000000000000000000000000000000..0b9a51c19504e3fdc8f0f8f082c5d1f36ffe5d87 --- /dev/null +++ b/ComfyUI-layerdiffuse/examples/layer_diffusion_cond_fg_all.json @@ -0,0 +1,951 @@ +{ + "last_node_id": 56, + "last_link_id": 104, + "nodes": [ + { + "id": 14, + "type": "VAEDecode", + "pos": [ + 1286, + 187 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 12, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 21 + }, + { + "name": "vae", + "type": "VAE", + "link": 22, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 65 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 40, + "type": "LayeredDiffusionDecodeRGBA", + "pos": [ + 1533, + 189 + ], + "size": { + "0": 243.60000610351562, + "1": 102 + }, + "flags": {}, + "order": 13, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 67 + }, + { + "name": "images", + "type": "IMAGE", + "link": 65 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 66 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "LayeredDiffusionDecodeRGBA" + }, + "widgets_values": [ + "SDXL", + 16 + ], + "color": "#232", + "bgcolor": "#353" + }, + { + "id": 47, + "type": "VAEDecode", + "pos": [ + 1360, + 900 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 8, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 74 + }, + { + "name": "vae", + "type": "VAE", + "link": 104, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 96 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 49, + "type": "VAEEncode", + "pos": [ + 280, + 690 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 5, + "mode": 0, + "inputs": [ + { + "name": "pixels", + "type": "IMAGE", + "link": 77 + }, + { + "name": "vae", + "type": "VAE", + "link": 102, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": 
"LATENT", + "type": "LATENT", + "links": [ + 89, + 97 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEEncode" + } + }, + { + "id": 56, + "type": "PreviewImage", + "pos": [ + 1800, + 900 + ], + "size": { + "0": 611.2340087890625, + "1": 633.9354858398438 + }, + "flags": {}, + "order": 10, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 96 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 6, + "type": "CLIPTextEncode", + "pos": [ + 415, + 186 + ], + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "flags": {}, + "order": 3, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 3 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 56, + 99 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "an old man sitting, high quality\n\n" + ] + }, + { + "id": 7, + "type": "CLIPTextEncode", + "pos": [ + 413, + 389 + ], + "size": { + "0": 425.27801513671875, + "1": 180.6060791015625 + }, + "flags": {}, + "order": 4, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 5 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 57, + 100 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "text, watermark" + ] + }, + { + "id": 42, + "type": "KSampler", + "pos": [ + 990, + 850 + ], + "size": { + "0": 315, + "1": 262 + }, + "flags": {}, + "order": 7, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 93 + }, + { + "name": "positive", + "type": "CONDITIONING", + "link": 94 + }, + { + "name": "negative", + "type": "CONDITIONING", + "link": 95 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 71 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 74, + 98 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 216474886443753, + "randomize", + 20, + 8, + "euler", + "normal", + 1 + ] + }, + { + "id": 3, + "type": "KSampler", + "pos": [ + 913, + 182 + ], + "size": { + "0": 315, + "1": 262 + }, + "flags": {}, + "order": 11, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 62 + }, + { + "name": "positive", + "type": "CONDITIONING", + "link": 63 + }, + { + "name": "negative", + "type": "CONDITIONING", + "link": 64 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 101, + "slot_index": 3 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 21, + 67 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 137168876920770, + "randomize", + 20, + 8, + "euler", + "normal", + 1 + ] + }, + { + "id": 4, + "type": "CheckpointLoaderSimple", + "pos": [ + -54, + 488 + ], + "size": { + "0": 315, + "1": 98 + }, + "flags": {}, + "order": 0, + "mode": 0, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 55, + 103 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 3, + 5 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 22, + 102, + 104 + ], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "juggernautXL_v8Rundiffusion.safetensors" + ] 
+ }, + { + "id": 50, + "type": "LoadImage", + "pos": [ + -59, + 686 + ], + "size": { + "0": 315, + "1": 314 + }, + "flags": {}, + "order": 1, + "mode": 0, + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 77 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "MASK", + "type": "MASK", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "LoadImage" + }, + "widgets_values": [ + "chair.png", + "image" + ] + }, + { + "id": 44, + "type": "EmptyLatentImage", + "pos": [ + 524, + 944 + ], + "size": { + "0": 315, + "1": 106 + }, + "flags": {}, + "order": 2, + "mode": 0, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 71, + 101 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 1024, + 1024, + 1 + ] + }, + { + "id": 55, + "type": "LayeredDiffusionCondApply", + "pos": [ + 530, + 680 + ], + "size": { + "0": 315, + "1": 142 + }, + "flags": {}, + "order": 6, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 103, + "slot_index": 0 + }, + { + "name": "cond", + "type": "CONDITIONING", + "link": 99 + }, + { + "name": "uncond", + "type": "CONDITIONING", + "link": 100 + }, + { + "name": "latent", + "type": "LATENT", + "link": 89, + "slot_index": 3 + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 93 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 94 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 95 + ], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "LayeredDiffusionCondApply" + }, + "widgets_values": [ + "SDXL, Background", + 1 + ], + "color": "#232", + "bgcolor": "#353" + }, + { + "id": 37, + "type": "LayeredDiffusionDiffApply", + "pos": [ + 457, + -37 + ], + "size": { + "0": 342.5999755859375, + "1": 162 + }, + "flags": {}, + "order": 9, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 55, + "slot_index": 0 + }, + { + "name": "cond", + "type": "CONDITIONING", + "link": 56, + "slot_index": 1 + }, + { + "name": "uncond", + "type": "CONDITIONING", + "link": 57, + "slot_index": 2 + }, + { + "name": "blended_latent", + "type": "LATENT", + "link": 98 + }, + { + "name": "latent", + "type": "LATENT", + "link": 97 + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 62 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 63 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 64 + ], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "LayeredDiffusionDiffApply" + }, + "widgets_values": [ + "SDXL, Background", + 1 + ], + "color": "#232", + "bgcolor": "#353" + }, + { + "id": 20, + "type": "PreviewImage", + "pos": [ + 1815, + 194 + ], + "size": { + "0": 611.2340087890625, + "1": 633.9354858398438 + }, + "flags": {}, + "order": 14, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 66 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + } + ], + "links": [ + [ + 3, + 4, + 1, + 6, + 0, + "CLIP" + ], + [ + 5, + 4, + 1, + 7, + 0, + "CLIP" + ], + [ + 21, + 3, + 0, + 14, + 0, + "LATENT" + ], + [ + 22, + 4, + 2, + 14, + 1, + "VAE" + ], + [ + 55, + 4, + 0, + 37, + 0, + "MODEL" + ], + [ + 56, + 6, + 0, + 
37, + 1, + "CONDITIONING" + ], + [ + 57, + 7, + 0, + 37, + 2, + "CONDITIONING" + ], + [ + 62, + 37, + 0, + 3, + 0, + "MODEL" + ], + [ + 63, + 37, + 1, + 3, + 1, + "CONDITIONING" + ], + [ + 64, + 37, + 2, + 3, + 2, + "CONDITIONING" + ], + [ + 65, + 14, + 0, + 40, + 1, + "IMAGE" + ], + [ + 66, + 40, + 0, + 20, + 0, + "IMAGE" + ], + [ + 67, + 3, + 0, + 40, + 0, + "LATENT" + ], + [ + 71, + 44, + 0, + 42, + 3, + "LATENT" + ], + [ + 74, + 42, + 0, + 47, + 0, + "LATENT" + ], + [ + 77, + 50, + 0, + 49, + 0, + "IMAGE" + ], + [ + 89, + 49, + 0, + 55, + 3, + "LATENT" + ], + [ + 93, + 55, + 0, + 42, + 0, + "MODEL" + ], + [ + 94, + 55, + 1, + 42, + 1, + "CONDITIONING" + ], + [ + 95, + 55, + 2, + 42, + 2, + "CONDITIONING" + ], + [ + 96, + 47, + 0, + 56, + 0, + "IMAGE" + ], + [ + 97, + 49, + 0, + 37, + 4, + "LATENT" + ], + [ + 98, + 42, + 0, + 37, + 3, + "LATENT" + ], + [ + 99, + 6, + 0, + 55, + 1, + "CONDITIONING" + ], + [ + 100, + 7, + 0, + 55, + 2, + "CONDITIONING" + ], + [ + 101, + 44, + 0, + 3, + 3, + "LATENT" + ], + [ + 102, + 4, + 2, + 49, + 1, + "VAE" + ], + [ + 103, + 4, + 0, + 55, + 0, + "MODEL" + ], + [ + 104, + 4, + 2, + 47, + 1, + "VAE" + ] + ], + "groups": [], + "config": {}, + "extra": {}, + "version": 0.4 +} \ No newline at end of file diff --git a/ComfyUI-layerdiffuse/examples/layer_diffusion_cond_joint_bg.json b/ComfyUI-layerdiffuse/examples/layer_diffusion_cond_joint_bg.json new file mode 100644 index 0000000000000000000000000000000000000000..efd3a5af7e4689d4aa7f5276a331c29a431cce22 --- /dev/null +++ b/ComfyUI-layerdiffuse/examples/layer_diffusion_cond_joint_bg.json @@ -0,0 +1,723 @@ +{ + "last_node_id": 53, + "last_link_id": 88, + "nodes": [ + { + "id": 33, + "type": "ImageResize+", + "pos": [ + 50, + -10 + ], + "size": { + "0": 315, + "1": 170 + }, + "flags": {}, + "order": 5, + "mode": 0, + "inputs": [ + { + "name": "image", + "type": "IMAGE", + "link": 50 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 52, + 54 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "width", + "type": "INT", + "links": null, + "shape": 3 + }, + { + "name": "height", + "type": "INT", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "ImageResize+" + }, + "widgets_values": [ + 512, + 512, + "nearest", + false + ] + }, + { + "id": 6, + "type": "CLIPTextEncode", + "pos": [ + 415, + 186 + ], + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "flags": {}, + "order": 3, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 3 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 64 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "old man sitting, high quality\n\n" + ] + }, + { + "id": 7, + "type": "CLIPTextEncode", + "pos": [ + 413, + 389 + ], + "size": { + "0": 425.27801513671875, + "1": 180.6060791015625 + }, + "flags": {}, + "order": 4, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 5 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 65 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "text, watermark" + ] + }, + { + "id": 3, + "type": "KSampler", + "pos": [ + 915, + 176 + ], + "size": { + "0": 315, + "1": 262 + }, + "flags": {}, + "order": 8, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 56 + }, + { + "name": 
"positive", + "type": "CONDITIONING", + "link": 64 + }, + { + "name": "negative", + "type": "CONDITIONING", + "link": 65 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 2 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 70, + 84 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 674865838825506, + "randomize", + 20, + 8, + "euler", + "normal", + 1 + ] + }, + { + "id": 50, + "type": "PreviewImage", + "pos": [ + 2040, + -120 + ], + "size": { + "0": 210, + "1": 246 + }, + "flags": {}, + "order": 12, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 85 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 5, + "type": "EmptyLatentImage", + "pos": [ + 475, + 666 + ], + "size": { + "0": 315, + "1": 106 + }, + "flags": {}, + "order": 0, + "mode": 0, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 2 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 512, + 512, + 4 + ] + }, + { + "id": 44, + "type": "VAEDecode", + "pos": [ + 1260, + 180 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 9, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 70 + }, + { + "name": "vae", + "type": "VAE", + "link": 88, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 75, + 83 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 4, + "type": "CheckpointLoaderSimple", + "pos": [ + 5, + 479 + ], + "size": { + "0": 315, + "1": 98 + }, + "flags": {}, + "order": 1, + "mode": 0, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 55 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 3, + 5 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 88 + ], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "realisticVisionV20_v20.safetensors" + ] + }, + { + "id": 46, + "type": "PreviewImage", + "pos": [ + 1460, + 410 + ], + "size": [ + 406.59525756835933, + 340.5699157714844 + ], + "flags": {}, + "order": 10, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 75 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 34, + "type": "PreviewImage", + "pos": [ + 471, + -337 + ], + "size": { + "0": 210, + "1": 246 + }, + "flags": {}, + "order": 6, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 52 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 37, + "type": "LayeredDiffusionCondJointApply", + "pos": [ + 429, + -13 + ], + "size": { + "0": 388, + "1": 138 + }, + "flags": {}, + "order": 7, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 55, + "slot_index": 0 + }, + { + "name": "image", + "type": "IMAGE", + "link": 54, + "slot_index": 1 + }, + { + "name": "cond", + "type": "CONDITIONING", + "link": null + }, + { + "name": "blended_cond", + "type": "CONDITIONING", + "link": null + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 56 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "LayeredDiffusionCondJointApply" + }, + "widgets_values": [ 
+ "SD15, Background, attn_sharing, Batch size (2N)" + ], + "color": "#232", + "bgcolor": "#353" + }, + { + "id": 52, + "type": "LayeredDiffusionDecodeSplit", + "pos": [ + 1544, + 177 + ], + "size": { + "0": 315, + "1": 146 + }, + "flags": {}, + "order": 11, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 84 + }, + { + "name": "images", + "type": "IMAGE", + "link": 83 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 85 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 86 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "IMAGE", + "type": "IMAGE", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "LayeredDiffusionDecodeSplit" + }, + "widgets_values": [ + 2, + "SDXL", + 16 + ], + "color": "#232", + "bgcolor": "#353" + }, + { + "id": 51, + "type": "PreviewImage", + "pos": [ + 2040, + 201 + ], + "size": { + "0": 210, + "1": 246 + }, + "flags": {}, + "order": 13, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 86 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 30, + "type": "LoadImage", + "pos": [ + -313, + -10 + ], + "size": { + "0": 315, + "1": 314 + }, + "flags": {}, + "order": 2, + "mode": 0, + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 50 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "MASK", + "type": "MASK", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "LoadImage" + }, + "widgets_values": [ + "309219693-e7e2d80e-ffbe-4724-812a-5139a88027e3.png", + "image" + ] + } + ], + "links": [ + [ + 2, + 5, + 0, + 3, + 3, + "LATENT" + ], + [ + 3, + 4, + 1, + 6, + 0, + "CLIP" + ], + [ + 5, + 4, + 1, + 7, + 0, + "CLIP" + ], + [ + 50, + 30, + 0, + 33, + 0, + "IMAGE" + ], + [ + 52, + 33, + 0, + 34, + 0, + "IMAGE" + ], + [ + 54, + 33, + 0, + 37, + 1, + "IMAGE" + ], + [ + 55, + 4, + 0, + 37, + 0, + "MODEL" + ], + [ + 56, + 37, + 0, + 3, + 0, + "MODEL" + ], + [ + 64, + 6, + 0, + 3, + 1, + "CONDITIONING" + ], + [ + 65, + 7, + 0, + 3, + 2, + "CONDITIONING" + ], + [ + 70, + 3, + 0, + 44, + 0, + "LATENT" + ], + [ + 75, + 44, + 0, + 46, + 0, + "IMAGE" + ], + [ + 83, + 44, + 0, + 52, + 1, + "IMAGE" + ], + [ + 84, + 3, + 0, + 52, + 0, + "LATENT" + ], + [ + 85, + 52, + 0, + 50, + 0, + "IMAGE" + ], + [ + 86, + 52, + 1, + 51, + 0, + "IMAGE" + ], + [ + 88, + 4, + 2, + 44, + 1, + "VAE" + ] + ], + "groups": [], + "config": {}, + "extra": {}, + "version": 0.4 +} \ No newline at end of file diff --git a/ComfyUI-layerdiffuse/examples/layer_diffusion_cond_joint_fg.json b/ComfyUI-layerdiffuse/examples/layer_diffusion_cond_joint_fg.json new file mode 100644 index 0000000000000000000000000000000000000000..641f4956de52ea430b9e90590a00fd03d80802c5 --- /dev/null +++ b/ComfyUI-layerdiffuse/examples/layer_diffusion_cond_joint_fg.json @@ -0,0 +1,480 @@ +{ + "last_node_id": 53, + "last_link_id": 90, + "nodes": [ + { + "id": 7, + "type": "CLIPTextEncode", + "pos": [ + 413, + 389 + ], + "size": { + "0": 425.27801513671875, + "1": 180.6060791015625 + }, + "flags": {}, + "order": 4, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 5 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 65 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "text, watermark" + ] + }, + { + "id": 4, + "type": "CheckpointLoaderSimple", + 
"pos": [ + 5, + 479 + ], + "size": { + "0": 315, + "1": 98 + }, + "flags": {}, + "order": 0, + "mode": 0, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 55 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 3, + 5 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 88 + ], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "realisticVisionV20_v20.safetensors" + ] + }, + { + "id": 46, + "type": "PreviewImage", + "pos": [ + 1525, + 183 + ], + "size": [ + 406.59525756835933, + 340.5699157714844 + ], + "flags": {}, + "order": 8, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 75 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 37, + "type": "LayeredDiffusionCondJointApply", + "pos": [ + 436, + -13 + ], + "size": { + "0": 388, + "1": 138 + }, + "flags": {}, + "order": 5, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 55, + "slot_index": 0 + }, + { + "name": "image", + "type": "IMAGE", + "link": 90, + "slot_index": 1 + }, + { + "name": "cond", + "type": "CONDITIONING", + "link": null + }, + { + "name": "blended_cond", + "type": "CONDITIONING", + "link": null + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 56 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "LayeredDiffusionCondJointApply" + }, + "widgets_values": [ + "SD15, Foreground, attn_sharing, Batch size (2N)" + ], + "color": "#232", + "bgcolor": "#353" + }, + { + "id": 5, + "type": "EmptyLatentImage", + "pos": [ + 465, + 671 + ], + "size": { + "0": 315, + "1": 106 + }, + "flags": {}, + "order": 1, + "mode": 0, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 2 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 512, + 512, + 4 + ] + }, + { + "id": 6, + "type": "CLIPTextEncode", + "pos": [ + 415, + 186 + ], + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "flags": {}, + "order": 3, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 3 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 64 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "\n" + ] + }, + { + "id": 3, + "type": "KSampler", + "pos": [ + 903, + 180 + ], + "size": { + "0": 315, + "1": 262 + }, + "flags": {}, + "order": 6, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 56 + }, + { + "name": "positive", + "type": "CONDITIONING", + "link": 64 + }, + { + "name": "negative", + "type": "CONDITIONING", + "link": 65 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 2 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 70 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 748570836161213, + "randomize", + 20, + 8, + "euler", + "normal", + 1 + ] + }, + { + "id": 44, + "type": "VAEDecode", + "pos": [ + 1258, + 184 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 7, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 70 + }, + { + "name": "vae", + "type": "VAE", + "link": 88, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": 
"IMAGE", + "type": "IMAGE", + "links": [ + 75 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 30, + "type": "LoadImage", + "pos": [ + 6, + 5 + ], + "size": { + "0": 315, + "1": 314 + }, + "flags": {}, + "order": 2, + "mode": 0, + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 90 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "MASK", + "type": "MASK", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "LoadImage" + }, + "widgets_values": [ + "dog (2).png", + "image" + ] + } + ], + "links": [ + [ + 2, + 5, + 0, + 3, + 3, + "LATENT" + ], + [ + 3, + 4, + 1, + 6, + 0, + "CLIP" + ], + [ + 5, + 4, + 1, + 7, + 0, + "CLIP" + ], + [ + 55, + 4, + 0, + 37, + 0, + "MODEL" + ], + [ + 56, + 37, + 0, + 3, + 0, + "MODEL" + ], + [ + 64, + 6, + 0, + 3, + 1, + "CONDITIONING" + ], + [ + 65, + 7, + 0, + 3, + 2, + "CONDITIONING" + ], + [ + 70, + 3, + 0, + 44, + 0, + "LATENT" + ], + [ + 75, + 44, + 0, + 46, + 0, + "IMAGE" + ], + [ + 88, + 4, + 2, + 44, + 1, + "VAE" + ], + [ + 90, + 30, + 0, + 37, + 1, + "IMAGE" + ] + ], + "groups": [], + "config": {}, + "extra": {}, + "version": 0.4 +} \ No newline at end of file diff --git a/ComfyUI-layerdiffuse/examples/layer_diffusion_diff_bg.json b/ComfyUI-layerdiffuse/examples/layer_diffusion_diff_bg.json new file mode 100644 index 0000000000000000000000000000000000000000..bf2c18528fdf0f9ef904d346db18683390fc48f3 --- /dev/null +++ b/ComfyUI-layerdiffuse/examples/layer_diffusion_diff_bg.json @@ -0,0 +1,750 @@ +{ + "last_node_id": 40, + "last_link_id": 67, + "nodes": [ + { + "id": 4, + "type": "CheckpointLoaderSimple", + "pos": [ + 5, + 479 + ], + "size": { + "0": 315, + "1": 98 + }, + "flags": {}, + "order": 0, + "mode": 0, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 55 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 3, + 5 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 22, + 49, + 58 + ], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "juggernautXL_v8Rundiffusion.safetensors" + ] + }, + { + "id": 7, + "type": "CLIPTextEncode", + "pos": [ + 413, + 389 + ], + "size": { + "0": 425.27801513671875, + "1": 180.6060791015625 + }, + "flags": {}, + "order": 5, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 5 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 57 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "text, watermark" + ] + }, + { + "id": 5, + "type": "EmptyLatentImage", + "pos": [ + 475, + 666 + ], + "size": { + "0": 315, + "1": 106 + }, + "flags": {}, + "order": 1, + "mode": 0, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 2 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 1024, + 1024, + 1 + ] + }, + { + "id": 39, + "type": "VAEEncode", + "pos": [ + 201, + -391 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 6, + "mode": 0, + "inputs": [ + { + "name": "pixels", + "type": "IMAGE", + "link": 59, + "slot_index": 0 + }, + { + "name": "vae", + "type": "VAE", + "link": 58, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 60 + ], + "shape": 3, + "slot_index": 0 + } + 
], + "properties": { + "Node name for S&R": "VAEEncode" + } + }, + { + "id": 29, + "type": "VAEEncode", + "pos": [ + 210, + -20 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 7, + "mode": 0, + "inputs": [ + { + "name": "pixels", + "type": "IMAGE", + "link": 53 + }, + { + "name": "vae", + "type": "VAE", + "link": 49, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 61 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEEncode" + } + }, + { + "id": 38, + "type": "LoadImage", + "pos": [ + -137, + -388 + ], + "size": { + "0": 288.47406005859375, + "1": 317.46051025390625 + }, + "flags": {}, + "order": 2, + "mode": 0, + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 59 + ], + "shape": 3 + }, + { + "name": "MASK", + "type": "MASK", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "LoadImage" + }, + "widgets_values": [ + "old_man.png", + "image" + ] + }, + { + "id": 30, + "type": "LoadImage", + "pos": [ + -146, + -22 + ], + "size": { + "0": 315, + "1": 314 + }, + "flags": {}, + "order": 3, + "mode": 0, + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 53 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "MASK", + "type": "MASK", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "LoadImage" + }, + "widgets_values": [ + "chair.png", + "image" + ] + }, + { + "id": 6, + "type": "CLIPTextEncode", + "pos": [ + 415, + 186 + ], + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "flags": {}, + "order": 4, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 3 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 56 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "an old man sitting, high quality\n\n" + ] + }, + { + "id": 14, + "type": "VAEDecode", + "pos": [ + 1286, + 187 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 10, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 21 + }, + { + "name": "vae", + "type": "VAE", + "link": 22, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 65 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 3, + "type": "KSampler", + "pos": [ + 913, + 182 + ], + "size": { + "0": 315, + "1": 262 + }, + "flags": {}, + "order": 9, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 62 + }, + { + "name": "positive", + "type": "CONDITIONING", + "link": 63 + }, + { + "name": "negative", + "type": "CONDITIONING", + "link": 64 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 2 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 21, + 67 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 462370085958750, + "fixed", + 20, + 8, + "euler", + "normal", + 1 + ] + }, + { + "id": 20, + "type": "PreviewImage", + "pos": [ + 1800, + 190 + ], + "size": { + "0": 611.2340087890625, + "1": 633.9354858398438 + }, + "flags": {}, + "order": 12, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 66 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 37, + "type": 
"LayeredDiffusionDiffApply", + "pos": [ + 457, + -37 + ], + "size": { + "0": 342.5999755859375, + "1": 162 + }, + "flags": {}, + "order": 8, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 55, + "slot_index": 0 + }, + { + "name": "cond", + "type": "CONDITIONING", + "link": 56, + "slot_index": 1 + }, + { + "name": "uncond", + "type": "CONDITIONING", + "link": 57, + "slot_index": 2 + }, + { + "name": "blended_latent", + "type": "LATENT", + "link": 60 + }, + { + "name": "latent", + "type": "LATENT", + "link": 61 + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 62 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 63 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 64 + ], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "LayeredDiffusionDiffApply" + }, + "widgets_values": [ + "SDXL, Background", + 1 + ], + "color": "#232", + "bgcolor": "#353" + }, + { + "id": 40, + "type": "LayeredDiffusionDecodeRGBA", + "pos": [ + 1533, + 189 + ], + "size": { + "0": 243.60000610351562, + "1": 102 + }, + "flags": {}, + "order": 11, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 67 + }, + { + "name": "images", + "type": "IMAGE", + "link": 65 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 66 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "LayeredDiffusionDecodeRGBA" + }, + "widgets_values": [ + "SDXL", + 16 + ], + "color": "#232", + "bgcolor": "#353" + } + ], + "links": [ + [ + 2, + 5, + 0, + 3, + 3, + "LATENT" + ], + [ + 3, + 4, + 1, + 6, + 0, + "CLIP" + ], + [ + 5, + 4, + 1, + 7, + 0, + "CLIP" + ], + [ + 21, + 3, + 0, + 14, + 0, + "LATENT" + ], + [ + 22, + 4, + 2, + 14, + 1, + "VAE" + ], + [ + 49, + 4, + 2, + 29, + 1, + "VAE" + ], + [ + 53, + 30, + 0, + 29, + 0, + "IMAGE" + ], + [ + 55, + 4, + 0, + 37, + 0, + "MODEL" + ], + [ + 56, + 6, + 0, + 37, + 1, + "CONDITIONING" + ], + [ + 57, + 7, + 0, + 37, + 2, + "CONDITIONING" + ], + [ + 58, + 4, + 2, + 39, + 1, + "VAE" + ], + [ + 59, + 38, + 0, + 39, + 0, + "IMAGE" + ], + [ + 60, + 39, + 0, + 37, + 3, + "LATENT" + ], + [ + 61, + 29, + 0, + 37, + 4, + "LATENT" + ], + [ + 62, + 37, + 0, + 3, + 0, + "MODEL" + ], + [ + 63, + 37, + 1, + 3, + 1, + "CONDITIONING" + ], + [ + 64, + 37, + 2, + 3, + 2, + "CONDITIONING" + ], + [ + 65, + 14, + 0, + 40, + 1, + "IMAGE" + ], + [ + 66, + 40, + 0, + 20, + 0, + "IMAGE" + ], + [ + 67, + 3, + 0, + 40, + 0, + "LATENT" + ] + ], + "groups": [], + "config": {}, + "extra": {}, + "version": 0.4 +} \ No newline at end of file diff --git a/ComfyUI-layerdiffuse/examples/layer_diffusion_diff_bg_stop_at.json b/ComfyUI-layerdiffuse/examples/layer_diffusion_diff_bg_stop_at.json new file mode 100644 index 0000000000000000000000000000000000000000..78e9d1cfab387356284a3e7b69a0186c207091a0 --- /dev/null +++ b/ComfyUI-layerdiffuse/examples/layer_diffusion_diff_bg_stop_at.json @@ -0,0 +1,877 @@ +{ + "last_node_id": 45, + "last_link_id": 86, + "nodes": [ + { + "id": 5, + "type": "EmptyLatentImage", + "pos": [ + 475, + 666 + ], + "size": { + "0": 315, + "1": 106 + }, + "flags": {}, + "order": 0, + "mode": 0, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 2 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 1024, + 1024, + 1 + ] + }, + 
{ + "id": 38, + "type": "LoadImage", + "pos": [ + -137, + -388 + ], + "size": { + "0": 288.47406005859375, + "1": 317.46051025390625 + }, + "flags": {}, + "order": 1, + "mode": 0, + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 59 + ], + "shape": 3 + }, + { + "name": "MASK", + "type": "MASK", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "LoadImage" + }, + "widgets_values": [ + "blended.png", + "image" + ] + }, + { + "id": 39, + "type": "VAEEncode", + "pos": [ + 201, + -391 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 7, + "mode": 0, + "inputs": [ + { + "name": "pixels", + "type": "IMAGE", + "link": 59, + "slot_index": 0 + }, + { + "name": "vae", + "type": "VAE", + "link": 58, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 60 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEEncode" + } + }, + { + "id": 29, + "type": "VAEEncode", + "pos": [ + 210, + -20 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 6, + "mode": 0, + "inputs": [ + { + "name": "pixels", + "type": "IMAGE", + "link": 53 + }, + { + "name": "vae", + "type": "VAE", + "link": 49, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 61 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEEncode" + } + }, + { + "id": 30, + "type": "LoadImage", + "pos": [ + -146, + -22 + ], + "size": { + "0": 315, + "1": 314 + }, + "flags": {}, + "order": 2, + "mode": 0, + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 53 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "MASK", + "type": "MASK", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "LoadImage" + }, + "widgets_values": [ + "dog.png", + "image" + ] + }, + { + "id": 6, + "type": "CLIPTextEncode", + "pos": [ + 415, + 186 + ], + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "flags": {}, + "order": 4, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 3 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 56, + 81 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "a room, high quality\n\n" + ] + }, + { + "id": 7, + "type": "CLIPTextEncode", + "pos": [ + 413, + 389 + ], + "size": { + "0": 425.27801513671875, + "1": 180.6060791015625 + }, + "flags": {}, + "order": 5, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 5 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 57, + 82 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "text, watermark" + ] + }, + { + "id": 42, + "type": "PreviewImage", + "pos": [ + 1830, + -500 + ], + "size": { + "0": 611.2340087890625, + "1": 633.9354858398438 + }, + "flags": {}, + "order": 12, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 76 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 20, + "type": "PreviewImage", + "pos": [ + 1830, + 186 + ], + "size": { + "0": 611.2340087890625, + "1": 633.9354858398438 + }, + "flags": {}, + "order": 14, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 29 + } + ], + "properties": { + "Node 
name for S&R": "PreviewImage" + } + }, + { + "id": 4, + "type": "CheckpointLoaderSimple", + "pos": [ + 5, + 479 + ], + "size": { + "0": 315, + "1": 98 + }, + "flags": {}, + "order": 3, + "mode": 0, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 55, + 80 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 3, + 5 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 22, + 49, + 58, + 75 + ], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "juggernautXL_v8Rundiffusion.safetensors" + ] + }, + { + "id": 41, + "type": "VAEDecode", + "pos": [ + 1600, + -500 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 10, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 77 + }, + { + "name": "vae", + "type": "VAE", + "link": 75, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 76 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 14, + "type": "VAEDecode", + "pos": [ + 1588, + 186 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 13, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 85 + }, + { + "name": "vae", + "type": "VAE", + "link": 22, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 29 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 3, + "type": "KSampler", + "pos": [ + 913, + 181 + ], + "size": { + "0": 315, + "1": 262 + }, + "flags": {}, + "order": 9, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 62 + }, + { + "name": "positive", + "type": "CONDITIONING", + "link": 63 + }, + { + "name": "negative", + "type": "CONDITIONING", + "link": 64 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 2 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 77, + 86 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 462370085958750, + "fixed", + 20, + 8, + "euler", + "normal", + 1 + ] + }, + { + "id": 45, + "type": "KSamplerAdvanced", + "pos": [ + 1249, + 179 + ], + "size": { + "0": 315, + "1": 334 + }, + "flags": {}, + "order": 11, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 80 + }, + { + "name": "positive", + "type": "CONDITIONING", + "link": 81 + }, + { + "name": "negative", + "type": "CONDITIONING", + "link": 82 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 86 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 85 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "enable", + 0, + "fixed", + 20, + 8, + "euler", + "normal", + 10, + 10000, + "disable" + ] + }, + { + "id": 37, + "type": "LayeredDiffusionDiffApply", + "pos": [ + 456, + -44 + ], + "size": { + "0": 342.5999755859375, + "1": 186 + }, + "flags": {}, + "order": 8, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 55, + "slot_index": 0 + }, + { + "name": "cond", + "type": "CONDITIONING", + "link": 56, + "slot_index": 1 + }, + { + "name": "uncond", + "type": "CONDITIONING", + "link": 57, + "slot_index": 2 + }, + { + "name": 
"blended_latent", + "type": "LATENT", + "link": 60 + }, + { + "name": "latent", + "type": "LATENT", + "link": 61 + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 62 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 63 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 64 + ], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "LayeredDiffusionDiffApply" + }, + "widgets_values": [ + "SDXL, Foreground", + 1 + ], + "color": "#232", + "bgcolor": "#353" + } + ], + "links": [ + [ + 2, + 5, + 0, + 3, + 3, + "LATENT" + ], + [ + 3, + 4, + 1, + 6, + 0, + "CLIP" + ], + [ + 5, + 4, + 1, + 7, + 0, + "CLIP" + ], + [ + 22, + 4, + 2, + 14, + 1, + "VAE" + ], + [ + 29, + 14, + 0, + 20, + 0, + "IMAGE" + ], + [ + 49, + 4, + 2, + 29, + 1, + "VAE" + ], + [ + 53, + 30, + 0, + 29, + 0, + "IMAGE" + ], + [ + 55, + 4, + 0, + 37, + 0, + "MODEL" + ], + [ + 56, + 6, + 0, + 37, + 1, + "CONDITIONING" + ], + [ + 57, + 7, + 0, + 37, + 2, + "CONDITIONING" + ], + [ + 58, + 4, + 2, + 39, + 1, + "VAE" + ], + [ + 59, + 38, + 0, + 39, + 0, + "IMAGE" + ], + [ + 60, + 39, + 0, + 37, + 3, + "LATENT" + ], + [ + 61, + 29, + 0, + 37, + 4, + "LATENT" + ], + [ + 62, + 37, + 0, + 3, + 0, + "MODEL" + ], + [ + 63, + 37, + 1, + 3, + 1, + "CONDITIONING" + ], + [ + 64, + 37, + 2, + 3, + 2, + "CONDITIONING" + ], + [ + 75, + 4, + 2, + 41, + 1, + "VAE" + ], + [ + 76, + 41, + 0, + 42, + 0, + "IMAGE" + ], + [ + 77, + 3, + 0, + 41, + 0, + "LATENT" + ], + [ + 80, + 4, + 0, + 45, + 0, + "MODEL" + ], + [ + 81, + 6, + 0, + 45, + 1, + "CONDITIONING" + ], + [ + 82, + 7, + 0, + 45, + 2, + "CONDITIONING" + ], + [ + 85, + 45, + 0, + 14, + 0, + "LATENT" + ], + [ + 86, + 3, + 0, + 45, + 3, + "LATENT" + ] + ], + "groups": [], + "config": {}, + "extra": {}, + "version": 0.4 +} \ No newline at end of file diff --git a/ComfyUI-layerdiffuse/examples/layer_diffusion_diff_fg.json b/ComfyUI-layerdiffuse/examples/layer_diffusion_diff_fg.json new file mode 100644 index 0000000000000000000000000000000000000000..229ba53310d48b7c53a355d8bc213cc9f1b4c833 --- /dev/null +++ b/ComfyUI-layerdiffuse/examples/layer_diffusion_diff_fg.json @@ -0,0 +1,686 @@ +{ + "last_node_id": 39, + "last_link_id": 64, + "nodes": [ + { + "id": 4, + "type": "CheckpointLoaderSimple", + "pos": [ + 5, + 479 + ], + "size": { + "0": 315, + "1": 98 + }, + "flags": {}, + "order": 0, + "mode": 0, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 55 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 3, + 5 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 22, + 49, + 58 + ], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "juggernautXL_v8Rundiffusion.safetensors" + ] + }, + { + "id": 7, + "type": "CLIPTextEncode", + "pos": [ + 413, + 389 + ], + "size": { + "0": 425.27801513671875, + "1": 180.6060791015625 + }, + "flags": {}, + "order": 5, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 5 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 57 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "text, watermark" + ] + }, + { + "id": 5, + "type": "EmptyLatentImage", + "pos": [ + 475, + 666 + ], + "size": { + "0": 315, + "1": 106 + 
}, + "flags": {}, + "order": 1, + "mode": 0, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 2 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 1024, + 1024, + 1 + ] + }, + { + "id": 14, + "type": "VAEDecode", + "pos": [ + 1286, + 187 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 10, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 21 + }, + { + "name": "vae", + "type": "VAE", + "link": 22, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 29 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 20, + "type": "PreviewImage", + "pos": [ + 1558, + 189 + ], + "size": { + "0": 611.2340087890625, + "1": 633.9354858398438 + }, + "flags": {}, + "order": 11, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 29 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 3, + "type": "KSampler", + "pos": [ + 913, + 181 + ], + "size": { + "0": 315, + "1": 262 + }, + "flags": {}, + "order": 9, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 62 + }, + { + "name": "positive", + "type": "CONDITIONING", + "link": 63 + }, + { + "name": "negative", + "type": "CONDITIONING", + "link": 64 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 2 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 21 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 462370085958750, + "fixed", + 20, + 8, + "euler", + "normal", + 1 + ] + }, + { + "id": 38, + "type": "LoadImage", + "pos": [ + -137, + -388 + ], + "size": { + "0": 288.47406005859375, + "1": 317.46051025390625 + }, + "flags": {}, + "order": 2, + "mode": 0, + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 59 + ], + "shape": 3 + }, + { + "name": "MASK", + "type": "MASK", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "LoadImage" + }, + "widgets_values": [ + "blended.png", + "image" + ] + }, + { + "id": 39, + "type": "VAEEncode", + "pos": [ + 201, + -391 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 6, + "mode": 0, + "inputs": [ + { + "name": "pixels", + "type": "IMAGE", + "link": 59, + "slot_index": 0 + }, + { + "name": "vae", + "type": "VAE", + "link": 58, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 60 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEEncode" + } + }, + { + "id": 29, + "type": "VAEEncode", + "pos": [ + 210, + -20 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 7, + "mode": 0, + "inputs": [ + { + "name": "pixels", + "type": "IMAGE", + "link": 53 + }, + { + "name": "vae", + "type": "VAE", + "link": 49, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 61 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEEncode" + } + }, + { + "id": 6, + "type": "CLIPTextEncode", + "pos": [ + 415, + 186 + ], + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "flags": {}, + "order": 4, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 3 + } + ], + "outputs": [ + { + "name": 
"CONDITIONING", + "type": "CONDITIONING", + "links": [ + 56 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "a room, high quality\n\n" + ] + }, + { + "id": 30, + "type": "LoadImage", + "pos": [ + -146, + -22 + ], + "size": { + "0": 315, + "1": 314 + }, + "flags": {}, + "order": 3, + "mode": 0, + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 53 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "MASK", + "type": "MASK", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "LoadImage" + }, + "widgets_values": [ + "dog.png", + "image" + ] + }, + { + "id": 37, + "type": "LayeredDiffusionDiffApply", + "pos": [ + 456, + -44 + ], + "size": { + "0": 342.5999755859375, + "1": 162 + }, + "flags": {}, + "order": 8, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 55, + "slot_index": 0 + }, + { + "name": "cond", + "type": "CONDITIONING", + "link": 56, + "slot_index": 1 + }, + { + "name": "uncond", + "type": "CONDITIONING", + "link": 57, + "slot_index": 2 + }, + { + "name": "blended_latent", + "type": "LATENT", + "link": 60 + }, + { + "name": "latent", + "type": "LATENT", + "link": 61 + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 62 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 63 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 64 + ], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "LayeredDiffusionDiffApply" + }, + "widgets_values": [ + "SDXL, Foreground", + 1 + ], + "color": "#232", + "bgcolor": "#353" + } + ], + "links": [ + [ + 2, + 5, + 0, + 3, + 3, + "LATENT" + ], + [ + 3, + 4, + 1, + 6, + 0, + "CLIP" + ], + [ + 5, + 4, + 1, + 7, + 0, + "CLIP" + ], + [ + 21, + 3, + 0, + 14, + 0, + "LATENT" + ], + [ + 22, + 4, + 2, + 14, + 1, + "VAE" + ], + [ + 29, + 14, + 0, + 20, + 0, + "IMAGE" + ], + [ + 49, + 4, + 2, + 29, + 1, + "VAE" + ], + [ + 53, + 30, + 0, + 29, + 0, + "IMAGE" + ], + [ + 55, + 4, + 0, + 37, + 0, + "MODEL" + ], + [ + 56, + 6, + 0, + 37, + 1, + "CONDITIONING" + ], + [ + 57, + 7, + 0, + 37, + 2, + "CONDITIONING" + ], + [ + 58, + 4, + 2, + 39, + 1, + "VAE" + ], + [ + 59, + 38, + 0, + 39, + 0, + "IMAGE" + ], + [ + 60, + 39, + 0, + 37, + 3, + "LATENT" + ], + [ + 61, + 29, + 0, + 37, + 4, + "LATENT" + ], + [ + 62, + 37, + 0, + 3, + 0, + "MODEL" + ], + [ + 63, + 37, + 1, + 3, + 1, + "CONDITIONING" + ], + [ + 64, + 37, + 2, + 3, + 2, + "CONDITIONING" + ] + ], + "groups": [], + "config": {}, + "extra": {}, + "version": 0.4 +} \ No newline at end of file diff --git a/ComfyUI-layerdiffuse/examples/layer_diffusion_fg_example.json b/ComfyUI-layerdiffuse/examples/layer_diffusion_fg_example.json new file mode 100644 index 0000000000000000000000000000000000000000..23be5b8edfd173c12c964f43566b4913549cbc45 --- /dev/null +++ b/ComfyUI-layerdiffuse/examples/layer_diffusion_fg_example.json @@ -0,0 +1,733 @@ +{ + "last_node_id": 29, + "last_link_id": 40, + "nodes": [ + { + "id": 7, + "type": "CLIPTextEncode", + "pos": [ + 413, + 389 + ], + "size": { + "0": 425.27801513671875, + "1": 180.6060791015625 + }, + "flags": {}, + "order": 4, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 5 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 6 + ], + "slot_index": 0 + } + ], + "properties": { + 
"Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "text, watermark" + ] + }, + { + "id": 6, + "type": "CLIPTextEncode", + "pos": [ + 415, + 186 + ], + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "flags": {}, + "order": 3, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 3 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 4 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "beautiful scenery nature glass bottle landscape, , purple galaxy bottle," + ] + }, + { + "id": 4, + "type": "CheckpointLoaderSimple", + "pos": [ + 5, + 479 + ], + "size": { + "0": 315, + "1": 98 + }, + "flags": {}, + "order": 0, + "mode": 0, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 18 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 3, + 5 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 22 + ], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "juggernautXL_v8Rundiffusion.safetensors" + ] + }, + { + "id": 14, + "type": "VAEDecode", + "pos": [ + 1275, + 198 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 6, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 21 + }, + { + "name": "vae", + "type": "VAE", + "link": 22, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 24, + 29 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 20, + "type": "PreviewImage", + "pos": [ + 1547, + 472 + ], + "size": { + "0": 289.6058349609375, + "1": 299.6588134765625 + }, + "flags": {}, + "order": 8, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 29 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 5, + "type": "EmptyLatentImage", + "pos": [ + 475, + 707 + ], + "size": { + "0": 315, + "1": 106 + }, + "flags": {}, + "order": 1, + "mode": 0, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 2 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 1024, + 1024, + 1 + ] + }, + { + "id": 3, + "type": "KSampler", + "pos": [ + 911, + 198 + ], + "size": { + "0": 315, + "1": 262 + }, + "flags": {}, + "order": 5, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 19 + }, + { + "name": "positive", + "type": "CONDITIONING", + "link": 4 + }, + { + "name": "negative", + "type": "CONDITIONING", + "link": 6 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 2 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 21, + 23 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 984560333937969, + "randomize", + 20, + 8, + "euler", + "normal", + 1 + ] + }, + { + "id": 25, + "type": "PreviewImage", + "pos": [ + 2244, + 194 + ], + "size": { + "0": 289.6058349609375, + "1": 299.6588134765625 + }, + "flags": {}, + "order": 12, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 33 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 24, + "type": "MaskToImage", + "pos": [ + 1921, + 192 + ], + "size": { + "0": 
210, + "1": 26 + }, + "flags": {}, + "order": 10, + "mode": 0, + "inputs": [ + { + "name": "mask", + "type": "MASK", + "link": 32 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 33 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "MaskToImage" + } + }, + { + "id": 23, + "type": "PreviewImage", + "pos": [ + 1965, + 479 + ], + "size": { + "0": 289.6058349609375, + "1": 299.6588134765625 + }, + "flags": {}, + "order": 9, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 31 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 27, + "type": "PreviewImage", + "pos": [ + 2243, + -164 + ], + "size": { + "0": 289.6058349609375, + "1": 299.6588134765625 + }, + "flags": {}, + "order": 14, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 40 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 15, + "type": "LayeredDiffusionDecode", + "pos": [ + 1586, + 195 + ], + "size": { + "0": 210, + "1": 102 + }, + "flags": {}, + "order": 7, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 23 + }, + { + "name": "images", + "type": "IMAGE", + "link": 24 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 31, + 37 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "MASK", + "type": "MASK", + "links": [ + 32, + 38 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LayeredDiffusionDecode" + }, + "widgets_values": [ + "SDXL", + 16 + ] + }, + { + "id": 28, + "type": "JoinImageWithAlpha", + "pos": [ + 1928, + -59 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 13, + "mode": 0, + "inputs": [ + { + "name": "image", + "type": "IMAGE", + "link": 37 + }, + { + "name": "alpha", + "type": "MASK", + "link": 39 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 40 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "JoinImageWithAlpha" + } + }, + { + "id": 29, + "type": "InvertMask", + "pos": [ + 1931, + 44 + ], + "size": { + "0": 210, + "1": 26 + }, + "flags": {}, + "order": 11, + "mode": 0, + "inputs": [ + { + "name": "mask", + "type": "MASK", + "link": 38 + } + ], + "outputs": [ + { + "name": "MASK", + "type": "MASK", + "links": [ + 39 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "InvertMask" + } + }, + { + "id": 13, + "type": "LayeredDiffusionApply", + "pos": [ + 468, + -2 + ], + "size": { + "0": 327.8314208984375, + "1": 106.42147827148438 + }, + "flags": {}, + "order": 2, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 18 + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 19 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "LayeredDiffusionApply" + }, + "widgets_values": [ + "SDXL, Conv Injection", + 1 + ] + } + ], + "links": [ + [ + 2, + 5, + 0, + 3, + 3, + "LATENT" + ], + [ + 3, + 4, + 1, + 6, + 0, + "CLIP" + ], + [ + 4, + 6, + 0, + 3, + 1, + "CONDITIONING" + ], + [ + 5, + 4, + 1, + 7, + 0, + "CLIP" + ], + [ + 6, + 7, + 0, + 3, + 2, + "CONDITIONING" + ], + [ + 18, + 4, + 0, + 13, + 0, + "MODEL" + ], + [ + 19, + 13, + 0, + 3, + 0, + "MODEL" + ], + [ + 21, + 3, + 0, + 14, + 0, + "LATENT" + ], + [ + 22, + 4, + 2, + 14, + 1, + "VAE" + ], + [ + 23, + 3, + 0, + 15, + 0, + "LATENT" + ], + [ + 24, + 14, + 0, + 
15, + 1, + "IMAGE" + ], + [ + 29, + 14, + 0, + 20, + 0, + "IMAGE" + ], + [ + 31, + 15, + 0, + 23, + 0, + "IMAGE" + ], + [ + 32, + 15, + 1, + 24, + 0, + "MASK" + ], + [ + 33, + 24, + 0, + 25, + 0, + "IMAGE" + ], + [ + 37, + 15, + 0, + 28, + 0, + "IMAGE" + ], + [ + 38, + 15, + 1, + 29, + 0, + "MASK" + ], + [ + 39, + 29, + 0, + 28, + 1, + "MASK" + ], + [ + 40, + 28, + 0, + 27, + 0, + "IMAGE" + ] + ], + "groups": [], + "config": {}, + "extra": {}, + "version": 0.4 +} \ No newline at end of file diff --git a/ComfyUI-layerdiffuse/examples/layer_diffusion_fg_example_rgba.json b/ComfyUI-layerdiffuse/examples/layer_diffusion_fg_example_rgba.json new file mode 100644 index 0000000000000000000000000000000000000000..8294d0e1c7f8e94f238374fe0e72aebe85cf015a --- /dev/null +++ b/ComfyUI-layerdiffuse/examples/layer_diffusion_fg_example_rgba.json @@ -0,0 +1,511 @@ +{ + "last_node_id": 36, + "last_link_id": 51, + "nodes": [ + { + "id": 7, + "type": "CLIPTextEncode", + "pos": [ + 413, + 389 + ], + "size": { + "0": 425.27801513671875, + "1": 180.6060791015625 + }, + "flags": {}, + "order": 4, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 5 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 6 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "text, watermark" + ] + }, + { + "id": 6, + "type": "CLIPTextEncode", + "pos": [ + 415, + 186 + ], + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "flags": {}, + "order": 3, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 3 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 4 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "beautiful scenery nature glass bottle landscape, , purple galaxy bottle," + ] + }, + { + "id": 4, + "type": "CheckpointLoaderSimple", + "pos": [ + 5, + 479 + ], + "size": { + "0": 315, + "1": 98 + }, + "flags": {}, + "order": 0, + "mode": 0, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 18 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 3, + 5 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 22 + ], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "juggernautXL_v8Rundiffusion.safetensors" + ] + }, + { + "id": 14, + "type": "VAEDecode", + "pos": [ + 1275, + 198 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 6, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 21 + }, + { + "name": "vae", + "type": "VAE", + "link": 22, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 29, + 50 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 3, + "type": "KSampler", + "pos": [ + 911, + 198 + ], + "size": { + "0": 315, + "1": 262 + }, + "flags": {}, + "order": 5, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 19 + }, + { + "name": "positive", + "type": "CONDITIONING", + "link": 4 + }, + { + "name": "negative", + "type": "CONDITIONING", + "link": 6 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 2 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 21, + 
49 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 1029477926308287, + "randomize", + 20, + 8, + "euler", + "normal", + 1 + ] + }, + { + "id": 5, + "type": "EmptyLatentImage", + "pos": [ + 480, + 691 + ], + "size": { + "0": 315, + "1": 106 + }, + "flags": {}, + "order": 1, + "mode": 0, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 2 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 1024, + 1024, + 1 + ] + }, + { + "id": 36, + "type": "LayeredDiffusionDecodeRGBA", + "pos": [ + 1589, + 199 + ], + "size": { + "0": 243.60000610351562, + "1": 102 + }, + "flags": {}, + "order": 8, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 49 + }, + { + "name": "images", + "type": "IMAGE", + "link": 50 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 51 + ], + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "LayeredDiffusionDecodeRGBA" + }, + "widgets_values": [ + "SDXL", + 16 + ] + }, + { + "id": 27, + "type": "PreviewImage", + "pos": [ + 1930, + 197 + ], + "size": { + "0": 289.6058349609375, + "1": 299.6588134765625 + }, + "flags": {}, + "order": 9, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 51, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 20, + "type": "PreviewImage", + "pos": [ + 1570, + 365 + ], + "size": { + "0": 289.6058349609375, + "1": 299.6588134765625 + }, + "flags": {}, + "order": 7, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 29 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 13, + "type": "LayeredDiffusionApply", + "pos": [ + 468, + -2 + ], + "size": { + "0": 327.8314208984375, + "1": 106.42147827148438 + }, + "flags": {}, + "order": 2, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 18 + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 19 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "LayeredDiffusionApply" + }, + "widgets_values": [ + "SDXL, Conv Injection", + 1 + ] + } + ], + "links": [ + [ + 2, + 5, + 0, + 3, + 3, + "LATENT" + ], + [ + 3, + 4, + 1, + 6, + 0, + "CLIP" + ], + [ + 4, + 6, + 0, + 3, + 1, + "CONDITIONING" + ], + [ + 5, + 4, + 1, + 7, + 0, + "CLIP" + ], + [ + 6, + 7, + 0, + 3, + 2, + "CONDITIONING" + ], + [ + 18, + 4, + 0, + 13, + 0, + "MODEL" + ], + [ + 19, + 13, + 0, + 3, + 0, + "MODEL" + ], + [ + 21, + 3, + 0, + 14, + 0, + "LATENT" + ], + [ + 22, + 4, + 2, + 14, + 1, + "VAE" + ], + [ + 29, + 14, + 0, + 20, + 0, + "IMAGE" + ], + [ + 49, + 3, + 0, + 36, + 0, + "LATENT" + ], + [ + 50, + 14, + 0, + 36, + 1, + "IMAGE" + ], + [ + 51, + 36, + 0, + 27, + 0, + "IMAGE" + ] + ], + "groups": [], + "config": {}, + "extra": {}, + "version": 0.4 +} \ No newline at end of file diff --git a/ComfyUI-layerdiffuse/examples/layer_diffusion_joint.json b/ComfyUI-layerdiffuse/examples/layer_diffusion_joint.json new file mode 100644 index 0000000000000000000000000000000000000000..d95faceb222263271be7f529d8a5e6691c217d25 --- /dev/null +++ b/ComfyUI-layerdiffuse/examples/layer_diffusion_joint.json @@ -0,0 +1,703 @@ +{ + "last_node_id": 27, + "last_link_id": 42, + "nodes": [ + { + "id": 7, + "type": "CLIPTextEncode", + "pos": [ + 413, + 389 + ], + "size": { + "0": 425.27801513671875, + "1": 
180.6060791015625 + }, + "flags": {}, + "order": 3, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 5 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 6 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "text, watermark" + ] + }, + { + "id": 3, + "type": "KSampler", + "pos": [ + 891, + 192 + ], + "size": { + "0": 315, + "1": 262 + }, + "flags": {}, + "order": 7, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 32 + }, + { + "name": "positive", + "type": "CONDITIONING", + "link": 4 + }, + { + "name": "negative", + "type": "CONDITIONING", + "link": 6 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 2 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 21, + 33 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 960762378448318, + "randomize", + 20, + 8, + "euler", + "normal", + 1 + ] + }, + { + "id": 14, + "type": "VAEDecode", + "pos": [ + 1275, + 198 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 8, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 21 + }, + { + "name": "vae", + "type": "VAE", + "link": 22, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 34 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 4, + "type": "CheckpointLoaderSimple", + "pos": [ + 5, + 479 + ], + "size": { + "0": 315, + "1": 98 + }, + "flags": {}, + "order": 0, + "mode": 0, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 31 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 3, + 5, + 41, + 42 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 22 + ], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "realisticVisionV20_v20.safetensors" + ] + }, + { + "id": 27, + "type": "CLIPTextEncode", + "pos": [ + -20, + -20 + ], + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "flags": {}, + "order": 5, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 42, + "slot_index": 0 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 40 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "a cozy room" + ] + }, + { + "id": 21, + "type": "LayeredDiffusionJointApply", + "pos": [ + 469, + -9 + ], + "size": { + "0": 315, + "1": 118 + }, + "flags": {}, + "order": 6, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 31, + "slot_index": 0 + }, + { + "name": "fg_cond", + "type": "CONDITIONING", + "link": 39 + }, + { + "name": "bg_cond", + "type": "CONDITIONING", + "link": 40 + }, + { + "name": "blended_cond", + "type": "CONDITIONING", + "link": 38 + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 32 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "LayeredDiffusionJointApply" + }, + "widgets_values": [ + "SD15, attn_sharing, Batch size (3N)" + ], + "color": "#232", + "bgcolor": "#353" + }, + { + "id": 22, + "type": "LayeredDiffusionDecodeSplit", + "pos": [ + 1534, + 193 + ], + "size": { 
+ "0": 315, + "1": 146 + }, + "flags": {}, + "order": 9, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 33, + "slot_index": 0 + }, + { + "name": "images", + "type": "IMAGE", + "link": 34 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 35 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 36 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 37 + ], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "LayeredDiffusionDecodeSplit" + }, + "widgets_values": [ + 3, + "SD15", + 16 + ], + "color": "#232", + "bgcolor": "#353" + }, + { + "id": 5, + "type": "EmptyLatentImage", + "pos": [ + 466, + 645 + ], + "size": { + "0": 315, + "1": 106 + }, + "flags": {}, + "order": 1, + "mode": 0, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 2 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 512, + 512, + 6 + ] + }, + { + "id": 26, + "type": "CLIPTextEncode", + "pos": [ + -20, + -235 + ], + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "flags": {}, + "order": 4, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 41, + "slot_index": 0 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 39 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "a sitting dog" + ] + }, + { + "id": 6, + "type": "CLIPTextEncode", + "pos": [ + 413, + 172 + ], + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "flags": {}, + "order": 2, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 3 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 4, + 38 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "A dog sitting in a cozy room" + ] + }, + { + "id": 23, + "type": "PreviewImage", + "pos": [ + 1931, + -98 + ], + "size": [ + 522.1710021972658, + 259.90739746093755 + ], + "flags": {}, + "order": 10, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 35 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 24, + "type": "PreviewImage", + "pos": [ + 1933, + 222 + ], + "size": [ + 517.9710037231448, + 258.9074523925782 + ], + "flags": {}, + "order": 11, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 36 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 25, + "type": "PreviewImage", + "pos": [ + 1933, + 536 + ], + "size": [ + 516.8710037231444, + 270.5074523925782 + ], + "flags": {}, + "order": 12, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 37 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + } + ], + "links": [ + [ + 2, + 5, + 0, + 3, + 3, + "LATENT" + ], + [ + 3, + 4, + 1, + 6, + 0, + "CLIP" + ], + [ + 4, + 6, + 0, + 3, + 1, + "CONDITIONING" + ], + [ + 5, + 4, + 1, + 7, + 0, + "CLIP" + ], + [ + 6, + 7, + 0, + 3, + 2, + "CONDITIONING" + ], + [ + 21, + 3, + 0, + 14, + 0, + "LATENT" + ], + [ + 22, + 4, + 2, + 14, + 1, + "VAE" + ], + [ + 31, + 4, + 0, + 21, + 0, + "MODEL" + ], + [ + 32, + 21, + 0, + 3, + 0, + "MODEL" + ], + [ + 33, + 3, + 0, + 22, + 0, + "LATENT" 
+ ], + [ + 34, + 14, + 0, + 22, + 1, + "IMAGE" + ], + [ + 35, + 22, + 0, + 23, + 0, + "IMAGE" + ], + [ + 36, + 22, + 1, + 24, + 0, + "IMAGE" + ], + [ + 37, + 22, + 2, + 25, + 0, + "IMAGE" + ], + [ + 38, + 6, + 0, + 21, + 3, + "CONDITIONING" + ], + [ + 39, + 26, + 0, + 21, + 1, + "CONDITIONING" + ], + [ + 40, + 27, + 0, + 21, + 2, + "CONDITIONING" + ], + [ + 41, + 4, + 1, + 26, + 0, + "CLIP" + ], + [ + 42, + 4, + 1, + 27, + 0, + "CLIP" + ] + ], + "groups": [], + "config": {}, + "extra": {}, + "version": 0.4 +} \ No newline at end of file diff --git a/ComfyUI-layerdiffuse/layered_diffusion.py b/ComfyUI-layerdiffuse/layered_diffusion.py new file mode 100644 index 0000000000000000000000000000000000000000..3ab1d8e8e7cfc46b1659fd6221bcdd863b75a4ba --- /dev/null +++ b/ComfyUI-layerdiffuse/layered_diffusion.py @@ -0,0 +1,631 @@ +import os +from enum import Enum +import torch +import copy +from typing import Optional, List +from dataclasses import dataclass + +import folder_paths +import comfy.model_management +import comfy.model_base +import comfy.supported_models +import comfy.supported_models_base +from comfy.model_patcher import ModelPatcher +from folder_paths import get_folder_paths +from comfy.utils import load_torch_file +from comfy_extras.nodes_compositing import JoinImageWithAlpha +from comfy.conds import CONDRegular +from .lib_layerdiffusion.utils import ( +# load_file_from_url, + to_lora_patch_dict, +) +from .lib_layerdiffusion.models import TransparentVAEDecoder +from .lib_layerdiffusion.attention_sharing import AttentionSharingPatcher +from .lib_layerdiffusion.enums import StableDiffusionVersion + +from load_file_from_url import load_file_from_url + +def get_layer_model_root(): + if "layer_model" in folder_paths.folder_names_and_paths: + layer_model_root = get_folder_paths("layer_model")[0] + else: + layer_model_root = os.path.join(folder_paths.models_dir, "layer_model") + return layer_model_root + +load_layer_model_state_dict = load_torch_file + + +class LayeredDiffusionDecode: + """ + Decode alpha channel value from pixel value. + [B, C=3, H, W] => [B, C=4, H, W] + Outputs RGB image + Alpha mask. + """ + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "samples": ("LATENT",), + "images": ("IMAGE",), + "sd_version": ( + [ + StableDiffusionVersion.SD1x.value, + StableDiffusionVersion.SDXL.value, + ], + { + "default": StableDiffusionVersion.SDXL.value, + }, + ), + "sub_batch_size": ( + "INT", + {"default": 16, "min": 1, "max": 4096, "step": 1}, + ), + }, + } + + RETURN_TYPES = ("IMAGE", "MASK") + FUNCTION = "decode" + CATEGORY = "layer_diffuse" + + def __init__(self) -> None: + self.vae_transparent_decoder = {} + + def decode(self, samples, images, sd_version: str, sub_batch_size: int): + """ + sub_batch_size: How many images to decode in a single pass. + See https://github.com/huchenlei/ComfyUI-layerdiffuse/pull/4 for more + context. 
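+
+        A rough node-level usage sketch (the variable names are illustrative
+        only; they are not defined in this module):
+
+            image, mask = LayeredDiffusionDecode().decode(
+                samples=latent,        # LATENT dict produced by a KSampler node
+                images=vae_decoded,    # IMAGE tensor produced by VAEDecode
+                sd_version="SDXL",
+                sub_batch_size=16,
+            )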
+ """ + sd_version = StableDiffusionVersion(sd_version) + if sd_version == StableDiffusionVersion.SD1x: + url = "https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_sd15_vae_transparent_decoder.safetensors" + file_name = "layer_sd15_vae_transparent_decoder.safetensors" + elif sd_version == StableDiffusionVersion.SDXL: + url = "https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/vae_transparent_decoder.safetensors" + file_name = "vae_transparent_decoder.safetensors" + + if not self.vae_transparent_decoder.get(sd_version): + model_path = load_file_from_url( + url=url, model_dir=get_layer_model_root(), file_name=file_name + ) + self.vae_transparent_decoder[sd_version] = TransparentVAEDecoder( + load_torch_file(model_path), + device=comfy.model_management.get_torch_device(), + dtype=( + torch.float16 + if comfy.model_management.should_use_fp16() + else torch.float32 + ), + ) + pixel = images.movedim(-1, 1) # [B, H, W, C] => [B, C, H, W] + + # Decoder requires dimension to be 64-aligned. + B, C, H, W = pixel.shape + assert H % 64 == 0, f"Height({H}) is not multiple of 64." + assert W % 64 == 0, f"Height({W}) is not multiple of 64." + + decoded = [] + for start_idx in range(0, samples["samples"].shape[0], sub_batch_size): + decoded.append( + self.vae_transparent_decoder[sd_version].decode_pixel( + pixel[start_idx : start_idx + sub_batch_size], + samples["samples"][start_idx : start_idx + sub_batch_size], + ) + ) + pixel_with_alpha = torch.cat(decoded, dim=0) + + # [B, C, H, W] => [B, H, W, C] + pixel_with_alpha = pixel_with_alpha.movedim(1, -1) + image = pixel_with_alpha[..., 1:] + alpha = pixel_with_alpha[..., 0] + return (image, alpha) + + +class LayeredDiffusionDecodeRGBA(LayeredDiffusionDecode): + """ + Decode alpha channel value from pixel value. + [B, C=3, H, W] => [B, C=4, H, W] + Outputs RGBA image. + """ + + RETURN_TYPES = ("IMAGE",) + + def decode(self, samples, images, sd_version: str, sub_batch_size: int): + image, mask = super().decode(samples, images, sd_version, sub_batch_size) + alpha = 1.0 - mask + return JoinImageWithAlpha().join_image_with_alpha(image, alpha) + + +class LayeredDiffusionDecodeSplit(LayeredDiffusionDecodeRGBA): + """Decode RGBA every N images.""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "samples": ("LATENT",), + "images": ("IMAGE",), + # Do RGBA decode every N output images. 
+ "frames": ( + "INT", + {"default": 2, "min": 2, "max": s.MAX_FRAMES, "step": 1}, + ), + "sd_version": ( + [ + StableDiffusionVersion.SD1x.value, + StableDiffusionVersion.SDXL.value, + ], + { + "default": StableDiffusionVersion.SDXL.value, + }, + ), + "sub_batch_size": ( + "INT", + {"default": 16, "min": 1, "max": 4096, "step": 1}, + ), + }, + } + + MAX_FRAMES = 3 + RETURN_TYPES = ("IMAGE",) * MAX_FRAMES + + def decode( + self, + samples, + images: torch.Tensor, + frames: int, + sd_version: str, + sub_batch_size: int, + ): + sliced_samples = copy.copy(samples) + sliced_samples["samples"] = sliced_samples["samples"][::frames] + return tuple( + ( + ( + super(LayeredDiffusionDecodeSplit, self).decode( + sliced_samples, imgs, sd_version, sub_batch_size + )[0] + if i == 0 + else imgs + ) + for i in range(frames) + for imgs in (images[i::frames],) + ) + ) + (None,) * (self.MAX_FRAMES - frames) + + +class LayerMethod(Enum): + ATTN = "Attention Injection" + CONV = "Conv Injection" + + +class LayerType(Enum): + FG = "Foreground" + BG = "Background" + + +@dataclass +class LayeredDiffusionBase: + model_file_name: str + model_url: str + sd_version: StableDiffusionVersion + attn_sharing: bool = False + injection_method: Optional[LayerMethod] = None + cond_type: Optional[LayerType] = None + # Number of output images per run. + frames: int = 1 + + @property + def config_string(self) -> str: + injection_method = self.injection_method.value if self.injection_method else "" + cond_type = self.cond_type.value if self.cond_type else "" + attn_sharing = "attn_sharing" if self.attn_sharing else "" + frames = f"Batch size ({self.frames}N)" if self.frames != 1 else "" + return ", ".join( + x + for x in ( + self.sd_version.value, + injection_method, + cond_type, + attn_sharing, + frames, + ) + if x + ) + + def apply_c_concat(self, cond, uncond, c_concat): + """Set foreground/background concat condition.""" + + def write_c_concat(cond): + new_cond = [] + for t in cond: + n = [t[0], t[1].copy()] + if "model_conds" not in n[1]: + n[1]["model_conds"] = {} + n[1]["model_conds"]["c_concat"] = CONDRegular(c_concat) + new_cond.append(n) + return new_cond + + return (write_c_concat(cond), write_c_concat(uncond)) + + def apply_layered_diffusion( + self, + model: ModelPatcher, + weight: float, + ): + """Patch model""" + model_path = load_file_from_url( + url=self.model_url, + model_dir=get_layer_model_root(), + file_name=self.model_file_name, + ) + def pad_diff_weight(v): + if len(v) == 1: + return ("diff", [v[0], {"pad_weight": True}]) + elif len(v) == 2 and v[0] == "diff": + return ("diff", [v[1][0], {"pad_weight": True}]) + else: + return v + + layer_lora_state_dict = load_layer_model_state_dict(model_path) + layer_lora_patch_dict = { + k: pad_diff_weight(v) + for k, v in to_lora_patch_dict(layer_lora_state_dict).items() + } + work_model = model.clone() + work_model.add_patches(layer_lora_patch_dict, weight) + return (work_model,) + + def apply_layered_diffusion_attn_sharing( + self, + model: ModelPatcher, + control_img: Optional[torch.TensorType] = None, + ): + """Patch model with attn sharing""" + model_path = load_file_from_url( + url=self.model_url, + model_dir=get_layer_model_root(), + file_name=self.model_file_name, + ) + layer_lora_state_dict = load_layer_model_state_dict(model_path) + work_model = model.clone() + patcher = AttentionSharingPatcher( + work_model, self.frames, use_control=control_img is not None + ) + patcher.load_state_dict(layer_lora_state_dict, strict=True) + if control_img is not None: + 
patcher.set_control(control_img) + return (work_model,) + + +def get_model_sd_version(model: ModelPatcher) -> StableDiffusionVersion: + """Get model's StableDiffusionVersion.""" + base: comfy.model_base.BaseModel = model.model + model_config: comfy.supported_models.supported_models_base.BASE = base.model_config + if isinstance(model_config, comfy.supported_models.SDXL): + return StableDiffusionVersion.SDXL + elif isinstance( + model_config, (comfy.supported_models.SD15, comfy.supported_models.SD20) + ): + # SD15 and SD20 are compatible with each other. + return StableDiffusionVersion.SD1x + else: + raise Exception(f"Unsupported SD Version: {type(model_config)}.") + + +class LayeredDiffusionFG: + """Generate foreground with transparent background.""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL",), + "config": ([c.config_string for c in s.MODELS],), + "weight": ( + "FLOAT", + {"default": 1.0, "min": -1, "max": 3, "step": 0.05}, + ), + }, + } + + RETURN_TYPES = ("MODEL",) + FUNCTION = "apply_layered_diffusion" + CATEGORY = "layer_diffuse" + MODELS = ( + LayeredDiffusionBase( + model_file_name="layer_xl_transparent_attn.safetensors", + model_url="https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_xl_transparent_attn.safetensors", + sd_version=StableDiffusionVersion.SDXL, + injection_method=LayerMethod.ATTN, + ), + LayeredDiffusionBase( + model_file_name="layer_xl_transparent_conv.safetensors", + model_url="https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_xl_transparent_conv.safetensors", + sd_version=StableDiffusionVersion.SDXL, + injection_method=LayerMethod.CONV, + ), + LayeredDiffusionBase( + model_file_name="layer_sd15_transparent_attn.safetensors", + model_url="https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_sd15_transparent_attn.safetensors", + sd_version=StableDiffusionVersion.SD1x, + injection_method=LayerMethod.ATTN, + attn_sharing=True, + ), + ) + + def apply_layered_diffusion( + self, + model: ModelPatcher, + config: str, + weight: float, + ): + ld_model = [m for m in self.MODELS if m.config_string == config][0] + assert get_model_sd_version(model) == ld_model.sd_version + if ld_model.attn_sharing: + return ld_model.apply_layered_diffusion_attn_sharing(model) + else: + return ld_model.apply_layered_diffusion(model, weight) + + +class LayeredDiffusionJoint: + """Generate FG + BG + Blended in one inference batch. 
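+    Each consecutive group of 3 latents in the batch is treated as one
+    (foreground, background, blended) triple by the attention-sharing patch.
+
+    A rough usage sketch (variable names are illustrative only):
+
+        patched_model, = LayeredDiffusionJoint().apply_layered_diffusion(
+            model=sd15_model,
+            config="SD15, attn_sharing, Batch size (3N)",
+            fg_cond=fg_cond,
+            bg_cond=bg_cond,
+            blended_cond=blended_cond,
+        )
+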
Batch size = 3N.""" + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL",), + "config": ([c.config_string for c in s.MODELS],), + }, + "optional": { + "fg_cond": ("CONDITIONING",), + "bg_cond": ("CONDITIONING",), + "blended_cond": ("CONDITIONING",), + }, + } + + RETURN_TYPES = ("MODEL",) + FUNCTION = "apply_layered_diffusion" + CATEGORY = "layer_diffuse" + MODELS = ( + LayeredDiffusionBase( + model_file_name="layer_sd15_joint.safetensors", + model_url="https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_sd15_joint.safetensors", + sd_version=StableDiffusionVersion.SD1x, + attn_sharing=True, + frames=3, + ), + ) + + def apply_layered_diffusion( + self, + model: ModelPatcher, + config: str, + fg_cond: Optional[List[List[torch.TensorType]]] = None, + bg_cond: Optional[List[List[torch.TensorType]]] = None, + blended_cond: Optional[List[List[torch.TensorType]]] = None, + ): + ld_model = [m for m in self.MODELS if m.config_string == config][0] + assert get_model_sd_version(model) == ld_model.sd_version + assert ld_model.attn_sharing + work_model = ld_model.apply_layered_diffusion_attn_sharing(model)[0] + work_model.model_options.setdefault("transformer_options", {}) + work_model.model_options["transformer_options"]["cond_overwrite"] = [ + cond[0][0] if cond is not None else None + for cond in ( + fg_cond, + bg_cond, + blended_cond, + ) + ] + return (work_model,) + + +class LayeredDiffusionCond: + """Generate foreground + background given background / foreground. + - FG => Blended + - BG => Blended + """ + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL",), + "cond": ("CONDITIONING",), + "uncond": ("CONDITIONING",), + "latent": ("LATENT",), + "config": ([c.config_string for c in s.MODELS],), + "weight": ( + "FLOAT", + {"default": 1.0, "min": -1, "max": 3, "step": 0.05}, + ), + }, + } + + RETURN_TYPES = ("MODEL", "CONDITIONING", "CONDITIONING") + FUNCTION = "apply_layered_diffusion" + CATEGORY = "layer_diffuse" + MODELS = ( + LayeredDiffusionBase( + model_file_name="layer_xl_fg2ble.safetensors", + model_url="https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_xl_fg2ble.safetensors", + sd_version=StableDiffusionVersion.SDXL, + cond_type=LayerType.FG, + ), + LayeredDiffusionBase( + model_file_name="layer_xl_bg2ble.safetensors", + model_url="https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_xl_bg2ble.safetensors", + sd_version=StableDiffusionVersion.SDXL, + cond_type=LayerType.BG, + ), + ) + + def apply_layered_diffusion( + self, + model: ModelPatcher, + cond, + uncond, + latent, + config: str, + weight: float, + ): + ld_model = [m for m in self.MODELS if m.config_string == config][0] + assert get_model_sd_version(model) == ld_model.sd_version + c_concat = model.model.latent_format.process_in(latent["samples"]) + return ld_model.apply_layered_diffusion( + model, weight + ) + ld_model.apply_c_concat(cond, uncond, c_concat) + + +class LayeredDiffusionCondJoint: + """Generate fg/bg + blended given fg/bg. 
+ - FG => Blended + BG + - BG => Blended + FG + """ + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL",), + "image": ("IMAGE",), + "config": ([c.config_string for c in s.MODELS],), + }, + "optional": { + "cond": ("CONDITIONING",), + "blended_cond": ("CONDITIONING",), + }, + } + + RETURN_TYPES = ("MODEL",) + FUNCTION = "apply_layered_diffusion" + CATEGORY = "layer_diffuse" + MODELS = ( + LayeredDiffusionBase( + model_file_name="layer_sd15_fg2bg.safetensors", + model_url="https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_sd15_fg2bg.safetensors", + sd_version=StableDiffusionVersion.SD1x, + attn_sharing=True, + frames=2, + cond_type=LayerType.FG, + ), + LayeredDiffusionBase( + model_file_name="layer_sd15_bg2fg.safetensors", + model_url="https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_sd15_bg2fg.safetensors", + sd_version=StableDiffusionVersion.SD1x, + attn_sharing=True, + frames=2, + cond_type=LayerType.BG, + ), + ) + + def apply_layered_diffusion( + self, + model: ModelPatcher, + image, + config: str, + cond: Optional[List[List[torch.TensorType]]] = None, + blended_cond: Optional[List[List[torch.TensorType]]] = None, + ): + ld_model = [m for m in self.MODELS if m.config_string == config][0] + assert get_model_sd_version(model) == ld_model.sd_version + assert ld_model.attn_sharing + work_model = ld_model.apply_layered_diffusion_attn_sharing( + model, control_img=image.movedim(-1, 1) + )[0] + work_model.model_options.setdefault("transformer_options", {}) + work_model.model_options["transformer_options"]["cond_overwrite"] = [ + cond[0][0] if cond is not None else None + for cond in ( + cond, + blended_cond, + ) + ] + return (work_model,) + + +class LayeredDiffusionDiff: + """Extract FG/BG from blended image. 
+ - Blended + FG => BG + - Blended + BG => FG + """ + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL",), + "cond": ("CONDITIONING",), + "uncond": ("CONDITIONING",), + "blended_latent": ("LATENT",), + "latent": ("LATENT",), + "config": ([c.config_string for c in s.MODELS],), + "weight": ( + "FLOAT", + {"default": 1.0, "min": -1, "max": 3, "step": 0.05}, + ), + }, + } + + RETURN_TYPES = ("MODEL", "CONDITIONING", "CONDITIONING") + FUNCTION = "apply_layered_diffusion" + CATEGORY = "layer_diffuse" + MODELS = ( + LayeredDiffusionBase( + model_file_name="layer_xl_fgble2bg.safetensors", + model_url="https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_xl_fgble2bg.safetensors", + sd_version=StableDiffusionVersion.SDXL, + cond_type=LayerType.FG, + ), + LayeredDiffusionBase( + model_file_name="layer_xl_bgble2fg.safetensors", + model_url="https://huggingface.co/LayerDiffusion/layerdiffusion-v1/resolve/main/layer_xl_bgble2fg.safetensors", + sd_version=StableDiffusionVersion.SDXL, + cond_type=LayerType.BG, + ), + ) + + def apply_layered_diffusion( + self, + model: ModelPatcher, + cond, + uncond, + blended_latent, + latent, + config: str, + weight: float, + ): + ld_model = [m for m in self.MODELS if m.config_string == config][0] + assert get_model_sd_version(model) == ld_model.sd_version + c_concat = model.model.latent_format.process_in( + torch.cat([latent["samples"], blended_latent["samples"]], dim=1) + ) + return ld_model.apply_layered_diffusion( + model, weight + ) + ld_model.apply_c_concat(cond, uncond, c_concat) + + +NODE_CLASS_MAPPINGS = { + "LayeredDiffusionApply": LayeredDiffusionFG, + "LayeredDiffusionJointApply": LayeredDiffusionJoint, + "LayeredDiffusionCondApply": LayeredDiffusionCond, + "LayeredDiffusionCondJointApply": LayeredDiffusionCondJoint, + "LayeredDiffusionDiffApply": LayeredDiffusionDiff, + "LayeredDiffusionDecode": LayeredDiffusionDecode, + "LayeredDiffusionDecodeRGBA": LayeredDiffusionDecodeRGBA, + "LayeredDiffusionDecodeSplit": LayeredDiffusionDecodeSplit, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "LayeredDiffusionApply": "Layer Diffuse Apply", + "LayeredDiffusionJointApply": "Layer Diffuse Joint Apply", + "LayeredDiffusionCondApply": "Layer Diffuse Cond Apply", + "LayeredDiffusionCondJointApply": "Layer Diffuse Cond Joint Apply", + "LayeredDiffusionDiffApply": "Layer Diffuse Diff Apply", + "LayeredDiffusionDecode": "Layer Diffuse Decode", + "LayeredDiffusionDecodeRGBA": "Layer Diffuse Decode (RGBA)", + "LayeredDiffusionDecodeSplit": "Layer Diffuse Decode (Split)", +} diff --git a/ComfyUI-layerdiffuse/lib_layerdiffusion/__init__.py b/ComfyUI-layerdiffuse/lib_layerdiffusion/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI-layerdiffuse/lib_layerdiffusion/attention_sharing.py b/ComfyUI-layerdiffuse/lib_layerdiffusion/attention_sharing.py new file mode 100644 index 0000000000000000000000000000000000000000..ae424fcb79c04fe0c6d531d4bff0e4e3f485f51b --- /dev/null +++ b/ComfyUI-layerdiffuse/lib_layerdiffusion/attention_sharing.py @@ -0,0 +1,360 @@ +# Currently only sd15 + +import functools +import torch +import einops + +from comfy import model_management, utils +from comfy.ldm.modules.attention import optimized_attention + + +module_mapping_sd15 = { + 0: "input_blocks.1.1.transformer_blocks.0.attn1", + 1: "input_blocks.1.1.transformer_blocks.0.attn2", + 2: "input_blocks.2.1.transformer_blocks.0.attn1", + 3: 
"input_blocks.2.1.transformer_blocks.0.attn2", + 4: "input_blocks.4.1.transformer_blocks.0.attn1", + 5: "input_blocks.4.1.transformer_blocks.0.attn2", + 6: "input_blocks.5.1.transformer_blocks.0.attn1", + 7: "input_blocks.5.1.transformer_blocks.0.attn2", + 8: "input_blocks.7.1.transformer_blocks.0.attn1", + 9: "input_blocks.7.1.transformer_blocks.0.attn2", + 10: "input_blocks.8.1.transformer_blocks.0.attn1", + 11: "input_blocks.8.1.transformer_blocks.0.attn2", + 12: "output_blocks.3.1.transformer_blocks.0.attn1", + 13: "output_blocks.3.1.transformer_blocks.0.attn2", + 14: "output_blocks.4.1.transformer_blocks.0.attn1", + 15: "output_blocks.4.1.transformer_blocks.0.attn2", + 16: "output_blocks.5.1.transformer_blocks.0.attn1", + 17: "output_blocks.5.1.transformer_blocks.0.attn2", + 18: "output_blocks.6.1.transformer_blocks.0.attn1", + 19: "output_blocks.6.1.transformer_blocks.0.attn2", + 20: "output_blocks.7.1.transformer_blocks.0.attn1", + 21: "output_blocks.7.1.transformer_blocks.0.attn2", + 22: "output_blocks.8.1.transformer_blocks.0.attn1", + 23: "output_blocks.8.1.transformer_blocks.0.attn2", + 24: "output_blocks.9.1.transformer_blocks.0.attn1", + 25: "output_blocks.9.1.transformer_blocks.0.attn2", + 26: "output_blocks.10.1.transformer_blocks.0.attn1", + 27: "output_blocks.10.1.transformer_blocks.0.attn2", + 28: "output_blocks.11.1.transformer_blocks.0.attn1", + 29: "output_blocks.11.1.transformer_blocks.0.attn2", + 30: "middle_block.1.transformer_blocks.0.attn1", + 31: "middle_block.1.transformer_blocks.0.attn2", +} + + +def compute_cond_mark(cond_or_uncond, sigmas): + cond_or_uncond_size = int(sigmas.shape[0]) + + cond_mark = [] + for cx in cond_or_uncond: + cond_mark += [cx] * cond_or_uncond_size + + cond_mark = torch.Tensor(cond_mark).to(sigmas) + return cond_mark + + +class LoRALinearLayer(torch.nn.Module): + def __init__(self, in_features: int, out_features: int, rank: int = 256, org=None): + super().__init__() + self.down = torch.nn.Linear(in_features, rank, bias=False) + self.up = torch.nn.Linear(rank, out_features, bias=False) + self.org = [org] + + def forward(self, h): + org_weight = self.org[0].weight.to(h) + org_bias = self.org[0].bias.to(h) if self.org[0].bias is not None else None + down_weight = self.down.weight + up_weight = self.up.weight + final_weight = org_weight + torch.mm(up_weight, down_weight) + return torch.nn.functional.linear(h, final_weight, org_bias) + + +class AttentionSharingUnit(torch.nn.Module): + # `transformer_options` passed to the most recent BasicTransformerBlock.forward + # call. 
+ transformer_options: dict = {} + + def __init__(self, module, frames=2, use_control=True, rank=256): + super().__init__() + + self.heads = module.heads + self.frames = frames + self.original_module = [module] + q_in_channels, q_out_channels = ( + module.to_q.in_features, + module.to_q.out_features, + ) + k_in_channels, k_out_channels = ( + module.to_k.in_features, + module.to_k.out_features, + ) + v_in_channels, v_out_channels = ( + module.to_v.in_features, + module.to_v.out_features, + ) + o_in_channels, o_out_channels = ( + module.to_out[0].in_features, + module.to_out[0].out_features, + ) + + hidden_size = k_out_channels + + self.to_q_lora = [ + LoRALinearLayer(q_in_channels, q_out_channels, rank, module.to_q) + for _ in range(self.frames) + ] + self.to_k_lora = [ + LoRALinearLayer(k_in_channels, k_out_channels, rank, module.to_k) + for _ in range(self.frames) + ] + self.to_v_lora = [ + LoRALinearLayer(v_in_channels, v_out_channels, rank, module.to_v) + for _ in range(self.frames) + ] + self.to_out_lora = [ + LoRALinearLayer(o_in_channels, o_out_channels, rank, module.to_out[0]) + for _ in range(self.frames) + ] + + self.to_q_lora = torch.nn.ModuleList(self.to_q_lora) + self.to_k_lora = torch.nn.ModuleList(self.to_k_lora) + self.to_v_lora = torch.nn.ModuleList(self.to_v_lora) + self.to_out_lora = torch.nn.ModuleList(self.to_out_lora) + + self.temporal_i = torch.nn.Linear( + in_features=hidden_size, out_features=hidden_size + ) + self.temporal_n = torch.nn.LayerNorm( + hidden_size, elementwise_affine=True, eps=1e-6 + ) + self.temporal_q = torch.nn.Linear( + in_features=hidden_size, out_features=hidden_size + ) + self.temporal_k = torch.nn.Linear( + in_features=hidden_size, out_features=hidden_size + ) + self.temporal_v = torch.nn.Linear( + in_features=hidden_size, out_features=hidden_size + ) + self.temporal_o = torch.nn.Linear( + in_features=hidden_size, out_features=hidden_size + ) + + self.control_convs = None + + if use_control: + self.control_convs = [ + torch.nn.Sequential( + torch.nn.Conv2d(256, 256, kernel_size=3, padding=1, stride=1), + torch.nn.SiLU(), + torch.nn.Conv2d(256, hidden_size, kernel_size=1), + ) + for _ in range(self.frames) + ] + self.control_convs = torch.nn.ModuleList(self.control_convs) + + self.control_signals = None + + def forward(self, h, context=None, value=None): + transformer_options = self.transformer_options + + modified_hidden_states = einops.rearrange( + h, "(b f) d c -> f b d c", f=self.frames + ) + + if self.control_convs is not None: + context_dim = int(modified_hidden_states.shape[2]) + control_outs = [] + for f in range(self.frames): + control_signal = self.control_signals[context_dim].to( + modified_hidden_states + ) + control = self.control_convs[f](control_signal) + control = einops.rearrange(control, "b c h w -> b (h w) c") + control_outs.append(control) + control_outs = torch.stack(control_outs, dim=0) + modified_hidden_states = modified_hidden_states + control_outs.to( + modified_hidden_states + ) + + if context is None: + framed_context = modified_hidden_states + else: + framed_context = einops.rearrange( + context, "(b f) d c -> f b d c", f=self.frames + ) + + framed_cond_mark = einops.rearrange( + compute_cond_mark( + transformer_options["cond_or_uncond"], + transformer_options["sigmas"], + ), + "(b f) -> f b", + f=self.frames, + ).to(modified_hidden_states) + + attn_outs = [] + for f in range(self.frames): + fcf = framed_context[f] + + if context is not None: + cond_overwrite = transformer_options.get("cond_overwrite", []) + if 
len(cond_overwrite) > f: + cond_overwrite = cond_overwrite[f] + else: + cond_overwrite = None + if cond_overwrite is not None: + cond_mark = framed_cond_mark[f][:, None, None] + fcf = cond_overwrite.to(fcf) * (1.0 - cond_mark) + fcf * cond_mark + + q = self.to_q_lora[f](modified_hidden_states[f]) + k = self.to_k_lora[f](fcf) + v = self.to_v_lora[f](fcf) + o = optimized_attention(q, k, v, self.heads) + o = self.to_out_lora[f](o) + o = self.original_module[0].to_out[1](o) + attn_outs.append(o) + + attn_outs = torch.stack(attn_outs, dim=0) + modified_hidden_states = modified_hidden_states + attn_outs.to( + modified_hidden_states + ) + modified_hidden_states = einops.rearrange( + modified_hidden_states, "f b d c -> (b f) d c", f=self.frames + ) + + x = modified_hidden_states + x = self.temporal_n(x) + x = self.temporal_i(x) + d = x.shape[1] + + x = einops.rearrange(x, "(b f) d c -> (b d) f c", f=self.frames) + + q = self.temporal_q(x) + k = self.temporal_k(x) + v = self.temporal_v(x) + + x = optimized_attention(q, k, v, self.heads) + x = self.temporal_o(x) + x = einops.rearrange(x, "(b d) f c -> (b f) d c", d=d) + + modified_hidden_states = modified_hidden_states + x + + return modified_hidden_states - h + + @classmethod + def hijack_transformer_block(cls): + def register_get_transformer_options(func): + @functools.wraps(func) + def forward(self, x, context=None, transformer_options={}): + cls.transformer_options = transformer_options + return func(self, x, context, transformer_options) + + return forward + + from comfy.ldm.modules.attention import BasicTransformerBlock + + BasicTransformerBlock.forward = register_get_transformer_options( + BasicTransformerBlock.forward + ) + + +AttentionSharingUnit.hijack_transformer_block() + + +class AdditionalAttentionCondsEncoder(torch.nn.Module): + def __init__(self): + super().__init__() + + self.blocks_0 = torch.nn.Sequential( + torch.nn.Conv2d(3, 32, kernel_size=3, padding=1, stride=1), + torch.nn.SiLU(), + torch.nn.Conv2d(32, 32, kernel_size=3, padding=1, stride=1), + torch.nn.SiLU(), + torch.nn.Conv2d(32, 64, kernel_size=3, padding=1, stride=2), + torch.nn.SiLU(), + torch.nn.Conv2d(64, 64, kernel_size=3, padding=1, stride=1), + torch.nn.SiLU(), + torch.nn.Conv2d(64, 128, kernel_size=3, padding=1, stride=2), + torch.nn.SiLU(), + torch.nn.Conv2d(128, 128, kernel_size=3, padding=1, stride=1), + torch.nn.SiLU(), + torch.nn.Conv2d(128, 256, kernel_size=3, padding=1, stride=2), + torch.nn.SiLU(), + torch.nn.Conv2d(256, 256, kernel_size=3, padding=1, stride=1), + torch.nn.SiLU(), + ) # 64*64*256 + + self.blocks_1 = torch.nn.Sequential( + torch.nn.Conv2d(256, 256, kernel_size=3, padding=1, stride=2), + torch.nn.SiLU(), + torch.nn.Conv2d(256, 256, kernel_size=3, padding=1, stride=1), + torch.nn.SiLU(), + ) # 32*32*256 + + self.blocks_2 = torch.nn.Sequential( + torch.nn.Conv2d(256, 256, kernel_size=3, padding=1, stride=2), + torch.nn.SiLU(), + torch.nn.Conv2d(256, 256, kernel_size=3, padding=1, stride=1), + torch.nn.SiLU(), + ) # 16*16*256 + + self.blocks_3 = torch.nn.Sequential( + torch.nn.Conv2d(256, 256, kernel_size=3, padding=1, stride=2), + torch.nn.SiLU(), + torch.nn.Conv2d(256, 256, kernel_size=3, padding=1, stride=1), + torch.nn.SiLU(), + ) # 8*8*256 + + self.blks = [self.blocks_0, self.blocks_1, self.blocks_2, self.blocks_3] + + def __call__(self, h): + results = {} + for b in self.blks: + h = b(h) + results[int(h.shape[2]) * int(h.shape[3])] = h + return results + + +class HookerLayers(torch.nn.Module): + def __init__(self, layer_list): + 
super().__init__() + self.layers = torch.nn.ModuleList(layer_list) + + +class AttentionSharingPatcher(torch.nn.Module): + def __init__(self, unet, frames=2, use_control=True, rank=256): + super().__init__() + model_management.unload_model_clones(unet) + + units = [] + for i in range(32): + real_key = module_mapping_sd15[i] + attn_module = utils.get_attr(unet.model.diffusion_model, real_key) + u = AttentionSharingUnit( + attn_module, frames=frames, use_control=use_control, rank=rank + ) + units.append(u) + unet.add_object_patch("diffusion_model." + real_key, u) + + self.hookers = HookerLayers(units) + + if use_control: + self.kwargs_encoder = AdditionalAttentionCondsEncoder() + else: + self.kwargs_encoder = None + + self.dtype = torch.float32 + if model_management.should_use_fp16(model_management.get_torch_device()): + self.dtype = torch.float16 + self.hookers.half() + return + + def set_control(self, img): + img = img.cpu().float() * 2.0 - 1.0 + signals = self.kwargs_encoder(img) + for m in self.hookers.layers: + m.control_signals = signals + return diff --git a/ComfyUI-layerdiffuse/lib_layerdiffusion/enums.py b/ComfyUI-layerdiffuse/lib_layerdiffusion/enums.py new file mode 100644 index 0000000000000000000000000000000000000000..c4623e3c256e67c423d4c18abfe4a971a4f22522 --- /dev/null +++ b/ComfyUI-layerdiffuse/lib_layerdiffusion/enums.py @@ -0,0 +1,23 @@ +from enum import Enum + + +class ResizeMode(Enum): + RESIZE = "Just Resize" + CROP_AND_RESIZE = "Crop and Resize" + RESIZE_AND_FILL = "Resize and Fill" + + def int_value(self): + if self == ResizeMode.RESIZE: + return 0 + elif self == ResizeMode.CROP_AND_RESIZE: + return 1 + elif self == ResizeMode.RESIZE_AND_FILL: + return 2 + return 0 + + +class StableDiffusionVersion(Enum): + """The version family of stable diffusion model.""" + + SD1x = "SD15" + SDXL = "SDXL" diff --git a/ComfyUI-layerdiffuse/lib_layerdiffusion/models.py b/ComfyUI-layerdiffuse/lib_layerdiffusion/models.py new file mode 100644 index 0000000000000000000000000000000000000000..19396f772dbc0b8fd83abfca165cd4e4b4de81ae --- /dev/null +++ b/ComfyUI-layerdiffuse/lib_layerdiffusion/models.py @@ -0,0 +1,330 @@ +import torch.nn as nn +import torch +import cv2 +import numpy as np + +from tqdm import tqdm +from typing import Optional, Tuple +from diffusers.configuration_utils import ConfigMixin, register_to_config +from diffusers.models.modeling_utils import ModelMixin +import importlib.metadata +from packaging.version import parse + +diffusers_version = importlib.metadata.version('diffusers') + +def check_diffusers_version(min_version="0.25.0"): + assert parse(diffusers_version) >= parse( + min_version + ), f"diffusers>={min_version} requirement not satisfied. Please install correct diffusers version." + +check_diffusers_version() + +if parse(diffusers_version) >= parse("0.29.0"): + from diffusers.models.unets.unet_2d_blocks import UNetMidBlock2D, get_down_block, get_up_block +else: + from diffusers.models.unet_2d_blocks import UNetMidBlock2D, get_down_block, get_up_block + + +def zero_module(module): + """ + Zero out the parameters of a module and return it. 
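+
+    Used below for the latent-injection convolutions: a ControlNet-style zero
+    initialisation, so the extra branch initially contributes nothing.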
+ """ + for p in module.parameters(): + p.detach().zero_() + return module + + +class LatentTransparencyOffsetEncoder(torch.nn.Module): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.blocks = torch.nn.Sequential( + torch.nn.Conv2d(4, 32, kernel_size=3, padding=1, stride=1), + nn.SiLU(), + torch.nn.Conv2d(32, 32, kernel_size=3, padding=1, stride=1), + nn.SiLU(), + torch.nn.Conv2d(32, 64, kernel_size=3, padding=1, stride=2), + nn.SiLU(), + torch.nn.Conv2d(64, 64, kernel_size=3, padding=1, stride=1), + nn.SiLU(), + torch.nn.Conv2d(64, 128, kernel_size=3, padding=1, stride=2), + nn.SiLU(), + torch.nn.Conv2d(128, 128, kernel_size=3, padding=1, stride=1), + nn.SiLU(), + torch.nn.Conv2d(128, 256, kernel_size=3, padding=1, stride=2), + nn.SiLU(), + torch.nn.Conv2d(256, 256, kernel_size=3, padding=1, stride=1), + nn.SiLU(), + zero_module(torch.nn.Conv2d(256, 4, kernel_size=3, padding=1, stride=1)), + ) + + def __call__(self, x): + return self.blocks(x) + + +# 1024 * 1024 * 3 -> 16 * 16 * 512 -> 1024 * 1024 * 3 +class UNet1024(ModelMixin, ConfigMixin): + @register_to_config + def __init__( + self, + in_channels: int = 3, + out_channels: int = 3, + down_block_types: Tuple[str] = ( + "DownBlock2D", + "DownBlock2D", + "DownBlock2D", + "DownBlock2D", + "AttnDownBlock2D", + "AttnDownBlock2D", + "AttnDownBlock2D", + ), + up_block_types: Tuple[str] = ( + "AttnUpBlock2D", + "AttnUpBlock2D", + "AttnUpBlock2D", + "UpBlock2D", + "UpBlock2D", + "UpBlock2D", + "UpBlock2D", + ), + block_out_channels: Tuple[int] = (32, 32, 64, 128, 256, 512, 512), + layers_per_block: int = 2, + mid_block_scale_factor: float = 1, + downsample_padding: int = 1, + downsample_type: str = "conv", + upsample_type: str = "conv", + dropout: float = 0.0, + act_fn: str = "silu", + attention_head_dim: Optional[int] = 8, + norm_num_groups: int = 4, + norm_eps: float = 1e-5, + ): + super().__init__() + + # input + self.conv_in = nn.Conv2d( + in_channels, block_out_channels[0], kernel_size=3, padding=(1, 1) + ) + self.latent_conv_in = zero_module( + nn.Conv2d(4, block_out_channels[2], kernel_size=1) + ) + + self.down_blocks = nn.ModuleList([]) + self.mid_block = None + self.up_blocks = nn.ModuleList([]) + + # down + output_channel = block_out_channels[0] + for i, down_block_type in enumerate(down_block_types): + input_channel = output_channel + output_channel = block_out_channels[i] + is_final_block = i == len(block_out_channels) - 1 + + down_block = get_down_block( + down_block_type, + num_layers=layers_per_block, + in_channels=input_channel, + out_channels=output_channel, + temb_channels=None, + add_downsample=not is_final_block, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + attention_head_dim=( + attention_head_dim + if attention_head_dim is not None + else output_channel + ), + downsample_padding=downsample_padding, + resnet_time_scale_shift="default", + downsample_type=downsample_type, + dropout=dropout, + ) + self.down_blocks.append(down_block) + + # mid + self.mid_block = UNetMidBlock2D( + in_channels=block_out_channels[-1], + temb_channels=None, + dropout=dropout, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + output_scale_factor=mid_block_scale_factor, + resnet_time_scale_shift="default", + attention_head_dim=( + attention_head_dim + if attention_head_dim is not None + else block_out_channels[-1] + ), + resnet_groups=norm_num_groups, + attn_groups=None, + add_attention=True, + ) + + # up + reversed_block_out_channels = list(reversed(block_out_channels)) + 
output_channel = reversed_block_out_channels[0] + for i, up_block_type in enumerate(up_block_types): + prev_output_channel = output_channel + output_channel = reversed_block_out_channels[i] + input_channel = reversed_block_out_channels[ + min(i + 1, len(block_out_channels) - 1) + ] + + is_final_block = i == len(block_out_channels) - 1 + + up_block = get_up_block( + up_block_type, + num_layers=layers_per_block + 1, + in_channels=input_channel, + out_channels=output_channel, + prev_output_channel=prev_output_channel, + temb_channels=None, + add_upsample=not is_final_block, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + attention_head_dim=( + attention_head_dim + if attention_head_dim is not None + else output_channel + ), + resnet_time_scale_shift="default", + upsample_type=upsample_type, + dropout=dropout, + ) + self.up_blocks.append(up_block) + prev_output_channel = output_channel + + # out + self.conv_norm_out = nn.GroupNorm( + num_channels=block_out_channels[0], num_groups=norm_num_groups, eps=norm_eps + ) + self.conv_act = nn.SiLU() + self.conv_out = nn.Conv2d( + block_out_channels[0], out_channels, kernel_size=3, padding=1 + ) + + def forward(self, x, latent): + sample_latent = self.latent_conv_in(latent) + sample = self.conv_in(x) + emb = None + + down_block_res_samples = (sample,) + for i, downsample_block in enumerate(self.down_blocks): + if i == 3: + sample = sample + sample_latent + + sample, res_samples = downsample_block(hidden_states=sample, temb=emb) + down_block_res_samples += res_samples + + sample = self.mid_block(sample, emb) + + for upsample_block in self.up_blocks: + res_samples = down_block_res_samples[-len(upsample_block.resnets) :] + down_block_res_samples = down_block_res_samples[ + : -len(upsample_block.resnets) + ] + sample = upsample_block(sample, res_samples, emb) + + sample = self.conv_norm_out(sample) + sample = self.conv_act(sample) + sample = self.conv_out(sample) + return sample + + +def checkerboard(shape): + return np.indices(shape).sum(axis=0) % 2 + + +def fill_checkerboard_bg(y: torch.Tensor) -> torch.Tensor: + alpha = y[..., :1] + fg = y[..., 1:] + B, H, W, C = fg.shape + cb = checkerboard(shape=(H // 64, W // 64)) + cb = cv2.resize(cb, (W, H), interpolation=cv2.INTER_NEAREST) + cb = (0.5 + (cb - 0.5) * 0.1)[None, ..., None] + cb = torch.from_numpy(cb).to(fg) + vis = fg * alpha + cb * (1 - alpha) + return vis + + +class TransparentVAEDecoder: + def __init__(self, sd, device, dtype): + self.load_device = device + self.dtype = dtype + + model = UNet1024(in_channels=3, out_channels=4) + model.load_state_dict(sd, strict=True) + model.to(self.load_device, dtype=self.dtype) + model.eval() + self.model = model + + @torch.no_grad() + def estimate_single_pass(self, pixel, latent): + y = self.model(pixel, latent) + return y + + @torch.no_grad() + def estimate_augmented(self, pixel, latent): + args = [ + [False, 0], + [False, 1], + [False, 2], + [False, 3], + [True, 0], + [True, 1], + [True, 2], + [True, 3], + ] + + result = [] + + for flip, rok in tqdm(args): + feed_pixel = pixel.clone() + feed_latent = latent.clone() + + if flip: + feed_pixel = torch.flip(feed_pixel, dims=(3,)) + feed_latent = torch.flip(feed_latent, dims=(3,)) + + feed_pixel = torch.rot90(feed_pixel, k=rok, dims=(2, 3)) + feed_latent = torch.rot90(feed_latent, k=rok, dims=(2, 3)) + + eps = self.estimate_single_pass(feed_pixel, feed_latent).clip(0, 1) + eps = torch.rot90(eps, k=-rok, dims=(2, 3)) + + if flip: + eps = torch.flip(eps, dims=(3,)) + + result += 
[eps] + + result = torch.stack(result, dim=0) + if self.load_device == torch.device("mps"): + ''' + In case that apple silicon devices would crash when calling torch.median() on tensors + in gpu vram with dimensions higher than 4, we move it to cpu, call torch.median() + and then move the result back to gpu. + ''' + median = torch.median(result.cpu(), dim=0).values + median = median.to(device=self.load_device, dtype=self.dtype) + else: + median = torch.median(result, dim=0).values + return median + + @torch.no_grad() + def decode_pixel( + self, pixel: torch.TensorType, latent: torch.TensorType + ) -> torch.TensorType: + # pixel.shape = [B, C=3, H, W] + assert pixel.shape[1] == 3 + pixel_device = pixel.device + pixel_dtype = pixel.dtype + + pixel = pixel.to(device=self.load_device, dtype=self.dtype) + latent = latent.to(device=self.load_device, dtype=self.dtype) + # y.shape = [B, C=4, H, W] + y = self.estimate_augmented(pixel, latent) + y = y.clip(0, 1) + assert y.shape[1] == 4 + # Restore image to original device of input image. + return y.to(pixel_device, dtype=pixel_dtype) diff --git a/ComfyUI-layerdiffuse/lib_layerdiffusion/utils.py b/ComfyUI-layerdiffuse/lib_layerdiffusion/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..fa9134119bf13232f6b36b4b5234c5db65ed000e --- /dev/null +++ b/ComfyUI-layerdiffuse/lib_layerdiffusion/utils.py @@ -0,0 +1,135 @@ +import numpy as np +from .enums import ResizeMode +import cv2 +import torch +import os +from urllib.parse import urlparse +from typing import Optional + + +def rgba2rgbfp32(x): + rgb = x[..., :3].astype(np.float32) / 255.0 + a = x[..., 3:4].astype(np.float32) / 255.0 + return 0.5 + (rgb - 0.5) * a + + +def to255unit8(x): + return (x * 255.0).clip(0, 255).astype(np.uint8) + + +def safe_numpy(x): + # A very safe method to make sure that Apple/Mac works + y = x + + # below is very boring but do not change these. If you change these Apple or Mac may fail. 
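+    # (copy() + ascontiguousarray() + copy() forces a fresh, C-contiguous
+    # buffer that shares no memory with the input array)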
+ y = y.copy() + y = np.ascontiguousarray(y) + y = y.copy() + return y + + +def high_quality_resize(x, size): + if x.shape[0] != size[1] or x.shape[1] != size[0]: + if (size[0] * size[1]) < (x.shape[0] * x.shape[1]): + interpolation = cv2.INTER_AREA + else: + interpolation = cv2.INTER_LANCZOS4 + + y = cv2.resize(x, size, interpolation=interpolation) + else: + y = x + return y + + +def crop_and_resize_image(detected_map, resize_mode, h, w): + if resize_mode == ResizeMode.RESIZE: + detected_map = high_quality_resize(detected_map, (w, h)) + detected_map = safe_numpy(detected_map) + return detected_map + + old_h, old_w, _ = detected_map.shape + old_w = float(old_w) + old_h = float(old_h) + k0 = float(h) / old_h + k1 = float(w) / old_w + + def safeint(x): + return int(np.round(x)) + + if resize_mode == ResizeMode.RESIZE_AND_FILL: + k = min(k0, k1) + borders = np.concatenate([detected_map[0, :, :], detected_map[-1, :, :], detected_map[:, 0, :], detected_map[:, -1, :]], axis=0) + high_quality_border_color = np.median(borders, axis=0).astype(detected_map.dtype) + high_quality_background = np.tile(high_quality_border_color[None, None], [h, w, 1]) + detected_map = high_quality_resize(detected_map, (safeint(old_w * k), safeint(old_h * k))) + new_h, new_w, _ = detected_map.shape + pad_h = max(0, (h - new_h) // 2) + pad_w = max(0, (w - new_w) // 2) + high_quality_background[pad_h:pad_h + new_h, pad_w:pad_w + new_w] = detected_map + detected_map = high_quality_background + detected_map = safe_numpy(detected_map) + return detected_map + else: + k = max(k0, k1) + detected_map = high_quality_resize(detected_map, (safeint(old_w * k), safeint(old_h * k))) + new_h, new_w, _ = detected_map.shape + pad_h = max(0, (new_h - h) // 2) + pad_w = max(0, (new_w - w) // 2) + detected_map = detected_map[pad_h:pad_h+h, pad_w:pad_w+w] + detected_map = safe_numpy(detected_map) + return detected_map + + +def pytorch_to_numpy(x): + return [np.clip(255. * y.cpu().numpy(), 0, 255).astype(np.uint8) for y in x] + + +def numpy_to_pytorch(x): + y = x.astype(np.float32) / 255.0 + y = y[None] + y = np.ascontiguousarray(y.copy()) + y = torch.from_numpy(y).float() + return y + + +def load_file_from_url( + url: str, + *, + model_dir: str, + progress: bool = True, + file_name: Optional[str] = None, +) -> str: + """Download a file from `url` into `model_dir`, using the file present if possible. + + Returns the path to the downloaded file. 
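+
+    A minimal usage sketch (the URL and directory here are placeholders, not
+    real endpoints):
+
+        path = load_file_from_url(
+            url="https://example.com/some_model.safetensors",
+            model_dir="models/layer_model",
+            file_name="some_model.safetensors",
+        )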
+ """ + os.makedirs(model_dir, exist_ok=True) + if not file_name: + parts = urlparse(url) + file_name = os.path.basename(parts.path) + cached_file = os.path.abspath(os.path.join(model_dir, file_name)) + if not os.path.exists(cached_file): + print(f'Downloading: "{url}" to {cached_file}\n') + from torch.hub import download_url_to_file + download_url_to_file(url, cached_file, progress=progress) + return cached_file + + +def to_lora_patch_dict(state_dict: dict) -> dict: + """ Convert raw lora state_dict to patch_dict that can be applied on + modelpatcher.""" + patch_dict = {} + for k, w in state_dict.items(): + model_key, patch_type, weight_index = k.split('::') + if model_key not in patch_dict: + patch_dict[model_key] = {} + if patch_type not in patch_dict[model_key]: + patch_dict[model_key][patch_type] = [None] * 16 + patch_dict[model_key][patch_type][int(weight_index)] = w + + patch_flat = {} + for model_key, v in patch_dict.items(): + for patch_type, weight_list in v.items(): + patch_flat[model_key] = (patch_type, weight_list) + + return patch_flat diff --git a/ComfyUI-layerdiffuse/pyproject.toml b/ComfyUI-layerdiffuse/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..2b36e909fcbdf15f64dfb08edd5a2db3f51e194d --- /dev/null +++ b/ComfyUI-layerdiffuse/pyproject.toml @@ -0,0 +1,15 @@ +[project] +name = "comfyui-layerdiffuse" +description = "Transparent Image Layer Diffusion using Latent Transparency" +version = "1.0.1" +license = { file = "LICENSE" } +dependencies = ["diffusers>=0.25.0", "opencv-python"] + +[project.urls] +Repository = "https://github.com/huchenlei/ComfyUI-layerdiffuse" + +# Used by Comfy Registry https://comfyregistry.org +[tool.comfy] +PublisherId = "huchenlei" +DisplayName = "ComfyUI-layerdiffuse" +Icon = "" diff --git a/ComfyUI-layerdiffuse/requirements.txt b/ComfyUI-layerdiffuse/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..9b01f9734756eb21f560c7736f24238ec2ed9c35 --- /dev/null +++ b/ComfyUI-layerdiffuse/requirements.txt @@ -0,0 +1,2 @@ +diffusers>=0.29.0 +opencv-python diff --git a/ComfyUI_ExtraModels/DiT/LICENSE-DiT b/ComfyUI_ExtraModels/DiT/LICENSE-DiT new file mode 100644 index 0000000000000000000000000000000000000000..a115f899f8d09ef3b1def4a16c7bae1a0bd50fbe --- /dev/null +++ b/ComfyUI_ExtraModels/DiT/LICENSE-DiT @@ -0,0 +1,400 @@ + +Attribution-NonCommercial 4.0 International + +======================================================================= + +Creative Commons Corporation ("Creative Commons") is not a law firm and +does not provide legal services or legal advice. Distribution of +Creative Commons public licenses does not create a lawyer-client or +other relationship. Creative Commons makes its licenses and related +information available on an "as-is" basis. Creative Commons gives no +warranties regarding its licenses, any material licensed under their +terms and conditions, or any related information. Creative Commons +disclaims all liability for damages resulting from their use to the +fullest extent possible. + +Using Creative Commons Public Licenses + +Creative Commons public licenses provide a standard set of terms and +conditions that creators and other rights holders may use to share +original works of authorship and other material subject to copyright +and certain other rights specified in the public license below. The +following considerations are for informational purposes only, are not +exhaustive, and do not form part of our licenses. 
+ + Considerations for licensors: Our public licenses are + intended for use by those authorized to give the public + permission to use material in ways otherwise restricted by + copyright and certain other rights. Our licenses are + irrevocable. Licensors should read and understand the terms + and conditions of the license they choose before applying it. + Licensors should also secure all rights necessary before + applying our licenses so that the public can reuse the + material as expected. Licensors should clearly mark any + material not subject to the license. This includes other CC- + licensed material, or material used under an exception or + limitation to copyright. More considerations for licensors: + wiki.creativecommons.org/Considerations_for_licensors + + Considerations for the public: By using one of our public + licenses, a licensor grants the public permission to use the + licensed material under specified terms and conditions. If + the licensor's permission is not necessary for any reason--for + example, because of any applicable exception or limitation to + copyright--then that use is not regulated by the license. Our + licenses grant only permissions under copyright and certain + other rights that a licensor has authority to grant. Use of + the licensed material may still be restricted for other + reasons, including because others have copyright or other + rights in the material. A licensor may make special requests, + such as asking that all changes be marked or described. + Although not required by our licenses, you are encouraged to + respect those requests where reasonable. More_considerations + for the public: + wiki.creativecommons.org/Considerations_for_licensees + +======================================================================= + +Creative Commons Attribution-NonCommercial 4.0 International Public +License + +By exercising the Licensed Rights (defined below), You accept and agree +to be bound by the terms and conditions of this Creative Commons +Attribution-NonCommercial 4.0 International Public License ("Public +License"). To the extent this Public License may be interpreted as a +contract, You are granted the Licensed Rights in consideration of Your +acceptance of these terms and conditions, and the Licensor grants You +such rights in consideration of benefits the Licensor receives from +making the Licensed Material available under these terms and +conditions. + +Section 1 -- Definitions. + + a. Adapted Material means material subject to Copyright and Similar + Rights that is derived from or based upon the Licensed Material + and in which the Licensed Material is translated, altered, + arranged, transformed, or otherwise modified in a manner requiring + permission under the Copyright and Similar Rights held by the + Licensor. For purposes of this Public License, where the Licensed + Material is a musical work, performance, or sound recording, + Adapted Material is always produced where the Licensed Material is + synched in timed relation with a moving image. + + b. Adapter's License means the license You apply to Your Copyright + and Similar Rights in Your contributions to Adapted Material in + accordance with the terms and conditions of this Public License. + + c. Copyright and Similar Rights means copyright and/or similar rights + closely related to copyright including, without limitation, + performance, broadcast, sound recording, and Sui Generis Database + Rights, without regard to how the rights are labeled or + categorized. 
For purposes of this Public License, the rights + specified in Section 2(b)(1)-(2) are not Copyright and Similar + Rights. + d. Effective Technological Measures means those measures that, in the + absence of proper authority, may not be circumvented under laws + fulfilling obligations under Article 11 of the WIPO Copyright + Treaty adopted on December 20, 1996, and/or similar international + agreements. + + e. Exceptions and Limitations means fair use, fair dealing, and/or + any other exception or limitation to Copyright and Similar Rights + that applies to Your use of the Licensed Material. + + f. Licensed Material means the artistic or literary work, database, + or other material to which the Licensor applied this Public + License. + + g. Licensed Rights means the rights granted to You subject to the + terms and conditions of this Public License, which are limited to + all Copyright and Similar Rights that apply to Your use of the + Licensed Material and that the Licensor has authority to license. + + h. Licensor means the individual(s) or entity(ies) granting rights + under this Public License. + + i. NonCommercial means not primarily intended for or directed towards + commercial advantage or monetary compensation. For purposes of + this Public License, the exchange of the Licensed Material for + other material subject to Copyright and Similar Rights by digital + file-sharing or similar means is NonCommercial provided there is + no payment of monetary compensation in connection with the + exchange. + + j. Share means to provide material to the public by any means or + process that requires permission under the Licensed Rights, such + as reproduction, public display, public performance, distribution, + dissemination, communication, or importation, and to make material + available to the public including in ways that members of the + public may access the material from a place and at a time + individually chosen by them. + + k. Sui Generis Database Rights means rights other than copyright + resulting from Directive 96/9/EC of the European Parliament and of + the Council of 11 March 1996 on the legal protection of databases, + as amended and/or succeeded, as well as other essentially + equivalent rights anywhere in the world. + + l. You means the individual or entity exercising the Licensed Rights + under this Public License. Your has a corresponding meaning. + +Section 2 -- Scope. + + a. License grant. + + 1. Subject to the terms and conditions of this Public License, + the Licensor hereby grants You a worldwide, royalty-free, + non-sublicensable, non-exclusive, irrevocable license to + exercise the Licensed Rights in the Licensed Material to: + + a. reproduce and Share the Licensed Material, in whole or + in part, for NonCommercial purposes only; and + + b. produce, reproduce, and Share Adapted Material for + NonCommercial purposes only. + + 2. Exceptions and Limitations. For the avoidance of doubt, where + Exceptions and Limitations apply to Your use, this Public + License does not apply, and You do not need to comply with + its terms and conditions. + + 3. Term. The term of this Public License is specified in Section + 6(a). + + 4. Media and formats; technical modifications allowed. The + Licensor authorizes You to exercise the Licensed Rights in + all media and formats whether now known or hereafter created, + and to make technical modifications necessary to do so. 
The + Licensor waives and/or agrees not to assert any right or + authority to forbid You from making technical modifications + necessary to exercise the Licensed Rights, including + technical modifications necessary to circumvent Effective + Technological Measures. For purposes of this Public License, + simply making modifications authorized by this Section 2(a) + (4) never produces Adapted Material. + + 5. Downstream recipients. + + a. Offer from the Licensor -- Licensed Material. Every + recipient of the Licensed Material automatically + receives an offer from the Licensor to exercise the + Licensed Rights under the terms and conditions of this + Public License. + + b. No downstream restrictions. You may not offer or impose + any additional or different terms or conditions on, or + apply any Effective Technological Measures to, the + Licensed Material if doing so restricts exercise of the + Licensed Rights by any recipient of the Licensed + Material. + + 6. No endorsement. Nothing in this Public License constitutes or + may be construed as permission to assert or imply that You + are, or that Your use of the Licensed Material is, connected + with, or sponsored, endorsed, or granted official status by, + the Licensor or others designated to receive attribution as + provided in Section 3(a)(1)(A)(i). + + b. Other rights. + + 1. Moral rights, such as the right of integrity, are not + licensed under this Public License, nor are publicity, + privacy, and/or other similar personality rights; however, to + the extent possible, the Licensor waives and/or agrees not to + assert any such rights held by the Licensor to the limited + extent necessary to allow You to exercise the Licensed + Rights, but not otherwise. + + 2. Patent and trademark rights are not licensed under this + Public License. + + 3. To the extent possible, the Licensor waives any right to + collect royalties from You for the exercise of the Licensed + Rights, whether directly or through a collecting society + under any voluntary or waivable statutory or compulsory + licensing scheme. In all other cases the Licensor expressly + reserves any right to collect such royalties, including when + the Licensed Material is used other than for NonCommercial + purposes. + +Section 3 -- License Conditions. + +Your exercise of the Licensed Rights is expressly made subject to the +following conditions. + + a. Attribution. + + 1. If You Share the Licensed Material (including in modified + form), You must: + + a. retain the following if it is supplied by the Licensor + with the Licensed Material: + + i. identification of the creator(s) of the Licensed + Material and any others designated to receive + attribution, in any reasonable manner requested by + the Licensor (including by pseudonym if + designated); + + ii. a copyright notice; + + iii. a notice that refers to this Public License; + + iv. a notice that refers to the disclaimer of + warranties; + + v. a URI or hyperlink to the Licensed Material to the + extent reasonably practicable; + + b. indicate if You modified the Licensed Material and + retain an indication of any previous modifications; and + + c. indicate the Licensed Material is licensed under this + Public License, and include the text of, or the URI or + hyperlink to, this Public License. + + 2. You may satisfy the conditions in Section 3(a)(1) in any + reasonable manner based on the medium, means, and context in + which You Share the Licensed Material. 
For example, it may be + reasonable to satisfy the conditions by providing a URI or + hyperlink to a resource that includes the required + information. + + 3. If requested by the Licensor, You must remove any of the + information required by Section 3(a)(1)(A) to the extent + reasonably practicable. + + 4. If You Share Adapted Material You produce, the Adapter's + License You apply must not prevent recipients of the Adapted + Material from complying with this Public License. + +Section 4 -- Sui Generis Database Rights. + +Where the Licensed Rights include Sui Generis Database Rights that +apply to Your use of the Licensed Material: + + a. for the avoidance of doubt, Section 2(a)(1) grants You the right + to extract, reuse, reproduce, and Share all or a substantial + portion of the contents of the database for NonCommercial purposes + only; + + b. if You include all or a substantial portion of the database + contents in a database in which You have Sui Generis Database + Rights, then the database in which You have Sui Generis Database + Rights (but not its individual contents) is Adapted Material; and + + c. You must comply with the conditions in Section 3(a) if You Share + all or a substantial portion of the contents of the database. + +For the avoidance of doubt, this Section 4 supplements and does not +replace Your obligations under this Public License where the Licensed +Rights include other Copyright and Similar Rights. + +Section 5 -- Disclaimer of Warranties and Limitation of Liability. + + a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE + EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS + AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF + ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS, + IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION, + WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR + PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, + ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT + KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT + ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU. + + b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE + TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, + NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT, + INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES, + COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR + USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN + ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR + DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR + IN PART, THIS LIMITATION MAY NOT APPLY TO YOU. + + c. The disclaimer of warranties and limitation of liability provided + above shall be interpreted in a manner that, to the extent + possible, most closely approximates an absolute disclaimer and + waiver of all liability. + +Section 6 -- Term and Termination. + + a. This Public License applies for the term of the Copyright and + Similar Rights licensed here. However, if You fail to comply with + this Public License, then Your rights under this Public License + terminate automatically. + + b. Where Your right to use the Licensed Material has terminated under + Section 6(a), it reinstates: + + 1. automatically as of the date the violation is cured, provided + it is cured within 30 days of Your discovery of the + violation; or + + 2. upon express reinstatement by the Licensor. 
+ + For the avoidance of doubt, this Section 6(b) does not affect any + right the Licensor may have to seek remedies for Your violations + of this Public License. + + c. For the avoidance of doubt, the Licensor may also offer the + Licensed Material under separate terms or conditions or stop + distributing the Licensed Material at any time; however, doing so + will not terminate this Public License. + + d. Sections 1, 5, 6, 7, and 8 survive termination of this Public + License. + +Section 7 -- Other Terms and Conditions. + + a. The Licensor shall not be bound by any additional or different + terms or conditions communicated by You unless expressly agreed. + + b. Any arrangements, understandings, or agreements regarding the + Licensed Material not stated herein are separate from and + independent of the terms and conditions of this Public License. + +Section 8 -- Interpretation. + + a. For the avoidance of doubt, this Public License does not, and + shall not be interpreted to, reduce, limit, restrict, or impose + conditions on any use of the Licensed Material that could lawfully + be made without permission under this Public License. + + b. To the extent possible, if any provision of this Public License is + deemed unenforceable, it shall be automatically reformed to the + minimum extent necessary to make it enforceable. If the provision + cannot be reformed, it shall be severed from this Public License + without affecting the enforceability of the remaining terms and + conditions. + + c. No term or condition of this Public License will be waived and no + failure to comply consented to unless expressly agreed to by the + Licensor. + + d. Nothing in this Public License constitutes or may be interpreted + as a limitation upon, or waiver of, any privileges and immunities + that apply to the Licensor or You, including from the legal + processes of any jurisdiction or authority. + +======================================================================= + +Creative Commons is not a party to its public +licenses. Notwithstanding, Creative Commons may elect to apply one of +its public licenses to material it publishes and in those instances +will be considered the “Licensor.” The text of the Creative Commons +public licenses is dedicated to the public domain under the CC0 Public +Domain Dedication. Except for the limited purpose of indicating that +material is shared under a Creative Commons public license or as +otherwise permitted by the Creative Commons policies published at +creativecommons.org/policies, Creative Commons does not authorize the +use of the trademark "Creative Commons" or any other trademark or logo +of Creative Commons without its prior written consent including, +without limitation, in connection with any unauthorized modifications +to any of its public licenses or any other arrangements, +understandings, or agreements concerning use of licensed material. For +the avoidance of doubt, this paragraph does not form part of the +public licenses. + +Creative Commons may be contacted at creativecommons.org. 
diff --git a/ComfyUI_ExtraModels/DiT/conf.py b/ComfyUI_ExtraModels/DiT/conf.py new file mode 100644 index 0000000000000000000000000000000000000000..04fe5d4d1256158861f46a3187bf8d1c3f164d64 --- /dev/null +++ b/ComfyUI_ExtraModels/DiT/conf.py @@ -0,0 +1,120 @@ +""" +List of all DiT model types / settings +""" +sampling_settings = { + "beta_schedule" : "sqrt_linear", + "linear_start" : 0.0001, + "linear_end" : 0.02, + "timesteps" : 1000, +} + +dit_conf = { + "XL/2": { # DiT_XL_2 + "unet_config": { + "depth" : 28, + "num_heads" : 16, + "patch_size" : 2, + "hidden_size" : 1152, + }, + "sampling_settings" : sampling_settings, + }, + "XL/4": { # DiT_XL_4 + "unet_config": { + "depth" : 28, + "num_heads" : 16, + "patch_size" : 4, + "hidden_size" : 1152, + }, + "sampling_settings" : sampling_settings, + }, + "XL/8": { # DiT_XL_8 + "unet_config": { + "depth" : 28, + "num_heads" : 16, + "patch_size" : 8, + "hidden_size" : 1152, + }, + "sampling_settings" : sampling_settings, + }, + "L/2": { # DiT_L_2 + "unet_config": { + "depth" : 24, + "num_heads" : 16, + "patch_size" : 2, + "hidden_size" : 1024, + }, + "sampling_settings" : sampling_settings, + }, + "L/4": { # DiT_L_4 + "unet_config": { + "depth" : 24, + "num_heads" : 16, + "patch_size" : 4, + "hidden_size" : 1024, + }, + "sampling_settings" : sampling_settings, + }, + "L/8": { # DiT_L_8 + "unet_config": { + "depth" : 24, + "num_heads" : 16, + "patch_size" : 8, + "hidden_size" : 1024, + }, + "sampling_settings" : sampling_settings, + }, + "B/2": { # DiT_B_2 + "unet_config": { + "depth" : 12, + "num_heads" : 12, + "patch_size" : 2, + "hidden_size" : 768, + }, + "sampling_settings" : sampling_settings, + }, + "B/4": { # DiT_B_4 + "unet_config": { + "depth" : 12, + "num_heads" : 12, + "patch_size" : 4, + "hidden_size" : 768, + }, + "sampling_settings" : sampling_settings, + }, + "B/8": { # DiT_B_8 + "unet_config": { + "depth" : 12, + "num_heads" : 12, + "patch_size" : 8, + "hidden_size" : 768, + }, + "sampling_settings" : sampling_settings, + }, + "S/2": { # DiT_S_2 + "unet_config": { + "depth" : 12, + "num_heads" : 6, + "patch_size" : 2, + "hidden_size" : 384, + }, + "sampling_settings" : sampling_settings, + }, + "S/4": { # DiT_S_4 + "unet_config": { + "depth" : 12, + "num_heads" : 6, + "patch_size" : 4, + "hidden_size" : 384, + }, + "sampling_settings" : sampling_settings, + }, + "S/8": { # DiT_S_8 + "unet_config": { + "depth" : 12, + "num_heads" : 6, + "patch_size" : 8, + "hidden_size" : 384, + }, + "sampling_settings" : sampling_settings, + }, +} diff --git a/ComfyUI_ExtraModels/DiT/labels/imagenet1000.json b/ComfyUI_ExtraModels/DiT/labels/imagenet1000.json new file mode 100644 index 0000000000000000000000000000000000000000..7e5b4c6dee3f095ca0aa3da286f3af9d6e627b72 --- /dev/null +++ b/ComfyUI_ExtraModels/DiT/labels/imagenet1000.json @@ -0,0 +1,1002 @@ +{ + "0": "tench, Tinca tinca", + "1": "goldfish, Carassius auratus", + "2": "great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias", + "3": "tiger shark, Galeocerdo cuvieri", + "4": "hammerhead, hammerhead shark", + "5": "electric ray, crampfish, numbfish, torpedo", + "6": "stingray", + "7": "cock", + "8": "hen", + "9": "ostrich, Struthio camelus", + "10": "brambling, Fringilla montifringilla", + "11": "goldfinch, Carduelis carduelis", + "12": "house finch, linnet, Carpodacus mexicanus", + "13": "junco, snowbird", + "14": "indigo bunting, indigo finch, indigo bird, Passerina cyanea", + "15": "robin, American robin, Turdus migratorius", + "16": "bulbul", + "17": "jay", + 
"18": "magpie", + "19": "chickadee", + "20": "water ouzel, dipper", + "21": "kite", + "22": "bald eagle, American eagle, Haliaeetus leucocephalus", + "23": "vulture", + "24": "great grey owl, great gray owl, Strix nebulosa", + "25": "European fire salamander, Salamandra salamandra", + "26": "common newt, Triturus vulgaris", + "27": "eft", + "28": "spotted salamander, Ambystoma maculatum", + "29": "axolotl, mud puppy, Ambystoma mexicanum", + "30": "bullfrog, Rana catesbeiana", + "31": "tree frog, tree-frog", + "32": "tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui", + "33": "loggerhead, loggerhead turtle, Caretta caretta", + "34": "leatherback turtle, leatherback, leathery turtle, Dermochelys coriacea", + "35": "mud turtle", + "36": "terrapin", + "37": "box turtle, box tortoise", + "38": "banded gecko", + "39": "common iguana, iguana, Iguana iguana", + "40": "American chameleon, anole, Anolis carolinensis", + "41": "whiptail, whiptail lizard", + "42": "agama", + "43": "frilled lizard, Chlamydosaurus kingi", + "44": "alligator lizard", + "45": "Gila monster, Heloderma suspectum", + "46": "green lizard, Lacerta viridis", + "47": "African chameleon, Chamaeleo chamaeleon", + "48": "Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis", + "49": "African crocodile, Nile crocodile, Crocodylus niloticus", + "50": "American alligator, Alligator mississipiensis", + "51": "triceratops", + "52": "thunder snake, worm snake, Carphophis amoenus", + "53": "ringneck snake, ring-necked snake, ring snake", + "54": "hognose snake, puff adder, sand viper", + "55": "green snake, grass snake", + "56": "king snake, kingsnake", + "57": "garter snake, grass snake", + "58": "water snake", + "59": "vine snake", + "60": "night snake, Hypsiglena torquata", + "61": "boa constrictor, Constrictor constrictor", + "62": "rock python, rock snake, Python sebae", + "63": "Indian cobra, Naja naja", + "64": "green mamba", + "65": "sea snake", + "66": "horned viper, cerastes, sand viper, horned asp, Cerastes cornutus", + "67": "diamondback, diamondback rattlesnake, Crotalus adamanteus", + "68": "sidewinder, horned rattlesnake, Crotalus cerastes", + "69": "trilobite", + "70": "harvestman, daddy longlegs, Phalangium opilio", + "71": "scorpion", + "72": "black and gold garden spider, Argiope aurantia", + "73": "barn spider, Araneus cavaticus", + "74": "garden spider, Aranea diademata", + "75": "black widow, Latrodectus mactans", + "76": "tarantula", + "77": "wolf spider, hunting spider", + "78": "tick", + "79": "centipede", + "80": "black grouse", + "81": "ptarmigan", + "82": "ruffed grouse, partridge, Bonasa umbellus", + "83": "prairie chicken, prairie grouse, prairie fowl", + "84": "peacock", + "85": "quail", + "86": "partridge", + "87": "African grey, African gray, Psittacus erithacus", + "88": "macaw", + "89": "sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita", + "90": "lorikeet", + "91": "coucal", + "92": "bee eater", + "93": "hornbill", + "94": "hummingbird", + "95": "jacamar", + "96": "toucan", + "97": "drake", + "98": "red-breasted merganser, Mergus serrator", + "99": "goose", + "100": "black swan, Cygnus atratus", + "101": "tusker", + "102": "echidna, spiny anteater, anteater", + "103": "platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus", + "104": "wallaby, brush kangaroo", + "105": "koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus", + "106": "wombat", + "107": "jellyfish", + "108": "sea anemone, anemone", + 
"109": "brain coral", + "110": "flatworm, platyhelminth", + "111": "nematode, nematode worm, roundworm", + "112": "conch", + "113": "snail", + "114": "slug", + "115": "sea slug, nudibranch", + "116": "chiton, coat-of-mail shell, sea cradle, polyplacophore", + "117": "chambered nautilus, pearly nautilus, nautilus", + "118": "Dungeness crab, Cancer magister", + "119": "rock crab, Cancer irroratus", + "120": "fiddler crab", + "121": "king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica", + "122": "American lobster, Northern lobster, Maine lobster, Homarus americanus", + "123": "spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish", + "124": "crayfish, crawfish, crawdad, crawdaddy", + "125": "hermit crab", + "126": "isopod", + "127": "white stork, Ciconia ciconia", + "128": "black stork, Ciconia nigra", + "129": "spoonbill", + "130": "flamingo", + "131": "little blue heron, Egretta caerulea", + "132": "American egret, great white heron, Egretta albus", + "133": "bittern", + "134": "crane", + "135": "limpkin, Aramus pictus", + "136": "European gallinule, Porphyrio porphyrio", + "137": "American coot, marsh hen, mud hen, water hen, Fulica americana", + "138": "bustard", + "139": "ruddy turnstone, Arenaria interpres", + "140": "red-backed sandpiper, dunlin, Erolia alpina", + "141": "redshank, Tringa totanus", + "142": "dowitcher", + "143": "oystercatcher, oyster catcher", + "144": "pelican", + "145": "king penguin, Aptenodytes patagonica", + "146": "albatross, mollymawk", + "147": "grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus", + "148": "killer whale, killer, orca, grampus, sea wolf, Orcinus orca", + "149": "dugong, Dugong dugon", + "150": "sea lion", + "151": "Chihuahua", + "152": "Japanese spaniel", + "153": "Maltese dog, Maltese terrier, Maltese", + "154": "Pekinese, Pekingese, Peke", + "155": "Shih-Tzu", + "156": "Blenheim spaniel", + "157": "papillon", + "158": "toy terrier", + "159": "Rhodesian ridgeback", + "160": "Afghan hound, Afghan", + "161": "basset, basset hound", + "162": "beagle", + "163": "bloodhound, sleuthhound", + "164": "bluetick", + "165": "black-and-tan coonhound", + "166": "Walker hound, Walker foxhound", + "167": "English foxhound", + "168": "redbone", + "169": "borzoi, Russian wolfhound", + "170": "Irish wolfhound", + "171": "Italian greyhound", + "172": "whippet", + "173": "Ibizan hound, Ibizan Podenco", + "174": "Norwegian elkhound, elkhound", + "175": "otterhound, otter hound", + "176": "Saluki, gazelle hound", + "177": "Scottish deerhound, deerhound", + "178": "Weimaraner", + "179": "Staffordshire bullterrier, Staffordshire bull terrier", + "180": "American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier", + "181": "Bedlington terrier", + "182": "Border terrier", + "183": "Kerry blue terrier", + "184": "Irish terrier", + "185": "Norfolk terrier", + "186": "Norwich terrier", + "187": "Yorkshire terrier", + "188": "wire-haired fox terrier", + "189": "Lakeland terrier", + "190": "Sealyham terrier, Sealyham", + "191": "Airedale, Airedale terrier", + "192": "cairn, cairn terrier", + "193": "Australian terrier", + "194": "Dandie Dinmont, Dandie Dinmont terrier", + "195": "Boston bull, Boston terrier", + "196": "miniature schnauzer", + "197": "giant schnauzer", + "198": "standard schnauzer", + "199": "Scotch terrier, Scottish terrier, Scottie", + "200": "Tibetan terrier, chrysanthemum dog", + "201": "silky terrier, Sydney silky", + "202": 
"soft-coated wheaten terrier", + "203": "West Highland white terrier", + "204": "Lhasa, Lhasa apso", + "205": "flat-coated retriever", + "206": "curly-coated retriever", + "207": "golden retriever", + "208": "Labrador retriever", + "209": "Chesapeake Bay retriever", + "210": "German short-haired pointer", + "211": "vizsla, Hungarian pointer", + "212": "English setter", + "213": "Irish setter, red setter", + "214": "Gordon setter", + "215": "Brittany spaniel", + "216": "clumber, clumber spaniel", + "217": "English springer, English springer spaniel", + "218": "Welsh springer spaniel", + "219": "cocker spaniel, English cocker spaniel, cocker", + "220": "Sussex spaniel", + "221": "Irish water spaniel", + "222": "kuvasz", + "223": "schipperke", + "224": "groenendael", + "225": "malinois", + "226": "briard", + "227": "kelpie", + "228": "komondor", + "229": "Old English sheepdog, bobtail", + "230": "Shetland sheepdog, Shetland sheep dog, Shetland", + "231": "collie", + "232": "Border collie", + "233": "Bouvier des Flandres, Bouviers des Flandres", + "234": "Rottweiler", + "235": "German shepherd, German shepherd dog, German police dog, alsatian", + "236": "Doberman, Doberman pinscher", + "237": "miniature pinscher", + "238": "Greater Swiss Mountain dog", + "239": "Bernese mountain dog", + "240": "Appenzeller", + "241": "EntleBucher", + "242": "boxer", + "243": "bull mastiff", + "244": "Tibetan mastiff", + "245": "French bulldog", + "246": "Great Dane", + "247": "Saint Bernard, St Bernard", + "248": "Eskimo dog, husky", + "249": "malamute, malemute, Alaskan malamute", + "250": "Siberian husky", + "251": "dalmatian, coach dog, carriage dog", + "252": "affenpinscher, monkey pinscher, monkey dog", + "253": "basenji", + "254": "pug, pug-dog", + "255": "Leonberg", + "256": "Newfoundland, Newfoundland dog", + "257": "Great Pyrenees", + "258": "Samoyed, Samoyede", + "259": "Pomeranian", + "260": "chow, chow chow", + "261": "keeshond", + "262": "Brabancon griffon", + "263": "Pembroke, Pembroke Welsh corgi", + "264": "Cardigan, Cardigan Welsh corgi", + "265": "toy poodle", + "266": "miniature poodle", + "267": "standard poodle", + "268": "Mexican hairless", + "269": "timber wolf, grey wolf, gray wolf, Canis lupus", + "270": "white wolf, Arctic wolf, Canis lupus tundrarum", + "271": "red wolf, maned wolf, Canis rufus, Canis niger", + "272": "coyote, prairie wolf, brush wolf, Canis latrans", + "273": "dingo, warrigal, warragal, Canis dingo", + "274": "dhole, Cuon alpinus", + "275": "African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus", + "276": "hyena, hyaena", + "277": "red fox, Vulpes vulpes", + "278": "kit fox, Vulpes macrotis", + "279": "Arctic fox, white fox, Alopex lagopus", + "280": "grey fox, gray fox, Urocyon cinereoargenteus", + "281": "tabby, tabby cat", + "282": "tiger cat", + "283": "Persian cat", + "284": "Siamese cat, Siamese", + "285": "Egyptian cat", + "286": "cougar, puma, catamount, mountain lion, painter, panther, Felis concolor", + "287": "lynx, catamount", + "288": "leopard, Panthera pardus", + "289": "snow leopard, ounce, Panthera uncia", + "290": "jaguar, panther, Panthera onca, Felis onca", + "291": "lion, king of beasts, Panthera leo", + "292": "tiger, Panthera tigris", + "293": "cheetah, chetah, Acinonyx jubatus", + "294": "brown bear, bruin, Ursus arctos", + "295": "American black bear, black bear, Ursus americanus, Euarctos americanus", + "296": "ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus", + "297": "sloth bear, Melursus ursinus, Ursus ursinus", + 
"298": "mongoose", + "299": "meerkat, mierkat", + "300": "tiger beetle", + "301": "ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle", + "302": "ground beetle, carabid beetle", + "303": "long-horned beetle, longicorn, longicorn beetle", + "304": "leaf beetle, chrysomelid", + "305": "dung beetle", + "306": "rhinoceros beetle", + "307": "weevil", + "308": "fly", + "309": "bee", + "310": "ant, emmet, pismire", + "311": "grasshopper, hopper", + "312": "cricket", + "313": "walking stick, walkingstick, stick insect", + "314": "cockroach, roach", + "315": "mantis, mantid", + "316": "cicada, cicala", + "317": "leafhopper", + "318": "lacewing, lacewing fly", + "319": "dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk", + "320": "damselfly", + "321": "admiral", + "322": "ringlet, ringlet butterfly", + "323": "monarch, monarch butterfly, milkweed butterfly, Danaus plexippus", + "324": "cabbage butterfly", + "325": "sulphur butterfly, sulfur butterfly", + "326": "lycaenid, lycaenid butterfly", + "327": "starfish, sea star", + "328": "sea urchin", + "329": "sea cucumber, holothurian", + "330": "wood rabbit, cottontail, cottontail rabbit", + "331": "hare", + "332": "Angora, Angora rabbit", + "333": "hamster", + "334": "porcupine, hedgehog", + "335": "fox squirrel, eastern fox squirrel, Sciurus niger", + "336": "marmot", + "337": "beaver", + "338": "guinea pig, Cavia cobaya", + "339": "sorrel", + "340": "zebra", + "341": "hog, pig, grunter, squealer, Sus scrofa", + "342": "wild boar, boar, Sus scrofa", + "343": "warthog", + "344": "hippopotamus, hippo, river horse, Hippopotamus amphibius", + "345": "ox", + "346": "water buffalo, water ox, Asiatic buffalo, Bubalus bubalis", + "347": "bison", + "348": "ram, tup", + "349": "bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis", + "350": "ibex, Capra ibex", + "351": "hartebeest", + "352": "impala, Aepyceros melampus", + "353": "gazelle", + "354": "Arabian camel, dromedary, Camelus dromedarius", + "355": "llama", + "356": "weasel", + "357": "mink", + "358": "polecat, fitch, foulmart, foumart, Mustela putorius", + "359": "black-footed ferret, ferret, Mustela nigripes", + "360": "otter", + "361": "skunk, polecat, wood pussy", + "362": "badger", + "363": "armadillo", + "364": "three-toed sloth, ai, Bradypus tridactylus", + "365": "orangutan, orang, orangutang, Pongo pygmaeus", + "366": "gorilla, Gorilla gorilla", + "367": "chimpanzee, chimp, Pan troglodytes", + "368": "gibbon, Hylobates lar", + "369": "siamang, Hylobates syndactylus, Symphalangus syndactylus", + "370": "guenon, guenon monkey", + "371": "patas, hussar monkey, Erythrocebus patas", + "372": "baboon", + "373": "macaque", + "374": "langur", + "375": "colobus, colobus monkey", + "376": "proboscis monkey, Nasalis larvatus", + "377": "marmoset", + "378": "capuchin, ringtail, Cebus capucinus", + "379": "howler monkey, howler", + "380": "titi, titi monkey", + "381": "spider monkey, Ateles geoffroyi", + "382": "squirrel monkey, Saimiri sciureus", + "383": "Madagascar cat, ring-tailed lemur, Lemur catta", + "384": "indri, indris, Indri indri, Indri brevicaudatus", + "385": "Indian elephant, Elephas maximus", + "386": "African elephant, Loxodonta africana", + "387": "lesser panda, red panda, panda, bear cat, cat bear, Ailurus fulgens", + "388": "giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca", + "389": "barracouta, snoek", + "390": "eel", + "391": "coho, cohoe, coho 
salmon, blue jack, silver salmon, Oncorhynchus kisutch", + "392": "rock beauty, Holocanthus tricolor", + "393": "anemone fish", + "394": "sturgeon", + "395": "gar, garfish, garpike, billfish, Lepisosteus osseus", + "396": "lionfish", + "397": "puffer, pufferfish, blowfish, globefish", + "398": "abacus", + "399": "abaya", + "400": "academic gown, academic robe, judge's robe", + "401": "accordion, piano accordion, squeeze box", + "402": "acoustic guitar", + "403": "aircraft carrier, carrier, flattop, attack aircraft carrier", + "404": "airliner", + "405": "airship, dirigible", + "406": "altar", + "407": "ambulance", + "408": "amphibian, amphibious vehicle", + "409": "analog clock", + "410": "apiary, bee house", + "411": "apron", + "412": "ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin", + "413": "assault rifle, assault gun", + "414": "backpack, back pack, knapsack, packsack, rucksack, haversack", + "415": "bakery, bakeshop, bakehouse", + "416": "balance beam, beam", + "417": "balloon", + "418": "ballpoint, ballpoint pen, ballpen, Biro", + "419": "Band Aid", + "420": "banjo", + "421": "bannister, banister, balustrade, balusters, handrail", + "422": "barbell", + "423": "barber chair", + "424": "barbershop", + "425": "barn", + "426": "barometer", + "427": "barrel, cask", + "428": "barrow, garden cart, lawn cart, wheelbarrow", + "429": "baseball", + "430": "basketball", + "431": "bassinet", + "432": "bassoon", + "433": "bathing cap, swimming cap", + "434": "bath towel", + "435": "bathtub, bathing tub, bath, tub", + "436": "beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon", + "437": "beacon, lighthouse, beacon light, pharos", + "438": "beaker", + "439": "bearskin, busby, shako", + "440": "beer bottle", + "441": "beer glass", + "442": "bell cote, bell cot", + "443": "bib", + "444": "bicycle-built-for-two, tandem bicycle, tandem", + "445": "bikini, two-piece", + "446": "binder, ring-binder", + "447": "binoculars, field glasses, opera glasses", + "448": "birdhouse", + "449": "boathouse", + "450": "bobsled, bobsleigh, bob", + "451": "bolo tie, bolo, bola tie, bola", + "452": "bonnet, poke bonnet", + "453": "bookcase", + "454": "bookshop, bookstore, bookstall", + "455": "bottlecap", + "456": "bow", + "457": "bow tie, bow-tie, bowtie", + "458": "brass, memorial tablet, plaque", + "459": "brassiere, bra, bandeau", + "460": "breakwater, groin, groyne, mole, bulwark, seawall, jetty", + "461": "breastplate, aegis, egis", + "462": "broom", + "463": "bucket, pail", + "464": "buckle", + "465": "bulletproof vest", + "466": "bullet train, bullet", + "467": "butcher shop, meat market", + "468": "cab, hack, taxi, taxicab", + "469": "caldron, cauldron", + "470": "candle, taper, wax light", + "471": "cannon", + "472": "canoe", + "473": "can opener, tin opener", + "474": "cardigan", + "475": "car mirror", + "476": "carousel, carrousel, merry-go-round, roundabout, whirligig", + "477": "carpenter's kit, tool kit", + "478": "carton", + "479": "car wheel", + "480": "cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM", + "481": "cassette", + "482": "cassette player", + "483": "castle", + "484": "catamaran", + "485": "CD player", + "486": "cello, violoncello", + "487": "cellular telephone, cellular phone, cellphone, cell, mobile phone", + "488": "chain", + "489": "chainlink fence", + "490": "chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring 
armour", + "491": "chain saw, chainsaw", + "492": "chest", + "493": "chiffonier, commode", + "494": "chime, bell, gong", + "495": "china cabinet, china closet", + "496": "Christmas stocking", + "497": "church, church building", + "498": "cinema, movie theater, movie theatre, movie house, picture palace", + "499": "cleaver, meat cleaver, chopper", + "500": "cliff dwelling", + "501": "cloak", + "502": "clog, geta, patten, sabot", + "503": "cocktail shaker", + "504": "coffee mug", + "505": "coffeepot", + "506": "coil, spiral, volute, whorl, helix", + "507": "combination lock", + "508": "computer keyboard, keypad", + "509": "confectionery, confectionary, candy store", + "510": "container ship, containership, container vessel", + "511": "convertible", + "512": "corkscrew, bottle screw", + "513": "cornet, horn, trumpet, trump", + "514": "cowboy boot", + "515": "cowboy hat, ten-gallon hat", + "516": "cradle", + "517": "crane", + "518": "crash helmet", + "519": "crate", + "520": "crib, cot", + "521": "Crock Pot", + "522": "croquet ball", + "523": "crutch", + "524": "cuirass", + "525": "dam, dike, dyke", + "526": "desk", + "527": "desktop computer", + "528": "dial telephone, dial phone", + "529": "diaper, nappy, napkin", + "530": "digital clock", + "531": "digital watch", + "532": "dining table, board", + "533": "dishrag, dishcloth", + "534": "dishwasher, dish washer, dishwashing machine", + "535": "disk brake, disc brake", + "536": "dock, dockage, docking facility", + "537": "dogsled, dog sled, dog sleigh", + "538": "dome", + "539": "doormat, welcome mat", + "540": "drilling platform, offshore rig", + "541": "drum, membranophone, tympan", + "542": "drumstick", + "543": "dumbbell", + "544": "Dutch oven", + "545": "electric fan, blower", + "546": "electric guitar", + "547": "electric locomotive", + "548": "entertainment center", + "549": "envelope", + "550": "espresso maker", + "551": "face powder", + "552": "feather boa, boa", + "553": "file, file cabinet, filing cabinet", + "554": "fireboat", + "555": "fire engine, fire truck", + "556": "fire screen, fireguard", + "557": "flagpole, flagstaff", + "558": "flute, transverse flute", + "559": "folding chair", + "560": "football helmet", + "561": "forklift", + "562": "fountain", + "563": "fountain pen", + "564": "four-poster", + "565": "freight car", + "566": "French horn, horn", + "567": "frying pan, frypan, skillet", + "568": "fur coat", + "569": "garbage truck, dustcart", + "570": "gasmask, respirator, gas helmet", + "571": "gas pump, gasoline pump, petrol pump, island dispenser", + "572": "goblet", + "573": "go-kart", + "574": "golf ball", + "575": "golfcart, golf cart", + "576": "gondola", + "577": "gong, tam-tam", + "578": "gown", + "579": "grand piano, grand", + "580": "greenhouse, nursery, glasshouse", + "581": "grille, radiator grille", + "582": "grocery store, grocery, food market, market", + "583": "guillotine", + "584": "hair slide", + "585": "hair spray", + "586": "half track", + "587": "hammer", + "588": "hamper", + "589": "hand blower, blow dryer, blow drier, hair dryer, hair drier", + "590": "hand-held computer, hand-held microcomputer", + "591": "handkerchief, hankie, hanky, hankey", + "592": "hard disc, hard disk, fixed disk", + "593": "harmonica, mouth organ, harp, mouth harp", + "594": "harp", + "595": "harvester, reaper", + "596": "hatchet", + "597": "holster", + "598": "home theater, home theatre", + "599": "honeycomb", + "600": "hook, claw", + "601": "hoopskirt, crinoline", + "602": "horizontal bar, high bar", + "603": "horse 
cart, horse-cart", + "604": "hourglass", + "605": "iPod", + "606": "iron, smoothing iron", + "607": "jack-o'-lantern", + "608": "jean, blue jean, denim", + "609": "jeep, landrover", + "610": "jersey, T-shirt, tee shirt", + "611": "jigsaw puzzle", + "612": "jinrikisha, ricksha, rickshaw", + "613": "joystick", + "614": "kimono", + "615": "knee pad", + "616": "knot", + "617": "lab coat, laboratory coat", + "618": "ladle", + "619": "lampshade, lamp shade", + "620": "laptop, laptop computer", + "621": "lawn mower, mower", + "622": "lens cap, lens cover", + "623": "letter opener, paper knife, paperknife", + "624": "library", + "625": "lifeboat", + "626": "lighter, light, igniter, ignitor", + "627": "limousine, limo", + "628": "liner, ocean liner", + "629": "lipstick, lip rouge", + "630": "Loafer", + "631": "lotion", + "632": "loudspeaker, speaker, speaker unit, loudspeaker system, speaker system", + "633": "loupe, jeweler's loupe", + "634": "lumbermill, sawmill", + "635": "magnetic compass", + "636": "mailbag, postbag", + "637": "mailbox, letter box", + "638": "maillot", + "639": "maillot, tank suit", + "640": "manhole cover", + "641": "maraca", + "642": "marimba, xylophone", + "643": "mask", + "644": "matchstick", + "645": "maypole", + "646": "maze, labyrinth", + "647": "measuring cup", + "648": "medicine chest, medicine cabinet", + "649": "megalith, megalithic structure", + "650": "microphone, mike", + "651": "microwave, microwave oven", + "652": "military uniform", + "653": "milk can", + "654": "minibus", + "655": "miniskirt, mini", + "656": "minivan", + "657": "missile", + "658": "mitten", + "659": "mixing bowl", + "660": "mobile home, manufactured home", + "661": "Model T", + "662": "modem", + "663": "monastery", + "664": "monitor", + "665": "moped", + "666": "mortar", + "667": "mortarboard", + "668": "mosque", + "669": "mosquito net", + "670": "motor scooter, scooter", + "671": "mountain bike, all-terrain bike, off-roader", + "672": "mountain tent", + "673": "mouse, computer mouse", + "674": "mousetrap", + "675": "moving van", + "676": "muzzle", + "677": "nail", + "678": "neck brace", + "679": "necklace", + "680": "nipple", + "681": "notebook, notebook computer", + "682": "obelisk", + "683": "oboe, hautboy, hautbois", + "684": "ocarina, sweet potato", + "685": "odometer, hodometer, mileometer, milometer", + "686": "oil filter", + "687": "organ, pipe organ", + "688": "oscilloscope, scope, cathode-ray oscilloscope, CRO", + "689": "overskirt", + "690": "oxcart", + "691": "oxygen mask", + "692": "packet", + "693": "paddle, boat paddle", + "694": "paddlewheel, paddle wheel", + "695": "padlock", + "696": "paintbrush", + "697": "pajama, pyjama, pj's, jammies", + "698": "palace", + "699": "panpipe, pandean pipe, syrinx", + "700": "paper towel", + "701": "parachute, chute", + "702": "parallel bars, bars", + "703": "park bench", + "704": "parking meter", + "705": "passenger car, coach, carriage", + "706": "patio, terrace", + "707": "pay-phone, pay-station", + "708": "pedestal, plinth, footstall", + "709": "pencil box, pencil case", + "710": "pencil sharpener", + "711": "perfume, essence", + "712": "Petri dish", + "713": "photocopier", + "714": "pick, plectrum, plectron", + "715": "pickelhaube", + "716": "picket fence, paling", + "717": "pickup, pickup truck", + "718": "pier", + "719": "piggy bank, penny bank", + "720": "pill bottle", + "721": "pillow", + "722": "ping-pong ball", + "723": "pinwheel", + "724": "pirate, pirate ship", + "725": "pitcher, ewer", + "726": "plane, carpenter's plane, 
woodworking plane", + "727": "planetarium", + "728": "plastic bag", + "729": "plate rack", + "730": "plow, plough", + "731": "plunger, plumber's helper", + "732": "Polaroid camera, Polaroid Land camera", + "733": "pole", + "734": "police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria", + "735": "poncho", + "736": "pool table, billiard table, snooker table", + "737": "pop bottle, soda bottle", + "738": "pot, flowerpot", + "739": "potter's wheel", + "740": "power drill", + "741": "prayer rug, prayer mat", + "742": "printer", + "743": "prison, prison house", + "744": "projectile, missile", + "745": "projector", + "746": "puck, hockey puck", + "747": "punching bag, punch bag, punching ball, punchball", + "748": "purse", + "749": "quill, quill pen", + "750": "quilt, comforter, comfort, puff", + "751": "racer, race car, racing car", + "752": "racket, racquet", + "753": "radiator", + "754": "radio, wireless", + "755": "radio telescope, radio reflector", + "756": "rain barrel", + "757": "recreational vehicle, RV, R.V.", + "758": "reel", + "759": "reflex camera", + "760": "refrigerator, icebox", + "761": "remote control, remote", + "762": "restaurant, eating house, eating place, eatery", + "763": "revolver, six-gun, six-shooter", + "764": "rifle", + "765": "rocking chair, rocker", + "766": "rotisserie", + "767": "rubber eraser, rubber, pencil eraser", + "768": "rugby ball", + "769": "rule, ruler", + "770": "running shoe", + "771": "safe", + "772": "safety pin", + "773": "saltshaker, salt shaker", + "774": "sandal", + "775": "sarong", + "776": "sax, saxophone", + "777": "scabbard", + "778": "scale, weighing machine", + "779": "school bus", + "780": "schooner", + "781": "scoreboard", + "782": "screen, CRT screen", + "783": "screw", + "784": "screwdriver", + "785": "seat belt, seatbelt", + "786": "sewing machine", + "787": "shield, buckler", + "788": "shoe shop, shoe-shop, shoe store", + "789": "shoji", + "790": "shopping basket", + "791": "shopping cart", + "792": "shovel", + "793": "shower cap", + "794": "shower curtain", + "795": "ski", + "796": "ski mask", + "797": "sleeping bag", + "798": "slide rule, slipstick", + "799": "sliding door", + "800": "slot, one-armed bandit", + "801": "snorkel", + "802": "snowmobile", + "803": "snowplow, snowplough", + "804": "soap dispenser", + "805": "soccer ball", + "806": "sock", + "807": "solar dish, solar collector, solar furnace", + "808": "sombrero", + "809": "soup bowl", + "810": "space bar", + "811": "space heater", + "812": "space shuttle", + "813": "spatula", + "814": "speedboat", + "815": "spider web, spider's web", + "816": "spindle", + "817": "sports car, sport car", + "818": "spotlight, spot", + "819": "stage", + "820": "steam locomotive", + "821": "steel arch bridge", + "822": "steel drum", + "823": "stethoscope", + "824": "stole", + "825": "stone wall", + "826": "stopwatch, stop watch", + "827": "stove", + "828": "strainer", + "829": "streetcar, tram, tramcar, trolley, trolley car", + "830": "stretcher", + "831": "studio couch, day bed", + "832": "stupa, tope", + "833": "submarine, pigboat, sub, U-boat", + "834": "suit, suit of clothes", + "835": "sundial", + "836": "sunglass", + "837": "sunglasses, dark glasses, shades", + "838": "sunscreen, sunblock, sun blocker", + "839": "suspension bridge", + "840": "swab, swob, mop", + "841": "sweatshirt", + "842": "swimming trunks, bathing trunks", + "843": "swing", + "844": "switch, electric switch, electrical switch", + "845": "syringe", + "846": "table lamp", + "847": "tank, army tank, 
armored combat vehicle, armoured combat vehicle", + "848": "tape player", + "849": "teapot", + "850": "teddy, teddy bear", + "851": "television, television system", + "852": "tennis ball", + "853": "thatch, thatched roof", + "854": "theater curtain, theatre curtain", + "855": "thimble", + "856": "thresher, thrasher, threshing machine", + "857": "throne", + "858": "tile roof", + "859": "toaster", + "860": "tobacco shop, tobacconist shop, tobacconist", + "861": "toilet seat", + "862": "torch", + "863": "totem pole", + "864": "tow truck, tow car, wrecker", + "865": "toyshop", + "866": "tractor", + "867": "trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi", + "868": "tray", + "869": "trench coat", + "870": "tricycle, trike, velocipede", + "871": "trimaran", + "872": "tripod", + "873": "triumphal arch", + "874": "trolleybus, trolley coach, trackless trolley", + "875": "trombone", + "876": "tub, vat", + "877": "turnstile", + "878": "typewriter keyboard", + "879": "umbrella", + "880": "unicycle, monocycle", + "881": "upright, upright piano", + "882": "vacuum, vacuum cleaner", + "883": "vase", + "884": "vault", + "885": "velvet", + "886": "vending machine", + "887": "vestment", + "888": "viaduct", + "889": "violin, fiddle", + "890": "volleyball", + "891": "waffle iron", + "892": "wall clock", + "893": "wallet, billfold, notecase, pocketbook", + "894": "wardrobe, closet, press", + "895": "warplane, military plane", + "896": "washbasin, handbasin, washbowl, lavabo, wash-hand basin", + "897": "washer, automatic washer, washing machine", + "898": "water bottle", + "899": "water jug", + "900": "water tower", + "901": "whiskey jug", + "902": "whistle", + "903": "wig", + "904": "window screen", + "905": "window shade", + "906": "Windsor tie", + "907": "wine bottle", + "908": "wing", + "909": "wok", + "910": "wooden spoon", + "911": "wool, woolen, woollen", + "912": "worm fence, snake fence, snake-rail fence, Virginia fence", + "913": "wreck", + "914": "yawl", + "915": "yurt", + "916": "web site, website, internet site, site", + "917": "comic book", + "918": "crossword puzzle, crossword", + "919": "street sign", + "920": "traffic light, traffic signal, stoplight", + "921": "book jacket, dust cover, dust jacket, dust wrapper", + "922": "menu", + "923": "plate", + "924": "guacamole", + "925": "consomme", + "926": "hot pot, hotpot", + "927": "trifle", + "928": "ice cream, icecream", + "929": "ice lolly, lolly, lollipop, popsicle", + "930": "French loaf", + "931": "bagel, beigel", + "932": "pretzel", + "933": "cheeseburger", + "934": "hotdog, hot dog, red hot", + "935": "mashed potato", + "936": "head cabbage", + "937": "broccoli", + "938": "cauliflower", + "939": "zucchini, courgette", + "940": "spaghetti squash", + "941": "acorn squash", + "942": "butternut squash", + "943": "cucumber, cuke", + "944": "artichoke, globe artichoke", + "945": "bell pepper", + "946": "cardoon", + "947": "mushroom", + "948": "Granny Smith", + "949": "strawberry", + "950": "orange", + "951": "lemon", + "952": "fig", + "953": "pineapple, ananas", + "954": "banana", + "955": "jackfruit, jak, jack", + "956": "custard apple", + "957": "pomegranate", + "958": "hay", + "959": "carbonara", + "960": "chocolate sauce, chocolate syrup", + "961": "dough", + "962": "meat loaf, meatloaf", + "963": "pizza, pizza pie", + "964": "potpie", + "965": "burrito", + "966": "red wine", + "967": "espresso", + "968": "cup", + "969": "eggnog", + "970": "alp", + "971": "bubble", + "972": "cliff, drop, drop-off", + "973": "coral 
reef", + "974": "geyser", + "975": "lakeside, lakeshore", + "976": "promontory, headland, head, foreland", + "977": "sandbar, sand bar", + "978": "seashore, coast, seacoast, sea-coast", + "979": "valley, vale", + "980": "volcano", + "981": "ballplayer, baseball player", + "982": "groom, bridegroom", + "983": "scuba diver", + "984": "rapeseed", + "985": "daisy", + "986": "yellow lady's slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium parviflorum", + "987": "corn", + "988": "acorn", + "989": "hip, rose hip, rosehip", + "990": "buckeye, horse chestnut, conker", + "991": "coral fungus", + "992": "agaric", + "993": "gyromitra", + "994": "stinkhorn, carrion fungus", + "995": "earthstar", + "996": "hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa", + "997": "bolete", + "998": "ear, spike, capitulum", + "999": "toilet tissue, toilet paper, bathroom tissue" +} \ No newline at end of file diff --git a/ComfyUI_ExtraModels/DiT/loader.py b/ComfyUI_ExtraModels/DiT/loader.py new file mode 100644 index 0000000000000000000000000000000000000000..1995bf4bddde2d4ec34a55dd815dbe4830857aa3 --- /dev/null +++ b/ComfyUI_ExtraModels/DiT/loader.py @@ -0,0 +1,61 @@ +import comfy.supported_models_base +import comfy.latent_formats +import comfy.model_patcher +import comfy.model_base +import comfy.utils +import torch +from comfy import model_management + +class EXM_DiT(comfy.supported_models_base.BASE): + unet_config = {} + unet_extra_config = {} + latent_format = comfy.latent_formats.SD15 + + def __init__(self, model_conf): + self.unet_config = model_conf.get("unet_config", {}) + self.sampling_settings = model_conf.get("sampling_settings", {}) + self.latent_format = self.latent_format() + # UNET is handled by extension + self.unet_config["disable_unet_model_creation"] = True + + def model_type(self, state_dict, prefix=""): + return comfy.model_base.ModelType.EPS + +def load_dit(model_path, model_conf): + state_dict = comfy.utils.load_torch_file(model_path) + state_dict = state_dict.get("model", state_dict) + parameters = comfy.utils.calculate_parameters(state_dict) + unet_dtype = model_management.unet_dtype(model_params=parameters) + load_device = comfy.model_management.get_torch_device() + offload_device = comfy.model_management.unet_offload_device() + + # ignore fp8/etc and use directly for now + manual_cast_dtype = model_management.unet_manual_cast(unet_dtype, load_device) + if manual_cast_dtype: + print(f"DiT: falling back to {manual_cast_dtype}") + unet_dtype = manual_cast_dtype + + model_conf["unet_config"]["num_classes"] = state_dict["y_embedder.embedding_table.weight"].shape[0] - 1 # adj. 
for empty + + model_conf = EXM_DiT(model_conf) + model = comfy.model_base.BaseModel( + model_conf, + model_type=comfy.model_base.ModelType.EPS, + device=model_management.get_torch_device() + ) + + from .model import DiT + model.diffusion_model = DiT(**model_conf.unet_config) + + model.diffusion_model.load_state_dict(state_dict) + model.diffusion_model.dtype = unet_dtype + model.diffusion_model.eval() + model.diffusion_model.to(unet_dtype) + + model_patcher = comfy.model_patcher.ModelPatcher( + model, + load_device = load_device, + offload_device = offload_device, + current_device = "cpu", + ) + return model_patcher diff --git a/ComfyUI_ExtraModels/DiT/model.py b/ComfyUI_ExtraModels/DiT/model.py new file mode 100644 index 0000000000000000000000000000000000000000..e08200db48e3f5c9e6dd4177c701befbe5467dad --- /dev/null +++ b/ComfyUI_ExtraModels/DiT/model.py @@ -0,0 +1,326 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. +# -------------------------------------------------------- +# References: +# GLIDE: https://github.com/openai/glide-text2im +# MAE: https://github.com/facebookresearch/mae/blob/main/models_mae.py +# -------------------------------------------------------- + +import torch +import torch.nn as nn +import numpy as np +import math +from timm.models.vision_transformer import PatchEmbed, Attention, Mlp + + +def modulate(x, shift, scale): + return x * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1) + + +################################################################################# +# Embedding Layers for Timesteps and Class Labels # +################################################################################# + +class TimestepEmbedder(nn.Module): + """ + Embeds scalar timesteps into vector representations. + """ + def __init__(self, hidden_size, frequency_embedding_size=256): + super().__init__() + self.mlp = nn.Sequential( + nn.Linear(frequency_embedding_size, hidden_size, bias=True), + nn.SiLU(), + nn.Linear(hidden_size, hidden_size, bias=True), + ) + self.frequency_embedding_size = frequency_embedding_size + + @staticmethod + def timestep_embedding(t, dim, max_period=10000): + """ + Create sinusoidal timestep embeddings. + :param t: a 1-D Tensor of N indices, one per batch element. + These may be fractional. + :param dim: the dimension of the output. + :param max_period: controls the minimum frequency of the embeddings. + :return: an (N, D) Tensor of positional embeddings. + """ + # https://github.com/openai/glide-text2im/blob/main/glide_text2im/nn.py + half = dim // 2 + freqs = torch.exp( + -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32) / half + ).to(device=t.device) + args = t[:, None].float() * freqs[None] + embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) + if dim % 2: + embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1) + return embedding.to(dtype=t.dtype) + + def forward(self, t): + t_freq = self.timestep_embedding(t, self.frequency_embedding_size) + t_emb = self.mlp(t_freq) + return t_emb + + +class LabelEmbedder(nn.Module): + """ + Embeds class labels into vector representations. Also handles label dropout for classifier-free guidance. 
+ """ + def __init__(self, num_classes, hidden_size, dropout_prob): + super().__init__() + use_cfg_embedding = dropout_prob > 0 + self.embedding_table = nn.Embedding(num_classes + use_cfg_embedding, hidden_size) + self.num_classes = num_classes + self.dropout_prob = dropout_prob + + def token_drop(self, labels, force_drop_ids=None): + """ + Drops labels to enable classifier-free guidance. + """ + if force_drop_ids is None: + drop_ids = torch.rand(labels.shape[0], device=labels.device) < self.dropout_prob + else: + drop_ids = force_drop_ids == 1 + labels = torch.where(drop_ids, self.num_classes, labels) + return labels + + def forward(self, labels, train, force_drop_ids=None): + use_dropout = self.dropout_prob > 0 + if (train and use_dropout) or (force_drop_ids is not None): + labels = self.token_drop(labels, force_drop_ids) + embeddings = self.embedding_table(labels) + return embeddings + + +################################################################################# +# Core DiT Model # +################################################################################# + +class DiTBlock(nn.Module): + """ + A DiT block with adaptive layer norm zero (adaLN-Zero) conditioning. + """ + def __init__(self, hidden_size, num_heads, mlp_ratio=4.0, **block_kwargs): + super().__init__() + self.norm1 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.attn = Attention(hidden_size, num_heads=num_heads, qkv_bias=True, **block_kwargs) + self.norm2 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + mlp_hidden_dim = int(hidden_size * mlp_ratio) + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.mlp = Mlp(in_features=hidden_size, hidden_features=mlp_hidden_dim, act_layer=approx_gelu, drop=0) + self.adaLN_modulation = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 6 * hidden_size, bias=True) + ) + + def forward(self, x, c): + shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(c).chunk(6, dim=1) + x = x + gate_msa.unsqueeze(1) * self.attn(modulate(self.norm1(x), shift_msa, scale_msa)) + x = x + gate_mlp.unsqueeze(1) * self.mlp(modulate(self.norm2(x), shift_mlp, scale_mlp)) + return x + + +class FinalLayer(nn.Module): + """ + The final layer of DiT. + """ + def __init__(self, hidden_size, patch_size, out_channels): + super().__init__() + self.norm_final = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.linear = nn.Linear(hidden_size, patch_size * patch_size * out_channels, bias=True) + self.adaLN_modulation = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 2 * hidden_size, bias=True) + ) + + def forward(self, x, c): + shift, scale = self.adaLN_modulation(c).chunk(2, dim=1) + x = modulate(self.norm_final(x), shift, scale) + x = self.linear(x) + return x + + +class DiT(nn.Module): + """ + Diffusion model with a Transformer backbone. 
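+    With learn_sigma=True the output has twice the input channels (predicted noise plus variance); the ComfyUI forward() below keeps only the epsilon half.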
+ """ + def __init__( + self, + input_size=32, + patch_size=2, + in_channels=4, + hidden_size=1152, + depth=28, + num_heads=16, + mlp_ratio=4.0, + class_dropout_prob=0.1, + num_classes=1000, + learn_sigma=True, + **kwargs, + ): + super().__init__() + self.learn_sigma = learn_sigma + self.in_channels = in_channels + self.out_channels = in_channels * 2 if learn_sigma else in_channels + self.patch_size = patch_size + self.num_heads = num_heads + + self.x_embedder = PatchEmbed(input_size, patch_size, in_channels, hidden_size, bias=True) + self.t_embedder = TimestepEmbedder(hidden_size) + self.y_embedder = LabelEmbedder(num_classes, hidden_size, class_dropout_prob) + num_patches = self.x_embedder.num_patches + # Will use fixed sin-cos embedding: + self.pos_embed = nn.Parameter(torch.zeros(1, num_patches, hidden_size), requires_grad=False) + + self.blocks = nn.ModuleList([ + DiTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio) for _ in range(depth) + ]) + self.final_layer = FinalLayer(hidden_size, patch_size, self.out_channels) + self.initialize_weights() + + def initialize_weights(self): + # Initialize transformer layers: + def _basic_init(module): + if isinstance(module, nn.Linear): + torch.nn.init.xavier_uniform_(module.weight) + if module.bias is not None: + nn.init.constant_(module.bias, 0) + self.apply(_basic_init) + + # Initialize (and freeze) pos_embed by sin-cos embedding: + pos_embed = get_2d_sincos_pos_embed(self.pos_embed.shape[-1], int(self.x_embedder.num_patches ** 0.5)) + self.pos_embed.data.copy_(torch.from_numpy(pos_embed).float().unsqueeze(0)) + + # Initialize patch_embed like nn.Linear (instead of nn.Conv2d): + w = self.x_embedder.proj.weight.data + nn.init.xavier_uniform_(w.view([w.shape[0], -1])) + nn.init.constant_(self.x_embedder.proj.bias, 0) + + # Initialize label embedding table: + nn.init.normal_(self.y_embedder.embedding_table.weight, std=0.02) + + # Initialize timestep embedding MLP: + nn.init.normal_(self.t_embedder.mlp[0].weight, std=0.02) + nn.init.normal_(self.t_embedder.mlp[2].weight, std=0.02) + + # Zero-out adaLN modulation layers in DiT blocks: + for block in self.blocks: + nn.init.constant_(block.adaLN_modulation[-1].weight, 0) + nn.init.constant_(block.adaLN_modulation[-1].bias, 0) + + # Zero-out output layers: + nn.init.constant_(self.final_layer.adaLN_modulation[-1].weight, 0) + nn.init.constant_(self.final_layer.adaLN_modulation[-1].bias, 0) + nn.init.constant_(self.final_layer.linear.weight, 0) + nn.init.constant_(self.final_layer.linear.bias, 0) + + def unpatchify(self, x): + """ + x: (N, T, patch_size**2 * C) + imgs: (N, H, W, C) + """ + c = self.out_channels + p = self.x_embedder.patch_size[0] + h = w = int(x.shape[1] ** 0.5) + assert h * w == x.shape[1] + + x = x.reshape(shape=(x.shape[0], h, w, p, p, c)) + x = torch.einsum('nhwpqc->nchpwq', x) + imgs = x.reshape(shape=(x.shape[0], c, h * p, h * p)) + return imgs + + def forward_raw(self, x, t, y): + """ + Forward pass of DiT. 
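+        The timestep and class embeddings are summed into a single conditioning vector that modulates every block via adaLN-Zero.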
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + t: (N,) tensor of diffusion timesteps + y: (N,) tensor of class labels + """ + x = self.x_embedder(x) + self.pos_embed # (N, T, D), where T = H * W / patch_size ** 2 + t = self.t_embedder(t) # (N, D) + y = self.y_embedder(y, self.training) # (N, D) + c = t + y # (N, D) + for block in self.blocks: + x = block(x, c) # (N, T, D) + x = self.final_layer(x, c) # (N, T, patch_size ** 2 * out_channels) + x = self.unpatchify(x) # (N, out_channels, H, W) + return x + + def forward(self, x, timesteps, context, y=None, **kwargs): + """ + Forward pass that adapts comfy input to original forward function + x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + timesteps: (N,) tensor of diffusion timesteps + context: (N, [LabelID]) conditioning + y: extra conditioning. + """ + ## Remove outer array from cond + context = context[:, 0] + + ## run original forward pass + out = self.forward_raw( + x = x.to(self.dtype), + t = timesteps.to(self.dtype), + y = context.to(torch.int), + ) + + ## only return EPS + out = out.to(torch.float) + eps, rest = out[:, :self.in_channels], out[:, self.in_channels:] + return eps + +################################################################################# +# Sine/Cosine Positional Embedding Functions # +################################################################################# +# https://github.com/facebookresearch/mae/blob/main/util/pos_embed.py + +def get_2d_sincos_pos_embed(embed_dim, grid_size, cls_token=False, extra_tokens=0): + """ + grid_size: int of the grid height and width + return: + pos_embed: [grid_size*grid_size, embed_dim] or [1+grid_size*grid_size, embed_dim] (w/ or w/o cls_token) + """ + grid_h = np.arange(grid_size, dtype=np.float32) + grid_w = np.arange(grid_size, dtype=np.float32) + grid = np.meshgrid(grid_w, grid_h) # here w goes first + grid = np.stack(grid, axis=0) + + grid = grid.reshape([2, 1, grid_size, grid_size]) + pos_embed = get_2d_sincos_pos_embed_from_grid(embed_dim, grid) + if cls_token and extra_tokens > 0: + pos_embed = np.concatenate([np.zeros([extra_tokens, embed_dim]), pos_embed], axis=0) + return pos_embed + + +def get_2d_sincos_pos_embed_from_grid(embed_dim, grid): + assert embed_dim % 2 == 0 + + # use half of dimensions to encode grid_h + emb_h = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[0]) # (H*W, D/2) + emb_w = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[1]) # (H*W, D/2) + + emb = np.concatenate([emb_h, emb_w], axis=1) # (H*W, D) + return emb + + +def get_1d_sincos_pos_embed_from_grid(embed_dim, pos): + """ + embed_dim: output dimension for each position + pos: a list of positions to be encoded: size (M,) + out: (M, D) + """ + assert embed_dim % 2 == 0 + omega = np.arange(embed_dim // 2, dtype=np.float64) + omega /= embed_dim / 2. + omega = 1. 
/ 10000**omega # (D/2,) + + pos = pos.reshape(-1) # (M,) + out = np.einsum('m,d->md', pos, omega) # (M, D/2), outer product + + emb_sin = np.sin(out) # (M, D/2) + emb_cos = np.cos(out) # (M, D/2) + + emb = np.concatenate([emb_sin, emb_cos], axis=1) # (M, D) + return emb diff --git a/ComfyUI_ExtraModels/DiT/nodes.py b/ComfyUI_ExtraModels/DiT/nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..a80ec34228b757dda9b3c3d54a4eb71220a73307 --- /dev/null +++ b/ComfyUI_ExtraModels/DiT/nodes.py @@ -0,0 +1,98 @@ +import os +import json +import torch +import folder_paths + +from .conf import dit_conf +from .loader import load_dit + +class DitCheckpointLoader: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "ckpt_name": (folder_paths.get_filename_list("checkpoints"),), + "model": (list(dit_conf.keys()),), + "image_size": ([256, 512],), + # "num_classes": ("INT", {"default": 1000, "min": 0,}), + } + } + RETURN_TYPES = ("MODEL",) + RETURN_NAMES = ("model",) + FUNCTION = "load_checkpoint" + CATEGORY = "ExtraModels/DiT" + TITLE = "DitCheckpointLoader" + + def load_checkpoint(self, ckpt_name, model, image_size): + ckpt_path = folder_paths.get_full_path("checkpoints", ckpt_name) + model_conf = dit_conf[model] + model_conf["unet_config"]["input_size"] = image_size // 8 + # model_conf["unet_config"]["num_classes"] = num_classes + dit = load_dit( + model_path = ckpt_path, + model_conf = model_conf, + ) + return (dit,) + +# todo: this needs frontend code to display properly +def get_label_data(label_file="labels/imagenet1000.json"): + label_path = os.path.join( + os.path.dirname(os.path.realpath(__file__)), + label_file, + ) + label_data = {0: "None"} + with open(label_path, "r") as f: + label_data = json.loads(f.read()) + return label_data +label_data = get_label_data() + +class DiTCondLabelSelect: + @classmethod + def INPUT_TYPES(s): + global label_data + return { + "required": { + "model" : ("MODEL",), + "label_name": (list(label_data.values()),), + } + } + + RETURN_TYPES = ("CONDITIONING",) + RETURN_NAMES = ("class",) + FUNCTION = "cond_label" + CATEGORY = "ExtraModels/DiT" + TITLE = "DiTCondLabelSelect" + + def cond_label(self, model, label_name): + global label_data + class_labels = [int(k) for k,v in label_data.items() if v == label_name] + y = torch.tensor([[class_labels[0]]]).to(torch.int) + return ([[y, {}]], ) + +class DiTCondLabelEmpty: + @classmethod + def INPUT_TYPES(s): + global label_data + return { + "required": { + "model" : ("MODEL",), + } + } + + RETURN_TYPES = ("CONDITIONING",) + RETURN_NAMES = ("empty",) + FUNCTION = "cond_empty" + CATEGORY = "ExtraModels/DiT" + TITLE = "DiTCondLabelEmpty" + + def cond_empty(self, model): + # [ID of last class + 1] == [num_classes] + y_null = model.model.model_config.unet_config["num_classes"] + y = torch.tensor([[y_null]]).to(torch.int) + return ([[y, {}]], ) + +NODE_CLASS_MAPPINGS = { + "DitCheckpointLoader" : DitCheckpointLoader, + "DiTCondLabelSelect" : DiTCondLabelSelect, + "DiTCondLabelEmpty" : DiTCondLabelEmpty, +} diff --git a/ComfyUI_ExtraModels/HunYuanDiT/LICENSE-HYDiT b/ComfyUI_ExtraModels/HunYuanDiT/LICENSE-HYDiT new file mode 100644 index 0000000000000000000000000000000000000000..61ea65d146896b65b1e843ba04c10315e820912c --- /dev/null +++ b/ComfyUI_ExtraModels/HunYuanDiT/LICENSE-HYDiT @@ -0,0 +1,74 @@ +TENCENT HUNYUAN COMMUNITY LICENSE AGREEMENT +Tencent Hunyuan Release Date: 2024/5/14 +By clicking to agree or by using, reproducing, modifying, distributing, performing or displaying any portion or 
element of the Tencent Hunyuan Works, including via any Hosted Service, You will be deemed to have recognized and accepted the content of this Agreement, which is effective immediately. +1. DEFINITIONS. +a. “Acceptable Use Policy” shall mean the policy made available by Tencent as set forth in the Exhibit A. +b. “Agreement” shall mean the terms and conditions for use, reproduction, distribution, modification, performance and displaying of the Hunyuan Works or any portion or element thereof set forth herein. +c. “Documentation” shall mean the specifications, manuals and documentation for Tencent Hunyuan made publicly available by Tencent. +d. “Hosted Service” shall mean a hosted service offered via an application programming interface (API), web access, or any other electronic or remote means. +e. “Licensee,” “You” or “Your” shall mean a natural person or legal entity exercising the rights granted by this Agreement and/or using the Tencent Hunyuan Works for any purpose and in any field of use. +f. “Materials” shall mean, collectively, Tencent’s proprietary Tencent Hunyuan and Documentation (and any portion thereof) as made available by Tencent under this Agreement. +g. “Model Derivatives” shall mean all: (i) modifications to Tencent Hunyuan or any Model Derivative of Tencent Hunyuan; (ii) works based on Tencent Hunyuan or any Model Derivative of Tencent Hunyuan; or (iii) any other machine learning model which is created by transfer of patterns of the weights, parameters, operations, or Output of Tencent Hunyuan or any Model Derivative of Tencent Hunyuan, to that model in order to cause that model to perform similarly to Tencent Hunyuan or a Model Derivative of Tencent Hunyuan, including distillation methods, methods that use intermediate data representations, or methods based on the generation of synthetic data Outputs by Tencent Hunyuan or a Model Derivative of Tencent Hunyuan for training that model. For clarity, Outputs by themselves are not deemed Model Derivatives. +h. “Output” shall mean the information and/or content output of Tencent Hunyuan or a Model Derivative that results from operating or otherwise using Tencent Hunyuan or a Model Derivative, including via a Hosted Service. +i. “Tencent,” “We” or “Us” shall mean THL A29 Limited. +j. “Tencent Hunyuan” shall mean the large language models, image/video/audio/3D generation models, and multimodal large language models and their software and algorithms, including trained model weights, parameters (including optimizer states), machine-learning model code, inference-enabling code, training-enabling code, fine-tuning enabling code and other elements of the foregoing made publicly available by Us at https://huggingface.co/Tencent-Hunyuan/HunyuanDiT and https://github.com/Tencent/HunyuanDiT . +k. “Tencent Hunyuan Works” shall mean: (i) the Materials; (ii) Model Derivatives; and (iii) all derivative works thereof. +l. “Third Party” or “Third Parties” shall mean individuals or legal entities that are not under common control with Us or You. +m. “including” shall mean including but not limited to. +2. GRANT OF RIGHTS. 
+We grant You a non-exclusive, worldwide, non-transferable and royalty-free limited license under Tencent’s intellectual property or other rights owned by Us embodied in or utilized by the Materials to use, reproduce, distribute, create derivative works of (including Model Derivatives), and make modifications to the Materials, only in accordance with the terms of this Agreement and the Acceptable Use Policy, and You must not violate (or encourage or permit anyone else to violate) any term of this Agreement or the Acceptable Use Policy. +3. DISTRIBUTION. +You may, subject to Your compliance with this Agreement, distribute or make available to Third Parties the Tencent Hunyuan Works, provided that You meet all of the following conditions: +a. You must provide all such Third Party recipients of the Tencent Hunyuan Works or products or services using them a copy of this Agreement; +b. You must cause any modified files to carry prominent notices stating that You changed the files; +c. You are encouraged to: (i) publish at least one technology introduction blogpost or one public statement expressing Your experience of using the Tencent Hunyuan Works; and (ii) mark the products or services developed by using the Tencent Hunyuan Works to indicate that the product/service is “Powered by Tencent Hunyuan”; and +d. All distributions to Third Parties (other than through a Hosted Service) must be accompanied by a “Notice” text file that contains the following notice: “Tencent Hunyuan is licensed under the Tencent Hunyuan Community License Agreement, Copyright © 2024 Tencent. All Rights Reserved. The trademark rights of “Tencent Hunyuan” are owned by Tencent or its affiliate.” +You may add Your own copyright statement to Your modifications and, except as set forth in this Section and in Section 5, may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Model Derivatives as a whole, provided Your use, reproduction, modification, distribution, performance and display of the work otherwise complies with the terms and conditions of this Agreement. If You receive Tencent Hunyuan Works from a Licensee as part of an integrated end user product, then this Section 3 of this Agreement will not apply to You. +4. ADDITIONAL COMMERCIAL TERMS. +If, on the Tencent Hunyuan version release date, the monthly active users of all products or services made available by or for Licensee is greater than 100 million monthly active users in the preceding calendar month, You must request a license from Tencent, which Tencent may grant to You in its sole discretion, and You are not authorized to exercise any of the rights under this Agreement unless or until Tencent otherwise expressly grants You such rights. +5. RULES OF USE. +a. Your use of the Tencent Hunyuan Works must comply with applicable laws and regulations (including trade compliance laws and regulations) and adhere to the Acceptable Use Policy for the Tencent Hunyuan Works, which is hereby incorporated by reference into this Agreement. You must include the use restrictions referenced in these Sections 5(a) and 5(b) as an enforceable provision in any agreement (e.g., license agreement, terms of use, etc.) governing the use and/or distribution of Tencent Hunyuan Works and You must provide notice to subsequent users to whom You distribute that Tencent Hunyuan Works are subject to the use restrictions in these Sections 5(a) and 5(b). +b. 
You must not use the Tencent Hunyuan Works or any Output or results of the Tencent Hunyuan Works to improve any other large language model (other than Tencent Hunyuan or Model Derivatives thereof). +6. INTELLECTUAL PROPERTY. +a. Subject to Tencent’s ownership of Tencent Hunyuan Works made by or for Tencent and intellectual property rights therein, conditioned upon Your compliance with the terms and conditions of this Agreement, as between You and Tencent, You will be the owner of any derivative works and modifications of the Materials and any Model Derivatives that are made by or for You. +b. No trademark licenses are granted under this Agreement, and in connection with the Tencent Hunyuan Works, Licensee may not use any name or mark owned by or associated with Tencent or any of its affiliates, except as required for reasonable and customary use in describing and distributing the Tencent Hunyuan Works. Tencent hereby grants You a license to use “Tencent Hunyuan” (the “Mark”) solely as required to comply with the provisions of Section 3(c), provided that You comply with any applicable laws related to trademark protection. All goodwill arising out of Your use of the Mark will inure to the benefit of Tencent. +c. If You commence a lawsuit or other proceedings (including a cross-claim or counterclaim in a lawsuit) against Us or any person or entity alleging that the Materials or any Output, or any portion of any of the foregoing, infringe any intellectual property or other right owned or licensable by You, then all licenses granted to You under this Agreement shall terminate as of the date such lawsuit or other proceeding is filed. You will defend, indemnify and hold harmless Us from and against any claim by any Third Party arising out of or related to Your or the Third Party’s use or distribution of the Tencent Hunyuan Works. +d. Tencent claims no rights in Outputs You generate. You and Your users are solely responsible for Outputs and their subsequent uses. +7. DISCLAIMERS OF WARRANTY AND LIMITATIONS OF LIABILITY. +a. We are not obligated to support, update, provide training for, or develop any further version of the Tencent Hunyuan Works or to grant any license thereto. +b. UNLESS AND ONLY TO THE EXTENT REQUIRED BY APPLICABLE LAW, THE TENCENT HUNYUAN WORKS AND ANY OUTPUT AND RESULTS THEREFROM ARE PROVIDED “AS IS” WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES OF ANY KIND INCLUDING ANY WARRANTIES OF TITLE, MERCHANTABILITY, NONINFRINGEMENT, COURSE OF DEALING, USAGE OF TRADE, OR FITNESS FOR A PARTICULAR PURPOSE. YOU ARE SOLELY RESPONSIBLE FOR DETERMINING THE APPROPRIATENESS OF USING, REPRODUCING, MODIFYING, PERFORMING, DISPLAYING OR DISTRIBUTING ANY OF THE TENCENT HUNYUAN WORKS OR OUTPUTS AND ASSUME ANY AND ALL RISKS ASSOCIATED WITH YOUR OR A THIRD PARTY’S USE OR DISTRIBUTION OF ANY OF THE TENCENT HUNYUAN WORKS OR OUTPUTS AND YOUR EXERCISE OF RIGHTS AND PERMISSIONS UNDER THIS AGREEMENT. +c. TO THE FULLEST EXTENT PERMITTED BY APPLICABLE LAW, IN NO EVENT SHALL TENCENT OR ITS AFFILIATES BE LIABLE UNDER ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, TORT, NEGLIGENCE, PRODUCTS LIABILITY, OR OTHERWISE, FOR ANY DAMAGES, INCLUDING ANY DIRECT, INDIRECT, SPECIAL, INCIDENTAL, EXEMPLARY, CONSEQUENTIAL OR PUNITIVE DAMAGES, OR LOST PROFITS OF ANY KIND ARISING FROM THIS AGREEMENT OR RELATED TO ANY OF THE TENCENT HUNYUAN WORKS OR OUTPUTS, EVEN IF TENCENT OR ITS AFFILIATES HAVE BEEN ADVISED OF THE POSSIBILITY OF ANY OF THE FOREGOING. +8. SURVIVAL AND TERMINATION. +a. 
The term of this Agreement shall commence upon Your acceptance of this Agreement or access to the Materials and will continue in full force and effect until terminated in accordance with the terms and conditions herein. +b. We may terminate this Agreement if You breach any of the terms or conditions of this Agreement. Upon termination of this Agreement, You must promptly delete and cease use of the Tencent Hunyuan Works. Sections 6(a), 6(c), 7 and 9 shall survive the termination of this Agreement. +9. GOVERNING LAW AND JURISDICTION. +a. This Agreement and any dispute arising out of or relating to it will be governed by the laws of the Hong Kong Special Administrative Region of the People’s Republic of China, without regard to conflict of law principles, and the UN Convention on Contracts for the International Sale of Goods does not apply to this Agreement. +b. Exclusive jurisdiction and venue for any dispute arising out of or relating to this Agreement will be a court of competent jurisdiction in the Hong Kong Special Administrative Region of the People’s Republic of China, and Tencent and Licensee consent to the exclusive jurisdiction of such court with respect to any such dispute. +  + +EXHIBIT A +ACCEPTABLE USE POLICY + +Tencent reserves the right to update this Acceptable Use Policy from time to time. +Last modified: 2024/5/14 + +Tencent endeavors to promote safe and fair use of its tools and features, including Tencent Hunyuan. You agree not to use Tencent Hunyuan or Model Derivatives: +1. In any way that violates any applicable national, federal, state, local, international or any other law or regulation; +2. To harm Yourself or others; +3. To repurpose or distribute output from Tencent Hunyuan or any Model Derivatives to harm Yourself or others; +4. To override or circumvent the safety guardrails and safeguards We have put in place; +5. For the purpose of exploiting, harming or attempting to exploit or harm minors in any way; +6. To generate or disseminate verifiably false information and/or content with the purpose of harming others or influencing elections; +7. To generate or facilitate false online engagement, including fake reviews and other means of fake online engagement; +8. To intentionally defame, disparage or otherwise harass others; +9. To generate and/or disseminate malware (including ransomware) or any other content to be used for the purpose of harming electronic systems; +10. To generate or disseminate personal identifiable information with the purpose of harming others; +11. To generate or disseminate information (including images, code, posts, articles), and place the information in any public context (including –through the use of bot generated tweets), without expressly and conspicuously identifying that the information and/or content is machine generated; +12. To impersonate another individual without consent, authorization, or legal right; +13. To make high-stakes automated decisions in domains that affect an individual’s safety, rights or wellbeing (e.g., law enforcement, migration, medicine/health, management of critical infrastructure, safety components of products, essential services, credit, employment, housing, education, social scoring, or insurance); +14. In a manner that violates or disrespects the social ethics and moral standards of other countries or regions; +15. To perform, facilitate, threaten, incite, plan, promote or encourage violent extremism or terrorism; +16. 
For any use intended to discriminate against or harm individuals or groups based on protected characteristics or categories, online or offline social behavior or known or predicted personal or personality characteristics; +17. To intentionally exploit any of the vulnerabilities of a specific group of persons based on their age, social, physical or mental characteristics, in order to materially distort the behavior of a person pertaining to that group in a manner that causes or is likely to cause that person or another person physical or psychological harm; +18. For military purposes; +19. To engage in the unauthorized or unlicensed practice of any profession including, but not limited to, financial, legal, medical/health, or other professional practices. diff --git a/ComfyUI_ExtraModels/HunYuanDiT/conf.py b/ComfyUI_ExtraModels/HunYuanDiT/conf.py new file mode 100644 index 0000000000000000000000000000000000000000..924bf012a6b63a94051b70978990771cb40bdfb9 --- /dev/null +++ b/ComfyUI_ExtraModels/HunYuanDiT/conf.py @@ -0,0 +1,61 @@ +""" +List of all HYDiT model types / settings +""" +from argparse import Namespace +hydit_args = Namespace(**{ # normally from argparse + "infer_mode": "torch", + "norm": "layer", + "learn_sigma": True, + "text_states_dim": 1024, + "text_states_dim_t5": 2048, + "text_len": 77, + "text_len_t5": 256, +}) + +hydit_conf = { + "G/2": { # Seems to be the main one + "unet_config": { + "depth" : 40, + "num_heads" : 16, + "patch_size" : 2, + "hidden_size" : 1408, + "mlp_ratio" : 4.3637, + "input_size": (1024//8, 1024//8), + "args": hydit_args, + }, + "sampling_settings" : { + "beta_schedule" : "linear", + "linear_start" : 0.00085, + "linear_end" : 0.03, + "timesteps" : 1000, + }, + }, + "G/2-1.2": { + "unet_config": { + "depth" : 40, + "num_heads" : 16, + "patch_size" : 2, + "hidden_size" : 1408, + "mlp_ratio" : 4.3637, + "input_size": (1024//8, 1024//8), + "cond_style": False, + "cond_res" : False, + "args": hydit_args, + }, + "sampling_settings" : { + "beta_schedule" : "linear", + "linear_start" : 0.00085, + "linear_end" : 0.018, + "timesteps" : 1000, + }, + } +} + +# these are the same as regular DiT, I think +from ..DiT.conf import dit_conf +for name in ["XL/2", "L/2", "B/2"]: + hydit_conf[name] = { + "unet_config": dit_conf[name]["unet_config"].copy(), + "sampling_settings": hydit_conf["G/2"]["sampling_settings"], + } + hydit_conf[name]["unet_config"]["args"] = hydit_args diff --git a/ComfyUI_ExtraModels/HunYuanDiT/config_clip.json b/ComfyUI_ExtraModels/HunYuanDiT/config_clip.json new file mode 100644 index 0000000000000000000000000000000000000000..f6298741b427e1045bae70d7f7c62b53641ad0a5 --- /dev/null +++ b/ComfyUI_ExtraModels/HunYuanDiT/config_clip.json @@ -0,0 +1,34 @@ +{ + "_name_or_path": "hfl/chinese-roberta-wwm-ext-large", + "architectures": [ + "BertModel" + ], + "attention_probs_dropout_prob": 0.1, + "bos_token_id": 0, + "classifier_dropout": null, + "directionality": "bidi", + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 1024, + "initializer_range": 0.02, + "intermediate_size": 4096, + "layer_norm_eps": 1e-12, + "max_position_embeddings": 512, + "model_type": "bert", + "num_attention_heads": 16, + "num_hidden_layers": 24, + "output_past": true, + "pad_token_id": 0, + "pooler_fc_size": 768, + "pooler_num_attention_heads": 12, + "pooler_num_fc_layers": 3, + "pooler_size_per_head": 128, + "pooler_type": "first_token_transform", + "position_embedding_type": "absolute", + "torch_dtype": "float32", + 
"transformers_version": "4.22.1", + "type_vocab_size": 2, + "use_cache": true, + "vocab_size": 47020 +} diff --git a/ComfyUI_ExtraModels/HunYuanDiT/config_mt5.json b/ComfyUI_ExtraModels/HunYuanDiT/config_mt5.json new file mode 100644 index 0000000000000000000000000000000000000000..d55cc43f81d596f10e6cdbc594e1b9429834a4a5 --- /dev/null +++ b/ComfyUI_ExtraModels/HunYuanDiT/config_mt5.json @@ -0,0 +1,33 @@ +{ + "_name_or_path": "mt5", + "architectures": [ + "MT5EncoderModel" + ], + "classifier_dropout": 0.0, + "d_ff": 5120, + "d_kv": 64, + "d_model": 2048, + "decoder_start_token_id": 0, + "dense_act_fn": "gelu_new", + "dropout_rate": 0.1, + "eos_token_id": 1, + "feed_forward_proj": "gated-gelu", + "initializer_factor": 1.0, + "is_encoder_decoder": true, + "is_gated_act": true, + "layer_norm_epsilon": 1e-06, + "model_type": "mt5", + "num_decoder_layers": 24, + "num_heads": 32, + "num_layers": 24, + "output_past": true, + "pad_token_id": 0, + "relative_attention_max_distance": 128, + "relative_attention_num_buckets": 32, + "tie_word_embeddings": false, + "tokenizer_class": "T5Tokenizer", + "torch_dtype": "float16", + "transformers_version": "4.40.2", + "use_cache": true, + "vocab_size": 250112 +} diff --git a/ComfyUI_ExtraModels/HunYuanDiT/loader.py b/ComfyUI_ExtraModels/HunYuanDiT/loader.py new file mode 100644 index 0000000000000000000000000000000000000000..2aa69175afe35c21aa64bd35ceaca34c52363222 --- /dev/null +++ b/ComfyUI_ExtraModels/HunYuanDiT/loader.py @@ -0,0 +1,81 @@ +import comfy.supported_models_base +import comfy.latent_formats +import comfy.model_patcher +import comfy.model_base +import comfy.utils +import comfy.conds +import torch +from comfy import model_management +from tqdm import tqdm + +class EXM_HYDiT(comfy.supported_models_base.BASE): + unet_config = {} + unet_extra_config = {} + latent_format = comfy.latent_formats.SDXL + + def __init__(self, model_conf): + self.unet_config = model_conf.get("unet_config", {}) + self.sampling_settings = model_conf.get("sampling_settings", {}) + self.latent_format = self.latent_format() + # UNET is handled by extension + self.unet_config["disable_unet_model_creation"] = True + + def model_type(self, state_dict, prefix=""): + return comfy.model_base.ModelType.V_PREDICTION + +class EXM_HYDiT_Model(comfy.model_base.BaseModel): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def extra_conds(self, **kwargs): + out = super().extra_conds(**kwargs) + + for name in ["context_t5", "context_mask", "context_t5_mask"]: + out[name] = comfy.conds.CONDRegular(kwargs[name]) + + src_size_cond = kwargs.get("src_size_cond", None) + if src_size_cond is not None: + out["src_size_cond"] = comfy.conds.CONDRegular(torch.tensor(src_size_cond)) + + return out + +def load_hydit(model_path, model_conf): + state_dict = comfy.utils.load_torch_file(model_path) + state_dict = state_dict.get("model", state_dict) + + parameters = comfy.utils.calculate_parameters(state_dict) + unet_dtype = model_management.unet_dtype(model_params=parameters) + load_device = comfy.model_management.get_torch_device() + offload_device = comfy.model_management.unet_offload_device() + + # ignore fp8/etc and use directly for now + manual_cast_dtype = model_management.unet_manual_cast(unet_dtype, load_device) + if manual_cast_dtype: + print(f"HunYuanDiT: falling back to {manual_cast_dtype}") + unet_dtype = manual_cast_dtype + + model_conf = EXM_HYDiT(model_conf) + model = EXM_HYDiT_Model( + model_conf, + model_type=comfy.model_base.ModelType.V_PREDICTION, + 
device=model_management.get_torch_device() + ) + + from .models.models import HunYuanDiT + model.diffusion_model = HunYuanDiT( + **model_conf.unet_config, + log_fn=tqdm.write, + ) + + model.diffusion_model.load_state_dict(state_dict) + model.diffusion_model.dtype = unet_dtype + model.diffusion_model.eval() + model.diffusion_model.to(unet_dtype) + + model_patcher = comfy.model_patcher.ModelPatcher( + model, + load_device = load_device, + offload_device = offload_device, + current_device = "cpu", + ) + return model_patcher diff --git a/ComfyUI_ExtraModels/HunYuanDiT/models/attn_layers.py b/ComfyUI_ExtraModels/HunYuanDiT/models/attn_layers.py new file mode 100644 index 0000000000000000000000000000000000000000..b767d83cab56679b486e7d0f33d15bdfbcb66b18 --- /dev/null +++ b/ComfyUI_ExtraModels/HunYuanDiT/models/attn_layers.py @@ -0,0 +1,374 @@ +import torch +import torch.nn as nn +from typing import Tuple, Union, Optional + +try: + import flash_attn + if hasattr(flash_attn, '__version__') and int(flash_attn.__version__[0]) == 2: + from flash_attn.flash_attn_interface import flash_attn_kvpacked_func + from flash_attn.modules.mha import FlashSelfAttention, FlashCrossAttention + else: + from flash_attn.flash_attn_interface import flash_attn_unpadded_kvpacked_func + from flash_attn.modules.mha import FlashSelfAttention, FlashCrossAttention +except Exception as e: + print(f'flash_attn import failed: {e}') + + +def reshape_for_broadcast(freqs_cis: Union[torch.Tensor, Tuple[torch.Tensor]], x: torch.Tensor, head_first=False): + """ + Reshape frequency tensor for broadcasting it with another tensor. + + This function reshapes the frequency tensor to have the same shape as the target tensor 'x' + for the purpose of broadcasting the frequency tensor during element-wise operations. + + Args: + freqs_cis (Union[torch.Tensor, Tuple[torch.Tensor]]): Frequency tensor to be reshaped. + x (torch.Tensor): Target tensor for broadcasting compatibility. + head_first (bool): head dimension first (except batch dim) or not. + + Returns: + torch.Tensor: Reshaped frequency tensor. + + Raises: + AssertionError: If the frequency tensor doesn't match the expected shape. + AssertionError: If the target tensor 'x' doesn't have the expected number of dimensions. 
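+    Note: freqs_cis may be a (cos, sin) tuple (real-valued rotation) or a single complex-valued tensor; both are reshaped so they broadcast over the batch and head dimensions.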
+ """ + ndim = x.ndim + assert 0 <= 1 < ndim + + if isinstance(freqs_cis, tuple): + # freqs_cis: (cos, sin) in real space + if head_first: + assert freqs_cis[0].shape == (x.shape[-2], x.shape[-1]), f'freqs_cis shape {freqs_cis[0].shape} does not match x shape {x.shape}' + shape = [d if i == ndim - 2 or i == ndim - 1 else 1 for i, d in enumerate(x.shape)] + else: + assert freqs_cis[0].shape == (x.shape[1], x.shape[-1]), f'freqs_cis shape {freqs_cis[0].shape} does not match x shape {x.shape}' + shape = [d if i == 1 or i == ndim - 1 else 1 for i, d in enumerate(x.shape)] + return freqs_cis[0].view(*shape), freqs_cis[1].view(*shape) + else: + # freqs_cis: values in complex space + if head_first: + assert freqs_cis.shape == (x.shape[-2], x.shape[-1]), f'freqs_cis shape {freqs_cis.shape} does not match x shape {x.shape}' + shape = [d if i == ndim - 2 or i == ndim - 1 else 1 for i, d in enumerate(x.shape)] + else: + assert freqs_cis.shape == (x.shape[1], x.shape[-1]), f'freqs_cis shape {freqs_cis.shape} does not match x shape {x.shape}' + shape = [d if i == 1 or i == ndim - 1 else 1 for i, d in enumerate(x.shape)] + return freqs_cis.view(*shape) + + +def rotate_half(x): + x_real, x_imag = x.float().reshape(*x.shape[:-1], -1, 2).unbind(-1) # [B, S, H, D//2] + return torch.stack([-x_imag, x_real], dim=-1).flatten(3) + + +def apply_rotary_emb( + xq: torch.Tensor, + xk: Optional[torch.Tensor], + freqs_cis: Union[torch.Tensor, Tuple[torch.Tensor]], + head_first: bool = False, +) -> Tuple[torch.Tensor, torch.Tensor]: + """ + Apply rotary embeddings to input tensors using the given frequency tensor. + + This function applies rotary embeddings to the given query 'xq' and key 'xk' tensors using the provided + frequency tensor 'freqs_cis'. The input tensors are reshaped as complex numbers, and the frequency tensor + is reshaped for broadcasting compatibility. The resulting tensors contain rotary embeddings and are + returned as real tensors. + + Args: + xq (torch.Tensor): Query tensor to apply rotary embeddings. [B, S, H, D] + xk (torch.Tensor): Key tensor to apply rotary embeddings. [B, S, H, D] + freqs_cis (Union[torch.Tensor, Tuple[torch.Tensor]]): Precomputed frequency tensor for complex exponentials. + head_first (bool): head dimension first (except batch dim) or not. + + Returns: + Tuple[torch.Tensor, torch.Tensor]: Tuple of modified query tensor and key tensor with rotary embeddings. + + """ + xk_out = None + if isinstance(freqs_cis, tuple): + cos, sin = reshape_for_broadcast(freqs_cis, xq, head_first) # [S, D] + cos, sin = cos.to(xq.device), sin.to(xq.device) + xq_out = (xq.float() * cos + rotate_half(xq.float()) * sin).type_as(xq) + if xk is not None: + xk_out = (xk.float() * cos + rotate_half(xk.float()) * sin).type_as(xk) + else: + xq_ = torch.view_as_complex(xq.float().reshape(*xq.shape[:-1], -1, 2)) # [B, S, H, D//2] + freqs_cis = reshape_for_broadcast(freqs_cis, xq_, head_first).to(xq.device) # [S, D//2] --> [1, S, 1, D//2] + xq_out = torch.view_as_real(xq_ * freqs_cis).flatten(3).type_as(xq) + if xk is not None: + xk_ = torch.view_as_complex(xk.float().reshape(*xk.shape[:-1], -1, 2)) # [B, S, H, D//2] + xk_out = torch.view_as_real(xk_ * freqs_cis).flatten(3).type_as(xk) + + return xq_out, xk_out + + +class FlashSelfMHAModified(nn.Module): + """ + Use QK Normalization. 
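+    Self-attention backed by flash_attn's FlashSelfAttention; q and k are normalized and cast to fp16 before the fused kernel.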
+ """ + def __init__(self, + dim, + num_heads, + qkv_bias=True, + qk_norm=False, + attn_drop=0.0, + proj_drop=0.0, + device=None, + dtype=None, + norm_layer=nn.LayerNorm, + ): + factory_kwargs = {'device': device, 'dtype': dtype} + super().__init__() + self.dim = dim + self.num_heads = num_heads + assert self.dim % num_heads == 0, "self.kdim must be divisible by num_heads" + self.head_dim = self.dim // num_heads + assert self.head_dim % 8 == 0 and self.head_dim <= 128, "Only support head_dim <= 128 and divisible by 8" + + self.Wqkv = nn.Linear(dim, 3 * dim, bias=qkv_bias, **factory_kwargs) + # TODO: eps should be 1 / 65530 if using fp16 + self.q_norm = norm_layer(self.head_dim, elementwise_affine=True, eps=1e-6) if qk_norm else nn.Identity() + self.k_norm = norm_layer(self.head_dim, elementwise_affine=True, eps=1e-6) if qk_norm else nn.Identity() + self.inner_attn = FlashSelfAttention(attention_dropout=attn_drop) + self.out_proj = nn.Linear(dim, dim, bias=qkv_bias, **factory_kwargs) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x, freqs_cis_img=None): + """ + Parameters + ---------- + x: torch.Tensor + (batch, seqlen, hidden_dim) (where hidden_dim = num heads * head dim) + freqs_cis_img: torch.Tensor + (batch, hidden_dim // 2), RoPE for image + """ + b, s, d = x.shape + + qkv = self.Wqkv(x) + qkv = qkv.view(b, s, 3, self.num_heads, self.head_dim) # [b, s, 3, h, d] + q, k, v = qkv.unbind(dim=2) # [b, s, h, d] + q = self.q_norm(q).half() # [b, s, h, d] + k = self.k_norm(k).half() + + # Apply RoPE if needed + if freqs_cis_img is not None: + qq, kk = apply_rotary_emb(q, k, freqs_cis_img) + assert qq.shape == q.shape and kk.shape == k.shape, f'qq: {qq.shape}, q: {q.shape}, kk: {kk.shape}, k: {k.shape}' + q, k = qq, kk + + qkv = torch.stack([q, k, v], dim=2) # [b, s, 3, h, d] + context = self.inner_attn(qkv) + out = self.out_proj(context.view(b, s, d)) + out = self.proj_drop(out) + + out_tuple = (out,) + + return out_tuple + + +class FlashCrossMHAModified(nn.Module): + """ + Use QK Normalization. 
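+    Cross-attention (queries from x, keys/values from y) backed by flash_attn's FlashCrossAttention.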
+ """ + def __init__(self, + qdim, + kdim, + num_heads, + qkv_bias=True, + qk_norm=False, + attn_drop=0.0, + proj_drop=0.0, + device=None, + dtype=None, + norm_layer=nn.LayerNorm, + ): + factory_kwargs = {'device': device, 'dtype': dtype} + super().__init__() + self.qdim = qdim + self.kdim = kdim + self.num_heads = num_heads + assert self.qdim % num_heads == 0, "self.qdim must be divisible by num_heads" + self.head_dim = self.qdim // num_heads + assert self.head_dim % 8 == 0 and self.head_dim <= 128, "Only support head_dim <= 128 and divisible by 8" + + self.scale = self.head_dim ** -0.5 + + self.q_proj = nn.Linear(qdim, qdim, bias=qkv_bias, **factory_kwargs) + self.kv_proj = nn.Linear(kdim, 2 * qdim, bias=qkv_bias, **factory_kwargs) + + # TODO: eps should be 1 / 65530 if using fp16 + self.q_norm = norm_layer(self.head_dim, elementwise_affine=True, eps=1e-6) if qk_norm else nn.Identity() + self.k_norm = norm_layer(self.head_dim, elementwise_affine=True, eps=1e-6) if qk_norm else nn.Identity() + + self.inner_attn = FlashCrossAttention(attention_dropout=attn_drop) + self.out_proj = nn.Linear(qdim, qdim, bias=qkv_bias, **factory_kwargs) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x, y, freqs_cis_img=None): + """ + Parameters + ---------- + x: torch.Tensor + (batch, seqlen1, hidden_dim) (where hidden_dim = num_heads * head_dim) + y: torch.Tensor + (batch, seqlen2, hidden_dim2) + freqs_cis_img: torch.Tensor + (batch, hidden_dim // num_heads), RoPE for image + """ + b, s1, _ = x.shape # [b, s1, D] + _, s2, _ = y.shape # [b, s2, 1024] + + q = self.q_proj(x).view(b, s1, self.num_heads, self.head_dim) # [b, s1, h, d] + kv = self.kv_proj(y).view(b, s2, 2, self.num_heads, self.head_dim) # [b, s2, 2, h, d] + k, v = kv.unbind(dim=2) # [b, s2, h, d] + q = self.q_norm(q).half() # [b, s1, h, d] + k = self.k_norm(k).half() # [b, s2, h, d] + + # Apply RoPE if needed + if freqs_cis_img is not None: + qq, _ = apply_rotary_emb(q, None, freqs_cis_img) + assert qq.shape == q.shape, f'qq: {qq.shape}, q: {q.shape}' + q = qq # [b, s1, h, d] + kv = torch.stack([k, v], dim=2) # [b, s1, 2, h, d] + context = self.inner_attn(q, kv) # [b, s1, h, d] + context = context.view(b, s1, -1) # [b, s1, D] + + out = self.out_proj(context) + out = self.proj_drop(out) + + out_tuple = (out,) + + return out_tuple + + +class CrossAttention(nn.Module): + """ + Use QK Normalization. 
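+    Plain PyTorch cross-attention (explicit softmax(QK^T)V); used as the fallback when flash attention is not enabled.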
+ """ + def __init__(self, + qdim, + kdim, + num_heads, + qkv_bias=True, + qk_norm=False, + attn_drop=0.0, + proj_drop=0.0, + device=None, + dtype=None, + norm_layer=nn.LayerNorm, + ): + factory_kwargs = {'device': device, 'dtype': dtype} + super().__init__() + self.qdim = qdim + self.kdim = kdim + self.num_heads = num_heads + assert self.qdim % num_heads == 0, "self.qdim must be divisible by num_heads" + self.head_dim = self.qdim // num_heads + assert self.head_dim % 8 == 0 and self.head_dim <= 128, "Only support head_dim <= 128 and divisible by 8" + self.scale = self.head_dim ** -0.5 + + self.q_proj = nn.Linear(qdim, qdim, bias=qkv_bias, **factory_kwargs) + self.kv_proj = nn.Linear(kdim, 2 * qdim, bias=qkv_bias, **factory_kwargs) + + # TODO: eps should be 1 / 65530 if using fp16 + self.q_norm = norm_layer(self.head_dim, elementwise_affine=True, eps=1e-6) if qk_norm else nn.Identity() + self.k_norm = norm_layer(self.head_dim, elementwise_affine=True, eps=1e-6) if qk_norm else nn.Identity() + self.attn_drop = nn.Dropout(attn_drop) + self.out_proj = nn.Linear(qdim, qdim, bias=qkv_bias, **factory_kwargs) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x, y, freqs_cis_img=None): + """ + Parameters + ---------- + x: torch.Tensor + (batch, seqlen1, hidden_dim) (where hidden_dim = num heads * head dim) + y: torch.Tensor + (batch, seqlen2, hidden_dim2) + freqs_cis_img: torch.Tensor + (batch, hidden_dim // 2), RoPE for image + """ + b, s1, c = x.shape # [b, s1, D] + _, s2, c = y.shape # [b, s2, 1024] + + q = self.q_proj(x).view(b, s1, self.num_heads, self.head_dim) # [b, s1, h, d] + kv = self.kv_proj(y).view(b, s2, 2, self.num_heads, self.head_dim) # [b, s2, 2, h, d] + k, v = kv.unbind(dim=2) # [b, s, h, d] + q = self.q_norm(q) + k = self.k_norm(k) + + # Apply RoPE if needed + if freqs_cis_img is not None: + qq, _ = apply_rotary_emb(q, None, freqs_cis_img) + assert qq.shape == q.shape, f'qq: {qq.shape}, q: {q.shape}' + q = qq + + q = q * self.scale + q = q.transpose(-2, -3).contiguous() # q -> B, L1, H, C - B, H, L1, C + k = k.permute(0, 2, 3, 1).contiguous() # k -> B, L2, H, C - B, H, C, L2 + attn = q @ k # attn -> B, H, L1, L2 + attn = attn.softmax(dim=-1) # attn -> B, H, L1, L2 + attn = self.attn_drop(attn) + x = attn @ v.transpose(-2, -3) # v -> B, L2, H, C - B, H, L2, C x-> B, H, L1, C + context = x.transpose(1, 2) # context -> B, H, L1, C - B, L1, H, C + + context = context.contiguous().view(b, s1, -1) + + out = self.out_proj(context) # context.reshape - B, L1, -1 + out = self.proj_drop(out) + + out_tuple = (out,) + + return out_tuple + + +class Attention(nn.Module): + """ + We rename some layer names to align with flash attention + """ + def __init__(self, dim, num_heads, qkv_bias=True, qk_norm=False, attn_drop=0., proj_drop=0., + norm_layer=nn.LayerNorm, + ): + super().__init__() + self.dim = dim + self.num_heads = num_heads + assert self.dim % num_heads == 0, 'dim should be divisible by num_heads' + self.head_dim = self.dim // num_heads + # This assertion is aligned with flash attention + assert self.head_dim % 8 == 0 and self.head_dim <= 128, "Only support head_dim <= 128 and divisible by 8" + self.scale = self.head_dim ** -0.5 + + # qkv --> Wqkv + self.Wqkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + # TODO: eps should be 1 / 65530 if using fp16 + self.q_norm = norm_layer(self.head_dim, elementwise_affine=True, eps=1e-6) if qk_norm else nn.Identity() + self.k_norm = norm_layer(self.head_dim, elementwise_affine=True, eps=1e-6) if qk_norm else nn.Identity() + self.attn_drop 
= nn.Dropout(attn_drop) + self.out_proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x, freqs_cis_img=None): + B, N, C = x.shape + qkv = self.Wqkv(x).reshape(B, N, 3, self.num_heads, self.head_dim).permute(2, 0, 3, 1, 4) # [3, b, h, s, d] + q, k, v = qkv.unbind(0) # [b, h, s, d] + q = self.q_norm(q) # [b, h, s, d] + k = self.k_norm(k) # [b, h, s, d] + + # Apply RoPE if needed + if freqs_cis_img is not None: + qq, kk = apply_rotary_emb(q, k, freqs_cis_img, head_first=True) + assert qq.shape == q.shape and kk.shape == k.shape, \ + f'qq: {qq.shape}, q: {q.shape}, kk: {kk.shape}, k: {k.shape}' + q, k = qq, kk + + # just use SDP here for now + x = torch.nn.functional.scaled_dot_product_attention( + q, k, v, + ).permute(0, 2, 1, 3).contiguous().reshape(B, N, C) + x = self.out_proj(x) + x = self.proj_drop(x) + + out_tuple = (x,) + + return out_tuple diff --git a/ComfyUI_ExtraModels/HunYuanDiT/models/embedders.py b/ComfyUI_ExtraModels/HunYuanDiT/models/embedders.py new file mode 100644 index 0000000000000000000000000000000000000000..9fe08cba22eef41ca9fd9f70fe6f062a4dd606c8 --- /dev/null +++ b/ComfyUI_ExtraModels/HunYuanDiT/models/embedders.py @@ -0,0 +1,111 @@ +import math +import torch +import torch.nn as nn +from einops import repeat + +from timm.models.layers import to_2tuple + + +class PatchEmbed(nn.Module): + """ 2D Image to Patch Embedding + + Image to Patch Embedding using Conv2d + + A convolution based approach to patchifying a 2D image w/ embedding projection. + + Based on the impl in https://github.com/google-research/vision_transformer + + Hacked together by / Copyright 2020 Ross Wightman + + Remove the _assert function in forward function to be compatible with multi-resolution images. + """ + def __init__( + self, + img_size=224, + patch_size=16, + in_chans=3, + embed_dim=768, + norm_layer=None, + flatten=True, + bias=True, + ): + super().__init__() + if isinstance(img_size, int): + img_size = to_2tuple(img_size) + elif isinstance(img_size, (tuple, list)) and len(img_size) == 2: + img_size = tuple(img_size) + else: + raise ValueError(f"img_size must be int or tuple/list of length 2. Got {img_size}") + patch_size = to_2tuple(patch_size) + self.img_size = img_size + self.patch_size = patch_size + self.grid_size = (img_size[0] // patch_size[0], img_size[1] // patch_size[1]) + self.num_patches = self.grid_size[0] * self.grid_size[1] + self.flatten = flatten + + self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size, bias=bias) + self.norm = norm_layer(embed_dim) if norm_layer else nn.Identity() + + def update_image_size(self, img_size): + self.img_size = img_size + self.grid_size = (img_size[0] // self.patch_size[0], img_size[1] // self.patch_size[1]) + self.num_patches = self.grid_size[0] * self.grid_size[1] + + def forward(self, x): + # B, C, H, W = x.shape + # _assert(H == self.img_size[0], f"Input image height ({H}) doesn't match model ({self.img_size[0]}).") + # _assert(W == self.img_size[1], f"Input image width ({W}) doesn't match model ({self.img_size[1]}).") + x = self.proj(x) + if self.flatten: + x = x.flatten(2).transpose(1, 2) # BCHW -> BNC + x = self.norm(x) + return x + + +def timestep_embedding(t, dim, max_period=10000, repeat_only=False): + """ + Create sinusoidal timestep embeddings. + :param t: a 1-D Tensor of N indices, one per batch element. + These may be fractional. + :param dim: the dimension of the output. + :param max_period: controls the minimum frequency of the embeddings. 
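+    :param repeat_only: if True, skip the sinusoids and simply repeat t across the embedding dimension.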
+ :return: an (N, D) Tensor of positional embeddings. + """ + # https://github.com/openai/glide-text2im/blob/main/glide_text2im/nn.py + if not repeat_only: + half = dim // 2 + freqs = torch.exp( + -math.log(max_period) + * torch.arange(start=0, end=half, dtype=torch.float32) + / half + ).to(device=t.device) # size: [dim/2], an exponentially decaying curve + args = t[:, None].float() * freqs[None] + embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) + if dim % 2: + embedding = torch.cat( + [embedding, torch.zeros_like(embedding[:, :1])], dim=-1 + ) + else: + embedding = repeat(t, "b -> b d", d=dim) + return embedding + + +class TimestepEmbedder(nn.Module): + """ + Embeds scalar timesteps into vector representations. + """ + def __init__(self, hidden_size, frequency_embedding_size=256, out_size=None): + super().__init__() + if out_size is None: + out_size = hidden_size + self.mlp = nn.Sequential( + nn.Linear(frequency_embedding_size, hidden_size, bias=True), + nn.SiLU(), + nn.Linear(hidden_size, out_size, bias=True), + ) + self.frequency_embedding_size = frequency_embedding_size + + def forward(self, t): + t_freq = timestep_embedding(t, self.frequency_embedding_size).type(self.mlp[0].weight.dtype) + t_emb = self.mlp(t_freq) + return t_emb diff --git a/ComfyUI_ExtraModels/HunYuanDiT/models/models.py b/ComfyUI_ExtraModels/HunYuanDiT/models/models.py new file mode 100644 index 0000000000000000000000000000000000000000..7dae413e1030e9892947156c7d93f7cfd3a38d3a --- /dev/null +++ b/ComfyUI_ExtraModels/HunYuanDiT/models/models.py @@ -0,0 +1,439 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from timm.models.vision_transformer import Mlp + +from .attn_layers import Attention, FlashCrossMHAModified, FlashSelfMHAModified, CrossAttention +from .embedders import TimestepEmbedder, PatchEmbed, timestep_embedding +from .norm_layers import RMSNorm +from .poolers import AttentionPool +from .posemb_layers import get_2d_rotary_pos_embed, get_fill_resize_and_crop + +def modulate(x, shift, scale): + return x * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1) + + +class FP32_Layernorm(nn.LayerNorm): + def forward(self, inputs: torch.Tensor) -> torch.Tensor: + origin_dtype = inputs.dtype + return F.layer_norm(inputs.float(), self.normalized_shape, self.weight.float(), self.bias.float(), + self.eps).to(origin_dtype) + + +class FP32_SiLU(nn.SiLU): + def forward(self, inputs: torch.Tensor) -> torch.Tensor: + return torch.nn.functional.silu(inputs.float(), inplace=False).to(inputs.dtype) + + +class HunYuanDiTBlock(nn.Module): + """ + A HunYuanDiT block with `add` conditioning.
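+    The conditioning vector is applied as a single additive shift before self-attention (SDXL-style "add"), rather than full adaLN-Zero scale/shift/gate modulation.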
+ """ + def __init__(self, + hidden_size, + c_emb_size, + num_heads, + mlp_ratio=4.0, + text_states_dim=1024, + use_flash_attn=False, + qk_norm=False, + norm_type="layer", + skip=False, + ): + super().__init__() + self.use_flash_attn = use_flash_attn + use_ele_affine = True + + if norm_type == "layer": + norm_layer = FP32_Layernorm + elif norm_type == "rms": + norm_layer = RMSNorm + else: + raise ValueError(f"Unknown norm_type: {norm_type}") + + # ========================= Self-Attention ========================= + self.norm1 = norm_layer(hidden_size, elementwise_affine=use_ele_affine, eps=1e-6) + if use_flash_attn: + self.attn1 = FlashSelfMHAModified(hidden_size, num_heads=num_heads, qkv_bias=True, qk_norm=qk_norm) + else: + self.attn1 = Attention(hidden_size, num_heads=num_heads, qkv_bias=True, qk_norm=qk_norm) + + # ========================= FFN ========================= + self.norm2 = norm_layer(hidden_size, elementwise_affine=use_ele_affine, eps=1e-6) + mlp_hidden_dim = int(hidden_size * mlp_ratio) + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.mlp = Mlp(in_features=hidden_size, hidden_features=mlp_hidden_dim, act_layer=approx_gelu, drop=0) + + # ========================= Add ========================= + # Simply use add like SDXL. + self.default_modulation = nn.Sequential( + FP32_SiLU(), + nn.Linear(c_emb_size, hidden_size, bias=True) + ) + + # ========================= Cross-Attention ========================= + if use_flash_attn: + self.attn2 = FlashCrossMHAModified(hidden_size, text_states_dim, num_heads=num_heads, qkv_bias=True, + qk_norm=qk_norm) + else: + self.attn2 = CrossAttention(hidden_size, text_states_dim, num_heads=num_heads, qkv_bias=True, + qk_norm=qk_norm) + self.norm3 = norm_layer(hidden_size, elementwise_affine=True, eps=1e-6) + + # ========================= Skip Connection ========================= + if skip: + self.skip_norm = norm_layer(2 * hidden_size, elementwise_affine=True, eps=1e-6) + self.skip_linear = nn.Linear(2 * hidden_size, hidden_size) + else: + self.skip_linear = None + + def forward(self, x, c=None, text_states=None, freq_cis_img=None, skip=None): + # Long Skip Connection + if self.skip_linear is not None: + cat = torch.cat([x, skip], dim=-1) + cat = self.skip_norm(cat) + x = self.skip_linear(cat) + + # Self-Attention + shift_msa = self.default_modulation(c).unsqueeze(dim=1) + attn_inputs = ( + self.norm1(x) + shift_msa, freq_cis_img, + ) + x = x + self.attn1(*attn_inputs)[0] + + # Cross-Attention + cross_inputs = ( + self.norm3(x), text_states, freq_cis_img + ) + x = x + self.attn2(*cross_inputs)[0] + + # FFN Layer + mlp_inputs = self.norm2(x) + x = x + self.mlp(mlp_inputs) + + return x + + +class FinalLayer(nn.Module): + """ + The final layer of HunYuanDiT. + """ + def __init__(self, final_hidden_size, c_emb_size, patch_size, out_channels): + super().__init__() + self.norm_final = nn.LayerNorm(final_hidden_size, elementwise_affine=False, eps=1e-6) + self.linear = nn.Linear(final_hidden_size, patch_size * patch_size * out_channels, bias=True) + self.adaLN_modulation = nn.Sequential( + FP32_SiLU(), + nn.Linear(c_emb_size, 2 * final_hidden_size, bias=True) + ) + + def forward(self, x, c): + shift, scale = self.adaLN_modulation(c).chunk(2, dim=1) + x = modulate(self.norm_final(x), shift, scale) + x = self.linear(x) + return x + + +class HunYuanDiT(nn.Module): + """ + HunYuanDiT: Diffusion model with a Transformer backbone. + + Parameters + ---------- + args: argparse.Namespace + The arguments parsed by argparse. 
+ input_size: tuple + The size of the input image. + patch_size: int + The size of the patch. + in_channels: int + The number of input channels. + hidden_size: int + The hidden size of the transformer backbone. + depth: int + The number of transformer blocks. + num_heads: int + The number of attention heads. + mlp_ratio: float + The ratio of the hidden size of the MLP in the transformer block. + log_fn: callable + The logging function. + """ + def __init__( + self, args, + input_size=(32, 32), + patch_size=2, + in_channels=4, + hidden_size=1152, + depth=28, + num_heads=16, + mlp_ratio=4.0, + log_fn=print, + cond_style=True, + cond_res=True, + **kwargs, + ): + super().__init__() + self.args = args + self.log_fn = log_fn + self.depth = depth + self.learn_sigma = args.learn_sigma + self.in_channels = in_channels + self.out_channels = in_channels * 2 if args.learn_sigma else in_channels + self.patch_size = patch_size + self.num_heads = num_heads + self.hidden_size = hidden_size + self.head_size = hidden_size // num_heads + self.text_states_dim = args.text_states_dim + self.text_states_dim_t5 = args.text_states_dim_t5 + self.text_len = args.text_len + self.text_len_t5 = args.text_len_t5 + self.norm = args.norm + self.cond_res = cond_res + self.cond_style = cond_style + + use_flash_attn = args.infer_mode == 'fa' + if use_flash_attn: + log_fn(f" Enable Flash Attention.") + qk_norm = True # See http://arxiv.org/abs/2302.05442 for details. + + self.mlp_t5 = nn.Sequential( + nn.Linear(self.text_states_dim_t5, self.text_states_dim_t5 * 4, bias=True), + FP32_SiLU(), + nn.Linear(self.text_states_dim_t5 * 4, self.text_states_dim, bias=True), + ) + # learnable replace + self.text_embedding_padding = nn.Parameter( + torch.randn(self.text_len + self.text_len_t5, self.text_states_dim, dtype=torch.float32)) + + # Attention pooling + self.pooler = AttentionPool(self.text_len_t5, self.text_states_dim_t5, num_heads=8, output_dim=1024) + + + self.extra_in_dim = 0 + if self.cond_res: + # Image size and crop size conditions + self.extra_in_dim += 256 * 6 + if self.cond_style: + # Here we use a default learned embedder layer for future extension. + self.style_embedder = nn.Embedding(1, hidden_size) + self.extra_in_dim += hidden_size + + # Text embedding for `add` + self.last_size = input_size + self.x_embedder = PatchEmbed(input_size, patch_size, in_channels, hidden_size) + self.t_embedder = TimestepEmbedder(hidden_size) + self.extra_in_dim += 1024 + self.extra_embedder = nn.Sequential( + nn.Linear(self.extra_in_dim, hidden_size * 4), + FP32_SiLU(), + nn.Linear(hidden_size * 4, hidden_size, bias=True), + ) + + # Image embedding + num_patches = self.x_embedder.num_patches + log_fn(f" Number of tokens: {num_patches}") + + # HUnYuanDiT Blocks + self.blocks = nn.ModuleList([ + HunYuanDiTBlock(hidden_size=hidden_size, + c_emb_size=hidden_size, + num_heads=num_heads, + mlp_ratio=mlp_ratio, + text_states_dim=self.text_states_dim, + use_flash_attn=use_flash_attn, + qk_norm=qk_norm, + norm_type=self.norm, + skip=layer > depth // 2, + ) + for layer in range(depth) + ]) + + self.final_layer = FinalLayer(hidden_size, hidden_size, patch_size, self.out_channels) + self.unpatchify_channels = self.out_channels + + def forward_raw(self, + x, + t, + encoder_hidden_states=None, + text_embedding_mask=None, + encoder_hidden_states_t5=None, + text_embedding_mask_t5=None, + image_meta_size=None, + style=None, + cos_cis_img=None, + sin_cis_img=None, + return_dict=False, + ): + """ + Forward pass of the encoder. 
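+        CLIP tokens and mT5 tokens (projected by mlp_t5) are concatenated along the sequence axis, and masked-out positions are replaced with the learned text_embedding_padding parameter.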
+ + Parameters + ---------- + x: torch.Tensor + (B, D, H, W) + t: torch.Tensor + (B) + encoder_hidden_states: torch.Tensor + CLIP text embedding, (B, L_clip, D) + text_embedding_mask: torch.Tensor + CLIP text embedding mask, (B, L_clip) + encoder_hidden_states_t5: torch.Tensor + T5 text embedding, (B, L_t5, D) + text_embedding_mask_t5: torch.Tensor + T5 text embedding mask, (B, L_t5) + image_meta_size: torch.Tensor + (B, 6) + style: torch.Tensor + (B) + cos_cis_img: torch.Tensor + sin_cis_img: torch.Tensor + return_dict: bool + Whether to return a dictionary. + """ + + text_states = encoder_hidden_states # 2,77,1024 + text_states_t5 = encoder_hidden_states_t5 # 2,256,2048 + text_states_mask = text_embedding_mask.bool() # 2,77 + text_states_t5_mask = text_embedding_mask_t5.bool() # 2,256 + b_t5, l_t5, c_t5 = text_states_t5.shape + text_states_t5 = self.mlp_t5(text_states_t5.view(-1, c_t5)) + text_states = torch.cat([text_states, text_states_t5.view(b_t5, l_t5, -1)], dim=1) # 2,205,1024 + clip_t5_mask = torch.cat([text_states_mask, text_states_t5_mask], dim=-1) + + clip_t5_mask = clip_t5_mask + text_states = torch.where(clip_t5_mask.unsqueeze(2), text_states, self.text_embedding_padding.to(text_states)) + + _, _, oh, ow = x.shape + th, tw = oh // self.patch_size, ow // self.patch_size + + # ========================= Build time and image embedding ========================= + t = self.t_embedder(t) + x = self.x_embedder(x) + + # Get image RoPE embedding according to `reso`lution. + freqs_cis_img = (cos_cis_img, sin_cis_img) + + # ========================= Concatenate all extra vectors ========================= + # Build text tokens with pooling + extra_vec = self.pooler(encoder_hidden_states_t5) + + if self.cond_res: + # Build image meta size tokens + image_meta_size = timestep_embedding(image_meta_size.view(-1), 256) # [B * 6, 256] + # if self.args.use_fp16: + # image_meta_size = image_meta_size.half() + + image_meta_size = image_meta_size.view(-1, 6 * 256) + extra_vec = torch.cat([extra_vec, image_meta_size], dim=1) # [B, D + 6 * 256] + + if self.cond_style: + # Build style tokens + style_embedding = self.style_embedder(style) + extra_vec = torch.cat([extra_vec, style_embedding], dim=1) + + # Concatenate all extra vectors + c = t + self.extra_embedder(extra_vec.to(self.dtype)) # [B, D] + + # ========================= Forward pass through HunYuanDiT blocks ========================= + skips = [] + for layer, block in enumerate(self.blocks): + if layer > self.depth // 2: + skip = skips.pop() + x = block(x, c, text_states, freqs_cis_img, skip) # (N, L, D) + else: + x = block(x, c, text_states, freqs_cis_img) # (N, L, D) + + if layer < (self.depth // 2 - 1): + skips.append(x) + + # ========================= Final layer ========================= + x = self.final_layer(x, c) # (N, L, patch_size ** 2 * out_channels) + x = self.unpatchify(x, th, tw) # (N, out_channels, H, W) + + if return_dict: + return {'x': x} + return x + + def calc_rope(self, height, width): + """ + Probably not the best in terms of perf to have this here + """ + th = height // 8 // self.patch_size + tw = width // 8 // self.patch_size + base_size = 512 // 8 // self.patch_size + start, stop = get_fill_resize_and_crop((th, tw), base_size) + sub_args = [start, stop, (th, tw)] + rope = get_2d_rotary_pos_embed(self.head_size, *sub_args) + return rope + + def forward(self, x, timesteps, context, context_mask=None, context_t5=None, context_t5_mask=None, src_size_cond=(1024,1024), **kwargs): + """ + Forward pass that adapts comfy input 
to original forward function + x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + timesteps: (N,) tensor of diffusion timesteps + context: (N, 1, 77, C) CLIP conditioning + context_t5: (N, 1, 256, C) MT5 conditioning + """ + # context_mask = torch.zeros(x.shape[0], 77, device=x.device) + # context_t5_mask = torch.zeros(x.shape[0], 256, device=x.device) + + # style + style = torch.as_tensor([0] * (x.shape[0]), device=x.device) + + # image size - todo separate for cond/uncond when batched + if torch.is_tensor(src_size_cond): + src_size_cond = (int(src_size_cond[0][0]), int(src_size_cond[0][1])) + + image_size = (x.shape[2]//2*16, x.shape[3]//2*16) + size_cond = list(src_size_cond) + [image_size[1], image_size[0], 0, 0] + image_meta_size = torch.as_tensor([size_cond] * x.shape[0], device=x.device) + + # RoPE + rope = self.calc_rope(*image_size) + + # Update x_embedder if image size changed + if self.last_size != image_size: + from tqdm import tqdm + tqdm.write(f"HyDiT: New image size {image_size}") + self.x_embedder.update_image_size( + (image_size[0]//8, image_size[1]//8), + ) + self.last_size = image_size + + # Run original forward pass + out = self.forward_raw( + x = x.to(self.dtype), + t = timesteps.to(self.dtype), + encoder_hidden_states = context.to(self.dtype), + text_embedding_mask = context_mask.to(self.dtype), + encoder_hidden_states_t5 = context_t5.to(self.dtype), + text_embedding_mask_t5 = context_t5_mask.to(self.dtype), + image_meta_size = image_meta_size.to(self.dtype), + style = style, + cos_cis_img = rope[0], + sin_cis_img = rope[1], + ) + + # return + out = out.to(torch.float) + if self.learn_sigma: + eps, rest = out[:, :self.in_channels], out[:, self.in_channels:] + return eps + else: + return out + + def unpatchify(self, x, h, w): + """ + x: (N, T, patch_size**2 * C) + imgs: (N, H, W, C) + """ + c = self.unpatchify_channels + p = self.x_embedder.patch_size[0] + # h = w = int(x.shape[1] ** 0.5) + assert h * w == x.shape[1] + + x = x.reshape(shape=(x.shape[0], h, w, p, p, c)) + x = torch.einsum('nhwpqc->nchpwq', x) + imgs = x.reshape(shape=(x.shape[0], c, h * p, w * p)) + return imgs diff --git a/ComfyUI_ExtraModels/HunYuanDiT/models/norm_layers.py b/ComfyUI_ExtraModels/HunYuanDiT/models/norm_layers.py new file mode 100644 index 0000000000000000000000000000000000000000..5204ad9e5aabd5dad05a0f84408ff85f96cfa8db --- /dev/null +++ b/ComfyUI_ExtraModels/HunYuanDiT/models/norm_layers.py @@ -0,0 +1,68 @@ +import torch +import torch.nn as nn + + +class RMSNorm(nn.Module): + def __init__(self, dim: int, elementwise_affine=True, eps: float = 1e-6): + """ + Initialize the RMSNorm normalization layer. + + Args: + dim (int): The dimension of the input tensor. + eps (float, optional): A small value added to the denominator for numerical stability. Default is 1e-6. + + Attributes: + eps (float): A small value added to the denominator for numerical stability. + weight (nn.Parameter): Learnable scaling parameter. + + """ + super().__init__() + self.eps = eps + if elementwise_affine: + self.weight = nn.Parameter(torch.ones(dim)) + + def _norm(self, x): + """ + Apply the RMSNorm normalization to the input tensor. + + Args: + x (torch.Tensor): The input tensor. + + Returns: + torch.Tensor: The normalized tensor. + + """ + return x * torch.rsqrt(x.pow(2).mean(-1, keepdim=True) + self.eps) + + def forward(self, x): + """ + Forward pass through the RMSNorm layer. + + Args: + x (torch.Tensor): The input tensor. 
+
+        Returns:
+            torch.Tensor: The output tensor after applying RMSNorm.
+
+        """
+        output = self._norm(x.float()).type_as(x)
+        if hasattr(self, "weight"):
+            output = output * self.weight
+        return output
+
+
+class GroupNorm32(nn.GroupNorm):
+    def __init__(self, num_groups, num_channels, eps=1e-5, dtype=None):
+        super().__init__(num_groups=num_groups, num_channels=num_channels, eps=eps, dtype=dtype)
+
+    def forward(self, x):
+        y = super().forward(x).to(x.dtype)
+        return y
+
+def normalization(channels, dtype=None):
+    """
+    Make a standard normalization layer.
+    :param channels: number of input channels.
+    :return: an nn.Module for normalization.
+    """
+    return GroupNorm32(num_channels=channels, num_groups=32, dtype=dtype)
diff --git a/ComfyUI_ExtraModels/HunYuanDiT/models/poolers.py b/ComfyUI_ExtraModels/HunYuanDiT/models/poolers.py
new file mode 100644
index 0000000000000000000000000000000000000000..a4adcaca51fded2268a644ca4c70d5b33dfcd3b0
--- /dev/null
+++ b/ComfyUI_ExtraModels/HunYuanDiT/models/poolers.py
@@ -0,0 +1,39 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+
+class AttentionPool(nn.Module):
+    def __init__(self, spacial_dim: int, embed_dim: int, num_heads: int, output_dim: int = None):
+        super().__init__()
+        self.positional_embedding = nn.Parameter(torch.randn(spacial_dim + 1, embed_dim) / embed_dim ** 0.5)
+        self.k_proj = nn.Linear(embed_dim, embed_dim)
+        self.q_proj = nn.Linear(embed_dim, embed_dim)
+        self.v_proj = nn.Linear(embed_dim, embed_dim)
+        self.c_proj = nn.Linear(embed_dim, output_dim or embed_dim)
+        self.num_heads = num_heads
+
+    def forward(self, x):
+        x = x.permute(1, 0, 2)  # NLC -> LNC
+        x = torch.cat([x.mean(dim=0, keepdim=True), x], dim=0)  # (L+1)NC
+        x = x + self.positional_embedding[:, None, :].to(x.dtype)  # (L+1)NC
+        x, _ = F.multi_head_attention_forward(
+            query=x[:1], key=x, value=x,
+            embed_dim_to_check=x.shape[-1],
+            num_heads=self.num_heads,
+            q_proj_weight=self.q_proj.weight,
+            k_proj_weight=self.k_proj.weight,
+            v_proj_weight=self.v_proj.weight,
+            in_proj_weight=None,
+            in_proj_bias=torch.cat([self.q_proj.bias, self.k_proj.bias, self.v_proj.bias]),
+            bias_k=None,
+            bias_v=None,
+            add_zero_attn=False,
+            dropout_p=0,
+            out_proj_weight=self.c_proj.weight,
+            out_proj_bias=self.c_proj.bias,
+            use_separate_proj_weight=True,
+            training=self.training,
+            need_weights=False
+        )
+        return x.squeeze(0)
diff --git a/ComfyUI_ExtraModels/HunYuanDiT/models/posemb_layers.py b/ComfyUI_ExtraModels/HunYuanDiT/models/posemb_layers.py
new file mode 100644
index 0000000000000000000000000000000000000000..62c83df77d85b05710e10dd1464638d611249306
--- /dev/null
+++ b/ComfyUI_ExtraModels/HunYuanDiT/models/posemb_layers.py
@@ -0,0 +1,225 @@
+import torch
+import numpy as np
+from typing import Union
+
+
+def _to_tuple(x):
+    if isinstance(x, int):
+        return x, x
+    else:
+        return x
+
+
+def get_fill_resize_and_crop(src, tgt):  # src: source (token grid) resolution, tgt: base resolution
+    th, tw = _to_tuple(tgt)
+    h, w = _to_tuple(src)
+
+    tr = th / tw  # aspect ratio of the base grid
+    r = h / w  # aspect ratio of the source grid
+
+    # resize
+    if r > tr:
+        resize_height = th
+        resize_width = int(round(th / h * w))
+    else:
+        resize_width = tw
+        resize_height = int(round(tw / w * h))  # scale the source grid so it fits inside the base grid
+
+    crop_top = int(round((th - resize_height) / 2.0))
+    crop_left = int(round((tw - resize_width) / 2.0))
+
+    return (crop_top, crop_left), (crop_top + resize_height, crop_left + resize_width)
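+
+# Worked example (editor's illustration, not part of the upstream file): a
+# 64x32 token grid mapped onto a 32x32 base grid is scaled to fit inside the
+# base and centred, so
+#     get_fill_resize_and_crop((64, 32), 32) == ((0, 8), (32, 24))
+# i.e. RoPE positions for that image are later sampled from rows 0..32 and
+# columns 8..24 of the base coordinate frame.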
+
+
+def get_meshgrid(start, *args):
+    if len(args) == 0:
+        # start is grid_size
+        num = _to_tuple(start)
+        start = (0, 0)
+        stop = num
+    elif len(args) == 1:
+        # start is start, args[0] is stop, step is 1
+        start = _to_tuple(start)
+        stop = _to_tuple(args[0])
+        num = (stop[0] - start[0], stop[1] - start[1])
+    elif len(args) == 2:
+        # start is start, args[0] is stop, args[1] is num
+        start = _to_tuple(start)  # top-left corner, e.g. (12, 0)
+        stop = _to_tuple(args[0])  # bottom-right corner, e.g. (20, 32)
+        num = _to_tuple(args[1])  # target grid size, e.g. (32, 124)
+    else:
+        raise ValueError(f"len(args) should be 0, 1 or 2, but got {len(args)}")
+
+    grid_h = np.linspace(start[0], stop[0], num[0], endpoint=False, dtype=np.float32)  # e.g. 32 samples interpolated between 12 and 20, or 124 samples between 0 and 32
+    grid_w = np.linspace(start[1], stop[1], num[1], endpoint=False, dtype=np.float32)
+    grid = np.meshgrid(grid_w, grid_h)  # here w goes first
+    grid = np.stack(grid, axis=0)  # [2, H, W]
+    return grid
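+
+# Worked example (editor's illustration, not part of the upstream file):
+#     get_meshgrid((0, 8), (32, 24), (64, 32))
+# returns a float32 array of shape [2, 64, 32]; plane 0 holds the column (w)
+# positions, sampled evenly from the half-open range [8, 24), and plane 1 the
+# row (h) positions from [0, 32), one pair per token of the 64x32 grid.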
+
+
+#################################################################################
+#                   Sine/Cosine Positional Embedding Functions                  #
+#################################################################################
+# https://github.com/facebookresearch/mae/blob/main/util/pos_embed.py
+
+def get_2d_sincos_pos_embed(embed_dim, start, *args, cls_token=False, extra_tokens=0):
+    """
+    grid_size: int of the grid height and width
+    return:
+    pos_embed: [grid_size*grid_size, embed_dim] or [1+grid_size*grid_size, embed_dim] (w/ or w/o cls_token)
+    """
+    grid = get_meshgrid(start, *args)  # [2, H, W]
+    # grid_h = np.arange(grid_size, dtype=np.float32)
+    # grid_w = np.arange(grid_size, dtype=np.float32)
+    # grid = np.meshgrid(grid_w, grid_h)  # here w goes first
+    # grid = np.stack(grid, axis=0)  # [2, W, H]
+
+    grid = grid.reshape([2, 1, *grid.shape[1:]])
+    pos_embed = get_2d_sincos_pos_embed_from_grid(embed_dim, grid)
+    if cls_token and extra_tokens > 0:
+        pos_embed = np.concatenate([np.zeros([extra_tokens, embed_dim]), pos_embed], axis=0)
+    return pos_embed
+
+
+def get_2d_sincos_pos_embed_from_grid(embed_dim, grid):
+    assert embed_dim % 2 == 0
+
+    # use half of dimensions to encode grid_h
+    emb_h = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[0])  # (H*W, D/2)
+    emb_w = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[1])  # (H*W, D/2)
+
+    emb = np.concatenate([emb_h, emb_w], axis=1)  # (H*W, D)
+    return emb
+
+
+def get_1d_sincos_pos_embed_from_grid(embed_dim, pos):
+    """
+    embed_dim: output dimension for each position
+    pos: a list of positions to be encoded: size (W,H)
+    out: (M, D)
+    """
+    assert embed_dim % 2 == 0
+    omega = np.arange(embed_dim // 2, dtype=np.float64)
+    omega /= embed_dim / 2.
+    omega = 1. / 10000**omega  # (D/2,)
+
+    pos = pos.reshape(-1)  # (M,)
+    out = np.einsum('m,d->md', pos, omega)  # (M, D/2), outer product
+
+    emb_sin = np.sin(out)  # (M, D/2)
+    emb_cos = np.cos(out)  # (M, D/2)
+
+    emb = np.concatenate([emb_sin, emb_cos], axis=1)  # (M, D)
+    return emb
+
+
+#################################################################################
+#                   Rotary Positional Embedding Functions                       #
+#################################################################################
+# https://github.com/facebookresearch/llama/blob/main/llama/model.py#L443
+
+def get_2d_rotary_pos_embed(embed_dim, start, *args, use_real=True):
+    """
+    This is a 2d version of precompute_freqs_cis, which is a RoPE for image tokens with 2d structure.
+
+    Parameters
+    ----------
+    embed_dim: int
+        embedding dimension size
+    start: int or tuple of int
+        If len(args) == 0, start is num; If len(args) == 1, start is start, args[0] is stop, step is 1;
+        If len(args) == 2, start is start, args[0] is stop, args[1] is num.
+    use_real: bool
+        If True, return real part and imaginary part separately. Otherwise, return complex numbers.
+
+    Returns
+    -------
+    pos_embed: torch.Tensor
+        [HW, D/2]
+    """
+    grid = get_meshgrid(start, *args)  # [2, H, W]
+    grid = grid.reshape([2, 1, *grid.shape[1:]])  # the sampling grid has the same resolution as the target token grid
+    pos_embed = get_2d_rotary_pos_embed_from_grid(embed_dim, grid, use_real=use_real)
+    return pos_embed
+
+
+def get_2d_rotary_pos_embed_from_grid(embed_dim, grid, use_real=False):
+    assert embed_dim % 4 == 0
+
+    # use half of dimensions to encode grid_h
+    emb_h = get_1d_rotary_pos_embed(embed_dim // 2, grid[0].reshape(-1), use_real=use_real)  # (H*W, D/4)
+    emb_w = get_1d_rotary_pos_embed(embed_dim // 2, grid[1].reshape(-1), use_real=use_real)  # (H*W, D/4)
+
+    if use_real:
+        cos = torch.cat([emb_h[0], emb_w[0]], dim=1)  # (H*W, D/2)
+        sin = torch.cat([emb_h[1], emb_w[1]], dim=1)  # (H*W, D/2)
+        return cos, sin
+    else:
+        emb = torch.cat([emb_h, emb_w], dim=1)  # (H*W, D/2)
+        return emb
+
+
+def get_1d_rotary_pos_embed(dim: int, pos: Union[np.ndarray, int], theta: float = 10000.0, use_real=False):
+    """
+    Precompute the frequency tensor for complex exponentials (cis) with given dimensions.
+
+    This function calculates a frequency tensor with complex exponentials using the given dimension 'dim'
+    and the position indices 'pos'. The 'theta' parameter scales the frequencies.
+    The returned tensor contains complex values in complex64 data type.
+
+    Args:
+        dim (int): Dimension of the frequency tensor.
+        pos (np.ndarray, int): Position indices for the frequency tensor. [S] or scalar
+        theta (float, optional): Scaling factor for frequency computation. Defaults to 10000.0.
+        use_real (bool, optional): If True, return real part and imaginary part separately.
+                                   Otherwise, return complex numbers.
+
+    Returns:
+        torch.Tensor: Precomputed frequency tensor with complex exponentials. [S, D/2]
+
+    """
+    if isinstance(pos, int):
+        pos = np.arange(pos)
+    freqs = 1.0 / (theta ** (torch.arange(0, dim, 2)[: (dim // 2)].float() / dim))  # [D/2]
+    t = torch.from_numpy(pos).to(freqs.device)  # type: ignore  # [S]
+    freqs = torch.outer(t, freqs).float()  # type: ignore  # [S, D/2]
+    if use_real:
+        freqs_cos = freqs.cos().repeat_interleave(2, dim=1)  # [S, D]
+        freqs_sin = freqs.sin().repeat_interleave(2, dim=1)  # [S, D]
+        return freqs_cos, freqs_sin
+    else:
+        freqs_cis = torch.polar(torch.ones_like(freqs), freqs)  # complex64  # [S, D/2]
+        return freqs_cis
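+
+# Worked example (editor's illustration, not part of the upstream file): for a
+# per-head dimension of 88 (hidden_size // num_heads) and a 64x64 token grid
+# cropped into the 32x32 base frame,
+#     cos, sin = get_2d_rotary_pos_embed(88, (0, 0), (32, 32), (64, 64), use_real=True)
+# yields two tensors of shape [64 * 64, 88] = [4096, 88]; the model's
+# forward_raw passes such a (cos, sin) pair to every transformer block as
+# (cos_cis_img, sin_cis_img).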
+
+
+def calc_sizes(rope_img, patch_size, th, tw):
+    """Compute the argument list that defines the RoPE sampling grid."""
+    if rope_img == 'extend':
+        # extend mode: use the target token grid directly
+        sub_args = [(th, tw)]
+    elif rope_img.startswith('base'):
+        # base mode: anchor on one base size and interpolate all other sizes from it
+        base_size = int(rope_img[4:]) // 8 // patch_size  # e.g. 'base512' uses 512 as the base size
+        start, stop = get_fill_resize_and_crop((th, tw), base_size)  # top-left and bottom-right corners of the crop inside the base grid (e.g. 32x32)
+        sub_args = [start, stop, (th, tw)]
+    else:
+        raise ValueError(f"Unknown rope_img: {rope_img}")
+    return sub_args
+
+
+def init_image_posemb(rope_img,
+                      resolutions,
+                      patch_size,
+                      hidden_size,
+                      num_heads,
+                      log_fn,
+                      rope_real=True,
+                      ):
+    freqs_cis_img = {}
+    for reso in resolutions:
+        th, tw = reso.height // 8 // patch_size, reso.width // 8 // patch_size
+        sub_args = calc_sizes(rope_img, patch_size, th, tw)  # [top-left, bottom-right, target (h, w)] of the crop inside the base grid
+        freqs_cis_img[str(reso)] = get_2d_rotary_pos_embed(hidden_size // num_heads, *sub_args, use_real=rope_real)
+        log_fn(f"  Using image RoPE ({rope_img}) ({'real' if rope_real else 'complex'}): {sub_args} | ({reso}) "
+               f"{freqs_cis_img[str(reso)][0].shape if rope_real else freqs_cis_img[str(reso)].shape}")
+    return freqs_cis_img
diff --git a/ComfyUI_ExtraModels/HunYuanDiT/mt5_tokenizer/config.json b/ComfyUI_ExtraModels/HunYuanDiT/mt5_tokenizer/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..fba9f465d19fc95adb764fd995dc859df26d6ff4
--- /dev/null
+++ b/ComfyUI_ExtraModels/HunYuanDiT/mt5_tokenizer/config.json
@@ -0,0 +1,33 @@
+{
+  "_name_or_path": "mt5",
+  "architectures": [
+    "MT5ForConditionalGeneration"
+  ],
+  "classifier_dropout": 0.0,
+  "d_ff": 5120,
+  "d_kv": 64,
+  "d_model": 2048,
+  "decoder_start_token_id": 0,
+  "dense_act_fn": "gelu_new",
+  "dropout_rate": 0.1,
+  "eos_token_id": 1,
+  "feed_forward_proj": "gated-gelu",
+  "initializer_factor": 1.0,
+  "is_encoder_decoder": true,
+  "is_gated_act": true,
+  "layer_norm_epsilon": 1e-06,
+  "model_type": "mt5",
+  "num_decoder_layers": 24,
+  "num_heads": 32,
+  "num_layers": 24,
+  "output_past": true,
+  "pad_token_id": 0,
+  "relative_attention_max_distance": 128,
+  "relative_attention_num_buckets": 32,
+  "tie_word_embeddings": false,
+  "tokenizer_class": "T5Tokenizer",
+  "torch_dtype": "float16",
+  "transformers_version": "4.40.2",
+  "use_cache": true,
+  "vocab_size": 250112
+}
diff --git a/ComfyUI_ExtraModels/HunYuanDiT/mt5_tokenizer/special_tokens_map.json b/ComfyUI_ExtraModels/HunYuanDiT/mt5_tokenizer/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..6dc4d430ddbd24171268d73da061ce9f0b092911
--- /dev/null
+++ b/ComfyUI_ExtraModels/HunYuanDiT/mt5_tokenizer/special_tokens_map.json
@@ -0,0 +1 @@
+{"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>"}
\ No newline at end of file
diff --git a/ComfyUI_ExtraModels/HunYuanDiT/mt5_tokenizer/spiece.model b/ComfyUI_ExtraModels/HunYuanDiT/mt5_tokenizer/spiece.model
new file mode 100644
index 0000000000000000000000000000000000000000..e417801865fd66bd40f9d45d46b6d0d0c2aa36b6
--- /dev/null
+++ b/ComfyUI_ExtraModels/HunYuanDiT/mt5_tokenizer/spiece.model
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ef78f86560d809067d12bac6c09f19a462cb3af3f54d2b8acbba26e1433125d6
+size 4309802
diff --git a/ComfyUI_ExtraModels/HunYuanDiT/mt5_tokenizer/tokenizer_config.json b/ComfyUI_ExtraModels/HunYuanDiT/mt5_tokenizer/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..712e82c8fde9e07d534178c2dfbdd063572d00c4
--- /dev/null
+++ b/ComfyUI_ExtraModels/HunYuanDiT/mt5_tokenizer/tokenizer_config.json
@@ -0,0 +1 @@
+{"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "extra_ids": 0, "additional_special_tokens": null, "special_tokens_map_file":
"/home/patrick/.cache/torch/transformers/685ac0ca8568ec593a48b61b0a3c272beee9bc194a3c7241d15dcadb5f875e53.f76030f3ec1b96a8199b2593390c610e76ca8028ef3d24680000619ffb646276", "tokenizer_file": null, "name_or_path": "google/mt5-small"} \ No newline at end of file diff --git a/ComfyUI_ExtraModels/HunYuanDiT/nodes.py b/ComfyUI_ExtraModels/HunYuanDiT/nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..bcc1180f02432b29f8c2b56fa727c0d5d95be22f --- /dev/null +++ b/ComfyUI_ExtraModels/HunYuanDiT/nodes.py @@ -0,0 +1,198 @@ +import os +import folder_paths +from copy import deepcopy + +from .conf import hydit_conf +from .loader import load_hydit + +class HYDiTCheckpointLoader: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "ckpt_name": (folder_paths.get_filename_list("checkpoints"),), + "model": (list(hydit_conf.keys()),{"default":"G/2"}), + } + } + RETURN_TYPES = ("MODEL",) + RETURN_NAMES = ("model",) + FUNCTION = "load_checkpoint" + CATEGORY = "ExtraModels/HunyuanDiT" + TITLE = "Hunyuan DiT Checkpoint Loader" + + def load_checkpoint(self, ckpt_name, model): + ckpt_path = folder_paths.get_full_path("checkpoints", ckpt_name) + model_conf = hydit_conf[model] + model = load_hydit( + model_path = ckpt_path, + model_conf = model_conf, + ) + return (model,) + +#### temp stuff for the text encoder #### +import torch +from .tenc import load_clip, load_t5 +from ..utils.dtype import string_to_dtype +dtypes = [ + "default", + "auto (comfy)", + "FP32", + "FP16", + "BF16" +] + +class HYDiTTextEncoderLoader: + @classmethod + def INPUT_TYPES(s): + devices = ["auto", "cpu", "gpu"] + # hack for using second GPU as offload + for k in range(1, torch.cuda.device_count()): + devices.append(f"cuda:{k}") + return { + "required": { + "clip_name": (folder_paths.get_filename_list("clip"),), + "mt5_name": (folder_paths.get_filename_list("t5"),), + "device": (devices, {"default":"cpu"}), + "dtype": (dtypes,), + } + } + + RETURN_TYPES = ("CLIP", "T5") + FUNCTION = "load_model" + CATEGORY = "ExtraModels/HunyuanDiT" + TITLE = "Hunyuan DiT Text Encoder Loader" + + def load_model(self, clip_name, mt5_name, device, dtype): + dtype = string_to_dtype(dtype, "text_encoder") + if device == "cpu": + assert dtype in [None, torch.float32, torch.bfloat16], f"Can't use dtype '{dtype}' with CPU! Set dtype to 'default' or 'bf16'." 
+ + clip = load_clip( + model_path = folder_paths.get_full_path("clip", clip_name), + device = device, + dtype = dtype, + ) + t5 = load_t5( + model_path = folder_paths.get_full_path("t5", mt5_name), + device = device, + dtype = dtype, + ) + return(clip, t5) + +class HYDiTTextEncode: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "text": ("STRING", {"multiline": True}), + "text_t5": ("STRING", {"multiline": True}), + "CLIP": ("CLIP",), + "T5": ("T5",), + } + } + + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "encode" + CATEGORY = "ExtraModels/HunyuanDiT" + TITLE = "Hunyuan DiT Text Encode" + + def encode(self, text, text_t5, CLIP, T5): + # T5 + T5.load_model() + t5_pre = T5.tokenizer( + text_t5, + max_length = T5.cond_stage_model.max_length, + padding = 'max_length', + truncation = True, + return_attention_mask = True, + add_special_tokens = True, + return_tensors = 'pt' + ) + t5_mask = t5_pre["attention_mask"] + with torch.no_grad(): + t5_outs = T5.cond_stage_model.transformer( + input_ids = t5_pre["input_ids"].to(T5.load_device), + attention_mask = t5_mask.to(T5.load_device), + output_hidden_states = True, + ) + # to-do: replace -1 for clip skip + t5_embs = t5_outs["hidden_states"][-1].float().cpu() + + # "clip" + CLIP.load_model() + clip_pre = CLIP.tokenizer( + text, + max_length = CLIP.cond_stage_model.max_length, + padding = 'max_length', + truncation = True, + return_attention_mask = True, + add_special_tokens = True, + return_tensors = 'pt' + ) + clip_mask = clip_pre["attention_mask"] + with torch.no_grad(): + clip_outs = CLIP.cond_stage_model.transformer( + input_ids = clip_pre["input_ids"].to(CLIP.load_device), + attention_mask = clip_mask.to(CLIP.load_device), + ) + # to-do: add hidden states + clip_embs = clip_outs[0].float().cpu() + + # combined cond + return ([[ + clip_embs, { + "context_t5": t5_embs, + "context_mask": clip_mask.float(), + "context_t5_mask": t5_mask.float() + } + ]],) + +class HYDiTTextEncodeSimple(HYDiTTextEncode): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "text": ("STRING", {"multiline": True}), + "CLIP": ("CLIP",), + "T5": ("T5",), + } + } + + FUNCTION = "encode_simple" + TITLE = "Hunyuan DiT Text Encode (simple)" + + def encode_simple(self, text, **args): + return self.encode(text=text, text_t5=text, **args) + +class HYDiTSrcSizeCond: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "cond": ("CONDITIONING", ), + "width": ("INT", {"default": 1024.0, "min": 0, "max": 8192, "step": 16}), + "height": ("INT", {"default": 1024.0, "min": 0, "max": 8192, "step": 16}), + } + } + + RETURN_TYPES = ("CONDITIONING",) + RETURN_NAMES = ("cond",) + FUNCTION = "add_cond" + CATEGORY = "ExtraModels/HunyuanDiT" + TITLE = "Hunyuan DiT Size Conditioning (advanced)" + + def add_cond(self, cond, width, height): + cond = deepcopy(cond) + for c in range(len(cond)): + cond[c][1].update({ + "src_size_cond": [[height, width]], + }) + return (cond,) + +NODE_CLASS_MAPPINGS = { + "HYDiTCheckpointLoader": HYDiTCheckpointLoader, + "HYDiTTextEncoderLoader": HYDiTTextEncoderLoader, + "HYDiTTextEncode": HYDiTTextEncode, + "HYDiTTextEncodeSimple": HYDiTTextEncodeSimple, + "HYDiTSrcSizeCond": HYDiTSrcSizeCond, +} diff --git a/ComfyUI_ExtraModels/HunYuanDiT/tenc.py b/ComfyUI_ExtraModels/HunYuanDiT/tenc.py new file mode 100644 index 0000000000000000000000000000000000000000..a41b6a6dc614790a9adb4faa35d0f01053d53a01 --- /dev/null +++ b/ComfyUI_ExtraModels/HunYuanDiT/tenc.py @@ -0,0 +1,181 @@ +# This is for loading the CLIP (bert?) 
+ mT5 encoder for HunYuanDiT +import os +import torch +from transformers import AutoTokenizer, modeling_utils +from transformers import T5Config, T5EncoderModel, BertConfig, BertModel + +from comfy import model_management +import comfy.model_patcher +import comfy.utils + +class mT5Model(torch.nn.Module): + def __init__(self, textmodel_json_config=None, device="cpu", max_length=256, freeze=True, dtype=None): + super().__init__() + self.device = device + self.dtype = dtype + self.max_length = max_length + if textmodel_json_config is None: + textmodel_json_config = os.path.join( + os.path.dirname(os.path.realpath(__file__)), + f"config_mt5.json" + ) + config = T5Config.from_json_file(textmodel_json_config) + with modeling_utils.no_init_weights(): + self.transformer = T5EncoderModel(config) + self.to(dtype) + if freeze: + self.freeze() + + def freeze(self): + self.transformer = self.transformer.eval() + for param in self.parameters(): + param.requires_grad = False + + def load_sd(self, sd): + return self.transformer.load_state_dict(sd, strict=False) + + def to(self, *args, **kwargs): + return self.transformer.to(*args, **kwargs) + +class hyCLIPModel(torch.nn.Module): + def __init__(self, textmodel_json_config=None, device="cpu", max_length=77, freeze=True, dtype=None): + super().__init__() + self.device = device + self.dtype = dtype + self.max_length = max_length + if textmodel_json_config is None: + textmodel_json_config = os.path.join( + os.path.dirname(os.path.realpath(__file__)), + f"config_clip.json" + ) + config = BertConfig.from_json_file(textmodel_json_config) + with modeling_utils.no_init_weights(): + self.transformer = BertModel(config) + self.to(dtype) + if freeze: + self.freeze() + + def freeze(self): + self.transformer = self.transformer.eval() + for param in self.parameters(): + param.requires_grad = False + + def load_sd(self, sd): + return self.transformer.load_state_dict(sd, strict=False) + + def to(self, *args, **kwargs): + return self.transformer.to(*args, **kwargs) + +class EXM_HyDiT_Tenc_Temp: + def __init__(self, no_init=False, device="cpu", dtype=None, model_class="mT5", *kwargs): + if no_init: + return + + size = 8 if model_class == "mT5" else 2 + if dtype == torch.float32: + size *= 2 + size *= (1024**3) + + if device == "auto": + self.load_device = model_management.text_encoder_device() + self.offload_device = model_management.text_encoder_offload_device() + self.init_device = "cpu" + elif device == "cpu": + size = 0 # doesn't matter + self.load_device = "cpu" + self.offload_device = "cpu" + self.init_device="cpu" + elif device.startswith("cuda"): + print("Direct CUDA device override!\nVRAM will not be freed by default.") + size = 0 # not used + self.load_device = device + self.offload_device = device + self.init_device = device + else: + self.load_device = model_management.get_torch_device() + self.offload_device = "cpu" + self.init_device="cpu" + + self.dtype = dtype + self.device = self.load_device + if model_class == "mT5": + self.cond_stage_model = mT5Model( + device = self.load_device, + dtype = self.dtype, + ) + tokenizer_args = {"subfolder": "t2i/mt5"} # web + tokenizer_path = os.path.join( # local + os.path.dirname(os.path.realpath(__file__)), + "mt5_tokenizer", + ) + else: + self.cond_stage_model = hyCLIPModel( + device = self.load_device, + dtype = self.dtype, + ) + tokenizer_args = {"subfolder": "t2i/tokenizer",} # web + tokenizer_path = os.path.join( # local + os.path.dirname(os.path.realpath(__file__)), + "tokenizer", + ) + # self.tokenizer = 
AutoTokenizer.from_pretrained( + # "Tencent-Hunyuan/HunyuanDiT", + # **tokenizer_args + # ) + self.tokenizer = AutoTokenizer.from_pretrained(tokenizer_path) + self.patcher = comfy.model_patcher.ModelPatcher( + self.cond_stage_model, + load_device = self.load_device, + offload_device = self.offload_device, + current_device = self.load_device, + size = size, + ) + + def clone(self): + n = EXM_HyDiT_Tenc_Temp(no_init=True) + n.patcher = self.patcher.clone() + n.cond_stage_model = self.cond_stage_model + n.tokenizer = self.tokenizer + return n + + def load_sd(self, sd): + return self.cond_stage_model.load_sd(sd) + + def get_sd(self): + return self.cond_stage_model.state_dict() + + def load_model(self): + if self.load_device != "cpu": + model_management.load_model_gpu(self.patcher) + return self.patcher + + def add_patches(self, patches, strength_patch=1.0, strength_model=1.0): + return self.patcher.add_patches(patches, strength_patch, strength_model) + + def get_key_patches(self): + return self.patcher.get_key_patches() + +def load_clip(model_path, **kwargs): + model = EXM_HyDiT_Tenc_Temp(model_class="clip", **kwargs) + sd = comfy.utils.load_torch_file(model_path) + + prefix = "bert." + state_dict = {} + for key in sd: + nkey = key + if key.startswith(prefix): + nkey = key[len(prefix):] + state_dict[nkey] = sd[key] + + m, e = model.load_sd(state_dict) + if len(m) > 0 or len(e) > 0: + print(f"HYDiT: clip missing {len(m)} keys ({len(e)} extra)") + return model + +def load_t5(model_path, **kwargs): + model = EXM_HyDiT_Tenc_Temp(model_class="mT5", **kwargs) + sd = comfy.utils.load_torch_file(model_path) + m, e = model.load_sd(sd) + if len(m) > 0 or len(e) > 0: + print(f"HYDiT: mT5 missing {len(m)} keys ({len(e)} extra)") + return model diff --git a/ComfyUI_ExtraModels/HunYuanDiT/tokenizer/config.json b/ComfyUI_ExtraModels/HunYuanDiT/tokenizer/config.json new file mode 100644 index 0000000000000000000000000000000000000000..f6298741b427e1045bae70d7f7c62b53641ad0a5 --- /dev/null +++ b/ComfyUI_ExtraModels/HunYuanDiT/tokenizer/config.json @@ -0,0 +1,34 @@ +{ + "_name_or_path": "hfl/chinese-roberta-wwm-ext-large", + "architectures": [ + "BertModel" + ], + "attention_probs_dropout_prob": 0.1, + "bos_token_id": 0, + "classifier_dropout": null, + "directionality": "bidi", + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 1024, + "initializer_range": 0.02, + "intermediate_size": 4096, + "layer_norm_eps": 1e-12, + "max_position_embeddings": 512, + "model_type": "bert", + "num_attention_heads": 16, + "num_hidden_layers": 24, + "output_past": true, + "pad_token_id": 0, + "pooler_fc_size": 768, + "pooler_num_attention_heads": 12, + "pooler_num_fc_layers": 3, + "pooler_size_per_head": 128, + "pooler_type": "first_token_transform", + "position_embedding_type": "absolute", + "torch_dtype": "float32", + "transformers_version": "4.22.1", + "type_vocab_size": 2, + "use_cache": true, + "vocab_size": 47020 +} diff --git a/ComfyUI_ExtraModels/HunYuanDiT/tokenizer/special_tokens_map.json b/ComfyUI_ExtraModels/HunYuanDiT/tokenizer/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..a8b3208c2884c4efb86e49300fdd3dc877220cdf --- /dev/null +++ b/ComfyUI_ExtraModels/HunYuanDiT/tokenizer/special_tokens_map.json @@ -0,0 +1,7 @@ +{ + "cls_token": "[CLS]", + "mask_token": "[MASK]", + "pad_token": "[PAD]", + "sep_token": "[SEP]", + "unk_token": "[UNK]" +} diff --git a/ComfyUI_ExtraModels/HunYuanDiT/tokenizer/tokenizer_config.json 
b/ComfyUI_ExtraModels/HunYuanDiT/tokenizer/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..a14356073e11a885074a7cdbddc749463cefd911 --- /dev/null +++ b/ComfyUI_ExtraModels/HunYuanDiT/tokenizer/tokenizer_config.json @@ -0,0 +1,16 @@ +{ + "cls_token": "[CLS]", + "do_basic_tokenize": true, + "do_lower_case": true, + "mask_token": "[MASK]", + "name_or_path": "hfl/chinese-roberta-wwm-ext", + "never_split": null, + "pad_token": "[PAD]", + "sep_token": "[SEP]", + "special_tokens_map_file": "/home/chenweifeng/.cache/huggingface/hub/models--hfl--chinese-roberta-wwm-ext/snapshots/5c58d0b8ec1d9014354d691c538661bf00bfdb44/special_tokens_map.json", + "strip_accents": null, + "tokenize_chinese_chars": true, + "tokenizer_class": "BertTokenizer", + "unk_token": "[UNK]", + "model_max_length": 77 +} diff --git a/ComfyUI_ExtraModels/HunYuanDiT/tokenizer/vocab.txt b/ComfyUI_ExtraModels/HunYuanDiT/tokenizer/vocab.txt new file mode 100644 index 0000000000000000000000000000000000000000..6246906805d02aca01714c71e4c8d77b69a7a131 --- /dev/null +++ b/ComfyUI_ExtraModels/HunYuanDiT/tokenizer/vocab.txt @@ -0,0 +1,47020 @@ +[PAD] +[unused1] +[unused2] +[unused3] +[unused4] +[unused5] +[unused6] +[unused7] +[unused8] +[unused9] +[unused10] +[unused11] +[unused12] +[unused13] +[unused14] +[unused15] +[unused16] +[unused17] +[unused18] +[unused19] +[unused20] +[unused21] +[unused22] +[unused23] +[unused24] +[unused25] +[unused26] +[unused27] +[unused28] +[unused29] +[unused30] +[unused31] +[unused32] +[unused33] +[unused34] +[unused35] +[unused36] +[unused37] +[unused38] +[unused39] +[unused40] +[unused41] +[unused42] +[unused43] +[unused44] +[unused45] +[unused46] +[unused47] +[unused48] +[unused49] +[unused50] +[unused51] +[unused52] +[unused53] +[unused54] +[unused55] +[unused56] +[unused57] +[unused58] +[unused59] +[unused60] +[unused61] +[unused62] +[unused63] +[unused64] +[unused65] +[unused66] +[unused67] +[unused68] +[unused69] +[unused70] +[unused71] +[unused72] +[unused73] +[unused74] +[unused75] +[unused76] +[unused77] +[unused78] +[unused79] +[unused80] +[unused81] +[unused82] +[unused83] +[unused84] +[unused85] +[unused86] +[unused87] +[unused88] +[unused89] +[unused90] +[unused91] +[unused92] +[unused93] +[unused94] +[unused95] +[unused96] +[unused97] +[unused98] +[unused99] +[UNK] +[CLS] +[SEP] +[MASK] + + +! +" +# +$ +% +& +' +( +) +* ++ +, +- +. +/ +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +: +; +< += +> +? +@ +[ +\ +] +^ +_ +a +b +c +d +e +f +g +h +i +j +k +l +m +n +o +p +q +r +s +t +u +v +w +x +y +z +{ +| +} +~ +£ +¤ +¥ +§ +© +« +® +° +± +² +³ +µ +· +¹ +º +» +¼ +× +ß +æ +÷ +ø +đ +ŋ +ɔ +ə +ɡ +ʰ +ˇ +ˈ +ˊ +ˋ +ˍ +ː +˙ +˚ +ˢ +α +β +γ +δ +ε +η +θ +ι +κ +λ +μ +ν +ο +π +ρ +ς +σ +τ +υ +φ +χ +ψ +ω +а +б +в +г +д +е +ж +з +и +к +л +м +н +о +п +р +с +т +у +ф +х +ц +ч +ш +ы +ь +я +і +ا +ب +ة +ت +د +ر +س +ع +ل +م +ن +ه +و +ي +۩ +ก +ง +น +ม +ย +ร +อ +า +เ +๑ +་ +ღ +ᄀ +ᄁ +ᄂ +ᄃ +ᄅ +ᄆ +ᄇ +ᄈ +ᄉ +ᄋ +ᄌ +ᄎ +ᄏ +ᄐ +ᄑ +ᄒ +ᅡ +ᅢ +ᅣ +ᅥ +ᅦ +ᅧ +ᅨ +ᅩ +ᅪ +ᅬ +ᅭ +ᅮ +ᅯ +ᅲ +ᅳ +ᅴ +ᅵ +ᆨ +ᆫ +ᆯ +ᆷ +ᆸ +ᆺ +ᆻ +ᆼ +ᗜ +ᵃ +ᵉ +ᵍ +ᵏ +ᵐ +ᵒ +ᵘ +‖ +„ +† +• +‥ +‧ +
 +‰ +′ +″ +‹ +› +※ +‿ +⁄ +ⁱ +⁺ +ⁿ +₁ +₂ +₃ +₄ +€ +℃ +№ +™ +ⅰ +ⅱ +ⅲ +ⅳ +ⅴ +← +↑ +→ +↓ +↔ +↗ +↘ +⇒ +∀ +− +∕ +∙ +√ +∞ +∟ +∠ +∣ +∥ +∩ +∮ +∶ +∼ +∽ +≈ +≒ +≡ +≤ +≥ +≦ +≧ +≪ +≫ +⊙ +⋅ +⋈ +⋯ +⌒ +① +② +③ +④ +⑤ +⑥ +⑦ +⑧ +⑨ +⑩ +⑴ +⑵ +⑶ +⑷ +⑸ +⒈ +⒉ +⒊ +⒋ +ⓒ +ⓔ +ⓘ +─ +━ +│ +┃ +┅ +┆ +┊ +┌ +└ +├ +┣ +═ +║ +╚ +╞ +╠ +╭ +╮ +╯ +╰ +╱ +╳ +▂ +▃ +▅ +▇ +█ +▉ +▋ +▌ +▍ +▎ +■ +□ +▪ +▫ +▬ +▲ +△ +▶ +► +▼ +▽ +◆ +◇ +○ +◎ +● +◕ +◠ +◢ +◤ +☀ +★ +☆ +☕ +☞ +☺ +☼ +♀ +♂ +♠ +♡ +♣ +♥ +♦ +♪ +♫ +♬ +✈ +✔ +✕ +✖ +✦ +✨ +✪ +✰ +✿ +❀ +❤ +➜ +➤ +⦿ +、 +。 +〃 +々 +〇 +〈 +〉 +《 +》 +「 +」 +『 +』 +【 +】 +〓 +〔 +〕 +〖 +〗 +〜 +〝 +〞 +ぁ +あ +ぃ +い +う +ぇ +え +お +か +き +く +け +こ +さ +し +す +せ +そ +た +ち +っ +つ +て +と +な +に +ぬ +ね +の +は +ひ +ふ +へ +ほ +ま +み +む +め +も +ゃ +や +ゅ +ゆ +ょ +よ +ら +り +る +れ +ろ +わ +を +ん +゜ +ゝ +ァ +ア +ィ +イ +ゥ +ウ +ェ +エ +ォ +オ +カ +キ +ク +ケ +コ +サ +シ +ス +セ +ソ +タ +チ +ッ +ツ +テ +ト +ナ +ニ +ヌ +ネ +ノ +ハ +ヒ +フ +ヘ +ホ +マ +ミ +ム +メ +モ +ャ +ヤ +ュ +ユ +ョ +ヨ +ラ +リ +ル +レ +ロ +ワ +ヲ +ン +ヶ +・ +ー +ヽ +ㄅ +ㄆ +ㄇ +ㄉ +ㄋ +ㄌ +ㄍ +ㄎ +ㄏ +ㄒ +ㄚ +ㄛ +ㄞ +ㄟ +ㄢ +ㄤ +ㄥ +ㄧ +ㄨ +ㆍ +㈦ +㊣ +㎡ +㗎 +一 +丁 +七 +万 +丈 +三 +上 +下 +不 +与 +丐 +丑 +专 +且 +丕 +世 +丘 +丙 +业 +丛 +东 +丝 +丞 +丟 +両 +丢 +两 +严 +並 +丧 +丨 +个 +丫 +中 +丰 +串 +临 +丶 +丸 +丹 +为 +主 +丼 +丽 +举 +丿 +乂 +乃 +久 +么 +义 +之 +乌 +乍 +乎 +乏 +乐 +乒 +乓 +乔 +乖 +乗 +乘 +乙 +乜 +九 +乞 +也 +习 +乡 +书 +乩 +买 +乱 +乳 +乾 +亀 +亂 +了 +予 +争 +事 +二 +于 +亏 +云 +互 +五 +井 +亘 +亙 +亚 +些 +亜 +亞 +亟 +亡 +亢 +交 +亥 +亦 +产 +亨 +亩 +享 +京 +亭 +亮 +亲 +亳 +亵 +人 +亿 +什 +仁 +仃 +仄 +仅 +仆 +仇 +今 +介 +仍 +从 +仏 +仑 +仓 +仔 +仕 +他 +仗 +付 +仙 +仝 +仞 +仟 +代 +令 +以 +仨 +仪 +们 +仮 +仰 +仲 +件 +价 +任 +份 +仿 +企 +伉 +伊 +伍 +伎 +伏 +伐 +休 +伕 +众 +优 +伙 +会 +伝 +伞 +伟 +传 +伢 +伤 +伦 +伪 +伫 +伯 +估 +伴 +伶 +伸 +伺 +似 +伽 +佃 +但 +佇 +佈 +位 +低 +住 +佐 +佑 +体 +佔 +何 +佗 +佘 +余 +佚 +佛 +作 +佝 +佞 +佟 +你 +佢 +佣 +佤 +佥 +佩 +佬 +佯 +佰 +佳 +併 +佶 +佻 +佼 +使 +侃 +侄 +來 +侈 +例 +侍 +侏 +侑 +侖 +侗 +供 +依 +侠 +価 +侣 +侥 +侦 +侧 +侨 +侬 +侮 +侯 +侵 +侶 +侷 +便 +係 +促 +俄 +俊 +俎 +俏 +俐 +俑 +俗 +俘 +俚 +保 +俞 +俟 +俠 +信 +俨 +俩 +俪 +俬 +俭 +修 +俯 +俱 +俳 +俸 +俺 +俾 +倆 +倉 +個 +倌 +倍 +倏 +們 +倒 +倔 +倖 +倘 +候 +倚 +倜 +借 +倡 +値 +倦 +倩 +倪 +倫 +倬 +倭 +倶 +债 +值 +倾 +偃 +假 +偈 +偉 +偌 +偎 +偏 +偕 +做 +停 +健 +側 +偵 +偶 +偷 +偻 +偽 +偿 +傀 +傅 +傍 +傑 +傘 +備 +傚 +傢 +傣 +傥 +储 +傩 +催 +傭 +傲 +傳 +債 +傷 +傻 +傾 +僅 +働 +像 +僑 +僕 +僖 +僚 +僥 +僧 +僭 +僮 +僱 +僵 +價 +僻 +儀 +儂 +億 +儆 +儉 +儋 +儒 +儕 +儘 +償 +儡 +優 +儲 +儷 +儼 +儿 +兀 +允 +元 +兄 +充 +兆 +兇 +先 +光 +克 +兌 +免 +児 +兑 +兒 +兔 +兖 +党 +兜 +兢 +入 +內 +全 +兩 +八 +公 +六 +兮 +兰 +共 +兲 +关 +兴 +兵 +其 +具 +典 +兹 +养 +兼 +兽 +冀 +内 +円 +冇 +冈 +冉 +冊 +册 +再 +冏 +冒 +冕 +冗 +写 +军 +农 +冠 +冢 +冤 +冥 +冨 +冪 +冬 +冯 +冰 +冲 +决 +况 +冶 +冷 +冻 +冼 +冽 +冾 +净 +凄 +准 +凇 +凈 +凉 +凋 +凌 +凍 +减 +凑 +凛 +凜 +凝 +几 +凡 +凤 +処 +凪 +凭 +凯 +凰 +凱 +凳 +凶 +凸 +凹 +出 +击 +函 +凿 +刀 +刁 +刃 +分 +切 +刈 +刊 +刍 +刎 +刑 +划 +列 +刘 +则 +刚 +创 +初 +删 +判 +別 +刨 +利 +刪 +别 +刮 +到 +制 +刷 +券 +刹 +刺 +刻 +刽 +剁 +剂 +剃 +則 +剉 +削 +剋 +剌 +前 +剎 +剐 +剑 +剔 +剖 +剛 +剜 +剝 +剣 +剤 +剥 +剧 +剩 +剪 +副 +割 +創 +剷 +剽 +剿 +劃 +劇 +劈 +劉 +劊 +劍 +劏 +劑 +力 +劝 +办 +功 +加 +务 +劣 +动 +助 +努 +劫 +劭 +励 +劲 +劳 +労 +劵 +効 +劾 +势 +勁 +勃 +勇 +勉 +勋 +勐 +勒 +動 +勖 +勘 +務 +勛 +勝 +勞 +募 +勢 +勤 +勧 +勳 +勵 +勸 +勺 +勻 +勾 +勿 +匀 +包 +匆 +匈 +匍 +匐 +匕 +化 +北 +匙 +匝 +匠 +匡 +匣 +匪 +匮 +匯 +匱 +匹 +区 +医 +匾 +匿 +區 +十 +千 +卅 +升 +午 +卉 +半 +卍 +华 +协 +卑 +卒 +卓 +協 +单 +卖 +南 +単 +博 +卜 +卞 +卟 +占 +卡 +卢 +卤 +卦 +卧 +卫 +卮 +卯 +印 +危 +即 +却 +卵 +卷 +卸 +卻 +卿 +厂 +厄 +厅 +历 +厉 +压 +厌 +厕 +厘 +厚 +厝 +原 +厢 +厥 +厦 +厨 +厩 +厭 +厮 +厲 +厳 +去 +县 +叁 +参 +參 +又 +叉 +及 +友 +双 +反 +収 +发 +叔 +取 +受 +变 +叙 +叛 +叟 +叠 +叡 +叢 +口 +古 +句 +另 +叨 +叩 +只 +叫 +召 +叭 +叮 +可 +台 +叱 +史 +右 +叵 +叶 +号 +司 +叹 +叻 +叼 +叽 +吁 +吃 +各 +吆 +合 +吉 +吊 +吋 +同 +名 +后 +吏 +吐 +向 +吒 +吓 +吕 +吖 +吗 +君 +吝 +吞 +吟 +吠 +吡 +否 +吧 +吨 +吩 +含 +听 +吭 +吮 +启 +吱 +吳 +吴 +吵 +吶 +吸 +吹 +吻 +吼 +吽 +吾 +呀 +呂 +呃 +呆 +呈 +告 +呋 +呎 +呐 +呓 +呕 +呗 +员 +呛 +呜 +呢 +呤 +呦 +周 +呱 +呲 +味 +呵 +呷 +呸 +呻 +呼 +命 +咀 +咁 +咂 +咄 +咆 +咋 +和 +咎 +咏 +咐 +咒 +咔 +咕 +咖 +咗 +咘 +咙 +咚 +咛 +咣 +咤 +咦 +咧 +咨 +咩 +咪 +咫 +咬 +咭 +咯 +咱 +咲 +咳 +咸 +咻 +咽 +咿 +哀 +品 +哂 +哄 +哆 +哇 +哈 +哉 +哋 +哌 +响 +哎 +哏 +哐 +哑 +哒 +哔 +哗 +哟 +員 +哥 +哦 +哧 +哨 +哩 +哪 +哭 +哮 
+哲 +哺 +哼 +哽 +唁 +唄 +唆 +唇 +唉 +唏 +唐 +唑 +唔 +唠 +唤 +唧 +唬 +售 +唯 +唰 +唱 +唳 +唷 +唸 +唾 +啃 +啄 +商 +啉 +啊 +問 +啓 +啕 +啖 +啜 +啞 +啟 +啡 +啤 +啥 +啦 +啧 +啪 +啫 +啬 +啮 +啰 +啱 +啲 +啵 +啶 +啷 +啸 +啻 +啼 +啾 +喀 +喂 +喃 +善 +喆 +喇 +喉 +喊 +喋 +喎 +喏 +喔 +喘 +喙 +喚 +喜 +喝 +喟 +喧 +喪 +喫 +喬 +單 +喰 +喱 +喲 +喳 +喵 +営 +喷 +喹 +喺 +喻 +喽 +嗅 +嗆 +嗇 +嗎 +嗑 +嗒 +嗓 +嗔 +嗖 +嗚 +嗜 +嗝 +嗟 +嗡 +嗣 +嗤 +嗦 +嗨 +嗪 +嗬 +嗯 +嗰 +嗲 +嗳 +嗶 +嗷 +嗽 +嘀 +嘅 +嘆 +嘈 +嘉 +嘌 +嘍 +嘎 +嘔 +嘖 +嘗 +嘘 +嘚 +嘛 +嘜 +嘞 +嘟 +嘢 +嘣 +嘤 +嘧 +嘩 +嘭 +嘮 +嘯 +嘰 +嘱 +嘲 +嘴 +嘶 +嘸 +嘹 +嘻 +嘿 +噁 +噌 +噎 +噓 +噔 +噗 +噙 +噜 +噠 +噢 +噤 +器 +噩 +噪 +噬 +噱 +噴 +噶 +噸 +噹 +噻 +噼 +嚀 +嚇 +嚎 +嚏 +嚐 +嚓 +嚕 +嚟 +嚣 +嚥 +嚨 +嚮 +嚴 +嚷 +嚼 +囂 +囉 +囊 +囍 +囑 +囔 +囗 +囚 +四 +囝 +回 +囟 +因 +囡 +团 +団 +囤 +囧 +囪 +囫 +园 +困 +囱 +囲 +図 +围 +囹 +固 +国 +图 +囿 +圃 +圄 +圆 +圈 +國 +圍 +圏 +園 +圓 +圖 +團 +圜 +土 +圣 +圧 +在 +圩 +圭 +地 +圳 +场 +圻 +圾 +址 +坂 +均 +坊 +坍 +坎 +坏 +坐 +坑 +块 +坚 +坛 +坝 +坞 +坟 +坠 +坡 +坤 +坦 +坨 +坪 +坯 +坳 +坵 +坷 +垂 +垃 +垄 +型 +垒 +垚 +垛 +垠 +垢 +垣 +垦 +垩 +垫 +垭 +垮 +垵 +埂 +埃 +埋 +城 +埔 +埕 +埗 +域 +埠 +埤 +埵 +執 +埸 +培 +基 +埼 +堀 +堂 +堃 +堅 +堆 +堇 +堑 +堕 +堙 +堡 +堤 +堪 +堯 +堰 +報 +場 +堵 +堺 +堿 +塊 +塌 +塑 +塔 +塗 +塘 +塚 +塞 +塢 +塩 +填 +塬 +塭 +塵 +塾 +墀 +境 +墅 +墉 +墊 +墒 +墓 +増 +墘 +墙 +墜 +增 +墟 +墨 +墩 +墮 +墳 +墻 +墾 +壁 +壅 +壆 +壇 +壊 +壑 +壓 +壕 +壘 +壞 +壟 +壢 +壤 +壩 +士 +壬 +壮 +壯 +声 +売 +壳 +壶 +壹 +壺 +壽 +处 +备 +変 +复 +夏 +夔 +夕 +外 +夙 +多 +夜 +够 +夠 +夢 +夥 +大 +天 +太 +夫 +夭 +央 +夯 +失 +头 +夷 +夸 +夹 +夺 +夾 +奂 +奄 +奇 +奈 +奉 +奋 +奎 +奏 +奐 +契 +奔 +奕 +奖 +套 +奘 +奚 +奠 +奢 +奥 +奧 +奪 +奬 +奮 +女 +奴 +奶 +奸 +她 +好 +如 +妃 +妄 +妆 +妇 +妈 +妊 +妍 +妒 +妓 +妖 +妘 +妙 +妝 +妞 +妣 +妤 +妥 +妨 +妩 +妪 +妮 +妲 +妳 +妹 +妻 +妾 +姆 +姉 +姊 +始 +姍 +姐 +姑 +姒 +姓 +委 +姗 +姚 +姜 +姝 +姣 +姥 +姦 +姨 +姪 +姫 +姬 +姹 +姻 +姿 +威 +娃 +娄 +娅 +娆 +娇 +娉 +娑 +娓 +娘 +娛 +娜 +娟 +娠 +娣 +娥 +娩 +娱 +娲 +娴 +娶 +娼 +婀 +婁 +婆 +婉 +婊 +婕 +婚 +婢 +婦 +婧 +婪 +婭 +婴 +婵 +婶 +婷 +婺 +婿 +媒 +媚 +媛 +媞 +媧 +媲 +媳 +媽 +媾 +嫁 +嫂 +嫉 +嫌 +嫑 +嫔 +嫖 +嫘 +嫚 +嫡 +嫣 +嫦 +嫩 +嫲 +嫵 +嫻 +嬅 +嬉 +嬌 +嬗 +嬛 +嬢 +嬤 +嬪 +嬰 +嬴 +嬷 +嬸 +嬿 +孀 +孃 +子 +孑 +孔 +孕 +孖 +字 +存 +孙 +孚 +孛 +孜 +孝 +孟 +孢 +季 +孤 +学 +孩 +孪 +孫 +孬 +孰 +孱 +孳 +孵 +學 +孺 +孽 +孿 +宁 +它 +宅 +宇 +守 +安 +宋 +完 +宏 +宓 +宕 +宗 +官 +宙 +定 +宛 +宜 +宝 +实 +実 +宠 +审 +客 +宣 +室 +宥 +宦 +宪 +宫 +宮 +宰 +害 +宴 +宵 +家 +宸 +容 +宽 +宾 +宿 +寂 +寄 +寅 +密 +寇 +富 +寐 +寒 +寓 +寛 +寝 +寞 +察 +寡 +寢 +寥 +實 +寧 +寨 +審 +寫 +寬 +寮 +寰 +寵 +寶 +寸 +对 +寺 +寻 +导 +対 +寿 +封 +専 +射 +将 +將 +專 +尉 +尊 +尋 +對 +導 +小 +少 +尔 +尕 +尖 +尘 +尚 +尝 +尤 +尧 +尬 +就 +尴 +尷 +尸 +尹 +尺 +尻 +尼 +尽 +尾 +尿 +局 +屁 +层 +屄 +居 +屆 +屈 +屉 +届 +屋 +屌 +屍 +屎 +屏 +屐 +屑 +展 +屜 +属 +屠 +屡 +屢 +層 +履 +屬 +屯 +山 +屹 +屿 +岀 +岁 +岂 +岌 +岐 +岑 +岔 +岖 +岗 +岘 +岙 +岚 +岛 +岡 +岩 +岫 +岬 +岭 +岱 +岳 +岷 +岸 +峇 +峋 +峒 +峙 +峡 +峤 +峥 +峦 +峨 +峪 +峭 +峯 +峰 +峴 +島 +峻 +峽 +崁 +崂 +崆 +崇 +崎 +崑 +崔 +崖 +崗 +崙 +崛 +崧 +崩 +崭 +崴 +崽 +嵇 +嵊 +嵋 +嵌 +嵐 +嵘 +嵩 +嵬 +嵯 +嶂 +嶄 +嶇 +嶋 +嶙 +嶺 +嶼 +嶽 +巅 +巍 +巒 +巔 +巖 +川 +州 +巡 +巢 +工 +左 +巧 +巨 +巩 +巫 +差 +己 +已 +巳 +巴 +巷 +巻 +巽 +巾 +巿 +币 +市 +布 +帅 +帆 +师 +希 +帐 +帑 +帕 +帖 +帘 +帚 +帛 +帜 +帝 +帥 +带 +帧 +師 +席 +帮 +帯 +帰 +帳 +帶 +帷 +常 +帼 +帽 +幀 +幂 +幄 +幅 +幌 +幔 +幕 +幟 +幡 +幢 +幣 +幫 +干 +平 +年 +并 +幸 +幹 +幺 +幻 +幼 +幽 +幾 +广 +庁 +広 +庄 +庆 +庇 +床 +序 +庐 +库 +应 +底 +庖 +店 +庙 +庚 +府 +庞 +废 +庠 +度 +座 +庫 +庭 +庵 +庶 +康 +庸 +庹 +庾 +廁 +廂 +廃 +廈 +廉 +廊 +廓 +廖 +廚 +廝 +廟 +廠 +廢 +廣 +廬 +廳 +延 +廷 +建 +廿 +开 +弁 +异 +弃 +弄 +弈 +弊 +弋 +式 +弑 +弒 +弓 +弔 +引 +弗 +弘 +弛 +弟 +张 +弥 +弦 +弧 +弩 +弭 +弯 +弱 +張 +強 +弹 +强 +弼 +弾 +彅 +彆 +彈 +彌 +彎 +归 +当 +录 +彗 +彙 +彝 +形 +彤 +彥 +彦 +彧 +彩 +彪 +彫 +彬 +彭 +彰 +影 +彷 +役 +彻 +彼 +彿 +往 +征 +径 +待 +徇 +很 +徉 +徊 +律 +後 +徐 +徑 +徒 +従 +徕 +得 +徘 +徙 +徜 +從 +徠 +御 +徨 +復 +循 +徬 +微 +徳 +徴 +徵 +德 +徹 +徼 +徽 +心 +必 +忆 +忌 +忍 +忏 +忐 +忑 +忒 +忖 +志 +忘 +忙 +応 +忠 +忡 +忤 +忧 +忪 +快 +忱 +念 +忻 +忽 +忿 +怀 +态 +怂 +怅 +怆 +怎 +怏 +怒 +怔 +怕 +怖 +怙 +怜 +思 +怠 +怡 +急 +怦 +性 +怨 +怪 +怯 +怵 +总 +怼 +恁 +恃 +恆 +恋 +恍 +恐 +恒 +恕 +恙 +恚 +恢 +恣 +恤 +恥 +恨 +恩 +恪 +恫 +恬 +恭 +息 +恰 +恳 +恵 +恶 +恸 +恺 +恻 +恼 +恿 +悄 +悅 +悉 +悌 +悍 +悔 +悖 +悚 +悟 +悠 +患 +悦 +您 +悩 +悪 +悬 +悯 +悱 +悲 +悴 +悵 +悶 +悸 +悻 +悼 +悽 +情 +惆 +惇 +惊 +惋 +惑 +惕 +惘 +惚 +惜 +惟 +惠 +惡 +惦 +惧 +惨 +惩 +惫 +惬 +惭 +惮 +惯 +惰 +惱 +想 +惴 +惶 +惹 +惺 +愁 +愆 +愈 +愉 +愍 +意 +愕 +愚 +愛 +愜 +感 +愣 +愤 +愧 +愫 +愷 +愿 +慄 +慈 +態 +慌 +慎 +慑 +慕 +慘 +慚 
+慟 +慢 +慣 +慧 +慨 +慫 +慮 +慰 +慳 +慵 +慶 +慷 +慾 +憂 +憊 +憋 +憎 +憐 +憑 +憔 +憚 +憤 +憧 +憨 +憩 +憫 +憬 +憲 +憶 +憾 +懂 +懇 +懈 +應 +懊 +懋 +懑 +懒 +懦 +懲 +懵 +懶 +懷 +懸 +懺 +懼 +懾 +懿 +戀 +戈 +戊 +戌 +戍 +戎 +戏 +成 +我 +戒 +戕 +或 +战 +戚 +戛 +戟 +戡 +戦 +截 +戬 +戮 +戰 +戲 +戳 +戴 +戶 +户 +戸 +戻 +戾 +房 +所 +扁 +扇 +扈 +扉 +手 +才 +扎 +扑 +扒 +打 +扔 +払 +托 +扛 +扣 +扦 +执 +扩 +扪 +扫 +扬 +扭 +扮 +扯 +扰 +扱 +扳 +扶 +批 +扼 +找 +承 +技 +抄 +抉 +把 +抑 +抒 +抓 +投 +抖 +抗 +折 +抚 +抛 +抜 +択 +抟 +抠 +抡 +抢 +护 +报 +抨 +披 +抬 +抱 +抵 +抹 +押 +抽 +抿 +拂 +拄 +担 +拆 +拇 +拈 +拉 +拋 +拌 +拍 +拎 +拐 +拒 +拓 +拔 +拖 +拗 +拘 +拙 +拚 +招 +拜 +拟 +拡 +拢 +拣 +拥 +拦 +拧 +拨 +择 +括 +拭 +拮 +拯 +拱 +拳 +拴 +拷 +拼 +拽 +拾 +拿 +持 +挂 +指 +挈 +按 +挎 +挑 +挖 +挙 +挚 +挛 +挝 +挞 +挟 +挠 +挡 +挣 +挤 +挥 +挨 +挪 +挫 +振 +挲 +挹 +挺 +挽 +挾 +捂 +捅 +捆 +捉 +捋 +捌 +捍 +捎 +捏 +捐 +捕 +捞 +损 +捡 +换 +捣 +捧 +捨 +捩 +据 +捱 +捲 +捶 +捷 +捺 +捻 +掀 +掂 +掃 +掇 +授 +掉 +掌 +掏 +掐 +排 +掖 +掘 +掙 +掛 +掠 +採 +探 +掣 +接 +控 +推 +掩 +措 +掬 +掰 +掲 +掳 +掴 +掷 +掸 +掺 +揀 +揃 +揄 +揆 +揉 +揍 +描 +提 +插 +揖 +揚 +換 +握 +揣 +揩 +揪 +揭 +揮 +援 +揶 +揸 +揹 +揽 +搀 +搁 +搂 +搅 +損 +搏 +搐 +搓 +搔 +搖 +搗 +搜 +搞 +搡 +搪 +搬 +搭 +搵 +搶 +携 +搽 +摀 +摁 +摄 +摆 +摇 +摈 +摊 +摒 +摔 +摘 +摞 +摟 +摧 +摩 +摯 +摳 +摸 +摹 +摺 +摻 +撂 +撃 +撅 +撇 +撈 +撐 +撑 +撒 +撓 +撕 +撚 +撞 +撤 +撥 +撩 +撫 +撬 +播 +撮 +撰 +撲 +撵 +撷 +撸 +撻 +撼 +撿 +擀 +擁 +擂 +擄 +擅 +擇 +擊 +擋 +操 +擎 +擒 +擔 +擘 +據 +擞 +擠 +擡 +擢 +擦 +擬 +擰 +擱 +擲 +擴 +擷 +擺 +擼 +擾 +攀 +攏 +攒 +攔 +攘 +攙 +攜 +攝 +攞 +攢 +攣 +攤 +攥 +攪 +攫 +攬 +支 +收 +攸 +改 +攻 +放 +政 +故 +效 +敌 +敍 +敎 +敏 +救 +敕 +敖 +敗 +敘 +教 +敛 +敝 +敞 +敢 +散 +敦 +敬 +数 +敲 +整 +敵 +敷 +數 +斂 +斃 +文 +斋 +斌 +斎 +斐 +斑 +斓 +斗 +料 +斛 +斜 +斟 +斡 +斤 +斥 +斧 +斩 +斫 +斬 +断 +斯 +新 +斷 +方 +於 +施 +旁 +旃 +旅 +旋 +旌 +旎 +族 +旖 +旗 +无 +既 +日 +旦 +旧 +旨 +早 +旬 +旭 +旮 +旱 +时 +旷 +旺 +旻 +昀 +昂 +昆 +昇 +昉 +昊 +昌 +明 +昏 +易 +昔 +昕 +昙 +星 +映 +春 +昧 +昨 +昭 +是 +昱 +昴 +昵 +昶 +昼 +显 +晁 +時 +晃 +晉 +晋 +晌 +晏 +晒 +晓 +晔 +晕 +晖 +晗 +晚 +晝 +晞 +晟 +晤 +晦 +晨 +晩 +普 +景 +晰 +晴 +晶 +晷 +智 +晾 +暂 +暄 +暇 +暈 +暉 +暌 +暐 +暑 +暖 +暗 +暝 +暢 +暧 +暨 +暫 +暮 +暱 +暴 +暸 +暹 +曄 +曆 +曇 +曉 +曖 +曙 +曜 +曝 +曠 +曦 +曬 +曰 +曲 +曳 +更 +書 +曹 +曼 +曾 +替 +最 +會 +月 +有 +朋 +服 +朐 +朔 +朕 +朗 +望 +朝 +期 +朦 +朧 +木 +未 +末 +本 +札 +朮 +术 +朱 +朴 +朵 +机 +朽 +杀 +杂 +权 +杆 +杈 +杉 +李 +杏 +材 +村 +杓 +杖 +杜 +杞 +束 +杠 +条 +来 +杨 +杭 +杯 +杰 +東 +杳 +杵 +杷 +杼 +松 +板 +极 +构 +枇 +枉 +枋 +析 +枕 +林 +枚 +果 +枝 +枢 +枣 +枪 +枫 +枭 +枯 +枰 +枱 +枳 +架 +枷 +枸 +柄 +柏 +某 +柑 +柒 +染 +柔 +柘 +柚 +柜 +柞 +柠 +柢 +查 +柩 +柬 +柯 +柱 +柳 +柴 +柵 +査 +柿 +栀 +栃 +栄 +栅 +标 +栈 +栉 +栋 +栎 +栏 +树 +栓 +栖 +栗 +校 +栩 +株 +样 +核 +根 +格 +栽 +栾 +桀 +桁 +桂 +桃 +桅 +框 +案 +桉 +桌 +桎 +桐 +桑 +桓 +桔 +桜 +桠 +桡 +桢 +档 +桥 +桦 +桧 +桨 +桩 +桶 +桿 +梁 +梅 +梆 +梏 +梓 +梗 +條 +梟 +梢 +梦 +梧 +梨 +梭 +梯 +械 +梳 +梵 +梶 +检 +棂 +棄 +棉 +棋 +棍 +棒 +棕 +棗 +棘 +棚 +棟 +棠 +棣 +棧 +森 +棱 +棲 +棵 +棹 +棺 +椁 +椅 +椋 +植 +椎 +椒 +検 +椪 +椭 +椰 +椹 +椽 +椿 +楂 +楊 +楓 +楔 +楚 +楝 +楞 +楠 +楣 +楨 +楫 +業 +楮 +極 +楷 +楸 +楹 +楼 +楽 +概 +榄 +榆 +榈 +榉 +榔 +榕 +榖 +榛 +榜 +榨 +榫 +榭 +榮 +榱 +榴 +榷 +榻 +槁 +槃 +構 +槌 +槍 +槎 +槐 +槓 +様 +槛 +槟 +槤 +槭 +槲 +槳 +槻 +槽 +槿 +樁 +樂 +樊 +樑 +樓 +標 +樞 +樟 +模 +樣 +権 +横 +樫 +樯 +樱 +樵 +樸 +樹 +樺 +樽 +樾 +橄 +橇 +橋 +橐 +橘 +橙 +機 +橡 +橢 +橫 +橱 +橹 +橼 +檀 +檄 +檎 +檐 +檔 +檗 +檜 +檢 +檬 +檯 +檳 +檸 +檻 +櫃 +櫚 +櫛 +櫥 +櫸 +櫻 +欄 +權 +欒 +欖 +欠 +次 +欢 +欣 +欧 +欲 +欸 +欺 +欽 +款 +歆 +歇 +歉 +歌 +歎 +歐 +歓 +歙 +歛 +歡 +止 +正 +此 +步 +武 +歧 +歩 +歪 +歯 +歲 +歳 +歴 +歷 +歸 +歹 +死 +歼 +殁 +殃 +殆 +殇 +殉 +殊 +残 +殒 +殓 +殖 +殘 +殞 +殡 +殤 +殭 +殯 +殲 +殴 +段 +殷 +殺 +殼 +殿 +毀 +毁 +毂 +毅 +毆 +毋 +母 +毎 +每 +毒 +毓 +比 +毕 +毗 +毘 +毙 +毛 +毡 +毫 +毯 +毽 +氈 +氏 +氐 +民 +氓 +气 +氖 +気 +氙 +氛 +氟 +氡 +氢 +氣 +氤 +氦 +氧 +氨 +氪 +氫 +氮 +氯 +氰 +氲 +水 +氷 +永 +氹 +氾 +汀 +汁 +求 +汆 +汇 +汉 +汎 +汐 +汕 +汗 +汙 +汛 +汝 +汞 +江 +池 +污 +汤 +汨 +汩 +汪 +汰 +汲 +汴 +汶 +汹 +決 +汽 +汾 +沁 +沂 +沃 +沅 +沈 +沉 +沌 +沏 +沐 +沒 +沓 +沖 +沙 +沛 +沟 +没 +沢 +沣 +沥 +沦 +沧 +沪 +沫 +沭 +沮 +沱 +河 +沸 +油 +治 +沼 +沽 +沾 +沿 +況 +泄 +泉 +泊 +泌 +泓 +法 +泗 +泛 +泞 +泠 +泡 +波 +泣 +泥 +注 +泪 +泫 +泮 +泯 +泰 +泱 +泳 +泵 +泷 +泸 +泻 +泼 +泽 +泾 +洁 +洄 +洋 +洒 +洗 +洙 +洛 +洞 +津 +洩 +洪 +洮 +洱 +洲 +洵 +洶 +洸 +洹 +活 +洼 +洽 +派 +流 +浃 +浄 +浅 +浆 +浇 +浊 +测 +济 +浏 +浑 +浒 +浓 +浔 +浙 +浚 +浜 +浣 +浦 +浩 +浪 +浬 +浮 +浯 +浴 +海 +浸 +涂 +涅 +涇 +消 +涉 +涌 +涎 +涓 +涔 +涕 +涙 +涛 +涝 +涞 +涟 +涠 +涡 +涣 +涤 +润 +涧 +涨 +涩 +涪 +涮 +涯 +液 +涵 +涸 +涼 +涿 +淀 +淄 +淅 
+淆 +淇 +淋 +淌 +淑 +淒 +淖 +淘 +淙 +淚 +淞 +淡 +淤 +淦 +淨 +淩 +淪 +淫 +淬 +淮 +深 +淳 +淵 +混 +淹 +淺 +添 +淼 +清 +済 +渉 +渊 +渋 +渍 +渎 +渐 +渔 +渗 +渙 +渚 +減 +渝 +渠 +渡 +渣 +渤 +渥 +渦 +温 +測 +渭 +港 +渲 +渴 +游 +渺 +渾 +湃 +湄 +湊 +湍 +湖 +湘 +湛 +湟 +湧 +湫 +湮 +湯 +湳 +湾 +湿 +満 +溃 +溅 +溉 +溏 +源 +準 +溜 +溝 +溟 +溢 +溥 +溧 +溪 +溫 +溯 +溱 +溴 +溶 +溺 +溼 +滁 +滂 +滄 +滅 +滇 +滋 +滌 +滑 +滓 +滔 +滕 +滙 +滚 +滝 +滞 +滟 +满 +滢 +滤 +滥 +滦 +滨 +滩 +滬 +滯 +滲 +滴 +滷 +滸 +滾 +滿 +漁 +漂 +漆 +漉 +漏 +漓 +演 +漕 +漠 +漢 +漣 +漩 +漪 +漫 +漬 +漯 +漱 +漲 +漳 +漸 +漾 +漿 +潆 +潇 +潋 +潍 +潑 +潔 +潘 +潛 +潜 +潞 +潟 +潢 +潤 +潦 +潧 +潭 +潮 +潰 +潴 +潸 +潺 +潼 +澀 +澄 +澆 +澈 +澍 +澎 +澗 +澜 +澡 +澤 +澧 +澱 +澳 +澹 +激 +濁 +濂 +濃 +濑 +濒 +濕 +濘 +濛 +濟 +濠 +濡 +濤 +濫 +濬 +濮 +濯 +濱 +濺 +濾 +瀅 +瀆 +瀉 +瀋 +瀏 +瀑 +瀕 +瀘 +瀚 +瀛 +瀝 +瀞 +瀟 +瀧 +瀨 +瀬 +瀰 +瀾 +灌 +灏 +灑 +灘 +灝 +灞 +灣 +火 +灬 +灭 +灯 +灰 +灵 +灶 +灸 +灼 +災 +灾 +灿 +炀 +炁 +炅 +炉 +炊 +炎 +炒 +炔 +炕 +炖 +炙 +炜 +炫 +炬 +炭 +炮 +炯 +炳 +炷 +炸 +点 +為 +炼 +炽 +烁 +烂 +烃 +烈 +烊 +烏 +烘 +烙 +烛 +烟 +烤 +烦 +烧 +烨 +烩 +烫 +烬 +热 +烯 +烷 +烹 +烽 +焉 +焊 +焕 +焖 +焗 +焘 +焙 +焚 +焜 +無 +焦 +焯 +焰 +焱 +然 +焼 +煅 +煉 +煊 +煌 +煎 +煒 +煖 +煙 +煜 +煞 +煤 +煥 +煦 +照 +煨 +煩 +煮 +煲 +煸 +煽 +熄 +熊 +熏 +熒 +熔 +熙 +熟 +熠 +熨 +熬 +熱 +熵 +熹 +熾 +燁 +燃 +燄 +燈 +燉 +燊 +燎 +燒 +燔 +燕 +燙 +燜 +營 +燥 +燦 +燧 +燭 +燮 +燴 +燻 +燼 +燿 +爆 +爍 +爐 +爛 +爪 +爬 +爭 +爰 +爱 +爲 +爵 +父 +爷 +爸 +爹 +爺 +爻 +爽 +爾 +牆 +片 +版 +牌 +牍 +牒 +牙 +牛 +牝 +牟 +牠 +牡 +牢 +牦 +牧 +物 +牯 +牲 +牴 +牵 +特 +牺 +牽 +犀 +犁 +犄 +犊 +犍 +犒 +犢 +犧 +犬 +犯 +状 +犷 +犸 +犹 +狀 +狂 +狄 +狈 +狎 +狐 +狒 +狗 +狙 +狞 +狠 +狡 +狩 +独 +狭 +狮 +狰 +狱 +狸 +狹 +狼 +狽 +猎 +猕 +猖 +猗 +猙 +猛 +猜 +猝 +猥 +猩 +猪 +猫 +猬 +献 +猴 +猶 +猷 +猾 +猿 +獄 +獅 +獎 +獐 +獒 +獗 +獠 +獣 +獨 +獭 +獰 +獲 +獵 +獷 +獸 +獺 +獻 +獼 +獾 +玄 +率 +玉 +王 +玑 +玖 +玛 +玟 +玠 +玥 +玩 +玫 +玮 +环 +现 +玲 +玳 +玷 +玺 +玻 +珀 +珂 +珅 +珈 +珉 +珊 +珍 +珏 +珐 +珑 +珙 +珞 +珠 +珣 +珥 +珩 +珪 +班 +珮 +珲 +珺 +現 +球 +琅 +理 +琇 +琉 +琊 +琍 +琏 +琐 +琛 +琢 +琥 +琦 +琨 +琪 +琬 +琮 +琰 +琲 +琳 +琴 +琵 +琶 +琺 +琼 +瑀 +瑁 +瑄 +瑋 +瑕 +瑗 +瑙 +瑚 +瑛 +瑜 +瑞 +瑟 +瑠 +瑣 +瑤 +瑩 +瑪 +瑯 +瑰 +瑶 +瑾 +璀 +璁 +璃 +璇 +璉 +璋 +璎 +璐 +璜 +璞 +璟 +璧 +璨 +環 +璽 +璿 +瓊 +瓏 +瓒 +瓜 +瓢 +瓣 +瓤 +瓦 +瓮 +瓯 +瓴 +瓶 +瓷 +甄 +甌 +甕 +甘 +甙 +甚 +甜 +生 +產 +産 +甥 +甦 +用 +甩 +甫 +甬 +甭 +甯 +田 +由 +甲 +申 +电 +男 +甸 +町 +画 +甾 +畀 +畅 +界 +畏 +畑 +畔 +留 +畜 +畝 +畢 +略 +畦 +番 +畫 +異 +畲 +畳 +畴 +當 +畸 +畹 +畿 +疆 +疇 +疊 +疏 +疑 +疔 +疖 +疗 +疙 +疚 +疝 +疟 +疡 +疣 +疤 +疥 +疫 +疮 +疯 +疱 +疲 +疳 +疵 +疸 +疹 +疼 +疽 +疾 +痂 +病 +症 +痈 +痉 +痊 +痍 +痒 +痔 +痕 +痘 +痙 +痛 +痞 +痠 +痢 +痣 +痤 +痧 +痨 +痪 +痫 +痰 +痱 +痴 +痹 +痺 +痼 +痿 +瘀 +瘁 +瘋 +瘍 +瘓 +瘘 +瘙 +瘟 +瘠 +瘡 +瘢 +瘤 +瘦 +瘧 +瘩 +瘪 +瘫 +瘴 +瘸 +瘾 +療 +癇 +癌 +癒 +癖 +癜 +癞 +癡 +癢 +癣 +癥 +癫 +癬 +癮 +癱 +癲 +癸 +発 +登 +發 +白 +百 +皂 +的 +皆 +皇 +皈 +皋 +皎 +皑 +皓 +皖 +皙 +皚 +皮 +皰 +皱 +皴 +皺 +皿 +盂 +盃 +盅 +盆 +盈 +益 +盎 +盏 +盐 +监 +盒 +盔 +盖 +盗 +盘 +盛 +盜 +盞 +盟 +盡 +監 +盤 +盥 +盧 +盪 +目 +盯 +盱 +盲 +直 +相 +盹 +盼 +盾 +省 +眈 +眉 +看 +県 +眙 +眞 +真 +眠 +眦 +眨 +眩 +眯 +眶 +眷 +眸 +眺 +眼 +眾 +着 +睁 +睇 +睏 +睐 +睑 +睛 +睜 +睞 +睡 +睢 +督 +睥 +睦 +睨 +睪 +睫 +睬 +睹 +睽 +睾 +睿 +瞄 +瞅 +瞇 +瞋 +瞌 +瞎 +瞑 +瞒 +瞓 +瞞 +瞟 +瞠 +瞥 +瞧 +瞩 +瞪 +瞬 +瞭 +瞰 +瞳 +瞻 +瞼 +瞿 +矇 +矍 +矗 +矚 +矛 +矜 +矢 +矣 +知 +矩 +矫 +短 +矮 +矯 +石 +矶 +矽 +矾 +矿 +码 +砂 +砌 +砍 +砒 +研 +砖 +砗 +砚 +砝 +砣 +砥 +砧 +砭 +砰 +砲 +破 +砷 +砸 +砺 +砼 +砾 +础 +硅 +硐 +硒 +硕 +硝 +硫 +硬 +确 +硯 +硼 +碁 +碇 +碉 +碌 +碍 +碎 +碑 +碓 +碗 +碘 +碚 +碛 +碟 +碣 +碧 +碩 +碰 +碱 +碳 +碴 +確 +碼 +碾 +磁 +磅 +磊 +磋 +磐 +磕 +磚 +磡 +磨 +磬 +磯 +磲 +磷 +磺 +礁 +礎 +礙 +礡 +礦 +礪 +礫 +礴 +示 +礼 +社 +祀 +祁 +祂 +祇 +祈 +祉 +祎 +祐 +祕 +祖 +祗 +祚 +祛 +祜 +祝 +神 +祟 +祠 +祢 +祥 +票 +祭 +祯 +祷 +祸 +祺 +祿 +禀 +禁 +禄 +禅 +禍 +禎 +福 +禛 +禦 +禧 +禪 +禮 +禱 +禹 +禺 +离 +禽 +禾 +禿 +秀 +私 +秃 +秆 +秉 +秋 +种 +科 +秒 +秘 +租 +秣 +秤 +秦 +秧 +秩 +秭 +积 +称 +秸 +移 +秽 +稀 +稅 +程 +稍 +税 +稔 +稗 +稚 +稜 +稞 +稟 +稠 +稣 +種 +稱 +稲 +稳 +稷 +稹 +稻 +稼 +稽 +稿 +穀 +穂 +穆 +穌 +積 +穎 +穗 +穢 +穩 +穫 +穴 +究 +穷 +穹 +空 +穿 +突 +窃 +窄 +窈 +窍 +窑 +窒 +窓 +窕 +窖 +窗 +窘 +窜 +窝 +窟 +窠 +窥 +窦 +窨 +窩 +窪 +窮 +窯 +窺 +窿 +竄 +竅 +竇 +竊 +立 +竖 +站 +竜 +竞 +竟 +章 +竣 +童 +竭 +端 +競 +竹 +竺 +竽 +竿 +笃 +笆 +笈 +笋 +笏 +笑 +笔 +笙 +笛 +笞 +笠 +符 +笨 +第 +笹 +笺 +笼 +筆 +等 +筊 +筋 +筍 +筏 +筐 +筑 +筒 +答 +策 +筛 +筝 +筠 +筱 +筲 +筵 +筷 +筹 +签 +简 +箇 +箋 +箍 +箏 +箐 +箔 +箕 +算 +箝 +管 +箩 +箫 +箭 +箱 +箴 +箸 +節 +篁 +範 +篆 +篇 +築 +篑 +篓 +篙 +篝 +篠 +篡 +篤 +篩 +篪 +篮 +篱 +篷 +簇 +簌 +簍 +簡 +簦 +簧 
+簪 +簫 +簷 +簸 +簽 +簾 +簿 +籁 +籃 +籌 +籍 +籐 +籟 +籠 +籤 +籬 +籮 +籲 +米 +类 +籼 +籽 +粄 +粉 +粑 +粒 +粕 +粗 +粘 +粟 +粤 +粥 +粧 +粪 +粮 +粱 +粲 +粳 +粵 +粹 +粼 +粽 +精 +粿 +糅 +糊 +糍 +糕 +糖 +糗 +糙 +糜 +糞 +糟 +糠 +糧 +糬 +糯 +糰 +糸 +系 +糾 +紀 +紂 +約 +紅 +紉 +紊 +紋 +納 +紐 +紓 +純 +紗 +紘 +紙 +級 +紛 +紜 +素 +紡 +索 +紧 +紫 +紮 +累 +細 +紳 +紹 +紺 +終 +絃 +組 +絆 +経 +結 +絕 +絞 +絡 +絢 +給 +絨 +絮 +統 +絲 +絳 +絵 +絶 +絹 +綁 +綏 +綑 +經 +継 +続 +綜 +綠 +綢 +綦 +綫 +綬 +維 +綱 +網 +綴 +綵 +綸 +綺 +綻 +綽 +綾 +綿 +緊 +緋 +総 +緑 +緒 +緘 +線 +緝 +緞 +締 +緣 +編 +緩 +緬 +緯 +練 +緹 +緻 +縁 +縄 +縈 +縛 +縝 +縣 +縫 +縮 +縱 +縴 +縷 +總 +績 +繁 +繃 +繆 +繇 +繋 +織 +繕 +繚 +繞 +繡 +繩 +繪 +繫 +繭 +繳 +繹 +繼 +繽 +纂 +續 +纍 +纏 +纓 +纔 +纖 +纜 +纠 +红 +纣 +纤 +约 +级 +纨 +纪 +纫 +纬 +纭 +纯 +纰 +纱 +纲 +纳 +纵 +纶 +纷 +纸 +纹 +纺 +纽 +纾 +线 +绀 +练 +组 +绅 +细 +织 +终 +绊 +绍 +绎 +经 +绑 +绒 +结 +绔 +绕 +绘 +给 +绚 +绛 +络 +绝 +绞 +统 +绡 +绢 +绣 +绥 +绦 +继 +绩 +绪 +绫 +续 +绮 +绯 +绰 +绳 +维 +绵 +绶 +绷 +绸 +绻 +综 +绽 +绾 +绿 +缀 +缄 +缅 +缆 +缇 +缈 +缉 +缎 +缓 +缔 +缕 +编 +缘 +缙 +缚 +缜 +缝 +缠 +缢 +缤 +缥 +缨 +缩 +缪 +缭 +缮 +缰 +缱 +缴 +缸 +缺 +缽 +罂 +罄 +罌 +罐 +网 +罔 +罕 +罗 +罚 +罡 +罢 +罩 +罪 +置 +罰 +署 +罵 +罷 +罹 +羁 +羅 +羈 +羊 +羌 +美 +羔 +羚 +羞 +羟 +羡 +羣 +群 +羥 +羧 +羨 +義 +羯 +羲 +羸 +羹 +羽 +羿 +翁 +翅 +翊 +翌 +翎 +習 +翔 +翘 +翟 +翠 +翡 +翦 +翩 +翰 +翱 +翳 +翹 +翻 +翼 +耀 +老 +考 +耄 +者 +耆 +耋 +而 +耍 +耐 +耒 +耕 +耗 +耘 +耙 +耦 +耨 +耳 +耶 +耷 +耸 +耻 +耽 +耿 +聂 +聆 +聊 +聋 +职 +聒 +联 +聖 +聘 +聚 +聞 +聪 +聯 +聰 +聲 +聳 +聴 +聶 +職 +聽 +聾 +聿 +肃 +肄 +肅 +肆 +肇 +肉 +肋 +肌 +肏 +肓 +肖 +肘 +肚 +肛 +肝 +肠 +股 +肢 +肤 +肥 +肩 +肪 +肮 +肯 +肱 +育 +肴 +肺 +肽 +肾 +肿 +胀 +胁 +胃 +胄 +胆 +背 +胍 +胎 +胖 +胚 +胛 +胜 +胝 +胞 +胡 +胤 +胥 +胧 +胫 +胭 +胯 +胰 +胱 +胳 +胴 +胶 +胸 +胺 +能 +脂 +脅 +脆 +脇 +脈 +脉 +脊 +脍 +脏 +脐 +脑 +脓 +脖 +脘 +脚 +脛 +脣 +脩 +脫 +脯 +脱 +脲 +脳 +脸 +脹 +脾 +腆 +腈 +腊 +腋 +腌 +腎 +腐 +腑 +腓 +腔 +腕 +腥 +腦 +腩 +腫 +腭 +腮 +腰 +腱 +腳 +腴 +腸 +腹 +腺 +腻 +腼 +腾 +腿 +膀 +膈 +膊 +膏 +膑 +膘 +膚 +膛 +膜 +膝 +膠 +膦 +膨 +膩 +膳 +膺 +膻 +膽 +膾 +膿 +臀 +臂 +臃 +臆 +臉 +臊 +臍 +臓 +臘 +臟 +臣 +臥 +臧 +臨 +自 +臬 +臭 +至 +致 +臺 +臻 +臼 +臾 +舀 +舂 +舅 +舆 +與 +興 +舉 +舊 +舌 +舍 +舎 +舐 +舒 +舔 +舖 +舗 +舛 +舜 +舞 +舟 +航 +舫 +般 +舰 +舱 +舵 +舶 +舷 +舸 +船 +舺 +舾 +艇 +艋 +艘 +艙 +艦 +艮 +良 +艰 +艱 +色 +艳 +艷 +艹 +艺 +艾 +节 +芃 +芈 +芊 +芋 +芍 +芎 +芒 +芙 +芜 +芝 +芡 +芥 +芦 +芩 +芪 +芫 +芬 +芭 +芮 +芯 +花 +芳 +芷 +芸 +芹 +芻 +芽 +芾 +苁 +苄 +苇 +苋 +苍 +苏 +苑 +苒 +苓 +苔 +苕 +苗 +苛 +苜 +苞 +苟 +苡 +苣 +若 +苦 +苫 +苯 +英 +苷 +苹 +苻 +茁 +茂 +范 +茄 +茅 +茉 +茎 +茏 +茗 +茜 +茧 +茨 +茫 +茬 +茭 +茯 +茱 +茲 +茴 +茵 +茶 +茸 +茹 +茼 +荀 +荃 +荆 +草 +荊 +荏 +荐 +荒 +荔 +荖 +荘 +荚 +荞 +荟 +荠 +荡 +荣 +荤 +荥 +荧 +荨 +荪 +荫 +药 +荳 +荷 +荸 +荻 +荼 +荽 +莅 +莆 +莉 +莊 +莎 +莒 +莓 +莖 +莘 +莞 +莠 +莢 +莧 +莪 +莫 +莱 +莲 +莴 +获 +莹 +莺 +莽 +莿 +菀 +菁 +菅 +菇 +菈 +菊 +菌 +菏 +菓 +菖 +菘 +菜 +菟 +菠 +菡 +菩 +華 +菱 +菲 +菸 +菽 +萁 +萃 +萄 +萊 +萋 +萌 +萍 +萎 +萘 +萝 +萤 +营 +萦 +萧 +萨 +萩 +萬 +萱 +萵 +萸 +萼 +落 +葆 +葉 +著 +葚 +葛 +葡 +董 +葦 +葩 +葫 +葬 +葭 +葯 +葱 +葳 +葵 +葷 +葺 +蒂 +蒋 +蒐 +蒔 +蒙 +蒜 +蒞 +蒟 +蒡 +蒨 +蒲 +蒸 +蒹 +蒻 +蒼 +蒿 +蓁 +蓄 +蓆 +蓉 +蓋 +蓑 +蓓 +蓖 +蓝 +蓟 +蓦 +蓬 +蓮 +蓼 +蓿 +蔑 +蔓 +蔔 +蔗 +蔘 +蔚 +蔡 +蔣 +蔥 +蔫 +蔬 +蔭 +蔵 +蔷 +蔺 +蔻 +蔼 +蔽 +蕁 +蕃 +蕈 +蕉 +蕊 +蕎 +蕙 +蕤 +蕨 +蕩 +蕪 +蕭 +蕲 +蕴 +蕻 +蕾 +薄 +薅 +薇 +薈 +薊 +薏 +薑 +薔 +薙 +薛 +薦 +薨 +薩 +薪 +薬 +薯 +薰 +薹 +藉 +藍 +藏 +藐 +藓 +藕 +藜 +藝 +藤 +藥 +藩 +藹 +藻 +藿 +蘆 +蘇 +蘊 +蘋 +蘑 +蘚 +蘭 +蘸 +蘼 +蘿 +虎 +虏 +虐 +虑 +虔 +處 +虚 +虛 +虜 +虞 +號 +虢 +虧 +虫 +虬 +虱 +虹 +虻 +虽 +虾 +蚀 +蚁 +蚂 +蚊 +蚌 +蚓 +蚕 +蚜 +蚝 +蚣 +蚤 +蚩 +蚪 +蚯 +蚱 +蚵 +蛀 +蛆 +蛇 +蛊 +蛋 +蛎 +蛐 +蛔 +蛙 +蛛 +蛟 +蛤 +蛭 +蛮 +蛰 +蛳 +蛹 +蛻 +蛾 +蜀 +蜂 +蜃 +蜆 +蜇 +蜈 +蜊 +蜍 +蜒 +蜓 +蜕 +蜗 +蜘 +蜚 +蜜 +蜡 +蜢 +蜥 +蜱 +蜴 +蜷 +蜻 +蜿 +蝇 +蝈 +蝉 +蝌 +蝎 +蝕 +蝗 +蝙 +蝟 +蝠 +蝦 +蝨 +蝴 +蝶 +蝸 +蝼 +螂 +螃 +融 +螞 +螢 +螨 +螯 +螳 +螺 +蟀 +蟄 +蟆 +蟋 +蟎 +蟑 +蟒 +蟠 +蟬 +蟲 +蟹 +蟻 +蟾 +蠅 +蠍 +蠔 +蠕 +蠛 +蠟 +蠡 +蠢 +蠣 +蠱 +蠶 +蠹 +蠻 +血 +衄 +衅 +衆 +行 +衍 +術 +衔 +街 +衙 +衛 +衝 +衞 +衡 +衢 +衣 +补 +表 +衩 +衫 +衬 +衮 +衰 +衲 +衷 +衹 +衾 +衿 +袁 +袂 +袄 +袅 +袈 +袋 +袍 +袒 +袖 +袜 +袞 +袤 +袪 +被 +袭 +袱 +裁 +裂 +装 +裆 +裊 +裏 +裔 +裕 +裘 +裙 +補 +裝 +裟 +裡 +裤 +裨 +裱 +裳 +裴 +裸 +裹 +製 +裾 +褂 +複 +褐 +褒 +褓 +褔 +褚 +褥 +褪 +褫 +褲 +褶 +褻 +襁 +襄 +襟 +襠 +襪 +襬 +襯 +襲 +西 +要 +覃 +覆 +覇 +見 +規 +覓 +視 +覚 +覦 +覧 +親 +覬 +観 +覷 +覺 +覽 +觀 +见 +观 +规 +觅 +视 +览 +觉 +觊 +觎 +觐 +觑 +角 +觞 +解 +觥 +触 +觸 +言 +訂 +計 +訊 +討 +訓 +訕 +訖 +託 +記 +訛 +訝 +訟 +訣 +訥 +訪 +設 +許 +訳 +訴 +訶 +診 +註 +証 +詆 +詐 +詔 
+評 +詛 +詞 +詠 +詡 +詢 +詣 +試 +詩 +詫 +詬 +詭 +詮 +詰 +話 +該 +詳 +詹 +詼 +誅 +誇 +誉 +誌 +認 +誓 +誕 +誘 +語 +誠 +誡 +誣 +誤 +誥 +誦 +誨 +說 +説 +読 +誰 +課 +誹 +誼 +調 +諄 +談 +請 +諏 +諒 +論 +諗 +諜 +諡 +諦 +諧 +諫 +諭 +諮 +諱 +諳 +諷 +諸 +諺 +諾 +謀 +謁 +謂 +謄 +謊 +謎 +謐 +謔 +謗 +謙 +講 +謝 +謠 +謨 +謬 +謹 +謾 +譁 +證 +譎 +譏 +識 +譙 +譚 +譜 +警 +譬 +譯 +議 +譲 +譴 +護 +譽 +讀 +變 +讓 +讚 +讞 +计 +订 +认 +讥 +讧 +讨 +让 +讪 +讫 +训 +议 +讯 +记 +讲 +讳 +讴 +讶 +讷 +许 +讹 +论 +讼 +讽 +设 +访 +诀 +证 +诃 +评 +诅 +识 +诈 +诉 +诊 +诋 +词 +诏 +译 +试 +诗 +诘 +诙 +诚 +诛 +话 +诞 +诟 +诠 +诡 +询 +诣 +诤 +该 +详 +诧 +诩 +诫 +诬 +语 +误 +诰 +诱 +诲 +说 +诵 +诶 +请 +诸 +诺 +读 +诽 +课 +诿 +谀 +谁 +调 +谄 +谅 +谆 +谈 +谊 +谋 +谌 +谍 +谎 +谏 +谐 +谑 +谒 +谓 +谔 +谕 +谗 +谘 +谙 +谚 +谛 +谜 +谟 +谢 +谣 +谤 +谥 +谦 +谧 +谨 +谩 +谪 +谬 +谭 +谯 +谱 +谲 +谴 +谶 +谷 +豁 +豆 +豇 +豈 +豉 +豊 +豌 +豎 +豐 +豔 +豚 +象 +豢 +豪 +豫 +豬 +豹 +豺 +貂 +貅 +貌 +貓 +貔 +貘 +貝 +貞 +負 +財 +貢 +貧 +貨 +販 +貪 +貫 +責 +貯 +貰 +貳 +貴 +貶 +買 +貸 +費 +貼 +貽 +貿 +賀 +賁 +賂 +賃 +賄 +資 +賈 +賊 +賑 +賓 +賜 +賞 +賠 +賡 +賢 +賣 +賤 +賦 +質 +賬 +賭 +賴 +賺 +購 +賽 +贅 +贈 +贊 +贍 +贏 +贓 +贖 +贛 +贝 +贞 +负 +贡 +财 +责 +贤 +败 +账 +货 +质 +贩 +贪 +贫 +贬 +购 +贮 +贯 +贰 +贱 +贲 +贴 +贵 +贷 +贸 +费 +贺 +贻 +贼 +贾 +贿 +赁 +赂 +赃 +资 +赅 +赈 +赊 +赋 +赌 +赎 +赏 +赐 +赓 +赔 +赖 +赘 +赚 +赛 +赝 +赞 +赠 +赡 +赢 +赣 +赤 +赦 +赧 +赫 +赭 +走 +赳 +赴 +赵 +赶 +起 +趁 +超 +越 +趋 +趕 +趙 +趟 +趣 +趨 +足 +趴 +趵 +趸 +趺 +趾 +跃 +跄 +跆 +跋 +跌 +跎 +跑 +跖 +跚 +跛 +距 +跟 +跡 +跤 +跨 +跩 +跪 +路 +跳 +践 +跷 +跹 +跺 +跻 +踉 +踊 +踌 +踏 +踐 +踝 +踞 +踟 +踢 +踩 +踪 +踮 +踱 +踴 +踵 +踹 +蹂 +蹄 +蹇 +蹈 +蹉 +蹊 +蹋 +蹑 +蹒 +蹙 +蹟 +蹣 +蹤 +蹦 +蹩 +蹬 +蹭 +蹲 +蹴 +蹶 +蹺 +蹼 +蹿 +躁 +躇 +躉 +躊 +躋 +躍 +躏 +躪 +身 +躬 +躯 +躲 +躺 +軀 +車 +軋 +軌 +軍 +軒 +軟 +転 +軸 +軼 +軽 +軾 +較 +載 +輒 +輓 +輔 +輕 +輛 +輝 +輟 +輩 +輪 +輯 +輸 +輻 +輾 +輿 +轄 +轅 +轆 +轉 +轍 +轎 +轟 +车 +轧 +轨 +轩 +转 +轭 +轮 +软 +轰 +轲 +轴 +轶 +轻 +轼 +载 +轿 +较 +辄 +辅 +辆 +辇 +辈 +辉 +辊 +辍 +辐 +辑 +输 +辕 +辖 +辗 +辘 +辙 +辛 +辜 +辞 +辟 +辣 +辦 +辨 +辩 +辫 +辭 +辮 +辯 +辰 +辱 +農 +边 +辺 +辻 +込 +辽 +达 +迁 +迂 +迄 +迅 +过 +迈 +迎 +运 +近 +返 +还 +这 +进 +远 +违 +连 +迟 +迢 +迤 +迥 +迦 +迩 +迪 +迫 +迭 +述 +迴 +迷 +迸 +迹 +迺 +追 +退 +送 +适 +逃 +逅 +逆 +选 +逊 +逍 +透 +逐 +递 +途 +逕 +逗 +這 +通 +逛 +逝 +逞 +速 +造 +逢 +連 +逮 +週 +進 +逵 +逶 +逸 +逻 +逼 +逾 +遁 +遂 +遅 +遇 +遊 +運 +遍 +過 +遏 +遐 +遑 +遒 +道 +達 +違 +遗 +遙 +遛 +遜 +遞 +遠 +遢 +遣 +遥 +遨 +適 +遭 +遮 +遲 +遴 +遵 +遶 +遷 +選 +遺 +遼 +遽 +避 +邀 +邁 +邂 +邃 +還 +邇 +邈 +邊 +邋 +邏 +邑 +邓 +邕 +邛 +邝 +邢 +那 +邦 +邨 +邪 +邬 +邮 +邯 +邰 +邱 +邳 +邵 +邸 +邹 +邺 +邻 +郁 +郅 +郊 +郎 +郑 +郜 +郝 +郡 +郢 +郤 +郦 +郧 +部 +郫 +郭 +郴 +郵 +郷 +郸 +都 +鄂 +鄉 +鄒 +鄔 +鄙 +鄞 +鄢 +鄧 +鄭 +鄰 +鄱 +鄲 +鄺 +酉 +酊 +酋 +酌 +配 +酐 +酒 +酗 +酚 +酝 +酢 +酣 +酥 +酩 +酪 +酬 +酮 +酯 +酰 +酱 +酵 +酶 +酷 +酸 +酿 +醃 +醇 +醉 +醋 +醍 +醐 +醒 +醚 +醛 +醜 +醞 +醣 +醪 +醫 +醬 +醮 +醯 +醴 +醺 +釀 +釁 +采 +釉 +释 +釋 +里 +重 +野 +量 +釐 +金 +釗 +釘 +釜 +針 +釣 +釦 +釧 +釵 +鈀 +鈉 +鈍 +鈎 +鈔 +鈕 +鈞 +鈣 +鈦 +鈪 +鈴 +鈺 +鈾 +鉀 +鉄 +鉅 +鉉 +鉑 +鉗 +鉚 +鉛 +鉤 +鉴 +鉻 +銀 +銃 +銅 +銑 +銓 +銖 +銘 +銜 +銬 +銭 +銮 +銳 +銷 +銹 +鋁 +鋅 +鋒 +鋤 +鋪 +鋰 +鋸 +鋼 +錄 +錐 +錘 +錚 +錠 +錢 +錦 +錨 +錫 +錮 +錯 +録 +錳 +錶 +鍊 +鍋 +鍍 +鍛 +鍥 +鍰 +鍵 +鍺 +鍾 +鎂 +鎊 +鎌 +鎏 +鎔 +鎖 +鎗 +鎚 +鎧 +鎬 +鎮 +鎳 +鏈 +鏖 +鏗 +鏘 +鏞 +鏟 +鏡 +鏢 +鏤 +鏽 +鐘 +鐮 +鐲 +鐳 +鐵 +鐸 +鐺 +鑄 +鑊 +鑑 +鑒 +鑣 +鑫 +鑰 +鑲 +鑼 +鑽 +鑾 +鑿 +针 +钉 +钊 +钎 +钏 +钒 +钓 +钗 +钙 +钛 +钜 +钝 +钞 +钟 +钠 +钡 +钢 +钣 +钤 +钥 +钦 +钧 +钨 +钩 +钮 +钯 +钰 +钱 +钳 +钴 +钵 +钺 +钻 +钼 +钾 +钿 +铀 +铁 +铂 +铃 +铄 +铅 +铆 +铉 +铎 +铐 +铛 +铜 +铝 +铠 +铡 +铢 +铣 +铤 +铨 +铩 +铬 +铭 +铮 +铰 +铲 +铵 +银 +铸 +铺 +链 +铿 +销 +锁 +锂 +锄 +锅 +锆 +锈 +锉 +锋 +锌 +锏 +锐 +锑 +错 +锚 +锟 +锡 +锢 +锣 +锤 +锥 +锦 +锭 +键 +锯 +锰 +锲 +锵 +锹 +锺 +锻 +镀 +镁 +镂 +镇 +镉 +镌 +镍 +镐 +镑 +镕 +镖 +镗 +镛 +镜 +镣 +镭 +镯 +镰 +镳 +镶 +長 +长 +門 +閃 +閉 +開 +閎 +閏 +閑 +閒 +間 +閔 +閘 +閡 +関 +閣 +閥 +閨 +閩 +閱 +閲 +閹 +閻 +閾 +闆 +闇 +闊 +闌 +闍 +闔 +闕 +闖 +闘 +關 +闡 +闢 +门 +闪 +闫 +闭 +问 +闯 +闰 +闲 +间 +闵 +闷 +闸 +闹 +闺 +闻 +闽 +闾 +阀 +阁 +阂 +阅 +阆 +阇 +阈 +阉 +阎 +阐 +阑 +阔 +阕 +阖 +阙 +阚 +阜 +队 +阡 +阪 +阮 +阱 +防 +阳 +阴 +阵 +阶 +阻 +阿 +陀 +陂 +附 +际 +陆 +陇 +陈 +陋 +陌 +降 +限 +陕 +陛 +陝 +陞 +陟 +陡 +院 +陣 +除 +陨 +险 +陪 +陰 +陲 +陳 +陵 +陶 +陷 +陸 +険 +陽 +隅 +隆 +隈 +隊 +隋 +隍 +階 +随 +隐 +隔 +隕 +隘 +隙 +際 +障 +隠 +隣 +隧 +隨 +險 +隱 +隴 +隶 +隸 +隻 +隼 +隽 +难 +雀 +雁 +雄 +雅 +集 +雇 +雉 +雋 +雌 +雍 +雎 +雏 +雑 +雒 +雕 +雖 +雙 +雛 +雜 +雞 +離 +難 +雨 +雪 +雯 +雰 +雲 +雳 +零 +雷 +雹 +電 +雾 +需 +霁 +霄 +霆 +震 +霈 +霉 +霊 +霍 
+霎 +霏 +霑 +霓 +霖 +霜 +霞 +霧 +霭 +霰 +露 +霸 +霹 +霽 +霾 +靂 +靄 +靈 +青 +靓 +靖 +静 +靚 +靛 +靜 +非 +靠 +靡 +面 +靥 +靦 +革 +靳 +靴 +靶 +靼 +鞅 +鞋 +鞍 +鞏 +鞑 +鞘 +鞠 +鞣 +鞦 +鞭 +韆 +韋 +韌 +韓 +韜 +韦 +韧 +韩 +韬 +韭 +音 +韵 +韶 +韻 +響 +頁 +頂 +頃 +項 +順 +須 +頌 +預 +頑 +頒 +頓 +頗 +領 +頜 +頡 +頤 +頫 +頭 +頰 +頷 +頸 +頹 +頻 +頼 +顆 +題 +額 +顎 +顏 +顔 +願 +顛 +類 +顧 +顫 +顯 +顱 +顴 +页 +顶 +顷 +项 +顺 +须 +顼 +顽 +顾 +顿 +颁 +颂 +预 +颅 +领 +颇 +颈 +颉 +颊 +颌 +颍 +颐 +频 +颓 +颔 +颖 +颗 +题 +颚 +颛 +颜 +额 +颞 +颠 +颡 +颢 +颤 +颦 +颧 +風 +颯 +颱 +颳 +颶 +颼 +飄 +飆 +风 +飒 +飓 +飕 +飘 +飙 +飚 +飛 +飞 +食 +飢 +飨 +飩 +飪 +飯 +飲 +飼 +飽 +飾 +餃 +餅 +餉 +養 +餌 +餐 +餒 +餓 +餘 +餚 +餛 +餞 +餡 +館 +餮 +餵 +餾 +饅 +饈 +饋 +饌 +饍 +饑 +饒 +饕 +饗 +饞 +饥 +饨 +饪 +饬 +饭 +饮 +饯 +饰 +饱 +饲 +饴 +饵 +饶 +饷 +饺 +饼 +饽 +饿 +馀 +馁 +馄 +馅 +馆 +馈 +馋 +馍 +馏 +馒 +馔 +首 +馗 +香 +馥 +馨 +馬 +馭 +馮 +馳 +馴 +駁 +駄 +駅 +駆 +駐 +駒 +駕 +駛 +駝 +駭 +駱 +駿 +騁 +騎 +騏 +験 +騙 +騨 +騰 +騷 +驀 +驅 +驊 +驍 +驒 +驕 +驗 +驚 +驛 +驟 +驢 +驥 +马 +驭 +驮 +驯 +驰 +驱 +驳 +驴 +驶 +驷 +驸 +驹 +驻 +驼 +驾 +驿 +骁 +骂 +骄 +骅 +骆 +骇 +骈 +骊 +骋 +验 +骏 +骐 +骑 +骗 +骚 +骛 +骜 +骞 +骠 +骡 +骤 +骥 +骧 +骨 +骯 +骰 +骶 +骷 +骸 +骼 +髂 +髅 +髋 +髏 +髒 +髓 +體 +髖 +高 +髦 +髪 +髮 +髯 +髻 +鬃 +鬆 +鬍 +鬓 +鬚 +鬟 +鬢 +鬣 +鬥 +鬧 +鬱 +鬼 +魁 +魂 +魄 +魅 +魇 +魍 +魏 +魔 +魘 +魚 +魯 +魷 +鮑 +鮨 +鮪 +鮭 +鮮 +鯉 +鯊 +鯖 +鯛 +鯨 +鯰 +鯽 +鰍 +鰓 +鰭 +鰲 +鰻 +鰾 +鱈 +鱉 +鱔 +鱗 +鱷 +鱸 +鱼 +鱿 +鲁 +鲈 +鲍 +鲑 +鲛 +鲜 +鲟 +鲢 +鲤 +鲨 +鲫 +鲱 +鲲 +鲶 +鲷 +鲸 +鳃 +鳄 +鳅 +鳌 +鳍 +鳕 +鳖 +鳗 +鳝 +鳞 +鳥 +鳩 +鳳 +鳴 +鳶 +鴉 +鴕 +鴛 +鴦 +鴨 +鴻 +鴿 +鵑 +鵜 +鵝 +鵡 +鵬 +鵰 +鵲 +鶘 +鶩 +鶯 +鶴 +鷗 +鷲 +鷹 +鷺 +鸚 +鸞 +鸟 +鸠 +鸡 +鸢 +鸣 +鸥 +鸦 +鸨 +鸪 +鸭 +鸯 +鸳 +鸵 +鸽 +鸾 +鸿 +鹂 +鹃 +鹄 +鹅 +鹈 +鹉 +鹊 +鹌 +鹏 +鹑 +鹕 +鹘 +鹜 +鹞 +鹤 +鹦 +鹧 +鹫 +鹭 +鹰 +鹳 +鹵 +鹹 +鹼 +鹽 +鹿 +麂 +麋 +麒 +麓 +麗 +麝 +麟 +麥 +麦 +麩 +麴 +麵 +麸 +麺 +麻 +麼 +麽 +麾 +黃 +黄 +黍 +黎 +黏 +黑 +黒 +黔 +默 +黛 +黜 +黝 +點 +黠 +黨 +黯 +黴 +鼋 +鼎 +鼐 +鼓 +鼠 +鼬 +鼹 +鼻 +鼾 +齁 +齊 +齋 +齐 +齒 +齡 +齢 +齣 +齦 +齿 +龄 +龅 +龈 +龊 +龋 +龌 +龍 +龐 +龔 +龕 +龙 +龚 +龛 +龜 +龟 +︰ +︱ +︶ +︿ +﹁ +﹂ +﹍ +﹏ +﹐ +﹑ +﹒ +﹔ +﹕ +﹖ +﹗ +﹙ +﹚ +﹝ +﹞ +﹡ +﹣ +! +" +# +$ +% +& +' +( +) +* ++ +, +- +. +/ +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +: +; +< += +> +? +@ +[ +\ +] +^ +_ +` +a +b +c +d +e +f +g +h +i +j +k +l +m +n +o +p +q +r +s +t +u +v +w +x +y +z +{ +| +} +~ +。 +「 +」 +、 +・ +ッ +ー +イ +ク +シ +ス +ト +ノ +フ +ラ +ル +ン +゙ +゚ + ̄ +¥ +👍 +🔥 +😂 +😎 +... 
+yam +10 +2017 +12 +11 +2016 +20 +30 +15 +06 +lofter +##s +2015 +by +16 +14 +18 +13 +24 +17 +2014 +21 +##0 +22 +19 +25 +23 +com +100 +00 +05 +2013 +##a +03 +09 +08 +28 +##2 +50 +01 +04 +##1 +27 +02 +2012 +##3 +26 +##e +07 +##8 +##5 +##6 +##4 +##9 +##7 +29 +2011 +40 +##t +2010 +##o +##d +##i +2009 +##n +app +www +the +##m +31 +##c +##l +##y +##r +##g +2008 +60 +http +200 +qq +##p +80 +##f +google +pixnet +90 +cookies +tripadvisor +500 +##er +##k +35 +##h +facebook +2007 +2000 +70 +##b +of +##x +##u +45 +300 +iphone +32 +1000 +2006 +48 +ip +36 +in +38 +3d +##w +##ing +55 +ctrip +##on +##v +33 +##の +to +34 +400 +id +2005 +it +37 +windows +llc +top +99 +42 +39 +000 +led +at +##an +41 +51 +52 +46 +49 +43 +53 +44 +##z +android +58 +and +59 +2004 +56 +vr +##か +5000 +2003 +47 +blogthis +twitter +54 +##le +150 +ok +2018 +57 +75 +cn +no +ios +##in +##mm +##00 +800 +on +te +3000 +65 +2001 +360 +95 +ig +lv +120 +##ng +##を +##us +##に +pc +てす +── +600 +##te +85 +2002 +88 +##ed +html +ncc +wifi +email +64 +blog +is +##10 +##て +mail +online +##al +dvd +##ic +studio +##は +##℃ +##ia +##と +line +vip +72 +##q +98 +##ce +##en +for +##is +##ra +##es +##j +usb +net +cp +1999 +asia +4g +##cm +diy +new +3c +##お +ta +66 +language +vs +apple +tw +86 +web +##ne +ipad +62 +you +##re +101 +68 +##tion +ps +de +bt +pony +atm +##2017 +1998 +67 +##ch +ceo +##or +go +##na +av +pro +cafe +96 +pinterest +97 +63 +pixstyleme3c +##ta +more +said +##2016 +1997 +mp3 +700 +##ll +nba +jun +##20 +92 +tv +1995 +pm +61 +76 +nbsp +250 +##ie +linux +##ma +cd +110 +hd +##17 +78 +##ion +77 +6000 +am +##th +##st +94 +##se +##et +69 +180 +gdp +my +105 +81 +abc +89 +flash +79 +one +93 +1990 +1996 +##ck +gps +##も +##ly +web885 +106 +2020 +91 +##ge +4000 +1500 +xd +boss +isbn +1994 +org +##ry +me +love +##11 +0fork +73 +##12 +3g +##ter +##ar +71 +82 +##la +hotel +130 +1970 +pk +83 +87 +140 +ie +##os +##30 +##el +74 +##50 +seo +cpu +##ml +p2p +84 +may +##る +sun +tue +internet +cc +posted +youtube +##at +##ン +##man +ii +##ル +##15 +abs +nt +pdf +yahoo +ago +1980 +##it +news +mac +104 +##てす +##me +##り +java +1992 +spa +##de +##nt +hk +all +plus +la +1993 +##mb +##16 +##ve +west +##da +160 +air +##い +##ps +から +##to +1989 +logo +htc +php +https +fi +momo +##son +sat +##ke +##80 +ebd +suv +wi +day +apk +##88 +##um +mv +galaxy +wiki +or +brake +##ス +1200 +する +this +1991 +mon +##こ +❤2017 +po +##ない +javascript +life +home +june +##ss +system +900 +##ー +##0 +pp +1988 +world +fb +4k +br +##as +ic +ai +leonardo +safari +##60 +live +free +xx +wed +win7 +kiehl +##co +lg +o2o +##go +us +235 +1949 +mm +しい +vfm +kanye +##90 +##2015 +##id +jr +##ey +123 +rss +##sa +##ro +##am +##no +thu +fri +350 +##sh +##ki +103 +comments +name +##のて +##pe +##ine +max +1987 +8000 +uber +##mi +##ton +wordpress +office +1986 +1985 +##ment +107 +bd +win10 +##ld +##li +gmail +bb +dior +##rs +##ri +##rd +##ます +up +cad +##® +dr +して +read +##21 +をお +##io +##99 +url +1984 +pvc +paypal +show +policy +##40 +##ty +##18 +with +##★ +##01 +txt +102 +##ba +dna +from +post +mini +ar +taiwan +john +##ga +privacy +agoda +##13 +##ny +word +##24 +##22 +##by +##ur +##hz +1982 +##ang +265 +cookie +netscape +108 +##ka +##~ +##ad +house +share +note +ibm +code +hello +nike +sim +survey +##016 +1979 +1950 +wikia +##32 +##017 +5g +cbc +##tor +##kg +1983 +##rt +##14 +campaign +store +2500 +os +##ct +##ts +##° +170 +api +##ns +365 +excel +##な +##ao +##ら +##し +~~ +##nd +university +163 +には +518 +##70 +##ya +##il +##25 +pierre +ipo +0020 +897 +##23 +hotels +##ian +のお +125 +years +6606 +##ers +##26 +high 
+##day +time +##ay +bug +##line +##く +##す +##be +xp +talk2yam +yamservice +10000 +coco +##dy +sony +##ies +1978 +microsoft +david +people +##ha +1960 +instagram +intel +その +##ot +iso +1981 +##va +115 +##mo +##land +xxx +man +co +ltxsw +##ation +baby +220 +##pa +##ol +1945 +7000 +tag +450 +##ue +msn +##31 +oppo +##ト +##ca +control +##om +st +chrome +##ure +##ん +be +##き +lol +##19 +した +##bo +240 +lady +##100 +##way +##から +4600 +##ko +##do +##un +4s +corporation +168 +##ni +herme +##28 +cp +978 +##up +##06 +ui +##ds +ppt +admin +three +します +bbc +re +128 +##48 +ca +##015 +##35 +hp +##ee +tpp +##た +##ive +×× +root +##cc +##ました +##ble +##ity +adobe +park +114 +et +oled +city +##ex +##ler +##ap +china +##book +20000 +view +##ice +global +##km +your +hong +##mg +out +##ms +ng +ebay +##29 +menu +ubuntu +##cy +rom +##view +open +ktv +do +server +##lo +if +english +##ね +##5 +##oo +1600 +##02 +step1 +kong +club +135 +july +inc +1976 +mr +hi +##net +touch +##ls +##ii +michael +lcd +##05 +##33 +phone +james +step2 +1300 +ios9 +##box +dc +##2 +##ley +samsung +111 +280 +pokemon +css +##ent +##les +いいえ +##1 +s8 +atom +play +bmw +##said +sa +etf +ctrl +♥yoyo♥ +##55 +2025 +##2014 +##66 +adidas +amazon +1958 +##ber +##ner +visa +##77 +##der +1800 +connectivity +##hi +firefox +109 +118 +hr +so +style +mark +pop +ol +skip +1975 +as +##27 +##ir +##61 +190 +mba +##う +##ai +le +##ver +1900 +cafe2017 +lte +super +113 +129 +##ron +amd +like +##☆ +are +##ster +we +##sk +paul +data +international +##ft +longchamp +ssd +good +##ート +##ti +reply +##my +↓↓↓ +apr +star +##ker +source +136 +js +112 +get +force +photo +##one +126 +##2013 +##ow +link +bbs +1972 +goods +##lin +python +119 +##ip +game +##ics +##ません +blue +##● +520 +##45 +page +itunes +##03 +1955 +260 +1968 +gt +gif +618 +##ff +##47 +group +くたさい +about +bar +ganji +##nce +music +lee +not +1977 +1971 +1973 +##per +an +faq +comment +##って +days +##ock +116 +##bs +1974 +1969 +v1 +player +1956 +xbox +sql +fm +f1 +139 +##ah +210 +##lv +##mp +##000 +melody +1957 +##3 +550 +17life +199 +1966 +xml +market +##au +##71 +999 +##04 +what +gl +##95 +##age +tips +##68 +book +##ting +mysql +can +1959 +230 +##ung +wonderland +watch +10℃ +##ction +9000 +mar +mobile +1946 +1962 +article +##db +part +▲top +party +って +1967 +1964 +1948 +##07 +##ore +##op +この +dj +##78 +##38 +010 +main +225 +1965 +##ong +art +320 +ad +134 +020 +##73 +117 +pm2 +japan +228 +##08 +ts +1963 +##ica +der +sm +##36 +2019 +##wa +ct +##7 +##や +##64 +1937 +homemesh +search +##85 +##れは +##tv +##di +macbook +##9 +##くたさい +service +##♥ +type +った +750 +##ier +##si +##75 +##います +##ok +best +##ット +goris +lock +##った +cf +3m +big +##ut +ftp +carol +##vi +10 +1961 +happy +sd +##ac +122 +anti +pe +cnn +iii +1920 +138 +##ラ +1940 +esp +jan +tags +##98 +##51 +august +vol +##86 +154 +##™ +##fs +##れ +##sion +design +ac +##ム +press +jordan +ppp +that +key +check +##6 +##tt +##㎡ +1080p +##lt +power +##42 +1952 +##bc +vivi +##ック +he +133 +121 +jpg +##rry +201 +175 +3500 +1947 +nb +##ted +##rn +しています +1954 +usd +##t00 +master +##ンク +001 +model +##58 +al +##09 +1953 +##34 +ram +goo +ても +##ui +127 +1930 +red +##ary +rpg +item +##pm +##41 +270 +##za +project +##2012 +hot +td +blogabstract +##ger +##62 +650 +##44 +gr2 +##します +##m +black +electronic +nfc +year +asus +また +html5 +cindy +##hd +m3 +132 +esc +##od +booking +##53 +fed +tvb +##81 +##ina +mit +165 +##いる +chan +192 +distribution +next +になる +peter +bios +steam +cm +1941 +にも +pk10 +##ix +##65 +##91 +dec +nasa +##ana +icecat +00z +b1 +will +##46 +li +se +##ji +##み +##ard +oct 
+##ain +jp +##ze +##bi +cio +##56 +smart +h5 +##39 +##port +curve +vpn +##nm +##dia +utc +##あり +12345678910 +##52 +rmvb +chanel +a4 +miss +##and +##im +media +who +##63 +she +girl +5s +124 +vera +##して +class +vivo +king +##フ +##ei +national +ab +1951 +5cm +888 +145 +ipod +ap +1100 +5mm +211 +ms +2756 +##69 +mp4 +msci +##po +##89 +131 +mg +index +380 +##bit +##out +##zz +##97 +##67 +158 +apec +##8 +photoshop +opec +¥799 +ては +##96 +##tes +##ast +2g +○○ +##ール +¥2899 +##ling +##よ +##ory +1938 +##ical +kitty +content +##43 +step3 +##cn +win8 +155 +vc +1400 +iphone7 +robert +##した +tcl +137 +beauty +##87 +en +dollars +##ys +##oc +step +pay +yy +a1 +##2011 +##lly +##ks +##♪ +1939 +188 +download +1944 +sep +exe +ph +います +school +gb +center +pr +street +##board +uv +##37 +##lan +winrar +##que +##ua +##com +1942 +1936 +480 +gpu +##4 +ettoday +fu +tom +##54 +##ren +##via +149 +##72 +b2b +144 +##79 +##tch +rose +arm +mb +##49 +##ial +##nn +nvidia +step4 +mvp +00㎡ +york +156 +##イ +how +cpi +591 +2765 +gov +kg +joe +##xx +mandy +pa +##ser +copyright +fashion +1935 +don +##け +ecu +##ist +##art +erp +wap +have +##lm +talk +##ek +##ning +##if +ch +##ite +video +1943 +cs +san +iot +look +##84 +##2010 +##ku +october +##ux +trump +##hs +##ide +box +141 +first +##ins +april +##ight +##83 +185 +angel +protected +aa +151 +162 +x1 +m2 +##fe +##× +##ho +size +143 +min +ofo +fun +gomaji +ex +hdmi +food +dns +march +chris +kevin +##のか +##lla +##pp +##ec +ag +ems +6s +720p +##rm +##ham +off +##92 +asp +team +fandom +ed +299 +▌♥ +##ell +info +されています +##82 +sina +4066 +161 +##able +##ctor +330 +399 +315 +dll +rights +ltd +idc +jul +3kg +1927 +142 +ma +surface +##76 +##ク +~~~ +304 +mall +eps +146 +green +##59 +map +space +donald +v2 +sodu +##light +1931 +148 +1700 +まて +310 +reserved +htm +##han +##57 +2d +178 +mod +##ise +##tions +152 +ti +##shi +doc +1933 +icp +055 +wang +##ram +shopping +aug +##pi +##well +now +wam +b2 +からお +##hu +236 +1928 +##gb +266 +f2 +##93 +153 +mix +##ef +##uan +bwl +##plus +##res +core +##ess +tea +5℃ +hktvmall +nhk +##ate +list +##ese +301 +feb +4m +inn +ての +nov +159 +12345 +daniel +##ci +pass +##bet +##nk +coffee +202 +ssl +airbnb +##ute +fbi +woshipm +skype +ea +cg +sp +##fc +##www +yes +edge +alt +007 +##94 +fpga +##ght +##gs +iso9001 +さい +##ile +##wood +##uo +image +lin +icon +american +##em +1932 +set +says +##king +##tive +blogger +##74 +なと +256 +147 +##ox +##zy +##red +##ium +##lf +nokia +claire +##リ +##ding +november +lohas +##500 +##tic +##マ +##cs +##ある +##che +##ire +##gy +##ult +db +january +win +##カ +166 +road +ptt +##ま +##つ +198 +##fa +##mer +anna +pchome +はい +udn +ef +420 +##time +##tte +2030 +##ア +g20 +white +かかります +1929 +308 +garden +eleven +di +##おります +chen +309b +777 +172 +young +cosplay +ちてない +4500 +bat +##123 +##tra +##ては +kindle +npc +steve +etc +##ern +##| +call +xperia +ces +travel +sk +s7 +##ous +1934 +##int +みいたたけます +183 +edu +file +cho +qr +##car +##our +186 +##ant +##d +eric +1914 +rends +##jo +##する +mastercard +##2000 +kb +##min +290 +##ino +vista +##ris +##ud +jack +2400 +##set +169 +pos +1912 +##her +##ou +taipei +しく +205 +beta +##ませんか +232 +##fi +express +255 +body +##ill +aphojoy +user +december +meiki +##ick +tweet +richard +##av +##ᆫ +iphone6 +##dd +ちてすか +views +##mark +321 +pd +##00 +times +##▲ +level +##ash +10g +point +5l +##ome +208 +koreanmall +##ak +george +q2 +206 +wma +tcp +##200 +スタッフ +full +mlb +##lle +##watch +tm +run +179 +911 +smith +business +##und +1919 +color +##tal +222 +171 +##less +moon +4399 +##rl +update +pcb +shop +499 +157 +little +なし 
+end +##mhz +van +dsp +easy +660 +##house +##key +history +##o +oh +##001 +##hy +##web +oem +let +was +##2009 +##gg +review +##wan +182 +##°c +203 +uc +title +##val +united +233 +2021 +##ons +doi +trivago +overdope +sbs +##ance +##ち +grand +special +573032185 +imf +216 +wx17house +##so +##ーム +audi +##he +london +william +##rp +##ake +science +beach +cfa +amp +ps4 +880 +##800 +##link +##hp +crm +ferragamo +bell +make +##eng +195 +under +zh +photos +2300 +##style +##ント +via +176 +da +##gi +company +i7 +##ray +thomas +370 +ufo +i5 +##max +plc +ben +back +research +8g +173 +mike +##pc +##ッフ +september +189 +##ace +vps +february +167 +pantos +wp +lisa +1921 +★★ +jquery +night +long +offer +##berg +##news +1911 +##いて +ray +fks +wto +せます +over +164 +340 +##all +##rus +1924 +##888 +##works +blogtitle +loftpermalink +##→ +187 +martin +test +ling +km +##め +15000 +fda +v3 +##ja +##ロ +wedding +かある +outlet +family +##ea +をこ +##top +story +##ness +salvatore +##lu +204 +swift +215 +room +している +oracle +##ul +1925 +sam +b2c +week +pi +rock +##のは +##a +##けと +##ean +##300 +##gle +cctv +after +chinese +##back +powered +x2 +##tan +1918 +##nes +##イン +canon +only +181 +##zi +##las +say +##oe +184 +##sd +221 +##bot +##world +##zo +sky +made +top100 +just +1926 +pmi +802 +234 +gap +##vr +177 +les +174 +▲topoct +ball +vogue +vi +ing +ofweek +cos +##list +##ort +▲topmay +##なら +##lon +として +last +##tc +##of +##bus +##gen +real +eva +##コ +a3 +nas +##lie +##ria +##coin +##bt +▲topapr +his +212 +cat +nata +vive +health +⋯⋯ +drive +sir +▲topmar +du +cup +##カー +##ook +##よう +##sy +alex +msg +tour +しました +3ce +##word +193 +ebooks +r8 +block +318 +##より +2200 +nice +pvp +207 +months +1905 +rewards +##ther +1917 +0800 +##xi +##チ +##sc +micro +850 +gg +blogfp +op +1922 +daily +m1 +264 +true +##bb +ml +##tar +##のお +##ky +anthony +196 +253 +##yo +state +218 +##ara +##aa +##rc +##tz +##ston +より +gear +##eo +##ade +ge +see +1923 +##win +##ura +ss +heart +##den +##ita +down +##sm +el +png +2100 +610 +rakuten +whatsapp +bay +dream +add +##use +680 +311 +pad +gucci +mpv +##ode +##fo +island +▲topjun +##▼ +223 +jason +214 +chicago +##❤ +しの +##hone +io +##れる +##ことか +sogo +be2 +##ology +990 +cloud +vcd +##con +2~3 +##ford +##joy +##kb +##こさいます +##rade +but +##ach +docker +##ful +rfid +ul +##ase +hit +ford +##star +580 +##○ +11 +a2 +sdk +reading +edited +##are +cmos +##mc +238 +siri +light +##ella +##ため +bloomberg +##read +pizza +##ison +jimmy +##vm +college +node +journal +ba +18k +##play +245 +##cer +20 +magic +##yu +191 +jump +288 +tt +##ings +asr +##lia +3200 +step5 +network +##cd +mc +いします +1234 +pixstyleme +273 +##600 +2800 +money +★★★★★ +1280 +12 +430 +bl +みの +act +##tus +tokyo +##rial +##life +emba +##ae +saas +tcs +##rk +##wang +summer +##sp +ko +##ving +390 +premium +##その +netflix +##ヒ +uk +mt +##lton +right +frank +two +209 +える +##ple +##cal +021 +##んな +##sen +##ville +hold +nexus +dd +##ius +てお +##mah +##なく +tila +zero +820 +ce +##tin +resort +##ws +charles +old +p10 +5d +report +##360 +##ru +##には +bus +vans +lt +##est +pv +##レ +links +rebecca +##ツ +##dm +azure +##365 +きな +limited +bit +4gb +##mon +1910 +moto +##eam +213 +1913 +var +eos +なとの +226 +blogspot +された +699 +e3 +dos +dm +fc +##ments +##ik +##kw +boy +##bin +##ata +960 +er +##せ +219 +##vin +##tu +##ula +194 +##∥ +station +##ろ +##ature +835 +files +zara +hdr +top10 +nature +950 +magazine +s6 +marriott +##シ +avira +case +##っと +tab +##ran +tony +##home +oculus +im +##ral +jean +saint +cry +307 +rosie +##force +##ini +ice +##bert +のある +##nder +##mber +pet +2600 +##◆ +plurk 
+▲topdec +##sis +00kg +▲topnov +720 +##ence +tim +##ω +##nc +##ても +##name +log +ips +great +ikea +malaysia +unix +##イト +3600 +##ncy +##nie +12000 +akb48 +##ye +##oid +404 +##chi +##いた +oa +xuehai +##1000 +##orm +##rf +275 +さん +##ware +##リー +980 +ho +##pro +text +##era +560 +bob +227 +##ub +##2008 +8891 +scp +avi +##zen +2022 +mi +wu +museum +qvod +apache +lake +jcb +▲topaug +★★★ +ni +##hr +hill +302 +ne +weibo +490 +ruby +##ーシ +##ヶ +##row +4d +▲topjul +iv +##ish +github +306 +mate +312 +##スト +##lot +##ane +andrew +のハイト +##tina +t1 +rf +ed2k +##vel +##900 +way +final +りの +ns +5a +705 +197 +##メ +sweet +bytes +##ene +▲topjan +231 +##cker +##2007 +##px +100g +topapp +229 +helpapp +rs +low +14k +g4g +care +630 +ldquo +あり +##fork +leave +rm +edition +##gan +##zon +##qq +▲topsep +##google +##ism +gold +224 +explorer +##zer +toyota +category +select +visual +##labels +restaurant +##md +posts +s1 +##ico +もっと +angelababy +123456 +217 +sports +s3 +mbc +1915 +してくたさい +shell +x86 +candy +##new +kbs +face +xl +470 +##here +4a +swissinfo +v8 +▲topfeb +dram +##ual +##vice +3a +##wer +sport +q1 +ios10 +public +int +card +##c +ep +au +rt +##れた +1080 +bill +##mll +kim +30 +460 +wan +##uk +##ミ +x3 +298 +0t +scott +##ming +239 +e5 +##3d +h7n9 +worldcat +brown +##あります +##vo +##led +##580 +##ax +249 +410 +##ert +paris +##~6 +polo +925 +##lr +599 +##ナ +capital +##hing +bank +cv +1g +##chat +##s +##たい +adc +##ule +2m +##e +digital +hotmail +268 +##pad +870 +bbq +quot +##ring +before +wali +##まて +mcu +2k +2b +という +costco +316 +north +333 +switch +##city +##p +philips +##mann +management +panasonic +##cl +##vd +##ping +##rge +alice +##lk +##ましょう +css3 +##ney +vision +alpha +##ular +##400 +##tter +lz +にお +##ありません +mode +gre +1916 +pci +##tm +237 +1~2 +##yan +##そ +について +##let +##キ +work +war +coach +ah +mary +##ᅵ +huang +##pt +a8 +pt +follow +##berry +1895 +##ew +a5 +ghost +##ション +##wn +##og +south +##code +girls +##rid +action +villa +git +r11 +table +games +##cket +error +##anonymoussaid +##ag +here +##ame +##gc +qa +##■ +##lis +gmp +##gin +vmalife +##cher +yu +wedding +##tis +demo +dragon +530 +soho +social +bye +##rant +river +orz +acer +325 +##↑ +##ース +##ats +261 +del +##ven +440 +ups +##ように +##ター +305 +value +macd +yougou +##dn +661 +##ano +ll +##urt +##rent +continue +script +##wen +##ect +paper +263 +319 +shift +##chel +##フト +##cat +258 +x5 +fox +243 +##さん +car +aaa +##blog +loading +##yn +##tp +kuso +799 +si +sns +イカせるテンマ +ヒンクテンマ3 +rmb +vdc +forest +central +prime +help +ultra +##rmb +##ような +241 +square +688 +##しい +のないフロクに +##field +##reen +##ors +##ju +c1 +start +510 +##air +##map +cdn +##wo +cba +stephen +m8 +100km +##get +opera +##base +##ood +vsa +com™ +##aw +##ail +251 +なのて +count +t2 +##ᅡ +##een +2700 +hop +##gp +vsc +tree +##eg +##ose +816 +285 +##ories +##shop +alphago +v4 +1909 +simon +##ᆼ +fluke62max +zip +スホンサー +##sta +louis +cr +bas +##~10 +bc +##yer +hadoop +##ube +##wi +1906 +0755 +hola +##low +place +centre +5v +d3 +##fer +252 +##750 +##media +281 +540 +0l +exchange +262 +series +##ハー +##san +eb +##bank +##k +q3 +##nge +##mail +take +##lp +259 +1888 +client +east +cache +event +vincent +##ールを +きを +##nse +sui +855 +adchoice +##и +##stry +##なたの +246 +##zone +ga +apps +sea +##ab +248 +cisco +##タ +##rner +kymco +##care +dha +##pu +##yi +minkoff +royal +p1 +への +annie +269 +collection +kpi +playstation +257 +になります +866 +bh +##bar +queen +505 +radio +1904 +andy +armani +##xy +manager +iherb +##ery +##share +spring +raid +johnson +1908 +##ob +volvo +hall +##ball +v6 +our +taylor +##hk +bi +242 +##cp 
+kate +bo +water +technology +##rie +サイトは +277 +##ona +##sl +hpv +303 +gtx +hip +rdquo +jayz +stone +##lex +##rum +namespace +##やり +620 +##ale +##atic +des +##erson +##ql +##ves +##type +enter +##この +##てきます +d2 +##168 +##mix +##bian +との +a9 +jj +ky +##lc +access +movie +##hc +リストに +tower +##ration +##mit +ます +##nch +ua +tel +prefix +##o2 +1907 +##point +1901 +ott +~10 +##http +##ury +baidu +##ink +member +##logy +bigbang +nownews +##js +##shot +##tb +##こと +247 +eba +##tics +##lus +ける +v5 +spark +##ama +there +##ions +god +##lls +##down +hiv +##ress +burberry +day2 +##kv +◆◆ +jeff +related +film +edit +joseph +283 +##ark +cx +32gb +order +g9 +30000 +##ans +##tty +s5 +##bee +かあります +thread +xr +buy +sh +005 +land +spotify +mx +##ari +276 +##verse +×email +sf +why +##ことて +244 +7headlines +nego +sunny +dom +exo +401 +666 +positioning +fit +rgb +##tton +278 +kiss +alexa +adam +lp +みリストを +##g +mp +##ties +##llow +amy +##du +np +002 +institute +271 +##rth +##lar +2345 +590 +##des +sidebar +15 +imax +site +##cky +##kit +##ime +##009 +season +323 +##fun +##ンター +##ひ +gogoro +a7 +pu +lily +fire +twd600 +##ッセーシを +いて +##vis +30ml +##cture +##をお +information +##オ +close +friday +##くれる +yi +nick +てすか +##tta +##tel +6500 +##lock +cbd +economy +254 +かお +267 +tinker +double +375 +8gb +voice +##app +oops +channel +today +985 +##right +raw +xyz +##+ +jim +edm +##cent +7500 +supreme +814 +ds +##its +##asia +dropbox +##てすか +##tti +books +272 +100ml +##tle +##ller +##ken +##more +##boy +sex +309 +##dom +t3 +##ider +##なります +##unch +1903 +810 +feel +5500 +##かった +##put +により +s2 +mo +##gh +men +ka +amoled +div +##tr +##n1 +port +howard +##tags +ken +dnf +##nus +adsense +##а +ide +##へ +buff +thunder +##town +##ique +has +##body +auto +pin +##erry +tee +てした +295 +number +##the +##013 +object +psp +cool +udnbkk +16gb +##mic +miui +##tro +most +r2 +##alk +##nity +1880 +±0 +##いました +428 +s4 +law +version +##oa +n1 +sgs +docomo +##tf +##ack +henry +fc2 +##ded +##sco +##014 +##rite +286 +0mm +linkedin +##ada +##now +wii +##ndy +ucbug +##◎ +sputniknews +legalminer +##ika +##xp +2gb +##bu +q10 +oo +b6 +come +##rman +cheese +ming +maker +##gm +nikon +##fig +ppi +kelly +##ります +jchere +てきます +ted +md +003 +fgo +tech +##tto +dan +soc +##gl +##len +hair +earth +640 +521 +img +##pper +##a1 +##てきる +##ロク +acca +##ition +##ference +suite +##ig +outlook +##mond +##cation +398 +##pr +279 +101vip +358 +##999 +282 +64gb +3800 +345 +airport +##over +284 +##おり +jones +##ith +lab +##su +##いるのて +co2 +town +piece +##llo +no1 +vmware +24h +##qi +focus +reader +##admin +##ora +tb +false +##log +1898 +know +lan +838 +##ces +f4 +##ume +motel +stop +##oper +na +flickr +netcomponents +##af +##─ +pose +williams +local +##ound +##cg +##site +##iko +いお +274 +5m +gsm +con +##ath +1902 +friends +##hip +cell +317 +##rey +780 +cream +##cks +012 +##dp +facebooktwitterpinterestgoogle +sso +324 +shtml +song +swiss +##mw +##キンク +lumia +xdd +string +tiffany +522 +marc +られた +insee +russell +sc +dell +##ations +ok +camera +289 +##vs +##flow +##late +classic +287 +##nter +stay +g1 +mtv +512 +##ever +##lab +##nger +qe +sata +ryan +d1 +50ml +cms +##cing +su +292 +3300 +editor +296 +##nap +security +sunday +association +##ens +##700 +##bra +acg +##かり +sofascore +とは +mkv +##ign +jonathan +gary +build +labels +##oto +tesla +moba +qi +gohappy +general +ajax +1024 +##かる +サイト +society +##test +##urs +wps +fedora +##ich +mozilla +328 +##480 +##dr +usa +urn +##lina +##r +grace +##die +##try +##ader +1250 +##なり +elle +570 +##chen +##ᆯ +price +##ten +uhz +##ough +eq +##hen 
+states +push +session +balance +wow +506 +##cus +##py +when +##ward +##ep +34e +wong +library +prada +##サイト +##cle +running +##ree +313 +ck +date +q4 +##ctive +##ool +##> +mk +##ira +##163 +388 +die +secret +rq +dota +buffet +は1ヶ +e6 +##ez +pan +368 +ha +##card +##cha +2a +##さ +alan +day3 +eye +f3 +##end +france +keep +adi +rna +tvbs +##ala +solo +nova +##え +##tail +##ょう +support +##ries +##なる +##ved +base +copy +iis +fps +##ways +hero +hgih +profile +fish +mu +ssh +entertainment +chang +##wd +click +cake +##ond +pre +##tom +kic +pixel +##ov +##fl +product +6a +##pd +dear +##gate +es +yumi +audio +##² +##sky +echo +bin +where +##ture +329 +##ape +find +sap +isis +##なと +nand +##101 +##load +##ream +band +a6 +525 +never +##post +festival +50cm +##we +555 +guide +314 +zenfone +##ike +335 +gd +forum +jessica +strong +alexander +##ould +software +allen +##ious +program +360° +else +lohasthree +##gar +することかてきます +please +##れます +rc +##ggle +##ric +bim +50000 +##own +eclipse +355 +brian +3ds +##side +061 +361 +##other +##ける +##tech +##ator +485 +engine +##ged +##t +plaza +##fit +cia +ngo +westbrook +shi +tbs +50mm +##みませんか +sci +291 +reuters +##ily +contextlink +##hn +af +##cil +bridge +very +##cel +1890 +cambridge +##ize +15g +##aid +##data +790 +frm +##head +award +butler +##sun +meta +##mar +america +ps3 +puma +pmid +##すか +lc +670 +kitchen +##lic +オーフン5 +きなしソフトサーヒス +そして +day1 +future +★★★★ +##text +##page +##rris +pm1 +##ket +fans +##っています +1001 +christian +bot +kids +trackback +##hai +c3 +display +##hl +n2 +1896 +idea +さんも +##sent +airmail +##ug +##men +pwm +けます +028 +##lution +369 +852 +awards +schemas +354 +asics +wikipedia +font +##tional +##vy +c2 +293 +##れている +##dget +##ein +っている +contact +pepper +スキル +339 +##~5 +294 +##uel +##ument +730 +##hang +みてす +q5 +##sue +rain +##ndi +wei +swatch +##cept +わせ +331 +popular +##ste +##tag +p2 +501 +trc +1899 +##west +##live +justin +honda +ping +messenger +##rap +v9 +543 +##とは +unity +appqq +はすへて +025 +leo +##tone +##テ +##ass +uniqlo +##010 +502 +her +jane +memory +moneydj +##tical +human +12306 +していると +##m2 +coc +miacare +##mn +tmt +##core +vim +kk +##may +fan +target +use +too +338 +435 +2050 +867 +737 +fast +##2c +services +##ope +omega +energy +##わ +pinkoi +1a +##なから +##rain +jackson +##ement +##シャンルの +374 +366 +そんな +p9 +rd +##ᆨ +1111 +##tier +##vic +zone +##│ +385 +690 +dl +isofix +cpa +m4 +322 +kimi +めて +davis +##lay +lulu +##uck +050 +weeks +qs +##hop +920 +##n +ae +##ear +~5 +eia +405 +##fly +korea +jpeg +boost +##ship +small +##リア +1860 +eur +297 +425 +valley +##iel +simple +##ude +rn +k2 +##ena +されます +non +patrick +しているから +##ナー +feed +5757 +30g +process +well +qqmei +##thing +they +aws +lu +pink +##ters +##kin +または +board +##vertisement +wine +##ien +unicode +##dge +r1 +359 +##tant +いを +##twitter +##3c +cool1 +される +##れて +##l +isp +##012 +standard +45㎡2 +402 +##150 +matt +##fu +326 +##iner +googlemsn +pixnetfacebookyahoo +##ラン +x7 +886 +##uce +メーカー +sao +##ev +##きました +##file +9678 +403 +xddd +shirt +6l +##rio +##hat +3mm +givenchy +ya +bang +##lio +monday +crystal +ロクイン +##abc +336 +head +890 +ubuntuforumwikilinuxpastechat +##vc +##~20 +##rity +cnc +7866 +ipv6 +null +1897 +##ost +yang +imsean +tiger +##fet +##ンス +352 +##= +dji +327 +ji +maria +##come +##んて +foundation +3100 +##beth +##なった +1m +601 +active +##aft +##don +3p +sr +349 +emma +##khz +living +415 +353 +1889 +341 +709 +457 +sas +x6 +##face +pptv +x4 +##mate +han +sophie +##jing +337 +fifa +##mand +other +sale +inwedding +##gn +てきちゃいます +##mmy +##pmlast +bad +nana +nbc +してみてくたさいね 
+なとはお +##wu +##かあります +##あ +note7 +single +##340 +せからこ +してくたさい♪この +しにはとんとんワークケートを +するとあなたにもっとマッチした +ならワークケートへ +もみつかっちゃうかも +ワークケートの +##bel +window +##dio +##ht +union +age +382 +14 +##ivity +##y +コメント +domain +neo +##isa +##lter +5k +f5 +steven +##cts +powerpoint +tft +self +g2 +ft +##テル +zol +##act +mwc +381 +343 +もう +nbapop +408 +てある +eds +ace +##room +previous +author +tomtom +il +##ets +hu +financial +☆☆☆ +っています +bp +5t +chi +1gb +##hg +fairmont +cross +008 +gay +h2 +function +##けて +356 +also +1b +625 +##ータ +##raph +1894 +3~5 +##ils +i3 +334 +avenue +##host +による +##bon +##tsu +message +navigation +50g +fintech +h6 +##ことを +8cm +##ject +##vas +##firm +credit +##wf +xxxx +form +##nor +##space +huawei +plan +json +sbl +##dc +machine +921 +392 +wish +##120 +##sol +windows7 +edward +##ために +development +washington +##nsis +lo +818 +##sio +##ym +##bor +planet +##~8 +##wt +ieee +gpa +##めて +camp +ann +gm +##tw +##oka +connect +##rss +##work +##atus +wall +chicken +soul +2mm +##times +fa +##ather +##cord +009 +##eep +hitachi +gui +harry +##pan +e1 +disney +##press +##ーション +wind +386 +frigidaire +##tl +liu +hsu +332 +basic +von +ev +いた +てきる +スホンサーサイト +learning +##ull +expedia +archives +change +##wei +santa +cut +ins +6gb +turbo +brand +cf1 +508 +004 +return +747 +##rip +h1 +##nis +##をこ +128gb +##にお +3t +application +しており +emc +rx +##oon +384 +quick +412 +15058 +wilson +wing +chapter +##bug +beyond +##cms +##dar +##oh +zoom +e2 +trip +sb +##nba +rcep +342 +aspx +ci +080 +gc +gnu +める +##count +advanced +dance +dv +##url +##ging +367 +8591 +am09 +shadow +battle +346 +##i +##cia +##という +emily +##のてす +##tation +host +ff +techorz +sars +##mini +##mporary +##ering +nc +4200 +798 +##next +cma +##mbps +##gas +##ift +##dot +##ィ +455 +##~17 +amana +##りの +426 +##ros +ir +00㎡1 +##eet +##ible +##↓ +710 +ˋ▽ˊ +##aka +dcs +iq +##v +l1 +##lor +maggie +##011 +##iu +588 +##~1 +830 +##gt +1tb +articles +create +##burg +##iki +database +fantasy +##rex +##cam +dlc +dean +##you +hard +path +gaming +victoria +maps +cb +##lee +##itor +overchicstoretvhome +systems +##xt +416 +p3 +sarah +760 +##nan +407 +486 +x9 +install +second +626 +##ann +##ph +##rcle +##nic +860 +##nar +ec +##とう +768 +metro +chocolate +##rian +~4 +##table +##しています +skin +##sn +395 +mountain +##0mm +inparadise +6m +7x24 +ib +4800 +##jia +eeworld +creative +g5 +g3 +357 +parker +ecfa +village +からの +18000 +sylvia +サーヒス +hbl +##ques +##onsored +##x2 +##きます +##v4 +##tein +ie6 +383 +##stack +389 +ver +##ads +##baby +sound +bbe +##110 +##lone +##uid +ads +022 +gundam +351 +thinkpad +006 +scrum +match +##ave +mems +##470 +##oy +##なりました +##talk +glass +lamigo +span +##eme +job +##a5 +jay +wade +kde +498 +##lace +ocean +tvg +##covery +##r3 +##ners +##rea +junior +think +##aine +cover +##ision +##sia +↓↓ +##bow +msi +413 +458 +406 +##love +711 +801 +soft +z2 +##pl +456 +1840 +mobil +mind +##uy +427 +nginx +##oi +めた +##rr +6221 +##mple +##sson +##ーシてす +371 +##nts +91tv +comhd +crv3000 +##uard +1868 +397 +deep +lost +field +gallery +##bia +rate +spf +redis +traction +930 +icloud +011 +なら +fe +jose +372 +##tory +into +sohu +fx +899 +379 +kicstart2 +##hia +すく +##~3 +##sit +ra +24 +##walk +##xure +500g +##pact +pacific +xa +natural +carlo +##250 +##walker +1850 +##can +cto +gigi +516 +##サー +pen +##hoo +ob +matlab +##b +##yy +13913459 +##iti +mango +##bbs +sense +c5 +oxford +##ニア +walker +jennifer +##ola +course +##bre +701 +##pus +##rder +lucky +075 +##ぁ +ivy +なお +##nia +sotheby +side +##ugh +joy +##orage +##ush +##bat +##dt +364 +r9 +##2d +##gio +511 +country +wear 
+##lax +##~7 +##moon +393 +seven +study +411 +348 +lonzo +8k +##ェ +evolution +##イフ +##kk +gs +kd +##レス +arduino +344 +b12 +##lux +arpg +##rdon +cook +##x5 +dark +five +##als +##ida +とても +sign +362 +##ちの +something +20mm +##nda +387 +##posted +fresh +tf +1870 +422 +cam +##mine +##skip +##form +##ssion +education +394 +##tee +dyson +stage +##jie +want +##night +epson +pack +あります +##ppy +テリヘル +##█ +wd +##eh +##rence +left +##lvin +golden +mhz +discovery +##trix +##n2 +loft +##uch +##dra +##sse +speed +~1 +1mdb +sorry +welcome +##urn +wave +gaga +##lmer +teddy +##160 +トラックハック +せよ +611 +##f2016 +378 +rp +##sha +rar +##あなたに +##きた +840 +holiday +##ュー +373 +074 +##vg +##nos +##rail +gartner +gi +6p +##dium +kit +488 +b3 +eco +##ろう +20g +sean +##stone +autocad +nu +##np +f16 +write +029 +m5 +##ias +images +atp +##dk +fsm +504 +1350 +ve +52kb +##xxx +##のに +##cake +414 +unit +lim +ru +1v +##ification +published +angela +16g +analytics +ak +##q +##nel +gmt +##icon +again +##₂ +##bby +ios11 +445 +かこさいます +waze +いてす +##ハ +9985 +##ust +##ティー +framework +##007 +iptv +delete +52sykb +cl +wwdc +027 +30cm +##fw +##ての +1389 +##xon +brandt +##ses +##dragon +tc +vetements +anne +monte +modern +official +##へて +##ere +##nne +##oud +もちろん +50 +etnews +##a2 +##graphy +421 +863 +##ちゃん +444 +##rtex +##てお +l2 +##gma +mount +ccd +たと +archive +morning +tan +ddos +e7 +##ホ +day4 +##ウ +gis +453 +its +495 +factory +bruce +pg +##ito +ってくたさい +guest +cdma +##lling +536 +n3 +しかし +3~4 +mega +eyes +ro +13 +women +dac +church +##jun +singapore +##facebook +6991 +starbucks +##tos +##stin +##shine +zen +##mu +tina +20℃ +1893 +##たけて +503 +465 +request +##gence +qt +##っ +1886 +347 +363 +q7 +##zzi +diary +##tore +409 +##ead +468 +cst +##osa +canada +agent +va +##jiang +##ちは +##ーク +##lam +sg +##nix +##sday +##よって +g6 +##master +bing +##zl +charlie +16 +8mm +nb40 +##ーン +thai +##ルフ +ln284ct +##itz +##2f +bonnie +##food +##lent +originals +##stro +##lts +418 +∟∣ +##bscribe +children +ntd +yesstyle +##かも +hmv +##tment +d5 +2cm +arts +sms +##pn +##я +##いい +topios9 +539 +lifestyle +virtual +##ague +xz +##deo +muji +024 +unt +##nnis +##ᅩ +faq1 +1884 +396 +##ette +fly +64㎡ +はしめまして +441 +curry +##pop +のこ +release +##← +##◆◆ +##cast +073 +ありな +500ml +##ews +5c +##stle +ios7 +##ima +787 +dog +lenovo +##r4 +roger +013 +cbs +vornado +100m +417 +##desk +##クok +##ald +1867 +9595 +2900 +##van +oil +##x +some +break +common +##jy +##lines +g7 +twice +419 +ella +nano +belle +にこ +##mes +##self +##note +jb +##ことかてきます +benz +##との +##ova +451 +save +##wing +##ますのて +kai +りは +##hua +##rect +rainer +##unge +448 +##0m +adsl +##かな +guestname +##uma +##kins +##zu +tokichoi +##price +county +##med +##mus +rmk +391 +address +vm +えて +openload +##group +##hin +##iginal +amg +urban +##oz +jobs +emi +##public +beautiful +##sch +album +##dden +##bell +jerry +works +hostel +miller +##drive +##rmin +##10 +376 +boot +828 +##370 +##fx +##cm~ +1885 +##nome +##ctionary +##oman +##lish +##cr +##hm +433 +##how +432 +francis +xi +c919 +b5 +evernote +##uc +vga +##3000 +coupe +##urg +##cca +##uality +019 +6g +れる +multi +##また +##ett +em +hey +##ani +##tax +##rma +inside +than +740 +leonnhurt +##jin +ict +れた +bird +notes +200mm +くの +##dical +##lli +result +442 +iu +ee +438 +smap +gopro +##last +yin +pure +998 +32g +けた +5kg +##dan +##rame +mama +##oot +bean +marketing +##hur +2l +bella +sync +xuite +##ground +515 +discuz +##getrelax +##ince +##bay +##5s +cj +##イス +gmat +apt +##pass +jing +##rix +c4 +rich +##とても +niusnews +##ello +bag +770 +##eting +##mobile +18 +culture +015 +##のてすか 
+377 +1020 +area +##ience +616 +details +gp +universal +silver +dit +はお +private +ddd +u11 +kanshu +##ified +fung +##nny +dx +##520 +tai +475 +023 +##fr +##lean +3s +##pin +429 +##rin +25000 +ly +rick +##bility +usb3 +banner +##baru +##gion +metal +dt +vdf +1871 +karl +qualcomm +bear +1010 +oldid +ian +jo +##tors +population +##ernel +1882 +mmorpg +##mv +##bike +603 +##© +ww +friend +##ager +exhibition +##del +##pods +fpx +structure +##free +##tings +kl +##rley +##copyright +##mma +california +3400 +orange +yoga +4l +canmake +honey +##anda +##コメント +595 +nikkie +##ルハイト +dhl +publishing +##mall +##gnet +20cm +513 +##クセス +##┅ +e88 +970 +##dog +fishbase +##! +##" +### +##$ +##% +##& +##' +##( +##) +##* +##+ +##, +##- +##. +##/ +##: +##; +##< +##= +##> +##? +##@ +##[ +##\ +##] +##^ +##_ +##{ +##| +##} +##~ +##£ +##¤ +##¥ +##§ +##« +##± +##³ +##µ +##· +##¹ +##º +##» +##¼ +##ß +##æ +##÷ +##ø +##đ +##ŋ +##ɔ +##ə +##ɡ +##ʰ +##ˇ +##ˈ +##ˊ +##ˋ +##ˍ +##ː +##˙ +##˚ +##ˢ +##α +##β +##γ +##δ +##ε +##η +##θ +##ι +##κ +##λ +##μ +##ν +##ο +##π +##ρ +##ς +##σ +##τ +##υ +##φ +##χ +##ψ +##б +##в +##г +##д +##е +##ж +##з +##к +##л +##м +##н +##о +##п +##р +##с +##т +##у +##ф +##х +##ц +##ч +##ш +##ы +##ь +##і +##ا +##ب +##ة +##ت +##د +##ر +##س +##ع +##ل +##م +##ن +##ه +##و +##ي +##۩ +##ก +##ง +##น +##ม +##ย +##ร +##อ +##า +##เ +##๑ +##་ +##ღ +##ᄀ +##ᄁ +##ᄂ +##ᄃ +##ᄅ +##ᄆ +##ᄇ +##ᄈ +##ᄉ +##ᄋ +##ᄌ +##ᄎ +##ᄏ +##ᄐ +##ᄑ +##ᄒ +##ᅢ +##ᅣ +##ᅥ +##ᅦ +##ᅧ +##ᅨ +##ᅪ +##ᅬ +##ᅭ +##ᅮ +##ᅯ +##ᅲ +##ᅳ +##ᅴ +##ᆷ +##ᆸ +##ᆺ +##ᆻ +##ᗜ +##ᵃ +##ᵉ +##ᵍ +##ᵏ +##ᵐ +##ᵒ +##ᵘ +##‖ +##„ +##† +##• +##‥ +##‧ +##
 +##‰ +##′ +##″ +##‹ +##› +##※ +##‿ +##⁄ +##ⁱ +##⁺ +##ⁿ +##₁ +##₃ +##₄ +##€ +##№ +##ⅰ +##ⅱ +##ⅲ +##ⅳ +##ⅴ +##↔ +##↗ +##↘ +##⇒ +##∀ +##− +##∕ +##∙ +##√ +##∞ +##∟ +##∠ +##∣ +##∩ +##∮ +##∶ +##∼ +##∽ +##≈ +##≒ +##≡ +##≤ +##≥ +##≦ +##≧ +##≪ +##≫ +##⊙ +##⋅ +##⋈ +##⋯ +##⌒ +##① +##② +##③ +##④ +##⑤ +##⑥ +##⑦ +##⑧ +##⑨ +##⑩ +##⑴ +##⑵ +##⑶ +##⑷ +##⑸ +##⒈ +##⒉ +##⒊ +##⒋ +##ⓒ +##ⓔ +##ⓘ +##━ +##┃ +##┆ +##┊ +##┌ +##└ +##├ +##┣ +##═ +##║ +##╚ +##╞ +##╠ +##╭ +##╮ +##╯ +##╰ +##╱ +##╳ +##▂ +##▃ +##▅ +##▇ +##▉ +##▋ +##▌ +##▍ +##▎ +##□ +##▪ +##▫ +##▬ +##△ +##▶ +##► +##▽ +##◇ +##◕ +##◠ +##◢ +##◤ +##☀ +##☕ +##☞ +##☺ +##☼ +##♀ +##♂ +##♠ +##♡ +##♣ +##♦ +##♫ +##♬ +##✈ +##✔ +##✕ +##✖ +##✦ +##✨ +##✪ +##✰ +##✿ +##❀ +##➜ +##➤ +##⦿ +##、 +##。 +##〃 +##々 +##〇 +##〈 +##〉 +##《 +##》 +##「 +##」 +##『 +##』 +##【 +##】 +##〓 +##〔 +##〕 +##〖 +##〗 +##〜 +##〝 +##〞 +##ぃ +##ぇ +##ぬ +##ふ +##ほ +##む +##ゃ +##ゅ +##ゆ +##ょ +##゜ +##ゝ +##ァ +##ゥ +##エ +##ォ +##ケ +##サ +##セ +##ソ +##ッ +##ニ +##ヌ +##ネ +##ノ +##ヘ +##モ +##ャ +##ヤ +##ュ +##ユ +##ョ +##ヨ +##ワ +##ヲ +##・ +##ヽ +##ㄅ +##ㄆ +##ㄇ +##ㄉ +##ㄋ +##ㄌ +##ㄍ +##ㄎ +##ㄏ +##ㄒ +##ㄚ +##ㄛ +##ㄞ +##ㄟ +##ㄢ +##ㄤ +##ㄥ +##ㄧ +##ㄨ +##ㆍ +##㈦ +##㊣ +##㗎 +##一 +##丁 +##七 +##万 +##丈 +##三 +##上 +##下 +##不 +##与 +##丐 +##丑 +##专 +##且 +##丕 +##世 +##丘 +##丙 +##业 +##丛 +##东 +##丝 +##丞 +##丟 +##両 +##丢 +##两 +##严 +##並 +##丧 +##丨 +##个 +##丫 +##中 +##丰 +##串 +##临 +##丶 +##丸 +##丹 +##为 +##主 +##丼 +##丽 +##举 +##丿 +##乂 +##乃 +##久 +##么 +##义 +##之 +##乌 +##乍 +##乎 +##乏 +##乐 +##乒 +##乓 +##乔 +##乖 +##乗 +##乘 +##乙 +##乜 +##九 +##乞 +##也 +##习 +##乡 +##书 +##乩 +##买 +##乱 +##乳 +##乾 +##亀 +##亂 +##了 +##予 +##争 +##事 +##二 +##于 +##亏 +##云 +##互 +##五 +##井 +##亘 +##亙 +##亚 +##些 +##亜 +##亞 +##亟 +##亡 +##亢 +##交 +##亥 +##亦 +##产 +##亨 +##亩 +##享 +##京 +##亭 +##亮 +##亲 +##亳 +##亵 +##人 +##亿 +##什 +##仁 +##仃 +##仄 +##仅 +##仆 +##仇 +##今 +##介 +##仍 +##从 +##仏 +##仑 +##仓 +##仔 +##仕 +##他 +##仗 +##付 +##仙 +##仝 +##仞 +##仟 +##代 +##令 +##以 +##仨 +##仪 +##们 +##仮 +##仰 +##仲 +##件 +##价 +##任 +##份 +##仿 +##企 +##伉 +##伊 +##伍 +##伎 +##伏 +##伐 +##休 +##伕 +##众 +##优 +##伙 +##会 +##伝 +##伞 +##伟 +##传 +##伢 +##伤 +##伦 +##伪 +##伫 +##伯 +##估 +##伴 +##伶 +##伸 +##伺 +##似 +##伽 +##佃 +##但 +##佇 +##佈 +##位 +##低 +##住 +##佐 +##佑 +##体 +##佔 +##何 +##佗 +##佘 +##余 +##佚 +##佛 +##作 +##佝 +##佞 +##佟 +##你 +##佢 +##佣 +##佤 +##佥 +##佩 +##佬 +##佯 +##佰 +##佳 +##併 +##佶 +##佻 +##佼 +##使 +##侃 +##侄 +##來 +##侈 +##例 +##侍 +##侏 +##侑 +##侖 +##侗 +##供 +##依 +##侠 +##価 +##侣 +##侥 +##侦 +##侧 +##侨 +##侬 +##侮 +##侯 +##侵 +##侶 +##侷 +##便 +##係 +##促 +##俄 +##俊 +##俎 +##俏 +##俐 +##俑 +##俗 +##俘 +##俚 +##保 +##俞 +##俟 +##俠 +##信 +##俨 +##俩 +##俪 +##俬 +##俭 +##修 +##俯 +##俱 +##俳 +##俸 +##俺 +##俾 +##倆 +##倉 +##個 +##倌 +##倍 +##倏 +##們 +##倒 +##倔 +##倖 +##倘 +##候 +##倚 +##倜 +##借 +##倡 +##値 +##倦 +##倩 +##倪 +##倫 +##倬 +##倭 +##倶 +##债 +##值 +##倾 +##偃 +##假 +##偈 +##偉 +##偌 +##偎 +##偏 +##偕 +##做 +##停 +##健 +##側 +##偵 +##偶 +##偷 +##偻 +##偽 +##偿 +##傀 +##傅 +##傍 +##傑 +##傘 +##備 +##傚 +##傢 +##傣 +##傥 +##储 +##傩 +##催 +##傭 +##傲 +##傳 +##債 +##傷 +##傻 +##傾 +##僅 +##働 +##像 +##僑 +##僕 +##僖 +##僚 +##僥 +##僧 +##僭 +##僮 +##僱 +##僵 +##價 +##僻 +##儀 +##儂 +##億 +##儆 +##儉 +##儋 +##儒 +##儕 +##儘 +##償 +##儡 +##優 +##儲 +##儷 +##儼 +##儿 +##兀 +##允 +##元 +##兄 +##充 +##兆 +##兇 +##先 +##光 +##克 +##兌 +##免 +##児 +##兑 +##兒 +##兔 +##兖 +##党 +##兜 +##兢 +##入 +##內 +##全 +##兩 +##八 +##公 +##六 +##兮 +##兰 +##共 +##兲 +##关 +##兴 +##兵 +##其 +##具 +##典 +##兹 +##养 +##兼 +##兽 +##冀 +##内 +##円 +##冇 +##冈 +##冉 +##冊 +##册 +##再 +##冏 +##冒 +##冕 +##冗 +##写 +##军 +##农 +##冠 +##冢 +##冤 +##冥 +##冨 +##冪 +##冬 +##冯 +##冰 +##冲 +##决 +##况 +##冶 +##冷 +##冻 +##冼 +##冽 +##冾 +##净 +##凄 +##准 +##凇 +##凈 +##凉 +##凋 +##凌 +##凍 +##减 +##凑 +##凛 +##凜 +##凝 +##几 +##凡 +##凤 +##処 +##凪 +##凭 +##凯 +##凰 +##凱 +##凳 +##凶 +##凸 +##凹 +##出 +##击 +##函 +##凿 +##刀 +##刁 +##刃 +##分 +##切 +##刈 +##刊 +##刍 +##刎 +##刑 +##划 +##列 +##刘 
+##则 +##刚 +##创 +##初 +##删 +##判 +##別 +##刨 +##利 +##刪 +##别 +##刮 +##到 +##制 +##刷 +##券 +##刹 +##刺 +##刻 +##刽 +##剁 +##剂 +##剃 +##則 +##剉 +##削 +##剋 +##剌 +##前 +##剎 +##剐 +##剑 +##剔 +##剖 +##剛 +##剜 +##剝 +##剣 +##剤 +##剥 +##剧 +##剩 +##剪 +##副 +##割 +##創 +##剷 +##剽 +##剿 +##劃 +##劇 +##劈 +##劉 +##劊 +##劍 +##劏 +##劑 +##力 +##劝 +##办 +##功 +##加 +##务 +##劣 +##动 +##助 +##努 +##劫 +##劭 +##励 +##劲 +##劳 +##労 +##劵 +##効 +##劾 +##势 +##勁 +##勃 +##勇 +##勉 +##勋 +##勐 +##勒 +##動 +##勖 +##勘 +##務 +##勛 +##勝 +##勞 +##募 +##勢 +##勤 +##勧 +##勳 +##勵 +##勸 +##勺 +##勻 +##勾 +##勿 +##匀 +##包 +##匆 +##匈 +##匍 +##匐 +##匕 +##化 +##北 +##匙 +##匝 +##匠 +##匡 +##匣 +##匪 +##匮 +##匯 +##匱 +##匹 +##区 +##医 +##匾 +##匿 +##區 +##十 +##千 +##卅 +##升 +##午 +##卉 +##半 +##卍 +##华 +##协 +##卑 +##卒 +##卓 +##協 +##单 +##卖 +##南 +##単 +##博 +##卜 +##卞 +##卟 +##占 +##卡 +##卢 +##卤 +##卦 +##卧 +##卫 +##卮 +##卯 +##印 +##危 +##即 +##却 +##卵 +##卷 +##卸 +##卻 +##卿 +##厂 +##厄 +##厅 +##历 +##厉 +##压 +##厌 +##厕 +##厘 +##厚 +##厝 +##原 +##厢 +##厥 +##厦 +##厨 +##厩 +##厭 +##厮 +##厲 +##厳 +##去 +##县 +##叁 +##参 +##參 +##又 +##叉 +##及 +##友 +##双 +##反 +##収 +##发 +##叔 +##取 +##受 +##变 +##叙 +##叛 +##叟 +##叠 +##叡 +##叢 +##口 +##古 +##句 +##另 +##叨 +##叩 +##只 +##叫 +##召 +##叭 +##叮 +##可 +##台 +##叱 +##史 +##右 +##叵 +##叶 +##号 +##司 +##叹 +##叻 +##叼 +##叽 +##吁 +##吃 +##各 +##吆 +##合 +##吉 +##吊 +##吋 +##同 +##名 +##后 +##吏 +##吐 +##向 +##吒 +##吓 +##吕 +##吖 +##吗 +##君 +##吝 +##吞 +##吟 +##吠 +##吡 +##否 +##吧 +##吨 +##吩 +##含 +##听 +##吭 +##吮 +##启 +##吱 +##吳 +##吴 +##吵 +##吶 +##吸 +##吹 +##吻 +##吼 +##吽 +##吾 +##呀 +##呂 +##呃 +##呆 +##呈 +##告 +##呋 +##呎 +##呐 +##呓 +##呕 +##呗 +##员 +##呛 +##呜 +##呢 +##呤 +##呦 +##周 +##呱 +##呲 +##味 +##呵 +##呷 +##呸 +##呻 +##呼 +##命 +##咀 +##咁 +##咂 +##咄 +##咆 +##咋 +##和 +##咎 +##咏 +##咐 +##咒 +##咔 +##咕 +##咖 +##咗 +##咘 +##咙 +##咚 +##咛 +##咣 +##咤 +##咦 +##咧 +##咨 +##咩 +##咪 +##咫 +##咬 +##咭 +##咯 +##咱 +##咲 +##咳 +##咸 +##咻 +##咽 +##咿 +##哀 +##品 +##哂 +##哄 +##哆 +##哇 +##哈 +##哉 +##哋 +##哌 +##响 +##哎 +##哏 +##哐 +##哑 +##哒 +##哔 +##哗 +##哟 +##員 +##哥 +##哦 +##哧 +##哨 +##哩 +##哪 +##哭 +##哮 +##哲 +##哺 +##哼 +##哽 +##唁 +##唄 +##唆 +##唇 +##唉 +##唏 +##唐 +##唑 +##唔 +##唠 +##唤 +##唧 +##唬 +##售 +##唯 +##唰 +##唱 +##唳 +##唷 +##唸 +##唾 +##啃 +##啄 +##商 +##啉 +##啊 +##問 +##啓 +##啕 +##啖 +##啜 +##啞 +##啟 +##啡 +##啤 +##啥 +##啦 +##啧 +##啪 +##啫 +##啬 +##啮 +##啰 +##啱 +##啲 +##啵 +##啶 +##啷 +##啸 +##啻 +##啼 +##啾 +##喀 +##喂 +##喃 +##善 +##喆 +##喇 +##喉 +##喊 +##喋 +##喎 +##喏 +##喔 +##喘 +##喙 +##喚 +##喜 +##喝 +##喟 +##喧 +##喪 +##喫 +##喬 +##單 +##喰 +##喱 +##喲 +##喳 +##喵 +##営 +##喷 +##喹 +##喺 +##喻 +##喽 +##嗅 +##嗆 +##嗇 +##嗎 +##嗑 +##嗒 +##嗓 +##嗔 +##嗖 +##嗚 +##嗜 +##嗝 +##嗟 +##嗡 +##嗣 +##嗤 +##嗦 +##嗨 +##嗪 +##嗬 +##嗯 +##嗰 +##嗲 +##嗳 +##嗶 +##嗷 +##嗽 +##嘀 +##嘅 +##嘆 +##嘈 +##嘉 +##嘌 +##嘍 +##嘎 +##嘔 +##嘖 +##嘗 +##嘘 +##嘚 +##嘛 +##嘜 +##嘞 +##嘟 +##嘢 +##嘣 +##嘤 +##嘧 +##嘩 +##嘭 +##嘮 +##嘯 +##嘰 +##嘱 +##嘲 +##嘴 +##嘶 +##嘸 +##嘹 +##嘻 +##嘿 +##噁 +##噌 +##噎 +##噓 +##噔 +##噗 +##噙 +##噜 +##噠 +##噢 +##噤 +##器 +##噩 +##噪 +##噬 +##噱 +##噴 +##噶 +##噸 +##噹 +##噻 +##噼 +##嚀 +##嚇 +##嚎 +##嚏 +##嚐 +##嚓 +##嚕 +##嚟 +##嚣 +##嚥 +##嚨 +##嚮 +##嚴 +##嚷 +##嚼 +##囂 +##囉 +##囊 +##囍 +##囑 +##囔 +##囗 +##囚 +##四 +##囝 +##回 +##囟 +##因 +##囡 +##团 +##団 +##囤 +##囧 +##囪 +##囫 +##园 +##困 +##囱 +##囲 +##図 +##围 +##囹 +##固 +##国 +##图 +##囿 +##圃 +##圄 +##圆 +##圈 +##國 +##圍 +##圏 +##園 +##圓 +##圖 +##團 +##圜 +##土 +##圣 +##圧 +##在 +##圩 +##圭 +##地 +##圳 +##场 +##圻 +##圾 +##址 +##坂 +##均 +##坊 +##坍 +##坎 +##坏 +##坐 +##坑 +##块 +##坚 +##坛 +##坝 +##坞 +##坟 +##坠 +##坡 +##坤 +##坦 +##坨 +##坪 +##坯 +##坳 +##坵 +##坷 +##垂 +##垃 +##垄 +##型 +##垒 +##垚 +##垛 +##垠 +##垢 +##垣 +##垦 +##垩 +##垫 +##垭 +##垮 +##垵 +##埂 +##埃 +##埋 +##城 +##埔 +##埕 +##埗 +##域 +##埠 +##埤 +##埵 +##執 +##埸 +##培 +##基 +##埼 +##堀 +##堂 +##堃 +##堅 +##堆 +##堇 +##堑 +##堕 +##堙 +##堡 +##堤 +##堪 +##堯 +##堰 +##報 +##場 +##堵 +##堺 +##堿 +##塊 +##塌 +##塑 +##塔 +##塗 +##塘 +##塚 +##塞 +##塢 +##塩 +##填 +##塬 +##塭 +##塵 +##塾 +##墀 +##境 +##墅 +##墉 +##墊 +##墒 
+##墓 +##増 +##墘 +##墙 +##墜 +##增 +##墟 +##墨 +##墩 +##墮 +##墳 +##墻 +##墾 +##壁 +##壅 +##壆 +##壇 +##壊 +##壑 +##壓 +##壕 +##壘 +##壞 +##壟 +##壢 +##壤 +##壩 +##士 +##壬 +##壮 +##壯 +##声 +##売 +##壳 +##壶 +##壹 +##壺 +##壽 +##处 +##备 +##変 +##复 +##夏 +##夔 +##夕 +##外 +##夙 +##多 +##夜 +##够 +##夠 +##夢 +##夥 +##大 +##天 +##太 +##夫 +##夭 +##央 +##夯 +##失 +##头 +##夷 +##夸 +##夹 +##夺 +##夾 +##奂 +##奄 +##奇 +##奈 +##奉 +##奋 +##奎 +##奏 +##奐 +##契 +##奔 +##奕 +##奖 +##套 +##奘 +##奚 +##奠 +##奢 +##奥 +##奧 +##奪 +##奬 +##奮 +##女 +##奴 +##奶 +##奸 +##她 +##好 +##如 +##妃 +##妄 +##妆 +##妇 +##妈 +##妊 +##妍 +##妒 +##妓 +##妖 +##妘 +##妙 +##妝 +##妞 +##妣 +##妤 +##妥 +##妨 +##妩 +##妪 +##妮 +##妲 +##妳 +##妹 +##妻 +##妾 +##姆 +##姉 +##姊 +##始 +##姍 +##姐 +##姑 +##姒 +##姓 +##委 +##姗 +##姚 +##姜 +##姝 +##姣 +##姥 +##姦 +##姨 +##姪 +##姫 +##姬 +##姹 +##姻 +##姿 +##威 +##娃 +##娄 +##娅 +##娆 +##娇 +##娉 +##娑 +##娓 +##娘 +##娛 +##娜 +##娟 +##娠 +##娣 +##娥 +##娩 +##娱 +##娲 +##娴 +##娶 +##娼 +##婀 +##婁 +##婆 +##婉 +##婊 +##婕 +##婚 +##婢 +##婦 +##婧 +##婪 +##婭 +##婴 +##婵 +##婶 +##婷 +##婺 +##婿 +##媒 +##媚 +##媛 +##媞 +##媧 +##媲 +##媳 +##媽 +##媾 +##嫁 +##嫂 +##嫉 +##嫌 +##嫑 +##嫔 +##嫖 +##嫘 +##嫚 +##嫡 +##嫣 +##嫦 +##嫩 +##嫲 +##嫵 +##嫻 +##嬅 +##嬉 +##嬌 +##嬗 +##嬛 +##嬢 +##嬤 +##嬪 +##嬰 +##嬴 +##嬷 +##嬸 +##嬿 +##孀 +##孃 +##子 +##孑 +##孔 +##孕 +##孖 +##字 +##存 +##孙 +##孚 +##孛 +##孜 +##孝 +##孟 +##孢 +##季 +##孤 +##学 +##孩 +##孪 +##孫 +##孬 +##孰 +##孱 +##孳 +##孵 +##學 +##孺 +##孽 +##孿 +##宁 +##它 +##宅 +##宇 +##守 +##安 +##宋 +##完 +##宏 +##宓 +##宕 +##宗 +##官 +##宙 +##定 +##宛 +##宜 +##宝 +##实 +##実 +##宠 +##审 +##客 +##宣 +##室 +##宥 +##宦 +##宪 +##宫 +##宮 +##宰 +##害 +##宴 +##宵 +##家 +##宸 +##容 +##宽 +##宾 +##宿 +##寂 +##寄 +##寅 +##密 +##寇 +##富 +##寐 +##寒 +##寓 +##寛 +##寝 +##寞 +##察 +##寡 +##寢 +##寥 +##實 +##寧 +##寨 +##審 +##寫 +##寬 +##寮 +##寰 +##寵 +##寶 +##寸 +##对 +##寺 +##寻 +##导 +##対 +##寿 +##封 +##専 +##射 +##将 +##將 +##專 +##尉 +##尊 +##尋 +##對 +##導 +##小 +##少 +##尔 +##尕 +##尖 +##尘 +##尚 +##尝 +##尤 +##尧 +##尬 +##就 +##尴 +##尷 +##尸 +##尹 +##尺 +##尻 +##尼 +##尽 +##尾 +##尿 +##局 +##屁 +##层 +##屄 +##居 +##屆 +##屈 +##屉 +##届 +##屋 +##屌 +##屍 +##屎 +##屏 +##屐 +##屑 +##展 +##屜 +##属 +##屠 +##屡 +##屢 +##層 +##履 +##屬 +##屯 +##山 +##屹 +##屿 +##岀 +##岁 +##岂 +##岌 +##岐 +##岑 +##岔 +##岖 +##岗 +##岘 +##岙 +##岚 +##岛 +##岡 +##岩 +##岫 +##岬 +##岭 +##岱 +##岳 +##岷 +##岸 +##峇 +##峋 +##峒 +##峙 +##峡 +##峤 +##峥 +##峦 +##峨 +##峪 +##峭 +##峯 +##峰 +##峴 +##島 +##峻 +##峽 +##崁 +##崂 +##崆 +##崇 +##崎 +##崑 +##崔 +##崖 +##崗 +##崙 +##崛 +##崧 +##崩 +##崭 +##崴 +##崽 +##嵇 +##嵊 +##嵋 +##嵌 +##嵐 +##嵘 +##嵩 +##嵬 +##嵯 +##嶂 +##嶄 +##嶇 +##嶋 +##嶙 +##嶺 +##嶼 +##嶽 +##巅 +##巍 +##巒 +##巔 +##巖 +##川 +##州 +##巡 +##巢 +##工 +##左 +##巧 +##巨 +##巩 +##巫 +##差 +##己 +##已 +##巳 +##巴 +##巷 +##巻 +##巽 +##巾 +##巿 +##币 +##市 +##布 +##帅 +##帆 +##师 +##希 +##帐 +##帑 +##帕 +##帖 +##帘 +##帚 +##帛 +##帜 +##帝 +##帥 +##带 +##帧 +##師 +##席 +##帮 +##帯 +##帰 +##帳 +##帶 +##帷 +##常 +##帼 +##帽 +##幀 +##幂 +##幄 +##幅 +##幌 +##幔 +##幕 +##幟 +##幡 +##幢 +##幣 +##幫 +##干 +##平 +##年 +##并 +##幸 +##幹 +##幺 +##幻 +##幼 +##幽 +##幾 +##广 +##庁 +##広 +##庄 +##庆 +##庇 +##床 +##序 +##庐 +##库 +##应 +##底 +##庖 +##店 +##庙 +##庚 +##府 +##庞 +##废 +##庠 +##度 +##座 +##庫 +##庭 +##庵 +##庶 +##康 +##庸 +##庹 +##庾 +##廁 +##廂 +##廃 +##廈 +##廉 +##廊 +##廓 +##廖 +##廚 +##廝 +##廟 +##廠 +##廢 +##廣 +##廬 +##廳 +##延 +##廷 +##建 +##廿 +##开 +##弁 +##异 +##弃 +##弄 +##弈 +##弊 +##弋 +##式 +##弑 +##弒 +##弓 +##弔 +##引 +##弗 +##弘 +##弛 +##弟 +##张 +##弥 +##弦 +##弧 +##弩 +##弭 +##弯 +##弱 +##張 +##強 +##弹 +##强 +##弼 +##弾 +##彅 +##彆 +##彈 +##彌 +##彎 +##归 +##当 +##录 +##彗 +##彙 +##彝 +##形 +##彤 +##彥 +##彦 +##彧 +##彩 +##彪 +##彫 +##彬 +##彭 +##彰 +##影 +##彷 +##役 +##彻 +##彼 +##彿 +##往 +##征 +##径 +##待 +##徇 +##很 +##徉 +##徊 +##律 +##後 +##徐 +##徑 +##徒 +##従 +##徕 +##得 +##徘 +##徙 +##徜 +##從 +##徠 +##御 +##徨 +##復 +##循 +##徬 +##微 +##徳 +##徴 +##徵 +##德 +##徹 +##徼 +##徽 +##心 +##必 +##忆 +##忌 +##忍 +##忏 +##忐 +##忑 +##忒 +##忖 +##志 +##忘 +##忙 +##応 +##忠 +##忡 +##忤 +##忧 +##忪 +##快 +##忱 +##念 +##忻 +##忽 +##忿 +##怀 
+##态 +##怂 +##怅 +##怆 +##怎 +##怏 +##怒 +##怔 +##怕 +##怖 +##怙 +##怜 +##思 +##怠 +##怡 +##急 +##怦 +##性 +##怨 +##怪 +##怯 +##怵 +##总 +##怼 +##恁 +##恃 +##恆 +##恋 +##恍 +##恐 +##恒 +##恕 +##恙 +##恚 +##恢 +##恣 +##恤 +##恥 +##恨 +##恩 +##恪 +##恫 +##恬 +##恭 +##息 +##恰 +##恳 +##恵 +##恶 +##恸 +##恺 +##恻 +##恼 +##恿 +##悄 +##悅 +##悉 +##悌 +##悍 +##悔 +##悖 +##悚 +##悟 +##悠 +##患 +##悦 +##您 +##悩 +##悪 +##悬 +##悯 +##悱 +##悲 +##悴 +##悵 +##悶 +##悸 +##悻 +##悼 +##悽 +##情 +##惆 +##惇 +##惊 +##惋 +##惑 +##惕 +##惘 +##惚 +##惜 +##惟 +##惠 +##惡 +##惦 +##惧 +##惨 +##惩 +##惫 +##惬 +##惭 +##惮 +##惯 +##惰 +##惱 +##想 +##惴 +##惶 +##惹 +##惺 +##愁 +##愆 +##愈 +##愉 +##愍 +##意 +##愕 +##愚 +##愛 +##愜 +##感 +##愣 +##愤 +##愧 +##愫 +##愷 +##愿 +##慄 +##慈 +##態 +##慌 +##慎 +##慑 +##慕 +##慘 +##慚 +##慟 +##慢 +##慣 +##慧 +##慨 +##慫 +##慮 +##慰 +##慳 +##慵 +##慶 +##慷 +##慾 +##憂 +##憊 +##憋 +##憎 +##憐 +##憑 +##憔 +##憚 +##憤 +##憧 +##憨 +##憩 +##憫 +##憬 +##憲 +##憶 +##憾 +##懂 +##懇 +##懈 +##應 +##懊 +##懋 +##懑 +##懒 +##懦 +##懲 +##懵 +##懶 +##懷 +##懸 +##懺 +##懼 +##懾 +##懿 +##戀 +##戈 +##戊 +##戌 +##戍 +##戎 +##戏 +##成 +##我 +##戒 +##戕 +##或 +##战 +##戚 +##戛 +##戟 +##戡 +##戦 +##截 +##戬 +##戮 +##戰 +##戲 +##戳 +##戴 +##戶 +##户 +##戸 +##戻 +##戾 +##房 +##所 +##扁 +##扇 +##扈 +##扉 +##手 +##才 +##扎 +##扑 +##扒 +##打 +##扔 +##払 +##托 +##扛 +##扣 +##扦 +##执 +##扩 +##扪 +##扫 +##扬 +##扭 +##扮 +##扯 +##扰 +##扱 +##扳 +##扶 +##批 +##扼 +##找 +##承 +##技 +##抄 +##抉 +##把 +##抑 +##抒 +##抓 +##投 +##抖 +##抗 +##折 +##抚 +##抛 +##抜 +##択 +##抟 +##抠 +##抡 +##抢 +##护 +##报 +##抨 +##披 +##抬 +##抱 +##抵 +##抹 +##押 +##抽 +##抿 +##拂 +##拄 +##担 +##拆 +##拇 +##拈 +##拉 +##拋 +##拌 +##拍 +##拎 +##拐 +##拒 +##拓 +##拔 +##拖 +##拗 +##拘 +##拙 +##拚 +##招 +##拜 +##拟 +##拡 +##拢 +##拣 +##拥 +##拦 +##拧 +##拨 +##择 +##括 +##拭 +##拮 +##拯 +##拱 +##拳 +##拴 +##拷 +##拼 +##拽 +##拾 +##拿 +##持 +##挂 +##指 +##挈 +##按 +##挎 +##挑 +##挖 +##挙 +##挚 +##挛 +##挝 +##挞 +##挟 +##挠 +##挡 +##挣 +##挤 +##挥 +##挨 +##挪 +##挫 +##振 +##挲 +##挹 +##挺 +##挽 +##挾 +##捂 +##捅 +##捆 +##捉 +##捋 +##捌 +##捍 +##捎 +##捏 +##捐 +##捕 +##捞 +##损 +##捡 +##换 +##捣 +##捧 +##捨 +##捩 +##据 +##捱 +##捲 +##捶 +##捷 +##捺 +##捻 +##掀 +##掂 +##掃 +##掇 +##授 +##掉 +##掌 +##掏 +##掐 +##排 +##掖 +##掘 +##掙 +##掛 +##掠 +##採 +##探 +##掣 +##接 +##控 +##推 +##掩 +##措 +##掬 +##掰 +##掲 +##掳 +##掴 +##掷 +##掸 +##掺 +##揀 +##揃 +##揄 +##揆 +##揉 +##揍 +##描 +##提 +##插 +##揖 +##揚 +##換 +##握 +##揣 +##揩 +##揪 +##揭 +##揮 +##援 +##揶 +##揸 +##揹 +##揽 +##搀 +##搁 +##搂 +##搅 +##損 +##搏 +##搐 +##搓 +##搔 +##搖 +##搗 +##搜 +##搞 +##搡 +##搪 +##搬 +##搭 +##搵 +##搶 +##携 +##搽 +##摀 +##摁 +##摄 +##摆 +##摇 +##摈 +##摊 +##摒 +##摔 +##摘 +##摞 +##摟 +##摧 +##摩 +##摯 +##摳 +##摸 +##摹 +##摺 +##摻 +##撂 +##撃 +##撅 +##撇 +##撈 +##撐 +##撑 +##撒 +##撓 +##撕 +##撚 +##撞 +##撤 +##撥 +##撩 +##撫 +##撬 +##播 +##撮 +##撰 +##撲 +##撵 +##撷 +##撸 +##撻 +##撼 +##撿 +##擀 +##擁 +##擂 +##擄 +##擅 +##擇 +##擊 +##擋 +##操 +##擎 +##擒 +##擔 +##擘 +##據 +##擞 +##擠 +##擡 +##擢 +##擦 +##擬 +##擰 +##擱 +##擲 +##擴 +##擷 +##擺 +##擼 +##擾 +##攀 +##攏 +##攒 +##攔 +##攘 +##攙 +##攜 +##攝 +##攞 +##攢 +##攣 +##攤 +##攥 +##攪 +##攫 +##攬 +##支 +##收 +##攸 +##改 +##攻 +##放 +##政 +##故 +##效 +##敌 +##敍 +##敎 +##敏 +##救 +##敕 +##敖 +##敗 +##敘 +##教 +##敛 +##敝 +##敞 +##敢 +##散 +##敦 +##敬 +##数 +##敲 +##整 +##敵 +##敷 +##數 +##斂 +##斃 +##文 +##斋 +##斌 +##斎 +##斐 +##斑 +##斓 +##斗 +##料 +##斛 +##斜 +##斟 +##斡 +##斤 +##斥 +##斧 +##斩 +##斫 +##斬 +##断 +##斯 +##新 +##斷 +##方 +##於 +##施 +##旁 +##旃 +##旅 +##旋 +##旌 +##旎 +##族 +##旖 +##旗 +##无 +##既 +##日 +##旦 +##旧 +##旨 +##早 +##旬 +##旭 +##旮 +##旱 +##时 +##旷 +##旺 +##旻 +##昀 +##昂 +##昆 +##昇 +##昉 +##昊 +##昌 +##明 +##昏 +##易 +##昔 +##昕 +##昙 +##星 +##映 +##春 +##昧 +##昨 +##昭 +##是 +##昱 +##昴 +##昵 +##昶 +##昼 +##显 +##晁 +##時 +##晃 +##晉 +##晋 +##晌 +##晏 +##晒 +##晓 +##晔 +##晕 +##晖 +##晗 +##晚 +##晝 +##晞 +##晟 +##晤 +##晦 +##晨 +##晩 +##普 +##景 +##晰 +##晴 +##晶 +##晷 +##智 +##晾 +##暂 +##暄 +##暇 +##暈 +##暉 +##暌 +##暐 +##暑 +##暖 +##暗 +##暝 +##暢 +##暧 +##暨 +##暫 +##暮 +##暱 +##暴 +##暸 +##暹 +##曄 +##曆 +##曇 +##曉 +##曖 +##曙 +##曜 +##曝 +##曠 +##曦 +##曬 +##曰 
+##曲 +##曳 +##更 +##書 +##曹 +##曼 +##曾 +##替 +##最 +##會 +##月 +##有 +##朋 +##服 +##朐 +##朔 +##朕 +##朗 +##望 +##朝 +##期 +##朦 +##朧 +##木 +##未 +##末 +##本 +##札 +##朮 +##术 +##朱 +##朴 +##朵 +##机 +##朽 +##杀 +##杂 +##权 +##杆 +##杈 +##杉 +##李 +##杏 +##材 +##村 +##杓 +##杖 +##杜 +##杞 +##束 +##杠 +##条 +##来 +##杨 +##杭 +##杯 +##杰 +##東 +##杳 +##杵 +##杷 +##杼 +##松 +##板 +##极 +##构 +##枇 +##枉 +##枋 +##析 +##枕 +##林 +##枚 +##果 +##枝 +##枢 +##枣 +##枪 +##枫 +##枭 +##枯 +##枰 +##枱 +##枳 +##架 +##枷 +##枸 +##柄 +##柏 +##某 +##柑 +##柒 +##染 +##柔 +##柘 +##柚 +##柜 +##柞 +##柠 +##柢 +##查 +##柩 +##柬 +##柯 +##柱 +##柳 +##柴 +##柵 +##査 +##柿 +##栀 +##栃 +##栄 +##栅 +##标 +##栈 +##栉 +##栋 +##栎 +##栏 +##树 +##栓 +##栖 +##栗 +##校 +##栩 +##株 +##样 +##核 +##根 +##格 +##栽 +##栾 +##桀 +##桁 +##桂 +##桃 +##桅 +##框 +##案 +##桉 +##桌 +##桎 +##桐 +##桑 +##桓 +##桔 +##桜 +##桠 +##桡 +##桢 +##档 +##桥 +##桦 +##桧 +##桨 +##桩 +##桶 +##桿 +##梁 +##梅 +##梆 +##梏 +##梓 +##梗 +##條 +##梟 +##梢 +##梦 +##梧 +##梨 +##梭 +##梯 +##械 +##梳 +##梵 +##梶 +##检 +##棂 +##棄 +##棉 +##棋 +##棍 +##棒 +##棕 +##棗 +##棘 +##棚 +##棟 +##棠 +##棣 +##棧 +##森 +##棱 +##棲 +##棵 +##棹 +##棺 +##椁 +##椅 +##椋 +##植 +##椎 +##椒 +##検 +##椪 +##椭 +##椰 +##椹 +##椽 +##椿 +##楂 +##楊 +##楓 +##楔 +##楚 +##楝 +##楞 +##楠 +##楣 +##楨 +##楫 +##業 +##楮 +##極 +##楷 +##楸 +##楹 +##楼 +##楽 +##概 +##榄 +##榆 +##榈 +##榉 +##榔 +##榕 +##榖 +##榛 +##榜 +##榨 +##榫 +##榭 +##榮 +##榱 +##榴 +##榷 +##榻 +##槁 +##槃 +##構 +##槌 +##槍 +##槎 +##槐 +##槓 +##様 +##槛 +##槟 +##槤 +##槭 +##槲 +##槳 +##槻 +##槽 +##槿 +##樁 +##樂 +##樊 +##樑 +##樓 +##標 +##樞 +##樟 +##模 +##樣 +##権 +##横 +##樫 +##樯 +##樱 +##樵 +##樸 +##樹 +##樺 +##樽 +##樾 +##橄 +##橇 +##橋 +##橐 +##橘 +##橙 +##機 +##橡 +##橢 +##橫 +##橱 +##橹 +##橼 +##檀 +##檄 +##檎 +##檐 +##檔 +##檗 +##檜 +##檢 +##檬 +##檯 +##檳 +##檸 +##檻 +##櫃 +##櫚 +##櫛 +##櫥 +##櫸 +##櫻 +##欄 +##權 +##欒 +##欖 +##欠 +##次 +##欢 +##欣 +##欧 +##欲 +##欸 +##欺 +##欽 +##款 +##歆 +##歇 +##歉 +##歌 +##歎 +##歐 +##歓 +##歙 +##歛 +##歡 +##止 +##正 +##此 +##步 +##武 +##歧 +##歩 +##歪 +##歯 +##歲 +##歳 +##歴 +##歷 +##歸 +##歹 +##死 +##歼 +##殁 +##殃 +##殆 +##殇 +##殉 +##殊 +##残 +##殒 +##殓 +##殖 +##殘 +##殞 +##殡 +##殤 +##殭 +##殯 +##殲 +##殴 +##段 +##殷 +##殺 +##殼 +##殿 +##毀 +##毁 +##毂 +##毅 +##毆 +##毋 +##母 +##毎 +##每 +##毒 +##毓 +##比 +##毕 +##毗 +##毘 +##毙 +##毛 +##毡 +##毫 +##毯 +##毽 +##氈 +##氏 +##氐 +##民 +##氓 +##气 +##氖 +##気 +##氙 +##氛 +##氟 +##氡 +##氢 +##氣 +##氤 +##氦 +##氧 +##氨 +##氪 +##氫 +##氮 +##氯 +##氰 +##氲 +##水 +##氷 +##永 +##氹 +##氾 +##汀 +##汁 +##求 +##汆 +##汇 +##汉 +##汎 +##汐 +##汕 +##汗 +##汙 +##汛 +##汝 +##汞 +##江 +##池 +##污 +##汤 +##汨 +##汩 +##汪 +##汰 +##汲 +##汴 +##汶 +##汹 +##決 +##汽 +##汾 +##沁 +##沂 +##沃 +##沅 +##沈 +##沉 +##沌 +##沏 +##沐 +##沒 +##沓 +##沖 +##沙 +##沛 +##沟 +##没 +##沢 +##沣 +##沥 +##沦 +##沧 +##沪 +##沫 +##沭 +##沮 +##沱 +##河 +##沸 +##油 +##治 +##沼 +##沽 +##沾 +##沿 +##況 +##泄 +##泉 +##泊 +##泌 +##泓 +##法 +##泗 +##泛 +##泞 +##泠 +##泡 +##波 +##泣 +##泥 +##注 +##泪 +##泫 +##泮 +##泯 +##泰 +##泱 +##泳 +##泵 +##泷 +##泸 +##泻 +##泼 +##泽 +##泾 +##洁 +##洄 +##洋 +##洒 +##洗 +##洙 +##洛 +##洞 +##津 +##洩 +##洪 +##洮 +##洱 +##洲 +##洵 +##洶 +##洸 +##洹 +##活 +##洼 +##洽 +##派 +##流 +##浃 +##浄 +##浅 +##浆 +##浇 +##浊 +##测 +##济 +##浏 +##浑 +##浒 +##浓 +##浔 +##浙 +##浚 +##浜 +##浣 +##浦 +##浩 +##浪 +##浬 +##浮 +##浯 +##浴 +##海 +##浸 +##涂 +##涅 +##涇 +##消 +##涉 +##涌 +##涎 +##涓 +##涔 +##涕 +##涙 +##涛 +##涝 +##涞 +##涟 +##涠 +##涡 +##涣 +##涤 +##润 +##涧 +##涨 +##涩 +##涪 +##涮 +##涯 +##液 +##涵 +##涸 +##涼 +##涿 +##淀 +##淄 +##淅 +##淆 +##淇 +##淋 +##淌 +##淑 +##淒 +##淖 +##淘 +##淙 +##淚 +##淞 +##淡 +##淤 +##淦 +##淨 +##淩 +##淪 +##淫 +##淬 +##淮 +##深 +##淳 +##淵 +##混 +##淹 +##淺 +##添 +##淼 +##清 +##済 +##渉 +##渊 +##渋 +##渍 +##渎 +##渐 +##渔 +##渗 +##渙 +##渚 +##減 +##渝 +##渠 +##渡 +##渣 +##渤 +##渥 +##渦 +##温 +##測 +##渭 +##港 +##渲 +##渴 +##游 +##渺 +##渾 +##湃 +##湄 +##湊 +##湍 +##湖 +##湘 +##湛 +##湟 +##湧 +##湫 +##湮 +##湯 +##湳 +##湾 +##湿 +##満 +##溃 +##溅 +##溉 +##溏 +##源 +##準 +##溜 +##溝 +##溟 +##溢 +##溥 +##溧 +##溪 +##溫 +##溯 +##溱 +##溴 +##溶 +##溺 +##溼 +##滁 +##滂 +##滄 +##滅 +##滇 +##滋 +##滌 +##滑 +##滓 
+##滔 +##滕 +##滙 +##滚 +##滝 +##滞 +##滟 +##满 +##滢 +##滤 +##滥 +##滦 +##滨 +##滩 +##滬 +##滯 +##滲 +##滴 +##滷 +##滸 +##滾 +##滿 +##漁 +##漂 +##漆 +##漉 +##漏 +##漓 +##演 +##漕 +##漠 +##漢 +##漣 +##漩 +##漪 +##漫 +##漬 +##漯 +##漱 +##漲 +##漳 +##漸 +##漾 +##漿 +##潆 +##潇 +##潋 +##潍 +##潑 +##潔 +##潘 +##潛 +##潜 +##潞 +##潟 +##潢 +##潤 +##潦 +##潧 +##潭 +##潮 +##潰 +##潴 +##潸 +##潺 +##潼 +##澀 +##澄 +##澆 +##澈 +##澍 +##澎 +##澗 +##澜 +##澡 +##澤 +##澧 +##澱 +##澳 +##澹 +##激 +##濁 +##濂 +##濃 +##濑 +##濒 +##濕 +##濘 +##濛 +##濟 +##濠 +##濡 +##濤 +##濫 +##濬 +##濮 +##濯 +##濱 +##濺 +##濾 +##瀅 +##瀆 +##瀉 +##瀋 +##瀏 +##瀑 +##瀕 +##瀘 +##瀚 +##瀛 +##瀝 +##瀞 +##瀟 +##瀧 +##瀨 +##瀬 +##瀰 +##瀾 +##灌 +##灏 +##灑 +##灘 +##灝 +##灞 +##灣 +##火 +##灬 +##灭 +##灯 +##灰 +##灵 +##灶 +##灸 +##灼 +##災 +##灾 +##灿 +##炀 +##炁 +##炅 +##炉 +##炊 +##炎 +##炒 +##炔 +##炕 +##炖 +##炙 +##炜 +##炫 +##炬 +##炭 +##炮 +##炯 +##炳 +##炷 +##炸 +##点 +##為 +##炼 +##炽 +##烁 +##烂 +##烃 +##烈 +##烊 +##烏 +##烘 +##烙 +##烛 +##烟 +##烤 +##烦 +##烧 +##烨 +##烩 +##烫 +##烬 +##热 +##烯 +##烷 +##烹 +##烽 +##焉 +##焊 +##焕 +##焖 +##焗 +##焘 +##焙 +##焚 +##焜 +##無 +##焦 +##焯 +##焰 +##焱 +##然 +##焼 +##煅 +##煉 +##煊 +##煌 +##煎 +##煒 +##煖 +##煙 +##煜 +##煞 +##煤 +##煥 +##煦 +##照 +##煨 +##煩 +##煮 +##煲 +##煸 +##煽 +##熄 +##熊 +##熏 +##熒 +##熔 +##熙 +##熟 +##熠 +##熨 +##熬 +##熱 +##熵 +##熹 +##熾 +##燁 +##燃 +##燄 +##燈 +##燉 +##燊 +##燎 +##燒 +##燔 +##燕 +##燙 +##燜 +##營 +##燥 +##燦 +##燧 +##燭 +##燮 +##燴 +##燻 +##燼 +##燿 +##爆 +##爍 +##爐 +##爛 +##爪 +##爬 +##爭 +##爰 +##爱 +##爲 +##爵 +##父 +##爷 +##爸 +##爹 +##爺 +##爻 +##爽 +##爾 +##牆 +##片 +##版 +##牌 +##牍 +##牒 +##牙 +##牛 +##牝 +##牟 +##牠 +##牡 +##牢 +##牦 +##牧 +##物 +##牯 +##牲 +##牴 +##牵 +##特 +##牺 +##牽 +##犀 +##犁 +##犄 +##犊 +##犍 +##犒 +##犢 +##犧 +##犬 +##犯 +##状 +##犷 +##犸 +##犹 +##狀 +##狂 +##狄 +##狈 +##狎 +##狐 +##狒 +##狗 +##狙 +##狞 +##狠 +##狡 +##狩 +##独 +##狭 +##狮 +##狰 +##狱 +##狸 +##狹 +##狼 +##狽 +##猎 +##猕 +##猖 +##猗 +##猙 +##猛 +##猜 +##猝 +##猥 +##猩 +##猪 +##猫 +##猬 +##献 +##猴 +##猶 +##猷 +##猾 +##猿 +##獄 +##獅 +##獎 +##獐 +##獒 +##獗 +##獠 +##獣 +##獨 +##獭 +##獰 +##獲 +##獵 +##獷 +##獸 +##獺 +##獻 +##獼 +##獾 +##玄 +##率 +##玉 +##王 +##玑 +##玖 +##玛 +##玟 +##玠 +##玥 +##玩 +##玫 +##玮 +##环 +##现 +##玲 +##玳 +##玷 +##玺 +##玻 +##珀 +##珂 +##珅 +##珈 +##珉 +##珊 +##珍 +##珏 +##珐 +##珑 +##珙 +##珞 +##珠 +##珣 +##珥 +##珩 +##珪 +##班 +##珮 +##珲 +##珺 +##現 +##球 +##琅 +##理 +##琇 +##琉 +##琊 +##琍 +##琏 +##琐 +##琛 +##琢 +##琥 +##琦 +##琨 +##琪 +##琬 +##琮 +##琰 +##琲 +##琳 +##琴 +##琵 +##琶 +##琺 +##琼 +##瑀 +##瑁 +##瑄 +##瑋 +##瑕 +##瑗 +##瑙 +##瑚 +##瑛 +##瑜 +##瑞 +##瑟 +##瑠 +##瑣 +##瑤 +##瑩 +##瑪 +##瑯 +##瑰 +##瑶 +##瑾 +##璀 +##璁 +##璃 +##璇 +##璉 +##璋 +##璎 +##璐 +##璜 +##璞 +##璟 +##璧 +##璨 +##環 +##璽 +##璿 +##瓊 +##瓏 +##瓒 +##瓜 +##瓢 +##瓣 +##瓤 +##瓦 +##瓮 +##瓯 +##瓴 +##瓶 +##瓷 +##甄 +##甌 +##甕 +##甘 +##甙 +##甚 +##甜 +##生 +##產 +##産 +##甥 +##甦 +##用 +##甩 +##甫 +##甬 +##甭 +##甯 +##田 +##由 +##甲 +##申 +##电 +##男 +##甸 +##町 +##画 +##甾 +##畀 +##畅 +##界 +##畏 +##畑 +##畔 +##留 +##畜 +##畝 +##畢 +##略 +##畦 +##番 +##畫 +##異 +##畲 +##畳 +##畴 +##當 +##畸 +##畹 +##畿 +##疆 +##疇 +##疊 +##疏 +##疑 +##疔 +##疖 +##疗 +##疙 +##疚 +##疝 +##疟 +##疡 +##疣 +##疤 +##疥 +##疫 +##疮 +##疯 +##疱 +##疲 +##疳 +##疵 +##疸 +##疹 +##疼 +##疽 +##疾 +##痂 +##病 +##症 +##痈 +##痉 +##痊 +##痍 +##痒 +##痔 +##痕 +##痘 +##痙 +##痛 +##痞 +##痠 +##痢 +##痣 +##痤 +##痧 +##痨 +##痪 +##痫 +##痰 +##痱 +##痴 +##痹 +##痺 +##痼 +##痿 +##瘀 +##瘁 +##瘋 +##瘍 +##瘓 +##瘘 +##瘙 +##瘟 +##瘠 +##瘡 +##瘢 +##瘤 +##瘦 +##瘧 +##瘩 +##瘪 +##瘫 +##瘴 +##瘸 +##瘾 +##療 +##癇 +##癌 +##癒 +##癖 +##癜 +##癞 +##癡 +##癢 +##癣 +##癥 +##癫 +##癬 +##癮 +##癱 +##癲 +##癸 +##発 +##登 +##發 +##白 +##百 +##皂 +##的 +##皆 +##皇 +##皈 +##皋 +##皎 +##皑 +##皓 +##皖 +##皙 +##皚 +##皮 +##皰 +##皱 +##皴 +##皺 +##皿 +##盂 +##盃 +##盅 +##盆 +##盈 +##益 +##盎 +##盏 +##盐 +##监 +##盒 +##盔 +##盖 +##盗 +##盘 +##盛 +##盜 +##盞 +##盟 +##盡 +##監 +##盤 +##盥 +##盧 +##盪 +##目 +##盯 +##盱 +##盲 +##直 +##相 +##盹 +##盼 +##盾 +##省 +##眈 +##眉 +##看 +##県 +##眙 +##眞 +##真 +##眠 +##眦 +##眨 +##眩 +##眯 +##眶 +##眷 +##眸 +##眺 +##眼 +##眾 +##着 +##睁 +##睇 
+##睏 +##睐 +##睑 +##睛 +##睜 +##睞 +##睡 +##睢 +##督 +##睥 +##睦 +##睨 +##睪 +##睫 +##睬 +##睹 +##睽 +##睾 +##睿 +##瞄 +##瞅 +##瞇 +##瞋 +##瞌 +##瞎 +##瞑 +##瞒 +##瞓 +##瞞 +##瞟 +##瞠 +##瞥 +##瞧 +##瞩 +##瞪 +##瞬 +##瞭 +##瞰 +##瞳 +##瞻 +##瞼 +##瞿 +##矇 +##矍 +##矗 +##矚 +##矛 +##矜 +##矢 +##矣 +##知 +##矩 +##矫 +##短 +##矮 +##矯 +##石 +##矶 +##矽 +##矾 +##矿 +##码 +##砂 +##砌 +##砍 +##砒 +##研 +##砖 +##砗 +##砚 +##砝 +##砣 +##砥 +##砧 +##砭 +##砰 +##砲 +##破 +##砷 +##砸 +##砺 +##砼 +##砾 +##础 +##硅 +##硐 +##硒 +##硕 +##硝 +##硫 +##硬 +##确 +##硯 +##硼 +##碁 +##碇 +##碉 +##碌 +##碍 +##碎 +##碑 +##碓 +##碗 +##碘 +##碚 +##碛 +##碟 +##碣 +##碧 +##碩 +##碰 +##碱 +##碳 +##碴 +##確 +##碼 +##碾 +##磁 +##磅 +##磊 +##磋 +##磐 +##磕 +##磚 +##磡 +##磨 +##磬 +##磯 +##磲 +##磷 +##磺 +##礁 +##礎 +##礙 +##礡 +##礦 +##礪 +##礫 +##礴 +##示 +##礼 +##社 +##祀 +##祁 +##祂 +##祇 +##祈 +##祉 +##祎 +##祐 +##祕 +##祖 +##祗 +##祚 +##祛 +##祜 +##祝 +##神 +##祟 +##祠 +##祢 +##祥 +##票 +##祭 +##祯 +##祷 +##祸 +##祺 +##祿 +##禀 +##禁 +##禄 +##禅 +##禍 +##禎 +##福 +##禛 +##禦 +##禧 +##禪 +##禮 +##禱 +##禹 +##禺 +##离 +##禽 +##禾 +##禿 +##秀 +##私 +##秃 +##秆 +##秉 +##秋 +##种 +##科 +##秒 +##秘 +##租 +##秣 +##秤 +##秦 +##秧 +##秩 +##秭 +##积 +##称 +##秸 +##移 +##秽 +##稀 +##稅 +##程 +##稍 +##税 +##稔 +##稗 +##稚 +##稜 +##稞 +##稟 +##稠 +##稣 +##種 +##稱 +##稲 +##稳 +##稷 +##稹 +##稻 +##稼 +##稽 +##稿 +##穀 +##穂 +##穆 +##穌 +##積 +##穎 +##穗 +##穢 +##穩 +##穫 +##穴 +##究 +##穷 +##穹 +##空 +##穿 +##突 +##窃 +##窄 +##窈 +##窍 +##窑 +##窒 +##窓 +##窕 +##窖 +##窗 +##窘 +##窜 +##窝 +##窟 +##窠 +##窥 +##窦 +##窨 +##窩 +##窪 +##窮 +##窯 +##窺 +##窿 +##竄 +##竅 +##竇 +##竊 +##立 +##竖 +##站 +##竜 +##竞 +##竟 +##章 +##竣 +##童 +##竭 +##端 +##競 +##竹 +##竺 +##竽 +##竿 +##笃 +##笆 +##笈 +##笋 +##笏 +##笑 +##笔 +##笙 +##笛 +##笞 +##笠 +##符 +##笨 +##第 +##笹 +##笺 +##笼 +##筆 +##等 +##筊 +##筋 +##筍 +##筏 +##筐 +##筑 +##筒 +##答 +##策 +##筛 +##筝 +##筠 +##筱 +##筲 +##筵 +##筷 +##筹 +##签 +##简 +##箇 +##箋 +##箍 +##箏 +##箐 +##箔 +##箕 +##算 +##箝 +##管 +##箩 +##箫 +##箭 +##箱 +##箴 +##箸 +##節 +##篁 +##範 +##篆 +##篇 +##築 +##篑 +##篓 +##篙 +##篝 +##篠 +##篡 +##篤 +##篩 +##篪 +##篮 +##篱 +##篷 +##簇 +##簌 +##簍 +##簡 +##簦 +##簧 +##簪 +##簫 +##簷 +##簸 +##簽 +##簾 +##簿 +##籁 +##籃 +##籌 +##籍 +##籐 +##籟 +##籠 +##籤 +##籬 +##籮 +##籲 +##米 +##类 +##籼 +##籽 +##粄 +##粉 +##粑 +##粒 +##粕 +##粗 +##粘 +##粟 +##粤 +##粥 +##粧 +##粪 +##粮 +##粱 +##粲 +##粳 +##粵 +##粹 +##粼 +##粽 +##精 +##粿 +##糅 +##糊 +##糍 +##糕 +##糖 +##糗 +##糙 +##糜 +##糞 +##糟 +##糠 +##糧 +##糬 +##糯 +##糰 +##糸 +##系 +##糾 +##紀 +##紂 +##約 +##紅 +##紉 +##紊 +##紋 +##納 +##紐 +##紓 +##純 +##紗 +##紘 +##紙 +##級 +##紛 +##紜 +##素 +##紡 +##索 +##紧 +##紫 +##紮 +##累 +##細 +##紳 +##紹 +##紺 +##終 +##絃 +##組 +##絆 +##経 +##結 +##絕 +##絞 +##絡 +##絢 +##給 +##絨 +##絮 +##統 +##絲 +##絳 +##絵 +##絶 +##絹 +##綁 +##綏 +##綑 +##經 +##継 +##続 +##綜 +##綠 +##綢 +##綦 +##綫 +##綬 +##維 +##綱 +##網 +##綴 +##綵 +##綸 +##綺 +##綻 +##綽 +##綾 +##綿 +##緊 +##緋 +##総 +##緑 +##緒 +##緘 +##線 +##緝 +##緞 +##締 +##緣 +##編 +##緩 +##緬 +##緯 +##練 +##緹 +##緻 +##縁 +##縄 +##縈 +##縛 +##縝 +##縣 +##縫 +##縮 +##縱 +##縴 +##縷 +##總 +##績 +##繁 +##繃 +##繆 +##繇 +##繋 +##織 +##繕 +##繚 +##繞 +##繡 +##繩 +##繪 +##繫 +##繭 +##繳 +##繹 +##繼 +##繽 +##纂 +##續 +##纍 +##纏 +##纓 +##纔 +##纖 +##纜 +##纠 +##红 +##纣 +##纤 +##约 +##级 +##纨 +##纪 +##纫 +##纬 +##纭 +##纯 +##纰 +##纱 +##纲 +##纳 +##纵 +##纶 +##纷 +##纸 +##纹 +##纺 +##纽 +##纾 +##线 +##绀 +##练 +##组 +##绅 +##细 +##织 +##终 +##绊 +##绍 +##绎 +##经 +##绑 +##绒 +##结 +##绔 +##绕 +##绘 +##给 +##绚 +##绛 +##络 +##绝 +##绞 +##统 +##绡 +##绢 +##绣 +##绥 +##绦 +##继 +##绩 +##绪 +##绫 +##续 +##绮 +##绯 +##绰 +##绳 +##维 +##绵 +##绶 +##绷 +##绸 +##绻 +##综 +##绽 +##绾 +##绿 +##缀 +##缄 +##缅 +##缆 +##缇 +##缈 +##缉 +##缎 +##缓 +##缔 +##缕 +##编 +##缘 +##缙 +##缚 +##缜 +##缝 +##缠 +##缢 +##缤 +##缥 +##缨 +##缩 +##缪 +##缭 +##缮 +##缰 +##缱 +##缴 +##缸 +##缺 +##缽 +##罂 +##罄 +##罌 +##罐 +##网 +##罔 +##罕 +##罗 +##罚 +##罡 +##罢 +##罩 +##罪 +##置 +##罰 +##署 +##罵 +##罷 +##罹 +##羁 +##羅 +##羈 +##羊 +##羌 +##美 +##羔 +##羚 +##羞 +##羟 +##羡 +##羣 +##群 +##羥 +##羧 +##羨 +##義 +##羯 +##羲 +##羸 +##羹 +##羽 +##羿 +##翁 +##翅 +##翊 
+##翌 +##翎 +##習 +##翔 +##翘 +##翟 +##翠 +##翡 +##翦 +##翩 +##翰 +##翱 +##翳 +##翹 +##翻 +##翼 +##耀 +##老 +##考 +##耄 +##者 +##耆 +##耋 +##而 +##耍 +##耐 +##耒 +##耕 +##耗 +##耘 +##耙 +##耦 +##耨 +##耳 +##耶 +##耷 +##耸 +##耻 +##耽 +##耿 +##聂 +##聆 +##聊 +##聋 +##职 +##聒 +##联 +##聖 +##聘 +##聚 +##聞 +##聪 +##聯 +##聰 +##聲 +##聳 +##聴 +##聶 +##職 +##聽 +##聾 +##聿 +##肃 +##肄 +##肅 +##肆 +##肇 +##肉 +##肋 +##肌 +##肏 +##肓 +##肖 +##肘 +##肚 +##肛 +##肝 +##肠 +##股 +##肢 +##肤 +##肥 +##肩 +##肪 +##肮 +##肯 +##肱 +##育 +##肴 +##肺 +##肽 +##肾 +##肿 +##胀 +##胁 +##胃 +##胄 +##胆 +##背 +##胍 +##胎 +##胖 +##胚 +##胛 +##胜 +##胝 +##胞 +##胡 +##胤 +##胥 +##胧 +##胫 +##胭 +##胯 +##胰 +##胱 +##胳 +##胴 +##胶 +##胸 +##胺 +##能 +##脂 +##脅 +##脆 +##脇 +##脈 +##脉 +##脊 +##脍 +##脏 +##脐 +##脑 +##脓 +##脖 +##脘 +##脚 +##脛 +##脣 +##脩 +##脫 +##脯 +##脱 +##脲 +##脳 +##脸 +##脹 +##脾 +##腆 +##腈 +##腊 +##腋 +##腌 +##腎 +##腐 +##腑 +##腓 +##腔 +##腕 +##腥 +##腦 +##腩 +##腫 +##腭 +##腮 +##腰 +##腱 +##腳 +##腴 +##腸 +##腹 +##腺 +##腻 +##腼 +##腾 +##腿 +##膀 +##膈 +##膊 +##膏 +##膑 +##膘 +##膚 +##膛 +##膜 +##膝 +##膠 +##膦 +##膨 +##膩 +##膳 +##膺 +##膻 +##膽 +##膾 +##膿 +##臀 +##臂 +##臃 +##臆 +##臉 +##臊 +##臍 +##臓 +##臘 +##臟 +##臣 +##臥 +##臧 +##臨 +##自 +##臬 +##臭 +##至 +##致 +##臺 +##臻 +##臼 +##臾 +##舀 +##舂 +##舅 +##舆 +##與 +##興 +##舉 +##舊 +##舌 +##舍 +##舎 +##舐 +##舒 +##舔 +##舖 +##舗 +##舛 +##舜 +##舞 +##舟 +##航 +##舫 +##般 +##舰 +##舱 +##舵 +##舶 +##舷 +##舸 +##船 +##舺 +##舾 +##艇 +##艋 +##艘 +##艙 +##艦 +##艮 +##良 +##艰 +##艱 +##色 +##艳 +##艷 +##艹 +##艺 +##艾 +##节 +##芃 +##芈 +##芊 +##芋 +##芍 +##芎 +##芒 +##芙 +##芜 +##芝 +##芡 +##芥 +##芦 +##芩 +##芪 +##芫 +##芬 +##芭 +##芮 +##芯 +##花 +##芳 +##芷 +##芸 +##芹 +##芻 +##芽 +##芾 +##苁 +##苄 +##苇 +##苋 +##苍 +##苏 +##苑 +##苒 +##苓 +##苔 +##苕 +##苗 +##苛 +##苜 +##苞 +##苟 +##苡 +##苣 +##若 +##苦 +##苫 +##苯 +##英 +##苷 +##苹 +##苻 +##茁 +##茂 +##范 +##茄 +##茅 +##茉 +##茎 +##茏 +##茗 +##茜 +##茧 +##茨 +##茫 +##茬 +##茭 +##茯 +##茱 +##茲 +##茴 +##茵 +##茶 +##茸 +##茹 +##茼 +##荀 +##荃 +##荆 +##草 +##荊 +##荏 +##荐 +##荒 +##荔 +##荖 +##荘 +##荚 +##荞 +##荟 +##荠 +##荡 +##荣 +##荤 +##荥 +##荧 +##荨 +##荪 +##荫 +##药 +##荳 +##荷 +##荸 +##荻 +##荼 +##荽 +##莅 +##莆 +##莉 +##莊 +##莎 +##莒 +##莓 +##莖 +##莘 +##莞 +##莠 +##莢 +##莧 +##莪 +##莫 +##莱 +##莲 +##莴 +##获 +##莹 +##莺 +##莽 +##莿 +##菀 +##菁 +##菅 +##菇 +##菈 +##菊 +##菌 +##菏 +##菓 +##菖 +##菘 +##菜 +##菟 +##菠 +##菡 +##菩 +##華 +##菱 +##菲 +##菸 +##菽 +##萁 +##萃 +##萄 +##萊 +##萋 +##萌 +##萍 +##萎 +##萘 +##萝 +##萤 +##营 +##萦 +##萧 +##萨 +##萩 +##萬 +##萱 +##萵 +##萸 +##萼 +##落 +##葆 +##葉 +##著 +##葚 +##葛 +##葡 +##董 +##葦 +##葩 +##葫 +##葬 +##葭 +##葯 +##葱 +##葳 +##葵 +##葷 +##葺 +##蒂 +##蒋 +##蒐 +##蒔 +##蒙 +##蒜 +##蒞 +##蒟 +##蒡 +##蒨 +##蒲 +##蒸 +##蒹 +##蒻 +##蒼 +##蒿 +##蓁 +##蓄 +##蓆 +##蓉 +##蓋 +##蓑 +##蓓 +##蓖 +##蓝 +##蓟 +##蓦 +##蓬 +##蓮 +##蓼 +##蓿 +##蔑 +##蔓 +##蔔 +##蔗 +##蔘 +##蔚 +##蔡 +##蔣 +##蔥 +##蔫 +##蔬 +##蔭 +##蔵 +##蔷 +##蔺 +##蔻 +##蔼 +##蔽 +##蕁 +##蕃 +##蕈 +##蕉 +##蕊 +##蕎 +##蕙 +##蕤 +##蕨 +##蕩 +##蕪 +##蕭 +##蕲 +##蕴 +##蕻 +##蕾 +##薄 +##薅 +##薇 +##薈 +##薊 +##薏 +##薑 +##薔 +##薙 +##薛 +##薦 +##薨 +##薩 +##薪 +##薬 +##薯 +##薰 +##薹 +##藉 +##藍 +##藏 +##藐 +##藓 +##藕 +##藜 +##藝 +##藤 +##藥 +##藩 +##藹 +##藻 +##藿 +##蘆 +##蘇 +##蘊 +##蘋 +##蘑 +##蘚 +##蘭 +##蘸 +##蘼 +##蘿 +##虎 +##虏 +##虐 +##虑 +##虔 +##處 +##虚 +##虛 +##虜 +##虞 +##號 +##虢 +##虧 +##虫 +##虬 +##虱 +##虹 +##虻 +##虽 +##虾 +##蚀 +##蚁 +##蚂 +##蚊 +##蚌 +##蚓 +##蚕 +##蚜 +##蚝 +##蚣 +##蚤 +##蚩 +##蚪 +##蚯 +##蚱 +##蚵 +##蛀 +##蛆 +##蛇 +##蛊 +##蛋 +##蛎 +##蛐 +##蛔 +##蛙 +##蛛 +##蛟 +##蛤 +##蛭 +##蛮 +##蛰 +##蛳 +##蛹 +##蛻 +##蛾 +##蜀 +##蜂 +##蜃 +##蜆 +##蜇 +##蜈 +##蜊 +##蜍 +##蜒 +##蜓 +##蜕 +##蜗 +##蜘 +##蜚 +##蜜 +##蜡 +##蜢 +##蜥 +##蜱 +##蜴 +##蜷 +##蜻 +##蜿 +##蝇 +##蝈 +##蝉 +##蝌 +##蝎 +##蝕 +##蝗 +##蝙 +##蝟 +##蝠 +##蝦 +##蝨 +##蝴 +##蝶 +##蝸 +##蝼 +##螂 +##螃 +##融 +##螞 +##螢 +##螨 +##螯 +##螳 +##螺 +##蟀 +##蟄 +##蟆 +##蟋 +##蟎 +##蟑 +##蟒 +##蟠 +##蟬 +##蟲 +##蟹 +##蟻 +##蟾 +##蠅 +##蠍 +##蠔 +##蠕 +##蠛 +##蠟 +##蠡 +##蠢 +##蠣 +##蠱 +##蠶 +##蠹 +##蠻 +##血 +##衄 +##衅 +##衆 +##行 +##衍 +##術 +##衔 +##街 +##衙 +##衛 +##衝 +##衞 +##衡 +##衢 +##衣 
+##补 +##表 +##衩 +##衫 +##衬 +##衮 +##衰 +##衲 +##衷 +##衹 +##衾 +##衿 +##袁 +##袂 +##袄 +##袅 +##袈 +##袋 +##袍 +##袒 +##袖 +##袜 +##袞 +##袤 +##袪 +##被 +##袭 +##袱 +##裁 +##裂 +##装 +##裆 +##裊 +##裏 +##裔 +##裕 +##裘 +##裙 +##補 +##裝 +##裟 +##裡 +##裤 +##裨 +##裱 +##裳 +##裴 +##裸 +##裹 +##製 +##裾 +##褂 +##複 +##褐 +##褒 +##褓 +##褔 +##褚 +##褥 +##褪 +##褫 +##褲 +##褶 +##褻 +##襁 +##襄 +##襟 +##襠 +##襪 +##襬 +##襯 +##襲 +##西 +##要 +##覃 +##覆 +##覇 +##見 +##規 +##覓 +##視 +##覚 +##覦 +##覧 +##親 +##覬 +##観 +##覷 +##覺 +##覽 +##觀 +##见 +##观 +##规 +##觅 +##视 +##览 +##觉 +##觊 +##觎 +##觐 +##觑 +##角 +##觞 +##解 +##觥 +##触 +##觸 +##言 +##訂 +##計 +##訊 +##討 +##訓 +##訕 +##訖 +##託 +##記 +##訛 +##訝 +##訟 +##訣 +##訥 +##訪 +##設 +##許 +##訳 +##訴 +##訶 +##診 +##註 +##証 +##詆 +##詐 +##詔 +##評 +##詛 +##詞 +##詠 +##詡 +##詢 +##詣 +##試 +##詩 +##詫 +##詬 +##詭 +##詮 +##詰 +##話 +##該 +##詳 +##詹 +##詼 +##誅 +##誇 +##誉 +##誌 +##認 +##誓 +##誕 +##誘 +##語 +##誠 +##誡 +##誣 +##誤 +##誥 +##誦 +##誨 +##說 +##説 +##読 +##誰 +##課 +##誹 +##誼 +##調 +##諄 +##談 +##請 +##諏 +##諒 +##論 +##諗 +##諜 +##諡 +##諦 +##諧 +##諫 +##諭 +##諮 +##諱 +##諳 +##諷 +##諸 +##諺 +##諾 +##謀 +##謁 +##謂 +##謄 +##謊 +##謎 +##謐 +##謔 +##謗 +##謙 +##講 +##謝 +##謠 +##謨 +##謬 +##謹 +##謾 +##譁 +##證 +##譎 +##譏 +##識 +##譙 +##譚 +##譜 +##警 +##譬 +##譯 +##議 +##譲 +##譴 +##護 +##譽 +##讀 +##變 +##讓 +##讚 +##讞 +##计 +##订 +##认 +##讥 +##讧 +##讨 +##让 +##讪 +##讫 +##训 +##议 +##讯 +##记 +##讲 +##讳 +##讴 +##讶 +##讷 +##许 +##讹 +##论 +##讼 +##讽 +##设 +##访 +##诀 +##证 +##诃 +##评 +##诅 +##识 +##诈 +##诉 +##诊 +##诋 +##词 +##诏 +##译 +##试 +##诗 +##诘 +##诙 +##诚 +##诛 +##话 +##诞 +##诟 +##诠 +##诡 +##询 +##诣 +##诤 +##该 +##详 +##诧 +##诩 +##诫 +##诬 +##语 +##误 +##诰 +##诱 +##诲 +##说 +##诵 +##诶 +##请 +##诸 +##诺 +##读 +##诽 +##课 +##诿 +##谀 +##谁 +##调 +##谄 +##谅 +##谆 +##谈 +##谊 +##谋 +##谌 +##谍 +##谎 +##谏 +##谐 +##谑 +##谒 +##谓 +##谔 +##谕 +##谗 +##谘 +##谙 +##谚 +##谛 +##谜 +##谟 +##谢 +##谣 +##谤 +##谥 +##谦 +##谧 +##谨 +##谩 +##谪 +##谬 +##谭 +##谯 +##谱 +##谲 +##谴 +##谶 +##谷 +##豁 +##豆 +##豇 +##豈 +##豉 +##豊 +##豌 +##豎 +##豐 +##豔 +##豚 +##象 +##豢 +##豪 +##豫 +##豬 +##豹 +##豺 +##貂 +##貅 +##貌 +##貓 +##貔 +##貘 +##貝 +##貞 +##負 +##財 +##貢 +##貧 +##貨 +##販 +##貪 +##貫 +##責 +##貯 +##貰 +##貳 +##貴 +##貶 +##買 +##貸 +##費 +##貼 +##貽 +##貿 +##賀 +##賁 +##賂 +##賃 +##賄 +##資 +##賈 +##賊 +##賑 +##賓 +##賜 +##賞 +##賠 +##賡 +##賢 +##賣 +##賤 +##賦 +##質 +##賬 +##賭 +##賴 +##賺 +##購 +##賽 +##贅 +##贈 +##贊 +##贍 +##贏 +##贓 +##贖 +##贛 +##贝 +##贞 +##负 +##贡 +##财 +##责 +##贤 +##败 +##账 +##货 +##质 +##贩 +##贪 +##贫 +##贬 +##购 +##贮 +##贯 +##贰 +##贱 +##贲 +##贴 +##贵 +##贷 +##贸 +##费 +##贺 +##贻 +##贼 +##贾 +##贿 +##赁 +##赂 +##赃 +##资 +##赅 +##赈 +##赊 +##赋 +##赌 +##赎 +##赏 +##赐 +##赓 +##赔 +##赖 +##赘 +##赚 +##赛 +##赝 +##赞 +##赠 +##赡 +##赢 +##赣 +##赤 +##赦 +##赧 +##赫 +##赭 +##走 +##赳 +##赴 +##赵 +##赶 +##起 +##趁 +##超 +##越 +##趋 +##趕 +##趙 +##趟 +##趣 +##趨 +##足 +##趴 +##趵 +##趸 +##趺 +##趾 +##跃 +##跄 +##跆 +##跋 +##跌 +##跎 +##跑 +##跖 +##跚 +##跛 +##距 +##跟 +##跡 +##跤 +##跨 +##跩 +##跪 +##路 +##跳 +##践 +##跷 +##跹 +##跺 +##跻 +##踉 +##踊 +##踌 +##踏 +##踐 +##踝 +##踞 +##踟 +##踢 +##踩 +##踪 +##踮 +##踱 +##踴 +##踵 +##踹 +##蹂 +##蹄 +##蹇 +##蹈 +##蹉 +##蹊 +##蹋 +##蹑 +##蹒 +##蹙 +##蹟 +##蹣 +##蹤 +##蹦 +##蹩 +##蹬 +##蹭 +##蹲 +##蹴 +##蹶 +##蹺 +##蹼 +##蹿 +##躁 +##躇 +##躉 +##躊 +##躋 +##躍 +##躏 +##躪 +##身 +##躬 +##躯 +##躲 +##躺 +##軀 +##車 +##軋 +##軌 +##軍 +##軒 +##軟 +##転 +##軸 +##軼 +##軽 +##軾 +##較 +##載 +##輒 +##輓 +##輔 +##輕 +##輛 +##輝 +##輟 +##輩 +##輪 +##輯 +##輸 +##輻 +##輾 +##輿 +##轄 +##轅 +##轆 +##轉 +##轍 +##轎 +##轟 +##车 +##轧 +##轨 +##轩 +##转 +##轭 +##轮 +##软 +##轰 +##轲 +##轴 +##轶 +##轻 +##轼 +##载 +##轿 +##较 +##辄 +##辅 +##辆 +##辇 +##辈 +##辉 +##辊 +##辍 +##辐 +##辑 +##输 +##辕 +##辖 +##辗 +##辘 +##辙 +##辛 +##辜 +##辞 +##辟 +##辣 +##辦 +##辨 +##辩 +##辫 +##辭 +##辮 +##辯 +##辰 +##辱 +##農 +##边 +##辺 +##辻 +##込 +##辽 +##达 +##迁 +##迂 +##迄 +##迅 +##过 +##迈 +##迎 +##运 +##近 +##返 +##还 +##这 +##进 +##远 +##违 +##连 +##迟 +##迢 +##迤 +##迥 +##迦 +##迩 +##迪 +##迫 +##迭 +##述 +##迴 +##迷 +##迸 +##迹 +##迺 +##追 +##退 +##送 
+##适 +##逃 +##逅 +##逆 +##选 +##逊 +##逍 +##透 +##逐 +##递 +##途 +##逕 +##逗 +##這 +##通 +##逛 +##逝 +##逞 +##速 +##造 +##逢 +##連 +##逮 +##週 +##進 +##逵 +##逶 +##逸 +##逻 +##逼 +##逾 +##遁 +##遂 +##遅 +##遇 +##遊 +##運 +##遍 +##過 +##遏 +##遐 +##遑 +##遒 +##道 +##達 +##違 +##遗 +##遙 +##遛 +##遜 +##遞 +##遠 +##遢 +##遣 +##遥 +##遨 +##適 +##遭 +##遮 +##遲 +##遴 +##遵 +##遶 +##遷 +##選 +##遺 +##遼 +##遽 +##避 +##邀 +##邁 +##邂 +##邃 +##還 +##邇 +##邈 +##邊 +##邋 +##邏 +##邑 +##邓 +##邕 +##邛 +##邝 +##邢 +##那 +##邦 +##邨 +##邪 +##邬 +##邮 +##邯 +##邰 +##邱 +##邳 +##邵 +##邸 +##邹 +##邺 +##邻 +##郁 +##郅 +##郊 +##郎 +##郑 +##郜 +##郝 +##郡 +##郢 +##郤 +##郦 +##郧 +##部 +##郫 +##郭 +##郴 +##郵 +##郷 +##郸 +##都 +##鄂 +##鄉 +##鄒 +##鄔 +##鄙 +##鄞 +##鄢 +##鄧 +##鄭 +##鄰 +##鄱 +##鄲 +##鄺 +##酉 +##酊 +##酋 +##酌 +##配 +##酐 +##酒 +##酗 +##酚 +##酝 +##酢 +##酣 +##酥 +##酩 +##酪 +##酬 +##酮 +##酯 +##酰 +##酱 +##酵 +##酶 +##酷 +##酸 +##酿 +##醃 +##醇 +##醉 +##醋 +##醍 +##醐 +##醒 +##醚 +##醛 +##醜 +##醞 +##醣 +##醪 +##醫 +##醬 +##醮 +##醯 +##醴 +##醺 +##釀 +##釁 +##采 +##釉 +##释 +##釋 +##里 +##重 +##野 +##量 +##釐 +##金 +##釗 +##釘 +##釜 +##針 +##釣 +##釦 +##釧 +##釵 +##鈀 +##鈉 +##鈍 +##鈎 +##鈔 +##鈕 +##鈞 +##鈣 +##鈦 +##鈪 +##鈴 +##鈺 +##鈾 +##鉀 +##鉄 +##鉅 +##鉉 +##鉑 +##鉗 +##鉚 +##鉛 +##鉤 +##鉴 +##鉻 +##銀 +##銃 +##銅 +##銑 +##銓 +##銖 +##銘 +##銜 +##銬 +##銭 +##銮 +##銳 +##銷 +##銹 +##鋁 +##鋅 +##鋒 +##鋤 +##鋪 +##鋰 +##鋸 +##鋼 +##錄 +##錐 +##錘 +##錚 +##錠 +##錢 +##錦 +##錨 +##錫 +##錮 +##錯 +##録 +##錳 +##錶 +##鍊 +##鍋 +##鍍 +##鍛 +##鍥 +##鍰 +##鍵 +##鍺 +##鍾 +##鎂 +##鎊 +##鎌 +##鎏 +##鎔 +##鎖 +##鎗 +##鎚 +##鎧 +##鎬 +##鎮 +##鎳 +##鏈 +##鏖 +##鏗 +##鏘 +##鏞 +##鏟 +##鏡 +##鏢 +##鏤 +##鏽 +##鐘 +##鐮 +##鐲 +##鐳 +##鐵 +##鐸 +##鐺 +##鑄 +##鑊 +##鑑 +##鑒 +##鑣 +##鑫 +##鑰 +##鑲 +##鑼 +##鑽 +##鑾 +##鑿 +##针 +##钉 +##钊 +##钎 +##钏 +##钒 +##钓 +##钗 +##钙 +##钛 +##钜 +##钝 +##钞 +##钟 +##钠 +##钡 +##钢 +##钣 +##钤 +##钥 +##钦 +##钧 +##钨 +##钩 +##钮 +##钯 +##钰 +##钱 +##钳 +##钴 +##钵 +##钺 +##钻 +##钼 +##钾 +##钿 +##铀 +##铁 +##铂 +##铃 +##铄 +##铅 +##铆 +##铉 +##铎 +##铐 +##铛 +##铜 +##铝 +##铠 +##铡 +##铢 +##铣 +##铤 +##铨 +##铩 +##铬 +##铭 +##铮 +##铰 +##铲 +##铵 +##银 +##铸 +##铺 +##链 +##铿 +##销 +##锁 +##锂 +##锄 +##锅 +##锆 +##锈 +##锉 +##锋 +##锌 +##锏 +##锐 +##锑 +##错 +##锚 +##锟 +##锡 +##锢 +##锣 +##锤 +##锥 +##锦 +##锭 +##键 +##锯 +##锰 +##锲 +##锵 +##锹 +##锺 +##锻 +##镀 +##镁 +##镂 +##镇 +##镉 +##镌 +##镍 +##镐 +##镑 +##镕 +##镖 +##镗 +##镛 +##镜 +##镣 +##镭 +##镯 +##镰 +##镳 +##镶 +##長 +##长 +##門 +##閃 +##閉 +##開 +##閎 +##閏 +##閑 +##閒 +##間 +##閔 +##閘 +##閡 +##関 +##閣 +##閥 +##閨 +##閩 +##閱 +##閲 +##閹 +##閻 +##閾 +##闆 +##闇 +##闊 +##闌 +##闍 +##闔 +##闕 +##闖 +##闘 +##關 +##闡 +##闢 +##门 +##闪 +##闫 +##闭 +##问 +##闯 +##闰 +##闲 +##间 +##闵 +##闷 +##闸 +##闹 +##闺 +##闻 +##闽 +##闾 +##阀 +##阁 +##阂 +##阅 +##阆 +##阇 +##阈 +##阉 +##阎 +##阐 +##阑 +##阔 +##阕 +##阖 +##阙 +##阚 +##阜 +##队 +##阡 +##阪 +##阮 +##阱 +##防 +##阳 +##阴 +##阵 +##阶 +##阻 +##阿 +##陀 +##陂 +##附 +##际 +##陆 +##陇 +##陈 +##陋 +##陌 +##降 +##限 +##陕 +##陛 +##陝 +##陞 +##陟 +##陡 +##院 +##陣 +##除 +##陨 +##险 +##陪 +##陰 +##陲 +##陳 +##陵 +##陶 +##陷 +##陸 +##険 +##陽 +##隅 +##隆 +##隈 +##隊 +##隋 +##隍 +##階 +##随 +##隐 +##隔 +##隕 +##隘 +##隙 +##際 +##障 +##隠 +##隣 +##隧 +##隨 +##險 +##隱 +##隴 +##隶 +##隸 +##隻 +##隼 +##隽 +##难 +##雀 +##雁 +##雄 +##雅 +##集 +##雇 +##雉 +##雋 +##雌 +##雍 +##雎 +##雏 +##雑 +##雒 +##雕 +##雖 +##雙 +##雛 +##雜 +##雞 +##離 +##難 +##雨 +##雪 +##雯 +##雰 +##雲 +##雳 +##零 +##雷 +##雹 +##電 +##雾 +##需 +##霁 +##霄 +##霆 +##震 +##霈 +##霉 +##霊 +##霍 +##霎 +##霏 +##霑 +##霓 +##霖 +##霜 +##霞 +##霧 +##霭 +##霰 +##露 +##霸 +##霹 +##霽 +##霾 +##靂 +##靄 +##靈 +##青 +##靓 +##靖 +##静 +##靚 +##靛 +##靜 +##非 +##靠 +##靡 +##面 +##靥 +##靦 +##革 +##靳 +##靴 +##靶 +##靼 +##鞅 +##鞋 +##鞍 +##鞏 +##鞑 +##鞘 +##鞠 +##鞣 +##鞦 +##鞭 +##韆 +##韋 +##韌 +##韓 +##韜 +##韦 +##韧 +##韩 +##韬 +##韭 +##音 +##韵 +##韶 +##韻 +##響 +##頁 +##頂 +##頃 +##項 +##順 +##須 +##頌 +##預 +##頑 +##頒 +##頓 +##頗 +##領 +##頜 +##頡 +##頤 +##頫 +##頭 +##頰 +##頷 +##頸 +##頹 +##頻 +##頼 +##顆 +##題 +##額 +##顎 +##顏 +##顔 +##願 +##顛 +##類 +##顧 +##顫 +##顯 +##顱 +##顴 +##页 +##顶 +##顷 
+##项 +##顺 +##须 +##顼 +##顽 +##顾 +##顿 +##颁 +##颂 +##预 +##颅 +##领 +##颇 +##颈 +##颉 +##颊 +##颌 +##颍 +##颐 +##频 +##颓 +##颔 +##颖 +##颗 +##题 +##颚 +##颛 +##颜 +##额 +##颞 +##颠 +##颡 +##颢 +##颤 +##颦 +##颧 +##風 +##颯 +##颱 +##颳 +##颶 +##颼 +##飄 +##飆 +##风 +##飒 +##飓 +##飕 +##飘 +##飙 +##飚 +##飛 +##飞 +##食 +##飢 +##飨 +##飩 +##飪 +##飯 +##飲 +##飼 +##飽 +##飾 +##餃 +##餅 +##餉 +##養 +##餌 +##餐 +##餒 +##餓 +##餘 +##餚 +##餛 +##餞 +##餡 +##館 +##餮 +##餵 +##餾 +##饅 +##饈 +##饋 +##饌 +##饍 +##饑 +##饒 +##饕 +##饗 +##饞 +##饥 +##饨 +##饪 +##饬 +##饭 +##饮 +##饯 +##饰 +##饱 +##饲 +##饴 +##饵 +##饶 +##饷 +##饺 +##饼 +##饽 +##饿 +##馀 +##馁 +##馄 +##馅 +##馆 +##馈 +##馋 +##馍 +##馏 +##馒 +##馔 +##首 +##馗 +##香 +##馥 +##馨 +##馬 +##馭 +##馮 +##馳 +##馴 +##駁 +##駄 +##駅 +##駆 +##駐 +##駒 +##駕 +##駛 +##駝 +##駭 +##駱 +##駿 +##騁 +##騎 +##騏 +##験 +##騙 +##騨 +##騰 +##騷 +##驀 +##驅 +##驊 +##驍 +##驒 +##驕 +##驗 +##驚 +##驛 +##驟 +##驢 +##驥 +##马 +##驭 +##驮 +##驯 +##驰 +##驱 +##驳 +##驴 +##驶 +##驷 +##驸 +##驹 +##驻 +##驼 +##驾 +##驿 +##骁 +##骂 +##骄 +##骅 +##骆 +##骇 +##骈 +##骊 +##骋 +##验 +##骏 +##骐 +##骑 +##骗 +##骚 +##骛 +##骜 +##骞 +##骠 +##骡 +##骤 +##骥 +##骧 +##骨 +##骯 +##骰 +##骶 +##骷 +##骸 +##骼 +##髂 +##髅 +##髋 +##髏 +##髒 +##髓 +##體 +##髖 +##高 +##髦 +##髪 +##髮 +##髯 +##髻 +##鬃 +##鬆 +##鬍 +##鬓 +##鬚 +##鬟 +##鬢 +##鬣 +##鬥 +##鬧 +##鬱 +##鬼 +##魁 +##魂 +##魄 +##魅 +##魇 +##魍 +##魏 +##魔 +##魘 +##魚 +##魯 +##魷 +##鮑 +##鮨 +##鮪 +##鮭 +##鮮 +##鯉 +##鯊 +##鯖 +##鯛 +##鯨 +##鯰 +##鯽 +##鰍 +##鰓 +##鰭 +##鰲 +##鰻 +##鰾 +##鱈 +##鱉 +##鱔 +##鱗 +##鱷 +##鱸 +##鱼 +##鱿 +##鲁 +##鲈 +##鲍 +##鲑 +##鲛 +##鲜 +##鲟 +##鲢 +##鲤 +##鲨 +##鲫 +##鲱 +##鲲 +##鲶 +##鲷 +##鲸 +##鳃 +##鳄 +##鳅 +##鳌 +##鳍 +##鳕 +##鳖 +##鳗 +##鳝 +##鳞 +##鳥 +##鳩 +##鳳 +##鳴 +##鳶 +##鴉 +##鴕 +##鴛 +##鴦 +##鴨 +##鴻 +##鴿 +##鵑 +##鵜 +##鵝 +##鵡 +##鵬 +##鵰 +##鵲 +##鶘 +##鶩 +##鶯 +##鶴 +##鷗 +##鷲 +##鷹 +##鷺 +##鸚 +##鸞 +##鸟 +##鸠 +##鸡 +##鸢 +##鸣 +##鸥 +##鸦 +##鸨 +##鸪 +##鸭 +##鸯 +##鸳 +##鸵 +##鸽 +##鸾 +##鸿 +##鹂 +##鹃 +##鹄 +##鹅 +##鹈 +##鹉 +##鹊 +##鹌 +##鹏 +##鹑 +##鹕 +##鹘 +##鹜 +##鹞 +##鹤 +##鹦 +##鹧 +##鹫 +##鹭 +##鹰 +##鹳 +##鹵 +##鹹 +##鹼 +##鹽 +##鹿 +##麂 +##麋 +##麒 +##麓 +##麗 +##麝 +##麟 +##麥 +##麦 +##麩 +##麴 +##麵 +##麸 +##麺 +##麻 +##麼 +##麽 +##麾 +##黃 +##黄 +##黍 +##黎 +##黏 +##黑 +##黒 +##黔 +##默 +##黛 +##黜 +##黝 +##點 +##黠 +##黨 +##黯 +##黴 +##鼋 +##鼎 +##鼐 +##鼓 +##鼠 +##鼬 +##鼹 +##鼻 +##鼾 +##齁 +##齊 +##齋 +##齐 +##齒 +##齡 +##齢 +##齣 +##齦 +##齿 +##龄 +##龅 +##龈 +##龊 +##龋 +##龌 +##龍 +##龐 +##龔 +##龕 +##龙 +##龚 +##龛 +##龜 +##龟 +##︰ +##︱ +##︶ +##︿ +##﹁ +##﹂ +##﹍ +##﹏ +##﹐ +##﹑ +##﹒ +##﹔ +##﹕ +##﹖ +##﹗ +##﹙ +##﹚ +##﹝ +##﹞ +##﹡ +##﹣ +##! +##" +### +##$ +##% +##& +##' +##( +##) +##* +##, +##- +##. +##/ +##: +##; +##< +##? 
+##@ +##[ +##\ +##] +##^ +##_ +##` +##f +##h +##j +##u +##w +##z +##{ +##} +##。 +##「 +##」 +##、 +##・ +##ッ +##ー +##イ +##ク +##シ +##ス +##ト +##ノ +##フ +##ラ +##ル +##ン +##゙ +##゚ +## ̄ +##¥ +##👍 +##🔥 +##😂 +##😎 +[unused0] +[unused100] +[unused101] +[unused102] +[unused103] +[unused104] +[unused105] +[unused106] +[unused107] +[unused108] +[unused109] +[unused110] +[unused111] +[unused112] +[unused113] +[unused114] +[unused115] +[unused116] +[unused117] +[unused118] +[unused119] +[unused120] +[unused121] +[unused122] +[unused123] +[unused124] +[unused125] +[unused126] +[unused127] +[unused128] +[unused129] +[unused130] +[unused131] +[unused132] +[unused133] +[unused134] +[unused135] +[unused136] +[unused137] +[unused138] +[unused139] +[unused140] +[unused141] +[unused142] +[unused143] +[unused144] +[unused145] +[unused146] +[unused147] +[unused148] +[unused149] +[unused150] +[unused151] +[unused152] +[unused153] +[unused154] +[unused155] +[unused156] +[unused157] +[unused158] +[unused159] +[unused160] +[unused161] +[unused162] +[unused163] +[unused164] +[unused165] +[unused166] +[unused167] +[unused168] +[unused169] +[unused170] +[unused171] +[unused172] +[unused173] +[unused174] +[unused175] +[unused176] +[unused177] +[unused178] +[unused179] +[unused180] +[unused181] +[unused182] +[unused183] +[unused184] +[unused185] +[unused186] +[unused187] +[unused188] +[unused189] +[unused190] +[unused191] +[unused192] +[unused193] +[unused194] +[unused195] +[unused196] +[unused197] +[unused198] +[unused199] +[unused200] +[unused201] +[unused202] +[unused203] +[unused204] +[unused205] +[unused206] +[unused207] +[unused208] +[unused209] +[unused210] +[unused211] +[unused212] +[unused213] +[unused214] +[unused215] +[unused216] +[unused217] +[unused218] +[unused219] +[unused220] +[unused221] +[unused222] +[unused223] +[unused224] +[unused225] +[unused226] +[unused227] +[unused228] +[unused229] +[unused230] +[unused231] +[unused232] +[unused233] +[unused234] +[unused235] +[unused236] +[unused237] +[unused238] +[unused239] +[unused240] +[unused241] +[unused242] +[unused243] +[unused244] +[unused245] +[unused246] +[unused247] +[unused248] +[unused249] +[unused250] +[unused251] +[unused252] +[unused253] +[unused254] +[unused255] +[unused256] +[unused257] +[unused258] +[unused259] +[unused260] +[unused261] +[unused262] +[unused263] +[unused264] +[unused265] +[unused266] +[unused267] +[unused268] +[unused269] +[unused270] +[unused271] +[unused272] +[unused273] +[unused274] +[unused275] +[unused276] +[unused277] +[unused278] +[unused279] +[unused280] +[unused281] +[unused282] +[unused283] +[unused284] +[unused285] +[unused286] +[unused287] +[unused288] +[unused289] +[unused290] +[unused291] +[unused292] +[unused293] +[unused294] +[unused295] +[unused296] +[unused297] +[unused298] +[unused299] +[unused300] +[unused301] +[unused302] +[unused303] +[unused304] +[unused305] +[unused306] +[unused307] +[unused308] +[unused309] +[unused310] +[unused311] +[unused312] +[unused313] +[unused314] +[unused315] +[unused316] +[unused317] +[unused318] +[unused319] +[unused320] +[unused321] +[unused322] +[unused323] +[unused324] +[unused325] +[unused326] +[unused327] +[unused328] +[unused329] +[unused330] +[unused331] +[unused332] +[unused333] +[unused334] +[unused335] +[unused336] +[unused337] +[unused338] +[unused339] +[unused340] +[unused341] +[unused342] +[unused343] +[unused344] +[unused345] +[unused346] +[unused347] +[unused348] +[unused349] +[unused350] +[unused351] +[unused352] +[unused353] +[unused354] +[unused355] +[unused356] 
+[unused357] +[unused358] +[unused359] +[unused360] +[unused361] +[unused362] +[unused363] +[unused364] +[unused365] +[unused366] +[unused367] +[unused368] +[unused369] +[unused370] +[unused371] +[unused372] +[unused373] +[unused374] +[unused375] +[unused376] +[unused377] +[unused378] +[unused379] +[unused380] +[unused381] +[unused382] +[unused383] +[unused384] +[unused385] +[unused386] +[unused387] +[unused388] +[unused389] +[unused390] +[unused391] +[unused392] +[unused393] +[unused394] +[unused395] +[unused396] +[unused397] +[unused398] +[unused399] +[unused400] +[unused401] +[unused402] +[unused403] +[unused404] +[unused405] +[unused406] +[unused407] +[unused408] +[unused409] +[unused410] +[unused411] +[unused412] +[unused413] +[unused414] +[unused415] +[unused416] +[unused417] +[unused418] +[unused419] +[unused420] +[unused421] +[unused422] +[unused423] +[unused424] +[unused425] +[unused426] +[unused427] +[unused428] +[unused429] +[unused430] +[unused431] +[unused432] +[unused433] +[unused434] +[unused435] +[unused436] +[unused437] +[unused438] +[unused439] +[unused440] +[unused441] +[unused442] +[unused443] +[unused444] +[unused445] +[unused446] +[unused447] +[unused448] +[unused449] +[unused450] +[unused451] +[unused452] +[unused453] +[unused454] +[unused455] +[unused456] +[unused457] +[unused458] +[unused459] +[unused460] +[unused461] +[unused462] +[unused463] +[unused464] +[unused465] +[unused466] +[unused467] +[unused468] +[unused469] +[unused470] +[unused471] +[unused472] +[unused473] +[unused474] +[unused475] +[unused476] +[unused477] +[unused478] +[unused479] +[unused480] +[unused481] +[unused482] +[unused483] +[unused484] +[unused485] +[unused486] +[unused487] +[unused488] +[unused489] +[unused490] +[unused491] +[unused492] +[unused493] +[unused494] +[unused495] +[unused496] +[unused497] +[unused498] +[unused499] +[unused500] +[unused501] +[unused502] +[unused503] +[unused504] +[unused505] +[unused506] +[unused507] +[unused508] +[unused509] +[unused510] +[unused511] +[unused512] +[unused513] +[unused514] +[unused515] +[unused516] +[unused517] +[unused518] +[unused519] +[unused520] +[unused521] +[unused522] +[unused523] +[unused524] +[unused525] +[unused526] +[unused527] +[unused528] +[unused529] +[unused530] +[unused531] +[unused532] +[unused533] +[unused534] +[unused535] +[unused536] +[unused537] +[unused538] +[unused539] +[unused540] +[unused541] +[unused542] +[unused543] +[unused544] +[unused545] +[unused546] +[unused547] +[unused548] +[unused549] +[unused550] +[unused551] +[unused552] +[unused553] +[unused554] +[unused555] +[unused556] +[unused557] +[unused558] +[unused559] +[unused560] +[unused561] +[unused562] +[unused563] +[unused564] +[unused565] +[unused566] +[unused567] +[unused568] +[unused569] +[unused570] +[unused571] +[unused572] +[unused573] +[unused574] +[unused575] +[unused576] +[unused577] +[unused578] +[unused579] +[unused580] +[unused581] +[unused582] +[unused583] +[unused584] +[unused585] +[unused586] +[unused587] +[unused588] +[unused589] +[unused590] +[unused591] +[unused592] +[unused593] +[unused594] +[unused595] +[unused596] +[unused597] +[unused598] +[unused599] +[unused600] +[unused601] +[unused602] +[unused603] +[unused604] +[unused605] +[unused606] +[unused607] +[unused608] +[unused609] +[unused610] +[unused611] +[unused612] +[unused613] +[unused614] +[unused615] +[unused616] +[unused617] +[unused618] +[unused619] +[unused620] +[unused621] +[unused622] +[unused623] +[unused624] +[unused625] +[unused626] +[unused627] +[unused628] +[unused629] 
+[unused630] +[unused631] +[unused632] +[unused633] +[unused634] +[unused635] +[unused636] +[unused637] +[unused638] +[unused639] +[unused640] +[unused641] +[unused642] +[unused643] +[unused644] +[unused645] +[unused646] +[unused647] +[unused648] +[unused649] +[unused650] +[unused651] +[unused652] +[unused653] +[unused654] +[unused655] +[unused656] +[unused657] +[unused658] +[unused659] +[unused660] +[unused661] +[unused662] +[unused663] +[unused664] +[unused665] +[unused666] +[unused667] +[unused668] +[unused669] +[unused670] +[unused671] +[unused672] +[unused673] +[unused674] +[unused675] +[unused676] +[unused677] +[unused678] +[unused679] +[unused680] +[unused681] +[unused682] +[unused683] +[unused684] +[unused685] +[unused686] +[unused687] +[unused688] +[unused689] +[unused690] +[unused691] +[unused692] +[unused693] +[unused694] +[unused695] +[unused696] +[unused697] +[unused698] +[unused699] +[unused700] +[unused701] +[unused702] +[unused703] +[unused704] +[unused705] +[unused706] +[unused707] +[unused708] +[unused709] +[unused710] +[unused711] +[unused712] +[unused713] +[unused714] +[unused715] +[unused716] +[unused717] +[unused718] +[unused719] +[unused720] +[unused721] +[unused722] +[unused723] +[unused724] +[unused725] +[unused726] +[unused727] +[unused728] +[unused729] +[unused730] +[unused731] +[unused732] +[unused733] +[unused734] +[unused735] +[unused736] +[unused737] +[unused738] +[unused739] +[unused740] +[unused741] +[unused742] +[unused743] +[unused744] +[unused745] +[unused746] +[unused747] +[unused748] +[unused749] +[unused750] +[unused751] +[unused752] +[unused753] +[unused754] +[unused755] +[unused756] +[unused757] +[unused758] +[unused759] +[unused760] +[unused761] +[unused762] +[unused763] +[unused764] +[unused765] +[unused766] +[unused767] +[unused768] +[unused769] +[unused770] +[unused771] +[unused772] +[unused773] +[unused774] +[unused775] +[unused776] +[unused777] +[unused778] +[unused779] +[unused780] +[unused781] +[unused782] +[unused783] +[unused784] +[unused785] +[unused786] +[unused787] +[unused788] +[unused789] +[unused790] +[unused791] +[unused792] +[unused793] +[unused794] +[unused795] +[unused796] +[unused797] +[unused798] +[unused799] +[unused800] +[unused801] +[unused802] +[unused803] +[unused804] +[unused805] +[unused806] +[unused807] +[unused808] +[unused809] +[unused810] +[unused811] +[unused812] +[unused813] +[unused814] +[unused815] +[unused816] +[unused817] +[unused818] +[unused819] +[unused820] +[unused821] +[unused822] +[unused823] +[unused824] +[unused825] +[unused826] +[unused827] +[unused828] +[unused829] +[unused830] +[unused831] +[unused832] +[unused833] +[unused834] +[unused835] +[unused836] +[unused837] +[unused838] +[unused839] +[unused840] +[unused841] +[unused842] +[unused843] +[unused844] +[unused845] +[unused846] +[unused847] +[unused848] +[unused849] +[unused850] +[unused851] +[unused852] +[unused853] +[unused854] +[unused855] +[unused856] +[unused857] +[unused858] +[unused859] +[unused860] +[unused861] +[unused862] +[unused863] +[unused864] +[unused865] +[unused866] +[unused867] +[unused868] +[unused869] +[unused870] +[unused871] +[unused872] +[unused873] +[unused874] +[unused875] +[unused876] +[unused877] +[unused878] +[unused879] +[unused880] +[unused881] +[unused882] +[unused883] +[unused884] +[unused885] +[unused886] +[unused887] +[unused888] +[unused889] +[unused890] +[unused891] +[unused892] +[unused893] +[unused894] +[unused895] +[unused896] +[unused897] +[unused898] +[unused899] +[unused900] +[unused901] +[unused902] 
+[unused903] +[unused904] +[unused905] +[unused906] +[unused907] +[unused908] +[unused909] +[unused910] +[unused911] +[unused912] +[unused913] +[unused914] +[unused915] +[unused916] +[unused917] +[unused918] +[unused919] +[unused920] +[unused921] +[unused922] +[unused923] +[unused924] +[unused925] +[unused926] +[unused927] +[unused928] +[unused929] +[unused930] +[unused931] +[unused932] +[unused933] +[unused934] +[unused935] +[unused936] +[unused937] +[unused938] +[unused939] +[unused940] +[unused941] +[unused942] +[unused943] +[unused944] +[unused945] +[unused946] +[unused947] +[unused948] +[unused949] +[unused950] +[unused951] +[unused952] +[unused953] +[unused954] +[unused955] +[unused956] +[unused957] +[unused958] +[unused959] +[unused960] +[unused961] +[unused962] +[unused963] +[unused964] +[unused965] +[unused966] +[unused967] +[unused968] +[unused969] +[unused970] +[unused971] +[unused972] +[unused973] +[unused974] +[unused975] +[unused976] +[unused977] +[unused978] +[unused979] +[unused980] +[unused981] +[unused982] +[unused983] +[unused984] +[unused985] +[unused986] +[unused987] +[unused988] +[unused989] +[unused990] +[unused991] +[unused992] +[unused993] +` +¡ +¢ +¦ +¨ +ª +¬ +´ +¶ +½ +¾ +¿ +ð +þ +ħ +ı +ł +œ +ƒ +ɐ +ɑ +ɒ +ɕ +ɛ +ɣ +ɨ +ɪ +ɫ +ɬ +ɯ +ɲ +ɴ +ɹ +ɾ +ʀ +ʁ +ʂ +ʃ +ʉ +ʊ +ʋ +ʌ +ʎ +ʐ +ʑ +ʒ +ʔ +ʲ +ʳ +ʷ +ʸ +ʻ +ʼ +ʾ +ʿ +ˡ +ˣ +ˤ +ζ +ξ +щ +ъ +э +ю +ђ +є +ј +љ +њ +ћ +ӏ +ա +բ +գ +դ +ե +թ +ի +լ +կ +հ +մ +յ +ն +ո +պ +ս +վ +տ +ր +ւ +ք +־ +א +ב +ג +ד +ה +ו +ז +ח +ט +י +ך +כ +ל +ם +מ +ן +נ +ס +ע +ף +פ +ץ +צ +ק +ר +ש +ת +، +ء +ث +ج +ح +خ +ذ +ز +ش +ص +ض +ط +ظ +غ +ـ +ف +ق +ك +ى +ٹ +پ +چ +ک +گ +ں +ھ +ہ +ی +ے +अ +आ +उ +ए +क +ख +ग +च +ज +ट +ड +ण +त +थ +द +ध +न +प +ब +भ +म +य +र +ल +व +श +ष +स +ह +ा +ि +ी +ो +। +॥ +ং +অ +আ +ই +উ +এ +ও +ক +খ +গ +চ +ছ +জ +ট +ড +ণ +ত +থ +দ +ধ +ন +প +ব +ভ +ম +য +র +ল +শ +ষ +স +হ +া +ি +ী +ে +க +ச +ட +த +ந +ன +ப +ம +ய +ர +ல +ள +வ +ா +ி +ு +ே +ை +ನ +ರ +ಾ +ක +ය +ර +ල +ව +ා +ต +ท +พ +ล +ว +ส +། +ག +ང +ད +ན +པ +བ +མ +འ +ར +ལ +ས +မ +ა +ბ +გ +დ +ე +ვ +თ +ი +კ +ლ +მ +ნ +ო +რ +ს +ტ +უ +ᄊ +ᴬ +ᴮ +ᴰ +ᴵ +ᴺ +ᵀ +ᵇ +ᵈ +ᵖ +ᵗ +ᵢ +ᵣ +ᵤ +ᵥ +ᶜ +ᶠ +‐ +‑ +‒ +– +— +― +‘ +’ +‚ +“ +” +‡ +… +⁰ +⁴ +⁵ +⁶ +⁷ +⁸ +⁹ +⁻ +₀ +₅ +₆ +₇ +₈ +₉ +₊ +₍ +₎ +ₐ +ₑ +ₒ +ₓ +ₕ +ₖ +ₗ +ₘ +ₙ +ₚ +ₛ +ₜ +₤ +₩ +₱ +₹ +ℓ +ℝ +⅓ +⅔ +↦ +⇄ +⇌ +∂ +∅ +∆ +∇ +∈ +∗ +∘ +∧ +∨ +∪ +⊂ +⊆ +⊕ +⊗ +☉ +♭ +♯ +⟨ +⟩ +ⱼ +⺩ +⺼ +⽥ +亻 +宀 +彳 +忄 +扌 +氵 +疒 +糹 +訁 +辶 +阝 +龸 +fi +fl +had +were +which +him +their +been +would +then +them +could +during +through +between +while +later +around +did +such +being +used +against +many +both +these +known +until +even +didn +because +born +since +still +became +any +including +took +same +each +called +much +however +four +another +found +won +going +away +hand +several +following +released +played +began +district +those +held +own +early +league +government +came +based +thought +looked +along +went +few +father +former +located +got +though +every +century +without +within +building +large +named +started +once +should +built +british +death +moved +door +need +president +wasn +although +due +major +died +third +knew +asked +turned +wanted +together +received +son +served +different +behind +himself +felt +members +football +near +having +saw +mother +army +front +late +hands +put +division +across +told +often +ever +french +six +include +tell +among +species +really +according +half +original +gave +making +enough +opened +must +included +given +german +woman +community +might +million +court +short +round +seen +always +become +sure +almost +director +council +career +things +using +couldn +better +students 
+married +nothing +worked +others +record +anything +continued +give +military +established +returned +does +written +thing +feet +far +already +championship +western +department +role +various +production +television +produced +working +region +present +period +looking +least +total +england +wife +per +brother +soon +political +taken +created +further +able +reached +joined +upon +done +important +either +appeared +position +ground +lead +election +arms +police +instead +words +moment +someone +announced +less +wrote +past +followed +founded +finally +india +taking +records +considered +northern +toward +european +outside +described +track +playing +heard +professional +australia +miles +yet +trying +blood +southern +maybe +everything +mouth +race +recorded +above +daughter +points +middle +move +tried +elected +closed +ten +minister +chief +person +similar +brought +rest +formed +floor +doing +killed +training +needed +turn +finished +railway +rather +sent +example +ran +term +coming +currently +forces +despite +areas +fact +dead +originally +germany +probably +developed +pulled +stood +signed +songs +child +eventually +met +average +teams +minutes +current +kind +decided +usually +eastern +seemed +episode +bed +added +indian +route +available +throughout +addition +appointed +eight +construction +mean +remained +schools +sometimes +events +possible +australian +forward +debut +seat +performance +committee +features +character +herself +lot +russian +range +hours +sold +quickly +directed +guitar +performed +players +smile +myself +placed +province +towards +wouldn +leading +whole +designed +census +europe +attack +japanese +getting +alone +lower +wide +hospital +believe +changed +sister +gone +hadn +ship +studies +academy +shot +below +involved +kept +largest +especially +beginning +movement +section +female +professor +lord +longer +walked +actually +civil +families +thus +aircraft +completed +includes +captain +fight +vocals +featured +fourth +officer +hear +means +medical +groups +lips +competition +entire +lived +leaving +federal +tournament +passed +independent +kingdom +spent +fine +doesn +reported +fall +raised +itself +replaced +leader +theatre +whose +parents +spanish +canadian +degree +writing +awarded +higher +coast +provided +senior +organization +stopped +onto +countries +parts +conference +interest +saying +allowed +earlier +matter +winning +try +happened +moving +los +breath +nearly +mid +certain +italian +african +standing +fell +artist +shows +deal +mine +industry +everyone +republic +provide +student +primary +owned +older +heavy +1st +makes +attention +anyone +africa +stated +length +ended +fingers +command +staff +foreign +opening +governor +okay +medal +kill +introduced +chest +hell +feeling +success +meet +reason +meeting +novel +trade +buildings +guy +goal +native +husband +previously +entered +producer +operations +takes +covered +forced +roman +complete +successful +texas +cold +traditional +films +clear +approximately +nine +prince +question +tracks +ireland +regional +personal +operation +economic +holding +twenty +additional +hour +regular +historic +places +whom +shook +km² +secretary +prior +scored +units +ask +property +ready +immediately +month +listed +contract +themselves +lines +navy +writer +meant +runs +practice +championships +singer +commission +required +starting +generally +giving +attended +couple +stand +catholic +caught +executive +thinking +chair +quite +shoulder +hope +decision +plays +defeated +municipality +whether +offered +slowly +pain 
+direction +mission +mostly +noted +individual +managed +lives +plant +helped +except +studied +computer +figure +relationship +issue +significant +loss +smiled +gun +highest +male +bring +goals +mexico +problem +distance +commercial +completely +location +annual +famous +neck +caused +italy +understand +greek +highway +wrong +comes +appearance +issues +musical +companies +castle +income +assembly +bass +initially +parliament +artists +experience +particular +walk +foot +engineering +talking +dropped +boys +stars +remember +carried +train +stadium +angeles +evidence +becoming +assistant +soviet +upper +youth +reach +actor +numerous +nodded +arrived +minute +believed +complex +victory +associated +temple +chance +perhaps +bishop +launched +particularly +retired +subject +prize +contains +yeah +theory +empire +suddenly +waiting +trust +recording +terms +champion +religious +zealand +names +2nd +ancient +corner +represented +legal +justice +cause +watched +brothers +material +changes +simply +response +answer +historical +stories +straight +feature +increased +administration +virginia +activities +cultural +overall +winner +programs +basketball +legs +guard +cast +doctor +flight +results +remains +cost +effect +winter +larger +islands +problems +chairman +grew +commander +isn +failed +selected +hurt +fort +regiment +majority +plans +shown +pretty +irish +characters +directly +scene +likely +operated +allow +matches +looks +houses +fellow +marriage +rules +florida +expected +nearby +congress +peace +recent +wait +subsequently +variety +serving +agreed +poor +attempt +wood +democratic +rural +mile +appears +township +soldiers +##ized +pennsylvania +closer +fighting +claimed +score +physical +filled +genus +specific +sitting +mom +therefore +supported +status +fear +cases +meaning +wales +minor +spain +vice +parish +separate +horse +fifth +remaining +branch +presented +stared +uses +forms +baseball +exactly +choice +discovered +composed +truth +russia +dad +ring +referred +numbers +greater +metres +slightly +direct +increase +responsible +crew +rule +trees +troops +broke +goes +individuals +hundred +weight +creek +sleep +defense +provides +ordered +jewish +safe +judge +whatever +corps +realized +growing +cities +gaze +lies +spread +letter +showed +situation +mayor +transport +watching +workers +extended +expression +normal +chart +multiple +border +mrs +walls +piano +heat +cannot +earned +products +drama +era +authority +seasons +join +grade +difficult +territory +mainly +stations +squadron +stepped +iron +19th +serve +appear +speak +broken +charge +knowledge +kilometres +removed +ships +campus +pushed +britain +leaves +recently +boston +latter +acquired +poland +quality +officers +presence +planned +nations +mass +broadcast +influence +wild +emperor +electric +headed +ability +promoted +yellow +ministry +throat +smaller +politician +latin +spoke +cars +males +lack +acting +seeing +consists +estate +pressure +newspaper +olympics +conditions +beat +elements +walking +vote +needs +carolina +featuring +levels +francisco +purpose +females +dutch +duke +ahead +gas +safety +serious +turning +highly +lieutenant +firm +amount +mixed +proposed +perfect +agreement +affairs +3rd +seconds +contemporary +paid +prison +label +administrative +intended +constructed +academic +teacher +races +formerly +nation +issued +shut +drums +housing +seems +graduated +mentioned +picked +recognized +shortly +protection +picture +notable +elections +1980s +loved +percent +racing +elizabeth +volume +hockey +beside +settled 
+competed +replied +drew +actress +marine +scotland +steel +glanced +farm +risk +tonight +positive +singles +effects +gray +screen +residents +sides +none +secondary +literature +polish +destroyed +flying +founder +households +lay +reserve +industrial +younger +approach +appearances +ones +finish +powerful +fully +growth +honor +jersey +projects +revealed +infantry +pair +equipment +visit +evening +grant +effort +treatment +buried +republican +primarily +bottom +owner +1970s +israel +gives +remain +spot +produce +champions +accepted +ways +##ally +losing +split +capacity +basis +trial +questions +20th +guess +officially +memorial +naval +initial +##ization +whispered +median +engineer +sydney +columbia +strength +tears +senate +asian +draw +warm +supposed +transferred +leaned +candidate +escape +mountains +potential +activity +seem +traffic +murder +slow +orchestra +haven +agency +taught +website +comedy +unable +storm +planning +albums +rugby +environment +scientific +grabbed +protect +boat +typically +damage +principal +divided +dedicated +ohio +pick +fought +driver +empty +shoulders +sort +thank +berlin +prominent +account +freedom +necessary +efforts +headquarters +follows +alongside +suggested +operating +steps +technical +begin +easily +teeth +speaking +settlement +scale +renamed +enemy +semi +joint +compared +scottish +leadership +analysis +offers +georgia +pieces +captured +animal +deputy +organized +combined +method +challenge +1960s +huge +wants +battalion +sons +rise +crime +types +facilities +telling +platform +sit +1990s +tells +assigned +pull +commonly +alive +letters +concept +conducted +wearing +happen +bought +becomes +holy +gets +defeat +languages +purchased +occurred +titled +declared +applied +sciences +concert +sounds +jazz +brain +painting +fleet +tax +michigan +animals +leaders +episodes +birth +clubs +palace +critical +refused +fair +leg +laughed +returning +surrounding +participated +formation +lifted +pointed +connected +rome +medicine +laid +powers +tall +shared +focused +knowing +yards +entrance +falls +calling +sources +chosen +beneath +resources +yard +nominated +silence +defined +gained +thirty +bodies +adopted +christmas +widely +register +apart +iran +premier +serves +unknown +parties +generation +continues +fields +brigade +quiet +teaching +clothes +impact +weapons +partner +flat +theater +relations +plants +suffered +begins +seats +armed +models +worth +laws +communities +classes +background +knows +thanks +quarter +reaching +humans +carry +killing +format +setting +architecture +disease +railroad +possibly +arthur +thoughts +doors +density +crowd +illinois +stomach +tone +unique +reports +anyway +liberal +vehicle +thick +dry +drug +faced +largely +facility +theme +holds +creation +strange +colonel +revolution +politics +turns +silent +rail +relief +independence +combat +shape +determined +sales +learned +4th +finger +providing +heritage +fiction +situated +designated +allowing +hosted +sight +interview +estimated +reduced +toronto +footballer +keeping +guys +damn +claim +motion +sixth +stayed +rear +receive +handed +twelve +dress +audience +granted +brazil +spirit +##ated +noticed +olympic +representative +tight +trouble +reviews +drink +vampire +missing +roles +ranked +newly +household +finals +critics +phase +massachusetts +pilot +unlike +philadelphia +bright +guns +crown +organizations +roof +respectively +clearly +tongue +marked +circle +bronze +expanded +sexual +supply +yourself +inspired +labour +reference +draft +connection +reasons +driving 
+jesus +cells +entry +neither +trail +claims +atlantic +orders +labor +nose +afraid +identified +intelligence +calls +cancer +attacked +passing +positions +imperial +grey +swedish +avoid +extra +uncle +covers +allows +surprise +materials +fame +hunter +citizens +figures +environmental +confirmed +shit +titles +performing +difference +acts +attacks +existing +votes +opportunity +nor +entirely +trains +opposite +pakistan +develop +resulted +representatives +actions +reality +pressed +barely +conversation +faculty +northwest +ends +documentary +nuclear +stock +sets +eat +alternative +resulting +creating +surprised +cemetery +drop +finding +cricket +streets +tradition +ride +ear +explained +composer +injury +apartment +municipal +educational +occupied +netherlands +clean +billion +constitution +learn +maximum +classical +lose +opposition +ontario +hills +rolled +ending +drawn +permanent +lewis +sites +chamber +scoring +height +lyrics +staring +officials +snow +oldest +qualified +interior +apparently +succeeded +thousand +dinner +lights +existence +heavily +greatest +conservative +send +bowl +catch +duty +speech +authorities +princess +performances +versions +shall +graduate +pictures +effective +remembered +poetry +desk +crossed +starring +starts +passenger +sharp +acres +ass +weather +falling +rank +fund +supporting +adult +heads +southeast +lane +condition +transfer +prevent +regions +earl +federation +relatively +answered +besides +obtained +portion +reaction +liked +peak +counter +religion +chain +rare +convention +aid +lie +vehicles +perform +squad +wonder +lying +crazy +sword +attempted +centuries +weren +philosophy +interested +sweden +wolf +frequently +abandoned +literary +alliance +task +entitled +threw +promotion +tiny +soccer +visited +achieved +defence +internal +persian +methods +arrested +otherwise +programming +villages +elementary +districts +rooms +criminal +conflict +worry +trained +attempts +waited +signal +truck +subsequent +programme +communist +faith +sector +carrying +laugh +controlled +korean +showing +origin +fuel +evil +brief +identity +darkness +pool +missed +publication +wings +invited +briefly +standards +kissed +ideas +climate +causing +walter +worse +albert +winners +desire +aged +northeast +dangerous +gate +doubt +wooden +poet +rising +funding +communications +communication +violence +copies +prepared +investigation +skills +pulling +containing +ultimately +offices +singing +understanding +tomorrow +christ +ward +pope +stands +5th +flow +studios +aired +commissioned +contained +exist +americans +wrestling +approved +kid +employed +respect +suit +asking +increasing +frame +angry +selling +1950s +thin +finds +temperature +statement +ali +explain +inhabitants +towns +extensive +narrow +flowers +promise +somewhere +closely +bureau +cape +weekly +presidential +legislative +launch +founding +artillery +strike +un +institutions +roll +writers +landing +chose +anymore +attorney +billboard +receiving +agricultural +breaking +sought +dave +admitted +lands +mexican +##bury +specifically +hole +moscow +roads +accident +proved +struck +guards +stuff +slid +expansion +melbourne +opposed +sub +southwest +architect +failure +plane +tank +listen +regarding +wet +introduction +metropolitan +fighter +inch +grown +gene +anger +fixed +khan +domestic +worldwide +chapel +mill +functions +examples +developing +turkey +hits +pocket +antonio +papers +grow +unless +circuit +18th +concerned +attached +journalist +selection +journey +converted +provincial +painted +hearing +aren +bands 
+negative +aside +wondered +knight +lap +noise +billy +shooting +bedroom +priest +resistance +motor +homes +sounded +giant +scenes +equal +comic +patients +hidden +solid +actual +bringing +afternoon +touched +funds +consisted +marie +canal +treaty +turkish +recognition +residence +cathedral +broad +knees +incident +shaped +fired +norwegian +handle +cheek +contest +represent +representing +birds +advantage +emergency +wrapped +drawing +notice +broadcasting +somehow +bachelor +seventh +collected +registered +establishment +assumed +chemical +personnel +retirement +portuguese +wore +tied +device +threat +progress +advance +##ised +banks +hired +manchester +nfl +teachers +structures +forever +tennis +helping +saturday +applications +junction +incorporated +neighborhood +dressed +ceremony +influenced +hers +stairs +decades +inner +kansas +hung +hoped +gain +scheduled +downtown +engaged +austria +clock +norway +certainly +pale +victor +employees +plate +putting +surrounded +##ists +finishing +blues +tropical +minnesota +consider +philippines +accept +retrieved +concern +anderson +properties +institution +gordon +successfully +vietnam +backing +outstanding +muslim +crossing +folk +producing +usual +demand +occurs +observed +lawyer +educated +pleasure +budget +items +quietly +colorado +philip +typical +##worth +derived +survived +asks +mental +jake +jews +distinguished +sri +extremely +athletic +loud +thousands +worried +transportation +horses +weapon +arena +importance +users +objects +contributed +douglas +aware +senator +johnny +sisters +engines +flag +investment +samuel +shock +capable +clark +row +wheel +refers +familiar +biggest +wins +hate +maintained +drove +hamilton +expressed +injured +underground +churches +wars +tunnel +passes +stupid +agriculture +softly +cabinet +regarded +joining +indiana +dates +spend +behavior +woods +protein +gently +chase +morgan +mention +burning +wake +combination +occur +mirror +leads +indeed +impossible +paintings +covering +soldier +locations +attendance +sell +historian +wisconsin +invasion +argued +painter +diego +changing +egypt +experienced +inches +missouri +grounds +spoken +switzerland +reform +rolling +forget +massive +resigned +burned +tennessee +locked +values +improved +wounded +universe +sick +dating +facing +purchase +##pur +moments +merged +anniversary +coal +brick +understood +causes +dynasty +queensland +establish +stores +crisis +promote +hoping +cards +referee +extension +raise +arizona +improve +colonial +formal +charged +palm +hide +rescue +faces +feelings +candidates +juan +6th +courses +weekend +luke +cash +fallen +delivered +affected +installed +carefully +tries +hollywood +costs +lincoln +responsibility +shore +proper +normally +maryland +assistance +constant +offering +friendly +waters +persons +realize +contain +trophy +partnership +factor +musicians +bound +oregon +indicated +houston +medium +consisting +somewhat +cycle +beer +moore +frederick +gotten +worst +weak +approached +arranged +chin +loan +bond +fifteen +pattern +disappeared +translated +##zed +lip +arab +capture +interests +insurance +shifted +cave +prix +warning +sections +courts +coat +plot +smell +golf +favorite +maintain +knife +voted +degrees +finance +quebec +opinion +translation +manner +ruled +operate +productions +choose +musician +confused +tired +separated +stream +techniques +committed +attend +ranking +kings +throw +passengers +measure +horror +mining +sand +danger +salt +calm +decade +dam +require +runner +rush +associate +greece +rivers +consecutive 
+matthew +##ski +sighed +sq +documents +closing +tie +accused +islamic +distributed +directors +organisation +7th +breathing +mad +lit +arrival +concrete +taste +composition +shaking +faster +amateur +adjacent +stating +twin +flew +publications +obviously +ridge +storage +carl +pages +concluded +desert +driven +universities +ages +terminal +sequence +borough +constituency +cousin +economics +dreams +margaret +notably +reduce +montreal +17th +ears +saved +vocal +riding +roughly +threatened +meters +meanwhile +landed +compete +repeated +grass +czech +regularly +charges +sudden +appeal +solution +describes +classification +glad +parking +belt +physics +rachel +hungarian +participate +expedition +damaged +gift +childhood +fifty +mathematics +jumped +letting +defensive +mph +testing +hundreds +shoot +owners +matters +smoke +israeli +kentucky +dancing +mounted +grandfather +designs +profit +argentina +truly +lawrence +cole +begun +detroit +willing +branches +smiling +decide +miami +enjoyed +recordings +##dale +poverty +ethnic +arabic +accompanied +fishing +determine +residential +acid +returns +starred +strategy +forty +businesses +equivalent +commonwealth +distinct +ill +seriously +##ped +harris +replace +rio +imagine +formula +ensure +additionally +scheme +conservation +occasionally +purposes +feels +favor +1930s +contrast +hanging +hunt +movies +instruments +victims +danish +christopher +busy +demon +sugar +earliest +colony +studying +duties +belgium +slipped +carter +visible +stages +iraq +commune +forming +continuing +talked +counties +legend +bathroom +option +tail +clay +daughters +afterwards +severe +jaw +visitors +devices +aviation +entering +subjects +temporary +swimming +forth +smooth +bush +operates +rocks +movements +signs +eddie +voices +honorary +memories +dallas +measures +racial +promised +harvard +16th +parliamentary +indicate +benefit +flesh +dublin +louisiana +patient +sleeping +membership +coastal +medieval +wanting +element +scholars +rice +limit +survive +makeup +rating +definitely +collaboration +obvious +baron +birthday +linked +soil +diocese +ncaa +offensive +shouldn +waist +plain +ross +organ +resolution +manufacturing +adding +relative +kennedy +whilst +moth +gardens +crash +heading +partners +credited +carlos +moves +cable +marshall +depending +bottle +represents +rejected +responded +existed +denmark +##ating +treated +graham +routes +talent +commissioner +drugs +secure +tests +reign +restored +photography +contributions +oklahoma +designer +disc +grin +seattle +robin +paused +atlanta +unusual +praised +las +laughing +satellite +hungary +visiting +interesting +factors +deck +poems +norman +##water +stuck +speaker +rifle +premiered +comics +actors +reputation +eliminated +8th +ceiling +prisoners +leather +austin +mississippi +rapidly +admiral +parallel +charlotte +guilty +tools +gender +divisions +fruit +laboratory +nelson +marry +rapid +aunt +tribe +requirements +aspects +suicide +amongst +adams +bone +ukraine +kick +sees +edinburgh +clothing +column +rough +gods +hunting +broadway +gathered +concerns +spending +ty +12th +snapped +requires +solar +bones +cavalry +iowa +drinking +waste +franklin +charity +thompson +stewart +tip +landscape +enjoy +singh +poem +listening +eighth +fred +differences +adapted +bomb +ukrainian +surgery +corporate +masters +anywhere +waves +odd +portugal +orleans +dick +debate +kent +eating +puerto +cleared +expect +cinema +guitarist +blocks +electrical +agree +involving +depth +dying +panel +struggle +peninsula +adults +novels +emerged 
+vienna +debuted +shoes +tamil +songwriter +meets +prove +beating +instance +heaven +scared +sending +marks +artistic +passage +superior +significantly +retained +##izing +technique +cheeks +warren +maintenance +destroy +extreme +allied +appearing +fill +advice +alabama +qualifying +policies +cleveland +hat +battery +authors +10th +soundtrack +acted +dated +lb +glance +equipped +coalition +funny +outer +ambassador +roy +possibility +couples +campbell +loose +ethan +supplies +gonna +monster +shake +agents +frequency +springs +dogs +practices +gang +plastic +easier +suggests +gulf +blade +exposed +colors +industries +markets +nervous +electoral +charts +legislation +ownership +##idae +appointment +shield +assault +socialist +abbey +monument +license +throne +employment +replacement +charter +suffering +accounts +oak +connecticut +strongly +wright +colour +13th +context +welsh +networks +voiced +gabriel +forehead +manage +schedule +totally +remix +forests +occupation +print +nicholas +brazilian +strategic +vampires +engineers +roots +seek +correct +instrumental +und +alfred +backed +stanley +robinson +traveled +wayne +austrian +achieve +exit +rates +strip +whereas +sing +deeply +adventure +bobby +jamie +careful +components +cap +useful +personality +knee +pushing +hosts +protest +ottoman +symphony +boundary +processes +considering +considerable +tons +cooper +trading +conduct +illegal +revolutionary +definition +harder +jacob +circumstances +destruction +popularity +grip +classified +liverpool +baltimore +flows +seeking +honour +approval +mechanical +till +happening +statue +critic +increasingly +immediate +describe +commerce +stare +indonesia +meat +rounds +boats +baker +orthodox +depression +formally +worn +naked +muttered +sentence +11th +document +criticism +wished +vessel +spiritual +bent +virgin +minimum +murray +lunch +danny +printed +compilation +keyboards +blow +belonged +raising +cutting +pittsburgh +9th +shadows +hated +indigenous +jon +15th +barry +scholar +oliver +stick +susan +meetings +attracted +spell +romantic +ye +demanded +customers +logan +revival +keys +modified +commanded +jeans +upset +phil +detective +hiding +resident +##bly +experiences +diamond +defeating +coverage +lucas +external +parks +franchise +helen +bible +successor +percussion +celebrated +lift +clan +romania +##ied +mills +nobody +achievement +shrugged +fault +rhythm +initiative +breakfast +carbon +lasted +violent +wound +killer +gradually +filmed +°c +processing +remove +criticized +guests +sang +chemistry +legislature +##bridge +uniform +escaped +integrated +proposal +purple +denied +liquid +influential +morris +nights +stones +intense +experimental +twisted +pace +nazi +mitchell +ny +blind +reporter +newspapers +14th +centers +burn +basin +forgotten +surviving +filed +collections +monastery +losses +manual +couch +description +appropriate +merely +missions +sebastian +restoration +replacing +triple +elder +julia +warriors +benjamin +julian +convinced +stronger +amazing +declined +versus +merchant +happens +output +finland +bare +barbara +absence +ignored +dawn +injuries +producers +luis +##ities +kw +admit +expensive +electricity +exception +symbol +ladies +shower +sheriff +characteristics +##je +aimed +button +ratio +effectively +summit +angle +jury +bears +foster +vessels +pants +executed +evans +dozen +advertising +kicked +patrol +competitions +lifetime +principles +athletics +birmingham +sponsored +rob +nomination +acoustic +creature +longest +credits +harbor +dust +josh +territories +milk 
+infrastructure +completion +thailand +indians +leon +archbishop +assist +pitch +blake +arrangement +girlfriend +serbian +operational +hence +sad +scent +fur +sessions +refer +rarely +exists +1892 +scientists +dirty +penalty +burst +portrait +seed +pole +limits +rival +stable +grave +constitutional +alcohol +arrest +flower +mystery +devil +architectural +relationships +greatly +habitat +##istic +larry +progressive +remote +cotton +preserved +reaches +cited +vast +scholarship +decisions +teach +editions +knocked +eve +searching +partly +participation +animated +fate +excellent +alternate +saints +youngest +climbed +suggest +discussion +staying +choir +lakes +jacket +revenue +nevertheless +peaked +instrument +wondering +annually +managing +neil +1891 +signing +terry +apply +clinical +brooklyn +aim +catherine +fuck +farmers +figured +ninth +pride +hugh +ordinary +involvement +comfortable +shouted +encouraged +representation +sharing +panic +exact +cargo +competing +fat +cried +1920s +occasions +cabin +borders +utah +marcus +##isation +badly +muscles +victorian +transition +warner +bet +permission +slave +terrible +similarly +shares +seth +uefa +possession +medals +benefits +colleges +lowered +perfectly +transit +##kar +publisher +##ened +harrison +deaths +elevation +asleep +machines +sigh +ash +hardly +argument +occasion +parent +decline +contribution +concentration +opportunities +hispanic +guardian +extent +emotions +hips +mason +volumes +bloody +controversy +diameter +steady +mistake +phoenix +identify +violin +departure +richmond +spin +funeral +enemies +1864 +literally +connor +random +sergeant +grab +confusion +1865 +transmission +informed +leaning +sacred +suspended +thinks +gates +portland +luck +agencies +yours +hull +expert +muscle +layer +practical +sculpture +jerusalem +latest +lloyd +statistics +deeper +recommended +warrior +arkansas +mess +supports +greg +eagle +recovered +rated +concerts +rushed +stops +eggs +premiere +keith +delhi +turner +pit +affair +belief +paint +##zing +victim +withdrew +bonus +styles +fled +glasgow +technologies +funded +adaptation +portrayed +cooperation +supporters +judges +bernard +hallway +ralph +graduating +controversial +distant +continental +spider +bite +recognize +intention +mixing +egyptian +bow +tourism +suppose +claiming +dominated +participants +nurse +partially +tape +psychology +essential +touring +duo +voting +civilian +emotional +channels +apparent +hebrew +1887 +tommy +carrier +intersection +beast +hudson +bench +discuss +costa +##ered +detailed +behalf +drivers +unfortunately +obtain +rocky +##dae +siege +friendship +1861 +hang +governments +collins +respond +wildlife +preferred +operator +laura +pregnant +videos +dennis +suspected +boots +instantly +weird +automatic +businessman +alleged +placing +throwing +mood +1862 +perry +venue +jet +remainder +passion +biological +boyfriend +1863 +dirt +buffalo +ron +segment +abuse +genre +thrown +stroke +colored +stress +exercise +displayed +struggled +abroad +dramatic +wonderful +thereafter +madrid +component +widespread +##sed +tale +citizen +todd +vancouver +overseas +forcing +crying +descent +discussed +substantial +ranks +regime +provinces +drum +zane +tribes +proof +researchers +volunteer +manor +silk +milan +donated +allies +venture +principle +delivery +enterprise +bars +traditionally +witch +reminded +copper +pete +inter +colin +grinned +elsewhere +competitive +frequent +scream +tension +texts +submarine +finnish +defending +defend +pat +detail +affiliated +stuart +themes +periods +tool 
+belgian +ruling +crimes +answers +folded +licensed +demolished +hans +lucy +1881 +lion +traded +photographs +writes +craig +trials +generated +beth +noble +debt +percentage +yorkshire +erected +viewed +grades +confidence +ceased +islam +telephone +retail +chile +m² +roberts +sixteen +commented +hampshire +innocent +dual +pounds +checked +regulations +afghanistan +sung +rico +liberty +assets +bigger +options +angels +relegated +tribute +wells +attending +leaf +romanian +monthly +patterns +gmina +madison +hurricane +rev +##ians +bristol +elite +valuable +disaster +democracy +awareness +germans +freyja +loop +absolutely +paying +populations +maine +sole +prayer +spencer +releases +doorway +bull +lover +midnight +conclusion +thirteen +mediterranean +nhl +proud +sample +##hill +drummer +guinea +murphy +climb +instant +attributed +horn +ain +railways +autumn +ferry +opponent +traveling +secured +corridor +stretched +tales +sheet +trinity +cattle +helps +indicates +manhattan +murdered +fitted +gentle +grandmother +mines +shocked +vegas +produces +caribbean +belong +continuous +desperate +drunk +historically +trio +waved +raf +dealing +nathan +murmured +interrupted +residing +scientist +pioneer +harold +aaron +delta +attempting +minority +believes +chorus +tend +lots +eyed +indoor +load +shots +updated +jail +concerning +connecting +wealth +slaves +arrive +rangers +sufficient +rebuilt +##wick +cardinal +flood +muhammad +whenever +relation +runners +moral +repair +viewers +arriving +revenge +punk +assisted +bath +fairly +breathe +lists +innings +illustrated +whisper +nearest +voters +clinton +ties +ultimate +screamed +beijing +lions +andre +fictional +gathering +comfort +radar +suitable +dismissed +hms +ban +pine +wrist +atmosphere +voivodeship +bid +timber +##ned +giants +cameron +recovery +uss +identical +categories +switched +serbia +laughter +noah +ensemble +therapy +peoples +touching +##off +locally +pearl +platforms +everywhere +ballet +tables +lanka +herbert +outdoor +toured +derek +1883 +spaces +contested +swept +1878 +exclusive +slight +connections +winds +prisoner +collective +bangladesh +tube +publicly +wealthy +isolated +insisted +fortune +ticket +spotted +reportedly +animation +enforcement +tanks +decides +wider +lowest +owen +nod +hitting +gregory +furthermore +magazines +fighters +solutions +pointing +requested +peru +reed +chancellor +knights +mask +worker +eldest +flames +reduction +volunteers +reporting +wire +advisory +endemic +origins +settlers +pursue +knock +consumer +1876 +eu +compound +creatures +mansion +sentenced +ivan +deployed +guitars +frowned +involves +mechanism +kilometers +perspective +shops +terminus +duncan +alien +fist +bridges +##pers +heroes +derby +swallowed +patent +sara +illness +characterized +adventures +slide +hawaii +jurisdiction +organised +adelaide +walks +biology +rogers +swing +tightly +boundaries +prepare +implementation +stolen +certified +colombia +edwards +garage +recalled +rage +harm +nigeria +breast +furniture +pupils +settle +cuba +balls +alaska +21st +linear +thrust +celebration +latino +genetic +terror +##ening +lightning +fee +witness +lodge +establishing +skull +earning +hood +rebellion +sporting +warned +missile +devoted +activist +porch +worship +fourteen +package +decorated +##shire +housed +chess +sailed +doctors +oscar +joan +treat +garcia +harbour +jeremy +traditions +dominant +jacques +##gon +relocated +1879 +amendment +sized +companion +simultaneously +volleyball +spun +acre +increases +stopping +loves +belongs +affect +drafted 
+tossed +scout +battles +1875 +filming +shoved +munich +tenure +vertical +romance +argue +craft +ranging +opens +honest +tyler +yesterday +muslims +reveal +snake +immigrants +radical +screaming +speakers +firing +saving +belonging +ease +lighting +prefecture +blame +farmer +hungry +grows +rubbed +beam +sur +subsidiary +armenian +dropping +conventional +qualify +spots +sweat +festivals +immigration +physician +discover +exposure +sandy +explanation +isaac +implemented +##fish +hart +initiated +stakes +presents +heights +householder +pleased +tourist +regardless +slip +closest +surely +sultan +brings +riley +preparation +aboard +slammed +baptist +experiment +ongoing +interstate +organic +playoffs +1877 +hindu +tours +tier +plenty +arrangements +talks +trapped +excited +sank +athens +1872 +denver +welfare +suburb +athletes +trick +diverse +belly +exclusively +yelled +conversion +1874 +internationally +computers +conductor +abilities +sensitive +dispute +measured +globe +rocket +prices +amsterdam +flights +tigers +municipalities +emotion +references +explains +airlines +manufactured +archaeological +1873 +interpretation +devon +##ites +settlements +kissing +absolute +improvement +impressed +barcelona +sullivan +jefferson +towers +jesse +julie +grandson +gauge +regard +rings +interviews +trace +raymond +thumb +departments +burns +serial +bulgarian +scores +demonstrated +1866 +kyle +alberta +underneath +romanized +relieved +acquisition +phrase +cliff +reveals +cuts +merger +custom +nee +gilbert +graduation +assessment +difficulty +demands +swung +democrat +commons +1940s +grove +completing +focuses +sum +substitute +bearing +stretch +reception +reflected +essentially +destination +pairs +##ched +survival +resource +##bach +promoting +doubles +messages +tear +##fully +parade +florence +harvey +incumbent +partial +pedro +frozen +procedure +olivia +controls +shelter +personally +temperatures +brisbane +tested +sits +marble +comprehensive +oxygen +leonard +##kov +inaugural +iranian +referring +quarters +attitude +mainstream +lined +mars +dakota +norfolk +unsuccessful +explosion +helicopter +congressional +##sing +inspector +bitch +seal +departed +divine +coaching +examination +punishment +manufacturer +sink +columns +unincorporated +signals +nevada +squeezed +dylan +dining +martial +manuel +eighteen +elevator +brushed +plates +ministers +congregation +slept +specialized +taxes +restricted +negotiations +likes +statistical +arnold +inspiration +execution +bold +intermediate +significance +margin +ruler +wheels +gothic +intellectual +dependent +listened +eligible +buses +widow +syria +earn +cincinnati +collapsed +recipient +secrets +accessible +philippine +maritime +goddess +clerk +surrender +breaks +playoff +ideal +beetle +aspect +soap +regulation +strings +expand +anglo +shorter +crosses +retreat +tough +coins +wallace +directions +pressing +shipping +locomotives +comparison +topics +nephew +distinction +honors +travelled +sierra +ibn +fortress +recognised +carved +1869 +clients +intent +coaches +describing +bread +##ington +beaten +northwestern +merit +collapse +challenges +historians +objective +submitted +virus +attacking +drake +assume +diseases +stem +leeds +farming +glasses +visits +nowhere +fellowship +relevant +carries +restaurants +experiments +constantly +bases +targets +shah +tenth +opponents +verse +territorial +writings +corruption +instruction +inherited +reverse +emphasis +employee +arch +keeps +rabbi +watson +payment +uh +nancy +##tre +venice +fastest +sexy +banned +adrian +properly 
+ruth +touchdown +dollar +boards +metre +circles +edges +favour +travels +liberation +scattered +firmly +holland +permitted +diesel +kenya +den +originated +demons +resumed +dragged +rider +servant +blinked +extend +torn +##sey +input +meal +everybody +cylinder +kinds +camps +bullet +logic +croatian +evolved +healthy +fool +wise +preserve +pradesh +respective +artificial +gross +corresponding +convicted +cage +caroline +dialogue +##dor +narrative +stranger +mario +christianity +failing +trent +commanding +buddhist +1848 +maurice +focusing +yale +bike +altitude +mouse +revised +##sley +veteran +pulls +theology +crashed +campaigns +legion +##ability +drag +excellence +customer +cancelled +intensity +excuse +liga +participating +contributing +printing +##burn +variable +curious +legacy +renaissance +symptoms +binding +vocalist +dancer +grammar +gospel +democrats +enters +diplomatic +hitler +clouds +mathematical +quit +defended +oriented +##heim +fundamental +hardware +impressive +equally +convince +confederate +guilt +chuck +sliding +magnetic +narrowed +petersburg +bulgaria +otto +phd +skill +hopes +pitcher +reservoir +hearts +automatically +expecting +mysterious +bennett +extensively +imagined +seeds +monitor +fix +##ative +journalism +struggling +signature +ranch +encounter +photographer +observation +protests +influences +calendar +cruz +croatia +locomotive +hughes +naturally +shakespeare +basement +hook +uncredited +faded +theories +approaches +dare +phillips +filling +fury +obama +efficient +arc +deliver +breeding +inducted +leagues +efficiency +axis +montana +eagles +##ked +supplied +instructions +karen +picking +indicating +trap +anchor +practically +christians +tomb +vary +occasional +electronics +lords +readers +newcastle +faint +innovation +collect +situations +engagement +claude +mixture +##feld +peer +tissue +lean +°f +floors +architects +reducing +rope +1859 +ottawa +##har +samples +banking +declaration +proteins +resignation +francois +saudi +advocate +exhibited +armor +twins +divorce +##ras +abraham +reviewed +temporarily +matrix +physically +pulse +curled +difficulties +bengal +usage +##ban +riders +certificate +holes +warsaw +distinctive +mutual +1857 +customs +circular +eugene +removal +loaded +mere +vulnerable +depicted +generations +dame +heir +enormous +lightly +climbing +pitched +lessons +pilots +nepal +preparing +brad +louise +renowned +liam +##ably +shaw +brilliant +bills +##nik +fucking +mainland +pleasant +seized +veterans +jerked +fail +brush +radiation +stored +warmth +southeastern +nate +sin +raced +berkeley +joke +athlete +designation +trunk +roland +qualification +heels +artwork +receives +judicial +reserves +##bed +woke +installation +abu +floating +fake +lesser +excitement +interface +concentrated +addressed +characteristic +amanda +saxophone +monk +releasing +egg +dies +interaction +defender +outbreak +glory +loving +sequel +consciousness +awake +ski +enrolled +handling +rookie +brow +somebody +biography +warfare +amounts +contracts +presentation +fabric +dissolved +challenged +meter +psychological +elevated +rally +accurate +##tha +hospitals +undergraduate +specialist +venezuela +exhibit +shed +nursing +protestant +fluid +structural +footage +jared +consistent +prey +##ska +succession +reflect +exile +lebanon +wiped +suspect +shanghai +resting +integration +preservation +marvel +variant +pirates +sheep +rounded +capita +sailing +colonies +manuscript +deemed +variations +clarke +functional +emerging +boxing +relaxed +curse +azerbaijan +heavyweight +nickname 
+editorial +rang +grid +tightened +earthquake +flashed +miguel +rushing +##ches +improvements +boxes +brooks +consumption +molecular +felix +societies +repeatedly +variation +aids +civic +graphics +professionals +realm +autonomous +receiver +delayed +workshop +militia +chairs +canyon +harsh +extending +lovely +happiness +##jan +stake +eyebrows +embassy +wellington +hannah +corners +bishops +swear +cloth +contents +namely +commenced +1854 +stanford +nashville +courage +graphic +commitment +garrison +hamlet +clearing +rebels +attraction +literacy +cooking +ruins +temples +jenny +humanity +celebrate +hasn +freight +sixty +rebel +bastard +newton +deer +##ges +##ching +smiles +delaware +singers +approaching +assists +flame +boulevard +barrel +planted +pursuit +consequences +shallow +invitation +rode +depot +ernest +kane +rod +concepts +preston +topic +chambers +striking +blast +arrives +descendants +montgomery +ranges +worlds +chaos +praise +fewer +1855 +sanctuary +mud +programmes +maintaining +harper +bore +handsome +closure +tournaments +nebraska +linda +facade +puts +satisfied +argentine +dale +cork +dome +panama +##yl +1858 +tasks +experts +##ates +feeding +equation +engage +bryan +um +quartet +disbanded +sheffield +blocked +gasped +delay +kisses +connects +##non +sts +poured +creator +publishers +guided +ellis +extinct +hug +gaining +##ord +complicated +poll +clenched +investigate +thereby +quantum +spine +cdp +humor +kills +administered +semifinals +encountered +ignore +commentary +##maker +bother +roosevelt +plains +halfway +flowing +cultures +crack +imprisoned +neighboring +airline +gather +wolves +marathon +transformed +cruise +organisations +punch +exhibitions +numbered +alarm +ratings +daddy +silently +##stein +queens +colours +impression +guidance +tactical +##rat +marshal +della +arrow +rested +feared +tender +owns +bitter +advisor +escort +##ides +spare +farms +grants +dragons +encourage +colleagues +cameras +sucked +pile +spirits +prague +statements +suspension +landmark +fence +torture +recreation +bags +permanently +survivors +pond +spy +predecessor +bombing +coup +protecting +transformation +glow +##lands +dug +priests +andrea +feat +barn +jumping +##ologist +casualties +stern +auckland +pipe +serie +revealing +trevor +mercy +spectrum +consist +governing +collaborated +possessed +epic +comprises +blew +shane +lopez +honored +magical +sacrifice +judgment +perceived +hammer +baronet +tune +das +missionary +sheets +neutral +oral +threatening +attractive +shade +aims +seminary +estates +1856 +michel +wounds +refugees +manufacturers +mercury +syndrome +porter +##iya +##din +hamburg +identification +upstairs +purse +widened +pause +cared +breathed +affiliate +santiago +prevented +celtic +fisher +recruited +byzantine +reconstruction +farther +diet +sake +spite +sensation +blank +separation +##hon +vladimir +armies +anime +accommodate +orbit +cult +sofia +##ify +founders +sustained +disorder +honours +northeastern +mia +crops +violet +threats +blanket +fires +canton +followers +southwestern +prototype +voyage +assignment +altered +moderate +protocol +pistol +questioned +brass +lifting +1852 +math +authored +doug +dimensional +dynamic +1851 +pronounced +grateful +quest +uncomfortable +boom +presidency +stevens +relating +politicians +barrier +quinn +diana +mosque +tribal +palmer +portions +sometime +chester +treasure +bend +millions +reforms +registration +consequently +monitoring +ate +preliminary +brandon +invented +eaten +exterior +intervention +ports +documented +displays +lecture 
+sally +favourite +vermont +invisible +isle +breed +journalists +relay +speaks +backward +explore +midfielder +actively +stefan +procedures +cannon +blond +kenneth +centered +servants +chains +libraries +malcolm +essex +henri +slavery +##hal +facts +fairy +coached +cassie +cats +washed +cop +announcement +2000s +vinyl +activated +marco +frontier +growled +curriculum +##das +loyal +accomplished +leslie +ritual +kenny +vii +napoleon +hollow +hybrid +jungle +stationed +friedrich +counted +##ulated +platinum +theatrical +seated +col +rubber +glen +diversity +healing +extends +provisions +administrator +columbus +tributary +assured +##uous +prestigious +examined +lectures +grammy +ronald +associations +bailey +allan +essays +flute +believing +consultant +proceedings +travelling +1853 +kerala +yugoslavia +buddy +methodist +burial +centres +batman +discontinued +dock +stockholm +lungs +severely +citing +manga +steal +mumbai +iraqi +robot +celebrity +bride +broadcasts +abolished +pot +joel +overhead +franz +packed +reconnaissance +johann +acknowledged +introduce +handled +doctorate +developments +drinks +alley +palestine +##aki +proceeded +recover +bradley +grain +patch +afford +infection +nationalist +legendary +interchange +virtually +gen +gravity +exploration +amber +vital +wishes +powell +doctrine +elbow +screenplay +##bird +contribute +indonesian +creates +enzyme +kylie +discipline +drops +manila +hunger +layers +suffer +fever +bits +monica +keyboard +manages +##hood +searched +appeals +##bad +testament +grande +reid +##war +beliefs +congo +requiring +casey +1849 +regret +streak +rape +depends +syrian +sprint +pound +tourists +upcoming +pub +tense +##els +practiced +nationwide +guild +motorcycle +liz +##zar +chiefs +desired +elena +precious +absorbed +relatives +booth +pianist +##mal +citizenship +exhausted +wilhelm +##ceae +##hed +noting +quarterback +urge +hectares +##gue +holly +blonde +davies +parked +sustainable +stepping +twentieth +airfield +nest +chip +##nell +shaft +paulo +requirement +paradise +tobacco +trans +renewed +vietnamese +suggesting +catching +holmes +enjoying +trips +colt +holder +butterfly +nerve +reformed +cherry +bowling +trailer +carriage +goodbye +appreciate +toy +joshua +interactive +enabled +involve +##kan +collar +determination +bunch +recall +shorts +superintendent +episcopal +frustration +giovanni +nineteenth +laser +privately +array +circulation +##ovic +armstrong +deals +painful +permit +discrimination +aires +retiring +cottage +horizon +ellen +jamaica +ripped +fernando +chapters +patron +lecturer +behaviour +genes +georgian +export +solomon +rivals +seventeen +rodriguez +princeton +independently +sox +1847 +arguing +entity +casting +hank +criteria +oakland +geographic +milwaukee +reflection +expanding +conquest +dubbed +halt +brave +brunswick +arched +curtis +divorced +predominantly +somerset +streams +ugly +zoo +horrible +curved +buenos +fierce +dictionary +vector +theological +unions +handful +stability +punjab +segments +altar +ignoring +gesture +monsters +pastor +thighs +unexpected +operators +abruptly +coin +compiled +associates +improving +migration +compact +collegiate +quarterfinals +roster +restore +assembled +hurry +oval +##cies +1846 +flags +martha +victories +sharply +##rated +argues +deadly +drawings +symbols +performer +griffin +restrictions +editing +andrews +journals +arabia +compositions +dee +pierce +removing +hindi +casino +runway +civilians +minds +##zation +refuge +rent +retain +potentially +conferences +suburban +conducting +descended 
+massacre +ammunition +terrain +fork +souls +counts +chelsea +durham +drives +cab +perth +realizing +palestinian +finn +simpson +##dal +betty +moreover +particles +cardinals +tent +evaluation +extraordinary +inscription +wednesday +chloe +maintains +panels +ashley +trucks +##nation +cluster +sunlight +strikes +zhang +dialect +tucked +collecting +##mas +##sville +quoted +evan +franco +aria +buying +cleaning +closet +provision +apollo +clinic +rat +necessarily +##ising +venues +flipped +cent +spreading +trustees +checking +authorized +disappointed +##ado +notion +duration +trumpet +hesitated +topped +brussels +rolls +theoretical +hint +define +aggressive +repeat +wash +peaceful +optical +width +allegedly +mcdonald +strict +##illa +investors +jam +witnesses +sounding +miranda +michelle +hugo +harmony +valid +lynn +glared +nina +headquartered +diving +boarding +gibson +albanian +marsh +routine +dealt +enhanced +intelligent +substance +targeted +enlisted +discovers +spinning +observations +pissed +smoking +capitol +varied +costume +seemingly +indies +compensation +surgeon +thursday +arsenal +westminster +suburbs +rid +anglican +##ridge +knots +foods +alumni +lighter +fraser +whoever +portal +scandal +gavin +advised +instructor +flooding +terrorist +teenage +interim +senses +duck +teen +thesis +abby +eager +overcome +newport +glenn +rises +shame +prompted +priority +forgot +bomber +nicolas +protective +cartoon +katherine +breeze +lonely +trusted +henderson +richardson +relax +palms +remarkable +legends +cricketer +essay +ordained +edmund +rifles +trigger +##uri +##away +sail +alert +1830 +audiences +penn +sussex +siblings +pursued +indianapolis +resist +rosa +consequence +succeed +avoided +1845 +##ulation +inland +##tie +##nna +counsel +profession +chronicle +hurried +##una +eyebrow +eventual +bleeding +innovative +cure +committees +accounting +scope +hardy +heather +tenor +gut +herald +codes +tore +scales +wagon +luxury +tin +prefer +fountain +triangle +bonds +darling +convoy +dried +traced +beings +troy +accidentally +slam +findings +smelled +joey +lawyers +outcome +steep +bosnia +configuration +shifting +toll +brook +performers +lobby +philosophical +construct +shrine +aggregate +cox +phenomenon +savage +insane +solely +reynolds +nationally +holdings +consideration +enable +edgar +fights +relegation +chances +atomic +hub +conjunction +awkward +reactions +currency +finale +kumar +underwent +steering +elaborate +gifts +comprising +melissa +veins +reasonable +sunshine +solve +trails +inhabited +elimination +ethics +huh +ana +molly +consent +apartments +layout +marines +hunters +bulk +##oma +hometown +##wall +##mont +cracked +reads +neighbouring +withdrawn +admission +wingspan +damned +anthology +lancashire +brands +batting +forgive +cuban +awful +##lyn +dimensions +imagination +dante +tracking +desperately +goalkeeper +##yne +groaned +workshops +confident +burton +gerald +milton +circus +uncertain +slope +copenhagen +sophia +fog +philosopher +portraits +accent +cycling +varying +gripped +larvae +garrett +specified +scotia +mature +luther +kurt +rap +##kes +aerial +ferdinand +heated +transported +##shan +safely +nonetheless +##orn +##gal +motors +demanding +##sburg +startled +##brook +ally +generate +caps +ghana +stained +mentions +beds +afterward +##bling +utility +##iro +richards +1837 +conspiracy +conscious +shining +footsteps +observer +cyprus +urged +loyalty +developer +probability +olive +upgraded +gym +miracle +insects +graves +1844 +ourselves +hydrogen +katie +tickets +poets +planes 
+prevention +witnessed +dense +jin +randy +tang +warehouse +monroe +archived +elderly +investigations +alec +granite +mineral +conflicts +controlling +aboriginal +mechanics +stan +stark +rhode +skirt +est +bombs +respected +##horn +imposed +limestone +deny +nominee +memphis +grabbing +disabled +amusement +frankfurt +corn +referendum +varies +slowed +disk +firms +unconscious +incredible +clue +sue +##zhou +twist +##cio +joins +idaho +chad +developers +computing +destroyer +mortal +tucker +kingston +choices +carson +whitney +geneva +pretend +dimension +staged +plateau +maya +##une +freestyle +rovers +##ids +tristan +classroom +prospect +##hus +honestly +diploma +lied +thermal +auxiliary +feast +unlikely +iata +morocco +pounding +treasury +lithuania +considerably +1841 +dish +1812 +geological +matching +stumbled +destroying +marched +brien +advances +nicole +settling +measuring +directing +##mie +tuesday +bassist +capabilities +stunned +fraud +torpedo +##phone +anton +wisdom +surveillance +ruined +##ulate +lawsuit +healthcare +theorem +halls +trend +aka +horizontal +dozens +acquire +lasting +swim +hawk +gorgeous +fees +vicinity +decrease +adoption +tactics +##ography +pakistani +##ole +draws +##hall +willie +burke +heath +algorithm +integral +powder +elliott +brigadier +jackie +tate +varieties +darker +##cho +lately +cigarette +specimens +adds +##ensis +##inger +exploded +finalist +murders +wilderness +arguments +nicknamed +acceptance +onwards +manufacture +robertson +jets +tampa +enterprises +loudly +composers +nominations +1838 +malta +inquiry +automobile +hosting +viii +rays +tilted +grief +museums +strategies +furious +euro +equality +cohen +poison +surrey +wireless +governed +ridiculous +moses +##esh +vanished +barnes +attract +morrison +istanbul +##iness +absent +rotation +petition +janet +##logical +satisfaction +custody +deliberately +observatory +comedian +surfaces +pinyin +novelist +strictly +canterbury +oslo +monks +embrace +jealous +photograph +continent +dorothy +marina +excess +holden +allegations +explaining +stack +avoiding +lance +storyline +majesty +poorly +spike +bradford +raven +travis +classics +proven +voltage +pillow +fists +butt +1842 +interpreted +1839 +gage +telegraph +lens +promising +expelled +casual +collector +zones +silly +nintendo +##kh +downstairs +chef +suspicious +afl +flies +vacant +uganda +pregnancy +condemned +lutheran +estimates +cheap +decree +saxon +proximity +stripped +idiot +deposits +contrary +presenter +magnus +glacier +offense +edwin +##ori +upright +##long +bolt +##ois +toss +geographical +##izes +environments +delicate +marking +abstract +xavier +nails +windsor +plantation +occurring +equity +saskatchewan +fears +drifted +sequences +vegetation +revolt +##stic +1843 +sooner +fusion +opposing +nato +skating +1836 +secretly +ruin +lease +flora +anxiety +##ological +##mia +bout +taxi +emmy +frost +rainbow +compounds +foundations +rainfall +assassination +nightmare +dominican +achievements +deserve +orlando +intact +armenia +##nte +calgary +valentine +marion +proclaimed +theodore +bells +courtyard +thigh +gonzalez +console +troop +minimal +everyday +supporter +terrorism +buck +openly +presbyterian +activists +carpet +##iers +rubbing +uprising +cute +conceived +legally +##cht +millennium +cello +velocity +rescued +cardiff +1835 +rex +concentrate +senators +beard +rendered +glowing +battalions +scouts +competitors +sculptor +catalogue +arctic +ion +raja +bicycle +glancing +lawn +##woman +gentleman +lighthouse +publish +predicted +calculated +variants 
+##gne +strain +winston +deceased +touchdowns +brady +caleb +sinking +echoed +crush +hon +blessed +protagonist +hayes +endangered +magnitude +editors +##tine +estimate +responsibilities +##mel +backup +laying +consumed +sealed +zurich +lovers +frustrated +##eau +ahmed +kicking +treasurer +1832 +biblical +refuse +terrified +pump +agrees +genuine +imprisonment +refuses +plymouth +lou +##nen +tara +trembling +antarctic +ton +learns +##tas +crap +crucial +faction +atop +##borough +wrap +lancaster +odds +hopkins +erik +lyon +##eon +bros +snap +locality +empress +crowned +cal +acclaimed +chuckled +clara +sends +mild +towel +wishing +assuming +interviewed +##bal +interactions +eden +cups +helena +indie +beck +##fire +batteries +filipino +wizard +parted +traces +##born +rows +idol +albany +delegates +##ees +##sar +discussions +notre +instructed +belgrade +highways +suggestion +lauren +possess +orientation +alexandria +abdul +beats +salary +reunion +ludwig +alright +wagner +intimate +pockets +slovenia +hugged +brighton +merchants +cruel +stole +trek +slopes +repairs +enrollment +politically +underlying +promotional +counting +boeing +isabella +naming +keen +bacteria +listing +separately +belfast +ussr +lithuanian +anybody +ribs +sphere +martinez +cock +embarrassed +proposals +fragments +nationals +##wski +premises +fin +alpine +matched +freely +bounded +jace +sleeve +pier +populated +evident +##like +frances +flooded +##dle +frightened +pour +trainer +framed +visitor +challenging +pig +wickets +##fold +infected +##pes +arose +reward +ecuador +oblast +vale +shuttle +##usa +bach +rankings +forbidden +cornwall +accordance +salem +consumers +bruno +fantastic +toes +machinery +resolved +julius +remembering +propaganda +iceland +bombardment +tide +contacts +wives +##rah +concerto +macdonald +albania +implement +daisy +tapped +sudan +helmet +mistress +crop +sunk +finest +##craft +hostile +boxer +fr +paths +adjusted +habit +ballot +supervision +soprano +bullets +wicked +sunset +regiments +disappear +lamp +performs +##gia +rabbit +digging +incidents +entries +##cion +dishes +introducing +##ati +##fied +freshman +slot +jill +tackles +baroque +backs +##iest +lone +sponsor +destiny +altogether +convert +##aro +consensus +shapes +demonstration +basically +feminist +auction +artifacts +##bing +strongest +halifax +allmusic +mighty +smallest +precise +alexandra +viola +##los +##ille +manuscripts +##illo +dancers +ari +managers +monuments +blades +barracks +springfield +maiden +consolidated +electron +berry +airing +wheat +nobel +inclusion +blair +payments +geography +bee +eleanor +react +##hurst +afc +manitoba +lineup +fitness +recreational +investments +airborne +disappointment +##dis +edmonton +viewing +renovation +infant +bankruptcy +roses +aftermath +pavilion +carpenter +withdrawal +ladder +discussing +popped +reliable +agreements +rochester +##abad +curves +bombers +rao +reverend +decreased +choosing +stiff +consulting +naples +crawford +tracy +ribbon +cops +crushed +deciding +unified +teenager +accepting +flagship +poles +sanchez +inspection +revived +skilled +induced +exchanged +flee +locals +tragedy +swallow +hanna +demonstrate +##ela +salvador +flown +contestants +civilization +##ines +wanna +rhodes +fletcher +hector +knocking +considers +nash +mechanisms +sensed +mentally +walt +unclear +##eus +renovated +madame +crews +governmental +undertaken +monkey +##ben +##ato +fatal +armored +copa +caves +governance +grasp +perception +certification +froze +damp +tugged +wyoming +##rg +##ero +newman +nerves 
+curiosity +graph +##ami +withdraw +tunnels +dull +meredith +moss +exhibits +neighbors +communicate +accuracy +explored +raiders +republicans +secular +kat +superman +penny +criticised +freed +conviction +ham +likewise +delegation +gotta +doll +promises +technological +myth +nationality +resolve +convent +sharon +dig +sip +coordinator +entrepreneur +fold +##dine +capability +councillor +synonym +blown +swan +cursed +1815 +jonas +haired +sofa +canvas +keeper +rivalry +##hart +rapper +speedway +swords +postal +maxwell +estonia +potter +recurring +errors +##oni +cognitive +1834 +claws +nadu +roberto +bce +wrestler +ellie +infinite +ink +##tia +presumably +finite +staircase +noel +patricia +nacional +chill +eternal +tu +preventing +prussia +fossil +limbs +##logist +ernst +frog +perez +rene +prussian +##ios +molecules +regulatory +answering +opinions +sworn +lengths +supposedly +hypothesis +upward +habitats +seating +ancestors +drank +yield +synthesis +researcher +modest +##var +mothers +peered +voluntary +homeland +acclaim +##igan +static +valve +luxembourg +alto +carroll +receptor +norton +ambulance +##tian +johnston +catholics +depicting +jointly +elephant +gloria +mentor +badge +ahmad +distinguish +remarked +councils +precisely +allison +advancing +detection +crowded +cooperative +ankle +mercedes +dagger +surrendered +pollution +commit +subway +jeffrey +lesson +sculptures +provider +##fication +membrane +timothy +rectangular +fiscal +heating +teammate +basket +particle +anonymous +deployment +missiles +courthouse +proportion +shoe +sec +complaints +forbes +blacks +abandon +remind +sizes +overwhelming +autobiography +natalie +##awa +risks +contestant +countryside +babies +scorer +invaded +enclosed +proceed +hurling +disorders +##cu +reflecting +continuously +cruiser +graduates +freeway +investigated +ore +deserved +maid +blocking +phillip +jorge +shakes +dove +mann +variables +lacked +burden +accompanying +que +consistently +organizing +provisional +complained +endless +tubes +juice +georges +krishna +mick +thriller +laps +arcade +sage +snail +shannon +laurence +seoul +vacation +presenting +hire +churchill +surprisingly +prohibited +savannah +technically +##oli +##lessly +testimony +suited +speeds +toys +romans +flowering +measurement +talented +kay +settings +charleston +expectations +shattered +achieving +triumph +ceremonies +portsmouth +lanes +mandatory +loser +stretching +cologne +realizes +seventy +cornell +careers +webb +##ulating +americas +budapest +ava +suspicion +yo +conrad +sterling +jessie +rector +##az +1831 +transform +organize +loans +christine +volcanic +warrant +slender +summers +subfamily +newer +danced +dynamics +rhine +proceeds +heinrich +gastropod +commands +sings +facilitate +easter +positioned +responses +expense +fruits +yanked +imported +25th +velvet +vic +primitive +tribune +baldwin +neighbourhood +donna +rip +hay +##uro +1814 +espn +welcomed +##aria +qualifier +glare +highland +timing +##cted +shells +eased +geometry +louder +exciting +slovakia +##iz +savings +prairie +marching +rafael +tonnes +##lled +curtain +preceding +shy +heal +greene +worthy +##pot +detachment +bury +sherman +##eck +reinforced +seeks +bottles +contracted +duchess +outfit +walsh +mickey +geoffrey +archer +squeeze +dawson +eliminate +invention +##enberg +neal +##eth +stance +dealer +coral +maple +retire +simplified +1833 +hid +watts +backwards +jules +##oke +genesis +frames +rebounds +burma +woodland +moist +santos +whispers +drained +subspecies +streaming +ulster +burnt +correspondence 
+maternal +gerard +denis +stealing +genius +duchy +##oria +inaugurated +momentum +suits +placement +sovereign +clause +thames +##hara +confederation +reservation +sketch +yankees +lets +rotten +charm +hal +verses +commercially +dot +salon +citation +adopt +winnipeg +mist +allocated +cairo +jenkins +interference +objectives +##wind +1820 +portfolio +armoured +sectors +initiatives +integrity +exercises +robe +tap +gazed +##tones +distracted +rulers +favorable +jerome +tended +cart +factories +##eri +diplomat +valued +gravel +charitable +calvin +exploring +shepherd +terrace +pupil +##ural +reflects +##rch +governors +shelf +depths +##nberg +trailed +crest +tackle +##nian +hatred +##kai +clare +makers +ethiopia +longtime +detected +embedded +lacking +slapped +rely +thomson +anticipation +morton +successive +agnes +screenwriter +straightened +philippe +playwright +haunted +licence +iris +intentions +sutton +logical +correctly +##weight +branded +licked +tipped +silva +ricky +narrator +requests +##ents +greeted +supernatural +cow +##wald +lung +refusing +employer +strait +gaelic +liner +##piece +zoe +sabha +##mba +driveway +harvest +prints +bates +reluctantly +threshold +algebra +ira +wherever +coupled +assumption +picks +designers +raids +gentlemen +roller +blowing +leipzig +locks +screw +dressing +strand +##lings +scar +dwarf +depicts +##nu +nods +differ +boris +##eur +yuan +flip +##gie +mob +invested +questioning +applying +shout +##sel +gameplay +blamed +illustrations +bothered +weakness +rehabilitation +##zes +envelope +rumors +miners +leicester +subtle +kerry +ferguson +premiership +bengali +prof +catches +remnants +dana +##rily +shouting +presidents +baltic +ought +ghosts +dances +sailors +shirley +fancy +dominic +##bie +madonna +##rick +bark +buttons +gymnasium +ashes +liver +toby +oath +providence +doyle +evangelical +nixon +cement +carnegie +embarked +hatch +surroundings +guarantee +needing +pirate +essence +filter +crane +hammond +projected +immune +percy +twelfth +regent +doctoral +damon +mikhail +##ichi +critically +elect +realised +abortion +acute +screening +mythology +steadily +frown +nottingham +kirk +wa +minneapolis +##rra +module +algeria +nautical +encounters +surprising +statues +availability +shirts +pie +alma +brows +munster +mack +soup +crater +tornado +sanskrit +cedar +explosive +bordered +dixon +planets +stamp +exam +happily +##bble +carriers +kidnapped +accommodation +emigrated +##met +knockout +correspondent +violation +profits +peaks +lang +specimen +agenda +ancestry +pottery +spelling +equations +obtaining +ki +linking +1825 +debris +asylum +buddhism +##ants +gazette +dental +eligibility +fathers +averaged +zimbabwe +francesco +coloured +hissed +translator +lynch +mandate +humanities +mackenzie +uniforms +##iana +asset +fitting +samantha +genera +rim +beloved +shark +riot +entities +expressions +indo +carmen +slipping +owing +abbot +neighbor +sidney +rats +recommendations +encouraging +squadrons +anticipated +commanders +conquered +donations +diagnosed +divide +##iva +guessed +decoration +vernon +auditorium +revelation +conversations +##kers +##power +herzegovina +dash +alike +protested +lateral +herman +accredited +##gent +freeman +mel +fiji +crow +crimson +##rine +livestock +##pped +humanitarian +bored +oz +whip +##lene +##ali +legitimate +alter +grinning +spelled +anxious +oriental +wesley +##nin +##hole +carnival +controller +detect +##ssa +bowed +educator +kosovo +macedonia +##sin +occupy +mastering +stephanie +janeiro +para +unaware +nurses +noon +hopefully 
+ranger +combine +sociology +polar +rica +##eer +neill +##sman +holocaust +doubled +lust +1828 +decent +cooling +unveiled +1829 +nsw +homer +chapman +meyer +dive +mae +reagan +expertise +##gled +darwin +brooke +sided +prosecution +investigating +comprised +petroleum +genres +reluctant +differently +trilogy +johns +vegetables +corpse +highlighted +lounge +pension +unsuccessfully +elegant +aided +ivory +beatles +amelia +cain +dubai +immigrant +babe +underwater +combining +mumbled +atlas +horns +accessed +ballad +physicians +homeless +gestured +rpm +freak +louisville +corporations +patriots +prizes +rational +warn +modes +decorative +overnight +din +troubled +phantom +monarch +sheer +##dorf +generals +guidelines +organs +addresses +enhance +curling +parishes +cord +##kie +caesar +deutsche +bavaria +coleman +cyclone +##eria +bacon +petty +##yama +##old +hampton +diagnosis +1824 +throws +complexity +rita +disputed +pablo +marketed +trafficking +##ulus +examine +plague +formats +vault +faithful +##bourne +webster +highlights +##ient +phones +vacuum +sandwich +modeling +##gated +bolivia +clergy +qualities +isabel +##nas +##ars +wears +screams +reunited +annoyed +bra +##ancy +##rate +differential +transmitter +tattoo +container +poker +##och +excessive +resides +cowboys +##tum +augustus +trash +providers +statute +retreated +balcony +reversed +void +storey +preceded +masses +leap +laughs +neighborhoods +wards +schemes +falcon +santo +battlefield +ronnie +lesbian +venus +##dian +beg +sandstone +daylight +punched +gwen +analog +stroked +wwe +acceptable +measurements +toxic +##kel +adequate +surgical +economist +parameters +varsity +##sberg +quantity +##chy +##rton +countess +generating +precision +diamonds +expressway +##ı +1821 +uruguay +talents +galleries +expenses +scanned +colleague +outlets +ryder +lucien +##ila +paramount +syracuse +dim +fangs +gown +sweep +##sie +missionaries +websites +sentences +adviser +val +trademark +spells +##plane +patience +starter +slim +##borg +toe +incredibly +shoots +elliot +nobility +##wyn +cowboy +endorsed +gardner +tendency +persuaded +organisms +emissions +kazakhstan +amused +boring +chips +themed +##hand +constantinople +chasing +systematic +guatemala +borrowed +erin +carey +##hard +highlands +struggles +1810 +##ifying +##ced +exceptions +develops +enlarged +kindergarten +castro +##rina +leigh +zombie +juvenile +##most +consul +sailor +hyde +clarence +intensive +pinned +nasty +useless +jung +clayton +stuffed +exceptional +ix +apostolic +transactions +exempt +swinging +cove +religions +shields +dairy +bypass +pursuing +joyce +bombay +chassis +southampton +chat +interact +redesignated +##pen +nascar +pray +salmon +rigid +regained +malaysian +grim +publicity +constituted +capturing +toilet +delegate +purely +tray +drift +loosely +striker +weakened +trinidad +mitch +itv +defines +transmitted +scarlet +nodding +fitzgerald +narrowly +tooth +standings +virtue +##wara +##cting +chateau +gloves +lid +hurting +conservatory +##pel +sinclair +reopened +sympathy +nigerian +strode +advocated +optional +chronic +discharge +suck +compatible +laurel +stella +fails +wage +dodge +informal +sorts +levi +buddha +villagers +chronicles +heavier +summoned +gateway +eleventh +jewelry +translations +accordingly +seas +##ency +fiber +pyramid +cubic +dragging +##ista +caring +##ops +contacted +lunar +lisbon +patted +1826 +sacramento +theft +madagascar +subtropical +disputes +holidays +piper +willow +mare +cane +newfoundland +benny +companions +dong +raj +observe +roar +charming +plaque 
+tibetan +fossils +enacted +manning +bubble +tanzania +##eda +##hir +funk +swamp +deputies +cloak +ufc +scenario +par +scratch +metals +anthem +guru +engaging +specially +##boat +dialects +nineteen +cecil +duet +disability +unofficial +##lies +defunct +moonlight +drainage +surname +puzzle +switching +conservatives +mammals +knox +broadcaster +sidewalk +cope +##ried +benson +princes +peterson +##sal +bedford +sharks +eli +wreck +alberto +gasp +archaeology +lgbt +teaches +securities +madness +compromise +waving +coordination +davidson +visions +leased +possibilities +eighty +fernandez +enthusiasm +assassin +sponsorship +reviewer +kingdoms +estonian +laboratories +##fy +##nal +applies +verb +celebrations +##zzo +rowing +lightweight +sadness +submit +balanced +dude +explicitly +metric +magnificent +mound +brett +mohammad +mistakes +irregular +sanders +betrayed +shipped +surge +##enburg +reporters +termed +georg +pity +verbal +bulls +abbreviated +enabling +appealed +sicily +sting +heel +sweetheart +bart +spacecraft +brutal +monarchy +aberdeen +cameo +diane +survivor +clyde +##aries +complaint +##makers +clarinet +delicious +chilean +karnataka +coordinates +1818 +panties +##rst +pretending +dramatically +kiev +tends +distances +catalog +launching +instances +telecommunications +portable +lindsay +vatican +##eim +angles +aliens +marker +stint +screens +bolton +##rne +judy +wool +benedict +plasma +europa +imaging +filmmaker +swiftly +contributor +opted +stamps +apologize +financing +butter +gideon +sophisticated +alignment +avery +chemicals +yearly +speculation +prominence +professionally +immortal +institutional +inception +wrists +identifying +tribunal +derives +gains +papal +preference +linguistic +vince +operative +brewery +##ont +unemployment +boyd +##ured +##outs +albeit +prophet +1813 +##rad +quarterly +asteroid +cleaned +radius +temper +##llen +telugu +jerk +viscount +##ote +glimpse +##aya +yacht +hawaiian +baden +laptop +readily +##gu +monetary +offshore +scots +watches +##yang +##arian +upgrade +needle +lea +encyclopedia +flank +fingertips +delight +teachings +confirm +roth +beaches +midway +winters +##iah +teasing +daytime +beverly +gambling +##backs +regulated +clement +hermann +tricks +knot +##shing +##uring +##vre +detached +ecological +owed +specialty +byron +inventor +bats +stays +screened +unesco +midland +trim +affection +##ander +jess +thoroughly +feedback +chennai +strained +heartbeat +wrapping +overtime +pleaded +##sworth +leisure +oclc +##tate +##ele +feathers +angelo +thirds +nuts +surveys +clever +gill +commentator +##dos +darren +rides +gibraltar +dissolution +dedication +shin +meals +saddle +elvis +reds +chaired +taller +appreciation +functioning +niece +favored +advocacy +robbie +criminals +suffolk +yugoslav +passport +constable +congressman +hastings +##rov +consecrated +sparks +ecclesiastical +confined +##ovich +muller +floyd +nora +1822 +paved +1827 +cumberland +ned +saga +spiral +appreciated +collaborative +treating +similarities +feminine +finishes +##ib +jade +import +##hot +champagne +mice +securing +celebrities +helsinki +attributes +##gos +cousins +phases +ache +lucia +gandhi +submission +vicar +spear +shine +tasmania +biting +detention +constitute +tighter +seasonal +##gus +terrestrial +matthews +effectiveness +parody +philharmonic +##onic +1816 +strangers +encoded +consortium +guaranteed +regards +shifts +tortured +collision +supervisor +inform +broader +insight +theaters +armour +emeritus +blink +incorporates +mapping +handball +flexible +##nta +substantially 
+generous +thief +carr +loses +1793 +prose +ucla +romeo +generic +metallic +realization +damages +commissioners +zach +default +helicopters +lengthy +stems +partnered +spectators +rogue +indication +penalties +teresa +1801 +sen +##tric +dalton +##wich +irving +photographic +##vey +deaf +peters +excluded +unsure +##vable +patterson +crawled +##zio +resided +whipped +latvia +slower +ecole +pipes +employers +maharashtra +comparable +textile +pageant +##gel +alphabet +binary +irrigation +chartered +choked +antoine +offs +waking +supplement +quantities +demolition +regain +locate +urdu +folks +scary +andreas +whites +##ava +classrooms +mw +aesthetic +publishes +valleys +guides +cubs +johannes +bryant +conventions +affecting +##itt +drain +awesome +isolation +prosecutor +ambitious +apology +captive +downs +atmospheric +lorenzo +aisle +beef +foul +##onia +kidding +composite +disturbed +illusion +natives +##ffer +rockets +riverside +wartime +painters +adolf +melted +uncertainty +simulation +hawks +progressed +meantime +builder +spray +breach +unhappy +regina +russians +determining +tram +1806 +##quin +aging +1823 +garion +rented +mister +diaz +terminated +clip +1817 +depend +nervously +disco +owe +defenders +shiva +notorious +disbelief +shiny +worcester +##gation +##yr +trailing +undertook +islander +belarus +limitations +watershed +fuller +overlooking +utilized +raphael +1819 +synthetic +breakdown +klein +##nate +moaned +memoir +lamb +practicing +##erly +cellular +arrows +exotic +witches +charted +rey +hut +hierarchy +subdivision +freshwater +giuseppe +aloud +reyes +qatar +marty +sideways +utterly +sexually +jude +prayers +mccarthy +softball +blend +damien +##gging +##metric +wholly +erupted +lebanese +negro +revenues +tasted +comparative +teamed +transaction +labeled +maori +sovereignty +parkway +trauma +gran +malay +advancement +descendant +buzz +salvation +inventory +symbolic +##making +antarctica +mps +##bro +mohammed +myanmar +holt +submarines +tones +##lman +locker +patriarch +bangkok +emerson +remarks +predators +kin +afghan +confession +norwich +rental +emerge +advantages +##zel +rca +##hold +shortened +storms +aidan +##matic +autonomy +compliance +##quet +dudley +##osis +1803 +motto +documentation +summary +professors +spectacular +christina +archdiocese +flashing +innocence +remake +##dell +psychic +reef +scare +employ +sticks +meg +gus +leans +accompany +bergen +tomas +doom +wages +pools +##bes +breasts +scholarly +alison +outline +brittany +breakthrough +willis +realistic +##cut +##boro +competitor +##stan +pike +picnic +designing +commercials +washing +villain +skiing +costumes +auburn +halted +executives +logistics +cycles +vowel +applicable +barrett +exclaimed +eurovision +eternity +ramon +##umi +modifications +sweeping +disgust +torch +aviv +ensuring +rude +dusty +sonic +donovan +outskirts +cu +pathway +##band +##gun +disciplines +acids +cadet +paired +sketches +##sive +marriages +folding +peers +slovak +implies +admired +##beck +1880s +leopold +instinct +attained +weston +megan +horace +##ination +dorsal +ingredients +evolutionary +complications +deity +lethal +brushing +levy +deserted +institutes +posthumously +delivering +telescope +coronation +motivated +rapids +luc +flicked +pays +volcano +tanner +weighed +##nica +crowds +frankie +gifted +addressing +granddaughter +winding +##rna +constantine +gomez +##front +landscapes +rudolf +anthropology +slate +werewolf +astronomy +circa +rouge +dreaming +sack +knelt +drowned +naomi +prolific +tracked +freezing +herb +agony +randall 
+twisting +wendy +deposit +touches +vein +wheeler +##bbled +batted +retaining +tire +presently +compare +specification +daemon +nigel +##grave +merry +recommendation +czechoslovakia +sandra +roma +##sts +lambert +inheritance +sheikh +winchester +cries +examining +##yle +comeback +cuisine +nave +##iv +retrieve +tomatoes +barker +polished +defining +irene +lantern +personalities +begging +tract +swore +1809 +##gic +omaha +brotherhood +haiti +##ots +exeter +##ete +##zia +steele +dumb +pearson +surveyed +elisabeth +trends +fritz +bugs +fraction +calmly +viking +##birds +tug +inserted +unusually +##ield +confronted +distress +crashing +brent +turks +resign +##olo +cambodia +gabe +sauce +##kal +evelyn +extant +clusters +quarry +teenagers +luna +##lers +##ister +affiliation +drill +##ashi +panthers +scenic +libya +anita +strengthen +inscriptions +##cated +lace +sued +judith +riots +##uted +mint +##eta +preparations +midst +dub +challenger +##vich +mock +displaced +wicket +breaths +enables +schmidt +analyst +##lum +highlight +automotive +axe +josef +newark +sufficiently +resembles +50th +##pal +flushed +mum +traits +##ante +commodore +incomplete +warming +titular +ceremonial +ethical +celebrating +eighteenth +cao +lima +medalist +mobility +strips +snakes +miniature +zagreb +barton +escapes +umbrella +automated +doubted +differs +cooled +georgetown +dresden +cooked +fade +wyatt +jacobs +carlton +abundant +stereo +madras +inning +spur +malayalam +begged +osaka +groan +escaping +charging +dose +##aj +bud +papa +communists +advocates +edged +tri +resemble +peaking +necklace +fried +montenegro +saxony +goose +glances +stuttgart +curator +recruit +grocery +sympathetic +##tting +##fort +lotus +randolph +ancestor +##rand +succeeding +jupiter +1798 +macedonian +##heads +hiking +1808 +handing +fischer +##itive +garbage +##pies +prone +singular +papua +inclined +attractions +italia +pouring +motioned +grandma +garnered +jacksonville +corp +ego +ringing +aluminum +##hausen +ordering +##foot +drawer +traders +synagogue +##kawa +resistant +wandering +fragile +fiona +teased +hardcore +soaked +jubilee +decisive +exposition +mercer +poster +valencia +hale +kuwait +1811 +##ises +##wr +##eed +tavern +gamma +johan +##uer +airways +amino +gil +vocational +domains +torres +generator +folklore +outcomes +##keeper +canberra +shooter +fl +beams +confrontation +##gram +aligned +forestry +pipeline +jax +motorway +conception +decay +coffin +##cott +stalin +1805 +escorted +minded +##nam +sitcom +purchasing +twilight +veronica +additions +passive +tensions +straw +frequencies +1804 +refugee +cultivation +##iate +christie +clary +bulletin +crept +disposal +##rich +##zong +processor +crescent +##rol +emphasized +whale +nazis +aurora +dwelling +hauled +sponsors +toledo +ideology +theatres +tessa +cerambycidae +saves +turtle +cone +suspects +kara +rusty +yelling +greeks +mozart +shades +cocked +participant +shire +spit +freeze +necessity +##cos +inmates +nielsen +councillors +loaned +uncommon +omar +peasants +botanical +offspring +daniels +formations +jokes +1794 +pioneers +sigma +licensing +##sus +wheelchair +polite +1807 +liquor +pratt +trustee +##uta +forewings +balloon +kilometre +camping +explicit +casually +shawn +foolish +teammates +nm +hassan +carrie +judged +satisfy +vanessa +knives +selective +flowed +##lice +stressed +eliza +mathematician +cease +cultivated +##roy +commissions +browns +##ania +destroyers +sheridan +meadow +##rius +minerals +##cial +downstream +clash +gram +memoirs +ventures +baha +seymour +archie 
+midlands +edith +fare +flynn +invite +canceled +tiles +stabbed +boulder +incorporate +amended +camden +facial +mollusk +unreleased +descriptions +grabs +raises +ramp +shiver +##rose +coined +pioneering +tunes +qing +warwick +tops +melanie +giles +##rous +wandered +##inal +annexed +30th +unnamed +##ished +organizational +airplane +normandy +stoke +whistle +blessing +violations +chased +holders +shotgun +##ctic +reactor +##vik +tires +tearing +shores +fortified +mascot +constituencies +columnist +productive +tibet +##rta +lineage +hooked +tapes +judging +cody +##gger +hansen +kashmir +triggered +##eva +solved +cliffs +##tree +resisted +anatomy +protesters +transparent +implied +##iga +injection +mattress +excluding +##mbo +defenses +helpless +devotion +##elli +growl +liberals +weber +phenomena +atoms +plug +##iff +mortality +apprentice +howe +convincing +swimmer +barber +leone +promptly +sodium +def +nowadays +arise +##oning +gloucester +corrected +dignity +norm +erie +##ders +elders +evacuated +compression +##yar +hartford +backpack +reasoning +accepts +24th +wipe +millimetres +marcel +##oda +dodgers +albion +1790 +overwhelmed +aerospace +oaks +1795 +showcase +acknowledge +recovering +nolan +ashe +hurts +geology +fashioned +disappearance +farewell +swollen +shrug +marquis +wimbledon +rue +1792 +commemorate +reduces +experiencing +inevitable +calcutta +##court +murderer +sticking +fisheries +imagery +bloom +##inus +gustav +hesitation +memorable +viral +beans +accidents +tunisia +antenna +spilled +consort +treatments +aye +perimeter +##gard +donation +hostage +migrated +banker +addiction +apex +lil +trout +##ously +conscience +##nova +rams +sands +genome +passionate +troubles +##lets +amid +##ibility +##ret +higgins +exceed +vikings +##vie +payne +##zan +muscular +defendant +sucking +##wal +ibrahim +fuselage +claudia +vfl +europeans +snails +interval +##garh +preparatory +statewide +tasked +lacrosse +viktor +##lation +angola +##hra +flint +implications +employs +teens +patrons +stall +weekends +barriers +scrambled +nucleus +tehran +jenna +parsons +lifelong +robots +displacement +##bles +precipitation +knuckles +clutched +1802 +marrying +ecology +marx +accusations +declare +scars +kolkata +mat +meadows +bermuda +skeleton +finalists +vintage +crawl +coordinate +affects +subjected +orchestral +mistaken +mirrors +dipped +relied +arches +candle +##nick +incorporating +wildly +fond +basilica +owl +fringe +rituals +whispering +stirred +feud +tertiary +slick +goat +honorable +whereby +ricardo +stripes +parachute +adjoining +submerged +synthesizer +##gren +intend +positively +ninety +phi +beaver +partition +fellows +alexis +prohibition +carlisle +bizarre +fraternity +doubts +icy +aquatic +sneak +sonny +combines +airports +crude +supervised +spatial +merge +alfonso +##bic +corrupt +scan +undergo +##ams +disabilities +colombian +comparing +dolphins +perkins +reprinted +unanimous +bounced +hairs +underworld +midwest +semester +bucket +paperback +miniseries +coventry +demise +##leigh +demonstrations +sensor +rotating +yan +##hler +arrange +soils +##idge +hyderabad +labs +brakes +grandchildren +##nde +negotiated +rover +ferrari +continuation +directorate +augusta +stevenson +counterpart +gore +##rda +nursery +rican +ave +collectively +broadly +pastoral +repertoire +asserted +discovering +nordic +styled +fiba +cunningham +harley +middlesex +survives +tumor +tempo +zack +aiming +lok +urgent +##nto +devils +contractor +turin +##wl +bliss +repaired +simmons +moan +astronomical +negotiate +lyric +1890s +lara 
+bred +clad +angus +pbs +engineered +posed +hernandez +possessions +elbows +psychiatric +strokes +confluence +electorate +lifts +campuses +lava +alps +##ution +##date +physicist +woody +##ographic +##itis +juliet +reformation +sparhawk +complement +suppressed +jewel +##½ +floated +##kas +continuity +sadly +##ische +inability +melting +scanning +paula +flour +judaism +safer +vague +solving +curb +##stown +financially +gable +bees +expired +miserable +cassidy +dominion +1789 +cupped +robbery +facto +amos +warden +resume +tallest +marvin +pounded +declaring +gasoline +##aux +darkened +sophomore +##mere +erection +gossip +televised +risen +dial +##eu +pillars +passages +profound +arabian +ashton +silicon +nail +##lated +##hardt +fleming +firearms +ducked +circuits +blows +waterloo +titans +fireplace +cheshire +financed +activation +algorithms +constituent +catcher +cherokee +partnerships +sexuality +platoon +tragic +vivian +guarded +whiskey +meditation +poetic +##nga +porto +listeners +dominance +kendra +mona +chandler +factions +22nd +salisbury +attitudes +derivative +##ido +##haus +intake +paced +javier +illustrator +barrels +bias +cockpit +burnett +dreamed +ensuing +receptors +someday +hawkins +mattered +##lal +slavic +1799 +jesuit +cameroon +wasted +wax +lowering +victorious +freaking +outright +hancock +librarian +sensing +bald +calcium +myers +tablet +announcing +barack +shipyard +pharmaceutical +greenwich +flush +medley +patches +wolfgang +speeches +acquiring +exams +nikolai +hayden +kannada +reilly +waitress +abdomen +devastated +capped +pseudonym +pharmacy +fulfill +paraguay +1796 +clicked +##trom +archipelago +syndicated +##hman +lumber +orgasm +rejection +clifford +lorraine +advent +mafia +rodney +brock +##used +##elia +cassette +chamberlain +despair +mongolia +sensors +developmental +upstream +##alis +spanning +trombone +basque +seeded +interred +renewable +rhys +leapt +revision +molecule +##ages +chord +vicious +nord +shivered +23rd +arlington +debts +corpus +sunrise +bays +blackburn +centimetres +##uded +shuddered +strangely +gripping +cartoons +isabelle +orbital +##ppa +seals +proving +refusal +strengthened +bust +assisting +baghdad +batsman +portrayal +mara +pushes +spears +og +##cock +reside +nathaniel +brennan +1776 +confirmation +caucus +##worthy +markings +yemen +nobles +ku +lazy +viewer +catalan +encompasses +sawyer +##fall +sparked +substances +patents +braves +arranger +evacuation +sergio +persuade +dover +tolerance +penguin +cum +jockey +insufficient +townships +occupying +declining +plural +processed +projection +puppet +flanders +introduces +liability +##yon +gymnastics +antwerp +hobart +candles +jeep +wes +observers +chaplain +bundle +glorious +##hine +hazel +flung +sol +excavations +dumped +stares +bangalore +triangular +icelandic +intervals +expressing +turbine +##vers +songwriting +crafts +##igo +jasmine +ditch +rite +entertaining +comply +sorrow +wrestlers +basel +emirates +marian +rivera +helpful +##some +caution +downward +networking +##atory +##tered +darted +genocide +emergence +replies +specializing +spokesman +convenient +unlocked +fading +augustine +concentrations +resemblance +elijah +investigator +andhra +##uda +promotes +##rrell +fleeing +simone +announcer +lydia +weaver +residency +modification +##fest +stretches +alternatively +nat +lowe +lacks +##ented +pam +tile +concealed +inferior +abdullah +residences +tissues +vengeance +##ided +moisture +peculiar +groove +bologna +jennings +ninja +oversaw +zombies +pumping +batch +livingston +emerald 
+installations +1797 +peel +nitrogen +rama +##fying +schooling +strands +responding +werner +lime +casa +accurately +targeting +##rod +underway +##uru +hemisphere +lester +##yard +occupies +griffith +angrily +reorganized +##owing +courtney +deposited +estadio +##ifies +dunn +exiled +##ying +checks +##combe +successes +unexpectedly +blu +assessed +##flower +observing +sacked +spiders +kn +nodes +prosperity +audrey +divisional +broncos +tangled +adjust +feeds +erosion +paolo +surf +directory +snatched +humid +admiralty +screwed +reddish +##nese +modules +trench +lamps +bind +leah +bucks +competes +##nz +transcription +isles +violently +clutching +pga +cyclist +inflation +flats +ragged +unnecessary +##hian +stubborn +coordinated +harriet +baba +disqualified +insect +wolfe +##fies +reinforcements +rocked +duel +winked +embraced +bricks +##raj +hiatus +defeats +pending +brightly +jealousy +##xton +##uki +lena +colorful +##dley +stein +kidney +##shu +underwear +wanderers +##haw +##icus +guardians +m³ +roared +habits +##wise +permits +uranium +punished +disguise +bundesliga +elise +dundee +erotic +partisan +collectors +float +individually +rendering +behavioral +bucharest +ser +hare +valerie +corporal +nutrition +proportional +immense +##kis +pavement +##zie +##eld +sutherland +crouched +1775 +suzuki +trades +endurance +operas +crosby +prayed +priory +rory +socially +gujarat +walton +cube +pasha +privilege +lennon +floods +thorne +waterfall +nipple +scouting +approve +##lov +minorities +voter +dwight +extensions +assure +ballroom +slap +dripping +privileges +rejoined +confessed +demonstrating +patriotic +yell +investor +##uth +pagan +slumped +squares +confront +bert +embarrassment +aston +urging +sweater +starr +yuri +brains +williamson +commuter +mortar +structured +selfish +exports +##jon +cds +##him +unfinished +##rre +mortgage +destinations +##nagar +canoe +solitary +buchanan +delays +magistrate +fk +##pling +motivation +##lier +##vier +recruiting +assess +##mouth +malik +antique +1791 +pius +rahman +reich +tub +zhou +smashed +airs +galway +xii +conditioning +honduras +discharged +dexter +##pf +lionel +debates +lemon +volunteered +dioxide +procession +devi +sic +tremendous +advertisements +colts +transferring +verdict +hanover +decommissioned +utter +relate +pac +racism +beacon +limp +similarity +terra +occurrence +ant +becky +capt +updates +armament +richie +pal +##graph +halloween +mayo +##ssen +##bone +cara +serena +fcc +dolls +obligations +##dling +violated +lafayette +jakarta +exploitation +infamous +iconic +##lah +##park +moody +reginald +dread +spill +crystals +olivier +modeled +bluff +equilibrium +separating +notices +ordnance +extinction +onset +cosmic +attachment +sammy +expose +privy +anchored +##bil +abbott +admits +bending +baritone +emmanuel +policeman +vaughan +winged +climax +dresses +denny +polytechnic +mohamed +burmese +authentic +nikki +genetics +grandparents +homestead +gaza +postponed +metacritic +una +##sby +unstable +dissertation +##cian +curls +obscure +uncovered +bronx +praying +disappearing +##hoe +prehistoric +coke +turret +mutations +nonprofit +pits +monaco +##usion +prominently +dispatched +podium +##mir +uci +##uation +fortifications +birthplace +kendall +##lby +##oll +preacher +rack +goodman +persistent +##ott +countless +jaime +recorder +lexington +persecution +jumps +renewal +wagons +crushing +##holder +decorations +##lake +abundance +wrath +laundry +£1 +garde +jeanne +beetles +peasant +splitting +caste +sergei +##rer +##ema +scripts +##ively +rub +satellites 
+##vor +inscribed +verlag +scrapped +gale +packages +chick +potato +slogan +kathleen +arabs +##culture +counterparts +reminiscent +choral +##tead +rand +retains +bushes +dane +accomplish +courtesy +closes +##oth +slaughter +hague +krakow +lawson +tailed +elias +ginger +##ttes +canopy +betrayal +rebuilding +turf +##hof +frowning +allegiance +brigades +kicks +rebuild +polls +alias +nationalism +rowan +audition +bowie +fortunately +recognizes +harp +dillon +horrified +##oro +renault +ropes +presumed +rewarded +infrared +wiping +accelerated +illustration +presses +practitioners +badminton +##iard +detained +##tera +recognizing +relates +misery +##sies +##tly +reproduction +piercing +potatoes +thornton +esther +manners +hbo +##aan +ours +bullshit +ernie +perennial +sensitivity +illuminated +rupert +##iss +rfc +nassau +##dock +staggered +socialism +##haven +appointments +nonsense +prestige +sharma +haul +solidarity +##rata +igor +pedestrian +##uit +baxter +tenants +wires +medication +unlimited +guiding +impacts +diabetes +##rama +sasha +pas +clive +extraction +continually +constraints +##bilities +sonata +hunted +sixteenth +chu +planting +quote +mayer +pretended +spat +ceramic +##cci +curtains +pigs +pitching +##dad +latvian +sore +dayton +##sted +patrols +slice +playground +##nted +shone +stool +apparatus +inadequate +mates +treason +##ija +desires +##liga +##croft +somalia +laurent +mir +grape +obliged +chevrolet +thirteenth +stunning +enthusiastic +##ede +accounted +concludes +currents +basil +##kovic +drought +##rica +mai +##aire +shove +posting +##shed +pilgrimage +humorous +packing +fry +pencil +wines +smells +marilyn +aching +newest +clung +bon +neighbours +sanctioned +##pie +mug +##stock +drowning +hydraulic +##vil +hiring +reminder +lilly +investigators +##ncies +sour +##eous +compulsory +packet +##rion +##graphic +##elle +cannes +##inate +depressed +##rit +heroic +importantly +theresa +##tled +conway +saturn +marginal +rae +##xia +corresponds +royce +pact +jasper +explosives +packaging +aluminium +##ttered +denotes +rhythmic +spans +assignments +hereditary +outlined +originating +sundays +lad +reissued +greeting +beatrice +##dic +pillar +marcos +plots +handbook +alcoholic +judiciary +avant +slides +extract +masculine +blur +##eum +homage +trembled +owens +hymn +trey +signaling +socks +accumulated +reacted +attic +theo +lining +angie +distraction +primera +talbot +creativity +billed +##hey +deacon +eduardo +identifies +proposition +dizzy +gunner +hogan +##yam +##pping +##hol +ja +##chan +jensen +reconstructed +##berger +clearance +darius +##nier +abe +harlem +plea +dei +circled +emotionally +notation +fascist +neville +exceeded +upwards +viable +ducks +workforce +racer +limiting +shri +##lson +possesses +kerr +moths +devastating +laden +disturbing +locking +gal +fearing +accreditation +flavor +aide +1870s +mountainous +##baum +melt +##ures +texture +servers +soda +herd +##nium +erect +puzzled +hum +peggy +examinations +gould +testified +geoff +ren +devised +sacks +##law +denial +posters +grunted +cesar +tutor +gerry +offerings +byrne +falcons +combinations +incoming +pardon +rocking +26th +avengers +flared +mankind +seller +uttar +loch +nadia +stroking +exposing +fertile +ancestral +instituted +##has +noises +prophecy +taxation +eminent +vivid +pol +##bol +dart +indirect +multimedia +notebook +upside +displaying +adrenaline +referenced +geometric +##iving +progression +##ddy +blunt +announce +##far +implementing +##lav +aggression +liaison +cooler +cares +headache +plantations +gorge 
+dots +impulse +thickness +ashamed +averaging +kathy +obligation +precursor +fowler +symmetry +thee +hears +##rai +undergoing +butcher +bowler +##lip +cigarettes +subscription +goodness +##ically +browne +##hos +kyoto +donor +##erty +damaging +friction +drifting +expeditions +hardened +prostitution +fauna +blankets +claw +tossing +snarled +butterflies +recruits +investigative +coated +healed +communal +hai +xiii +academics +boone +psychologist +restless +lahore +stephens +brendan +foreigners +printer +ached +explode +27th +deed +scratched +dared +##pole +cardiac +1780 +okinawa +proto +commando +compelled +oddly +electrons +replica +thanksgiving +##rist +sheila +deliberate +stafford +tidal +representations +hercules +ou +##path +##iated +kidnapping +lenses +##tling +deficit +samoa +mouths +consuming +computational +maze +granting +smirk +razor +fixture +ideals +inviting +aiden +nominal +issuing +julio +pitt +ramsey +docks +##oss +exhaust +##owed +bavarian +draped +anterior +mating +ethiopian +explores +noticing +##nton +discarded +convenience +hoffman +endowment +beasts +cartridge +mormon +paternal +probe +sleeves +interfere +lump +deadline +jenks +bulldogs +scrap +alternating +justified +reproductive +nam +seize +descending +secretariat +kirby +grouped +smash +panther +sedan +tapping +lola +cheer +germanic +unfortunate +##eter +unrelated +##fan +subordinate +##sdale +suzanne +advertisement +##ility +horsepower +##lda +cautiously +discourse +luigi +##mans +##fields +noun +prevalent +mao +schneider +everett +surround +governorate +kira +##avia +westward +##take +misty +rails +sustainability +unused +##rating +packs +toast +unwilling +regulate +thy +suffrage +nile +awe +assam +definitions +travelers +affordable +##rb +conferred +sells +undefeated +beneficial +torso +basal +repeating +remixes +bahrain +cables +fang +##itated +excavated +numbering +statutory +deluxe +##lian +forested +ramirez +derbyshire +zeus +slamming +transfers +astronomer +banana +lottery +berg +histories +bamboo +##uchi +resurrection +posterior +bowls +vaguely +##thi +thou +preserving +tensed +offence +##inas +meyrick +callum +ridden +watt +langdon +tying +lowland +snorted +daring +truman +##hale +##girl +aura +overly +filing +weighing +goa +infections +philanthropist +saunders +eponymous +##owski +latitude +perspectives +reviewing +mets +commandant +radial +##kha +flashlight +reliability +koch +vowels +amazed +ada +elaine +supper +##encies +predator +debated +soviets +cola +##boards +##nah +compartment +crooked +arbitrary +fourteenth +havana +majors +steelers +clips +profitable +ambush +exited +packers +##tile +nude +cracks +fungi +limb +trousers +josie +shelby +tens +frederic +##ος +definite +smoothly +constellation +insult +baton +discs +lingering +##nco +conclusions +lent +staging +becker +grandpa +shaky +##tron +einstein +obstacles +adverse +economically +##moto +mccartney +thor +dismissal +motions +readings +nostrils +treatise +##pace +squeezing +evidently +prolonged +1783 +venezuelan +je +marguerite +beirut +takeover +shareholders +##vent +denise +digit +airplay +norse +##bbling +imaginary +pills +hubert +blaze +vacated +eliminating +vine +mansfield +retrospective +barrow +borne +clutch +bail +forensic +weaving +##nett +##witz +desktop +citadel +promotions +worrying +dorset +subdivided +##iating +manned +expeditionary +pickup +synod +chuckle +barney +##rz +##ffin +functionality +karachi +litigation +meanings +lick +anders +##ffed +execute +curl +oppose +ankles +typhoon +##ache +linguistics +compassion +pressures 
+grazing +perfection +##iting +immunity +monopoly +muddy +backgrounds +namibia +francesca +monitors +attracting +stunt +tuition +##ии +vegetable +##mates +##quent +mgm +jen +complexes +forts +cellar +bites +seventeenth +royals +flemish +failures +mast +charities +##cular +peruvian +capitals +macmillan +ipswich +outward +frigate +postgraduate +folds +employing +##ouse +concurrently +fiery +##tai +contingent +nightmares +monumental +nicaragua +##kowski +lizard +mal +fielding +gig +reject +harding +##ipe +coastline +##cin +beethoven +humphrey +innovations +##tam +norris +doris +solicitor +obey +niagara +shelves +bourbon +nightclub +specifications +hilton +##ndo +centennial +dispersed +worm +neglected +briggs +kuala +uneasy +##nstein +##bound +##aking +##burgh +awaiting +pronunciation +##bbed +##quest +eh +optimal +zhu +raped +greens +presided +brenda +worries +venetian +marxist +turnout +##lius +refined +braced +sins +grasped +sunderland +nickel +speculated +lowell +cyrillic +communism +fundraising +resembling +colonists +mutant +freddie +usc +##mos +gratitude +##run +mural +##lous +chemist +reminds +28th +steals +tess +pietro +##ingen +promoter +ri +microphone +honoured +rai +sant +##qui +feather +##nson +burlington +kurdish +terrorists +deborah +sickness +##wed +hazard +irritated +desperation +veil +clarity +##rik +jewels +xv +##gged +##ows +##cup +berkshire +unfair +mysteries +orchid +winced +exhaustion +renovations +stranded +obe +infinity +##nies +adapt +redevelopment +thanked +registry +olga +domingo +noir +tudor +ole +commenting +behaviors +##ais +crisp +pauline +probable +stirling +wigan +paralympics +panting +surpassed +##rew +luca +barred +famed +##sters +cassandra +waiter +carolyn +exported +##orted +andres +destructive +deeds +jonah +castles +vacancy +##glass +1788 +orchard +yep +famine +belarusian +sprang +##forth +skinny +##mis +administrators +rotterdam +zambia +zhao +boiler +discoveries +##ride +##physics +lucius +disappointing +outreach +spoon +##frame +qualifications +unanimously +enjoys +regency +##iidae +stade +realism +veterinary +rodgers +dump +alain +chestnut +castile +censorship +rumble +gibbs +communion +reggae +inactivated +logs +loads +##houses +homosexual +##iano +ale +informs +##cas +phrases +plaster +linebacker +ambrose +kaiser +fascinated +limerick +recruitment +forge +mastered +##nding +leinster +rooted +threaten +##strom +borneo +##hes +suggestions +scholarships +propeller +documentaries +patronage +coats +constructing +invest +neurons +comet +entirety +shouts +identities +annoying +unchanged +wary +##antly +##ogy +neat +oversight +##kos +phillies +replay +constance +##kka +incarnation +humble +skies +minus +##acy +smithsonian +guerrilla +jar +cadets +##plate +surplus +audit +##aru +cracking +joanna +louisa +pacing +##lights +intentionally +##iri +diner +nwa +imprint +australians +tong +unprecedented +bunker +naive +specialists +ark +nichols +railing +leaked +pedal +##uka +shrub +longing +roofs +captains +neural +tuned +##ntal +##jet +emission +medina +frantic +codex +definitive +sid +abolition +intensified +stocks +enrique +sustain +genoa +oxide +##written +clues +cha +##gers +tributaries +fragment +venom +##ente +##sca +muffled +vain +sire +laos +##ingly +##hana +hastily +snapping +surfaced +sentiment +motive +##oft +contests +approximate +mesa +luckily +dinosaur +exchanges +propelled +accord +bourne +relieve +tow +masks +offended +##ues +cynthia +##mmer +rains +bartender +zinc +reviewers +lois +##sai +legged +arrogant +rafe +comprise +handicap +blockade 
+inlet +lagoon +copied +drilling +shelley +petals +##inian +mandarin +obsolete +##inated +onward +arguably +productivity +praising +seldom +busch +discusses +raleigh +shortage +ranged +stanton +encouragement +firstly +conceded +overs +temporal +##uke +cbe +##bos +woo +certainty +pumps +##pton +stalked +##uli +lizzie +periodic +thieves +weaker +gases +shoving +chooses +wc +##chemical +prompting +weights +##kill +robust +flanked +sticky +tuberculosis +##eb +##eal +christchurch +resembled +wallet +reese +inappropriate +pictured +distract +fixing +fiddle +giggled +burger +heirs +hairy +mechanic +torque +obsessed +chiefly +cheng +logging +extracted +meaningful +numb +##vsky +gloucestershire +reminding +unite +##lit +breeds +diminished +clown +glove +1860s +archibald +focal +freelance +sliced +depiction +##yk +organism +switches +sights +stray +crawling +##ril +lever +leningrad +interpretations +loops +anytime +reel +alicia +delighted +##ech +inhaled +xiv +suitcase +bernie +vega +licenses +northampton +exclusion +induction +monasteries +racecourse +homosexuality +##sfield +##rky +dimitri +michele +alternatives +ions +commentators +genuinely +objected +pork +hospitality +fencing +stephan +warships +peripheral +wit +drunken +wrinkled +quentin +spends +departing +chung +numerical +spokesperson +johannesburg +caliber +killers +##udge +assumes +neatly +demographic +abigail +bloc +mounting +##lain +bentley +slightest +xu +recipients +##jk +merlin +##writer +seniors +prisons +blinking +hindwings +flickered +kappa +##hel +80s +strengthening +appealing +brewing +gypsy +mali +lashes +hulk +unpleasant +harassment +bio +treaties +predict +instrumentation +pulp +troupe +boiling +mantle +##ffe +##vn +dividing +handles +verbs +##onal +coconut +senegal +thorough +gum +momentarily +##sto +cocaine +panicked +destined +##turing +teatro +denying +weary +captained +mans +##hawks +wakefield +bollywood +thankfully +cyril +amendments +##bahn +consultation +stud +reflections +kindness +1787 +internally +##ovo +tex +mosaic +distribute +paddy +seeming +##hic +piers +##mura +popularly +winger +kang +sentinel +mccoy +##anza +covenant +##bag +verge +fireworks +suppress +thrilled +dominate +##jar +swansea +reconciliation +stiffened +cue +dorian +##uf +damascus +amor +ida +foremost +##aga +porsche +unseen +dir +##had +##azi +stony +lexi +melodies +##nko +angular +integer +podcast +ants +inherent +jaws +justify +persona +##olved +josephine +##nr +##ressed +customary +flashes +gala +cyrus +glaring +backyard +ariel +physiology +greenland +stir +avon +atletico +finch +methodology +ked +mas +catholicism +townsend +branding +quincy +fits +containers +1777 +ashore +aragon +forearm +poisoning +adopting +conquer +grinding +amnesty +keller +finances +evaluate +forged +lankan +instincts +##uto +guam +bosnian +photographed +workplace +desirable +protector +allocation +intently +encourages +willy +##sten +bodyguard +electro +brighter +bihar +##chev +lasts +opener +amphibious +sal +verde +arte +##cope +captivity +vocabulary +yields +##tted +agreeing +desmond +pioneered +##chus +strap +campaigned +railroads +##ович +emblem +##dre +stormed +##ulous +marijuana +northumberland +##nath +bowen +landmarks +beaumont +##qua +danube +##bler +attorneys +th +flyers +critique +villains +cass +mutation +acc +##0s +colombo +mckay +motif +sampling +concluding +syndicate +##rell +neon +stables +warnings +clint +mourning +wilkinson +##tated +merrill +leopard +evenings +exhaled +emil +sonia +ezra +discrete +stove +farrell +fifteenth +prescribed +superhero 
+##rier +worms +helm +wren +##duction +expo +##rator +hq +unfamiliar +antony +prevents +acceleration +fiercely +mari +painfully +calculations +cheaper +ign +clifton +irvine +davenport +mozambique +pierced +##evich +wonders +##wig +##cate +##iling +crusade +ware +enzymes +reasonably +mls +##coe +mater +ambition +bunny +eliot +kernel +##fin +asphalt +headmaster +torah +aden +lush +pins +waived +##yas +joao +substrate +enforce +##grad +##ules +alvarez +selections +epidemic +tempted +bremen +translates +ensured +waterfront +29th +forrest +manny +malone +kramer +reigning +simpler +absorption +engraved +##ffy +evaluated +1778 +haze +comforting +crossover +##abe +thorn +##rift +##imo +suppression +fatigue +cutter +wurttemberg +##orf +enforced +hovering +proprietary +samurai +syllable +ascent +lacey +tick +lars +tractor +merchandise +rep +bouncing +defendants +##yre +huntington +##oko +standardized +##hor +##hima +assassinated +predecessors +rainy +liar +assurance +lyrical +##uga +secondly +flattened +parameter +undercover +##mity +bordeaux +punish +ridges +markers +exodus +inactive +hesitate +debbie +nyc +pledge +savoy +nagar +offset +organist +##tium +hesse +marin +converting +##iver +diagram +propulsion +validity +reverted +supportive +ministries +clans +responds +proclamation +##inae +ein +pleading +patriot +birch +islanders +strauss +hates +##dh +brandenburg +concession +1900s +killings +textbook +antiquity +cinematography +wharf +embarrassing +setup +creed +farmland +inequality +centred +signatures +fallon +##ingham +##uts +ceylon +gazing +directive +laurie +##tern +globally +##uated +##dent +allah +excavation +threads +##cross +frantically +icc +utilize +determines +respiratory +thoughtful +receptions +##dicate +merging +chandra +seine +builders +builds +diagnostic +dev +visibility +goddamn +analyses +dhaka +proves +chancel +concurrent +curiously +canadians +pumped +restoring +1850s +turtles +jaguar +sinister +spinal +declan +vows +1784 +glowed +capitalism +swirling +universidad +##lder +##oat +soloist +##genic +##oor +coincidence +beginnings +nissan +dip +resorts +caucasus +combustion +infectious +##eno +pigeon +serpent +##itating +conclude +masked +salad +jew +##gr +surreal +toni +##wc +harmonica +##gins +##etic +##coat +fishermen +intending +bravery +##wave +klaus +titan +wembley +taiwanese +ransom +40th +incorrect +hussein +eyelids +cooke +dramas +utilities +##etta +##print +eisenhower +principally +granada +lana +##rak +openings +concord +##bl +bethany +connie +morality +sega +##mons +##nard +earnings +##kara +##cine +communes +##rel +coma +composing +softened +severed +grapes +nguyen +analyzed +warlord +hubbard +heavenly +behave +slovenian +##hit +##ony +hailed +filmmakers +trance +caldwell +skye +unrest +coward +likelihood +##aging +bern +taliban +honolulu +propose +browser +imagining +cobra +contributes +dukes +instinctively +conan +violinist +##ores +accessories +gradual +##amp +quotes +sioux +##dating +undertake +intercepted +sparkling +compressed +fungus +tombs +haley +imposing +rests +degradation +lincolnshire +retailers +wetlands +tulsa +distributor +dungeon +nun +greenhouse +convey +atlantis +aft +exits +oman +dresser +lyons +##sti +joking +eddy +judgement +omitted +digits +##game +juniors +##rae +cents +stricken +une +##ngo +wizards +weir +breton +nan +technician +fibers +liking +royalty +persia +terribly +magician +##rable +##unt +vance +cafeteria +booker +camille +warmer +##static +consume +cavern +gaps +compass +contemporaries +foyer +soothing +graveyard +maj +plunged 
+blush +##wear +cascade +demonstrates +ordinance +##nov +boyle +##lana +rockefeller +shaken +banjo +izzy +##ense +breathless +vines +##eman +alterations +chromosome +dwellings +feudal +mole +catalonia +relics +tenant +mandated +##fm +fridge +hats +honesty +patented +raul +heap +cruisers +accusing +enlightenment +infants +wherein +chatham +contractors +affinity +hc +osborne +piston +traps +maturity +##rana +lagos +##zal +peering +##nay +attendant +dealers +protocols +subset +prospects +biographical +##cre +artery +##zers +insignia +nuns +endured +##eration +recommend +schwartz +serbs +berger +cromwell +crossroads +enduring +clasped +grounded +##bine +marseille +twitched +abel +choke +catalyst +moldova +italians +##tist +disastrous +wee +##oured +##nti +wwf +nope +##piration +##asa +expresses +thumbs +##nza +coca +1781 +cheating +##ption +skipped +sensory +heidelberg +spies +satan +dangers +semifinal +bohemia +whitish +confusing +shipbuilding +relies +surgeons +landings +ravi +baku +moor +suffix +alejandro +##yana +litre +upheld +##unk +rajasthan +##rek +coaster +insists +posture +scenarios +etienne +favoured +appoint +transgender +elephants +poked +greenwood +defences +fulfilled +militant +somali +1758 +chalk +potent +##ucci +migrants +wink +assistants +nos +restriction +activism +niger +##ario +colon +shaun +##sat +daphne +##erated +swam +congregations +reprise +considerations +magnet +playable +xvi +overthrow +tobias +knob +chavez +coding +##mers +propped +katrina +orient +newcomer +##suke +temperate +##pool +farmhouse +interrogation +committing +##vert +forthcoming +strawberry +joaquin +macau +ponds +shocking +siberia +##cellular +chant +contributors +##nant +##ologists +sped +absorb +hail +1782 +spared +##hore +barbados +karate +opus +originates +saul +##xie +evergreen +leaped +##rock +correlation +exaggerated +weekday +unification +bump +tracing +brig +afb +pathways +utilizing +disturbance +kneeling +##stad +##guchi +100th +pune +##thy +decreasing +manipulation +miriam +academia +ecosystem +occupational +rbi +##lem +rift +rotary +stacked +incorporation +awakening +generators +guerrero +racist +##omy +cyber +derivatives +culminated +allie +annals +panzer +sainte +pops +zu +austro +##vate +algerian +politely +nicholson +mornings +educate +tastes +thrill +dartmouth +##gating +##jee +regan +differing +concentrating +choreography +divinity +pledged +alexandre +routing +gregor +madeline +##idal +apocalypse +##hora +gunfire +culminating +elves +fined +liang +lam +programmed +tar +guessing +transparency +gabrielle +##gna +cancellation +flexibility +##lining +accession +shea +stronghold +nets +specializes +##rgan +abused +hasan +sgt +exceeding +admiration +supermarket +photographers +specialised +tilt +resonance +hmm +perfume +sami +threatens +garland +botany +guarding +boiled +greet +puppy +russo +supplier +wilmington +vibrant +vijay +##bius +paralympic +grumbled +paige +faa +licking +margins +hurricanes +##gong +fest +grenade +ripping +##uz +counseling +weigh +##sian +needles +wiltshire +edison +costly +##not +fulton +tramway +redesigned +staffordshire +gasping +watkins +sleepy +candidacy +monkeys +timeline +throbbing +##bid +##sos +berth +uzbekistan +vanderbilt +bothering +overturned +ballots +gem +##iger +sunglasses +subscribers +hooker +compelling +ang +exceptionally +saloon +stab +##rdi +carla +terrifying +##vision +coil +##oids +satisfying +vendors +31st +mackay +deities +overlooked +ambient +bahamas +felipe +olympia +whirled +botanist +advertised +tugging +disciples +morales +unionist 
+rites +foley +morse +motives +creepy +##₀ +soo +##sz +bargain +highness +frightening +turnpike +tory +reorganization +depict +biographer +unopposed +manifesto +##gles +institut +emile +accidental +kapoor +##dam +kilkenny +cortex +lively +romanesque +jain +shan +cannons +##ske +petrol +echoing +amalgamated +disappears +cautious +proposes +sanctions +trenton +flotilla +aus +contempt +tor +canary +cote +theirs +##hun +conceptual +deleted +fascinating +paso +blazing +elf +honourable +hutchinson +##eiro +##outh +##zin +surveyor +amidst +wooded +reissue +intro +##ono +cobb +shelters +newsletter +hanson +brace +encoding +confiscated +dem +caravan +marino +scroll +melodic +cows +imam +##adi +##aneous +northward +searches +biodiversity +cora +roaring +##bers +connell +theologian +halo +compose +pathetic +unmarried +dynamo +az +calculation +toulouse +deserves +humour +nr +forgiveness +tam +undergone +martyr +pamela +myths +whore +counselor +hicks +heavens +battleship +electromagnetic +stellar +establishments +presley +hopped +##chin +temptation +90s +wills +##yuan +nhs +##nya +seminars +##yev +adaptations +gong +asher +lex +indicator +sikh +tobago +cites +goin +##yte +satirical +##gies +characterised +correspond +bubbles +lure +participates +##vid +eruption +skate +therapeutic +1785 +canals +wholesale +defaulted +sac +petit +##zzled +virgil +leak +ravens +portraying +##yx +ghetto +creators +dams +portray +vicente +##rington +fae +namesake +bounty +##arium +joachim +##ota +##iser +aforementioned +axle +snout +depended +dismantled +reuben +##ibly +gallagher +##lau +earnest +##ieu +##iary +inflicted +objections +##llar +asa +gritted +##athy +jericho +##sea +##was +flick +underside +ceramics +undead +substituted +eastward +undoubtedly +wheeled +chimney +##iche +guinness +siding +traitor +baptiste +disguised +inauguration +tipperary +choreographer +perched +warmed +stationary +##ntes +bacterial +##aurus +flores +phosphate +attacker +invaders +alvin +intersects +indirectly +immigrated +businessmen +cornelius +valves +narrated +pill +sober +nationale +monastic +applicants +scenery +##jack +motifs +constitutes +##osh +jurisdictions +tuning +irritation +woven +##uddin +fertility +gao +##erie +antagonist +impatient +glacial +hides +boarded +denominations +interception +##jas +nicola +algebraic +marquess +bahn +parole +buyers +bait +turbines +paperwork +bestowed +natasha +renee +oceans +purchases +vaccine +##tock +fixtures +playhouse +integrate +jai +oswald +intellectuals +booked +nests +mortimer +##isi +obsession +sept +##gler +##sum +scrutiny +simultaneous +squinted +##shin +collects +oven +shankar +penned +remarkably +slips +luggage +spectral +1786 +collaborations +louie +consolidation +##ailed +##ivating +hoover +blackpool +harness +ignition +vest +tails +belmont +mongol +skinner +##nae +visually +mage +derry +##tism +##unce +stevie +transitional +##rdy +redskins +drying +prep +prospective +annoyance +oversee +##loaded +fills +##books +announces +scowled +respects +prasad +mystic +tucson +##vale +revue +springer +bankrupt +1772 +aristotle +habsburg +##geny +dal +natal +nut +pod +chewing +darts +moroccan +walkover +rosario +lenin +punjabi +##ße +grossed +scattering +wired +invasive +hui +polynomial +corridors +wakes +gina +portrays +##cratic +arid +retreating +erich +irwin +sniper +##dha +linen +lindsey +maneuver +butch +shutting +socio +bounce +commemorative +postseason +jeremiah +pines +mystical +beads +abbas +furnace +bidding +consulted +assaulted +empirical +rubble +enclosure +sob +weakly +cancel 
+polly +yielded +##emann +curly +prediction +battered +70s +vhs +jacqueline +render +sails +barked +detailing +grayson +riga +sloane +raging +##yah +herbs +bravo +##athlon +alloy +giggle +imminent +suffers +assumptions +waltz +##itate +accomplishments +##ited +bathing +remixed +deception +##emia +deepest +##eis +balkan +frogs +##rong +slab +##pate +philosophers +peterborough +grains +imports +dickinson +rwanda +##atics +1774 +dirk +tablets +##rove +clone +##rice +caretaker +hostilities +mclean +##gre +regimental +treasures +norms +impose +tsar +tango +diplomacy +variously +complain +recognise +arrests +1779 +celestial +pulitzer +##dus +libretto +##moor +adele +splash +expectation +lds +confronts +##izer +spontaneous +harmful +wedge +entrepreneurs +buyer +bilingual +translate +rugged +conner +circulated +uae +eaton +##gra +##zzle +lingered +lockheed +vishnu +reelection +alonso +##oom +joints +yankee +headline +cooperate +heinz +laureate +invading +##sford +echoes +scandinavian +##dham +hugging +vitamin +salute +micah +hind +trader +##sper +radioactive +##ndra +militants +poisoned +ratified +remark +campeonato +deprived +wander +prop +##dong +##tani +##eye +chiang +darcy +##oping +mandolin +spice +statesman +babylon +walled +forgetting +afro +##cap +giorgio +buffer +##polis +planetary +##gis +overlap +terminals +kinda +centenary +##bir +arising +manipulate +elm +ke +1770 +##tad +chrysler +mapped +moose +pomeranian +quad +macarthur +assemblies +shoreline +recalls +stratford +##rted +noticeable +##evic +imp +##rita +##sque +accustomed +supplying +tents +disgusted +sipped +filters +khz +reno +selecting +luftwaffe +mcmahon +tyne +masterpiece +carriages +collided +dunes +exercised +flare +remembers +muzzle +heck +##rson +burgess +lunged +middleton +boycott +bilateral +##sity +hazardous +lumpur +multiplayer +spotlight +jackets +goldman +liege +porcelain +rag +waterford +attracts +hopeful +battling +ottomans +kensington +baked +hymns +cheyenne +lattice +levine +borrow +polymer +clashes +michaels +monitored +commitments +denounced +##von +cavity +##oney +hobby +akin +##holders +futures +intricate +cornish +patty +##oned +illegally +dolphin +##lag +barlow +yellowish +maddie +apologized +luton +plagued +##puram +##rds +sway +fanny +łodz +##rino +psi +suspicions +hanged +##eding +initiate +charlton +##por +nak +competent +analytical +annex +wardrobe +reservations +sect +fairfax +hedge +piled +buckingham +uneven +bauer +simplicity +snyder +interpret +accountability +donors +moderately +byrd +continents +##cite +disciple +jamaican +nominees +##uss +mongolian +diver +attackers +eagerly +ideological +pillows +miracles +apartheid +revolver +sulfur +clinics +moran +##enko +ile +katy +rhetoric +##icated +chronology +recycling +##hrer +elongated +mughal +pascal +profiles +vibration +databases +domination +##fare +matthias +digest +rehearsal +polling +weiss +initiation +reeves +clinging +flourished +impress +##hoff +buckley +symposium +rhythms +weed +emphasize +transforming +##taking +##yman +accountant +analyze +flicker +foil +priesthood +voluntarily +decreases +##hya +slater +sv +charting +mcgill +##lde +moreno +besieged +zur +robes +##phic +admitting +deported +turmoil +peyton +earthquakes +##ares +nationalists +beau +clair +brethren +interrupt +welch +curated +galerie +requesting +##ested +impending +steward +viper +##vina +complaining +beautifully +brandy +foam +nl +1660 +alessandro +punches +laced +explanations +##lim +attribute +clit +reggie +discomfort +##cards +smoothed +whales +##cene +adler 
+countered +duffy +disciplinary +widening +recipe +reliance +conducts +goats +gradient +preaching +##shaw +matilda +quasi +striped +meridian +cannabis +cordoba +certificates +##agh +##tering +graffiti +hangs +pilgrims +repeats +##ych +revive +urine +etat +##hawk +fueled +belts +fuzzy +susceptible +mauritius +salle +sincere +beers +hooks +##cki +arbitration +entrusted +advise +sniffed +seminar +junk +donnell +processors +principality +strapped +celia +mendoza +everton +fortunes +prejudice +starving +reassigned +steamer +##lund +tuck +evenly +foreman +##ffen +dans +envisioned +slit +baseman +liberia +rosemary +##weed +electrified +periodically +potassium +stride +contexts +sperm +slade +mariners +influx +bianca +subcommittee +##rane +spilling +icao +estuary +##nock +delivers +##ulata +isa +mira +bohemian +dessert +##sbury +welcoming +proudly +slowing +##chs +musee +ascension +russ +##vian +waits +##psy +africans +exploit +##morphic +eccentric +crab +peck +entrances +formidable +marketplace +groom +bolted +metabolism +patton +robbins +courier +payload +endure +##ifier +andes +refrigerator +ornate +##uca +ruthless +illegitimate +masonry +strasbourg +bikes +apples +quintet +willingly +niche +bakery +corpses +energetic +##cliffe +##sser +##ards +centimeters +centro +fuscous +cretaceous +rancho +##yde +andrei +telecom +tottenham +oasis +ordination +vulnerability +presiding +corey +penguins +sims +##pis +malawi +piss +correction +##cked +##ffle +##ryn +countdown +detectives +psychiatrist +psychedelic +dinosaurs +blouse +choi +vowed +randomly +##pol +49ers +scrub +blanche +bruins +dusseldorf +##using +unwanted +##ums +dominique +elevations +headlights +om +laguna +##oga +1750 +famously +ignorance +shrewsbury +breuning +che +confederacy +greco +overhaul +##screen +paz +skirts +disagreement +cruelty +jagged +phoebe +shifter +hovered +viruses +##wes +##lined +landlord +squirrel +dashed +ornamental +gag +wally +grange +literal +spurs +undisclosed +proceeding +billie +orphan +spanned +humidity +indy +weighted +presentations +explosions +lucian +##tary +vaughn +hindus +##anga +##hell +psycho +daytona +protects +efficiently +rematch +sly +tandem +##oya +rebranded +impaired +hee +metropolis +peach +godfrey +diaspora +ethnicity +prosperous +gleaming +dar +grossing +playback +##rden +stripe +pistols +##tain +births +labelled +##cating +rudy +alba +##onne +aquarium +hostility +##tase +shudder +sumatra +hardest +lakers +consonant +creeping +demos +homicide +capsule +zeke +liberties +expulsion +pueblo +##comb +trait +transporting +##ddin +##neck +##yna +depart +gregg +mold +ledge +hangar +oldham +playboy +termination +analysts +gmbh +romero +##itic +insist +cradle +filthy +brightness +slash +shootout +deposed +bordering +##truct +microwave +tumbled +sheltered +cathy +werewolves +messy +andersen +convex +clapped +clinched +satire +wasting +edo +rufus +##jak +mont +##etti +poznan +##keeping +restructuring +transverse +##rland +azerbaijani +slovene +gestures +roommate +choking +shear +##quist +vanguard +oblivious +##hiro +disagreed +baptism +##lich +coliseum +##aceae +salvage +societe +cory +locke +relocation +relying +versailles +ahl +swelling +##elo +cheerful +##edes +gin +sarajevo +obstacle +diverted +##nac +messed +thoroughbred +fluttered +utrecht +chewed +acquaintance +assassins +dispatch +mirza +##wart +salzburg +swell +yen +##gee +idle +ligue +samson +##nds +##igh +playful +spawned +##cise +tease +##case +burgundy +stirring +skeptical +interceptions +marathi +##dies +bedrooms +aroused +pinch +##lik 
+preferences +tattoos +buster +digitally +projecting +rust +##ital +kitten +priorities +addison +pseudo +##guard +dusk +icons +sermon +##psis +##iba +##lift +ju +truce +rink +##dah +##wy +defects +psychiatry +offences +calculate +glucose +##iful +##rized +##unda +francaise +##hari +richest +warwickshire +carly +1763 +purity +redemption +lending +##cious +muse +bruises +cerebral +aero +carving +preface +terminology +invade +monty +anarchist +blurred +##iled +rossi +treats +guts +shu +foothills +ballads +undertaking +premise +cecilia +affiliates +blasted +conditional +wilder +minors +drone +rudolph +buffy +swallowing +horton +attested +rutherford +howell +primetime +livery +penal +##bis +minimize +hydro +wrecked +wrought +palazzo +##gling +cans +vernacular +friedman +nobleman +shale +walnut +danielle +##ection +##tley +sears +##kumar +chords +lend +flipping +streamed +por +dracula +gallons +sacrifices +gamble +orphanage +##iman +mckenzie +##gible +boxers +daly +##balls +##ان +##ific +##rative +##iq +exploited +slated +##uity +circling +hillary +pinched +goldberg +provost +campaigning +piles +ironically +jong +mohan +successors +usaf +##tem +##ught +autobiographical +haute +preserves +##ending +acquitted +comparisons +hydroelectric +gangs +cypriot +torpedoes +rushes +derive +bumps +instability +fiat +pets +##mbe +silas +dye +reckless +settler +##itation +heats +##writing +canonical +maltese +fins +mushroom +stacy +aspen +avid +##kur +##loading +vickers +gaston +hillside +statutes +wilde +gail +kung +sabine +comfortably +motorcycles +##rgo +pneumonia +fetch +##sonic +axel +faintly +parallels +##oop +mclaren +spouse +compton +interdisciplinary +miner +##eni +clamped +##chal +##llah +separates +versa +##mler +scarborough +labrador +##lity +##osing +rutgers +hurdles +como +burt +divers +wichita +cade +coincided +bruised +mla +vineyard +##ili +##brush +notch +mentioning +jase +hearted +kits +doe +##acle +pomerania +##ady +ronan +seizure +pavel +problematic +##zaki +domenico +##ulin +catering +penelope +dependence +parental +emilio +ministerial +atkinson +##bolic +clarkson +chargers +colby +grill +peeked +arises +summon +##aged +fools +##grapher +faculties +qaeda +##vial +garner +refurbished +##hwa +geelong +disasters +nudged +bs +shareholder +lori +algae +reinstated +rot +##ades +##nous +invites +stainless +inclusive +##itude +diocesan +til +##icz +denomination +##xa +benton +floral +registers +##erman +##kell +absurd +brunei +guangzhou +hitter +retaliation +##uled +##eve +blanc +nh +consistency +contamination +##eres +dire +palermo +broadcasters +diaries +inspire +vols +brewer +tightening +mixtape +hormone +##tok +stokes +##color +##dly +##ssi +##ometer +##lington +sanitation +##tility +intercontinental +##adt +¹⁄₂ +cylinders +economies +favourable +unison +croix +gertrude +odyssey +vanity +dangling +##logists +upgrades +dice +middleweight +practitioner +henrik +parlor +orion +angered +lac +blurted +##rri +sensual +intends +swings +angled +##phs +husky +attain +peerage +precinct +textiles +cheltenham +shuffled +dai +confess +tasting +bhutan +##riation +tyrone +segregation +abrupt +ruiz +##rish +smirked +blackwell +confidential +browning +amounted +vase +scarce +fabulous +raided +staple +guyana +unemployed +glider +shay +##tow +carmine +troll +intervene +squash +superstar +cylindrical +len +roadway +researched +handy +##rium +##jana +lao +declares +##rring +##tadt +##elin +##kova +willem +shrubs +napoleonic +realms +skater +volkswagen +##ł +tad +hara +archaeologist +awkwardly +eerie +##kind +wiley 
+##heimer +titus +organizers +cfl +crusaders +lama +vent +enraged +thankful +occupants +maximilian +##gaard +possessing +textbooks +##oran +collaborator +quaker +##ulo +avalanche +mono +silky +straits +isaiah +mustang +surged +resolutions +potomac +descend +kilograms +plato +strains +saturdays +##olin +bernstein +##ype +holstein +ponytail +belize +conversely +heroine +perpetual +##ylus +charcoal +piedmont +glee +negotiating +backdrop +prologue +##jah +pasadena +climbs +ramos +sunni +##holm +##tner +##tri +anand +deficiency +hertfordshire +stout +##avi +aperture +orioles +##irs +doncaster +intrigued +bombed +coating +otis +##mat +cocktail +##jit +##eto +amir +arousal +sar +##proof +dixie +pots +whereabouts +##fted +drains +bullying +cottages +scripture +coherent +fore +poe +appetite +##uration +sampled +##ators +derrick +rotor +jays +peacock +installment +##rro +advisors +##coming +rodeo +scotch +##mot +##fen +##vant +ensued +rodrigo +dictatorship +martyrs +twenties +towed +incidence +marta +rainforest +sai +scaled +##cles +oceanic +qualifiers +symphonic +mcbride +dislike +generalized +aubrey +colonization +##iation +##lion +##ssing +disliked +lublin +salesman +##ulates +spherical +whatsoever +sweating +avalon +contention +punt +severity +alderman +atari +##dina +##grant +##rop +scarf +seville +vertices +annexation +fairfield +fascination +inspiring +launches +palatinate +regretted +##rca +feral +##iom +elk +nap +olsen +reddy +yong +##leader +##iae +garment +transports +feng +gracie +outrage +viceroy +insides +##esis +breakup +grady +organizer +softer +grimaced +murals +galicia +arranging +vectors +##rsten +##sb +##cens +sloan +##eka +bitten +ara +fender +nausea +bumped +kris +banquet +comrades +detector +persisted +##llan +adjustment +endowed +cinemas +sellers +##uman +peek +epa +kindly +neglect +simpsons +talon +mausoleum +runaway +hangul +lookout +##cic +coughed +acquainted +chloride +quicker +accordion +neolithic +##qa +artemis +coefficient +lenny +pandora +tx +##xed +ecstasy +litter +segunda +chairperson +gemma +hiss +rumor +vow +nasal +antioch +compensate +patiently +transformers +##eded +judo +morrow +penis +posthumous +bandits +husbands +denote +flaming +##any +##phones +langley +yorker +1760 +walters +##kle +gubernatorial +fatty +leroy +outlaw +##nine +unpublished +poole +jakob +##ᵢ +##ₙ +crete +distorted +superiority +##dhi +intercept +crust +mig +claus +crashes +stallion +frontal +armistice +##estinal +elton +aj +encompassing +camel +commemorated +malaria +woodward +calf +cigar +penetrate +##oso +willard +##rno +##uche +illustrate +amusing +convergence +noteworthy +##lma +##rva +journeys +realise +manfred +##sable +##vocation +hearings +fiance +##posed +educators +provoked +adjusting +##cturing +modular +stockton +paterson +vlad +rejects +electors +selena +maureen +##tres +##rce +swirled +##num +proportions +nanny +pawn +naturalist +parma +apostles +awoke +ethel +wen +##bey +monsoon +overview +##inating +mccain +rendition +risky +adorned +##ih +equestrian +germain +nj +conspicuous +confirming +##yoshi +shivering +##imeter +milestone +rumours +flinched +bounds +smacked +token +##bei +lectured +automobiles +##shore +impacted +##iable +nouns +nero +##leaf +ismail +prostitute +trams +bridget +sud +stimulus +impressions +reins +revolves +##gned +giro +honeymoon +##swell +criterion +##sms +##uil +libyan +prefers +##osition +preview +sucks +accusation +bursts +metaphor +diffusion +tolerate +faye +betting +cinematographer +liturgical +specials +bitterly +humboldt +##ckle +flux +rattled 
+##itzer +archaeologists +odor +authorised +marshes +discretion +##ов +alarmed +archaic +inverse +##leton +explorers +##pine +drummond +tsunami +woodlands +##minate +##tland +booklet +insanity +owning +insert +crafted +calculus +receivers +stung +##eca +##nched +prevailing +travellers +eyeing +lila +graphs +##borne +julien +##won +morale +adaptive +therapist +erica +cw +libertarian +bowman +pitches +vita +##ional +crook +##entation +caledonia +mutiny +##sible +1840s +automation +flock +##pia +ironic +pathology +##imus +remarried +joker +withstand +energies +##att +shropshire +hostages +madeleine +tentatively +conflicting +mateo +recipes +euros +mercenaries +nico +##ndon +albuquerque +augmented +mythical +bel +freud +##child +cough +##lica +freddy +lillian +genetically +nuremberg +calder +bonn +outdoors +paste +suns +urgency +vin +restraint +tyson +##cera +##selle +barrage +bethlehem +kahn +##par +mounts +nippon +barony +happier +ryu +makeshift +sheldon +blushed +castillo +barking +listener +taped +bethel +fluent +headlines +pornography +rum +disclosure +sighing +mace +doubling +gunther +manly +##plex +interventions +physiological +forwards +emerges +##tooth +##gny +compliment +rib +recession +visibly +barge +faults +connector +exquisite +prefect +##rlin +patio +##cured +elevators +italics +pena +wasp +satin +botswana +graceful +respectable +##jima +##rter +##oic +franciscan +generates +##dl +alfredo +disgusting +##olate +##iously +sherwood +warns +cod +promo +cheryl +sino +##escu +twitch +##zhi +brownish +thom +ortiz +##dron +densely +##beat +carmel +reinforce +##bana +anastasia +downhill +vertex +contaminated +remembrance +harmonic +homework +fiancee +gears +olds +angelica +ramsay +quiz +colliery +sevens +##cape +autism +##hil +walkway +##boats +ruben +abnormal +ounce +khmer +##bbe +zachary +bedside +morphology +punching +##olar +sparrow +convinces +hewitt +queer +remastered +rods +mabel +solemn +notified +lyricist +symmetric +##xide +encore +passports +wildcats +##uni +baja +##pac +mildly +##ease +bleed +commodity +mounds +glossy +orchestras +##omo +damian +prelude +ambitions +##vet +awhile +remotely +##aud +asserts +imply +##iques +distinctly +modelling +remedy +##dded +windshield +dani +xiao +##endra +audible +powerplant +invalid +elemental +acquisitions +##hala +immaculate +libby +plata +smuggling +ventilation +denoted +minh +##morphism +differed +dion +kelley +lore +mocking +sabbath +spikes +hygiene +drown +runoff +stylized +tally +liberated +aux +interpreter +righteous +aba +siren +reaper +pearce +millie +##cier +##yra +gaius +##iso +captures +##ttering +dorm +claudio +##sic +benches +knighted +blackness +##ored +discount +fumble +oxidation +routed +novak +perpendicular +spoiled +fracture +splits +pads +topology +##cats +axes +fortunate +offenders +protestants +esteem +broadband +convened +frankly +hound +prototypes +isil +facilitated +keel +##sher +sahara +awaited +bubba +orb +prosecutors +hem +##xing +relaxing +remnant +romney +sorted +slalom +stefano +ulrich +##active +exemption +folder +pauses +foliage +hitchcock +epithet +criticisms +##aca +ballistic +brody +hinduism +chaotic +youths +equals +##pala +pts +thicker +analogous +capitalist +improvised +overseeing +sinatra +ascended +beverage +straightforward +##kon +curran +bois +induce +surveying +emperors +sax +unpopular +cartoonist +fused +##mble +unto +##yuki +localities +##cko +##ln +darlington +slain +academie +lobbying +sediment +puzzles +##grass +defiance +dickens +manifest +tongues +alumnus +arbor +coincide +appalachian 
+mustafa +examiner +cabaret +traumatic +yves +bracelet +draining +heroin +magnum +baths +odessa +consonants +mitsubishi +##gua +kellan +vaudeville +joked +straps +probation +##ław +ceded +interfaces +##pas +##zawa +blinding +viet +rothschild +museo +huddersfield +tactic +##storm +brackets +dazed +incorrectly +##vu +reg +glazed +fearful +manifold +benefited +irony +stumbling +##rte +willingness +balkans +mei +wraps +##aba +injected +##lea +gu +syed +harmless +##hammer +bray +takeoff +poppy +timor +cardboard +astronaut +purdue +weeping +southbound +cursing +stalls +diagonal +##neer +lamar +bryce +comte +weekdays +harrington +##uba +negatively +##see +lays +grouping +##cken +##henko +affirmed +halle +modernist +##lai +hodges +smelling +aristocratic +baptized +dismiss +justification +oilers +coupling +qin +snack +healer +##qing +gardener +layla +battled +formulated +stephenson +gravitational +##gill +1768 +granny +coordinating +suites +##ioned +monarchs +##cote +##hips +blended +barrister +deposition +fia +mina +policemen +paranoid +##pressed +churchyard +covert +crumpled +creep +abandoning +tr +transmit +conceal +barr +understands +readiness +spire +##cology +##enia +startling +unlock +vida +bowled +slots +##nat +##islav +spaced +trusting +admire +rig +slack +casualty +classmates +##odes +##rar +##rked +amherst +furnished +evolve +foundry +menace +mead +##lein +flu +wesleyan +##kled +monterey +webber +##vos +wil +##mith +##на +bartholomew +justices +restrained +##cke +amenities +mediated +sewage +trenches +mainz +##thus +1800s +##cula +##inski +caine +bonding +converts +spheres +superseded +marianne +crypt +sweaty +ensign +historia +##br +spruce +##ask +forks +thoughtfully +yukon +pamphlet +ames +##uter +karma +##yya +bryn +negotiation +sighs +incapable +##mbre +##ntial +actresses +taft +##mill +luce +prevailed +##amine +1773 +motionless +envoy +testify +investing +sculpted +instructors +provence +kali +cullen +horseback +##while +goodwin +##jos +gaa +norte +##ldon +modify +wavelength +abd +skinned +sprinter +forecast +scheduling +marries +squared +tentative +##chman +boer +##isch +bolts +swap +fisherman +assyrian +impatiently +guthrie +martins +murdoch +tanya +nicely +dolly +lacy +med +syn +decks +fashionable +millionaire +surfing +heaved +tammy +consulate +attendees +routinely +fuse +saxophonist +backseat +malaya +##lord +scowl +tau +##ishly +sighted +steaming +##rks +##holes +##hong +ching +##wife +bless +conserved +jurassic +stacey +zion +chunk +rigorous +blaine +peabody +slayer +dismay +brewers +nz +##jer +det +##glia +glover +postwar +penetration +sylvester +imitation +vertically +airlift +heiress +knoxville +viva +##uin +macon +##rim +##fighter +##gonal +janice +##orescence +##wari +marius +belongings +leicestershire +blanco +inverted +preseason +sanity +sobbing +##due +##elt +##dled +collingwood +regeneration +flickering +shortest +##mount +##osi +feminism +##lat +sherlock +cabinets +fumbled +northbound +precedent +snaps +##mme +researching +##akes +guillaume +insights +manipulated +vapor +neighbour +gangster +frey +stalking +scarcely +callie +barnett +tendencies +doomed +assessing +slung +panchayat +ambiguous +bartlett +##etto +distributing +violating +wolverhampton +##hetic +swami +histoire +##urus +liable +pounder +groin +hussain +larsen +popping +surprises +##atter +vie +curt +##station +mute +relocate +musicals +authorization +richter +##sef +immortality +tna +bombings +deteriorated +yiddish +##acious +robbed +colchester +ao +verified +balancing +apostle +swayed +recognizable 
+oxfordshire +retention +nottinghamshire +contender +judd +invitational +shrimp +uhf +##icient +cleaner +longitudinal +tanker +##mur +acronym +broker +koppen +sundance +suppliers +##gil +clipped +fuels +petite +##anne +landslide +helene +diversion +populous +landowners +auspices +melville +quantitative +##xes +ferries +nicky +##llus +doo +haunting +roche +carver +downed +unavailable +##pathy +approximation +hiroshima +##hue +garfield +valle +comparatively +keyboardist +traveler +##eit +congestion +calculating +subsidiaries +##bate +serb +modernization +fairies +deepened +ville +averages +##lore +inflammatory +tonga +##itch +co₂ +squads +##hea +gigantic +serum +enjoyment +retailer +verona +35th +cis +##phobic +magna +technicians +##vati +arithmetic +##sport +levin +##dation +amtrak +chow +sienna +##eyer +backstage +entrepreneurship +##otic +learnt +tao +##udy +worcestershire +formulation +baggage +hesitant +bali +sabotage +##kari +barren +enhancing +murmur +pl +freshly +putnam +syntax +aces +medicines +resentment +bandwidth +##sier +grins +chili +guido +##sei +framing +implying +gareth +lissa +genevieve +pertaining +admissions +geo +thorpe +proliferation +sato +bela +analyzing +parting +##gor +awakened +##isman +huddled +secrecy +##kling +hush +gentry +dungeons +##ego +coasts +##utz +sacrificed +##chule +landowner +mutually +prevalence +programmer +adolescent +disrupted +seaside +gee +trusts +vamp +georgie +##nesian +##iol +schedules +sindh +##market +etched +hm +sparse +bey +beaux +scratching +gliding +unidentified +collaborating +gems +jesuits +oro +accumulation +shaping +mbe +anal +##xin +enthusiasts +newscast +##egan +janata +dewey +parkinson +ankara +biennial +towering +inconsistent +##chet +thriving +terminate +cabins +furiously +eats +advocating +donkey +marley +muster +phyllis +leiden +##user +grassland +glittering +iucn +loneliness +memorandum +armenians +##ddle +popularized +rhodesia +60s +lame +##illon +sans +bikini +header +orbits +##finger +##ulator +sharif +spines +biotechnology +strolled +naughty +yates +##wire +fremantle +milo +##mour +abducted +removes +##atin +humming +##chrome +##ester +hume +pivotal +##rates +armand +grams +believers +elector +rte +apron +bis +scraped +##yria +endorsement +initials +##llation +dotted +hints +buzzing +emigration +nearer +indicators +##ulu +coarse +neutron +protectorate +##uze +directional +exploits +pains +loire +1830s +proponents +guggenheim +rabbits +ritchie +hectare +inputs +hutton +##raz +verify +##ako +boilers +longitude +##lev +skeletal +yer +emilia +citrus +compromised +##gau +prescription +paragraph +eduard +cadillac +attire +categorized +kenyan +weddings +charley +##bourg +entertain +monmouth +##lles +nutrients +davey +mesh +incentive +practised +ecosystems +kemp +subdued +overheard +##rya +bodily +maxim +##nius +apprenticeship +ursula +##fight +lodged +rug +silesian +unconstitutional +patel +inspected +coyote +unbeaten +##hak +34th +disruption +convict +parcel +##nham +collier +implicated +mallory +##iac +susannah +winkler +##rber +shia +phelps +sediments +graphical +robotic +##sner +adulthood +mart +smoked +##isto +kathryn +clarified +##aran +divides +convictions +oppression +pausing +burying +##mt +federico +mathias +eileen +##tana +kite +hunched +##acies +##atz +disadvantage +liza +kinetic +greedy +paradox +yokohama +dowager +trunks +ventured +##gement +gupta +vilnius +olaf +##thest +crimean +hopper +##ej +progressively +arturo +mouthed +arrondissement +##fusion +rubin +simulcast +oceania +##orum +##stra +##rred +busiest 
+intensely +navigator +cary +##vine +##hini +##bies +fife +rowe +rowland +posing +insurgents +shafts +lawsuits +activate +conor +inward +culturally +garlic +##eering +eclectic +##hui +##kee +##nl +furrowed +vargas +meteorological +rendezvous +##aus +culinary +commencement +##dition +quota +##notes +mommy +salaries +overlapping +mule +##iology +##mology +sums +wentworth +##isk +##zione +mainline +subgroup +##illy +hack +plaintiff +verdi +bulb +differentiation +engagements +multinational +supplemented +bertrand +caller +regis +##naire +##sler +##arts +##imated +blossom +propagation +kilometer +viaduct +vineyards +##uate +beckett +optimization +golfer +songwriters +seminal +semitic +thud +volatile +evolving +ridley +##wley +trivial +distributions +scandinavia +jiang +wrestled +insistence +emphasizes +napkin +##ods +adjunct +rhyme +##ricted +##eti +hopeless +surrounds +tremble +32nd +smoky +##ntly +oils +medicinal +padded +steer +wilkes +concessions +hue +uniquely +blinded +landon +##lane +hendrix +commemorating +dex +specify +chicks +##ggio +intercity +morley +##torm +highlighting +##oting +pang +oblique +stalled +##liner +flirting +newborn +1769 +bishopric +shaved +currie +dharma +spartan +##ooped +favorites +smug +novella +sirens +abusive +creations +espana +##lage +paradigm +semiconductor +sheen +##rdo +##yen +##zak +nrl +renew +##pose +##tur +adjutant +marches +norma +##enity +ineffective +weimar +grunt +##gat +lordship +plotting +expenditure +infringement +lbs +refrain +mimi +mistakenly +postmaster +1771 +##bara +ras +motorsports +tito +subjective +##zza +bully +stew +##kaya +prescott +##raphic +##zam +bids +styling +paranormal +reeve +sneaking +exploding +katz +akbar +migrant +syllables +indefinitely +##ogical +destroys +replaces +applause +##phine +pest +##fide +articulated +bertie +##cars +##ptic +courtroom +crowley +aesthetics +cummings +tehsil +hormones +titanic +dangerously +##ibe +stadion +jaenelle +auguste +ciudad +##chu +mysore +partisans +lucan +philipp +##aly +debating +henley +interiors +##rano +##tious +homecoming +beyonce +usher +henrietta +prepares +weeds +ely +plucked +##pire +##dable +luxurious +##aq +artifact +password +pasture +juno +maddy +minsk +##dder +##ologies +##rone +assessments +martian +royalist +1765 +examines +##mani +nino +parry +scooped +relativity +##eli +##uting +##cao +congregational +noisy +traverse +##agawa +strikeouts +nickelodeon +obituary +transylvania +binds +depictions +polk +trolley +##yed +##lard +breeders +##under +dryly +hokkaido +1762 +strengths +stacks +bonaparte +neared +prostitutes +stamped +anaheim +gutierrez +sinai +##zzling +bram +fresno +madhya +proton +##lena +##llum +##phon +reelected +wanda +##anus +##lb +ample +distinguishing +##yler +grasping +sermons +tomato +bland +stimulation +avenues +##eux +spreads +scarlett +fern +pentagon +assert +baird +chesapeake +calmed +distortion +fatalities +##olis +correctional +pricing +##astic +##gina +prom +dammit +ying +collaborate +##chia +welterweight +33rd +pointer +substitution +bonded +umpire +communicating +multitude +paddle +##obe +federally +intimacy +##insky +betray +ssr +##lett +##lves +##therapy +airbus +##tery +functioned +ud +bearer +biomedical +##hire +##nca +condom +brink +ik +##nical +macy +flap +gma +experimented +jelly +lavender +##icles +##ulia +munro +##mian +##tial +rye +##rle +60th +gigs +hottest +rotated +predictions +fuji +bu +##erence +##omi +barangay +##fulness +##sas +clocks +##rwood +##liness +cereal +roe +wight +decker +uttered +babu +onion +forcibly +##df +petra 
+sarcasm +hartley +peeled +storytelling +##xley +##ysis +##ffa +fibre +kiel +auditor +fig +harald +greenville +##berries +geographically +nell +quartz +##athic +cemeteries +crossings +nah +holloway +reptiles +chun +sichuan +snowy +corrections +##ivo +zheng +ambassadors +blacksmith +fielded +fluids +hardcover +turnover +medications +melvin +academies +##erton +roach +absorbing +spaniards +colton +##founded +outsider +espionage +kelsey +edible +##ulf +dora +establishes +##sham +##tries +contracting +##tania +cinematic +costello +nesting +##uron +connolly +duff +##nology +mma +##mata +fergus +sexes +optics +spectator +woodstock +banning +##hee +##fle +differentiate +outfielder +refinery +gerhard +horde +lair +drastically +##udi +landfall +##cheng +motorsport +odi +##achi +predominant +quay +skins +##ental +edna +harshly +complementary +murdering +##aves +wreckage +ono +outstretched +lennox +munitions +galen +reconcile +scalp +bicycles +gillespie +questionable +rosenberg +guillermo +jarvis +kabul +opium +yd +##twined +abuses +decca +outpost +##cino +sensible +neutrality +ponce +anchorage +atkins +turrets +inadvertently +disagree +libre +vodka +reassuring +weighs +##yal +glide +jumper +ceilings +repertory +outs +stain +##bial +envy +##ucible +smashing +heightened +policing +hyun +mixes +lai +prima +##ples +celeste +##bina +lucrative +intervened +kc +manually +##rned +stature +staffed +bun +bastards +nairobi +priced +##auer +thatcher +##kia +tripped +comune +##ogan +##pled +brasil +incentives +emanuel +hereford +musica +##kim +benedictine +biennale +##lani +eureka +gardiner +rb +knocks +sha +##ael +##elled +##onate +efficacy +ventura +masonic +sanford +maize +leverage +##feit +capacities +santana +##aur +novelty +vanilla +##cter +##tour +benin +##oir +neptune +drafting +tallinn +##cable +humiliation +##boarding +schleswig +fabian +bernardo +liturgy +spectacle +sweeney +pont +routledge +cosmos +ut +hilt +sleek +universally +##eville +##gawa +typed +##dry +favors +allegheny +glaciers +##rly +recalling +aziz +parasite +requiem +auf +##berto +##llin +illumination +##breaker +##issa +festivities +bows +govern +vibe +vp +sprawled +larson +pilgrim +bwf +leaping +##rts +##ssel +alexei +greyhound +hoarse +##dler +##oration +seneca +##cule +gaping +##ulously +##pura +cinnamon +##gens +##rricular +craven +fantasies +houghton +engined +reigned +dictator +supervising +##oris +bogota +commentaries +unnatural +fingernails +spirituality +tighten +canadiens +protesting +intentional +cheers +sparta +##ytic +##iere +##zine +widen +belgarath +controllers +dodd +iaaf +navarre +##ication +defect +squire +steiner +whisky +##mins +inevitably +tome +##gold +chew +##lid +elastic +##aby +streaked +alliances +jailed +regal +##ined +##phy +czechoslovak +narration +absently +##uld +bluegrass +guangdong +quran +criticizing +hose +hari +##liest +##owa +skier +streaks +deploy +##lom +raft +bose +dialed +huff +##eira +haifa +simplest +bursting +endings +sultanate +##titled +franks +whitman +ensures +sven +##ggs +collaborators +forster +organising +banished +napier +injustice +teller +layered +thump +##otti +roc +battleships +evidenced +fugitive +sadie +robotics +##roud +equatorial +geologist +##iza +yielding +##bron +##sr +internationale +mecca +##diment +skyline +toad +uploaded +reflective +undrafted +lal +leafs +bayern +##dai +lakshmi +shortlisted +##stick +##wicz +camouflage +donate +christi +lau +##acio +disclosed +nemesis +1761 +assemble +straining +northamptonshire +tal +##asi +bernardino +premature +heidi +42nd 
+coefficients +galactic +reproduce +buzzed +sensations +zionist +monsieur +myrtle +archery +strangled +musically +viewpoint +antiquities +bei +trailers +seahawks +cured +pee +preferring +tasmanian +lange +sul +##working +colder +overland +lucivar +massey +gatherings +haitian +##smith +disapproval +flaws +##cco +##enbach +1766 +npr +##icular +boroughs +creole +forums +techno +1755 +dent +abdominal +streetcar +##eson +##stream +procurement +gemini +predictable +##tya +acheron +christoph +feeder +fronts +vendor +bernhard +jammu +tumors +slang +##uber +goaltender +twists +curving +manson +vuelta +mer +peanut +confessions +pouch +unpredictable +allowance +theodor +vascular +##factory +bala +authenticity +metabolic +coughing +nanjing +##cea +pembroke +##bard +splendid +36th +hourly +##ahu +elmer +handel +##ivate +awarding +thrusting +experimentation +##hesion +caressed +entertained +steak +##rangle +biologist +orphans +baroness +oyster +stepfather +##dridge +mirage +reefs +speeding +barons +1764 +inhabit +preached +repealed +##tral +honoring +boogie +captives +administer +johanna +##imate +gel +suspiciously +1767 +sobs +##dington +backbone +hayward +garry +##folding +##nesia +maxi +##oof +##ppe +ellison +galileo +##stand +crimea +frenzy +amour +bumper +matrices +natalia +baking +garth +palestinians +##grove +smack +conveyed +ensembles +gardening +##manship +##rup +##stituting +1640 +harvesting +topography +shifters +dormitory +##carriage +##lston +ist +skulls +##stadt +dolores +jewellery +sarawak +##wai +##zier +fences +christy +confinement +tumbling +credibility +fir +stench +##bria +##plication +##nged +##sam +virtues +##belt +marjorie +pba +##eem +##made +celebrates +schooner +agitated +barley +fulfilling +anthropologist +restrict +novi +regulating +##nent +padres +##rani +##hesive +loyola +tabitha +milky +olson +proprietor +crambidae +guarantees +intercollegiate +ljubljana +hilda +##sko +ignorant +hooded +sardinia +##lidae +##vation +frontman +privileged +witchcraft +jammed +laude +poking +##than +bracket +amazement +yunnan +##erus +maharaja +linnaeus +commissioning +milano +peacefully +##logies +akira +rani +regulator +grasses +##rance +luzon +crows +compiler +gretchen +seaman +edouard +buccaneers +ellington +hamlets +whig +socialists +##anto +directorial +easton +mythological +##kr +##vary +rhineland +semantic +taut +dune +inventions +succeeds +##iter +replication +branched +##pired +prosecuted +kangaroo +penetrated +##avian +middlesbrough +doses +bleak +madam +predatory +relentless +##vili +reluctance +##vir +hailey +crore +silvery +1759 +monstrous +swimmers +transmissions +hawthorn +informing +##eral +toilets +caracas +crouch +##sett +cartel +hadley +##aling +alexia +yvonne +##biology +cinderella +eton +superb +blizzard +stabbing +industrialist +maximus +##orus +groves +maud +clade +oversized +comedic +##bella +rosen +nomadic +fulham +montane +beverages +galaxies +redundant +swarm +##rot +##folia +##llis +buckinghamshire +fen +bearings +bahadur +##rom +gilles +phased +dynamite +faber +benoit +##ount +fractured +tailored +anya +spices +westwood +cairns +auditions +inflammation +steamed +##rocity +##acion +##urne +skyla +thereof +watford +torment +archdeacon +transforms +demeanor +fucked +serge +##sor +mckenna +minas +entertainer +##icide +caress +originate +residue +##sty +1740 +##ilised +##org +beech +##wana +subsidies +##ghton +emptied +gladstone +firefighters +voodoo +het +nightingale +tamara +edmond +ingredient +weaknesses +silhouette +compatibility +withdrawing +hampson +##mona 
+anguish +giggling +bookstore +southernmost +tilting +##vance +bai +economical +briefcase +dreadful +hinted +projections +shattering +totaling +##rogate +analogue +indicted +periodical +fullback +##dman +haynes +##tenberg +##ffs +##ishment +1745 +thirst +stumble +penang +vigorous +##ddling +##kor +##lium +octave +##ove +##enstein +##inen +##ones +siberian +##uti +cbn +repeal +swaying +##vington +khalid +tanaka +unicorn +otago +plastered +lobe +riddle +##rella +perch +##ishing +croydon +filtered +graeme +tripoli +##ossa +crocodile +##chers +sufi +mined +##tung +inferno +lsu +##phi +swelled +utilizes +£2 +cale +periodicals +styx +hike +informally +coop +lund +##tidae +ala +hen +qui +transformations +disposed +sheath +chickens +##cade +fitzroy +silesia +unacceptable +odisha +1650 +sabrina +spokane +ratios +athena +massage +shen +dilemma +##drum +##riz +##hul +corona +doubtful +niall +##pha +##bino +fines +cite +acknowledging +bangor +ballard +bathurst +##resh +huron +mustered +alzheimer +garments +kinase +tyre +warship +flashback +pulmonary +braun +cheat +kamal +cyclists +constructions +grenades +ndp +traveller +excuses +stomped +signalling +trimmed +futsal +mosques +relevance +##wine +wta +##vah +hoc +##riding +optimistic +##´s +deco +interacting +rejecting +moniker +waterways +##ieri +##oku +mayors +gdansk +outnumbered +pearls +##ended +##hampton +fairs +totals +dominating +notions +stairway +compiling +pursed +commodities +grease +yeast +##jong +carthage +griffiths +residual +amc +contraction +laird +sapphire +##marine +##ivated +amalgamation +dissolve +inclination +lyle +packaged +altitudes +suez +canons +graded +lurched +narrowing +boasts +guise +enrico +##ovsky +rower +scarred +bree +cub +iberian +protagonists +bargaining +proposing +trainers +voyages +fishes +##aea +##ivist +##verance +encryption +artworks +kazan +sabre +cleopatra +hepburn +rotting +supremacy +mecklenburg +##brate +burrows +hazards +outgoing +flair +organizes +##ctions +scorpion +##usions +boo +chevalier +dunedin +slapping +ineligible +pensions +##omic +manufactures +emails +bismarck +weakening +blackish +ding +mcgee +quo +##rling +northernmost +manpower +greed +sampson +clicking +##ange +##horpe +##inations +##roving +torre +##eptive +##moral +symbolism +38th +asshole +meritorious +outfits +splashed +biographies +sprung +astros +##tale +filly +raoul +nw +tokugawa +linden +clubhouse +##apa +tracts +romano +##pio +putin +chained +dickson +gunshot +moe +gunn +rashid +##tails +zipper +##bas +##nea +contrasted +##ply +##udes +plum +pharaoh +##pile +aw +comedies +ingrid +sandwiches +subdivisions +mariana +kamen +hz +delaney +veto +herring +##words +possessive +outlines +##roup +siemens +stairwell +gallantry +messiah +palais +yells +zeppelin +bolivar +##cede +smackdown +mckinley +##mora +##yt +muted +geologic +finely +unitary +avatar +hamas +maynard +rees +bog +contrasting +##rut +liv +chico +disposition +##erate +becca +dmitry +yeshiva +narratives +##lva +##ulton +mercenary +sharpe +tempered +navigate +stealth +amassed +keynes +##lini +untouched +##rrie +havoc +lithium +##fighting +abyss +graf +southward +wolverine +balloons +implements +ngos +transitions +##icum +ambushed +concacaf +dormant +economists +##dim +costing +csi +rana +universite +boulders +verity +##llon +collin +mellon +misses +cypress +fluorescent +lifeless +spence +##ulla +crewe +shepard +pak +revelations +jolly +gibbons +paw +##dro +##quel +freeing +shack +fries +palatine +##hiko +accompaniment +cruising +recycled +##aver +erwin +sorting +synthesizers +dyke 
+realities +strides +enslaved +wetland +##ghan +competence +gunpowder +grassy +maroon +reactors +objection +##oms +carlson +gearbox +macintosh +radios +shelton +##sho +clergyman +prakash +mongols +trophies +oricon +stimuli +twenty20 +cantonese +cortes +mirrored +##saurus +bhp +cristina +melancholy +##lating +enjoyable +nuevo +##wny +downfall +schumacher +##ind +banging +lausanne +rumbled +paramilitary +reflex +ax +amplitude +migratory +##gall +##ups +midi +barnard +lastly +sherry +##nall +keystone +##kra +carleton +slippery +coloring +foe +socket +otter +##rgos +mats +##tose +consultants +bafta +bison +topping +primal +abandonment +transplant +atoll +hideous +mort +pained +reproduced +tae +howling +##turn +unlawful +billionaire +hotter +poised +lansing +##chang +dinamo +retro +messing +domesday +##mina +blitz +timed +##athing +##kley +ascending +gesturing +##izations +signaled +tis +chinatown +mermaid +savanna +jameson +##aint +catalina +##pet +##hers +cochrane +cy +chatting +##kus +alerted +computation +mused +noelle +majestic +mohawk +campo +octagonal +##sant +##hend +aspiring +##mart +comprehend +iona +paralyzed +shimmering +swindon +rhone +##eley +reputed +configurations +pitchfork +agitation +francais +gillian +lipstick +##ilo +outsiders +pontifical +resisting +bitterness +sewer +rockies +##edd +##ucher +misleading +1756 +exiting +galloway +##nging +risked +##heart +commemoration +schultz +##rka +integrating +##rsa +poses +shrieked +##weiler +guineas +gladys +jerking +owls +goldsmith +nightly +penetrating +##unced +lia +ignited +betsy +##aring +##thorpe +follower +vigorously +##rave +coded +kiran +knit +zoology +tbilisi +##bered +repository +govt +deciduous +dino +growling +##bba +enhancement +unleashed +chanting +pussy +biochemistry +##eric +kettle +repression +toxicity +nrhp +##arth +##kko +##bush +ernesto +commended +outspoken +mca +parchment +kristen +##aton +bisexual +raked +glamour +navajo +conditioned +showcased +##hma +spacious +youthful +##esa +usl +appliances +junta +brest +layne +conglomerate +enchanted +chao +loosened +picasso +circulating +inspect +montevideo +##centric +##kti +piazza +spurred +##aith +bari +freedoms +poultry +stamford +lieu +indigo +sarcastic +bahia +stump +attach +dvds +frankenstein +lille +approx +scriptures +pollen +##script +nmi +overseen +##ivism +tides +proponent +newmarket +inherit +milling +##erland +centralized +##rou +distributors +credentials +drawers +abbreviation +##lco +downing +uncomfortably +ripe +##oes +erase +franchises +populace +##bery +##khar +decomposition +pleas +##tet +daryl +sabah +##wide +fearless +genie +lesions +annette +##ogist +oboe +appendix +nair +dripped +petitioned +maclean +mosquito +parrot +hampered +1648 +operatic +reservoirs +##tham +irrelevant +jolt +summarized +##fp +medallion +##taff +clawed +harlow +narrower +goddard +marcia +bodied +fremont +suarez +altering +tempest +mussolini +porn +##isms +sweetly +oversees +walkers +solitude +grimly +shrines +ich +supervisors +hostess +dietrich +legitimacy +brushes +expressive +##yp +dissipated +##rse +localized +systemic +##nikov +gettysburg +##uaries +dialogues +muttering +housekeeper +sicilian +discouraged +##frey +beamed +kaladin +halftime +kidnap +##amo +##llet +1754 +synonymous +depleted +instituto +insulin +reprised +##opsis +clashed +##ctric +interrupting +radcliffe +insisting +medici +1715 +ejected +playfully +turbulent +starvation +##rini +shipment +rebellious +petersen +verification +merits +##rified +cakes +##charged +1757 +milford +shortages +spying +fidelity 
+##aker +emitted +storylines +harvested +seismic +##iform +cheung +kilda +theoretically +barbie +lynx +##rgy +##tius +goblin +mata +poisonous +##nburg +reactive +residues +obedience +##евич +conjecture +##rac +hating +sixties +kicker +moaning +motown +##bha +emancipation +neoclassical +##hering +consoles +ebert +professorship +##tures +sustaining +assaults +obeyed +affluent +incurred +tornadoes +##eber +##zow +emphasizing +highlanders +cheated +helmets +##ctus +internship +terence +bony +executions +legislators +berries +peninsular +tinged +##aco +1689 +amplifier +corvette +ribbons +lavish +pennant +##lander +worthless +##chfield +##forms +mariano +pyrenees +expenditures +##icides +chesterfield +mandir +tailor +39th +sergey +nestled +willed +aristocracy +devotees +goodnight +raaf +rumored +weaponry +remy +appropriations +harcourt +burr +riaa +##lence +limitation +unnoticed +guo +soaking +swamps +##tica +collapsing +tatiana +descriptive +brigham +psalm +##chment +maddox +##lization +patti +caliph +##aja +akron +injuring +serra +##ganj +basins +##sari +astonished +launcher +##church +hilary +wilkins +sewing +##sf +stinging +##fia +##ncia +underwood +startup +compilations +vibrations +embankment +jurist +bard +juventus +groundwater +kern +palaces +helium +boca +cramped +marissa +soto +##worm +jae +princely +##ggy +faso +bazaar +warmly +##voking +pairing +##lite +##grate +##nets +wien +freaked +ulysses +rebirth +##alia +mummy +guzman +jimenez +stilled +##nitz +trajectory +tha +woken +archival +professions +##pts +##pta +hilly +shadowy +shrink +##bolt +norwood +glued +migrate +stereotypes +devoid +##pheus +evacuate +horrors +infancy +gotham +knowles +optic +downloaded +sachs +kingsley +parramatta +darryl +mor +##onale +shady +commence +confesses +kan +##meter +##placed +marlborough +roundabout +regents +frigates +##imating +gothenburg +revoked +carvings +clockwise +convertible +intruder +##sche +banged +##ogo +vicky +bourgeois +##mony +dupont +footing +##gum +##real +buckle +yun +penthouse +sane +serviced +stakeholders +neumann +##eers +comb +##gam +catchment +pinning +rallies +typing +##elles +forefront +freiburg +sweetie +giacomo +widowed +goodwill +worshipped +aspirations +midday +##vat +fishery +##trick +bournemouth +turk +hearth +ethanol +guadalajara +murmurs +sl +##uge +afforded +scripted +##hta +wah +##jn +coroner +translucent +memorials +puck +progresses +clumsy +##race +candace +recounted +##slin +##uve +filtering +##mac +howl +strata +heron +leveled +##ays +dubious +##oja +##wheel +citations +exhibiting +##laya +##mics +turkic +##lberg +injunction +##ennial +antibodies +organise +##rigues +cardiovascular +cushion +inverness +##zquez +dia +cocoa +sibling +##tman +##roid +expanse +feasible +tunisian +algiers +##relli +rus +dso +westphalia +bro +tacoma +downloads +##ours +konrad +duran +##hdi +continuum +jett +compares +legislator +secession +##nable +##gues +##zuka +translating +reacher +##gley +##ła +aleppo +##agi +orchards +trapping +linguist +versatile +drumming +postage +calhoun +superiors +##mx +barefoot +leary +##cis +ignacio +alfa +kaplan +##rogen +bratislava +mori +##vot +disturb +haas +cartridges +gilmore +radiated +salford +tunic +hades +##ulsive +archeological +delilah +magistrates +auditioned +brewster +charters +empowerment +blogs +cappella +dynasties +iroquois +whipping +##krishna +raceway +truths +myra +weaken +judah +mcgregor +##horse +mic +refueling +37th +burnley +bosses +markus +premio +query +##gga +dunbar +##economic +darkest +lyndon +sealing +commendation 
+reappeared +##mun +addicted +ezio +slaughtered +satisfactory +shuffle +##eves +##thic +##uj +fortification +warrington +##otto +resurrected +fargo +mane +##utable +##lei +foreword +ox +##aris +##vern +abrams +hua +##mento +sakura +##alo +sentimental +##skaya +midfield +##eses +sturdy +scrolls +macleod +##kyu +entropy +##lance +mitochondrial +cicero +excelled +thinner +convoys +perceive +##oslav +##urable +systematically +grind +burkina +##tagram +ops +##aman +guantanamo +##cloth +##tite +forcefully +wavy +##jou +pointless +##linger +##tze +layton +portico +superficial +clerical +outlaws +##hism +burials +muir +##inn +creditors +hauling +rattle +##leg +calais +monde +archers +reclaimed +dwell +wexford +hellenic +falsely +remorse +##tek +dough +furnishings +##uttered +gabon +neurological +novice +##igraphy +contemplated +pulpit +nightstand +saratoga +##istan +documenting +pulsing +taluk +##firmed +busted +marital +##rien +disagreements +wasps +##yes +hodge +mcdonnell +mimic +fran +pendant +dhabi +musa +##nington +congratulations +argent +darrell +concussion +losers +regrets +thessaloniki +reversal +donaldson +hardwood +thence +achilles +ritter +##eran +demonic +jurgen +prophets +goethe +eki +classmate +##cking +yank +irrational +##inging +perished +seductive +qur +sourced +##crat +##typic +mustard +ravine +barre +horizontally +characterization +phylogenetic +boise +##dit +##runner +##tower +brutally +intercourse +seduce +##bbing +fay +ferris +ogden +amar +nik +unarmed +##inator +evaluating +kyrgyzstan +sweetness +##lford +##oki +mccormick +meiji +notoriety +stimulate +disrupt +figuring +instructional +mcgrath +##zoo +groundbreaking +##lto +flinch +khorasan +agrarian +bengals +mixer +radiating +##sov +ingram +pitchers +nad +tariff +##cript +tata +##codes +##emi +##ungen +appellate +lehigh +##bled +##giri +brawl +duct +texans +##ciation +##ropolis +skipper +speculative +vomit +doctrines +stresses +davy +graders +whitehead +jozef +timely +cumulative +haryana +paints +appropriately +boon +cactus +##ales +##pid +dow +legions +##pit +perceptions +1730 +picturesque +##yse +periphery +rune +wr +##aha +celtics +sentencing +whoa +##erin +confirms +variance +moines +mathews +spade +rave +fronted +blending +alleging +reared +##paper +grassroots +eroded +##physical +directs +ordeal +##sław +accelerate +hacker +rooftop +##inia +lev +buys +cebu +devote +##lce +specialising +##ulsion +choreographed +repetition +warehouses +##ryl +paisley +tuscany +analogy +sorcerer +hash +huts +shards +descends +exclude +nix +chaplin +ito +vane +##drich +causeway +misconduct +limo +orchestrated +glands +jana +##kot +u2 +##sons +branching +contrasts +scoop +longed +##virus +chattanooga +syrup +cornerstone +##tized +##mind +##iaceae +careless +precedence +frescoes +##uet +chilled +consult +modelled +snatch +peat +##thermal +caucasian +humane +relaxation +spins +temperance +##lbert +occupations +lambda +hybrids +moons +##oese +rolf +societal +yerevan +ness +##ssler +befriended +mechanized +nominate +trough +boasted +cues +seater +##hom +bends +##tangle +conductors +emptiness +eurasian +adriatic +tian +##cie +anxiously +lark +propellers +chichester +jock +##holding +credible +recounts +tori +loyalist +abduction +##hoot +##redo +nepali +##mite +ventral +tempting +##ango +##crats +steered +##wice +javelin +dipping +laborers +prentice +looming +titanium +badges +emir +tensor +##ntation +egyptians +rash +denies +hawthorne +lombard +showers +wehrmacht +dietary +trojan +##reus +welles +executing +horseshoe +lifeboat +##lak +elsa 
+infirmary +nearing +roberta +boyer +mutter +trillion +joanne +##fine +##oked +sinks +vortex +uruguayan +clasp +sirius +##block +accelerator +prohibit +sunken +byu +chronological +diplomats +ochreous +symmetrical +1644 +maia +##tology +salts +reigns +atrocities +##ия +hess +bared +issn +##vyn +cater +saturated +##cycle +##isse +sable +voyager +dyer +yusuf +##inge +fountains +wolff +##nni +engraving +rollins +atheist +ominous +##ault +herr +chariot +martina +strung +##fell +##farlane +horrific +sahib +gazes +saetan +erased +ptolemy +##olic +flushing +lauderdale +analytic +##ices +navarro +beak +gorilla +herrera +broom +guadalupe +raiding +sykes +bsc +deliveries +1720 +invasions +carmichael +tajikistan +thematic +ecumenical +sentiments +onstage +##rians +##brand +##sume +catastrophic +flanks +molten +##arns +waller +aimee +terminating +##icing +alternately +##oche +nehru +printers +outraged +##eving +empires +template +banners +repetitive +za +##oise +vegetarian +##tell +guiana +opt +cavendish +lucknow +synthesized +##hani +##mada +finalized +##ctable +fictitious +mayoral +unreliable +##enham +embracing +peppers +rbis +##chio +##neo +inhibition +slashed +togo +orderly +embroidered +salty +barron +benito +totaled +##dak +pubs +simulated +caden +devin +tolkien +momma +welding +sesame +##ept +gottingen +hardness +shaman +temeraire +adequately +pediatric +assertion +radicals +composure +cadence +seafood +beaufort +lazarus +mani +warily +cunning +kurdistan +cantata +##kir +ares +##clusive +nape +townland +geared +insulted +flutter +boating +violate +draper +dumping +malmo +##hh +##romatic +firearm +alta +bono +obscured +##clave +exceeds +panorama +unbelievable +##train +preschool +##essed +disconnected +installing +rescuing +secretaries +accessibility +##castle +##ifice +##film +bouts +slug +waterway +mindanao +##buro +##ratic +halves +calming +liter +maternity +adorable +bragg +electrification +mcc +##dote +roxy +schizophrenia +munoz +kaye +whaling +mil +tingling +tolerant +##ago +unconventional +volcanoes +##finder +deportivo +##llie +robson +kaufman +neuroscience +wai +deportation +masovian +scraping +converse +##bh +hacking +bulge +##oun +administratively +yao +mammoth +booster +claremont +hooper +nomenclature +pursuits +mclaughlin +melinda +##sul +catfish +barclay +substrates +taxa +zee +kimberly +packets +padma +##ality +borrowing +ostensibly +solvent +##bri +##genesis +##mist +lukas +shreveport +veracruz +##lou +##wives +cheney +anatolia +hobbs +##zyn +cyclic +radiant +alistair +greenish +siena +dat +independents +##bation +conform +pieter +hyper +applicant +bradshaw +spores +telangana +vinci +inexpensive +nuclei +jang +nme +spd +cradled +receptionist +pow +##rika +fascism +##ifer +experimenting +##ading +##iec +##region +jocelyn +maris +stair +nocturnal +toro +constabulary +elgin +##kker +msc +##giving +##schen +##rase +doherty +doping +sarcastically +batter +maneuvers +##cano +##apple +##gai +##git +intrinsic +##nst +##stor +1753 +showtime +cafes +gasps +lviv +ushered +##thed +fours +restart +astonishment +transmitting +flyer +shrugs +##sau +intriguing +cones +dictated +mushrooms +medial +##kovsky +##elman +escorting +gaped +godfather +##door +##sell +djs +recaptured +timetable +vila +1710 +aerodrome +mortals +scientology +##orne +angelina +mag +convection +unpaid +insertion +intermittent +lego +##nated +endeavor +kota +pereira +##lz +bwv +glamorgan +insults +agatha +fey +##cend +fleetwood +mahogany +protruding +steamship +zeta +##arty +mcguire +suspense +##sphere +advising +urges 
+##wala +hurriedly +meteor +gilded +inline +arroyo +stalker +##oge +excitedly +revered +##cure +earle +introductory +##break +##ilde +mutants +puff +pulses +reinforcement +##haling +curses +lizards +stalk +correlated +##fixed +fallout +macquarie +##unas +bearded +denton +heaving +##ocation +winery +assign +dortmund +##lkirk +everest +invariant +charismatic +susie +##elling +bled +lesley +telegram +sumner +bk +##ogen +wilcox +needy +colbert +duval +##iferous +##mbled +allotted +attends +imperative +##hita +replacements +hawker +##inda +insurgency +##zee +##eke +casts +##yla +ives +transitioned +##pack +##powering +authoritative +baylor +flex +cringed +plaintiffs +woodrow +##skie +drastic +ape +aroma +unfolded +commotion +preoccupied +theta +routines +lasers +privatization +wand +domino +ek +clenching +nsa +strategically +showered +bile +handkerchief +pere +storing +christophe +insulting +nakamura +romani +asiatic +magdalena +palma +cruises +stripping +konstantin +soaring +##berman +colloquially +forerunner +havilland +incarcerated +parasites +sincerity +##utus +disks +plank +saigon +##ining +corbin +homo +ornaments +powerhouse +##tlement +chong +fastened +feasibility +idf +morphological +usable +##nish +##zuki +aqueduct +jaguars +keepers +##flies +aleksandr +faust +assigns +ewing +bacterium +hurled +tricky +hungarians +integers +wallis +yamaha +##isha +hushed +oblivion +aviator +evangelist +friars +##eller +monograph +ode +##nary +airplanes +labourers +charms +##nee +1661 +hagen +tnt +rudder +fiesta +transcript +dorothea +ska +inhibitor +maccabi +retorted +raining +encompassed +clauses +menacing +1642 +lineman +##gist +vamps +##dick +gloom +##rera +dealings +easing +seekers +##nut +##pment +helens +unmanned +##anu +##isson +basics +##amy +##ckman +adjustments +1688 +brutality +horne +##zell +##mable +aggregator +##thal +rhino +##drick +##vira +counters +##rting +mn +montenegrin +packard +##unciation +##♭ +##kki +reclaim +scholastic +thugs +pulsed +##icia +syriac +quan +saddam +banda +kobe +blaming +buddies +dissent +##lusion +##usia +corbett +jaya +delle +erratic +lexie +##hesis +amiga +hermes +##pressing +##leen +chapels +gospels +jamal +##uating +compute +revolving +warp +##sso +##thes +armory +##eras +##gol +antrim +loki +##kow +##asian +##good +##zano +braid +handwriting +subdistrict +funky +pantheon +##iculate +concurrency +estimation +improper +juliana +##his +newcomers +johnstone +staten +communicated +##oco +##alle +sausage +stormy +##stered +##tters +superfamily +##grade +acidic +collateral +tabloid +##oped +##rza +bladder +austen +##ellant +mcgraw +##hay +hannibal +mein +aquino +lucifer +wo +badger +boar +cher +christensen +greenberg +interruption +##kken +jem +mocked +bottoms +cambridgeshire +##lide +sprawling +##bbly +eastwood +ghent +synth +##buck +advisers +##bah +nominally +hapoel +qu +daggers +estranged +fabricated +towels +vinnie +wcw +misunderstanding +anglia +nothin +unmistakable +##dust +##lova +chilly +marquette +truss +##edge +##erine +reece +##lty +##chemist +##connected +41st +bash +raion +waterfalls +##ump +##main +labyrinth +queue +theorist +##istle +bharatiya +flexed +soundtracks +rooney +leftist +patrolling +wharton +plainly +alleviate +eastman +schuster +topographic +engages +immensely +unbearable +fairchild +1620 +dona +lurking +parisian +oliveira +ia +indictment +hahn +bangladeshi +##aster +##uming +##ential +antonia +expects +indoors +kildare +harlan +##logue +##ogenic +##sities +forgiven +##wat +childish +tavi +##mide +##orra +plausible +grimm +successively 
+scooted +##bola +##rith +spartans +emery +flatly +epilogue +##wark +flourish +##iny +##tracted +##overs +##oshi +bestseller +distressed +receipt +spitting +hermit +topological +##cot +drilled +subunit +francs +##layer +eel +##fk +##itas +octopus +footprint +petitions +##say +##foil +interfering +leaking +palo +##metry +thistle +valiant +##pic +narayan +mcpherson +##fast +gonzales +##enne +dustin +novgorod +solos +##zman +doin +##patient +##meyer +soluble +ashland +cuffs +carole +pendleton +whistling +vassal +##river +deviation +revisited +constituents +rallied +rotate +loomed +##eil +##nting +amateurs +augsburg +auschwitz +crowns +skeletons +##cona +bonnet +dummy +globalization +simeon +sleeper +mandal +differentiated +##crow +##mare +milne +bundled +exasperated +talmud +owes +segregated +##feng +##uary +dentist +piracy +props +##rang +devlin +##torium +malicious +paws +##laid +dependency +##ergy +##fers +##enna +pistons +rourke +jed +grammatical +tres +maha +wig +ghostly +jayne +##achal +##creen +##ilis +##lins +designate +##with +arrogance +cambodian +clones +showdown +throttle +twain +##ception +lobes +metz +nagoya +braking +##furt +roaming +##minster +amin +crippled +##llary +indifferent +hoffmann +idols +intimidating +1751 +influenza +memo +onions +1748 +bandage +consciously +##landa +##rage +clandestine +observes +swiped +tangle +##ener +##jected +##trum +##bill +##lta +hugs +congresses +josiah +spirited +##dek +humanist +managerial +filmmaking +inmate +rhymes +debuting +grimsby +ur +##laze +duplicate +vigor +republished +bolshevik +refurbishment +antibiotics +martini +methane +newscasts +royale +horizons +levant +iain +visas +##ischen +paler +##around +manifestation +snuck +alf +chop +futile +pedestal +rehab +##kat +bmg +kerman +res +fairbanks +jarrett +abstraction +saharan +##zek +1746 +procedural +clearer +kincaid +sash +luciano +##ffey +crunch +helmut +##vara +revolutionaries +##tute +creamy +leach +##mmon +1747 +permitting +nes +plight +wendell +##lese +contra +clancy +ipa +mach +staples +autopsy +disturbances +nueva +karin +pontiac +##uding +proxy +venerable +haunt +leto +bergman +expands +##helm +wal +##pipe +canning +celine +cords +obesity +##enary +intrusion +planner +##phate +reasoned +sequencing +harrow +##chon +##dora +marred +mcintyre +repay +tarzan +darting +harrisburg +margarita +repulsed +##lding +belinda +hamburger +novo +compliant +runways +bingham +registrar +skyscraper +cuthbert +improvisation +livelihood +##corp +##elial +admiring +##dened +sporadic +believer +casablanca +popcorn +asha +shovel +##bek +##dice +coiled +tangible +##dez +casper +elsie +resin +tenderness +rectory +##ivision +avail +sonar +##mori +boutique +##dier +guerre +bathed +upbringing +vaulted +sandals +blessings +##naut +##utnant +1680 +foxes +pia +corrosion +hesitantly +confederates +crystalline +footprints +shapiro +tirana +valentin +drones +45th +microscope +shipments +texted +inquisition +wry +guernsey +unauthorized +resigning +ripple +schubert +stu +reassure +felony +##ardo +brittle +koreans +##havan +##ives +dun +implicit +tyres +##aldi +##lth +magnolia +##ehan +##puri +##poulos +aggressively +fei +gr +familiarity +##poo +indicative +##trust +fundamentally +jimmie +overrun +anchors +moans +##opus +britannia +armagh +purposely +seizing +##vao +bewildered +mundane +avoidance +cosmopolitan +geometridae +quartermaster +caf +chatter +engulfed +gleam +purge +##icate +juliette +jurisprudence +guerra +revisions +##bn +casimir +brew +##jm +1749 +clapton +cloudy +conde +hermitage +simulations 
+torches +vincenzo +matteo +##rill +hidalgo +booming +westbound +accomplishment +tentacles +unaffected +##sius +annabelle +flopped +sloping +##litz +dreamer +interceptor +vu +##loh +consecration +copying +messaging +breaker +climates +hospitalized +1752 +torino +afternoons +winfield +witnessing +##teacher +breakers +choirs +sawmill +coldly +##ege +sipping +haste +uninhabited +conical +bibliography +pamphlets +severn +edict +##oca +deux +illnesses +grips +rehearsals +sis +thinkers +tame +##keepers +1690 +acacia +reformer +##osed +##rys +shuffling +##iring +##shima +eastbound +ionic +rhea +flees +littered +##oum +rocker +vomiting +groaning +champ +overwhelmingly +civilizations +paces +sloop +adoptive +##tish +skaters +##vres +aiding +nikola +shriek +##ignon +pharmaceuticals +tuna +calvert +gustavo +stocked +yearbook +##urai +##mana +computed +subsp +riff +hanoi +kelvin +hamid +moors +pastures +summons +jihad +nectar +##ctors +bayou +untitled +pleasing +vastly +republics +intellect +##ulio +##tou +crumbling +stylistic +##ی +consolation +frequented +h₂o +walden +widows +##iens +##ignment +chunks +improves +grit +recited +##dev +snarl +sociological +##arte +##gul +inquired +##held +bruise +clube +consultancy +homogeneous +hornets +multiplication +pasta +prick +savior +##grin +##kou +##phile +yoon +##gara +grimes +vanishing +cheering +reacting +bn +distillery +##quisite +##vity +coe +dockyard +massif +##jord +escorts +voss +##valent +byte +chopped +hawke +illusions +workings +floats +##koto +##vac +kv +annapolis +madden +##onus +alvaro +noctuidae +##cum +##scopic +avenge +steamboat +forte +illustrates +erika +##trip +dew +nationalities +bran +manifested +thirsty +diversified +muscled +reborn +##standing +arson +##lessness +##dran +##logram +##boys +##kushima +##vious +willoughby +##phobia +alsace +dashboard +yuki +##chai +granville +myspace +publicized +tricked +##gang +adjective +##ater +relic +reorganisation +enthusiastically +indications +saxe +##lassified +consolidate +iec +padua +helplessly +ramps +renaming +regulars +pedestrians +accents +convicts +inaccurate +lowers +mana +##pati +barrie +bjp +outta +someplace +berwick +flanking +invoked +marrow +sparsely +excerpts +clothed +rei +##ginal +wept +##straße +##vish +##ptive +membranes +aquitaine +creeks +cutler +sheppard +implementations +##dur +fragrance +budge +concordia +magnesium +marcelo +##antes +gladly +vibrating +##rral +##ggles +montrose +##omba +lew +seamus +1630 +cocky +##ament +##uen +bjorn +##rrick +fielder +fluttering +##lase +methyl +kimberley +mcdowell +reductions +barbed +##jic +##tonic +aeronautical +condensed +distracting +##promising +huffed +##cala +##sle +claudius +invincible +missy +pious +balthazar +##lang +butte +combo +orson +##dication +myriad +1707 +silenced +##fed +##rh +netball +yourselves +##oza +clarify +heller +peg +durban +etudes +offender +roast +blackmail +curvature +##woods +vile +illicit +suriname +##linson +overture +1685 +bubbling +gymnast +tucking +##mming +##ouin +maldives +##bala +gurney +##dda +##eased +##oides +backside +pinto +jars +racehorse +tending +##rdial +baronetcy +wiener +duly +##rke +barbarian +cupping +flawed +##thesis +bertha +pleistocene +puddle +swearing +##nob +##tically +fleeting +prostate +amulet +educating +##mined +##tler +75th +jens +respondents +cavaliers +papacy +raju +##iente +##ulum +##tip +funnel +disneyland +##lley +sociologist +##iam +faulkner +louvre +menon +##dson +##ower +afterlife +mannheim +peptide +referees +comedians +meaningless +##anger +##laise +fabrics +hurley 
+renal +sleeps +##bour +##icle +breakout +kristin +roadside +animator +clover +disdain +unsafe +redesign +##urity +firth +barnsley +portage +reset +narrows +commandos +expansive +speechless +tubular +essendon +eyelashes +smashwords +##yad +##bang +##claim +craved +sprinted +chet +somme +astor +wrocław +orton +bane +##erving +##uing +mischief +##amps +##sund +scaling +terre +##xious +impairment +offenses +undermine +moi +soy +contiguous +arcadia +inuit +seam +##tops +macbeth +rebelled +##icative +##iot +elaborated +frs +uniformed +##dberg +powerless +priscilla +stimulated +qc +arboretum +frustrating +trieste +bullock +##nified +enriched +glistening +intern +##adia +locus +nouvelle +ollie +ike +lash +starboard +tapestry +headlined +hove +rigged +##vite +pollock +##yme +thrive +clustered +cas +roi +gleamed +olympiad +##lino +pressured +regimes +##hosis +##lick +ripley +##ophone +kickoff +gallon +rockwell +##arable +crusader +glue +revolutions +scrambling +1714 +grover +##jure +englishman +aztec +contemplating +coven +preach +triumphant +tufts +##esian +rotational +##phus +falkland +##brates +strewn +clarissa +rejoin +environmentally +glint +banded +drenched +moat +albanians +johor +rr +maestro +malley +nouveau +shaded +taxonomy +adhere +bunk +airfields +##ritan +1741 +encompass +remington +tran +##erative +amelie +mazda +friar +morals +passions +##zai +breadth +vis +##hae +argus +burnham +caressing +insider +rudd +##imov +##rso +italianate +murderous +textual +wainwright +armada +bam +weave +timer +##taken +##nh +fra +##crest +ardent +salazar +taps +tunis +##ntino +allegro +gland +philanthropic +##chester +implication +##optera +esq +judas +noticeably +wynn +##dara +inched +indexed +crises +villiers +bandit +royalties +patterned +cupboard +interspersed +accessory +isla +kendrick +entourage +stitches +##esthesia +headwaters +##ior +interlude +distraught +draught +1727 +##basket +biased +sy +transient +triad +subgenus +adapting +kidd +shortstop +##umatic +dimly +spiked +mcleod +reprint +nellie +pretoria +windmill +##cek +singled +##mps +reunite +##orous +bankers +outlying +##omp +##ports +##tream +apologies +cosmetics +patsy +##deh +##ocks +##yson +bender +nantes +serene +##nad +lucha +mmm +##cius +##gli +cmll +coinage +nestor +juarez +##rook +smeared +sprayed +twitching +sterile +irina +embodied +juveniles +enveloped +miscellaneous +cancers +dq +gulped +luisa +crested +swat +donegal +ref +##anov +##acker +hearst +mercantile +##lika +doorbell +vicki +##alla +##som +bilbao +psychologists +stryker +sw +horsemen +turkmenistan +wits +##national +anson +mathew +screenings +##umb +rihanna +##agne +##nessy +aisles +##iani +##osphere +hines +kenton +saskatoon +tasha +truncated +##champ +##itan +mildred +advises +fredrik +interpreting +inhibitors +##athi +spectroscopy +##hab +##kong +karim +panda +##oia +##nail +conqueror +kgb +leukemia +##dity +arrivals +cheered +pisa +phosphorus +shielded +##riated +mammal +unitarian +urgently +chopin +sanitary +##mission +spicy +drugged +hinges +##tort +tipping +trier +impoverished +westchester +##caster +epoch +nonstop +##gman +##khov +aromatic +centrally +cerro +##tively +##vio +billions +modulation +sedimentary +facilitating +outrageous +goldstein +##eak +##kt +ld +maitland +penultimate +pollard +##dance +fleets +spaceship +vertebrae +##nig +alcoholism +als +recital +##bham +##omics +##bm +trois +##tropical +commemorates +##meric +marge +##raction +1643 +cosmetic +ravaged +##ige +catastrophe +eng +##shida +albrecht +arterial +bellamy +decor +harmon +##rde +bulbs 
+synchronized +vito +easiest +shetland +shielding +wnba +##glers +##ssar +##riam +brianna +cumbria +##aceous +##rard +cores +thayer +##nsk +brood +hilltop +luminous +carts +keynote +larkin +logos +##cta +##mund +##quay +lilith +tinted +wrestle +mobilization +##uses +sequential +siam +bloomfield +takahashi +##ieving +presenters +ringo +blazed +witty +##oven +##ignant +devastation +haydn +harmed +newt +therese +##peed +gershwin +molina +rabbis +sudanese +innate +restarted +##sack +##fus +slices +wb +##shah +enroll +hypothetical +hysterical +1743 +fabio +indefinite +warped +exchanging +unsuitable +##sboro +gallo +1603 +bret +cobalt +homemade +##hunter +operatives +##dhar +terraces +durable +latch +pens +whorls +##ctuated +##eaux +billing +ligament +succumbed +##gly +regulators +spawn +##brick +##stead +filmfare +rochelle +##nzo +1725 +circumstance +saber +supplements +##nsky +##tson +crowe +wellesley +carrot +##9th +##movable +primate +drury +sincerely +topical +##mad +##rao +callahan +kyiv +smarter +tits +undo +##yeh +announcements +anthologies +barrio +nebula +##islaus +##shaft +##tyn +bodyguards +assassinate +barns +emmett +scully +##yd +##eland +##tino +##itarian +demoted +gorman +lashed +prized +adventist +writ +##gui +alla +invertebrates +##ausen +1641 +amman +1742 +align +healy +redistribution +##gf +##rize +insulation +##drop +adherents +hezbollah +vitro +ferns +yanking +registering +uppsala +cheerleading +confines +mischievous +tully +##ross +49th +docked +roam +stipulated +pumpkin +##bry +prompt +##ezer +blindly +shuddering +craftsmen +frail +scented +katharine +scramble +shaggy +sponge +helix +zaragoza +43rd +backlash +fontaine +seizures +posse +cowan +nonfiction +telenovela +wwii +hammered +undone +##gpur +encircled +irs +##ivation +artefacts +oneself +searing +smallpox +##belle +##osaurus +shandong +breached +upland +blushing +rankin +infinitely +psyche +tolerated +docking +evicted +##col +unmarked +##lving +gnome +lettering +litres +musique +##oint +benevolent +##jal +blackened +##anna +mccall +racers +tingle +##ocene +##orestation +introductions +radically +##hiff +##باد +1610 +1739 +munchen +plead +##nka +condo +scissors +##sight +##tens +apprehension +##cey +##yin +hallmark +watering +formulas +sequels +##llas +aggravated +bae +commencing +##building +enfield +prohibits +marne +vedic +civilized +euclidean +jagger +beforehand +blasts +dumont +##arney +##nem +conversions +hierarchical +rios +simulator +##dya +##lellan +hedges +oleg +thrusts +shadowed +darby +maximize +1744 +gregorian +##nded +##routed +sham +unspecified +##hog +emory +factual +##smo +fooled +##rger +ortega +wellness +marlon +##oton +##urance +casket +keating +ley +enclave +##ayan +char +influencing +jia +##chenko +ammonia +erebidae +incompatible +violins +cornered +##arat +grooves +astronauts +columbian +rampant +fabrication +kyushu +mahmud +vanish +##dern +mesopotamia +##lete +##rgen +caspian +kenji +pitted +##vered +grimace +roanoke +tchaikovsky +twinned +##analysis +##awan +xinjiang +arias +clemson +kazakh +sizable +1662 +##khand +##vard +plunge +tatum +vittorio +##nden +cholera +##dana +bracing +indifference +projectile +superliga +##chee +realises +upgrading +porte +retribution +##vies +nk +stil +##resses +ama +bureaucracy +blackberry +bosch +testosterone +collapses +greer +##pathic +ioc +fifties +malls +##erved +bao +baskets +adolescents +siegfried +##osity +##tosis +mantra +detecting +existent +fledgling +##cchi +dissatisfied +gan +telecommunication +mingled +sobbed +controversies +outdated +taxis 
+##raus +fright +slams +##lham +##fect +##tten +detectors +fetal +tanned +##uw +fray +goth +olympian +skipping +mandates +scratches +sheng +unspoken +hyundai +tracey +hotspur +restrictive +##buch +americana +mundo +##bari +burroughs +diva +vulcan +##6th +distinctions +thumping +##ngen +mikey +sheds +fide +rescues +springsteen +vested +valuation +##ece +##ely +pinnacle +rake +sylvie +##edo +almond +quivering +##irus +alteration +faltered +##wad +51st +hydra +ticked +##kato +recommends +##dicated +antigua +arjun +stagecoach +wilfred +trickle +pronouns +##pon +aryan +nighttime +##anian +gall +pea +stitch +##hei +leung +milos +##dini +eritrea +starved +snowfall +kant +parasitic +cot +discus +hana +strikers +appleton +kitchens +##erina +##partisan +##itha +##vius +disclose +metis +##channel +1701 +##vera +fitch +1735 +blooded +##tila +decimal +##tang +##bai +cyclones +eun +bottled +peas +pensacola +basha +bolivian +crabs +boil +lanterns +partridge +roofed +1645 +necks +##phila +opined +patting +##kla +##lland +chuckles +volta +whereupon +##nche +devout +euroleague +suicidal +##dee +inherently +involuntary +knitting +nasser +##hide +puppets +colourful +courageous +southend +stills +miraculous +hodgson +richer +rochdale +ethernet +greta +uniting +prism +umm +##haya +##itical +##utation +deterioration +pointe +prowess +##ropriation +lids +scranton +billings +subcontinent +##koff +##scope +brute +kellogg +psalms +degraded +##vez +stanisław +##ructured +ferreira +pun +astonishing +gunnar +##yat +arya +prc +gottfried +##tight +excursion +##ographer +dina +##quil +##nare +huffington +illustrious +wilbur +verandah +##zard +naacp +##odle +constructive +fjord +kade +##naud +generosity +thrilling +baseline +cayman +frankish +plastics +accommodations +zoological +##fting +cedric +qb +motorized +##dome +##otted +squealed +tackled +canucks +budgets +situ +asthma +dail +gabled +grasslands +whimpered +writhing +judgments +minnie +##carbon +bananas +grille +domes +monique +odin +maguire +markham +tierney +##estra +##chua +libel +poke +speedy +atrium +laval +notwithstanding +##edly +fai +kala +##sur +robb +##sma +listings +luz +supplementary +tianjin +##acing +enzo +jd +ric +scanner +croats +transcribed +arden +##hair +##raphy +##lver +seventies +staggering +alam +horticultural +hs +regression +timbers +blasting +##ounded +montagu +manipulating +##cit +catalytic +1550 +troopers +##meo +condemnation +fitzpatrick +##oire +##roved +inexperienced +1670 +castes +##lative +outing +dubois +flicking +quarrel +ste +learners +1625 +whistled +##class +classify +tariffs +temperament +folly +liszt +##yles +immersed +jordanian +ceasefire +apparel +extras +maru +fished +##bio +harta +stockport +assortment +craftsman +paralysis +transmitters +##cola +blindness +##wk +fatally +proficiency +solemnly +##orno +repairing +amore +groceries +ultraviolet +##chase +schoolhouse +##tua +resurgence +nailed +##otype +ruse +saliva +diagrams +##tructing +albans +rann +thirties +antennas +hilarious +cougars +paddington +stats +##eger +breakaway +reza +authorship +prohibiting +scoffed +##etz +##ttle +conscription +defected +trondheim +##fires +ivanov +keenan +##adan +##ciful +##fb +##slow +locating +##ials +##tford +cadiz +basalt +blankly +interned +rags +rattling +##tick +carpathian +reassured +bum +guildford +iss +staunch +##onga +astronomers +sera +sofie +emergencies +susquehanna +##heard +duc +mastery +vh1 +williamsburg +bayer +buckled +craving +##khan +##rdes +bloomington +##write +alton +barbecue +##bians +justine +##hri +##ndt 
+delightful +smartphone +newtown +photon +retrieval +peugeot +hissing +##monium +##orough +flavors +lighted +relaunched +tainted +##games +##lysis +anarchy +microscopic +hopping +adept +evade +evie +##beau +inhibit +sinn +adjustable +hurst +intuition +wilton +44th +lawful +lowlands +stockings +thierry +##dalen +##hila +##nai +fates +prank +maison +lobbied +provocative +1724 +utopia +##qual +carbonate +gujarati +purcell +##rford +curtiss +##mei +overgrown +arenas +mediation +swallows +##rnik +respectful +turnbull +##hedron +##hope +alyssa +ozone +##ʻi +ami +gestapo +johansson +snooker +canteen +cuff +declines +empathy +stigma +##ags +##raine +taxpayers +volga +##wright +##copic +lifespan +overcame +tattooed +enactment +giggles +##ador +##camp +barrington +bribe +obligatory +orbiting +peng +##enas +elusive +sucker +##vating +cong +hardship +empowered +anticipating +estrada +cryptic +greasy +detainees +planck +sudbury +plaid +dod +kayla +##ears +##vb +##zd +mortally +##hein +cognition +radha +liechtenstein +meade +richly +argyle +harpsichord +liberalism +trumpets +lauded +tyrant +salsa +tiled +lear +promoters +reused +slicing +trident +##chuk +##gami +##lka +cantor +checkpoint +##points +gaul +leger +mammalian +##tov +##aar +##schaft +doha +frenchman +nirvana +##vino +delgado +headlining +##eron +##iography +jug +tko +1649 +naga +intersections +benfica +nawab +##suka +ashford +gulp +##deck +##vill +##rug +brentford +frazier +pleasures +dunne +potsdam +shenzhen +dentistry +##tec +flanagan +##dorff +##hear +chorale +dinah +prem +quezon +##rogated +relinquished +sutra +terri +##pani +flaps +##rissa +poly +##rnet +homme +aback +##eki +linger +womb +##kson +##lewood +doorstep +orthodoxy +threaded +westfield +##rval +dioceses +fridays +subsided +##gata +loyalists +##biotic +##ettes +letterman +lunatic +prelate +tenderly +invariably +souza +thug +winslow +##otide +furlongs +gogh +jeopardy +##runa +pegasus +##umble +humiliated +standalone +tagged +##roller +freshmen +klan +##bright +attaining +initiating +transatlantic +logged +viz +##uance +1723 +combatants +intervening +stephane +chieftain +despised +grazed +cdc +galveston +godzilla +macro +simulate +##planes +parades +##esses +##ductive +##unes +equator +overdose +##cans +##hosh +##lifting +joshi +epstein +sonora +treacherous +aquatics +manchu +responsive +##sation +supervisory +##christ +##llins +##ibar +##balance +##uso +kimball +karlsruhe +mab +##emy +ignores +phonetic +spaghetti +almighty +danzig +rumbling +tombstone +designations +lured +outset +##felt +supermarkets +grupo +kei +kraft +susanna +##blood +comprehension +genealogy +##aghan +##verted +redding +##ythe +1722 +bowing +##pore +##roi +lest +sharpened +fulbright +valkyrie +sikhs +##unds +swans +bouquet +merritt +##tage +##venting +commuted +redhead +clerks +leasing +cesare +dea +hazy +##vances +fledged +greenfield +servicemen +##gical +armando +blackout +sagged +downloadable +intra +potion +pods +##4th +##mism +attendants +gambia +stale +##ntine +plump +asteroids +rediscovered +buds +flea +hive +##neas +1737 +classifications +debuts +##eles +olympus +scala +##eurs +##gno +##mute +hummed +sigismund +visuals +wiggled +await +pilasters +clench +sulfate +##ances +bellevue +enigma +trainee +snort +##sw +clouded +denim +##rank +churning +hartman +lodges +riches +sima +##missible +accountable +socrates +regulates +mueller +1702 +avoids +solids +himalayas +nutrient +pup +##jevic +squat +fades +nec +##lates +##pina +##rona +##ου +privateer +tequila +##gative +##mpton +hornet +immortals +##dou 
+asturias +cleansing +dario +##rries +##anta +etymology +servicing +zhejiang +##venor +##nx +horned +erasmus +rayon +relocating +£10 +##bags +escalated +promenade +stubble +2010s +artisans +axial +liquids +mora +sho +yoo +##tsky +bundles +oldies +##nally +notification +bastion +##ths +sparkle +##lved +1728 +leash +pathogen +highs +##hmi +immature +gonzaga +ignatius +mansions +monterrey +sweets +bryson +##loe +polled +regatta +brightest +pei +rosy +squid +hatfield +payroll +addict +meath +cornerback +heaviest +lodging +##mage +capcom +rippled +##sily +barnet +mayhem +ymca +snuggled +rousseau +##cute +blanchard +fragmented +leighton +chromosomes +risking +##strel +##utter +corinne +coyotes +cynical +hiroshi +yeomanry +##ractive +ebook +grading +mandela +plume +agustin +magdalene +##rkin +bea +femme +trafford +##coll +##lun +##tance +52nd +fourier +upton +##mental +camilla +gust +iihf +islamabad +longevity +##kala +feldman +netting +##rization +endeavour +foraging +mfa +orr +##open +greyish +contradiction +graz +##ruff +handicapped +marlene +tweed +oaxaca +spp +campos +miocene +pri +configured +cooks +pluto +cozy +pornographic +##entes +70th +fairness +glided +jonny +lynne +rounding +sired +##emon +##nist +remade +uncover +##mack +complied +lei +newsweek +##jured +##parts +##enting +##pg +finer +guerrillas +athenian +deng +disused +stepmother +accuse +gingerly +seduction +confronting +##going +gora +nostalgia +sabres +virginity +wrenched +##minated +syndication +wielding +eyre +##gnon +##igny +behaved +taxpayer +sweeps +##growth +childless +gallant +##ywood +amplified +geraldine +scrape +##ffi +babylonian +fresco +##rdan +##kney +##position +1718 +restricting +tack +fukuoka +osborn +selector +partnering +##dlow +kia +tak +whitley +gables +##mania +mri +softness +immersion +##bots +##evsky +1713 +chilling +insignificant +pcs +##uis +elites +lina +purported +supplemental +teaming +##americana +##dding +##inton +proficient +rouen +##nage +##rret +niccolo +selects +##bread +fluffy +1621 +gruff +knotted +mukherjee +polgara +thrash +nicholls +secluded +smoothing +thru +corsica +loaf +whitaker +inquiries +##rrier +##kam +indochina +marlins +myles +peking +##tea +extracts +pastry +superhuman +connacht +vogel +##ditional +##het +##udged +##lash +gloss +quarries +refit +teaser +##alic +##gaon +20s +materialized +sling +camped +pickering +tung +tracker +pursuant +##cide +cranes +##cini +##typical +##viere +anhalt +overboard +workout +chores +fares +orphaned +stains +##logie +fenton +surpassing +joyah +triggers +##itte +grandmaster +##lass +##lists +clapping +fraudulent +ledger +nagasaki +##cor +##nosis +##tsa +eucalyptus +tun +##icio +##rney +##tara +dax +heroism +ina +wrexham +onboard +unsigned +##dates +moshe +galley +winnie +droplets +exiles +praises +watered +noodles +##aia +fein +leland +multicultural +stink +bingo +comets +erskine +modernized +canned +constraint +domestically +chemotherapy +featherweight +stifled +##mum +darkly +irresistible +refreshing +hasty +isolate +##oys +kitchener +planners +##wehr +cages +yarn +implant +toulon +elects +childbirth +yue +##lind +rightful +sportsman +junctions +remodeled +specifies +##rgh +##oons +complimented +##urgent +lister +ot +##logic +bequeathed +cheekbones +fontana +gabby +##dial +amadeus +corrugated +maverick +resented +triangles +##hered +##usly +nazareth +tyrol +1675 +assent +poorer +sectional +aegean +##cous +nylon +ghanaian +##egorical +##weig +cushions +forbid +fusiliers +obstruction +somerville +##scia +dime +earrings +elliptical +leyte +oder 
+polymers +timmy +midtown +piloted +settles +continual +externally +mayfield +##uh +enrichment +henson +keane +persians +1733 +benji +braden +pep +##efe +contenders +pepsi +valet +##isches +##asse +##earing +goofy +stroll +##amen +authoritarian +occurrences +adversary +ahmedabad +tangent +toppled +dorchester +1672 +modernism +marxism +islamist +charlemagne +exponential +racks +brunette +pic +skirmish +##bund +##lad +##powered +##yst +hoisted +messina +shatter +##ctum +jedi +vantage +##music +##neil +clemens +mahmoud +corrupted +authentication +lowry +nils +##washed +omnibus +wounding +jillian +##itors +##opped +serialized +narcotics +handheld +##arm +##plicity +intersecting +stimulating +##onis +crate +fellowships +hemingway +casinos +climatic +fordham +copeland +drip +beatty +leaflets +robber +brothel +madeira +##hedral +sphinx +ultrasound +##vana +valor +forbade +leonid +villas +##aldo +duane +marquez +##cytes +disadvantaged +forearms +kawasaki +reacts +consular +lax +uncles +uphold +##hopper +concepcion +dorsey +lass +##izan +arching +passageway +1708 +researches +tia +internationals +##graphs +##opers +distinguishes +javanese +divert +##uven +plotted +##listic +##rwin +##erik +##tify +affirmative +signifies +validation +##bson +kari +felicity +georgina +zulu +##eros +##rained +##rath +overcoming +argyll +##rbin +1734 +chiba +ratification +windy +earls +parapet +##marks +hunan +pristine +astrid +punta +##gart +brodie +##kota +##oder +malaga +minerva +rouse +##phonic +bellowed +pagoda +portals +reclamation +##gur +##odies +##⁄₄ +parentheses +quoting +allergic +palette +showcases +benefactor +heartland +nonlinear +##tness +bladed +cheerfully +scans +##ety +1666 +girlfriends +pedersen +hiram +sous +##liche +##nator +1683 +##nery +##orio +##umen +bobo +primaries +smiley +##cb +unearthed +uniformly +fis +metadata +1635 +ind +##oted +recoil +##titles +##tura +##ια +hilbert +jamestown +mcmillan +tulane +seychelles +##frid +antics +coli +fated +stucco +##grants +1654 +bulky +accolades +arrays +caledonian +carnage +optimism +puebla +##tative +##cave +enforcing +rotherham +dunlop +aeronautics +chimed +incline +zoning +archduke +hellenistic +##oses +##sions +candi +thong +##ople +magnate +rustic +##rsk +projective +slant +##offs +danes +hollis +vocalists +##ammed +congenital +contend +gesellschaft +##ocating +##pressive +douglass +quieter +##kshi +howled +salim +spontaneously +townsville +buena +southport +##bold +kato +1638 +faerie +stiffly +##vus +##rled +flawless +realising +taboo +##7th +straightening +jena +##hid +cartwright +berber +bertram +soloists +noses +coping +fission +hardin +inca +##cen +1717 +mobilized +vhf +##raf +biscuits +curate +##anial +gaunt +neighbourhoods +1540 +##abas +blanca +bypassed +sockets +behold +coincidentally +##bane +nara +shave +splinter +terrific +##arion +##erian +commonplace +juris +redwood +waistband +boxed +caitlin +fingerprints +jennie +naturalized +##ired +balfour +craters +jody +bungalow +hugely +quilt +glitter +pigeons +undertaker +bulging +constrained +##sil +##akh +assimilation +reworked +##person +persuasion +##pants +felicia +##cliff +##ulent +1732 +explodes +##dun +##inium +##zic +lyman +vulture +hog +overlook +begs +northwards +ow +spoil +##urer +fatima +favorably +accumulate +sargent +sorority +corresponded +dispersal +kochi +toned +##imi +##lita +internacional +newfound +##agger +##lynn +##rigue +booths +peanuts +##eborg +medicare +muriel +nur +##uram +crates +millennia +pajamas +worsened +##breakers +jimi +vanuatu +yawned +##udeau +carousel 
+##hony +hurdle +##ccus +##mounted +##pod +rv +##eche +airship +ambiguity +compulsion +recapture +##claiming +arthritis +##osomal +1667 +asserting +ngc +sniffing +dade +discontent +glendale +ported +##amina +defamation +rammed +##scent +fling +livingstone +##fleet +875 +apocalyptic +comrade +##lowe +cessna +eine +persecuted +subsistence +demi +hoop +reliefs +coptic +progressing +stemmed +perpetrators +1665 +priestess +##nio +dobson +ebony +rooster +itf +tortricidae +##bbon +##jian +cleanup +##jean +##øy +1721 +eighties +taxonomic +holiness +##hearted +##spar +antilles +showcasing +stabilized +##nb +gia +mascara +michelangelo +dawned +##uria +##vinsky +extinguished +fitz +grotesque +£100 +##fera +##loid +##mous +barges +neue +throbbed +cipher +johnnie +##mpt +outburst +##swick +spearheaded +administrations +heartbreak +pixels +pleasantly +##enay +lombardy +plush +##nsed +bobbie +##hly +reapers +tremor +xiang +minogue +substantive +hitch +barak +##wyl +kwan +##encia +910 +obscene +elegance +indus +surfer +bribery +conserve +##hyllum +##masters +horatio +##fat +apes +rebound +psychotic +##pour +iteration +##mium +##vani +botanic +horribly +antiques +dispose +paxton +##hli +##wg +timeless +1704 +disregard +engraver +hounds +##bau +##version +looted +uno +facilitates +groans +masjid +rutland +antibody +disqualification +decatur +footballers +quake +slacks +48th +rein +scribe +stabilize +commits +exemplary +tho +##hort +##chison +pantry +traversed +##hiti +disrepair +identifiable +vibrated +baccalaureate +csa +interviewing +##iensis +##raße +greaves +wealthiest +classed +jogged +£5 +##atal +illuminating +knicks +respecting +##uno +scrubbed +##iji +##dles +kruger +moods +growls +raider +silvia +chefs +kam +cree +percival +##terol +gunter +counterattack +defiant +henan +ze +##rasia +##riety +equivalence +submissions +##fra +##thor +bautista +mechanically +##heater +cornice +herbal +templar +##mering +outputs +ruining +ligand +renumbered +extravagant +mika +blockbuster +eta +insurrection +##ilia +darkening +ferocious +pianos +strife +kinship +##aer +melee +##anor +##iste +##oue +decidedly +weep +##jad +##missive +##ppel +puget +unease +##gnant +1629 +hammering +kassel +wessex +##lga +bromwich +egan +paranoia +utilization +##atable +##idad +contradictory +provoke +##ols +##ouring +##tangled +knesset +##very +##lette +plumbing +##sden +greensboro +occult +sniff +zev +beaming +gamer +haggard +mahal +##olt +##pins +mendes +utmost +briefing +gunnery +##gut +##pher +##zh +##rok +1679 +khalifa +sonya +##boot +principals +urbana +wiring +##liffe +##minating +##rrado +dahl +nyu +skepticism +townspeople +ithaca +lobster +somethin +##fur +##arina +##−1 +freighter +zimmerman +biceps +contractual +##herton +amend +hurrying +subconscious +##anal +meng +clermont +spawning +##eia +##lub +dignitaries +impetus +snacks +spotting +twigs +##bilis +##cz +##ouk +libertadores +nic +skylar +##aina +gustave +asean +##anum +dieter +legislatures +flirt +bromley +trolls +umar +##bbies +##tyle +blah +parc +bridgeport +crank +negligence +##nction +46th +constantin +molded +bandages +seriousness +00pm +siegel +carpets +compartments +upbeat +statehood +##dner +##edging +marko +platt +##hane +paving +##iy +1738 +abbess +impatience +limousine +nbl +lucille +mojo +nightfall +robbers +##nais +karel +brisk +calves +replicate +ascribed +telescopes +##olf +intimidated +ballast +specialization +aerodynamic +caliphate +visionary +##arded +epsilon +##aday +##onte +aggregation +auditory +boosted +reunification +kathmandu +loco +robyn 
+acknowledges +appointing +humanoid +newell +redeveloped +restraints +##tained +barbarians +chopper +1609 +italiana +##lez +##lho +investigates +wrestlemania +##anies +##bib +##falls +creaked +dragoons +gravely +minions +stupidity +volley +##harat +##week +musik +##eries +##uously +fungal +massimo +semantics +malvern +##ahl +##pee +discourage +embryo +imperialism +1910s +profoundly +##ddled +jiangsu +sparkled +stat +##holz +sweatshirt +tobin +##iction +sneered +##cheon +##oit +brit +causal +smyth +##neuve +diffuse +perrin +silvio +##ipes +##recht +detonated +iqbal +selma +##nism +##zumi +roasted +##riders +tay +##ados +##mament +##mut +##rud +completes +nipples +flavour +hirsch +##laus +calderon +sneakers +moravian +##ksha +1622 +##imeters +bodo +##isance +##pre +##ronia +anatomical +excerpt +##lke +dh +kunst +##tablished +##scoe +biomass +panted +unharmed +gael +housemates +montpellier +coa +rodents +tonic +hickory +singleton +##taro +1719 +aldo +breaststroke +dempsey +och +rocco +##cuit +merton +dissemination +midsummer +serials +##idi +haji +polynomials +enoch +prematurely +shutter +taunton +£3 +##grating +##inates +archangel +harassed +##asco +archway +dazzling +##ecin +1736 +sumo +wat +##kovich +1086 +honneur +##ently +##nostic +##ttal +##idon +1605 +1716 +rents +##gnan +hires +##ikh +##dant +howie +##rons +handler +retracted +shocks +1632 +arun +duluth +kepler +trumpeter +##lary +peeking +seasoned +trooper +##mara +laszlo +##iciencies +##rti +heterosexual +##inatory +indira +jogging +##inga +##lism +beit +dissatisfaction +malice +##ately +nedra +peeling +##rgeon +47th +stadiums +vertigo +##ains +iced +restroom +##plify +##tub +illustrating +pear +##chner +##sibility +inorganic +rappers +receipts +watery +##kura +lucinda +##oulos +reintroduced +##8th +##tched +gracefully +saxons +nutritional +wastewater +rained +favourites +bedrock +fisted +hallways +likeness +upscale +##lateral +1580 +blinds +prequel +##pps +##tama +deter +humiliating +restraining +tn +vents +1659 +laundering +recess +rosary +tractors +coulter +federer +##ifiers +##plin +persistence +##quitable +geschichte +pendulum +quakers +##beam +bassett +pictorial +koln +##sitor +drills +reciprocal +shooters +##cton +##tees +converge +pip +dmitri +donnelly +yamamoto +aqua +azores +demographics +hypnotic +spitfire +suspend +wryly +roderick +##rran +sebastien +##asurable +mavericks +##fles +himalayan +prodigy +##iance +transvaal +demonstrators +handcuffs +dodged +mcnamara +sublime +1726 +crazed +##efined +##till +ivo +pondered +reconciled +shrill +sava +##duk +bal +heresy +jaipur +goran +##nished +lux +shelly +whitehall +##hre +israelis +peacekeeping +##wled +1703 +demetrius +ousted +##arians +##zos +beale +anwar +backstroke +raged +shrinking +cremated +##yck +benign +towing +wadi +darmstadt +landfill +parana +soothe +colleen +sidewalks +mayfair +tumble +hepatitis +ferrer +superstructure +##gingly +##urse +##wee +anthropological +translators +##mies +closeness +hooves +##pw +mondays +##roll +##vita +landscaping +##urized +purification +sock +thorns +thwarted +jalan +tiberius +##taka +saline +##rito +confidently +khyber +sculptors +##ij +brahms +hammersmith +inspectors +battista +fivb +fragmentation +hackney +##uls +arresting +exercising +antoinette +bedfordshire +##zily +dyed +##hema +1656 +racetrack +variability +##tique +1655 +austrians +deteriorating +madman +theorists +aix +lehman +weathered +1731 +decreed +eruptions +1729 +flaw +quinlan +sorbonne +flutes +nunez +1711 +adored +downwards +fable +rasped +1712 +moritz +mouthful 
+renegade +shivers +stunts +dysfunction +restrain +translit +pancakes +##avio +##cision +##tray +vial +##lden +bain +##maid +##oxide +chihuahua +malacca +vimes +##rba +##rnier +1664 +donnie +plaques +##ually +bangs +floppy +huntsville +loretta +nikolay +##otte +eater +handgun +ubiquitous +##hett +eras +zodiac +1634 +##omorphic +1820s +##zog +cochran +##bula +##lithic +warring +##rada +dalai +excused +blazers +mcconnell +reeling +este +##abi +geese +hoax +taxon +##bla +guitarists +condemning +hunts +inversion +moffat +taekwondo +##lvis +1624 +stammered +##rest +##rzy +sousa +fundraiser +marylebone +navigable +uptown +cabbage +daniela +salman +shitty +whimper +##kian +##utive +programmers +protections +##rmi +##rued +forceful +##enes +fuss +##tao +##wash +brat +oppressive +reykjavik +spartak +ticking +##inkles +##kiewicz +adolph +horst +maui +protege +straighten +cpc +landau +concourse +clements +resultant +##ando +imaginative +joo +reactivated +##rem +##ffled +##uising +consultative +##guide +flop +kaitlyn +mergers +parenting +somber +##vron +supervise +vidhan +##imum +courtship +exemplified +harmonies +medallist +refining +##rrow +##ка +amara +##hum +goalscorer +sited +overshadowed +rohan +displeasure +secretive +multiplied +osman +##orth +engravings +padre +##kali +##veda +miniatures +mis +##yala +clap +pali +rook +##cana +1692 +57th +antennae +astro +oskar +1628 +bulldog +crotch +hackett +yucatan +##sure +amplifiers +brno +ferrara +migrating +##gree +thanking +turing +##eza +mccann +ting +andersson +onslaught +gaines +ganga +incense +standardization +##mation +sentai +scuba +stuffing +turquoise +waivers +alloys +##vitt +regaining +vaults +##clops +##gizing +digger +furry +memorabilia +probing +##iad +payton +rec +deutschland +filippo +opaque +seamen +zenith +afrikaans +##filtration +disciplined +inspirational +##merie +banco +confuse +grafton +tod +##dgets +championed +simi +anomaly +biplane +##ceptive +electrode +##para +1697 +cleavage +crossbow +swirl +informant +##lars +##osta +afi +bonfire +spec +##oux +lakeside +slump +##culus +##lais +##qvist +##rrigan +1016 +facades +borg +inwardly +cervical +pointedly +stabilization +##odon +chests +1699 +hacked +ctv +orthogonal +suzy +##lastic +gaulle +jacobite +rearview +##erted +ashby +##drik +##igate +##mise +##zbek +affectionately +canine +disperse +latham +##istles +##ivar +spielberg +##orin +##idium +ezekiel +cid +##sg +durga +middletown +##cina +customized +frontiers +harden +##etano +##zzy +1604 +bolsheviks +coloration +yoko +##bedo +briefs +slabs +debra +liquidation +plumage +##oin +blossoms +dementia +subsidy +1611 +proctor +relational +jerseys +parochial +ter +##ici +esa +peshawar +cavalier +loren +idiots +shamrock +1646 +dutton +malabar +mustache +##endez +##ocytes +referencing +terminates +marche +yarmouth +##sop +acton +mated +seton +subtly +baptised +beige +extremes +jolted +kristina +telecast +##actic +safeguard +waldo +##baldi +##bular +endeavors +sloppy +subterranean +##ensburg +##itung +delicately +pigment +tq +##scu +1626 +collisions +coveted +herds +##personal +##meister +##nberger +chopra +##ricting +abnormalities +defective +galician +lucie +##dilly +alligator +likened +##genase +burundi +clears +complexion +derelict +deafening +diablo +fingered +champaign +dogg +enlist +isotope +labeling +mrna +##erre +brilliance +marvelous +##ayo +1652 +crawley +ether +footed +dwellers +deserts +hamish +rubs +warlock +skimmed +##lizer +buick +embark +heraldic +irregularities +##ajan +kiara +##kulam +##ieg +antigen +kowalski +##lge 
+oakley +visitation +##mbit +vt +##suit +1570 +murderers +##miento +##rites +chimneys +##sling +condemn +custer +exchequer +havre +##ghi +fluctuations +##rations +dfb +hendricks +vaccines +##tarian +nietzsche +biking +juicy +##duced +brooding +scrolling +selangor +##ragan +annum +boomed +seminole +sugarcane +##dna +departmental +dismissing +innsbruck +arteries +ashok +batavia +daze +kun +overtook +##rga +##tlan +beheaded +gaddafi +holm +electronically +faulty +galilee +fractures +kobayashi +##lized +gunmen +magma +aramaic +mala +eastenders +inference +messengers +bf +##qu +bathrooms +##vere +1658 +flashbacks +ideally +misunderstood +##jali +##weather +mendez +##grounds +uncanny +##iii +1709 +friendships +##nbc +sacrament +accommodated +reiterated +logistical +pebbles +thumped +##escence +administering +decrees +drafts +##flight +##cased +##tula +futuristic +picket +intimidation +winthrop +##fahan +interfered +afar +francoise +morally +uta +cochin +croft +dwarfs +##bruck +##dents +##nami +biker +##hner +##meral +##isen +##ometric +##pres +##ан +brightened +meek +parcels +securely +gunners +##jhl +##zko +agile +hysteria +##lten +##rcus +bukit +champs +chevy +cuckoo +leith +sadler +theologians +welded +##section +1663 +plurality +xander +##rooms +##formed +shredded +temps +intimately +pau +tormented +##lok +##stellar +1618 +charred +essen +##mmel +alarms +spraying +ascot +blooms +twinkle +##abia +##apes +internment +obsidian +##chaft +snoop +##dav +##ooping +malibu +##tension +quiver +##itia +hays +mcintosh +travers +walsall +##ffie +1623 +beverley +schwarz +plunging +structurally +rosenthal +vikram +##tsk +ghz +##onda +##tiv +chalmers +groningen +pew +reckon +unicef +##rvis +55th +##gni +1651 +sulawesi +avila +cai +metaphysical +screwing +turbulence +##mberg +augusto +samba +56th +baffled +momentary +toxin +##urian +##wani +aachen +condoms +dali +steppe +##oed +##year +adolescence +dauphin +electrically +inaccessible +microscopy +nikita +##ega +atv +##enter +##oles +##oteric +accountants +punishments +wrongly +bribes +adventurous +clinch +flinders +southland +##hem +##kata +gough +##ciency +lads +soared +##ה +undergoes +deformation +outlawed +rubbish +##arus +##mussen +##nidae +##rzburg +arcs +##ingdon +##tituted +1695 +wheelbase +wheeling +bombardier +campground +zebra +##lices +##oj +##bain +lullaby +##ecure +donetsk +wylie +grenada +##arding +##ης +squinting +eireann +opposes +##andra +maximal +runes +##broken +##cuting +##iface +##ror +##rosis +additive +britney +adultery +triggering +##drome +detrimental +aarhus +containment +jc +swapped +vichy +##ioms +madly +##oric +##rag +brant +##ckey +1560 +1612 +broughton +rustling +##stems +##uder +asbestos +mentoring +##nivorous +finley +leaps +##isan +apical +pry +slits +substitutes +##dict +intuitive +fantasia +insistent +unreasonable +##igen +##vna +domed +hannover +margot +ponder +##zziness +impromptu +jian +rampage +stemming +##eft +andrey +gerais +whichever +amnesia +appropriated +anzac +clicks +modifying +ultimatum +cambrian +maids +verve +yellowstone +##mbs +conservatoire +##scribe +adherence +dinners +spectra +imperfect +mysteriously +sidekick +tatar +tuba +##aks +##ifolia +distrust +##athan +##zle +ronin +zac +##pse +celaena +instrumentalist +scents +skopje +##mbling +comical +compensated +vidal +condor +intersect +jingle +wavelengths +##urrent +mcqueen +##izzly +carp +weasel +militias +postdoctoral +eugen +gunslinger +##ɛ +faux +hospice +##for +appalled +derivation +dwarves +##elis +dilapidated +##folk +astoria +philology +##lwyn 
+##otho +##saka +inducing +philanthropy +##bf +##itative +geek +markedly +##yce +bessie +indices +##flict +frowns +resolving +weightlifting +tugs +cleric +contentious +1653 +mania +rms +##miya +##reate +##ruck +##tucket +bien +eels +marek +##ayton +##cence +discreet +unofficially +##ife +leaks +##bber +1705 +dung +compressor +hillsborough +pandit +shillings +distal +##skin +##tat +nosed +##nir +mangrove +undeveloped +##idia +textures +##inho +##rise +irritating +nay +amazingly +bancroft +apologetic +compassionate +kata +symphonies +##lovic +airspace +##lch +gifford +precautions +fulfillment +sevilla +vulgar +martinique +##urities +looting +piccolo +tidy +##dermott +quadrant +armchair +incomes +mathematicians +stampede +nilsson +##inking +##scan +foo +quarterfinal +##ostal +shang +shouldered +squirrels +##owe +vinegar +##bner +##rchy +##systems +delaying +##trics +ars +dwyer +rhapsody +sponsoring +##gration +bipolar +cinder +starters +##olio +##urst +signage +##nty +aground +figurative +mons +acquaintances +duets +erroneously +soyuz +elliptic +recreated +##cultural +##quette +##ssed +##tma +##zcz +moderator +scares +##itaire +##stones +##udence +juniper +sighting +##just +##nsen +britten +calabria +ry +bop +cramer +forsyth +stillness +airmen +gathers +unfit +##umber +##upt +taunting +seeker +streamlined +##bution +holster +schumann +tread +vox +##gano +##onzo +strive +dil +reforming +covent +newbury +predicting +##orro +decorate +tre +##puted +andover +asahi +dept +dunkirk +gills +##tori +buren +huskies +##stis +##stov +abstracts +bets +loosen +##opa +1682 +yearning +##glio +##sir +berman +effortlessly +enamel +napoli +persist +##peration +##uez +attache +elisa +invitations +##kic +accelerating +reindeer +boardwalk +clutches +nelly +polka +##kei +adamant +huey +lough +unbroken +adventurer +embroidery +inspecting +stanza +##ducted +naia +taluka +##pone +##roids +chases +deprivation +florian +##ppet +earthly +##lib +##ssee +colossal +foreigner +vet +freaks +patrice +rosewood +triassic +upstate +##pkins +dominates +ata +chants +ks +vo +##bley +##raya +##rmed +agra +infiltrate +##ailing +##ilation +##tzer +##uppe +##werk +binoculars +enthusiast +fujian +squeak +##avs +abolitionist +almeida +boredom +hampstead +marsden +rations +##ands +inflated +bonuses +rosalie +patna +##rco +detachments +penitentiary +54th +flourishing +woolf +##dion +##etched +papyrus +##lster +##nsor +##toy +bobbed +dismounted +endelle +inhuman +motorola +wince +wreath +##ticus +hideout +inspections +sanjay +disgrace +infused +pudding +stalks +##urbed +arsenic +leases +##hyl +##rrard +collarbone +##waite +##wil +dowry +##bant +##edance +genealogical +nitrate +salamanca +scandals +thyroid +necessitated +##` +##¡ +##¢ +##¦ +##¨ +##ª +##¬ +##´ +##¶ +##¾ +##¿ +##ð +##þ +##ħ +##œ +##ƒ +##ɐ +##ɑ +##ɒ +##ɕ +##ɣ +##ɨ +##ɪ +##ɫ +##ɬ +##ɯ +##ɲ +##ɴ +##ɹ +##ɾ +##ʀ +##ʁ +##ʂ +##ʃ +##ʉ +##ʊ +##ʋ +##ʌ +##ʎ +##ʐ +##ʑ +##ʒ +##ʔ +##ʲ +##ʳ +##ʷ +##ʸ +##ʻ +##ʼ +##ʾ +##ʿ +##ˡ +##ˣ +##ˤ +##ζ +##ξ +##щ +##ъ +##э +##ю +##ђ +##є +##ј +##љ +##њ +##ћ +##ӏ +##ա +##բ +##գ +##դ +##ե +##թ +##ի +##լ +##կ +##հ +##մ +##յ +##ն +##ո +##պ +##ս +##վ +##տ +##ր +##ւ +##ք +##־ +##א +##ב +##ג +##ד +##ו +##ז +##ח +##ט +##י +##ך +##כ +##ל +##ם +##מ +##ן +##נ +##ס +##ע +##ף +##פ +##ץ +##צ +##ק +##ר +##ש +##ת +##، +##ء +##ث +##ج +##ح +##خ +##ذ +##ز +##ش +##ص +##ض +##ط +##ظ +##غ +##ـ +##ف +##ق +##ك +##ى +##ٹ +##پ +##چ +##ک +##گ +##ں +##ھ +##ہ +##ے +##अ +##आ +##उ +##ए +##क +##ख +##ग +##च +##ज +##ट +##ड +##ण +##त +##थ +##द +##ध +##न +##प +##ब +##भ +##म +##य +##र +##ल 
+##व +##श +##ष +##स +##ह +##ा +##ि +##ी +##ो +##। +##॥ +##ং +##অ +##আ +##ই +##উ +##এ +##ও +##ক +##খ +##গ +##চ +##ছ +##জ +##ট +##ড +##ণ +##ত +##থ +##দ +##ধ +##ন +##প +##ব +##ভ +##ম +##য +##র +##ল +##শ +##ষ +##স +##হ +##া +##ি +##ী +##ে +##க +##ச +##ட +##த +##ந +##ன +##ப +##ம +##ய +##ர +##ல +##ள +##வ +##ா +##ி +##ு +##ே +##ை +##ನ +##ರ +##ಾ +##ක +##ය +##ර +##ල +##ව +##ා +##ต +##ท +##พ +##ล +##ว +##ส +##། +##ག +##ང +##ད +##ན +##པ +##བ +##མ +##འ +##ར +##ལ +##ས +##မ +##ა +##ბ +##გ +##დ +##ე +##ვ +##თ +##ი +##კ +##ლ +##მ +##ნ +##ო +##რ +##ს +##ტ +##უ +##ᄊ +##ᴬ +##ᴮ +##ᴰ +##ᴵ +##ᴺ +##ᵀ +##ᵇ +##ᵈ +##ᵖ +##ᵗ +##ᵣ +##ᵤ +##ᵥ +##ᶜ +##ᶠ +##‐ +##‑ +##‒ +##– +##— +##― +##‘ +##’ +##‚ +##“ +##” +##‡ +##… +##⁰ +##⁴ +##⁵ +##⁶ +##⁷ +##⁸ +##⁹ +##⁻ +##₅ +##₆ +##₇ +##₈ +##₉ +##₊ +##₍ +##₎ +##ₐ +##ₑ +##ₒ +##ₓ +##ₕ +##ₖ +##ₗ +##ₘ +##ₚ +##ₛ +##ₜ +##₤ +##₩ +##₱ +##₹ +##ℓ +##ℝ +##⅓ +##⅔ +##↦ +##⇄ +##⇌ +##∂ +##∅ +##∆ +##∇ +##∈ +##∗ +##∘ +##∧ +##∨ +##∪ +##⊂ +##⊆ +##⊕ +##⊗ +##☉ +##♯ +##⟨ +##⟩ +##ⱼ +##⺩ +##⺼ +##⽥ +##亻 +##宀 +##彳 +##忄 +##扌 +##氵 +##疒 +##糹 +##訁 +##辶 +##阝 +##龸 +##fi +##fl diff --git a/ComfyUI_ExtraModels/LICENSE b/ComfyUI_ExtraModels/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/ComfyUI_ExtraModels/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/ComfyUI_ExtraModels/MiaoBi/nodes.py b/ComfyUI_ExtraModels/MiaoBi/nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..83a190224dc51b249e57469e5793fb5c7fd2c13f --- /dev/null +++ b/ComfyUI_ExtraModels/MiaoBi/nodes.py @@ -0,0 +1,76 @@ +import os +import folder_paths + +import comfy.sd +import comfy.diffusers_load +from .tokenizer import MiaoBiTokenizer + +class MiaoBiCLIPLoader: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "clip_name": (folder_paths.get_filename_list("clip"),), + } + } + + RETURN_TYPES = ("CLIP",) + FUNCTION = "load_mbclip" + CATEGORY = "ExtraModels/MiaoBi" + TITLE = "MiaoBi CLIP Loader" + + def load_mbclip(self, clip_name): + clip_type = comfy.sd.CLIPType.STABLE_DIFFUSION + clip_path = folder_paths.get_full_path("clip", clip_name) + clip = comfy.sd.load_clip( + ckpt_paths=[clip_path], + embedding_directory=folder_paths.get_folder_paths("embeddings"), + clip_type=clip_type + ) + # override tokenizer + clip.tokenizer.clip_l = MiaoBiTokenizer() + return (clip,) + + +class MiaoBiDiffusersLoader: + @classmethod + def INPUT_TYPES(cls): + paths = [] + for search_path in folder_paths.get_folder_paths("diffusers"): + if os.path.exists(search_path): + for root, subdir, files in os.walk(search_path, followlinks=True): + if "model_index.json" in files: + paths.append(os.path.relpath(root, start=search_path)) + + return { + "required": { + "model_path": (paths,), + } + } + + RETURN_TYPES = ("MODEL", "CLIP", "VAE") + FUNCTION = "load_mbcheckpoint" + CATEGORY = "ExtraModels/MiaoBi" + TITLE = "MiaoBi Checkpoint Loader (Diffusers)" + + def load_mbcheckpoint(self, model_path, output_vae=True, output_clip=True): + for search_path in folder_paths.get_folder_paths("diffusers"): + if os.path.exists(search_path): + path = os.path.join(search_path, model_path) + if os.path.exists(path): + model_path = path + break + unet, clip, vae = comfy.diffusers_load.load_diffusers( + model_path, + output_vae = output_vae, + output_clip = output_clip, + embedding_directory = folder_paths.get_folder_paths("embeddings") + ) + # override tokenizer + clip.tokenizer.clip_l = MiaoBiTokenizer() + return (unet, clip, vae) + +NODE_CLASS_MAPPINGS = { + "MiaoBiCLIPLoader": MiaoBiCLIPLoader, + "MiaoBiDiffusersLoader": MiaoBiDiffusersLoader, +} \ No newline at end of file diff --git a/ComfyUI_ExtraModels/MiaoBi/tokenizer.py b/ComfyUI_ExtraModels/MiaoBi/tokenizer.py new file mode 100644 index 0000000000000000000000000000000000000000..666e8504d63ee81512e17717b89eb85dd397c61a --- /dev/null +++ b/ComfyUI_ExtraModels/MiaoBi/tokenizer.py @@ -0,0 +1,24 @@ +import os +from transformers import AutoTokenizer +from comfy.sd1_clip import SDTokenizer + +class MiaoBiTokenizer(SDTokenizer): + def __init__(self, **kwargs): + super().__init__(**kwargs) + tokenizer_path = os.path.join( + os.path.dirname(os.path.realpath(__file__)), + f"tokenizer" + ) + # remote code ok, see `clip_tokenizer_roberta.py`, no ckpt vocab + self.tokenizer = AutoTokenizer.from_pretrained(tokenizer_path, trust_remote_code=True) + + empty = self.tokenizer('')["input_ids"] + if self.tokens_start: + self.start_token = empty[0] + self.end_token = empty[1] + else: + self.start_token = None + self.end_token = empty[0] + + vocab = self.tokenizer.get_vocab() + self.inv_vocab = {v: k for k, v in vocab.items()} \ No newline at end of file diff --git a/ComfyUI_ExtraModels/MiaoBi/tokenizer/clip_tokenizer_roberta.py b/ComfyUI_ExtraModels/MiaoBi/tokenizer/clip_tokenizer_roberta.py new file mode 100644 index 
0000000000000000000000000000000000000000..165ce7a02e90bb313e6fabb802090999dc35241d --- /dev/null +++ b/ComfyUI_ExtraModels/MiaoBi/tokenizer/clip_tokenizer_roberta.py @@ -0,0 +1,246 @@ +from transformers.models.bert.tokenization_bert import * +import os + + +class CLIPTokenizerRoberta(PreTrainedTokenizer): + r""" + Construct a BERT tokenizer. Based on WordPiece. + + This tokenizer inherits from [`PreTrainedTokenizer`] which contains most of the main methods. Users should refer to + this superclass for more information regarding those methods. + + Args: + vocab_file (`str`): + File containing the vocabulary. + do_lower_case (`bool`, *optional*, defaults to `True`): + Whether or not to lowercase the input when tokenizing. + do_basic_tokenize (`bool`, *optional*, defaults to `True`): + Whether or not to do basic tokenization before WordPiece. + never_split (`Iterable`, *optional*): + Collection of tokens which will never be split during tokenization. Only has an effect when + `do_basic_tokenize=True` + unk_token (`str`, *optional*, defaults to `"[UNK]"`): + The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this + token instead. + sep_token (`str`, *optional*, defaults to `"[SEP]"`): + The separator token, which is used when building a sequence from multiple sequences, e.g. two sequences for + sequence classification or for a text and a question for question answering. It is also used as the last + token of a sequence built with special tokens. + pad_token (`str`, *optional*, defaults to `"[PAD]"`): + The token used for padding, for example when batching sequences of different lengths. + cls_token (`str`, *optional*, defaults to `"[CLS]"`): + The classifier token which is used when doing sequence classification (classification of the whole sequence + instead of per-token classification). It is the first token of the sequence when built with special tokens. + mask_token (`str`, *optional*, defaults to `"[MASK]"`): + The token used for masking values. This is the token used when training this model with masked language + modeling. This is the token which the model will try to predict. + tokenize_chinese_chars (`bool`, *optional*, defaults to `True`): + Whether or not to tokenize Chinese characters. + + This should likely be deactivated for Japanese (see this + [issue](https://github.com/huggingface/transformers/issues/328)). + strip_accents (`bool`, *optional*): + Whether or not to strip all accents. If this option is not specified, then it will be determined by the + value for `lowercase` (as in the original BERT). + """ + + vocab_files_names = VOCAB_FILES_NAMES + #pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP + #pretrained_init_configuration = PRETRAINED_INIT_CONFIGURATION + #max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES + + def __init__( + self, + vocab_file, + do_lower_case=True, + do_basic_tokenize=True, + never_split=None, + unk_token="[UNK]", + sep_token="[SEP]", + pad_token="[PAD]", + cls_token="[CLS]", + mask_token="[MASK]", + tokenize_chinese_chars=True, + strip_accents=None, + **kwargs + ): + if not os.path.isfile(vocab_file): + raise ValueError( + f"Can't find a vocabulary file at path '{vocab_file}'. 
To load the vocabulary from a Google pretrained" + " model use `tokenizer = BertTokenizer.from_pretrained(PRETRAINED_MODEL_NAME)`" + ) + self.vocab = load_vocab(vocab_file) + self.ids_to_tokens = collections.OrderedDict([(ids, tok) for tok, ids in self.vocab.items()]) + self.do_basic_tokenize = do_basic_tokenize + if do_basic_tokenize: + self.basic_tokenizer = BasicTokenizer( + do_lower_case=do_lower_case, + never_split=never_split, + tokenize_chinese_chars=tokenize_chinese_chars, + strip_accents=strip_accents, + ) + self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab, unk_token=str(unk_token)) + + super().__init__( + do_lower_case=do_lower_case, + do_basic_tokenize=do_basic_tokenize, + never_split=never_split, + unk_token=unk_token, + sep_token=sep_token, + pad_token=pad_token, + cls_token=cls_token, + mask_token=mask_token, + tokenize_chinese_chars=tokenize_chinese_chars, + strip_accents=strip_accents, + **kwargs, + ) + + @property + def do_lower_case(self): + return self.basic_tokenizer.do_lower_case + + @property + def vocab_size(self): + return len(self.vocab) + + def get_vocab(self): + return dict(self.vocab, **self.added_tokens_encoder) + + def _tokenize(self, text): + split_tokens = [] + if self.do_basic_tokenize: + for token in self.basic_tokenizer.tokenize(text, never_split=self.all_special_tokens): + + # If the token is part of the never_split set + if token in self.basic_tokenizer.never_split: + split_tokens.append(token) + else: + split_tokens += self.wordpiece_tokenizer.tokenize(token) + else: + split_tokens = self.wordpiece_tokenizer.tokenize(text) + return split_tokens + + def _convert_token_to_id(self, token): + """Converts a token (str) in an id using the vocab.""" + return self.vocab.get(token, self.vocab.get(self.unk_token)) + + def _convert_id_to_token(self, index): + """Converts an index (integer) in a token (str) using the vocab.""" + return self.ids_to_tokens.get(index, self.unk_token) + + def convert_tokens_to_string(self, tokens): + """Converts a sequence of tokens (string) in a single string.""" + out_string = " ".join(tokens).replace(" ##", "").strip() + return out_string + + def build_inputs_with_special_tokens( + self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None + ) -> List[int]: + """ + Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and + adding special tokens. A BERT sequence has the following format: + + - single sequence: `[CLS] X [SEP]` + - pair of sequences: `[CLS] A [SEP] B [SEP]` + + Args: + token_ids_0 (`List[int]`): + List of IDs to which the special tokens will be added. + token_ids_1 (`List[int]`, *optional*): + Optional second list of IDs for sequence pairs. + + Returns: + `List[int]`: List of [input IDs](../glossary#input-ids) with the appropriate special tokens. + """ + sep = [49407] + cls = [49406] + + if token_ids_1 is None: + return cls + token_ids_0 + sep + # return [self.cls_token_id] + token_ids_0 + [self.sep_token_id] + # cls = [self.cls_token_id] + # sep = [self.sep_token_id] + + return cls + token_ids_0 + sep + token_ids_1 + sep + + def get_special_tokens_mask( + self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None, + already_has_special_tokens: bool = False + ) -> List[int]: + """ + Retrieve sequence ids from a token list that has no special tokens added. This method is called when adding + special tokens using the tokenizer `prepare_for_model` method. + + Args: + token_ids_0 (`List[int]`): + List of IDs. 
+ token_ids_1 (`List[int]`, *optional*): + Optional second list of IDs for sequence pairs. + already_has_special_tokens (`bool`, *optional*, defaults to `False`): + Whether or not the token list is already formatted with special tokens for the model. + + Returns: + `List[int]`: A list of integers in the range [0, 1]: 1 for a special token, 0 for a sequence token. + """ + + if already_has_special_tokens: + return super().get_special_tokens_mask( + token_ids_0=token_ids_0, token_ids_1=token_ids_1, already_has_special_tokens=True + ) + + if token_ids_1 is not None: + return [1] + ([0] * len(token_ids_0)) + [1] + ([0] * len(token_ids_1)) + [1] + return [1] + ([0] * len(token_ids_0)) + [1] + + def create_token_type_ids_from_sequences( + self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None + ) -> List[int]: + """ + Create a mask from the two sequences passed to be used in a sequence-pair classification task. A BERT sequence + pair mask has the following format: + + ``` + 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 + | first sequence | second sequence | + ``` + + If `token_ids_1` is `None`, this method only returns the first portion of the mask (0s). + + Args: + token_ids_0 (`List[int]`): + List of IDs. + token_ids_1 (`List[int]`, *optional*): + Optional second list of IDs for sequence pairs. + + Returns: + `List[int]`: List of [token type IDs](../glossary#token-type-ids) according to the given sequence(s). + """ + # sep = [self.sep_token_id] + # cls = [self.cls_token_id] + sep = [49407] + cls = [49406] + if token_ids_1 is None: + return len(cls + token_ids_0 + sep) * [0] + return len(cls + token_ids_0 + sep) * [0] + len(token_ids_1 + sep) * [1] + + def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]: + index = 0 + if os.path.isdir(save_directory): + vocab_file = os.path.join( + save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["vocab_file"] + ) + else: + vocab_file = (filename_prefix + "-" if filename_prefix else "") + save_directory + with open(vocab_file, "w", encoding="utf-8") as writer: + for token, token_index in sorted(self.vocab.items(), key=lambda kv: kv[1]): + if index != token_index: + logger.warning( + f"Saving vocabulary to {vocab_file}: vocabulary indices are not consecutive." + " Please check that the vocabulary is not corrupted!" 
+ ) + index = token_index + writer.write(token + "\n") + index += 1 + return (vocab_file,) + + diff --git a/ComfyUI_ExtraModels/MiaoBi/tokenizer/special_tokens_map.json b/ComfyUI_ExtraModels/MiaoBi/tokenizer/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..9bbecc17cabbcbd3112c14d6982b51403b264bfa --- /dev/null +++ b/ComfyUI_ExtraModels/MiaoBi/tokenizer/special_tokens_map.json @@ -0,0 +1,37 @@ +{ + "cls_token": { + "content": "[CLS]", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "mask_token": { + "content": "[MASK]", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "pad_token": { + "content": "[PAD]", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "sep_token": { + "content": "[SEP]", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "unk_token": { + "content": "[UNK]", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } +} diff --git a/ComfyUI_ExtraModels/MiaoBi/tokenizer/tokenizer_config.json b/ComfyUI_ExtraModels/MiaoBi/tokenizer/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..8ed12ea310c24f8e45357a482ccad7fba998747c --- /dev/null +++ b/ComfyUI_ExtraModels/MiaoBi/tokenizer/tokenizer_config.json @@ -0,0 +1,64 @@ +{ + "added_tokens_decoder": { + "0": { + "content": "[PAD]", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "100": { + "content": "[UNK]", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "101": { + "content": "[CLS]", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "102": { + "content": "[SEP]", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "103": { + "content": "[MASK]", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + } + }, + "auto_map": { + "AutoTokenizer": [ + "clip_tokenizer_roberta.CLIPTokenizerRoberta", + null + ] + }, + "clean_up_tokenization_spaces": true, + "cls_token": "[CLS]", + "do_basic_tokenize": true, + "do_lower_case": true, + "mask_token": "[MASK]", + "model_max_length": 77, + "never_split": null, + "pad_token": "[PAD]", + "sep_token": "[SEP]", + "strip_accents": null, + "tokenize_chinese_chars": true, + "tokenizer_class": "CLIPTokenizerRoberta", + "unk_token": "[UNK]", + "use_fast": true +} diff --git a/ComfyUI_ExtraModels/MiaoBi/tokenizer/vocab.txt b/ComfyUI_ExtraModels/MiaoBi/tokenizer/vocab.txt new file mode 100644 index 0000000000000000000000000000000000000000..ca4f9781030019ab9b253c6dcb8c7878b6dc87a5 --- /dev/null +++ b/ComfyUI_ExtraModels/MiaoBi/tokenizer/vocab.txt @@ -0,0 +1,21128 @@ +[PAD] +[unused1] +[unused2] +[unused3] +[unused4] +[unused5] +[unused6] +[unused7] +[unused8] +[unused9] +[unused10] +[unused11] +[unused12] +[unused13] +[unused14] +[unused15] +[unused16] +[unused17] +[unused18] +[unused19] +[unused20] +[unused21] +[unused22] +[unused23] +[unused24] +[unused25] +[unused26] +[unused27] +[unused28] +[unused29] +[unused30] +[unused31] +[unused32] +[unused33] +[unused34] +[unused35] +[unused36] +[unused37] +[unused38] +[unused39] +[unused40] +[unused41] +[unused42] +[unused43] +[unused44] +[unused45] +[unused46] +[unused47] +[unused48] +[unused49] 
+[unused50] +[unused51] +[unused52] +[unused53] +[unused54] +[unused55] +[unused56] +[unused57] +[unused58] +[unused59] +[unused60] +[unused61] +[unused62] +[unused63] +[unused64] +[unused65] +[unused66] +[unused67] +[unused68] +[unused69] +[unused70] +[unused71] +[unused72] +[unused73] +[unused74] +[unused75] +[unused76] +[unused77] +[unused78] +[unused79] +[unused80] +[unused81] +[unused82] +[unused83] +[unused84] +[unused85] +[unused86] +[unused87] +[unused88] +[unused89] +[unused90] +[unused91] +[unused92] +[unused93] +[unused94] +[unused95] +[unused96] +[unused97] +[unused98] +[unused99] +[UNK] +[CLS] +[SEP] +[MASK] + + +! +" +# +$ +% +& +' +( +) +* ++ +, +- +. +/ +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +: +; +< += +> +? +@ +[ +\ +] +^ +_ +a +b +c +d +e +f +g +h +i +j +k +l +m +n +o +p +q +r +s +t +u +v +w +x +y +z +{ +| +} +~ +£ +¤ +¥ +§ +© +« +® +° +± +² +³ +µ +· +¹ +º +» +¼ +× +ß +æ +÷ +ø +đ +ŋ +ɔ +ə +ɡ +ʰ +ˇ +ˈ +ˊ +ˋ +ˍ +ː +˙ +˚ +ˢ +α +β +γ +δ +ε +η +θ +ι +κ +λ +μ +ν +ο +π +ρ +ς +σ +τ +υ +φ +χ +ψ +ω +а +б +в +г +д +е +ж +з +и +к +л +м +н +о +п +р +с +т +у +ф +х +ц +ч +ш +ы +ь +я +і +ا +ب +ة +ت +د +ر +س +ع +ل +م +ن +ه +و +ي +۩ +ก +ง +น +ม +ย +ร +อ +า +เ +๑ +་ +ღ +ᄀ +ᄁ +ᄂ +ᄃ +ᄅ +ᄆ +ᄇ +ᄈ +ᄉ +ᄋ +ᄌ +ᄎ +ᄏ +ᄐ +ᄑ +ᄒ +ᅡ +ᅢ +ᅣ +ᅥ +ᅦ +ᅧ +ᅨ +ᅩ +ᅪ +ᅬ +ᅭ +ᅮ +ᅯ +ᅲ +ᅳ +ᅴ +ᅵ +ᆨ +ᆫ +ᆯ +ᆷ +ᆸ +ᆺ +ᆻ +ᆼ +ᗜ +ᵃ +ᵉ +ᵍ +ᵏ +ᵐ +ᵒ +ᵘ +‖ +„ +† +• +‥ +‧ +
 +‰ +′ +″ +‹ +› +※ +‿ +⁄ +ⁱ +⁺ +ⁿ +₁ +₂ +₃ +₄ +€ +℃ +№ +™ +ⅰ +ⅱ +ⅲ +ⅳ +ⅴ +← +↑ +→ +↓ +↔ +↗ +↘ +⇒ +∀ +− +∕ +∙ +√ +∞ +∟ +∠ +∣ +∥ +∩ +∮ +∶ +∼ +∽ +≈ +≒ +≡ +≤ +≥ +≦ +≧ +≪ +≫ +⊙ +⋅ +⋈ +⋯ +⌒ +① +② +③ +④ +⑤ +⑥ +⑦ +⑧ +⑨ +⑩ +⑴ +⑵ +⑶ +⑷ +⑸ +⒈ +⒉ +⒊ +⒋ +ⓒ +ⓔ +ⓘ +─ +━ +│ +┃ +┅ +┆ +┊ +┌ +└ +├ +┣ +═ +║ +╚ +╞ +╠ +╭ +╮ +╯ +╰ +╱ +╳ +▂ +▃ +▅ +▇ +█ +▉ +▋ +▌ +▍ +▎ +■ +□ +▪ +▫ +▬ +▲ +△ +▶ +► +▼ +▽ +◆ +◇ +○ +◎ +● +◕ +◠ +◢ +◤ +☀ +★ +☆ +☕ +☞ +☺ +☼ +♀ +♂ +♠ +♡ +♣ +♥ +♦ +♪ +♫ +♬ +✈ +✔ +✕ +✖ +✦ +✨ +✪ +✰ +✿ +❀ +❤ +➜ +➤ +⦿ +、 +。 +〃 +々 +〇 +〈 +〉 +《 +》 +「 +」 +『 +』 +【 +】 +〓 +〔 +〕 +〖 +〗 +〜 +〝 +〞 +ぁ +あ +ぃ +い +う +ぇ +え +お +か +き +く +け +こ +さ +し +す +せ +そ +た +ち +っ +つ +て +と +な +に +ぬ +ね +の +は +ひ +ふ +へ +ほ +ま +み +む +め +も +ゃ +や +ゅ +ゆ +ょ +よ +ら +り +る +れ +ろ +わ +を +ん +゜ +ゝ +ァ +ア +ィ +イ +ゥ +ウ +ェ +エ +ォ +オ +カ +キ +ク +ケ +コ +サ +シ +ス +セ +ソ +タ +チ +ッ +ツ +テ +ト +ナ +ニ +ヌ +ネ +ノ +ハ +ヒ +フ +ヘ +ホ +マ +ミ +ム +メ +モ +ャ +ヤ +ュ +ユ +ョ +ヨ +ラ +リ +ル +レ +ロ +ワ +ヲ +ン +ヶ +・ +ー +ヽ +ㄅ +ㄆ +ㄇ +ㄉ +ㄋ +ㄌ +ㄍ +ㄎ +ㄏ +ㄒ +ㄚ +ㄛ +ㄞ +ㄟ +ㄢ +ㄤ +ㄥ +ㄧ +ㄨ +ㆍ +㈦ +㊣ +㎡ +㗎 +一 +丁 +七 +万 +丈 +三 +上 +下 +不 +与 +丐 +丑 +专 +且 +丕 +世 +丘 +丙 +业 +丛 +东 +丝 +丞 +丟 +両 +丢 +两 +严 +並 +丧 +丨 +个 +丫 +中 +丰 +串 +临 +丶 +丸 +丹 +为 +主 +丼 +丽 +举 +丿 +乂 +乃 +久 +么 +义 +之 +乌 +乍 +乎 +乏 +乐 +乒 +乓 +乔 +乖 +乗 +乘 +乙 +乜 +九 +乞 +也 +习 +乡 +书 +乩 +买 +乱 +乳 +乾 +亀 +亂 +了 +予 +争 +事 +二 +于 +亏 +云 +互 +五 +井 +亘 +亙 +亚 +些 +亜 +亞 +亟 +亡 +亢 +交 +亥 +亦 +产 +亨 +亩 +享 +京 +亭 +亮 +亲 +亳 +亵 +人 +亿 +什 +仁 +仃 +仄 +仅 +仆 +仇 +今 +介 +仍 +从 +仏 +仑 +仓 +仔 +仕 +他 +仗 +付 +仙 +仝 +仞 +仟 +代 +令 +以 +仨 +仪 +们 +仮 +仰 +仲 +件 +价 +任 +份 +仿 +企 +伉 +伊 +伍 +伎 +伏 +伐 +休 +伕 +众 +优 +伙 +会 +伝 +伞 +伟 +传 +伢 +伤 +伦 +伪 +伫 +伯 +估 +伴 +伶 +伸 +伺 +似 +伽 +佃 +但 +佇 +佈 +位 +低 +住 +佐 +佑 +体 +佔 +何 +佗 +佘 +余 +佚 +佛 +作 +佝 +佞 +佟 +你 +佢 +佣 +佤 +佥 +佩 +佬 +佯 +佰 +佳 +併 +佶 +佻 +佼 +使 +侃 +侄 +來 +侈 +例 +侍 +侏 +侑 +侖 +侗 +供 +依 +侠 +価 +侣 +侥 +侦 +侧 +侨 +侬 +侮 +侯 +侵 +侶 +侷 +便 +係 +促 +俄 +俊 +俎 +俏 +俐 +俑 +俗 +俘 +俚 +保 +俞 +俟 +俠 +信 +俨 +俩 +俪 +俬 +俭 +修 +俯 +俱 +俳 +俸 +俺 +俾 +倆 +倉 +個 +倌 +倍 +倏 +們 +倒 +倔 +倖 +倘 +候 +倚 +倜 +借 +倡 +値 +倦 +倩 +倪 +倫 +倬 +倭 +倶 +债 +值 +倾 +偃 +假 +偈 +偉 +偌 +偎 +偏 +偕 +做 +停 +健 +側 +偵 +偶 +偷 +偻 +偽 +偿 +傀 +傅 +傍 +傑 +傘 +備 +傚 +傢 +傣 +傥 +储 +傩 +催 +傭 +傲 +傳 +債 +傷 +傻 +傾 +僅 +働 +像 +僑 +僕 +僖 +僚 +僥 +僧 +僭 +僮 +僱 +僵 +價 +僻 +儀 +儂 +億 +儆 +儉 +儋 +儒 +儕 +儘 +償 +儡 +優 +儲 +儷 +儼 +儿 +兀 +允 +元 +兄 +充 +兆 +兇 +先 +光 +克 +兌 +免 +児 +兑 +兒 +兔 +兖 +党 +兜 +兢 +入 +內 +全 +兩 +八 +公 +六 +兮 +兰 +共 +兲 +关 +兴 +兵 +其 +具 +典 +兹 +养 +兼 +兽 +冀 +内 +円 +冇 +冈 +冉 +冊 +册 +再 +冏 +冒 +冕 +冗 +写 +军 +农 +冠 +冢 +冤 +冥 +冨 +冪 +冬 +冯 +冰 +冲 +决 +况 +冶 +冷 +冻 +冼 +冽 +冾 +净 +凄 +准 +凇 +凈 +凉 +凋 +凌 +凍 +减 +凑 +凛 +凜 +凝 +几 +凡 +凤 +処 +凪 +凭 +凯 +凰 +凱 +凳 +凶 +凸 +凹 +出 +击 +函 +凿 +刀 +刁 +刃 +分 +切 +刈 +刊 +刍 +刎 +刑 +划 +列 +刘 +则 +刚 +创 +初 +删 +判 +別 +刨 +利 +刪 +别 +刮 +到 +制 +刷 +券 +刹 +刺 +刻 +刽 +剁 +剂 +剃 +則 +剉 +削 +剋 +剌 +前 +剎 +剐 +剑 +剔 +剖 +剛 +剜 +剝 +剣 +剤 +剥 +剧 +剩 +剪 +副 +割 +創 +剷 +剽 +剿 +劃 +劇 +劈 +劉 +劊 +劍 +劏 +劑 +力 +劝 +办 +功 +加 +务 +劣 +动 +助 +努 +劫 +劭 +励 +劲 +劳 +労 +劵 +効 +劾 +势 +勁 +勃 +勇 +勉 +勋 +勐 +勒 +動 +勖 +勘 +務 +勛 +勝 +勞 +募 +勢 +勤 +勧 +勳 +勵 +勸 +勺 +勻 +勾 +勿 +匀 +包 +匆 +匈 +匍 +匐 +匕 +化 +北 +匙 +匝 +匠 +匡 +匣 +匪 +匮 +匯 +匱 +匹 +区 +医 +匾 +匿 +區 +十 +千 +卅 +升 +午 +卉 +半 +卍 +华 +协 +卑 +卒 +卓 +協 +单 +卖 +南 +単 +博 +卜 +卞 +卟 +占 +卡 +卢 +卤 +卦 +卧 +卫 +卮 +卯 +印 +危 +即 +却 +卵 +卷 +卸 +卻 +卿 +厂 +厄 +厅 +历 +厉 +压 +厌 +厕 +厘 +厚 +厝 +原 +厢 +厥 +厦 +厨 +厩 +厭 +厮 +厲 +厳 +去 +县 +叁 +参 +參 +又 +叉 +及 +友 +双 +反 +収 +发 +叔 +取 +受 +变 +叙 +叛 +叟 +叠 +叡 +叢 +口 +古 +句 +另 +叨 +叩 +只 +叫 +召 +叭 +叮 +可 +台 +叱 +史 +右 +叵 +叶 +号 +司 +叹 +叻 +叼 +叽 +吁 +吃 +各 +吆 +合 +吉 +吊 +吋 +同 +名 +后 +吏 +吐 +向 +吒 +吓 +吕 +吖 +吗 +君 +吝 +吞 +吟 +吠 +吡 +否 +吧 +吨 +吩 +含 +听 +吭 +吮 +启 +吱 +吳 +吴 +吵 +吶 +吸 +吹 +吻 +吼 +吽 +吾 +呀 +呂 +呃 +呆 +呈 +告 +呋 +呎 +呐 +呓 +呕 +呗 +员 +呛 +呜 +呢 +呤 +呦 +周 +呱 +呲 +味 +呵 +呷 +呸 +呻 +呼 +命 +咀 +咁 +咂 +咄 +咆 +咋 +和 +咎 +咏 +咐 +咒 +咔 +咕 +咖 +咗 +咘 +咙 +咚 +咛 +咣 +咤 +咦 +咧 +咨 +咩 +咪 +咫 +咬 +咭 +咯 +咱 +咲 +咳 +咸 +咻 +咽 +咿 +哀 +品 +哂 +哄 +哆 +哇 +哈 +哉 +哋 +哌 +响 +哎 +哏 +哐 +哑 +哒 +哔 +哗 +哟 +員 +哥 +哦 +哧 +哨 +哩 +哪 +哭 +哮 
+哲 +哺 +哼 +哽 +唁 +唄 +唆 +唇 +唉 +唏 +唐 +唑 +唔 +唠 +唤 +唧 +唬 +售 +唯 +唰 +唱 +唳 +唷 +唸 +唾 +啃 +啄 +商 +啉 +啊 +問 +啓 +啕 +啖 +啜 +啞 +啟 +啡 +啤 +啥 +啦 +啧 +啪 +啫 +啬 +啮 +啰 +啱 +啲 +啵 +啶 +啷 +啸 +啻 +啼 +啾 +喀 +喂 +喃 +善 +喆 +喇 +喉 +喊 +喋 +喎 +喏 +喔 +喘 +喙 +喚 +喜 +喝 +喟 +喧 +喪 +喫 +喬 +單 +喰 +喱 +喲 +喳 +喵 +営 +喷 +喹 +喺 +喻 +喽 +嗅 +嗆 +嗇 +嗎 +嗑 +嗒 +嗓 +嗔 +嗖 +嗚 +嗜 +嗝 +嗟 +嗡 +嗣 +嗤 +嗦 +嗨 +嗪 +嗬 +嗯 +嗰 +嗲 +嗳 +嗶 +嗷 +嗽 +嘀 +嘅 +嘆 +嘈 +嘉 +嘌 +嘍 +嘎 +嘔 +嘖 +嘗 +嘘 +嘚 +嘛 +嘜 +嘞 +嘟 +嘢 +嘣 +嘤 +嘧 +嘩 +嘭 +嘮 +嘯 +嘰 +嘱 +嘲 +嘴 +嘶 +嘸 +嘹 +嘻 +嘿 +噁 +噌 +噎 +噓 +噔 +噗 +噙 +噜 +噠 +噢 +噤 +器 +噩 +噪 +噬 +噱 +噴 +噶 +噸 +噹 +噻 +噼 +嚀 +嚇 +嚎 +嚏 +嚐 +嚓 +嚕 +嚟 +嚣 +嚥 +嚨 +嚮 +嚴 +嚷 +嚼 +囂 +囉 +囊 +囍 +囑 +囔 +囗 +囚 +四 +囝 +回 +囟 +因 +囡 +团 +団 +囤 +囧 +囪 +囫 +园 +困 +囱 +囲 +図 +围 +囹 +固 +国 +图 +囿 +圃 +圄 +圆 +圈 +國 +圍 +圏 +園 +圓 +圖 +團 +圜 +土 +圣 +圧 +在 +圩 +圭 +地 +圳 +场 +圻 +圾 +址 +坂 +均 +坊 +坍 +坎 +坏 +坐 +坑 +块 +坚 +坛 +坝 +坞 +坟 +坠 +坡 +坤 +坦 +坨 +坪 +坯 +坳 +坵 +坷 +垂 +垃 +垄 +型 +垒 +垚 +垛 +垠 +垢 +垣 +垦 +垩 +垫 +垭 +垮 +垵 +埂 +埃 +埋 +城 +埔 +埕 +埗 +域 +埠 +埤 +埵 +執 +埸 +培 +基 +埼 +堀 +堂 +堃 +堅 +堆 +堇 +堑 +堕 +堙 +堡 +堤 +堪 +堯 +堰 +報 +場 +堵 +堺 +堿 +塊 +塌 +塑 +塔 +塗 +塘 +塚 +塞 +塢 +塩 +填 +塬 +塭 +塵 +塾 +墀 +境 +墅 +墉 +墊 +墒 +墓 +増 +墘 +墙 +墜 +增 +墟 +墨 +墩 +墮 +墳 +墻 +墾 +壁 +壅 +壆 +壇 +壊 +壑 +壓 +壕 +壘 +壞 +壟 +壢 +壤 +壩 +士 +壬 +壮 +壯 +声 +売 +壳 +壶 +壹 +壺 +壽 +处 +备 +変 +复 +夏 +夔 +夕 +外 +夙 +多 +夜 +够 +夠 +夢 +夥 +大 +天 +太 +夫 +夭 +央 +夯 +失 +头 +夷 +夸 +夹 +夺 +夾 +奂 +奄 +奇 +奈 +奉 +奋 +奎 +奏 +奐 +契 +奔 +奕 +奖 +套 +奘 +奚 +奠 +奢 +奥 +奧 +奪 +奬 +奮 +女 +奴 +奶 +奸 +她 +好 +如 +妃 +妄 +妆 +妇 +妈 +妊 +妍 +妒 +妓 +妖 +妘 +妙 +妝 +妞 +妣 +妤 +妥 +妨 +妩 +妪 +妮 +妲 +妳 +妹 +妻 +妾 +姆 +姉 +姊 +始 +姍 +姐 +姑 +姒 +姓 +委 +姗 +姚 +姜 +姝 +姣 +姥 +姦 +姨 +姪 +姫 +姬 +姹 +姻 +姿 +威 +娃 +娄 +娅 +娆 +娇 +娉 +娑 +娓 +娘 +娛 +娜 +娟 +娠 +娣 +娥 +娩 +娱 +娲 +娴 +娶 +娼 +婀 +婁 +婆 +婉 +婊 +婕 +婚 +婢 +婦 +婧 +婪 +婭 +婴 +婵 +婶 +婷 +婺 +婿 +媒 +媚 +媛 +媞 +媧 +媲 +媳 +媽 +媾 +嫁 +嫂 +嫉 +嫌 +嫑 +嫔 +嫖 +嫘 +嫚 +嫡 +嫣 +嫦 +嫩 +嫲 +嫵 +嫻 +嬅 +嬉 +嬌 +嬗 +嬛 +嬢 +嬤 +嬪 +嬰 +嬴 +嬷 +嬸 +嬿 +孀 +孃 +子 +孑 +孔 +孕 +孖 +字 +存 +孙 +孚 +孛 +孜 +孝 +孟 +孢 +季 +孤 +学 +孩 +孪 +孫 +孬 +孰 +孱 +孳 +孵 +學 +孺 +孽 +孿 +宁 +它 +宅 +宇 +守 +安 +宋 +完 +宏 +宓 +宕 +宗 +官 +宙 +定 +宛 +宜 +宝 +实 +実 +宠 +审 +客 +宣 +室 +宥 +宦 +宪 +宫 +宮 +宰 +害 +宴 +宵 +家 +宸 +容 +宽 +宾 +宿 +寂 +寄 +寅 +密 +寇 +富 +寐 +寒 +寓 +寛 +寝 +寞 +察 +寡 +寢 +寥 +實 +寧 +寨 +審 +寫 +寬 +寮 +寰 +寵 +寶 +寸 +对 +寺 +寻 +导 +対 +寿 +封 +専 +射 +将 +將 +專 +尉 +尊 +尋 +對 +導 +小 +少 +尔 +尕 +尖 +尘 +尚 +尝 +尤 +尧 +尬 +就 +尴 +尷 +尸 +尹 +尺 +尻 +尼 +尽 +尾 +尿 +局 +屁 +层 +屄 +居 +屆 +屈 +屉 +届 +屋 +屌 +屍 +屎 +屏 +屐 +屑 +展 +屜 +属 +屠 +屡 +屢 +層 +履 +屬 +屯 +山 +屹 +屿 +岀 +岁 +岂 +岌 +岐 +岑 +岔 +岖 +岗 +岘 +岙 +岚 +岛 +岡 +岩 +岫 +岬 +岭 +岱 +岳 +岷 +岸 +峇 +峋 +峒 +峙 +峡 +峤 +峥 +峦 +峨 +峪 +峭 +峯 +峰 +峴 +島 +峻 +峽 +崁 +崂 +崆 +崇 +崎 +崑 +崔 +崖 +崗 +崙 +崛 +崧 +崩 +崭 +崴 +崽 +嵇 +嵊 +嵋 +嵌 +嵐 +嵘 +嵩 +嵬 +嵯 +嶂 +嶄 +嶇 +嶋 +嶙 +嶺 +嶼 +嶽 +巅 +巍 +巒 +巔 +巖 +川 +州 +巡 +巢 +工 +左 +巧 +巨 +巩 +巫 +差 +己 +已 +巳 +巴 +巷 +巻 +巽 +巾 +巿 +币 +市 +布 +帅 +帆 +师 +希 +帐 +帑 +帕 +帖 +帘 +帚 +帛 +帜 +帝 +帥 +带 +帧 +師 +席 +帮 +帯 +帰 +帳 +帶 +帷 +常 +帼 +帽 +幀 +幂 +幄 +幅 +幌 +幔 +幕 +幟 +幡 +幢 +幣 +幫 +干 +平 +年 +并 +幸 +幹 +幺 +幻 +幼 +幽 +幾 +广 +庁 +広 +庄 +庆 +庇 +床 +序 +庐 +库 +应 +底 +庖 +店 +庙 +庚 +府 +庞 +废 +庠 +度 +座 +庫 +庭 +庵 +庶 +康 +庸 +庹 +庾 +廁 +廂 +廃 +廈 +廉 +廊 +廓 +廖 +廚 +廝 +廟 +廠 +廢 +廣 +廬 +廳 +延 +廷 +建 +廿 +开 +弁 +异 +弃 +弄 +弈 +弊 +弋 +式 +弑 +弒 +弓 +弔 +引 +弗 +弘 +弛 +弟 +张 +弥 +弦 +弧 +弩 +弭 +弯 +弱 +張 +強 +弹 +强 +弼 +弾 +彅 +彆 +彈 +彌 +彎 +归 +当 +录 +彗 +彙 +彝 +形 +彤 +彥 +彦 +彧 +彩 +彪 +彫 +彬 +彭 +彰 +影 +彷 +役 +彻 +彼 +彿 +往 +征 +径 +待 +徇 +很 +徉 +徊 +律 +後 +徐 +徑 +徒 +従 +徕 +得 +徘 +徙 +徜 +從 +徠 +御 +徨 +復 +循 +徬 +微 +徳 +徴 +徵 +德 +徹 +徼 +徽 +心 +必 +忆 +忌 +忍 +忏 +忐 +忑 +忒 +忖 +志 +忘 +忙 +応 +忠 +忡 +忤 +忧 +忪 +快 +忱 +念 +忻 +忽 +忿 +怀 +态 +怂 +怅 +怆 +怎 +怏 +怒 +怔 +怕 +怖 +怙 +怜 +思 +怠 +怡 +急 +怦 +性 +怨 +怪 +怯 +怵 +总 +怼 +恁 +恃 +恆 +恋 +恍 +恐 +恒 +恕 +恙 +恚 +恢 +恣 +恤 +恥 +恨 +恩 +恪 +恫 +恬 +恭 +息 +恰 +恳 +恵 +恶 +恸 +恺 +恻 +恼 +恿 +悄 +悅 +悉 +悌 +悍 +悔 +悖 +悚 +悟 +悠 +患 +悦 +您 +悩 +悪 +悬 +悯 +悱 +悲 +悴 +悵 +悶 +悸 +悻 +悼 +悽 +情 +惆 +惇 +惊 +惋 +惑 +惕 +惘 +惚 +惜 +惟 +惠 +惡 +惦 +惧 +惨 +惩 +惫 +惬 +惭 +惮 +惯 +惰 +惱 +想 +惴 +惶 +惹 +惺 +愁 +愆 +愈 +愉 +愍 +意 +愕 +愚 +愛 +愜 +感 +愣 +愤 +愧 +愫 +愷 +愿 +慄 +慈 +態 +慌 +慎 +慑 +慕 +慘 +慚 
+慟 +慢 +慣 +慧 +慨 +慫 +慮 +慰 +慳 +慵 +慶 +慷 +慾 +憂 +憊 +憋 +憎 +憐 +憑 +憔 +憚 +憤 +憧 +憨 +憩 +憫 +憬 +憲 +憶 +憾 +懂 +懇 +懈 +應 +懊 +懋 +懑 +懒 +懦 +懲 +懵 +懶 +懷 +懸 +懺 +懼 +懾 +懿 +戀 +戈 +戊 +戌 +戍 +戎 +戏 +成 +我 +戒 +戕 +或 +战 +戚 +戛 +戟 +戡 +戦 +截 +戬 +戮 +戰 +戲 +戳 +戴 +戶 +户 +戸 +戻 +戾 +房 +所 +扁 +扇 +扈 +扉 +手 +才 +扎 +扑 +扒 +打 +扔 +払 +托 +扛 +扣 +扦 +执 +扩 +扪 +扫 +扬 +扭 +扮 +扯 +扰 +扱 +扳 +扶 +批 +扼 +找 +承 +技 +抄 +抉 +把 +抑 +抒 +抓 +投 +抖 +抗 +折 +抚 +抛 +抜 +択 +抟 +抠 +抡 +抢 +护 +报 +抨 +披 +抬 +抱 +抵 +抹 +押 +抽 +抿 +拂 +拄 +担 +拆 +拇 +拈 +拉 +拋 +拌 +拍 +拎 +拐 +拒 +拓 +拔 +拖 +拗 +拘 +拙 +拚 +招 +拜 +拟 +拡 +拢 +拣 +拥 +拦 +拧 +拨 +择 +括 +拭 +拮 +拯 +拱 +拳 +拴 +拷 +拼 +拽 +拾 +拿 +持 +挂 +指 +挈 +按 +挎 +挑 +挖 +挙 +挚 +挛 +挝 +挞 +挟 +挠 +挡 +挣 +挤 +挥 +挨 +挪 +挫 +振 +挲 +挹 +挺 +挽 +挾 +捂 +捅 +捆 +捉 +捋 +捌 +捍 +捎 +捏 +捐 +捕 +捞 +损 +捡 +换 +捣 +捧 +捨 +捩 +据 +捱 +捲 +捶 +捷 +捺 +捻 +掀 +掂 +掃 +掇 +授 +掉 +掌 +掏 +掐 +排 +掖 +掘 +掙 +掛 +掠 +採 +探 +掣 +接 +控 +推 +掩 +措 +掬 +掰 +掲 +掳 +掴 +掷 +掸 +掺 +揀 +揃 +揄 +揆 +揉 +揍 +描 +提 +插 +揖 +揚 +換 +握 +揣 +揩 +揪 +揭 +揮 +援 +揶 +揸 +揹 +揽 +搀 +搁 +搂 +搅 +損 +搏 +搐 +搓 +搔 +搖 +搗 +搜 +搞 +搡 +搪 +搬 +搭 +搵 +搶 +携 +搽 +摀 +摁 +摄 +摆 +摇 +摈 +摊 +摒 +摔 +摘 +摞 +摟 +摧 +摩 +摯 +摳 +摸 +摹 +摺 +摻 +撂 +撃 +撅 +撇 +撈 +撐 +撑 +撒 +撓 +撕 +撚 +撞 +撤 +撥 +撩 +撫 +撬 +播 +撮 +撰 +撲 +撵 +撷 +撸 +撻 +撼 +撿 +擀 +擁 +擂 +擄 +擅 +擇 +擊 +擋 +操 +擎 +擒 +擔 +擘 +據 +擞 +擠 +擡 +擢 +擦 +擬 +擰 +擱 +擲 +擴 +擷 +擺 +擼 +擾 +攀 +攏 +攒 +攔 +攘 +攙 +攜 +攝 +攞 +攢 +攣 +攤 +攥 +攪 +攫 +攬 +支 +收 +攸 +改 +攻 +放 +政 +故 +效 +敌 +敍 +敎 +敏 +救 +敕 +敖 +敗 +敘 +教 +敛 +敝 +敞 +敢 +散 +敦 +敬 +数 +敲 +整 +敵 +敷 +數 +斂 +斃 +文 +斋 +斌 +斎 +斐 +斑 +斓 +斗 +料 +斛 +斜 +斟 +斡 +斤 +斥 +斧 +斩 +斫 +斬 +断 +斯 +新 +斷 +方 +於 +施 +旁 +旃 +旅 +旋 +旌 +旎 +族 +旖 +旗 +无 +既 +日 +旦 +旧 +旨 +早 +旬 +旭 +旮 +旱 +时 +旷 +旺 +旻 +昀 +昂 +昆 +昇 +昉 +昊 +昌 +明 +昏 +易 +昔 +昕 +昙 +星 +映 +春 +昧 +昨 +昭 +是 +昱 +昴 +昵 +昶 +昼 +显 +晁 +時 +晃 +晉 +晋 +晌 +晏 +晒 +晓 +晔 +晕 +晖 +晗 +晚 +晝 +晞 +晟 +晤 +晦 +晨 +晩 +普 +景 +晰 +晴 +晶 +晷 +智 +晾 +暂 +暄 +暇 +暈 +暉 +暌 +暐 +暑 +暖 +暗 +暝 +暢 +暧 +暨 +暫 +暮 +暱 +暴 +暸 +暹 +曄 +曆 +曇 +曉 +曖 +曙 +曜 +曝 +曠 +曦 +曬 +曰 +曲 +曳 +更 +書 +曹 +曼 +曾 +替 +最 +會 +月 +有 +朋 +服 +朐 +朔 +朕 +朗 +望 +朝 +期 +朦 +朧 +木 +未 +末 +本 +札 +朮 +术 +朱 +朴 +朵 +机 +朽 +杀 +杂 +权 +杆 +杈 +杉 +李 +杏 +材 +村 +杓 +杖 +杜 +杞 +束 +杠 +条 +来 +杨 +杭 +杯 +杰 +東 +杳 +杵 +杷 +杼 +松 +板 +极 +构 +枇 +枉 +枋 +析 +枕 +林 +枚 +果 +枝 +枢 +枣 +枪 +枫 +枭 +枯 +枰 +枱 +枳 +架 +枷 +枸 +柄 +柏 +某 +柑 +柒 +染 +柔 +柘 +柚 +柜 +柞 +柠 +柢 +查 +柩 +柬 +柯 +柱 +柳 +柴 +柵 +査 +柿 +栀 +栃 +栄 +栅 +标 +栈 +栉 +栋 +栎 +栏 +树 +栓 +栖 +栗 +校 +栩 +株 +样 +核 +根 +格 +栽 +栾 +桀 +桁 +桂 +桃 +桅 +框 +案 +桉 +桌 +桎 +桐 +桑 +桓 +桔 +桜 +桠 +桡 +桢 +档 +桥 +桦 +桧 +桨 +桩 +桶 +桿 +梁 +梅 +梆 +梏 +梓 +梗 +條 +梟 +梢 +梦 +梧 +梨 +梭 +梯 +械 +梳 +梵 +梶 +检 +棂 +棄 +棉 +棋 +棍 +棒 +棕 +棗 +棘 +棚 +棟 +棠 +棣 +棧 +森 +棱 +棲 +棵 +棹 +棺 +椁 +椅 +椋 +植 +椎 +椒 +検 +椪 +椭 +椰 +椹 +椽 +椿 +楂 +楊 +楓 +楔 +楚 +楝 +楞 +楠 +楣 +楨 +楫 +業 +楮 +極 +楷 +楸 +楹 +楼 +楽 +概 +榄 +榆 +榈 +榉 +榔 +榕 +榖 +榛 +榜 +榨 +榫 +榭 +榮 +榱 +榴 +榷 +榻 +槁 +槃 +構 +槌 +槍 +槎 +槐 +槓 +様 +槛 +槟 +槤 +槭 +槲 +槳 +槻 +槽 +槿 +樁 +樂 +樊 +樑 +樓 +標 +樞 +樟 +模 +樣 +権 +横 +樫 +樯 +樱 +樵 +樸 +樹 +樺 +樽 +樾 +橄 +橇 +橋 +橐 +橘 +橙 +機 +橡 +橢 +橫 +橱 +橹 +橼 +檀 +檄 +檎 +檐 +檔 +檗 +檜 +檢 +檬 +檯 +檳 +檸 +檻 +櫃 +櫚 +櫛 +櫥 +櫸 +櫻 +欄 +權 +欒 +欖 +欠 +次 +欢 +欣 +欧 +欲 +欸 +欺 +欽 +款 +歆 +歇 +歉 +歌 +歎 +歐 +歓 +歙 +歛 +歡 +止 +正 +此 +步 +武 +歧 +歩 +歪 +歯 +歲 +歳 +歴 +歷 +歸 +歹 +死 +歼 +殁 +殃 +殆 +殇 +殉 +殊 +残 +殒 +殓 +殖 +殘 +殞 +殡 +殤 +殭 +殯 +殲 +殴 +段 +殷 +殺 +殼 +殿 +毀 +毁 +毂 +毅 +毆 +毋 +母 +毎 +每 +毒 +毓 +比 +毕 +毗 +毘 +毙 +毛 +毡 +毫 +毯 +毽 +氈 +氏 +氐 +民 +氓 +气 +氖 +気 +氙 +氛 +氟 +氡 +氢 +氣 +氤 +氦 +氧 +氨 +氪 +氫 +氮 +氯 +氰 +氲 +水 +氷 +永 +氹 +氾 +汀 +汁 +求 +汆 +汇 +汉 +汎 +汐 +汕 +汗 +汙 +汛 +汝 +汞 +江 +池 +污 +汤 +汨 +汩 +汪 +汰 +汲 +汴 +汶 +汹 +決 +汽 +汾 +沁 +沂 +沃 +沅 +沈 +沉 +沌 +沏 +沐 +沒 +沓 +沖 +沙 +沛 +沟 +没 +沢 +沣 +沥 +沦 +沧 +沪 +沫 +沭 +沮 +沱 +河 +沸 +油 +治 +沼 +沽 +沾 +沿 +況 +泄 +泉 +泊 +泌 +泓 +法 +泗 +泛 +泞 +泠 +泡 +波 +泣 +泥 +注 +泪 +泫 +泮 +泯 +泰 +泱 +泳 +泵 +泷 +泸 +泻 +泼 +泽 +泾 +洁 +洄 +洋 +洒 +洗 +洙 +洛 +洞 +津 +洩 +洪 +洮 +洱 +洲 +洵 +洶 +洸 +洹 +活 +洼 +洽 +派 +流 +浃 +浄 +浅 +浆 +浇 +浊 +测 +济 +浏 +浑 +浒 +浓 +浔 +浙 +浚 +浜 +浣 +浦 +浩 +浪 +浬 +浮 +浯 +浴 +海 +浸 +涂 +涅 +涇 +消 +涉 +涌 +涎 +涓 +涔 +涕 +涙 +涛 +涝 +涞 +涟 +涠 +涡 +涣 +涤 +润 +涧 +涨 +涩 +涪 +涮 +涯 +液 +涵 +涸 +涼 +涿 +淀 +淄 +淅 
+淆 +淇 +淋 +淌 +淑 +淒 +淖 +淘 +淙 +淚 +淞 +淡 +淤 +淦 +淨 +淩 +淪 +淫 +淬 +淮 +深 +淳 +淵 +混 +淹 +淺 +添 +淼 +清 +済 +渉 +渊 +渋 +渍 +渎 +渐 +渔 +渗 +渙 +渚 +減 +渝 +渠 +渡 +渣 +渤 +渥 +渦 +温 +測 +渭 +港 +渲 +渴 +游 +渺 +渾 +湃 +湄 +湊 +湍 +湖 +湘 +湛 +湟 +湧 +湫 +湮 +湯 +湳 +湾 +湿 +満 +溃 +溅 +溉 +溏 +源 +準 +溜 +溝 +溟 +溢 +溥 +溧 +溪 +溫 +溯 +溱 +溴 +溶 +溺 +溼 +滁 +滂 +滄 +滅 +滇 +滋 +滌 +滑 +滓 +滔 +滕 +滙 +滚 +滝 +滞 +滟 +满 +滢 +滤 +滥 +滦 +滨 +滩 +滬 +滯 +滲 +滴 +滷 +滸 +滾 +滿 +漁 +漂 +漆 +漉 +漏 +漓 +演 +漕 +漠 +漢 +漣 +漩 +漪 +漫 +漬 +漯 +漱 +漲 +漳 +漸 +漾 +漿 +潆 +潇 +潋 +潍 +潑 +潔 +潘 +潛 +潜 +潞 +潟 +潢 +潤 +潦 +潧 +潭 +潮 +潰 +潴 +潸 +潺 +潼 +澀 +澄 +澆 +澈 +澍 +澎 +澗 +澜 +澡 +澤 +澧 +澱 +澳 +澹 +激 +濁 +濂 +濃 +濑 +濒 +濕 +濘 +濛 +濟 +濠 +濡 +濤 +濫 +濬 +濮 +濯 +濱 +濺 +濾 +瀅 +瀆 +瀉 +瀋 +瀏 +瀑 +瀕 +瀘 +瀚 +瀛 +瀝 +瀞 +瀟 +瀧 +瀨 +瀬 +瀰 +瀾 +灌 +灏 +灑 +灘 +灝 +灞 +灣 +火 +灬 +灭 +灯 +灰 +灵 +灶 +灸 +灼 +災 +灾 +灿 +炀 +炁 +炅 +炉 +炊 +炎 +炒 +炔 +炕 +炖 +炙 +炜 +炫 +炬 +炭 +炮 +炯 +炳 +炷 +炸 +点 +為 +炼 +炽 +烁 +烂 +烃 +烈 +烊 +烏 +烘 +烙 +烛 +烟 +烤 +烦 +烧 +烨 +烩 +烫 +烬 +热 +烯 +烷 +烹 +烽 +焉 +焊 +焕 +焖 +焗 +焘 +焙 +焚 +焜 +無 +焦 +焯 +焰 +焱 +然 +焼 +煅 +煉 +煊 +煌 +煎 +煒 +煖 +煙 +煜 +煞 +煤 +煥 +煦 +照 +煨 +煩 +煮 +煲 +煸 +煽 +熄 +熊 +熏 +熒 +熔 +熙 +熟 +熠 +熨 +熬 +熱 +熵 +熹 +熾 +燁 +燃 +燄 +燈 +燉 +燊 +燎 +燒 +燔 +燕 +燙 +燜 +營 +燥 +燦 +燧 +燭 +燮 +燴 +燻 +燼 +燿 +爆 +爍 +爐 +爛 +爪 +爬 +爭 +爰 +爱 +爲 +爵 +父 +爷 +爸 +爹 +爺 +爻 +爽 +爾 +牆 +片 +版 +牌 +牍 +牒 +牙 +牛 +牝 +牟 +牠 +牡 +牢 +牦 +牧 +物 +牯 +牲 +牴 +牵 +特 +牺 +牽 +犀 +犁 +犄 +犊 +犍 +犒 +犢 +犧 +犬 +犯 +状 +犷 +犸 +犹 +狀 +狂 +狄 +狈 +狎 +狐 +狒 +狗 +狙 +狞 +狠 +狡 +狩 +独 +狭 +狮 +狰 +狱 +狸 +狹 +狼 +狽 +猎 +猕 +猖 +猗 +猙 +猛 +猜 +猝 +猥 +猩 +猪 +猫 +猬 +献 +猴 +猶 +猷 +猾 +猿 +獄 +獅 +獎 +獐 +獒 +獗 +獠 +獣 +獨 +獭 +獰 +獲 +獵 +獷 +獸 +獺 +獻 +獼 +獾 +玄 +率 +玉 +王 +玑 +玖 +玛 +玟 +玠 +玥 +玩 +玫 +玮 +环 +现 +玲 +玳 +玷 +玺 +玻 +珀 +珂 +珅 +珈 +珉 +珊 +珍 +珏 +珐 +珑 +珙 +珞 +珠 +珣 +珥 +珩 +珪 +班 +珮 +珲 +珺 +現 +球 +琅 +理 +琇 +琉 +琊 +琍 +琏 +琐 +琛 +琢 +琥 +琦 +琨 +琪 +琬 +琮 +琰 +琲 +琳 +琴 +琵 +琶 +琺 +琼 +瑀 +瑁 +瑄 +瑋 +瑕 +瑗 +瑙 +瑚 +瑛 +瑜 +瑞 +瑟 +瑠 +瑣 +瑤 +瑩 +瑪 +瑯 +瑰 +瑶 +瑾 +璀 +璁 +璃 +璇 +璉 +璋 +璎 +璐 +璜 +璞 +璟 +璧 +璨 +環 +璽 +璿 +瓊 +瓏 +瓒 +瓜 +瓢 +瓣 +瓤 +瓦 +瓮 +瓯 +瓴 +瓶 +瓷 +甄 +甌 +甕 +甘 +甙 +甚 +甜 +生 +產 +産 +甥 +甦 +用 +甩 +甫 +甬 +甭 +甯 +田 +由 +甲 +申 +电 +男 +甸 +町 +画 +甾 +畀 +畅 +界 +畏 +畑 +畔 +留 +畜 +畝 +畢 +略 +畦 +番 +畫 +異 +畲 +畳 +畴 +當 +畸 +畹 +畿 +疆 +疇 +疊 +疏 +疑 +疔 +疖 +疗 +疙 +疚 +疝 +疟 +疡 +疣 +疤 +疥 +疫 +疮 +疯 +疱 +疲 +疳 +疵 +疸 +疹 +疼 +疽 +疾 +痂 +病 +症 +痈 +痉 +痊 +痍 +痒 +痔 +痕 +痘 +痙 +痛 +痞 +痠 +痢 +痣 +痤 +痧 +痨 +痪 +痫 +痰 +痱 +痴 +痹 +痺 +痼 +痿 +瘀 +瘁 +瘋 +瘍 +瘓 +瘘 +瘙 +瘟 +瘠 +瘡 +瘢 +瘤 +瘦 +瘧 +瘩 +瘪 +瘫 +瘴 +瘸 +瘾 +療 +癇 +癌 +癒 +癖 +癜 +癞 +癡 +癢 +癣 +癥 +癫 +癬 +癮 +癱 +癲 +癸 +発 +登 +發 +白 +百 +皂 +的 +皆 +皇 +皈 +皋 +皎 +皑 +皓 +皖 +皙 +皚 +皮 +皰 +皱 +皴 +皺 +皿 +盂 +盃 +盅 +盆 +盈 +益 +盎 +盏 +盐 +监 +盒 +盔 +盖 +盗 +盘 +盛 +盜 +盞 +盟 +盡 +監 +盤 +盥 +盧 +盪 +目 +盯 +盱 +盲 +直 +相 +盹 +盼 +盾 +省 +眈 +眉 +看 +県 +眙 +眞 +真 +眠 +眦 +眨 +眩 +眯 +眶 +眷 +眸 +眺 +眼 +眾 +着 +睁 +睇 +睏 +睐 +睑 +睛 +睜 +睞 +睡 +睢 +督 +睥 +睦 +睨 +睪 +睫 +睬 +睹 +睽 +睾 +睿 +瞄 +瞅 +瞇 +瞋 +瞌 +瞎 +瞑 +瞒 +瞓 +瞞 +瞟 +瞠 +瞥 +瞧 +瞩 +瞪 +瞬 +瞭 +瞰 +瞳 +瞻 +瞼 +瞿 +矇 +矍 +矗 +矚 +矛 +矜 +矢 +矣 +知 +矩 +矫 +短 +矮 +矯 +石 +矶 +矽 +矾 +矿 +码 +砂 +砌 +砍 +砒 +研 +砖 +砗 +砚 +砝 +砣 +砥 +砧 +砭 +砰 +砲 +破 +砷 +砸 +砺 +砼 +砾 +础 +硅 +硐 +硒 +硕 +硝 +硫 +硬 +确 +硯 +硼 +碁 +碇 +碉 +碌 +碍 +碎 +碑 +碓 +碗 +碘 +碚 +碛 +碟 +碣 +碧 +碩 +碰 +碱 +碳 +碴 +確 +碼 +碾 +磁 +磅 +磊 +磋 +磐 +磕 +磚 +磡 +磨 +磬 +磯 +磲 +磷 +磺 +礁 +礎 +礙 +礡 +礦 +礪 +礫 +礴 +示 +礼 +社 +祀 +祁 +祂 +祇 +祈 +祉 +祎 +祐 +祕 +祖 +祗 +祚 +祛 +祜 +祝 +神 +祟 +祠 +祢 +祥 +票 +祭 +祯 +祷 +祸 +祺 +祿 +禀 +禁 +禄 +禅 +禍 +禎 +福 +禛 +禦 +禧 +禪 +禮 +禱 +禹 +禺 +离 +禽 +禾 +禿 +秀 +私 +秃 +秆 +秉 +秋 +种 +科 +秒 +秘 +租 +秣 +秤 +秦 +秧 +秩 +秭 +积 +称 +秸 +移 +秽 +稀 +稅 +程 +稍 +税 +稔 +稗 +稚 +稜 +稞 +稟 +稠 +稣 +種 +稱 +稲 +稳 +稷 +稹 +稻 +稼 +稽 +稿 +穀 +穂 +穆 +穌 +積 +穎 +穗 +穢 +穩 +穫 +穴 +究 +穷 +穹 +空 +穿 +突 +窃 +窄 +窈 +窍 +窑 +窒 +窓 +窕 +窖 +窗 +窘 +窜 +窝 +窟 +窠 +窥 +窦 +窨 +窩 +窪 +窮 +窯 +窺 +窿 +竄 +竅 +竇 +竊 +立 +竖 +站 +竜 +竞 +竟 +章 +竣 +童 +竭 +端 +競 +竹 +竺 +竽 +竿 +笃 +笆 +笈 +笋 +笏 +笑 +笔 +笙 +笛 +笞 +笠 +符 +笨 +第 +笹 +笺 +笼 +筆 +等 +筊 +筋 +筍 +筏 +筐 +筑 +筒 +答 +策 +筛 +筝 +筠 +筱 +筲 +筵 +筷 +筹 +签 +简 +箇 +箋 +箍 +箏 +箐 +箔 +箕 +算 +箝 +管 +箩 +箫 +箭 +箱 +箴 +箸 +節 +篁 +範 +篆 +篇 +築 +篑 +篓 +篙 +篝 +篠 +篡 +篤 +篩 +篪 +篮 +篱 +篷 +簇 +簌 +簍 +簡 +簦 +簧 
+簪 +簫 +簷 +簸 +簽 +簾 +簿 +籁 +籃 +籌 +籍 +籐 +籟 +籠 +籤 +籬 +籮 +籲 +米 +类 +籼 +籽 +粄 +粉 +粑 +粒 +粕 +粗 +粘 +粟 +粤 +粥 +粧 +粪 +粮 +粱 +粲 +粳 +粵 +粹 +粼 +粽 +精 +粿 +糅 +糊 +糍 +糕 +糖 +糗 +糙 +糜 +糞 +糟 +糠 +糧 +糬 +糯 +糰 +糸 +系 +糾 +紀 +紂 +約 +紅 +紉 +紊 +紋 +納 +紐 +紓 +純 +紗 +紘 +紙 +級 +紛 +紜 +素 +紡 +索 +紧 +紫 +紮 +累 +細 +紳 +紹 +紺 +終 +絃 +組 +絆 +経 +結 +絕 +絞 +絡 +絢 +給 +絨 +絮 +統 +絲 +絳 +絵 +絶 +絹 +綁 +綏 +綑 +經 +継 +続 +綜 +綠 +綢 +綦 +綫 +綬 +維 +綱 +網 +綴 +綵 +綸 +綺 +綻 +綽 +綾 +綿 +緊 +緋 +総 +緑 +緒 +緘 +線 +緝 +緞 +締 +緣 +編 +緩 +緬 +緯 +練 +緹 +緻 +縁 +縄 +縈 +縛 +縝 +縣 +縫 +縮 +縱 +縴 +縷 +總 +績 +繁 +繃 +繆 +繇 +繋 +織 +繕 +繚 +繞 +繡 +繩 +繪 +繫 +繭 +繳 +繹 +繼 +繽 +纂 +續 +纍 +纏 +纓 +纔 +纖 +纜 +纠 +红 +纣 +纤 +约 +级 +纨 +纪 +纫 +纬 +纭 +纯 +纰 +纱 +纲 +纳 +纵 +纶 +纷 +纸 +纹 +纺 +纽 +纾 +线 +绀 +练 +组 +绅 +细 +织 +终 +绊 +绍 +绎 +经 +绑 +绒 +结 +绔 +绕 +绘 +给 +绚 +绛 +络 +绝 +绞 +统 +绡 +绢 +绣 +绥 +绦 +继 +绩 +绪 +绫 +续 +绮 +绯 +绰 +绳 +维 +绵 +绶 +绷 +绸 +绻 +综 +绽 +绾 +绿 +缀 +缄 +缅 +缆 +缇 +缈 +缉 +缎 +缓 +缔 +缕 +编 +缘 +缙 +缚 +缜 +缝 +缠 +缢 +缤 +缥 +缨 +缩 +缪 +缭 +缮 +缰 +缱 +缴 +缸 +缺 +缽 +罂 +罄 +罌 +罐 +网 +罔 +罕 +罗 +罚 +罡 +罢 +罩 +罪 +置 +罰 +署 +罵 +罷 +罹 +羁 +羅 +羈 +羊 +羌 +美 +羔 +羚 +羞 +羟 +羡 +羣 +群 +羥 +羧 +羨 +義 +羯 +羲 +羸 +羹 +羽 +羿 +翁 +翅 +翊 +翌 +翎 +習 +翔 +翘 +翟 +翠 +翡 +翦 +翩 +翰 +翱 +翳 +翹 +翻 +翼 +耀 +老 +考 +耄 +者 +耆 +耋 +而 +耍 +耐 +耒 +耕 +耗 +耘 +耙 +耦 +耨 +耳 +耶 +耷 +耸 +耻 +耽 +耿 +聂 +聆 +聊 +聋 +职 +聒 +联 +聖 +聘 +聚 +聞 +聪 +聯 +聰 +聲 +聳 +聴 +聶 +職 +聽 +聾 +聿 +肃 +肄 +肅 +肆 +肇 +肉 +肋 +肌 +肏 +肓 +肖 +肘 +肚 +肛 +肝 +肠 +股 +肢 +肤 +肥 +肩 +肪 +肮 +肯 +肱 +育 +肴 +肺 +肽 +肾 +肿 +胀 +胁 +胃 +胄 +胆 +背 +胍 +胎 +胖 +胚 +胛 +胜 +胝 +胞 +胡 +胤 +胥 +胧 +胫 +胭 +胯 +胰 +胱 +胳 +胴 +胶 +胸 +胺 +能 +脂 +脅 +脆 +脇 +脈 +脉 +脊 +脍 +脏 +脐 +脑 +脓 +脖 +脘 +脚 +脛 +脣 +脩 +脫 +脯 +脱 +脲 +脳 +脸 +脹 +脾 +腆 +腈 +腊 +腋 +腌 +腎 +腐 +腑 +腓 +腔 +腕 +腥 +腦 +腩 +腫 +腭 +腮 +腰 +腱 +腳 +腴 +腸 +腹 +腺 +腻 +腼 +腾 +腿 +膀 +膈 +膊 +膏 +膑 +膘 +膚 +膛 +膜 +膝 +膠 +膦 +膨 +膩 +膳 +膺 +膻 +膽 +膾 +膿 +臀 +臂 +臃 +臆 +臉 +臊 +臍 +臓 +臘 +臟 +臣 +臥 +臧 +臨 +自 +臬 +臭 +至 +致 +臺 +臻 +臼 +臾 +舀 +舂 +舅 +舆 +與 +興 +舉 +舊 +舌 +舍 +舎 +舐 +舒 +舔 +舖 +舗 +舛 +舜 +舞 +舟 +航 +舫 +般 +舰 +舱 +舵 +舶 +舷 +舸 +船 +舺 +舾 +艇 +艋 +艘 +艙 +艦 +艮 +良 +艰 +艱 +色 +艳 +艷 +艹 +艺 +艾 +节 +芃 +芈 +芊 +芋 +芍 +芎 +芒 +芙 +芜 +芝 +芡 +芥 +芦 +芩 +芪 +芫 +芬 +芭 +芮 +芯 +花 +芳 +芷 +芸 +芹 +芻 +芽 +芾 +苁 +苄 +苇 +苋 +苍 +苏 +苑 +苒 +苓 +苔 +苕 +苗 +苛 +苜 +苞 +苟 +苡 +苣 +若 +苦 +苫 +苯 +英 +苷 +苹 +苻 +茁 +茂 +范 +茄 +茅 +茉 +茎 +茏 +茗 +茜 +茧 +茨 +茫 +茬 +茭 +茯 +茱 +茲 +茴 +茵 +茶 +茸 +茹 +茼 +荀 +荃 +荆 +草 +荊 +荏 +荐 +荒 +荔 +荖 +荘 +荚 +荞 +荟 +荠 +荡 +荣 +荤 +荥 +荧 +荨 +荪 +荫 +药 +荳 +荷 +荸 +荻 +荼 +荽 +莅 +莆 +莉 +莊 +莎 +莒 +莓 +莖 +莘 +莞 +莠 +莢 +莧 +莪 +莫 +莱 +莲 +莴 +获 +莹 +莺 +莽 +莿 +菀 +菁 +菅 +菇 +菈 +菊 +菌 +菏 +菓 +菖 +菘 +菜 +菟 +菠 +菡 +菩 +華 +菱 +菲 +菸 +菽 +萁 +萃 +萄 +萊 +萋 +萌 +萍 +萎 +萘 +萝 +萤 +营 +萦 +萧 +萨 +萩 +萬 +萱 +萵 +萸 +萼 +落 +葆 +葉 +著 +葚 +葛 +葡 +董 +葦 +葩 +葫 +葬 +葭 +葯 +葱 +葳 +葵 +葷 +葺 +蒂 +蒋 +蒐 +蒔 +蒙 +蒜 +蒞 +蒟 +蒡 +蒨 +蒲 +蒸 +蒹 +蒻 +蒼 +蒿 +蓁 +蓄 +蓆 +蓉 +蓋 +蓑 +蓓 +蓖 +蓝 +蓟 +蓦 +蓬 +蓮 +蓼 +蓿 +蔑 +蔓 +蔔 +蔗 +蔘 +蔚 +蔡 +蔣 +蔥 +蔫 +蔬 +蔭 +蔵 +蔷 +蔺 +蔻 +蔼 +蔽 +蕁 +蕃 +蕈 +蕉 +蕊 +蕎 +蕙 +蕤 +蕨 +蕩 +蕪 +蕭 +蕲 +蕴 +蕻 +蕾 +薄 +薅 +薇 +薈 +薊 +薏 +薑 +薔 +薙 +薛 +薦 +薨 +薩 +薪 +薬 +薯 +薰 +薹 +藉 +藍 +藏 +藐 +藓 +藕 +藜 +藝 +藤 +藥 +藩 +藹 +藻 +藿 +蘆 +蘇 +蘊 +蘋 +蘑 +蘚 +蘭 +蘸 +蘼 +蘿 +虎 +虏 +虐 +虑 +虔 +處 +虚 +虛 +虜 +虞 +號 +虢 +虧 +虫 +虬 +虱 +虹 +虻 +虽 +虾 +蚀 +蚁 +蚂 +蚊 +蚌 +蚓 +蚕 +蚜 +蚝 +蚣 +蚤 +蚩 +蚪 +蚯 +蚱 +蚵 +蛀 +蛆 +蛇 +蛊 +蛋 +蛎 +蛐 +蛔 +蛙 +蛛 +蛟 +蛤 +蛭 +蛮 +蛰 +蛳 +蛹 +蛻 +蛾 +蜀 +蜂 +蜃 +蜆 +蜇 +蜈 +蜊 +蜍 +蜒 +蜓 +蜕 +蜗 +蜘 +蜚 +蜜 +蜡 +蜢 +蜥 +蜱 +蜴 +蜷 +蜻 +蜿 +蝇 +蝈 +蝉 +蝌 +蝎 +蝕 +蝗 +蝙 +蝟 +蝠 +蝦 +蝨 +蝴 +蝶 +蝸 +蝼 +螂 +螃 +融 +螞 +螢 +螨 +螯 +螳 +螺 +蟀 +蟄 +蟆 +蟋 +蟎 +蟑 +蟒 +蟠 +蟬 +蟲 +蟹 +蟻 +蟾 +蠅 +蠍 +蠔 +蠕 +蠛 +蠟 +蠡 +蠢 +蠣 +蠱 +蠶 +蠹 +蠻 +血 +衄 +衅 +衆 +行 +衍 +術 +衔 +街 +衙 +衛 +衝 +衞 +衡 +衢 +衣 +补 +表 +衩 +衫 +衬 +衮 +衰 +衲 +衷 +衹 +衾 +衿 +袁 +袂 +袄 +袅 +袈 +袋 +袍 +袒 +袖 +袜 +袞 +袤 +袪 +被 +袭 +袱 +裁 +裂 +装 +裆 +裊 +裏 +裔 +裕 +裘 +裙 +補 +裝 +裟 +裡 +裤 +裨 +裱 +裳 +裴 +裸 +裹 +製 +裾 +褂 +複 +褐 +褒 +褓 +褔 +褚 +褥 +褪 +褫 +褲 +褶 +褻 +襁 +襄 +襟 +襠 +襪 +襬 +襯 +襲 +西 +要 +覃 +覆 +覇 +見 +規 +覓 +視 +覚 +覦 +覧 +親 +覬 +観 +覷 +覺 +覽 +觀 +见 +观 +规 +觅 +视 +览 +觉 +觊 +觎 +觐 +觑 +角 +觞 +解 +觥 +触 +觸 +言 +訂 +計 +訊 +討 +訓 +訕 +訖 +託 +記 +訛 +訝 +訟 +訣 +訥 +訪 +設 +許 +訳 +訴 +訶 +診 +註 +証 +詆 +詐 +詔 
+評 +詛 +詞 +詠 +詡 +詢 +詣 +試 +詩 +詫 +詬 +詭 +詮 +詰 +話 +該 +詳 +詹 +詼 +誅 +誇 +誉 +誌 +認 +誓 +誕 +誘 +語 +誠 +誡 +誣 +誤 +誥 +誦 +誨 +說 +説 +読 +誰 +課 +誹 +誼 +調 +諄 +談 +請 +諏 +諒 +論 +諗 +諜 +諡 +諦 +諧 +諫 +諭 +諮 +諱 +諳 +諷 +諸 +諺 +諾 +謀 +謁 +謂 +謄 +謊 +謎 +謐 +謔 +謗 +謙 +講 +謝 +謠 +謨 +謬 +謹 +謾 +譁 +證 +譎 +譏 +識 +譙 +譚 +譜 +警 +譬 +譯 +議 +譲 +譴 +護 +譽 +讀 +變 +讓 +讚 +讞 +计 +订 +认 +讥 +讧 +讨 +让 +讪 +讫 +训 +议 +讯 +记 +讲 +讳 +讴 +讶 +讷 +许 +讹 +论 +讼 +讽 +设 +访 +诀 +证 +诃 +评 +诅 +识 +诈 +诉 +诊 +诋 +词 +诏 +译 +试 +诗 +诘 +诙 +诚 +诛 +话 +诞 +诟 +诠 +诡 +询 +诣 +诤 +该 +详 +诧 +诩 +诫 +诬 +语 +误 +诰 +诱 +诲 +说 +诵 +诶 +请 +诸 +诺 +读 +诽 +课 +诿 +谀 +谁 +调 +谄 +谅 +谆 +谈 +谊 +谋 +谌 +谍 +谎 +谏 +谐 +谑 +谒 +谓 +谔 +谕 +谗 +谘 +谙 +谚 +谛 +谜 +谟 +谢 +谣 +谤 +谥 +谦 +谧 +谨 +谩 +谪 +谬 +谭 +谯 +谱 +谲 +谴 +谶 +谷 +豁 +豆 +豇 +豈 +豉 +豊 +豌 +豎 +豐 +豔 +豚 +象 +豢 +豪 +豫 +豬 +豹 +豺 +貂 +貅 +貌 +貓 +貔 +貘 +貝 +貞 +負 +財 +貢 +貧 +貨 +販 +貪 +貫 +責 +貯 +貰 +貳 +貴 +貶 +買 +貸 +費 +貼 +貽 +貿 +賀 +賁 +賂 +賃 +賄 +資 +賈 +賊 +賑 +賓 +賜 +賞 +賠 +賡 +賢 +賣 +賤 +賦 +質 +賬 +賭 +賴 +賺 +購 +賽 +贅 +贈 +贊 +贍 +贏 +贓 +贖 +贛 +贝 +贞 +负 +贡 +财 +责 +贤 +败 +账 +货 +质 +贩 +贪 +贫 +贬 +购 +贮 +贯 +贰 +贱 +贲 +贴 +贵 +贷 +贸 +费 +贺 +贻 +贼 +贾 +贿 +赁 +赂 +赃 +资 +赅 +赈 +赊 +赋 +赌 +赎 +赏 +赐 +赓 +赔 +赖 +赘 +赚 +赛 +赝 +赞 +赠 +赡 +赢 +赣 +赤 +赦 +赧 +赫 +赭 +走 +赳 +赴 +赵 +赶 +起 +趁 +超 +越 +趋 +趕 +趙 +趟 +趣 +趨 +足 +趴 +趵 +趸 +趺 +趾 +跃 +跄 +跆 +跋 +跌 +跎 +跑 +跖 +跚 +跛 +距 +跟 +跡 +跤 +跨 +跩 +跪 +路 +跳 +践 +跷 +跹 +跺 +跻 +踉 +踊 +踌 +踏 +踐 +踝 +踞 +踟 +踢 +踩 +踪 +踮 +踱 +踴 +踵 +踹 +蹂 +蹄 +蹇 +蹈 +蹉 +蹊 +蹋 +蹑 +蹒 +蹙 +蹟 +蹣 +蹤 +蹦 +蹩 +蹬 +蹭 +蹲 +蹴 +蹶 +蹺 +蹼 +蹿 +躁 +躇 +躉 +躊 +躋 +躍 +躏 +躪 +身 +躬 +躯 +躲 +躺 +軀 +車 +軋 +軌 +軍 +軒 +軟 +転 +軸 +軼 +軽 +軾 +較 +載 +輒 +輓 +輔 +輕 +輛 +輝 +輟 +輩 +輪 +輯 +輸 +輻 +輾 +輿 +轄 +轅 +轆 +轉 +轍 +轎 +轟 +车 +轧 +轨 +轩 +转 +轭 +轮 +软 +轰 +轲 +轴 +轶 +轻 +轼 +载 +轿 +较 +辄 +辅 +辆 +辇 +辈 +辉 +辊 +辍 +辐 +辑 +输 +辕 +辖 +辗 +辘 +辙 +辛 +辜 +辞 +辟 +辣 +辦 +辨 +辩 +辫 +辭 +辮 +辯 +辰 +辱 +農 +边 +辺 +辻 +込 +辽 +达 +迁 +迂 +迄 +迅 +过 +迈 +迎 +运 +近 +返 +还 +这 +进 +远 +违 +连 +迟 +迢 +迤 +迥 +迦 +迩 +迪 +迫 +迭 +述 +迴 +迷 +迸 +迹 +迺 +追 +退 +送 +适 +逃 +逅 +逆 +选 +逊 +逍 +透 +逐 +递 +途 +逕 +逗 +這 +通 +逛 +逝 +逞 +速 +造 +逢 +連 +逮 +週 +進 +逵 +逶 +逸 +逻 +逼 +逾 +遁 +遂 +遅 +遇 +遊 +運 +遍 +過 +遏 +遐 +遑 +遒 +道 +達 +違 +遗 +遙 +遛 +遜 +遞 +遠 +遢 +遣 +遥 +遨 +適 +遭 +遮 +遲 +遴 +遵 +遶 +遷 +選 +遺 +遼 +遽 +避 +邀 +邁 +邂 +邃 +還 +邇 +邈 +邊 +邋 +邏 +邑 +邓 +邕 +邛 +邝 +邢 +那 +邦 +邨 +邪 +邬 +邮 +邯 +邰 +邱 +邳 +邵 +邸 +邹 +邺 +邻 +郁 +郅 +郊 +郎 +郑 +郜 +郝 +郡 +郢 +郤 +郦 +郧 +部 +郫 +郭 +郴 +郵 +郷 +郸 +都 +鄂 +鄉 +鄒 +鄔 +鄙 +鄞 +鄢 +鄧 +鄭 +鄰 +鄱 +鄲 +鄺 +酉 +酊 +酋 +酌 +配 +酐 +酒 +酗 +酚 +酝 +酢 +酣 +酥 +酩 +酪 +酬 +酮 +酯 +酰 +酱 +酵 +酶 +酷 +酸 +酿 +醃 +醇 +醉 +醋 +醍 +醐 +醒 +醚 +醛 +醜 +醞 +醣 +醪 +醫 +醬 +醮 +醯 +醴 +醺 +釀 +釁 +采 +釉 +释 +釋 +里 +重 +野 +量 +釐 +金 +釗 +釘 +釜 +針 +釣 +釦 +釧 +釵 +鈀 +鈉 +鈍 +鈎 +鈔 +鈕 +鈞 +鈣 +鈦 +鈪 +鈴 +鈺 +鈾 +鉀 +鉄 +鉅 +鉉 +鉑 +鉗 +鉚 +鉛 +鉤 +鉴 +鉻 +銀 +銃 +銅 +銑 +銓 +銖 +銘 +銜 +銬 +銭 +銮 +銳 +銷 +銹 +鋁 +鋅 +鋒 +鋤 +鋪 +鋰 +鋸 +鋼 +錄 +錐 +錘 +錚 +錠 +錢 +錦 +錨 +錫 +錮 +錯 +録 +錳 +錶 +鍊 +鍋 +鍍 +鍛 +鍥 +鍰 +鍵 +鍺 +鍾 +鎂 +鎊 +鎌 +鎏 +鎔 +鎖 +鎗 +鎚 +鎧 +鎬 +鎮 +鎳 +鏈 +鏖 +鏗 +鏘 +鏞 +鏟 +鏡 +鏢 +鏤 +鏽 +鐘 +鐮 +鐲 +鐳 +鐵 +鐸 +鐺 +鑄 +鑊 +鑑 +鑒 +鑣 +鑫 +鑰 +鑲 +鑼 +鑽 +鑾 +鑿 +针 +钉 +钊 +钎 +钏 +钒 +钓 +钗 +钙 +钛 +钜 +钝 +钞 +钟 +钠 +钡 +钢 +钣 +钤 +钥 +钦 +钧 +钨 +钩 +钮 +钯 +钰 +钱 +钳 +钴 +钵 +钺 +钻 +钼 +钾 +钿 +铀 +铁 +铂 +铃 +铄 +铅 +铆 +铉 +铎 +铐 +铛 +铜 +铝 +铠 +铡 +铢 +铣 +铤 +铨 +铩 +铬 +铭 +铮 +铰 +铲 +铵 +银 +铸 +铺 +链 +铿 +销 +锁 +锂 +锄 +锅 +锆 +锈 +锉 +锋 +锌 +锏 +锐 +锑 +错 +锚 +锟 +锡 +锢 +锣 +锤 +锥 +锦 +锭 +键 +锯 +锰 +锲 +锵 +锹 +锺 +锻 +镀 +镁 +镂 +镇 +镉 +镌 +镍 +镐 +镑 +镕 +镖 +镗 +镛 +镜 +镣 +镭 +镯 +镰 +镳 +镶 +長 +长 +門 +閃 +閉 +開 +閎 +閏 +閑 +閒 +間 +閔 +閘 +閡 +関 +閣 +閥 +閨 +閩 +閱 +閲 +閹 +閻 +閾 +闆 +闇 +闊 +闌 +闍 +闔 +闕 +闖 +闘 +關 +闡 +闢 +门 +闪 +闫 +闭 +问 +闯 +闰 +闲 +间 +闵 +闷 +闸 +闹 +闺 +闻 +闽 +闾 +阀 +阁 +阂 +阅 +阆 +阇 +阈 +阉 +阎 +阐 +阑 +阔 +阕 +阖 +阙 +阚 +阜 +队 +阡 +阪 +阮 +阱 +防 +阳 +阴 +阵 +阶 +阻 +阿 +陀 +陂 +附 +际 +陆 +陇 +陈 +陋 +陌 +降 +限 +陕 +陛 +陝 +陞 +陟 +陡 +院 +陣 +除 +陨 +险 +陪 +陰 +陲 +陳 +陵 +陶 +陷 +陸 +険 +陽 +隅 +隆 +隈 +隊 +隋 +隍 +階 +随 +隐 +隔 +隕 +隘 +隙 +際 +障 +隠 +隣 +隧 +隨 +險 +隱 +隴 +隶 +隸 +隻 +隼 +隽 +难 +雀 +雁 +雄 +雅 +集 +雇 +雉 +雋 +雌 +雍 +雎 +雏 +雑 +雒 +雕 +雖 +雙 +雛 +雜 +雞 +離 +難 +雨 +雪 +雯 +雰 +雲 +雳 +零 +雷 +雹 +電 +雾 +需 +霁 +霄 +霆 +震 +霈 +霉 +霊 +霍 
+霎 +霏 +霑 +霓 +霖 +霜 +霞 +霧 +霭 +霰 +露 +霸 +霹 +霽 +霾 +靂 +靄 +靈 +青 +靓 +靖 +静 +靚 +靛 +靜 +非 +靠 +靡 +面 +靥 +靦 +革 +靳 +靴 +靶 +靼 +鞅 +鞋 +鞍 +鞏 +鞑 +鞘 +鞠 +鞣 +鞦 +鞭 +韆 +韋 +韌 +韓 +韜 +韦 +韧 +韩 +韬 +韭 +音 +韵 +韶 +韻 +響 +頁 +頂 +頃 +項 +順 +須 +頌 +預 +頑 +頒 +頓 +頗 +領 +頜 +頡 +頤 +頫 +頭 +頰 +頷 +頸 +頹 +頻 +頼 +顆 +題 +額 +顎 +顏 +顔 +願 +顛 +類 +顧 +顫 +顯 +顱 +顴 +页 +顶 +顷 +项 +顺 +须 +顼 +顽 +顾 +顿 +颁 +颂 +预 +颅 +领 +颇 +颈 +颉 +颊 +颌 +颍 +颐 +频 +颓 +颔 +颖 +颗 +题 +颚 +颛 +颜 +额 +颞 +颠 +颡 +颢 +颤 +颦 +颧 +風 +颯 +颱 +颳 +颶 +颼 +飄 +飆 +风 +飒 +飓 +飕 +飘 +飙 +飚 +飛 +飞 +食 +飢 +飨 +飩 +飪 +飯 +飲 +飼 +飽 +飾 +餃 +餅 +餉 +養 +餌 +餐 +餒 +餓 +餘 +餚 +餛 +餞 +餡 +館 +餮 +餵 +餾 +饅 +饈 +饋 +饌 +饍 +饑 +饒 +饕 +饗 +饞 +饥 +饨 +饪 +饬 +饭 +饮 +饯 +饰 +饱 +饲 +饴 +饵 +饶 +饷 +饺 +饼 +饽 +饿 +馀 +馁 +馄 +馅 +馆 +馈 +馋 +馍 +馏 +馒 +馔 +首 +馗 +香 +馥 +馨 +馬 +馭 +馮 +馳 +馴 +駁 +駄 +駅 +駆 +駐 +駒 +駕 +駛 +駝 +駭 +駱 +駿 +騁 +騎 +騏 +験 +騙 +騨 +騰 +騷 +驀 +驅 +驊 +驍 +驒 +驕 +驗 +驚 +驛 +驟 +驢 +驥 +马 +驭 +驮 +驯 +驰 +驱 +驳 +驴 +驶 +驷 +驸 +驹 +驻 +驼 +驾 +驿 +骁 +骂 +骄 +骅 +骆 +骇 +骈 +骊 +骋 +验 +骏 +骐 +骑 +骗 +骚 +骛 +骜 +骞 +骠 +骡 +骤 +骥 +骧 +骨 +骯 +骰 +骶 +骷 +骸 +骼 +髂 +髅 +髋 +髏 +髒 +髓 +體 +髖 +高 +髦 +髪 +髮 +髯 +髻 +鬃 +鬆 +鬍 +鬓 +鬚 +鬟 +鬢 +鬣 +鬥 +鬧 +鬱 +鬼 +魁 +魂 +魄 +魅 +魇 +魍 +魏 +魔 +魘 +魚 +魯 +魷 +鮑 +鮨 +鮪 +鮭 +鮮 +鯉 +鯊 +鯖 +鯛 +鯨 +鯰 +鯽 +鰍 +鰓 +鰭 +鰲 +鰻 +鰾 +鱈 +鱉 +鱔 +鱗 +鱷 +鱸 +鱼 +鱿 +鲁 +鲈 +鲍 +鲑 +鲛 +鲜 +鲟 +鲢 +鲤 +鲨 +鲫 +鲱 +鲲 +鲶 +鲷 +鲸 +鳃 +鳄 +鳅 +鳌 +鳍 +鳕 +鳖 +鳗 +鳝 +鳞 +鳥 +鳩 +鳳 +鳴 +鳶 +鴉 +鴕 +鴛 +鴦 +鴨 +鴻 +鴿 +鵑 +鵜 +鵝 +鵡 +鵬 +鵰 +鵲 +鶘 +鶩 +鶯 +鶴 +鷗 +鷲 +鷹 +鷺 +鸚 +鸞 +鸟 +鸠 +鸡 +鸢 +鸣 +鸥 +鸦 +鸨 +鸪 +鸭 +鸯 +鸳 +鸵 +鸽 +鸾 +鸿 +鹂 +鹃 +鹄 +鹅 +鹈 +鹉 +鹊 +鹌 +鹏 +鹑 +鹕 +鹘 +鹜 +鹞 +鹤 +鹦 +鹧 +鹫 +鹭 +鹰 +鹳 +鹵 +鹹 +鹼 +鹽 +鹿 +麂 +麋 +麒 +麓 +麗 +麝 +麟 +麥 +麦 +麩 +麴 +麵 +麸 +麺 +麻 +麼 +麽 +麾 +黃 +黄 +黍 +黎 +黏 +黑 +黒 +黔 +默 +黛 +黜 +黝 +點 +黠 +黨 +黯 +黴 +鼋 +鼎 +鼐 +鼓 +鼠 +鼬 +鼹 +鼻 +鼾 +齁 +齊 +齋 +齐 +齒 +齡 +齢 +齣 +齦 +齿 +龄 +龅 +龈 +龊 +龋 +龌 +龍 +龐 +龔 +龕 +龙 +龚 +龛 +龜 +龟 +︰ +︱ +︶ +︿ +﹁ +﹂ +﹍ +﹏ +﹐ +﹑ +﹒ +﹔ +﹕ +﹖ +﹗ +﹙ +﹚ +﹝ +﹞ +﹡ +﹣ +! +" +# +$ +% +& +' +( +) +* ++ +, +- +. +/ +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +: +; +< += +> +? +@ +[ +\ +] +^ +_ +` +a +b +c +d +e +f +g +h +i +j +k +l +m +n +o +p +q +r +s +t +u +v +w +x +y +z +{ +| +} +~ +。 +「 +」 +、 +・ +ッ +ー +イ +ク +シ +ス +ト +ノ +フ +ラ +ル +ン +゙ +゚ + ̄ +¥ +👍 +🔥 +😂 +😎 +... 
+yam +10 +2017 +12 +11 +2016 +20 +30 +15 +06 +lofter +##s +2015 +by +16 +14 +18 +13 +24 +17 +2014 +21 +##0 +22 +19 +25 +23 +com +100 +00 +05 +2013 +##a +03 +09 +08 +28 +##2 +50 +01 +04 +##1 +27 +02 +2012 +##3 +26 +##e +07 +##8 +##5 +##6 +##4 +##9 +##7 +29 +2011 +40 +##t +2010 +##o +##d +##i +2009 +##n +app +www +the +##m +31 +##c +##l +##y +##r +##g +2008 +60 +http +200 +qq +##p +80 +##f +google +pixnet +90 +cookies +tripadvisor +500 +##er +##k +35 +##h +facebook +2007 +2000 +70 +##b +of +##x +##u +45 +300 +iphone +32 +1000 +2006 +48 +ip +36 +in +38 +3d +##w +##ing +55 +ctrip +##on +##v +33 +##の +to +34 +400 +id +2005 +it +37 +windows +llc +top +99 +42 +39 +000 +led +at +##an +41 +51 +52 +46 +49 +43 +53 +44 +##z +android +58 +and +59 +2004 +56 +vr +##か +5000 +2003 +47 +blogthis +twitter +54 +##le +150 +ok +2018 +57 +75 +cn +no +ios +##in +##mm +##00 +800 +on +te +3000 +65 +2001 +360 +95 +ig +lv +120 +##ng +##を +##us +##に +pc +てす +── +600 +##te +85 +2002 +88 +##ed +html +ncc +wifi +email +64 +blog +is +##10 +##て +mail +online +##al +dvd +##ic +studio +##は +##℃ +##ia +##と +line +vip +72 +##q +98 +##ce +##en +for +##is +##ra +##es +##j +usb +net +cp +1999 +asia +4g +##cm +diy +new +3c +##お +ta +66 +language +vs +apple +tw +86 +web +##ne +ipad +62 +you +##re +101 +68 +##tion +ps +de +bt +pony +atm +##2017 +1998 +67 +##ch +ceo +##or +go +##na +av +pro +cafe +96 +pinterest +97 +63 +pixstyleme3c +##ta +more +said +##2016 +1997 +mp3 +700 +##ll +nba +jun +##20 +92 +tv +1995 +pm +61 +76 +nbsp +250 +##ie +linux +##ma +cd +110 +hd +##17 +78 +##ion +77 +6000 +am +##th +##st +94 +##se +##et +69 +180 +gdp +my +105 +81 +abc +89 +flash +79 +one +93 +1990 +1996 +##ck +gps +##も +##ly +web885 +106 +2020 +91 +##ge +4000 +1500 +xd +boss +isbn +1994 +org +##ry +me +love +##11 +0fork +73 +##12 +3g +##ter +##ar +71 +82 +##la +hotel +130 +1970 +pk +83 +87 +140 +ie +##os +##30 +##el +74 +##50 +seo +cpu +##ml +p2p +84 +may +##る +sun +tue +internet +cc +posted +youtube +##at +##ン +##man +ii +##ル +##15 +abs +nt +pdf +yahoo +ago +1980 +##it +news +mac +104 +##てす +##me +##り +java +1992 +spa +##de +##nt +hk +all +plus +la +1993 +##mb +##16 +##ve +west +##da +160 +air +##い +##ps +から +##to +1989 +logo +htc +php +https +fi +momo +##son +sat +##ke +##80 +ebd +suv +wi +day +apk +##88 +##um +mv +galaxy +wiki +or +brake +##ス +1200 +する +this +1991 +mon +##こ +❤2017 +po +##ない +javascript +life +home +june +##ss +system +900 +##ー +##0 +pp +1988 +world +fb +4k +br +##as +ic +ai +leonardo +safari +##60 +live +free +xx +wed +win7 +kiehl +##co +lg +o2o +##go +us +235 +1949 +mm +しい +vfm +kanye +##90 +##2015 +##id +jr +##ey +123 +rss +##sa +##ro +##am +##no +thu +fri +350 +##sh +##ki +103 +comments +name +##のて +##pe +##ine +max +1987 +8000 +uber +##mi +##ton +wordpress +office +1986 +1985 +##ment +107 +bd +win10 +##ld +##li +gmail +bb +dior +##rs +##ri +##rd +##ます +up +cad +##® +dr +して +read +##21 +をお +##io +##99 +url +1984 +pvc +paypal +show +policy +##40 +##ty +##18 +with +##★ +##01 +txt +102 +##ba +dna +from +post +mini +ar +taiwan +john +##ga +privacy +agoda +##13 +##ny +word +##24 +##22 +##by +##ur +##hz +1982 +##ang +265 +cookie +netscape +108 +##ka +##~ +##ad +house +share +note +ibm +code +hello +nike +sim +survey +##016 +1979 +1950 +wikia +##32 +##017 +5g +cbc +##tor +##kg +1983 +##rt +##14 +campaign +store +2500 +os +##ct +##ts +##° +170 +api +##ns +365 +excel +##な +##ao +##ら +##し +~~ +##nd +university +163 +には +518 +##70 +##ya +##il +##25 +pierre +ipo +0020 +897 +##23 +hotels +##ian +のお +125 +years +6606 +##ers +##26 +high 
+##day +time +##ay +bug +##line +##く +##す +##be +xp +talk2yam +yamservice +10000 +coco +##dy +sony +##ies +1978 +microsoft +david +people +##ha +1960 +instagram +intel +その +##ot +iso +1981 +##va +115 +##mo +##land +xxx +man +co +ltxsw +##ation +baby +220 +##pa +##ol +1945 +7000 +tag +450 +##ue +msn +##31 +oppo +##ト +##ca +control +##om +st +chrome +##ure +##ん +be +##き +lol +##19 +した +##bo +240 +lady +##100 +##way +##から +4600 +##ko +##do +##un +4s +corporation +168 +##ni +herme +##28 +cp +978 +##up +##06 +ui +##ds +ppt +admin +three +します +bbc +re +128 +##48 +ca +##015 +##35 +hp +##ee +tpp +##た +##ive +×× +root +##cc +##ました +##ble +##ity +adobe +park +114 +et +oled +city +##ex +##ler +##ap +china +##book +20000 +view +##ice +global +##km +your +hong +##mg +out +##ms +ng +ebay +##29 +menu +ubuntu +##cy +rom +##view +open +ktv +do +server +##lo +if +english +##ね +##5 +##oo +1600 +##02 +step1 +kong +club +135 +july +inc +1976 +mr +hi +##net +touch +##ls +##ii +michael +lcd +##05 +##33 +phone +james +step2 +1300 +ios9 +##box +dc +##2 +##ley +samsung +111 +280 +pokemon +css +##ent +##les +いいえ +##1 +s8 +atom +play +bmw +##said +sa +etf +ctrl +♥yoyo♥ +##55 +2025 +##2014 +##66 +adidas +amazon +1958 +##ber +##ner +visa +##77 +##der +1800 +connectivity +##hi +firefox +109 +118 +hr +so +style +mark +pop +ol +skip +1975 +as +##27 +##ir +##61 +190 +mba +##う +##ai +le +##ver +1900 +cafe2017 +lte +super +113 +129 +##ron +amd +like +##☆ +are +##ster +we +##sk +paul +data +international +##ft +longchamp +ssd +good +##ート +##ti +reply +##my +↓↓↓ +apr +star +##ker +source +136 +js +112 +get +force +photo +##one +126 +##2013 +##ow +link +bbs +1972 +goods +##lin +python +119 +##ip +game +##ics +##ません +blue +##● +520 +##45 +page +itunes +##03 +1955 +260 +1968 +gt +gif +618 +##ff +##47 +group +くたさい +about +bar +ganji +##nce +music +lee +not +1977 +1971 +1973 +##per +an +faq +comment +##って +days +##ock +116 +##bs +1974 +1969 +v1 +player +1956 +xbox +sql +fm +f1 +139 +##ah +210 +##lv +##mp +##000 +melody +1957 +##3 +550 +17life +199 +1966 +xml +market +##au +##71 +999 +##04 +what +gl +##95 +##age +tips +##68 +book +##ting +mysql +can +1959 +230 +##ung +wonderland +watch +10℃ +##ction +9000 +mar +mobile +1946 +1962 +article +##db +part +▲top +party +って +1967 +1964 +1948 +##07 +##ore +##op +この +dj +##78 +##38 +010 +main +225 +1965 +##ong +art +320 +ad +134 +020 +##73 +117 +pm2 +japan +228 +##08 +ts +1963 +##ica +der +sm +##36 +2019 +##wa +ct +##7 +##や +##64 +1937 +homemesh +search +##85 +##れは +##tv +##di +macbook +##9 +##くたさい +service +##♥ +type +った +750 +##ier +##si +##75 +##います +##ok +best +##ット +goris +lock +##った +cf +3m +big +##ut +ftp +carol +##vi +10 +1961 +happy +sd +##ac +122 +anti +pe +cnn +iii +1920 +138 +##ラ +1940 +esp +jan +tags +##98 +##51 +august +vol +##86 +154 +##™ +##fs +##れ +##sion +design +ac +##ム +press +jordan +ppp +that +key +check +##6 +##tt +##㎡ +1080p +##lt +power +##42 +1952 +##bc +vivi +##ック +he +133 +121 +jpg +##rry +201 +175 +3500 +1947 +nb +##ted +##rn +しています +1954 +usd +##t00 +master +##ンク +001 +model +##58 +al +##09 +1953 +##34 +ram +goo +ても +##ui +127 +1930 +red +##ary +rpg +item +##pm +##41 +270 +##za +project +##2012 +hot +td +blogabstract +##ger +##62 +650 +##44 +gr2 +##します +##m +black +electronic +nfc +year +asus +また +html5 +cindy +##hd +m3 +132 +esc +##od +booking +##53 +fed +tvb +##81 +##ina +mit +165 +##いる +chan +192 +distribution +next +になる +peter +bios +steam +cm +1941 +にも +pk10 +##ix +##65 +##91 +dec +nasa +##ana +icecat +00z +b1 +will +##46 +li +se +##ji +##み +##ard +oct 
+##ain +jp +##ze +##bi +cio +##56 +smart +h5 +##39 +##port +curve +vpn +##nm +##dia +utc +##あり +12345678910 +##52 +rmvb +chanel +a4 +miss +##and +##im +media +who +##63 +she +girl +5s +124 +vera +##して +class +vivo +king +##フ +##ei +national +ab +1951 +5cm +888 +145 +ipod +ap +1100 +5mm +211 +ms +2756 +##69 +mp4 +msci +##po +##89 +131 +mg +index +380 +##bit +##out +##zz +##97 +##67 +158 +apec +##8 +photoshop +opec +¥799 +ては +##96 +##tes +##ast +2g +○○ +##ール +¥2899 +##ling +##よ +##ory +1938 +##ical +kitty +content +##43 +step3 +##cn +win8 +155 +vc +1400 +iphone7 +robert +##した +tcl +137 +beauty +##87 +en +dollars +##ys +##oc +step +pay +yy +a1 +##2011 +##lly +##ks +##♪ +1939 +188 +download +1944 +sep +exe +ph +います +school +gb +center +pr +street +##board +uv +##37 +##lan +winrar +##que +##ua +##com +1942 +1936 +480 +gpu +##4 +ettoday +fu +tom +##54 +##ren +##via +149 +##72 +b2b +144 +##79 +##tch +rose +arm +mb +##49 +##ial +##nn +nvidia +step4 +mvp +00㎡ +york +156 +##イ +how +cpi +591 +2765 +gov +kg +joe +##xx +mandy +pa +##ser +copyright +fashion +1935 +don +##け +ecu +##ist +##art +erp +wap +have +##lm +talk +##ek +##ning +##if +ch +##ite +video +1943 +cs +san +iot +look +##84 +##2010 +##ku +october +##ux +trump +##hs +##ide +box +141 +first +##ins +april +##ight +##83 +185 +angel +protected +aa +151 +162 +x1 +m2 +##fe +##× +##ho +size +143 +min +ofo +fun +gomaji +ex +hdmi +food +dns +march +chris +kevin +##のか +##lla +##pp +##ec +ag +ems +6s +720p +##rm +##ham +off +##92 +asp +team +fandom +ed +299 +▌♥ +##ell +info +されています +##82 +sina +4066 +161 +##able +##ctor +330 +399 +315 +dll +rights +ltd +idc +jul +3kg +1927 +142 +ma +surface +##76 +##ク +~~~ +304 +mall +eps +146 +green +##59 +map +space +donald +v2 +sodu +##light +1931 +148 +1700 +まて +310 +reserved +htm +##han +##57 +2d +178 +mod +##ise +##tions +152 +ti +##shi +doc +1933 +icp +055 +wang +##ram +shopping +aug +##pi +##well +now +wam +b2 +からお +##hu +236 +1928 +##gb +266 +f2 +##93 +153 +mix +##ef +##uan +bwl +##plus +##res +core +##ess +tea +5℃ +hktvmall +nhk +##ate +list +##ese +301 +feb +4m +inn +ての +nov +159 +12345 +daniel +##ci +pass +##bet +##nk +coffee +202 +ssl +airbnb +##ute +fbi +woshipm +skype +ea +cg +sp +##fc +##www +yes +edge +alt +007 +##94 +fpga +##ght +##gs +iso9001 +さい +##ile +##wood +##uo +image +lin +icon +american +##em +1932 +set +says +##king +##tive +blogger +##74 +なと +256 +147 +##ox +##zy +##red +##ium +##lf +nokia +claire +##リ +##ding +november +lohas +##500 +##tic +##マ +##cs +##ある +##che +##ire +##gy +##ult +db +january +win +##カ +166 +road +ptt +##ま +##つ +198 +##fa +##mer +anna +pchome +はい +udn +ef +420 +##time +##tte +2030 +##ア +g20 +white +かかります +1929 +308 +garden +eleven +di +##おります +chen +309b +777 +172 +young +cosplay +ちてない +4500 +bat +##123 +##tra +##ては +kindle +npc +steve +etc +##ern +##| +call +xperia +ces +travel +sk +s7 +##ous +1934 +##int +みいたたけます +183 +edu +file +cho +qr +##car +##our +186 +##ant +##d +eric +1914 +rends +##jo +##する +mastercard +##2000 +kb +##min +290 +##ino +vista +##ris +##ud +jack +2400 +##set +169 +pos +1912 +##her +##ou +taipei +しく +205 +beta +##ませんか +232 +##fi +express +255 +body +##ill +aphojoy +user +december +meiki +##ick +tweet +richard +##av +##ᆫ +iphone6 +##dd +ちてすか +views +##mark +321 +pd +##00 +times +##▲ +level +##ash +10g +point +5l +##ome +208 +koreanmall +##ak +george +q2 +206 +wma +tcp +##200 +スタッフ +full +mlb +##lle +##watch +tm +run +179 +911 +smith +business +##und +1919 +color +##tal +222 +171 +##less +moon +4399 +##rl +update +pcb +shop +499 +157 +little +なし 
+end +##mhz +van +dsp +easy +660 +##house +##key +history +##o +oh +##001 +##hy +##web +oem +let +was +##2009 +##gg +review +##wan +182 +##°c +203 +uc +title +##val +united +233 +2021 +##ons +doi +trivago +overdope +sbs +##ance +##ち +grand +special +573032185 +imf +216 +wx17house +##so +##ーム +audi +##he +london +william +##rp +##ake +science +beach +cfa +amp +ps4 +880 +##800 +##link +##hp +crm +ferragamo +bell +make +##eng +195 +under +zh +photos +2300 +##style +##ント +via +176 +da +##gi +company +i7 +##ray +thomas +370 +ufo +i5 +##max +plc +ben +back +research +8g +173 +mike +##pc +##ッフ +september +189 +##ace +vps +february +167 +pantos +wp +lisa +1921 +★★ +jquery +night +long +offer +##berg +##news +1911 +##いて +ray +fks +wto +せます +over +164 +340 +##all +##rus +1924 +##888 +##works +blogtitle +loftpermalink +##→ +187 +martin +test +ling +km +##め +15000 +fda +v3 +##ja +##ロ +wedding +かある +outlet +family +##ea +をこ +##top +story +##ness +salvatore +##lu +204 +swift +215 +room +している +oracle +##ul +1925 +sam +b2c +week +pi +rock +##のは +##a +##けと +##ean +##300 +##gle +cctv +after +chinese +##back +powered +x2 +##tan +1918 +##nes +##イン +canon +only +181 +##zi +##las +say +##oe +184 +##sd +221 +##bot +##world +##zo +sky +made +top100 +just +1926 +pmi +802 +234 +gap +##vr +177 +les +174 +▲topoct +ball +vogue +vi +ing +ofweek +cos +##list +##ort +▲topmay +##なら +##lon +として +last +##tc +##of +##bus +##gen +real +eva +##コ +a3 +nas +##lie +##ria +##coin +##bt +▲topapr +his +212 +cat +nata +vive +health +⋯⋯ +drive +sir +▲topmar +du +cup +##カー +##ook +##よう +##sy +alex +msg +tour +しました +3ce +##word +193 +ebooks +r8 +block +318 +##より +2200 +nice +pvp +207 +months +1905 +rewards +##ther +1917 +0800 +##xi +##チ +##sc +micro +850 +gg +blogfp +op +1922 +daily +m1 +264 +true +##bb +ml +##tar +##のお +##ky +anthony +196 +253 +##yo +state +218 +##ara +##aa +##rc +##tz +##ston +より +gear +##eo +##ade +ge +see +1923 +##win +##ura +ss +heart +##den +##ita +down +##sm +el +png +2100 +610 +rakuten +whatsapp +bay +dream +add +##use +680 +311 +pad +gucci +mpv +##ode +##fo +island +▲topjun +##▼ +223 +jason +214 +chicago +##❤ +しの +##hone +io +##れる +##ことか +sogo +be2 +##ology +990 +cloud +vcd +##con +2~3 +##ford +##joy +##kb +##こさいます +##rade +but +##ach +docker +##ful +rfid +ul +##ase +hit +ford +##star +580 +##○ +11 +a2 +sdk +reading +edited +##are +cmos +##mc +238 +siri +light +##ella +##ため +bloomberg +##read +pizza +##ison +jimmy +##vm +college +node +journal +ba +18k +##play +245 +##cer +20 +magic +##yu +191 +jump +288 +tt +##ings +asr +##lia +3200 +step5 +network +##cd +mc +いします +1234 +pixstyleme +273 +##600 +2800 +money +★★★★★ +1280 +12 +430 +bl +みの +act +##tus +tokyo +##rial +##life +emba +##ae +saas +tcs +##rk +##wang +summer +##sp +ko +##ving +390 +premium +##その +netflix +##ヒ +uk +mt +##lton +right +frank +two +209 +える +##ple +##cal +021 +##んな +##sen +##ville +hold +nexus +dd +##ius +てお +##mah +##なく +tila +zero +820 +ce +##tin +resort +##ws +charles +old +p10 +5d +report +##360 +##ru +##には +bus +vans +lt +##est +pv +##レ +links +rebecca +##ツ +##dm +azure +##365 +きな +limited +bit +4gb +##mon +1910 +moto +##eam +213 +1913 +var +eos +なとの +226 +blogspot +された +699 +e3 +dos +dm +fc +##ments +##ik +##kw +boy +##bin +##ata +960 +er +##せ +219 +##vin +##tu +##ula +194 +##∥ +station +##ろ +##ature +835 +files +zara +hdr +top10 +nature +950 +magazine +s6 +marriott +##シ +avira +case +##っと +tab +##ran +tony +##home +oculus +im +##ral +jean +saint +cry +307 +rosie +##force +##ini +ice +##bert +のある +##nder +##mber +pet +2600 +##◆ +plurk 
+▲topdec +##sis +00kg +▲topnov +720 +##ence +tim +##ω +##nc +##ても +##name +log +ips +great +ikea +malaysia +unix +##イト +3600 +##ncy +##nie +12000 +akb48 +##ye +##oid +404 +##chi +##いた +oa +xuehai +##1000 +##orm +##rf +275 +さん +##ware +##リー +980 +ho +##pro +text +##era +560 +bob +227 +##ub +##2008 +8891 +scp +avi +##zen +2022 +mi +wu +museum +qvod +apache +lake +jcb +▲topaug +★★★ +ni +##hr +hill +302 +ne +weibo +490 +ruby +##ーシ +##ヶ +##row +4d +▲topjul +iv +##ish +github +306 +mate +312 +##スト +##lot +##ane +andrew +のハイト +##tina +t1 +rf +ed2k +##vel +##900 +way +final +りの +ns +5a +705 +197 +##メ +sweet +bytes +##ene +▲topjan +231 +##cker +##2007 +##px +100g +topapp +229 +helpapp +rs +low +14k +g4g +care +630 +ldquo +あり +##fork +leave +rm +edition +##gan +##zon +##qq +▲topsep +##google +##ism +gold +224 +explorer +##zer +toyota +category +select +visual +##labels +restaurant +##md +posts +s1 +##ico +もっと +angelababy +123456 +217 +sports +s3 +mbc +1915 +してくたさい +shell +x86 +candy +##new +kbs +face +xl +470 +##here +4a +swissinfo +v8 +▲topfeb +dram +##ual +##vice +3a +##wer +sport +q1 +ios10 +public +int +card +##c +ep +au +rt +##れた +1080 +bill +##mll +kim +30 +460 +wan +##uk +##ミ +x3 +298 +0t +scott +##ming +239 +e5 +##3d +h7n9 +worldcat +brown +##あります +##vo +##led +##580 +##ax +249 +410 +##ert +paris +##~6 +polo +925 +##lr +599 +##ナ +capital +##hing +bank +cv +1g +##chat +##s +##たい +adc +##ule +2m +##e +digital +hotmail +268 +##pad +870 +bbq +quot +##ring +before +wali +##まて +mcu +2k +2b +という +costco +316 +north +333 +switch +##city +##p +philips +##mann +management +panasonic +##cl +##vd +##ping +##rge +alice +##lk +##ましょう +css3 +##ney +vision +alpha +##ular +##400 +##tter +lz +にお +##ありません +mode +gre +1916 +pci +##tm +237 +1~2 +##yan +##そ +について +##let +##キ +work +war +coach +ah +mary +##ᅵ +huang +##pt +a8 +pt +follow +##berry +1895 +##ew +a5 +ghost +##ション +##wn +##og +south +##code +girls +##rid +action +villa +git +r11 +table +games +##cket +error +##anonymoussaid +##ag +here +##ame +##gc +qa +##■ +##lis +gmp +##gin +vmalife +##cher +yu +wedding +##tis +demo +dragon +530 +soho +social +bye +##rant +river +orz +acer +325 +##↑ +##ース +##ats +261 +del +##ven +440 +ups +##ように +##ター +305 +value +macd +yougou +##dn +661 +##ano +ll +##urt +##rent +continue +script +##wen +##ect +paper +263 +319 +shift +##chel +##フト +##cat +258 +x5 +fox +243 +##さん +car +aaa +##blog +loading +##yn +##tp +kuso +799 +si +sns +イカせるテンマ +ヒンクテンマ3 +rmb +vdc +forest +central +prime +help +ultra +##rmb +##ような +241 +square +688 +##しい +のないフロクに +##field +##reen +##ors +##ju +c1 +start +510 +##air +##map +cdn +##wo +cba +stephen +m8 +100km +##get +opera +##base +##ood +vsa +com™ +##aw +##ail +251 +なのて +count +t2 +##ᅡ +##een +2700 +hop +##gp +vsc +tree +##eg +##ose +816 +285 +##ories +##shop +alphago +v4 +1909 +simon +##ᆼ +fluke62max +zip +スホンサー +##sta +louis +cr +bas +##~10 +bc +##yer +hadoop +##ube +##wi +1906 +0755 +hola +##low +place +centre +5v +d3 +##fer +252 +##750 +##media +281 +540 +0l +exchange +262 +series +##ハー +##san +eb +##bank +##k +q3 +##nge +##mail +take +##lp +259 +1888 +client +east +cache +event +vincent +##ールを +きを +##nse +sui +855 +adchoice +##и +##stry +##なたの +246 +##zone +ga +apps +sea +##ab +248 +cisco +##タ +##rner +kymco +##care +dha +##pu +##yi +minkoff +royal +p1 +への +annie +269 +collection +kpi +playstation +257 +になります +866 +bh +##bar +queen +505 +radio +1904 +andy +armani +##xy +manager +iherb +##ery +##share +spring +raid +johnson +1908 +##ob +volvo +hall +##ball +v6 +our +taylor +##hk +bi +242 +##cp 
+kate +bo +water +technology +##rie +サイトは +277 +##ona +##sl +hpv +303 +gtx +hip +rdquo +jayz +stone +##lex +##rum +namespace +##やり +620 +##ale +##atic +des +##erson +##ql +##ves +##type +enter +##この +##てきます +d2 +##168 +##mix +##bian +との +a9 +jj +ky +##lc +access +movie +##hc +リストに +tower +##ration +##mit +ます +##nch +ua +tel +prefix +##o2 +1907 +##point +1901 +ott +~10 +##http +##ury +baidu +##ink +member +##logy +bigbang +nownews +##js +##shot +##tb +##こと +247 +eba +##tics +##lus +ける +v5 +spark +##ama +there +##ions +god +##lls +##down +hiv +##ress +burberry +day2 +##kv +◆◆ +jeff +related +film +edit +joseph +283 +##ark +cx +32gb +order +g9 +30000 +##ans +##tty +s5 +##bee +かあります +thread +xr +buy +sh +005 +land +spotify +mx +##ari +276 +##verse +×email +sf +why +##ことて +244 +7headlines +nego +sunny +dom +exo +401 +666 +positioning +fit +rgb +##tton +278 +kiss +alexa +adam +lp +みリストを +##g +mp +##ties +##llow +amy +##du +np +002 +institute +271 +##rth +##lar +2345 +590 +##des +sidebar +15 +imax +site +##cky +##kit +##ime +##009 +season +323 +##fun +##ンター +##ひ +gogoro +a7 +pu +lily +fire +twd600 +##ッセーシを +いて +##vis +30ml +##cture +##をお +information +##オ +close +friday +##くれる +yi +nick +てすか +##tta +##tel +6500 +##lock +cbd +economy +254 +かお +267 +tinker +double +375 +8gb +voice +##app +oops +channel +today +985 +##right +raw +xyz +##+ +jim +edm +##cent +7500 +supreme +814 +ds +##its +##asia +dropbox +##てすか +##tti +books +272 +100ml +##tle +##ller +##ken +##more +##boy +sex +309 +##dom +t3 +##ider +##なります +##unch +1903 +810 +feel +5500 +##かった +##put +により +s2 +mo +##gh +men +ka +amoled +div +##tr +##n1 +port +howard +##tags +ken +dnf +##nus +adsense +##а +ide +##へ +buff +thunder +##town +##ique +has +##body +auto +pin +##erry +tee +てした +295 +number +##the +##013 +object +psp +cool +udnbkk +16gb +##mic +miui +##tro +most +r2 +##alk +##nity +1880 +±0 +##いました +428 +s4 +law +version +##oa +n1 +sgs +docomo +##tf +##ack +henry +fc2 +##ded +##sco +##014 +##rite +286 +0mm +linkedin +##ada +##now +wii +##ndy +ucbug +##◎ +sputniknews +legalminer +##ika +##xp +2gb +##bu +q10 +oo +b6 +come +##rman +cheese +ming +maker +##gm +nikon +##fig +ppi +kelly +##ります +jchere +てきます +ted +md +003 +fgo +tech +##tto +dan +soc +##gl +##len +hair +earth +640 +521 +img +##pper +##a1 +##てきる +##ロク +acca +##ition +##ference +suite +##ig +outlook +##mond +##cation +398 +##pr +279 +101vip +358 +##999 +282 +64gb +3800 +345 +airport +##over +284 +##おり +jones +##ith +lab +##su +##いるのて +co2 +town +piece +##llo +no1 +vmware +24h +##qi +focus +reader +##admin +##ora +tb +false +##log +1898 +know +lan +838 +##ces +f4 +##ume +motel +stop +##oper +na +flickr +netcomponents +##af +##─ +pose +williams +local +##ound +##cg +##site +##iko +いお +274 +5m +gsm +con +##ath +1902 +friends +##hip +cell +317 +##rey +780 +cream +##cks +012 +##dp +facebooktwitterpinterestgoogle +sso +324 +shtml +song +swiss +##mw +##キンク +lumia +xdd +string +tiffany +522 +marc +られた +insee +russell +sc +dell +##ations +ok +camera +289 +##vs +##flow +##late +classic +287 +##nter +stay +g1 +mtv +512 +##ever +##lab +##nger +qe +sata +ryan +d1 +50ml +cms +##cing +su +292 +3300 +editor +296 +##nap +security +sunday +association +##ens +##700 +##bra +acg +##かり +sofascore +とは +mkv +##ign +jonathan +gary +build +labels +##oto +tesla +moba +qi +gohappy +general +ajax +1024 +##かる +サイト +society +##test +##urs +wps +fedora +##ich +mozilla +328 +##480 +##dr +usa +urn +##lina +##r +grace +##die +##try +##ader +1250 +##なり +elle +570 +##chen +##ᆯ +price +##ten +uhz +##ough +eq +##hen 
+states +push +session +balance +wow +506 +##cus +##py +when +##ward +##ep +34e +wong +library +prada +##サイト +##cle +running +##ree +313 +ck +date +q4 +##ctive +##ool +##> +mk +##ira +##163 +388 +die +secret +rq +dota +buffet +は1ヶ +e6 +##ez +pan +368 +ha +##card +##cha +2a +##さ +alan +day3 +eye +f3 +##end +france +keep +adi +rna +tvbs +##ala +solo +nova +##え +##tail +##ょう +support +##ries +##なる +##ved +base +copy +iis +fps +##ways +hero +hgih +profile +fish +mu +ssh +entertainment +chang +##wd +click +cake +##ond +pre +##tom +kic +pixel +##ov +##fl +product +6a +##pd +dear +##gate +es +yumi +audio +##² +##sky +echo +bin +where +##ture +329 +##ape +find +sap +isis +##なと +nand +##101 +##load +##ream +band +a6 +525 +never +##post +festival +50cm +##we +555 +guide +314 +zenfone +##ike +335 +gd +forum +jessica +strong +alexander +##ould +software +allen +##ious +program +360° +else +lohasthree +##gar +することかてきます +please +##れます +rc +##ggle +##ric +bim +50000 +##own +eclipse +355 +brian +3ds +##side +061 +361 +##other +##ける +##tech +##ator +485 +engine +##ged +##t +plaza +##fit +cia +ngo +westbrook +shi +tbs +50mm +##みませんか +sci +291 +reuters +##ily +contextlink +##hn +af +##cil +bridge +very +##cel +1890 +cambridge +##ize +15g +##aid +##data +790 +frm +##head +award +butler +##sun +meta +##mar +america +ps3 +puma +pmid +##すか +lc +670 +kitchen +##lic +オーフン5 +きなしソフトサーヒス +そして +day1 +future +★★★★ +##text +##page +##rris +pm1 +##ket +fans +##っています +1001 +christian +bot +kids +trackback +##hai +c3 +display +##hl +n2 +1896 +idea +さんも +##sent +airmail +##ug +##men +pwm +けます +028 +##lution +369 +852 +awards +schemas +354 +asics +wikipedia +font +##tional +##vy +c2 +293 +##れている +##dget +##ein +っている +contact +pepper +スキル +339 +##~5 +294 +##uel +##ument +730 +##hang +みてす +q5 +##sue +rain +##ndi +wei +swatch +##cept +わせ +331 +popular +##ste +##tag +p2 +501 +trc +1899 +##west +##live +justin +honda +ping +messenger +##rap +v9 +543 +##とは +unity +appqq +はすへて +025 +leo +##tone +##テ +##ass +uniqlo +##010 +502 +her +jane +memory +moneydj +##tical +human +12306 +していると +##m2 +coc +miacare +##mn +tmt +##core +vim +kk +##may +fan +target +use +too +338 +435 +2050 +867 +737 +fast +##2c +services +##ope +omega +energy +##わ +pinkoi +1a +##なから +##rain +jackson +##ement +##シャンルの +374 +366 +そんな +p9 +rd +##ᆨ +1111 +##tier +##vic +zone +##│ +385 +690 +dl +isofix +cpa +m4 +322 +kimi +めて +davis +##lay +lulu +##uck +050 +weeks +qs +##hop +920 +##n +ae +##ear +~5 +eia +405 +##fly +korea +jpeg +boost +##ship +small +##リア +1860 +eur +297 +425 +valley +##iel +simple +##ude +rn +k2 +##ena +されます +non +patrick +しているから +##ナー +feed +5757 +30g +process +well +qqmei +##thing +they +aws +lu +pink +##ters +##kin +または +board +##vertisement +wine +##ien +unicode +##dge +r1 +359 +##tant +いを +##twitter +##3c +cool1 +される +##れて +##l +isp +##012 +standard +45㎡2 +402 +##150 +matt +##fu +326 +##iner +googlemsn +pixnetfacebookyahoo +##ラン +x7 +886 +##uce +メーカー +sao +##ev +##きました +##file +9678 +403 +xddd +shirt +6l +##rio +##hat +3mm +givenchy +ya +bang +##lio +monday +crystal +ロクイン +##abc +336 +head +890 +ubuntuforumwikilinuxpastechat +##vc +##~20 +##rity +cnc +7866 +ipv6 +null +1897 +##ost +yang +imsean +tiger +##fet +##ンス +352 +##= +dji +327 +ji +maria +##come +##んて +foundation +3100 +##beth +##なった +1m +601 +active +##aft +##don +3p +sr +349 +emma +##khz +living +415 +353 +1889 +341 +709 +457 +sas +x6 +##face +pptv +x4 +##mate +han +sophie +##jing +337 +fifa +##mand +other +sale +inwedding +##gn +てきちゃいます +##mmy +##pmlast +bad +nana +nbc +してみてくたさいね 
+なとはお +##wu +##かあります +##あ +note7 +single +##340 +せからこ +してくたさい♪この +しにはとんとんワークケートを +するとあなたにもっとマッチした +ならワークケートへ +もみつかっちゃうかも +ワークケートの +##bel +window +##dio +##ht +union +age +382 +14 +##ivity +##y +コメント +domain +neo +##isa +##lter +5k +f5 +steven +##cts +powerpoint +tft +self +g2 +ft +##テル +zol +##act +mwc +381 +343 +もう +nbapop +408 +てある +eds +ace +##room +previous +author +tomtom +il +##ets +hu +financial +☆☆☆ +っています +bp +5t +chi +1gb +##hg +fairmont +cross +008 +gay +h2 +function +##けて +356 +also +1b +625 +##ータ +##raph +1894 +3~5 +##ils +i3 +334 +avenue +##host +による +##bon +##tsu +message +navigation +50g +fintech +h6 +##ことを +8cm +##ject +##vas +##firm +credit +##wf +xxxx +form +##nor +##space +huawei +plan +json +sbl +##dc +machine +921 +392 +wish +##120 +##sol +windows7 +edward +##ために +development +washington +##nsis +lo +818 +##sio +##ym +##bor +planet +##~8 +##wt +ieee +gpa +##めて +camp +ann +gm +##tw +##oka +connect +##rss +##work +##atus +wall +chicken +soul +2mm +##times +fa +##ather +##cord +009 +##eep +hitachi +gui +harry +##pan +e1 +disney +##press +##ーション +wind +386 +frigidaire +##tl +liu +hsu +332 +basic +von +ev +いた +てきる +スホンサーサイト +learning +##ull +expedia +archives +change +##wei +santa +cut +ins +6gb +turbo +brand +cf1 +508 +004 +return +747 +##rip +h1 +##nis +##をこ +128gb +##にお +3t +application +しており +emc +rx +##oon +384 +quick +412 +15058 +wilson +wing +chapter +##bug +beyond +##cms +##dar +##oh +zoom +e2 +trip +sb +##nba +rcep +342 +aspx +ci +080 +gc +gnu +める +##count +advanced +dance +dv +##url +##ging +367 +8591 +am09 +shadow +battle +346 +##i +##cia +##という +emily +##のてす +##tation +host +ff +techorz +sars +##mini +##mporary +##ering +nc +4200 +798 +##next +cma +##mbps +##gas +##ift +##dot +##ィ +455 +##~17 +amana +##りの +426 +##ros +ir +00㎡1 +##eet +##ible +##↓ +710 +ˋ▽ˊ +##aka +dcs +iq +##v +l1 +##lor +maggie +##011 +##iu +588 +##~1 +830 +##gt +1tb +articles +create +##burg +##iki +database +fantasy +##rex +##cam +dlc +dean +##you +hard +path +gaming +victoria +maps +cb +##lee +##itor +overchicstoretvhome +systems +##xt +416 +p3 +sarah +760 +##nan +407 +486 +x9 +install +second +626 +##ann +##ph +##rcle +##nic +860 +##nar +ec +##とう +768 +metro +chocolate +##rian +~4 +##table +##しています +skin +##sn +395 +mountain +##0mm +inparadise +6m +7x24 +ib +4800 +##jia +eeworld +creative +g5 +g3 +357 +parker +ecfa +village +からの +18000 +sylvia +サーヒス +hbl +##ques +##onsored +##x2 +##きます +##v4 +##tein +ie6 +383 +##stack +389 +ver +##ads +##baby +sound +bbe +##110 +##lone +##uid +ads +022 +gundam +351 +thinkpad +006 +scrum +match +##ave +mems +##470 +##oy +##なりました +##talk +glass +lamigo +span +##eme +job +##a5 +jay +wade +kde +498 +##lace +ocean +tvg +##covery +##r3 +##ners +##rea +junior +think +##aine +cover +##ision +##sia +↓↓ +##bow +msi +413 +458 +406 +##love +711 +801 +soft +z2 +##pl +456 +1840 +mobil +mind +##uy +427 +nginx +##oi +めた +##rr +6221 +##mple +##sson +##ーシてす +371 +##nts +91tv +comhd +crv3000 +##uard +1868 +397 +deep +lost +field +gallery +##bia +rate +spf +redis +traction +930 +icloud +011 +なら +fe +jose +372 +##tory +into +sohu +fx +899 +379 +kicstart2 +##hia +すく +##~3 +##sit +ra +24 +##walk +##xure +500g +##pact +pacific +xa +natural +carlo +##250 +##walker +1850 +##can +cto +gigi +516 +##サー +pen +##hoo +ob +matlab +##b +##yy +13913459 +##iti +mango +##bbs +sense +c5 +oxford +##ニア +walker +jennifer +##ola +course +##bre +701 +##pus +##rder +lucky +075 +##ぁ +ivy +なお +##nia +sotheby +side +##ugh +joy +##orage +##ush +##bat +##dt +364 +r9 +##2d +##gio +511 +country +wear 
+##lax +##~7 +##moon +393 +seven +study +411 +348 +lonzo +8k +##ェ +evolution +##イフ +##kk +gs +kd +##レス +arduino +344 +b12 +##lux +arpg +##rdon +cook +##x5 +dark +five +##als +##ida +とても +sign +362 +##ちの +something +20mm +##nda +387 +##posted +fresh +tf +1870 +422 +cam +##mine +##skip +##form +##ssion +education +394 +##tee +dyson +stage +##jie +want +##night +epson +pack +あります +##ppy +テリヘル +##█ +wd +##eh +##rence +left +##lvin +golden +mhz +discovery +##trix +##n2 +loft +##uch +##dra +##sse +speed +~1 +1mdb +sorry +welcome +##urn +wave +gaga +##lmer +teddy +##160 +トラックハック +せよ +611 +##f2016 +378 +rp +##sha +rar +##あなたに +##きた +840 +holiday +##ュー +373 +074 +##vg +##nos +##rail +gartner +gi +6p +##dium +kit +488 +b3 +eco +##ろう +20g +sean +##stone +autocad +nu +##np +f16 +write +029 +m5 +##ias +images +atp +##dk +fsm +504 +1350 +ve +52kb +##xxx +##のに +##cake +414 +unit +lim +ru +1v +##ification +published +angela +16g +analytics +ak +##q +##nel +gmt +##icon +again +##₂ +##bby +ios11 +445 +かこさいます +waze +いてす +##ハ +9985 +##ust +##ティー +framework +##007 +iptv +delete +52sykb +cl +wwdc +027 +30cm +##fw +##ての +1389 +##xon +brandt +##ses +##dragon +tc +vetements +anne +monte +modern +official +##へて +##ere +##nne +##oud +もちろん +50 +etnews +##a2 +##graphy +421 +863 +##ちゃん +444 +##rtex +##てお +l2 +##gma +mount +ccd +たと +archive +morning +tan +ddos +e7 +##ホ +day4 +##ウ +gis +453 +its +495 +factory +bruce +pg +##ito +ってくたさい +guest +cdma +##lling +536 +n3 +しかし +3~4 +mega +eyes +ro +13 +women +dac +church +##jun +singapore +##facebook +6991 +starbucks +##tos +##stin +##shine +zen +##mu +tina +20℃ +1893 +##たけて +503 +465 +request +##gence +qt +##っ +1886 +347 +363 +q7 +##zzi +diary +##tore +409 +##ead +468 +cst +##osa +canada +agent +va +##jiang +##ちは +##ーク +##lam +sg +##nix +##sday +##よって +g6 +##master +bing +##zl +charlie +16 +8mm +nb40 +##ーン +thai +##ルフ +ln284ct +##itz +##2f +bonnie +##food +##lent +originals +##stro +##lts +418 +∟∣ +##bscribe +children +ntd +yesstyle +##かも +hmv +##tment +d5 +2cm +arts +sms +##pn +##я +##いい +topios9 +539 +lifestyle +virtual +##ague +xz +##deo +muji +024 +unt +##nnis +##ᅩ +faq1 +1884 +396 +##ette +fly +64㎡ +はしめまして +441 +curry +##pop +のこ +release +##← +##◆◆ +##cast +073 +ありな +500ml +##ews +5c +##stle +ios7 +##ima +787 +dog +lenovo +##r4 +roger +013 +cbs +vornado +100m +417 +##desk +##クok +##ald +1867 +9595 +2900 +##van +oil +##x +some +break +common +##jy +##lines +g7 +twice +419 +ella +nano +belle +にこ +##mes +##self +##note +jb +##ことかてきます +benz +##との +##ova +451 +save +##wing +##ますのて +kai +りは +##hua +##rect +rainer +##unge +448 +##0m +adsl +##かな +guestname +##uma +##kins +##zu +tokichoi +##price +county +##med +##mus +rmk +391 +address +vm +えて +openload +##group +##hin +##iginal +amg +urban +##oz +jobs +emi +##public +beautiful +##sch +album +##dden +##bell +jerry +works +hostel +miller +##drive +##rmin +##10 +376 +boot +828 +##370 +##fx +##cm~ +1885 +##nome +##ctionary +##oman +##lish +##cr +##hm +433 +##how +432 +francis +xi +c919 +b5 +evernote +##uc +vga +##3000 +coupe +##urg +##cca +##uality +019 +6g +れる +multi +##また +##ett +em +hey +##ani +##tax +##rma +inside +than +740 +leonnhurt +##jin +ict +れた +bird +notes +200mm +くの +##dical +##lli +result +442 +iu +ee +438 +smap +gopro +##last +yin +pure +998 +32g +けた +5kg +##dan +##rame +mama +##oot +bean +marketing +##hur +2l +bella +sync +xuite +##ground +515 +discuz +##getrelax +##ince +##bay +##5s +cj +##イス +gmat +apt +##pass +jing +##rix +c4 +rich +##とても +niusnews +##ello +bag +770 +##eting +##mobile +18 +culture +015 +##のてすか 
+377 +1020 +area +##ience +616 +details +gp +universal +silver +dit +はお +private +ddd +u11 +kanshu +##ified +fung +##nny +dx +##520 +tai +475 +023 +##fr +##lean +3s +##pin +429 +##rin +25000 +ly +rick +##bility +usb3 +banner +##baru +##gion +metal +dt +vdf +1871 +karl +qualcomm +bear +1010 +oldid +ian +jo +##tors +population +##ernel +1882 +mmorpg +##mv +##bike +603 +##© +ww +friend +##ager +exhibition +##del +##pods +fpx +structure +##free +##tings +kl +##rley +##copyright +##mma +california +3400 +orange +yoga +4l +canmake +honey +##anda +##コメント +595 +nikkie +##ルハイト +dhl +publishing +##mall +##gnet +20cm +513 +##クセス +##┅ +e88 +970 +##dog +fishbase +##! +##" +### +##$ +##% +##& +##' +##( +##) +##* +##+ +##, +##- +##. +##/ +##: +##; +##< +##= +##> +##? +##@ +##[ +##\ +##] +##^ +##_ +##{ +##| +##} +##~ +##£ +##¤ +##¥ +##§ +##« +##± +##³ +##µ +##· +##¹ +##º +##» +##¼ +##ß +##æ +##÷ +##ø +##đ +##ŋ +##ɔ +##ə +##ɡ +##ʰ +##ˇ +##ˈ +##ˊ +##ˋ +##ˍ +##ː +##˙ +##˚ +##ˢ +##α +##β +##γ +##δ +##ε +##η +##θ +##ι +##κ +##λ +##μ +##ν +##ο +##π +##ρ +##ς +##σ +##τ +##υ +##φ +##χ +##ψ +##б +##в +##г +##д +##е +##ж +##з +##к +##л +##м +##н +##о +##п +##р +##с +##т +##у +##ф +##х +##ц +##ч +##ш +##ы +##ь +##і +##ا +##ب +##ة +##ت +##د +##ر +##س +##ع +##ل +##م +##ن +##ه +##و +##ي +##۩ +##ก +##ง +##น +##ม +##ย +##ร +##อ +##า +##เ +##๑ +##་ +##ღ +##ᄀ +##ᄁ +##ᄂ +##ᄃ +##ᄅ +##ᄆ +##ᄇ +##ᄈ +##ᄉ +##ᄋ +##ᄌ +##ᄎ +##ᄏ +##ᄐ +##ᄑ +##ᄒ +##ᅢ +##ᅣ +##ᅥ +##ᅦ +##ᅧ +##ᅨ +##ᅪ +##ᅬ +##ᅭ +##ᅮ +##ᅯ +##ᅲ +##ᅳ +##ᅴ +##ᆷ +##ᆸ +##ᆺ +##ᆻ +##ᗜ +##ᵃ +##ᵉ +##ᵍ +##ᵏ +##ᵐ +##ᵒ +##ᵘ +##‖ +##„ +##† +##• +##‥ +##‧ +##
 +##‰ +##′ +##″ +##‹ +##› +##※ +##‿ +##⁄ +##ⁱ +##⁺ +##ⁿ +##₁ +##₃ +##₄ +##€ +##№ +##ⅰ +##ⅱ +##ⅲ +##ⅳ +##ⅴ +##↔ +##↗ +##↘ +##⇒ +##∀ +##− +##∕ +##∙ +##√ +##∞ +##∟ +##∠ +##∣ +##∩ +##∮ +##∶ +##∼ +##∽ +##≈ +##≒ +##≡ +##≤ +##≥ +##≦ +##≧ +##≪ +##≫ +##⊙ +##⋅ +##⋈ +##⋯ +##⌒ +##① +##② +##③ +##④ +##⑤ +##⑥ +##⑦ +##⑧ +##⑨ +##⑩ +##⑴ +##⑵ +##⑶ +##⑷ +##⑸ +##⒈ +##⒉ +##⒊ +##⒋ +##ⓒ +##ⓔ +##ⓘ +##━ +##┃ +##┆ +##┊ +##┌ +##└ +##├ +##┣ +##═ +##║ +##╚ +##╞ +##╠ +##╭ +##╮ +##╯ +##╰ +##╱ +##╳ +##▂ +##▃ +##▅ +##▇ +##▉ +##▋ +##▌ +##▍ +##▎ +##□ +##▪ +##▫ +##▬ +##△ +##▶ +##► +##▽ +##◇ +##◕ +##◠ +##◢ +##◤ +##☀ +##☕ +##☞ +##☺ +##☼ +##♀ +##♂ +##♠ +##♡ +##♣ +##♦ +##♫ +##♬ +##✈ +##✔ +##✕ +##✖ +##✦ +##✨ +##✪ +##✰ +##✿ +##❀ +##➜ +##➤ +##⦿ +##、 +##。 +##〃 +##々 +##〇 +##〈 +##〉 +##《 +##》 +##「 +##」 +##『 +##』 +##【 +##】 +##〓 +##〔 +##〕 +##〖 +##〗 +##〜 +##〝 +##〞 +##ぃ +##ぇ +##ぬ +##ふ +##ほ +##む +##ゃ +##ゅ +##ゆ +##ょ +##゜ +##ゝ +##ァ +##ゥ +##エ +##ォ +##ケ +##サ +##セ +##ソ +##ッ +##ニ +##ヌ +##ネ +##ノ +##ヘ +##モ +##ャ +##ヤ +##ュ +##ユ +##ョ +##ヨ +##ワ +##ヲ +##・ +##ヽ +##ㄅ +##ㄆ +##ㄇ +##ㄉ +##ㄋ +##ㄌ +##ㄍ +##ㄎ +##ㄏ +##ㄒ +##ㄚ +##ㄛ +##ㄞ +##ㄟ +##ㄢ +##ㄤ +##ㄥ +##ㄧ +##ㄨ +##ㆍ +##㈦ +##㊣ +##㗎 +##一 +##丁 +##七 +##万 +##丈 +##三 +##上 +##下 +##不 +##与 +##丐 +##丑 +##专 +##且 +##丕 +##世 +##丘 +##丙 +##业 +##丛 +##东 +##丝 +##丞 +##丟 +##両 +##丢 +##两 +##严 +##並 +##丧 +##丨 +##个 +##丫 +##中 +##丰 +##串 +##临 +##丶 +##丸 +##丹 +##为 +##主 +##丼 +##丽 +##举 +##丿 +##乂 +##乃 +##久 +##么 +##义 +##之 +##乌 +##乍 +##乎 +##乏 +##乐 +##乒 +##乓 +##乔 +##乖 +##乗 +##乘 +##乙 +##乜 +##九 +##乞 +##也 +##习 +##乡 +##书 +##乩 +##买 +##乱 +##乳 +##乾 +##亀 +##亂 +##了 +##予 +##争 +##事 +##二 +##于 +##亏 +##云 +##互 +##五 +##井 +##亘 +##亙 +##亚 +##些 +##亜 +##亞 +##亟 +##亡 +##亢 +##交 +##亥 +##亦 +##产 +##亨 +##亩 +##享 +##京 +##亭 +##亮 +##亲 +##亳 +##亵 +##人 +##亿 +##什 +##仁 +##仃 +##仄 +##仅 +##仆 +##仇 +##今 +##介 +##仍 +##从 +##仏 +##仑 +##仓 +##仔 +##仕 +##他 +##仗 +##付 +##仙 +##仝 +##仞 +##仟 +##代 +##令 +##以 +##仨 +##仪 +##们 +##仮 +##仰 +##仲 +##件 +##价 +##任 +##份 +##仿 +##企 +##伉 +##伊 +##伍 +##伎 +##伏 +##伐 +##休 +##伕 +##众 +##优 +##伙 +##会 +##伝 +##伞 +##伟 +##传 +##伢 +##伤 +##伦 +##伪 +##伫 +##伯 +##估 +##伴 +##伶 +##伸 +##伺 +##似 +##伽 +##佃 +##但 +##佇 +##佈 +##位 +##低 +##住 +##佐 +##佑 +##体 +##佔 +##何 +##佗 +##佘 +##余 +##佚 +##佛 +##作 +##佝 +##佞 +##佟 +##你 +##佢 +##佣 +##佤 +##佥 +##佩 +##佬 +##佯 +##佰 +##佳 +##併 +##佶 +##佻 +##佼 +##使 +##侃 +##侄 +##來 +##侈 +##例 +##侍 +##侏 +##侑 +##侖 +##侗 +##供 +##依 +##侠 +##価 +##侣 +##侥 +##侦 +##侧 +##侨 +##侬 +##侮 +##侯 +##侵 +##侶 +##侷 +##便 +##係 +##促 +##俄 +##俊 +##俎 +##俏 +##俐 +##俑 +##俗 +##俘 +##俚 +##保 +##俞 +##俟 +##俠 +##信 +##俨 +##俩 +##俪 +##俬 +##俭 +##修 +##俯 +##俱 +##俳 +##俸 +##俺 +##俾 +##倆 +##倉 +##個 +##倌 +##倍 +##倏 +##們 +##倒 +##倔 +##倖 +##倘 +##候 +##倚 +##倜 +##借 +##倡 +##値 +##倦 +##倩 +##倪 +##倫 +##倬 +##倭 +##倶 +##债 +##值 +##倾 +##偃 +##假 +##偈 +##偉 +##偌 +##偎 +##偏 +##偕 +##做 +##停 +##健 +##側 +##偵 +##偶 +##偷 +##偻 +##偽 +##偿 +##傀 +##傅 +##傍 +##傑 +##傘 +##備 +##傚 +##傢 +##傣 +##傥 +##储 +##傩 +##催 +##傭 +##傲 +##傳 +##債 +##傷 +##傻 +##傾 +##僅 +##働 +##像 +##僑 +##僕 +##僖 +##僚 +##僥 +##僧 +##僭 +##僮 +##僱 +##僵 +##價 +##僻 +##儀 +##儂 +##億 +##儆 +##儉 +##儋 +##儒 +##儕 +##儘 +##償 +##儡 +##優 +##儲 +##儷 +##儼 +##儿 +##兀 +##允 +##元 +##兄 +##充 +##兆 +##兇 +##先 +##光 +##克 +##兌 +##免 +##児 +##兑 +##兒 +##兔 +##兖 +##党 +##兜 +##兢 +##入 +##內 +##全 +##兩 +##八 +##公 +##六 +##兮 +##兰 +##共 +##兲 +##关 +##兴 +##兵 +##其 +##具 +##典 +##兹 +##养 +##兼 +##兽 +##冀 +##内 +##円 +##冇 +##冈 +##冉 +##冊 +##册 +##再 +##冏 +##冒 +##冕 +##冗 +##写 +##军 +##农 +##冠 +##冢 +##冤 +##冥 +##冨 +##冪 +##冬 +##冯 +##冰 +##冲 +##决 +##况 +##冶 +##冷 +##冻 +##冼 +##冽 +##冾 +##净 +##凄 +##准 +##凇 +##凈 +##凉 +##凋 +##凌 +##凍 +##减 +##凑 +##凛 +##凜 +##凝 +##几 +##凡 +##凤 +##処 +##凪 +##凭 +##凯 +##凰 +##凱 +##凳 +##凶 +##凸 +##凹 +##出 +##击 +##函 +##凿 +##刀 +##刁 +##刃 +##分 +##切 +##刈 +##刊 +##刍 +##刎 +##刑 +##划 +##列 +##刘 
+##则 +##刚 +##创 +##初 +##删 +##判 +##別 +##刨 +##利 +##刪 +##别 +##刮 +##到 +##制 +##刷 +##券 +##刹 +##刺 +##刻 +##刽 +##剁 +##剂 +##剃 +##則 +##剉 +##削 +##剋 +##剌 +##前 +##剎 +##剐 +##剑 +##剔 +##剖 +##剛 +##剜 +##剝 +##剣 +##剤 +##剥 +##剧 +##剩 +##剪 +##副 +##割 +##創 +##剷 +##剽 +##剿 +##劃 +##劇 +##劈 +##劉 +##劊 +##劍 +##劏 +##劑 +##力 +##劝 +##办 +##功 +##加 +##务 +##劣 +##动 +##助 +##努 +##劫 +##劭 +##励 +##劲 +##劳 +##労 +##劵 +##効 +##劾 +##势 +##勁 +##勃 +##勇 +##勉 +##勋 +##勐 +##勒 +##動 +##勖 +##勘 +##務 +##勛 +##勝 +##勞 +##募 +##勢 +##勤 +##勧 +##勳 +##勵 +##勸 +##勺 +##勻 +##勾 +##勿 +##匀 +##包 +##匆 +##匈 +##匍 +##匐 +##匕 +##化 +##北 +##匙 +##匝 +##匠 +##匡 +##匣 +##匪 +##匮 +##匯 +##匱 +##匹 +##区 +##医 +##匾 +##匿 +##區 +##十 +##千 +##卅 +##升 +##午 +##卉 +##半 +##卍 +##华 +##协 +##卑 +##卒 +##卓 +##協 +##单 +##卖 +##南 +##単 +##博 +##卜 +##卞 +##卟 +##占 +##卡 +##卢 +##卤 +##卦 +##卧 +##卫 +##卮 +##卯 +##印 +##危 +##即 +##却 +##卵 +##卷 +##卸 +##卻 +##卿 +##厂 +##厄 +##厅 +##历 +##厉 +##压 +##厌 +##厕 +##厘 +##厚 +##厝 +##原 +##厢 +##厥 +##厦 +##厨 +##厩 +##厭 +##厮 +##厲 +##厳 +##去 +##县 +##叁 +##参 +##參 +##又 +##叉 +##及 +##友 +##双 +##反 +##収 +##发 +##叔 +##取 +##受 +##变 +##叙 +##叛 +##叟 +##叠 +##叡 +##叢 +##口 +##古 +##句 +##另 +##叨 +##叩 +##只 +##叫 +##召 +##叭 +##叮 +##可 +##台 +##叱 +##史 +##右 +##叵 +##叶 +##号 +##司 +##叹 +##叻 +##叼 +##叽 +##吁 +##吃 +##各 +##吆 +##合 +##吉 +##吊 +##吋 +##同 +##名 +##后 +##吏 +##吐 +##向 +##吒 +##吓 +##吕 +##吖 +##吗 +##君 +##吝 +##吞 +##吟 +##吠 +##吡 +##否 +##吧 +##吨 +##吩 +##含 +##听 +##吭 +##吮 +##启 +##吱 +##吳 +##吴 +##吵 +##吶 +##吸 +##吹 +##吻 +##吼 +##吽 +##吾 +##呀 +##呂 +##呃 +##呆 +##呈 +##告 +##呋 +##呎 +##呐 +##呓 +##呕 +##呗 +##员 +##呛 +##呜 +##呢 +##呤 +##呦 +##周 +##呱 +##呲 +##味 +##呵 +##呷 +##呸 +##呻 +##呼 +##命 +##咀 +##咁 +##咂 +##咄 +##咆 +##咋 +##和 +##咎 +##咏 +##咐 +##咒 +##咔 +##咕 +##咖 +##咗 +##咘 +##咙 +##咚 +##咛 +##咣 +##咤 +##咦 +##咧 +##咨 +##咩 +##咪 +##咫 +##咬 +##咭 +##咯 +##咱 +##咲 +##咳 +##咸 +##咻 +##咽 +##咿 +##哀 +##品 +##哂 +##哄 +##哆 +##哇 +##哈 +##哉 +##哋 +##哌 +##响 +##哎 +##哏 +##哐 +##哑 +##哒 +##哔 +##哗 +##哟 +##員 +##哥 +##哦 +##哧 +##哨 +##哩 +##哪 +##哭 +##哮 +##哲 +##哺 +##哼 +##哽 +##唁 +##唄 +##唆 +##唇 +##唉 +##唏 +##唐 +##唑 +##唔 +##唠 +##唤 +##唧 +##唬 +##售 +##唯 +##唰 +##唱 +##唳 +##唷 +##唸 +##唾 +##啃 +##啄 +##商 +##啉 +##啊 +##問 +##啓 +##啕 +##啖 +##啜 +##啞 +##啟 +##啡 +##啤 +##啥 +##啦 +##啧 +##啪 +##啫 +##啬 +##啮 +##啰 +##啱 +##啲 +##啵 +##啶 +##啷 +##啸 +##啻 +##啼 +##啾 +##喀 +##喂 +##喃 +##善 +##喆 +##喇 +##喉 +##喊 +##喋 +##喎 +##喏 +##喔 +##喘 +##喙 +##喚 +##喜 +##喝 +##喟 +##喧 +##喪 +##喫 +##喬 +##單 +##喰 +##喱 +##喲 +##喳 +##喵 +##営 +##喷 +##喹 +##喺 +##喻 +##喽 +##嗅 +##嗆 +##嗇 +##嗎 +##嗑 +##嗒 +##嗓 +##嗔 +##嗖 +##嗚 +##嗜 +##嗝 +##嗟 +##嗡 +##嗣 +##嗤 +##嗦 +##嗨 +##嗪 +##嗬 +##嗯 +##嗰 +##嗲 +##嗳 +##嗶 +##嗷 +##嗽 +##嘀 +##嘅 +##嘆 +##嘈 +##嘉 +##嘌 +##嘍 +##嘎 +##嘔 +##嘖 +##嘗 +##嘘 +##嘚 +##嘛 +##嘜 +##嘞 +##嘟 +##嘢 +##嘣 +##嘤 +##嘧 +##嘩 +##嘭 +##嘮 +##嘯 +##嘰 +##嘱 +##嘲 +##嘴 +##嘶 +##嘸 +##嘹 +##嘻 +##嘿 +##噁 +##噌 +##噎 +##噓 +##噔 +##噗 +##噙 +##噜 +##噠 +##噢 +##噤 +##器 +##噩 +##噪 +##噬 +##噱 +##噴 +##噶 +##噸 +##噹 +##噻 +##噼 +##嚀 +##嚇 +##嚎 +##嚏 +##嚐 +##嚓 +##嚕 +##嚟 +##嚣 +##嚥 +##嚨 +##嚮 +##嚴 +##嚷 +##嚼 +##囂 +##囉 +##囊 +##囍 +##囑 +##囔 +##囗 +##囚 +##四 +##囝 +##回 +##囟 +##因 +##囡 +##团 +##団 +##囤 +##囧 +##囪 +##囫 +##园 +##困 +##囱 +##囲 +##図 +##围 +##囹 +##固 +##国 +##图 +##囿 +##圃 +##圄 +##圆 +##圈 +##國 +##圍 +##圏 +##園 +##圓 +##圖 +##團 +##圜 +##土 +##圣 +##圧 +##在 +##圩 +##圭 +##地 +##圳 +##场 +##圻 +##圾 +##址 +##坂 +##均 +##坊 +##坍 +##坎 +##坏 +##坐 +##坑 +##块 +##坚 +##坛 +##坝 +##坞 +##坟 +##坠 +##坡 +##坤 +##坦 +##坨 +##坪 +##坯 +##坳 +##坵 +##坷 +##垂 +##垃 +##垄 +##型 +##垒 +##垚 +##垛 +##垠 +##垢 +##垣 +##垦 +##垩 +##垫 +##垭 +##垮 +##垵 +##埂 +##埃 +##埋 +##城 +##埔 +##埕 +##埗 +##域 +##埠 +##埤 +##埵 +##執 +##埸 +##培 +##基 +##埼 +##堀 +##堂 +##堃 +##堅 +##堆 +##堇 +##堑 +##堕 +##堙 +##堡 +##堤 +##堪 +##堯 +##堰 +##報 +##場 +##堵 +##堺 +##堿 +##塊 +##塌 +##塑 +##塔 +##塗 +##塘 +##塚 +##塞 +##塢 +##塩 +##填 +##塬 +##塭 +##塵 +##塾 +##墀 +##境 +##墅 +##墉 +##墊 +##墒 
+##墓 +##増 +##墘 +##墙 +##墜 +##增 +##墟 +##墨 +##墩 +##墮 +##墳 +##墻 +##墾 +##壁 +##壅 +##壆 +##壇 +##壊 +##壑 +##壓 +##壕 +##壘 +##壞 +##壟 +##壢 +##壤 +##壩 +##士 +##壬 +##壮 +##壯 +##声 +##売 +##壳 +##壶 +##壹 +##壺 +##壽 +##处 +##备 +##変 +##复 +##夏 +##夔 +##夕 +##外 +##夙 +##多 +##夜 +##够 +##夠 +##夢 +##夥 +##大 +##天 +##太 +##夫 +##夭 +##央 +##夯 +##失 +##头 +##夷 +##夸 +##夹 +##夺 +##夾 +##奂 +##奄 +##奇 +##奈 +##奉 +##奋 +##奎 +##奏 +##奐 +##契 +##奔 +##奕 +##奖 +##套 +##奘 +##奚 +##奠 +##奢 +##奥 +##奧 +##奪 +##奬 +##奮 +##女 +##奴 +##奶 +##奸 +##她 +##好 +##如 +##妃 +##妄 +##妆 +##妇 +##妈 +##妊 +##妍 +##妒 +##妓 +##妖 +##妘 +##妙 +##妝 +##妞 +##妣 +##妤 +##妥 +##妨 +##妩 +##妪 +##妮 +##妲 +##妳 +##妹 +##妻 +##妾 +##姆 +##姉 +##姊 +##始 +##姍 +##姐 +##姑 +##姒 +##姓 +##委 +##姗 +##姚 +##姜 +##姝 +##姣 +##姥 +##姦 +##姨 +##姪 +##姫 +##姬 +##姹 +##姻 +##姿 +##威 +##娃 +##娄 +##娅 +##娆 +##娇 +##娉 +##娑 +##娓 +##娘 +##娛 +##娜 +##娟 +##娠 +##娣 +##娥 +##娩 +##娱 +##娲 +##娴 +##娶 +##娼 +##婀 +##婁 +##婆 +##婉 +##婊 +##婕 +##婚 +##婢 +##婦 +##婧 +##婪 +##婭 +##婴 +##婵 +##婶 +##婷 +##婺 +##婿 +##媒 +##媚 +##媛 +##媞 +##媧 +##媲 +##媳 +##媽 +##媾 +##嫁 +##嫂 +##嫉 +##嫌 +##嫑 +##嫔 +##嫖 +##嫘 +##嫚 +##嫡 +##嫣 +##嫦 +##嫩 +##嫲 +##嫵 +##嫻 +##嬅 +##嬉 +##嬌 +##嬗 +##嬛 +##嬢 +##嬤 +##嬪 +##嬰 +##嬴 +##嬷 +##嬸 +##嬿 +##孀 +##孃 +##子 +##孑 +##孔 +##孕 +##孖 +##字 +##存 +##孙 +##孚 +##孛 +##孜 +##孝 +##孟 +##孢 +##季 +##孤 +##学 +##孩 +##孪 +##孫 +##孬 +##孰 +##孱 +##孳 +##孵 +##學 +##孺 +##孽 +##孿 +##宁 +##它 +##宅 +##宇 +##守 +##安 +##宋 +##完 +##宏 +##宓 +##宕 +##宗 +##官 +##宙 +##定 +##宛 +##宜 +##宝 +##实 +##実 +##宠 +##审 +##客 +##宣 +##室 +##宥 +##宦 +##宪 +##宫 +##宮 +##宰 +##害 +##宴 +##宵 +##家 +##宸 +##容 +##宽 +##宾 +##宿 +##寂 +##寄 +##寅 +##密 +##寇 +##富 +##寐 +##寒 +##寓 +##寛 +##寝 +##寞 +##察 +##寡 +##寢 +##寥 +##實 +##寧 +##寨 +##審 +##寫 +##寬 +##寮 +##寰 +##寵 +##寶 +##寸 +##对 +##寺 +##寻 +##导 +##対 +##寿 +##封 +##専 +##射 +##将 +##將 +##專 +##尉 +##尊 +##尋 +##對 +##導 +##小 +##少 +##尔 +##尕 +##尖 +##尘 +##尚 +##尝 +##尤 +##尧 +##尬 +##就 +##尴 +##尷 +##尸 +##尹 +##尺 +##尻 +##尼 +##尽 +##尾 +##尿 +##局 +##屁 +##层 +##屄 +##居 +##屆 +##屈 +##屉 +##届 +##屋 +##屌 +##屍 +##屎 +##屏 +##屐 +##屑 +##展 +##屜 +##属 +##屠 +##屡 +##屢 +##層 +##履 +##屬 +##屯 +##山 +##屹 +##屿 +##岀 +##岁 +##岂 +##岌 +##岐 +##岑 +##岔 +##岖 +##岗 +##岘 +##岙 +##岚 +##岛 +##岡 +##岩 +##岫 +##岬 +##岭 +##岱 +##岳 +##岷 +##岸 +##峇 +##峋 +##峒 +##峙 +##峡 +##峤 +##峥 +##峦 +##峨 +##峪 +##峭 +##峯 +##峰 +##峴 +##島 +##峻 +##峽 +##崁 +##崂 +##崆 +##崇 +##崎 +##崑 +##崔 +##崖 +##崗 +##崙 +##崛 +##崧 +##崩 +##崭 +##崴 +##崽 +##嵇 +##嵊 +##嵋 +##嵌 +##嵐 +##嵘 +##嵩 +##嵬 +##嵯 +##嶂 +##嶄 +##嶇 +##嶋 +##嶙 +##嶺 +##嶼 +##嶽 +##巅 +##巍 +##巒 +##巔 +##巖 +##川 +##州 +##巡 +##巢 +##工 +##左 +##巧 +##巨 +##巩 +##巫 +##差 +##己 +##已 +##巳 +##巴 +##巷 +##巻 +##巽 +##巾 +##巿 +##币 +##市 +##布 +##帅 +##帆 +##师 +##希 +##帐 +##帑 +##帕 +##帖 +##帘 +##帚 +##帛 +##帜 +##帝 +##帥 +##带 +##帧 +##師 +##席 +##帮 +##帯 +##帰 +##帳 +##帶 +##帷 +##常 +##帼 +##帽 +##幀 +##幂 +##幄 +##幅 +##幌 +##幔 +##幕 +##幟 +##幡 +##幢 +##幣 +##幫 +##干 +##平 +##年 +##并 +##幸 +##幹 +##幺 +##幻 +##幼 +##幽 +##幾 +##广 +##庁 +##広 +##庄 +##庆 +##庇 +##床 +##序 +##庐 +##库 +##应 +##底 +##庖 +##店 +##庙 +##庚 +##府 +##庞 +##废 +##庠 +##度 +##座 +##庫 +##庭 +##庵 +##庶 +##康 +##庸 +##庹 +##庾 +##廁 +##廂 +##廃 +##廈 +##廉 +##廊 +##廓 +##廖 +##廚 +##廝 +##廟 +##廠 +##廢 +##廣 +##廬 +##廳 +##延 +##廷 +##建 +##廿 +##开 +##弁 +##异 +##弃 +##弄 +##弈 +##弊 +##弋 +##式 +##弑 +##弒 +##弓 +##弔 +##引 +##弗 +##弘 +##弛 +##弟 +##张 +##弥 +##弦 +##弧 +##弩 +##弭 +##弯 +##弱 +##張 +##強 +##弹 +##强 +##弼 +##弾 +##彅 +##彆 +##彈 +##彌 +##彎 +##归 +##当 +##录 +##彗 +##彙 +##彝 +##形 +##彤 +##彥 +##彦 +##彧 +##彩 +##彪 +##彫 +##彬 +##彭 +##彰 +##影 +##彷 +##役 +##彻 +##彼 +##彿 +##往 +##征 +##径 +##待 +##徇 +##很 +##徉 +##徊 +##律 +##後 +##徐 +##徑 +##徒 +##従 +##徕 +##得 +##徘 +##徙 +##徜 +##從 +##徠 +##御 +##徨 +##復 +##循 +##徬 +##微 +##徳 +##徴 +##徵 +##德 +##徹 +##徼 +##徽 +##心 +##必 +##忆 +##忌 +##忍 +##忏 +##忐 +##忑 +##忒 +##忖 +##志 +##忘 +##忙 +##応 +##忠 +##忡 +##忤 +##忧 +##忪 +##快 +##忱 +##念 +##忻 +##忽 +##忿 +##怀 
+##态 +##怂 +##怅 +##怆 +##怎 +##怏 +##怒 +##怔 +##怕 +##怖 +##怙 +##怜 +##思 +##怠 +##怡 +##急 +##怦 +##性 +##怨 +##怪 +##怯 +##怵 +##总 +##怼 +##恁 +##恃 +##恆 +##恋 +##恍 +##恐 +##恒 +##恕 +##恙 +##恚 +##恢 +##恣 +##恤 +##恥 +##恨 +##恩 +##恪 +##恫 +##恬 +##恭 +##息 +##恰 +##恳 +##恵 +##恶 +##恸 +##恺 +##恻 +##恼 +##恿 +##悄 +##悅 +##悉 +##悌 +##悍 +##悔 +##悖 +##悚 +##悟 +##悠 +##患 +##悦 +##您 +##悩 +##悪 +##悬 +##悯 +##悱 +##悲 +##悴 +##悵 +##悶 +##悸 +##悻 +##悼 +##悽 +##情 +##惆 +##惇 +##惊 +##惋 +##惑 +##惕 +##惘 +##惚 +##惜 +##惟 +##惠 +##惡 +##惦 +##惧 +##惨 +##惩 +##惫 +##惬 +##惭 +##惮 +##惯 +##惰 +##惱 +##想 +##惴 +##惶 +##惹 +##惺 +##愁 +##愆 +##愈 +##愉 +##愍 +##意 +##愕 +##愚 +##愛 +##愜 +##感 +##愣 +##愤 +##愧 +##愫 +##愷 +##愿 +##慄 +##慈 +##態 +##慌 +##慎 +##慑 +##慕 +##慘 +##慚 +##慟 +##慢 +##慣 +##慧 +##慨 +##慫 +##慮 +##慰 +##慳 +##慵 +##慶 +##慷 +##慾 +##憂 +##憊 +##憋 +##憎 +##憐 +##憑 +##憔 +##憚 +##憤 +##憧 +##憨 +##憩 +##憫 +##憬 +##憲 +##憶 +##憾 +##懂 +##懇 +##懈 +##應 +##懊 +##懋 +##懑 +##懒 +##懦 +##懲 +##懵 +##懶 +##懷 +##懸 +##懺 +##懼 +##懾 +##懿 +##戀 +##戈 +##戊 +##戌 +##戍 +##戎 +##戏 +##成 +##我 +##戒 +##戕 +##或 +##战 +##戚 +##戛 +##戟 +##戡 +##戦 +##截 +##戬 +##戮 +##戰 +##戲 +##戳 +##戴 +##戶 +##户 +##戸 +##戻 +##戾 +##房 +##所 +##扁 +##扇 +##扈 +##扉 +##手 +##才 +##扎 +##扑 +##扒 +##打 +##扔 +##払 +##托 +##扛 +##扣 +##扦 +##执 +##扩 +##扪 +##扫 +##扬 +##扭 +##扮 +##扯 +##扰 +##扱 +##扳 +##扶 +##批 +##扼 +##找 +##承 +##技 +##抄 +##抉 +##把 +##抑 +##抒 +##抓 +##投 +##抖 +##抗 +##折 +##抚 +##抛 +##抜 +##択 +##抟 +##抠 +##抡 +##抢 +##护 +##报 +##抨 +##披 +##抬 +##抱 +##抵 +##抹 +##押 +##抽 +##抿 +##拂 +##拄 +##担 +##拆 +##拇 +##拈 +##拉 +##拋 +##拌 +##拍 +##拎 +##拐 +##拒 +##拓 +##拔 +##拖 +##拗 +##拘 +##拙 +##拚 +##招 +##拜 +##拟 +##拡 +##拢 +##拣 +##拥 +##拦 +##拧 +##拨 +##择 +##括 +##拭 +##拮 +##拯 +##拱 +##拳 +##拴 +##拷 +##拼 +##拽 +##拾 +##拿 +##持 +##挂 +##指 +##挈 +##按 +##挎 +##挑 +##挖 +##挙 +##挚 +##挛 +##挝 +##挞 +##挟 +##挠 +##挡 +##挣 +##挤 +##挥 +##挨 +##挪 +##挫 +##振 +##挲 +##挹 +##挺 +##挽 +##挾 +##捂 +##捅 +##捆 +##捉 +##捋 +##捌 +##捍 +##捎 +##捏 +##捐 +##捕 +##捞 +##损 +##捡 +##换 +##捣 +##捧 +##捨 +##捩 +##据 +##捱 +##捲 +##捶 +##捷 +##捺 +##捻 +##掀 +##掂 +##掃 +##掇 +##授 +##掉 +##掌 +##掏 +##掐 +##排 +##掖 +##掘 +##掙 +##掛 +##掠 +##採 +##探 +##掣 +##接 +##控 +##推 +##掩 +##措 +##掬 +##掰 +##掲 +##掳 +##掴 +##掷 +##掸 +##掺 +##揀 +##揃 +##揄 +##揆 +##揉 +##揍 +##描 +##提 +##插 +##揖 +##揚 +##換 +##握 +##揣 +##揩 +##揪 +##揭 +##揮 +##援 +##揶 +##揸 +##揹 +##揽 +##搀 +##搁 +##搂 +##搅 +##損 +##搏 +##搐 +##搓 +##搔 +##搖 +##搗 +##搜 +##搞 +##搡 +##搪 +##搬 +##搭 +##搵 +##搶 +##携 +##搽 +##摀 +##摁 +##摄 +##摆 +##摇 +##摈 +##摊 +##摒 +##摔 +##摘 +##摞 +##摟 +##摧 +##摩 +##摯 +##摳 +##摸 +##摹 +##摺 +##摻 +##撂 +##撃 +##撅 +##撇 +##撈 +##撐 +##撑 +##撒 +##撓 +##撕 +##撚 +##撞 +##撤 +##撥 +##撩 +##撫 +##撬 +##播 +##撮 +##撰 +##撲 +##撵 +##撷 +##撸 +##撻 +##撼 +##撿 +##擀 +##擁 +##擂 +##擄 +##擅 +##擇 +##擊 +##擋 +##操 +##擎 +##擒 +##擔 +##擘 +##據 +##擞 +##擠 +##擡 +##擢 +##擦 +##擬 +##擰 +##擱 +##擲 +##擴 +##擷 +##擺 +##擼 +##擾 +##攀 +##攏 +##攒 +##攔 +##攘 +##攙 +##攜 +##攝 +##攞 +##攢 +##攣 +##攤 +##攥 +##攪 +##攫 +##攬 +##支 +##收 +##攸 +##改 +##攻 +##放 +##政 +##故 +##效 +##敌 +##敍 +##敎 +##敏 +##救 +##敕 +##敖 +##敗 +##敘 +##教 +##敛 +##敝 +##敞 +##敢 +##散 +##敦 +##敬 +##数 +##敲 +##整 +##敵 +##敷 +##數 +##斂 +##斃 +##文 +##斋 +##斌 +##斎 +##斐 +##斑 +##斓 +##斗 +##料 +##斛 +##斜 +##斟 +##斡 +##斤 +##斥 +##斧 +##斩 +##斫 +##斬 +##断 +##斯 +##新 +##斷 +##方 +##於 +##施 +##旁 +##旃 +##旅 +##旋 +##旌 +##旎 +##族 +##旖 +##旗 +##无 +##既 +##日 +##旦 +##旧 +##旨 +##早 +##旬 +##旭 +##旮 +##旱 +##时 +##旷 +##旺 +##旻 +##昀 +##昂 +##昆 +##昇 +##昉 +##昊 +##昌 +##明 +##昏 +##易 +##昔 +##昕 +##昙 +##星 +##映 +##春 +##昧 +##昨 +##昭 +##是 +##昱 +##昴 +##昵 +##昶 +##昼 +##显 +##晁 +##時 +##晃 +##晉 +##晋 +##晌 +##晏 +##晒 +##晓 +##晔 +##晕 +##晖 +##晗 +##晚 +##晝 +##晞 +##晟 +##晤 +##晦 +##晨 +##晩 +##普 +##景 +##晰 +##晴 +##晶 +##晷 +##智 +##晾 +##暂 +##暄 +##暇 +##暈 +##暉 +##暌 +##暐 +##暑 +##暖 +##暗 +##暝 +##暢 +##暧 +##暨 +##暫 +##暮 +##暱 +##暴 +##暸 +##暹 +##曄 +##曆 +##曇 +##曉 +##曖 +##曙 +##曜 +##曝 +##曠 +##曦 +##曬 +##曰 
+##曲 +##曳 +##更 +##書 +##曹 +##曼 +##曾 +##替 +##最 +##會 +##月 +##有 +##朋 +##服 +##朐 +##朔 +##朕 +##朗 +##望 +##朝 +##期 +##朦 +##朧 +##木 +##未 +##末 +##本 +##札 +##朮 +##术 +##朱 +##朴 +##朵 +##机 +##朽 +##杀 +##杂 +##权 +##杆 +##杈 +##杉 +##李 +##杏 +##材 +##村 +##杓 +##杖 +##杜 +##杞 +##束 +##杠 +##条 +##来 +##杨 +##杭 +##杯 +##杰 +##東 +##杳 +##杵 +##杷 +##杼 +##松 +##板 +##极 +##构 +##枇 +##枉 +##枋 +##析 +##枕 +##林 +##枚 +##果 +##枝 +##枢 +##枣 +##枪 +##枫 +##枭 +##枯 +##枰 +##枱 +##枳 +##架 +##枷 +##枸 +##柄 +##柏 +##某 +##柑 +##柒 +##染 +##柔 +##柘 +##柚 +##柜 +##柞 +##柠 +##柢 +##查 +##柩 +##柬 +##柯 +##柱 +##柳 +##柴 +##柵 +##査 +##柿 +##栀 +##栃 +##栄 +##栅 +##标 +##栈 +##栉 +##栋 +##栎 +##栏 +##树 +##栓 +##栖 +##栗 +##校 +##栩 +##株 +##样 +##核 +##根 +##格 +##栽 +##栾 +##桀 +##桁 +##桂 +##桃 +##桅 +##框 +##案 +##桉 +##桌 +##桎 +##桐 +##桑 +##桓 +##桔 +##桜 +##桠 +##桡 +##桢 +##档 +##桥 +##桦 +##桧 +##桨 +##桩 +##桶 +##桿 +##梁 +##梅 +##梆 +##梏 +##梓 +##梗 +##條 +##梟 +##梢 +##梦 +##梧 +##梨 +##梭 +##梯 +##械 +##梳 +##梵 +##梶 +##检 +##棂 +##棄 +##棉 +##棋 +##棍 +##棒 +##棕 +##棗 +##棘 +##棚 +##棟 +##棠 +##棣 +##棧 +##森 +##棱 +##棲 +##棵 +##棹 +##棺 +##椁 +##椅 +##椋 +##植 +##椎 +##椒 +##検 +##椪 +##椭 +##椰 +##椹 +##椽 +##椿 +##楂 +##楊 +##楓 +##楔 +##楚 +##楝 +##楞 +##楠 +##楣 +##楨 +##楫 +##業 +##楮 +##極 +##楷 +##楸 +##楹 +##楼 +##楽 +##概 +##榄 +##榆 +##榈 +##榉 +##榔 +##榕 +##榖 +##榛 +##榜 +##榨 +##榫 +##榭 +##榮 +##榱 +##榴 +##榷 +##榻 +##槁 +##槃 +##構 +##槌 +##槍 +##槎 +##槐 +##槓 +##様 +##槛 +##槟 +##槤 +##槭 +##槲 +##槳 +##槻 +##槽 +##槿 +##樁 +##樂 +##樊 +##樑 +##樓 +##標 +##樞 +##樟 +##模 +##樣 +##権 +##横 +##樫 +##樯 +##樱 +##樵 +##樸 +##樹 +##樺 +##樽 +##樾 +##橄 +##橇 +##橋 +##橐 +##橘 +##橙 +##機 +##橡 +##橢 +##橫 +##橱 +##橹 +##橼 +##檀 +##檄 +##檎 +##檐 +##檔 +##檗 +##檜 +##檢 +##檬 +##檯 +##檳 +##檸 +##檻 +##櫃 +##櫚 +##櫛 +##櫥 +##櫸 +##櫻 +##欄 +##權 +##欒 +##欖 +##欠 +##次 +##欢 +##欣 +##欧 +##欲 +##欸 +##欺 +##欽 +##款 +##歆 +##歇 +##歉 +##歌 +##歎 +##歐 +##歓 +##歙 +##歛 +##歡 +##止 +##正 +##此 +##步 +##武 +##歧 +##歩 +##歪 +##歯 +##歲 +##歳 +##歴 +##歷 +##歸 +##歹 +##死 +##歼 +##殁 +##殃 +##殆 +##殇 +##殉 +##殊 +##残 +##殒 +##殓 +##殖 +##殘 +##殞 +##殡 +##殤 +##殭 +##殯 +##殲 +##殴 +##段 +##殷 +##殺 +##殼 +##殿 +##毀 +##毁 +##毂 +##毅 +##毆 +##毋 +##母 +##毎 +##每 +##毒 +##毓 +##比 +##毕 +##毗 +##毘 +##毙 +##毛 +##毡 +##毫 +##毯 +##毽 +##氈 +##氏 +##氐 +##民 +##氓 +##气 +##氖 +##気 +##氙 +##氛 +##氟 +##氡 +##氢 +##氣 +##氤 +##氦 +##氧 +##氨 +##氪 +##氫 +##氮 +##氯 +##氰 +##氲 +##水 +##氷 +##永 +##氹 +##氾 +##汀 +##汁 +##求 +##汆 +##汇 +##汉 +##汎 +##汐 +##汕 +##汗 +##汙 +##汛 +##汝 +##汞 +##江 +##池 +##污 +##汤 +##汨 +##汩 +##汪 +##汰 +##汲 +##汴 +##汶 +##汹 +##決 +##汽 +##汾 +##沁 +##沂 +##沃 +##沅 +##沈 +##沉 +##沌 +##沏 +##沐 +##沒 +##沓 +##沖 +##沙 +##沛 +##沟 +##没 +##沢 +##沣 +##沥 +##沦 +##沧 +##沪 +##沫 +##沭 +##沮 +##沱 +##河 +##沸 +##油 +##治 +##沼 +##沽 +##沾 +##沿 +##況 +##泄 +##泉 +##泊 +##泌 +##泓 +##法 +##泗 +##泛 +##泞 +##泠 +##泡 +##波 +##泣 +##泥 +##注 +##泪 +##泫 +##泮 +##泯 +##泰 +##泱 +##泳 +##泵 +##泷 +##泸 +##泻 +##泼 +##泽 +##泾 +##洁 +##洄 +##洋 +##洒 +##洗 +##洙 +##洛 +##洞 +##津 +##洩 +##洪 +##洮 +##洱 +##洲 +##洵 +##洶 +##洸 +##洹 +##活 +##洼 +##洽 +##派 +##流 +##浃 +##浄 +##浅 +##浆 +##浇 +##浊 +##测 +##济 +##浏 +##浑 +##浒 +##浓 +##浔 +##浙 +##浚 +##浜 +##浣 +##浦 +##浩 +##浪 +##浬 +##浮 +##浯 +##浴 +##海 +##浸 +##涂 +##涅 +##涇 +##消 +##涉 +##涌 +##涎 +##涓 +##涔 +##涕 +##涙 +##涛 +##涝 +##涞 +##涟 +##涠 +##涡 +##涣 +##涤 +##润 +##涧 +##涨 +##涩 +##涪 +##涮 +##涯 +##液 +##涵 +##涸 +##涼 +##涿 +##淀 +##淄 +##淅 +##淆 +##淇 +##淋 +##淌 +##淑 +##淒 +##淖 +##淘 +##淙 +##淚 +##淞 +##淡 +##淤 +##淦 +##淨 +##淩 +##淪 +##淫 +##淬 +##淮 +##深 +##淳 +##淵 +##混 +##淹 +##淺 +##添 +##淼 +##清 +##済 +##渉 +##渊 +##渋 +##渍 +##渎 +##渐 +##渔 +##渗 +##渙 +##渚 +##減 +##渝 +##渠 +##渡 +##渣 +##渤 +##渥 +##渦 +##温 +##測 +##渭 +##港 +##渲 +##渴 +##游 +##渺 +##渾 +##湃 +##湄 +##湊 +##湍 +##湖 +##湘 +##湛 +##湟 +##湧 +##湫 +##湮 +##湯 +##湳 +##湾 +##湿 +##満 +##溃 +##溅 +##溉 +##溏 +##源 +##準 +##溜 +##溝 +##溟 +##溢 +##溥 +##溧 +##溪 +##溫 +##溯 +##溱 +##溴 +##溶 +##溺 +##溼 +##滁 +##滂 +##滄 +##滅 +##滇 +##滋 +##滌 +##滑 +##滓 
+##滔 +##滕 +##滙 +##滚 +##滝 +##滞 +##滟 +##满 +##滢 +##滤 +##滥 +##滦 +##滨 +##滩 +##滬 +##滯 +##滲 +##滴 +##滷 +##滸 +##滾 +##滿 +##漁 +##漂 +##漆 +##漉 +##漏 +##漓 +##演 +##漕 +##漠 +##漢 +##漣 +##漩 +##漪 +##漫 +##漬 +##漯 +##漱 +##漲 +##漳 +##漸 +##漾 +##漿 +##潆 +##潇 +##潋 +##潍 +##潑 +##潔 +##潘 +##潛 +##潜 +##潞 +##潟 +##潢 +##潤 +##潦 +##潧 +##潭 +##潮 +##潰 +##潴 +##潸 +##潺 +##潼 +##澀 +##澄 +##澆 +##澈 +##澍 +##澎 +##澗 +##澜 +##澡 +##澤 +##澧 +##澱 +##澳 +##澹 +##激 +##濁 +##濂 +##濃 +##濑 +##濒 +##濕 +##濘 +##濛 +##濟 +##濠 +##濡 +##濤 +##濫 +##濬 +##濮 +##濯 +##濱 +##濺 +##濾 +##瀅 +##瀆 +##瀉 +##瀋 +##瀏 +##瀑 +##瀕 +##瀘 +##瀚 +##瀛 +##瀝 +##瀞 +##瀟 +##瀧 +##瀨 +##瀬 +##瀰 +##瀾 +##灌 +##灏 +##灑 +##灘 +##灝 +##灞 +##灣 +##火 +##灬 +##灭 +##灯 +##灰 +##灵 +##灶 +##灸 +##灼 +##災 +##灾 +##灿 +##炀 +##炁 +##炅 +##炉 +##炊 +##炎 +##炒 +##炔 +##炕 +##炖 +##炙 +##炜 +##炫 +##炬 +##炭 +##炮 +##炯 +##炳 +##炷 +##炸 +##点 +##為 +##炼 +##炽 +##烁 +##烂 +##烃 +##烈 +##烊 +##烏 +##烘 +##烙 +##烛 +##烟 +##烤 +##烦 +##烧 +##烨 +##烩 +##烫 +##烬 +##热 +##烯 +##烷 +##烹 +##烽 +##焉 +##焊 +##焕 +##焖 +##焗 +##焘 +##焙 +##焚 +##焜 +##無 +##焦 +##焯 +##焰 +##焱 +##然 +##焼 +##煅 +##煉 +##煊 +##煌 +##煎 +##煒 +##煖 +##煙 +##煜 +##煞 +##煤 +##煥 +##煦 +##照 +##煨 +##煩 +##煮 +##煲 +##煸 +##煽 +##熄 +##熊 +##熏 +##熒 +##熔 +##熙 +##熟 +##熠 +##熨 +##熬 +##熱 +##熵 +##熹 +##熾 +##燁 +##燃 +##燄 +##燈 +##燉 +##燊 +##燎 +##燒 +##燔 +##燕 +##燙 +##燜 +##營 +##燥 +##燦 +##燧 +##燭 +##燮 +##燴 +##燻 +##燼 +##燿 +##爆 +##爍 +##爐 +##爛 +##爪 +##爬 +##爭 +##爰 +##爱 +##爲 +##爵 +##父 +##爷 +##爸 +##爹 +##爺 +##爻 +##爽 +##爾 +##牆 +##片 +##版 +##牌 +##牍 +##牒 +##牙 +##牛 +##牝 +##牟 +##牠 +##牡 +##牢 +##牦 +##牧 +##物 +##牯 +##牲 +##牴 +##牵 +##特 +##牺 +##牽 +##犀 +##犁 +##犄 +##犊 +##犍 +##犒 +##犢 +##犧 +##犬 +##犯 +##状 +##犷 +##犸 +##犹 +##狀 +##狂 +##狄 +##狈 +##狎 +##狐 +##狒 +##狗 +##狙 +##狞 +##狠 +##狡 +##狩 +##独 +##狭 +##狮 +##狰 +##狱 +##狸 +##狹 +##狼 +##狽 +##猎 +##猕 +##猖 +##猗 +##猙 +##猛 +##猜 +##猝 +##猥 +##猩 +##猪 +##猫 +##猬 +##献 +##猴 +##猶 +##猷 +##猾 +##猿 +##獄 +##獅 +##獎 +##獐 +##獒 +##獗 +##獠 +##獣 +##獨 +##獭 +##獰 +##獲 +##獵 +##獷 +##獸 +##獺 +##獻 +##獼 +##獾 +##玄 +##率 +##玉 +##王 +##玑 +##玖 +##玛 +##玟 +##玠 +##玥 +##玩 +##玫 +##玮 +##环 +##现 +##玲 +##玳 +##玷 +##玺 +##玻 +##珀 +##珂 +##珅 +##珈 +##珉 +##珊 +##珍 +##珏 +##珐 +##珑 +##珙 +##珞 +##珠 +##珣 +##珥 +##珩 +##珪 +##班 +##珮 +##珲 +##珺 +##現 +##球 +##琅 +##理 +##琇 +##琉 +##琊 +##琍 +##琏 +##琐 +##琛 +##琢 +##琥 +##琦 +##琨 +##琪 +##琬 +##琮 +##琰 +##琲 +##琳 +##琴 +##琵 +##琶 +##琺 +##琼 +##瑀 +##瑁 +##瑄 +##瑋 +##瑕 +##瑗 +##瑙 +##瑚 +##瑛 +##瑜 +##瑞 +##瑟 +##瑠 +##瑣 +##瑤 +##瑩 +##瑪 +##瑯 +##瑰 +##瑶 +##瑾 +##璀 +##璁 +##璃 +##璇 +##璉 +##璋 +##璎 +##璐 +##璜 +##璞 +##璟 +##璧 +##璨 +##環 +##璽 +##璿 +##瓊 +##瓏 +##瓒 +##瓜 +##瓢 +##瓣 +##瓤 +##瓦 +##瓮 +##瓯 +##瓴 +##瓶 +##瓷 +##甄 +##甌 +##甕 +##甘 +##甙 +##甚 +##甜 +##生 +##產 +##産 +##甥 +##甦 +##用 +##甩 +##甫 +##甬 +##甭 +##甯 +##田 +##由 +##甲 +##申 +##电 +##男 +##甸 +##町 +##画 +##甾 +##畀 +##畅 +##界 +##畏 +##畑 +##畔 +##留 +##畜 +##畝 +##畢 +##略 +##畦 +##番 +##畫 +##異 +##畲 +##畳 +##畴 +##當 +##畸 +##畹 +##畿 +##疆 +##疇 +##疊 +##疏 +##疑 +##疔 +##疖 +##疗 +##疙 +##疚 +##疝 +##疟 +##疡 +##疣 +##疤 +##疥 +##疫 +##疮 +##疯 +##疱 +##疲 +##疳 +##疵 +##疸 +##疹 +##疼 +##疽 +##疾 +##痂 +##病 +##症 +##痈 +##痉 +##痊 +##痍 +##痒 +##痔 +##痕 +##痘 +##痙 +##痛 +##痞 +##痠 +##痢 +##痣 +##痤 +##痧 +##痨 +##痪 +##痫 +##痰 +##痱 +##痴 +##痹 +##痺 +##痼 +##痿 +##瘀 +##瘁 +##瘋 +##瘍 +##瘓 +##瘘 +##瘙 +##瘟 +##瘠 +##瘡 +##瘢 +##瘤 +##瘦 +##瘧 +##瘩 +##瘪 +##瘫 +##瘴 +##瘸 +##瘾 +##療 +##癇 +##癌 +##癒 +##癖 +##癜 +##癞 +##癡 +##癢 +##癣 +##癥 +##癫 +##癬 +##癮 +##癱 +##癲 +##癸 +##発 +##登 +##發 +##白 +##百 +##皂 +##的 +##皆 +##皇 +##皈 +##皋 +##皎 +##皑 +##皓 +##皖 +##皙 +##皚 +##皮 +##皰 +##皱 +##皴 +##皺 +##皿 +##盂 +##盃 +##盅 +##盆 +##盈 +##益 +##盎 +##盏 +##盐 +##监 +##盒 +##盔 +##盖 +##盗 +##盘 +##盛 +##盜 +##盞 +##盟 +##盡 +##監 +##盤 +##盥 +##盧 +##盪 +##目 +##盯 +##盱 +##盲 +##直 +##相 +##盹 +##盼 +##盾 +##省 +##眈 +##眉 +##看 +##県 +##眙 +##眞 +##真 +##眠 +##眦 +##眨 +##眩 +##眯 +##眶 +##眷 +##眸 +##眺 +##眼 +##眾 +##着 +##睁 +##睇 
+##睏 +##睐 +##睑 +##睛 +##睜 +##睞 +##睡 +##睢 +##督 +##睥 +##睦 +##睨 +##睪 +##睫 +##睬 +##睹 +##睽 +##睾 +##睿 +##瞄 +##瞅 +##瞇 +##瞋 +##瞌 +##瞎 +##瞑 +##瞒 +##瞓 +##瞞 +##瞟 +##瞠 +##瞥 +##瞧 +##瞩 +##瞪 +##瞬 +##瞭 +##瞰 +##瞳 +##瞻 +##瞼 +##瞿 +##矇 +##矍 +##矗 +##矚 +##矛 +##矜 +##矢 +##矣 +##知 +##矩 +##矫 +##短 +##矮 +##矯 +##石 +##矶 +##矽 +##矾 +##矿 +##码 +##砂 +##砌 +##砍 +##砒 +##研 +##砖 +##砗 +##砚 +##砝 +##砣 +##砥 +##砧 +##砭 +##砰 +##砲 +##破 +##砷 +##砸 +##砺 +##砼 +##砾 +##础 +##硅 +##硐 +##硒 +##硕 +##硝 +##硫 +##硬 +##确 +##硯 +##硼 +##碁 +##碇 +##碉 +##碌 +##碍 +##碎 +##碑 +##碓 +##碗 +##碘 +##碚 +##碛 +##碟 +##碣 +##碧 +##碩 +##碰 +##碱 +##碳 +##碴 +##確 +##碼 +##碾 +##磁 +##磅 +##磊 +##磋 +##磐 +##磕 +##磚 +##磡 +##磨 +##磬 +##磯 +##磲 +##磷 +##磺 +##礁 +##礎 +##礙 +##礡 +##礦 +##礪 +##礫 +##礴 +##示 +##礼 +##社 +##祀 +##祁 +##祂 +##祇 +##祈 +##祉 +##祎 +##祐 +##祕 +##祖 +##祗 +##祚 +##祛 +##祜 +##祝 +##神 +##祟 +##祠 +##祢 +##祥 +##票 +##祭 +##祯 +##祷 +##祸 +##祺 +##祿 +##禀 +##禁 +##禄 +##禅 +##禍 +##禎 +##福 +##禛 +##禦 +##禧 +##禪 +##禮 +##禱 +##禹 +##禺 +##离 +##禽 +##禾 +##禿 +##秀 +##私 +##秃 +##秆 +##秉 +##秋 +##种 +##科 +##秒 +##秘 +##租 +##秣 +##秤 +##秦 +##秧 +##秩 +##秭 +##积 +##称 +##秸 +##移 +##秽 +##稀 +##稅 +##程 +##稍 +##税 +##稔 +##稗 +##稚 +##稜 +##稞 +##稟 +##稠 +##稣 +##種 +##稱 +##稲 +##稳 +##稷 +##稹 +##稻 +##稼 +##稽 +##稿 +##穀 +##穂 +##穆 +##穌 +##積 +##穎 +##穗 +##穢 +##穩 +##穫 +##穴 +##究 +##穷 +##穹 +##空 +##穿 +##突 +##窃 +##窄 +##窈 +##窍 +##窑 +##窒 +##窓 +##窕 +##窖 +##窗 +##窘 +##窜 +##窝 +##窟 +##窠 +##窥 +##窦 +##窨 +##窩 +##窪 +##窮 +##窯 +##窺 +##窿 +##竄 +##竅 +##竇 +##竊 +##立 +##竖 +##站 +##竜 +##竞 +##竟 +##章 +##竣 +##童 +##竭 +##端 +##競 +##竹 +##竺 +##竽 +##竿 +##笃 +##笆 +##笈 +##笋 +##笏 +##笑 +##笔 +##笙 +##笛 +##笞 +##笠 +##符 +##笨 +##第 +##笹 +##笺 +##笼 +##筆 +##等 +##筊 +##筋 +##筍 +##筏 +##筐 +##筑 +##筒 +##答 +##策 +##筛 +##筝 +##筠 +##筱 +##筲 +##筵 +##筷 +##筹 +##签 +##简 +##箇 +##箋 +##箍 +##箏 +##箐 +##箔 +##箕 +##算 +##箝 +##管 +##箩 +##箫 +##箭 +##箱 +##箴 +##箸 +##節 +##篁 +##範 +##篆 +##篇 +##築 +##篑 +##篓 +##篙 +##篝 +##篠 +##篡 +##篤 +##篩 +##篪 +##篮 +##篱 +##篷 +##簇 +##簌 +##簍 +##簡 +##簦 +##簧 +##簪 +##簫 +##簷 +##簸 +##簽 +##簾 +##簿 +##籁 +##籃 +##籌 +##籍 +##籐 +##籟 +##籠 +##籤 +##籬 +##籮 +##籲 +##米 +##类 +##籼 +##籽 +##粄 +##粉 +##粑 +##粒 +##粕 +##粗 +##粘 +##粟 +##粤 +##粥 +##粧 +##粪 +##粮 +##粱 +##粲 +##粳 +##粵 +##粹 +##粼 +##粽 +##精 +##粿 +##糅 +##糊 +##糍 +##糕 +##糖 +##糗 +##糙 +##糜 +##糞 +##糟 +##糠 +##糧 +##糬 +##糯 +##糰 +##糸 +##系 +##糾 +##紀 +##紂 +##約 +##紅 +##紉 +##紊 +##紋 +##納 +##紐 +##紓 +##純 +##紗 +##紘 +##紙 +##級 +##紛 +##紜 +##素 +##紡 +##索 +##紧 +##紫 +##紮 +##累 +##細 +##紳 +##紹 +##紺 +##終 +##絃 +##組 +##絆 +##経 +##結 +##絕 +##絞 +##絡 +##絢 +##給 +##絨 +##絮 +##統 +##絲 +##絳 +##絵 +##絶 +##絹 +##綁 +##綏 +##綑 +##經 +##継 +##続 +##綜 +##綠 +##綢 +##綦 +##綫 +##綬 +##維 +##綱 +##網 +##綴 +##綵 +##綸 +##綺 +##綻 +##綽 +##綾 +##綿 +##緊 +##緋 +##総 +##緑 +##緒 +##緘 +##線 +##緝 +##緞 +##締 +##緣 +##編 +##緩 +##緬 +##緯 +##練 +##緹 +##緻 +##縁 +##縄 +##縈 +##縛 +##縝 +##縣 +##縫 +##縮 +##縱 +##縴 +##縷 +##總 +##績 +##繁 +##繃 +##繆 +##繇 +##繋 +##織 +##繕 +##繚 +##繞 +##繡 +##繩 +##繪 +##繫 +##繭 +##繳 +##繹 +##繼 +##繽 +##纂 +##續 +##纍 +##纏 +##纓 +##纔 +##纖 +##纜 +##纠 +##红 +##纣 +##纤 +##约 +##级 +##纨 +##纪 +##纫 +##纬 +##纭 +##纯 +##纰 +##纱 +##纲 +##纳 +##纵 +##纶 +##纷 +##纸 +##纹 +##纺 +##纽 +##纾 +##线 +##绀 +##练 +##组 +##绅 +##细 +##织 +##终 +##绊 +##绍 +##绎 +##经 +##绑 +##绒 +##结 +##绔 +##绕 +##绘 +##给 +##绚 +##绛 +##络 +##绝 +##绞 +##统 +##绡 +##绢 +##绣 +##绥 +##绦 +##继 +##绩 +##绪 +##绫 +##续 +##绮 +##绯 +##绰 +##绳 +##维 +##绵 +##绶 +##绷 +##绸 +##绻 +##综 +##绽 +##绾 +##绿 +##缀 +##缄 +##缅 +##缆 +##缇 +##缈 +##缉 +##缎 +##缓 +##缔 +##缕 +##编 +##缘 +##缙 +##缚 +##缜 +##缝 +##缠 +##缢 +##缤 +##缥 +##缨 +##缩 +##缪 +##缭 +##缮 +##缰 +##缱 +##缴 +##缸 +##缺 +##缽 +##罂 +##罄 +##罌 +##罐 +##网 +##罔 +##罕 +##罗 +##罚 +##罡 +##罢 +##罩 +##罪 +##置 +##罰 +##署 +##罵 +##罷 +##罹 +##羁 +##羅 +##羈 +##羊 +##羌 +##美 +##羔 +##羚 +##羞 +##羟 +##羡 +##羣 +##群 +##羥 +##羧 +##羨 +##義 +##羯 +##羲 +##羸 +##羹 +##羽 +##羿 +##翁 +##翅 +##翊 
+##翌 +##翎 +##習 +##翔 +##翘 +##翟 +##翠 +##翡 +##翦 +##翩 +##翰 +##翱 +##翳 +##翹 +##翻 +##翼 +##耀 +##老 +##考 +##耄 +##者 +##耆 +##耋 +##而 +##耍 +##耐 +##耒 +##耕 +##耗 +##耘 +##耙 +##耦 +##耨 +##耳 +##耶 +##耷 +##耸 +##耻 +##耽 +##耿 +##聂 +##聆 +##聊 +##聋 +##职 +##聒 +##联 +##聖 +##聘 +##聚 +##聞 +##聪 +##聯 +##聰 +##聲 +##聳 +##聴 +##聶 +##職 +##聽 +##聾 +##聿 +##肃 +##肄 +##肅 +##肆 +##肇 +##肉 +##肋 +##肌 +##肏 +##肓 +##肖 +##肘 +##肚 +##肛 +##肝 +##肠 +##股 +##肢 +##肤 +##肥 +##肩 +##肪 +##肮 +##肯 +##肱 +##育 +##肴 +##肺 +##肽 +##肾 +##肿 +##胀 +##胁 +##胃 +##胄 +##胆 +##背 +##胍 +##胎 +##胖 +##胚 +##胛 +##胜 +##胝 +##胞 +##胡 +##胤 +##胥 +##胧 +##胫 +##胭 +##胯 +##胰 +##胱 +##胳 +##胴 +##胶 +##胸 +##胺 +##能 +##脂 +##脅 +##脆 +##脇 +##脈 +##脉 +##脊 +##脍 +##脏 +##脐 +##脑 +##脓 +##脖 +##脘 +##脚 +##脛 +##脣 +##脩 +##脫 +##脯 +##脱 +##脲 +##脳 +##脸 +##脹 +##脾 +##腆 +##腈 +##腊 +##腋 +##腌 +##腎 +##腐 +##腑 +##腓 +##腔 +##腕 +##腥 +##腦 +##腩 +##腫 +##腭 +##腮 +##腰 +##腱 +##腳 +##腴 +##腸 +##腹 +##腺 +##腻 +##腼 +##腾 +##腿 +##膀 +##膈 +##膊 +##膏 +##膑 +##膘 +##膚 +##膛 +##膜 +##膝 +##膠 +##膦 +##膨 +##膩 +##膳 +##膺 +##膻 +##膽 +##膾 +##膿 +##臀 +##臂 +##臃 +##臆 +##臉 +##臊 +##臍 +##臓 +##臘 +##臟 +##臣 +##臥 +##臧 +##臨 +##自 +##臬 +##臭 +##至 +##致 +##臺 +##臻 +##臼 +##臾 +##舀 +##舂 +##舅 +##舆 +##與 +##興 +##舉 +##舊 +##舌 +##舍 +##舎 +##舐 +##舒 +##舔 +##舖 +##舗 +##舛 +##舜 +##舞 +##舟 +##航 +##舫 +##般 +##舰 +##舱 +##舵 +##舶 +##舷 +##舸 +##船 +##舺 +##舾 +##艇 +##艋 +##艘 +##艙 +##艦 +##艮 +##良 +##艰 +##艱 +##色 +##艳 +##艷 +##艹 +##艺 +##艾 +##节 +##芃 +##芈 +##芊 +##芋 +##芍 +##芎 +##芒 +##芙 +##芜 +##芝 +##芡 +##芥 +##芦 +##芩 +##芪 +##芫 +##芬 +##芭 +##芮 +##芯 +##花 +##芳 +##芷 +##芸 +##芹 +##芻 +##芽 +##芾 +##苁 +##苄 +##苇 +##苋 +##苍 +##苏 +##苑 +##苒 +##苓 +##苔 +##苕 +##苗 +##苛 +##苜 +##苞 +##苟 +##苡 +##苣 +##若 +##苦 +##苫 +##苯 +##英 +##苷 +##苹 +##苻 +##茁 +##茂 +##范 +##茄 +##茅 +##茉 +##茎 +##茏 +##茗 +##茜 +##茧 +##茨 +##茫 +##茬 +##茭 +##茯 +##茱 +##茲 +##茴 +##茵 +##茶 +##茸 +##茹 +##茼 +##荀 +##荃 +##荆 +##草 +##荊 +##荏 +##荐 +##荒 +##荔 +##荖 +##荘 +##荚 +##荞 +##荟 +##荠 +##荡 +##荣 +##荤 +##荥 +##荧 +##荨 +##荪 +##荫 +##药 +##荳 +##荷 +##荸 +##荻 +##荼 +##荽 +##莅 +##莆 +##莉 +##莊 +##莎 +##莒 +##莓 +##莖 +##莘 +##莞 +##莠 +##莢 +##莧 +##莪 +##莫 +##莱 +##莲 +##莴 +##获 +##莹 +##莺 +##莽 +##莿 +##菀 +##菁 +##菅 +##菇 +##菈 +##菊 +##菌 +##菏 +##菓 +##菖 +##菘 +##菜 +##菟 +##菠 +##菡 +##菩 +##華 +##菱 +##菲 +##菸 +##菽 +##萁 +##萃 +##萄 +##萊 +##萋 +##萌 +##萍 +##萎 +##萘 +##萝 +##萤 +##营 +##萦 +##萧 +##萨 +##萩 +##萬 +##萱 +##萵 +##萸 +##萼 +##落 +##葆 +##葉 +##著 +##葚 +##葛 +##葡 +##董 +##葦 +##葩 +##葫 +##葬 +##葭 +##葯 +##葱 +##葳 +##葵 +##葷 +##葺 +##蒂 +##蒋 +##蒐 +##蒔 +##蒙 +##蒜 +##蒞 +##蒟 +##蒡 +##蒨 +##蒲 +##蒸 +##蒹 +##蒻 +##蒼 +##蒿 +##蓁 +##蓄 +##蓆 +##蓉 +##蓋 +##蓑 +##蓓 +##蓖 +##蓝 +##蓟 +##蓦 +##蓬 +##蓮 +##蓼 +##蓿 +##蔑 +##蔓 +##蔔 +##蔗 +##蔘 +##蔚 +##蔡 +##蔣 +##蔥 +##蔫 +##蔬 +##蔭 +##蔵 +##蔷 +##蔺 +##蔻 +##蔼 +##蔽 +##蕁 +##蕃 +##蕈 +##蕉 +##蕊 +##蕎 +##蕙 +##蕤 +##蕨 +##蕩 +##蕪 +##蕭 +##蕲 +##蕴 +##蕻 +##蕾 +##薄 +##薅 +##薇 +##薈 +##薊 +##薏 +##薑 +##薔 +##薙 +##薛 +##薦 +##薨 +##薩 +##薪 +##薬 +##薯 +##薰 +##薹 +##藉 +##藍 +##藏 +##藐 +##藓 +##藕 +##藜 +##藝 +##藤 +##藥 +##藩 +##藹 +##藻 +##藿 +##蘆 +##蘇 +##蘊 +##蘋 +##蘑 +##蘚 +##蘭 +##蘸 +##蘼 +##蘿 +##虎 +##虏 +##虐 +##虑 +##虔 +##處 +##虚 +##虛 +##虜 +##虞 +##號 +##虢 +##虧 +##虫 +##虬 +##虱 +##虹 +##虻 +##虽 +##虾 +##蚀 +##蚁 +##蚂 +##蚊 +##蚌 +##蚓 +##蚕 +##蚜 +##蚝 +##蚣 +##蚤 +##蚩 +##蚪 +##蚯 +##蚱 +##蚵 +##蛀 +##蛆 +##蛇 +##蛊 +##蛋 +##蛎 +##蛐 +##蛔 +##蛙 +##蛛 +##蛟 +##蛤 +##蛭 +##蛮 +##蛰 +##蛳 +##蛹 +##蛻 +##蛾 +##蜀 +##蜂 +##蜃 +##蜆 +##蜇 +##蜈 +##蜊 +##蜍 +##蜒 +##蜓 +##蜕 +##蜗 +##蜘 +##蜚 +##蜜 +##蜡 +##蜢 +##蜥 +##蜱 +##蜴 +##蜷 +##蜻 +##蜿 +##蝇 +##蝈 +##蝉 +##蝌 +##蝎 +##蝕 +##蝗 +##蝙 +##蝟 +##蝠 +##蝦 +##蝨 +##蝴 +##蝶 +##蝸 +##蝼 +##螂 +##螃 +##融 +##螞 +##螢 +##螨 +##螯 +##螳 +##螺 +##蟀 +##蟄 +##蟆 +##蟋 +##蟎 +##蟑 +##蟒 +##蟠 +##蟬 +##蟲 +##蟹 +##蟻 +##蟾 +##蠅 +##蠍 +##蠔 +##蠕 +##蠛 +##蠟 +##蠡 +##蠢 +##蠣 +##蠱 +##蠶 +##蠹 +##蠻 +##血 +##衄 +##衅 +##衆 +##行 +##衍 +##術 +##衔 +##街 +##衙 +##衛 +##衝 +##衞 +##衡 +##衢 +##衣 
+##补 +##表 +##衩 +##衫 +##衬 +##衮 +##衰 +##衲 +##衷 +##衹 +##衾 +##衿 +##袁 +##袂 +##袄 +##袅 +##袈 +##袋 +##袍 +##袒 +##袖 +##袜 +##袞 +##袤 +##袪 +##被 +##袭 +##袱 +##裁 +##裂 +##装 +##裆 +##裊 +##裏 +##裔 +##裕 +##裘 +##裙 +##補 +##裝 +##裟 +##裡 +##裤 +##裨 +##裱 +##裳 +##裴 +##裸 +##裹 +##製 +##裾 +##褂 +##複 +##褐 +##褒 +##褓 +##褔 +##褚 +##褥 +##褪 +##褫 +##褲 +##褶 +##褻 +##襁 +##襄 +##襟 +##襠 +##襪 +##襬 +##襯 +##襲 +##西 +##要 +##覃 +##覆 +##覇 +##見 +##規 +##覓 +##視 +##覚 +##覦 +##覧 +##親 +##覬 +##観 +##覷 +##覺 +##覽 +##觀 +##见 +##观 +##规 +##觅 +##视 +##览 +##觉 +##觊 +##觎 +##觐 +##觑 +##角 +##觞 +##解 +##觥 +##触 +##觸 +##言 +##訂 +##計 +##訊 +##討 +##訓 +##訕 +##訖 +##託 +##記 +##訛 +##訝 +##訟 +##訣 +##訥 +##訪 +##設 +##許 +##訳 +##訴 +##訶 +##診 +##註 +##証 +##詆 +##詐 +##詔 +##評 +##詛 +##詞 +##詠 +##詡 +##詢 +##詣 +##試 +##詩 +##詫 +##詬 +##詭 +##詮 +##詰 +##話 +##該 +##詳 +##詹 +##詼 +##誅 +##誇 +##誉 +##誌 +##認 +##誓 +##誕 +##誘 +##語 +##誠 +##誡 +##誣 +##誤 +##誥 +##誦 +##誨 +##說 +##説 +##読 +##誰 +##課 +##誹 +##誼 +##調 +##諄 +##談 +##請 +##諏 +##諒 +##論 +##諗 +##諜 +##諡 +##諦 +##諧 +##諫 +##諭 +##諮 +##諱 +##諳 +##諷 +##諸 +##諺 +##諾 +##謀 +##謁 +##謂 +##謄 +##謊 +##謎 +##謐 +##謔 +##謗 +##謙 +##講 +##謝 +##謠 +##謨 +##謬 +##謹 +##謾 +##譁 +##證 +##譎 +##譏 +##識 +##譙 +##譚 +##譜 +##警 +##譬 +##譯 +##議 +##譲 +##譴 +##護 +##譽 +##讀 +##變 +##讓 +##讚 +##讞 +##计 +##订 +##认 +##讥 +##讧 +##讨 +##让 +##讪 +##讫 +##训 +##议 +##讯 +##记 +##讲 +##讳 +##讴 +##讶 +##讷 +##许 +##讹 +##论 +##讼 +##讽 +##设 +##访 +##诀 +##证 +##诃 +##评 +##诅 +##识 +##诈 +##诉 +##诊 +##诋 +##词 +##诏 +##译 +##试 +##诗 +##诘 +##诙 +##诚 +##诛 +##话 +##诞 +##诟 +##诠 +##诡 +##询 +##诣 +##诤 +##该 +##详 +##诧 +##诩 +##诫 +##诬 +##语 +##误 +##诰 +##诱 +##诲 +##说 +##诵 +##诶 +##请 +##诸 +##诺 +##读 +##诽 +##课 +##诿 +##谀 +##谁 +##调 +##谄 +##谅 +##谆 +##谈 +##谊 +##谋 +##谌 +##谍 +##谎 +##谏 +##谐 +##谑 +##谒 +##谓 +##谔 +##谕 +##谗 +##谘 +##谙 +##谚 +##谛 +##谜 +##谟 +##谢 +##谣 +##谤 +##谥 +##谦 +##谧 +##谨 +##谩 +##谪 +##谬 +##谭 +##谯 +##谱 +##谲 +##谴 +##谶 +##谷 +##豁 +##豆 +##豇 +##豈 +##豉 +##豊 +##豌 +##豎 +##豐 +##豔 +##豚 +##象 +##豢 +##豪 +##豫 +##豬 +##豹 +##豺 +##貂 +##貅 +##貌 +##貓 +##貔 +##貘 +##貝 +##貞 +##負 +##財 +##貢 +##貧 +##貨 +##販 +##貪 +##貫 +##責 +##貯 +##貰 +##貳 +##貴 +##貶 +##買 +##貸 +##費 +##貼 +##貽 +##貿 +##賀 +##賁 +##賂 +##賃 +##賄 +##資 +##賈 +##賊 +##賑 +##賓 +##賜 +##賞 +##賠 +##賡 +##賢 +##賣 +##賤 +##賦 +##質 +##賬 +##賭 +##賴 +##賺 +##購 +##賽 +##贅 +##贈 +##贊 +##贍 +##贏 +##贓 +##贖 +##贛 +##贝 +##贞 +##负 +##贡 +##财 +##责 +##贤 +##败 +##账 +##货 +##质 +##贩 +##贪 +##贫 +##贬 +##购 +##贮 +##贯 +##贰 +##贱 +##贲 +##贴 +##贵 +##贷 +##贸 +##费 +##贺 +##贻 +##贼 +##贾 +##贿 +##赁 +##赂 +##赃 +##资 +##赅 +##赈 +##赊 +##赋 +##赌 +##赎 +##赏 +##赐 +##赓 +##赔 +##赖 +##赘 +##赚 +##赛 +##赝 +##赞 +##赠 +##赡 +##赢 +##赣 +##赤 +##赦 +##赧 +##赫 +##赭 +##走 +##赳 +##赴 +##赵 +##赶 +##起 +##趁 +##超 +##越 +##趋 +##趕 +##趙 +##趟 +##趣 +##趨 +##足 +##趴 +##趵 +##趸 +##趺 +##趾 +##跃 +##跄 +##跆 +##跋 +##跌 +##跎 +##跑 +##跖 +##跚 +##跛 +##距 +##跟 +##跡 +##跤 +##跨 +##跩 +##跪 +##路 +##跳 +##践 +##跷 +##跹 +##跺 +##跻 +##踉 +##踊 +##踌 +##踏 +##踐 +##踝 +##踞 +##踟 +##踢 +##踩 +##踪 +##踮 +##踱 +##踴 +##踵 +##踹 +##蹂 +##蹄 +##蹇 +##蹈 +##蹉 +##蹊 +##蹋 +##蹑 +##蹒 +##蹙 +##蹟 +##蹣 +##蹤 +##蹦 +##蹩 +##蹬 +##蹭 +##蹲 +##蹴 +##蹶 +##蹺 +##蹼 +##蹿 +##躁 +##躇 +##躉 +##躊 +##躋 +##躍 +##躏 +##躪 +##身 +##躬 +##躯 +##躲 +##躺 +##軀 +##車 +##軋 +##軌 +##軍 +##軒 +##軟 +##転 +##軸 +##軼 +##軽 +##軾 +##較 +##載 +##輒 +##輓 +##輔 +##輕 +##輛 +##輝 +##輟 +##輩 +##輪 +##輯 +##輸 +##輻 +##輾 +##輿 +##轄 +##轅 +##轆 +##轉 +##轍 +##轎 +##轟 +##车 +##轧 +##轨 +##轩 +##转 +##轭 +##轮 +##软 +##轰 +##轲 +##轴 +##轶 +##轻 +##轼 +##载 +##轿 +##较 +##辄 +##辅 +##辆 +##辇 +##辈 +##辉 +##辊 +##辍 +##辐 +##辑 +##输 +##辕 +##辖 +##辗 +##辘 +##辙 +##辛 +##辜 +##辞 +##辟 +##辣 +##辦 +##辨 +##辩 +##辫 +##辭 +##辮 +##辯 +##辰 +##辱 +##農 +##边 +##辺 +##辻 +##込 +##辽 +##达 +##迁 +##迂 +##迄 +##迅 +##过 +##迈 +##迎 +##运 +##近 +##返 +##还 +##这 +##进 +##远 +##违 +##连 +##迟 +##迢 +##迤 +##迥 +##迦 +##迩 +##迪 +##迫 +##迭 +##述 +##迴 +##迷 +##迸 +##迹 +##迺 +##追 +##退 +##送 
+##适 +##逃 +##逅 +##逆 +##选 +##逊 +##逍 +##透 +##逐 +##递 +##途 +##逕 +##逗 +##這 +##通 +##逛 +##逝 +##逞 +##速 +##造 +##逢 +##連 +##逮 +##週 +##進 +##逵 +##逶 +##逸 +##逻 +##逼 +##逾 +##遁 +##遂 +##遅 +##遇 +##遊 +##運 +##遍 +##過 +##遏 +##遐 +##遑 +##遒 +##道 +##達 +##違 +##遗 +##遙 +##遛 +##遜 +##遞 +##遠 +##遢 +##遣 +##遥 +##遨 +##適 +##遭 +##遮 +##遲 +##遴 +##遵 +##遶 +##遷 +##選 +##遺 +##遼 +##遽 +##避 +##邀 +##邁 +##邂 +##邃 +##還 +##邇 +##邈 +##邊 +##邋 +##邏 +##邑 +##邓 +##邕 +##邛 +##邝 +##邢 +##那 +##邦 +##邨 +##邪 +##邬 +##邮 +##邯 +##邰 +##邱 +##邳 +##邵 +##邸 +##邹 +##邺 +##邻 +##郁 +##郅 +##郊 +##郎 +##郑 +##郜 +##郝 +##郡 +##郢 +##郤 +##郦 +##郧 +##部 +##郫 +##郭 +##郴 +##郵 +##郷 +##郸 +##都 +##鄂 +##鄉 +##鄒 +##鄔 +##鄙 +##鄞 +##鄢 +##鄧 +##鄭 +##鄰 +##鄱 +##鄲 +##鄺 +##酉 +##酊 +##酋 +##酌 +##配 +##酐 +##酒 +##酗 +##酚 +##酝 +##酢 +##酣 +##酥 +##酩 +##酪 +##酬 +##酮 +##酯 +##酰 +##酱 +##酵 +##酶 +##酷 +##酸 +##酿 +##醃 +##醇 +##醉 +##醋 +##醍 +##醐 +##醒 +##醚 +##醛 +##醜 +##醞 +##醣 +##醪 +##醫 +##醬 +##醮 +##醯 +##醴 +##醺 +##釀 +##釁 +##采 +##釉 +##释 +##釋 +##里 +##重 +##野 +##量 +##釐 +##金 +##釗 +##釘 +##釜 +##針 +##釣 +##釦 +##釧 +##釵 +##鈀 +##鈉 +##鈍 +##鈎 +##鈔 +##鈕 +##鈞 +##鈣 +##鈦 +##鈪 +##鈴 +##鈺 +##鈾 +##鉀 +##鉄 +##鉅 +##鉉 +##鉑 +##鉗 +##鉚 +##鉛 +##鉤 +##鉴 +##鉻 +##銀 +##銃 +##銅 +##銑 +##銓 +##銖 +##銘 +##銜 +##銬 +##銭 +##銮 +##銳 +##銷 +##銹 +##鋁 +##鋅 +##鋒 +##鋤 +##鋪 +##鋰 +##鋸 +##鋼 +##錄 +##錐 +##錘 +##錚 +##錠 +##錢 +##錦 +##錨 +##錫 +##錮 +##錯 +##録 +##錳 +##錶 +##鍊 +##鍋 +##鍍 +##鍛 +##鍥 +##鍰 +##鍵 +##鍺 +##鍾 +##鎂 +##鎊 +##鎌 +##鎏 +##鎔 +##鎖 +##鎗 +##鎚 +##鎧 +##鎬 +##鎮 +##鎳 +##鏈 +##鏖 +##鏗 +##鏘 +##鏞 +##鏟 +##鏡 +##鏢 +##鏤 +##鏽 +##鐘 +##鐮 +##鐲 +##鐳 +##鐵 +##鐸 +##鐺 +##鑄 +##鑊 +##鑑 +##鑒 +##鑣 +##鑫 +##鑰 +##鑲 +##鑼 +##鑽 +##鑾 +##鑿 +##针 +##钉 +##钊 +##钎 +##钏 +##钒 +##钓 +##钗 +##钙 +##钛 +##钜 +##钝 +##钞 +##钟 +##钠 +##钡 +##钢 +##钣 +##钤 +##钥 +##钦 +##钧 +##钨 +##钩 +##钮 +##钯 +##钰 +##钱 +##钳 +##钴 +##钵 +##钺 +##钻 +##钼 +##钾 +##钿 +##铀 +##铁 +##铂 +##铃 +##铄 +##铅 +##铆 +##铉 +##铎 +##铐 +##铛 +##铜 +##铝 +##铠 +##铡 +##铢 +##铣 +##铤 +##铨 +##铩 +##铬 +##铭 +##铮 +##铰 +##铲 +##铵 +##银 +##铸 +##铺 +##链 +##铿 +##销 +##锁 +##锂 +##锄 +##锅 +##锆 +##锈 +##锉 +##锋 +##锌 +##锏 +##锐 +##锑 +##错 +##锚 +##锟 +##锡 +##锢 +##锣 +##锤 +##锥 +##锦 +##锭 +##键 +##锯 +##锰 +##锲 +##锵 +##锹 +##锺 +##锻 +##镀 +##镁 +##镂 +##镇 +##镉 +##镌 +##镍 +##镐 +##镑 +##镕 +##镖 +##镗 +##镛 +##镜 +##镣 +##镭 +##镯 +##镰 +##镳 +##镶 +##長 +##长 +##門 +##閃 +##閉 +##開 +##閎 +##閏 +##閑 +##閒 +##間 +##閔 +##閘 +##閡 +##関 +##閣 +##閥 +##閨 +##閩 +##閱 +##閲 +##閹 +##閻 +##閾 +##闆 +##闇 +##闊 +##闌 +##闍 +##闔 +##闕 +##闖 +##闘 +##關 +##闡 +##闢 +##门 +##闪 +##闫 +##闭 +##问 +##闯 +##闰 +##闲 +##间 +##闵 +##闷 +##闸 +##闹 +##闺 +##闻 +##闽 +##闾 +##阀 +##阁 +##阂 +##阅 +##阆 +##阇 +##阈 +##阉 +##阎 +##阐 +##阑 +##阔 +##阕 +##阖 +##阙 +##阚 +##阜 +##队 +##阡 +##阪 +##阮 +##阱 +##防 +##阳 +##阴 +##阵 +##阶 +##阻 +##阿 +##陀 +##陂 +##附 +##际 +##陆 +##陇 +##陈 +##陋 +##陌 +##降 +##限 +##陕 +##陛 +##陝 +##陞 +##陟 +##陡 +##院 +##陣 +##除 +##陨 +##险 +##陪 +##陰 +##陲 +##陳 +##陵 +##陶 +##陷 +##陸 +##険 +##陽 +##隅 +##隆 +##隈 +##隊 +##隋 +##隍 +##階 +##随 +##隐 +##隔 +##隕 +##隘 +##隙 +##際 +##障 +##隠 +##隣 +##隧 +##隨 +##險 +##隱 +##隴 +##隶 +##隸 +##隻 +##隼 +##隽 +##难 +##雀 +##雁 +##雄 +##雅 +##集 +##雇 +##雉 +##雋 +##雌 +##雍 +##雎 +##雏 +##雑 +##雒 +##雕 +##雖 +##雙 +##雛 +##雜 +##雞 +##離 +##難 +##雨 +##雪 +##雯 +##雰 +##雲 +##雳 +##零 +##雷 +##雹 +##電 +##雾 +##需 +##霁 +##霄 +##霆 +##震 +##霈 +##霉 +##霊 +##霍 +##霎 +##霏 +##霑 +##霓 +##霖 +##霜 +##霞 +##霧 +##霭 +##霰 +##露 +##霸 +##霹 +##霽 +##霾 +##靂 +##靄 +##靈 +##青 +##靓 +##靖 +##静 +##靚 +##靛 +##靜 +##非 +##靠 +##靡 +##面 +##靥 +##靦 +##革 +##靳 +##靴 +##靶 +##靼 +##鞅 +##鞋 +##鞍 +##鞏 +##鞑 +##鞘 +##鞠 +##鞣 +##鞦 +##鞭 +##韆 +##韋 +##韌 +##韓 +##韜 +##韦 +##韧 +##韩 +##韬 +##韭 +##音 +##韵 +##韶 +##韻 +##響 +##頁 +##頂 +##頃 +##項 +##順 +##須 +##頌 +##預 +##頑 +##頒 +##頓 +##頗 +##領 +##頜 +##頡 +##頤 +##頫 +##頭 +##頰 +##頷 +##頸 +##頹 +##頻 +##頼 +##顆 +##題 +##額 +##顎 +##顏 +##顔 +##願 +##顛 +##類 +##顧 +##顫 +##顯 +##顱 +##顴 +##页 +##顶 +##顷 
+##项 +##顺 +##须 +##顼 +##顽 +##顾 +##顿 +##颁 +##颂 +##预 +##颅 +##领 +##颇 +##颈 +##颉 +##颊 +##颌 +##颍 +##颐 +##频 +##颓 +##颔 +##颖 +##颗 +##题 +##颚 +##颛 +##颜 +##额 +##颞 +##颠 +##颡 +##颢 +##颤 +##颦 +##颧 +##風 +##颯 +##颱 +##颳 +##颶 +##颼 +##飄 +##飆 +##风 +##飒 +##飓 +##飕 +##飘 +##飙 +##飚 +##飛 +##飞 +##食 +##飢 +##飨 +##飩 +##飪 +##飯 +##飲 +##飼 +##飽 +##飾 +##餃 +##餅 +##餉 +##養 +##餌 +##餐 +##餒 +##餓 +##餘 +##餚 +##餛 +##餞 +##餡 +##館 +##餮 +##餵 +##餾 +##饅 +##饈 +##饋 +##饌 +##饍 +##饑 +##饒 +##饕 +##饗 +##饞 +##饥 +##饨 +##饪 +##饬 +##饭 +##饮 +##饯 +##饰 +##饱 +##饲 +##饴 +##饵 +##饶 +##饷 +##饺 +##饼 +##饽 +##饿 +##馀 +##馁 +##馄 +##馅 +##馆 +##馈 +##馋 +##馍 +##馏 +##馒 +##馔 +##首 +##馗 +##香 +##馥 +##馨 +##馬 +##馭 +##馮 +##馳 +##馴 +##駁 +##駄 +##駅 +##駆 +##駐 +##駒 +##駕 +##駛 +##駝 +##駭 +##駱 +##駿 +##騁 +##騎 +##騏 +##験 +##騙 +##騨 +##騰 +##騷 +##驀 +##驅 +##驊 +##驍 +##驒 +##驕 +##驗 +##驚 +##驛 +##驟 +##驢 +##驥 +##马 +##驭 +##驮 +##驯 +##驰 +##驱 +##驳 +##驴 +##驶 +##驷 +##驸 +##驹 +##驻 +##驼 +##驾 +##驿 +##骁 +##骂 +##骄 +##骅 +##骆 +##骇 +##骈 +##骊 +##骋 +##验 +##骏 +##骐 +##骑 +##骗 +##骚 +##骛 +##骜 +##骞 +##骠 +##骡 +##骤 +##骥 +##骧 +##骨 +##骯 +##骰 +##骶 +##骷 +##骸 +##骼 +##髂 +##髅 +##髋 +##髏 +##髒 +##髓 +##體 +##髖 +##高 +##髦 +##髪 +##髮 +##髯 +##髻 +##鬃 +##鬆 +##鬍 +##鬓 +##鬚 +##鬟 +##鬢 +##鬣 +##鬥 +##鬧 +##鬱 +##鬼 +##魁 +##魂 +##魄 +##魅 +##魇 +##魍 +##魏 +##魔 +##魘 +##魚 +##魯 +##魷 +##鮑 +##鮨 +##鮪 +##鮭 +##鮮 +##鯉 +##鯊 +##鯖 +##鯛 +##鯨 +##鯰 +##鯽 +##鰍 +##鰓 +##鰭 +##鰲 +##鰻 +##鰾 +##鱈 +##鱉 +##鱔 +##鱗 +##鱷 +##鱸 +##鱼 +##鱿 +##鲁 +##鲈 +##鲍 +##鲑 +##鲛 +##鲜 +##鲟 +##鲢 +##鲤 +##鲨 +##鲫 +##鲱 +##鲲 +##鲶 +##鲷 +##鲸 +##鳃 +##鳄 +##鳅 +##鳌 +##鳍 +##鳕 +##鳖 +##鳗 +##鳝 +##鳞 +##鳥 +##鳩 +##鳳 +##鳴 +##鳶 +##鴉 +##鴕 +##鴛 +##鴦 +##鴨 +##鴻 +##鴿 +##鵑 +##鵜 +##鵝 +##鵡 +##鵬 +##鵰 +##鵲 +##鶘 +##鶩 +##鶯 +##鶴 +##鷗 +##鷲 +##鷹 +##鷺 +##鸚 +##鸞 +##鸟 +##鸠 +##鸡 +##鸢 +##鸣 +##鸥 +##鸦 +##鸨 +##鸪 +##鸭 +##鸯 +##鸳 +##鸵 +##鸽 +##鸾 +##鸿 +##鹂 +##鹃 +##鹄 +##鹅 +##鹈 +##鹉 +##鹊 +##鹌 +##鹏 +##鹑 +##鹕 +##鹘 +##鹜 +##鹞 +##鹤 +##鹦 +##鹧 +##鹫 +##鹭 +##鹰 +##鹳 +##鹵 +##鹹 +##鹼 +##鹽 +##鹿 +##麂 +##麋 +##麒 +##麓 +##麗 +##麝 +##麟 +##麥 +##麦 +##麩 +##麴 +##麵 +##麸 +##麺 +##麻 +##麼 +##麽 +##麾 +##黃 +##黄 +##黍 +##黎 +##黏 +##黑 +##黒 +##黔 +##默 +##黛 +##黜 +##黝 +##點 +##黠 +##黨 +##黯 +##黴 +##鼋 +##鼎 +##鼐 +##鼓 +##鼠 +##鼬 +##鼹 +##鼻 +##鼾 +##齁 +##齊 +##齋 +##齐 +##齒 +##齡 +##齢 +##齣 +##齦 +##齿 +##龄 +##龅 +##龈 +##龊 +##龋 +##龌 +##龍 +##龐 +##龔 +##龕 +##龙 +##龚 +##龛 +##龜 +##龟 +##︰ +##︱ +##︶ +##︿ +##﹁ +##﹂ +##﹍ +##﹏ +##﹐ +##﹑ +##﹒ +##﹔ +##﹕ +##﹖ +##﹗ +##﹙ +##﹚ +##﹝ +##﹞ +##﹡ +##﹣ +##! +##" +### +##$ +##% +##& +##' +##( +##) +##* +##, +##- +##. +##/ +##: +##; +##< +##? +##@ +##[ +##\ +##] +##^ +##_ +##` +##f +##h +##j +##u +##w +##z +##{ +##} +##。 +##「 +##」 +##、 +##・ +##ッ +##ー +##イ +##ク +##シ +##ス +##ト +##ノ +##フ +##ラ +##ル +##ン +##゙ +##゚ +## ̄ +##¥ +##👍 +##🔥 +##😂 +##😎 diff --git a/ComfyUI_ExtraModels/PixArt/LICENSE-PixArt b/ComfyUI_ExtraModels/PixArt/LICENSE-PixArt new file mode 100644 index 0000000000000000000000000000000000000000..0ad25db4bd1d86c452db3f9602ccdbe172438f52 --- /dev/null +++ b/ComfyUI_ExtraModels/PixArt/LICENSE-PixArt @@ -0,0 +1,661 @@ + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. 
By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. 
Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. 
You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. 
+ + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. 
In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+ + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published + by the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +<https://www.gnu.org/licenses/>. 
diff --git a/ComfyUI_ExtraModels/PixArt/conf.py b/ComfyUI_ExtraModels/PixArt/conf.py new file mode 100644 index 0000000000000000000000000000000000000000..567c44433052a1812a8ced96556e2b1802fca8a3 --- /dev/null +++ b/ComfyUI_ExtraModels/PixArt/conf.py @@ -0,0 +1,140 @@ +""" +List of all PixArt model types / settings +""" + +sampling_settings = { + "beta_schedule" : "sqrt_linear", + "linear_start" : 0.0001, + "linear_end" : 0.02, + "timesteps" : 1000, +} + +pixart_conf = { + "PixArtMS_XL_2": { # models/PixArtMS + "target": "PixArtMS", + "unet_config": { + "input_size" : 1024//8, + "depth" : 28, + "num_heads" : 16, + "patch_size" : 2, + "hidden_size" : 1152, + "pe_interpolation": 2, + }, + "sampling_settings" : sampling_settings, + }, + "PixArtMS_Sigma_XL_2": { + "target": "PixArtMSSigma", + "unet_config": { + "input_size" : 1024//8, + "token_num" : 300, + "depth" : 28, + "num_heads" : 16, + "patch_size" : 2, + "hidden_size" : 1152, + "micro_condition": False, + "pe_interpolation": 2, + "model_max_length": 300, + }, + "sampling_settings" : sampling_settings, + }, + "PixArtMS_Sigma_XL_2_900M": { + "target": "PixArtMSSigma", + "unet_config": { + "input_size": 1024 // 8, + "token_num": 300, + "depth": 42, + "num_heads": 16, + "patch_size": 2, + "hidden_size": 1152, + "micro_condition": False, + "pe_interpolation": 2, + "model_max_length": 300, + }, + "sampling_settings": sampling_settings, + }, + "PixArtMS_Sigma_XL_2_2K": { + "target": "PixArtMSSigma", + "unet_config": { + "input_size" : 2048//8, + "token_num" : 300, + "depth" : 28, + "num_heads" : 16, + "patch_size" : 2, + "hidden_size" : 1152, + "micro_condition": False, + "pe_interpolation": 4, + "model_max_length": 300, + }, + "sampling_settings" : sampling_settings, + }, + "PixArt_XL_2": { # models/PixArt + "target": "PixArt", + "unet_config": { + "input_size" : 512//8, + "token_num" : 120, + "depth" : 28, + "num_heads" : 16, + "patch_size" : 2, + "hidden_size" : 1152, + "pe_interpolation": 1, + }, + "sampling_settings" : sampling_settings, + }, +} + +pixart_conf.update({ # controlnet models + "ControlPixArtHalf": { + "target": "ControlPixArtHalf", + "unet_config": pixart_conf["PixArt_XL_2"]["unet_config"], + "sampling_settings": pixart_conf["PixArt_XL_2"]["sampling_settings"], + }, + "ControlPixArtMSHalf": { + "target": "ControlPixArtMSHalf", + "unet_config": pixart_conf["PixArtMS_XL_2"]["unet_config"], + "sampling_settings": pixart_conf["PixArtMS_XL_2"]["sampling_settings"], + } +}) + +pixart_res = { + "PixArtMS_XL_2": { # models/PixArtMS 1024x1024 + '0.25': [512, 2048], '0.26': [512, 1984], '0.27': [512, 1920], '0.28': [512, 1856], + '0.32': [576, 1792], '0.33': [576, 1728], '0.35': [576, 1664], '0.40': [640, 1600], + '0.42': [640, 1536], '0.48': [704, 1472], '0.50': [704, 1408], '0.52': [704, 1344], + '0.57': [768, 1344], '0.60': [768, 1280], '0.68': [832, 1216], '0.72': [832, 1152], + '0.78': [896, 1152], '0.82': [896, 1088], '0.88': [960, 1088], '0.94': [960, 1024], + '1.00': [1024,1024], '1.07': [1024, 960], '1.13': [1088, 960], '1.21': [1088, 896], + '1.29': [1152, 896], '1.38': [1152, 832], '1.46': [1216, 832], '1.67': [1280, 768], + '1.75': [1344, 768], '2.00': [1408, 704], '2.09': [1472, 704], '2.40': [1536, 640], + '2.50': [1600, 640], '2.89': [1664, 576], '3.00': [1728, 576], '3.11': [1792, 576], + '3.62': [1856, 512], '3.75': [1920, 512], '3.88': [1984, 512], '4.00': [2048, 512], + }, + "PixArt_XL_2": { # models/PixArt 512x512 + '0.25': [256,1024], '0.26': [256, 992], '0.27': [256, 960], '0.28': [256, 928], + '0.32': [288, 
896], '0.33': [288, 864], '0.35': [288, 832], '0.40': [320, 800], + '0.42': [320, 768], '0.48': [352, 736], '0.50': [352, 704], '0.52': [352, 672], + '0.57': [384, 672], '0.60': [384, 640], '0.68': [416, 608], '0.72': [416, 576], + '0.78': [448, 576], '0.82': [448, 544], '0.88': [480, 544], '0.94': [480, 512], + '1.00': [512, 512], '1.07': [512, 480], '1.13': [544, 480], '1.21': [544, 448], + '1.29': [576, 448], '1.38': [576, 416], '1.46': [608, 416], '1.67': [640, 384], + '1.75': [672, 384], '2.00': [704, 352], '2.09': [736, 352], '2.40': [768, 320], + '2.50': [800, 320], '2.89': [832, 288], '3.00': [864, 288], '3.11': [896, 288], + '3.62': [928, 256], '3.75': [960, 256], '3.88': [992, 256], '4.00': [1024,256] + }, + "PixArtMS_Sigma_XL_2_2K": { + '0.25': [1024, 4096], '0.26': [1024, 3968], '0.27': [1024, 3840], '0.28': [1024, 3712], + '0.32': [1152, 3584], '0.33': [1152, 3456], '0.35': [1152, 3328], '0.40': [1280, 3200], + '0.42': [1280, 3072], '0.48': [1408, 2944], '0.50': [1408, 2816], '0.52': [1408, 2688], + '0.57': [1536, 2688], '0.60': [1536, 2560], '0.68': [1664, 2432], '0.72': [1664, 2304], + '0.78': [1792, 2304], '0.82': [1792, 2176], '0.88': [1920, 2176], '0.94': [1920, 2048], + '1.00': [2048, 2048], '1.07': [2048, 1920], '1.13': [2176, 1920], '1.21': [2176, 1792], + '1.29': [2304, 1792], '1.38': [2304, 1664], '1.46': [2432, 1664], '1.67': [2560, 1536], + '1.75': [2688, 1536], '2.00': [2816, 1408], '2.09': [2944, 1408], '2.40': [3072, 1280], + '2.50': [3200, 1280], '2.89': [3328, 1152], '3.00': [3456, 1152], '3.11': [3584, 1152], + '3.62': [3712, 1024], '3.75': [3840, 1024], '3.88': [3968, 1024], '4.00': [4096, 1024] + } +} +# These should be the same +pixart_res.update({ + "PixArtMS_Sigma_XL_2": pixart_res["PixArtMS_XL_2"], + "PixArtMS_Sigma_XL_2_512": pixart_res["PixArt_XL_2"], +}) diff --git a/ComfyUI_ExtraModels/PixArt/diffusers_convert.py b/ComfyUI_ExtraModels/PixArt/diffusers_convert.py new file mode 100644 index 0000000000000000000000000000000000000000..195e126ff2f6d91ff374307792c47b5df2efe1d6 --- /dev/null +++ b/ComfyUI_ExtraModels/PixArt/diffusers_convert.py @@ -0,0 +1,210 @@ +# For using the diffusers format weights +# Based on the original ComfyUI function + +# https://github.com/PixArt-alpha/PixArt-alpha/blob/master/tools/convert_pixart_alpha_to_diffusers.py +import torch + +conversion_map_ms = [ # for multi_scale_train (MS) + # Resolution + ("csize_embedder.mlp.0.weight", "adaln_single.emb.resolution_embedder.linear_1.weight"), + ("csize_embedder.mlp.0.bias", "adaln_single.emb.resolution_embedder.linear_1.bias"), + ("csize_embedder.mlp.2.weight", "adaln_single.emb.resolution_embedder.linear_2.weight"), + ("csize_embedder.mlp.2.bias", "adaln_single.emb.resolution_embedder.linear_2.bias"), + # Aspect ratio + ("ar_embedder.mlp.0.weight", "adaln_single.emb.aspect_ratio_embedder.linear_1.weight"), + ("ar_embedder.mlp.0.bias", "adaln_single.emb.aspect_ratio_embedder.linear_1.bias"), + ("ar_embedder.mlp.2.weight", "adaln_single.emb.aspect_ratio_embedder.linear_2.weight"), + ("ar_embedder.mlp.2.bias", "adaln_single.emb.aspect_ratio_embedder.linear_2.bias"), +] + +def get_depth(state_dict): + return sum(key.endswith('.attn1.to_k.bias') for key in state_dict.keys()) + +def get_lora_depth(state_dict): + return sum(key.endswith('.attn1.to_k.lora_A.weight') for key in state_dict.keys()) + +def get_conversion_map(state_dict): + conversion_map = [ # main SD conversion map (PixArt reference, HF Diffusers) + # Patch embeddings + ("x_embedder.proj.weight", "pos_embed.proj.weight"), + 
("x_embedder.proj.bias", "pos_embed.proj.bias"), + # Caption projection + ("y_embedder.y_embedding", "caption_projection.y_embedding"), + ("y_embedder.y_proj.fc1.weight", "caption_projection.linear_1.weight"), + ("y_embedder.y_proj.fc1.bias", "caption_projection.linear_1.bias"), + ("y_embedder.y_proj.fc2.weight", "caption_projection.linear_2.weight"), + ("y_embedder.y_proj.fc2.bias", "caption_projection.linear_2.bias"), + # AdaLN-single LN + ("t_embedder.mlp.0.weight", "adaln_single.emb.timestep_embedder.linear_1.weight"), + ("t_embedder.mlp.0.bias", "adaln_single.emb.timestep_embedder.linear_1.bias"), + ("t_embedder.mlp.2.weight", "adaln_single.emb.timestep_embedder.linear_2.weight"), + ("t_embedder.mlp.2.bias", "adaln_single.emb.timestep_embedder.linear_2.bias"), + # Shared norm + ("t_block.1.weight", "adaln_single.linear.weight"), + ("t_block.1.bias", "adaln_single.linear.bias"), + # Final block + ("final_layer.linear.weight", "proj_out.weight"), + ("final_layer.linear.bias", "proj_out.bias"), + ("final_layer.scale_shift_table", "scale_shift_table"), + ] + + # Add actual transformer blocks + for depth in range(get_depth(state_dict)): + # Transformer blocks + conversion_map += [ + (f"blocks.{depth}.scale_shift_table", f"transformer_blocks.{depth}.scale_shift_table"), + # Projection + (f"blocks.{depth}.attn.proj.weight", f"transformer_blocks.{depth}.attn1.to_out.0.weight"), + (f"blocks.{depth}.attn.proj.bias", f"transformer_blocks.{depth}.attn1.to_out.0.bias"), + # Feed-forward + (f"blocks.{depth}.mlp.fc1.weight", f"transformer_blocks.{depth}.ff.net.0.proj.weight"), + (f"blocks.{depth}.mlp.fc1.bias", f"transformer_blocks.{depth}.ff.net.0.proj.bias"), + (f"blocks.{depth}.mlp.fc2.weight", f"transformer_blocks.{depth}.ff.net.2.weight"), + (f"blocks.{depth}.mlp.fc2.bias", f"transformer_blocks.{depth}.ff.net.2.bias"), + # Cross-attention (proj) + (f"blocks.{depth}.cross_attn.proj.weight" ,f"transformer_blocks.{depth}.attn2.to_out.0.weight"), + (f"blocks.{depth}.cross_attn.proj.bias" ,f"transformer_blocks.{depth}.attn2.to_out.0.bias"), + ] + return conversion_map + +def find_prefix(state_dict, target_key): + prefix = "" + for k in state_dict.keys(): + if k.endswith(target_key): + prefix = k.split(target_key)[0] + break + return prefix + +def convert_state_dict(state_dict): + if "adaln_single.emb.resolution_embedder.linear_1.weight" in state_dict.keys(): + cmap = get_conversion_map(state_dict) + conversion_map_ms + else: + cmap = get_conversion_map(state_dict) + + missing = [k for k,v in cmap if v not in state_dict] + new_state_dict = {k: state_dict[v] for k,v in cmap if k not in missing} + matched = list(v for k,v in cmap if v in state_dict.keys()) + + for depth in range(get_depth(state_dict)): + for wb in ["weight", "bias"]: + # Self Attention + key = lambda a: f"transformer_blocks.{depth}.attn1.to_{a}.{wb}" + new_state_dict[f"blocks.{depth}.attn.qkv.{wb}"] = torch.cat(( + state_dict[key('q')], state_dict[key('k')], state_dict[key('v')] + ), dim=0) + matched += [key('q'), key('k'), key('v')] + + # Cross-attention (linear) + key = lambda a: f"transformer_blocks.{depth}.attn2.to_{a}.{wb}" + new_state_dict[f"blocks.{depth}.cross_attn.q_linear.{wb}"] = state_dict[key('q')] + new_state_dict[f"blocks.{depth}.cross_attn.kv_linear.{wb}"] = torch.cat(( + state_dict[key('k')], state_dict[key('v')] + ), dim=0) + matched += [key('q'), key('k'), key('v')] + + if len(matched) < len(state_dict): + print(f"PixArt: UNET conversion has leftover keys! 
({len(matched)} vs {len(state_dict)})") + print(list( set(state_dict.keys()) - set(matched) )) + + if len(missing) > 0: + print(f"PixArt: UNET conversion has missing keys!") + print(missing) + + return new_state_dict + +# Same as above but for LoRA weights: +def convert_lora_state_dict(state_dict, peft=True): + # koyha + rep_ak = lambda x: x.replace(".weight", ".lora_down.weight") + rep_bk = lambda x: x.replace(".weight", ".lora_up.weight") + rep_pk = lambda x: x.replace(".weight", ".alpha") + if peft: # peft + rep_ap = lambda x: x.replace(".weight", ".lora_A.weight") + rep_bp = lambda x: x.replace(".weight", ".lora_B.weight") + rep_pp = lambda x: x.replace(".weight", ".alpha") + + prefix = find_prefix(state_dict, "adaln_single.linear.lora_A.weight") + state_dict = {k[len(prefix):]:v for k,v in state_dict.items()} + else: # OneTrainer + rep_ap = lambda x: x.replace(".", "_")[:-7] + ".lora_down.weight" + rep_bp = lambda x: x.replace(".", "_")[:-7] + ".lora_up.weight" + rep_pp = lambda x: x.replace(".", "_")[:-7] + ".alpha" + + prefix = "lora_transformer_" + t5_marker = "lora_te_encoder" + t5_keys = [] + for key in list(state_dict.keys()): + if key.startswith(prefix): + state_dict[key[len(prefix):]] = state_dict.pop(key) + elif t5_marker in key: + t5_keys.append(state_dict.pop(key)) + if len(t5_keys) > 0: + print(f"Text Encoder not supported for PixArt LoRA, ignoring {len(t5_keys)} keys") + + cmap = [] + cmap_unet = get_conversion_map(state_dict) + conversion_map_ms # todo: 512 model + for k, v in cmap_unet: + if v.endswith(".weight"): + cmap.append((rep_ak(k), rep_ap(v))) + cmap.append((rep_bk(k), rep_bp(v))) + if not peft: + cmap.append((rep_pk(k), rep_pp(v))) + + missing = [k for k,v in cmap if v not in state_dict] + new_state_dict = {k: state_dict[v] for k,v in cmap if k not in missing} + matched = list(v for k,v in cmap if v in state_dict.keys()) + + lora_depth = get_lora_depth(state_dict) + for fp, fk in ((rep_ap, rep_ak),(rep_bp, rep_bk)): + for depth in range(lora_depth): + # Self Attention + key = lambda a: fp(f"transformer_blocks.{depth}.attn1.to_{a}.weight") + new_state_dict[fk(f"blocks.{depth}.attn.qkv.weight")] = torch.cat(( + state_dict[key('q')], state_dict[key('k')], state_dict[key('v')] + ), dim=0) + + matched += [key('q'), key('k'), key('v')] + if not peft: + akey = lambda a: rep_pp(f"transformer_blocks.{depth}.attn1.to_{a}.weight") + new_state_dict[rep_pk((f"blocks.{depth}.attn.qkv.weight"))] = state_dict[akey("q")] + matched += [akey('q'), akey('k'), akey('v')] + + # Self Attention projection? + key = lambda a: fp(f"transformer_blocks.{depth}.attn1.to_{a}.weight") + new_state_dict[fk(f"blocks.{depth}.attn.proj.weight")] = state_dict[key('out.0')] + matched += [key('out.0')] + + # Cross-attention (linear) + key = lambda a: fp(f"transformer_blocks.{depth}.attn2.to_{a}.weight") + new_state_dict[fk(f"blocks.{depth}.cross_attn.q_linear.weight")] = state_dict[key('q')] + new_state_dict[fk(f"blocks.{depth}.cross_attn.kv_linear.weight")] = torch.cat(( + state_dict[key('k')], state_dict[key('v')] + ), dim=0) + matched += [key('q'), key('k'), key('v')] + if not peft: + akey = lambda a: rep_pp(f"transformer_blocks.{depth}.attn2.to_{a}.weight") + new_state_dict[rep_pk((f"blocks.{depth}.cross_attn.q_linear.weight"))] = state_dict[akey("q")] + new_state_dict[rep_pk((f"blocks.{depth}.cross_attn.kv_linear.weight"))] = state_dict[akey("k")] + matched += [akey('q'), akey('k'), akey('v')] + + # Cross Attention projection? 
+ key = lambda a: fp(f"transformer_blocks.{depth}.attn2.to_{a}.weight") + new_state_dict[fk(f"blocks.{depth}.cross_attn.proj.weight")] = state_dict[key('out.0')] + matched += [key('out.0')] + + key = fp(f"transformer_blocks.{depth}.ff.net.0.proj.weight") + new_state_dict[fk(f"blocks.{depth}.mlp.fc1.weight")] = state_dict[key] + matched += [key] + + key = fp(f"transformer_blocks.{depth}.ff.net.2.weight") + new_state_dict[fk(f"blocks.{depth}.mlp.fc2.weight")] = state_dict[key] + matched += [key] + + if len(matched) < len(state_dict): + print(f"PixArt: LoRA conversion has leftover keys! ({len(matched)} vs {len(state_dict)})") + print(list( set(state_dict.keys()) - set(matched) )) + + if len(missing) > 0: + print(f"PixArt: LoRA conversion has missing keys! (probably)") + print(missing) + + return new_state_dict diff --git a/ComfyUI_ExtraModels/PixArt/loader.py b/ComfyUI_ExtraModels/PixArt/loader.py new file mode 100644 index 0000000000000000000000000000000000000000..7fcb16b3405f7e8d7ab21e5ca408dd7300860b9e --- /dev/null +++ b/ComfyUI_ExtraModels/PixArt/loader.py @@ -0,0 +1,181 @@ +import comfy.supported_models_base +import comfy.latent_formats +import comfy.model_patcher +import comfy.model_base +import comfy.utils +import comfy.conds +import torch +import math +from comfy import model_management +from .diffusers_convert import convert_state_dict + +class EXM_PixArt(comfy.supported_models_base.BASE): + unet_config = {} + unet_extra_config = {} + latent_format = comfy.latent_formats.SD15 + + def __init__(self, model_conf): + self.model_target = model_conf.get("target") + self.unet_config = model_conf.get("unet_config", {}) + self.sampling_settings = model_conf.get("sampling_settings", {}) + self.latent_format = self.latent_format() + # UNET is handled by extension + self.unet_config["disable_unet_model_creation"] = True + + def model_type(self, state_dict, prefix=""): + return comfy.model_base.ModelType.EPS + +class EXM_PixArt_Model(comfy.model_base.BaseModel): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def extra_conds(self, **kwargs): + out = super().extra_conds(**kwargs) + + img_hw = kwargs.get("img_hw", None) + if img_hw is not None: + out["img_hw"] = comfy.conds.CONDRegular(torch.tensor(img_hw)) + + aspect_ratio = kwargs.get("aspect_ratio", None) + if aspect_ratio is not None: + out["aspect_ratio"] = comfy.conds.CONDRegular(torch.tensor(aspect_ratio)) + + cn_hint = kwargs.get("cn_hint", None) + if cn_hint is not None: + out["cn_hint"] = comfy.conds.CONDRegular(cn_hint) + + return out + +def load_pixart(model_path, model_conf=None): + state_dict = comfy.utils.load_torch_file(model_path) + state_dict = state_dict.get("model", state_dict) + + # prefix + for prefix in ["model.diffusion_model.",]: + if any(True for x in state_dict if x.startswith(prefix)): + state_dict = {k[len(prefix):]:v for k,v in state_dict.items()} + + # diffusers + if "adaln_single.linear.weight" in state_dict: + state_dict = convert_state_dict(state_dict) # Diffusers + + # guess auto config + if model_conf is None: + model_conf = guess_pixart_config(state_dict) + + parameters = comfy.utils.calculate_parameters(state_dict) + unet_dtype = model_management.unet_dtype(model_params=parameters) + load_device = comfy.model_management.get_torch_device() + offload_device = comfy.model_management.unet_offload_device() + + # ignore fp8/etc and use directly for now + manual_cast_dtype = model_management.unet_manual_cast(unet_dtype, load_device) + if manual_cast_dtype: + print(f"PixArt: falling back 
to {manual_cast_dtype}") + unet_dtype = manual_cast_dtype + + model_conf = EXM_PixArt(model_conf) # convert to object + model = EXM_PixArt_Model( # same as comfy.model_base.BaseModel + model_conf, + model_type=comfy.model_base.ModelType.EPS, + device=model_management.get_torch_device() + ) + + if model_conf.model_target == "PixArtMS": + from .models.PixArtMS import PixArtMS + model.diffusion_model = PixArtMS(**model_conf.unet_config) + elif model_conf.model_target == "PixArt": + from .models.PixArt import PixArt + model.diffusion_model = PixArt(**model_conf.unet_config) + elif model_conf.model_target == "PixArtMSSigma": + from .models.PixArtMS import PixArtMS + model.diffusion_model = PixArtMS(**model_conf.unet_config) + model.latent_format = comfy.latent_formats.SDXL() + elif model_conf.model_target == "ControlPixArtMSHalf": + from .models.PixArtMS import PixArtMS + from .models.pixart_controlnet import ControlPixArtMSHalf + model.diffusion_model = PixArtMS(**model_conf.unet_config) + model.diffusion_model = ControlPixArtMSHalf(model.diffusion_model) + elif model_conf.model_target == "ControlPixArtHalf": + from .models.PixArt import PixArt + from .models.pixart_controlnet import ControlPixArtHalf + model.diffusion_model = PixArt(**model_conf.unet_config) + model.diffusion_model = ControlPixArtHalf(model.diffusion_model) + else: + raise NotImplementedError(f"Unknown model target '{model_conf.model_target}'") + + m, u = model.diffusion_model.load_state_dict(state_dict, strict=False) + if len(m) > 0: print("Missing UNET keys", m) + if len(u) > 0: print("Leftover UNET keys", u) + model.diffusion_model.dtype = unet_dtype + model.diffusion_model.eval() + model.diffusion_model.to(unet_dtype) + + model_patcher = comfy.model_patcher.ModelPatcher( + model, + load_device = load_device, + offload_device = offload_device, + current_device = "cpu", + ) + return model_patcher + +def guess_pixart_config(sd): + """ + Guess config based on converted state dict. + """ + # Shared settings based on DiT_XL_2 - could be enumerated + config = { + "num_heads" : 16, # get from attention + "patch_size" : 2, # final layer I guess? + "hidden_size" : 1152, # pos_embed.shape[2] + } + config["depth"] = sum([key.endswith(".attn.proj.weight") for key in sd.keys()]) or 28 + + try: + # this is not present in the diffusers version for sigma? + config["model_max_length"] = sd["y_embedder.y_embedding"].shape[0] + except KeyError: + # need better logic to guess this + config["model_max_length"] = 300 + + if "pos_embed" in sd: + config["input_size"] = int(math.sqrt(sd["pos_embed"].shape[1])) * config["patch_size"] + config["pe_interpolation"] = config["input_size"] // (512//8) # dumb guess + + target_arch = "PixArtMS" + if config["model_max_length"] == 300: + # Sigma + target_arch = "PixArtMSSigma" + config["micro_condition"] = False + if "input_size" not in config: + # The diffusers weights for 1K/2K are exactly the same...? + # replace patch embed logic with HyDiT? 
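+            # (Assumption: the MS/Sigma models rebuild pos_embed at runtime, so a converted
+            #  diffusers checkpoint carries no spatial-size hint to read here; 1024px is used
+            #  as the default guess.)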
+ print(f"PixArt: diffusers weights - 2K model will be broken, use manual loading!") + config["input_size"] = 1024//8 + else: + # Alpha + if "csize_embedder.mlp.0.weight" in sd: + # MS (microconds) + target_arch = "PixArtMS" + config["micro_condition"] = True + if "input_size" not in config: + config["input_size"] = 1024//8 + config["pe_interpolation"] = 2 + else: + # PixArt + target_arch = "PixArt" + if "input_size" not in config: + config["input_size"] = 512//8 + config["pe_interpolation"] = 1 + + print("PixArt guessed config:", target_arch, config) + return { + "target": target_arch, + "unet_config": config, + "sampling_settings": { + "beta_schedule" : "sqrt_linear", + "linear_start" : 0.0001, + "linear_end" : 0.02, + "timesteps" : 1000, + } + } diff --git a/ComfyUI_ExtraModels/PixArt/lora.py b/ComfyUI_ExtraModels/PixArt/lora.py new file mode 100644 index 0000000000000000000000000000000000000000..71b07908ea32afd4d83170f138e6df64ba7021c3 --- /dev/null +++ b/ComfyUI_ExtraModels/PixArt/lora.py @@ -0,0 +1,147 @@ +import os +import copy +import json +import torch +import comfy.lora +import comfy.model_management +from comfy.model_patcher import ModelPatcher +from .diffusers_convert import convert_lora_state_dict + +class EXM_PixArt_ModelPatcher(ModelPatcher): + def calculate_weight(self, patches, weight, key): + """ + This is almost the same as the comfy function, but stripped down to just the LoRA patch code. + The problem with the original code is the q/k/v keys being combined into one for the attention. + In the diffusers code, they're treated as separate keys, but in the reference code they're recombined (q+kv|qkv). + This means, for example, that the [1152,1152] weights become [3456,1152] in the state dict. + The issue with this is that the LoRA weights are [128,1152],[1152,128] and become [384,1162],[3456,128] instead. + + This is the best thing I could think of that would fix that, but it's very fragile. 
+ - Check key shape to determine if it needs the fallback logic + - Cut the input into parts based on the shape (undoing the torch.cat) + - Do the matrix multiplication logic + - Recombine them to match the expected shape + """ + for p in patches: + alpha = p[0] + v = p[1] + strength_model = p[2] + if strength_model != 1.0: + weight *= strength_model + + if isinstance(v, list): + v = (self.calculate_weight(v[1:], v[0].clone(), key), ) + + if len(v) == 2: + patch_type = v[0] + v = v[1] + + if patch_type == "lora": + mat1 = comfy.model_management.cast_to_device(v[0], weight.device, torch.float32) + mat2 = comfy.model_management.cast_to_device(v[1], weight.device, torch.float32) + if v[2] is not None: + alpha *= v[2] / mat2.shape[0] + try: + mat1 = mat1.flatten(start_dim=1) + mat2 = mat2.flatten(start_dim=1) + + ch1 = mat1.shape[0] // mat2.shape[1] + ch2 = mat2.shape[0] // mat1.shape[1] + ### Fallback logic for shape mismatch ### + if mat1.shape[0] != mat2.shape[1] and ch1 == ch2 and (mat1.shape[0]/mat2.shape[1])%1 == 0: + mat1 = mat1.chunk(ch1, dim=0) + mat2 = mat2.chunk(ch1, dim=0) + weight += torch.cat( + [alpha * torch.mm(mat1[x], mat2[x]) for x in range(ch1)], + dim=0, + ).reshape(weight.shape).type(weight.dtype) + else: + weight += (alpha * torch.mm(mat1, mat2)).reshape(weight.shape).type(weight.dtype) + except Exception as e: + print("ERROR", key, e) + return weight + + def clone(self): + n = EXM_PixArt_ModelPatcher(self.model, self.load_device, self.offload_device, self.size, self.current_device, weight_inplace_update=self.weight_inplace_update) + n.patches = {} + for k in self.patches: + n.patches[k] = self.patches[k][:] + + n.object_patches = self.object_patches.copy() + n.model_options = copy.deepcopy(self.model_options) + n.model_keys = self.model_keys + return n + +def replace_model_patcher(model): + n = EXM_PixArt_ModelPatcher( + model = model.model, + size = model.size, + load_device = model.load_device, + offload_device = model.offload_device, + current_device = model.current_device, + weight_inplace_update = model.weight_inplace_update, + ) + n.patches = {} + for k in model.patches: + n.patches[k] = model.patches[k][:] + + n.object_patches = model.object_patches.copy() + n.model_options = copy.deepcopy(model.model_options) + return n + +def find_peft_alpha(path): + def load_json(json_path): + with open(json_path) as f: + data = json.load(f) + alpha = data.get("lora_alpha") + alpha = alpha or data.get("alpha") + if not alpha: + print(" Found config but `lora_alpha` is missing!") + else: + print(f" Found config at {json_path} [alpha:{alpha}]") + return alpha + + # For some weird reason peft doesn't include the alpha in the actual model + print("PixArt: Warning! This is a PEFT LoRA. Trying to find config...") + files = [ + f"{os.path.splitext(path)[0]}.json", + f"{os.path.splitext(path)[0]}.config.json", + os.path.join(os.path.dirname(path),"adapter_config.json"), + ] + for file in files: + if os.path.isfile(file): + return load_json(file) + + print(" Missing config/alpha! assuming alpha of 8. Consider converting it/adding a config json to it.") + return 8.0 + +def load_pixart_lora(model, lora, lora_path, strength): + k_back = lambda x: x.replace(".lora_up.weight", "") + # need to convert the actual weights for this to work. 
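+    # (Format detection is keyed off the weight names: PEFT exports keep diffusers-style
+    #  module paths ending in ".lora_A/.lora_B" and ship no alpha, while OneTrainer flattens
+    #  keys under a "lora_transformer_" prefix with ".lora_down/.lora_up" plus per-key alphas.)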
+ if any(True for x in lora.keys() if x.endswith("adaln_single.linear.lora_A.weight")): + lora = convert_lora_state_dict(lora, peft=True) + alpha = find_peft_alpha(lora_path) + lora.update({f"{k_back(x)}.alpha":torch.tensor(alpha) for x in lora.keys() if "lora_up" in x}) + else: # OneTrainer + lora = convert_lora_state_dict(lora, peft=False) + + key_map = {k_back(x):f"diffusion_model.{k_back(x)}.weight" for x in lora.keys() if "lora_up" in x} # fake + + loaded = comfy.lora.load_lora(lora, key_map) + if model is not None: + # switch to custom model patcher when using LoRAs + if isinstance(model, EXM_PixArt_ModelPatcher): + new_modelpatcher = model.clone() + else: + new_modelpatcher = replace_model_patcher(model) + k = new_modelpatcher.add_patches(loaded, strength) + else: + k = () + new_modelpatcher = None + + k = set(k) + for x in loaded: + if (x not in k): + print("NOT LOADED", x) + + return new_modelpatcher diff --git a/ComfyUI_ExtraModels/PixArt/models/PixArt.py b/ComfyUI_ExtraModels/PixArt/models/PixArt.py new file mode 100644 index 0000000000000000000000000000000000000000..4d6cf93be017348dcafc42754552ca132112bd9d --- /dev/null +++ b/ComfyUI_ExtraModels/PixArt/models/PixArt.py @@ -0,0 +1,250 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. +# -------------------------------------------------------- +# References: +# GLIDE: https://github.com/openai/glide-text2im +# MAE: https://github.com/facebookresearch/mae/blob/main/models_mae.py +# -------------------------------------------------------- +import math +import torch +import torch.nn as nn +import os +import numpy as np +from timm.models.layers import DropPath +from timm.models.vision_transformer import PatchEmbed, Mlp + + +from .utils import auto_grad_checkpoint, to_2tuple +from .PixArt_blocks import t2i_modulate, CaptionEmbedder, AttentionKVCompress, MultiHeadCrossAttention, T2IFinalLayer, TimestepEmbedder, LabelEmbedder, FinalLayer + + +class PixArtBlock(nn.Module): + """ + A PixArt block with adaptive layer norm (adaLN-single) conditioning. + """ + def __init__(self, hidden_size, num_heads, mlp_ratio=4.0, drop_path=0, input_size=None, sampling=None, sr_ratio=1, qk_norm=False, **block_kwargs): + super().__init__() + self.norm1 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.attn = AttentionKVCompress( + hidden_size, num_heads=num_heads, qkv_bias=True, sampling=sampling, sr_ratio=sr_ratio, + qk_norm=qk_norm, **block_kwargs + ) + self.cross_attn = MultiHeadCrossAttention(hidden_size, num_heads, **block_kwargs) + self.norm2 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + # to be compatible with lower version pytorch + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.mlp = Mlp(in_features=hidden_size, hidden_features=int(hidden_size * mlp_ratio), act_layer=approx_gelu, drop=0) + self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() + self.scale_shift_table = nn.Parameter(torch.randn(6, hidden_size) / hidden_size ** 0.5) + self.sampling = sampling + self.sr_ratio = sr_ratio + + def forward(self, x, y, t, mask=None, **kwargs): + B, N, C = x.shape + + shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = (self.scale_shift_table[None] + t.reshape(B, 6, -1)).chunk(6, dim=1) + x = x + self.drop_path(gate_msa * self.attn(t2i_modulate(self.norm1(x), shift_msa, scale_msa)).reshape(B, N, C)) + x = x + self.cross_attn(x, y, mask) + x = x + self.drop_path(gate_mlp * self.mlp(t2i_modulate(self.norm2(x), shift_mlp, scale_mlp))) + + return x + + +### Core PixArt Model ### +class PixArt(nn.Module): + """ + Diffusion model with a Transformer backbone. + """ + def __init__( + self, + input_size=32, + patch_size=2, + in_channels=4, + hidden_size=1152, + depth=28, + num_heads=16, + mlp_ratio=4.0, + class_dropout_prob=0.1, + pred_sigma=True, + drop_path: float = 0., + caption_channels=4096, + pe_interpolation=1.0, + pe_precision=None, + config=None, + model_max_length=120, + qk_norm=False, + kv_compress_config=None, + **kwargs, + ): + super().__init__() + self.pred_sigma = pred_sigma + self.in_channels = in_channels + self.out_channels = in_channels * 2 if pred_sigma else in_channels + self.patch_size = patch_size + self.num_heads = num_heads + self.pe_interpolation = pe_interpolation + self.pe_precision = pe_precision + self.depth = depth + + self.x_embedder = PatchEmbed(input_size, patch_size, in_channels, hidden_size, bias=True) + self.t_embedder = TimestepEmbedder(hidden_size) + num_patches = self.x_embedder.num_patches + self.base_size = input_size // self.patch_size + # Will use fixed sin-cos embedding: + self.register_buffer("pos_embed", torch.zeros(1, num_patches, hidden_size)) + + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.t_block = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 6 * hidden_size, bias=True) + ) + self.y_embedder = CaptionEmbedder( + in_channels=caption_channels, hidden_size=hidden_size, uncond_prob=class_dropout_prob, + act_layer=approx_gelu, token_num=model_max_length + ) + drop_path = [x.item() for x in torch.linspace(0, drop_path, depth)] # stochastic depth decay rule + self.kv_compress_config = kv_compress_config + if kv_compress_config is None: + self.kv_compress_config = { + 'sampling': None, + 'scale_factor': 1, + 'kv_compress_layer': [], + } + self.blocks = nn.ModuleList([ + PixArtBlock( + hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path[i], + input_size=(input_size // patch_size, input_size // patch_size), + sampling=self.kv_compress_config['sampling'], + sr_ratio=int( + self.kv_compress_config['scale_factor'] + ) if i in self.kv_compress_config['kv_compress_layer'] else 1, + qk_norm=qk_norm, + ) + for i in range(depth) + ]) + self.final_layer = T2IFinalLayer(hidden_size, patch_size, self.out_channels) + + def forward_raw(self, x, t, y, mask=None, data_info=None): + """ + Original forward pass of PixArt. 
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + t: (N,) tensor of diffusion timesteps + y: (N, 1, 120, C) tensor of class labels + """ + x = x.to(self.dtype) + timestep = t.to(self.dtype) + y = y.to(self.dtype) + pos_embed = self.pos_embed.to(self.dtype) + self.h, self.w = x.shape[-2]//self.patch_size, x.shape[-1]//self.patch_size + x = self.x_embedder(x) + pos_embed # (N, T, D), where T = H * W / patch_size ** 2 + t = self.t_embedder(timestep.to(x.dtype)) # (N, D) + t0 = self.t_block(t) + y = self.y_embedder(y, self.training) # (N, 1, L, D) + if mask is not None: + if mask.shape[0] != y.shape[0]: + mask = mask.repeat(y.shape[0] // mask.shape[0], 1) + mask = mask.squeeze(1).squeeze(1) + y = y.squeeze(1).masked_select(mask.unsqueeze(-1) != 0).view(1, -1, x.shape[-1]) + y_lens = mask.sum(dim=1).tolist() + else: + y_lens = [y.shape[2]] * y.shape[0] + y = y.squeeze(1).view(1, -1, x.shape[-1]) + for block in self.blocks: + x = auto_grad_checkpoint(block, x, y, t0, y_lens) # (N, T, D) #support grad checkpoint + x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels) + x = self.unpatchify(x) # (N, out_channels, H, W) + return x + + def forward(self, x, timesteps, context, y=None, **kwargs): + """ + Forward pass that adapts comfy input to original forward function + x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + timesteps: (N,) tensor of diffusion timesteps + context: (N, 1, 120, C) conditioning + y: extra conditioning. + """ + ## Still accepts the input w/o that dim but returns garbage + if len(context.shape) == 3: + context = context.unsqueeze(1) + + ## run original forward pass + out = self.forward_raw( + x = x.to(self.dtype), + t = timesteps.to(self.dtype), + y = context.to(self.dtype), + ) + + ## only return EPS + out = out.to(torch.float) + eps, rest = out[:, :self.in_channels], out[:, self.in_channels:] + return eps + + def unpatchify(self, x): + """ + x: (N, T, patch_size**2 * C) + imgs: (N, H, W, C) + """ + c = self.out_channels + p = self.x_embedder.patch_size[0] + h = w = int(x.shape[1] ** 0.5) + assert h * w == x.shape[1] + + x = x.reshape(shape=(x.shape[0], h, w, p, p, c)) + x = torch.einsum('nhwpqc->nchpwq', x) + imgs = x.reshape(shape=(x.shape[0], c, h * p, h * p)) + return imgs + + +def get_2d_sincos_pos_embed(embed_dim, grid_size, cls_token=False, extra_tokens=0, pe_interpolation=1.0, base_size=16): + """ + grid_size: int of the grid height and width + return: + pos_embed: [grid_size*grid_size, embed_dim] or [1+grid_size*grid_size, embed_dim] (w/ or w/o cls_token) + """ + if isinstance(grid_size, int): + grid_size = to_2tuple(grid_size) + grid_h = np.arange(grid_size[0], dtype=np.float32) / (grid_size[0]/base_size) / pe_interpolation + grid_w = np.arange(grid_size[1], dtype=np.float32) / (grid_size[1]/base_size) / pe_interpolation + grid = np.meshgrid(grid_w, grid_h) # here w goes first + grid = np.stack(grid, axis=0) + grid = grid.reshape([2, 1, grid_size[1], grid_size[0]]) + + pos_embed = get_2d_sincos_pos_embed_from_grid(embed_dim, grid) + if cls_token and extra_tokens > 0: + pos_embed = np.concatenate([np.zeros([extra_tokens, embed_dim]), pos_embed], axis=0) + return pos_embed.astype(np.float32) + + +def get_2d_sincos_pos_embed_from_grid(embed_dim, grid): + assert embed_dim % 2 == 0 + + # use half of dimensions to encode grid_h + emb_h = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[0]) # (H*W, D/2) + emb_w = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[1]) # (H*W, 
D/2) + + emb = np.concatenate([emb_h, emb_w], axis=1) # (H*W, D) + return emb + + +def get_1d_sincos_pos_embed_from_grid(embed_dim, pos): + """ + embed_dim: output dimension for each position + pos: a list of positions to be encoded: size (M,) + out: (M, D) + """ + assert embed_dim % 2 == 0 + omega = np.arange(embed_dim // 2, dtype=np.float64) + omega /= embed_dim / 2. + omega = 1. / 10000 ** omega # (D/2,) + + pos = pos.reshape(-1) # (M,) + out = np.einsum('m,d->md', pos, omega) # (M, D/2), outer product + + emb_sin = np.sin(out) # (M, D/2) + emb_cos = np.cos(out) # (M, D/2) + + emb = np.concatenate([emb_sin, emb_cos], axis=1) # (M, D) + return emb diff --git a/ComfyUI_ExtraModels/PixArt/models/PixArtMS.py b/ComfyUI_ExtraModels/PixArt/models/PixArtMS.py new file mode 100644 index 0000000000000000000000000000000000000000..34ada90ed128a22ad1050a7fe14a286e7bae4051 --- /dev/null +++ b/ComfyUI_ExtraModels/PixArt/models/PixArtMS.py @@ -0,0 +1,273 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. +# -------------------------------------------------------- +# References: +# GLIDE: https://github.com/openai/glide-text2im +# MAE: https://github.com/facebookresearch/mae/blob/main/models_mae.py +# -------------------------------------------------------- +import torch +import torch.nn as nn +from tqdm import tqdm +from timm.models.layers import DropPath +from timm.models.vision_transformer import Mlp + +from .utils import auto_grad_checkpoint, to_2tuple +from .PixArt_blocks import t2i_modulate, CaptionEmbedder, AttentionKVCompress, MultiHeadCrossAttention, T2IFinalLayer, TimestepEmbedder, SizeEmbedder +from .PixArt import PixArt, get_2d_sincos_pos_embed + + +class PatchEmbed(nn.Module): + """ + 2D Image to Patch Embedding + """ + def __init__( + self, + patch_size=16, + in_chans=3, + embed_dim=768, + norm_layer=None, + flatten=True, + bias=True, + ): + super().__init__() + patch_size = to_2tuple(patch_size) + self.patch_size = patch_size + self.flatten = flatten + self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size, bias=bias) + self.norm = norm_layer(embed_dim) if norm_layer else nn.Identity() + + def forward(self, x): + x = self.proj(x) + if self.flatten: + x = x.flatten(2).transpose(1, 2) # BCHW -> BNC + x = self.norm(x) + return x + + +class PixArtMSBlock(nn.Module): + """ + A PixArt block with adaptive layer norm zero (adaLN-Zero) conditioning. + """ + def __init__(self, hidden_size, num_heads, mlp_ratio=4.0, drop_path=0., input_size=None, + sampling=None, sr_ratio=1, qk_norm=False, **block_kwargs): + super().__init__() + self.hidden_size = hidden_size + self.norm1 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.attn = AttentionKVCompress( + hidden_size, num_heads=num_heads, qkv_bias=True, sampling=sampling, sr_ratio=sr_ratio, + qk_norm=qk_norm, **block_kwargs + ) + self.cross_attn = MultiHeadCrossAttention(hidden_size, num_heads, **block_kwargs) + self.norm2 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + # to be compatible with lower version pytorch + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.mlp = Mlp(in_features=hidden_size, hidden_features=int(hidden_size * mlp_ratio), act_layer=approx_gelu, drop=0) + self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() + self.scale_shift_table = nn.Parameter(torch.randn(6, hidden_size) / hidden_size ** 0.5) + + def forward(self, x, y, t, mask=None, HW=None, **kwargs): + B, N, C = x.shape + + shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = (self.scale_shift_table[None] + t.reshape(B, 6, -1)).chunk(6, dim=1) + x = x + self.drop_path(gate_msa * self.attn(t2i_modulate(self.norm1(x), shift_msa, scale_msa), HW=HW)) + x = x + self.cross_attn(x, y, mask) + x = x + self.drop_path(gate_mlp * self.mlp(t2i_modulate(self.norm2(x), shift_mlp, scale_mlp))) + + return x + + +### Core PixArt Model ### +class PixArtMS(PixArt): + """ + Diffusion model with a Transformer backbone. + """ + def __init__( + self, + input_size=32, + patch_size=2, + in_channels=4, + hidden_size=1152, + depth=28, + num_heads=16, + mlp_ratio=4.0, + class_dropout_prob=0.1, + learn_sigma=True, + pred_sigma=True, + drop_path: float = 0., + caption_channels=4096, + pe_interpolation=None, + pe_precision=None, + config=None, + model_max_length=120, + micro_condition=True, + qk_norm=False, + kv_compress_config=None, + **kwargs, + ): + super().__init__( + input_size=input_size, + patch_size=patch_size, + in_channels=in_channels, + hidden_size=hidden_size, + depth=depth, + num_heads=num_heads, + mlp_ratio=mlp_ratio, + class_dropout_prob=class_dropout_prob, + learn_sigma=learn_sigma, + pred_sigma=pred_sigma, + drop_path=drop_path, + pe_interpolation=pe_interpolation, + config=config, + model_max_length=model_max_length, + qk_norm=qk_norm, + kv_compress_config=kv_compress_config, + **kwargs, + ) + self.dtype = torch.get_default_dtype() + self.h = self.w = 0 + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.t_block = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 6 * hidden_size, bias=True) + ) + self.x_embedder = PatchEmbed(patch_size, in_channels, hidden_size, bias=True) + self.y_embedder = CaptionEmbedder(in_channels=caption_channels, hidden_size=hidden_size, uncond_prob=class_dropout_prob, act_layer=approx_gelu, token_num=model_max_length) + self.micro_conditioning = micro_condition + if self.micro_conditioning: + self.csize_embedder = SizeEmbedder(hidden_size//3) # c_size embed + self.ar_embedder = SizeEmbedder(hidden_size//3) # aspect ratio embed + drop_path = [x.item() for x in torch.linspace(0, drop_path, depth)] # stochastic depth decay rule + if kv_compress_config is None: + kv_compress_config = { + 'sampling': None, + 'scale_factor': 1, + 'kv_compress_layer': [], + } + self.blocks = nn.ModuleList([ + PixArtMSBlock( + hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path[i], + input_size=(input_size // patch_size, input_size // patch_size), + sampling=kv_compress_config['sampling'], + sr_ratio=int(kv_compress_config['scale_factor']) if i in kv_compress_config['kv_compress_layer'] else 1, + qk_norm=qk_norm, + ) + for i in range(depth) + ]) + self.final_layer = T2IFinalLayer(hidden_size, patch_size, self.out_channels) + + def forward_raw(self, x, t, y, mask=None, data_info=None, **kwargs): + """ + Original forward pass of PixArt. 
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + t: (N,) tensor of diffusion timesteps + y: (N, 1, 120, C) tensor of class labels + """ + bs = x.shape[0] + x = x.to(self.dtype) + timestep = t.to(self.dtype) + y = y.to(self.dtype) + + pe_interpolation = self.pe_interpolation + if pe_interpolation is None or self.pe_precision is not None: + # calculate pe_interpolation on-the-fly + pe_interpolation = round((x.shape[-1]+x.shape[-2])/2.0 / (512/8.0), self.pe_precision or 0) + + self.h, self.w = x.shape[-2]//self.patch_size, x.shape[-1]//self.patch_size + pos_embed = torch.from_numpy( + get_2d_sincos_pos_embed( + self.pos_embed.shape[-1], (self.h, self.w), pe_interpolation=pe_interpolation, + base_size=self.base_size + ) + ).unsqueeze(0).to(device=x.device, dtype=self.dtype) + + x = self.x_embedder(x) + pos_embed # (N, T, D), where T = H * W / patch_size ** 2 + t = self.t_embedder(timestep) # (N, D) + + if self.micro_conditioning: + c_size, ar = data_info['img_hw'].to(self.dtype), data_info['aspect_ratio'].to(self.dtype) + csize = self.csize_embedder(c_size, bs) # (N, D) + ar = self.ar_embedder(ar, bs) # (N, D) + t = t + torch.cat([csize, ar], dim=1) + + t0 = self.t_block(t) + y = self.y_embedder(y, self.training) # (N, D) + + if mask is not None: + if mask.shape[0] != y.shape[0]: + mask = mask.repeat(y.shape[0] // mask.shape[0], 1) + mask = mask.squeeze(1).squeeze(1) + y = y.squeeze(1).masked_select(mask.unsqueeze(-1) != 0).view(1, -1, x.shape[-1]) + y_lens = mask.sum(dim=1).tolist() + else: + y_lens = [y.shape[2]] * y.shape[0] + y = y.squeeze(1).view(1, -1, x.shape[-1]) + for block in self.blocks: + x = auto_grad_checkpoint(block, x, y, t0, y_lens, (self.h, self.w), **kwargs) # (N, T, D) #support grad checkpoint + + x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels) + x = self.unpatchify(x) # (N, out_channels, H, W) + + return x + + def forward(self, x, timesteps, context, img_hw=None, aspect_ratio=None, **kwargs): + """ + Forward pass that adapts comfy input to original forward function + x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + timesteps: (N,) tensor of diffusion timesteps + context: (N, 1, 120, C) conditioning + img_hw: height|width conditioning + aspect_ratio: aspect ratio conditioning + """ + ## size/ar from cond with fallback based on the latent image shape. 
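+        ## img_hw falls back to the pixel size implied by the latent (H*8, W*8). Note that the
+        ## `or True` below means aspect_ratio is currently always recomputed from the latent
+        ## shape, so the aspect_ratio cond input is effectively ignored.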
+ bs = x.shape[0] + data_info = {} + if img_hw is None: + data_info["img_hw"] = torch.tensor( + [[x.shape[2]*8, x.shape[3]*8]], + dtype=self.dtype, + device=x.device + ).repeat(bs, 1) + else: + data_info["img_hw"] = img_hw.to(dtype=x.dtype, device=x.device) + if aspect_ratio is None or True: + data_info["aspect_ratio"] = torch.tensor( + [[x.shape[2]/x.shape[3]]], + dtype=self.dtype, + device=x.device + ).repeat(bs, 1) + else: + data_info["aspect_ratio"] = aspect_ratio.to(dtype=x.dtype, device=x.device) + + ## Still accepts the input w/o that dim but returns garbage + if len(context.shape) == 3: + context = context.unsqueeze(1) + + ## run original forward pass + out = self.forward_raw( + x = x.to(self.dtype), + t = timesteps.to(self.dtype), + y = context.to(self.dtype), + data_info=data_info, + ) + + ## only return EPS + out = out.to(torch.float) + eps, rest = out[:, :self.in_channels], out[:, self.in_channels:] + return eps + + def unpatchify(self, x): + """ + x: (N, T, patch_size**2 * C) + imgs: (N, H, W, C) + """ + c = self.out_channels + p = self.x_embedder.patch_size[0] + assert self.h * self.w == x.shape[1] + + x = x.reshape(shape=(x.shape[0], self.h, self.w, p, p, c)) + x = torch.einsum('nhwpqc->nchpwq', x) + imgs = x.reshape(shape=(x.shape[0], c, self.h * p, self.w * p)) + return imgs diff --git a/ComfyUI_ExtraModels/PixArt/models/PixArt_blocks.py b/ComfyUI_ExtraModels/PixArt/models/PixArt_blocks.py new file mode 100644 index 0000000000000000000000000000000000000000..df2eae08cca5e56b1385be8119c3d143c8e7b630 --- /dev/null +++ b/ComfyUI_ExtraModels/PixArt/models/PixArt_blocks.py @@ -0,0 +1,501 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. 
+# -------------------------------------------------------- +# References: +# GLIDE: https://github.com/openai/glide-text2im +# MAE: https://github.com/facebookresearch/mae/blob/main/models_mae.py +# -------------------------------------------------------- +import math +import torch +import torch.nn as nn +import torch.nn.functional as F +from timm.models.vision_transformer import Mlp, Attention as Attention_ +from einops import rearrange + +sdpa_32b = None +Q_4GB_LIMIT = 32000000 +"""If q is greater than this, the operation will likely require >4GB VRAM, which will fail on Intel Arc Alchemist GPUs without a workaround.""" +# 2k = 37 748 736 +# 1024 = 9 437 184 +# 2k model goes very slightly over 4GB + +from comfy import model_management +if model_management.xformers_enabled(): + import xformers + import xformers.ops +else: + if model_management.xpu_available: + import intel_extension_for_pytorch as ipex + import os + if not torch.xpu.has_fp64_dtype() and not os.environ.get('IPEX_FORCE_ATTENTION_SLICE', None): + from ...utils.IPEX.attention import scaled_dot_product_attention_32_bit + sdpa_32b = scaled_dot_product_attention_32_bit + print("Using IPEX 4GB SDPA workaround") + else: + print("No IPEX 4GB workaround") + +def modulate(x, shift, scale): + return x * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1) + +def t2i_modulate(x, shift, scale): + return x * (1 + scale) + shift + +class MultiHeadCrossAttention(nn.Module): + def __init__(self, d_model, num_heads, attn_drop=0., proj_drop=0., **block_kwargs): + super(MultiHeadCrossAttention, self).__init__() + assert d_model % num_heads == 0, "d_model must be divisible by num_heads" + + self.d_model = d_model + self.num_heads = num_heads + self.head_dim = d_model // num_heads + + self.q_linear = nn.Linear(d_model, d_model) + self.kv_linear = nn.Linear(d_model, d_model*2) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(d_model, d_model) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x, cond, mask=None): + # query/value: img tokens; key: condition; mask: if padding tokens + B, N, C = x.shape + + q = self.q_linear(x).view(1, -1, self.num_heads, self.head_dim) + kv = self.kv_linear(cond).view(1, -1, 2, self.num_heads, self.head_dim) + k, v = kv.unbind(2) + + if model_management.xformers_enabled(): + attn_bias = None + if mask is not None: + attn_bias = xformers.ops.fmha.BlockDiagonalMask.from_seqlens([N] * B, mask) + x = xformers.ops.memory_efficient_attention( + q, k, v, + p=self.attn_drop.p, + attn_bias=attn_bias + ) + else: + q, k, v = map(lambda t: t.permute(0, 2, 1, 3),(q, k, v),) + attn_mask = None + if mask is not None and len(mask) > 1: + + # Create equivalent of xformer diagonal block mask, still only correct for square masks + # But depth doesn't matter as tensors can expand in that dimension + attn_mask_template = torch.ones( + [q.shape[2] // B, mask[0]], + dtype=torch.bool, + device=q.device + ) + attn_mask = torch.block_diag(attn_mask_template) + + # create a mask on the diagonal for each mask in the batch + for n in range(B - 1): + attn_mask = torch.block_diag(attn_mask, attn_mask_template) + + p = getattr(self.attn_drop, "p", 0) # IPEX.optimize() will turn attn_drop into an Identity() + + if sdpa_32b is not None and (q.element_size() * q.nelement()) > Q_4GB_LIMIT: + sdpa = sdpa_32b + else: + sdpa = torch.nn.functional.scaled_dot_product_attention + + x = sdpa( + q, k, v, + attn_mask=attn_mask, + dropout_p=p + ).permute(0, 2, 1, 3).contiguous() + x = x.view(B, -1, C) + x = self.proj(x) + x = 
self.proj_drop(x) + return x + + +class AttentionKVCompress(Attention_): + """Multi-head Attention block with KV token compression and qk norm.""" + + def __init__( + self, + dim, + num_heads=8, + qkv_bias=True, + sampling='conv', + sr_ratio=1, + qk_norm=False, + **block_kwargs, + ): + """ + Args: + dim (int): Number of input channels. + num_heads (int): Number of attention heads. + qkv_bias (bool: If True, add a learnable bias to query, key, value. + """ + super().__init__(dim, num_heads=num_heads, qkv_bias=qkv_bias, **block_kwargs) + + self.sampling=sampling # ['conv', 'ave', 'uniform', 'uniform_every'] + self.sr_ratio = sr_ratio + if sr_ratio > 1 and sampling == 'conv': + # Avg Conv Init. + self.sr = nn.Conv2d(dim, dim, groups=dim, kernel_size=sr_ratio, stride=sr_ratio) + self.sr.weight.data.fill_(1/sr_ratio**2) + self.sr.bias.data.zero_() + self.norm = nn.LayerNorm(dim) + if qk_norm: + self.q_norm = nn.LayerNorm(dim) + self.k_norm = nn.LayerNorm(dim) + else: + self.q_norm = nn.Identity() + self.k_norm = nn.Identity() + + def downsample_2d(self, tensor, H, W, scale_factor, sampling=None): + if sampling is None or scale_factor == 1: + return tensor + B, N, C = tensor.shape + + if sampling == 'uniform_every': + return tensor[:, ::scale_factor], int(N // scale_factor) + + tensor = tensor.reshape(B, H, W, C).permute(0, 3, 1, 2) + new_H, new_W = int(H / scale_factor), int(W / scale_factor) + new_N = new_H * new_W + + if sampling == 'ave': + tensor = F.interpolate( + tensor, scale_factor=1 / scale_factor, mode='nearest' + ).permute(0, 2, 3, 1) + elif sampling == 'uniform': + tensor = tensor[:, :, ::scale_factor, ::scale_factor].permute(0, 2, 3, 1) + elif sampling == 'conv': + tensor = self.sr(tensor).reshape(B, C, -1).permute(0, 2, 1) + tensor = self.norm(tensor) + else: + raise ValueError + + return tensor.reshape(B, new_N, C).contiguous(), new_N + + def forward(self, x, mask=None, HW=None, block_id=None): + B, N, C = x.shape # 2 4096 1152 + new_N = N + if HW is None: + H = W = int(N ** 0.5) + else: + H, W = HW + qkv = self.qkv(x).reshape(B, N, 3, C) + + q, k, v = qkv.unbind(2) + dtype = q.dtype + q = self.q_norm(q) + k = self.k_norm(k) + + # KV compression + if self.sr_ratio > 1: + k, new_N = self.downsample_2d(k, H, W, self.sr_ratio, sampling=self.sampling) + v, new_N = self.downsample_2d(v, H, W, self.sr_ratio, sampling=self.sampling) + + q = q.reshape(B, N, self.num_heads, C // self.num_heads).to(dtype) + k = k.reshape(B, new_N, self.num_heads, C // self.num_heads).to(dtype) + v = v.reshape(B, new_N, self.num_heads, C // self.num_heads).to(dtype) + + attn_bias = None + if mask is not None: + attn_bias = torch.zeros([B * self.num_heads, q.shape[1], k.shape[1]], dtype=q.dtype, device=q.device) + attn_bias.masked_fill_(mask.squeeze(1).repeat(self.num_heads, 1, 1) == 0, float('-inf')) + # Switch between torch / xformers attention + if model_management.xformers_enabled(): + x = xformers.ops.memory_efficient_attention( + q, k, v, + p=self.attn_drop.p, + attn_bias=attn_bias + ) + else: + q, k, v = map(lambda t: t.transpose(1, 2),(q, k, v),) + + p = getattr(self.attn_drop, "p", 0) # IPEX.optimize() will turn attn_drop into an Identity() + + if sdpa_32b is not None and (q.element_size() * q.nelement()) > Q_4GB_LIMIT: + sdpa = sdpa_32b + else: + sdpa = torch.nn.functional.scaled_dot_product_attention + + x = sdpa( + q, k, v, + dropout_p=p, + attn_mask=attn_bias + ).transpose(1, 2).contiguous() + x = x.view(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + 
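+# Illustrative shape walk-through for AttentionKVCompress (comments only, not executed;
+# example numbers assume a 1024px latent, i.e. B=2, N=4096, C=1152 as in the inline comment):
+#   qkv(x) -> (B, N, 3, C), unbound into q, k, v of shape (B, N, C)
+#   with sr_ratio=2 and sampling='conv', k/v are downsampled to new_N = (H/2)*(W/2) = 1024
+#   q is reshaped to (B, N, heads, C//heads), k/v to (B, new_N, heads, C//heads)
+#   the attention output is reshaped back to (B, N, C) and passed through proj/proj_drop
+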
+################################################################################# +# AMP attention with fp32 softmax to fix loss NaN problem during training # +################################################################################# +class Attention(Attention_): + def forward(self, x): + B, N, C = x.shape + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) + q, k, v = qkv.unbind(0) # make torchscript happy (cannot use tensor as tuple) + use_fp32_attention = getattr(self, 'fp32_attention', False) + if use_fp32_attention: + q, k = q.float(), k.float() + with torch.cuda.amp.autocast(enabled=not use_fp32_attention): + attn = (q @ k.transpose(-2, -1)) * self.scale + attn = attn.softmax(dim=-1) + + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +class FinalLayer(nn.Module): + """ + The final layer of PixArt. + """ + + def __init__(self, hidden_size, patch_size, out_channels): + super().__init__() + self.norm_final = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.linear = nn.Linear(hidden_size, patch_size * patch_size * out_channels, bias=True) + self.adaLN_modulation = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 2 * hidden_size, bias=True) + ) + + def forward(self, x, c): + shift, scale = self.adaLN_modulation(c).chunk(2, dim=1) + x = modulate(self.norm_final(x), shift, scale) + x = self.linear(x) + return x + + +class T2IFinalLayer(nn.Module): + """ + The final layer of PixArt. + """ + + def __init__(self, hidden_size, patch_size, out_channels): + super().__init__() + self.norm_final = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.linear = nn.Linear(hidden_size, patch_size * patch_size * out_channels, bias=True) + self.scale_shift_table = nn.Parameter(torch.randn(2, hidden_size) / hidden_size ** 0.5) + self.out_channels = out_channels + + def forward(self, x, t): + shift, scale = (self.scale_shift_table[None] + t[:, None]).chunk(2, dim=1) + x = t2i_modulate(self.norm_final(x), shift, scale) + x = self.linear(x) + return x + + +class MaskFinalLayer(nn.Module): + """ + The final layer of PixArt. + """ + + def __init__(self, final_hidden_size, c_emb_size, patch_size, out_channels): + super().__init__() + self.norm_final = nn.LayerNorm(final_hidden_size, elementwise_affine=False, eps=1e-6) + self.linear = nn.Linear(final_hidden_size, patch_size * patch_size * out_channels, bias=True) + self.adaLN_modulation = nn.Sequential( + nn.SiLU(), + nn.Linear(c_emb_size, 2 * final_hidden_size, bias=True) + ) + def forward(self, x, t): + shift, scale = self.adaLN_modulation(t).chunk(2, dim=1) + x = modulate(self.norm_final(x), shift, scale) + x = self.linear(x) + return x + + +class DecoderLayer(nn.Module): + """ + The final layer of PixArt. 
+ """ + + def __init__(self, hidden_size, decoder_hidden_size): + super().__init__() + self.norm_decoder = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.linear = nn.Linear(hidden_size, decoder_hidden_size, bias=True) + self.adaLN_modulation = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 2 * hidden_size, bias=True) + ) + def forward(self, x, t): + shift, scale = self.adaLN_modulation(t).chunk(2, dim=1) + x = modulate(self.norm_decoder(x), shift, scale) + x = self.linear(x) + return x + + +################################################################################# +# Embedding Layers for Timesteps and Class Labels # +################################################################################# +class TimestepEmbedder(nn.Module): + """ + Embeds scalar timesteps into vector representations. + """ + + def __init__(self, hidden_size, frequency_embedding_size=256): + super().__init__() + self.mlp = nn.Sequential( + nn.Linear(frequency_embedding_size, hidden_size, bias=True), + nn.SiLU(), + nn.Linear(hidden_size, hidden_size, bias=True), + ) + self.frequency_embedding_size = frequency_embedding_size + + @staticmethod + def timestep_embedding(t, dim, max_period=10000): + """ + Create sinusoidal timestep embeddings. + :param t: a 1-D Tensor of N indices, one per batch element. + These may be fractional. + :param dim: the dimension of the output. + :param max_period: controls the minimum frequency of the embeddings. + :return: an (N, D) Tensor of positional embeddings. + """ + # https://github.com/openai/glide-text2im/blob/main/glide_text2im/nn.py + half = dim // 2 + freqs = torch.exp( + -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32, device=t.device) / half) + args = t[:, None].float() * freqs[None] + embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) + if dim % 2: + embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1) + return embedding + + def forward(self, t): + t_freq = self.timestep_embedding(t, self.frequency_embedding_size) + t_emb = self.mlp(t_freq.to(t.dtype)) + return t_emb + + +class SizeEmbedder(TimestepEmbedder): + """ + Embeds scalar timesteps into vector representations. + """ + + def __init__(self, hidden_size, frequency_embedding_size=256): + super().__init__(hidden_size=hidden_size, frequency_embedding_size=frequency_embedding_size) + self.mlp = nn.Sequential( + nn.Linear(frequency_embedding_size, hidden_size, bias=True), + nn.SiLU(), + nn.Linear(hidden_size, hidden_size, bias=True), + ) + self.frequency_embedding_size = frequency_embedding_size + self.outdim = hidden_size + + def forward(self, s, bs): + if s.ndim == 1: + s = s[:, None] + assert s.ndim == 2 + if s.shape[0] != bs: + s = s.repeat(bs//s.shape[0], 1) + assert s.shape[0] == bs + b, dims = s.shape[0], s.shape[1] + s = rearrange(s, "b d -> (b d)") + s_freq = self.timestep_embedding(s, self.frequency_embedding_size) + s_emb = self.mlp(s_freq.to(s.dtype)) + s_emb = rearrange(s_emb, "(b d) d2 -> b (d d2)", b=b, d=dims, d2=self.outdim) + return s_emb + + +class LabelEmbedder(nn.Module): + """ + Embeds class labels into vector representations. Also handles label dropout for classifier-free guidance. 
+ """ + + def __init__(self, num_classes, hidden_size, dropout_prob): + super().__init__() + use_cfg_embedding = dropout_prob > 0 + self.embedding_table = nn.Embedding(num_classes + use_cfg_embedding, hidden_size) + self.num_classes = num_classes + self.dropout_prob = dropout_prob + + def token_drop(self, labels, force_drop_ids=None): + """ + Drops labels to enable classifier-free guidance. + """ + if force_drop_ids is None: + drop_ids = torch.rand(labels.shape[0]).cuda() < self.dropout_prob + else: + drop_ids = force_drop_ids == 1 + labels = torch.where(drop_ids, self.num_classes, labels) + return labels + + def forward(self, labels, train, force_drop_ids=None): + use_dropout = self.dropout_prob > 0 + if (train and use_dropout) or (force_drop_ids is not None): + labels = self.token_drop(labels, force_drop_ids) + embeddings = self.embedding_table(labels) + return embeddings + + +class CaptionEmbedder(nn.Module): + """ + Embeds class labels into vector representations. Also handles label dropout for classifier-free guidance. + """ + + def __init__(self, in_channels, hidden_size, uncond_prob, act_layer=nn.GELU(approximate='tanh'), token_num=120): + super().__init__() + self.y_proj = Mlp(in_features=in_channels, hidden_features=hidden_size, out_features=hidden_size, act_layer=act_layer, drop=0) + self.register_buffer("y_embedding", nn.Parameter(torch.randn(token_num, in_channels) / in_channels ** 0.5)) + self.uncond_prob = uncond_prob + + def token_drop(self, caption, force_drop_ids=None): + """ + Drops labels to enable classifier-free guidance. + """ + if force_drop_ids is None: + drop_ids = torch.rand(caption.shape[0]).cuda() < self.uncond_prob + else: + drop_ids = force_drop_ids == 1 + caption = torch.where(drop_ids[:, None, None, None], self.y_embedding, caption) + return caption + + def forward(self, caption, train, force_drop_ids=None): + if train: + assert caption.shape[2:] == self.y_embedding.shape + use_dropout = self.uncond_prob > 0 + if (train and use_dropout) or (force_drop_ids is not None): + caption = self.token_drop(caption, force_drop_ids) + caption = self.y_proj(caption) + return caption + + +class CaptionEmbedderDoubleBr(nn.Module): + """ + Embeds class labels into vector representations. Also handles label dropout for classifier-free guidance. + """ + + def __init__(self, in_channels, hidden_size, uncond_prob, act_layer=nn.GELU(approximate='tanh'), token_num=120): + super().__init__() + self.proj = Mlp(in_features=in_channels, hidden_features=hidden_size, out_features=hidden_size, act_layer=act_layer, drop=0) + self.embedding = nn.Parameter(torch.randn(1, in_channels) / 10 ** 0.5) + self.y_embedding = nn.Parameter(torch.randn(token_num, in_channels) / 10 ** 0.5) + self.uncond_prob = uncond_prob + + def token_drop(self, global_caption, caption, force_drop_ids=None): + """ + Drops labels to enable classifier-free guidance. 
+ """ + if force_drop_ids is None: + drop_ids = torch.rand(global_caption.shape[0]).cuda() < self.uncond_prob + else: + drop_ids = force_drop_ids == 1 + global_caption = torch.where(drop_ids[:, None], self.embedding, global_caption) + caption = torch.where(drop_ids[:, None, None, None], self.y_embedding, caption) + return global_caption, caption + + def forward(self, caption, train, force_drop_ids=None): + assert caption.shape[2: ] == self.y_embedding.shape + global_caption = caption.mean(dim=2).squeeze() + use_dropout = self.uncond_prob > 0 + if (train and use_dropout) or (force_drop_ids is not None): + global_caption, caption = self.token_drop(global_caption, caption, force_drop_ids) + y_embed = self.proj(global_caption) + return y_embed, caption diff --git a/ComfyUI_ExtraModels/PixArt/models/pixart_controlnet.py b/ComfyUI_ExtraModels/PixArt/models/pixart_controlnet.py new file mode 100644 index 0000000000000000000000000000000000000000..37fa4c1dcc0e6de44719c7fe897e2ecb0653eedb --- /dev/null +++ b/ComfyUI_ExtraModels/PixArt/models/pixart_controlnet.py @@ -0,0 +1,312 @@ +import re +import torch +import torch.nn as nn + +from copy import deepcopy +from torch import Tensor +from torch.nn import Module, Linear, init +from typing import Any, Mapping + +from .PixArt import PixArt, get_2d_sincos_pos_embed +from .PixArtMS import PixArtMSBlock, PixArtMS +from .utils import auto_grad_checkpoint + +# The implementation of ControlNet-Half architrecture +# https://github.com/lllyasviel/ControlNet/discussions/188 +class ControlT2IDitBlockHalf(Module): + def __init__(self, base_block: PixArtMSBlock, block_index: 0) -> None: + super().__init__() + self.copied_block = deepcopy(base_block) + self.block_index = block_index + + for p in self.copied_block.parameters(): + p.requires_grad_(True) + + self.copied_block.load_state_dict(base_block.state_dict()) + self.copied_block.train() + + self.hidden_size = hidden_size = base_block.hidden_size + if self.block_index == 0: + self.before_proj = Linear(hidden_size, hidden_size) + init.zeros_(self.before_proj.weight) + init.zeros_(self.before_proj.bias) + self.after_proj = Linear(hidden_size, hidden_size) + init.zeros_(self.after_proj.weight) + init.zeros_(self.after_proj.bias) + + def forward(self, x, y, t, mask=None, c=None): + + if self.block_index == 0: + # the first block + c = self.before_proj(c) + c = self.copied_block(x + c, y, t, mask) + c_skip = self.after_proj(c) + else: + # load from previous c and produce the c for skip connection + c = self.copied_block(c, y, t, mask) + c_skip = self.after_proj(c) + + return c, c_skip + + +# The implementation of ControlPixArtHalf net +class ControlPixArtHalf(Module): + # only support single res model + def __init__(self, base_model: PixArt, copy_blocks_num: int = 13) -> None: + super().__init__() + self.dtype = torch.get_default_dtype() + self.base_model = base_model.eval() + self.controlnet = [] + self.copy_blocks_num = copy_blocks_num + self.total_blocks_num = len(base_model.blocks) + for p in self.base_model.parameters(): + p.requires_grad_(False) + + # Copy first copy_blocks_num block + for i in range(copy_blocks_num): + self.controlnet.append(ControlT2IDitBlockHalf(base_model.blocks[i], i)) + self.controlnet = nn.ModuleList(self.controlnet) + + def __getattr__(self, name: str) -> Tensor or Module: + if name in ['forward', 'forward_with_dpmsolver', 'forward_with_cfg', 'forward_c', 'load_state_dict']: + return self.__dict__[name] + elif name in ['base_model', 'controlnet']: + return super().__getattr__(name) + 
else: + return getattr(self.base_model, name) + + def forward_c(self, c): + self.h, self.w = c.shape[-2]//self.patch_size, c.shape[-1]//self.patch_size + pos_embed = torch.from_numpy(get_2d_sincos_pos_embed(self.pos_embed.shape[-1], (self.h, self.w), lewei_scale=self.lewei_scale, base_size=self.base_size)).unsqueeze(0).to(c.device).to(self.dtype) + return self.x_embedder(c) + pos_embed if c is not None else c + + # def forward(self, x, t, c, **kwargs): + # return self.base_model(x, t, c=self.forward_c(c), **kwargs) + def forward_raw(self, x, timestep, y, mask=None, data_info=None, c=None, **kwargs): + # modify the original PixArtMS forward function + if c is not None: + c = c.to(self.dtype) + c = self.forward_c(c) + """ + Forward pass of PixArt. + x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + t: (N,) tensor of diffusion timesteps + y: (N, 1, 120, C) tensor of class labels + """ + x = x.to(self.dtype) + timestep = timestep.to(self.dtype) + y = y.to(self.dtype) + pos_embed = self.pos_embed.to(self.dtype) + self.h, self.w = x.shape[-2]//self.patch_size, x.shape[-1]//self.patch_size + x = self.x_embedder(x) + pos_embed # (N, T, D), where T = H * W / patch_size ** 2 + t = self.t_embedder(timestep.to(x.dtype)) # (N, D) + t0 = self.t_block(t) + y = self.y_embedder(y, self.training) # (N, 1, L, D) + if mask is not None: + if mask.shape[0] != y.shape[0]: + mask = mask.repeat(y.shape[0] // mask.shape[0], 1) + mask = mask.squeeze(1).squeeze(1) + y = y.squeeze(1).masked_select(mask.unsqueeze(-1) != 0).view(1, -1, x.shape[-1]) + y_lens = mask.sum(dim=1).tolist() + else: + y_lens = [y.shape[2]] * y.shape[0] + y = y.squeeze(1).view(1, -1, x.shape[-1]) + + # define the first layer + x = auto_grad_checkpoint(self.base_model.blocks[0], x, y, t0, y_lens, **kwargs) # (N, T, D) #support grad checkpoint + + if c is not None: + # update c + for index in range(1, self.copy_blocks_num + 1): + c, c_skip = auto_grad_checkpoint(self.controlnet[index - 1], x, y, t0, y_lens, c, **kwargs) + x = auto_grad_checkpoint(self.base_model.blocks[index], x + c_skip, y, t0, y_lens, **kwargs) + + # update x + for index in range(self.copy_blocks_num + 1, self.total_blocks_num): + x = auto_grad_checkpoint(self.base_model.blocks[index], x, y, t0, y_lens, **kwargs) + else: + for index in range(1, self.total_blocks_num): + x = auto_grad_checkpoint(self.base_model.blocks[index], x, y, t0, y_lens, **kwargs) + + x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels) + x = self.unpatchify(x) # (N, out_channels, H, W) + return x + + def forward(self, x, timesteps, context, cn_hint=None, **kwargs): + """ + Forward pass that adapts comfy input to original forward function + x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + timesteps: (N,) tensor of diffusion timesteps + context: (N, 1, 120, C) conditioning + cn_hint: controlnet hint + """ + ## Still accepts the input w/o that dim but returns garbage + if len(context.shape) == 3: + context = context.unsqueeze(1) + + ## run original forward pass + out = self.forward_raw( + x = x.to(self.dtype), + timestep = timesteps.to(self.dtype), + y = context.to(self.dtype), + c = cn_hint, + ) + + ## only return EPS + out = out.to(torch.float) + eps, rest = out[:, :self.in_channels], out[:, self.in_channels:] + return eps + + def forward_with_dpmsolver(self, x, t, y, data_info, c, **kwargs): + model_out = self.forward_raw(x, t, y, data_info=data_info, c=c, **kwargs) + return model_out.chunk(2, dim=1)[0] + + # def 
forward_with_dpmsolver(self, x, t, y, data_info, c, **kwargs): + # return self.base_model.forward_with_dpmsolver(x, t, y, data_info=data_info, c=self.forward_c(c), **kwargs) + + def forward_with_cfg(self, x, t, y, cfg_scale, data_info, c, **kwargs): + return self.base_model.forward_with_cfg(x, t, y, cfg_scale, data_info, c=self.forward_c(c), **kwargs) + + def load_state_dict(self, state_dict: Mapping[str, Any], strict: bool = True): + if all((k.startswith('base_model') or k.startswith('controlnet')) for k in state_dict.keys()): + return super().load_state_dict(state_dict, strict) + else: + new_key = {} + for k in state_dict.keys(): + new_key[k] = re.sub(r"(blocks\.\d+)(.*)", r"\1.base_block\2", k) + for k, v in new_key.items(): + if k != v: + print(f"replace {k} to {v}") + state_dict[v] = state_dict.pop(k) + + return self.base_model.load_state_dict(state_dict, strict) + + def unpatchify(self, x): + """ + x: (N, T, patch_size**2 * C) + imgs: (N, H, W, C) + """ + c = self.out_channels + p = self.x_embedder.patch_size[0] + assert self.h * self.w == x.shape[1] + + x = x.reshape(shape=(x.shape[0], self.h, self.w, p, p, c)) + x = torch.einsum('nhwpqc->nchpwq', x) + imgs = x.reshape(shape=(x.shape[0], c, self.h * p, self.w * p)) + return imgs + + # @property + # def dtype(self): + ## 返回模型参数的数据类型 + # return next(self.parameters()).dtype + + +# The implementation for PixArtMS_Half + 1024 resolution +class ControlPixArtMSHalf(ControlPixArtHalf): + # support multi-scale res model (multi-scale model can also be applied to single reso training & inference) + def __init__(self, base_model: PixArtMS, copy_blocks_num: int = 13) -> None: + super().__init__(base_model=base_model, copy_blocks_num=copy_blocks_num) + + def forward_raw(self, x, timestep, y, mask=None, data_info=None, c=None, **kwargs): + # modify the original PixArtMS forward function + """ + Forward pass of PixArt. 
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + t: (N,) tensor of diffusion timesteps + y: (N, 1, 120, C) tensor of class labels + """ + if c is not None: + c = c.to(self.dtype) + c = self.forward_c(c) + bs = x.shape[0] + x = x.to(self.dtype) + timestep = timestep.to(self.dtype) + y = y.to(self.dtype) + c_size, ar = data_info['img_hw'].to(self.dtype), data_info['aspect_ratio'].to(self.dtype) + self.h, self.w = x.shape[-2]//self.patch_size, x.shape[-1]//self.patch_size + + pos_embed = torch.from_numpy(get_2d_sincos_pos_embed(self.pos_embed.shape[-1], (self.h, self.w), lewei_scale=self.lewei_scale, base_size=self.base_size)).unsqueeze(0).to(x.device).to(self.dtype) + x = self.x_embedder(x) + pos_embed # (N, T, D), where T = H * W / patch_size ** 2 + t = self.t_embedder(timestep) # (N, D) + csize = self.csize_embedder(c_size, bs) # (N, D) + ar = self.ar_embedder(ar, bs) # (N, D) + t = t + torch.cat([csize, ar], dim=1) + t0 = self.t_block(t) + y = self.y_embedder(y, self.training) # (N, D) + if mask is not None: + if mask.shape[0] != y.shape[0]: + mask = mask.repeat(y.shape[0] // mask.shape[0], 1) + mask = mask.squeeze(1).squeeze(1) + y = y.squeeze(1).masked_select(mask.unsqueeze(-1) != 0).view(1, -1, x.shape[-1]) + y_lens = mask.sum(dim=1).tolist() + else: + y_lens = [y.shape[2]] * y.shape[0] + y = y.squeeze(1).view(1, -1, x.shape[-1]) + + # define the first layer + x = auto_grad_checkpoint(self.base_model.blocks[0], x, y, t0, y_lens, **kwargs) # (N, T, D) #support grad checkpoint + + if c is not None: + # update c + for index in range(1, self.copy_blocks_num + 1): + c, c_skip = auto_grad_checkpoint(self.controlnet[index - 1], x, y, t0, y_lens, c, **kwargs) + x = auto_grad_checkpoint(self.base_model.blocks[index], x + c_skip, y, t0, y_lens, **kwargs) + + # update x + for index in range(self.copy_blocks_num + 1, self.total_blocks_num): + x = auto_grad_checkpoint(self.base_model.blocks[index], x, y, t0, y_lens, **kwargs) + else: + for index in range(1, self.total_blocks_num): + x = auto_grad_checkpoint(self.base_model.blocks[index], x, y, t0, y_lens, **kwargs) + + x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels) + x = self.unpatchify(x) # (N, out_channels, H, W) + return x + + def forward(self, x, timesteps, context, img_hw=None, aspect_ratio=None, cn_hint=None, **kwargs): + """ + Forward pass that adapts comfy input to original forward function + x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + timesteps: (N,) tensor of diffusion timesteps + context: (N, 1, 120, C) conditioning + img_hw: height|width conditioning + aspect_ratio: aspect ratio conditioning + cn_hint: controlnet hint + """ + ## size/ar from cond with fallback based on the latent image shape. 
+ bs = x.shape[0] + data_info = {} + if img_hw is None: + data_info["img_hw"] = torch.tensor( + [[x.shape[2]*8, x.shape[3]*8]], + dtype=self.dtype, + device=x.device + ).repeat(bs, 1) + else: + data_info["img_hw"] = img_hw.to(x.dtype) + if aspect_ratio is None or True: + data_info["aspect_ratio"] = torch.tensor( + [[x.shape[2]/x.shape[3]]], + dtype=self.dtype, + device=x.device + ).repeat(bs, 1) + else: + data_info["aspect_ratio"] = aspect_ratio.to(x.dtype) + + ## Still accepts the input w/o that dim but returns garbage + if len(context.shape) == 3: + context = context.unsqueeze(1) + + ## run original forward pass + out = self.forward_raw( + x = x.to(self.dtype), + timestep = timesteps.to(self.dtype), + y = context.to(self.dtype), + c = cn_hint, + data_info=data_info, + ) + + ## only return EPS + out = out.to(torch.float) + eps, rest = out[:, :self.in_channels], out[:, self.in_channels:] + return eps diff --git a/ComfyUI_ExtraModels/PixArt/models/utils.py b/ComfyUI_ExtraModels/PixArt/models/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..9f7762148226a32b7c20d8527fa144a14acc275c --- /dev/null +++ b/ComfyUI_ExtraModels/PixArt/models/utils.py @@ -0,0 +1,122 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from torch.utils.checkpoint import checkpoint, checkpoint_sequential +from collections.abc import Iterable +from itertools import repeat + +def _ntuple(n): + def parse(x): + if isinstance(x, Iterable) and not isinstance(x, str): + return x + return tuple(repeat(x, n)) + return parse + +to_1tuple = _ntuple(1) +to_2tuple = _ntuple(2) + +def set_grad_checkpoint(model, use_fp32_attention=False, gc_step=1): + assert isinstance(model, nn.Module) + + def set_attr(module): + module.grad_checkpointing = True + module.fp32_attention = use_fp32_attention + module.grad_checkpointing_step = gc_step + model.apply(set_attr) + +def auto_grad_checkpoint(module, *args, **kwargs): + if getattr(module, 'grad_checkpointing', False): + if isinstance(module, Iterable): + gc_step = module[0].grad_checkpointing_step + return checkpoint_sequential(module, gc_step, *args, **kwargs) + else: + return checkpoint(module, *args, **kwargs) + return module(*args, **kwargs) + +def checkpoint_sequential(functions, step, input, *args, **kwargs): + + # Hack for keyword-only parameter in a python 2.7-compliant way + preserve = kwargs.pop('preserve_rng_state', True) + if kwargs: + raise ValueError("Unexpected keyword arguments: " + ",".join(arg for arg in kwargs)) + + def run_function(start, end, functions): + def forward(input): + for j in range(start, end + 1): + input = functions[j](input, *args) + return input + return forward + + if isinstance(functions, torch.nn.Sequential): + functions = list(functions.children()) + + # the last chunk has to be non-volatile + end = -1 + segment = len(functions) // step + for start in range(0, step * (segment - 1), step): + end = start + step - 1 + input = checkpoint(run_function(start, end, functions), input, preserve_rng_state=preserve) + return run_function(end + 1, len(functions) - 1, functions)(input) + +def get_rel_pos(q_size, k_size, rel_pos): + """ + Get relative positional embeddings according to the relative positions of + query and key sizes. + Args: + q_size (int): size of query q. + k_size (int): size of key k. + rel_pos (Tensor): relative position embeddings (L, C). + + Returns: + Extracted positional embeddings according to relative positions. 
+ """ + max_rel_dist = int(2 * max(q_size, k_size) - 1) + # Interpolate rel pos if needed. + if rel_pos.shape[0] != max_rel_dist: + # Interpolate rel pos. + rel_pos_resized = F.interpolate( + rel_pos.reshape(1, rel_pos.shape[0], -1).permute(0, 2, 1), + size=max_rel_dist, + mode="linear", + ) + rel_pos_resized = rel_pos_resized.reshape(-1, max_rel_dist).permute(1, 0) + else: + rel_pos_resized = rel_pos + + # Scale the coords with short length if shapes for q and k are different. + q_coords = torch.arange(q_size)[:, None] * max(k_size / q_size, 1.0) + k_coords = torch.arange(k_size)[None, :] * max(q_size / k_size, 1.0) + relative_coords = (q_coords - k_coords) + (k_size - 1) * max(q_size / k_size, 1.0) + + return rel_pos_resized[relative_coords.long()] + +def add_decomposed_rel_pos(attn, q, rel_pos_h, rel_pos_w, q_size, k_size): + """ + Calculate decomposed Relative Positional Embeddings from :paper:`mvitv2`. + https://github.com/facebookresearch/mvit/blob/19786631e330df9f3622e5402b4a419a263a2c80/mvit/models/attention.py # noqa B950 + Args: + attn (Tensor): attention map. + q (Tensor): query q in the attention layer with shape (B, q_h * q_w, C). + rel_pos_h (Tensor): relative position embeddings (Lh, C) for height axis. + rel_pos_w (Tensor): relative position embeddings (Lw, C) for width axis. + q_size (Tuple): spatial sequence size of query q with (q_h, q_w). + k_size (Tuple): spatial sequence size of key k with (k_h, k_w). + + Returns: + attn (Tensor): attention map with added relative positional embeddings. + """ + q_h, q_w = q_size + k_h, k_w = k_size + Rh = get_rel_pos(q_h, k_h, rel_pos_h) + Rw = get_rel_pos(q_w, k_w, rel_pos_w) + + B, _, dim = q.shape + r_q = q.reshape(B, q_h, q_w, dim) + rel_h = torch.einsum("bhwc,hkc->bhwk", r_q, Rh) + rel_w = torch.einsum("bhwc,wkc->bhwk", r_q, Rw) + + attn = ( + attn.view(B, q_h, q_w, k_h, k_w) + rel_h[:, :, :, :, None] + rel_w[:, :, :, None, :] + ).view(B, q_h * q_w, k_h * k_w) + + return attn diff --git a/ComfyUI_ExtraModels/PixArt/nodes.py b/ComfyUI_ExtraModels/PixArt/nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..0b8f19f777001dac777d8155bf44a477678f769e --- /dev/null +++ b/ComfyUI_ExtraModels/PixArt/nodes.py @@ -0,0 +1,274 @@ +import os +import json +import torch +import folder_paths + +from comfy import utils +from .conf import pixart_conf, pixart_res +from .lora import load_pixart_lora +from .loader import load_pixart + +class PixArtCheckpointLoader: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "ckpt_name": (folder_paths.get_filename_list("checkpoints"),), + "model": (list(pixart_conf.keys()),), + } + } + RETURN_TYPES = ("MODEL",) + RETURN_NAMES = ("model",) + FUNCTION = "load_checkpoint" + CATEGORY = "ExtraModels/PixArt" + TITLE = "PixArt Checkpoint Loader" + + def load_checkpoint(self, ckpt_name, model): + ckpt_path = folder_paths.get_full_path("checkpoints", ckpt_name) + model_conf = pixart_conf[model] + model = load_pixart( + model_path = ckpt_path, + model_conf = model_conf, + ) + return (model,) + +class PixArtCheckpointLoaderSimple(PixArtCheckpointLoader): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "ckpt_name": (folder_paths.get_filename_list("checkpoints"),), + } + } + TITLE = "PixArt Checkpoint Loader (auto)" + + def load_checkpoint(self, ckpt_name): + ckpt_path = folder_paths.get_full_path("checkpoints", ckpt_name) + model = load_pixart(model_path=ckpt_path) + return (model,) + +class PixArtResolutionSelect(): + @classmethod + def INPUT_TYPES(s): + return { + 
"required": { + "model": (list(pixart_res.keys()),), + # keys are the same for both + "ratio": (list(pixart_res["PixArtMS_XL_2"].keys()),{"default":"1.00"}), + } + } + RETURN_TYPES = ("INT","INT") + RETURN_NAMES = ("width","height") + FUNCTION = "get_res" + CATEGORY = "ExtraModels/PixArt" + TITLE = "PixArt Resolution Select" + + def get_res(self, model, ratio): + width, height = pixart_res[model][ratio] + return (width,height) + +class PixArtLoraLoader: + def __init__(self): + self.loaded_lora = None + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL",), + "lora_name": (folder_paths.get_filename_list("loras"), ), + "strength": ("FLOAT", {"default": 1.0, "min": -20.0, "max": 20.0, "step": 0.01}), + } + } + RETURN_TYPES = ("MODEL",) + FUNCTION = "load_lora" + CATEGORY = "ExtraModels/PixArt" + TITLE = "PixArt Load LoRA" + + def load_lora(self, model, lora_name, strength,): + if strength == 0: + return (model) + + lora_path = folder_paths.get_full_path("loras", lora_name) + lora = None + if self.loaded_lora is not None: + if self.loaded_lora[0] == lora_path: + lora = self.loaded_lora[1] + else: + temp = self.loaded_lora + self.loaded_lora = None + del temp + + if lora is None: + lora = utils.load_torch_file(lora_path, safe_load=True) + self.loaded_lora = (lora_path, lora) + + model_lora = load_pixart_lora(model, lora, lora_path, strength,) + return (model_lora,) + +class PixArtResolutionCond: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "cond": ("CONDITIONING", ), + "width": ("INT", {"default": 1024.0, "min": 0, "max": 8192}), + "height": ("INT", {"default": 1024.0, "min": 0, "max": 8192}), + } + } + + RETURN_TYPES = ("CONDITIONING",) + RETURN_NAMES = ("cond",) + FUNCTION = "add_cond" + CATEGORY = "ExtraModels/PixArt" + TITLE = "PixArt Resolution Conditioning" + + def add_cond(self, cond, width, height): + for c in range(len(cond)): + cond[c][1].update({ + "img_hw": [[height, width]], + "aspect_ratio": [[height/width]], + }) + return (cond,) + +class PixArtControlNetCond: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "cond": ("CONDITIONING",), + "latent": ("LATENT",), + # "image": ("IMAGE",), + # "vae": ("VAE",), + # "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}) + } + } + + RETURN_TYPES = ("CONDITIONING",) + RETURN_NAMES = ("cond",) + FUNCTION = "add_cond" + CATEGORY = "ExtraModels/PixArt" + TITLE = "PixArt ControlNet Conditioning" + + def add_cond(self, cond, latent): + for c in range(len(cond)): + cond[c][1]["cn_hint"] = latent["samples"] * 0.18215 + return (cond,) + +class PixArtT5TextEncode: + """ + Reference code, mostly to verify compatibility. + Once everything works, this should instead inherit from the + T5 text encode node and simply add the extra conds (res/ar). 
+ """ + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "text": ("STRING", {"multiline": True}), + "T5": ("T5",), + } + } + + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "encode" + CATEGORY = "ExtraModels/PixArt" + TITLE = "PixArt T5 Text Encode [Reference]" + + def mask_feature(self, emb, mask): + if emb.shape[0] == 1: + keep_index = mask.sum().item() + return emb[:, :, :keep_index, :], keep_index + else: + masked_feature = emb * mask[:, None, :, None] + return masked_feature, emb.shape[2] + + def encode(self, text, T5): + text = text.lower().strip() + tokenizer_out = T5.tokenizer.tokenizer( + text, + max_length = 120, + padding = 'max_length', + truncation = True, + return_attention_mask = True, + add_special_tokens = True, + return_tensors = 'pt' + ) + tokens = tokenizer_out["input_ids"] + mask = tokenizer_out["attention_mask"] + embs = T5.cond_stage_model.transformer( + input_ids = tokens.to(T5.load_device), + attention_mask = mask.to(T5.load_device), + )['last_hidden_state'].float()[:, None] + masked_embs, keep_index = self.mask_feature( + embs.detach().to("cpu"), + mask.detach().to("cpu") + ) + masked_embs = masked_embs.squeeze(0) # match CLIP/internal + print("Encoded T5:", masked_embs.shape) + return ([[masked_embs, {}]], ) + +class PixArtT5FromSD3CLIP: + """ + Split the T5 text encoder away from SD3 + """ + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "sd3_clip": ("CLIP",), + "padding": ("INT", {"default": 1, "min": 1, "max": 300}), + } + } + + RETURN_TYPES = ("CLIP",) + RETURN_NAMES = ("t5",) + FUNCTION = "split" + CATEGORY = "ExtraModels/PixArt" + TITLE = "PixArt T5 from SD3 CLIP" + + def split(self, sd3_clip, padding): + try: + from comfy.text_encoders.sd3_clip import SD3Tokenizer, SD3ClipModel + except ImportError: + # fallback for older ComfyUI versions + from comfy.sd3_clip import SD3Tokenizer, SD3ClipModel + import copy + + clip = sd3_clip.clone() + assert clip.cond_stage_model.t5xxl is not None, "CLIP must have T5 loaded!" + + # remove transformer + transformer = clip.cond_stage_model.t5xxl.transformer + clip.cond_stage_model.t5xxl.transformer = None + + # clone object + tmp = SD3ClipModel(clip_l=False, clip_g=False, t5=False) + tmp.t5xxl = copy.deepcopy(clip.cond_stage_model.t5xxl) + # put transformer back + clip.cond_stage_model.t5xxl.transformer = transformer + tmp.t5xxl.transformer = transformer + + # override special tokens + tmp.t5xxl.special_tokens = copy.deepcopy(clip.cond_stage_model.t5xxl.special_tokens) + tmp.t5xxl.special_tokens.pop("end") # make sure empty tokens match + + # tokenizer + tok = SD3Tokenizer() + tok.t5xxl.min_length = padding + + clip.cond_stage_model = tmp + clip.tokenizer = tok + + return (clip, ) + +NODE_CLASS_MAPPINGS = { + "PixArtCheckpointLoader" : PixArtCheckpointLoader, + "PixArtCheckpointLoaderSimple" : PixArtCheckpointLoaderSimple, + "PixArtResolutionSelect" : PixArtResolutionSelect, + "PixArtLoraLoader" : PixArtLoraLoader, + "PixArtT5TextEncode" : PixArtT5TextEncode, + "PixArtResolutionCond" : PixArtResolutionCond, + "PixArtControlNetCond" : PixArtControlNetCond, + "PixArtT5FromSD3CLIP": PixArtT5FromSD3CLIP, +} diff --git a/ComfyUI_ExtraModels/README.md b/ComfyUI_ExtraModels/README.md new file mode 100644 index 0000000000000000000000000000000000000000..39a125e10d3bab287b5d0873aab425a7f91572e8 --- /dev/null +++ b/ComfyUI_ExtraModels/README.md @@ -0,0 +1,257 @@ +# Extra Models for ComfyUI + +This repository aims to add support for various different image diffusion models to ComfyUI. 
+
+## Installation
+
+Simply clone this repo to your custom_nodes folder using the following command:
+
+`git clone https://github.com/city96/ComfyUI_ExtraModels custom_nodes/ComfyUI_ExtraModels`
+
+You will also have to install the requirements from the provided file by running `pip install -r requirements.txt` inside your VENV/conda env. If you downloaded the standalone version of ComfyUI, then follow the steps below.
+
+### Standalone ComfyUI
+
+I haven't tested this completely, so if you know what you're doing, use the regular venv/`git clone` install option when installing ComfyUI.
+
+Go to where you unpacked `ComfyUI_windows_portable` (where your run_nvidia_gpu.bat file is) and open a command line window. Press `CTRL+SHIFT+Right click` in an empty space and click "Open PowerShell window here".
+
+Clone the repository to your custom nodes folder, assuming you haven't installed it through the manager.
+
+`git clone https://github.com/city96/ComfyUI_ExtraModels .\ComfyUI\custom_nodes\ComfyUI_ExtraModels`
+
+To install the requirements on Windows, run these commands in the same window:
+```
+.\python_embedded\python.exe -s -m pip install -r .\ComfyUI\custom_nodes\ComfyUI_ExtraModels\requirements.txt
+.\python_embedded\python.exe -s -m pip install bitsandbytes --prefer-binary --extra-index-url=https://jllllll.github.io/bitsandbytes-windows-webui
+```
+
+To update, open the command line window like before and run the following commands:
+
+```
+cd .\ComfyUI\custom_nodes\ComfyUI_ExtraModels\
+git pull
+```
+
+Alternatively, use the manager, assuming it has an update function.
+
+
+
+## PixArt
+
+[Original Repo](https://github.com/PixArt-alpha/PixArt-alpha)
+
+### Model info / implementation
+- Uses a T5 text encoder instead of CLIP
+- Available in 512 and 1024 versions; needs specific pre-defined resolutions to work correctly (see the resolution sketch at the end of this section)
+- Same latent space as SD1.5 (works with the SD1.5 VAE)
+- Attention needs optimization; images look worse without xformers.
+
+### Usage
+
+1. Download the model weights from the [PixArt alpha repo](https://huggingface.co/PixArt-alpha/PixArt-alpha/tree/main) - you most likely want the 1024px one - `PixArt-XL-2-1024-MS.pth`
+2. Place them in your checkpoints folder
+3. Load them with the correct PixArt checkpoint loader
+4. **Follow the T5v11 section of this readme** to set up the T5 text encoder
+
+> [!TIP]
+> You should be able to use the model with the default KSampler if you're on the latest version of the node.
+> In theory, this should allow you to use longer prompts as well as things like doing img2img.
+
+Limitations:
+- `PixArt DPM Sampler` requires the negative prompt to be shorter than the positive prompt.
+- `PixArt DPM Sampler` can only work with a batch size of 1.
+- `PixArt T5 Text Encode` is from the reference implementation, therefore it doesn't support weights. `T5 Text Encode` supports weights, but I can't attest to the correctness of the implementation.
+
+> [!IMPORTANT]
+> Installing `xformers` is optional but strongly recommended, as torch SDP is only partially implemented, if that.
+
+[Sample workflow here](https://github.com/city96/ComfyUI_ExtraModels/files/13617463/PixArtV3.json)
+
+![PixArtT12](https://github.com/city96/ComfyUI_ExtraModels/assets/125218114/eb1a02f9-6114-47eb-a066-261c39c55615)
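+
+Since the valid resolutions are looked up from the `pixart_res` table in `conf.py` (the same table the `PixArt Resolution Select` node reads), here is a rough sketch of how an arbitrary size could be snapped to the nearest predefined bucket. The table layout (`model -> ratio string -> (width, height)`) matches the usage in `nodes.py`, but the example values and the `nearest_bucket` helper are illustrative placeholders, not part of the extension:
+
+```py
+# Minimal sketch: pick the predefined PixArt bucket closest to a requested size.
+# The real table lives in PixArt/conf.py; the entries below are just examples.
+pixart_res_example = {
+    "PixArtMS_XL_2": {
+        "0.50": (704, 1408),   # placeholder bucket
+        "1.00": (1024, 1024),  # 1:1 bucket
+        "2.00": (1408, 704),   # placeholder bucket
+    },
+}
+
+def nearest_bucket(model, target_width, target_height, table=pixart_res_example):
+    # Compare each bucket's own aspect ratio against the requested one and keep the closest.
+    target = target_width / target_height
+    key = min(table[model], key=lambda r: abs(table[model][r][0] / table[model][r][1] - target))
+    return table[model][key]
+
+print(nearest_bucket("PixArtMS_XL_2", 1216, 832))  # -> (1024, 1024) with this toy table
+```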
+
+### PixArt Sigma
+
+The Sigma models work just like the normal ones. Out of the released checkpoints, the 512, 1024 and 2K ones are supported.
+
+You can find the [1024 checkpoint here](https://huggingface.co/PixArt-alpha/PixArt-Sigma/blob/main/PixArt-Sigma-XL-2-1024-MS.pth). Place it in your models folder and **select the appropriate type in the model loader / resolution selection node.**
+
+> [!IMPORTANT]
+> Make sure to select an SDXL VAE for PixArt Sigma!
+
+### PixArt LCM
+
+The LCM model also works if you're on the latest version. To use it:
+
+1. Download the [PixArt LCM model](https://huggingface.co/PixArt-alpha/PixArt-LCM-XL-2-1024-MS/blob/main/transformer/diffusion_pytorch_model.safetensors) and place it in your checkpoints folder.
+2. Add a `ModelSamplingDiscrete` node and set "sampling" to "lcm"
+3. Adjust the KSampler settings - set the sampler to "lcm". Your CFG should be fairly low (1.1-1.5) and your steps should be around 5.
+
+Everything else can be the same as in the example above.
+
+![PixArtLCM](https://github.com/city96/ComfyUI_ExtraModels/assets/125218114/558f8b30-449b-4973-ad7e-6aa69832adcb)
+
+
+
+## HunYuan DiT
+
+WIP implementation of [HunYuan DiT by Tencent](https://github.com/Tencent/HunyuanDiT)
+
+The initial work on this was done by [chaojie](https://github.com/chaojie) in [this PR](https://github.com/city96/ComfyUI_ExtraModels/pull/37).
+
+Instructions:
+- Download the [first text encoder from here](https://huggingface.co/Tencent-Hunyuan/HunyuanDiT/blob/main/t2i/clip_text_encoder/pytorch_model.bin) and place it in `ComfyUI/models/clip` - rename it to "chinese-roberta-wwm-ext-large.bin"
+- Download the [second text encoder from here](https://huggingface.co/Tencent-Hunyuan/HunyuanDiT/blob/main/t2i/mt5/pytorch_model.bin) and place it in `ComfyUI/models/t5` - rename it to "mT5-xl.bin"
+- Download the [model file from here](https://huggingface.co/Tencent-Hunyuan/HunyuanDiT/blob/main/t2i/model/pytorch_model_module.pt) and place it in `ComfyUI/checkpoints` - rename it to "HunYuanDiT.pt"
+- Download/use any SDXL VAE, for example [this one](https://huggingface.co/madebyollin/sdxl-vae-fp16-fix)
+
+You may also try the following alternate model files for faster loading speed/smaller file size:
+- converted [second text encoder](https://huggingface.co/city96/mt5-xl-encoder-fp16/blob/main/model.safetensors) - rename it to `mT5-xl-encoder-fp16.safetensors` and place it in `ComfyUI/models/t5`
+
+You can use the "simple" text encode node to only use one prompt, or you can use the regular one to pass different text to CLIP/T5.
+
+[Sample Workflow](https://github.com/city96/ComfyUI_ExtraModels/files/15444231/HyDiTV1.json)
+
+![image](https://github.com/city96/ComfyUI_ExtraModels/assets/125218114/7a9d6e34-d3f4-4f67-a17f-4f2d6795e54e)
+
+
+
+## DiT
+
+[Original Repo](https://github.com/facebookresearch/DiT)
+
+### Model info / implementation
+- Uses class labels instead of prompts
+- Limited to 256x256 or 512x512 images
+- Same latent space as SD1.5 (works with the SD1.5 VAE)
+- Works in FP16, but no other optimization
+
+### Usage
+
+1. Download the original model weights from the [DiT Repo](https://github.com/facebookresearch/DiT) or the converted [FP16 safetensor ones from Huggingface](https://huggingface.co/city96/DiT/tree/main).
+2. Place them in your checkpoints folder. (You may need to move them if you had them in `ComfyUI\models\dit` before)
+3. Load the model and select the class labels as shown in the image below
+4. **Make sure to use the Empty label conditioning for the Negative input of the KSampler!**
+
+ConditioningCombine nodes *should* work for combining multiple labels.
The area ones don't, since the model currently can't handle dynamic input dimensions.
+
+[Sample workflow here](https://github.com/city96/ComfyUI_ExtraModels/files/13619259/DiTV2.json)
+
+![DIT_WORKFLOW_IMG](https://github.com/city96/ComfyUI_ExtraModels/assets/125218114/cdd4ec94-b0eb-436a-bf23-a3bcef8d7b90)
+
+
+
+## T5
+
+### T5v11
+
+The model files can be downloaded from the [DeepFloyd/t5-v1_1-xxl](https://huggingface.co/DeepFloyd/t5-v1_1-xxl/tree/main) repository.
+
+You will need to download the following 4 files:
+ - `config.json`
+ - `pytorch_model-00001-of-00002.bin`
+ - `pytorch_model-00002-of-00002.bin`
+ - `pytorch_model.bin.index.json`
+
+Place them in your `ComfyUI/models/t5` folder. You can put them in a subfolder called "t5-v1.1-xxl", though it doesn't matter. There are int8 safetensor files in the other DeepFloyd repo, though they didn't work for me.
+
+For faster loading/smaller file sizes, you may pick one of the following alternative downloads:
+- [FP16 converted version](https://huggingface.co/theunlikely/t5-v1_1-xxl-fp16/tree/main) - Same layout as the original; download both safetensor files as well as the `*.index.json` and `config.json` files.
+- [BF16 converted version](https://huggingface.co/city96/t5-v1_1-xxl-encoder-bf16/tree/main) - Merged into a single safetensor; only `model.safetensors` (+`config.json` for folder mode) is required.
+
+To move T5 to a different drive/folder, do the same as you would when moving checkpoints, but add ` t5: t5` to `extra_model_paths.yaml` and create a directory called "t5" in the alternate path specified in the `base_path` variable.
+
+### Usage
+
+Loaded onto the CPU, it'll use about 22GB of system RAM. Depending on which weights you use, it might use slightly more during loading.
+
+If you have a second GPU, selecting "cuda:1" as the device will allow you to use it for T5, freeing at least some VRAM/system RAM. Using FP16 as the dtype is recommended.
+
+Loaded in bnb4bit mode, it only takes around 6GB VRAM, making it work with 12GB cards. The only drawback is that it'll constantly stay in VRAM, since BitsAndBytes doesn't allow moving the weights to system RAM temporarily. Switching to a different workflow *should* still release the VRAM as expected. Pascal cards (1080ti, P40) seem to struggle with 4bit. Select "cpu" if you encounter issues.
+
+On Windows, you may need a newer version of bitsandbytes for 4bit. Try `python -m pip install bitsandbytes`
+
+> [!IMPORTANT]
+> You may also need to upgrade transformers and install sentencepiece for the tokenizer. `pip install -r requirements.txt`
+
+
+
+## MiaoBi
+
+### Original from:
+
+- Author: Github [ShineChen1024](https://github.com/ShineChen1024) | Hugging Face [ShineChen1024](https://huggingface.co/ShineChen1024)
+- https://github.com/ShineChen1024/MiaoBi
+- https://huggingface.co/ShineChen1024/MiaoBi
+
+### Instructions
+- Download the [clip model](https://huggingface.co/ShineChen1024/MiaoBi/blob/main/miaobi_beta0.9/text_encoder/model.safetensors) and rename it to "MiaoBi_CLIP.safetensors" or anything you like, then place it in `ComfyUI/models/clip`.
+- Download the [unet model](https://huggingface.co/ShineChen1024/MiaoBi/blob/main/miaobi_beta0.9/unet/diffusion_pytorch_model.safetensors) and rename it to "MiaoBi.safetensors", then place it in `ComfyUI/models/unet`.
+- Alternatively, clone/download the entire huggingface repo to `ComfyUI/models/diffusers` and use the MiaoBi diffusers loader.
+
+这是妙笔的测试版本。妙笔,一个中文文生图模型,与经典的stable-diffusion 1.5版本拥有一致的结构,兼容现有的lora,controlnet,T2I-Adapter等主流插件及其权重。
+
+This is the beta version of MiaoBi, a Chinese text-to-image model. It follows the classic structure of sd-v1.5 and is compatible with existing mainstream plugins such as LoRA, ControlNet, T2I-Adapter, etc., along with their weights.
+
+Example Prompts:
+- 一只精致的陶瓷猫咪雕像,全身绘有精美的传统花纹,眼睛仿佛会发光。
+- 动漫风格的风景画,有山脉、湖泊,也有繁华的小镇子,色彩鲜艳,光影效果明显。
+- 极具真实感的复杂农村的老人肖像,黑白。
+- 红烧狮子头
+- 车水马龙的上海街道,春节,舞龙舞狮。
+- 枯藤老树昏鸦,小桥流水人家。水墨画。
+
+[Example Workflow](https://github.com/city96/ComfyUI_ExtraModels/files/15389380/MiaoBiV1.json)
+
+[Example Workflow (diffusers)](https://github.com/city96/ComfyUI_ExtraModels/files/15389381/MiaoBiV1D.json)
+
+![MiaoBi](https://github.com/city96/ComfyUI_ExtraModels/assets/125218114/d9e4ab7d-f61b-407f-b7dd-af5859627d0e)
+
+
+
+## VAE
+
+A few custom VAE models are supported. You can also pick a different dtype when loading, which can be useful for testing/comparisons. You can load the models listed below using the "ExtraVAELoader" node.
+
+**Models like PixArt/DiT do NOT need a special VAE. Unless mentioned, use one of the following as you would with any other model:**
+- [VAE for SD1.X, DiT and PixArt alpha](https://huggingface.co/stabilityai/sd-vae-ft-mse-original/blob/main/vae-ft-mse-840000-ema-pruned.safetensors).
+- [VAE for SDXL and PixArt sigma](https://huggingface.co/madebyollin/sdxl-vae-fp16-fix/blob/main/diffusion_pytorch_model.safetensors)
+
+### Consistency Decoder
+
+[Original Repo](https://github.com/openai/consistencydecoder)
+
+This now works thanks to the work of @mrsteyk and @madebyollin - [Gist with more info](https://gist.github.com/madebyollin/865fa6a18d9099351ddbdfbe7299ccbf).
+
+- Download the converted safetensor VAE from [this HF repository](https://huggingface.co/mrsteyk/consistency-decoder-sd15/blob/main/stk_consistency_decoder_amalgamated.safetensors). If you downloaded the OpenAI model before, it won't work, as it is a TorchScript file. Feel free to delete it.
+- Put the file in your VAE folder
+- Load it with the ExtraVAELoader
+- Set it to fp16 or bf16 to not run out of VRAM
+- Use tiled VAE decode if required
+
+### Deflickering Decoder / VideoDecoder
+
+This is the VAE that comes baked into the [Stable Video Diffusion](https://stability.ai/news/stable-video-diffusion-open-ai-video-model) model.
+
+It doesn't seem particularly good as a normal VAE (color issues, pretty bad with finer details).
+
+Still, for completeness' sake, the code to run it is mostly implemented. To obtain the weights, just extract them from the SVD model:
+
+```py
+
+from safetensors.torch import load_file, save_file
+
+pf = "first_stage_model." # Key prefix
+sd = load_file("svd_xt.safetensors")
+vae = {k.replace(pf, ''):v for k,v in sd.items() if k.startswith(pf)}
+save_file(vae, "svd_xt_vae.safetensors")
+```
+
+### AutoencoderKL / VQModel
+
+`kl-f4/8/16/32` from the [compvis/latent diffusion repo](https://github.com/CompVis/latent-diffusion/tree/main#pretrained-autoencoding-models).
+
+`vq-f4/8/16` from the taming transformers repo; weights for both the vq and kl models are available [here](https://ommer-lab.com/files/latent-diffusion/)
+
+`vq-f8` can accept latents from the SD UNet but, just like XL with v1 latents, the output is largely garbage. The rest are completely useless without a matching UNet that uses the correct channel count.
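+
+For reference, the same prefix-stripping approach as the SVD snippet above should also work if you want to pull the first-stage autoencoder (a kl-f8 model) out of a regular SD checkpoint instead of downloading standalone weights. The file names below are placeholders; only the `first_stage_model.` key prefix is the standard one:
+
+```py
+from safetensors.torch import load_file, save_file
+
+# SD-style checkpoints keep their VAE weights under this prefix.
+pf = "first_stage_model."
+sd = load_file("some_sd15_checkpoint.safetensors")  # placeholder file name
+vae = {k.replace(pf, ''): v for k, v in sd.items() if k.startswith(pf)}
+save_file(vae, "extracted_vae.safetensors")
+```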
+ +![VAE_TEST](https://github.com/city96/ComfyUI_ExtraModels/assets/125218114/316c7029-ee78-4ff7-a46a-b56ef91477eb) diff --git a/ComfyUI_ExtraModels/T5/LICENSE-ComfyUI b/ComfyUI_ExtraModels/T5/LICENSE-ComfyUI new file mode 100644 index 0000000000000000000000000000000000000000..f288702d2fa16d3cdf0035b15a9fcbc552cd88e7 --- /dev/null +++ b/ComfyUI_ExtraModels/T5/LICENSE-ComfyUI @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. 
+States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. 
+ + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. diff --git a/ComfyUI_ExtraModels/T5/LICENSE-T5 b/ComfyUI_ExtraModels/T5/LICENSE-T5 new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/ComfyUI_ExtraModels/T5/LICENSE-T5 @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/ComfyUI_ExtraModels/T5/loader.py b/ComfyUI_ExtraModels/T5/loader.py new file mode 100644 index 0000000000000000000000000000000000000000..78d8e0e70dd84d367b7cf3eda1eafb23e7a4563c --- /dev/null +++ b/ComfyUI_ExtraModels/T5/loader.py @@ -0,0 +1,119 @@ +import os +import torch +import comfy.utils +import comfy.model_patcher +from comfy import model_management +import folder_paths + +from .t5v11 import T5v11Model, T5v11Tokenizer + +class EXM_T5v11: + def __init__(self, textmodel_ver="xxl", embedding_directory=None, textmodel_path=None, no_init=False, device="cpu", dtype=None): + if no_init: + return + + if device == "auto": + size = 0 + self.load_device = model_management.text_encoder_device() + self.offload_device = model_management.text_encoder_offload_device() + self.init_device = "cpu" + elif dtype == "bnb8bit": + # BNB doesn't support size enum + size = 12.4 * (1024**3) + # Or moving between devices + self.load_device = model_management.get_torch_device() + self.offload_device = self.load_device + self.init_device = self.load_device + elif dtype == "bnb4bit": + # This seems to use the same VRAM as 8bit on Pascal? + size = 6.2 * (1024**3) + self.load_device = model_management.get_torch_device() + self.offload_device = self.load_device + self.init_device = self.load_device + elif device == "cpu": + size = 0 + self.load_device = "cpu" + self.offload_device = "cpu" + self.init_device="cpu" + elif device.startswith("cuda"): + print("Direct CUDA device override!\nVRAM will not be freed by default.") + size = 0 + self.load_device = device + self.offload_device = device + self.init_device = device + else: + size = 0 + self.load_device = model_management.get_torch_device() + self.offload_device = "cpu" + self.init_device="cpu" + + self.cond_stage_model = T5v11Model( + textmodel_ver = textmodel_ver, + textmodel_path = textmodel_path, + device = device, + dtype = dtype, + ) + self.tokenizer = T5v11Tokenizer(embedding_directory=embedding_directory) + self.patcher = comfy.model_patcher.ModelPatcher( + self.cond_stage_model, + load_device = self.load_device, + offload_device = self.offload_device, + current_device = self.load_device, + size = size, + ) + + def clone(self): + n = T5(no_init=True) + n.patcher = self.patcher.clone() + n.cond_stage_model = self.cond_stage_model + n.tokenizer = self.tokenizer + return n + + def tokenize(self, text, return_word_ids=False): + return self.tokenizer.tokenize_with_weights(text, return_word_ids) + + def encode_from_tokens(self, tokens): + self.load_model() + return self.cond_stage_model.encode_token_weights(tokens) + + def encode(self, text): + tokens = self.tokenize(text) + return self.encode_from_tokens(tokens) + + def load_sd(self, sd): + return self.cond_stage_model.load_sd(sd) + + def get_sd(self): + return self.cond_stage_model.state_dict() + + def load_model(self): + if self.load_device != "cpu": + model_management.load_model_gpu(self.patcher) + return self.patcher + + def add_patches(self, patches, strength_patch=1.0, strength_model=1.0): + return self.patcher.add_patches(patches, strength_patch, strength_model) + + def get_key_patches(self): + return self.patcher.get_key_patches() + + +def load_t5(model_type, model_ver, model_path, path_type="file", device="cpu", dtype=None): + assert model_type in ["t5v11"] # Only supported model for now + model_args = { + "textmodel_ver" : model_ver, + "device" : device, + "dtype" : dtype, + } + + if path_type == "folder": + # pass directly to transformers and initialize there + # this is to avoid having to 
handle multi-file state dict loading for now. + model_args["textmodel_path"] = os.path.dirname(model_path) + return EXM_T5v11(**model_args) + else: + # for some reason this returns garbage with torch.int8 weights, or just OOMs + model = EXM_T5v11(**model_args) + sd = comfy.utils.load_torch_file(model_path) + model.load_sd(sd) + return model diff --git a/ComfyUI_ExtraModels/T5/nodes.py b/ComfyUI_ExtraModels/T5/nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..e30eb09b093d92c396c916a81d0d0d6c258c6b01 --- /dev/null +++ b/ComfyUI_ExtraModels/T5/nodes.py @@ -0,0 +1,95 @@ +import os +import json +import torch +import folder_paths + +from .loader import load_t5 +from ..utils.dtype import string_to_dtype + +# initialize custom folder path +os.makedirs( + os.path.join(folder_paths.models_dir,"t5"), + exist_ok = True, +) +folder_paths.folder_names_and_paths["t5"] = ( + [ + os.path.join(folder_paths.models_dir,"t5"), + *folder_paths.folder_names_and_paths.get("t5", [[],set()])[0] + ], + folder_paths.supported_pt_extensions +) + +dtypes = [ + "default", + "auto (comfy)", + "FP32", + "FP16", + # Note: remove these at some point + "bnb8bit", + "bnb4bit", +] +try: torch.float8_e5m2 +except AttributeError: print("Torch version too old for FP8") +else: dtypes += ["FP8 E4M3", "FP8 E5M2"] + +class T5v11Loader: + @classmethod + def INPUT_TYPES(s): + devices = ["auto", "cpu", "gpu"] + # hack for using second GPU as offload + for k in range(1, torch.cuda.device_count()): + devices.append(f"cuda:{k}") + return { + "required": { + "t5v11_name": (folder_paths.get_filename_list("t5"),), + "t5v11_ver": (["xxl"],), + "path_type": (["folder", "file"],), + "device": (devices, {"default":"cpu"}), + "dtype": (dtypes,), + } + } + RETURN_TYPES = ("T5",) + FUNCTION = "load_model" + CATEGORY = "ExtraModels/T5" + TITLE = "T5v1.1 Loader" + + def load_model(self, t5v11_name, t5v11_ver, path_type, device, dtype): + if "bnb" in dtype: + assert device == "gpu" or device.startswith("cuda"), "BitsAndBytes only works on CUDA! Set device to 'gpu'." + dtype = string_to_dtype(dtype, "text_encoder") + if device == "cpu": + assert dtype in [None, torch.float32], f"Can't use dtype '{dtype}' with CPU! Set dtype to 'default'." 
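+        # The device/dtype options validated above are passed straight to load_t5 below,
+        # which builds the EXM_T5v11 wrapper and its ModelPatcher.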
+ + return (load_t5( + model_type = "t5v11", + model_ver = t5v11_ver, + model_path = folder_paths.get_full_path("t5", t5v11_name), + path_type = path_type, + device = device, + dtype = dtype, + ),) + +class T5TextEncode: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "text": ("STRING", {"multiline": True}), + "T5": ("T5",), + } + } + + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "encode" + CATEGORY = "ExtraModels/T5" + TITLE = "T5 Text Encode" + + def encode(self, text, T5=None): + tokens = T5.tokenize(text) + cond = T5.encode_from_tokens(tokens) + return ([[cond, {}]], ) + +NODE_CLASS_MAPPINGS = { + "T5v11Loader" : T5v11Loader, + "T5TextEncode" : T5TextEncode, +} diff --git a/ComfyUI_ExtraModels/T5/t5_tokenizer/special_tokens_map.json b/ComfyUI_ExtraModels/T5/t5_tokenizer/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..881bdbffc06e471924ecea57f962bc5f8e2a9f21 --- /dev/null +++ b/ComfyUI_ExtraModels/T5/t5_tokenizer/special_tokens_map.json @@ -0,0 +1 @@ +{"eos_token": "", "unk_token": "", "pad_token": "", "additional_special_tokens": ["", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", ""]} \ No newline at end of file diff --git a/ComfyUI_ExtraModels/T5/t5_tokenizer/spiece.model b/ComfyUI_ExtraModels/T5/t5_tokenizer/spiece.model new file mode 100644 index 0000000000000000000000000000000000000000..317a5ccbde45300f5d1d970d4d449af2108b147e --- /dev/null +++ b/ComfyUI_ExtraModels/T5/t5_tokenizer/spiece.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d60acb128cf7b7f2536e8f38a5b18a05535c9e14c7a355904270e15b0945ea86 +size 791656 diff --git a/ComfyUI_ExtraModels/T5/t5_tokenizer/tokenizer_config.json b/ComfyUI_ExtraModels/T5/t5_tokenizer/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..b114c318caf72f6e89ea92e0755c41327a453198 --- /dev/null +++ b/ComfyUI_ExtraModels/T5/t5_tokenizer/tokenizer_config.json @@ -0,0 +1 @@ +{"eos_token": "", "unk_token": "", "pad_token": "", "extra_ids": 100, "additional_special_tokens": ["", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", ""], "model_max_length": 512, "name_or_path": "t5-small"} \ No newline at end of file diff --git a/ComfyUI_ExtraModels/T5/t5v11-xxl_config.json b/ComfyUI_ExtraModels/T5/t5v11-xxl_config.json new file mode 100644 index 0000000000000000000000000000000000000000..d133daa4aefba0f8b2bd96d4cb52992b32dc693a --- /dev/null +++ b/ComfyUI_ExtraModels/T5/t5v11-xxl_config.json @@ -0,0 +1,31 @@ +{ + "_name_or_path": "google/t5-v1_1-xxl", + "architectures": [ + "T5EncoderModel" + ], + "d_ff": 10240, + "d_kv": 64, + "d_model": 4096, + "decoder_start_token_id": 0, + "dense_act_fn": "gelu_new", + "dropout_rate": 0.1, + "eos_token_id": 1, + "feed_forward_proj": "gated-gelu", + "initializer_factor": 1.0, + "is_encoder_decoder": true, + 
"is_gated_act": true, + "layer_norm_epsilon": 1e-06, + "model_type": "t5", + "num_decoder_layers": 24, + "num_heads": 64, + "num_layers": 24, + "output_past": true, + "pad_token_id": 0, + "relative_attention_max_distance": 128, + "relative_attention_num_buckets": 32, + "tie_word_embeddings": false, + "torch_dtype": "float32", + "transformers_version": "4.21.1", + "use_cache": true, + "vocab_size": 32128 +} diff --git a/ComfyUI_ExtraModels/T5/t5v11.py b/ComfyUI_ExtraModels/T5/t5v11.py new file mode 100644 index 0000000000000000000000000000000000000000..76cec9cbecea2549fa0cb376e0c7803547b0ce4c --- /dev/null +++ b/ComfyUI_ExtraModels/T5/t5v11.py @@ -0,0 +1,227 @@ +""" +Adapted from comfyui CLIP code. +https://github.com/comfyanonymous/ComfyUI/blob/master/comfy/sd1_clip.py +""" + +import os + +from transformers import T5Tokenizer, T5EncoderModel, T5Config, modeling_utils +import torch +import traceback +import zipfile +from comfy import model_management + +from comfy.sd1_clip import parse_parentheses, token_weights, escape_important, unescape_important, safe_load_embed_zip, expand_directory_list, load_embed + +class T5v11Model(torch.nn.Module): + def __init__(self, textmodel_ver="xxl", textmodel_json_config=None, textmodel_path=None, device="cpu", max_length=120, freeze=True, dtype=None): + super().__init__() + + self.num_layers = 24 + self.max_length = max_length + self.bnb = False + + if textmodel_path is not None: + model_args = {} + model_args["low_cpu_mem_usage"] = True # Don't take 2x system ram on cpu + if dtype == "bnb8bit": + self.bnb = True + model_args["load_in_8bit"] = True + elif dtype == "bnb4bit": + self.bnb = True + model_args["load_in_4bit"] = True + else: + if dtype: model_args["torch_dtype"] = dtype + self.bnb = False + # second GPU offload hack part 2 + if device.startswith("cuda"): + model_args["device_map"] = device + print(f"Loading T5 from '{textmodel_path}'") + self.transformer = T5EncoderModel.from_pretrained(textmodel_path, **model_args) + else: + if textmodel_json_config is None: + textmodel_json_config = os.path.join( + os.path.dirname(os.path.realpath(__file__)), + f"t5v11-{textmodel_ver}_config.json" + ) + config = T5Config.from_json_file(textmodel_json_config) + self.num_layers = config.num_hidden_layers + with modeling_utils.no_init_weights(): + self.transformer = T5EncoderModel(config) + + if freeze: + self.freeze() + self.empty_tokens = [[0] * self.max_length] # token + + def freeze(self): + self.transformer = self.transformer.eval() + for param in self.parameters(): + param.requires_grad = False + + def forward(self, tokens): + device = self.transformer.get_input_embeddings().weight.device + tokens = torch.LongTensor(tokens).to(device) + attention_mask = torch.zeros_like(tokens) + max_token = 1 # token + for x in range(attention_mask.shape[0]): + for y in range(attention_mask.shape[1]): + attention_mask[x, y] = 1 + if tokens[x, y] == max_token: + break + + outputs = self.transformer(input_ids=tokens, attention_mask=attention_mask) + + z = outputs['last_hidden_state'] + z.detach().cpu().float() + return z + + def encode(self, tokens): + return self(tokens) + + def load_sd(self, sd): + return self.transformer.load_state_dict(sd, strict=False) + + def to(self, *args, **kwargs): + """BNB complains if you try to change the device or dtype""" + if self.bnb: + print("Thanks to BitsAndBytes, T5 becomes an immovable rock.", args, kwargs) + else: + self.transformer.to(*args, **kwargs) + + def encode_token_weights(self, token_weight_pairs, return_padded=False): + 
to_encode = list(self.empty_tokens) + for x in token_weight_pairs: + tokens = list(map(lambda a: a[0], x)) + to_encode.append(tokens) + + out = self.encode(to_encode) + z_empty = out[0:1] + + output = [] + for k in range(1, out.shape[0]): + z = out[k:k+1] + for i in range(len(z)): + for j in range(len(z[i])): + weight = token_weight_pairs[k - 1][j][1] + z[i][j] = (z[i][j] - z_empty[0][j]) * weight + z_empty[0][j] + output.append(z) + + if (len(output) == 0): + return z_empty.cpu() + + out = torch.cat(output, dim=-2) + if not return_padded: + # Count number of tokens that aren't , then use that number as an index. + keep_index = sum([sum([1 for y in x if y[0] != 0]) for x in token_weight_pairs]) + out = out[:, :keep_index, :] + return out + + +class T5v11Tokenizer: + """ + This is largely just based on the ComfyUI CLIP code. + """ + def __init__(self, tokenizer_path=None, max_length=120, embedding_directory=None, embedding_size=4096, embedding_key='t5'): + if tokenizer_path is None: + tokenizer_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "t5_tokenizer") + self.tokenizer = T5Tokenizer.from_pretrained(tokenizer_path) + self.max_length = max_length + self.max_tokens_per_section = self.max_length - 1 # but no + + self.pad_token = self.tokenizer("", add_special_tokens=False)["input_ids"][0] + self.end_token = self.tokenizer("", add_special_tokens=False)["input_ids"][0] + vocab = self.tokenizer.get_vocab() + self.inv_vocab = {v: k for k, v in vocab.items()} + self.embedding_directory = embedding_directory + self.max_word_length = 8 # haven't verified this + self.embedding_identifier = "embedding:" + self.embedding_size = embedding_size + self.embedding_key = embedding_key + + def _try_get_embedding(self, embedding_name:str): + ''' + Takes a potential embedding name and tries to retrieve it. + Returns a Tuple consisting of the embedding and any leftover string, embedding can be None. + ''' + embed = load_embed(embedding_name, self.embedding_directory, self.embedding_size, self.embedding_key) + if embed is None: + stripped = embedding_name.strip(',') + if len(stripped) < len(embedding_name): + embed = load_embed(stripped, self.embedding_directory, self.embedding_size, self.embedding_key) + return (embed, embedding_name[len(stripped):]) + return (embed, "") + + def tokenize_with_weights(self, text:str, return_word_ids=False): + ''' + Takes a prompt and converts it to a list of (token, weight, word id) elements. + Tokens can both be integer tokens and pre computed T5 tensors. + Word id values are unique per word and embedding, where the id 0 is reserved for non word tokens. 
+ Returned list has the dimensions NxM where M is the input size of T5 + ''' + pad_token = self.pad_token + text = escape_important(text) + parsed_weights = token_weights(text, 1.0) + + #tokenize words + tokens = [] + for weighted_segment, weight in parsed_weights: + to_tokenize = unescape_important(weighted_segment).replace("\n", " ").split(' ') + to_tokenize = [x for x in to_tokenize if x != ""] + for word in to_tokenize: + #if we find an embedding, deal with the embedding + if word.startswith(self.embedding_identifier) and self.embedding_directory is not None: + embedding_name = word[len(self.embedding_identifier):].strip('\n') + embed, leftover = self._try_get_embedding(embedding_name) + if embed is None: + print(f"warning, embedding:{embedding_name} does not exist, ignoring") + else: + if len(embed.shape) == 1: + tokens.append([(embed, weight)]) + else: + tokens.append([(embed[x], weight) for x in range(embed.shape[0])]) + #if we accidentally have leftover text, continue parsing using leftover, else move on to next word + if leftover != "": + word = leftover + else: + continue + #parse word + tokens.append([(t, weight) for t in self.tokenizer(word, add_special_tokens=False)["input_ids"]]) + + #reshape token array to T5 input size + batched_tokens = [] + batch = [] + batched_tokens.append(batch) + for i, t_group in enumerate(tokens): + #determine if we're going to try and keep the tokens in a single batch + is_large = len(t_group) >= self.max_word_length + + while len(t_group) > 0: + if len(t_group) + len(batch) > self.max_length - 1: + remaining_length = self.max_length - len(batch) - 1 + #break word in two and add end token + if is_large: + batch.extend([(t,w,i+1) for t,w in t_group[:remaining_length]]) + batch.append((self.end_token, 1.0, 0)) + t_group = t_group[remaining_length:] + #add end token and pad + else: + batch.append((self.end_token, 1.0, 0)) + batch.extend([(self.pad_token, 1.0, 0)] * (remaining_length)) + #start new batch + batch = [] + batched_tokens.append(batch) + else: + batch.extend([(t,w,i+1) for t,w in t_group]) + t_group = [] + + # fill last batch + batch.extend([(self.end_token, 1.0, 0)] + [(self.pad_token, 1.0, 0)] * (self.max_length - len(batch) - 1)) + # instead of filling, just add EOS (DEBUG) + # batch.extend([(self.end_token, 1.0, 0)]) + + if not return_word_ids: + batched_tokens = [[(t, w) for t, w,_ in x] for x in batched_tokens] + return batched_tokens + + def untokenize(self, token_weight_pair): + return list(map(lambda a: (a, self.inv_vocab[a[0]]), token_weight_pair)) diff --git a/ComfyUI_ExtraModels/VAE/conf.py b/ComfyUI_ExtraModels/VAE/conf.py new file mode 100644 index 0000000000000000000000000000000000000000..91df735227def074f40a753f7cba008eb0ff1207 --- /dev/null +++ b/ComfyUI_ExtraModels/VAE/conf.py @@ -0,0 +1,159 @@ +""" +List of all VAE configs, with training parts stripped. 
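+Each entry records the latent scale factor ("embed_scale"), the latent channel count ("embed_dim") and,
+where applicable, the backbone settings passed to the matching model class under ./models.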
+""" +vae_conf = { + ### AutoencoderKL ### + "kl-f4": { + "type" : "AutoencoderKL", + "embed_scale" : 4, + "embed_dim" : 3, + "z_channels" : 3, + "double_z" : True, + "resolution" : 256, + "in_channels" : 3, + "out_ch" : 3, + "ch" : 128, + "ch_mult" : [1,2,4], + "num_res_blocks" : 2, + "attn_resolutions" : [], + }, + "kl-f8": { # Default SD1.5 VAE + "type" : "AutoencoderKL", + "embed_scale" : 8, + "embed_dim" : 4, + "z_channels" : 4, + "double_z" : True, + "resolution" : 256, + "in_channels" : 3, + "out_ch" : 3, + "ch" : 128, + "ch_mult" : [1,2,4,4], + "num_res_blocks" : 2, + "attn_resolutions" : [], + }, + "kl-f8-d16": { # 16 channel VAE from https://huggingface.co/ostris/vae-kl-f8-d16/tree/main + "type" : "AutoencoderKL", + "embed_scale" : 8, + "embed_dim" : 16, + "z_channels" : 16, + "double_z" : True, + "resolution" : 256, + "in_channels" : 3, + "out_ch" : 3, + "ch" : 128, + "ch_mult" : [1,1,2,4], + "num_res_blocks" : 2, + "attn_resolutions" : [], + }, + "kl-f16": { + "type" : "AutoencoderKL", + "embed_scale" : 16, + "embed_dim" : 16, + "z_channels" : 16, + "double_z" : True, + "resolution" : 256, + "in_channels" : 3, + "out_ch" : 3, + "ch" : 128, + "ch_mult" : [1,1,2,2,4], + "num_res_blocks" : 2, + "attn_resolutions" : [16], + }, + "kl-f32": { + "type" : "AutoencoderKL", + "embed_scale" : 32, + "embed_dim" : 64, + "z_channels" : 64, + "double_z" : True, + "resolution" : 256, + "in_channels" : 3, + "out_ch" : 3, + "ch" : 128, + "ch_mult" : [1,1,2,2,4,4], + "num_res_blocks" : 2, + "attn_resolutions" : [16,8], + }, + ### VQModel ### + "vq-f4": { + "type" : "VQModel", + "embed_scale" : 4, + "n_embed" : 8192, + "embed_dim" : 3, + "z_channels" : 3, + "double_z" : False, + "resolution" : 256, + "in_channels" : 3, + "out_ch" : 3, + "ch" : 128, + "ch_mult" : [1,2,4], + "num_res_blocks" : 2, + "attn_resolutions" : [], + }, + "vq-f8": { + "type" : "VQModel", + "embed_scale" : 8, + "n_embed" : 16384, + "embed_dim" : 4, + "z_channels" : 4, + "double_z" : False, + "resolution" : 256, + "in_channels" : 3, + "out_ch" : 3, + "ch" : 128, + "ch_mult" : [1,2,2,4], + "num_res_blocks" : 2, + "attn_resolutions" : [32], + }, + "vq-f16": { + "type" : "VQModel", + "embed_scale" : 16, + "n_embed" : 16384, + "embed_dim" : 8, + "z_channels" : 8, + "double_z" : False, + "resolution" : 256, + "in_channels" : 3, + "out_ch" : 3, + "ch" : 128, + "ch_mult" : [1,1,2,2,4], + "num_res_blocks" : 2, + "attn_resolutions" : [16], + }, + # OpenAI Consistency Decoder + "Consistency-Decoder": { + "type" : "ConsistencyDecoder", + "embed_scale" : 8, + "embed_dim" : 4, + }, + # SAI Video Decoder + "SDV-VideoDecoder": { + "type" : "AutoencoderKL-VideoDecoder", + "embed_scale" : 8, + "embed_dim" : 4, + "z_channels" : 4, + "double_z" : True, + "resolution" : 256, + "in_channels" : 3, + "out_ch" : 3, + "ch" : 128, + "ch_mult" : [1,2,4,4], + "num_res_blocks" : 2, + "attn_resolutions" : [], + "video_kernel_size": [3, 1, 1] + }, + # Kandinsky-3 + "MoVQ3": { + "type" : "MoVQ3", + "embed_scale" : 8, + "embed_dim" : 4, + "double_z" : False, + "z_channels" : 4, + "resolution" : 256, + "in_channels" : 3, + "out_ch" : 3, + "ch" : 256, + "ch_mult" : [1, 2, 2, 4], + "num_res_blocks" : 2, + "attn_resolutions" : [32], + } +} diff --git a/ComfyUI_ExtraModels/VAE/loader.py b/ComfyUI_ExtraModels/VAE/loader.py new file mode 100644 index 0000000000000000000000000000000000000000..cb2cae160f2bf66271f37b3fa7cc788c225a3ecb --- /dev/null +++ b/ComfyUI_ExtraModels/VAE/loader.py @@ -0,0 +1,113 @@ +import torch +import comfy.sd +import comfy.utils +from comfy 
import model_management +from comfy import diffusers_convert + +class EXVAE(comfy.sd.VAE): + def __init__(self, model_path, model_conf, dtype=torch.float32): + self.latent_dim = model_conf["embed_dim"] + self.latent_scale = model_conf["embed_scale"] + self.device = model_management.vae_device() + self.offload_device = model_management.vae_offload_device() + self.vae_dtype = dtype + + sd = comfy.utils.load_torch_file(model_path) + model = None + if model_conf["type"] == "AutoencoderKL": + from .models.kl import AutoencoderKL + model = AutoencoderKL(config=model_conf) + if 'decoder.up_blocks.0.resnets.0.norm1.weight' in sd.keys(): + sd = diffusers_convert.convert_vae_state_dict(sd) + elif model_conf["type"] == "AutoencoderKL-VideoDecoder": + from .models.temporal_ae import AutoencoderKL + model = AutoencoderKL(config=model_conf) + elif model_conf["type"] == "VQModel": + from .models.vq import VQModel + model = VQModel(config=model_conf) + elif model_conf["type"] == "ConsistencyDecoder": + from .models.consistencydecoder import ConsistencyDecoder + model = ConsistencyDecoder() + sd = {f"model.{k}":v for k,v in sd.items()} + elif model_conf["type"] == "MoVQ3": + from .models.movq3 import MoVQ + model = MoVQ(model_conf) + else: + raise NotImplementedError(f"Unknown VAE type '{model_conf['type']}'") + + self.first_stage_model = model.eval() + m, u = self.first_stage_model.load_state_dict(sd, strict=False) + if len(m) > 0: print("Missing VAE keys", m) + if len(u) > 0: print("Leftover VAE keys", u) + + self.first_stage_model.to(self.vae_dtype).to(self.offload_device) + + ### Encode/Decode functions below needed due to source repo having 4 VAE channels and a scale factor of 8 hardcoded + def decode_tiled_(self, samples, tile_x=64, tile_y=64, overlap = 16): + steps = samples.shape[0] * comfy.utils.get_tiled_scale_steps(samples.shape[3], samples.shape[2], tile_x, tile_y, overlap) + steps += samples.shape[0] * comfy.utils.get_tiled_scale_steps(samples.shape[3], samples.shape[2], tile_x // 2, tile_y * 2, overlap) + steps += samples.shape[0] * comfy.utils.get_tiled_scale_steps(samples.shape[3], samples.shape[2], tile_x * 2, tile_y // 2, overlap) + pbar = comfy.utils.ProgressBar(steps) + + decode_fn = lambda a: (self.first_stage_model.decode(a.to(self.vae_dtype).to(self.device)) + 1.0).float() + output = torch.clamp(( + (comfy.utils.tiled_scale(samples, decode_fn, tile_x // 2, tile_y * 2, overlap, upscale_amount = self.latent_scale, pbar = pbar) + + comfy.utils.tiled_scale(samples, decode_fn, tile_x * 2, tile_y // 2, overlap, upscale_amount = self.latent_scale, pbar = pbar) + + comfy.utils.tiled_scale(samples, decode_fn, tile_x, tile_y, overlap, upscale_amount = self.latent_scale, pbar = pbar)) + / 3.0) / 2.0, min=0.0, max=1.0) + return output + + def encode_tiled_(self, pixel_samples, tile_x=512, tile_y=512, overlap = 64): + steps = pixel_samples.shape[0] * comfy.utils.get_tiled_scale_steps(pixel_samples.shape[3], pixel_samples.shape[2], tile_x, tile_y, overlap) + steps += pixel_samples.shape[0] * comfy.utils.get_tiled_scale_steps(pixel_samples.shape[3], pixel_samples.shape[2], tile_x // 2, tile_y * 2, overlap) + steps += pixel_samples.shape[0] * comfy.utils.get_tiled_scale_steps(pixel_samples.shape[3], pixel_samples.shape[2], tile_x * 2, tile_y // 2, overlap) + pbar = comfy.utils.ProgressBar(steps) + + encode_fn = lambda a: self.first_stage_model.encode((2. 
* a - 1.).to(self.vae_dtype).to(self.device)).float() + samples = comfy.utils.tiled_scale(pixel_samples, encode_fn, tile_x, tile_y, overlap, upscale_amount = (1/self.latent_scale), out_channels=self.latent_dim, pbar=pbar) + samples += comfy.utils.tiled_scale(pixel_samples, encode_fn, tile_x * 2, tile_y // 2, overlap, upscale_amount = (1/self.latent_scale), out_channels=self.latent_dim, pbar=pbar) + samples += comfy.utils.tiled_scale(pixel_samples, encode_fn, tile_x // 2, tile_y * 2, overlap, upscale_amount = (1/self.latent_scale), out_channels=self.latent_dim, pbar=pbar) + samples /= 3.0 + return samples + + def decode(self, samples_in): + self.first_stage_model = self.first_stage_model.to(self.device) + try: + memory_used = (2562 * samples_in.shape[2] * samples_in.shape[3] * 64) * 1.7 + model_management.free_memory(memory_used, self.device) + free_memory = model_management.get_free_memory(self.device) + batch_number = int(free_memory / memory_used) + batch_number = max(1, batch_number) + + pixel_samples = torch.empty((samples_in.shape[0], 3, round(samples_in.shape[2] * self.latent_scale), round(samples_in.shape[3] * self.latent_scale)), device="cpu") + for x in range(0, samples_in.shape[0], batch_number): + samples = samples_in[x:x+batch_number].to(self.vae_dtype).to(self.device) + pixel_samples[x:x+batch_number] = torch.clamp((self.first_stage_model.decode(samples).cpu().float() + 1.0) / 2.0, min=0.0, max=1.0) + except model_management.OOM_EXCEPTION as e: + print("Warning: Ran out of memory when regular VAE decoding, retrying with tiled VAE decoding.") + pixel_samples = self.decode_tiled_(samples_in) + + self.first_stage_model = self.first_stage_model.to(self.offload_device) + pixel_samples = pixel_samples.cpu().movedim(1,-1) + return pixel_samples + + def encode(self, pixel_samples): + self.first_stage_model = self.first_stage_model.to(self.device) + pixel_samples = pixel_samples.movedim(-1,1) + try: + memory_used = (2078 * pixel_samples.shape[2] * pixel_samples.shape[3]) * 1.7 #NOTE: this constant along with the one in the decode above are estimated from the mem usage for the VAE and could change. + model_management.free_memory(memory_used, self.device) + free_memory = model_management.get_free_memory(self.device) + batch_number = int(free_memory / memory_used) + batch_number = max(1, batch_number) + samples = torch.empty((pixel_samples.shape[0], self.latent_dim, round(pixel_samples.shape[2] // self.latent_scale), round(pixel_samples.shape[3] // self.latent_scale)), device="cpu") + for x in range(0, pixel_samples.shape[0], batch_number): + pixels_in = (2. 
* pixel_samples[x:x+batch_number] - 1.).to(self.vae_dtype).to(self.device) + samples[x:x+batch_number] = self.first_stage_model.encode(pixels_in).cpu().float() + + except model_management.OOM_EXCEPTION as e: + print("Warning: Ran out of memory when regular VAE encoding, retrying with tiled VAE encoding.") + samples = self.encode_tiled_(pixel_samples) + + self.first_stage_model = self.first_stage_model.to(self.offload_device) + return samples diff --git a/ComfyUI_ExtraModels/VAE/models/LICENSE-Consistency-Decoder b/ComfyUI_ExtraModels/VAE/models/LICENSE-Consistency-Decoder new file mode 100644 index 0000000000000000000000000000000000000000..b3841f631d7f15f158bbb9e613227550828b5ff1 --- /dev/null +++ b/ComfyUI_ExtraModels/VAE/models/LICENSE-Consistency-Decoder @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 OpenAI + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/ComfyUI_ExtraModels/VAE/models/LICENSE-Kandinsky-3 b/ComfyUI_ExtraModels/VAE/models/LICENSE-Kandinsky-3 new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/ComfyUI_ExtraModels/VAE/models/LICENSE-Kandinsky-3 @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/ComfyUI_ExtraModels/VAE/models/LICENSE-Latent-Diffusion b/ComfyUI_ExtraModels/VAE/models/LICENSE-Latent-Diffusion new file mode 100644 index 0000000000000000000000000000000000000000..be24ebeed0fb31665dc3c33d2610d35f77b12709 --- /dev/null +++ b/ComfyUI_ExtraModels/VAE/models/LICENSE-Latent-Diffusion @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 Machine Vision and Learning Group, LMU Munich + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/ComfyUI_ExtraModels/VAE/models/LICENSE-SAI b/ComfyUI_ExtraModels/VAE/models/LICENSE-SAI new file mode 100644 index 0000000000000000000000000000000000000000..8855a41d3f4238ee6939af58b8fc278e108b9af7 --- /dev/null +++ b/ComfyUI_ExtraModels/VAE/models/LICENSE-SAI @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 Stability AI + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/ComfyUI_ExtraModels/VAE/models/LICENSE-SDV b/ComfyUI_ExtraModels/VAE/models/LICENSE-SDV new file mode 100644 index 0000000000000000000000000000000000000000..01e57cf8310b2fdd6676fe203820a19ab86c966b --- /dev/null +++ b/ComfyUI_ExtraModels/VAE/models/LICENSE-SDV @@ -0,0 +1,31 @@ +STABLE VIDEO DIFFUSION NON-COMMERCIAL COMMUNITY LICENSE AGREEMENT +Dated: November 21, 2023 + +“AUP” means the Stability AI Acceptable Use Policy available at https://stability.ai/use-policy, as may be updated from time to time. + +"Agreement" means the terms and conditions for use, reproduction, distribution and modification of the Software Products set forth herein. +"Derivative Work(s)” means (a) any derivative work of the Software Products as recognized by U.S. copyright laws and (b) any modifications to a Model, and any other model created which is based on or derived from the Model or the Model’s output. For clarity, Derivative Works do not include the output of any Model. +“Documentation” means any specifications, manuals, documentation, and other written information provided by Stability AI related to the Software. + +"Licensee" or "you" means you, or your employer or any other person or entity (if you are entering into this Agreement on such person or entity's behalf), of the age required under applicable laws, rules or regulations to provide legal consent and that has legal authority to bind your employer or such other person or entity if you are entering in this Agreement on their behalf. + +"Stability AI" or "we" means Stability AI Ltd. 
+ +"Software" means, collectively, Stability AI’s proprietary models and algorithms, including machine-learning models, trained model weights and other elements of the foregoing, made available under this Agreement. + +“Software Products” means Software and Documentation. + +By using or distributing any portion or element of the Software Products, you agree to be bound by this Agreement. + + + +License Rights and Redistribution. +Subject to your compliance with this Agreement, the AUP (which is hereby incorporated herein by reference), and the Documentation, Stability AI grants you a non-exclusive, worldwide, non-transferable, non-sublicensable, revocable, royalty free and limited license under Stability AI’s intellectual property or other rights owned by Stability AI embodied in the Software Products to reproduce, distribute, and create Derivative Works of the Software Products for purposes other than commercial or production use. +b. If you distribute or make the Software Products, or any Derivative Works thereof, available to a third party, the Software Products, Derivative Works, or any portion thereof, respectively, will remain subject to this Agreement and you must (i) provide a copy of this Agreement to such third party, and (ii) retain the following attribution notice within a "Notice" text file distributed as a part of such copies: "Stable Video Diffusion is licensed under the Stable Video Diffusion Research License, Copyright (c) Stability AI Ltd. All Rights Reserved.” If you create a Derivative Work of a Software Product, you may add your own attribution notices to the Notice file included with the Software Product, provided that you clearly indicate which attributions apply to the Software Product and you must state in the NOTICE file that you changed the Software Product and how it was modified. +2. Disclaimer of Warranty. UNLESS REQUIRED BY APPLICABLE LAW, THE SOFTWARE PRODUCTS AND ANY OUTPUT AND RESULTS THEREFROM ARE PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY, OR FITNESS FOR A PARTICULAR PURPOSE. YOU ARE SOLELY RESPONSIBLE FOR DETERMINING THE APPROPRIATENESS OF USING OR REDISTRIBUTING THE SOFTWARE PRODUCTS AND ASSUME ANY RISKS ASSOCIATED WITH YOUR USE OF THE SOFTWARE PRODUCTS AND ANY OUTPUT AND RESULTS. +3. Limitation of Liability. IN NO EVENT WILL STABILITY AI OR ITS AFFILIATES BE LIABLE UNDER ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, TORT, NEGLIGENCE, PRODUCTS LIABILITY, OR OTHERWISE, ARISING OUT OF THIS AGREEMENT, FOR ANY LOST PROFITS OR ANY INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL, EXEMPLARY OR PUNITIVE DAMAGES, EVEN IF STABILITY AI OR ITS AFFILIATES HAVE BEEN ADVISED OF THE POSSIBILITY OF ANY OF THE FOREGOING. +3. Intellectual Property. +a. No trademark licenses are granted under this Agreement, and in connection with the Software Products, neither Stability AI nor Licensee may use any name or mark owned by or associated with the other or any of its affiliates, except as required for reasonable and customary use in describing and redistributing the Software Products. +Subject to Stability AI’s ownership of the Software Products and Derivative Works made by or for Stability AI, with respect to any Derivative Works that are made by you, as between you and Stability AI, you are and will be the owner of such Derivative Works. 
+If you institute litigation or other proceedings against Stability AI (including a cross-claim or counterclaim in a lawsuit) alleging that the Software Products or associated outputs or results, or any portion of any of the foregoing, constitutes infringement of intellectual property or other rights owned or licensable by you, then any licenses granted to you under this Agreement shall terminate as of the date such litigation or claim is filed or instituted. You will indemnify and hold harmless Stability AI from and against any claim by any third party arising out of or related to your use or distribution of the Software Products in violation of this Agreement. +4. Term and Termination. The term of this Agreement will commence upon your acceptance of this Agreement or access to the Software Products and will continue in full force and effect until terminated in accordance with the terms and conditions herein. Stability AI may terminate this Agreement if you are in breach of any term or condition of this Agreement. Upon termination of this Agreement, you shall delete and cease use of the Software Products. Sections 2-4 shall survive the termination of this Agreement. diff --git a/ComfyUI_ExtraModels/VAE/models/LICENSE-Taming-Transformers b/ComfyUI_ExtraModels/VAE/models/LICENSE-Taming-Transformers new file mode 100644 index 0000000000000000000000000000000000000000..57fb4153bafcd64b60377ba0ba2c79b7530efc1e --- /dev/null +++ b/ComfyUI_ExtraModels/VAE/models/LICENSE-Taming-Transformers @@ -0,0 +1,19 @@ +Copyright (c) 2020 Patrick Esser and Robin Rombach and Björn Ommer + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE +OR OTHER DEALINGS IN THE SOFTWARE./ diff --git a/ComfyUI_ExtraModels/VAE/models/consistencydecoder.py b/ComfyUI_ExtraModels/VAE/models/consistencydecoder.py new file mode 100644 index 0000000000000000000000000000000000000000..36d4b64fc7bbc6f9a809bb8d27e26837135896d7 --- /dev/null +++ b/ComfyUI_ExtraModels/VAE/models/consistencydecoder.py @@ -0,0 +1,376 @@ +import math +import torch +import torch.nn.functional as F +import torch.nn as nn + +""" +Code below ported from https://github.com/openai/consistencydecoder +""" + +def _extract_into_tensor(arr, timesteps, broadcast_shape): + # from: https://github.com/openai/guided-diffusion/blob/22e0df8183507e13a7813f8d38d51b072ca1e67c/guided_diffusion/gaussian_diffusion.py#L895 """ + res = arr[timesteps.to(torch.int).cpu()].float().to(timesteps.device) + dims_to_append = len(broadcast_shape) - len(res.shape) + return res[(...,) + (None,) * dims_to_append] + +def betas_for_alpha_bar(num_diffusion_timesteps, alpha_bar, max_beta=0.999): + # from: https://github.com/openai/guided-diffusion/blob/22e0df8183507e13a7813f8d38d51b072ca1e67c/guided_diffusion/gaussian_diffusion.py#L45 + betas = [] + for i in range(num_diffusion_timesteps): + t1 = i / num_diffusion_timesteps + t2 = (i + 1) / num_diffusion_timesteps + betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta)) + return torch.tensor(betas) + +class ConsistencyDecoder(torch.nn.Module): + # From https://github.com/openai/consistencydecoder + def __init__(self): + super().__init__() + self.model = ConvUNetVAE() + self.n_distilled_steps = 64 + + sigma_data = 0.5 + betas = betas_for_alpha_bar( + 1024, lambda t: math.cos((t + 0.008) / 1.008 * math.pi / 2) ** 2 + ) + alphas = 1.0 - betas + alphas_cumprod = torch.cumprod(alphas, dim=0) + self.sqrt_alphas_cumprod = torch.sqrt(alphas_cumprod) + self.sqrt_one_minus_alphas_cumprod = torch.sqrt(1.0 - alphas_cumprod) + sqrt_recip_alphas_cumprod = torch.sqrt(1.0 / alphas_cumprod) + sigmas = torch.sqrt(1.0 / alphas_cumprod - 1) + self.c_skip = ( + sqrt_recip_alphas_cumprod + * sigma_data**2 + / (sigmas**2 + sigma_data**2) + ) + self.c_out = sigmas * sigma_data / (sigmas**2 + sigma_data**2) ** 0.5 + self.c_in = sqrt_recip_alphas_cumprod / (sigmas**2 + sigma_data**2) ** 0.5 + + @staticmethod + def round_timesteps(timesteps, total_timesteps, n_distilled_steps, truncate_start=True): + with torch.no_grad(): + space = torch.div(total_timesteps, n_distilled_steps, rounding_mode="floor") + rounded_timesteps = ( + torch.div(timesteps, space, rounding_mode="floor") + 1 + ) * space + if truncate_start: + rounded_timesteps[rounded_timesteps == total_timesteps] -= space + else: + rounded_timesteps[rounded_timesteps == total_timesteps] -= space + rounded_timesteps[rounded_timesteps == 0] += space + return rounded_timesteps + + @staticmethod + def ldm_transform_latent(z, extra_scale_factor=1): + channel_means = [0.38862467, 0.02253063, 0.07381133, -0.0171294] + channel_stds = [0.9654121, 1.0440036, 0.76147926, 0.77022034] + + if len(z.shape) != 4: + raise ValueError() + + z = z * 0.18215 + channels = [z[:, i] for i in range(z.shape[1])] + + channels = [ + extra_scale_factor * (c - channel_means[i]) / channel_stds[i] + for i, c in enumerate(channels) + ] + return torch.stack(channels, dim=1) + + @torch.no_grad() + def decode(self, features: 
torch.Tensor, schedule=[1.0, 0.5]): + features = self.ldm_transform_latent(features) + ts = self.round_timesteps( + torch.arange(0, 1024), + 1024, + self.n_distilled_steps, + truncate_start=False, + ) + shape = ( + features.size(0), + 3, + 8 * features.size(2), + 8 * features.size(3), + ) + x_start = torch.zeros(shape, device=features.device, dtype=features.dtype) + schedule_timesteps = [int((1024 - 1) * s) for s in schedule] + for i in schedule_timesteps: + t = ts[i].item() + t_ = torch.tensor([t] * features.shape[0], device=features.device) + noise = torch.randn_like(x_start, device=features.device) + x_start = ( + _extract_into_tensor(self.sqrt_alphas_cumprod, t_, x_start.shape) + * x_start + + _extract_into_tensor( + self.sqrt_one_minus_alphas_cumprod, t_, x_start.shape + ) + * noise + ) + c_in = _extract_into_tensor(self.c_in, t_, x_start.shape) + model_output = self.model((c_in * x_start).to(features.dtype), t_, features=features) + B, C = x_start.shape[:2] + model_output, _ = torch.split(model_output, C, dim=1) + pred_xstart = ( + _extract_into_tensor(self.c_out, t_, x_start.shape) * model_output + + _extract_into_tensor(self.c_skip, t_, x_start.shape) * x_start + ).clamp(-1, 1) + x_start = pred_xstart + return x_start + + def encode(self, *args, **kwargs): + raise NotImplementedError("ConsistencyDecoder can't be used for encoding!") + +""" +Model definitions ported from: +https://gist.github.com/madebyollin/865fa6a18d9099351ddbdfbe7299ccbf +https://gist.github.com/mrsteyk/74ad3ec2f6f823111ae4c90e168505ac. +""" + +class TimestepEmbedding(nn.Module): + def __init__(self, n_time=1024, n_emb=320, n_out=1280) -> None: + super().__init__() + self.emb = nn.Embedding(n_time, n_emb) + self.f_1 = nn.Linear(n_emb, n_out) + self.f_2 = nn.Linear(n_out, n_out) + + def forward(self, x) -> torch.Tensor: + x = self.emb(x) + x = self.f_1(x) + x = F.silu(x) + return self.f_2(x) + + +class ImageEmbedding(nn.Module): + def __init__(self, in_channels=7, out_channels=320) -> None: + super().__init__() + self.f = nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1) + + def forward(self, x) -> torch.Tensor: + return self.f(x) + + +class ImageUnembedding(nn.Module): + def __init__(self, in_channels=320, out_channels=6) -> None: + super().__init__() + self.gn = nn.GroupNorm(32, in_channels) + self.f = nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1) + + def forward(self, x) -> torch.Tensor: + return self.f(F.silu(self.gn(x))) + + +class ConvResblock(nn.Module): + def __init__(self, in_features=320, out_features=320) -> None: + super().__init__() + self.f_t = nn.Linear(1280, out_features * 2) + + self.gn_1 = nn.GroupNorm(32, in_features) + self.f_1 = nn.Conv2d(in_features, out_features, kernel_size=3, padding=1) + + self.gn_2 = nn.GroupNorm(32, out_features) + self.f_2 = nn.Conv2d(out_features, out_features, kernel_size=3, padding=1) + + skip_conv = in_features != out_features + self.f_s = ( + nn.Conv2d(in_features, out_features, kernel_size=1, padding=0) + if skip_conv + else nn.Identity() + ) + + def forward(self, x, t): + x_skip = x + t = self.f_t(F.silu(t)) + t = t.chunk(2, dim=1) + t_1 = t[0].unsqueeze(dim=2).unsqueeze(dim=3) + 1 + t_2 = t[1].unsqueeze(dim=2).unsqueeze(dim=3) + + gn_1 = F.silu(self.gn_1(x)) + f_1 = self.f_1(gn_1) + + gn_2 = self.gn_2(f_1) + + return self.f_s(x_skip) + self.f_2(F.silu(gn_2 * t_1 + t_2)) + + +# Also ConvResblock +class Downsample(nn.Module): + def __init__(self, in_channels=320) -> None: + super().__init__() + self.f_t = nn.Linear(1280, in_channels 
* 2) + + self.gn_1 = nn.GroupNorm(32, in_channels) + self.f_1 = nn.Conv2d(in_channels, in_channels, kernel_size=3, padding=1) + self.gn_2 = nn.GroupNorm(32, in_channels) + + self.f_2 = nn.Conv2d(in_channels, in_channels, kernel_size=3, padding=1) + + def forward(self, x, t) -> torch.Tensor: + x_skip = x + + t = self.f_t(F.silu(t)) + t_1, t_2 = t.chunk(2, dim=1) + t_1 = t_1.unsqueeze(2).unsqueeze(3) + 1 + t_2 = t_2.unsqueeze(2).unsqueeze(3) + + gn_1 = F.silu(self.gn_1(x)) + avg_pool2d = F.avg_pool2d(gn_1, kernel_size=(2, 2), stride=None) + f_1 = self.f_1(avg_pool2d) + gn_2 = self.gn_2(f_1) + + f_2 = self.f_2(F.silu(t_2 + (t_1 * gn_2))) + + return f_2 + F.avg_pool2d(x_skip, kernel_size=(2, 2), stride=None) + + +# Also ConvResblock +class Upsample(nn.Module): + def __init__(self, in_channels=1024) -> None: + super().__init__() + self.f_t = nn.Linear(1280, in_channels * 2) + + self.gn_1 = nn.GroupNorm(32, in_channels) + self.f_1 = nn.Conv2d(in_channels, in_channels, kernel_size=3, padding=1) + self.gn_2 = nn.GroupNorm(32, in_channels) + + self.f_2 = nn.Conv2d(in_channels, in_channels, kernel_size=3, padding=1) + + def forward(self, x, t) -> torch.Tensor: + x_skip = x + + t = self.f_t(F.silu(t)) + t_1, t_2 = t.chunk(2, dim=1) + t_1 = t_1.unsqueeze(2).unsqueeze(3) + 1 + t_2 = t_2.unsqueeze(2).unsqueeze(3) + + gn_1 = F.silu(self.gn_1(x)) + upsample = F.interpolate(gn_1.float(), scale_factor=2, mode="nearest").to(gn_1.dtype) + + f_1 = self.f_1(upsample) + gn_2 = self.gn_2(f_1) + + f_2 = self.f_2(F.silu(t_2 + (t_1 * gn_2))) + + return f_2 + F.interpolate(x_skip.float(), scale_factor=2, mode="nearest").to(x_skip.dtype) + + +class ConvUNetVAE(nn.Module): + def __init__(self) -> None: + super().__init__() + self.embed_image = ImageEmbedding() + self.embed_time = TimestepEmbedding() + + down_0 = nn.ModuleList( + [ + ConvResblock(320, 320), + ConvResblock(320, 320), + ConvResblock(320, 320), + Downsample(320), + ] + ) + down_1 = nn.ModuleList( + [ + ConvResblock(320, 640), + ConvResblock(640, 640), + ConvResblock(640, 640), + Downsample(640), + ] + ) + down_2 = nn.ModuleList( + [ + ConvResblock(640, 1024), + ConvResblock(1024, 1024), + ConvResblock(1024, 1024), + Downsample(1024), + ] + ) + down_3 = nn.ModuleList( + [ + ConvResblock(1024, 1024), + ConvResblock(1024, 1024), + ConvResblock(1024, 1024), + ] + ) + self.down = nn.ModuleList( + [ + down_0, + down_1, + down_2, + down_3, + ] + ) + + self.mid = nn.ModuleList( + [ + ConvResblock(1024, 1024), + ConvResblock(1024, 1024), + ] + ) + + up_3 = nn.ModuleList( + [ + ConvResblock(1024 * 2, 1024), + ConvResblock(1024 * 2, 1024), + ConvResblock(1024 * 2, 1024), + ConvResblock(1024 * 2, 1024), + Upsample(1024), + ] + ) + up_2 = nn.ModuleList( + [ + ConvResblock(1024 * 2, 1024), + ConvResblock(1024 * 2, 1024), + ConvResblock(1024 * 2, 1024), + ConvResblock(1024 + 640, 1024), + Upsample(1024), + ] + ) + up_1 = nn.ModuleList( + [ + ConvResblock(1024 + 640, 640), + ConvResblock(640 * 2, 640), + ConvResblock(640 * 2, 640), + ConvResblock(320 + 640, 640), + Upsample(640), + ] + ) + up_0 = nn.ModuleList( + [ + ConvResblock(320 + 640, 320), + ConvResblock(320 * 2, 320), + ConvResblock(320 * 2, 320), + ConvResblock(320 * 2, 320), + ] + ) + self.up = nn.ModuleList( + [ + up_0, + up_1, + up_2, + up_3, + ] + ) + + self.output = ImageUnembedding() + + def forward(self, x, t, features) -> torch.Tensor: + x = torch.cat([x, F.interpolate(features.float(),scale_factor=8,mode="nearest").to(features.dtype)], dim=1) + t = self.embed_time(t) + x = self.embed_image(x) + + skips = 
[x] + for down in self.down: + for block in down: + x = block(x, t) + skips.append(x) + + for i in range(2): + x = self.mid[i](x, t) + + for up in self.up[::-1]: + for block in up: + if isinstance(block, ConvResblock): + x = torch.concat([x, skips.pop()], dim=1) + x = block(x, t) + + return self.output(x) diff --git a/ComfyUI_ExtraModels/VAE/models/kl.py b/ComfyUI_ExtraModels/VAE/models/kl.py new file mode 100644 index 0000000000000000000000000000000000000000..7268511f904bd3ae3e791e60f85f599264979a7a --- /dev/null +++ b/ComfyUI_ExtraModels/VAE/models/kl.py @@ -0,0 +1,458 @@ +import torch +import numpy as np +from torch import nn + + +def Normalize(in_channels, num_groups=32): + return torch.nn.GroupNorm(num_groups=num_groups, num_channels=in_channels, eps=1e-6, affine=True) + +def nonlinearity(x): # swish + return x*torch.sigmoid(x) + + +class AutoencoderKL(nn.Module): + def __init__(self, config): + super().__init__() + self.embed_dim = config["embed_dim"] + self.encoder = Encoder(**config) + self.decoder = Decoder(**config) + assert config["double_z"] + self.quant_conv = torch.nn.Conv2d(2*config["z_channels"], 2*self.embed_dim, 1) + self.post_quant_conv = torch.nn.Conv2d(self.embed_dim, config["z_channels"], 1) + + def encode(self, x): + h = self.encoder(x) + moments = self.quant_conv(h) + posterior = DiagonalGaussianDistribution(moments) + return posterior.sample() + + def decode(self, z): + z = self.post_quant_conv(z) + dec = self.decoder(z) + return dec + + def forward(self, input, sample_posterior=True): + posterior = self.encode(input) + if sample_posterior: + z = posterior.sample() + else: + z = posterior.mode() + dec = self.decode(z) + return dec, posterior + + +class Encoder(nn.Module): + def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks, + attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels, + resolution, z_channels, double_z=True, use_linear_attn=False, attn_type="vanilla", + **ignore_kwargs): + super().__init__() + if use_linear_attn: attn_type = "linear" + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + + # downsampling + self.conv_in = torch.nn.Conv2d(in_channels, + self.ch, + kernel_size=3, + stride=1, + padding=1) + + curr_res = resolution + in_ch_mult = (1,)+tuple(ch_mult) + self.in_ch_mult = in_ch_mult + self.down = nn.ModuleList() + for i_level in range(self.num_resolutions): + block = nn.ModuleList() + attn = nn.ModuleList() + block_in = ch*in_ch_mult[i_level] + block_out = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks): + block.append(ResnetBlock(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + down = nn.Module() + down.block = block + down.attn = attn + if i_level != self.num_resolutions-1: + down.downsample = Downsample(block_in, resamp_with_conv) + curr_res = curr_res // 2 + self.down.append(down) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type) + self.mid.block_2 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + + # end + self.norm_out = Normalize(block_in) + self.conv_out = 
torch.nn.Conv2d(block_in, + 2*z_channels if double_z else z_channels, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, x): + # timestep embedding + temb = None + + # downsampling + hs = [self.conv_in(x)] + for i_level in range(self.num_resolutions): + for i_block in range(self.num_res_blocks): + h = self.down[i_level].block[i_block](hs[-1], temb) + if len(self.down[i_level].attn) > 0: + h = self.down[i_level].attn[i_block](h) + hs.append(h) + if i_level != self.num_resolutions-1: + hs.append(self.down[i_level].downsample(hs[-1])) + + # middle + h = hs[-1] + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # end + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + +class Decoder(nn.Module): + def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks, + attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels, + resolution, z_channels, give_pre_end=False, tanh_out=False, use_linear_attn=False, + attn_type="vanilla", post_quant_conv=None, **ignorekwargs): + super().__init__() + if use_linear_attn: attn_type = "linear" + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + self.give_pre_end = give_pre_end + self.tanh_out = tanh_out + self.post_quant_conv = post_quant_conv + + # compute in_ch_mult, block_in and curr_res at lowest res + in_ch_mult = (1,)+tuple(ch_mult) + block_in = ch*ch_mult[self.num_resolutions-1] + curr_res = resolution // 2**(self.num_resolutions-1) + self.z_shape = (1,z_channels,curr_res,curr_res) + print("Working with z of shape {} = {} dimensions.".format( + self.z_shape, np.prod(self.z_shape))) + + # z to block_in + self.conv_in = torch.nn.Conv2d(z_channels, + block_in, + kernel_size=3, + stride=1, + padding=1) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type) + self.mid.block_2 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks+1): + block.append(ResnetBlock(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + up.upsample = Upsample(block_in, resamp_with_conv) + curr_res = curr_res * 2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d(block_in, + out_ch, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, z): + #assert z.shape[1:] == self.z_shape[1:] + self.last_z_shape = z.shape + + # timestep embedding + temb = None + + # z to block_in + h = self.conv_in(z) + + # middle + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks+1): + h = self.up[i_level].block[i_block](h, temb) + if len(self.up[i_level].attn) > 0: 
+ h = self.up[i_level].attn[i_block](h) + if i_level != 0: + h = self.up[i_level].upsample(h) + + # end + if self.give_pre_end: + return h + + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + if self.tanh_out: + h = torch.tanh(h) + return h + + +class DiagonalGaussianDistribution(object): + def __init__(self, parameters, deterministic=False): + self.parameters = parameters + self.mean, self.logvar = torch.chunk(parameters, 2, dim=1) + self.logvar = torch.clamp(self.logvar, -30.0, 20.0) + self.deterministic = deterministic + self.std = torch.exp(0.5 * self.logvar) + self.var = torch.exp(self.logvar) + if self.deterministic: + self.var = self.std = torch.zeros_like(self.mean).to(device=self.parameters.device) + + def sample(self): + x = self.mean + self.std * torch.randn(self.mean.shape).to(device=self.parameters.device) + return x + + def kl(self, other=None): + if self.deterministic: + return torch.Tensor([0.]) + else: + if other is None: + return 0.5 * torch.sum(torch.pow(self.mean, 2) + + self.var - 1.0 - self.logvar, + dim=[1, 2, 3]) + else: + return 0.5 * torch.sum( + torch.pow(self.mean - other.mean, 2) / other.var + + self.var / other.var - 1.0 - self.logvar + other.logvar, + dim=[1, 2, 3]) + + def nll(self, sample, dims=[1,2,3]): + if self.deterministic: + return torch.Tensor([0.]) + logtwopi = np.log(2.0 * np.pi) + return 0.5 * torch.sum( + logtwopi + self.logvar + torch.pow(sample - self.mean, 2) / self.var, + dim=dims) + + def mode(self): + return self.mean + + +class Upsample(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + self.conv = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, x): + # BF16 fix + xh = x.to(torch.float32) + xh = torch.nn.functional.interpolate(xh, scale_factor=2.0, mode="nearest") + x = xh.to(x.dtype) + + if self.with_conv: + x = self.conv(x) + return x + + +class Downsample(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + # no asymmetric padding in torch conv, must do it ourselves + self.conv = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=3, + stride=2, + padding=0) + + def forward(self, x): + if self.with_conv: + pad = (0,1,0,1) + x = torch.nn.functional.pad(x, pad, mode="constant", value=0) + x = self.conv(x) + else: + x = torch.nn.functional.avg_pool2d(x, kernel_size=2, stride=2) + return x + + +class ResnetBlock(nn.Module): + def __init__(self, *, in_channels, out_channels=None, conv_shortcut=False, + dropout, temb_channels=512): + super().__init__() + self.in_channels = in_channels + out_channels = in_channels if out_channels is None else out_channels + self.out_channels = out_channels + self.use_conv_shortcut = conv_shortcut + + self.norm1 = Normalize(in_channels) + self.conv1 = torch.nn.Conv2d(in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + if temb_channels > 0: + self.temb_proj = torch.nn.Linear(temb_channels, + out_channels) + self.norm2 = Normalize(out_channels) + self.dropout = torch.nn.Dropout(dropout) + self.conv2 = torch.nn.Conv2d(out_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + self.conv_shortcut = torch.nn.Conv2d(in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + else: + self.nin_shortcut = torch.nn.Conv2d(in_channels, + out_channels, + 
kernel_size=1, + stride=1, + padding=0) + + def forward(self, x, temb): + h = x + h = self.norm1(h) + h = nonlinearity(h) + h = self.conv1(h) + + if temb is not None: + h = h + self.temb_proj(nonlinearity(temb))[:,:,None,None] + + h = self.norm2(h) + h = nonlinearity(h) + h = self.dropout(h) + h = self.conv2(h) + + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + x = self.conv_shortcut(x) + else: + x = self.nin_shortcut(x) + + return x+h + + +class AttnBlock(nn.Module): + def __init__(self, in_channels): + super().__init__() + self.in_channels = in_channels + + self.norm = Normalize(in_channels) + self.q = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.k = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.v = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.proj_out = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + + + def forward(self, x): + h_ = x + h_ = self.norm(h_) + q = self.q(h_) + k = self.k(h_) + v = self.v(h_) + + # compute attention + b,c,h,w = q.shape + q = q.reshape(b,c,h*w) + q = q.permute(0,2,1) # b,hw,c + k = k.reshape(b,c,h*w) # b,c,hw + w_ = torch.bmm(q,k) # b,hw,hw w[b,i,j]=sum_c q[b,i,c]k[b,c,j] + # w_ = w_ * (int(c)**(-0.5)) + w_ = w_ * (c**(-0.5)) + w_ = torch.nn.functional.softmax(w_, dim=2) + + # attend to values + v = v.reshape(b,c,h*w) + w_ = w_.permute(0,2,1) # b,hw,hw (first hw of k, second of q) + h_ = torch.bmm(v,w_) # b, c,hw (hw of q) h_[b,c,j] = sum_i v[b,c,i] w_[b,i,j] + h_ = h_.reshape(b,c,h,w) + + h_ = self.proj_out(h_) + + return x+h_ + +def make_attn(in_channels, attn_type="vanilla"): + assert attn_type in ["vanilla", "linear", "none"], f'attn_type {attn_type} unknown' + print(f"making attention of type '{attn_type}' with {in_channels} in_channels") + if attn_type == "vanilla": + return AttnBlock(in_channels) + elif attn_type == "none": + return nn.Identity(in_channels) + else: + return LinAttnBlock(in_channels) diff --git a/ComfyUI_ExtraModels/VAE/models/movq3.py b/ComfyUI_ExtraModels/VAE/models/movq3.py new file mode 100644 index 0000000000000000000000000000000000000000..685ea838546fb761a364af9a2895233457ec1f32 --- /dev/null +++ b/ComfyUI_ExtraModels/VAE/models/movq3.py @@ -0,0 +1,418 @@ +import math +import torch +import torch.nn as nn +import numpy as np +import torch.nn.functional as F + + +def nonlinearity(x): + return x*torch.sigmoid(x) + + +class SpatialNorm(nn.Module): + def __init__( + self, f_channels, zq_channels=None, norm_layer=nn.GroupNorm, freeze_norm_layer=False, add_conv=False, **norm_layer_params + ): + super().__init__() + self.norm_layer = norm_layer(num_channels=f_channels, **norm_layer_params) + if zq_channels is not None: + if freeze_norm_layer: + for p in self.norm_layer.parameters: + p.requires_grad = False + self.add_conv = add_conv + if self.add_conv: + self.conv = nn.Conv2d(zq_channels, zq_channels, kernel_size=3, stride=1, padding=1) + self.conv_y = nn.Conv2d(zq_channels, f_channels, kernel_size=1, stride=1, padding=0) + self.conv_b = nn.Conv2d(zq_channels, f_channels, kernel_size=1, stride=1, padding=0) + def forward(self, f, zq=None): + norm_f = self.norm_layer(f) + if zq is not None: + f_size = f.shape[-2:] + zq = torch.nn.functional.interpolate(zq.float(), size=f_size, mode="nearest").to(zq.dtype) + if self.add_conv: + zq = self.conv(zq) + norm_f = norm_f * self.conv_y(zq) + self.conv_b(zq) + return norm_f + + +def 
Normalize(in_channels, zq_ch=None, add_conv=None): + return SpatialNorm( + in_channels, zq_ch, norm_layer=nn.GroupNorm, + freeze_norm_layer=False, add_conv=add_conv, num_groups=32, eps=1e-6, affine=True + ) + + +class Upsample(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + self.conv = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, x): + x = torch.nn.functional.interpolate(x.float(), scale_factor=2.0, mode="nearest").to(x.dtype) + if self.with_conv: + x = self.conv(x) + return x + + +class Downsample(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + self.conv = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=3, + stride=2, + padding=0) + + def forward(self, x): + if self.with_conv: + pad = (0,1,0,1) + x = torch.nn.functional.pad(x, pad, mode="constant", value=0) + x = self.conv(x) + else: + x = torch.nn.functional.avg_pool2d(x, kernel_size=2, stride=2) + return x + + +class ResnetBlock(nn.Module): + def __init__(self, *, in_channels, out_channels=None, conv_shortcut=False, + dropout, temb_channels=512, zq_ch=None, add_conv=False): + super().__init__() + self.in_channels = in_channels + out_channels = in_channels if out_channels is None else out_channels + self.out_channels = out_channels + self.use_conv_shortcut = conv_shortcut + + self.norm1 = Normalize(in_channels, zq_ch, add_conv=add_conv) + self.conv1 = torch.nn.Conv2d(in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + if temb_channels > 0: + self.temb_proj = torch.nn.Linear(temb_channels, + out_channels) + self.norm2 = Normalize(out_channels, zq_ch, add_conv=add_conv) + self.dropout = torch.nn.Dropout(dropout) + self.conv2 = torch.nn.Conv2d(out_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + self.conv_shortcut = torch.nn.Conv2d(in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + else: + self.nin_shortcut = torch.nn.Conv2d(in_channels, + out_channels, + kernel_size=1, + stride=1, + padding=0) + + def forward(self, x, temb, zq=None): + h = x + h = self.norm1(h, zq) + h = nonlinearity(h) + h = self.conv1(h) + + if temb is not None: + h = h + self.temb_proj(nonlinearity(temb))[:,:,None,None] + + h = self.norm2(h, zq) + h = nonlinearity(h) + h = self.dropout(h) + h = self.conv2(h) + + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + x = self.conv_shortcut(x) + else: + x = self.nin_shortcut(x) + + return x+h + + +class AttnBlock(nn.Module): + def __init__(self, in_channels, zq_ch=None, add_conv=False): + super().__init__() + self.in_channels = in_channels + + self.norm = Normalize(in_channels, zq_ch, add_conv=add_conv) + self.q = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.k = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.v = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.proj_out = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + + + def forward(self, x, zq=None): + h_ = x + h_ = self.norm(h_, zq) + q = self.q(h_) + k = self.k(h_) + v = self.v(h_) + + # compute attention + b,c,h,w = q.shape + q = q.reshape(b,c,h*w) + q = q.permute(0,2,1) # b,hw,c + k = k.reshape(b,c,h*w) # 
b,c,hw + w_ = torch.bmm(q,k) # b,hw,hw w[b,i,j]=sum_c q[b,i,c]k[b,c,j] + w_ = w_ * (int(c)**(-0.5)) + w_ = torch.nn.functional.softmax(w_, dim=2) + + # attend to values + v = v.reshape(b,c,h*w) + w_ = w_.permute(0,2,1) # b,hw,hw (first hw of k, second of q) + h_ = torch.bmm(v,w_) # b, c,hw (hw of q) h_[b,c,j] = sum_i v[b,c,i] w_[b,i,j] + h_ = h_.reshape(b,c,h,w) + + h_ = self.proj_out(h_) + + return x+h_ + + +class Encoder(nn.Module): + def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks, + attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels, + resolution, z_channels, double_z=True, **ignore_kwargs): + super().__init__() + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + + # downsampling + self.conv_in = torch.nn.Conv2d(in_channels, + self.ch, + kernel_size=3, + stride=1, + padding=1) + + curr_res = resolution + in_ch_mult = (1,)+tuple(ch_mult) + self.down = nn.ModuleList() + for i_level in range(self.num_resolutions): + block = nn.ModuleList() + attn = nn.ModuleList() + block_in = ch*in_ch_mult[i_level] + block_out = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks): + block.append(ResnetBlock(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(AttnBlock(block_in)) + down = nn.Module() + down.block = block + down.attn = attn + if i_level != self.num_resolutions-1: + down.downsample = Downsample(block_in, resamp_with_conv) + curr_res = curr_res // 2 + self.down.append(down) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + self.mid.attn_1 = AttnBlock(block_in) + self.mid.block_2 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d(block_in, + 2*z_channels if double_z else z_channels, + kernel_size=3, + stride=1, + padding=1) + + + def forward(self, x): + temb = None + + # downsampling + hs = [self.conv_in(x)] + for i_level in range(self.num_resolutions): + for i_block in range(self.num_res_blocks): + h = self.down[i_level].block[i_block](hs[-1], temb) + if len(self.down[i_level].attn) > 0: + h = self.down[i_level].attn[i_block](h) + hs.append(h) + if i_level != self.num_resolutions-1: + hs.append(self.down[i_level].downsample(hs[-1])) + + # middle + h = hs[-1] + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # end + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + +class Decoder(nn.Module): + def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks, + attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels, + resolution, z_channels, give_pre_end=False, zq_ch=None, add_conv=False, **ignorekwargs): + super().__init__() + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + self.give_pre_end = give_pre_end + + # compute in_ch_mult, block_in and curr_res at lowest res + in_ch_mult = (1,)+tuple(ch_mult) + block_in = ch*ch_mult[self.num_resolutions-1] + curr_res = resolution // 2**(self.num_resolutions-1) + self.z_shape = 
(1,z_channels,curr_res,curr_res) + + # z to block_in + self.conv_in = torch.nn.Conv2d(z_channels, + block_in, + kernel_size=3, + stride=1, + padding=1) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + zq_ch=zq_ch, + add_conv=add_conv) + self.mid.attn_1 = AttnBlock(block_in, zq_ch, add_conv=add_conv) + self.mid.block_2 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + zq_ch=zq_ch, + add_conv=add_conv) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks+1): + block.append(ResnetBlock(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + zq_ch=zq_ch, + add_conv=add_conv)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(AttnBlock(block_in, zq_ch, add_conv=add_conv)) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + up.upsample = Upsample(block_in, resamp_with_conv) + curr_res = curr_res * 2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in, zq_ch, add_conv=add_conv) + self.conv_out = torch.nn.Conv2d(block_in, + out_ch, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, z, zq): + #assert z.shape[1:] == self.z_shape[1:] + self.last_z_shape = z.shape + + # timestep embedding + temb = None + + # z to block_in + h = self.conv_in(z) + + # middle + h = self.mid.block_1(h, temb, zq) + h = self.mid.attn_1(h, zq) + h = self.mid.block_2(h, temb, zq) + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks+1): + h = self.up[i_level].block[i_block](h, temb, zq) + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block](h, zq) + if i_level != 0: + h = self.up[i_level].upsample(h) + + # end + if self.give_pre_end: + return h + + h = self.norm_out(h, zq) + h = nonlinearity(h) + h = self.conv_out(h) + return h + +class MoVQ(nn.Module): + def __init__(self, generator_params): + super().__init__() + z_channels = generator_params["z_channels"] + self.encoder = Encoder(**generator_params) + self.quant_conv = torch.nn.Conv2d(z_channels, z_channels, 1) + self.post_quant_conv = torch.nn.Conv2d(z_channels, z_channels, 1) + self.decoder = Decoder(zq_ch=z_channels, **generator_params) + + @torch.no_grad() + def encode(self, x): + h = self.encoder(x) + h = self.quant_conv(h) + return h + + @torch.no_grad() + def decode(self, quant): + decoder_input = self.post_quant_conv(quant) + decoded = self.decoder(decoder_input, quant) + return decoded diff --git a/ComfyUI_ExtraModels/VAE/models/temporal_ae.py b/ComfyUI_ExtraModels/VAE/models/temporal_ae.py new file mode 100644 index 0000000000000000000000000000000000000000..5f52549c5bf05f926bb1bea74f8ea5e95d97fa72 --- /dev/null +++ b/ComfyUI_ExtraModels/VAE/models/temporal_ae.py @@ -0,0 +1,504 @@ +import math +import torch +import numpy as np +from torch import nn +from typing import Callable, Iterable, Union, Optional +from einops import rearrange, repeat + +from comfy import model_management +from .kl import ( + Encoder, Decoder, Upsample, Normalize, + AttnBlock, ResnetBlock, #MemoryEfficientAttnBlock, + DiagonalGaussianDistribution, nonlinearity, make_attn +) + +class AutoencoderKL(nn.Module): 
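+    # Temporal (video) variant of the KL autoencoder: frames are encoded independently with the
+    # standard 2D Encoder, while decoding goes through VideoDecoder, which mixes information
+    # across frames (see VideoResBlock / AE3DConv below); the quant/post_quant convs of the
+    # image VAE are unused here and left commented out in __init__.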
+ def __init__(self, config): + super().__init__() + self.embed_dim = config["embed_dim"] + self.encoder = Encoder(**config) + self.decoder = VideoDecoder(**config) + assert config["double_z"] + # these aren't used here for some reason + # self.quant_conv = torch.nn.Conv2d(2*config["z_channels"], 2*self.embed_dim, 1) + # self.post_quant_conv = torch.nn.Conv2d(self.embed_dim, config["z_channels"], 1) + + def encode(self, x): + ## batched + # n_samples = x.shape[0] + # n_rounds = math.ceil(x.shape[0] / n_samples) + # all_out = [] + # for n in range(n_rounds): + # h = self.encoder( + # x[n * n_samples : (n + 1) * n_samples] + # ) + # moments = h # self.quant_conv(h) + # posterior = DiagonalGaussianDistribution(moments) + # all_out.append(posterior.sample()) + # z = torch.cat(all_out, dim=0) + # return z + + ## default + h = self.encoder(x) + moments = h # self.quant_conv(h) + posterior = DiagonalGaussianDistribution(moments) + return posterior.sample() + + + def decode(self, z): + ## batched - seems the same as default? + # n_samples = z.shape[0] + # n_rounds = math.ceil(z.shape[0] / n_samples) + # all_out = [] + # for n in range(n_rounds): + # dec = self.decoder( + # z[n * n_samples : (n + 1) * n_samples], + # timesteps=len(z[n * n_samples : (n + 1) * n_samples]), + # ) + # all_out.append(dec) + # out = torch.cat(all_out, dim=0) + + ## default + out = self.decoder( + z, timesteps=len(z) + ) + return out + + def forward(self, input, sample_posterior=True): + posterior = self.encode(input) + if sample_posterior: + z = posterior.sample() + else: + z = posterior.mode() + dec = self.decode(z) + return dec, posterior + +class VideoDecoder(nn.Module): + available_time_modes = ["all", "conv-only", "attn-only"] + def __init__( + self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks, + attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels, + resolution, z_channels, give_pre_end=False, tanh_out=False, use_linear_attn=False, + attn_type="vanilla", + video_kernel_size: Union[int, list] = 3, alpha: float = 0.0, merge_strategy: str = "learned", time_mode: str = "conv-only", + **ignorekwargs + ): + super().__init__() + if use_linear_attn: attn_type = "linear" + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + self.give_pre_end = give_pre_end + self.tanh_out = tanh_out + + self.video_kernel_size = video_kernel_size + self.alpha = alpha + self.merge_strategy = merge_strategy + self.time_mode = time_mode + assert ( + self.time_mode in self.available_time_modes + ), f"time_mode parameter has to be in {self.available_time_modes}" + + # compute in_ch_mult, block_in and curr_res at lowest res + in_ch_mult = (1,)+tuple(ch_mult) + block_in = ch*ch_mult[self.num_resolutions-1] + curr_res = resolution // 2**(self.num_resolutions-1) + self.z_shape = (1,z_channels,curr_res,curr_res) + print("Working with z of shape {} = {} dimensions.".format( + self.z_shape, np.prod(self.z_shape))) + + # z to block_in + self.conv_in = torch.nn.Conv2d( + z_channels, + block_in, + kernel_size=3, + stride=1, + padding=1 + ) + + # middle + self.mid = nn.Module() + self.mid.block_1 = VideoResBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + video_kernel_size=self.video_kernel_size, + alpha=self.alpha, + merge_strategy=self.merge_strategy, + ) + self.mid.attn_1 = make_attn( + block_in, + attn_type=attn_type, + ) + self.mid.block_2 = VideoResBlock( + 
in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + video_kernel_size=self.video_kernel_size, + alpha=self.alpha, + merge_strategy=self.merge_strategy, + ) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks+1): + block.append(VideoResBlock( + in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + video_kernel_size=self.video_kernel_size, + alpha=self.alpha, + merge_strategy=self.merge_strategy, + )) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn( + block_in, + attn_type=attn_type, + )) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + up.upsample = Upsample(block_in, resamp_with_conv) + curr_res = curr_res * 2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in) + self.conv_out = AE3DConv( + in_channels = block_in, + out_channels = out_ch, + video_kernel_size=self.video_kernel_size, + kernel_size=3, + stride=1, + padding=1, + ) + + def get_last_layer(self, skip_time_mix=False, **kwargs): + if self.time_mode == "attn-only": + raise NotImplementedError("TODO") + else: + return ( + self.conv_out.time_mix_conv.weight + if not skip_time_mix + else self.conv_out.weight + ) + + def forward(self, z, **kwargs): + #assert z.shape[1:] == self.z_shape[1:] + self.last_z_shape = z.shape + + # timestep embedding + temb = None + + # z to block_in + h = self.conv_in(z) + + # middle + h = self.mid.block_1(h, temb, **kwargs) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb, **kwargs) + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks+1): + h = self.up[i_level].block[i_block](h, temb, **kwargs) + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block](h) + if i_level != 0: + h = self.up[i_level].upsample(h) + + # end + if self.give_pre_end: + return h + + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h, **kwargs) + if self.tanh_out: + h = torch.tanh(h) + return h + + +class ResBlock(nn.Module): + """ + A residual block that can optionally change the number of channels. + :param channels: the number of input channels. + :param emb_channels: the number of timestep embedding channels. + :param dropout: the rate of dropout. + :param out_channels: if specified, the number of out channels. + :param use_conv: if True and out_channels is specified, use a spatial + convolution instead of a smaller 1x1 convolution to change the + channels in the skip connection. + :param dims: determines if the signal is 1D, 2D, or 3D. + :param use_checkpoint: if True, use gradient checkpointing on this module. + :param up: if True, use this block for upsampling. + :param down: if True, use this block for downsampling. 
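+    :param kernel_size: size of the convolution kernels (an int, or an iterable of per-dimension sizes).
+    :param exchange_temb_dims: if True, rearrange the timestep embedding from (b, t, c, ...) to
+        (b, c, t, ...) before it is added to the features.
+    :param skip_t_emb: if True, skip the timestep embedding entirely (no emb_layers are created).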
+ """ + + def __init__( + self, + channels: int, + emb_channels: int, + dropout: float, + out_channels: Optional[int] = None, + use_conv: bool = False, + use_scale_shift_norm: bool = False, + dims: int = 2, + use_checkpoint: bool = False, + up: bool = False, + down: bool = False, + kernel_size: int = 3, + exchange_temb_dims: bool = False, + skip_t_emb: bool = False, + ): + super().__init__() + self.channels = channels + self.emb_channels = emb_channels + self.dropout = dropout + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.use_checkpoint = use_checkpoint + self.use_scale_shift_norm = use_scale_shift_norm + self.exchange_temb_dims = exchange_temb_dims + + if isinstance(kernel_size, Iterable): + padding = [k // 2 for k in kernel_size] + else: + padding = kernel_size // 2 + + self.in_layers = nn.Sequential( + normalization(channels), + nn.SiLU(), + conv_nd(dims, channels, self.out_channels, kernel_size, padding=padding), + ) + + self.updown = up or down + + if up: + self.h_upd = Upsample(channels, False, dims) + self.x_upd = Upsample(channels, False, dims) + elif down: + self.h_upd = Downsample(channels, False, dims) + self.x_upd = Downsample(channels, False, dims) + else: + self.h_upd = self.x_upd = nn.Identity() + + self.skip_t_emb = skip_t_emb + self.emb_out_channels = ( + 2 * self.out_channels if use_scale_shift_norm else self.out_channels + ) + if self.skip_t_emb: + print(f"Skipping timestep embedding in {self.__class__.__name__}") + assert not self.use_scale_shift_norm + self.emb_layers = None + self.exchange_temb_dims = False + else: + self.emb_layers = nn.Sequential( + nn.SiLU(), + linear( + emb_channels, + self.emb_out_channels, + ), + ) + + self.out_layers = nn.Sequential( + normalization(self.out_channels), + nn.SiLU(), + nn.Dropout(p=dropout), + zero_module( + conv_nd( + dims, + self.out_channels, + self.out_channels, + kernel_size, + padding=padding, + ) + ), + ) + + if self.out_channels == channels: + self.skip_connection = nn.Identity() + elif use_conv: + self.skip_connection = conv_nd( + dims, channels, self.out_channels, kernel_size, padding=padding + ) + else: + self.skip_connection = conv_nd(dims, channels, self.out_channels, 1) + + def forward(self, x: torch.Tensor, emb: torch.Tensor) -> torch.Tensor: + """ + Apply the block to a Tensor, conditioned on a timestep embedding. + :param x: an [N x C x ...] Tensor of features. + :param emb: an [N x emb_channels] Tensor of timestep embeddings. + :return: an [N x C x ...] Tensor of outputs. + """ + if self.use_checkpoint: + return checkpoint(self._forward, x, emb) + else: + return self._forward(x, emb) + + def _forward(self, x: torch.Tensor, emb: torch.Tensor) -> torch.Tensor: + if self.updown: + in_rest, in_conv = self.in_layers[:-1], self.in_layers[-1] + h = in_rest(x) + h = self.h_upd(h) + x = self.x_upd(x) + h = in_conv(h) + else: + h = self.in_layers(x) + + if self.skip_t_emb: + emb_out = torch.zeros_like(h) + else: + emb_out = self.emb_layers(emb).type(h.dtype) + while len(emb_out.shape) < len(h.shape): + emb_out = emb_out[..., None] + if self.use_scale_shift_norm: + out_norm, out_rest = self.out_layers[0], self.out_layers[1:] + scale, shift = torch.chunk(emb_out, 2, dim=1) + h = out_norm(h) * (1 + scale) + shift + h = out_rest(h) + else: + if self.exchange_temb_dims: + emb_out = rearrange(emb_out, "b t c ... 
-> b c t ...") + h = h + emb_out + h = self.out_layers(h) + return self.skip_connection(x) + h + +class VideoResBlock(ResnetBlock): + def __init__( + self, + out_channels, + *args, + dropout=0.0, + video_kernel_size=3, + alpha=0.0, + merge_strategy="learned", + **kwargs, + ): + super().__init__(out_channels=out_channels, dropout=dropout, *args, **kwargs) + if video_kernel_size is None: + video_kernel_size = [3, 1, 1] + self.time_stack = ResBlock( + channels=out_channels, + emb_channels=0, + dropout=dropout, + dims=3, + use_scale_shift_norm=False, + use_conv=False, + up=False, + down=False, + kernel_size=video_kernel_size, + use_checkpoint=False, + skip_t_emb=True, + ) + + self.merge_strategy = merge_strategy + if self.merge_strategy == "fixed": + self.register_buffer("mix_factor", torch.Tensor([alpha])) + elif self.merge_strategy == "learned": + self.register_parameter( + "mix_factor", torch.nn.Parameter(torch.Tensor([alpha])) + ) + else: + raise ValueError(f"unknown merge strategy {self.merge_strategy}") + + def get_alpha(self, bs): + if self.merge_strategy == "fixed": + return self.mix_factor + elif self.merge_strategy == "learned": + return torch.sigmoid(self.mix_factor) + else: + raise NotImplementedError() + + def forward(self, x, temb, skip_video=False, timesteps=None): + if timesteps is None: + timesteps = self.timesteps + + b, c, h, w = x.shape + + x = super().forward(x, temb) + + if not skip_video: + x_mix = rearrange(x, "(b t) c h w -> b c t h w", t=timesteps) + + x = rearrange(x, "(b t) c h w -> b c t h w", t=timesteps) + + x = self.time_stack(x, temb) + + alpha = self.get_alpha(bs=b // timesteps) + x = alpha * x + (1.0 - alpha) * x_mix + + x = rearrange(x, "b c t h w -> (b t) c h w") + return x + +class AE3DConv(torch.nn.Conv2d): + def __init__(self, in_channels, out_channels, video_kernel_size=3, *args, **kwargs): + super().__init__(in_channels, out_channels, *args, **kwargs) + if isinstance(video_kernel_size, Iterable): + padding = [int(k // 2) for k in video_kernel_size] + else: + padding = int(video_kernel_size // 2) + + self.time_mix_conv = torch.nn.Conv3d( + in_channels=out_channels, + out_channels=out_channels, + kernel_size=video_kernel_size, + padding=padding, + ) + + def forward(self, input, timesteps, skip_video=False): + x = super().forward(input) + if skip_video: + return x + x = rearrange(x, "(b t) c h w -> b c t h w", t=timesteps) + x = self.time_mix_conv(x) + return rearrange(x, "b c t h w -> (b t) c h w") + +def normalization(channels): + """ + Make a standard normalization layer. + :param channels: number of input channels. + :return: an nn.Module for normalization. + """ + return GroupNorm32(32, channels) + +class SiLU(nn.Module): + def forward(self, x): + return x * torch.sigmoid(x) + +class GroupNorm32(nn.GroupNorm): + def forward(self, x): + return super().forward(x.float()).type(x.dtype) + +def conv_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D convolution module. + """ + if dims == 1: + return nn.Conv1d(*args, **kwargs) + elif dims == 2: + return nn.Conv2d(*args, **kwargs) + elif dims == 3: + return nn.Conv3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + +def zero_module(module): + """ + Zero out the parameters of a module and return it. 
+ """ + for p in module.parameters(): + p.detach().zero_() + return module diff --git a/ComfyUI_ExtraModels/VAE/models/vq.py b/ComfyUI_ExtraModels/VAE/models/vq.py new file mode 100644 index 0000000000000000000000000000000000000000..402de21571e91e52511c82edcd2d0aff745e248b --- /dev/null +++ b/ComfyUI_ExtraModels/VAE/models/vq.py @@ -0,0 +1,164 @@ +import torch +import numpy as np +from torch import nn +from einops import rearrange + +from .kl import Encoder, Decoder + +class VQModel(nn.Module): + def __init__(self, + config, + remap=None, + sane_index_shape=False, # tell vector quantizer to return indices as bhw + ): + super().__init__() + self.embed_dim = config["embed_dim"] + self.n_embed = config["n_embed"] + self.encoder = Encoder(**config) + self.decoder = Decoder(**config) + self.quantize = VectorQuantizer(self.n_embed, self.embed_dim, beta=0.25, + remap=remap, + sane_index_shape=sane_index_shape) + self.quant_conv = torch.nn.Conv2d(config["z_channels"], self.embed_dim, 1) + self.post_quant_conv = torch.nn.Conv2d(self.embed_dim, config["z_channels"], 1) + + def encode(self, x): + h = self.encoder(x) + h = self.quant_conv(h) + return h + + def decode(self, h, force_not_quantize=False): + # also go through quantization layer + if not force_not_quantize: + quant, emb_loss, info = self.quantize(h) + else: + quant = h + quant = self.post_quant_conv(quant) + dec = self.decoder(quant) + return dec + + def forward(self, input, return_pred_indices=False): + quant, diff, (_,_,ind) = self.encode(input) + dec = self.decode(quant) + if return_pred_indices: + return dec, diff, ind + return dec, diff + + +class VectorQuantizer(nn.Module): + """ + Improved version over VectorQuantizer, can be used as a drop-in replacement. Mostly + avoids costly matrix multiplications and allows for post-hoc remapping of indices. + """ + # NOTE: due to a bug the beta term was applied to the wrong term. for + # backwards compatibility we use the buggy version by default, but you can + # specify legacy=False to fix it. + def __init__(self, n_e, e_dim, beta, remap=None, unknown_index="random", + sane_index_shape=False, legacy=True): + super().__init__() + self.n_e = n_e + self.e_dim = e_dim + self.beta = beta + self.legacy = legacy + + self.embedding = nn.Embedding(self.n_e, self.e_dim) + self.embedding.weight.data.uniform_(-1.0 / self.n_e, 1.0 / self.n_e) + + self.remap = remap + if self.remap is not None: + self.register_buffer("used", torch.tensor(np.load(self.remap))) + self.re_embed = self.used.shape[0] + self.unknown_index = unknown_index # "random" or "extra" or integer + if self.unknown_index == "extra": + self.unknown_index = self.re_embed + self.re_embed = self.re_embed+1 + print(f"Remapping {self.n_e} indices to {self.re_embed} indices. 
" + f"Using {self.unknown_index} for unknown indices.") + else: + self.re_embed = n_e + + self.sane_index_shape = sane_index_shape + + def remap_to_used(self, inds): + ishape = inds.shape + assert len(ishape)>1 + inds = inds.reshape(ishape[0],-1) + used = self.used.to(inds) + match = (inds[:,:,None]==used[None,None,...]).long() + new = match.argmax(-1) + unknown = match.sum(2)<1 + if self.unknown_index == "random": + new[unknown]=torch.randint(0,self.re_embed,size=new[unknown].shape).to(device=new.device) + else: + new[unknown] = self.unknown_index + return new.reshape(ishape) + + def unmap_to_all(self, inds): + ishape = inds.shape + assert len(ishape)>1 + inds = inds.reshape(ishape[0],-1) + used = self.used.to(inds) + if self.re_embed > self.used.shape[0]: # extra token + inds[inds>=self.used.shape[0]] = 0 # simply set to zero + back=torch.gather(used[None,:][inds.shape[0]*[0],:], 1, inds) + return back.reshape(ishape) + + def forward(self, z, temp=None, rescale_logits=False, return_logits=False): + assert temp is None or temp==1.0, "Only for interface compatible with Gumbel" + assert rescale_logits==False, "Only for interface compatible with Gumbel" + assert return_logits==False, "Only for interface compatible with Gumbel" + # reshape z -> (batch, height, width, channel) and flatten + z = rearrange(z, 'b c h w -> b h w c').contiguous() + z_flattened = z.view(-1, self.e_dim) + # distances from z to embeddings e_j (z - e)^2 = z^2 + e^2 - 2 e * z + + d = torch.sum(z_flattened ** 2, dim=1, keepdim=True) + \ + torch.sum(self.embedding.weight**2, dim=1) - 2 * \ + torch.einsum('bd,dn->bn', z_flattened, rearrange(self.embedding.weight, 'n d -> d n')) + + min_encoding_indices = torch.argmin(d, dim=1) + z_q = self.embedding(min_encoding_indices).view(z.shape) + perplexity = None + min_encodings = None + + # compute loss for embedding + if not self.legacy: + loss = self.beta * torch.mean((z_q.detach()-z)**2) + \ + torch.mean((z_q - z.detach()) ** 2) + else: + loss = torch.mean((z_q.detach()-z)**2) + self.beta * \ + torch.mean((z_q - z.detach()) ** 2) + + # preserve gradients + z_q = z + (z_q - z).detach() + + # reshape back to match original input shape + z_q = rearrange(z_q, 'b h w c -> b c h w').contiguous() + + if self.remap is not None: + min_encoding_indices = min_encoding_indices.reshape(z.shape[0],-1) # add batch axis + min_encoding_indices = self.remap_to_used(min_encoding_indices) + min_encoding_indices = min_encoding_indices.reshape(-1,1) # flatten + + if self.sane_index_shape: + min_encoding_indices = min_encoding_indices.reshape( + z_q.shape[0], z_q.shape[2], z_q.shape[3]) + + return z_q, loss, (perplexity, min_encodings, min_encoding_indices) + + def get_codebook_entry(self, indices, shape): + # shape specifying (batch, height, width, channel) + if self.remap is not None: + indices = indices.reshape(shape[0],-1) # add batch axis + indices = self.unmap_to_all(indices) + indices = indices.reshape(-1) # flatten again + + # get quantized latent vectors + z_q = self.embedding(indices) + + if shape is not None: + z_q = z_q.view(shape) + # reshape back to match original input shape + z_q = z_q.permute(0, 3, 1, 2).contiguous() + + return z_q diff --git a/ComfyUI_ExtraModels/VAE/nodes.py b/ComfyUI_ExtraModels/VAE/nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..5c114ea2b0871aacd8547c658f582d7610c92491 --- /dev/null +++ b/ComfyUI_ExtraModels/VAE/nodes.py @@ -0,0 +1,38 @@ +import folder_paths + +from .conf import vae_conf +from .loader import EXVAE + +from 
..utils.dtype import string_to_dtype + +dtypes = [ + "auto", + "FP32", + "FP16", + "BF16" +] + +class ExtraVAELoader: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "vae_name": (folder_paths.get_filename_list("vae"),), + "vae_type": (list(vae_conf.keys()), {"default":"kl-f8"}), + "dtype" : (dtypes,), + } + } + RETURN_TYPES = ("VAE",) + FUNCTION = "load_vae" + CATEGORY = "ExtraModels" + TITLE = "ExtraVAELoader" + + def load_vae(self, vae_name, vae_type, dtype): + model_path = folder_paths.get_full_path("vae", vae_name) + model_conf = vae_conf[vae_type] + vae = EXVAE(model_path, model_conf, string_to_dtype(dtype, "vae")) + return (vae,) + +NODE_CLASS_MAPPINGS = { + "ExtraVAELoader" : ExtraVAELoader, +} diff --git a/ComfyUI_ExtraModels/__init__.py b/ComfyUI_ExtraModels/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b348dc6f0ef4fe96a1e7409f138fc7abea998cec --- /dev/null +++ b/ComfyUI_ExtraModels/__init__.py @@ -0,0 +1,38 @@ +# only import if running as a custom node +try: + import comfy.utils +except ImportError: + pass +else: + NODE_CLASS_MAPPINGS = {} + + # Deci Diffusion + # from .DeciDiffusion.nodes import NODE_CLASS_MAPPINGS as DeciDiffusion_Nodes + # NODE_CLASS_MAPPINGS.update(DeciDiffusion_Nodes) + + # DiT + from .DiT.nodes import NODE_CLASS_MAPPINGS as DiT_Nodes + NODE_CLASS_MAPPINGS.update(DiT_Nodes) + + # PixArt + from .PixArt.nodes import NODE_CLASS_MAPPINGS as PixArt_Nodes + NODE_CLASS_MAPPINGS.update(PixArt_Nodes) + + # T5 + from .T5.nodes import NODE_CLASS_MAPPINGS as T5_Nodes + NODE_CLASS_MAPPINGS.update(T5_Nodes) + + # HYDiT + from .HunYuanDiT.nodes import NODE_CLASS_MAPPINGS as HunYuanDiT_Nodes + NODE_CLASS_MAPPINGS.update(HunYuanDiT_Nodes) + + # VAE + from .VAE.nodes import NODE_CLASS_MAPPINGS as VAE_Nodes + NODE_CLASS_MAPPINGS.update(VAE_Nodes) + + # MiaoBi + from .MiaoBi.nodes import NODE_CLASS_MAPPINGS as MiaoBi_Nodes + NODE_CLASS_MAPPINGS.update(MiaoBi_Nodes) + + NODE_DISPLAY_NAME_MAPPINGS = {k:v.TITLE for k,v in NODE_CLASS_MAPPINGS.items()} + __all__ = ['NODE_CLASS_MAPPINGS', 'NODE_DISPLAY_NAME_MAPPINGS'] diff --git a/ComfyUI_ExtraModels/requirements.txt b/ComfyUI_ExtraModels/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..71a73a67b52c8d561c70160853091ca32c6b0538 --- /dev/null +++ b/ComfyUI_ExtraModels/requirements.txt @@ -0,0 +1,6 @@ +timm>=0.6.13 +sentencepiece>=0.1.97 +transformers>=4.34.1 +accelerate>=0.23.0 +einops>=0.6.0 +protobuf>=3.20.3 diff --git a/ComfyUI_ExtraModels/utils/IPEX/attention.py b/ComfyUI_ExtraModels/utils/IPEX/attention.py new file mode 100644 index 0000000000000000000000000000000000000000..92da8d7e0d3922bbcac4059f97530740925c89dc --- /dev/null +++ b/ComfyUI_ExtraModels/utils/IPEX/attention.py @@ -0,0 +1,180 @@ +# Code lifted from https://github.com/Disty0/ipex_to_cuda/blob/main/attention.py +# Thanks to Disty0! 
+ +import os +import torch +import intel_extension_for_pytorch as ipex # pylint: disable=import-error, unused-import +from functools import cache + +# pylint: disable=protected-access, missing-function-docstring, line-too-long + +# ARC GPUs can't allocate more than 4GB to a single block so we slice the attention layers + +sdpa_slice_trigger_rate = float(os.environ.get('IPEX_SDPA_SLICE_TRIGGER_RATE', 6)) +attention_slice_rate = float(os.environ.get('IPEX_ATTENTION_SLICE_RATE', 4)) + +# Find something divisible by the input_tokens +@cache +def find_slice_size(slice_size, slice_block_size): + while (slice_size * slice_block_size) > attention_slice_rate: + slice_size = slice_size // 2 + if slice_size <= 1: + slice_size = 1 + break + return slice_size + +# Find slice sizes for SDPA +@cache +def find_sdpa_slice_sizes(query_shape, query_element_size): + if len(query_shape) == 3: + batch_size_attention, query_tokens, shape_three = query_shape + shape_four = 1 + else: + batch_size_attention, query_tokens, shape_three, shape_four = query_shape + + slice_block_size = query_tokens * shape_three * shape_four / 1024 / 1024 * query_element_size + block_size = batch_size_attention * slice_block_size + + split_slice_size = batch_size_attention + split_2_slice_size = query_tokens + split_3_slice_size = shape_three + + do_split = False + do_split_2 = False + do_split_3 = False + + if block_size > sdpa_slice_trigger_rate: + do_split = True + split_slice_size = find_slice_size(split_slice_size, slice_block_size) + if split_slice_size * slice_block_size > attention_slice_rate: + slice_2_block_size = split_slice_size * shape_three * shape_four / 1024 / 1024 * query_element_size + do_split_2 = True + split_2_slice_size = find_slice_size(split_2_slice_size, slice_2_block_size) + if split_2_slice_size * slice_2_block_size > attention_slice_rate: + slice_3_block_size = split_slice_size * split_2_slice_size * shape_four / 1024 / 1024 * query_element_size + do_split_3 = True + split_3_slice_size = find_slice_size(split_3_slice_size, slice_3_block_size) + + return do_split, do_split_2, do_split_3, split_slice_size, split_2_slice_size, split_3_slice_size + +# Find slice sizes for BMM +@cache +def find_bmm_slice_sizes(input_shape, input_element_size, mat2_shape): + batch_size_attention, input_tokens, mat2_atten_shape = input_shape[0], input_shape[1], mat2_shape[2] + slice_block_size = input_tokens * mat2_atten_shape / 1024 / 1024 * input_element_size + block_size = batch_size_attention * slice_block_size + + split_slice_size = batch_size_attention + split_2_slice_size = input_tokens + split_3_slice_size = mat2_atten_shape + + do_split = False + do_split_2 = False + do_split_3 = False + + if block_size > attention_slice_rate: + do_split = True + split_slice_size = find_slice_size(split_slice_size, slice_block_size) + if split_slice_size * slice_block_size > attention_slice_rate: + slice_2_block_size = split_slice_size * mat2_atten_shape / 1024 / 1024 * input_element_size + do_split_2 = True + split_2_slice_size = find_slice_size(split_2_slice_size, slice_2_block_size) + if split_2_slice_size * slice_2_block_size > attention_slice_rate: + slice_3_block_size = split_slice_size * split_2_slice_size / 1024 / 1024 * input_element_size + do_split_3 = True + split_3_slice_size = find_slice_size(split_3_slice_size, slice_3_block_size) + + return do_split, do_split_2, do_split_3, split_slice_size, split_2_slice_size, split_3_slice_size + + +original_torch_bmm = torch.bmm +def torch_bmm_32_bit(input, mat2, *, out=None): + if 
input.device.type != "xpu": + return original_torch_bmm(input, mat2, out=out) + do_split, do_split_2, do_split_3, split_slice_size, split_2_slice_size, split_3_slice_size = find_bmm_slice_sizes(input.shape, input.element_size(), mat2.shape) + + # Slice BMM + if do_split: + batch_size_attention, input_tokens, mat2_atten_shape = input.shape[0], input.shape[1], mat2.shape[2] + hidden_states = torch.zeros(input.shape[0], input.shape[1], mat2.shape[2], device=input.device, dtype=input.dtype) + for i in range(batch_size_attention // split_slice_size): + start_idx = i * split_slice_size + end_idx = (i + 1) * split_slice_size + if do_split_2: + for i2 in range(input_tokens // split_2_slice_size): # pylint: disable=invalid-name + start_idx_2 = i2 * split_2_slice_size + end_idx_2 = (i2 + 1) * split_2_slice_size + if do_split_3: + for i3 in range(mat2_atten_shape // split_3_slice_size): # pylint: disable=invalid-name + start_idx_3 = i3 * split_3_slice_size + end_idx_3 = (i3 + 1) * split_3_slice_size + hidden_states[start_idx:end_idx, start_idx_2:end_idx_2, start_idx_3:end_idx_3] = original_torch_bmm( + input[start_idx:end_idx, start_idx_2:end_idx_2, start_idx_3:end_idx_3], + mat2[start_idx:end_idx, start_idx_2:end_idx_2, start_idx_3:end_idx_3], + out=out + ) + else: + hidden_states[start_idx:end_idx, start_idx_2:end_idx_2] = original_torch_bmm( + input[start_idx:end_idx, start_idx_2:end_idx_2], + mat2[start_idx:end_idx, start_idx_2:end_idx_2], + out=out + ) + else: + hidden_states[start_idx:end_idx] = original_torch_bmm( + input[start_idx:end_idx], + mat2[start_idx:end_idx], + out=out + ) + torch.xpu.synchronize(input.device) + else: + return original_torch_bmm(input, mat2, out=out) + return hidden_states + +original_scaled_dot_product_attention = torch.nn.functional.scaled_dot_product_attention +def scaled_dot_product_attention_32_bit(query, key, value, attn_mask=None, dropout_p=0.0, is_causal=False, **kwargs): + if query.device.type != "xpu": + return original_scaled_dot_product_attention(query, key, value, attn_mask=attn_mask, dropout_p=dropout_p, is_causal=is_causal, **kwargs) + do_split, do_split_2, do_split_3, split_slice_size, split_2_slice_size, split_3_slice_size = find_sdpa_slice_sizes(query.shape, query.element_size()) + + # Slice SDPA + if do_split: + batch_size_attention, query_tokens, shape_three = query.shape[0], query.shape[1], query.shape[2] + hidden_states = torch.zeros(query.shape, device=query.device, dtype=query.dtype) + for i in range(batch_size_attention // split_slice_size): + start_idx = i * split_slice_size + end_idx = (i + 1) * split_slice_size + if do_split_2: + for i2 in range(query_tokens // split_2_slice_size): # pylint: disable=invalid-name + start_idx_2 = i2 * split_2_slice_size + end_idx_2 = (i2 + 1) * split_2_slice_size + if do_split_3: + for i3 in range(shape_three // split_3_slice_size): # pylint: disable=invalid-name + start_idx_3 = i3 * split_3_slice_size + end_idx_3 = (i3 + 1) * split_3_slice_size + hidden_states[start_idx:end_idx, start_idx_2:end_idx_2, start_idx_3:end_idx_3] = original_scaled_dot_product_attention( + query[start_idx:end_idx, start_idx_2:end_idx_2, start_idx_3:end_idx_3], + key[start_idx:end_idx, start_idx_2:end_idx_2, start_idx_3:end_idx_3], + value[start_idx:end_idx, start_idx_2:end_idx_2, start_idx_3:end_idx_3], + attn_mask=attn_mask[start_idx:end_idx, start_idx_2:end_idx_2, start_idx_3:end_idx_3] if attn_mask is not None else attn_mask, + dropout_p=dropout_p, is_causal=is_causal, **kwargs + ) + else: + hidden_states[start_idx:end_idx, 
start_idx_2:end_idx_2] = original_scaled_dot_product_attention( + query[start_idx:end_idx, start_idx_2:end_idx_2], + key[start_idx:end_idx, start_idx_2:end_idx_2], + value[start_idx:end_idx, start_idx_2:end_idx_2], + attn_mask=attn_mask[start_idx:end_idx, start_idx_2:end_idx_2] if attn_mask is not None else attn_mask, + dropout_p=dropout_p, is_causal=is_causal, **kwargs + ) + else: + hidden_states[start_idx:end_idx] = original_scaled_dot_product_attention( + query[start_idx:end_idx], + key[start_idx:end_idx], + value[start_idx:end_idx], + attn_mask=attn_mask[start_idx:end_idx] if attn_mask is not None else attn_mask, + dropout_p=dropout_p, is_causal=is_causal, **kwargs + ) + torch.xpu.synchronize(query.device) + else: + return original_scaled_dot_product_attention(query, key, value, attn_mask=attn_mask, dropout_p=dropout_p, is_causal=is_causal, **kwargs) + return hidden_states \ No newline at end of file diff --git a/ComfyUI_ExtraModels/utils/dtype.py b/ComfyUI_ExtraModels/utils/dtype.py new file mode 100644 index 0000000000000000000000000000000000000000..a1ffeb9d9d0e35777b9da38005e78b0fdd1ae17a --- /dev/null +++ b/ComfyUI_ExtraModels/utils/dtype.py @@ -0,0 +1,38 @@ +import torch +from comfy import model_management + +def string_to_dtype(s="none", mode=None): + s = s.lower().strip() + if s in ["default", "as-is"]: + return None + elif s in ["auto", "auto (comfy)"]: + if mode == "vae": + return model_management.vae_dtype() + elif mode == "text_encoder": + return model_management.text_encoder_dtype() + elif mode == "unet": + return model_management.unet_dtype() + else: + raise NotImplementedError(f"Unknown dtype mode '{mode}'") + elif s in ["none", "auto (hf)", "auto (hf/bnb)"]: + return None + elif s in ["fp32", "float32", "float"]: + return torch.float32 + elif s in ["bf16", "bfloat16"]: + return torch.bfloat16 + elif s in ["fp16", "float16", "half"]: + return torch.float16 + elif "fp8" in s or "float8" in s: + if "e5m2" in s: + return torch.float8_e5m2 + elif "e4m3" in s: + return torch.float8_e4m3fn + else: + raise NotImplementedError(f"Unknown 8bit dtype '{s}'") + elif "bnb" in s: + assert s in ["bnb8bit", "bnb4bit"], f"Unknown bnb mode '{s}'" + return s + elif s is None: + return None + else: + raise NotImplementedError(f"Unknown dtype '{s}'") diff --git a/ComfyUI_IPAdapter_plus/CrossAttentionPatch.py b/ComfyUI_IPAdapter_plus/CrossAttentionPatch.py new file mode 100644 index 0000000000000000000000000000000000000000..0f5c8e94aabe32b1fdca72458062c79664e19002 --- /dev/null +++ b/ComfyUI_IPAdapter_plus/CrossAttentionPatch.py @@ -0,0 +1,209 @@ +import torch +import math +import torch.nn.functional as F +from comfy.ldm.modules.attention import optimized_attention +from .utils import tensor_to_size + +class Attn2Replace: + def __init__(self, callback=None, **kwargs): + self.callback = [callback] + self.kwargs = [kwargs] + + def add(self, callback, **kwargs): + self.callback.append(callback) + self.kwargs.append(kwargs) + + for key, value in kwargs.items(): + setattr(self, key, value) + + def __call__(self, q, k, v, extra_options): + dtype = q.dtype + out = optimized_attention(q, k, v, extra_options["n_heads"]) + sigma = extra_options["sigmas"].detach().cpu()[0].item() if 'sigmas' in extra_options else 999999999.9 + + for i, callback in enumerate(self.callback): + if sigma <= self.kwargs[i]["sigma_start"] and sigma >= self.kwargs[i]["sigma_end"]: + out = out + callback(out, q, k, v, extra_options, **self.kwargs[i]) + + return out.to(dtype=dtype) + +def ipadapter_attention(out, q, k, v, 
extra_options, module_key='', ipadapter=None, weight=1.0, cond=None, cond_alt=None, uncond=None, weight_type="linear", mask=None, sigma_start=0.0, sigma_end=1.0, unfold_batch=False, embeds_scaling='V only', **kwargs): + dtype = q.dtype + cond_or_uncond = extra_options["cond_or_uncond"] + block_type = extra_options["block"][0] + #block_id = extra_options["block"][1] + t_idx = extra_options["transformer_index"] + layers = 11 if '101_to_k_ip' in ipadapter.ip_layers.to_kvs else 16 + k_key = module_key + "_to_k_ip" + v_key = module_key + "_to_v_ip" + + # extra options for AnimateDiff + ad_params = extra_options['ad_params'] if "ad_params" in extra_options else None + + b = q.shape[0] + seq_len = q.shape[1] + batch_prompt = b // len(cond_or_uncond) + _, _, oh, ow = extra_options["original_shape"] + + if weight_type == 'ease in': + weight = weight * (0.05 + 0.95 * (1 - t_idx / layers)) + elif weight_type == 'ease out': + weight = weight * (0.05 + 0.95 * (t_idx / layers)) + elif weight_type == 'ease in-out': + weight = weight * (0.05 + 0.95 * (1 - abs(t_idx - (layers/2)) / (layers/2))) + elif weight_type == 'reverse in-out': + weight = weight * (0.05 + 0.95 * (abs(t_idx - (layers/2)) / (layers/2))) + elif weight_type == 'weak input' and block_type == 'input': + weight = weight * 0.2 + elif weight_type == 'weak middle' and block_type == 'middle': + weight = weight * 0.2 + elif weight_type == 'weak output' and block_type == 'output': + weight = weight * 0.2 + elif weight_type == 'strong middle' and (block_type == 'input' or block_type == 'output'): + weight = weight * 0.2 + elif isinstance(weight, dict): + if t_idx not in weight: + return 0 + + if weight_type == "style transfer precise": + if layers == 11 and t_idx == 3: + uncond = cond + cond = cond * 0 + elif layers == 16 and (t_idx == 4 or t_idx == 5): + uncond = cond + cond = cond * 0 + elif weight_type == "composition precise": + if layers == 11 and t_idx != 3: + uncond = cond + cond = cond * 0 + elif layers == 16 and (t_idx != 4 and t_idx != 5): + uncond = cond + cond = cond * 0 + + weight = weight[t_idx] + + if cond_alt is not None and t_idx in cond_alt: + cond = cond_alt[t_idx] + del cond_alt + + if unfold_batch: + # Check AnimateDiff context window + if ad_params is not None and ad_params["sub_idxs"] is not None: + if isinstance(weight, torch.Tensor): + weight = tensor_to_size(weight, ad_params["full_length"]) + weight = torch.Tensor(weight[ad_params["sub_idxs"]]) + if torch.all(weight == 0): + return 0 + weight = weight.repeat(len(cond_or_uncond), 1, 1) # repeat for cond and uncond + elif weight == 0: + return 0 + + # if image length matches or exceeds full_length get sub_idx images + if cond.shape[0] >= ad_params["full_length"]: + cond = torch.Tensor(cond[ad_params["sub_idxs"]]) + uncond = torch.Tensor(uncond[ad_params["sub_idxs"]]) + # otherwise get sub_idxs images + else: + cond = tensor_to_size(cond, ad_params["full_length"]) + uncond = tensor_to_size(uncond, ad_params["full_length"]) + cond = cond[ad_params["sub_idxs"]] + uncond = uncond[ad_params["sub_idxs"]] + else: + if isinstance(weight, torch.Tensor): + weight = tensor_to_size(weight, batch_prompt) + if torch.all(weight == 0): + return 0 + weight = weight.repeat(len(cond_or_uncond), 1, 1) # repeat for cond and uncond + elif weight == 0: + return 0 + + cond = tensor_to_size(cond, batch_prompt) + uncond = tensor_to_size(uncond, batch_prompt) + + k_cond = ipadapter.ip_layers.to_kvs[k_key](cond) + k_uncond = ipadapter.ip_layers.to_kvs[k_key](uncond) + v_cond = 
ipadapter.ip_layers.to_kvs[v_key](cond) + v_uncond = ipadapter.ip_layers.to_kvs[v_key](uncond) + else: + # TODO: should we always convert the weights to a tensor? + if isinstance(weight, torch.Tensor): + weight = tensor_to_size(weight, batch_prompt) + if torch.all(weight == 0): + return 0 + weight = weight.repeat(len(cond_or_uncond), 1, 1) # repeat for cond and uncond + elif weight == 0: + return 0 + + k_cond = ipadapter.ip_layers.to_kvs[k_key](cond).repeat(batch_prompt, 1, 1) + k_uncond = ipadapter.ip_layers.to_kvs[k_key](uncond).repeat(batch_prompt, 1, 1) + v_cond = ipadapter.ip_layers.to_kvs[v_key](cond).repeat(batch_prompt, 1, 1) + v_uncond = ipadapter.ip_layers.to_kvs[v_key](uncond).repeat(batch_prompt, 1, 1) + + if len(cond_or_uncond) == 3: # TODO: cosxl, I need to check this + ip_k = torch.cat([(k_cond, k_uncond, k_cond)[i] for i in cond_or_uncond], dim=0) + ip_v = torch.cat([(v_cond, v_uncond, v_cond)[i] for i in cond_or_uncond], dim=0) + else: + ip_k = torch.cat([(k_cond, k_uncond)[i] for i in cond_or_uncond], dim=0) + ip_v = torch.cat([(v_cond, v_uncond)[i] for i in cond_or_uncond], dim=0) + + if embeds_scaling == 'K+mean(V) w/ C penalty': + scaling = float(ip_k.shape[2]) / 1280.0 + weight = weight * scaling + ip_k = ip_k * weight + ip_v_mean = torch.mean(ip_v, dim=1, keepdim=True) + ip_v = (ip_v - ip_v_mean) + ip_v_mean * weight + out_ip = optimized_attention(q, ip_k, ip_v, extra_options["n_heads"]) + del ip_v_mean + elif embeds_scaling == 'K+V w/ C penalty': + scaling = float(ip_k.shape[2]) / 1280.0 + weight = weight * scaling + ip_k = ip_k * weight + ip_v = ip_v * weight + out_ip = optimized_attention(q, ip_k, ip_v, extra_options["n_heads"]) + elif embeds_scaling == 'K+V': + ip_k = ip_k * weight + ip_v = ip_v * weight + out_ip = optimized_attention(q, ip_k, ip_v, extra_options["n_heads"]) + else: + #ip_v = ip_v * weight + out_ip = optimized_attention(q, ip_k, ip_v, extra_options["n_heads"]) + out_ip = out_ip * weight # I'm doing this to get the same results as before + + if mask is not None: + mask_h = oh / math.sqrt(oh * ow / seq_len) + mask_h = int(mask_h) + int((seq_len % int(mask_h)) != 0) + mask_w = seq_len // mask_h + + # check if using AnimateDiff and sliding context window + if (mask.shape[0] > 1 and ad_params is not None and ad_params["sub_idxs"] is not None): + # if mask length matches or exceeds full_length, get sub_idx masks + if mask.shape[0] >= ad_params["full_length"]: + mask = torch.Tensor(mask[ad_params["sub_idxs"]]) + mask = F.interpolate(mask.unsqueeze(1), size=(mask_h, mask_w), mode="bilinear").squeeze(1) + else: + mask = F.interpolate(mask.unsqueeze(1), size=(mask_h, mask_w), mode="bilinear").squeeze(1) + mask = tensor_to_size(mask, ad_params["full_length"]) + mask = mask[ad_params["sub_idxs"]] + else: + mask = F.interpolate(mask.unsqueeze(1), size=(mask_h, mask_w), mode="bilinear").squeeze(1) + mask = tensor_to_size(mask, batch_prompt) + + mask = mask.repeat(len(cond_or_uncond), 1, 1) + mask = mask.view(mask.shape[0], -1, 1).repeat(1, 1, out.shape[2]) + + # covers cases where extreme aspect ratios can cause the mask to have a wrong size + mask_len = mask_h * mask_w + if mask_len < seq_len: + pad_len = seq_len - mask_len + pad1 = pad_len // 2 + pad2 = pad_len - pad1 + mask = F.pad(mask, (0, 0, pad1, pad2), value=0.0) + elif mask_len > seq_len: + crop_start = (mask_len - seq_len) // 2 + mask = mask[:, crop_start:crop_start+seq_len, :] + + out_ip = out_ip * mask + + #out = out + out_ip + + return out_ip.to(dtype=dtype) diff --git 
a/ComfyUI_IPAdapter_plus/IPAdapterPlus.py b/ComfyUI_IPAdapter_plus/IPAdapterPlus.py new file mode 100644 index 0000000000000000000000000000000000000000..ede8c79aa41433425f90040c72399abf5cedf7b3 --- /dev/null +++ b/ComfyUI_IPAdapter_plus/IPAdapterPlus.py @@ -0,0 +1,2013 @@ +import torch +import os +import math +import folder_paths + +import comfy.model_management as model_management +from node_helpers import conditioning_set_values +from comfy.clip_vision import load as load_clip_vision +from comfy.sd import load_lora_for_models +import comfy.utils + +import torch.nn as nn +from PIL import Image +try: + import torchvision.transforms.v2 as T +except ImportError: + import torchvision.transforms as T + +from .image_proj_models import MLPProjModel, MLPProjModelFaceId, ProjModelFaceIdPlus, Resampler, ImageProjModel +from .CrossAttentionPatch import Attn2Replace, ipadapter_attention +from .utils import ( + encode_image_masked, + tensor_to_size, + contrast_adaptive_sharpening, + tensor_to_image, + image_to_tensor, + ipadapter_model_loader, + insightface_loader, + get_clipvision_file, + get_ipadapter_file, + get_lora_file, +) + +# set the models directory +if "ipadapter" not in folder_paths.folder_names_and_paths: + current_paths = [os.path.join(folder_paths.models_dir, "ipadapter")] +else: + current_paths, _ = folder_paths.folder_names_and_paths["ipadapter"] +folder_paths.folder_names_and_paths["ipadapter"] = (current_paths, folder_paths.supported_pt_extensions) + +WEIGHT_TYPES = ["linear", "ease in", "ease out", 'ease in-out', 'reverse in-out', 'weak input', 'weak output', 'weak middle', 'strong middle', 'style transfer', 'composition', 'strong style transfer', 'style and composition', 'style transfer precise', 'composition precise'] + +""" +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Main IPAdapter Class +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +class IPAdapter(nn.Module): + def __init__(self, ipadapter_model, cross_attention_dim=1024, output_cross_attention_dim=1024, clip_embeddings_dim=1024, clip_extra_context_tokens=4, is_sdxl=False, is_plus=False, is_full=False, is_faceid=False, is_portrait_unnorm=False, is_kwai_kolors=False, encoder_hid_proj=None, weight_kolors=1.0): + super().__init__() + + self.clip_embeddings_dim = clip_embeddings_dim + self.cross_attention_dim = cross_attention_dim + self.output_cross_attention_dim = output_cross_attention_dim + self.clip_extra_context_tokens = clip_extra_context_tokens + self.is_sdxl = is_sdxl + self.is_full = is_full + self.is_plus = is_plus + self.is_portrait_unnorm = is_portrait_unnorm + self.is_kwai_kolors = is_kwai_kolors + + if is_faceid and not is_portrait_unnorm: + self.image_proj_model = self.init_proj_faceid() + elif is_full: + self.image_proj_model = self.init_proj_full() + elif is_plus or is_portrait_unnorm: + self.image_proj_model = self.init_proj_plus() + else: + self.image_proj_model = self.init_proj() + + self.image_proj_model.load_state_dict(ipadapter_model["image_proj"]) + self.ip_layers = To_KV(ipadapter_model["ip_adapter"], encoder_hid_proj=encoder_hid_proj, weight_kolors=weight_kolors) + + def init_proj(self): + image_proj_model = ImageProjModel( + cross_attention_dim=self.cross_attention_dim, + clip_embeddings_dim=self.clip_embeddings_dim, + clip_extra_context_tokens=self.clip_extra_context_tokens + ) + return image_proj_model + + def init_proj_plus(self): + image_proj_model = Resampler( + dim=self.cross_attention_dim, + depth=4, + dim_head=64, + heads=20 if 
self.is_sdxl and not self.is_kwai_kolors else 12, + num_queries=self.clip_extra_context_tokens, + embedding_dim=self.clip_embeddings_dim, + output_dim=self.output_cross_attention_dim, + ff_mult=4 + ) + return image_proj_model + + def init_proj_full(self): + image_proj_model = MLPProjModel( + cross_attention_dim=self.cross_attention_dim, + clip_embeddings_dim=self.clip_embeddings_dim + ) + return image_proj_model + + def init_proj_faceid(self): + if self.is_plus: + image_proj_model = ProjModelFaceIdPlus( + cross_attention_dim=self.cross_attention_dim, + id_embeddings_dim=512, + clip_embeddings_dim=self.clip_embeddings_dim, + num_tokens=self.clip_extra_context_tokens, + ) + else: + image_proj_model = MLPProjModelFaceId( + cross_attention_dim=self.cross_attention_dim, + id_embeddings_dim=512, + num_tokens=self.clip_extra_context_tokens, + ) + return image_proj_model + + @torch.inference_mode() + def get_image_embeds(self, clip_embed, clip_embed_zeroed, batch_size): + torch_device = model_management.get_torch_device() + intermediate_device = model_management.intermediate_device() + + if batch_size == 0: + batch_size = clip_embed.shape[0] + intermediate_device = torch_device + elif batch_size > clip_embed.shape[0]: + batch_size = clip_embed.shape[0] + + clip_embed = torch.split(clip_embed, batch_size, dim=0) + clip_embed_zeroed = torch.split(clip_embed_zeroed, batch_size, dim=0) + + image_prompt_embeds = [] + uncond_image_prompt_embeds = [] + + for ce, cez in zip(clip_embed, clip_embed_zeroed): + image_prompt_embeds.append(self.image_proj_model(ce.to(torch_device)).to(intermediate_device)) + uncond_image_prompt_embeds.append(self.image_proj_model(cez.to(torch_device)).to(intermediate_device)) + + del clip_embed, clip_embed_zeroed + + image_prompt_embeds = torch.cat(image_prompt_embeds, dim=0) + uncond_image_prompt_embeds = torch.cat(uncond_image_prompt_embeds, dim=0) + + torch.cuda.empty_cache() + + #image_prompt_embeds = self.image_proj_model(clip_embed) + #uncond_image_prompt_embeds = self.image_proj_model(clip_embed_zeroed) + return image_prompt_embeds, uncond_image_prompt_embeds + + @torch.inference_mode() + def get_image_embeds_faceid_plus(self, face_embed, clip_embed, s_scale, shortcut, batch_size): + torch_device = model_management.get_torch_device() + intermediate_device = model_management.intermediate_device() + + if batch_size == 0: + batch_size = clip_embed.shape[0] + intermediate_device = torch_device + elif batch_size > clip_embed.shape[0]: + batch_size = clip_embed.shape[0] + + face_embed_batch = torch.split(face_embed, batch_size, dim=0) + clip_embed_batch = torch.split(clip_embed, batch_size, dim=0) + + embeds = [] + for face_embed, clip_embed in zip(face_embed_batch, clip_embed_batch): + embeds.append(self.image_proj_model(face_embed.to(torch_device), clip_embed.to(torch_device), scale=s_scale, shortcut=shortcut).to(intermediate_device)) + + embeds = torch.cat(embeds, dim=0) + del face_embed_batch, clip_embed_batch + torch.cuda.empty_cache() + #embeds = self.image_proj_model(face_embed, clip_embed, scale=s_scale, shortcut=shortcut) + return embeds + +class To_KV(nn.Module): + def __init__(self, state_dict, encoder_hid_proj=None, weight_kolors=1.0): + super().__init__() + + if encoder_hid_proj is not None: + hid_proj = nn.Linear(encoder_hid_proj["weight"].shape[1], encoder_hid_proj["weight"].shape[0], bias=True) + hid_proj.weight.data = encoder_hid_proj["weight"] * weight_kolors + hid_proj.bias.data = encoder_hid_proj["bias"] * weight_kolors + + self.to_kvs = nn.ModuleDict() + 
for key, value in state_dict.items(): + if encoder_hid_proj is not None: + linear_proj = nn.Linear(value.shape[1], value.shape[0], bias=False) + linear_proj.weight.data = value + self.to_kvs[key.replace(".weight", "").replace(".", "_")] = nn.Sequential(hid_proj, linear_proj) + else: + self.to_kvs[key.replace(".weight", "").replace(".", "_")] = nn.Linear(value.shape[1], value.shape[0], bias=False) + self.to_kvs[key.replace(".weight", "").replace(".", "_")].weight.data = value + +def set_model_patch_replace(model, patch_kwargs, key): + to = model.model_options["transformer_options"].copy() + if "patches_replace" not in to: + to["patches_replace"] = {} + else: + to["patches_replace"] = to["patches_replace"].copy() + + if "attn2" not in to["patches_replace"]: + to["patches_replace"]["attn2"] = {} + else: + to["patches_replace"]["attn2"] = to["patches_replace"]["attn2"].copy() + + if key not in to["patches_replace"]["attn2"]: + to["patches_replace"]["attn2"][key] = Attn2Replace(ipadapter_attention, **patch_kwargs) + model.model_options["transformer_options"] = to + else: + to["patches_replace"]["attn2"][key].add(ipadapter_attention, **patch_kwargs) + +def ipadapter_execute(model, + ipadapter, + clipvision, + insightface=None, + image=None, + image_composition=None, + image_negative=None, + weight=1.0, + weight_composition=1.0, + weight_faceidv2=None, + weight_kolors=1.0, + weight_type="linear", + combine_embeds="concat", + start_at=0.0, + end_at=1.0, + attn_mask=None, + pos_embed=None, + neg_embed=None, + unfold_batch=False, + embeds_scaling='V only', + layer_weights=None, + encode_batch_size=0, + style_boost=None, + composition_boost=None, + enhance_tiles=1, + enhance_ratio=1.0,): + device = model_management.get_torch_device() + dtype = model_management.unet_dtype() + if dtype not in [torch.float32, torch.float16, torch.bfloat16]: + dtype = torch.float16 if model_management.should_use_fp16() else torch.float32 + + is_full = "proj.3.weight" in ipadapter["image_proj"] + is_portrait_unnorm = "portraitunnorm" in ipadapter + is_plus = (is_full or "latents" in ipadapter["image_proj"] or "perceiver_resampler.proj_in.weight" in ipadapter["image_proj"]) and not is_portrait_unnorm + output_cross_attention_dim = ipadapter["ip_adapter"]["1.to_k_ip.weight"].shape[1] + is_sdxl = output_cross_attention_dim == 2048 + is_kwai_kolors_faceid = "perceiver_resampler.layers.0.0.to_out.weight" in ipadapter["image_proj"] and ipadapter["image_proj"]["perceiver_resampler.layers.0.0.to_out.weight"].shape[0] == 4096 + is_faceidv2 = "faceidplusv2" in ipadapter or is_kwai_kolors_faceid + is_kwai_kolors = (is_sdxl and "layers.0.0.to_out.weight" in ipadapter["image_proj"] and ipadapter["image_proj"]["layers.0.0.to_out.weight"].shape[0] == 2048) or is_kwai_kolors_faceid + is_portrait = "proj.2.weight" in ipadapter["image_proj"] and not "proj.3.weight" in ipadapter["image_proj"] and not "0.to_q_lora.down.weight" in ipadapter["ip_adapter"] and not is_kwai_kolors_faceid + is_faceid = is_portrait or "0.to_q_lora.down.weight" in ipadapter["ip_adapter"] or is_portrait_unnorm or is_kwai_kolors_faceid + + if is_faceid and not insightface: + raise Exception("insightface model is required for FaceID models") + + if is_faceidv2: + weight_faceidv2 = weight_faceidv2 if weight_faceidv2 is not None else weight*2 + + if is_kwai_kolors_faceid: + cross_attention_dim = 4096 + elif is_kwai_kolors: + cross_attention_dim = 2048 + elif (is_plus and is_sdxl and not is_faceid) or is_portrait_unnorm: + cross_attention_dim = 1280 + else: + 
cross_attention_dim = output_cross_attention_dim + + if is_kwai_kolors_faceid: + clip_extra_context_tokens = 6 + elif (is_plus and not is_faceid) or is_portrait or is_portrait_unnorm: + clip_extra_context_tokens = 16 + else: + clip_extra_context_tokens = 4 + + if image is not None and image.shape[1] != image.shape[2]: + print("\033[33mINFO: the IPAdapter reference image is not a square, CLIPImageProcessor will resize and crop it at the center. If the main focus of the picture is not in the middle the result might not be what you are expecting.\033[0m") + + if isinstance(weight, list): + weight = torch.tensor(weight).unsqueeze(-1).unsqueeze(-1).to(device, dtype=dtype) if unfold_batch else weight[0] + + if style_boost is not None: + weight_type = "style transfer precise" + elif composition_boost is not None: + weight_type = "composition precise" + + # special weight types + if layer_weights is not None and layer_weights != '': + weight = { int(k): float(v)*weight for k, v in [x.split(":") for x in layer_weights.split(",")] } + weight_type = weight_type if weight_type == "style transfer precise" or weight_type == "composition precise" else "linear" + elif weight_type == "style transfer": + weight = { 6:weight } if is_sdxl else { 0:weight, 1:weight, 2:weight, 3:weight, 9:weight, 10:weight, 11:weight, 12:weight, 13:weight, 14:weight, 15:weight } + elif weight_type == "composition": + weight = { 3:weight } if is_sdxl else { 4:weight*0.25, 5:weight } + elif weight_type == "strong style transfer": + if is_sdxl: + weight = { 0:weight, 1:weight, 2:weight, 4:weight, 5:weight, 6:weight, 7:weight, 8:weight, 9:weight, 10:weight } + else: + weight = { 0:weight, 1:weight, 2:weight, 3:weight, 6:weight, 7:weight, 8:weight, 9:weight, 10:weight, 11:weight, 12:weight, 13:weight, 14:weight, 15:weight } + elif weight_type == "style and composition": + if is_sdxl: + weight = { 3:weight_composition, 6:weight } + else: + weight = { 0:weight, 1:weight, 2:weight, 3:weight, 4:weight_composition*0.25, 5:weight_composition, 9:weight, 10:weight, 11:weight, 12:weight, 13:weight, 14:weight, 15:weight } + elif weight_type == "strong style and composition": + if is_sdxl: + weight = { 0:weight, 1:weight, 2:weight, 3:weight_composition, 4:weight, 5:weight, 6:weight, 7:weight, 8:weight, 9:weight, 10:weight } + else: + weight = { 0:weight, 1:weight, 2:weight, 3:weight, 4:weight_composition, 5:weight_composition, 6:weight, 7:weight, 8:weight, 9:weight, 10:weight, 11:weight, 12:weight, 13:weight, 14:weight, 15:weight } + elif weight_type == "style transfer precise": + weight_composition = style_boost if style_boost is not None else weight + if is_sdxl: + weight = { 3:weight_composition, 6:weight } + else: + weight = { 0:weight, 1:weight, 2:weight, 3:weight, 4:weight_composition*0.25, 5:weight_composition, 9:weight, 10:weight, 11:weight, 12:weight, 13:weight, 14:weight, 15:weight } + elif weight_type == "composition precise": + weight_composition = weight + weight = composition_boost if composition_boost is not None else weight + if is_sdxl: + weight = { 0:weight*.1, 1:weight*.1, 2:weight*.1, 3:weight_composition, 4:weight*.1, 5:weight*.1, 6:weight, 7:weight*.1, 8:weight*.1, 9:weight*.1, 10:weight*.1 } + else: + weight = { 0:weight, 1:weight, 2:weight, 3:weight, 4:weight_composition*0.25, 5:weight_composition, 6:weight*.1, 7:weight*.1, 8:weight*.1, 9:weight, 10:weight, 11:weight, 12:weight, 13:weight, 14:weight, 15:weight } + + clipvision_size = 224 if not is_kwai_kolors else 336 + + img_comp_cond_embeds = None + face_cond_embeds = 
None + if is_faceid: + if insightface is None: + raise Exception("Insightface model is required for FaceID models") + + from insightface.utils import face_align + + insightface.det_model.input_size = (640,640) # reset the detection size + image_iface = tensor_to_image(image) + face_cond_embeds = [] + image = [] + + for i in range(image_iface.shape[0]): + for size in [(size, size) for size in range(640, 256, -64)]: + insightface.det_model.input_size = size # TODO: hacky but seems to be working + face = insightface.get(image_iface[i]) + if face: + if not is_portrait_unnorm: + face_cond_embeds.append(torch.from_numpy(face[0].normed_embedding).unsqueeze(0)) + else: + face_cond_embeds.append(torch.from_numpy(face[0].embedding).unsqueeze(0)) + image.append(image_to_tensor(face_align.norm_crop(image_iface[i], landmark=face[0].kps, image_size=336 if is_kwai_kolors_faceid else 256 if is_sdxl else 224))) + + if 640 not in size: + print(f"\033[33mINFO: InsightFace detection resolution lowered to {size}.\033[0m") + break + else: + raise Exception('InsightFace: No face detected.') + face_cond_embeds = torch.stack(face_cond_embeds).to(device, dtype=dtype) + image = torch.stack(image) + del image_iface, face + + if image is not None: + img_cond_embeds = encode_image_masked(clipvision, image, batch_size=encode_batch_size, tiles=enhance_tiles, ratio=enhance_ratio, clipvision_size=clipvision_size) + if image_composition is not None: + img_comp_cond_embeds = encode_image_masked(clipvision, image_composition, batch_size=encode_batch_size, tiles=enhance_tiles, ratio=enhance_ratio, clipvision_size=clipvision_size) + + if is_plus: + img_cond_embeds = img_cond_embeds.penultimate_hidden_states + image_negative = image_negative if image_negative is not None else torch.zeros([1, clipvision_size, clipvision_size, 3]) + img_uncond_embeds = encode_image_masked(clipvision, image_negative, batch_size=encode_batch_size, clipvision_size=clipvision_size).penultimate_hidden_states + if image_composition is not None: + img_comp_cond_embeds = img_comp_cond_embeds.penultimate_hidden_states + else: + img_cond_embeds = img_cond_embeds.image_embeds if not is_faceid else face_cond_embeds + if image_negative is not None and not is_faceid: + img_uncond_embeds = encode_image_masked(clipvision, image_negative, batch_size=encode_batch_size, clipvision_size=clipvision_size).image_embeds + else: + img_uncond_embeds = torch.zeros_like(img_cond_embeds) + if image_composition is not None: + img_comp_cond_embeds = img_comp_cond_embeds.image_embeds + del image_negative, image_composition + + image = None if not is_faceid else image # if it's face_id we need the cropped face for later + elif pos_embed is not None: + img_cond_embeds = pos_embed + + if neg_embed is not None: + img_uncond_embeds = neg_embed + else: + if is_plus: + img_uncond_embeds = encode_image_masked(clipvision, torch.zeros([1, clipvision_size, clipvision_size, 3]), clipvision_size=clipvision_size).penultimate_hidden_states + else: + img_uncond_embeds = torch.zeros_like(img_cond_embeds) + del pos_embed, neg_embed + else: + raise Exception("Images or Embeds are required") + + # ensure that cond and uncond have the same batch size + img_uncond_embeds = tensor_to_size(img_uncond_embeds, img_cond_embeds.shape[0]) + + img_cond_embeds = img_cond_embeds.to(device, dtype=dtype) + img_uncond_embeds = img_uncond_embeds.to(device, dtype=dtype) + if img_comp_cond_embeds is not None: + img_comp_cond_embeds = img_comp_cond_embeds.to(device, dtype=dtype) + + # combine the embeddings if needed 
+ if combine_embeds != "concat" and img_cond_embeds.shape[0] > 1 and not unfold_batch: + if combine_embeds == "add": + img_cond_embeds = torch.sum(img_cond_embeds, dim=0).unsqueeze(0) + if face_cond_embeds is not None: + face_cond_embeds = torch.sum(face_cond_embeds, dim=0).unsqueeze(0) + if img_comp_cond_embeds is not None: + img_comp_cond_embeds = torch.sum(img_comp_cond_embeds, dim=0).unsqueeze(0) + elif combine_embeds == "subtract": + img_cond_embeds = img_cond_embeds[0] - torch.mean(img_cond_embeds[1:], dim=0) + img_cond_embeds = img_cond_embeds.unsqueeze(0) + if face_cond_embeds is not None: + face_cond_embeds = face_cond_embeds[0] - torch.mean(face_cond_embeds[1:], dim=0) + face_cond_embeds = face_cond_embeds.unsqueeze(0) + if img_comp_cond_embeds is not None: + img_comp_cond_embeds = img_comp_cond_embeds[0] - torch.mean(img_comp_cond_embeds[1:], dim=0) + img_comp_cond_embeds = img_comp_cond_embeds.unsqueeze(0) + elif combine_embeds == "average": + img_cond_embeds = torch.mean(img_cond_embeds, dim=0).unsqueeze(0) + if face_cond_embeds is not None: + face_cond_embeds = torch.mean(face_cond_embeds, dim=0).unsqueeze(0) + if img_comp_cond_embeds is not None: + img_comp_cond_embeds = torch.mean(img_comp_cond_embeds, dim=0).unsqueeze(0) + elif combine_embeds == "norm average": + img_cond_embeds = torch.mean(img_cond_embeds / torch.norm(img_cond_embeds, dim=0, keepdim=True), dim=0).unsqueeze(0) + if face_cond_embeds is not None: + face_cond_embeds = torch.mean(face_cond_embeds / torch.norm(face_cond_embeds, dim=0, keepdim=True), dim=0).unsqueeze(0) + if img_comp_cond_embeds is not None: + img_comp_cond_embeds = torch.mean(img_comp_cond_embeds / torch.norm(img_comp_cond_embeds, dim=0, keepdim=True), dim=0).unsqueeze(0) + img_uncond_embeds = img_uncond_embeds[0].unsqueeze(0) # TODO: better strategy for uncond could be to average them + + if attn_mask is not None: + attn_mask = attn_mask.to(device, dtype=dtype) + + encoder_hid_proj = None + + if is_kwai_kolors_faceid and hasattr(model.model, "diffusion_model") and hasattr(model.model.diffusion_model, "encoder_hid_proj"): + encoder_hid_proj = model.model.diffusion_model.encoder_hid_proj.state_dict() + + ipa = IPAdapter( + ipadapter, + cross_attention_dim=cross_attention_dim, + output_cross_attention_dim=output_cross_attention_dim, + clip_embeddings_dim=img_cond_embeds.shape[-1], + clip_extra_context_tokens=clip_extra_context_tokens, + is_sdxl=is_sdxl, + is_plus=is_plus, + is_full=is_full, + is_faceid=is_faceid, + is_portrait_unnorm=is_portrait_unnorm, + is_kwai_kolors=is_kwai_kolors, + encoder_hid_proj=encoder_hid_proj, + weight_kolors=weight_kolors + ).to(device, dtype=dtype) + + if is_faceid and is_plus: + cond = ipa.get_image_embeds_faceid_plus(face_cond_embeds, img_cond_embeds, weight_faceidv2, is_faceidv2, encode_batch_size) + # TODO: check if noise helps with the uncond face embeds + uncond = ipa.get_image_embeds_faceid_plus(torch.zeros_like(face_cond_embeds), img_uncond_embeds, weight_faceidv2, is_faceidv2, encode_batch_size) + else: + cond, uncond = ipa.get_image_embeds(img_cond_embeds, img_uncond_embeds, encode_batch_size) + if img_comp_cond_embeds is not None: + cond_comp = ipa.get_image_embeds(img_comp_cond_embeds, img_uncond_embeds, encode_batch_size)[0] + + cond = cond.to(device, dtype=dtype) + uncond = uncond.to(device, dtype=dtype) + + cond_alt = None + if img_comp_cond_embeds is not None: + cond_alt = { 3: cond_comp.to(device, dtype=dtype) } + + del img_cond_embeds, img_uncond_embeds, img_comp_cond_embeds, face_cond_embeds + + 
sigma_start = model.get_model_object("model_sampling").percent_to_sigma(start_at) + sigma_end = model.get_model_object("model_sampling").percent_to_sigma(end_at) + + patch_kwargs = { + "ipadapter": ipa, + "weight": weight, + "cond": cond, + "cond_alt": cond_alt, + "uncond": uncond, + "weight_type": weight_type, + "mask": attn_mask, + "sigma_start": sigma_start, + "sigma_end": sigma_end, + "unfold_batch": unfold_batch, + "embeds_scaling": embeds_scaling, + } + + number = 0 + if not is_sdxl: + for id in [1,2,4,5,7,8]: # id of input_blocks that have cross attention + patch_kwargs["module_key"] = str(number*2+1) + set_model_patch_replace(model, patch_kwargs, ("input", id)) + number += 1 + for id in [3,4,5,6,7,8,9,10,11]: # id of output_blocks that have cross attention + patch_kwargs["module_key"] = str(number*2+1) + set_model_patch_replace(model, patch_kwargs, ("output", id)) + number += 1 + patch_kwargs["module_key"] = str(number*2+1) + set_model_patch_replace(model, patch_kwargs, ("middle", 0)) + else: + for id in [4,5,7,8]: # id of input_blocks that have cross attention + block_indices = range(2) if id in [4, 5] else range(10) # transformer_depth + for index in block_indices: + patch_kwargs["module_key"] = str(number*2+1) + set_model_patch_replace(model, patch_kwargs, ("input", id, index)) + number += 1 + for id in range(6): # id of output_blocks that have cross attention + block_indices = range(2) if id in [3, 4, 5] else range(10) # transformer_depth + for index in block_indices: + patch_kwargs["module_key"] = str(number*2+1) + set_model_patch_replace(model, patch_kwargs, ("output", id, index)) + number += 1 + for index in range(10): + patch_kwargs["module_key"] = str(number*2+1) + set_model_patch_replace(model, patch_kwargs, ("middle", 0, index)) + number += 1 + + return (model, image) + +""" +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Loaders +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +class IPAdapterUnifiedLoader: + def __init__(self): + self.lora = None + self.clipvision = { "file": None, "model": None } + self.ipadapter = { "file": None, "model": None } + self.insightface = { "provider": None, "model": None } + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "model": ("MODEL", ), + "preset": (['LIGHT - SD1.5 only (low strength)', 'STANDARD (medium strength)', 'VIT-G (medium strength)', 'PLUS (high strength)', 'PLUS FACE (portraits)', 'FULL FACE - SD1.5 only (portraits stronger)'], ), + }, + "optional": { + "ipadapter": ("IPADAPTER", ), + }} + + RETURN_TYPES = ("MODEL", "IPADAPTER", ) + RETURN_NAMES = ("model", "ipadapter", ) + FUNCTION = "load_models" + CATEGORY = "ipadapter" + + def load_models(self, model, preset, lora_strength=0.0, provider="CPU", ipadapter=None): + pipeline = { "clipvision": { 'file': None, 'model': None }, "ipadapter": { 'file': None, 'model': None }, "insightface": { 'provider': None, 'model': None } } + if ipadapter is not None: + pipeline = ipadapter + + # 1. Load the clipvision model + clipvision_file = get_clipvision_file(preset) + if clipvision_file is None: + raise Exception("ClipVision model not found.") + + if clipvision_file != self.clipvision['file']: + if clipvision_file != pipeline['clipvision']['file']: + self.clipvision['file'] = clipvision_file + self.clipvision['model'] = load_clip_vision(clipvision_file) + print(f"\033[33mINFO: Clip Vision model loaded from {clipvision_file}\033[0m") + else: + self.clipvision = pipeline['clipvision'] + + # 2. 
Load the ipadapter model + is_sdxl = isinstance(model.model, (comfy.model_base.SDXL, comfy.model_base.SDXLRefiner, comfy.model_base.SDXL_instructpix2pix)) + ipadapter_file, is_insightface, lora_pattern = get_ipadapter_file(preset, is_sdxl) + if ipadapter_file is None: + raise Exception("IPAdapter model not found.") + + if ipadapter_file != self.ipadapter['file']: + if pipeline['ipadapter']['file'] != ipadapter_file: + self.ipadapter['file'] = ipadapter_file + self.ipadapter['model'] = ipadapter_model_loader(ipadapter_file) + print(f"\033[33mINFO: IPAdapter model loaded from {ipadapter_file}\033[0m") + else: + self.ipadapter = pipeline['ipadapter'] + + # 3. Load the lora model if needed + if lora_pattern is not None: + lora_file = get_lora_file(lora_pattern) + lora_model = None + if lora_file is None: + raise Exception("LoRA model not found.") + + if self.lora is not None: + if lora_file == self.lora['file']: + lora_model = self.lora['model'] + else: + self.lora = None + torch.cuda.empty_cache() + + if lora_model is None: + lora_model = comfy.utils.load_torch_file(lora_file, safe_load=True) + self.lora = { 'file': lora_file, 'model': lora_model } + print(f"\033[33mINFO: LoRA model loaded from {lora_file}\033[0m") + + if lora_strength > 0: + model, _ = load_lora_for_models(model, None, lora_model, lora_strength, 0) + + # 4. Load the insightface model if needed + if is_insightface: + if provider != self.insightface['provider']: + if pipeline['insightface']['provider'] != provider: + self.insightface['provider'] = provider + self.insightface['model'] = insightface_loader(provider) + print(f"\033[33mINFO: InsightFace model loaded with {provider} provider\033[0m") + else: + self.insightface = pipeline['insightface'] + + return (model, { 'clipvision': self.clipvision, 'ipadapter': self.ipadapter, 'insightface': self.insightface }, ) + +class IPAdapterUnifiedLoaderFaceID(IPAdapterUnifiedLoader): + @classmethod + def INPUT_TYPES(s): + return {"required": { + "model": ("MODEL", ), + "preset": (['FACEID', 'FACEID PLUS - SD1.5 only', 'FACEID PLUS V2', 'FACEID PORTRAIT (style transfer)', 'FACEID PORTRAIT UNNORM - SDXL only (strong)'], ), + "lora_strength": ("FLOAT", { "default": 0.6, "min": 0, "max": 1, "step": 0.01 }), + "provider": (["CPU", "CUDA", "ROCM", "DirectML", "OpenVINO", "CoreML"], ), + }, + "optional": { + "ipadapter": ("IPADAPTER", ), + }} + + RETURN_NAMES = ("MODEL", "ipadapter", ) + CATEGORY = "ipadapter/faceid" + +class IPAdapterUnifiedLoaderCommunity(IPAdapterUnifiedLoader): + @classmethod + def INPUT_TYPES(s): + return {"required": { + "model": ("MODEL", ), + "preset": (['Composition', 'Kolors'], ), + }, + "optional": { + "ipadapter": ("IPADAPTER", ), + }} + + CATEGORY = "ipadapter/loaders" + +class IPAdapterModelLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { "ipadapter_file": (folder_paths.get_filename_list("ipadapter"), )}} + + RETURN_TYPES = ("IPADAPTER",) + FUNCTION = "load_ipadapter_model" + CATEGORY = "ipadapter/loaders" + + def load_ipadapter_model(self, ipadapter_file): + ipadapter_file = folder_paths.get_full_path("ipadapter", ipadapter_file) + return (ipadapter_model_loader(ipadapter_file),) + +class IPAdapterInsightFaceLoader: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "provider": (["CPU", "CUDA", "ROCM"], ), + "model_name": (['buffalo_l', 'antelopev2'], ) + }, + } + + RETURN_TYPES = ("INSIGHTFACE",) + FUNCTION = "load_insightface" + CATEGORY = "ipadapter/loaders" + + def load_insightface(self, provider, model_name): + return 
(insightface_loader(provider, model_name=model_name),) + +""" +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Main Apply Nodes +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +class IPAdapterSimple: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 3, "step": 0.05 }), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "weight_type": (['standard', 'prompt is more important', 'style transfer'], ), + }, + "optional": { + "attn_mask": ("MASK",), + } + } + + RETURN_TYPES = ("MODEL",) + FUNCTION = "apply_ipadapter" + CATEGORY = "ipadapter" + + def apply_ipadapter(self, model, ipadapter, image, weight, start_at, end_at, weight_type, attn_mask=None): + if weight_type.startswith("style"): + weight_type = "style transfer" + elif weight_type == "prompt is more important": + weight_type = "ease out" + else: + weight_type = "linear" + + ipa_args = { + "image": image, + "weight": weight, + "start_at": start_at, + "end_at": end_at, + "attn_mask": attn_mask, + "weight_type": weight_type, + "insightface": ipadapter['insightface']['model'] if 'insightface' in ipadapter else None, + } + + if 'ipadapter' not in ipadapter: + raise Exception("IPAdapter model not present in the pipeline. Please load the models with the IPAdapterUnifiedLoader node.") + if 'clipvision' not in ipadapter: + raise Exception("CLIPVision model not present in the pipeline. Please load the models with the IPAdapterUnifiedLoader node.") + + return ipadapter_execute(model.clone(), ipadapter['ipadapter']['model'], ipadapter['clipvision']['model'], **ipa_args) + +class IPAdapterAdvanced: + def __init__(self): + self.unfold_batch = False + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 5, "step": 0.05 }), + "weight_type": (WEIGHT_TYPES, ), + "combine_embeds": (["concat", "add", "subtract", "average", "norm average"],), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + }, + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + } + } + + RETURN_TYPES = ("MODEL",) + FUNCTION = "apply_ipadapter" + CATEGORY = "ipadapter" + + def apply_ipadapter(self, model, ipadapter, start_at=0.0, end_at=1.0, weight=1.0, weight_style=1.0, weight_composition=1.0, expand_style=False, weight_type="linear", combine_embeds="concat", weight_faceidv2=None, image=None, image_style=None, image_composition=None, image_negative=None, clip_vision=None, attn_mask=None, insightface=None, embeds_scaling='V only', layer_weights=None, ipadapter_params=None, encode_batch_size=0, style_boost=None, composition_boost=None, enhance_tiles=1, enhance_ratio=1.0, weight_kolors=1.0): + is_sdxl = isinstance(model.model, (comfy.model_base.SDXL, comfy.model_base.SDXLRefiner, comfy.model_base.SDXL_instructpix2pix)) + + if 'ipadapter' in ipadapter: + ipadapter_model = ipadapter['ipadapter']['model'] + clip_vision = clip_vision if clip_vision is not None else 
ipadapter['clipvision']['model'] + else: + ipadapter_model = ipadapter + + if clip_vision is None: + raise Exception("Missing CLIPVision model.") + + if image_style is not None: # we are doing style + composition transfer + if not is_sdxl: + raise Exception("Style + Composition transfer is only available for SDXL models at the moment.") # TODO: check feasibility for SD1.5 models + + image = image_style + weight = weight_style + if image_composition is None: + image_composition = image_style + + weight_type = "strong style and composition" if expand_style else "style and composition" + if ipadapter_params is not None: # we are doing batch processing + image = ipadapter_params['image'] + attn_mask = ipadapter_params['attn_mask'] + weight = ipadapter_params['weight'] + weight_type = ipadapter_params['weight_type'] + start_at = ipadapter_params['start_at'] + end_at = ipadapter_params['end_at'] + else: + # at this point weight can be a list from the batch-weight or a single float + weight = [weight] + + image = image if isinstance(image, list) else [image] + + work_model = model.clone() + + for i in range(len(image)): + if image[i] is None: + continue + + ipa_args = { + "image": image[i], + "image_composition": image_composition, + "image_negative": image_negative, + "weight": weight[i], + "weight_composition": weight_composition, + "weight_faceidv2": weight_faceidv2, + "weight_type": weight_type if not isinstance(weight_type, list) else weight_type[i], + "combine_embeds": combine_embeds, + "start_at": start_at if not isinstance(start_at, list) else start_at[i], + "end_at": end_at if not isinstance(end_at, list) else end_at[i], + "attn_mask": attn_mask if not isinstance(attn_mask, list) else attn_mask[i], + "unfold_batch": self.unfold_batch, + "embeds_scaling": embeds_scaling, + "insightface": insightface if insightface is not None else ipadapter['insightface']['model'] if 'insightface' in ipadapter else None, + "layer_weights": layer_weights, + "encode_batch_size": encode_batch_size, + "style_boost": style_boost, + "composition_boost": composition_boost, + "enhance_tiles": enhance_tiles, + "enhance_ratio": enhance_ratio, + "weight_kolors": weight_kolors, + } + + work_model, face_image = ipadapter_execute(work_model, ipadapter_model, clip_vision, **ipa_args) + + del ipadapter + return (work_model, face_image, ) + +class IPAdapterBatch(IPAdapterAdvanced): + def __init__(self): + self.unfold_batch = True + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 5, "step": 0.05 }), + "weight_type": (WEIGHT_TYPES, ), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + "encode_batch_size": ("INT", { "default": 0, "min": 0, "max": 4096 }), + }, + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + } + } + +class IPAdapterStyleComposition(IPAdapterAdvanced): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image_style": ("IMAGE",), + "image_composition": ("IMAGE",), + "weight_style": ("FLOAT", { "default": 1.0, "min": -1, "max": 5, "step": 0.05 }), + "weight_composition": ("FLOAT", { "default": 1.0, "min": -1, "max": 5, "step": 
0.05 }), + "expand_style": ("BOOLEAN", { "default": False }), + "combine_embeds": (["concat", "add", "subtract", "average", "norm average"], {"default": "average"}), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + }, + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + } + } + + CATEGORY = "ipadapter/style_composition" + +class IPAdapterStyleCompositionBatch(IPAdapterStyleComposition): + def __init__(self): + self.unfold_batch = True + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image_style": ("IMAGE",), + "image_composition": ("IMAGE",), + "weight_style": ("FLOAT", { "default": 1.0, "min": -1, "max": 5, "step": 0.05 }), + "weight_composition": ("FLOAT", { "default": 1.0, "min": -1, "max": 5, "step": 0.05 }), + "expand_style": ("BOOLEAN", { "default": False }), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + }, + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + } + } + +class IPAdapterFaceID(IPAdapterAdvanced): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 3, "step": 0.05 }), + "weight_faceidv2": ("FLOAT", { "default": 1.0, "min": -1, "max": 5.0, "step": 0.05 }), + "weight_type": (WEIGHT_TYPES, ), + "combine_embeds": (["concat", "add", "subtract", "average", "norm average"],), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + }, + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + "insightface": ("INSIGHTFACE",), + } + } + + CATEGORY = "ipadapter/faceid" + RETURN_TYPES = ("MODEL","IMAGE",) + RETURN_NAMES = ("MODEL", "face_image", ) + +class IPAAdapterFaceIDBatch(IPAdapterFaceID): + def __init__(self): + self.unfold_batch = True + +class IPAdapterFaceIDKolors(IPAdapterAdvanced): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 3, "step": 0.05 }), + "weight_faceidv2": ("FLOAT", { "default": 1.0, "min": -1, "max": 5.0, "step": 0.05 }), + "weight_kolors": ("FLOAT", { "default": 1.0, "min": -1, "max": 5.0, "step": 0.05 }), + "weight_type": (WEIGHT_TYPES, ), + "combine_embeds": (["concat", "add", "subtract", "average", "norm average"],), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + }, + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + "insightface": 
("INSIGHTFACE",), + } + } + + CATEGORY = "ipadapter/faceid" + RETURN_TYPES = ("MODEL","IMAGE",) + RETURN_NAMES = ("MODEL", "face_image", ) + +class IPAdapterTiled: + def __init__(self): + self.unfold_batch = False + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 3, "step": 0.05 }), + "weight_type": (WEIGHT_TYPES, ), + "combine_embeds": (["concat", "add", "subtract", "average", "norm average"],), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "sharpening": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.05 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + }, + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + } + } + + RETURN_TYPES = ("MODEL", "IMAGE", "MASK", ) + RETURN_NAMES = ("MODEL", "tiles", "masks", ) + FUNCTION = "apply_tiled" + CATEGORY = "ipadapter/tiled" + + def apply_tiled(self, model, ipadapter, image, weight, weight_type, start_at, end_at, sharpening, combine_embeds="concat", image_negative=None, attn_mask=None, clip_vision=None, embeds_scaling='V only', encode_batch_size=0): + # 1. Select the models + if 'ipadapter' in ipadapter: + ipadapter_model = ipadapter['ipadapter']['model'] + clip_vision = clip_vision if clip_vision is not None else ipadapter['clipvision']['model'] + else: + ipadapter_model = ipadapter + clip_vision = clip_vision + + if clip_vision is None: + raise Exception("Missing CLIPVision model.") + + del ipadapter + + # 2. Extract the tiles + tile_size = 256 # I'm using 256 instead of 224 as it is more likely divisible by the latent size, it will be downscaled to 224 by the clip vision encoder + _, oh, ow, _ = image.shape + if attn_mask is None: + attn_mask = torch.ones([1, oh, ow], dtype=image.dtype, device=image.device) + + image = image.permute([0,3,1,2]) + attn_mask = attn_mask.unsqueeze(1) + # the mask should have the same proportions as the reference image and the latent + attn_mask = T.Resize((oh, ow), interpolation=T.InterpolationMode.BICUBIC, antialias=True)(attn_mask) + + # if the image is almost a square, we crop it to a square + if oh / ow > 0.75 and oh / ow < 1.33: + # crop the image to a square + image = T.CenterCrop(min(oh, ow))(image) + resize = (tile_size*2, tile_size*2) + + attn_mask = T.CenterCrop(min(oh, ow))(attn_mask) + # otherwise resize the smallest side and the other proportionally + else: + resize = (int(tile_size * ow / oh), tile_size) if oh < ow else (tile_size, int(tile_size * oh / ow)) + + # using PIL for better results + imgs = [] + for img in image: + img = T.ToPILImage()(img) + img = img.resize(resize, resample=Image.Resampling['LANCZOS']) + imgs.append(T.ToTensor()(img)) + image = torch.stack(imgs) + del imgs, img + + # we don't need a high quality resize for the mask + attn_mask = T.Resize(resize[::-1], interpolation=T.InterpolationMode.BICUBIC, antialias=True)(attn_mask) + + # we allow a maximum of 4 tiles + if oh / ow > 4 or oh / ow < 0.25: + crop = (tile_size, tile_size*4) if oh < ow else (tile_size*4, tile_size) + image = T.CenterCrop(crop)(image) + attn_mask = T.CenterCrop(crop)(attn_mask) + + attn_mask = attn_mask.squeeze(1) + + if sharpening > 0: + image = contrast_adaptive_sharpening(image, sharpening) + + image = 
image.permute([0,2,3,1]) + + _, oh, ow, _ = image.shape + + # find the number of tiles for each side + tiles_x = math.ceil(ow / tile_size) + tiles_y = math.ceil(oh / tile_size) + overlap_x = max(0, (tiles_x * tile_size - ow) / (tiles_x - 1 if tiles_x > 1 else 1)) + overlap_y = max(0, (tiles_y * tile_size - oh) / (tiles_y - 1 if tiles_y > 1 else 1)) + + base_mask = torch.zeros([attn_mask.shape[0], oh, ow], dtype=image.dtype, device=image.device) + + # extract all the tiles from the image and create the masks + tiles = [] + masks = [] + for y in range(tiles_y): + for x in range(tiles_x): + start_x = int(x * (tile_size - overlap_x)) + start_y = int(y * (tile_size - overlap_y)) + tiles.append(image[:, start_y:start_y+tile_size, start_x:start_x+tile_size, :]) + mask = base_mask.clone() + mask[:, start_y:start_y+tile_size, start_x:start_x+tile_size] = attn_mask[:, start_y:start_y+tile_size, start_x:start_x+tile_size] + masks.append(mask) + del mask + + # 3. Apply the ipadapter to each group of tiles + model = model.clone() + for i in range(len(tiles)): + ipa_args = { + "image": tiles[i], + "image_negative": image_negative, + "weight": weight, + "weight_type": weight_type, + "combine_embeds": combine_embeds, + "start_at": start_at, + "end_at": end_at, + "attn_mask": masks[i], + "unfold_batch": self.unfold_batch, + "embeds_scaling": embeds_scaling, + "encode_batch_size": encode_batch_size, + } + # apply the ipadapter to the model without cloning it + model, _ = ipadapter_execute(model, ipadapter_model, clip_vision, **ipa_args) + + return (model, torch.cat(tiles), torch.cat(masks), ) + +class IPAdapterTiledBatch(IPAdapterTiled): + def __init__(self): + self.unfold_batch = True + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 3, "step": 0.05 }), + "weight_type": (WEIGHT_TYPES, ), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "sharpening": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.05 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + "encode_batch_size": ("INT", { "default": 0, "min": 0, "max": 4096 }), + }, + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + } + } + +class IPAdapterEmbeds: + def __init__(self): + self.unfold_batch = False + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "pos_embed": ("EMBEDS",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 3, "step": 0.05 }), + "weight_type": (WEIGHT_TYPES, ), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + }, + "optional": { + "neg_embed": ("EMBEDS",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + } + } + + RETURN_TYPES = ("MODEL",) + FUNCTION = "apply_ipadapter" + CATEGORY = "ipadapter/embeds" + + def apply_ipadapter(self, model, ipadapter, pos_embed, weight, weight_type, start_at, end_at, neg_embed=None, attn_mask=None, clip_vision=None, embeds_scaling='V only'): + ipa_args = { + "pos_embed": pos_embed, + "neg_embed": neg_embed, + "weight": 
weight, + "weight_type": weight_type, + "start_at": start_at, + "end_at": end_at, + "attn_mask": attn_mask, + "embeds_scaling": embeds_scaling, + "unfold_batch": self.unfold_batch, + } + + if 'ipadapter' in ipadapter: + ipadapter_model = ipadapter['ipadapter']['model'] + clip_vision = clip_vision if clip_vision is not None else ipadapter['clipvision']['model'] + else: + ipadapter_model = ipadapter + clip_vision = clip_vision + + if clip_vision is None and neg_embed is None: + raise Exception("Missing CLIPVision model.") + + del ipadapter + + return ipadapter_execute(model.clone(), ipadapter_model, clip_vision, **ipa_args) + +class IPAdapterEmbedsBatch(IPAdapterEmbeds): + def __init__(self): + self.unfold_batch = True + +class IPAdapterMS(IPAdapterAdvanced): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 5, "step": 0.05 }), + "weight_faceidv2": ("FLOAT", { "default": 1.0, "min": -1, "max": 5.0, "step": 0.05 }), + "weight_type": (WEIGHT_TYPES, ), + "combine_embeds": (["concat", "add", "subtract", "average", "norm average"],), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + "layer_weights": ("STRING", { "default": "", "multiline": True }), + }, + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + "insightface": ("INSIGHTFACE",), + } + } + + CATEGORY = "ipadapter/dev" + +class IPAdapterClipVisionEnhancer(IPAdapterAdvanced): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 5, "step": 0.05 }), + "weight_type": (WEIGHT_TYPES, ), + "combine_embeds": (["concat", "add", "subtract", "average", "norm average"],), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + "enhance_tiles": ("INT", { "default": 2, "min": 1, "max": 16 }), + "enhance_ratio": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.05 }), + }, + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + } + } + + CATEGORY = "ipadapter/dev" + +class IPAdapterClipVisionEnhancerBatch(IPAdapterClipVisionEnhancer): + def __init__(self): + self.unfold_batch = True + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 5, "step": 0.05 }), + "weight_type": (WEIGHT_TYPES, ), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + "enhance_tiles": ("INT", { "default": 2, "min": 1, "max": 16 }), + "enhance_ratio": ("FLOAT", { "default": 0.5, "min": 0.0, "max": 1.0, "step": 0.05 }), + "encode_batch_size": ("INT", { "default": 0, "min": 0, "max": 4096 }), + }, + "optional": { + 
"image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + } + } + +class IPAdapterFromParams(IPAdapterAdvanced): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "ipadapter_params": ("IPADAPTER_PARAMS", ), + "combine_embeds": (["concat", "add", "subtract", "average", "norm average"],), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + }, + "optional": { + "image_negative": ("IMAGE",), + "clip_vision": ("CLIP_VISION",), + } + } + + CATEGORY = "ipadapter/params" + +class IPAdapterPreciseStyleTransfer(IPAdapterAdvanced): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 5, "step": 0.05 }), + "style_boost": ("FLOAT", { "default": 1.0, "min": -5, "max": 5, "step": 0.05 }), + "combine_embeds": (["concat", "add", "subtract", "average", "norm average"],), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + }, + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + } + } + +class IPAdapterPreciseStyleTransferBatch(IPAdapterPreciseStyleTransfer): + def __init__(self): + self.unfold_batch = True + +class IPAdapterPreciseComposition(IPAdapterAdvanced): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL", ), + "ipadapter": ("IPADAPTER", ), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1, "max": 5, "step": 0.05 }), + "composition_boost": ("FLOAT", { "default": 0.0, "min": -5, "max": 5, "step": 0.05 }), + "combine_embeds": (["concat", "add", "subtract", "average", "norm average"],), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "embeds_scaling": (['V only', 'K+V', 'K+V w/ C penalty', 'K+mean(V) w/ C penalty'], ), + }, + "optional": { + "image_negative": ("IMAGE",), + "attn_mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + } + } + +class IPAdapterPreciseCompositionBatch(IPAdapterPreciseComposition): + def __init__(self): + self.unfold_batch = True + +""" +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Helpers +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +class IPAdapterEncoder: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "ipadapter": ("IPADAPTER",), + "image": ("IMAGE",), + "weight": ("FLOAT", { "default": 1.0, "min": -1.0, "max": 3.0, "step": 0.01 }), + }, + "optional": { + "mask": ("MASK",), + "clip_vision": ("CLIP_VISION",), + } + } + + RETURN_TYPES = ("EMBEDS", "EMBEDS",) + RETURN_NAMES = ("pos_embed", "neg_embed",) + FUNCTION = "encode" + CATEGORY = "ipadapter/embeds" + + def encode(self, ipadapter, image, weight, mask=None, clip_vision=None): + if 'ipadapter' in ipadapter: + ipadapter_model = ipadapter['ipadapter']['model'] + clip_vision = clip_vision if clip_vision is not None else ipadapter['clipvision']['model'] + else: + ipadapter_model = ipadapter + clip_vision = clip_vision + + if clip_vision is None: + raise Exception("Missing CLIPVision model.") + + is_plus = "proj.3.weight" in 
ipadapter_model["image_proj"] or "latents" in ipadapter_model["image_proj"] or "perceiver_resampler.proj_in.weight" in ipadapter_model["image_proj"] + is_kwai_kolors = is_plus and "layers.0.0.to_out.weight" in ipadapter_model["image_proj"] and ipadapter_model["image_proj"]["layers.0.0.to_out.weight"].shape[0] == 2048 + + clipvision_size = 224 if not is_kwai_kolors else 336 + + # resize and crop the mask to 224x224 + if mask is not None and mask.shape[1:3] != torch.Size([clipvision_size, clipvision_size]): + mask = mask.unsqueeze(1) + transforms = T.Compose([ + T.CenterCrop(min(mask.shape[2], mask.shape[3])), + T.Resize((clipvision_size, clipvision_size), interpolation=T.InterpolationMode.BICUBIC, antialias=True), + ]) + mask = transforms(mask).squeeze(1) + #mask = T.Resize((image.shape[1], image.shape[2]), interpolation=T.InterpolationMode.BICUBIC, antialias=True)(mask.unsqueeze(1)).squeeze(1) + + img_cond_embeds = encode_image_masked(clip_vision, image, mask, clipvision_size=clipvision_size) + + if is_plus: + img_cond_embeds = img_cond_embeds.penultimate_hidden_states + img_uncond_embeds = encode_image_masked(clip_vision, torch.zeros([1, clipvision_size, clipvision_size, 3]), clipvision_size=clipvision_size).penultimate_hidden_states + else: + img_cond_embeds = img_cond_embeds.image_embeds + img_uncond_embeds = torch.zeros_like(img_cond_embeds) + + if weight != 1: + img_cond_embeds = img_cond_embeds * weight + + return (img_cond_embeds, img_uncond_embeds, ) + +class IPAdapterCombineEmbeds: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "embed1": ("EMBEDS",), + "method": (["concat", "add", "subtract", "average", "norm average", "max", "min"], ), + }, + "optional": { + "embed2": ("EMBEDS",), + "embed3": ("EMBEDS",), + "embed4": ("EMBEDS",), + "embed5": ("EMBEDS",), + }} + + RETURN_TYPES = ("EMBEDS",) + FUNCTION = "batch" + CATEGORY = "ipadapter/embeds" + + def batch(self, embed1, method, embed2=None, embed3=None, embed4=None, embed5=None): + if method=='concat' and embed2 is None and embed3 is None and embed4 is None and embed5 is None: + return (embed1, ) + + embeds = [embed1, embed2, embed3, embed4, embed5] + embeds = [embed for embed in embeds if embed is not None] + embeds = torch.cat(embeds, dim=0) + + if method == "add": + embeds = torch.sum(embeds, dim=0).unsqueeze(0) + elif method == "subtract": + embeds = embeds[0] - torch.mean(embeds[1:], dim=0) + embeds = embeds.unsqueeze(0) + elif method == "average": + embeds = torch.mean(embeds, dim=0).unsqueeze(0) + elif method == "norm average": + embeds = torch.mean(embeds / torch.norm(embeds, dim=0, keepdim=True), dim=0).unsqueeze(0) + elif method == "max": + embeds = torch.max(embeds, dim=0).values.unsqueeze(0) + elif method == "min": + embeds = torch.min(embeds, dim=0).values.unsqueeze(0) + + return (embeds, ) + +class IPAdapterNoise: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "type": (["fade", "dissolve", "gaussian", "shuffle"], ), + "strength": ("FLOAT", { "default": 1.0, "min": 0, "max": 1, "step": 0.05 }), + "blur": ("INT", { "default": 0, "min": 0, "max": 32, "step": 1 }), + }, + "optional": { + "image_optional": ("IMAGE",), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "make_noise" + CATEGORY = "ipadapter/utils" + + def make_noise(self, type, strength, blur, image_optional=None): + if image_optional is None: + image = torch.zeros([1, 224, 224, 3]) + else: + transforms = T.Compose([ + T.CenterCrop(min(image_optional.shape[1], image_optional.shape[2])), + T.Resize((224, 224), 
interpolation=T.InterpolationMode.BICUBIC, antialias=True), + ]) + image = transforms(image_optional.permute([0,3,1,2])).permute([0,2,3,1]) + + seed = int(torch.sum(image).item()) % 1000000007 # hash the image to get a seed, grants predictability + torch.manual_seed(seed) + + if type == "fade": + noise = torch.rand_like(image) + noise = image * (1 - strength) + noise * strength + elif type == "dissolve": + mask = (torch.rand_like(image) < strength).float() + noise = torch.rand_like(image) + noise = image * (1-mask) + noise * mask + elif type == "gaussian": + noise = torch.randn_like(image) * strength + noise = image + noise + elif type == "shuffle": + transforms = T.Compose([ + T.ElasticTransform(alpha=75.0, sigma=(1-strength)*3.5), + T.RandomVerticalFlip(p=1.0), + T.RandomHorizontalFlip(p=1.0), + ]) + image = transforms(image.permute([0,3,1,2])).permute([0,2,3,1]) + noise = torch.randn_like(image) * (strength*0.75) + noise = image * (1-noise) + noise + + del image + noise = torch.clamp(noise, 0, 1) + + if blur > 0: + if blur % 2 == 0: + blur += 1 + noise = T.functional.gaussian_blur(noise.permute([0,3,1,2]), blur).permute([0,2,3,1]) + + return (noise, ) + +class PrepImageForClipVision: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image": ("IMAGE",), + "interpolation": (["LANCZOS", "BICUBIC", "HAMMING", "BILINEAR", "BOX", "NEAREST"],), + "crop_position": (["top", "bottom", "left", "right", "center", "pad"],), + "sharpening": ("FLOAT", {"default": 0.0, "min": 0, "max": 1, "step": 0.05}), + }, + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "prep_image" + + CATEGORY = "ipadapter/utils" + + def prep_image(self, image, interpolation="LANCZOS", crop_position="center", sharpening=0.0): + size = (224, 224) + _, oh, ow, _ = image.shape + output = image.permute([0,3,1,2]) + + if crop_position == "pad": + if oh != ow: + if oh > ow: + pad = (oh - ow) // 2 + pad = (pad, 0, pad, 0) + elif ow > oh: + pad = (ow - oh) // 2 + pad = (0, pad, 0, pad) + output = T.functional.pad(output, pad, fill=0) + else: + crop_size = min(oh, ow) + x = (ow-crop_size) // 2 + y = (oh-crop_size) // 2 + if "top" in crop_position: + y = 0 + elif "bottom" in crop_position: + y = oh-crop_size + elif "left" in crop_position: + x = 0 + elif "right" in crop_position: + x = ow-crop_size + + x2 = x+crop_size + y2 = y+crop_size + + output = output[:, :, y:y2, x:x2] + + imgs = [] + for img in output: + img = T.ToPILImage()(img) # using PIL for better results + img = img.resize(size, resample=Image.Resampling[interpolation]) + imgs.append(T.ToTensor()(img)) + output = torch.stack(imgs, dim=0) + del imgs, img + + if sharpening > 0: + output = contrast_adaptive_sharpening(output, sharpening) + + output = output.permute([0,2,3,1]) + + return (output, ) + +class IPAdapterSaveEmbeds: + def __init__(self): + self.output_dir = folder_paths.get_output_directory() + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "embeds": ("EMBEDS",), + "filename_prefix": ("STRING", {"default": "IP_embeds"}) + }, + } + + RETURN_TYPES = () + FUNCTION = "save" + OUTPUT_NODE = True + CATEGORY = "ipadapter/embeds" + + def save(self, embeds, filename_prefix): + full_output_folder, filename, counter, subfolder, filename_prefix = folder_paths.get_save_image_path(filename_prefix, self.output_dir) + file = f"{filename}_{counter:05}.ipadpt" + file = os.path.join(full_output_folder, file) + + torch.save(embeds, file) + return (None, ) + +class IPAdapterLoadEmbeds: + @classmethod + def INPUT_TYPES(s): + input_dir = 
folder_paths.get_input_directory() + files = [os.path.relpath(os.path.join(root, file), input_dir) for root, dirs, files in os.walk(input_dir) for file in files if file.endswith('.ipadpt')] + return {"required": {"embeds": [sorted(files), ]}, } + + RETURN_TYPES = ("EMBEDS", ) + FUNCTION = "load" + CATEGORY = "ipadapter/embeds" + + def load(self, embeds): + path = folder_paths.get_annotated_filepath(embeds) + return (torch.load(path).cpu(), ) + +class IPAdapterWeights: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "weights": ("STRING", {"default": '1.0, 0.0', "multiline": True }), + "timing": (["custom", "linear", "ease_in_out", "ease_in", "ease_out", "random"], { "default": "linear" } ), + "frames": ("INT", {"default": 0, "min": 0, "max": 9999, "step": 1 }), + "start_frame": ("INT", {"default": 0, "min": 0, "max": 9999, "step": 1 }), + "end_frame": ("INT", {"default": 9999, "min": 0, "max": 9999, "step": 1 }), + "add_starting_frames": ("INT", {"default": 0, "min": 0, "max": 9999, "step": 1 }), + "add_ending_frames": ("INT", {"default": 0, "min": 0, "max": 9999, "step": 1 }), + "method": (["full batch", "shift batches", "alternate batches"], { "default": "full batch" }), + }, "optional": { + "image": ("IMAGE",), + } + } + + RETURN_TYPES = ("FLOAT", "FLOAT", "INT", "IMAGE", "IMAGE", "WEIGHTS_STRATEGY") + RETURN_NAMES = ("weights", "weights_invert", "total_frames", "image_1", "image_2", "weights_strategy") + FUNCTION = "weights" + CATEGORY = "ipadapter/weights" + + def weights(self, weights='', timing='custom', frames=0, start_frame=0, end_frame=9999, add_starting_frames=0, add_ending_frames=0, method='full batch', weights_strategy=None, image=None): + import random + + frame_count = image.shape[0] if image is not None else 0 + if weights_strategy is not None: + weights = weights_strategy["weights"] + timing = weights_strategy["timing"] + frames = weights_strategy["frames"] + start_frame = weights_strategy["start_frame"] + end_frame = weights_strategy["end_frame"] + add_starting_frames = weights_strategy["add_starting_frames"] + add_ending_frames = weights_strategy["add_ending_frames"] + method = weights_strategy["method"] + frame_count = weights_strategy["frame_count"] + else: + weights_strategy = { + "weights": weights, + "timing": timing, + "frames": frames, + "start_frame": start_frame, + "end_frame": end_frame, + "add_starting_frames": add_starting_frames, + "add_ending_frames": add_ending_frames, + "method": method, + "frame_count": frame_count, + } + + # convert the string to a list of floats separated by commas or newlines + weights = weights.replace("\n", ",") + weights = [float(weight) for weight in weights.split(",") if weight.strip() != ""] + + if timing != "custom": + frames = max(frames, 2) + start = 0.0 + end = 1.0 + + if len(weights) > 0: + start = weights[0] + end = weights[-1] + + weights = [] + + end_frame = min(end_frame, frames) + duration = end_frame - start_frame + if start_frame > 0: + weights.extend([start] * start_frame) + + for i in range(duration): + n = duration - 1 + if timing == "linear": + weights.append(start + (end - start) * i / n) + elif timing == "ease_in_out": + weights.append(start + (end - start) * (1 - math.cos(i / n * math.pi)) / 2) + elif timing == "ease_in": + weights.append(start + (end - start) * math.sin(i / n * math.pi / 2)) + elif timing == "ease_out": + weights.append(start + (end - start) * (1 - math.cos(i / n * math.pi / 2))) + elif timing == "random": + weights.append(random.uniform(start, end)) + + weights[-1] = end if 
timing != "random" else weights[-1] + if end_frame < frames: + weights.extend([end] * (frames - end_frame)) + + if len(weights) == 0: + weights = [0.0] + + frames = len(weights) + + # repeat the images for cross fade + image_1 = None + image_2 = None + + # Calculate the min and max of the weights + min_weight = min(weights) + max_weight = max(weights) + + if image is not None: + + if "shift" in method: + image_1 = image[:-1] + image_2 = image[1:] + + weights = weights * image_1.shape[0] + image_1 = image_1.repeat_interleave(frames, 0) + image_2 = image_2.repeat_interleave(frames, 0) + elif "alternate" in method: + image_1 = image[::2].repeat_interleave(2, 0) + image_1 = image_1[1:] + image_2 = image[1::2].repeat_interleave(2, 0) + + # Invert the weights relative to their own range + mew_weights = weights + [max_weight - (w - min_weight) for w in weights] + + mew_weights = mew_weights * (image_1.shape[0] // 2) + if image.shape[0] % 2: + image_1 = image_1[:-1] + else: + image_2 = image_2[:-1] + mew_weights = mew_weights + weights + + weights = mew_weights + image_1 = image_1.repeat_interleave(frames, 0) + image_2 = image_2.repeat_interleave(frames, 0) + else: + weights = weights * image.shape[0] + image_1 = image.repeat_interleave(frames, 0) + + # add starting and ending frames + if add_starting_frames > 0: + weights = [weights[0]] * add_starting_frames + weights + image_1 = torch.cat([image[:1].repeat(add_starting_frames, 1, 1, 1), image_1], dim=0) + if image_2 is not None: + image_2 = torch.cat([image[:1].repeat(add_starting_frames, 1, 1, 1), image_2], dim=0) + if add_ending_frames > 0: + weights = weights + [weights[-1]] * add_ending_frames + image_1 = torch.cat([image_1, image[-1:].repeat(add_ending_frames, 1, 1, 1)], dim=0) + if image_2 is not None: + image_2 = torch.cat([image_2, image[-1:].repeat(add_ending_frames, 1, 1, 1)], dim=0) + + # reverse the weights array + weights_invert = weights[::-1] + + frame_count = len(weights) + + return (weights, weights_invert, frame_count, image_1, image_2, weights_strategy,) + +class IPAdapterWeightsFromStrategy(IPAdapterWeights): + @classmethod + def INPUT_TYPES(s): + return {"required": { + "weights_strategy": ("WEIGHTS_STRATEGY",), + }, "optional": { + "image": ("IMAGE",), + } + } + +class IPAdapterPromptScheduleFromWeightsStrategy(): + @classmethod + def INPUT_TYPES(s): + return {"required": { + "weights_strategy": ("WEIGHTS_STRATEGY",), + "prompt": ("STRING", {"default": "", "multiline": True }), + }} + + RETURN_TYPES = ("STRING",) + RETURN_NAMES = ("prompt_schedule", ) + FUNCTION = "prompt_schedule" + CATEGORY = "ipadapter/weights" + + def prompt_schedule(self, weights_strategy, prompt=""): + frames = weights_strategy["frames"] + add_starting_frames = weights_strategy["add_starting_frames"] + add_ending_frames = weights_strategy["add_ending_frames"] + frame_count = weights_strategy["frame_count"] + + out = "" + + prompt = [p for p in prompt.split("\n") if p.strip() != ""] + + if len(prompt) > 0 and frame_count > 0: + # prompt_pos must be the same size as the image batch + if len(prompt) > frame_count: + prompt = prompt[:frame_count] + elif len(prompt) < frame_count: + prompt += [prompt[-1]] * (frame_count - len(prompt)) + + if add_starting_frames > 0: + out += f"\"0\": \"{prompt[0]}\",\n" + for i in range(frame_count): + out += f"\"{i * frames + add_starting_frames}\": \"{prompt[i]}\",\n" + if add_ending_frames > 0: + out += f"\"{frame_count * frames + add_starting_frames}\": \"{prompt[-1]}\",\n" + + return (out, ) + +class 
IPAdapterCombineWeights: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "weights_1": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.05 }), + "weights_2": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.05 }), + }} + RETURN_TYPES = ("FLOAT", "INT") + RETURN_NAMES = ("weights", "count") + FUNCTION = "combine" + CATEGORY = "ipadapter/utils" + + def combine(self, weights_1, weights_2): + if not isinstance(weights_1, list): + weights_1 = [weights_1] + if not isinstance(weights_2, list): + weights_2 = [weights_2] + weights = weights_1 + weights_2 + + return (weights, len(weights), ) + +class IPAdapterRegionalConditioning: + @classmethod + def INPUT_TYPES(s): + return {"required": { + #"set_cond_area": (["default", "mask bounds"],), + "image": ("IMAGE",), + "image_weight": ("FLOAT", { "default": 1.0, "min": -1.0, "max": 3.0, "step": 0.05 }), + "prompt_weight": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 10.0, "step": 0.05 }), + "weight_type": (WEIGHT_TYPES, ), + "start_at": ("FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + "end_at": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001 }), + }, "optional": { + "mask": ("MASK",), + "positive": ("CONDITIONING",), + "negative": ("CONDITIONING",), + }} + + RETURN_TYPES = ("IPADAPTER_PARAMS", "CONDITIONING", "CONDITIONING", ) + RETURN_NAMES = ("IPADAPTER_PARAMS", "POSITIVE", "NEGATIVE") + FUNCTION = "conditioning" + + CATEGORY = "ipadapter/params" + + def conditioning(self, image, image_weight, prompt_weight, weight_type, start_at, end_at, mask=None, positive=None, negative=None): + set_area_to_bounds = False #if set_cond_area == "default" else True + + if mask is not None: + if positive is not None: + positive = conditioning_set_values(positive, {"mask": mask, "set_area_to_bounds": set_area_to_bounds, "mask_strength": prompt_weight}) + if negative is not None: + negative = conditioning_set_values(negative, {"mask": mask, "set_area_to_bounds": set_area_to_bounds, "mask_strength": prompt_weight}) + + ipadapter_params = { + "image": [image], + "attn_mask": [mask], + "weight": [image_weight], + "weight_type": [weight_type], + "start_at": [start_at], + "end_at": [end_at], + } + + return (ipadapter_params, positive, negative, ) + +class IPAdapterCombineParams: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "params_1": ("IPADAPTER_PARAMS",), + "params_2": ("IPADAPTER_PARAMS",), + }, "optional": { + "params_3": ("IPADAPTER_PARAMS",), + "params_4": ("IPADAPTER_PARAMS",), + "params_5": ("IPADAPTER_PARAMS",), + }} + + RETURN_TYPES = ("IPADAPTER_PARAMS",) + FUNCTION = "combine" + CATEGORY = "ipadapter/params" + + def combine(self, params_1, params_2, params_3=None, params_4=None, params_5=None): + ipadapter_params = { + "image": params_1["image"] + params_2["image"], + "attn_mask": params_1["attn_mask"] + params_2["attn_mask"], + "weight": params_1["weight"] + params_2["weight"], + "weight_type": params_1["weight_type"] + params_2["weight_type"], + "start_at": params_1["start_at"] + params_2["start_at"], + "end_at": params_1["end_at"] + params_2["end_at"], + } + + if params_3 is not None: + ipadapter_params["image"] += params_3["image"] + ipadapter_params["attn_mask"] += params_3["attn_mask"] + ipadapter_params["weight"] += params_3["weight"] + ipadapter_params["weight_type"] += params_3["weight_type"] + ipadapter_params["start_at"] += params_3["start_at"] + ipadapter_params["end_at"] += params_3["end_at"] + if params_4 is not None: + ipadapter_params["image"] += 
params_4["image"] + ipadapter_params["attn_mask"] += params_4["attn_mask"] + ipadapter_params["weight"] += params_4["weight"] + ipadapter_params["weight_type"] += params_4["weight_type"] + ipadapter_params["start_at"] += params_4["start_at"] + ipadapter_params["end_at"] += params_4["end_at"] + if params_5 is not None: + ipadapter_params["image"] += params_5["image"] + ipadapter_params["attn_mask"] += params_5["attn_mask"] + ipadapter_params["weight"] += params_5["weight"] + ipadapter_params["weight_type"] += params_5["weight_type"] + ipadapter_params["start_at"] += params_5["start_at"] + ipadapter_params["end_at"] += params_5["end_at"] + + return (ipadapter_params, ) + +""" +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Register +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +NODE_CLASS_MAPPINGS = { + # Main Apply Nodes + "IPAdapter": IPAdapterSimple, + "IPAdapterAdvanced": IPAdapterAdvanced, + "IPAdapterBatch": IPAdapterBatch, + "IPAdapterFaceID": IPAdapterFaceID, + "IPAdapterFaceIDKolors": IPAdapterFaceIDKolors, + "IPAAdapterFaceIDBatch": IPAAdapterFaceIDBatch, + "IPAdapterTiled": IPAdapterTiled, + "IPAdapterTiledBatch": IPAdapterTiledBatch, + "IPAdapterEmbeds": IPAdapterEmbeds, + "IPAdapterEmbedsBatch": IPAdapterEmbedsBatch, + "IPAdapterStyleComposition": IPAdapterStyleComposition, + "IPAdapterStyleCompositionBatch": IPAdapterStyleCompositionBatch, + "IPAdapterMS": IPAdapterMS, + "IPAdapterClipVisionEnhancer": IPAdapterClipVisionEnhancer, + "IPAdapterClipVisionEnhancerBatch": IPAdapterClipVisionEnhancerBatch, + "IPAdapterFromParams": IPAdapterFromParams, + "IPAdapterPreciseStyleTransfer": IPAdapterPreciseStyleTransfer, + "IPAdapterPreciseStyleTransferBatch": IPAdapterPreciseStyleTransferBatch, + "IPAdapterPreciseComposition": IPAdapterPreciseComposition, + "IPAdapterPreciseCompositionBatch": IPAdapterPreciseCompositionBatch, + + # Loaders + "IPAdapterUnifiedLoader": IPAdapterUnifiedLoader, + "IPAdapterUnifiedLoaderFaceID": IPAdapterUnifiedLoaderFaceID, + "IPAdapterModelLoader": IPAdapterModelLoader, + "IPAdapterInsightFaceLoader": IPAdapterInsightFaceLoader, + "IPAdapterUnifiedLoaderCommunity": IPAdapterUnifiedLoaderCommunity, + + # Helpers + "IPAdapterEncoder": IPAdapterEncoder, + "IPAdapterCombineEmbeds": IPAdapterCombineEmbeds, + "IPAdapterNoise": IPAdapterNoise, + "PrepImageForClipVision": PrepImageForClipVision, + "IPAdapterSaveEmbeds": IPAdapterSaveEmbeds, + "IPAdapterLoadEmbeds": IPAdapterLoadEmbeds, + "IPAdapterWeights": IPAdapterWeights, + "IPAdapterCombineWeights": IPAdapterCombineWeights, + "IPAdapterWeightsFromStrategy": IPAdapterWeightsFromStrategy, + "IPAdapterPromptScheduleFromWeightsStrategy": IPAdapterPromptScheduleFromWeightsStrategy, + "IPAdapterRegionalConditioning": IPAdapterRegionalConditioning, + "IPAdapterCombineParams": IPAdapterCombineParams, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + # Main Apply Nodes + "IPAdapter": "IPAdapter", + "IPAdapterAdvanced": "IPAdapter Advanced", + "IPAdapterBatch": "IPAdapter Batch (Adv.)", + "IPAdapterFaceID": "IPAdapter FaceID", + "IPAdapterFaceIDKolors": "IPAdapter FaceID Kolors", + "IPAAdapterFaceIDBatch": "IPAdapter FaceID Batch", + "IPAdapterTiled": "IPAdapter Tiled", + "IPAdapterTiledBatch": "IPAdapter Tiled Batch", + "IPAdapterEmbeds": "IPAdapter Embeds", + "IPAdapterEmbedsBatch": "IPAdapter Embeds Batch", + "IPAdapterStyleComposition": "IPAdapter Style & Composition SDXL", + "IPAdapterStyleCompositionBatch": "IPAdapter Style & Composition Batch SDXL", + 
"IPAdapterMS": "IPAdapter Mad Scientist", + "IPAdapterClipVisionEnhancer": "IPAdapter ClipVision Enhancer", + "IPAdapterClipVisionEnhancerBatch": "IPAdapter ClipVision Enhancer Batch", + "IPAdapterFromParams": "IPAdapter from Params", + "IPAdapterPreciseStyleTransfer": "IPAdapter Precise Style Transfer", + "IPAdapterPreciseStyleTransferBatch": "IPAdapter Precise Style Transfer Batch", + "IPAdapterPreciseComposition": "IPAdapter Precise Composition", + "IPAdapterPreciseCompositionBatch": "IPAdapter Precise Composition Batch", + + # Loaders + "IPAdapterUnifiedLoader": "IPAdapter Unified Loader", + "IPAdapterUnifiedLoaderFaceID": "IPAdapter Unified Loader FaceID", + "IPAdapterModelLoader": "IPAdapter Model Loader", + "IPAdapterInsightFaceLoader": "IPAdapter InsightFace Loader", + "IPAdapterUnifiedLoaderCommunity": "IPAdapter Unified Loader Community", + + # Helpers + "IPAdapterEncoder": "IPAdapter Encoder", + "IPAdapterCombineEmbeds": "IPAdapter Combine Embeds", + "IPAdapterNoise": "IPAdapter Noise", + "PrepImageForClipVision": "Prep Image For ClipVision", + "IPAdapterSaveEmbeds": "IPAdapter Save Embeds", + "IPAdapterLoadEmbeds": "IPAdapter Load Embeds", + "IPAdapterWeights": "IPAdapter Weights", + "IPAdapterWeightsFromStrategy": "IPAdapter Weights From Strategy", + "IPAdapterPromptScheduleFromWeightsStrategy": "Prompt Schedule From Weights Strategy", + "IPAdapterCombineWeights": "IPAdapter Combine Weights", + "IPAdapterRegionalConditioning": "IPAdapter Regional Conditioning", + "IPAdapterCombineParams": "IPAdapter Combine Params", +} \ No newline at end of file diff --git a/ComfyUI_IPAdapter_plus/LICENSE b/ComfyUI_IPAdapter_plus/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..f288702d2fa16d3cdf0035b15a9fcbc552cd88e7 --- /dev/null +++ b/ComfyUI_IPAdapter_plus/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. 
+ + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. 
+ + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. 
You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. 
+ + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. 
In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + <program> Copyright (C) <year> <name of author> + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +<https://www.gnu.org/licenses/>. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +<https://www.gnu.org/philosophy/why-not-lgpl.html>. diff --git a/ComfyUI_IPAdapter_plus/NODES.md b/ComfyUI_IPAdapter_plus/NODES.md new file mode 100644 index 0000000000000000000000000000000000000000..e4c33a2b4eeda824d5e4366cd042495b83ef5271 --- /dev/null +++ b/ComfyUI_IPAdapter_plus/NODES.md @@ -0,0 +1,54 @@ +# Nodes reference + +Below I'm trying to document all the nodes. It's still very incomplete; be sure to check back later. + +## Loaders + +### :knot: IPAdapter Unified Loader + +Loads the full stack of models needed for IPAdapter to function. The returned object will contain information regarding the **ipadapter** and **clip vision models**. + +Multiple unified loaders should always be daisy chained through the `ipadapter` in/out. **Failing to do so will cause all models to be loaded twice.** For **the first** unified loader the `ipadapter` input **should never be connected**. + +#### Inputs +- **model**, main ComfyUI model pipeline + +#### Optional Inputs +- **ipadapter**, it's important to note that this is optional and used exclusively to daisy chain unified loaders.
**The `ipadapter` input is never connected in the first `IPAdapter Unified Loader` of the chain.** + +#### Outputs +- **model**, the model pipeline is used exclusively for configuration; the model comes out of this node untouched and it can be considered a reroute. Note that this is different from the Unified Loader FaceID that actually alters the model with a LoRA. +- **ipadapter**, connect this to any ipadapter node. Each node will automatically detect if the `ipadapter` object contains the full stack of models or just one (as in the case of the [IPAdapter Model Loader](#ipadapter-model-loader)). + +### :knot: IPAdapter Model Loader + +Loads the IPAdapter model only. The returned object will be the IPAdapter model, contrary to the [Unified loader](#ipadapter-unified-loader) that contains the full stack of models. + +#### Configuration parameters +- **ipadapter_file**, the main IPAdapter model. It must be located in `ComfyUI/models/ipadapter` or in any path specified in the `extra_model_paths.yaml` configuration file. + +#### Outputs +- **IPADAPTER**, contains the loaded model only. Note that `IPADAPTER` will have a different structure when loaded by the [Unified Loader](#ipadapter-unified-loader). + +## Main IPAdapter Apply Nodes + +### :knot: IPAdapter Advanced + +This node contains all the options to fine-tune the IPAdapter models. It is a drop-in replacement for the old `IPAdapter Apply` that is no longer available. If you have an old workflow, delete the existing `IPAdapter Apply` node, add `IPAdapter Advanced` and connect all the pipes as before. + +#### Inputs +- **model**, main model pipeline. +- **ipadapter**, the IPAdapter model. It can be connected to the [IPAdapter Model Loader](#ipadapter-model-loader) or any of the Unified Loaders. If a Unified loader is used anywhere in the workflow and you don't need a different model, it's always advised to reuse the previous `ipadapter` pipeline. +- **image**, the reference image used to generate the positive conditioning. It should be a square image; other aspect ratios are automatically cropped in the center. + +#### Optional inputs +- **image_negative**, image used to generate the negative conditioning. This is optional and normally handled by the code. It is possible to send noise or actually any image to instruct the model about what we don't want to see in the composition. +- **attn_mask**, a mask that will be applied during the image generation. **The mask should have the same size or at least the same aspect ratio as the latent**. The mask will define the area of influence of the IPAdapter models on the final image. Black zones won't be affected; white zones will get maximum influence. It can be a grayscale mask. +- **clip_vision**, this is optional if using any of the Unified loaders. If using the [IPAdapter Model Loader](#knot-ipadapter-model-loader) you also have to provide the clip vision model with a `Load CLIP Vision` node. + +#### Configuration parameters +- **weight**, weight of the IPAdapter model. For `linear` `weight_type` (the default), a good starting point is 0.8. If you use other weight types you can experiment with higher values. +- **weight_type**, this is how the IPAdapter is applied to the UNet blocks. For example `ease-in` means that the input blocks have higher weight than the output ones. `weak input` means that the whole input block has lower weight. `style transfer (SDXL)` only works with SDXL and it's a very powerful tool to transfer only the style of an image but not its content.
This parameter hugely impacts how the composition reacts to the text prompting. +- **combine_embeds**, when sending more than one reference image the embeddings can be sent one after the other (`concat`) or combined in various ways. For low-spec GPUs it is advised to `average` the embeds if you send multiple images. `subtract` subtracts the embeddings of the second image from the first; in case of 3 or more images they are averaged and subtracted from the first. +- **start_at/end_at**, this is the timestepping: it defines at what percentage point of the generation to start and stop applying the IPAdapter model. The initial steps are the most important, so if you start later (e.g. `start_at=0.3`) the generated image will have a very light conditioning. +- **embeds_scaling**, the way the IPAdapter models are applied to the K,V. This parameter has a small impact on how the model reacts to text prompting. `K+mean(V) w/ C penalty` grants good quality at high weights (>1.0) without burning the image. diff --git a/ComfyUI_IPAdapter_plus/README.md b/ComfyUI_IPAdapter_plus/README.md new file mode 100644 index 0000000000000000000000000000000000000000..5dc1701beb08fcfa8df483293d01f4341901b386 --- /dev/null +++ b/ComfyUI_IPAdapter_plus/README.md @@ -0,0 +1,190 @@ +# ComfyUI IPAdapter plus +[ComfyUI](https://github.com/comfyanonymous/ComfyUI) reference implementation for [IPAdapter](https://github.com/tencent-ailab/IP-Adapter/) models. + +The IPAdapter models are very powerful for image-to-image conditioning. The subject or even just the style of the reference image(s) can be easily transferred to a generation. Think of it as a 1-image LoRA. + +# Sponsorship + +<div align="center">
    + +**[:heart: Github Sponsor](https://github.com/sponsors/cubiq) | [:coin: Paypal](https://paypal.me/matt3o)** + +
    + +If you like my work and wish to see updates and new features please consider sponsoring my projects. + +- [ComfyUI IPAdapter Plus](https://github.com/cubiq/ComfyUI_IPAdapter_plus) +- [ComfyUI InstantID (Native)](https://github.com/cubiq/ComfyUI_InstantID) +- [ComfyUI Essentials](https://github.com/cubiq/ComfyUI_essentials) +- [ComfyUI FaceAnalysis](https://github.com/cubiq/ComfyUI_FaceAnalysis) +- [Comfy Dungeon](https://github.com/cubiq/Comfy_Dungeon) + +Not to mention the documentation and videos tutorials. Check my **ComfyUI Advanced Understanding** videos on YouTube for example, [part 1](https://www.youtube.com/watch?v=_C7kR2TFIX0) and [part 2](https://www.youtube.com/watch?v=ijqXnW_9gzc) + +The only way to keep the code open and free is by sponsoring its development. The more sponsorships the more time I can dedicate to my open source projects. + +Please consider a [Github Sponsorship](https://github.com/sponsors/cubiq) or [PayPal donation](https://paypal.me/matt3o) (Matteo "matt3o" Spinelli). For sponsorships of $50+, let me know if you'd like to be mentioned in this readme file, you can find me on [Discord](https://latent.vision/discord) or _matt3o :snail: gmail.com_. + +## Important updates + +**2024/08/02**: Support for Kolors FaceIDv2. Please check the [example workflow](./examples/IPAdapter_FaceIDv2_Kolors.json) for best practices. + +**2024/07/26**: Added support for image batches and animation to the ClipVision Enhancer. + +**2024/07/18**: Support for Kolors. + +**2024/07/17**: Added experimental ClipVision Enhancer node. It was somehow inspired by the [Scaling on Scales](https://arxiv.org/pdf/2403.13043) paper but the implementation is a bit different. The new IPAdapterClipVisionEnhancer tries to catch small details by tiling the embeds (instead of the image in the pixel space), the result is a slightly higher resolution visual embedding with no cost of performance. + +**2024/07/11**: Added experimental Precise composition (layout) transfer. It's not as good as style. `embeds_scaling` has a huge impact. Start with strength 0.8 and boost 0.3 in SDXL and 0.6 boost 0.35 in SD1.5. + +**2024/06/28**: Added the `IPAdapter Precise Style Transfer` node. Increase the `style_boost` option to lower the bleeding of the composition layer. **Important:** works better in SDXL, start with a style_boost of 2; for SD1.5 try to increase the weight a little over 1.0 and set the style_boost to a value between -1 and +1, starting with 0. + +**2024/06/22**: Added `style transfer precise`, offers less bleeding of the embeds between the style and composition layers. It is sometimes better than the standard style transfer especially if the reference image is very different from the generated image. Works better in SDXL than SD1.5. + +**2024/05/21**: Improved memory allocation when `encode_batch_size`. Useful mostly for very long animations. + +**2024/05/02**: Add `encode_batch_size` to the Advanced batch node. This can be useful for animations with a lot of frames to reduce the VRAM usage during the image encoding. Please note that results will be slightly different based on the batch size. + +**2024/04/27**: Refactored the IPAdapterWeights mostly useful for AnimateDiff animations. + +**2024/04/21**: Added Regional Conditioning nodes to simplify attention masking and masked text conditioning. + +**2024/04/16**: Added support for the new SDXL portrait unnorm model (link below). It's very strong and tends to ignore the text conditioning. Lower the CFG to 3-4 or use a RescaleCFG node. 
+ +**2024/04/12**: Added scheduled weights. Useful for animations. + +*(Older updates removed for readability)* + +## Example workflows + +The [examples directory](./examples/) has many workflows that cover all IPAdapter functionalities. + +![IPAdapter Example workflow](./examples/demo_workflow.jpg) + +## Video Tutorials + + + Watch the video + + +- **:star: [New IPAdapter features](https://youtu.be/_JzDcgKgghY)** +- **:art: [IPAdapter Style and Composition](https://www.youtube.com/watch?v=czcgJnoDVd4)** + +The following videos are about the previous version of IPAdapter, but they still contain valuable information. + +:nerd_face: [Basic usage video](https://youtu.be/7m9ZZFU3HWo), :rocket: [Advanced features video](https://www.youtube.com/watch?v=mJQ62ly7jrg), :japanese_goblin: [Attention Masking video](https://www.youtube.com/watch?v=vqG1VXKteQg), :movie_camera: [Animation Features video](https://www.youtube.com/watch?v=ddYbhv3WgWw) + +## Installation + +Download or git clone this repository inside `ComfyUI/custom_nodes/` directory or use the Manager. IPAdapter always requires the latest version of ComfyUI. If something doesn't work be sure to upgrade. Beware that the automatic update of the manager sometimes doesn't work and you may need to upgrade manually. + +There's now a *Unified Model Loader*, for it to work you need to name the files exactly as described below. The legacy loaders work with any file name but you have to select them manually. The models can be placed into sub-directories. + +Remember you can also use any custom location setting an `ipadapter` entry in the `extra_model_paths.yaml` file. + +- `/ComfyUI/models/clip_vision` + - [CLIP-ViT-H-14-laion2B-s32B-b79K.safetensors](https://huggingface.co/h94/IP-Adapter/resolve/main/models/image_encoder/model.safetensors), download and rename + - [CLIP-ViT-bigG-14-laion2B-39B-b160k.safetensors](https://huggingface.co/h94/IP-Adapter/resolve/main/sdxl_models/image_encoder/model.safetensors), download and rename + - [clip-vit-large-patch14-336.bin](https://huggingface.co/Kwai-Kolors/Kolors-IP-Adapter-Plus/resolve/main/image_encoder/pytorch_model.bin), download and rename only for Kolors models +- `/ComfyUI/models/ipadapter`, create it if not present + - [ip-adapter_sd15.safetensors](https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter_sd15.safetensors), Basic model, average strength + - [ip-adapter_sd15_light_v11.bin](https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter_sd15_light_v11.bin), Light impact model + - [ip-adapter-plus_sd15.safetensors](https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter-plus_sd15.safetensors), Plus model, very strong + - [ip-adapter-plus-face_sd15.safetensors](https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter-plus-face_sd15.safetensors), Face model, portraits + - [ip-adapter-full-face_sd15.safetensors](https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter-full-face_sd15.safetensors), Stronger face model, not necessarily better + - [ip-adapter_sd15_vit-G.safetensors](https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter_sd15_vit-G.safetensors), Base model, **requires bigG clip vision encoder** + - [ip-adapter_sdxl_vit-h.safetensors](https://huggingface.co/h94/IP-Adapter/resolve/main/sdxl_models/ip-adapter_sdxl_vit-h.safetensors), SDXL model + - [ip-adapter-plus_sdxl_vit-h.safetensors](https://huggingface.co/h94/IP-Adapter/resolve/main/sdxl_models/ip-adapter-plus_sdxl_vit-h.safetensors), SDXL plus model + - 
[ip-adapter-plus-face_sdxl_vit-h.safetensors](https://huggingface.co/h94/IP-Adapter/resolve/main/sdxl_models/ip-adapter-plus-face_sdxl_vit-h.safetensors), SDXL face model + - [ip-adapter_sdxl.safetensors](https://huggingface.co/h94/IP-Adapter/resolve/main/sdxl_models/ip-adapter_sdxl.safetensors), vit-G SDXL model, **requires bigG clip vision encoder** + - **Deprecated** [ip-adapter_sd15_light.safetensors](https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter_sd15_light.safetensors), v1.0 Light impact model + +**FaceID** models require `insightface`, you need to install it in your ComfyUI environment. Check [this issue](https://github.com/cubiq/ComfyUI_IPAdapter_plus/issues/162) for help. Remember that most FaceID models also need a LoRA. + +For the Unified Loader to work the files need to be named exactly as shown in the list below. + +- `/ComfyUI/models/ipadapter` + - [ip-adapter-faceid_sd15.bin](https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid_sd15.bin), base FaceID model + - [ip-adapter-faceid-plusv2_sd15.bin](https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plusv2_sd15.bin), FaceID plus v2 + - [ip-adapter-faceid-portrait-v11_sd15.bin](https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-portrait-v11_sd15.bin), text prompt style transfer for portraits + - [ip-adapter-faceid_sdxl.bin](https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid_sdxl.bin), SDXL base FaceID + - [ip-adapter-faceid-plusv2_sdxl.bin](https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plusv2_sdxl.bin), SDXL plus v2 + - [ip-adapter-faceid-portrait_sdxl.bin](https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-portrait_sdxl.bin), SDXL text prompt style transfer + - [ip-adapter-faceid-portrait_sdxl_unnorm.bin](https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-portrait_sdxl_unnorm.bin), very strong style transfer SDXL only + - **Deprecated** [ip-adapter-faceid-plus_sd15.bin](https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plus_sd15.bin), FaceID plus v1 + - **Deprecated** [ip-adapter-faceid-portrait_sd15.bin](https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-portrait_sd15.bin), v1 of the portrait model + +Most FaceID models require a LoRA. If you use the `IPAdapter Unified Loader FaceID` it will be loaded automatically if you follow the naming convention. Otherwise you have to load them manually, be careful each FaceID model has to be paired with its own specific LoRA. 
+ +- `/ComfyUI/models/loras` + - [ip-adapter-faceid_sd15_lora.safetensors](https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid_sd15_lora.safetensors) + - [ip-adapter-faceid-plusv2_sd15_lora.safetensors](https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plusv2_sd15_lora.safetensors) + - [ip-adapter-faceid_sdxl_lora.safetensors](https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid_sdxl_lora.safetensors), SDXL FaceID LoRA + - [ip-adapter-faceid-plusv2_sdxl_lora.safetensors](https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plusv2_sdxl_lora.safetensors), SDXL plus v2 LoRA + - **Deprecated** [ip-adapter-faceid-plus_sd15_lora.safetensors](https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plus_sd15_lora.safetensors), LoRA for the deprecated FaceID plus v1 model + +All models can be found on [huggingface](https://huggingface.co/h94). + +### Community's models + +The community has baked some interesting IPAdapter models. + +- `/ComfyUI/models/ipadapter` + - [ip_plus_composition_sd15.safetensors](https://huggingface.co/ostris/ip-composition-adapter/resolve/main/ip_plus_composition_sd15.safetensors), general composition ignoring style and content, more about it [here](https://huggingface.co/ostris/ip-composition-adapter) + - [ip_plus_composition_sdxl.safetensors](https://huggingface.co/ostris/ip-composition-adapter/resolve/main/ip_plus_composition_sdxl.safetensors), SDXL version + - [Kolors-IP-Adapter-Plus.bin](https://huggingface.co/Kwai-Kolors/Kolors-IP-Adapter-Plus/resolve/main/ip_adapter_plus_general.bin?download=true), IPAdapter Plus for the Kolors model + - [Kolors-IP-Adapter-FaceID-Plus.bin](https://huggingface.co/Kwai-Kolors/Kolors-IP-Adapter-FaceID-Plus/resolve/main/ipa-faceid-plus.bin?download=true), IPAdapter FaceIDv2 for the Kolors model. **Note:** Kolors is trained on the InsightFace **antelopev2** model; you need to [manually download it](https://huggingface.co/MonsterMMORPG/tools/tree/main) and place it inside the `models/insightface` directory. + +If you know of other models please let me know and I will add them to the unified loader. + +## Generic suggestions + +There are many workflows included in the [examples](./examples/) directory. Please check them before asking for support. + +Usually it's a good idea to lower the `weight` to at least `0.8` and increase the number of steps. To increase adherence to the prompt you may try to change the **weight type** in the `IPAdapter Advanced` node. + +## Nodes reference + +I'm (slowly) documenting all nodes. Please check the [Nodes reference](./NODES.md). + +## Troubleshooting + +Please check the [troubleshooting](https://github.com/cubiq/ComfyUI_IPAdapter_plus/issues/108) before posting a new issue. Also remember to check the previous closed issues. + +## Current sponsors + +It's only thanks to generous sponsors that **the whole community** can enjoy open and free software. Please join me in thanking the following companies and individuals!
+ +### :trophy: Gold sponsors + +[![Kaiber.ai](https://f.latent.vision/imgs/kaiber.png)](https://kaiber.ai/)   [![Kaiber.ai](https://f.latent.vision/imgs/replicate.png)](https://replicate.com/) + +### :tada: Silver sponsors + +[![OperArt.ai](https://f.latent.vision/imgs/openart.png?r=1)](https://openart.ai/workflows)   [![OperArt.ai](https://f.latent.vision/imgs/finetuners.png)](https://www.finetuners.ai/)   [![Comfy.ICU](https://f.latent.vision/imgs/comfyicu.png?r=1)](https://comfy.icu/) + +### Companies supporting my projects + +- [RunComfy](https://www.runcomfy.com/) (ComfyUI Cloud) + +### Esteemed individuals + +- [Jack Gane](https://github.com/ganeJackS) +- [Nathan Shipley](https://www.nathanshipley.com/) +- [Dkdnzia](https://github.com/Dkdnzia) + +### One-time Extraordinaires + +- [Eric Rollei](https://github.com/EricRollei) +- [francaleu](https://github.com/francaleu) +- [Neta.art](https://github.com/talesofai) +- [Samwise Wang](https://github.com/tzwm) +- _And all private sponsors, you know who you are!_ + +## Credits + +- [IPAdapter](https://github.com/tencent-ailab/IP-Adapter/) +- [InstantStyle](https://github.com/InstantStyle/InstantStyle) +- [B-Lora](https://github.com/yardenfren1996/B-LoRA/) +- [ComfyUI](https://github.com/comfyanonymous/ComfyUI) +- [laksjdjf](https://github.com/laksjdjf/) diff --git a/ComfyUI_IPAdapter_plus/__init__.py b/ComfyUI_IPAdapter_plus/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1580e99ad7350fbc5287fea2553d2d5763899c7c --- /dev/null +++ b/ComfyUI_IPAdapter_plus/__init__.py @@ -0,0 +1,19 @@ +""" + ██▓ ██▓███ ▄▄▄ ▓█████▄ ▄▄▄ ██▓███ ▄▄▄█████▓▓█████ ██▀███ +▓██▒▓██░ ██▒▒████▄ ▒██▀ ██▌▒████▄ ▓██░ ██▒▓ ██▒ ▓▒▓█ ▀ ▓██ ▒ ██▒ +▒██▒▓██░ ██▓▒▒██ ▀█▄ ░██ █▌▒██ ▀█▄ ▓██░ ██▓▒▒ ▓██░ ▒░▒███ ▓██ ░▄█ ▒ +░██░▒██▄█▓▒ ▒░██▄▄▄▄██ ░▓█▄ ▌░██▄▄▄▄██ ▒██▄█▓▒ ▒░ ▓██▓ ░ ▒▓█ ▄ ▒██▀▀█▄ +░██░▒██▒ ░ ░ ▓█ ▓██▒░▒████▓ ▓█ ▓██▒▒██▒ ░ ░ ▒██▒ ░ ░▒████▒░██▓ ▒██▒ +░▓ ▒▓▒░ ░ ░ ▒▒ ▓▒█░ ▒▒▓ ▒ ▒▒ ▓▒█░▒▓▒░ ░ ░ ▒ ░░ ░░ ▒░ ░░ ▒▓ ░▒▓░ + ▒ ░░▒ ░ ▒ ▒▒ ░ ░ ▒ ▒ ▒ ▒▒ ░░▒ ░ ░ ░ ░ ░ ░▒ ░ ▒░ + ▒ ░░░ ░ ▒ ░ ░ ░ ░ ▒ ░░ ░ ░ ░░ ░ + ░ ░ ░ ░ ░ ░ ░ ░ ░ + ░ + · -—+ IPAdapter Plus Extension for ComfyUI +—- · + Brought to you by Matteo "Matt3o/Cubiq" Spinelli + https://github.com/cubiq/ComfyUI_IPAdapter_plus/ +""" + +from .IPAdapterPlus import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS + +__all__ = ['NODE_CLASS_MAPPINGS', 'NODE_DISPLAY_NAME_MAPPINGS'] diff --git a/ComfyUI_IPAdapter_plus/image_proj_models.py b/ComfyUI_IPAdapter_plus/image_proj_models.py new file mode 100644 index 0000000000000000000000000000000000000000..da61b53bb20e94cdc203c121a66129f745e3f4f0 --- /dev/null +++ b/ComfyUI_IPAdapter_plus/image_proj_models.py @@ -0,0 +1,275 @@ +import math +import torch +import torch.nn as nn +from einops import rearrange +from einops.layers.torch import Rearrange + + +# FFN +def FeedForward(dim, mult=4): + inner_dim = int(dim * mult) + return nn.Sequential( + nn.LayerNorm(dim), + nn.Linear(dim, inner_dim, bias=False), + nn.GELU(), + nn.Linear(inner_dim, dim, bias=False), + ) + + +def reshape_tensor(x, heads): + bs, length, width = x.shape + # (bs, length, width) --> (bs, length, n_heads, dim_per_head) + x = x.view(bs, length, heads, -1) + # (bs, length, n_heads, dim_per_head) --> (bs, n_heads, length, dim_per_head) + x = x.transpose(1, 2) + # (bs, n_heads, length, dim_per_head) --> (bs*n_heads, length, dim_per_head) + x = x.reshape(bs, heads, length, -1) + return x + + +class PerceiverAttention(nn.Module): + def __init__(self, *, dim, dim_head=64, heads=8): + super().__init__() 
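+ # Perceiver-style cross-attention: the learned latents act as queries, while keys and values come from the image features concatenated with the latents (see forward)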
+ self.scale = dim_head**-0.5 + self.dim_head = dim_head + self.heads = heads + inner_dim = dim_head * heads + + self.norm1 = nn.LayerNorm(dim) + self.norm2 = nn.LayerNorm(dim) + + self.to_q = nn.Linear(dim, inner_dim, bias=False) + self.to_kv = nn.Linear(dim, inner_dim * 2, bias=False) + self.to_out = nn.Linear(inner_dim, dim, bias=False) + + def forward(self, x, latents): + """ + Args: + x (torch.Tensor): image features + shape (b, n1, D) + latent (torch.Tensor): latent features + shape (b, n2, D) + """ + x = self.norm1(x) + latents = self.norm2(latents) + + b, l, _ = latents.shape + + q = self.to_q(latents) + kv_input = torch.cat((x, latents), dim=-2) + k, v = self.to_kv(kv_input).chunk(2, dim=-1) + + q = reshape_tensor(q, self.heads) + k = reshape_tensor(k, self.heads) + v = reshape_tensor(v, self.heads) + + # attention + scale = 1 / math.sqrt(math.sqrt(self.dim_head)) + weight = (q * scale) @ (k * scale).transpose(-2, -1) # More stable with f16 than dividing afterwards + weight = torch.softmax(weight.float(), dim=-1).type(weight.dtype) + out = weight @ v + + out = out.permute(0, 2, 1, 3).reshape(b, l, -1) + + return self.to_out(out) + + +class Resampler(nn.Module): + def __init__( + self, + dim=1024, + depth=8, + dim_head=64, + heads=16, + num_queries=8, + embedding_dim=768, + output_dim=1024, + ff_mult=4, + max_seq_len: int = 257, # CLIP tokens + CLS token + apply_pos_emb: bool = False, + num_latents_mean_pooled: int = 0, # number of latents derived from mean pooled representation of the sequence + ): + super().__init__() + self.pos_emb = nn.Embedding(max_seq_len, embedding_dim) if apply_pos_emb else None + + self.latents = nn.Parameter(torch.randn(1, num_queries, dim) / dim**0.5) + + self.proj_in = nn.Linear(embedding_dim, dim) + + self.proj_out = nn.Linear(dim, output_dim) + self.norm_out = nn.LayerNorm(output_dim) + + self.to_latents_from_mean_pooled_seq = ( + nn.Sequential( + nn.LayerNorm(dim), + nn.Linear(dim, dim * num_latents_mean_pooled), + Rearrange("b (n d) -> b n d", n=num_latents_mean_pooled), + ) + if num_latents_mean_pooled > 0 + else None + ) + + self.layers = nn.ModuleList([]) + for _ in range(depth): + self.layers.append( + nn.ModuleList( + [ + PerceiverAttention(dim=dim, dim_head=dim_head, heads=heads), + FeedForward(dim=dim, mult=ff_mult), + ] + ) + ) + + def forward(self, x): + if self.pos_emb is not None: + n, device = x.shape[1], x.device + pos_emb = self.pos_emb(torch.arange(n, device=device)) + x = x + pos_emb + + latents = self.latents.repeat(x.size(0), 1, 1) + + x = self.proj_in(x) + + if self.to_latents_from_mean_pooled_seq: + meanpooled_seq = masked_mean(x, dim=1, mask=torch.ones(x.shape[:2], device=x.device, dtype=torch.bool)) + meanpooled_latents = self.to_latents_from_mean_pooled_seq(meanpooled_seq) + latents = torch.cat((meanpooled_latents, latents), dim=-2) + + for attn, ff in self.layers: + latents = attn(x, latents) + latents + latents = ff(latents) + latents + + latents = self.proj_out(latents) + return self.norm_out(latents) + + +def masked_mean(t, *, dim, mask=None): + if mask is None: + return t.mean(dim=dim) + + denom = mask.sum(dim=dim, keepdim=True) + mask = rearrange(mask, "b n -> b n 1") + masked_t = t.masked_fill(~mask, 0.0) + + return masked_t.sum(dim=dim) / denom.clamp(min=1e-5) + + +class FacePerceiverResampler(nn.Module): + def __init__( + self, + *, + dim=768, + depth=4, + dim_head=64, + heads=16, + embedding_dim=1280, + output_dim=768, + ff_mult=4, + ): + super().__init__() + + self.proj_in = nn.Linear(embedding_dim, dim) + 
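# proj_in maps the CLIP image features into the resampler width (dim); proj_out maps the refined latents back to the output dimension +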
self.proj_out = nn.Linear(dim, output_dim) + self.norm_out = nn.LayerNorm(output_dim) + self.layers = nn.ModuleList([]) + for _ in range(depth): + self.layers.append( + nn.ModuleList( + [ + PerceiverAttention(dim=dim, dim_head=dim_head, heads=heads), + FeedForward(dim=dim, mult=ff_mult), + ] + ) + ) + + def forward(self, latents, x): + x = self.proj_in(x) + for attn, ff in self.layers: + latents = attn(x, latents) + latents + latents = ff(latents) + latents + latents = self.proj_out(latents) + return self.norm_out(latents) + + +class MLPProjModel(nn.Module): + def __init__(self, cross_attention_dim=1024, clip_embeddings_dim=1024): + super().__init__() + + self.proj = nn.Sequential( + nn.Linear(clip_embeddings_dim, clip_embeddings_dim), + nn.GELU(), + nn.Linear(clip_embeddings_dim, cross_attention_dim), + nn.LayerNorm(cross_attention_dim) + ) + + def forward(self, image_embeds): + clip_extra_context_tokens = self.proj(image_embeds) + return clip_extra_context_tokens + +class MLPProjModelFaceId(nn.Module): + def __init__(self, cross_attention_dim=768, id_embeddings_dim=512, num_tokens=4): + super().__init__() + + self.cross_attention_dim = cross_attention_dim + self.num_tokens = num_tokens + + self.proj = nn.Sequential( + nn.Linear(id_embeddings_dim, id_embeddings_dim*2), + nn.GELU(), + nn.Linear(id_embeddings_dim*2, cross_attention_dim*num_tokens), + ) + self.norm = nn.LayerNorm(cross_attention_dim) + + def forward(self, id_embeds): + x = self.proj(id_embeds) + x = x.reshape(-1, self.num_tokens, self.cross_attention_dim) + x = self.norm(x) + return x + +class ProjModelFaceIdPlus(nn.Module): + def __init__(self, cross_attention_dim=768, id_embeddings_dim=512, clip_embeddings_dim=1280, num_tokens=4): + super().__init__() + + self.cross_attention_dim = cross_attention_dim + self.num_tokens = num_tokens + + self.proj = nn.Sequential( + nn.Linear(id_embeddings_dim, id_embeddings_dim*2), + nn.GELU(), + nn.Linear(id_embeddings_dim*2, cross_attention_dim*num_tokens), + ) + self.norm = nn.LayerNorm(cross_attention_dim) + + self.perceiver_resampler = FacePerceiverResampler( + dim=cross_attention_dim, + depth=4, + dim_head=64, + heads=cross_attention_dim // 64, + embedding_dim=clip_embeddings_dim, + output_dim=cross_attention_dim, + ff_mult=4, + ) + + def forward(self, id_embeds, clip_embeds, scale=1.0, shortcut=False): + x = self.proj(id_embeds) + x = x.reshape(-1, self.num_tokens, self.cross_attention_dim) + x = self.norm(x) + out = self.perceiver_resampler(x, clip_embeds) + if shortcut: + out = x + scale * out + return out + +class ImageProjModel(nn.Module): + def __init__(self, cross_attention_dim=1024, clip_embeddings_dim=1024, clip_extra_context_tokens=4): + super().__init__() + + self.cross_attention_dim = cross_attention_dim + self.clip_extra_context_tokens = clip_extra_context_tokens + self.proj = nn.Linear(clip_embeddings_dim, self.clip_extra_context_tokens * cross_attention_dim) + self.norm = nn.LayerNorm(cross_attention_dim) + + def forward(self, image_embeds): + embeds = image_embeds + x = self.proj(embeds).reshape(-1, self.clip_extra_context_tokens, self.cross_attention_dim) + x = self.norm(x) + return x diff --git a/ComfyUI_IPAdapter_plus/models/legacy_directory_do_not_use.txt b/ComfyUI_IPAdapter_plus/models/legacy_directory_do_not_use.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI_IPAdapter_plus/pyproject.toml b/ComfyUI_IPAdapter_plus/pyproject.toml new file mode 100644 index 
0000000000000000000000000000000000000000..bf900be48db46329989e6d3837814ab8aa59c2f9 --- /dev/null +++ b/ComfyUI_IPAdapter_plus/pyproject.toml @@ -0,0 +1,14 @@ +[project] +name = "comfyui_ipadapter_plus" +description = "ComfyUI reference implementation for the IPAdapter models. The IPAdapter are very powerful models for image conditioning. The style and composition of a reference can be easily transferred to the generation. Think of it as a 1-image lora." +version = "2.0.0" +license = "GPL-3.0 license" + +[project.urls] +Repository = "https://github.com/cubiq/ComfyUI_IPAdapter_plus" +# Used by Comfy Registry https://comfyregistry.org + +[tool.comfy] +PublisherId = "matteo" +DisplayName = "ComfyUI_IPAdapter_plus" +Icon = "" diff --git a/ComfyUI_IPAdapter_plus/utils.py b/ComfyUI_IPAdapter_plus/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..cbd32c52ae2fc7cef74cf7200418dad0cc6f4259 --- /dev/null +++ b/ComfyUI_IPAdapter_plus/utils.py @@ -0,0 +1,388 @@ +import re +import torch +import os +import folder_paths +from comfy.clip_vision import clip_preprocess, Output +import comfy.utils +import comfy.model_management as model_management +try: + import torchvision.transforms.v2 as T +except ImportError: + import torchvision.transforms as T + +def get_clipvision_file(preset): + preset = preset.lower() + clipvision_list = folder_paths.get_filename_list("clip_vision") + + if preset.startswith("vit-g"): + pattern = r'(ViT.bigG.14.*39B.b160k|ipadapter.*sdxl|sdxl.*model\.(bin|safetensors))' + elif preset.startswith("kolors"): + pattern = r'(clip.vit.large.patch14.336\.(bin|safetensors))' + else: + pattern = r'(ViT.H.14.*s32B.b79K|ipadapter.*sd15|sd1.?5.*model\.(bin|safetensors))' + clipvision_file = [e for e in clipvision_list if re.search(pattern, e, re.IGNORECASE)] + + clipvision_file = folder_paths.get_full_path("clip_vision", clipvision_file[0]) if clipvision_file else None + + return clipvision_file + +def get_ipadapter_file(preset, is_sdxl): + preset = preset.lower() + ipadapter_list = folder_paths.get_filename_list("ipadapter") + is_insightface = False + lora_pattern = None + + if preset.startswith("light"): + if is_sdxl: + raise Exception("light model is not supported for SDXL") + pattern = r'sd15.light.v11\.(safetensors|bin)$' + # if v11 is not found, try with the old version + if not [e for e in ipadapter_list if re.search(pattern, e, re.IGNORECASE)]: + pattern = r'sd15.light\.(safetensors|bin)$' + elif preset.startswith("standard"): + if is_sdxl: + pattern = r'ip.adapter.sdxl.vit.h\.(safetensors|bin)$' + else: + pattern = r'ip.adapter.sd15\.(safetensors|bin)$' + elif preset.startswith("vit-g"): + if is_sdxl: + pattern = r'ip.adapter.sdxl\.(safetensors|bin)$' + else: + pattern = r'sd15.vit.g\.(safetensors|bin)$' + elif preset.startswith("plus ("): + if is_sdxl: + pattern = r'plus.sdxl.vit.h\.(safetensors|bin)$' + else: + pattern = r'ip.adapter.plus.sd15\.(safetensors|bin)$' + elif preset.startswith("plus face"): + if is_sdxl: + pattern = r'plus.face.sdxl.vit.h\.(safetensors|bin)$' + else: + pattern = r'plus.face.sd15\.(safetensors|bin)$' + elif preset.startswith("full"): + if is_sdxl: + raise Exception("full face model is not supported for SDXL") + pattern = r'full.face.sd15\.(safetensors|bin)$' + elif preset.startswith("faceid portrait ("): + if is_sdxl: + pattern = r'portrait.sdxl\.(safetensors|bin)$' + else: + pattern = r'portrait.v11.sd15\.(safetensors|bin)$' + # if v11 is not found, try with the old version + if not [e for e in ipadapter_list if re.search(pattern, 
e, re.IGNORECASE)]: + pattern = r'portrait.sd15\.(safetensors|bin)$' + is_insightface = True + elif preset.startswith("faceid portrait unnorm"): + if is_sdxl: + pattern = r'portrait.sdxl.unnorm\.(safetensors|bin)$' + else: + raise Exception("portrait unnorm model is not supported for SD1.5") + is_insightface = True + elif preset == "faceid": + if is_sdxl: + pattern = r'faceid.sdxl\.(safetensors|bin)$' + lora_pattern = r'faceid.sdxl.lora\.safetensors$' + else: + pattern = r'faceid.sd15\.(safetensors|bin)$' + lora_pattern = r'faceid.sd15.lora\.safetensors$' + is_insightface = True + elif preset.startswith("faceid plus -"): + if is_sdxl: + raise Exception("faceid plus model is not supported for SDXL") + pattern = r'faceid.plus.sd15\.(safetensors|bin)$' + lora_pattern = r'faceid.plus.sd15.lora\.safetensors$' + is_insightface = True + elif preset.startswith("faceid plus v2"): + if is_sdxl: + pattern = r'faceid.plusv2.sdxl\.(safetensors|bin)$' + lora_pattern = r'faceid.plusv2.sdxl.lora\.safetensors$' + else: + pattern = r'faceid.plusv2.sd15\.(safetensors|bin)$' + lora_pattern = r'faceid.plusv2.sd15.lora\.safetensors$' + is_insightface = True + # Community's models + elif preset.startswith("composition"): + if is_sdxl: + pattern = r'plus.composition.sdxl\.safetensors$' + else: + pattern = r'plus.composition.sd15\.safetensors$' + elif preset.startswith("kolors"): + if is_sdxl: + pattern = r'(ip_adapter_plus_general|kolors.ip.adapter.plus)\.(safetensors|bin)$' + else: + raise Exception("Only supported for Kolors model") + else: + raise Exception(f"invalid type '{preset}'") + + ipadapter_file = [e for e in ipadapter_list if re.search(pattern, e, re.IGNORECASE)] + ipadapter_file = folder_paths.get_full_path("ipadapter", ipadapter_file[0]) if ipadapter_file else None + + return ipadapter_file, is_insightface, lora_pattern + +def get_lora_file(pattern): + lora_list = folder_paths.get_filename_list("loras") + lora_file = [e for e in lora_list if re.search(pattern, e, re.IGNORECASE)] + lora_file = folder_paths.get_full_path("loras", lora_file[0]) if lora_file else None + + return lora_file + +def ipadapter_model_loader(file): + model = comfy.utils.load_torch_file(file, safe_load=True) + + if file.lower().endswith(".safetensors"): + st_model = {"image_proj": {}, "ip_adapter": {}} + for key in model.keys(): + if key.startswith("image_proj."): + st_model["image_proj"][key.replace("image_proj.", "")] = model[key] + elif key.startswith("ip_adapter."): + st_model["ip_adapter"][key.replace("ip_adapter.", "")] = model[key] + elif key.startswith("adapter_modules."): + st_model["ip_adapter"][key.replace("adapter_modules.", "")] = model[key] + model = st_model + del st_model + elif "adapter_modules" in model.keys(): + model["ip_adapter"] = model.pop("adapter_modules") + + if not "ip_adapter" in model.keys() or not model["ip_adapter"]: + raise Exception("invalid IPAdapter model {}".format(file)) + + if 'plusv2' in file.lower(): + model["faceidplusv2"] = True + + if 'unnorm' in file.lower(): + model["portraitunnorm"] = True + + return model + +def insightface_loader(provider, model_name='buffalo_l'): + try: + from insightface.app import FaceAnalysis + except ImportError as e: + raise Exception(e) + + path = os.path.join(folder_paths.models_dir, "insightface") + model = FaceAnalysis(name=model_name, root=path, providers=[provider + 'ExecutionProvider',]) + model.prepare(ctx_id=0, det_size=(640, 640)) + return model + +def split_tiles(embeds, num_split): + _, H, W, _ = embeds.shape + out = [] + for x in embeds: + x = 
x.unsqueeze(0) + h, w = H // num_split, W // num_split + x_split = torch.cat([x[:, i*h:(i+1)*h, j*w:(j+1)*w, :] for i in range(num_split) for j in range(num_split)], dim=0) + out.append(x_split) + + x_split = torch.stack(out, dim=0) + + return x_split + +def merge_hiddenstates(x, tiles): + chunk_size = tiles*tiles + x = x.split(chunk_size) + + out = [] + for embeds in x: + num_tiles = embeds.shape[0] + tile_size = int((embeds.shape[1]-1) ** 0.5) + grid_size = int(num_tiles ** 0.5) + + # Extract class tokens + class_tokens = embeds[:, 0, :] # Save class tokens: [num_tiles, embeds[-1]] + avg_class_token = class_tokens.mean(dim=0, keepdim=True).unsqueeze(0) # Average token, shape: [1, 1, embeds[-1]] + + patch_embeds = embeds[:, 1:, :] # Shape: [num_tiles, tile_size^2, embeds[-1]] + reshaped = patch_embeds.reshape(grid_size, grid_size, tile_size, tile_size, embeds.shape[-1]) + + merged = torch.cat([torch.cat([reshaped[i, j] for j in range(grid_size)], dim=1) + for i in range(grid_size)], dim=0) + + merged = merged.unsqueeze(0) # Shape: [1, grid_size*tile_size, grid_size*tile_size, embeds[-1]] + + # Pool to original size + pooled = torch.nn.functional.adaptive_avg_pool2d(merged.permute(0, 3, 1, 2), (tile_size, tile_size)).permute(0, 2, 3, 1) + flattened = pooled.reshape(1, tile_size*tile_size, embeds.shape[-1]) + + # Add back the class token + with_class = torch.cat([avg_class_token, flattened], dim=1) # Shape: original shape + out.append(with_class) + + out = torch.cat(out, dim=0) + + return out + +def merge_embeddings(x, tiles): # TODO: this needs so much testing that I don't even + chunk_size = tiles*tiles + x = x.split(chunk_size) + + out = [] + for embeds in x: + num_tiles = embeds.shape[0] + grid_size = int(num_tiles ** 0.5) + tile_size = int(embeds.shape[1] ** 0.5) + reshaped = embeds.reshape(grid_size, grid_size, tile_size, tile_size) + + # Merge the tiles + merged = torch.cat([torch.cat([reshaped[i, j] for j in range(grid_size)], dim=1) + for i in range(grid_size)], dim=0) + + merged = merged.unsqueeze(0) # Shape: [1, grid_size*tile_size, grid_size*tile_size] + + # Pool to original size + pooled = torch.nn.functional.adaptive_avg_pool2d(merged, (tile_size, tile_size)) # pool to [1, tile_size, tile_size] + pooled = pooled.flatten(1) # flatten to [1, tile_size^2] + out.append(pooled) + out = torch.cat(out, dim=0) + + return out + +def encode_image_masked(clip_vision, image, mask=None, batch_size=0, tiles=1, ratio=1.0, clipvision_size=224): + # full image embeds + embeds = encode_image_masked_(clip_vision, image, mask, batch_size, clipvision_size=clipvision_size) + tiles = min(tiles, 16) + + if tiles > 1: + # split in tiles + image_split = split_tiles(image, tiles) + + # get the embeds for each tile + embeds_split = Output() + for i in image_split: + encoded = encode_image_masked_(clip_vision, i, mask, batch_size, clipvision_size=clipvision_size) + if not hasattr(embeds_split, "image_embeds"): + #embeds_split["last_hidden_state"] = encoded["last_hidden_state"] + embeds_split["image_embeds"] = encoded["image_embeds"] + embeds_split["penultimate_hidden_states"] = encoded["penultimate_hidden_states"] + else: + #embeds_split["last_hidden_state"] = torch.cat((embeds_split["last_hidden_state"], encoded["last_hidden_state"]), dim=0) + embeds_split["image_embeds"] = torch.cat((embeds_split["image_embeds"], encoded["image_embeds"]), dim=0) + embeds_split["penultimate_hidden_states"] = torch.cat((embeds_split["penultimate_hidden_states"], encoded["penultimate_hidden_states"]), dim=0) + + 
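# merge the per-tile embeddings back into one embedding per image before blending them with the full-image embeds +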
#embeds_split['last_hidden_state'] = merge_hiddenstates(embeds_split['last_hidden_state']) + embeds_split["image_embeds"] = merge_embeddings(embeds_split["image_embeds"], tiles) + embeds_split["penultimate_hidden_states"] = merge_hiddenstates(embeds_split["penultimate_hidden_states"], tiles) + + #embeds['last_hidden_state'] = torch.cat([embeds_split['last_hidden_state'], embeds['last_hidden_state']]) + if embeds['image_embeds'].shape[0] > 1: # if we have more than one image we need to average the embeddings for consistency + embeds['image_embeds'] = embeds['image_embeds']*ratio + embeds_split['image_embeds']*(1-ratio) + embeds['penultimate_hidden_states'] = embeds['penultimate_hidden_states']*ratio + embeds_split['penultimate_hidden_states']*(1-ratio) + #embeds['image_embeds'] = (embeds['image_embeds']*ratio + embeds_split['image_embeds']) / 2 + #embeds['penultimate_hidden_states'] = (embeds['penultimate_hidden_states']*ratio + embeds_split['penultimate_hidden_states']) / 2 + else: # otherwise we can concatenate them, they can be averaged later + embeds['image_embeds'] = torch.cat([embeds['image_embeds']*ratio, embeds_split['image_embeds']]) + embeds['penultimate_hidden_states'] = torch.cat([embeds['penultimate_hidden_states']*ratio, embeds_split['penultimate_hidden_states']]) + + #del embeds_split + + return embeds + +def encode_image_masked_(clip_vision, image, mask=None, batch_size=0, clipvision_size=224): + model_management.load_model_gpu(clip_vision.patcher) + outputs = Output() + + if batch_size == 0: + batch_size = image.shape[0] + elif batch_size > image.shape[0]: + batch_size = image.shape[0] + + image_batch = torch.split(image, batch_size, dim=0) + + for img in image_batch: + img = img.to(clip_vision.load_device) + pixel_values = clip_preprocess(img, size=clipvision_size).float() + + # TODO: support for multiple masks + if mask is not None: + pixel_values = pixel_values * mask.to(clip_vision.load_device) + + out = clip_vision.model(pixel_values=pixel_values, intermediate_output=-2) + + if not hasattr(outputs, "last_hidden_state"): + outputs["last_hidden_state"] = out[0].to(model_management.intermediate_device()) + outputs["image_embeds"] = out[2].to(model_management.intermediate_device()) + outputs["penultimate_hidden_states"] = out[1].to(model_management.intermediate_device()) + else: + outputs["last_hidden_state"] = torch.cat((outputs["last_hidden_state"], out[0].to(model_management.intermediate_device())), dim=0) + outputs["image_embeds"] = torch.cat((outputs["image_embeds"], out[2].to(model_management.intermediate_device())), dim=0) + outputs["penultimate_hidden_states"] = torch.cat((outputs["penultimate_hidden_states"], out[1].to(model_management.intermediate_device())), dim=0) + + del img, pixel_values, out + torch.cuda.empty_cache() + + return outputs + +def tensor_to_size(source, dest_size): + if isinstance(dest_size, torch.Tensor): + dest_size = dest_size.shape[0] + source_size = source.shape[0] + + if source_size < dest_size: + shape = [dest_size - source_size] + [1]*(source.dim()-1) + source = torch.cat((source, source[-1:].repeat(shape)), dim=0) + elif source_size > dest_size: + source = source[:dest_size] + + return source + +def min_(tensor_list): + # return the element-wise min of the tensor list. + x = torch.stack(tensor_list) + mn = x.min(axis=0)[0] + return torch.clamp(mn, min=0) + +def max_(tensor_list): + # return the element-wise max of the tensor list. 
+ x = torch.stack(tensor_list) + mx = x.max(axis=0)[0] + return torch.clamp(mx, max=1) + +# From https://github.com/Jamy-L/Pytorch-Contrast-Adaptive-Sharpening/ +def contrast_adaptive_sharpening(image, amount): + img = T.functional.pad(image, (1, 1, 1, 1)).cpu() + + a = img[..., :-2, :-2] + b = img[..., :-2, 1:-1] + c = img[..., :-2, 2:] + d = img[..., 1:-1, :-2] + e = img[..., 1:-1, 1:-1] + f = img[..., 1:-1, 2:] + g = img[..., 2:, :-2] + h = img[..., 2:, 1:-1] + i = img[..., 2:, 2:] + + # Computing contrast + cross = (b, d, e, f, h) + mn = min_(cross) + mx = max_(cross) + + diag = (a, c, g, i) + mn2 = min_(diag) + mx2 = max_(diag) + mx = mx + mx2 + mn = mn + mn2 + + # Computing local weight + inv_mx = torch.reciprocal(mx) + amp = inv_mx * torch.minimum(mn, (2 - mx)) + + # scaling + amp = torch.sqrt(amp) + w = - amp * (amount * (1/5 - 1/8) + 1/8) + div = torch.reciprocal(1 + 4*w) + + output = ((b + d + f + h)*w + e) * div + output = torch.nan_to_num(output) + output = output.clamp(0, 1) + + return output + +def tensor_to_image(tensor): + image = tensor.mul(255).clamp(0, 255).byte().cpu() + image = image[..., [2, 1, 0]].numpy() + return image + +def image_to_tensor(image): + tensor = torch.clamp(torch.from_numpy(image).float() / 255., 0, 1) + tensor = tensor[..., [2, 1, 0]] + return tensor diff --git a/ComfyUI_InstantID/CrossAttentionPatch.py b/ComfyUI_InstantID/CrossAttentionPatch.py new file mode 100644 index 0000000000000000000000000000000000000000..4fbc67ef3893ea84ae8a239f9a72f158cde17011 --- /dev/null +++ b/ComfyUI_InstantID/CrossAttentionPatch.py @@ -0,0 +1,190 @@ +import torch +import math +import torch.nn.functional as F +from comfy.ldm.modules.attention import optimized_attention +from .utils import tensor_to_size + +class Attn2Replace: + def __init__(self, callback=None, **kwargs): + self.callback = [callback] + self.kwargs = [kwargs] + + def add(self, callback, **kwargs): + self.callback.append(callback) + self.kwargs.append(kwargs) + + for key, value in kwargs.items(): + setattr(self, key, value) + + def __call__(self, q, k, v, extra_options): + dtype = q.dtype + out = optimized_attention(q, k, v, extra_options["n_heads"]) + sigma = extra_options["sigmas"].detach().cpu()[0].item() if 'sigmas' in extra_options else 999999999.9 + + for i, callback in enumerate(self.callback): + if sigma <= self.kwargs[i]["sigma_start"] and sigma >= self.kwargs[i]["sigma_end"]: + out = out + callback(out, q, k, v, extra_options, **self.kwargs[i]) + + return out.to(dtype=dtype) + +def instantid_attention(out, q, k, v, extra_options, module_key='', ipadapter=None, weight=1.0, cond=None, cond_alt=None, uncond=None, weight_type="linear", mask=None, sigma_start=0.0, sigma_end=1.0, unfold_batch=False, embeds_scaling='V only', **kwargs): + dtype = q.dtype + cond_or_uncond = extra_options["cond_or_uncond"] + block_type = extra_options["block"][0] + #block_id = extra_options["block"][1] + t_idx = extra_options["transformer_index"] + layers = 11 if '101_to_k_ip' in ipadapter.ip_layers.to_kvs else 16 + k_key = module_key + "_to_k_ip" + v_key = module_key + "_to_v_ip" + + # extra options for AnimateDiff + ad_params = extra_options['ad_params'] if "ad_params" in extra_options else None + + b = q.shape[0] + seq_len = q.shape[1] + batch_prompt = b // len(cond_or_uncond) + _, _, oh, ow = extra_options["original_shape"] + + if weight_type == 'ease in': + weight = weight * (0.05 + 0.95 * (1 - t_idx / layers)) + elif weight_type == 'ease out': + weight = weight * (0.05 + 0.95 * (t_idx / layers)) + elif weight_type 
== 'ease in-out': + weight = weight * (0.05 + 0.95 * (1 - abs(t_idx - (layers/2)) / (layers/2))) + elif weight_type == 'reverse in-out': + weight = weight * (0.05 + 0.95 * (abs(t_idx - (layers/2)) / (layers/2))) + elif weight_type == 'weak input' and block_type == 'input': + weight = weight * 0.2 + elif weight_type == 'weak middle' and block_type == 'middle': + weight = weight * 0.2 + elif weight_type == 'weak output' and block_type == 'output': + weight = weight * 0.2 + elif weight_type == 'strong middle' and (block_type == 'input' or block_type == 'output'): + weight = weight * 0.2 + elif isinstance(weight, dict): + if t_idx not in weight: + return 0 + + weight = weight[t_idx] + + if cond_alt is not None and t_idx in cond_alt: + cond = cond_alt[t_idx] + del cond_alt + + if unfold_batch: + # Check AnimateDiff context window + if ad_params is not None and ad_params["sub_idxs"] is not None: + if isinstance(weight, torch.Tensor): + weight = tensor_to_size(weight, ad_params["full_length"]) + weight = torch.Tensor(weight[ad_params["sub_idxs"]]) + if torch.all(weight == 0): + return 0 + weight = weight.repeat(len(cond_or_uncond), 1, 1) # repeat for cond and uncond + elif weight == 0: + return 0 + + # if image length matches or exceeds full_length get sub_idx images + if cond.shape[0] >= ad_params["full_length"]: + cond = torch.Tensor(cond[ad_params["sub_idxs"]]) + uncond = torch.Tensor(uncond[ad_params["sub_idxs"]]) + # otherwise get sub_idxs images + else: + cond = tensor_to_size(cond, ad_params["full_length"]) + uncond = tensor_to_size(uncond, ad_params["full_length"]) + cond = cond[ad_params["sub_idxs"]] + uncond = uncond[ad_params["sub_idxs"]] + else: + if isinstance(weight, torch.Tensor): + weight = tensor_to_size(weight, batch_prompt) + if torch.all(weight == 0): + return 0 + weight = weight.repeat(len(cond_or_uncond), 1, 1) # repeat for cond and uncond + elif weight == 0: + return 0 + + cond = tensor_to_size(cond, batch_prompt) + uncond = tensor_to_size(uncond, batch_prompt) + + k_cond = ipadapter.ip_layers.to_kvs[k_key](cond) + k_uncond = ipadapter.ip_layers.to_kvs[k_key](uncond) + v_cond = ipadapter.ip_layers.to_kvs[v_key](cond) + v_uncond = ipadapter.ip_layers.to_kvs[v_key](uncond) + else: + # TODO: should we always convert the weights to a tensor? 
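+ # standard (non-AnimateDiff) path: broadcast the weight across the prompt batch, then run cond/uncond through this layer's to_k_ip/to_v_ip projections and repeat them for every prompt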
+ if isinstance(weight, torch.Tensor): + weight = tensor_to_size(weight, batch_prompt) + if torch.all(weight == 0): + return 0 + weight = weight.repeat(len(cond_or_uncond), 1, 1) # repeat for cond and uncond + elif weight == 0: + return 0 + + k_cond = ipadapter.ip_layers.to_kvs[k_key](cond).repeat(batch_prompt, 1, 1) + k_uncond = ipadapter.ip_layers.to_kvs[k_key](uncond).repeat(batch_prompt, 1, 1) + v_cond = ipadapter.ip_layers.to_kvs[v_key](cond).repeat(batch_prompt, 1, 1) + v_uncond = ipadapter.ip_layers.to_kvs[v_key](uncond).repeat(batch_prompt, 1, 1) + + ip_k = torch.cat([(k_cond, k_uncond)[i] for i in cond_or_uncond], dim=0) + ip_v = torch.cat([(v_cond, v_uncond)[i] for i in cond_or_uncond], dim=0) + + if embeds_scaling == 'K+mean(V) w/ C penalty': + scaling = float(ip_k.shape[2]) / 1280.0 + weight = weight * scaling + ip_k = ip_k * weight + ip_v_mean = torch.mean(ip_v, dim=1, keepdim=True) + ip_v = (ip_v - ip_v_mean) + ip_v_mean * weight + out_ip = optimized_attention(q, ip_k, ip_v, extra_options["n_heads"]) + del ip_v_mean + elif embeds_scaling == 'K+V w/ C penalty': + scaling = float(ip_k.shape[2]) / 1280.0 + weight = weight * scaling + ip_k = ip_k * weight + ip_v = ip_v * weight + out_ip = optimized_attention(q, ip_k, ip_v, extra_options["n_heads"]) + elif embeds_scaling == 'K+V': + ip_k = ip_k * weight + ip_v = ip_v * weight + out_ip = optimized_attention(q, ip_k, ip_v, extra_options["n_heads"]) + else: + #ip_v = ip_v * weight + out_ip = optimized_attention(q, ip_k, ip_v, extra_options["n_heads"]) + out_ip = out_ip * weight # I'm doing this to get the same results as before + + if mask is not None: + mask_h = oh / math.sqrt(oh * ow / seq_len) + mask_h = int(mask_h) + int((seq_len % int(mask_h)) != 0) + mask_w = seq_len // mask_h + + # check if using AnimateDiff and sliding context window + if (mask.shape[0] > 1 and ad_params is not None and ad_params["sub_idxs"] is not None): + # if mask length matches or exceeds full_length, get sub_idx masks + if mask.shape[0] >= ad_params["full_length"]: + mask = torch.Tensor(mask[ad_params["sub_idxs"]]) + mask = F.interpolate(mask.unsqueeze(1), size=(mask_h, mask_w), mode="bilinear").squeeze(1) + else: + mask = F.interpolate(mask.unsqueeze(1), size=(mask_h, mask_w), mode="bilinear").squeeze(1) + mask = tensor_to_size(mask, ad_params["full_length"]) + mask = mask[ad_params["sub_idxs"]] + else: + mask = F.interpolate(mask.unsqueeze(1), size=(mask_h, mask_w), mode="bilinear").squeeze(1) + mask = tensor_to_size(mask, batch_prompt) + + mask = mask.repeat(len(cond_or_uncond), 1, 1) + mask = mask.view(mask.shape[0], -1, 1).repeat(1, 1, out.shape[2]) + + # covers cases where extreme aspect ratios can cause the mask to have a wrong size + mask_len = mask_h * mask_w + if mask_len < seq_len: + pad_len = seq_len - mask_len + pad1 = pad_len // 2 + pad2 = pad_len - pad1 + mask = F.pad(mask, (0, 0, pad1, pad2), value=0.0) + elif mask_len > seq_len: + crop_start = (mask_len - seq_len) // 2 + mask = mask[:, crop_start:crop_start+seq_len, :] + + out_ip = out_ip * mask + + #out = out + out_ip + + return out_ip.to(dtype=dtype) diff --git a/ComfyUI_InstantID/InstantID.py b/ComfyUI_InstantID/InstantID.py new file mode 100644 index 0000000000000000000000000000000000000000..a6ac1950ec29028897c27c142f2c1adf16ae29ce --- /dev/null +++ b/ComfyUI_InstantID/InstantID.py @@ -0,0 +1,607 @@ +import torch +import os +import comfy.utils +import folder_paths +import numpy as np +import math +import cv2 +import PIL.Image +from .resampler import Resampler +from 
.CrossAttentionPatch import Attn2Replace, instantid_attention +from .utils import tensor_to_image + +from insightface.app import FaceAnalysis + +try: + import torchvision.transforms.v2 as T +except ImportError: + import torchvision.transforms as T + +import torch.nn.functional as F + +MODELS_DIR = os.path.join(folder_paths.models_dir, "instantid") +if "instantid" not in folder_paths.folder_names_and_paths: + current_paths = [MODELS_DIR] +else: + current_paths, _ = folder_paths.folder_names_and_paths["instantid"] +folder_paths.folder_names_and_paths["instantid"] = (current_paths, folder_paths.supported_pt_extensions) + +INSIGHTFACE_DIR = os.path.join(folder_paths.models_dir, "insightface") + +def draw_kps(image_pil, kps, color_list=[(255,0,0), (0,255,0), (0,0,255), (255,255,0), (255,0,255)]): + stickwidth = 4 + limbSeq = np.array([[0, 2], [1, 2], [3, 2], [4, 2]]) + kps = np.array(kps) + + h, w, _ = image_pil.shape + out_img = np.zeros([h, w, 3]) + + for i in range(len(limbSeq)): + index = limbSeq[i] + color = color_list[index[0]] + + x = kps[index][:, 0] + y = kps[index][:, 1] + length = ((x[0] - x[1]) ** 2 + (y[0] - y[1]) ** 2) ** 0.5 + angle = math.degrees(math.atan2(y[0] - y[1], x[0] - x[1])) + polygon = cv2.ellipse2Poly((int(np.mean(x)), int(np.mean(y))), (int(length / 2), stickwidth), int(angle), 0, 360, 1) + out_img = cv2.fillConvexPoly(out_img.copy(), polygon, color) + out_img = (out_img * 0.6).astype(np.uint8) + + for idx_kp, kp in enumerate(kps): + color = color_list[idx_kp] + x, y = kp + out_img = cv2.circle(out_img.copy(), (int(x), int(y)), 10, color, -1) + + out_img_pil = PIL.Image.fromarray(out_img.astype(np.uint8)) + return out_img_pil + +class InstantID(torch.nn.Module): + def __init__(self, instantid_model, cross_attention_dim=1280, output_cross_attention_dim=1024, clip_embeddings_dim=512, clip_extra_context_tokens=16): + super().__init__() + + self.clip_embeddings_dim = clip_embeddings_dim + self.cross_attention_dim = cross_attention_dim + self.output_cross_attention_dim = output_cross_attention_dim + self.clip_extra_context_tokens = clip_extra_context_tokens + + self.image_proj_model = self.init_proj() + + self.image_proj_model.load_state_dict(instantid_model["image_proj"]) + self.ip_layers = To_KV(instantid_model["ip_adapter"]) + + def init_proj(self): + image_proj_model = Resampler( + dim=self.cross_attention_dim, + depth=4, + dim_head=64, + heads=20, + num_queries=self.clip_extra_context_tokens, + embedding_dim=self.clip_embeddings_dim, + output_dim=self.output_cross_attention_dim, + ff_mult=4 + ) + return image_proj_model + + @torch.inference_mode() + def get_image_embeds(self, clip_embed, clip_embed_zeroed): + #image_prompt_embeds = clip_embed.clone().detach() + image_prompt_embeds = self.image_proj_model(clip_embed) + #uncond_image_prompt_embeds = clip_embed_zeroed.clone().detach() + uncond_image_prompt_embeds = self.image_proj_model(clip_embed_zeroed) + + return image_prompt_embeds, uncond_image_prompt_embeds + +class ImageProjModel(torch.nn.Module): + def __init__(self, cross_attention_dim=1024, clip_embeddings_dim=1024, clip_extra_context_tokens=4): + super().__init__() + + self.cross_attention_dim = cross_attention_dim + self.clip_extra_context_tokens = clip_extra_context_tokens + self.proj = torch.nn.Linear(clip_embeddings_dim, self.clip_extra_context_tokens * cross_attention_dim) + self.norm = torch.nn.LayerNorm(cross_attention_dim) + + def forward(self, image_embeds): + embeds = image_embeds + clip_extra_context_tokens = self.proj(embeds).reshape(-1, 
self.clip_extra_context_tokens, self.cross_attention_dim) + clip_extra_context_tokens = self.norm(clip_extra_context_tokens) + return clip_extra_context_tokens + +class To_KV(torch.nn.Module): + def __init__(self, state_dict): + super().__init__() + + self.to_kvs = torch.nn.ModuleDict() + for key, value in state_dict.items(): + k = key.replace(".weight", "").replace(".", "_") + self.to_kvs[k] = torch.nn.Linear(value.shape[1], value.shape[0], bias=False) + self.to_kvs[k].weight.data = value + +def _set_model_patch_replace(model, patch_kwargs, key): + to = model.model_options["transformer_options"].copy() + if "patches_replace" not in to: + to["patches_replace"] = {} + else: + to["patches_replace"] = to["patches_replace"].copy() + + if "attn2" not in to["patches_replace"]: + to["patches_replace"]["attn2"] = {} + else: + to["patches_replace"]["attn2"] = to["patches_replace"]["attn2"].copy() + + if key not in to["patches_replace"]["attn2"]: + to["patches_replace"]["attn2"][key] = Attn2Replace(instantid_attention, **patch_kwargs) + model.model_options["transformer_options"] = to + else: + to["patches_replace"]["attn2"][key].add(instantid_attention, **patch_kwargs) + +class InstantIDModelLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { "instantid_file": (folder_paths.get_filename_list("instantid"), )}} + + RETURN_TYPES = ("INSTANTID",) + FUNCTION = "load_model" + CATEGORY = "InstantID" + + def load_model(self, instantid_file): + ckpt_path = folder_paths.get_full_path("instantid", instantid_file) + + model = comfy.utils.load_torch_file(ckpt_path, safe_load=True) + + if ckpt_path.lower().endswith(".safetensors"): + st_model = {"image_proj": {}, "ip_adapter": {}} + for key in model.keys(): + if key.startswith("image_proj."): + st_model["image_proj"][key.replace("image_proj.", "")] = model[key] + elif key.startswith("ip_adapter."): + st_model["ip_adapter"][key.replace("ip_adapter.", "")] = model[key] + model = st_model + + model = InstantID( + model, + cross_attention_dim=1280, + output_cross_attention_dim=model["ip_adapter"]["1.to_k_ip.weight"].shape[1], + clip_embeddings_dim=512, + clip_extra_context_tokens=16, + ) + + return (model,) + +def extractFeatures(insightface, image, extract_kps=False): + face_img = tensor_to_image(image) + out = [] + + insightface.det_model.input_size = (640,640) # reset the detection size + + for i in range(face_img.shape[0]): + for size in [(size, size) for size in range(640, 128, -64)]: + insightface.det_model.input_size = size # TODO: hacky but seems to be working + face = insightface.get(face_img[i]) + if face: + face = sorted(face, key=lambda x:(x['bbox'][2]-x['bbox'][0])*(x['bbox'][3]-x['bbox'][1]))[-1] + + if extract_kps: + out.append(draw_kps(face_img[i], face['kps'])) + else: + out.append(torch.from_numpy(face['embedding']).unsqueeze(0)) + + if 640 not in size: + print(f"\033[33mINFO: InsightFace detection resolution lowered to {size}.\033[0m") + break + + if out: + if extract_kps: + out = torch.stack(T.ToTensor()(out), dim=0).permute([0,2,3,1]) + else: + out = torch.stack(out, dim=0) + else: + out = None + + return out + +class InstantIDFaceAnalysis: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "provider": (["CPU", "CUDA", "ROCM"], ), + }, + } + + RETURN_TYPES = ("FACEANALYSIS",) + FUNCTION = "load_insight_face" + CATEGORY = "InstantID" + + def load_insight_face(self, provider): + model = FaceAnalysis(name="antelopev2", root=INSIGHTFACE_DIR, providers=[provider + 'ExecutionProvider',]) # alternative to buffalo_l + 
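# "CPU"/"CUDA"/"ROCM" are mapped to the matching onnxruntime execution provider (e.g. "CUDAExecutionProvider") + 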
model.prepare(ctx_id=0, det_size=(640, 640)) + + return (model,) + +class FaceKeypointsPreprocessor: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "faceanalysis": ("FACEANALYSIS", ), + "image": ("IMAGE", ), + }, + } + RETURN_TYPES = ("IMAGE",) + FUNCTION = "preprocess_image" + CATEGORY = "InstantID" + + def preprocess_image(self, faceanalysis, image): + face_kps = extractFeatures(faceanalysis, image, extract_kps=True) + + if face_kps is None: + face_kps = torch.zeros_like(image) + print(f"\033[33mWARNING: no face detected, unable to extract the keypoints!\033[0m") + #raise Exception('Face Keypoints Image: No face detected.') + + return (face_kps,) + +def add_noise(image, factor): + seed = int(torch.sum(image).item()) % 1000000007 + torch.manual_seed(seed) + mask = (torch.rand_like(image) < factor).float() + noise = torch.rand_like(image) + noise = torch.zeros_like(image) * (1-mask) + noise * mask + + return factor*noise + +class ApplyInstantID: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "instantid": ("INSTANTID", ), + "insightface": ("FACEANALYSIS", ), + "control_net": ("CONTROL_NET", ), + "image": ("IMAGE", ), + "model": ("MODEL", ), + "positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "weight": ("FLOAT", {"default": .8, "min": 0.0, "max": 5.0, "step": 0.01, }), + "start_at": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001, }), + "end_at": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001, }), + }, + "optional": { + "image_kps": ("IMAGE",), + "mask": ("MASK",), + } + } + + RETURN_TYPES = ("MODEL", "CONDITIONING", "CONDITIONING",) + RETURN_NAMES = ("MODEL", "positive", "negative", ) + FUNCTION = "apply_instantid" + CATEGORY = "InstantID" + + def apply_instantid(self, instantid, insightface, control_net, image, model, positive, negative, start_at, end_at, weight=.8, ip_weight=None, cn_strength=None, noise=0.35, image_kps=None, mask=None, combine_embeds='average'): + self.dtype = torch.float16 if comfy.model_management.should_use_fp16() else torch.float32 + self.device = comfy.model_management.get_torch_device() + + ip_weight = weight if ip_weight is None else ip_weight + cn_strength = weight if cn_strength is None else cn_strength + + face_embed = extractFeatures(insightface, image) + if face_embed is None: + raise Exception('Reference Image: No face detected.') + + # if no keypoints image is provided, use the image itself (only the first one in the batch) + face_kps = extractFeatures(insightface, image_kps if image_kps is not None else image[0].unsqueeze(0), extract_kps=True) + + if face_kps is None: + face_kps = torch.zeros_like(image) if image_kps is None else image_kps + print(f"\033[33mWARNING: No face detected in the keypoints image!\033[0m") + + clip_embed = face_embed + # InstantID works better with averaged embeds (TODO: needs testing) + if clip_embed.shape[0] > 1: + if combine_embeds == 'average': + clip_embed = torch.mean(clip_embed, dim=0).unsqueeze(0) + elif combine_embeds == 'norm average': + clip_embed = torch.mean(clip_embed / torch.norm(clip_embed, dim=0, keepdim=True), dim=0).unsqueeze(0) + + if noise > 0: + seed = int(torch.sum(clip_embed).item()) % 1000000007 + torch.manual_seed(seed) + clip_embed_zeroed = noise * torch.rand_like(clip_embed) + #clip_embed_zeroed = add_noise(clip_embed, noise) + else: + clip_embed_zeroed = torch.zeros_like(clip_embed) + + # 1: patch the attention + self.instantid = instantid + self.instantid.to(self.device, dtype=self.dtype) + + 
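# run the 512-d insightface embedding through the Resampler (image_proj_model) to get 16 cross-attention context tokens; the noised/zeroed embedding goes through the same projection for the uncond branch + 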
image_prompt_embeds, uncond_image_prompt_embeds = self.instantid.get_image_embeds(clip_embed.to(self.device, dtype=self.dtype), clip_embed_zeroed.to(self.device, dtype=self.dtype)) + + image_prompt_embeds = image_prompt_embeds.to(self.device, dtype=self.dtype) + uncond_image_prompt_embeds = uncond_image_prompt_embeds.to(self.device, dtype=self.dtype) + + work_model = model.clone() + + sigma_start = model.get_model_object("model_sampling").percent_to_sigma(start_at) + sigma_end = model.get_model_object("model_sampling").percent_to_sigma(end_at) + + if mask is not None: + mask = mask.to(self.device) + + patch_kwargs = { + "ipadapter": self.instantid, + "weight": ip_weight, + "cond": image_prompt_embeds, + "uncond": uncond_image_prompt_embeds, + "mask": mask, + "sigma_start": sigma_start, + "sigma_end": sigma_end, + } + + number = 0 + for id in [4,5,7,8]: # id of input_blocks that have cross attention + block_indices = range(2) if id in [4, 5] else range(10) # transformer_depth + for index in block_indices: + patch_kwargs["module_key"] = str(number*2+1) + _set_model_patch_replace(work_model, patch_kwargs, ("input", id, index)) + number += 1 + for id in range(6): # id of output_blocks that have cross attention + block_indices = range(2) if id in [3, 4, 5] else range(10) # transformer_depth + for index in block_indices: + patch_kwargs["module_key"] = str(number*2+1) + _set_model_patch_replace(work_model, patch_kwargs, ("output", id, index)) + number += 1 + for index in range(10): + patch_kwargs["module_key"] = str(number*2+1) + _set_model_patch_replace(work_model, patch_kwargs, ("middle", 0, index)) + number += 1 + + # 2: do the ControlNet + if mask is not None and len(mask.shape) < 3: + mask = mask.unsqueeze(0) + + cnets = {} + cond_uncond = [] + + is_cond = True + for conditioning in [positive, negative]: + c = [] + for t in conditioning: + d = t[1].copy() + + prev_cnet = d.get('control', None) + if prev_cnet in cnets: + c_net = cnets[prev_cnet] + else: + c_net = control_net.copy().set_cond_hint(face_kps.movedim(-1,1), cn_strength, (start_at, end_at)) + c_net.set_previous_controlnet(prev_cnet) + cnets[prev_cnet] = c_net + + d['control'] = c_net + d['control_apply_to_uncond'] = False + d['cross_attn_controlnet'] = image_prompt_embeds.to(comfy.model_management.intermediate_device()) if is_cond else uncond_image_prompt_embeds.to(comfy.model_management.intermediate_device()) + + if mask is not None and is_cond: + d['mask'] = mask + d['set_area_to_bounds'] = False + + n = [t[0], d] + c.append(n) + cond_uncond.append(c) + is_cond = False + + return(work_model, cond_uncond[0], cond_uncond[1], ) + +class ApplyInstantIDAdvanced(ApplyInstantID): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "instantid": ("INSTANTID", ), + "insightface": ("FACEANALYSIS", ), + "control_net": ("CONTROL_NET", ), + "image": ("IMAGE", ), + "model": ("MODEL", ), + "positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "ip_weight": ("FLOAT", {"default": .8, "min": 0.0, "max": 3.0, "step": 0.01, }), + "cn_strength": ("FLOAT", {"default": .8, "min": 0.0, "max": 10.0, "step": 0.01, }), + "start_at": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001, }), + "end_at": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001, }), + "noise": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.1, }), + "combine_embeds": (['average', 'norm average', 'concat'], {"default": 'average'}), + }, + "optional": { + "image_kps": ("IMAGE",), + "mask": ("MASK",), + } + } + +class 
InstantIDAttentionPatch: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "instantid": ("INSTANTID", ), + "insightface": ("FACEANALYSIS", ), + "image": ("IMAGE", ), + "model": ("MODEL", ), + "weight": ("FLOAT", {"default": 1.0, "min": -1.0, "max": 3.0, "step": 0.01, }), + "start_at": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001, }), + "end_at": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001, }), + "noise": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.1, }), + }, + "optional": { + "mask": ("MASK",), + } + } + + RETURN_TYPES = ("MODEL", "FACE_EMBEDS") + FUNCTION = "patch_attention" + CATEGORY = "InstantID" + + def patch_attention(self, instantid, insightface, image, model, weight, start_at, end_at, noise=0.0, mask=None): + self.dtype = torch.float16 if comfy.model_management.should_use_fp16() else torch.float32 + self.device = comfy.model_management.get_torch_device() + + face_embed = extractFeatures(insightface, image) + if face_embed is None: + raise Exception('Reference Image: No face detected.') + + clip_embed = face_embed + # InstantID works better with averaged embeds (TODO: needs testing) + if clip_embed.shape[0] > 1: + clip_embed = torch.mean(clip_embed, dim=0).unsqueeze(0) + + if noise > 0: + seed = int(torch.sum(clip_embed).item()) % 1000000007 + torch.manual_seed(seed) + clip_embed_zeroed = noise * torch.rand_like(clip_embed) + else: + clip_embed_zeroed = torch.zeros_like(clip_embed) + + # 1: patch the attention + self.instantid = instantid + self.instantid.to(self.device, dtype=self.dtype) + + image_prompt_embeds, uncond_image_prompt_embeds = self.instantid.get_image_embeds(clip_embed.to(self.device, dtype=self.dtype), clip_embed_zeroed.to(self.device, dtype=self.dtype)) + + image_prompt_embeds = image_prompt_embeds.to(self.device, dtype=self.dtype) + uncond_image_prompt_embeds = uncond_image_prompt_embeds.to(self.device, dtype=self.dtype) + + if weight == 0: + return (model, { "cond": image_prompt_embeds, "uncond": uncond_image_prompt_embeds } ) + + work_model = model.clone() + + sigma_start = model.get_model_object("model_sampling").percent_to_sigma(start_at) + sigma_end = model.get_model_object("model_sampling").percent_to_sigma(end_at) + + if mask is not None: + mask = mask.to(self.device) + + patch_kwargs = { + "weight": weight, + "ipadapter": self.instantid, + "cond": image_prompt_embeds, + "uncond": uncond_image_prompt_embeds, + "mask": mask, + "sigma_start": sigma_start, + "sigma_end": sigma_end, + } + + number = 0 + for id in [4,5,7,8]: # id of input_blocks that have cross attention + block_indices = range(2) if id in [4, 5] else range(10) # transformer_depth + for index in block_indices: + patch_kwargs["module_key"] = str(number*2+1) + _set_model_patch_replace(work_model, patch_kwargs, ("input", id, index)) + number += 1 + for id in range(6): # id of output_blocks that have cross attention + block_indices = range(2) if id in [3, 4, 5] else range(10) # transformer_depth + for index in block_indices: + patch_kwargs["module_key"] = str(number*2+1) + _set_model_patch_replace(work_model, patch_kwargs, ("output", id, index)) + number += 1 + for index in range(10): + patch_kwargs["module_key"] = str(number*2+1) + _set_model_patch_replace(work_model, patch_kwargs, ("middle", 0, index)) + number += 1 + + return(work_model, { "cond": image_prompt_embeds, "uncond": uncond_image_prompt_embeds }, ) + +class ApplyInstantIDControlNet: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "face_embeds": 
("FACE_EMBEDS", ), + "control_net": ("CONTROL_NET", ), + "image_kps": ("IMAGE", ), + "positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01, }), + "start_at": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001, }), + "end_at": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001, }), + }, + "optional": { + "mask": ("MASK",), + } + } + + RETURN_TYPES = ("CONDITIONING", "CONDITIONING",) + RETURN_NAMES = ("positive", "negative", ) + FUNCTION = "apply_controlnet" + CATEGORY = "InstantID" + + def apply_controlnet(self, face_embeds, control_net, image_kps, positive, negative, strength, start_at, end_at, mask=None): + self.device = comfy.model_management.get_torch_device() + + if strength == 0: + return (positive, negative) + + if mask is not None: + mask = mask.to(self.device) + + if mask is not None and len(mask.shape) < 3: + mask = mask.unsqueeze(0) + + image_prompt_embeds = face_embeds['cond'] + uncond_image_prompt_embeds = face_embeds['uncond'] + + cnets = {} + cond_uncond = [] + control_hint = image_kps.movedim(-1,1) + + is_cond = True + for conditioning in [positive, negative]: + c = [] + for t in conditioning: + d = t[1].copy() + + prev_cnet = d.get('control', None) + if prev_cnet in cnets: + c_net = cnets[prev_cnet] + else: + c_net = control_net.copy().set_cond_hint(control_hint, strength, (start_at, end_at)) + c_net.set_previous_controlnet(prev_cnet) + cnets[prev_cnet] = c_net + + d['control'] = c_net + d['control_apply_to_uncond'] = False + d['cross_attn_controlnet'] = image_prompt_embeds.to(comfy.model_management.intermediate_device()) if is_cond else uncond_image_prompt_embeds.to(comfy.model_management.intermediate_device()) + + if mask is not None and is_cond: + d['mask'] = mask + d['set_area_to_bounds'] = False + + n = [t[0], d] + c.append(n) + cond_uncond.append(c) + is_cond = False + + return(cond_uncond[0], cond_uncond[1]) + + +NODE_CLASS_MAPPINGS = { + "InstantIDModelLoader": InstantIDModelLoader, + "InstantIDFaceAnalysis": InstantIDFaceAnalysis, + "ApplyInstantID": ApplyInstantID, + "ApplyInstantIDAdvanced": ApplyInstantIDAdvanced, + "FaceKeypointsPreprocessor": FaceKeypointsPreprocessor, + + "InstantIDAttentionPatch": InstantIDAttentionPatch, + "ApplyInstantIDControlNet": ApplyInstantIDControlNet, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "InstantIDModelLoader": "Load InstantID Model", + "InstantIDFaceAnalysis": "InstantID Face Analysis", + "ApplyInstantID": "Apply InstantID", + "ApplyInstantIDAdvanced": "Apply InstantID Advanced", + "FaceKeypointsPreprocessor": "Face Keypoints Preprocessor", + + "InstantIDAttentionPatch": "InstantID Patch Attention", + "ApplyInstantIDControlNet": "InstantID Apply ControlNet", +} diff --git a/ComfyUI_InstantID/LICENSE b/ComfyUI_InstantID/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/ComfyUI_InstantID/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/ComfyUI_InstantID/README.md b/ComfyUI_InstantID/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1e33237815a65d9eac06f12f6ebfeff98e5d86bf --- /dev/null +++ b/ComfyUI_InstantID/README.md @@ -0,0 +1,138 @@ +# ComfyUI InstantID (Native Support) + +Native [InstantID](https://github.com/InstantID/InstantID) support for [ComfyUI](https://github.com/comfyanonymous/ComfyUI). + +This extension differs from the many already available as it doesn't use *diffusers* but instead implements InstantID natively and it fully integrates with ComfyUI. + +# Sponsorship + +
    + +**[:heart: Github Sponsor](https://github.com/sponsors/cubiq) | [:coin: Paypal](https://paypal.me/matt3o)** + +
    + +If you like my work and wish to see updates and new features, please consider sponsoring my projects. + +- [ComfyUI IPAdapter Plus](https://github.com/cubiq/ComfyUI_IPAdapter_plus) +- [ComfyUI InstantID (Native)](https://github.com/cubiq/ComfyUI_InstantID) +- [ComfyUI Essentials](https://github.com/cubiq/ComfyUI_essentials) +- [ComfyUI FaceAnalysis](https://github.com/cubiq/ComfyUI_FaceAnalysis) +- [Comfy Dungeon](https://github.com/cubiq/Comfy_Dungeon) + +Not to mention the documentation and video tutorials. Check my **ComfyUI Advanced Understanding** videos on YouTube, for example [part 1](https://www.youtube.com/watch?v=_C7kR2TFIX0) and [part 2](https://www.youtube.com/watch?v=ijqXnW_9gzc). + +The only way to keep the code open and free is by sponsoring its development. The more sponsorships, the more time I can dedicate to my open source projects. + +Please consider a [Github Sponsorship](https://github.com/sponsors/cubiq) or [PayPal donation](https://paypal.me/matt3o) (Matteo "matt3o" Spinelli). For sponsorships of $50+, let me know if you'd like to be mentioned in this readme file; you can find me on [Discord](https://latent.vision/discord) or _matt3o :snail: gmail.com_. + +## Important updates + +- **2024/02/27:** Added [noise injection](#noise-injection) in the negative embeds. + +- **2024/02/26:** Fixed a small but nasty bug. Results will be different and you may need to lower the CFG. + +- **2024/02/20:** I refactored the nodes so they are hopefully easier to use. **This is a breaking update**; the previous workflows won't work anymore. + +## Basic Workflow + +In the `examples` directory you'll find some basic workflows. + +![workflow](examples/instantid_basic_workflow.jpg) + +## Video Tutorial + + + Watch the video + + +** :movie_camera: [Introduction to InstantID features](https://youtu.be/wMLiGhogOPE)** + +## Installation + +**Upgrade ComfyUI to the latest version!** + +Download or `git clone` this repository into the `ComfyUI/custom_nodes/` directory or use the Manager. + +InstantID requires `insightface`; you need to add it to your libraries together with `onnxruntime` and `onnxruntime-gpu`. + +The InsightFace model is **antelopev2** (not the classic buffalo_l). Download the models (for example from [here](https://drive.google.com/file/d/18wEUfMNohBJ4K3Ly5wpTejPfDzp-8fI8/view?usp=sharing) or [here](https://huggingface.co/MonsterMMORPG/tools/tree/main)), unzip them, and place them in the `ComfyUI/models/insightface/models/antelopev2` directory. + +The **main model** can be downloaded from [HuggingFace](https://huggingface.co/InstantX/InstantID/resolve/main/ip-adapter.bin?download=true) and should be placed into the `ComfyUI/models/instantid` directory. (Note that the model is called *ip_adapter* as it is based on the [IPAdapter](https://github.com/tencent-ailab/IP-Adapter).) + +You also need a [controlnet](https://huggingface.co/InstantX/InstantID/resolve/main/ControlNetModel/diffusion_pytorch_model.safetensors?download=true); place it in the ComfyUI controlnet directory. + +**Remember that at the moment this is only for SDXL.** + +## Watermarks! + +The training data is full of watermarks. To keep them from showing up in your generations, use a resolution slightly different from 1024×1024 (or the other standard ones); for example, **1016×1016** works pretty well. + +## Lower the CFG! + +It's important to lower the CFG to around 4-5, or you can use the `RescaleCFG` node. + +## Face keypoints + +The person is posed based on the keypoints generated from the reference image. 
You can use a different pose by sending an image to the `image_kps` input. + +Day Dreaming + +## Noise Injection + +The default InstantID implementation seems to really burn the image. I find that by injecting noise into the negative embeds we can mitigate the effect and also increase the likeness to the reference. The default Apply InstantID node automatically injects 35% noise; if you want to fine-tune the effect, you can use the Advanced InstantID node. + +This is still experimental and may change in the future. + +## Additional Controlnets + +You can add more controlnets to the generation. An example workflow for depth controlnet is provided. + +## Styling with IPAdapter + +It's possible to style the composition with IPAdapter. An example is provided. + +IPAdapter + +## Multi-ID + +Multi-ID is supported, but the workflow is a bit complicated and generation is slower. I'll check if I can find a better way of doing it. The "hackish" workflow is provided in the example directory. + +IPAdapter + +## Advanced Node + +There's an InstantID advanced node available; at the moment the only difference from the standard one is that you can set the weights for the InstantID model and the controlnet separately. It now also includes a noise injection option. It might be helpful for fine-tuning. + +The InstantID model influences about 25% of the composition; the rest comes from the controlnet. + +The noise helps reduce the "burn" effect. + +## Other notes + +It works very well with SDXL Turbo/Lightning. Best results are achieved with community checkpoints. + +## Current sponsors + +It's only thanks to generous sponsors that **the whole community** can enjoy open and free software. Please join me in thanking the following companies and individuals! + +### Gold sponsors + +[![Kaiber.ai](https://f.latent.vision/imgs/kaiber.png)](https://kaiber.ai/)   [![Replicate](https://f.latent.vision/imgs/replicate.png)](https://replicate.com/) + +### Companies supporting my projects + +- [RunComfy](https://www.runcomfy.com/) (ComfyUI Cloud) + +### Esteemed individuals + +- [Jack Gane](https://github.com/ganeJackS) +- [Nathan Shipley](https://www.nathanshipley.com/) + +### One-time Extraordinaire + +- [Eric Rollei](https://github.com/EricRollei) +- [francaleu](https://github.com/francaleu) +- [Neta.art](https://github.com/talesofai) +- [Samwise Wang](https://github.com/tzwm) +- _And all private sponsors, you know who you are!_ \ No newline at end of file diff --git a/ComfyUI_InstantID/__init__.py b/ComfyUI_InstantID/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..fcb75123d26d03df837c1da6fc99da3bd4903c79 --- /dev/null +++ b/ComfyUI_InstantID/__init__.py @@ -0,0 +1,3 @@ +from .InstantID import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS + +__all__ = ['NODE_CLASS_MAPPINGS', 'NODE_DISPLAY_NAME_MAPPINGS'] diff --git a/ComfyUI_InstantID/pyproject.toml b/ComfyUI_InstantID/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..d52c9245fad1f7106bee7723e69b95f42907f4b3 --- /dev/null +++ b/ComfyUI_InstantID/pyproject.toml @@ -0,0 +1,15 @@ +[project] +name = "comfyui_instantid" +description = "Native InstantID support for ComfyUI. This extension differs from the many already available as it doesn't use diffusers but instead implements InstantID natively and it fully integrates with ComfyUI." 
+version = "1.0.0" +license = "LICENSE" +dependencies = ["insightface", "onnxruntime", "onnxruntime-gpu"] + +[project.urls] +Repository = "https://github.com/cubiq/ComfyUI_InstantID" +# Used by Comfy Registry https://comfyregistry.org + +[tool.comfy] +PublisherId = "matteo" +DisplayName = "ComfyUI_InstantID" +Icon = "" diff --git a/ComfyUI_InstantID/requirements.txt b/ComfyUI_InstantID/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..02e23e5d5e30230eed83233800e1a4a52e9472c0 --- /dev/null +++ b/ComfyUI_InstantID/requirements.txt @@ -0,0 +1,3 @@ +insightface +onnxruntime +onnxruntime-gpu diff --git a/ComfyUI_InstantID/resampler.py b/ComfyUI_InstantID/resampler.py new file mode 100644 index 0000000000000000000000000000000000000000..4521c8c3e6f17caf4547c3dd84118da760e5179f --- /dev/null +++ b/ComfyUI_InstantID/resampler.py @@ -0,0 +1,121 @@ +# modified from https://github.com/mlfoundations/open_flamingo/blob/main/open_flamingo/src/helpers.py +import math + +import torch +import torch.nn as nn + + +# FFN +def FeedForward(dim, mult=4): + inner_dim = int(dim * mult) + return nn.Sequential( + nn.LayerNorm(dim), + nn.Linear(dim, inner_dim, bias=False), + nn.GELU(), + nn.Linear(inner_dim, dim, bias=False), + ) + + +def reshape_tensor(x, heads): + bs, length, width = x.shape + #(bs, length, width) --> (bs, length, n_heads, dim_per_head) + x = x.view(bs, length, heads, -1) + # (bs, length, n_heads, dim_per_head) --> (bs, n_heads, length, dim_per_head) + x = x.transpose(1, 2) + # (bs, n_heads, length, dim_per_head) --> (bs*n_heads, length, dim_per_head) + x = x.reshape(bs, heads, length, -1) + return x + + +class PerceiverAttention(nn.Module): + def __init__(self, *, dim, dim_head=64, heads=8): + super().__init__() + self.scale = dim_head**-0.5 + self.dim_head = dim_head + self.heads = heads + inner_dim = dim_head * heads + + self.norm1 = nn.LayerNorm(dim) + self.norm2 = nn.LayerNorm(dim) + + self.to_q = nn.Linear(dim, inner_dim, bias=False) + self.to_kv = nn.Linear(dim, inner_dim * 2, bias=False) + self.to_out = nn.Linear(inner_dim, dim, bias=False) + + + def forward(self, x, latents): + """ + Args: + x (torch.Tensor): image features + shape (b, n1, D) + latent (torch.Tensor): latent features + shape (b, n2, D) + """ + x = self.norm1(x) + latents = self.norm2(latents) + + b, l, _ = latents.shape + + q = self.to_q(latents) + kv_input = torch.cat((x, latents), dim=-2) + k, v = self.to_kv(kv_input).chunk(2, dim=-1) + + q = reshape_tensor(q, self.heads) + k = reshape_tensor(k, self.heads) + v = reshape_tensor(v, self.heads) + + # attention + scale = 1 / math.sqrt(math.sqrt(self.dim_head)) + weight = (q * scale) @ (k * scale).transpose(-2, -1) # More stable with f16 than dividing afterwards + weight = torch.softmax(weight.float(), dim=-1).type(weight.dtype) + out = weight @ v + + out = out.permute(0, 2, 1, 3).reshape(b, l, -1) + + return self.to_out(out) + + +class Resampler(nn.Module): + def __init__( + self, + dim=1024, + depth=8, + dim_head=64, + heads=16, + num_queries=8, + embedding_dim=768, + output_dim=1024, + ff_mult=4, + ): + super().__init__() + + self.latents = nn.Parameter(torch.randn(1, num_queries, dim) / dim**0.5) + + self.proj_in = nn.Linear(embedding_dim, dim) + + self.proj_out = nn.Linear(dim, output_dim) + self.norm_out = nn.LayerNorm(output_dim) + + self.layers = nn.ModuleList([]) + for _ in range(depth): + self.layers.append( + nn.ModuleList( + [ + PerceiverAttention(dim=dim, dim_head=dim_head, heads=heads), + FeedForward(dim=dim, mult=ff_mult), + ] 
+ ) + ) + + def forward(self, x): + + latents = self.latents.repeat(x.size(0), 1, 1) + + x = self.proj_in(x) + + for attn, ff in self.layers: + latents = attn(x, latents) + latents + latents = ff(latents) + latents + + latents = self.proj_out(latents) + return self.norm_out(latents) \ No newline at end of file diff --git a/ComfyUI_InstantID/utils.py b/ComfyUI_InstantID/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..94da4a62f65d86dd87a4c335221097f0ec0a502e --- /dev/null +++ b/ComfyUI_InstantID/utils.py @@ -0,0 +1,24 @@ +import torch + +def tensor_to_size(source, dest_size): + if isinstance(dest_size, torch.Tensor): + dest_size = dest_size.shape[0] + source_size = source.shape[0] + + if source_size < dest_size: + shape = [dest_size - source_size] + [1]*(source.dim()-1) + source = torch.cat((source, source[-1:].repeat(shape)), dim=0) + elif source_size > dest_size: + source = source[:dest_size] + + return source + +def tensor_to_image(tensor): + image = tensor.mul(255).clamp(0, 255).byte().cpu() + image = image[..., [2, 1, 0]].numpy() + return image + +def image_to_tensor(image): + tensor = torch.clamp(torch.from_numpy(image).float() / 255., 0, 1) + tensor = tensor[..., [2, 1, 0]] + return tensor diff --git a/ComfyUI_Noise/LICENSE b/ComfyUI_Noise/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..f288702d2fa16d3cdf0035b15a9fcbc552cd88e7 --- /dev/null +++ b/ComfyUI_Noise/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. 
+ + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. 
+ + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. 
Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. diff --git a/ComfyUI_Noise/README.md b/ComfyUI_Noise/README.md new file mode 100644 index 0000000000000000000000000000000000000000..7402929546b9cc995e7346f96f5ee5724b5ab9e6 --- /dev/null +++ b/ComfyUI_Noise/README.md @@ -0,0 +1,88 @@ +# ComfyUI Noise + +This repo contains 6 nodes for [ComfyUI](https://github.com/comfyanonymous/ComfyUI) that allows for more control and flexibility over the noise. This allows e.g. for workflows with small variations to generations or finding the accompanying noise to some input image and prompt. + +## Nodes + +### Noisy Latent Image: +This node lets you generate noise, you can find this node under `latent>noise` and it the following settings: +- **source**: where to generate the noise, currently supports GPU and CPU. +- **seed**: the noise seed. +- **width**: image width. +- **height**: image height. +- **batch_size**: batch size. + +### Duplicate Batch Index: +The functionality of this node has been moved to core, please use: `Latent>Batch>Repeat Latent Batch` and `Latent>Batch>Latent From Batch` instead. 
+
+This node lets you duplicate a given sample in the batch. This can be used to duplicate, e.g., encoded images, but also noise generated by the node listed above. You can find this node under `latent` and it has the following settings:
+- **latents**: the latents.
+- **batch_index**: which sample in the latents to duplicate.
+- **batch_size**: the new batch size (i.e. how many times to duplicate the sample).
+
+### Slerp Latents:
+This node lets you mix two latents together. Both input latents must share the same dimensions; otherwise the node ignores the mix factor and simply passes through the top slot. Anything else attached to the latents, such as masks, is only passed on from the top slot. You can find this node under `latent` and it comes with the following inputs:
+- **latents1**: first batch of latents.
+- **latents2**: second batch of latents. This input is optional.
+- **mask**: determines where in the latents to slerp. This input is optional.
+- **factor**: how much of the second batch of latents should be slerped into the first.
+
+### Get Sigma:
+This node can be used to calculate the amount of noise a sampler expects when it starts denoising. You can find this node under `latent>noise` and it comes with the following inputs and settings:
+- **model**: the model for which to calculate the sigma.
+- **sampler_name**: the name of the sampler for which to calculate the sigma.
+- **scheduler**: the type of schedule used by the sampler.
+- **steps**: the total number of steps in the schedule.
+- **start_at_step**: the start step of the sampler, i.e. how much noise it expects in the input image.
+- **end_at_step**: the current end step of the previous sampler, i.e. how much noise is already in the image.
+
+Most of the time you'd simply keep `start_at_step` at zero and `end_at_step` at `steps`, but if you want to re-inject some noise between two samplers, e.g. one that denoises from 0 to 15 and a second that denoises from 10 to 20, you'd use a `start_at_step` of 10 and an `end_at_step` of 15, so that the image we get at step 15 can be noised back down to step 10 and the second sampler can bring it to 20 (a short sketch of this arithmetic is included just before the examples below). Take note that the Advanced KSampler has settings for `add_noise` and `return_with_leftover_noise`, both of which we want disabled when working with these nodes.
+
+### Inject Noise:
+This node lets you actually inject noise into a latent. You can find this node under `latent>noise` and it comes with the following inputs:
+- **latents**: the latents to inject the noise into.
+- **noise**: the noise. This input is optional.
+- **mask**: determines where to inject noise. This input is optional.
+- **strength**: the strength of the noise. Note that the node above can calculate an appropriate strength value for us.
+
+### Unsampler:
+This node does the reverse of a sampler: it calculates the noise that would generate the given image for the given model and prompt. You can find this node under `sampling` and it takes the following inputs and settings:
+- **model**: the model to target.
+- **steps**: number of steps to noise.
+- **end_at_step**: the step to travel back to.
+- **cfg**: classifier-free guidance scale.
+- **sampler_name**: the name of the sampling technique to use.
+- **scheduler**: the type of schedule to use.
+- **normalize**: whether to normalize the noise before output. Useful when passing it on to an Inject Noise node, which expects normalized noise.
+- **positive**: positive prompt.
+- **negative**: negative prompt.
+- **latent_image**: the image to renoise.
+
+When trying to reconstruct the target image as faithfully as possible, this works best if both the unsampler and the sampler use a cfg scale close to 1.0 and a similar number of steps. But it is fun and worthwhile to play around with these settings to get a better intuition for the results. This node lets you do similar things to what the A1111 [img2img alternative](https://github.com/AUTOMATIC1111/stable-diffusion-webui/wiki/Features#img2img-alternative-test) script does.
+
+## Examples
+
+Here are some examples that show how to use the nodes above. Workflows for these examples can be found in the `example_workflows` folder.
+
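+To make the `Get Sigma` arithmetic described above concrete, here is a rough sketch of the idea in plain PyTorch. It is an illustration only, not the node's actual code: the Karras-style schedule below is a toy stand-in for whatever schedule your sampler really uses, and the real node additionally divides the result by the model's latent scale factor.
+
+```python
+import torch
+
+# Toy Karras-style sigma schedule (descending), standing in for the sampler's real one.
+steps, sigma_min, sigma_max, rho = 20, 0.03, 14.6, 7.0
+ramp = torch.linspace(0, 1, steps + 1)
+sigmas = (sigma_max ** (1 / rho) + ramp * (sigma_min ** (1 / rho) - sigma_max ** (1 / rho))) ** rho
+
+# Chained samplers: A denoises steps 0..15, B denoises steps 10..20.
+# A's output sits at step 15, but B expects an input at step 10 (noisier).
+# Get Sigma with start_at_step=10 and end_at_step=15 reports the gap:
+strength = (sigmas[10] - sigmas[15]).item()
+
+latent_from_a = torch.randn(1, 4, 64, 64)                                   # stand-in for A's output latent
+latent_for_b = latent_from_a + torch.randn_like(latent_from_a) * strength   # what Inject Noise then does
+```
+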
+
+generating variations
+
+![screenshot of a workflow that demos generating small variations to a given seed](https://github.com/BlenderNeko/ComfyUI_noise/blob/master/examples/example_variation.png)
+
+To create small variations on a given generation we can do the following: we generate the noise for the seed we're interested in with a `Noisy Latent Image` node, and then create an entire batch of these with a `Duplicate Batch Index` node. Note that if we were doing this for img2img, we could use this same node to duplicate the image latents. Next we generate some more noise, but this time a batch of noise rather than a single sample. We then slerp this newly created noise into the first one with a `Slerp Latents` node. To figure out the required strength for injecting this noise we use a `Get Sigma` node. And finally we inject the slerped noise into a batch of empty latents with an `Inject Noise` node. Take note that we use an advanced KSampler with the `add_noise` setting disabled.
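+
+The same recipe, sketched in plain PyTorch outside of ComfyUI. This is a minimal illustration of the idea only: the `slerp` helper mirrors the one in this repo's `nodes.py` (without its NaN guards), and the sigma value is a stand-in for whatever `Get Sigma` reports for the full schedule of your model.
+
+```python
+import torch
+
+def slerp(val, low, high):
+    """Spherical interpolation between two equally shaped noise batches."""
+    shape = low.shape
+    low, high = low.reshape(shape[0], -1), high.reshape(shape[0], -1)
+    low_n = low / low.norm(dim=1, keepdim=True)
+    high_n = high / high.norm(dim=1, keepdim=True)
+    omega = torch.acos((low_n * high_n).sum(1)).unsqueeze(1)
+    so = torch.sin(omega)
+    out = torch.sin((1.0 - val) * omega) / so * low + torch.sin(val * omega) / so * high
+    return out.reshape(shape)
+
+torch.manual_seed(0)
+base = torch.randn(1, 4, 64, 64)            # Noisy Latent Image: the seed we want variations of
+base = base.repeat(4, 1, 1, 1)              # Duplicate Batch Index: a batch of 4 copies
+fresh = torch.randn(4, 4, 64, 64)           # a second batch of noise, one sample per variation
+
+mixed = slerp(0.05, base, fresh)            # Slerp Latents with a small factor
+sigma = 14.6                                # stand-in for the value Get Sigma reports
+latents = torch.zeros_like(mixed) + mixed * sigma   # Inject Noise into empty latents, strength = sigma
+```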
    + +
    + +"unsampling" + + +![screenshot of a workflow that demos generating small variations to a given seed](https://github.com/BlenderNeko/ComfyUI_noise/blob/master/examples/example_unsample.png) + +To get the noise that recreates a certain image, we first load an image. Then we use the `Unsampler` node with a low cfg value. To check if this is working we then take the resulting noise and feed it back into an advanced ksampler with the `add_noise` setting disabled, and a cfg of 1.0. + +
    + diff --git a/ComfyUI_Noise/__init__.py b/ComfyUI_Noise/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d721463be66961a2f388b3a756760d167ea5d510 --- /dev/null +++ b/ComfyUI_Noise/__init__.py @@ -0,0 +1,3 @@ +from .nodes import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS + +__all__ = ['NODE_CLASS_MAPPINGS', 'NODE_DISPLAY_NAME_MAPPINGS'] \ No newline at end of file diff --git a/ComfyUI_Noise/example_workflows/unsample_example.json b/ComfyUI_Noise/example_workflows/unsample_example.json new file mode 100644 index 0000000000000000000000000000000000000000..86ebae968a66c3450636d45465de50d9a628e6ce --- /dev/null +++ b/ComfyUI_Noise/example_workflows/unsample_example.json @@ -0,0 +1,698 @@ +{ + "last_node_id": 27, + "last_link_id": 66, + "nodes": [ + { + "id": 23, + "type": "Reroute", + "pos": [ + 228, + 840 + ], + "size": [ + 75, + 26 + ], + "flags": {}, + "order": 5, + "mode": 0, + "inputs": [ + { + "name": "", + "type": "*", + "link": 50 + } + ], + "outputs": [ + { + "name": "", + "type": "VAE", + "links": [ + 51, + 52 + ], + "slot_index": 0 + } + ], + "properties": { + "showOutputText": false, + "horizontal": false + } + }, + { + "id": 24, + "type": "Reroute", + "pos": [ + 400, + 740 + ], + "size": [ + 75, + 26 + ], + "flags": {}, + "order": 2, + "mode": 0, + "inputs": [ + { + "name": "", + "type": "*", + "link": 53 + } + ], + "outputs": [ + { + "name": "", + "type": "MODEL", + "links": [ + 54 + ], + "slot_index": 0 + } + ], + "properties": { + "showOutputText": false, + "horizontal": false + } + }, + { + "id": 8, + "type": "VAEDecode", + "pos": [ + 970, + 640 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 11, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 44 + }, + { + "name": "vae", + "type": "VAE", + "link": 52 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 9 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 9, + "type": "SaveImage", + "pos": [ + 1280, + 681 + ], + "size": { + "0": 367.50909423828125, + "1": 383.8414306640625 + }, + "flags": {}, + "order": 12, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 9 + } + ], + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 7, + "type": "CLIPTextEncode", + "pos": [ + -64, + 642 + ], + "size": { + "0": 425.27801513671875, + "1": 180.6060791015625 + }, + "flags": {}, + "order": 4, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 5 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 56 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "text, watermark" + ] + }, + { + "id": 6, + "type": "CLIPTextEncode", + "pos": [ + -68, + 432 + ], + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "flags": {}, + "order": 3, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 3 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 59 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "beautiful scenery nature glass bottle landscape, , purple galaxy bottle," + ] + }, + { + "id": 19, + "type": "LoadImage", + "pos": [ + -124, + 906 + ], + "size": { + "0": 434.40911865234375, + "1": 440.44140625 + }, + "flags": {}, + "order": 0, + "mode": 
0, + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 34 + ], + "slot_index": 0 + }, + { + "name": "MASK", + "type": "MASK", + "links": null + } + ], + "properties": { + "Node name for S&R": "LoadImage" + }, + "widgets_values": [ + "example.png", + "image" + ] + }, + { + "id": 12, + "type": "KSamplerAdvanced", + "pos": [ + 950, + 740 + ], + "size": { + "0": 315, + "1": 334 + }, + "flags": {}, + "order": 10, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 54 + }, + { + "name": "positive", + "type": "CONDITIONING", + "link": 61 + }, + { + "name": "negative", + "type": "CONDITIONING", + "link": 58 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 66 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 44 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "disable", + 0, + "fixed", + 25, + 1, + "dpmpp_2m", + "karras", + 0, + 25, + "disable" + ] + }, + { + "id": 26, + "type": "Reroute", + "pos": [ + 450, + 670 + ], + "size": [ + 75, + 26 + ], + "flags": {}, + "order": 6, + "mode": 0, + "inputs": [ + { + "name": "", + "type": "*", + "link": 59 + } + ], + "outputs": [ + { + "name": "", + "type": "CONDITIONING", + "links": [ + 61, + 62 + ], + "slot_index": 0 + } + ], + "properties": { + "showOutputText": false, + "horizontal": false + } + }, + { + "id": 25, + "type": "Reroute", + "pos": [ + 430, + 700 + ], + "size": [ + 75, + 26 + ], + "flags": {}, + "order": 7, + "mode": 0, + "inputs": [ + { + "name": "", + "type": "*", + "link": 56 + } + ], + "outputs": [ + { + "name": "", + "type": "CONDITIONING", + "links": [ + 58, + 63 + ], + "slot_index": 0 + } + ], + "properties": { + "showOutputText": false, + "horizontal": false + } + }, + { + "id": 20, + "type": "VAEEncode", + "pos": [ + 354, + 894 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 8, + "mode": 0, + "inputs": [ + { + "name": "pixels", + "type": "IMAGE", + "link": 34 + }, + { + "name": "vae", + "type": "VAE", + "link": 51 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 64 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEEncode" + } + }, + { + "id": 4, + "type": "CheckpointLoaderSimple", + "pos": [ + -635, + 661 + ], + "size": { + "0": 315, + "1": 98 + }, + "flags": {}, + "order": 1, + "mode": 0, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 53, + 65 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 3, + 5 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 50 + ], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "v1-5-pruned-emaonly.safetensors" + ] + }, + { + "id": 27, + "type": "BNK_Unsampler", + "pos": [ + 608, + 857 + ], + "size": { + "0": 315, + "1": 214 + }, + "flags": {}, + "order": 9, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 65 + }, + { + "name": "positive", + "type": "CONDITIONING", + "link": 62 + }, + { + "name": "negative", + "type": "CONDITIONING", + "link": 63 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 64 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 66 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "BNK_Unsampler" + }, + "widgets_values": [ + 25, + 0, + 1, + "dpmpp_2m", + "karras" + ] + } + ], + 
"links": [ + [ + 3, + 4, + 1, + 6, + 0, + "CLIP" + ], + [ + 5, + 4, + 1, + 7, + 0, + "CLIP" + ], + [ + 9, + 8, + 0, + 9, + 0, + "IMAGE" + ], + [ + 34, + 19, + 0, + 20, + 0, + "IMAGE" + ], + [ + 44, + 12, + 0, + 8, + 0, + "LATENT" + ], + [ + 50, + 4, + 2, + 23, + 0, + "*" + ], + [ + 51, + 23, + 0, + 20, + 1, + "VAE" + ], + [ + 52, + 23, + 0, + 8, + 1, + "VAE" + ], + [ + 53, + 4, + 0, + 24, + 0, + "*" + ], + [ + 54, + 24, + 0, + 12, + 0, + "MODEL" + ], + [ + 56, + 7, + 0, + 25, + 0, + "*" + ], + [ + 58, + 25, + 0, + 12, + 2, + "CONDITIONING" + ], + [ + 59, + 6, + 0, + 26, + 0, + "*" + ], + [ + 61, + 26, + 0, + 12, + 1, + "CONDITIONING" + ], + [ + 62, + 26, + 0, + 27, + 1, + "CONDITIONING" + ], + [ + 63, + 25, + 0, + 27, + 2, + "CONDITIONING" + ], + [ + 64, + 20, + 0, + 27, + 3, + "LATENT" + ], + [ + 65, + 4, + 0, + 27, + 0, + "MODEL" + ], + [ + 66, + 27, + 0, + 12, + 3, + "LATENT" + ] + ], + "groups": [], + "config": {}, + "extra": {}, + "version": 0.4 +} \ No newline at end of file diff --git a/ComfyUI_Noise/example_workflows/variations_example.json b/ComfyUI_Noise/example_workflows/variations_example.json new file mode 100644 index 0000000000000000000000000000000000000000..a9a75e41d34ecaeeb3a2d7f19f1c117a9ff4103d --- /dev/null +++ b/ComfyUI_Noise/example_workflows/variations_example.json @@ -0,0 +1,868 @@ +{ + "last_node_id": 39, + "last_link_id": 84, + "nodes": [ + { + "id": 26, + "type": "Reroute", + "pos": [ + 450, + 670 + ], + "size": [ + 75, + 26 + ], + "flags": {}, + "order": 10, + "mode": 0, + "inputs": [ + { + "name": "", + "type": "*", + "link": 59 + } + ], + "outputs": [ + { + "name": "", + "type": "CONDITIONING", + "links": [ + 61 + ], + "slot_index": 0 + } + ], + "properties": { + "showOutputText": false, + "horizontal": false + } + }, + { + "id": 25, + "type": "Reroute", + "pos": [ + 430, + 700 + ], + "size": [ + 75, + 26 + ], + "flags": {}, + "order": 11, + "mode": 0, + "inputs": [ + { + "name": "", + "type": "*", + "link": 56 + } + ], + "outputs": [ + { + "name": "", + "type": "CONDITIONING", + "links": [ + 58 + ], + "slot_index": 0 + } + ], + "properties": { + "showOutputText": false, + "horizontal": false + } + }, + { + "id": 24, + "type": "Reroute", + "pos": [ + 400, + 740 + ], + "size": [ + 75, + 26 + ], + "flags": {}, + "order": 4, + "mode": 0, + "inputs": [ + { + "name": "", + "type": "*", + "link": 53 + } + ], + "outputs": [ + { + "name": "", + "type": "MODEL", + "links": [ + 54 + ], + "slot_index": 0 + } + ], + "properties": { + "showOutputText": false, + "horizontal": false + } + }, + { + "id": 7, + "type": "CLIPTextEncode", + "pos": [ + -64, + 642 + ], + "size": { + "0": 425.27801513671875, + "1": 180.6060791015625 + }, + "flags": {}, + "order": 7, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 5 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 56 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "text, watermark" + ] + }, + { + "id": 6, + "type": "CLIPTextEncode", + "pos": [ + -68, + 432 + ], + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "flags": {}, + "order": 6, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 3 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 59 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "beautiful scenery nature glass bottle 
landscape, , purple galaxy bottle," + ] + }, + { + "id": 12, + "type": "KSamplerAdvanced", + "pos": [ + 835, + 887 + ], + "size": { + "0": 315, + "1": 334 + }, + "flags": {}, + "order": 14, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 54 + }, + { + "name": "positive", + "type": "CONDITIONING", + "link": 61 + }, + { + "name": "negative", + "type": "CONDITIONING", + "link": 58 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 84 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 44 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "disable", + 0, + "fixed", + 25, + 8, + "dpmpp_2m", + "karras", + 0, + 25, + "disable" + ] + }, + { + "id": 23, + "type": "Reroute", + "pos": [ + -230, + 1632 + ], + "size": [ + 75, + 26 + ], + "flags": {}, + "order": 8, + "mode": 0, + "inputs": [ + { + "name": "", + "type": "*", + "link": 50 + } + ], + "outputs": [ + { + "name": "", + "type": "VAE", + "links": [ + 52 + ], + "slot_index": 0 + } + ], + "properties": { + "showOutputText": false, + "horizontal": false + } + }, + { + "id": 8, + "type": "VAEDecode", + "pos": [ + 1183, + 1133 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 15, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 44 + }, + { + "name": "vae", + "type": "VAE", + "link": 52 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 9 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 9, + "type": "SaveImage", + "pos": [ + 771, + 1259 + ], + "size": { + "0": 494.55535888671875, + "1": 524.3897705078125 + }, + "flags": {}, + "order": 16, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 9 + } + ], + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 4, + "type": "CheckpointLoaderSimple", + "pos": [ + -635, + 661 + ], + "size": { + "0": 315, + "1": 98 + }, + "flags": {}, + "order": 0, + "mode": 0, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 53, + 74 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 3, + 5 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 50 + ], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "v1-5-pruned-emaonly.safetensors" + ] + }, + { + "id": 34, + "type": "BNK_NoisyLatentImage", + "pos": [ + -216, + 980 + ], + "size": { + "0": 315, + "1": 178 + }, + "flags": {}, + "order": 1, + "mode": 0, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 75 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "BNK_NoisyLatentImage" + }, + "widgets_values": [ + "CPU", + 0, + "fixed", + 512, + 512, + 1 + ] + }, + { + "id": 35, + "type": "BNK_NoisyLatentImage", + "pos": [ + -217, + 1197 + ], + "size": { + "0": 315, + "1": 178 + }, + "flags": {}, + "order": 2, + "mode": 0, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 77 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "BNK_NoisyLatentImage" + }, + "widgets_values": [ + "CPU", + 1, + "fixed", + 512, + 512, + 4 + ] + }, + { + "id": 37, + "type": "BNK_DuplicateBatchIndex", + "pos": [ + 134, + 1012 + ], + "size": { + "0": 315, + "1": 82 + }, + "flags": {}, + "order": 9, + "mode": 0, + "inputs": [ + { + "name": 
"latents", + "type": "LATENT", + "link": 75 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 76 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "BNK_DuplicateBatchIndex" + }, + "widgets_values": [ + 0, + 4 + ] + }, + { + "id": 38, + "type": "BNK_SlerpLatent", + "pos": [ + 137, + 1144 + ], + "size": { + "0": 315, + "1": 98 + }, + "flags": {}, + "order": 12, + "mode": 0, + "inputs": [ + { + "name": "latents1", + "type": "LATENT", + "link": 76 + }, + { + "name": "latents2", + "type": "LATENT", + "link": 77 + }, + { + "name": "mask", + "type": "MASK", + "link": null + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 81 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "BNK_SlerpLatent" + }, + "widgets_values": [ + 0.05 + ] + }, + { + "id": 39, + "type": "BNK_InjectNoise", + "pos": [ + 476, + 1131 + ], + "size": [ + 315, + 98 + ], + "flags": {}, + "order": 13, + "mode": 0, + "inputs": [ + { + "name": "latents", + "type": "LATENT", + "link": 82 + }, + { + "name": "noise", + "type": "LATENT", + "link": 81 + }, + { + "name": "mask", + "type": "MASK", + "link": null + }, + { + "name": "strength", + "type": "FLOAT", + "link": 80, + "widget": { + "name": "strength", + "config": [ + "FLOAT", + { + "default": 1, + "min": 0, + "max": 20, + "step": 0.01 + } + ] + } + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 84 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "BNK_InjectNoise" + }, + "widgets_values": [ + 1 + ] + }, + { + "id": 33, + "type": "EmptyLatentImage", + "pos": [ + 474, + 985 + ], + "size": { + "0": 315, + "1": 106 + }, + "flags": {}, + "order": 3, + "mode": 0, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 82 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 512, + 512, + 4 + ] + }, + { + "id": 36, + "type": "BNK_GetSigma", + "pos": [ + -221, + 1420 + ], + "size": { + "0": 315, + "1": 154 + }, + "flags": {}, + "order": 5, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 74 + } + ], + "outputs": [ + { + "name": "FLOAT", + "type": "FLOAT", + "links": [ + 80 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "BNK_GetSigma" + }, + "widgets_values": [ + "dpmpp_2m", + "karras", + 25, + 0, + 25 + ] + } + ], + "links": [ + [ + 3, + 4, + 1, + 6, + 0, + "CLIP" + ], + [ + 5, + 4, + 1, + 7, + 0, + "CLIP" + ], + [ + 9, + 8, + 0, + 9, + 0, + "IMAGE" + ], + [ + 44, + 12, + 0, + 8, + 0, + "LATENT" + ], + [ + 50, + 4, + 2, + 23, + 0, + "*" + ], + [ + 52, + 23, + 0, + 8, + 1, + "VAE" + ], + [ + 53, + 4, + 0, + 24, + 0, + "*" + ], + [ + 54, + 24, + 0, + 12, + 0, + "MODEL" + ], + [ + 56, + 7, + 0, + 25, + 0, + "*" + ], + [ + 58, + 25, + 0, + 12, + 2, + "CONDITIONING" + ], + [ + 59, + 6, + 0, + 26, + 0, + "*" + ], + [ + 61, + 26, + 0, + 12, + 1, + "CONDITIONING" + ], + [ + 74, + 4, + 0, + 36, + 0, + "MODEL" + ], + [ + 75, + 34, + 0, + 37, + 0, + "LATENT" + ], + [ + 76, + 37, + 0, + 38, + 0, + "LATENT" + ], + [ + 77, + 35, + 0, + 38, + 1, + "LATENT" + ], + [ + 80, + 36, + 0, + 39, + 3, + "FLOAT" + ], + [ + 81, + 38, + 0, + 39, + 1, + "LATENT" + ], + [ + 82, + 33, + 0, + 39, + 0, + "LATENT" + ], + [ + 84, + 39, + 0, + 12, + 3, + "LATENT" + ] + ], + "groups": [], + "config": {}, + "extra": {}, + "version": 0.4 +} \ No newline at end of file diff --git a/ComfyUI_Noise/nodes.py 
b/ComfyUI_Noise/nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..318cc73c0384377326e38cba48064e7f0707a51d --- /dev/null +++ b/ComfyUI_Noise/nodes.py @@ -0,0 +1,266 @@ +import torch + +import os +import sys + +sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), "comfy")) + +import comfy.model_management +import comfy.sample +import comfy.sampler_helpers + +MAX_RESOLUTION=8192 + +def prepare_mask(mask, shape): + mask = torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(shape[2], shape[3]), mode="bilinear") + mask = mask.expand((-1,shape[1],-1,-1)) + if mask.shape[0] < shape[0]: + mask = mask.repeat((shape[0] -1) // mask.shape[0] + 1, 1, 1, 1)[:shape[0]] + return mask + +class NoisyLatentImage: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "source":(["CPU", "GPU"], ), + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "width": ("INT", {"default": 512, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + "height": ("INT", {"default": 512, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 64}), + }} + RETURN_TYPES = ("LATENT",) + FUNCTION = "create_noisy_latents" + + CATEGORY = "latent/noise" + + def create_noisy_latents(self, source, seed, width, height, batch_size): + torch.manual_seed(seed) + if source == "CPU": + device = "cpu" + else: + device = comfy.model_management.get_torch_device() + noise = torch.randn((batch_size, 4, height // 8, width // 8), dtype=torch.float32, device=device).cpu() + return ({"samples":noise}, ) + +class DuplicateBatchIndex: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "latents":("LATENT",), + "batch_index": ("INT", {"default": 0, "min": 0, "max": 63}), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 64}), + }} + + RETURN_TYPES = ("LATENT",) + FUNCTION = "duplicate_index" + + CATEGORY = "latent" + + def duplicate_index(self, latents, batch_index, batch_size): + s = latents.copy() + batch_index = min(s["samples"].shape[0] - 1, batch_index) + target = s["samples"][batch_index:batch_index + 1].clone() + target = target.repeat((batch_size,1,1,1)) + s["samples"] = target + return (s,) + +# from https://discuss.pytorch.org/t/help-regarding-slerp-function-for-generative-model-sampling/32475 +def slerp(val, low, high): + dims = low.shape + + #flatten to batches + low = low.reshape(dims[0], -1) + high = high.reshape(dims[0], -1) + + low_norm = low/torch.norm(low, dim=1, keepdim=True) + high_norm = high/torch.norm(high, dim=1, keepdim=True) + + # in case we divide by zero + low_norm[low_norm != low_norm] = 0.0 + high_norm[high_norm != high_norm] = 0.0 + + omega = torch.acos((low_norm*high_norm).sum(1)) + so = torch.sin(omega) + res = (torch.sin((1.0-val)*omega)/so).unsqueeze(1)*low + (torch.sin(val*omega)/so).unsqueeze(1) * high + return res.reshape(dims) + +class LatentSlerp: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "latents1":("LATENT",), + "factor": ("FLOAT", {"default": .5, "min": 0.0, "max": 1.0, "step": 0.01}), + }, + "optional" :{ + "latents2":("LATENT",), + "mask": ("MASK", ), + }} + + RETURN_TYPES = ("LATENT",) + FUNCTION = "slerp_latents" + + CATEGORY = "latent" + + def slerp_latents(self, latents1, factor, latents2=None, mask=None): + s = latents1.copy() + if latents2 is None: + return (s,) + if latents1["samples"].shape != latents2["samples"].shape: + print("warning, shapes in LatentSlerp not the same, ignoring") + return (s,) + 
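# Spherically interpolate the two latent batches by 'factor'; where a mask is provided, regions outside the mask keep latents1 + 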
slerped = slerp(factor, latents1["samples"].clone(), latents2["samples"].clone()) + if mask is not None: + mask = prepare_mask(mask, slerped.shape) + slerped = mask * slerped + (1-mask) * latents1["samples"] + s["samples"] = slerped + return (s,) + +class GetSigma: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "model": ("MODEL",), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS, ), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS, ), + "steps": ("INT", {"default": 10000, "min": 0, "max": 10000}), + "start_at_step": ("INT", {"default": 0, "min": 0, "max": 10000}), + "end_at_step": ("INT", {"default": 10000, "min": 1, "max": 10000}), + }} + + RETURN_TYPES = ("FLOAT",) + FUNCTION = "calc_sigma" + + CATEGORY = "latent/noise" + + def calc_sigma(self, model, sampler_name, scheduler, steps, start_at_step, end_at_step): + device = comfy.model_management.get_torch_device() + end_at_step = min(steps, end_at_step) + start_at_step = min(start_at_step, end_at_step) + comfy.model_management.load_model_gpu(model) + sampler = comfy.samplers.KSampler(model, steps=steps, device=device, sampler=sampler_name, scheduler=scheduler, denoise=1.0, model_options=model.model_options) + sigmas = sampler.sigmas + sigma = sigmas[start_at_step] - sigmas[end_at_step] + sigma /= model.model.latent_format.scale_factor + return (sigma.cpu().numpy(),) + +class InjectNoise: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "latents":("LATENT",), + + "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 200.0, "step": 0.01}), + }, + "optional":{ + "noise": ("LATENT",), + "mask": ("MASK", ), + }} + + RETURN_TYPES = ("LATENT",) + FUNCTION = "inject_noise" + + CATEGORY = "latent/noise" + + def inject_noise(self, latents, strength, noise=None, mask=None): + s = latents.copy() + if noise is None: + return (s,) + if latents["samples"].shape != noise["samples"].shape: + print("warning, shapes in InjectNoise not the same, ignoring") + return (s,) + noised = s["samples"].clone() + noise["samples"].clone() * strength + if mask is not None: + mask = prepare_mask(mask, noised.shape) + noised = mask * noised + (1-mask) * latents["samples"] + s["samples"] = noised + return (s,) + +class Unsampler: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"model": ("MODEL",), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "end_at_step": ("INT", {"default": 0, "min": 0, "max": 10000}), + "cfg": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS, ), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS, ), + "normalize": (["disable", "enable"], ), + "positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "latent_image": ("LATENT", ), + }} + + RETURN_TYPES = ("LATENT",) + FUNCTION = "unsampler" + + CATEGORY = "sampling" + + def unsampler(self, model, cfg, sampler_name, steps, end_at_step, scheduler, normalize, positive, negative, latent_image): + normalize = normalize == "enable" + device = comfy.model_management.get_torch_device() + latent = latent_image + latent_image = latent["samples"] + + end_at_step = min(end_at_step, steps-1) + end_at_step = steps - end_at_step + + noise = torch.zeros(latent_image.size(), dtype=latent_image.dtype, layout=latent_image.layout, device="cpu") + noise_mask = None + if "noise_mask" in latent: + noise_mask = comfy.sampler_helpers.prepare_mask(latent["noise_mask"], noise.shape, device) + + noise = noise.to(device) + latent_image = latent_image.to(device) + + conds0 = \ + {"positive": 
comfy.sampler_helpers.convert_cond(positive), + "negative": comfy.sampler_helpers.convert_cond(negative)} + + conds = {} + for k in conds0: + conds[k] = list(map(lambda a: a.copy(), conds0[k])) + + models, inference_memory = comfy.sampler_helpers.get_additional_models(conds, model.model_dtype()) + + comfy.model_management.load_models_gpu([model] + models, model.memory_required(noise.shape) + inference_memory) + + sampler = comfy.samplers.KSampler(model, steps=steps, device=device, sampler=sampler_name, scheduler=scheduler, denoise=1.0, model_options=model.model_options) + + sigmas = sampler.sigmas.flip(0) + 0.0001 + + pbar = comfy.utils.ProgressBar(steps) + def callback(step, x0, x, total_steps): + pbar.update_absolute(step + 1, total_steps) + + samples = sampler.sample(noise, positive, negative, cfg=cfg, latent_image=latent_image, force_full_denoise=False, denoise_mask=noise_mask, sigmas=sigmas, start_step=0, last_step=end_at_step, callback=callback) + if normalize: + #technically doesn't normalize because unsampling is not guaranteed to end at a std given by the schedule + samples -= samples.mean() + samples /= samples.std() + samples = samples.cpu() + + comfy.sampler_helpers.cleanup_additional_models(models) + + out = latent.copy() + out["samples"] = samples + return (out, ) + +NODE_CLASS_MAPPINGS = { + "BNK_NoisyLatentImage": NoisyLatentImage, + #"BNK_DuplicateBatchIndex": DuplicateBatchIndex, + "BNK_SlerpLatent": LatentSlerp, + "BNK_GetSigma": GetSigma, + "BNK_InjectNoise": InjectNoise, + "BNK_Unsampler": Unsampler, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "BNK_NoisyLatentImage": "Noisy Latent Image", + #"BNK_DuplicateBatchIndex": "Duplicate Batch Index", + "BNK_SlerpLatent": "Slerp Latents", + "BNK_GetSigma": "Get Sigma", + "BNK_InjectNoise": "Inject Noise", + "BNK_Unsampler": "Unsampler", +} diff --git a/ComfyUI_UltimateSDUpscale/LICENSE b/ComfyUI_UltimateSDUpscale/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..e62ec04cdeece724caeeeeaeb6ae1f6af1bb6b9a --- /dev/null +++ b/ComfyUI_UltimateSDUpscale/LICENSE @@ -0,0 +1,674 @@ +GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. 
Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. 
Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. 
You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. 
+ + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. 
In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + <program> Copyright (C) <year> <name of author> + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +<https://www.gnu.org/licenses/>. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +<https://www.gnu.org/licenses/why-not-lgpl.html>. diff --git a/ComfyUI_UltimateSDUpscale/README.md b/ComfyUI_UltimateSDUpscale/README.md new file mode 100644 index 0000000000000000000000000000000000000000..bf9b379ff0c6d8c648e45ebcdf01200964b646cb --- /dev/null +++ b/ComfyUI_UltimateSDUpscale/README.md @@ -0,0 +1,34 @@ +# ComfyUI_UltimateSDUpscale + + [ComfyUI](https://github.com/comfyanonymous/ComfyUI) nodes for the [Ultimate Stable Diffusion Upscale script by Coyote-A](https://github.com/Coyote-A/ultimate-upscale-for-automatic1111). This is a wrapper for the script used in the A1111 extension. + +## Installation + +Enter the following command from the command line, starting in ComfyUI/custom_nodes/: +``` +git clone https://github.com/ssitu/ComfyUI_UltimateSDUpscale --recursive +``` + +## Usage + +Nodes can be found in the node menu under `image/upscaling`: + +|Node|Description| +| --- | --- | +| Ultimate SD Upscale | The primary node; it has most of the same inputs as the original extension script. | +| Ultimate SD Upscale
    (No Upscale) | Same as the primary node, but without the upscale inputs and assumes that the input image is already upscaled. Use this if you already have an upscaled image or just want to do the tiled sampling. | + +--- + +Details about most of the parameters can be found [here](https://github.com/Coyote-A/ultimate-upscale-for-automatic1111/wiki/FAQ#parameters-descriptions). + +Parameters not found in the original repository: + +* `upscale_by` The number to multiply the width and height of the image by. If you want to specify an exact width and height, use the "No Upscale" version of the node and perform the upscaling separately (e.g., ImageUpscaleWithModel -> ImageScale -> UltimateSDUpscaleNoUpscale). +* `force_uniform_tiles` If enabled, tiles that would be cut off by the edges of the image will expand the tile using the rest of the image to keep the same tile size determined by `tile_width` and `tile_height`, which is what the A1111 Web UI does. If disabled, the minimal size for tiles will be used, which may make the sampling faster but may cause artifacts due to irregular tile sizes. + +## Examples + +#### Using the ControlNet tile model: + +![image](https://github.com/ssitu/ComfyUI_UltimateSDUpscale/assets/57548627/64f8d3b2-10ae-45ee-9f8a-40b798a51655) diff --git a/ComfyUI_UltimateSDUpscale/__init__.py b/ComfyUI_UltimateSDUpscale/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..fd194e4dfc380ae62c3fef3ddfe14a5cbc8ccf8f --- /dev/null +++ b/ComfyUI_UltimateSDUpscale/__init__.py @@ -0,0 +1,48 @@ +import sys +import os + +# Remove other custom_node paths from sys.path to avoid conflicts +custom_node_paths = [path for path in sys.path if "custom_node" in path] +original_sys_path = sys.path.copy() +for path in custom_node_paths: + sys.path.remove(path) + +# Add this repository's path to sys.path for third-party imports +repo_dir = os.path.dirname(os.path.realpath(__file__)) +sys.path.insert(0, repo_dir) +original_modules = sys.modules.copy() + +# Place aside potentially conflicting modules +modules_used = [ + "modules", + "modules.devices", + "modules.images", + "modules.processing", + "modules.scripts", + "modules.shared", + "modules.upscaler", + "utils", +] +original_imported_modules = {} +for module in modules_used: + if module in sys.modules: + original_imported_modules[module] = sys.modules.pop(module) + +# Proceed with node setup +from .nodes import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS +__all__ = ["NODE_CLASS_MAPPINGS", "NODE_DISPLAY_NAME_MAPPINGS"] + +# Clean up imports +# Remove any new modules +modules_to_remove = [] +for module in sys.modules: + if module not in original_modules: + modules_to_remove.append(module) +for module in modules_to_remove: + del sys.modules[module] + +# Restore original modules +sys.modules.update(original_imported_modules) + +# Restore original sys.path +sys.path = original_sys_path diff --git a/ComfyUI_UltimateSDUpscale/gradio.py b/ComfyUI_UltimateSDUpscale/gradio.py new file mode 100644 index 0000000000000000000000000000000000000000..0baca4418b105ad30b5f0084d2bbd72b51d19d20 --- /dev/null +++ b/ComfyUI_UltimateSDUpscale/gradio.py @@ -0,0 +1 @@ +# Empty gradio module for the ultimate-upscale.py import because gradio is not needed \ No newline at end of file diff --git a/ComfyUI_UltimateSDUpscale/modules/__init__.py b/ComfyUI_UltimateSDUpscale/modules/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git 
a/ComfyUI_UltimateSDUpscale/modules/devices.py b/ComfyUI_UltimateSDUpscale/modules/devices.py new file mode 100644 index 0000000000000000000000000000000000000000..3e37b88d60d4043b032d895bcd9dae5251fd73c2 --- /dev/null +++ b/ComfyUI_UltimateSDUpscale/modules/devices.py @@ -0,0 +1,2 @@ +def torch_gc(): + pass diff --git a/ComfyUI_UltimateSDUpscale/modules/images.py b/ComfyUI_UltimateSDUpscale/modules/images.py new file mode 100644 index 0000000000000000000000000000000000000000..502c819a5cd5d7dad43061020f44be7fd01430e9 --- /dev/null +++ b/ComfyUI_UltimateSDUpscale/modules/images.py @@ -0,0 +1,8 @@ +from PIL import Image + + +def flatten(img, bgcolor): + # Replace transparency with bgcolor + if img.mode in ("RGB"): + return img + return Image.alpha_composite(Image.new("RGBA", img.size, bgcolor), img).convert("RGB") diff --git a/ComfyUI_UltimateSDUpscale/modules/processing.py b/ComfyUI_UltimateSDUpscale/modules/processing.py new file mode 100644 index 0000000000000000000000000000000000000000..f001f0e63c4a472fdb8774240391bfdf8e17936b --- /dev/null +++ b/ComfyUI_UltimateSDUpscale/modules/processing.py @@ -0,0 +1,163 @@ +from PIL import Image, ImageFilter +import torch +import math +from nodes import common_ksampler, VAEEncode, VAEDecode, VAEDecodeTiled +from utils import pil_to_tensor, tensor_to_pil, get_crop_region, expand_crop, crop_cond +from modules import shared + +if (not hasattr(Image, 'Resampling')): # For older versions of Pillow + Image.Resampling = Image + + +class StableDiffusionProcessing: + + def __init__(self, init_img, model, positive, negative, vae, seed, steps, cfg, sampler_name, scheduler, denoise, upscale_by, uniform_tile_mode, tiled_decode): + # Variables used by the USDU script + self.init_images = [init_img] + self.image_mask = None + self.mask_blur = 0 + self.inpaint_full_res_padding = 0 + self.width = init_img.width + self.height = init_img.height + + # ComfyUI Sampler inputs + self.model = model + self.positive = positive + self.negative = negative + self.vae = vae + self.seed = seed + self.steps = steps + self.cfg = cfg + self.sampler_name = sampler_name + self.scheduler = scheduler + self.denoise = denoise + + # Variables used only by this script + self.init_size = init_img.width, init_img.height + self.upscale_by = upscale_by + self.uniform_tile_mode = uniform_tile_mode + self.tiled_decode = tiled_decode + self.vae_decoder = VAEDecode() + self.vae_encoder = VAEEncode() + self.vae_decoder_tiled = VAEDecodeTiled() + + # Other required A1111 variables for the USDU script that is currently unused in this script + self.extra_generation_params = {} + + +class Processed: + + def __init__(self, p: StableDiffusionProcessing, images: list, seed: int, info: str): + self.images = images + self.seed = seed + self.info = info + + def infotext(self, p: StableDiffusionProcessing, index): + return None + + +def fix_seed(p: StableDiffusionProcessing): + pass + + +def process_images(p: StableDiffusionProcessing) -> Processed: + # Where the main image generation happens in A1111 + + # Setup + image_mask = p.image_mask.convert('L') + init_image = p.init_images[0] + + # Locate the white region of the mask outlining the tile and add padding + crop_region = get_crop_region(image_mask, p.inpaint_full_res_padding) + + if p.uniform_tile_mode: + # Expand the crop region to match the processing size ratio and then resize it to the processing size + x1, y1, x2, y2 = crop_region + crop_width = x2 - x1 + crop_height = y2 - y1 + crop_ratio = crop_width / crop_height + p_ratio = p.width / p.height + 
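# Grow the crop region so its aspect ratio matches the processing size (p.width x p.height) before it is resized to the tile size + 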
if crop_ratio > p_ratio: + target_width = crop_width + target_height = round(crop_width / p_ratio) + else: + target_width = round(crop_height * p_ratio) + target_height = crop_height + crop_region, _ = expand_crop(crop_region, image_mask.width, image_mask.height, target_width, target_height) + tile_size = p.width, p.height + else: + # Uses the minimal size that can fit the mask, minimizes tile size but may lead to image sizes that the model is not trained on + x1, y1, x2, y2 = crop_region + crop_width = x2 - x1 + crop_height = y2 - y1 + target_width = math.ceil(crop_width / 8) * 8 + target_height = math.ceil(crop_height / 8) * 8 + crop_region, tile_size = expand_crop(crop_region, image_mask.width, + image_mask.height, target_width, target_height) + + # Blur the mask + if p.mask_blur > 0: + image_mask = image_mask.filter(ImageFilter.GaussianBlur(p.mask_blur)) + + # Crop the images to get the tiles that will be used for generation + tiles = [img.crop(crop_region) for img in shared.batch] + + # Assume the same size for all images in the batch + initial_tile_size = tiles[0].size + + # Resize if necessary + for i, tile in enumerate(tiles): + if tile.size != tile_size: + tiles[i] = tile.resize(tile_size, Image.Resampling.LANCZOS) + + # Crop conditioning + positive_cropped = crop_cond(p.positive, crop_region, p.init_size, init_image.size, tile_size) + negative_cropped = crop_cond(p.negative, crop_region, p.init_size, init_image.size, tile_size) + + # Encode the image + batched_tiles = torch.cat([pil_to_tensor(tile) for tile in tiles], dim=0) + (latent,) = p.vae_encoder.encode(p.vae, batched_tiles) + + # Generate samples + (samples,) = common_ksampler(p.model, p.seed, p.steps, p.cfg, p.sampler_name, + p.scheduler, positive_cropped, negative_cropped, latent, denoise=p.denoise) + + # Decode the sample + if not p.tiled_decode: + (decoded,) = p.vae_decoder.decode(p.vae, samples) + else: + print("[USDU] Using tiled decode") + (decoded,) = p.vae_decoder_tiled.decode(p.vae, samples, 512) # Default tile size is 512 + + # Convert the sample to a PIL image + tiles_sampled = [tensor_to_pil(decoded, i) for i in range(len(decoded))] + + for i, tile_sampled in enumerate(tiles_sampled): + init_image = shared.batch[i] + + # Resize back to the original size + if tile_sampled.size != initial_tile_size: + tile_sampled = tile_sampled.resize(initial_tile_size, Image.Resampling.LANCZOS) + + + # Put the tile into position + image_tile_only = Image.new('RGBA', init_image.size) + image_tile_only.paste(tile_sampled, crop_region[:2]) + + # Add the mask as an alpha channel + # Must make a copy due to the possibility of an edge becoming black + temp = image_tile_only.copy() + temp.putalpha(image_mask) + image_tile_only.paste(temp, image_tile_only) + + # Add back the tile to the initial image according to the mask in the alpha channel + result = init_image.convert('RGBA') + result.alpha_composite(image_tile_only) + + # Convert back to RGB + result = result.convert('RGB') + + shared.batch[i] = result + + processed = Processed(p, [shared.batch[0]], p.seed, None) + return processed diff --git a/ComfyUI_UltimateSDUpscale/modules/scripts.py b/ComfyUI_UltimateSDUpscale/modules/scripts.py new file mode 100644 index 0000000000000000000000000000000000000000..5cbd134fc07811ffaad9b9f05033603d32c29c52 --- /dev/null +++ b/ComfyUI_UltimateSDUpscale/modules/scripts.py @@ -0,0 +1,2 @@ +class Script: + pass diff --git a/ComfyUI_UltimateSDUpscale/modules/shared.py b/ComfyUI_UltimateSDUpscale/modules/shared.py new file mode 100644 index 
0000000000000000000000000000000000000000..9d4bdd7217e8a66944c469d782de2474589801a9 --- /dev/null +++ b/ComfyUI_UltimateSDUpscale/modules/shared.py @@ -0,0 +1,24 @@ +class Options: + img2img_background_color = "#ffffff" # Set to white for now + + +class State: + interrupted = False + + def begin(self): + pass + + def end(self): + pass + + +opts = Options() +state = State() + +# Will only ever hold 1 upscaler +sd_upscalers = [None] +# The upscaler usable by ComfyUI nodes +actual_upscaler = None + +# Batch of images to upscale +batch = None diff --git a/ComfyUI_UltimateSDUpscale/modules/upscaler.py b/ComfyUI_UltimateSDUpscale/modules/upscaler.py new file mode 100644 index 0000000000000000000000000000000000000000..b05f547fbc9af775ec528c744b59fa872beb7fc5 --- /dev/null +++ b/ComfyUI_UltimateSDUpscale/modules/upscaler.py @@ -0,0 +1,30 @@ +from PIL import Image +from utils import tensor_to_pil, pil_to_tensor +from comfy_extras.nodes_upscale_model import ImageUpscaleWithModel +from modules import shared + +if (not hasattr(Image, 'Resampling')): # For older versions of Pillow + Image.Resampling = Image + + +class Upscaler: + + def _upscale(self, img: Image, scale): + if (shared.actual_upscaler is None): + return img.resize((img.width * scale, img.height * scale), Image.Resampling.NEAREST) + tensor = pil_to_tensor(img) + image_upscale_node = ImageUpscaleWithModel() + (upscaled,) = image_upscale_node.upscale(shared.actual_upscaler, tensor) + return tensor_to_pil(upscaled) + + def upscale(self, img: Image, scale, selected_model: str = None): + shared.batch = [self._upscale(img, scale) for img in shared.batch] + return shared.batch[0] + + +class UpscalerData: + name = "" + data_path = "" + + def __init__(self): + self.scaler = Upscaler() diff --git a/ComfyUI_UltimateSDUpscale/nodes.py b/ComfyUI_UltimateSDUpscale/nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..9fbbb9304db87c19941066cc5d83e4291ee33315 --- /dev/null +++ b/ComfyUI_UltimateSDUpscale/nodes.py @@ -0,0 +1,210 @@ +# ComfyUI Node for Ultimate SD Upscale by Coyote-A: https://github.com/Coyote-A/ultimate-upscale-for-automatic1111 + +import logging +import torch +import comfy +from usdu_patch import usdu +from utils import tensor_to_pil, pil_to_tensor +from modules.processing import StableDiffusionProcessing +import modules.shared as shared +from modules.upscaler import UpscalerData + +MAX_RESOLUTION = 8192 +# The modes available for Ultimate SD Upscale +MODES = { + "Linear": usdu.USDUMode.LINEAR, + "Chess": usdu.USDUMode.CHESS, + "None": usdu.USDUMode.NONE, +} +# The seam fix modes +SEAM_FIX_MODES = { + "None": usdu.USDUSFMode.NONE, + "Band Pass": usdu.USDUSFMode.BAND_PASS, + "Half Tile": usdu.USDUSFMode.HALF_TILE, + "Half Tile + Intersections": usdu.USDUSFMode.HALF_TILE_PLUS_INTERSECTIONS, +} + + +def USDU_base_inputs(): + return [ + ("image", ("IMAGE",)), + # Sampling Params + ("model", ("MODEL",)), + ("positive", ("CONDITIONING",)), + ("negative", ("CONDITIONING",)), + ("vae", ("VAE",)), + ("upscale_by", ("FLOAT", {"default": 2, "min": 0.05, "max": 4, "step": 0.05})), + ("seed", ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff})), + ("steps", ("INT", {"default": 20, "min": 1, "max": 10000, "step": 1})), + ("cfg", ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0})), + ("sampler_name", (comfy.samplers.KSampler.SAMPLERS,)), + ("scheduler", (comfy.samplers.KSampler.SCHEDULERS,)), + ("denoise", ("FLOAT", {"default": 0.2, "min": 0.0, "max": 1.0, "step": 0.01})), + # Upscale Params + ("upscale_model", 
("UPSCALE_MODEL",)), + ("mode_type", (list(MODES.keys()),)), + ("tile_width", ("INT", {"default": 512, "min": 64, "max": MAX_RESOLUTION, "step": 8})), + ("tile_height", ("INT", {"default": 512, "min": 64, "max": MAX_RESOLUTION, "step": 8})), + ("mask_blur", ("INT", {"default": 8, "min": 0, "max": 64, "step": 1})), + ("tile_padding", ("INT", {"default": 32, "min": 0, "max": MAX_RESOLUTION, "step": 8})), + # Seam fix params + ("seam_fix_mode", (list(SEAM_FIX_MODES.keys()),)), + ("seam_fix_denoise", ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01})), + ("seam_fix_width", ("INT", {"default": 64, "min": 0, "max": MAX_RESOLUTION, "step": 8})), + ("seam_fix_mask_blur", ("INT", {"default": 8, "min": 0, "max": 64, "step": 1})), + ("seam_fix_padding", ("INT", {"default": 16, "min": 0, "max": MAX_RESOLUTION, "step": 8})), + # Misc + ("force_uniform_tiles", ("BOOLEAN", {"default": True})), + ("tiled_decode", ("BOOLEAN", {"default": False})), + ] + + +def prepare_inputs(required: list, optional: list = None): + inputs = {} + if required: + inputs["required"] = {} + for name, type in required: + inputs["required"][name] = type + if optional: + inputs["optional"] = {} + for name, type in optional: + inputs["optional"][name] = type + return inputs + + +def remove_input(inputs: list, input_name: str): + for i, (n, _) in enumerate(inputs): + if n == input_name: + del inputs[i] + break + + +def rename_input(inputs: list, old_name: str, new_name: str): + for i, (n, t) in enumerate(inputs): + if n == old_name: + inputs[i] = (new_name, t) + break + + +class UltimateSDUpscale: + @classmethod + def INPUT_TYPES(s): + return prepare_inputs(USDU_base_inputs()) + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "upscale" + CATEGORY = "image/upscaling" + + def upscale(self, image, model, positive, negative, vae, upscale_by, seed, + steps, cfg, sampler_name, scheduler, denoise, upscale_model, + mode_type, tile_width, tile_height, mask_blur, tile_padding, + seam_fix_mode, seam_fix_denoise, seam_fix_mask_blur, + seam_fix_width, seam_fix_padding, force_uniform_tiles, tiled_decode): + # + # Set up A1111 patches + # + + # Upscaler + # An object that the script works with + shared.sd_upscalers[0] = UpscalerData() + # Where the actual upscaler is stored, will be used when the script upscales using the Upscaler in UpscalerData + shared.actual_upscaler = upscale_model + + # Set the batch of images + shared.batch = [tensor_to_pil(image, i) for i in range(len(image))] + + # Processing + sdprocessing = StableDiffusionProcessing( + tensor_to_pil(image), model, positive, negative, vae, + seed, steps, cfg, sampler_name, scheduler, denoise, upscale_by, force_uniform_tiles, tiled_decode + ) + + # Disable logging + logger = logging.getLogger() + old_level = logger.getEffectiveLevel() + logger.setLevel(logging.CRITICAL + 1) + try: + # + # Running the script + # + script = usdu.Script() + processed = script.run(p=sdprocessing, _=None, tile_width=tile_width, tile_height=tile_height, + mask_blur=mask_blur, padding=tile_padding, seams_fix_width=seam_fix_width, + seams_fix_denoise=seam_fix_denoise, seams_fix_padding=seam_fix_padding, + upscaler_index=0, save_upscaled_image=False, redraw_mode=MODES[mode_type], + save_seams_fix_image=False, seams_fix_mask_blur=seam_fix_mask_blur, + seams_fix_type=SEAM_FIX_MODES[seam_fix_mode], target_size_type=2, + custom_width=None, custom_height=None, custom_scale=upscale_by) + + # Return the resulting images + images = [pil_to_tensor(img) for img in shared.batch] + tensor = torch.cat(images, dim=0) + 
return (tensor,) + finally: + # Restore the original logging level + print("Restoring logging level to", old_level) + logger.setLevel(old_level) + + +class UltimateSDUpscaleNoUpscale: + @classmethod + def INPUT_TYPES(s): + required = USDU_base_inputs() + remove_input(required, "upscale_model") + remove_input(required, "upscale_by") + rename_input(required, "image", "upscaled_image") + return prepare_inputs(required) + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "upscale" + CATEGORY = "image/upscaling" + + def upscale(self, upscaled_image, model, positive, negative, vae, seed, + steps, cfg, sampler_name, scheduler, denoise, + mode_type, tile_width, tile_height, mask_blur, tile_padding, + seam_fix_mode, seam_fix_denoise, seam_fix_mask_blur, + seam_fix_width, seam_fix_padding, force_uniform_tiles, tiled_decode): + + shared.sd_upscalers[0] = UpscalerData() + shared.actual_upscaler = None + shared.batch = [tensor_to_pil(upscaled_image, i) for i in range(len(upscaled_image))] + sdprocessing = StableDiffusionProcessing( + tensor_to_pil(upscaled_image), model, positive, negative, vae, + seed, steps, cfg, sampler_name, scheduler, denoise, 1, force_uniform_tiles, tiled_decode + ) + + # Disable logging + logger = logging.getLogger() + old_level = logger.getEffectiveLevel() + logger.setLevel(logging.CRITICAL + 1) + try: + script = usdu.Script() + processed = script.run(p=sdprocessing, _=None, tile_width=tile_width, tile_height=tile_height, + mask_blur=mask_blur, padding=tile_padding, seams_fix_width=seam_fix_width, + seams_fix_denoise=seam_fix_denoise, seams_fix_padding=seam_fix_padding, + upscaler_index=0, save_upscaled_image=False, redraw_mode=MODES[mode_type], + save_seams_fix_image=False, seams_fix_mask_blur=seam_fix_mask_blur, + seams_fix_type=SEAM_FIX_MODES[seam_fix_mode], target_size_type=2, + custom_width=None, custom_height=None, custom_scale=1) + + images = [pil_to_tensor(img) for img in shared.batch] + tensor = torch.cat(images, dim=0) + return (tensor,) + finally: + # Restore the original logging level + print("Restoring logging level to", old_level) + logger.setLevel(old_level) + + +# A dictionary that contains all nodes you want to export with their names +# NOTE: names should be globally unique +NODE_CLASS_MAPPINGS = { + "UltimateSDUpscale": UltimateSDUpscale, + "UltimateSDUpscaleNoUpscale": UltimateSDUpscaleNoUpscale +} + +# A dictionary that contains the friendly/humanly readable titles for the nodes +NODE_DISPLAY_NAME_MAPPINGS = { + "UltimateSDUpscale": "Ultimate SD Upscale", + "UltimateSDUpscaleNoUpscale": "Ultimate SD Upscale (No Upscale)" +} diff --git a/ComfyUI_UltimateSDUpscale/repositories/__init__.py b/ComfyUI_UltimateSDUpscale/repositories/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a30c56508421465568ab2697a638bafb3e9326ca --- /dev/null +++ b/ComfyUI_UltimateSDUpscale/repositories/__init__.py @@ -0,0 +1,14 @@ +import os +import sys +import importlib.util + +repositories_path = os.path.dirname(os.path.realpath(__file__)) + +# Import the script +script_name = os.path.join("scripts", "ultimate-upscale") +repo_name = "ultimate_sd_upscale" +script_path = os.path.join(repositories_path, repo_name, f"{script_name}.py") +spec = importlib.util.spec_from_file_location(script_name, script_path) +ultimate_upscale = importlib.util.module_from_spec(spec) +sys.modules[script_name] = ultimate_upscale +spec.loader.exec_module(ultimate_upscale) diff --git a/ComfyUI_UltimateSDUpscale/repositories/ultimate_sd_upscale/LICENSE 
b/ComfyUI_UltimateSDUpscale/repositories/ultimate_sd_upscale/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..ebfe3f5212b6396c75ee993947fe1ebdd6a91207 --- /dev/null +++ b/ComfyUI_UltimateSDUpscale/repositories/ultimate_sd_upscale/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. 
+States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. 
+ + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + ultimate-upscale-for-automatic1111 + Copyright (C) 2023 Mirzam + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) 2023 Mirzam + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. diff --git a/ComfyUI_UltimateSDUpscale/repositories/ultimate_sd_upscale/README.md b/ComfyUI_UltimateSDUpscale/repositories/ultimate_sd_upscale/README.md new file mode 100644 index 0000000000000000000000000000000000000000..3c73378ccd894a59b46f905368b9f284b2c69654 --- /dev/null +++ b/ComfyUI_UltimateSDUpscale/repositories/ultimate_sd_upscale/README.md @@ -0,0 +1,119 @@ +# Ultimate SD Upscale extension for [AUTOMATIC1111 Stable Diffusion web UI](https://github.com/AUTOMATIC1111/stable-diffusion-webui) +Now you have the opportunity to use a large denoise (0.3-0.5) and not spawn many artifacts. Works on any video card, since you can use a 512x512 tile size and the image will converge. + +News channel: https://t.me/usdunews + +# Instructions +All instructions can be found on the project's [wiki](https://github.com/Coyote-A/ultimate-upscale-for-automatic1111/wiki). 
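For quick reference, a minimal, hedged sketch of driving this script through the AUTOMATIC1111 web UI API instead of the UI follows. The host/port, input file name, and prompt are placeholder assumptions; the `script_args` order follows the API Usage section further down this README.

```python
import base64
import requests

# Assumes a local AUTOMATIC1111 instance with this extension installed (host/port are placeholders).
API_URL = "http://127.0.0.1:7860/sdapi/v1/img2img"

# Read the image to upscale and encode it as base64, as the img2img endpoint expects.
with open("input.png", "rb") as f:  # placeholder file name
    b64_image = base64.b64encode(f.read()).decode("utf-8")

payload = {
    "init_images": [b64_image],
    "prompt": "high quality, detailed",  # placeholder prompt
    "denoising_strength": 0.35,
    "script_name": "ultimate sd upscale",
    "script_args": [        # order as documented in "API Usage" below
        None,   # _ (not used)
        512,    # tile_width
        512,    # tile_height
        8,      # mask_blur
        32,     # padding
        64,     # seams_fix_width
        0.35,   # seams_fix_denoise
        32,     # seams_fix_padding
        0,      # upscaler_index
        True,   # save_upscaled_image
        0,      # redraw_mode (0 = Linear)
        False,  # save_seams_fix_image
        8,      # seams_fix_mask_blur
        0,      # seams_fix_type (0 = None)
        2,      # target_size_type (2 = scale from image size)
        2048,   # custom_width (unused for this target_size_type)
        2048,   # custom_height (unused for this target_size_type)
        2,      # custom_scale
    ],
}

response = requests.post(API_URL, json=payload)
response.raise_for_status()
result = response.json()  # result["images"] holds the upscaled image(s) as base64 strings
```

This mirrors the JSON payload shown in the API Usage section; only the surrounding img2img fields change per use case.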
+ +# Refs + +https://github.com/ssitu/ComfyUI_UltimateSDUpscale - Implementation for ComfyUI + +# Examples +More on [wiki page](https://github.com/Coyote-A/ultimate-upscale-for-automatic1111/wiki/Examples) + +
+<details>
+    <summary>E1</summary>
+    Original image
+
+    ![Original](https://i.imgur.com/J8mRYOD.png)
+
+    2k upscaled. **Tile size**: 512, **Padding**: 32, **Mask blur**: 16, **Denoise**: 0.4
+    ![2k upscale](https://i.imgur.com/0aKua4r.png)
+</details>
+
+<details>
+    <summary>E2</summary>
+    Original image
+
+    ![Original](https://i.imgur.com/aALNI2w.png)
+
+    2k upscaled. **Tile size**: 768, **Padding**: 55, **Mask blur**: 20, **Denoise**: 0.35
+    ![2k upscale](https://i.imgur.com/B5PHz0J.png)
+
+    4k upscaled. **Tile size**: 768, **Padding**: 55, **Mask blur**: 20, **Denoise**: 0.35
+    ![4k upscale](https://i.imgur.com/tIUQ7TJ.jpg)
+</details>
+
+<details>
+    <summary>E3</summary>
+    Original image
+
+    ![Original](https://i.imgur.com/AGtszA8.png)
+
+    4k upscaled. **Tile size**: 768, **Padding**: 55, **Mask blur**: 20, **Denoise**: 0.4
+    ![4k upscale](https://i.imgur.com/LCYLfCs.jpg)
+</details>
    + +# API Usage + +```javascript +{ +"script_name" : "ultimate sd upscale", +"script_args" : [ + null, // _ (not used) + 512, // tile_width + 512, // tile_height + 8, // mask_blur + 32, // padding + 64, // seams_fix_width + 0.35, // seams_fix_denoise + 32, // seams_fix_padding + 0, // upscaler_index + true, // save_upscaled_image a.k.a Upscaled + 0, // redraw_mode + false, // save_seams_fix_image a.k.a Seams fix + 8, // seams_fix_mask_blur + 0, // seams_fix_type + 0, // target_size_type + 2048, // custom_width + 2048, // custom_height + 2 // custom_scale +] +} +``` +upscaler_index +| Value | | +|:-------------:| -----:| +| 0 | None | +| 1 | Lanczos | +| 2 | Nearest | +| 3 | ESRGAN_4x | +| 4 | LDSR | +| 5 | R-ESRGAN_4x+ | +| 6 | R-ESRGAN 4x+ Anime6B | +| 7 | ScuNET GAN | +| 8 | ScuNET PSNR | +| 9 | SwinIR 4x | + +redraw_mode +| Value | | +|:-------------:| -----:| +| 0 | Linear | +| 1 | Chess | +| 2 | None | + +seams_fix_mask_blur +| Value | | +|:-------------:| -----:| +| 0 | None | +| 1 | BAND_PASS | +| 2 | HALF_TILE | +| 3 | HALF_TILE_PLUS_INTERSECTIONS | + +seams_fix_type +| Value | | +|:-------------:| -----:| +| 0 | None | +| 1 | Band pass | +| 2 | Half tile offset pass | +| 3 | Half tile offset pass + intersections | + +seams_fix_type +| Value | | +|:-------------:| -----:| +| 0 | From img2img2 settings | +| 1 | Custom size | +| 2 | Scale from image size | + diff --git a/ComfyUI_UltimateSDUpscale/repositories/ultimate_sd_upscale/scripts/ultimate-upscale.py b/ComfyUI_UltimateSDUpscale/repositories/ultimate_sd_upscale/scripts/ultimate-upscale.py new file mode 100644 index 0000000000000000000000000000000000000000..fe69546534a02e383c5736a590e46971cc583499 --- /dev/null +++ b/ComfyUI_UltimateSDUpscale/repositories/ultimate_sd_upscale/scripts/ultimate-upscale.py @@ -0,0 +1,569 @@ +import math +import gradio as gr +from PIL import Image, ImageDraw, ImageOps +from modules import processing, shared, images, devices, scripts +from modules.processing import StableDiffusionProcessing +from modules.processing import Processed +from modules.shared import opts, state +from enum import Enum + +elem_id_prefix = "ultimateupscale" + +class USDUMode(Enum): + LINEAR = 0 + CHESS = 1 + NONE = 2 + +class USDUSFMode(Enum): + NONE = 0 + BAND_PASS = 1 + HALF_TILE = 2 + HALF_TILE_PLUS_INTERSECTIONS = 3 + +class USDUpscaler(): + + def __init__(self, p, image, upscaler_index:int, save_redraw, save_seams_fix, tile_width, tile_height) -> None: + self.p:StableDiffusionProcessing = p + self.image:Image = image + self.scale_factor = math.ceil(max(p.width, p.height) / max(image.width, image.height)) + self.upscaler = shared.sd_upscalers[upscaler_index] + self.redraw = USDURedraw() + self.redraw.save = save_redraw + self.redraw.tile_width = tile_width if tile_width > 0 else tile_height + self.redraw.tile_height = tile_height if tile_height > 0 else tile_width + self.seams_fix = USDUSeamsFix() + self.seams_fix.save = save_seams_fix + self.seams_fix.tile_width = tile_width if tile_width > 0 else tile_height + self.seams_fix.tile_height = tile_height if tile_height > 0 else tile_width + self.initial_info = None + self.rows = math.ceil(self.p.height / self.redraw.tile_height) + self.cols = math.ceil(self.p.width / self.redraw.tile_width) + + def get_factor(self, num): + # Its just return, don't need elif + if num == 1: + return 2 + if num % 4 == 0: + return 4 + if num % 3 == 0: + return 3 + if num % 2 == 0: + return 2 + return 0 + + def get_factors(self): + scales = [] + current_scale = 1 + current_scale_factor = 
self.get_factor(self.scale_factor) + while current_scale_factor == 0: + self.scale_factor += 1 + current_scale_factor = self.get_factor(self.scale_factor) + while current_scale < self.scale_factor: + current_scale_factor = self.get_factor(self.scale_factor // current_scale) + scales.append(current_scale_factor) + current_scale = current_scale * current_scale_factor + if current_scale_factor == 0: + break + self.scales = enumerate(scales) + + def upscale(self): + # Log info + print(f"Canva size: {self.p.width}x{self.p.height}") + print(f"Image size: {self.image.width}x{self.image.height}") + print(f"Scale factor: {self.scale_factor}") + # Check upscaler is not empty + if self.upscaler.name == "None": + self.image = self.image.resize((self.p.width, self.p.height), resample=Image.LANCZOS) + return + # Get list with scale factors + self.get_factors() + # Upscaling image over all factors + for index, value in self.scales: + print(f"Upscaling iteration {index+1} with scale factor {value}") + self.image = self.upscaler.scaler.upscale(self.image, value, self.upscaler.data_path) + # Resize image to set values + self.image = self.image.resize((self.p.width, self.p.height), resample=Image.LANCZOS) + + def setup_redraw(self, redraw_mode, padding, mask_blur): + self.redraw.mode = USDUMode(redraw_mode) + self.redraw.enabled = self.redraw.mode != USDUMode.NONE + self.redraw.padding = padding + self.p.mask_blur = mask_blur + + def setup_seams_fix(self, padding, denoise, mask_blur, width, mode): + self.seams_fix.padding = padding + self.seams_fix.denoise = denoise + self.seams_fix.mask_blur = mask_blur + self.seams_fix.width = width + self.seams_fix.mode = USDUSFMode(mode) + self.seams_fix.enabled = self.seams_fix.mode != USDUSFMode.NONE + + def save_image(self): + if type(self.p.prompt) != list: + images.save_image(self.image, self.p.outpath_samples, "", self.p.seed, self.p.prompt, opts.samples_format, info=self.initial_info, p=self.p) + else: + images.save_image(self.image, self.p.outpath_samples, "", self.p.seed, self.p.prompt[0], opts.samples_format, info=self.initial_info, p=self.p) + + def calc_jobs_count(self): + redraw_job_count = (self.rows * self.cols) if self.redraw.enabled else 0 + seams_job_count = 0 + if self.seams_fix.mode == USDUSFMode.BAND_PASS: + seams_job_count = self.rows + self.cols - 2 + elif self.seams_fix.mode == USDUSFMode.HALF_TILE: + seams_job_count = self.rows * (self.cols - 1) + (self.rows - 1) * self.cols + elif self.seams_fix.mode == USDUSFMode.HALF_TILE_PLUS_INTERSECTIONS: + seams_job_count = self.rows * (self.cols - 1) + (self.rows - 1) * self.cols + (self.rows - 1) * (self.cols - 1) + + state.job_count = redraw_job_count + seams_job_count + + def print_info(self): + print(f"Tile size: {self.redraw.tile_width}x{self.redraw.tile_height}") + print(f"Tiles amount: {self.rows * self.cols}") + print(f"Grid: {self.rows}x{self.cols}") + print(f"Redraw enabled: {self.redraw.enabled}") + print(f"Seams fix mode: {self.seams_fix.mode.name}") + + def add_extra_info(self): + self.p.extra_generation_params["Ultimate SD upscale upscaler"] = self.upscaler.name + self.p.extra_generation_params["Ultimate SD upscale tile_width"] = self.redraw.tile_width + self.p.extra_generation_params["Ultimate SD upscale tile_height"] = self.redraw.tile_height + self.p.extra_generation_params["Ultimate SD upscale mask_blur"] = self.p.mask_blur + self.p.extra_generation_params["Ultimate SD upscale padding"] = self.redraw.padding + + def process(self): + state.begin() + self.calc_jobs_count() + 
self.result_images = [] + if self.redraw.enabled: + self.image = self.redraw.start(self.p, self.image, self.rows, self.cols) + self.initial_info = self.redraw.initial_info + self.result_images.append(self.image) + if self.redraw.save: + self.save_image() + + if self.seams_fix.enabled: + self.image = self.seams_fix.start(self.p, self.image, self.rows, self.cols) + self.initial_info = self.seams_fix.initial_info + self.result_images.append(self.image) + if self.seams_fix.save: + self.save_image() + state.end() + +class USDURedraw(): + + def init_draw(self, p, width, height): + p.inpaint_full_res = True + p.inpaint_full_res_padding = self.padding + p.width = math.ceil((self.tile_width+self.padding) / 64) * 64 + p.height = math.ceil((self.tile_height+self.padding) / 64) * 64 + mask = Image.new("L", (width, height), "black") + draw = ImageDraw.Draw(mask) + return mask, draw + + def calc_rectangle(self, xi, yi): + x1 = xi * self.tile_width + y1 = yi * self.tile_height + x2 = xi * self.tile_width + self.tile_width + y2 = yi * self.tile_height + self.tile_height + + return x1, y1, x2, y2 + + def linear_process(self, p, image, rows, cols): + mask, draw = self.init_draw(p, image.width, image.height) + for yi in range(rows): + for xi in range(cols): + if state.interrupted: + break + draw.rectangle(self.calc_rectangle(xi, yi), fill="white") + p.init_images = [image] + p.image_mask = mask + processed = processing.process_images(p) + draw.rectangle(self.calc_rectangle(xi, yi), fill="black") + if (len(processed.images) > 0): + image = processed.images[0] + + p.width = image.width + p.height = image.height + self.initial_info = processed.infotext(p, 0) + + return image + + def chess_process(self, p, image, rows, cols): + mask, draw = self.init_draw(p, image.width, image.height) + tiles = [] + # calc tiles colors + for yi in range(rows): + for xi in range(cols): + if state.interrupted: + break + if xi == 0: + tiles.append([]) + color = xi % 2 == 0 + if yi > 0 and yi % 2 != 0: + color = not color + tiles[yi].append(color) + + for yi in range(len(tiles)): + for xi in range(len(tiles[yi])): + if state.interrupted: + break + if not tiles[yi][xi]: + tiles[yi][xi] = not tiles[yi][xi] + continue + tiles[yi][xi] = not tiles[yi][xi] + draw.rectangle(self.calc_rectangle(xi, yi), fill="white") + p.init_images = [image] + p.image_mask = mask + processed = processing.process_images(p) + draw.rectangle(self.calc_rectangle(xi, yi), fill="black") + if (len(processed.images) > 0): + image = processed.images[0] + + for yi in range(len(tiles)): + for xi in range(len(tiles[yi])): + if state.interrupted: + break + if not tiles[yi][xi]: + continue + draw.rectangle(self.calc_rectangle(xi, yi), fill="white") + p.init_images = [image] + p.image_mask = mask + processed = processing.process_images(p) + draw.rectangle(self.calc_rectangle(xi, yi), fill="black") + if (len(processed.images) > 0): + image = processed.images[0] + + p.width = image.width + p.height = image.height + self.initial_info = processed.infotext(p, 0) + + return image + + def start(self, p, image, rows, cols): + self.initial_info = None + if self.mode == USDUMode.LINEAR: + return self.linear_process(p, image, rows, cols) + if self.mode == USDUMode.CHESS: + return self.chess_process(p, image, rows, cols) + +class USDUSeamsFix(): + + def init_draw(self, p): + self.initial_info = None + p.width = math.ceil((self.tile_width+self.padding) / 64) * 64 + p.height = math.ceil((self.tile_height+self.padding) / 64) * 64 + + def half_tile_process(self, p, image, rows, cols): + 
+ self.init_draw(p) + processed = None + + gradient = Image.linear_gradient("L") + row_gradient = Image.new("L", (self.tile_width, self.tile_height), "black") + row_gradient.paste(gradient.resize( + (self.tile_width, self.tile_height//2), resample=Image.BICUBIC), (0, 0)) + row_gradient.paste(gradient.rotate(180).resize( + (self.tile_width, self.tile_height//2), resample=Image.BICUBIC), + (0, self.tile_height//2)) + col_gradient = Image.new("L", (self.tile_width, self.tile_height), "black") + col_gradient.paste(gradient.rotate(90).resize( + (self.tile_width//2, self.tile_height), resample=Image.BICUBIC), (0, 0)) + col_gradient.paste(gradient.rotate(270).resize( + (self.tile_width//2, self.tile_height), resample=Image.BICUBIC), (self.tile_width//2, 0)) + + p.denoising_strength = self.denoise + p.mask_blur = self.mask_blur + + for yi in range(rows-1): + for xi in range(cols): + if state.interrupted: + break + p.width = self.tile_width + p.height = self.tile_height + p.inpaint_full_res = True + p.inpaint_full_res_padding = self.padding + mask = Image.new("L", (image.width, image.height), "black") + mask.paste(row_gradient, (xi*self.tile_width, yi*self.tile_height + self.tile_height//2)) + + p.init_images = [image] + p.image_mask = mask + processed = processing.process_images(p) + if (len(processed.images) > 0): + image = processed.images[0] + + for yi in range(rows): + for xi in range(cols-1): + if state.interrupted: + break + p.width = self.tile_width + p.height = self.tile_height + p.inpaint_full_res = True + p.inpaint_full_res_padding = self.padding + mask = Image.new("L", (image.width, image.height), "black") + mask.paste(col_gradient, (xi*self.tile_width+self.tile_width//2, yi*self.tile_height)) + + p.init_images = [image] + p.image_mask = mask + processed = processing.process_images(p) + if (len(processed.images) > 0): + image = processed.images[0] + + p.width = image.width + p.height = image.height + if processed is not None: + self.initial_info = processed.infotext(p, 0) + + return image + + def half_tile_process_corners(self, p, image, rows, cols): + fixed_image = self.half_tile_process(p, image, rows, cols) + processed = None + self.init_draw(p) + gradient = Image.radial_gradient("L").resize( + (self.tile_width, self.tile_height), resample=Image.BICUBIC) + gradient = ImageOps.invert(gradient) + p.denoising_strength = self.denoise + #p.mask_blur = 0 + p.mask_blur = self.mask_blur + + for yi in range(rows-1): + for xi in range(cols-1): + if state.interrupted: + break + p.width = self.tile_width + p.height = self.tile_height + p.inpaint_full_res = True + p.inpaint_full_res_padding = 0 + mask = Image.new("L", (fixed_image.width, fixed_image.height), "black") + mask.paste(gradient, (xi*self.tile_width + self.tile_width//2, + yi*self.tile_height + self.tile_height//2)) + + p.init_images = [fixed_image] + p.image_mask = mask + processed = processing.process_images(p) + if (len(processed.images) > 0): + fixed_image = processed.images[0] + + p.width = fixed_image.width + p.height = fixed_image.height + if processed is not None: + self.initial_info = processed.infotext(p, 0) + + return fixed_image + + def band_pass_process(self, p, image, cols, rows): + + self.init_draw(p) + processed = None + + p.denoising_strength = self.denoise + p.mask_blur = 0 + + gradient = Image.linear_gradient("L") + mirror_gradient = Image.new("L", (256, 256), "black") + mirror_gradient.paste(gradient.resize((256, 128), resample=Image.BICUBIC), (0, 0)) + mirror_gradient.paste(gradient.rotate(180).resize((256, 128), 
resample=Image.BICUBIC), (0, 128)) + + row_gradient = mirror_gradient.resize((image.width, self.width), resample=Image.BICUBIC) + col_gradient = mirror_gradient.rotate(90).resize((self.width, image.height), resample=Image.BICUBIC) + + for xi in range(1, rows): + if state.interrupted: + break + p.width = self.width + self.padding * 2 + p.height = image.height + p.inpaint_full_res = True + p.inpaint_full_res_padding = self.padding + mask = Image.new("L", (image.width, image.height), "black") + mask.paste(col_gradient, (xi * self.tile_width - self.width // 2, 0)) + + p.init_images = [image] + p.image_mask = mask + processed = processing.process_images(p) + if (len(processed.images) > 0): + image = processed.images[0] + for yi in range(1, cols): + if state.interrupted: + break + p.width = image.width + p.height = self.width + self.padding * 2 + p.inpaint_full_res = True + p.inpaint_full_res_padding = self.padding + mask = Image.new("L", (image.width, image.height), "black") + mask.paste(row_gradient, (0, yi * self.tile_height - self.width // 2)) + + p.init_images = [image] + p.image_mask = mask + processed = processing.process_images(p) + if (len(processed.images) > 0): + image = processed.images[0] + + p.width = image.width + p.height = image.height + if processed is not None: + self.initial_info = processed.infotext(p, 0) + + return image + + def start(self, p, image, rows, cols): + if USDUSFMode(self.mode) == USDUSFMode.BAND_PASS: + return self.band_pass_process(p, image, rows, cols) + elif USDUSFMode(self.mode) == USDUSFMode.HALF_TILE: + return self.half_tile_process(p, image, rows, cols) + elif USDUSFMode(self.mode) == USDUSFMode.HALF_TILE_PLUS_INTERSECTIONS: + return self.half_tile_process_corners(p, image, rows, cols) + else: + return image + +class Script(scripts.Script): + def title(self): + return "Ultimate SD upscale" + + def show(self, is_img2img): + return is_img2img + + def ui(self, is_img2img): + + target_size_types = [ + "From img2img2 settings", + "Custom size", + "Scale from image size" + ] + + seams_fix_types = [ + "None", + "Band pass", + "Half tile offset pass", + "Half tile offset pass + intersections" + ] + + redrow_modes = [ + "Linear", + "Chess", + "None" + ] + + info = gr.HTML( + "

<p style=\"margin-bottom:0.75em\">Will upscale the image depending on the selected target size type</p>
    ") + + with gr.Row(): + target_size_type = gr.Dropdown(label="Target size type", elem_id=f"{elem_id_prefix}_target_size_type", choices=[k for k in target_size_types], type="index", + value=next(iter(target_size_types))) + + custom_width = gr.Slider(label='Custom width', elem_id=f"{elem_id_prefix}_custom_width", minimum=64, maximum=8192, step=64, value=2048, visible=False, interactive=True) + custom_height = gr.Slider(label='Custom height', elem_id=f"{elem_id_prefix}_custom_height", minimum=64, maximum=8192, step=64, value=2048, visible=False, interactive=True) + custom_scale = gr.Slider(label='Scale', elem_id=f"{elem_id_prefix}_custom_scale", minimum=1, maximum=16, step=0.01, value=2, visible=False, interactive=True) + + gr.HTML("

<p style=\"margin-bottom:0.75em\">Redraw options:</p>
    ") + with gr.Row(): + upscaler_index = gr.Radio(label='Upscaler', elem_id=f"{elem_id_prefix}_upscaler_index", choices=[x.name for x in shared.sd_upscalers], + value=shared.sd_upscalers[0].name, type="index") + with gr.Row(): + redraw_mode = gr.Dropdown(label="Type", elem_id=f"{elem_id_prefix}_redraw_mode", choices=[k for k in redrow_modes], type="index", value=next(iter(redrow_modes))) + tile_width = gr.Slider(elem_id=f"{elem_id_prefix}_tile_width", minimum=0, maximum=2048, step=64, label='Tile width', value=512) + tile_height = gr.Slider(elem_id=f"{elem_id_prefix}_tile_height", minimum=0, maximum=2048, step=64, label='Tile height', value=0) + mask_blur = gr.Slider(elem_id=f"{elem_id_prefix}_mask_blur", label='Mask blur', minimum=0, maximum=64, step=1, value=8) + padding = gr.Slider(elem_id=f"{elem_id_prefix}_padding", label='Padding', minimum=0, maximum=512, step=1, value=32) + gr.HTML("

<p style=\"margin-bottom:0.75em\">Seams fix:</p>
    ") + with gr.Row(): + seams_fix_type = gr.Dropdown(label="Type", elem_id=f"{elem_id_prefix}_seams_fix_type", choices=[k for k in seams_fix_types], type="index", value=next(iter(seams_fix_types))) + seams_fix_denoise = gr.Slider(label='Denoise', elem_id=f"{elem_id_prefix}_seams_fix_denoise", minimum=0, maximum=1, step=0.01, value=0.35, visible=False, interactive=True) + seams_fix_width = gr.Slider(label='Width', elem_id=f"{elem_id_prefix}_seams_fix_width", minimum=0, maximum=128, step=1, value=64, visible=False, interactive=True) + seams_fix_mask_blur = gr.Slider(label='Mask blur', elem_id=f"{elem_id_prefix}_seams_fix_mask_blur", minimum=0, maximum=64, step=1, value=4, visible=False, interactive=True) + seams_fix_padding = gr.Slider(label='Padding', elem_id=f"{elem_id_prefix}_seams_fix_padding", minimum=0, maximum=128, step=1, value=16, visible=False, interactive=True) + gr.HTML("

<p style=\"margin-bottom:0.75em\">Save options:</p>
    ") + with gr.Row(): + save_upscaled_image = gr.Checkbox(label="Upscaled", elem_id=f"{elem_id_prefix}_save_upscaled_image", value=True) + save_seams_fix_image = gr.Checkbox(label="Seams fix", elem_id=f"{elem_id_prefix}_save_seams_fix_image", value=False) + + def select_fix_type(fix_index): + all_visible = fix_index != 0 + mask_blur_visible = fix_index == 2 or fix_index == 3 + width_visible = fix_index == 1 + + return [gr.update(visible=all_visible), + gr.update(visible=width_visible), + gr.update(visible=mask_blur_visible), + gr.update(visible=all_visible)] + + seams_fix_type.change( + fn=select_fix_type, + inputs=seams_fix_type, + outputs=[seams_fix_denoise, seams_fix_width, seams_fix_mask_blur, seams_fix_padding] + ) + + def select_scale_type(scale_index): + is_custom_size = scale_index == 1 + is_custom_scale = scale_index == 2 + + return [gr.update(visible=is_custom_size), + gr.update(visible=is_custom_size), + gr.update(visible=is_custom_scale), + ] + + target_size_type.change( + fn=select_scale_type, + inputs=target_size_type, + outputs=[custom_width, custom_height, custom_scale] + ) + + def init_field(scale_name): + try: + scale_index = target_size_types.index(scale_name) + custom_width.visible = custom_height.visible = scale_index == 1 + custom_scale.visible = scale_index == 2 + except: + pass + + target_size_type.init_field = init_field + + return [info, tile_width, tile_height, mask_blur, padding, seams_fix_width, seams_fix_denoise, seams_fix_padding, + upscaler_index, save_upscaled_image, redraw_mode, save_seams_fix_image, seams_fix_mask_blur, + seams_fix_type, target_size_type, custom_width, custom_height, custom_scale] + + def run(self, p, _, tile_width, tile_height, mask_blur, padding, seams_fix_width, seams_fix_denoise, seams_fix_padding, + upscaler_index, save_upscaled_image, redraw_mode, save_seams_fix_image, seams_fix_mask_blur, + seams_fix_type, target_size_type, custom_width, custom_height, custom_scale): + + # Init + processing.fix_seed(p) + devices.torch_gc() + + p.do_not_save_grid = True + p.do_not_save_samples = True + p.inpaint_full_res = False + + p.inpainting_fill = 1 + p.n_iter = 1 + p.batch_size = 1 + + seed = p.seed + + # Init image + init_img = p.init_images[0] + if init_img == None: + return Processed(p, [], seed, "Empty image") + init_img = images.flatten(init_img, opts.img2img_background_color) + + #override size + if target_size_type == 1: + p.width = custom_width + p.height = custom_height + if target_size_type == 2: + p.width = math.ceil((init_img.width * custom_scale) / 64) * 64 + p.height = math.ceil((init_img.height * custom_scale) / 64) * 64 + + # Upscaling + upscaler = USDUpscaler(p, init_img, upscaler_index, save_upscaled_image, save_seams_fix_image, tile_width, tile_height) + upscaler.upscale() + + # Drawing + upscaler.setup_redraw(redraw_mode, padding, mask_blur) + upscaler.setup_seams_fix(seams_fix_padding, seams_fix_denoise, seams_fix_mask_blur, seams_fix_width, seams_fix_type) + upscaler.print_info() + upscaler.add_extra_info() + upscaler.process() + result_images = upscaler.result_images + + return Processed(p, result_images, seed, upscaler.initial_info if upscaler.initial_info is not None else "") + diff --git a/ComfyUI_UltimateSDUpscale/usdu_patch.py b/ComfyUI_UltimateSDUpscale/usdu_patch.py new file mode 100644 index 0000000000000000000000000000000000000000..3abecf1729de155529718505cb85e3d0ac6341be --- /dev/null +++ b/ComfyUI_UltimateSDUpscale/usdu_patch.py @@ -0,0 +1,66 @@ +# Make some patches to the script +from repositories import 
ultimate_upscale as usdu +import modules.shared as shared +import math +from PIL import Image + + +if (not hasattr(Image, 'Resampling')): # For older versions of Pillow + Image.Resampling = Image + +# +# Instead of using multiples of 64, use multiples of 8 +# + +# Upscaler +old_init = usdu.USDUpscaler.__init__ + + +def new_init(self, p, image, upscaler_index, save_redraw, save_seams_fix, tile_width, tile_height): + p.width = math.ceil((image.width * p.upscale_by) / 8) * 8 + p.height = math.ceil((image.height * p.upscale_by) / 8) * 8 + old_init(self, p, image, upscaler_index, save_redraw, save_seams_fix, tile_width, tile_height) + + +usdu.USDUpscaler.__init__ = new_init + +# Redraw +old_setup_redraw = usdu.USDURedraw.init_draw + + +def new_setup_redraw(self, p, width, height): + mask, draw = old_setup_redraw(self, p, width, height) + p.width = math.ceil((self.tile_width + self.padding) / 8) * 8 + p.height = math.ceil((self.tile_height + self.padding) / 8) * 8 + return mask, draw + + +usdu.USDURedraw.init_draw = new_setup_redraw + +# Seams fix +old_setup_seams_fix = usdu.USDUSeamsFix.init_draw + + +def new_setup_seams_fix(self, p): + old_setup_seams_fix(self, p) + p.width = math.ceil((self.tile_width + self.padding) / 8) * 8 + p.height = math.ceil((self.tile_height + self.padding) / 8) * 8 + + +usdu.USDUSeamsFix.init_draw = new_setup_seams_fix + + +# +# Make the script upscale on a batch of images instead of one image +# + +old_upscale = usdu.USDUpscaler.upscale + + +def new_upscale(self): + old_upscale(self) + shared.batch = [self.image] + \ + [img.resize((self.p.width, self.p.height), resample=Image.LANCZOS) for img in shared.batch[1:]] + + +usdu.USDUpscaler.upscale = new_upscale diff --git a/ComfyUI_UltimateSDUpscale/utils.py b/ComfyUI_UltimateSDUpscale/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..a9bdcf22fd34138cab0dca9259edeb8037ea6de0 --- /dev/null +++ b/ComfyUI_UltimateSDUpscale/utils.py @@ -0,0 +1,460 @@ +import numpy as np +from PIL import Image, ImageFilter +import torch +import torch.nn.functional as F +from torchvision.transforms import GaussianBlur +import math + +if (not hasattr(Image, 'Resampling')): # For older versions of Pillow + Image.Resampling = Image + +BLUR_KERNEL_SIZE = 15 + + +def tensor_to_pil(img_tensor, batch_index=0): + # Takes an image in a batch in the form of a tensor of shape [batch_size, channels, height, width] + # and returns an PIL Image with the corresponding mode deduced by the number of channels + + # Take the image in the batch given by batch_index + img_tensor = img_tensor[batch_index].unsqueeze(0) + i = 255. 
* img_tensor.cpu().numpy() + img = Image.fromarray(np.clip(i, 0, 255).astype(np.uint8).squeeze()) + return img + + +def pil_to_tensor(image): + # Takes a PIL image and returns a tensor of shape [1, height, width, channels] + image = np.array(image).astype(np.float32) / 255.0 + image = torch.from_numpy(image).unsqueeze(0) + if len(image.shape) == 3: # If the image is grayscale, add a channel dimension + image = image.unsqueeze(-1) + return image + + +def controlnet_hint_to_pil(tensor, batch_index=0): + return tensor_to_pil(tensor.movedim(1, -1), batch_index) + + +def pil_to_controlnet_hint(img): + return pil_to_tensor(img).movedim(-1, 1) + + +def crop_tensor(tensor, region): + # Takes a tensor of shape [batch_size, height, width, channels] and crops it to the given region + x1, y1, x2, y2 = region + return tensor[:, y1:y2, x1:x2, :] + + +def resize_tensor(tensor, size, mode="nearest-exact"): + # Takes a tensor of shape [B, C, H, W] and resizes + # it to a shape of [B, C, size[0], size[1]] using the given mode + return torch.nn.functional.interpolate(tensor, size=size, mode=mode) + + +def get_crop_region(mask, pad=0): + # Takes a black and white PIL image in 'L' mode and returns the coordinates of the white rectangular mask region + # Should be equivalent to the get_crop_region function from https://github.com/AUTOMATIC1111/stable-diffusion-webui/blob/master/modules/masking.py + coordinates = mask.getbbox() + if coordinates is not None: + x1, y1, x2, y2 = coordinates + else: + x1, y1, x2, y2 = mask.width, mask.height, 0, 0 + # Apply padding + x1 = max(x1 - pad, 0) + y1 = max(y1 - pad, 0) + x2 = min(x2 + pad, mask.width) + y2 = min(y2 + pad, mask.height) + return fix_crop_region((x1, y1, x2, y2), (mask.width, mask.height)) + + +def fix_crop_region(region, image_size): + # Remove the extra pixel added by the get_crop_region function + image_width, image_height = image_size + x1, y1, x2, y2 = region + if x2 < image_width: + x2 -= 1 + if y2 < image_height: + y2 -= 1 + return x1, y1, x2, y2 + + +def expand_crop(region, width, height, target_width, target_height): + ''' + Expands a crop region to a specified target size. + :param region: A tuple of the form (x1, y1, x2, y2) denoting the upper left and the lower right points + of the rectangular region. Expected to have x2 > x1 and y2 > y1. + :param width: The width of the image the crop region is from. + :param height: The height of the image the crop region is from. + :param target_width: The desired width of the crop region. + :param target_height: The desired height of the crop region. 
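+    :return: A tuple of the expanded region (x1, y1, x2, y2) and the (target_width, target_height) size.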
+ ''' + x1, y1, x2, y2 = region + actual_width = x2 - x1 + actual_height = y2 - y1 + # target_width = math.ceil(actual_width / 8) * 8 + # target_height = math.ceil(actual_height / 8) * 8 + + # Try to expand region to the right of half the difference + width_diff = target_width - actual_width + x2 = min(x2 + width_diff // 2, width) + # Expand region to the left of the difference including the pixels that could not be expanded to the right + width_diff = target_width - (x2 - x1) + x1 = max(x1 - width_diff, 0) + # Try the right again + width_diff = target_width - (x2 - x1) + x2 = min(x2 + width_diff, width) + + # Try to expand region to the bottom of half the difference + height_diff = target_height - actual_height + y2 = min(y2 + height_diff // 2, height) + # Expand region to the top of the difference including the pixels that could not be expanded to the bottom + height_diff = target_height - (y2 - y1) + y1 = max(y1 - height_diff, 0) + # Try the bottom again + height_diff = target_height - (y2 - y1) + y2 = min(y2 + height_diff, height) + + return (x1, y1, x2, y2), (target_width, target_height) + + +def resize_region(region, init_size, resize_size): + # Resize a crop so that it fits an image that was resized to the given width and height + x1, y1, x2, y2 = region + init_width, init_height = init_size + resize_width, resize_height = resize_size + x1 = math.floor(x1 * resize_width / init_width) + x2 = math.ceil(x2 * resize_width / init_width) + y1 = math.floor(y1 * resize_height / init_height) + y2 = math.ceil(y2 * resize_height / init_height) + return (x1, y1, x2, y2) + + +def pad_image(image, left_pad, right_pad, top_pad, bottom_pad, fill=False, blur=False): + ''' + Pads an image with the given number of pixels on each side and fills the padding with data from the edges. 
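+    With ``fill=False`` the new border is left black; with ``fill=True`` it is filled by stretching the outermost rows and columns of the source image outward.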
+ :param image: A PIL image + :param left_pad: The number of pixels to pad on the left side + :param right_pad: The number of pixels to pad on the right side + :param top_pad: The number of pixels to pad on the top side + :param bottom_pad: The number of pixels to pad on the bottom side + :param blur: Whether to blur the padded edges + :return: A PIL image with size (image.width + left_pad + right_pad, image.height + top_pad + bottom_pad) + ''' + left_edge = image.crop((0, 1, 1, image.height - 1)) + right_edge = image.crop((image.width - 1, 1, image.width, image.height - 1)) + top_edge = image.crop((1, 0, image.width - 1, 1)) + bottom_edge = image.crop((1, image.height - 1, image.width - 1, image.height)) + new_width = image.width + left_pad + right_pad + new_height = image.height + top_pad + bottom_pad + padded_image = Image.new(image.mode, (new_width, new_height)) + padded_image.paste(image, (left_pad, top_pad)) + if fill: + for i in range(left_pad): + edge = left_edge.resize( + (1, new_height - i * (top_pad + bottom_pad) // left_pad), resample=Image.Resampling.NEAREST) + padded_image.paste(edge, (i, i * top_pad // left_pad)) + for i in range(right_pad): + edge = right_edge.resize( + (1, new_height - i * (top_pad + bottom_pad) // right_pad), resample=Image.Resampling.NEAREST) + padded_image.paste(edge, (new_width - 1 - i, i * top_pad // right_pad)) + for i in range(top_pad): + edge = top_edge.resize( + (new_width - i * (left_pad + right_pad) // top_pad, 1), resample=Image.Resampling.NEAREST) + padded_image.paste(edge, (i * left_pad // top_pad, i)) + for i in range(bottom_pad): + edge = bottom_edge.resize( + (new_width - i * (left_pad + right_pad) // bottom_pad, 1), resample=Image.Resampling.NEAREST) + padded_image.paste(edge, (i * left_pad // bottom_pad, new_height - 1 - i)) + if blur and not (left_pad == right_pad == top_pad == bottom_pad == 0): + padded_image = padded_image.filter(ImageFilter.GaussianBlur(BLUR_KERNEL_SIZE)) + padded_image.paste(image, (left_pad, top_pad)) + return padded_image + + +def pad_image2(image, left_pad, right_pad, top_pad, bottom_pad, fill=False, blur=False): + ''' + Pads an image with the given number of pixels on each side and fills the padding with data from the edges. + Faster than pad_image, but only pads with edge data in straight lines. 
+ :param image: A PIL image + :param left_pad: The number of pixels to pad on the left side + :param right_pad: The number of pixels to pad on the right side + :param top_pad: The number of pixels to pad on the top side + :param bottom_pad: The number of pixels to pad on the bottom side + :param blur: Whether to blur the padded edges + :return: A PIL image with size (image.width + left_pad + right_pad, image.height + top_pad + bottom_pad) + ''' + left_edge = image.crop((0, 1, 1, image.height - 1)) + right_edge = image.crop((image.width - 1, 1, image.width, image.height - 1)) + top_edge = image.crop((1, 0, image.width - 1, 1)) + bottom_edge = image.crop((1, image.height - 1, image.width - 1, image.height)) + new_width = image.width + left_pad + right_pad + new_height = image.height + top_pad + bottom_pad + padded_image = Image.new(image.mode, (new_width, new_height)) + padded_image.paste(image, (left_pad, top_pad)) + if fill: + if left_pad > 0: + padded_image.paste(left_edge.resize((left_pad, new_height), resample=Image.Resampling.NEAREST), (0, 0)) + if right_pad > 0: + padded_image.paste(right_edge.resize((right_pad, new_height), + resample=Image.Resampling.NEAREST), (new_width - right_pad, 0)) + if top_pad > 0: + padded_image.paste(top_edge.resize((new_width, top_pad), resample=Image.Resampling.NEAREST), (0, 0)) + if bottom_pad > 0: + padded_image.paste(bottom_edge.resize((new_width, bottom_pad), + resample=Image.Resampling.NEAREST), (0, new_height - bottom_pad)) + if blur and not (left_pad == right_pad == top_pad == bottom_pad == 0): + padded_image = padded_image.filter(ImageFilter.GaussianBlur(BLUR_KERNEL_SIZE)) + padded_image.paste(image, (left_pad, top_pad)) + return padded_image + + +def pad_tensor(tensor, left_pad, right_pad, top_pad, bottom_pad, fill=False, blur=False): + ''' + Pads an image tensor with the given number of pixels on each side and fills the padding with data from the edges. + :param tensor: A tensor of shape [B, H, W, C] + :param left_pad: The number of pixels to pad on the left side + :param right_pad: The number of pixels to pad on the right side + :param top_pad: The number of pixels to pad on the top side + :param bottom_pad: The number of pixels to pad on the bottom side + :param blur: Whether to blur the padded edges + :return: A tensor of shape [B, H + top_pad + bottom_pad, W + left_pad + right_pad, C] + ''' + batch_size, channels, height, width = tensor.shape + h_pad = left_pad + right_pad + v_pad = top_pad + bottom_pad + new_width = width + h_pad + new_height = height + v_pad + + # Create empty image + padded = torch.zeros((batch_size, channels, new_height, new_width), dtype=tensor.dtype) + + # Copy the original image into the centor of the padded tensor + padded[:, :, top_pad:top_pad + height, left_pad:left_pad + width] = tensor + + # Duplicate the edges of the original image into the padding + if top_pad > 0: + padded[:, :, :top_pad, :] = padded[:, :, top_pad:top_pad + 1, :] # Top edge + if bottom_pad > 0: + padded[:, :, -bottom_pad:, :] = padded[:, :, -bottom_pad - 1:-bottom_pad, :] # Bottom edge + if left_pad > 0: + padded[:, :, :, :left_pad] = padded[:, :, :, left_pad:left_pad + 1] # Left edge + if right_pad > 0: + padded[:, :, :, -right_pad:] = padded[:, :, :, -right_pad - 1:-right_pad] # Right edge + + return padded + + +def resize_and_pad_image(image, width, height, fill=False, blur=False): + ''' + Resizes an image to the given width and height and pads it to the given width and height. 
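+    The aspect ratio is preserved when resizing; the leftover area is padded on both sides and the (horizontal, vertical) padding amounts are returned together with the image.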
+ :param image: A PIL image + :param width: The width of the resized image + :param height: The height of the resized image + :param fill: Whether to fill the padding with data from the edges + :param blur: Whether to blur the padded edges + :return: A PIL image of size (width, height) + ''' + width_ratio = width / image.width + height_ratio = height / image.height + if height_ratio > width_ratio: + resize_ratio = width_ratio + else: + resize_ratio = height_ratio + resize_width = round(image.width * resize_ratio) + resize_height = round(image.height * resize_ratio) + resized = image.resize((resize_width, resize_height), resample=Image.Resampling.LANCZOS) + # Pad the sides of the image to get the image to the desired size that wasn't covered by the resize + horizontal_pad = (width - resize_width) // 2 + vertical_pad = (height - resize_height) // 2 + result = pad_image2(resized, horizontal_pad, horizontal_pad, vertical_pad, vertical_pad, fill, blur) + result = result.resize((width, height), resample=Image.Resampling.LANCZOS) + return result, (horizontal_pad, vertical_pad) + + +def resize_and_pad_tensor(tensor, width, height, fill=False, blur=False): + ''' + Resizes an image tensor to the given width and height and pads it to the given width and height. + :param tensor: A tensor of shape [B, H, W, C] + :param width: The width of the resized image + :param height: The height of the resized image + :param fill: Whether to fill the padding with data from the edges + :param blur: Whether to blur the padded edges + :return: A tensor of shape [B, height, width, C] + ''' + # Resize the image to the closest size that maintains the aspect ratio + width_ratio = width / tensor.shape[3] + height_ratio = height / tensor.shape[2] + if height_ratio > width_ratio: + resize_ratio = width_ratio + else: + resize_ratio = height_ratio + resize_width = round(tensor.shape[3] * resize_ratio) + resize_height = round(tensor.shape[2] * resize_ratio) + resized = F.interpolate(tensor, size=(resize_height, resize_width), mode='nearest-exact') + # Pad the sides of the image to get the image to the desired size that wasn't covered by the resize + horizontal_pad = (width - resize_width) // 2 + vertical_pad = (height - resize_height) // 2 + result = pad_tensor(resized, horizontal_pad, horizontal_pad, vertical_pad, vertical_pad, fill, blur) + result = F.interpolate(result, size=(height, width), mode='nearest-exact') + return result + + +def crop_controlnet(cond_dict, region, init_size, canvas_size, tile_size, w_pad, h_pad): + if "control" not in cond_dict: + return + c = cond_dict["control"] + controlnet = c.copy() + cond_dict["control"] = controlnet + while c is not None: + # hint is shape (B, C, H, W) + hint = controlnet.cond_hint_original + resized_crop = resize_region(region, canvas_size, hint.shape[:-3:-1]) + hint = crop_tensor(hint.movedim(1, -1), resized_crop).movedim(-1, 1) + hint = resize_tensor(hint, tile_size[::-1]) + controlnet.cond_hint_original = hint + c = c.previous_controlnet + controlnet.set_previous_controlnet(c.copy() if c is not None else None) + controlnet = controlnet.previous_controlnet + + +def region_intersection(region1, region2): + """ + Returns the coordinates of the intersection of two rectangular regions. + :param region1: A tuple of the form (x1, y1, x2, y2) denoting the upper left and the lower right points + of the first rectangular region. Expected to have x2 > x1 and y2 > y1. + :param region2: The second rectangular region with the same format as the first. 
+ :return: A tuple of the form (x1, y1, x2, y2) denoting the rectangular intersection. + None if there is no intersection. + """ + x1, y1, x2, y2 = region1 + x1_, y1_, x2_, y2_ = region2 + x1 = max(x1, x1_) + y1 = max(y1, y1_) + x2 = min(x2, x2_) + y2 = min(y2, y2_) + if x1 >= x2 or y1 >= y2: + return None + return (x1, y1, x2, y2) + + +def crop_gligen(cond_dict, region, init_size, canvas_size, tile_size, w_pad, h_pad): + if "gligen" not in cond_dict: + return + type, model, cond = cond_dict["gligen"] + if type != "position": + from warnings import warn + warn(f"Unknown gligen type {type}") + return + cropped = [] + for c in cond: + emb, h, w, y, x = c + # Get the coordinates of the box in the upscaled image + x1 = x * 8 + y1 = y * 8 + x2 = x1 + w * 8 + y2 = y1 + h * 8 + gligen_upscaled_box = resize_region((x1, y1, x2, y2), init_size, canvas_size) + + # Calculate the intersection of the gligen box and the region + intersection = region_intersection(gligen_upscaled_box, region) + if intersection is None: + continue + x1, y1, x2, y2 = intersection + + # Offset the gligen box so that the origin is at the top left of the tile region + x1 -= region[0] + y1 -= region[1] + x2 -= region[0] + y2 -= region[1] + + # Add the padding + x1 += w_pad + y1 += h_pad + x2 += w_pad + y2 += h_pad + + # Set the new position params + h = (y2 - y1) // 8 + w = (x2 - x1) // 8 + x = x1 // 8 + y = y1 // 8 + cropped.append((emb, h, w, y, x)) + + cond_dict["gligen"] = (type, model, cropped) + + +def crop_area(cond_dict, region, init_size, canvas_size, tile_size, w_pad, h_pad): + if "area" not in cond_dict: + return + + # Resize the area conditioning to the canvas size and confine it to the tile region + h, w, y, x = cond_dict["area"] + w, h, x, y = 8 * w, 8 * h, 8 * x, 8 * y + x1, y1, x2, y2 = resize_region((x, y, x + w, y + h), init_size, canvas_size) + intersection = region_intersection((x1, y1, x2, y2), region) + if intersection is None: + del cond_dict["area"] + del cond_dict["strength"] + return + x1, y1, x2, y2 = intersection + + # Offset origin to the top left of the tile + x1 -= region[0] + y1 -= region[1] + x2 -= region[0] + y2 -= region[1] + + # Add the padding + x1 += w_pad + y1 += h_pad + x2 += w_pad + y2 += h_pad + + # Set the params for tile + w, h = (x2 - x1) // 8, (y2 - y1) // 8 + x, y = x1 // 8, y1 // 8 + + cond_dict["area"] = (h, w, y, x) + + +def crop_mask(cond_dict, region, init_size, canvas_size, tile_size, w_pad, h_pad): + if "mask" not in cond_dict: + return + mask_tensor = cond_dict["mask"] # (B, H, W) + masks = [] + for i in range(mask_tensor.shape[0]): + # Convert to PIL image + mask = tensor_to_pil(mask_tensor, i) # W x H + + # Resize the mask to the canvas size + mask = mask.resize(canvas_size, Image.Resampling.BICUBIC) + + # Crop the mask to the region + mask = mask.crop(region) + + # Add padding + mask, _ = resize_and_pad_image(mask, tile_size[0], tile_size[1], fill=True) + + # Resize the mask to the tile size + if tile_size != mask.size: + mask = mask.resize(tile_size, Image.Resampling.BICUBIC) + + # Convert back to tensor + mask = pil_to_tensor(mask) # (1, H, W, 1) + mask = mask.squeeze(-1) # (1, H, W) + masks.append(mask) + + cond_dict["mask"] = torch.cat(masks, dim=0) # (B, H, W) + + +def crop_cond(cond, region, init_size, canvas_size, tile_size, w_pad=0, h_pad=0): + cropped = [] + for emb, x in cond: + cond_dict = x.copy() + n = [emb, cond_dict] + crop_controlnet(cond_dict, region, init_size, canvas_size, tile_size, w_pad, h_pad) + crop_gligen(cond_dict, region, init_size, 
canvas_size, tile_size, w_pad, h_pad) + crop_area(cond_dict, region, init_size, canvas_size, tile_size, w_pad, h_pad) + crop_mask(cond_dict, region, init_size, canvas_size, tile_size, w_pad, h_pad) + cropped.append(n) + return cropped diff --git a/ComfyUI_essentials/LICENSE b/ComfyUI_essentials/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..948b5e4192d70b665b15bb5a917bd98b3771eb4b --- /dev/null +++ b/ComfyUI_essentials/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 Matteo Spinelli + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/ComfyUI_essentials/README.md b/ComfyUI_essentials/README.md new file mode 100644 index 0000000000000000000000000000000000000000..c02d986782888256ba8c5242c0f5f958c5a34506 --- /dev/null +++ b/ComfyUI_essentials/README.md @@ -0,0 +1,49 @@ +# :wrench: ComfyUI Essentials + +Essential nodes that are weirdly missing from ComfyUI core. With few exceptions they are new features and not commodities. I hope this will be just a temporary repository until the nodes get included into ComfyUI. + +# Sponsorship + +
    + +**[:heart: Github Sponsor](https://github.com/sponsors/cubiq) | [:coin: Paypal](https://paypal.me/matt3o)** + +
    + +If you like my work and wish to see updates and new features please consider sponsoring my projects. + +- [ComfyUI IPAdapter Plus](https://github.com/cubiq/ComfyUI_IPAdapter_plus) +- [ComfyUI InstantID (Native)](https://github.com/cubiq/ComfyUI_InstantID) +- [ComfyUI Essentials](https://github.com/cubiq/ComfyUI_essentials) +- [ComfyUI FaceAnalysis](https://github.com/cubiq/ComfyUI_FaceAnalysis) + +Not to mention the documentation and videos tutorials. Check my **ComfyUI Advanced Understanding** videos on YouTube for example, [part 1](https://www.youtube.com/watch?v=_C7kR2TFIX0) and [part 2](https://www.youtube.com/watch?v=ijqXnW_9gzc) + +The only way to keep the code open and free is by sponsoring its development. The more sponsorships the more time I can dedicate to my open source projects. + +Please consider a [Github Sponsorship](https://github.com/sponsors/cubiq) or [PayPal donation](https://paypal.me/matt3o) (Matteo "matt3o" Spinelli). For sponsorships of $50+, let me know if you'd like to be mentioned in this readme file, you can find me on [Discord](https://latent.vision/discord) or _matt3o :snail: gmail.com_. + +## Current sponsors + +It's only thanks to generous sponsors that **the whole community** can enjoy open and free software. Please join me in thanking the following companies and individuals! + +### :trophy: Gold sponsors + +[![Kaiber.ai](https://f.latent.vision/imgs/kaiber.png)](https://kaiber.ai/)   [![Kaiber.ai](https://f.latent.vision/imgs/replicate.png)](https://replicate.com/)   [![InstaSD](https://f.latent.vision/imgs/instasd.png)](https://replicate.com/) + +### :tada: Silver sponsors + +[![OperArt.ai](https://f.latent.vision/imgs/openart.png?r=1)](https://openart.ai/workflows)   [![OperArt.ai](https://f.latent.vision/imgs/finetuners.png)](https://www.finetuners.ai/)   [![Comfy.ICU](https://f.latent.vision/imgs/comfyicu.png?r=1)](https://comfy.icu/) + +### Other companies supporting my projects + +- [RunComfy](https://www.runcomfy.com/) (ComfyUI Cloud) + +### Esteemed individuals + +- [Øystein Ø. 
Olsen](https://github.com/FireNeslo) +- [Jack Gane](https://github.com/ganeJackS) +- [Nathan Shipley](https://www.nathanshipley.com/) +- [Dkdnzia](https://github.com/Dkdnzia) + +[And all my public and private sponsors!](https://github.com/sponsors/cubiq) \ No newline at end of file diff --git a/ComfyUI_essentials/__init__.py b/ComfyUI_essentials/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f711d30fd3aa5fe1e7ffbed1694da8221886e211 --- /dev/null +++ b/ComfyUI_essentials/__init__.py @@ -0,0 +1,34 @@ +#from .essentials import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS +from .image import IMAGE_CLASS_MAPPINGS, IMAGE_NAME_MAPPINGS +from .mask import MASK_CLASS_MAPPINGS, MASK_NAME_MAPPINGS +from .sampling import SAMPLING_CLASS_MAPPINGS, SAMPLING_NAME_MAPPINGS +from .segmentation import SEG_CLASS_MAPPINGS, SEG_NAME_MAPPINGS +from .misc import MISC_CLASS_MAPPINGS, MISC_NAME_MAPPINGS +from .conditioning import COND_CLASS_MAPPINGS, COND_NAME_MAPPINGS +from .text import TEXT_CLASS_MAPPINGS, TEXT_NAME_MAPPINGS + +NODE_CLASS_MAPPINGS = {} +NODE_DISPLAY_NAME_MAPPINGS = {} + +NODE_CLASS_MAPPINGS.update(COND_CLASS_MAPPINGS) +NODE_DISPLAY_NAME_MAPPINGS.update(COND_NAME_MAPPINGS) + +NODE_CLASS_MAPPINGS.update(IMAGE_CLASS_MAPPINGS) +NODE_DISPLAY_NAME_MAPPINGS.update(IMAGE_NAME_MAPPINGS) + +NODE_CLASS_MAPPINGS.update(MASK_CLASS_MAPPINGS) +NODE_DISPLAY_NAME_MAPPINGS.update(MASK_NAME_MAPPINGS) + +NODE_CLASS_MAPPINGS.update(SAMPLING_CLASS_MAPPINGS) +NODE_DISPLAY_NAME_MAPPINGS.update(SAMPLING_NAME_MAPPINGS) + +NODE_CLASS_MAPPINGS.update(SEG_CLASS_MAPPINGS) +NODE_DISPLAY_NAME_MAPPINGS.update(SEG_NAME_MAPPINGS) + +NODE_CLASS_MAPPINGS.update(TEXT_CLASS_MAPPINGS) +NODE_DISPLAY_NAME_MAPPINGS.update(TEXT_NAME_MAPPINGS) + +NODE_CLASS_MAPPINGS.update(MISC_CLASS_MAPPINGS) +NODE_DISPLAY_NAME_MAPPINGS.update(MISC_NAME_MAPPINGS) + +__all__ = ['NODE_CLASS_MAPPINGS', 'NODE_DISPLAY_NAME_MAPPINGS'] diff --git a/ComfyUI_essentials/carve.py b/ComfyUI_essentials/carve.py new file mode 100644 index 0000000000000000000000000000000000000000..017e804d84324fde3d4004a3ed832676b8237985 --- /dev/null +++ b/ComfyUI_essentials/carve.py @@ -0,0 +1,454 @@ +# MIT licensed code from https://github.com/li-plus/seam-carving/ + +from enum import Enum +from typing import Optional, Tuple + +import numba as nb +import numpy as np +from scipy.ndimage import sobel + +DROP_MASK_ENERGY = 1e5 +KEEP_MASK_ENERGY = 1e3 + + +class OrderMode(str, Enum): + WIDTH_FIRST = "width-first" + HEIGHT_FIRST = "height-first" + + +class EnergyMode(str, Enum): + FORWARD = "forward" + BACKWARD = "backward" + + +def _list_enum(enum_class) -> Tuple: + return tuple(x.value for x in enum_class) + + +def _rgb2gray(rgb: np.ndarray) -> np.ndarray: + """Convert an RGB image to a grayscale image""" + coeffs = np.array([0.2125, 0.7154, 0.0721], dtype=np.float32) + return (rgb @ coeffs).astype(rgb.dtype) + + +def _get_seam_mask(src: np.ndarray, seam: np.ndarray) -> np.ndarray: + """Convert a list of seam column indices to a mask""" + return np.eye(src.shape[1], dtype=bool)[seam] + + +def _remove_seam_mask(src: np.ndarray, seam_mask: np.ndarray) -> np.ndarray: + """Remove a seam from the source image according to the given seam_mask""" + if src.ndim == 3: + h, w, c = src.shape + seam_mask = np.broadcast_to(seam_mask[:, :, None], src.shape) + dst = src[~seam_mask].reshape((h, w - 1, c)) + else: + h, w = src.shape + dst = src[~seam_mask].reshape((h, w - 1)) + return dst + + +def _get_energy(gray: np.ndarray) -> np.ndarray: + """Get backward energy 
map from the source image""" + assert gray.ndim == 2 + + gray = gray.astype(np.float32) + grad_x = sobel(gray, axis=1) + grad_y = sobel(gray, axis=0) + energy = np.abs(grad_x) + np.abs(grad_y) + return energy + + +@nb.njit(nb.int32[:](nb.float32[:, :]), cache=True) +def _get_backward_seam(energy: np.ndarray) -> np.ndarray: + """Compute the minimum vertical seam from the backward energy map""" + h, w = energy.shape + inf = np.array([np.inf], dtype=np.float32) + cost = np.concatenate((inf, energy[0], inf)) + parent = np.empty((h, w), dtype=np.int32) + base_idx = np.arange(-1, w - 1, dtype=np.int32) + + for r in range(1, h): + choices = np.vstack((cost[:-2], cost[1:-1], cost[2:])) + min_idx = np.argmin(choices, axis=0) + base_idx + parent[r] = min_idx + cost[1:-1] = cost[1:-1][min_idx] + energy[r] + + c = np.argmin(cost[1:-1]) + seam = np.empty(h, dtype=np.int32) + for r in range(h - 1, -1, -1): + seam[r] = c + c = parent[r, c] + + return seam + + +def _get_backward_seams( + gray: np.ndarray, num_seams: int, aux_energy: Optional[np.ndarray] +) -> np.ndarray: + """Compute the minimum N vertical seams using backward energy""" + h, w = gray.shape + seams = np.zeros((h, w), dtype=bool) + rows = np.arange(h, dtype=np.int32) + idx_map = np.broadcast_to(np.arange(w, dtype=np.int32), (h, w)) + energy = _get_energy(gray) + if aux_energy is not None: + energy += aux_energy + for _ in range(num_seams): + seam = _get_backward_seam(energy) + seams[rows, idx_map[rows, seam]] = True + + seam_mask = _get_seam_mask(gray, seam) + gray = _remove_seam_mask(gray, seam_mask) + idx_map = _remove_seam_mask(idx_map, seam_mask) + if aux_energy is not None: + aux_energy = _remove_seam_mask(aux_energy, seam_mask) + + # Only need to re-compute the energy in the bounding box of the seam + _, cur_w = energy.shape + lo = max(0, np.min(seam) - 1) + hi = min(cur_w, np.max(seam) + 1) + pad_lo = 1 if lo > 0 else 0 + pad_hi = 1 if hi < cur_w - 1 else 0 + mid_block = gray[:, lo - pad_lo : hi + pad_hi] + _, mid_w = mid_block.shape + mid_energy = _get_energy(mid_block)[:, pad_lo : mid_w - pad_hi] + if aux_energy is not None: + mid_energy += aux_energy[:, lo:hi] + energy = np.hstack((energy[:, :lo], mid_energy, energy[:, hi + 1 :])) + + return seams + + +@nb.njit( + [ + nb.int32[:](nb.float32[:, :], nb.none), + nb.int32[:](nb.float32[:, :], nb.float32[:, :]), + ], + cache=True, +) +def _get_forward_seam(gray: np.ndarray, aux_energy: Optional[np.ndarray]) -> np.ndarray: + """Compute the minimum vertical seam using forward energy""" + h, w = gray.shape + + gray = np.hstack((gray[:, :1], gray, gray[:, -1:])) + + inf = np.array([np.inf], dtype=np.float32) + dp = np.concatenate((inf, np.abs(gray[0, 2:] - gray[0, :-2]), inf)) + + parent = np.empty((h, w), dtype=np.int32) + base_idx = np.arange(-1, w - 1, dtype=np.int32) + + inf = np.array([np.inf], dtype=np.float32) + for r in range(1, h): + curr_shl = gray[r, 2:] + curr_shr = gray[r, :-2] + cost_mid = np.abs(curr_shl - curr_shr) + if aux_energy is not None: + cost_mid += aux_energy[r] + + prev_mid = gray[r - 1, 1:-1] + cost_left = cost_mid + np.abs(prev_mid - curr_shr) + cost_right = cost_mid + np.abs(prev_mid - curr_shl) + + dp_mid = dp[1:-1] + dp_left = dp[:-2] + dp_right = dp[2:] + + choices = np.vstack( + (cost_left + dp_left, cost_mid + dp_mid, cost_right + dp_right) + ) + min_idx = np.argmin(choices, axis=0) + parent[r] = min_idx + base_idx + # numba does not support specifying axis in np.min, below loop is equivalent to: + # `dp_mid[:] = np.min(choices, axis=0)` or `dp_mid[:] = 
choices[min_idx, np.arange(w)]` + for j, i in enumerate(min_idx): + dp_mid[j] = choices[i, j] + + c = np.argmin(dp[1:-1]) + seam = np.empty(h, dtype=np.int32) + for r in range(h - 1, -1, -1): + seam[r] = c + c = parent[r, c] + + return seam + + +def _get_forward_seams( + gray: np.ndarray, num_seams: int, aux_energy: Optional[np.ndarray] +) -> np.ndarray: + """Compute minimum N vertical seams using forward energy""" + h, w = gray.shape + seams = np.zeros((h, w), dtype=bool) + rows = np.arange(h, dtype=np.int32) + idx_map = np.broadcast_to(np.arange(w, dtype=np.int32), (h, w)) + for _ in range(num_seams): + seam = _get_forward_seam(gray, aux_energy) + seams[rows, idx_map[rows, seam]] = True + seam_mask = _get_seam_mask(gray, seam) + gray = _remove_seam_mask(gray, seam_mask) + idx_map = _remove_seam_mask(idx_map, seam_mask) + if aux_energy is not None: + aux_energy = _remove_seam_mask(aux_energy, seam_mask) + + return seams + + +def _get_seams( + gray: np.ndarray, num_seams: int, energy_mode: str, aux_energy: Optional[np.ndarray] +) -> np.ndarray: + """Get the minimum N seams from the grayscale image""" + gray = np.asarray(gray, dtype=np.float32) + if energy_mode == EnergyMode.BACKWARD: + return _get_backward_seams(gray, num_seams, aux_energy) + elif energy_mode == EnergyMode.FORWARD: + return _get_forward_seams(gray, num_seams, aux_energy) + else: + raise ValueError( + f"expect energy_mode to be one of {_list_enum(EnergyMode)}, got {energy_mode}" + ) + + +def _reduce_width( + src: np.ndarray, + delta_width: int, + energy_mode: str, + aux_energy: Optional[np.ndarray], +) -> Tuple[np.ndarray, Optional[np.ndarray]]: + """Reduce the width of image by delta_width pixels""" + assert src.ndim in (2, 3) and delta_width >= 0 + if src.ndim == 2: + gray = src + src_h, src_w = src.shape + dst_shape: Tuple[int, ...] 
= (src_h, src_w - delta_width) + else: + gray = _rgb2gray(src) + src_h, src_w, src_c = src.shape + dst_shape = (src_h, src_w - delta_width, src_c) + + to_keep = ~_get_seams(gray, delta_width, energy_mode, aux_energy) + dst = src[to_keep].reshape(dst_shape) + if aux_energy is not None: + aux_energy = aux_energy[to_keep].reshape(dst_shape[:2]) + return dst, aux_energy + + +@nb.njit( + nb.float32[:, :, :](nb.float32[:, :, :], nb.boolean[:, :], nb.int32), cache=True +) +def _insert_seams_kernel( + src: np.ndarray, seams: np.ndarray, delta_width: int +) -> np.ndarray: + """The numba kernel for inserting seams""" + src_h, src_w, src_c = src.shape + dst = np.empty((src_h, src_w + delta_width, src_c), dtype=src.dtype) + for row in range(src_h): + dst_col = 0 + for src_col in range(src_w): + if seams[row, src_col]: + left = src[row, max(src_col - 1, 0)] + right = src[row, src_col] + dst[row, dst_col] = (left + right) / 2 + dst_col += 1 + dst[row, dst_col] = src[row, src_col] + dst_col += 1 + return dst + + +def _insert_seams(src: np.ndarray, seams: np.ndarray, delta_width: int) -> np.ndarray: + """Insert multiple seams into the source image""" + dst = src.astype(np.float32) + if dst.ndim == 2: + dst = dst[:, :, None] + dst = _insert_seams_kernel(dst, seams, delta_width).astype(src.dtype) + if src.ndim == 2: + dst = dst.squeeze(-1) + return dst + + +def _expand_width( + src: np.ndarray, + delta_width: int, + energy_mode: str, + aux_energy: Optional[np.ndarray], + step_ratio: float, +) -> Tuple[np.ndarray, Optional[np.ndarray]]: + """Expand the width of image by delta_width pixels""" + assert src.ndim in (2, 3) and delta_width >= 0 + if not 0 < step_ratio <= 1: + raise ValueError(f"expect `step_ratio` to be between (0,1], got {step_ratio}") + + dst = src + while delta_width > 0: + max_step_size = max(1, round(step_ratio * dst.shape[1])) + step_size = min(max_step_size, delta_width) + gray = dst if dst.ndim == 2 else _rgb2gray(dst) + seams = _get_seams(gray, step_size, energy_mode, aux_energy) + dst = _insert_seams(dst, seams, step_size) + if aux_energy is not None: + aux_energy = _insert_seams(aux_energy, seams, step_size) + delta_width -= step_size + + return dst, aux_energy + + +def _resize_width( + src: np.ndarray, + width: int, + energy_mode: str, + aux_energy: Optional[np.ndarray], + step_ratio: float, +) -> Tuple[np.ndarray, Optional[np.ndarray]]: + """Resize the width of image by removing vertical seams""" + assert src.size > 0 and src.ndim in (2, 3) + assert width > 0 + + src_w = src.shape[1] + if src_w < width: + dst, aux_energy = _expand_width( + src, width - src_w, energy_mode, aux_energy, step_ratio + ) + else: + dst, aux_energy = _reduce_width(src, src_w - width, energy_mode, aux_energy) + return dst, aux_energy + + +def _transpose_image(src: np.ndarray) -> np.ndarray: + """Transpose a source image in rgb or grayscale format""" + if src.ndim == 3: + dst = src.transpose((1, 0, 2)) + else: + dst = src.T + return dst + + +def _resize_height( + src: np.ndarray, + height: int, + energy_mode: str, + aux_energy: Optional[np.ndarray], + step_ratio: float, +) -> Tuple[np.ndarray, Optional[np.ndarray]]: + """Resize the height of image by removing horizontal seams""" + assert src.ndim in (2, 3) and height > 0 + if aux_energy is not None: + aux_energy = aux_energy.T + src = _transpose_image(src) + src, aux_energy = _resize_width(src, height, energy_mode, aux_energy, step_ratio) + src = _transpose_image(src) + if aux_energy is not None: + aux_energy = aux_energy.T + return src, aux_energy + + +def 
_check_mask(mask: np.ndarray, shape: Tuple[int, ...]) -> np.ndarray: + """Ensure the mask to be a 2D grayscale map of specific shape""" + mask = np.asarray(mask, dtype=bool) + if mask.ndim != 2: + raise ValueError(f"expect mask to be a 2d binary map, got shape {mask.shape}") + if mask.shape != shape: + raise ValueError( + f"expect the shape of mask to match the image, got {mask.shape} vs {shape}" + ) + return mask + + +def _check_src(src: np.ndarray) -> np.ndarray: + """Ensure the source to be RGB or grayscale""" + src = np.asarray(src) + if src.size == 0 or src.ndim not in (2, 3): + raise ValueError( + f"expect a 3d rgb image or a 2d grayscale image, got image in shape {src.shape}" + ) + return src + + +def seam_carving( + src: np.ndarray, + size: Optional[Tuple[int, int]] = None, + energy_mode: str = "backward", + order: str = "width-first", + keep_mask: Optional[np.ndarray] = None, + drop_mask: Optional[np.ndarray] = None, + step_ratio: float = 0.5, +) -> np.ndarray: + """Resize the image using the content-aware seam-carving algorithm. + + :param src: A source image in RGB or grayscale format. + :param size: The target size in pixels, as a 2-tuple (width, height). + :param energy_mode: Policy to compute energy for the source image. Could be + one of ``backward`` or ``forward``. If ``backward``, compute the energy + as the gradient at each pixel. If ``forward``, compute the energy as the + distances between adjacent pixels after each pixel is removed. + :param order: The order to remove horizontal and vertical seams. Could be + one of ``width-first`` or ``height-first``. In ``width-first`` mode, we + remove or insert all vertical seams first, then the horizontal ones, + while ``height-first`` is the opposite. + :param keep_mask: An optional mask where the foreground is protected from + seam removal. If not specified, no area will be protected. + :param drop_mask: An optional binary object mask to remove. If given, the + object will be removed before resizing the image to the target size. + :param step_ratio: The maximum size expansion ratio in one seam carving step. + The image will be expanded in multiple steps if target size is too large. + :return: A resized copy of the source image. 
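+
+    Example (illustrative sketch; assumes ``img`` is an RGB ``numpy`` array of shape (H, W, 3)):
+
+    >>> carved = seam_carving(img, size=(img.shape[1] - 100, img.shape[0]))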
+ """ + src = _check_src(src) + + if order not in _list_enum(OrderMode): + raise ValueError( + f"expect order to be one of {_list_enum(OrderMode)}, got {order}" + ) + + aux_energy = None + + if keep_mask is not None: + keep_mask = _check_mask(keep_mask, src.shape[:2]) + + aux_energy = np.zeros(src.shape[:2], dtype=np.float32) + aux_energy[keep_mask] += KEEP_MASK_ENERGY + + # remove object if `drop_mask` is given + if drop_mask is not None: + drop_mask = _check_mask(drop_mask, src.shape[:2]) + + if aux_energy is None: + aux_energy = np.zeros(src.shape[:2], dtype=np.float32) + aux_energy[drop_mask] -= DROP_MASK_ENERGY + + if order == OrderMode.HEIGHT_FIRST: + src = _transpose_image(src) + aux_energy = aux_energy.T + + num_seams = (aux_energy < 0).sum(1).max() + while num_seams > 0: + src, aux_energy = _reduce_width(src, num_seams, energy_mode, aux_energy) + num_seams = (aux_energy < 0).sum(1).max() + + if order == OrderMode.HEIGHT_FIRST: + src = _transpose_image(src) + aux_energy = aux_energy.T + + # resize image if `size` is given + if size is not None: + width, height = size + width = round(width) + height = round(height) + if width <= 0 or height <= 0: + raise ValueError(f"expect target size to be positive, got {size}") + + if order == OrderMode.WIDTH_FIRST: + src, aux_energy = _resize_width( + src, width, energy_mode, aux_energy, step_ratio + ) + src, aux_energy = _resize_height( + src, height, energy_mode, aux_energy, step_ratio + ) + else: + src, aux_energy = _resize_height( + src, height, energy_mode, aux_energy, step_ratio + ) + src, aux_energy = _resize_width( + src, width, energy_mode, aux_energy, step_ratio + ) + + return src diff --git a/ComfyUI_essentials/conditioning.py b/ComfyUI_essentials/conditioning.py new file mode 100644 index 0000000000000000000000000000000000000000..58e199fe8f8d70914cb93df6967a3b97c1136f82 --- /dev/null +++ b/ComfyUI_essentials/conditioning.py @@ -0,0 +1,100 @@ +import torch +from nodes import MAX_RESOLUTION, ConditioningZeroOut, ConditioningSetTimestepRange, ConditioningCombine + +class CLIPTextEncodeSDXLSimplified: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "width": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + "height": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + "size_cond_factor": ("INT", {"default": 4, "min": 1, "max": 16 }), + "text": ("STRING", {"multiline": True, "dynamicPrompts": True, "default": ""}), + "clip": ("CLIP", ), + }} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "execute" + CATEGORY = "essentials/conditioning" + + def execute(self, clip, width, height, size_cond_factor, text): + crop_w = 0 + crop_h = 0 + width = width*size_cond_factor + height = height*size_cond_factor + target_width = width + target_height = height + text_g = text_l = text + + tokens = clip.tokenize(text_g) + tokens["l"] = clip.tokenize(text_l)["l"] + if len(tokens["l"]) != len(tokens["g"]): + empty = clip.tokenize("") + while len(tokens["l"]) < len(tokens["g"]): + tokens["l"] += empty["l"] + while len(tokens["l"]) > len(tokens["g"]): + tokens["g"] += empty["g"] + cond, pooled = clip.encode_from_tokens(tokens, return_pooled=True) + return ([[cond, {"pooled_output": pooled, "width": width, "height": height, "crop_w": crop_w, "crop_h": crop_h, "target_width": target_width, "target_height": target_height}]], ) + +class ConditioningCombineMultiple: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "conditioning_1": ("CONDITIONING",), + "conditioning_2": ("CONDITIONING",), + }, "optional": { 
+ "conditioning_3": ("CONDITIONING",), + "conditioning_4": ("CONDITIONING",), + "conditioning_5": ("CONDITIONING",), + }, + } + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "execute" + CATEGORY = "essentials/conditioning" + + def execute(self, conditioning_1, conditioning_2, conditioning_3=None, conditioning_4=None, conditioning_5=None): + c = conditioning_1 + conditioning_2 + + if conditioning_3 is not None: + c += conditioning_3 + if conditioning_4 is not None: + c += conditioning_4 + if conditioning_5 is not None: + c += conditioning_5 + + return (c,) + +class SD3NegativeConditioning: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "conditioning": ("CONDITIONING",), + "end": ("FLOAT", {"default": 0.1, "min": 0.0, "max": 1.0, "step": 0.001 }), + }} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "execute" + CATEGORY = "essentials/conditioning" + + def execute(self, conditioning, end): + zero_c = ConditioningZeroOut().zero_out(conditioning)[0] + + if end == 0: + return (zero_c, ) + + c = ConditioningSetTimestepRange().set_range(conditioning, 0, end)[0] + zero_c = ConditioningSetTimestepRange().set_range(zero_c, end, 1.0)[0] + c = ConditioningCombine().combine(zero_c, c)[0] + + return (c, ) + +COND_CLASS_MAPPINGS = { + "CLIPTextEncodeSDXL+": CLIPTextEncodeSDXLSimplified, + "ConditioningCombineMultiple+": ConditioningCombineMultiple, + "SD3NegativeConditioning+": SD3NegativeConditioning, +} + +COND_NAME_MAPPINGS = { + "CLIPTextEncodeSDXL+": "🔧 SDXL CLIPTextEncode", + "ConditioningCombineMultiple+": "🔧 Cond Combine Multiple", + "SD3NegativeConditioning+": "🔧 SD3 Negative Conditioning" +} \ No newline at end of file diff --git a/ComfyUI_essentials/essentials.py b/ComfyUI_essentials/essentials.py new file mode 100644 index 0000000000000000000000000000000000000000..28f77734a3f31a0710ce9c4f85f066ab06f061ac --- /dev/null +++ b/ComfyUI_essentials/essentials.py @@ -0,0 +1,2014 @@ +import warnings +warnings.filterwarnings('ignore', module="torchvision") +import ast +import math +import random +import os +import operator as op +import numpy as np +import scipy +from PIL import Image, ImageDraw, ImageFont, ImageColor, ImageFilter +import io + +import torch +import torch.nn.functional as F +import torchvision.transforms.v2 as T + +from nodes import MAX_RESOLUTION, SaveImage, common_ksampler +import folder_paths +import comfy.utils +import comfy.samplers +import comfy.sample + +STOCHASTIC_SAMPLERS = ["euler_ancestral", "dpm_2_ancestral", "dpmpp_2s_ancestral", "dpmpp_sde", "dpmpp_sde_gpu", "dpmpp_2m_sde", "dpmpp_2m_sde_gpu", "dpmpp_3m_sde", "dpmpp_3m_sde_gpu", "ddpm"] + +def p(image): + return image.permute([0,3,1,2]) +def pb(image): + return image.permute([0,2,3,1]) + +# from https://github.com/pythongosssss/ComfyUI-Custom-Scripts +class AnyType(str): + def __ne__(self, __value: object) -> bool: + return False +any = AnyType("*") + +EPSILON = 1e-5 + +class GetImageSize: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + } + } + + RETURN_TYPES = ("INT", "INT") + RETURN_NAMES = ("width", "height") + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, image): + return (image.shape[2], image.shape[1],) + +class ImageResize: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "width": ("INT", { "default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 8, }), + "height": ("INT", { "default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 8, }), + "interpolation": (["nearest", "bilinear", "bicubic", 
"area", "nearest-exact", "lanczos"],), + "keep_proportion": ("BOOLEAN", { "default": False }), + "condition": (["always", "downscale if bigger", "upscale if smaller"],), + "multiple_of": ("INT", { "default": 0, "min": 0, "max": 512, "step": 1, }), + } + } + + RETURN_TYPES = ("IMAGE", "INT", "INT",) + RETURN_NAMES = ("IMAGE", "width", "height",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, image, width, height, keep_proportion, interpolation="nearest", condition="always", multiple_of=0): + _, oh, ow, _ = image.shape + + if keep_proportion is True: + if width == 0 and oh < height: + width = MAX_RESOLUTION + elif width == 0 and oh >= height: + width = ow + + if height == 0 and ow < width: + height = MAX_RESOLUTION + elif height == 0 and ow >= width: + height = ow + + #width = ow if width == 0 else width + #height = oh if height == 0 else height + ratio = min(width / ow, height / oh) + width = round(ow*ratio) + height = round(oh*ratio) + else: + if width == 0: + width = ow + if height == 0: + height = oh + + if multiple_of > 1: + width = width - (width % multiple_of) + height = height - (height % multiple_of) + + outputs = p(image) + + if "always" in condition or ("bigger" in condition and (oh > height or ow > width)) or ("smaller" in condition and (oh < height or ow < width)): + if interpolation == "lanczos": + outputs = comfy.utils.lanczos(outputs, width, height) + else: + outputs = F.interpolate(outputs, size=(height, width), mode=interpolation) + + outputs = pb(outputs) + + return(outputs, outputs.shape[2], outputs.shape[1],) + +class ImageFlip: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "axis": (["x", "y", "xy"],), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, image, axis): + dim = () + if "y" in axis: + dim += (1,) + if "x" in axis: + dim += (2,) + image = torch.flip(image, dim) + + return(image,) + +class ImageCrop: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "width": ("INT", { "default": 256, "min": 0, "max": MAX_RESOLUTION, "step": 8, }), + "height": ("INT", { "default": 256, "min": 0, "max": MAX_RESOLUTION, "step": 8, }), + "position": (["top-left", "top-center", "top-right", "right-center", "bottom-right", "bottom-center", "bottom-left", "left-center", "center"],), + "x_offset": ("INT", { "default": 0, "min": -99999, "step": 1, }), + "y_offset": ("INT", { "default": 0, "min": -99999, "step": 1, }), + } + } + + RETURN_TYPES = ("IMAGE","INT","INT",) + RETURN_NAMES = ("IMAGE","x","y",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, image, width, height, position, x_offset, y_offset): + _, oh, ow, _ = image.shape + + width = min(ow, width) + height = min(oh, height) + + if "center" in position: + x = round((ow-width) / 2) + y = round((oh-height) / 2) + if "top" in position: + y = 0 + if "bottom" in position: + y = oh-height + if "left" in position: + x = 0 + if "right" in position: + x = ow-width + + x += x_offset + y += y_offset + + x2 = x+width + y2 = y+height + + if x2 > ow: + x2 = ow + if x < 0: + x = 0 + if y2 > oh: + y2 = oh + if y < 0: + y = 0 + + image = image[:, y:y2, x:x2, :] + + return(image, x, y, ) + +class ImageDesaturate: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "factor": ("FLOAT", { "default": 1.00, "min": 0.00, "max": 1.00, "step": 0.05, }), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = 
"essentials" + + def execute(self, image, factor): + grayscale = 0.299 * image[..., 0] + 0.587 * image[..., 1] + 0.114 * image[..., 2] + grayscale = (1.0 - factor) * image + factor * grayscale.unsqueeze(-1).repeat(1, 1, 1, 3) + return(grayscale,) + +class ImagePosterize: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "threshold": ("FLOAT", { "default": 0.50, "min": 0.00, "max": 1.00, "step": 0.05, }), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, image, threshold): + image = 0.299 * image[..., 0] + 0.587 * image[..., 1] + 0.114 * image[..., 2] + #image = image.mean(dim=3, keepdim=True) + image = (image > threshold).float() + image = image.unsqueeze(-1).repeat(1, 1, 1, 3) + + return(image,) + +class ImageEnhanceDifference: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image1": ("IMAGE",), + "image2": ("IMAGE",), + "exponent": ("FLOAT", { "default": 0.75, "min": 0.00, "max": 1.00, "step": 0.05, }), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, image1, image2, exponent): + if image1.shape != image2.shape: + image2 = p(image2) + image2 = comfy.utils.common_upscale(image2, image1.shape[2], image1.shape[1], upscale_method='bicubic', crop='center') + image2 = pb(image2) + + diff_image = image1 - image2 + diff_image = torch.pow(diff_image, exponent) + diff_image = torch.clamp(diff_image, 0, 1) + + return(diff_image,) + +class ImageExpandBatch: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "size": ("INT", { "default": 16, "min": 1, "step": 1, }), + "method": (["expand", "repeat all", "repeat first", "repeat last"],) + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, image, size, method): + orig_size = image.shape[0] + + if orig_size == size: + return (image,) + + if size <= 1: + return (image[:size],) + + if 'expand' in method: + out = torch.empty([size] + list(image.shape)[1:], dtype=image.dtype, device=image.device) + if size < orig_size: + scale = (orig_size - 1) / (size - 1) + for i in range(size): + out[i] = image[min(round(i * scale), orig_size - 1)] + else: + scale = orig_size / size + for i in range(size): + out[i] = image[min(math.floor((i + 0.5) * scale), orig_size - 1)] + elif 'all' in method: + out = image.repeat([math.ceil(size / image.shape[0])] + [1] * (len(image.shape) - 1))[:size] + elif 'first' in method: + if size < image.shape[0]: + out = image[:size] + else: + out = torch.cat([image[:1].repeat(size-image.shape[0], 1, 1, 1), image], dim=0) + elif 'last' in method: + if size < image.shape[0]: + out = image[:size] + else: + out = torch.cat((image, image[-1:].repeat((size-image.shape[0], 1, 1, 1))), dim=0) + + return (out,) + +class ImageListToBatch: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + INPUT_IS_LIST = True + CATEGORY = "essentials" + + def execute(self, image): + shape = image[0].shape[1:3] + out = [] + + for i in range(len(image)): + img = p(image[i]) + if image[i].shape[1:3] != shape: + transforms = T.Compose([ + T.CenterCrop(min(img.shape[2], img.shape[3])), + T.Resize((shape[0], shape[1]), interpolation=T.InterpolationMode.BICUBIC), + ]) + img = transforms(img) + out.append(pb(img)) + #image[i] = pb(transforms(img)) + + out = torch.cat(out, dim=0) + + return (out,) + +class 
ExtractKeyframes: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "threshold": ("FLOAT", { "default": 0.85, "min": 0.00, "max": 1.00, "step": 0.01, }), + } + } + + RETURN_TYPES = ("IMAGE", "STRING") + RETURN_NAMES = ("KEYFRAMES", "indexes") + + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, image, threshold): + window_size = 2 + + variations = torch.sum(torch.abs(image[1:] - image[:-1]), dim=[1, 2, 3]) + #variations = torch.sum((image[1:] - image[:-1]) ** 2, dim=[1, 2, 3]) + threshold = torch.quantile(variations.float(), threshold).item() + + keyframes = [] + for i in range(image.shape[0] - window_size + 1): + window = image[i:i + window_size] + variation = torch.sum(torch.abs(window[-1] - window[0])).item() + + if variation > threshold: + keyframes.append(i + window_size - 1) + + return (image[keyframes], ','.join(map(str, keyframes)),) + +class MaskFlip: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mask": ("MASK",), + "axis": (["x", "y", "xy"],), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, mask, axis): + dim = () + if "y" in axis: + dim += (1,) + if "x" in axis: + dim += (2,) + mask = torch.flip(mask, dims=dim) + + return(mask,) + +class MaskBlur: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mask": ("MASK",), + "amount": ("FLOAT", { "default": 6.0, "min": 0, "step": 0.5, }), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, mask, amount): + size = int(6 * amount +1) + if size % 2 == 0: + size+= 1 + + if mask.dim() == 2: + mask = mask.unsqueeze(0) + + blurred = mask.unsqueeze(1) + blurred = T.GaussianBlur(size, amount)(blurred) + blurred = blurred.squeeze(1) + + return(blurred,) + +class MaskPreview(SaveImage): + def __init__(self): + self.output_dir = folder_paths.get_temp_directory() + self.type = "temp" + self.prefix_append = "_temp_" + ''.join(random.choice("abcdefghijklmnopqrstupvxyz") for x in range(5)) + self.compress_level = 4 + + @classmethod + def INPUT_TYPES(s): + return { + "required": {"mask": ("MASK",), }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, + } + + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, mask, filename_prefix="ComfyUI", prompt=None, extra_pnginfo=None): + preview = mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])).movedim(1, -1).expand(-1, -1, -1, 3) + return self.save_images(preview, filename_prefix, prompt, extra_pnginfo) + +class MaskBatch: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mask1": ("MASK",), + "mask2": ("MASK",), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, mask1, mask2): + if mask1.shape[1:] != mask2.shape[1:]: + mask2 = F.interpolate(mask2.unsqueeze(1), size=(mask1.shape[1], mask1.shape[2]), mode="bicubic").squeeze(1) + + out = torch.cat((mask1, mask2), dim=0) + return (out,) + +class MaskExpandBatch: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mask": ("MASK",), + "size": ("INT", { "default": 16, "min": 1, "step": 1, }), + "method": (["expand", "repeat all", "repeat first", "repeat last"],) + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, mask, size, method): + orig_size = mask.shape[0] + + if orig_size == size: + return (mask,) + + if size <= 1: + return (mask[:size],) + + if 'expand' in method: + out = 
torch.empty([size] + list(mask.shape)[1:], dtype=mask.dtype, device=mask.device) + if size < orig_size: + scale = (orig_size - 1) / (size - 1) + for i in range(size): + out[i] = mask[min(round(i * scale), orig_size - 1)] + else: + scale = orig_size / size + for i in range(size): + out[i] = mask[min(math.floor((i + 0.5) * scale), orig_size - 1)] + elif 'all' in method: + out = mask.repeat([math.ceil(size / mask.shape[0])] + [1] * (len(mask.shape) - 1))[:size] + elif 'first' in method: + if size < mask.shape[0]: + out = mask[:size] + else: + out = torch.cat([mask[:1].repeat(size-mask.shape[0], 1, 1), mask], dim=0) + elif 'last' in method: + if size < mask.shape[0]: + out = mask[:size] + else: + out = torch.cat((mask, mask[-1:].repeat((size-mask.shape[0], 1, 1))), dim=0) + + return (out,) + +def cubic_bezier(t, p): + p0, p1, p2, p3 = p + return (1 - t)**3 * p0 + 3 * (1 - t)**2 * t * p1 + 3 * (1 - t) * t**2 * p2 + t**3 * p3 + +class MaskBoundingBox: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mask": ("MASK",), + "padding": ("INT", { "default": 0, "min": 0, "max": 4096, "step": 1, }), + "blur": ("INT", { "default": 0, "min": 0, "max": 128, "step": 1, }), + }, + "optional": { + "image_optional": ("IMAGE",), + } + } + + RETURN_TYPES = ("MASK", "IMAGE", "INT", "INT", "INT", "INT") + RETURN_NAMES = ("MASK", "IMAGE", "x", "y", "width", "height") + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, mask, padding, blur, image_optional=None): + if mask.dim() == 2: + mask = mask.unsqueeze(0) + + if image_optional is None: + image_optional = mask.unsqueeze(3).repeat(1, 1, 1, 3) + + # resize the image if it's not the same size as the mask + if image_optional.shape[1] != mask.shape[1] or image_optional.shape[2] != mask.shape[2]: + image_optional = p(image_optional) + image_optional = comfy.utils.common_upscale(image_optional, mask.shape[2], mask.shape[1], upscale_method='bicubic', crop='center') + image_optional = pb(image_optional) + + # match batch size + if image_optional.shape[0] < mask.shape[0]: + image_optional = torch.cat((image_optional, image_optional[-1].unsqueeze(0).repeat(mask.shape[0]-image_optional.shape[0], 1, 1, 1)), dim=0) + elif image_optional.shape[0] > mask.shape[0]: + image_optional = image_optional[:mask.shape[0]] + + # blur the mask + if blur > 0: + if blur % 2 == 0: + blur += 1 + mask = T.functional.gaussian_blur(mask.unsqueeze(1), blur).squeeze(1) + + _, y, x = torch.where(mask) + x1 = max(0, x.min().item() - padding) + x2 = min(mask.shape[2], x.max().item() + 1 + padding) + y1 = max(0, y.min().item() - padding) + y2 = min(mask.shape[1], y.max().item() + 1 + padding) + + # crop the mask + mask = mask[:, y1:y2, x1:x2] + image_optional = image_optional[:, y1:y2, x1:x2, :] + + return (mask, image_optional, x1, y1, x2 - x1, y2 - y1) + +class MaskFromColor: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE", ), + "red": ("INT", { "default": 255, "min": 0, "max": 255, "step": 1, }), + "green": ("INT", { "default": 255, "min": 0, "max": 255, "step": 1, }), + "blue": ("INT", { "default": 255, "min": 0, "max": 255, "step": 1, }), + "threshold": ("INT", { "default": 0, "min": 0, "max": 127, "step": 1, }), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, image, red, green, blue, threshold): + temp = (torch.clamp(image, 0, 1.0) * 255.0).round().to(torch.int) + color = torch.tensor([red, green, blue]) + lower_bound = (color - threshold).clamp(min=0) + upper_bound 
= (color + threshold).clamp(max=255) + lower_bound = lower_bound.view(1, 1, 1, 3) + upper_bound = upper_bound.view(1, 1, 1, 3) + mask = (temp >= lower_bound) & (temp <= upper_bound) + mask = mask.all(dim=-1) + mask = mask.float() + + return (mask, ) + +class MaskFromSegmentation: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE", ), + "segments": ("INT", { "default": 6, "min": 1, "max": 16, "step": 1, }), + "remove_isolated_pixels": ("INT", { "default": 0, "min": 0, "max": 32, "step": 1, }), + "remove_small_masks": ("FLOAT", { "default": 0.0, "min": 0., "max": 1., "step": 0.01, }), + "fill_holes": ("BOOLEAN", { "default": False }), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, image, segments, remove_isolated_pixels, fill_holes, remove_small_masks): + im = image[0] # we only work on the first image in the batch + im = Image.fromarray((im * 255).to(torch.uint8).cpu().numpy(), mode="RGB") + im = im.quantize(palette=im.quantize(colors=segments), dither=Image.Dither.NONE) + im = torch.tensor(np.array(im.convert("RGB"))).float() / 255.0 + + colors = im.reshape(-1, im.shape[-1]) + colors = torch.unique(colors, dim=0) + + masks = [] + for color in colors: + mask = (im == color).all(dim=-1).float() + # remove isolated pixels + if remove_isolated_pixels > 0: + mask_np = mask.cpu().numpy() + mask_np = scipy.ndimage.binary_opening(mask_np, structure=np.ones((remove_isolated_pixels, remove_isolated_pixels))) + mask = torch.from_numpy(mask_np) + + # fill holes + if fill_holes: + mask_np = mask.cpu().numpy() + mask_np = scipy.ndimage.binary_fill_holes(mask_np) + mask = torch.from_numpy(mask_np) + + # if the mask is too small, it's probably noise + if mask.sum() / (mask.shape[0]*mask.shape[1]) > remove_small_masks: + masks.append(mask) + + if masks == []: + masks.append(torch.zeros_like(im).squeeze(-1).unsqueeze(0)) # return an empty mask if no masks were found, prevents errors + + mask = torch.stack(masks, dim=0).float() + + return (mask, ) + +class MaskFromRGBCMYBW: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE", ), + "threshold_r": ("FLOAT", { "default": 0.15, "min": 0.0, "max": 1, "step": 0.01, }), + "threshold_g": ("FLOAT", { "default": 0.15, "min": 0.0, "max": 1, "step": 0.01, }), + "threshold_b": ("FLOAT", { "default": 0.15, "min": 0.0, "max": 1, "step": 0.01, }), + "remove_isolated_pixels": ("INT", { "default": 0, "min": 0, "max": 32, "step": 1, }), + "fill_holes": ("BOOLEAN", { "default": False }), + } + } + + RETURN_TYPES = ("MASK","MASK","MASK","MASK","MASK","MASK","MASK","MASK",) + RETURN_NAMES = ("red","green","blue","cyan","magenta","yellow","black","white",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, image, threshold_r, threshold_g, threshold_b, remove_isolated_pixels, fill_holes): + red = ((image[..., 0] >= 1-threshold_r) & (image[..., 1] < threshold_g) & (image[..., 2] < threshold_b)).float() + green = ((image[..., 0] < threshold_r) & (image[..., 1] >= 1-threshold_g) & (image[..., 2] < threshold_b)).float() + blue = ((image[..., 0] < threshold_r) & (image[..., 1] < threshold_g) & (image[..., 2] >= 1-threshold_b)).float() + + cyan = ((image[..., 0] < threshold_r) & (image[..., 1] >= 1-threshold_g) & (image[..., 2] >= 1-threshold_b)).float() + magenta = ((image[..., 0] >= 1-threshold_r) & (image[..., 1] < threshold_g) & (image[..., 2] > 1-threshold_b)).float() + yellow = ((image[..., 0] >= 1-threshold_r) & (image[..., 1] >= 
1-threshold_g) & (image[..., 2] < threshold_b)).float() + + black = ((image[..., 0] <= threshold_r) & (image[..., 1] <= threshold_g) & (image[..., 2] <= threshold_b)).float() + white = ((image[..., 0] >= 1-threshold_r) & (image[..., 1] >= 1-threshold_g) & (image[..., 2] >= 1-threshold_b)).float() + + if remove_isolated_pixels > 0 or fill_holes: + colors = [red, green, blue, cyan, magenta, yellow, black, white] + color_names = ['red', 'green', 'blue', 'cyan', 'magenta', 'yellow', 'black', 'white'] + processed_colors = {} + + for color_name, color in zip(color_names, colors): + color = color.cpu().numpy() + masks = [] + + for i in range(image.shape[0]): + mask = color[i] + if remove_isolated_pixels > 0: + mask = scipy.ndimage.binary_opening(mask, structure=np.ones((remove_isolated_pixels, remove_isolated_pixels))) + if fill_holes: + mask = scipy.ndimage.binary_fill_holes(mask) + mask = torch.from_numpy(mask) + masks.append(mask) + + processed_colors[color_name] = torch.stack(masks, dim=0).float() + + red = processed_colors['red'] + green = processed_colors['green'] + blue = processed_colors['blue'] + cyan = processed_colors['cyan'] + magenta = processed_colors['magenta'] + yellow = processed_colors['yellow'] + black = processed_colors['black'] + white = processed_colors['white'] + + del colors, processed_colors + + return (red, green, blue, cyan, magenta, yellow, black, white,) + +class MaskSmooth: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mask": ("MASK",), + "amount": ("INT", { "default": 0, "min": 0, "max": 127, "step": 1, }), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, mask, amount): + if amount == 0: + return (mask,) + + if amount % 2 == 0: + amount += 1 + + mask = mask > 0.5 + mask = T.functional.gaussian_blur(mask.unsqueeze(1), amount).squeeze(1).float() + + return (mask,) + +class MaskFromBatch: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mask": ("MASK", ), + "start": ("INT", { "default": 0, "min": 0, "step": 1, }), + "length": ("INT", { "default": -1, "min": -1, "step": 1, }), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, mask, start, length): + if length<0: + length = mask.shape[0] + start = min(start, mask.shape[0]-1) + length = min(mask.shape[0]-start, length) + return (mask[start:start + length], ) + +class MaskFromList: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "values": ("FLOAT", { "min": 0.0, "max": 1.0, "step": 0.01, }), + "width": ("INT", { "default": 32, "min": 1, "max": MAX_RESOLUTION, "step": 8, }), + "height": ("INT", { "default": 32, "min": 1, "max": MAX_RESOLUTION, "step": 8, }), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, values, width, height): + if not isinstance(values, list): + values = [values] + + values = torch.tensor(values).float() + values = torch.clamp(values, 0.0, 1.0) + #values = (values - values.min()) / values.max() + + return (values.unsqueeze(1).unsqueeze(2).repeat(1, width, height), ) + +class ImageFromBatch: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE", ), + "start": ("INT", { "default": 0, "min": 0, "step": 1, }), + "length": ("INT", { "default": -1, "min": -1, "step": 1, }), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, image, start, length): + if length<0: + length = image.shape[0] + start 
= min(start, image.shape[0]-1) + length = min(image.shape[0]-start, length) + return (image[start:start + length], ) + +class ImageCompositeFromMaskBatch: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image_from": ("IMAGE", ), + "image_to": ("IMAGE", ), + "mask": ("MASK", ) + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, image_from, image_to, mask): + frames = mask.shape[0] + + if image_from.shape[1] != image_to.shape[1] or image_from.shape[2] != image_to.shape[2]: + image_to = p(image_to) + image_to = comfy.utils.common_upscale(image_to, image_from.shape[2], image_from.shape[1], upscale_method='bicubic', crop='center') + image_to = pb(image_to) + + if frames < image_from.shape[0]: + image_from = image_from[:frames] + elif frames > image_from.shape[0]: + image_from = torch.cat((image_from, image_from[-1].unsqueeze(0).repeat(frames-image_from.shape[0], 1, 1, 1)), dim=0) + + mask = mask.unsqueeze(3).repeat(1, 1, 1, 3) + + if image_from.shape[1] != mask.shape[1] or image_from.shape[2] != mask.shape[2]: + mask = p(mask) + mask = comfy.utils.common_upscale(mask, image_from.shape[2], image_from.shape[1], upscale_method='bicubic', crop='center') + mask = pb(mask) + + out = mask * image_to + (1 - mask) * image_from + + return (out, ) + +class TransitionMask: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "width": ("INT", { "default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1, }), + "height": ("INT", { "default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1, }), + "frames": ("INT", { "default": 16, "min": 1, "max": 9999, "step": 1, }), + "start_frame": ("INT", { "default": 0, "min": 0, "step": 1, }), + "end_frame": ("INT", { "default": 9999, "min": 0, "step": 1, }), + "transition_type": (["horizontal slide", "vertical slide", "horizontal bar", "vertical bar", "center box", "horizontal door", "vertical door", "circle", "fade"],), + "timing_function": (["linear", "in", "out", "in-out"],) + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, width, height, frames, start_frame, end_frame, transition_type, timing_function): + if timing_function == 'in': + tf = [0.0, 0.0, 0.5, 1.0] + elif timing_function == 'out': + tf = [0.0, 0.5, 1.0, 1.0] + elif timing_function == 'in-out': + tf = [0, 1, 0, 1] + #elif timing_function == 'back': + # tf = [0, 1.334, 1.334, 0] + else: + tf = [0, 0, 1, 1] + + out = [] + + end_frame = min(frames, end_frame) + transition = end_frame - start_frame + + if start_frame > 0: + out = out + [torch.full((height, width), 0.0, dtype=torch.float32, device="cpu")] * start_frame + + for i in range(transition): + frame = torch.full((height, width), 0.0, dtype=torch.float32, device="cpu") + progress = i/(transition-1) + + if timing_function != 'linear': + progress = cubic_bezier(progress, tf) + + if "horizontal slide" in transition_type: + pos = round(width*progress) + frame[:, :pos] = 1.0 + elif "vertical slide" in transition_type: + pos = round(height*progress) + frame[:pos, :] = 1.0 + elif "box" in transition_type: + box_w = round(width*progress) + box_h = round(height*progress) + x1 = (width - box_w) // 2 + y1 = (height - box_h) // 2 + x2 = x1 + box_w + y2 = y1 + box_h + frame[y1:y2, x1:x2] = 1.0 + elif "circle" in transition_type: + radius = math.ceil(math.sqrt(pow(width,2)+pow(height,2))*progress/2) + c_x = width // 2 + c_y = height // 2 + # is this real life? Am I hallucinating? 
+ x = torch.arange(0, width, dtype=torch.float32, device="cpu") + y = torch.arange(0, height, dtype=torch.float32, device="cpu") + y, x = torch.meshgrid((y, x), indexing="ij") + circle = ((x - c_x) ** 2 + (y - c_y) ** 2) <= (radius ** 2) + frame[circle] = 1.0 + elif "horizontal bar" in transition_type: + bar = round(height*progress) + y1 = (height - bar) // 2 + y2 = y1 + bar + frame[y1:y2, :] = 1.0 + elif "vertical bar" in transition_type: + bar = round(width*progress) + x1 = (width - bar) // 2 + x2 = x1 + bar + frame[:, x1:x2] = 1.0 + elif "horizontal door" in transition_type: + bar = math.ceil(height*progress/2) + if bar > 0: + frame[:bar, :] = 1.0 + frame[-bar:, :] = 1.0 + elif "vertical door" in transition_type: + bar = math.ceil(width*progress/2) + if bar > 0: + frame[:, :bar] = 1.0 + frame[:, -bar:] = 1.0 + elif "fade" in transition_type: + frame[:,:] = progress + + out.append(frame) + + if end_frame < frames: + out = out + [torch.full((height, width), 1.0, dtype=torch.float32, device="cpu")] * (frames - end_frame) + + out = torch.stack(out, dim=0) + + return (out, ) + +def min_(tensor_list): + # return the element-wise min of the tensor list. + x = torch.stack(tensor_list) + mn = x.min(axis=0)[0] + return torch.clamp(mn, min=0) + +def max_(tensor_list): + # return the element-wise max of the tensor list. + x = torch.stack(tensor_list) + mx = x.max(axis=0)[0] + return torch.clamp(mx, max=1) + +# From https://github.com/Jamy-L/Pytorch-Contrast-Adaptive-Sharpening/ +class ImageCAS: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "image": ("IMAGE",), + "amount": ("FLOAT", {"default": 0.8, "min": 0, "max": 1, "step": 0.05}), + }, + } + + RETURN_TYPES = ("IMAGE",) + CATEGORY = "essentials" + FUNCTION = "execute" + + def execute(self, image, amount): + img = F.pad(p(image), pad=(1, 1, 1, 1)).cpu() + + a = img[..., :-2, :-2] + b = img[..., :-2, 1:-1] + c = img[..., :-2, 2:] + d = img[..., 1:-1, :-2] + e = img[..., 1:-1, 1:-1] + f = img[..., 1:-1, 2:] + g = img[..., 2:, :-2] + h = img[..., 2:, 1:-1] + i = img[..., 2:, 2:] + + # Computing contrast + cross = (b, d, e, f, h) + mn = min_(cross) + mx = max_(cross) + + diag = (a, c, g, i) + mn2 = min_(diag) + mx2 = max_(diag) + mx = mx + mx2 + mn = mn + mn2 + + # Computing local weight + inv_mx = torch.reciprocal(mx + EPSILON) + amp = inv_mx * torch.minimum(mn, (2 - mx)) + + # scaling + amp = torch.sqrt(amp) + w = - amp * (amount * (1/5 - 1/8) + 1/8) + div = torch.reciprocal(1 + 4*w) + + output = ((b + d + f + h)*w + e) * div + output = output.clamp(0, 1) + #output = torch.nan_to_num(output) # this seems the only way to ensure there are no NaNs + + output = pb(output) + + return (output,) + +operators = { + ast.Add: op.add, + ast.Sub: op.sub, + ast.Mult: op.mul, + ast.Div: op.truediv, + ast.FloorDiv: op.floordiv, + ast.Pow: op.pow, + ast.BitXor: op.xor, + ast.USub: op.neg, + ast.Mod: op.mod, +} + +op_functions = { + 'min': min, + 'max': max +} + +class SimpleMath: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(s): + return { + "optional": { + "a": ("INT,FLOAT", { "default": 0.0, "step": 0.1 }), + "b": ("INT,FLOAT", { "default": 0.0, "step": 0.1 }), + }, + "required": { + "value": ("STRING", { "multiline": False, "default": "" }), + }, + } + + RETURN_TYPES = ("INT", "FLOAT", ) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, value, a = 0.0, b = 0.0): + def eval_(node): + if isinstance(node, ast.Num): # number + return node.n + elif isinstance(node, ast.Name): # variable + if node.id == "a": 
+ return a + if node.id == "b": + return b + elif isinstance(node, ast.BinOp): # + return operators[type(node.op)](eval_(node.left), eval_(node.right)) + elif isinstance(node, ast.UnaryOp): # e.g., -1 + return operators[type(node.op)](eval_(node.operand)) + elif isinstance(node, ast.Call): # custom function + if node.func.id in op_functions: + args =[eval_(arg) for arg in node.args] + return op_functions[node.func.id](*args) + else: + return 0 + + result = eval_(ast.parse(value, mode='eval').body) + + if math.isnan(result): + result = 0.0 + + return (round(result), result, ) + +class ModelCompile(): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL",), + "fullgraph": ("BOOLEAN", { "default": False }), + "dynamic": ("BOOLEAN", { "default": False }), + "mode": (["default", "reduce-overhead", "max-autotune", "max-autotune-no-cudagraphs"],), + }, + } + + RETURN_TYPES = ("MODEL", ) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, model, fullgraph, dynamic, mode): + work_model = model.clone() + torch._dynamo.config.suppress_errors = True + work_model.model.diffusion_model = torch.compile(work_model.model.diffusion_model, dynamic=dynamic, fullgraph=fullgraph, mode=mode) + return( work_model, ) + +class ConsoleDebug: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "value": (any, {}), + }, + "optional": { + "prefix": ("STRING", { "multiline": False, "default": "Value:" }) + } + } + + RETURN_TYPES = () + FUNCTION = "execute" + CATEGORY = "essentials" + OUTPUT_NODE = True + + def execute(self, value, prefix): + print(f"\033[96m{prefix} {value}\033[0m") + + return (None,) + +class DebugTensorShape: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "tensor": (any, {}), + }, + } + + RETURN_TYPES = () + FUNCTION = "execute" + CATEGORY = "essentials" + OUTPUT_NODE = True + + def execute(self, tensor): + shapes = [] + def tensorShape(tensor): + if isinstance(tensor, dict): + for k in tensor: + tensorShape(tensor[k]) + elif isinstance(tensor, list): + for i in range(len(tensor)): + tensorShape(tensor[i]) + elif hasattr(tensor, 'shape'): + shapes.append(list(tensor.shape)) + + tensorShape(tensor) + + print(f"\033[96mShapes found: {shapes}\033[0m") + + return (None,) + +class BatchCount: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "batch": (any, {}), + }, + } + + RETURN_TYPES = ("INT",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, batch): + count = 0 + if hasattr(batch, 'shape'): + count = batch.shape[0] + elif isinstance(batch, dict) and 'samples' in batch: + count = batch['samples'].shape[0] + elif isinstance(batch, list) or isinstance(batch, dict): + count = len(batch) + + return (count, ) + +class ImageSeamCarving: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "image": ("IMAGE",), + "width": ("INT", { "default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1, }), + "height": ("INT", { "default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1, }), + "energy": (["backward", "forward"],), + "order": (["width-first", "height-first"],), + }, + "optional": { + "keep_mask": ("MASK",), + "drop_mask": ("MASK",), + } + } + + RETURN_TYPES = ("IMAGE",) + CATEGORY = "essentials" + FUNCTION = "execute" + + def execute(self, image, width, height, energy, order, keep_mask=None, drop_mask=None): + try: + from .carve import seam_carving + except ImportError as e: + raise Exception(e) + + img = p(image) + 
+ if keep_mask is not None: + #keep_mask = keep_mask.reshape((-1, 1, keep_mask.shape[-2], keep_mask.shape[-1])).movedim(1, -1) + keep_mask = p(keep_mask.unsqueeze(-1)) + + if keep_mask.shape[2] != img.shape[2] or keep_mask.shape[3] != img.shape[3]: + keep_mask = F.interpolate(keep_mask, size=(img.shape[2], img.shape[3]), mode="bilinear") + if drop_mask is not None: + drop_mask = p(drop_mask.unsqueeze(-1)) + + if drop_mask.shape[2] != img.shape[2] or drop_mask.shape[3] != img.shape[3]: + drop_mask = F.interpolate(drop_mask, size=(img.shape[2], img.shape[3]), mode="bilinear") + + out = [] + for i in range(img.shape[0]): + resized = seam_carving( + T.ToPILImage()(img[i]), + size=(width, height), + energy_mode=energy, + order=order, + keep_mask=T.ToPILImage()(keep_mask[i]) if keep_mask is not None else None, + drop_mask=T.ToPILImage()(drop_mask[i]) if drop_mask is not None else None, + ) + out.append(T.ToTensor()(resized)) + + out = torch.stack(out) + out = pb(out) + + return(out, ) + +class CLIPTextEncodeSDXLSimplified: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "width": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + "height": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + "size_cond_factor": ("INT", {"default": 4, "min": 1, "max": 16 }), + "text": ("STRING", {"multiline": True, "dynamicPrompts": True, "default": ""}), + "clip": ("CLIP", ), + }} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, clip, width, height, size_cond_factor, text): + crop_w = 0 + crop_h = 0 + width = width*size_cond_factor + height = height*size_cond_factor + target_width = width + target_height = height + text_g = text_l = text + + tokens = clip.tokenize(text_g) + tokens["l"] = clip.tokenize(text_l)["l"] + if len(tokens["l"]) != len(tokens["g"]): + empty = clip.tokenize("") + while len(tokens["l"]) < len(tokens["g"]): + tokens["l"] += empty["l"] + while len(tokens["l"]) > len(tokens["g"]): + tokens["g"] += empty["g"] + cond, pooled = clip.encode_from_tokens(tokens, return_pooled=True) + return ([[cond, {"pooled_output": pooled, "width": width, "height": height, "crop_w": crop_w, "crop_h": crop_h, "target_width": target_width, "target_height": target_height}]], ) + +class KSamplerVariationsStochastic: + @classmethod + def INPUT_TYPES(s): + return {"required":{ + "model": ("MODEL",), + "latent_image": ("LATENT", ), + "noise_seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 25, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 7.0, "min": 0.0, "max": 100.0, "step":0.1, "round": 0.01}), + "sampler": (comfy.samplers.KSampler.SAMPLERS, ), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS, ), + "positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "variation_seed": ("INT:seed", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "variation_strength": ("FLOAT", {"default": 0.2, "min": 0.0, "max": 1.0, "step":0.05, "round": 0.01}), + #"variation_sampler": (comfy.samplers.KSampler.SAMPLERS, ), + "cfg_scale": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step":0.05, "round": 0.01}), + }} + + RETURN_TYPES = ("LATENT", ) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, model, latent_image, noise_seed, steps, cfg, sampler, scheduler, positive, negative, variation_seed, variation_strength, cfg_scale, variation_sampler="dpmpp_2m_sde"): + # Stage 1: composition sampler + force_full_denoise = False # return with leftover 
noise = "enable" + disable_noise = False # add noise = "enable" + + end_at_step = max(int(steps * (1-variation_strength)), 1) + start_at_step = 0 + + work_latent = latent_image.copy() + batch_size = work_latent["samples"].shape[0] + work_latent["samples"] = work_latent["samples"][0].unsqueeze(0) + + stage1 = common_ksampler(model, noise_seed, steps, cfg, sampler, scheduler, positive, negative, work_latent, denoise=1.0, disable_noise=disable_noise, start_step=start_at_step, last_step=end_at_step, force_full_denoise=force_full_denoise)[0] + + if batch_size > 1: + stage1["samples"] = stage1["samples"].clone().repeat(batch_size, 1, 1, 1) + + # Stage 2: variation sampler + force_full_denoise = True + disable_noise = True + cfg = max(cfg * cfg_scale, 1.0) + start_at_step = end_at_step + end_at_step = steps + + return common_ksampler(model, variation_seed, steps, cfg, variation_sampler, scheduler, positive, negative, stage1, denoise=1.0, disable_noise=disable_noise, start_step=start_at_step, last_step=end_at_step, force_full_denoise=force_full_denoise) + +# From https://github.com/BlenderNeko/ComfyUI_Noise/ +def slerp(val, low, high): + dims = low.shape + + low = low.reshape(dims[0], -1) + high = high.reshape(dims[0], -1) + + low_norm = low/torch.norm(low, dim=1, keepdim=True) + high_norm = high/torch.norm(high, dim=1, keepdim=True) + + low_norm[low_norm != low_norm] = 0.0 + high_norm[high_norm != high_norm] = 0.0 + + omega = torch.acos((low_norm*high_norm).sum(1)) + so = torch.sin(omega) + res = (torch.sin((1.0-val)*omega)/so).unsqueeze(1)*low + (torch.sin(val*omega)/so).unsqueeze(1) * high + + return res.reshape(dims) + +def prepare_mask(mask, shape): + mask = torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(shape[2], shape[3]), mode="bilinear") + mask = mask.expand((-1,shape[1],-1,-1)) + if mask.shape[0] < shape[0]: + mask = mask.repeat((shape[0] -1) // mask.shape[0] + 1, 1, 1, 1)[:shape[0]] + return mask + +def expand_mask(mask, expand, tapered_corners): + c = 0 if tapered_corners else 1 + kernel = np.array([[c, 1, c], + [1, 1, 1], + [c, 1, c]]) + mask = mask.reshape((-1, mask.shape[-2], mask.shape[-1])) + out = [] + for m in mask: + output = m.numpy() + for _ in range(abs(expand)): + if expand < 0: + output = scipy.ndimage.grey_erosion(output, footprint=kernel) + else: + output = scipy.ndimage.grey_dilation(output, footprint=kernel) + output = torch.from_numpy(output) + out.append(output) + + return torch.stack(out, dim=0) + +class KSamplerVariationsWithNoise: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "model": ("MODEL", ), + "latent_image": ("LATENT", ), + "main_seed": ("INT:seed", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0, "step":0.1, "round": 0.01}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS, ), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS, ), + "positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "variation_strength": ("FLOAT", {"default": 0.17, "min": 0.0, "max": 1.0, "step":0.01, "round": 0.01}), + #"start_at_step": ("INT", {"default": 0, "min": 0, "max": 10000}), + #"end_at_step": ("INT", {"default": 10000, "min": 0, "max": 10000}), + #"return_with_leftover_noise": (["disable", "enable"], ), + "variation_seed": ("INT:seed", {"default": 12345, "min": 0, "max": 0xffffffffffffffff}), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step":0.01, 
"round": 0.01}), + }} + + RETURN_TYPES = ("LATENT",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, model, latent_image, main_seed, steps, cfg, sampler_name, scheduler, positive, negative, variation_strength, variation_seed, denoise): + if main_seed == variation_seed: + variation_seed += 1 + + end_at_step = steps #min(steps, end_at_step) + start_at_step = round(end_at_step - end_at_step * denoise) + + force_full_denoise = True + disable_noise = True + + device = comfy.model_management.get_torch_device() + + # Generate base noise + batch_size, _, height, width = latent_image["samples"].shape + generator = torch.manual_seed(main_seed) + base_noise = torch.randn((1, 4, height, width), dtype=torch.float32, device="cpu", generator=generator).repeat(batch_size, 1, 1, 1).cpu() + + # Generate variation noise + generator = torch.manual_seed(variation_seed) + variation_noise = torch.randn((batch_size, 4, height, width), dtype=torch.float32, device="cpu", generator=generator).cpu() + + slerp_noise = slerp(variation_strength, base_noise, variation_noise) + + # Calculate sigma + comfy.model_management.load_model_gpu(model) + sampler = comfy.samplers.KSampler(model, steps=steps, device=device, sampler=sampler_name, scheduler=scheduler, denoise=1.0, model_options=model.model_options) + sigmas = sampler.sigmas + sigma = sigmas[start_at_step] - sigmas[end_at_step] + sigma /= model.model.latent_format.scale_factor + sigma = sigma.detach().cpu().item() + + work_latent = latent_image.copy() + work_latent["samples"] = latent_image["samples"].clone() + slerp_noise * sigma + + # if there's a mask we need to expand it to avoid artifacts, 5 pixels should be enough + if "noise_mask" in latent_image: + noise_mask = prepare_mask(latent_image["noise_mask"], latent_image['samples'].shape) + work_latent["samples"] = noise_mask * work_latent["samples"] + (1-noise_mask) * latent_image["samples"] + work_latent['noise_mask'] = expand_mask(latent_image["noise_mask"].clone(), 5, True) + + return common_ksampler(model, main_seed, steps, cfg, sampler_name, scheduler, positive, negative, work_latent, denoise=1.0, disable_noise=disable_noise, start_step=start_at_step, last_step=end_at_step, force_full_denoise=force_full_denoise) + +class SDXLEmptyLatentSizePicker: + def __init__(self): + self.device = comfy.model_management.intermediate_device() + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "resolution": (["704x1408 (0.5)","704x1344 (0.52)","768x1344 (0.57)","768x1280 (0.6)","832x1216 (0.68)","832x1152 (0.72)","896x1152 (0.78)","896x1088 (0.82)","960x1088 (0.88)","960x1024 (0.94)","1024x1024 (1.0)","1024x960 (1.07)","1088x960 (1.13)","1088x896 (1.21)","1152x896 (1.29)","1152x832 (1.38)","1216x832 (1.46)","1280x768 (1.67)","1344x768 (1.75)","1344x704 (1.91)","1408x704 (2.0)","1472x704 (2.09)","1536x640 (2.4)","1600x640 (2.5)","1664x576 (2.89)","1728x576 (3.0)",], {"default": "1024x1024 (1.0)"}), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096}), + }} + + RETURN_TYPES = ("LATENT","INT","INT",) + RETURN_NAMES = ("LATENT","width", "height",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, resolution, batch_size): + width, height = resolution.split(" ")[0].split("x") + width = int(width) + height = int(height) + + latent = torch.zeros([batch_size, 4, height // 8, width // 8], device=self.device) + + return ({"samples":latent}, width, height,) + +LUTS_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "luts") +# From 
https://github.com/yoonsikp/pycubelut/blob/master/pycubelut.py (MIT license) +class ImageApplyLUT: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "lut_file": ([f for f in os.listdir(LUTS_DIR) if f.endswith('.cube')], ), + "log_colorspace": ("BOOLEAN", { "default": False }), + "clip_values": ("BOOLEAN", { "default": False }), + "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.1 }), + }} + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials" + + # TODO: check if we can do without numpy + def execute(self, image, lut_file, log_colorspace, clip_values, strength): + from colour.io.luts.iridas_cube import read_LUT_IridasCube + + lut = read_LUT_IridasCube(os.path.join(LUTS_DIR, lut_file)) + lut.name = lut_file + + if clip_values: + if lut.domain[0].max() == lut.domain[0].min() and lut.domain[1].max() == lut.domain[1].min(): + lut.table = np.clip(lut.table, lut.domain[0, 0], lut.domain[1, 0]) + else: + if len(lut.table.shape) == 2: # 3x1D + for dim in range(3): + lut.table[:, dim] = np.clip(lut.table[:, dim], lut.domain[0, dim], lut.domain[1, dim]) + else: # 3D + for dim in range(3): + lut.table[:, :, :, dim] = np.clip(lut.table[:, :, :, dim], lut.domain[0, dim], lut.domain[1, dim]) + + out = [] + for img in image: # TODO: is this more resource efficient? should we use a batch instead? + lut_img = img.numpy().copy() + + is_non_default_domain = not np.array_equal(lut.domain, np.array([[0., 0., 0.], [1., 1., 1.]])) + dom_scale = None + if is_non_default_domain: + dom_scale = lut.domain[1] - lut.domain[0] + lut_img = lut_img * dom_scale + lut.domain[0] + if log_colorspace: + lut_img = lut_img ** (1/2.2) + lut_img = lut.apply(lut_img) + if log_colorspace: + lut_img = lut_img ** (2.2) + if is_non_default_domain: + lut_img = (lut_img - lut.domain[0]) / dom_scale + + lut_img = torch.from_numpy(lut_img) + if strength < 1.0: + lut_img = strength * lut_img + (1 - strength) * img + out.append(lut_img) + + out = torch.stack(out) + + return (out, ) + +FONTS_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "fonts") +class DrawText: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "text": ("STRING", { "multiline": True, "dynamicPrompts": True, "default": "Hello, World!" 
}), + "font": ([f for f in os.listdir(FONTS_DIR) if f.endswith('.ttf') or f.endswith('.otf')], ), + "size": ("INT", { "default": 56, "min": 1, "max": 9999, "step": 1 }), + "color": ("STRING", { "multiline": False, "default": "#FFFFFF" }), + "background_color": ("STRING", { "multiline": False, "default": "#00000000" }), + "shadow_distance": ("INT", { "default": 0, "min": 0, "max": 100, "step": 1 }), + "shadow_blur": ("INT", { "default": 0, "min": 0, "max": 100, "step": 1 }), + "shadow_color": ("STRING", { "multiline": False, "default": "#000000" }), + "alignment": (["left", "center", "right"],), + "width": ("INT", { "default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1 }), + "height": ("INT", { "default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1 }), + }, + } + + RETURN_TYPES = ("IMAGE", "MASK",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, text, font, size, color, background_color, shadow_distance, shadow_blur, shadow_color, alignment, width, height): + font = ImageFont.truetype(os.path.join(FONTS_DIR, font), size) + + lines = text.split("\n") + + # Calculate the width and height of the text + text_width = max(font.getbbox(line)[2] for line in lines) + line_height = font.getmask(text).getbbox()[3] + font.getmetrics()[1] # add descent to height + text_height = line_height * len(lines) + + width = width if width > 0 else text_width + height = height if height > 0 else text_height + + background_color = ImageColor.getrgb(background_color) + image = Image.new('RGBA', (width + shadow_distance, height + shadow_distance), color=background_color) + + image_shadow = None + if shadow_distance > 0: + image_shadow = Image.new('RGBA', (width + shadow_distance, height + shadow_distance), color=background_color) + + for i, line in enumerate(lines): + line_width = font.getbbox(line)[2] + #text_height =font.getbbox(line)[3] + if alignment == "left": + x = 0 + elif alignment == "center": + x = (width - line_width) / 2 + elif alignment == "right": + x = width - line_width + y = i * line_height + + draw = ImageDraw.Draw(image) + draw.text((x, y), line, font=font, fill=color) + + if image_shadow is not None: + draw = ImageDraw.Draw(image_shadow) + draw.text((x + shadow_distance, y + shadow_distance), line, font=font, fill=shadow_color) + + if image_shadow is not None: + image_shadow = image_shadow.filter(ImageFilter.GaussianBlur(shadow_blur)) + image = Image.alpha_composite(image_shadow, image) + + image = pb(T.ToTensor()(image).unsqueeze(0)) + mask = image[:, :, :, 3] if image.shape[3] == 4 else torch.ones_like(image[:, :, :, 0]) + + return (image[:, :, :, :3], mask,) + +class RemBGSession: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": (["u2net: general purpose", "u2netp: lightweight general purpose", "u2net_human_seg: human segmentation", "u2net_cloth_seg: cloths Parsing", "silueta: very small u2net", "isnet-general-use: general purpose", "isnet-anime: anime illustrations", "sam: general purpose"],), + "providers": (['CPU', 'CUDA', 'ROCM', 'DirectML', 'OpenVINO', 'CoreML', 'Tensorrt', 'Azure'],), + }, + } + + RETURN_TYPES = ("REMBG_SESSION",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, model, providers): + from rembg import new_session as rembg_new_session + + model = model.split(":")[0] + return (rembg_new_session(model, providers=[providers+"ExecutionProvider"]),) + +class ImageRemoveBackground: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "rembg_session": ("REMBG_SESSION",), + "image": ("IMAGE",), 
+ }, + } + + RETURN_TYPES = ("IMAGE", "MASK",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, rembg_session, image): + from rembg import remove as rembg + + image = p(image) + output = [] + for img in image: + img = T.ToPILImage()(img) + img = rembg(img, session=rembg_session) + output.append(T.ToTensor()(img)) + + output = torch.stack(output, dim=0) + output = pb(output) + mask = output[:, :, :, 3] if output.shape[3] == 4 else torch.ones_like(output[:, :, :, 0]) + + return(output[:, :, :, :3], mask,) + +class PixelOEPixelize: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "downscale_mode": (["contrast", "bicubic", "nearest", "center", "k-centroid"],), + "target_size": ("INT", { "default": 128, "min": 1, "max": MAX_RESOLUTION, "step": 16 }), + "patch_size": ("INT", { "default": 16, "min": 4, "max": 32, "step": 2 }), + "thickness": ("INT", { "default": 2, "min": 1, "max": 16, "step": 1 }), + #"contrast": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 100.0, "step": 0.1 }), + #"saturation": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 100.0, "step": 0.1 }), + "color_matching": ("BOOLEAN", { "default": True }), + "upscale": ("BOOLEAN", { "default": True }), + }, + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, image, downscale_mode, target_size, patch_size, thickness, color_matching, upscale): + from pixeloe.pixelize import pixelize + + image = image.clone().mul(255).clamp(0, 255).byte().cpu().numpy() + output = [] + for img in image: + img = pixelize(img, + mode=downscale_mode, + target_size=target_size, + patch_size=patch_size, + thickness=thickness, + contrast=1.0, + saturation=1.0, + color_matching=color_matching, + no_upscale=not upscale) + output.append(T.ToTensor()(img)) + + output = torch.stack(output, dim=0) + output = pb(output) + + return(output,) + +class NoiseFromImage: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "noise_size": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01 }), + "color_noise": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.01 }), + "mask_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01 }), + "mask_scale_diff": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.01 }), + "noise_strenght": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01 }), + "saturation": ("FLOAT", {"default": 2.0, "min": 0.0, "max": 100.0, "step": 0.1 }), + "contrast": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step": 0.1 }), + "blur": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.1 }), + }, + "optional": { + "noise_mask": ("IMAGE",), + } + } + + RETURN_TYPES = ("IMAGE","IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, image, noise_size, color_noise, mask_strength, mask_scale_diff, noise_strenght, saturation, contrast, blur, noise_mask=None): + torch.manual_seed(0) + + elastic_alpha = max(image.shape[1], image.shape[2])# * noise_size + elastic_sigma = elastic_alpha / 400 * noise_size + + blur_size = int(6 * blur+1) + if blur_size % 2 == 0: + blur_size+= 1 + + if noise_mask is None: + noise_mask = image + + # Ensure noise mask is the same size as the image + if noise_mask.shape[1:] != image.shape[1:]: + noise_mask = F.interpolate(p(noise_mask), size=(image.shape[1], image.shape[2]), mode='bicubic', align_corners=False) + noise_mask = pb(noise_mask) + # Ensure we have the same number of masks 
and images + if noise_mask.shape[0] > image.shape[0]: + noise_mask = noise_mask[:image.shape[0]] + else: + noise_mask = torch.cat((noise_mask, noise_mask[-1:].repeat((image.shape[0]-noise_mask.shape[0], 1, 1, 1))), dim=0) + + # Convert image to grayscale mask + noise_mask = noise_mask.mean(dim=3).unsqueeze(-1) + + # add color noise + imgs = p(image.clone()) + if color_noise > 0: + color_noise = torch.normal(torch.zeros_like(imgs), std=color_noise) + + #color_noise = torch.rand_like(imgs) * (color_noise * 2) - color_noise + + color_noise *= (imgs - imgs.min()) / (imgs.max() - imgs.min()) + + imgs = imgs + color_noise + imgs = imgs.clamp(0, 1) + + # create fine noise + fine_noise = [] + for n in imgs: + avg_color = n.mean(dim=[1,2]) + + tmp_noise = T.ElasticTransform(alpha=elastic_alpha, sigma=elastic_sigma, fill=avg_color.tolist())(n) + #tmp_noise = T.functional.adjust_saturation(tmp_noise, 2.0) + tmp_noise = T.GaussianBlur(blur_size, blur)(tmp_noise) + tmp_noise = T.ColorJitter(contrast=(contrast,contrast), saturation=(saturation,saturation))(tmp_noise) + fine_noise.append(tmp_noise) + + #tmp_noise = F.interpolate(tmp_noise, scale_factor=.1, mode='bilinear', align_corners=False) + #tmp_noise = F.interpolate(tmp_noise, size=(tmp_noise.shape[1], tmp_noise.shape[2]), mode='bilinear', align_corners=False) + + #tmp_noise = T.ElasticTransform(alpha=elastic_alpha, sigma=elastic_sigma/3, fill=avg_color.tolist())(n) + #tmp_noise = T.GaussianBlur(blur_size, blur)(tmp_noise) + #tmp_noise = T.functional.adjust_saturation(tmp_noise, saturation) + #tmp_noise = T.ColorJitter(contrast=(contrast,contrast), saturation=(saturation,saturation))(tmp_noise) + #fine_noise.append(tmp_noise) + + imgs = None + del imgs + + fine_noise = torch.stack(fine_noise, dim=0) + fine_noise = pb(fine_noise) + #fine_noise = torch.stack(fine_noise, dim=0) + #fine_noise = pb(fine_noise) + mask_scale_diff = min(mask_scale_diff, 0.99) + if mask_scale_diff > 0: + coarse_noise = F.interpolate(p(fine_noise), scale_factor=1-mask_scale_diff, mode='area') + coarse_noise = F.interpolate(coarse_noise, size=(fine_noise.shape[1], fine_noise.shape[2]), mode='bilinear', align_corners=False) + coarse_noise = pb(coarse_noise) + else: + coarse_noise = fine_noise + + #noise_mask = noise_mask * mask_strength + (1 - mask_strength) + # merge fine and coarse noise + output = (1 - noise_mask) * coarse_noise + noise_mask * fine_noise + #noise_mask = noise_mask * mask_strength + if mask_strength < 1: + noise_mask = noise_mask.pow(mask_strength) + noise_mask = torch.nan_to_num(noise_mask).clamp(0, 1) + output = noise_mask * output + (1 - noise_mask) * image + + # apply noise to image + output = output * noise_strenght + image * (1 - noise_strenght) + output = output.clamp(0, 1) + + return (output,noise_mask.repeat(1,1,1,3),) + +class RemoveLatentMask: + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples": ("LATENT",),}} + RETURN_TYPES = ("LATENT",) + FUNCTION = "execute" + + CATEGORY = "essentials" + + def execute(self, samples): + s = samples.copy() + if "noise_mask" in s: + del s["noise_mask"] + + return (s,) + +class ConditioningCombineMultiple: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "conditioning_1": ("CONDITIONING",), + "conditioning_2": ("CONDITIONING",), + }, "optional": { + "conditioning_3": ("CONDITIONING",), + "conditioning_4": ("CONDITIONING",), + "conditioning_5": ("CONDITIONING",), + }, + } + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, 
conditioning_1, conditioning_2, conditioning_3=None, conditioning_4=None, conditioning_5=None): + c = conditioning_1 + conditioning_2 + + if conditioning_3 is not None: + c += conditioning_3 + if conditioning_4 is not None: + c += conditioning_4 + if conditioning_5 is not None: + c += conditioning_5 + + return (c,) + +class ImageBatchMultiple: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image_1": ("IMAGE",), + "image_2": ("IMAGE",), + "method": (["nearest-exact", "bilinear", "area", "bicubic", "lanczos"], { "default": "lanczos" }), + }, "optional": { + "image_3": ("IMAGE",), + "image_4": ("IMAGE",), + "image_5": ("IMAGE",), + }, + } + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, image_1, image_2, method, image_3=None, image_4=None, image_5=None): + if image_1.shape[1:] != image_2.shape[1:]: + image_2 = comfy.utils.common_upscale(image_2.movedim(-1,1), image_1.shape[2], image_1.shape[1], method, "center").movedim(1,-1) + out = torch.cat((image_1, image_2), dim=0) + + if image_3 is not None: + if image_1.shape[1:] != image_3.shape[1:]: + image_3 = comfy.utils.common_upscale(image_3.movedim(-1,1), image_1.shape[2], image_1.shape[1], method, "center").movedim(1,-1) + out = torch.cat((out, image_3), dim=0) + if image_4 is not None: + if image_1.shape[1:] != image_4.shape[1:]: + image_4 = comfy.utils.common_upscale(image_4.movedim(-1,1), image_1.shape[2], image_1.shape[1], method, "center").movedim(1,-1) + out = torch.cat((out, image_4), dim=0) + if image_5 is not None: + if image_1.shape[1:] != image_5.shape[1:]: + image_5 = comfy.utils.common_upscale(image_5.movedim(-1,1), image_1.shape[2], image_1.shape[1], method, "center").movedim(1,-1) + out = torch.cat((out, image_5), dim=0) + + return (out,) + + +NODE_CLASS_MAPPINGS = { + "GetImageSize+": GetImageSize, + + "ImageResize+": ImageResize, + "ImageCrop+": ImageCrop, + "ImageFlip+": ImageFlip, + + "ImageDesaturate+": ImageDesaturate, + "ImagePosterize+": ImagePosterize, + "ImageCASharpening+": ImageCAS, + "ImageSeamCarving+": ImageSeamCarving, + "ImageEnhanceDifference+": ImageEnhanceDifference, + "ImageExpandBatch+": ImageExpandBatch, + "ImageFromBatch+": ImageFromBatch, + "ImageListToBatch+": ImageListToBatch, + "ImageCompositeFromMaskBatch+": ImageCompositeFromMaskBatch, + "ExtractKeyframes+": ExtractKeyframes, + "ImageApplyLUT+": ImageApplyLUT, + "PixelOEPixelize+": PixelOEPixelize, + + "MaskBlur+": MaskBlur, + "MaskFlip+": MaskFlip, + "MaskPreview+": MaskPreview, + "MaskBatch+": MaskBatch, + "MaskExpandBatch+": MaskExpandBatch, + "TransitionMask+": TransitionMask, + "MaskFromColor+": MaskFromColor, + "MaskFromBatch+": MaskFromBatch, + "MaskBoundingBox+": MaskBoundingBox, + "MaskFromSegmentation+": MaskFromSegmentation, + "MaskFromRGBCMYBW+": MaskFromRGBCMYBW, + "MaskSmooth+": MaskSmooth, + "MaskFromList+": MaskFromList, + + "SimpleMath+": SimpleMath, + "ConsoleDebug+": ConsoleDebug, + "DebugTensorShape+": DebugTensorShape, + + "ModelCompile+": ModelCompile, + "BatchCount+": BatchCount, + + "KSamplerVariationsStochastic+": KSamplerVariationsStochastic, + "KSamplerVariationsWithNoise+": KSamplerVariationsWithNoise, + "CLIPTextEncodeSDXL+": CLIPTextEncodeSDXLSimplified, + "SDXLEmptyLatentSizePicker+": SDXLEmptyLatentSizePicker, + + "DrawText+": DrawText, + "RemBGSession+": RemBGSession, + "ImageRemoveBackground+": ImageRemoveBackground, + + "RemoveLatentMask+": RemoveLatentMask, + "ConditioningCombineMultiple+": ConditioningCombineMultiple, + "ImageBatchMultiple+": 
ImageBatchMultiple, + + #"NoiseFromImage~": NoiseFromImage, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "GetImageSize+": "🔧 Get Image Size", + "ImageResize+": "🔧 Image Resize", + "ImageCrop+": "🔧 Image Crop", + "ImageFlip+": "🔧 Image Flip", + + "ImageDesaturate+": "🔧 Image Desaturate", + "ImagePosterize+": "🔧 Image Posterize", + "ImageCASharpening+": "🔧 Image Contrast Adaptive Sharpening", + "ImageSeamCarving+": "🔧 Image Seam Carving", + "ImageEnhanceDifference+": "🔧 Image Enhance Difference", + "ImageExpandBatch+": "🔧 Image Expand Batch", + "ImageFromBatch+": "🔧 Image From Batch", + "ImageListToBatch+": "🔧 Image List To Batch", + "ImageCompositeFromMaskBatch+": "🔧 Image Composite From Mask Batch", + "ExtractKeyframes+": "🔧 Extract Keyframes (experimental)", + "ImageApplyLUT+": "🔧 Image Apply LUT", + "PixelOEPixelize+": "🔧 Pixelize", + + "MaskBlur+": "🔧 Mask Blur", + "MaskFlip+": "🔧 Mask Flip", + "MaskPreview+": "🔧 Mask Preview", + "MaskBatch+": "🔧 Mask Batch", + "MaskExpandBatch+": "🔧 Mask Expand Batch", + "TransitionMask+": "🔧 Transition Mask", + "MaskFromColor+": "🔧 Mask From Color", + "MaskFromBatch+": "🔧 Mask From Batch", + "MaskBoundingBox+": "🔧 Mask Bounding Box", + "MaskFromSegmentation+": "🔧 Mask From Segmentation", + "MaskFromRGBCMYBW+": "🔧 Mask From RGB/CMY/BW", + "MaskSmooth+": "🔧 Mask Smooth", + "MaskFromList+": "🔧 Mask From List", + + "SimpleMath+": "🔧 Simple Math", + "ConsoleDebug+": "🔧 Console Debug", + "DebugTensorShape+": "🔧 Tensor Shape Debug", + + "ModelCompile+": "🔧 Compile Model", + "BatchCount+": "🔧 Batch Count", + + "KSamplerVariationsStochastic+": "🔧 KSampler Stochastic Variations", + "KSamplerVariationsWithNoise+": "🔧 KSampler Variations with Noise Injection", + "CLIPTextEncodeSDXL+": "🔧 SDXLCLIPTextEncode", + "SDXLEmptyLatentSizePicker+": "🔧 SDXL Empty Latent Size Picker", + + "DrawText+": "🔧 Draw Text", + "RemBGSession+": "🔧 RemBG Session", + "ImageRemoveBackground+": "🔧 Image Remove Background", + + "RemoveLatentMask+": "🔧 Remove Latent Mask", + + "ConditioningCombineMultiple+": "🔧 Conditionings Combine Multiple ", + "ImageBatchMultiple+": "🔧 Images Batch Multiple", + + #"NoiseFromImage~": "🔧 Noise From Image", +} diff --git a/ComfyUI_essentials/fonts/ShareTechMono-Regular.ttf b/ComfyUI_essentials/fonts/ShareTechMono-Regular.ttf new file mode 100644 index 0000000000000000000000000000000000000000..0ae0b19750c51a751bc45f54622443d55d643999 Binary files /dev/null and b/ComfyUI_essentials/fonts/ShareTechMono-Regular.ttf differ diff --git a/ComfyUI_essentials/fonts/put_font_files_here.txt b/ComfyUI_essentials/fonts/put_font_files_here.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI_essentials/histogram_matching.py b/ComfyUI_essentials/histogram_matching.py new file mode 100644 index 0000000000000000000000000000000000000000..f7b469446ad98c6e310453981c4e439431e98049 --- /dev/null +++ b/ComfyUI_essentials/histogram_matching.py @@ -0,0 +1,87 @@ +# from MIT licensed https://github.com/nemodleo/pytorch-histogram-matching +import torch +import torch.nn as nn +import torch.nn.functional as F + +class Histogram_Matching(nn.Module): + def __init__(self, differentiable=False): + super(Histogram_Matching, self).__init__() + self.differentiable = differentiable + + def forward(self, dst, ref): + # B C + B, C, H, W = dst.size() + # assertion + assert dst.device == ref.device + # [B*C 256] + hist_dst = self.cal_hist(dst) + hist_ref = self.cal_hist(ref) + # [B*C 256] + tables = 
self.cal_trans_batch(hist_dst, hist_ref) + # [B C H W] + rst = dst.clone() + for b in range(B): + for c in range(C): + rst[b,c] = tables[b*c, (dst[b,c] * 255).long()] + # [B C H W] + rst /= 255. + return rst + + def cal_hist(self, img): + B, C, H, W = img.size() + # [B*C 256] + if self.differentiable: + hists = self.soft_histc_batch(img * 255, bins=256, min=0, max=256, sigma=3*25) + else: + hists = torch.stack([torch.histc(img[b,c] * 255, bins=256, min=0, max=255) for b in range(B) for c in range(C)]) + hists = hists.float() + hists = F.normalize(hists, p=1) + # BC 256 + bc, n = hists.size() + # [B*C 256 256] + triu = torch.ones(bc, n, n, device=hists.device).triu() + # [B*C 256] + hists = torch.bmm(hists[:,None,:], triu)[:,0,:] + return hists + + def soft_histc_batch(self, x, bins=256, min=0, max=256, sigma=3*25): + # B C H W + B, C, H, W = x.size() + # [B*C H*W] + x = x.view(B*C, -1) + # 1 + delta = float(max - min) / float(bins) + # [256] + centers = float(min) + delta * (torch.arange(bins, device=x.device, dtype=torch.bfloat16) + 0.5) + # [B*C 1 H*W] + x = torch.unsqueeze(x, 1) + # [1 256 1] + centers = centers[None,:,None] + # [B*C 256 H*W] + x = x - centers + # [B*C 256 H*W] + x = x.type(torch.bfloat16) + # [B*C 256 H*W] + x = torch.sigmoid(sigma * (x + delta/2)) - torch.sigmoid(sigma * (x - delta/2)) + # [B*C 256] + x = x.sum(dim=2) + # [B*C 256] + x = x.type(torch.float32) + # prevent oom + # torch.cuda.empty_cache() + return x + + def cal_trans_batch(self, hist_dst, hist_ref): + # [B*C 256 256] + hist_dst = hist_dst[:,None,:].repeat(1,256,1) + # [B*C 256 256] + hist_ref = hist_ref[:,:,None].repeat(1,1,256) + # [B*C 256 256] + table = hist_dst - hist_ref + # [B*C 256 256] + table = torch.where(table>=0, 1., 0.) + # [B*C 256] + table = torch.sum(table, dim=1) - 1 + # [B*C 256] + table = torch.clamp(table, min=0, max=255) + return table diff --git a/ComfyUI_essentials/image.py b/ComfyUI_essentials/image.py new file mode 100644 index 0000000000000000000000000000000000000000..d2ee3e3ae350befd7a2f299f44bf8ed3ab26ad92 --- /dev/null +++ b/ComfyUI_essentials/image.py @@ -0,0 +1,1770 @@ +from .utils import max_, min_ +from nodes import MAX_RESOLUTION +import comfy.utils +from nodes import SaveImage +from node_helpers import pillow +from PIL import Image, ImageOps + +import kornia +import torch +import torch.nn.functional as F +import torchvision.transforms.v2 as T + +#import warnings +#warnings.filterwarnings('ignore', module="torchvision") +import math +import os +import numpy as np +import folder_paths +from pathlib import Path +import random + +""" +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Image analysis +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" + +class ImageEnhanceDifference: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image1": ("IMAGE",), + "image2": ("IMAGE",), + "exponent": ("FLOAT", { "default": 0.75, "min": 0.00, "max": 1.00, "step": 0.05, }), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials/image analysis" + + def execute(self, image1, image2, exponent): + if image1.shape[1:] != image2.shape[1:]: + image2 = comfy.utils.common_upscale(image2.permute([0,3,1,2]), image1.shape[2], image1.shape[1], upscale_method='bicubic', crop='center').permute([0,2,3,1]) + + diff_image = image1 - image2 + diff_image = torch.pow(diff_image, exponent) + diff_image = torch.clamp(diff_image, 0, 1) + + return(diff_image,) + +""" 
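As an aside, the transfer table that Histogram_Matching builds above (per-channel cumulative histograms compared level by level) is the classic CDF-matching construction. A minimal, self-contained sketch of the same idea for a single channel follows; match_channel is a hypothetical helper, not part of the node pack, and it assumes float tensors with values in [0, 1].

import torch

def match_channel(dst, ref, bins=256):
    # dst, ref: single-channel float tensors with values in [0, 1]
    hist_dst = torch.histc(dst * 255, bins=bins, min=0, max=255)
    hist_ref = torch.histc(ref * 255, bins=bins, min=0, max=255)
    cdf_dst = torch.cumsum(hist_dst / hist_dst.sum(), dim=0)
    cdf_ref = torch.cumsum(hist_ref / hist_ref.sum(), dim=0)
    # for every destination level, pick the first reference level whose CDF reaches it
    table = torch.searchsorted(cdf_ref, cdf_dst).clamp(max=bins - 1).float()
    return table[(dst * 255).long()] / 255.0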
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Batch tools +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" + +class ImageBatchMultiple: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image_1": ("IMAGE",), + "method": (["nearest-exact", "bilinear", "area", "bicubic", "lanczos"], { "default": "lanczos" }), + }, "optional": { + "image_2": ("IMAGE",), + "image_3": ("IMAGE",), + "image_4": ("IMAGE",), + "image_5": ("IMAGE",), + }, + } + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials/image batch" + + def execute(self, image_1, method, image_2=None, image_3=None, image_4=None, image_5=None): + out = image_1 + + if image_2 is not None: + if image_1.shape[1:] != image_2.shape[1:]: + image_2 = comfy.utils.common_upscale(image_2.movedim(-1,1), image_1.shape[2], image_1.shape[1], method, "center").movedim(1,-1) + out = torch.cat((image_1, image_2), dim=0) + if image_3 is not None: + if image_1.shape[1:] != image_3.shape[1:]: + image_3 = comfy.utils.common_upscale(image_3.movedim(-1,1), image_1.shape[2], image_1.shape[1], method, "center").movedim(1,-1) + out = torch.cat((out, image_3), dim=0) + if image_4 is not None: + if image_1.shape[1:] != image_4.shape[1:]: + image_4 = comfy.utils.common_upscale(image_4.movedim(-1,1), image_1.shape[2], image_1.shape[1], method, "center").movedim(1,-1) + out = torch.cat((out, image_4), dim=0) + if image_5 is not None: + if image_1.shape[1:] != image_5.shape[1:]: + image_5 = comfy.utils.common_upscale(image_5.movedim(-1,1), image_1.shape[2], image_1.shape[1], method, "center").movedim(1,-1) + out = torch.cat((out, image_5), dim=0) + + return (out,) + + +class ImageExpandBatch: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "size": ("INT", { "default": 16, "min": 1, "step": 1, }), + "method": (["expand", "repeat all", "repeat first", "repeat last"],) + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials/image batch" + + def execute(self, image, size, method): + orig_size = image.shape[0] + + if orig_size == size: + return (image,) + + if size <= 1: + return (image[:size],) + + if 'expand' in method: + out = torch.empty([size] + list(image.shape)[1:], dtype=image.dtype, device=image.device) + if size < orig_size: + scale = (orig_size - 1) / (size - 1) + for i in range(size): + out[i] = image[min(round(i * scale), orig_size - 1)] + else: + scale = orig_size / size + for i in range(size): + out[i] = image[min(math.floor((i + 0.5) * scale), orig_size - 1)] + elif 'all' in method: + out = image.repeat([math.ceil(size / image.shape[0])] + [1] * (len(image.shape) - 1))[:size] + elif 'first' in method: + if size < image.shape[0]: + out = image[:size] + else: + out = torch.cat([image[:1].repeat(size-image.shape[0], 1, 1, 1), image], dim=0) + elif 'last' in method: + if size < image.shape[0]: + out = image[:size] + else: + out = torch.cat((image, image[-1:].repeat((size-image.shape[0], 1, 1, 1))), dim=0) + + return (out,) + +class ImageFromBatch: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE", ), + "start": ("INT", { "default": 0, "min": 0, "step": 1, }), + "length": ("INT", { "default": -1, "min": -1, "step": 1, }), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials/image batch" + + def execute(self, image, start, length): + if length<0: + length = image.shape[0] + start = min(start, image.shape[0]-1) + length = 
min(image.shape[0]-start, length) + return (image[start:start + length], ) + + +class ImageListToBatch: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + INPUT_IS_LIST = True + CATEGORY = "essentials/image batch" + + def execute(self, image): + shape = image[0].shape[1:3] + out = [] + + for i in range(len(image)): + img = image[i] + if image[i].shape[1:3] != shape: + img = comfy.utils.common_upscale(img.permute([0,3,1,2]), shape[1], shape[0], upscale_method='bicubic', crop='center').permute([0,2,3,1]) + out.append(img) + + out = torch.cat(out, dim=0) + + return (out,) + +class ImageBatchToList: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + } + } + + RETURN_TYPES = ("IMAGE",) + OUTPUT_IS_LIST = (True,) + FUNCTION = "execute" + CATEGORY = "essentials/image batch" + + def execute(self, image): + return ([image[i].unsqueeze(0) for i in range(image.shape[0])], ) + + +""" +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Image manipulation +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" + +class ImageCompositeFromMaskBatch: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image_from": ("IMAGE", ), + "image_to": ("IMAGE", ), + "mask": ("MASK", ) + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials/image manipulation" + + def execute(self, image_from, image_to, mask): + frames = mask.shape[0] + + if image_from.shape[1] != image_to.shape[1] or image_from.shape[2] != image_to.shape[2]: + image_to = comfy.utils.common_upscale(image_to.permute([0,3,1,2]), image_from.shape[2], image_from.shape[1], upscale_method='bicubic', crop='center').permute([0,2,3,1]) + + if frames < image_from.shape[0]: + image_from = image_from[:frames] + elif frames > image_from.shape[0]: + image_from = torch.cat((image_from, image_from[-1].unsqueeze(0).repeat(frames-image_from.shape[0], 1, 1, 1)), dim=0) + + mask = mask.unsqueeze(3).repeat(1, 1, 1, 3) + + if image_from.shape[1] != mask.shape[1] or image_from.shape[2] != mask.shape[2]: + mask = comfy.utils.common_upscale(mask.permute([0,3,1,2]), image_from.shape[2], image_from.shape[1], upscale_method='bicubic', crop='center').permute([0,2,3,1]) + + out = mask * image_to + (1 - mask) * image_from + + return (out, ) + +class ImageComposite: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "destination": ("IMAGE",), + "source": ("IMAGE",), + "x": ("INT", { "default": 0, "min": -MAX_RESOLUTION, "max": MAX_RESOLUTION, "step": 1 }), + "y": ("INT", { "default": 0, "min": -MAX_RESOLUTION, "max": MAX_RESOLUTION, "step": 1 }), + "offset_x": ("INT", { "default": 0, "min": -MAX_RESOLUTION, "max": MAX_RESOLUTION, "step": 1 }), + "offset_y": ("INT", { "default": 0, "min": -MAX_RESOLUTION, "max": MAX_RESOLUTION, "step": 1 }), + }, + "optional": { + "mask": ("MASK",), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials/image manipulation" + + def execute(self, destination, source, x, y, offset_x, offset_y, mask=None): + if mask is None: + mask = torch.ones_like(source)[:,:,:,0] + + mask = mask.unsqueeze(-1).repeat(1, 1, 1, 3) + + if mask.shape[1:3] != source.shape[1:3]: + mask = F.interpolate(mask.permute([0, 3, 1, 2]), size=(source.shape[1], source.shape[2]), mode='bicubic') + mask = mask.permute([0, 2, 3, 1]) + + if mask.shape[0] > source.shape[0]: + mask = mask[:source.shape[0]] 
+ elif mask.shape[0] < source.shape[0]: + mask = torch.cat((mask, mask[-1:].repeat((source.shape[0]-mask.shape[0], 1, 1, 1))), dim=0) + + if destination.shape[0] > source.shape[0]: + destination = destination[:source.shape[0]] + elif destination.shape[0] < source.shape[0]: + destination = torch.cat((destination, destination[-1:].repeat((source.shape[0]-destination.shape[0], 1, 1, 1))), dim=0) + + if not isinstance(x, list): + x = [x] + if not isinstance(y, list): + y = [y] + + if len(x) < destination.shape[0]: + x = x + [x[-1]] * (destination.shape[0] - len(x)) + if len(y) < destination.shape[0]: + y = y + [y[-1]] * (destination.shape[0] - len(y)) + + x = [i + offset_x for i in x] + y = [i + offset_y for i in y] + + output = [] + for i in range(destination.shape[0]): + d = destination[i].clone() + s = source[i] + m = mask[i] + + if x[i]+source.shape[2] > destination.shape[2]: + s = s[:, :, :destination.shape[2]-x[i], :] + m = m[:, :, :destination.shape[2]-x[i], :] + if y[i]+source.shape[1] > destination.shape[1]: + s = s[:, :destination.shape[1]-y[i], :, :] + m = m[:destination.shape[1]-y[i], :, :] + + #output.append(s * m + d[y[i]:y[i]+s.shape[0], x[i]:x[i]+s.shape[1], :] * (1 - m)) + d[y[i]:y[i]+s.shape[0], x[i]:x[i]+s.shape[1], :] = s * m + d[y[i]:y[i]+s.shape[0], x[i]:x[i]+s.shape[1], :] * (1 - m) + output.append(d) + + output = torch.stack(output) + + # apply the source to the destination at XY position using the mask + #for i in range(destination.shape[0]): + # output[i, y[i]:y[i]+source.shape[1], x[i]:x[i]+source.shape[2], :] = source * mask + destination[i, y[i]:y[i]+source.shape[1], x[i]:x[i]+source.shape[2], :] * (1 - mask) + + #for x_, y_ in zip(x, y): + # output[:, y_:y_+source.shape[1], x_:x_+source.shape[2], :] = source * mask + destination[:, y_:y_+source.shape[1], x_:x_+source.shape[2], :] * (1 - mask) + + #output[:, y:y+source.shape[1], x:x+source.shape[2], :] = source * mask + destination[:, y:y+source.shape[1], x:x+source.shape[2], :] * (1 - mask) + #output = destination * (1 - mask) + source * mask + + return (output,) + +class ImageResize: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "width": ("INT", { "default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 1, }), + "height": ("INT", { "default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 1, }), + "interpolation": (["nearest", "bilinear", "bicubic", "area", "nearest-exact", "lanczos"],), + "method": (["stretch", "keep proportion", "fill / crop", "pad"],), + "condition": (["always", "downscale if bigger", "upscale if smaller", "if bigger area", "if smaller area"],), + "multiple_of": ("INT", { "default": 0, "min": 0, "max": 512, "step": 1, }), + } + } + + RETURN_TYPES = ("IMAGE", "INT", "INT",) + RETURN_NAMES = ("IMAGE", "width", "height",) + FUNCTION = "execute" + CATEGORY = "essentials/image manipulation" + + def execute(self, image, width, height, method="stretch", interpolation="nearest", condition="always", multiple_of=0, keep_proportion=False): + _, oh, ow, _ = image.shape + x = y = x2 = y2 = 0 + pad_left = pad_right = pad_top = pad_bottom = 0 + + if keep_proportion: + method = "keep proportion" + + if multiple_of > 1: + width = width - (width % multiple_of) + height = height - (height % multiple_of) + + if method == 'keep proportion' or method == 'pad': + if width == 0 and oh < height: + width = MAX_RESOLUTION + elif width == 0 and oh >= height: + width = ow + + if height == 0 and ow < width: + height = MAX_RESOLUTION + elif height == 0 and ow >= width: + height = oh 
+ + ratio = min(width / ow, height / oh) + new_width = round(ow*ratio) + new_height = round(oh*ratio) + + if method == 'pad': + pad_left = (width - new_width) // 2 + pad_right = width - new_width - pad_left + pad_top = (height - new_height) // 2 + pad_bottom = height - new_height - pad_top + + width = new_width + height = new_height + elif method.startswith('fill'): + width = width if width > 0 else ow + height = height if height > 0 else oh + + ratio = max(width / ow, height / oh) + new_width = round(ow*ratio) + new_height = round(oh*ratio) + x = (new_width - width) // 2 + y = (new_height - height) // 2 + x2 = x + width + y2 = y + height + if x2 > new_width: + x -= (x2 - new_width) + if x < 0: + x = 0 + if y2 > new_height: + y -= (y2 - new_height) + if y < 0: + y = 0 + width = new_width + height = new_height + else: + width = width if width > 0 else ow + height = height if height > 0 else oh + + if "always" in condition \ + or ("downscale if bigger" == condition and (oh > height or ow > width)) or ("upscale if smaller" == condition and (oh < height or ow < width)) \ + or ("bigger area" in condition and (oh * ow > height * width)) or ("smaller area" in condition and (oh * ow < height * width)): + + outputs = image.permute(0,3,1,2) + + if interpolation == "lanczos": + outputs = comfy.utils.lanczos(outputs, width, height) + else: + outputs = F.interpolate(outputs, size=(height, width), mode=interpolation) + + if method == 'pad': + if pad_left > 0 or pad_right > 0 or pad_top > 0 or pad_bottom > 0: + outputs = F.pad(outputs, (pad_left, pad_right, pad_top, pad_bottom), value=0) + + outputs = outputs.permute(0,2,3,1) + + if method.startswith('fill'): + if x > 0 or y > 0 or x2 > 0 or y2 > 0: + outputs = outputs[:, y:y2, x:x2, :] + else: + outputs = image + + if multiple_of > 1 and (outputs.shape[2] % multiple_of != 0 or outputs.shape[1] % multiple_of != 0): + width = outputs.shape[2] + height = outputs.shape[1] + x = (width % multiple_of) // 2 + y = (height % multiple_of) // 2 + x2 = width - ((width % multiple_of) - x) + y2 = height - ((height % multiple_of) - y) + outputs = outputs[:, y:y2, x:x2, :] + + outputs = torch.clamp(outputs, 0, 1) + + return(outputs, outputs.shape[2], outputs.shape[1],) + +class ImageFlip: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "axis": (["x", "y", "xy"],), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials/image manipulation" + + def execute(self, image, axis): + dim = () + if "y" in axis: + dim += (1,) + if "x" in axis: + dim += (2,) + image = torch.flip(image, dim) + + return(image,) + +class ImageCrop: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "width": ("INT", { "default": 256, "min": 0, "max": MAX_RESOLUTION, "step": 8, }), + "height": ("INT", { "default": 256, "min": 0, "max": MAX_RESOLUTION, "step": 8, }), + "position": (["top-left", "top-center", "top-right", "right-center", "bottom-right", "bottom-center", "bottom-left", "left-center", "center"],), + "x_offset": ("INT", { "default": 0, "min": -99999, "step": 1, }), + "y_offset": ("INT", { "default": 0, "min": -99999, "step": 1, }), + } + } + + RETURN_TYPES = ("IMAGE","INT","INT",) + RETURN_NAMES = ("IMAGE","x","y",) + FUNCTION = "execute" + CATEGORY = "essentials/image manipulation" + + def execute(self, image, width, height, position, x_offset, y_offset): + _, oh, ow, _ = image.shape + + width = min(ow, width) + height = min(oh, height) + + if "center" in position: + x = 
round((ow-width) / 2) + y = round((oh-height) / 2) + if "top" in position: + y = 0 + if "bottom" in position: + y = oh-height + if "left" in position: + x = 0 + if "right" in position: + x = ow-width + + x += x_offset + y += y_offset + + x2 = x+width + y2 = y+height + + if x2 > ow: + x2 = ow + if x < 0: + x = 0 + if y2 > oh: + y2 = oh + if y < 0: + y = 0 + + image = image[:, y:y2, x:x2, :] + + return(image, x, y, ) + +class ImageTile: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "rows": ("INT", { "default": 2, "min": 1, "max": 256, "step": 1, }), + "cols": ("INT", { "default": 2, "min": 1, "max": 256, "step": 1, }), + "overlap": ("FLOAT", { "default": 0, "min": 0, "max": 0.5, "step": 0.01, }), + "overlap_x": ("INT", { "default": 0, "min": 0, "max": MAX_RESOLUTION//2, "step": 1, }), + "overlap_y": ("INT", { "default": 0, "min": 0, "max": MAX_RESOLUTION//2, "step": 1, }), + } + } + + RETURN_TYPES = ("IMAGE", "INT", "INT", "INT", "INT") + RETURN_NAMES = ("IMAGE", "tile_width", "tile_height", "overlap_x", "overlap_y",) + FUNCTION = "execute" + CATEGORY = "essentials/image manipulation" + + def execute(self, image, rows, cols, overlap, overlap_x, overlap_y): + h, w = image.shape[1:3] + tile_h = h // rows + tile_w = w // cols + h = tile_h * rows + w = tile_w * cols + overlap_h = int(tile_h * overlap) + overlap_y + overlap_w = int(tile_w * overlap) + overlap_x + + # max overlap is half of the tile size + overlap_h = min(tile_h // 2, overlap_h) + overlap_w = min(tile_w // 2, overlap_w) + + if rows == 1: + overlap_h = 0 + if cols == 1: + overlap_w = 0 + + tiles = [] + for i in range(rows): + for j in range(cols): + y1 = i * tile_h + x1 = j * tile_w + + if i > 0: + y1 -= overlap_h + if j > 0: + x1 -= overlap_w + + y2 = y1 + tile_h + overlap_h + x2 = x1 + tile_w + overlap_w + + if y2 > h: + y2 = h + y1 = y2 - tile_h - overlap_h + if x2 > w: + x2 = w + x1 = x2 - tile_w - overlap_w + + tiles.append(image[:, y1:y2, x1:x2, :]) + tiles = torch.cat(tiles, dim=0) + + return(tiles, tile_w+overlap_w, tile_h+overlap_h, overlap_w, overlap_h,) + +class ImageUntile: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "tiles": ("IMAGE",), + "overlap_x": ("INT", { "default": 0, "min": 0, "max": MAX_RESOLUTION//2, "step": 1, }), + "overlap_y": ("INT", { "default": 0, "min": 0, "max": MAX_RESOLUTION//2, "step": 1, }), + "rows": ("INT", { "default": 2, "min": 1, "max": 256, "step": 1, }), + "cols": ("INT", { "default": 2, "min": 1, "max": 256, "step": 1, }), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials/image manipulation" + + def execute(self, tiles, overlap_x, overlap_y, rows, cols): + tile_h, tile_w = tiles.shape[1:3] + tile_h -= overlap_y + tile_w -= overlap_x + out_w = cols * tile_w + out_h = rows * tile_h + + out = torch.zeros((1, out_h, out_w, tiles.shape[3]), device=tiles.device, dtype=tiles.dtype) + + for i in range(rows): + for j in range(cols): + y1 = i * tile_h + x1 = j * tile_w + + if i > 0: + y1 -= overlap_y + if j > 0: + x1 -= overlap_x + + y2 = y1 + tile_h + overlap_y + x2 = x1 + tile_w + overlap_x + + if y2 > out_h: + y2 = out_h + y1 = y2 - tile_h - overlap_y + if x2 > out_w: + x2 = out_w + x1 = x2 - tile_w - overlap_x + + mask = torch.ones((1, tile_h+overlap_y, tile_w+overlap_x), device=tiles.device, dtype=tiles.dtype) + + # feather the overlap on top + if i > 0 and overlap_y > 0: + mask[:, :overlap_y, :] *= torch.linspace(0, 1, overlap_y, device=tiles.device, dtype=tiles.dtype).unsqueeze(1) + # feather 
the overlap on bottom + #if i < rows - 1: + # mask[:, -overlap_y:, :] *= torch.linspace(1, 0, overlap_y, device=tiles.device, dtype=tiles.dtype).unsqueeze(1) + # feather the overlap on left + if j > 0 and overlap_x > 0: + mask[:, :, :overlap_x] *= torch.linspace(0, 1, overlap_x, device=tiles.device, dtype=tiles.dtype).unsqueeze(0) + # feather the overlap on right + #if j < cols - 1: + # mask[:, :, -overlap_x:] *= torch.linspace(1, 0, overlap_x, device=tiles.device, dtype=tiles.dtype).unsqueeze(0) + + mask = mask.unsqueeze(-1).repeat(1, 1, 1, tiles.shape[3]) + tile = tiles[i * cols + j] * mask + out[:, y1:y2, x1:x2, :] = out[:, y1:y2, x1:x2, :] * (1 - mask) + tile + return(out, ) + +class ImageSeamCarving: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "image": ("IMAGE",), + "width": ("INT", { "default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1, }), + "height": ("INT", { "default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1, }), + "energy": (["backward", "forward"],), + "order": (["width-first", "height-first"],), + }, + "optional": { + "keep_mask": ("MASK",), + "drop_mask": ("MASK",), + } + } + + RETURN_TYPES = ("IMAGE",) + CATEGORY = "essentials/image manipulation" + FUNCTION = "execute" + + def execute(self, image, width, height, energy, order, keep_mask=None, drop_mask=None): + from .carve import seam_carving + + img = image.permute([0, 3, 1, 2]) + + if keep_mask is not None: + #keep_mask = keep_mask.reshape((-1, 1, keep_mask.shape[-2], keep_mask.shape[-1])).movedim(1, -1) + keep_mask = keep_mask.unsqueeze(1) + + if keep_mask.shape[2] != img.shape[2] or keep_mask.shape[3] != img.shape[3]: + keep_mask = F.interpolate(keep_mask, size=(img.shape[2], img.shape[3]), mode="bilinear") + if drop_mask is not None: + drop_mask = drop_mask.unsqueeze(1) + + if drop_mask.shape[2] != img.shape[2] or drop_mask.shape[3] != img.shape[3]: + drop_mask = F.interpolate(drop_mask, size=(img.shape[2], img.shape[3]), mode="bilinear") + + out = [] + for i in range(img.shape[0]): + resized = seam_carving( + T.ToPILImage()(img[i]), + size=(width, height), + energy_mode=energy, + order=order, + keep_mask=T.ToPILImage()(keep_mask[i]) if keep_mask is not None else None, + drop_mask=T.ToPILImage()(drop_mask[i]) if drop_mask is not None else None, + ) + out.append(T.ToTensor()(resized)) + + out = torch.stack(out).permute([0, 2, 3, 1]) + + return(out, ) + +class ImageRandomTransform: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "repeat": ("INT", { "default": 1, "min": 1, "max": 256, "step": 1, }), + "variation": ("FLOAT", { "default": 0.1, "min": 0.0, "max": 1.0, "step": 0.05, }), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials/image manipulation" + + def execute(self, image, seed, repeat, variation): + h, w = image.shape[1:3] + image = image.repeat(repeat, 1, 1, 1).permute([0, 3, 1, 2]) + + distortion = 0.2 * variation + rotation = 5 * variation + brightness = 0.5 * variation + contrast = 0.5 * variation + saturation = 0.5 * variation + hue = 0.2 * variation + scale = 0.5 * variation + + torch.manual_seed(seed) + + out = [] + for i in image: + tramsforms = T.Compose([ + T.RandomPerspective(distortion_scale=distortion, p=0.5), + T.RandomRotation(degrees=rotation, interpolation=T.InterpolationMode.BILINEAR, expand=True), + T.ColorJitter(brightness=brightness, contrast=contrast, saturation=saturation, hue=(-hue, hue)), + 
T.RandomHorizontalFlip(p=0.5), + T.RandomResizedCrop((h, w), scale=(1-scale, 1+scale), ratio=(w/h, w/h), interpolation=T.InterpolationMode.BICUBIC), + ]) + out.append(tramsforms(i.unsqueeze(0))) + + out = torch.cat(out, dim=0).permute([0, 2, 3, 1]).clamp(0, 1) + + return (out,) + +class RemBGSession: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": (["u2net: general purpose", "u2netp: lightweight general purpose", "u2net_human_seg: human segmentation", "u2net_cloth_seg: cloths Parsing", "silueta: very small u2net", "isnet-general-use: general purpose", "isnet-anime: anime illustrations", "sam: general purpose"],), + "providers": (['CPU', 'CUDA', 'ROCM', 'DirectML', 'OpenVINO', 'CoreML', 'Tensorrt', 'Azure'],), + }, + } + + RETURN_TYPES = ("REMBG_SESSION",) + FUNCTION = "execute" + CATEGORY = "essentials/image manipulation" + + def execute(self, model, providers): + from rembg import new_session, remove + + model = model.split(":")[0] + + class Session: + def __init__(self, model, providers): + self.session = new_session(model, providers=[providers+"ExecutionProvider"]) + def process(self, image): + return remove(image, session=self.session) + + return (Session(model, providers),) + +class TransparentBGSession: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mode": (["base", "fast", "base-nightly"],), + "use_jit": ("BOOLEAN", { "default": True }), + }, + } + + RETURN_TYPES = ("REMBG_SESSION",) + FUNCTION = "execute" + CATEGORY = "essentials/image manipulation" + + def execute(self, mode, use_jit): + from transparent_background import Remover + + class Session: + def __init__(self, mode, use_jit): + self.session = Remover(mode=mode, jit=use_jit) + def process(self, image): + return self.session.process(image) + + return (Session(mode, use_jit),) + +class ImageRemoveBackground: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "rembg_session": ("REMBG_SESSION",), + "image": ("IMAGE",), + }, + } + + RETURN_TYPES = ("IMAGE", "MASK",) + FUNCTION = "execute" + CATEGORY = "essentials/image manipulation" + + def execute(self, rembg_session, image): + image = image.permute([0, 3, 1, 2]) + output = [] + for img in image: + img = T.ToPILImage()(img) + img = rembg_session.process(img) + output.append(T.ToTensor()(img)) + + output = torch.stack(output, dim=0) + output = output.permute([0, 2, 3, 1]) + mask = output[:, :, :, 3] if output.shape[3] == 4 else torch.ones_like(output[:, :, :, 0]) + # output = output[:, :, :, :3] + + return(output, mask,) + +""" +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Image processing +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" + +class ImageDesaturate: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "factor": ("FLOAT", { "default": 1.00, "min": 0.00, "max": 1.00, "step": 0.05, }), + "method": (["luminance (Rec.709)", "luminance (Rec.601)", "average", "lightness"],), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials/image processing" + + def execute(self, image, factor, method): + if method == "luminance (Rec.709)": + grayscale = 0.2126 * image[..., 0] + 0.7152 * image[..., 1] + 0.0722 * image[..., 2] + elif method == "luminance (Rec.601)": + grayscale = 0.299 * image[..., 0] + 0.587 * image[..., 1] + 0.114 * image[..., 2] + elif method == "average": + grayscale = image.mean(dim=3) + elif method == "lightness": + grayscale = (torch.max(image, dim=3)[0] + 
torch.min(image, dim=3)[0]) / 2 + + grayscale = (1.0 - factor) * image + factor * grayscale.unsqueeze(-1).repeat(1, 1, 1, 3) + grayscale = torch.clamp(grayscale, 0, 1) + + return(grayscale,) + +class PixelOEPixelize: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "downscale_mode": (["contrast", "bicubic", "nearest", "center", "k-centroid"],), + "target_size": ("INT", { "default": 128, "min": 0, "max": MAX_RESOLUTION, "step": 8 }), + "patch_size": ("INT", { "default": 16, "min": 4, "max": 32, "step": 2 }), + "thickness": ("INT", { "default": 2, "min": 1, "max": 16, "step": 1 }), + "color_matching": ("BOOLEAN", { "default": True }), + "upscale": ("BOOLEAN", { "default": True }), + #"contrast": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 100.0, "step": 0.1 }), + #"saturation": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 100.0, "step": 0.1 }), + }, + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials/image processing" + + def execute(self, image, downscale_mode, target_size, patch_size, thickness, color_matching, upscale): + from pixeloe.pixelize import pixelize + + image = image.clone().mul(255).clamp(0, 255).byte().cpu().numpy() + output = [] + for img in image: + img = pixelize(img, + mode=downscale_mode, + target_size=target_size, + patch_size=patch_size, + thickness=thickness, + contrast=1.0, + saturation=1.0, + color_matching=color_matching, + no_upscale=not upscale) + output.append(T.ToTensor()(img)) + + output = torch.stack(output, dim=0).permute([0, 2, 3, 1]) + + return(output,) + +class ImagePosterize: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "threshold": ("FLOAT", { "default": 0.50, "min": 0.00, "max": 1.00, "step": 0.05, }), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials/image processing" + + def execute(self, image, threshold): + image = image.mean(dim=3, keepdim=True) + image = (image > threshold).float() + image = image.repeat(1, 1, 1, 3) + + return(image,) + +# From https://github.com/yoonsikp/pycubelut/blob/master/pycubelut.py (MIT license) +class ImageApplyLUT: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "lut_file": (folder_paths.get_filename_list("luts"),), + "gamma_correction": ("BOOLEAN", { "default": True }), + "clip_values": ("BOOLEAN", { "default": True }), + "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.1 }), + }} + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials/image processing" + + # TODO: check if we can do without numpy + def execute(self, image, lut_file, gamma_correction, clip_values, strength): + lut_file_path = folder_paths.get_full_path("luts", lut_file) + if not lut_file_path or not Path(lut_file_path).exists(): + print(f"Could not find LUT file: {lut_file_path}") + return (image,) + + from colour.io.luts.iridas_cube import read_LUT_IridasCube + + device = image.device + lut = read_LUT_IridasCube(lut_file_path) + lut.name = lut_file + + if clip_values: + if lut.domain[0].max() == lut.domain[0].min() and lut.domain[1].max() == lut.domain[1].min(): + lut.table = np.clip(lut.table, lut.domain[0, 0], lut.domain[1, 0]) + else: + if len(lut.table.shape) == 2: # 3x1D + for dim in range(3): + lut.table[:, dim] = np.clip(lut.table[:, dim], lut.domain[0, dim], lut.domain[1, dim]) + else: # 3D + for dim in range(3): + lut.table[:, :, :, dim] = np.clip(lut.table[:, :, :, dim], lut.domain[0, dim], lut.domain[1, 
dim]) + + out = [] + for img in image: # TODO: is this more resource efficient? should we use a batch instead? + lut_img = img.cpu().numpy().copy() + + is_non_default_domain = not np.array_equal(lut.domain, np.array([[0., 0., 0.], [1., 1., 1.]])) + dom_scale = None + if is_non_default_domain: + dom_scale = lut.domain[1] - lut.domain[0] + lut_img = lut_img * dom_scale + lut.domain[0] + if gamma_correction: + lut_img = lut_img ** (1/2.2) + lut_img = lut.apply(lut_img) + if gamma_correction: + lut_img = lut_img ** (2.2) + if is_non_default_domain: + lut_img = (lut_img - lut.domain[0]) / dom_scale + + lut_img = torch.from_numpy(lut_img).to(device) + if strength < 1.0: + lut_img = strength * lut_img + (1 - strength) * img + out.append(lut_img) + + out = torch.stack(out) + + return (out, ) + +# From https://github.com/Jamy-L/Pytorch-Contrast-Adaptive-Sharpening/ +class ImageCAS: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "image": ("IMAGE",), + "amount": ("FLOAT", {"default": 0.8, "min": 0, "max": 1, "step": 0.05}), + }, + } + + RETURN_TYPES = ("IMAGE",) + CATEGORY = "essentials/image processing" + FUNCTION = "execute" + + def execute(self, image, amount): + epsilon = 1e-5 + img = F.pad(image.permute([0,3,1,2]), pad=(1, 1, 1, 1)) + + a = img[..., :-2, :-2] + b = img[..., :-2, 1:-1] + c = img[..., :-2, 2:] + d = img[..., 1:-1, :-2] + e = img[..., 1:-1, 1:-1] + f = img[..., 1:-1, 2:] + g = img[..., 2:, :-2] + h = img[..., 2:, 1:-1] + i = img[..., 2:, 2:] + + # Computing contrast + cross = (b, d, e, f, h) + mn = min_(cross) + mx = max_(cross) + + diag = (a, c, g, i) + mn2 = min_(diag) + mx2 = max_(diag) + mx = mx + mx2 + mn = mn + mn2 + + # Computing local weight + inv_mx = torch.reciprocal(mx + epsilon) + amp = inv_mx * torch.minimum(mn, (2 - mx)) + + # scaling + amp = torch.sqrt(amp) + w = - amp * (amount * (1/5 - 1/8) + 1/8) + div = torch.reciprocal(1 + 4*w) + + output = ((b + d + f + h)*w + e) * div + output = output.clamp(0, 1) + #output = torch.nan_to_num(output) + + output = output.permute([0,2,3,1]) + + return (output,) + +class ImageSmartSharpen: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "noise_radius": ("INT", { "default": 7, "min": 1, "max": 25, "step": 1, }), + "preserve_edges": ("FLOAT", { "default": 0.75, "min": 0.0, "max": 1.0, "step": 0.05 }), + "sharpen": ("FLOAT", { "default": 5.0, "min": 0.0, "max": 25.0, "step": 0.5 }), + "ratio": ("FLOAT", { "default": 0.5, "min": 0.0, "max": 1.0, "step": 0.1 }), + }} + + RETURN_TYPES = ("IMAGE",) + CATEGORY = "essentials/image processing" + FUNCTION = "execute" + + def execute(self, image, noise_radius, preserve_edges, sharpen, ratio): + import cv2 + + output = [] + #diagonal = np.sqrt(image.shape[1]**2 + image.shape[2]**2) + if preserve_edges > 0: + preserve_edges = max(1 - preserve_edges, 0.05) + + for img in image: + if noise_radius > 1: + sigma = 0.3 * ((noise_radius - 1) * 0.5 - 1) + 0.8 # this is what pytorch uses for blur + #sigma_color = preserve_edges * (diagonal / 2048) + blurred = cv2.bilateralFilter(img.cpu().numpy(), noise_radius, preserve_edges, sigma) + blurred = torch.from_numpy(blurred) + else: + blurred = img + + if sharpen > 0: + sharpened = kornia.enhance.sharpness(img.permute(2,0,1), sharpen).permute(1,2,0) + else: + sharpened = img + + img = ratio * sharpened + (1 - ratio) * blurred + img = torch.clamp(img, 0, 1) + output.append(img) + + del blurred, sharpened + output = torch.stack(output) + + return (output,) + + +class ExtractKeyframes: + 
@classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "threshold": ("FLOAT", { "default": 0.85, "min": 0.00, "max": 1.00, "step": 0.01, }), + } + } + + RETURN_TYPES = ("IMAGE", "STRING") + RETURN_NAMES = ("KEYFRAMES", "indexes") + + FUNCTION = "execute" + CATEGORY = "essentials" + + def execute(self, image, threshold): + window_size = 2 + + variations = torch.sum(torch.abs(image[1:] - image[:-1]), dim=[1, 2, 3]) + #variations = torch.sum((image[1:] - image[:-1]) ** 2, dim=[1, 2, 3]) + threshold = torch.quantile(variations.float(), threshold).item() + + keyframes = [] + for i in range(image.shape[0] - window_size + 1): + window = image[i:i + window_size] + variation = torch.sum(torch.abs(window[-1] - window[0])).item() + + if variation > threshold: + keyframes.append(i + window_size - 1) + + return (image[keyframes], ','.join(map(str, keyframes)),) + +class ImageColorMatch: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "reference": ("IMAGE",), + "color_space": (["LAB", "YCbCr", "RGB", "LUV", "YUV", "XYZ"],), + "factor": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.05, }), + "device": (["auto", "cpu", "gpu"],), + "batch_size": ("INT", { "default": 0, "min": 0, "max": 1024, "step": 1, }), + }, + "optional": { + "reference_mask": ("MASK",), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials/image processing" + + def execute(self, image, reference, color_space, factor, device, batch_size, reference_mask=None): + if "gpu" == device: + device = comfy.model_management.get_torch_device() + elif "auto" == device: + device = comfy.model_management.intermediate_device() + else: + device = 'cpu' + + image = image.permute([0, 3, 1, 2]) + reference = reference.permute([0, 3, 1, 2]).to(device) + + # Ensure reference_mask is in the correct format and on the right device + if reference_mask is not None: + assert reference_mask.ndim == 3, f"Expected reference_mask to have 3 dimensions, but got {reference_mask.ndim}" + assert reference_mask.shape[0] == reference.shape[0], f"Frame count mismatch: reference_mask has {reference_mask.shape[0]} frames, but reference has {reference.shape[0]}" + + # Reshape mask to (batch, 1, height, width) + reference_mask = reference_mask.unsqueeze(1).to(device) + + # Ensure the mask is binary (0 or 1) + reference_mask = (reference_mask > 0.5).float() + + # Ensure spatial dimensions match + if reference_mask.shape[2:] != reference.shape[2:]: + reference_mask = comfy.utils.common_upscale( + reference_mask, + reference.shape[3], reference.shape[2], + upscale_method='bicubic', + crop='center' + ) + + if batch_size == 0 or batch_size > image.shape[0]: + batch_size = image.shape[0] + + if "LAB" == color_space: + reference = kornia.color.rgb_to_lab(reference) + elif "YCbCr" == color_space: + reference = kornia.color.rgb_to_ycbcr(reference) + elif "LUV" == color_space: + reference = kornia.color.rgb_to_luv(reference) + elif "YUV" == color_space: + reference = kornia.color.rgb_to_yuv(reference) + elif "XYZ" == color_space: + reference = kornia.color.rgb_to_xyz(reference) + + reference_mean, reference_std = self.compute_mean_std(reference, reference_mask) + + image_batch = torch.split(image, batch_size, dim=0) + output = [] + + for image in image_batch: + image = image.to(device) + + if color_space == "LAB": + image = kornia.color.rgb_to_lab(image) + elif color_space == "YCbCr": + image = kornia.color.rgb_to_ycbcr(image) + elif color_space == "LUV": + image = 
kornia.color.rgb_to_luv(image) + elif color_space == "YUV": + image = kornia.color.rgb_to_yuv(image) + elif color_space == "XYZ": + image = kornia.color.rgb_to_xyz(image) + + image_mean, image_std = self.compute_mean_std(image) + + matched = torch.nan_to_num((image - image_mean) / image_std) * torch.nan_to_num(reference_std) + reference_mean + matched = factor * matched + (1 - factor) * image + + if color_space == "LAB": + matched = kornia.color.lab_to_rgb(matched) + elif color_space == "YCbCr": + matched = kornia.color.ycbcr_to_rgb(matched) + elif color_space == "LUV": + matched = kornia.color.luv_to_rgb(matched) + elif color_space == "YUV": + matched = kornia.color.yuv_to_rgb(matched) + elif color_space == "XYZ": + matched = kornia.color.xyz_to_rgb(matched) + + out = matched.permute([0, 2, 3, 1]).clamp(0, 1).to(comfy.model_management.intermediate_device()) + output.append(out) + + out = None + output = torch.cat(output, dim=0) + return (output,) + + def compute_mean_std(self, tensor, mask=None): + if mask is not None: + # Apply mask to the tensor + masked_tensor = tensor * mask + + # Calculate the sum of the mask for each channel + mask_sum = mask.sum(dim=[2, 3], keepdim=True) + + # Avoid division by zero + mask_sum = torch.clamp(mask_sum, min=1e-6) + + # Calculate mean and std only for masked area + mean = torch.nan_to_num(masked_tensor.sum(dim=[2, 3], keepdim=True) / mask_sum) + std = torch.sqrt(torch.nan_to_num(((masked_tensor - mean) ** 2 * mask).sum(dim=[2, 3], keepdim=True) / mask_sum)) + else: + mean = tensor.mean(dim=[2, 3], keepdim=True) + std = tensor.std(dim=[2, 3], keepdim=True) + return mean, std + +class ImageColorMatchAdobe(ImageColorMatch): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "reference": ("IMAGE",), + "color_space": (["RGB", "LAB"],), + "luminance_factor": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 2.0, "step": 0.05}), + "color_intensity_factor": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 2.0, "step": 0.05}), + "fade_factor": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.05}), + "neutralization_factor": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.05}), + "device": (["auto", "cpu", "gpu"],), + }, + "optional": { + "reference_mask": ("MASK",), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials/image processing" + + def analyze_color_statistics(self, image, mask=None): + # Assuming image is in RGB format + l, a, b = kornia.color.rgb_to_lab(image).chunk(3, dim=1) + + if mask is not None: + # Ensure mask is binary and has the same spatial dimensions as the image + mask = F.interpolate(mask, size=image.shape[2:], mode='nearest') + mask = (mask > 0.5).float() + + # Apply mask to each channel + l = l * mask + a = a * mask + b = b * mask + + # Compute masked mean and std + num_pixels = mask.sum() + mean_l = (l * mask).sum() / num_pixels + mean_a = (a * mask).sum() / num_pixels + mean_b = (b * mask).sum() / num_pixels + std_l = torch.sqrt(((l - mean_l)**2 * mask).sum() / num_pixels) + var_ab = ((a - mean_a)**2 + (b - mean_b)**2) * mask + std_ab = torch.sqrt(var_ab.sum() / num_pixels) + else: + mean_l = l.mean() + std_l = l.std() + mean_a = a.mean() + mean_b = b.mean() + std_ab = torch.sqrt(a.var() + b.var()) + + return mean_l, std_l, mean_a, mean_b, std_ab + + def apply_color_transformation(self, image, source_stats, dest_stats, L, C, N): + l, a, b = kornia.color.rgb_to_lab(image).chunk(3, dim=1) + + # Unpack statistics + src_mean_l, src_std_l, src_mean_a, 
src_mean_b, src_std_ab = source_stats + dest_mean_l, dest_std_l, dest_mean_a, dest_mean_b, dest_std_ab = dest_stats + + # Adjust luminance + l_new = (l - dest_mean_l) * (src_std_l / dest_std_l) * L + src_mean_l + + # Neutralize color cast + a = a - N * dest_mean_a + b = b - N * dest_mean_b + + # Adjust color intensity + a_new = a * (src_std_ab / dest_std_ab) * C + b_new = b * (src_std_ab / dest_std_ab) * C + + # Combine channels + lab_new = torch.cat([l_new, a_new, b_new], dim=1) + + # Convert back to RGB + rgb_new = kornia.color.lab_to_rgb(lab_new) + + return rgb_new + + def execute(self, image, reference, color_space, luminance_factor, color_intensity_factor, fade_factor, neutralization_factor, device, reference_mask=None): + if "gpu" == device: + device = comfy.model_management.get_torch_device() + elif "auto" == device: + device = comfy.model_management.intermediate_device() + else: + device = 'cpu' + + # Ensure image and reference are in the correct shape (B, C, H, W) + image = image.permute(0, 3, 1, 2).to(device) + reference = reference.permute(0, 3, 1, 2).to(device) + + # Handle reference_mask (if provided) + if reference_mask is not None: + # Ensure reference_mask is 4D (B, 1, H, W) + if reference_mask.ndim == 2: + reference_mask = reference_mask.unsqueeze(0).unsqueeze(0) + elif reference_mask.ndim == 3: + reference_mask = reference_mask.unsqueeze(1) + reference_mask = reference_mask.to(device) + + # Analyze color statistics + source_stats = self.analyze_color_statistics(reference, reference_mask) + dest_stats = self.analyze_color_statistics(image) + + # Apply color transformation + transformed = self.apply_color_transformation( + image, source_stats, dest_stats, + luminance_factor, color_intensity_factor, neutralization_factor + ) + + # Apply fade factor + result = fade_factor * transformed + (1 - fade_factor) * image + + # Convert back to (B, H, W, C) format and ensure values are in [0, 1] range + result = result.permute(0, 2, 3, 1).clamp(0, 1).to(comfy.model_management.intermediate_device()) + + return (result,) + + +class ImageHistogramMatch: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "reference": ("IMAGE",), + "method": (["pytorch", "skimage"],), + "factor": ("FLOAT", { "default": 1.0, "min": 0.0, "max": 1.0, "step": 0.05, }), + "device": (["auto", "cpu", "gpu"],), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials/image processing" + + def execute(self, image, reference, method, factor, device): + if "gpu" == device: + device = comfy.model_management.get_torch_device() + elif "auto" == device: + device = comfy.model_management.intermediate_device() + else: + device = 'cpu' + + if "pytorch" in method: + from .histogram_matching import Histogram_Matching + + image = image.permute([0, 3, 1, 2]).to(device) + reference = reference.permute([0, 3, 1, 2]).to(device)[0].unsqueeze(0) + image.requires_grad = True + reference.requires_grad = True + + out = [] + + for i in image: + i = i.unsqueeze(0) + hm = Histogram_Matching(differentiable=True) + out.append(hm(i, reference)) + out = torch.cat(out, dim=0) + out = factor * out + (1 - factor) * image + out = out.permute([0, 2, 3, 1]).clamp(0, 1) + else: + from skimage.exposure import match_histograms + + out = torch.from_numpy(match_histograms(image.cpu().numpy(), reference.cpu().numpy(), channel_axis=3)).to(device) + out = factor * out + (1 - factor) * image.to(device) + + return (out.to(comfy.model_management.intermediate_device()),) + +""" 
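For context, the core of ImageColorMatch above is a per-channel mean/std transfer (normalize the image's statistics, then re-apply the reference's) carried out in the selected color space. A minimal sketch of that operation is given below; mean_std_transfer is a hypothetical helper working directly in RGB on a single (H, W, 3) tensor, whereas the node additionally batches, masks, and converts color spaces.

import torch

def mean_std_transfer(image, reference, eps=1e-6):
    # image, reference: float tensors of shape (H, W, 3), values in [0, 1]
    img_mean, img_std = image.mean(dim=(0, 1)), image.std(dim=(0, 1))
    ref_mean, ref_std = reference.mean(dim=(0, 1)), reference.std(dim=(0, 1))
    # remove the image's own statistics, then impose the reference statistics
    matched = (image - img_mean) / (img_std + eps) * ref_std + ref_mean
    return matched.clamp(0, 1)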
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Utilities +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" + +class ImageToDevice: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "device": (["auto", "cpu", "gpu"],), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials/image utils" + + def execute(self, image, device): + if "gpu" == device: + device = comfy.model_management.get_torch_device() + elif "auto" == device: + device = comfy.model_management.intermediate_device() + else: + device = 'cpu' + + image = image.clone().to(device) + torch.cuda.empty_cache() + + return (image,) + +class GetImageSize: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + } + } + + RETURN_TYPES = ("INT", "INT", "INT",) + RETURN_NAMES = ("width", "height", "count") + FUNCTION = "execute" + CATEGORY = "essentials/image utils" + + def execute(self, image): + return (image.shape[2], image.shape[1], image.shape[0]) + +class ImageRemoveAlpha: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + }, + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials/image utils" + + def execute(self, image): + if image.shape[3] == 4: + image = image[..., :3] + return (image,) + +class ImagePreviewFromLatent(SaveImage): + def __init__(self): + self.output_dir = folder_paths.get_temp_directory() + self.type = "temp" + self.prefix_append = "_temp_" + ''.join(random.choice("abcdefghijklmnopqrstupvxyz") for x in range(5)) + self.compress_level = 1 + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "latent": ("LATENT",), + "vae": ("VAE", ), + "tile_size": ("INT", {"default": 0, "min": 0, "max": 4096, "step": 64}) + }, "optional": { + "image": (["none"], {"image_upload": False}), + }, "hidden": { + "prompt": "PROMPT", + "extra_pnginfo": "EXTRA_PNGINFO", + }, + } + + RETURN_TYPES = ("IMAGE", "MASK", "INT", "INT",) + RETURN_NAMES = ("IMAGE", "MASK", "width", "height",) + FUNCTION = "execute" + CATEGORY = "essentials/image utils" + + def execute(self, latent, vae, tile_size, prompt=None, extra_pnginfo=None, image=None, filename_prefix="ComfyUI"): + mask = torch.zeros((64,64), dtype=torch.float32, device="cpu") + ui = None + + if image.startswith("clipspace"): + image_path = folder_paths.get_annotated_filepath(image) + if not os.path.exists(image_path): + raise ValueError(f"Clipspace image does not exist anymore, select 'none' in the image field.") + + img = pillow(Image.open, image_path) + img = pillow(ImageOps.exif_transpose, img) + if img.mode == "I": + img = img.point(lambda i: i * (1 / 255)) + image = img.convert("RGB") + image = np.array(image).astype(np.float32) / 255.0 + image = torch.from_numpy(image)[None,] + if "A" in img.getbands(): + mask = np.array(img.getchannel('A')).astype(np.float32) / 255.0 + mask = 1. 
- torch.from_numpy(mask) + ui = { + "filename": os.path.basename(image_path), + "subfolder": os.path.dirname(image_path), + "type": "temp", + } + else: + if tile_size > 0: + tile_size = max(tile_size, 320) + image = vae.decode_tiled(latent["samples"], tile_x=tile_size // 8, tile_y=tile_size // 8, ) + else: + image = vae.decode(latent["samples"]) + ui = self.save_images(image, filename_prefix, prompt, extra_pnginfo) + + out = {**ui, "result": (image, mask, image.shape[2], image.shape[1],)} + return out + +class NoiseFromImage: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "noise_strenght": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01 }), + "noise_size": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01 }), + "color_noise": ("FLOAT", {"default": 0.2, "min": 0.0, "max": 1.0, "step": 0.01 }), + "mask_strength": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01 }), + "mask_scale_diff": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.01 }), + "mask_contrast": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step": 0.1 }), + "saturation": ("FLOAT", {"default": 2.0, "min": 0.0, "max": 100.0, "step": 0.1 }), + "contrast": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step": 0.1 }), + "blur": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.1 }), + }, + "optional": { + "noise_mask": ("IMAGE",), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "execute" + CATEGORY = "essentials/image utils" + + def execute(self, image, noise_size, color_noise, mask_strength, mask_scale_diff, mask_contrast, noise_strenght, saturation, contrast, blur, noise_mask=None): + torch.manual_seed(0) + + elastic_alpha = max(image.shape[1], image.shape[2])# * noise_size + elastic_sigma = elastic_alpha / 400 * noise_size + + blur_size = int(6 * blur+1) + if blur_size % 2 == 0: + blur_size+= 1 + + if noise_mask is None: + noise_mask = image + + # increase contrast of the mask + if mask_contrast != 1: + noise_mask = T.ColorJitter(contrast=(mask_contrast,mask_contrast))(noise_mask.permute([0, 3, 1, 2])).permute([0, 2, 3, 1]) + + # Ensure noise mask is the same size as the image + if noise_mask.shape[1:] != image.shape[1:]: + noise_mask = F.interpolate(noise_mask.permute([0, 3, 1, 2]), size=(image.shape[1], image.shape[2]), mode='bicubic', align_corners=False) + noise_mask = noise_mask.permute([0, 2, 3, 1]) + # Ensure we have the same number of masks and images + if noise_mask.shape[0] > image.shape[0]: + noise_mask = noise_mask[:image.shape[0]] + else: + noise_mask = torch.cat((noise_mask, noise_mask[-1:].repeat((image.shape[0]-noise_mask.shape[0], 1, 1, 1))), dim=0) + + # Convert mask to grayscale mask + noise_mask = noise_mask.mean(dim=3).unsqueeze(-1) + + # add color noise + imgs = image.clone().permute([0, 3, 1, 2]) + if color_noise > 0: + color_noise = torch.normal(torch.zeros_like(imgs), std=color_noise) + color_noise *= (imgs - imgs.min()) / (imgs.max() - imgs.min()) + + imgs = imgs + color_noise + imgs = imgs.clamp(0, 1) + + # create fine and coarse noise + fine_noise = [] + for n in imgs: + avg_color = n.mean(dim=[1,2]) + + tmp_noise = T.ElasticTransform(alpha=elastic_alpha, sigma=elastic_sigma, fill=avg_color.tolist())(n) + if blur > 0: + tmp_noise = T.GaussianBlur(blur_size, blur)(tmp_noise) + tmp_noise = T.ColorJitter(contrast=(contrast,contrast), saturation=(saturation,saturation))(tmp_noise) + fine_noise.append(tmp_noise) + + imgs = None + del imgs + + fine_noise = torch.stack(fine_noise, 
dim=0) + fine_noise = fine_noise.permute([0, 2, 3, 1]) + #fine_noise = torch.stack(fine_noise, dim=0) + #fine_noise = pb(fine_noise) + mask_scale_diff = min(mask_scale_diff, 0.99) + if mask_scale_diff > 0: + coarse_noise = F.interpolate(fine_noise.permute([0, 3, 1, 2]), scale_factor=1-mask_scale_diff, mode='area') + coarse_noise = F.interpolate(coarse_noise, size=(fine_noise.shape[1], fine_noise.shape[2]), mode='bilinear', align_corners=False) + coarse_noise = coarse_noise.permute([0, 2, 3, 1]) + else: + coarse_noise = fine_noise + + output = (1 - noise_mask) * coarse_noise + noise_mask * fine_noise + + if mask_strength < 1: + noise_mask = noise_mask.pow(mask_strength) + noise_mask = torch.nan_to_num(noise_mask).clamp(0, 1) + output = noise_mask * output + (1 - noise_mask) * image + + # apply noise to image + output = output * noise_strenght + image * (1 - noise_strenght) + output = output.clamp(0, 1) + + return (output, ) + +IMAGE_CLASS_MAPPINGS = { + # Image analysis + "ImageEnhanceDifference+": ImageEnhanceDifference, + + # Image batch + "ImageBatchMultiple+": ImageBatchMultiple, + "ImageExpandBatch+": ImageExpandBatch, + "ImageFromBatch+": ImageFromBatch, + "ImageListToBatch+": ImageListToBatch, + "ImageBatchToList+": ImageBatchToList, + + # Image manipulation + "ImageCompositeFromMaskBatch+": ImageCompositeFromMaskBatch, + "ImageComposite+": ImageComposite, + "ImageCrop+": ImageCrop, + "ImageFlip+": ImageFlip, + "ImageRandomTransform+": ImageRandomTransform, + "ImageRemoveAlpha+": ImageRemoveAlpha, + "ImageRemoveBackground+": ImageRemoveBackground, + "ImageResize+": ImageResize, + "ImageSeamCarving+": ImageSeamCarving, + "ImageTile+": ImageTile, + "ImageUntile+": ImageUntile, + "RemBGSession+": RemBGSession, + "TransparentBGSession+": TransparentBGSession, + + # Image processing + "ImageApplyLUT+": ImageApplyLUT, + "ImageCASharpening+": ImageCAS, + "ImageDesaturate+": ImageDesaturate, + "PixelOEPixelize+": PixelOEPixelize, + "ImagePosterize+": ImagePosterize, + "ImageColorMatch+": ImageColorMatch, + "ImageColorMatchAdobe+": ImageColorMatchAdobe, + "ImageHistogramMatch+": ImageHistogramMatch, + "ImageSmartSharpen+": ImageSmartSharpen, + + # Utilities + "GetImageSize+": GetImageSize, + "ImageToDevice+": ImageToDevice, + "ImagePreviewFromLatent+": ImagePreviewFromLatent, + "NoiseFromImage+": NoiseFromImage, + #"ExtractKeyframes+": ExtractKeyframes, +} + +IMAGE_NAME_MAPPINGS = { + # Image analysis + "ImageEnhanceDifference+": "🔧 Image Enhance Difference", + + # Image batch + "ImageBatchMultiple+": "🔧 Images Batch Multiple", + "ImageExpandBatch+": "🔧 Image Expand Batch", + "ImageFromBatch+": "🔧 Image From Batch", + "ImageListToBatch+": "🔧 Image List To Batch", + "ImageBatchToList+": "🔧 Image Batch To List", + + # Image manipulation + "ImageCompositeFromMaskBatch+": "🔧 Image Composite From Mask Batch", + "ImageComposite+": "🔧 Image Composite", + "ImageCrop+": "🔧 Image Crop", + "ImageFlip+": "🔧 Image Flip", + "ImageRandomTransform+": "🔧 Image Random Transform", + "ImageRemoveAlpha+": "🔧 Image Remove Alpha", + "ImageRemoveBackground+": "🔧 Image Remove Background", + "ImageResize+": "🔧 Image Resize", + "ImageSeamCarving+": "🔧 Image Seam Carving", + "ImageTile+": "🔧 Image Tile", + "ImageUntile+": "🔧 Image Untile", + "RemBGSession+": "🔧 RemBG Session", + "TransparentBGSession+": "🔧 InSPyReNet TransparentBG", + + # Image processing + "ImageApplyLUT+": "🔧 Image Apply LUT", + "ImageCASharpening+": "🔧 Image Contrast Adaptive Sharpening", + "ImageDesaturate+": "🔧 Image Desaturate", + 
"PixelOEPixelize+": "🔧 Pixelize", + "ImagePosterize+": "🔧 Image Posterize", + "ImageColorMatch+": "🔧 Image Color Match", + "ImageColorMatchAdobe+": "🔧 Image Color Match Adobe", + "ImageHistogramMatch+": "🔧 Image Histogram Match", + "ImageSmartSharpen+": "🔧 Image Smart Sharpen", + + # Utilities + "GetImageSize+": "🔧 Get Image Size", + "ImageToDevice+": "🔧 Image To Device", + "ImagePreviewFromLatent+": "🔧 Image Preview From Latent", + "NoiseFromImage+": "🔧 Noise From Image", +} diff --git a/ComfyUI_essentials/luts/put_luts_files_here.txt b/ComfyUI_essentials/luts/put_luts_files_here.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ComfyUI_essentials/mask.py b/ComfyUI_essentials/mask.py new file mode 100644 index 0000000000000000000000000000000000000000..5298d0f720da152109aa2398a58d654b349bc874 --- /dev/null +++ b/ComfyUI_essentials/mask.py @@ -0,0 +1,593 @@ +from nodes import SaveImage +import torch +import torchvision.transforms.v2 as T +import random +import folder_paths +import comfy.utils +from .image import ImageExpandBatch +import numpy as np +import scipy +from PIL import Image +from nodes import MAX_RESOLUTION +import math + +class MaskBlur: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mask": ("MASK",), + "amount": ("INT", { "default": 6, "min": 0, "max": 256, "step": 1, }), + "device": (["auto", "cpu", "gpu"],), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "essentials/mask" + + def execute(self, mask, amount, device): + if amount == 0: + return (mask,) + + if "gpu" == device: + mask = mask.to(comfy.model_management.get_torch_device()) + elif "cpu" == device: + mask = mask.to('cpu') + + if amount % 2 == 0: + amount+= 1 + + if mask.dim() == 2: + mask = mask.unsqueeze(0) + + mask = T.functional.gaussian_blur(mask.unsqueeze(1), amount).squeeze(1) + + if "gpu" == device or "cpu" == device: + mask = mask.to(comfy.model_management.intermediate_device()) + + return(mask,) + +class MaskFlip: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mask": ("MASK",), + "axis": (["x", "y", "xy"],), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "essentials/mask" + + def execute(self, mask, axis): + if mask.dim() == 2: + mask = mask.unsqueeze(0) + + dim = () + if "y" in axis: + dim += (1,) + if "x" in axis: + dim += (2,) + mask = torch.flip(mask, dims=dim) + + return(mask,) + +class MaskPreview(SaveImage): + def __init__(self): + self.output_dir = folder_paths.get_temp_directory() + self.type = "temp" + self.prefix_append = "_temp_" + ''.join(random.choice("abcdefghijklmnopqrstupvxyz") for x in range(5)) + self.compress_level = 4 + + @classmethod + def INPUT_TYPES(s): + return { + "required": {"mask": ("MASK",), }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, + } + + FUNCTION = "execute" + CATEGORY = "essentials/mask" + + def execute(self, mask, filename_prefix="ComfyUI", prompt=None, extra_pnginfo=None): + preview = mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])).movedim(1, -1).expand(-1, -1, -1, 3) + return self.save_images(preview, filename_prefix, prompt, extra_pnginfo) + +class MaskBatch: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mask1": ("MASK",), + "mask2": ("MASK",), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "essentials/mask batch" + + def execute(self, mask1, mask2): + if mask1.shape[1:] != mask2.shape[1:]: + mask2 = 
comfy.utils.common_upscale(mask2.unsqueeze(1).expand(-1,3,-1,-1), mask1.shape[2], mask1.shape[1], upscale_method='bicubic', crop='center')[:,0,:,:] + + return (torch.cat((mask1, mask2), dim=0),) + +class MaskExpandBatch: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mask": ("MASK",), + "size": ("INT", { "default": 16, "min": 1, "step": 1, }), + "method": (["expand", "repeat all", "repeat first", "repeat last"],) + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "essentials/mask batch" + + def execute(self, mask, size, method): + return (ImageExpandBatch().execute(mask.unsqueeze(1).expand(-1,3,-1,-1), size, method)[0][:,0,:,:],) + + +class MaskBoundingBox: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mask": ("MASK",), + "padding": ("INT", { "default": 0, "min": 0, "max": 4096, "step": 1, }), + "blur": ("INT", { "default": 0, "min": 0, "max": 256, "step": 1, }), + }, + "optional": { + "image_optional": ("IMAGE",), + } + } + + RETURN_TYPES = ("MASK", "IMAGE", "INT", "INT", "INT", "INT") + RETURN_NAMES = ("MASK", "IMAGE", "x", "y", "width", "height") + FUNCTION = "execute" + CATEGORY = "essentials/mask" + + def execute(self, mask, padding, blur, image_optional=None): + if mask.dim() == 2: + mask = mask.unsqueeze(0) + + if image_optional is None: + image_optional = mask.unsqueeze(3).repeat(1, 1, 1, 3) + + # resize the image if it's not the same size as the mask + if image_optional.shape[1:] != mask.shape[1:]: + image_optional = comfy.utils.common_upscale(image_optional.permute([0,3,1,2]), mask.shape[2], mask.shape[1], upscale_method='bicubic', crop='center').permute([0,2,3,1]) + + # match batch size + if image_optional.shape[0] < mask.shape[0]: + image_optional = torch.cat((image_optional, image_optional[-1].unsqueeze(0).repeat(mask.shape[0]-image_optional.shape[0], 1, 1, 1)), dim=0) + elif image_optional.shape[0] > mask.shape[0]: + image_optional = image_optional[:mask.shape[0]] + + # blur the mask + if blur > 0: + if blur % 2 == 0: + blur += 1 + mask = T.functional.gaussian_blur(mask.unsqueeze(1), blur).squeeze(1) + + _, y, x = torch.where(mask) + x1 = max(0, x.min().item() - padding) + x2 = min(mask.shape[2], x.max().item() + 1 + padding) + y1 = max(0, y.min().item() - padding) + y2 = min(mask.shape[1], y.max().item() + 1 + padding) + + # crop the mask + mask = mask[:, y1:y2, x1:x2] + image_optional = image_optional[:, y1:y2, x1:x2, :] + + return (mask, image_optional, x1, y1, x2 - x1, y2 - y1) + + +class MaskFromColor: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE", ), + "red": ("INT", { "default": 255, "min": 0, "max": 255, "step": 1, }), + "green": ("INT", { "default": 255, "min": 0, "max": 255, "step": 1, }), + "blue": ("INT", { "default": 255, "min": 0, "max": 255, "step": 1, }), + "threshold": ("INT", { "default": 0, "min": 0, "max": 127, "step": 1, }), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "essentials/mask" + + def execute(self, image, red, green, blue, threshold): + temp = (torch.clamp(image, 0, 1.0) * 255.0).round().to(torch.int) + color = torch.tensor([red, green, blue]) + lower_bound = (color - threshold).clamp(min=0) + upper_bound = (color + threshold).clamp(max=255) + lower_bound = lower_bound.view(1, 1, 1, 3) + upper_bound = upper_bound.view(1, 1, 1, 3) + mask = (temp >= lower_bound) & (temp <= upper_bound) + mask = mask.all(dim=-1) + mask = mask.float() + + return (mask, ) + + +class MaskFromSegmentation: + @classmethod + def INPUT_TYPES(s): 
+ return { + "required": { + "image": ("IMAGE", ), + "segments": ("INT", { "default": 6, "min": 1, "max": 16, "step": 1, }), + "remove_isolated_pixels": ("INT", { "default": 0, "min": 0, "max": 32, "step": 1, }), + "remove_small_masks": ("FLOAT", { "default": 0.0, "min": 0., "max": 1., "step": 0.01, }), + "fill_holes": ("BOOLEAN", { "default": False }), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "essentials/mask" + + def execute(self, image, segments, remove_isolated_pixels, fill_holes, remove_small_masks): + im = image[0] # we only work on the first image in the batch + im = Image.fromarray((im * 255).to(torch.uint8).cpu().numpy(), mode="RGB") + im = im.quantize(palette=im.quantize(colors=segments), dither=Image.Dither.NONE) + im = torch.tensor(np.array(im.convert("RGB"))).float() / 255.0 + + colors = im.reshape(-1, im.shape[-1]) + colors = torch.unique(colors, dim=0) + + masks = [] + for color in colors: + mask = (im == color).all(dim=-1).float() + # remove isolated pixels + if remove_isolated_pixels > 0: + mask = torch.from_numpy(scipy.ndimage.binary_opening(mask.cpu().numpy(), structure=np.ones((remove_isolated_pixels, remove_isolated_pixels)))) + + # fill holes + if fill_holes: + mask = torch.from_numpy(scipy.ndimage.binary_fill_holes(mask.cpu().numpy())) + + # if the mask is too small, it's probably noise + if mask.sum() / (mask.shape[0]*mask.shape[1]) > remove_small_masks: + masks.append(mask) + + if masks == []: + masks.append(torch.zeros_like(im)[:,:,0]) # return an empty mask if no masks were found, prevents errors + + mask = torch.stack(masks, dim=0).float() + + return (mask, ) + + +class MaskFix: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mask": ("MASK",), + "erode_dilate": ("INT", { "default": 0, "min": -256, "max": 256, "step": 1, }), + "fill_holes": ("INT", { "default": 0, "min": 0, "max": 128, "step": 1, }), + "remove_isolated_pixels": ("INT", { "default": 0, "min": 0, "max": 32, "step": 1, }), + "smooth": ("INT", { "default": 0, "min": 0, "max": 256, "step": 1, }), + "blur": ("INT", { "default": 0, "min": 0, "max": 256, "step": 1, }), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "essentials/mask" + + def execute(self, mask, erode_dilate, smooth, remove_isolated_pixels, blur, fill_holes): + masks = [] + for m in mask: + # erode and dilate + if erode_dilate != 0: + if erode_dilate < 0: + m = torch.from_numpy(scipy.ndimage.grey_erosion(m.cpu().numpy(), size=(-erode_dilate, -erode_dilate))) + else: + m = torch.from_numpy(scipy.ndimage.grey_dilation(m.cpu().numpy(), size=(erode_dilate, erode_dilate))) + + # fill holes + if fill_holes > 0: + #m = torch.from_numpy(scipy.ndimage.binary_fill_holes(m.cpu().numpy(), structure=np.ones((fill_holes,fill_holes)))).float() + m = torch.from_numpy(scipy.ndimage.grey_closing(m.cpu().numpy(), size=(fill_holes, fill_holes))) + + # remove isolated pixels + if remove_isolated_pixels > 0: + m = torch.from_numpy(scipy.ndimage.grey_opening(m.cpu().numpy(), size=(remove_isolated_pixels, remove_isolated_pixels))) + + # smooth the mask + if smooth > 0: + if smooth % 2 == 0: + smooth += 1 + m = T.functional.gaussian_blur((m > 0.5).unsqueeze(0), smooth).squeeze(0) + + # blur the mask + if blur > 0: + if blur % 2 == 0: + blur += 1 + m = T.functional.gaussian_blur(m.float().unsqueeze(0), blur).squeeze(0) + + masks.append(m.float()) + + masks = torch.stack(masks, dim=0).float() + + return (masks, ) + +class MaskSmooth: + @classmethod + def INPUT_TYPES(s): + return { + 
"required": { + "mask": ("MASK",), + "amount": ("INT", { "default": 0, "min": 0, "max": 127, "step": 1, }), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "essentials/mask" + + def execute(self, mask, amount): + if amount == 0: + return (mask,) + + if amount % 2 == 0: + amount += 1 + + mask = mask > 0.5 + mask = T.functional.gaussian_blur(mask.unsqueeze(1), amount).squeeze(1).float() + + return (mask,) + +class MaskFromBatch: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mask": ("MASK", ), + "start": ("INT", { "default": 0, "min": 0, "step": 1, }), + "length": ("INT", { "default": 1, "min": 1, "step": 1, }), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "essentials/mask batch" + + def execute(self, mask, start, length): + if length > mask.shape[0]: + length = mask.shape[0] + + start = min(start, mask.shape[0]-1) + length = min(mask.shape[0]-start, length) + return (mask[start:start + length], ) + +class MaskFromList: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "width": ("INT", { "default": 32, "min": 0, "max": MAX_RESOLUTION, "step": 8, }), + "height": ("INT", { "default": 32, "min": 0, "max": MAX_RESOLUTION, "step": 8, }), + }, "optional": { + "values": ("INT,FLOAT", { "default": 0.0, "min": 0.0, "max": 1.0, }), + "str_values": ("STRING", { "default": "", "multiline": True, "placeholder": "0.0, 0.5, 1.0",}), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "essentials/mask" + + def execute(self, width, height, values=None, str_values=""): + out = [] + + if values is not None: + if not isinstance(values, list): + out = [values] + else: + out.extend(values) + + if str_values != "": + str_values = [float(v) for v in str_values.split(",")] + out.extend(str_values) + + if out == []: + raise ValueError("No values provided") + + out = torch.tensor(out).float().clamp(0.0, 1.0) + out = out.view(-1, 1, 1).expand(-1, height, width) + + values = None + str_values = "" + + return (out, ) + +class MaskFromRGBCMYBW: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE", ), + "threshold_r": ("FLOAT", { "default": 0.15, "min": 0.0, "max": 1, "step": 0.01, }), + "threshold_g": ("FLOAT", { "default": 0.15, "min": 0.0, "max": 1, "step": 0.01, }), + "threshold_b": ("FLOAT", { "default": 0.15, "min": 0.0, "max": 1, "step": 0.01, }), + } + } + + RETURN_TYPES = ("MASK","MASK","MASK","MASK","MASK","MASK","MASK","MASK",) + RETURN_NAMES = ("red","green","blue","cyan","magenta","yellow","black","white",) + FUNCTION = "execute" + CATEGORY = "essentials/mask" + + def execute(self, image, threshold_r, threshold_g, threshold_b): + red = ((image[..., 0] >= 1-threshold_r) & (image[..., 1] < threshold_g) & (image[..., 2] < threshold_b)).float() + green = ((image[..., 0] < threshold_r) & (image[..., 1] >= 1-threshold_g) & (image[..., 2] < threshold_b)).float() + blue = ((image[..., 0] < threshold_r) & (image[..., 1] < threshold_g) & (image[..., 2] >= 1-threshold_b)).float() + + cyan = ((image[..., 0] < threshold_r) & (image[..., 1] >= 1-threshold_g) & (image[..., 2] >= 1-threshold_b)).float() + magenta = ((image[..., 0] >= 1-threshold_r) & (image[..., 1] < threshold_g) & (image[..., 2] > 1-threshold_b)).float() + yellow = ((image[..., 0] >= 1-threshold_r) & (image[..., 1] >= 1-threshold_g) & (image[..., 2] < threshold_b)).float() + + black = ((image[..., 0] <= threshold_r) & (image[..., 1] <= threshold_g) & (image[..., 2] <= threshold_b)).float() + white = ((image[..., 0] >= 
1-threshold_r) & (image[..., 1] >= 1-threshold_g) & (image[..., 2] >= 1-threshold_b)).float() + + return (red, green, blue, cyan, magenta, yellow, black, white,) + +class TransitionMask: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "width": ("INT", { "default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1, }), + "height": ("INT", { "default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1, }), + "frames": ("INT", { "default": 16, "min": 1, "max": 9999, "step": 1, }), + "start_frame": ("INT", { "default": 0, "min": 0, "step": 1, }), + "end_frame": ("INT", { "default": 9999, "min": 0, "step": 1, }), + "transition_type": (["horizontal slide", "vertical slide", "horizontal bar", "vertical bar", "center box", "horizontal door", "vertical door", "circle", "fade"],), + "timing_function": (["linear", "in", "out", "in-out"],) + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "essentials/mask" + + def linear(self, i, t): + return i/t + def ease_in(self, i, t): + return pow(i/t, 2) + def ease_out(self, i, t): + return 1 - pow(1 - i/t, 2) + def ease_in_out(self, i, t): + if i < t/2: + return pow(i/(t/2), 2) / 2 + else: + return 1 - pow(1 - (i - t/2)/(t/2), 2) / 2 + + def execute(self, width, height, frames, start_frame, end_frame, transition_type, timing_function): + if timing_function == 'in': + timing_function = self.ease_in + elif timing_function == 'out': + timing_function = self.ease_out + elif timing_function == 'in-out': + timing_function = self.ease_in_out + else: + timing_function = self.linear + + out = [] + + end_frame = min(frames, end_frame) + transition = end_frame - start_frame + + if start_frame > 0: + out = out + [torch.full((height, width), 0.0, dtype=torch.float32, device="cpu")] * start_frame + + for i in range(transition): + frame = torch.full((height, width), 0.0, dtype=torch.float32, device="cpu") + progress = timing_function(i, transition-1) + + if "horizontal slide" in transition_type: + pos = round(width*progress) + frame[:, :pos] = 1.0 + elif "vertical slide" in transition_type: + pos = round(height*progress) + frame[:pos, :] = 1.0 + elif "box" in transition_type: + box_w = round(width*progress) + box_h = round(height*progress) + x1 = (width - box_w) // 2 + y1 = (height - box_h) // 2 + x2 = x1 + box_w + y2 = y1 + box_h + frame[y1:y2, x1:x2] = 1.0 + elif "circle" in transition_type: + radius = math.ceil(math.sqrt(pow(width,2)+pow(height,2))*progress/2) + c_x = width // 2 + c_y = height // 2 + # is this real life? Am I hallucinating? 
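+ # the radius grows to half of the frame diagonal, so the circle covers the whole
+ # frame at the end of the transition; a coordinate grid marks every pixel that
+ # falls within the current radius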
+ x = torch.arange(0, width, dtype=torch.float32, device="cpu") + y = torch.arange(0, height, dtype=torch.float32, device="cpu") + y, x = torch.meshgrid((y, x), indexing="ij") + circle = ((x - c_x) ** 2 + (y - c_y) ** 2) <= (radius ** 2) + frame[circle] = 1.0 + elif "horizontal bar" in transition_type: + bar = round(height*progress) + y1 = (height - bar) // 2 + y2 = y1 + bar + frame[y1:y2, :] = 1.0 + elif "vertical bar" in transition_type: + bar = round(width*progress) + x1 = (width - bar) // 2 + x2 = x1 + bar + frame[:, x1:x2] = 1.0 + elif "horizontal door" in transition_type: + bar = math.ceil(height*progress/2) + if bar > 0: + frame[:bar, :] = 1.0 + frame[-bar:, :] = 1.0 + elif "vertical door" in transition_type: + bar = math.ceil(width*progress/2) + if bar > 0: + frame[:, :bar] = 1.0 + frame[:, -bar:] = 1.0 + elif "fade" in transition_type: + frame[:,:] = progress + + out.append(frame) + + if end_frame < frames: + out = out + [torch.full((height, width), 1.0, dtype=torch.float32, device="cpu")] * (frames - end_frame) + + out = torch.stack(out, dim=0) + + return (out, ) + +MASK_CLASS_MAPPINGS = { + "MaskBlur+": MaskBlur, + "MaskBoundingBox+": MaskBoundingBox, + "MaskFix+": MaskFix, + "MaskFlip+": MaskFlip, + "MaskFromColor+": MaskFromColor, + "MaskFromList+": MaskFromList, + "MaskFromRGBCMYBW+": MaskFromRGBCMYBW, + "MaskFromSegmentation+": MaskFromSegmentation, + "MaskPreview+": MaskPreview, + "MaskSmooth+": MaskSmooth, + "TransitionMask+": TransitionMask, + + # Batch + "MaskBatch+": MaskBatch, + "MaskExpandBatch+": MaskExpandBatch, + "MaskFromBatch+": MaskFromBatch, +} + +MASK_NAME_MAPPINGS = { + "MaskBlur+": "🔧 Mask Blur", + "MaskFix+": "🔧 Mask Fix", + "MaskFlip+": "🔧 Mask Flip", + "MaskFromColor+": "🔧 Mask From Color", + "MaskFromList+": "🔧 Mask From List", + "MaskFromRGBCMYBW+": "🔧 Mask From RGB/CMY/BW", + "MaskFromSegmentation+": "🔧 Mask From Segmentation", + "MaskPreview+": "🔧 Mask Preview", + "MaskBoundingBox+": "🔧 Mask Bounding Box", + "MaskSmooth+": "🔧 Mask Smooth", + "TransitionMask+": "🔧 Transition Mask", + + "MaskBatch+": "🔧 Mask Batch", + "MaskExpandBatch+": "🔧 Mask Expand Batch", + "MaskFromBatch+": "🔧 Mask From Batch", +} diff --git a/ComfyUI_essentials/misc.py b/ComfyUI_essentials/misc.py new file mode 100644 index 0000000000000000000000000000000000000000..987c606ceb40b05e763c63fc1cd695639a55f9e5 --- /dev/null +++ b/ComfyUI_essentials/misc.py @@ -0,0 +1,528 @@ +import math +import torch +from .utils import AnyType +import comfy.model_management +from nodes import MAX_RESOLUTION + +any = AnyType("*") + +class SimpleMathFloat: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "value": ("FLOAT", { "default": 0.0, "min": -0xffffffffffffffff, "max": 0xffffffffffffffff, "step": 0.05 }), + }, + } + + RETURN_TYPES = ("FLOAT", ) + FUNCTION = "execute" + CATEGORY = "essentials/utilities" + + def execute(self, value): + return (float(value), ) + +class SimpleMathPercent: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "value": ("FLOAT", { "default": 0.0, "min": 0, "max": 1, "step": 0.05 }), + }, + } + + RETURN_TYPES = ("FLOAT", ) + FUNCTION = "execute" + CATEGORY = "essentials/utilities" + + def execute(self, value): + return (float(value), ) + +class SimpleMathInt: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "value": ("INT", { "default": 0, "min": -0xffffffffffffffff, "max": 0xffffffffffffffff, "step": 1 }), + }, + } + + RETURN_TYPES = ("INT",) + FUNCTION = "execute" + CATEGORY = "essentials/utilities" + + def execute(self, 
value): + return (int(value), ) + +class SimpleMathSlider: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "value": ("FLOAT", { "display": "slider", "default": 0.5, "min": 0.0, "max": 1.0, "step": 0.001 }), + "min": ("FLOAT", { "default": 0.0, "min": -0xffffffffffffffff, "max": 0xffffffffffffffff, "step": 0.001 }), + "max": ("FLOAT", { "default": 1.0, "min": -0xffffffffffffffff, "max": 0xffffffffffffffff, "step": 0.001 }), + "rounding": ("INT", { "default": 0, "min": 0, "max": 10, "step": 1 }), + }, + } + + RETURN_TYPES = ("FLOAT", "INT",) + FUNCTION = "execute" + CATEGORY = "essentials/utilities" + + def execute(self, value, min, max, rounding): + value = min + value * (max - min) + + if rounding > 0: + value = round(value, rounding) + + return (value, int(value), ) + +class SimpleMathSliderLowRes: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "value": ("INT", { "display": "slider", "default": 5, "min": 0, "max": 10, "step": 1 }), + "min": ("FLOAT", { "default": 0.0, "min": -0xffffffffffffffff, "max": 0xffffffffffffffff, "step": 0.001 }), + "max": ("FLOAT", { "default": 1.0, "min": -0xffffffffffffffff, "max": 0xffffffffffffffff, "step": 0.001 }), + "rounding": ("INT", { "default": 0, "min": 0, "max": 10, "step": 1 }), + }, + } + + RETURN_TYPES = ("FLOAT", "INT",) + FUNCTION = "execute" + CATEGORY = "essentials/utilities" + + def execute(self, value, min, max, rounding): + value = 0.1 * value + value = min + value * (max - min) + if rounding > 0: + value = round(value, rounding) + + return (value, ) + +class SimpleMathBoolean: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "value": ("BOOLEAN", { "default": False }), + }, + } + + RETURN_TYPES = ("BOOLEAN",) + FUNCTION = "execute" + CATEGORY = "essentials/utilities" + + def execute(self, value): + return (value, int(value), ) + +class SimpleMath: + @classmethod + def INPUT_TYPES(s): + return { + "optional": { + "a": (any, { "default": 0.0 }), + "b": (any, { "default": 0.0 }), + "c": (any, { "default": 0.0 }), + }, + "required": { + "value": ("STRING", { "multiline": False, "default": "" }), + }, + } + + RETURN_TYPES = ("INT", "FLOAT", ) + FUNCTION = "execute" + CATEGORY = "essentials/utilities" + + def execute(self, value, a = 0.0, b = 0.0, c = 0.0, d = 0.0): + import ast + import operator as op + + h, w = 0.0, 0.0 + if hasattr(a, 'shape'): + a = list(a.shape) + if hasattr(b, 'shape'): + b = list(b.shape) + if hasattr(c, 'shape'): + c = list(c.shape) + if hasattr(d, 'shape'): + d = list(d.shape) + + if isinstance(a, str): + a = float(a) + if isinstance(b, str): + b = float(b) + if isinstance(c, str): + c = float(c) + if isinstance(d, str): + d = float(d) + + operators = { + ast.Add: op.add, + ast.Sub: op.sub, + ast.Mult: op.mul, + ast.Div: op.truediv, + ast.FloorDiv: op.floordiv, + ast.Pow: op.pow, + #ast.BitXor: op.xor, + #ast.BitOr: op.or_, + #ast.BitAnd: op.and_, + ast.USub: op.neg, + ast.Mod: op.mod, + ast.Eq: op.eq, + ast.NotEq: op.ne, + ast.Lt: op.lt, + ast.LtE: op.le, + ast.Gt: op.gt, + ast.GtE: op.ge, + ast.And: lambda x, y: x and y, + ast.Or: lambda x, y: x or y, + ast.Not: op.not_ + } + + op_functions = { + 'min': min, + 'max': max, + 'round': round, + 'sum': sum, + 'len': len, + } + + def eval_(node): + if isinstance(node, ast.Num): # number + return node.n + elif isinstance(node, ast.Name): # variable + if node.id == "a": + return a + if node.id == "b": + return b + if node.id == "c": + return c + if node.id == "d": + return d + elif isinstance(node, ast.BinOp): # + return 
operators[type(node.op)](eval_(node.left), eval_(node.right)) + elif isinstance(node, ast.UnaryOp): # e.g., -1 + return operators[type(node.op)](eval_(node.operand)) + elif isinstance(node, ast.Compare): # comparison operators + left = eval_(node.left) + for op, comparator in zip(node.ops, node.comparators): + if not operators[type(op)](left, eval_(comparator)): + return 0 + return 1 + elif isinstance(node, ast.BoolOp): # boolean operators (And, Or) + values = [eval_(value) for value in node.values] + return operators[type(node.op)](*values) + elif isinstance(node, ast.Call): # custom function + if node.func.id in op_functions: + args =[eval_(arg) for arg in node.args] + return op_functions[node.func.id](*args) + elif isinstance(node, ast.Subscript): # indexing or slicing + value = eval_(node.value) + if isinstance(node.slice, ast.Constant): + return value[node.slice.value] + else: + return 0 + else: + return 0 + + result = eval_(ast.parse(value, mode='eval').body) + + if math.isnan(result): + result = 0.0 + + return (round(result), result, ) + +class SimpleMathDual: + @classmethod + def INPUT_TYPES(s): + return { + "optional": { + "a": (any, { "default": 0.0 }), + "b": (any, { "default": 0.0 }), + "c": (any, { "default": 0.0 }), + "d": (any, { "default": 0.0 }), + }, + "required": { + "value_1": ("STRING", { "multiline": False, "default": "" }), + "value_2": ("STRING", { "multiline": False, "default": "" }), + }, + } + + RETURN_TYPES = ("INT", "FLOAT", "INT", "FLOAT", ) + RETURN_NAMES = ("int_1", "float_1", "int_2", "float_2" ) + FUNCTION = "execute" + CATEGORY = "essentials/utilities" + + def execute(self, value_1, value_2, a = 0.0, b = 0.0, c = 0.0, d = 0.0): + return SimpleMath().execute(value_1, a, b, c, d) + SimpleMath().execute(value_2, a, b, c, d) + +class SimpleMathCondition: + @classmethod + def INPUT_TYPES(s): + return { + "optional": { + "a": (any, { "default": 0.0 }), + "b": (any, { "default": 0.0 }), + "c": (any, { "default": 0.0 }), + }, + "required": { + "evaluate": (any, {"default": 0}), + "on_true": ("STRING", { "multiline": False, "default": "" }), + "on_false": ("STRING", { "multiline": False, "default": "" }), + }, + } + + RETURN_TYPES = ("INT", "FLOAT", ) + FUNCTION = "execute" + CATEGORY = "essentials/utilities" + + def execute(self, evaluate, on_true, on_false, a = 0.0, b = 0.0, c = 0.0): + return SimpleMath().execute(on_true if evaluate else on_false, a, b, c) + +class SimpleCondition: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "evaluate": (any, {"default": 0}), + "on_true": (any, {"default": 0}), + }, + "optional": { + "on_false": (any, {"default": None}), + }, + } + + RETURN_TYPES = (any,) + RETURN_NAMES = ("result",) + FUNCTION = "execute" + + CATEGORY = "essentials/utilities" + + def execute(self, evaluate, on_true, on_false=None): + from comfy_execution.graph import ExecutionBlocker + if not evaluate: + return (on_false if on_false is not None else ExecutionBlocker(None),) + + return (on_true,) + +class SimpleComparison: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "a": (any, {"default": 0}), + "b": (any, {"default": 0}), + "comparison": (["==", "!=", "<", "<=", ">", ">="],), + }, + } + + RETURN_TYPES = ("BOOLEAN",) + FUNCTION = "execute" + + CATEGORY = "essentials/utilities" + + def execute(self, a, b, comparison): + if comparison == "==": + return (a == b,) + elif comparison == "!=": + return (a != b,) + elif comparison == "<": + return (a < b,) + 
elif comparison == "<=": + return (a <= b,) + elif comparison == ">": + return (a > b,) + elif comparison == ">=": + return (a >= b,) + +class ConsoleDebug: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "value": (any, {}), + }, + "optional": { + "prefix": ("STRING", { "multiline": False, "default": "Value:" }) + } + } + + RETURN_TYPES = () + FUNCTION = "execute" + CATEGORY = "essentials/utilities" + OUTPUT_NODE = True + + def execute(self, value, prefix): + print(f"\033[96m{prefix} {value}\033[0m") + + return (None,) + +class DebugTensorShape: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "tensor": (any, {}), + }, + } + + RETURN_TYPES = () + FUNCTION = "execute" + CATEGORY = "essentials/utilities" + OUTPUT_NODE = True + + def execute(self, tensor): + shapes = [] + def tensorShape(tensor): + if isinstance(tensor, dict): + for k in tensor: + tensorShape(tensor[k]) + elif isinstance(tensor, list): + for i in range(len(tensor)): + tensorShape(tensor[i]) + elif hasattr(tensor, 'shape'): + shapes.append(list(tensor.shape)) + + tensorShape(tensor) + + print(f"\033[96mShapes found: {shapes}\033[0m") + + return (None,) + +class BatchCount: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "batch": (any, {}), + }, + } + + RETURN_TYPES = ("INT",) + FUNCTION = "execute" + CATEGORY = "essentials/utilities" + + def execute(self, batch): + count = 0 + if hasattr(batch, 'shape'): + count = batch.shape[0] + elif isinstance(batch, dict) and 'samples' in batch: + count = batch['samples'].shape[0] + elif isinstance(batch, list) or isinstance(batch, dict): + count = len(batch) + + return (count, ) + +class ModelCompile(): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL",), + "fullgraph": ("BOOLEAN", { "default": False }), + "dynamic": ("BOOLEAN", { "default": False }), + "mode": (["default", "reduce-overhead", "max-autotune", "max-autotune-no-cudagraphs"],), + }, + } + + RETURN_TYPES = ("MODEL", ) + FUNCTION = "execute" + CATEGORY = "essentials/utilities" + + def execute(self, model, fullgraph, dynamic, mode): + work_model = model.clone() + torch._dynamo.config.suppress_errors = True + work_model.model.diffusion_model = torch.compile(work_model.model.diffusion_model, dynamic=dynamic, fullgraph=fullgraph, mode=mode) + return (work_model, ) + +class RemoveLatentMask: + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples": ("LATENT",),}} + RETURN_TYPES = ("LATENT",) + FUNCTION = "execute" + + CATEGORY = "essentials/utilities" + + def execute(self, samples): + s = samples.copy() + if "noise_mask" in s: + del s["noise_mask"] + + return (s,) + +class SDXLEmptyLatentSizePicker: + def __init__(self): + self.device = comfy.model_management.intermediate_device() + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "resolution": (["704x1408 (0.5)","704x1344 (0.52)","768x1344 (0.57)","768x1280 (0.6)","832x1216 (0.68)","832x1152 (0.72)","896x1152 (0.78)","896x1088 (0.82)","960x1088 (0.88)","960x1024 (0.94)","1024x1024 (1.0)","1024x960 (1.07)","1088x960 (1.13)","1088x896 (1.21)","1152x896 (1.29)","1152x832 (1.38)","1216x832 (1.46)","1280x768 (1.67)","1344x768 (1.75)","1344x704 (1.91)","1408x704 (2.0)","1472x704 (2.09)","1536x640 (2.4)","1600x640 (2.5)","1664x576 (2.89)","1728x576 (3.0)",], {"default": "1024x1024 (1.0)"}), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096}), + "width_override": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + "height_override": ("INT", 
{"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + }} + + RETURN_TYPES = ("LATENT","INT","INT",) + RETURN_NAMES = ("LATENT","width","height",) + FUNCTION = "execute" + CATEGORY = "essentials/utilities" + + def execute(self, resolution, batch_size, width_override=0, height_override=0): + width, height = resolution.split(" ")[0].split("x") + width = width_override if width_override > 0 else int(width) + height = height_override if height_override > 0 else int(height) + + latent = torch.zeros([batch_size, 4, height // 8, width // 8], device=self.device) + + return ({"samples":latent}, width, height,) + +MISC_CLASS_MAPPINGS = { + "BatchCount+": BatchCount, + "ConsoleDebug+": ConsoleDebug, + "DebugTensorShape+": DebugTensorShape, + "ModelCompile+": ModelCompile, + "RemoveLatentMask+": RemoveLatentMask, + "SDXLEmptyLatentSizePicker+": SDXLEmptyLatentSizePicker, + "SimpleComparison+": SimpleComparison, + "SimpleCondition+": SimpleCondition, + "SimpleMath+": SimpleMath, + "SimpleMathDual+": SimpleMathDual, + "SimpleMathCondition+": SimpleMathCondition, + "SimpleMathBoolean+": SimpleMathBoolean, + "SimpleMathFloat+": SimpleMathFloat, + "SimpleMathInt+": SimpleMathInt, + "SimpleMathPercent+": SimpleMathPercent, + "SimpleMathSlider+": SimpleMathSlider, + "SimpleMathSliderLowRes+": SimpleMathSliderLowRes, +} + +MISC_NAME_MAPPINGS = { + "BatchCount+": "🔧 Batch Count", + "ConsoleDebug+": "🔧 Console Debug", + "DebugTensorShape+": "🔧 Debug Tensor Shape", + "ModelCompile+": "🔧 Model Compile", + "RemoveLatentMask+": "🔧 Remove Latent Mask", + "SDXLEmptyLatentSizePicker+": "🔧 Empty Latent Size Picker", + "SimpleComparison+": "🔧 Simple Comparison", + "SimpleCondition+": "🔧 Simple Condition", + "SimpleMath+": "🔧 Simple Math", + "SimpleMathDual+": "🔧 Simple Math Dual", + "SimpleMathCondition+": "🔧 Simple Math Condition", + "SimpleMathBoolean+": "🔧 Simple Math Boolean", + "SimpleMathFloat+": "🔧 Simple Math Float", + "SimpleMathInt+": "🔧 Simple Math Int", + "SimpleMathPercent+": "🔧 Simple Math Percent", + "SimpleMathSlider+": "🔧 Simple Math Slider", + "SimpleMathSliderLowRes+": "🔧 Simple Math Slider low-res", +} \ No newline at end of file diff --git a/ComfyUI_essentials/pyproject.toml b/ComfyUI_essentials/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..affa1310b05b6f70f302a84771918984499defae --- /dev/null +++ b/ComfyUI_essentials/pyproject.toml @@ -0,0 +1,15 @@ +[project] +name = "comfyui_essentials" +description = "Essential nodes that are weirdly missing from ComfyUI core. With few exceptions they are new features and not commodities." 
+version = "1.1.0" +license = { file = "LICENSE" } +dependencies = ["numba", "colour-science", "rembg", "pixeloe"] + +[project.urls] +Repository = "https://github.com/cubiq/ComfyUI_essentials" +# Used by Comfy Registry https://comfyregistry.org + +[tool.comfy] +PublisherId = "matteo" +DisplayName = "ComfyUI_essentials" +Icon = "" diff --git a/ComfyUI_essentials/requirements.txt b/ComfyUI_essentials/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..ca6bb9baa5e2f119c4c286cbb193cf7c4b72ceb2 --- /dev/null +++ b/ComfyUI_essentials/requirements.txt @@ -0,0 +1,5 @@ +numba +colour-science +rembg +pixeloe +transparent-background \ No newline at end of file diff --git a/ComfyUI_essentials/sampling.py b/ComfyUI_essentials/sampling.py new file mode 100644 index 0000000000000000000000000000000000000000..9970804f57768a9d112055a88d0fe235d084d025 --- /dev/null +++ b/ComfyUI_essentials/sampling.py @@ -0,0 +1,607 @@ +import os +import comfy.samplers +import comfy.sample +import torch +from nodes import common_ksampler, CLIPTextEncode +from comfy.utils import ProgressBar +from .utils import expand_mask, FONTS_DIR, parse_string_to_list +import torchvision.transforms.v2 as T +import torch.nn.functional as F +import logging + +# From https://github.com/BlenderNeko/ComfyUI_Noise/ +def slerp(val, low, high): + dims = low.shape + + low = low.reshape(dims[0], -1) + high = high.reshape(dims[0], -1) + + low_norm = low/torch.norm(low, dim=1, keepdim=True) + high_norm = high/torch.norm(high, dim=1, keepdim=True) + + low_norm[low_norm != low_norm] = 0.0 + high_norm[high_norm != high_norm] = 0.0 + + omega = torch.acos((low_norm*high_norm).sum(1)) + so = torch.sin(omega) + res = (torch.sin((1.0-val)*omega)/so).unsqueeze(1)*low + (torch.sin(val*omega)/so).unsqueeze(1) * high + + return res.reshape(dims) + +class KSamplerVariationsWithNoise: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "model": ("MODEL", ), + "latent_image": ("LATENT", ), + "main_seed": ("INT:seed", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0, "step":0.1, "round": 0.01}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS, ), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS, ), + "positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "variation_strength": ("FLOAT", {"default": 0.17, "min": 0.0, "max": 1.0, "step":0.01, "round": 0.01}), + #"start_at_step": ("INT", {"default": 0, "min": 0, "max": 10000}), + #"end_at_step": ("INT", {"default": 10000, "min": 0, "max": 10000}), + #"return_with_leftover_noise": (["disable", "enable"], ), + "variation_seed": ("INT:seed", {"default": 12345, "min": 0, "max": 0xffffffffffffffff}), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step":0.01, "round": 0.01}), + }} + + RETURN_TYPES = ("LATENT",) + FUNCTION = "execute" + CATEGORY = "essentials/sampling" + + def prepare_mask(self, mask, shape): + mask = torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(shape[2], shape[3]), mode="bilinear") + mask = mask.expand((-1,shape[1],-1,-1)) + if mask.shape[0] < shape[0]: + mask = mask.repeat((shape[0] -1) // mask.shape[0] + 1, 1, 1, 1)[:shape[0]] + return mask + + def execute(self, model, latent_image, main_seed, steps, cfg, sampler_name, scheduler, positive, negative, variation_strength, variation_seed, denoise): + if main_seed == variation_seed: + variation_seed += 1 + + 
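+ # the block below re-noises the latent manually: base noise (main_seed) and variation
+ # noise (variation_seed) are slerp-blended by variation_strength, scaled by the sigma
+ # gap between the derived start/end steps, added to the latent, and then sampled with
+ # noise generation disabled so only the injected variation noise gets denoised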
end_at_step = steps #min(steps, end_at_step) + start_at_step = round(end_at_step - end_at_step * denoise) + + force_full_denoise = True + disable_noise = True + + device = comfy.model_management.get_torch_device() + + # Generate base noise + batch_size, _, height, width = latent_image["samples"].shape + generator = torch.manual_seed(main_seed) + base_noise = torch.randn((1, 4, height, width), dtype=torch.float32, device="cpu", generator=generator).repeat(batch_size, 1, 1, 1).cpu() + + # Generate variation noise + generator = torch.manual_seed(variation_seed) + variation_noise = torch.randn((batch_size, 4, height, width), dtype=torch.float32, device="cpu", generator=generator).cpu() + + slerp_noise = slerp(variation_strength, base_noise, variation_noise) + + # Calculate sigma + comfy.model_management.load_model_gpu(model) + sampler = comfy.samplers.KSampler(model, steps=steps, device=device, sampler=sampler_name, scheduler=scheduler, denoise=1.0, model_options=model.model_options) + sigmas = sampler.sigmas + sigma = sigmas[start_at_step] - sigmas[end_at_step] + sigma /= model.model.latent_format.scale_factor + sigma = sigma.detach().cpu().item() + + work_latent = latent_image.copy() + work_latent["samples"] = latent_image["samples"].clone() + slerp_noise * sigma + + # if there's a mask we need to expand it to avoid artifacts, 5 pixels should be enough + if "noise_mask" in latent_image: + noise_mask = self.prepare_mask(latent_image["noise_mask"], latent_image['samples'].shape) + work_latent["samples"] = noise_mask * work_latent["samples"] + (1-noise_mask) * latent_image["samples"] + work_latent['noise_mask'] = expand_mask(latent_image["noise_mask"].clone(), 5, True) + + return common_ksampler(model, main_seed, steps, cfg, sampler_name, scheduler, positive, negative, work_latent, denoise=1.0, disable_noise=disable_noise, start_step=start_at_step, last_step=end_at_step, force_full_denoise=force_full_denoise) + + +class KSamplerVariationsStochastic: + @classmethod + def INPUT_TYPES(s): + return {"required":{ + "model": ("MODEL",), + "latent_image": ("LATENT", ), + "noise_seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 25, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 7.0, "min": 0.0, "max": 100.0, "step":0.1, "round": 0.01}), + "sampler": (comfy.samplers.KSampler.SAMPLERS, ), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS, ), + "positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "variation_seed": ("INT:seed", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "variation_strength": ("FLOAT", {"default": 0.2, "min": 0.0, "max": 1.0, "step":0.05, "round": 0.01}), + #"variation_sampler": (comfy.samplers.KSampler.SAMPLERS, ), + "cfg_scale": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step":0.05, "round": 0.01}), + }} + + RETURN_TYPES = ("LATENT", ) + FUNCTION = "execute" + CATEGORY = "essentials/sampling" + + def execute(self, model, latent_image, noise_seed, steps, cfg, sampler, scheduler, positive, negative, variation_seed, variation_strength, cfg_scale, variation_sampler="dpmpp_2m_sde"): + # Stage 1: composition sampler + force_full_denoise = False # return with leftover noise = "enable" + disable_noise = False # add noise = "enable" + + end_at_step = max(int(steps * (1-variation_strength)), 1) + start_at_step = 0 + + work_latent = latent_image.copy() + batch_size = work_latent["samples"].shape[0] + work_latent["samples"] = work_latent["samples"][0].unsqueeze(0) + + stage1 = common_ksampler(model, 
noise_seed, steps, cfg, sampler, scheduler, positive, negative, work_latent, denoise=1.0, disable_noise=disable_noise, start_step=start_at_step, last_step=end_at_step, force_full_denoise=force_full_denoise)[0] + + if batch_size > 1: + stage1["samples"] = stage1["samples"].clone().repeat(batch_size, 1, 1, 1) + + # Stage 2: variation sampler + force_full_denoise = True + disable_noise = True + cfg = max(cfg * cfg_scale, 1.0) + start_at_step = end_at_step + end_at_step = steps + + return common_ksampler(model, variation_seed, steps, cfg, variation_sampler, scheduler, positive, negative, stage1, denoise=1.0, disable_noise=disable_noise, start_step=start_at_step, last_step=end_at_step, force_full_denoise=force_full_denoise) + +class InjectLatentNoise: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "latent": ("LATENT", ), + "noise_seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "noise_strength": ("FLOAT", {"default": 1.0, "min": -20.0, "max": 20.0, "step":0.01, "round": 0.01}), + "normalize": (["false", "true"], {"default": "false"}), + }, + "optional": { + "mask": ("MASK", ), + }} + + RETURN_TYPES = ("LATENT",) + FUNCTION = "execute" + CATEGORY = "essentials/sampling" + + def execute(self, latent, noise_seed, noise_strength, normalize="false", mask=None): + torch.manual_seed(noise_seed) + noise_latent = latent.copy() + original_samples = noise_latent["samples"].clone() + random_noise = torch.randn_like(original_samples) + + if normalize == "true": + mean = original_samples.mean() + std = original_samples.std() + random_noise = random_noise * std + mean + + random_noise = original_samples + random_noise * noise_strength + + if mask is not None: + mask = F.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(random_noise.shape[2], random_noise.shape[3]), mode="bilinear") + mask = mask.expand((-1,random_noise.shape[1],-1,-1)).clamp(0.0, 1.0) + if mask.shape[0] < random_noise.shape[0]: + mask = mask.repeat((random_noise.shape[0] -1) // mask.shape[0] + 1, 1, 1, 1)[:random_noise.shape[0]] + elif mask.shape[0] > random_noise.shape[0]: + mask = mask[:random_noise.shape[0]] + random_noise = mask * random_noise + (1-mask) * original_samples + + noise_latent["samples"] = random_noise + + return (noise_latent, ) + +class TextEncodeForSamplerParams: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "text": ("STRING", {"multiline": True, "dynamicPrompts": True, "default": "Separate prompts with at least three dashes\n---\nLike so"}), + "clip": ("CLIP", ) + }} + + RETURN_TYPES = ("CONDITIONING", ) + FUNCTION = "execute" + CATEGORY = "essentials/sampling" + + def execute(self, text, clip): + import re + output_text = [] + output_encoded = [] + text = re.sub(r'[-*=~]{4,}\n', '---\n', text) + text = text.split("---\n") + + for t in text: + t = t.strip() + if t: + output_text.append(t) + output_encoded.append(CLIPTextEncode().encode(clip, t)[0]) + + #if len(output_encoded) == 1: + # output = output_encoded[0] + #else: + output = {"text": output_text, "encoded": output_encoded} + + return (output, ) + +class SamplerSelectHelper: + @classmethod + def INPUT_TYPES(s): + return {"required": { + **{s: ("BOOLEAN", { "default": False }) for s in comfy.samplers.KSampler.SAMPLERS}, + }} + + RETURN_TYPES = ("STRING", ) + FUNCTION = "execute" + CATEGORY = "essentials/sampling" + + def execute(self, **values): + values = [v for v in values if values[v]] + values = ", ".join(values) + + return (values, ) + +class SchedulerSelectHelper: + @classmethod + 
def INPUT_TYPES(s): + return {"required": { + **{s: ("BOOLEAN", { "default": False }) for s in comfy.samplers.KSampler.SCHEDULERS}, + }} + + RETURN_TYPES = ("STRING", ) + FUNCTION = "execute" + CATEGORY = "essentials/sampling" + + def execute(self, **values): + values = [v for v in values if values[v]] + values = ", ".join(values) + + return (values, ) + +class FluxSamplerParams: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "model": ("MODEL", ), + "conditioning": ("CONDITIONING", ), + "latent_image": ("LATENT", ), + + "seed": ("STRING", { "multiline": False, "dynamicPrompts": False, "default": "?" }), + "sampler": ("STRING", { "multiline": False, "dynamicPrompts": False, "default": "euler" }), + "scheduler": ("STRING", { "multiline": False, "dynamicPrompts": False, "default": "simple" }), + "steps": ("STRING", { "multiline": False, "dynamicPrompts": False, "default": "20" }), + "guidance": ("STRING", { "multiline": False, "dynamicPrompts": False, "default": "3.5" }), + "max_shift": ("STRING", { "multiline": False, "dynamicPrompts": False, "default": "" }), + "base_shift": ("STRING", { "multiline": False, "dynamicPrompts": False, "default": "" }), + "denoise": ("STRING", { "multiline": False, "dynamicPrompts": False, "default": "1.0" }), + }} + + RETURN_TYPES = ("LATENT","SAMPLER_PARAMS") + RETURN_NAMES = ("latent", "params") + FUNCTION = "execute" + CATEGORY = "essentials/sampling" + + def execute(self, model, conditioning, latent_image, seed, sampler, scheduler, steps, guidance, max_shift, base_shift, denoise): + import random + import time + from comfy_extras.nodes_custom_sampler import Noise_RandomNoise, BasicScheduler, BasicGuider, SamplerCustomAdvanced + from comfy_extras.nodes_latent import LatentBatch + from comfy_extras.nodes_model_advanced import ModelSamplingFlux, ModelSamplingAuraFlow + from node_helpers import conditioning_set_values + + is_schnell = model.model.model_type == comfy.model_base.ModelType.FLOW + + noise = seed.replace("\n", ",").split(",") + noise = [random.randint(0, 999999) if "?" in n else int(n) for n in noise] + if not noise: + noise = [random.randint(0, 999999)] + + if sampler == '*': + sampler = comfy.samplers.KSampler.SAMPLERS + elif sampler.startswith("!"): + sampler = sampler.replace("\n", ",").split(",") + sampler = [s.strip("! ") for s in sampler] + sampler = [s for s in comfy.samplers.KSampler.SAMPLERS if s not in sampler] + else: + sampler = sampler.replace("\n", ",").split(",") + sampler = [s.strip() for s in sampler if s.strip() in comfy.samplers.KSampler.SAMPLERS] + if not sampler: + sampler = ['ipndm'] + + if scheduler == '*': + scheduler = comfy.samplers.KSampler.SCHEDULERS + elif scheduler.startswith("!"): + scheduler = scheduler.replace("\n", ",").split(",") + scheduler = [s.strip("! 
") for s in scheduler] + scheduler = [s for s in comfy.samplers.KSampler.SCHEDULERS if s not in scheduler] + else: + scheduler = scheduler.replace("\n", ",").split(",") + scheduler = [s.strip() for s in scheduler] + scheduler = [s for s in scheduler if s in comfy.samplers.KSampler.SCHEDULERS] + if not scheduler: + scheduler = ['simple'] + + if steps == "": + if is_schnell: + steps = "4" + else: + steps = "20" + steps = parse_string_to_list(steps) + + denoise = "1.0" if denoise == "" else denoise + denoise = parse_string_to_list(denoise) + + guidance = "3.5" if guidance == "" else guidance + guidance = parse_string_to_list(guidance) + + if not is_schnell: + max_shift = "1.15" if max_shift == "" else max_shift + base_shift = "0.5" if base_shift == "" else base_shift + else: + max_shift = "0" + base_shift = "1.0" if base_shift == "" else base_shift + + max_shift = parse_string_to_list(max_shift) + base_shift = parse_string_to_list(base_shift) + + cond_text = None + if isinstance(conditioning, dict) and "encoded" in conditioning: + cond_text = conditioning["text"] + cond_encoded = conditioning["encoded"] + else: + cond_encoded = [conditioning] + + out_latent = None + out_params = [] + + basicschedueler = BasicScheduler() + basicguider = BasicGuider() + samplercustomadvanced = SamplerCustomAdvanced() + latentbatch = LatentBatch() + modelsamplingflux = ModelSamplingFlux() if not is_schnell else ModelSamplingAuraFlow() + width = latent_image["samples"].shape[3]*8 + height = latent_image["samples"].shape[2]*8 + + # count total number of samples + total_samples = len(cond_encoded) * len(noise) * len(max_shift) * len(base_shift) * len(guidance) * len(sampler) * len(scheduler) * len(steps) * len(denoise) + current_sample = 0 + if total_samples > 1: + pbar = ProgressBar(total_samples) + + for i in range(len(cond_encoded)): + conditioning = cond_encoded[i] + ct = cond_text[i] if cond_text else None + for n in noise: + randnoise = Noise_RandomNoise(n) + for ms in max_shift: + for bs in base_shift: + if is_schnell: + work_model = modelsamplingflux.patch_aura(model, bs)[0] + else: + work_model = modelsamplingflux.patch(model, ms, bs, width, height)[0] + for g in guidance: + cond = conditioning_set_values(conditioning, {"guidance": g}) + guider = basicguider.get_guider(work_model, cond)[0] + for s in sampler: + samplerobj = comfy.samplers.sampler_object(s) + for sc in scheduler: + for st in steps: + for d in denoise: + sigmas = basicschedueler.get_sigmas(work_model, sc, st, d)[0] + current_sample += 1 + logging.info(f"Sampling {current_sample}/{total_samples} with seed {n}, sampler {s}, scheduler {sc}, steps {st}, guidance {g}, max_shift {ms}, base_shift {bs}, denoise {d}") + start_time = time.time() + latent = samplercustomadvanced.sample(randnoise, guider, samplerobj, sigmas, latent_image)[1] + elapsed_time = time.time() - start_time + out_params.append({"time": elapsed_time, + "seed": n, + "width": width, + "height": height, + "sampler": s, + "scheduler": sc, + "steps": st, + "guidance": g, + "max_shift": ms, + "base_shift": bs, + "denoise": d, + "prompt": ct}) + + if out_latent is None: + out_latent = latent + else: + out_latent = latentbatch.batch(out_latent, latent)[0] + if total_samples > 1: + pbar.update(1) + + return (out_latent, out_params) + +class PlotParameters: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "images": ("IMAGE", ), + "params": ("SAMPLER_PARAMS", ), + "order_by": (["none", "time", "seed", "steps", "denoise", "sampler", "scheduler", "guidance", "max_shift", 
"base_shift"], ), + "cols_value": (["none", "time", "seed", "steps", "denoise", "sampler", "scheduler", "guidance", "max_shift", "base_shift"], ), + "cols_num": ("INT", {"default": -1, "min": -1, "max": 1024 }), + "add_prompt": (["false", "true", "excerpt"], ), + "add_params": (["false", "true", "changes only"], {"default": "true"}), + }} + + RETURN_TYPES = ("IMAGE", ) + FUNCTION = "execute" + CATEGORY = "essentials/sampling" + + def execute(self, images, params, order_by, cols_value, cols_num, add_prompt, add_params): + from PIL import Image, ImageDraw, ImageFont + import math + import textwrap + + if images.shape[0] != len(params): + raise ValueError("Number of images and number of parameters do not match.") + + _params = params.copy() + + if order_by != "none": + sorted_params = sorted(_params, key=lambda x: x[order_by]) + indices = [_params.index(item) for item in sorted_params] + images = images[torch.tensor(indices)] + _params = sorted_params + + if cols_value != "none" and cols_num > -1: + groups = {} + for p in _params: + value = p[cols_value] + if value not in groups: + groups[value] = [] + groups[value].append(p) + cols_num = len(groups) + + sorted_params = [] + groups = list(groups.values()) + for g in zip(*groups): + sorted_params.extend(g) + + indices = [_params.index(item) for item in sorted_params] + images = images[torch.tensor(indices)] + _params = sorted_params + elif cols_num == 0: + cols_num = int(math.sqrt(images.shape[0])) + cols_num = max(1, min(cols_num, 1024)) + + width = images.shape[2] + out_image = [] + + font = ImageFont.truetype(os.path.join(FONTS_DIR, 'ShareTechMono-Regular.ttf'), min(48, int(32*(width/1024)))) + text_padding = 3 + line_height = font.getmask('Q').getbbox()[3] + font.getmetrics()[1] + text_padding*2 + char_width = font.getbbox('M')[2]+1 # using monospace font + + if add_params == "changes only": + value_tracker = {} + for p in _params: + for key, value in p.items(): + if key != "time": + if key not in value_tracker: + value_tracker[key] = set() + value_tracker[key].add(value) + changing_keys = {key for key, values in value_tracker.items() if len(values) > 1 or key == "prompt"} + + result = [] + for p in _params: + changing_params = {key: value for key, value in p.items() if key in changing_keys} + result.append(changing_params) + + _params = result + + for (image, param) in zip(images, _params): + image = image.permute(2, 0, 1) + + if add_params != "false": + if add_params == "changes only": + text = "\n".join([f"{key}: {value}" for key, value in param.items() if key != "prompt"]) + else: + text = f"time: {param['time']:.2f}s, seed: {param['seed']}, steps: {param['steps']}, size: {param['width']}×{param['height']}\ndenoise: {param['denoise']}, sampler: {param['sampler']}, sched: {param['scheduler']}\nguidance: {param['guidance']}, max/base shift: {param['max_shift']}/{param['base_shift']}" + + lines = text.split("\n") + text_height = line_height * len(lines) + text_image = Image.new('RGB', (width, text_height), color=(0, 0, 0)) + + for i, line in enumerate(lines): + draw = ImageDraw.Draw(text_image) + draw.text((text_padding, i * line_height + text_padding), line, font=font, fill=(255, 255, 255)) + + text_image = T.ToTensor()(text_image).to(image.device) + image = torch.cat([image, text_image], 1) + + if 'prompt' in param and param['prompt'] and add_prompt != "false": + prompt = param['prompt'] + if add_prompt == "excerpt": + prompt = " ".join(param['prompt'].split()[:64]) + prompt += "..." 
+ + cols = math.ceil(width / char_width) + prompt_lines = textwrap.wrap(prompt, width=cols) + prompt_height = line_height * len(prompt_lines) + prompt_image = Image.new('RGB', (width, prompt_height), color=(0, 0, 0)) + + for i, line in enumerate(prompt_lines): + draw = ImageDraw.Draw(prompt_image) + draw.text((text_padding, i * line_height + text_padding), line, font=font, fill=(255, 255, 255)) + + prompt_image = T.ToTensor()(prompt_image).to(image.device) + image = torch.cat([image, prompt_image], 1) + + # a little cleanup + image = torch.nan_to_num(image, nan=0.0).clamp(0.0, 1.0) + out_image.append(image) + + # ensure all images have the same height + if add_prompt != "false" or add_params == "changes only": + max_height = max([image.shape[1] for image in out_image]) + out_image = [F.pad(image, (0, 0, 0, max_height - image.shape[1])) for image in out_image] + + out_image = torch.stack(out_image, 0).permute(0, 2, 3, 1) + + # merge images + if cols_num > -1: + cols = min(cols_num, out_image.shape[0]) + b, h, w, c = out_image.shape + rows = math.ceil(b / cols) + + # Pad the tensor if necessary + if b % cols != 0: + padding = cols - (b % cols) + out_image = F.pad(out_image, (0, 0, 0, 0, 0, 0, 0, padding)) + b = out_image.shape[0] + + # Reshape and transpose + out_image = out_image.reshape(rows, cols, h, w, c) + out_image = out_image.permute(0, 2, 1, 3, 4) + out_image = out_image.reshape(rows * h, cols * w, c).unsqueeze(0) + + """ + width = out_image.shape[2] + # add the title and notes on top + if title and export_labels: + title_font = ImageFont.truetype(os.path.join(FONTS_DIR, 'ShareTechMono-Regular.ttf'), 48) + title_width = title_font.getbbox(title)[2] + title_padding = 6 + title_line_height = title_font.getmask(title).getbbox()[3] + title_font.getmetrics()[1] + title_padding*2 + title_text_height = title_line_height + title_text_image = Image.new('RGB', (width, title_text_height), color=(0, 0, 0, 0)) + + draw = ImageDraw.Draw(title_text_image) + draw.text((width//2 - title_width//2, title_padding), title, font=title_font, fill=(255, 255, 255)) + + title_text_image = T.ToTensor()(title_text_image).unsqueeze(0).permute([0,2,3,1]).to(out_image.device) + out_image = torch.cat([title_text_image, out_image], 1) + """ + + return (out_image, ) + +SAMPLING_CLASS_MAPPINGS = { + "KSamplerVariationsStochastic+": KSamplerVariationsStochastic, + "KSamplerVariationsWithNoise+": KSamplerVariationsWithNoise, + "InjectLatentNoise+": InjectLatentNoise, + "FluxSamplerParams+": FluxSamplerParams, + "PlotParameters+": PlotParameters, + "TextEncodeForSamplerParams+": TextEncodeForSamplerParams, + "SamplerSelectHelper+": SamplerSelectHelper, + "SchedulerSelectHelper+": SchedulerSelectHelper, +} + +SAMPLING_NAME_MAPPINGS = { + "KSamplerVariationsStochastic+": "🔧 KSampler Stochastic Variations", + "KSamplerVariationsWithNoise+": "🔧 KSampler Variations with Noise Injection", + "InjectLatentNoise+": "🔧 Inject Latent Noise", + "FluxSamplerParams+": "🔧 Flux Sampler Parameters", + "PlotParameters+": "🔧 Plot Sampler Parameters", + "TextEncodeForSamplerParams+": "🔧Text Encode for Sampler Params", + "SamplerSelectHelper+": "🔧 Sampler Select Helper", + "SchedulerSelectHelper+": "🔧 Scheduler Select Helper", +} \ No newline at end of file diff --git a/ComfyUI_essentials/segmentation.py b/ComfyUI_essentials/segmentation.py new file mode 100644 index 0000000000000000000000000000000000000000..55f870144afb2f51da26fd1c3bd97c03ed4223b7 --- /dev/null +++ b/ComfyUI_essentials/segmentation.py @@ -0,0 +1,89 @@ +import torch +import 
torchvision.transforms.v2 as T +import torch.nn.functional as F +from .utils import expand_mask + +class LoadCLIPSegModels: + @classmethod + def INPUT_TYPES(s): + return { + "required": {}, + } + + RETURN_TYPES = ("CLIP_SEG",) + FUNCTION = "execute" + CATEGORY = "essentials/segmentation" + + def execute(self): + from transformers import CLIPSegProcessor, CLIPSegForImageSegmentation + processor = CLIPSegProcessor.from_pretrained("CIDAS/clipseg-rd64-refined") + model = CLIPSegForImageSegmentation.from_pretrained("CIDAS/clipseg-rd64-refined") + + return ((processor, model),) + +class ApplyCLIPSeg: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "clip_seg": ("CLIP_SEG",), + "image": ("IMAGE",), + "prompt": ("STRING", { "multiline": False, "default": "" }), + "threshold": ("FLOAT", { "default": 0.4, "min": 0.0, "max": 1.0, "step": 0.05 }), + "smooth": ("INT", { "default": 9, "min": 0, "max": 32, "step": 1 }), + "dilate": ("INT", { "default": 0, "min": -32, "max": 32, "step": 1 }), + "blur": ("INT", { "default": 0, "min": 0, "max": 64, "step": 1 }), + }, + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "execute" + CATEGORY = "essentials/segmentation" + + def execute(self, image, clip_seg, prompt, threshold, smooth, dilate, blur): + processor, model = clip_seg + + imagenp = image.mul(255).clamp(0, 255).byte().cpu().numpy() + + outputs = [] + for i in imagenp: + inputs = processor(text=prompt, images=[i], return_tensors="pt") + out = model(**inputs) + out = out.logits.unsqueeze(1) + out = torch.sigmoid(out[0][0]) + out = (out > threshold) + outputs.append(out) + + del imagenp + + outputs = torch.stack(outputs, dim=0) + + if smooth > 0: + if smooth % 2 == 0: + smooth += 1 + outputs = T.functional.gaussian_blur(outputs, smooth) + + outputs = outputs.float() + + if dilate != 0: + outputs = expand_mask(outputs, dilate, True) + + if blur > 0: + if blur % 2 == 0: + blur += 1 + outputs = T.functional.gaussian_blur(outputs, blur) + + # resize to original size + outputs = F.interpolate(outputs.unsqueeze(1), size=(image.shape[1], image.shape[2]), mode='bicubic').squeeze(1) + + return (outputs,) + +SEG_CLASS_MAPPINGS = { + "ApplyCLIPSeg+": ApplyCLIPSeg, + "LoadCLIPSegModels+": LoadCLIPSegModels, +} + +SEG_NAME_MAPPINGS = { + "ApplyCLIPSeg+": "🔧 Apply CLIPSeg", + "LoadCLIPSegModels+": "🔧 Load CLIPSeg Models", +} \ No newline at end of file diff --git a/ComfyUI_essentials/text.py b/ComfyUI_essentials/text.py new file mode 100644 index 0000000000000000000000000000000000000000..ac52c4edd848bfe5358290597e212be105fb8a51 --- /dev/null +++ b/ComfyUI_essentials/text.py @@ -0,0 +1,113 @@ +import os +import torch +from nodes import MAX_RESOLUTION +import torchvision.transforms.v2 as T +from .utils import FONTS_DIR + +class DrawText: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "text": ("STRING", { "multiline": True, "dynamicPrompts": True, "default": "Hello, World!" 
}), + "font": (sorted([f for f in os.listdir(FONTS_DIR) if f.endswith('.ttf') or f.endswith('.otf')]), ), + "size": ("INT", { "default": 56, "min": 1, "max": 9999, "step": 1 }), + "color": ("STRING", { "multiline": False, "default": "#FFFFFF" }), + "background_color": ("STRING", { "multiline": False, "default": "#00000000" }), + "shadow_distance": ("INT", { "default": 0, "min": 0, "max": 100, "step": 1 }), + "shadow_blur": ("INT", { "default": 0, "min": 0, "max": 100, "step": 1 }), + "shadow_color": ("STRING", { "multiline": False, "default": "#000000" }), + "horizontal_align": (["left", "center", "right"],), + "vertical_align": (["top", "center", "bottom"],), + "offset_x": ("INT", { "default": 0, "min": -MAX_RESOLUTION, "max": MAX_RESOLUTION, "step": 1 }), + "offset_y": ("INT", { "default": 0, "min": -MAX_RESOLUTION, "max": MAX_RESOLUTION, "step": 1 }), + "direction": (["ltr", "rtl"],), + }, + "optional": { + "img_composite": ("IMAGE",), + }, + } + + RETURN_TYPES = ("IMAGE", "MASK",) + FUNCTION = "execute" + CATEGORY = "essentials/text" + + def execute(self, text, font, size, color, background_color, shadow_distance, shadow_blur, shadow_color, horizontal_align, vertical_align, offset_x, offset_y, direction, img_composite=None): + from PIL import Image, ImageDraw, ImageFont, ImageColor, ImageFilter + + font = ImageFont.truetype(os.path.join(FONTS_DIR, font), size) + + lines = text.split("\n") + if direction == "rtl": + lines = [line[::-1] for line in lines] + + # Calculate the width and height of the text + text_width = max(font.getbbox(line)[2] for line in lines) + line_height = font.getmask(text).getbbox()[3] + font.getmetrics()[1] # add descent to height + text_height = line_height * len(lines) + + if img_composite is not None: + img_composite = T.ToPILImage()(img_composite.permute([0,3,1,2])[0]).convert('RGBA') + width = img_composite.width + height = img_composite.height + image = Image.new('RGBA', (width, height), color=background_color) + else: + width = text_width + height = text_height + background_color = ImageColor.getrgb(background_color) + image = Image.new('RGBA', (width + shadow_distance, height + shadow_distance), color=background_color) + + image_shadow = None + if shadow_distance > 0: + image_shadow = image.copy() + #image_shadow = Image.new('RGBA', (width + shadow_distance, height + shadow_distance), color=background_color) + + for i, line in enumerate(lines): + line_width = font.getbbox(line)[2] + #text_height =font.getbbox(line)[3] + if horizontal_align == "left": + x = 0 + elif horizontal_align == "center": + x = (width - line_width) / 2 + elif horizontal_align == "right": + x = width - line_width + + if vertical_align == "top": + y = 0 + elif vertical_align == "center": + y = (height - text_height) / 2 + elif vertical_align == "bottom": + y = height - text_height + + x += offset_x + y += i * line_height + offset_y + + draw = ImageDraw.Draw(image) + draw.text((x, y), line, font=font, fill=color) + + if image_shadow is not None: + draw = ImageDraw.Draw(image_shadow) + draw.text((x + shadow_distance, y + shadow_distance), line, font=font, fill=shadow_color) + + if image_shadow is not None: + image_shadow = image_shadow.filter(ImageFilter.GaussianBlur(shadow_blur)) + image = Image.alpha_composite(image_shadow, image) + + #image = T.ToTensor()(image).unsqueeze(0).permute([0,2,3,1]) + mask = T.ToTensor()(image).unsqueeze(0).permute([0,2,3,1]) + mask = mask[:, :, :, 3] if mask.shape[3] == 4 else torch.ones_like(mask[:, :, :, 0]) + + if img_composite is not None: + image = 
Image.alpha_composite(img_composite, image) + + image = T.ToTensor()(image).unsqueeze(0).permute([0,2,3,1]) + + return (image[:, :, :, :3], mask,) + +TEXT_CLASS_MAPPINGS = { + "DrawText+": DrawText, +} + +TEXT_NAME_MAPPINGS = { + "DrawText+": "🔧 Draw Text", +} \ No newline at end of file diff --git a/ComfyUI_essentials/utils.py b/ComfyUI_essentials/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..aa1b1146c9d03e67c0b2ca313c8ff0dd66bcfaa7 --- /dev/null +++ b/ComfyUI_essentials/utils.py @@ -0,0 +1,89 @@ +import torch +import numpy as np +import scipy +import os +#import re +from pathlib import Path +import folder_paths + +FONTS_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "fonts") + +SCRIPT_DIR = Path(__file__).parent +folder_paths.add_model_folder_path("luts", (SCRIPT_DIR / "luts").as_posix()) +folder_paths.add_model_folder_path( + "luts", (Path(folder_paths.models_dir) / "luts").as_posix() +) + +# from https://github.com/pythongosssss/ComfyUI-Custom-Scripts +class AnyType(str): + def __ne__(self, __value: object) -> bool: + return False + +def min_(tensor_list): + # return the element-wise min of the tensor list. + x = torch.stack(tensor_list) + mn = x.min(axis=0)[0] + return torch.clamp(mn, min=0) + +def max_(tensor_list): + # return the element-wise max of the tensor list. + x = torch.stack(tensor_list) + mx = x.max(axis=0)[0] + return torch.clamp(mx, max=1) + +def expand_mask(mask, expand, tapered_corners): + c = 0 if tapered_corners else 1 + kernel = np.array([[c, 1, c], + [1, 1, 1], + [c, 1, c]]) + mask = mask.reshape((-1, mask.shape[-2], mask.shape[-1])) + out = [] + for m in mask: + output = m.numpy() + for _ in range(abs(expand)): + if expand < 0: + output = scipy.ndimage.grey_erosion(output, footprint=kernel) + else: + output = scipy.ndimage.grey_dilation(output, footprint=kernel) + output = torch.from_numpy(output) + out.append(output) + + return torch.stack(out, dim=0) + +def parse_string_to_list(s): + elements = s.split(',') + result = [] + + def parse_number(s): + try: + if '.' in s: + return float(s) + else: + return int(s) + except ValueError: + return 0 + + def decimal_places(s): + if '.' in s: + return len(s.split('.')[1]) + return 0 + + for element in elements: + element = element.strip() + if '...' 
in element: + start, rest = element.split('...') + end, step = rest.split('+') + decimals = decimal_places(step) + start = parse_number(start) + end = parse_number(end) + step = parse_number(step) + current = start + if (start > end and step > 0) or (start < end and step < 0): + step = -step + while current <= end: + result.append(round(current, decimals)) + current += step + else: + result.append(round(parse_number(element), decimal_places(element))) + + return result \ No newline at end of file diff --git a/ComfyUI_essentials/workflow_all_nodes.json b/ComfyUI_essentials/workflow_all_nodes.json new file mode 100644 index 0000000000000000000000000000000000000000..fab4c98929e12b9f7ac35bd433ffa55af81480b2 --- /dev/null +++ b/ComfyUI_essentials/workflow_all_nodes.json @@ -0,0 +1,994 @@ +{ + "last_node_id": 42, + "last_link_id": 61, + "nodes": [ + { + "id": 9, + "type": "ConsoleDebug+", + "pos": [ + 720, + 140 + ], + "size": { + "0": 210, + "1": 60 + }, + "flags": {}, + "order": 12, + "mode": 0, + "inputs": [ + { + "name": "value", + "type": "*", + "link": 3 + } + ], + "properties": { + "Node name for S&R": "ConsoleDebug+" + }, + "widgets_values": [ + "Height:" + ] + }, + { + "id": 28, + "type": "PreviewImage", + "pos": [ + 860, + 1180 + ], + "size": { + "0": 210, + "1": 246 + }, + "flags": {}, + "order": 17, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 23 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 12, + "type": "PreviewImage", + "pos": [ + 860, + 580 + ], + "size": { + "0": 210, + "1": 246 + }, + "flags": {}, + "order": 15, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 11 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 14, + "type": "PreviewImage", + "pos": [ + 860, + 880 + ], + "size": { + "0": 210, + "1": 246 + }, + "flags": {}, + "order": 16, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 13 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 18, + "type": "MaskPreview+", + "pos": [ + 2100, + 90 + ], + "size": { + "0": 210, + "1": 246 + }, + "flags": {}, + "order": 20, + "mode": 0, + "inputs": [ + { + "name": "mask", + "type": "MASK", + "link": 19 + } + ], + "properties": { + "Node name for S&R": "MaskPreview+" + } + }, + { + "id": 1, + "type": "GetImageSize+", + "pos": [ + 450, + 80 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 2, + "mode": 0, + "inputs": [ + { + "name": "image", + "type": "IMAGE", + "link": 1 + } + ], + "outputs": [ + { + "name": "width", + "type": "INT", + "links": [ + 2 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "height", + "type": "INT", + "links": [ + 3 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "GetImageSize+" + } + }, + { + "id": 8, + "type": "ConsoleDebug+", + "pos": [ + 720, + 40 + ], + "size": { + "0": 210, + "1": 60 + }, + "flags": {}, + "order": 11, + "mode": 0, + "inputs": [ + { + "name": "value", + "type": "*", + "link": 2 + } + ], + "properties": { + "Node name for S&R": "ConsoleDebug+" + }, + "widgets_values": [ + "Width:" + ] + }, + { + "id": 10, + "type": "PreviewImage", + "pos": [ + 860, + 280 + ], + "size": { + "0": 210, + "1": 246 + }, + "flags": {}, + "order": 13, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 9 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 36, + "type": "SimpleMath+", 
+ "pos": [ + 1650, + 780 + ], + "size": { + "0": 210, + "1": 80 + }, + "flags": {}, + "order": 14, + "mode": 0, + "inputs": [ + { + "name": "a", + "type": "INT,FLOAT", + "link": 44 + }, + { + "name": "b", + "type": "INT,FLOAT", + "link": 45 + } + ], + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 46 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "FLOAT", + "type": "FLOAT", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "SimpleMath+" + }, + "widgets_values": [ + "a*b" + ] + }, + { + "id": 23, + "type": "ConsoleDebug+", + "pos": [ + 1920, + 780 + ], + "size": { + "0": 210, + "1": 60 + }, + "flags": {}, + "order": 22, + "mode": 0, + "inputs": [ + { + "name": "value", + "type": "*", + "link": 46 + } + ], + "properties": { + "Node name for S&R": "ConsoleDebug+" + }, + "widgets_values": [ + "Value:" + ] + }, + { + "id": 2, + "type": "ImageResize+", + "pos": [ + 430, + 340 + ], + "size": { + "0": 310, + "1": 170 + }, + "flags": {}, + "order": 3, + "mode": 0, + "inputs": [ + { + "name": "image", + "type": "IMAGE", + "link": 4 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 9 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "width", + "type": "INT", + "links": [ + 44 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "height", + "type": "INT", + "links": [ + 45 + ], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "ImageResize+" + }, + "widgets_values": [ + 256, + 64, + "lanczos", + true + ] + }, + { + "id": 4, + "type": "ImageFlip+", + "pos": [ + 430, + 800 + ], + "size": { + "0": 310, + "1": 60 + }, + "flags": {}, + "order": 4, + "mode": 0, + "inputs": [ + { + "name": "image", + "type": "IMAGE", + "link": 6 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 11 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "ImageFlip+" + }, + "widgets_values": [ + "xy" + ] + }, + { + "id": 6, + "type": "ImagePosterize+", + "pos": [ + 430, + 1000 + ], + "size": { + "0": 310, + "1": 60 + }, + "flags": {}, + "order": 5, + "mode": 0, + "inputs": [ + { + "name": "image", + "type": "IMAGE", + "link": 8 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 13 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "ImagePosterize+" + }, + "widgets_values": [ + 0.5 + ] + }, + { + "id": 27, + "type": "ImageCASharpening+", + "pos": [ + 430, + 1110 + ], + "size": { + "0": 310.79998779296875, + "1": 60 + }, + "flags": {}, + "order": 6, + "mode": 0, + "inputs": [ + { + "name": "image", + "type": "IMAGE", + "link": 22 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 23 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "ImageCASharpening+" + }, + "widgets_values": [ + 0.8 + ] + }, + { + "id": 15, + "type": "MaskBlur+", + "pos": [ + 1690, + 130 + ], + "size": { + "0": 310, + "1": 82 + }, + "flags": {}, + "order": 9, + "mode": 0, + "inputs": [ + { + "name": "mask", + "type": "MASK", + "link": 14 + } + ], + "outputs": [ + { + "name": "MASK", + "type": "MASK", + "links": [ + 19 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "MaskBlur+" + }, + "widgets_values": [ + 45, + 28.5 + ] + }, + { + "id": 16, + "type": "MaskFlip+", + "pos": [ + 1690, + 270 + ], + "size": { + "0": 310, + "1": 60 + }, + "flags": {}, + "order": 10, + "mode": 0, + "inputs": [ + { + "name": 
"mask", + "type": "MASK", + "link": 15 + } + ], + "outputs": [ + { + "name": "MASK", + "type": "MASK", + "links": [ + 18 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "MaskFlip+" + }, + "widgets_values": [ + "xy" + ] + }, + { + "id": 13, + "type": "PreviewImage", + "pos": [ + 1100, + 760 + ], + "size": { + "0": 210, + "1": 246 + }, + "flags": {}, + "order": 18, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 49 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 37, + "type": "ImageDesaturate+", + "pos": [ + 500, + 920 + ], + "size": { + "0": 190, + "1": 30 + }, + "flags": {}, + "order": 7, + "mode": 0, + "inputs": [ + { + "name": "image", + "type": "IMAGE", + "link": 48 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 49 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "ImageDesaturate+" + } + }, + { + "id": 7, + "type": "LoadImage", + "pos": [ + -90, + 650 + ], + "size": { + "0": 315, + "1": 314 + }, + "flags": {}, + "order": 0, + "mode": 0, + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 1, + 4, + 6, + 8, + 22, + 48, + 57 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "MASK", + "type": "MASK", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "LoadImage" + }, + "widgets_values": [ + "venere.jpg", + "image" + ] + }, + { + "id": 11, + "type": "PreviewImage", + "pos": [ + 1100, + 450 + ], + "size": { + "0": 210, + "1": 246 + }, + "flags": {}, + "order": 19, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 58 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 40, + "type": "ImageCrop+", + "pos": [ + 430, + 560 + ], + "size": { + "0": 310, + "1": 194 + }, + "flags": {}, + "order": 8, + "mode": 0, + "inputs": [ + { + "name": "image", + "type": "IMAGE", + "link": 57 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 58 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "x", + "type": "INT", + "links": null, + "shape": 3 + }, + { + "name": "y", + "type": "INT", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "ImageCrop+" + }, + "widgets_values": [ + 256, + 256, + "center", + 0, + 0 + ] + }, + { + "id": 20, + "type": "LoadImageMask", + "pos": [ + 1400, + 260 + ], + "size": { + "0": 220.70516967773438, + "1": 318 + }, + "flags": {}, + "order": 1, + "mode": 0, + "outputs": [ + { + "name": "MASK", + "type": "MASK", + "links": [ + 14, + 15 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "LoadImageMask" + }, + "widgets_values": [ + "cwf_inpaint_example_mask.png", + "alpha", + "image" + ] + }, + { + "id": 21, + "type": "MaskPreview+", + "pos": [ + 2100, + 380 + ], + "size": { + "0": 210, + "1": 246 + }, + "flags": {}, + "order": 21, + "mode": 0, + "inputs": [ + { + "name": "mask", + "type": "MASK", + "link": 18 + } + ], + "properties": { + "Node name for S&R": "MaskPreview+" + } + } + ], + "links": [ + [ + 1, + 7, + 0, + 1, + 0, + "IMAGE" + ], + [ + 2, + 1, + 0, + 8, + 0, + "*" + ], + [ + 3, + 1, + 1, + 9, + 0, + "*" + ], + [ + 4, + 7, + 0, + 2, + 0, + "IMAGE" + ], + [ + 6, + 7, + 0, + 4, + 0, + "IMAGE" + ], + [ + 8, + 7, + 0, + 6, + 0, + "IMAGE" + ], + [ + 9, + 2, + 0, + 10, + 0, + "IMAGE" + ], + [ + 11, + 4, + 0, + 12, + 0, + "IMAGE" + ], + [ + 13, + 6, + 0, + 14, + 0, + "IMAGE" + ], + [ + 14, + 
20, + 0, + 15, + 0, + "MASK" + ], + [ + 15, + 20, + 0, + 16, + 0, + "MASK" + ], + [ + 18, + 16, + 0, + 21, + 0, + "MASK" + ], + [ + 19, + 15, + 0, + 18, + 0, + "MASK" + ], + [ + 22, + 7, + 0, + 27, + 0, + "IMAGE" + ], + [ + 23, + 27, + 0, + 28, + 0, + "IMAGE" + ], + [ + 44, + 2, + 1, + 36, + 0, + "INT,FLOAT" + ], + [ + 45, + 2, + 2, + 36, + 1, + "INT,FLOAT" + ], + [ + 46, + 36, + 0, + 23, + 0, + "*" + ], + [ + 48, + 7, + 0, + 37, + 0, + "IMAGE" + ], + [ + 49, + 37, + 0, + 13, + 0, + "IMAGE" + ], + [ + 57, + 7, + 0, + 40, + 0, + "IMAGE" + ], + [ + 58, + 40, + 0, + 11, + 0, + "IMAGE" + ] + ], + "groups": [], + "config": {}, + "extra": {}, + "version": 0.4 +} \ No newline at end of file